# === sdoran35/hate-to-hugs :: venv/lib/python3.6/site-packages/nltk/test/gluesemantics_malt_fixt.py (MIT) ===
# -*- coding: utf-8 -*-
from __future__ import absolute_import


def setup_module(module):
    from nose import SkipTest
    from nltk.parse.malt import MaltParser

    try:
        depparser = MaltParser('maltparser-1.7.2')
    except LookupError:
        raise SkipTest("MaltParser is not available")
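# A rough pytest equivalent of the nose fixture above (a sketch; the
# module path comes from the imports above, everything else is
# illustrative):
import pytest

def setup_module(module):
    # importorskip plays the role of nose's SkipTest for a missing import
    malt = pytest.importorskip("nltk.parse.malt")
    try:
        malt.MaltParser('maltparser-1.7.2')
    except LookupError:
        pytest.skip("MaltParser is not available")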
# === heschlie/taskbuster :: functional_tests/test_allauth.py (MIT) ===
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.utils.translation import activate
class TestGoogleLogin(StaticLiveServerTestCase):
fixtures = ['allauth_fixture']
def setUp(self):
self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)
self.browser.wait = WebDriverWait(self.browser, 10)
activate('en')
def tearDown(self):
self.browser.quit()
def get_element_by_id(self, element_id):
return self.browser.wait.until(EC.presence_of_element_located(
(By.ID, element_id)))
def get_button_by_id(self, element_id):
return self.browser.wait.until(EC.element_to_be_clickable(
(By.ID, element_id)))
def get_full_url(self, namespace):
return self.live_server_url + reverse(namespace)
def user_login(self):
import json
with open('taskbuster/fixtures/google_user.json') as f:
credentials = json.loads(f.read())
self.get_element_by_id('Email').send_keys(credentials['Email'])
self.get_button_by_id('next').click()
self.get_element_by_id('Passwd').send_keys(credentials['Passwd'])
for btn in ['signIn', 'submit_approve_access']:
self.get_button_by_id(btn).click()
return
def test_google_login(self):
self.browser.get(self.get_full_url('home'))
google_login = self.get_element_by_id('google_login')
with self.assertRaises(TimeoutException):
self.get_element_by_id('logout')
self.assertEqual(google_login.get_attribute('href'),
self.live_server_url + '/accounts/google/login')
google_login.click()
self.user_login()
with self.assertRaises(TimeoutException):
self.get_element_by_id('google_login')
google_logout = self.get_element_by_id('logout')
google_logout.click()
google_login = self.get_element_by_id('google_login')
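# The two wait helpers above wrap the same WebDriverWait pattern; a
# standalone sketch (assumes a local Firefox/geckodriver setup):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def wait_for_element(browser, element_id, timeout=10):
    # Blocks until the element is present in the DOM, then returns it;
    # raises TimeoutException otherwise.
    return WebDriverWait(browser, timeout).until(
        EC.presence_of_element_located((By.ID, element_id)))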
# === hmpf/nav :: tests/unittests/general/logengine_test.py (GPL-3.0) ===
import datetime
import pytest
from mock import Mock
from unittest import TestCase
import random
import logging
logging.raiseExceptions = False
from nav import logengine
now = datetime.datetime.now()
@pytest.fixture
def loglines():
return """
Oct 28 13:15:06 10.0.42.103 1030: Oct 28 13:15:05.310 CEST: %LINEPROTO-5-UPDOWN: Line protocol on Interface GigabitEthernet1/0/29, changed state to up
Oct 28 13:15:21 10.0.42.103 1031: Oct 28 13:15:20.191 CEST: %EC-5-COMPATIBLE: Gi1/0/30 is compatible with port-channel members
Oct 28 13:15:21 10.0.42.103 1032: Oct 28 13:15:21.181 CEST: %LINEPROTO-5-UPDOWN: Line protocol on Interface GigabitEthernet1/0/29, changed state to down
Oct 28 13:15:23 10.0.42.103 1033: Oct 28 13:15:22.196 CEST: %LINK-3-UPDOWN: Interface GigabitEthernet1/0/29, changed state to down
Oct 28 13:15:27 10.0.42.103 1034: Oct 28 13:15:26.390 CEST: %LINK-3-UPDOWN: Interface GigabitEthernet1/0/29, changed state to up
Oct 28 13:15:28 10.0.80.11 877630: Oct 28 13:15:27.383 CEST: %SEC-6-IPACCESSLOGP: list hpc-v2 denied udp 87.202.31.111(59646) (TenGigabitEthernet3/3 0022.bd37.c800) -> 128.39.62.195(45134), 1 packet
Oct 28 13:15:28 10.0.42.103 1035: Oct 28 13:15:27.388 CEST: %EC-5-CANNOT_BUNDLE2: Gi1/0/29 is not compatible with Gi1/0/30 and will be suspended (speed of Gi1/0/29 is 1000M, Gi1/0/30 is 100M)
Oct 28 13:15:40 10.0.42.103 1036: Oct 28 13:15:39.769 CEST: %EC-5-COMPATIBLE: Gi1/0/29 is compatible with port-channel members
Oct 28 13:15:42 10.0.42.103 1037: Oct 28 13:15:41.774 CEST: %LINK-3-UPDOWN: Interface GigabitEthernet1/0/30, changed state to down
Oct 28 13:15:44 10.0.42.103 1038: Oct 28 13:15:43.468 CEST: %SPANTREE-5-TOPOTRAP: Topology Change Trap for vlan 1
Oct 28 13:15:44 10.0.42.103 1039: Oct 28 13:15:44.382 CEST: %LINEPROTO-5-UPDOWN: Line protocol on Interface GigabitEthernet1/0/29, changed state to up
Oct 28 13:15:46 10.0.42.103 1040: Oct 28 13:15:45.372 CEST: %LINK-3-UPDOWN: Interface Port-channel10, changed state to up
Oct 28 13:15:46 10.0.42.103 1041: Oct 28 13:15:46.379 CEST: %LINEPROTO-5-UPDOWN: Line protocol on Interface Port-channel10, changed state to up
Oct 28 13:15:52 10.0.42.103 1042: Oct 28 13:15:51.915 CEST: %LINK-3-UPDOWN: Interface GigabitEthernet1/0/30, changed state to up
Oct 28 13:15:52 10.0.128.13 71781: *Oct 28 2010 12:08:49 CET: %MV64340_ETHERNET-5-LATECOLLISION: GigabitEthernet0/1, late collision error
Oct 28 13:15:58 10.0.42.103 1043: Oct 28 13:15:57.560 CEST: %LINEPROTO-5-UPDOWN: Line protocol on Interface GigabitEthernet1/0/30, changed state to up
""".strip().split(
"\n"
)
def test_parse_without_exceptions(loglines):
for line in loglines:
msg = logengine.create_message(line)
assert msg, "unparseable: %s" % line
assert msg.facility is not None, "Message has no facility: {0!r}\n{1!r}".format(
line, vars(msg)
)
def test_insert(loglines):
for line in loglines:
database = Mock('cursor')
database.fetchone = lambda: [random.randint(1, 10000)]
        def execute(sql, params=()):
return sql % params
database.execute = execute
message = logengine.create_message(line)
assert message, "unparseable: %s" % line
logengine.insert_message(message, database, {}, {}, {}, {}, {}, {})
def test_swallow_generic_exceptions():
@logengine.swallow_all_but_db_exceptions
def raiser():
raise Exception("This is an ex-parrot")
raiser()
def test_raise_db_exception():
    from nav.db import driver
@logengine.swallow_all_but_db_exceptions
def raiser():
raise driver.Error("This is an ex-database")
with pytest.raises(driver.Error):
raiser()
def test_non_failing_function_should_run_fine():
@logengine.swallow_all_but_db_exceptions
def nonraiser(input):
return input
value = 'foo'
assert nonraiser(value) == value
class TestParsing(object):
message = "Oct 28 13:15:58 10.0.42.103 1043: Oct 28 13:15:57.560 CEST: %LINEPROTO-5-UPDOWN: Line protocol on Interface GigabitEthernet1/0/30, changed state to up"
timestamp = datetime.datetime(now.year, 10, 28, 13, 15, 57)
facility = 'LINEPROTO'
priority = 5
mnemonic = 'UPDOWN'
description = (
"Line protocol on Interface GigabitEthernet1/0/30," " changed state to up"
)
def test_should_parse_without_exception(self):
assert logengine.create_message(self.message)
def test_should_parse_timestamp_correctly(self):
msg = logengine.create_message(self.message)
assert msg.time == self.timestamp
def test_should_parse_facility_correctly(self):
msg = logengine.create_message(self.message)
assert msg.facility == self.facility
def test_should_parse_priority_correctly(self):
msg = logengine.create_message(self.message)
assert msg.priorityid == self.priority
def test_should_parse_mnemonic_correctly(self):
msg = logengine.create_message(self.message)
assert msg.mnemonic == self.mnemonic
def test_should_parse_description_correctly(self):
msg = logengine.create_message(self.message)
assert msg.description == self.description
class TestParseMessageWithStrangeGarbage(TestParsing):
message = "Mar 25 10:54:25 somedevice 72: AP:000b.adc0.ffee: *Mar 25 10:15:51.666: %LINK-3-UPDOWN: Interface Dot11Radio0, changed state to up"
timestamp = datetime.datetime(now.year, 3, 25, 10, 15, 51)
facility = 'LINK'
priority = 3
mnemonic = 'UPDOWN'
description = "Interface Dot11Radio0, changed state to up"
class TestParseMessageEndingWithColon(TestParsing):
"""Regression test for issue LP#720024"""
message = "Feb 16 11:55:08 10.0.1.15 22877425: Feb 16 11:55:09.436 MET: %HA_EM-6-LOG: on_high_cpu: CPU utilization is over 80%:"
timestamp = datetime.datetime(now.year, 2, 16, 11, 55, 9)
facility = 'HA_EM'
priority = 6
mnemonic = 'LOG'
description = "on_high_cpu: CPU utilization is over 80%:"
class TestParseMessageWithNoOriginTimestamp(TestParsing):
message = "Nov 13 11:21:02 10.0.1.15 : %ASA-3-321007: System is low on free memory blocks of size 8192 (0 CNT out of 250 MAX)"
timestamp = datetime.datetime(now.year, 11, 13, 11, 21, 2)
facility = 'ASA'
priority = 3
mnemonic = '321007'
description = (
"System is low on free memory blocks of size 8192 (0 CNT out of 250 MAX)"
)
non_conforming_lines = [
"Dec 20 15:16:04 10.0.101.179 SNTP[141365768]: sntp_client.c(1917) 2945474 %% SNTP: system clock synchronized on THU DEC 20 15:16:04 2012 UTC. Indicates that SNTP has successfully synchronized the time of the box with the server.",
"Dec 20 16:23:37 10.0.3.15 2605010: CPU utilization for five seconds: 86%/14%; one minute: 33%; five minutes: 31%",
"Jan 29 10:21:26 10.0.129.61 %LINK-W-Down: e30",
"pr 18 05:12:59.716 CEST: %SISF-6-ENTRY_CHANGED: Entry changed A=FE80::10F1:F7E9:6EDF:2129 V=204 I=Gi0/8 P=0005 M=",
]
@pytest.mark.parametrize("line", non_conforming_lines)
def test_non_conforming_lines(line):
msg = logengine.create_message(line)
assert msg is None, "line shouldn't be parseable: %s" % line
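# Usage sketch for the parser exercised above (assumes the nav package
# is importable; the expected values follow TestParsing above):
from nav import logengine

line = ("Oct 28 13:15:58 10.0.42.103 1043: Oct 28 13:15:57.560 CEST: "
        "%LINEPROTO-5-UPDOWN: Line protocol on Interface "
        "GigabitEthernet1/0/30, changed state to up")
msg = logengine.create_message(line)
print(msg.facility, msg.priorityid, msg.mnemonic)  # LINEPROTO 5 UPDOWN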
# === pytorch/vision :: torchvision/models/quantization/shufflenetv2.py (BSD-3-Clause) ===
from typing import Any, Optional
import torch
import torch.nn as nn
from torch import Tensor
from torchvision.models import shufflenetv2
from ..._internally_replaced_utils import load_state_dict_from_url
from .utils import _fuse_modules, _replace_relu, quantize_model
__all__ = [
"Quant | izableShuffleNetV2",
"shufflenet_v2_x0_5",
"shufflenet_v2_x1_0",
]
quant_model_urls = {
"shufflenetv2_x0.5_fbgemm": "https://download.pytorch. | org/models/quantized/shufflenetv2_x0.5_fbgemm-00845098.pth",
"shufflenetv2_x1.0_fbgemm": "https://download.pytorch.org/models/quantized/shufflenetv2_x1_fbgemm-db332c57.pth",
}
class QuantizableInvertedResidual(shufflenetv2.InvertedResidual):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.cat = nn.quantized.FloatFunctional()
def forward(self, x: Tensor) -> Tensor:
if self.stride == 1:
x1, x2 = x.chunk(2, dim=1)
out = self.cat.cat([x1, self.branch2(x2)], dim=1)
else:
out = self.cat.cat([self.branch1(x), self.branch2(x)], dim=1)
out = shufflenetv2.channel_shuffle(out, 2)
return out
class QuantizableShuffleNetV2(shufflenetv2.ShuffleNetV2):
# TODO https://github.com/pytorch/vision/pull/4232#pullrequestreview-730461659
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, inverted_residual=QuantizableInvertedResidual, **kwargs) # type: ignore[misc]
self.quant = torch.ao.quantization.QuantStub()
self.dequant = torch.ao.quantization.DeQuantStub()
def forward(self, x: Tensor) -> Tensor:
x = self.quant(x)
x = self._forward_impl(x)
x = self.dequant(x)
return x
def fuse_model(self, is_qat: Optional[bool] = None) -> None:
r"""Fuse conv/bn/relu modules in shufflenetv2 model
Fuse conv+bn+relu/ conv+relu/conv+bn modules to prepare for quantization.
Model is modified in place. Note that this operation does not change numerics
and the model after modification is in floating point
"""
for name, m in self._modules.items():
if name in ["conv1", "conv5"] and m is not None:
_fuse_modules(m, [["0", "1", "2"]], is_qat, inplace=True)
for m in self.modules():
if type(m) is QuantizableInvertedResidual:
if len(m.branch1._modules.items()) > 0:
_fuse_modules(m.branch1, [["0", "1"], ["2", "3", "4"]], is_qat, inplace=True)
_fuse_modules(
m.branch2,
[["0", "1", "2"], ["3", "4"], ["5", "6", "7"]],
is_qat,
inplace=True,
)
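# Sketch of the eager-mode post-training quantization flow that
# fuse_model() above feeds into (API names are from torch.ao.quantization;
# the calibration input is a placeholder, real use needs representative data):
import torch

def quantize_sketch(model: "QuantizableShuffleNetV2") -> "QuantizableShuffleNetV2":
    model.eval()
    model.fuse_model(is_qat=False)
    model.qconfig = torch.ao.quantization.get_default_qconfig("fbgemm")
    torch.ao.quantization.prepare(model, inplace=True)
    model(torch.rand(1, 3, 224, 224))  # calibrate on representative data
    torch.ao.quantization.convert(model, inplace=True)
    return model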
def _shufflenetv2(
arch: str,
pretrained: bool,
progress: bool,
quantize: bool,
*args: Any,
**kwargs: Any,
) -> QuantizableShuffleNetV2:
model = QuantizableShuffleNetV2(*args, **kwargs)
_replace_relu(model)
if quantize:
# TODO use pretrained as a string to specify the backend
backend = "fbgemm"
quantize_model(model, backend)
else:
assert pretrained in [True, False]
if pretrained:
model_url: Optional[str] = None
if quantize:
model_url = quant_model_urls[arch + "_" + backend]
else:
model_url = shufflenetv2.model_urls[arch]
state_dict = load_state_dict_from_url(model_url, progress=progress)
model.load_state_dict(state_dict)
return model
def shufflenet_v2_x0_5(
pretrained: bool = False,
progress: bool = True,
quantize: bool = False,
**kwargs: Any,
) -> QuantizableShuffleNetV2:
"""
Constructs a ShuffleNetV2 with 0.5x output channels, as described in
`"ShuffleNet V2: Practical Guidelines for Efficient CNN Architecture Design"
<https://arxiv.org/abs/1807.11164>`_.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
quantize (bool): If True, return a quantized version of the model
"""
return _shufflenetv2(
"shufflenetv2_x0.5", pretrained, progress, quantize, [4, 8, 4], [24, 48, 96, 192, 1024], **kwargs
)
def shufflenet_v2_x1_0(
pretrained: bool = False,
progress: bool = True,
quantize: bool = False,
**kwargs: Any,
) -> QuantizableShuffleNetV2:
"""
Constructs a ShuffleNetV2 with 1.0x output channels, as described in
`"ShuffleNet V2: Practical Guidelines for Efficient CNN Architecture Design"
<https://arxiv.org/abs/1807.11164>`_.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
quantize (bool): If True, return a quantized version of the model
"""
return _shufflenetv2(
"shufflenetv2_x1.0", pretrained, progress, quantize, [4, 8, 4], [24, 116, 232, 464, 1024], **kwargs
)
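# Hypothetical end-to-end use of the builders above (downloading the
# quantized weights needs network access and the fbgemm backend):
import torch

model = shufflenet_v2_x1_0(pretrained=True, quantize=True)
model.eval()
with torch.no_grad():
    logits = model(torch.rand(1, 3, 224, 224))
print(logits.shape)  # torch.Size([1, 1000])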
# === tobspr/panda3d :: direct/src/showbase/ShowBase.py (BSD-3-Clause) ===
""" This module contains ShowBase, an application framework responsible
for opening a graphical display, setting up input devices and creating
the scene graph. """
__all__ = ['ShowBase', 'WindowControls']
# This module redefines the builtin import function with one
# that prints out every import it does in a hierarchical form
# Annoying and very noisy, but sometimes useful
#import VerboseImport
from panda3d.core import *
from panda3d.direct import get_config_showbase, throw_new_frame, init_app_for_gui
from panda3d.direct import storeAccessibilityShortcutKeys, allowAccessibilityShortcutKeys
# Register the extension methods for NodePath.
from direct.extensions_native import NodePath_extensions
# This needs to be available early for DirectGUI imports
import sys
if sys.version_info >= (3, 0):
import builtins
else:
import __builtin__ as builtins
builtins.config = get_config_showbase()
from direct.directnotify.DirectNotifyGlobal import directNotify, giveNotify
from .MessengerGlobal import messenger
from .BulletinBoardGlobal import bulletinBoard
from direct.task.TaskManagerGlobal import taskMgr
from .JobManagerGlobal import jobMgr
from .EventManagerGlobal import eventMgr
#from PythonUtil import *
from direct.interval import IntervalManager
from direct.showbase.BufferViewer import BufferViewer
from direct.task import Task
from . import Loader
import time
import atexit
import importlib
from direct.showbase import ExceptionVarDump
from . import DirectObject
from . import SfxPlayer
if __debug__:
from direct.showbase import GarbageReport
from direct.directutil import DeltaProfiler
from . import OnScreenDebug
from . import AppRunnerGlobal
def legacyRun():
assert builtins.base.notify.warning("run() is deprecated, use base.run() instead")
builtins.base.run()
@atexit.register
def exitfunc():
if getattr(builtins, 'base', None) is not None:
builtins.base.destroy()
# Now ShowBase is a DirectObject. We need this so ShowBase can hang
# hooks on messages, particularly on window-event. This doesn't
# *seem* to cause anyone any problems.
class ShowBase(DirectObject.DirectObject):
config = get_config_showbase()
notify = directNotify.newCategory("ShowBase")
def __init__(self, fStartDirect = True, windowType = None):
self.__dev__ = self.config.GetBool('want-dev', __debug__)
builtins.__dev__ = self.__dev__
logStackDump = (self.config.GetBool('log-stack-dump', False) or
self.config.GetBool('client-log-stack-dump', False))
uploadStackDump = self.config.GetBool('upload-stack-dump', False)
if logStackDump or uploadStackDump:
ExceptionVarDump.install(logStackDump, uploadStackDump)
if __debug__:
self.__autoGarbageLogging = self.__dev__ and self.config.GetBool('auto-garbage-logging', False)
## The directory containing the main Python file of this application.
self.mainDir = ExecutionEnvironment.getEnvironmentVariable("MAIN_DIR")
self.main_dir = self.mainDir
## This contains the global appRunner instance, as imported from
## AppRunnerGlobal. This will be None if we are not running in the
## runtime environment (ie. from a .p3d file).
self.appRunner = AppRunnerGlobal.appRunner
self.app_runner = self.appRunner
#debug running multiplier
self.debugRunningMultiplier = 4
# [gjeon] to disable sticky keys
if self.config.GetBool('disable-sticky-keys', 0):
storeAccessibilityShortcutKeys()
allowAccessibilityShortcutKeys(False)
self.printEnvDebugInfo()
vfs = VirtualFileSystem.getGlobalPtr()
self.nextWindowIndex = 1
self.__directStarted = False
self.__deadInputs = 0
# Store dconfig variables
self.sfxActive = self.config.GetBool('audio-sfx-active', 1)
self.musicActive = self.config.GetBool('audio-music-active', 1)
self.wantFog = self.config.GetBool('want-fog', 1)
self.wantRender2dp = self.config.GetBool('want-render2dp', 1)
self.screenshotExtension = self.config.GetString('screenshot-extension', 'jpg')
self.musicManager = None
self.musicManagerIsValid = None
self.sfxManagerList = []
self.sfxManagerIsValidList = []
self.wantStats = self.config.GetBool('want-pstats', 0)
self.wantTk = False
self.wantWx = False
## Fill this in with a function to invoke when the user "exits"
## the program by closing the main window.
self.exitFunc = None
## Add final-exit callbacks to this list. These will be called
## when sys.exit() is called, after Panda has unloaded, and
## just before Python is about to shut down.
self.finalExitCallbacks = []
# Set up the TaskManager to reset the PStats clock back
# whenever we resume from a pause. This callback function is
# a little hacky, but we can't call it directly from within
# the TaskManager because he doesn't know about PStats (and
# has to run before libpanda is even loaded).
taskMgr.resumeFunc = PStatClient.resumeAfterPause
if self.__dev__:
self.__setupProfile()
# If the aspect ratio is 0 or None, it means to infer the
# aspect ratio from the window size.
# If you need to know the actual aspect ratio call base.getAspectRatio()
self.__configAspectRatio = ConfigVariableDouble('aspect-ratio', 0).getValue()
# This variable is used to see if the aspect ratio has changed when
# we get | a window-event.
self.__oldAspectRatio = None
## This is set to the value of the window-type config variable, but may
## optionally be overridden in the Showbase constructor. Should either be
## 'onscreen' (the default), 'offscreen' or 'none'.
self.windowType = windowType
if self.windowType is None:
self.windowType = self.config.GetString('window-type', 'onscreen')
        self.requireWindow = self.config.GetBool('require-window', 1)
## This is the main, or only window; see winList for a list of *all* windows.
self.win = None
self.frameRateMeter = None
self.sceneGraphAnalyzerMeter = None
self.winList = []
self.winControls = []
self.mainWinMinimized = 0
self.mainWinForeground = 0
self.pipe = None
self.pipeList = []
self.mouse2cam = None
self.buttonThrowers = None
self.mouseWatcher = None
self.mouseWatcherNode = None
self.pointerWatcherNodes = None
self.mouseInterface = None
self.drive = None
self.trackball = None
self.texmem = None
self.showVertices = None
## This is a NodePath pointing to the Camera object set up for the 3D scene.
## This is usually a child of self.camera.
self.cam = None
self.cam2d = None
self.cam2dp = None
## This is the NodePath that should be used to manipulate the camera. This
## is the node to which the default camera is attached.
self.camera = None
self.camera2d = None
self.camera2dp = None
## This is a list of all cameras created with makeCamera, including base.cam.
self.camList = []
## Convenience accessor for base.cam.node()
self.camNode = None
## Convenience accessor for base.camNode.get_lens()
self.camLens = None
self.camFrustumVis = None
self.direct = None
## This is used to store the wx.Application object used when want-wx is
## set or base.startWx() is called.
self.wxApp = None
self.wxAppCreated = False
self.tkRoot = None
self.tkRootCreated = False
# This is used for syncing multiple PCs in a distributed cluster
try:
# Has the cluster sync variable been set externally?
self.clusterSyncFlag = clusterSyncFlag
except NameError:
            # Has the clusterSyncFlag been set via a config variable?
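# The smallest complete application built on the class above (a sketch;
# it needs a working Panda3D install and a display):
from direct.showbase.ShowBase import ShowBase

class MyApp(ShowBase):
    def __init__(self):
        ShowBase.__init__(self)

# MyApp().run()  # opens the main window and enters the task loop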
# === JoGall/ubitrail :: windowsBuild/share/glib-2.0/gdb/gobject.py (GPL-3.0) ===
import gdb
import glib
import gdb.backtrace
import gdb.command.backtrace
# This is not quite right, as local vars may override symname
def read_global_var (symname):
return gdb.selected_frame().read_var(symname)
def g_type_to_name (gtype):
def lookup_fundamental_type (typenode):
if typenode == 0:
return None
val = read_global_var ("static_fundamental_type_nodes")
if val == None:
return None
return val[typenode >> 2].address()
gtype = long(gtype)
typenode = gtype - gtype % 4
if typenode > (255 << 2):
typenode = gdb.Value(typenode).cast (gdb.lookup_type("TypeNode").pointer())
else:
typenode = lookup_fundamental_type (typenode)
if typenode != None:
return glib.g_quark_to_string (typenode["qname"])
return None
def is_g_type_instance (val):
def is_g_type_instance_helper (type):
if str(type) == "GTypeInstance":
            return True
while type.code == gdb.TYPE_CODE_TYPEDEF:
type = type.target()
if type.code != gdb.TYPE_CODE_STRUCT:
return False
fields = type.fields()
if len (fields) < 1:
return False
first_field = fields[0]
        return is_g_type_instance_helper(first_field.type)
type = val.type
if type.code != gdb.TYPE_CODE_PTR:
return False
type = type.target()
return is_g_type_instance_helper (type)
def g_type_name_from_instance (instance):
if long(instance) != 0:
try:
inst = instance.cast (gdb.lookup_type("GTypeInstance").pointer())
klass = inst["g_class"]
gtype = klass["g_type"]
name = g_type_to_name (gtype)
return name
except RuntimeError:
pass
return None
class GTypePrettyPrinter:
"Prints a GType instance pointer"
def __init__ (self, val):
self.val = val
def to_string (self):
name = g_type_name_from_instance (self.val)
if name:
return ("0x%x [%s]")% (long(self.val), name)
return ("0x%x") % (long(self.val))
def pretty_printer_lookup (val):
if is_g_type_instance (val):
return GTypePrettyPrinter (val)
return None
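# A lookup function like the one above only takes effect once it is
# registered with gdb; a minimal registration hook, mirroring the usual
# auto-load convention (a sketch; runs only inside gdb's embedded Python):
def register(obj):
    if obj is None:
        obj = gdb
    obj.pretty_printers.append(pretty_printer_lookup)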
def get_signal_name (id):
if id == None:
return None
id = long(id)
if id == 0:
return None
val = read_global_var ("g_signal_nodes")
max_s = read_global_var ("g_n_signal_nodes")
max_s = long(max_s)
if id < max_s:
return val[id]["name"].string()
return None
class GFrameWrapper:
def __init__ (self, frame):
self.frame = frame;
def name (self):
name = self.frame.name()
if name and name.startswith("IA__"):
return name[4:]
return name
def __getattr__ (self, name):
return getattr (self.frame, name)
# Monkey patch FrameWrapper to avoid IA__ in symbol names
old__init__ = gdb.command.backtrace.FrameWrapper.__init__
def monkey_patched_init(self, frame):
name = frame.name()
if name and name.startswith("IA__"):
frame = GFrameWrapper(frame)
old__init__(self,frame)
gdb.command.backtrace.FrameWrapper.__init__ = monkey_patched_init
class DummyFrame:
def __init__ (self, frame):
self.frame = frame
def name (self):
return "signal-emission-dummy"
def describe (self, stream, full):
stream.write (" <...>\n")
def __getattr__ (self, name):
return getattr (self.frame, name)
class SignalFrame:
def __init__ (self, frames):
self.frame = frames[-1]
self.frames = frames;
def name (self):
return "signal-emission"
def read_var (self, frame, name, array = None):
try:
v = frame.read_var (name)
if v == None or v.is_optimized_out:
return None
if array != None:
array.append (v)
return v
except ValueError:
return None
def read_object (self, frame, name, array = None):
try:
v = frame.read_var (name)
if v == None or v.is_optimized_out:
return None
v = v.cast (gdb.lookup_type("GObject").pointer())
# Ensure this is a somewhat correct object pointer
if v != None and g_type_name_from_instance (v):
if array != None:
array.append (v)
return v
return None
except ValueError:
return None
def append (self, array, obj):
if obj != None:
array.append (obj)
def or_join_array (self, array):
if len(array) == 0:
return "???"
v = {}
for i in range(len(array)):
v[str(array[i])] = 1
array = v.keys()
s = array[0]
for i in range(1, len(array)):
s = s + " or %s"%array[i]
return s
def describe (self, stream, full):
instances = []
signals = []
for frame in self.frames:
name = frame.name()
if name == "signal_emit_unlocked_R":
self.read_object (frame, "instance", instances)
node = self.read_var (frame, "node")
if node:
signal = node["name"].string()
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emitv":
instance_and_params = self.read_var (frame, "instance_and_params")
if instance_and_params:
instance = instance_and_params[0]["v_pointer"].cast (gdb.Type("GObject").pointer())
self.append (instances, instance)
id = self.read_var (frame, "signal_id")
signal = get_signal_name (id)
if signal:
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emit_valist" or name == "g_signal_emit":
self.read_object (frame, "instance", instances)
id = self.read_var (frame, "signal_id")
signal = get_signal_name (id)
if signal:
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emit_by_name":
self.read_object (frame, "instance", instances)
self.read_var (frame, "detailed_signal", signals)
break
instance = self.or_join_array (instances)
signal = self.or_join_array (signals)
stream.write (" <emit signal %s on instance %s>\n" % (signal, instance))
def __getattr__ (self, name):
return getattr (self.frame, name)
class GFrameFilter:
def __init__ (self, iter):
self.queue = []
self.iter = iter
def __iter__ (self):
return self
def fill (self):
while len(self.queue) <= 6:
try:
f = self.iter.next ()
self.queue.append (f)
except StopIteration:
return
def find_signal_emission (self):
for i in range (min (len(self.queue), 3)):
if self.queue[i].name() == "signal_emit_unlocked_R":
return i
return -1
def next (self):
# Ensure we have enough frames for a full signal emission
self.fill()
# Are we at the end?
if len(self.queue) == 0:
raise StopIteration
emission = self.find_signal_emission ()
if emission > 0:
start = emission
while True:
# === devunt/hydrocarbon :: board/migrations/0001_squashed_0022_auto_20141229_2347.py (MIT) ===
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
def siteconf_func(apps, schema_editor):
Site = apps.get_model('sites', 'Site')
db_alias = schema_editor.connection.alias
Site.objects.using(db_alias).create(
domain='herocomics.kr',
name='히어로코믹스',
)
class Migration(migrations.Migration):
replaces = [('board', '0001_initial'), ('board', '0002_auto_20141210_2117'), ('board', '0003_attachment_checksum'), ('board', '0004_auto_20141211_1736'), ('board', '0005_auto_20141213_1307'), ('board', '0006_auto_20141213_2149'), ('board', '0007_auto_20141213_2221'), ('board', '0008_auto_20141214_1355'), ('board', '0009_auto_20141214_1817'), ('board', '0010_attachment'), ('board', '0011_auto_20141215_1514'), ('board', '0012_auto_20141221_1249'), ('board', '0013_auto_20141221_2059'), ('board', '0014_auto_20141222_1913'), ('board', '0015_auto_20141227_1704'), ('board', '0016_auto_20141227_2033'), ('board', '0017_auto_20141228_1146'), ('board', '0018_auto_20141228_2307'), ('board', '0019_auto_20141228_2318'), ('board', '0020_auto_20141229_0103'), ('board', '0021_auto_20141229_2017'), ('board', '0022_auto_20141229_2347')]
dependencies = [
('sites', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(db_index=True, max_length=255, unique=True, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('nickname', models.CharField(max_length=16, unique=True)),
('groups', models.ManyToManyField(to='auth.Group', related_name='user_set', help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', blank=True, verbose_name='groups', related_query_name='user')),
('user_permissions', models.ManyToManyField(to='auth.Permission', related_name='user_set', help_text='Specific permissions for this user.', blank=True, verbose_name='user permissions', related_query_name='user')),
            ],
options={
                'abstract': False,
'verbose_name_plural': 'users',
'verbose_name': 'user',
},
bases=(models.Model,),
),
migrations.RunPython(
code=siteconf_func,
reverse_code=None,
atomic=False,
),
migrations.CreateModel(
name='Announcement',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Board',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=16)),
('slug', models.SlugField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=8)),
('slug', models.SlugField()),
('board', models.ForeignKey(to='board.Board')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('ipaddress', models.GenericIPAddressField(protocol='IPv4')),
('contents', models.TextField()),
('created_time', models.DateTimeField(auto_now_add=True)),
('comment', models.ForeignKey(to='board.Comment', related_name='subcomments', blank=True, null=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('ipaddress', models.GenericIPAddressField(protocol='IPv4')),
('title', models.CharField(max_length=32)),
('contents', models.TextField()),
('viewcount', models.PositiveIntegerField(default=0)),
('created_time', models.DateTimeField(auto_now_add=True)),
('modified_time', models.DateTimeField()),
('board', models.ForeignKey(to='board.Board')),
('category', models.ForeignKey(to='board.Category', blank=True, null=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=32, unique=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='post',
name='tags',
field=models.ManyToManyField(to='board.Tag', blank=True, null=True),
preserve_default=True,
),
migrations.AddField(
model_name='post',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='posts', blank=True, null=True),
preserve_default=True,
),
migrations.AddField(
model_name='comment',
name='post',
field=models.ForeignKey(to='board.Post', related_name='comments'),
preserve_default=True,
),
migrations.AddField(
model_name='comment',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='comments', blank=True, null=True),
preserve_default=True,
),
migrations.AddField(
model_name='announcement',
name='boards',
field=models.ManyToManyField(to='board.Board', blank=True, null=True, related_name='announcements'),
preserve_default=True,
),
migrations.AddField(
model_name='announcement',
name='post',
field=models.OneToOneField(to='board.Post', related_name='announcement'),
preserve_default=True,
),
migrations.AlterField(
model_name='category',
name='board',
field=models.ForeignKey(to='board.Board', related_name='categories'),
            preserve_default=True,
        ),
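# A reversible variant of the siteconf data migration above would pair
# the forward function with an explicit reverse (a sketch; the delete
# filter is illustrative):
def siteconf_reverse(apps, schema_editor):
    Site = apps.get_model('sites', 'Site')
    db_alias = schema_editor.connection.alias
    Site.objects.using(db_alias).filter(domain='herocomics.kr').delete()

# migrations.RunPython(siteconf_func, reverse_code=siteconf_reverse)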
# === poliastro/poliastro :: src/poliastro/core/angles.py (MIT) ===
import numpy as np
from numba import njit as jit
@jit
def _kepler_equation(E, M, ecc):
return E_to_M(E, ecc) - M
@jit
def _kepler_equation_prime(E, M, ecc):
return 1 - ecc * np.cos(E)
@jit
def _kepler_equation_hyper(F, M, ecc):
return F_to_M(F, ecc) - M
@jit
def _kepler_equation_prime_hyper(F, M, ecc):
return ecc * np.cosh(F) - 1
def newton_factory(func, fprime):
@jit
def jit_newton_wrapper(x0, args=(), tol=1.48e-08, maxiter=50):
p0 = float(x0)
for _ in range(maxiter):
fval = func(p0, *args)
fder = fprime(p0, *args)
newton_step = fval / fder
p = p0 - newton_step
if abs(p - p0) < tol:
return p
p0 = p
return np.nan
return jit_newton_wrapper
_newton_elliptic = newton_factory(_kepler_equation, _kepler_equation_prime)
_newton_hyperbolic = newton_factory(
_kepler_equation_hyper, _kepler_equation_prime_hyper
)
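# Direct use of the specialized solvers above (a sketch): solve the
# elliptic Kepler equation E - e*sin(E) = M for M = 0.3 rad, e = 0.1,
# starting from E0 = M.
E = _newton_elliptic(0.3, args=(0.3, 0.1))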
@jit
def D_to_nu(D):
r"""True anomaly from parabolic anomaly.
Parameters
----------
D : float
        Parabolic anomaly.
Returns
-------
nu : float
True anomaly.
Notes
-----
From [1]_:
.. math::
\nu = 2 \arctan{D}
"""
return 2.0 * np.arctan(D)
@jit
def nu_to_D(nu):
r"""Parabolic anomaly from true anomaly.
Parameters
----------
nu : float
True anomaly in radians.
Returns
-------
D : float
Parabolic anomaly.
Warnings
--------
The parabolic anomaly will be continuous in (-∞, ∞)
only if the true anomaly is in (-π, π].
No validation or wrapping is performed.
Notes
-----
The treatment of the parabolic case is heterogeneous in the literature,
and that includes the use of an equivalent quantity to the eccentric anomaly:
[1]_ calls it "parabolic eccentric anomaly" D,
[2]_ also uses the letter D but calls it just "parabolic anomaly",
[3]_ uses the letter B citing indirectly [4]_
(which however calls it "parabolic time argument"),
and [5]_ does not bother to define it.
We use this definition:
.. math::
B = \tan{\frac{\nu}{2}}
References
----------
.. [1] Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
"Robust resolution of Kepler’s equation in all eccentricity regimes."
.. [2] Bate, Muller, White.
.. [3] Vallado, David. "Fundamentals of Astrodynamics and Applications",
2013.
.. [4] IAU VIth General Assembly, 1938.
.. [5] Battin, Richard H. "An introduction to the Mathematics and Methods
of Astrodynamics, Revised Edition", 1999.
"""
# TODO: Rename to B
return np.tan(nu / 2.0)
@jit
def nu_to_E(nu, ecc):
r"""Eccentric anomaly from true anomaly.
.. versionadded:: 0.4.0
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly, between -π and π radians.
Warnings
--------
The eccentric anomaly will be between -π and π radians,
no matter the value of the true anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
E = 2 \arctan \left ( \sqrt{\frac{1 - e}{1 + e}} \tan{\frac{\nu}{2}} \right)
\in (-\pi, \pi]
"""
E = 2 * np.arctan(np.sqrt((1 - ecc) / (1 + ecc)) * np.tan(nu / 2))
return E
@jit
def nu_to_F(nu, ecc):
r"""Hyperbolic anomaly from true anomaly.
Parameters
----------
nu : float
True anomaly in radians.
ecc : float
Eccentricity (>1).
Returns
-------
F : float
Hyperbolic anomaly.
Warnings
--------
The hyperbolic anomaly will be continuous in (-∞, ∞)
only if the true anomaly is in (-π, π],
which should happen anyway
because the true anomaly is limited for hyperbolic orbits.
No validation or wrapping is performed.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
F = 2 \operatorname{arctanh} \left( \sqrt{\frac{e-1}{e+1}} \tan{\frac{\nu}{2}} \right)
"""
F = 2 * np.arctanh(np.sqrt((ecc - 1) / (ecc + 1)) * np.tan(nu / 2))
return F
@jit
def E_to_nu(E, ecc):
r"""True anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : float
Eccentric anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
nu : float
True anomaly, between -π and π radians.
Warnings
--------
The true anomaly will be between -π and π radians,
no matter the value of the eccentric anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
\nu = 2 \arctan \left( \sqrt{\frac{1 + e}{1 - e}} \tan{\frac{E}{2}} \right)
\in (-\pi, \pi]
"""
nu = 2 * np.arctan(np.sqrt((1 + ecc) / (1 - ecc)) * np.tan(E / 2))
return nu
@jit
def F_to_nu(F, ecc):
r"""True anomaly from hyperbolic anomaly.
Parameters
----------
F : float
Hyperbolic anomaly.
ecc : float
Eccentricity (>1).
Returns
-------
nu : float
True anomaly.
Notes
-----
The implementation uses the half-angle formula from [3]_:
.. math::
\nu = 2 \arctan \left( \sqrt{\frac{e + 1}{e - 1}} \tanh{\frac{F}{2}} \right)
\in (-\pi, \pi]
"""
nu = 2 * np.arctan(np.sqrt((ecc + 1) / (ecc - 1)) * np.tanh(F / 2))
return nu
@jit
def M_to_E(M, ecc):
"""Eccentric anomaly from mean anomaly.
.. versionadded:: 0.4.0
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly.
Notes
-----
This uses a Newton iteration on the Kepler equation.
"""
if -np.pi < M < 0 or np.pi < M:
E0 = M - ecc
else:
E0 = M + ecc
E = _newton_elliptic(E0, args=(M, ecc))
return E
@jit
def M_to_F(M, ecc):
"""Hyperbolic anomaly from mean anomaly.
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity (>1).
Returns
-------
F : float
Hyperbolic anomaly.
    Notes
-----
This uses a Newton iteration on the hyperbolic Kepler equation.
"""
F0 = np.arcsinh(M / ecc)
F = _newton_hyperbolic(F0, args=(M, ecc), maxiter=100)
return F
@jit
def M_to_D(M):
"""Parabolic anomaly from mean anomaly.
Parameters
----------
M : float
Mean anomaly in radians.
Returns
-------
D : float
Parabolic anomaly.
Notes
-----
    This uses the analytical solution of Barker's equation from [5]_.
"""
B = 3.0 * M / 2.0
A = (B + (1.0 + B**2) ** 0.5) ** (2.0 / 3.0)
D = 2 * A * B / (1 + A + A**2)
return D
@jit
def E_to_M(E, ecc):
r"""Mean anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : float
Eccentric anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
M : float
Mean anomaly.
Warnings
--------
The mean anomaly will be outside of (-π, π]
if the eccentric anomaly is.
No validation or wrapping is performed.
Notes
-----
The implementation uses the plain original Kepler equation:
.. math::
M = E - e \sin{E}
"""
M = E - ecc * np.sin(E)
return M
@jit
def F_to_M(F, ecc):
r"""Mean anomaly from eccentric anomaly.
Parameters
----------
F : float
Hyperbolic anomaly.
ecc : float
Eccentricity (>1).
Returns
-------
M : float
Mean anomaly.
Notes
-----
As noted in [5]_, by manipulating
the parametric equations of the hyperbola
we can derive a quantity that is equivalent
to the eccentric anomaly in the elliptic case:
.. math::
M = e \sinh{F} - F
"""
M = ecc * np.sinh(F) - F
return M
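# Round-trip sanity check of the elliptic conversions above (a sketch;
# the values are arbitrary):
nu, ecc = 0.5, 0.3
M = E_to_M(nu_to_E(nu, ecc), ecc)
assert abs(E_to_nu(M_to_E(M, ecc), ecc) - nu) < 1e-12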
@jit
def D_to_M(D):
    r"""Mean anomaly from parabolic anomaly.

    Parameters
    ----------
    D : float
        Parabolic anomaly.

    Returns
    -------
    M : float
        Mean anomaly.

    Notes
    -----
    This is Barker's equation:

    .. math::
        M = D + \frac{D^3}{3}
    """
    M = D + D**3 / 3
    return M
# === bstdenis/pymerra2 :: scripts/merra2_subdaily_download.py (Apache-2.0) ===
import logging
from pathlib import Path
from pymerra2 import download
# Here we process multiple variables at a time to avoid downloading
# original data twice (all these variables are in the same files).
# These variables names are user choices, their merra-2 equivalent are
# specified below or in the default pymerra2_variables.py
var_names = ["evspsbl", "huss", "prbc", "tas", "sic", "snw", "uas", "vas", "ps"]
var_names = ["hur"]
delete_temp_dir = False
download_dir = Path.cwd().joinpath("downloaded")
merra2_server = "https://goldsmr4.gesdisc.eosdis.nasa.gov/data/"
merra2_server = "https://goldsmr5.gesdisc.eosdis.nasa.gov/data/"
# The variables specification is in the same order as var_names above.
# esdt_dir, collection and merra_name can be found from
# https://gmao.gsfc.nasa.gov/pubs/docs/Bosilovich785.pdf
# https://goldsmr4.gesdisc.eosdis.nasa.gov/data/
# standard_name comes from
# http://cfconventions.org/standard-names.html
# Optionally, if all the variables are already in the default
# pymerra2_variables.py, this can be set to None.
# This loop will create monthly files of hourly MERRA2 data
for yyyy in range(2017, 2019):
for mm in range(1, 13):
try:
download.subdaily_download_and_convert(
merra2_server,
var_names,
merra2_var_dicts=None,
initial_year=yyyy,
final_year=yyyy,
initial_month=mm,
final_month=mm,
initial_day=1,
final_day=None,
output_dir=download_dir,
delete_temp_dir=delete_temp_dir,
)
except Exception as e:
msg = "{}: File not found".format(e)
logging.error(msg)
continue
# === Finntack/pootle :: pootle/apps/accounts/managers.py (GPL-3.0) ===
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.contrib.auth.models import BaseUserManager
from django.db.models import Q
from django.utils import timezone
from django.utils.lru_cache import lru_cache
from pootle_app.models.permissions import check_user_permission
from pootle_translationproject.models import TranslationProject
from . import utils
__all__ = ('UserManager', )
class UserManager(BaseUserManager):
"""Pootle User manager.
This manager hides the 'nobody' and 'default' users for normal
queries, since they are special users. Code that needs access to these
users should use the methods get_default_user and get_nobody_user.
"""
PERMISSION_USERS = ('default', 'nobody')
META_USERS = ('default', 'nobody', 'system')
def _create_user(self, username, email, password, is_superuser,
**extra_fields):
"""Creates and saves a User with the given username, email,
password and superuser status.
Adapted from the core ``auth.User`` model's ``UserManager``: we
have no use for the ``is_staff`` field.
"""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
utils.validate_email_unique(email)
user = self.model(username=username, email=email,
is_active=True, is_superuser=is_superuser,
last_login=now, date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
return self._create_user(username, email, password, False,
**extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
return self._create_user(username, email, password, True,
**extra_fields)
@lru_cache()
def get_default_user(self):
return self.get_queryset().get(username='default')
@lru_cache()
def get_nobody_user(self):
return self.get_queryset().get(username='nobody')
@lru_cache()
def get_system_user(self):
return self.get_queryset().get(username='system')
def hide_permission_users(self):
return self.get_queryset().exclude(username__in=self.PERMISSION_USERS)
def hide_meta(self):
return self.get_queryset().exclude(username__in=self.META_USERS)
def meta_users(self):
return self.get_queryset().filter(username__in=self.META_USERS)
def get_users_with_permission(self, permission_code, project, language):
default = self.get_default_user()
directory = TranslationProject.objects.get(
project=project,
language=language
        ).directory
if check_user_permission(default, permission_code, directory):
return self.hide_meta().filter(is_active=True)
user_filter = Q(
permissionset__positive_permissions__codename=permission_code
)
language_path = language.directory.pootle_path
        project_path = project.directory.pootle_path
user_filter &= (
Q(permissionset__directory__pootle_path=directory.pootle_path)
| Q(permissionset__directory__pootle_path=language_path)
| Q(permissionset__directory__pootle_path=project_path)
)
user_filter |= Q(is_superuser=True)
return self.get_queryset().filter(user_filter).distinct()
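# The permission lookup above is plain Q-object composition; the same
# AND/OR shape in isolation (a sketch; the codename and paths are
# placeholders, and building Q objects needs no database connection):
from django.db.models import Q

base = Q(permissionset__positive_permissions__codename='translate')
scope = (Q(permissionset__directory__pootle_path='/projects/foo/')
         | Q(permissionset__directory__pootle_path='/af/'))
query = (base & scope) | Q(is_superuser=True)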
# === openstack/nova :: nova/tests/functional/regressions/test_bug_1896463.py (Apache-2.0) ===
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import fixtures
import time
from oslo_config import cfg
from nova import context
from nova import objects
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
from nova import utils
from nova.virt import fake
CONF = cfg.CONF
class TestEvacuateResourceTrackerRace(
test.TestCase, integrated_helpers.InstanceHelperMixin,
):
"""Demonstrate bug #1896463.
Trigger a race condition between an almost finished evacuation that is
dropping the migration context, and the _update_available_resource()
periodic task that already loaded the instance list but haven't loaded the
migration list yet. The result is that the PCI allocation made by the
evacuation is deleted by the overlapping periodic task run and the instance
will not have PCI allocation after the evacuation.
"""
def setUp(self):
super().setUp()
self.neutron = self.useFixture(nova_fixtures.NeutronFixture(self))
self.glance = self.useFixture(nova_fixtures.GlanceFixture(self))
self.placement = self.useFixture(func_fixtures.PlacementFixture()).api
self.api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.useFixture(fixtures.MockPatch(
'nova.pci.utils.get_mac_by_pci_address',
return_value='52:54:00:1e:59:c6'))
self.useFixture(fixtures.MockPatch(
'nova.pci.utils.get_vf_num_by_pci_address',
return_value=1))
self.admin_api = self.api_fixture.admin_api
self.admin_api.microversion = 'latest'
self.api = self.admin_api
self.start_service('conductor')
self.start_service('scheduler')
self.flags(compute_driver='fake.FakeDriverWithPciResources')
self.useFixture(
fake.FakeDriverWithPciResources.
FakeDriverWithPciResourcesConfigFixture())
self.compute1 = self._start_compute('host1')
self.compute1_id = self._get_compute_node_id_by_host('host1')
self.compute1_service_id = self.admin_api.get_services(
host='host1', binary='nova-compute')[0]['id']
self.compute2 = self._start_compute('host2')
self.compute2_id = self._get_compute_node_id_by_host('host2')
self.compute2_service_id = self.admin_api.get_services(
host='host2', binary='nova-compute')[0]['id']
# add extra ports and the related network to the neutron fixture
# specifically for these tests. It cannot be added globally in the
# fixture init as it adds a second network that makes auto allocation
# based test to fail due to ambiguous networks.
self.neutron._ports[self.neutron.sriov_port['id']] = \
copy.deepcopy(self.neutron.sriov_port)
self.neutron._networks[
self.neutron.network_2['id']] = self.neutron.network_2
self.neutron._subnets[
self.neutron.subnet_2['id']] = self.neutron.subnet_2
self.ctxt = context.get_admin_context()
def _get_compute_node_id_by_host(self, host):
# we specifically need the integer id of the node not the UUID so we
# need to use the old microversion
with utils.temporary_mutation(self.admin_api, microversion='2.52'):
hypers = self.admin_api.api_get(
'os-hypervisors').body['hypervisors']
for hyper in hypers:
if hyper['hypervisor_hostname'] == host:
return hyper['id']
self.fail('Hypervisor with hostname=%s not found' % host)
def _assert_pci_device_allocated(
self, instance_uuid, compute_node_id, num=1):
"""Assert that a given number of PCI devices are allocated to the
instance on the given host.
"""
devices = objects.PciDeviceList.get_by_instance_uuid(
self.ctxt, instance_uuid)
devices_on_host = [dev for dev in devices
if dev.compute_node_id == compute_node_id]
self.assertEqual(num, len(devices_on_host))
    def test_evacuate_races_with_update_available_resource(self):
# Create a server with a direct port to have PCI allocation
server = self._create_server(
            name='test-server-for-bug-1896463',
networks=[{'port': self.neutron.sriov_port['id']}],
host='host1'
)
self._assert_pci_device_allocated(server['id'], self.compute1_id)
self._assert_pci_device_allocated(
server['id'], self.compute2_id, num=0)
# stop and force down the compute the instance is on to allow
# evacuation
self.compute1.stop()
self.admin_api.put_service(
self.compute1_service_id, {'forced_down': 'true'})
# Inject some sleeps both in the Instance.drop_migration_context and
# the MigrationList.get_in_progress_and_error code to make them
# overlap.
# We want to create the following execution scenario:
# 1) The evacuation makes a move claim on the dest including the PCI
# claim. This means there is a migration context. But the evacuation
# is not complete yet so the instance.host does not point to the
# dest host.
# 2) The dest resource tracker starts an _update_available_resource()
# periodic task and this task loads the list of instances on its
# host from the DB. Our instance is not in this list due to #1.
# 3) The evacuation finishes, the instance.host is set to the dest host
# and the migration context is deleted.
# 4) The periodic task now loads the list of in-progress migration from
# the DB to check for incoming our outgoing migrations. However due
# to #3 our instance is not in this list either.
# 5) The periodic task cleans up every lingering PCI claim that is not
# connected to any instance collected above from the instance list
# and from the migration list. As our instance is not in either of
# the lists, the resource tracker cleans up the PCI allocation for
# the already finished evacuation of our instance.
#
# Unfortunately we cannot reproduce the above situation without sleeps.
# We need that the evac starts first then the periodic starts, but not
# finishes, then evac finishes, then periodic finishes. If I trigger
# and run the whole periodic in a wrapper of drop_migration_context
# then I could not reproduce the situation described at #4). In general
# it is not
#
# evac
# |
# |
# | periodic
# | |
# | |
# | x
# |
# |
# x
#
# but
#
# evac
# |
# |
# | periodic
# | |
# | |
# | |
# x |
# |
# x
#
        # what is needed.
#
# Starting the periodic from the test in a separate thread at
# drop_migration_context() might work but that is an extra complexity
# in the test code. Also it might need a sleep still to make the
# reproduction stable but only one sleep instead of two.
orig_drop = objects.Instance.drop_migration_context
        def slow_drop(*args, **kwargs):
            # Delay dropping the migration context so the periodic task
            # can overlap the end of the evacuation (this completes the
            # truncated helper; the one-second sleep is illustrative)
            time.sleep(1)
            return orig_drop(*args, **kwargs)
# === elephantum/python-daemon :: daemon/runner.py (GPL-2.0) ===
# -*- coding: utf-8 -*-
# daemon/runner.py
# Part of python-daemon, an implementation of PEP 3143.
#
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2007–2008 Robert Niederreiter, Jens Klein
# Copyright © 2003 Clark Evans
# Copyright © 2002 Noah Spurrier
# Copyright © 2001 Jürgen Hermann
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Daemon runner library.
"""
import sys
import os
import signal
import errno
import pidlockfile
from daemon import DaemonContext
class DaemonRunnerError(Exception):
""" Abstract base class for errors from DaemonRunner. """
class DaemonRunnerInvalidActionError(ValueError, DaemonRunnerError):
""" Raised when specified action for DaemonRunner is invalid. """
class DaemonRunnerStartFailureError(RuntimeError, DaemonRunnerError):
""" Raised when failure starting DaemonRunner. """
class DaemonRunnerStopFailureError(RuntimeError, DaemonRunnerError):
""" Raised when failure stopping DaemonRunner. """
class DaemonRunner(object):
""" Controller for a callable running in a separate background process.
The first command-line argument is the action to take:
* 'start': Become a daemon and call `app.run()`.
* 'stop': Exit the daemon process specified in the PID file.
* 'restart': Stop, then start.
"""
start_message = "started with pid %(pid)d"
def __init__(self, app):
""" Set up the parameters of a new runner.
The `app` argument must have the following attributes:
* `stdin_path`, `stdout_path`, `stderr_path`: Filesystem
paths to open and replace the existing `sys.stdin`,
`sys.stdout`, `sys.stderr`.
* `pidfile_path`: Absolute filesystem path to a file that
will be used as the PID file for the daemon. If
``None``, no PID file will be used.
* `pidfile_timeout`: Used as the default acquisition
timeout value supplied to the runner's PID lock file.
* `run`: Callable that will be invoked when the daemon is
started.
"""
self.parse_args()
self.app = app
self.daemon_context = DaemonContext()
self.daemon_context.stdin = open(app.stdin_path, 'r')
self.daemon_context.stdout = open(app.stdout_path, 'w+')
self.daemon_context.stderr = open(
app.stderr_path, 'w+', buffering=0)
self.pidfile = None
if app.pidfile_path is not None:
self.pidfile = make_pidlockfile(
app.pidfile_path, app.pidfile_timeout)
self.daemon_context.pidfile = self.pidfile
def _usage_exit(self, argv):
""" Emit a usage message, then exit.
"""
progname = os.path.basename(argv[0])
usage_exit_code = 2
action_usage = "|".join(self.action_funcs.keys())
message = "usage: %(progname)s %(action_usage)s" % vars()
emit_message(message)
sys.exit(usage_exit_code)
def parse_args(self, argv=None):
""" Parse command-line arguments.
"""
if argv is None:
argv = sys.argv
min_args = 2
if len(argv) < min_args:
self._usage_exit(argv)
self.action = argv[1]
if self.action not in self.action_funcs:
self._usage_exit(argv)
def _start(self):
""" Open the daemon context and run the application.
"""
if is_pidfile_stale(self.pidfile):
self.pidfile.break_lock()
try:
self.daemon_context.open()
except pidlockfile.AlreadyLocked:
pidfile_path = self.pidfile.path
raise DaemonRunnerStartFailureError(
"PID file %(pidfile_path)r already locked" % vars())
pid = os.getpid()
message = self.start_message % vars()
emit_message(message)
self.app.run()
def _terminate_daemon_process(self):
""" Terminate the daemon process specified in the current PID file.
"""
pid = self.pidfile.read_pid()
try:
os.kill(pid, signal.SIGTERM)
except OSError, exc:
raise DaemonRunnerStopFailureError(
"Failed to terminate %(pid)d: %(exc)s" % vars())
def _stop(self):
""" Exit the daemon process specified in the current PID file.
"""
if not self.pidfile.is_locked():
pidfile_path = self.pidfile.path
raise DaemonRunnerStopFailureError(
"PID file %(pidfile_path)r not locked" % vars())
if is_pidfile_stale(self.pidfile):
self.pidfile.break_lock()
else:
self._terminate_daemon_process()
def _restart(self):
""" Stop, then start.
"""
self._stop()
self._start()
action_funcs = {
'start': _start,
'stop': _stop,
'restart': _restart,
}
def _get_action_func(self):
""" Return the function for the specified action.
Raises ``DaemonRunnerInvalidActionError`` if the action is
unknown.
"""
try:
func = self.action_funcs[self.action]
except KeyError:
raise DaemonRunnerInvalidActionError(
"Unknown action: %(action)r" % vars(self))
return func
def do_action(self):
""" Perform the requested action.
"""
func = self._get_action_func()
| func(self)
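# Illustrative usage sketch (added for clarity; ExampleApp and its paths are
# hypothetical, not part of this module). An application object only needs
# the attributes documented in DaemonRunner.__init__ above:
#
#     class ExampleApp(object):
#         stdin_path = '/dev/null'
#         stdout_path = '/var/log/example.out'
#         stderr_path = '/var/log/example.err'
#         pidfile_path = '/var/run/example.pid'
#         pidfile_timeout = 5
#         def run(self):
#             while True:
#                 pass  # daemonised payload goes here
#
#     DaemonRunner(ExampleApp()).do_action()  # dispatches on sys.argv[1]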
def emit_message(message, stream=None):
""" Emit a message to the specified stream (default `sys.stderr`). """
if stream is None:
stream = sys.stderr
stream.write("%(message)s\n" % vars())
stream.flush()
def make_p | idlockfile(path, acquire_timeout):
""" Make a PIDLockFile instance with the given filesystem path. """
if not isinstance(path, basestring):
error = ValueError("Not a filesystem path: %(path)r" % vars())
raise error
if not os.path.isabs(path):
error = ValueError("Not an absolute path: %(path)r" % vars())
raise error
lockfile = pidlockfile.TimeoutPIDLockFile(path, acquire_timeout)
return lockfile
def is_pidfile_stale(pidfile):
""" Determine whether a PID file is stale.
Return ``True`` (“stale”) if the contents of the PID file are
valid but do not match the PID of a currently-running process;
otherwise return ``False``.
"""
result = False
pidfile_pid = pidfile.read_pid()
if pidfile_pid is not None:
try:
os.kill(pidfile_pid, signal.SIG_DFL)
except OSError, exc:
if exc.errno == errno.ESRCH:
# The specified PID does not exist
result = True
return result
|
chaluemwut/fbserver | venv/lib/python2.7/site-packages/sklearn/cluster/tests/test_mean_shift.py | Python | apache-2.0 | 2,844 | 0 | """
Testing for mean shift clustering methods
"""
import numpy as np
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_array_equal
from sklearn.cluster import MeanShift
from sklearn.cluster import mean_shift
from sklearn.cluster import estimate_bandwidth
from sklearn.cluster import get_bin_seeds
from sklearn.datasets.samples_generator import make_blobs
n_clusters = 3
centers = np.array([[1, 1], [-1, -1], [1, -1]]) + 10
X, _ = make_blobs(n_samples=300, n_features=2, centers=centers,
cluster_std=0.4, shuffle=True, random_state=11)
def test_estimate_bandwidth():
"""Test estimate_bandwidth"""
bandwidth = estimate_bandwidth(X, n_samples=200)
assert_true(0.9 <= bandwidth <= 1.5)
def test_mean_shift():
""" Test MeanShift algorithm """
bandwidth = 1.2
ms = MeanShift(bandwidth=bandwidth)
labels = ms.fit(X).labels_
cluster_centers = ms.cluster_centers_
labels_unique = np.unique(labels)
n_clusters_ = len(labels_unique)
assert_equal(n_clusters_, n_clusters)
cluster_centers, labels = mean_shift(X, bandwidth=bandwidth)
labels_unique = np.unique(labels)
n_clusters_ = len(labels_unique)
assert_equal(n_clusters_, n_clusters)
def test_meanshift_predict():
"""Test MeanShift.predict"""
ms = MeanShift(bandwidth=1.2)
labels = ms.fit_predict(X)
labels2 = ms.predict(X)
assert_array_equal(labels, labels2)
def test_unfitted():
"""Non-regression: before fit, there should be not fitted attributes."""
ms = MeanShift()
assert_false(hasattr(ms, "cluster_centers_"))
assert_false(hasattr(ms, "labels_"))
def test_bin_seeds():
"""
Test the bin seeding technique which can be used in the mean shift
algorithm
"""
# Data is j | ust 6 points in the plane
X = np.array([[1., 1.], [1.5, 1.5], [1.8, 1.2],
[2., 1.], [2.1, 1.1], [0., 0.]])
# With a bin coarseness of 1.0 and min_bin_freq of 1, 3 bins should be
# found
ground_truth = set([(1., 1.), (2., 1.), (0., 0.)])
test_bins = get_bin_seeds(X, 1, 1)
test_re | sult = set([tuple(p) for p in test_bins])
assert_true(len(ground_truth.symmetric_difference(test_result)) == 0)
# With a bin coarseness of 1.0 and min_bin_freq of 2, 2 bins should be
# found
ground_truth = set([(1., 1.), (2., 1.)])
test_bins = get_bin_seeds(X, 1, 2)
test_result = set([tuple(p) for p in test_bins])
assert_true(len(ground_truth.symmetric_difference(test_result)) == 0)
# With a bin size of 0.01 and min_bin_freq of 1, 6 bins should be found
test_bins = get_bin_seeds(X, 0.01, 1)
test_result = set([tuple(p) for p in test_bins])
assert_true(len(test_result) == 6)
|
jackrzhang/zulip | zerver/tests/test_timestamp.py | Python | apache-2.0 | 1,871 | 0.003741 |
from django.utils.timezone import utc as timezone_utc
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import floor_to_hour, floor_to_day, ceiling_to_hour, \
ceiling_to_day, timestamp_to_datetime, datetime_to_timestamp, \
TimezoneNotUTCException, convert_to_UTC
from datetime import datetime, timedelta
from dateutil import parser
import pytz
class TestTimestamp(ZulipTestCase):
def test_datetime_and_timestamp_conversions(self) -> None:
timestamp = 1483228800
for dt in [
parser.parse('2017-01-01 00:00:00.123 UTC'),
parser.parse('2017-01-01 00:00:00.123').r | eplace(tzinfo=timezone_utc),
parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=pytz.utc)]:
self.assertEqual(timestamp_to_datetime(timestamp), dt-timedelta(microseconds=123000))
self.assertEqual(datetime_to_ti | mestamp(dt), timestamp)
for dt in [
parser.parse('2017-01-01 00:00:00.123+01:00'),
parser.parse('2017-01-01 00:00:00.123')]:
with self.assertRaises(TimezoneNotUTCException):
datetime_to_timestamp(dt)
def test_convert_to_UTC(self) -> None:
utc_datetime = parser.parse('2017-01-01 00:00:00.123 UTC')
for dt in [
parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc),
parser.parse('2017-01-01 00:00:00.123'),
parser.parse('2017-01-01 05:00:00.123+05')]:
self.assertEqual(convert_to_UTC(dt), utc_datetime)
def test_enforce_UTC(self) -> None:
non_utc_datetime = parser.parse('2017-01-01 00:00:00.123')
        for function in [floor_to_hour, floor_to_day, ceiling_to_hour, ceiling_to_day]:
with self.assertRaises(TimezoneNotUTCException):
function(non_utc_datetime)
|
eldarion-gondor/pykube | pykube/exceptions.py | Python | apache-2.0 | 413 | 0 | """
Exceptions.
"""
class KubernetesError(Exception):
"""
Base exception for all Kubernetes errors.
"""
pass
class PyKubeError(KubernetesError):
"""
PyKube specific errors.
"""
pass
class HTTPError(PyKubeError):
def __in | it__(self, co | de, message):
super(HTTPError, self).__init__(message)
self.code = code
class ObjectDoesNotExist(PyKubeError):
pass
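# Illustrative usage sketch (added; the query below is hypothetical and only
# shows how callers are expected to handle this exception hierarchy):
#
#     try:
#         pod = Pod.objects(api).get(name="web-0")
#     except ObjectDoesNotExist:
#         pod = None
#     except HTTPError as exc:
#         log.error("API request failed with status %s", exc.code)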
|
gnome-keysign/gnome-keysign | tests/test_bluetooth.py | Python | gpl-3.0 | 6,199 | 0.001936 | import os
import logging
import select
import socket
from subprocess import check_call
import tempfile
import unittest
import gi
gi.require_version('Gtk', '3.0')
from nose.twistedtools import deferred
from nose.tools import *
from twisted.internet import threads
from twisted.internet.defer import inlineCallbacks
try:
from keysign.bluetoothoffer import BluetoothOffer
from keysign.bluetoothreceive import BluetoothReceive
HAVE_BT = True
except ImportError:
HAVE_BT = False
from keysign.gpgmeh import get_public_key_data, openpgpkey_from_data
from keysign.util import mac_generate
log = logging.getLogger(__name__)
thisdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.join(thisdir, "..")
@unittest.skipUnless(HAVE_BT, "requires bluetooth module")
def get_fixture_dir(fixture=""):
dname = os.path.join(thisdir, "fixtures", fixture)
return dname
@unittest.skipUnless(HAVE_BT, "requires bluetooth module")
def get_fixture_file(fixture):
fname = os.path.join(get_fixture_dir(), fixture)
return fname
@unittest.skipUnless(HAVE_BT, "requires bluetooth module")
def import_key_from_file(fixture, homedir):
fname = get_fixture_file(fixture)
original = open(fname, 'rb').read()
gpgcmd = ["gpg", "--homedir={}".format(homedir)]
# Now we import a single key
check_call(gpgcmd + ["--import", fname])
return openpgpkey_from_data(original)
@deferred(timeout=15)
@inlineCallbacks
@unittest.skipUnless(HAVE_BT, "requires bluetooth module")
def test_bt():
"""This test requires two working Bluetooth devices"""
# This should be a new, empty directory
homedir = tempfile.mkdtemp()
os.environ["GNUPGHOME"] = homedir
key = import_key_from_file("seckey-no-pw-1.asc", homedir)
file_key_data = get_public_key_data(key.fingerprint)
log.info("Running with key %r", key)
hmac = mac_generate( | key.fingerprint.encode('ascii'), file_key_data)
# Start offering the key
offer = BluetoothOffer(key)
data = yield offer.allocate_code()
# getting the code from "BT=code;...."
code = data.split("=", 1)[1]
code = code.split(";", 1)[0]
| port = int(data.rsplit("=", 1)[1])
offer.start()
receive = BluetoothReceive(port)
msg_tuple = yield receive.find_key(code, hmac)
downloaded_key_data, success, _ = msg_tuple
assert_true(success)
log.info("Checking with key: %r", downloaded_key_data)
assert_equal(downloaded_key_data.encode("utf-8"), file_key_data)
@deferred(timeout=15)
@inlineCallbacks
@unittest.skipUnless(HAVE_BT, "requires bluetooth module")
def test_bt_wrong_hmac():
"""This test requires two working Bluetooth devices"""
# This should be a new, empty directory
homedir = tempfile.mkdtemp()
os.environ["GNUPGHOME"] = homedir
key = import_key_from_file("seckey-no-pw-1.asc", homedir)
log.info("Running with key %r", key)
hmac = "wrong_hmac_eg_tampered_key"
# Start offering the key
offer = BluetoothOffer(key)
data = yield offer.allocate_code()
# getting the code from "BT=code;...."
code = data.split("=", 1)[1]
code = code.split(";", 1)[0]
port = int(data.rsplit("=", 1)[1])
offer.start()
receive = BluetoothReceive(port)
msg_tuple = yield receive.find_key(code, hmac)
downloaded_key_data, success, _ = msg_tuple
assert_false(success)
@deferred(timeout=15)
@inlineCallbacks
@unittest.skipUnless(HAVE_BT, "requires bluetooth module")
def test_bt_wrong_mac():
"""This test requires one working Bluetooth device"""
receive = BluetoothReceive()
msg_tuple = yield receive.find_key("01:23:45:67:89:AB", "hmac")
downloaded_key_data, success, error = msg_tuple
assert_is_none(downloaded_key_data)
assert_false(success)
assert_equal(error.args[0], "(112, 'Host is down')")
@deferred(timeout=15)
@inlineCallbacks
@unittest.skipUnless(HAVE_BT, "requires bluetooth module")
def test_bt_corrupted_key():
"""This test requires two working Bluetooth devices"""
@inlineCallbacks
def start(bo):
success = False
try:
while not success:
                # server_socket.accept() cannot be interrupted, so we use select
                # to call accept() only once a connection is already waiting
ready_to_read, ready_to_write, in_error = yield threads.deferToThread(
select.select, [bo.server_socket], [], [], 0.5)
if ready_to_read:
# We are sure that a connection is available, so we can call
# accept() without deferring it to a thread
client_socket, address = bo.server_socket.accept()
key_data = get_public_key_data(bo.key.fingerprint)
kd_decoded = key_data.decode('utf-8')
# We send only a part of the key. In this way we can simulate the case
# where the connection has been lost
                    half = len(kd_decoded) // 2  # integer division: slice indices must be ints
kd_corrupted = kd_decoded[:half]
yield threads.deferToThread(client_socket.sendall, kd_corrupted)
client_socket.shutdown(socket.SHUT_RDWR)
client_socket.close()
success = True
except Exception as e:
log.error("An error occurred: %s" % e)
# This should be a new, empty directory
homedir = tempfile.mkdtemp()
os.environ["GNUPGHOME"] = homedir
key = import_key_from_file("seckey-no-pw-1.asc", homedir)
log.info("Running with key %r", key)
file_key_data = get_public_key_data(key.fingerprint)
hmac = mac_generate(key.fingerprint.encode('ascii'), file_key_data)
# Start offering the key
offer = BluetoothOffer(key)
data = yield offer.allocate_code()
# getting the code from "BT=code;...."
code = data.split("=", 1)[1]
code = code.split(";", 1)[0]
port = int(data.rsplit("=", 1)[1])
start(offer)
receive = BluetoothReceive(port)
msg_tuple = yield receive.find_key(code, hmac)
downloaded_key_data, result, error = msg_tuple
assert_false(result)
assert_equal(type(error), ValueError)
|
mikisvaz/rbbt-util | python/rbbt.py | Python | mit | 109 | 0.009174 | impo | rt warnings
import sys
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def rbbt():
print | ("Rbbt")
|
cliftonmcintosh/openstates | billy_metadata/de.py | Python | gpl-3.0 | 3,486 | 0.00918 |
metadata = {
'name': 'Delaware',
'abbreviation': 'de',
'legislature_name': 'Delaware General Assembly',
'legislature_url': 'http://legis.delaware.gov/',
'capitol_timezone': 'America/New_York',
'chambers': {
'upper': {'name': 'Senate', 'title': 'Senator'},
'lower': {'name': 'House', 'title': 'Representative'},
},
'terms': [
{
'name': '1999-2000',
'start_year': 1999,
'end_year': 2000,
'sessions': ['140'],
},
{
'name': '2001-2002',
'start_year': 2001,
'end_year': 2002,
'sess | ions': ['141'],
},
{
'name | ': '2003-2004',
'start_year': 2003,
'end_year': 2004,
'sessions': ['142'],
},
{
'name': '2005-2006',
'start_year': 2005,
'end_year': 2006,
'sessions': ['143'],
},
{
'name': '2007-2008',
'start_year': 2007,
'end_year': 2008,
'sessions': ['144'],
},
{
'name': '2009-2010',
'start_year': 2009,
'end_year': 2010,
'sessions': ['145'],
},
{
'name': '2011-2012',
'start_year': 2011,
'end_year': 2012,
'sessions': ['146'],
},
{
'name': '2013-2014',
'start_year': 2013,
'end_year': 2014,
'sessions': ['147'],
},
{
'name': '2015-2016',
'start_year': 2015,
'end_year': 2016,
'sessions': ['148'],
},
{
'name': '2017-2018',
'start_year': 2017,
'end_year': 2018,
'sessions': ['149'],
},
],
'session_details': {
'140': {
'display_name': '140th General Assembly (1999-2000)',
'_scraped_name': '1998 - 2000 (GA 140)',
},
'141': {
'display_name': '141st General Assembly (2001-2002)',
'_scraped_name': '2000 - 2002 (GA 141)',
},
'142': {
'display_name': '142nd General Assembly (2003-2004)',
'_scraped_name': '2002 - 2004 (GA 142)',
},
'143': {
'display_name': '143rd General Assembly (2005-2006)',
'_scraped_name': '2004 - 2006 (GA 143)',
},
'144': {
'display_name': '144th General Assembly (2007-2008)',
'_scraped_name': '2006 - 2008 (GA 144)',
},
'145': {
'display_name': '145th General Assembly (2009-2010)',
'_scraped_name': '2008 - 2010 (GA 145)',
},
'146': {
'display_name': '146th General Assembly (2011-2012)',
'_scraped_name': '2010 - 2012 (GA 146)',
},
'147': {
'display_name': '147th General Assembly (2013-2014)',
'_scraped_name': '2012 - 2014 (GA 147)',
},
'148': {
'display_name': '148th General Assembly (2015-2016)',
'_scraped_name': '2014 - 2016 (GA 148)',
},
'149': {
'display_name': '149th General Assembly (2017-2018)',
'_scraped_name': '2016 - 2018 (GA 149)',
},
},
'feature_flags': ['events', 'influenceexplorer'],
'_ignored_scraped_sessions': [
],
}
|
rushiagr/keystone | keystone/contrib/federation/routers.py | Python | apache-2.0 | 9,192 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from keystone.common import json_home
from keystone.common import wsgi
from keystone.contrib.federation import controllers
build_resource_relation = functools.partial(
json_home.build_v3_extension_resource_relation,
extension_name='OS-FEDERATION', extension_version='1.0')
build_parameter_relation = functools.partial(
json_home.build_v3_extension_parameter_relation,
extension_name='OS-FEDERATION', extension_version='1.0')
IDP_ID_PARAMETER_RELATION = build_parameter_relation(parameter_name='idp_id')
PROTOCOL_ID_PARAMETER_RELATION = build_parameter_relation(
parameter_name='protocol_id')
SP_ID_PARAMETER_RELATION = build_parameter_relation(parameter_name='sp_id')
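# Added note: build_resource_relation / build_parameter_relation are the
# generic helpers from keystone.common.json_home partially applied with this
# extension's name and version, so each *_RELATION constant identifies a
# JSON Home relation for OS-FEDERATION 1.0 (the exact URL layout is defined
# by json_home and not repeated here).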
class Fede | rationExtension(wsgi.V3ExtensionRouter):
"""API Endpoints for the Federation extension.
The API looks like::
PUT /OS-FEDERATION/identity_providers/$identity_provider
GET /OS-FEDERATION/identity_providers
GET /OS-FEDERATION/identity_providers/$identity_provider
DELETE /OS-FEDERATION/identity_providers/$identity_provider
PATCH /OS-FEDERATION/identity_pr | oviders/$identity_provider
PUT /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
GET /OS-FEDERATION/identity_providers/
$identity_provider/protocols
GET /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
PATCH /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
DELETE /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
PUT /OS-FEDERATION/mappings
GET /OS-FEDERATION/mappings
PATCH /OS-FEDERATION/mappings/$mapping_id
GET /OS-FEDERATION/mappings/$mapping_id
DELETE /OS-FEDERATION/mappings/$mapping_id
GET /OS-FEDERATION/projects
GET /OS-FEDERATION/domains
PUT /OS-FEDERATION/service_providers/$service_provider
GET /OS-FEDERATION/service_providers
GET /OS-FEDERATION/service_providers/$service_provider
DELETE /OS-FEDERATION/service_providers/$service_provider
PATCH /OS-FEDERATION/service_providers/$service_provider
GET /OS-FEDERATION/identity_providers/$identity_provider/
protocols/$protocol/auth
POST /OS-FEDERATION/identity_providers/$identity_provider/
protocols/$protocol/auth
POST /auth/OS-FEDERATION/saml2
GET /OS-FEDERATION/saml2/metadata
GET /auth/OS-FEDERATION/websso/{protocol_id}
?origin=https%3A//horizon.example.com
POST /auth/OS-FEDERATION/websso/{protocol_id}
?origin=https%3A//horizon.example.com
"""
def _construct_url(self, suffix):
return "/OS-FEDERATION/%s" % suffix
def add_routes(self, mapper):
auth_controller = controllers.Auth()
idp_controller = controllers.IdentityProvider()
protocol_controller = controllers.FederationProtocol()
mapping_controller = controllers.MappingController()
project_controller = controllers.ProjectAssignmentV3()
domain_controller = controllers.DomainV3()
saml_metadata_controller = controllers.SAMLMetadataV3()
sp_controller = controllers.ServiceProvider()
# Identity Provider CRUD operations
self._add_resource(
mapper, idp_controller,
path=self._construct_url('identity_providers/{idp_id}'),
get_action='get_identity_provider',
put_action='create_identity_provider',
patch_action='update_identity_provider',
delete_action='delete_identity_provider',
rel=build_resource_relation(resource_name='identity_provider'),
path_vars={
'idp_id': IDP_ID_PARAMETER_RELATION,
})
self._add_resource(
mapper, idp_controller,
path=self._construct_url('identity_providers'),
get_action='list_identity_providers',
rel=build_resource_relation(resource_name='identity_providers'))
# Protocol CRUD operations
self._add_resource(
mapper, protocol_controller,
path=self._construct_url('identity_providers/{idp_id}/protocols/'
'{protocol_id}'),
get_action='get_protocol',
put_action='create_protocol',
patch_action='update_protocol',
delete_action='delete_protocol',
rel=build_resource_relation(
resource_name='identity_provider_protocol'),
path_vars={
'idp_id': IDP_ID_PARAMETER_RELATION,
'protocol_id': PROTOCOL_ID_PARAMETER_RELATION,
})
self._add_resource(
mapper, protocol_controller,
path=self._construct_url('identity_providers/{idp_id}/protocols'),
get_action='list_protocols',
rel=build_resource_relation(
resource_name='identity_provider_protocols'),
path_vars={
'idp_id': IDP_ID_PARAMETER_RELATION,
})
# Mapping CRUD operations
self._add_resource(
mapper, mapping_controller,
path=self._construct_url('mappings/{mapping_id}'),
get_action='get_mapping',
put_action='create_mapping',
patch_action='update_mapping',
delete_action='delete_mapping',
rel=build_resource_relation(resource_name='mapping'),
path_vars={
'mapping_id': build_parameter_relation(
parameter_name='mapping_id'),
})
self._add_resource(
mapper, mapping_controller,
path=self._construct_url('mappings'),
get_action='list_mappings',
rel=build_resource_relation(resource_name='mappings'))
# Service Providers CRUD operations
self._add_resource(
mapper, sp_controller,
path=self._construct_url('service_providers/{sp_id}'),
get_action='get_service_provider',
put_action='create_service_provider',
patch_action='update_service_provider',
delete_action='delete_service_provider',
rel=build_resource_relation(resource_name='service_provider'),
path_vars={
'sp_id': SP_ID_PARAMETER_RELATION,
})
self._add_resource(
mapper, sp_controller,
path=self._construct_url('service_providers'),
get_action='list_service_providers',
rel=build_resource_relation(resource_name='service_providers'))
self._add_resource(
mapper, domain_controller,
path=self._construct_url('domains'),
get_action='list_domains_for_groups',
rel=build_resource_relation(resource_name='domains'))
self._add_resource(
mapper, project_controller,
path=self._construct_url('projects'),
get_action='list_projects_for_groups',
rel=build_resource_relation(resource_name='projects'))
self._add_resource(
mapper, auth_controller,
path=self._construct_url('identity_providers/{identity_provider}/'
'protocols/{protocol}/auth'),
get_post_action='federated_authentication',
rel=build_resource_relation(
resource_name='identity_provider_protocol_auth'),
path_vars={
'identity_provider' |
steinitzu/aptfinder | aptfinder/web/__init__.py | Python | mit | 249 | 0.004016 | from flask import Flask
from flask_compress | import Compress
from .. import db
app = Flask(__name__)
app.config.from_pyfile('../config.py')
from . import views
Compress(app)
@app.before_first_request
def initialize_database():
| db.init_db()
|
caseman/grease | test/entity_test.py | Python | mit | 6,470 | 0.036012 | import unittest
import itertools
class TestWorld(object):
def __init__(self, **kw):
self.__dict__.update(kw)
self.components = self
self.entities = set()
self.new_entity_id = itertools.count().__next__
self.new_entity_id() # skip id 0
for comp in list(kw.values()):
comp.world = self
class TestComponent(dict):
def __init__(self):
self.entities = set()
def set(self, entity):
data = TestData()
self[entity] = data
self.entities.add(entity)
return data
def remove(self, entity):
del self[entity]
class TestData(object):
attr = 'deadbeef'
def __init__(self, **kw):
self.__dict__.update(kw)
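# Added note: TestWorld/TestComponent/TestData above are a minimal stand-in
# entity-component scaffold for these tests; TestComponent.set() attaches a
# fresh TestData record (default attr 'deadbeef') to an entity, mimicking how
# a real grease component stores per-entity data.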
class EntityTestCase(unittest.TestCase):
def test_repr(self):
from grease import Entity
entity = Entity(TestWorld())
self.assertTrue(repr(entity).startswith(
'<Entity id: %s of TestWorld' % entity.entity_id),
('<Entity id: %s of TestWorld' % entity.entity_id, repr(entity)))
def test_accessor_getattr_for_nonexistant_component(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
self.assertTrue(entity not in comp)
self.assertRaises(AttributeError, getattr, entity, 'foo')
def test_accessor_getattr_for_non_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
accessor = entity.test
self.assertFalse(entity in comp)
self.assertRaises(AttributeError, getattr, accessor, 'attr')
def test_accessor_getattr_for_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertTrue(entity in comp)
self.assertEqual(entity.test.attr, 'deadbeef')
def test_accessor_setattr_adds_non_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
self.assertFalse(entity in comp)
entity.test.attr = 'foobar'
self.assertEqual(entity.test.attr, 'foobar')
self.assertTrue(entity in comp)
def test_accessor_setattr_for_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertNotEqual(entity.test.attr, 'spam')
entity.test.attr = 'spam'
self.assertTrue(entity in comp)
self.assertEqual(entity.test.attr, 'spam')
def test_eq(self):
from grease import Entity
world = TestWorld()
e1 = Entity(world)
e2 = Entity(world)
self.assertNotEqual(e1, e2)
e2.entity_id = e1.entity_id
self.assertEqual(e1, e2)
otherworld = TestWorld()
e3 = Entity(otherworld)
self.assertNotEqual(e1, e3)
self.assertNotEqual(e2, e3)
e3.entity_id = e1.entity_id
self.assertNotEqual(e1, e3)
self.assertNotEqual(e2, e3)
def test_delattr(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertTrue(entity in comp)
del entity.test
self.assertFalse(entity in comp)
def test_entity_id(self):
from grease import Entity
world = TestWorld()
entity1 = Entity(world)
entity2 = Entity(world)
self.assertTrue(entity1.entity_id > 0)
self.assertTrue(entity2.entity_id > 0)
self.assertNotEqual(entity1.entity_id, entity2.entity_id)
def test_delete_exists( | self):
from grease import Entity
world = TestWorld()
self.assertEqual(world.entities, set())
e | ntity1 = Entity(world)
entity2 = Entity(world)
self.assertEqual(world.entities, set([entity1, entity2]))
self.assertTrue(entity1.exists)
self.assertTrue(entity2.exists)
entity1.delete()
self.assertEqual(world.entities, set([entity2]))
self.assertFalse(entity1.exists)
self.assertTrue(entity2.exists)
entity2.delete()
self.assertEqual(world.entities, set())
self.assertFalse(entity1.exists)
self.assertFalse(entity2.exists)
def test_entity_subclass_slots(self):
from grease import Entity
class NewEntity(Entity):
pass
world = TestWorld()
entity = NewEntity(world)
self.assertRaises(AttributeError, setattr, entity, 'notanattr', 1234)
def test_entity_subclass_cant_have_slots(self):
from grease import Entity
self.assertRaises(TypeError,
type, 'Test', (Entity,), {'__slots__': ('foo', 'bar')})
def test_entity_subclass_init(self):
from grease import Entity
stuff = []
class TestEntity(Entity):
def __init__(self, world, other):
stuff.append(world)
stuff.append(other)
world = TestWorld()
TestEntity(world, self)
self.assertEqual(stuff, [world, self])
class EntityComponentAccessorTestCase(unittest.TestCase):
def test_getattr(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = {entity: TestData(foo=5)}
accessor = EntityComponentAccessor(component, entity)
self.assertEqual(accessor.foo, 5)
self.assertRaises(AttributeError, getattr, accessor, 'bar')
entity2 = Entity(world)
accessor = EntityComponentAccessor(component, entity2)
self.assertRaises(AttributeError, getattr, accessor, 'foo')
self.assertRaises(AttributeError, getattr, accessor, 'bar')
def test_setattr_member_entity(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
data = TestData(foo=5)
accessor = EntityComponentAccessor({entity: data}, entity)
self.assertEqual(data.foo, 5)
accessor.foo = 66
self.assertEqual(data.foo, 66)
accessor.bar = '!!'
self.assertEqual(data.bar, '!!')
def test_setattr_nonmember_entity(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = TestComponent()
accessor = EntityComponentAccessor(component, entity)
self.assertRaises(AttributeError, getattr, entity, 'baz')
self.assertTrue(entity not in component)
accessor.baz = 1000
self.assertTrue(entity in component)
self.assertEqual(accessor.baz, 1000)
self.assertEqual(component[entity].baz, 1000)
def test_truthiness(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = TestComponent()
accessor = EntityComponentAccessor(component, entity)
self.assertFalse(accessor)
component[entity] = 456
self.assertTrue(accessor)
if __name__ == '__main__':
unittest.main()
|
pf4d/dolfin-adjoint | tests_dolfin/viscoelasticity/timings/unannotated.py | Python | lgpl-3.0 | 7,692 | 0.00533 | __author__ = "Marie E. Rognes (meg@simula.no)"
__copyright__ = "Copyright (C) 2012 Marie Rognes"
__license__ = "Distribute at will"
"""
Schematic drawing (labels starting A1 are springs, labels starting A0 are dashpots)
| A10 --- A00 |
----- | | --------
| A11 |
Standard linear solid (SLS) viscoelastic model:
A_E^0 \dot \sigma_0 + A_V^0 \sigma_0 = strain(u)
A_E^1 \dot \sigma_1 = strain(v)
\sigma = \sigma_0 + \sigma_1
\div \sigma = gx
\skew \sigma = 0
NB: Mesh in mm, remember that Pa = N/m^2 = kg/(m s^2) = g/(mm s^2)
Give bc and Lame parameters in kPa -> displacements in mm, velocities
in mm/s, stresses in kPa
"""
import sys
import pylab
from dolfin import *
from dolfin import div as d
penalty_beta = 10**8 # NB: Sensitive to this for values less than 10^6
dirname = "test-results"
# Vectorized div
def div(v):
return as_vector((d(v[0]), d(v[1]), d(v[2])))
# Vectorized skew
def skw(tau):
s = 2*skew(tau) # FIXME: Why did I put a 2 here?
return as_vector((s[0][1], s[0][2], s[1][2]))
# Compliance tensors (Semi-arbitrarily chosen values and units)
def A00(tau):
"Maxwell dashpot (eta)"
mu = Constant(3.7466 * 10) # kPa
lamda = Constant(10**4) # kPa
foo = 1.0/(2*mu)*(tau - lamda/(2*mu + 3*lamda)*tr(tau)*Identity(3))
return foo
def A10(tau):
"Maxwell spring (A2)"
mu = Constant(4.158)
lamda = Constant(10**3) # kPa
foo = 1.0/(2*mu)*(tau - lamda/(2*mu + 3*lamda)*tr(tau)*Identity(3))
return foo
def A11(tau):
"Elastic spring (A1)"
mu = Constant(2.39) # kPa
lamda = Constant(10**3) # kPa
foo = 1.0/(2*mu)*(tau - lamda/(2*mu + 3*lamda)*tr(tau)*Identity(3))
return foo
def get_box():
"Use this for simple testing."
n = 2
mesh = Box(0., 0., 0., 20., 20., 100., 2*n, 2*n, 10*n)
# Mark all facets by 0, exterior facets by 1, and then top and
# bottom by 2
boundaries = FacetFunction("uint", mesh)
boundaries.set_all(0)
on_bdry = AutoSubDomain(lambda x, on_boundary: on_boundary)
top = AutoSubDomain(lambda x, on_boundary: near(x[2], 100.))
bottom = AutoSubDomain(lambda x, on_boundary: near(x[2], 0.0))
on_bdry.mark(boundaries, 1)
top.mark(boundaries, 2)
bottom.mark(boundaries, 2)
return (mesh, boundaries)
def get_spinal_cord():
"Mesh generated by Martin Alnaes using VMTK"
#mesh = Mesh("../mesh_edgelength4.xml.gz") # Coarse mesh
mesh = Mesh("mesh_edgelength2.xml.gz")
boundaries = mesh.domains().facet_domains(mesh)
for (i, a) in enumerate(boundaries.array()):
if a > 10:
boundaries.array()[i] = 0
if a == 3:
boundaries.array()[i] = 2
return (mesh, boundaries)
def crank_nicolson_step(Z, z_, k_n, g, v_D_mid, ds):
# Define trial and test functions
(sigma0, sigma1, v, gamma) = TrialFunctions(Z)
(tau0, tau1, w, eta) = TestFunctions(Z)
# Extract previous components
(sigma0_, sigma1_, v_, gamma_) = split(z_)
# Define midpoint values for brevity
def avg(q, q_):
return 0.5*(q + q_)
sigma0_mid = avg(sigma0, sigma0_)
sigma1_mid = avg(sigma1, sigma1_)
v_mid = avg(v, v_)
gamma_mid = avg(gamma, gamma_)
# Define form
n = FacetNormal(Z.mesh())
F = (inner(inv(k_n)*A10(sigma0 - sigma0_), tau0)*dx
+ inner(A00(sigma0_mid), tau0)*dx
+ inner(inv(k_n)*A11(sigma1 - sigma1_), tau1)*dx
+ inner(div(tau0 + tau1), v_mid)*dx
+ inner(skw(tau0 + tau1), gamma_mid)*dx
+ inner(div(sigma0_mid + sigma1_mid), w)*dx
+ inner(skw(sigma0_mid + sigma1_mid), eta)*dx
- inner(0.5*v_, (tau0 + tau1)*n)*ds(1)
- inner(v_D_mid, (tau0 + tau1)*n)*ds(2) # Velocity on dO_D
)
    # It is tricky to enforce Dirichlet boundary conditions on varying sums
    # of components (as with slip conditions for Stokes, for instance), so
    # use a penalty formulation instead
beta = Constant(penalty_beta)
h = tetrahedron.volume
F_penalty = 0.5*(beta*inv(h)*inner((tau0 + tau1)*n,
(sigma0 + sigma1)*n - g)*ds(1))
F = F + F_penalty
return F
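# Added note (schematic): the form above is the Crank-Nicolson (midpoint)
# discretisation of the constitutive laws with step k and
# q_mid = (q^{n+1} + q^n)/2:
#
#   A10((sigma0^{n+1} - sigma0^n)/k) + A00(sigma0_mid) = strain terms,
#   A11((sigma1^{n+1} - sigma1^n)/k)                   = strain terms,
#
# with the equilibrium/symmetry equations tested at the midpoint and the
# boundary traction g imposed weakly through the penalty term.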
def bdf2_step(Z, z_, z__, k_n, g, v_D, ds):
# Define trial and test functions
(sigma0, sigma1, v, gamma) = TrialFunctions(Z)
(tau0, tau1, w, eta) = TestFunctions(Z)
# Extract previous components
(sigma0_, sigma1_, v_, gamma_) = split(z_)
(sigma0__, sigma1__, v__, gamma__) = split(z__)
# Define complete form
n = FacetNormal(Z.mesh())
F = (inner(inv(k_n)*A10(1.5*sigma0 - 2.*sigma0_ + 0.5*sigma0__), tau0)*dx
+ inner(A00(sigma0), tau0)*dx
+ inner(inv(k_n)*A11(1.5*sigma1 - 2.*sigma1_ + 0.5*sigma1__), tau1)*dx
+ inner(div(tau0 + tau1), v)*dx
+ inner(skw(tau0 + tau1), gamma)*dx
+ inner(div(sigma0 + sigma1), w)*dx
+ inner(skw(sigma0 + sigma1), eta)*dx
- inner(v_D, (tau0 + tau1)*n)*ds(2)
)
# Enforce essential bc on stress by penalty
beta = Constant(penalty_beta)
h = tetrahedron.volume
F_penalty = beta*inv(h)*inner((tau0 + tau1)*n,
(sigma0 + sigma1)*n - g)*ds(1)
F = F + F_penalty
return F
# Quick testing for box:
(mesh, boundaries) = get_box()
p = Expression("0.05*sin(2*pi*t)*1.0/(100)*x[2]", t=0)
# Semi-realistic stuff:
#(mesh, boundaries) = get_spinal_cord()
#p = Expression("0.05*sin(2*pi*t)*(1.0/(171 - 78)*(x[2] - 78))", t=0) # kPa
# Define function spaces
S = VectorFunctionSpace(mesh, "BDM", 1)
V = VectorFunctionSpace(mesh, "DG", 0)
Q = VectorFunctionSpace(mesh, "DG", 0)
CG1 = VectorFunctionSpace(mesh, "CG", 1)
Z = MixedFunctionSpace([S, S, V, Q])
def main(ic, T=1.0, dt=0.01):
# dk = half the timestep
dk = dt/2.0
parameters["form_compiler"]["optimize"] = True
parameters["form_compiler"]["cpp_optimize"] = True
ds = Measure("ds")[boundaries]
# Define functions for previous timestep (z_), half-time (z_star)
# and current (z)
z_ = Function(ic)
z_star = Function(Z)
z = Function(Z)
# Boundary conditions
v_D_mid = Function(V) # Velocity condition at half time
v_D = Function(V) # Velocity condition at time
# Boundary traction (pressure originating from CSF flow)
n = FacetNormal(mesh)
g = - p*n
F_cn = crank_nicolson_step(Z, z_, Constant(dk), g, v_D_mid, ds)
(a_cn, L_cn) = system(F_cn)
A_cn = assemble(a_cn)
cn_solver = LUSolver(A_cn)
cn_solver.parameters["reuse_factorization"] = True
F_bdf = bdf2_step(Z, z_star, z_, Constant(dk), g, v_D, ds)
(a_bdf, L_bdf) = system(F_bdf)
A_bdf = assemble(a_bdf)
bdf_solver = LUSolver(A_bdf)
bdf_solver.parameters["reuse_factorization"] = True
progress = Progress("Time-iteration", int(T/dt))
t = dk
iteration = 1
while (t <= T):
# Half-time step:
# Update source(s)
p.t = t
# Assemble right-hand side for CN system
b = assemble(L_cn)
# Solve Crank-Nicolson system
cn_solver.solve(z_star.vector(), b)
# Increase time
t += dk
# Next-time step:
# Update sources
p.t = t
# Assemble right-hand side for BDF system
b = assemble(L_bdf)
# Solve BDF system
bdf_solver.solve(z.vector(), b)
# Update time and variables
t += dk
z_.assign(z)
progress += 1
iteration += 1
return z_
if __name__ == "__main__":
# Adjust behaviour at will:
T = 0.05
dt = 0.01
set_log_level(PROGRES | S)
ic = Function(Z)
ic_co | py = Function(ic)
# Play forward run
info_blue("Running forward ... ")
z = main(ic, T=T, dt=dt)
|
morinim/vita | src/setversion.py | Python | mpl-2.0 | 2,950 | 0.004407 | #!/usr/bin/env python3
#
# Copyright (C) 2017-2020 EOS di Manlio Morini.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/
#
# A python program that helps to set up a new version of Vita.
#
import argparse
import datetime
import os
import re
def version_str(args):
return str(args.major) + "." + str(args.minor) + "." + str(args.maintenance)
def file_process(name, rule, args):
print("--- Processing " + os.path.basename(name))
with open(name) as source:
data = rule(source.read(), args)
if not data:
return
print("Writing " + name)
    with open(name, "w") as dest:
        dest.write(data)
def changelog_rule(data, args):
new_version = version_str(args)
regex = r"## \[Unreleased\]"
subst = r"## [Unreleased]\n\n## [" + new_version + r"] - " + datetime.date.today().isoformat()
result = re.subn(regex, subst, data)
if result[1] != 1:
return None
regex = r"(\[Unreleased)(\]: https://github.com/morinim/vita/compare/v)(.+)(\.\.\.HEAD)"
subst = r"\g<1> | \g<2>" + new_version + r"\g<4>\n[" + new_version + r"\g<2>\g<3>...v" + new_version
| result = re.subn(regex, subst, result[0])
return result[0] if result[1] == 1 else None
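# Example (added for clarity): with version 1.2.3 the first substitution turns
#   "## [Unreleased]"
# into
#   "## [Unreleased]" followed by a new "## [1.2.3] - <today>" heading, and
# the second one adds a matching compare link for the new tag.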
def doxygen_rule(data, args):
regex = r"([\s]+)(\*[\s]+\\mainpage VITA v)([\d]+)\.([\d]+)\.([\d]+)([\s]*)"
subst = r"\g<1>\g<2>" + version_str(args) + r"\g<6>"
result = re.subn(regex, subst, data)
return result[0] if result[1] > 0 else None
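# Example (added for clarity): with version 1.2.3 a line such as
# " * \mainpage VITA v0.9.1" becomes " * \mainpage VITA v1.2.3".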
def get_cmd_line_options():
description = "Helps to set up a new version of Vita"
parser = argparse.ArgumentParser(description = description)
parser.add_argument("-v", "--verbose", action = "store_true",
help = "Turn on verbose mode")
# Now the positional arguments.
parser.add_argument("major", type=int)
parser.add_argument("minor", type=int)
parser.add_argument("maintenance", type=int)
return parser
def main():
args = get_cmd_line_options().parse_args()
print("Setting version to v" + str(args.major)
+ "." + str(args.minor)
+ "." + str(args.maintenance))
file_process("../NEWS.md", changelog_rule, args)
file_process("../doc/doxygen/doxygen.h", doxygen_rule, args)
print("\n\nRELEASE NOTE\n")
print("1. Build. cmake -DCMAKE_BUILD_TYPE=Release -B build/ src/ ; cmake --build build/")
print("2. Check. cd build/ ; ./tests")
print('3. Commit. git commit -am "[DOC] Changed revision number to v'
+ version_str(args) + '"')
print("4. Tag. git tag -a v" + version_str(args) + " -m \"tag message\"")
print("\nRemember to 'git push' both code and tag. For the tag:\n")
print(" git push origin [tagname]\n")
if __name__ == "__main__":
main()
|
rg3/youtube-dl | youtube_dl/extractor/adobepass.py | Python | unlicense | 41,407 | 0.000773 | # coding: utf-8
from __future__ import unicode_literals
import re
import time
import xml.etree.ElementTree as etree
from .common import InfoExtractor
from ..compat import (
compat_kwargs,
compat_urlparse,
)
from ..utils import (
unescapeHTML,
urlencode_postdata,
unified_timestamp,
ExtractorError,
NO_DEFAULT,
)
MSO_INFO = {
'DTV': {
'name': 'DIRECTV',
'username_field': 'username',
'password_field': 'password',
},
'ATT': {
'name': 'AT&T U-verse',
'username_field': 'userid',
'password_field': 'password',
},
'ATTOTT': {
'name': 'DIRECTV NOW',
'username_field': 'email',
'password_field': 'loginpassword',
},
'Rogers': {
'name': 'Rogers',
'username_field': 'UserName',
'password_field': 'UserPassword',
},
'Comcast_SSO': {
'name': 'Comcast XFINITY',
'username_field': 'user',
'password_field': 'passwd',
},
'TWC': {
'name': 'Time Warner Cable | Spectrum',
'username_field': 'Ecom_User_ID',
'password_field': 'Ecom_Password',
},
'Brighthouse': {
'name': 'Bright House Networks | Spectrum',
'username_field': 'j_username',
'password_field': 'j_password',
},
'Charter_Direct': {
'name': 'Charter Spectrum',
'username_field': 'IDToken1',
'password_field': 'IDToken2',
},
'Verizon': {
'name': 'Verizon FiOS',
'username_field': 'IDToken1',
'password_field': 'IDToken2',
},
'thr030': {
'name': '3 Rivers Communications'
},
'com140': {
'name': 'Access Montana'
},
'acecommunications': {
'name': 'AcenTek'
},
'acm010': {
'name': 'Acme Communications'
},
'ada020': {
'name': 'Adams Cable Service'
},
'alb020': {
'name': 'Albany Mutual Telephone'
},
'algona': {
'name': 'Algona Municipal Utilities'
},
'allwest': {
'name': 'All West Communications'
},
'all025': {
'name': 'Allen\'s Communications'
},
'spl010': {
'name': 'Alliance Communications'
},
'all070': {
'name': 'ALLO Communications'
},
'alpine': {
'name': 'Alpine Communications'
},
'hun015': {
'name': 'American Broadband'
},
'nwc010': {
'name': 'American Broadband Missouri'
},
'com130-02': {
'name': 'American Community Networks'
},
'com130-01': {
'name': 'American Warrior Networks'
},
'tom020': {
'name': 'Amherst Telephone/Tomorrow Valley'
},
'tvc020': {
'name': 'Andycable'
},
'arkwest': {
'name': 'Arkwest Communications'
},
'art030': {
'name': 'Arthur Mutual Telephone Company'
},
'arvig': {
'name': 'Arvig'
},
'nttcash010': {
'name': 'Ashland Home Net'
},
'astound': {
'name': 'Astound (now Wave)'
},
'dix030': {
'name': 'ATC Broadband'
},
'ara010': {
'name': 'ATC Communications'
},
'she030-02': {
'name': 'Ayersville Communications'
},
'baldwin': {
'name': 'Baldwin Lightstream'
},
'bal040': {
'name': 'Ballard TV'
},
'cit025': {
'name': 'Bardstown Cable TV'
},
'bay030': {
'name': 'Bay Country Communications'
},
'tel095': {
'name': 'Beaver Creek Cooperative Telephone'
},
'bea020': {
'name': 'Beaver Valley Cable'
},
'bee010': {
'name': 'Bee Line Cable'
},
'wir030': {
'name': 'Beehive Broadband'
},
'bra020': {
'name': 'BELD'
},
'bel020': {
'name': 'Bellevue Municipal Cable'
},
'vol040-01': {
'name': 'Ben Lomand Connect / BLTV'
},
'bev010': {
'name': 'BEVCOMM'
},
'big020': {
'name': 'Big Sandy Broadband'
},
'ble020': {
'name': 'Bledsoe Telephone Cooperative'
},
'bvt010': {
'name': 'Blue Valley Tele-Communications'
},
'bra050': {
'name': 'Brandenburg Telephone Co.'
},
'bte010': {
'name': 'Bristol Tennessee Essential Services'
},
'annearundel': {
'name': 'Broadstripe'
},
'btc010': {
'name': 'BTC Communications'
},
'btc040': {
'name': 'BTC Vision - Nahunta'
},
'bul010': {
'name': 'Bulloch Telephone Cooperative'
},
'but010': {
'name': 'Butler-Bremer Communications'
},
'tel160-csp': {
'name': 'C Spire SNAP'
},
'csicable': {
'name': 'Cable Services Inc.'
},
'cableamerica': {
'name': 'CableAmerica'
},
'cab038': {
'name': 'CableSouth Media 3'
},
'weh010-camtel': {
'name': 'Cam-Tel Company'
},
'car030': {
'name': 'Cameron Communications'
},
'canbytel': {
'name': 'Canby Telcom'
},
'crt020': {
'name': 'CapRock Tv'
},
'car050': {
'name': 'Carnegie Cable'
},
'cas': {
'name': 'CAS Cable'
},
'casscomm': {
'name': 'CASSCOMM'
},
'mid180-02': {
'name': 'Catalina Broadband Solutions'
},
'cccomm': {
'name': 'CC Communications'
},
'nttccde010': {
'name': 'CDE Lightband'
},
'cfunet': {
'name': 'Cedar Falls Utilities'
},
'dem010-01': {
'name': 'Celect-Bloomer Telephone Area'
},
'dem010-02': {
'name': 'Celect-Bruce Telephone Area'
},
'dem010-03': {
'name': 'Celect-Citizens Connected Area'
},
'dem010-04': {
'name': 'Celect-Elmwood/Spring Valley Area'
},
'dem010-06': {
'name': 'Celect-Mosaic Telecom'
},
'dem010-05': {
'name': 'Celect-West WI Telephone Area'
},
'net010-02': {
'name': 'Cellcom/Nsight Telservices'
},
'cen100': {
'name': 'CentraCom'
},
'nttccst010': {
'name': 'Central Scott / CSTV'
},
'cha035': {
'name': 'Chaparral CableVision'
},
'cha050': {
'name': 'Chariton Valley Communication Corporation, Inc.'
},
'cha060': {
'name': 'Chatmoss Cablevision'
},
'nttcche010': {
'name': 'Cherokee Communications'
},
'che050': {
'name': 'Chesapeake Bay Communications'
},
'cimtel': {
'name': 'Cim-Tel Cable, LLC.'
},
'cit180': {
'name': 'Citizens Cablevision - Floyd, VA'
},
'cit210': {
'name': 'Citizens Cablevision, Inc.'
},
'cit040': {
'name': 'Citizens Fiber'
},
'cit250': {
'name': 'Citizens Mutual'
},
'war040': {
'name': 'Citizens Telephone Corporation'
},
'wat025': {
'name': 'City Of Monroe'
},
'wadsworth': {
'name': 'CityLink'
},
'nor100': {
'name': 'CL Tel'
},
'cla010': {
'name': 'Clarence Telephone and Cedar Communications'
},
'ser060': {
'name': 'Clear Choice Communications'
},
'tac020': {
'name': 'Click! Cable TV'
},
'war020': {
'name': 'CLICK1.NET'
},
'cml010': {
'name': 'CML Telephone C | ooperative Association'
},
'cns': {
'name': 'CNS'
| },
'com160': {
'name': 'Co-Mo Connect'
},
'coa020': {
'name': 'Coast Communications'
},
'coa030': {
'name': 'Coaxial Cable TV'
},
'mid055': {
'name': 'Cobalt TV (Mid-State Community TV)'
},
'col070': {
'name': 'Columbia Power & Water Systems'
},
'col080': {
'name': 'Columbus Telephone'
},
'nor105': {
'name': 'Communications 1 Cablevision, Inc.'
},
'com150': {
'name': 'Community Cable & Broadband'
},
'com020': {
'name': 'Community Communications Company'
},
'coy010': {
'name': 'commZoom'
},
'com025': {
'name': 'Complete Communication Services'
},
'cat020': {
'name |
hassanibi/erpnext | erpnext/patches/v7_1/move_sales_invoice_from_parent_to_child_timesheet.py | Python | gpl-3.0 | 853 | 0.029308 | from __future__ | import unicode_literals
import frappe
def execute():
frappe.reload_doc('projects', 'doctype', 'timesheet_detail')
frappe.reload_doc('accounts', 'doctype', 'sales_invoice_timesheet')
| frappe.db.sql(""" update
`tabTimesheet` as ts,
(select
sum(billing_amount) as billing_amount, sum(billing_hours) as billing_hours, time_sheet
from `tabSales Invoice Timesheet` where docstatus = 1 group by time_sheet
) as sit
set
ts.total_billed_amount = sit.billing_amount, ts.total_billed_hours = sit.billing_hours,
ts.per_billed = ((sit.billing_amount * 100)/ts.total_billable_amount)
where ts.name = sit.time_sheet and ts.docstatus = 1""")
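	# Added example: a submitted timesheet with total_billable_amount = 200
	# whose submitted invoices bill 50 in total gets per_billed = 25.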
frappe.db.sql(""" update `tabTimesheet Detail` tsd, `tabTimesheet` ts set tsd.sales_invoice = ts.sales_invoice
where tsd.parent = ts.name and ts.sales_invoice is not null""") |
agry/NGECore2 | scripts/object/tangible/wearables/bracelet/item_bracelet_r_set_officer_dps_01_01.py | Python | lgpl-3.0 | 1,158 | 0.022453 | import sys
def setup(core, object):
object.setStfFilename('static_item_n')
object.setStfName('item_bracelet_r_set_officer_dps_01_01')
object.setDetailFilename('static_item_d')
object.setDetailName('item_bracelet_r_set_officer_dps_01_01')
object.setStringAttribute('class_required', 'Officer')
object.setIntAttribute('required_combat_level', 85)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_action_line_of_paint', 1)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_action_line_of_sure', 1)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_cooldown_line_of_paint', 2)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_cooldown_line_of_sure', 1)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_freeshot_of_sure', 1)
object.setStringAttribute('@set_bonus:piece_bonus_count_3', '@set_bonus:set_bonus_officer_dps_1')
object.setStringAttribute('@set_bonus:piece_bonus_count_4', '@set_bonus:set_bonus_officer_dps_2')
object.setStringAttribute('@set_bonus:piece_bonus_count_5', '@set_b | onus:set_bonus_officer_dps_3')
object.setAtt | achment('setBonus', 'set_bonus_officer_dps')
return |
webdev1001/ansible | v2/ansible/plugins/connections/__init__.py | Python | gpl-3.0 | 1,420 | 0.000704 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ans | ible is free software: you can redistribute it and/or modify
# it under the terms of the GNU | General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
# FIXME: this object should be created upfront and passed through
# the entire chain of calls to here, as there are other things
# which may want to output display/logs too
from ansible.utils.display import Display
__all__ = ['ConnectionBase']
class ConnectionBase:
'''
A base class for connections to contain common code.
'''
def __init__(self, connection_info, *args, **kwargs):
self._connection_info = connection_info
self._has_pipelining = False
self._display = Display(connection_info)
|
tcpcloud/contrail-controller | src/config/common/zkclient.py | Python | apache-2.0 | 14,983 | 0.00287 | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import os
import gevent
import logging
import kazoo.client
import kazoo.exceptions
import kazoo.handlers.gevent
import kazoo.recipe.election
from kazoo.client import KazooState
from kazoo.retry import KazooRetry
from bitarray import bitarray
from cfgm_common.exceptions import ResourceExhaustionError, ResourceExistsError
from gevent.coros import BoundedSemaphore
import uuid
LOG_DIR = '/var/log/contrail/'
class IndexAllocator(object):
def __init__(self, zookeeper_client, path, size=0, start_idx=0,
reverse=False,alloc_list=None, max_alloc=0):
self._size = size
self._start_idx = start_idx
if alloc_list is None:
self._alloc_list = [{'start':start_idx, 'end':start_idx+size}]
else:
sorted_alloc_list = sorted(alloc_list, key=lambda k: k['start'])
self._alloc_list = sorted_alloc_list
alloc_count = len(self._alloc_list)
total_size = 0
size = 0
#check for overlap in alloc_list --TODO
for alloc_idx in range (0, alloc_count -1):
idx_start_addr = self._alloc_list[alloc_idx]['start']
idx_end_addr = self._alloc_list[alloc_idx]['end']
next_start_addr = self._alloc_list[alloc_idx+1]['start']
if next_start_addr <= idx_end_addr:
raise Exception(
'Allocation Lists Overlapping: %s' %(alloc_list))
size += idx_end_addr - idx_start_addr + 1
size += self._alloc_list[alloc_count-1]['end'] - self._alloc_list[alloc_count-1]['start'] + 1
if max_alloc == 0:
self._max_alloc = size
else:
self._max_alloc = max_alloc
self._zookeeper_client = zookeeper_client
self._path = path
self._in_use = bitarray('0')
self._reverse = reverse
for idx in self._zookeeper_client.get_children(path):
idx_int = self._get_bit_from_zk_index(int(idx))
if idx_int >= 0:
self._set_in_use(idx_int)
# end for idx
# end __init__
def _get_zk_index_from_bit(self, idx):
size = idx
if self._reverse:
for alloc in reversed(self._alloc_list):
size -= alloc['end'] - alloc['start'] + 1
if size < 0:
return alloc['start']-size - 1
else:
for alloc in self._alloc_list:
size -= alloc['end'] - alloc['start'] + 1
if size < 0:
return alloc['end']+size + 1
raise ResourceExhaustionError(
'Cannot get zk index from bit %s' %(idx))
# end _get_zk_index
def _get_bit_from_zk_index(self, idx):
size = 0
if self._reverse:
for alloc in reversed(self._alloc_list):
if alloc['start'] <= idx <= alloc['end']:
return alloc['end'] - idx + size
size += alloc['end'] - alloc['start'] + 1
pass
else:
for alloc in self._alloc_list:
if alloc['start'] <= idx <= alloc['end']:
return idx - alloc['start'] + size
size += alloc['end'] - alloc['start'] + 1
return -1
# end _get_bit_from_zk_index
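    # Worked example (added for clarity): with
    #   alloc_list = [{'start': 100, 'end': 101}, {'start': 200, 'end': 202}]
    # and reverse=False, bits 0..4 map to zk indices [100, 101, 200, 201, 202],
    # so _get_bit_from_zk_index(201) == 3 and _get_zk_index_from_bit(3) == 201.
    # With reverse=True the same bits map from the highest index downwards.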
def _set_in_use(self, bitnum):
# if the index is higher than _max_alloc, do not use the bitarray, in
# order to reduce the size of the bitarray. Otherwise, set the bit
# corresponding to idx to 1 and extend the _in_use bitarray if needed
if bitnum > self._max_alloc:
return
if bitnum >= self._in_use.length():
temp = bitarray(bitnum - self._in_use.length())
temp.setall(0)
temp.append('1')
self._in_use.extend(temp)
else:
self._in_use[bitnum] = 1
# end _set_in_use
def _reset_in_use(self, bitnum):
        # if the index is higher than _max_alloc, or beyond the current length
        # of the bitarray, there is nothing to clear; otherwise reset the bit
        # corresponding to idx to 0
if bitnum > self._max_alloc:
return
if bitnum >= self._in_use.length():
return
else:
self._in_use[bitnum] = 0
# end _reset_in_use
def set_in_use(self, idx):
bit_idx = self._get_bit_from_zk_index(idx)
if bit_idx < 0:
return
self._set_in_use(bit_idx)
# end set_in_use
def reset_in_use(self, idx):
bit_idx = self._get_bit_from_zk_index(idx)
if bit_idx < 0:
return
self._reset_in_use(bit_idx)
# end reset_in_use
def get_alloc_count(self):
return self._in_use.count()
# end get_alloc_count
def alloc(self, value=None):
        # Allocates an index from the allocation list
if self._in_use.all():
idx = self._in_use.length()
if idx > self._max_alloc:
raise ResourceExhaustionError()
self._in_use.append(1)
else:
idx = self._in_use.index(0)
self._in_use[idx] = 1
idx = self._get_zk_index_from_bit(idx)
try:
# Create a node at path and return its integer value
id_str = "%(#)010d" % {'#': idx}
self._zookeeper_client.create_node(self._path + id_str, value)
return idx
except ResourceExistsError:
return self.alloc(value)
# end alloc
def reserve(self, idx, value=None):
# Reserves the requested index if available
if not self._start_idx <= idx < self._start_idx + self._size:
return None
try:
# Create a node at path and return its integer value
id_str = "%(#)010d" % {'#': idx}
self._zookeeper_client.create_node(self._path + id_str, value)
self.set_in_use(idx)
return idx
except ResourceExistsError:
self.set_in_use(idx)
existing_value = self.read(idx)
if (value == existing_value):
# idempotent reserve
return idx
msg = 'For index %s reserve conflicts with existing value %s.' \
%(idx, existing_value)
self._zookeeper_client.syslog(msg, level='notice')
raise
# end reserve
def delete(self, idx):
id_str = "%(#)010d" % {'#': idx}
self._zookeeper_client.delete_node(self._path + id_str)
bit_idx = self._get_bit_from_zk_index(idx)
if 0 <= bit_idx < self._in_use.length():
self._in_use[bit_idx] = 0
# end delete
def read(self, idx):
id_str = "%(#)010d" % {'#': idx}
id_val = self._zookeeper_client.read_node(self._path+id_str)
if id_val is not None:
bit_idx | = self._get_bit_from_zk_index(idx)
if bit_idx >= 0:
self._set_in_use(bit_idx)
return id_val
# end read
def empty(self):
return not self._in_use.any()
# end empty
@classmethod
def delete_all(cls, zookeeper_client, path):
try:
zookeeper_client.delete_node(path, recursive=True)
except | kazoo.exceptions.NotEmptyError:
#TODO: Add retries for NotEmptyError
zookeeper_client.syslog("NotEmptyError while deleting %s" % path)
# end delete_all
#end class IndexAllocator
class ZookeeperClient(object):
def __init__(self, module, server_list, logging_fn=None):
# logging
logger = logging.getLogger(module)
logger.setLevel(logging.DEBUG)
try:
handler = logging.handlers.RotatingFileHandler(
LOG_DIR + module + '-zk.log', maxBytes=10*1024*1024, backupCount=5)
except IOError:
print "Cannot open log file in %s" %(LOG_DIR)
else:
log_format = logging.Formatter('%(asctime)s [%(name)s]: %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p')
handler.setFormatter(log_format)
logge |
Tesora/tesora-tempest | tempest/api/compute/servers/test_create_server.py | Python | apache-2.0 | 14,903 | 0 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
import testtools
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest.common.utils.linux import remote_client
from tempest.common import waiters
from tempest import config
from tempest import test
CONF = config.CONF
class ServersTestJSON(base.BaseV2ComputeTest):
disk_config = 'AUTO'
@classmethod
def setup_credentials(cls):
cls.prepare_instance_network()
super(ServersTestJSON, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(ServersTestJSON, cls).setup_clients()
cls.client = cls.servers_client
cls.networks_client = cls.os.networks_client
cls.subnets_client = cls.os.subnets_client
@classmethod
def resource_setup(cls):
cls.set_validation_resources()
super(ServersTestJSON, cls).resource_setup()
cls.meta = {'hello': 'world'}
cls.accessIPv4 = '1.1.1.1'
cls.accessIPv6 = '0000:0000:0000:0000:0000:babe:220.12.22.2'
cls.name = data_utils.rand_name(cls.__name__ + '-server')
cls.password = data_utils.rand_password()
disk_config = cls.disk_config
cls.server_initial = cls.create_test_server(
validatable=True,
wait_until='ACTIVE',
name=cls.name,
metadata=cls.meta,
accessIPv4=cls.accessIPv4,
accessIPv6=cls.accessIPv6,
disk_config=disk_config,
adminPass=cls.password)
cls.server = (cls.client.show_server(cls.server_initial['id'])
['server'])
def _create_net_subnet_ret_net_from_cidr(self, cidr):
name_net = data_utils.rand_name(self.__class__.__name__)
net = self.networks_client.create_network(name=name_net)
self.addCleanup(self.networks_client.delete_network,
net['network']['id'])
subnet = self.subnets_client.create_subnet(
network_id=net['network']['id'],
cidr=cidr,
ip_version=4)
self.addCleanup(self.subnets_client.delete_subnet,
subnet['subnet']['id'])
return net
@test.attr(type='smoke')
@test.idempotent_id('5de47127-9977-400a-936f-abcfbec1218f')
def test_verify_server_details(self):
# Verify the specified server attributes are set correctly
self.assertEqual(self.accessIPv4, self.server['accessIPv4'])
# NOTE(maurosr): See http://tools.ietf.org/html/rfc5952 (section 4)
# Here we compare directly with the canonicalized format.
self.assertEqual(self.server['accessIPv6'],
str(netaddr.IPAddress(self.accessIPv6)))
self.assertEqual(self.name, self.server['name'])
self.assertEqual(self.image_ref, self.server['image']['id'])
self.assertEqual(self.flavor_ref, self.server['flavor']['id'])
self.assertEqual(self.meta, self.server['metadata'])
@test.attr(type='smoke')
@test.idempotent_id('9a438d88-10c6-4bcd-8b5b-5b6e25e1346f')
def test_list_servers(self):
# The created server should be in the list of all servers
body = self.client.list_servers()
servers = body['servers']
found = any([i for i in servers if i['id'] == self.server['id']])
self.assertTrue(found)
@test.idempotent_id('585e934c-448e-43c4-acbf-d06a9b899997')
def test_list_servers_with_detail(self):
# The created server should be in the detailed list of all servers
body = self.client.list_servers(detail=True)
servers = body['servers']
found = any([i for i in servers if i['id'] == self.server['id']])
self.assertTrue(found)
@test.idempotent_id('cbc0f52f-05aa-492b-bdc1-84b575ca294b')
@testtools.skipUnless(CONF.validation.run_validation,
'Instance validation tests are disabled.')
def test_verify_created_server_vcpus(self):
# Verify that the number of vcpus reported by the instance matches
# the amount stated by the flavor
flavor = self.flavors_client.show_flavor(self.flavor_ref)['flavor']
linux_client = remote_client.RemoteClient(
self.get_server_ip(self.server),
self.ssh_user,
self.password,
self.validation_resources['keypair']['private_key'],
server=self.server,
servers_client=self.client)
self.assertEqual(flavor['vcpus'], linux_client.get_number_of_vcpus())
@test.idempotent_id('ac1ad47f-984b-4441-9274-c9079b7a0666')
@testtools.skipUnless(CONF.validation.run_validation,
'Instance validation tests are disabled.')
def test_host_name_is_same_as_server_name(self):
# Verify the instance host name is the same as the server name
linux_client = remote_client.RemoteClient(
self.get_server_ip(self.server),
self.ssh_user,
self.password,
self.validation_resources['keypair']['private_key'],
server=self.server,
servers_client=self.client)
hostname = linux_client.get_hostname()
msg = ('Failed while verifying servername equals hostname. Expected '
'hostname "%s" but got "%s".' % (self.name, hostname))
self.assertEqual(self.name.lower(), hostname, msg)
@test.idempotent_id('ed20d3fb-9d1f-4329-b160-543fbd5d9811')
@testtools.skipUnless(
test.is_scheduler_filter_enabled("ServerGroupAffinityFilter"),
'ServerGroupAffinityFilter is not available.')
def test_create_server_with_scheduler_hint_group(self):
# Create a server with the scheduler hint "group".
group_id = self.create_test_server_group()['id']
hints = {'group': group_id}
server = self.create_test_server(scheduler_hints=hints,
wait_until='ACTIVE')
# Check a server is in the group
server_group = (self.server_groups_client.show_server_group(group_id)
['server_group'])
self.assertIn(server['id'], server_group['members'])
@test.idempotent_id('0578d144-ed74-43f8-8e57-ab10dbf9b3c2')
@testtools.skipUnless(CONF.service_available.neutron,
'Neutron service must be available.')
def test_verify_multiple_nics_order(self):
# Verify that the networks order given at the server creation is
# preserved within the server.
net1 = self._create_net_subnet_ret_net_from_cidr('19.80.0.0/24')
net2 = self._create_net_subnet_ret_net_from_cidr('19.86.0.0/24')
networks = [{'uuid': net1['network']['id']},
{'uuid': net2['network']['id']}]
server_multi_nics = self.create_test_server(
networks=networks, wait_until='ACTIVE')
# Cleanup server; this is needed in the test case because with the LIFO
# nature of the cleanups, if we don't delete the server first, the port
# will still be part of the subnet and we'll get a 409 from Neutron
# when trying to delete the subnet. The tear down in the base class
# will try to delete the server and get a 404 but it's ignored so
# we're OK.
def cleanup_server():
self.client.delete_server( | server_multi_nics['id'])
waiters.wait_for_server_termination(self.client,
| server_multi_nics['id'])
self.addCleanup(cleanup_server)
        addresses = self.client.list_addresses(server_multi_nics['id'])['addresses']
kyleabeauchamp/pymbar | scripts/benchmark_covariance.py | Python | lgpl-2.1 | 2,091 | 0.015304 | import pandas as pd
import numpy as np
import pymbar
from pymbar.testsystems.pymbar_datasets import load_gas_data, load_8proteins_data
import time
def load_oscillators(n_states, n_samples):
name = "%dx%d oscillators" % (n_states, n_samples)
O_k = np.linspace(1, 5, n_states)
k_k = np.linspace(1, 3, n_states)
N_k = (np.ones(n_states) * n_samples).astype('int')
test = pymbar.testsystems.harmonic_osc | illators.HarmonicOscillatorsTestCase(O_k, k_k)
x_n, u_kn, N_k_output, s_n = test.sample(N_k, mode='u_kn')
return name, u_kn, N_k_output, s_n
def load_expone | ntials(n_states, n_samples):
name = "%dx%d exponentials" % (n_states, n_samples)
rates = np.linspace(1, 3, n_states)
N_k = (np.ones(n_states) * n_samples).astype('int')
test = pymbar.testsystems.exponential_distributions.ExponentialTestCase(rates)
x_n, u_kn, N_k_output, s_n = test.sample(N_k, mode='u_kn')
return name, u_kn, N_k_output, s_n
mbar_gens = {"new":lambda u_kn, N_k: pymbar.MBAR(u_kn, N_k)}
systems = [lambda : load_exponentials(25, 100), lambda : load_exponentials(100, 100), lambda : load_exponentials(250, 250),
lambda : load_oscillators(25, 100), lambda : load_oscillators(100, 100), lambda : load_oscillators(250, 250),
lambda : load_oscillators(500, 100), lambda : load_oscillators(1000, 50), lambda : load_oscillators(2000, 20), lambda : load_oscillators(4000, 10),
lambda : load_exponentials(500, 100), lambda : load_exponentials(1000, 50), lambda : load_exponentials(2000, 20), lambda : load_oscillators(4000, 10),
load_gas_data, load_8proteins_data]
timedata = []
for version, mbar_gen in mbar_gens.items():
for sysgen in systems:
name, u_kn, N_k, s_n = sysgen()
K, N = u_kn.shape
mbar = mbar_gen(u_kn, N_k)
time0 = time.time()
fij, dfij = mbar.getFreeEnergyDifferences(uncertainty_method="svd-ew-kab")
dt = time.time() - time0
timedata.append([name, K, N, dt])
timedata = pd.DataFrame(timedata, columns=["name", "K", "N", "time"])
print timedata.to_string(float_format=lambda x: "%.3g" % x)
|
joefutrelle/pocean-core | pocean/dsg/profile/om.py | Python | mit | 7,029 | 0.001423 | # -*- coding: utf-8 -*-
from datetime import datetime
from collections import namedtuple
import netCDF4 as nc4
import numpy as np
import pandas as pd
from pygc import great_distance
from shapely.geometry import Point, LineString
from pocean.utils import unique_justseen, normalize_array, generic_masked
from pocean.cf import CFDataset
from | pocean import logger
|
class OrthogonalMultidimensionalProfile(CFDataset):
"""
If the profile instances have the same number of elements and the vertical
coordinate values are identical for all instances, you may use the
orthogonal multidimensional array representation. This has either a
one-dimensional coordinate variable, z(z), provided the vertical coordinate
values are ordered monotonically, or a one-dimensional auxiliary coordinate
variable, alt(o), where o is the element dimension. In the former case,
listing the vertical coordinate variable in the coordinates attributes of
the data variables is optional.
"""
@classmethod
def is_mine(cls, dsg):
try:
pvars = dsg.filter_by_attrs(cf_role='profile_id')
assert len(pvars) == 1
assert dsg.featureType.lower() == 'profile'
assert len(dsg.t_axes()) == 1
assert len(dsg.x_axes()) == 1
assert len(dsg.y_axes()) == 1
assert len(dsg.z_axes()) == 1
# Allow for string variables
pvar = pvars[0]
# 0 = single
# 1 = array of strings/ints/bytes/etc
# 2 = array of character arrays
assert 0 <= len(pvar.dimensions) <= 2
ps = normalize_array(pvar)
is_single = ps.size == 1
t = dsg.t_axes()[0]
x = dsg.x_axes()[0]
y = dsg.y_axes()[0]
z = dsg.z_axes()[0]
assert len(z.dimensions) == 1
z_dim = dsg.dimensions[z.dimensions[0]]
if is_single:
assert t.size == 1
assert x.size == 1
assert y.size == 1
for dv in dsg.data_vars():
assert len(dv.dimensions) == 1
assert z_dim.name in dv.dimensions
assert dv.size == z_dim.size
else:
assert t.size == pvar.size
assert x.size == pvar.size
assert y.size == pvar.size
p_dim = dsg.dimensions[pvar.dimensions[0]]
for dv in dsg.data_vars():
assert len(dv.dimensions) == 2
assert z_dim.name in dv.dimensions
assert p_dim.name in dv.dimensions
assert dv.size == z_dim.size * p_dim.size
except BaseException:
return False
return True
def from_dataframe(self, df, variable_attributes=None, global_attributes=None):
variable_attributes = variable_attributes or {}
global_attributes = global_attributes or {}
raise NotImplementedError
def calculated_metadata(self, df=None, geometries=True, clean_cols=True, clean_rows=True):
if df is None:
df = self.to_dataframe(clean_cols=clean_cols, clean_rows=clean_rows)
profiles = {}
for pid, pgroup in df.groupby('profile'):
pgroup = pgroup.sort_values('t')
first_row = pgroup.iloc[0]
profile = namedtuple('Profile', ['min_z', 'max_z', 't', 'x', 'y', 'loc'])
profiles[pid] = profile(
min_z=pgroup.z.min(),
max_z=pgroup.z.max(),
t=first_row.t,
x=first_row.x,
y=first_row.y,
loc=Point(first_row.x, first_row.y)
)
geometry = None
first_row = df.iloc[0]
first_loc = Point(first_row.x, first_row.y)
if geometries:
coords = list(unique_justseen(zip(df.x, df.y)))
if len(coords) > 1:
geometry = LineString(coords)
elif len(coords) == 1:
geometry = first_loc
meta = namedtuple('Metadata', ['min_z', 'max_z', 'min_t', 'max_t', 'profiles', 'first_loc', 'geometry'])
return meta(
min_z=df.z.min(),
max_z=df.z.max(),
min_t=df.t.min(),
max_t=df.t.max(),
profiles=profiles,
first_loc=first_loc,
geometry=geometry
)
def to_dataframe(self, clean_cols=True, clean_rows=True):
zvar = self.z_axes()[0]
zs = len(self.dimensions[zvar.dimensions[0]])
# Profiles
pvar = self.filter_by_attrs(cf_role='profile_id')[0]
try:
p = normalize_array(pvar)
except ValueError:
p = np.asarray(list(range(len(pvar))), dtype=np.integer)
ps = p.size
p = p.repeat(zs)
logger.debug(['profile data size: ', p.size])
# Z
z = generic_masked(zvar[:], attrs=self.vatts(zvar.name)).round(5)
try:
z = np.tile(z, ps)
except ValueError:
z = z.flatten()
logger.debug(['z data size: ', z.size])
# T
tvar = self.t_axes()[0]
t = nc4.num2date(tvar[:], tvar.units, getattr(tvar, 'calendar', 'standard'))
if isinstance(t, datetime):
# Size one
t = np.array([t.isoformat()], dtype='datetime64')
t = t.repeat(zs)
logger.debug(['time data size: ', t.size])
# X
xvar = self.x_axes()[0]
x = generic_masked(xvar[:].repeat(zs), attrs=self.vatts(xvar.name)).round(5)
logger.debug(['x data size: ', x.size])
# Y
yvar = self.y_axes()[0]
y = generic_masked(yvar[:].repeat(zs), attrs=self.vatts(yvar.name)).round(5)
logger.debug(['y data size: ', y.size])
# Distance
d = np.ma.zeros(y.size, dtype=np.float64)
d[1:] = great_distance(start_latitude=y[0:-1], end_latitude=y[1:], start_longitude=x[0:-1], end_longitude=x[1:])['distance']
d = generic_masked(np.cumsum(d), minv=0).round(2)
logger.debug(['distance data size: ', d.size])
df_data = {
't': t,
'x': x,
'y': y,
'z': z,
'profile': p,
'distance': d
}
building_index_to_drop = np.ones(t.size, dtype=bool)
extract_vars = list(set(self.data_vars() + self.ancillary_vars()))
for i, dvar in enumerate(extract_vars):
vdata = np.ma.fix_invalid(np.ma.MaskedArray(dvar[:].round(3).flatten()))
building_index_to_drop = (building_index_to_drop == True) & (vdata.mask == True) # noqa
df_data[dvar.name] = vdata
df = pd.DataFrame(df_data)
# Drop all data columns with no data
if clean_cols:
df = df.dropna(axis=1, how='all')
# Drop all data rows with no data variable data
if clean_rows:
df = df.iloc[~building_index_to_drop]
return df
|
cevaris/python-consul-hiera | consul_hiera/utils.py | Python | mit | 1,919 | 0.001042 | import fnmatch
import os
import re
import yaml
from consul_hiera import (
HieraConfig,
HIERARCHY,
)
def find_files(directory, pattern='*'):
if not os.path.exists(directory):
raise ValueError("Directory not found {}".format(directory))
matches = []
for root, dirnames, filenames in os.walk(directory):
for filename in filenames:
full_path = os.path.join(root, filename)
if fnmatch.filter([full_path], | pattern):
matches.append(os.path.join(root, filename))
return matches
def find_files_regex(directory, pattern='.+'):
pattern = re.compile(pattern)
if not os.path.exists(directory):
raise ValueError("Directory not found {}".format(directory))
matches = []
for root, dirnames, filenames in os.walk(directory):
| for filename in filenames:
full_path = os.path.join(root, filename)
if re.search(pattern, full_path):
matches.append(os.path.join(root, filename))
return matches
def find_yaml_files(directory):
return find_files(directory, pattern='*.yaml')
def parse_hiera_config(config_path):
"""
Given file path to valid hiera yaml file, parse yaml
return HieraConfig object
"""
if not os.path.exists(config_path):
raise ValueError("File not found {}".format(config_path))
hconfig = HieraConfig()
    with open(config_path) as config_file:
        data = yaml.safe_load(config_file)  # safe_load avoids executing arbitrary YAML tags
        if HIERARCHY in data:
            hconfig.hierarchy = data[HIERARCHY]
        hconfig._yaml_doc = data
return hconfig
def convert_hierarchy_to_regex(line):
"""
...
'customer/%{customer}/nodes/%{clientcert}',
'customer/%{customer}/%{program}/%{deploy_environment}/%{role}',
...
"""
pattern = r'%{\w+}'
regex = re.compile(pattern)
    # note: a compiled Pattern.sub takes (repl, string, count); flags belong in re.compile
    result = regex.sub('.*', line)
return result
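# e.g. convert_hierarchy_to_regex('customer/%{customer}/nodes/%{clientcert}')
# returns 'customer/.*/nodes/.*', ready to match concrete hiera data paths.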
|
mirimmad/LiveCam | _camera.py | Python | mit | 325 | 0.049231 | #Copyright | Mir Immad - RealTimeCam
import cv2
import numpy as np
class VideoCamera(object):
def __init__(self):
self.video = cv2.VideoCapture(0)
def __del__(self):
self.video.release()
def get_frame(self):
success, image = self.video.read()
| ret, jpeg = cv2.imencode('.jpg', image)
return jpeg.tostring()
|
seatgeek/businesstime | businesstime/holidays/uk.py | Python | bsd-2-clause | 2,093 | 0 | import datetime
import json
import os
from businesstime.holidays import Holidays
__all__ = (
'EnglandHolidays', 'WalesHolidays', 'ScotlandHolidays',
'NorthernIrelandHolidays',
)
PWD = os.path.dirname(os.path.realpath(__file__))
DEFAULT_ | HOLIDAYS_FILEPATH = os.path.join(PWD, 'data', 'uk-bank-holidays.json')
class UKHolidays(Holidays):
"""
List from https://www.gov.uk/bank-holidays.json
e.g. curl https://www.gov.uk/bank-holidays.json -o uk-bank-holidays.json
"""
DIVISION_CHOICES = ('england-and-wales', 'scotland', 'northern-ireland', )
division = None
def __init__(self, *args, **kwargs):
if self.division not in | self.DIVISION_CHOICES:
raise ValueError(
"'division' class attribute must be one of {}. You picked: {}"
.format(self.DIVISION_CHOICES, self.division)
)
self.holidays = kwargs.pop('holidays', None)
holidays_filepath = kwargs.pop('holidays_filepath',
DEFAULT_HOLIDAYS_FILEPATH)
if self.holidays is None:
self.holidays = self._get_holidays_from_filepath(holidays_filepath)
super(UKHolidays, self).__init__(*args, **kwargs)
@classmethod
def _get_holidays_from_filepath(cls, filepath):
with open(filepath) as f:
return cls._parse_holidays_file(f)
@classmethod
def _parse_holidays_file(cls, holidays_file):
_holidays = json.load(holidays_file)
return [
datetime.datetime.strptime(event['date'], "%Y-%m-%d").date()
for event in _holidays[cls.division]['events']
]
def isholiday(self, dt):
if isinstance(dt, datetime.datetime):
return dt.date() in self.holidays
return dt in self.holidays
class EnglandHolidays(UKHolidays):
division = 'england-and-wales'
class WalesHolidays(UKHolidays):
division = 'england-and-wales'
class ScotlandHolidays(UKHolidays):
division = 'scotland'
class NorthernIrelandHolidays(UKHolidays):
division = 'northern-ireland'
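# Usage sketch (illustrative; assumes the bundled JSON data file is present):
#     holidays = EnglandHolidays()
#     holidays.isholiday(datetime.date(2015, 12, 25))  # True if listed in the data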
|
yeleman/snisi | snisi_reprohealth/aggregations.py | Python | mit | 7,786 | 0.000257 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
from django.utils import timezone
from snisi_core.models.Reporting import (ExpectedReporting,
ExpectedValidation, ReportClass)
from snisi_core.models.ValidationPeriods import DefaultRegionValidationPeriod
from snisi_core.models.Projects import Cluster
from snisi_core.models.Roles import Role
from snisi_core.models.Entities import Entity
from snisi_core.models.Providers import Provider
from snisi_reprohealth.models.PFActivities import (AggPFActivitiesR,
PFActivitiesR)
from snisi_reprohealth import (ROUTINE_DISTRICT_AGG_DAY,
ROUTINE_REGION_AGG_DAY)
from snisi_reprohealth.integrity import PROJECT_BRAND
from snisi_core.models.Notifications import Notification
logger = logging.getLogger(__name__)
autobot = Provider.get_or_none('autobot')
reportcls_slug = "msi_pf_monthly_routine_aggregated"
rclass = ReportClass.get_or_none(reportcls_slug)
charge_sis = Role.get_or_none("charge_sis")
mali = Entity.get_or_none("mali")
cluster = Cluster.get_or_none("msi_reprohealth_routine")
def get_districts():
    return [e for e in cluster.members() if e.type.slug == 'health_district']
def get_regions():
    return [e for e in cluster.members() if e.type.slug == 'health_region']
def generate_district_reports(period,
ensure_correct_date=True):
logger.info("Switching to {}".format(period))
region_validation_period = DefaultRegionValidationPeriod \
.find_create_by_date(period.middle())
if ensure_correct_date:
now = timezone.now()
if not period.following().includes(now) \
or not now.day == ROUTINE_DISTRICT_AGG_DAY:
            logger.error("Not allowed to generate district agg "
                         "outside day %d of the following period",
                         ROUTINE_DISTRICT_AGG_DAY)
return
districts = get_districts()
# loop on all districts
for district in districts:
# skip if exists
if AggPFActivitiesR.objects.filter(
period=period, entity=district).count():
continue
# ack expected
exp = ExpectedReporting.objects.filter(
report_class=rclass,
entity__slug=district.slug,
period=period)
# not expected
if exp.count() == 0:
continue
else:
# might explode if 2 exp but that's the point
exp = exp.get()
logger.info("\tAt district {}".format(district))
# auto-validate non-validated reports
for report in PFActivitiesR.objects.filter(
period=period, entity__in=district.get_health_centers()):
if not report.validated:
expv = ExpectedValidation.objects.get(report=report)
expv.acknowledge_validation(
validated=True,
validated_by=autobot,
validated_on=timezone.now(),
auto_validated=True)
# create AggPFActivitiesR
agg = AggPFActivitiesR.create_from(
period=period,
entity=district,
created_by=autobot)
exp.acknowledge_report(agg)
# create expected validation
ExpectedValidation.objects.create(
report=agg,
validation_period=region_validation_period,
validating_entity=district.get_health_region(),
validating_role=charge_sis)
# send notification to Region
# for recipient in Provider.active.filter(
# role=charge_sis, location=agg.entity.get_health_region()):
# Notification.create(
# provider=recipient,
# deliver=Notification.TODAY,
# expirate_on=region_validation_period.end_on,
# category=PROJECT_BRAND,
# text="Le rapport (aggrégé) de routine PF/MSI mensuel "
# "de {period} pour {entity} est prêt. "
# "No reçu: #{receipt}. "
| # "Vous devez le valider avant le 25.".format(
# entity=agg.entity.display_full_name(),
# period=agg.period,
# | receipt=agg.receipt)
# )
def generate_region_country_reports(period,
ensure_correct_date=True):
logger.info("Switching to {}".format(period))
if ensure_correct_date:
now = timezone.now()
if not period.following().includes(now) \
or not now.day == ROUTINE_REGION_AGG_DAY:
            logger.error("Not allowed to generate region agg "
                         "outside day %d of the following period",
                         ROUTINE_REGION_AGG_DAY)
return
regions = get_regions()
# loop on all regions
for region in regions:
# ack expected
exp = ExpectedReporting.objects.filter(
report_class=rclass,
entity__slug=region.slug,
period=period)
if exp.count() == 0:
continue
else:
exp = exp.get()
logger.info("\tAt region {}".format(region))
# loop on districts
for district in [d for d in region.get_health_districts()
if d in cluster.members()]:
logger.info("\t\tAt district {}".format(district))
try:
# ack validation (auto)
expv = ExpectedValidation.objects.get(
report__entity=district,
report__period=period,
report__report_cls='snisi_reprohealth.models.'
'PFActivities.AggPFActivitiesR')
except ExpectedValidation.DoesNotExist:
continue
expv.acknowledge_validation(
validated=True,
validated_by=autobot,
validated_on=timezone.now(),
auto_validated=True)
# create AggPFActivitiesR/region
agg = AggPFActivitiesR.create_from(
period=period,
entity=region,
created_by=autobot)
exp.acknowledge_report(agg)
# validate (no expected validation)
agg.record_validation(
validated=True,
validated_by=autobot,
validated_on=timezone.now(),
auto_validated=True)
logger.info("\t\tAt {}".format(mali))
# ack expected
exp = ExpectedReporting.objects.get(
report_class=rclass,
entity__slug=mali.slug,
period=period)
if exp is None:
return
# create AggPFActivitiesR/country
agg = AggPFActivitiesR.create_from(
period=period,
entity=mali,
created_by=autobot)
exp.acknowledge_report(agg)
# validate (no expected validation)
agg.record_validation(
validated=True,
validated_by=autobot,
validated_on=timezone.now(),
auto_validated=True)
# send notification to National level
for recipient in Provider.active.filter(location__level=0):
Notification.create(
provider=recipient,
deliver=Notification.TODAY,
expirate_on=agg.period.following().following().start_on,
category=PROJECT_BRAND,
text="Le rapport national (aggrégé) de routine PF/MSI mensuel "
"pour {period} est disponible. No reçu: #{receipt}."
.format(period=agg.period, receipt=agg.receipt))
|
8l/beri | cheritest/trunk/tests/mem/test_lh_unalign.py | Python | apache-2.0 | 2,215 | 0.004515 | #-
# Copyright (c) 2011 Robert N. M. Watson
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applic | able law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
class test_lh_unalign(BaseBERITestCase):
def test_epc(self):
self.assertRegisterEqual(self.MIPS | .a0, self.MIPS.a5, "Unexpected EPC")
def test_returned(self):
self.assertRegisterEqual(self.MIPS.a1, 1, "flow broken by lh instruction")
def test_handled(self):
self.assertRegisterEqual(self.MIPS.a2, 1, "lh exception handler not run")
def test_exl_in_handler(self):
self.assertRegisterEqual((self.MIPS.a3 >> 1) & 0x1, 1, "EXL not set in exception handler")
def test_cause_bd(self):
self.assertRegisterEqual((self.MIPS.a4 >> 31) & 0x1, 0, "Branch delay (BD) flag improperly set")
def test_cause_code(self):
self.assertRegisterEqual((self.MIPS.a4 >> 2) & 0x1f, 4, "Code not set to AdEL")
def test_not_exl_after_handler(self):
self.assertRegisterEqual((self.MIPS.a6 >> 1) & 0x1, 0, "EXL still set after ERET")
def test_badvaddr(self):
        self.assertRegisterEqual(self.MIPS.a7, self.MIPS.s0, "BadVAddr not equal to the unaligned address")
|
hastern/jelly | cli.py | Python | mit | 3,654 | 0.000274 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import argparse
import collections
from functools import wraps
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class CommandLine(object):
"""Create a command line interface for the application.
Can call any core method as action.
    Careful: The function definition order is reflected in the cli.
    It can be reordered using the *weight* flag of the initializer:
    'lighter' arguments go first.
"""
arguments = collections.OrderedDict()
@classmethod
def handle(self, core, name):
"""Handle the command line arguments.
Returns true if the gui is to be shown, this is controlled
through the 'batch' argument."""
call_buckets = collections.defaultdict(list)
# Build the ArgumentParser
arg_parser = argparse.ArgumentParser(name)
for name, arg in self.arguments.iteritems():
arg_parser.add_argument(
"--{}".format(name),
**{key: val for key, val in filter(lambda e: e is not None, [
("nargs", len(arg.args)) if len(arg.args) > 0 else None,
("metavar", arg.args) if arg.action == "store" else None,
("type", arg.type) if arg.action == "store" else None,
("default", arg.default),
("action", arg.action),
("help", arg.help)
])}
)
call_buckets[arg.weight].append(arg)
# Add batch argument to suppress gui
arg_parser.add_argument("--batch", "-b", "--no-gui",
help="Run in batch mode (Don't show the gui)",
action="store_true",
default=sys.flags.interactive)
# Parse all arguments
args = arg_parser.parse_args()
# Check all actions
logger.debug(call_buckets)
call_order = sorte | d(call_buckets.keys())
for weight in call_order:
for arg in call_buckets[weight]:
params = getattr(args, arg.name.replace("-", "_"))
method = getattr(core, arg.method)
if params is not None and params != arg.default:
if isinstance(params, list):
method(*params)
el | se:
method()
return not args.batch
def __init__(self, name, *args, **flags):
"""The constructor for the CommandLine object.
Accepts the same flags as the add_argument function of the
ArgumentParser class.
The *weight* flag can be used to reorder the execution of
arguments. 'lighter' commands will go first."""
self.name = name
self.args = args
self.help = flags.get("help", "")
self.type = flags.get("type", str)
self.default = flags.get("default", None)
self.action = flags.get("action", "store")
self.weight = flags.get("weight", 0)
if self.name in CommandLine.arguments:
raise KeyError(self.name)
CommandLine.arguments[self.name] = self
def __call__(self, func):
if self.help == "":
self.help = func.__doc__
self.method = func.__name__
@wraps(func)
def wrapper(instance, *args, **kwargs):
return func(instance, *args, **kwargs)
return wrapper
def __str__(self):
return "--{} -> {}('{}')".format(self.name, self.method, "', '".join(self.args))
__repr__ = __str__
|
jdavidrcamacho/Tests_GP | 01 - Trials and attempts/Cheat_attempt/Likelihood.py | Python | mit | 3,613 | 0.036318 | # -*- coding: utf-8 -*-
"""
Created on Mon Oct 10 17:27:49 2016
@author: camacho
"""
import Kernel;reload(Kernel);kl = Kernel
import numpy as np
from time import time
import inspect as i
##### LIKELIHOOD
def likelihood(kernel, x, xcalc, y, yerr): #covariance matrix calculations
K = np.zeros((len(x),len(x))) #covariance matrix K
for i in range(len(x)):
x1 = x[i]
for j in range(len(xcalc)):
x2 = xcalc[j]
K[i,j] = kernel(x1, x2)
K=K+yerr**2*np.identity(len(x))
log_p_correct = lnlike(K, y)
print 'likelihood ->', log_p_correct
return K
def lnlike(K, r): #log-likelihood calculations
from scipy.linalg import cho_factor, cho_solve
L1 = cho_factor(K) # tuple (L, lower)
sol = cho_solve(L1, r) # this is K^-1*(r)
n = r.size
logLike = -0.5*np.dot(r, sol) \
- np.sum(np.log(np.diag(L1[0]))) \
- n*0.5*np.log(2*np.pi)
return logLike
##### LIKELIHOOD GRADIENT
def grad_logp(kernel,x,xcalc,y,yerr,cov_matrix):
K_grad = np.zeros((len(x),len(x)))
for i in range(len(x)):
x1 = x[i]
for j in range(len(xcalc)):
x2 = xcalc[j]
K_grad[i,j] = kernel(x1, x2)
K_inv = np.linalg.inv(cov_matrix)
alpha = np.dot(K_inv,y)
alpha_trans = alpha.T
    # gradient formula taken from Rasmussen & Williams, chapter 5, equation (5.9)
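    # i.e. d(log p)/d(theta) = 0.5 * y.T K^-1 (dK/dtheta) K^-1 y
    #                          - 0.5 * tr(K^-1 dK/dtheta)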
grad = 0.5 * np.dot(y.T,np.dot(K_inv,np.dot(K_grad,np.dot(K_inv,y)))) \
-0.5 * np.einsum('ij,ij',K_inv,K_grad)
return grad
def gradient_likelihood(kernel,x,xcalc,y,yerr):
import inspect
    cov_matrix=likelihood(kernel,x,xcalc,y,yerr)#it prints the likelihood again, I think
                                                #because of this call, but I need the
                                                #original covariance matrix
if isinstance(kernel,kl.ExpSquared):
grad1=grad_logp(kernel.dES_dtheta, x, xcalc, y, yerr, cov_matrix)
grad2=grad_logp(kernel.dES_dl, x, xcalc, y, yerr, cov_matrix)
print 'gradient ->', grad1, grad2
elif isinstance(kernel,kl.ExpSineSquared):
grad1=grad_logp(kernel.dESS_dtheta,x,xcalc,y,yerr,cov_matrix)
grad2=grad_logp(kernel.dESS_dl,x,xcalc,y,yerr,cov_matrix)
grad3=grad_logp(kernel.dESS_dP,x,xcalc,y,yerr,cov_matrix)
print 'gradient ->', grad1, grad2, grad3
elif isinstance(kernel,kl.RatQuadratic):
grad1=grad_logp(kernel.dRQ_dtheta,x,xcalc,y,yerr,cov_matrix)
grad2=grad_logp(kernel.dRQ_dalpha,x,xcalc,y,yerr,cov_matrix)
grad3=grad_logp(kernel.dRQ_dl,x,xcalc,y,yerr,cov_matrix)
print 'gradient ->', grad1, grad2, grad3
elif isinstance(kernel,kl.Exponential):
grad1=grad_logp(kernel.dExp_dtheta,x,xcalc,y,yerr,cov_matrix)
grad2=grad_logp(kernel.dExp_dl,x,xcalc,y,yerr,cov_matrix)
print 'gradient ->', grad1, grad2
# elif isinstance(kernel,Sum) is True:
# initial=kernel.__dict__
# for i in range(len(initial)):
# cond_i = initial.popitem()
#            print 'gradient -> well well, it is a sum with', cond_i
#
else:
print 'gradient -> We | dont need no calculation \n We dont need no optimization control'
# I did not apply the same logic to the exponential and matern kernels
#because until this works as it should it is not worth doing so
#working as it should = knowing whether the gradient is computed correctly
#and finding a way for this to | work with sums and multiplications of kernels
legoktm/legoktm | icstalker/iclib/scrape.py | Python | mit | 695 | 0.05036 | #!/usr/bin/python
from BeautifulSou | p import BeautifulSoup as bs
def schedul | e(text):
soup = bs(text)
l = soup.body.table.findAll('td', attrs={'class':'scheduleBody'})
final = []
for row in l:
if 'portal' in str(row):
if row:
sp = bs(str(row))
url = sp.a['href']
name = sp.a.b.contents[0]
final.append({'url':url,'name':name})
return final
def classes(text):
soup = bs(text)
search = soup.body.findAll('td',attrs={'align':'right'})
    grade = None
    for row in search:
        if row.br:
            cell = str(row)  # avoid shadowing the builtin 'set'
            soup = bs(cell)
            grade = soup.td.contents[0]
    if grade is None:
        return None
    return grade.split('%')[0]
if __name__ == "__main__":
f=open('../schedule.html','r')
t=f.read()
f.close()
print schedule(t)
|
Radagast-red/golem | tests/gui/controller/test_customizer.py | Python | gpl-3.0 | 1,855 | 0.001078 | import unittest
import tempfile
import os
from mock import Mock, patch
from gui.controller.customizer import Customizer
class TestCustomizer(unittest.TestCase):
def test_init(self):
customizer = Customizer(Mock(), Mock())
self.assertIsInstance(customizer, Customizer)
@patch("gui.controller.customizer.subprocess")
@patch("gui.controller.customizer.is_osx")
@patch("gui.controller.customizer.is_windows")
@patch("gui.controller.customizer.os")
def test_show_file(self, mock_os, mock_is_windows, mock_is_osx, mock_subprocess):
with tempfile.NamedTemporaryFile(prefix="golem", delete=False) as file_:
file_name = file_.name
print file_name
try:
mock_is_windows.return_value = True
mock_is_osx.return_value = False
Customizer.show_file(file_name)
mock_os.startfile.assert_called_once_with(file_name)
mock_subprocess.assert_not_called()
mock_is_windows.return_value = False
Customizer.show_file(file_name)
mock_os.startfile.assert_called_once_with(file_name)
mock_subprocess.call.assert_called_with(["xdg-open", file_name])
mock_is_osx.return_value = True
Customizer.show_file(file_name)
mock_os.startfile.assert_called_once_with(file_name)
mock_subprocess.call.assert_called_with(["open", file_name])
finally:
if os.path.isfile(file_name):
| os.remove(file_name)
@patch('gui.c | ontroller.customizer.QMessageBox')
def test_show_warning_window(self, mock_messagebox):
mock_messagebox.return_value = mock_messagebox
customizer = Customizer(Mock(), Mock())
customizer.show_warning_window("Test warning message")
assert mock_messagebox.exec_.called
|
applicationdevm/XlsxWriter | xlsxwriter/test/comparison/test_comment10.py | Python | bsd-2-clause | 1,144 | 0 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from | ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'comment10.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
| self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with comments."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.write('A1', 'Foo')
worksheet.write_comment('B2', 'Some text', {'color': '#98fe97'})
worksheet.set_comments_author('John')
workbook.close()
self.assertExcelEqual()
|
mementum/backtrader | tests/test_ind_oscillator.py | Python | gpl-3.0 | 1,764 | 0 | #!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015-2020 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import testcommon
import backtrader as bt
import backtrader.indicators as btind
chkdatas = 1
chkvals = [
['56.477000', '51.185333', '2.386667']
]
chkmin = 30
chkind = btind.Oscillator
class TS2(testcommon.TestStrategy):
def __init__(self):
ind = btind.MovAv.SMA(self.data)
self.p.inddata = [ind]
super(TS2, self).__init__()
def test_run(main=False):
datas = [testcommon.getdata(i) for i in range(chkdatas)]
testcommon.runtest(datas,
TS2,
main=main,
| plot=main,
chkind=chkind,
chkmin=chkmin,
| chkvals=chkvals)
if __name__ == '__main__':
test_run(main=True)
|
SpencerBelleau/glyptodon | modules/scanner.py | Python | mit | 1,018 | 0.018664 | import os, sys, hashlib, random, time
from functions import *
from helpers import *
working_dir = os.getcwd()
args = sys.argv
package = 0
if os.path.isfile(args[1]):
package = open(args[1], 'rb')
else:
sys.exit(-1)
IV = package.read(64)
key = setupKey(args[2], IV)
checksum = list(applyXOR(bytearray(package.read(128)), key))
csgen = hashlib.sha512()
csgen.update(bytearray(checksum[:64]))
check = csgen.digest()
if compareBytes(check, bytes(checksum[64:])):
print("Checksum validated")
else:
sys.exit(-1)
#Create Directory
dirLength = readSizeBytes(list(applyXOR(bytearray(package.read(4)), key)))
directory = list(applyXOR(bytearray(package.read(dirLength)), key))
dirIndex = 0
while(dir | Index < len(directory)):
fnLength = directory[dirIndex]
dirIndex = dirIndex+1
name = ""
for i in range(fnLength):
name = name + chr(directory[dirIndex])
| dirIndex = dirIndex+1
size = []
for i in range(4):
size.append(directory[dirIndex])
dirIndex = dirIndex+1
size = readSizeBytes(size)
print("> " + name) |
UAVCAN/gui_tool | uavcan_gui_tool/widgets/bus_monitor/window.py | Python | mit | 16,283 | 0.002211 | #
# Copyright (C) 2016 UAVCAN Development Team <uavcan.org>
#
# This software is distributed under the terms of the MIT License.
#
# Author: Pavel Kirienko <pavel.kirienko@zubax.com>
#
import datetime
import time
import os
from functools import partial
import pyuavcan_v0
from pyuavcan_v0.driver import CANFrame
from PyQt5.QtWidgets import QMainWindow, QHeaderView, QLabel, QSplitter, QSizePolicy, QWidget, QHBoxLayout, \
QPlainTextEdit, QDialog, QVBoxLayout, QMenu, QAction
from PyQt5.QtGui import QColor, QIcon, QTextOption
from PyQt5.QtCore import Qt, QTimer
from ...thirdparty.pyqtgraph import PlotWidget, mkPen
from logging import getLogger
from .. import BasicTable, map_7bit_to_color, RealtimeLogWidget, get_monospace_font, get_icon, flash, get_app_icon, \
show_error
from .transfer_decoder import decode_transfer_from_frame
logger = getLogger(__name__)
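# Extended CAN ID layout assumed by the parser below (UAVCAN v0):
#   bits 0..6    source node ID
#   bit  7       service-not-message flag
#   service frames: bits 8..14 destination node ID, bit 15 request flag,
#                   bits 16..23 service type ID
#   message frames: bits 8..23 message type ID (only the low 2 bits are kept
#                   for anonymous frames, i.e. source node ID == 0)
#   bits 24..28  priority (used by colorize_can_id below)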
def parse_can_frame(frame):
if frame.extended:
can_id = frame.id
source_node_id = can_id & 0x7F
service_not_message = bool((can_id >> 7) & 1)
if service_not_message:
destination_node_id = (can_id >> 8) & 0x7F
request_not_response = bool((can_id >> 15) & 1)
service_type_id = (can_id >> 16) & 0xFF
try:
data_type_name = pyuavcan_v0.DATATYPES[(service_type_id, pyuavcan_v0.dsdl.CompoundType.KIND_SERVICE)].full_name
except KeyError:
data_type_name = '<unknown service %d>' % service_type_id
else:
message_type_id = (can_id >> 8) & 0xFFFF
if source_node_id == 0:
source_node_id = 'Anon'
message_type_id &= 0b11
destination_node_id = ''
try:
data_type_name = pyuavcan_v0.DATATYPES[(message_type_id, pyuavcan_v0.dsdl.CompoundType.KIND_MESSAGE)].full_name
except KeyError:
data_type_name = '<unknown message %d>' % message_type_id
else:
data_type_name = 'N/A'
source_node_id = 'N/A'
destination_node_id = 'N/A'
return {
'data_type': data_type_name,
'src': source_node_id,
'dst': destination_node_id,
}
def render_node_id_with_color(frame, field):
nid = parse_can_frame(frame)[field]
return nid, (map_7bit_to_color(nid) if isinstance(nid, int) else None)
def render_data_type_with_color(frame):
dtname = parse_can_frame(frame)['data_type']
color_hash = sum(dtname.encode('ascii')) & 0xF7
return dtname, map_7bit_to_color(color_hash)
def colorize_can_id(frame):
if not frame.extended:
return
mask = 0b11111
priority = (frame.id >> 24) & mask
col = QColor()
col.setRgb(0xFF, 0xFF - (mask - priority) * 6, 0xFF)
return col
def colorize_transfer_id(e):
if len(e[1].data) < 1:
return
# Making a rather haphazard hash using transfer ID and a part of CAN ID
x = (e[1].data[-1] & 0b11111) | (((e[1].id >> 16) & 0b1111) << 5)
red = ((x >> 6) & 0b111) * 25
green = ((x >> 3) & 0b111) * 25
blue = (x & 0b111) * 25
col = QColor()
col.setRgb(0xFF - red, 0xFF - green, 0xFF - blue)
return col
class TimestampRenderer:
FORMAT = '%H:%M:%S.%f'
def __init__(self):
self._prev_ts = 0
def __call__(self, e):
ts = datetime.datetime.fromtimestamp(e[1].ts_real).strftime(self.FORMAT)
col = QColor()
# Constraining delta to [0, 1]
delta = min(1, e[1].ts_real - self._prev_ts)
if delta < 0:
col.setRgb(255, 230, 230)
else:
self._prev_ts = e[1].ts_real
col.setRgb(*([255 - int(192 * delta)] * 3))
return ts, col
@staticmethod
def compute_timestamp_difference(earlier, later):
def s2delta(string):
h, m, s = [float(x) for x in string.split(':')]
return datetime.timedelta(hours=h, minutes=m, seconds=s)
return (s2delta(later) - s2delta(earlier)).total_seconds()
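    # e.g. compute_timestamp_difference('13:05:10.250000', '13:05:11.000000')
    # returns 0.75 (values are illustrative)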
class TrafficStatCounter:
MOVING_AVERAGE_LENGTH = 4
FPS_ESTIMATION_WINDOW = 0.5
def __init__(self):
self._rx = 0
self._tx = 0
self._fps = 0
self._prev_fps_checkpoint_mono = 0
self._frames_since_fps_checkpoint = 0
self._last_fps_estimates = [0] * self.MOVING_AVERAGE_LENGTH
def add_frame(self, direction, frame):
if direction == 'tx':
self._tx += 1
else:
self._rx += 1
# Updating FPS estimate.
# It is extremely important that the algorithm relies only on the timestamps provided by the driver!
# Naive timestamping produces highly unreliable estimates, because the application is not nearly real-time.
self._frames_since_fps_checkpoint += 1
if direction == 'rx':
dt = frame.ts_monotonic - self._prev_fps_checkpoint_mono
if dt >= self.FPS_ESTIMATION_WINDOW:
self._last_fps_estimates.pop()
self._last_fps_estimates.insert(0, self._frames_since_fps_checkpoint / dt)
self._prev_fps_checkpoint_mono = frame.ts_monotonic
self._frames_since_fps_checkpoint = 0
@property
def rx(self):
return self._rx
@property
def tx(self):
return self._tx
@property
def total(self):
return self._rx + self._tx
def get_frames_per_second(self):
return (sum(self._last_fps_estimates) / len(self._last_fps_estimates)), self._prev_fps_checkpoint_mono
COLUMNS = [
BasicTable.Column('Dir',
lambda e: (e[0].upper()),
searchable=False),
BasicTable.Column('Local Time', TimestampRenderer(), searchable=False),
BasicTable.Column('CAN ID',
lambda e: (('%0*X' % (8 if e[1].extended else 3, e[1].id)).rjust(8),
colorize_can_id(e[1]))),
BasicTable.Column('Data Hex',
lambda e: (' '.join(['%02X' % x for x in e[1].data]).ljust(3 * e[1].MAX_DATA_LENGTH),
colorize_transfer_id(e))),
BasicTable.Column('Data ASCII',
lambda e: (''.join([(chr(x) if 32 <= x <= 126 else '.') for x in e[1].data]),
colorize_transfer_id(e))),
BasicTable.Column('Src',
lambda e: render_node_id_with_color(e[1], 'src')),
BasicTable.Column('Dst',
lambda e: render_node_id_with_color(e[1], 'dst')),
BasicTable.Column('Data Type',
lambda e: render_data_type_with_color(e[1]),
resize_mode=QHeaderView.Stretch),
]
def row_to_frame(table, row_index):
if row_index >= table.rowCount():
return None, None
can_id = None
payload = None
extended = None
direction = None
for col_index, col_spec in enumerate(COLUMNS):
item = table.item(row_index, col_index).text()
if col_spec.name == 'CAN ID':
extended = len(item.strip()) > 3
can_id = int(item, 16)
if col_spec.n | ame == 'Data Hex':
payload = bytes([int(x, 16) for x in item.split()])
if col_spec.name == 'Dir':
direction = item.strip()
assert all(map(lambda x: x is not None, [can_id, payload, extended, direction]))
return CANFrame(can_id, payl | oad, extended, ts_monotonic=-1, ts_real=-1), direction
class BusMonitorWindow(QMainWindow):
DEFAULT_PLOT_X_RANGE = 120
BUS_LOAD_PLOT_MAX_SAMPLES = 50000
def __init__(self, get_frame, iface_name):
super(BusMonitorWindow, self).__init__()
self.setWindowTitle('CAN bus monitor (%s)' % iface_name.split(os.path.sep)[-1])
self.setWindowIcon(get_app_icon())
# get dsdl_directory from parent process, if set
dsdl_directory = os.environ.get('UAVCAN_CUSTOM_DSDL_PATH',None)
if dsdl_directory:
pyuavcan_v0.load_dsdl(dsdl_directory)
self._get_frame = get_frame
self._log_widget = RealtimeLogWidget(self, columns=COLUMNS, font=get_monospace_font(),
                                             pre_redraw_hook=self._redraw_hook)
zolyomiake/400 | rle_coder.py | Python | gpl-3.0 | 3,975 | 0.000755 |
from operator import itemgetter
import logging
def test_coder():
# original = [1,2,3,3,3,4,18,18,2,3,4,4,4,5]
# original = [1, 2, 3, 3, 3, 4, 18, 18, 2, 3, 4, 4, 4, 5, 44, 44, 45, 46, 46, 46, 49, 49, 49, 45]
original = [1, 2, 0, 0, 0, 0, 3, 3, 3, 4, 18, 18, 2, 3, 4, 4, 4, 5, 44, 44, 45, 46, 46, 46, 49, 49, 49, 45]
encoded = rle_encode(original)
decoded = rle_decode(encoded)
code_matches(original, decoded)
def code_matches(original, decoded):
if len(original) != len(decoded):
logging.error("Gebo at array comparison")
logging.error("orig: %s ", original)
logging.error("dec: %s", decoded)
return
for i in range(0, len(decoded)):
if original[i] != decoded[i]:
logging.error("Gebo at array comparison")
logging.error("orig: %s ", original)
logging.error("dec: %s", decoded)
return
logging.info("hooray")
def rle_encode(memberships):
last_member = None
repeats = 1
rle_str = ''
# ASCII 48 to 57 are digits -> move them above 230 (add 182)
for member in memberships:
if 48 <= member < 58:
member += 182
if last_member is None:
last_member = member
continue
if member == last_member:
repeats += 1
else:
if repeats < 2:
rle_str += chr(last_member)
else:
rle_str += str(repeats) + chr(last_member)
repeats = 1
last_member = member
    if last_member is not None:  # guard against empty input
if repeats < 2:
rle_str += chr(last_member)
else:
rle_str += str(repeats) + chr(last_member)
return rle_str
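# Worked example for the scheme above (values are illustrative):
#   rle_encode([5, 5, 5, 7, 50, 50])
#   -> 50 falls in the digit range 48-57, so it is stored as chr(232);
#   -> runs of length >= 2 get a decimal count prefix:
#      "3" + chr(5) + chr(7) + "2" + chr(232)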
def rle_decode(encoded):
number_str = ''
numbers = list()
for char in encoded:
if 48 <= ord(char) < 58:
number_str += char
else:
if ord(char) >= 230:
c | har = chr(ord(char) - 182)
if len(number_str) != 0:
repeat = int(number_str)
number_str = ''
| for i in range(0, repeat):
numbers.append(ord(char))
else:
numbers.append(ord(char))
return numbers
def rle_encode_as_str(memberships):
last_member = None
repeats = 0
rle_str = ''
# work_array = memberships[:100]
rep_dict = dict()
for member in memberships:
        if last_member is None:  # 'not last_member' would also trigger on member value 0
            last_member = member
        if member == last_member:
            repeats += 1
        else:
            if repeats < 2:
                rle_str += chr(last_member)  # emit the run that just ended, not the new member
            else:
                if str(repeats) not in rep_dict:
                    rep_dict[str(repeats)] = 1
                else:
                    rep_dict[str(repeats)] += 1
                rle_str += str(repeats) + chr(last_member)
            repeats = 1  # the current member starts a new run of length one
            last_member = member
    if repeats != 0:
        if repeats < 2:
            rle_str += chr(last_member)
        else:
            rle_str += str(repeats) + chr(last_member)
distinct_saving = 0
savings = list()
for key in rep_dict.keys():
cost = len(key) + 1
repeats = rep_dict[key]
saving = len(key) * repeats
profit = saving - cost
if profit > 0:
savings.append((key, profit))
distinct_saving += profit
savings.sort(key=itemgetter(1), reverse=True)
# last number is ASCII 57, we're good to go until 255
char_coded = 58
# encoding_table = dict()
enc_str = rle_str
for saving in savings:
# encoding_table[saving[0]] = char_coded
enc_str = enc_str.replace(saving[0], chr(char_coded))
char_coded += 1
if char_coded > 255:
break
print("Orig {} - mod {}".format(len(rle_str), len(enc_str)))
return rle_str
def main():
test_coder()
if __name__ == "__main__":
main()
|
DevEd2/DevSound | demo/makegbs.py | Python | mit | 508 | 0.007874 | # makegbs.py - create GBS file from DevSound_GBS.gb
# open files
ROMFile = open("DevSound_GBS.gbc", "rb") # demo ROM
OutFile = open("DevSound.gbs", "wb") # output file
# f | ind end of data
endpos = ROMFile.seek(-1,2) + 2
while endpos >= 0x4000:
if ROMFile.read(1)[0] != 0xff: break;
ROMFile.seek(-2,1)
endpos -= 1
# copy song data
RO | MFile.seek(0x3f90)
OutFile.write(ROMFile.read(endpos - 0x3f90)) # write song data
# close files
ROMFile.close()
OutFile.close()
|
tutorcruncher/morpheus | src/ext.py | Python | mit | 3,555 | 0.003094 | import json
import logging
from foxglove import glove
from httpx import Response
from .settings import Settings
logger = logging.getLogger('ext')
def lenient_json(v):
if isinstance(v, (str, bytes)):
try:
return json.loads(v)
except (ValueError, TypeError):
pass
return v
class ApiError(RuntimeError):
def __init__(self, method, url, status, response_text):
self.method = method
self.url = url
self.status = status
self.body = response_text
def __str__(self):
return f'{self.method} {self.url}, unexpected response {self.status}'
class ApiSession | :
def __init__(self, root_url, settings: Settings):
self.settings = settings
self.root = root_url.rstrip('/') + '/'
| async def get(self, uri, *, allowed_statuses=(200,), **data) -> Response:
return await self._request('GET', uri, allowed_statuses=allowed_statuses, **data)
async def delete(self, uri, *, allowed_statuses=(200,), **data) -> Response:
return await self._request('DELETE', uri, allowed_statuses=allowed_statuses, **data)
async def post(self, uri, *, allowed_statuses=(200, 201), **data) -> Response:
return await self._request('POST', uri, allowed_statuses=allowed_statuses, **data)
async def put(self, uri, *, allowed_statuses=(200, 201), **data) -> Response:
return await self._request('PUT', uri, allowed_statuses=allowed_statuses, **data)
async def _request(self, method, uri, allowed_statuses=(200, 201), **data) -> Response:
method, url, data = self._modify_request(method, self.root + str(uri).lstrip('/'), data)
kwargs = {}
headers = data.pop('headers_', None)
if headers is not None:
kwargs['headers'] = headers
if timeout := data.pop('timeout_', None):
kwargs['timeout'] = timeout
r = await glove.http.request(method, url, json=data or None, **kwargs)
if isinstance(allowed_statuses, int):
allowed_statuses = (allowed_statuses,)
if allowed_statuses != '*' and r.status_code not in allowed_statuses:
data = {
'request_real_url': str(r.request.url),
'request_headers': dict(r.request.headers),
'request_data': data,
'response_headers': dict(r.headers),
'response_content': lenient_json(r.text),
}
logger.warning(
'%s unexpected response %s /%s -> %s',
self.__class__.__name__,
method,
uri,
r.status_code,
extra={'data': data} if self.settings.verbose_http_errors else {},
)
raise ApiError(method, url, r.status_code, r.text)
else:
logger.debug('%s /%s -> %s', method, uri, r.status_code)
return r
def _modify_request(self, method, url, data):
return method, url, data
class Mandrill(ApiSession):
def __init__(self, settings):
super().__init__(settings.mandrill_url, settings)
def _modify_request(self, method, url, data):
data['key'] = self.settings.mandrill_key
return method, url, data
class MessageBird(ApiSession):
def __init__(self, settings):
super().__init__(settings.messagebird_url, settings)
def _modify_request(self, method, url, data):
data['headers_'] = {'Authorization': f'AccessKey {self.settings.messagebird_key}'}
return method, url, data
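# Usage sketch (the endpoint and payload below are illustrative assumptions):
#     mandrill = Mandrill(settings)
#     r = await mandrill.post('messages/send.json', message={...})
# _modify_request() injects the Mandrill API key into the request body.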
|
carolFrohlich/nipype | nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py | Python | bsd-3-clause | 2,331 | 0.02145 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ....testing import assert_equal
from ..model import MRISPreproc
def test_MRISPreproc_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=Tru | e,
),
fsgd_file=dict(argstr='--fsgd %s',
xor=(u'subjects', u'fsgd_file', u'subject_file'),
),
fwhm=dict(argstr='--fwhm %f',
xor=[u'num_iter | s'],
),
fwhm_source=dict(argstr='--fwhm-src %f',
xor=[u'num_iters_source'],
),
hemi=dict(argstr='--hemi %s',
mandatory=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
num_iters=dict(argstr='--niters %d',
xor=[u'fwhm'],
),
num_iters_source=dict(argstr='--niterssrc %d',
xor=[u'fwhm_source'],
),
out_file=dict(argstr='--out %s',
genfile=True,
),
proj_frac=dict(argstr='--projfrac %s',
),
smooth_cortex_only=dict(argstr='--smooth-cortex-only',
),
source_format=dict(argstr='--srcfmt %s',
),
subject_file=dict(argstr='--f %s',
xor=(u'subjects', u'fsgd_file', u'subject_file'),
),
subjects=dict(argstr='--s %s...',
xor=(u'subjects', u'fsgd_file', u'subject_file'),
),
subjects_dir=dict(),
surf_area=dict(argstr='--area %s',
xor=(u'surf_measure', u'surf_measure_file', u'surf_area'),
),
surf_dir=dict(argstr='--surfdir %s',
),
surf_measure=dict(argstr='--meas %s',
xor=(u'surf_measure', u'surf_measure_file', u'surf_area'),
),
surf_measure_file=dict(argstr='--is %s...',
xor=(u'surf_measure', u'surf_measure_file', u'surf_area'),
),
target=dict(argstr='--target %s',
mandatory=True,
),
terminal_output=dict(nohash=True,
),
vol_measure_file=dict(argstr='--iv %s %s...',
),
)
inputs = MRISPreproc.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_MRISPreproc_outputs():
output_map = dict(out_file=dict(),
)
outputs = MRISPreproc.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
acsone/knowledge | attachment_lock/tests/test_attachment_lock.py | Python | agpl-3.0 | 1,537 | 0 | # -*- coding: utf-8 -*-
# Copyright 2018 Therp BV <https://therp.nl>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from base64 import b64encode
from openerp.tests.common import TransactionCase
from openerp.exceptions import AccessError, ValidationError
class TestAttachmentLock(TransactionCase):
def test_attachment_lock(self):
demo = self.env.ref('base.user_demo')
testattachment = self.env['ir.attachment'].create({
'name': 'testattachment',
'datas': b64encode('hello world'),
'datas_fname': 'test.txt',
})
self.assertTrue(testattachment.can_lock)
self.assertFalse(testattachment.locked)
testattachment.lock()
self.assertTrue(testattachment.can_lock)
self.assertTrue(testattachment.locked)
with self.assertRaises(ValidationError):
testattachment.sudo(demo).write({
'datas': b64encode('hello world2'),
})
with self.assertRaises(AccessError):
testattachment.sudo(demo).lock()
demo.write({'groups_id': [
(4, self.env.ref('attachment_lock.group_attachment_lock').id),
]})
with self.asse | rtRaises(AccessError):
testattachment.sudo(demo).lock()
testattachment.unlock()
self.assertTrue(testattachment.sudo(demo).can_lock)
testattachment.sudo(demo).lock()
self.assertTrue(testattachment.sudo(de | mo).can_lock)
self.assertTrue(testattachment.sudo(demo).locked)
|
srfraser/services | src/releng_treestatus/releng_treestatus/config.py | Python | mpl-2.0 | 331 | 0 | # -*- codin | g: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
PROJECT_NAME = 'releng-treestatus'
APP_NAME = 'releng_tre | estatus'
|
mozillazg/django-simple-projects | projects/pagination/hello/models.py | Python | mit | 157 | 0 | from django.db import models
class Topic(models.Model):
| title = models.CharField(max_length=200)
def __unicode__(s | elf):
return self.title
|
PARINetwork/pari | article/migrations/0010_add_show_modular_content_field.py | Python | bsd-3-clause | 445 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import mig | rations, models
class Migration(migrations.Migration):
dependencies = [
('article', '0009_add_stream_fields_for_modular_article_content'),
]
operations = [
migrations.AddField(
model_name='article',
name='show_modular_content',
field=models | .BooleanField(default=False),
),
]
|
aestrivex/PySurfer | examples/plot_label_foci.py | Python | bsd-3-clause | 1,955 | 0.001023 | """
=======================
Generate Surface Labels
=======================
Define a label that is centered on a specific vertex in the surface mesh. Plot
that label and the focus that defines its center.
"""
print __doc__
from surfer import Brain, utils
subject_id = "fsaverage"
"""
Bring up the visualization.
"""
brain = Brain(subject_id, "lh", "inflated")
"""
First we'll identify a stereotaxic focus in the MNI coordinate system. This
might be a peak activation from a volume-based analysis.
"""
coord = [-43, 25, 24]
"""
Next we grow a label along the surface around the nearest vertex to this
coordinate in the white surface mesh. The `n_steps` argument controls the size
of the resulting label.
"""
utils.coord_to_label(subject_id, coord, label='example_data/coord',
hemi='lh', n_steps=25, map_surface="white")
brain.add_label('example_data/coord-lh.label', color="darkseagreen", alpha=.8)
"""
Now we plot the focus on the inflated surface at the vertex identified in the
previous step.
"""
brain.add_foci([coord], map_surface="white", color="mediumseagreen")
"""
We can also do this using a vertex index, perhaps defined as the peak
activation in a surface analysis. This will be more accurate than using a
volume-based focus.
"""
coord = 0
utils.coord_to_label(subject_id, coord, label='example_data/coord',
hemi='lh', n_steps=40, map_surface=" | white",
coord_as_vert=True)
brain.add_label('example_data/coord-lh.label', color='royalblue', alpha=.8)
"""
No | w we plot the foci on the inflated surface. We will map the foci onto the
surface by finding the vertex on the "white" mesh that is closest to the
coordinate of the point we want to display.
"""
brain.add_foci([coord], map_surface="white", coords_as_verts=True,
color="mediumblue")
"""
Set the camera position to show the extent of the labels.
"""
brain.show_view(dict(elevation=40, distance=430))
|
GoogleCloudPlatform/cloud-opensource-python | compatibility_server/configs.py | Python | apache-2.0 | 9,653 | 0 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common configs for compatibility_lib and compatibility_server.
Note that a unit test exists for checking that the configs.py file in
compatibility_lib is the same as the configs.py file in compatibility_server.
The reason for this set up is that these modules need to be isolated from
each other, but there also needs to be consistency in the objects and data
in this file since they exist in the same workflow.
Steps for updating the package list / white list:
1. Make sure to update both lists when appropriate (the package has been
release to PyPI and the github repo exists)
2. Skip the dashboard tests and build when adding any new packages to
either list
3. Release a new version of compatibility lib
4. Redeploy the badge and compatibility servers
5. Unskip the dashboard tests and build
"""
def _format_url(repo_name, setuppy_path=''):
url = 'git+git://github.com/{}.git'.format(repo_name)
if setuppy_path != '':
url = '{}#subdirectory={}'.format(url, setuppy_path)
return url
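# Illustrative only (not part of the original module): _format_url produces
# pip-installable VCS requirement strings, e.g.
#   _format_url('googleapis/google-cloud-python', 'dns')
#   == 'git+git://github.com/googleapis/google-cloud-python.git#subdirectory=dns'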
# IGNORED_DEPENDENCIES are not direct dependencies for many packages and are
# not installed via pip, resulting in unresolvable high priority warnings.
IGNORED_DEPENDENCIES = [
'pip',
'setuptools',
'wheel',
'virtualenv',
]
# If updating this list, make sure to update the whitelist as well with the
# appropriate github repo if one exists.
PKG_LIST = [
'google-api-core',
'google-api-python-client',
'google-auth',
'google-cloud-asset',
'google-cloud-automl',
'google-cloud-bigquery',
'google-cloud-bigquery-datatransfer',
'google-cloud-bigquery-storage',
'google-cloud-bigtable',
'google-cloud-container',
'google-cloud-core',
'google-cloud-datacatalog',
'google-cloud-datalabeling',
'google-cloud-dataproc',
'google-cloud-datastore',
'google-cloud-dlp',
'google-cloud-dns',
    'google-cloud-error-reporting',
    'google-cloud-firestore',
    'google-cloud-iam',
    'google-cloud-iot',
    # 'google-cloud-irm', # unreleased
'google-cloud-kms',
'google-cloud-language',
'google-cloud-logging',
'google-cloud-monitoring',
'google-cloud-os-login',
# 'google-cloud-phishing-protection', # unreleased
'google-cloud-pubsub',
'google-cloud-redis',
'google-cloud-resource-manager',
'google-cloud-runtimeconfig',
'google-cloud-scheduler',
'google-cloud-securitycenter',
'google-cloud-spanner',
'google-cloud-speech',
'google-cloud-storage',
'google-cloud-talent',
'google-cloud-tasks',
'google-cloud-texttospeech',
'google-cloud-trace',
'google-cloud-translate',
'google-cloud-videointelligence',
'google-cloud-vision',
'google-cloud-webrisk',
'google-cloud-websecurityscanner',
'google-resumable-media',
'apache-beam[gcp]',
'google-apitools',
'googleapis-common-protos',
'grpc-google-iam-v1',
'grpcio',
'opencensus',
'protobuf',
'protorpc',
'tensorboard',
'tensorflow',
'gcloud',
'compatibility-lib',
]
WHITELIST_PKGS = PKG_LIST
# WHITELIST_URLS maps a github url to its associated pypi package name. This is
# used for sanitizing input packages and making sure we don't run random pypi
# or github packages.
# If updating this list, make sure to update the `PKG_LIST` with the
# appropriate pypi package if one has been released.
WHITELIST_URLS = {
_format_url('googleapis/google-cloud-python', 'asset'):
'google-cloud-asset',
_format_url('googleapis/google-cloud-python', 'automl'):
'google-cloud-automl',
_format_url('googleapis/google-cloud-python', 'datacatalog'):
'google-cloud-datacatalog',
_format_url('googleapis/google-cloud-python', 'datalabeling'):
'google-cloud-datalabeling',
_format_url('googleapis/google-cloud-python', 'dataproc'):
'google-cloud-dataproc',
_format_url('googleapis/google-cloud-python', 'dlp'):
'google-cloud-dlp',
_format_url('googleapis/google-cloud-python', 'iam'):
'google-cloud-iam',
_format_url('googleapis/google-cloud-python', 'iot'):
'google-cloud-iot',
# unreleased
_format_url('googleapis/google-cloud-python', 'irm'):
'google-cloud-irm',
_format_url('googleapis/google-cloud-python', 'kms'):
'google-cloud-kms',
_format_url('googleapis/python-ndb', ''):
'google-cloud-ndb',
_format_url('googleapis/google-cloud-python', 'oslogin'):
'google-cloud-os-login',
_format_url('googleapis/google-cloud-python', 'redis'):
'google-cloud-redis',
_format_url('googleapis/google-cloud-python', 'scheduler'):
'google-cloud-scheduler',
_format_url('googleapis/google-cloud-python', 'securitycenter'):
'google-cloud-securitycenter',
_format_url('googleapis/google-cloud-python', 'tasks'):
'google-cloud-tasks',
_format_url('googleapis/google-cloud-python', 'texttospeech'):
'google-cloud-texttospeech',
_format_url('googleapis/google-cloud-python', 'webrisk'):
'google-cloud-webrisk',
_format_url('googleapis/google-cloud-python', 'websecurityscanner'):
'google-cloud-websecurityscanner',
_format_url('googleapis/google-cloud-python', 'api_core'):
'google-api-core',
_format_url('googleapis/google-cloud-python', 'bigquery'):
'google-cloud-bigquery',
_format_url('googleapis/google-cloud-python', 'bigquery_datatransfer'):
'google-cloud-bigquery-datatransfer',
_format_url('googleapis/google-cloud-python', 'bigquery_storage'):
'google-cloud-bigquery-storage',
_format_url('googleapis/google-cloud-python', 'bigtable'):
'google-cloud-bigtable',
_format_url('googleapis/google-cloud-python', 'container'):
'google-cloud-container',
_format_url('googleapis/google-cloud-python', 'core'):
'google-cloud-core',
_format_url('googleapis/google-cloud-python', 'datastore'):
'google-cloud-datastore',
_format_url('googleapis/google-cloud-python', 'dns'): 'google-cloud-dns',
_format_url('googleapis/google-cloud-python', 'error_reporting'):
'google-cloud-error-reporting',
_format_url('googleapis/google-cloud-python', 'firestore'):
'google-cloud-firestore',
_format_url('googleapis/google-cloud-python', 'language'):
'google-cloud-language',
_format_url('googleapis/google-cloud-python', 'logging'):
'google-cloud-logging',
_format_url('googleapis/google-cloud-python', 'monitoring'):
'google-cloud-monitoring',
# unreleased
_format_url('googleapis/google-cloud-python', 'phishingprotection'):
'google-cloud-phishing-protection',
_format_url('googleapis/google-cloud-python', 'pubsub'):
'google-cloud-pubsub',
_format_url('googleapis/google-cloud-python', 'resource_manager'):
'google-cloud-resource-manager',
_format_url('googleapis/google-cloud-python', 'runtimeconfig'):
'google-cloud-runtimeconfig',
_format_url('googleapis/google-cloud-python', 'spanner'):
'google-cloud-spanner',
_format_url('googleapis/google-cloud-python', 'speech'):
'google-cloud-speech',
_format_url('googleapis/google-cloud-python', 'storage'):
'google-cloud-storage',
_format_url('googleapis/google-cloud-python', 'talent'):
'google-cloud-talent',
_format_url('googleapis/google-cloud-python', 'trace'):
'google-cloud-trace',
_format_url('googleapis/google-cloud-python', 'translate'):
'google-cloud-translate',
|
antoinecarme/pyaf | tests/model_control/detailed/transf_None/model_control_one_enabled_None_MovingAverage_Seasonal_Second_AR.py | Python | bsd-3-clause | 156 | 0.051282 | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['None'] , ['MovingAverage'] , ['Seasonal_Second'] , ['AR'] );
OpusVL/Odoo-UK-Format-Reports | UK_Reports/__init__.py | Python | agpl-3.0 | 1,037 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# UK Report Template
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import report
import account_invoice
import sale_order
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/cinderclient/v2/qos_specs.py | Python | mit | 4,789 | 0 | # Copyright (c) 2013 eBay Inc.
# Copyright (c) OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
QoS Specs interface.
"""
from cinderclient import base
class QoSSpecs(base.Resource):
"""QoS specs entity represents quality-of-service parameters/requirements.
A QoS specs is a set of parameters or requirements for quality-of-service
    purpose, which can be associated with volume types (for now). In the
    future, QoS specs may be extended to be associated with other entities,
    such as a single volume.
"""
def __repr__(self):
return "<QoSSpecs: %s>" % self.name
def delete(self):
return self.manager.delete(self)
class QoSSpecsManager(base.ManagerWithFind):
"""
Manage :class:`QoSSpecs` resources.
"""
resource_class = QoSSpecs
def list(self, search_opts=None):
"""Get a list of all qos specs.
:rtype: list of :class:`QoSSpecs`.
"""
return self._list("/qos-specs", "qos_specs")
def get(self, qos_specs):
"""Get a specific qos specs.
:param qos_specs: The ID of the :class:`QoSSpecs` to get.
        :rtype: :class:`QoSSpecs`
"""
return self._get("/qos-specs/%s" % base.getid(qos_specs), "qos_specs")
def delete(self, qos_specs, force=False):
"""Delete a specific qos specs.
:param qos_specs: The ID of the :class:`QoSSpecs` to be removed.
:param force: Flag that indicates whether to delete target qos specs
if it was in-use.
"""
self._delete("/qos-specs/%s?force=%s" %
(base.getid(qos_specs), force))
def create(self, name, specs):
"""Create a qos specs.
:param name: Descriptive name of the qos specs, must be unique
:param specs: A dict of key/value pairs to be set
:rtype: :class:`QoSSpecs`
"""
body = {
"qos_specs": {
"name": name,
}
}
body["qos_specs"].update(specs)
return self._create("/qos-specs", body, "qos_specs")
def set_keys(self, qos_specs, specs):
"""Add/Update keys in qos specs.
:param qos_specs: The ID of qos specs
:param specs: A dict of key/value pairs to be set
:rtype: :class:`QoSSpecs`
"""
body = {
"qos_specs": {}
}
body["qos_specs"].update(specs)
return self._update("/qos-specs/%s" % qos_specs, body)
def unset_keys(self, qos_specs, specs):
"""Remove keys from a qos specs.
:param qos_specs: The ID of qos specs
:param specs: A list of key to be unset
:rtype: :class:`QoSSpecs`
"""
body = {'keys': specs}
return self._update("/qos-specs/%s/delete_keys" % qos_specs,
body)
def get_associations(self, qos_specs):
"""Get associated entities of a qos specs.
:param qos_specs: The id of the :class: `QoSSpecs`
:return: a list of entities that associated with specific qos specs.
"""
return self._list("/qos-specs/%s/associations" % base.getid(qos_specs),
"qos_associations")
def associate(self, qos_specs, vol_type_id):
"""Associate a volume type with specific qos specs.
:param qos_specs: The qos specs to be associated with
:param vol_type_id: The volume type id to be associated with
"""
self.api.client.get("/qos-specs/%s/associate?vol_type_id=%s" %
(base.getid(qos_specs), vol_type_id))
def disassociate(self, qos_specs, vol_type_id):
"""Disassociate qos specs from volume type.
:param qos_specs: The qos specs to be associated with
:param vol_type_id: The volume type id to be associated with
"""
self.api.client.get("/qos-specs/%s/disassociate?vol_type_id=%s" %
(base.getid(qos_specs), vol_type_id))
def disassociate_all(self, qos_specs):
"""Disassociate all entities from specific qos specs.
:param qos_specs: The qos specs to be associated with
"""
self.api.client.get("/qos-specs/%s/disassociate_all" %
base.getid(qos_specs))
|
osgee/django-web-demo | webdemo/views.py | Python | apache-2.0 | 1,352 | 0.005917 | from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.template import RequestContext, loader
from django.views.decorators.csrf import csrf_exempt
def index(request):
template = loader.get_template('index.html')
result=10
context = RequestContext(request, {
# 'result': result,
})
    return HttpResponse(template.render(context))
@csrf_exempt
def predict(request):
result=5
try:
img=request.POST['img']
except KeyError:
# Redisplay the question voting form.
# return render(request, 'polls/detail.html', {
# 'question': p,
# 'error_message': "You didn't select a choice.",
# })
# print "error_message"
return HttpResponse("Your predict is %s." % result)
else:
        # Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
# return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
return HttpResponse("Your predict is %s." % result)
# pass
# name = request.POST.get('name')
# return HttpResponse(json.dumps({'name': name}), content_type="application/json")
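# Illustrative only: given a URL route mapped to `predict` (routing is not
# shown in this file), the endpoint could be exercised with a form-encoded
# POST, e.g.
#   curl -X POST -d "img=<image data>" http://localhost:8000/predict/
# The exact path is hypothetical; it depends on the project's urls.py.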
|
opennode/nodeconductor-assembly-waldur | src/waldur_core/server/base_settings.py | Python | mit | 9,157 | 0.001092 | """
Django base settings for Waldur Core.
"""
from datetime import timedelta
import locale
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import warnings
from waldur_core.core import WaldurExtension
from waldur_core.core.metadata import WaldurConfiguration
from waldur_core.server.admin.settings import * # noqa: F403
encoding = locale.getpreferredencoding()
if encoding.lower() != 'utf-8':
raise Exception("""Your system's preferred encoding is `{}`, but Waldur requires `UTF-8`.
Fix it by setting the LC_* and LANG environment settings. Example:
LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8
""".format(encoding))
ADMINS = ()
BASE_DIR = os.path.abspath(os.path.join(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'), '..'))
DEBUG = False
MEDIA_ROOT = '/media_root/'
MEDIA_URL = '/media/'
ALLOWED_HOSTS = []
SITE_ID = 1
DBTEMPLATES_USE_REVERSION = True
DBTEMPLATES_USE_CODEMIRROR = True
# Application definition
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.humanize',
'django.contrib.staticfiles',
'django.contrib.sites',
'waldur_core.landing',
'waldur_core.logging',
'waldur_core.core',
'waldur_core.quotas',
'waldur_core.structure',
'waldur_core.users',
'waldur_core.media',
'rest_framework',
'rest_framework.authtoken',
'rest_framework_swagger',
'django_filters',
'axes',
'django_fsm',
'reversion',
'taggit',
'jsoneditor',
'modeltranslation',
'import_export',
'health_check',
'health_check.db',
'health_check.cache',
'health_check.storage',
'health_check.contrib.migrations',
'health_check.contrib.celery_ping',
'dbtemplates',
'binary_database_files',
)
INSTALLED_APPS += ADMIN_INSTALLED_APPS # noqa: F405
MIDDLEWARE = (
'waldur_core.server.middleware.cors_middleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'waldur_core.logging.middleware.CaptureEventContextMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'axes.middleware.AxesMiddleware'
)
REST_FRAMEWORK = {
'TEST_REQUEST_DEFAULT_FORMAT': 'json',
'DEFAULT_AUTHENTICATION_CLASSES': (
'waldur_core.core.authentication.TokenAuthentication',
'waldur_core.core.authentication.SessionAuthentication',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'waldur_core.core.renderers.BrowsableAPIRenderer',
),
'DEFAULT_PAGINATION_CLASS': 'waldur_core.core.pagination.LinkHeaderPagination',
'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema',
'PAGE_SIZE': 10,
'EXCEPTION_HANDLER': 'waldur_core.core.views.exception_handler',
# Return native `Date` and `Time` objects in `serializer.data`
'DATETIME_FORMAT': None,
'DATE_FORMAT': None,
'TIME_FORMAT': None,
'ORDERING_PARAM': 'o'
}
AUTHENTICATION_BACKENDS = (
'axes.backends.AxesBackend',
'django.contrib.auth.backends.ModelBackend',
'waldur_core.core.authentication.AuthenticationBackend',
)
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
ANONYMOUS_USER_ID = None
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': (os.path.join(BASE_DIR, 'src', 'waldur_core', 'templates'),),
'OPTIONS': {
            'context_processors': (
                'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
),
'loaders': ADMIN_TEMPLATE_LOADERS + (
'dbtemplates.loader.Loader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
), # noqa: F405
},
},
]
ROOT_URLCONF = 'waldur_core.server.urls'
AUTH_USER_MODEL = 'core.User'
# Session
# https://docs.djangoproject.com/en/2.2/ref/settings/#sessions
SESSION_COOKIE_AGE = 3600
SESSION_SAVE_EVERY_REQUEST = True
WSGI_APPLICATION = 'waldur_core.server.wsgi.application'
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'src', 'waldur_core', 'locale'),
)
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Celery
CELERY_BROKER_URL = 'redis://localhost'
CELERY_RESULT_BACKEND = 'redis://localhost'
CELERY_TASK_QUEUES = {
'tasks': {'exchange': 'tasks'},
'heavy': {'exchange': 'heavy'},
'background': {'exchange': 'background'},
}
CELERY_TASK_DEFAULT_QUEUE = 'tasks'
CELERY_TASK_ROUTES = ('waldur_core.server.celery.PriorityRouter',)
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': 'redis://localhost',
'OPTIONS': {
'DB': 1,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
'PICKLE_VERSION': -1,
},
},
}
# Regular tasks
CELERY_BEAT_SCHEDULE = {
'pull-service-properties': {
'task': 'waldur_core.structure.ServicePropertiesListPullTask',
'schedule': timedelta(hours=24),
'args': (),
},
'pull-service-resources': {
'task': 'waldur_core.structure.ServiceResourcesListPullTask',
'schedule': timedelta(hours=1),
'args': (),
},
'pull-service-subresources': {
'task': 'waldur_core.structure.ServiceSubResourcesListPullTask',
'schedule': timedelta(hours=2),
'args': (),
},
'check-expired-permissions': {
'task': 'waldur_core.structure.check_expired_permissions',
'schedule': timedelta(hours=24),
'args': (),
},
'cancel-expired-invitations': {
'task': 'waldur_core.users.cancel_expired_invitations',
'schedule': timedelta(hours=24),
'args': (),
},
'structure-set-erred-stuck-resources': {
'task': 'waldur_core.structure.SetErredStuckResources',
'schedule': timedelta(hours=1),
'args': (),
},
'create_customer_permission_reviews': {
'task': 'waldur_core.structure.create_customer_permission_reviews',
'schedule': timedelta(hours=24),
'args': (),
},
}
globals().update(WaldurConfiguration().dict())
for ext in WaldurExtension.get_extensions():
INSTALLED_APPS += (ext.django_app(),)
for name, task in ext.celery_tasks().items():
if name in CELERY_BEAT_SCHEDULE:
warnings.warn(
"Celery beat task %s from Waldur extension %s "
"is overlapping with primary tasks definition" % (name, ext.django_app()))
else:
CELERY_BEAT_SCHEDULE[name] = task
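    # Illustrative only: ext.celery_tasks() is expected to return a mapping of
    # CELERY_BEAT_SCHEDULE-style entries, e.g. (hypothetical extension):
    #   {'waldur-myext-sync': {'task': 'waldur_myext.sync_remote',
    #                          'schedule': timedelta(minutes=30), 'args': ()}}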
for key, val in ext.Settings.__ |
alfredgamulo/cloud-custodian | tests/test_cli.py | Python | apache-2.0 | 21,634 | 0.001433 | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import json
import os
import sys
from argparse import ArgumentTypeError
from datetime import datetime, timedelta
from c7n import cli, version, commands
from c7n.resolver import ValuesFrom
from c7n.resources import aws
from c7n.schema import ElementSchema, generate
from c7n.utils import yaml_dump, yaml_load
from .common import BaseTest, TextTestIO
class CliTest(BaseTest):
""" A subclass of BaseTest with some handy functions for CLI related tests. """
def patch_account_id(self):
def test_account_id(options):
options.account_id = self.account_id
self.patch(aws, "_default_account_id", test_account_id)
def get_output(self, argv):
""" Run cli.main with the supplied argv and return the output. """
out, err = self.run_and_expect_success(argv)
return out
def capture_output(self):
out = TextTestIO()
err = TextTestIO()
self.patch(sys, "stdout", out)
self.patch(sys, "stderr", err)
return out, err
def run_and_expect_success(self, argv):
""" Run cli.main() with supplied argv and expect normal execution. """
self.patch_account_id()
self.patch(sys, "argv", argv)
out, err = self.capture_output()
try:
cli.main()
except SystemExit as e:
self.fail(
"Expected sys.exit would not be called. Exit code was ({})".format(
e.code
)
)
return out.getvalue(), err.getvalue()
def run_and_expect_failure(self, argv, exit_code):
""" Run cli.main() with supplied argv and expect exit_code. """
self.patch_account_id()
self.patch(sys, "argv", argv)
out, err = self.capture_output()
# clear_resources()
with self.assertRaises(SystemExit) as cm:
cli.main()
self.assertEqual(cm.exception.code, exit_code)
return out.getvalue(), err.getvalue()
def run_and_expect_exception(self, argv, exception):
""" Run cli.main() with supplied argv and expect supplied exception. """
self.patch_account_id()
self.patch(sys, "argv", argv)
# clear_resources()
try:
cli.main()
except exception:
return
self.fail("Error: did not raise {}.".format(exception))
class UtilsTest(BaseTest):
def test_key_val_pair(self):
self.assertRaises(ArgumentTypeError, cli._key_val_pair, "invalid option")
param = "day=today"
self.assertIs(cli._key_val_pair(param), param)
class VersionTest(CliTest):
def test_version(self):
output = self.get_output(["custodian", "version"])
self.assertEqual(output.strip(), version.version)
def test_debug_version(self):
output = self.get_output(["custodian", "version", "--debug"])
self.assertIn(version.version, output)
self.assertIn('botocore==', output)
self.assertIn('python-dateutil==', output)
class ValidateTest(CliTest):
def test_invalidate_structure_exit(self):
invalid_policies = {"policies": [{"name": "foo"}]}
yaml_file = self.write_policy_file(invalid_policies)
self.run_and_expect_failure(["custodian", "validate", yaml_file], 1)
def test_validate(self):
invalid_policies = {
"policies": [
{
"name": "foo",
"resource": "s3",
"filters": [{"tag:custodian_tagging": "not-null"}],
"actions": [
{"type": "untag", "tags": {"custodian_cleanup": "yes"}}
],
}
]
}
yaml_file = self.write_policy_file(invalid_policies)
json_file = self.write_policy_file(invalid_policies, format="json")
# YAML validation
self.run_and_expect_exception(["custodian", "validate", yaml_file], SystemExit)
# JSON validation
self.run_and_expect_failure(["custodian", "validate", json_file], 1)
# no config files given
self.run_and_expect_failure(["custodian", "validate"], 1)
# nonexistent file given
self.run_and_expect_exception(
["custodian", "validate", "fake.yaml"], ValueError
)
valid_policies = {
"policies": [
{
"name": "foo",
"resource": "s3",
"filters": [{"tag:custodian_tagging": "not-null"}],
"actions": [{"type": "tag", "tags": {"custodian_cleanup": "yes"}}],
}
]
}
yaml_file = self.write_policy_file(valid_policies)
self.run_and_expect_success(["custodian", "validate", yaml_file])
# legacy -c option
self.run_and_expect_success(["custodian", "validate", "-c", yaml_file])
# duplicate policy names
self.run_and_expect_failure(["custodian", "validate", yaml_file, yaml_file], 1)
class SchemaTest(CliTest):
    def test_schema_outline(self):
        stdout, stderr = self.run_and_expect_success([
            "custodian", "schema", "--outline", "--json", "aws"])
        data = json.loads(stdout)
        self.assertEqual(list(data.keys()), ["aws"])
self.assertTrue(len(data['aws']) > 100)
self.assertEqual(
sorted(data['aws']['aws.ec2'].keys()), ['actions', 'filters'])
self.assertTrue(len(data['aws']['aws.ec2']['actions']) > 10)
def test_schema_alias(self):
stdout, stderr = self.run_and_expect_success([
"custodian", "schema", "aws.network-addr"])
self.assertIn("aws.elastic-ip:", stdout)
def test_schema_alias_unqualified(self):
stdout, stderr = self.run_and_expect_success([
"custodian", "schema", "network-addr"])
self.assertIn("aws.elastic-ip:", stdout)
def test_schema(self):
# no options
stdout, stderr = self.run_and_expect_success(["custodian", "schema"])
data = yaml_load(stdout)
assert data['resources']
# summary option
self.run_and_expect_success(["custodian", "schema", "--summary"])
# json option
self.run_and_expect_success(["custodian", "schema", "--json"])
# with just a cloud
self.run_and_expect_success(["custodian", "schema", "aws"])
# with just a resource
self.run_and_expect_success(["custodian", "schema", "ec2"])
# with just a mode
self.run_and_expect_success(["custodian", "schema", "mode"])
# mode.type
self.run_and_expect_success(["custodian", "schema", "mode.phd"])
# resource.actions
self.run_and_expect_success(["custodian", "schema", "ec2.actions"])
# resource.filters
self.run_and_expect_success(["custodian", "schema", "ec2.filters"])
# specific item
self.run_and_expect_success(["custodian", "schema", "ec2.filters.tag-count"])
def test_invalid_options(self):
# invalid resource
self.run_and_expect_failure(["custodian", "schema", "fakeresource"], 1)
# invalid category
self.run_and_expect_failure(["custodian", "schema", "ec2.arglbargle"], 1)
# invalid item
self.run_and_expect_failure(
["custodian", "schema", "ec2.filters.nonexistent"], 1
)
# invalid number of selectors
self.run_and_expect_failure(["custodian", "schema", "ec2.filters.and.foo"], 1)
def test_schema_output(self):
output = self.get_output(["custodian", "schema"])
self.assertIn("aws.ec2", output)
# self.assertIn("azure.vm", output)
# self.assertIn("gcp.instance", output)
output = self.get_output(["custodian", "schema", "aws"])
self.assertIn("aws.ec2", output)
self.assertNotIn("azure.vm", output)
self.assertNotIn("gcp.instance", output)
output = self.get_output(["custodian", "schema", "aws.ec2"])
self.assertIn("actions:", output)
self.assertIn("filters:", output)
|
cjaymes/pyscap | src/scap/model/oval_5/defs/windows/Process58TestElement.py | Python | gpl-3.0 | 896 | 0.001116 | # Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of | the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT | ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.oval_5.defs.windows.TestType import TestType
logger = logging.getLogger(__name__)
class Process58TestElement(TestType):
MODEL_MAP = {
'tag_name': 'process58_test',
}
|
futurecolors/gopython3 | gopython3/api/tests/test_unit.py | Python | mit | 7,904 | 0.006832 | # coding: utf-8
import datetime
import pytz
from django.test import TestCase
from httpretty import HTTPretty
from api.pypi import PyPI
from api.travis import TravisCI
from api.github import Github
class APITestCase(TestCase):
def setUp(self):
HTTPretty.reset()
HTTPretty.enable()
def tearDown(self):
HTTPretty.disable()
class TestGithubApi(APITestCase):
def test_get_most_popular_repo(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.github.com/search/repositories',
'{"items":[{"full_name": "coagulant/requests2", "name": "requests2"},'
'{"full_name": "kennethreitz/fake_requests", "name": "fake_requests"}]}' | ,
)
assert Github().get_most_popular_repo('Fake_Requests') == 'kennethreitz/fake_requests'
def test_get_repo_info(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.github.com/repos/coagulant/coveralls-python',
'{"html_url": "https://github.com/coagulant/coveralls-python", "updated_at": "2013-01-26T19:14:43Z"}',
)
        assert Github().get_repo('coagulant/coveralls-python') == {
"html_url": "https://github.com/coagulant/coveralls-python",
"updated_at": datetime.datetime(2013, 1, 26, 19, 14, 43, tzinfo=pytz.utc)
}
def test_crawl_py3_issues(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.github.com/repos/embedly/embedly-python/issues',
responses=[HTTPretty.Response('[{"state": "open", "title": "WTF?", "html_url": "https://github.com/embedly/embedly-python/issues/1"},'
'{"state": "closed", "title": "Python 3 support", "html_url": "https://github.com/embedly/embedly-python/pull/13"}]'),
HTTPretty.Response('[{"state": "open", "title": "Broken", "html_url": "https://github.com/embedly/embedly-python/issues/2"}]')]
)
assert Github().get_py3_issues('embedly/embedly-python', search=False) == [{
'state': 'closed',
'title': 'Python 3 support',
'html_url': 'https://github.com/embedly/embedly-python/pull/13'
}]
assert HTTPretty.last_request.querystring['state'][0] == 'closed'
def test_get_py3_pull_requests(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.github.com/repos/django/django/pulls',
responses=[HTTPretty.Response('[{"html_url": "https://github.com/django/django/pull/1", "title": "testing python requests 3", "state": "open"},'
' {"html_url": "https://github.com/django/django/pull/2", "title": "please support Python 3", "state": "closed"}]'),
HTTPretty.Response('[{"state": "open", "title": "Broken", "html_url": "https://github.com/django/django/pull/3"}]')]
)
assert Github().get_py3_pulls('django/django') == [{
'state': 'closed',
'title': 'please support Python 3',
'html_url': 'https://github.com/django/django/pull/2'
}]
assert HTTPretty.last_request.querystring['state'][0] == 'closed'
def test_get_py3_forks(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.github.com/repos/nick/progressbar/forks',
'[{"html_url": "https://github.com/coagulant/progressbar-python3", "name": "progressbar-python3"},'
'{"html_url": "https://github.com/mick/progressbar", "name": "progressbar"}]',
)
assert Github().get_py3_forks('nick/progressbar') == [{
'name': 'progressbar-python3',
'html_url': 'https://github.com/coagulant/progressbar-python3'
}]
def test_get_py3_forks_branches(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.github.com/repos/embedly/embedly-python/forks',
'[{"html_url": "https://github.com/coagulant/embedly-python", "name": "embedly-python", "full_name": "coagulant/embedly-python"}]',
)
HTTPretty.register_uri(HTTPretty.GET,
'https://api.github.com/repos/coagulant/embedly-python/branches',
'[{"name": "master"}, {"name": "py3k"}]',
)
assert Github().get_py3_forks('embedly/embedly-python', True) == [{
'name': 'embedly-python',
'html_url': 'https://github.com/coagulant/embedly-python'
}]
class TestTravisApi(APITestCase):
def test_get_build_status(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.travis-ci.org/repos/coagulant/cleanweb',
'{"repo":{"slug": "coagulant/cleanweb", "last_build_state": "passed"}}'
)
assert TravisCI().get_build_status('coagulant/cleanweb') == {
'html_url': 'https://travis-ci.org/coagulant/cleanweb',
'last_build_state': 'passed',
}
def test_get_build_status_partial(self):
HTTPretty.register_uri(HTTPretty.GET,
'https://api.travis-ci.org/repos/coxmediagroup/django-admin-tools',
'{"repo":{"slug": "coxmediagroup/django-admin-tools", "last_build_state": ""}}'
)
assert TravisCI().get_build_status('coxmediagroup/django-admin-tools') == {
'html_url': 'https://travis-ci.org/coxmediagroup/django-admin-tools',
'last_build_state': 'unknown',
}
class TestPypiApi(APITestCase):
def test_get_info_without_version(self):
json_string = """{"info":{
"name": "Django",
"home_page": "http://www.djangoproject.com/",
"classifiers": [
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3"]
}, "urls": [{"upload_time": "2013-09-15T06:30:37"}]}"""
HTTPretty.register_uri(HTTPretty.GET,
"http://pypi.python.org/pypi/Django/json", json_string
)
assert PyPI().get_info('Django') == {
'py3_versions': ['3', '3.2', '3.3'],
'last_release_date': datetime.datetime(2013, 9, 15, 6, 30, 37, tzinfo=pytz.utc),
'name': 'Django',
'url': 'http://www.djangoproject.com/'
}
def test_get_info_with_version(self):
json_string = """{"info":{
"name": "Django",
"home_page": "http://www.djangoproject.com/",
"classifiers": [
"Programming Language :: Python",
"Programming Language :: Python :: 2.4",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7"]
}, "urls": [{"upload_time": "2013-02-19T20:32:04"}]}"""
HTTPretty.register_uri(HTTPretty.GET,
"http://pypi.python.org/pypi/Django/1.3.6/json", json_string
)
assert PyPI().get_info(name='Django', version='1.3.6') == {
'py3_versions': [],
'last_release_date': datetime.datetime(2013, 2, 19, 20, 32, 4, tzinfo=pytz.utc),
'name': 'Django',
'url': 'http://www.djangoproject.com/'
}
def test_incomplete_info(self):
json_string = """{"info":{
"name": "printtree",
"home_page": "UNKNOWN",
"classifiers": []
}, "urls": []}"""
HTTPretty.register_uri(HTTPretty.GET,
"http://pypi.python.org/pypi/printtree/1.0.10/json", json_string
)
assert PyPI().get_info(name='printtree', version='1.0.10') == {
'py3_versions': [],
'last_release_date': None,
'name': 'printtree',
'url': None
}
|
ssdi-drive/nuxeo-drive | nuxeo-drive-client/nxdrive/engine/next/processor.py | Python | lgpl-2.1 | 4,670 | 0.004497 | # coding: utf-8
import os
import shutil
from nxdrive.client.base_automation_client import DOWNLOAD_TMP_FILE_PREFIX, \
DOWNLOAD_TMP_FILE_SUFFIX
from nxdrive.engine.processor import Processor as OldProcessor
from nxdrive.logging_config import get_logger
log = get_logger(__name__)
class Processor(OldProcessor):
def __init__(self, engine, item_getter, name=None):
super(Processor, self).__init__(engine, item_getter, name)
def acquire_state(self, row_id):
log.warning("acquire...")
result = super(Processor, self).acquire_state(row_id)
if result is not None and self._engine.get_local_watcher().is_pending_scan(result.local_parent_path):
            self._dao.release_processor(self._thread_id)
# Postpone pair for watcher delay
self._engine.get_queue_manager().postpone_pair(result, self._engine.get_local_watcher().get_scan_delay())
return None
log.warning("Acquired: %r", result)
        return result
def _get_partial_folders(self):
local_client = self._engine.get_local_client()
if not local_client.exists('/.partials'):
local_client.make_folder('/', '.partials')
return local_client.abspath('/.partials')
def _download_content(self, local_client, remote_client, doc_pair, file_path):
# TODO Should share between threads
file_out = os.path.join(self._get_partial_folders(), DOWNLOAD_TMP_FILE_PREFIX +
doc_pair.remote_digest + str(self._thread_id) + DOWNLOAD_TMP_FILE_SUFFIX)
# Check if the file is already on the HD
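        # (deduplication: if another synced file already has this digest, copy
        # it locally instead of streaming the content from the server again)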
pair = self._dao.get_valid_duplicate_file(doc_pair.remote_digest)
if pair:
shutil.copy(local_client.abspath(pair.local_path), file_out)
return file_out
tmp_file = remote_client.stream_content( doc_pair.remote_ref, file_path,
parent_fs_item_id=doc_pair.remote_parent_ref, file_out=file_out)
self._update_speed_metrics()
return tmp_file
def _update_remotely(self, doc_pair, local_client, remote_client, is_renaming):
log.warning("_update_remotely")
os_path = local_client.abspath(doc_pair.local_path)
if is_renaming:
new_os_path = os.path.join(os.path.dirname(os_path), doc_pair.remote_name)
log.debug("Replacing local file '%s' by '%s'.", os_path, new_os_path)
else:
new_os_path = os_path
log.debug("Updating content of local file '%s'.", os_path)
tmp_file = self._download_content(local_client, remote_client, doc_pair, new_os_path)
# Delete original file and rename tmp file
local_client.delete_final(doc_pair.local_path)
rel_path = local_client.get_path(tmp_file)
local_client.set_remote_id(rel_path, doc_pair.remote_ref)
# Move rename
updated_info = local_client.move(rel_path, doc_pair.local_parent_path, doc_pair.remote_name)
doc_pair.local_digest = updated_info.get_digest()
self._dao.update_last_transfer(doc_pair.id, "download")
self._refresh_local_state(doc_pair, updated_info)
def _create_remotely(self, local_client, remote_client, doc_pair, parent_pair, name):
local_parent_path = parent_pair.local_path
# TODO Shared this locking system / Can have concurrent lock
self._unlock_readonly(local_client, local_parent_path)
tmp_file = None
try:
if doc_pair.folderish:
log.debug("Creating local folder '%s' in '%s'", name,
local_client.abspath(parent_pair.local_path))
# Might want do temp name to original
path = local_client.make_folder(local_parent_path, name)
else:
path, os_path, name = local_client.get_new_file(local_parent_path,
name)
tmp_file = self._download_content(local_client, remote_client, doc_pair, os_path)
log.debug("Creating local file '%s' in '%s'", name,
local_client.abspath(parent_pair.local_path))
# Move file to its folder - might want to split it in two for events
local_client.move(local_client.get_path(tmp_file),local_parent_path, name)
self._dao.update_last_transfer(doc_pair.id, "download")
finally:
self._lock_readonly(local_client, local_parent_path)
# Clean .nxpart if needed
if tmp_file is not None and os.path.exists(tmp_file):
os.remove(tmp_file)
return path
|
TinghuiWang/ActivityLearning | actlearn/feature/sensorCount.py | Python | bsd-3-clause | 1,358 | 0.002946 | from .AlFeatureTemplate import AlFeatureTemplate
from .sensorCountRoutine import AlFeatureSensorCountRoutine
import numpy as np
class AlFeatureSensorCount(AlFeatureTemplate):
def __init__(self, normalize=False):
"""
Initialization of Template Class
:return:
"""
        AlFeatureTemplate.__init__(self,
                                   name='sensorCount',
description='Number of Events in the window related to the sensor',
per_sensor=True,
enabled=True,
routine=AlFeatureSensorCountRoutine())
# Normalize the number between 0 to 1
self.normalize = normalize
def get_feature_value(self, data_list, cur_index, window_size, sensor_name=None):
"""
        Counts the number of occurrences of the specified sensor in the current window.
:param data_list: list of sensor data
:param cur_index: current data record index
:param window_size: window size
:param sensor_name: name of sensor
:return: a double value
"""
if self.normalize:
return np.float(self.routine.sensor_count[sensor_name])/(window_size * 2)
else:
return np.float(self.routine.sensor_count[sensor_name])
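# Illustrative only (hypothetical numbers): if sensor 'M001' fired 6 times
# within a window of size 30, get_feature_value(...) returns 6.0, or
# 6 / (30 * 2) = 0.1 when the feature was constructed with normalize=True.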
|
d4rkl0rd3r3b05/Firewall | Firewall/syndelcnsl.py | Python | mit | 5,853 | 0.075688 | from Tkinter import *
import os
import tkMessageBox
class syndelcnsl:
def show(self,mainfrm):
#-----------------------------------------This is mainframe--------------------------------------------
syndelfrm=Frame(mainfrm,relief=SOLID,borderwidth=1,bg="white")
txtfont=("FreeSerif", 11,"bold")
fnt=("FreeSerif",11,"bold")
fntopt=("FreeSerif",10)
fnttxt=("FreeSerif",12)
#------------Chain------------
#chain frame
chnfrm=Frame(syndelfrm,bg="white")
#chain label
chnlbl=Label(chnfrm,text="Chain"+"\t"*4,font=txtfont,relief=FLAT,width=24,bg="white")
chnlbl.pack(side=LEFT)
#chain options
self.chn=StringVar()
self.chn.set("INPUT")
chnopt = OptionMenu(chnfrm,self.chn,"INPUT","OUTPUT","FORWARD")
chnopt["bg"]="white"
chnopt["font"]=fntopt
chnopt["width"]=18
chnopt.pack(side=RIGHT)
        chnfrm.pack(expand=YES,fill=BOTH)
#------------------------------------------Action--------------------------------------------------
#Action frame
actfrm=Frame(syndelfrm,bg="white")
#Action label
        actlbl=Label(actfrm,text="Action"+"\t"*4,font=fnt,relief=FLAT,width=24,bg="white")
actlbl.pack(side=LEFT,pady=4)
#chain options
self.act=StringVar()
self.act.set("REJECT")
actopt = OptionMenu(actfrm,self.act,"REJECT", "DROP", "LOG","ACCEPT","RETURN")
actopt["bg"]="white"
actopt["font"]=fntopt
actopt["width"]=18
actopt.pack(side=RIGHT)
actfrm.pack(expand=YES,fill=BOTH)
#--------------------------------------Protocol------------------------------------------------------
#Protocol frame
protofrm=Frame(syndelfrm,bg="white")
#protocol label
protolbl=Label(protofrm,text="Protocol"+"\t"*4,font=fnt,relief=FLAT,width=24,bg="white")
protolbl.pack(side=LEFT,pady=4)
#protocol options
self.proto=StringVar()
self.proto.set("tcp")
protoopt = OptionMenu(protofrm,self.proto,"tcp", "udp","sctp", "icmp","udplite","esp","ah","all")
protoopt["bg"]="white"
protoopt["font"]=fntopt
protoopt["width"]=18
protoopt.pack(side=RIGHT)
protofrm.pack(expand=YES,fill=X)
#-----------------------------------------source address-----------------------------------------------
#source address frame
srcaddfrm=Frame(syndelfrm,bg="white")
self.srcadd=StringVar()
#source address label
srcaddlbl=Label(srcaddfrm,text="Source Address"+"\t"*3,font=fnt,relief=FLAT,width=24,bg="white")
srcaddlbl.pack(side=LEFT,pady=4)
#source address text
srcaddtxt=Entry(srcaddfrm,textvariable=self.srcadd,relief=SUNKEN,bg="white")
srcaddtxt.pack(side=LEFT,pady=4)
srcaddfrm.pack(expand=YES,fill=X)
#-----------------------------------------source port-----------------------------------------------
#source port frame
srcprtfrm=Frame(syndelfrm,bg="white")
self.srcprt=StringVar()
#source port label
srcprtlbl=Label(srcprtfrm,text="Source port"+"\t"*3,font=fnt,relief=FLAT,width=24,bg="white")
srcprtlbl.pack(side=LEFT,pady=4)
#source port text
srcprttxt=Entry(srcprtfrm,textvariable=self.srcprt,relief=SUNKEN,bg="white")
srcprttxt.pack(side=LEFT,pady=4)
srcprtfrm.pack(expand=YES,fill=X)
#-----------------------------------------destination address-----------------------------------------------
#destination address frame
desaddfrm=Frame(syndelfrm,bg="white")
self.desadd=StringVar()
#destination address label
desaddlbl=Label(desaddfrm,text="Destination Address"+"\t"*2,font=fnt,relief=FLAT,width=24,bg="white")
desaddlbl.pack(side=LEFT,pady=4)
#desination address text
desaddtxt=Entry(desaddfrm,textvariable=self.desadd,relief=SUNKEN,bg="white")
desaddtxt.pack(side=LEFT,pady=4)
desaddfrm.pack(expand=YES,fill=X)
#-----------------------------------------destination port-----------------------------------------------
#destination port frame
desprtfrm=Frame(syndelfrm,bg="white")
self.desprt=StringVar()
#desination port label
desprtlbl=Label(desprtfrm,text="Destination port"+"\t"*3,font=fnt,relief=FLAT,width=24,bg="white")
desprtlbl.pack(side=LEFT,pady=4)
#desination port text
desprttxt=Entry(desprtfrm,textvariable=self.desprt,relief=SUNKEN,bg="white")
desprttxt.pack(side=LEFT,pady=4)
desprtfrm.pack(expand=YES,fill=X)
#---------------------------------------------Delete button--------------------------------------------------
delbtn=Button(syndelfrm,text="Delete Rule",command=self.delr,\
font=fnt,relief=RAISED,width=18,height=1,bg="white")
delbtn.pack(side=TOP,anchor=CENTER,pady=4)
return syndelfrm
def delr(self):
if (not tkMessageBox.askyesno("Delete", "Do you really want to Delete specified rule?")):
return
chain='iptables -D '+self.chn.get()
action=' -j '+self.act.get()
#--------------------------------------------------------------------
if(self.chn.get()=="INPUT"):
if(self.srcadd.get()!=""):
addrs=' -s '+self.srcadd.get()
else:
addrs=" -s 0/0"
else:
if(self.desadd.get()!=""):
addrs=' -d '+self.desadd.get()
else:
addrs=" -d 0/0"
#---------------------------------------------------------------------
if(self.srcprt.get()!=""):
sprt=' --sport '+self.srcprt.get()
else:
sprt=""
#---------------------------------------------------------------------
if(self.desprt.get()!=""):
dprt=' --dport '+self.desprt.get()
else:
dprt=""
#---------------------------------------------------------------------
if(self.proto.get()!=""):
proto=' -p '+self.proto.get()
else:
proto=""
try:
print chain+proto+addrs+sprt+dprt+action
os.popen(chain+proto+addrs+sprt+dprt+action,"r")
tkMessageBox.showinfo("Deleted","Specified rule has been Deleted.")
except:
pass
|
jeppeter/pytest | testing/test_core.py | Python | mit | 21,642 | 0.002726 | import pytest, py, os
from _pytest.core import PluginManager
from _pytest.core import MultiCall, HookRelay, varnames
class TestBootstrapping:
def test_consider_env_fails_to_import(self, monkeypatch):
pluginmanager = PluginManager()
monkeypatch.setenv('PYTEST_PLUGINS', 'nonexisting', prepend=",")
pytest.raises(ImportError, "pluginmanager.consider_env()")
def test_preparse_args(self):
pluginmanager = PluginManager()
pytest.raises(ImportError, """
pluginmanager.consider_preparse(["xyz", "-p", "hello123"])
""")
def test_plugin_prevent_register(self):
pluginmanager = PluginManager()
pluginmanager.consider_preparse(["xyz", "-p", "no:abc"])
l1 = pluginmanager.getplugins()
pluginmanager.register(42, name="abc")
l2 = pluginmanager.getplugins()
assert len(l2) == len(l1)
def test_plugin_prevent_register_unregistered_alredy_registered(self):
pluginmanager = PluginManager()
pluginmanager.register(42, name="abc")
l1 = pluginmanager.getplugins()
assert 42 in l1
pluginmanager.consider_preparse(["xyz", "-p", "no:abc"])
l2 = pluginmanager.getplugins()
assert 42 not in l2
def test_plugin_double_register(self):
pm = PluginManager()
pm.register(42, name="abc")
pytest.raises(ValueError, lambda: pm.register(42, name="abc"))
def test_plugin_skip(self, testdir, monkeypatch):
p = testdir.makepyfile(skipping1="""
import pytest
pytest.skip("hello")
""")
p.copy(p.dirpath("skipping2.py"))
monkeypatch.setenv("PYTEST_PLUGINS", "skipping2")
result = testdir.runpytest("-p", "skipping1", "--traceconfig")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*hint*skipping1*hello*",
"*hint*skipping2*hello*",
])
def test_consider_env_plugin_instantiation(self, testdir, monkeypatch):
pluginmanager = PluginManager()
testdir.syspathinsert()
testdir.makepyfile(xy123="#")
monkeypatch.setitem(os.environ, 'PYTEST_PLUGINS', 'xy123')
l1 = len(pluginmanager.getplugins())
pluginmanager.consider_env()
l2 = len(pluginmanager.getplugins())
assert l2 == l1 + 1
assert pluginmanager.getplugin('xy123')
pluginmanager.consider_env()
l3 = len(pluginmanager.getplugins())
assert l2 == l3
def test_consider_setuptools_instantiation(self, monkeypatch):
pkg_resources = py.test.importorskip("pkg_resources")
def my_iter(name):
assert name == "pytest11"
class EntryPoint:
name = "pytest_mytestplugin"
dist = None
def load(self):
class PseudoPlugin:
x = 42
return PseudoPlugin()
return iter([EntryPoint()])
monkeypatch.setattr(pkg_resources, 'iter_entry_points', my_iter)
pluginmanager = PluginManager()
pluginmanager.consider_setuptools_entrypoints()
plugin = pluginmanager.getplugin("mytestplugin")
assert plugin.x == 42
def test_consider_setuptools_not_installed(self, monkeypatch):
monkeypatch.setitem(py.std.sys.modules, 'pkg_resources',
py.std.types.ModuleType("pkg_resources"))
pluginmanager = PluginManager()
pluginmanager.consider_setuptools_entrypoints()
# ok, we did not explode
def test_pluginmanager_ENV_startup(self, testdir, monkeypatch):
x500 = testdir.makepyfile(pytest_x500="#")
p = testdir.makepyfile("""
import pytest
def test_hello(pytestconfig):
plugin = pytestconfig.pluginmanager.getplugin('pytest_x500')
assert plugin is not None
""")
monkeypatch.setenv('PYTEST_PLUGINS', 'pytest_x500', prepend=",")
result = testdir.runpytest(p)
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed in*"])
def test_import_plugin_importname(self, testdir):
pluginmanager = PluginManager()
pytest.raises(ImportError, 'pluginmanager.import_plugin("qweqwex.y")')
pytest.raises(ImportError, 'pluginmanager.import_plugin("pytest_qweqwx.y")')
reset = testdir.syspathinsert()
pluginname = "pytest_hello"
testdir.makepyfile(**{pluginname: ""})
pluginmanager.import_plugin("pytest_hello")
len1 = len(pluginmanager.getplugins())
pluginmanager.import_plugin("pytest_hello")
len2 = len(pluginmanager.getplugins())
assert len1 == len2
plugin1 = pluginmanager.getplugin("pytest_hello")
assert plugin1.__name__.endswith('pytest_hello')
plugin2 = pluginmanager.getplugin("pytest_hello")
assert plugin2 is plugin1
def test_import_plugin_dotted_name(self, testdir):
pluginmanager = PluginManager()
pytest.raises(ImportError, 'pluginmanager.import_plugin("qweqwex.y")')
pytest.raises(ImportError, 'pluginmanager.import_plugin("pytest_qweqwex.y")')
reset = testdir.syspathinsert()
testdir.mkpydir("pkg").join("plug.py").write("x=3")
pluginname = "pkg.plug"
pluginmanager.import_plugin(pluginname)
mod = pluginmanager.getplugin("pkg.plug")
assert mod.x == 3
def test_consider_module(self, testdir):
pluginmanager = PluginManager()
testdir.syspathinsert()
testdir.makepyfile(pytest_p1="#")
testdir.makepyfile(pytest_p2="#")
mod = py.std.types.ModuleType("temp")
mod.pytest_plugins = ["pytest_p1", "pytest_p2"]
pluginmanager.consider_module(mod)
assert pluginmanager.getplugin("pytest_p1").__name__ == "pytest_p1"
assert pluginmanager.getplugin("pytest_p2").__name__ == "pytest_p2"
def test_consider_module_import_module(self, testdir):
mod = py.std.types.ModuleType("x")
mod.pytest_plugins = "pytest_a"
aplugin = testdir.makepyfile(pytest_a="#")
pluginmanager = PluginManager()
reprec = testdir.getreportrecorder(pluginmanager)
#syspath.prepend(aplugin.dirpath())
py.std.sys.path.insert(0, str(aplugin.dirpath()))
pluginmanager.consider_module(mod)
call = reprec.getcall(pluginmanager.hook.pytest_plugin_registered.name)
assert call.plugin.__name__ == "pytest_a"
# check that it is not registered twice
pluginmanager.consider_module(mod)
l = reprec.getcalls("pytest_plugin_registered")
assert len(l) == 1
def test_config_sets_conftesthandle_onimport(self, testdir):
config = testdir.parseconfig([])
assert config._conftest._onimport == config._onimportconftest
def test_consider_conftest_deps(self, testdir):
mod = testdir.makepyfile("pytest_plugins='xyz'").pyimport()
pp = PluginManager()
pytest.raises(ImportError, "pp.consider_conftest(mod)")
def test_pm(self):
pp = PluginManager()
class A: pass
a1, a2 = A(), A()
pp.register(a1)
assert pp.isregistered(a1)
pp.register(a2, "hello")
assert pp.isregistered(a2)
        l = pp.getplugins()
assert a1 in l
assert a2 in l
assert pp.getplugin('hello') == a2
pp.unregister(a1)
assert not pp.isregistered(a1)
pp.unregister(name="hello")
assert not pp.isregistered(a2)
def test_pm_ordering(self):
pp = PluginManager()
class A: pass
a1, a2 = A(), A()
pp.register(a1)
        pp.register(a2, "hello")
l = pp.getplugins()
assert l.index(a1) < l.index(a2)
a3 = A()
pp.register(a3, prepend=True)
l = pp.getplugins()
assert l.index(a3) == 0
def test_register_imported_modules(self):
pp = PluginManager()
mod = py.std.types.ModuleType("x.y.pytest_hello")
pp.register(mod)
assert pp.isregistered(mod)
l = pp.getplugins()
assert mod in l
pytest.raises(Valu |
rven/odoo | addons/sale_stock/wizard/stock_rules_report.py | Python | agpl-3.0 | 594 | 0.005051 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class StockRulesReport(models.TransientModel):
_inherit = 'stock.rules.report'
so_route_ids = fields.Many2many('stock.location.route', string='Apply specific routes',
domain="[('sale_selectable', '=', True)]", help="Choose to apply SO lines specific routes.")
def _prepare_report_data(self):
        data = super(StockRulesReport, self)._prepare_report_data()
        data['so_route_ids'] = self.so_route_ids.ids
return data
|
PeachyPrinter/peachyinstaller | windows/test/test_application_remove.py | Python | apache-2.0 | 6,117 | 0.003923 | import unittest
import logging
import os
import sys
from helpers import TestHelpers
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..',))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
from application_remove import RemoveApplication
from action_handler import ActionHandlerException
from mock import patch, MagicMock, call
@patch('application_remove.isfile')
@patch('application_remove.isdir')
@patch('application_remove.remove')
@patch('application_remove.rmtree')
class RemoveApplicationTest(unittest.TestCase, TestHelpers):
def test_start_does_it_right_on_a_happy_path(self, mock_rmtree, mock_remove, mock_isdir, mock_isfile):
app = self.get_application()
mock_isdir.return_value = True
mock_isfile.return_value = True
status_cb = MagicMock()
expected_callbacks = ["Initializing", "Removing Application", "Removing Shortcut", "Cleaning up install history", "Finished Removing Files"]
expected_config_file = os.path.join(os.getenv('USERPROFILE'), 'AppData', 'Local', "Peachy", 'PeachyInstaller', 'app-{}.json'.format(app.id))
RemoveApplication(app, status_cb).start()
mock_isdir.assert_called_once_with(app.installed_path)
mock_rmtree.assert_called_once_with(app.installed_path)
mock_isfile.assert_has_calls([call(app.shortcut_path), call(expected_config_file)])
mock_remove.assert_has_calls([call(app.shortcut_path), call(expected_config_file)])
callbacks = [arg[0][0] for arg in status_cb.call_args_list]
self.assertEqual(expected_callbacks, callbacks)
def test_start_does_not_delete_shortcut_when_missing(self, mock_rmtree, mock_remove, mock_isdir, mock_isfile):
app = self.get_application()
mock_isdir.return_value = True
is_file_returns = [True, False]
def side_effect(self):
return is_file_returns.pop()
mock_isfile.side_effect = side_effect
status_cb = MagicMock()
expected_callbacks = ["Initializing", "Removing Application", "Removing Shortcut", "Shortcut Not Found", "Cleaning up install history", "Finished Removing Files"]
expected_config_file = os.path.join(os.getenv('USERPROFILE'), 'AppData', 'Local', "Peachy", 'PeachyInstaller', 'app-{}.json'.format(app.id))
RemoveApplication(app, status_cb).start()
mock_isfile.assert_has_calls([call(app.shortcut_path), call(expected_config_file)])
mock_remove.assert_has_calls([call(expected_config_file)])
        callbacks = [arg[0][0] for arg in status_cb.call_args_list]
self.assertEqual(expected_callbacks, callbacks)
def test_start_does_not_delete_app_when_missing(self, mock_rmtree, mock_remove, mock_isdir, mock_isfile):
app = self.get_application()
mock_isdir.return_value = False
        mock_isfile.return_value = True
        status_cb = MagicMock()
expected_callbacks = ["Initializing", "Removing Application", "Application Not Found", "Removing Shortcut", "Cleaning up install history", "Finished Removing Files"]
RemoveApplication(app, status_cb).start()
mock_isdir.assert_called_once_with(app.installed_path)
self.assertFalse(mock_rmtree.called)
callbacks = [arg[0][0] for arg in status_cb.call_args_list]
self.assertEqual(expected_callbacks, callbacks)
def test_start_does_not_delete_config_when_missing(self, mock_rmtree, mock_remove, mock_isdir, mock_isfile):
app = self.get_application()
mock_isdir.return_value = True
is_file_returns = [False, True]
def side_effect(self):
return is_file_returns.pop()
mock_isfile.side_effect = side_effect
status_cb = MagicMock()
expected_callbacks = ["Initializing", "Removing Application", "Removing Shortcut", "Cleaning up install history", "Install history missing", "Finished Removing Files"]
RemoveApplication(app, status_cb).start()
self.assertEqual(1, mock_remove.call_count)
callbacks = [arg[0][0] for arg in status_cb.call_args_list]
self.assertEqual(expected_callbacks, callbacks)
def test_start_raises_exception_if_removing_app_fails(self, mock_rmtree, mock_remove, mock_isdir, mock_isfile):
app = self.get_application()
mock_isdir.return_value = True
mock_isfile.return_value = True
mock_rmtree.side_effect = IOError("Bad Stuff")
with self.assertRaises(ActionHandlerException) as ex:
RemoveApplication(app).start()
self.assertEquals(10601, ex.exception.error_code)
self.assertEquals("Critical Failure Removing Application", ex.exception.message)
def test_start_raises_exception_if_removing_shortcut(self, mock_rmtree, mock_remove, mock_isdir, mock_isfile):
app = self.get_application()
mock_isdir.return_value = True
mock_isfile.return_value = True
mock_remove.side_effect = IOError("Bad Stuff")
with self.assertRaises(ActionHandlerException) as ex:
RemoveApplication(app).start()
self.assertEquals(10602, ex.exception.error_code)
self.assertEquals("Critical Failure Removing Shortcut", ex.exception.message)
def test_start_raises_exception_if_removing_history(self, mock_rmtree, mock_remove, mock_isdir, mock_isfile):
app = self.get_application()
mock_isdir.return_value = True
mock_isfile.return_value = True
def side_effect(arg):
if arg == os.path.join(os.getenv('USERPROFILE'), 'AppData', 'Local', "Peachy", 'PeachyInstaller', 'app-{}.json'.format(app.id)):
raise IOError("Bad stuff")
mock_remove.side_effect = side_effect
with self.assertRaises(ActionHandlerException) as ex:
RemoveApplication(app).start()
self.assertEquals(10603, ex.exception.error_code)
self.assertEquals("Critical Failure Removing History", ex.exception.message)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', level='INFO')
unittest.main()
|
davidbarkhuizen/sdmm | django_web_server/sitemodel/frp/interface.py | Python | gpl-2.0 | 7,900 | 0.035443 | import random
import json
from django.db import connection
from django.conf import settings
from sitemodel.interface import SiteInterface, SiteModel, random_str
from django.core.files import File
from server.models import Site, TextUpload, BinaryUpload
from sitemodel.frp.model import FRP_Category, FRP_Contact, FRP_Property, FRP_PropertyImage, FRP_SubProperty, FRP_SubPropertyImage
SITE_NAME = 'FisherRoelandProperty'
SITE_TOKEN = 'frp'
USER_EDITABLE_MODEL_NAMES = [ 'frp_contact', 'frp_property', 'frp_propertyimage', 'frp_subproperty', 'frp_subpropertyimage' ]
SITE_USER_NAMES = [ 'frpjenny', 'frpmelissa' ]
IMPORT_ROOT_LOCATION = settings.SITE_DATA_IMPORT_ROOT_FOLDER + SITE_TOKEN + '/'
def load_as_json(file_name):
path = IMPORT_ROOT_LOCATION + file_name
d = None
json_str = None
try:
with open(path, 'rt') as source_file:
json_str = source_file.read()
d = json.loads(json_str)
except Exception as e:
print('error loading JSON file @ {0}'.format(path))
print(e)
print(json_str)
raise e
return d
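
# --- Editor's addition: hedged usage sketch, not part of the original module. ---
# load_as_json() resolves bare file names against IMPORT_ROOT_LOCATION, so a
# caller passes only the file name. The wrapper below is illustrative only.
def _example_load_categories():
    # 'categories.json' is the same file populate_model_constants() reads below.
    data = load_as_json('categories.json')
    return data['categories']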
def populate_model_constants():
if len(FRP_Category.objects.all()) > 0:
return
# categories
categories = load_as_json('categories.json')
for category_name in categories['categories']:
if FRP_Category.objects.filter(name=category_name).exists():
continue
db_category = FRP_Category(name=category_name)
db_category.save()
def populate_datamodel():
import_root_location = settings.SITE_DATA_IMPORT_ROOT_FOLDER + SITE_TOKEN + '/'
# CONTACTS
#
if len(FRP_Contact.objects.all()) == 0:
contacts = load_as_json('contacts.json')
for i in range(len(contacts['contacts'])):
            contact = contacts['contacts'][i]
print(contact)
db_contact = FRP_Contact(name=contact['name'],
email=contact['email'],
phone=contact['phone'],
isprimary=contact['isprimary'],
iscc=contact['iscc']
)
db_contact.save()
for category in contact['categories']:
db_category = FRP_Category.objects.get(name=category)
                db_contact.categories.add(db_category)
db_contact.save()
# PROPERTIES BY CATEGORY
#
for category in FRP_Category.objects.all():
# PROPERTIES
try:
to_import = load_as_json(category.name + '.json')
except IOError as e:
continue
for prop in to_import['properties']:
db_property = FRP_Property(category=category, sold=False, name=prop['name'], areaSQM=prop['areaSQM'], description=prop['description'], shortLocation=prop['shortLocation'],longLocation=prop['longLocation'], latitude=prop['latitude'], longitude=prop['longitude'])
db_property.save()
for i in range(len(prop['images'])):
prop_image_file_name = prop['images'][i]
image_source_location = import_root_location + category.name + '/' + prop_image_file_name
db_property_image = FRP_PropertyImage(property=db_property)
image_source_django_file = None
with open(image_source_location) as image_source_python_file:
image_source_django_file = File(image_source_python_file)
db_property_image.file.save(prop_image_file_name, image_source_django_file)
if i == 0:
db_property_image.isprimary = True
db_property_image.save()
for j in range(len(prop['subproperties'])):
sub_prop = prop['subproperties'][j]
db_subproperty = FRP_SubProperty(property=db_property, name=sub_prop['name'], areaSQM=sub_prop['areaSQM'], description=sub_prop['description'])
db_subproperty.save()
if ('images' in sub_prop.keys()):
for k in range(len(sub_prop['images'])):
sub_prop_image_file_name = sub_prop['images'][k]
image_source_location = import_root_location + category.name + '/' + sub_prop_image_file_name
db_sub_property_image = FRP_SubPropertyImage(subproperty=db_subproperty)
image_source_django_file = None
with open(image_source_location) as image_source_python_file:
image_source_django_file = File(image_source_python_file)
db_sub_property_image.file.save(sub_prop_image_file_name, image_source_django_file)
if k == 0:
db_sub_property_image.isprimary = True
db_sub_property_image.save()
def render_site_model(site_token):
data_model = {}
db_text_uploads = []
db_binary_uploads = []
db_site = Site.objects.get(token=site_token)
# CONTACTS
#
data_model['contacts'] = []
for db_contact in FRP_Contact.objects.all():
contact = { 'name' : db_contact.name,
'phone' : db_contact.phone,
'email' : db_contact.email,
'categories' : [],
'isprimary' : db_contact.isprimary,
'iscc' : db_contact.iscc
}
for db_category in db_contact.categories.all():
contact['categories'].append(db_category.name)
data_model['contacts'].append(contact)
# PROPERTIES
#
data_model['properties'] = []
for db_prop in FRP_Property.objects.all():
property = { 'category' : db_prop.category.name,
'sold' : db_prop.sold,
'name': db_prop.name,
'areaSQM': db_prop.areaSQM,
'description': [],
'shortLocation': db_prop.shortLocation,
'longLocation': db_prop.longLocation,
'latitude': float(str(db_prop.latitude)) if db_prop.latitude is not None else None,
'longitude': float(str(db_prop.longitude)) if db_prop.longitude is not None else None,
'images' : [],
'subproperties' : []
}
# description
#
if db_prop.description is not None:
property['description'] = db_prop.description.split('\n')
db_images = FRP_PropertyImage.objects.filter(property=db_prop)
primary_db_images = [x for x in db_images if x.isprimary == True]
secondary_db_images = [x for x in db_images if x.isprimary == False]
ordered_db_images = []
ordered_db_images.extend(primary_db_images)
ordered_db_images.extend(secondary_db_images)
for db_image in ordered_db_images:
if (db_image.file.name is None) or (len(db_image.file.name) == 0):
continue
source = None
dest_path = None
source = settings.MEDIA_ROOT + '/' + db_image.file.name
dest_path = '/'.join(db_image.file.name.split('/')[1:])
db_binary_upload = BinaryUpload(source_path=source, destination_path=dest_path, site=db_site)
db_binary_uploads.append(db_binary_upload)
property['images'].append(dest_path)
# sub property
#
for db_sub_property in FRP_SubProperty.objects.filter(property=db_prop):
sub_property = { 'name' : db_sub_property.name,
'areaSQM' : db_sub_property.areaSQM,
'description' : [],
'sold' : db_sub_property.sold,
'images' : []
}
# description
#
if db_sub_property.description is not None:
sub_property['description'] = db_sub_property.description.split('\n')
db_images = FRP_SubPropertyImage.objects.filter(subproperty=db_sub_property)
primary_db_images = [x for x in db_images if x.isprimary == True]
secondary_db_images = [x for x in db_images if x.isprimary == False]
ordered_db_images = []
ordered_db_images.extend(primary_db_images)
ordered_db_images.extend(secondary_db_images)
for db_image in ordered_db_images:
if (db_image.file.name is None) or (len(db_image.file.name) == 0):
continue
source = None
dest_path = None
source = settings.MEDIA_ROOT + '/' + db_image.file.name
dest_path = '/'.join(db_image.file.name.split('/')[1:])
db_binary_upload = BinaryUpload(source_path=source, destination_path=dest_path, site=db_site)
db_binary_uploads.append(db_binary_upload)
# append sub-property images to main property image list
property['images'].append(dest_path)
sub_property['images'].append(dest_path)
property['subproperties'].append(sub_property)
data_model['properties'].append(property)
return SiteModel(data_model,
db_text_uploads=db_text_uploads,
db_binary_uploads=db_binary_uploads)
SiteInterface.register(
SITE_NAME,
SITE_TOKEN,
SITE_USER_NAMES,
USER_EDITABLE_MODEL_NAMES,
populate_model_constants,
render_site_model,
populate_datamodel
) |
clld/tsammalex | tsammalex/interfaces.py | Python | apache-2.0 | 139 | 0 | from zope.interface import Interface
class IEcoregion(Interface):
"""marker
"""
class IImage(Interface):
"""marker
"""
|
BillyAbildgaard/RackHD | test/fit_tests/tests/switch/test_rackhd11_switch_pollers.py | Python | apache-2.0 | 13,050 | 0.004291 | '''
Copyright 2016, EMC, Inc.
Author(s):
FIT test script template
'''
import sys
import subprocess
import pprint
# set path to common libraries
sys.path.append(subprocess.check_output("git rev-parse --show-toplevel", shell=True).rstrip("\n") + "/test/fit_tests/common")
import fit_common
import test_api_utils
# LOCAL
NODELIST = []
def get_switches():
# returns a list with valid node IDs that match ARGS_LIST.sku in 'Name' or 'Model' field
# and matches node BMC MAC address in ARGS_LIST.obmmac if specified
# Otherwise returns list of all IDs that are not 'Unknown' or 'Unmanaged'
nodelist = []
# check if user specified a single nodeid to run against
# user must know the nodeid and any check for a valid nodeid is skipped
nodeid = fit_common.ARGS_LIST['nodeid']
if nodeid != 'None':
nodelist.append(nodeid)
else:
catalog = fit_common.rackhdapi('/api/1.1/nodes')
for nodeentry in catalog['json']:
if nodeentry['type'] == 'switch':
nodelist.append(nodeentry['id'])
return nodelist
NODELIST = get_switches()
def get_rackhd_nodetype(nodeid):
nodetype = ""
# get the node info
mondata = fit_common.rackhdapi("/api/1.1/nodes/" + nodeid)
if mondata['status'] != 200:
print "Incorrect HTTP return code on nodeid, expected 200, received: {}".format(mondata['status'])
else:
# get the sku id contained in the node
sku = mondata['json'].get("sku")
if sku:
skudata = fit_common.rackhdapi("/api/1.1/skus/" + sku)
if skudata['status'] != 200:
print "Incorrect HTTP return code on sku, expected 200, received: {}".format(skudata['status'])
else:
nodetype = mondata['json'].get("name")
else:
nodetype = mondata['json'].get("name")
print "nodeid {} did not return a valid sku in get_rackhd_nodetype".format(nodeid)
return nodetype
from nose.plugins.attrib import attr
@attr(all=True, regression=True, smoke=True)
@fit_common.unittest.skipIf(NODELIST == [],"No switches defined, skipping test.")
class rackhd11_switch_pollers(fit_common.unittest.TestCase):
def test_get_id_pollers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display the poller data per node."
print "\t{0}".format(msg)
for node in NODELIST:
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
for item in mon_data['json']:
# check required fields
self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
for subitem in ['node', 'config', 'createdAt', 'id', 'name', 'config']:
                self.assertIn(subitem, item, subitem + ' field error')
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
            poll_data = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print fit_common.json.dumps(poll_data['json'], indent=4)
def test_verify_poller_headers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Verify header data reported on the poller"
print "\t{0}".format(msg)
for node in NODELIST:
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
nodetype = get_rackhd_nodetype(node)
if fit_common.VERBOSITY >= 2:
print "\nNode: {} Type: {}".format(node, nodetype)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poller_data = test_api_utils.get_poller_data_by_id(poller_id)
if fit_common.VERBOSITY >= 3:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print fit_common.json.dumps(poller_data, indent=4)
def test_verify_poller_data(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Check number of polls being kept for poller ID"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poller_data = test_api_utils.get_poller_data_by_id(poller_id)
poll_len = len(poller_data)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print "Number of polls for "+ str(poller_id) + ": " + str(len(poller_data))
self.assertLessEqual(poll_len, 10, 'Number of cached polls should not exceed 10')
def test_get_current_poller_data(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display most current data from poller"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
mondata = fit_common.rackhdapi(url_cmd=monurl)
if fit_common.VERBOSITY >= 2:
print fit_common.json.dumps(mondata, indent=4)
def test_get_poller_status_timestamp(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display status and timestamp from current poll"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
mondata = fit_common.rackhdapi(url_cmd=monurl)
print "Return status", mondata['status']
if mondata['status'] == 200:
if fit_common.VERBOSITY >= 2:
print "Timestamp:", mondata['json'][0]['timestamp']
print fit_common.json.dumps(mondata['json'][0], indent=4)
def test_verify_poller_error_counter(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Check for Poller Errors"
            print "\t{0}".format(msg)
ooici/marine-integrations | mi/dataset/driver/ctdpf_ckl/wfp_sio_mule/driver.py | Python | bsd-2-clause | 7,545 | 0.004374 | """
@package mi.dataset.driver.ctdpf_ckl.wfp_sio_mule.driver
@file marine-integrations/mi/dataset/driver/ctdpf_ckl/wfp_sio_mule/driver.py
@author cgoodrich
@brief Driver for the ctdpf_ckl_wfp_sio_mule
Release notes:
Initial Release
"""
__author__ = 'cgoodrich'
__license__ = 'Apache 2.0'
import os
from mi.core.log import get_logger
log = get_logger()
from mi.core.common import BaseEnum
from mi.core.exceptions import ConfigurationException
from mi.dataset.driver.sio_mule.sio_mule_driver import SioMuleDataSetDriver
from mi.dataset.dataset_driver import HarvesterType
from mi.dataset.harvester import SingleFileHarvester, SingleDirectoryHarvester
from mi.dataset.dataset_driver import DataSetDriverConfigKeys
from mi.dataset.parser.ctdpf_ckl_wfp_particles import CtdpfCklWfpRecoveredDataParticle,\
CtdpfCklWfpRecoveredMetadataParticle
from mi.dataset.parser.ctdpf_ckl_wfp_sio_mule import CtdpfCklWfpSioMuleParser, \
CtdpfCklWfpSioMuleDataParticle, \
CtdpfCklWfpSioMuleMetadataParticle
from mi.dataset.parser.ctdpf_ckl_wfp import CtdpfCklWfpParser
class DataTypeKey(BaseEnum):
CTDPF_CKL_WFP = 'ctdpf_ckl_wfp'
CTDPF_CKL_WFP_SIO_MULE = 'ctdpf_ckl_wfp_sio_mule'
INSTRUMENT_DATA_PARTICLE_CLASS = 'instrument_data_particle_class'
METADATA_PARTICLE_CLASS = 'metadata_particle_class'
class CtdpfCklWfpDataSetDriver(SioMuleDataSetDriver):
def __init__(self,
config,
memento,
data_callback,
state_callback,
event_callback,
exception_callback):
# initialize the possible types of harvester/parser pairs for this driver
data_keys = [DataTypeKey.CTDPF_CKL_WFP, DataTypeKey.CTDPF_CKL_WFP_SIO_MULE]
# link the data keys to the harvester type, single or multiple file harvester
harvester_type = {DataTypeKey.CTDPF_CKL_WFP: HarvesterType.SINGLE_DIRECTORY,
DataTypeKey.CTDPF_CKL_WFP_SIO_MULE: HarvesterType.SINGLE_FILE}
super(CtdpfCklWfpDataSetDriver, self).__init__(config,
memento,
data_callback,
state_callback,
event_callback,
exception_callback,
data_keys,
harvester_type)
@classmethod
def stream_config(cls):
return [CtdpfCklWfpRecoveredDataParticle.type(),
CtdpfCklWfpRecoveredMetadataParticle.type(),
CtdpfCklWfpSioMuleMetadataParticle.type(),
CtdpfCklWfpSioMuleDataParticle.type()]
def _build_parser(self, parser_state, infile, data_key=None):
"""
Build and return the parser
"""
# Default the parser to None
parser = None
config = self._parser_config.get(data_key)
#
# If the key is CTDPF_CKL_WFP, build the ctdpf_ckl_wfp parser and
# provide a config that includes the specific recovered particle types.
#
if data_key == DataTypeKey.CTDPF_CKL_WFP:
log.debug('CAG DRIVER - build parser for %s. State is %s', data_key, parser_state)
config.update({
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.ctdpf_ckl_wfp_particles',
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
INSTRUMENT_DATA_PARTICLE_CLASS: CtdpfCklWfpRecoveredDataParticle,
METADATA_PARTICLE_CLASS: CtdpfCklWfpRecoveredMetadataParticle
}
})
parser = CtdpfCklWfpParser(
config,
parser_state,
infile,
lambda state, ingested: self._save_parser_state(state, data_key, ingested),
self._data_callback,
self._sample_exception_callback,
os.path.getsize(infile.name))
#
# If the key is CTDPF_CKL_WFP_SIO_MULE, build the ctdpf_ckl_wfp_sio_mule parser and
# provide a config that includes the specific telemetered particle types.
#
elif data_key == DataTypeKey.CTDPF_CKL_WFP_SIO_MULE:
log.debug('CAG DRIVER - build parser for %s. State is %s', data_key, parser_state)
config.update({
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.ctdpf_ckl_wfp_sio_mule',
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
INSTRUMENT_DATA_PARTICLE_CLASS: CtdpfCklWfpSioMuleDataParticle,
METADATA_PARTICLE_CLASS: CtdpfCklWfpSioMuleMetadataParticle
}
})
parser = CtdpfCklWfpSioMuleParser(
config,
parser_state,
infile,
                lambda state: self._save_parser_state(state, DataTypeKey.CTDPF_CKL_WFP_SIO_MULE),
self._data_callback,
self._sample_exception_callback
)
else:
raise ConfigurationException('Bad Configuration: %s - Failed to build ctdpf_ckl_wfp parser', config)
return parser
def _build_harvester(self, driver_state):
"""
Build and return the harvesters
"""
        harvesters = []  # list of harvesters to be returned
#
# Verify that the CTDPF_CKL_WFP harvester has been configured.
# If so, build the CTDPF_CKL_WFP harvester and add it to the
# list of harvesters.
#
if DataTypeKey.CTDPF_CKL_WFP in self._harvester_config:
log.debug('CAG DRIVER - build harvester for %s', driver_state[DataTypeKey.CTDPF_CKL_WFP])
harvester = SingleDirectoryHarvester(
self._harvester_config.get(DataTypeKey.CTDPF_CKL_WFP),
driver_state[DataTypeKey.CTDPF_CKL_WFP],
lambda filename: self._new_file_callback(filename, DataTypeKey.CTDPF_CKL_WFP),
lambda modified: self._modified_file_callback(modified, DataTypeKey.CTDPF_CKL_WFP),
self._exception_callback
)
if harvester is not None:
harvesters.append(harvester)
else:
log.warning('CTDPF_CKL_WFP HARVESTER NOT BUILT')
#
# Verify that the CTDPF_CKL_WFP_SIO_MULE harvester has been configured.
# If so, build the CTDPF_CKL_WFP_SIO_MULE harvester and add it to the
# list of harvesters.
#
if DataTypeKey.CTDPF_CKL_WFP_SIO_MULE in self._harvester_config:
log.debug('CAG DRIVER - build harvester for %s', driver_state[DataTypeKey.CTDPF_CKL_WFP_SIO_MULE])
harvester = SingleFileHarvester(
self._harvester_config.get(DataTypeKey.CTDPF_CKL_WFP_SIO_MULE),
driver_state[DataTypeKey.CTDPF_CKL_WFP_SIO_MULE],
lambda file_state: self._file_changed_callback(file_state, DataTypeKey.CTDPF_CKL_WFP_SIO_MULE),
self._exception_callback
)
if harvester is not None:
harvesters.append(harvester)
else:
log.warning('CTDPF_CKL_WFP_SIO_MULE HARVESTER NOT BUILT')
return harvesters
|
h-2/seqan | core/apps/razers2/tests/run_tests.py | Python | bsd-3-clause | 12,963 | 0.003857 | #!/usr/bin/env python
"""Execute the tests for the razers2 program.
The golden test outputs are generated by the script generate_outputs.sh.
You have to give the root paths to the source and the binaries as arguments to
the program. These are the paths to the directory that contains the 'projects'
directory.
Usage: run_tests.py SOURCE_ROOT_PATH BINARY_ROOT_PATH
"""
import logging
import os.path
import sys
# Automagically add util/py_lib to PYTHONPATH environment variable.
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..',
'..', '..', 'util', 'py_lib'))
sys.path.insert(0, path)
import seqan.app_tests as app_tests
def main(source_base, binary_base):
"""Main entry point of the script."""
print 'Executing test for razers2'
print '==========================='
print
ph = app_tests.TestPathHelper(
source_base, binary_base,
'core/apps/razers2/tests') # tests dir
# ============================================================
# Auto-detect the binary path.
# ============================================================
path_to_program = app_tests.autolocateBinary(
binary_base, 'core/apps/razers2', 'razers2')
# ============================================================
# Built TestConf list.
# ============================================================
# Build list with TestConf objects, analoguely to how the output
# was generated in generate_outputs.sh.
conf_list = []
# ============================================================
# Run Adeno Single-End Tests
# ============================================================
# We run the following for all read lengths we have reads for.
for rl in [36, 100]:
# Run with default options.
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1.stdout' % rl),
args=['--low-memory',
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1.razers' % rl)],
to_diff=[(ph.inFile('se-adeno-reads%d_1.razers' % rl),
ph.outFile('se-adeno-reads%d_1.razers' % rl)),
(ph.inFile('se-adeno-reads%d_1.stdout' % rl),
ph.outFile('se-adeno-reads%d_1.stdout' % rl))])
conf_list.append(conf)
# Allow indels.
conf = app_tests.TestConf(
            program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id.stdout' % rl),
args=['--low-memory', '-id',
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id.razers' % rl)],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id.razers' % rl),
                      ph.outFile('se-adeno-reads%d_1-id.razers' % rl)),
(ph.inFile('se-adeno-reads%d_1-id.stdout' % rl),
ph.outFile('se-adeno-reads%d_1-id.stdout' % rl))])
conf_list.append(conf)
# Compute forward/reverse matches only.
for o in ['-r', '-f']:
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id%s.stdout' % (rl, o)),
args=['--low-memory', '-id', o,
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id%s.razers' % (rl, o))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id%s.razers' % (rl, o)),
ph.outFile('se-adeno-reads%d_1-id%s.razers' % (rl, o))),
(ph.inFile('se-adeno-reads%d_1-id%s.stdout' % (rl, o)),
ph.outFile('se-adeno-reads%d_1-id%s.stdout' % (rl, o)))])
conf_list.append(conf)
# Compute with different identity rates.
for i in range(90, 101):
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id-i%d.stdout' % (rl, i)),
args=['--low-memory', '-id', '-i', str(i),
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id-i%d.razers' % (rl, i))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id-i%d.razers' % (rl, i)),
ph.outFile('se-adeno-reads%d_1-id-i%d.razers' % (rl, i))),
(ph.inFile('se-adeno-reads%d_1-id-i%d.stdout' % (rl, i)),
ph.outFile('se-adeno-reads%d_1-id-i%d.stdout' % (rl, i)))])
conf_list.append(conf)
# Compute with different output formats.
for suffix in ['razers', 'fa', 'eland', 'gff', 'sam', 'afg']:
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id.%s.stdout' % (rl, suffix)),
args=['--low-memory', '-id',
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id.%s' % (rl, suffix))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id.%s' % (rl, suffix)),
ph.outFile('se-adeno-reads%d_1-id.%s' % (rl, suffix))),
(ph.inFile('se-adeno-reads%d_1-id.%s.stdout' % (rl, suffix)),
ph.outFile('se-adeno-reads%d_1-id.%s.stdout' % (rl, suffix)))])
conf_list.append(conf)
# Compute with different sort orders.
for so in [0, 1]:
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id-so%d.stdout' % (rl, so)),
args=['--low-memory', '-id', '-so', str(so),
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id-so%d.razers' % (rl, so))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id-so%d.razers' % (rl, so)),
ph.outFile('se-adeno-reads%d_1-id-so%d.razers' % (rl, so))),
(ph.inFile('se-adeno-reads%d_1-id-so%d.stdout' % (rl, so)),
ph.outFile('se-adeno-reads%d_1-id-so%d.stdout' % (rl, so)))])
conf_list.append(conf)
# ============================================================
# Run Adeno Paired-End Tests
# ============================================================
# We run the following for all read lengths we have reads for.
for rl in [36, 100]:
# Run with default options.
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('pe-adeno-reads%d_2.stdout' % rl),
args=['--low-memory',
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
ph.inFile('adeno-reads%d_2.fa' % rl),
'-o', ph.outFile('pe-adeno-reads%d_2.razers' % rl)],
to_diff=[(ph.inFile('pe-adeno-reads%d_2.razers' % rl),
ph.outFile('pe-adeno-reads%d_2.razers' % rl)),
(ph.inFile('pe-adeno-reads%d_2.stdout' % rl),
ph.outFile('pe-adeno-reads%d_2.stdout' % rl))])
conf_list.append(conf)
# Allow indels.
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('pe-adeno-reads%d_2-id.stdout' % rl),
args=['--low-memory', '-id',
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
ph.inFile('adeno-reads%d_2.fa' % rl),
'-o', ph.outFile('pe-adeno-reads%d_2-id.razers' % rl)],
|
scottynomad/spoonerist | spoonerist/__init__.py | Python | mit | 65 | 0 | from spoonerist.data import pairs
from spoonerist.app import app
|
GabrieleNunez/fofix | fofix/game/Dialogs.py | Python | gpl-2.0 | 87,557 | 0.01125 | #####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# 2008 myfingershurt #
# 2008 Glorandwarf #
# 2008 ShiekOdaSandz #
# 2008 QQStarS #
# 2008 Blazingamer #
# 2008 evilynux <evilynux@gmail.com> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
"""A bunch of dialog functions for interacting with the user."""
from __future__ import with_statement
import fnmatch
import math
import os
import pygame
from OpenGL.GL import *
from fofix.core.View import Layer, BackgroundLayer
from fofix.core.Input import KeyListener
from fofix.core.Unicode import unicodify
from fofix.core.Image import drawImage
from fofix.game.Credits import Credits
from fofix.core.constants import *
from fofix.core.Language import _
from fofix.game.Menu import Menu
from fofix.core import Microphone
from fofix.core import Log
from fofix.core import Player
from fofix.core import Config
#MFH - for loading phrases
def wrapCenteredText(font, pos, text, rightMargin = 1.0, scale = 0.002, visibility = 0.0, linespace = 1.0, allowshadowoffset = False, shadowoffset = (.0022, .0005)):
"""
Wrap a piece of text inside given margins.
@param pos: (x, y) tuple, x defines the centerline
@param text: Text to wrap
@param rightMargin: Right margin
@param scale: Text scale
@param visibility: Visibility factor [0..1], 0 is fully visible
"""
x, y = pos
#MFH: rewriting WrapCenteredText function to properly wrap lines in a centered fashion around a defined centerline (x)
sentence = ""
for n, word in enumerate(text.split(" ")):
w, h = font.getStringSize(sentence + " " + word, scale = scale)
if x + (w/2) > rightMargin or word == "\n":
w, h = font.getStringSize(sentence, scale = scale)
glPushMatrix()
glRotate(visibility * (n + 1) * -45, 0, 0, 1)
if allowshadowoffset:
font.render(sentence, (x - (w/2), y + visibility * n), scale = scale, shadowoffset = shadowoffset)
else:
font.render(sentence, (x - (w/2), y + visibility * n), scale = scale)
glPopMatrix()
sentence = word
y += h * linespace
else:
if sentence == "" or sentence == "\n":
sentence = word
else:
sentence = sentence + " " + word
else:
w, h = font.getStringSize(sentence, scale = scale)
glPushMatrix()
glRotate(visibility * (n + 1) * -45, 0, 0, 1)
if allowshadowoffset:
font.render(sentence, (x - (w/2), y + visibility * n), scale = scale, shadowoffset = shadowoffset)
else:
font.render(sentence, (x - (w/2), y + visibility * n), scale = scale)
glPopMatrix()
y += h * linespace
return (x, y)
def wrapText(font, pos, text, rightMargin = 0.9, scale = 0.002, visibility = 0.0):
"""
Wrap a piece of text inside given margins.
@param pos: (x, y) tuple, x defines the left margin
@param text: Text to wrap
@param rightMargin: Right margin
@param scale: Text scale
@param visibility: Visibility factor [0..1], 0 is fully visible
"""
x, y = pos
w = h = 0
space = font.getStringSize(" ", scale = scale)[0]
# evilynux - No longer requires "\n" to be in between spaces
for n, sentence in enumerate(text.split("\n")):
y += h
x = pos[0]
if n == 0:
y = pos[1]
for n, word in enumerate(sentence.strip().split(" ")):
w, h = font.getStringSize(word, scale = scale)
if x + w > rightMargin:
x = pos[0]
y += h
glPushMatrix()
glRotate(visibility * (n + 1) * -45, 0, 0, 1)
font.render(word, (x, y + visibility * n), scale = scale)
glPopMatrix()
x += w + space
return (x - space, y)
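
# --- Editor's addition: hedged usage sketch, not part of the original file. ---
# Both wrappers return the (x, y) pen position after the last rendered word,
# so callers can chain text blocks. The font object and coordinates here are
# assumptions for illustration only.
def _example_wrapped_blocks(font):
    x, y = wrapText(font, (0.1, 0.2), "left-aligned body text ...",
                    rightMargin=0.9, scale=0.002)
    # continue below the first block, centered around x = 0.5
    wrapCenteredText(font, (0.5, y + 0.05), "a centered caption",
                     rightMargin=0.9, scale=0.002)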
class MainDialog(Layer, KeyListener):
def __init__(self, engine):
self.engine = engine
self.fontDict = self.engine.data.fontDict
self.geometry = self.engine.view.geometry[2:4]
self.fontScreenBottom = self.engine.data.fontScreenBottom
self.aspectRatio = self.engine.view.aspectRatio
self.drawStarScore = self.engine.drawStarScore
def shown(self):
self.engine.input.addKeyListener(self, priority = True)
def hidden(self):
self.engine.input.removeKeyListener(self)
class GetText(Layer, KeyListener):
"""Text input layer."""
def __init__(self, engine, prompt = "", text = ""):
self.text = text
self.prompt = prompt
self.engine = engine
self.time = 0
self.accepted = False
self.logClassInits = self.engine.config.get("game", "log_class_inits")
if self.logClassInits == 1:
Log.debug("GetText class init (Dialogs.py)...")
self.sfxVolume = self.engine.config.get("audio", "SFX_volume")
        self.drumHighScoreNav = self.engine.config.get("game", "drum_navigation") #MFH
def shown(self):
self.engine.input.addKeyListener(self, priority = True)
self.engine.input.enableKeyRepeat()
def hidden(self):
self.engine.input.removeKeyListener(self)
self.engine.input.disableKeyRepeat()
def keyPressed(self, key, unicode):
self.time = 0
        c = self.engine.input.controls.getMapping(key)
if key == pygame.K_BACKSPACE and not self.accepted:
self.text = self.text[:-1]
elif unicode and ord(unicode) > 31 and not self.accepted:
self.text += unicode
elif key == pygame.K_LSHIFT or key == pygame.K_RSHIFT:
return True
elif (c in Player.menuYes or key == pygame.K_RETURN) and not self.accepted: #MFH - adding support for green drum "OK"
self.engine.view.popLayer(self)
self.accepted = True
if c in Player.key1s:
self.engine.data.acceptSound.setVolume(self.sfxVolume) #MFH
self.engine.data.acceptSound.play()
elif (c in Player.menuNo or key == pygame.K_ESCAPE) and not self.accepted:
self.text = ""
self.engine.view.popLayer(self)
self.accepted = True
if c in Player.key2s:
self.engine.data.cancelSound.setVolume(self.sfxVolume) #MFH
self.engine.data.cancelSound.play()
elif c in Player.key4s and not self.accepted:
self.text = self.text[:-1] |
wangjiaxi/django-dynamic-forms | dynamic_forms/fields.py | Python | bsd-3-clause | 3,454 | 0.000579 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
from django.core.exceptions import ValidationError
from django.db import models
from django.forms import CheckboxSelectMultiple
from django.utils.text import capfirst
from dynamic_forms.forms import MultiSelectFormField
class TextMultiSelectField(six.with_metaclass(models.SubfieldBase,
models.TextField)):
# http://djangosnippets.org/snippets/2753/
widget = CheckboxSelectMultiple
def __init__(self, *args, **kwargs):
self.separate_values_by = kwargs.pop('separate_values_by', '\n')
super(TextMultiSelectField, self).__init__(*args, **kwargs)
def contribute_to_class(self, cls, name):
super(TextMultiSelectField, self).contribute_to_class(cls, name)
if self.choices:
def _func(self, fieldname=name):
return self.separate_values_by.join([
self.choices.get(value, value) for value in
getattr(self, fieldname)
])
setattr(cls, 'get_%s_display' % self.name, _func)
def deconstruct(self):
name, path, args, kwargs = super(TextMultiSelectField, self).deconstruct()
kwargs['separate_values_by'] = self.separate_values_by
if kwargs.get('separate_values_by', None) == '\n':
del kwargs['separate_values_by']
return name, path, args, kwargs
def formfield(self, **kwargs):
# don't call super, as that overrides default widget if it has choices
defaults = {
'choices': self.choices,
'help_text': self.help_text,
'label': capfirst(self.verbose_name),
'required': not self.blank,
'separate_values_by': self.separate_values_by,
}
if self.has_default():
defaults['initial'] = self.get_default()
defaults.update(kwargs)
defaults['widget'] = self.widget
return MultiSelectFormField(**defaults)
def get_db_prep_value(self, value, connection=None, prepared=False):
if isinstance(value, six.string_types):
return value
elif isinstance(value, list):
return self.separate_values_by.join(value)
def get_choices_default(self):
return self.get_choices(include_blank=False)
    def get_choices_selected(self, arr_choices=''):
if not arr_choices:
return False
chces = []
for choice_selected in arr_choices:
chces.append(choice_selected[0])
return chces
def get_prep_value(self, value):
return value
def to_python(self, value):
if value is not None:
return (value if isinstance(value, list) else
value.split(self.separate_values_by))
return []
    def validate(self, value, model_instance):
"""
:param callable convert: A callable to be applied for each choice
"""
arr_choices = self.get_choices_selected(self.get_choices_default())
for opt_select in value:
if opt_select not in arr_choices:
raise ValidationError(
self.error_messages['invalid_choice'] % value)
return
def value_to_string(self, obj):
value = self._get_val_from_obj(obj)
return self.get_db_prep_value(value)
def get_internal_type(self):
return "TextField"
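
# --- Editor's addition: hedged usage sketch, not part of the original module. ---
# A model declaration would look roughly like this; the model name and choices
# are made up. Selected values are stored joined by separate_values_by.
#
# class Survey(models.Model):
#     TOPPINGS = (('onion', 'Onion'), ('olive', 'Olive'))
#     toppings = TextMultiSelectField(choices=TOPPINGS, blank=True)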
|
ryandub/skew | skew/resources/aws/elb.py | Python | apache-2.0 | 1,155 | 0 | # Copyright (c) 2014 Scopely, Inc.
# Copyright (c) 2015 Mitch Garnaat
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from skew.resources.aws import AWSResource
class LoadBalancer(AWSResource):
class Meta(object):
service = 'elb'
type = 'loadbalancer'
enum_spec = ('describe_load_balancers',
'LoadBalancerDescriptions', None)
detail_spec = None
id = 'LoadBalancerName'
filter_name = 'LoadBalancerNames'
filter_type = 'list'
name = 'DNSName'
date = 'CreatedTime'
dimension = 'LoadBalancerName'
tags_spec = ('describe_tags', 'TagDescriptions[].Tags[]',
'LoadBalancerNames', 'id')
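
# --- Editor's addition: hedged illustration, not how skew itself is wired. ---
# A rough sketch of how an enum_spec like the one above could drive a boto3
# client call: invoke the named operation and read the listed response key.
def _example_enumerate(client):
    op, key, _ = LoadBalancer.Meta.enum_spec
    response = getattr(client, op)()  # e.g. client.describe_load_balancers()
    return response.get(key, [])      # the 'LoadBalancerDescriptions' list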
|
aaxelb/osf.io | osf/migrations/0006_add_jsonb_index_for_fileversions.py | Python | apache-2.0 | 765 | 0.001307 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-04-03 20:50
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0005_merge'),
]
operations = [
        migrations.RunSQL(
[
"""
CREATE INDEX fileversion_metadata_sha_arch_vault_index
ON osf_fileversion ((osf_fileversion.metadata -> 'sha256'), (osf_fileversion.metadata -> 'archive'), (
osf_fileversion.metadata -> 'vault'));
"""
],
[
"""
                DROP INDEX fileversion_metadata_sha_arch_vault_index;
"""
]
)
]
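
# --- Editor's addition: clarifying note, not part of the original migration. ---
# RunSQL's two positional lists above are (sql, reverse_sql); the keyword form
# is equivalent and more explicit:
#
# migrations.RunSQL(
#     sql=["CREATE INDEX ..."],
#     reverse_sql=["DROP INDEX ..."],
# )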
|
tengqm/senlin-container | senlin/tests/tempest_tests/config.py | Python | apache-2.0 | 1,284 | 0 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
service_available_group = cfg.OptGroup(name="service_available",
                                       title="Available OpenStack Services")
ServiceAvailableGroup = [
cfg.BoolOpt("senlin",
default=True,
                help="Whether or not senlin is expected to be available"),
]
clustering_group = cfg.OptGroup(name="clustering",
title="Clustering Service Options")
ClusteringGroup = [
cfg.StrOpt("catalog_type",
default="clustering",
help="Catalog type of the clustering service."),
cfg.IntOpt("wait_timeout",
default=60,
help="Waiting time for a specific status, in seconds.")
]
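
# --- Editor's addition: hedged usage sketch, not in the original config module. ---
# Tempest-style plugins register these groups and options on an oslo.config
# ConfigOpts instance roughly like this (the `conf` argument is an assumption):
def _example_register(conf):
    conf.register_group(service_available_group)
    conf.register_opts(ServiceAvailableGroup, group=service_available_group)
    conf.register_group(clustering_group)
    conf.register_opts(ClusteringGroup, group=clustering_group)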
|
jokajak/itweb | data/env/lib/python2.6/site-packages/distribute-0.6.14-py2.6.egg/setuptools/tests/test_packageindex.py | Python | gpl-3.0 | 3,722 | 0.002149 | """Package Index Tests
"""
# More would be better!
import sys
import os, shutil, tempfile, unittest, urllib2
import pkg_resources
import setuptools.package_index
from server import IndexServer
class TestPackageIndex(unittest.TestCase):
def test_bad_urls(self):
index = setuptools.package_index.PackageIndex()
url = 'http://127.0.0.1:0/nonesuch/test_package_index'
try:
v = index.open_url(url)
except Exception, v:
self.assert_(url in str(v))
else:
self.assert_(isinstance(v,urllib2.HTTPError))
# issue 16
# easy_install inquant.contentmirror.plone breaks because of a typo
# in its home URL
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
try:
v = index.open_url(url)
except Exception, v:
self.assert_(url in str(v))
else:
self.assert_(isinstance(v, urllib2.HTTPError))
def _urlopen(*args):
import httplib
raise httplib.BadStatusLine('line')
old_urlopen = urllib2.urlopen
urllib2.urlopen = _urlopen
url = 'http://example.com'
try:
try:
v = index.open_url(url)
except Exception, v:
self.assert_('line' in str(v))
else:
                raise AssertionError('Should have raised here!')
finally:
urllib2.urlopen = old_urlopen
# issue 20
url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
try:
index.open_url(url)
except Exception, v:
self.assert_('nonnumeric port' in str(v))
# issue #160
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
# this should not fail
url = 'http://example.com'
page = ('<a href="http://www.famfamfam.com]('
'http://www.famfamfam.com/">')
index.process_index(url, page)
def test_url_ok(self):
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
url = 'file:///tmp/test_package_index'
self.assert_(index.url_ok(url, True))
def test_links_priority(self):
"""
Download links from the pypi simple index should be used before
external download links.
http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
Usecase :
- someone uploads a package on pypi, a md5 is generated
- someone manually copies this link (with the md5 in the url) onto an
          external page accessible from the package page.
- someone reuploads the package (with a different md5)
- while easy_installing, an MD5 error occurs because the external link
is used
-> Distribute should use the link from pypi, not the external one.
"""
# start an index server
        server = IndexServer()
server.start()
index_url = server.base_url() + 'test_links_priority/simple/'
# scan a test index
pi = setuptools.package_index.PackageIndex(index_url)
requirement = pkg_resources.Requirement.parse('foobar')
pi.find_packages(requirement)
server.stop()
# the distribution has been found
self.assert_('foobar' in pi)
# we have only one link, because links are compared without md5
self.assert_(len(pi['foobar'])==1)
# the link should be from the index
self.assert_('correct_md5' in pi['foobar'][0].location)
|
jonaustin/advisoryscan | django/django/contrib/localflavor/jp/jp_prefectures.py | Python | mit | 2,057 | 0 | from django.utils.translation import gettext_lazy as gettext_lazy
JP_PREFECTURES = (
('hokkaido', gettext_lazy('Hokkaido'),),
('aomori', gettext_lazy('Aomori'),),
('iwate', gettext_lazy('Iwate'),),
('miyagi', gettext_lazy('Miyagi'),),
('akita', gettext_lazy('Akita'),),
('yamagata', gettext_lazy('Yamagata'),),
('fukushima', gettext_lazy('Fukushima'),),
('ibaraki', gettext_lazy('Ibaraki'),),
('tochigi', gettext_lazy('Tochigi'),),
('gunma', gettext_lazy('Gunma'),),
('saitama', gettext_lazy('Saitama'),),
('chiba', gettext_lazy('Chiba'),),
('tokyo', gettext_lazy('Tokyo'),),
('kanagawa', gettext_lazy('Kanagawa'),),
('yamanashi', gettext_lazy('Yamanashi'),),
('nagano', gettext_lazy('Nagano'),),
('niigata', gettext_lazy('Niigata'),),
('toyama', gettext_lazy('Toyama'),),
('ishikawa', gettext_lazy('Ishikawa'),),
('fukui', gettext_lazy('Fukui'),),
('gifu', gettext_lazy('Gifu'),),
('shizuoka', gettext_lazy('Shizuoka'),),
    ('aichi', gettext_lazy('Aichi'),),
('mie', gettext_lazy('Mie'),),
('shiga', gettext_lazy('Shiga'),),
('kyoto', gettext_lazy('Kyoto'),),
('osaka', gettext_lazy('Osaka'),),
('hyogo', gettext_lazy('Hyogo'),),
('nara', gettext_lazy('Nara'),),
('wakayama', gettext_lazy('Wakayama'),),
('tottori', gettext_lazy('Tottori'),),
    ('shimane', gettext_lazy('Shimane'),),
('okayama', gettext_lazy('Okayama'),),
('hiroshima', gettext_lazy('Hiroshima'),),
('yamaguchi', gettext_lazy('Yamaguchi'),),
('tokushima', gettext_lazy('Tokushima'),),
('kagawa', gettext_lazy('Kagawa'),),
('ehime', gettext_lazy('Ehime'),),
('kochi', gettext_lazy('Kochi'),),
('fukuoka', gettext_lazy('Fukuoka'),),
('saga', gettext_lazy('Saga'),),
('nagasaki', gettext_lazy('Nagasaki'),),
('kumamoto', gettext_lazy('Kumamoto'),),
('oita', gettext_lazy('Oita'),),
('miyazaki', gettext_lazy('Miyazaki'),),
('kagoshima', gettext_lazy('Kagoshima'),),
('okinawa', gettext_lazy('Okinawa'),),
)
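
# --- Editor's addition: hedged usage sketch, not part of the original module. ---
# The tuple already has Django's (value, label) choices shape, so a form field
# can consume it directly; the form below is illustrative only.
#
# from django import forms
# class AddressForm(forms.Form):
#     prefecture = forms.ChoiceField(choices=JP_PREFECTURES)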
|
besm6/micro-besm | doc/opcodes/opcode.py | Python | mit | 1,130 | 0.006195 | #!/usr/bin/python
#
# Manage JSON database of Micro-BESM opcodes.
#
import sys, json, codecs
# Check parameters.
if len(sys.argv) != 2:
print "Usage:"
print " opcode [option] file.json"
print "Options:"
print " TODO"
sys.exit(1)
opcode = [] # List of all opcodes
#
# Process the input file.
#
def main(filename):
    read_data(filename)
write_results("output.json")
#
# Load opcode[] from JSON file.
#
def read_data(filename):
global opcode
try:
file = open(filename)
opcode = json.load(file)
file.close()
except:
print "Fatal error: Cannot load file '" + filename + "'"
sys.exit(1)
print "Load file '"+filename+"':",
print "%d opcodes" % len(opcode)
#print "Opcodes:", opcode
#
# Write the data to another JSON file.
#
def write_results(filename):
file = codecs.open(filename, 'w', encoding="utf-8")
json.dump(opcode, file, indent=4, sort_keys=True, ensure_ascii=False)
file.close()
print "Write file %s: %d opcodes" % (filename, len(opcode))
if __name__ == "__main__":
main(sys.argv[1])
|
AstroTech/atlassian-python-api | atlassian/bitbucket/cloud/base.py | Python | apache-2.0 | 3,104 | 0.002255 | # coding=utf-8
from ..base import BitbucketBase
class BitbucketCloudBase(BitbucketBase):
    def __init__(self, url, *args, **kwargs):
"""
Init the rest api wrapper
:param url: string: The base url used for the rest api.
:param *args: list: The fixed arguments for the AtlassianRestApi.
:param **kwargs: dict: The keyword arguments for the AtlassianRestApi.
:return: nothing
        """
expected_type = kwargs.pop("expected_type", None)
super(BitbucketCloudBase, self).__init__(url, *args, **kwargs)
if expected_type is not None and not expected_type == self.get_data("type"):
raise ValueError("Expected type of data is [{}], got [{}].".format(expected_type, self.get_data("type")))
def get_link(self, link):
"""
Get a link from the data.
:param link: string: The link identifier
:return: The requested link or None if it isn't present
"""
links = self.get_data("links")
if links is None or link not in links:
return None
return links[link]["href"]
def _get_paged(
self, url, params=None, data=None, flags=None, trailing=None, absolute=False, paging_workaround=False
):
"""
Used to get the paged data
:param url: string: The url to retrieve
:param params: dict (default is None): The parameters
:param data: dict (default is None): The data
:param flags: string[] (default is None): The flags
:param trailing: bool (default is None): If True, a trailing slash is added to the url
:param absolute: bool (default is False): If True, the url is used absolute and not relative to the root
:param paging_workaround: bool (default is False): If True, the paging is done on our own because
of https://jira.atlassian.com/browse/BCLOUD-13806
:return: A generator object for the data elements
"""
if params is None:
params = {}
if paging_workaround:
params["page"] = 1
while True:
response = super(BitbucketCloudBase, self).get(
url,
trailing=trailing,
params=params,
data=data,
flags=flags,
absolute=absolute,
)
if len(response.get("values", [])) == 0:
return
for value in response["values"]:
yield value
if paging_workaround:
params["page"] += 1
else:
url = response.get("next")
if url is None:
break
# From now on we have absolute URLs with parameters
absolute = True
# Params are now provided by the url
params = {}
# Trailing should not be added as it is already part of the url
trailing = False
return
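
# --- Editor's addition: hedged usage sketch, not part of the original module. ---
# Subclasses typically wrap _get_paged() in public generators; the class name
# and relative URL below are illustrative, not real Bitbucket Cloud endpoints.
#
# class Projects(BitbucketCloudBase):
#     def each(self):
#         for project in self._get_paged("projects"):
#             yield project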
|
ESSolutions/ESSArch_Core | ESSArch_Core/api/forms/widgets.py | Python | gpl-3.0 | 605 | 0 | from django import forms
class MultipleTextWidget(forms.widgets.Widget):
template_name = 'django/forms/widgets/text.html'
def format_value(self, value):
"""Return selected values as a list."""
if value is None:
return []
if not isinstance(value, (tuple, list)):
value = [value]
return [str(v) if v is not None else '' for v in value]
def value_from_datadict(self, data, files, name):
        getter = data.get
try:
getter = data.getlist
except AttributeError:
pass
return getter(name)
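
# --- Editor's addition: hedged usage sketch, not part of the original module. ---
# The widget gathers every submitted value for one field name into a list, so
# it suits a field whose clean() accepts a list; the form below is made up.
#
# class KeywordForm(forms.Form):
#     keywords = forms.MultipleChoiceField(widget=MultipleTextWidget(),
#                                          choices=[('a', 'a'), ('b', 'b')])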
|
gmalmquist/pants | tests/python/pants_test/pantsd/test_process_manager.py | Python | apache-2.0 | 16,494 | 0.01352 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import errno
import os
import subprocess
import sys
from contextlib import contextmanager
import mock
import psutil
from pants.pantsd.process_manager import (ProcessGroup, ProcessManager, ProcessMetadataManager,
swallow_psutil_exceptions)
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_file_dump
from pants_test.base_test import BaseTest
PATCH_OPTS = dict(autospec=True, spec_set=True)
def fake_process(**kwargs):
proc = mock.create_autospec(psutil.Process, spec_set=True)
  [setattr(getattr(proc, k), 'return_value', v) for k, v in kwargs.items()]
return proc
class TestProcessGroup(BaseTest):
def setUp(self):
super(TestProcessGroup, self).setUp()
self.pg = ProcessGroup('test', metadata_base_dir=self.subprocess_dir)
def test_swallow_psutil_exceptions(self):
with swallow_psutil_exceptions():
      raise psutil.NoSuchProcess('test')
def test_iter_processes(self):
with mock.patch('psutil.process_iter', **PATCH_OPTS) as mock_process_iter:
mock_process_iter.return_value = [5, 4, 3, 2, 1]
items = [item for item in self.pg.iter_processes()]
self.assertEqual(items, [5, 4, 3, 2, 1])
def test_iter_processes_filtered(self):
with mock.patch('psutil.process_iter', **PATCH_OPTS) as mock_process_iter:
mock_process_iter.return_value = [5, 4, 3, 2, 1]
items = [item for item in self.pg.iter_processes(lambda x: x != 3)]
self.assertEqual(items, [5, 4, 2, 1])
def test_iter_instances(self):
with mock.patch('psutil.process_iter', **PATCH_OPTS) as mock_process_iter:
mock_process_iter.return_value = [
fake_process(name='a_test', pid=3, status=psutil.STATUS_IDLE),
fake_process(name='b_test', pid=4, status=psutil.STATUS_IDLE)
]
items = [item for item in self.pg.iter_instances()]
self.assertEqual(len(items), 2)
for item in items:
self.assertIsInstance(item, ProcessManager)
self.assertTrue('_test' in item.name)
class TestProcessMetadataManager(BaseTest):
NAME = '_test_'
TEST_KEY = 'TEST'
TEST_VALUE = '300'
TEST_VALUE_INT = 300
BUILDROOT = '/mock_buildroot/'
def setUp(self):
super(TestProcessMetadataManager, self).setUp()
self.pmm = ProcessMetadataManager(metadata_base_dir=self.subprocess_dir)
def test_maybe_cast(self):
self.assertIsNone(self.pmm._maybe_cast(None, int))
self.assertEqual(self.pmm._maybe_cast('3333', int), 3333)
self.assertEqual(self.pmm._maybe_cast('ssss', int), 'ssss')
def test_get_metadata_dir_by_name(self):
self.pmm = ProcessMetadataManager(metadata_base_dir=self.BUILDROOT)
self.assertEqual(self.pmm._get_metadata_dir_by_name(self.NAME),
os.path.join(self.BUILDROOT, self.NAME))
def test_maybe_init_metadata_dir_by_name(self):
with mock.patch('pants.pantsd.process_manager.safe_mkdir', **PATCH_OPTS) as mock_mkdir:
self.pmm._maybe_init_metadata_dir_by_name(self.NAME)
mock_mkdir.assert_called_once_with(
self.pmm._get_metadata_dir_by_name(self.NAME))
def test_readwrite_metadata_by_name(self):
with temporary_dir() as tmpdir, \
mock.patch('pants.pantsd.process_manager.get_buildroot', return_value=tmpdir):
self.pmm.write_metadata_by_name(self.NAME, self.TEST_KEY, self.TEST_VALUE)
self.assertEqual(
self.pmm.read_metadata_by_name(self.NAME, self.TEST_KEY),
self.TEST_VALUE
)
self.assertEqual(
self.pmm.read_metadata_by_name(self.NAME, self.TEST_KEY, int),
self.TEST_VALUE_INT
)
def test_deadline_until(self):
with self.assertRaises(self.pmm.Timeout):
self.pmm._deadline_until(lambda: False, timeout=.1)
def test_wait_for_file(self):
with temporary_dir() as td:
test_filename = os.path.join(td, 'test.out')
safe_file_dump(test_filename, 'test')
self.pmm._wait_for_file(test_filename, timeout=.1)
def test_wait_for_file_timeout(self):
with temporary_dir() as td:
with self.assertRaises(self.pmm.Timeout):
self.pmm._wait_for_file(os.path.join(td, 'non_existent_file'), timeout=.1)
def test_await_metadata_by_name(self):
with temporary_dir() as tmpdir, \
mock.patch('pants.pantsd.process_manager.get_buildroot', return_value=tmpdir):
self.pmm.write_metadata_by_name(self.NAME, self.TEST_KEY, self.TEST_VALUE)
self.assertEquals(
self.pmm.await_metadata_by_name(self.NAME, self.TEST_KEY, .1),
self.TEST_VALUE
)
def test_purge_metadata(self):
with mock.patch('pants.pantsd.process_manager.rm_rf') as mock_rm:
self.pmm.purge_metadata_by_name(self.NAME)
self.assertGreater(mock_rm.call_count, 0)
def test_purge_metadata_error(self):
with mock.patch('pants.pantsd.process_manager.rm_rf') as mock_rm:
mock_rm.side_effect = OSError(errno.EACCES, os.strerror(errno.EACCES))
with self.assertRaises(ProcessManager.MetadataError):
self.pmm.purge_metadata_by_name(self.NAME)
self.assertGreater(mock_rm.call_count, 0)
class TestProcessManager(BaseTest):
def setUp(self):
super(TestProcessManager, self).setUp()
# N.B. We pass in `metadata_base_dir` here because ProcessManager (itself a non-task/non-
# subsystem) depends on an initialized `GlobalOptions` subsystem for the value of
# `--pants-subprocessdir` in the default case. This is normally provided by subsystem
# dependencies in a typical pants run (and integration tests), but not in unit tests.
# Thus, passing this parameter here short-circuits the subsystem-reliant path for the
# purposes of unit testing without requiring adhoc subsystem initialization.
self.pm = ProcessManager('test', metadata_base_dir=self.subprocess_dir)
def test_process_properties(self):
with mock.patch.object(ProcessManager, '_as_process', **PATCH_OPTS) as mock_as_process:
mock_as_process.return_value = fake_process(name='name',
cmdline=['cmd', 'line'],
status='status')
self.assertEqual(self.pm.cmdline, ['cmd', 'line'])
self.assertEqual(self.pm.cmd, 'cmd')
def test_process_properties_cmd_indexing(self):
with mock.patch.object(ProcessManager, '_as_process', **PATCH_OPTS) as mock_as_process:
mock_as_process.return_value = fake_process(cmdline='')
self.assertEqual(self.pm.cmd, None)
def test_process_properties_none(self):
with mock.patch.object(ProcessManager, '_as_process', **PATCH_OPTS) as mock_asproc:
mock_asproc.return_value = None
self.assertEqual(self.pm.cmdline, None)
self.assertEqual(self.pm.cmd, None)
def test_get_subprocess_output(self):
test_str = '333'
self.assertEqual(self.pm.get_subprocess_output(['echo', '-n', test_str]), test_str)
def test_get_subprocess_output_interleaved(self):
cmd_payload = 'import sys; ' + 'sys.stderr.write("9"); sys.stdout.write("3"); ' * 3
cmd = [sys.executable, '-c', cmd_payload]
self.assertEqual(self.pm.get_subprocess_output(cmd), '333')
self.assertEqual(self.pm.get_subprocess_output(cmd, ignore_stderr=False), '939393')
self.assertEqual(self.pm.get_subprocess_output(cmd, stderr=subprocess.STDOUT), '939393')
def test_get_subprocess_output_oserror_exception(self):
with self.assertRaises(self.pm.ExecutionError):
self.pm.get_subprocess_output(['i_do_not_exist'])
def test_get_subprocess_output_failure_exception(self):
with self.assertRaises(self.pm.ExecutionError):
self.pm.get_subprocess_output(['false'])
def test_await_pid(self):
with mock.patch.object(ProcessManager, 'await_metadata_by_name') as mock_await:
self.pm.await_pid(5)
mock_await.assert_called_once_with(self.pm.name, 'pid', 5, mock.ANY)
def test_await_sock |
sniperganso/python-manilaclient | manilaclient/v2/share_instances.py | Python | apache-2.0 | 3,686 | 0 | # Copyright 2015 Mirantis inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manilaclient import api_versions
from manilaclient import base
from manilaclient.openstack.common.apiclient import base as common_base
class ShareInstance(common_base.Resource):
    """A share instance is a single occurrence of a share on a storage backend."""
def __repr__(self):
return "<Share: %s>" % self.id
def force_delete(self):
"""Delete the specified share ignoring its current state."""
self.manager.force_delete(self)
def reset_state(self, state):
"""Update the share with the provided state."""
self.manager.reset_state(self, state)
class ShareInstanceManager(base.ManagerWithFind):
"""Manage :class:`ShareInstances` resources."""
resource_class = ShareInstance
@api_versions.wraps("2.3")
def get(self, instance):
"""Get a share instance.
:param instance: either share object or text with its ID.
:rtype: :class:`ShareInstance`
"""
share_id = common_base.getid(instance)
return self._get("/share_instances/%s" % share_id, "share_instance")
@api_versions.wraps("2.3")
def list(self):
"""List all share instances."""
return self._list('/share_instances', 'share_instances')
def _action(self, action | , instance, info=None, **kwargs):
"""Perform a share instnace 'action'.
:param action: text with action name.
:param instance: either share object or text with its ID.
:param info: dict with data for specified 'action'.
:param kwargs: dict with data to be provided for action hooks.
"""
body = {action: info}
self.run_hooks(' | modify_body_for_action', body, **kwargs)
url = '/share_instances/%s/action' % common_base.getid(instance)
return self.api.client.post(url, body=body)
def _do_force_delete(self, instance, action_name="force_delete"):
"""Delete a share instance forcibly - share status will be avoided.
:param instance: either share instance object or text with its ID.
"""
return self._action(action_name, common_base.getid(instance))
@api_versions.wraps("2.3", "2.6")
def force_delete(self, instance):
return self._do_force_delete(instance, "os-force_delete")
@api_versions.wraps("2.7") # noqa
def force_delete(self, instance):
return self._do_force_delete(instance, "force_delete")
def _do_reset_state(self, instance, state, action_name):
"""Update the provided share instance with the provided state.
:param instance: either share object or text with its ID.
:param state: text with new state to set for share.
"""
return self._action(action_name, instance, {"status": state})
@api_versions.wraps("2.3", "2.6")
def reset_state(self, instance, state):
return self._do_reset_state(instance, state, "os-reset_status")
@api_versions.wraps("2.7") # noqa
def reset_state(self, instance, state):
return self._do_reset_state(instance, state, "reset_status")
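A sketch of driving this manager from client code; 'client' stands for an authenticated manila API client and is an assumption, while the get/reset_state/force_delete calls mirror the class above. The action name actually posted ("os-reset_status" vs. "reset_status") is selected by the @api_versions.wraps decorators from the negotiated microversion:

manager = ShareInstanceManager(client)   # constructor argument assumed
inst = manager.get('5ac4e4b1-0000-0000-0000-000000000000')
manager.reset_state(inst, 'error')       # POST .../action, {"reset_status": {"status": "error"}} on >= 2.7
manager.force_delete(inst)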
|
miloszz/DIRAC | Resources/Computing/ARCComputingElement.py | Python | gpl-3.0 | 10,864 | 0.030836 | ########################################################################
# File : ARCComputingElement.py
# Author : A.T.
########################################################################
""" ARC Computing Element
"""
__RCSID__ = "58c42fc (2013-07-07 22:54:57 +0200) Andrei Tsaregorodtsev <atsareg@in2p3.fr>"
import os
import stat
import tempfile
from types import StringTypes
from DIRAC import S_OK, S_ERROR
from DIRAC.Resources.Computing.ComputingElement import ComputingElement
from DIRAC.Core.Utilities.Grid import executeGridCommand
CE_NAME = 'ARC'
MANDATORY_PARAMETERS = [ 'Queue' ]
class ARCComputingElement( ComputingElement ):
#############################################################################
def __init__( self, ceUniqueID ):
""" Standard constructor.
"""
ComputingElement.__init__( self, ceUniqueID )
self.ceType = CE_NAME
self.submittedJobs = 0
self.mandatoryParameters = MANDATORY_PARAMETERS
self.pilotProxy = ''
self.queue = ''
self.outputURL = 'gsiftp://localhost'
self.gridEnv = ''
self.ceHost = self.ceName
if 'Host' in self.ceParameters:
self.ceHost = self.ceParameters['Host']
if 'GridEnv' in self.ceParameters:
self.gridEnv = self.ceParameters['GridEnv']
#############################################################################
def _addCEConfigDefaults( self ):
"""Method to make sure all necessary Configuration Parameters are defined
"""
# First assure that any global parameters are loaded
ComputingElement._addCEConfigDefaults( self )
def __writeXRSL( self, executableFile ):
""" Create the JDL for submission
"""
workingDirectory = self.ceParameters['WorkingDirectory']
fd, name = tempfile.mkstemp( suffix = '.xrsl', prefix = 'ARC_', dir = workingDirectory )
diracStamp = os.path.basename( name ).replace( '.xrsl', '' ).replace( 'ARC_', '' )
xrslFile = os.fdopen( fd, 'w' )
xrsl = """
&(executable="%(executable)s")
(inputFiles=(%(executable)s "%(executableFile)s"))
(stdout="%(diracStamp)s.out")
(stderr="%(diracStamp)s.err")
(outputFiles=("%(diracStamp)s.out" "") ("%(diracStamp)s.err" ""))
""" % {
'executableFile':executableFile,
'executable':os.path.basename( executableFile ),
'diracStamp':diracStamp
}
xrslFile.write( xrsl )
xrslFile.close()
return name, diracStamp
def _reset( self ):
self.queue = self.ceParameters['Queue']
self.gridEnv = self.ceParameters['GridEnv']
#############################################################################
def submitJob( self, executableFile, proxy, numberOfJobs = 1 ):
""" Method to submit job
"""
self.log.verbose( "Executable file path: %s" % executableFile )
if not os.access( executableFile, 5 ):
      os.chmod( executableFile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH )
batchIDList = []
stampDict = {}
i = 0
while i < numberOfJobs:
i += 1
xrslName, diracStamp = self.__writeXRSL( executableFile )
cmd = ['arcsub', '-j', self.ceParameters['JobListFile'],
'-c', '%s' % self.ceHost, '%s' % xrslName ]
result = executeGridCommand( self.proxy, cmd, self.gridEnv )
os.unlink( xrslName )
if not result['OK']:
break
if result['Value'][0] != 0:
break
pilotJobReference = result['Value'][1].strip()
if pilotJobReference and pilotJobReference.startswith('Job submitted with jobid:'):
pilotJobReference = pilotJobReference.replace('Job submitted with jobid:', '').strip()
batchIDList.append( pilotJobReference )
stampDict[pilotJobReference] = diracStamp
else:
break
#os.unlink( executableFile )
if batchIDList:
result = S_OK( batchIDList )
result['PilotStampDict'] = stampDict
else:
      result = S_ERROR('No pilot references obtained from the ARC job submission')
return result
def killJob( self, jobIDList ):
""" Kill the specified jobs
"""
workingDirectory = self.ceParameters['WorkingDirectory']
fd, name = tempfile.mkstemp( suffix = '.list', prefix = 'KillJobs_', dir = workingDirectory )
jobListFile = os.fdopen( fd, 'w' )
jobList = list( jobIDList )
if type( jobIDList ) in StringTypes:
jobList = [ jobIDList ]
for job in jobList:
jobListFile.write( job+'\n' )
cmd = ['arckill', '-c', self.ceHost, '-i', name]
result = executeGridCommand( self.proxy, cmd, self.gridEnv )
os.unlink( name )
if not result['OK']:
return result
if result['Value'][0] != 0:
      return S_ERROR( 'Failed kill job: %s' % result['Value'][2] )
return S_OK()
#############################################################################
def getCEStatus( self ):
""" Method to return information on running and pending jobs.
"""
cmd = ['arcstat', '-c', self.ceHost, '-j', self.ceParameters['JobListFile'] ]
result = executeGridCommand( self.proxy, cmd, self.gridEnv )
resultDict = {}
if not result['OK']:
return result
if result['Value'][0] == 1 and result['Value'][1] == "No jobs\n":
result = S_OK()
result['RunningJobs'] = 0
result['WaitingJobs'] = 0
result['SubmittedJobs'] = 0
return result
if result['Value'][0]:
if result['Value'][2]:
return S_ERROR(result['Value'][2])
else:
return S_ERROR('Error while interrogating CE status')
if result['Value'][1]:
resultDict = self.__parseJobStatus( result['Value'][1] )
running = 0
waiting = 0
for ref in resultDict:
status = resultDict[ref]
if status == 'Scheduled':
waiting += 1
if status == 'Running':
running += 1
result = S_OK()
result['RunningJobs'] = running
result['WaitingJobs'] = waiting
result['SubmittedJobs'] = 0
return result
def __parseJobStatus( self, commandOutput ):
"""
"""
resultDict = {}
lines = commandOutput.split('\n')
ln = 0
while ln < len( lines ):
if lines[ln].startswith( 'Job:' ):
jobRef = lines[ln].split()[1]
ln += 1
line = lines[ln].strip()
stateARC = ''
if line.startswith( 'State' ):
stateARC = line.replace( 'State:','' ).strip()
line = lines[ln+1].strip()
exitCode = None
if line.startswith( 'Exit Code' ):
line = line.replace( 'Exit Code:','' ).strip()
exitCode = int( line )
# Evaluate state now
if stateARC in ['Accepted','Preparing','Submitting','Queuing','Hold']:
resultDict[jobRef] = "Scheduled"
elif stateARC in ['Running','Finishing']:
resultDict[jobRef] = "Running"
elif stateARC in ['Killed','Deleted']:
resultDict[jobRef] = "Killed"
elif stateARC in ['Finished','Other']:
if exitCode is not None:
if exitCode == 0:
resultDict[jobRef] = "Done"
else:
resultDict[jobRef] = "Failed"
else:
resultDict[jobRef] = "Failed"
elif stateARC in ['Failed']:
resultDict[jobRef] = "Failed"
else:
self.log.warn( "Unknown state %s for job %s" % ( stateARC, jobRef ) )
elif lines[ln].startswith( "WARNING: Job information not found:" ):
jobRef = lines[ln].replace( 'WARNING: Job information not found:', '' ).strip()
resultDict[jobRef] = "Scheduled"
ln += 1
return resultDict
def getJobStatus( self, jobIDList ):
""" G | et the status information for the given list of jobs
"""
workingDirectory = self.ceParameters['WorkingDirectory']
fd, name = tempfile.mkstemp( suffix = '.list', prefix = 'StatJobs_', dir = workingDirectory )
jobL | istFile = os.fdopen( fd, 'w' )
jobTmpList = list( jobIDList )
if type( jobIDList ) in StringTypes:
jobT |
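For reference, the XRSL template in __writeXRSL renders to something like the following; the executable path and stamp below are illustrative values only:

&(executable="pilot.sh")
(inputFiles=(pilot.sh "/tmp/dirac/pilot.sh"))
(stdout="vXbq42.out")
(stderr="vXbq42.err")
(outputFiles=("vXbq42.out" "") ("vXbq42.err" ""))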
Schamnad/cclib | test/method/testpopulation.py | Python | bsd-3-clause | 3,489 | 0.00172 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test the various population analyses (MPA, LPA, CSPA) in cclib"""
from __future__ import print_function
import sys
import os
import logging
import unittest
import numpy
sys.path.append("..")
from test_data import getdatafile
from cclib.method import MPA, LPA, CSPA
from cclib.parser import Gaussian
class GaussianMPATest(unittest.TestCase):
"""Mulliken Population Analysis test"""
def setUp(self):
self.data, self.logfile = getdatafile(Gaussian, "basicGaussian03", "dvb_un_sp.out")
self.analysis = MPA(self.data)
self.analysis.logger.setLevel(0)
self.analysis.calculate()
def testsumcharges(self):
"""Do the Mulliken charges sum up to the total formal charge?"""
formalcharge = sum(self.data.atomnos) - self.data.charge
totalpopulation = sum(self.analysis.fragcharges)
self.assertAlmostEqual(totalpopulation, formalcharge, delta=1.0e-3)
def testsumspins(self):
"""Do the Mulliken spins sum up to the total formal spin?"""
formalspin = self.data.homos[0] - self.data.homos[1]
totalspin = sum(self.analysis.fragspins)
self.assertAlmostEqual(totalspin, formalspin, delta=1.0e-3)
class GaussianLPATest(unittest.TestCase):
"""Lowdin Population Analysis test"""
def setUp(self):
self.data, self.logfile = getdatafile(Gaussian, "basicGaussian03", "dvb_un_sp.out")
self.analysis = L | PA(self.data)
self.analysis.logger.setLevel(0)
self.analysis.calculate()
def testsumcharges(self):
"""Do the Lowdin charges sum up to the total formal charge?"""
formalcharge = sum(self.data.atomnos) - self.data.charge
totalpopulation = sum(self.analysis.fragcharges)
self.assertAlmostEqual(totalpopulation, formalcharge, delta=0.001)
def testsumspi | ns(self):
"""Do the Lowdin spins sum up to the total formal spin?"""
formalspin = self.data.homos[0] - self.data.homos[1]
totalspin = sum(self.analysis.fragspins)
self.assertAlmostEqual(totalspin, formalspin, delta=1.0e-3)
class GaussianCSPATest(unittest.TestCase):
"""C-squared Population Analysis test"""
def setUp(self):
self.data, self.logfile = getdatafile(Gaussian, "basicGaussian03", "dvb_un_sp.out")
self.analysis = CSPA(self.data)
self.analysis.logger.setLevel(0)
self.analysis.calculate()
def testsumcharges(self):
"""Do the CSPA charges sum up to the total formal charge?"""
formalcharge = sum(self.data.atomnos) - self.data.charge
totalpopulation = sum(self.analysis.fragcharges)
self.assertAlmostEqual(totalpopulation, formalcharge, delta=1.0e-3)
def testsumspins(self):
"""Do the CSPA spins sum up to the total formal spin?"""
formalspin = self.data.homos[0] - self.data.homos[1]
totalspin = sum(self.analysis.fragspins)
self.assertAlmostEqual(totalspin, formalspin, delta=1.0e-3)
tests = [GaussianMPATest, GaussianLPATest, GaussianCSPATest]
if __name__ == "__main__":
for test in tests:
thistest = unittest.makeSuite(test)
unittest.TextTestRunner(verbosity=2).run(thistest)
|
pekimmoche/AudioExperiment | tests/test_wave_generator.py | Python | gpl-3.0 | 413 | 0.002421 | from unittest import TestCase
import matplotlib.pyplot as plt
from wave_generator import WaveGener | ator
class TestWaveGenerator(TestCase):
def test_sin(self):
length = 44100
sin1000 = WaveGenerator.sin(10000, 1000, 0, length)
self.assertEqual(0, sin1000[0])
self.assertEqual(9999.4290856537190, sin1000[44067])
self.assertEqual(len( | sin1000), length)
|
Metaswitch/calico-nova | nova/openstack/common/service.py | Python | apache-2.0 | 15,254 | 0 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Generic Node base class for all workers that run on hosts."""
import errno
import logging as std_logging
import os
import random
import signal
import sys
import time
try:
# Importing just the symbol here because the io module does not
# exist in Python 2.6.
from io import UnsupportedOperation # noqa
except ImportError:
# Python 2.6
UnsupportedOperation = None
import eventlet
from eventlet import event
from oslo.config import cfg
from nova.openstack.common import eventlet_backdoor
from nova.openstack.common._i18n import _LE, _LI, _LW
from nova.openstack.common import log as logging
from nova.openstack.common import systemd
from nova.openstack.common import threadgroup
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def _sighup_supported():
return hasattr(signal, 'SIGHUP')
def _is_daemon():
# The process group for a foreground process will match the
# process group of the controlling terminal. If those values do
# not match, or ioctl() fails on the stdout file handle, we assume
# the process is running in the background as a daemon.
# http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics
try:
is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno())
except OSError as err:
if err.errno == errno.ENOTTY:
# Assume we are a daemon because there is no terminal.
is_daemon = True
else:
raise
except UnsupportedOperation:
# Could not get the fileno for stdout, so we must be a daemon.
is_daemon = True
return is_daemon
def _is_sighup_and_daemon(signo):
if not (_sighup_supported() and signo == signal.SIGHUP):
# Avoid checking if we are a daemon, because the signal isn't
# SIGHUP.
return False
return _is_daemon()
def _signo_to_signame(signo):
signals = {signal.SIGTERM: 'SIGTERM',
signal.SIGINT: 'SIGINT'}
if _sighup_supported():
signals[signal.SIGHUP] = 'SIGHUP'
return signals[signo]
def _set_signals_handler(handler):
signal.signal(signal.SIGTERM, handler)
signal.signal(signal.SIGINT, handler)
if _sighup_supported():
signal.signal(signal.SIGHUP, handler)
class Launcher(object):
"""Launch one or more services and wait for them to complete."""
def __init__(self):
"""Initialize the service launcher.
:returns: None
"""
self.services = Services()
self.backdoor_port = eventlet_backdoor.initialize_if_enabled()
def launch_service(self, service):
"""Load and start the given service.
:param service: The service you would like to start.
:returns: None
"""
service.backdoor_port = self.backdoor_port
self.services.add(service)
def stop(self):
"""Stop all services which are currently running.
:returns: None
"""
self.services.stop()
def wait(self):
"""Waits until all services have been stopped, and then returns.
:returns: None
"""
self.services.wait()
def restart(self):
"""Reload config files and restart service.
:returns: None
"""
cfg.CONF.reload_config_files()
self.services.restart()
class SignalExit(SystemExit):
def __init__(self, signo, exccode=1):
super(SignalExit, self).__init__(exccode)
self.signo = signo
class ServiceLauncher(Launcher):
def _handle_signal(self, signo, frame):
# Allow the process to be killed again and die from natural causes
_set_signals_handler(signal.SIG_DFL)
raise SignalExit(signo)
def handle_signal(self):
_set_signals_handler(self._handle_signal)
def _wait_for_exit_or_signal(self, ready_callback=None):
status = None
signo = 0
LOG.debug('Full set of CONF:')
CONF.log_opt_values(LOG, std_logging.DEBUG)
try:
if ready_callback:
ready_callback()
super(ServiceLauncher, self).wait()
except SignalExit as exc:
signame = _signo_to_signame(exc.signo)
LOG.info(_LI('Caught %s, exiting'), signame)
status = exc.code
signo = exc.signo
except SystemExit as exc:
status = exc.code
finally:
self.stop()
return status, signo
def wait(self, ready_callback=None):
systemd.notify_once()
while True:
self.handle_signal()
| status, signo = self._wait_for_exit_or_signal(ready_callback)
if not _is_sighup_and_daemon(signo):
return status
self.restart()
class ServiceWrapper(object):
def __init__(self, service, workers):
self.service = service
self.workers = workers
self.children = set()
self.forktimes = []
class Proc | essLauncher(object):
def __init__(self, wait_interval=0.01):
"""Constructor.
:param wait_interval: The interval to sleep for between checks
of child process exit.
"""
self.children = {}
self.sigcaught = None
self.running = True
self.wait_interval = wait_interval
rfd, self.writepipe = os.pipe()
self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r')
self.handle_signal()
def handle_signal(self):
_set_signals_handler(self._handle_signal)
def _handle_signal(self, signo, frame):
self.sigcaught = signo
self.running = False
# Allow the process to be killed again and die from natural causes
_set_signals_handler(signal.SIG_DFL)
def _pipe_watcher(self):
# This will block until the write end is closed when the parent
# dies unexpectedly
self.readpipe.read()
LOG.info(_LI('Parent process has died unexpectedly, exiting'))
sys.exit(1)
def _child_process_handle_signal(self):
# Setup child signal handlers differently
def _sigterm(*args):
signal.signal(signal.SIGTERM, signal.SIG_DFL)
raise SignalExit(signal.SIGTERM)
def _sighup(*args):
signal.signal(signal.SIGHUP, signal.SIG_DFL)
raise SignalExit(signal.SIGHUP)
signal.signal(signal.SIGTERM, _sigterm)
if _sighup_supported():
signal.signal(signal.SIGHUP, _sighup)
# Block SIGINT and let the parent send us a SIGTERM
signal.signal(signal.SIGINT, signal.SIG_IGN)
def _child_wait_for_exit_or_signal(self, launcher):
status = 0
signo = 0
# NOTE(johannes): All exceptions are caught to ensure this
# doesn't fallback into the loop spawning children. It would
# be bad for a child to spawn more children.
try:
launcher.wait()
except SignalExit as exc:
signame = _signo_to_signame(exc.signo)
LOG.info(_LI('Child caught %s, exiting'), signame)
status = exc.code
signo = exc.signo
except SystemExit as exc:
status = exc.code
except BaseException:
LOG.exception(_LE('Unhandled exception'))
status = 2
finally:
launcher.stop()
return status, signo
def _child_process(self, service):
self._ |
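A minimal sketch of how these launchers are driven; MyService is hypothetical and stands for any object implementing the start/stop/wait interface that Services (defined later in this module) expects:

launcher = ServiceLauncher()
launcher.launch_service(MyService())   # MyService is an assumption
status = launcher.wait()               # blocks; SIGHUP restarts daemonized runs
sys.exit(status or 0)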
secnot/django-param-field | param_field/validators.py | Python | lgpl-3.0 | 1,141 | 0.003506 | from django.core.exceptions import ValidationError
from pyparsing import ParseBaseException
from django.core.validators import MaxLengthValidator
class ParamValidator(object):
def __init__(self, file_support=False):
self._file_support = file_support
def __call__(self, value):
# Imported here to avoid circular dependency
from .parser import parse_fields
try:
par = parse_fields(value, self._file_support)
| except ParseBaseException as err:
# Parser Error
raise ValidationError(str(err))
except ValueError as err:
# Error while creating Param
raise Val | idationError(str(err))
class ParamLengthValidator(MaxLengthValidator):
def clean(self, x):
return len(str(x))
class ParamFormFieldValidator(object):
def __init__(self, param):
self._param = param
def __call__(self, value):
try:
self._param.validate(value)
except ValueError as err:
raise ValidationError(err)
except TypeError as err:
raise ValidationError(err)
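A sketch of the parser-backed validator in use; the field-definition string must follow param_field's DSL, and the exact syntax shown here is an assumption:

from django.core.exceptions import ValidationError

validator = ParamValidator(file_support=False)
try:
    validator('width: Decimal-> max:100.0')   # DSL string is illustrative
except ValidationError as err:
    print(err)   # parse errors and Param construction errors both land here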
|
rohitw1991/latestadbwnf | core/doctype/workflow_state/test_workflow_state.py | Python | mit | 104 | 0.009615 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. |
# MIT License. See license.txt
test_records = [ | ] |
gardiac2002/sunshine | sunshine/ext/html.py | Python | mit | 207 | 0.009662 | __author__ = 'sen'
from bs4 import BeautifulSo | up
def prettify_html(html_str):
"""
    :param html_str: raw HTML markup as a string
    :return: the pretty-printed HTML produced by BeautifulSoup's prettify()
"""
soup = BeautifulSoup(html_str, 'html.parser')
return soup.pret | tify() |
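For example, a nested fragment comes back re-indented one space per level:

print(prettify_html('<div><p>hi</p></div>'))
# <div>
#  <p>
#   hi
#  </p>
# </div>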
tolimit/tp-qemu | qemu/tests/mq_change_qnum.py | Python | gpl-2.0 | 9,855 | 0.000304 | import logging
import re
import aexpect
from autotest.client import utils
from autotest.client.shared import error
from virttest import utils_net
from virttest import utils_test
from virttest import utils_misc
@error.context_aware
def run(test, params, env):
"""
    MULTI_QUEUE change queues number test
    1) Boot up VM and log in to the guest
    2) Check guest pci msi support and reset it as expected
    3) Enable the queues in guest
    4) Run bg_stress_test(pktgen, netperf or file copy) if needed
    5) Change queues number repeatedly while the stress test is running
6) Ping external host (local host, if external host not available)
:param test: QEMU test object.
:param params: Dictionary with the test parameters.
:param env: Dictionary with test environment.
"""
def change_queues_number(session, ifname, q_number, queues_status=None):
"""
Change queues number
"""
mq_set_cmd = "ethtool -L %s combined %d" % (ifname, q_number)
if not queues_status:
queues_status = get_queues_status(session, ifname)
if (q_number != queues_status[1] and q_number <= queues_status[0] and
q_number > 0):
expect_status = 0
else:
expect_status = 1
status, output = session.cmd_status_output(mq_set_cmd)
cur_queues_status = get_queues_status(session, ifname)
if status != expect_status:
err_msg = "Change queues number failed, "
err_msg += "current queues set is %s, " % queues_status[1]
err_msg += "max allow queues set is %s, " % queues_status[0]
err_msg += "when run cmd: '%s', " % mq_set_cmd
err_msg += "expect exit status is: %s, " % expect_status
err_msg += "output: '%s'" % output
raise error.TestFail(err_msg)
if not status and cur_queues_status == queues_status:
raise error.TestFail("params is right, but change queues failed")
elif status and cur_queues_status != queues_status:
raise error.TestFail("No need change queues number")
return [int(_) for _ in cur_queues_status]
def get_queues_status(session, ifname, timeout=240):
"""
Get queues status
"""
mq_get_cmd = "ethtool -l %s" % ifname
nic_mq_info = session.cmd_output(mq_get_cmd, timeout=timeout)
queues_reg = re.compile(r"Combined:\s+(\d)", re.I)
queues_info = queues_reg.findall(" ".join(nic_mq_info.splitlines()))
if len(queues_info) != 2:
err_msg = "Oops, get guest queues info failed, "
err_msg += "make sure your guest support MQ.\n"
err_msg += "Check cmd is: '%s', " % mq_get_cmd
err_msg += "Command output is: '%s'." % nic_mq_info
raise error.TestNAError(err_msg)
return [int(x) for x in queues_info]
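    # Illustrative `ethtool -l <ifname>` output that the parsing above is
    # written against; r"Combined:\s+(\d)" captures both Combined lines, so
    # the function returns [max_queues, current_queues], e.g. [4, 2] for:
    #
    #   Channel parameters for eth0:
    #   Pre-set maximums:
    #   Combined:       4
    #   Current hardware settings:
    #   Combined:       2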
def enable_multi_queues(vm):
sess = vm.wait_for_serial_login(timeout=login_timeout)
error.context("Enable multi queues in guest.", logging.info)
for nic_index, nic in enumerate(vm.virtnet):
ifname = utils_net.get_linux_ifname(sess, nic.mac)
queues = int(nic.queues)
change_queues_number(sess, ifname, queues)
    def ping_test(dest_ip, ping_time, lost_ratio, session=None):
        status, output = utils_test.ping(dest=dest_ip, timeout=ping_time,
                                         session=session)
        packets_lost = utils_test.get_loss_ratio(output)
        if packets_lost > lost_ratio:
            err = " %s%% packets lost during ping. " % packets_lost
            err += "Ping command log:\n %s" % "\n".join(output.splitlines()[-3:])
            raise error.TestFail(err)
error.context("Init guest and try to login", logging.info)
login_timeout = int(params.get("login_timeout", 360))
bg_stress_test = params.get("run_bgstress")
bg_stress_run_flag = params.get("bg_stress_run_flag")
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
vm.wait_for_login(timeout=login_timeout)
if params.get("pci_nomsi", "no") == "yes":
error.context("Disable pci msi in guest", logging.info)
utils_test.update_boot_option(vm, args_added="pci=nomsi")
vm.wait_for_login(timeout=login_timeout)
enable_multi_queues(vm)
session_serial = vm.wait_for_serial_login(timeout=login_timeout)
s_session = None
bg_ping = params.get("bg_ping")
b_ping_lost_ratio = int(params.get("background_ping_package_lost_ratio", 5))
f_ping_lost_ratio = int(params.get("final_ping_package_lost_ratio", 5))
guest_ip = vm.get_address()
b_ping_time = int(params.get("background_ping_time", 60))
f_ping_time = int(params.get("final_ping_time", 60))
bg_test = None
try:
ifnames = []
for nic_index, nic in enumerate(vm.virtnet):
ifname = utils_net.get_linux_ifname(session_serial,
vm.virtnet[nic_index].mac)
ifnames.append(ifname)
if bg_stress_test:
error.context("Run test %s background" % bg_stress_test,
logging.info)
stress_thread = ""
wait_time = float(params.get("wait_bg_time", 60))
env[bg_stress_run_flag] = False
stress_thread = utils.InterruptedThread(
utils_test.run_virt_sub_test, (test, params, env),
{"sub_type": bg_stress_test})
stress_thread.sta | rt()
if bg_stress_run_flag:
utils_misc.wait_for(lambda: env.get(bg_stress_run_flag),
| wait_time, 0, 5,
"Wait %s start background" % bg_stress_test)
if bg_ping == "yes":
error.context("Ping guest from host", logging.info)
args = (guest_ip, b_ping_time, b_ping_lost_ratio)
bg_test = utils.InterruptedThread(ping_test, args)
bg_test.start()
error.context("Change queues number repeatly", logging.info)
repeat_counts = int(params.get("repeat_counts", 10))
for nic_index, nic in enumerate(vm.virtnet):
if "virtio" not in nic['nic_model']:
continue
queues = int(vm.virtnet[nic_index].queues)
if queues == 1:
logging.info("Nic with single queue, skip and continue")
continue
ifname = ifnames[nic_index]
default_change_list = xrange(1, int(queues + 1))
change_list = params.get("change_list")
if change_list:
change_list = change_list.split(",")
else:
change_list = default_change_list
for repeat_num in xrange(1, repeat_counts + 1):
error.context("Change queues number -- %sth" % repeat_num,
logging.info)
try:
queues_status = get_queues_status(session_serial, ifname)
for q_number in change_list:
queues_status = change_queues_number(session_serial,
ifname,
int(q_number),
queues_status)
except aexpect.ShellProcessTerminatedError:
vm = env.get_vm(params["main_vm"])
session = vm.wait_for_serial_login(timeout=login_timeout)
session_serial = session
queues_status = get_queues_status(session_serial, ifname)
for q_number in change_list:
queues_status = change_queues_number(session_serial,
ifname,
int(q_number),
queues_status)
if params.get("ping_after_changing_queues", "yes") == "yes":
default_host = "www.redhat.com"
try:
|
miguelgrinberg/Flask-Intro | 07-ScalableStructure/app/forms.py | Python | mit | 197 | 0.030457 | from flask.ext.wtf import Form
from wtforms import TextField
from wtforms.validators import Required
class NameForm(Form):
name = TextField('What i | s your name?', validators = [ Required() ])
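A sketch of the form wired into a view, as in a typical Flask index route; the application import and template name are assumptions:

from flask import render_template
from app import app                      # assumed application package
from app.forms import NameForm

@app.route('/', methods=['GET', 'POST'])
def index():
    form = NameForm()
    if form.validate_on_submit():
        return 'Hello, %s!' % form.name.data
    return render_template('index.html', form=form)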
| |
gopythongo/gopythongo | src/py/gopythongo/versioners/static.py | Python | mpl-2.0 | 1,614 | 0.001859 | # -* encoding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# | License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import configargparse
from typing import Any, Type
from gopythongo.utils import highlight, ErrorMessage
from gopythongo.versioners import BaseVersioner
class StaticVersioner(BaseVersi | oner):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@property
def versioner_name(self) -> str:
return u"static"
def add_args(self, parser: configargparse.ArgumentParser) -> None:
gp_static = parser.add_argument_group("Static Versioner options")
gp_static.add_argument("--static-version", dest="static_version", default=None,
help="The static version string to use.")
def validate_args(self, args: configargparse.Namespace) -> None:
if not args.static_version:
raise ErrorMessage("Static versioner requires %s" % highlight("--static-version"))
@property
def can_read(self) -> bool:
return True
def read(self, args: configargparse.Namespace) -> str:
return args.static_version
def print_help(self) -> None:
print("Static Versioner\n"
"================\n"
"\n"
"The static Versioner simply reads a version string from the command-line\n"
"parameter %s.\n" % highlight("--static-version"))
versioner_class = StaticVersioner # type: Type[StaticVersioner]
|
aviau/python-pass | pypass/command.py | Python | gpl-3.0 | 13,445 | 0 | #
# Copyright (C) 2014 Alexandre Viau <alexandre@alexandreviau.net>
#
# This file is part of python-pass.
#
# python-pass is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# | (at yo | ur option) any later version.
#
# python-pass is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with python-pass. If not, see <http://www.gnu.org/licenses/>.
#
import os
import subprocess
import shutil
import sys
import tempfile
import click
import colorama
from pexpect import pxssh
from pypass.entry_type import EntryType
from pypass import PasswordStore
@click.group(invoke_without_command=True)
@click.option('--PASSWORD_STORE_DIR',
envvar='PASSWORD_STORE_DIR',
default=os.path.join(os.getenv("HOME"), ".password-store"),
type=click.Path(file_okay=False, resolve_path=True))
@click.option('--PASSWORD_STORE_GIT',
envvar='PASSWORD_STORE_GIT',
type=click.Path(file_okay=False, resolve_path=True),
default=None)
@click.option('--EDITOR',
envvar='EDITOR',
default='editor',
type=click.STRING)
@click.pass_context
def main(ctx, password_store_dir, password_store_git, editor):
# init does not need any of this.
if ctx.invoked_subcommand == "init":
return
# Prepare the config file
config = {
'password_store': PasswordStore(
path=password_store_dir,
git_dir=password_store_git
),
'editor': editor
}
ctx.obj = config
# By default, invoke ls
if ctx.invoked_subcommand is None:
ctx.invoke(ls)
@main.command(name='help')
@click.pass_context
def hlp(context):
    click.echo(context.parent.get_help())
@main.command()
@click.option('--path', '-p',
type=click.Path(file_okay=False, resolve_path=True),
default=os.path.join(os.getenv("HOME"), ".password-store"),
help='Where to create the password store.')
@click.option('--clone', '-c',
type=click.STRING,
help='Git url to clone')
@click.argument('gpg-id', type=click.STRING)
def init(path, clone, gpg_id):
PasswordStore.init(gpg_id, path, clone_url=clone)
click.echo("Password store initialized for %s." % gpg_id)
@main.command()
@click.option('--echo', '-e', is_flag=True)
@click.option('--multiline', '-m', is_flag=True)
@click.argument('path', type=click.STRING)
@click.pass_obj
def insert(config, path, echo, multiline):
if echo and multiline:
sys.exit('--echo and --multiline are mutually exclusive.')
if multiline:
click.echo(
'Enter contents of %s and press Ctrl+D when finished:\n' % path
)
password = ''.join(sys.stdin)
else:
password = click.prompt(
'Enter password for %s' % path,
hide_input=not echo
)
if not echo:
confirmation = click.prompt(
'Retype password for %s' % path,
hide_input=True
)
if confirmation != password:
sys.exit('Error: the entered passwords do not match.')
config['password_store'].insert_password(path, password)
if config['password_store'].uses_git:
config['password_store'].git_add_and_commit(
path + '.gpg',
message='Add given password for %s to store.' % path
)
@main.command()
@click.option('--no-symbols', '-n', is_flag=True)
@click.option('--clip', '-c', is_flag=True)
@click.option('--in-place', '-i', is_flag=True)
@click.argument('pass_name', type=click.STRING)
@click.argument(
'pass_length',
type=int,
required=False,
envvar='PASSWORD_STORE_GENERATED_LENGTH',
default=25
)
@click.pass_obj
def generate(config, pass_name, pass_length, no_symbols, clip, in_place):
symbols = not no_symbols
password = config['password_store'].generate_password(
pass_name,
digits=True,
symbols=symbols,
length=pass_length,
first_line_only=in_place
)
if config['password_store'].uses_git:
config['password_store'].git_add_and_commit(
pass_name + '.gpg',
message='%s generated password for %s.' % (
'Replace' if in_place else 'Add',
pass_name
)
)
if clip:
xclip = subprocess.Popen(
['xclip', '-selection', 'clipboard'],
stdin=subprocess.PIPE
)
xclip.stdin.write(password.encode())
xclip.stdin.close()
click.echo('Copied %s to clipboard.' % pass_name)
else:
click.echo(
'The generated password for %s is:\n%s' % (pass_name, password))
@main.command()
@click.pass_obj
@click.argument('path', type=click.STRING)
def edit(config, path):
if path in config['password_store'].get_passwords_list():
old_password = config['password_store'].get_decrypted_password(path)
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(old_password.encode())
temp_file.flush()
subprocess.call([config['editor'], temp_file.name])
temp_file.seek(0)
config['password_store'].insert_password(
path, temp_file.file.read().decode()
)
click.echo("%s was updated." % path)
if config['password_store'].uses_git:
config['password_store'].git_add_and_commit(
path + '.gpg',
message='Edited password for %s using %s'
% (path, config['editor'])
)
else:
click.echo("%s is not in the password store." % path)
@main.command()
@click.option('--clip', '-c', is_flag=True)
@click.argument('path', type=click.STRING)
@click.pass_obj
def show(config, path, clip):
if path not in config['password_store'].get_passwords_list():
click.echo('Error: %s is not in the password store.' % path)
sys.exit(1)
decrypted_password = \
config['password_store'].get_decrypted_password(path).strip()
if clip:
xclip = subprocess.Popen(
[
'xclip',
'-selection', 'clipboard'
],
stdin=subprocess.PIPE
)
xclip.stdin.write(decrypted_password.split('\n')[0].encode('utf8'))
xclip.stdin.close()
click.echo('Copied %s to clipboard.' % path)
else:
click.echo(decrypted_password)
@main.command()
@click.argument('path', type=click.STRING)
@click.pass_obj
def connect(config, path):
store = config['password_store']
hostname = store.get_decrypted_password(path, entry=EntryType.hostname)
username = store.get_decrypted_password(path, entry=EntryType.username)
password = store.get_decrypted_password(path, entry=EntryType.password)
s = pxssh.pxssh()
click.echo("Connectig to %s" % hostname)
s.login(hostname, username, password=password)
s.sendline()
s.interact()
@main.command()
@click.argument('subfolder', required=False, type=click.STRING, default='')
@click.pass_obj
def ls(config, subfolder):
tree = subprocess.Popen(
[
'tree',
'-C',
'-l',
'--noreport',
os.path.join(config['password_store'].path, subfolder),
],
shell=False,
stdout=subprocess.PIPE
)
tree.wait()
if tree.returncode == 0:
output_without_gpg = \
tree.stdout.read().decode('utf8').replace('.gpg', '')
output_replaced_first_line =\
"Password Store\n" + '\n'.join(output_without_gpg.split('\n')[1:])
output_stripped = output_replaced_first_line.strip()
click |
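Because these are ordinary click commands they can be exercised with click's test runner; a minimal sketch, with the store path and key id as placeholders:

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(main, ['init', '--path', '/tmp/store', 'me@example.com'])
print(result.output)   # -> Password store initialized for me@example.com.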
cgrice/django-staticblog | staticblog/urls.py | Python | mit | 294 | 0.003401 | from django.conf.urls import patterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns( | 'staticblog.views',
(r'^$', 'archive'),
(r'^([\-\w]+)$', 'ren | der_post'),
(r'^git/receive', 'handle_hook'),
)
|
adijo/codeforces | 287/AmrAndPins.py | Python | gpl-2.0 | 147 | 0.006803 | imp | ort math
r, x, y, x_p, y_p = map(int, raw_input().split())
dist = math.sqrt((x - x_p)**2 + (y - y_p)**2)
print int(math.ceil(dist / (2 | * r))) |
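Each flip rolls the pin over its edge, advancing the center by at most one diameter, so the answer is the straight-line distance divided by 2r, rounded up. A worked example:

# r = 2, pin at (0, 0), target at (0, 20):
# dist = 20.0 and one move covers at most 2*r = 4,
# so int(math.ceil(20.0 / 4)) == 5 moves.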
fah-designs/feincms-blogs | blogs/views.py | Python | mit | 3,343 | 0.001496 | import datetime
from django.views.generic.detail import SingleObjectMixin
from django.views.generic.l | ist import ListView
from django.views.generic.base import View
| from django.utils.translation import ugettext as _
from django.http import Http404, HttpResponseRedirect
from django.core.urlresolvers import reverse
from feincms.module.mixins import ContentView
from djapps.blogs.models import Blog, Post
class PostPermalinkView(SingleObjectMixin, View):
model = Post
def get_queryset(self):
return self.model.objects.active()
def get(self, request, *args, **kwargs):
post = self.get_object()
return HttpResponseRedirect(
post.get_pretty_url()
)
class PostView(ContentView):
model = Post
context_object_name = 'post'
def get_queryset(self):
return self.model.objects.active().filter(**self.kwargs)
def get_context_data(self, *args, **kwargs):
data = super(PostView, self).get_context_data(*args, **kwargs)
self.request._blogs_current_blog = self.object.blog
self.request._blogs_current_date = self.object.date
self.request._blogs_current_year = int(self.kwargs.get('date__year', 0))
self.request._blogs_current_month = int(self.kwargs.get('date__month', 0))
self.request._blogs_current_day = int(self.kwargs.get('date__day', 0))
return data
def get_object(self):
queryset = self.get_queryset()
obj = queryset.first()
if obj is None:
raise Http404(_("No %(verbose_name)s found matching the query") %
{'verbose_name': queryset.model._meta.verbose_name})
return obj
class PostListView(ListView):
model = Post
paginate_by = 15
def dispatch(self, *args, **kwargs):
try:
self.range_start = datetime.date(
year=int(self.kwargs.get("date__year", 1)),
month=int(self.kwargs.get("date__month", 1)),
day=int(self.kwargs.get("date__day", 1)),
)
except ValueError:
raise Http404(_("Invalid date"))
return super(PostListView, self).dispatch(*args, **kwargs)
def get_queryset(self):
return self.model.objects.active().filter(
**dict(
(k, v) for k,v in self.kwargs.items()
)
)
def get_context_data(self, *args, **kwargs):
data = super(PostListView, self).get_context_data(*args, **kwargs)
data["range_start"] = self.range_start
data["year"] = self.kwargs.get('date__year', None)
data["month"] = self.kwargs.get('date__month', None)
data["day"] = self.kwargs.get('date__day', None)
blogslug = self.kwargs.get('blog__slug', False)
if blogslug:
blog = Blog.objects.filter(slug=blogslug).first()
if blog is None:
raise Http404(_("Blog not found"))
data["blog"] = blog
self.request._blogs_current_blog = blog
self.request._blogs_current_date = self.range_start
self.request._blogs_current_year = int(self.kwargs.get('date__year', 0))
self.request._blogs_current_month = int(self.kwargs.get('date__month', 0))
self.request._blogs_current_day = int(self.kwargs.get('date__day', 0))
return data
|