repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
williamjmorenor/mkdocs | mkdocs/utils/__init__.py | Python | bsd-2-clause | 13,602 | 0.000588 | # coding: utf-8
"""
Standalone file utils.
Nothing in this module should have an knowledge of config or the layout
and structure of the site and pages in the site.
"""
from __future__ import unicode_literals
import logging
import markdown
import os
import pkg_resources
import shutil
import sys
import yaml
import fnmatch
from mkdocs import toc, exceptions
try: # pragma: no cover
from urllib.parse import urlparse, urlunparse, urljoin # noqa
from urllib.request import pathname2url # noqa
from collections import UserDict # noqa
except ImportError: # pragma: no cover
from urlparse import urlparse, urlunparse, urljoin # noqa
from urllib import pathname2url # noqa
from UserDict import UserDict # noqa
PY3 = sys.version_info[0] == 3
if PY3: # pragma: no cover
string_types = str, # noqa
text_type = str # noqa
else: # pragma: no cover
string_types = basestring, # noqa
text_type = unicode # noqa
log = logging.getLogger(__name__)
def yaml_load(source, loader=yaml.Loader):
    """
    Wrap PyYaml's loader so we can extend it to suit our needs.

    Load all strings as unicode: http://stackoverflow.com/a/2967461/3609487
    """

    def construct_yaml_str(self, node):
        """Return scalars tagged as strings as unicode objects."""
        return self.construct_scalar(node)

    class Loader(loader):
        """
        Custom loader derived from the supplied loader class so the
        global loader is left unaltered.
        """

    # Route every YAML string scalar through our unicode constructor.
    Loader.add_constructor('tag:yaml.org,2002:str', construct_yaml_str)

    try:
        return yaml.load(source, Loader)
    finally:
        # TODO: Remove this when external calls are properly cleaning up file
        # objects. Some mkdocs internal calls (sometimes in the test lib) load
        # configs from a file object and never close it; on some systems a
        # later delete of that file then fails with an access error. Close the
        # source here since nothing needs it once the yaml is parsed.
        if hasattr(source, 'close'):
            source.close()
def reduce_list(data_set):
    """Return the items of *data_set* with duplicates removed, keeping the
    first occurrence of each item in its original position."""
    unique_items = []
    seen = set()
    for item in data_set:
        if item not in seen:
            seen.add(item)
            unique_items.append(item)
    return unique_items
def copy_file(source_path, output_path):
    """
    Copy source_path to output_path, making sure any parent directories exist.

    Handles an output_path with no directory component: os.path.dirname()
    then returns '' and os.makedirs('') would raise, so only create the
    directory when there actually is one.
    """
    output_dir = os.path.dirname(output_path)
    if output_dir and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    shutil.copy(source_path, output_path)
def write_file(content, output_path):
    """
    Write *content* (a bytestring) to output_path, making sure any parent
    directories exist.
    """
    output_dir = os.path.dirname(output_path)
    # Guard against os.makedirs('') when output_path has no directory part.
    if output_dir and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Close the handle deterministically instead of relying on GC.
    with open(output_path, 'wb') as f:
        f.write(content)
def clean_directory(directory):
    """
    Remove the content of a directory recursively but not the directory
    itself.  Hidden entries (names starting with '.') are left in place,
    mirroring the copy step, which never copies hidden files.
    """
    if not os.path.exists(directory):
        return

    visible_entries = (name for name in os.listdir(directory)
                       if not name.startswith('.'))
    for name in visible_entries:
        target = os.path.join(directory, name)
        if os.path.isdir(target):
            shutil.rmtree(target, ignore_errors=True)
        else:
            os.unlink(target)
def copy_media_files(from_dir, to_dir, exclude=None):
    """
    Recursively copy all files except markdown and exclude[ed] files into
    another directory.

    `exclude` accepts a list of Unix shell-style wildcards (`['*.py', '*.pyc']`).
    Note that `exclude` only operates on file names, not directories.
    """
    # Hidden files are always excluded, on top of any caller patterns.
    exclude_patterns = ['.*'] + list(exclude or [])

    for source_dir, dirnames, filenames in os.walk(from_dir):
        relative_path = os.path.relpath(source_dir, from_dir)
        output_dir = os.path.normpath(os.path.join(to_dir, relative_path))

        # Prune hidden directories in place so os.walk never descends
        # into them.
        dirnames[:] = [d for d in dirnames if not d.startswith('.')]

        # Drop file names matching any exclusion pattern.
        for pattern in exclude_patterns:
            filenames = [f for f in filenames
                         if not fnmatch.fnmatch(f, pattern)]

        for filename in filenames:
            if is_markdown_file(filename):
                continue
            copy_file(os.path.join(source_dir, filename),
                      os.path.join(output_dir, filename))
def get_html_path(path):
    """
    Map a source file path to an output html path.

    'index.md'          -> 'index.html'
    'about.md'          -> 'about/index.html'
    'api-guide/core.md' -> 'api-guide/core/index.html'
    """
    root = os.path.splitext(path)[0]
    if os.path.basename(root) == 'index':
        return root + '.html'
    return root + '/index.html'
def get_url_path(path, use_directory_urls=True):
    """
    Map a source file path to an output URL path.

    'index.md'          -> '/'
    'about.md'          -> '/about/'
    'api-guide/core.md' -> '/api-guide/core/'

    If `use_directory_urls` is `False`, returned URLs include the trailing
    `index.html` rather than just the directory path.
    """
    # Resolve the output html file first (inline of get_html_path).
    root = os.path.splitext(path)[0]
    if os.path.basename(root) == 'index':
        html_path = root + '.html'
    else:
        html_path = '/'.join((root, 'index.html'))

    url = '/' + html_path.replace(os.path.sep, '/')
    return url[:-len('index.html')] if use_directory_urls else url
def is_homepage(path):
    """Return True when *path* (e.g. 'index.md') is the homepage source file.

    Note: the full path is compared, so 'sub/index.md' is NOT the homepage.
    """
    root, _ = os.path.splitext(path)
    return root == 'index'
def is_markdown_file(path):
    """
    Return True if the given file path is a Markdown file.

    http://superuser.com/questions/249436/file-extension-for-markdown-files
    """
    extension = os.path.splitext(path)[1].lower()
    return extension in ('.markdown', '.mdown', '.mkdn', '.mkd', '.md')
def is_css_file(path):
    """
    Return True if the given file path is a CSS file.
    """
    return os.path.splitext(path)[1].lower() == '.css'
def is_javascript_file(path):
    """
    Return True if the given file path is a Javascript file.
    """
    return os.path.splitext(path)[1].lower() in ('.js', '.javascript')
def is_html_file(path):
    """
    Return True if the given file path is an HTML file.
    """
    return os.path.splitext(path)[1].lower() in ('.html', '.htm')
def is_template_file(path):
    """
    Return True if the given file path is a template (HTML or XML) file.
    """
    return os.path.splitext(path)[1].lower() in ('.html', '.htm', '.xml')
def create_media_urls(nav, path_list):
    """
    Return a list of URLs that have been processed correctly for inclusion in
    a page.

    Fully qualified URLs (those with a network location) pass through
    untouched; everything else is treated as a local path and re-rooted
    relative to the current page via the nav's url context.
    """
    processed = []
    for media_path in path_list:
        if urlparse(media_path).netloc:
            # Already a fully qualified URL - leave it alone.
            processed.append(media_path)
        else:
            # Local path: convert it and make it page-relative.
            url = path_to_url(media_path)
            processed.append(
                '%s/%s' % (nav.url_context.make_relative('/'), url))
    return processed
def crea |
physicalattraction/kerstpuzzel | src/Conundrum/repeated_letters.py | Python | mit | 853 | 0.001172 | from pprint import pprint
from Conundrum.utils import sanitize
def decrypt(msg: str, repeated_letter: str) -> str:
    """
    Extract every letter after an occurrence of the repeated letter
    """
    cleaned = sanitize(msg)
    collected = []
    previous_was_marker = False
    for character in cleaned:
        if previous_was_marker:
            collected.append(character)
        # The NEXT character is taken whenever this one is the marker.
        previous_was_marker = character == repeated_letter
    return ''.join(collected)
def decrypt_try_all(msg: str) -> dict:
    """
    Run decrypt() once for every distinct letter of the sanitized message.

    Returns a mapping {letter: decrypted_text} so all candidate keys can be
    scanned at once.  (The previous annotation `[str]` was both invalid as a
    type hint and wrong: a dict is returned, not a list.)
    """
    msg = sanitize(msg)
    # sorted(set(...)) keeps the display order of candidates stable.
    return {letter: decrypt(msg, letter) for letter in sorted(set(msg))}
if __name__ == '__main__':
# Used in Movies 4
encrypted_msg = 'i b | et pews or leisure chains can seem to stink of effort, george, under no illusions of vanity'
pprint(decrypt_try_all(encrypted_msg))
|
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/spyderlib/widgets/externalshell/pythonshell.py | Python | gpl-3.0 | 27,927 | 0.003688 | # -*- coding: utf-8 -*-
#
# Copyright © 2009-2010 Pierre Raybaut
# Licensed under the terms of the MIT License
# (see spyderlib/__init__.py for details)
"""External Python Shell widget: execute Python script in a separate process"""
import sys
import os
import os.path as osp
import socket
from spyderlib.qt.QtGui import QApplication, QMessageBox, QSplitter, QMenu
from spyderlib.qt.QtCore import QProcess, SIGNAL, Qt
from spyderlib.qt.compat import getexistingdirectory
# Local imports
from spyderlib.utils.qthelpers import (get_icon, get_std_icon, add_actions,
create_toolbutton, create_action,
DialogManager)
from spyderlib.utils.environ import RemoteEnvDialog
from spyderlib.utils.programs import get_python_args
from spyderlib.utils.misc import get_python_executable
from spyderlib.baseconfig import (_, get_module_source_path, DEBUG,
MAC_APP_NAME, running_in_mac_app)
from spyderlib.widgets.shell import PythonShellWidget
from spyderlib.widgets.externalshell.namespacebrowser import NamespaceBrowser
from spyderlib.utils.bsdsocket import communicate, write_packet
from spyderlib.widgets.externalshell.baseshell import (ExternalShellBase,
add_pathlist_to_PYTHONPATH)
from spyderlib.widgets.dicteditor import DictEditor
from spyderlib.py3compat import (is_text_string, to_text_string,
to_binary_string)
class ExtPythonShellWidget(PythonShellWidget):
def __init__(self, parent, history_filename, profile=False):
PythonShellWidget.__init__(self, parent, history_filename, profile)
self.path = []
def set_externalshell(self, externalshell):
# ExternalShellBase instance:
self.externalshell = externalshell
def clear_terminal(self):
    """Reimplement ShellBaseWidget method

    Wipe the widget's display, then send a bare newline to the child
    process so a fresh prompt is printed.
    """
    self.clear()
    # Emitting execute("\n") asks the remote interpreter for a new prompt.
    self.emit(SIGNAL("execute(QString)"), "\n")
def execute_lines(self, lines):
    """
    Execute a set of lines as multiple command
    lines: multiple lines of text to be executed as single commands
    """
    for line in lines.splitlines():
        stripped_line = line.strip()
        if stripped_line.startswith('#'):
            # Pure comment lines are skipped entirely.
            continue
        # Echo the command into the shell widget before running it.
        self.write(line+os.linesep, flush=True)
        self.execute_command(line)
        # Workaround for Issue 502
        # Emmiting wait_for_ready_read was making the console hang
        # in Mac OS X
        if sys.platform.startswith("darwin"):
            import time
            time.sleep(0.025)
        else:
            self.emit(SIGNAL("wait_for_ready_read()"))
        self.flush()
#------ Code completion / Calltips
def ask_monitor(self, command, settings=[]):
    """Send *command* to the external shell's introspection socket and
    return the reply, or None when no socket is available or the call
    fails.

    NOTE(review): the mutable default ``settings=[]`` is shared across
    calls; it is only passed through here, but confirm nothing mutates it.
    """
    sock = self.externalshell.introspection_socket
    if sock is None:
        return
    try:
        return communicate(sock, command, settings=settings)
    except socket.error:
        # Process was just closed
        pass
    except MemoryError:
        # Happens when monitor is not ready on slow machines
        pass
def get_dir(self, objtxt):
"""Return dir(object)"""
return self.ask_monitor("__get_dir__('%s')" % objtxt)
def get_globals_keys(self):
"""Return shell globals() keys"""
return self.ask_monitor("get_globals_keys()")
def get_cdlistdir(self):
"""Return shell current directory list dir"""
return self.ask_monitor("getcdlistdir()")
def iscallable(self, objtxt):
"""Is object callable?"""
return self.ask_monitor("__iscallable__('%s')" % objtxt)
| def get_arglist(self, objtxt):
"""Get func/method argument list"""
return self.ask_monitor("__get_arglist__('%s')" % objtxt)
def get__doc__(self, objtxt):
"""Get object __doc__"""
return self.ask_monitor("__get | __doc____('%s')" % objtxt)
def get_doc(self, objtxt):
"""Get object documentation dictionary"""
return self.ask_monitor("__get_doc__('%s')" % objtxt)
def get_source(self, objtxt):
"""Get object source"""
return self.ask_monitor("__get_source__('%s')" % objtxt)
def is_defined(self, objtxt, force_import=False):
"""Return True if object is defined"""
return self.ask_monitor("isdefined('%s', force_import=%s)"
% (objtxt, force_import))
def get_module_completion(self, objtxt):
"""Return module completion list associated to object name"""
return self.ask_monitor("getmodcomplist('%s', %s)" % \
(objtxt, self.path))
def get_cwd(self):
"""Return shell current working directory"""
return self.ask_monitor("getcwd()")
def set_cwd(self, dirname):
"""Set shell current working directory"""
return self.ask_monitor("setcwd(r'%s')" % dirname)
def get_env(self):
"""Return environment variables: os.environ"""
return self.ask_monitor("getenv()")
def set_env(self, env):
"""Set environment variables via os.environ"""
return self.ask_monitor('setenv()', settings=[env])
def get_syspath(self):
"""Return sys.path[:]"""
return self.ask_monitor("getsyspath()")
def set_spyder_breakpoints(self):
"""Set Spyder breakpoints into debugging session"""
return self.ask_monitor("set_spyder_breakpoints()")
class ExternalPythonShell(ExternalShellBase):
"""External Shell widget: execute Python script in a separate process"""
SHELL_CLASS = ExtPythonShellWidget
def __init__(self, parent=None, fname=None, wdir=None,
interact=False, debug=False, path=[], python_args='',
ipykernel=False, arguments='', stand_alone=None,
umr_enabled=True, umr_namelist=[], umr_verbose=True,
pythonstartup=None, pythonexecutable=None,
monitor_enabled=True, mpl_backend=None, ets_backend='qt4',
qt_api=None, pyqt_api=0,
ignore_sip_setapi_errors=False, merge_output_channels=False,
colorize_sys_stderr=False, autorefresh_timeout=3000,
autorefresh_state=True, light_background=True,
menu_actions=None, show_buttons_inside=True,
show_elapsed_time=True):
assert qt_api in (None, 'pyqt', 'pyside')
self.namespacebrowser = None # namespace browser widget!
self.dialog_manager = DialogManager()
self.stand_alone = stand_alone # stand alone settings (None: plugin)
self.interact = interact
self.is_ipykernel = ipykernel
self.pythonstartup = pythonstartup
self.pythonexecutable = pythonexecutable
self.monitor_enabled = monitor_enabled
self.mpl_backend = mpl_backend
self.ets_backend = ets_backend
self.qt_api = qt_api
self.pyqt_api = pyqt_api
self.ignore_sip_setapi_errors = ignore_sip_setapi_errors
self.merge_output_channels = merge_output_channels
self.colorize_sys_stderr = colorize_sys_stderr
self.umr_enabled = umr_enabled
self.umr_namelist = umr_namelist
self.umr_verbose = umr_verbose
self.autorefresh_timeout = autorefresh_timeout
self.autorefresh_state = autorefresh_state
self.namespacebrowser_button = None
self.cwd_button = None
self.env_button = None
self.syspath_button = None
self.terminate_button = None
self.notification_thread = None
ExternalShellBase.__init__(self, parent=parent, fname=fname, wdir=wdir,
history_filename='history.py',
light_background=light_background,
menu_actions=menu |
JudoWill/ResearchNotebooks | Woundy.py | Python | mit | 2,781 | 0.023013 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
import os, os.path
from matplotlib import pyplot as plt
from pylab import get_cmap
import SimpleCV as cv
from glob import glob
# <codecell>
def show_img(img, ax = None):
    """Draw a SimpleCV image on a matplotlib axes (current axes when *ax*
    is None) and return the resulting AxesImage."""
    if ax is not None:
        plt.sca(ax)
    array = img.getNumpy()
    return plt.imshow(array, aspect='equal')
# <codecell>
path = '/home/will/Dropbox/burnimages/*.jpg'
norm_files = sorted(f for f in glo | b(path) if '-e' not in f)
masked_files = sorted(f for f in glob(path) if '-e' in f)
fig, axs = plt.subplots(6,6, figsize = (10,10))
for f, ax in zip(norm_files, axs.flatten()):
img = cv.Image(f)
show_img(img, ax = ax)
ax.set_xticks([])
ax.set_yticks([])
fig.tight_layout()
# <codecell>
from itertools import islice, izip_longest
from dateutil.parser import parse
def make_wound_mask(norm_img, green_img, color,
minsize = None,
maxsize = None):
wmask = green_img.hueDistance(color).invert().threshold(200)
blobs = norm_img.findBlobsFromMask(wmask,
minsize = minsize,
maxsize = maxsize)
return wmask, blobs
fig, axs = plt.subplots(6,6, figsize = (10,10))
results = []
for fname, mf, of, ax in izip_longest(norm_files, masked_files, norm_files, axs.flatten()):
mask_img = cv.Image(mf)
norm_img = cv.Image(of)
dt = parse(fname.rsplit(os.sep,1)[1].replace('.jpg', '').replace('.',':'))
wound_mask, wound_blobs = make_wound_mask(norm_img, mask_img, cv.Color.GREEN,
minsize = 1000)
dime_mask, dime_blobs = make_wound_mask(norm_img, mask_img, cv.Color.BLUE,
minsize = 500)
layer = cv.DrawingLayer((norm_img.width, norm_img.height))
wound_blobs[-1].drawHull(color=cv.Color.BLUE, width = 100, layer = layer)
dime_blobs[-1].drawHull(color=cv.Color.RED, width = 100, layer = layer)
norm_img.addDrawingLayer(layer)
fnorm = norm_img.applyLayers()
ratio = wound_blobs[-1].area()/dime_blobs[-1].area()
results.append((dt, ratio))
if ax is not None:
show_img(fnorm, ax = ax)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title(ratio)
fig.tight_layout()
# <codecell>
import pandas as pd
res_df = pd.DataFrame(sorted(results), columns = ['SampleTime', 'Ratio'])
dime_diameter = 18 #mm
dime_area = 3.141*(dime_diameter/2)**2
res_df['Area-mm2'] = dime_area*res_df['Ratio']
res_df.set_index('SampleTime', inplace=True)
res_df
# <codecell>
res_df['Area-mm2'].plot()
out = pd.ewma(res_df['Area-mm2'], freq='d', span = 1)
out.plot(lw = 10, alpha = 0.7)
plt.ylabel('Wound-Area-mm^2')
# <codecell>
|
sonofeft/ODPSlides | odpslides/zip_file.py | Python | lgpl-3.0 | 882 | 0.006803 | # Support Python 2 and 3
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import zipfile
import time
from odpslides.template_xml_file import TemplateXML_File
here = os.path.abspath(os.path.dirname(__file__))
def zipfile_insert( zipfileobj, filename, data):
    """Create a file named filename, inside the zip archive.

    "data" is the encode('UTF-8') string that is placed into filename;
    a TemplateXML_File instance is serialized to a string first.
    (Not called by User)
    """
    if isinstance( data, TemplateXML_File ):
        data = data.tostring()
    # zip seems to struggle with non-ascii characters; per the docstring,
    # data is expected to arrive already UTF-8 encoded, so the explicit
    # encode below stays disabled.
    # data = data.encode('utf-8')
    now = time.localtime(time.time())[:6]
    # Build an explicit ZipInfo so the timestamp and compression are set
    # deterministically instead of defaulting.
    info = zipfile.ZipInfo(filename)
    info.date_time = now
    info.compress_type = zipfile.ZIP_DEFLATED
    zipfileobj.writestr(info, data)
|
Ziemin/telepathy-gabble | tests/twisted/olpc/olpc-muc-prop-change.py | Python | lgpl-2.1 | 14,987 | 0.001802 | """
Test OLPC MUC properties.
"""
import dbus
from twisted.words.xish import domish, xpath
from gabbletest import exec_test, acknowledge_iq, make_muc_presence
from servicetest import call_async, EventPattern, wrap_channel
import constants as cs
import ns
from mucutil import echo_muc_presence
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
buddy_iface = dbus.Interface(conn, 'org.laptop.Telepathy.BuddyInfo')
act_prop_iface = dbus.Interface(conn, 'org.laptop.Telepathy.ActivityProperties')
bob_handle = conn.get_contact_handle_sync('bob@localhost')
# Bob invites us to a chatroom, pre-seeding properties
message = domish.Element(('jabber:client', 'message'))
message['from'] = 'bob@localhost'
message['to'] = 'test@localhost'
properties = message.addElement(
(ns.OLPC_ACTIVITY_PROPS, 'properties'))
properties['room'] = 'chat@conf.localhost'
properties['activity'] = 'foo_id'
property = properties.addElement((None, 'property'))
property['type'] = 'str'
property['name'] = 'title'
property.addContent('From the invitation')
property = properties.addElement((None, 'property'))
property['type'] = 'bool'
property['name'] = 'private'
property.addContent('1')
stream.send(message)
message = domish.Element((None, 'message'))
message['from'] = 'chat@conf.localhost'
message['to'] = 'test@localhost'
x = message.addElement((ns.MUC_USER, 'x'))
invite = x.addElement((None, 'invite'))
invite['from'] = 'bob@localhost'
reason = invite.addElement((None, 'reason'))
reason.addContent('No good reason')
stream.send(message)
event = q.expect('dbus-signal', signal='NewChannel')
assert event.args[1] == cs.CHANNEL_TYPE_TEXT
assert event.args[2] == 2 # handle type
assert event.args[3] == 1 # handle
room_handle = 1
text_chan = wrap_channel(bus.get_object(conn.bus_name, event.args[0]),
'Text')
group_iface = text_chan.Group
members = group_iface.GetAllMembers()[0]
local_pending = group_iface.GetAllMembers()[1]
remote_pending = group_iface.GetAllMembers()[2]
assert len(members) == 1
assert conn.inspect_contact_sync(members[0]) == 'bob@localhost'
bob_handle = members[0]
assert len(local_pending) == 1
# FIXME: the username-part-is-nickname assumption
assert conn.inspect_contact_sync(local_pending[0]) == \
'chat@conf.localhost/test'
assert len(remote_pending) == 0
room_self_handle = text_chan.Properties.Get(cs.CHANNEL_IFACE_GROUP,
"SelfHandle")
assert room_self_handle == local_pending[0]
# by now, we should have picked up the extra activity properties
buddy_iface = dbus.Interface(conn, 'org.laptop.Telepathy.BuddyInfo')
call_async(q, buddy_iface, 'GetActivities', bob_handle)
event = q.expect('stream-iq', iq_type='get', to='bob@localhost')
# Bob still has no (public) activities
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'bob@localhost'
stream.send(event.stanza)
event = q.expect('dbus-return', method='GetActivities')
assert event.value == ([('foo_id', room_handle)],)
props = act_prop_iface.GetProperties(room_handle)
assert len(props) == 2
assert props['title'] == 'From the invitation'
assert props['private'] == True
# Now Bob changes the properties
message = domish.Element(('jabber:client', 'message'))
message['from'] = 'bob@localhost'
message['to'] = 'test@localhost'
properties = message.addElement(
(ns.OLPC_ACTIVITY_PROPS, 'properties'))
properties['room'] = 'chat@conf.localhost'
properties['activity'] = 'foo_id'
property = properties.addElement((None, 'property'))
property['type'] = 'str'
property['name'] = 'title'
property.addContent('Mushroom, mushroom')
property = properties.addElement((None, 'property'))
property['type'] = 'bool'
property['name'] = 'private'
property.addContent('0')
stream.send(message)
event = q.expect('dbus-signal', signal='ActivityPropertiesChanged')
assert event.args == [room_handle, {'title': 'Mushroom, mushroom',
'private': False }]
assert act_prop_iface.GetProperties(room_handle) == \
event.args[1]
# OK, now accept the invitation
call_async(q, group_iface, 'AddMembers', [room_self_handle], 'Oh, OK then')
q.expect_many(
EventPattern('stream-presence', to='chat@conf.localhost/test'),
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [], [bob_handle], [], [room_self_handle],
0, cs.GC_REASON_INVITED]),
EventPattern('dbus-return', method='AddMembers'),
)
# Send presence for own membership of room.
stream.send(make_muc_presence('owner', 'moderator', 'chat@conf.localh | ost', 'test'))
event = q.expect('dbus-signal', signal='MembersChanged')
assert event.args == ['', [room_self_handle], [], [], [], 0, 0]
call_async(q, buddy_iface, 'SetActivities', [('foo_id', room_handle)])
event = q.expect('stream-iq', iq_type='set')
# Now that it's not private, it'll go in my PEP
event.stanz | a['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
q.expect('dbus-return', method='SetActivities')
# Bob changes the properties and tells the room he's done so
message = domish.Element(('jabber:client', 'message'))
message['from'] = 'chat@conf.localhost/bob'
message['to'] = 'chat@conf.localhost'
properties = message.addElement(
(ns.OLPC_ACTIVITY_PROPS, 'properties'))
properties['activity'] = 'foo_id'
property = properties.addElement((None, 'property'))
property['type'] = 'str'
property['name'] = 'title'
property.addContent('Badger badger badger')
property = properties.addElement((None, 'property'))
property['type'] = 'bool'
property['name'] = 'private'
property.addContent('0')
stream.send(message)
event = q.expect('stream-iq', iq_type='set')
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
properties = xpath.queryForNodes('/activities/properties', activities[0])
assert (properties is not None and len(properties) == 1), repr(properties)
assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
assert properties[0]['room'] == 'chat@conf.localhost'
assert properties[0]['activity'] == 'foo_id'
property = xpath.queryForNodes('/properties/property', properties[0])
assert (property is not None and len(property) == 2), repr(property)
seen = set()
for p in property:
seen.add(p['name'])
if p['name'] == 'title':
assert p['type'] == 'str'
assert str(p) == 'Badger badger badger'
elif p['name'] == 'private':
assert p['type'] == 'bool'
assert str(p) == '0'
else:
assert False, 'Unexpected property %s' % p['name']
assert 'title' in seen, seen
assert 'private' in seen, seen
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
act_prop_iface = dbus.Interface(conn, 'org.laptop.Telepathy.ActivityProperties')
# test sets the title and sets private back to True
call_async(q, act_prop_iface, 'SetProperties',
room_handle, {'title': 'I can set the properties too', 'private': True})
event = q.expect('stream-message', to='chat@conf.localhost')
message = event.stanza
properties = xpath.queryForNodes('/message/properties', message)
assert (properties is not None and len(properties) == 1), repr(properties)
assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
|
AdrianGaudebert/configman | configman/tests/test_option.py | Python | bsd-3-clause | 12,635 | 0.000237 | # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is configman
#
# The Initial Developer of the Original Code is
# Mozilla Foundation
# Portions created by the Initial Developer are Copyright (C) 2011
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# K Lars Lohn, lars@mozilla.com
# Peter Bengtsson, peterbe@mozilla.com
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import unittest
import re
import datetime
import configman.converters as conv
import configman.datetime_util as dtu
from configman.option import Option
from configman.config_exceptions import CannotConvertError, OptionError
class TestCase(unittest.TestCase):
def test_option_constructor_basics(self):
o = Option('name')
self.assertEqual(o.name, 'name')
self.assertEqual(o.default, None)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter, None)
self.assertEqual(o.value, None)
o = Option('lucy')
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, None)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter, None)
self.assertEqual(o.value, None)
o = Option(u'spa\xa0e')
self.assertEqual(o.name, u'spa\xa0e')
self.assertEqual(o.default, None)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter, None)
self.assertEqual(o.value, None)
data = {
'name': 'lucy',
'default': 1,
'doc': "lucy's integer"
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1)
self.ass | ertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
| self.assertEqual(o.value, 1)
data = {
'name': 'lucy',
'default': 1,
'doc': "lucy's integer",
'value': '1'
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1)
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
data = {
'name': 'lucy',
'default': '1',
'doc': "lucy's integer",
'from_string_converter': int
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1) # converted using `int`
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
data = {
'name': 'lucy',
'default': '1',
'doc': "lucy's integer",
'from_string_converter': int,
}
o = Option(**data)
self.assertEqual(o.name, 'lucy')
self.assertEqual(o.default, 1)
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
data = {
'default': '1',
'doc': "lucy's integer",
'from_string_converter': int,
}
o = Option('now', **data)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, 1)
self.assertEqual(o.doc, "lucy's integer")
self.assertEqual(o.from_string_converter, int)
self.assertEqual(o.value, 1)
d = datetime.datetime.now()
o = Option('now', default=d)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, d)
self.assertEqual(o.doc, None)
self.assertEqual(o.from_string_converter,
dtu.datetime_from_ISO_string)
self.assertEqual(o.value, d)
data = {
'default': '1.0',
'doc': "lucy's height",
'from_string_converter': float,
}
o = Option('now', **data)
self.assertEqual(o.name, 'now')
self.assertEqual(o.default, 1.0)
self.assertEqual(o.doc, "lucy's height")
self.assertEqual(o.from_string_converter, float)
self.assertEqual(o.value, 1.0)
def test_option_constructor_more_complex_default_converters(self):
    """Option should accept a from_string_converter given either as a
    callable or as its dotted-path string, and convert the default."""
    # Converter passed as a callable.
    data = {
        'default': '2011-12-31',
        'doc': "lucy's bday",
        'from_string_converter': dtu.date_from_ISO_string,
    }
    o = Option('now', **data)
    self.assertEqual(o.name, 'now')
    self.assertEqual(o.default, datetime.date(2011, 12, 31))
    self.assertEqual(o.doc, "lucy's bday")
    self.assertEqual(o.from_string_converter, dtu.date_from_ISO_string)
    self.assertEqual(o.value, datetime.date(2011, 12, 31))

    # Same converter given as a dotted-path string must resolve to the
    # identical function object.
    data = {
        'default': '2011-12-31',
        'doc': "lucy's bday",
        'from_string_converter': \
        'configman.datetime_util.date_from_ISO_string',
    }
    o = Option('now', **data)
    self.assertEqual(o.name, 'now')
    self.assertEqual(o.default, datetime.date(2011, 12, 31))
    self.assertEqual(o.doc, "lucy's bday")
    self.assertEqual(o.from_string_converter, dtu.date_from_ISO_string)
    self.assertEqual(o.value, datetime.date(2011, 12, 31))
def test_setting_known_from_string_converter_onOption(self):
opt = Option('name', default=u'Peter')
self.assertEqual(opt.default, u'Peter')
self.assertEqual(opt.from_string_converter, unicode)
opt = Option('name', default=100)
self.assertEqual(opt.default, 100)
self.assertEqual(opt.from_string_converter, int)
opt = Option('name', default=100L)
self.assertEqual(opt.default, 100L)
self.assertEqual(opt.from_string_converter, long)
opt = Option('name', default=100.0)
self.assertEqual(opt.default, 100.0)
self.assertEqual(opt.from_string_converter, float)
from decimal import Decimal
opt = Option('name', default=Decimal('100.0'))
self.assertEqual(opt.default, Decimal('100.0'))
self.assertEqual(opt.from_string_converter, Decimal)
opt = Option('name', default=False)
self.assertEqual(opt.default, False)
self.assertEqual(opt.from_string_converter,
conv.boolean_converter)
dt = datetime.datetime(2011, 8, 10, 0, 0, 0)
opt = Option('name', default=dt)
self.assertEqual(opt.default, dt)
self.assertEqual(opt.from_string_converter,
dtu.datetime_from_ISO_string)
dt = datetime.date(2011, 8, 10)
opt = Option('name', default=dt)
self.assertEqual(opt.default, dt)
self.assertEqual(opt.from_string_converter,
dtu.date_from_ISO_string)
def test_boolean_converter_inOption(self):
opt = Option('name', default=False)
self.assertEqual(opt.defaul |
CSC-IT-Center-for-Science/pouta-blueprints | pebbles/services/openstack_service.py | Python | mit | 23,846 | 0.001048 | import novaclient
from novaclient.exceptions import NotFound
import novaclient.client
from keystoneauth1 import loading
from keystoneauth1 import session
import neutronclient.v2_0.client
import cinderclient.v2.client
from osc_lib.utils import wait_for_delete
import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow.patterns import graph_flow as gf
from taskflow import task
import logging
import os
import json
import time
NOVACLIENT_VERSION = "2.37"
def get_openstack_nova_client(config):
    """Return only the nova (compute) client from get_openstack_clients()."""
    return get_openstack_clients(config)[0]
def get_openstack_neutron_client(config):
    """Return only the neutron (network) client from get_openstack_clients()."""
    return get_openstack_clients(config)[1]
def get_openstack_cinder_client(config):
    """Return only the cinder (volume) client from get_openstack_clients()."""
    return get_openstack_clients(config)[2]
def get_openstack_clients(config):
    """Build a tuple of OpenStack clients sharing one keystone session.

    :param config: mapping that either contains 'M2M_CREDENTIAL_STORE'
        (path to a JSON file with the OS_* credentials) or the OS_* keys
        directly; when falsy, credentials are read from os.environ.
    :return: tuple ``(novaclient, neutronclient, cinderclient)``.
        Callers may pick up one or all of the returned clients.
    """
    if config:
        if config.get('M2M_CREDENTIAL_STORE'):
            logging.debug("loading credentials from %s" % config.get('M2M_CREDENTIAL_STORE'))
            # BUG FIX: json.load(open(...)) leaked the file handle;
            # use a context manager so it is closed promptly.
            with open(config.get('M2M_CREDENTIAL_STORE')) as cred_file:
                source_config = json.load(cred_file)
        else:
            logging.debug("using config as provided")
            source_config = config
    else:
        logging.debug("no config, trying environment vars")
        source_config = os.environ
    os_username = source_config['OS_USERNAME']
    os_password = source_config['OS_PASSWORD']
    os_tenant_name = source_config['OS_TENANT_NAME']
    os_auth_url = source_config['OS_AUTH_URL']
    loader = loading.get_plugin_loader('password')
    auth = loader.load_from_options(auth_url=os_auth_url,
                                    username=os_username,
                                    password=os_password,
                                    project_name=os_tenant_name
                                    )
    # NOTE(review): verify=False disables TLS certificate verification;
    # confirm this is intentional for this deployment.
    sess = session.Session(auth=auth, verify=False)
    return (novaclient.client.Client(NOVACLIENT_VERSION,
                                     session=sess),
            neutronclient.v2_0.client.Client(session=sess),
            # NOTE(review): NOVACLIENT_VERSION ("2.37") is also passed as
            # the cinder client's first argument -- confirm this is the
            # intended value for cinderclient.v2.
            cinderclient.v2.client.Client(NOVACLIENT_VERSION, session=sess)
            )
def _format_nics(nics):
""" Create a networks data structure for python-novaclient.
**Note** "auto" is the safest default to pass to novaclient
:param nics: either None, one of strings "auto" or "none"or string with a
comma-separated list of nic IDs from OpenStack.
:return: A data structure that can be passed as Nics
"""
if not nics:
return "auto"
if nics == "none":
return "none"
if nics.lower() == "auto":
return "auto"
return [{"net-id": item, "v4-fixed-ip": ""}
for item in nics.strip().split(",")]
class GetServer(task.Task):
    """TaskFlow task: fetch a nova server object by its id."""
    def execute(self, server_id, config):
        logging.debug("getting server %s" % server_id)
        nc = get_openstack_nova_client(config)
        return nc.servers.get(server_id)
class GetImage(task.Task):
    """TaskFlow task: look up a glance image by name."""
    def execute(self, image_name, config):
        logging.debug("getting image %s" % image_name)
        nc = get_openstack_nova_client(config)
        return nc.glance.find_image(image_name)
    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class ListImages(task.Task):
    """TaskFlow task: list all glance images.

    NOTE(review): ``image_name`` is unused here; presumably kept so the
    task accepts the same flow inputs as GetImage -- confirm.
    """
    def execute(self, image_name, config):
        logging.debug("getting images")
        nc = get_openstack_nova_client(config)
        return nc.glance.list()
    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class GetFlavor(task.Task):
    """TaskFlow task: look up a nova flavor by name."""
    def execute(self, flavor_name, config):
        logging.debug("getting flavor %s" % flavor_name)
        nc = get_openstack_nova_client(config)
        return nc.flavors.find(name=flavor_name)
    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class ListFlavors(task.Task):
    """TaskFlow task: list all nova flavors.

    NOTE(review): ``flavor_name`` is unused here; presumably kept so the
    task accepts the same flow inputs as GetFlavor -- confirm.
    """
    def execute(self, flavor_name, config):
        logging.debug("getting flavors")
        nc = get_openstack_nova_client(config)
        return nc.flavors.list()
    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class CreateSecurityGroup(task.Task):
    """TaskFlow task: create a per-instance neutron security group.

    When ``master_sg_name`` is given, ingress tcp/udp/icmp is allowed
    from members of that master group.  Returns the new group's id;
    revert() deletes the group again.
    """
    # note this uses neutron client
    # Class-level default so revert() can run even if execute() failed
    # before assigning an id.
    secgroup_id = ""
    def execute(self, display_name, master_sg_name, config):
        logging.debug("create security group %s" % display_name)
        security_group_name = display_name
        nc = get_openstack_neutron_client(config)
        self.secgroup = nc.create_security_group({"security_group": {
            "name": security_group_name,
            "description": "Security group generated by Pebbles"
        }})
        self.secgroup_id = self.secgroup["security_group"]["id"]
        self.secgroup_name = self.secgroup["security_group"]["name"]
        if master_sg_name:
            master_sg = nc.find_resource("security_group", master_sg_name)
            # NOTE(review): neutron usually expects ethertype "IPv4"
            # (capitalised) -- confirm 'ipv4' is accepted by this API
            # version.
            nc.create_security_group_rule({"security_group_rule": dict(
                security_group_id=self.secgroup_id,
                protocol='tcp',
                ethertype='ipv4',
                port_range_min=1,
                direction='ingress',
                port_range_max=65535,
                remote_group_id=master_sg["id"]
            )})
            nc.create_security_group_rule({"security_group_rule": dict(
                security_group_id=self.secgroup_id,
                protocol='udp',
                ethertype='ipv4',
                port_range_min=1,
                direction='ingress',
                port_range_max=65535,
                remote_group_id=master_sg["id"]
            )})
            # For icmp the port range fields carry the ICMP type/code.
            nc.create_security_group_rule({"security_group_rule": dict(
                security_group_id=self.secgroup_id,
                protocol='icmp',
                ethertype='ipv4',
                port_range_min=1,
                direction='ingress',
                port_range_max=255,
                remote_group_id=master_sg["id"]
            )})
        logging.info("Created security group %s" % self.secgroup_id)
        return self.secgroup_id
    def revert(self, config, **kwargs):
        logging.debug("revert: delete security group")
        nc = get_openstack_neutron_client(config)
        # NOTE(review): if execute() failed before creating the group,
        # this deletes with an empty id -- confirm neutron tolerates it.
        nc.delete_security_group(self.secgroup_id)
class CreateRootVolume(task.Task):
    """TaskFlow task: create a bootable root volume from an image.

    execute() returns the new volume id, or an empty string when no
    root volume was requested.  revert() deletes the created volume,
    if any.
    """
    def execute(self, display_name, image, root_volume_size, config):
        # Guard clause: a falsy size means "boot from image directly".
        if not root_volume_size:
            logging.debug("no root volume defined")
            return ""
        logging.debug("creating a root volume for instance %s from image %s" % (display_name, image))
        nc = get_openstack_cinder_client(config)
        volume_name = '%s-root' % display_name
        volume = nc.volumes.create(
            size=root_volume_size,
            imageRef=image.id,
            name=volume_name
        )
        # Remember the id so revert() can clean up on failure.
        self.volume_id = volume.id
        retries = 0
        # Poll cinder until the volume is usable; give up after
        # 30 * 5s = 150 seconds.
        while nc.volumes.get(volume.id).status not in ('available',):
            logging.debug("...waiting for volume to be ready")
            time.sleep(5)
            retries += 1
            if retries > 30:
                # BUG FIX: the original message contained an unfilled %s.
                raise RuntimeError('Volume creation %s is stuck' % volume.id)
        return volume.id
    def revert(self, config, **kwargs):
        logging.debug("revert: delete root volume")
        try:
            if getattr(self, 'volume_id', None):
                nc = get_openstack_cinder_client(config)
                nc.volumes.delete(
                    nc.volumes.get(self.volume_id))
            else:
                logging.debug("revert: no volume_id stored, unable to revert")
        except Exception as e:
            # Best effort: never let cleanup mask the original failure.
            logging.error('revert: deleting volume failed: %s' % e)
class CreateDataVolume(task.Task):
def execute(self, display_name, data_volume_size, data_volume_type, config):
if data_volume_size:
logging.debug("creating a data volume for instance %s, %d" % (display_name, data_volume_size))
nc = get_openstack_cinder_client(config)
volume_name = '%s-data' % display_name
volume = nc.volumes.create(
size=data_volume_size,
|
qilicun/python | python3/tutorials/fibonacci.py | Python | gpl-3.0 | 355 | 0.011268 | # - | *- coding: utf-8 -*-
#!/usr/bin/env python3
def fib(n):
    """Return the list of Fibonacci numbers strictly less than ``n``."""
    values = []
    current, following = 0, 1
    while current < n:
        values.append(current)
        current, following = following, current + following
    return values
# Demo entry point: print the Fibonacci numbers below the bound given as
# the first command-line argument, e.g. `python fibonacci.py 100`.
if __name__ == "__main__":
    import sys
    print(fib(int(sys.argv[1])))
#print(fib.__doc__)
#fib = fib(100)
|
mozaik-association/mozaik | mozaik_event_question_thesaurus/models/event_question.py | Python | agpl-3.0 | 573 | 0 | # Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models |
class EventQuestion(models.Model):
    """Extend event.question with thesaurus-term interests."""
    _inherit = "event.question"
    # invisible for simple_choice and text_input questions, intended to
    # be used for other types of questions when inheriting this module
    interest_ids = fields.Many2many("thesaurus.term", string="Interests")
class EventQuestionAnswer(models.Model):
    """Extend event.question.answer with thesaurus-term interests."""
    _inherit = "event.question.answer"
    interest_ids = fields.Many2many("thesaurus.term", string="Interests")
|
Eficent/odoo-operating-unit | account_operating_unit/wizard/account_report_account_balance.py | Python | agpl-3.0 | 1,755 | 0 | # -*- coding: utf-8 -*-
# © 2016 Eficent Business and IT Consulting Services S.L. -
# Jordi Ballester Alomar
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from openerp.osv import fields, orm
from openerp.addons.account.report.account_balance import account_balance
from openerp.report import report_sxw
class AccountBalanceReport(orm.TransientModel):
    """Add operating-unit filtering to the account balance report wizard.

    Legacy OpenERP (old API) model: uses ``orm.TransientModel``,
    ``_columns`` and cr/uid method signatures.
    """
    _inherit = "account.balance.report"
    _columns = {
        'operating_unit_ids': fields.many2many('operating.unit',
                                               string='Operating Units',
                                               required=False),
    }
    def _build_contexts(self, cr, uid, ids, data, context=None):
        # Propagate the selected operating units into the report context.
        result = super(AccountBalanceReport, self)._build_contexts(
            cr, uid, ids, data, context=context)
        data2 = {}
        data2['form'] = self.read(cr, uid, ids, ['operating_unit_ids'],
                                  context=context)[0]
        # `cond and x or False` idiom: empty selection collapses to False.
        result['operating_unit_ids'] = 'operating_unit_ids' in data2['form']\
            and data2['form']['operating_unit_ids']\
            or False
        return result
    def _build_comparison_context(self, cr, uid, ids, data, context=None):
        # Same propagation for the comparison context.
        result = super(AccountBalanceReport, self)._build_comparison_context(
            cr, uid, ids, data, context=context)
        # NOTE(review): unlike _build_contexts, this overwrites the
        # caller's data['form'] in place -- confirm that is intended.
        data['form'] = self.read(cr, uid, ids, ['operating_unit_ids'],
                                 context=context)[0]
        result['operating_unit_ids'] = 'operating_unit_ids' in data['form'] \
            and data['form']['operating_unit_ids'] \
            or False
        return result
|
xyficu/rts2 | python/testalchemy.py | Python | gpl-2.0 | 575 | 0.006957 | #!/usr/bin/env python
# Ad-hoc SQLAlchemy smoke-test script (Python 2: print statements).
# Queries the rts2 `stars` database for targets and GRBs.
from sqlalchemy import create_engine, and_, or_
from sqlalchemy.orm.session import sessionmaker
from rts2.db import Targets,Grb
Session = sessionmaker()
# NOTE(review): credentials are hard-coded in the connection URL --
# fine for a local test script, not for anything shared.
engine = create_engine('postgresql://petr:petr@localhost/stars',echo='debug')
Session.configure(bind=engine)
sess = Session()
targ = sess.query(Targets)
#q = sess.query(ApacheCatalog)
# Single target by id.
print targ.filter(Targets.tar_id == 1000).all()
# Targets in a small RA/Dec box.
print targ.filter(and_(Targets.tar_ra < 20, Targets.tar_dec < 0, Targets.tar_dec > -20)).all()
grb = sess.query(Grb)
print grb.filter(Grb.tar_id == 50001).all()
|
kunaltyagi/nsiqcppstyle | rules/RULE_9_2_D_use_reentrant_function.py | Python | gpl-2.0 | 3,067 | 0.00163 | """
Use reentrant functions. Do not use non-reentrant functions (ctime, strtok, toupper).
== Violation ==
void A() {
    k = ctime(); <== Violation. ctime() is not a reentrant function.
    j = strtok(blar, blar); <== Violation. strtok() is not a reentrant function.
}
== Good ==
void A() {
    k = t.ctime(); <== Correct. It may be a reentrant function.
}
void A() {
    k = ctime; <== Correct. It may be a reentrant function.
}
"""
from nsiqunittest.nsiqcppstyle_unittestbase import *
from nsiqcppstyle_rulehelper import *
from nsiqcppstyle_reporter import *
from nsiqcppstyle_rulemanager import *
no_reenterant_functions = (
'ctime',
'strtok',
'toupper',
)
def RunRule(lexer, contextStack):
    """Report calls to known non-reentrant functions (ctime, strtok, toupper)."""
    t = lexer.GetCurToken()
    if t.type == "ID":
        if t.value in no_reenterant_functions:
            t2 = lexer.PeekNextTokenSkipWhiteSpaceAndComment()
            t3 = lexer.PeekPrevTokenSkipWhiteSpaceAndComment()
            # Only flag a bare call: next token is '(' ...
            if t2 is not None and t2.type == "LPAREN":
                # ... and it is not a method access like `obj.ctime()`.
                if t3 is None or t3.type != "PERIOD":
                    # toupper can be whitelisted via the ignore_toupper var.
                    if t.value == "toupper" and nsiqcppstyle_state._nsiqcppstyle_state.GetVar(
                            "ignore_toupper", "false") == "true":
                        return
                    nsiqcppstyle_reporter.Error(t, __name__,
                                                "Do not use not reentrant function(%s)." % t.value)
ruleManager.AddFunctionScopeRule(RunRule)
##########################################################################
# Unit Test
##########################################################################
class testRule(nct):
    """Unit tests for the non-reentrant-function rule."""
    def setUpRule(self):
        ruleManager.AddFunctionScopeRule(RunRule)
    def test1(self):
        # A bare call of ctime() must be reported.
        self.Analyze("thisfile.c",
                     """
void func1()
{
    k = ctime()
}
""")
        self.ExpectError(__name__)
    def test2(self):
        # A macro definition named ctime() is not a call.
        self.Analyze("thisfile.c",
                     """
void func1() {
#define ctime() k
}
""")
        self.ExpectSuccess(__name__)
    def test3(self):
        # Defining a function named ctime is not a call.
        self.Analyze("thisfile.c",
                     """
void ctime() {
}
""")
        self.ExpectSuccess(__name__)
    def test4(self):
        # Same with a space before the parameter list.
        self.Analyze("thisfile.c",
                     """
void ctime () {
}
""")
        self.ExpectSuccess(__name__)
    def test5(self):
        # Method access `help.ctime()` is not flagged.
        self.Analyze("thisfile.c",
                     """
void func1()
{
    k = help.ctime ()
}
""")
        self.ExpectSuccess(__name__)
    def test6(self):
        # toupper() is reported by default.
        self.Analyze("thisfile.c",
                     """
void func1()
{
    k = toupper()
}
""")
        self.ExpectError(__name__)
    def test7(self):
        # ... unless the ignore_toupper variable is set.
        nsiqcppstyle_state._nsiqcppstyle_state.varMap["ignore_toupper"] = "true"
        self.Analyze("thisfile.c",
                     """
void func1()
{
    k = toupper()
}
""")
        self.ExpectSuccess(__name__)
|
ecerulm/python-heatclient | heatclient/tests/functional/base.py | Python | apache-2.0 | 1,527 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
| #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from tempest_lib.cli import base
class ClientTestBase(base.ClientTestBase):
    """This is a first pass at a simple read only python-heatclient test.
    This only exercises client commands that are read only.
    This should test commands:
    * as a regular user
    * as an admin user
    * with and without optional parameters
    * initially just check return codes, and later test command outputs
    """
    def _get_clients(self):
        # Locate the CLI binaries: honour OS_HEATCLIENT_EXEC_DIR, falling
        # back to the tox functional-venv bin directory.
        cli_dir = os.environ.get(
            'OS_HEATCLIENT_EXEC_DIR',
            os.path.join(os.path.abspath('.'), '.tox/functional/bin'))
        # Credentials come from the standard OS_* environment variables.
        return base.CLIClient(
            username=os.environ.get('OS_USERNAME'),
            password=os.environ.get('OS_PASSWORD'),
            tenant_name=os.environ.get('OS_TENANT_NAME'),
            uri=os.environ.get('OS_AUTH_URL'),
            cli_dir=cli_dir)
    def heat(self, *args, **kwargs):
        # Convenience wrapper so tests can call self.heat('stack-list').
        return self.clients.heat(*args, **kwargs)
|
AnishShah/tensorflow | tensorflow/contrib/data/python/ops/sliding.py | Python | apache-2.0 | 5,041 | 0.004364 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Sliding dataset transformations."""
from __future__ import a | bsolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.pyt | hon.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.util import deprecation
class _SlideDataset(dataset_ops.Dataset):
  """A `Dataset` that passes a sliding window over its input."""

  def __init__(self, input_dataset, window_size, window_shift, window_stride):
    """See `sliding_window_batch` for details."""
    super(_SlideDataset, self).__init__()
    self._input_dataset = input_dataset
    # BUG FIX: the size tensor was previously created with
    # name="window_stride", mislabeling it in the graph.
    self._window_size = ops.convert_to_tensor(
        window_size, dtype=dtypes.int64, name="window_size")
    self._window_stride = ops.convert_to_tensor(
        window_stride, dtype=dtypes.int64, name="window_stride")
    self._window_shift = ops.convert_to_tensor(
        window_shift, dtype=dtypes.int64, name="window_shift")

  def _as_variant_tensor(self):
    return gen_dataset_ops.slide_dataset(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        window_size=self._window_size,
        window_shift=self._window_shift,
        window_stride=self._window_stride,
        **dataset_ops.flat_structure(self))

  @property
  def output_classes(self):
    return self._input_dataset.output_classes

  @property
  def output_shapes(self):
    # Each output component gains a leading (unknown-length) window axis.
    input_shapes = self._input_dataset.output_shapes
    return nest.pack_sequence_as(input_shapes, [
        tensor_shape.vector(None).concatenate(s)
        for s in nest.flatten(self._input_dataset.output_shapes)
    ])

  @property
  def output_types(self):
    return self._input_dataset.output_types
@deprecation.deprecated_args(
    None, "stride is deprecated, use window_shift instead", "stride")
def sliding_window_batch(window_size,
                         stride=None,
                         window_shift=None,
                         window_stride=1):
  """A sliding window over a dataset.

  This transformation passes a sliding window over this dataset. The window
  size is `window_size`, the stride of the input elements is `window_stride`,
  and the shift between consecutive windows is `window_shift`. If the
  remaining elements cannot fill up the sliding window, this transformation
  will drop the final smaller element. For example:

  ```python
  # NOTE: The following examples use `{ ... }` to represent the
  # contents of a dataset.
  a = { [1], [2], [3], [4], [5], [6] }

  a.apply(sliding_window_batch(window_size=3)) ==
  { [[1], [2], [3]], [[2], [3], [4]], [[3], [4], [5]], [[4], [5], [6]] }

  a.apply(sliding_window_batch(window_size=3, window_shift=2)) ==
  { [[1], [2], [3]], [[3], [4], [5]] }

  a.apply(sliding_window_batch(window_size=3, window_stride=2)) ==
  { [[1], [3], [5]], [[2], [4], [6]] }
  ```

  Args:
    window_size: A `tf.int64` scalar `tf.Tensor`, representing the number of
      elements in the sliding window. It must be positive.
    stride: (Optional.) A `tf.int64` scalar `tf.Tensor`, representing the
      forward shift of the sliding window in each iteration. The default is
      `1`. It must be positive. Deprecated alias for `window_shift`.
    window_shift: (Optional.) A `tf.int64` scalar `tf.Tensor`, representing
      the forward shift of the sliding window in each iteration. The default
      is `1`. It must be positive.
    window_stride: (Optional.) A `tf.int64` scalar `tf.Tensor`, representing
      the stride of the input elements in the sliding window. The default is
      `1`. It must be positive.

  Returns:
    A `Dataset` transformation function, which can be passed to
    `tf.data.Dataset.apply`.

  Raises:
    ValueError: if invalid arguments are provided.
  """
  # `stride` is a deprecated alias for `window_shift`; they are mutually
  # exclusive, and the effective shift defaults to 1.
  if stride is not None and window_shift is not None:
    raise ValueError("Cannot specify both `stride` and `window_shift`")
  if window_shift is None:
    window_shift = 1 if stride is None else stride

  def _apply_fn(dataset):
    return _SlideDataset(dataset, window_size, window_shift, window_stride)

  return _apply_fn
|
bonniejools/cabot | cabot/cabotapp/calendar.py | Python | mit | 638 | 0 | import requests
from django.conf import s | ettings
from icalendar import Calendar
def get_calendar_data():
    """Download the configured iCal feed and parse it into a Calendar."""
    response = requests.get(settings.CALENDAR_ICAL_URL)
    return Calendar.from_ical(response.content)
def get_events():
    """Return a dict per VEVENT in the feed (start/end/summary/uid)."""
    return [
        {
            'start': component.decoded('dtstart'),
            'end': component.decoded('dtend'),
            'summary': component.decoded('summary'),
            'uid': component.decoded('uid'),
        }
        for component in get_calendar_data().walk()
        if component.name == 'VEVENT'
    ]
|
helixyte/TheLMA | thelma/tests/entity/test_experiment.py | Python | mit | 3,511 | 0 | import pytest
from everest.repositories.rdb.testing import check_attributes
from everest.repositories.rdb.testing import persist
from thelma.tests.entity.conftest import TestEntityBase
class TestExperimentEntity(TestEntityBase):
    """Entity tests for Experiment (init, equality, persistence)."""
    def test_init(self, experiment_fac):
        exp = experiment_fac()
        check_attributes(exp, experiment_fac.init_kw)
        # A freshly created experiment has no racks yet.
        assert len(exp.experiment_racks) == 0
    @pytest.mark.parametrize('kw1,kw2,result',
                             [(dict(id=-1), dict(id=-1), True),
                              (dict(id=-1), dict(id=-2), False)])
    def test_equality(self, experiment_fac, experiment_design_fac, plate_fac,
                      kw1, kw2, result):
        # Equality is exercised across differing designs and source racks.
        ed1 = experiment_design_fac(**kw1)
        ed2 = experiment_design_fac(**kw2)
        rack1 = plate_fac(**kw1)
        rack2 = plate_fac(**kw2)
        exp1 = experiment_fac(experiment_design=ed1, source_rack=rack1)
        exp2 = experiment_fac(experiment_design=ed2, source_rack=rack2)
        exp3 = experiment_fac(experiment_design=ed2, source_rack=rack1)
        exp4 = experiment_fac(experiment_design=ed1, source_rack=rack2)
        assert (exp1 == exp2) is result
        assert (exp1 == exp3) is result
        assert (exp1 == exp4) is result
    def test_persist(self, nested_session, experiment_fac,
                     experiment_job_fac):
        exp = experiment_fac()
        # FIXME: Working around the circular dependency of experiment and
        # experiment job here.
        exp_job = experiment_job_fac(experiments=[exp])
        kw = experiment_fac.init_kw
        kw['job'] = exp.job
        exp.job = exp_job
        persist(nested_session, exp, kw, True)
class TestExperimentRackEntity(TestEntityBase):
    """Entity tests for ExperimentRack."""
    def test_init(self, experiment_rack_fac):
        exp_r = experiment_rack_fac()
        check_attributes(exp_r, experiment_rack_fac.init_kw)
class TestExperimentDesignEntity(TestEntityBase):
    """Entity tests for ExperimentDesign."""
    def test_init(self, experiment_design_fac):
        exp_dsgn = experiment_design_fac()
        check_attributes(exp_dsgn, experiment_design_fac.init_kw)
    def test_persist(self, nested_session, experiment_design_fac):
        exp_design = experiment_design_fac()
        persist(nested_session, exp_design, experiment_design_fac.init_kw,
                True)
class TestExperimentDesignRackEntity(TestEntityBase):
    """Entity tests for ExperimentDesignRack."""
    def test_init(self, experiment_design_rack_fac):
        exp_dr = experiment_design_rack_fac()
        check_attributes(exp_dr, experiment_design_rack_fac.init_kw)
class TestExperimentMetadataEntity(TestEntityBase):
    """Entity tests for ExperimentMetadata (init, equality, persistence)."""
    def test_init(self, experiment_metadata_fac):
        em = experiment_metadata_fac()
        check_attributes(em, experiment_metadata_fac.init_kw)
    @pytest.mark.parametrize('kw1,kw2,result',
                             [(dict(label='em1'), dict(label='em1'), True),
                              (dict(label='em1'), dict(label='em2'), False)])
    def test_equality(self, subproject_fac, experiment_metadata_fac,
                      kw1, kw2, result):
        # Metadata equality is driven by the label (and subproject).
        sp1 = subproject_fac(**kw1)
        sp2 = subproject_fac(**kw2)
        em1 = experiment_metadata_fac(subproject=sp1, **kw1)
        em2 = experiment_metadata_fac(subproject=sp2, **kw2)
        assert (em1 == em2) is result
    def test_persist(self, nested_session, experiment_metadata_fac):
        exp_metadata = experiment_metadata_fac()
        persist(nested_session, exp_metadata, experiment_metadata_fac.init_kw,
                True)
|
wallarelvo/keeper | keeper/point.py | Python | apache-2.0 | 1,627 | 0.000615 |
import math
import random
class Point(object):
    """A point in 3-D space; ``z`` defaults to 0 for 2-D use.

    Points with equal (x, y, z) compare equal and share a hash, so they
    can be used as dict keys and set members.
    """

    def __init__(self, x, y, z=0):
        self.x = x
        self.y = y
        self.z = z

    def get_x(self):
        """Return the x coordinate."""
        return self.x

    def get_y(self):
        """Return the y coordinate."""
        return self.y

    def get_z(self):
        """Return the z coordinate."""
        return self.z

    def set_x(self, x):
        """Set the x coordinate; returns self for chaining."""
        self.x = x
        return self

    def set_y(self, y):
        """Set the y coordinate; returns self for chaining."""
        self.y = y
        return self

    def set_z(self, z):
        """Set the z coordinate; returns self for chaining."""
        self.z = z
        return self

    def dist_to(self, other_point):
        """Return the Euclidean distance to ``other_point``."""
        return math.sqrt(
            pow(self.x - other_point.x, 2) +
            pow(self.y - other_point.y, 2) +
            pow(self.z - other_point.z, 2)
        )

    def to_unit_vector(self):
        """Return a new unit-length Point in the same direction.

        The zero vector maps to the zero vector (no division by zero).
        """
        mag = self.dist_to(Point(0, 0, 0))
        if mag == 0:
            return Point(0, 0, 0)
        else:
            return Point(self.x / mag, self.y / mag, self.z / mag)

    def to_list(self):
        """Return the coordinates as ``[x, y, z]``."""
        return [self.x, self.y, self.z]

    def __str__(self):
        return "X: {0}, Y: {1}, Z: {2}".format(self.x, self.y, self.z)

    def __repr__(self):
        return "Point({0}, {1}, {2})".format(self.x, self.y, self.z)

    def __hash__(self):
        # Consistent with __eq__: equal coordinates -> equal str -> equal hash.
        return hash(str(self))

    def __eq__(self, val):
        # BUG FIX: a bare `except:` here swallowed *everything*, including
        # KeyboardInterrupt; only AttributeError (non-point operand) is
        # expected.
        try:
            return val.x == self.x and val.y == self.y and val.z == self.z
        except AttributeError:
            return False
def get_random_point(width, height):
    """Return a Point with integer coordinates in [0, width] x [0, height]."""
    return Point(random.randint(0, width), random.randint(0, height))
def get_random_point_3d(width, height, altitude):
    """Return a random 3-D Point with z drawn from [0, altitude]."""
    point = get_random_point(width, height)
    # set_z returns the same Point instance, so this is a single object.
    return point.set_z(random.randint(0, altitude))
|
saullocastro/pyNastran | pyNastran/f06/dev/f06_classes.py | Python | lgpl-3.0 | 904 | 0.002212 | from six import iteritems
class MaxDisplacement(object):
    """Holds the MAXIMUM DISPLACEMENTS table of an F06 file.

    Each input row is ``[subcase_id, T1, T2, T3, R1, R2, R3]``; the
    translations (T*) and rotations (R*) are stored per subcase id.
    """
    def __init__(self, data):
        self.translations = {}
        self.rotations = {}
        for line in data:
            sid = line[0]
            self.translations[sid] = line[1:4]
            self.rotations[sid] = line[4:]

    def write_f06(self, page_stamp='%i', pageNum=1):
        """Render the table as F06 text, ending with the page stamp line."""
        msg = ['0                                                  MAXIMUM  DISPLACEMENTS',
               '  SUBCASE/',
               '  DAREA ID        T1             T2             T3             R1             R2             R3']
        for sid, trans in sorted(self.translations.items()):
            rot = self.rotations[sid]
            msg.append('0 %8i  %13.8E %13.8E %13.8E %13.8E %13.8E %13.8E' %
                       (tuple([sid] + trans + rot)))
        # BUG FIX: this referenced the undefined name `page_num`
        # (NameError); the parameter is `pageNum`.
        msg.append(page_stamp % pageNum)
        return '\n'.join(msg)
|
etkirsch/legends-of-erukar | erukar/content/conditions/magical/__init__.py | Python | agpl-3.0 | 394 | 0 | from .AugmentedWeapon import AugmentedWeapon
from .Cloaked import Cloaked
fr | om .Ethereal import Ethereal
from .Muted import Muted
from .TemporaryIllumination import TemporaryIllumination
from .Undead import Undead
from .HealthDrain import HealthDrain
__all__ = [
"AugmentedWeapon",
"Cloa | ked",
"Ethereal",
"Muted",
"TemporaryIllumination",
"Undead",
"HealthDrain"
]
|
MaralAfris/wasp-challenge-lth-team2 | planning/src/world.py | Python | gpl-3.0 | 12,092 | 0.003225 | import sys
from settings import *
from map import *
from copy import deepcopy
import numpy as np
class World(object):
    def __init__(self, agents, waypoints, boxes, persons):
        """Hold the planning world state: agents, waypoints, boxes, persons."""
        self.agents = agents
        self.waypoints = waypoints
        self.boxes = boxes
        self.persons = persons
    def add_map_info(self, map):
        """Merge mesh information from `map` into the world.

        Relocates waypoints the map moved, adds mesh-only nodes as new
        waypoints, and builds the (deduplicated, undirected) edge list.
        NOTE: uses dict.iteritems(), i.e. Python 2.
        """
        # Apply the map's point relocations to matching waypoints.
        for point,relocate in map.moved_points.iteritems():
            for waypoint in self.waypoints:
                if waypoint.point == point:
                    waypoint.point = relocate
        # Add every mesh node that is not already a waypoint.
        wp_as_nodes = set()
        for wp in self.waypoints:
            wp_as_nodes.add(wp.point)
        mesh = 1
        for point in map.nodes:
            if point not in wp_as_nodes:
                self.waypoints.append(Waypoint('mesh'+str(mesh), point, PointType.mesh))
                mesh += 1
        # Build undirected edges, visiting each neighbor pair only once.
        self.edges = []
        visited = set()
        for i,node in enumerate(map.nodes):
            for j in map.neighbors[i]:
                neighbor = map.nodes[j]
                if (i,j) in visited:
                    continue
                visited.add((i,j))
                visited.add((j,i))
                self.edges.append(Edge(self.point_to_waypoint(node), self.point_to_waypoint(neighbor)))
@classmethod
def from_json(cls, json_file):
import json
| with open(json_file) as data_file:
data = json.load(data_file)
agents = []
waypoints = []
boxes = []
persons = []
way | point_dict = {}
names = set()
def check_name(name):
if name in names:
raise Exception("names must be unique among all objects (yahsp3 limitation)" \
+ ", offender: " + name)
names.add(name)
def get_location(data, source):
if data is None:
return None
if isinstance(data, basestring):
return waypoint_dict[data]
else:
data = (data[0], data[1])
for wp in waypoints:
if np.hypot(data[0]-wp.point[0],data[1]-wp.point[1]) \
< settings.mesh['initial_min_dist']:
print('info: joining waypoint ' + str(data) + ' with ' + str(wp.point))
return wp
wp = Waypoint(source + "_wp", data, PointType.location)
waypoints.append(wp)
return wp
if "waypoints" in data:
for waypoint in data["waypoints"]:
x = waypoint["x"]
y = waypoint["y"]
if "type" in waypoint:
waypoint_type = PointType.from_string(waypoint["type"])
else:
waypoint_type = PointType.initial
name = waypoint["name"]
wp = Waypoint(name, (x, y), PointType.initial)
waypoints.append(wp)
waypoint_dict[name] = wp
check_name(name)
for agent in data["agents"]:
if "carrying" not in agent:
carrying = None
else:
carrying = agent["carrying"]
name = agent["name"]
agent_type = agent["agent_type"]
location = get_location(agent["location"], name)
agents.append(Agent(name, agent_type, location, carrying))
for box in data["boxes"]:
name = box["name"]
location = get_location(box["location"], name)
boxes.append(Box(name, location, box["free"]))
for person in data["persons"]:
name = person["name"]
location = get_location(person["location"], name)
persons.append(Person(name, location, person["handled"]))
# waypoints must be sorted to make virtual waypoints consistently generated
waypoints.sort(key=lambda waypoint: waypoint.name)
obj = cls(agents, waypoints, boxes, persons)
return obj
def points(self):
points = []
for waypoint in self.waypoints:
points.append(waypoint.point)
return points
    def to_json(self, json_file):
        """Serialize the world back to a JSON scenario file.

        Only 'initial' waypoints are written; mesh/virtual waypoints are
        regenerated on load.  NOTE: `print >> f` means Python 2.
        """
        import json
        data = {}
        data['agents'] = []
        for agent in self.agents:
            d = {}
            d['name'] = agent.name
            d['agent_type'] = agent.agent_type
            # Locations are written as raw [x, y] coordinates.
            d['location'] = [agent.location.point[0], agent.location.point[1]]
            d['carrying'] = agent.carrying
            data['agents'].append(d)
        data['waypoints'] = self._list_as_dict(filter(lambda wp: wp.point_type == PointType.initial, self.waypoints))
        data['boxes'] = self._list_as_dict(self.boxes)
        data['persons'] = self._list_as_dict(self.persons)
        j = json.dumps(data, indent=4)
        f = open(json_file, 'w')
        print >> f, j
        f.close()
def agent(self, name):
for agent in self.agents:
if agent.name == name:
return agent
raise Exception('unknown agent ' + name)
def point_to_waypoint(self, point):
for waypoint in self.waypoints:
if waypoint.point == point:
return waypoint
raise Exception('point not found: ' + str(point))
def waypoint(self, name):
for waypoint in self.waypoints:
if waypoint.name == name or waypoint.name + '_air' == name:
return waypoint
raise Exception('unknown waypoint ' + name)
def box(self, name):
for box in self.boxes:
if box.name == name:
return box
raise Exception('unknown box ' + name)
def person(self, name):
for person in self.persons:
if person.name == name:
return person
raise Exception('unknown person ' + name)
def generate_problem(self, problem_file):
f = open(problem_file, 'w')
f.write('(define (problem emergency-template)\n')
f.write('(:domain emergency)\n')
f.write('(:objects\n')
for agent in self.agents:
f.write(' ' + agent.name + ' - ' + agent.agent_type + '\n')
empty_waypoint_set = set()
occupied_waypoint_set = set()
f.write(' ')
for waypoint in self.waypoints:
f.write(waypoint.name + ' ')
empty_waypoint_set.add(waypoint.name)
f.write('- waypoint\n')
f.write(' ')
for waypoint in self.waypoints:
f.write(waypoint.as_air().name + ' ')
empty_waypoint_set.add(waypoint.as_air().name)
f.write('- airwaypoint\n')
f.write(' ')
for box in self.boxes:
f.write(box.name + ' ')
f.write('- box\n')
f.write(' ')
for person in self.persons:
f.write(person.name + ' ')
f.write('- person)\n')
f.write('\n')
f.write('(:init\n')
def print_edge(fname, wp1, wp2, dist):
f.write(' (= (' + fname + ' ' + wp1.name + \
' ' + wp2.name + ') ' + str(dist) + ')\n')
for edge in self.edges:
dist = edge.p1.dist(edge.p2)
tdist = settings.plan['turtle_delay'] + dist / settings.plan['turtle_speed']
ddist = settings.plan['drone_delay'] + dist / settings.plan['drone_speed']
print_edge('move-duration', edge.p1, edge.p2, tdist)
print_edge('move-duration', edge.p2, edge.p1, tdist)
print_edge('fly-duration', edge.p1.as_air(), edge.p2.as_air(), ddist)
print_edge('fly-duration', edge.p2.as_air(), edge.p1.as_air(), ddist)
f.write('\n')
for waypoint in self.waypoints:
f.write(' (over ' + waypoint.as_air().name + ' ' + waypoint.name + ')\n')
f.write('\n')
for box in self.boxes:
if box.free:
f.write(' (free ' + box.name+')\n')
f.write(' (at ' + box.name + ' ' + box.location.name + ')\n')
f.write('\n')
for agent in self.agents:
if agent.carrying is None:
f.write(' (empty ' + agent.name + ')\n')
|
pabigot/pyxb | tests/drivers/test-mg-sequence.py | Python | apache-2.0 | 8,660 | 0.008199 | # -*- coding: utf-8 -*-
import logging
if __name__ == '__main__':
logging.basicConfig()
_log = logging.getLogger(__name__)
import pyxb.binding.generate
import pyxb.utils.domutils
from pyxb.utils import six
from xml.dom import Node
import os.path
schema_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../schemas/test-mg-sequence.xsd'))
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
from pyxb.utils import domutils
def ToDOM (instance, tag=None):
    # Serialize a binding instance and return the document element.
    # NOTE(review): `tag` is unused; presumably kept for signature parity
    # with other test drivers.
    return instance.toDOM().documentElement
import unittest
import collections
class TestMGSeq (unittest.TestCase):
    def setUp (self):
        """Remember the pyxb.binding.basis log level so tests can mute it."""
        # Hide the warning about failure to convert DOM node {}third
        # to a binding
        self.__basis_log = logging.getLogger('pyxb.binding.basis')
        self.__basis_loglevel = self.__basis_log.level
    def tearDown (self):
        # Restore the log level changed in setUp/testBad.
        self.__basis_log.level = self.__basis_loglevel
    def testBad (self):
        """A wrong element tag in the sequence must be rejected."""
        # Second is wrong element tag
        # Hide warning about failure to convert
        self.__basis_log.level = logging.ERROR
        xml = '<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><second/><third/><fourth_0_2/></ns1:wrapper>'
        dom = pyxb.utils.domutils.StringToDOM(xml)
        self.assertRaises(UnrecognizedContentError, wrapper.createFromDOM, dom.documentElement)
def testBasics (self):
xmlt = six.u('<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><second_opt/><third/><fourth_0_2/></ns1:wrapper>')
xmld = xmlt.encode('utf-8')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
instance = wrapper.createFromDOM(dom.documentElement)
self.assertTrue(isinstance(instance.first, sequence._ElementMap['first'].elementBinding().typeDefinition()))
self.assertTrue(isinstance(instance.second_opt, sequence._ElementMap['second_opt'].elementBinding().typeDefinition()))
self.assertTrue(isinstance(instance.third, sequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assertTrue(isinstance(instance.fourth_0_2, collections.MutableSequence))
self.assertEqual(1, len(instance.fourth_0_2))
self.assertTrue(isinstance(instance.fourth_0_2[0], sequence._ElementMap['fourth_0_2'].elementBinding().typeDefinition()))
self.assertEqual(ToDOM(instance).toxml("utf-8"), xmld)
def testMultiplesAtEnd (self):
xmlt = six.u('<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><third/><fourth_0_2/><fourth_0_2/></ns1:wrapper>')
xmld = xmlt.encode('utf-8')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
instance = wrapper.createFromDOM(dom.documentElement)
self.assertTrue(isinstance(instance.first, sequence._ElementMap['first'].elementBinding().typeDefinition()))
self.assertTrue(instance.second_opt is None)
self.assertTrue(isinstance(instance.third, sequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assertTrue(isinstance(instance.fourth_0_2, collections.MutableSequence))
self.assertEqual(2, len(instance.fourth_0_2))
self.assertTrue(isinstance(instance.fourth_0_2[0], sequence._ElementMap['fourth_0_2'].elementBinding().typeDefinition()))
self.assertEqual(ToDOM(instance).toxml("utf-8"), xmld)
def testMultiplesInMiddle (self):
xmlt = six.u('<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/><second_multi/><second_multi/><third/></ns1:altwrapper>')
xmld = xmlt.encode('utf-8')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
instance = altwrapper.createFromDOM(dom.documentElement)
self.assertTrue(isinstance(instance.first, collections.MutableSequence))
self.assertEqual(1, len(instance.first))
self.assertEqual(2, len(instance.second_multi))
self.assertTrue(isinstance(instance.third, altsequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assertEqual(ToDOM(instance).toxml("utf-8"), xmld)
def testMultiplesAtStart (self):
xmlt = six.u('<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/><first/><third/></ns1:altwrapper>')
xmld = xmlt.encode('utf-8')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
instance = altwrapper.createFromDOM(dom.documentElement)
self.assertTrue(isinstance(instance.first, collections.MutableSequence))
self.assertEqual(2, len(instance.first))
self.assertEqual(0, len(instance.second_multi))
self.assertTrue(isinstance(instance.third, altsequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assertEqual(ToDOM(instance).toxml("utf-8"), xmld)
instance = altwrapper(first=[ altsequence._ElementMap['first'].elementBinding()(), altsequence._ElementMap['first'].elementBinding()() ], third=altsequence._ElementMap['third'].elementBinding()())
self.assertEqual(ToDOM(instance).toxml("utf-8"), xmld)
def testMissingInMiddle (self):
xmlt = six.u('<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><third/></ns1:wrapper>')
xmld = xmlt.encode('utf-8')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
instance = wrapper.createFromDOM(dom.documentElement)
self.assertTrue(isinstance(instance.first, sequence._ElementMap['first'].elementBinding().typeDefinition()))
self.assertTrue(instance.second_opt is None)
self.assertTrue(isinstance(instance.third, sequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assertTrue(isinstance(instance.fourth_0_2, collections.MutableSequence))
self.assertEqual(0, len(instance.fourth_0_2))
self.assertEqual(ToDOM(insta | nce).toxml("utf-8"), xmld)
def testMissingAtStart (self):
xmlt = six.u('<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><third/></ns1:altwrapper>')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
self.assertRaises(UnrecognizedContentError, altwrapper.createFromDOM, dom.documentElement)
instance = altwrapper(third=altsequence._ElementMap['third'].elementBinding()())
self.assertRaises(pyxb.IncompleteElem | entContentError, ToDOM, instance)
def testMissingAtEndLeadingContent (self):
xmlt = six.u('<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/></ns1:altwrapper>')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
self.assertRaises(IncompleteElementContentError, altwrapper.createFromDOM, dom.documentElement)
def testMissingAtEndNoContent (self):
xmlt = six.u('<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"></ns1:altwrapper>')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
self.assertRaises(IncompleteElementContentError, altwrapper.createFromDOM, dom.documentElement)
def testTooManyAtEnd (self):
xmlt = six.u('<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><third/><fourth_0_2/><fourth_0_2/><fourth_0_2/></ns1:wrapper>')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
self.assertRaises(UnrecognizedContentError, wrapper.createFromDOM, dom.documentElement)
def testTooManyAtStart (self):
xmlt = six.u('<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/><first/><first/><third/></ns1:altwrapper>')
dom = pyxb.utils.domutils.StringToDOM(xmlt)
self.assertRaises(UnrecognizedContentError, altwrapper.createFromDOM, dom.documentElement)
instance = altwrapper(first=[ altsequence._ElementMap['first'].elementBinding()(), altsequence._ElementMap['first'].elementBinding()(), altsequence._ElementMap['first'].elementBinding()() ], third=altsequence._ElementMap['third'].elementBinding()())
self.assertRaises(pyxb.UnprocessedElementContentError, ToDOM, instance)
if sys.version_info[:2] >= (2, 7):
with self.assertRaises(UnprocessedElementContentError) as cm:
instance.toxml('utf-8')
# Verify the exception tells us what was being processed
self.assertEqual(instance, cm.exception.instance)
# Verify the exception tells us what was left over
first_ed = altsequence |
KennyLv/learnPython | src/24.modules.multiprocessing_Pipe.py | Python | gpl-2.0 | 1,154 | 0.020833 | # Multiprocessing with Pipe
# Written by Vamei
import multiprocessing as mul
def proc1(pipe):
    """Child entry point: greet the peer, then print whatever comes back."""
    pipe.send('hello')
    reply = pipe.recv()
    print('proc1 rec:', reply)
def proc2(pipe):
    """Child entry point: print the incoming message, then answer it."""
    incoming = pipe.recv()
    print('proc2 rec:', incoming)
    pipe.send('hello, too')
if __name__ == '__main__':
    # A Pipe can be half-duplex or full-duplex. multiprocessing.Pipe(duplex=False)
    # creates a one-way pipe (the default is two-way): a half-duplex pipe only
    # lets one end send, while a full-duplex pipe allows sending from both ends.
    # Pipe() returns a pair of Connection objects, one for each end.
    pipe = mul.Pipe()
    # send() transmits an object from one end; recv() receives it at the other.
    # Pass an end of the pipe to process 1
    p1 = mul.Process(target=proc1, args=(pipe[0],))
    # Pass the other end of the pipe to process 2
    p2 = mul.Process(target=proc2, args=(pipe[1],))
    p1.start()
    p2.start()
    # Wait for both children so the parent does not exit before they finish.
    p1.join()
    p2.join()
apple/swift-clang | utils/check_cfc/check_cfc.py | Python | apache-2.0 | 14,638 | 0.002664 | #!/usr/bin/env python
"""Check CFC - Check Compile Flow Consistency
This is a compiler wrapper for testing that code generation is consistent with
different compilation processes. It checks that code is not unduly affected by
compiler options or other changes which should not have side effects.
To use:
-Ensure that the compiler under test (i.e. clang, clang++) is on the PATH
-On Linux copy this script to the name of the compiler
e.g. cp check_cfc.py clang && cp check_cfc.py clang++
-On Windows use setup.py to generate check_cfc.exe and copy that to clang.exe
and clang++.exe
-Enable the desired checks in check_cfc.cfg (in the same directory as the
wrapper)
e.g.
[Checks]
dash_g_no_change = true
dash_s_no_change = false
-The wrapper can be run using its absolute path or added to PATH before the
compiler under test
e.g. export PATH=<path to check_cfc>:$PATH
-Compile as normal. The wrapper intercepts normal -c compiles and will return
non-zero if the check fails.
e.g.
$ clang -c test.cpp
Code difference detected with -g
--- /tmp/tmp5nv893.o
+++ /tmp/tmp6Vwjnc.o
@@ -1 +1 @@
- 0: 48 8b 05 51 0b 20 00 mov 0x200b51(%rip),%rax
+ 0: 48 39 3d 51 0b 20 00 cmp %rdi,0x200b51(%rip)
-To run LNT with Check CFC specify the absolute path to the wrapper to the --cc
and --cxx options
e.g.
lnt runtest nt --cc <path to check_cfc>/clang \\
--cxx <path to check_cfc>/clang++ ...
To add a new check:
-Create a new subclass of WrapperCheck
-Implement the perform_check() method. This should perform the alternate compile
and do the comparison.
-Add the new check to check_cfc.cfg. The check has the same name as the
subclass.
"""
from __future__ import absolute_import, division, print_function
import imp
import os
import platform
import shutil
import subprocess
import sys
import tempfile
try:
import configparser
except ImportError:
import ConfigParser as configparser
import io
import obj_diff
def is_windows():
    """Return True when the host operating system is Windows."""
    system_name = platform.system()
    return system_name == 'Windows'
class WrapperStepException(Exception):
    """Exception type to be used when a step other than the original compile
    fails.

    Attributes:
        msg: human-readable description of the failing step.
        stdout: captured standard output of the failed step.
        stderr: captured standard error of the failed step.
    """
    def __init__(self, msg, stdout, stderr):
        # Forward the message to Exception so str()/repr() and pickling
        # carry it (the original left str(e) empty).
        super(WrapperStepException, self).__init__(msg)
        self.msg = msg
        self.stdout = stdout
        self.stderr = stderr
class WrapperCheckException(Exception):
    """Exception type to be used when a comparison check fails.

    Attributes:
        msg: human-readable description of the failed check.
    """
    def __init__(self, msg):
        # Forward the message to Exception so str()/repr() and pickling
        # carry it (the original left str(e) empty).
        super(WrapperCheckException, self).__init__(msg)
        self.msg = msg
def main_is_frozen():
    """Return True when running as a frozen (e.g. py2exe) executable.

    NOTE(review): relies on the legacy ``imp`` module (removed in
    Python 3.12); acceptable for the Python 2 era this wrapper targets.
    """
    return (hasattr(sys, "frozen") or # new py2exe
            hasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze
def get_main_dir():
    """Get the directory that the script or executable is located in."""
    if main_is_frozen():
        # Frozen executable: argv[0] may be unreliable, use the exe path.
        return os.path.dirname(sys.executable)
    return os.path.dirname(sys.argv[0])
def remove_dir_from_path(path_var, directory):
    """Return *path_var* (a PATH-style string) with *directory* removed.

    Entries are compared after path normalisation and case folding so
    that equivalent spellings of the same directory are matched.
    """
    def _canonical(p):
        # Normalise separators and case so equivalent paths compare equal.
        return os.path.normpath(os.path.normcase(p))
    target = _canonical(directory)
    kept = []
    for entry in path_var.split(os.pathsep):
        if _canonical(entry) != target:
            kept.append(entry)
    return os.pathsep.join(kept)
def path_without_wrapper():
    """Return the PATH variable modified to remove the path to this program.

    This ensures that invoking the compiler by name finds the real
    compiler under test instead of recursively re-invoking this wrapper.
    """
    scriptdir = get_main_dir()
    path = os.environ['PATH']
    return remove_dir_from_path(path, scriptdir)
def flip_dash_g(args):
    """Toggle the presence of '-g' in *args*.

    Returns a new list: if any '-g' occurs it is stripped out entirely,
    otherwise a single '-g' is appended. *args* is never mutated.
    """
    stripped = [arg for arg in args if arg != '-g']
    if len(stripped) != len(args):
        # At least one '-g' was present: return the list without them.
        return stripped
    # No '-g' found: append one.
    return stripped + ['-g']
def derive_output_file(args):
    """Derive the object-file name from the single input file in *args*.

    Returns the input file's path with its extension replaced by '.o',
    or None when there is not exactly one recognised input file.
    """
    infile = get_input_file(args)
    if infile is None:
        return None
    else:
        # Swap the source suffix for '.o' (e.g. 'foo.c' -> 'foo.o').
        return '{}.o'.format(os.path.splitext(infile)[0])
def get_output_file(args):
    """Return the output file specified in *args*, or None when absent.

    Handles both the separate form ('-o', 'file') and the conjoined
    form ('-ofile'). A trailing bare '-o' with no value is malformed
    and trips the assertion below, as in the original implementation.
    """
    tokens = iter(args)
    for token in tokens:
        if token == '-o':
            # Output file given as the following argument.
            missing = object()
            value = next(tokens, missing)
            assert value is not missing
            return value
        if token.startswith('-o'):
            # Output file conjoined with the flag, e.g. '-ofoo.o'.
            return token[2:]
    return None
def is_output_specified(args):
    """Return True when an output file ('-o') is specified in *args*."""
    return get_output_file(args) is not None
def replace_output_file(args, new_name):
    """Replace the output file named in *args* with *new_name*.

    Handles both the separate form ('-o', 'file') and the conjoined form
    ('-ofile'). If '-o' appears several times the last occurrence wins.
    *args* is modified in place and also returned.

    Raises:
        ValueError: if no output file is specified in *args*.
    """
    replaceidx = None
    attached = False
    for idx, val in enumerate(args):
        if val == '-o':
            # Separate form: the value is the following argument.
            replaceidx = idx + 1
            attached = False
        elif val.startswith('-o'):
            # Conjoined form: replace this argument itself.
            replaceidx = idx
            attached = True
    if replaceidx is None:
        # ValueError subclasses Exception, so callers that caught the old
        # bare ``raise Exception`` still work, and now get a message.
        raise ValueError('no output file specified in args')
    replacement = '-o' + new_name if attached else new_name
    args[replaceidx] = replacement
    return args
def add_output_file(args, output_file):
    """Return a new argument list with '-o <output_file>' appended.

    Presumes no output file is already specified; *args* is not mutated.
    """
    extended = list(args)
    extended.extend(['-o', output_file])
    return extended
def set_output_file(args, output_file):
    """Set the output file within the arguments.

    Replaces an existing '-o' value, or appends '-o <file>' when none is
    present.  Returns the (possibly new) argument list.
    """
    if is_output_specified(args):
        # Note: replace_output_file also mutates *args* in place.
        args = replace_output_file(args, output_file)
    else:
        args = add_output_file(args, output_file)
    return args
gSrcFileSuffixes = ('.c', '.cpp', '.cxx', '.c++', '.cp', '.cc')

def get_input_file(args):
    """Return the single source-file argument in *args*, or None.

    An argument counts as a source file when, after stripping any
    trailing quote characters and normalising case, it ends with one of
    gSrcFileSuffixes.  None is returned when there is no such argument
    or when there is more than one.
    """
    _QUOTES = ('"', "'")
    def _looks_like_source(candidate):
        # Drop trailing quotes left over from shell-style quoting.
        while candidate.endswith(_QUOTES):
            candidate = candidate[:-1]
        return os.path.normcase(candidate).endswith(gSrcFileSuffixes)
    matches = [arg for arg in args if _looks_like_source(arg)]
    return matches[0] if len(matches) == 1 else None
def set_input_file(args, input_file):
    """Replace the single input file in *args* with *input_file*.

    Mutates *args* in place and returns it.
    """
    infile = get_input_file(args)
    if infile:
        infile_idx = args.index(infile)
        args[infile_idx] = input_file
        return args
    else:
        # Could not find input file; unreachable for call sites that
        # pre-check with get_input_file().  NOTE(review): stripped under
        # ``python -O``, in which case this falls through returning None.
        assert False
def is_normal_compile(args):
    """Check if this is a normal compile which will output an object file rather
    than a preprocess or link. args is a list of command line arguments.

    Returns True only for a plain '-c' compile of a single recognised
    source file, with no bitcode, query or dependency-generation flags.
    """
    compile_step = '-c' in args
    # Bitcode cannot be disassembled in the same way
    bitcode = '-flto' in args or '-emit-llvm' in args
    # Version and help are queries of the compiler and override -c if specified
    query = '--version' in args or '--help' in args
    # Options to output dependency files for make
    dependency = '-M' in args or '-MM' in args
    # Check if the input is recognised as a source file (this may be too
    # strong a restriction)
    input_is_valid = bool(get_input_file(args))
    return compile_step and not bitcode and not query and not dependency and input_is_valid
def run_step(command, my_env, error_on_failure):
    """Run one compilation step and capture its output.

    :param command: argv list for the child process.
    :param my_env: environment mapping for the child process.
    :param error_on_failure: message used if the step exits non-zero.
    :raises WrapperStepException: when the command's return code is non-zero.
    """
    # Need to use shell=True on Windows as Popen won't use PATH otherwise.
    p = subprocess.Popen(command, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, env=my_env, shell=is_windows())
    (stdout, stderr) = p.communicate()
    if p.returncode != 0:
        raise WrapperStepException(error_on_failure, stdout, stderr)
def get_temp_file_name(suffix):
"""Get a temporary file name with a particular suffix. Let the caller be
responsible for deleting it."""
tf = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
tf. |
fkolacek/FIT-VUT | bp-revok/python/lib/python2.7/site-packages/cryptography-0.5.2-py2.7-linux-x86_64.egg/cryptography/hazmat/backends/openssl/dsa.py | Python | apache-2.0 | 7,193 | 0 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes, interfaces
from cryptography.hazmat.primitives.asymmetric import dsa
from cryptography.hazmat.primitives.interfaces import (
DSAParametersWithNumbers, DSAPrivateKeyWithNumbers, DSAPublicKeyWithNumbers
)
@utils.register_interface(interfaces.AsymmetricVerificationContext)
class _DSAVerificationContext(object):
    """Incremental DSA signature verification against a public key.

    Message bytes are fed in via update(); verify() finalizes the digest
    and checks the signature with OpenSSL's DSA_verify.
    """
    def __init__(self, backend, public_key, signature, algorithm):
        self._backend = backend
        self._public_key = public_key
        self._signature = signature
        self._algorithm = algorithm
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)

    def update(self, data):
        """Feed more message bytes into the running digest."""
        self._hash_ctx.update(data)

    def verify(self):
        """Finalize the digest and verify the signature.

        Raises InvalidSignature when the signature does not match.
        """
        self._dsa_cdata = self._backend._ffi.gc(self._public_key._dsa_cdata,
                                                self._backend._lib.DSA_free)
        data_to_verify = self._hash_ctx.finalize()

        # The first parameter passed to DSA_verify is unused by OpenSSL but
        # must be an integer.
        res = self._backend._lib.DSA_verify(
            0, data_to_verify, len(data_to_verify), self._signature,
            len(self._signature), self._public_key._dsa_cdata)

        if res != 1:
            errors = self._backend._consume_errors()
            assert errors
            if res == -1:
                # res == -1 indicates an error while parsing the signature
                # blob, reported through the ASN.1 error library.
                assert errors[0].lib == self._backend._lib.ERR_LIB_ASN1
            raise InvalidSignature
@utils.register_interface(interfaces.AsymmetricSignatureContext)
class _DSASignatureContext(object):
    """Incremental DSA signing context backed by OpenSSL's DSA_sign."""
    def __init__(self, backend, private_key, algorithm):
        self._backend = backend
        self._private_key = private_key
        self._algorithm = algorithm
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
    def update(self, data):
        """Feed more message bytes into the running digest."""
        self._hash_ctx.update(data)
    def finalize(self):
        """Finish the digest and return the signature bytes."""
        data_to_sign = self._hash_ctx.finalize()
        # DSA_size gives an upper bound on the signature length; allocate
        # a buffer of that size and let DSA_sign report the actual length.
        sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata)
        sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len)
        buflen = self._backend._ffi.new("unsigned int *")
        # The first parameter passed to DSA_sign is unused by OpenSSL but
        # must be an integer.
        res = self._backend._lib.DSA_sign(
            0, data_to_sign, len(data_to_sign), sig_buf,
            buflen, self._private_key._dsa_cdata)
        assert res == 1
        assert buflen[0]
        # Truncate the buffer to the length actually written.
        return self._backend._ffi.buffer(sig_buf)[:buflen[0]]
@utils.register_interface(DSAParametersWithNumbers)
class _DSAParameters(object):
    """DSA domain parameters (p, q, g) wrapping an OpenSSL DSA struct."""
    def __init__(self, backend, dsa_cdata):
        self._backend = backend
        self._dsa_cdata = dsa_cdata
    def parameter_numbers(self):
        """Return the parameters as Python integers (DSAParameterNumbers)."""
        return dsa.DSAParameterNumbers(
            p=self._backend._bn_to_int(self._dsa_cdata.p),
            q=self._backend._bn_to_int(self._dsa_cdata.q),
            g=self._backend._bn_to_int(self._dsa_cdata.g)
        )
    def generate_private_key(self):
        """Generate a new private key using these domain parameters."""
        return self._backend.generate_dsa_private_key(self)
@utils.register_interface(DSAPrivateKeyWithNumbers)
class _DSAPrivateKey(object):
    """DSA private key wrapping an OpenSSL DSA struct."""
    def __init__(self, backend, dsa_cdata):
        self._backend = backend
        self._dsa_cdata = dsa_cdata
        # Key size is the bit length of the prime modulus p.
        self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
    @property
    def key_size(self):
        # Bit length of the prime modulus p.
        return self._key_size
    def signer(self, algorithm):
        """Return a signing context using *algorithm* as the digest."""
        return _DSASignatureContext(self._backend, self, algorithm)
    def private_numbers(self):
        """Return the full key material as Python integers."""
        return dsa.DSAPrivateNumbers(
            public_numbers=dsa.DSAPublicNumbers(
                parameter_numbers=dsa.DSAParameterNumbers(
                    p=self._backend._bn_to_int(self._dsa_cdata.p),
                    q=self._backend._bn_to_int(self._dsa_cdata.q),
                    g=self._backend._bn_to_int(self._dsa_cdata.g)
                ),
                y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
            ),
            x=self._backend._bn_to_int(self._dsa_cdata.priv_key)
        )
    def public_key(self):
        """Return the corresponding public key (p, q, g, pub_key copied)."""
        dsa_cdata = self._backend._lib.DSA_new()
        assert dsa_cdata != self._backend._ffi.NULL
        dsa_cdata = self._backend._ffi.gc(
            dsa_cdata, self._backend._lib.DSA_free
        )
        # BN_dup copies each BIGNUM so the new struct owns independent values.
        dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
        dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
        dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
        dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key)
        return _DSAPublicKey(self._backend, dsa_cdata)
    def parameters(self):
        """Return just the domain parameters (p, q, g copied)."""
        dsa_cdata = self._backend._lib.DSA_new()
        assert dsa_cdata != self._backend._ffi.NULL
        dsa_cdata = self._backend._ffi.gc(
            dsa_cdata, self._backend._lib.DSA_free
        )
        # BN_dup copies each BIGNUM so the new struct owns independent values.
        dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
        dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
        dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
        return _DSAParameters(self._backend, dsa_cdata)
@utils.register_interface(DSAPublicKeyWithNumbers)
class _DSAPublicKey(object):
    """DSA public key wrapping an OpenSSL DSA struct."""
    def __init__(self, backend, dsa_cdata):
        self._backend = backend
        self._dsa_cdata = dsa_cdata
        # Key size is the bit length of the prime modulus p.
        self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
    @property
    def key_size(self):
        # Bit length of the prime modulus p.
        return self._key_size
    def verifier(self, signature, algorithm):
        """Return a verification context for *signature* using *algorithm*."""
        return _DSAVerificationContext(
            self._backend, self, signature, algorithm
        )
    def public_numbers(self):
        """Return the public key material as Python integers."""
        return dsa.DSAPublicNumbers(
            parameter_numbers=dsa.DSAParameterNumbers(
                p=self._backend._bn_to_int(self._dsa_cdata.p),
                q=self._backend._bn_to_int(self._dsa_cdata.q),
                g=self._backend._bn_to_int(self._dsa_cdata.g)
            ),
            y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
        )
    def parameters(self):
        """Return just the domain parameters (p, q, g copied)."""
        dsa_cdata = self._backend._lib.DSA_new()
        assert dsa_cdata != self._backend._ffi.NULL
        dsa_cdata = self._backend._ffi.gc(
            dsa_cdata, self._backend._lib.DSA_free
        )
        # BN_dup copies each BIGNUM so the new struct owns independent values.
        dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
        dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
        dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
        return _DSAParameters(self._backend, dsa_cdata)
|
andrewebdev/django-ostinato | ostinato/statemachine/forms.py | Python | mit | 2,636 | 0.000379 | import new
from django import forms
from ostinato.statemachine import InvalidTransition
def sm_form_factory(sm_class, state_field='state'):
    """
    A factory to create a custom StateMachineModelForm class.

    ``sm_class`` is the StateMachine subclass driving the workflow and
    ``state_field`` names the model field that stores the current state.
    The generated form replaces that field with a choice of the actions
    currently available from the state machine.
    """
    class StateMachineModelForm(forms.ModelForm):
        def __init__(self, *args, **kwargs):
            super(StateMachineModelForm, self).__init__(*args, **kwargs)
            sm = sm_class(instance=self.instance, state_field=state_field)
            self.old_state = sm._state
            # The first choice re-selects the current state (a no-op).
            actions = ((sm._state, '-- %s --' % sm.state),)
            for action in sm.actions:
                actions += ((action, action),)
            self.fields[state_field] = forms.ChoiceField(
                choices=actions, label="State/Actions", required=False)
            # We need a custom clean method for the state_field. Since
            # django expects this method to be called ``clean_<field_name>``
            # we will have to dynamically generate it here.
            def clean_action(self):
                """
                Make sure that the selected action does not pass through to
                the actual form cleaned_data, since it can cause validation
                issues on the field.
                """
                self._sm_action = self.cleaned_data[state_field]
                try:
                    return sm.action_result(self._sm_action)
                except InvalidTransition:
                    # Invalid action selected: keep the previous state.
                    return self.old_state
            setattr(self, 'clean_%s' % state_field,
                    new.instancemethod(clean_action, self, None))
        def take_action(self):
            """
            This is where we pass the form values to the StateMachine
            to do its thing.
            """
            sm = sm_class(instance=self.instance, state_field=state_field,
                          state=self.old_state)
            # The ``clean_<state_field>`` method stored the action in self
            if self._sm_action in sm.actions:
                sm.take_action(self._sm_action)
        def save(self, *args, **kwargs):
            """
            Override the save method so that we can perform statemachine
            actions.
            For most simple cases this is all that is required. If however
            you want to do more advanced processing based on the state/action,
            then you should override the save method and call ``take_action()``
            when you are ready.
            """
            self.take_action()
            return super(StateMachineModelForm, self).save(*args, **kwargs)
    return StateMachineModelForm
|
EzyInsights/log4mongo-python | log4mongo/test/test_handlers.py | Python | bsd-3-clause | 6,198 | 0.003872 | from log4mongo.handlers import MongoHandler
from pymongo.errors import PyMongoError
from StringIO import StringIO
import unittest
import logging
import sys
class TestMongoHandler(unittest.TestCase):
    """Integration tests for MongoHandler; require a MongoDB reachable on
    localhost.  Each test uses a throwaway database dropped in tearDown."""
    host_name = 'localhost'
    database_name = 'log4mongo_test'
    collection_name = 'logs_test'
    def setUp(self):
        self.handler = MongoHandler(host=self.host_name, database_name=self.database_name, collection=self.collection_name)
        self.log = logging.getLogger('testLogger')
        self.log.setLevel(logging.DEBUG)
        self.log.addHandler(self.handler)
        # Save the real stderr (the original mistakenly saved sys.stdout) so
        # tearDown restores the correct stream, then capture stderr so tests
        # can assert on logging's internal error output.
        self.old_stderr = sys.stderr
        sys.stderr = StringIO()
    def tearDown(self):
        self.handler.connection.drop_database(self.database_name)
        self.handler.close()
        self.log.removeHandler(self.handler)
        self.log = None
        self.handler = None
        sys.stderr.close()
        sys.stderr = self.old_stderr
    def test_connect(self):
        handler = MongoHandler(host='localhost', database_name=self.database_name, collection=self.collection_name)
        self.assertTrue(isinstance(handler, MongoHandler))
        self.handler.connection.drop_database(self.database_name)
        handler.close()
    def test_connect_failed(self):
        with self.assertRaises(PyMongoError):
            MongoHandler(host='unknow_host', database_name=self.database_name, collection=self.collection_name)
    def test_connect_failed_silent(self):
        handler = MongoHandler(host='unknow_host', database_name=self.database_name, collection=self.collection_name, fail_silently=True)
        self.assertTrue(isinstance(handler, MongoHandler))
        self.handler.connection.drop_database(self.database_name)
        handler.close()
    def test_emit(self):
        self.log.warning('test message')
        document = self.handler.collection.find_one({'message': 'test message', 'level': 'WARNING'})
        self.assertEqual(document['message'], 'test message')
        self.assertEqual(document['level'], 'WARNING')
    def test_emit_exception(self):
        try:
            raise Exception('exc1')
        except Exception:  # narrowed from a bare except; behaviour unchanged
            self.log.exception('test message')
        document = self.handler.collection.find_one({'message': 'test message', 'level': 'ERROR'})
        self.assertEqual(document['message'], 'test message')
        self.assertEqual(document['level'], 'ERROR')
        self.assertEqual(document['exception']['message'], 'exc1')
    def test_emit_fail(self):
        # Break the handler so emit() fails and writes a traceback to stderr.
        self.handler.collection = ''
        self.log.warn('test warning')
        self.assertRegexpMatches(sys.stderr.getvalue(), r"AttributeError: 'str' object has no attribute 'save'")
    def test_email_fail_silent(self):
        # NOTE(review): method-name typo ("email" -> "emit") and the
        # 'test warming' message are kept verbatim to avoid churning test IDs.
        self.handler.fail_silently = True
        self.handler.collection = ''
        self.log.warn('test warming')
        self.assertEqual(sys.stderr.getvalue(), '')
    def test_contextual_info(self):
        self.log.info('test message with contextual info', extra={'ip': '127.0.0.1', 'host': 'localhost'})
        document = self.handler.collection.find_one({'message': 'test message with contextual info', 'level': 'INFO'})
        self.assertEqual(document['message'], 'test message with contextual info')
        self.assertEqual(document['level'], 'INFO')
        self.assertEqual(document['ip'], '127.0.0.1')
        self.assertEqual(document['host'], 'localhost')
    def test_contextual_info_adapter(self):
        adapter = logging.LoggerAdapter(self.log, {'ip': '127.0.0.1', 'host': 'localhost'})
        adapter.info('test message with contextual info')
        document = self.handler.collection.find_one({'message': 'test message with contextual info', 'level': 'INFO'})
        self.assertEqual(document['message'], 'test message with contextual info')
        self.assertEqual(document['level'], 'INFO')
        self.assertEqual(document['ip'], '127.0.0.1')
        self.assertEqual(document['host'], 'localhost')
class TestCappedMongoHandler(TestMongoHandler):
    """Re-run the MongoHandler suite against a capped collection, plus
    capped-specific checks (size bound and capped/non-capped override)."""
    capped_max = 10
    def setUp(self):
        self.handler = MongoHandler(host=self.host_name, database_name=self.database_name,
                                    collection=self.collection_name, capped=True, capped_max=self.capped_max)
        self.log = logging.getLogger('testLogger')
        self.log.setLevel(logging.DEBUG)
        self.log.addHandler(self.handler)
        # Save the real stderr (the original mistakenly saved sys.stdout) so
        # the inherited tearDown restores the correct stream.
        self.old_stderr = sys.stderr
        sys.stderr = StringIO()
    def test_capped(self):
        # collstats reports the capped flag and the configured document cap.
        options = self.handler.db.command('collstats', self.collection_name)
        self.assertEqual(options['max'], 10)
        self.assertEqual(options['capped'], 1)
    def test_capped_max(self):
        # Logging past the cap must leave exactly capped_max documents.
        for i in range(self.capped_max * 2):
            self.log.info('test capped info')
        documents = self.handler.collection.find()
        self.assertEqual(documents.count(), 10)
    def test_override_no_capped_collection(self):
        # Creating no capped handler
        self.handler_no_capped = MongoHandler(host=self.host_name, database_name=self.database_name, collection=self.collection_name)
        self.log.removeHandler(self.handler)
        self.log.addHandler(self.handler_no_capped)
        self.log.info('test info')
        # Creating capped handler
        self.handler_capped = MongoHandler(host=self.host_name, database_name=self.database_name,
                                           collection=self.collection_name, capped=True, capped_max=self.capped_max)
        self.log.addHandler(self.handler)
        self.log.info('test info')
    def test_override_capped_collection(self):
        # Creating capped handler
        self.handler_capped = MongoHandler(host=self.host_name, database_name=self.database_name,
                                           collection=self.collection_name, capped=True, capped_max=self.capped_max)
        self.log.removeHandler(self.handler)
        self.log.addHandler(self.handler)
        self.log.info('test info')
        # Creating no capped handler
        self.handler_no_capped = MongoHandler(host=self.host_name, database_name=self.database_name, collection=self.collection_name)
        self.log.addHandler(self.handler_no_capped)
        self.log.info('test info')
|
ROB-Seismology/oq-hazardlib | openquake/hazardlib/gsim/atkinson_wald_2007.py | Python | agpl-3.0 | 2,419 | 0.000827 | # coding: utf-8
# The Hazard Library
# Copyright (C) 2012 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports :class:`AtkinsonWald2007`.
"""
from __future__ import division
import numpy as np
from openquake.hazardlib.gsim.base import IPE
from openquake.hazardlib import const
from openquake.hazardlib.imt import MMI
class AtkinsonWald2007(IPE):
    """
    Implements the Intensity Prediction Equation (IPE) developed by
    Atkinson and Wald (2007) for California, USA, returning Modified
    Mercalli Intensity (MMI) as a function of magnitude and rupture
    distance.
    MS!  (NOTE(review): presumably surface-wave magnitude -- confirm
    against the original publication.)
    """
    DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.ACTIVE_SHALLOW_CRUST
    DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
        MMI
    ])
    DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.AVERAGE_HORIZONTAL
    DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
        const.StdDev.TOTAL
    ])
    # TODO ! vs30 is declared but currently unused (see compute_site_term).
    REQUIRES_SITES_PARAMETERS = set(('vs30', ))
    REQUIRES_RUPTURE_PARAMETERS = set(('mag',))
    REQUIRES_DISTANCES = set(('rrup', ))
    def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
        """
        See :meth:`superclass method
        <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
        for spec of input and result values.
        """
        # Fixed pseudo-depth (km) combined with rupture distance below.
        h = 14.0
        R = np.sqrt(dists.rrup**2 + h**2)
        # B is a distance taper: zero within 30 km, log10(R/30) beyond.
        B = np.zeros_like(dists.rrup)
        B[R > 30.] = np.log10(R / 30.)[R > 30.]
        mean_mmi = 12.27 + 2.270 * (rup.mag - 6) + 0.1304 * (rup.mag - 6)**2 - 1.30 * np.log10(R) - 0.0007070 * R + 1.95 * B - 0.577 * rup.mag * np.log10(R)
        mean_mmi += self.compute_site_term(sites)
        # Clamp to the valid macroseismic intensity range.
        mean_mmi = mean_mmi.clip(min=1, max=12)
        # Single (total) standard deviation of 0.4 intensity units per site.
        stddevs = np.zeros_like(dists.rrup)
        stddevs.fill(0.4)
        stddevs = stddevs.reshape(1, len(stddevs))
        return mean_mmi, stddevs
    def compute_site_term(self, sites):
        # TODO ! Site amplification not implemented yet; vs30 is unused.
        return 0
|
apexkid/Wikiapiary | apiary/tasks/bot/deletewebsitelogs.py | Python | gpl-2.0 | 837 | 0.003584 | """
Clean up the website log table.
"""
# pylint: disable=C0301,C0103,W1201
from apiary.tasks import BaseApiaryTask
import logging
import datetime
LOGGER = logging.getLogger()
class DeleteWebsiteLogsTask(BaseApiaryTask):
    """Delete old entries from the bot_log."""
    def run(self):
        """Delete apiary_website_logs rows older than eight weeks.

        Returns the number of rows removed.
        """
        sql_query = """
            DELETE FROM
                apiary_website_logs
            WHERE
                log_date < '%s'
        """
        # Keep eight weeks of history; everything older is purged.
        delete_before = datetime.datetime.utcnow() - datetime.timedelta(weeks=8)
        delete_before_str = delete_before.strftime('%Y-%m-%d %H:%M:%S')
        LOGGER.info("Deleting apiary_website_logs before %s." % delete_before_str)
        # String interpolation is acceptable here: the value is a
        # locally-generated timestamp, never user input.
        my_sql = sql_query % (delete_before_str)
        (success, rows_deleted) = self.runSql(my_sql)
        LOGGER.info("Deleted %d rows." % rows_deleted)
        return rows_deleted
|
manojklm/pywinauto-x64 | pywinauto/controls/__init__.py | Python | lgpl-2.1 | 2,175 | 0.001839 | # GUI Application automation and testing library
# Copyright (C) 2006 Mark Mc Mahon
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
"Controls package"
__revision__ = "$Revision$"
from .HwndWrapper import GetDialogPropsFromHandle
from .HwndWrapper import InvalidWindowHandle
# make an alias for the HwndWrapper object as WrapHandle
from .HwndWrapper import HwndWrapper as WrapHandle
# import the control clases - this will register the classes they
# contain
#import custom_controls
from . import common_controls
from . import win32_controls
#
##====================================================================
#def _unittests():
# "Run some tests on the controls"
# from pywinauto import win32functions
#
# "do some basic testing"
# from pywinauto.findwindows import find_windows
# import sys
#
# if len(sys.argv) < 2:
# handle = win32functions.GetDesktopWindow()
# else:
# try:
# handle = int(eval(sys.argv[1]))
#
# except ValueError:
#
# handle = find_windows(
# title_re = "^" + sys.argv[1], class_name = "#32770", )
# #visible_only = False)
#
# if not handle:
# print "dialog not found"
# sys.exit()
#
#
# props = GetDialogPropsFromHandle(handle)
# print len | (props)
# #pprint(GetDialogPropsFromHandle(handle))
#
#if __name__ == "__main__": |
# _unittests()
|
edx/edx-platform | openedx/core/djangoapps/bookmarks/__init__.py | Python | agpl-3.0 | 390 | 0 | """
# lint-amnesty, pylint: disable=dj | ango-not-configured
# lint-amnesty, pylint: disable=django-not-configured
Bookmarks module.
"""
from collections import namedtuple
# Bookmark fields serialized for every request.
DEFAULT_FIELDS = ['id', 'course_id', 'usage_id', 'block_type', 'created']

# Bookmark fields included only when explicitly requested.
OPTIONAL_FIELDS = ['display_name', 'path']

# One node in a bookmark's breadcrumb path.
PathItem = namedtuple('PathItem', ['usage_key', 'display_name'])
|
diagramsoftware/l10n-spain | l10n_es_aeat_mod347/models/mod347.py | Python | agpl-3.0 | 38,108 | 0 | # -*- coding: utf-8 -*-
# © 2004-2011 Pexego Sistemas Informáticos. (http://pexego.es)
# © 2012 NaN·Tic (http://www.nan-tic.com)
# © 2013 Acysos (http://www.acysos.com)
# © 2013 Joaquín Pedrosa Gutierrez (http://gutierrezweb.es)
# © 2014-2015 Serv. Tecnol. Avanzados - Pedro M. Baeza
# (http://www.serviciosbaeza.com)
# © 2016 Antiun Ingenieria S.L. - Antonio Espinosa
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import re
from openerp import fields, models, api, exceptions, _
from openerp.addons import decimal_precision as dp
class L10nEsAeatMod347Report(models.Model):
_inherit = "l10n.es.aeat.report"
_name = "l10n.es.aeat.mod347.report"
_description = "AEAT 347 Report"
_period_yearly = True
_period_quarterly = False
_period_monthly = False
def _get_default_address(self, partner):
"""Get the default invoice address of the partner"""
partner_obj = self.env['res.partner']
address_ids = partner.address_get(['invoice', 'default'])
if address_ids.get('invoice'):
return partner_obj.browse(address_ids['invoice'])
elif address_ids.get('default'):
return partner_obj.browse(address_ids['default'])
else:
return None
def _invoice_amount_get(self, invoices, refunds):
invoice_amount = sum(x.amount_total_wo_irpf for x in invoices)
refund_amount = sum(x.amount_total_wo_irpf for x in refunds)
amount = invoice_amount - refund_amount
if abs(amount) > self.operations_limit:
return amount
return 0
def _cash_amount_get(self, moves):
amount = sum([line.credit for line in moves])
if abs(amount) > self.received_cash_limit:
return amount
return 0
    def _cash_moves_group(self, moves):
        """Group cash move lines by the fiscal year of the invoice their
        reconciliation points at.

        Returns a dict mapping fiscalyear id -> list of move lines.
        Lines whose reconciliation leads to no invoice are dropped.
        """
        cash_moves = {}
        # Group cash move lines by origin operation fiscalyear
        for move_line in moves:
            # FIXME: ugly group by reconciliation invoices, because there
            # isn't any direct relationship between payments and invoice
            invoices = []
            if move_line.reconcile_id:
                for line in move_line.reconcile_id.line_id:
                    if line.invoice:
                        invoices.append(line.invoice)
            elif move_line.reconcile_partial_id:
                for line in move_line.reconcile_partial_id.line_partial_ids:
                    if line.invoice:
                        invoices.append(line.invoice)
            # Remove duplicates
            invoices = list(set(invoices))
            if invoices:
                # Only the first invoice decides the fiscal year; with
                # several reconciled invoices the pick is arbitrary
                # (set order) — NOTE(review): confirm this is intended.
                invoice = invoices[0]
                fy_id = invoice.period_id.fiscalyear_id.id
                if fy_id not in cash_moves:
                    cash_moves[fy_id] = [move_line]
                else:
                    cash_moves[fy_id].append(move_line)
        return cash_moves
def _partner_record_a_create(self, data, vals):
"""Partner record type A: Adquisiciones de bienes y servicios
Create from income (from supplier) invoices
"""
vals = vals.copy()
partner_record_obj = self.env['l10n.es.aeat.mod347.partner_record']
record = False
vals['operation_key'] = 'A'
invoices = data.get('in_invoices', self.env['account.invoice'])
refunds = data.get('in_refunds', self.env['account.invoice'])
amount = self._invoice_amount_get(invoices, refunds)
if amount:
vals['amount'] = amount
vals['invoice_record_ids'] = [
(0, 0, {'invoice_id': x})
for x in (invoices.ids + refunds.ids)]
record = partner_record_obj.create(vals)
return record
    def _partner_record_b_create(self, data, vals):
        """Partner record type B: Entregas de bienes y servicios
        (deliveries of goods and services).

        Created from customer invoices/refunds and received cash
        movements. Returns the list of created/updated partner records.
        """
        vals = vals.copy()
        partner_record_obj = self.env['l10n.es.aeat.mod347.partner_record']
        cash_record_obj = self.env['l10n.es.aeat.mod347.cash_record']
        records = []
        invoice_record = False
        vals['operation_key'] = 'B'
        invoices = data.get('out_invoices', self.env['account.invoice'])
        refunds = data.get('out_refunds', self.env['account.invoice'])
        moves = data.get('cash_moves', self.env['account.move.line'])
        amount = self._invoice_amount_get(invoices, refunds)
        if amount:
            vals['amount'] = amount
            vals['invoice_record_ids'] = [
                (0, 0, {'invoice_id': x})
                for x in (invoices.ids + refunds.ids)]
            invoice_record = partner_record_obj.create(vals)
        if invoice_record:
            records.append(invoice_record)
        # Cash received: only declared if above the cash limit overall.
        if self._cash_amount_get(moves):
            cash_moves = self._cash_moves_group(moves)
            for fy_id in cash_moves.keys():
                # Each fiscal year is re-checked against the limit.
                amount = self._cash_amount_get(cash_moves[fy_id])
                if amount:
                    if (fy_id != self.fiscalyear_id.id or
                            not invoice_record):
                        # Cash from another fiscal year (or no invoice
                        # record to attach to): separate record with a
                        # zero invoiced amount.
                        vals['amount'] = 0.0
                        vals['cash_amount'] = amount
                        vals['origin_fiscalyear_id'] = fy_id
                        partner_record = partner_record_obj.create(vals)
                        if partner_record:
                            records.append(partner_record)
                    else:
                        # Same fiscal year: fold the cash amount into the
                        # existing invoice record.
                        invoice_record.write({
                            'cash_amount': amount,
                            'origin_fiscalyear_id': fy_id,
                        })
                        partner_record = invoice_record
                    # One detail cash record per move line.
                    for line in cash_moves[fy_id]:
                        cash_record_obj.create({
                            'partner_record_id': partner_record.id,
                            'move_line_id': line.id,
                            'date': line.date,
                            'amount': line.credit,
                        })
        return records
    def _partner_records_create(self, data):
        """Create the type-A and type-B 347 records for one partner.

        `data` is a dict carrying the partner plus its grouped invoices,
        refunds and cash moves.
        """
        partner = data.get('partner')
        address = self._get_default_address(partner)
        # Split an EU-style VAT into its 0-2 letter country prefix and
        # the remaining number.
        partner_country_code, partner_vat = (
            re.match(r"([A-Z]{0,2})(.*)", partner.vat or '').groups())
        community_vat = ''
        if not partner_country_code:
            # NOTE(review): address may be None when the partner has no
            # invoice/default address — this would raise AttributeError;
            # confirm partners always have an address at this point.
            partner_country_code = address.country_id.code
        partner_state_code = address.state_id.code
        if partner_country_code != 'ES':
            # Foreign partner: report the VAT as an intra-community VAT.
            partner_vat = ''
            community_vat = partner.vat
            # NOTE(review): 99 is an int while state codes are normally
            # strings — confirm downstream export handles this.
            partner_state_code = 99
        vals = {
            'report_id': self.id,
            'partner_id': partner.id,
            'partner_vat': partner_vat,
            'representative_vat': '',
            'community_vat': community_vat,
            'partner_state_code': partner_state_code,
            'partner_country_code': partner_country_code,
        }
        # Create A record
        self._partner_record_a_create(data, vals)
        # Create B records
        self._partner_record_b_create(data, vals)
        return True
def _invoices_search(self, partners):
invoice_obj = self.env['account.invoice']
partner_obj = self.env['res.partner']
domain = [
('state', 'in', ['open', 'paid']),
('period_id', 'in', self.periods.ids),
('not_in_mod347', '=', False),
('commercial_partner_id.not_in_mod347', '=', False),
]
if self.only_supplier:
domain.append(('type', 'in', ('in_invoice', 'in_refund')))
key_field = 'id'
if self.group_by_vat:
key_field = 'vat'
groups = invoice_obj.read_group(
domain, ['commercial_partner_id'], ['commercial_partner_id'])
for group in groups:
partner = partner_obj.browse(group['commercial_partner_id'][0])
key_value = partner[key_field]
invoices = invoice_obj.search(group['__domain'])
in_invoices = invoices.filtered(
lambda x: x.type in 'in_invoice')
|
Afnarel/django-forms-builder | forms_builder/forms/south_migrations/0014_auto__chg_field_field_choices.py | Python | bsd-2-clause | 8,984 | 0.008014 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Widen Field.choices from 1000 to 5000 characters."""
        # Changing field 'Field.choices'
        db.alter_column(u'forms_field', 'choices', self.gf('django.db.models.fields.CharField')(max_length=5000))
    def backwards(self, orm):
        """Revert Field.choices to its previous 1000-character limit.

        NOTE(review): values longer than 1000 chars written after the
        forward migration would be truncated by this rollback.
        """
        # Changing field 'Field.choices'
        db.alter_column(u'forms_field', 'choices', self.gf('django.db.models.fields.CharField')(max_length=1000))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'forms.field': {
'Meta': {'ordering': "(u'order',)", 'object_name': 'Field'},
'choices': ('django.db.models.fields.CharField', [], {'max_length': '5000', 'blank': 'True'}),
'condition': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'default': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'dependency': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'blank': 'True'}),
'field_type': ('django.db.models.fields.IntegerField', [], {}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'fields'", 'to': u"orm['forms.Form']"}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'merge': ('django.db.models.fields.CharField', [], {'default': "u'0'", 'max_length': '100', 'blank': 'True'}),
'meta': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'placeholder_text': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': "u''", 'max_length': '100', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'forms.fieldentry': {
'Meta': {'object_name': 'FieldEntry'},
'entry': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'fields'", 'to': u"orm['forms.FormEntry']"}),
'field_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True'})
},
u'forms.form': {
'Meta': {'object_name': 'Form'},
'button_text': ('django.db.models.fields.CharField', [], {'default': "u'Submit'", 'max_length': '50'}),
'email_copies': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'email_from': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_subject': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'redirect_url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'default': '[2]', 'related_name': "u'forms_form_forms'", 'symmetrical': 'False', 'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'forms.formentry': {
'Meta': {'object_name': 'FormEntry'},
'entry_time': ('django.d |
HyperloopTeam/FullOpenMDAO | lib/python2.7/site-packages/openmdao.main-0.13.0-py2.7.egg/openmdao/main/factory.py | Python | gpl-2.0 | 2,382 | 0.001259 | import inspect
#public symbols
__all__ = ["Factory"]
class Factory(object):
    """Base class for objects that know how to create other objects
    based on a type argument and several optional arguments (version,
    server id, and resource description).
    """

    def __init__(self):
        pass

    def create(self, typname, version=None, server=None,
               res_desc=None, **ctor_args):
        """Return an object of type *typname* (or a proxy to it if it resides
        in another process) using the specified package version, server
        location, and resource description. Returns None if this factory is
        unable to create the specified type.
        """
        raise NotImplementedError('create')

    def get_available_types(self, groups=None):
        """Return a set of tuples of the form (typename, metadata_dict), one
        for each available plugin type in the given entry point groups.
        If groups is *None,* return the set for all openmdao entry point groups.
        """
        raise NotImplementedError('get_available_types')

    def get_signature(self, typname, version=None):
        """Return constructor argument signature for *typname,* using the
        specified package version. The return value is a dictionary:

        args: list
            List of 1 or 2-element lists. The first element is the argument
            name; the second element is the default value.
        varargs: string
            The name of the '*' argument.
        kwargs: string
            The name of the '**' argument.
        """
        raise NotImplementedError('get_signature')

    @staticmethod
    def form_signature(cls):
        """Return constructor signature for class `cls`."""
        # inspect.getargspec() was removed in Python 3.11; use
        # getfullargspec() when available (its '**' name is .varkw, not
        # .keywords) and fall back for Python 2.
        try:
            argspec = inspect.getfullargspec(cls.__init__)
            varkw = argspec.varkw
        except AttributeError:  # pragma: no cover - Python 2 only
            argspec = inspect.getargspec(cls.__init__)
            varkw = argspec.keywords
        arglist = argspec.args[1:]  # Drop self.
        non_default = len(arglist)
        if argspec.defaults is not None:
            non_default -= len(argspec.defaults)
        # Arguments without defaults are 1-element lists...
        args = [[arg] for arg in arglist[:non_default]]
        if argspec.defaults is not None:
            # ...and arguments with defaults are [name, repr(default)].
            defstrs = [repr(default) for default in argspec.defaults]
            args.extend([arg, default]
                        for arg, default in zip(arglist[non_default:], defstrs))
        return dict(args=args,
                    varargs=argspec.varargs or '',
                    kwargs=varkw or '')
|
bwohlberg/sporco | sporco/array.py | Python | bsd-3-clause | 14,977 | 0.000935 | # -*- coding: utf-8 -*-
# Copyright (C) 2015-2020 by Brendt Wohlberg <brendt@ieee.org>
# All rights reserved. BSD 3-clause License.
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
"""Functions operating on numpy arrays etc."""
from __future__ import division
from builtins import range
import collections
fro | m future.moves.itertools import zip_longest
import numpy as np
from sporco._util import renamed_function
__author__ = """Brendt Wohlberg <brendt@ieee.org>"""
def ntpl2array(ntpl):
    """Convert a namedtuple to an array.

    Convert a :func:`collections.namedtuple` object to a
    :class:`numpy.ndarray` object that can be saved using
    :func:`numpy.savez`. The result packs three entries: the stacked
    field values, the field names, and the namedtuple class name.

    Parameters
    ----------
    ntpl : collections.namedtuple object
        Named tuple object to be converted to ndarray

    Returns
    -------
    arr : ndarray
        Array representation of input named tuple
    """
    stacked_values = np.hstack(list(ntpl))
    return np.asarray((stacked_values, ntpl._fields, type(ntpl).__name__),
                      dtype=object)
def array2ntpl(arr):
    """Convert an array representation of a namedtuple back to a namedtuple.

    Inverse of :func:`ntpl2array`: rebuild the original
    :func:`collections.namedtuple` object from its array form.

    Parameters
    ----------
    arr : ndarray
        Array representation of named tuple constructed by :func:`ntpl2array`

    Returns
    -------
    ntpl : collections.namedtuple object
        Named tuple object with the same name and fields as the original
        named tuple object provided to :func:`ntpl2array`
    """
    ntpl_cls = collections.namedtuple(arr[2], arr[1])
    return ntpl_cls._make(tuple(arr[0]))
def transpose_ntpl_list(lst):
    """Transpose a list of named tuple objects (of the same type) into a
    named tuple of lists.

    Parameters
    ----------
    lst : list of collections.namedtuple object
        List of named tuple objects of the same type

    Returns
    -------
    ntpl : collections.namedtuple object or None
        Named tuple object with each entry consisting of a list of the
        corresponding fields of the named tuple objects in list ``lst``,
        or None when ``lst`` is empty
    """
    if not lst:
        return None
    cls = collections.namedtuple(type(lst[0]).__name__, lst[0]._fields)
    # zip(*lst) yields one tuple per field, in field order; the original
    # double index loop (with an ambiguous `l` variable) did the same
    # transposition by hand.
    return cls(*[list(field_values) for field_values in zip(*lst)])
@renamed_function(depname='zpad', depmod='sporco.linalg')
def zpad(x, pd, ax):
    """Zero-pad array `x` with `pd = (leading, trailing)` zeros on axis `ax`.

    Parameters
    ----------
    x : array_like
        Array to be padded
    pd : tuple
        Sequence of two ints (leading,trailing) specifying number of zeros
        for padding
    ax : int
        Axis to be padded

    Returns
    -------
    xp : array_like
        Padded array
    """
    # No padding on any axis except `ax`.
    before = ((0, 0),) * ax
    after = ((0, 0),) * (x.ndim - ax - 1)
    return np.pad(x, before + (pd,) + after, 'constant')
@renamed_function(depname='zdivide', depmod='sporco.linalg')
def zdivide(x, y):
    """Return `x`/`y`, with 0 instead of NaN where `y` is 0.

    Parameters
    ----------
    x : array_like
        Numerator
    y : array_like
        Denominator

    Returns
    -------
    z : ndarray
        Quotient `x`/`y`
    """
    # See https://stackoverflow.com/a/37977222
    # np.divide only writes where y is non-zero; the remaining entries
    # keep their initial value of 0.
    quotient = np.zeros_like(x)
    np.divide(x, y, out=quotient, where=(y != 0))
    return quotient
@renamed_function(depname='promote16', depmod='sporco.linalg')
def promote16(u, fn=None, *args, **kwargs):
    r"""Promote ``np.float16`` arguments to ``np.float32`` dtype.

    Utility function for use with functions that do not support arrays
    of dtype ``np.float16``. This function has two distinct modes of
    operation. If called with only the `u` parameter specified, the
    returned value is either `u` itself if `u` is not of dtype
    ``np.float16``, or `u` promoted to ``np.float32`` dtype if it is. If
    the function parameter `fn` is specified then `u` is conditionally
    promoted as described above, passed as the first argument to
    function `fn`, and the returned values are converted back to dtype
    ``np.float16`` if `u` is of that dtype. Note that if parameter `fn`
    is specified, it may not be be specified as a keyword argument if it
    is followed by any non-keyword arguments.

    Parameters
    ----------
    u : array_like
        Array to be promoted to np.float32 if it is of dtype ``np.float16``
    fn : function or None, optional (default None)
        Function to be called with promoted `u` as first parameter and
        \*args and \*\*kwargs as additional parameters
    *args
        Variable length list of arguments for function `fn`
    **kwargs
        Keyword arguments for function `fn`

    Returns
    -------
    up : ndarray
        Conditionally dtype-promoted version of `u` if `fn` is None,
        or value(s) returned by `fn`, converted to the same dtype as `u`,
        if `fn` is a function
    """
    # Only float16 needs promotion; every other dtype passes through.
    dtype = np.float32 if u.dtype == np.float16 else u.dtype
    up = np.asarray(u, dtype=dtype)
    if fn is None:
        return up
    else:
        v = fn(up, *args, **kwargs)
        # Convert result(s) back to the caller's original dtype so the
        # promotion is transparent; functions returning multiple arrays
        # as a tuple are handled element-wise.
        if isinstance(v, tuple):
            vp = tuple([np.asarray(vk, dtype=u.dtype) for vk in v])
        else:
            vp = np.asarray(v, dtype=u.dtype)
        return vp
@renamed_function(depname='atleast_nd', depmod='sporco.linalg')
def atleast_nd(n, u):
    """Append axes to an array so that it is ``n`` dimensional.

    If the input array has fewer than ``n`` dimensions, append singleton
    dimensions so that it is ``n`` dimensional. Note that the interface
    differs substantially from that of :func:`numpy.atleast_3d` etc.

    Parameters
    ----------
    n : int
        Minimum number of required dimensions
    u : array_like
        Input array

    Returns
    -------
    v : ndarray
        Output array with at least `n` dimensions
    """
    missing = n - u.ndim
    if missing <= 0:
        return u
    return u.reshape(u.shape + (1,) * missing)
@renamed_function(depname='split', depmod='sporco.linalg')
def split(u, axis=0):
    """Split an array into a list of arrays on the specified axis.

    The length of the list is the shape of the array on the specified
    axis, and the corresponding axis is removed from each entry in the
    list. This function does not have the same behaviour as
    :func:`numpy.split`.

    Parameters
    ----------
    u : array_like
        Input array
    axis : int, optional (default 0)
        Axis on which to split the input array

    Returns
    -------
    v : list of ndarray
        List of arrays
    """
    # Convert negative axis to positive
    pos_axis = axis if axis >= 0 else u.ndim + axis
    # Full slices for every axis before the split axis.
    lead = (slice(None),) * pos_axis
    return [u[lead + (idx,)] for idx in range(u.shape[pos_axis])]
def rolling_window(x, wsz, wnm=None, pad='wrap'):
"""Construct a rolling window view of the input array.
Use :func:`numpy.lib.stride_tricks.as_strided` to construct a view
of the input array that represents different positions of a rolling
window as additional axes of the array. If the number of shifts
requested is such that the window extends beyond the boundary of the
input array, it is padded before the view is constructed. For
example, if ``x`` is 4 x 5 array, the output of
``y = rolling_window(x, (3, 3))`` is a 3 x 3 x 2 x 3 array, with the
first window position indexed as ``y[..., 0, 0]``.
Parameters
----------
x : ndarray
Input array
wsz : tuple
Window size
wnm : tuple, optional (default None)
Number of shifts of window on each axis. If None, the number of
shifts is set so that the end sample in the array is also the end
sample in the final window position.
pad : string, optional (default 'wrap')
A pad mode specification for :func:`numpy.pad`
Returns
-------
xw : ndarray
An array of shape wsz + wnm representing a |
djangogirlstaipei/eshop | bookshop/books/migrations/0006_auto_20161129_0636.py | Python | mit | 747 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-29 06:36
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Set human-readable (Chinese) verbose names on the books models."""

    dependencies = [
        ('books', '0005_book_tags'),
    ]

    operations = [
        # Book: verbose name "書籍" (book).
        migrations.AlterModelOptions(
            name='book',
            options={'verbose_name': '書籍', 'verbose_name_plural': '書籍'},
        ),
        # Category: verbose name "分類" (category).
        migrations.AlterModelOptions(
            name='category',
            options={'verbose_name': '分類', 'verbose_name_plural': '分類'},
        ),
        # Tag: verbose name "標籤" (tag).
        migrations.AlterModelOptions(
            name='tag',
            options={'verbose_name': '標籤', 'verbose_name_plural': '標籤'},
        ),
    ]
|
cryptapus/electrum | electrum/tests/__init__.py | Python | mit | 902 | 0.001109 | import unittest
import threading
from electrum import constants
# Set this locally to make the test suite run faster.
# If set | , unit tests that would normally test functions with multiple implementations,
# will only be run once, using the fastest implementation.
# e.g. libsecp256k1 vs python-ecdsa. pycryptodomex vs pyaes.
FAST_TESTS = False
# some unit tests are modifying globals; sorry.
class SequentialTestCase(unittest.TestCase):
    """TestCase that serializes test execution process-wide.

    Some unit tests modify module-level globals; holding a single shared
    lock for the duration of each test prevents two such tests from
    running concurrently.
    """

    # Class-level (shared by all subclasses), so any two sequential tests
    # exclude each other, not just tests of the same class.
    test_lock = threading.Lock()

    def setUp(self):
        super().setUp()
        self.test_lock.acquire()

    def tearDown(self):
        super().tearDown()
        # tearDown runs regardless of test outcome, so the lock is
        # always released.
        self.test_lock.release()
class TestCaseForTestnet(SequentialTestCase):
    """Sequential test case that runs with testnet network constants.

    Switches the global `constants` to testnet for the whole class and
    restores mainnet afterwards.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        constants.set_testnet()

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        constants.set_mainnet()
|
ankur22/Butler | topia/termextract/interfaces.py | Python | mit | 1,701 | 0.000588 | ##############################################################################
| #
# Copyright (c) 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
| # FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Interfaces
$Id: interfaces.py 100556 2009-05-30 15:42:14Z srichter $
"""
__docformat__ = "reStructuredText"
import zope.interface
class ITagger(zope.interface.Interface):
    """A utility to provide POS tag extractions from a given text."""

    def initialize():
        """Initializes the tagger.

        This method only needs to be called once. It should do any expensive
        initial computation, such as creating indices, loading the lexicon,
        etc.
        """

    def tokenize(text):
        """Tokenize the given text into single words."""

    def tag(terms):
        """Returns the tagged list of terms.

        Additionally, all terms are normalized.
        The output format is a list of: (term, tag, normalized-term)
        """

    def __call__(text):
        """Get a tagged list of words."""
class ITermExtractor(zope.interface.Interface):
    """Extract important terms from a given text."""

    def __call__(text):
        """Returns a list of extracted terms, the amount of occurrences and
        their search strength."""
|
lixiangning888/whole_project | modules/signatures_orginal_20151110/browser_scanbox.py | Python | lgpl-3.0 | 1,730 | 0.002894 | # Copyright (C) 2015 Will Metcalf william.metcalf@ | gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or | FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class BrowserScanbox(Signature):
    """Detects Scanbox reconnaissance-framework JavaScript running in the
    browser (software-enumeration and keylogging stages)."""
    name = "browser_scanbox"
    description = "Scanbox Activity in Browser"
    weight = 3
    severity = 3
    categories = ["exploit"]
    authors = ["Will Metcalf"]
    minimum = "1.3"
    evented = True

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)

    # Only inspect API calls made by browser processes.
    filter_categories = set(["browser"])
    # backward compat
    filter_apinames = set(["JsEval", "COleScript_Compile", "COleScript_ParseScriptText"])

    def on_call(self, call, process):
        # JsEval passes the script body in "Javascript"; the COleScript
        # hooks pass it in "Script".
        if call["api"] == "JsEval":
            buf = self.get_argument(call, "Javascript")
        else:
            buf = self.get_argument(call, "Script")
        # Software-enumeration stage: builds a softwarelist array and
        # checks for the COM error string "-2147023083".
        if 'softwarelist.push(' in buf.lower() and 'indexof("-2147023083")' in buf.lower():
            return True
        # Keylogger stage: hooks document.onkeypress and periodically
        # sends the captured characters via setInterval.
        elif 'var logger' in buf.lower() and 'document.onkeypress = keypress;' in buf.lower() and 'setinterval(sendchar,' in buf.lower():
            return True
|
nnmware/nnmware | apps/booking/__init__.py | Python | gpl-3.0 | 66 | 0 | default_app_config = "nnm | ware.apps.booking.apps.BookingAppConfi | g"
|
ingenioustechie/zamboni | mkt/api/base.py | Python | bsd-3-clause | 8,836 | 0 | import functools
from django.db.models.sql import EmptyResultSet
import commonware.log
from rest_framework.decorators import api_view
from rest_framework.exceptions import ParseError
from rest_framework.mixins import ListModelMixin
from rest_framework.routers import Route, SimpleRouter
from rest_framework.response import Response
from rest_framework.urlpatterns import format_suffix_patterns
import mkt
from mkt.api.paginator import CustomPagination, PageNumberPagination
log = commonware.log.getLogger('z.api')
def list_url(name, **kw):
    """Build ('api_dispatch_list', kwargs) reverse arguments for the
    list endpoint of resource `name`."""
    kw.update(resource_name=name)
    return ('api_dispatch_list', kw)
def get_url(name, pk, **kw):
    """Build ('api_dispatch_detail', kwargs) reverse arguments for the
    detail endpoint of resource `name` with primary key `pk`."""
    kw.update(resource_name=name, pk=pk)
    return ('api_dispatch_detail', kw)
def _collect_form_errors(forms):
errors = {}
if not isinstance(forms, list):
forms = [forms]
for f in forms:
# If we've got form objects, get the error object off it.
# Otherwise assume we've just been passed a form object.
form_errors = getattr(f, 'errors', f)
if isinstance(form_errors, list): # Cope with formsets.
for e in form_errors:
errors.update(e)
continue
errors.update(dict(form_errors.items()))
return errors
def form_errors(forms):
    """Raise a DRF ParseError whose detail carries the merged errors of
    the given form(s)/formset(s)."""
    errors = _collect_form_errors(forms)
    exc = ParseError()
    exc.detail = {'detail': errors}
    raise exc
def get_region_from_request(request):
region = request.GET.get('region')
if region and region == 'None':
return None
return getattr(request, 'REGION', mkt.regions.RESTOFWORLD)
class SubRouter(SimpleRouter):
    """
    Like SimpleRouter, but with the lookup before the prefix, so that it can be
    easily used for sub-actions that are children of a main router.

    This is a convenient way of linking one or more viewsets to a parent one
    without having to set multiple @action and @link manually.
    """
    routes = [
        # List route: {lookup}/{prefix}/ — GET lists, POST creates.
        Route(
            url=r'^{lookup}/{prefix}{trailing_slash}$',
            mapping={
                'get': 'list',
                'post': 'create'
            },
            name='{basename}-list',
            initkwargs={'suffix': 'List'}
        ),
        # Detail route: same URL pattern, but mapped to per-object
        # actions (note POST maps to the custom 'detail_post' handler).
        Route(
            url=r'^{lookup}/{prefix}{trailing_slash}$',
            mapping={
                'get': 'retrieve',
                'put': 'update',
                'post': 'detail_post',
                'patch': 'partial_update',
                'delete': 'destroy'
            },
            name='{basename}-detail',
            initkwargs={'suffix': 'Instance'}
        )
    ]
class SubRouterWithFormat(SubRouter):
    """
    SubRouter that also adds the optional format to generated URL patterns.

    This is similar to DRF's DefaultRouter, except it's a SubRouter and we
    don't respect the trailing_slash parameter with the URLs containing the
    format parameter, because that'd make ugly, weird URLs.
    """
    def get_urls(self):
        # Keep trailing slash value...
        trailing_slash = self.trailing_slash
        # Generate base URLs without format.
        base_urls = super(SubRouterWithFormat, self).get_urls()
        # Generate the same URLs, but forcing to omit the trailing_slash.
        # NOTE(review): temporarily mutating self.trailing_slash is not
        # re-entrant; fine if get_urls() is only called at startup.
        self.trailing_slash = ''
        extra_urls = super(SubRouterWithFormat, self).get_urls()
        # Reset trailing slash and add format to our extra URLs.
        self.trailing_slash = trailing_slash
        extra_urls = format_suffix_patterns(extra_urls, suffix_required=True)
        # Return the addition of both lists of URLs.
        return base_urls + extra_urls
class MarketplaceView(object):
    """
    Base class for our DRF views.

    Provides:
    - handle_exception() tailored to our custom exception handler: the
      request and the originating view class are stashed on the exception
      so the handler can propagate the got_request_exception signal when
      necessary.
    - paginate_queryset() that defaults to tastypie-like offset pagination
      (CustomPagination) but switches to page-number pagination when only
      a 'page' query parameter is supplied.
    """
    pagination_class = CustomPagination

    def handle_exception(self, exc):
        # Stash context the custom exception handler needs.
        exc._request = self.request._request
        exc._klass = self.__class__
        return super(MarketplaceView, self).handle_exception(exc)

    def paginate_queryset(self, queryset):
        query = self.request.query_params
        # 'page' without 'offset' means the client wants page-number
        # pagination instead of the default offset style.
        if query.get('offset') is None and query.get('page') is not None:
            self._paginator = PageNumberPagination()
        return self.paginator.paginate_queryset(
            queryset, self.request, view=self)

    def get_region_from_request(self, request):
        """
        Returns the REGION object for the passed request. If the GET param
        `region` is `'None'`, return `None`. Otherwise, return `request.REGION`
        which will have been set by the RegionMiddleware. If somehow we didn't
        go through the middleware and request.REGION is absent, we fall back to
        RESTOFWORLD.
        """
        # Delegates to the module-level helper of the same name.
        return get_region_from_request(request)
class MultiSerializerViewSetMixin(object):
    """
    Lets a single viewset expose several serializers.

    The serializer is chosen with request.GET['serializer'], used as a key
    into the viewset's `serializer_classes` mapping. Useful to avoid
    creating separate endpoints just to use different serializers (e.g.,
    product-specific or slimmed-down serializers).
    """
    def get_serializer_class(self):
        """
        Return the serializer class matching request.GET['serializer'].

        Example:
            class MyViewSet(ViewSet):
                serializer_class = MySerializer
                serializer_classes = {
                    'mini': MyMiniSerializer,
                }

        Falls back to the regular get_serializer_class() lookup
        (self.serializer_class) when the parameter matches no entry.
        """
        wanted = self.request.GET.get('serializer')
        try:
            return self.serializer_classes[wanted]
        except KeyError:
            return super(MultiSerializerViewSetMixin,
                         self).get_serializer_class()
class CORSMixin(object):
    """
    Mixin to enable CORS for DRF API.
    """
    def finalize_response(self, request, response, *args, **kwargs):
        http_request = request._request
        # Annotate the underlying Django request only once; downstream
        # machinery reads these attributes.
        if not hasattr(http_request, 'CORS'):
            http_request.CORS = self.cors_allowed_methods
        if hasattr(self, 'cors_allowed_headers'):
            http_request.CORS_HEADERS = self.cors_allowed_headers
        return super(CORSMixin, self).finalize_response(
            request, response, *args, **kwargs)
def cors_api_view(methods, headers=None):
    """Decorator factory: wrap a function view with DRF's @api_view plus CORS.

    The request.CORS attributes need to be added to the view before the
    DRF @api_view handler executes.
    """
    def decorator(view):
        drf_view = api_view(methods)(view)

        @functools.wraps(drf_view)
        def view_with_cors(request, *args, **kw):
            request.CORS = methods
            if headers:
                request.CORS_HEADERS = headers
            return drf_view(request, *args, **kw)
        return view_with_cors
    return decorator
class SlugOrIdMixin(object):
"""
Mixin that allows you to pass slugs instead of pk in your URLs. Use with
any router or urlpattern that relies on a relaxed regexp for pks, like
(?P<pk>[^/]+) (DRF does this by default).
If the name of your `slug` is called something else, override
`self.slug_field`.
"""
def get_object(self):
pk = self.kwargs.get('pk')
if pk and not pk.isdigit():
# If the `pk` contains anything other than a digit, it's a `slug`.
self.lookup_fi |
studybuffalo/rdrhc_calendar | modules/retrieve.py | Python | gpl-3.0 | 1,263 | 0.001584 | """Retrieves the paths to the required schedule files."""
from datetime import datetime
import logging

import pytz
from unipath import Path

# Setup Logging
LOG = logging.getLogger(__name__)
def get_date(tz_string):
    """Generates todays date as string (in format yyyy-mm-dd).

    tz_string: an IANA timezone name understood by pytz (e.g. 'UTC').
    """
    schedule_tz = pytz.timezone(tz_string)
    today = datetime.now(schedule_tz)
    return today.strftime('%Y-%m-%d')
def retrieve_schedule_file_paths(config):
    """Creates the path to the schedules from supplied config file."""
    schedule_loc = config['excel']['schedule_loc']
    date = get_date(config['timezone'])

    # Returned key -> (role name in the file name, config key for extension).
    roles = {
        'a': ('assistant', 'ext_a'),
        'p': ('pharmacist', 'ext_p'),
        't': ('technician', 'ext_t'),
    }

    paths = {}
    for key, (role, ext_key) in roles.items():
        file_name = '{0}_{1}.{2}'.format(
            date, role, config['excel'][ext_key]
        )
        paths[key] = Path(schedule_loc, file_name)
    return paths
|
phillipgreenii/loan_payoff_tools | loan_payoff_tools/money.py | Python | mit | 4,298 | 0.001396 | from decimal import Decimal
import decimal
from functools import total_ordering
@total_ordering
class Money(object):
    """Monetary amount stored exactly as an integral number of cents.

    Dollar and cent components may be int/long, float, Decimal or numeric
    string; inexact inputs (float/Decimal/string) are rounded to the nearest
    cent, half away from zero. Equality, ordering, hashing, arithmetic and
    numeric conversions all operate on the internal cent total.

    NOTE: this module is Python 2 code (``long``, ``basestring``,
    ``__div__``, ``__nonzero__``).
    """

    # Quantum used for two-decimal-place Decimal amounts.
    _round_amount = Decimal('1.00')

    def __init__(self, dollars=None, cents=None):
        if isinstance(dollars, Money):
            # Copy construction: an extra cents argument would be ambiguous.
            if cents:
                raise ValueError("if dollars is Money, then cents should not be specified")
            self._cents = dollars._cents
        else:
            total_cents = 0
            round = False
            if dollars:
                # handle dollars
                if isinstance(dollars, long):
                    total_cents += dollars * 100
                elif isinstance(dollars, int):
                    total_cents += dollars * 100
                elif isinstance(dollars, float):
                    total_cents += dollars * 100
                    round = True
                elif isinstance(dollars, Decimal):
                    total_cents += float(dollars*100)
                    round = True
                elif isinstance(dollars, basestring):
                    total_cents += float(Decimal(dollars)*100)
                    round = True
                else:
                    raise ValueError("Unsupported dollars type: {} ({})".format(type(dollars), dollars))
            if cents:
                # handle cents
                if isinstance(cents, long):
                    total_cents += cents
                elif isinstance(cents, int):
                    total_cents += cents
                elif isinstance(cents, float):
                    total_cents += cents
                    round = True
                elif isinstance(cents, Decimal):
                    total_cents += float(cents)
                    round = True
                elif isinstance(cents, basestring):
                    total_cents += float(Decimal(cents))
                    round = True
                else:
                    raise ValueError("Unsupported cents type: {} ({})".format(type(cents), cents))
            if round:
                # Inexact input types get rounded to a whole number of cents.
                total_cents = self._round(total_cents)
            self._cents = total_cents

    def _round(self, cents):
        # Round half away from zero to the nearest whole cent.
        if cents > 0:
            return long(cents + 0.5)
        elif cents < 0:
            return long(cents - 0.5)
        else:
            return 0

    def __repr__(self):
        return "${:.2f}".format(self._cents/100.0)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self._cents == other._cents
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, self.__class__):
            return not self.__eq__(other)
        return NotImplemented

    def __lt__(self, other):
        # total_ordering fills in the remaining comparisons from this + __eq__.
        if isinstance(other, self.__class__):
            return self._cents < other._cents
        return NotImplemented

    def __add__(self, other):
        if isinstance(other, self.__class__):
            return Money(cents=(self._cents + other._cents))
        return NotImplemented

    def __sub__(self, other):
        if isinstance(other, self.__class__):
            return Money(cents=(self._cents - other._cents))
        return NotImplemented

    def __mul__(self, other):
        # Scaling by a plain number; Money * Money is intentionally invalid.
        if isinstance(other, int):
            return Money(cents=(self._cents * other))
        elif isinstance(other, float):
            return Money(cents=(self._cents * other))
        return NotImplemented

    def __rmul__(self, other):
        if isinstance(other, int):
            return Money(cents=(self._cents * other))
        elif isinstance(other, float):
            return Money(cents=(self._cents * other))
        return NotImplemented

    def __div__(self, other):
        if isinstance(other, int):
            return Money(cents=(float(self._cents) / other))
        elif isinstance(other, float):
            return Money(cents=(float(self._cents) / other))
        return NotImplemented

    def __neg__(self):
        return Money(cents=-self._cents)

    def __hash__(self):
        return hash(self._cents)

    def __float__(self):
        return float(self._cents/100.00)

    def __int__(self):
        return int(self._cents/100)

    def __long__(self):
        return long(self._cents/100)

    def __nonzero__(self):
        return bool(self._cents)
# Shared zero-value constant for the module.
ZERO = Money(0)
|
RomainBrault/operalib | operalib/tests/test_QuantileRegression.py | Python | bsd-3-clause | 1,549 | 0 | """OVK learning, unit tests.
The :mod:`sklearn.tests.test_QuantileRegression` tests OVK quantile regression
estimator.
"""
import operalib as ovk
from sklearn.utils.estimator_checks import check_estimator
import numpy as np
def test_valid_estimator():
    """Test whether ovk.Quantile is a valid sklearn estimator."""
    # Runs scikit-learn's generic estimator-API compliance checks.
    check_estimator(ovk.Quantile)
def test_learn_quantile():
    """Test OVK quantile estimator fit, predict."""
    probs = np.linspace(0.1, 0.9, 5)  # Quantile levels of interest
    x_train, y_train, _ = ovk.toy_data_quantile(50)
    x_test, y_test, _ = ovk.toy_data_quantile(1000, probs=probs)
    # Joint quantile regression
    lbda = 1e-2
    gamma = 1e1
    joint = ovk.Quantile(probs=probs, kernel='DGauss', lbda=lbda,
                         gamma=gamma, gamma_quantile=1e-2)
    # Independent quantile regression
    ind = ovk.Quantile(probs=probs, kernel='DGauss', lbda=lbda,
                       gamma=gamma, gamma_quantile=np.inf)
    # Independent quantile regression (with non-crossing constraints)
    non_crossing = ovk.Quantile(probs=probs, kernel='DGauss', lbda=lbda,
                                gamma=gamma, gamma_quantile=np.inf,
                                nc_const=True)
    # Sparse quantile regression
    # NOTE(review): this rebinds `joint`, so the non-sparse joint model
    # above is never fitted -- looks unintended; confirm upstream.
    joint = ovk.Quantile(probs=probs, kernel='DGauss', lbda=lbda,
                         gamma=gamma, gamma_quantile=1e-2, eps=1)
    # Fit on training data
    for reg in [joint, ind, non_crossing]:
        reg.fit(x_train, y_train)
        assert reg.score(x_test, y_test) > 0.5
|
jkeifer/pyHytemporal | old_TO_MIGRATE/test5.py | Python | mit | 787 | 0.007624 | __author__ = 'phoetrymaster'
import subprocess

# NoData values carried through from the MODIS input to the clipped output.
nodatain = -3000
nodataout = -3000

# Paths are pre-quoted because the command below is run through the shell.
inputshape = "'/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/DepartmentSelection/ARG_adm/pellegrini.shp'"
inputimg = "'/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS 7_2012-2013/argentina_1/test.tif'"
outputimg = "'/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS 7_2012-2013/argentina_1/MODIS_pellegrini_clip.tif'"
outprj = "'+proj=utm +zone=20 +datum=WGS84'"
outformat = "ENVI"

#Need to reproject input shapefile to outprj before warping...

# NOTE(review): shell=True with interpolated paths is shell-injection prone;
# prefer subprocess.call([...], shell=False) with an argument list.
subprocess.call("gdalwarp -t_srs {0} -srcnodata {1} -dstnodata {2} -crop_to_cutline -cutline {3} {4} {5} -of {6}".format(outprj, nodatain, nodataout, inputshape, inputimg, outputimg, outformat), shell=True)
WesleyHsieh/policy_gradient | policy_gradient/policy_gradient.py | Python | mit | 10,569 | 0.028101 | import numpy as np
import numpy.linalg as la
import collections
import IPython
import tensorflow as tf
from utils import *
import time
from collections import defaultdict
class PolicyGradient(Utils):
"""
Calculates policy gradient
for given input state/actions.
Users should primarily be calling main
PolicyGradient class methods.
"""
def __init__(self, net_dims, filepath=None, q_net_dims=None, output_function=None, seed=0, seed_state=None):
"""
Initializes PolicyGradient class.
Parameters:
net_dims: array-like
1D list corresponding to dimensions
of each layer in the net.
output_function: string
Non-linearity function applied to output of
neural network.
Options are: 'tanh', 'sigmoid', 'relu', 'softmax'.
"""
self.q_dict = defaultdict(lambda: defaultdict(float))
self.prev_weight_grad = self.prev_bias_grad = self.prev_weight_update_vals = \
self.prev_bias_update_ | vals = self.prev_weight_inverse_hess = self.prev_bias_inverse_hess = \
self.total_weight_grad = self.total_bias_grad = None
self.init_action_neural_net(net_dims, output_function, filepath)
if seed_state is not None:
np.random.set_state(seed_state)
tf.set_random_seed(seed)
def train_agent(self, dynamics_func, reward_func, update_method, initial_state, num_iters, batch_size, traj_len, step_size=0.1, mome | ntum=0.5, normalize=True):
"""
Trains agent using input dynamics and rewards functions.
Parameters:
dynamics_func: function
User-provided function that takes in
a state and action, and returns the next state.
reward_func: function
User-provided function that takes in
a state and action, and returns the associated reward.
initial_state: array-like
Initial state that each trajectory starts at.
Must be 1-dimensional NumPy array.
num_iters: int
Number of iterations to run gradient updates.
batch_size: int
Number of trajectories to run in a single iteration.
traj_len: int
Number of state-action pairs in a trajectory.
Output:
mean_rewards: array-like
Mean ending rewards of all iterations.
"""
mean_rewards = []
ending_states = []
for i in range(num_iters):
traj_states = []
traj_actions = []
rewards = []
for j in range(batch_size):
states = []
actions = []
curr_rewards = []
curr_state = initial_state
# Rolls out single trajectory
for k in range(traj_len):
# Get action from learner
curr_action = self.get_action(curr_state)
# Update values
states.append(curr_state)
curr_rewards.append(reward_func(curr_state, curr_action))
actions.append(curr_action)
# Update state
curr_state = dynamics_func(curr_state, curr_action)
# Append trajectory/rewards
traj_states.append(states)
traj_actions.append(actions)
rewards.append(curr_rewards)
# Apply policy gradient iteration
self.gradient_update(np.array(traj_states), np.array(traj_actions), np.array(rewards), \
update_method, step_size, momentum, normalize)
mean_rewards.append(np.mean([np.sum(reward_list) for reward_list in rewards]))
ending_states.append([traj[-1] for traj in traj_states])
return np.array(mean_rewards), ending_states
def gradient_update(self, traj_states, traj_actions, rewards, update_method='sgd', step_size=1.0, momentum=0.5, normalize=True):
"""
Estimates and applies gradient update according to a policy.
States, actions, rewards must be lists of lists; first dimension indexes
the ith trajectory, second dimension indexes the jth state-action-reward of that
trajectory.
Parameters:
traj_states: array-like
List of list of states.
traj_actions: array-like
List of list of actions.
rewards: array-like
List of list of rewards.
step_size: float
Step size.
momentum: float
Momentum value.
normalize: boolean
Determines whether to normalize gradient update.
Recommended if running into NaN/infinite value errors.
"""
assert update_method in ['sgd', 'momentum', 'lbfgs', 'adagrad', 'rmsprop', 'adam']
# Calculate updates and create update pairs
curr_weight_grad = 0
curr_bias_grad = 0
curr_weight_update_vals = []
curr_bias_update_vals = []
curr_weight_inverse_hess = []
curr_bias_inverse_hess = []
iters = traj_states.shape[0]
q_vals = self.estimate_q(traj_states, traj_actions, rewards)
assert traj_states.shape[0] == traj_actions.shape[0] == rewards.shape[0]
assert q_vals.shape[0] == iters
# Update for each example
for i in range(iters):
# Estimate q-values and extract gradients
curr_traj_states = traj_states[i]
curr_traj_actions = traj_actions[i]
curr_q_val_list = q_vals[i]
curr_traj_states = curr_traj_states.reshape(curr_traj_states.shape[0], curr_traj_states.shape[1] * curr_traj_states.shape[2])
curr_traj_actions = curr_traj_actions.reshape(curr_traj_actions.shape[0], curr_traj_actions.shape[1] * curr_traj_actions.shape[2])
curr_q_val_list = curr_q_val_list.reshape(curr_q_val_list.shape[0], 1)
curr_weight_grad_vals = self.sess.run(self.weight_grads, \
feed_dict={self.input_state: curr_traj_states, self.observed_action: curr_traj_actions, self.q_val: curr_q_val_list})
curr_bias_grad_vals = self.sess.run(self.bias_grads, \
feed_dict={self.input_state: curr_traj_states, self.observed_action: curr_traj_actions, self.q_val: curr_q_val_list})
curr_weight_grad += np.array(curr_weight_grad_vals) / np.float(iters)
curr_bias_grad += np.array(curr_bias_grad_vals) / np.float(iters)
# Update weights
for j in range(len(self.weights)):
if update_method == 'sgd':
update_val = step_size * curr_weight_grad[j]
elif update_method == 'momentum':
if self.prev_weight_grad is None:
update_val = step_size * curr_weight_grad[j]
else:
update_val = momentum * self.prev_weight_grad[j] + step_size * curr_weight_grad[j]
elif update_method == 'lbfgs':
if self.prev_weight_inverse_hess is None:
curr_inverse_hess = np.eye(curr_weight_grad[j].shape[0])
update_val = curr_weight_grad[j]
else:
update_val, curr_inverse_hess = \
self.bfgs_update(self.prev_inverse_hess[j], self.prev_update_val[j], self.prev_weight_grad[j], update_val)
update_val = update_val * step_size
curr_weight_inverse_hess.append(curr_inverse_hess)
elif update_method == 'adagrad':
if self.total_weight_grad is None:
self.total_weight_grad = curr_weight_grad
else:
self.total_weight_grad[j] += np.square(curr_weight_grad[j])
update_val = step_size * curr_weight_grad[j] / (np.sqrt(np.abs(self.total_weight_grad[j])) + 1e-8)
elif update_method == 'rmsprop':
decay = 0.99
if self.total_weight_grad is None:
self.total_weight_grad = curr_weight_grad
else:
self.total_weight_grad[j] = decay * self.total_weight_grad[j] + (1 - decay) * np.square(curr_weight_grad[j])
update_val = step_size * curr_weight_grad[j] / (np.sqrt(np.abs(self.total_weight_grad[j])) + 1e-8)
elif update_method == 'adam':
beta1, beta2 = 0.9, 0.999
if self.total_weight_grad is None:
self.total_weight_grad = curr_weight_grad
self.total_sq_weight_grad = np.square(curr_weight_grad)
else:
self.total_weight_grad[j] = beta1 * self.total_weight_grad[j] + (1 - beta1) * curr_weight_grad[j]
self.total_sq_weight_grad[j] = beta2 * self.total_sq_weight_grad[j] + (1 - beta2) * np.sqrt(np.abs(self.total_weight_grad[j]))
update_val = np.divide(step_size * self.total_weight_grad[j], (np.sqrt(np.abs(self.total_sq_weight_grad[j])) + 1e-8))
if normalize:
norm = la.norm(update_val)
if norm != 0:
update_val = update_val / norm
curr_weight_update_vals.append(update_val)
update = tf.assign(self.weights[j], self.weights[j] + update_val)
self.sess.run(update)
# Update biases
for j in range(len(self.biases)):
if update_method == 'sgd':
update_val = step_size * curr_bias_grad[j]
elif update_method == 'momentum':
if self.prev_bias_grad is None:
update_val = step_size * curr_bias_grad[j]
else:
update_val = momentum * self.prev_bias_grad[j] + step_size * curr_bias_grad[j]
elif update_method = |
DMPwerkzeug/DMPwerkzeug | rdmo/questions/models.py | Python | apache-2.0 | 27,506 | 0.001927 | from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.sites.models import Site
from django.core.cache import caches
from django.db import models
from django.utils.translation import ugettext_lazy as _
from rdmo.conditions.models import Condition
from rdmo.core.constants import VALUE_TYPE_CHOICES
from rdmo.core.models import Model, TranslationMixin
from rdmo.core.utils import copy_model, get_language_fields, join_url
from rdmo.domain.models import Attribute
from .managers import CatalogManager, QuestionManager, QuestionSetManager
class Catalog(Model, TranslationMixin):
    """Top-level questionnaire container owning an ordered set of sections.

    save() regenerates the catalog URI and cascades a save to every child
    section; copy() clones the catalog including its m2m links and sections.
    """

    objects = CatalogManager()

    uri = models.URLField(
        max_length=640, blank=True,
        verbose_name=_('URI'),
        help_text=_('The Uniform Resource Identifier of this catalog (auto-generated).')
    )
    uri_prefix = models.URLField(
        max_length=256,
        verbose_name=_('URI Prefix'),
        help_text=_('The prefix for the URI of this catalog.')
    )
    key = models.SlugField(
        max_length=128, blank=True,
        verbose_name=_('Key'),
        help_text=_('The internal identifier of this catalog.')
    )
    comment = models.TextField(
        blank=True,
        verbose_name=_('Comment'),
        help_text=_('Additional internal information about this catalog.')
    )
    locked = models.BooleanField(
        default=False,
        verbose_name=_('Locked'),
        help_text=_('Designates whether this catalog (and it\'s sections, question sets and questions) can be changed.')
    )
    order = models.IntegerField(
        default=0,
        verbose_name=_('Order'),
        help_text=_('The position of this catalog in lists.')
    )
    sites = models.ManyToManyField(
        Site, blank=True,
        verbose_name=_('Sites'),
        help_text=_('The sites this catalog belongs to (in a multi site setup).')
    )
    groups = models.ManyToManyField(
        Group, blank=True,
        verbose_name=_('Group'),
        help_text=_('The groups for which this catalog is active.')
    )
    title_lang1 = models.CharField(
        max_length=256, blank=True,
        verbose_name=_('Title (primary)'),
        help_text=_('The title for this catalog in the primary language.')
    )
    title_lang2 = models.CharField(
        max_length=256, blank=True,
        verbose_name=_('Title (secondary)'),
        help_text=_('The title for this catalog in the secondary language.')
    )
    title_lang3 = models.CharField(
        max_length=256, blank=True,
        verbose_name=_('Title (tertiary)'),
        help_text=_('The title for this catalog in the tertiary language.')
    )
    title_lang4 = models.CharField(
        max_length=256, blank=True,
        verbose_name=_('Title (quaternary)'),
        help_text=_('The title for this catalog in the quaternary language.')
    )
    title_lang5 = models.CharField(
        max_length=256, blank=True,
        verbose_name=_('Title (quinary)'),
        help_text=_('The title for this catalog in the quinary language.')
    )
    help_lang1 = models.TextField(
        blank=True,
        verbose_name=_('Help (primary)'),
        help_text=_('The help text for this catalog in the primary language.')
    )
    help_lang2 = models.TextField(
        blank=True,
        verbose_name=_('Help (secondary)'),
        help_text=_('The help text for this catalog in the secondary language.')
    )
    help_lang3 = models.TextField(
        blank=True,
        verbose_name=_('Help (tertiary)'),
        help_text=_('The help text for this catalog in the tertiary language.')
    )
    help_lang4 = models.TextField(
        blank=True,
        verbose_name=_('Help (quaternary)'),
        help_text=_('The help text for this catalog in the quaternary language.')
    )
    help_lang5 = models.TextField(
        blank=True,
        verbose_name=_('Help (quinary)'),
        help_text=_('The help text for this catalog in the quinary language.')
    )
    available = models.BooleanField(
        default=True,
        verbose_name=_('Available'),
        help_text=_('Designates whether this catalog is generally available for projects.')
    )

    class Meta:
        ordering = ('order',)
        verbose_name = _('Catalog')
        verbose_name_plural = _('Catalogs')

    def __str__(self):
        return self.key

    def save(self, *args, **kwargs):
        # Regenerate the URI from the current prefix/key before persisting,
        # then cascade so every child section refreshes its own path/URI.
        self.uri = self.build_uri(self.uri_prefix, self.key)
        super().save(*args, **kwargs)

        for section in self.sections.all():
            section.save()

    def copy(self, uri_prefix, key):
        """Clone this catalog (with '*'-suffixed titles), its m2m links and sections."""
        # create a new title
        kwargs = {}
        for field in get_language_fields('title'):
            kwargs[field] = getattr(self, field) + '*'

        # copy instance
        catalog = copy_model(self, uri_prefix=uri_prefix, key=key, **kwargs)

        # copy m2m fields
        catalog.sites.set(self.sites.all())
        catalog.groups.set(self.groups.all())

        # copy children
        for section in self.sections.all():
            section.copy(uri_prefix, section.key, catalog=catalog)

        return catalog

    @property
    def title(self):
        # Language-aware lookup provided by TranslationMixin.
        return self.trans('title')

    @property
    def help(self):
        return self.trans('help')

    @property
    def is_locked(self):
        return self.locked

    @classmethod
    def build_uri(cls, uri_prefix, key):
        # key is mandatory; uri_prefix falls back to the configured default.
        assert key
        return join_url(uri_prefix or settings.DEFAULT_URI_PREFIX, '/questions/', key)
class Section(Model, TranslationMixin):
uri = models.URLField(
max_length=640, blank=True,
verbose_name=_('URI'),
help_text=_('The Uniform Resource Identifier of this section (auto-generated).')
)
uri_prefix = models.URLField(
max_length=256,
verbose_name=_('URI Prefix'),
help_text=_('The prefix for the URI of this section.')
)
key = models.SlugField(
max_length=128, blank=True,
verbose_name=_('Key'),
help_text=_('The internal identifier of this section.')
)
path = models.CharField(
max_length=512, blank=True,
verbose | _name=_('Label'),
help_text=_('The path part of the URI of this section (auto-generated).')
)
comment = models.TextField(
blank=True,
verbose_name=_('Comment'),
help_text=_('Additional internal information about this section.')
)
locked = models.BooleanField(
default=False,
verbose_name=_('Locked'),
help_text=_('Designates wh | ether this section (and it\'s question sets and questions) can be changed.')
)
catalog = models.ForeignKey(
Catalog, on_delete=models.CASCADE, related_name='sections',
verbose_name=_('Catalog'),
help_text=_('The catalog this section belongs to.')
)
order = models.IntegerField(
default=0,
verbose_name=_('Order'),
help_text=_('Position in lists.')
)
title_lang1 = models.CharField(
max_length=256, blank=True,
verbose_name=_('Title (primary)'),
help_text=_('The title for this section in the primary language.')
)
title_lang2 = models.CharField(
max_length=256, blank=True,
verbose_name=_('Title (secondary)'),
help_text=_('The title for this section in the secondary language.')
)
title_lang3 = models.CharField(
max_length=256, blank=True,
verbose_name=_('Title (tertiary)'),
help_text=_('The title for this section in the tertiary language.')
)
title_lang4 = models.CharField(
max_length=256, blank=True,
verbose_name=_('Title (quaternary)'),
help_text=_('The title for this section in the quaternary language.')
)
title_lang5 = models.CharField(
max_length=256, blank=True,
verbose_name=_('Title (quinary)'),
help_text=_('The title for this section in the quinary language.')
)
class Meta:
ordering = ('catalog__order', 'order')
verbose_name = _('Section')
verbose_name_plural = _('Sections')
def __str__(self):
return self.path
def save(self, *args, **kwa |
nikesh-mahalka/nova | nova/tests/functional/test_legacy_v2_compatible_wrapper.py | Python | apache-2.0 | 3,299 | 0.000606 | # Copyright 2015 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api import openstack
from nova.api.openstack import compute
from nova.api.openstack import wsgi
from nova.tests.functional.api import client
from nova.tests.functional import api_paste_fixture
from nova.tests.functional import test_servers
from nova.tests.unit import fake_network
class LegacyV2CompatibleTestBase(test_servers.ServersTestBase):
    """Functional tests for the legacy v2-compatible API wrapper."""
    _api_version = 'v2'

    def setUp(self):
        self.useFixture(api_paste_fixture.ApiPasteV2CompatibleFixture())
        super(LegacyV2CompatibleTestBase, self).setUp()
        self._check_api_endpoint('/v2', [compute.APIRouterV21,
                                         openstack.LegacyV2CompatibleWrapper])

    def test_request_with_microversion_headers(self):
        # Microversion headers must be stripped from legacy v2 responses.
        response = self.api.api_post('os-keypairs',
            {"keypair": {"name": "test"}},
            headers={wsgi.API_VERSION_REQUEST_HEADER: '2.100'})
        self.assertNotIn(wsgi.API_VERSION_REQUEST_HEADER, response.headers)
        self.assertNotIn('Vary', response.headers)
        self.assertNotIn('type', response.body["keypair"])

    def test_request_without_addtional_properties_check(self):
        # Unknown body properties are tolerated in legacy v2 mode.
        response = self.api.api_post('os-keypairs',
            {"keypair": {"name": "test", "foooooo": "barrrrrr"}},
            headers={wsgi.API_VERSION_REQUEST_HEADER: '2.100'})
        self.assertNotIn(wsgi.API_VERSION_REQUEST_HEADER, response.headers)
        self.assertNotIn('Vary', response.headers)
        self.assertNotIn('type', response.body["keypair"])

    def test_request_with_pattern_properties_check(self):
        fake_network.set_stub_network_methods(self.stubs)
        server = self._build_minimal_create_server_request()
        post = {'server': server}
        created_server = self.api.post_server(post)
        self._wait_for_state_change(created_server, 'BUILD')

        response = self.api.post_server_metadata(created_server['id'],
                                                 {'a': 'b'})
        self.assertEqual(response, {'a': 'b'})

    def test_request_with_pattern_properties_with_avoid_metadata(self):
        fake_network.set_stub_network_methods(self.stubs)
        server = self._build_minimal_create_server_request()
        post = {'server': server}
        created_server = self.api.post_server(post)
        # Over-long metadata keys must be rejected with a 400.
        exc = self.assertRaises(client.OpenStackApiException,
                                self.api.post_server_metadata,
                                created_server['id'],
                                {'a': 'b',
                                 'x' * 300: 'y',
                                 'h' * 300: 'i'})
        self.assertEqual(exc.response.status_code, 400)
|
Akrog/cinder | cinder/zonemanager/drivers/brocade/brcd_fc_san_lookup_service.py | Python | apache-2.0 | 10,817 | 0 | # (c) Copyright 2014 Brocade Communications Systems Inc.
# All Rights Reserved.
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_utils import excutils
import paramiko
from cinder import exception
from cinder.i18n import _, _LE
from cinder.openstack.common import log as logging
from cinder import utils
from cinder.zonemanager.drivers.brocade import brcd_fabric_opts as fabric_opts
import cinder.zonemanager.drivers.brocade.fc_zone_constants as zone_constant
from cinder.zonemanager import fc_san_lookup_service as fc_service
LOG = logging.getLogger(__name__)
class BrcdFCSanLookupService(fc_service.FCSanLookupService):
"""The SAN lookup service that talks to Brocade switches.
Version History:
1.0.0 - Initial version
"""
VERSION = "1.0.0"
    def __init__(self, **kwargs):
        """Initializing the client."""
        super(BrcdFCSanLookupService, self).__init__(**kwargs)
        # Optional 'configuration' kwarg carries the fabric settings that
        # create_configuration() reads below.
        self.configuration = kwargs.get('configuration', None)
        self.create_configuration()
        # SSH client used to talk to the Brocade switches.
        self.client = self.create_ssh_client(**kwargs)
def create_configuration(self):
"""Configuration specific to SAN context values."""
config = self.configuration
fabric_names = [x.strip() for x in config.fc_fabric_names.split(',')]
LOG.debug('Fabric Names: %s', fabric_names)
# There can be more than one SAN in the network and we need to
# get credentials for each for SAN context lookup later.
if len(fabric_names) > 0:
self.fabric_configs = fabric_opts.load_fabric_configurations(
fabric_names)
def create_ssh_client(self, **kwargs):
ssh_client = paramiko.SSHClient()
known_hosts_file = kwargs.get('known_hosts_file', None)
if known_hosts_file is None:
ssh_client.load_system_host_keys()
else:
ssh_client.load_host_keys(known_hosts_file)
missing_key_policy = kwargs.get('missing_key_policy', None)
if missing_key_policy is None:
missing_key_policy = paramiko.WarningPolicy()
ssh_client.set_missing_host_key_policy(missing_key_policy)
return ssh_client
def get_device_mapping_from_network(self,
initiator_wwn_list,
target_wwn_list):
"""Provides the initiator/target map for available SAN contexts.
Looks up nameserver of each fc SAN configured to find logged in devices
and returns a map of initiator and target port WWNs for each fabric.
:param initiator_wwn_list: List of initiator port WWN
:param target_wwn_list: List of target port WWN
:returns List -- device wwn map in following format
{
<San name>: {
'initiator_port_wwn_list':
('200000051e55a100', '200000051e55a121'..)
'target_port_wwn_list':
('100000051e55a100', '100000051e55a121'..)
}
}
:raises Exception when connection to fabric is failed
"""
device_map = {}
formatted_target_list = []
formatted_initiator_list = []
fabric_map = {}
fabric_names = self.configuration.fc_fabric_names
fabrics = None
if not fabric_names:
raise exception.InvalidParameterValue(
err=_("Missing Fibre Channel SAN configuration "
"param - fc_fabric_names"))
fabrics = [x.strip() for x in fabric_names.split(',')]
LOG.debug("FC Fabric List: %s", fabrics)
if fabrics:
for t in target_wwn_list:
formatted_target_list | .append(self.get_formatted_wwn(t))
for i in initiator_wwn_list:
formatted_initiator_list.append(self.
get_formatted_wwn(i))
for fabric_name in fabrics:
fabric_ip = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_address')
fabric_user = self.fabric_configs[fabric_name].safe_get(
'fc_fabr | ic_user')
fabric_pwd = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_password')
fabric_port = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_port')
# Get name server data from fabric and find the targets
# logged in
nsinfo = ''
try:
LOG.debug("Getting name server data for "
"fabric %s", fabric_ip)
self.client.connect(
fabric_ip, fabric_port, fabric_user, fabric_pwd)
nsinfo = self.get_nameserver_info()
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed collecting name server info from"
" fabric %s") % fabric_ip)
except Exception as e:
msg = _("SSH connection failed "
"for %(fabric)s with error: %(err)s"
) % {'fabric': fabric_ip, 'err': e}
LOG.error(msg)
raise exception.FCSanLookupServiceException(message=msg)
finally:
self.client.close()
LOG.debug("Lookup service:nsinfo-%s", nsinfo)
LOG.debug("Lookup service:initiator list from "
"caller-%s", formatted_initiator_list)
LOG.debug("Lookup service:target list from "
"caller-%s", formatted_target_list)
visible_targets = filter(lambda x: x in formatted_target_list,
nsinfo)
visible_initiators = filter(lambda x: x in
formatted_initiator_list, nsinfo)
if visible_targets:
LOG.debug("Filtered targets is: %s", visible_targets)
# getting rid of the : before returning
for idx, elem in enumerate(visible_targets):
elem = str(elem).replace(':', '')
visible_targets[idx] = elem
else:
LOG.debug("No targets are in the nameserver for SAN %s",
fabric_name)
if visible_initiators:
# getting rid of the : before returning ~sk
for idx, elem in enumerate(visible_initiators):
elem = str(elem).replace(':', '')
visible_initiators[idx] = elem
else:
LOG.debug("No initiators are in the nameserver "
"for SAN %s", fabric_name)
fabric_map = {
'initiator_port_wwn_list': visible_initiators,
'target_port_wwn_list': visible_targets
}
device_map[fabric_name] = fabric_map
LOG.debug("Device map for SAN context: %s", device_map)
return device_map
def get_nameserver_info(self):
"""Get name server data from fabric.
This method will return the connected node port wwn list(local
and remote) for the given switch fabric
"""
cli_output = None
nsinfo_list = []
|
maxalbert/sumatra | sumatra/datastore/davfs.py | Python | bsd-2-clause | 3,618 | 0.00387 | '''
Datastore via remote webdav connection
'''
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
import os
import tarfile
import logging
from fs.contrib.davfs import DAVFS
from urllib.parse import urlparse
from contextlib import closing # needed for Python 2.6
from sumatra.core import component
from .archivingfs import ArchivingFileSystemDataStore, ArchivedDataFile, TIMESTAMP_FORMAT
class DavFsDataItem(ArchivedDataFile):
"""Base class for data item classes, that may represent files or database records."""
def __init__(self, path, store):
# needs to be first cause _get_info is called in Base __init__
self.store = store
super(DavFsDataItem, self).__init__(path, store)
def get_content(self, max_length=None):
obj = self.store.dav_fs.open(self.tarfile_path, 'rb')
with closing(tarfile.open(fileobj=obj)) as data_archive:
f = data_archive.extractfile(self.path)
if max_length:
content = f.read(max_length)
else:
content = f.read()
f.close()
return content
# mandatory repeat
content = property(fget=get_content)
def _get_info(self):
obj = self.store.dav_fs.open(self.tarfile_path, 'rb')
with closing(tarfile.open(fileobj=obj)) as data_archive:
return data_archive.getmember(self.path)
return tarfile.TarInfo()
@component
class DavFsDataStore(ArchivingFileSystemDataStore):
"""ArchivingFileSystemDataStore that archives to webdav storage"""
data_item_class = DavFsDataItem
def __init__(self, root, dav_url, dav_user=None, dav_pw=None):
super(DavFsDataStore, self).__init__(root)
parsed = urlparse(dav_url)
self.dav_user = dav_user or parsed.username
self.dav_pw = dav_pw or parsed.password
self.dav_url = parsed.geturl()
self.dav_fs = DAVFS(url=self.dav_url, credentials={'username': self.dav_user, 'password': self.dav_pw})
def __getstate__(self):
return {'root': self.root, 'dav_url': self.dav_url, 'dav_user': self.dav_user, 'dav_pw': self.dav_pw}
def find_new_data(self, timestamp):
"""Finds newly created/changed data items"""
new_files = self._find_new_data_files(timestamp)
label = timestamp.strftime(TIMESTAMP_FORMAT)
archive_paths = self._archive(label, new_files)
return [DavFsDataItem(path, self).generate_key()
for path in archive_paths]
def _archive(self, label, files, delete_originals=True):
"""
Archives files and, by default, deletes the originals.
"""
fs = self.dav_fs
if not fs.isdir(self.archive_store):
fs.makedir(self.archive_store, recursive=True)
tf_obj = fs.open(os.path.join(self.archive_store, label + ".tar.gz"), mode='wb')
with tarfile.open(fileobj=tf_obj, mode='w:gz') as tf:
logging.info("Archiving data to file %s" % tf.name)
# Add data files
archive_paths = []
for file_path in files:
archive_path = os.path.join(label, file_path)
tf.add(os.path.join(self.root, file_path), archive_path)
ar | chive_paths.append(archive_path)
tf.close()
tf_obj.close()
# Delete original files.
if delete_originals:
for file_path in files:
os.remove(os.path.join(self.root, file | _path))
self._last_label = label # useful for testing
return archive_paths
|
Kuwagata/md5crack | LocallyThreaded/client_mod.py | Python | mit | 1,033 | 0.03001 | import os, sys, socket, hashlib, pickle
hashedword = "4f9c4e19748be021372db6d5e10cfd02"
def main():
target = '127.0.0.1'
port = 5555
chars = "abcdefghijklmnopqrstuvwxyz"
charl = len(chars)
while True:
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.connect((target,port))
start = s.recv(1024)
for i in xrange(charl):
for j in xrange(charl):
for k in xrange(charl):
for l in xrange(charl):
word = start + chars[i] + chars[j] + chars[k] + chars[l]
print "Trying ", word
temp = hashlib.md5(word)
if temp == hashedword:
print word
sock = socket.socket()
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,1)
sock.connect((target,5556))
| sock.send(word)
sock.close()
s.close()
if __name__ == '__main__':
main()
| |
Naeka/vosae-app | www/contacts/models/entity.py | Python | agpl-3.0 | 3,242 | 0.002159 | # -*- coding:Utf-8 -*-
from mongoengine import Document, fields
from django_gravatar.helpers import get_gravatar_url
from core.mixins import ZombieMixin, AsyncTTLUploadsMixin
from notification.mixins import NotificationAwareDocumentMixin
from core.tasks import es_document_index, es_document_deindex
__all__ = (
'Entity',
)
class Entity(ZombieMixin, Document, AsyncTTLUploadsMixin, NotificationAwareDocumentMixin):
"""A base class for :class:`~contacts.models.Contact` and :class:`~contacts.models.Organization`."""
STATUSES = ('ACTIVE', 'DELETED')
DELETE_STATUS = 'DELETED'
RELATED_WITH_TTL = ['photo']
tenant = fields.ReferenceField("Tenant", required=True)
creator = fields.ReferenceField("VosaeUser", required=True)
| status = fields.StringField(choices=STATUSES, required=True, default=ZombieMixin.DEFAULT_STATUS)
private = fields.BooleanField(required=True, default=False)
photo_source = fields.StringField(choices=["LOCAL", "GRAVATAR"])
photo = fields.ReferenceField("VosaeFile")
gravatar_mail = fields.EmailField(max_length=128)
note = fields.StringField(max_length=2048)
addresses = fiel | ds.ListField(fields.EmbeddedDocumentField("Address"))
emails = fields.ListField(fields.EmbeddedDocumentField("Email"))
phones = fields.ListField(fields.EmbeddedDocumentField("Phone"))
invoicing_settings = {} # fields.ReferenceField("InvoicingSettings")
meta = {
"indexes": ["tenant"],
"allow_inheritance": True,
# Vosae Specific
"vosae_permissions": ("see_contact", "add_contact", "change_contact", "delete_contact"),
"vosae_mandatory_permissions": ("contacts_access",),
"vosae_timeline_permission": "see_contact",
"forced_class_name": "contact",
}
@classmethod
def post_save(self, sender, document, created, **kwargs):
"""
Post save hook handler
- Removes related TTL
- Index entity in elasticsearch
"""
# Removes related TTL
document.remove_related_ttl()
# Index entity in elasticsearch
es_document_index.delay(document)
@classmethod
def post_delete(self, sender, document, **kwargs):
"""
Post delete hook handler
- Deletes related photo, if exists
- De-index entity from elasticsearch
"""
# Deletes related photo, if exists
if document.photo:
document.photo.delete()
# De-index entity from elasticsearch
es_document_deindex.delay(document)
@classmethod
def get_indexable_documents(cls, **kwargs):
"""Overrides `get_indexable_documents` method by filtering on only active entities"""
return cls.objects.filter(status="ACTIVE", **kwargs)
@property
def photo_uri(self):
"""Return the photo URI or the default *empty* picture."""
if self.photo_source:
if self.photo_source == "LOCAL":
try:
return self.photo.stream_link
except:
return None
if self.photo_source == "GRAVATAR":
return get_gravatar_url(self.gravatar_mail)
else:
return None
|
aio-libs/aiohttp_session | demo/memcached_storage.py | Python | apache-2.0 | 788 | 0 | import time
from aiohttp import web
import asyncio
import aiomcache
from aiohttp_session import setup, get_session
from aioht | tp_session.memcached_storage import MemcachedStorage
async def handler(request: web.Request) -> web.Response:
session = await get_session(request)
last_visit = session['last_visit'] if 'last_visit' in se | ssion else None
session['last_visit'] = time.time()
text = 'Last visited: {}'.format(last_visit)
return web.Response(text=text)
async def make_app() -> web.Application:
app = web.Application()
mc = aiomcache.Client("127.0.0.1", 11211, loop=loop)
setup(app, MemcachedStorage(mc))
app.router.add_get('/', handler)
return app
loop = asyncio.get_event_loop()
app = loop.run_until_complete(make_app())
web.run_app(app)
|
jeffbaumes/jeffbaumes-vtk | Utilities/vtkTclTest2Py/vtkTclToPyConvertor.py | Python | bsd-3-clause | 18,843 | 0.005148 | # This is the translator that converts Tcl test to python.
# Not all Tcl test are translatable.
# To ensure that a test can be translated :
# 1) do not use Tcl arrays
# 2) do not use string substitution except in variable names
# eg. obj${i} GetOutput is okay
# obj12 GetOutputAs${i} is not okay.
# 3) do not use expr within expr. As such it is typically superflous.
# 4) event handler procedures in Python take 2 arguments, hence,
# define the Tcl event handlers with 2 default arguments.
# 5) define procedures before using them or setting them on VTK objects
# as callbacks.
# 6) do not use puts etc.
# 7) always quote strings such as filenames, arguments etc.
import sys
import re
import string
for i in range(0, len(sys.argv)):
if sys.argv[i] == '-A' and i < len(sys.argv)-1:
sys.path = [sys.argv[i+1]] + sys.path
import vtkTclParser
reVariable = re.compile("^([+\-])?\$([^\$\{\}]+)$")
reCompoundVariable = re.compile("\$(?:([^\$\}\{]+)|\{([^\$\}]+)\})")
class vtkTclToPyConvertor(vtkTclParser.vtkTclParser):
def __init__(self):
vtkTclParser.vtkTclParser.__init__(self)
self.output = ""
self.indent = ""
self._procedure_list = []
self.class_list = []
self.name_space = "vtk"
def print_header(self, prefix_content=""):
self.handle_command("""#!/usr/bin/env python
%s""" % (prefix_content))
pass
def print_footer(self):
self.handle_command("# --- end of script --")
pass
def reset(self):
self.output = ""
self.indent = ""
vtkTclParser.vtkTclParser.reset(self)
self._procedure_list = []
def _get_block_parser(self):
p = vtkTclToPyConvertor()
p.class_list = self.class_list
p.name_space = self.name_space
p.indent = self.indent + " "
p._procedure_list = self._procedure_list[:]
return p
def translate_block(self, block):
p = self._get_block_parser()
p.indent += " "
block = block.strip()
if block[0] == "{":
block = block[1:]
if block[-1] == "}":
block = block[:-1]
p.feed(block)
return p.output
def translate_operator(self, op):
if op == "&&":
return "and"
if op == "||":
return "or"
return op
def translate_token(self, token):
"""called to transate every token."""
if token.find("$") == -1:
return token
match = reVariable.match(token)
if match:
result = ""
if match.group(1) != None:
result += match.group(1)
result += match.group(2)
return result
result = "locals()[get_variable_name(\""
match = reCompoundVariable.search(token)
while match:
result += token[:match.start(0)] + "\", "
if match.group(1) != None:
result += match.group(1)
else:
result += match.group(2)
result += ", \""
token = token[match.end(0):]
match = reCompoundVariable.search(token)
result += token +"\")]"
return result
def translate_command(self, command, arguments):
#self._error("to translate_command %s %s" % (command, `arguments`))
translated_cmd = None
if len(command) > 0 and command[0] == "#":
translated_cmd = command
elif len(command) > 0 and command[0] == "\"":
translated_cmd = command
elif command == "global" and len(arguments) >= 1:
translated_cmd = "global %s" % arguments[0]
for arg in arguments[1:]:
translated_cmd += ", %s" % arg
elif command == "eval" and len(arguments) > 0:
translate | d_cmd = self.translate_command(arguments[0], arguments[1:])
pass
elif command == "catch" and len(arguments) == 1:
translated_cmd = "catch.catch(globals(),\"\"\"%s\"\"\")" % \
| self.translate_block(arguments[0]).strip()
elif command == "expr":
translated_cmd = "expr.expr(globals(), locals(),["
i = False
for arg in arguments:
if i:
translated_cmd += ","
translated_cmd += "\"%s\"" % arg
i = True
translated_cmd += "])"
elif command == "lindex" and len(arguments) == 2:
translated_cmd = "lindex(%s,%s)" % (arguments[0], arguments[1])
elif command == "append" and len(arguments) >= 2:
translated_cmd = "%s += %s" % (arguments[0], arguments[1])
for arg in arguments[2:]:
translated_cmd += " + %s" % arg
elif command == "proc" and len(arguments) == 3:
translated_cmd = "def %s (" % arguments[0]
proc_args = arguments[1].split()
# We add 2 default arguments to any procedure. This is necessary
# since Tcl event handlers don't take any arguments while python
# event handlers need 2 arguments.
# Added 2 default arguments to ensure that such handler don't raise
# errors. For all other procedures, adding two unused default arguments
# makes no harm.
proc_args.append("__vtk__temp0=0")
proc_args.append("__vtk__temp1=0")
i = False
pair = 0
for pa in proc_args:
if pa.strip() == "{":
pair = 1
continue
elif pa.strip() == "}":
pair = 0
continue
if i and pair != 2:
translated_cmd += ","
if pair == 2:
translated_cmd += "="
if pair == 1:
pair = 2
i = True
translated_cmd += pa
translated_cmd +="):\n"
p = self._get_block_parser()
p.feed(arguments[2])
translated_cmd += p.output
self._procedure_list.append(arguments[0])
elif command == "set" and len(arguments) == 2:
#translate a set command.
translated_cmd = "%s = %s" % (arguments[0], arguments[1])
elif command == "foreach" and len(arguments) == 3:
p = self._get_block_parser()
p.feed(arguments[0])
arguments[0] = p.output.strip()
p = self._get_block_parser()
p.feed(arguments[1])
arguments[1] = p.output.strip()
p = self._get_block_parser()
p.indent = self.indent + " "
p.feed(arguments[2])
translated_cmd = "for %s in %s.split():\n" % (arguments[0], arguments[1])
translated_cmd += p.output
translated_cmd += "\n" + self.indent + " pass"
elif command == "for" and len(arguments) == 4:
p = self._get_block_parser()
p.feed(arguments[0])
translated_cmd = p.output.strip() + "\n"
p = self._get_block_parser()
p.feed(arguments[1])
translated_cmd += self.indent + "while " + p.output.strip() + ":\n"
p = self._get_block_parser()
p.indent = self.indent + " "
p.feed(arguments[3])
translated_cmd += p.output
p = self._get_block_parser()
p.indent = self.indent + " "
p.feed(arguments[2])
translated_cmd += p.output
elif command == "while" and len(arguments) == 2:
p = self._get_block_parser()
p.feed(arguments[0])
translated_cmd = "while %s:\n" % p.output.strip()
p = self._get_block_parser()
p.indent = self.indent + " "
p.feed(arguments[1])
translated_cmd += p.output
translated_cmd += "\n" + self.indent + " pass"
elif command in ["if", "elseif"] and len(arguments) >= 2:
p = self._get_block_parser()
p.indent = self.indent |
slivingston/flymovieformat | motmot/FlyMovieFormat/ffmpeg2fmf.py | Python | bsd-3-clause | 1,625 | 0.008615 | import pyglet
import pyglet.media as media
import numpy as np
import sys, os
import motmot.FlyMovieFormat.FlyMovieFormat as fmf_mod
from optparse import OptionParser
def get_frame(source):
ts = source.get_next_video_timestamp()
im = source.get_next_video_frame()
if im is None:
return None, None
n_8bit_chans = im.pitch / im.width
imn = np.fromstring(im.data,np.uint8)
if n_8bit_chans==1:
imn.resize((im.height,im.width))
else:
im | n.resize((im.height,im.width,n_8bit_chans))
imn = np.mean(imn,axis=2) # collapse last dim, convert to | float64
imn = imn.astype(np.uint8) # convert back to uint8
imn2 = np.array(imn,copy=True)
return imn2, ts
def main():
usage = '%prog [options] FILE'
parser = OptionParser(usage)
parser.add_option("--format", type='string', default=None)
(options, args) = parser.parse_args()
in_filename = args[0]
if options.format is None or options.format.lower() != 'mono8':
raise NotImplementedError('Only mono8 format is supported with no '
'autodetection. Use "--format=mono8".')
source = media.load(in_filename)
imn, ts = get_frame(source)
file_base = os.path.splitext(in_filename)[0]
out_filename = file_base+'.fmf'
fmf = fmf_mod.FlyMovieSaver(out_filename,
format='MONO8',
bits_per_pixel=8,
version=3,
)
while imn is not None:
fmf.add_frame( imn, ts )
imn, ts = get_frame(source)
|
yunlongliukm/chm1_scripts | ClassifyRepeats.py | Python | mit | 1,195 | 0.005021 | #!/usr/bin/env python |
import argparse
ap = argparse.ArgumentParser(description="count masked repeats.")
ap.add_argument("table", help="Repeat table.")
ap.add_argument("--fraction", help="Calls | must be at least this fraction to be made.", type=float, default=0.51)
args = ap.parse_args()
table = open(args.table, 'r')
table.readline()
table.readline()
table.readline()
indels = dict([("insertion", {}), ("deletion", {})])
for line in table:
vals = line.split()
title = vals[4]
titleVals = title.split('/')
region = titleVals[0]
inordel = titleVals[1]
if (region not in indels[inordel]):
indels[inordel][region] = {}
if (vals[9] not in indels[inordel][region]):
indels[inordel][region][vals[9]] = 0
indels[inordel][region][vals[9]] += 1
for k in ("insertion", "deletion"):
for region in indels[k].keys():
total = 0.0;
for op in indels[k][region].keys():
total += indels[k][region][op]
for op in indels[k][region].keys():
frac = indels[k][region][op] / total
if ( frac >= args.fraction):
print k + "\t" + region + "\t" + op + "\t" + str(frac)
|
coUrbanize/django-rest-framework-jwt | tests/test_authentication.py | Python | mit | 9,810 | 0.000102 | from django.http import HttpResponse
from django.test import TestCase
from django.utils import unittest
from django.conf.urls import patterns
from rest_framework import permissions, status
try:
from rest_framework_oauth.authentication import OAuth2Authentication
except ImportError:
try:
from rest_framework.authentication import OAuth2Authentication
except ImportError:
OAuth2Authentication = None
try:
try:
from rest_framework_oauth.compat import oauth2_provider
from rest_framework_oauth.compat.oauth2_provider import oauth2
except ImportError:
# if oauth2 module can not be imported, skip the tests,
# because models have not been initialized.
oauth2_provider = None
except ImportError:
try:
from rest_framework.compat import oauth2_provider
from rest_framework.compat.oauth2_provider import oauth2 # NOQA
except ImportError:
# if oauth2 module can not be imported, skip the tests,
# because models have not been initialized.
oauth2_provider = None
from rest_framework.test import APIRequestFactory, APIClient
from rest_framework.views import APIView
from rest_framework_jwt import utils
from rest_framework_jwt_courb.compat import get_user_model
from rest_framework_jwt_courb.settings import api_settings, DEFAULTS
from rest_framework_jwt_courb.authentication import JSONWebTokenAuthentication
User = get_user_model()
DJANGO_OAUTH2_PROVIDER_NOT_INSTALLED = 'django-oauth2-provider not installed'
factory = APIRequestFactory()
class MockView(APIView):
permission_classes = (permissions.IsAuthenticated,)
def get(self, request):
return HttpResponse({'a': 1, 'b': 2, 'c': 3})
def post(self, request):
return HttpResponse({'a': 1, 'b': 2, 'c': 3})
urlpatterns = patterns(
'',
(r'^jwt/$', MockView.as_view(
authentication_classes=[JSONWebTokenAuthentication])),
(r'^jwt-oauth2/$', MockView.as_view(
authentication_classes=[
JSONWebTokenAuthentication, OAuth2Authentication])),
(r'^oauth2-jwt/$', MockView.as_view(
authentication_classes=[
OAuth2Authentication, JSONWebTokenAuthentication])),
)
class JSONWebTokenAuthenticationTests(TestCase):
"""JSON Web Token Authentication"""
urls = 'tests.test_authentication'
def setUp(self):
self.csrf_client = APIClient(enforce_csrf_checks=True)
self.username = 'jpueblo'
self.email = 'jpueblo@example.com'
self.user = User.objects.create_user(self.username, self.email)
def test_post_form_passing_jwt_auth(self):
"""
Ensure POSTing form over JWT auth with correct credentials
passes and does not require CSRF
"""
payload = utils.jwt_payload_handler(self.user)
token = utils.jwt_encode_handler(payload)
auth = 'JWT {0}'.format(token)
response = self.csrf_client.post(
'/jwt/', {'example': 'example'}, HTTP_AUTHORIZATION=auth)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_post_json_passing_jwt_auth(self):
"""
Ensure POSTing JSON over JWT auth with correct credentials
passes and does not require CSRF
"""
payload = utils.jwt_payload_handler(self.user)
token = utils.jwt_encode_handler(payload)
auth = 'JWT {0}'.format(token)
response = self.csrf_client.post(
'/jwt/', {'example': 'example'},
HTTP_AUTHORIZATION=auth, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_post_form_failing_jwt_auth(self):
"""
Ensure POSTing form over JWT auth without correct credentials fails
"""
response = self.csrf_client.post('/jwt/', {'example': 'example'})
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_post_json_failing_jwt_auth(self):
"""
Ensure POSTing json over JWT auth without correct credentials fails
" | ""
response = self.csrf_client.post('/jwt/', {'example': 'example'},
format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response['WWW-Authenticate' | ], 'JWT realm="api"')
def test_post_no_jwt_header_failing_jwt_auth(self):
"""
Ensure POSTing over JWT auth without credentials fails
"""
auth = 'JWT'
response = self.csrf_client.post(
'/jwt/', {'example': 'example'},
HTTP_AUTHORIZATION=auth, format='json')
msg = 'Invalid Authorization header. No credentials provided.'
self.assertEqual(response.data['detail'], msg)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response['WWW-Authenticate'], 'JWT realm="api"')
def test_post_invalid_jwt_header_failing_jwt_auth(self):
"""
Ensure POSTing over JWT auth without correct credentials fails
"""
auth = 'JWT abc abc'
response = self.csrf_client.post(
'/jwt/', {'example': 'example'},
HTTP_AUTHORIZATION=auth, format='json')
msg = ('Invalid Authorization header. Credentials string '
'should not contain spaces.')
self.assertEqual(response.data['detail'], msg)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response['WWW-Authenticate'], 'JWT realm="api"')
def test_post_expired_token_failing_jwt_auth(self):
"""
Ensure POSTing over JWT auth with expired token fails
"""
payload = utils.jwt_payload_handler(self.user)
payload['exp'] = 1
token = utils.jwt_encode_handler(payload)
auth = 'JWT {0}'.format(token)
response = self.csrf_client.post(
'/jwt/', {'example': 'example'},
HTTP_AUTHORIZATION=auth, format='json')
msg = 'Signature has expired.'
self.assertEqual(response.data['detail'], msg)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response['WWW-Authenticate'], 'JWT realm="api"')
def test_post_invalid_token_failing_jwt_auth(self):
"""
Ensure POSTing over JWT auth with invalid token fails
"""
auth = 'JWT abc123'
response = self.csrf_client.post(
'/jwt/', {'example': 'example'},
HTTP_AUTHORIZATION=auth, format='json')
msg = 'Error decoding signature.'
self.assertEqual(response.data['detail'], msg)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response['WWW-Authenticate'], 'JWT realm="api"')
@unittest.skipUnless(oauth2_provider, DJANGO_OAUTH2_PROVIDER_NOT_INSTALLED)
def test_post_passing_jwt_auth_with_oauth2_priority(self):
"""
Ensure POSTing over JWT auth with correct credentials
passes and does not require CSRF when OAuth2Authentication
has priority on authentication_classes
"""
payload = utils.jwt_payload_handler(self.user)
token = utils.jwt_encode_handler(payload)
auth = 'JWT {0}'.format(token)
response = self.csrf_client.post(
'/oauth2-jwt/', {'example': 'example'},
HTTP_AUTHORIZATION=auth, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK, response)
@unittest.skipUnless(oauth2_provider, DJANGO_OAUTH2_PROVIDER_NOT_INSTALLED)
def test_post_passing_oauth2_with_jwt_auth_priority(self):
"""
Ensure POSTing over OAuth2 with correct credentials
passes and does not require CSRF when JSONWebTokenAuthentication
has priority on authentication_classes
"""
Client = oauth2_provider.oauth2.models.Client
AccessToken = oauth2_provider.oauth2.models.AccessToken
oauth2_client = Client.objects.create(
user=self.user,
client_type=0,
)
access_token = AccessToken.objects.crea |
opalmer/aws | awsutil/__init__.py | Python | mit | 75 | 0.013333 | """ |
The modules and packages responsible for wrapping boto |
functions.
"""
|
bdestombe/flopy-1 | flopy/pakbase.py | Python | bsd-3-clause | 35,922 | 0.001392 | """
pakbase module
This module contains the base package class from which
all of the other packages inherit from.
"""
from __future__ import print_function
import os
import webbrowser as wb
import numpy as np
from numpy.lib.recfunctions import stack_arrays
from .modflow.mfparbc import ModflowParBc as mfparbc
from .utils import Util2d, Util3d, Transient2d, MfList, check
class Package(object):
"""
Base package class from which most other packages are derived.
"""
def __init__(self, parent, extension='glo', name='GLOBAL', unit_number=1,
extra='', filenames=None, allowDuplicates=False):
"""
Package init
"""
self.parent = parent # To be able to access the parent modflow object's attributes
if (not isinstance(extension, list)):
extension = [extension]
self.extension = []
self.file_name = []
for idx, e in enumerate(extension):
self.extension.append(e)
file_name = self.parent.name + '.' + e
if filenames is not None:
try:
if filenames[idx] is not None:
file_name = filenames[idx]
except:
pass
self.file_name.append(file_name)
self.fn_path = os.path.join(self.parent.model_ws, self.file_name[0])
if (not isinstance(name, list)):
name = [name]
self.name = name
if (not isinstance(unit_number, list)):
unit_number = [unit_number]
self.unit_number = unit_number
if (not isinstance(extra, list)):
self.extra = len(self.unit_number) * [extra]
else:
self.extra = extra
self.url = 'index.html'
self.allowDuplicates = allowDuplicates
self.acceptable_dtypes = [int, np.float32, str]
return
def __repr__(self):
s = self.__doc__
exclude_attributes = ['extension', 'heading', 'name', 'parent', 'url']
for attr, value in sorted(self.__dict__.items()):
if not (attr in exclude_attributes):
if (isinstance(value, list)):
if (len(value) == 1):
s = s + ' {0:s} = {1:s}\n'.format(attr, str(value[0]))
else:
s = s + ' {0:s} (list, items = {1:d}\n'.format(attr,
len(
value))
elif (isinstance(value, np.ndarray)):
s = s + ' {0:s} (array, shape = {1:s})\n'.format(attr,
value.shape.__str__()[
1:-1])
else:
s = s + ' {0:s} = {1:s} ({2:s})\n'.format(attr, str(value),
str(type(value))[
7:-2])
return s
def __getitem__(self, item):
if hasattr(self, 'stress_period_data'):
# added this check because stress_period_data also used in Oc and Oc88 but is not a MfList
if isinstance(item, MfList):
if not isinstance(item, list) and not isinstance(item, tuple):
assert item in list(
self.stress_period_data.data.keys()), "package.__getitem__() kper " + str(
item) + " not in data.keys()"
return self.stress_period_data[item]
else:
if item[1] not in self.dtype.names:
raise Exception(
"package.__getitem(): item \'" + item + "\' not in dtype names " + str(
self.dtype.names))
assert item[0] in list(
self.stress_period_data.data.keys()), "package.__getitem__() kper " + str(
item[0]) + " not in data.keys()"
if self.stress_period_data.vtype[item[0]] == np.recarray:
return self.stress_period_data[item[0]][item[1]]
def __setitem__(self, key, value):
raise NotImplementedError("package.__setitem__() not implemented")
def __setattr__(self, key, value):
var_dict = vars(self)
if key in list(var_dict.keys()):
old_value = var_dict[key]
if isinstance(old_value, Util2d):
value = Util2d(self.parent, old_value.shape,
old_value.dtype, value,
name=old_value.name,
| fmtin=old_value.format.fortran,
locat=old_value.locat,
array_free_format=old_value.format.array_free_format)
elif isinstance(old_value, Util3d):
value = Util3d(self.parent, old_value.shape,
old_value.dtype, value,
name=old_value.name_base,
fmtin=old_value.fmtin,
locat=old_value.locat,
array_free_format=old_value.array_free_format)
elif isinstance(old_value, Transient2d):
value = Transient2d(self.parent, old_value.shape,
old_value.dtype, value,
name=old_value.name_base,
fmtin=old_value.fmtin,
locat=old_value.locat)
elif isinstance(old_value, MfList):
value = MfList(self, dtype=old_value.dtype,
data=value)
elif isinstance(old_value, list):
if len(old_value) > 0:
if isinstance(old_value[0], Util3d):
new_list = []
for vo, v in zip(old_value, value):
new_list.append(Util3d(self.parent, vo.shape,
vo.dtype, v,
name=vo.name_base,
fmtin=vo.fmtin,
locat=vo.locat))
value = new_list
elif isinstance(old_value[0], Util2d):
new_list = []
for vo, v in zip(old_value, value):
new_list.append(Util2d(self.parent, vo.shape,
vo.dtype, v,
name=vo.name,
fmtin=vo.fmtin,
locat=vo.locat))
value = new_list
super(Package, self).__setattr__(key, value)
def export(self, f, **kwargs):
from flopy import export
return export.utils.package_helper(f, self, **kwargs)
@staticmethod
def add_to_dtype(dtype, field_names, field_types):
if not isinstance(field_names, list):
field_names = [field_names]
if not isinstance(field_types, list):
field_types = [field_types] * len(field_names)
newdtypes = [dtype]
for field_name, field_type in zip(field_names, field_types):
tempdtype = np.dtype([(field_name, field_type)])
newdtypes.append(tempdtype)
newdtype = sum((dtype.descr for dtype in newdtypes), [])
newdtype = np.dtype(newdtype)
return newdtype
def check(self, f=None, verbose=True, level=1):
"""
Check package data for common errors.
Parameters
----------
f : str or file handle
String defining file name or file handle for summary file
of check method output. If a sting is passed a file handle
is created. If f | |
aronsky/home-assistant | homeassistant/components/counter/__init__.py | Python | apache-2.0 | 9,477 | 0.000633 | """Component to count within automations."""
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.const import (
ATTR_EDITABLE,
CONF_ICON,
CONF_ID,
CONF_MAXIMUM,
CONF_MINIMUM,
CONF_NAME,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import collection
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
_LOGGER = logging.getLogger(__name__)
ATTR_INITIAL = "initial"
ATTR_STEP = "step"
ATTR_MINIMUM = "minimum"
ATTR_MAXIMUM = "maximum"
VALUE = "value"
CONF_INITIAL = "initial | "
CONF_RESTORE = "restore"
CONF_STEP = "step"
DEFAULT_INITIAL = 0
DEFAULT_STEP = 1
DOMAIN = "counter"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SERVICE_DECREMENT = "decrement"
SERVICE_INCREMENT = "increment"
SERVICE_RESET = "reset"
SERVICE_CONFIGURE = "configure"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
CREATE_FIELDS = {
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL, default=DEFAULT_INITIAL): cv.positive_int,
vol.Required(CONF_NAME | ): vol.All(cv.string, vol.Length(min=1)),
vol.Optional(CONF_MAXIMUM, default=None): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_MINIMUM, default=None): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_RESTORE, default=True): cv.boolean,
vol.Optional(CONF_STEP, default=DEFAULT_STEP): cv.positive_int,
}
UPDATE_FIELDS = {
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_INITIAL): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MAXIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_MINIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(CONF_RESTORE): cv.boolean,
vol.Optional(CONF_STEP): cv.positive_int,
}
def _none_to_empty_dict(value):
if value is None:
return {}
return value
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: cv.schema_with_slug_keys(
vol.All(
_none_to_empty_dict,
{
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(
CONF_INITIAL, default=DEFAULT_INITIAL
): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MAXIMUM, default=None): vol.Any(
None, vol.Coerce(int)
),
vol.Optional(CONF_MINIMUM, default=None): vol.Any(
None, vol.Coerce(int)
),
vol.Optional(CONF_RESTORE, default=True): cv.boolean,
vol.Optional(CONF_STEP, default=DEFAULT_STEP): cv.positive_int,
},
)
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the counters."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
id_manager = collection.IDManager()
yaml_collection = collection.YamlCollection(
logging.getLogger(f"{__name__}.yaml_collection"), id_manager
)
collection.sync_entity_lifecycle(
hass, DOMAIN, DOMAIN, component, yaml_collection, Counter.from_yaml
)
storage_collection = CounterStorageCollection(
Store(hass, STORAGE_VERSION, STORAGE_KEY),
logging.getLogger(f"{__name__}.storage_collection"),
id_manager,
)
collection.sync_entity_lifecycle(
hass, DOMAIN, DOMAIN, component, storage_collection, Counter
)
await yaml_collection.async_load(
[{CONF_ID: id_, **(conf or {})} for id_, conf in config.get(DOMAIN, {}).items()]
)
await storage_collection.async_load()
collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
).async_setup(hass)
component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment")
component.async_register_entity_service(SERVICE_DECREMENT, {}, "async_decrement")
component.async_register_entity_service(SERVICE_RESET, {}, "async_reset")
component.async_register_entity_service(
SERVICE_CONFIGURE,
{
vol.Optional(ATTR_MINIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(ATTR_MAXIMUM): vol.Any(None, vol.Coerce(int)),
vol.Optional(ATTR_STEP): cv.positive_int,
vol.Optional(ATTR_INITIAL): cv.positive_int,
vol.Optional(VALUE): cv.positive_int,
},
"async_configure",
)
return True
class CounterStorageCollection(collection.StorageCollection):
"""Input storage based collection."""
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
@callback
def _get_suggested_id(self, info: dict) -> str:
"""Suggest an ID based on the config."""
return info[CONF_NAME]
async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return {**data, **update_data}
class Counter(RestoreEntity):
"""Representation of a counter."""
def __init__(self, config: dict) -> None:
"""Initialize a counter."""
self._config: dict = config
self._state: int | None = config[CONF_INITIAL]
self.editable: bool = True
@classmethod
def from_yaml(cls, config: dict) -> Counter:
"""Create counter instance from yaml config."""
counter = cls(config)
counter.editable = False
counter.entity_id = ENTITY_ID_FORMAT.format(config[CONF_ID])
return counter
@property
def should_poll(self) -> bool:
"""If entity should be polled."""
return False
@property
def name(self) -> str | None:
"""Return name of the counter."""
return self._config.get(CONF_NAME)
@property
def icon(self) -> str | None:
"""Return the icon to be used for this entity."""
return self._config.get(CONF_ICON)
@property
def state(self) -> int | None:
"""Return the current value of the counter."""
return self._state
@property
def extra_state_attributes(self) -> dict:
"""Return the state attributes."""
ret = {
ATTR_EDITABLE: self.editable,
ATTR_INITIAL: self._config[CONF_INITIAL],
ATTR_STEP: self._config[CONF_STEP],
}
if self._config[CONF_MINIMUM] is not None:
ret[CONF_MINIMUM] = self._config[CONF_MINIMUM]
if self._config[CONF_MAXIMUM] is not None:
ret[CONF_MAXIMUM] = self._config[CONF_MAXIMUM]
return ret
@property
def unique_id(self) -> str | None:
"""Return unique id of the entity."""
return self._config[CONF_ID]
def compute_next_state(self, state) -> int:
"""Keep the state within the range of min/max values."""
if self._config[CONF_MINIMUM] is not None:
state = max(self._config[CONF_MINIMUM], state)
if self._config[CONF_MAXIMUM] is not None:
state = min(self._config[CONF_MAXIMUM], state)
return state
async def async_added_to_hass(self) -> None:
"""Call when entity about to be added to Home Assistant."""
await super().async_added_to_hass()
# __init__ will set self._state to self._initial, only override
# if needed.
if self._config[CONF_RESTORE]:
state = await self.async_get_last_state()
if state is not None:
self._state = self.compute_next_state(int(state.state))
self._config[CONF_INITIAL] = state.attributes.get(ATTR_INITIAL)
self._config[CONF_MAXIMUM] = state.attributes.get(ATTR_MAXIMUM)
self._config[C |
Aluriak/24hducode2016 | pocs/test_doc_turinsoft.py | Python | unlicense | 529 | 0.005671 | """
Test de la doc turinsoft:
http://api-doc.tourinsoft.com/#/questionnaire-web
prospose quelques fonctions de wrapping
""" |
import requests
URL = 'http://wcf.tourinsoft.com/QuestionnaireWeb/QuestionnaireWebService.svc/{}/{}/{}'
AUTH = ('cdt72', '969e24f9-75a2-4cc6-a46c-db1f6ebbfe97')
def access(service, client, questionnaire, auth=AUTH):
"""Return json object on given data"""
return requests.get(
URL.format(service, client, questionnaire),
auth=au | th
).json()
print(access()) # TODO
|
plotly/plotly.py | packages/python/plotly/plotly/validators/funnelarea/hoverlabel/font/_familysrc.py | Python | mit | 456 | 0 | import _plotly_utils.basevalidators
class FamilysrcValidator( | _plotly_utils.basevalidators.SrcValidator):
def __init__(
self,
plotly_name="familysrc",
parent_name="funnela | rea.hoverlabel.font",
**kwargs
):
super(FamilysrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
|
rememberlenny/google-course-builder | tools/etl/etl.py | Python | apache-2.0 | 30,584 | 0.000458 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extract-transform-load utility.
There are three features:
1. Download and upload of Course Builder 1.3 data:
$ python etl.py download course /cs101 myapp server.appspot.com archive.zip
This will result in a file called archive.zip that contains the files that make
up the Course Builder 1.3 course found at the URL /cs101 on the application with
id myapp running on the server named server.appspot.com. archive.zip will
contain assets and data files from the course along with a manifest.json
enumerating them. The format of archive.zip will change and should not be relied
upon.
For upload,
$ python etl.py upload course /cs101 myapp server.appspot.com \
--archive_path archive.zip
2. Download of datastore entities. This feature is experimental and upload is
not supported:
$ python etl.py download datastore /cs101 myapp server.appspot.com \
--archive_path archive.zip --datastore_types model1,model2
This will result in a file called archive.zip that contains a dump of all model1
and model2 instances found in the specified course, identified as above. The
archive will contain serialized data along with a manifest. The format of
archive.zip will change and should not be relied upon.
3. Execution of custom jobs.
$ python etl.py run path.to.my.Job /cs101 myapp server.appspot.com \
--job_args='more_args --delegated_to my.Job'
This requires that you have written a custom class named Job found in the
directory path/to/my, relative to the Course Builder root. Job's main method
will be executed against the specified course, identified as above. See
etl_lib.Job for more information.
In order to run this script, you must add the following to the head of sys.path:
1. The absolute path of your Course Builder installation.
2. The absolute path of your App Engine SDK.
3. The absolute paths of third party libraries from the SDK used by Course
Builder:
fancy_urllib
jinja2
webapp2
webob
Their locations in the supported 1.7.7 App Engine SDK are
<sdk_path>/lib/fancy_urllib
<sdk_path>/lib/jinja2-2.6
<sdk_path>/lib/webapp2-2.5.2
<sdk_path>/lib/webob-1.2.3
where <sdk_path> is the absolute path of the 1.7.7 App Engine SDK.
4. If you are running a custom job, the absolute paths of all code required by
your custom job, unless covered above.
When running etl.py against a remote endpoint you will be prompted for a
username and password. If the remote endpoint is a development server, you may
enter any username and password. If the remote endpoint is in production, enter
your username and an application-specific password. See
http://support.google.com/accounts/bin/answer.py?hl=en&answer=185833 for help on
application-specific passwords.
Pass --help for additional usage information.
"""
__author__ = [
'johncox@google.com (John Cox)',
]
import argparse
import logging
import os
import re
import sys
import traceback
import zipfile
import yaml
# Placeholders for modules we'll import after setting up sys.path. This allows
# us to avoid lint suppressions at every callsite.
announcements = None
appengine_config = None
courses = None
db = None
etl_lib = None
jobs = None
metadata = None
namespace_manager = None
remote = None
transforms = None
vfs = None
# String. Prefix for files stored in an archive.
_ARCHIVE_PATH_PREFIX = 'files'
# String. End of the path to course.json in an archive.
_COURSE_JSON_PATH_SUFFIX = 'data/course.json'
# String. End of the path to course.yaml in an archive.
_COURSE_YAML_PATH_SUFFIX = 'course.yaml'
# Regex. Format of __internal_names__ used by datastore kinds.
_INTERNAL_DATASTORE_KIND_REGEX = re.compile(r'^__.*__$')
# Path prefix strings from local disk that will be included in the archive.
_LOCAL_WHITELIST = frozenset([_COURSE_YAML_PATH_SUFFIX, 'assets', 'data'])
# Path prefix strings that are subdirectories of the whitelist that we actually
# want to exclude because they aren't user | land code and will cause conflicts.
_LOCAL_WHITELIST_EXCLUDES = frozenset(['assets/lib'])
# logging.Logger. Module logger.
_LOG = logging.getLogger('coursebuilder.tools.etl')
logging.basicConfig()
# List of string. Valid values for --log_level.
_LOG_LEVEL_CHOICES = ['DEBUG', 'ERROR', 'INFO', 'WARNING']
# String. Name of the manifest file.
_MANIFEST_FILENAME = 'manifest.json'
# String. Identifier for download mode.
_MODE_DOWNLOAD = 'download'
# String. Identifier for custom run mode.
_MODE_RUN = 'run'
# S | tring. Identifier for upload mode.
_MODE_UPLOAD = 'upload'
# List of all modes.
_MODES = [_MODE_DOWNLOAD, _MODE_RUN, _MODE_UPLOAD]
# Int. The number of times to retry remote_api calls.
_RETRIES = 3
# String. Identifier for type corresponding to course definition data.
_TYPE_COURSE = 'course'
# String. Identifier for type corresponding to datastore entities.
_TYPE_DATASTORE = 'datastore'
# Command-line argument configuration.
PARSER = argparse.ArgumentParser()
PARSER.add_argument(
'mode', choices=_MODES,
help='Indicates whether we are downloading or uploading data', type=str)
PARSER.add_argument(
'type',
help=(
'Type of entity to process. If mode is %s or %s, should be one of '
'%s or %s. If mode is %s, should be an importable dotted path to your '
'etl_lib.Job subclass') % (
_MODE_DOWNLOAD, _MODE_UPLOAD, _TYPE_COURSE, _TYPE_DATASTORE,
_MODE_RUN),
type=str)
PARSER.add_argument(
'course_url_prefix',
help=(
"URL prefix of the course you want to download (e.g. '/foo' in "
"'course:/foo:/directory:namespace'"), type=str)
PARSER.add_argument(
'application_id',
help="The id of the application to read from (e.g. 'myapp')", type=str)
PARSER.add_argument(
'server',
help=(
'The full name of the source application to read from (e.g. '
'myapp.appspot.com)'), type=str)
PARSER.add_argument(
'--archive_path',
help=(
'Absolute path of the archive file to read or write; required if mode '
'is %s or %s' % (_MODE_DOWNLOAD, _MODE_UPLOAD)), type=str)
PARSER.add_argument(
'--batch_size',
help='Number of results to attempt to retrieve per batch',
default=20, type=int)
PARSER.add_argument(
'--datastore_types',
help=(
"When type is '%s', comma-separated list of datastore model types to "
'process; all models are processed by default' % _TYPE_DATASTORE),
type=lambda s: s.split(','))
PARSER.add_argument(
'--disable_remote', action='store_true',
help=(
'If mode is %s, pass this flag to skip authentication and remote '
'environment setup. Should only pass for jobs that run entirely '
'locally and do not require RPCs') % _MODE_RUN)
PARSER.add_argument(
'--force_overwrite', action='store_true',
help=(
'If mode is download and type is course, forces overwrite of entities '
'on the target system that are also present in the archive. Note that '
'this operation is dangerous and may result in data loss'))
PARSER.add_argument(
'--job_args', default=[],
help=(
'If mode is %s, string containing args delegated to etl_lib.Job '
'subclass') % _MODE_RUN, type=lambda s: s.split())
PARSER.add_argument(
'--log_level', choices=_LOG_LEVEL_CHOICES,
help='Level of logging messages to emit', default='INFO',
type=lambda s: s.upper())
class _Archive(object):
"""Manager for local archives of Course Builder data.
The internal format of the archive may change from version to version; users
must not depend on it.
Archives contain assets and data fro |
inspirehep/inspire-next | inspirehep/modules/authors/forms.py | Python | gpl-3.0 | 17,833 | 0.001123 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2018 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
from __future__ import absolute_import, division, print_function
from wtforms import validators
from wtforms.fields import Flags
from wtforms.widgets import (
HTMLString,
HiddenInput,
Select,
TextInput,
html_params,
)
from inspire_schemas.api import load_schema
from inspirehep.modules.forms.field_widgets import (
ColumnInput,
DynamicItemWidget,
DynamicListWidget,
ExtendedListWidget,
ItemWidget,
WrappedInput,
)
from inspirehep.modules.forms.form import INSPIREForm
from inspirehep.modules.forms import fields
from inspirehep.modules.forms.filter_utils import clean_empty_list
from inspirehep.modules.forms.validators.simple_fields import (
duplicated_orcid_validator,
)
from inspirehep.modules.forms.validators.dynamic_fields import LessThan
from inspirehep.modules.forms.validation_utils import (
ORCIDValidator,
RegexpStopValidator,
)
def currentCheckboxWidget(field, **kwargs):
"""Current institution checkbox widget."""
field_id = kwargs.pop('id', field.id)
html = [u'<div class="col-md-10 col-margin-top pull-left">\
<input %s %s type="checkbox">\
<label for=%s>Current</label></div>'
% (html_params(id=field_id,
name=field_id),
| field.data and "checked" or "",
field_id)]
return HTMLString(u''.join(html))
class WrappedSelect(Select):
"""Widget to wrap select input in further markup."""
wrapper = '<div>%(field)s</div>'
wrapped_widget = Select()
def __init__(self, widget=None, wrapper=None, **kwargs):
"""Initialize wrapped input with widget and wrapper."""
self.wrapped_wid | get = widget or self.wrapped_widget
self.wrapper_args = kwargs
if wrapper is not None:
self.wrapper = wrapper
def __call__(self, field, **kwargs):
"""Render wrapped input."""
return HTMLString(self.wrapper % dict(
field=self.wrapped_widget(field, **kwargs),
**self.wrapper_args
))
class ColumnSelect(WrappedSelect):
"""Specialized column wrapped input."""
@property
def wrapper(self):
"""Wrapper template with description support."""
if 'description' in self.wrapper_args:
return ('<div class="%(class_)s">%(field)s'
'<p class="text-muted field-desc">'
'<small>%(description)s</small></p></div>')
return '<div class="%(class_)s">%(field)s</div>'
class InstitutionInlineForm(INSPIREForm):
"""Institution inline form."""
rank_options = [
("rank", "Rank"),
("SENIOR", "Senior (permanent)"),
("JUNIOR", "Junior (leads to Senior)"),
("STAFF", "Staff (non-research)"),
("VISITOR", "Visitor"),
("PD", "PostDoc"),
("PHD", "PhD"),
("MASTER", "Master"),
("UNDERGRADUATE", "Undergrad"),
("OTHER", "Other"),
]
name = fields.StringField(
widget_classes='form-control',
widget=ColumnInput(class_="col-md-6"),
autocomplete='affiliation',
placeholder="Institution. Type for suggestions",
)
rank = fields.SelectField(
choices=rank_options,
default="rank",
widget=ColumnSelect(class_="col-md-6"),
widget_classes='form-control',
validators=[validators.DataRequired()],
)
start_year = fields.StringField(
placeholder='Start Year',
description=u'Format: YYYY.',
widget=WrappedInput(
wrapped_widget=TextInput(),
wrapper='<div class="col-md-6 col-margin-top">%(field)s</div>'
),
validators=[
LessThan('end_year', message='Start year should be earlier than End year'),
RegexpStopValidator(
r'^(\d{4})?$',
message='{} is not a valid year. Please use <i>yyyy</i> format.',
),
],
widget_classes="form-control"
)
end_year = fields.StringField(
placeholder='End Year',
description=u'Format: YYYY.',
widget=WrappedInput(
wrapped_widget=TextInput(),
wrapper='<div class="col-md-6 col-margin-top">%(field)s</div>'
),
validators=[RegexpStopValidator(
"^(\d{4})?$",
message="{} is not a valid year. Please use <i>yyyy</i> format."
)],
widget_classes="form-control"
)
current = fields.BooleanField(
widget=currentCheckboxWidget
)
emails = fields.FieldList(
fields.HiddenField(label=''),
widget_classes='hidden-list'
)
old_emails = fields.FieldList(
fields.HiddenField(label=''),
widget_classes='hidden-list'
)
class EmailInlineForm(INSPIREForm):
"""Public emails inline form."""
email = fields.StringField(
widget_classes="form-control",
validators=[validators.Optional(), validators.Email()],
)
original_email = fields.HiddenField()
class ExperimentsInlineForm(INSPIREForm):
"""Experiments inline form."""
name = fields.StringField(
placeholder="Experiment. Type for suggestions",
label='Experiment',
widget=ColumnInput(class_="col-md-6"),
widget_classes="form-control",
autocomplete="experiment"
)
start_year = fields.StringField(
placeholder='Start Year',
description=u'Format: YYYY.',
widget=WrappedInput(
wrapped_widget=TextInput(),
wrapper='<div class="col-md-6">%(field)s</div>',
),
validators=[
LessThan('end_year', message='Start year should be earlier than End year'),
RegexpStopValidator(
r'^(\d{4})?$',
message='{} is not a valid year. Please use <i>yyyy</i> format.',
),
],
widget_classes="form-control"
)
end_year = fields.StringField(
placeholder='End Year',
description=u'Format: YYYY.',
widget=WrappedInput(
wrapped_widget=TextInput(),
wrapper='<div class="col-md-6 col-margin-top">%(field)s</div>'
),
validators=[
RegexpStopValidator(
r'^(\d{4})?$',
message='{} is not a valid year. Please use <i>yyyy</i> format.',
),
],
widget_classes="form-control"
)
current = fields.BooleanField(
widget=currentCheckboxWidget
)
class AdvisorsInlineForm(INSPIREForm):
"""Advisors inline form."""
name = fields.TextField(
widget_classes='form-control',
placeholder="Name. Type for suggestions",
autocomplete='author',
widget=ColumnInput(
class_="col-xs-5", description=u"Family name, First name"),
export_key='full_name',
)
degree_types_schema = load_schema('elements/degree_type.json')
degree_type_options = [
(val, val.capitalize())
for val in degree_types_schema['enum']
]
degree_type_options.sort(key=lambda x: x[1])
degree_type = fields.SelectField(
choices=degree_type_options,
label='Degree Type',
widget_classes="form-control",
default="phd",
widget=Col |
ferustigris/battleship | simplelevel.py | Python | gpl-3.0 | 552 | 0.007246 | from levels import Abst | ractLevel, LevelsFactory, check
import bomblevel
def allBombed(self, player):
""" Check, is all players units are bombed """
return player.units.count('default_unit') == self.fieldSize()
class Level(AbstractLevel):
def fieldSize(self):
return 5
def units(self):
return ["default_unit" for i in range(self.fieldSize())]
@check(allBombed)
def isGameOver(self, player):
return False
def nextLevel(self):
return "bomblevel"
LevelsFactory.levels[ | "default"] = Level()
|
FedoraScientific/salome-paravis | test/VisuPrs/dump_study/A5.py | Python | lgpl-2.1 | 7,364 | 0.003802 | # Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA | 02111-1307 USA
#
# See http://www.salome-platform.org/ | or email : webmaster.salome@opencascade.com
#
# This case corresponds to: /visu/dump_study/A5 case
from paravistest import datadir, delete_with_inputs
from presentations import *
import pvserver as paravis
from pvsimple import *
my_paravis = paravis.myParavis
settings = {"Offset": [0.0001, 0.0002, 0], "ScalarMode": ("Component", 2), "Position": [0.1, 0.2], "Size": [0.15, 0.25], "Discretize": 1, "NbColors": 44, "NbLabels": 22, "Title": "My presentation", "UseLogScale": 1, "Orientation": 'Horizontal', "ScaleFactor": 0.25, "NbContours": 4}
# 1. TimeStamps.med import
file_path = datadir + "fra.med"
OpenDataFile(file_path)
med_reader = GetActiveSource()
if med_reader is None :
raise RuntimeError, "TimeStamps.med wasn't imported..."
# 2. Plot3D creation
med_field = "VITESSE"
plot3d = Plot3DOnField(med_reader, EntityType.NODE, med_field, 1, is_contour=True)
# apply settings
plot3d.Position = settings["Offset"]
plot3d.LookupTable.VectorMode = settings["ScalarMode"][0]
plot3d.LookupTable.VectorComponent = settings["ScalarMode"][1]
plot3d.LookupTable.Discretize = settings["Discretize"]
plot3d.LookupTable.NumberOfTableValues = settings["NbColors"]
plot3d.LookupTable.UseLogScale = settings["UseLogScale"]
slice_filter = plot3d.Input.Input.Input.Input
normal = get_normal_by_orientation(Orientation.ZX, radians(22), radians(33))
bounds = get_bounds(med_reader)
pos = get_positions(1, normal, bounds, 0.11)
slice_filter.SliceType.Normal = normal
slice_filter.SliceOffsetValues = pos
plot3d.Input.Input.ScaleFactor = settings["ScaleFactor"]
contour_filter = plot3d.Input
rgb_points = plot3d.LookupTable.RGBPoints
scalar_range = [rgb_points[0], rgb_points[4]]
surfaces = get_contours(scalar_range, settings["NbContours"])
contour_filter.Isosurfaces = surfaces
bar = get_bar()
bar.Position = settings["Position"]
bar.Position2 = settings["Size"]
bar.NumberOfLabels = settings["NbLabels"]
bar.Title = settings["Title"]
bar.Orientation = settings["Orientation"]
# 3. Dump Study
path_to_save = os.path.join(os.getenv("HOME"), "Plot3D.py")
SaveTrace( path_to_save )
# 4. Delete the created objects, recreate the view
delete_with_inputs(plot3d)
Delete(GetActiveView())
view = CreateRenderView()
# 5. Execution of the created script
execfile(path_to_save)
# 6. Checking of the settings done before dump
recreated_bar = view.Representations[0]
recreated_plot3d = view.Representations[1]
errors = 0
tolerance = 1e-05
# Offset
offset = recreated_plot3d.Position
for i in range(len(settings["Offset"])):
if abs(offset[i] - settings["Offset"][i]) > tolerance:
print "ERROR!!! Offset value with ", i, " index is incorrect: ", offset[i], " instead of ", settings["Offset"][i]
errors += 1
# Scalar mode
vector_mode = recreated_plot3d.LookupTable.VectorMode
vector_component = recreated_plot3d.LookupTable.VectorComponent
if vector_mode != settings["ScalarMode"][0]:
print "ERROR!!! Vector mode value is incorrect: ", vector_mode, " instead of ", settings["ScalarMode"][0]
errors += 1
if vector_component != settings["ScalarMode"][1]:
print "ERROR!!! Vector component value is incorrect: ", vector_component, " instead of ", settings["ScalarMode"][1]
errors += 1
# Position of scalar bar
pos_x = recreated_bar.Position[0]
pos_y = recreated_bar.Position[1]
if abs(pos_x - settings["Position"][0]) > tolerance:
print "ERROR!!! X coordinate of position of scalar bar is incorrect: ", pos_x, " instead of ", settings["Position"][0]
errors += 1
if abs(pos_y - settings["Position"][1]) > tolerance:
print "ERROR!!! Y coordinate of position of scalar bar is incorrect: ", pos_y, " instead of ", settings["Position"][1]
errors += 1
# Size of scalar bar
width = recreated_bar.Position2[0]
height = recreated_bar.Position2[1]
if abs(width - settings["Size"][0]) > tolerance:
print "ERROR!!! Width of scalar bar is incorrect: ", width, " instead of ", settings["Size"][0]
errors += 1
if abs(height - settings["Size"][1]) > tolerance:
print "ERROR!!! Height of scalar bar is incorrect: ", height, " instead of ", settings["Size"][1]
errors += 1
# Discretize
discretize = recreated_plot3d.LookupTable.Discretize
if discretize != settings["Discretize"]:
print "ERROR!!! Discretize property is incorrect: ", discretize, " instead of ", settings["Discretize"]
errors += 1
# Number of colors
nb_colors = recreated_plot3d.LookupTable.NumberOfTableValues
if nb_colors != settings["NbColors"]:
print "ERROR!!! Number of colors of scalar bar is incorrect: ", nb_colors, " instead of ", settings["NbColors"]
errors += 1
# Number of labels
nb_labels = recreated_bar.NumberOfLabels
if nb_labels != settings["NbLabels"]:
print "ERROR!!! Number of labels of scalar bar is incorrect: ", nb_labels, " instead of ", settings["NbLabels"]
errors += 1
# Title
title = recreated_bar.Title
if title != settings["Title"]:
print "ERROR!!! Title of presentation is incorrect: ", title, " instead of ", settings["Title"]
errors += 1
# Scaling
use_log_scale = recreated_plot3d.LookupTable.UseLogScale
if use_log_scale != settings["UseLogScale"]:
print "ERROR!!! Scaling of presentation is incorrect: ", use_log_scale, " instead of ", settings["UseLogScale"]
errors += 1
# Bar Orientation
orientation = recreated_bar.Orientation
if orientation != settings["Orientation"]:
print "ERROR!!! Orientation of scalar bar is incorrect: ", orientation, " instead of ", settings["Orientation"]
errors += 1
# Scale Factor
scale_factor = recreated_plot3d.Input.Input.ScaleFactor
if abs(scale_factor - settings["ScaleFactor"]) > tolerance:
print "ERROR!!! Scale factor of presentation is incorrect: ", scale_factor, " instead of ", settings["ScaleFactor"]
errors += 1
# Cutting plane
cur_slice_filter = recreated_plot3d.Input.Input.Input.Input
cur_normal = list(cur_slice_filter.SliceType.Normal)
if cur_normal != normal:
print "ERROR!!! Normal of the cutting plane is incorrect: ", cur_normal, " instead of ", normal
errors += 1
cur_pos = list(cur_slice_filter.SliceOffsetValues)
if cur_pos != pos:
print "ERROR!!! Position of the cuttting plane is incorrect: ", cur_pos, " instead of ", pos
errors += 1
# Contours
cur_surfaces = list(recreated_plot3d.Input.Isosurfaces)
if cur_surfaces != surfaces:
print "ERROR!!! Contours is incorrect: ", cur_surfaces, " instead of ", surfaces
errors += 1
if errors > 0:
raise RuntimeError, "There is(are) some error(s) was(were) found... For more info see ERRORs above..."
|
johnrocamora/ImagePy | max_tiff.py | Python | mit | 1,346 | 0.005201 | from PIL import Image
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img1 = Image.open('multipage.tif')
# The following approach seems to be having issue with the
# current TIFF format data
print('The size of each frame is:')
print(img1.size)
# Plots first frame
print('Frame 1')
fig1 = plt.figure(1)
img1.seek(0)
# for i in range(250):
# pixA11 = img1.getpixel((1,i))
# print(pixA11)
f1 = list(img1.getdata())
print(f1[1000])
plt.imshow(img1)
fig1.show()
input()
# Plots eleventh frame
# print('Frame 11')
# fig2 = plt.figure(2)
# img1.seek(10)
# # for i in range(250):
# # pixB11 = img1.getpixel((1,i))
# # print(pixB11)
# f2 = list(img1.getdata())
# print(f2[10000])
# plt.imshow(img1)
# fig2.show()
# input()
# Create a new image
fig3 = plt.figure(3)
imgAvg = I | mage.new(img1.mode, img1.size)
print(img1.mode)
print(img1.size)
fAvg = list()
pix = imgAvg.load()
for i in range(512):
for j in range(512):
pixVal = (f1[i*512+j] + f1[i*512+j]) / 2
# fAvg.append(pixVal)
fAvg.insert(i*512+j,pixVal)
imgAvg.putdata(fAvg)
imgAvg.save('avg.tiff')
plt.imshow(imgAvg)
fig3.show()
print('Average')
|
# The following is necessary to keep the above figures 'alive'
input()
# data = random.random((256, 256))
# img1 = Image.fromarray(data)
# img1.save('test.tiff')
|
AlexisTM/Indoor_Position_lasers | laserpack/bin/Scenari/scenario_circle.py | Python | mit | 1,452 | 0.013085 | #!/usr/bin/env python
from tasks import *
from math import cos, sin, pi
| import rospy
from signal import signal, SIGINT
from geometry_msgs.msg import Point
import sys
def signal_handler(signal, frame):
    """SIGINT handler: shut down the task controller and exit cleanly.

    :param signal: signal number delivered by the OS (unused).
    :param frame: current stack frame at interrupt time (unused).
    """
    print('You pressed Ctrl+C')
    print('Leaving the Controller & closing the UAV')
    # Controller is the module-level taskController created below; its
    # __exit__ is invoked directly to release resources before exiting.
    Controller.__exit__()
    sys.exit(0)
def PointsInCircum(r, z=3, n=8):
    """Return (x, y, z) waypoints evenly spaced on a circle of radius r.

    The original line was corrupted by a stray ' | ' separator in the
    default of ``z``; this restores the intended signature.

    :param r: circle radius in metres.
    :param z: constant altitude (metres) applied to every point (default 3).
    :param n: number of segments; n + 1 points are returned so the
        trajectory closes back on the start point.
    :return: list of (x, y, z) tuples, coordinates rounded to 3 decimals.
    """
    return [(round(cos(2 * pi / n * x) * r, 3),
             round(sin(2 * pi / n * x) * r, 3),
             z)
            for x in range(0, n + 1)]
rospy.init_node('test_tasks')
Controller = taskController(rate=3, setpoint_rate=10)
rospy.loginfo("Controller initiated")
signal(SIGINT, signal_handler)
#Initialisation
tasks = []
rospy.loginfo("Circle sequencing")
tasks.append(init_UAV("Init UAV"))
tasks.append(arm("Arming"))
tasks.append(takeoff("TAKEOFF"))
#Targetting circle points
CirclePoints = PointsInCircum(3)
for circle in CirclePoints:
tasks.append(target("target", Point(circle[0], circle[1], circle[2])))
tasks.append(land("LANDING"))
#Disarming
tasks.append(disarm("Disarming"))
#Adding tasks
Controller.addTasks(tasks)
rospy.loginfo("Tasks added")
# for i in range(100):
while True:
Controller.rate.sleep()
Controller.spinOnce()
rospy.loginfo("Task %s on %s : %s", Controller.current+1, Controller.count, Controller.getCurrentTask().__str__())
Controller.__exit__()
sys.exit(0)
|
internap/fake-switches | tests/juniper/assertion_tools.py | Python | apache-2.0 | 679 | 0 | f | rom hamcrest import assert_that, not_, is_
from hamcrest import has_length
from hamcrest.core.base_matcher import BaseMatcher
def has_xpath(xpath, matcher):
return XPathMatcher(xpath, matcher)
class XPathMatcher(BaseMatcher):
def __init__(self, xpath, matcher):
self.xpath = xpath
self.matcher = matcher
def _matches(self, other):
assert_that(other, is_(not_(None)), "Lookup node doesn't exist")
nodes = othe | r.xpath(self.xpath)
assert_that(nodes, has_length(1), "Nodes length should be 1 element")
assert_that(nodes[0].text, self.matcher)
return True
def describe_to(self, description):
pass
|
avedaee/DIRAC | RequestManagementSystem/test/RequestDBFileTests.py | Python | gpl-3.0 | 7,036 | 0.014639 | ########################################################################
# $HeadURL $
# File: RequestDBFileTests.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2012/06/13 07:51:20
########################################################################
""" :mod: RequestDBFileTests
=======================
.. module: RequestDBFileTests
:synopsis: Test suites for RequestDBFile module.
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
Test suites for RequestDBFile module.
:deprecated:
"""
__RCSID__ = "$Id $"
##
# @file RequestDBFileTests.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2012/06/13 07:51:37
# @brief Definition of RequestDBFileTests class.
## imports
import unittest
import mock
## from DIRAC
from DIRAC import gConfig, gLogger, S_OK, S_ERROR
from DIRAC.RequestManagementSystem.DB.RequestDBFile import RequestDBFile
from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer
reqStr = """<?xml version="1.0" encoding="UTF-8" ?>
<DIRAC_REQUEST>
<Header
Status="Waiting"
LastUpdate="2012-06-01 04:57:02"
DIRACSetup="LHCb-Certification"
CreationTime="0000-00-00 00:00:00"
OwnerGroup="None"
RequestName="testRequest"
SourceComponent="None"
JobID="Unknown"
/>
<TRANSFER_SUBREQUEST element_type="dictionary">
<Files element_type="list">
<EncodedString element_type="leaf"><![CDATA[lds6:Addlers8:cf4f13f9s7:Attempti1es6:FileIDi18es4:GUIDs36:5D5F45D5-9565-7AFB-3896-DB8695F2B35As3:LFNs33:/lhcb/user/c/cibak/cert-test1k-18s3:Md5ns3:PFNs75:srm://srm-lhcb.cern.ch/castor/cern.ch/grid/lhcb/user/c/cibak/cert-test1k-18s4:Sizei1024es6:Statuss7:Waitingee]]></EncodedString>
</Files>
<Attributes element_type="dictionary">
<Status element_type="leaf"><![CDATA[Waiting]]></Status>
<LastUpdate element_type="leaf"><![CDATA[2012-06-01 04:57:02]]></LastUpdate>
<TargetSE element_type="leaf"><![CDATA[PIC-USER]]></TargetSE>
<ExecutionOrder element_type="leaf"><![CDATA[0]]></ExecutionOrder>
<SubRequestID element_type="leaf"><![CDATA[1]]></SubRequestID>
<CreationTime element_type="leaf"><![CDATA[2012-06-01 04:57:02]]></CreationTime>
<SourceSE element_type="leaf"><![CDATA[CERN-USER]]></SourceSE>
<Catalogue element_type="leaf"><![CDATA[LcgFileCatalogCombined]]></Catalogue>
<Arguments element_type="leaf"><![CDATA[None]]></Arguments>
<Error element_type="leaf"><![CDATA[]]></Error>
<SubmissionTime element_type="leaf"><![CDATA[ | 2012-06-01 04:57:02]]></SubmissionTime>
<Operation element_type="leaf"><![CDATA[replicateAndRegister]]></Operation>
</Attributes>
</TRANSFER_SUBREQUEST>
<REMOVAL_SUBREQ | UEST element_type="dictionary">
<Files element_type="list">
<EncodedString element_type="leaf"><![CDATA[lds6:Addlers8:cf4f13f9s7:Attempti1es6:FileIDi18es4:GUIDs36:5D5F45D5-9565-7AFB-3896-DB8695F2B35As3:LFNs33:/lhcb/user/c/cibak/cert-test1k-18s3:Md5ns3:PFNs75:srm://srm-lhcb.cern.ch/castor/cern.ch/grid/lhcb/user/c/cibak/cert-test1k-18s4:Sizei1024es6:Statuss7:Waitingee]]></EncodedString>
</Files>
<Attributes element_type="dictionary">
<Status element_type="leaf"><![CDATA[Waiting]]></Status>
<LastUpdate element_type="leaf"><![CDATA[2012-06-01 04:57:02]]></LastUpdate>
<TargetSE element_type="leaf"><![CDATA[PIC-USER]]></TargetSE>
<ExecutionOrder element_type="leaf"><![CDATA[0]]></ExecutionOrder>
<SubRequestID element_type="leaf"><![CDATA[1]]></SubRequestID>
<CreationTime element_type="leaf"><![CDATA[2012-06-01 04:57:02]]></CreationTime>
<SourceSE element_type="leaf"><![CDATA[CERN-USER]]></SourceSE>
<Catalogue element_type="leaf"><![CDATA[LcgFileCatalogCombined]]></Catalogue>
<Arguments element_type="leaf"><![CDATA[None]]></Arguments>
<Error element_type="leaf"><![CDATA[]]></Error>
<SubmissionTime element_type="leaf"><![CDATA[2012-06-01 04:57:02]]></SubmissionTime>
<Operation element_type="leaf"><![CDATA[replicaRemoval]]></Operation>
</Attributes>
</REMOVAL_SUBREQUEST>
</DIRAC_REQUEST>
"""
########################################################################
class RequestDBFileTests(unittest.TestCase):
  """
  .. class:: RequestDBFileTests

  Exercises RequestDBFile status tracking using the module-level
  ``reqStr`` XML request (one transfer + one removal sub-request).
  """

  def setUp( self ):
    """ set up

    :param self: self reference
    """
    self.log = gLogger.getSubLogger( self.__class__.__name__ )
    self.requestDB = RequestDBFile()
    # Seed the file-backed DB so every test starts from the canned request.
    setRequest = self.requestDB.setRequest( "testRequest", reqStr )

  def tearDown( self ):
    # NOTE(review): cleanup is intentionally disabled -- the test request
    # is left behind between runs; confirm that this is wanted.
    #deleteRequest = self.requestDB.deleteRequest( "testRequest" )
    pass

  def test_01_getRequestStatus( self ):
    # A freshly stored request reports Waiting at both levels.
    self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ),
                      {'OK': True, 'Value': {'SubRequestStatus': 'Waiting', 'RequestStatus': 'Waiting'}})
    ## get request
    # Fetching the removal sub-request is expected to flip it to Assigned.
    getRemoval = self.requestDB.getRequest( "removal" )
    oRequest = RequestContainer( getRemoval["Value"]["RequestString"] )
    self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ),
                      {'OK': True, 'Value': {'SubRequestStatus': 'Assigned', 'RequestStatus': 'Waiting'}} )
    ## make removal Done
    oRequest.subRequests["removal"][0]["Attributes"]["Status"] = "Done"
    oRequest.subRequests["removal"][0]["Files"][0]["Status"] = "Done"
    update = self.requestDB.updateRequest( getRemoval["Value"]["RequestName"],
                                           oRequest.toXML()["Value"] )
    ## get status
    # The transfer sub-request is still pending, so overall stays Waiting.
    self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ),
                      {'OK': True, 'Value': {'SubRequestStatus': 'Waiting', 'RequestStatus': u'Waiting'}})
    ## make transfer Done
    oRequest.subRequests["transfer"][0]["Attributes"]["Status"] = "Done"
    oRequest.subRequests["transfer"][0]["Files"][0]["Status"] = "Done"
    update = self.requestDB.updateRequest( getRemoval["Value"]["RequestName"],
                                           oRequest.toXML()["Value"] )
    ## get status
    # With every sub-request Done, the whole request becomes Done.
    self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ),
                      {'OK': True, 'Value': {'SubRequestStatus': 'Done', 'RequestStatus': 'Done'}} )

  def test_02_getRequest( self ):
    """ getRequest and JobID """
    getRequest = self.requestDB.getRequest("transfer")
    self.assertEqual( getRequest["OK"], True )
    # JobID defaults to 0 for a request created without an owning job.
    self.assertEqual( getRequest["Value"]["JobID"], 0 )
## test exeution
if __name__ == "__main__":
testLoader = unittest.TestLoader()
suite = testLoader.loadTestsFromTestCase( RequestDBFileTests )
suite = unittest.TestSuite( [ suite ] )
unittest.TextTestRunner(verbosity=3).run(suite)
|
FePhyFoFum/quartetsampling | pysrc/quartet_sampling.py | Python | gpl-3.0 | 18,821 | 0.000053 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fileencoding=utf-8
"""
quartet_samping.py: Quartet Sampling method for
phylogenetic branch support evaluation
<http://www.github.com/FePhyFoFum/quartetsampling>
"""
import argparse
import os
import sys
import time
from multiprocessing import Manager, Pool
from shutil import copyfile
from tree_data import TreeData, write_test_trees
from rep_data import DataStore
from rep_data import process_replicate_raxml, process_replicate_raxml_lrt
fr | om rep_data import process_replicate_raxmlng, process_replicate_raxmlng_lrt
from rep_data import process_replicate_iqtree, process_replicate_iqtree_lrt
from rep_data import process_replicate_paup
from rep_data import get_replicates_exhaustive, get_replicates_random
from rep_data import write_run_stats
from paramset import ParamSet, read_config
from alignment import Alignment
LICENSE = """from rep_data import
This file is part of 'quartetsampling'.
'quartetsampling' is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
'quartetsampling' is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with 'quartetsampling'. If not, see <http://www.gnu.org/licenses/>.
"""
def generate_argparser():
parser = argparse.ArgumentParser(
prog="quartet_sampling.py",
description=__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
epilog=LICENSE)
parser.add_argument("--tree", type=open, nargs=1, required=True,
# Prev -t
help=("The input tree in Newick "
"(parenthetical) format."))
parser.add_argument("--align", "--alignment", type=open, nargs=1,
# Prev -a
required=True, dest="align",
help=("Alignment file in \"relaxed phylip\" format, "
"as used by RAxML."))
parser.add_argument("--reps", "--number-of-reps", type=int, nargs=1,
# Prev -N
required=True, default=100, dest="reps",
help=("The number of replicate quartet topology "
"searches to be performed at each node."))
parser.add_argument("--threads", "--number-of-threads", type=int, nargs=1,
# Prev -T
required=True, default=1, dest="threads",
help=("The number of parallel threads to be used "
"by Python for quartet topology searches."))
parser.add_argument("--lnlike", "--lnlike-thresh", type=float, nargs=1,
# Prev -L
default=2.0, dest="lnlike",
help=("The lnlike threshhold that is the minimum "
"value by which the log-likelihood value "
"of the best-likelihood tree must be "
"higher than the second-best-likelihood tree "
"for the replicate to register as the "
"best-likelihood topology rather than "
"'uncertain'. If set to zero, this turns off "
"likelihood evaluation mode and invokes tree "
"inference mode where a tree is simply inferred "
"from the alignment without considering "
"likelihood (QI values are N/A in this case)."))
parser.add_argument("--result-prefix", type=str, nargs=1,
# Prev -r
help="A prefix to put on the result files.")
parser.add_argument("--data-type", choices=('nuc', 'amino', 'cat'),
# Prev -d
default=["nuc"], nargs=1,
help=("(nuc)leotide, (amino) acid, "
"or (cat)egorical data"))
parser.add_argument("--min-overlap", type=int,
# Prev -O
help=("The minimum sites required to be sampled for "
"all taxa in a given quartet."))
parser.add_argument("--results-dir", type=os.path.abspath, nargs=1,
# Prev -o
help=("A directory to which output files will "
"be saved. If not supplied, the current working "
"directory will be used. (default is current "
"folder)."))
parser.add_argument("--verbout", action="store_true",
# Prev -V
help=("Provide output of the frequencies of each "
"topology and QC."))
parser.add_argument("--partitions", type=os.path.abspath, nargs=1,
# Prev -q
help=("Partitions file in RAxML format. If omitted "
"then the entire alignment will be treated "
"as one partition for all quartet replicate "
"topology searches."))
parser.add_argument("--genetrees", type=os.path.abspath, nargs=1,
# Prev -g
help=("Use partitions file (RAxML format) to divide "
"the alignment into separate gene tree regions. "
"Gene alignments will be sampled random for the "
"quartet topology searches."))
parser.add_argument("--temp-dir", type=os.path.abspath, nargs=1,
# Prev -e
help=("A directory to which temporary files will be "
"saved. If not supplied, 'QuartetSampling' "
"will be created in the current "
"working directory. "
"When specifying a custom temporary output "
"the characters 'QuartetSampling' must appear "
"in the directory name to prevent accidental "
"file deletion. (default='./QuartetSampling'"))
parser.add_argument("--retain-temp", action="store_true",
help=("Do not remove temporary files"))
parser.add_argument("--clade", type=str,
# Prev: -C
help=("Conduct analysis on specific clade identified "
"by CSV taxon list"))
parser.add_argument("--start-node-number", type=int, nargs=1,
# Prev -s
help=("An integer denoting the node to which to start "
"from. Nodes will be read from topologically "
"identical (and isomorphic!) input trees in "
"deterministic order, so this argument may be "
"used to restart at an intermediate position "
"(in case the previous run was canceled before "
"completion, for example)."))
parser.add_argument("--stop-node-number", type=int, nargs=1,
# Prev -p
help=("An integer denoting the node at which to stop. "
"Will include nodes with indices <= the stop "
"node number. This argument may be used to "
"limit the length of a given run in case only "
"a certain part of the tree is of interest. "
"Nodes will be read from topologica |
tomaash/gaeo | gaeo/controller/helper.py | Python | apache-2.0 | 792 | 0.002525 | # -*- coding: utf-8 -*-
#
# Copyright 2008 Lin-Chieh Shangkuan & Liang-Heng Chen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" helper module """
def clear_session(self):
    """Invalidate the current session, if one exists.

    Helper intended to be mixed into a controller; it is a no-op when
    ``self.session`` is falsy.
    """
    if not self.session:
        return
    import logging
    logging.info('clear session')
    self.session.invalidate()
elbeardmorez/quodlibet | quodlibet/quodlibet/util/dbusutils.py | Python | gpl-2.0 | 11,310 | 0.000177 | # -*- coding: utf-8 -*-
# Copyright 2012, 2013 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import xml.etree.ElementTree as ET
import dbus
import dbus.service
from quodlibet.compat import unichr, text_type, PY3, iteritems
def dbus_unicode_validate(text):
    """Takes a unicode string and replaces all invalid codepoints that would
    lead to errors if passed to dbus.

    Operates directly on the characters of `text` instead of round-tripping
    through ord()/unichr(), which removes the dependency on the py2 compat
    shim while producing identical output.
    """

    if isinstance(text, bytes):
        text = text.decode("utf-8")

    # https://bugs.freedesktop.org/show_bug.cgi?id=40817
    def valid(c):
        return (c < 0x110000 and
                (c & 0xFFFFF800) != 0xD800 and
                (c < 0xFDD0 or c > 0xFDEF) and
                (c & 0xFFFE) != 0xFFFE)

    # Invalid codepoints (surrogates, noncharacters, out of range) are
    # replaced by U+FFFD REPLACEMENT CHARACTER, as before.
    return u"".join(
        ch if valid(ord(ch)) else u"\uFFFD"
        for ch in text)
def list_spec_properties(spec):
    """Parse a property spec and return a dict:

    {'Metadata': {'access': 'read', 'type': 'a{sv}', 'emit': 'true'}

    'access' can be: read/write/readwrite
    'type' is the dbus data type (dbus.Signature instance)
    'emit' can be true/false/invalidates (see dbus spec)
    """

    assert isinstance(spec, str)

    emits_anno = "org.freedesktop.DBus.Property.EmitsChangedSignal"

    def annotated_emit(node, default):
        # Return the EmitsChangedSignal annotation value, or `default`
        # when the node carries no such annotation.
        for child in node.findall("annotation"):
            if child.attrib["name"] == emits_anno:
                return child.attrib["value"]
        return default

    wrapped = (b'<?xml version="1.0" encoding="UTF-8"?><props>' +
               spec.encode("utf-8") +
               b'</props>')
    root = ET.fromstring(wrapped)

    # An annotation on the root applies to every property unless overridden.
    default_emit = annotated_emit(root, "true")

    result = {}
    for node in root:
        if node.tag != "property":
            continue
        info = node.attrib
        info["emit"] = annotated_emit(node, default_emit)
        info["type"] = dbus.Signature(info["type"])
        result[info.pop("name")] = info
    return result
def filter_property_spec(spec, wl=None, bl=None):
    """Remove properties based on a white list or black list."""

    assert isinstance(spec, str)

    if wl and bl:
        raise ValueError
    if not wl and not bl:
        # Nothing to filter: hand back the spec untouched.
        return spec

    root = ET.fromstring(
        b'<?xml version="1.0" encoding="UTF-8"?><props>' +
        spec.encode("utf-8") +
        b'</props>')

    def should_drop(element):
        # White list keeps only listed names; black list drops listed names.
        name = element.attrib["name"]
        return name not in wl if wl else name in bl

    kept = [ET.tostring(element, encoding="unicode").strip()
            for element in root
            if element.tag != "property" or not should_drop(element)]
    return "\n".join(kept)
# Maps single-character D-Bus signature codes to the dbus-python wrapper
# type used to coerce plain Python values before sending.
TYPE_MAP = {
    "b": dbus.Boolean,
    "n": dbus.Int16,
    "i": dbus.Int32,
    "x": dbus.Int64,
    "q": dbus.UInt16,
    "u": dbus.UInt32,
    "t": dbus.UInt64,
    "d": dbus.Double,
    "o": dbus.ObjectPath,
    "g": dbus.Signature,
    "v": lambda x: x,  # variant: value is passed through unchanged
}
def apply_signature(value, sig, utf8_strings=False):
    """Casts basic types to the right dbus types and packs them
    into containers with the right signature, so they get validated on
    sending.

    Values with an unknown/unhandled signature are returned unchanged.
    """

    # dbus properties are variant, but have a signature defined, so
    # we have to convert manually here.
    if sig in TYPE_MAP:
        return TYPE_MAP[sig](value)
    elif sig.startswith("a{"):
        return dbus.Dictionary(value, signature=sig[2:-1])
    elif sig.startswith("a("):
        return dbus.Struct(value, signature=sig[2:-1])
    elif sig.startswith("a"):
        return dbus.Array(value, signature=sig[1:])
    elif sig == "s":
        if utf8_strings and not PY3:
            if isinstance(value, text_type):
                value = value.encode("utf-8")
            return dbus.UTF8String(value)
        else:
            if isinstance(value, bytes):
                value = value.decode("utf-8")
            return dbus.String(value)

    # Unknown type, just return as is.  (The previous code re-indexed
    # TYPE_MAP in an else-branch here, which could only raise KeyError:
    # the first branch already handled every key in the map, so this
    # fall-through line was unreachable.)
    return value
class DBusIntrospectable(object):
    """Simply collects all introspection data from other mixins
    and provides the Introspect DBus method returning all combined.

    All classes need to call set_introspection with their interface
    and provided signals, properties, methods in the introspection
    xml format.

    The dbus bindings already provide a Introspect method, but it doesn't
    understand properties, also having them in text format in the class
    is a nice documentation.
    """

    IFACE = "org.freedesktop.DBus.Introspectable"
    ISPEC = """
<method name="Introspect">
    <arg type="s" name="xml_data" direction="out"/>
</method>
"""

    def __init__(self):
        # interface name -> list of introspection XML chunks
        self.__ispec = {}
        self.set_introspection(DBusIntrospectable.IFACE,
                               DBusIntrospectable.ISPEC)

    def set_introspection(self, interface, introspection):
        # Multiple mixins may contribute chunks to the same interface.
        self.__ispec.setdefault(interface, []).append(introspection)

    @dbus.service.method(IFACE)
    def Introspect(self):
        # Concatenate every registered chunk into one <node> document.
        parts = []
        parts.append("<node>")
        for iface, intros in iteritems(self.__ispec):
            parts.append("<interface name=\"%s\">" % iface)
            parts.extend(intros)
            parts.append("</interface>")
        parts.append("</node>")
        return "\n".join(parts)
class DBusProperty(object):
"""A mixin for dbus.Object classes to support dbus properties.
Register properties by passing the XML introspection to
'set_properties'.
The class needs to provide 'get/set_property'.
In case the base Object is a FallbackObject, 'get/set_property' also
need to handle an additional realtive path parameter.
Whenever a property changes, 'emit_properties_changed' needs to be
called (except if the annotations disable it). In case of
FallbackObject, with a relative path to the real object (defaults to
the main one).
"""
IFACE = "org.freedesktop.DBus.Properties"
ISPEC = """
<method name="Get">
<arg type="s" name="interface_name" direction="in"/>
<arg type="s" name="property_name" direction="in"/>
<arg type="v" name="value" direction="out"/>
</method>
<method name="GetAll">
<arg type="s" name="interface_name" direction="in"/>
<arg type="a{sv}" name="properties" direction="out"/>
</method>
<method name="Set">
<arg type="s" name="interface_name" direction="in"/>
<arg type="s" name="property_name" direction="in"/>
<arg type="v" name="value" direction="in"/>
</method>
<signal name="PropertiesChanged">
<arg type="s" name="interface_name"/>
<arg type="a{sv}" name="changed_properties"/>
<arg type="as" name="invalidated_properties"/>
</signal>"""
def __init__(self):
self.__props = {}
self.__impl = {}
self.set_introspection(DBusProperty.IFACE, DBusProperty.ISPEC)
def set_properties(self, interface, ispec, bl=None, wl=None):
"""Register properties and set instrospection for the given
property spec. Provide a black list or white list, for
optional, not implemented properties."""
ispec = filter_property_spec(ispec, wl=wl, bl=bl)
self.__props[interface] = list_spec_properties(ispec)
self.__impl.setdefault(interface, [])
self.set_introspection(interface, ispec)
def get_properties(self, interface):
"""Returns a list of (interface, property) for all properties of
the specified interface and subinterfaces"""
result = [(interface, p) for p in self.__props[interface].keys()]
for sub in self.__impl[interface]:
result.extend(self.get_properties(sub))
return result
def get_value(self, interface, prop, path="/"):
"""Returns the value of a property"""
interface = self.get_interface(interface, prop)
if self.SUPPORTS_MULTIPLE_OBJECT_PATHS:
|
RoboCupULaval/StrategyIA | ai/STA/Action/GoBehind.py | Python | mit | 3,560 | 0.005073 | # Under MIT licence, see LICENCE.txt
from typing import Optional
from Util.ai_command import MoveTo
__author__ = "Maxime Gagnon-Legault"
import math
import numpy as np
from Util import Pose
from Util.position import Position
from Util.geometry import wrap_to_pi
from ai.GameDomainObjects import Player
RAYON_AVOID = 300 # (mm)
def GoBehind(player: Player, position1: Position, position2: Optional[Position]=None,
             distance_behind: float=250,
             orientation: str= 'front'):
    """
    GoBehind action: move the robot to the nearest point on the line behind an
    object whose position is given, so that the object ends up between the
    robot and a second reference position.

    :param player: the robot to move.
    :param position1: position of the object to get behind (e.g. the ball).
    :param position2: reference position; the robot places itself so that
        position1 lies between itself and position2 (e.g. the goal).
    :param distance_behind: distance (mm) to keep behind position1.
    :param orientation: 'front' to face position1, 'back' to face away from it.
    :return: a MoveTo command targeting the computed destination pose.
    """
    delta_x = position2.x - position1.x
    delta_y = position2.y - position1.y
    theta = math.atan2(delta_y, delta_x)
    # Nominal destination: distance_behind away from position1, on the side
    # opposite position2.
    x = position1.x - distance_behind * math.cos(theta)
    y = position1.y - distance_behind * math.sin(theta)
    player_x = player.pose.position.x
    player_y = player.pose.position.y
    norm_player_2_position2 = math.sqrt((player_x - position2.x) ** 2+(player_y - position2.y) ** 2)
    norm_position1_2_position2 = math.sqrt((position1.x - position2.x) ** 2 +
                                           (position1.y - position2.y) ** 2)
    if norm_player_2_position2 < norm_position1_2_position2:
        # The robot is on the wrong side: it must go around the object.
        vecteur_position1_2_position2 = np.array([position1.x - position2.x,
                                                  position1.y - position2.y, 0])
        vecteur_vertical = np.array([0, 0, 1])
        vecteur_player_2_position1 = np.array([position1.x - player_x,
                                               position1.y - player_y, 0])
        # In-plane perpendicular direction (cross with the z axis), used to
        # sidestep the object; flipped to point away from the robot.
        vecteur_perp = np.cross(vecteur_position1_2_position2, vecteur_vertical)
        vecteur_perp /= np.linalg.norm(vecteur_perp)
        if np.dot(vecteur_perp, vecteur_player_2_position1) > 0:
            vecteur_perp = -vecteur_perp
        position_intermediaire_x = x + vecteur_perp[0] * RAYON_AVOID
        position_intermediaire_y = y + vecteur_perp[1] * RAYON_AVOID
        # If already within 50 mm of the waypoint, push it further out.
        if math.sqrt((player_x-position_intermediaire_x)**2+(player_y-position_intermediaire_y)**2) < 50:
            position_intermediaire_x += vecteur_perp[0] * RAYON_AVOID * 2
            position_intermediaire_y += vecteur_perp[1] * RAYON_AVOID * 2
        destination_position = Position(position_intermediaire_x, position_intermediaire_y)
    else:
        # Already on the correct side; nudge slightly if on top of the target.
        if math.sqrt((player_x-x)**2+(player_y-y)**2) < 50:
            x -= math.cos(theta) * 2
            y -= math.sin(theta) * 2
        destination_position = Position(x, y)
    # Orientation of the destination pose.
    destination_orientation = 0
    if orientation == 'front':
        destination_orientation = wrap_to_pi((position1 - destination_position).angle)
    elif orientation == 'back':
        destination_orientation = wrap_to_pi((position1 - destination_position).angle + np.pi)
    destination_pose = Pose(destination_position, destination_orientation)
    return MoveTo(destination_pose)
|
artisan-roaster-scope/artisan | src/artisanlib/qrcode.py | Python | gpl-3.0 | 1,715 | 0.009913 | # -*- coding: utf-8 -*-
#
import qrcode
try:
#pylint: disable = E, W, R, C
from PyQt6.QtGui import QImage, QPixmap,QPainter # @UnusedImport @Reimport @UnresolvedImport
from PyQt6.QtCore import Qt # @UnusedImport @Reimport @UnresolvedImport
except Exception:
#pylint: disable = E, W, R, C
from PyQt5.QtGui import QImage, QPixmap,QPainter # @UnusedImpor | t @Reimport @UnresolvedImport
from PyQt5.QtCore import Qt # @UnusedImport @Rei | mport @UnresolvedImport
##########################################################################
##################### QR Image #####################################
##########################################################################
class QRImage(qrcode.image.base.BaseImage):
    """qrcode image factory that renders directly into a Qt QImage."""

    def new_image(self, **_kwargs):
        # Backing store: a white RGB16 QImage sized by the qrcode library.
        img = QImage(self.pixel_size, self.pixel_size, QImage.Format.Format_RGB16)
        img.fill(Qt.GlobalColor.white)
        return img

    def pixmap(self):
        # Convert the rendered image to a QPixmap for display in widgets.
        return QPixmap.fromImage(self.get_image())

    def drawrect(self, row, col):
        # Paint one black module (square) at the given grid cell,
        # offset by the quiet-zone border.
        painter = QPainter(self.get_image())
        painter.fillRect(
            (col + self.border) * self.box_size,
            (row + self.border) * self.box_size,
            self.box_size, self.box_size,
            Qt.GlobalColor.black)

    def save(self, stream, kind=None):
        # Not needed: the image is consumed in-memory via pixmap().
        pass

    def process(self):
        pass

    def drawrect_context(self, row, col, active, context):
        pass
pass
def QRlabel(url_str):
    """Build a QRCode object encoding `url_str`, rendered via QRImage.

    version=None lets the library pick the smallest code that fits the
    data; error correction is the lowest level (L) to keep the code small.
    """
    code = qrcode.QRCode(version=None,
                         error_correction=qrcode.constants.ERROR_CORRECT_L,
                         box_size=4,
                         border=1,
                         image_factory=QRImage)
    code.add_data(url_str)
    code.make(fit=True)
    return code
jolyonb/edx-platform | lms/djangoapps/lms_xblock/runtime.py | Python | agpl-3.0 | 8,551 | 0.001988 | """
Module implementing `xblock.runtime.Runtime` functionality for the LMS
"""
from completion.services import CompletionService
from django.conf import settings
from django.urls import reverse
from edx_django_utils.cache import DEFAULT_REQUEST_CACHE
import xblock.reference.plugins
from badges.service import BadgingService
from badges.utils import badges_enabled
from lms.djangoapps.lms_xblock.models import XBlockAsidesConfig
from openedx.core.djangoapps.user_api.course_tag import api as user_course_tag_api
from openedx.core.lib.url_utils import quote_slashes
from openedx.core.lib.xblock_utils import xblock_local_resource_url, wrap_xblock_aside
from xmodule.library_tools import LibraryToolsService
from xmodule.modulestore.django import ModuleI18nService, modulestore
from xmodule.partitions.partitions_service import PartitionService
from xmodule.services import SettingsService
from xmodule.x_module import ModuleSystem
def handler_url(block, handler_name, suffix='', query='', thirdparty=False):
    """
    This method matches the signature for `xblock.runtime:Runtime.handler_url()`

    Arguments:
        block: XBlock instance whose handler is being addressed.
        handler_name: name of a handler attribute on the block's class;
            validated against the class below.
        suffix: extra path appended after the handler name.
        query: query string to append (without the leading '?').
        thirdparty: if True, build an unauthenticated, fully-qualified URL.

    See :method:`xblock.runtime:Runtime.handler_url`
    """
    view_name = 'xblock_handler'
    if handler_name:
        # Be sure this is really a handler.
        #
        # We're checking the .__class__ instead of the block itself to avoid
        # auto-proxying from Descriptor -> Module, in case descriptors want
        # to ask for handler URLs without a student context.
        func = getattr(block.__class__, handler_name, None)
        if not func:
            raise ValueError(u"{!r} is not a function name".format(handler_name))
        # Is the following necessary? ProxyAttribute causes an UndefinedContext error
        # if trying this without the module system.
        #
        #if not getattr(func, "_is_xblock_handler", False):
        #    raise ValueError("{!r} is not a handler name".format(handler_name))
    if thirdparty:
        # Unauthenticated endpoint for external callers (e.g. LTI providers).
        view_name = 'xblock_handler_noauth'
    url = reverse(view_name, kwargs={
        'course_id': unicode(block.location.course_key),
        'usage_id': quote_slashes(unicode(block.scope_ids.usage_id).encode('utf-8')),
        'handler': handler_name,
        'suffix': suffix,
    })
    # If suffix is an empty string, remove the trailing '/'
    if not suffix:
        url = url.rstrip('/')
    # If there is a query string, append it
    if query:
        url += '?' + query
    # If third-party, return fully-qualified url
    if thirdparty:
        scheme = "https" if settings.HTTPS == "on" else "http"
        url = '{scheme}://{host}{path}'.format(
            scheme=scheme,
            host=settings.SITE_NAME,
            path=url
        )
    return url
def local_resource_url(block, uri):
    """
    local_resource_url for Studio
    """
    # Thin wrapper over the shared xblock_local_resource_url helper.
    # NOTE(review): the docstring says "Studio" but this module is the LMS
    # runtime -- presumably copied from the CMS twin; confirm.
    return xblock_local_resource_url(block, uri)
class UserTagsService(object):
    """
    A runtime class that provides an interface to the user service. It handles filling in
    the current course id and current user.
    """

    COURSE_SCOPE = user_course_tag_api.COURSE_SCOPE

    def __init__(self, runtime):
        # The module system; supplies course_id, anonymous_student_id and
        # the get_real_user de-anonymizer used below.
        self.runtime = runtime

    def _get_current_user(self):
        """Returns the real, not anonymized, current user."""
        real_user = self.runtime.get_real_user(self.runtime.anonymous_student_id)
        return real_user

    def get_tag(self, scope, key):
        """
        Get a user tag for the current course and the current user for a given key

        scope: the current scope of the runtime
        key: the key for the value we want
        """
        # Only course-scoped tags are supported.
        if scope != user_course_tag_api.COURSE_SCOPE:
            raise ValueError(u"unexpected scope {0}".format(scope))
        return user_course_tag_api.get_course_tag(
            self._get_current_user(),
            self.runtime.course_id, key
        )

    def set_tag(self, scope, key, value):
        """
        Set the user tag for the current course and the current user for a given key

        scope: the current scope of the runtime
        key: the key that to the value to be set
        value: the value to set
        """
        # Only course-scoped tags are supported.
        if scope != user_course_tag_api.COURSE_SCOPE:
            raise ValueError(u"unexpected scope {0}".format(scope))
        return user_course_tag_api.set_course_tag(
            self._get_current_user(),
            self.runtime.course_id, key, value
        )
class LmsModuleSystem(ModuleSystem): # pylint: disable=abstract-method
"""
ModuleSystem specialized to the LMS
"""
    def __init__(self, **kwargs):
        """Register the LMS-specific XBlock services before delegating to
        the base ModuleSystem constructor (which consumes **kwargs)."""
        request_cache_dict = DEFAULT_REQUEST_CACHE.data
        store = modulestore()
        services = kwargs.setdefault('services', {})
        user = kwargs.get('user')
        # Completion tracking only makes sense for a signed-in user.
        if user and user.is_authenticated:
            services['completion'] = CompletionService(user=user, course_key=kwargs.get('course_id'))
        services['fs'] = xblock.reference.plugins.FSService()
        services['i18n'] = ModuleI18nService
        services['library_tools'] = LibraryToolsService(store)
        services['partitions'] = PartitionService(
            course_id=kwargs.get('course_id'),
            cache=request_cache_dict
        )
        services['settings'] = SettingsService()
        services['user_tags'] = UserTagsService(self)
        # Badging is feature-flagged; only wire the service when enabled.
        if badges_enabled():
            services['badging'] = BadgingService(course_id=kwargs.get('course_id'), modulestore=store)
        # Popped so the base class never sees it; used by wrap_aside below.
        self.request_token = kwargs.pop('request_token', None)
        super(LmsModuleSystem, self).__init__(**kwargs)
    def handler_url(self, *args, **kwargs):
        """
        Implement the XBlock runtime handler_url interface.

        This is mostly just proxying to the module level `handler_url` function
        defined higher up in this file.

        We're doing this indirection because the module level `handler_url`
        logic is also needed by the `DescriptorSystem`. The particular
        `handler_url` that a `DescriptorSystem` needs will be different when
        running an LMS process or a CMS/Studio process. That's accomplished by
        monkey-patching a global. It's a long story, but please know that you
        can't just refactor and fold that logic into here without breaking
        things.

        https://openedx.atlassian.net/wiki/display/PLAT/Convert+from+Storage-centric+runtimes+to+Application-centric+runtimes

        See :method:`xblock.runtime:Runtime.handler_url`
        """
        # Module-level function, not this method -- resolved at call time so
        # the per-process monkey-patching described above takes effect.
        return handler_url(*args, **kwargs)
    def local_resource_url(self, *args, **kwargs):
        """Proxy to the module-level ``local_resource_url`` helper."""
        return local_resource_url(*args, **kwargs)
    def wrap_aside(self, block, aside, view, frag, context):
        """
        Creates a div which identifies the aside, points to the original block,
        and writes out the json_init_args into a script tag.

        The default implementation wraps ``frag`` with a div identifying the
        xblock. If you have javascript, you'll need to override this impl
        """
        if not frag.content:
            # nothing was rendered, so there is nothing to wrap
            return frag

        runtime_class = 'LmsRuntime'
        # data-* attributes consumed by the client-side runtime
        extra_data = {
            'block-id': quote_slashes(unicode(block.scope_ids.usage_id)),
            'course-id': quote_slashes(unicode(block.course_id)),
            'url-selector': 'asideBaseUrl',
            'runtime-class': runtime_class,
        }
        if self.request_token:
            extra_data['request-token'] = self.request_token

        return wrap_xblock_aside(
            runtime_class,
            aside,
            view,
            frag,
            context,
            usage_id_serializer=unicode,
            request_token=self.request_token,
            extra_data=extra_data,
        )
def applicable_aside_types(self, block):
"""
Return all of the asides which might be decorating this `block`.
Arguments:
block (:class:`.XBlock`): The block to render retrieve asides for.
"""
config = XBlockAsidesConfig.current()
if not config.enabled:
return []
if block.scope_ids.block_type in config.disabled_blocks.split():
|
adrienbrunet/EulerProject | problem_030.py | Python | gpl-2.0 | 863 | 0.003476 | # coding: utf-8
'''
Surprisingly there are only three numbers that can be written as the sum of fourth powers of their digits:
1634 = 1 ** 4 + 6 ** 4 + 3 ** 4 + 4 ** 4
8208 = 8 ** 4 + 2 ** 4 + 0 ** 4 + 8 ** 4
9474 = 9 ** 4 + 4 ** 4 + 7 ** 4 + 4 ** 4
As 1 = 1 ** 4 is not a sum it is not included.
The sum of these numbers is 1634 + 8208 + 9474 = 19316.
Find the sum of all the numbers that can be written as the sum of fifth powers of their digits.
'''
# Tips
# 9 ** 5 = 59049
# 9 999 999 = 7*59049 --> au maximum on atteint: 413343
# Update: I do not remember where 295245 comes from. :/
def result(power):
    """Sum all numbers > 1 that equal the sum of the ``power``-th powers
    of their digits.

    The search bound is derived instead of hardcoded: a d-digit number can
    only be a digit-power sum if d * 9**power >= 10**(d - 1), which fails
    for d > power + 1, so (power + 1) * 9**power is a safe upper bound.
    (The original hardcoded 295245 and subtracted the trivial solution 1;
    starting the range at 2 removes the need for that correction.)
    """
    upper_bound = (power + 1) * 9 ** power
    return sum(
        n for n in range(2, upper_bound)
        if sum(int(digit) ** power for digit in str(n)) == n
    )
def main():
    """Compute the Project Euler #30 answer (fifth powers of digits)."""
    return result(5)
if __name__ == '__main__':
    # sanity-check against the worked fourth-power example before printing
    assert result(4) == 19316
    print(main())

# 443839 in 967ms
|
magayorker/magatip | crypto.py | Python | mit | 10,464 | 0.002771 | import datetime
import getpass
import logging
import time
import traceback
from bitcoinrpc.authproxy import AuthServiceProxy
import bot_logger
import config
import models
import user_function
def get_rpc():
    """Build an authenticated JSON-RPC proxy to the coin daemon.

    Connection parameters (user, password, host, port, timeout) all come
    from ``config.rpc_config``; a fresh proxy is created on every call.
    """
    return AuthServiceProxy("http://%s:%s@%s:%s" % (
        config.rpc_config['doge_rpc_username'], config.rpc_config['doge_rpc_password'],
        config.rpc_config['doge_rpc_host'],
        config.rpc_config['doge_rpc_port']), timeout=config.rpc_config['timeout'])
def backup_wallet():
    """Dump the wallet to a timestamped backup file via the RPC daemon."""
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    backup_file = config.backup_wallet_path + "backup_" + timestamp + ".dat"
    get_rpc().backupwallet(backup_file)
def init_passphrase():
    """Prompt the operator for the wallet passphrase and cache it.

    The passphrase is stored in the module-level ``wallet_passphrase``
    global so later RPC calls can unlock the wallet without re-prompting.
    """
    # enter user passphrase
    global wallet_passphrase
    wallet_passphrase = getpass.getpass("wallet passphrase : ")
def check_passphrase():
    """Verify the cached passphrase actually unlocks the wallet.

    Exits the process when the wallet still reports itself locked.  DEBUG
    logging is disabled around the unlock call so the passphrase cannot
    leak into the logs.
    """
    rpc = get_rpc()
    logging.disable(logging.DEBUG)
    rpc.walletpassphrase(wallet_passphrase, int(config.rpc_config['timeout']))
    logging.disable(logging.NOTSET)

    # check that the wallet is unlocked past 'now'; bail out otherwise
    wallet_info = rpc.getwalletinfo()
    if wallet_info['unlocked_until'] < time.time():
        exit()

    rpc.walletlock()
def balance_user(msg, failover_time):
    """Return the spendable balance for the author of ``msg``.

    Confirmed balance always counts; bot-generated unconfirmed outputs are
    added only outside safe mode (more than 24h past ``failover_time``).
    Implicitly returns None when the user is not registered.
    """
    user = models.User(msg.author.name)
    if user.is_registered():
        # get confirmed balance
        spendable_balance = user.get_balance_confirmed()
        if time.time() > int(failover_time.value) + 86400:
            # not in safe mode so add unconfirmed balance
            spendable_balance += get_user_spendable_balance(user.username)
        return spendable_balance
def get_user_spendable_balance(user, rpc=None):
    """Return the 0-conf balance of ``user`` funded by the bot's own addresses.

    Only unconfirmed outputs whose first funding input came from one of the
    bot's addresses are counted; any tips still pending for unregistered
    recipients are subtracted from the total.
    """
    if rpc is None:
        rpc = get_rpc()

    # spendable_balance is the confirmed balance and the unconfirmed balance of
    # transactions that the bot has generated, but not the unconfirmed balance of
    # transactions originating from a wallet address that does not belong to the bot
    unspent_amounts = []
    address = user_function.get_user_address(user)
    # minconf=0, maxconf=0: unconfirmed outputs only
    list_unspent = rpc.listunspent(0, 0, [address])

    # in case of no un-spent transaction
    if len(list_unspent) == 0:
        return 0

    for i in range(0, len(list_unspent), 1):
        trans = rpc.decoderawtransaction(rpc.getrawtransaction(list_unspent[i]['txid']))
        # for v_in in range(0,len(trans['vin']),1):
        # NOTE(review): only the first input of each funding tx is inspected
        vin = rpc.decoderawtransaction(rpc.getrawtransaction(trans['vin'][0]['txid']))
        if vin['vout'][0]['scriptPubKey']['addresses'][0] in user_function.get_users().values():
            unspent_amounts.append(list_unspent[i]['amount'])

    bot_logger.logger.debug("unspent_amounts %s" % (str(sum(unspent_amounts))))

    # check if user have pending tips
    pending_tips = user_function.get_balance_unregistered_tip(user)
    bot_logger.logger.debug("pending_tips %s" % (str(pending_tips)))

    return int(sum(unspent_amounts) - int(pending_tips))
def get_user_confirmed_balance(address):
    """Sum the confirmed (>= 1 confirmation) unspent outputs of ``address``."""
    unspent = get_rpc().listunspent(1, 99999999, [address])
    # in case of no un-spent transaction
    if not unspent:
        return 0
    total = sum(entry['amount'] for entry in unspent)
    bot_logger.logger.debug("unspent_amounts %s" % (str(total)))
    return int(total)
def get_user_unconfirmed_balance(address):
    """Sum the unconfirmed (0-conf) unspent outputs of ``address``."""
    unconfirmed = get_rpc().listunspent(0, 0, [address])
    # in case of no unconfirmed transactions
    if not unconfirmed:
        return 0
    total = sum(entry['amount'] for entry in unconfirmed)
    bot_logger.logger.debug("unconfirmed_amounts %s" % (str(total)))
    return int(total)
def tip_user(sender_address, receiver_address, amount_tip, tx_queue, failover_time):
    """Send a tip, choosing normal or safe ("failover") mode.

    Safe mode is active for 24h after ``failover_time``.  Returns the
    transaction id on success, or None when sending failed (the traceback
    is printed, matching the previous behaviour).
    """
    bot_logger.logger.debug("failover_time : %s " % (str(failover_time.value)))
    if time.time() > int(failover_time.value) + 86400:
        bot_logger.logger.info("tip send in normal mode")
        send_func = send_to
    else:
        bot_logger.logger.info("tip send in safe mode")
        send_func = send_to_failover
    try:
        return send_func(None, sender_address, receiver_address, amount_tip, False, tx_queue)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt.  Still best-effort: log and return None.
        traceback.print_exc()
def send_to(rpc, sender_address, receiver_address, amount, take_fee_on_amount=False, tx_queue=None):
    """Build, sign and broadcast a raw transaction of ``amount``.

    Inputs are gathered first from confirmed outputs of ``sender_address``,
    then, if still short, from unconfirmed outputs funded by one of the
    bot's own addresses.  Change goes back to ``sender_address``.  Returns
    the transaction id; when ``tx_queue`` is given the txid is also queued
    for the double-spend checker.
    """
    if rpc is None:
        rpc = get_rpc()

    # BUG FIX: the original message swapped sender and receiver
    bot_logger.logger.info("send %s to %s from %s" % (amount, receiver_address, sender_address))

    list_unspent = rpc.listunspent(1, 99999999, [sender_address])
    unspent_amounts = []
    raw_inputs = []
    fee = 1
    for i in range(0, len(list_unspent), 1):
        unspent_amounts.append(list_unspent[i]['amount'])

        # check if we have enough tx
        tx = {
            "txid": str(list_unspent[i]['txid']),
            "vout": list_unspent[i]['vout']
        }
        raw_inputs.append(tx)
        fee = calculate_fee(len(raw_inputs), 2)
        # NOTE(review): the `and` keeps adding inputs until the tx is both
        # funded AND at least 750 bytes -- confirm the size floor is intended
        if sum(unspent_amounts) > (float(amount) + float(fee)) and (calculate_size(len(raw_inputs), 2) >= 750):
            break

    # top up with unconfirmed outputs that came from the bot's own addresses
    list_unspent = rpc.listunspent(0, 0, [sender_address])
    for i in range(0, len(list_unspent), 1):
        trans = rpc.decoderawtransaction(rpc.getrawtransaction(list_unspent[i]['txid']))
        vin = rpc.decoderawtransaction(rpc.getrawtransaction(trans['vin'][0]['txid']))
        if vin['vout'][0]['scriptPubKey']['addresses'][0] in user_function.get_users().values():
            unspent_amounts.append(list_unspent[i]['amount'])
            tx = {
                "txid": str(list_unspent[i]['txid']),
                "vout": list_unspent[i]['vout']
            }
            raw_inputs.append(tx)
            fee = calculate_fee(len(raw_inputs), 2)
            if sum(unspent_amounts) > (float(amount) + float(fee)):
                break

    bot_logger.logger.debug("sum of unspend : " + str(sum(unspent_amounts)))
    bot_logger.logger.debug("fee : %s" % str(fee))
    bot_logger.logger.debug("size : %s" % str(calculate_size(len(raw_inputs), 2)))
    bot_logger.logger.debug("raw input : %s" % raw_inputs)

    if take_fee_on_amount:
        return_amount = int(sum(unspent_amounts)) - int(int(amount) - int(fee))
    else:
        return_amount = int(sum(unspent_amounts)) - int(amount) - int(fee)
    bot_logger.logger.debug("return amount : %s" % str(return_amount))

    if int(return_amount) < 1:
        # change would be dust: no change output
        raw_addresses = {receiver_address: int(amount)}
    else:
        # when consolidate tx
        if receiver_address == sender_address:
            raw_addresses = {receiver_address: int(int(amount) - int(fee))}
        else:
            raw_addresses = {receiver_address: int(amount), sender_address: int(return_amount)}

    bot_logger.logger.debug("raw addresses : %s" % raw_addresses)
    raw_tx = rpc.createrawtransaction(raw_inputs, raw_addresses)
    bot_logger.logger.debug("raw tx : %s" % raw_tx)

    # BUG FIX: the original logged receiver_address twice
    bot_logger.logger.info('send %s Maga from %s to %s ' % (str(amount), sender_address, receiver_address))

    # keep the passphrase out of DEBUG logs while unlocking the wallet
    logging.disable(logging.DEBUG)
    rpc.walletpassphrase(wallet_passphrase, int(config.rpc_config['timeout']))
    logging.disable(logging.NOTSET)
    signed = rpc.signrawtransaction(raw_tx)
    rpc.walletlock()
    send = rpc.sendrawtransaction(signed['hex'])

    # add tx id in queue for double spend check
    if tx_queue is not None:
        time.sleep(4)
        tx_queue.put(send)

    return send
def send_to_failover(rpc, sender_address, receiver_address, amount, take_fee_on_amount=False, tx_queue=None):
if rpc is None:
rpc = get_rpc()
bot_logger.logger.info("send %s to %s from %s" % (amount, sender_address, receiver_address))
list_unspent = rpc.listunspent(1 | , 9999999 | 9, [sender_address])
unspent_amounts = []
raw_inputs = []
fee = 1
for i in range(0, len(list_unspent), 1):
unspent_amounts.append(list_unspent[i]['amount'])
# check if we have enough |
altai/nova-billing | setup.py | Python | lgpl-2.1 | 1,852 | 0.00216 | #!/usr/bin/python2
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Nova Billing
# Copyright (C) 2010-2012 Grid Dynamics Consulting Services, Inc
# All Rights Reserved
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
import gettext
import glob
import os
import subprocess
import sys
from setuptools import setup, find_packages
ROOT = os.path.dirname(__file__)
sys.path.append(ROOT)  # make the in-tree nova_billing package importable

from nova_billing.version import version_string

# Packaging metadata; the console_scripts entries are the daemons and
# management commands shipped with nova-billing.
setup(name='nova-billing',
      version=version_string(),
      license='GNU LGPL 2.1',
      description='cloud computing fabric controller',
      author='Alessio Ababilov, Ivan Kolodyazhny (GridDynamics Openstack Core Team, (c) GridDynamics)',
      author_email='openstack@griddynamics.com',
      url='http://www.griddynamics.com/openstack',
      packages=find_packages(exclude=['bin', 'smoketests', 'tests']),
      entry_points={
          'console_scripts': [
              'nova-billing-heart = nova_billing.heart.main:main',
              'nova-billing-os-amqp = nova_billing.os_amqp.main:main',
              'nova-billing-populate = nova_billing.populate:main',
              'nova-billing-configure = nova_billing.configure:main',
          ]
      },
      py_modules=[],
      test_suite='tests'
      )
|
rmvanhees/pys5p | src/pys5p/ckd_io.py | Python | bsd-3-clause | 23,526 | 0.000043 | """
This file is part of pyS5p
https://github.com/rmvanhees/pys5p.git
Provides access to the S5P Tropomi CKD (static and dynamic)
ToDo
----
- access to UVN CKD, still incomplete
- identify latest Static CKD product, e.g. using the validity period
Copyright (c) 2018-2021 SRON - Netherlands Institute for Space Research
All Rights Reserved
License: BSD-3-Clause
"""
from pathlib import Path, PosixPath
import h5py
import numpy as np
import xarray as xr
from pys5p.s5p_xarray import h5_to_xr
# - local functions ------------------------------
def reject_row257(xarr):
    """
    Remove row 257 from DataArray or Dataset
    """
    keep_rows = slice(0, 256)
    return xarr.isel(row=keep_rows)
# - class definition -------------------------------
class CKDio():
"""
Read Tropomi CKD from the Static CKD product or from dynamic CKD products
Attributes
----------
ckd_dir : pathlib.Path
ckd_version : int
ckd_file : pathlib.Path
ckd_dyn_file : pathlib.Path
fid : h5py.File
Methods
-------
close()
Close resources.
creation_time()
Returns datetime when the L1b product was created.
creator_version()
Returns version of Tropomi L01B processor used to generate this procuct.
validity_period()
Return validity period of CKD product as a tuple of 2 datetime objects.
get_param(ds_name, band='7')
Returns value(s) of a CKD parameter from the Static CKD product.
dn2v_factors()
Returns digital number to Volt CKD, SWIR only.
v2c_factors()
Returns Voltage to Charge CKD, SWIR only.
absirr(qvd=1, bands='78')
Returns absolute irradiance responsivity.
absrad(bands='78')
Returns absolute radiance responsivity.
memory()
Returns memory CKD, SWIR only.
noise()
Returns pixel read-noise CKD, SWIR only
prnu(bands='78')
Returns Pixel Response Non-Uniformity (PRNU).
relirr(qvd=1, bands='78')
Returns relative irradiance correction.
saa()
Returns definition of the SAA region.
wavelength(bands='78')
Returns wavelength CKD.
darkflux()
Returns dark-flux CKD, SWIR only.
offset()
Returns offset CKD, SWIR only.
pixel_quality()
Returns Detector Pixel Quality Mask (float [0, 1]), SWIR only.
dpqf(threshold=None)
Returns Detector Pixel Quality Mask (boolean), SWIR only.
saturation()
Returns saturation values (pre-offset), SWIR only
Notes
-----
Not all CKD are defined or derived for all bands.
You can request a CKD for one band or for a channel (bands: '12', '34',
'56', '78'). Do not mix bands from different channels
The option to have dynamic CKD is not used for the Tropomi mission, only
for S/W version 1 a dynamic CKD product is defin | ed. This product contained
the OCAL CKD and was not updated automatically. For version 2, all CKD are
stored in one product, where some CKD have a time-axis to correct any
in-flight degradation.
Therefore, the logic to find a CKD is implemented as follows:
1) ckd_dir, defines the base directory to search for the CKD products
(see below)
2) ckd_file, defines the full path to (static) CKD product;
(version 1) any product with dynamic CKD has to be in the same director | y
Version 1:
* Static CKD are stored in one file: glob('*_AUX_L1_CKD_*')
* Dynamic CKD are stored in two files:
- UVN, use glob('*_ICM_CKDUVN_*')
- SWIR, use glob('*_ICM_CKDSIR_*')
Version 2+:
* All CKD in one file: glob('*_AUX_L1_CKD_*')
* Dynamic CKD are empty
Examples
--------
"""
    def __init__(self, ckd_dir=None, ckd_version=1, ckd_file=None):
        """
        Initialize access to a Tropomi Static CKD product

        Parameters
        ----------
        ckd_dir : str, optional
            Directory where the CKD files are stored,
            default='/nfs/Tropomi/share/ckd'
        ckd_version : int, optional
            Version of the CKD, default=1
        ckd_file : str, optional
            Name of the CKD file, default=None then the CKD file is searched
            in the directory ckd_dir with ckd_version in the glob-string
        """
        if ckd_dir is None:
            ckd_dir = '/nfs/Tropomi/share/ckd'
        self.ckd_version = max(1, ckd_version)
        self.ckd_dyn_file = None

        # define path to CKD product
        if ckd_file is None:
            if not Path(ckd_dir).is_dir():
                raise FileNotFoundError(f'Not found CKD directory: {ckd_dir}')
            self.ckd_dir = Path(ckd_dir)
            glob_str = f'*_AUX_L1_CKD_*_*_00000_{self.ckd_version:02d}_*_*.h5'
            # prefer the 'static' subdirectory when present
            if (self.ckd_dir / 'static').is_dir():
                res = sorted((self.ckd_dir / 'static').glob(glob_str))
            else:
                res = sorted(self.ckd_dir.glob(glob_str))
            if not res:
                raise FileNotFoundError('Static CKD product not found')
            # assumes lexicographic file order puts the newest product last
            # (see the module-level ToDo about identifying the latest CKD)
            self.ckd_file = res[-1]
        else:
            if not Path(ckd_file).is_file():
                raise FileNotFoundError(f'Not found CKD file: {ckd_file}')
            self.ckd_dir = Path(ckd_file).parent
            self.ckd_file = Path(ckd_file)

        # obtain path to dynamic CKD product (version 1, only)
        if self.ckd_version == 1:
            if (self.ckd_dir / 'dynamic').is_dir():
                res = sorted((self.ckd_dir / 'dynamic').glob('*_ICM_CKDSIR_*'))
            else:
                res = sorted(self.ckd_dir.glob('*_ICM_CKDSIR_*'))
            if res:
                self.ckd_dyn_file = res[-1]

        # open access to CKD product
        self.fid = h5py.File(self.ckd_file, "r")
    def __enter__(self):
        """
        method called to initiate the context manager
        """
        # nothing extra to acquire: the HDF5 file was opened in __init__
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        """
        method called when exiting the context manager
        """
        # always release the HDF5 file handle
        self.close()
        return False  # any exception is raised by the with statement.
def close(self) -> None:
"""
Make sure that we close all resources
"""
if self.fid is not None:
self.fid.close()
def creation_time(self) -> str:
"""
Returns datetime when the L1b product was created
"""
if self.ckd_version == 2:
attr = self.fid['METADATA'].attrs['production_datetime']
else:
group = PosixPath('METADATA', 'earth_explorer_header',
'fixed_header', 'source')
attr = self.fid[str(group)].attrs['Creator_Date'][0]
if isinstance(attr, bytes):
attr = attr.decode('ascii')
return attr
def creator_version(self) -> str:
"""
Returns version of Tropomi L01B processor used to generate this procuct
"""
group = PosixPath('METADATA', 'earth_explorer_header', 'fixed_header')
attr = self.fid[str(group)].attrs['File_Version']
if self.ckd_version == 1:
attr = attr[0]
if isinstance(attr, bytes):
attr = attr.decode('ascii')
return attr
@staticmethod
def __get_spectral_channel(bands: str):
"""
Check bands is valid: single band or belong to one channel
Parameters
----------
bands : str
Tropomi bands [1..8] or channels ['12', '34', '56', '78'],
default: '78'
"""
band2channel = ['UNKNOWN', 'UV', 'UV', 'VIS', 'VIS',
'NIR', 'NIR', 'SWIR', 'SWIR']
if 0 < len(bands) > 2:
raise ValueError('read per band or channel, only')
if len(bands) == 2:
if band2channel[int(bands[0])] != band2channel[int(bands[1])]:
raise ValueError('bands should be of the same channel')
return band2channel[int(bands[0])]
def get_param(self, ds_name: str, band='7'):
"""
Returns value(s) of a CKD parameter from the Static CKD product.
Parameters
----------
ds_name : string
Name of the HDF5 dataset, default='pixel_full_wel |
infobloxopen/infoblox-netmri | infoblox_netmri/api/broker/v3_8_0/fail_over_configuration_broker.py | Python | apache-2.0 | 12,204 | 0.002704 | from ..broker import Broker
class FailOverConfigurationBroker(Broker):
controller = "fail_over_configurations"
def get_config(self, **kwargs):
"""Get the failover configuration for the specified unit.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param unit_id: Unit ID. While not set in OC environment, the API request returns the failover configuration of all units.
:type unit_id: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return failover_progress: The id of the failover action output file.
:rtype failover_progress: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return fail_over_configuration: Text (json,xml or csv) interpretation of current failover configuration.
:rtype fail_over_configuration: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return sync_ok: Success indicator of sync to neighbour operation.
:rtype sync_ok: Boolean
"""
return self.api_request(self._get_method_fullname("get_config"), kwargs)
def action_status(self, **kwargs):
"""Shows failover action progress for specified unit.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param unit_id: Unit ID. Should be specified in OC/Collector environment. Default value is 0.
:type unit_id: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param id: The id of the session output file.
:type id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param read: File offset to show
:type read: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return read: Offset in bytes from the start of the file, to be used in the next get_progress call, in order to retrieve the next lines of the output.
:rtype read: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return output: Result of the failover action.
:rtype output: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return status: Status of the remaining output data to dump: 0 - no data to dump, 1 - more data is available
:rtype status: Integer
"""
return self.api_request(self._get_method_fullname("action_status"), kwargs)
def action(self, **kwargs):
"""Performs the failover action (enable or disable) for the specified unit.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``req | uired:`` False
| ``default:`` None
:param | unit_id: Unit ID. Should be specified in OC/Collector environment. Default value is 0.
:type unit_id: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param name: Failover action name, possible values: 'enable', 'disable'
:type name: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return failover_progress: The internal id of the failover action progress.
:rtype failover_progress: String
"""
return self.api_request(self._get_method_fullname("action"), kwargs)
def failover(self, **kwargs):
"""Switches the specified unit to the secondary role.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param unit_id: Unit ID. Should be specified in OC/Collector environment. Default value is 0.
:type unit_id: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return status: Text (json,xml or csv) interpretation of the operation result. Contains just unit_id and current status.
:rtype status: String
"""
return self.api_request(self._get_method_fullname("failover"), kwargs)
def set_config(self, **kwargs):
"""Sets the failover configuration for the specified unit.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param unit_id: Unit ID. Should be specified in OC/Collector environment. Default value is 0.
:type unit_id: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param primary_index: Primary index. It indicates who is primary now (1-first, 2-second).
:type primary_index: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param virtual_ip: Virtual IP address.
:type virtual_ip: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param virtual_hostname: Virtual hostname.
:type virtual_hostname: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param replication_direct_connect: Indicates if replication uses a direct connection through HA port. Default value is true.
:type replication_direct_connect: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param replication_port: Replication port. Required for non direct connection replication.
:type replication_port: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param first_replication_ip: First replication IP. Required for non direct connection replication.
:type first_replication_ip: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param first_management_ip: First management IP. Required for secondary peer.
:type first_management_ip: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param first_management_hostname: First management hostname. Required for secondary peer.
|
lociii/jukebox_live_indexer | setup.py | Python | mit | 1,135 | 0 | # -*- coding: UTF-8 -*-
from setuptools import setup, find_packages
# Packaging metadata for the jukebox background library indexer.
# (Restores the string literals mangled by extraction: the "indexer"
# keyword and the "Intended Audience :: Developers" classifier.)
setup(
    name="jukebox-live-indexer",
    packages=find_packages(),
    version="0.1.1",
    description="Background library indexing service for jukebox",
    author="Jens Nistler",
    author_email="opensource@jensnistler.de",
    url="http://jensnistler.de/",
    download_url='http://github.com/lociii/jukebox_live_indexer',
    keywords=["jukebox", "music", "mp3", "background", "library", "indexer"],
    license="MIT",
    classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Development Status :: 3 - Alpha",
        "Environment :: Web Environment",
        "Intended Audience :: End Users/Desktop",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Multimedia :: Sound/Audio :: Players",
    ],
    install_requires=[
        "jukebox>=0.3.7",
        "python-daemon==1.6",
        "watchdog>=0.6.0",
    ],
    include_package_data=True,
    long_description=open("README.rst").read()
)
|
discentes-imd/IntegraTI-API | app/mod_auth/ns_models.py | Python | gpl-3.0 | 75 | 0.013333 | from flask_restplus import fields
from app.mod | _auth.con | trollers import ns
|
class ConnectionException(Exception):
    """Raised when a connection-related operation fails.

    The offending value is kept on ``value`` for callers that inspect it.
    """

    def __init__(self, value):
        # Pass the value to Exception so args/pickling behave conventionally.
        super(ConnectionException, self).__init__(value)
        self.value = value

    def __str__(self):
        return repr(self.value)
|
malon/presupuesto | budget_app/models/budget_item.py | Python | gpl-2.0 | 5,467 | 0.004024 | from django.db import models
from django.conf import settings
class BudgetItemManager(models.Manager):
    """Manager adding raw-SQL helpers for denormalized access and search."""

    def each_denormalized(self, additional_constraints=None, additional_arguments=None):
        """Yield budget items joined with every category dimension.

        ``additional_constraints`` is appended verbatim after ``and`` and must
        be trusted/static SQL; user-supplied values belong in
        ``additional_arguments`` (bound as query parameters).
        """
        sql = \
            "select " \
                "fc.area, fc.policy, fc.function, fc.programme, " \
                "ec.chapter, ec.article, ec.heading, ec.subheading, " \
                "ic.institution, ic.section, ic.department, " \
                "fdc.source, fdc.fund, "\
                "i.id, i.item_number, i.description, i.expense, i.actual, i.amount, " \
                "b.year, " \
                "e.name " \
            "from " \
                "budget_items i, " \
                "functional_categories fc, " \
                "institutional_categories ic, " \
                "economic_categories ec, " \
                "funding_categories fdc, " \
                "budgets b, " \
                "entities e " \
            "where " \
                "i.functional_category_id = fc.id and " \
                "i.institutional_category_id = ic.id and " \
                "i.economic_category_id = ec.id and " \
                "i.funding_category_id = fdc.id and " \
                "i.budget_id = b.id and " \
                "b.entity_id = e.id"

        if additional_constraints:
            sql += " and " + additional_constraints

        return self.raw(sql, additional_arguments)

    # Do a full-text search in the database. Note we ignore execution data, as it doesn't
    # add anything new to the budget descriptions.
    def search(self, query, year, page):
        sql = "select " \
                  "b.year, " \
                  "e.name, e.level, " \
                  "i.id, i.description, i.amount, i.expense, " \
                  "ec.article, ec.heading, ec.subheading, " \
                  "ic.institution, ic.section, ic.department, " \
                  "fc.policy, fc.programme " \
              "from " \
                  "budget_items i, " \
                  "budgets b, " \
                  "entities e, " \
                  "functional_categories fc, " \
                  "economic_categories ec, " \
                  "institutional_categories ic " \
              "where " \
                  "i.budget_id = fc.budget_id and " \
                  "i.budget_id = b.id and " \
                  "b.entity_id = e.id and " \
                  "i.actual = false and " \
                  "i.functional_category_id = fc.id and " \
                  "i.institutional_category_id = ic.id and " \
                  "i.economic_category_id = ec.id and " \
                  "to_tsvector('"+settings.SEARCH_CONFIG+"',i.description) @@ plainto_tsquery('"+settings.SEARCH_CONFIG+"',%s)"
        params = [query]
        if year:
            # SECURITY FIX: bind the year as a query parameter instead of the
            # original string interpolation ("... b.year='%s'" % year), which
            # allowed SQL injection through the year argument.
            sql += " and b.year=%s"
            params.append(year)
        sql += " order by i.amount desc"
        return self.raw(sql, params)
class BudgetItem(models.Model):
    """One budget line, linked to its budget and four category dimensions."""
    budget = models.ForeignKey('Budget')
    actual = models.BooleanField()      # execution data vs. planned budget
    expense = models.BooleanField()     # expense line vs. income line
    item_number = models.CharField(max_length=3)
    description = models.CharField(max_length=512)
    amount = models.BigIntegerField()
    economic_category = models.ForeignKey('EconomicCategory', db_column='economic_category_id')
    functional_category = models.ForeignKey('FunctionalCategory', db_column='functional_category_id')
    funding_category = models.ForeignKey('FundingCategory', db_column='funding_category_id')
    institutional_category = models.ForeignKey('InstitutionalCategory', db_column='institutional_category_id')
    updated_at = models.DateTimeField(auto_now=True)
    created_at = models.DateTimeField(auto_now_add=True)

    objects = BudgetItemManager()

    class Meta:
        app_label = "budget_app"
        db_table = "budget_items"

    # Return a budget id unique across all years, so we can match them later
    # with the descriptions. Important note: this won't work in a normal budget
    # item, it expects a denormalized record.
    # CAREFUL: An 'item number' plus an economic category doesn't make a line
    # unique: you need the institution too! (I.e. you basically need the whole
    # line. In this case we can skip the functional category, since we filter
    # along that dimension)
    def uid(self):
        # XXX: The subheading call originally assumed the values do exist; not true anymore
        # with smaller entities. I'm working around it for now, partially, but I haven't
        # thought fully about the implications of all this.
        department = getattr(self, 'department') if getattr(self, 'department') else ''
        # fall back through ever-coarser economic categories when missing
        subheading = getattr(self, 'subheading') if getattr(self, 'subheading') else (getattr(self, 'heading') if getattr(self, 'heading') else (getattr(self, 'article') if getattr(self, 'article') else getattr(self, 'chapter')))
        item_number = getattr(self, 'item_number') if getattr(self, 'item_number') else ''
        return str(getattr(self, 'year')) + '/' + \
            department + '/' + \
            subheading + '/' + \
            item_number

    def year(self):
        return self.budget.year

    def programme(self):
        return self.functional_category.programme

    def policy(self):
        return self.functional_category.policy

    # Whether an item is a financial expense (i.e. paying debt, mostly) or income (i.e. new debt).
    # Only works on a denormalized record.
    def is_financial(self):
        return getattr(self, 'chapter') == '8' or getattr(self, 'chapter') == '9'

    def __unicode__(self):
        return self.description
|
TheTimmy/spack | lib/spack/spack/cmd/cd.py | Python | lgpl-2.1 | 1,684 | 0 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack.cmd.location
import spack.modules
description = "cd to spack directories in the shell"
section = "environment"
level = "long"
def setup_parser(subparser):
    """This is for decoration -- spack cd is used through spack's
    shell support. This allows spack cd to print a descriptive
    help message when called with -h.

    The actual argument definitions are shared with ``spack location``.
    """
    spack.cmd.location.setup_parser(subparser)
def cd(parser, args):
    # Invoked when 'spack cd' is run without shell support; presumably prints
    # instructions for enabling it -- NOTE(review): confirm in spack.modules.
    spack.modules.print_help()
|
lipis/guestbook | main/appengine_config.py | Python | mit | 408 | 0.017157 | # coding: utf-8
import os
import sys

# On production App Engine, bundled third-party packages are served from a
# zip archive to keep file counts down.
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
    sys.path.insert(0, 'lib.zip')
else:
    # On the local dev server, drop the sandbox skip-rule that hides lib/ so
    # the unzipped packages can be imported directly.
    import re
    from google.appengine.tools.devappserver2.python import stubs
    pattern = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
    stubs.FakeFile._skip_files = re.compile(pattern)
    sys.path.insert(0, 'lib')

sys.path.insert(0, 'libx')
|
kaichogami/sympy | sympy/physics/quantum/cg.py | Python | bsd-3-clause | 22,514 | 0.002132 | #TODO:
# -Implement Clebsch-Gordan symmetries
# -Improve simplification method
# -Implement new simpifications
"""Clebsch-Gordon Coefficients."""
from __future__ import print_function, division
from sympy import (Add, expand, Eq, Expr, Mul, Piecewise, Pow, sqrt, Sum,
symbols, sympify, Wild)
from sympy.core.compatibility import range
from sympy.printing.pretty.stringpict import prettyForm, stringPict
from sympy.functions.special.tensor_functions import KroneckerDelta
from sympy.physics.wigner import clebsch_gordan, wigner_3j, wigner_6j, wigner_9j
__all__ = [
'CG',
'Wigner3j',
'Wigner6j',
'Wigner9j',
'cg_simp'
]
#-----------------------------------------------------------------------------
# CG Coefficients
#-----------------------------------------------------------------------------
class Wigner3j(Expr):
    """Class for the Wigner-3j symbols

    Wigner 3j-symbols are coefficients determined by the coupling of
    two angular momenta. When created, they are expressed as symbolic
    quantities that, for numerical parameters, can be evaluated using the
    ``.doit()`` method [1]_.

    Parameters
    ==========

    j1, m1, j2, m2, j3, m3 : Number, Symbol
        Terms determining the angular momentum of coupled angular momentum
        systems.

    Examples
    ========

    Declare a Wigner-3j coefficient and calculate its value

    >>> from sympy.physics.quantum.cg import Wigner3j
    >>> w3j = Wigner3j(6,0,4,0,2,0)
    >>> w3j
    Wigner3j(6, 0, 4, 0, 2, 0)
    >>> w3j.doit()
    sqrt(715)/143

    See Also
    ========

    CG: Clebsch-Gordan coefficients

    References
    ==========

    .. [1] Varshalovich, D A, Quantum Theory of Angular Momentum. 1988.
    """

    is_commutative = True

    def __new__(cls, j1, m1, j2, m2, j3, m3):
        args = map(sympify, (j1, m1, j2, m2, j3, m3))
        return Expr.__new__(cls, *args)

    @property
    def j1(self):
        return self.args[0]

    @property
    def m1(self):
        return self.args[1]

    @property
    def j2(self):
        return self.args[2]

    @property
    def m2(self):
        return self.args[3]

    @property
    def j3(self):
        return self.args[4]

    @property
    def m3(self):
        return self.args[5]

    @property
    def is_symbolic(self):
        # True when any argument is non-numeric; doit() refuses such symbols.
        return not all([arg.is_number for arg in self.args])

    # This is modified from the _print_Matrix method
    def _pretty(self, printer, *args):
        # Render the six arguments as a parenthesized 3x2 array:
        # (j1 j2 j3) over (m1 m2 m3), with per-column centring.
        m = ((printer._print(self.j1), printer._print(self.m1)),
            (printer._print(self.j2), printer._print(self.m2)),
            (printer._print(self.j3), printer._print(self.m3)))
        hsep = 2  # horizontal separation between columns
        vsep = 1  # blank rows between the j-row and the m-row
        maxw = [-1] * 3
        for j in range(3):
            maxw[j] = max([ m[j][i].width() for i in range(2) ])
        D = None
        for i in range(2):
            D_row = None
            for j in range(3):
                s = m[j][i]
                # Centre the entry within its column width.
                wdelta = maxw[j] - s.width()
                wleft = wdelta //2
                wright = wdelta - wleft
                s = prettyForm(*s.right(' '*wright))
                s = prettyForm(*s.left(' '*wleft))
                if D_row is None:
                    D_row = s
                    continue
                D_row = prettyForm(*D_row.right(' '*hsep))
                D_row = prettyForm(*D_row.right(s))
            if D is None:
                D = D_row
                continue
            for _ in range(vsep):
                D = prettyForm(*D.below(' '))
            D = prettyForm(*D.below(D_row))
        D = prettyForm(*D.parens())
        return D

    def _latex(self, printer, *args):
        # 3x3-column LaTeX array wrapped in \left( ... \right).
        label = map(printer._print, (self.j1, self.j2, self.j3,
                                     self.m1, self.m2, self.m3))
        return r'\left(\begin{array}{ccc} %s & %s & %s \\ %s & %s & %s \end{array}\right)' % \
            tuple(label)

    def doit(self, **hints):
        """Evaluate to a numeric value; all arguments must be numbers."""
        if self.is_symbolic:
            raise ValueError("Coefficients must be numerical")
        return wigner_3j(self.j1, self.j2, self.j3, self.m1, self.m2, self.m3)
class CG(Wigner3j):
    """Class for Clebsch-Gordan coefficient

    Clebsch-Gordan coefficients describe the angular momentum coupling between
    two systems. The coefficients give the expansion of a coupled total angular
    momentum state and an uncoupled tensor product state. The Clebsch-Gordan
    coefficients are defined as [1]_:

    .. math ::
        C^{j_1,m_1}_{j_2,m_2,j_3,m_3} = \langle j_1,m_1;j_2,m_2 | j_3,m_3\\rangle

    Parameters
    ==========

    j1, m1, j2, m2, j3, m3 : Number, Symbol
        Terms determining the angular momentum of coupled angular momentum
        systems.

    Examples
    ========

    Define a Clebsch-Gordan coefficient and evaluate its value

    >>> from sympy.physics.quantum.cg import CG
    >>> from sympy import S
    >>> cg = CG(S(3)/2, S(3)/2, S(1)/2, -S(1)/2, 1, 1)
    >>> cg
    CG(3/2, 3/2, 1/2, -1/2, 1, 1)
    >>> cg.doit()
    sqrt(3)/2

    See Also
    ========

    Wigner3j: Wigner-3j symbols

    References
    ==========

    .. [1] Varshalovich, D A, Quantum Theory of Angular Momentum. 1988.
    """

    def doit(self, **hints):
        """Evaluate to a numeric value; all arguments must be numbers."""
        if self.is_symbolic:
            raise ValueError("Coefficients must be numerical")
        return clebsch_gordan(self.j1, self.j2, self.j3, self.m1, self.m2, self.m3)

    def _pretty(self, printer, *args):
        # Typeset as 'C' with (j3,m3) as superscript and (j1,m1,j2,m2) as
        # subscript, padding both scripts to equal width.
        bot = printer._print_seq(
            (self.j1, self.m1, self.j2, self.m2), delimiter=',')
        top = printer._print_seq((self.j3, self.m3), delimiter=',')
        pad = max(top.width(), bot.width())
        bot = prettyForm(*bot.left(' '))
        top = prettyForm(*top.left(' '))
        if not pad == bot.width():
            bot = prettyForm(*bot.right(' ' * (pad - bot.width())))
        if not pad == top.width():
            top = prettyForm(*top.right(' ' * (pad - top.width())))
        s = stringPict('C' + ' '*pad)
        s = prettyForm(*s.below(bot))
        s = prettyForm(*s.above(top))
        return s

    def _latex(self, printer, *args):
        label = map(printer._print, (self.j3, self.m3, self.j1,
                                     self.m1, self.j2, self.m2))
        return r'C^{%s,%s}_{%s,%s,%s,%s}' % tuple(label)
class Wigner6j(Expr):
"""Class for the Wigner-6j symbols
See Also
========
Wigner3j: Wigner-3j symbols
"""
def __new__(cls, j1, j2, j12, j3, j, j23):
args = map(sympify, (j1, j2, j12, j3, j, j23))
return Expr.__new__(cls, *args)
@property
def j1(self):
return self.args[0]
@property
def j2(self):
return self.args[1]
@property
def j12(self):
return self.args[2]
@property
def j3(self):
return self.args[3]
@property
def j(self):
return self.args[4]
@property
def j23(self):
return self.args[5]
@property
def is_symbolic(self):
return not all([arg.is_number for arg in self.args])
# This is modified from the _print_Matrix method
def _pretty(self, printer, *args):
m = ((printer._print(self.j1), printer._print(self.j3)),
(printer._print(self.j2), printer._print(self.j)),
(printer._print(self.j12), printer._print(self.j23)))
hsep = 2
vsep = 1
maxw = [-1] * 3
for j in range(3):
maxw[j] = max([ m[j][i].width() for i in range(2) ])
D = None
for i in range(2):
D_row = None
for j in range(3):
s = m[j][i]
wdelta = maxw[j] - s.width()
wleft = wdelta //2
wright = wdelta - wleft
s = prettyForm(*s.right(' '*wright))
s = prettyForm(*s.left(' '*wleft))
if D_row is None:
D_row = s
continue
D_row = prettyForm(*D_row.right(' '*hsep))
D_row = prettyForm(*D_row.right(s))
if D is None:
D = D_row
continue
for _ in range(vsep):
|
edx/edx-e2e-tests | regression/tests/lms/test_instructor_dashboard.py | Python | agpl-3.0 | 1,326 | 0.001508 | """
End-to-end tests for the Instructor Dashboard.
"""
from bok_choy.web_app_test import WebAppTest
from regression.pages.lms.course_page_lms import CourseHomePageExtended
from regression.pages.lms.dashboard_lms import DashboardPageExtended
from regression.pages.lms.instructor_dashboard import InstructorDashboardPageExtended
from regression.pages.lms.utils import get_course_k | ey
from regression.tests.helpers.api_clients import LmsLoginApi
from regression.tests.helpers.utils import get_course_display_name, get_course_info
class AnalyticsTest(WebAppTest):
    """
    Regression tests on Analytics on Instructor Dashboard
    """

    def setUp(self):
        """Authenticate, open the target course and land on the instructor dashboard."""
        super().setUp()
        # Authenticate through the API rather than the login UI (faster,
        # less flaky).
        login_api = LmsLoginApi()
        login_api.authenticate(self.browser)
        course_info = get_course_info()
        self.dashboard_page = DashboardPageExtended(self.browser)
        self.instructor_dashboard = InstructorDashboardPageExtended(
            self.browser,
            get_course_key(course_info)
        )
        self.course_page = CourseHomePageExtended(
            self.browser,
            get_course_key(course_info)
        )
        # Navigate: learner dashboard -> course -> instructor dashboard.
        self.dashboard_page.visit()
        self.dashboard_page.select_course(get_course_display_name())
        self.course_page.wait_for_page()
        self.instructor_dashboard.visit()
|
tiancj/emesene | emesene/e3/xmpp/SleekXMPP/sleekxmpp/plugins/xep_0060/pubsub.py | Python | gpl-3.0 | 25,426 | 0.00059 | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import logging
from sleekxmpp.xmlstream import JID
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.xmlstream.matcher import StanzaPath
from sleekxmpp.plugins.base import BasePlugin
from sleekxmpp.plugins.xep_0060 import stanza
log = logging.getLogger(__name__)
class XEP_0060(BasePlugin):
"""
XEP-0060 Publish Subscribe
"""
name = 'xep_0060'
description = 'XEP-0060: Publish-Subscribe'
dependencies = set(['xep_0030', 'xep_0004'])
stanza = stanza
def plugin_init(self):
self.node_event_map = {}
self.xmpp.register_handler(
Callback('Pubsub Event: Items',
StanzaPath('message/pubsub_event/items'),
self._handle_event_items))
self.xmpp.register_handler(
Callback('Pubsub Event: Purge',
StanzaPath('message/pubsub_event/purge'),
self._handle_event_purge))
self.xmpp.register_handler(
Callback('Pubsub Event: Delete',
StanzaPath('message/pubsub_event/delete'),
self._handle_event_delete))
self.xmpp.register_handler(
Callback('Pubsub Event: Configuration',
StanzaPath('message/pubsub_event/configuration'),
self._handle_event_configuration))
self.xmpp.register_handler(
Callback('Pubsub Event: Subscription',
StanzaPath('message/pubsub_event/subscription'),
self._handle_event_subscription))
def plugin_end(self):
self.xmpp.remove_handler('Pubsub Event: Items')
self.xmpp.remove_handler('Pubsub Event: Purge')
self.xmpp.remove_handler('Pubsub Event: Delete')
self.xmpp.remove_handler('Pubsub Event: Configuration')
self.xmpp.remove_handler('Pubsub Event: Subscription')
    def _handle_event_items(self, msg):
        """Raise events for publish and retraction notifications."""
        node = msg['pubsub_event']['items']['node']

        multi = len(msg['pubsub_event']['items']) > 1
        values = {}
        if multi:
            # Snapshot the stanza's non-pubsub values so each item can be
            # re-emitted as a single-item "condensed" message below.
            values = msg.values
            del values['pubsub_event']

        for item in msg['pubsub_event']['items']:
            event_name = self.node_event_map.get(node, None)
            event_type = 'publish'
            if item.name == 'retract':
                event_type = 'retract'

            if multi:
                condensed = self.xmpp.Message()
                condensed.values = values
                condensed['pubsub_event']['items']['node'] = node
                condensed['pubsub_event']['items'].append(item)
                # NOTE(review): the generic event receives the original
                # multi-item msg while the node-mapped event receives the
                # condensed copy -- confirm this asymmetry is intended.
                self.xmpp.event('pubsub_%s' % event_type, msg)
                if event_name:
                    self.xmpp.event('%s_%s' % (event_name, event_type),
                                    condensed)
            else:
                self.xmpp.event('pubsub_%s' % event_type, msg)
                if event_name:
                    self.xmpp.event('%s_%s' % (event_name, event_type), msg)
def _handle_event_purge(self, msg):
"""Raise events for node purge notifications."""
node = msg['pubsub_event']['purge']['node']
event_name = self.node_event_map.get(node, None)
self.xmpp.event('pubsub_purge', msg)
if event_name:
self.xmpp.event('%s_purge' % event_name, msg)
def _handle_event_delete(self, msg):
"""Raise events for node deletion notifications."""
node = msg['pubsub_event']['delete']['node']
event_name = self.node_event_map.get(node, None)
self.xmpp.event('pubsub_delete', msg)
if event_name:
self.xmpp.event('%s_delete' % event_name, msg)
    def _handle_event_configuration(self, msg):
        """Raise events for node configuration notifications."""
        node = msg['pubsub_event']['configuration']['node']
        event_name = self.node_event_map.get(node, None)
        # Generic event first, then the node-specific alias if one is mapped.
        self.xmpp.event('pubsub_config', msg)
        if event_name:
            self.xmpp.event('%s_config' % event_name, msg)
    def _handle_event_subscription(self, msg):
        """Raise events for node subscription notifications."""
        node = msg['pubsub_event']['subscription']['node']
        event_name = self.node_event_map.get(node, None)
        # Generic event first, then the node-specific alias if one is mapped.
        self.xmpp.event('pubsub_subscription', msg)
        if event_name:
            self.xmpp.event('%s_subscription' % event_name, msg)
    def map_node_event(self, node, event_name):
        """
        Map node names to events.

        When a pubsub event is received for the given node,
        raise the provided event.

        For example::

            map_node_event('http://jabber.org/protocol/tune',
                           'user_tune')

        will produce the events 'user_tune_publish' and 'user_tune_retract'
        when the respective notifications are received from the node
        'http://jabber.org/protocol/tune', among other events.

        Arguments:
            node       -- The node name to map to an event.
            event_name -- The name of the event to raise when a
                          notification from the given node is received.
        """
        # Later mappings for the same node overwrite earlier ones.
        self.node_event_map[node] = event_name
    def create_node(self, jid, node, config=None, ntype=None, ifrom=None,
                    block=True, callback=None, timeout=None):
        """
        Create and configure a new pubsub node.

        A server MAY use a different name for the node than the one
        provided, so be sure to check the result stanza for a server
        assigned name.

        If no configuration form is provided, the node will be created using
        the server's default configuration. To get the default configuration
        use get_node_config().

        Arguments:
            jid      -- The JID of the pubsub service.
            node     -- Optional name of the node to create. If no name is
                        provided, the server MAY generate a node ID for you.
                        The server can also assign a different name than the
                        one you provide; check the result stanza to see if
                        the server assigned a name.
            config   -- Optional XEP-0004 data form of configuration settings.
            ntype    -- The type of node to create. Servers typically default
                        to using 'leaf' if no type is provided.
            ifrom    -- Specify the sender's JID.
            block    -- Specify if the send call will block until a response
                        is received, or a timeout occurs. Defaults to True.
            timeout  -- The length of time (in seconds) to wait for a response
                        before exiting the send call if blocking is used.
                        Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT
            callback -- Optional reference to a stream handler function. Will
                        be executed when a reply stanza is received.
        """
        iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set')
        iq['pubsub']['create']['node'] = node

        if config is not None:
            # Ensure the form advertises the node_config FORM_TYPE before
            # attaching it to the create request.
            form_type = 'http://jabber.org/protocol/pubsub#node_config'
            if 'FORM_TYPE' in config['fields']:
                config.field['FORM_TYPE']['value'] = form_type
            else:
                config.add_field(var='FORM_TYPE',
                                 ftype='hidden',
                                 value=form_type)
            if ntype:
                if 'pubsub#node_type' in config['fields']:
                    config.field['pubsub#node_type']['value'] = ntype
                else:
                    config.add_field(var='pubsub#node_type', value=ntype)
            iq['pubsub']['configure'].append(config)

        return iq.send(block=block, callback=callback, timeout=timeout)
def subscribe(self, jid, node, bare=True, subscribee=None, option |
13steinj/praw | praw/models/reddit/comment.py | Python | bsd-2-clause | 11,117 | 0 | """Provide the Comment class."""
from ...exceptions import ClientException
from ..comment_forest import CommentForest
from .base import RedditBase
from .mixins import InboxableMixin, ThingModerationMixin, UserContentMixin
from .redditor import Redditor
class Comment(RedditBase, InboxableMixin, UserContentMixin):
"""A class that represents a reddit comments.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``author`` Provides an instance of :class:`.Redditor`.
``body`` The body of the comment.
``created_utc`` Time the comment was created, represented in
`Unix Time`_.
``distinguished`` Whether or not the comment is distinguished.
``edited`` Whether or not the comment has been edited.
``id`` The ID of the comment.
``is_submitter`` Whether or not the comment author is also the
author of the submission.
``link_id`` The submission ID that the comment belongs to.
``parent_id`` The ID of the parent comment. If it is a top-level
comment, this returns the submission ID instead
(prefixed with 't3').
``permalink`` A permalink for the comment.
``replies`` Provides an instance of :class:`.CommentForest`.
``score`` The number of upvotes for the comment.
``stickied`` Whether or not the comment is stickied.
``submission`` Provides an instance of :class:`.Submission`. The
submission that the comment belongs to.
``subreddit`` Provides an instance of :class:`.Subreddit`. The
subreddit that the comment belongs to.
``subreddit_id`` The subreddit ID that the comment belongs to.
======================= ===================================================
.. _Unix Time: https://en.wikipedia.org/wiki/Unix_time
"""
MISSING_COMMENT_MESSAGE = ('This comment does not appear to be in the '
'comment tree')
STR_FIELD = 'id'
    @staticmethod
    def id_from_url(url):
        """Get the ID of a comment from the full URL."""
        parts = RedditBase._url_parts(url)
        try:
            comment_index = parts.index('comments')
        except ValueError:
            raise ClientException('Invalid URL: {}'.format(url))

        # Require 'comments' to sit exactly 4 segments from the end, i.e.
        # three segments follow it (submission id, title, comment id).
        if len(parts) - 4 != comment_index:
            raise ClientException('Invalid URL: {}'.format(url))
        return parts[-1]
    @property
    def is_root(self):
        """Return True when the comment is a top level comment."""
        # Top-level comments have the submission itself as their parent,
        # identified by the fullname prefix of parent_id.
        parent_type = self.parent_id.split('_', 1)[0]
        return parent_type == self._reddit.config.kinds['submission']
    @property
    def mod(self):
        """Provide an instance of :class:`.CommentModeration`."""
        # Lazily create and cache the moderation helper on first access.
        if self._mod is None:
            self._mod = CommentModeration(self)
        return self._mod
    @property
    def replies(self):
        """Provide an instance of :class:`.CommentForest`.

        This property may return an empty list if the comment
        has not been refreshed with :meth:`.refresh()`

        Sort order and reply limit can be set with the ``reply_sort`` and
        ``reply_limit`` attributes before replies are fetched, including
        any call to :meth:`.refresh`:

        .. code:: python

           comment.reply_sort = 'new'
           comment.refresh()
           replies = comment.replies

        """
        # Wrap the raw reply list in a CommentForest on first access and
        # cache the wrapper.
        if isinstance(self._replies, list):
            self._replies = CommentForest(self.submission, self._replies)
        return self._replies
    @property
    def submission(self):
        """Return the Submission object this comment belongs to."""
        if not self._submission:  # Comment not from submission
            # Lazily build it from the id parsed out of context/link_id.
            self._submission = self._reddit.submission(
                self._extract_submission_id())
        return self._submission
    @submission.setter
    def submission(self, submission):
        """Update the Submission associated with the Comment."""
        # Register this comment in the submission's id->comment index.
        submission._comments_by_id[self.name] = self
        self._submission = submission
        # pylint: disable=not-an-iterable
        # Propagate the new submission to every known reply.
        for reply in getattr(self, 'replies', []):
            reply.submission = submission
    def __init__(self, reddit, id=None,  # pylint: disable=redefined-builtin
                 url=None, _data=None):
        """Construct an instance of the Comment object."""
        # Exactly one of id/url/_data must be supplied (two must be None).
        if [id, url, _data].count(None) != 2:
            raise TypeError('Exactly one of `id`, `url`, or `_data` must be '
                            'provided.')
        self._mod = self._replies = self._submission = None
        super(Comment, self).__init__(reddit, _data)
        if id:
            self.id = id  # pylint: disable=invalid-name
        elif url:
            self.id = self.id_from_url(url)
        else:
            # Built directly from API data: nothing left to fetch.
            self._fetched = True
    def __setattr__(self, attribute, value):
        """Objectify author, replies, and subreddit."""
        if attribute == 'author':
            value = Redditor.from_data(self._reddit, value)
        elif attribute == 'replies':
            # An empty string is the API's sentinel for "no replies".
            if value == '':
                value = []
            else:
                value = self._reddit._objector.objectify(value).children
            # Store under the private name; the public `replies` property
            # wraps this value.
            attribute = '_replies'
        elif attribute == 'subreddit':
            value = self._reddit.subreddit(value)
        super(Comment, self).__setattr__(attribute, value)
def _extract_submission_id(self):
if 'context' in self.__dict__:
return self.context.rsplit('/', 4)[1]
return self.link_id.split('_', 1)[1]
def parent(self):
"""Return the parent of the comment.
The returned parent will be an instance of either
:class:`.Comment`, or :class:`.Submission`.
If this comment was obtained through a :class:`.Submission`, then its
entire ancestry should be immediately available, requiring no extra
network requests. However, if this comment was obtained through other
means, e.g., ``reddit.comment('COMMENT_ID')``, or
``reddit.inbox.comment_replies``, then the returned parent may be a
lazy instance of either :class:`.Comment`, or :class:`.Submission`.
Lazy Comment Example:
.. code:: python
comment = reddit.comment('cklhv0f')
parent = comment.parent()
# `replies` is empty until the comment is refreshed
print(parent.replies) # Output: []
parent.refresh()
print(parent.replies) # Output is at least: [Comment(id='cklhv0f')]
.. warning:: Successive calls to :meth:`.parent()` may result in a
network request per call when the comment is not obtained through a
:class:`.Submission`. See below for an example of how to minimize
requests.
If you have a deeply nested comment and wish to most efficiently
discover its top-most :class:`.Comment` ancestor you can chain
successive calls to :meth:`.parent()` with calls to :meth:`.refresh()`
at every 9 levels. For example:
.. code:: python
comment = reddit.comment('dkk4qjd')
ancestor = comment
refresh_counter = 0
while not ancestor.is_root:
ancestor = ancestor.parent()
if refresh_counter % 9 == 0:
ancestor.refresh()
refresh_counter += 1
print('Top-most Ancestor: {}'.format(ancestor))
Th |
jmatthed/avatar-python | avatar/util/processes.py | Python | apache-2.0 | 796 | 0.007538 | '''
Created on Jun 26, 2013
@author: Jonas Zaddach <zaddach@eurecom.fr>
'''
import subprocess
def get_process_list():
    """Return a list of {'pid': int, 'cmd': str} for all running processes."""
    raw = subprocess.check_output(
        ["ps", "-A", "-w", "-w", "-o", "pid", "-o", "command"])
    processes = []
    # Skip the "PID COMMAND" header line, then parse "<pid> <command...>".
    for line in raw.decode('latin-1').split("\n")[1:]:
        line = line.strip()
        if not line:
            continue
        pid_text, _, command = line.partition(" ")
        processes.append({"pid": int(pid_text), "cmd": command})
    return processes
def find_processes(name):
    """Return the processes whose command line starts with *name*."""
    return [process for process in get_process_list()
            if process["cmd"].startswith(name)]
hsgweon/sgtoolkit | sgtoolkit/sgtk_getsamplelistfromfasta.py | Python | gpl-2.0 | 1,239 | 0.021792 | #!/usr/bin/python
############################################################
# Argument Options
import argparse

# NOTE(review): the string below is passed positionally, so argparse treats
# it as the program name (prog), not the description; kept as-is so the
# existing -h output does not change.
parser = argparse.ArgumentParser("Creates a sample list file from a fasta sequences.")
parser.add_argument("-i",
                    action = "store",
                    dest = "infile",
                    metavar = "infile",
                    help = "[REQUIRED]",
                    required = True)
parser.add_argument("-o",
                    action = "store",
                    dest = "outfile",
                    metavar = "outfile",
                    help = "[REQUIRED]",
                    required = True)
options = parser.parse_args()
############################################################

import natsort

# Collect the sample ID (header text before the first "_") of every FASTA
# record, keeping each ID only once, in first-seen order.
uniquesampleids = []
with open(options.infile, "r") as f:
    for l in f:
        if l.startswith(">"):
            sampleid = l[1:].split("_")[0]
            if sampleid not in uniquesampleids:
                uniquesampleids.append(sampleid)

# Natural-sort the IDs, then reverse for descending natural order.
uniquesampleids = natsort.natsorted(uniquesampleids)[::-1]

# BUG FIX: the original opened the input file a second time without ever
# using or closing that handle, and never closed the output file either;
# context managers fix both resource leaks.
with open(options.outfile, "w") as outfile:
    for i in uniquesampleids:
        outfile.write(i + "\n")
|
alikzao/tao1 | tao1/core/utils.py | Python | mit | 121 | 0.008264 | #!/u | sr/bin/env python
from tao1.core.utils_ import manage_console

# Script entry point: dispatch to the project's console management utility.
if __name__ == "__main__":
    manage_console()
|
shafaypro/PYSHA | _StackoverFlow.py | Python | gpl-3.0 | 606 | 0.013201 | import webbrowser
# ToDO: You can improve the Search using the OAUTH and other apis of the stack over flow
class StackoverFlow:
    """Open Stack Overflow searches in the user's web browser."""

    def __init__(self):
        pass

    def search(self, search_text=''):
        """Open a browser tab searching Stack Overflow for *search_text*.

        Returns the search URL (for logging / reuse).
        """
        from urllib.parse import quote_plus  # stdlib; local import keeps module deps minimal

        # BUG FIX: the original only replaced spaces with '+', leaving
        # reserved characters (&, #, ?, +, ...) unescaped and able to break
        # the query string; quote_plus encodes them all and still maps
        # spaces to '+'.
        query = quote_plus(search_text.strip())
        search_url = "http://stackoverflow.com/search?q=" + query
        webbrowser.open(search_url)
        return search_url  # returns the search url for the reading purposes
#need to work with the api instead of the static referring |
166MMX/openjdk.java.net-openjfx-8u40-rt | modules/web/src/main/native/Tools/Scripts/webkitpy/common/checkout/scm/detection.py | Python | gpl-2.0 | 3,835 | 0.002868 | # Copyright (c) 2009, 2010, 2011 Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from webkitpy.common.system.filesystem import FileSystem
from webkitpy.common.system.executive import Executive
from .svn import SVN
from .git import Git
_log = logging.getLogger(__name__)
class SCMDetector(object):
    """Figures out which SCM system (SVN or Git) manages a directory."""

    def __init__(self, filesystem, executive):
        self._filesystem = filesystem
        self._executive = executive

    def default_scm(self, patch_directories=None):
        """Return the default SCM object as determined by the CWD and running code.

        Returns the default SCM object for the current working directory; if the
        CWD is not in a checkout, then we attempt to figure out if the SCM module
        itself is part of a checkout, and return that one.

        NOTE(review): despite the original claim that None is returned when
        neither location is a checkout, the code below raises an Exception in
        that case.
        """
        cwd = self._filesystem.getcwd()
        scm_system = self.detect_scm_system(cwd, patch_directories)
        if not scm_system:
            # Fall back to the checkout that contains this module itself.
            script_directory = self._filesystem.dirname(self._filesystem.path_to_module(self.__module__))
            scm_system = self.detect_scm_system(script_directory, patch_directories)
            if scm_system:
                _log.info("The current directory (%s) is not a WebKit checkout, using %s" % (cwd, scm_system.checkout_root))
            else:
                raise Exception("FATAL: Failed to determine the SCM system for either %s or %s" % (cwd, script_directory))
        return scm_system

    def detect_scm_system(self, path, patch_directories=None):
        """Return an SVN or Git object managing *path*, or None for neither."""
        absolute_path = self._filesystem.abspath(path)

        # Normalize: an empty list means the same as "no patch directories".
        if patch_directories == []:
            patch_directories = None

        # SVN is checked first, so an SVN checkout wins over a nested Git one.
        if SVN.in_working_directory(absolute_path, executive=self._executive):
            return SVN(cwd=absolute_path, patch_directories=patch_directories, filesystem=self._filesystem, executive=self._executive)

        if Git.in_working_directory(absolute_path, executive=self._executive):
            return Git(cwd=absolute_path, filesystem=self._filesystem, executive=self._executive)

        return None
# FIXME: These free functions are all deprecated:
def detect_scm_system(path, patch_directories=None):
    # Deprecated module-level wrapper (see FIXME above): builds a throwaway
    # SCMDetector with real FileSystem/Executive objects on each call.
    return SCMDetector(FileSystem(), Executive()).detect_scm_system(path, patch_directories)
|
Hernanarce/pelisalacarta | python/main-classic/servers/youtube.py | Python | gpl-3.0 | 8,299 | 0.006145 | # s-*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para Youtube
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import re
import urllib
import urlparse
from core import config
from core import httptools
from core import logger
from core import scrapertools
from core import jsontools as json
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
    """Return the list of playable stream entries for a YouTube page or id.

    ``page_url`` may be a full watch URL or a bare 11-character video id.
    The remaining parameters are unused here but kept for the shared
    server-connector interface.
    """
    logger.info("(page_url='%s')" % page_url)

    # Accept a bare video id by rebuilding the canonical watch URL.
    if not page_url.startswith("http"):
        page_url = "http://www.youtube.com/watch?v=%s" % page_url
        logger.info(" page_url->'%s'" % page_url)

    # BUG FIX: the original pattern used "[A-z]", which also matches the
    # ASCII range between 'Z' and 'a' ('[', '\', ']', '^', '`'); YouTube ids
    # only contain letters, digits, '_' and '-'.
    video_id = scrapertools.find_single_match(page_url, 'v=([A-Za-z0-9_-]{11})')
    video_urls = extract_videos(video_id)

    # NOTE(review): the list built by extract_videos is reversed before
    # returning -- presumably callers expect lowest quality first; confirm.
    video_urls.reverse()
    for video_url in video_urls:
        logger.info(str(video_url))
    return video_urls
def remove_additional_ending_delimiter(data):
    """Truncate *data* just after the first '};', keeping the closing '}'."""
    head, found, _rest = data.partition("};")
    return head + "}" if found else data
def normalize_url(url):
    """Give protocol-relative URLs (//host/...) an explicit http: scheme."""
    return "http:" + url if url.startswith("//") else url
def extract_flashvars(data):
    """Extract the embedded ytplayer.config values from a watch-page HTML.

    Returns the config's "args" dict, or {} when no config line is found.
    """
    # NOTE(review): `assets` is never set to a truthy value anywhere, so the
    # data["assets"] branch below is dead code.
    assets = 0
    flashvars = {}
    found = False
    for line in data.split("\n"):
        if line.strip().find(";ytplayer.config = ") > 0:
            found = True
            # Slice out the JSON object assigned to ytplayer.config.
            p1 = line.find(";ytplayer.config = ") + len(";ytplayer.config = ") - 1
            p2 = line.rfind(";")
            if p1 <= 0 or p2 <= 0:
                continue
            data = line[p1 + 1:p2]
            break
    # Trim anything after the first "};" left over from the slice above.
    data = remove_additional_ending_delimiter(data)
    if found:
        data = json.load_json(data)
        if assets:
            flashvars = data["assets"]
        else:
            flashvars = data["args"]
    # Make protocol-relative asset URLs explicit.
    for k in ["html", "css", "js"]:
        if k in flashvars:
            flashvars[k] = normalize_url(flashvars[k])
    return flashvars
def extract_videos(video_id):
    """Build the list of [label, url] playable streams for *video_id*.

    Tries, in order: live HLS (hlsvp), a DASH manifest (on Kodi >= 17 with
    the inputstream.adaptive addon), then the classic
    url_encoded_fmt_stream_map formats, deciphering stream signatures via
    the player JavaScript when required.
    """
    # itag -> human-readable quality/container label.
    fmt_value = {
        5: "240p h263 flv",
        6: "240p h263 flv",
        18: "360p h264 mp4",
        22: "720p h264 mp4",
        26: "???",
        33: "???",
        34: "360p h264 flv",
        35: "480p h264 flv",
        36: "3gpp",
        37: "1080p h264 mp4",
        38: "4K h264 mp4",
        43: "360p vp8 webm",
        44: "480p vp8 webm",
        45: "720p vp8 webm",
        46: "1080p vp8 webm",
        59: "480p h264 mp4",
        78: "480p h264 mp4",
        82: "360p h264 3D",
        83: "480p h264 3D",
        84: "720p h264 3D",
        85: "1080p h264 3D",
        100: "360p vp8 3D",
        101: "480p vp8 3D",
        102: "720p vp8 3D"
    }

    url = 'http://www.youtube.com/get_video_info?video_id=%s&eurl=https://youtube.googleapis.com/v/%s&ssl_stream=1' % \
          (video_id, video_id)
    data = httptools.downloadpage(url).data
    video_urls = []

    params = dict(urlparse.parse_qsl(data))
    # Live streams expose a single HLS playlist; nothing else applies.
    if params.get('hlsvp'):
        video_urls.append(["(LIVE .m3u8) [youtube]", params['hlsvp']])
        return video_urls

    if config.is_xbmc():
        import xbmc
        xbmc_version = config.get_platform(True)['num_version']
        # DASH playback needs Kodi 17+ with inputstream.adaptive installed,
        # and only works for non-ciphered manifests.
        if xbmc_version >= 17 and xbmc.getCondVisibility('System.HasAddon(inputstream.adaptive)') \
                and params.get('dashmpd'):
            if params.get('use_cipher_signature', '') != 'True':
                video_urls.append(['mpd HD [youtube]', params['dashmpd'], 0, '', True])

    js_signature = ""
    youtube_page_data = httptools.downloadpage("http://www.youtube.com/watch?v=%s" % video_id).data
    params = extract_flashvars(youtube_page_data)
    if params.get('url_encoded_fmt_stream_map'):
        data_flashvars = params["url_encoded_fmt_stream_map"].split(",")
        for url_desc in data_flashvars:
            url_desc_map = dict(urlparse.parse_qsl(url_desc))
            if not url_desc_map.get("url") and not url_desc_map.get("stream"):
                continue
            try:
                key = int(url_desc_map["itag"])
                if not fmt_value.get(key):
                    continue
                # Resolve the stream URL from url / conn+stream / stream.
                if url_desc_map.get("url"):
                    url = urllib.unquote(url_desc_map["url"])
                elif url_desc_map.get("conn") and url_desc_map.get("stream"):
                    url = urllib.unquote(url_desc_map["conn"])
                    if url.rfind("/") < len(url) - 1:
                        url += "/"
                    url += urllib.unquote(url_desc_map["stream"])
                elif url_desc_map.get("stream") and not url_desc_map.get("conn"):
                    url = urllib.unquote(url_desc_map["stream"])

                if url_desc_map.get("sig"):
                    # Plain signature, usable as-is.
                    url += "&signature=" + url_desc_map["sig"]
                elif url_desc_map.get("s"):
                    # Ciphered signature: fetch the player JS once and use
                    # its decipher function (cached in js_signature).
                    sig = url_desc_map["s"]
                    if not js_signature:
                        urljs = scrapertools.find_single_match(youtube_page_data, '"assets":.*?"js":\s*"([^"]+)"')
                        urljs = urljs.replace("\\", "")
                        if urljs:
                            if not re.search(r'https?://', urljs):
                                urljs = urlparse.urljoin("https://www.youtube.com", urljs)
                            data_js = httptools.downloadpage(urljs).data
                            from jsinterpreter import JSInterpreter
                            funcname = scrapertools.find_single_match(data_js, '\.sig\|\|([A-z0-9$]+)\(')
                            if not funcname:
                                funcname = scrapertools.find_single_match(data_js, '["\']signature["\']\s*,\s*'
                                                                                   '([A-z0-9$]+)\(')
                            jsi = JSInterpreter(data_js)
                            js_signature = jsi.extract_function(funcname)
                    signature = js_signature([sig])
                    url += "&signature=" + signature

                url = url.replace(",", "%2C")
                video_urls.append(["("+fmt_value[key]+") [youtube]", url])
            except:
                # NOTE(review): bare except deliberately skips a broken
                # stream entry; the traceback is logged for diagnosis.
                import traceback
                logger.info(traceback.format_exc())

    return video_urls
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'youtube(?:-nocookie)?\.com/(?:(?:(?:v/|embed/))|(?:(?:watch(?:_popup)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v=))?([0-9A-Za-z_-]{11})'#'"http://www.youtube.com/v/([^"]+)"'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url!='':
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
patronvideos = 'www.youtube.*?v(?:=|%3D)([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
#http://www.youtube.com/v/AcbsMOMg2fQ
patronvideos = 'youtube.com/v/([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube' |
dunphyben/btce-api | test/test_public.py | Python | mit | 2,447 | 0.000409 | import decimal
import unittest
from btceapi.public import *
class TestPublic(unittest.TestCase):
def test_constructTrade(self):
d = {"pair": "btc_usd",
"trade_type": "bid",
"price": decimal.Decimal("1.234"),
"tid": 1,
"amount": decimal.Decimal("3.2"),
"date": 1368805684.878004}
t = Trade(**d)
self.assertEqual(t.pair, d.get("pair"))
self.assertEqual(t.trade_type, d.get("trade_type"))
self.assertEqual(t.price, d.get("price"))
self.assertEqual(t.tid, d.get("tid"))
self.assertEqual(t.amount, d.get("amount"))
assert type(t.date) is datetime.datetime
test_date = datetime.datetime.fromtimestamp(1368805684.878004)
self.assertEqual(t.date, test_date)
# check conversion | of decimal dates
d["date"] = decimal.Decimal("1368805684.878004")
t = Trade(**d)
assert type(t.date) is datetime.datetime
self.assertEqual(t.date, test_date)
# check conversion of integer dates
d["date"] = 1368805684
test_date = datetime.datetime.fromtimestamp(1368805684)
t = Trade(**d)
assert type(t.date) is datetime.datetime
self.assertEqual(t.date, | test_date)
# check conversion of string dates with no fractional seconds
d["date"] = "2013-05-17 08:48:04"
t = Trade(**d)
assert type(t.date) is datetime.datetime
self.assertEqual(t.date, datetime.datetime(2013, 5, 17, 8, 48, 4, 0))
# check conversion of string dates with fractional seconds
d["date"] = "2013-05-17 08:48:04.878004"
t = Trade(**d)
assert type(t.date) is datetime.datetime
self.assertEqual(t.date,
datetime.datetime(2013, 5, 17, 8, 48, 4, 878004))
# check conversion of unicode dates with no fractional seconds
d["date"] = u"2013-05-17 08:48:04"
t = Trade(**d)
assert type(t.date) is datetime.datetime
self.assertEqual(t.date, datetime.datetime(2013, 5, 17, 8, 48, 4, 0))
# check conversion of string dates with fractional seconds
d["date"] = u"2013-05-17 08:48:04.878004"
t = Trade(**d)
assert type(t.date) is datetime.datetime
self.assertEqual(t.date,
datetime.datetime(2013, 5, 17, 8, 48, 4, 878004))
if __name__ == '__main__':
unittest.main()
|
salv-orlando/MyRepo | nova/tests/scheduler/test_distributed_scheduler.py | Python | apache-2.0 | 10,588 | 0.000472 | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Distributed Scheduler.
"""
import json
import nova.db
from nova import context
from nova import exception
from nova import rpc
from nova import test
from nova.compute import api as compute_api
from nova.scheduler import distributed_scheduler
from nova.scheduler import least_cost
from nova.scheduler import zone_manager
from nova.tests.scheduler import fake_zone_manager as ds_fakes
class FakeEmptyZoneManager(zone_manager.ZoneManager):
def __init__(self):
self.service_states = {}
def get_host_list_from_db(self, context):
return []
def _compute_node_get_all(*args, **kwargs):
return []
def _instance_get_all(*args, **kwargs):
return []
def fake_call_zone_method(context, method, specs, zones):
return [
(1, [
dict(weight=2, blob='AAAAAAA'),
dict(weight=4, blob='BBBBBBB'),
| dict(weight=6, blob='CCCCCCC'),
dict(weight=8, blob='DDDDDDD'),
]),
(2, [
dict(weight=10, blob='EEEEEEE'),
dict(weight=12, blob='FFFFFFF'),
dict(weight=14, blob='GGGGGGG'),
dict(weight=16, blob='HHHHHHH'),
]),
(3, [
dict(weight=18, blob='IIIIIII'),
dict(weight=2 | 0, blob='JJJJJJJ'),
dict(weight=22, blob='KKKKKKK'),
dict(weight=24, blob='LLLLLLL'),
]),
]
def fake_zone_get_all(context):
return [
dict(id=1, api_url='zone1',
username='admin', password='password',
weight_offset=0.0, weight_scale=1.0),
dict(id=2, api_url='zone2',
username='admin', password='password',
weight_offset=1000.0, weight_scale=1.0),
dict(id=3, api_url='zone3',
username='admin', password='password',
weight_offset=0.0, weight_scale=1000.0),
]
class DistributedSchedulerTestCase(test.TestCase):
"""Test case for Distributed Scheduler."""
def test_adjust_child_weights(self):
"""Make sure the weights returned by child zones are
properly adjusted based on the scale/offset in the zone
db entries.
"""
sched = ds_fakes.FakeDistributedScheduler()
child_results = fake_call_zone_method(None, None, None, None)
zones = fake_zone_get_all(None)
weighted_hosts = sched._adjust_child_weights(child_results, zones)
scaled = [130000, 131000, 132000, 3000]
for weighted_host in weighted_hosts:
w = weighted_host.weight
if weighted_host.zone == 'zone1': # No change
self.assertTrue(w < 1000.0)
if weighted_host.zone == 'zone2': # Offset +1000
self.assertTrue(w >= 1000.0 and w < 2000)
if weighted_host.zone == 'zone3': # Scale x1000
self.assertEqual(scaled.pop(0), w)
def test_run_instance_no_hosts(self):
"""
Ensure empty hosts & child_zones result in NoValidHosts exception.
"""
def _fake_empty_call_zone_method(*args, **kwargs):
return []
sched = ds_fakes.FakeDistributedScheduler()
sched.zone_manager = FakeEmptyZoneManager()
self.stubs.Set(sched, '_call_zone_method',
_fake_empty_call_zone_method)
self.stubs.Set(nova.db, 'zone_get_all', fake_zone_get_all)
fake_context = context.RequestContext('user', 'project')
request_spec = dict(instance_type=dict(memory_mb=1, local_gb=1))
self.assertRaises(exception.NoValidHost, sched.schedule_run_instance,
fake_context, request_spec)
def test_run_instance_with_blob_hint(self):
"""
Check the local/child zone routing in the run_instance() call.
If the zone_blob hint was passed in, don't re-schedule.
"""
self.schedule_called = False
self.from_blob_called = False
self.locally_called = False
self.child_zone_called = False
def _fake_schedule(*args, **kwargs):
self.schedule_called = True
return least_cost.WeightedHost(1, host='x')
def _fake_make_weighted_host_from_blob(*args, **kwargs):
self.from_blob_called = True
return least_cost.WeightedHost(1, zone='x', blob='y')
def _fake_provision_resource_locally(*args, **kwargs):
self.locally_called = True
return 1
def _fake_ask_child_zone_to_create_instance(*args, **kwargs):
self.child_zone_called = True
return 2
sched = ds_fakes.FakeDistributedScheduler()
self.stubs.Set(sched, '_schedule', _fake_schedule)
self.stubs.Set(sched, '_make_weighted_host_from_blob',
_fake_make_weighted_host_from_blob)
self.stubs.Set(sched, '_provision_resource_locally',
_fake_provision_resource_locally)
self.stubs.Set(sched, '_ask_child_zone_to_create_instance',
_fake_ask_child_zone_to_create_instance)
request_spec = {
'instance_properties': {},
'instance_type': {},
'filter_driver': 'nova.scheduler.host_filter.AllHostsFilter',
'blob': "Non-None blob data",
}
fake_context = context.RequestContext('user', 'project')
instances = sched.schedule_run_instance(fake_context, request_spec)
self.assertTrue(instances)
self.assertFalse(self.schedule_called)
self.assertTrue(self.from_blob_called)
self.assertTrue(self.child_zone_called)
self.assertFalse(self.locally_called)
self.assertEquals(instances, [2])
def test_run_instance_non_admin(self):
"""Test creating an instance locally using run_instance, passing
a non-admin context. DB actions should work."""
self.was_admin = False
def fake_schedule(context, *args, **kwargs):
# make sure this is called with admin context, even though
# we're using user context below
self.was_admin = context.is_admin
return []
sched = ds_fakes.FakeDistributedScheduler()
self.stubs.Set(sched, '_schedule', fake_schedule)
fake_context = context.RequestContext('user', 'project')
self.assertRaises(exception.NoValidHost, sched.schedule_run_instance,
fake_context, {})
self.assertTrue(self.was_admin)
def test_schedule_bad_topic(self):
"""Parameter checking."""
sched = ds_fakes.FakeDistributedScheduler()
self.assertRaises(NotImplementedError, sched._schedule, None, "foo",
{})
def test_schedule_no_instance_type(self):
"""Parameter checking."""
sched = ds_fakes.FakeDistributedScheduler()
self.assertRaises(NotImplementedError, sched._schedule, None,
"compute", {})
def test_schedule_happy_day(self):
"""_schedule() has no branching logic beyond basic input parameter
checking. Just make sure there's nothing glaringly wrong by doing
a happy day pass through."""
self.next_weight = 1.0
def _fake_filter_hosts(topic, request_info, unfiltered_hosts,
options):
return unfiltered_hosts
def _fake_weighted_sum(functions, hosts, options):
self.next_weight += 2.0
host, hostinfo = hosts[0]
|
kiwifb/numpy | numpy/f2py/crackfortran.py | Python | bsd-3-clause | 126,846 | 0.001908 | #!/usr/bin/env python
"""
crackfortran --- read fortran (77,90) code and extract declaration information.
Copyright 1999-2004 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/09/27 07:13:49 $
Pearu Peterson
Usage of crackfortran:
======================
Command line keys: -quiet,-verbose,-fix,-f77,-f90,-show,-h <pyffilename>
-m <module name for f77 routines>,--ignore-contains
Functions: crackfortran, crack2fortran
The following Fortran statements/constructions are supported
(or will be if needed):
block data,byte,call,character,common,complex,contains,data,
dimension,double complex,double precision,end,external,function,
implicit,integer,intent,interface,intrinsic,
logical,module,optional,parameter,private,public,
program,real,(sequence?),subroutine,type,use,virtual,
include,pythonmodule
Note: 'virtual' is mapped to 'dimension'.
Note: 'implicit integer (z) static (z)' is 'implicit static (z)' (this is minor bug).
Note: code after 'contains' will be ignored until its scope ends.
Note: 'common' statement is extended: dimensions are moved to variable definitions
Note: f2py directive: <commentchar>f2py<line> is read as <line>
Note: pythonmodule is introduced to represent Python module
Usage:
`postlist=crackfortran(files,funcs)`
`postlist` contains declaration information read from the list of files `files`.
`crack2fortran(postlist)` returns a fortran code to be saved to pyf-file
`postlist` has the following structure:
*** it is a list of dictionaries containing `blocks':
B = {'block','body','vars','parent_block'[,'name','prefix','args','result',
'implicit','externals','interfaced','common','sortvars',
'commonvars','note']}
B['block'] = 'interface' | 'function' | 'subroutine' | 'module' |
'program' | 'block data' | 'type' | 'pythonmodule'
B['body'] --- list containing `subblocks' with the same structure as `blocks'
B['parent_block'] --- dictionary of a parent block:
C['body'][<index>]['parent_block'] is C
B['vars'] --- dictionary of variable definitions
B['sortvars'] --- dictionary of variable definitions sorted by dependence (independent first)
B['name'] --- name of the block (not if B['block']=='interface')
B['prefix'] --- prefix string (only if B['block']=='function')
B['args'] --- list of argument names if B['block']== 'function' | 'subroutine'
B['result'] --- name of the return value (only if B['block']=='function')
B['implicit'] --- dictionary {'a':<variable definition>,'b':...} | None
B['externals'] --- list of variables being external
B['interfaced'] --- list of variables being external and defined
B['common'] --- dictionary of common blocks (list of objects)
B['commonvars'] --- list of variables used in common blocks (dimensions are moved to variable definitions)
B['from'] --- string showing the 'parents' of the current block
B['use'] --- dictionary of modules used in current block:
{<modulename>:{['only':<0|1>],['map':{<local_name1>:<use_name1>,...}]}}
B['note'] --- list of LaTeX comments on the block
B['f2pyenhancements'] --- optional dictionary
{'threadsafe':'','fortranname':<name>,
'callstatement':<C-expr>|<multi-line block>,
'callprotoargument':<C-expr-list>,
'usercode':<multi-line block>|<list of multi-line blocks>,
'pymethoddef:<multi-line block>'
}
B['entry'] --- dictionary {entryname:argslist,..}
B['varnames'] --- list of variable names given in the order of reading the
Fortran code, useful for derived types.
B['saved_interface'] --- a string of scanned routine signature, defines explicit interface
*** Variable definition is a dictionary
D = B['vars'][<variable name>] =
{'typespec'[,'attrspec','kindselector','charselector','=','typename']}
D['typespec'] = 'byte' | 'character' | 'complex' | 'double complex' |
'double precision' | 'integer' | 'logical' | 'real' | 'type'
D['attrspec'] --- list of attributes (e.g. 'dimension(<arrayspec>)',
'external','intent(in|out|inout|hide|c|callback|cache|aligned4|aligned8|aligned16)',
'optional','required', etc)
K = D['kindselector'] = {['*','kind']} (only if D['typespec'] =
'complex' | 'integer' | 'logical' | 'real' )
C = D['charselector'] = {['*','len','kind']}
(only if D['typespec']=='character')
D['='] --- initialization expression string
D['typename'] --- name of the type if D['typespec']=='type'
D['dimension'] --- list of dimension bounds
D['intent'] --- list of intent specifications
D['depend'] --- list of variable names on which current variable depends on
D['check'] --- list of C-expressions; if C-expr returns zero, exception is raised
D['note'] --- list of LaTeX comments on the variable
*** Meaning of kind/char selectors (few examples):
D['typespec>']*K['*']
D['typespec'](kind=K['kind'])
character*C['*']
character(len=C['len'],kind=C['kind'])
(see also fortran type declaration statement formats below)
Fortran 90 type declaration statement format (F77 is subset of F90) |
====================================================================
(Main source: IBM XL Fortran 5.1 Language Reference Manual)
type declaration = <typespec> [[<attrspec>]::] <entitydecl>
<typespec> = byte |
character[<charselector>] |
complex[<kindselector>] | |
double complex |
double precision |
integer[<kindselector>] |
logical[<kindselector>] |
real[<kindselector>] |
type(<typename>)
<charselector> = * <charlen> |
([len=]<len>[,[kind=]<kind>]) |
(kind=<kind>[,len=<len>])
<kindselector> = * <intlen> |
([kind=]<kind>)
<attrspec> = comma separated list of attributes.
Only the following attributes are used in
building up the interface:
external
(parameter --- affects '=' key)
optional
intent
Other attributes are ignored.
<intentspec> = in | out | inout
<arrayspec> = comma separated list of dimension bounds.
<entitydecl> = <name> [[*<charlen>][(<arrayspec>)] | [(<arrayspec>)]*<charlen>]
[/<init_expr>/ | =<init_expr>] [,<entitydecl>]
In addition, the following attributes are used: check,depend,note
TODO:
* Apply 'parameter' attribute (e.g. 'integer parameter :: i=2' 'real x(i)'
-> 'real x(2)')
The above may be solved by creating appropriate preprocessor program, for example.
"""
from __future__ import division, absolute_import, print_function
import sys
import string
import fileinput
import re
import os
import copy
import platform
from . import __version__
# The eviroment provided by auxfuncs.py is needed for some calls to eval.
# As the needed functions cannot be determined by static inspection of the
# code, it is safest to use import * pending a major refactoring of f2py.
from .auxfuncs import *
f2py_version = __version__.version
# Global flags:
strictf77 = 1 # Ignore `!' comments unless line[0]=='!'
sourcecodeform = 'fix' # 'fix','free'
quiet = 0 # Be verbose if 0 (Obsolete: not used any more)
verbose = 1 # Be quiet if 0, extra verbose if > 1.
tabchar = 4 * ' '
pyffilename = ''
f77modulename = ''
skipemptyends = 0 # for old F77 programs without 'program' statement
ignorecontains = 1
dolowercase = 1
debug = []
# Global variables
beginpattern = ''
currentfilename = ''
expectbegin = 1
f90modulevars = {}
filepositiontext = '' |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.