Dataset schema (one row per source file; ranges are min-max over the dataset):
repo_name: string (5-92 chars) | path: string (4-221 chars) | copies: string (19 classes) | size: string (4-6 chars) | content: string (766-896k chars) | license: string (15 classes) | hash: int64 (-9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean: float64 (6.51-99.9) | line_max: int64 (32-997) | alpha_frac: float64 (0.25-0.96) | autogenerated: bool (1 class) | ratio: float64 (1.5-13.6) | config_test: bool (2 classes) | has_no_keywords: bool (2 classes) | few_assignments: bool (1 class)
repo: skipmodea1/plugin.video.xbmctorrent | path: resources/site-packages/xbmcswift2/listitem.py | copies: 1 | size: 7446
'''
xbmcswift2.listitem
------------------
This module contains the ListItem class, which acts as a wrapper
for xbmcgui.ListItem.
:copyright: (c) 2012 by Jonathan Beluch
:license: GPLv3, see LICENSE for more details.
'''
from xbmcswift2 import xbmcgui
class ListItem(object):
'''A wrapper for the xbmcgui.ListItem class. The class keeps track
of any set properties that xbmcgui doesn't expose getters for.
'''
def __init__(self, label=None, label2=None, icon=None, thumbnail=None,
path=None):
'''Defaults are an empty string since xbmcgui.ListItem will not
accept None.
'''
kwargs = {
'label': label,
'label2': label2,
'iconImage': icon,
'thumbnailImage': thumbnail,
'path': path,
}
#kwargs = dict((key, val) for key, val in locals().items() if val is
#not None and key != 'self')
kwargs = dict((key, val) for key, val in kwargs.items()
if val is not None)
self._listitem = xbmcgui.ListItem(**kwargs)
# xbmc doesn't make getters available for these properties so we'll
# keep track on our own
self._icon = icon
self._path = path
self._thumbnail = thumbnail
self._context_menu_items = []
self.is_folder = True
self._played = False
def __repr__(self):
return ("<ListItem '%s'>" % self.label).encode('utf-8')
def __str__(self):
return ('%s (%s)' % (self.label, self.path)).encode('utf-8')
def get_context_menu_items(self):
'''Returns the list of currently set context_menu items.'''
return self._context_menu_items
def add_context_menu_items(self, items, replace_items=False):
'''Adds context menu items. If replace_items is True all
previous context menu items will be removed.
'''
for label, action in items:
assert isinstance(label, basestring)
assert isinstance(action, basestring)
if replace_items:
self._context_menu_items = []
self._context_menu_items.extend(items)
self._listitem.addContextMenuItems(items, replace_items)
# dirty hack; setInfo takes a type and an info-labels dict
self._listitem.setInfo("video", {"mediatype": "video"})
def get_label(self):
'''Returns the listitem's label'''
return self._listitem.getLabel()
def set_label(self, label):
'''Sets the listitem's label'''
return self._listitem.setLabel(label)
label = property(get_label, set_label)
def get_label2(self):
'''Returns the listitem's label2'''
return self._listitem.getLabel2()
def set_label2(self, label):
'''Sets the listitem's label2'''
return self._listitem.setLabel2(label)
label2 = property(get_label2, set_label2)
def is_selected(self):
'''Returns True if the listitem is selected.'''
return self._listitem.isSelected()
def select(self, selected_status=True):
'''Sets the listitems selected status to the provided value.
Defaults to True.
'''
return self._listitem.select(selected_status)
selected = property(is_selected, select)
def set_info(self, type, info_labels):
'''Sets the listitems info'''
return self._listitem.setInfo(type, info_labels)
def get_property(self, key):
'''Returns the property associated with the given key'''
return self._listitem.getProperty(key)
def set_property(self, key, value):
'''Sets a property for the given key and value'''
return self._listitem.setProperty(key, value)
def add_stream_info(self, stream_type, stream_values):
'''Adds stream details'''
return self._listitem.addStreamInfo(stream_type, stream_values)
def get_icon(self):
'''Returns the listitem's icon image'''
return self._icon
def set_icon(self, icon):
'''Sets the listitem's icon image'''
self._icon = icon
return self._listitem.setIconImage(icon)
icon = property(get_icon, set_icon)
def get_thumbnail(self):
'''Returns the listitem's thumbnail image'''
return self._thumbnail
def set_thumbnail(self, thumbnail):
'''Sets the listitem's thumbnail image'''
self._thumbnail = thumbnail
return self._listitem.setThumbnailImage(thumbnail)
thumbnail = property(get_thumbnail, set_thumbnail)
def get_path(self):
'''Returns the listitem's path'''
return self._path
def set_path(self, path):
'''Sets the listitem's path'''
self._path = path
return self._listitem.setPath(path)
path = property(get_path, set_path)
def get_is_playable(self):
'''Returns True if the listitem is playable, False if it is a
directory
'''
return not self.is_folder
def set_is_playable(self, is_playable):
'''Sets the listitem's playable flag'''
value = 'false'
if is_playable:
value = 'true'
self.set_property('isPlayable', value)
self.is_folder = not is_playable
playable = property(get_is_playable, set_is_playable)
def set_played(self, was_played):
'''Sets the played status of the listitem. Used to
differentiate between a resolved video versus a playable item.
Has no effect on XBMC, it is strictly used for xbmcswift2.
'''
self._played = was_played
def get_played(self):
'''Returns True if the video was played.'''
return self._played
def as_tuple(self):
'''Returns a tuple of list item properties:
(path, the wrapped xbmcgui.ListItem, is_folder)
'''
return self.path, self._listitem, self.is_folder
def as_xbmc_listitem(self):
'''Returns the wrapped xbmcgui.ListItem'''
return self._listitem
@classmethod
def from_dict(cls, label=None, label2=None, icon=None, thumbnail=None,
path=None, selected=None, info=None, properties=None,
context_menu=None, replace_context_menu=False,
is_playable=None, info_type='video', stream_info=None):
'''A ListItem constructor for setting a lot of properties not
available in the regular __init__ method. Useful to collect all
the properties in a dict and then use the **dct to call this
method.
'''
listitem = cls(label, label2, icon, thumbnail, path)
if selected is not None:
listitem.select(selected)
if info:
listitem.set_info(info_type, info)
if is_playable:
listitem.set_is_playable(True)
listitem.set_info("video", {"mediatype": "video"})
if properties:
# Need to support existing tuples, but prefer to have a dict for
# properties.
if hasattr(properties, 'items'):
properties = properties.items()
for key, val in properties:
listitem.set_property(key, val)
if stream_info:
for stream_type, stream_values in stream_info.items():
listitem.add_stream_info(stream_type, stream_values)
if context_menu:
listitem.add_context_menu_items(context_menu, replace_context_menu)
return listitem
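A minimal usage sketch of the class above (the label and path values are hypothetical; xbmcgui must be importable, i.e. running inside XBMC/Kodi):
from xbmcswift2.listitem import ListItem

item = ListItem.from_dict(
    label='Big Buck Bunny',                              # hypothetical title
    path='plugin://plugin.video.xbmctorrent/play/',      # hypothetical path
    is_playable=True,
    info={'title': 'Big Buck Bunny'},
    properties={'resumetime': '0'})
url, xbmcgui_item, is_folder = item.as_tuple()  # ready for xbmcplugin.addDirectoryItem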
license: gpl-3.0 | hash: -4,435,313,053,114,563,600 | line_mean: 32.241071 | line_max: 79 | alpha_frac: 0.60274 | autogenerated: false | ratio: 4.088962 | config_test: false | has_no_keywords: false | few_assignments: false

repo: markkorput/py2030 | path: tests/test_omxvideo.py | copies: 1 | size: 7634
#!/usr/bin/env python
import unittest
from py2030.components.omxvideo import OmxVideo
from py2030.event_manager import EventManager
class TestOmxVideo(unittest.TestCase):
def test_init(self):
omxvideo = OmxVideo()
self.assertEqual(omxvideo.player, None)
self.assertIsNone(omxvideo.event_manager)
def test_args_option(self):
# default; black background (to hide console) and disable OSD
self.assertEqual(OmxVideo().args, ['--no-osd', '-b'])
# customizable through 'args' option
args = ['--no-osd', '-adev', 'both', '-b', '--loop']
omxvideo = OmxVideo({'args': args})
self.assertEqual(omxvideo.args, args)
def test_setup(self):
omxvideo = OmxVideo()
em = EventManager()
omxvideo.setup(em)
self.assertEqual(omxvideo.event_manager, em)
def test_setup_doesnt_require_event_manager(self):
omxvideo = OmxVideo()
omxvideo.setup()
self.assertIsNone(omxvideo.event_manager)
def test_input_event_play(self):
omxvideo = OmxVideo({'input_events': {'play_event': 'play'}})
em = EventManager()
self.assertEqual(len(em.get('play_event')), 0) # not yet registered
omxvideo.setup(em)
self.assertEqual(len(em.get('play_event')), 1) # registered
self.assertEqual(omxvideo.playEvent._fireCount, 0) # not fired yet
omxvideo.event_manager.fire('play_event')
self.assertEqual(omxvideo.playEvent._fireCount, 1) # fired
omxvideo.destroy()
self.assertEqual(len(em.get('play_event')), 0) # unregistered
def test_input_event_pause(self):
omxvideo = OmxVideo({'input_events': {'pause_event': 'pause'}})
em = EventManager()
self.assertEqual(len(em.get('pause_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('pause_event')), 1) # registered
self.assertEqual(omxvideo.pauseEvent._fireCount, 0)
omxvideo.event_manager.fire('pause_event')
self.assertEqual(omxvideo.pauseEvent._fireCount, 1)
omxvideo.destroy()
self.assertEqual(len(em.get('pause_event')), 0) # unregistered
def test_input_event_toggle(self):
omxvideo = OmxVideo({'input_events': {'toggle_event': 'toggle'}})
em = EventManager()
self.assertEqual(len(em.get('toggle_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('toggle_event')), 1) # registered
self.assertEqual(omxvideo.toggleEvent._fireCount, 0)
omxvideo.event_manager.fire('toggle_event')
self.assertEqual(omxvideo.toggleEvent._fireCount, 1)
omxvideo.destroy()
self.assertEqual(len(em.get('toggle_event')), 0) # unregistered
def test_input_event_stop(self):
omxvideo = OmxVideo({'input_events': {'stop_event': 'stop'}})
em = EventManager()
self.assertEqual(len(em.get('stop_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('stop_event')), 1) # registered
self.assertEqual(omxvideo.stopEvent._fireCount, 0)
omxvideo.event_manager.fire('stop_event')
self.assertEqual(omxvideo.stopEvent._fireCount, 1)
omxvideo.destroy()
self.assertEqual(len(em.get('stop_event')), 0) # unregistered
def test_input_event_start(self):
omxvideo = OmxVideo({'input_events': {'start_event': 'start'}})
em = EventManager()
self.assertEqual(len(em.get('start_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('start_event')), 1) # registered
self.assertEqual(omxvideo.startEvent._fireCount, 0)
omxvideo.event_manager.fire('start_event') # fire without params
self.assertEqual(omxvideo.startEvent._fireCount, 1) # performed
omxvideo.event_manager.get('start_event').fire(3) # fire with number param
self.assertEqual(omxvideo.startEvent._fireCount, 2) # performed again
omxvideo.destroy()
self.assertEqual(len(em.get('start_event')), 0) # unregistered
def test_input_event_load(self):
omxvideo = OmxVideo({'input_events': {'load_event': 'load'}, 'playlist': ['1', '2', '3', '4']})
em = EventManager()
self.assertEqual(len(em.get('load_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('load_event')), 1) # registered
self.assertEqual(omxvideo.loadEvent._fireCount, 0)
omxvideo.event_manager.fire('load_event') # fire without params
self.assertEqual(omxvideo.loadEvent._fireCount, 1) # performed
omxvideo.event_manager.get('load_event').fire(3) # fire with number param
self.assertEqual(omxvideo.loadEvent._fireCount, 2) # performed again
omxvideo.destroy()
self.assertEqual(len(em.get('load_event')), 0) # unregistered
def test_input_event_seek(self):
omxvideo = OmxVideo({'input_events': {'seek_event': 'seek'}})
em = EventManager()
self.assertEqual(len(em.get('seek_event')), 0)
omxvideo.setup(em)
self.assertEqual(len(em.get('seek_event')), 1) # registered
self.assertEqual(omxvideo.seekEvent._fireCount, 0)
omxvideo.event_manager.fire('seek_event') # fire without params
self.assertEqual(omxvideo.seekEvent._fireCount, 1) # performed
omxvideo.event_manager.get('seek_event').fire(3) # fire with number param
self.assertEqual(omxvideo.seekEvent._fireCount, 2) # performed again
omxvideo.destroy()
self.assertEqual(len(em.get('seek_event')), 0) # unregistered
def test_multiple_input_events(self):
input_events = {
'play_event1': 'play',
'play_event2': 'play',
'pause_event1': 'pause',
'pause_event2': 'pause',
'toggle_event1': 'toggle',
'toggle_event2': 'toggle',
'stop_event1': 'stop',
'stop_event2': 'stop',
'load_event1': 'load',
'load_event2': 'load',
'start_event1': 'start',
'start_event2': 'start',
'seek_event1': 'seek',
'seek_event2': 'seek'
}
omxvideo = OmxVideo({'input_events': input_events, 'playlist': ['1']})
em = EventManager()
for name in input_events.keys():
self.assertEqual(len(em.get(name)), 0) # not yet registered
omxvideo.setup(em)
for name in input_events.keys():
self.assertEqual(len(em.get(name)), 1) # registered
self.assertEqual(omxvideo.playEvent._fireCount, 0)
self.assertEqual(omxvideo.pauseEvent._fireCount, 0)
self.assertEqual(omxvideo.toggleEvent._fireCount, 0)
self.assertEqual(omxvideo.stopEvent._fireCount, 0)
self.assertEqual(omxvideo.startEvent._fireCount, 0)
self.assertEqual(omxvideo.loadEvent._fireCount, 0)
self.assertEqual(omxvideo.seekEvent._fireCount, 0)
for name in input_events.keys():
omxvideo.event_manager.fire(name)
self.assertEqual(omxvideo.playEvent._fireCount, 4) # the two 'start' actions also call play
self.assertEqual(omxvideo.pauseEvent._fireCount, 2)
self.assertEqual(omxvideo.toggleEvent._fireCount, 2)
self.assertEqual(omxvideo.stopEvent._fireCount, 2)
self.assertEqual(omxvideo.startEvent._fireCount, 2)
self.assertEqual(omxvideo.loadEvent._fireCount, 4) # the two start actions also load
self.assertEqual(omxvideo.seekEvent._fireCount, 2)
omxvideo.destroy()
for name in input_events.keys():
self.assertEqual(len(em.get(name)), 0) # unregistered
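A condensed sketch of the configuration pattern these tests exercise (the video file names are hypothetical); setup() registers the configured input events and destroy() unregisters them:
from py2030.components.omxvideo import OmxVideo
from py2030.event_manager import EventManager

omxvideo = OmxVideo({
    'playlist': ['intro.mp4', 'loop.mp4'],  # hypothetical playlist entries
    'input_events': {'play_event': 'play', 'stop_event': 'stop'}})
em = EventManager()
omxvideo.setup(em)     # registers 'play_event' and 'stop_event' on the manager
em.fire('play_event')  # fires omxvideo.playEvent, as asserted in the tests above
omxvideo.destroy()     # unregisters both events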
license: mit | hash: -3,664,988,029,826,936,300 | line_mean: 43.127168 | line_max: 103 | alpha_frac: 0.631124 | autogenerated: false | ratio: 3.54575 | config_test: true | has_no_keywords: false | few_assignments: false

repo: hcuffy/concourse | path: concourse-driver-python/tests/utils_tests.py | copies: 1 | size: 2664
# Copyright (c) 2015 Cinchapi Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nose.tools import *
import string
import random
from concourse.utils import *
class TestUtils(object):
@staticmethod
def generate_random_string(size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def test_convert_string_roundtrip(self):
orig = TestUtils.generate_random_string()
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_tag_roundtrip(self):
orig = Tag.create(TestUtils.generate_random_string())
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_int_roundtrip(self):
orig = 100
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_long_roundtrip(self):
orig = 2147483648
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_link_roundtrip(self):
orig = Link.to(2147483648)
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_boolean_roundtrip(self):
orig = False
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_convert_float_roundtrip(self):
orig = 3.14353
assert_equals(orig, thrift_to_python(python_to_thrift(orig)))
def test_find_in_kwargs_bad_key(self):
value = find_in_kwargs_by_alias('foo', {})
assert_is_none(value)
def test_find_in_kwargs_criteria(self):
kwargs = {
'ccl': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_equals('foo', value)
kwargs = {
'query': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_equals('foo', value)
kwargs = {
'where': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_equals('foo', value)
kwargs = {
'foo': 'foo'
}
value = find_in_kwargs_by_alias('criteria', kwargs)
assert_is_none(value)
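A condensed sketch of the roundtrip invariant these tests assert (the tests pull these helpers in via `from concourse.utils import *`):
from concourse.utils import python_to_thrift, thrift_to_python

# Serializing to Thrift and back should be lossless for every supported type.
for value in ['foo', 100, 2147483648, 3.14353, False]:
    assert value == thrift_to_python(python_to_thrift(value))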
license: apache-2.0 | hash: -8,572,895,711,197,043,000 | line_mean: 33.153846 | line_max: 85 | alpha_frac: 0.646772 | autogenerated: false | ratio: 3.705146 | config_test: true | has_no_keywords: false | few_assignments: false

repo: gumblex/zhconv | path: zhconv/zhconv.py | copies: 1 | size: 19496
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module implements a simple conversion and localization between simplified and traditional Chinese using tables from MediaWiki.
It doesn't contain a segmentation function; it uses maximal forward matching, so it's simple.
For a complete and accurate solution, see OpenCC.
For Chinese segmentation, see Jieba.
>>> print(convert('我幹什麼不干你事。', 'zh-cn'))
我干什么不干你事。
>>> print(convert('人体内存在很多微生物', 'zh-tw'))
人體內存在很多微生物
Supports MediaWiki's conversion format:
>>> print(convert_for_mw('在现代,机械计算-{}-机的应用已经完全被电子计算-{}-机所取代', 'zh-hk'))
在現代,機械計算機的應用已經完全被電子計算機所取代
>>> print(convert_for_mw('-{zh-hant:資訊工程;zh-hans:计算机工程学;}-是电子工程的一个分支,主要研究计算机软硬件和二者间的彼此联系。', 'zh-tw'))
資訊工程是電子工程的一個分支,主要研究計算機軟硬體和二者間的彼此聯繫。
>>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-sg'))
张国荣曾在英国利兹大学学习。
"""
# Only Python3 can pass the doctest here due to unicode problems.
__version__ = '1.4.1'
import os
import sys
import re
import json
try:
from pkg_resources import resource_stream
get_module_res = lambda *res: resource_stream(__name__, os.path.join(*res))
except ImportError:
get_module_res = lambda *res: open(os.path.normpath(
os.path.join(os.getcwd(), os.path.dirname(__file__), *res)), 'rb')
# Locale fallback order lookup dictionary
Locales = {
'zh-cn': ('zh-cn', 'zh-hans', 'zh-sg', 'zh'),
'zh-hk': ('zh-hk', 'zh-hant', 'zh-tw', 'zh'),
'zh-tw': ('zh-tw', 'zh-hant', 'zh-hk', 'zh'),
'zh-sg': ('zh-sg', 'zh-hans', 'zh-cn', 'zh'),
'zh-my': ('zh-my', 'zh-sg', 'zh-hans', 'zh-cn', 'zh'),
'zh-mo': ('zh-mo', 'zh-hk', 'zh-hant', 'zh-tw', 'zh'),
'zh-hant': ('zh-hant', 'zh-tw', 'zh-hk', 'zh'),
'zh-hans': ('zh-hans', 'zh-cn', 'zh-sg', 'zh'),
'zh': ('zh',) # special value for no conversion
}
_DEFAULT_DICT = "zhcdict.json"
DICTIONARY = _DEFAULT_DICT
zhcdicts = None
dict_zhcn = None
dict_zhsg = None
dict_zhtw = None
dict_zhhk = None
pfsdict = {}
RE_langconv = re.compile(r'(-\{|\}-)')
RE_splitflag = re.compile(r'\s*\|\s*')
RE_splitmap = re.compile(r'\s*;\s*')
RE_splituni = re.compile(r'\s*=>\s*')
RE_splitpair = re.compile(r'\s*:\s*')
def loaddict(filename=DICTIONARY):
"""
Load the dictionary from a specific JSON file.
"""
global zhcdicts
if zhcdicts:
return
if filename == _DEFAULT_DICT:
zhcdicts = json.loads(get_module_res(filename).read().decode('utf-8'))
else:
with open(filename, 'rb') as f:
zhcdicts = json.loads(f.read().decode('utf-8'))
zhcdicts['SIMPONLY'] = frozenset(zhcdicts['SIMPONLY'])
zhcdicts['TRADONLY'] = frozenset(zhcdicts['TRADONLY'])
def getdict(locale):
"""
Generate or get the conversion dict cache for a certain locale.
Dictionaries are loaded on demand.
"""
global zhcdicts, dict_zhcn, dict_zhsg, dict_zhtw, dict_zhhk, pfsdict
if zhcdicts is None:
loaddict(DICTIONARY)
if locale == 'zh-cn':
if dict_zhcn:
got = dict_zhcn
else:
dict_zhcn = zhcdicts['zh2Hans'].copy()
dict_zhcn.update(zhcdicts['zh2CN'])
got = dict_zhcn
elif locale == 'zh-tw':
if dict_zhtw:
got = dict_zhtw
else:
dict_zhtw = zhcdicts['zh2Hant'].copy()
dict_zhtw.update(zhcdicts['zh2TW'])
got = dict_zhtw
elif locale == 'zh-hk' or locale == 'zh-mo':
if dict_zhhk:
got = dict_zhhk
else:
dict_zhhk = zhcdicts['zh2Hant'].copy()
dict_zhhk.update(zhcdicts['zh2HK'])
got = dict_zhhk
elif locale == 'zh-sg' or locale == 'zh-my':
if dict_zhsg:
got = dict_zhsg
else:
dict_zhsg = zhcdicts['zh2Hans'].copy()
dict_zhsg.update(zhcdicts['zh2SG'])
got = dict_zhsg
elif locale == 'zh-hans':
got = zhcdicts['zh2Hans']
elif locale == 'zh-hant':
got = zhcdicts['zh2Hant']
else:
got = {}
if locale not in pfsdict:
pfsdict[locale] = getpfset(got)
return got
def getpfset(convdict):
pfset = []
for word in convdict:
for ch in range(len(word)):
pfset.append(word[:ch+1])
return frozenset(pfset)
def issimp(s, full=False):
"""
Detect whether the text is Simplified Chinese or Traditional Chinese.
Returns True for Simplified; False for Traditional; None for unknown.
If full=False, it returns once the first simplified- or traditional-only
character is encountered, so it's for quick and rough identification;
else, it compares the count and returns the most likely one.
Use `is` (True/False/None) to check the result.
`s` must be unicode (Python 2) or str (Python 3), or you'll get None.
"""
if zhcdicts is None:
loaddict(DICTIONARY)
simp, trad = 0, 0
if full:
for ch in s:
if ch in zhcdicts['SIMPONLY']:
simp += 1
elif ch in zhcdicts['TRADONLY']:
trad += 1
if simp > trad:
return True
elif simp < trad:
return False
else:
return None
else:
for ch in s:
if ch in zhcdicts['SIMPONLY']:
return True
elif ch in zhcdicts['TRADONLY']:
return False
return None
def fallback(locale, mapping):
for l in Locales[locale]:
if l in mapping:
return mapping[l]
return convert(tuple(mapping.values())[0], locale)
def convtable2dict(convtable, locale, update=None):
"""
Convert a list of conversion dicts to a dict for a certain locale.
>>> sorted(convtable2dict([{'zh-hk': '列斯', 'zh-hans': '利兹', 'zh': '利兹', 'zh-tw': '里茲'}, {':uni': '巨集', 'zh-cn': '宏'}], 'zh-cn').items())
[('列斯', '利兹'), ('利兹', '利兹'), ('巨集', '宏'), ('里茲', '利兹')]
"""
rdict = update.copy() if update else {}
for r in convtable:
if ':uni' in r:
if locale in r:
rdict[r[':uni']] = r[locale]
elif locale[:-1] == 'zh-han':
if locale in r:
for word in r.values():
rdict[word] = r[locale]
else:
v = fallback(locale, r)
for word in r.values():
rdict[word] = v
return rdict
def tokenize(s, locale, update=None):
"""
Tokenize `s` according to corresponding locale dictionary.
Don't use this for serious text processing.
"""
zhdict = getdict(locale)
pfset = pfsdict[locale]
if update:
zhdict = zhdict.copy()
zhdict.update(update)
newset = set()
for word in update:
for ch in range(len(word)):
newset.add(word[:ch+1])
pfset = pfset | newset
ch = []
N = len(s)
pos = 0
while pos < N:
i = pos
frag = s[pos]
maxword = None
maxpos = 0
while i < N and frag in pfset:
if frag in zhdict:
maxword = frag
maxpos = i
i += 1
frag = s[pos:i+1]
if maxword is None:
maxword = s[pos]
pos += 1
else:
pos = maxpos + 1
ch.append(maxword)
return ch
def convert(s, locale, update=None):
"""
Main convert function.
:param s: must be `unicode` (Python 2) or `str` (Python 3).
:param locale: should be one of ``('zh-hans', 'zh-hant', 'zh-cn', 'zh-sg',
'zh-tw', 'zh-hk', 'zh-my', 'zh-mo')``.
:param update: a dict which updates the conversion table, eg.
``{'from1': 'to1', 'from2': 'to2'}``
>>> print(convert('我幹什麼不干你事。', 'zh-cn'))
我干什么不干你事。
>>> print(convert('我幹什麼不干你事。', 'zh-cn', {'不干': '不幹'}))
我干什么不幹你事。
>>> print(convert('人体内存在很多微生物', 'zh-tw'))
人體內存在很多微生物
"""
if locale == 'zh' or locale not in Locales:
# "no conversion"
return s
zhdict = getdict(locale)
pfset = pfsdict[locale]
newset = set()
if update:
# TODO: some sort of caching
#zhdict = zhdict.copy()
#zhdict.update(update)
newset = set()
for word in update:
for ch in range(len(word)):
newset.add(word[:ch+1])
#pfset = pfset | newset
ch = []
N = len(s)
pos = 0
while pos < N:
i = pos
frag = s[pos]
maxword = None
maxpos = 0
while i < N and (frag in pfset or frag in newset):
if update and frag in update:
maxword = update[frag]
maxpos = i
elif frag in zhdict:
maxword = zhdict[frag]
maxpos = i
i += 1
frag = s[pos:i+1]
if maxword is None:
maxword = s[pos]
pos += 1
else:
pos = maxpos + 1
ch.append(maxword)
return ''.join(ch)
def convert_for_mw(s, locale, update=None):
"""
Recognizes MediaWiki's human conversion format.
Use locale='zh' for no conversion.
Reference: (all tests passed)
https://zh.wikipedia.org/wiki/Help:高级字词转换语法
https://www.mediawiki.org/wiki/Writing_systems/Syntax
>>> print(convert_for_mw('在现代,机械计算-{}-机的应用已经完全被电子计算-{}-机所取代', 'zh-hk'))
在現代,機械計算機的應用已經完全被電子計算機所取代
>>> print(convert_for_mw('-{zh-hant:資訊工程;zh-hans:计算机工程学;}-是电子工程的一个分支,主要研究计算机软硬件和二者间的彼此联系。', 'zh-tw'))
資訊工程是電子工程的一個分支,主要研究計算機軟硬體和二者間的彼此聯繫。
>>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-hant'))
張國榮曾在英國里茲大學學習。
>>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-sg'))
张国荣曾在英国利兹大学学习。
>>> convert_for_mw('-{zh-hant:;\\nzh-cn:}-', 'zh-tw') == ''
True
>>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-tw'))
毫米(毫公分),符號mm,是長度單位和降雨量單位,港澳和大陸稱為毫米(台灣亦有使用,但較常使用名稱為毫公分)。
>>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-cn'))
毫米(毫公分),符号mm,是长度单位和降雨量单位,台湾作公釐或公厘。
>>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘', 'zh-hk')) # unbalanced test
毫米(毫公分),符號mm,是長度單位和降雨量單位,台灣作公釐或公厘
>>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-tw'))
報頭的「參攷消息」四字摘自魯迅筆跡,「攷」是「考」的異體字,讀音kǎo,與「考」字相同。
>>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-cn'))
报头的“參攷消息”四字摘自鲁迅笔迹,“參”是“参”的繁体字,读音cān,与简体的“参”字相同;“攷”是“考”的异体字,读音kǎo,与“考”字相同。
>>> print(convert_for_mw('{{Col-break}}-->', 'zh-hant'))
{{Col-break}}-->
"""
ch = []
rules = []
ruledict = update.copy() if update else {}
nested = 0
block = ''
for frag in RE_langconv.split(s):
if frag == '-{':
nested += 1
block += frag
elif frag == '}-':
if not nested:
# bogus }-
ch.append(frag)
continue
block += frag
nested -= 1
if nested:
continue
newrules = []
delim = RE_splitflag.split(block[2:-2].strip(' \t\n\r\f\v;'))
if len(delim) == 1:
flag = None
mapping = RE_splitmap.split(delim[0])
else:
flag = RE_splitmap.split(delim[0].strip(' \t\n\r\f\v;'))
mapping = RE_splitmap.split(delim[1])
rule = {}
for m in mapping:
uni = RE_splituni.split(m)
if len(uni) == 1:
pair = RE_splitpair.split(uni[0])
else:
if rule:
newrules.append(rule)
rule = {':uni': uni[0]}
else:
rule[':uni'] = uni[0]
pair = RE_splitpair.split(uni[1])
if len(pair) == 1:
rule['zh'] = convert_for_mw(pair[0], 'zh', ruledict)
else:
rule[pair[0]] = convert_for_mw(pair[1], pair[0], ruledict)
newrules.append(rule)
if not flag:
ch.append(fallback(locale, newrules[0]))
elif any(ch in flag for ch in 'ATRD-HN'):
for f in flag:
# A: add rule for convert code (all text convert)
# H: Insert a conversion rule without output
if f in ('A', 'H'):
for r in newrules:
if not r in rules:
rules.append(r)
if f == 'A':
if ':uni' in r:
if locale in r:
ch.append(r[locale])
else:
ch.append(convert(r[':uni'], locale))
else:
ch.append(fallback(locale, newrules[0]))
# -: remove convert
elif f == '-':
for r in newrules:
try:
rules.remove(r)
except ValueError:
pass
# D: convert description (useless)
#elif f == 'D':
#ch.append('; '.join(': '.join(x) for x in newrules[0].items()))
# T: title convert (useless)
# R: raw content (implied above)
# N: current variant name (useless)
#elif f == 'N':
#ch.append(locale)
ruledict = convtable2dict(rules, locale, update)
else:
fblimit = frozenset(flag) & frozenset(Locales[locale])
limitedruledict = update.copy() if update else {}
for r in rules:
if ':uni' in r:
if locale in r:
limitedruledict[r[':uni']] = r[locale]
else:
v = None
for l in Locales[locale]:
if l in r and l in fblimit:
v = r[l]
break
for word in r.values():
limitedruledict[word] = v if v else convert(word, locale)
ch.append(convert(delim[1], locale, limitedruledict))
block = ''
elif nested:
block += frag
else:
ch.append(convert(frag, locale, ruledict))
if nested:
# unbalanced
ch.append(convert_for_mw(block + '}-'*nested, locale, ruledict))
return ''.join(ch)
def test_convert_mw(locale, update=None):
s = ('英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学\n'
'-{zh-hans:计算机; zh-hant:電腦;}-\n'
'-{H|巨集=>zh-cn:宏;}-\n'
'测试:巨集、宏\n'
'-{简体字繁體字}-\n'
'北-{}-韓、北朝-{}-鲜\n'
'-{H|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试:博客、網誌、部落格\n'
'-{A|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试:博客、網誌、部落格\n'
'-{H|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试1:博客、網誌、部落格\n'
'-{-|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试2:博客、網誌、部落格\n'
'-{T|zh-cn:汤姆·汉克斯; zh-hk:湯·漢斯; zh-tw:湯姆·漢克斯;}-\n'
'-{D|zh-cn:汤姆·汉克斯; zh-hk:湯·漢斯; zh-tw:湯姆·漢克斯;}-\n'
'-{H|zh-cn:博客; zh-hk:網誌; zh-tw:部落格;}-\n'
'测试1:-{zh;zh-hans;zh-hant|博客、網誌、部落格}-\n'
'测试2:-{zh;zh-cn;zh-hk|博客、網誌、部落格}-')
return convert_for_mw(s, locale, update)
def main():
"""
Simple stdin/stdout interface.
"""
if len(sys.argv) == 2 and sys.argv[1] in Locales:
locale = sys.argv[1]
convertfunc = convert
elif len(sys.argv) == 3 and sys.argv[1] == '-w' and sys.argv[2] in Locales:
locale = sys.argv[2]
convertfunc = convert_for_mw
else:
thisfile = __file__ if __name__ == '__main__' else 'python -mzhconv'
print("usage: %s [-w] {zh-cn|zh-tw|zh-hk|zh-sg|zh-hans|zh-hant|zh} < input > output" % thisfile)
sys.exit(1)
loaddict()
ln = sys.stdin.readline()
while ln:
l = ln.rstrip('\r\n')
if sys.version_info[0] < 3:
l = unicode(l, 'utf-8')
res = convertfunc(l, locale)
if sys.version_info[0] < 3:
print(res.encode('utf-8'))
else:
print(res)
ln = sys.stdin.readline()
if __name__ == '__main__':
main()
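A minimal sketch of the public API demonstrated by the doctests above (assuming the zhconv package re-exports these names; otherwise import from zhconv.zhconv):
from zhconv import convert, convert_for_mw, issimp

print(convert(u'我幹什麼不干你事。', 'zh-cn'))                      # 我干什么不干你事。
print(convert(u'我幹什麼不干你事。', 'zh-cn', {u'不干': u'不幹'}))  # per-call update dict
print(issimp(u'人体内存在很多微生物'))                              # True for simplified text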
license: mit | hash: 6,765,431,804,391,552,000 | line_mean: 34.587866 | line_max: 198 | alpha_frac: 0.505967 | autogenerated: false | ratio: 2.446218 | config_test: false | has_no_keywords: false | few_assignments: false

repo: jmah/cinesync_python | path: cinesync/event_handler.py | copies: 1 | size: 1815
import cinesync
import sys, os
from optparse import OptionParser
class EventHandler:
def __init__(self, argv=sys.argv, stdin=sys.stdin):
try:
self.session = cinesync.Session.load(stdin)
except Exception:
self.session = None
parser = OptionParser()
parser.add_option('--key')
parser.add_option('--save-format')
parser.add_option('--save-dir')
parser.add_option('--url')
(options, rest_args) = parser.parse_args(argv[1:])
if options.key is None: raise cinesync.CineSyncError('--key argument is required')
if options.save_format is None: raise cinesync.CineSyncError('--save-format argument is required')
self.session_key = options.key if options.key != cinesync.OFFLINE_KEY else None
self.save_format = options.save_format
self.save_ext = { 'JPEG': 'jpg', 'PNG': 'png' }[self.save_format]
self.save_parent = options.save_dir
self.url = options.url
def is_offline(self):
return self.session_key is None
def saved_frame_path(self, media_file, frame):
if self.save_parent is None: return None
if not media_file.annotations[frame].drawing_objects: return None
base = '%s-%05d' % (media_file.name, frame)
i = 1; p2 = None
while True:
p = p2
p2, i = self.__saved_frame_ver_path(base, i)
if not os.path.exists(p2):
return p
def __saved_frame_ver_path(self, base, version):
v = ' (%d)' % version if version > 1 else ''
basename = base + v + '.' + self.save_ext
return (os.path.join(self.save_parent, basename), version + 1)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
pass
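A minimal driving sketch for the handler above; the flags mirror its OptionParser options and all the values are hypothetical (cineSync normally supplies argv and the session on stdin):
import sys
from cinesync.event_handler import EventHandler

argv = ['handler', '--key', 'SESSIONKEY', '--save-format', 'JPEG',
        '--save-dir', '/tmp/frames']
with EventHandler(argv=argv, stdin=sys.stdin) as handler:
    print(handler.is_offline())  # False when a real (non-offline) key is given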
license: bsd-3-clause | hash: 8,719,700,762,281,527,000 | line_mean: 33.245283 | line_max: 106 | alpha_frac: 0.592837 | autogenerated: false | ratio: 3.681542 | config_test: false | has_no_keywords: false | few_assignments: false

repo: StephenLujan/Naith | path: game/plugins/cullaabb/aabb.py | copies: 1 | size: 10362
# Copyright Tom SF Haines
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from pandac.PandaModules import *
class AABB:
"""Defines an axis aligned bounding box."""
def __init__(self,low,high):
self.x = [low[0],high[0]]
self.y = [low[1],high[1]]
self.z = [low[2],high[2]]
self.bounds = [self.x,self.y,self.z]
self.centre = (0.5*(self.x[0]+self.x[1]),0.5*(self.y[0]+self.y[1]),0.5*(self.z[0]+self.z[1]))
self.volume = (self.x[1]-self.x[0]) * (self.y[1]-self.y[0]) * (self.z[1]-self.z[0])
def within(self,node):
"""Given a NodePath returns True if its in the AABB, False if it isn't."""
pos = node.getPos(render)
return pos[0]>=self.x[0] and pos[0]<=self.x[1] and pos[1]>=self.y[0] and pos[1]<=self.y[1] and pos[2]>=self.z[0] and pos[2]<=self.z[1]
def __str__(self):
return '{'+str(self.x)+','+str(self.y)+','+str(self.z)+'}'
aabbLambda = 1e-3
aabbCutCost = 2.0
class SetAABB:
"""A set of AABB's - uses a kd tree with hieuristic dimension detection to subdivide at each point. Each level keeps a list of aabb's that cross the dividing line."""
def __init__(self,aabbs):
"""Given a list of AABB's."""
# Work out what happens for dividing on each dimension - sort by the AABB's centres and then select the centre aabb by volume, then try dividing by the sides & centre of the centre aabb and count how many nodes are intercepted with a cost for offsetting too far - select the dimension division with the least divided nodes...
# Get half the volume...
totVolume = sum(map(lambda x:x.volume,aabbs))
halfVolume = totVolume*0.5
# Variables we are finding the best option for...
bestDimension = 0
bestCutPoint = 0.0
bestCost = 1e20
bestLow = []
bestMid = aabbs
bestHigh = []
# Try each dimension, with multiple centre choice, store the best...
for dim in xrange(3):
byDim = sorted(aabbs,key=lambda x: x.centre[dim])
centre = 0
volume = 0.0
while centre+1<len(byDim) and volume<halfVolume:
volume += byDim[centre].volume
centre += 1
options = (byDim[centre].bounds[dim][0]-aabbLambda, byDim[centre].centre[dim], byDim[centre].bounds[dim][1]+aabbLambda)
for cutPoint in options:
cost = 0.0
lowVol = 0.0
highVol = 0.0
low = []
mid = []
high = []
for aabb in byDim:
if aabb.bounds[dim][1]<cutPoint:
lowVol += aabb.volume
low.append(aabb)
elif aabb.bounds[dim][0]>cutPoint:
highVol += aabb.volume
high.append(aabb)
else:
cost += aabb.volume*aabbCutCost
mid.append(aabb)
cost += math.fabs(lowVol-highVol)
if cost<bestCost:
bestDimension = dim
bestCutPoint = cutPoint
bestCost = cost
bestLow = low
bestMid = mid
bestHigh = high
# We have our bests - we now make this actual object, and then recurse to make the full tree...
zeroCount = 0
if len(bestLow)==0: zeroCount += 1
if len(bestHigh)==0: zeroCount += 1
if zeroCount!=2:
self.leaf = True
self.data = bestLow + bestMid + bestHigh
else:
self.leaf = False
self.splitDim = bestDimension
self.split = bestCutPoint
self.low = SetAABB(bestLow)
if len(bestMid)!=0:
self.mid = SetAABB(bestMid)
else:
self.mid = None
self.high = SetAABB(bestHigh)
def within(self,node):
"""Returns an AABB that contains the given node, or None is none do."""
if self.leaf:
for aabb in self.data:
if aabb.within(node):
return aabb
return None
else:
if self.mid:
res = self.mid.within(node)
if res!=None: return res
if node.getPos(render)[self.splitDim]<self.split:
res = self.low.within(node)
if res!=None: return res
else:
res = self.high.within(node)
if res!=None: return res
return None
class Portal:
"""Defines a portal by its 4 vertices."""
def __init__(self):
self.verts = [(1.0,0.0,1.0),(-1.0,0.0,1.0),(-1.0,0.0,-1.0),(1.0,0.0,-1.0)]
self.aabb1 = None
self.aabb2 = None
def fromFace(self,aabb,dim,side):
"""Setup the portal from a face of the given aabb - you specify the dim of the face, with side==False meaning the low side and side==True meaning the high side. Will be anti-clockwise looking at it from inside the cube."""
if side:
side = 1
else:
side = 0
# Define square2d, remap it to 3D coordinates based on dim and side...
square2d = [(0,0),(0,1),(1,1),(1,0)]
def incDim(base):
ret = [0,0,0]
ret[(dim+1)%3] = base[0]
ret[(dim+2)%3] = base[1]
ret[dim] = side
return ret
square3d = map(incDim,square2d)
# Extract the 4 coordinates...
self.verts = []
for index in square3d:
coord = map(lambda d: aabb.bounds[d][index[d]],xrange(3))
self.verts.append(coord)
# If needed, reorder them so it's an anticlockwise ordering from the view of the centre of the aabb...
offsetC = map(lambda x: map(lambda a,b: a-b,x,aabb.centre),self.verts)
ind = sum(map(lambda i:offsetC[1][i]*(offsetC[0][(i+1)%3]*offsetC[2][(i+2)%3] - offsetC[0][(i+2)%3]*offsetC[2][(i+1)%3]),xrange(3)))
if ind<0.0:
self.verts = [self.verts[0],self.verts[3],self.verts[2],self.verts[1]]
def setupPortal(self,portal,portalNode,flip):
if flip:
order = [0,3,2,1] # reversed winding for the flipped portal
else:
order = [0,1,2,3]
c = map(lambda i:sum(map(lambda x:x[i],self.verts))/4.0,xrange(3))
portalNode.setPos(render,Vec3(c[0],c[1],c[2]))
portal.clearVertices()
for o in order:
portal.addVertex(Point3(self.verts[o][0] - c[0],self.verts[o][1] - c[1],self.verts[o][2] - c[2]))
def findPortals(aabbs,overlap = 1e-3):
"""Given a list of AABB's this finds all intersections and creates portals. To store the portals creates a variable in each aabb, portals = [[[],[]],[[],[]],[[],[]]] - first index is dimension, second index is low (0) and high (1), final list is all portals using that face. Returns the portals as a list. Will throw an error if the geometry is bad. Will modify the dimensions of the given aabb's to account for overlap."""
# Before we start add the portal variable to each aabb...
for aabb in aabbs:
aabb.portals = [[[],[]],[[],[]],[[],[]]]
# We process each dimension separately - this first loop is over the dimensions...
ret = []
for dim in xrange(3):
otherDim = [0,1,2]
del otherDim[dim]
# Iterate all aabbs and create a push event and pop event for each - push it on when you reach the minimum, pop it when you get to the maximum. (True,d,aabb) to push, (False,d,aabb) to pop...
events = []
for aabb in aabbs:
events.append((True,aabb.bounds[dim][0],aabb))
events.append((False,aabb.bounds[dim][1],aabb))
# Sort the events...
events.sort(key=lambda x: x[1])
# Iterate through the events in sequence - each time an aabb is pushed or popped check if it intercepts a face larger than it - if so add the relevant portal... (Partial interception is considered an error as it results in ambiguous behaviour. Multiple interception is also not allowed as it's an entire face that intercepts from our point of view. (Larger face can have multiple intercepts of course.))
state = dict() # Index by id of aabb's
for event in events:
if not event[0]:
# Pop event - remove its aabb from the state...
del state[id(event[2])]
# Check event aabb against existing aabbs for being the smaller face...
done = False
for key,aabb in state.iteritems():
# Verify that the sorting dimension is not contained, i.e. they overlap so a portal can be created...
if (event[2].bounds[dim][0]>aabb.bounds[dim][0]) == (event[2].bounds[dim][1]<aabb.bounds[dim][1]):
continue
# Check if bounds overlap, done such that we can detect corner overlaps...
withinCount = [0,0,0]
for od in otherDim:
if event[2].bounds[od][0]>aabb.bounds[od][0] and event[2].bounds[od][0]<aabb.bounds[od][1]:
withinCount[od] += 1
if event[2].bounds[od][1]>aabb.bounds[od][0] and event[2].bounds[od][1]<aabb.bounds[od][1]:
withinCount[od] += 1
if sum(withinCount)==4:
if done:
raise Exception('Double interception - each culling aabb face can only intercept one other cube as a fully contained face')
done = True
# We have an interception - update the relevant aabb to have only the slightest overlap then create the portal and finally arrange for all the links...
if event[0]:
event[2].bounds[dim][0] = aabb.bounds[dim][1] - overlap
evSide = 0
else:
event[2].bounds[dim][1] = aabb.bounds[dim][0] + overlap
evSide = 1
portal = Portal()
portal.fromFace(event[2],dim,not event[0])
ret.append(portal)
portal.aabb1 = event[2]
portal.aabb2 = aabb
event[2].portals[dim][evSide].append(portal)
aabb.portals[dim][(evSide+1)%2].append(portal)
elif len(filter(lambda x:x>0,withinCount))==2:
exp = 'Partial interception - culling aabbs cannot intercept at corners/edges due to undefinable behaviour - must only overlap with one face fully contained within another.'
exp += ' dimension = ' + str(dim) + '; within = ' + str(withinCount) + '; '
exp += str(event[2]) + ' against ' + str(aabb)
raise Exception(exp)
if event[0]:
# Push event - add the events aabb to the state...
state[id(event[2])] = event[2]
return ret
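A minimal construction sketch for the classes above (Python 2, like the module; within() also needs Panda3D's global `render` node). room_b's low-x face is fully contained in room_a's high-x face and overlaps it slightly, which is what findPortals() requires:
room_a = AABB((0.0, 0.0, 0.0), (4.0, 4.0, 3.0))
room_b = AABB((3.9, 1.0, 0.5), (8.0, 3.0, 2.5))  # pokes slightly into room_a
portals = findPortals([room_a, room_b])          # one portal on the shared face
tree = SetAABB([room_a, room_b])
# tree.within(nodepath) returns the containing AABB, or None.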
license: apache-2.0 | hash: 1,652,840,935,139,971,000 | line_mean: 36.817518 | line_max: 425 | alpha_frac: 0.615422 | autogenerated: false | ratio: 3.305263 | config_test: false | has_no_keywords: false | few_assignments: false

repo: ryanss/holidays.py | path: holidays/countries/mexico.py | copies: 1 | size: 4211
# -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Author: ryanss <ryanssdev@icloud.com> (c) 2014-2017
# dr-prodigy <maurizio.montel@gmail.com> (c) 2017-2020
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from datetime import date
from dateutil.relativedelta import relativedelta as rd, MO
from holidays.constants import FRI, SAT, SUN
from holidays.constants import JAN, FEB, MAR, MAY, SEP, NOV, DEC
from holidays.holiday_base import HolidayBase
class Mexico(HolidayBase):
def __init__(self, **kwargs):
self.country = 'MX'
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):
# New Year's Day
name = "Año Nuevo [New Year's Day]"
self[date(year, JAN, 1)] = name
if self.observed and date(year, JAN, 1).weekday() == SUN:
self[date(year, JAN, 1) + rd(days=+1)] = name + " (Observed)"
# The next year's observed New Year's Day can be in this year
# when it falls on a Friday (Jan 1st is a Saturday)
if self.observed and date(year, DEC, 31).weekday() == FRI:
self[date(year, DEC, 31)] = name + " (Observed)"
# Constitution Day
name = "Día de la Constitución [Constitution Day]"
if self.observed and year >= 2007:
self[date(year, FEB, 1) + rd(weekday=MO(+1))] = \
name + " (Observed)"
if year >= 1917:
self[date(year, FEB, 5)] = name
# Benito Juárez's birthday
name = "Natalicio de Benito Juárez [Benito Juárez's birthday]"
if self.observed and year >= 2007:
self[date(year, MAR, 1) + rd(weekday=MO(+3))] = \
name + " (Observed)"
if year >= 1917:
self[date(year, MAR, 21)] = name
# Labor Day
if year >= 1923:
name = "Día del Trabajo [Labour Day]"
self[date(year, MAY, 1)] = name
if self.observed and date(year, MAY, 1).weekday() == SAT:
self[date(year, MAY, 1) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, MAY, 1).weekday() == SUN:
self[date(year, MAY, 1) + rd(days=+1)] = name + " (Observed)"
# Independence Day
name = "Día de la Independencia [Independence Day]"
self[date(year, SEP, 16)] = name
if self.observed and date(year, SEP, 16).weekday() == SAT:
self[date(year, SEP, 16) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, SEP, 16).weekday() == SUN:
self[date(year, SEP, 16) + rd(days=+1)] = name + " (Observed)"
# Revolution Day
name = "Día de la Revolución [Revolution Day]"
if self.observed and year >= 2007:
self[date(year, NOV, 1) + rd(weekday=MO(+3))] = \
name + " (Observed)"
if year >= 1917:
self[date(year, NOV, 20)] = name
# Change of Federal Government
# Every six years--next observance 2018
name = "Transmisión del Poder Ejecutivo Federal"
name += " [Change of Federal Government]"
if year >= 1970 and (2096 - year) % 6 == 0:
self[date(year, DEC, 1)] = name
if self.observed and date(year, DEC, 1).weekday() == SAT:
self[date(year, DEC, 1) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, DEC, 1).weekday() == SUN:
self[date(year, DEC, 1) + rd(days=+1)] = name + " (Observed)"
# Christmas
name = "Navidad [Christmas]"
self[date(year, DEC, 25)] = name
if self.observed and date(year, DEC, 25).weekday() == SAT:
self[date(year, DEC, 25) + rd(days=-1)] = name + " (Observed)"
elif self.observed and date(year, DEC, 25).weekday() == SUN:
self[date(year, DEC, 25) + rd(days=+1)] = name + " (Observed)"
class MX(Mexico):
pass
class MEX(Mexico):
pass
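A minimal sketch of using the class above via the holidays package:
import holidays
from datetime import date

mx = holidays.Mexico(years=2020)
print(date(2020, 9, 16) in mx)   # True: Día de la Independencia
print(mx.get(date(2020, 2, 3)))  # Constitution Day (Observed), first Monday of February 2020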
license: mit | hash: -2,919,136,386,831,123,500 | line_mean: 37.888889 | line_max: 78 | alpha_frac: 0.562619 | autogenerated: false | ratio: 3.211009 | config_test: false | has_no_keywords: false | few_assignments: false

repo: Mausy5043/ubundiagd | path: daemon98.py | copies: 1 | size: 4199
#!/usr/bin/env python
# Based on previous work by
# Charles Menguy (see: http://stackoverflow.com/questions/10217067/implementing-a-full-python-unix-style-daemon-process)
# and Sander Marechal (see: http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/)
# Adapted by M.Hendrix [2015] (deprecated)
# daemon98.py uploads data to the server.
import syslog, traceback
import os, sys, shutil, glob, time, commands
from libdaemon import Daemon
import ConfigParser
import subprocess
DEBUG = False
leaf = os.path.realpath(__file__).split('/')[-2]
class MyDaemon(Daemon):
def run(self):
iniconf = ConfigParser.ConfigParser()
inisection = "98"
home = os.path.expanduser('~')
s = iniconf.read(home + '/' + leaf + '/config.ini')
if DEBUG: print "config file : ", s
if DEBUG: print iniconf.items(inisection)
reportTime = iniconf.getint(inisection, "reporttime")
cycles = iniconf.getint(inisection, "cycles")
samplesperCycle = iniconf.getint(inisection, "samplespercycle")
flock = iniconf.get(inisection, "lockfile")
samples = samplesperCycle * cycles # total number of samples averaged
sampleTime = reportTime/samplesperCycle # time [s] between samples
cycleTime = samples * sampleTime # time [s] per cycle
myname = os.uname()[1]
mount_path = '/srv/array1/dataspool/'
remote_path = mount_path + myname
remote_lock = remote_path + '/client.lock'
while True:
try:
startTime=time.time()
if os.path.exists(remote_path):
do_mv_data(remote_path)
else:
if DEBUG:print remote_path + " not available"
waitTime = sampleTime - (time.time() - startTime) - (startTime%sampleTime)
if (waitTime > 0):
if DEBUG:print "Waiting {0} s".format(waitTime)
time.sleep(waitTime)
except Exception as e:
if DEBUG:
print "Unexpected error:"
print e.message
syslog.syslog(syslog.LOG_ALERT,e.__doc__)
syslog_trace(traceback.format_exc())
raise
def do_mv_data(rpath):
hostlock = rpath + '/host.lock'
clientlock = rpath + '/client.lock'
count_internal_locks=1
# wait 5 seconds for processes to finish
time.sleep(5)
while os.path.isfile(hostlock):
if DEBUG:print "hostlock exists"
# wait while the server has locked the directory
time.sleep(1)
# server already sets the client.lock. Do it anyway.
lock(clientlock)
# prevent race conditions
while os.path.isfile(hostlock):
if DEBUG:print "hostlock exists. WTF?"
# wait while the server has locked the directory
time.sleep(1)
while (count_internal_locks > 0):
time.sleep(1)
count_internal_locks=0
for fname in glob.glob(r'/tmp/' + leaf + '/*.lock'):
count_internal_locks += 1
if DEBUG:print "{0} internal locks exist".format(count_internal_locks)
for fname in glob.glob(r'/tmp/' + leaf + '/*.csv'):
if os.path.isfile(clientlock) and not (os.path.isfile(rpath + "/" + os.path.split(fname)[1])):
if DEBUG:print "moving data " + fname
shutil.move(fname, rpath)
unlock(clientlock)
if DEBUG:print "unlocked..."
def lock(fname):
open(fname, 'a').close()
def unlock(fname):
if os.path.isfile(fname):
os.remove(fname)
def syslog_trace(trace):
# Log a python stack trace to syslog
log_lines = trace.split('\n')
for line in log_lines:
if line:
syslog.syslog(syslog.LOG_ALERT,line)
if __name__ == "__main__":
daemon = MyDaemon('/tmp/' + leaf + '/98.pid')
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'restart' == sys.argv[1]:
daemon.restart()
elif 'foreground' == sys.argv[1]:
# assist with debugging.
print "Debug-mode started. Use <Ctrl>+C to stop."
DEBUG = True
if DEBUG:
logtext = "Daemon logging is ON"
syslog.syslog(syslog.LOG_DEBUG, logtext)
daemon.run()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: {0!s} start|stop|restart|foreground".format(sys.argv[0])
sys.exit(2)
license: mit | hash: 4,576,095,632,037,689,300 | line_mean: 29.875 | line_max: 120 | alpha_frac: 0.6392 | autogenerated: false | ratio: 3.413821 | config_test: false | has_no_keywords: false | few_assignments: false

repo: Polarcraft/KbveBot | path: commands/timeuntil.py | copies: 1 | size: 2038
# Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import dateutil.parser
import dateutil.relativedelta
import datetime
from helpers import arguments
from helpers.command import Command
@Command(['timeuntil', 'timetill'], ['config'])
def cmd(send, msg, args):
"""Reports the difference between now and some specified time.
Syntax: {command} <time>
"""
parser = arguments.ArgParser(args['config'])
parser.add_argument('date', nargs='*', action=arguments.DateParser)
try:
cmdargs = parser.parse_args(msg)
except arguments.ArgumentException as e:
send(str(e))
return
if not cmdargs.date:
send("Time until when?")
return
delta = dateutil.relativedelta.relativedelta(cmdargs.date, datetime.datetime.now())
diff = "%s is " % cmdargs.date.strftime("%x")
if delta.years:
diff += "%d years " % (delta.years)
if delta.months:
diff += "%d months " % (delta.months)
if delta.days:
diff += "%d days " % (delta.days)
if delta.hours:
diff += "%d hours " % (delta.hours)
if delta.minutes:
diff += "%d minutes " % (delta.minutes)
if delta.seconds:
diff += "%d seconds " % (delta.seconds)
diff += "away"
send(diff)
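A condensed sketch of the delta formatting this command performs, outside the bot framework:
import datetime
import dateutil.relativedelta

target = datetime.datetime(2030, 1, 1)
delta = dateutil.relativedelta.relativedelta(target, datetime.datetime.now())
print('%d years %d months %d days away' % (delta.years, delta.months, delta.days))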
license: gpl-2.0 | hash: 7,107,087,741,139,688,000 | line_mean: 36.740741 | line_max: 134 | alpha_frac: 0.679588 | autogenerated: false | ratio: 3.781076 | config_test: false | has_no_keywords: false | few_assignments: false

repo: seewindcn/tortoisehg | path: src/tortoisehg/util/menuthg.py | copies: 1 | size: 11553
# menuthg.py - TortoiseHg shell extension menu
#
# Copyright 2009 Steve Borho <steve@borho.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
import os
from mercurial import hg, ui, node, error
from tortoisehg.util.i18n import _ as gettext
from tortoisehg.util import cachethg, paths, hglib
def _(msgid):
return {'id': msgid, 'str': gettext(msgid).encode('utf-8')}
thgcmenu = {
'commit': { 'label': _('Commit...'),
'help': _('Commit changes in repository'),
'icon': 'menucommit.ico'},
'init': { 'label': _('Create Repository Here'),
'help': _('Create a new repository'),
'icon': 'menucreaterepos.ico'},
'clone': { 'label': _('Clone...'),
'help': _('Create clone here from source'),
'icon': 'menuclone.ico'},
'status': { 'label': _('File Status'),
'help': _('Repository status & changes'),
'icon': 'menushowchanged.ico'},
'add': { 'label': _('Add Files...'),
'help': _('Add files to version control'),
'icon': 'menuadd.ico'},
'revert': { 'label': _('Revert Files...'),
'help': _('Revert file changes'),
'icon': 'menurevert.ico'},
'forget': { 'label': _('Forget Files...'),
'help': _('Remove files from version control'),
'icon': 'menurevert.ico'},
'remove': { 'label': _('Remove Files...'),
'help': _('Remove files from version control'),
'icon': 'menudelete.ico'},
'rename': { 'label': _('Rename File'),
'help': _('Rename file or directory'),
'icon': 'general.ico'},
'workbench': { 'label': _('Workbench'),
'help': _('View change history in repository'),
'icon': 'menulog.ico'},
'log': { 'label': _('File History'),
'help': _('View change history of selected files'),
'icon': 'menulog.ico'},
'shelve': { 'label': _('Shelve Changes'),
'help': _('Move changes between working dir and patch'),
'icon': 'menucommit.ico'},
'synch': { 'label': _('Synchronize'),
'help': _('Synchronize with remote repository'),
'icon': 'menusynch.ico'},
'serve': { 'label': _('Web Server'),
'help': _('Start web server for this repository'),
'icon': 'proxy.ico'},
'update': { 'label': _('Update...'),
'help': _('Update working directory'),
'icon': 'menucheckout.ico'},
'thgstatus': { 'label': _('Update Icons'),
'help': _('Update icons for this repository'),
'icon': 'refresh_overlays.ico'},
'userconf': { 'label': _('Global Settings'),
'help': _('Configure user wide settings'),
'icon': 'settings_user.ico'},
'repoconf': { 'label': _('Repository Settings'),
'help': _('Configure repository settings'),
'icon': 'settings_repo.ico'},
'shellconf': { 'label': _('Explorer Extension Settings'),
'help': _('Configure Explorer extension'),
'icon': 'settings_user.ico'},
'about': { 'label': _('About TortoiseHg'),
'help': _('Show About Dialog'),
'icon': 'menuabout.ico'},
'vdiff': { 'label': _('Diff to parent'),
'help': _('View changes using GUI diff tool'),
'icon': 'TortoiseMerge.ico'},
'hgignore': { 'label': _('Edit Ignore Filter'),
'help': _('Edit repository ignore filter'),
'icon': 'ignore.ico'},
'guess': { 'label': _('Guess Renames'),
'help': _('Detect renames and copies'),
'icon': 'detect_rename.ico'},
'grep': { 'label': _('Search History'),
'help': _('Search file revisions for patterns'),
'icon': 'menurepobrowse.ico'},
'dndsynch': { 'label': _('DnD Synchronize'),
'help': _('Synchronize with dragged repository'),
'icon': 'menusynch.ico'}}
_ALWAYS_DEMOTE_ = ('about', 'userconf', 'repoconf')
class TortoiseMenu(object):
def __init__(self, menutext, helptext, hgcmd, icon=None, state=True):
self.menutext = menutext
self.helptext = helptext
self.hgcmd = hgcmd
self.icon = icon
self.state = state
def isSubmenu(self):
return False
def isSep(self):
return False
class TortoiseSubmenu(TortoiseMenu):
def __init__(self, menutext, helptext, menus=[], icon=None):
TortoiseMenu.__init__(self, menutext, helptext, None, icon)
self.menus = menus[:]
def add_menu(self, menutext, helptext, hgcmd, icon=None, state=True):
self.menus.append(TortoiseMenu(menutext, helptext,
hgcmd, icon, state))
def add_sep(self):
self.menus.append(TortoiseMenuSep())
def get_menus(self):
return self.menus
def append(self, entry):
self.menus.append(entry)
def isSubmenu(self):
return True
class TortoiseMenuSep(object):
hgcmd = '----'
def isSubmenu(self):
return False
def isSep(self):
return True
class thg_menu(object):
def __init__(self, ui, promoted, name = "TortoiseHg"):
self.menus = [[]]
self.ui = ui
self.name = name
self.sep = [False]
self.promoted = promoted
def add_menu(self, hgcmd, icon=None, state=True):
if hgcmd in self.promoted:
pos = 0
else:
pos = 1
while len(self.menus) <= pos: #add Submenu
self.menus.append([])
self.sep.append(False)
if self.sep[pos]:
self.sep[pos] = False
self.menus[pos].append(TortoiseMenuSep())
self.menus[pos].append(TortoiseMenu(
thgcmenu[hgcmd]['label']['str'],
thgcmenu[hgcmd]['help']['str'], hgcmd,
thgcmenu[hgcmd]['icon'], state))
def add_sep(self):
self.sep = [True for _s in self.sep]
def get(self):
menu = self.menus[0][:]
for submenu in self.menus[1:]:
menu.append(TortoiseSubmenu(self.name, 'Mercurial', submenu, "hg.ico"))
menu.append(TortoiseMenuSep())
return menu
def __iter__(self):
return iter(self.get())
def open_repo(path):
root = paths.find_root(path)
if root:
try:
repo = hg.repository(ui.ui(), path=root)
return repo
except error.RepoError:
pass
except StandardError, e:
print "error while opening repo %s:" % path
print e
return None
class menuThg:
"""shell extension that adds context menu items"""
def __init__(self, internal=False):
self.name = "TortoiseHg"
promoted = []
pl = ui.ui().config('tortoisehg', 'promoteditems', 'commit,log')
for item in pl.split(','):
item = item.strip()
if item:
promoted.append(item)
if internal:
for item in thgcmenu.keys():
promoted.append(item)
for item in _ALWAYS_DEMOTE_:
if item in promoted:
promoted.remove(item)
self.promoted = promoted
def get_commands_dragdrop(self, srcfiles, destfolder):
"""
Get a list of commands valid for the current selection.
Commands are instances of TortoiseMenu, TortoiseMenuSep or TortoiseSubmenu
"""
# we can only accept dropping one item
if len(srcfiles) > 1:
return []
# open repo
drag_repo = None
drop_repo = None
drag_path = srcfiles[0]
drag_repo = open_repo(drag_path)
if not drag_repo:
return []
if drag_repo and drag_repo.root != drag_path:
return [] # dragged item must be a hg repo root directory
drop_repo = open_repo(destfolder)
menu = thg_menu(drag_repo.ui, self.promoted, self.name)
menu.add_menu('clone')
if drop_repo:
menu.add_menu('dndsynch')
return menu
def get_norepo_commands(self, cwd, files):
menu = thg_menu(ui.ui(), self.promoted, self.name)
menu.add_menu('clone')
menu.add_menu('init')
menu.add_menu('userconf')
menu.add_sep()
menu.add_menu('about')
menu.add_sep()
return menu
def get_commands(self, repo, cwd, files):
"""
Get a list of commands valid for the current selection.
Commands are instances of TortoiseMenu, TortoiseMenuSep or TortoiseSubmenu
"""
states = set()
onlyfiles = len(files) > 0
hashgignore = False
for f in files:
if not os.path.isfile(f):
onlyfiles = False
if f.endswith('.hgignore'):
hashgignore = True
states.update(cachethg.get_states(f, repo))
if not files:
states.update(cachethg.get_states(cwd, repo))
if cachethg.ROOT in states and len(states) == 1:
states.add(cachethg.MODIFIED)
changed = bool(states & set([cachethg.ADDED, cachethg.MODIFIED]))
modified = cachethg.MODIFIED in states
clean = cachethg.UNCHANGED in states
tracked = changed or modified or clean
new = bool(states & set([cachethg.UNKNOWN, cachethg.IGNORED]))
menu = thg_menu(repo.ui, self.promoted, self.name)
if changed or cachethg.UNKNOWN in states or 'qtip' in repo['.'].tags():
menu.add_menu('commit')
if hashgignore or new and len(states) == 1:
menu.add_menu('hgignore')
if changed or cachethg.UNKNOWN in states:
menu.add_menu('status')
# Visual Diff (any extdiff command)
has_vdiff = repo.ui.config('tortoisehg', 'vdiff', 'vdiff') != ''
if has_vdiff and modified:
menu.add_menu('vdiff')
if len(files) == 0 and cachethg.UNKNOWN in states:
menu.add_menu('guess')
elif len(files) == 1 and tracked: # needs ico
menu.add_menu('rename')
if files and new:
menu.add_menu('add')
if files and tracked:
menu.add_menu('remove')
if files and changed:
menu.add_menu('revert')
menu.add_sep()
if tracked:
menu.add_menu(files and 'log' or 'workbench')
if len(files) == 0:
menu.add_sep()
menu.add_menu('grep')
menu.add_sep()
menu.add_menu('synch')
menu.add_menu('serve')
menu.add_sep()
menu.add_menu('clone')
if repo.root != cwd:
menu.add_menu('init')
# add common menu items
menu.add_sep()
menu.add_menu('userconf')
if tracked:
menu.add_menu('repoconf')
menu.add_menu('about')
menu.add_sep()
return menu
|
gpl-2.0
| 6,529,017,472,903,865,000
| 33.281899
| 83
| 0.515624
| false
| 3.880752
| false
| false
| false
|
google/transperf
|
outparser.py
|
1
|
14206
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Processes transperf outputs including pcap files and kernel log files.
"""
import csv
import logging
import re
from scapy.all import PcapReader
LOG = logging.getLogger('transperf/outparser')
class Visitor(object):
"""The abstract base class for all the classes that process output files.
Visitors are added to transperf to process logs and pcap files in one pass,
*hopefully* with O(1) memory.
Visitor methods are called in the following sequence:
1) begin()
2) visit_conn()
3) visit_packet()
4) visit_klog()
5) end()
"""
def begin(self, exp, exp_dir, rcv_ip):
"""Called when the visitor should start a new experiment.
Args:
exp: The experiment object.
exp_dir: The experiment output directory.
rcv_ip: The receiver's IP address.
"""
pass
def end(self):
"""Called when all the output entries are passed to the visitor."""
pass
def visit_conn(self, ip, port, tool, cc, params, start, dur, tput):
"""Called for each connection.
Args:
ip: The ip address of the connection.
port: The port of the connection.
tool: The tool used in the experiment.
cc: The congestion control algorithm.
params: Parameters used for cc.
start: The relative start time in seconds.
dur: The duration of the connection in seconds.
tput: The throughput reported by the benchmarking
application (e.g., netperf).
"""
pass
def visit_klog(self, time, line, match):
"""Called for a new klog line.
The log lines of each connection are sorted by time, but the lines of
different connections can interleave.
Args:
time: Time of the log entry relative to the start time of the
experiment.
line: The raw content of the log file.
match: The dictionary of all grouped regex matches.
"""
pass
def visit_strmr_log(self, time, pline):
"""Called for a new streamer log line.
The log lines of each connection are sorted by time.
Args:
time: Time of the log entry relative to the start time of the
experiment.
pline: The dictionary of all relevant parsed fields of a log line.
"""
pass
def visit_ss_log(self, time, data):
"""Called for a new ss log entry.
The log entries are sorted by time.
Args:
time: The time of the log entry when run ss command.
data: The dictionary of all relevant parsed fields of a log line.
"""
pass
def visit_packet(self, time, packet):
"""Called when there is a new packet available to be processed.
The packets of each connection are sorted by time but packets of
different connections can interleave.
Args:
time: Captured time relative to the start time of the experiment.
packet: The packet parsed by scapy.
"""
pass
def visit_metric(self, metric):
"""Called when a metric is available to be processed.
Args:
metric: The metric of type metric.Metric.
"""
pass
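# Illustrative sketch (not part of transperf): a minimal Visitor that counts
# packets per experiment, following the call sequence documented above. The
# class name and aggregation logic are hypothetical.
class _ExamplePacketCounter(Visitor):
def begin(self, exp, exp_dir, rcv_ip):
self._count = 0
def visit_packet(self, time, packet):
self._count += 1
def end(self):
LOG.info('experiment saw %d packets', self._count)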
class SsLog(object):
"""Parses ss logs and provides the flows of the experiment.
Attributes:
__readers: The ss log file readers.
__entries: The most recent read entry from each log file. We keep this
list to make sure the entries are yielded sorted by time.
"""
def __init__(self, log_paths):
self.__readers = [open(path) for path in log_paths]
self.__times = [0] * len(log_paths)
self.__entries = [None] * len(log_paths)
def __read_sslog(self, i):
"""Read the next entry in file.
Args:
i: The index of the file reader.
Returns:
The next entry in file f. None if there is no entry.
"""
f = self.__readers[i]
if not f:
return None
time = self.__times[i]
line = f.readline()
if not line:
return None
while line.startswith('# '):
self.__times[i] = time = float(line[2:])
f.readline()
line = f.readline()
if not line:
return None
data = {}
port = line.strip()
port = int(port[port.rfind(':') + 1:])
data['port'] = port
line = f.readline()
if not line:
return None
stat = line.strip().split()
for item in stat:
if item.startswith('bytes_acked:'):
data['bytes_acked'] = int(item[item.rfind(':') + 1:])
elif item.startswith('retrans:'):
data['retrans'] = int(item[item.rfind('/') + 1:])
elif item.startswith('data_segs_out:'):
data['data_segs_out'] = int(item[item.rfind(':') + 1:])
elif item.startswith('rtt:'):
data['rtt'] = (
float(item[item.find(':') + 1:item.rfind('/')]) / 1000
)
elif item.startswith('unacked:'):
data['unacked'] = int(item[item.find(':') + 1:])
return time, data
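# Format assumed by __read_sslog above (inferred from the parsing code, not
# from ss documentation): each sample begins with a '# <unix time>' line,
# followed by one header line that is skipped, a socket line whose trailing
# ':<port>' gives the local port, and a detail line with space-separated
# fields such as 'bytes_acked:N', 'retrans:X/N', 'rtt:avg/var' and 'unacked:N'.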
def __next_entry(self):
"""Returns the next entry ordered by time.
Returns:
The next entry. None if there is no entry.
"""
min_time = -1
min_index = -1
for i, entry in enumerate(self.__entries):
# If the reader has finished reading entries, check the next slot.
if not self.__readers[i]:
continue
# Fill the holes.
if not entry:
entry = self.__read_sslog(i)
self.__entries[i] = entry
# If entry is not set, it means that there is no entry in the
# reader. So, we can remove the reader.
if not entry:
self.__readers[i] = None
continue
entry_time = entry[0]
if min_index == -1 or entry_time < min_time:
min_index = i
min_time = entry_time
if min_index == -1:
return None
entry = self.__entries[min_index]
self.__entries[min_index] = None
return entry
def entries(self):
"""Entries stored in the ss log files.
Yields:
A tuple in the form of (relative time in sec, entry).
"""
min_time = -1
while True:
entry = self.__next_entry()
if not entry:
break
if min_time == -1:
min_time = entry[0]
yield (entry[0] - min_time, entry[1])
class Pcap(object):
"""Parses pcap files and provides the flows of the experiment.
Attributes:
__readers: The pcap readers.
__packets: The most recent read packet from each pcap file. We keep this
list to make sure the packets are yielded sorted by time.
"""
def __init__(self, pcap_paths):
self.__readers = [PcapReader(path) for path in pcap_paths]
self.__packets = [None] * len(pcap_paths)
def __next_packet(self):
"""Returns the next packet ordered by time.
Returns:
The next packet. None if there is no packet.
"""
min_time = -1
min_index = -1
for i, pkt in enumerate(self.__packets):
# If the reader has finished reading packets, check the next slot.
if not self.__readers[i]:
continue
# Fill the holes.
if not pkt:
self.__packets[i] = pkt = self.__readers[i].read_packet()
# If pkt is not set, it means that there is no packet in the reader.
# So, we can remove the reader.
if not pkt:
self.__readers[i] = None
continue
if min_index == -1 or pkt.time < min_time:
min_index = i
min_time = pkt.time
if min_index == -1:
return None
pkt = self.__packets[min_index]
self.__packets[min_index] = None
return pkt
def packets(self):
"""Packets stored in the pcap files.
Yields:
A tuple in the form of (relative time in sec, packet).
"""
min_time = -1
while True:
pkt = self.__next_packet()
if not pkt:
break
if min_time == -1:
min_time = pkt.time
yield (pkt.time - min_time, pkt)
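# Illustrative usage (not part of the module); the pcap file names are
# hypothetical:
#
#     for rel_time, pkt in Pcap(['snd.pcap', 'rcv.pcap']).packets():
#         handle(rel_time, pkt)
#
# Packets from all files are merged and yielded in capture-time order, with
# times made relative to the first packet seen.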
# These are regular expressions to parse congestion control output in
# kern-debug.log.
_LOG_PATTERNS = [
# BBR:
re.compile((
r'\w+\s+\d+\s+\d{2}:\d{2}:\d{2}\s+(\w[\w\d\-]+)\s+kernel:\s+'
r'\[\s*(?P<ts>[\d\.]+)\] BBR '
r'(?P<ip>(\d{1,3}\.){3}\d{1,3}):(?P<port>\d{1,6})\s+'
r'(?P<ack>[\d,]+):(?P<fack>\d+)\s+'
r'(?P<castate>\S)\s+(?P<mode>\S)\s+'
r'(?P<snd_cwnd>\d+)\s+'
r'br\s+(?P<extra_acked>\d+)\s+'
r'cr\s+(?P<crtt>-?\d+)\s+'
r'rtt\s+(?P<rtt>-?\d+)\s+'
r'd\s+(?P<rs_delivered>-?\d+)\s+'
r'i\s+(?P<interval_us>-?\d+)\s+'
r'mrtt\s+(?P<mrtt>-?\d+)\s+'
r'(?P<rs_app_limited>\S)bw\s+(?P<sample_bw>\d+)\s+'
r'bw\s+(?P<bw>\d+)\s+'
r'lb\s+(?P<unused1>\d+)\s+'
r'ib\s+(?P<interval_bw>\d+)\s+'
r'qb\s+(?P<pacing_bw>\d+)\s+'
r'a\s+(?P<acked>\d+)\s+'
r'if\s+(?P<inflight>\d+)\s+'
r'(?P<unused2>\S)\s+'
r'(?P<round_start>\S)\s+'
r'dl\s+(?P<tp_delivered>\d+)\s+'
r'l\s+(?P<tp_loss>\d+)\s+'
r'al\s+(?P<tp_app_limited>\d+)\s+'
r'#\s+(?P<unused3>\d+)\s+'
r't\s+(?P<targetcw>\d+)\s+'
r'(?P<reord_seen>r|\.)\s+'
r'(?P<prev_ca_state>O|D|C|R|L)\s+'
r'lr\s+(?P<lr_x1000>-?\d+)\s+'
r'er\s+(?P<ecn_x1000>-?\d+)\s+'
r'ea\s+(?P<ecn_alpha_x1000>-?\d+)\s+'
r'bwl\s+(?P<bw_lo>-?\d+)\s+'
r'il\s+(?P<inflight_lo>-?\d+)\s+'
r'ih\s+(?P<inflight_hi>-?\d+)\s+'
r'c\s+(?P<bw_probe_up_cnt>-?\d+)\s+'
r'v\s+(?P<version>-?\d+)\s+'
r'(?P<debug_event>[\S])\s+'
r'(?P<cycle_idx>\d+)\s+'
r'(?P<ack_phase>I|R|B|F|A)\s+'
r'(?P<bw_probe_samples>Y|N)'
)),
]
class KernLog(object):
"""Parses kern-debug.log files.
Attributes:
__log_paths: The paths of kernel log files.
"""
def __init__(self, log_paths):
self.__log_paths = log_paths
def lines(self):
"""Yields a tuple for each log entry.
Yields:
Tuples in the form of: (timestamp in sec, raw line, parsed line)
"""
min_ts = {}
for path in self.__log_paths:
f = open(path)
for l in f:
# All log patterns must have "ts" and "port" fields.
m = None
for p in _LOG_PATTERNS:
m = p.match(l.strip())
if m:
break
if not m:
LOG.debug('cannot match log line: %s', l)
continue
mdict = m.groupdict()
if 'ts' not in mdict or 'port' not in mdict:
LOG.debug('no port or timestamp in log line: %s', l)
continue
ts = float(mdict['ts'])
# Make timestamps relative to the timestamp of the first
# entry of this port in the log file.
port = mdict['port']
if port not in min_ts:
min_ts[port] = ts
ts = 0
else:
ts -= min_ts[port]
yield (ts, l, m.groupdict())
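# Illustrative usage (not part of the module); the log file name is
# hypothetical:
#
#     for ts, raw_line, fields in KernLog(['kern-debug.log']).lines():
#         handle(ts, fields['port'], fields['bw'])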
class ConnInfo(object):
"""Parses the exp_dir/conn.info file.
This file is dumped by the sender and includes a line per connection.
"""
def __init__(self, cinfo_files):
self.__port_infos = {}
for f in cinfo_files:
lines = open(f).readlines()
for l in lines:
l = l.strip()
port, conn_info = l.split('=', 1)
self.__port_infos[int(port)] = conn_info.split(',', 6)
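# Assumed conn.info line format (inferred from the parser): each line is
# '<port>=<comma-separated info>', where the value part is split on the first
# six commas into at most seven fields; their meaning is defined by the sender
# that writes the file, not here.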
def conn_info(self, port):
"""Connection information of the given port."""
return self.__port_infos[port]
def ports(self):
"""Ports that exist in the conn.info files."""
return self.__port_infos.keys()
class RecvInfo(object):
"""Parses the recv.info file that is dumped by receiver.
This file only contains the IP address of the receiver.
"""
def __init__(self, rcvinf_file):
f = open(rcvinf_file)
self.ip = f.readlines()[0].strip()
f.close()
class ExpInfo(object):
"""Parses the exp.info file that is dumped by the orchestrator.
This file contains a readable string representation of the experiment.
"""
def __init__(self, expinf_file):
f = open(expinf_file)
self.__lines = f.readlines()
f.close()
def info(self):
"""Returns the lines in the exp.info file."""
return self.__lines
def fields(self):
"""Returns a dictionary of experiment parameters and their values."""
field_dict = {}
for l in self.__lines:
p, v = l.strip().split('=', 1)
field_dict[p] = v
return field_dict
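# Assumed exp.info format (inferred from fields()): one 'name=value' pair per
# line; only the first '=' separates the name, so values may contain '='.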
|
apache-2.0
| -1,043,250,681,706,200,200
| 29.815618
| 80
| 0.518795
| false
| 3.827047
| false
| false
| false
|
mnahm5/django-estore
|
Lib/site-packages/awscli/customizations/ec2/protocolarg.py
|
1
|
1400
|
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
This customization allows the user to specify the values "tcp", "udp",
or "icmp" as values for the --protocol parameter. The actual Protocol
parameter of the operation accepts only integer protocol numbers.
"""
def _fix_args(params, **kwargs):
key_name = 'Protocol'
if key_name in params:
if params[key_name] == 'tcp':
params[key_name] = '6'
elif params[key_name] == 'udp':
params[key_name] = '17'
elif params[key_name] == 'icmp':
params[key_name] = '1'
elif params[key_name] == 'all':
params[key_name] = '-1'
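# Illustrative example (not part of the module): given parameters parsed from
# '--protocol udp', the hook rewrites the value in place before the request
# is built:
#
#     params = {'Protocol': 'udp'}
#     _fix_args(params)
#     assert params == {'Protocol': '17'}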
def register_protocol_args(cli):
cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',
_fix_args)
cli.register('before-parameter-build.ec2.ReplaceNetworkAclEntry',
_fix_args)
|
mit
| -8,376,659,984,141,722,000
| 36.837838
| 73
| 0.664286
| false
| 3.763441
| false
| false
| false
|
nce/sedater
|
sedater/test/test_options.py
|
1
|
2109
|
# ./sedater/test/test_options.py
# Author: Ulli Goschler <ulligoschler@gmail.com>
# Created: Mon, 05.10.2015 - 12:59:56
# Modified: Thu, 10.12.2015 - 19:41:38
import unittest
from sedater.options import CLIParser
class TestCommandLineParameters(unittest.TestCase):
def setUp(self):
self.cli = CLIParser()
def test_default_settings(self):
self.cli.parseForSedater(['foo'])
self.assertFalse(self.cli.args.csv_headers)
self.assertFalse(self.cli.args.left_calibration)
self.assertFalse(self.cli.args.right_calibration)
self.assertFalse(self.cli.args.output_dir)
def test_toggle_csv_header(self):
self.cli.parseForSedater(['foo'])
self.assertFalse(self.cli.args.csv_headers)
self.cli.parseForSedater(['-c', 'foo'])
self.assertTrue(self.cli.args.csv_headers)
self.cli.parseForSedater(['--csv-headers', 'foo'])
self.assertTrue(self.cli.args.csv_headers)
def test_left_calibration_file(self):
ref = res = 'foobar'
self.cli.parseForSedater(['-l', ref, 'barfoo'])
self.assertEquals(self.cli.args.left_calibration, res)
self.cli.parseForSedater(['--left-calibration', ref, 'barfoo'])
self.assertEquals(self.cli.args.left_calibration, res)
def test_right_calibration_file(self):
ref = res = 'foobar'
self.cli.parseForSedater(['-r', ref, 'barfoo'])
self.assertEquals(self.cli.args.right_calibration, res)
self.cli.parseForSedater(['--right-calibration', ref, 'barfoo'])
self.assertEquals(self.cli.args.right_calibration, res)
def test_output_dir(self):
ref = res = 'foobar'
self.cli.parseForSedater(['-o', ref, 'barfoo'])
self.assertEquals(self.cli.args.output_dir, res)
self.cli.parseForSedater(['--output-dir', ref, 'barfoo'])
self.assertEquals(self.cli.args.output_dir, res)
def test_input_source_arguments(self):
ref = res = ['foo', 'bar', 'foobar', 'barfoo']
self.cli.parseForSedater(ref)
self.assertEquals(self.cli.args.inputSource, ref)
|
mit
| 4,717,097,843,753,090,000
| 41.18
| 72
| 0.655287
| false
| 3.300469
| true
| false
| false
|
JohnReid/biopsy
|
Python/site_dpm/check-programs/map_factors.py
|
1
|
2225
|
#
# Copyright John Reid 2009
#
import os
tp_indices = [
1,
6,
14,
17,
24,
28
]
def go_ids_for_genes(genes):
import biopsy.identifiers.biomart as B
for row in B.quick_query(
dataset='mmusculus_gene_ensembl',
attributes=[
'ensembl_gene_id',
#'go_cellular_component_id',
'go_biological_process_id',
#'go_molecular_function_id'
],
filters=[('ensembl_gene_id', ','.join(genes))],
):
yield row
def genes_for_go_id(go_id):
import biopsy.identifiers.biomart as B
for row in B.quick_query(
dataset='mmusculus_gene_ensembl',
attributes=['ensembl_gene_id'],
filters=[('go', go_id)],
):
yield row[0]
from boost.graph import Graph
class LabelledGraph(Graph):
def __init__(self):
Graph.__init__(self)
self.labels = Graph.add_vertex_property(self, name='label', type='string')
self.vertex_map = {}
def add_labelled_vertex(self, label):
if label in self.vertex_map:
raise RuntimeError('Vertex for "%s" already in graph' % label)
v = self.add_vertex()
self.labels[v] = label
self.vertex_map[label] = v
return v
def get_vertex(self, label):
if label in self.vertex_map:
return self.vertex_map[label]
else:
return self.add_labelled_vertex(label)
def create_graph(factors, pssm_map):
"""
Create a bipartite graph representing which matrices map onto the factors.
"""
g = LabelledGraph()
for f in factors:
for matrix, domain in pssm_map.iteritems():
if f in domain:
g.add_edge(g.get_vertex(matrix), g.get_vertex(f))
return g
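# NOTE: `transcriptional_programs` and `pssm_map` are not defined in this
# script; the loop below assumes they are already bound in the executing
# namespace (e.g. by a driver script that runs this file).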
for tp_index in tp_indices:
tp = transcriptional_programs[tp_index]
print tp_index
print tp.tp_factors
g = create_graph(tp.tp_factors, pssm_map)
graphviz_file = 'tp-%03d-factors.dot' % tp_index
svg_file = 'tp-%03d-factors.svg' % tp_index
g.write_graphviz(graphviz_file)
os.system('dot %s -Tsvg -o %s' % (graphviz_file, svg_file))
|
mit
| 6,979,182,918,673,666,000
| 24.872093
| 82
| 0.571236
| false
| 3.407351
| false
| false
| false
|
jeffh/describe
|
describe/flags.py
|
1
|
5303
|
""" flags.py - Various constants that have special meaning in describe.
INIT - Represents a matcher be instanciated for initialization purposes only
NO_ARG - Represents no argument. This is Noner than None.
"""
__all__ = (
'NO_ARG', 'NO_ARGS', 'ANY_ARG', 'ANYTHING', 'ANY_ARGS', 'ANY_KWARGS', 'is_flag',
'params_match',
)
class Flag(object):
def __init__(self, name):
self.name = name
def __call__(self):
return self
def __repr__(self):
return 'flag(%s)' % self.name
INIT = Flag('INIT')
NO_ARG = Flag('NO_ARG')
NO_KWARGS = NO_ARGS = Flag('NO_ARGS')
# used for argument matching
ANY_ARG = Flag('ANY_ARG')
ANYTHING = Flag('ANYTHING')
ANY_ARGS = Flag('ANY_ARGS')
ANY_KWARGS = Flag('ANY_KWARGS')
class DynamicFlag(object):
def __repr__(self):
return getattr(self, 'name', self.__class__.__name__.lower())
def validate(self, argument):
raise NotImplementedError()
class Subclasses(DynamicFlag):
def __init__(self, cls):
self.cls = cls
def validate(self, argument):
try:
return issubclass(argument, self.cls)
except TypeError:
return False
class InstanceOf(DynamicFlag):
def __init__(self, cls):
self.cls = cls
def validate(self, argument):
return isinstance(argument, self.cls)
class Contains(DynamicFlag):
def __init__(self, item):
self.item = item
def validate(self, argument):
try:
return self.item in list(argument)
except TypeError:
return False
class IncludesPairs(DynamicFlag):
def __init__(self, **kwargs):
self.kwargs = kwargs
def validate(self, argument):
for key, value in self.kwargs.items():
try:
if argument[key] != value:
return False
except (IndexError, KeyError, TypeError):
return False
return True
class _Callable(DynamicFlag):
def __call__(self):
return self
def validate(self, argument):
return callable(argument)
Callable = _Callable()
class _AmountCompare(DynamicFlag):
def __init__(self, size):
self.size = size
def validate(self, argument):
try:
return self.cmp(argument, self.size)
except TypeError:
return False
def cmp(self, arg, value):
raise NotImplementedError()
class LengthOf(_AmountCompare):
def cmp(self, arg, value):
return len(arg) == value
class AtLeast(_AmountCompare):
def cmp(self, arg, value):
return arg > value
class AtLeastEqual(_AmountCompare):
def cmp(self, arg, value):
return arg >= value
class AtMost(_AmountCompare):
def cmp(self, arg, value):
return arg < value
class AtMostEqual(_AmountCompare):
def cmp(self, arg, value):
return arg <= value
def is_flag(value):
try:
return issubclass(value, Flag) or issubclass(value, DynamicFlag)
except TypeError:
return isinstance(value, Flag) or isinstance(value, DynamicFlag)
def __arg_is(arg, *flags):
if arg in flags:
return True
try:
tuple(arg)
except TypeError:
return False
if tuple(arg) in set((f,) for f in flags):
return True
return False
def args_match(actual_args, expected_args):
if __arg_is(expected_args, ANYTHING, ANY_ARGS):
return True
if __arg_is(expected_args, NO_ARG, NO_ARGS):
return not list(actual_args)
if len(actual_args) != len(expected_args):
return False
for aarg, earg in zip(actual_args, expected_args):
assert earg not in (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS), 'expected_args cannot have a list containing any of the following: (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS)'
if aarg == earg or earg is ANY_ARG:
continue
if isinstance(earg, DynamicFlag):
if earg.validate(aarg):
continue
return False
return True
def kwargs_match(actual_args, expected_args):
if __arg_is(expected_args, ANYTHING, ANY_KWARGS):
return True
if __arg_is(expected_args, NO_ARG, NO_KWARGS):
return not list(actual_args)
if len(actual_args) != len(expected_args):
return False
for (akey, aarg), (ekey, earg) in zip(sorted(actual_args.items()), sorted(expected_args.items())):
assert earg not in (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS), 'expected_args cannot have a list containing any of the following: (ANYTHING, ANY_ARGS, NO_ARG, NO_ARGS)'
if akey != ekey:
return False
if aarg == earg or earg is ANY_ARG:
continue
if isinstance(earg, DynamicFlag):
if earg.validate(aarg):
continue
return False
return True
def params_match(actual_args, actual_kwargs, expected_args, expected_kwargs):
if __arg_is(expected_args, ANYTHING, ANY_ARGS) or __arg_is(expected_kwargs, ANYTHING, ANY_KWARGS):
return True
return args_match(actual_args, expected_args) and kwargs_match(actual_kwargs, expected_kwargs)
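# Illustrative examples (not part of the module), derived from the matching
# rules above:
#
#     args_match((1, 2), (ANY_ARG, 2))        # -> True
#     args_match((), NO_ARGS)                 # -> True
#     kwargs_match({'a': 1}, {'a': 2})        # -> False
#     params_match((1,), {}, ANY_ARGS, {})    # -> True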
|
mit
| 4,358,649,939,190,283,000
| 27.461111
| 171
| 0.593815
| false
| 3.795991
| false
| false
| false
|
werdeil/pibooth
|
pibooth/controls/light.py
|
1
|
2328
|
# -*- coding: utf-8 -*-
import threading
from pibooth.controls import GPIO
class BlinkingThread(threading.Thread):
"""Thread which manage blinking LEDs synchronously.
"""
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True
self._leds = []
self._tick = 0.3
self._lock = threading.Lock()
self._stop_event = threading.Event()
self.start()
def register(self, led):
"""Add a new LED to manage.
"""
with self._lock:
if led not in self._leds:
self._leds.append(led)
def unregister(self, led):
"""Remove the given LED from the blinking management.
"""
with self._lock:
if led in self._leds:
self._leds.remove(led)
def run(self):
"""Cyclic call to the method :py:meth:`PtbLed.switch_on` and
:py:meth:`PtbLed.switch_off` of the registered LED.
"""
sequence = ['switch_on', 'switch_off']
while not self._stop_event.is_set():
for func_name in sequence:
with self._lock:
for led in self._leds:
getattr(led, func_name)()
if self._stop_event.wait(self._tick):
return # Stop requested
def stop(self):
"""Stop the thread.
"""
self._stop_event.set()
self.join()
class PtbLed(object):
"""LED management.
"""
_blinking_thread = BlinkingThread()
def __init__(self, pin):
self.pin = pin
GPIO.setup(pin, GPIO.OUT)
def switch_on(self):
"""Switch on the LED.
"""
if threading.current_thread() != self._blinking_thread:
self._blinking_thread.unregister(self)
GPIO.output(self.pin, GPIO.HIGH)
def switch_off(self):
"""Switch off the LED.
"""
if threading.current_thread() != self._blinking_thread:
self._blinking_thread.unregister(self)
GPIO.output(self.pin, GPIO.LOW)
def blink(self):
"""Blink the LED.
"""
self._blinking_thread.register(self)
def quit(self):
"""Switch off and stop the blinking thread.
"""
self.switch_off()
self._blinking_thread.stop()
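# Illustrative usage (not part of the module); the GPIO pin number is
# hypothetical:
#
#     led = PtbLed(7)
#     led.blink()        # registered with the shared BlinkingThread
#     led.switch_on()    # unregisters from blinking, drives the pin high
#     led.quit()         # switches off and stops the blinking thread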
|
mit
| 8,372,595,440,265,631,000
| 24.866667
| 68
| 0.531357
| false
| 3.854305
| false
| false
| false
|
agconti/njode
|
env/lib/python2.7/site-packages/allauth/account/adapter.py
|
1
|
11030
|
import warnings
import json
from django.conf import settings
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.template import TemplateDoesNotExist
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives, EmailMessage
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib import messages
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
from ..utils import (import_attribute, get_user_model,
generate_unique_username,
resolve_url)
from . import app_settings
class DefaultAccountAdapter(object):
def stash_verified_email(self, request, email):
request.session['account_verified_email'] = email
def unstash_verified_email(self, request):
ret = request.session.get('account_verified_email')
request.session['account_verified_email'] = None
return ret
def is_email_verified(self, request, email):
"""
Checks whether or not the email address is already verified
beyond allauth scope, for example, by having accepted an
invitation before signing up.
"""
ret = False
verified_email = request.session.get('account_verified_email')
if verified_email:
ret = verified_email.lower() == email.lower()
return ret
def format_email_subject(self, subject):
prefix = app_settings.EMAIL_SUBJECT_PREFIX
if prefix is None:
site = Site.objects.get_current()
prefix = u"[{name}] ".format(name=site.name)
return prefix + force_text(subject)
def render_mail(self, template_prefix, email, context):
"""
Renders an e-mail to `email`. `template_prefix` identifies the
e-mail that is to be sent, e.g. "account/email/email_confirmation"
"""
subject = render_to_string('{0}_subject.txt'.format(template_prefix),
context)
# remove superfluous line breaks
subject = " ".join(subject.splitlines()).strip()
subject = self.format_email_subject(subject)
bodies = {}
for ext in ['html', 'txt']:
try:
template_name = '{0}_message.{1}'.format(template_prefix, ext)
bodies[ext] = render_to_string(template_name,
context).strip()
except TemplateDoesNotExist:
if ext == 'txt' and not bodies:
# We need at least one body
raise
if 'txt' in bodies:
msg = EmailMultiAlternatives(subject,
bodies['txt'],
settings.DEFAULT_FROM_EMAIL,
[email])
if 'html' in bodies:
msg.attach_alternative(bodies['html'], 'text/html')
else:
msg = EmailMessage(subject,
bodies['html'],
settings.DEFAULT_FROM_EMAIL,
[email])
msg.content_subtype = 'html' # Main content is now text/html
return msg
def send_mail(self, template_prefix, email, context):
msg = self.render_mail(template_prefix, email, context)
msg.send()
def get_login_redirect_url(self, request):
"""
Returns the default URL to redirect to after logging in. Note
that URLs passed explicitly (e.g. by passing along a `next`
GET parameter) take precedence over the value returned here.
"""
assert request.user.is_authenticated()
url = getattr(settings, "LOGIN_REDIRECT_URLNAME", None)
if url:
warnings.warn("LOGIN_REDIRECT_URLNAME is deprecated, simply"
" use LOGIN_REDIRECT_URL with a URL name",
DeprecationWarning)
else:
url = settings.LOGIN_REDIRECT_URL
return resolve_url(url)
def get_logout_redirect_url(self, request):
"""
Returns the URL to redirect to after the user logs out. Note that
this method is also invoked if you attempt to log out while no user
is logged in. Therefore, request.user is not guaranteed to be an
authenticated user.
"""
return resolve_url(app_settings.LOGOUT_REDIRECT_URL)
def get_email_confirmation_redirect_url(self, request):
"""
The URL to return to after successful e-mail confirmation.
"""
if request.user.is_authenticated():
if app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL:
return \
app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL
else:
return self.get_login_redirect_url(request)
else:
return app_settings.EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL
def is_open_for_signup(self, request):
"""
Checks whether or not the site is open for signups.
Next to simply returning True/False you can also intervene in the
regular flow by raising an ImmediateHttpResponse.
"""
return True
def new_user(self, request):
"""
Instantiates a new User instance.
"""
user = get_user_model()()
return user
def populate_username(self, request, user):
"""
Fills in a valid username, if required and missing. If the
username is already present it is assumed to be valid
(unique).
"""
from .utils import user_username, user_email, user_field
first_name = user_field(user, 'first_name')
last_name = user_field(user, 'last_name')
email = user_email(user)
username = user_username(user)
if app_settings.USER_MODEL_USERNAME_FIELD:
user_username(user,
username
or generate_unique_username([first_name,
last_name,
email,
'user']))
def save_user(self, request, user, form, commit=True):
"""
Saves a new `User` instance using information provided in the
signup form.
"""
from .utils import user_username, user_email, user_field
data = form.cleaned_data
first_name = data.get('first_name')
last_name = data.get('last_name')
email = data.get('email')
username = data.get('username')
user_email(user, email)
user_username(user, username)
user_field(user, 'first_name', first_name or '')
user_field(user, 'last_name', last_name or '')
if 'password1' in data:
user.set_password(data["password1"])
else:
user.set_unusable_password()
self.populate_username(request, user)
if commit:
# Ability not to commit makes it easier to derive from
# this adapter by adding extra processing before the save
user.save()
return user
def clean_username(self, username):
"""
Validates the username. You can hook into this if you want to
(dynamically) restrict what usernames can be chosen.
"""
from django.contrib.auth.forms import UserCreationForm
USERNAME_REGEX = UserCreationForm().fields['username'].regex
if not USERNAME_REGEX.match(username):
raise forms.ValidationError(_("Usernames can only contain "
"letters, digits and @/./+/-/_."))
# TODO: Add regexp support to USERNAME_BLACKLIST
username_blacklist_lower = [ub.lower() for ub in app_settings.USERNAME_BLACKLIST]
if username.lower() in username_blacklist_lower:
raise forms.ValidationError(_("Username can not be used. "
"Please use other username."))
username_field = app_settings.USER_MODEL_USERNAME_FIELD
assert username_field
user_model = get_user_model()
try:
query = {username_field + '__iexact': username}
user_model.objects.get(**query)
except user_model.DoesNotExist:
return username
raise forms.ValidationError(_("This username is already taken. Please "
"choose another."))
def clean_email(self, email):
"""
Validates an email value. You can hook into this if you want to
(dynamically) restrict what email addresses can be chosen.
"""
return email
def add_message(self, request, level, message_template,
message_context={}, extra_tags=''):
"""
Wrapper of `django.contrib.messages.add_message`, that reads
the message text from a template.
"""
if 'django.contrib.messages' in settings.INSTALLED_APPS:
try:
message = render_to_string(message_template,
message_context).strip()
if message:
messages.add_message(request, level, message,
extra_tags=extra_tags)
except TemplateDoesNotExist:
pass
def ajax_response(self, request, response, redirect_to=None, form=None):
data = {}
status = 200
if redirect_to:
status = 200
data['location'] = redirect_to
if form:
if form.is_valid():
status = 200
else:
status = 400
data['form_errors'] = form._errors
if hasattr(response, 'render'):
response.render()
data['html'] = response.content.decode('utf8')
return HttpResponse(json.dumps(data),
status=status,
content_type='application/json')
def login(self, request, user):
from django.contrib.auth import login
# HACK: This is not nice. The proper Django way is to use an
# authentication backend
if not hasattr(user, 'backend'):
user.backend \
= "allauth.account.auth_backends.AuthenticationBackend"
login(request, user)
def confirm_email(self, request, email_address):
"""
Marks the email address as confirmed on the db
"""
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
def set_password(self, user, password):
user.set_password(password)
user.save()
def get_adapter():
return import_attribute(app_settings.ADAPTER)()
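# Customization sketch (not part of this module): allauth resolves the active
# adapter from the ACCOUNT_ADAPTER setting, so behaviour can be overridden by
# pointing that setting at a subclass, e.g. one that closes registration:
#
#     class ClosedSignupAdapter(DefaultAccountAdapter):
#         def is_open_for_signup(self, request):
#             return False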
|
bsd-3-clause
| 4,666,278,003,445,794,000
| 37.566434
| 89
| 0.568722
| false
| 4.661877
| false
| false
| false
|
uclouvain/osis
|
base/migrations/0577_auto_20210201_1741.py
|
1
|
4928
|
# Generated by Django 2.2.14 on 2021-02-01 17:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0576_populate_not_null_fields'),
# ('continuing_education', '0084_auto_20210127_1119'),
('dissertation', '0051_auto_20191211_1458'),
]
operations = [
migrations.AlterField(
model_name='offerenrollment',
name='education_group_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='base.EducationGroupYear'),
),
migrations.AlterField(
model_name='offeryearcalendar',
name='education_group_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.EducationGroupYear'),
),
migrations.AlterField(
model_name='sessionexam',
name='education_group_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.EducationGroupYear'),
),
migrations.RemoveField(
model_name='offeryear',
name='academic_year',
),
migrations.RemoveField(
model_name='offeryear',
name='campus',
),
migrations.RemoveField(
model_name='offeryear',
name='country',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_administration',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_administration_fac',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_management',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_management_fac',
),
migrations.RemoveField(
model_name='offeryear',
name='grade_type',
),
migrations.RemoveField(
model_name='offeryear',
name='offer',
),
migrations.RemoveField(
model_name='offeryear',
name='offer_type',
),
migrations.RemoveField(
model_name='offeryear',
name='parent',
),
migrations.RemoveField(
model_name='offeryeardomain',
name='domain',
),
migrations.RemoveField(
model_name='offeryeardomain',
name='offer_year',
),
migrations.AlterUniqueTogether(
name='offeryearentity',
unique_together=None,
),
migrations.RemoveField(
model_name='offeryearentity',
name='education_group_year',
),
migrations.RemoveField(
model_name='offeryearentity',
name='entity',
),
migrations.RemoveField(
model_name='offeryearentity',
name='offer_year',
),
migrations.RemoveField(
model_name='structure',
name='organization',
),
migrations.RemoveField(
model_name='structure',
name='part_of',
),
migrations.RemoveField(
model_name='structureaddress',
name='country',
),
migrations.RemoveField(
model_name='structureaddress',
name='structure',
),
migrations.RemoveField(
model_name='entitymanager',
name='structure',
),
migrations.RemoveField(
model_name='learningunityear',
name='structure',
),
migrations.RemoveField(
model_name='offerenrollment',
name='offer_year',
),
migrations.RemoveField(
model_name='offeryearcalendar',
name='offer_year',
),
migrations.RemoveField(
model_name='sessionexam',
name='offer_year',
),
migrations.AlterUniqueTogether(
name='programmanager',
unique_together={('person', 'education_group')},
),
migrations.DeleteModel(
name='ExternalOffer',
),
migrations.DeleteModel(
name='Offer',
),
migrations.DeleteModel(
name='OfferType',
),
migrations.DeleteModel(
name='OfferYearDomain',
),
migrations.DeleteModel(
name='OfferYearEntity',
),
migrations.DeleteModel(
name='Structure',
),
migrations.DeleteModel(
name='StructureAddress',
),
migrations.RemoveField(
model_name='programmanager',
name='offer_year',
),
migrations.DeleteModel(
name='OfferYear',
),
]
|
agpl-3.0
| -2,530,800,407,347,505,000
| 28.508982
| 111
| 0.52638
| false
| 4.550323
| false
| false
| false
|
mleger45/turnex
|
turnex/urls.py
|
1
|
1209
|
"""turnex URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.views.static import serve
from django.conf import settings
urlpatterns = [
url(r'^admin/', admin.site.urls, name="admin"),
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework'), name="api_auth"),
url(r'api/v1/', include('api.v1.urls'), name="api"),
url(r'turnex/', include('msn.urls', namespace='msn')),
url(r'^media/(?P<path>.*)$', serve, {
'document_root': settings.MEDIA_ROOT,
}),
]
|
mit
| -6,026,112,504,968,619,000
| 39.3
| 79
| 0.656741
| false
| 3.524781
| false
| false
| false
|
ehliang/myo-unlock
|
Myo_unlock.py
|
1
|
5681
|
#This program takes simple input from the Myo to unlock a complex password string and log in to email
#Further development would allow it to log into social media accounts/computers
#A program by Ethan Liang
from __future__ import print_function
import myo as libmyo; libmyo.init()
import time
import sys
import smtplib
import getpass
import email.header
import re
import datetime
import json
import email
import requests
gesturesGiven = []
userInput = []
userPassword = []
originalUsernameString = ""
originalPasswordString = ""
activated = False
useringesture = ""
userpasscheck = ""
count = 1 # index of the gesture currently being recorded
confirm2 = 0 # menu choice captured in Listener.record(); read by on_pose()
class Listener(libmyo.DeviceListener):
"""
Listener implementation. Return False from any function to
stop the Hub.
"""
interval = 0.05 # Output only 0.05 seconds
pose_run = False
def on_connect(self, myo, timestamp):
print("Hello, Myo!")
def on_disconnect(self, myo, timestamp):
print("Goodbye, Myo!")
# def on_orientation_data(self, myo, timestamp, quat):
# print("Orientation:", quat.x, quat.y, quat.z, quat.w)
def passcheck(self):
global activated
print("Enter the password. Please make a gesture. When finished with your combination, enter 3, otherwise enter any other character after each gesture.")
userPassword.append(userpasscheck)
print("Detected: " + str(userpasscheck))
confirm3 = raw_input()
if confirm3 == "3":
activated = True
if userPassword == userInput:
print(userInput)
fromaddr = originalUsernameString
toaddrs = "ethanliang@live.com"
msg = "Test"
username = originalUsernameString
password = originalPasswordString
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
server.login(username,password)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
activated=True
else:
print("Error")
activated=True
else:
activated = False
def record(self):
global count, activated, confirm2
print("Please make a gesture, this is the " + str(count) + " character of the password. When finished with your combination, press 0, otherwise enter any character")
count += 1
activated = True
userInput.append(useringesture)
print("Detected: " + str(useringesture))
confirm = raw_input()
if confirm == "0":
# activated = True
print("Was your password string " + str(userInput) + "? If yes, enter 3. Otherwise, enter 4. ")
confirm2 = int(raw_input())
if confirm2 == 3:
print("abc")
activated = False
self.passcheck()
confirm = "p"
elif confirm2 == 4:
del userInput[:]
activated = False
else:
activated = False
# print("Was your gesture" + str(useringesture) + "? Please enter yes or no")
# print("Was your gesture" + str(useringesture) + "? Please enter yes or no")
# confirm = raw_input()
# while confirm != "yes" and confirm != "no":
# print("Was your gesture" + str(useringesture) + "? Please enter yes or no")
# confirm = raw_input()
# if confirm == "yes":
#def keyPressHandler(event):
# if event.keysym == "0":
# activated = True
def on_pose(self, myo, timestamp, pose):
global useringesture, userpasscheck, confirm2
if activated == False:
if pose!= libmyo.Pose.rest and confirm2==3:
userpasscheck = pose
self.passcheck()
elif pose!= libmyo.Pose.rest:
useringesture = pose
self.record()
#count+=1
# if pose == libmyo.Pose.fist:
# print("Don't show me 'ya fist!")
# gesturesGiven.append(pose)
# print(gesturesGiven[0])
# #Stops the Hub
# if pose == libmyo.Pose.wave_out:
# print("abcd")
# gesturesGiven.append(pose)
# print(gesturesGiven)
# return False
# if self.pose_run:
# return
# self.pose_run = True
if userPassword and userPassword == userInput:
originalUsernameString = ""
originalPasswordString = ""
fromaddr = originalUsernameString
toaddrs = ""
msg = "Test"
username = originalUsernameString
password = originalPasswordString
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
server.login(username,password)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
def main():
print("Connecting to Myo ... Use CTRL^C to exit.")
print("If nothing happens, make sure the Bluetooth adapter is plugged in,")
print("Myo Connect is running and your Myo is put on.")
hub = libmyo.Hub()
hub.set_locking_policy(libmyo.LockingPolicy.none)
hub.run(1, Listener())
# Listen to keyboard interrupts and stop the hub in that case.
try:
while hub.running:
time.sleep(0.25)
except KeyboardInterrupt:
print("\nQuitting ...")
finally:
print("Shutting down hub...")
hub.shutdown()
if __name__ == '__main__':
main()
|
mit
| 8,310,767,131,224,216,000
| 28.588542
| 172
| 0.556064
| false
| 4.131636
| false
| false
| false
|
techinc/techinc_badge
|
pathtokicad/pathtokicad.py
|
1
|
6404
|
#!/usr/bin/python
import sys, math
fill_paths = [
("23", "soldermask.path"), # soldermask front
# ("21", "silkscreen.path"),
# ("15", "copper_top.path"),
("15", "copper_top_x.path"),
("0", "copper_bottom.path"),
("0", "battery_holder.path"),
("22", "battery_holder_mask.path"),
("21", "ispmark.path"),
("0", "safetypin.path"),
("22", "safetypin.path"),
]
segment_paths = [
("21", "silkscreen.segments", .9),
("28", "edges_round.segments", .9),
# ("28", "edges.segments", .9),
("20", "techincnl.segments", .9),
]
pads = [
( (-129.50091,49.85), 2, 3 )
]
vias = [
( 10, 10),
( 20, 10),
( 10, 20),
( 20, 20),
]
name = "techinc_badge"
start = cur = None
cubic_sections = 32
in_dpi, out_dpi = 90., 10000.
scale = out_dpi/in_dpi
def dist(a, b):
ax, ay = a
bx, by = b
return math.sqrt((ax-bx)**2 + (ay-by)**2)
def set_cur(newcur):
global cur
x, y = cur = newcur
def interpolate(pos1, pos2, d):
x1, y1 = pos1
x2, y2 = pos2
return ( x1*(1-d) + x2*d, y1*(1-d) + y2*d )
def get_abs(coords):
x, y = cur
dx, dy = coords
return (x+dx, y+dy)
def coord_fmt( coords ):
x, y = coords
return "%d %d" % ( round(x*scale), round(y*scale) )
def output_line( coords ):
set_cur(coords)
return [ "Dl " + coord_fmt(coords) ]
def output_rel_line( coords ):
return output_line(get_abs(coords))
def output_move( coords ):
global start
if start == None:
start = coords
set_cur(coords)
return [ "Dl " + coord_fmt(coords) ]
def output_rel_move( coords ):
return output_move(get_abs(coords))
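# output_cubic flattens a cubic Bezier curve by evaluating it at n parameter
# values with repeated linear interpolation (de Casteljau); n adapts to the
# chord length and is capped at cubic_sections.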
def output_cubic( guide1, guide2, end ):
start = cur
n = min(int(dist(start, end)*scale/40.)+1, cubic_sections)
v = []
for i in xrange(1, n+1):
d = i/float(n)
a = interpolate(start, guide1, d)
b = interpolate(guide1, guide2, d)
c = interpolate(guide2, end, d)
ab = interpolate(a, b, d)
bc = interpolate(b, c, d)
abc = interpolate(ab, bc, d)
v += output_line(abc)
return v
def output_line_segment( coords, layer, width ):
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(coords),width*scale,layer)
set_cur(coords)
def output_cubic_segment( guide1, guide2, end, layer, width ):
start = cur
n = min(int(dist(start, end)*scale/40.)+1, cubic_sections)
for i in xrange(1, n+1):
d = i/float(n)
a = interpolate(start, guide1, d)
b = interpolate(guide1, guide2, d)
c = interpolate(guide2, end, d)
ab = interpolate(a, b, d)
bc = interpolate(b, c, d)
abc = interpolate(ab, bc, d)
output_line_segment(abc, layer, width)
def output_rel_cubic( guide1, guide2, end ):
return output_cubic( get_abs(guide1), get_abs(guide2), get_abs(end) )
def output_close():
global start
set_cur(start)
start = None
return [ "Dl " + coord_fmt(cur) ]
def get_coords(s):
return map(float, s)
def pad_at(coords):
return """$PAD
Sh "1" C 600 600 0 0 0
Dr 400 0 0
At STD N 00E0FFFF
Ne 0 ""
Po """+coord_fmt(coords)+"""
$EndPAD"""
def via_at(coords):
return """$TRACK
Po 3 """+coord_fmt(coords)+" "+coord_fmt(coords)+""" 350 -1
De 15 1 0 0 0
$EndTRACK"""
def pad_grid(coords, w, h, pitch=.1):
x, y = coords
v = []
for i in xrange(w):
for j in xrange(h):
v += [ pad_at( (x + pitch*in_dpi*i, y + pitch*in_dpi*j) ) ]
return '\n'.join(v)
def print_path(data, layer):
global start, cur
values = (x for x in data.replace(',', ' ').split(' ') if x != '' )
mode = 'z'
cur = (0.,0.)
start = None
v = []
for x in values:
if x[-1] == '\n':
x = x[:-1]
if x in 'mclMCL':
mode = x
continue
if x in 'zZ':
mode = x
if mode in 'zZ':
v += output_close()
print 'DP 0 0 0 0 %d 1 %s' % (len(v), layer)
print '\n'.join(v)
v = []
elif mode == 'm':
v += output_rel_move(get_coords((x, values.next())))
mode = 'l'
elif mode == 'M':
v += output_move(get_coords((x, values.next())))
mode = 'L'
elif mode == 'c':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
v += output_rel_cubic(get_coords(guide1), get_coords(guide2), get_coords(end))
elif mode == 'C':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
v += output_cubic(get_coords(guide1), get_coords(guide2), get_coords(end))
elif mode == 'l':
v += output_rel_line(get_coords((x, values.next())))
elif mode == 'L':
v += output_line(get_coords((x, values.next())))
else:
print "ERROR: " + x
sys.exit(1)
def print_segments(data, layer, width):
global start
values = (x for x in data.replace(',', ' ').split(' ') if x != '' )
mode = 'z'
set_cur( (0.,0.) )
start = cur
for x in values:
if x[-1] == '\n':
x = x[:-1]
if x in 'mclMCL':
mode = x
continue
if x in 'zZ':
mode = x
if mode in 'zZ':
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(start),width*scale,layer)
set_cur(start)
elif mode == 'm':
set_cur(get_abs(get_coords((x, values.next()))))
start = cur
mode = 'l'
elif mode == 'M':
set_cur(get_coords((x, values.next())))
start = cur
mode = 'L'
elif mode == 'l':
pos = get_abs(get_coords((x, values.next())))
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(pos),width*scale,layer)
set_cur(pos)
elif mode == 'L':
pos = get_coords((x, values.next()))
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(pos),width*scale,layer)
set_cur(pos)
elif mode == 'c':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
output_cubic_segment(get_abs(get_coords(guide1)), get_abs(get_coords(guide2)), get_abs(get_coords(end)),layer,width)
elif mode == 'C':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
output_cubic_segment(get_coords(guide1), get_coords(guide2), get_coords(end),layer,width)
else:
print "ERROR: " + x
sys.exit(1)
print """PCBNEW-LibModule-V1
$INDEX
"""
print name
print """$EndINDEX
$MODULE """ + name + """
Po 0 0 0 15 00000000 00000000 ~~
Li """ + name
for layer, filename in fill_paths:
f = open(filename)
print_path(f.read(1000000), layer)
f.close()
for layer, filename, width in segment_paths:
f = open(filename)
print_segments(f.read(1000000), layer, width)
f.close()
for topleft, w, h in pads:
print pad_grid(topleft, w, h)
#for coords in vias:
# print via_at( coords )
print """$EndMODULE """ + name + """
$EndLIBRARY"""
|
mit
| -4,837,021,785,273,147,000
| 20.275748
| 113
| 0.593379
| false
| 2.459293
| false
| false
| false
|
tobetter/linaro-image-tools
|
linaro_image_tools/hwpack/package_unpacker.py
|
1
|
2545
|
# Copyright (C) 2010, 2011, 2013 Linaro
#
# This file is part of Linaro Image Tools.
#
# Linaro Image Tools is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Linaro Image Tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Linaro Image Tools; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import logging
import os
import tempfile
from subprocess import PIPE
from shutil import rmtree
from linaro_image_tools import cmd_runner
logger = logging.getLogger(__name__)
class PackageUnpacker(object):
def __enter__(self):
self.tempdir = tempfile.mkdtemp()
return self
def __exit__(self, type, value, traceback):
if self.tempdir is not None and os.path.exists(self.tempdir):
rmtree(self.tempdir)
def get_path(self, package_file_name, file_name=''):
"""Get package or file path in unpacker tmp dir."""
package_dir = os.path.basename(package_file_name)
return os.path.join(self.tempdir, package_dir, file_name)
def unpack_package(self, package_file_name):
# We could extract only a single file, but since dpkg will pipe
# the entire package through tar anyway we might as well extract all.
unpack_dir = self.get_path(package_file_name)
if not os.path.isdir(unpack_dir):
os.mkdir(unpack_dir)
p = cmd_runner.run(["tar", "-C", unpack_dir, "-xf", "-"], stdin=PIPE)
cmd_runner.run(["dpkg", "--fsys-tarfile", package_file_name],
stdout=p.stdin).communicate()
p.communicate()
def get_file(self, package, file):
# File path passed here must not be absolute, or file from
# real filesystem will be referenced.
assert file and file[0] != '/'
self.unpack_package(package)
logger.debug("Unpacked package %s." % package)
temp_file = self.get_path(package, file)
assert os.path.exists(temp_file), "The file '%s' was " \
"not found in the package '%s'." % (file, package)
return temp_file
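# Illustrative usage (not part of the module); the package and member paths
# are hypothetical:
#
#     with PackageUnpacker() as unpacker:
#         path = unpacker.get_file('hwpack.deb', 'usr/lib/u-boot/u-boot.bin')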
|
gpl-3.0
| 7,460,180,015,763,455,000
| 37.560606
| 77
| 0.669155
| false
| 3.873668
| false
| false
| false
|
SCUEvals/scuevals-api
|
scuevals_api/models/api_key.py
|
1
|
1666
|
from sqlalchemy import func
from . import db
from .assoc import api_key_permission
from .permission import Permission
API_KEY_TYPE = 'api_key'
class APIKey(db.Model):
__tablename__ = 'api_keys'
id = db.Column(db.Integer, primary_key=True)
key = db.Column(db.Text, nullable=False, unique=True)
issued_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), nullable=False)
university_id = db.Column('university_id', db.Integer, db.ForeignKey('universities.id'), nullable=False)
university = db.relationship('University', back_populates='api_keys')
permissions = db.relationship(
'Permission', secondary=api_key_permission, back_populates='api_keys', passive_deletes=True
)
def _get_permissions(self):
return [permission.id for permission in self.permissions]
def _set_permissions(self, value):
while self.permissions:
del self.permissions[0]
for permission_id in value:
permission = Permission.query.get(permission_id)
if permission is None:
raise ValueError('permission does not exist: {}'.format(permission_id))
self.permissions.append(permission)
permissions_list = property(_get_permissions,
_set_permissions,
None,
'Property permissions_list is a simple wrapper for permissions relation')
def identity(self):
return {
'id': self.id,
'university_id': self.university_id,
'type': API_KEY_TYPE,
'permissions': self.permissions_list
}
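# Illustrative usage (not part of the module); the key string and permission
# ids are hypothetical:
#
#     api_key = APIKey(key='secret', university_id=1)
#     api_key.permissions_list = [1, 2]       # replaces the permission set
#     api_key.identity()['permissions']       # -> [1, 2]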
|
agpl-3.0
| -8,820,916,295,296,028,000
| 33
| 108
| 0.62425
| false
| 4.175439
| false
| false
| false
|
AI-comp/Orientation2015Problems
|
rime/basic/consts.py
|
1
|
3991
|
#!/usr/bin/python
#
# Copyright (c) 2011 Rime Project.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
RIMEROOT_FILE = 'RIMEROOT'
PROBLEM_FILE = 'PROBLEM'
SOLUTION_FILE = 'SOLUTION'
TESTS_FILE = 'TESTS'
STAMP_FILE = '.stamp'
IN_EXT = '.in'
DIFF_EXT = '.diff'
OUT_EXT = '.out'
EXE_EXT = '.exe'
JUDGE_EXT = '.judge'
CACHE_EXT = '.cache'
LOG_EXT = '.log'
VALIDATION_EXT = '.validation'
RIME_OUT_DIR = 'rime-out'
### Limit the width of help messages to 75 characters!
GLOBAL_HELP = """\
Rime is a tool for programming contest organizers to automate usual, boring
and error-prone process of problem set preparation. It supports various
programming contest styles like ACM-ICPC, TopCoder, etc. by plugins.
To see a brief description and available options of a command, try:
rime.py help <command>
"""
BUILD_HELP = """\
If the target is a project, Rime builds all problems recursively.
If the target is a problem, Rime builds all solutions and a testset
recursively.
If the target is a solution, Rime compiles the solution program specified
in SOLUTION file.
If the target is a testset, Rime compiles all necessary programs including
input generators, input validators, output judges, and a reference solution
that is automatically selected or explicitly specified in PROBLEM file.
Then it copies static input/output files (*.in, *.diff) into rime-out
directory, runs input generators, runs input validators against all
static/generated input files, and finally runs a reference solution over
them to generate reference output files.
<target> can be omitted to imply the target in the current working
directory.
If -k (--keep_going) is set, build does not stop even if a compile error
happens.
-j (--jobs) can be used to make build faster to allow several processes to
run in parallel.
"""
TEST_HELP = """\
If the target is a project, Rime runs tests of all problems recursively.
If the target is a problem, Rime runs tests of all solutions recursively.
If the target is a solution, Rime builds it and the testset of the problem,
and then runs the solution against a series of tests.
If the target is a testset, Rime runs tests of all solutions recursively.
<target> can be omitted to imply the target in the current working
directory.
If -k (--keep_going) is set, build does not stop even if a compile error
or a test failure happens.
-j (--jobs) can be used to make build and test faster to allow several
processes to run in parallel. If a test failed by time limit exceed in
parallelized tests, the same test is re-run after all other concurrent
processes are finished to see if it really does not run in the specified
time limit. You can always force this behavior not to run tests
concurrently by -p (--precise).
If -C (--cache_tests) is set, Rime skips unchanged tests which passed
previously.
"""
CLEAN_HELP = """\
Deletes files under corresponding directory in rime-out.
<target> can be omitted to imply the target in the current working
directory.
"""
|
mit
| 1,220,425,375,560,909,600
| 34.008772
| 79
| 0.761964
| false
| 3.815488
| true
| false
| false
|
wengzhilai/family
|
iSoft/entity/model.py
|
1
|
28429
|
# coding: utf-8
from sqlalchemy import Column, DateTime, ForeignKey, Integer, Numeric, String, Table, Text
from sqlalchemy.orm import relationship
from sqlalchemy.schema import FetchedValue
from flask_sqlalchemy import SQLAlchemy
from iSoft import db
class FaAppVersion(db.Model):
__tablename__ = 'fa_app_version'
ID = db.Column(db.Integer, primary_key=True)
IS_NEW = db.Column(db.Numeric(1, 0), nullable=False)
TYPE = db.Column(db.String(20), nullable=False)
REMARK = db.Column(db.String(1000))
UPDATE_TIME = db.Column(db.DateTime)
UPDATE_URL = db.Column(db.String(200))
class FaBulletin(db.Model):
__tablename__ = 'fa_bulletin'
ID = db.Column(db.Integer, primary_key=True)
TITLE = db.Column(db.String(255), nullable=False)
PIC = db.Column(db.String(255))
TYPE_CODE = db.Column(db.String(50))
CONTENT = db.Column(db.Text)
USER_ID = db.Column(db.Integer)
PUBLISHER = db.Column(db.String(255), nullable=False)
ISSUE_DATE = db.Column(db.DateTime, nullable=False)
IS_SHOW = db.Column(db.Numeric(1, 0), nullable=False)
IS_IMPORT = db.Column(db.Numeric(1, 0), nullable=False)
IS_URGENT = db.Column(db.Numeric(1, 0), nullable=False)
AUTO_PEN = db.Column(db.Numeric(1, 0), nullable=False)
CREATE_TIME = db.Column(db.DateTime, nullable=False)
UPDATE_TIME = db.Column(db.DateTime, nullable=False)
REGION = db.Column(db.String(10), nullable=False)
fa_files = db.relationship(u'FaFile', secondary=u'fa_bulletin_file', backref=u'fa_bulletins')
fa_role = db.relationship(u'FaRole', secondary=u'fa_bulletin_role', backref=u'fa_bulletins')
t_fa_bulletin_file = db.Table(
'fa_bulletin_file',
db.Column('BULLETIN_ID', db.ForeignKey(u'fa_bulletin.ID'), primary_key=True, nullable=False),
db.Column('FILE_ID', db.ForeignKey(u'fa_files.ID'), primary_key=True, nullable=False)
)
class FaBulletinLog(db.Model):
__tablename__ = 'fa_bulletin_log'
ID = db.Column(db.Integer, primary_key=True)
BULLETIN_ID = db.Column(db.ForeignKey(u'fa_bulletin.ID'), nullable=False)
USER_ID = db.Column(db.Integer, nullable=False)
LOOK_TIME = db.Column(db.DateTime, nullable=False)
fa_bulletin = db.relationship(u'FaBulletin', primaryjoin='FaBulletinLog.BULLETIN_ID == FaBulletin.ID', backref=u'fa_bulletin_logs')
class FaBulletinReview(db.Model):
__tablename__ = 'fa_bulletin_review'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_bulletin_review.ID'))
BULLETIN_ID = db.Column(db.ForeignKey(u'fa_bulletin.ID'), nullable=False)
NAME = db.Column(db.String(50))
CONTENT = db.Column(db.Text)
USER_ID = db.Column(db.Integer, nullable=False)
ADD_TIME = db.Column(db.DateTime, nullable=False)
STATUS = db.Column(db.String(10), nullable=False)
STATUS_TIME = db.Column(db.DateTime, nullable=False)
fa_bulletin = db.relationship(u'FaBulletin', primaryjoin='FaBulletinReview.BULLETIN_ID == FaBulletin.ID', backref=u'fa_bulletin_reviews')
parent = db.relationship(u'FaBulletinReview', remote_side=[ID], primaryjoin='FaBulletinReview.PARENT_ID == FaBulletinReview.ID', backref=u'fa_bulletin_reviews')
t_fa_bulletin_role = db.Table(
'fa_bulletin_role',
db.Column('BULLETIN_ID', db.ForeignKey(u'fa_bulletin.ID'), primary_key=True, nullable=False),
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
)
class FaBulletinType(db.Model):
__tablename__ = 'fa_bulletin_type'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(80))
class FaConfig(db.Model):
__tablename__ = 'fa_config'
ID = db.Column(db.Integer, primary_key=True)
TYPE = db.Column(db.String(10))
CODE = db.Column(db.String(32), nullable=False)
NAME = db.Column(db.String(50))
VALUE = db.Column(db.String(300))
REMARK = db.Column(db.String(500))
REGION = db.Column(db.String(10), nullable=False)
ADD_USER_ID = db.Column(db.Integer)
ADD_TIEM = db.Column(db.DateTime)
class FaDbServer(db.Model):
__tablename__ = 'fa_db_server'
ID = db.Column(db.Integer, primary_key=True)
DB_TYPE_ID = db.Column(db.ForeignKey(u'fa_db_server_type.ID'), nullable=False)
TYPE = db.Column(db.String(10), nullable=False)
IP = db.Column(db.String(20), nullable=False)
PORT = db.Column(db.Integer, nullable=False)
DBNAME = db.Column(db.String(20))
DBUID = db.Column(db.String(20), nullable=False)
PASSWORD = db.Column(db.String(32), nullable=False)
REMARK = db.Column(db.String(500))
DB_LINK = db.Column(db.String(200))
NICKNAME = db.Column(db.String(32))
TO_PATH_M = db.Column(db.String(300))
TO_PATH_D = db.Column(db.String(300))
fa_db_server_type = db.relationship(u'FaDbServerType', primaryjoin='FaDbServer.DB_TYPE_ID == FaDbServerType.ID', backref=u'fa_db_servers')
class FaDbServerType(db.Model):
__tablename__ = 'fa_db_server_type'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(20))
REMARK = db.Column(db.String(500))
class FaDistrict(db.Model):
__tablename__ = 'fa_district'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_district.ID'))
NAME = db.Column(db.String(255), nullable=False)
CODE = db.Column(db.String(50))
IN_USE = db.Column(db.Numeric(1, 0), nullable=False)
LEVEL_ID = db.Column(db.Integer, nullable=False)
ID_PATH = db.Column(db.String(200))
REGION = db.Column(db.String(10), nullable=False)
parent = db.relationship(u'FaDistrict', remote_side=[ID], primaryjoin='FaDistrict.PARENT_ID == FaDistrict.ID', backref=u'fa_districts')
fa_user = db.relationship(u'FaUser', secondary=u'fa_user_district', backref=u'fa_districts')
class FaDynasty(db.Model):
__tablename__ = 'fa_dynasty'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(20), nullable=False)
class FaElder(db.Model):
__tablename__ = 'fa_elder'
ID = db.Column(db.Integer, primary_key=True)
FAMILY_ID = db.Column(db.ForeignKey(u'fa_family.ID'))
NAME = db.Column(db.String(2), nullable=False)
SORT = db.Column(db.Integer)
fa_family = db.relationship(u'FaFamily', primaryjoin='FaElder.FAMILY_ID == FaFamily.ID', backref=u'fa_elders')
t_fa_event_files = db.Table(
'fa_event_files',
db.Column('EVENT_ID', db.ForeignKey(u'fa_user_event.ID'), primary_key=True, nullable=False),
db.Column('FILES_ID', db.ForeignKey(u'fa_files.ID'), primary_key=True, nullable=False)
)
class FaExportLog(db.Model):
__tablename__ = 'fa_export_log'
ID = db.Column(db.Integer, primary_key=True)
USER_ID = db.Column(db.Integer)
LOGIN_NAME = db.Column(db.String(50))
NAME = db.Column(db.String(50))
SQL_CONTENT = db.Column(db.Text)
EXPORT_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.String(100))
class FaFamily(db.Model):
__tablename__ = 'fa_family'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(20), nullable=False)
class FaFile(db.Model):
__tablename__ = 'fa_files'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50), nullable=False)
PATH = db.Column(db.String(200), nullable=False)
USER_ID = db.Column(db.Integer)
LENGTH = db.Column(db.Integer, nullable=False)
UPLOAD_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.String(2000))
URL = db.Column(db.String(254))
FILE_TYPE = db.Column(db.String(50))
fa_task_flow_handle = db.relationship(u'FaTaskFlowHandle', secondary=u'fa_task_flow_handle_files', backref=u'fa_files')
class FaFlow(db.Model):
__tablename__ = 'fa_flow'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(100), nullable=False)
FLOW_TYPE = db.Column(db.String(20), nullable=False)
REMARK = db.Column(db.String(100))
X_Y = db.Column(db.String(500))
REGION = db.Column(db.String(10))
class FaFlowFlownode(db.Model):
__tablename__ = 'fa_flow_flownode'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(100), nullable=False)
HANDLE_URL = db.Column(db.String(200))
SHOW_URL = db.Column(db.String(200))
class FaFlowFlownodeFlow(db.Model):
__tablename__ = 'fa_flow_flownode_flow'
ID = db.Column(db.Integer, primary_key=True)
FLOW_ID = db.Column(db.ForeignKey(u'fa_flow.ID'), nullable=False)
FROM_FLOWNODE_ID = db.Column(db.ForeignKey(u'fa_flow_flownode.ID'), nullable=False)
TO_FLOWNODE_ID = db.Column(db.Integer, nullable=False)
HANDLE = db.Column(db.Numeric(1, 0), nullable=False)
ASSIGNER = db.Column(db.Numeric(1, 0), nullable=False)
STATUS = db.Column(db.String(20))
REMARK = db.Column(db.String(20))
EXPIRE_HOUR = db.Column(db.Integer, nullable=False)
fa_flow = db.relationship(u'FaFlow', primaryjoin='FaFlowFlownodeFlow.FLOW_ID == FaFlow.ID', backref=u'fa_flow_flownode_flows')
fa_flow_flownode = db.relationship(u'FaFlowFlownode', primaryjoin='FaFlowFlownodeFlow.FROM_FLOWNODE_ID == FaFlowFlownode.ID', backref=u'fa_flow_flownode_flows')
fa_role = db.relationship(u'FaRole', secondary=u'fa_flow_flownode_role', backref=u'fa_flow_flownode_flows')
t_fa_flow_flownode_role = db.Table(
'fa_flow_flownode_role',
db.Column('FLOW_ID', db.ForeignKey(u'fa_flow_flownode_flow.ID'), primary_key=True, nullable=False),
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
)
class FaFunction(db.Model):
__tablename__ = 'fa_function'
ID = db.Column(db.Integer, primary_key=True)
REMARK = db.Column(db.String(100))
FULL_NAME = db.Column(db.String(100))
NAMESPACE = db.Column(db.String(100))
CLASS_NAME = db.Column(db.String(100))
METHOD_NAME = db.Column(db.String(100))
DLL_NAME = db.Column(db.String(100))
XML_NOTE = db.Column(db.String(254))
fa_role = db.relationship(u'FaRole', secondary=u'fa_role_function', backref=u'fa_functions')
class FaLog(db.Model):
__tablename__ = 'fa_log'
ID = db.Column(db.Integer, primary_key=True)
ADD_TIME = db.Column(db.DateTime, nullable=False)
MODULE_NAME = db.Column(db.String(100), nullable=False)
USER_ID = db.Column(db.Integer, nullable=False)
class FaLogin(db.Model):
__tablename__ = 'fa_login'
ID = db.Column(db.Integer, primary_key=True)
LOGIN_NAME = db.Column(db.String(20))
PASSWORD = db.Column(db.String(255))
PHONE_NO = db.Column(db.String(20))
EMAIL_ADDR = db.Column(db.String(255))
VERIFY_CODE = db.Column(db.String(10))
VERIFY_TIME = db.Column(db.DateTime)
IS_LOCKED = db.Column(db.Integer)
PASS_UPDATE_DATE = db.Column(db.DateTime)
LOCKED_REASON = db.Column(db.String(255))
FAIL_COUNT = db.Column(db.Integer)
fa_oauth = db.relationship(u'FaOauth', secondary=u'fa_oauth_login', backref=u'fa_logins')
class FaLoginHistory(db.Model):
__tablename__ = 'fa_login_history'
ID = db.Column(db.Integer, primary_key=True)
USER_ID = db.Column(db.Integer)
LOGIN_TIME = db.Column(db.DateTime)
LOGIN_HOST = db.Column(db.String(255))
LOGOUT_TIME = db.Column(db.DateTime)
LOGIN_HISTORY_TYPE = db.Column(db.Integer)
MESSAGE = db.Column(db.String(255))
class FaMessage(db.Model):
__tablename__ = 'fa_message'
ID = db.Column(db.Integer, primary_key=True)
MESSAGE_TYPE_ID = db.Column(db.ForeignKey(u'fa_message_type.ID'))
KEY_ID = db.Column(db.Integer)
TITLE = db.Column(db.String(100))
CONTENT = db.Column(db.String(500))
CREATE_TIME = db.Column(db.DateTime)
CREATE_USERNAME = db.Column(db.String(50))
CREATE_USERID = db.Column(db.Integer)
STATUS = db.Column(db.String(10))
PUSH_TYPE = db.Column(db.String(10))
DISTRICT_ID = db.Column(db.Integer)
ALL_ROLE_ID = db.Column(db.String(500))
fa_message_type = db.relationship(u'FaMessageType', primaryjoin='FaMessage.MESSAGE_TYPE_ID == FaMessageType.ID', backref=u'fa_messages')
class FaMessageType(db.Model):
__tablename__ = 'fa_message_type'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50))
TABLE_NAME = db.Column(db.String(50))
IS_USE = db.Column(db.Integer)
REMARK = db.Column(db.String(500))
class FaModule(db.Model):
__tablename__ = 'fa_module'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_module.ID'))
NAME = db.Column(db.String(60))
LOCATION = db.Column(db.String(2000))
CODE = db.Column(db.String(20))
IS_DEBUG = db.Column(db.Numeric(1, 0), nullable=False)
IS_HIDE = db.Column(db.Numeric(1, 0), nullable=False)
SHOW_ORDER = db.Column(db.Numeric(2, 0), nullable=False)
DESCRIPTION = db.Column(db.String(2000))
IMAGE_URL = db.Column(db.String(2000))
DESKTOP_ROLE = db.Column(db.String(200))
W = db.Column(db.Integer)
H = db.Column(db.Integer)
parent = db.relationship(u'FaModule', remote_side=[ID], primaryjoin='FaModule.PARENT_ID == FaModule.ID', backref=u'fa_modules')
fa_role = db.relationship(u'FaRole', secondary=u'fa_role_module', backref=u'fa_modules')
fa_user = db.relationship(u'FaUser', secondary=u'fa_user_module', backref=u'fa_modules')
class FaOauth(db.Model):
__tablename__ = 'fa_oauth'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50))
REG_URL = db.Column(db.String(500))
LOGIN_URL = db.Column(db.String(500))
REMARK = db.Column(db.String(500))
t_fa_oauth_login = db.Table(
'fa_oauth_login',
db.Column('OAUTH_ID', db.ForeignKey(u'fa_oauth.ID'), primary_key=True, nullable=False),
db.Column('LOGIN_ID', db.ForeignKey(u'fa_login.ID'), primary_key=True, nullable=False)
)
class FaQuery(db.Model):
__tablename__ = 'fa_query'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50), nullable=False)
CODE = db.Column(db.String(20), nullable=False)
AUTO_LOAD = db.Column(db.Numeric(1, 0), nullable=False)
PAGE_SIZE = db.Column(db.Integer, nullable=False)
SHOW_CHECKBOX = db.Column(db.Numeric(1, 0), nullable=False)
IS_DEBUG = db.Column(db.Numeric(1, 0), nullable=False)
FILTR_LEVEL = db.Column(db.Numeric(1, 0))
DB_SERVER_ID = db.Column(db.Integer)
QUERY_CONF = db.Column(db.Text)
QUERY_CFG_JSON = db.Column(db.Text)
IN_PARA_JSON = db.Column(db.Text)
JS_STR = db.Column(db.Text)
ROWS_BTN = db.Column(db.Text)
HEARD_BTN = db.Column(db.Text)
REPORT_SCRIPT = db.Column(db.Text)
CHARTS_CFG = db.Column(db.Text)
CHARTS_TYPE = db.Column(db.String(50))
FILTR_STR = db.Column(db.Text)
REMARK = db.Column(db.Text)
NEW_DATA = db.Column(db.String(50))
class FaRole(db.Model):
__tablename__ = 'fa_role'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(80))
REMARK = db.Column(db.String(255))
TYPE = db.Column(db.Integer)
fa_user = db.relationship(u'FaUser', secondary=u'fa_user_role', backref=u'fa_roles')
class FaRoleConfig(db.Model):
__tablename__ = 'fa_role_config'
ID = db.Column(db.Integer, primary_key=True)
ROLE_ID = db.Column(db.ForeignKey(u'fa_role.ID'), nullable=False)
TYPE = db.Column(db.String(10))
NAME = db.Column(db.String(50), nullable=False)
VALUE = db.Column(db.String(300))
REMARK = db.Column(db.String(500))
fa_role = db.relationship(u'FaRole', primaryjoin='FaRoleConfig.ROLE_ID == FaRole.ID', backref=u'fa_role_configs')
t_fa_role_function = db.Table(
'fa_role_function',
db.Column('FUNCTION_ID', db.ForeignKey(u'fa_function.ID'), primary_key=True, nullable=False),
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
)
t_fa_role_module = db.Table(
'fa_role_module',
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False),
db.Column('MODULE_ID', db.ForeignKey(u'fa_module.ID'), primary_key=True, nullable=False)
)
class FaRoleQueryAuthority(db.Model):
__tablename__ = 'fa_role_query_authority'
ROLE_ID = db.Column(db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
QUERY_ID = db.Column(db.ForeignKey(u'fa_query.ID'), primary_key=True, nullable=False)
NO_AUTHORITY = db.Column(db.String(200))
fa_query = db.relationship(u'FaQuery', primaryjoin='FaRoleQueryAuthority.QUERY_ID == FaQuery.ID', backref=u'fa_role_query_authorities')
fa_role = db.relationship(u'FaRole', primaryjoin='FaRoleQueryAuthority.ROLE_ID == FaRole.ID', backref=u'fa_role_query_authorities')
class FaScript(db.Model):
__tablename__ = 'fa_script'
ID = db.Column(db.Integer, primary_key=True)
CODE = db.Column(db.String(20), nullable=False)
NAME = db.Column(db.String(255), nullable=False)
BODY_TEXT = db.Column(db.Text, nullable=False)
BODY_HASH = db.Column(db.String(255), nullable=False)
RUN_WHEN = db.Column(db.String(30))
RUN_ARGS = db.Column(db.String(255))
RUN_DATA = db.Column(db.String(20), nullable=False, server_default=db.FetchedValue())
STATUS = db.Column(db.String(10))
DISABLE_REASON = db.Column(db.String(50))
SERVICE_FLAG = db.Column(db.String(50))
REGION = db.Column(db.String(10))
IS_GROUP = db.Column(db.Numeric(1, 0), nullable=False)
class FaScriptGroupList(db.Model):
__tablename__ = 'fa_script_group_list'
SCRIPT_ID = db.Column(db.Integer, primary_key=True, nullable=False)
GROUP_ID = db.Column(db.ForeignKey(u'fa_script.ID'), primary_key=True, nullable=False)
ORDER_INDEX = db.Column(db.Integer, nullable=False)
fa_script = db.relationship(u'FaScript', primaryjoin='FaScriptGroupList.GROUP_ID == FaScript.ID', backref=u'fa_script_group_lists')
class FaScriptTask(db.Model):
__tablename__ = 'fa_script_task'
ID = db.Column(db.Integer, primary_key=True)
SCRIPT_ID = db.Column(db.ForeignKey(u'fa_script.ID'), nullable=False)
BODY_TEXT = db.Column(db.Text, nullable=False)
BODY_HASH = db.Column(db.String(255), nullable=False)
RUN_STATE = db.Column(db.String(10), nullable=False, server_default=db.FetchedValue())
RUN_WHEN = db.Column(db.String(30))
RUN_ARGS = db.Column(db.String(255))
RUN_DATA = db.Column(db.String(20), nullable=False, server_default=db.FetchedValue())
LOG_TYPE = db.Column(db.Numeric(1, 0), server_default=db.FetchedValue())
DSL_TYPE = db.Column(db.String(255))
RETURN_CODE = db.Column(db.String(10), server_default=db.FetchedValue())
START_TIME = db.Column(db.DateTime)
END_TIME = db.Column(db.DateTime)
DISABLE_DATE = db.Column(db.DateTime)
DISABLE_REASON = db.Column(db.String(50))
SERVICE_FLAG = db.Column(db.String(50))
REGION = db.Column(db.String(10))
GROUP_ID = db.Column(db.Integer)
fa_script = db.relationship(u'FaScript', primaryjoin='FaScriptTask.SCRIPT_ID == FaScript.ID', backref=u'fa_script_tasks')
class FaScriptTaskLog(db.Model):
__tablename__ = 'fa_script_task_log'
ID = db.Column(db.Integer, primary_key=True)
SCRIPT_TASK_ID = db.Column(db.ForeignKey(u'fa_script_task.ID'), nullable=False)
LOG_TIME = db.Column(db.DateTime, nullable=False)
LOG_TYPE = db.Column(db.Numeric(1, 0), nullable=False, server_default=db.FetchedValue())
MESSAGE = db.Column(db.Text)
SQL_TEXT = db.Column(db.Text)
fa_script_task = db.relationship(u'FaScriptTask', primaryjoin='FaScriptTaskLog.SCRIPT_TASK_ID == FaScriptTask.ID', backref=u'fa_script_task_logs')
class FaSmsSend(db.Model):
__tablename__ = 'fa_sms_send'
GUID = db.Column(db.String(32), primary_key=True)
MESSAGE_ID = db.Column(db.Integer)
PHONE_NO = db.Column(db.String(50), nullable=False)
ADD_TIME = db.Column(db.DateTime)
SEND_TIME = db.Column(db.DateTime)
CONTENT = db.Column(db.String(500), nullable=False)
STAUTS = db.Column(db.String(15))
TRY_NUM = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
class FaTask(db.Model):
__tablename__ = 'fa_task'
ID = db.Column(db.Integer, primary_key=True)
FLOW_ID = db.Column(db.ForeignKey(u'fa_flow.ID'))
TASK_NAME = db.Column(db.String(50))
CREATE_TIME = db.Column(db.DateTime)
CREATE_USER = db.Column(db.Integer)
CREATE_USER_NAME = db.Column(db.String(50))
STATUS = db.Column(db.String(50))
STATUS_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.Text)
REGION = db.Column(db.String(10))
KEY_ID = db.Column(db.String(32))
START_TIME = db.Column(db.DateTime)
END_TIME = db.Column(db.DateTime)
DEAL_TIME = db.Column(db.DateTime)
ROLE_ID_STR = db.Column(db.String(200))
fa_flow = db.relationship(u'FaFlow', primaryjoin='FaTask.FLOW_ID == FaFlow.ID', backref=u'fa_tasks')
class FaTaskFlow(db.Model):
__tablename__ = 'fa_task_flow'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_task_flow.ID'))
TASK_ID = db.Column(db.ForeignKey(u'fa_task.ID'), nullable=False)
LEVEL_ID = db.Column(db.Integer)
FLOWNODE_ID = db.Column(db.Integer)
EQUAL_ID = db.Column(db.Integer)
IS_HANDLE = db.Column(db.Integer, nullable=False)
NAME = db.Column(db.String(100))
HANDLE_URL = db.Column(db.String(200))
SHOW_URL = db.Column(db.String(200))
EXPIRE_TIME = db.Column(db.DateTime)
START_TIME = db.Column(db.DateTime, nullable=False)
DEAL_STATUS = db.Column(db.String(50))
ROLE_ID_STR = db.Column(db.String(200))
HANDLE_USER_ID = db.Column(db.Integer)
DEAL_TIME = db.Column(db.DateTime)
ACCEPT_TIME = db.Column(db.DateTime)
parent = db.relationship(u'FaTaskFlow', remote_side=[ID], primaryjoin='FaTaskFlow.PARENT_ID == FaTaskFlow.ID', backref=u'fa_task_flows')
fa_task = db.relationship(u'FaTask', primaryjoin='FaTaskFlow.TASK_ID == FaTask.ID', backref=u'fa_task_flows')
class FaTaskFlowHandle(db.Model):
__tablename__ = 'fa_task_flow_handle'
ID = db.Column(db.Integer, primary_key=True)
TASK_FLOW_ID = db.Column(db.ForeignKey(u'fa_task_flow.ID'), nullable=False)
DEAL_USER_ID = db.Column(db.Integer, nullable=False)
DEAL_USER_NAME = db.Column(db.String(50), nullable=False)
DEAL_TIME = db.Column(db.DateTime, nullable=False)
CONTENT = db.Column(db.String(2000), nullable=False)
fa_task_flow = db.relationship(u'FaTaskFlow', primaryjoin='FaTaskFlowHandle.TASK_FLOW_ID == FaTaskFlow.ID', backref=u'fa_task_flow_handles')
t_fa_task_flow_handle_files = db.Table(
'fa_task_flow_handle_files',
db.Column('FLOW_HANDLE_ID', db.ForeignKey(u'fa_task_flow_handle.ID'), primary_key=True, nullable=False),
db.Column('FILES_ID', db.ForeignKey(u'fa_files.ID'), primary_key=True, nullable=False)
)
class FaTaskFlowHandleUser(db.Model):
__tablename__ = 'fa_task_flow_handle_user'
TASK_FLOW_ID = db.Column(db.ForeignKey(u'fa_task_flow.ID'), primary_key=True, nullable=False)
HANDLE_USER_ID = db.Column(db.Integer, primary_key=True, nullable=False)
fa_task_flow = db.relationship(u'FaTaskFlow', primaryjoin='FaTaskFlowHandleUser.TASK_FLOW_ID == FaTaskFlow.ID', backref=u'fa_task_flow_handle_users')
class FaUpdataLog(db.Model):
__tablename__ = 'fa_updata_log'
ID = db.Column(db.Integer, primary_key=True)
CREATE_TIME = db.Column(db.DateTime)
CREATE_USER_NAME = db.Column(db.String(50))
CREATE_USER_ID = db.Column(db.Integer)
OLD_CONTENT = db.Column(db.Text)
NEW_CONTENT = db.Column(db.Text)
TABLE_NAME = db.Column(db.String(50))
class FaUser(db.Model):
__tablename__ = 'fa_user'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(80))
LOGIN_NAME = db.Column(db.String(20))
ICON_FILES_ID = db.Column(db.Integer)
DISTRICT_ID = db.Column(db.ForeignKey(u'fa_district.ID'), nullable=False)
IS_LOCKED = db.Column(db.Numeric(1, 0))
CREATE_TIME = db.Column(db.DateTime)
LOGIN_COUNT = db.Column(db.Integer)
LAST_LOGIN_TIME = db.Column(db.DateTime)
LAST_LOGOUT_TIME = db.Column(db.DateTime)
LAST_ACTIVE_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.String(2000))
fa_district = db.relationship(u'FaDistrict', primaryjoin='FaUser.DISTRICT_ID == FaDistrict.ID', backref=u'fa_users')
fa_user_info = db.relationship(u'FaUserInfo', secondary=u'fa_user_friend', backref=u'fa_users', lazy="select")
class FaUserInfo(FaUser):
__tablename__ = 'fa_user_info'
ID = db.Column(db.ForeignKey(u'fa_user.ID'), primary_key=True)
LEVEL_ID = db.Column(db.Integer)
FAMILY_ID = db.Column(db.ForeignKey(u'fa_family.ID'))
ELDER_ID = db.Column(db.ForeignKey(u'fa_elder.ID'))
LEVEL_NAME = db.Column(db.String(2))
FATHER_ID = db.Column(db.ForeignKey(u'fa_user_info.ID'))
MOTHER_ID = db.Column(db.Integer)
BIRTHDAY_TIME = db.Column(db.DateTime)
BIRTHDAY_PLACE = db.Column(db.String(500))
IS_LIVE = db.Column(db.Numeric(1, 0))
DIED_TIME = db.Column(db.DateTime)
DIED_PLACE = db.Column(db.String(500))
SEX = db.Column(db.String(2))
YEARS_TYPE = db.Column(db.String(10))
CONSORT_ID = db.Column(db.Integer)
STATUS = db.Column(db.String(10), nullable=False, server_default=db.FetchedValue())
CREATE_USER_NAME = db.Column(db.String(50), nullable=False, server_default=db.FetchedValue())
CREATE_USER_ID = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
UPDATE_TIME = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
UPDATE_USER_NAME = db.Column(db.String(50), nullable=False, server_default=db.FetchedValue())
UPDATE_USER_ID = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
fa_elder = db.relationship(u'FaElder', primaryjoin='FaUserInfo.ELDER_ID == FaElder.ID', backref=u'fa_user_infos')
fa_family = db.relationship(u'FaFamily', primaryjoin='FaUserInfo.FAMILY_ID == FaFamily.ID', backref=u'fa_user_infos')
parent = db.relationship(u'FaUserInfo', remote_side=[ID], primaryjoin='FaUserInfo.FATHER_ID == FaUserInfo.ID', backref=u'fa_user_infos')
t_fa_user_district = db.Table(
'fa_user_district',
db.Column('USER_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False),
db.Column('DISTRICT_ID', db.ForeignKey(u'fa_district.ID'), primary_key=True, nullable=False)
)
class FaUserEvent(db.Model):
__tablename__ = 'fa_user_event'
ID = db.Column(db.Integer, primary_key=True)
USER_ID = db.Column(db.ForeignKey(u'fa_user_info.ID'))
NAME = db.Column(db.String(50))
HAPPEN_TIME = db.Column(db.DateTime)
CONTENT = db.Column(db.String(500))
ADDRESS = db.Column(db.String(500))
fa_user_info = db.relationship(u'FaUserInfo', primaryjoin='FaUserEvent.USER_ID == FaUserInfo.ID', backref=u'fa_user_events')
fa_files = db.relationship(u'FaFile', secondary=u'fa_event_files', backref=u'fa_user_events')
t_fa_user_friend = db.Table(
'fa_user_friend',
db.Column('USER_ID', db.ForeignKey(u'fa_user_info.ID'), primary_key=True, nullable=False),
db.Column('FRIEND_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False)
)
class FaUserMessage(db.Model):
__tablename__ = 'fa_user_message'
MESSAGE_ID = db.Column(db.ForeignKey(u'fa_message.ID'), primary_key=True, nullable=False)
USER_ID = db.Column(db.Integer, primary_key=True, nullable=False)
PHONE_NO = db.Column(db.String(20))
STATUS = db.Column(db.String(10))
STATUS_TIME = db.Column(db.DateTime, nullable=False)
REPLY = db.Column(db.String(500))
PUSH_TYPE = db.Column(db.String(10))
fa_message = db.relationship(u'FaMessage', primaryjoin='FaUserMessage.MESSAGE_ID == FaMessage.ID', backref=u'fa_user_messages')
t_fa_user_module = db.Table(
'fa_user_module',
db.Column('USER_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False),
db.Column('MODULE_ID', db.ForeignKey(u'fa_module.ID'), primary_key=True, nullable=False)
)
t_fa_user_role = db.Table(
'fa_user_role',
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False),
db.Column('USER_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False)
)
class Sequence(db.Model):
__tablename__ = 'sequence'
seq_name = db.Column(db.String(50), primary_key=True)
current_val = db.Column(db.Integer, nullable=False)
increment_val = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
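# A minimal usage sketch (hypothetical data) for the models above, showing
# the self-referential 'parent' relationship (remote_side=[ID]) and the
# joined-table inheritance of FaUserInfo over FaUser:
#
# from iSoft.entity.model import FaDistrict, FaUserInfo
#
# # Walk a district's ancestry up the tree via 'parent'.
# district = FaDistrict.query.filter_by(NAME=u'SomeDistrict').first()
# while district is not None:
#     print(district.NAME)
#     district = district.parent
#
# # One query loads columns from both fa_user and fa_user_info.
# user = FaUserInfo.query.get(1)
# print(user.NAME, user.LEVEL_NAME)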
|
bsd-3-clause
| 1,656,765,812,792,908,000
| 35.307791
| 164
| 0.679236
| false
| 2.863806
| false
| false
| false
|
Just-D/chromium-1
|
tools/telemetry/telemetry/page/shared_page_state.py
|
1
|
19615
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import shutil
import sys
import tempfile
import zipfile
from catapult_base import cloud_storage
from telemetry.core import exceptions
from telemetry.core import util
from telemetry import decorators
from telemetry.internal.browser import browser_finder
from telemetry.internal.browser import browser_finder_exceptions
from telemetry.internal.browser import browser_info as browser_info_module
from telemetry.internal.platform.profiler import profiler_finder
from telemetry.internal.util import exception_formatter
from telemetry.internal.util import file_handle
from telemetry.page import action_runner as action_runner_module
from telemetry.page import page_test
from telemetry import story
from telemetry.util import wpr_modes
from telemetry.web_perf import timeline_based_measurement
def _PrepareFinderOptions(finder_options, test, device_type):
browser_options = finder_options.browser_options
# Set up user agent.
browser_options.browser_user_agent_type = device_type
test.CustomizeBrowserOptions(finder_options.browser_options)
if finder_options.profiler:
profiler_class = profiler_finder.FindProfiler(finder_options.profiler)
profiler_class.CustomizeBrowserOptions(browser_options.browser_type,
finder_options)
class SharedPageState(story.SharedState):
"""
This class contains all specific logic necessary to run a Chrome browser
benchmark.
"""
_device_type = None
def __init__(self, test, finder_options, story_set):
super(SharedPageState, self).__init__(test, finder_options, story_set)
if isinstance(test, timeline_based_measurement.TimelineBasedMeasurement):
# This is to avoid the cyclic-import caused by timeline_based_page_test.
from telemetry.web_perf import timeline_based_page_test
self._test = timeline_based_page_test.TimelineBasedPageTest(test)
else:
self._test = test
device_type = self._device_type
# TODO(aiolos, nednguyen): Remove this logic of pulling out user_agent_type
# from story_set once all page_set are converted to story_set
# (crbug.com/439512).
def _IsPageSetInstance(s):
# This is needed to avoid importing telemetry.page.page_set which will
# cause cyclic import.
return 'PageSet' == s.__class__.__name__ or 'PageSet' in (
list(c.__name__ for c in s.__class__.__bases__))
if not device_type and _IsPageSetInstance(story_set):
device_type = story_set.user_agent_type
_PrepareFinderOptions(finder_options, self._test, device_type)
self._browser = None
self._finder_options = finder_options
self._possible_browser = self._GetPossibleBrowser(
self._test, finder_options)
# TODO(slamm): Remove _append_to_existing_wpr when replay lifetime changes.
self._append_to_existing_wpr = False
self._first_browser = True
self._did_login_for_current_page = False
self._current_page = None
self._current_tab = None
self._migrated_profile = None
self._pregenerated_profile_archive = None
self._test.SetOptions(self._finder_options)
@property
def browser(self):
return self._browser
def _FindBrowser(self, finder_options):
possible_browser = browser_finder.FindBrowser(finder_options)
if not possible_browser:
raise browser_finder_exceptions.BrowserFinderException(
'No browser found.\n\nAvailable browsers:\n%s\n' %
'\n'.join(browser_finder.GetAllAvailableBrowserTypes(finder_options)))
return possible_browser
def _GetPossibleBrowser(self, test, finder_options):
"""Return a possible_browser with the given options for |test|. """
possible_browser = self._FindBrowser(finder_options)
finder_options.browser_options.browser_type = (
possible_browser.browser_type)
(enabled, msg) = decorators.IsEnabled(test, possible_browser)
if (not enabled and
not finder_options.run_disabled_tests):
logging.warning(msg)
logging.warning('You are trying to run a disabled test.')
logging.warning('Pass --also-run-disabled-tests to squelch this message.')
sys.exit(0)
if possible_browser.IsRemote():
possible_browser.RunRemote()
sys.exit(0)
return possible_browser
def DidRunStory(self, results):
if self._finder_options.profiler:
self._StopProfiling(results)
# We might hang while trying to close the connection, and need to guarantee
# the page will get cleaned up to avoid future tests failing in weird ways.
try:
if self._current_tab and self._current_tab.IsAlive():
self._current_tab.CloseConnections()
except Exception:
if self._current_tab:
self._current_tab.Close()
finally:
if self._current_page.credentials and self._did_login_for_current_page:
self.browser.credentials.LoginNoLongerNeeded(
self._current_tab, self._current_page.credentials)
if self._test.StopBrowserAfterPage(self.browser, self._current_page):
self._StopBrowser()
self._current_page = None
self._current_tab = None
@property
def platform(self):
return self._possible_browser.platform
def _PrepareWpr(self, network_controller, archive_path,
make_javascript_deterministic):
browser_options = self._finder_options.browser_options
if self._finder_options.use_live_sites:
browser_options.wpr_mode = wpr_modes.WPR_OFF
elif browser_options.wpr_mode != wpr_modes.WPR_RECORD:
browser_options.wpr_mode = (
wpr_modes.WPR_REPLAY
if archive_path and os.path.isfile(archive_path)
else wpr_modes.WPR_OFF)
# Replay's life-cycle is tied to the browser. Start and Stop are handled by
# platform_backend.DidCreateBrowser and platform_backend.WillCloseBrowser,
# respectively.
# TODO(slamm): Update life-cycle comment with https://crbug.com/424777 fix.
wpr_mode = browser_options.wpr_mode
if self._append_to_existing_wpr and wpr_mode == wpr_modes.WPR_RECORD:
wpr_mode = wpr_modes.WPR_APPEND
network_controller.SetReplayArgs(
archive_path, wpr_mode, browser_options.netsim,
browser_options.extra_wpr_args, make_javascript_deterministic)
def _StartBrowser(self, page):
assert self._browser is None
self._possible_browser.SetCredentialsPath(page.credentials_path)
self._test.WillStartBrowser(self.platform)
self._browser = self._possible_browser.Create(self._finder_options)
self._test.DidStartBrowser(self.browser)
if self._first_browser:
self._first_browser = False
self.browser.credentials.WarnIfMissingCredentials(page)
logging.info('OS: %s %s',
self.platform.GetOSName(),
self.platform.GetOSVersionName())
if self.browser.supports_system_info:
system_info = self.browser.GetSystemInfo()
if system_info.model_name:
logging.info('Model: %s', system_info.model_name)
if system_info.gpu:
for i, device in enumerate(system_info.gpu.devices):
logging.info('GPU device %d: %s', i, device)
if system_info.gpu.aux_attributes:
logging.info('GPU Attributes:')
for k, v in sorted(system_info.gpu.aux_attributes.iteritems()):
logging.info(' %-20s: %s', k, v)
if system_info.gpu.feature_status:
logging.info('Feature Status:')
for k, v in sorted(system_info.gpu.feature_status.iteritems()):
logging.info(' %-20s: %s', k, v)
if system_info.gpu.driver_bug_workarounds:
logging.info('Driver Bug Workarounds:')
for workaround in system_info.gpu.driver_bug_workarounds:
logging.info(' %s', workaround)
else:
logging.info('No GPU devices')
else:
logging.warning('System info not supported')
def WillRunStory(self, page):
if self._ShouldDownloadPregeneratedProfileArchive():
self._DownloadPregeneratedProfileArchive()
if self._ShouldMigrateProfile():
self._MigratePregeneratedProfile()
page_set = page.page_set
self._current_page = page
if self._test.RestartBrowserBeforeEachPage() or page.startup_url:
self._StopBrowser()
started_browser = not self.browser
self._PrepareWpr(self.platform.network_controller,
page_set.WprFilePathForStory(page),
page.make_javascript_deterministic)
if self.browser:
# Set new credential path for browser.
self.browser.credentials.credentials_path = page.credentials_path
self.platform.network_controller.UpdateReplayForExistingBrowser()
else:
self._StartBrowser(page)
if self.browser.supports_tab_control and self._test.close_tabs_before_run:
# Create a tab if there's none.
if len(self.browser.tabs) == 0:
self.browser.tabs.New()
# Ensure only one tab is open, unless the test is a multi-tab test.
if not self._test.is_multi_tab_test:
while len(self.browser.tabs) > 1:
self.browser.tabs[-1].Close()
# Must wait for tab to commit otherwise it can commit after the next
# navigation has begun and RenderFrameHostManager::DidNavigateMainFrame()
# will cancel the next navigation because it's pending. This manifests as
# the first navigation in a PageSet freezing indefinitely because the
# navigation was silently cancelled when |self.browser.tabs[0]| was
# committed. Only do this when we just started the browser, otherwise
# there are cases where previous pages in a PageSet never complete
# loading so we'll wait forever.
if started_browser:
self.browser.tabs[0].WaitForDocumentReadyStateToBeComplete()
# Start profiling if needed.
if self._finder_options.profiler:
self._StartProfiling(self._current_page)
def CanRunStory(self, page):
return self.CanRunOnBrowser(browser_info_module.BrowserInfo(self.browser),
page)
def CanRunOnBrowser(self, browser_info,
page): # pylint: disable=unused-argument
"""Override this to return whether the browser brought up by this state
instance is suitable for running the given page.
Args:
browser_info: an instance of telemetry.core.browser_info.BrowserInfo
page: an instance of telemetry.page.Page
"""
return True
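# A hedged sketch of an override, in the style of GPU-dependent benchmarks
# (assumes BrowserInfo exposes HasWebGLSupport(); substitute whatever
# capability check your pages actually require):
#
#   def CanRunOnBrowser(self, browser_info, page):
#     return browser_info.HasWebGLSupport()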
def _PreparePage(self):
self._current_tab = self._test.TabForPage(self._current_page, self.browser)
if self._current_page.is_file:
self.browser.SetHTTPServerDirectories(
self._current_page.page_set.serving_dirs |
set([self._current_page.serving_dir]))
if self._current_page.credentials:
if not self.browser.credentials.LoginNeeded(
self._current_tab, self._current_page.credentials):
raise page_test.Failure(
'Login as ' + self._current_page.credentials + ' failed')
self._did_login_for_current_page = True
if self._test.clear_cache_before_each_run:
self._current_tab.ClearCache(force=True)
def _ImplicitPageNavigation(self):
"""Executes the implicit navigation that occurs for every page iteration.
This function will be called once per page before any actions are executed.
"""
self._test.WillNavigateToPage(self._current_page, self._current_tab)
self._test.RunNavigateSteps(self._current_page, self._current_tab)
self._test.DidNavigateToPage(self._current_page, self._current_tab)
def RunStory(self, results):
try:
self._PreparePage()
self._ImplicitPageNavigation()
action_runner = action_runner_module.ActionRunner(
self._current_tab, skip_waits=self._current_page.skip_waits)
self._current_page.RunPageInteractions(action_runner)
self._test.ValidateAndMeasurePage(
self._current_page, self._current_tab, results)
except exceptions.Error:
if self._test.is_multi_tab_test:
# Avoid trying to recover from an unknown multi-tab state.
exception_formatter.PrintFormattedException(
msg='Telemetry Error during multi tab test:')
raise page_test.MultiTabTestAppCrashError
raise
def TearDownState(self):
if self._migrated_profile:
shutil.rmtree(self._migrated_profile)
self._migrated_profile = None
self._StopBrowser()
def _StopBrowser(self):
if self._browser:
self._browser.Close()
self._browser = None
# Restarting the state will also restart the wpr server. If we're
# recording, we need to continue adding into the same wpr archive,
# not overwrite it.
self._append_to_existing_wpr = True
def _StartProfiling(self, page):
output_file = os.path.join(self._finder_options.output_dir,
page.file_safe_name)
is_repeating = (self._finder_options.page_repeat != 1 or
self._finder_options.pageset_repeat != 1)
if is_repeating:
output_file = util.GetSequentialFileName(output_file)
self.browser.profiling_controller.Start(
self._finder_options.profiler, output_file)
def _StopProfiling(self, results):
if self.browser:
profiler_files = self.browser.profiling_controller.Stop()
for f in profiler_files:
if os.path.isfile(f):
results.AddProfilingFile(self._current_page,
file_handle.FromFilePath(f))
def _ShouldMigrateProfile(self):
return not self._migrated_profile
def _MigrateProfile(self, finder_options, found_browser,
initial_profile, final_profile):
"""Migrates a profile to be compatible with a newer version of Chrome.
Launching Chrome with the old profile will perform the migration.
"""
# Save the current input and output profiles.
saved_input_profile = finder_options.browser_options.profile_dir
saved_output_profile = finder_options.output_profile_path
# Set the input and output profiles.
finder_options.browser_options.profile_dir = initial_profile
finder_options.output_profile_path = final_profile
# Launch the browser, then close it.
browser = found_browser.Create(finder_options)
browser.Close()
# Load the saved input and output profiles.
finder_options.browser_options.profile_dir = saved_input_profile
finder_options.output_profile_path = saved_output_profile
def _MigratePregeneratedProfile(self):
"""Migrates the pregenerated profile by launching Chrome with it.
On success, updates self._migrated_profile and
self._finder_options.browser_options.profile_dir with the directory of the
migrated profile.
"""
self._migrated_profile = tempfile.mkdtemp()
logging.info("Starting migration of pregenerated profile to %s",
self._migrated_profile)
pregenerated_profile = self._finder_options.browser_options.profile_dir
possible_browser = self._FindBrowser(self._finder_options)
self._MigrateProfile(self._finder_options, possible_browser,
pregenerated_profile, self._migrated_profile)
self._finder_options.browser_options.profile_dir = self._migrated_profile
logging.info("Finished migration of pregenerated profile to %s",
self._migrated_profile)
def GetPregeneratedProfileArchive(self):
return self._pregenerated_profile_archive
def SetPregeneratedProfileArchive(self, archive):
"""
Benchmarks can set a pre-generated profile archive to indicate that when
Chrome is launched, it should have a --user-data-dir set to the
pregenerated profile, rather than to an empty profile.
If the benchmark is invoked with the option --profile-dir=<dir>, that
option overrides this value.
"""
self._pregenerated_profile_archive = archive
def _ShouldDownloadPregeneratedProfileArchive(self):
"""Whether to download a pre-generated profile archive."""
# There is no pre-generated profile archive.
if not self.GetPregeneratedProfileArchive():
return False
# If profile dir is specified on command line, use that instead.
if self._finder_options.browser_options.profile_dir:
logging.warning("Profile directory specified on command line: %s, this"
"overrides the benchmark's default profile directory.",
self._finder_options.browser_options.profile_dir)
return False
# If the browser is remote, a local download has no effect.
if self._possible_browser.IsRemote():
return False
return True
def _DownloadPregeneratedProfileArchive(self):
"""Download and extract the profile directory archive if one exists.
On success, updates self._finder_options.browser_options.profile_dir with
the directory of the extracted profile.
"""
# Download profile directory from cloud storage.
test_data_dir = os.path.join(util.GetChromiumSrcDir(), 'tools', 'perf',
'generated_profiles',
self._possible_browser.target_os)
archive_name = self.GetPregeneratedProfileArchive()
generated_profile_archive_path = os.path.normpath(
os.path.join(test_data_dir, archive_name))
try:
cloud_storage.GetIfChanged(generated_profile_archive_path,
cloud_storage.PUBLIC_BUCKET)
except (cloud_storage.CredentialsError,
cloud_storage.PermissionError) as e:
if os.path.exists(generated_profile_archive_path):
# If the profile directory archive exists, assume the user has their
# own local copy and simply warn.
logging.warning('Could not download Profile archive: %s',
generated_profile_archive_path)
else:
# If the archive profile directory doesn't exist, this is fatal.
logging.error('Cannot run without required profile archive: %s. '
'If you believe you have credentials, follow the '
'instructions below.',
generated_profile_archive_path)
logging.error(str(e))
sys.exit(-1)
# Check to make sure the zip file exists.
if not os.path.isfile(generated_profile_archive_path):
raise Exception("Profile directory archive not downloaded: ",
generated_profile_archive_path)
# The location to extract the profile into.
extracted_profile_dir_path = (
os.path.splitext(generated_profile_archive_path)[0])
# Unzip profile directory.
with zipfile.ZipFile(generated_profile_archive_path) as f:
try:
f.extractall(os.path.dirname(generated_profile_archive_path))
except Exception as e:
# Cleanup any leftovers from unzipping.
if os.path.exists(extracted_profile_dir_path):
shutil.rmtree(extracted_profile_dir_path)
logging.error("Error extracting profile directory zip file: %s", e)
sys.exit(-1)
# Run with freshly extracted profile directory.
logging.info("Using profile archive directory: %s",
extracted_profile_dir_path)
self._finder_options.browser_options.profile_dir = (
extracted_profile_dir_path)
class SharedMobilePageState(SharedPageState):
_device_type = 'mobile'
class SharedDesktopPageState(SharedPageState):
_device_type = 'desktop'
class SharedTabletPageState(SharedPageState):
_device_type = 'tablet'
class Shared10InchTabletPageState(SharedPageState):
_device_type = 'tablet_10_inch'
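# The three-line subclasses above are the whole device-selection mechanism:
# _device_type is read in SharedPageState.__init__ and forwarded to
# _PrepareFinderOptions, which assigns it to
# browser_options.browser_user_agent_type. A hypothetical story set entry
# would pick one of them roughly like this (the URL is invented, and
# page_module stands for telemetry.page.page):
#
#   story_set.AddStory(page_module.Page(
#       'http://example.com', story_set,
#       shared_page_state_class=SharedDesktopPageState))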
|
bsd-3-clause
| 8,667,519,244,234,333,000
| 39.277207
| 80
| 0.69248
| false
| 4.03767
| true
| false
| false
|
AusTac/parma
|
b3/lib/corestats.py
|
1
|
5732
|
#!/usr/bin/env python
# corestats.py (COREy STATS)
# Copyright (c) 2006-2007, Corey Goldberg (corey@goldb.org)
# updated on 2010-09 by GrosBedo
#
# statistical calculation class
# for processing numeric sequences
#
# license: GNU LGPL
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# CHANGELOG:
# 2010-09-14 - GrosBedo:
# * enhanced variance(), no more memory leak
# 2010-09-13 - GrosBedo:
# * added variance()
# * added mode()
# * added unique()
# * fixed median() algo
# 2010-09-09 - GrosBedo:
# * added percentileforvalue() (inverse of valueforpercentile() )
# * CRITICAL: removed the init function and the self.sequence float conversion (which was a BIG memory hog!)
import sys, math
class Stats:
def sum(self, sequence):
if len(sequence) < 1:
return None
else:
return sum(sequence)
def count(self, sequence):
return len(sequence)
def min(self, sequence):
if len(sequence) < 1:
return None
else:
return min(sequence)
def max(self, sequence):
if len(sequence) < 1:
return None
else:
return max(sequence)
def mean(self, sequence):
if len(sequence) < 1:
return None
else:
return float(sum(sequence)) / len(sequence)
def median(self, sequence):
if len(sequence) < 1:
return None
else:
sequence.sort()
element_idx = float(len(sequence)) / 2
if (element_idx != int(element_idx)):
median1 = sequence[int(math.floor(element_idx))]
median2 = sequence[int(math.ceil(element_idx))]
return float(median1 + median2) / 2
else:
return sequence[int(element_idx)]
def modeold(self, sequence):
results = {}
for item in sequence:
results.setdefault(item, 0) # if the index does not already exist, create it and set a value of 0
results[item] += 1
results = sorted(results.iteritems(), key=lambda (k,v):(v,k), reverse=True) # Sort by value (count), then if 2 keys have the same count, it will sort them by their keys
return results
def mode(self, sequence):
"""
Enhanced version of mode(), inspired by statlib/stats.py
The advantage is that this function (as well as mode) can return several modes at once (so you can see the next most frequent values)
"""
scores = self.unique(sequence)
scores.sort()
freq = {}
for item in scores:
freq.setdefault(item, 0) # if the index does not already exist, create it and set a value of 0
freq[item] = sequence.count(item)
results = sorted(freq.iteritems(), key=lambda (k,v):(v,k), reverse=True) # Sort by value (count), then if 2 keys have the same count, it will sort them by their keys
return results
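# Example (hypothetical data): Stats().mode([1, 2, 2, 3, 3, 3]) returns
# [(3, 3), (2, 2), (1, 1)] -- (value, count) pairs, most frequent first,
# so the next most frequent values are visible as well.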
def variance(self, sequence):
if len(sequence) < 1:
return None
else:
avg = self.mean(sequence)
sdsq = 0
for i in sequence:
sdsq += (i - avg) ** 2
#sdsq = sum([(i - avg) ** 2 for i in sequence]) # this one-liner hogs a lot of memory, avoid
variance = (float(sdsq) / (len(sequence) - 1))
return variance
def stdev(self, sequence):
if len(sequence) < 1:
return None
else:
variance = self.variance(sequence)
stdev = float(variance) ** 0.5
return stdev
def valueforpercentile(self, sequence, percentile):
if len(sequence) < 1:
value = None
elif (percentile > 100):
sys.stderr.write('ERROR: percentile must be <= 100. you supplied: %s\n'% percentile)
value = None
elif (percentile == 100):
value = max(sequence)
else:
element_idx = int(len(sequence) * (float(percentile) / 100.0))
sequence.sort()
value = sequence[element_idx]
return value
def percentileforvalue(self, sequence, value):
maxnb = max(sequence)
minnb = min(sequence)
if len(sequence) < 1:
percentile = None
elif (value > maxnb or value < minnb ):
#sys.stderr.write('ERROR: value must be between %s < value < %s. you supplied: %s\n'% (minnb, maxnb, value))
#percentile = None
if (value > maxnb):
percentile = 100
else:
percentile = 0
else:
sequence.sort()
sequence.reverse()
element_idx = sequence.index(value) # list.index() returns the first occurrence, but we want to enclose all equal values, so we must reverse the sequence and do some calculations in order to get the right value
element_idx = (len(sequence) - element_idx)
percentile = float(element_idx) * 100.0 / len(sequence)
return percentile
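# Example (hypothetical data): with sequence = [1, 2, 3, 4, 5],
# valueforpercentile(sequence, 40) returns 3 (the element at index
# int(5 * 0.40) == 2 after sorting), while percentileforvalue(sequence, 3)
# returns 60.0 (three of the five values are <= 3). The two functions are
# roughly, but not exactly, inverse because of the differing index
# conventions.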
def unique(self, sequence):
return list(set(sequence))
# Sample script using this class:
# -------------------------------------------
# #!/usr/bin/env python
# import corestats
#
# sequence = [1, 2.5, 7, 13.4, 8.0]
# stats = corestats.Stats()
# print stats.mean(sequence)
# print stats.valueforpercentile(sequence, 90)
# -------------------------------------------
|
gpl-2.0
| -1,045,647,948,741,987,700
| 31.948276
| 221
| 0.565422
| false
| 4.022456
| false
| false
| false
|
kyuridenamida/atcoder-tools
|
tests/utils/gzip_controller.py
|
2
|
1295
|
import os
import shutil
import tarfile
class GZipController:
def __init__(self, target_dir, gzip_file_path, main_dirname):
self.target_dir = target_dir
self.gzip_file_path = gzip_file_path
self.main_dirname = main_dirname
def create_dir(self):
tf = tarfile.open(self.gzip_file_path, 'r')
tf.extractall(self.target_dir)
main_dir_path = os.path.join(self.target_dir, self.main_dirname)
if os.path.exists(main_dir_path):
return main_dir_path
raise FileNotFoundError("{} is not found".format(main_dir_path))
def remove_dir(self):
shutil.rmtree(self.target_dir)
def _make_data_full_path(filename: str):
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
filename)
def make_tst_data_controller(target_dir: str):
return GZipController(target_dir,
_make_data_full_path(
'../resources/common/test_data.tar.gz'),
"test_data")
def make_html_data_controller(target_dir: str):
return GZipController(target_dir,
_make_data_full_path(
'../resources/common/problem_htmls.tar.gz'),
"problem_htmls")
|
mit
| -8,670,779,840,069,245,000
| 29.833333
| 74
| 0.579151
| false
| 3.7
| false
| false
| false
|
davidhdz/crits
|
crits/domains/handlers.py
|
2
|
33476
|
import json
import re
import datetime
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from mongoengine.base import ValidationError
from crits.core import form_consts
from crits.core.crits_mongoengine import EmbeddedSource, EmbeddedCampaign
from crits.core.crits_mongoengine import json_handler, create_embedded_source
from crits.core.handsontable_tools import convert_handsontable_to_rows, parse_bulk_upload
from crits.core.handlers import build_jtable, jtable_ajax_list, jtable_ajax_delete
from crits.core.data_tools import convert_string_to_bool
from crits.core.handlers import csv_export
from crits.core.user_tools import user_sources, is_user_favorite
from crits.core.user_tools import is_user_subscribed
from crits.domains.domain import Domain, TLD
from crits.domains.forms import AddDomainForm
from crits.ips.ip import IP
from crits.ips.handlers import validate_and_normalize_ip
from crits.notifications.handlers import remove_user_from_notification
from crits.objects.handlers import object_array_to_dict, validate_and_add_new_handler_object
from crits.relationships.handlers import forge_relationship
from crits.services.handlers import run_triage, get_supported_services
def get_valid_root_domain(domain):
"""
Validate the given domain and TLD, and if valid, parse out the root domain.
:param domain: the domain to validate and parse
:type domain: str
:returns: tuple: (Valid root domain, Valid FQDN, Error message)
"""
root = fqdn = error = ""
black_list = "/:@\ "
domain = domain.strip()
if any(c in black_list for c in domain):
error = 'Domain cannot contain space or characters %s' % (black_list)
else:
root = tld_parser.parse(domain)
if root == "no_tld_found_error":
error = 'No valid TLD found'
root = ""
else:
fqdn = domain.lower()
return (root, fqdn, error)
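# Example (hypothetical input), assuming the TLD parser recognizes the
# 'co.uk' suffix and returns the lower-cased registered domain:
#   get_valid_root_domain('Sub.Example.co.uk')
#     -> ('example.co.uk', 'sub.example.co.uk', '')
#   get_valid_root_domain('bad domain.com')
#     -> ('', '', 'Domain cannot contain space or characters ...')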
def get_domain_details(domain, analyst):
"""
Generate the data to render the Domain details template.
:param domain: The name of the Domain to get details for.
:type domain: str
:param analyst: The user requesting this information.
:type analyst: str
:returns: template (str), arguments (dict)
"""
template = None
allowed_sources = user_sources(analyst)
dmain = Domain.objects(domain=domain,
source__name__in=allowed_sources).first()
if not dmain:
error = ("Either no data exists for this domain"
" or you do not have permission to view it.")
template = "error.html"
args = {'error': error}
return template, args
dmain.sanitize_sources(username="%s" % analyst,
sources=allowed_sources)
# remove pending notifications for user
remove_user_from_notification("%s" % analyst, dmain.id, 'Domain')
# subscription
subscription = {
'type': 'Domain',
'id': dmain.id,
'subscribed': is_user_subscribed("%s" % analyst,
'Domain',
dmain.id),
}
#objects
objects = dmain.sort_objects()
#relationships
relationships = dmain.sort_relationships("%s" % analyst, meta=True)
# relationship
relationship = {
'type': 'Domain',
'value': dmain.id
}
#comments
comments = {'comments': dmain.get_comments(),
'url_key':dmain.domain}
#screenshots
screenshots = dmain.get_screenshots(analyst)
# favorites
favorite = is_user_favorite("%s" % analyst, 'Domain', dmain.id)
# services
service_list = get_supported_services('Domain')
# analysis results
service_results = dmain.get_analysis_results()
args = {'objects': objects,
'relationships': relationships,
'comments': comments,
'favorite': favorite,
'relationship': relationship,
'subscription': subscription,
'screenshots': screenshots,
'domain': dmain,
'service_list': service_list,
'service_results': service_results}
return template, args
def generate_domain_csv(request):
"""
Generate a CSV file of the Domain information
:param request: The request for this CSV.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
response = csv_export(request,Domain)
return response
def generate_domain_jtable(request, option):
"""
Generate the jtable data for rendering in the list template.
:param request: The request for this jtable.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
obj_type = Domain
type_ = "domain"
mapper = obj_type._meta['jtable_opts']
if option == "jtlist":
# Sets display url
details_url = mapper['details_url']
details_url_key = mapper['details_url_key']
fields = mapper['fields']
response = jtable_ajax_list(obj_type,
details_url,
details_url_key,
request,
includes=fields)
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
if option == "jtdelete":
response = {"Result": "ERROR"}
if jtable_ajax_delete(obj_type,request):
response = {"Result": "OK"}
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
jtopts = {
'title': "Domains",
'default_sort': mapper['default_sort'],
'listurl': reverse('crits.%ss.views.%ss_listing' % (type_, type_),
args=('jtlist',)),
'deleteurl': reverse('crits.%ss.views.%ss_listing' % (type_, type_),
args=('jtdelete',)),
'searchurl': reverse(mapper['searchurl']),
'fields': mapper['jtopts_fields'],
'hidden_fields': mapper['hidden_fields'],
'linked_fields': mapper['linked_fields'],
'details_link': mapper['details_link']
}
jtable = build_jtable(jtopts,request)
jtable['toolbar'] = [
{
'tooltip': "'All Domains'",
'text': "'All'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'New Domains'",
'text': "'New'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'New'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'In Progress Domains'",
'text': "'In Progress'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'In Progress'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Analyzed Domains'",
'text': "'Analyzed'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'Analyzed'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Deprecated Domains'",
'text': "'Deprecated'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'Deprecated'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Add Domain'",
'text': "'Add Domain'",
'click': "function () {$('#new-domain').click()}",
},
]
if option == "inline":
return render_to_response("jtable.html",
{'jtable': jtable,
'jtid': '%s_listing' % type_,
'button' : '%ss_tab' % type_},
RequestContext(request))
else:
return render_to_response("%s_listing.html" % type_,
{'jtable': jtable,
'jtid': '%s_listing' % type_},
RequestContext(request))
def add_new_domain_via_bulk(data, rowData, request, errors,
is_validate_only=False, cache={}):
"""
Wrapper for add_new_domain to pass in rowData.
:param data: The data about the domain.
:type data: dict
:param rowData: Any objects that need to be added to the domain.
:type rowData: dict
:param request: The Django request.
:type request: :class:`django.http.HttpRequest`
:param errors: A list of current errors to append to.
:type errors: list
:param is_validate_only: Only validate the data and return any errors.
:type is_validate_only: boolean
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: tuple
"""
return add_new_domain(data, request, errors, rowData=rowData,
is_validate_only=is_validate_only, cache=cache)
def retrieve_domain(domain, cache):
"""
Retrieves a domain by checking cache first. If not in cache
then queries mongo for the domain.
:param domain: The domain name.
:type domain: str
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: :class:`crits.domains.domain.Domain`
"""
domain_obj = None
cached_results = cache.get(form_consts.Domain.CACHED_RESULTS)
if cached_results:
domain_obj = cached_results.get(domain.lower())
if not domain_obj:
domain_obj = Domain.objects(domain__iexact=domain).first()
return domain_obj
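# Hedged illustration (not part of the original module) of the cache layout
# retrieve_domain() expects: a dict of lower-cased domain names to Domain
# objects, stored under the form_consts.Domain.CACHED_RESULTS key.
#
#   cache = {form_consts.Domain.CACHED_RESULTS: {'example.com': domain_obj}}
#   retrieve_domain('EXAMPLE.com', cache)   # served from cache, no DB query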
def add_new_domain(data, request, errors, rowData=None, is_validate_only=False, cache={}):
"""
Add a new domain to CRITs.
:param data: The data about the domain.
:type data: dict
:param request: The Django request.
:type request: :class:`django.http.HttpRequest`
:param errors: A list of current errors to append to.
:type errors: list
:param rowData: Any objects that need to be added to the domain.
:type rowData: dict
:param is_validate_only: Only validate the data and return any errors.
:type is_validate_only: boolean
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: tuple (<result>, <errors>, <retVal>)
"""
result = False
retVal = {}
domain = data['domain']
add_ip = data.get('add_ip')
ip = data.get('ip')
ip_type = data.get('ip_type')
if add_ip:
error = validate_and_normalize_ip(ip, ip_type)[1]
if error:
errors.append(error)
if is_validate_only:
error = get_valid_root_domain(domain)[2]
if error:
errors.append(error)
# check for duplicate domains
fqdn_domain = retrieve_domain(domain, cache)
if fqdn_domain:
if isinstance(fqdn_domain, Domain):
resp_url = reverse('crits.domains.views.domain_detail', args=[domain])
message = ('Warning: Domain already exists: '
'<a href="%s">%s</a>' % (resp_url, domain))
retVal['message'] = message
retVal['status'] = form_consts.Status.DUPLICATE
retVal['warning'] = message
else:
result_cache = cache.get(form_consts.Domain.CACHED_RESULTS)
result_cache[domain.lower()] = True
elif not errors:
username = request.user.username
reference = data.get('domain_reference')
source_name = data.get('domain_source')
method = data.get('domain_method')
source = [create_embedded_source(source_name, reference=reference,
method=method, analyst=username)]
bucket_list = data.get(form_consts.Common.BUCKET_LIST_VARIABLE_NAME)
ticket = data.get(form_consts.Common.TICKET_VARIABLE_NAME)
if data.get('campaign') and data.get('confidence'):
campaign = [EmbeddedCampaign(name=data.get('campaign'),
confidence=data.get('confidence'),
analyst=username)]
else:
campaign = []
retVal = upsert_domain(domain, source, username, campaign,
bucket_list=bucket_list, ticket=ticket, cache=cache)
if not retVal['success']:
errors.append(retVal.get('message'))
retVal['message'] = ""
else:
new_domain = retVal['object']
ip_result = {}
if add_ip:
if data.get('same_source'):
ip_source = source_name
ip_method = method
ip_reference = reference
else:
ip_source = data.get('ip_source')
ip_method = data.get('ip_method')
ip_reference = data.get('ip_reference')
from crits.ips.handlers import ip_add_update
ip_result = ip_add_update(ip,
ip_type,
ip_source,
ip_method,
ip_reference,
campaign=campaign,
analyst=username,
bucket_list=bucket_list,
ticket=ticket,
cache=cache)
if not ip_result['success']:
errors.append(ip_result['message'])
else:
#add a relationship with the new IP address
new_ip = ip_result['object']
if new_domain and new_ip:
new_domain.add_relationship(rel_item=new_ip,
rel_type='Resolved_To',
analyst=username,
get_rels=False)
new_domain.save(username=username)
new_ip.save(username=username)
#set the URL for viewing the new data
resp_url = reverse('crits.domains.views.domain_detail', args=[domain])
if retVal['is_domain_new'] == True:
retVal['message'] = ('Success! Click here to view the new domain: '
'<a href="%s">%s</a>' % (resp_url, domain))
else:
message = ('Updated existing domain: <a href="%s">%s</a>' % (resp_url, domain))
retVal['message'] = message
retVal[form_consts.Status.STATUS_FIELD] = form_consts.Status.DUPLICATE
retVal['warning'] = message
#add indicators
if data.get('add_indicators'):
from crits.indicators.handlers import create_indicator_from_tlo
# If we have an IP object, add an indicator for that.
if ip_result.get('success'):
ip = ip_result['object']
result = create_indicator_from_tlo('IP',
ip,
username,
ip_source,
add_domain=False)
ip_ind = result.get('indicator')
if not result['success']:
errors.append(result['message'])
# Add an indicator for the domain.
result = create_indicator_from_tlo('Domain',
new_domain,
username,
source_name,
add_domain=False)
if not result['success']:
errors.append(result['message'])
elif ip_result.get('success') and ip_ind:
forge_relationship(left_class=result['indicator'],
right_class=ip_ind,
rel_type='Resolved_To',
analyst=username)
result = True
# This block validates, and may also add, objects to the Domain
if retVal.get('success') or is_validate_only == True:
if rowData:
objectsData = rowData.get(form_consts.Common.OBJECTS_DATA)
# add new objects if they exist
if objectsData:
objectsData = json.loads(objectsData)
current_domain = retrieve_domain(domain, cache)
for object_row_counter, objectData in enumerate(objectsData, 1):
if current_domain != None:
# if the domain exists then try to add objects to it
if isinstance(current_domain, Domain) == True:
objectDict = object_array_to_dict(objectData,
"Domain",
current_domain.id)
else:
objectDict = object_array_to_dict(objectData,
"Domain",
"")
current_domain = None
else:
objectDict = object_array_to_dict(objectData,
"Domain",
"")
(obj_result,
errors,
obj_retVal) = validate_and_add_new_handler_object(
None, objectDict, request, errors, object_row_counter,
is_validate_only=is_validate_only,
cache=cache, obj=current_domain)
if not obj_result:
retVal['success'] = False
return result, errors, retVal
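# Hedged usage sketch (assumes a configured CRITs/Django environment and an
# authenticated request; the field values are illustrative only). Only the
# keys that add_new_domain() actually reads are shown:
#
#   data = {'domain': 'www.example.com',
#           'domain_source': 'MySource',
#           'domain_method': '',
#           'domain_reference': '',
#           'add_ip': False}
#   result, errors, retVal = add_new_domain(data, request, [])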
def edit_domain_name(domain, new_domain, analyst):
"""
Edit domain name for an entry.
:param domain: The domain name to edit.
:type domain: str
:param new_domain: The new domain name.
:type new_domain: str
:param analyst: The user editing the domain name.
:type analyst: str
:returns: boolean
"""
# validate new domain
(root, validated_domain, error) = get_valid_root_domain(new_domain)
if error:
return False
domain = Domain.objects(domain=domain).first()
if not domain:
return False
try:
domain.domain = validated_domain
domain.save(username=analyst)
return True
except ValidationError:
return False
def upsert_domain(domain, source, username=None, campaign=None,
confidence=None, bucket_list=None, ticket=None, cache={}):
"""
Add or update a domain/FQDN. Campaign is assumed to be a list of campaign
dictionary objects.
:param domain: The domain to add/update.
:type domain: str
:param source: The name of the source.
:type source: str
:param username: The user adding/updating the domain.
:type username: str
:param campaign: The campaign to attribute to this domain.
:type campaign: list, str
:param confidence: Confidence for the campaign attribution.
:type confidence: str
:param bucket_list: List of buckets to add to this domain.
:type bucket_list: list, str
:param ticket: The ticket for this domain.
:type ticket: str
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: dict with keys:
"success" (boolean),
"object" the domain that was added,
"is_domain_new" (boolean)
"""
# validate domain and grab root domain
(root, domain, error) = get_valid_root_domain(domain)
if error:
return {'success': False, 'message': error}
is_fqdn_domain_new = False
is_root_domain_new = False
if not campaign:
campaign = []
# assume it's a list, but check if it's a string
elif isinstance(campaign, basestring):
c = EmbeddedCampaign(name=campaign, confidence=confidence, analyst=username)
campaign = [c]
# assume it's a list, but check if it's a string
if isinstance(source, basestring):
s = EmbeddedSource()
s.name = source
instance = EmbeddedSource.SourceInstance()
instance.reference = ''
instance.method = ''
instance.analyst = username
instance.date = datetime.datetime.now()
s.instances = [instance]
source = [s]
fqdn_domain = None
root_domain = None
cached_results = cache.get(form_consts.Domain.CACHED_RESULTS)
if cached_results != None:
if domain != root:
fqdn_domain = cached_results.get(domain)
root_domain = cached_results.get(root)
else:
root_domain = cached_results.get(root)
else:
#first find the domain(s) if it/they already exist
root_domain = Domain.objects(domain=root).first()
if domain != root:
fqdn_domain = Domain.objects(domain=domain).first()
#if they don't exist, create them
if not root_domain:
root_domain = Domain()
root_domain.domain = root
root_domain.source = []
root_domain.record_type = 'A'
is_root_domain_new = True
if cached_results != None:
cached_results[root] = root_domain
if domain != root and not fqdn_domain:
fqdn_domain = Domain()
fqdn_domain.domain = domain
fqdn_domain.source = []
fqdn_domain.record_type = 'A'
is_fqdn_domain_new = True
if cached_results != None:
cached_results[domain] = fqdn_domain
# if new or found, append the new source(s)
for s in source:
if root_domain:
root_domain.add_source(s)
if fqdn_domain:
fqdn_domain.add_source(s)
#campaigns
#both root and fqdn get campaigns updated
for c in campaign:
if root_domain:
root_domain.add_campaign(c)
if fqdn_domain:
fqdn_domain.add_campaign(c)
if username:
if root_domain:
root_domain.analyst = username
if fqdn_domain:
fqdn_domain.analyst = username
if bucket_list:
if root_domain:
root_domain.add_bucket_list(bucket_list, username)
if fqdn_domain:
fqdn_domain.add_bucket_list(bucket_list, username)
if ticket:
if root_domain:
root_domain.add_ticket(ticket, username)
if fqdn_domain:
fqdn_domain.add_ticket(ticket, username)
# save
try:
if root_domain:
root_domain.save(username=username)
if fqdn_domain:
fqdn_domain.save(username=username)
except Exception, e:
return {'success': False, 'message': e}
#Add relationships between fqdn, root
if fqdn_domain and root_domain:
root_domain.add_relationship(rel_item=fqdn_domain,
rel_type="Supra-domain_Of",
analyst=username,
get_rels=False)
root_domain.save(username=username)
fqdn_domain.save(username=username)
# run domain triage
if is_fqdn_domain_new:
fqdn_domain.reload()
run_triage(fqdn_domain, username)
if is_root_domain_new:
root_domain.reload()
run_triage(root_domain, username)
# return fqdn if they added an fqdn, or root if they added a root
if fqdn_domain:
return {'success': True, 'object': fqdn_domain, 'is_domain_new': is_fqdn_domain_new}
else:
return {'success': True, 'object': root_domain, 'is_domain_new': is_root_domain_new}
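# Hedged usage sketch (Django shell, assuming a configured CRITs environment;
# the source and user names are illustrative). A plain string source is
# wrapped into an EmbeddedSource by the function itself:
#
#   result = upsert_domain('www.example.com', 'MySource', username='analyst1')
#   if result['success']:
#       print result['object'].domain, result['is_domain_new']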
def update_tlds(data=None):
"""
Update the TLD list in the database.
:param data: The TLD data.
:type data: file handle.
:returns: dict with key "success" (boolean)
"""
if not data:
return {'success': False}
line = data.readline()
while line:
line = line.rstrip()
if line and not line.startswith('//'):
TLD.objects(tld=line).update_one(set__tld=line, upsert=True)
line = data.readline()
# Rebind the module-level tld_parser so it picks up the new TLD info
global tld_parser
tld_parser = etld()
return {'success': True}
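# Hedged usage sketch: update_tlds() expects an open file handle in the
# public-suffix-list style parsed above (one TLD per line, lines starting
# with '//' skipped). The filename here is an assumption, not part of the
# module:
#
#   with open('effective_tld_names.dat') as fh:
#       update_tlds(fh)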
class etld(object):
"""
TLD class to assist with extracting root domains.
"""
def __init__(self):
self.rules = {}
etlds = TLD.objects()
for etld in etlds:
tld = etld.tld.split('.')[-1]
self.rules.setdefault(tld, [])
self.rules[tld].append(re.compile(self.regexpize(etld.tld)))
def regexpize(self, etld):
"""
Generate regex for this TLD.
:param etld: The TLD to generate regex for.
:returns: str
"""
etld = etld[::-1].replace('.',
'\\.').replace('*',
'[^\\.]*').replace('!',
'')
return '^(%s)\.(.*)$' % etld
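    # Worked example (hedged, illustrative): the rule '*.uk' reversed is
    # 'ku.*', which regexpize() turns into the pattern '^(ku\.[^\.]*)\.(.*)$'.
    # Matched against a reversed hostname such as 'ku.elpmaxe.www' (i.e.
    # 'www.example.uk'), group(1) reversed gives 'example.uk'.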
def parse(self, hostname):
"""
Parse the domain.
:param hostname: The domain to parse.
:returns: str
"""
try:
hostname = hostname.lower()
tld = hostname.split('.')[-1]
hostname = hostname[::-1]
etld = ''
for rule in self.rules[tld]:
m = rule.match(hostname)
if m and m.group(1) > etld:
mytld = "%s.%s" % ( m.group(2)[::-1].split(".")[-1],
m.group(1)[::-1])
if not mytld:
return ("no_tld_found_error")
return (mytld)
except Exception:
return ("no_tld_found_error")
def parse_row_to_bound_domain_form(request, rowData, cache):
"""
Parse a row in bulk upload into form data that can be used to add a Domain.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param rowData: The objects to add for the Domain.
:type rowData: dict
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: :class:`crits.domains.forms.AddDomainForm`
"""
bound_domain_form = None
# TODO Add common method to convert data to string
domain_name = rowData.get(form_consts.Domain.DOMAIN_NAME, "").strip()
campaign = rowData.get(form_consts.Domain.CAMPAIGN, "")
confidence = rowData.get(form_consts.Domain.CAMPAIGN_CONFIDENCE, "")
domain_source = rowData.get(form_consts.Domain.DOMAIN_SOURCE, "")
domain_method = rowData.get(form_consts.Domain.DOMAIN_METHOD, "")
domain_reference = rowData.get(form_consts.Domain.DOMAIN_REFERENCE, "")
#is_add_ip = convert_string_to_bool(rowData.get(form_consts.Domain.ADD_IP_ADDRESS, ""))
is_add_ip = False
ip = rowData.get(form_consts.Domain.IP_ADDRESS, "")
ip_type = rowData.get(form_consts.Domain.IP_TYPE, "")
created = rowData.get(form_consts.Domain.IP_DATE, "")
#is_same_source = convert_string_to_bool(rowData.get(form_consts.Domain.SAME_SOURCE, "False"))
is_same_source = False
ip_source = rowData.get(form_consts.Domain.IP_SOURCE, "")
ip_method = rowData.get(form_consts.Domain.IP_METHOD, "")
ip_reference = rowData.get(form_consts.Domain.IP_REFERENCE, "")
is_add_indicators = convert_string_to_bool(rowData.get(form_consts.Domain.ADD_INDICATORS, "False"))
bucket_list = rowData.get(form_consts.Common.BUCKET_LIST, "")
ticket = rowData.get(form_consts.Common.TICKET, "")
if ip or created or ip_source or ip_method or ip_reference:
is_add_ip = True
if is_add_ip == True:
data = {'domain': domain_name,
'campaign': campaign,
'confidence': confidence,
'domain_source': domain_source,
'domain_method': domain_method,
'domain_reference': domain_reference,
'add_ip': is_add_ip,
'ip': ip,
'ip_type': ip_type,
'created': created,
'same_source': is_same_source,
'ip_source': ip_source,
'ip_method': ip_method,
'ip_reference': ip_reference,
'add_indicators': is_add_indicators,
'bucket_list': bucket_list,
'ticket': ticket}
bound_domain_form = cache.get("domain_ip_form")
if bound_domain_form == None:
bound_domain_form = AddDomainForm(request.user, data)
cache['domain_ip_form'] = bound_domain_form
else:
bound_domain_form.data = data
else:
data = {'domain': domain_name,
'campaign': campaign,
'confidence': confidence,
'domain_source': domain_source,
'domain_method': domain_method,
'domain_reference': domain_reference,
'add_ip': is_add_ip,
'bucket_list': bucket_list,
'ticket': ticket}
bound_domain_form = cache.get("domain_form")
if bound_domain_form == None:
bound_domain_form = AddDomainForm(request.user, data)
cache['domain_form'] = bound_domain_form
else:
bound_domain_form.data = data
if bound_domain_form != None:
bound_domain_form.full_clean()
return bound_domain_form
def process_bulk_add_domain(request, formdict):
"""
Performs the bulk add of domains by parsing the request data. Batches
some data into a cache object for performance by reducing large
amounts of single database queries.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param formdict: The form representing the bulk uploaded data.
:type formdict: dict
:returns: :class:`django.http.HttpResponse`
"""
domain_names = []
ip_addresses = []
cached_domain_results = {}
cached_ip_results = {}
cleanedRowsData = convert_handsontable_to_rows(request)
for rowData in cleanedRowsData:
if rowData != None:
if rowData.get(form_consts.Domain.DOMAIN_NAME) != None:
domain = rowData.get(form_consts.Domain.DOMAIN_NAME).strip().lower()
(root_domain, full_domain, error) = get_valid_root_domain(domain)
domain_names.append(full_domain)
if domain != root_domain:
domain_names.append(root_domain)
if rowData.get(form_consts.Domain.IP_ADDRESS) != None:
ip_addr = rowData.get(form_consts.Domain.IP_ADDRESS)
ip_type = rowData.get(form_consts.Domain.IP_TYPE)
(ip_addr, error) = validate_and_normalize_ip(ip_addr, ip_type)
ip_addresses.append(ip_addr)
domain_results = Domain.objects(domain__in=domain_names)
ip_results = IP.objects(ip__in=ip_addresses)
for domain_result in domain_results:
cached_domain_results[domain_result.domain] = domain_result
for ip_result in ip_results:
cached_ip_results[ip_result.ip] = ip_result
cache = {form_consts.Domain.CACHED_RESULTS: cached_domain_results,
form_consts.IP.CACHED_RESULTS: cached_ip_results,
'cleaned_rows_data': cleanedRowsData}
response = parse_bulk_upload(request, parse_row_to_bound_domain_form, add_new_domain_via_bulk, formdict, cache)
return response
# Global definition of the TLD parser -- etld.
# This is a workaround: a single module-level instance is used because
# etld.__init__() takes ~0.5 seconds to run, which caused performance problems
# (high CPU usage) during bulk uploads of domains when each domain
# instantiated its own etld() object.
# TODO investigate whether updating the TLDs leaves this global instance stale.
tld_parser = etld()
|
mit
| 5,617,783,682,987,655,000
| 36.613483
| 119
| 0.549588
| false
| 4.25795
| false
| false
| false
|
JonathanSchmalhofer/RecursiveStereoUAV
|
js_recursive_stereo/python_vs_matlab/airsim/generate_pcl_calib.py
|
1
|
2784
|
import numpy as np
import cv2
# Copied from https://github.com/utiasSTARS/pykitti/blob/master/pykitti/utils.py
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
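# Hedged illustration (the numbers are made up): a KITTI-style calib line such
# as
#   P0: 7.070912e+02 0.000000e+00 6.018873e+02
# is parsed above into data['P0'] = np.array([707.0912, 0.0, 601.8873]).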
# Focal Length in [pix]
f = 573.2981
# u-Coordinate of Center Point in [pix]
c_u = 399.5661
# v-Coordinate of Center Point in [pix]
c_v = 295.6579
# baseline with respect to reference camera 0 in [m]
b = 0.1400
Q = np.matrix(
[[ 0, 0, 0, f],
[-1.0000, 0, 0, c_u],
[ 0, -1.0000, 0, c_v],
[ 0, 0, 1/b, 0]])
ply_header = '''ply
format ascii 1.0
element vertex %(vert_num)d
property float x
property float y
property float z
property uchar red
property uchar green
property uchar blue
end_header
'''
def write_ply(fn, verts, colors):
verts = verts.reshape(-1, 3)
colors = colors.reshape(-1, 3)
verts = np.hstack([verts, colors])
with open(fn, 'wb') as f:
f.write((ply_header % dict(vert_num=len(verts))).encode('utf-8'))
np.savetxt(f, verts, fmt='%f %f %f %d %d %d ')
# Following part for converting a disparity image to a PointCloud was originally copied from
# https://stackoverflow.com/questions/45325795/point-cloud-from-kitti-stereo-images
imgC = cv2.imread('../../resources/AirSimCameraCalibration/left/left_00000.png')
imgL = cv2.imread('../../resources/AirSimCameraCalibration/left/left_00000.png')
imgR = cv2.imread('../../resources/AirSimCameraCalibration/right/right_00000.png')
imgL = cv2.cvtColor( imgL, cv2.COLOR_RGB2GRAY )
imgR = cv2.cvtColor( imgR, cv2.COLOR_RGB2GRAY )
window_size = 9
minDisparity = 1
stereo = cv2.StereoSGBM_create(
blockSize=10,
numDisparities=64,
preFilterCap=10,
minDisparity=minDisparity,
P1=4 * 3 * window_size ** 2,
P2=32 * 3 * window_size ** 2
)
print('computing disparity...')
disp = stereo.compute(imgL, imgR).astype(np.float32) / 16.0
points = cv2.reprojectImageTo3D(disp, Q)
colors = cv2.cvtColor(imgC, cv2.COLOR_BGR2RGB)
mask = disp > disp.min()
out_points = points[mask]
out_colors = colors[mask]
out_fn = 'checkerboard.ply'
write_ply('checkerboard.ply', out_points, out_colors)
print('%s saved' % 'checkerboard.ply')
|
mit
| 6,907,530,882,995,174,000
| 31.383721
| 92
| 0.602011
| false
| 3.093333
| false
| false
| false
|
Tanych/CodeTracking
|
312-Burst-Balloons/solution.py
|
1
|
2616
|
class Solution(object):
def bursthelper(self,memo,nums,left,right):
if left+1==right: return 0
if memo[left][right]>0: return memo[left][right]
res=0
for i in xrange(left+1,right):
res=max(res,nums[left]*nums[i]*nums[right]+self.bursthelper(memo,nums,left,i)+\
self.bursthelper(memo,nums,i,right))
memo[left][right]=res
return res
def maxCoinsMemo(self,nums):
n=len(nums)
# drop any 0 balloons from nums up front,
# since bursting a 0 earns nothing
new_nums=[0]*(n+2)
cnt=1
for num in nums:
if num:
new_nums[cnt]=num
cnt+=1
# finish building new_nums: pad both ends with 1
new_nums[0]=new_nums[cnt]=1
cnt+=1
memo=[[0 for _ in xrange(cnt)] for _ in xrange(cnt)]
return self.bursthelper(memo,new_nums,0,cnt-1)
def dpmethod(self,nums):
n=len(nums)
# drop any 0 balloons from nums up front,
# since bursting a 0 earns nothing
new_nums=[0]*(n+2)
cnt=1
for num in nums:
if num:
new_nums[cnt]=num
cnt+=1
# finish building new_nums: pad both ends with 1
new_nums[0]=new_nums[cnt]=1
cnt+=1
dp=[[0 for _ in xrange(cnt)] for _ in xrange(cnt)]
# k is the diff between left and right
for k in xrange(2,cnt):
for left in xrange(0,cnt-k):
right=left+k
for i in xrange(left+1,right):
dp[left][right]=max(dp[left][right],new_nums[left]*new_nums[i]*new_nums[right]+dp[left][i]+dp[i][right])
return dp[0][cnt-1]
def maxCoins(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
"""
Extend the nums to 1+nums+1
"""
return self.dpmethod(nums)
# NOTE: the top-down memoized variant below is kept for reference but is
# unreachable because of the return above.
n=len(nums)
new_num=[1]+nums+[1]
# store the dpfun values
dp=[[0 for i in xrange(n+2)] for j in xrange(n+2)]
#get the dpfun
def dpfun(i,j):
#if done, return
if dp[i][j]>0:return dp[i][j]
# choose x in [i, j] as the last balloon to burst in this range;
# its payoff is new_num[i-1]*new_num[x]*new_num[j+1], because once every
# other balloon in (i, j) is gone, x's only neighbours are i-1 and j+1
for x in xrange(i,j+1):
dp[i][j]=max(dp[i][j],dpfun(i,x-1)+new_num[i-1]*new_num[x]*new_num[j+1]+dpfun(x+1,j))
return dp[i][j]
#return 1-n max value
return dpfun(1,n)
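if __name__ == '__main__':
    # Hedged sanity check using the classic example from the problem
    # statement: bursting [3,1,5,8] optimally yields 167.
    print Solution().maxCoins([3, 1, 5, 8])  # -> 167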
|
mit
| 2,221,113,808,342,528,500
| 32.126582
| 124
| 0.495413
| false
| 3.379845
| false
| false
| false
|
JoeJasinski/evesch
|
evesch/org/views.py
|
1
|
22743
|
# Create your views here.
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import render_to_response
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator
from django.db.models import Q
from django.contrib.auth import get_user_model
from evesch.core.lib import Message, ePage
from evesch.egroup.models import UserGroup
from evesch.euser.models import get_current_user
from evesch.org.models import Organization, OrgInvite
from evesch.event.models import EventType
from evesch.org.forms import OrganizationForm, OrganizationFormEdit, OrganizationJoinForm, OrganizationInviteMember
def org_browse(request, filter_abc=None, template_name=None):
public_orgs = Organization.objects.get_browsable_orgs()
if filter_abc:
#raise AssertionError(filter_abc)
public_orgs = public_orgs.filter(org_name__istartswith=filter_abc)
context = {'orgs':public_orgs,}
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def orgs_list(request, template_name=None):
current_user, message = get_current_user(request.user)
if not message:
all_orgs_page = ePage(1)
if request.GET.__contains__("all_orgs_page"):
try:
all_orgs_page.curr = int(request.GET['all_orgs_page'])
except ValueError:
all_orgs_page.curr = 1
orgs = Organization.objects.filter(org_active=True).order_by('org_name')
all_orgs_page.set_pages(Paginator(orgs, 3))
my_orgs_page = ePage(1)
if request.GET.__contains__("my_orgs_page"):
try:
my_orgs_page.curr = int(request.GET['my_orgs_page'])
#my_orgs_page.curr = int(request.GET.get('my_orgs_page',1))
except:
my_orgs_page.curr = 1
my_org_groups = UserGroup.objects.filter(pk__in=current_user.get_user_groups())
my_groups = orgs.filter(group_set__in=my_org_groups)
my_orgs = current_user.get_user_orgs().order_by('org_name')
jaz_orgs = []
for org in my_orgs:
org.user_perms = org.org_perms(current_user)
jaz_orgs.append(org)
my_orgs_page.set_pages(Paginator(jaz_orgs, 3))
#raise AssertionError(jaz_orgs[0].user_perms)
context = {'message':_("Index"),
'all_orgs_page':all_orgs_page,
'my_groups':my_groups,
'my_orgs_page':my_orgs_page,
'ajax_page_my':reverse('org_orgs_list_my_ajax',kwargs={}),
'ajax_page_all':reverse('org_orgs_list_all_ajax',kwargs={}),
}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def orgs_list_all(request, template_name=None):
message = None
if not request.is_ajax():
template_name = "core/message.html"
message = Message(title=_("Cannot Be Viewed"), text=_("Cannot view this page" ))
context = {'message':message,}
if not message:
current_user, message = get_current_user(request.user)
if not message:
all_orgs_page = ePage(1)
if request.GET.__contains__("all_orgs_page"):
try:
all_orgs_page.curr = int(request.GET['all_orgs_page'])
except:
all_orgs_page.curr = 1
orgs = Organization.objects.filter(org_active=True).order_by('org_name')
all_orgs_page.set_pages(Paginator(orgs, 3))
context = {'all_orgs_page':all_orgs_page, 'ajax_page_all':reverse('org_orgs_list_all_ajax',kwargs={}),}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def orgs_list_my(request, template_name=None):
message = None
if not request.is_ajax():
template_name = "core/message.html"
message = Message(title=_("Cannot Be Viewed"), text=_("Cannot view this page" ))
context = {'message':message,}
if not message:
current_user, message = get_current_user(request.user)
if not message:
orgs = Organization.objects.filter(org_active=True).order_by('org_name')
my_orgs_page = ePage(1)
if request.GET.__contains__("my_orgs_page"):
try:
my_orgs_page.curr = int(request.GET['my_orgs_page'])
except:
my_orgs_page.curr = 1
my_org_groups = UserGroup.objects.filter(pk__in=current_user.get_user_groups())
my_groups = orgs.filter(group_set__in=my_org_groups)
my_orgs = current_user.get_user_orgs().order_by('org_name')
jaz_orgs = []
for org in my_orgs:
org.user_perms = org.org_perms(current_user)
jaz_orgs.append(org)
my_orgs_page.set_pages(Paginator(jaz_orgs, 3))
#raise AssertionError(my_orgs_page.current_page().object_list)
context = {'my_orgs_page':my_orgs_page,'ajax_page_my':reverse('org_orgs_list_my_ajax',kwargs={}),}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def org_join(request, org_short_name, template_name=None):
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name, message)
if not message:
operms = current_org.org_perms(current_user)
if operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Already a Member"), text=_("You are already a member of this organization." ))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_join_org']:
template_name = "core/message.html"
message = Message(title=_("Approval Needed"), text=_("In order to join this organization, you need approval from the organization admin."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if request.method == 'POST':
current_user.user_organizations.add(current_org)
current_user.user_invites_set.filter(org=current_org).delete()
template_name = "core/message.html"
#message = Message(title="You have Joined the organization", text="Org Join Successful: " + org_user_group.group_name )
message = Message(title=_("You have Joined the organization"), text=_("Org Join Successful: %s" % (current_org.org_name,)) )
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
else:
form = OrganizationJoinForm()
context = {'form':form,'current_org':current_org}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_leave(request, org_short_name, template_name=None):
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
operms = current_org.org_perms(current_user)
if operms['is_memberof_org']:
if request.method == 'POST':
current_user.user_organizations.remove(current_org)
template_name = "core/message.html"
message = Message(title=_("Left Organization"), text=_("You have left the Organization"))
message.addlink(_("Continue"),reverse('org_orgs_list',kwargs={}))
else:
template_name = "core/message.html"
message = Message(title=_("Not a Member"), text=_("You cannot leave this organization because you are not a member of the organization."))
message.addlink(_("Back"),reverse('org_orgs_list',kwargs={}))
context = {'message':message, 'current_org':current_org, }
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_view(request,org_short_name,template_name=None):
""" Displays organization detail information """
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
members_page = ePage(1)
if request.GET.__contains__("members_page"):
try:
members_page.curr = int(request.GET['members_page'])
except:
members_page.curr = 1
members = current_org.get_members()
members_page.set_pages(Paginator(members, 48))
#raise AssertionError(members_page.prev)
org_eventtypes = current_org.get_eventtypes()
context = {'message':_("Org View"),'current_org':current_org,'org_eventtypes':org_eventtypes, 'members':members_page, 'ajax_page_members': reverse('org_org_user_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_members(request,org_short_name,template_name=None):
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_user, message = get_current_user(request.user)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Edit Org"), text=_("You cannot view members of an organization that you do not belong to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
members_page = ePage(1)
if request.GET.__contains__("members_page"):
try:
members_page.curr = int(request.GET['members_page'])
except:
members_page.curr = 1
members = current_org.get_members()
members_page.set_pages(Paginator(members, 48))
context = {'current_org':current_org,'members':members_page,'ajax_page_members': reverse('org_org_user_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_edit(request,org_short_name=None,template_name=None):
""" Edits an organization """
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_user, message = get_current_user(request.user)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Edit Org"), text=_("You cannot edit an organization that you do not belong to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_edit_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Edit Org"), text=_("You cannot edit this organization because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
show_dialog=False
if request.method == 'POST':
form = OrganizationFormEdit(request.POST, instance=current_org)
if form.is_valid():
form.save()
message = Message(title=_("Organization Changes Saved"), text=_("Organization Changes Saved"))
message.addlink(_("View"),current_org.get_absolute_url())
message.addlink(_("Edit"),reverse('org_org_edit',kwargs={'org_short_name':current_org.org_short_name,}))
if request.POST.get("dialog",'') == "False":
template_name = "core/message.html"
show_dialog=False
else:
show_dialog=True
context = {'org_short_name':org_short_name,'form':form,'current_org':current_org,'message':message,'show_dialog':show_dialog,}
else:
form = OrganizationFormEdit(auto_id=False,instance=current_org)
context = {'org_short_name':org_short_name,'form':form,'current_org':current_org}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name, context,context_instance=RequestContext(request))
@login_required
def org_remove(request,org_short_name=None,template_name=None):
""" Removes an organization """
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_user, message = get_current_user(request.user)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove Org"), text=_("You cannot remove an organization that you do not belong to."))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_remove_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove Org"), text=_("You cannot remove this organization because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
context = {'current_org':current_org}
if request.method == 'POST':
current_org.org_active = False
current_org.save()
return HttpResponseRedirect(reverse('org_orgs_list',))
else:
pass
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_add(request,template_name=None):
""" Adds an organization """
current_user, message = get_current_user(request.user)
if not message:
show_dialog=False
if request.method == 'POST':
form = OrganizationForm(request.POST)
if form.is_valid():
current_org = form.save()
current_org.save()
groups = UserGroup.objects.init_org_groups(current_org, current_user)
eventtypes = EventType.objects.init_event_types(current_org)
message = Message(title=_("Organization Added"), text=_("Organization Added"))
message.addlink(_("View"),current_org.get_absolute_url())
message.addlink(_("Edit"),reverse('org_org_edit',kwargs={'org_short_name':current_org.org_short_name,}))
if request.POST.get("dialog",'') == "False":
template_name = "core/message.html"
show_dialog=False
else:
show_dialog=True
context = {'message':message,'current_org':current_org,'form':form,'show_dialog':show_dialog,}
else:
context = { 'form':form,'show_dialog':show_dialog,}
else:
form = OrganizationForm()
context = { 'form':form }
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_member_remove(request,org_short_name=None, username=None, template_name=None):
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_member, message = get_current_user(username)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove User"), text=_("You cannot remove a user in an organization that you do not belong to."))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_remove_users']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove Member"), text=_("You cannot remove this member because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if request.method == 'POST':
current_member.user_organizations.remove(current_org)
return HttpResponseRedirect(current_org.get_absolute_url())
else:
pass
context = {'current_org':current_org, 'current_member':current_member, }
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_member_invite(request,org_short_name=None, template_name=None):
current_user, message = get_current_user(request.user)
invited_users = get_user_model().objects.none()
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Invite User"), text=_("You cannot invite a user to an organization that you do not belong to."))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_invite_users']:
template_name = "core/message.html"
message = Message(title=_("Can Not Invite Member"), text=_("You cannot invite people to this organization because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
invited_users_page = ePage(1)
org_invites = current_org.invite_set.all()
invited_users = get_user_model().objects.filter(user_invites_set__in=org_invites)
if request.method == 'POST':
form = OrganizationInviteMember(request.POST)
if form.is_valid():
user_list = form.cleaned_data['invite_list'].strip().strip(',').split(',')
new_user_list = []
for user in user_list:
new_user_list.append(user.strip().strip(','))
new_invited_users = get_user_model().objects.filter(username__in=new_user_list).exclude(user_invites_set__in=org_invites)
for user in new_invited_users:
i = OrgInvite()
i.user = user
i.org = current_org
i.direction = True
i.save()
invited_users = invited_users | new_invited_users
else:
form = OrganizationInviteMember()
if request.GET.__contains__("members_page"):
try:
members_page.curr = int(request.GET['members_page'])
except:
members_page.curr = 1
invited_users_page.set_pages(Paginator(invited_users, 5))
context = {'current_org':current_org,'form':form,'invited_users':invited_users_page,'ajax_page_members':reverse('org_org_invites_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
def org_list_invites(request,org_short_name,template_name=None):
invited_users_page = ePage(1)
message = None
if not request.is_ajax():
template_name = "core/message.html"
message = Message(title=_("Cannot Be Viewed"), text=_("Cannot view this page" ))
context = {'message':message,}
if not message:
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
if request.GET.__contains__("invited_users_page"):
try:
invited_users_page.curr = int(request.GET['invited_users_page'])
except:
invited_users_page.curr = 1
org_invites = current_org.invite_set.all()
invited_users = get_user_model().objects.filter(user_invites_set__in=org_invites)
invited_users_page.set_pages(Paginator(invited_users, 5))
context = {'current_org':current_org,'invited_users':invited_users_page,'ajax_page_members':reverse('org_org_invites_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
|
gpl-2.0
| -6,959,863,144,797,156,000
| 47.804721
| 236
| 0.617597
| false
| 3.901029
| false
| false
| false
|
jamesp/jpy
|
jpy/maths/pde.py
|
1
|
2463
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Numerical Methods of Partial Differential Equations.
Provides integration methods and other utility functions such as the RA & RAW
time filters for numerical integration of PDEs.
"""
import numpy as np
def RA_filter(phi, epsilon=0.1):
"""Robert-Asselin-Williams time filter.
phi: A tuple of phi at time levels (n-1), n, (n+1)
epsilon: The RA filter weighting
Takes variable phi at 3 timelevels (n-1), n, (n+1) and recouples the values
at (n) and (n+1).
φ_bar(n) = φ(n) + ϵ[ φ(n+1) - 2φ(n) + φ(n-1) ]
"""
_phi, phi, phi_ = phi
return (_phi, phi + epsilon*(_phi - 2.0 * phi + phi_), phi_)
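# Quick numeric check of the formula above (a hedged, illustrative example):
# with phi = (0.0, 1.0, 0.0) and epsilon = 0.1 the middle value becomes
# 1.0 + 0.1*(0.0 - 2.0*1.0 + 0.0) = 0.8, so
#   RA_filter((0.0, 1.0, 0.0)) == (0.0, 0.8, 0.0)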
def RAW_filter(phi, nu=0.2, alpha=0.53):
"""The RAW time filter, an improvement on RA filter.
phi: A tuple of phi at time levels (n-1), n, (n+1)
nu: Equivalent to 2*ϵ; the RA filter weighting
alpha: Scaling factor for n and (n+1) timesteps.
With α=1, RAW reduces to RA.
For more information, see [Williams 2009].
"""
_phi, phi, phi_ = phi
d = nu*0.5*(_phi - 2.0 * phi + phi_)
return (_phi, phi+alpha*d, phi_ + (alpha-1)*d)
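# Hedged consistency check: with alpha=1 the (n+1) level is left untouched and
# the n level receives the full increment d, so
#   RAW_filter(phi, nu, alpha=1) == RA_filter(phi, epsilon=nu/2)
# matching the docstring note that RAW reduces to RA when α=1.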
if __name__ == '__main__':
import matplotlib.pyplot as plt
# simple harmonic oscillator example from [Williams 2009]
xt = lambda x,y,t,omega: -omega*y
yt = lambda x,y,t,omega: omega*x
x0, y0 = 1.0, 0.0
dt = 0.2
omega = 1.0
alpha = 0.53 # RAW filter parameter
t=0.0
# initialise with a single euler step
_x = x = x0
_y = y = y0
x = _x + dt*xt(x,y,t,omega)
y = _y + dt*yt(x,y,t,omega)
xs = [x0,x]
ys = [y0,y]
ts = [0, dt]
# integrate forward using leapfrog method
for t in np.arange(0+dt,100,dt):
x_ = _x + 2*dt*xt(x,y,t,omega)
y_ = _y + 2*dt*yt(x,y,t,omega)
(_x,x,x_) = RAW_filter((_x,x,x_), alpha=alpha)
(_y,y,y_) = RAW_filter((_y,y,y_), alpha=alpha)
# step variables forward
ts.append(t+dt)
_x,x = x,x_
_y,y = y,y_
xs.append(x)
ys.append(y)
ts = np.array(ts)
xs = np.array(xs)
ys = np.array(ys)
print np.array([ts,xs,ys])
plt.subplot(211)
plt.plot(ts,xs)
plt.plot(ts, np.cos(ts), 'grey')
plt.xlabel('x')
plt.subplot(212)
plt.plot(ts,ys)
plt.plot(ts, np.sin(ts), 'grey')
plt.ylabel('y')
plt.show()
# [Williams 2009] - Paul Williams. A Proposed Modification to the Robert–Asselin Time Filter.
|
mit
| -3,893,353,240,602,028,000
| 27.172414
| 93
| 0.565891
| false
| 2.635484
| false
| false
| false
|
Aipakazuma/b_scouter
|
src/preprocess.py
|
1
|
4275
|
# -*- coding: utf8 -*-
import cv2
import numpy as np
import glob
import os
ASSETS_PATH = os.path.join(os.path.dirname(__file__), 'assets', 'calibrate')
calibrate_files = []
def preprocess_calibrate():
"""preprocess calibrate."""
global calibrate_files
for file in glob.glob(os.path.join(ASSETS_PATH, 'left*.jpg')):
calibrate_files.append(file)
for file in glob.glob(os.path.join(ASSETS_PATH, 'right*.jpg')):
calibrate_files.append(file)
def calibrate():
"""exec calibrate."""
# termination criteria
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((6 * 7, 3), np.float32)
objp[:,:2] = np.mgrid[0:7, 0:6].T.reshape(-1,2)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d point in real world space
imgpoints = [] # 2d points in image plane.
shape = None
global calibrate_files
for fname in calibrate_files:
img = cv2.imread(fname)
gray_image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Find the chess board corners
ret, corners = cv2.findChessboardCorners(gray_image, (7, 6), None)
# If found, add object points, image points (after refining them)
if ret == True:
objpoints.append(objp)
corners2 = cv2.cornerSubPix(gray_image, corners, (11, 11), (-1, -1), criteria)
imgpoints.append(corners2)
if shape is None:
shape = gray_image.shape[::-1]
# Run the camera calibration
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, shape, None, None)
return mtx, dist
def distortion_correction(original_image, gray_image):
"""distortion correction."""
mtx, dist = calibrate()
# Distortion correction
h, w = gray_image.shape[:2]
newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w, h), 1, (w, h))
# Undistort the image
dist2 = cv2.undistort(gray_image, mtx, dist, None, newcameramtx)
# Crop the image to the valid region
x, y, w, h = roi
return dist2[y:y+h, x:x+w]
def line_processing(gray_image, output_threshold_min=200):
"""dilate and substract."""
gaussian_blur_image = cv2.GaussianBlur(gray_image.copy(), (7, 7), 1)
_, threshold = cv2.threshold(gaussian_blur_image.copy(), 125, 255, cv2.THRESH_BINARY)
kernel = np.ones((5, 5), np.uint8)
dilation = cv2.dilate(gaussian_blur_image.copy(), kernel, iterations=1)
diff = cv2.subtract(dilation, gaussian_blur_image.copy())
inverted_white = 255 - diff
_, line_threshold = cv2.threshold(inverted_white, output_threshold_min, 255, cv2.THRESH_BINARY)
return line_threshold
def rect_processing(original_image, line_threshold):
"""rect processing."""
find_contours_image, contours, hierarchy = cv2.findContours(line_threshold.copy(), cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
draw_image = cv2.drawContours(line_threshold.copy(), contours, -1, (255, 255, 255), 3)
th_area = line_threshold.shape[0] * line_threshold.shape[1] / 100
contours_large = list(filter(lambda c:cv2.contourArea(c) > th_area, contours))
outputs = []
rects = []
approxes = []
for (i,cnt) in enumerate(contours_large):
# compute the contour perimeter (arc length)
arclen = cv2.arcLength(cnt, True)
# approximate the contour as a polygon (tolerance: 2% of the perimeter)
approx = cv2.approxPolyDP(cnt, 0.02 * arclen, True)
# skip shapes with too few vertices
if len(approx) < 4:
continue
x, y, w, h = cv2.boundingRect(cnt)
if is_video_frame_size(x, y, w, h):
approxes.append(approx)
rects.append([x, y, w, h])
rect = cv2.rectangle(original_image.copy(), (x, y), (x+w, y+h), (255, 255, 255), 2)
outputs.append(rect)
return rects, outputs, approxes
def is_video_frame_size(x, y, w, h, threshold=200):
"""check video frame size.
DVD 68:95
"""
width = w - x
height = h - y
# 68:95 = width:height -> height = (95 * width) / 68
_height = (95 * width) / 68
loss = height - _height
if threshold > abs(loss):
return True
return False
preprocess_calibrate()
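if __name__ == '__main__':
    # Hedged usage sketch: the input path is an assumption, not part of the
    # original module, and the calibration assets must be in place.
    original = cv2.imread('sample_frame.jpg')
    gray = cv2.cvtColor(original, cv2.COLOR_BGR2GRAY)
    corrected = distortion_correction(original, gray)
    line_threshold = line_processing(corrected)
    rects, outputs, approxes = rect_processing(corrected, line_threshold)
    print('found %d candidate frames' % len(rects))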
|
mit
| -8,244,897,975,416,989,000
| 29.40146
| 125
| 0.617527
| false
| 2.920757
| false
| false
| false
|
gmt/overlay-upstream-tracking
|
OUT/__init__.py
|
1
|
5112
|
from __future__ import print_function
from FixPython import is_string
__all__ = [
"Ephemeral",
"FixPython",
"OOParsing",
"OUTDate",
"OUTDbgPrint",
"OUTDebug",
"OUTDebugModes",
"OUTUpdated",
"OUTVerboseModes",
"OUTVersion",
"Repo",
"Rules",
"RulesParser"
]
__version__ = 0.1
__date__ = '2014-07-18'
__updated__ = '2014-07-18'
OUTVersion = __version__
OUTDate = __date__
OUTUpdated = __updated__
OUTDebugModes = frozenset([
'misc', # General debug output not fitting any other category
'cmd', # Command-line processing
])
OUTVerboseModes = frozenset([
'misc'
])
class _OUTDebug(object):
def __init__(self, mode=set()):
self.mode = mode
@property
def mode(self):
return self._mode
@mode.setter
def mode(self, value):
if isinstance(value, _OUTDebug):
self._mode = value.mode.copy()
elif is_string(value):
self._mode = set(value.split(' '))
else:
self._mode = set(value)
self.validate()
def enable(self, mode):
if is_string(mode):
mode = mode.split(' ')
mode = set(mode)
self._mode |= mode
self.validate()
def disable(self, mode):
if is_string(mode):
mode = mode.split(' ')
mode = set(mode)
self._mode -= mode
# no need to validate as always valid
def validate(self):
if not OUTDebugModes >= self._mode:
raise Exception('Invalid debug mode(s) set: %s' % ', '.join(self._mode - OUTDebugModes))
def __getitem__(self, attrs):
return self._mode >= attrs
def __setitem__(self, attrs, value):
if value:
self.enable(attrs)
else:
self.disable(attrs)
def __contains__(self, attrs):
return self[attrs]
def __iadd__(self, value):
if isinstance(value, _OUTDebug):
# no validation needed
self._mode |= value.mode
elif is_string(value):
self.mode |= set(value.split(' '))
else:
self.mode |= set(value)
return self
def __add__(self, value):
if isinstance(value, _OUTDebug):
return _OUTDebug(self._mode | value.mode)
elif is_string(value):
return _OUTDebug(self._mode | set(value.split(' ')))
else:
return _OUTDebug(self._mode | set(value))
__or__ = __add__
__ior__ = __iadd__
def __iand__(self, value):
if isinstance(value, _OUTDebug):
self._mode &= value.mode
elif is_string(value):
self.mode = self.mode & set(value.split(' '))
else:
self.mode = self.mode & set(value)
return self
def __and__(self, value):
if isinstance(value, _OUTDebug):
return _OUTDebug(self._mode & value.mode)
elif is_string(value):
return _OUTDebug(self._mode & set(value.split(' ')))
else:
return _OUTDebug(self._mode & set(value))
def __isub__(self, value):
if isinstance(value, _OUTDebug):
self._mode -= value.mode
else:
self.disable(value)
return self
def __sub__(self, value):
if isinstance(value, _OUTDebug):
return _OUTDebug(self._mode - value.mode)
elif is_string(value):
return self._mode - set(value.split(' '))
else:
return self._mode - set(value)
def __invert__(self):
return _OUTDebug(OUTDebugModes - self._mode)
__not__ = __invert__
def __le__(self, value):
if isinstance(value, _OUTDebug):
return self._mode <= value.mode
elif is_string(value):
return self._mode <= set(value.split(' '))
else:
return self._mode <= set(value)
def __ge__(self, value):
if isinstance(value, _OUTDebug):
return self.mode >= value.mode
elif is_string(value):
return self.mode >= set(value.split(' '))
else:
return self.mode >= set(value)
def __eq__(self, value):
if isinstance(value, _OUTDebug):
return self._mode == value._mode
elif is_string(value):
return self.mode == set(value.split(' '))
else:
return self.mode == set(value)
def __ne__(self, value):
return (not (self == value))
def __lt__(self, value):
return self <= value and self != value
def __gt__(self, value):
return self >= value and self != value
def __len__(self):
return len(self._mode)
issubset = __lt__
issuperset = __gt__
union = __or__
intersection = __and__
difference = __sub__
def __copy__(self):
return _OUTDebug(self._mode.copy())
def __repr__(self):
return '_OUTDebug("%s")' % ' '.join(self.mode)
__str__ = __repr__
OUTDebug = _OUTDebug()
OUTProfile = False
def OUTDbgPrint(mode, *args, **kwargs):
if len(OUTDebug & mode) > 0:
print('debug: ', end='')
print(*args, **kwargs)
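if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module; assumes the
    # package imports resolve): enable the 'cmd' debug channel, then emit a
    # message tagged with it.
    OUTDebug.enable('cmd')
    OUTDbgPrint('cmd', 'argument parsing started')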
|
gpl-2.0
| 1,658,684,616,275,879,200
| 27.4
| 100
| 0.52856
| false
| 3.80924
| false
| false
| false
|
justinvforvendetta/electrum-rby
|
gui/qt/main_window.py
|
1
|
110670
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, time, re, threading
from electrum_rby.i18n import _, set_language
from electrum_rby.util import block_explorer, block_explorer_info, block_explorer_URL
from electrum_rby.util import print_error, print_msg
import os.path, json, ast, traceback
import shutil
import StringIO
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
from electrum_rby.bitcoin import MIN_RELAY_TX_FEE, is_valid
from electrum_rby.plugins import run_hook
import icons_rc
from electrum_rby.util import format_satoshis, format_time, NotEnoughFunds, StoreDict
from electrum_rby import Transaction
from electrum_rby import mnemonic
from electrum_rby import util, bitcoin, commands, Wallet
from electrum_rby import SimpleConfig, Wallet, WalletStorage
from electrum_rby import Imported_Wallet
from amountedit import AmountEdit, BTCAmountEdit, MyLineEdit
from network_dialog import NetworkDialog
from qrcodewidget import QRCodeWidget, QRDialog
from qrtextedit import ScanQRTextEdit, ShowQRTextEdit
from decimal import Decimal
import httplib
import socket
import webbrowser
import csv
from electrum_rby import ELECTRUM_VERSION
import re
from util import *
class StatusBarButton(QPushButton):
def __init__(self, icon, tooltip, func):
QPushButton.__init__(self, icon, '')
self.setToolTip(tooltip)
self.setFlat(True)
self.setMaximumWidth(25)
self.clicked.connect(func)
self.func = func
self.setIconSize(QSize(25,25))
def keyPressEvent(self, e):
if e.key() == QtCore.Qt.Key_Return:
apply(self.func,())
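# Hedged usage sketch (assumes a running QApplication; the icon path and
# callback name are illustrative, not part of this file):
#
#   btn = StatusBarButton(QIcon(":icons/status_connected.png"),
#                         _("Network"), self.run_network_dialog)
#   self.statusBar().addPermanentWidget(btn)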
from electrum_rby.paymentrequest import PR_UNPAID, PR_PAID, PR_EXPIRED
from electrum_rby.paymentrequest import PaymentRequest, InvoiceStore, get_payment_request, make_payment_request
pr_icons = {
PR_UNPAID:":icons/unpaid.png",
PR_PAID:":icons/confirmed.png",
PR_EXPIRED:":icons/expired.png"
}
pr_tooltips = {
PR_UNPAID:_('Pending'),
PR_PAID:_('Paid'),
PR_EXPIRED:_('Expired')
}
expiration_values = [
(_('1 hour'), 60*60),
(_('1 day'), 24*60*60),
(_('1 week'), 7*24*60*60),
(_('Never'), None)
]
class ElectrumWindow(QMainWindow):
labelsChanged = pyqtSignal()
def __init__(self, config, network, gui_object):
QMainWindow.__init__(self)
self.config = config
self.network = network
self.gui_object = gui_object
self.tray = gui_object.tray
self.go_lite = gui_object.go_lite
self.lite = None
self.app = gui_object.app
self.invoices = InvoiceStore(self.config)
self.contacts = StoreDict(self.config, 'contacts')
self.create_status_bar()
self.need_update = threading.Event()
self.decimal_point = config.get('decimal_point', 8)
self.num_zeros = int(config.get('num_zeros',0))
self.completions = QStringListModel()
self.tabs = tabs = QTabWidget(self)
tabs.addTab(self.create_history_tab(), _('History') )
tabs.addTab(self.create_send_tab(), _('Send') )
tabs.addTab(self.create_receive_tab(), _('Receive') )
tabs.addTab(self.create_addresses_tab(), _('Addresses') )
tabs.addTab(self.create_contacts_tab(), _('Contacts') )
tabs.addTab(self.create_console_tab(), _('Console') )
tabs.setMinimumSize(600, 400)
tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.setCentralWidget(tabs)
try:
self.setGeometry(*self.config.get("winpos-qt"))
except:
self.setGeometry(100, 100, 840, 400)
if self.config.get("is_maximized"):
self.showMaximized()
self.setWindowIcon(QIcon(":icons/electrum-rby.png"))
self.init_menubar()
QShortcut(QKeySequence("Ctrl+W"), self, self.close)
QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() - 1 )%tabs.count() ))
QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() + 1 )%tabs.count() ))
for i in range(tabs.count()):
QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: tabs.setCurrentIndex(i))
self.connect(self, QtCore.SIGNAL('stop'), self.close)
self.connect(self, QtCore.SIGNAL('update_status'), self.update_status)
self.connect(self, QtCore.SIGNAL('banner_signal'), lambda: self.console.showMessage(self.network.banner) )
self.connect(self, QtCore.SIGNAL('transaction_signal'), lambda: self.notify_transactions() )
self.connect(self, QtCore.SIGNAL('payment_request_ok'), self.payment_request_ok)
self.connect(self, QtCore.SIGNAL('payment_request_error'), self.payment_request_error)
self.labelsChanged.connect(self.update_tabs)
self.history_list.setFocus(True)
# network callbacks
if self.network:
self.network.register_callback('updated', lambda: self.need_update.set())
self.network.register_callback('banner', lambda: self.emit(QtCore.SIGNAL('banner_signal')))
self.network.register_callback('status', lambda: self.emit(QtCore.SIGNAL('update_status')))
self.network.register_callback('new_transaction', lambda: self.emit(QtCore.SIGNAL('transaction_signal')))
self.network.register_callback('stop', lambda: self.emit(QtCore.SIGNAL('stop')))
# set initial message
self.console.showMessage(self.network.banner)
self.wallet = None
self.payment_request = None
self.qr_window = None
self.not_enough_funds = False
self.pluginsdialog = None
def update_account_selector(self):
# account selector
accounts = self.wallet.get_account_names()
self.account_selector.clear()
if len(accounts) > 1:
self.account_selector.addItems([_("All accounts")] + accounts.values())
self.account_selector.setCurrentIndex(0)
self.account_selector.show()
else:
self.account_selector.hide()
def close_wallet(self):
self.wallet.stop_threads()
run_hook('close_wallet')
def load_wallet(self, wallet):
import electrum_rby as electrum
self.wallet = wallet
# backward compatibility
self.update_wallet_format()
self.import_old_contacts()
# address used to create a dummy transaction and estimate transaction fee
a = self.wallet.addresses(False)
self.dummy_address = a[0] if a else None
self.accounts_expanded = self.wallet.storage.get('accounts_expanded',{})
self.current_account = self.wallet.storage.get("current_account", None)
title = 'Electrum-RBY %s - %s' % (self.wallet.electrum_version, self.wallet.basename())
if self.wallet.is_watching_only():
title += ' [%s]' % (_('watching only'))
self.setWindowTitle( title )
self.update_history_tab()
self.need_update.set()
        # Once the GUI is initialized, check whether there are transactions to announce; the network callback may have fired before the GUI existed.
self.notify_transactions()
self.update_account_selector()
# update menus
self.new_account_menu.setVisible(self.wallet.can_create_accounts())
self.private_keys_menu.setEnabled(not self.wallet.is_watching_only())
self.password_menu.setEnabled(self.wallet.can_change_password())
self.seed_menu.setEnabled(self.wallet.has_seed())
self.mpk_menu.setEnabled(self.wallet.is_deterministic())
self.import_menu.setVisible(self.wallet.can_import())
self.export_menu.setEnabled(self.wallet.can_export())
self.update_lock_icon()
self.update_buttons_on_seed()
self.update_console()
self.clear_receive_tab()
self.update_receive_tab()
self.show()
run_hook('load_wallet', wallet)
def import_old_contacts(self):
# backward compatibility: import contacts
addressbook = set(self.wallet.storage.get('contacts', []))
for k in addressbook:
l = self.wallet.labels.get(k)
if bitcoin.is_address(k) and l:
self.contacts[l] = ('address', k)
self.wallet.storage.put('contacts', None)
def update_wallet_format(self):
# convert old-format imported keys
if self.wallet.imported_keys:
password = self.password_dialog(_("Please enter your password in order to update imported keys")) if self.wallet.use_encryption else None
try:
self.wallet.convert_imported_keys(password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
# call synchronize to regenerate addresses in case we are offline
if self.wallet.get_master_public_keys() and self.wallet.addresses() == []:
self.wallet.synchronize()
def open_wallet(self):
wallet_folder = self.wallet.storage.path
filename = unicode( QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder) )
if not filename:
return
try:
storage = WalletStorage(filename)
except Exception as e:
self.show_message(str(e))
return
if not storage.file_exists:
self.show_message(_("File not found") + ' ' + filename)
return
# read wizard action
try:
wallet = Wallet(storage)
except BaseException as e:
QMessageBox.warning(None, _('Warning'), str(e), _('OK'))
return
action = wallet.get_action()
self.hide()
# run wizard
if action is not None:
wallet = self.gui_object.run_wizard(storage, action)
else:
wallet.start_threads(self.network)
# keep current wallet
if not wallet:
self.show()
return
# close current wallet
self.close_wallet()
# load new wallet in gui
self.load_wallet(wallet)
# save path
if self.config.get('wallet_path') is None:
self.config.set_key('gui_last_wallet', filename)
def backup_wallet(self):
import shutil
path = self.wallet.storage.path
wallet_folder = os.path.dirname(path)
filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder) )
if not filename:
return
new_path = os.path.join(wallet_folder, filename)
if new_path != path:
try:
shutil.copy2(path, new_path)
QMessageBox.information(None,"Wallet backup created", _("A copy of your wallet file was created in")+" '%s'" % str(new_path))
            except (IOError, os.error) as reason:
                QMessageBox.critical(None, "Unable to create backup", _("Electrum was unable to copy your wallet file to the specified location.") + "\n" + str(reason))
def new_wallet(self):
import installwizard
wallet_folder = os.path.dirname(os.path.abspath(self.wallet.storage.path))
i = 1
while True:
filename = "wallet_%d"%i
if filename in os.listdir(wallet_folder):
i += 1
else:
break
filename = line_dialog(self, _('New Wallet'), _('Enter file name') + ':', _('OK'), filename)
if not filename:
return
full_path = os.path.join(wallet_folder, filename)
storage = WalletStorage(full_path)
if storage.file_exists:
QMessageBox.critical(None, "Error", _("File exists"))
return
self.hide()
wizard = installwizard.InstallWizard(self.config, self.network, storage)
action, wallet_type = wizard.restore_or_create()
if not action:
self.show()
return
# close current wallet, but keep a reference to it
self.close_wallet()
wallet = wizard.run(action, wallet_type)
if wallet:
self.load_wallet(wallet)
else:
self.wallet.start_threads(self.network)
self.load_wallet(self.wallet)
self.show()
def init_menubar(self):
menubar = QMenuBar()
file_menu = menubar.addMenu(_("&File"))
file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)
file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)
file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)
file_menu.addAction(_("&Quit"), self.close)
wallet_menu = menubar.addMenu(_("&Wallet"))
wallet_menu.addAction(_("&New contact"), self.new_contact_dialog)
self.new_account_menu = wallet_menu.addAction(_("&New account"), self.new_account_dialog)
wallet_menu.addSeparator()
self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)
self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)
self.mpk_menu = wallet_menu.addAction(_("&Master Public Keys"), self.show_master_public_keys)
wallet_menu.addSeparator()
labels_menu = wallet_menu.addMenu(_("&Labels"))
labels_menu.addAction(_("&Import"), self.do_import_labels)
labels_menu.addAction(_("&Export"), self.do_export_labels)
self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))
self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)
self.import_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)
self.export_menu = self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)
wallet_menu.addAction(_("&Export History"), self.export_history_dialog)
wallet_menu.addAction(_("Search"), self.toggle_search).setShortcut(QKeySequence("Ctrl+S"))
tools_menu = menubar.addMenu(_("&Tools"))
        # "Settings" / "Preferences" are reserved menu titles on OS X, so use a different label there as a workaround
tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)
tools_menu.addAction(_("&Network"), self.run_network_dialog)
tools_menu.addAction(_("&Plugins"), self.plugins_dialog)
tools_menu.addSeparator()
tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)
tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)
tools_menu.addSeparator()
paytomany_menu = tools_menu.addAction(_("&Pay to many"), self.paytomany)
raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)
raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)
raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)
raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode)
self.raw_transaction_menu = raw_transaction_menu
help_menu = menubar.addMenu(_("&Help"))
help_menu.addAction(_("&About"), self.show_about)
help_menu.addAction(_("&Official website"), lambda: webbrowser.open("http://electrum-rby.space"))
help_menu.addSeparator()
help_menu.addAction(_("&Documentation"), lambda: webbrowser.open("http://electrum-rby.space")).setShortcut(QKeySequence.HelpContents)
help_menu.addAction(_("&Report Bug"), self.show_report_bug)
self.setMenuBar(menubar)
def show_about(self):
QMessageBox.about(self, "Electrum-RBY",
_("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" + _("Electrum's focus is speed, with low resource usage and simplifying RubyCoin. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the RubyCoin system."))
def show_report_bug(self):
QMessageBox.information(self, "Electrum-RBY - " + _("Reporting Bugs"),
_("Please report any bugs as issues on github:")+" <a href=\"https://github.com/rby/electrum-rby/issues\">https://github.com/rby/electrum-rby/issues</a>")
def notify_transactions(self):
if not self.network or not self.network.is_connected():
return
print_error("Notifying GUI")
if len(self.network.pending_transactions_for_notifications) > 0:
            # Combine the transactions into a single notification if there are three or more
            tx_amount = len(self.network.pending_transactions_for_notifications)
            if tx_amount >= 3:
total_amount = 0
for tx in self.network.pending_transactions_for_notifications:
is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                    if v > 0:
total_amount += v
self.notify(_("%(txs)s new transactions received. Total amount received in the new transactions %(amount)s %(unit)s") \
% { 'txs' : tx_amount, 'amount' : self.format_amount(total_amount), 'unit' : self.base_unit()})
self.network.pending_transactions_for_notifications = []
else:
for tx in self.network.pending_transactions_for_notifications:
if tx:
self.network.pending_transactions_for_notifications.remove(tx)
is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                        if v > 0:
self.notify(_("New transaction received. %(amount)s %(unit)s") % { 'amount' : self.format_amount(v), 'unit' : self.base_unit()})
def notify(self, message):
if self.tray:
self.tray.showMessage("Electrum-RBY", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
fileName = unicode( QFileDialog.getOpenFileName(self, title, directory, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def getSaveFileName(self, title, filename, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
path = os.path.join( directory, filename )
fileName = unicode( QFileDialog.getSaveFileName(self, title, path, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
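    # Typical use of these wrappers (see export_payment_request below):
    #   fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")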
def close(self):
if self.qr_window:
self.qr_window.close()
QMainWindow.close(self)
run_hook('close_main_window')
def connect_slots(self, sender):
self.connect(sender, QtCore.SIGNAL('timersignal'), self.timer_actions)
self.previous_payto_e=''
def timer_actions(self):
if self.need_update.is_set():
self.update_wallet()
self.need_update.clear()
run_hook('timer_actions')
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, is_diff, self.num_zeros, self.decimal_point, whitespaces)
def get_decimal_point(self):
return self.decimal_point
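    # decimal_point selects the display unit (8 -> RBY, 5 -> mRBY, 2 -> bits);
    # for example, with decimal_point == 8 an amount of 123456789 base units
    # is formatted as 1.23456789 RBY.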
def base_unit(self):
assert self.decimal_point in [2, 5, 8]
if self.decimal_point == 2:
return 'bits'
if self.decimal_point == 5:
return 'mRBY'
if self.decimal_point == 8:
return 'RBY'
raise Exception('Unknown base unit')
def update_status(self):
if not self.wallet:
return
if self.network is None or not self.network.is_running():
text = _("Offline")
icon = QIcon(":icons/status_disconnected.png")
elif self.network.is_connected():
server_lag = self.network.get_local_height() - self.network.get_server_height()
if not self.wallet.up_to_date:
text = _("Synchronizing...")
icon = QIcon(":icons/status_waiting.png")
elif server_lag > 1:
text = _("Server is lagging (%d blocks)"%server_lag)
icon = QIcon(":icons/status_lagging.png")
else:
c, u, x = self.wallet.get_account_balance(self.current_account)
text = _("Balance" ) + ": %s "%(self.format_amount(c)) + self.base_unit()
if u:
text += " [%s unconfirmed]"%(self.format_amount(u, True).strip())
if x:
text += " [%s unmatured]"%(self.format_amount(x, True).strip())
# append fiat balance and price from exchange rate plugin
r = {}
run_hook('get_fiat_status_text', c+u, r)
quote = r.get(0)
if quote:
text += "%s"%quote
if self.tray:
self.tray.setToolTip("%s (%s)" % (text, self.wallet.basename()))
icon = QIcon(":icons/status_connected.png")
else:
text = _("Not connected")
icon = QIcon(":icons/status_disconnected.png")
self.balance_label.setText(text)
self.status_button.setIcon( icon )
def update_wallet(self):
self.update_status()
if self.wallet.up_to_date or not self.network or not self.network.is_connected():
self.update_tabs()
def update_tabs(self):
self.update_history_tab()
self.update_receive_tab()
self.update_address_tab()
self.update_contacts_tab()
self.update_completions()
self.update_invoices_list()
def create_history_tab(self):
from history_widget import HistoryWidget
self.history_list = l = HistoryWidget(self)
return l
def show_address(self, addr):
import address_dialog
d = address_dialog.AddressDialog(addr, self)
d.exec_()
def show_transaction(self, tx):
import transaction_dialog
d = transaction_dialog.TxDialog(tx, self)
d.exec_()
def update_history_tab(self):
domain = self.wallet.get_account_addresses(self.current_account)
h = self.wallet.get_history(domain)
self.history_list.update(h)
def create_receive_tab(self):
self.receive_grid = grid = QGridLayout()
grid.setColumnMinimumWidth(3, 300)
self.receive_address_e = ButtonsLineEdit()
self.receive_address_e.addCopyButton(self.app)
self.receive_address_e.setReadOnly(True)
self.receive_address_label = QLabel(_('Receiving address'))
self.receive_address_e.textChanged.connect(self.update_receive_qr)
self.receive_address_e.setFocusPolicy(Qt.NoFocus)
grid.addWidget(self.receive_address_label, 0, 0)
grid.addWidget(self.receive_address_e, 0, 1, 1, 4)
self.receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self.receive_message_e, 1, 1, 1, 4)
self.receive_message_e.textChanged.connect(self.update_receive_qr)
self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self.receive_amount_e, 2, 1, 1, 2)
self.receive_amount_e.textChanged.connect(self.update_receive_qr)
self.expires_combo = QComboBox()
self.expires_combo.addItems(map(lambda x:x[0], expiration_values))
self.expires_combo.setCurrentIndex(1)
grid.addWidget(QLabel(_('Expires in')), 3, 0)
grid.addWidget(self.expires_combo, 3, 1)
self.expires_label = QLineEdit('')
self.expires_label.setReadOnly(1)
self.expires_label.setFocusPolicy(Qt.NoFocus)
self.expires_label.hide()
grid.addWidget(self.expires_label, 3, 1, 1, 2)
self.save_request_button = QPushButton(_('Save'))
self.save_request_button.clicked.connect(self.save_payment_request)
self.new_request_button = QPushButton(_('New'))
self.new_request_button.clicked.connect(self.new_payment_request)
self.receive_qr = QRCodeWidget(fixedSize=200)
self.receive_qr.mouseReleaseEvent = lambda x: self.toggle_qr_window()
self.receive_qr.enterEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
self.receive_qr.leaveEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
self.receive_buttons = buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.save_request_button)
buttons.addWidget(self.new_request_button)
self.receive_requests_label = QLabel(_('My Requests'))
self.receive_list = MyTreeWidget(self, self.receive_list_menu, [_('Date'), _('Account'), _('Address'), _('Description'), _('Amount'), _('Status')], 3)
self.receive_list.currentItemChanged.connect(self.receive_item_changed)
self.receive_list.itemClicked.connect(self.receive_item_changed)
self.receive_list.setSortingEnabled(True)
self.receive_list.setColumnWidth(0, 180)
self.receive_list.hideColumn(1) # the update will show it if necessary
self.receive_list.hideColumn(2) # don't show address
self.receive_list.setColumnWidth(2, 340)
h = self.receive_list.header()
h.setStretchLastSection(False)
h.setResizeMode(3, QHeaderView.Stretch)
# layout
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addLayout(buttons)
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addStretch()
hbox.addWidget(self.receive_qr)
w = QWidget()
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.receive_requests_label)
vbox.addWidget(self.receive_list)
return w
def receive_item_changed(self, item):
if item is None:
return
if not self.receive_list.isItemSelected(item):
return
addr = str(item.text(2))
req = self.receive_requests[addr]
expires = _('Never') if req.get('expiration') is None else format_time(req['time'] + req['expiration'])
amount = req['amount']
message = self.wallet.labels.get(addr, '')
self.receive_address_e.setText(addr)
self.receive_message_e.setText(message)
self.receive_amount_e.setAmount(amount)
self.expires_combo.hide()
self.expires_label.show()
self.expires_label.setText(expires)
self.new_request_button.setEnabled(True)
def delete_payment_request(self, item):
addr = str(item.text(2))
self.receive_requests.pop(addr)
self.wallet.storage.put('receive_requests2', self.receive_requests)
self.update_receive_tab()
self.clear_receive_tab()
def get_receive_URI(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text())
URI = util.create_URI(addr, amount, message)
return URI
def receive_list_menu(self, position):
item = self.receive_list.itemAt(position)
addr = str(item.text(2))
req = self.receive_requests[addr]
time, amount = req['time'], req['amount']
message = self.wallet.labels.get(addr, '')
URI = util.create_URI(addr, amount, message)
menu = QMenu()
menu.addAction(_("Copy Address"), lambda: self.app.clipboard().setText(addr))
menu.addAction(_("Copy URI"), lambda: self.app.clipboard().setText(str(URI)))
menu.addAction(_("Save as BIP70 file"), lambda: self.export_payment_request(addr))
menu.addAction(_("Delete"), lambda: self.delete_payment_request(item))
menu.exec_(self.receive_list.viewport().mapToGlobal(position))
def save_payment_request(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text())
if not message and not amount:
QMessageBox.warning(self, _('Error'), _('No message or amount'), _('OK'))
return
self.receive_requests = self.wallet.storage.get('receive_requests2', {})
if addr in self.receive_requests:
self.receive_requests[addr]['amount'] = amount
else:
now = int(time.time())
i = self.expires_combo.currentIndex()
expiration = map(lambda x: x[1], expiration_values)[i]
self.receive_requests[addr] = {'time':now, 'amount':amount, 'expiration':expiration}
self.wallet.storage.put('receive_requests2', self.receive_requests)
self.wallet.set_label(addr, message)
self.update_receive_tab()
self.update_address_tab()
self.save_request_button.setEnabled(False)
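    # Builds a BIP70 payment request for `addr` from the stored request data.
    # Outputs are (script, amount) pairs; the request is presumably signed
    # when ssl_key_path/ssl_cert_path are configured.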
def make_payment_request(self, addr):
req = self.receive_requests[addr]
time = req['time']
amount = req['amount']
expiration = req['expiration']
message = self.wallet.labels.get(addr, '')
script = Transaction.pay_script('address', addr).decode('hex')
outputs = [(script, amount)]
key_path = self.config.get('ssl_key_path')
cert_path = self.config.get('ssl_cert_path')
return make_payment_request(outputs, message, time, time + expiration, key_path, cert_path)
def export_payment_request(self, addr):
pr = self.make_payment_request(addr)
name = 'request.bip70'
fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")
if fileName:
with open(fileName, "wb+") as f:
f.write(str(pr))
self.show_message(_("Request saved successfully"))
self.saved = True
def get_receive_address(self):
domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
for addr in domain:
if not self.wallet.history.get(addr) and addr not in self.receive_requests.keys():
return addr
def new_payment_request(self):
addr = self.get_receive_address()
if addr is None:
if isinstance(self.wallet, Imported_Wallet):
self.show_message(_('No more addresses in your wallet.'))
return
if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):
return
addr = self.wallet.create_new_address(self.current_account, False)
self.set_receive_address(addr)
self.expires_label.hide()
self.expires_combo.show()
self.new_request_button.setEnabled(False)
self.receive_message_e.setFocus(1)
def set_receive_address(self, addr):
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
def clear_receive_tab(self):
self.receive_requests = self.wallet.storage.get('receive_requests2',{})
domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
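        # for/else: the else branch runs only if no unused, unrequested
        # address was found (i.e. the loop finished without break).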
for addr in domain:
if not self.wallet.history.get(addr) and addr not in self.receive_requests.keys():
break
else:
addr = ''
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
self.expires_label.hide()
self.expires_combo.show()
def toggle_qr_window(self):
import qrwindow
if not self.qr_window:
self.qr_window = qrwindow.QR_Window(self)
self.qr_window.setVisible(True)
self.qr_window_geometry = self.qr_window.geometry()
else:
if not self.qr_window.isVisible():
self.qr_window.setVisible(True)
self.qr_window.setGeometry(self.qr_window_geometry)
else:
self.qr_window_geometry = self.qr_window.geometry()
self.qr_window.setVisible(False)
self.update_receive_qr()
def receive_at(self, addr):
if not bitcoin.is_address(addr):
return
self.tabs.setCurrentIndex(2)
self.receive_address_e.setText(addr)
self.new_request_button.setEnabled(True)
def update_receive_tab(self):
self.receive_requests = self.wallet.storage.get('receive_requests2',{})
# hide receive tab if no receive requests available
b = len(self.receive_requests) > 0
self.receive_list.setVisible(b)
self.receive_requests_label.setVisible(b)
if not b:
self.expires_label.hide()
self.expires_combo.show()
# check if it is necessary to show the account
self.receive_list.setColumnHidden(1, len(self.wallet.get_accounts()) == 1)
# update the receive address if necessary
current_address = self.receive_address_e.text()
domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
addr = self.get_receive_address()
        if current_address not in domain and addr:
self.set_receive_address(addr)
self.new_request_button.setEnabled(addr != current_address)
# clear the list and fill it again
self.receive_list.clear()
for address, req in self.receive_requests.viewitems():
timestamp, amount = req['time'], req['amount']
expiration = req.get('expiration', None)
message = self.wallet.labels.get(address, '')
# only show requests for the current account
if address not in domain:
continue
date = format_time(timestamp)
account = self.wallet.get_account_name(self.wallet.get_account_from_address(address))
amount_str = self.format_amount(amount) if amount else ""
if amount:
paid = amount <= self.wallet.get_addr_received(address)
status = PR_PAID if paid else PR_UNPAID
if status == PR_UNPAID and expiration is not None and time.time() > timestamp + expiration:
status = PR_EXPIRED
else:
status = ''
item = QTreeWidgetItem([date, account, address, message, amount_str, pr_tooltips.get(status,'')])
            if status != '':
item.setIcon(5, QIcon(pr_icons.get(status)))
self.receive_list.addTopLevelItem(item)
def update_receive_qr(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text()).encode('utf8')
self.save_request_button.setEnabled((amount is not None) or (message != ""))
uri = util.create_URI(addr, amount, message)
self.receive_qr.setData(uri)
if self.qr_window and self.qr_window.isVisible():
self.qr_window.set_content(addr, amount, message, uri)
def create_send_tab(self):
self.send_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnMinimumWidth(3,300)
grid.setColumnStretch(5,1)
grid.setRowStretch(8, 1)
from paytoedit import PayToEdit
self.amount_e = BTCAmountEdit(self.get_decimal_point)
self.payto_e = PayToEdit(self)
msg = _('Recipient of the funds.') + '\n\n'\
+ _('You may enter a RubyCoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a RubyCoin address)')
payto_label = HelpLabel(_('Pay to'), msg)
grid.addWidget(payto_label, 1, 0)
grid.addWidget(self.payto_e, 1, 1, 1, 3)
completer = QCompleter()
        completer.setCaseSensitivity(Qt.CaseInsensitive)
self.payto_e.setCompleter(completer)
completer.setModel(self.completions)
msg = _('Description of the transaction (not mandatory).') + '\n\n'\
+ _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')
description_label = HelpLabel(_('Description'), msg)
grid.addWidget(description_label, 2, 0)
self.message_e = MyLineEdit()
grid.addWidget(self.message_e, 2, 1, 1, 3)
self.from_label = QLabel(_('From'))
grid.addWidget(self.from_label, 3, 0)
self.from_list = MyTreeWidget(self, self.from_list_menu, ['',''])
self.from_list.setHeaderHidden(True)
self.from_list.setMaximumHeight(80)
grid.addWidget(self.from_list, 3, 1, 1, 3)
self.set_pay_from([])
msg = _('Amount to be sent.') + '\n\n' \
+ _('The amount will be displayed in red if you do not have enough funds in your wallet.') + ' ' \
+ _('Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') + '\n\n' \
+ _('Keyboard shortcut: type "!" to send all your coins.')
amount_label = HelpLabel(_('Amount'), msg)
grid.addWidget(amount_label, 4, 0)
grid.addWidget(self.amount_e, 4, 1, 1, 2)
msg = _('RubyCoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
+ _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
+ _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
self.fee_e_label = HelpLabel(_('Fee'), msg)
self.fee_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(self.fee_e_label, 5, 0)
grid.addWidget(self.fee_e, 5, 1, 1, 2)
self.send_button = EnterButton(_("Send"), self.do_send)
self.clear_button = EnterButton(_("Clear"), self.do_clear)
buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.send_button)
buttons.addWidget(self.clear_button)
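        # The "!" amount shortcut sends the whole balance: a dummy transaction
        # over all spendable coins is built purely to estimate the fee, then
        # the amount field is set to the spendable balance minus that fee.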
def on_shortcut():
sendable = self.get_sendable_balance()
inputs = self.get_coins()
            for i in inputs:
                self.wallet.add_input_info(i)
addr = self.payto_e.payto_address if self.payto_e.payto_address else self.dummy_address
output = ('address', addr, sendable)
dummy_tx = Transaction.from_io(inputs, [output])
fee = self.wallet.estimated_fee(dummy_tx)
self.amount_e.setAmount(max(0,sendable-fee))
self.amount_e.textEdited.emit("")
self.fee_e.setAmount(fee)
self.amount_e.shortcut.connect(on_shortcut)
self.payto_e.textChanged.connect(lambda: self.update_fee(False))
self.amount_e.textEdited.connect(lambda: self.update_fee(False))
self.fee_e.textEdited.connect(lambda: self.update_fee(True))
def entry_changed():
if not self.not_enough_funds:
palette = QPalette()
palette.setColor(self.amount_e.foregroundRole(), QColor('black'))
text = ""
else:
palette = QPalette()
palette.setColor(self.amount_e.foregroundRole(), QColor('red'))
text = _( "Not enough funds" )
c, u, x = self.wallet.get_frozen_balance()
if c+u+x:
text += ' (' + self.format_amount(c+u+x).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
self.statusBar().showMessage(text)
self.amount_e.setPalette(palette)
self.fee_e.setPalette(palette)
self.amount_e.textChanged.connect(entry_changed)
self.fee_e.textChanged.connect(entry_changed)
self.invoices_label = QLabel(_('Invoices'))
self.invoices_list = MyTreeWidget(self, self.create_invoice_menu,
[_('Date'), _('Requestor'), _('Description'), _('Amount'), _('Status')], 2)
self.invoices_list.header().setResizeMode(1, QHeaderView.Interactive)
self.invoices_list.setColumnWidth(1, 200)
vbox0 = QVBoxLayout()
vbox0.addLayout(grid)
vbox0.addLayout(buttons)
vbox0.addStretch(1)
hbox = QHBoxLayout()
hbox.addLayout(vbox0)
hbox.addStretch(1)
w = QWidget()
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch()
vbox.addWidget(self.invoices_label)
vbox.addWidget(self.invoices_list)
# Defer this until grid is parented to avoid ugly flash during startup
self.update_fee_edit()
run_hook('create_send_tab', grid)
return w
def update_fee(self, is_fee):
outputs = self.payto_e.get_outputs()
amount = self.amount_e.get_amount()
fee = self.fee_e.get_amount() if is_fee else None
if amount is None:
self.fee_e.setAmount(None)
self.not_enough_funds = False
else:
if not outputs:
addr = self.payto_e.payto_address if self.payto_e.payto_address else self.dummy_address
outputs = [('address', addr, amount)]
try:
tx = self.wallet.make_unsigned_transaction(outputs, fee, coins = self.get_coins())
self.not_enough_funds = False
except NotEnoughFunds:
self.not_enough_funds = True
if not is_fee:
fee = None if self.not_enough_funds else self.wallet.get_tx_fee(tx)
self.fee_e.setAmount(fee)
def update_fee_edit(self):
b = self.config.get('can_edit_fees', False)
self.fee_e.setVisible(b)
self.fee_e_label.setVisible(b)
def from_list_delete(self, item):
i = self.from_list.indexOfTopLevelItem(item)
self.pay_from.pop(i)
self.redraw_from_list()
def from_list_menu(self, position):
item = self.from_list.itemAt(position)
menu = QMenu()
menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
menu.exec_(self.from_list.viewport().mapToGlobal(position))
def set_pay_from(self, domain = None):
self.pay_from = [] if domain == [] else self.wallet.get_spendable_coins(domain)
self.redraw_from_list()
def redraw_from_list(self):
self.from_list.clear()
self.from_label.setHidden(len(self.pay_from) == 0)
self.from_list.setHidden(len(self.pay_from) == 0)
def format(x):
h = x.get('prevout_hash')
return h[0:8] + '...' + h[-8:] + ":%d"%x.get('prevout_n') + u'\t' + "%s"%x.get('address')
for item in self.pay_from:
self.from_list.addTopLevelItem(QTreeWidgetItem( [format(item), self.format_amount(item['value']) ]))
def get_contact_payto(self, key):
_type, value = self.contacts.get(key)
return key + ' <' + value + '>' if _type == 'address' else key
def update_completions(self):
l = [self.get_contact_payto(key) for key in self.contacts.keys()]
self.completions.setStringList(l)
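    # Method decorator: routes the call through do_protect, which prompts for
    # the wallet password when encryption is enabled and appends it as the
    # trailing `password` argument of the decorated method.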
def protected(func):
return lambda s, *args: s.do_protect(func, args)
def read_send_tab(self):
if self.payment_request and self.payment_request.has_expired():
QMessageBox.warning(self, _('Error'), _('Payment request has expired'), _('OK'))
return
label = unicode( self.message_e.text() )
if self.payment_request:
outputs = self.payment_request.get_outputs()
else:
errors = self.payto_e.get_errors()
if errors:
self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
return
outputs = self.payto_e.get_outputs()
if not outputs:
QMessageBox.warning(self, _('Error'), _('No outputs'), _('OK'))
return
for _type, addr, amount in outputs:
if addr is None:
QMessageBox.warning(self, _('Error'), _('RubyCoin Address is None'), _('OK'))
return
if _type == 'address' and not bitcoin.is_address(addr):
QMessageBox.warning(self, _('Error'), _('Invalid RubyCoin Address'), _('OK'))
return
if amount is None:
QMessageBox.warning(self, _('Error'), _('Invalid Amount'), _('OK'))
return
fee = self.fee_e.get_amount()
if fee is None:
QMessageBox.warning(self, _('Error'), _('Invalid Fee'), _('OK'))
return
amount = sum(map(lambda x:x[2], outputs))
confirm_amount = self.config.get('confirm_amount', 1000000000)
if amount >= confirm_amount:
o = '\n'.join(map(lambda x:x[1], outputs))
if not self.question(_("send %(amount)s to %(address)s?")%{ 'amount' : self.format_amount(amount) + ' '+ self.base_unit(), 'address' : o}):
return
coins = self.get_coins()
return outputs, fee, label, coins
def do_send(self):
if run_hook('before_send'):
return
r = self.read_send_tab()
if not r:
return
outputs, fee, label, coins = r
try:
tx = self.wallet.make_unsigned_transaction(outputs, fee, None, coins = coins)
if not tx:
raise BaseException(_("Insufficient funds"))
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
if tx.get_fee() < tx.required_fee(self.wallet):
QMessageBox.warning(self, _('Error'), _("This transaction requires a higher fee, or it will not be propagated by the network."), _('OK'))
return
if not self.config.get('can_edit_fees', False):
if not self.question(_("A fee of %(fee)s will be added to this transaction.\nProceed?")%{ 'fee' : self.format_amount(fee) + ' '+ self.base_unit()}):
return
else:
confirm_fee = self.config.get('confirm_fee', 10000000)
if fee >= confirm_fee:
if not self.question(_("The fee for this transaction seems unusually high.\nAre you really sure you want to pay %(fee)s in fees?")%{ 'fee' : self.format_amount(fee) + ' '+ self.base_unit()}):
return
self.send_tx(tx, label)
@protected
def send_tx(self, tx, label, password):
self.send_button.setDisabled(True)
# call hook to see if plugin needs gui interaction
run_hook('send_tx', tx)
# sign the tx
def sign_thread():
if self.wallet.is_watching_only():
return tx
self.wallet.sign_transaction(tx, password)
return tx
def sign_done(tx):
if label and tx.is_complete():
self.wallet.set_label(tx.hash(), label)
if not tx.is_complete() or self.config.get('show_before_broadcast'):
self.show_transaction(tx)
self.do_clear()
return
self.broadcast_transaction(tx)
# keep a reference to WaitingDialog or the gui might crash
        self.waiting_dialog = WaitingDialog(self, 'Signing...', sign_thread, sign_done, lambda: self.send_button.setDisabled(False))
self.waiting_dialog.start()
def broadcast_transaction(self, tx):
def broadcast_thread():
# non-GUI thread
pr = self.payment_request
if pr is None:
return self.wallet.sendtx(tx)
if pr.has_expired():
self.payment_request = None
return False, _("Payment request has expired")
status, msg = self.wallet.sendtx(tx)
if not status:
return False, msg
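            # BIP70 flow: record the invoice as paid, then send a Payment
            # message with a refund address and surface the merchant's ACK memo.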
key = pr.get_id()
self.invoices.set_paid(key, tx.hash())
self.payment_request = None
refund_address = self.wallet.addresses()[0]
ack_status, ack_msg = pr.send_ack(str(tx), refund_address)
if ack_status:
msg = ack_msg
return status, msg
def broadcast_done(status, msg):
# GUI thread
if status:
QMessageBox.information(self, '', _('Payment sent.') + '\n' + msg, _('OK'))
self.update_invoices_list()
self.do_clear()
else:
QMessageBox.warning(self, _('Error'), msg, _('OK'))
self.send_button.setDisabled(False)
        self.waiting_dialog = WaitingDialog(self, 'Broadcasting...', broadcast_thread, broadcast_done)
self.waiting_dialog.start()
def prepare_for_payment_request(self):
self.tabs.setCurrentIndex(1)
self.payto_e.is_pr = True
for e in [self.payto_e, self.amount_e, self.message_e]:
e.setFrozen(True)
self.payto_e.setText(_("please wait..."))
return True
def payment_request_ok(self):
pr = self.payment_request
key = self.invoices.add(pr)
status = self.invoices.get_status(key)
self.update_invoices_list()
if status == PR_PAID:
self.show_message("invoice already paid")
self.do_clear()
self.payment_request = None
return
if not pr.has_expired():
self.payto_e.setGreen()
else:
self.payto_e.setExpired()
self.payto_e.setText(pr.get_requestor())
self.amount_e.setText(self.format_amount(pr.get_amount()))
self.message_e.setText(pr.get_memo())
# signal to set fee
self.amount_e.textEdited.emit("")
def payment_request_error(self):
self.do_clear()
self.show_message(self.payment_request.error)
self.payment_request = None
def pay_from_URI(self,URI):
if not URI:
return
try:
address, amount, label, message, request_url = util.parse_URI(URI)
except Exception as e:
QMessageBox.warning(self, _('Error'), _('Invalid rubycoin URI:') + '\n' + str(e), _('OK'))
return
self.tabs.setCurrentIndex(1)
if not request_url:
if label:
if self.wallet.labels.get(address) != label:
                    if self.question(_('Save label "%(label)s" for address %(address)s ?') % {'label': label, 'address': address}):
if address not in self.wallet.addressbook and not self.wallet.is_mine(address):
self.wallet.addressbook.append(address)
self.wallet.set_label(address, label)
else:
label = self.wallet.labels.get(address)
if address:
self.payto_e.setText(label + ' <'+ address +'>' if label else address)
if message:
self.message_e.setText(message)
if amount:
self.amount_e.setAmount(amount)
self.amount_e.textEdited.emit("")
return
def get_payment_request_thread():
self.payment_request = get_payment_request(request_url)
if self.payment_request.verify():
self.emit(SIGNAL('payment_request_ok'))
else:
self.emit(SIGNAL('payment_request_error'))
t = threading.Thread(target=get_payment_request_thread)
t.setDaemon(True)
t.start()
self.prepare_for_payment_request()
def do_clear(self):
self.not_enough_funds = False
self.payto_e.is_pr = False
for e in [self.payto_e, self.message_e, self.amount_e, self.fee_e]:
e.setText('')
e.setFrozen(False)
self.set_pay_from([])
self.update_status()
run_hook('do_clear')
def set_addrs_frozen(self,addrs,freeze):
for addr in addrs:
            if not addr:
                continue
if addr in self.wallet.frozen_addresses and not freeze:
self.wallet.unfreeze(addr)
elif addr not in self.wallet.frozen_addresses and freeze:
self.wallet.freeze(addr)
self.update_address_tab()
def create_list_tab(self, l):
w = QWidget()
vbox = QVBoxLayout()
w.setLayout(vbox)
vbox.setMargin(0)
vbox.setSpacing(0)
vbox.addWidget(l)
buttons = QWidget()
vbox.addWidget(buttons)
return w
def create_addresses_tab(self):
l = MyTreeWidget(self, self.create_receive_menu, [ _('Address'), _('Label'), _('Balance'), _('Tx')], 1)
l.setSelectionMode(QAbstractItemView.ExtendedSelection)
l.setSortingEnabled(False)
self.address_list = l
return self.create_list_tab(l)
def create_contacts_tab(self):
l = MyTreeWidget(self, self.create_contact_menu, [_('Key'), _('Value'), _('Type')], 1)
self.contacts_list = l
return self.create_list_tab(l)
def update_invoices_list(self):
inv_list = self.invoices.sorted_list()
l = self.invoices_list
l.clear()
for pr in inv_list:
key = pr.get_id()
status = self.invoices.get_status(key)
requestor = pr.get_requestor()
date_str = format_time(pr.get_expiration_date())
item = QTreeWidgetItem( [ date_str, requestor, pr.memo, self.format_amount(pr.get_amount(), whitespaces=True), pr_tooltips.get(status,'')] )
item.setIcon(4, QIcon(pr_icons.get(status)))
item.setData(0, Qt.UserRole, key)
item.setFont(1, QFont(MONOSPACE_FONT))
item.setFont(3, QFont(MONOSPACE_FONT))
l.addTopLevelItem(item)
l.setCurrentItem(l.topLevelItem(0))
        self.invoices_list.setVisible(len(inv_list) > 0)
        self.invoices_label.setVisible(len(inv_list) > 0)
def delete_imported_key(self, addr):
if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
self.wallet.delete_imported_key(addr)
self.update_address_tab()
self.update_history_tab()
def edit_account_label(self, k):
text, ok = QInputDialog.getText(self, _('Rename account'), _('Name') + ':', text = self.wallet.labels.get(k,''))
if ok:
label = unicode(text)
self.wallet.set_label(k,label)
self.update_address_tab()
def account_set_expanded(self, item, k, b):
item.setExpanded(b)
self.accounts_expanded[k] = b
def create_account_menu(self, position, k, item):
menu = QMenu()
exp = item.isExpanded()
menu.addAction(_("Minimize") if exp else _("Maximize"), lambda: self.account_set_expanded(item, k, not exp))
menu.addAction(_("Rename"), lambda: self.edit_account_label(k))
if self.wallet.seed_version > 4:
menu.addAction(_("View details"), lambda: self.show_account_details(k))
if self.wallet.account_is_pending(k):
menu.addAction(_("Delete"), lambda: self.delete_pending_account(k))
menu.exec_(self.address_list.viewport().mapToGlobal(position))
def delete_pending_account(self, k):
self.wallet.delete_pending_account(k)
self.update_address_tab()
self.update_account_selector()
def create_receive_menu(self, position):
# fixme: this function apparently has a side effect.
# if it is not called the menu pops up several times
#self.address_list.selectedIndexes()
selected = self.address_list.selectedItems()
multi_select = len(selected) > 1
addrs = [unicode(item.text(0)) for item in selected]
if not multi_select:
item = self.address_list.itemAt(position)
if not item:
return
addr = addrs[0]
if not is_valid(addr):
                k = str(item.data(0, Qt.UserRole).toString())
if k:
self.create_account_menu(position, k, item)
else:
item.setExpanded(not item.isExpanded())
return
menu = QMenu()
if not multi_select:
menu.addAction(_("Copy to clipboard"), lambda: self.app.clipboard().setText(addr))
menu.addAction(_("Request payment"), lambda: self.receive_at(addr))
menu.addAction(_("Edit label"), lambda: self.address_list.edit_label(item))
menu.addAction(_('History'), lambda: self.show_address(addr))
menu.addAction(_('Public Keys'), lambda: self.show_public_keys(addr))
if self.wallet.can_export():
menu.addAction(_("Private key"), lambda: self.show_private_key(addr))
if not self.wallet.is_watching_only():
menu.addAction(_("Sign/verify message"), lambda: self.sign_verify_message(addr))
menu.addAction(_("Encrypt/decrypt message"), lambda: self.encrypt_message(addr))
if self.wallet.is_imported(addr):
menu.addAction(_("Remove from wallet"), lambda: self.delete_imported_key(addr))
addr_URL = block_explorer_URL(self.config, 'addr', addr)
if addr_URL:
menu.addAction(_("View on block explorer"), lambda: webbrowser.open(addr_URL))
if any(addr not in self.wallet.frozen_addresses for addr in addrs):
menu.addAction(_("Freeze"), lambda: self.set_addrs_frozen(addrs, True))
if any(addr in self.wallet.frozen_addresses for addr in addrs):
menu.addAction(_("Unfreeze"), lambda: self.set_addrs_frozen(addrs, False))
def can_send(addr):
return addr not in self.wallet.frozen_addresses and self.wallet.get_addr_balance(addr) != (0, 0)
if any(can_send(addr) for addr in addrs):
menu.addAction(_("Send From"), lambda: self.send_from_addresses(addrs))
run_hook('receive_menu', menu, addrs)
menu.exec_(self.address_list.viewport().mapToGlobal(position))
def get_sendable_balance(self):
return sum(map(lambda x:x['value'], self.get_coins()))
def get_coins(self):
if self.pay_from:
return self.pay_from
else:
domain = self.wallet.get_account_addresses(self.current_account)
for i in self.wallet.frozen_addresses:
                if i in domain:
                    domain.remove(i)
return self.wallet.get_spendable_coins(domain)
def send_from_addresses(self, addrs):
self.set_pay_from( addrs )
self.tabs.setCurrentIndex(1)
def paytomany(self):
self.tabs.setCurrentIndex(1)
self.payto_e.paytomany()
def payto(self, addr):
if not addr:
return
self.tabs.setCurrentIndex(1)
self.payto_e.setText(addr)
self.amount_e.setFocus()
def delete_contact(self, x):
if not self.question(_("Do you want to remove")+" %s "%x +_("from your list of contacts?")):
return
self.contacts.pop(x)
self.update_history_tab()
self.update_contacts_tab()
self.update_completions()
def create_contact_menu(self, position):
item = self.contacts_list.itemAt(position)
menu = QMenu()
if not item:
menu.addAction(_("New contact"), lambda: self.new_contact_dialog())
else:
key = unicode(item.text(0))
menu.addAction(_("Copy to Clipboard"), lambda: self.app.clipboard().setText(key))
menu.addAction(_("Pay to"), lambda: self.payto(self.get_contact_payto(key)))
menu.addAction(_("Delete"), lambda: self.delete_contact(key))
run_hook('create_contact_menu', menu, item)
menu.exec_(self.contacts_list.viewport().mapToGlobal(position))
def show_invoice(self, key):
pr = self.invoices.get(key)
pr.verify()
self.show_pr_details(pr)
def show_pr_details(self, pr):
d = QDialog(self)
d.setWindowTitle(_("Invoice"))
vbox = QVBoxLayout(d)
grid = QGridLayout()
grid.addWidget(QLabel(_("Requestor") + ':'), 0, 0)
grid.addWidget(QLabel(pr.get_requestor()), 0, 1)
grid.addWidget(QLabel(_("Expires") + ':'), 1, 0)
grid.addWidget(QLabel(format_time(pr.get_expiration_date())), 1, 1)
grid.addWidget(QLabel(_("Memo") + ':'), 2, 0)
grid.addWidget(QLabel(pr.get_memo()), 2, 1)
grid.addWidget(QLabel(_("Signature") + ':'), 3, 0)
grid.addWidget(QLabel(pr.get_verify_status()), 3, 1)
grid.addWidget(QLabel(_("Payment URL") + ':'), 4, 0)
grid.addWidget(QLabel(pr.payment_url), 4, 1)
grid.addWidget(QLabel(_("Outputs") + ':'), 5, 0)
outputs_str = '\n'.join(map(lambda x: x[1] + ' ' + self.format_amount(x[2])+ self.base_unit(), pr.get_outputs()))
grid.addWidget(QLabel(outputs_str), 5, 1)
if pr.tx:
grid.addWidget(QLabel(_("Transaction ID") + ':'), 6, 0)
l = QLineEdit(pr.tx)
l.setReadOnly(True)
grid.addWidget(l, 6, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
return
def do_pay_invoice(self, key):
pr = self.invoices.get(key)
self.payment_request = pr
self.prepare_for_payment_request()
if pr.verify():
self.payment_request_ok()
else:
self.payment_request_error()
def create_invoice_menu(self, position):
item = self.invoices_list.itemAt(position)
if not item:
return
        key = str(item.data(0, Qt.UserRole).toString())
pr = self.invoices.get(key)
status = self.invoices.get_status(key)
menu = QMenu()
menu.addAction(_("Details"), lambda: self.show_invoice(key))
if status == PR_UNPAID:
menu.addAction(_("Pay Now"), lambda: self.do_pay_invoice(key))
def delete_invoice(key):
self.invoices.remove(key)
self.update_invoices_list()
menu.addAction(_("Delete"), lambda: delete_invoice(key))
menu.exec_(self.invoices_list.viewport().mapToGlobal(position))
def update_address_tab(self):
l = self.address_list
item = l.currentItem()
current_address = item.data(0, Qt.UserRole).toString() if item else None
l.clear()
accounts = self.wallet.get_accounts()
if self.current_account is None:
account_items = sorted(accounts.items())
else:
account_items = [(self.current_account, accounts.get(self.current_account))]
for k, account in account_items:
if len(accounts) > 1:
name = self.wallet.get_account_name(k)
c, u, x = self.wallet.get_account_balance(k)
account_item = QTreeWidgetItem([ name, '', self.format_amount(c + u + x), ''])
l.addTopLevelItem(account_item)
account_item.setExpanded(self.accounts_expanded.get(k, True))
account_item.setData(0, Qt.UserRole, k)
else:
account_item = l
sequences = [0,1] if account.has_change() else [0]
for is_change in sequences:
if len(sequences) > 1:
name = _("Receiving") if not is_change else _("Change")
seq_item = QTreeWidgetItem( [ name, '', '', '', ''] )
account_item.addChild(seq_item)
if not is_change:
seq_item.setExpanded(True)
else:
seq_item = account_item
used_item = QTreeWidgetItem( [ _("Used"), '', '', '', ''] )
used_flag = False
addr_list = account.get_addresses(is_change)
for address in addr_list:
num, is_used = self.wallet.is_used(address)
label = self.wallet.labels.get(address,'')
c, u, x = self.wallet.get_addr_balance(address)
balance = self.format_amount(c + u + x)
item = QTreeWidgetItem( [ address, label, balance, "%d"%num] )
item.setFont(0, QFont(MONOSPACE_FONT))
item.setData(0, Qt.UserRole, address)
item.setData(0, Qt.UserRole+1, True) # label can be edited
if address in self.wallet.frozen_addresses:
item.setBackgroundColor(0, QColor('lightblue'))
if self.wallet.is_beyond_limit(address, account, is_change):
item.setBackgroundColor(0, QColor('red'))
if is_used:
if not used_flag:
seq_item.insertChild(0, used_item)
used_flag = True
used_item.addChild(item)
else:
seq_item.addChild(item)
if address == current_address:
l.setCurrentItem(item)
def update_contacts_tab(self):
l = self.contacts_list
item = l.currentItem()
current_key = item.data(0, Qt.UserRole).toString() if item else None
l.clear()
for key in sorted(self.contacts.keys()):
_type, value = self.contacts[key]
item = QTreeWidgetItem([key, value, _type])
item.setData(0, Qt.UserRole, key)
l.addTopLevelItem(item)
if key == current_key:
l.setCurrentItem(item)
run_hook('update_contacts_tab', l)
def create_console_tab(self):
from console import Console
self.console = console = Console()
return console
def update_console(self):
console = self.console
console.history = self.config.get("console-history",[])
console.history_index = len(console.history)
console.updateNamespace({'wallet' : self.wallet, 'network' : self.network, 'gui':self})
console.updateNamespace({'util' : util, 'bitcoin':bitcoin})
c = commands.Commands(self.wallet, self.network, lambda: self.console.set_json(True))
methods = {}
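        # mkfunc binds `method` at definition time; a bare lambda inside the
        # loop below would late-bind and every console command would invoke
        # the last method in dir(c).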
def mkfunc(f, method):
return lambda *args: apply( f, (method, args, self.password_dialog ))
for m in dir(c):
            if m[0] == '_' or m in ['network', 'wallet']:
                continue
methods[m] = mkfunc(c._run, m)
console.updateNamespace(methods)
def change_account(self,s):
if s == _("All accounts"):
self.current_account = None
else:
accounts = self.wallet.get_account_names()
for k, v in accounts.items():
if v == s:
self.current_account = k
self.update_history_tab()
self.update_status()
self.update_address_tab()
self.update_receive_tab()
def create_status_bar(self):
sb = QStatusBar()
sb.setFixedHeight(35)
qtVersion = qVersion()
self.balance_label = QLabel("")
sb.addWidget(self.balance_label)
from version_getter import UpdateLabel
self.updatelabel = UpdateLabel(self.config, sb)
self.account_selector = QComboBox()
self.account_selector.setSizeAdjustPolicy(QComboBox.AdjustToContents)
self.connect(self.account_selector,SIGNAL("activated(QString)"),self.change_account)
sb.addPermanentWidget(self.account_selector)
self.search_box = QLineEdit()
self.search_box.textChanged.connect(self.do_search)
self.search_box.hide()
sb.addPermanentWidget(self.search_box)
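        # qVersion() returns a string such as "4.8.7"; indexing single
        # characters assumes one-digit major and minor version numbers.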
if (int(qtVersion[0]) >= 4 and int(qtVersion[2]) >= 7):
sb.addPermanentWidget( StatusBarButton( QIcon(":icons/switchgui.png"), _("Switch to Lite Mode"), self.go_lite ) )
self.lock_icon = QIcon()
self.password_button = StatusBarButton( self.lock_icon, _("Password"), self.change_password_dialog )
sb.addPermanentWidget( self.password_button )
sb.addPermanentWidget( StatusBarButton( QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) )
self.seed_button = StatusBarButton( QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog )
sb.addPermanentWidget( self.seed_button )
self.status_button = StatusBarButton( QIcon(":icons/status_disconnected.png"), _("Network"), self.run_network_dialog )
sb.addPermanentWidget( self.status_button )
run_hook('create_status_bar', sb)
self.setStatusBar(sb)
def update_lock_icon(self):
icon = QIcon(":icons/lock.png") if self.wallet.use_encryption else QIcon(":icons/unlock.png")
self.password_button.setIcon( icon )
def update_buttons_on_seed(self):
self.seed_button.setVisible(self.wallet.has_seed())
self.password_button.setVisible(self.wallet.can_change_password())
self.send_button.setText(_("Create unsigned transaction") if self.wallet.is_watching_only() else _("Send"))
def change_password_dialog(self):
from password_dialog import PasswordDialog
d = PasswordDialog(self.wallet, self)
d.run()
self.update_lock_icon()
def toggle_search(self):
self.search_box.setHidden(not self.search_box.isHidden())
if not self.search_box.isHidden():
self.search_box.setFocus(1)
else:
self.do_search('')
def do_search(self, t):
i = self.tabs.currentIndex()
if i == 0:
self.history_list.filter(t, [1, 2, 3]) # Date, Description, Amount
elif i == 1:
self.invoices_list.filter(t, [0, 1, 2, 3]) # Date, Requestor, Description, Amount
elif i == 2:
self.receive_list.filter(t, [0, 1, 2, 3, 4]) # Date, Account, Address, Description, Amount
elif i == 3:
self.address_list.filter(t, [0,1, 2]) # Address, Label, Balance
elif i == 4:
self.contacts_list.filter(t, [0, 1]) # Key, Value
def new_contact_dialog(self):
d = QDialog(self)
d.setWindowTitle(_("New Contact"))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('New Contact') + ':'))
grid = QGridLayout()
line1 = QLineEdit()
line2 = QLineEdit()
grid.addWidget(QLabel(_("Address")), 1, 0)
grid.addWidget(line1, 1, 1)
grid.addWidget(QLabel(_("Name")), 2, 0)
grid.addWidget(line2, 2, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
address = str(line1.text())
label = unicode(line2.text())
if not is_valid(address):
QMessageBox.warning(self, _('Error'), _('Invalid Address'), _('OK'))
return
self.contacts[label] = ('address', address)
self.update_contacts_tab()
self.update_history_tab()
self.update_completions()
self.tabs.setCurrentIndex(3)
@protected
def new_account_dialog(self, password):
dialog = QDialog(self)
dialog.setModal(1)
dialog.setWindowTitle(_("New Account"))
vbox = QVBoxLayout()
vbox.addWidget(QLabel(_('Account name')+':'))
e = QLineEdit()
vbox.addWidget(e)
msg = _("Note: Newly created accounts are 'pending' until they receive rubycoins.") + " " \
+ _("You will need to wait for 2 confirmations until the correct balance is displayed and more addresses are created for that account.")
l = QLabel(msg)
l.setWordWrap(True)
vbox.addWidget(l)
vbox.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
dialog.setLayout(vbox)
r = dialog.exec_()
if not r:
return
name = str(e.text())
self.wallet.create_pending_account(name, password)
self.update_address_tab()
self.update_account_selector()
self.tabs.setCurrentIndex(3)
def show_master_public_keys(self):
dialog = QDialog(self)
dialog.setModal(1)
dialog.setWindowTitle(_("Master Public Keys"))
mpk_dict = self.wallet.get_master_public_keys()
vbox = QVBoxLayout()
# only show the combobox in case multiple accounts are available
if len(mpk_dict) > 1:
gb = QGroupBox(_("Master Public Keys"))
vbox.addWidget(gb)
group = QButtonGroup()
first_button = None
for key in sorted(mpk_dict.keys()):
                is_mine = key in self.wallet.master_private_keys
b = QRadioButton(gb)
name = 'Self' if is_mine else 'Cosigner'
b.setText(name + ' (%s)'%key)
b.key = key
group.addButton(b)
vbox.addWidget(b)
if not first_button:
first_button = b
mpk_text = ShowQRTextEdit()
mpk_text.setMaximumHeight(170)
vbox.addWidget(mpk_text)
def show_mpk(b):
mpk = mpk_dict.get(b.key, "")
mpk_text.setText(mpk)
group.buttonReleased.connect(show_mpk)
first_button.setChecked(True)
show_mpk(first_button)
elif len(mpk_dict) == 1:
mpk = mpk_dict.values()[0]
mpk_text = ShowQRTextEdit(text=mpk)
mpk_text.setMaximumHeight(170)
vbox.addWidget(mpk_text)
mpk_text.addCopyButton(self.app)
vbox.addLayout(Buttons(CloseButton(dialog)))
dialog.setLayout(vbox)
dialog.exec_()
@protected
def show_seed_dialog(self, password):
if not self.wallet.has_seed():
QMessageBox.information(self, _('Message'), _('This wallet has no seed'), _('OK'))
return
try:
mnemonic = self.wallet.get_mnemonic(password)
except BaseException as e:
QMessageBox.warning(self, _('Error'), str(e), _('OK'))
return
from seed_dialog import SeedDialog
d = SeedDialog(self, mnemonic, self.wallet.has_imported_keys())
d.exec_()
def show_qrcode(self, data, title = _("QR code")):
if not data:
return
d = QRDialog(data, self, title)
d.exec_()
def do_protect(self, func, args):
if self.wallet.use_encryption:
while True:
password = self.password_dialog()
if not password:
return
try:
self.wallet.check_password(password)
break
except Exception as e:
QMessageBox.warning(self, _('Error'), str(e), _('OK'))
continue
else:
password = None
if args != (False,):
args = (self,) + args + (password,)
else:
args = (self, password)
apply(func, args)
def show_public_keys(self, address):
        if not address:
            return
try:
pubkey_list = self.wallet.get_public_keys(address)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
d = QDialog(self)
d.setMinimumSize(600, 200)
d.setModal(1)
d.setWindowTitle(_("Public key"))
vbox = QVBoxLayout()
vbox.addWidget( QLabel(_("Address") + ': ' + address))
vbox.addWidget( QLabel(_("Public key") + ':'))
keys_e = ShowQRTextEdit(text='\n'.join(pubkey_list))
keys_e.addCopyButton(self.app)
vbox.addWidget(keys_e)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
d.exec_()
@protected
def show_private_key(self, address, password):
        if not address:
            return
try:
pk_list = self.wallet.get_private_key(address, password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
d = QDialog(self)
d.setMinimumSize(600, 200)
d.setModal(1)
d.setWindowTitle(_("Private key"))
vbox = QVBoxLayout()
vbox.addWidget( QLabel(_("Address") + ': ' + address))
vbox.addWidget( QLabel(_("Private key") + ':'))
keys_e = ShowQRTextEdit(text='\n'.join(pk_list))
keys_e.addCopyButton(self.app)
vbox.addWidget(keys_e)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
d.exec_()
@protected
def do_sign(self, address, message, signature, password):
message = unicode(message.toPlainText())
message = message.encode('utf-8')
try:
sig = self.wallet.sign_message(str(address.text()), message, password)
signature.setText(sig)
except Exception as e:
self.show_message(str(e))
def do_verify(self, address, message, signature):
message = unicode(message.toPlainText())
message = message.encode('utf-8')
if bitcoin.verify_message(address.text(), str(signature.toPlainText()), message):
self.show_message(_("Signature verified"))
else:
self.show_message(_("Error: wrong signature"))
def sign_verify_message(self, address=''):
d = QDialog(self)
d.setModal(1)
d.setWindowTitle(_('Sign/verify Message'))
d.setMinimumSize(410, 290)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
address_e = QLineEdit()
address_e.setText(address)
layout.addWidget(QLabel(_('Address')), 2, 0)
layout.addWidget(address_e, 2, 1)
signature_e = QTextEdit()
layout.addWidget(QLabel(_('Signature')), 3, 0)
layout.addWidget(signature_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Sign"))
b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Verify"))
b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
@protected
def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
try:
decrypted = self.wallet.decrypt_message(str(pubkey_e.text()), str(encrypted_e.toPlainText()), password)
message_e.setText(decrypted)
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_warning(str(e))
def do_encrypt(self, message_e, pubkey_e, encrypted_e):
message = unicode(message_e.toPlainText())
message = message.encode('utf-8')
try:
encrypted = bitcoin.encrypt_message(message, str(pubkey_e.text()))
encrypted_e.setText(encrypted)
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_warning(str(e))
def encrypt_message(self, address = ''):
d = QDialog(self)
d.setModal(1)
d.setWindowTitle(_('Encrypt/decrypt Message'))
d.setMinimumSize(610, 490)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
pubkey_e = QLineEdit()
if address:
pubkey = self.wallet.get_public_keys(address)[0]
pubkey_e.setText(pubkey)
layout.addWidget(QLabel(_('Public key')), 2, 0)
layout.addWidget(pubkey_e, 2, 1)
encrypted_e = QTextEdit()
layout.addWidget(QLabel(_('Encrypted')), 3, 0)
layout.addWidget(encrypted_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Encrypt"))
b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Decrypt"))
b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
def question(self, msg):
return QMessageBox.question(self, _('Message'), msg, QMessageBox.Yes | QMessageBox.No, QMessageBox.No) == QMessageBox.Yes
def show_message(self, msg):
QMessageBox.information(self, _('Message'), msg, _('OK'))
def show_warning(self, msg):
QMessageBox.warning(self, _('Warning'), msg, _('OK'))
def password_dialog(self, msg=None):
d = QDialog(self)
d.setModal(1)
d.setWindowTitle(_("Enter Password"))
pw = QLineEdit()
pw.setEchoMode(2)
vbox = QVBoxLayout()
if not msg:
msg = _('Please enter your password')
vbox.addWidget(QLabel(msg))
grid = QGridLayout()
grid.setSpacing(8)
grid.addWidget(QLabel(_('Password')), 1, 0)
grid.addWidget(pw, 1, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
d.setLayout(vbox)
run_hook('password_dialog', pw, grid, 1)
if not d.exec_(): return
return unicode(pw.text())
def tx_from_text(self, txt):
"json or raw hexadecimal"
txt = txt.strip()
try:
txt.decode('hex')
is_hex = True
except:
is_hex = False
if is_hex:
try:
return Transaction(txt)
except:
traceback.print_exc(file=sys.stdout)
QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum was unable to parse your transaction"))
return
try:
tx_dict = json.loads(str(txt))
assert "hex" in tx_dict.keys()
tx = Transaction(tx_dict["hex"])
#if tx_dict.has_key("input_info"):
# input_info = json.loads(tx_dict['input_info'])
# tx.add_input_info(input_info)
return tx
except Exception:
traceback.print_exc(file=sys.stdout)
QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum was unable to parse your transaction"))
def read_tx_from_qrcode(self):
from electrum_rby import qrscanner
try:
data = qrscanner.scan_qr(self.config)
except BaseException, e:
QMessageBox.warning(self, _('Error'), _(e), _('OK'))
return
if not data:
return
# if the user scanned a bitcoin URI
if data.startswith("rubycoin:"):
self.pay_from_URI(data)
return
# else if the user scanned an offline signed tx
# transactions are binary, but qrcode seems to return utf8...
data = data.decode('utf8')
z = bitcoin.base_decode(data, length=None, base=43)
data = ''.join(chr(ord(b)) for b in z).encode('hex')
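        # A note on the scheme (not in the original comments): base43 is used
        # because the QR alphanumeric mode has a 45-character alphabet, so
        # encoding the raw transaction this way keeps the QR code compact.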
tx = self.tx_from_text(data)
if not tx:
return
self.show_transaction(tx)
def read_tx_from_file(self):
fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
if not fileName:
return
try:
with open(fileName, "r") as f:
file_content = f.read()
except (ValueError, IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
return self.tx_from_text(file_content)
@protected
def sign_raw_transaction(self, tx, password):
try:
self.wallet.sign_transaction(tx, password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
QMessageBox.warning(self, _("Error"), str(e))
def do_process_from_text(self):
text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
if not text:
return
tx = self.tx_from_text(text)
if tx:
self.show_transaction(tx)
def do_process_from_file(self):
tx = self.read_tx_from_file()
if tx:
self.show_transaction(tx)
def do_process_from_txid(self):
from electrum_rby import transaction
txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
if ok and txid:
r = self.network.synchronous_get([ ('blockchain.transaction.get',[str(txid)]) ])[0]
if r:
tx = transaction.Transaction(r)
if tx:
self.show_transaction(tx)
else:
self.show_message("unknown transaction")
def do_process_from_csvReader(self, csvReader):
outputs = []
errors = []
errtext = ""
try:
for position, row in enumerate(csvReader):
address = row[0]
if not bitcoin.is_address(address):
errors.append((position, address))
continue
amount = Decimal(row[1])
amount = int(100000000*amount)
outputs.append(('address', address, amount))
except (ValueError, IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
return
if errors != []:
for x in errors:
errtext += "CSV Row " + str(x[0]+1) + ": " + x[1] + "\n"
QMessageBox.critical(None, _("Invalid Addresses"), _("ABORTING! Invalid Addresses found:") + "\n\n" + errtext)
return
try:
tx = self.wallet.make_unsigned_transaction(outputs, None, None)
except Exception as e:
self.show_message(str(e))
return
self.show_transaction(tx)
@protected
def export_privkeys_dialog(self, password):
if self.wallet.is_watching_only():
self.show_message(_("This is a watching-only wallet"))
return
try:
self.wallet.check_password(password)
except Exception as e:
QMessageBox.warning(self, _('Error'), str(e), _('OK'))
return
d = QDialog(self)
d.setWindowTitle(_('Private keys'))
d.setMinimumSize(850, 300)
vbox = QVBoxLayout(d)
msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
_("Exposing a single private key can compromise your entire wallet!"),
_("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
vbox.addWidget(QLabel(msg))
e = QTextEdit()
e.setReadOnly(True)
vbox.addWidget(e)
defaultname = 'electrum-rby-private-keys.csv'
select_msg = _('Select file to export your private keys to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
b = OkButton(d, _('Export'))
b.setEnabled(False)
vbox.addLayout(Buttons(CancelButton(d), b))
private_keys = {}
addresses = self.wallet.addresses(True)
done = False
def privkeys_thread():
for addr in addresses:
time.sleep(0.1)
if done:
break
private_keys[addr] = "\n".join(self.wallet.get_private_key(addr, password))
d.emit(SIGNAL('computing_privkeys'))
d.emit(SIGNAL('show_privkeys'))
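        # The worker thread only emits Qt signals; the slots connected below
        # run in the GUI thread, so the widgets are never touched from the
        # worker thread directly.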
def show_privkeys():
s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items()))
e.setText(s)
b.setEnabled(True)
d.connect(d, QtCore.SIGNAL('computing_privkeys'), lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses))))
d.connect(d, QtCore.SIGNAL('show_privkeys'), show_privkeys)
threading.Thread(target=privkeys_thread).start()
if not d.exec_():
done = True
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
except (IOError, os.error), reason:
export_error_label = _("Electrum was unable to produce a private key-export.")
QMessageBox.critical(None, _("Unable to create csv"), export_error_label + "\n" + str(reason))
except Exception as e:
self.show_message(str(e))
return
self.show_message(_("Private keys exported."))
def do_export_privkeys(self, fileName, pklist, is_csv):
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f)
transaction.writerow(["address", "private_key"])
for addr, pk in pklist.items():
transaction.writerow(["%34s"%addr,pk])
else:
import json
f.write(json.dumps(pklist, indent = 4))
def do_import_labels(self):
labelsFile = self.getOpenFileName(_("Open labels file"), "*.dat")
if not labelsFile: return
try:
f = open(labelsFile, 'r')
data = f.read()
f.close()
for key, value in json.loads(data).items():
self.wallet.set_label(key, value)
QMessageBox.information(None, _("Labels imported"), _("Your labels were imported from")+" '%s'" % str(labelsFile))
except (IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to import labels"), _("Electrum was unable to import your labels.")+"\n" + str(reason))
def do_export_labels(self):
labels = self.wallet.labels
try:
fileName = self.getSaveFileName(_("Select file to save your labels"), 'electrum-rby_labels.dat', "*.dat")
if fileName:
with open(fileName, 'w+') as f:
json.dump(labels, f)
QMessageBox.information(None, _("Labels exported"), _("Your labels where exported to")+" '%s'" % str(fileName))
except (IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to export labels"), _("Electrum was unable to export your labels.")+"\n" + str(reason))
def export_history_dialog(self):
d = QDialog(self)
d.setWindowTitle(_('Export History'))
d.setMinimumSize(400, 200)
vbox = QVBoxLayout(d)
defaultname = os.path.expanduser('~/electrum-rby-history.csv')
select_msg = _('Select file to export your wallet transactions to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
vbox.addStretch(1)
hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
vbox.addLayout(hbox)
run_hook('export_history_dialog', self, hbox)
self.update()
if not d.exec_():
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_history(self.wallet, filename, csv_button.isChecked())
except (IOError, os.error), reason:
export_error_label = _("Electrum was unable to produce a transaction export.")
QMessageBox.critical(self, _("Unable to export history"), export_error_label + "\n" + str(reason))
return
QMessageBox.information(self,_("History exported"), _("Your wallet history has been successfully exported."))
def do_export_history(self, wallet, fileName, is_csv):
history = wallet.get_history()
lines = []
for item in history:
tx_hash, confirmations, value, timestamp, balance = item
if confirmations:
if timestamp is not None:
time_string = format_time(timestamp)
else:
time_string = "unknown"
else:
time_string = "pending"
if value is not None:
value_string = format_satoshis(value, True)
else:
value_string = '--'
if tx_hash:
label, is_default_label = wallet.get_label(tx_hash)
label = label.encode('utf-8')
else:
label = ""
if is_csv:
lines.append([tx_hash, label, confirmations, value_string, time_string])
else:
lines.append({'txid':tx_hash, 'date':"%16s"%time_string, 'label':label, 'value':value_string})
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f, lineterminator='\n')
transaction.writerow(["transaction_hash","label", "confirmations", "value", "timestamp"])
for line in lines:
transaction.writerow(line)
else:
import json
f.write(json.dumps(lines, indent = 4))
def sweep_key_dialog(self):
d = QDialog(self)
d.setWindowTitle(_('Sweep private keys'))
d.setMinimumSize(600, 300)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_("Enter private keys")))
keys_e = QTextEdit()
keys_e.setTabChangesFocus(True)
vbox.addWidget(keys_e)
h, address_e = address_field(self.wallet.addresses(False))
vbox.addLayout(h)
vbox.addStretch(1)
button = OkButton(d, _('Sweep'))
vbox.addLayout(Buttons(CancelButton(d), button))
button.setEnabled(False)
def get_address():
addr = str(address_e.text())
if bitcoin.is_address(addr):
return addr
def get_pk():
pk = str(keys_e.toPlainText()).strip()
if Wallet.is_private_key(pk):
return pk.split()
f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None)
keys_e.textChanged.connect(f)
address_e.textChanged.connect(f)
if not d.exec_():
return
fee = self.wallet.fee_per_kb
tx = Transaction.sweep(get_pk(), self.network, get_address(), fee)
self.show_transaction(tx)
@protected
def do_import_privkey(self, password):
if not self.wallet.has_imported_keys():
r = QMessageBox.question(None, _('Warning'), '<b>'+_('Warning') +':\n</b><br/>'+ _('Imported keys are not recoverable from seed.') + ' ' \
+ _('If you ever need to restore your wallet from its seed, these keys will be lost.') + '<p>' \
+ _('Are you sure you understand what you are doing?'), 3, 4)
if r == 4: return
text = text_dialog(self, _('Import private keys'), _("Enter private keys")+':', _("Import"))
if not text: return
text = str(text).split()
badkeys = []
addrlist = []
for key in text:
try:
addr = self.wallet.import_key(key, password)
except Exception as e:
badkeys.append(key)
continue
if not addr:
badkeys.append(key)
else:
addrlist.append(addr)
if addrlist:
QMessageBox.information(self, _('Information'), _("The following addresses were added") + ':\n' + '\n'.join(addrlist))
if badkeys:
QMessageBox.critical(self, _('Error'), _("The following inputs could not be imported") + ':\n'+ '\n'.join(badkeys))
self.update_address_tab()
self.update_history_tab()
def settings_dialog(self):
self.need_restart = False
d = QDialog(self)
d.setWindowTitle(_('Electrum Settings'))
d.setModal(1)
vbox = QVBoxLayout()
grid = QGridLayout()
grid.setColumnStretch(0,1)
widgets = []
lang_label = QLabel(_('Language') + ':')
lang_help = HelpButton(_('Select which language is used in the GUI (after restart).'))
lang_combo = QComboBox()
from electrum_rby.i18n import languages
lang_combo.addItems(languages.values())
try:
index = languages.keys().index(self.config.get("language",''))
except Exception:
index = 0
lang_combo.setCurrentIndex(index)
if not self.config.is_modifiable('language'):
for w in [lang_combo, lang_label]: w.setEnabled(False)
def on_lang(x):
lang_request = languages.keys()[lang_combo.currentIndex()]
if lang_request != self.config.get('language'):
self.config.set_key("language", lang_request, True)
self.need_restart = True
lang_combo.currentIndexChanged.connect(on_lang)
widgets.append((lang_label, lang_combo, lang_help))
nz_label = QLabel(_('Zeros after decimal point') + ':')
nz_help = HelpButton(_('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"'))
nz = QSpinBox()
nz.setMinimum(0)
nz.setMaximum(self.decimal_point)
nz.setValue(self.num_zeros)
if not self.config.is_modifiable('num_zeros'):
for w in [nz, nz_label]: w.setEnabled(False)
def on_nz():
value = nz.value()
if self.num_zeros != value:
self.num_zeros = value
self.config.set_key('num_zeros', value, True)
self.update_history_tab()
self.update_address_tab()
nz.valueChanged.connect(on_nz)
widgets.append((nz_label, nz, nz_help))
fee_label = QLabel(_('Transaction fee per kb') + ':')
fee_help = HelpButton(_('Fee per kilobyte of transaction.') + '\n' \
+ _('Recommended value') + ': ' + self.format_amount(bitcoin.RECOMMENDED_FEE) + ' ' + self.base_unit())
fee_e = BTCAmountEdit(self.get_decimal_point)
fee_e.setAmount(self.wallet.fee_per_kb)
if not self.config.is_modifiable('fee_per_kb'):
for w in [fee_e, fee_label]: w.setEnabled(False)
def on_fee():
fee = fee_e.get_amount()
self.wallet.set_fee(fee)
fee_e.editingFinished.connect(on_fee)
widgets.append((fee_label, fee_e, fee_help))
units = ['RBY', 'mRBY', 'bits']
unit_label = QLabel(_('Base unit') + ':')
unit_combo = QComboBox()
unit_combo.addItems(units)
unit_combo.setCurrentIndex(units.index(self.base_unit()))
msg = _('Base unit of your wallet.')\
              + '\n1RBY=1000mRBY.\n' \
              + _('These settings affect the fields in the Send tab') + ' '
unit_help = HelpButton(msg)
def on_unit(x):
unit_result = units[unit_combo.currentIndex()]
if self.base_unit() == unit_result:
return
if unit_result == 'RBY':
self.decimal_point = 8
elif unit_result == 'mRBY':
self.decimal_point = 5
elif unit_result == 'bits':
self.decimal_point = 2
else:
raise Exception('Unknown base unit')
self.config.set_key('decimal_point', self.decimal_point, True)
self.update_history_tab()
self.update_receive_tab()
self.update_address_tab()
fee_e.setAmount(self.wallet.fee_per_kb)
self.update_status()
unit_combo.currentIndexChanged.connect(on_unit)
widgets.append((unit_label, unit_combo, unit_help))
block_explorers = sorted(block_explorer_info.keys())
block_ex_label = QLabel(_('Online Block Explorer') + ':')
block_ex_combo = QComboBox()
block_ex_combo.addItems(block_explorers)
block_ex_combo.setCurrentIndex(block_explorers.index(block_explorer(self.config)))
block_ex_help = HelpButton(_('Choose which online block explorer to use for functions that open a web browser'))
def on_be(x):
be_result = block_explorers[block_ex_combo.currentIndex()]
self.config.set_key('block_explorer', be_result, True)
block_ex_combo.currentIndexChanged.connect(on_be)
widgets.append((block_ex_label, block_ex_combo, block_ex_help))
from electrum_rby import qrscanner
system_cameras = qrscanner._find_system_cameras()
qr_combo = QComboBox()
qr_combo.addItem("Default","default")
for camera, device in system_cameras.items():
qr_combo.addItem(camera, device)
#combo.addItem("Manually specify a device", config.get("video_device"))
index = qr_combo.findData(self.config.get("video_device"))
qr_combo.setCurrentIndex(index)
qr_label = QLabel(_('Video Device') + ':')
qr_combo.setEnabled(qrscanner.zbar is not None)
qr_help = HelpButton(_("Install the zbar package to enable this.\nOn linux, type: 'apt-get install python-zbar'"))
on_video_device = lambda x: self.config.set_key("video_device", str(qr_combo.itemData(x).toString()), True)
qr_combo.currentIndexChanged.connect(on_video_device)
widgets.append((qr_label, qr_combo, qr_help))
usechange_cb = QCheckBox(_('Use change addresses'))
usechange_cb.setChecked(self.wallet.use_change)
usechange_help = HelpButton(_('Using change addresses makes it more difficult for other people to track your transactions.'))
if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
def on_usechange(x):
usechange_result = x == Qt.Checked
if self.wallet.use_change != usechange_result:
self.wallet.use_change = usechange_result
self.wallet.storage.put('use_change', self.wallet.use_change)
usechange_cb.stateChanged.connect(on_usechange)
widgets.append((usechange_cb, None, usechange_help))
showtx_cb = QCheckBox(_('Show transaction before broadcast'))
showtx_cb.setChecked(self.config.get('show_before_broadcast', False))
showtx_cb.stateChanged.connect(lambda x: self.config.set_key('show_before_broadcast', showtx_cb.isChecked()))
        showtx_help = HelpButton(_('Display the details of your transaction before broadcasting it.'))
widgets.append((showtx_cb, None, showtx_help))
can_edit_fees_cb = QCheckBox(_('Set transaction fees manually'))
can_edit_fees_cb.setChecked(self.config.get('can_edit_fees', False))
def on_editfees(x):
self.config.set_key('can_edit_fees', x == Qt.Checked)
self.update_fee_edit()
can_edit_fees_cb.stateChanged.connect(on_editfees)
can_edit_fees_help = HelpButton(_('This option lets you edit fees in the send tab.'))
widgets.append((can_edit_fees_cb, None, can_edit_fees_help))
for a,b,c in widgets:
i = grid.rowCount()
if b:
grid.addWidget(a, i, 0)
grid.addWidget(b, i, 1)
else:
grid.addWidget(a, i, 0, 1, 2)
grid.addWidget(c, i, 2)
vbox.addLayout(grid)
vbox.addStretch(1)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
# run the dialog
d.exec_()
run_hook('close_settings_dialog')
if self.need_restart:
QMessageBox.warning(self, _('Success'), _('Please restart Electrum to activate the new GUI settings'), _('OK'))
def run_network_dialog(self):
if not self.network:
QMessageBox.warning(self, _('Offline'), _('You are using Electrum in offline mode.\nRestart Electrum if you want to get connected.'), _('OK'))
return
NetworkDialog(self.wallet.network, self.config, self).do_exec()
def closeEvent(self, event):
self.config.set_key("is_maximized", self.isMaximized())
if not self.isMaximized():
g = self.geometry()
self.config.set_key("winpos-qt", [g.left(),g.top(),g.width(),g.height()])
self.config.set_key("console-history", self.console.history[-50:], True)
self.wallet.storage.put('accounts_expanded', self.accounts_expanded)
event.accept()
def plugins_dialog(self):
from electrum_rby.plugins import plugins, descriptions, is_available, loader
self.pluginsdialog = d = QDialog(self)
d.setWindowTitle(_('Electrum Plugins'))
d.setModal(1)
vbox = QVBoxLayout(d)
# plugins
scroll = QScrollArea()
scroll.setEnabled(True)
scroll.setWidgetResizable(True)
scroll.setMinimumSize(400,250)
vbox.addWidget(scroll)
w = QWidget()
scroll.setWidget(w)
w.setMinimumHeight(len(plugins)*35)
grid = QGridLayout()
grid.setColumnStretch(0,1)
w.setLayout(grid)
def do_toggle(cb, name, w):
p = plugins.get(name)
if p:
p.disable()
p.close()
plugins.pop(name)
else:
module = loader(name)
plugins[name] = p = module.Plugin(self.config, name)
p.enable()
p.wallet = self.wallet
p.load_wallet(self.wallet)
p.init_qt(self.gui_object)
r = p.is_enabled()
cb.setChecked(r)
if w: w.setEnabled(r)
def mk_toggle(cb, name, w):
return lambda: do_toggle(cb, name, w)
for i, descr in enumerate(descriptions):
name = descr['name']
p = plugins.get(name)
try:
cb = QCheckBox(descr['fullname'])
cb.setEnabled(is_available(name, self.wallet))
cb.setChecked(p is not None)
grid.addWidget(cb, i, 0)
if p and p.requires_settings():
w = p.settings_widget(self)
w.setEnabled(p.is_enabled())
grid.addWidget(w, i, 1)
else:
w = None
cb.clicked.connect(mk_toggle(cb, name, w))
grid.addWidget(HelpButton(descr['description']), i, 2)
except Exception:
print_msg("Error: cannot display plugin", name)
traceback.print_exc(file=sys.stdout)
grid.setRowStretch(i+1,1)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
def show_account_details(self, k):
account = self.wallet.accounts[k]
d = QDialog(self)
d.setWindowTitle(_('Account Details'))
d.setModal(1)
vbox = QVBoxLayout(d)
name = self.wallet.get_account_name(k)
label = QLabel('Name: ' + name)
vbox.addWidget(label)
vbox.addWidget(QLabel(_('Address type') + ': ' + account.get_type()))
vbox.addWidget(QLabel(_('Derivation') + ': ' + k))
vbox.addWidget(QLabel(_('Master Public Key:')))
text = QTextEdit()
text.setReadOnly(True)
text.setMaximumHeight(170)
vbox.addWidget(text)
mpk_text = '\n'.join( account.get_master_pubkeys() )
text.setText(mpk_text)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
@protected
def create_csr(self, alias, challenge, password):
from electrum_rby import x509
import tlslite
xprv = self.wallet.get_master_private_key(self.wallet.root_name, password)
_, _, _, c, k = bitcoin.deserialize_xkey(xprv)
csr = x509.create_csr(alias, challenge, k)
csr = tlslite.utils.pem.pem(bytearray(csr), "CERTIFICATE REQUEST")
with open('test.csr', 'w') as f:
f.write(csr)
#os.system('openssl asn1parse -i -in test.csr')
return 'test.csr'
|
gpl-3.0
| 5,672,153,572,525,321,000
| 39.039797
| 454
| 0.585696
| false
| 3.895322
| true
| false
| false
|
binary-signal/mass-apk-installer
|
mass_apk/helpers.py
|
1
|
1688
|
"""Mass apk helper functions module."""
import functools
import logging
import os
import platform
from enum import Enum, unique
from timeit import default_timer as timer
__all__ = ["Platform", "detect_platform", "human_time", "elapsed_time", "MB"]
log = logging.getLogger(__name__)
MB = 1024 * 1024
@unique
class Platform(Enum):
"""Platform enum used to detected running operating system."""
OSX = "osx"
LINUX = "linux"
WIN = "win"
def detect_platform() -> Platform:
"""Detect running operating system.
    Raises RuntimeError if the operating system can't be detected.
"""
detected_system = platform.system()
if os.name == "posix" and detected_system == "Darwin":
return Platform.OSX
elif os.name == "posix" and detected_system == "Linux":
return Platform.LINUX
elif os.name == "nt" and detected_system == "Windows":
return Platform.WIN
raise RuntimeError("Unsupported OS")
def human_time(start: float, end: float) -> str:
"""Create a human readable string.
Create a human readable string for elapsed time between
start and end timestamps.
"""
hours, rem = divmod(end - start, 3600)
minutes, seconds = divmod(rem, 60)
return "Elapsed time {:0>2}:{:0>2}:{:05.2f}".format(
int(hours), int(minutes), seconds
)
def elapsed_time(func):
"""Decorate function `func` to measure its execution time."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
start = timer()
result = func(*args, **kwargs)
end = timer()
log.debug("%s elapsed time: %s", func.__name__, human_time(start, end))
return result
return wrapper
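# Usage sketch (hypothetical function, not part of this module): wrapping a
# function with @elapsed_time logs its runtime through `log.debug` without
# changing its return value.
#
# @elapsed_time
# def push_apk(path):
#     ...  # do the actual work
#
# push_apk("app.apk")  # logs e.g. "push_apk elapsed time: Elapsed time 00:00:01.23"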
|
bsd-3-clause
| 1,378,833,015,522,749,400
| 24.19403
| 79
| 0.637441
| false
| 3.925581
| false
| false
| false
|
optimamodel/Optima
|
tests/testproject.py
|
1
|
5062
|
#!/usr/bin/env python
"""
Test script to see if Optima works.
To use: comment out lines in the definition of 'tests' to not run those tests.
NOTE: for best results, run in interactive mode, e.g.
python -i tests.py
Version: 2016feb03 by cliffk
"""
## Define tests to run here!!!
tests = [
'makeproject',
'parametercheck',
#'resultsaddition',
#'saveload',
'loadspreadsheet',
#'loadeconomics',
'runsim'
]
##############################################################################
## Initialization -- same for every test script
##############################################################################
from optima import tic, toc, blank, pd # analysis:ignore
if 'doplot' not in locals(): doplot = True
def done(t=0):
print('Done.')
toc(t)
blank()
blank()
print('Running tests:')
for i,test in enumerate(tests): print(('%i. '+test) % (i+1))
blank()
##############################################################################
## The tests
##############################################################################
T = tic()
## Project creation test
if 'makeproject' in tests:
t = tic()
print('Running make project test...')
from optima import Project
P = Project()
print(P)
done(t)
if 'parametercheck' in tests:
from optima import defaultproject, OptimaException
t = tic()
print('Running parameters check test...')
P = defaultproject()
datakeys = P.data.keys()
parkeys = P.pars().keys()
dataonly = set([
'condomcas', 'condomcom', 'condomreg',
'hivprev', 'meta', 'npops',
'numactscas', 'numactscom', 'numactsinj', 'numactsreg',
'optdeath', 'optnewtreat', 'optnumdiag', 'optnuminfect', 'optnumtest', 'optplhiv', 'optprev','optpropdx','optpropcare','optproptx','optpropsupp','optproppmtct',
'partcas', 'partcom', 'partinj', 'partreg',
'pops', 'pships', 'years'])
parsonly = set([
'actscas', 'actscom', 'actsinj', 'actsreg', 'age', 'transnorm',
'condcas', 'condcom', 'condreg', 'numcirc',
'female', 'force', 'inhomo', 'initprev','hivdeath',
'propdx','propcare','proptx','propsupp','proppmtct',
'injects', 'male', 'popkeys', 'fromto', 'transmatrix',
'fixproppmtct', 'fixpropsupp', 'fixpropdx', 'fixpropcare', 'fixproptx'])
dataminuspars = set(datakeys) - set(parkeys)
parsminusdata = set(parkeys) - set(datakeys)
if dataminuspars != dataonly:
mismatch1 = list(dataonly - dataminuspars)
mismatch2 = list(dataminuspars - dataonly)
errormsg = 'Unexpected "dataminuspars" parameter in "%s" or "%s"' % (mismatch1, mismatch2)
raise OptimaException(errormsg)
if parsminusdata != parsonly:
mismatch1 = list(parsonly - parsminusdata)
mismatch2 = list(parsminusdata - parsonly)
errormsg = 'Unexpected "parsminusdata" parameter in "%s" or "%s"' % (mismatch1, mismatch2)
raise OptimaException(errormsg)
done(t)
## Adding results
if 'resultsaddition' in tests:
t = tic()
print('Running results addition test...')
import optima as op
P = op.defaultproject()
Q = op.defaultproject()
R1 = P.results[0]
R2 = Q.results[0]
R3 = R1+R2
if doplot:
multires = op.Multiresultset([R1,R3])
op.pygui(multires, toplot=['prev-tot','numplhiv-tot'])
done(t)
## Project save/load test
if 'saveload' in tests:
t = tic()
print('Running save/load test...')
from optima import Project, saveobj, loadproj
from os import remove
filename = 'testproject.prj'
print(' Checking saving...')
P = Project()
saveobj(filename, P)
print(' Checking loading...')
Q = loadproj(filename)
print('Cleaning up...')
remove(filename)
done(t)
## Load spreadsheet test
if 'loadspreadsheet' in tests:
t = tic()
print('Running loadspreadsheet test...')
from optima import Project
print(' Create a project from a spreadsheet')
P = Project(spreadsheet='simple.xlsx')
print(' Load a project, then load a spreadsheet')
Q = Project()
Q.loadspreadsheet('simple.xlsx')
assert Q.data['effcondom'][0]==0.95, 'Condom efficacy not 95% or not being read in properly'
done(t)
## Load economics spreadsheet test
if 'loadeconomics' in tests:
t = tic()
print('Running loadeconomics test...')
from optima import Project
print(' Create an empty project and add economic data')
P = Project()
P.loadeconomics(filename='testeconomics.xlsx')
print(' Create a project from a spreadsheet and add economic data')
P = Project(spreadsheet='simple.xlsx')
P.loadeconomics(filename='testeconomics.xlsx')
## Run simulation test
if 'runsim' in tests:
t = tic()
print('Running runsim test...')
from optima import Project
P = Project()
P.loadspreadsheet('simple.xlsx',dorun=True)
done(t)
print('\n\n\nDONE: ran %i tests' % len(tests))
toc(T)
|
lgpl-3.0
| 5,239,596,296,055,542,000
| 21.801802
| 164
| 0.585539
| false
| 3.515278
| true
| false
| false
|
ylzmax/vncmanager
|
site1/settings.py
|
1
|
2938
|
"""
Django settings for site1 project.
Generated by 'django-admin startproject' using Django 1.8.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!ctts*k9tll(!m7v)tp1#!z-_xl*agj(1%%y6!(z*lg!66k-7-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_crontab',
'vnc',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'site1.urls'
#BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,"templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'site1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = '/tmp/static'
STATICFILES_DIRS=(
"%s/%s"%(BASE_DIR,"static"),
)
CRONJOBS = [
('*/15 * * * *','vnc.t.updateserver', '>> /tmp/c.log 2>&1'),
]
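# The django_crontab entry above runs vnc.t.updateserver every 15 minutes and
# appends its output to /tmp/c.log; jobs declared here are registered with the
# standard django-crontab command `python manage.py crontab add`.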
|
gpl-3.0
| 8,641,891,417,207,550,000
| 25
| 71
| 0.677672
| false
| 3.316027
| false
| false
| false
|
mishka28/NYU-Python
|
intro-programming/final_project/binomial_tree.py
|
1
|
1850
|
#!/usr/bin/env python3
from datetime import date
import math
import numpy as np
import sys
today = date(2015, 11, 20)
expiry = date(2017, 1, 20)
#Global inputs
s = 138.65
k = 133
t = (expiry - today).days / 365
r = 0.0106161830925
sig = 0.22545538993
div = .027000
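# Reading of the inputs above (inferred from the code, not documented by the
# author): s = spot price, k = strike, t = time to expiry in years, r =
# risk-free rate, sig = volatility, div = continuous dividend yield. tree()
# prices a European option on a Cox-Ross-Rubinstein lattice with
# u = exp(sig*sqrt(Dt)) and d = 1/u.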
def tree(steps, flavor):
Dt = t / steps
r_Dt = r * Dt
exp_r_Dt = math.exp(r_Dt)
exp_neg_r_Dt = math.exp(-r_Dt)
exp_div_Dt = math.exp(div * Dt)
exp_neg_div_Dt = math.exp(-div * Dt)
sig_sqrt_Dt = sig * math.sqrt(Dt)
exp_sig_sqrt_Dt = math.exp(sig_sqrt_Dt)
u = exp_sig_sqrt_Dt
d = 1/u
q = (exp_r_Dt * exp_neg_div_Dt - d) / (u -d)
p = 1 - q
sv = np.zeros((steps + 1,steps +1), dtype = np.float64)
sv[0,0] = s
z1 = 0
for i in range(1,steps + 1, 1):
z1 = z1 + 1
for n in range(z1 + 1):
sv[n,i] = sv[0,0] * (u ** (i - n)) * (d ** n)
iv = np.zeros((steps + 1,steps +1), dtype = np.float64)
z2 = 0
for i in range(1,steps + 1, 1):
find = False
for n in range(z2 + 2):
if flavor == "C":
iv[n,i] = max(sv[n,i] - k, 0)
elif flavor == "P":
iv[n,i] = max(-1 * (sv[n,i] - k), 0)
else:
print("fravor has to be 'C' for call and 'P' for put")
find = True
break
if find:
break
z2 = z2 + 1
# print(iv)
pv = np.zeros((steps + 1,steps +1), dtype = np.float64)
pv[:, steps] = iv[:, steps]
z3 = steps + 1
for i in range(steps -1, -1, -1):
z3 = z3 - 1
for n in range(z3):
pv[n,i] = (q * pv[n, i + 1] + p * pv[n + 1, i + 1]) * exp_neg_r_Dt
return(pv[0,0])
if __name__ == "__main__":
steps = int(sys.argv[1])
flavor = (sys.argv[2])
print(tree(steps,flavor))
|
mit
| -1,405,318,716,439,187,000
| 22.125
| 78
| 0.471892
| false
| 2.583799
| false
| false
| false
|
lukius/mts
|
set1/challenge8.py
|
1
|
1567
|
from common.challenge import MatasanoChallenge
from common.tools.misc import FileLines
class ECBEncryptionFinder(object):
BLOCK_SIZE = 16
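    # Rationale (not stated in the original): ECB encrypts identical
    # plaintext blocks to identical ciphertext blocks, so the hex string
    # with the fewest distinct 16-byte blocks is the most likely to be
    # ECB-encrypted.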
def __init__(self, hex_strings):
self.hex_strings = hex_strings
def _less_than(self, number1, number2):
return number2 is None or number1 < number2
def _build_block_set(self, hex_string):
return set(hex_string[i:i+2*self.BLOCK_SIZE]
for i in range(0, len(hex_string), 2*self.BLOCK_SIZE))
def value(self):
min_blocks = None
for hex_string in self.hex_strings:
block_set = self._build_block_set(hex_string)
size = len(block_set)
if self._less_than(size, min_blocks):
candidate_string = hex_string
min_blocks = len(block_set)
return candidate_string
class Set1Challenge08(MatasanoChallenge):
FILE = 'set1/data/8.txt'
def expected_value(self):
return 'd880619740a8a19b7840a8a31c810a3d08649af70dc06f4fd5d2d69c744' +\
'cd283e2dd052f6b641dbf9d11b0348542bb5708649af70dc06f4fd5d2d6' +\
'9c744cd2839475c9dfdbc1d46597949d9c7e82bf5a08649af70dc06f4fd' +\
'5d2d69c744cd28397a93eab8d6aecd566489154789a6b0308649af70dc0' +\
'6f4fd5d2d69c744cd283d403180c98c8f6db1f2a3f9c4040deb0ab51b29' +\
'933f2c123c58386b06fba186a'
def value(self):
hex_strings = FileLines(self.FILE).value()
return ECBEncryptionFinder(hex_strings).value()
|
mit
| 7,581,138,453,766,070,000
| 34.636364
| 79
| 0.640715
| false
| 3.048638
| false
| false
| false
|
DaveA50/lbry
|
lbrynet/lbrylive/StdinUploader.py
|
1
|
5224
|
# pylint: skip-file
# This file is not maintained, but might be used in the future
#
import logging
import sys
from lbrynet.lbrylive.LiveStreamCreator import StdOutLiveStreamCreator
from lbrynet.core.BlobManager import TempBlobManager
from lbrynet.core.Session import Session
from lbrynet.core.server.BlobAvailabilityHandler import BlobAvailabilityHandlerFactory
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.lbrylive.PaymentRateManager import BaseLiveStreamPaymentRateManager
from lbrynet.lbrylive.LiveStreamMetadataManager import DBLiveStreamMetadataManager
from lbrynet.lbrylive.server.LiveBlobInfoQueryHandler import CryptBlobInfoQueryHandlerFactory
from lbrynet.dht.node import Node
from twisted.internet import defer, task
class StdinUploader():
"""This class reads from standard in, creates a stream, and makes it available on the network."""
def __init__(self, peer_port, dht_node_port, known_dht_nodes,
stream_info_manager_class=DBLiveStreamMetadataManager, blob_manager_class=TempBlobManager):
"""
@param peer_port: the network port on which to listen for peers
@param dht_node_port: the network port on which to listen for nodes in the DHT
@param known_dht_nodes: a list of (ip_address, dht_port) which will be used to join the DHT network
"""
self.peer_port = peer_port
self.lbry_server_port = None
self.session = Session(blob_manager_class=blob_manager_class,
stream_info_manager_class=stream_info_manager_class,
dht_node_class=Node, dht_node_port=dht_node_port,
known_dht_nodes=known_dht_nodes, peer_port=self.peer_port,
use_upnp=False)
self.payment_rate_manager = BaseLiveStreamPaymentRateManager()
def start(self):
"""Initialize the session and start listening on the peer port"""
d = self.session.setup()
d.addCallback(lambda _: self._start())
return d
def _start(self):
self._start_server()
return True
def _start_server(self):
query_handler_factories = [
CryptBlobInfoQueryHandlerFactory(self.stream_info_manager, self.session.wallet,
self.payment_rate_manager),
BlobAvailabilityHandlerFactory(self.session.blob_manager),
BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
self.payment_rate_manager),
self.session.wallet.get_wallet_info_query_handler_factory()
]
self.server_factory = ServerProtocolFactory(self.session.rate_limiter,
query_handler_factories,
self.session.peer_manager)
from twisted.internet import reactor
self.lbry_server_port = reactor.listenTCP(self.peer_port, self.server_factory)
def start_live_stream(self, stream_name):
"""Create the stream and start reading from stdin
@param stream_name: a string, the suggested name of this stream
"""
stream_creator_helper = StdOutLiveStreamCreator(stream_name, self.session.blob_manager,
self.stream_info_manager)
d = stream_creator_helper.create_and_publish_stream_descriptor()
def print_sd_hash(sd_hash):
print "Stream descriptor hash:", sd_hash
d.addCallback(print_sd_hash)
d.addCallback(lambda _: stream_creator_helper.start_streaming())
return d
def shut_down(self):
"""End the session and stop listening on the server port"""
d = self.session.shut_down()
d.addCallback(lambda _: self._shut_down())
return d
def _shut_down(self):
if self.lbry_server_port is not None:
d = defer.maybeDeferred(self.lbry_server_port.stopListening)
else:
d = defer.succeed(True)
return d
def launch_stdin_uploader():
from twisted.internet import reactor
logging.basicConfig(level=logging.WARNING, filename="ul.log")
if len(sys.argv) == 4:
uploader = StdinUploader(int(sys.argv[2]), int(sys.argv[3]), [])
elif len(sys.argv) == 6:
uploader = StdinUploader(int(sys.argv[2]), int(sys.argv[3]), [(sys.argv[4], int(sys.argv[5]))])
else:
print "Usage: lbrynet-stdin-uploader <stream_name> <peer_port> <dht_node_port>" \
" [<dht_bootstrap_host> <dht_bootstrap port>]"
sys.exit(1)
def start_stdin_uploader():
return uploader.start_live_stream(sys.argv[1])
def shut_down():
logging.debug("Telling the reactor to stop in 60 seconds")
reactor.callLater(60, reactor.stop)
d = task.deferLater(reactor, 0, uploader.start)
d.addCallback(lambda _: start_stdin_uploader())
d.addCallback(lambda _: shut_down())
reactor.addSystemEventTrigger('before', 'shutdown', uploader.shut_down)
reactor.run()
|
mit
| -3,413,635,974,544,798,000
| 42.173554
| 108
| 0.647779
| false
| 4.037094
| false
| false
| false
|
dukekuang/thinkpython2e-solution
|
chapter 7/7.3.py
|
1
|
1155
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 18 13:28:29 2017
@author: dukekuang
"""
#Exercise 7.3. The mathematician Srinivasa Ramanujan
#found an infinite series that can be used to
#generate a numerical approximation of 1/pi:
#Write a function called estimate_pi that uses this formula to compute
#and return an estimate of pi.
#It should use a while loop to compute terms of the summation until
#the last term is smaller than 1e-15 (which is Python notation for 10^-15).
#You can check the result by comparing it to math.pi.
#Author Solution: http://thinkpython2.com/code/pi.py
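# For reference, the Ramanujan series being summed below (quoted from the
# standard formula, not from the exercise text):
#   1/pi = (2*sqrt(2)/9801) * sum_{k>=0} (4k)! * (1103 + 26390*k) / ((k!)**4 * 396**(4*k))
# Each term adds roughly eight correct decimal digits, so the while loop
# terminates after only a few iterations.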
import math
def estimate_pi():
factor=2*math.sqrt(2)/9801
k=0
total=0
while True:
num=math.factorial(4*k)*(1103+26390*k)
den=((math.factorial(k)**4)*(396**(4*k)))
term=num/den
total += term
if term < 1e-15 :
break
print('term',k,' ',term)
k += 1
print('last term ',term)
print('total ',total)
pi = 1/(factor*total)
print('1/pi ',factor*total)
print('estimate pi ',pi,'\t','math.pi ',math.pi)
estimate_pi()
|
gpl-3.0
| 634,642,464,086,977,000
| 26.85
| 75
| 0.611979
| false
| 3.217877
| false
| false
| false
|
CptDemocracy/Python
|
MITx-6.00.1x-EDX-Introduction-to-Computer-Science/Week-4/PSET-4/game.py
|
1
|
2366
|
"""
PSET-4
Word Game Part 9: You and your Computer
"""
PROMPT_STR = "Enter n to deal a new hand, r to replay the last hand, or e to end game: "
NO_REPL_AVAIL_STR = "You have not played a hand yet. Please play a new hand first!"
INVALID_CMD = "Invalid command."
CHOOSE_PLAYER_STR = "Enter u to have yourself play, c to have the computer play: "
def playGame(wordList):
"""
Allow the user to play an arbitrary number of hands.
1) Asks the user to input 'n' or 'r' or 'e'.
* If the user inputs 'e', immediately exit the game.
* If the user inputs anything that's not 'n', 'r', or 'e', keep asking them again.
2) Asks the user to input a 'u' or a 'c'.
* If the user inputs anything that's not 'c' or 'u', keep asking them again.
3) Switch functionality based on the above choices:
* If the user inputted 'n', play a new (random) hand.
* Else, if the user inputted 'r', play the last hand again.
But if no hand was played, output "You have not played a hand yet.
Please play a new hand first!"
* If the user inputted 'u', let the user play the game
with the selected hand, using playHand.
* If the user inputted 'c', let the computer play the
game with the selected hand, using compPlayHand.
4) After the computer or user has played the hand, repeat from step 1
wordList: list (string)
"""
firstGame = True
lastHand = {}
while True:
userInput = raw_input(PROMPT_STR)
if userInput == 'n':
hand = dealHand(HAND_SIZE)
lastHand = hand.copy()
playHand_AI_and_human(hand, wordList, HAND_SIZE)
elif userInput == 'r':
if len(lastHand) == 0:
print(NO_REPL_AVAIL_STR)
else:
playHand_AI_and_human(lastHand, wordList, HAND_SIZE)
elif userInput == 'e':
break
else:
print(INVALID_CMD)
print
def playHand_AI_and_human(hand, wordList, n):
userInput = ""
while userInput != 'u' and userInput != 'c':
userInput = raw_input(CHOOSE_PLAYER_STR)
if userInput == 'u':
playHand(hand, wordList, n)
elif userInput == 'c':
compPlayHand(hand, wordList, n)
else:
print(INVALID_CMD)
|
mit
| -341,466,025,357,899,300
| 34.848485
| 90
| 0.587067
| false
| 3.662539
| false
| false
| false
|
lgfausak/sqlbridge
|
sqlbridge/twisted/db/ausqlite3.py
|
1
|
6540
|
###############################################################################
##
## Copyright (C) 2014 Greg Fausak
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
###############################################################################
## ausqlite3.py - sqlite3 driver
##
## this is driver interface code. It is used with the DB class. It shouldn't
## be called or instantiated independent of that class.
###############################################################################
from __future__ import absolute_import
import sys,os
import sqlite3
from twisted.enterprise import adbapi
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, returnValue
from .dbbase import dbbase
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def set_dict_factory(conn):
conn.row_factory = dict_factory
class SQLITE3_3_8_2(dbbase):
"""
basic sqlite3 3.8.2 driver
"""
def __init__(self, topic_base, app_session, debug):
if debug is not None and debug:
log.startLogging(sys.stdout)
log.msg("SQLITE3_3_8_2:__init__()")
self.engine_version = "SQLITE3_3_8_2"
self.engine = "SQLITE"
self.conn = None
self.dsn = None
self.topic_base = topic_base
self.app_session = app_session
self.debug = debug
return
#
# connect
# simply connect to a database
# dsn is the only argument, it is a string, in psycopg2 connect
# format. basically it looks like
# dbname=DBNAME host=MACHINE user=DBUSER
# DBNAME is the database name
# MACHINE is the ip address or dns name of the machine
# DBUSER is the user to connect as
#
def connect(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:connect({},{})".format(args,kwargs))
self.dsn = args[0]
# there must be an easier way.
# this converts db=x host=y shatever=z to a dictionary.
try:
md = dict(s.split('=') for s in self.dsn.split())
md['cp_openfun'] = set_dict_factory
#self.conn = adbapi.ConnectionPool("sqlite3",**dict(s.split('=') for s in self.dsn.split()))
self.conn = adbapi.ConnectionPool("sqlite3",**dict(md))
log.msg("SQLITE3_3_8_2:connect() established")
except Exception as err:
log.msg("SQLITE3_3_8_2:connect({}),error({})".format(self.dsn,err))
raise err
return
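    # Example (hypothetical dsn; sqlite3 only needs a database path):
    #   driver.connect('database=/tmp/bridge.db')
    # is parsed by the dict comprehension above into
    # {'database': '/tmp/bridge.db'} (plus cp_openfun) before being handed
    # to adbapi.ConnectionPool.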
#
# disconnect
# this disconnects from the currently connected database. if no database
# is currently connected then this does nothing.
def disconnect(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:disconnect({},{})".format(args,kwargs))
if self.conn:
c = self.conn
self.conn = None
c.close()
return
#
# query:
# s - query to run (with dictionary substitution embedded, like %(key)s
# a - dictionary pointing to arguments.
# example:
# s = 'select * from login where id = %(id)s'
# a = { 'id': 100 }
# returns:
# dictionary result of query
# note:
# there MUST be a result, otherwise use the operation call!
# well, the query can return 0 rows, that is ok. but, if the query
# by its nature doesn't return any rows then don't use this call!
# for example, a query that says 'insert into table x (c) values(r)'
# by its nature it doesn't return a row, so, this isn't the right
# method to use, use operation instead
#
@inlineCallbacks
def query(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:query({},{})".format(args,kwargs))
s = args[0]
a = args[1]
if self.conn:
try:
log.msg("SQLITE3_3_8_2:query().running({} with args {})".format(s,a))
rv = yield self.conn.runQuery(s,a)
log.msg("SQLITE3_3_8_2:query().results({})".format(rv))
returnValue(rv)
except Exception as err:
log.msg("SQLITE3_3_8_2:query({}),error({})".format(s,err))
raise err
# error here, probably should raise exception
return
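    # Usage sketch (hypothetical caller; note sqlite3 itself expects qmark or
    # :named placeholders rather than the %(key)s style described above):
    #   rows = yield driver.query('select * from login where id = :id',
    #                             {'id': 100})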
#
# operation:
# identical to query, except, there is no result returned.
# note:
# it is important that your query does NOT return anything! If it does,
# use the query call!
#
# see also:
# query method has a good description of this and query.
#
@inlineCallbacks
def operation(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:operation({},{})".format(args,kwargs))
s = args[0]
a = args[1]
if self.conn:
try:
log.msg("SQLITE3_3_8_2:query().running({} with args {})".format(s,a))
rv = yield self.conn.runOperation(s,a)
log.msg("SQLITE3_3_8_2:query().results({})".format(rv))
returnValue(rv)
except Exception as err:
log.msg("SQLITE3_3_8_2:query({}),error({})".format(s,err))
raise err
# error here, probably should raise exception
return
#
# watch:
# this is specific to postgres NOTIFY/LISTEN. other drivers will need to stub this out
#
def watch(self,*args,**kwargs):
raise Exception("sqlite3 is trying to add watch, can only do this in postgres")
return
#
# info:
# return information about this connection
#
@inlineCallbacks
def info(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:info({},{})".format(args,kwargs))
rv = yield [{
"engine":self.engine,
"engine_version":self.engine_version,
"dsn":self.dsn,
"topic_base":self.topic_base,
"debug":self.debug
}]
returnValue(rv)
return
|
apache-2.0
| 6,459,614,035,259,180,000
| 32.88601
| 104
| 0.562997
| false
| 3.867534
| false
| false
| false
|
jdfreder/leaftletwidget
|
leafletwidget/notebook.py
|
1
|
1202
|
from __future__ import print_function
import os
from IPython.display import display, HTML, Javascript
leaflet_css = '//cdn.leafletjs.com/leaflet-0.7.2/leaflet.css'
# leaftlet_js = "//cdn.leafletjs.com/leaflet-0.7.2/leaflet"
# leaflet_draw_js = ['//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/leaflet.draw-src.js',
# '//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/leaflet.draw.js']
# leaflet_draw_png = ['//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/images/spritesheet-2x.png',
# '//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/images/spritesheet.png']
leaflet_draw_css = '//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/leaflet.draw.css'
def get_static_path():
return os.path.join(os.path.split(__file__)[0], 'static')
css_template = '<link rel="stylesheet" href="{}" />'
def display_css(url):
display(HTML(css_template.format(url)))
def initialize_notebook(leaflet_css=leaflet_css, leaflet_draw_css=leaflet_draw_css):
    display_css(leaflet_css)
    display_css(leaflet_draw_css)
for filename in ['leaflet.js']:
with open(os.path.join(get_static_path(), filename)) as f:
display(Javascript(f.read()))
|
mit
| 2,223,657,766,573,104,400
| 36.5625
| 102
| 0.680532
| false
| 2.713318
| false
| false
| false
|
fluxcapacitor/pipeline
|
libs/pipeline_runtime/setup.py
|
1
|
1300
|
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup
#import sys
#if not sys.version_info[0] == 3:
# print("\n \
# sys.exit("\n \
# ****************************************************************\n \
# * The CLI has only been tested with Python 3+ at this time. *\n \
# * Report any issues with Python 2 by emailing help@pipeline.io *\n \
# ****************************************************************\n")
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('pipeline_runtime/__init__.py').read(),
re.M
).group(1)
# Get the long description from the relevant file
with open('README.rst', encoding='utf-8') as f:
long_description = f.read()
with open('requirements.txt', encoding='utf-8') as f:
requirements = [line.rstrip() for line in f.readlines()]
setup(
name = "pipeline-runtime",
packages = ["pipeline_runtime"],
version = version,
description = "PipelineAI Runtime",
long_description = "%s\n\nRequirements:\n%s" % (long_description, requirements),
author = "Chris Fregly",
author_email = "github@pipeline.ai",
url = "https://github.com/PipelineAI/",
install_requires=requirements,
dependency_links=[
]
)
|
apache-2.0
| 2,087,528,225,292,709,600
| 29.952381
| 84
| 0.540769
| false
| 3.714286
| false
| false
| false
|
Zhenxingzhang/kaggle-cdiscount-classification
|
src/misc/inception_bson.py
|
1
|
1345
|
from src.freezing import inception
from src.freezing.inception import NodeLookup
import tensorflow as tf
import bson
from tqdm import tqdm
import numpy as np
if __name__ == '__main__':
input_bson_filename = "/data/data/train_example.bson"
node_lookup = NodeLookup()
inception_graph = tf.Graph()
inception_sess = tf.Session(graph=inception_graph)
with inception_graph.as_default(), inception_sess.as_default() as sess:
inception_model = inception.inception_inference()
z = 0
n = 82
data = bson.decode_file_iter(open(input_bson_filename, 'rb'))
opts = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.ZLIB)
with open('inception_feature.txt', 'w') as f:
for c, d in tqdm(enumerate(data), total=n):
n_img = len(d['imgs'])
for index in range(n_img):
img_raw = d['imgs'][index]['picture']
# height = img.shape[0]
# width = img.shape[1]
product_id = d['_id']
prediction = inception_model(sess, img_raw)
predictions = np.squeeze(prediction)
top_5 = predictions.argsort()[-5:][::-1]
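                    # argsort()[-5:][::-1] gives the five highest-scoring
                    # class ids, best first; only the top-1 label is written
                    # to the output file below.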
f.write(str(d["_id"]) + " " + node_lookup.id_to_string(top_5[0]) + "\n")
|
apache-2.0
| 7,757,940,009,184,435,000
| 34.394737
| 92
| 0.565056
| false
| 3.558201
| false
| false
| false
|
moniker-dns/contractor
|
contractor/openstack/common/gettextutils.py
|
1
|
13133
|
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
gettext for openstack-common modules.
Usual usage in an openstack.common module:
from contractor.openstack.common.gettextutils import _
"""
import copy
import gettext
import logging
import os
import re
try:
import UserString as _userString
except ImportError:
import collections as _userString
from babel import localedata
import six
_localedir = os.environ.get('contractor'.upper() + '_LOCALEDIR')
_t = gettext.translation('contractor', localedir=_localedir, fallback=True)
_AVAILABLE_LANGUAGES = {}
USE_LAZY = False
def enable_lazy():
"""Convenience function for configuring _() to use lazy gettext
Call this at the start of execution to enable the gettextutils._
function to use lazy gettext functionality. This is useful if
your project is importing _ directly instead of using the
gettextutils.install() way of importing the _ function.
"""
global USE_LAZY
USE_LAZY = True
def _(msg):
if USE_LAZY:
return Message(msg, 'contractor')
else:
if six.PY3:
return _t.gettext(msg)
return _t.ugettext(msg)
def install(domain, lazy=False):
"""Install a _() function using the given translation domain.
Given a translation domain, install a _() function using gettext's
install() function.
The main difference from gettext.install() is that we allow
overriding the default localedir (e.g. /usr/share/locale) using
a translation-domain-specific environment variable (e.g.
NOVA_LOCALEDIR).
:param domain: the translation domain
:param lazy: indicates whether or not to install the lazy _() function.
The lazy _() introduces a way to do deferred translation
of messages by installing a _ that builds Message objects,
instead of strings, which can then be lazily translated into
any available locale.
"""
if lazy:
# NOTE(mrodden): Lazy gettext functionality.
#
# The following introduces a deferred way to do translations on
# messages in OpenStack. We override the standard _() function
# and % (format string) operation to build Message objects that can
# later be translated when we have more information.
#
# Also included below is an example LocaleHandler that translates
# Messages to an associated locale, effectively allowing many logs,
# each with their own locale.
def _lazy_gettext(msg):
"""Create and return a Message object.
Lazy gettext function for a given domain, it is a factory method
for a project/module to get a lazy gettext function for its own
translation domain (i.e. nova, glance, cinder, etc.)
Message encapsulates a string so that we can translate
it later when needed.
"""
return Message(msg, domain)
from six import moves
moves.builtins.__dict__['_'] = _lazy_gettext
else:
localedir = '%s_LOCALEDIR' % domain.upper()
if six.PY3:
gettext.install(domain,
localedir=os.environ.get(localedir))
else:
gettext.install(domain,
localedir=os.environ.get(localedir),
unicode=True)
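# Hedged usage sketch for install(): after calling it, a global _() is
# available via builtins; 'myproject' is a hypothetical domain name.
#
#   install('myproject', lazy=True)
#   print(_('hello'))   # prints a lazily-translated Message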
class Message(_userString.UserString, object):
"""Class used to encapsulate translatable messages."""
def __init__(self, msg, domain):
# _msg is the gettext msgid and should never change
self._msg = msg
self._left_extra_msg = ''
self._right_extra_msg = ''
self._locale = None
self.params = None
self.domain = domain
@property
def data(self):
# NOTE(mrodden): this should always resolve to a unicode string
# that best represents the state of the message currently
localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR')
if self.locale:
lang = gettext.translation(self.domain,
localedir=localedir,
languages=[self.locale],
fallback=True)
else:
# use system locale for translations
lang = gettext.translation(self.domain,
localedir=localedir,
fallback=True)
if six.PY3:
ugettext = lang.gettext
else:
ugettext = lang.ugettext
full_msg = (self._left_extra_msg +
ugettext(self._msg) +
self._right_extra_msg)
if self.params is not None:
full_msg = full_msg % self.params
return six.text_type(full_msg)
@property
def locale(self):
return self._locale
@locale.setter
def locale(self, value):
self._locale = value
if not self.params:
return
# This Message object may have been constructed with one or more
# Message objects as substitution parameters, given as a single
# Message, or a tuple or Map containing some, so when setting the
# locale for this Message we need to set it for those Messages too.
if isinstance(self.params, Message):
self.params.locale = value
return
if isinstance(self.params, tuple):
for param in self.params:
if isinstance(param, Message):
param.locale = value
return
if isinstance(self.params, dict):
for param in self.params.values():
if isinstance(param, Message):
param.locale = value
def _save_dictionary_parameter(self, dict_param):
full_msg = self.data
# look for %(blah) fields in string;
# ignore %% and deal with the
# case where % is first character on the line
keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg)
# if we don't find any %(blah) blocks but have a %s
if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg):
# apparently the full dictionary is the parameter
params = copy.deepcopy(dict_param)
else:
params = {}
for key in keys:
try:
params[key] = copy.deepcopy(dict_param[key])
except TypeError:
# cast uncopyable thing to unicode string
params[key] = six.text_type(dict_param[key])
return params
def _save_parameters(self, other):
# we check for None later to see if
# we actually have parameters to inject,
# so encapsulate if our parameter is actually None
if other is None:
self.params = (other, )
elif isinstance(other, dict):
self.params = self._save_dictionary_parameter(other)
else:
# fallback to casting to unicode,
# this will handle the problematic python code-like
# objects that cannot be deep-copied
try:
self.params = copy.deepcopy(other)
except TypeError:
self.params = six.text_type(other)
return self
# overrides to be more string-like
def __unicode__(self):
return self.data
def __str__(self):
if six.PY3:
return self.__unicode__()
return self.data.encode('utf-8')
def __getstate__(self):
to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg',
'domain', 'params', '_locale']
new_dict = self.__dict__.fromkeys(to_copy)
for attr in to_copy:
new_dict[attr] = copy.deepcopy(self.__dict__[attr])
return new_dict
def __setstate__(self, state):
for (k, v) in state.items():
setattr(self, k, v)
# operator overloads
def __add__(self, other):
copied = copy.deepcopy(self)
copied._right_extra_msg += other.__str__()
return copied
def __radd__(self, other):
copied = copy.deepcopy(self)
copied._left_extra_msg += other.__str__()
return copied
def __mod__(self, other):
# do a format string to catch and raise
# any possible KeyErrors from missing parameters
self.data % other
copied = copy.deepcopy(self)
return copied._save_parameters(other)
def __mul__(self, other):
return self.data * other
def __rmul__(self, other):
return other * self.data
def __getitem__(self, key):
return self.data[key]
def __getslice__(self, start, end):
return self.data.__getslice__(start, end)
def __getattribute__(self, name):
# NOTE(mrodden): handle lossy operations that we can't deal with yet
# These override the UserString implementation, since UserString
# uses our __class__ attribute to try and build a new message
# after running the inner data string through the operation.
# At that point, we have lost the gettext message id and can just
# safely resolve to a string instead.
ops = ['capitalize', 'center', 'decode', 'encode',
'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip',
'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill']
if name in ops:
return getattr(self.data, name)
else:
return _userString.UserString.__getattribute__(self, name)
def get_available_languages(domain):
"""Lists the available languages for the given translation domain.
:param domain: the domain to get languages for
"""
if domain in _AVAILABLE_LANGUAGES:
return copy.copy(_AVAILABLE_LANGUAGES[domain])
localedir = '%s_LOCALEDIR' % domain.upper()
find = lambda x: gettext.find(domain,
localedir=os.environ.get(localedir),
languages=[x])
# NOTE(mrodden): en_US should always be available (and first in case
# order matters) since our in-line message strings are en_US
language_list = ['en_US']
# NOTE(luisg): Babel <1.0 used a function called list(), which was
# renamed to locale_identifiers() in >=1.0, the requirements master list
# requires >=0.9.6, uncapped, so defensively work with both. We can remove
# this check when the master list updates to >=1.0, and update all projects
list_identifiers = (getattr(localedata, 'list', None) or
getattr(localedata, 'locale_identifiers'))
locale_identifiers = list_identifiers()
for i in locale_identifiers:
if find(i) is not None:
language_list.append(i)
_AVAILABLE_LANGUAGES[domain] = language_list
return copy.copy(language_list)
def get_localized_message(message, user_locale):
"""Gets a localized version of the given message in the given locale.
If the message is not a Message object the message is returned as-is.
If the locale is None the message is translated to the default locale.
:returns: the translated message in unicode, or the original message if
it could not be translated
"""
translated = message
if isinstance(message, Message):
original_locale = message.locale
message.locale = user_locale
translated = six.text_type(message)
message.locale = original_locale
return translated
class LocaleHandler(logging.Handler):
"""Handler that can have a locale associated to translate Messages.
A quick example of how to utilize the Message class above.
LocaleHandler takes a locale and a target logging.Handler object
to forward LogRecord objects to after translating the internal Message.
"""
def __init__(self, locale, target):
"""Initialize a LocaleHandler
:param locale: locale to use for translating messages
:param target: logging.Handler object to forward
LogRecord objects to after translation
"""
logging.Handler.__init__(self)
self.locale = locale
self.target = target
def emit(self, record):
if isinstance(record.msg, Message):
# set the locale and resolve to a string
record.msg.locale = self.locale
self.target.emit(record)
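# Usage sketch for LocaleHandler (handler wiring is illustrative): wrap an
# ordinary StreamHandler so Message log records are rendered in 'zh_CN'.
#
#   import logging, sys
#   target = logging.StreamHandler(sys.stderr)
#   logging.getLogger().addHandler(LocaleHandler('zh_CN', target))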
|
apache-2.0
| 4,492,199,157,041,093,000
| 34.398922
| 79
| 0.60565
| false
| 4.477668
| false
| false
| false
|
hwroitzsch/DayLikeTodayClone
|
examples/main.py
|
1
|
1357
|
import json
from urllib.parse import urlencode
from urllib.request import urlopen, Request
# HTTP URL is constructed accordingly with JSON query results format in mind.
def sparqlQuery(query, baseURL, format="application/json"):
params = {
"default-graph": "",
"should-sponge": "soft",
"query": query,
"debug": "on",
"timeout": "",
"format": format,
"save": "display",
"fname": ""
}
querypart = urlencode(params)
binary_query = querypart.encode('utf8')
request = Request(baseURL, binary_query)
response = urlopen(request).read()
return json.loads(response)
# Setting Data Source Name (DSN)
data_source_name = "http://dbpedia.org/sparql"
# Virtuoso pragmas for instructing SPARQL engine to perform an HTTP GET
# using the IRI in FROM clause as Data Source URL
query = """DEFINE get:soft "replace" SELECT DISTINCT * FROM <%s> WHERE {?s ?p ?o}""" %data_source_name
query_people_born_before_1900 = """
SELECT ?name ?birth ?death ?person
WHERE {
?person dbo:birthPlace :Berlin .
?person dbo:birthDate ?birth .
?person foaf:name ?name .
?person dbo:deathDate ?death .
FILTER (?birth < "1900-01-01"^^xsd:date) .
}
ORDER BY ?name
"""
data = sparqlQuery(query, "http://localhost:8890/sparql/")
print("Retrieved data:\n" + json.dumps(data, sort_keys=True, indent=4))
|
mit
| 3,912,693,555,853,568,500
| 28.5
| 102
| 0.666912
| false
| 3.301703
| false
| false
| false
|
brendannee/Bikesy-Backend
|
misc/tripplanner/main.py
|
2
|
1089
|
from graphserver.graphdb import GraphDatabase
from graphserver.ext.osm.osmdb import OSMDB
from graphserver.core import State, WalkOptions, Graph, Street, Combination, EdgePayload, ContractionHierarchy
import sys
def make_native_ch(basename):
    # build a contraction hierarchy for the graph, then persist its
    # up- and down-graphs into their own databases
    gdb = GraphDatabase( basename+".gdb" )
    gg = gdb.incarnate()
    wo = WalkOptions()
    wo.hill_reluctance=1
    ch = gg.get_contraction_hierarchies( wo )
    chdowndb = GraphDatabase( basename+".down.gdb", overwrite=True )
    chdowndb.populate( ch.downgraph, reporter=sys.stdout )
    chupdb = GraphDatabase( basename+".up.gdb", overwrite=True )
    chupdb.populate( ch.upgraph, reporter=sys.stdout )
def reincarnate_chdbs(basename):
    # reload previously saved up/down graphs into a ContractionHierarchy
    chdowndb = GraphDatabase( basename+".down.gdb" )
    chupdb = GraphDatabase( basename+".up.gdb" )
    upgg = chupdb.incarnate()
    downgg = chdowndb.incarnate()
    return ContractionHierarchy(upgg, downgg)
if __name__ == '__main__':
try:
make_native_ch( sys.argv[1] )
except IndexError:
print "usage: python ch.py gdb_basename"
|
bsd-3-clause
| 4,947,394,258,959,224,000
| 30.114286
| 110
| 0.676768
| false
| 3.413793
| false
| false
| false
|
jimblandy/plife
|
setup.py
|
1
|
1112
|
from distutils.core import setup, Extension
from glob import glob
from os.path import join
version = "0.4.1"
bindir = "bin"
docdir = join ("share", "doc", "plife-" + version)
setup (
name = "plife-python",
version = version,
description = "Pattern construction tool for Conway's Game of Life",
long_description = """\
Python package intended to help in designing complex patterns for
Conway's Game of Life and related cellular automata.
Sample pattern-describing scripts and resulting patterns are included.
You may also want to install the 'plife' package to view them.""",
author = "Eugene Langvagen",
author_email = "elang@yandex.ru",
url = "http://plife.sourceforge.net/",
license = "GPL",
packages = ["life"],
package_dir = {"life": "python/life"},
ext_modules = [Extension ("life.lifeint", [
"src/life.cc", "src/life_io.cc",
"src/life_rect.cc", "src/lifeint.cc"
], libraries = ["stdc++"])],
data_files = [
(join (docdir, "samples"), glob (join ("samples", "*.py")) + glob (join ("samples", "*.lif"))),
(join (docdir, "samples", "lib"), glob (join ("samples", "lib", "*.py")))
]
)
|
gpl-2.0
| 8,316,089,656,023,166,000
| 32.69697
| 97
| 0.666367
| false
| 3.029973
| false
| false
| false
|
NikNitro/Python-iBeacon-Scan
|
sympy/core/decorators.py
|
1
|
4111
|
"""
SymPy core decorators.
The purpose of this module is to expose decorators without any other
dependencies, so that they can be easily imported anywhere in sympy/core.
"""
from __future__ import print_function, division
from functools import wraps
from .sympify import SympifyError, sympify
from sympy.core.compatibility import get_function_code
def deprecated(**decorator_kwargs):
"""This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used."""
def deprecated_decorator(func):
@wraps(func)
def new_func(*args, **kwargs):
from sympy.utilities.exceptions import SymPyDeprecationWarning
decorator_kwargs.setdefault('feature', func.__name__)
SymPyDeprecationWarning(**decorator_kwargs).warn(stacklevel=3)
return func(*args, **kwargs)
new_func._sympy_deprecated_func = func
return new_func
return deprecated_decorator
def _sympifyit(arg, retval=None):
"""decorator to smartly _sympify function arguments
@_sympifyit('other', NotImplemented)
def add(self, other):
...
In add, other can be thought of as already being a SymPy object.
If it is not, the code is likely to catch an exception, then other will
be explicitly _sympified, and the whole code restarted.
if _sympify(arg) fails, NotImplemented will be returned
see: __sympifyit
"""
def deco(func):
return __sympifyit(func, arg, retval)
return deco
def __sympifyit(func, arg, retval=None):
"""decorator to _sympify `arg` argument for function `func`
don't use directly -- use _sympifyit instead
"""
# we support f(a,b) only
if not get_function_code(func).co_argcount:
raise LookupError("func not found")
# only b is _sympified
assert get_function_code(func).co_varnames[1] == arg
if retval is None:
@wraps(func)
def __sympifyit_wrapper(a, b):
return func(a, sympify(b, strict=True))
else:
@wraps(func)
def __sympifyit_wrapper(a, b):
try:
# If an external class has _op_priority, it knows how to deal
# with sympy objects. Otherwise, it must be converted.
if not hasattr(b, '_op_priority'):
b = sympify(b, strict=True)
return func(a, b)
except SympifyError:
return retval
return __sympifyit_wrapper
def call_highest_priority(method_name):
"""A decorator for binary special methods to handle _op_priority.
Binary special methods in Expr and its subclasses use a special attribute
'_op_priority' to determine whose special method will be called to
handle the operation. In general, the object having the highest value of
'_op_priority' will handle the operation. Expr and subclasses that define
custom binary special methods (__mul__, etc.) should decorate those
methods with this decorator to add the priority logic.
The ``method_name`` argument is the name of the method of the other class
that will be called. Use this decorator in the following manner::
# Call other.__rmul__ if other._op_priority > self._op_priority
@call_highest_priority('__rmul__')
def __mul__(self, other):
...
# Call other.__mul__ if other._op_priority > self._op_priority
@call_highest_priority('__mul__')
def __rmul__(self, other):
...
"""
def priority_decorator(func):
@wraps(func)
def binary_op_wrapper(self, other):
if hasattr(other, '_op_priority'):
if other._op_priority > self._op_priority:
try:
f = getattr(other, method_name)
except AttributeError:
pass
else:
return f(self)
return func(self, other)
return binary_op_wrapper
return priority_decorator
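# Toy sketch (not part of SymPy) of the intended pattern: a subclass with a
# higher _op_priority gets to handle mixed operations. ``Heavy`` is
# hypothetical; Expr's default priority is 10.0.
#
#   class Heavy(Expr):
#       _op_priority = 11.0
#
#       @call_highest_priority('__rmul__')
#       def __mul__(self, other):
#           ...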
|
gpl-3.0
| -6,863,624,794,790,311,000
| 32.975207
| 78
| 0.620044
| false
| 4.392094
| false
| false
| false
|
impactlab/jps-handoff
|
model/postgres_dump_recent.py
|
1
|
1839
|
#!/usr/bin/env python
import psycopg2
import csv, sys, os, datetime
datadir = '/data/extract/'
with open('meter_list.csv', 'r') as f:
fcsv = csv.reader(f)
meters = fcsv.next()
stop_date = datetime.datetime.now()
start_date = stop_date - datetime.timedelta(days=365)      # one year of profile data
start_date_evt = stop_date - datetime.timedelta(days=30)   # one month of event data
conn = psycopg2.connect(service='jps')
cur = conn.cursor()
for meter in meters:
out_filename = meter + '__' + \
datetime.datetime.strftime(start_date,'%Y-%m-%dT%H%M%S') + '__' + \
datetime.datetime.strftime(stop_date,'%Y-%m-%dT%H%M%S') + '.csv'
out_filename_evt = 'evt__' + meter + '__' + \
datetime.datetime.strftime(start_date_evt,'%Y-%m-%dT%H%M%S') + '__' + \
datetime.datetime.strftime(stop_date,'%Y-%m-%dT%H%M%S') + '.csv'
cur.execute('SELECT m.meter_id, p.ts, p.kw, p.kva FROM viewer_meter m JOIN viewer_profiledatapoint p ON '+
'm.id=p.meter_id WHERE m.meter_id=%s AND p.ts > %s AND p.ts <= %s',(meter,start_date,stop_date))
with open(datadir+out_filename,'w') as f:
fcsv = csv.writer(f)
for line in cur:
if len(line) != 4: continue
ts = datetime.datetime.strftime(line[1], '%Y/%m/%d %H:%M')
fcsv.writerow([line[0], ts, line[2], line[3]])
    cur.execute('SELECT m.meter_id, p.ts, p.event FROM viewer_meter m JOIN viewer_eventdatapoint p ON '+
                'm.id=p.meter_id WHERE m.meter_id=%s AND p.ts > %s AND p.ts <= %s',(meter,start_date_evt,stop_date))
with open(datadir+out_filename_evt,'w') as f:
fcsv = csv.writer(f)
fcsv.writerow(['Device Id','Time','Event'])
for line in cur:
if len(line) != 3: continue
ts = datetime.datetime.strftime(line[1], '%Y-%m-%d %H:%M:%S')
fcsv.writerow([line[0], ts, line[2]])
|
mit
| 6,000,819,424,975,845,000
| 40.795455
| 112
| 0.586188
| false
| 2.923688
| false
| false
| false
|
iceflow/aws-demo
|
es/cf_logs_to_es/cf_logs_to_es.py
|
1
|
4234
|
#!/usr/bin/python
# -*- coding: utf8 -*-
import sys
import os
import json
from log import *
from es_put import *
from ip2geo import *
### Constants
DUMP_PROCESS_NUM = 2000  # flush accumulated log lines to ES once this count is reached
INDICES_PREFIX = "cf-logs-"
DEFAULT_TYPE = "log"
CF_LOGS_WEB_FORMAT_SIZE = 24
CF_LOGS_RTMP_FORMAT_SIZE = 13
################ Global variables - start ########################################
#### Logging
log = Log('CF_LOGS_TO_ES', '/var/log/cf_logs_to_es.log')
#### Application data
es_server = None    # ES server
g_value_body = ""   # buffered Elasticsearch bulk payload awaiting upload
################ Global variables - end ##########################################
def process_line(s):
if CF_LOGS_WEB_FORMAT_SIZE != len(s):
        log.info('unexpected log field count, expected %d: got %d (%s)'%(CF_LOGS_WEB_FORMAT_SIZE, len(s), ' '.join(s)))
        return
    # map the tab-separated CloudFront W3C fields onto named keys
data = {}
data["@timestamp"] = "%s:%s"%(s[0], s[1]);
data["x-edge-location"] = s[2];
data["sc-bytes"] = int(s[3]);
data["c-ip"] = s[4];
data["location"] = get_geo_location(s[4]);
data["cs-method"] = s[5];
data["cs-host"] = s[6];
data["cs-uri-stem"] = s[7];
data["sc-status"] = s[8];
data["cs-feferer"] = s[9];
data["cs-user-agent"] = s[10];
data["cs-uri-query"] = s[11];
data["cs-cookie"] = s[12];
data["x-edge-result-type"] = s[13];
data["x-edge-request-id"] = s[14];
data["x-host-header"] = s[15];
data["cs-protocol"] = s[16];
data["cs-bytes"] = s[17];
data["time-taken"] = s[18];
data["x-forwarded-for"] = s[19];
data["ssl-protocol"] = s[20];
data["ssl-cipher"] = s[21];
data["x-edge-response-result-type"] = s[22];
data["cs-protocol-version"] = s[23];
#print data
#put_data_to_es(es_server, '%s%s'%(INDICES_PREFIX, s[0]), DEFAULT_TYPE, data)
#put_data_to_es(es_server, 'cf-logs-2017-02-25', 'log', data)
global g_value_body
g_value_body += '{"index":{"_index":"%s%s","_type":"%s"}}\n%s\n'%(INDICES_PREFIX, s[0], DEFAULT_TYPE, json.dumps(data))
def put_data_to_es(filename):
    '''Bulk-upload the buffered payload to Elasticsearch.'''
global g_value_body
#print "put_data_to_es: ", filename
if len(g_value_body) > 0:
try:
bulk_data_to_es(es_server, g_value_body)
            #log.debug('bulk upload succeeded')
            print "+",
        except Exception,data:
            log.debug('data file: %s bulk upload failed: Data[%s]'%(filename, g_value_body))
            print(data)
    # clear the buffer
g_value_body = ""
def parse_file(es_server, filename):
    log.debug('start parsing file: %s'%(filename))
    if not os.path.exists(filename):
        log.debug('file %s does not exist'%(filename))
        return
    total_num = 0    # total lines processed
    process_num = 0  # lines buffered but not yet uploaded
with open(filename) as f:
for line in f.readlines():
line = line.strip()
if not len(line) or line.startswith('#'):
continue
sections = line.split('\t')
if len(sections) > 1:
#print ("sections[%d]"%len(sections))
data = process_line(sections)
if ( process_num > DUMP_PROCESS_NUM ):
put_data_to_es(filename)
process_num = 0
total_num += 1
process_num += 1
    ## after parsing, flush any remaining buffered data
    if process_num > 0:
        put_data_to_es(filename)
    log.debug('finished parsing file: %s lines: %d'%(filename, total_num))
def usage(prog):
print( "%s usage:"%(prog))
print(" %s es_server log_file [log_file] [log_file] ... : 分析日志文件列表"%(prog))
if __name__ == '__main__':
    # argument check: need at least an ES server and one log file
    argc = len(sys.argv)
    if argc < 3:
        usage(sys.argv[0])
        sys.exit(1)
    es_server = sys.argv[1]
    log.info('start batch parsing of log files into %s: %s'%(es_server, ' '.join(sys.argv[2:])))
    # parse each log file in turn
    for pos in xrange(argc-2):
parse_file(es_server, sys.argv[pos+2])
sys.exit(0)
|
gpl-3.0
| 3,759,973,895,854,942,700
| 25.575342
| 123
| 0.515722
| false
| 2.574652
| false
| false
| false
|
rhdedgar/openshift-tools
|
openshift/installer/vendored/openshift-ansible-3.6.173/roles/openshift_health_checker/openshift_checks/docker_image_availability.py
|
1
|
9598
|
"""Check that required Docker images are available."""
from openshift_checks import OpenShiftCheck
from openshift_checks.mixins import DockerHostMixin
NODE_IMAGE_SUFFIXES = ["haproxy-router", "docker-registry", "deployer", "pod"]
DEPLOYMENT_IMAGE_INFO = {
"origin": {
"namespace": "openshift",
"name": "origin",
"registry_console_image": "cockpit/kubernetes",
},
"openshift-enterprise": {
"namespace": "openshift3",
"name": "ose",
"registry_console_image": "registry.access.redhat.com/openshift3/registry-console",
},
}
class DockerImageAvailability(DockerHostMixin, OpenShiftCheck):
"""Check that required Docker images are available.
Determine docker images that an install would require and check that they
are either present in the host's docker index, or available for the host to pull
with known registries as defined in our inventory file (or defaults).
"""
name = "docker_image_availability"
tags = ["preflight"]
# we use python-docker-py to check local docker for images, and skopeo
# to look for images available remotely without waiting to pull them.
dependencies = ["python-docker-py", "skopeo"]
skopeo_img_check_command = "timeout 10 skopeo inspect --tls-verify=false docker://{registry}/{image}"
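    # Rendered example (registry and image are illustrative):
    #   timeout 10 skopeo inspect --tls-verify=false docker://docker.io/openshift/origin:v3.6.0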
def __init__(self, *args, **kwargs):
super(DockerImageAvailability, self).__init__(*args, **kwargs)
# record whether we could reach a registry or not (and remember results)
self.reachable_registries = {}
def is_active(self):
"""Skip hosts with unsupported deployment types."""
deployment_type = self.get_var("openshift_deployment_type")
has_valid_deployment_type = deployment_type in DEPLOYMENT_IMAGE_INFO
return super(DockerImageAvailability, self).is_active() and has_valid_deployment_type
def run(self):
msg, failed = self.ensure_dependencies()
if failed:
return {
"failed": True,
"msg": "Some dependencies are required in order to check Docker image availability.\n" + msg
}
required_images = self.required_images()
missing_images = set(required_images) - set(self.local_images(required_images))
# exit early if all images were found locally
if not missing_images:
return {}
registries = self.known_docker_registries()
if not registries:
return {"failed": True, "msg": "Unable to retrieve any docker registries."}
available_images = self.available_images(missing_images, registries)
unavailable_images = set(missing_images) - set(available_images)
if unavailable_images:
registries = [
reg if self.reachable_registries.get(reg, True) else reg + " (unreachable)"
for reg in registries
]
msg = (
"One or more required Docker images are not available:\n {}\n"
"Configured registries: {}\n"
"Checked by: {}"
).format(
",\n ".join(sorted(unavailable_images)),
", ".join(registries),
self.skopeo_img_check_command
)
return dict(failed=True, msg=msg)
return {}
def required_images(self):
"""
Determine which images we expect to need for this host.
Returns: a set of required images like 'openshift/origin:v3.6'
The thorny issue of determining the image names from the variables is under consideration
via https://github.com/openshift/openshift-ansible/issues/4415
For now we operate as follows:
* For containerized components (master, node, ...) we look at the deployment type and
use openshift/origin or openshift3/ose as the base for those component images. The
version is openshift_image_tag as determined by the openshift_version role.
* For OpenShift-managed infrastructure (router, registry...) we use oreg_url if
it is defined; otherwise we again use the base that depends on the deployment type.
Registry is not included in constructed images. It may be in oreg_url or etcd image.
"""
required = set()
deployment_type = self.get_var("openshift_deployment_type")
host_groups = self.get_var("group_names")
# containerized etcd may not have openshift_image_tag, see bz 1466622
image_tag = self.get_var("openshift_image_tag", default="latest")
image_info = DEPLOYMENT_IMAGE_INFO[deployment_type]
if not image_info:
return required
# template for images that run on top of OpenShift
image_url = "{}/{}-{}:{}".format(image_info["namespace"], image_info["name"], "${component}", "${version}")
image_url = self.get_var("oreg_url", default="") or image_url
if 'nodes' in host_groups:
for suffix in NODE_IMAGE_SUFFIXES:
required.add(image_url.replace("${component}", suffix).replace("${version}", image_tag))
# The registry-console is for some reason not prefixed with ose- like the other components.
# Nor is it versioned the same, so just look for latest.
# Also a completely different name is used for Origin.
required.add(image_info["registry_console_image"])
# images for containerized components
if self.get_var("openshift", "common", "is_containerized"):
components = set()
if 'nodes' in host_groups:
components.update(["node", "openvswitch"])
if 'masters' in host_groups: # name is "origin" or "ose"
components.add(image_info["name"])
for component in components:
required.add("{}/{}:{}".format(image_info["namespace"], component, image_tag))
if 'etcd' in host_groups: # special case, note it is the same for origin/enterprise
required.add("registry.access.redhat.com/rhel7/etcd") # and no image tag
return required
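    # Illustration (hypothetical inventory): with deployment_type='origin',
    # openshift_image_tag='v3.6.0' and a host in the 'nodes' group, this yields
    # images such as 'openshift/origin-haproxy-router:v3.6.0' plus
    # 'cockpit/kubernetes' for the registry console.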
def local_images(self, images):
"""Filter a list of images and return those available locally."""
registries = self.known_docker_registries()
found_images = []
for image in images:
# docker could have the image name as-is or prefixed with any registry
imglist = [image] + [reg + "/" + image for reg in registries]
if self.is_image_local(imglist):
found_images.append(image)
return found_images
def is_image_local(self, image):
"""Check if image is already in local docker index."""
result = self.execute_module("docker_image_facts", {"name": image})
return bool(result.get("images")) and not result.get("failed")
def known_docker_registries(self):
"""Build a list of docker registries available according to inventory vars."""
regs = list(self.get_var("openshift.docker.additional_registries", default=[]))
deployment_type = self.get_var("openshift_deployment_type")
if deployment_type == "origin" and "docker.io" not in regs:
regs.append("docker.io")
elif "enterprise" in deployment_type and "registry.access.redhat.com" not in regs:
regs.append("registry.access.redhat.com")
return regs
def available_images(self, images, default_registries):
"""Search remotely for images. Returns: list of images found."""
return [
image for image in images
if self.is_available_skopeo_image(image, default_registries)
]
def is_available_skopeo_image(self, image, default_registries):
"""Use Skopeo to determine if required image exists in known registry(s)."""
registries = default_registries
# If image already includes a registry, only use that.
# NOTE: This logic would incorrectly identify images that do not use a namespace, e.g.
# registry.access.redhat.com/rhel7 as if the registry were a namespace.
# It's not clear that there's any way to distinguish them, but fortunately
# the current set of images all look like [registry/]namespace/name[:version].
if image.count("/") > 1:
registry, image = image.split("/", 1)
registries = [registry]
for registry in registries:
if registry not in self.reachable_registries:
self.reachable_registries[registry] = self.connect_to_registry(registry)
if not self.reachable_registries[registry]:
continue
args = {"_raw_params": self.skopeo_img_check_command.format(registry=registry, image=image)}
result = self.execute_module_with_retries("command", args)
if result.get("rc", 0) == 0 and not result.get("failed"):
return True
if result.get("rc") == 124: # RC 124 == timed out; mark unreachable
self.reachable_registries[registry] = False
return False
def connect_to_registry(self, registry):
"""Use ansible wait_for module to test connectivity from host to registry. Returns bool."""
# test a simple TCP connection
host, _, port = registry.partition(":")
port = port or 443
args = dict(host=host, port=port, state="started", timeout=30)
result = self.execute_module("wait_for", args)
return result.get("rc", 0) == 0 and not result.get("failed")
|
apache-2.0
| 5,285,902,716,826,715,000
| 44.704762
| 115
| 0.627735
| false
| 4.279091
| false
| false
| false
|
tim-clifford/py-cipher
|
src/core.py
|
1
|
3094
|
'''
A few common functions for cipher cracking
Functionality:
- Return a list of letter frequency from a given string
- Sort a string with a given linear function into a list of inputs based on letter frequency
- Shift a given string based on a linear function and inputs
Sample Usage:
>>> from cipher import core
>>> letterFrequency = core.frequencyList(<encrypted bytearray>)
>>> core.sortLinear(lambda x, a, b: a*x + b,<encrypted bytearray>,range(1,5),range(26))
[(<a1>,<b1>),(<a2>,<b2>)...(<a104>,<b104>)]
>>> core.shiftLinear(lambda x, a, b: a*x + b,<encrypted bytearray>,<a1>,<b1>)
<decrypted string>
'''
def frequencyList(input1,utf8=False):
'''
Returns a list of the frequency of characters in a string as fractions of the total
>>> frequencyList("abcde",utf8=True)
[0.2, 0.2, 0.2, 0.2, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
>>> frequencyList(bytearray("abcde","ascii"))
[0.2, 0.2, 0.2, 0.2, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
'''
cipherLetterFrequency = []
for letter in range(97,123):
tempFrequency = 0
for i in input1.lower():
if utf8:
if ord(i) == letter: tempFrequency += 1
elif i == letter:
tempFrequency += 1
        cipherLetterFrequency.append(tempFrequency / len(input1))  # fraction of all characters
return cipherLetterFrequency
def sortLinear(function, list1, a, b):
'''
Returns a list of possible values for a given function
sorted by similarity of the letter frequency to english
>>> core.sortLinear(lambda x, a, b: a*x + b,<encrypted string>,range(1,5),range(26))
[(<a1>,<b1>),(<a2>,<b2>)...(<a104>,<b104>)]
'''
letterFrequency = [0.0817, 0.0149, 0.0278, 0.0425, 0.127, 0.0223, 0.0202, 0.0609, 0.0697, 0.0015, 0.0077, 0.0402, 0.0241, 0.0675, 0.0751, 0.0193, 0.0009, 0.0599, 0.0633, 0.0906, 0.0276, 0.0098, 0.0236, 0.0015, 0.0197, 0.0007]
shiftPossibility = []
paramList = []
for param1 in a:
for param2 in b:
tempPossibility = 0
for letter in list1:
if 65 <= letter <= 90:
newLetter = (function(letter-65,param1,param2))%26
tempPossibility += letterFrequency[newLetter]
shiftPossibility.append(tempPossibility)
paramList.append((param1,param2))
return [(a,b) for _,(a,b) in sorted(zip(shiftPossibility, paramList))][::-1]
def shiftLinear(function, list1, a, b, utf8=False):
'''
Shifts a given string by the function and two input values `a` and `b`
>>> core.shiftLinear(lambda x, a, b: a*(x - b),"NGGNPX NG QNJA",1,13,utf8=True)
'attack at dawn'
>>> core.shiftLinear(lambda x, a, b: a*(x - b),bytearray("NGGNPX NG QNJA","ascii"),1,13)
bytearray(b'attack at dawn')
'''
if utf8:
newInput=""
for i in list1.lower():
if ord(i) < 97 or ord(i) > 122:
newInput += i
else:
newInput += chr((function(ord(i)-97,a,b) % 26 + 97))
return newInput
else:
newInput = bytearray("","ascii")
for i in list1.lower():
if i < 97 or i > 122:
newInput += bytes([i])
else:
newInput += bytes([(function(i-97,a,b)) % 26 + 97])
return newInput
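# End-to-end sketch mirroring the module docstring (a Caesar shift, so the
# best candidate should be close to a=1, b=13; not guaranteed for very
# short ciphertexts):
#
#   cipher = bytearray(b"NGGNPX NG QNJA")
#   best = sortLinear(lambda x, a, b: a*(x - b), cipher, range(1, 2), range(26))[0]
#   print(shiftLinear(lambda x, a, b: a*(x - b), cipher, *best))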
|
mit
| 1,851,057,241,019,089,200
| 35.4
| 227
| 0.641241
| false
| 2.523654
| false
| false
| false
|
mrtumnus/scrape-tedtalks
|
download_tedtalk.py
|
1
|
1586
|
# File: download_tedtalk.py
# Author: E. Partridge
# Date: 8 August 2012
# Description:
# This script parses the TED Talk audio feed and proceeds to
# download all audio files into the same directory that
# this script is located in. Files are prepended with the publication
# date for convenience.
#
# Note: This has only been tested on Windows 7 64-bit, with Python 2.7.2.5
# Note2: TED Talk audio files contain ID3v2.4 tags, which are not supported
# natively by Windows. I used foobar2000 to convert the tags to ID3v2.3,
# which Windows does support. To do this, open the MP3 files in
# foobar2000, right click and select Tagging > MP3 Tag Types... Check
# "Override ID3v2 revision:" and select the ID3v2.3 radio button.
# After that, I was able to view metadata in Windows Explorer and
# Windows Media Player.
import urllib
import feedparser
import time
tedtalk_rss_url = 'http://feeds.feedburner.com/TEDTalks_audio'
tedtalk_feed = feedparser.parse(tedtalk_rss_url)
def GetFeedContent(entry):
content_url = entry.enclosures[0].href
file_name = content_url.split('/')[-1]
file_date = time.strptime(entry.published[5:16], '%d %b %Y')
date_str = '{:04}-{:02}-{:02}'.format(file_date.tm_year, file_date.tm_mon, file_date.tm_mday)
file_name = date_str + ' ' + file_name
try:
with open(file_name) as f:
print('File exists: ' + file_name)
except IOError as e:
print('Downloading: ' + file_name)
urllib.urlretrieve(content_url, file_name)
return
for entry in tedtalk_feed.entries:
GetFeedContent(entry)
|
gpl-3.0
| -5,137,724,765,449,738,000
| 37.7
| 94
| 0.703026
| false
| 2.894161
| false
| false
| false
|
carlgao/lenga
|
images/lenny64-peon/usr/share/python-support/mercurial-common/mercurial/context.py
|
1
|
21456
|
# context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
from node import nullid, nullrev, short
from i18n import _
import ancestor, bdiff, revlog, util, os, errno
class changectx(object):
"""A changecontext object makes access to data related to a particular
changeset convenient."""
def __init__(self, repo, changeid=None):
"""changeid is a revision number, node, or tag"""
self._repo = repo
if not changeid and changeid != 0:
p1, p2 = self._repo.dirstate.parents()
self._rev = self._repo.changelog.rev(p1)
if self._rev == -1:
changeid = 'tip'
else:
self._node = p1
return
self._node = self._repo.lookup(changeid)
self._rev = self._repo.changelog.rev(self._node)
def __str__(self):
return short(self.node())
def __repr__(self):
return "<changectx %s>" % str(self)
def __eq__(self, other):
try:
return self._rev == other._rev
except AttributeError:
return False
def __ne__(self, other):
return not (self == other)
def __nonzero__(self):
return self._rev != nullrev
def __getattr__(self, name):
if name == '_changeset':
self._changeset = self._repo.changelog.read(self.node())
return self._changeset
elif name == '_manifest':
self._manifest = self._repo.manifest.read(self._changeset[0])
return self._manifest
elif name == '_manifestdelta':
md = self._repo.manifest.readdelta(self._changeset[0])
self._manifestdelta = md
return self._manifestdelta
else:
raise AttributeError, name
def __contains__(self, key):
return key in self._manifest
def __getitem__(self, key):
return self.filectx(key)
def __iter__(self):
a = self._manifest.keys()
a.sort()
for f in a:
yield f
def changeset(self): return self._changeset
def manifest(self): return self._manifest
def rev(self): return self._rev
def node(self): return self._node
def user(self): return self._changeset[1]
def date(self): return self._changeset[2]
def files(self): return self._changeset[3]
def description(self): return self._changeset[4]
def branch(self): return self._changeset[5].get("branch")
def extra(self): return self._changeset[5]
def tags(self): return self._repo.nodetags(self._node)
def parents(self):
"""return contexts for each parent changeset"""
p = self._repo.changelog.parents(self._node)
return [changectx(self._repo, x) for x in p]
def children(self):
"""return contexts for each child changeset"""
c = self._repo.changelog.children(self._node)
return [changectx(self._repo, x) for x in c]
def _fileinfo(self, path):
if '_manifest' in self.__dict__:
try:
return self._manifest[path], self._manifest.flags(path)
except KeyError:
raise revlog.LookupError(self._node, path,
_('not found in manifest'))
if '_manifestdelta' in self.__dict__ or path in self.files():
if path in self._manifestdelta:
return self._manifestdelta[path], self._manifestdelta.flags(path)
node, flag = self._repo.manifest.find(self._changeset[0], path)
if not node:
raise revlog.LookupError(self._node, path,
_('not found in manifest'))
return node, flag
def filenode(self, path):
return self._fileinfo(path)[0]
def fileflags(self, path):
try:
return self._fileinfo(path)[1]
except revlog.LookupError:
return ''
def filectx(self, path, fileid=None, filelog=None):
"""get a file context from this changeset"""
if fileid is None:
fileid = self.filenode(path)
return filectx(self._repo, path, fileid=fileid,
changectx=self, filelog=filelog)
def filectxs(self):
"""generate a file context for each file in this changeset's
manifest"""
mf = self.manifest()
m = mf.keys()
m.sort()
for f in m:
yield self.filectx(f, fileid=mf[f])
def ancestor(self, c2):
"""
return the ancestor context of self and c2
"""
n = self._repo.changelog.ancestor(self._node, c2._node)
return changectx(self._repo, n)
class filectx(object):
"""A filecontext object makes access to data related to a particular
filerevision convenient."""
def __init__(self, repo, path, changeid=None, fileid=None,
filelog=None, changectx=None):
"""changeid can be a changeset revision, node, or tag.
fileid can be a file revision or node."""
self._repo = repo
self._path = path
assert (changeid is not None
or fileid is not None
or changectx is not None)
if filelog:
self._filelog = filelog
if changeid is not None:
self._changeid = changeid
if changectx is not None:
self._changectx = changectx
if fileid is not None:
self._fileid = fileid
def __getattr__(self, name):
if name == '_changectx':
self._changectx = changectx(self._repo, self._changeid)
return self._changectx
elif name == '_filelog':
self._filelog = self._repo.file(self._path)
return self._filelog
elif name == '_changeid':
if '_changectx' in self.__dict__:
self._changeid = self._changectx.rev()
else:
self._changeid = self._filelog.linkrev(self._filenode)
return self._changeid
elif name == '_filenode':
if '_fileid' in self.__dict__:
self._filenode = self._filelog.lookup(self._fileid)
else:
self._filenode = self._changectx.filenode(self._path)
return self._filenode
elif name == '_filerev':
self._filerev = self._filelog.rev(self._filenode)
return self._filerev
elif name == '_repopath':
self._repopath = self._path
return self._repopath
else:
raise AttributeError, name
def __nonzero__(self):
try:
n = self._filenode
return True
except revlog.LookupError:
# file is missing
return False
def __str__(self):
return "%s@%s" % (self.path(), short(self.node()))
def __repr__(self):
return "<filectx %s>" % str(self)
def __eq__(self, other):
try:
return (self._path == other._path
and self._fileid == other._fileid)
except AttributeError:
return False
def __ne__(self, other):
return not (self == other)
def filectx(self, fileid):
'''opens an arbitrary revision of the file without
opening a new filelog'''
return filectx(self._repo, self._path, fileid=fileid,
filelog=self._filelog)
def filerev(self): return self._filerev
def filenode(self): return self._filenode
def fileflags(self): return self._changectx.fileflags(self._path)
def isexec(self): return 'x' in self.fileflags()
def islink(self): return 'l' in self.fileflags()
def filelog(self): return self._filelog
def rev(self):
if '_changectx' in self.__dict__:
return self._changectx.rev()
if '_changeid' in self.__dict__:
return self._changectx.rev()
return self._filelog.linkrev(self._filenode)
def linkrev(self): return self._filelog.linkrev(self._filenode)
def node(self): return self._changectx.node()
def user(self): return self._changectx.user()
def date(self): return self._changectx.date()
def files(self): return self._changectx.files()
def description(self): return self._changectx.description()
def branch(self): return self._changectx.branch()
def manifest(self): return self._changectx.manifest()
def changectx(self): return self._changectx
def data(self): return self._filelog.read(self._filenode)
def path(self): return self._path
def size(self): return self._filelog.size(self._filerev)
def cmp(self, text): return self._filelog.cmp(self._filenode, text)
def renamed(self):
"""check if file was actually renamed in this changeset revision
If rename logged in file revision, we report copy for changeset only
if file revisions linkrev points back to the changeset in question
or both changeset parents contain different file revisions.
"""
renamed = self._filelog.renamed(self._filenode)
if not renamed:
return renamed
if self.rev() == self.linkrev():
return renamed
name = self.path()
fnode = self._filenode
for p in self._changectx.parents():
try:
if fnode == p.filenode(name):
return None
except revlog.LookupError:
pass
return renamed
def parents(self):
p = self._path
fl = self._filelog
pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
r = self._filelog.renamed(self._filenode)
if r:
pl[0] = (r[0], r[1], None)
return [filectx(self._repo, p, fileid=n, filelog=l)
for p,n,l in pl if n != nullid]
def children(self):
# hard for renames
c = self._filelog.children(self._filenode)
return [filectx(self._repo, self._path, fileid=x,
filelog=self._filelog) for x in c]
def annotate(self, follow=False, linenumber=None):
'''returns a list of tuples of (ctx, line) for each line
in the file, where ctx is the filectx of the node where
that line was last changed.
This returns tuples of ((ctx, linenumber), line) for each line,
if "linenumber" parameter is NOT "None".
In such tuples, linenumber means one at the first appearance
in the managed file.
To reduce annotation cost,
this returns fixed value(False is used) as linenumber,
if "linenumber" parameter is "False".'''
def decorate_compat(text, rev):
return ([rev] * len(text.splitlines()), text)
def without_linenumber(text, rev):
return ([(rev, False)] * len(text.splitlines()), text)
def with_linenumber(text, rev):
size = len(text.splitlines())
return ([(rev, i) for i in xrange(1, size + 1)], text)
decorate = (((linenumber is None) and decorate_compat) or
(linenumber and with_linenumber) or
without_linenumber)
def pair(parent, child):
for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
child[0][b1:b2] = parent[0][a1:a2]
return child
getlog = util.cachefunc(lambda x: self._repo.file(x))
def getctx(path, fileid):
log = path == self._path and self._filelog or getlog(path)
return filectx(self._repo, path, fileid=fileid, filelog=log)
getctx = util.cachefunc(getctx)
def parents(f):
# we want to reuse filectx objects as much as possible
p = f._path
if f._filerev is None: # working dir
pl = [(n.path(), n.filerev()) for n in f.parents()]
else:
pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
if follow:
r = f.renamed()
if r:
pl[0] = (r[0], getlog(r[0]).rev(r[1]))
return [getctx(p, n) for p, n in pl if n != nullrev]
# use linkrev to find the first changeset where self appeared
if self.rev() != self.linkrev():
base = self.filectx(self.filerev())
else:
base = self
# find all ancestors
needed = {base: 1}
visit = [base]
files = [base._path]
while visit:
f = visit.pop(0)
for p in parents(f):
if p not in needed:
needed[p] = 1
visit.append(p)
if p._path not in files:
files.append(p._path)
else:
# count how many times we'll use this
needed[p] += 1
# sort by revision (per file) which is a topological order
visit = []
for f in files:
fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
visit.extend(fn)
visit.sort()
hist = {}
for r, f in visit:
curr = decorate(f.data(), f)
for p in parents(f):
if p != nullid:
curr = pair(hist[p], curr)
# trim the history of unneeded revs
needed[p] -= 1
if not needed[p]:
del hist[p]
hist[f] = curr
return zip(hist[f][0], hist[f][1].splitlines(1))
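    # Usage sketch: ``for fctx, line in somefilectx.annotate(): ...`` pairs
    # each line of the file with the filectx that last changed it.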
def ancestor(self, fc2):
"""
find the common ancestor file context, if any, of self, and fc2
"""
acache = {}
# prime the ancestor cache for the working directory
for c in (self, fc2):
            if c._filerev is None:
pl = [(n.path(), n.filenode()) for n in c.parents()]
acache[(c._path, None)] = pl
flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
def parents(vertex):
if vertex in acache:
return acache[vertex]
f, n = vertex
if f not in flcache:
flcache[f] = self._repo.file(f)
fl = flcache[f]
pl = [(f, p) for p in fl.parents(n) if p != nullid]
re = fl.renamed(n)
if re:
pl.append(re)
acache[vertex] = pl
return pl
a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
v = ancestor.ancestor(a, b, parents)
if v:
f, n = v
return filectx(self._repo, f, fileid=n, filelog=flcache[f])
return None
class workingctx(changectx):
"""A workingctx object makes access to data related to
the current working directory convenient."""
def __init__(self, repo):
self._repo = repo
self._rev = None
self._node = None
def __str__(self):
return str(self._parents[0]) + "+"
def __nonzero__(self):
return True
def __getattr__(self, name):
if name == '_parents':
self._parents = self._repo.parents()
return self._parents
if name == '_status':
self._status = self._repo.status()
return self._status
if name == '_manifest':
self._buildmanifest()
return self._manifest
else:
raise AttributeError, name
def _buildmanifest(self):
"""generate a manifest corresponding to the working directory"""
man = self._parents[0].manifest().copy()
copied = self._repo.dirstate.copies()
is_exec = util.execfunc(self._repo.root,
lambda p: man.execf(copied.get(p,p)))
is_link = util.linkfunc(self._repo.root,
lambda p: man.linkf(copied.get(p,p)))
modified, added, removed, deleted, unknown = self._status[:5]
for i, l in (("a", added), ("m", modified), ("u", unknown)):
for f in l:
man[f] = man.get(copied.get(f, f), nullid) + i
try:
man.set(f, is_exec(f), is_link(f))
except OSError:
pass
for f in deleted + removed:
if f in man:
del man[f]
self._manifest = man
def manifest(self): return self._manifest
def user(self): return self._repo.ui.username()
def date(self): return util.makedate()
def description(self): return ""
def files(self):
f = self.modified() + self.added() + self.removed()
f.sort()
return f
def modified(self): return self._status[0]
def added(self): return self._status[1]
def removed(self): return self._status[2]
def deleted(self): return self._status[3]
def unknown(self): return self._status[4]
def clean(self): return self._status[5]
def branch(self): return self._repo.dirstate.branch()
def tags(self):
t = []
[t.extend(p.tags()) for p in self.parents()]
return t
def parents(self):
"""return contexts for each parent changeset"""
return self._parents
def children(self):
return []
def fileflags(self, path):
if '_manifest' in self.__dict__:
try:
return self._manifest.flags(path)
except KeyError:
return ''
pnode = self._parents[0].changeset()[0]
orig = self._repo.dirstate.copies().get(path, path)
node, flag = self._repo.manifest.find(pnode, orig)
is_link = util.linkfunc(self._repo.root,
lambda p: flag and 'l' in flag)
is_exec = util.execfunc(self._repo.root,
lambda p: flag and 'x' in flag)
try:
return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
except OSError:
pass
if not node or path in self.deleted() or path in self.removed():
return ''
return flag
def filectx(self, path, filelog=None):
"""get a file context from the working directory"""
return workingfilectx(self._repo, path, workingctx=self,
filelog=filelog)
def ancestor(self, c2):
"""return the ancestor context of self and c2"""
return self._parents[0].ancestor(c2) # punt on two parents for now
class workingfilectx(filectx):
"""A workingfilectx object makes access to data related to a particular
file in the working directory convenient."""
def __init__(self, repo, path, filelog=None, workingctx=None):
"""changeid can be a changeset revision, node, or tag.
fileid can be a file revision or node."""
self._repo = repo
self._path = path
self._changeid = None
self._filerev = self._filenode = None
if filelog:
self._filelog = filelog
if workingctx:
self._changectx = workingctx
def __getattr__(self, name):
if name == '_changectx':
self._changectx = workingctx(self._repo)
return self._changectx
elif name == '_repopath':
self._repopath = (self._repo.dirstate.copied(self._path)
or self._path)
return self._repopath
elif name == '_filelog':
self._filelog = self._repo.file(self._repopath)
return self._filelog
else:
raise AttributeError, name
def __nonzero__(self):
return True
def __str__(self):
return "%s@%s" % (self.path(), self._changectx)
def filectx(self, fileid):
'''opens an arbitrary revision of the file without
opening a new filelog'''
return filectx(self._repo, self._repopath, fileid=fileid,
filelog=self._filelog)
def rev(self):
if '_changectx' in self.__dict__:
return self._changectx.rev()
return self._filelog.linkrev(self._filenode)
def data(self): return self._repo.wread(self._path)
def renamed(self):
rp = self._repopath
if rp == self._path:
return None
return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
def parents(self):
'''return parent filectxs, following copies if necessary'''
p = self._path
rp = self._repopath
pcl = self._changectx._parents
fl = self._filelog
pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
if len(pcl) > 1:
if rp != p:
fl = None
pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
return [filectx(self._repo, p, fileid=n, filelog=l)
for p,n,l in pl if n != nullid]
def children(self):
return []
def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
def date(self):
t, tz = self._changectx.date()
try:
return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
except OSError, err:
if err.errno != errno.ENOENT: raise
return (t, tz)
def cmp(self, text): return self._repo.wread(self._path) == text
|
mit
| -2,856,619,026,517,189,000
| 33.220096
| 81
| 0.546001
| false
| 4.023251
| false
| false
| false
|
dimtruck/magnum
|
magnum/api/controllers/v1/service.py
|
1
|
13306
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_utils import timeutils
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
from magnum.api.controllers import link
from magnum.api.controllers.v1 import base as v1_base
from magnum.api.controllers.v1 import collection
from magnum.api.controllers.v1 import types
from magnum.api.controllers.v1 import utils as api_utils
from magnum.api import expose
from magnum.api import validation
from magnum.common import exception
from magnum.common import k8s_manifest
from magnum.common import policy
from magnum import objects
# NOTE(dims): We don't depend on oslo*i18n yet
_ = _LI = _LW = _LE = _LC = lambda x: x
class ServicePatchType(v1_base.K8sPatchType):
@staticmethod
def internal_attrs():
defaults = v1_base.K8sPatchType.internal_attrs()
return defaults + ['/selector', '/ports', '/ip']
class Service(v1_base.K8sResourceBase):
uuid = types.uuid
"""Unique UUID for this service"""
selector = wsme.wsattr({wtypes.text: wtypes.text}, readonly=True)
"""Selector of this service"""
ip = wtypes.text
"""IP of this service"""
ports = wsme.wsattr([{wtypes.text: wtypes.IntegerType()}], readonly=True)
"""Port of this service"""
links = wsme.wsattr([link.Link], readonly=True)
"""A list containing a self link and associated service links"""
def __init__(self, **kwargs):
super(Service, self).__init__()
self.fields = []
for field in objects.Service.fields:
# Skip fields we do not expose.
if not hasattr(self, field):
continue
self.fields.append(field)
setattr(self, field, kwargs.get(field, wtypes.Unset))
@staticmethod
def _convert_with_links(service, url, expand=True):
if not expand:
service.unset_fields_except(['uuid', 'name', 'bay_uuid', 'labels',
'selector', 'ip', 'ports'])
service.links = [link.Link.make_link('self', url,
'services', service.uuid),
link.Link.make_link('bookmark', url,
'services', service.uuid,
bookmark=True)
]
return service
@classmethod
def convert_with_links(cls, rpc_service, expand=True):
service = Service(**rpc_service.as_dict())
return cls._convert_with_links(service, pecan.request.host_url, expand)
@classmethod
def sample(cls, expand=True):
sample = cls(uuid='fe78db47-9a37-4e9f-8572-804a10abc0aa',
name='MyService',
bay_uuid='7ae81bb3-dec3-4289-8d6c-da80bd8001ae',
labels={'label1': 'foo'},
selector={'label1': 'foo'},
ip='172.17.2.2',
ports=[{"port": 88,
"targetPort": 6379,
"protocol": "TCP"}],
manifest_url='file:///tmp/rc.yaml',
manifest='''{
"metadata": {
"name": "test",
"labels": {
"key": "value"
}
},
"spec": {
"ports": [
{
"port": 88,
"targetPort": 6379,
"protocol": "TCP"
}
],
"selector": {
"bar": "foo"
}
}
}''',
created_at=timeutils.utcnow(),
updated_at=timeutils.utcnow())
return cls._convert_with_links(sample, 'http://localhost:9511', expand)
def parse_manifest(self):
try:
manifest = k8s_manifest.parse(self._get_manifest())
except ValueError as e:
raise exception.InvalidParameterValue(message=str(e))
try:
self.name = manifest["metadata"]["name"]
except (KeyError, TypeError):
raise exception.InvalidParameterValue(
"Field metadata['name'] can't be empty in manifest.")
try:
self.ports = manifest["spec"]["ports"][:]
except (KeyError, TypeError):
raise exception.InvalidParameterValue(
"Field spec['ports'] can't be empty in manifest.")
if "selector" in manifest["spec"]:
self.selector = manifest["spec"]["selector"]
if "labels" in manifest["metadata"]:
self.labels = manifest["metadata"]["labels"]
class ServiceCollection(collection.Collection):
"""API representation of a collection of services."""
services = [Service]
"""A list containing services objects"""
def __init__(self, **kwargs):
self._type = 'services'
@staticmethod
def convert_with_links(rpc_services, limit, url=None,
expand=False, **kwargs):
collection = ServiceCollection()
collection.services = [Service.convert_with_links(p, expand)
for p in rpc_services]
collection.next = collection.get_next(limit, url=url, **kwargs)
return collection
@classmethod
def sample(cls):
sample = cls()
sample.services = [Service.sample(expand=False)]
return sample
class ServicesController(rest.RestController):
"""REST controller for Services."""
def __init__(self):
super(ServicesController, self).__init__()
_custom_actions = {
'detail': ['GET'],
}
def _get_services_collection(self, marker, limit,
sort_key, sort_dir,
bay_ident, expand=False,
resource_url=None):
limit = api_utils.validate_limit(limit)
sort_dir = api_utils.validate_sort_dir(sort_dir)
marker_obj = None
if marker:
marker_obj = objects.Service.get_by_uuid(pecan.request.context,
marker)
services = pecan.request.rpcapi.service_list(pecan.request.context,
limit,
marker_obj,
sort_key=sort_key,
sort_dir=sort_dir)
return ServiceCollection.convert_with_links(services, limit,
url=resource_url,
expand=expand,
sort_key=sort_key,
sort_dir=sort_dir)
@policy.enforce_wsgi("service")
@expose.expose(ServiceCollection, types.uuid, int, wtypes.text,
wtypes.text, types.uuid_or_name)
def get_all(self, marker=None, limit=None, sort_key='id',
sort_dir='asc', bay_ident=None):
"""Retrieve a list of services.
:param marker: pagination marker for large data sets.
:param limit: maximum number of resources to return in a single result.
:param sort_key: column to sort results by. Default: id.
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
:param bay_ident: UUID or logical name of the Bay.
"""
return self._get_services_collection(marker, limit, sort_key,
sort_dir, bay_ident)
@policy.enforce_wsgi("service")
@expose.expose(ServiceCollection, types.uuid, int, wtypes.text,
wtypes.text, types.uuid_or_name)
def detail(self, marker=None, limit=None, sort_key='id',
sort_dir='asc', bay_ident=None):
"""Retrieve a list of services with detail.
:param marker: pagination marker for large data sets.
:param limit: maximum number of resources to return in a single result.
:param sort_key: column to sort results by. Default: id.
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
:param bay_ident: UUID or logical name of the Bay.
"""
        # NOTE(lucasagomes): /detail should only work against collections
parent = pecan.request.path.split('/')[:-1][-1]
if parent != "services":
raise exception.HTTPNotFound
expand = True
resource_url = '/'.join(['services', 'detail'])
        return self._get_services_collection(marker, limit,
                                             sort_key, sort_dir, bay_ident,
                                             expand, resource_url)
@policy.enforce_wsgi("service", "get")
@expose.expose(Service, types.uuid_or_name,
types.uuid_or_name)
def get_one(self, service_ident, bay_ident):
"""Retrieve information about the given service.
:param service_ident: UUID or logical name of the service.
:param bay_ident: UUID or logical name of the Bay.
"""
rpc_service = api_utils.get_rpc_resource('Service', service_ident)
return Service.convert_with_links(rpc_service)
@policy.enforce_wsgi("service", "create")
@expose.expose(Service, body=Service, status_code=201)
@validation.enforce_bay_types('kubernetes')
def post(self, service):
"""Create a new service.
:param service: a service within the request body.
"""
service.parse_manifest()
service_dict = service.as_dict()
context = pecan.request.context
service_dict['project_id'] = context.project_id
service_dict['user_id'] = context.user_id
service_obj = objects.Service(context, **service_dict)
new_service = pecan.request.rpcapi.service_create(service_obj)
if new_service is None:
raise exception.InvalidState()
# Set the HTTP Location Header
pecan.response.location = link.build_url('services', new_service.uuid)
return Service.convert_with_links(new_service)
@policy.enforce_wsgi("service", "update")
@wsme.validate(types.uuid, [ServicePatchType])
@expose.expose(Service, types.uuid_or_name,
types.uuid_or_name, body=[ServicePatchType])
def patch(self, service_ident, bay_ident, patch):
"""Update an existing service.
:param service_ident: UUID or logical name of a service.
:param bay_ident: UUID or logical name of the Bay.
:param patch: a json PATCH document to apply to this service.
"""
rpc_service = api_utils.get_rpc_resource('Service', service_ident)
        # Init the manifest and manifest_url fields because we don't store
        # them in the database.
rpc_service['manifest'] = None
rpc_service['manifest_url'] = None
try:
service_dict = rpc_service.as_dict()
service = Service(**api_utils.apply_jsonpatch(service_dict, patch))
if service.manifest or service.manifest_url:
service.parse_manifest()
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
# Update only the fields that have changed
for field in objects.Service.fields:
try:
patch_val = getattr(service, field)
except AttributeError:
# Ignore fields that aren't exposed in the API
continue
if patch_val == wtypes.Unset:
patch_val = None
if rpc_service[field] != patch_val:
rpc_service[field] = patch_val
if service.manifest or service.manifest_url:
pecan.request.rpcapi.service_update(rpc_service)
else:
rpc_service.save()
return Service.convert_with_links(rpc_service)
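    # Illustrative sketch (not part of the original API): a JSON PATCH body
    # for this endpoint might look like (the manifest value is hypothetical):
    #   [{"path": "/manifest", "value": "<manifest json>", "op": "replace"}]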
@policy.enforce_wsgi("service")
@expose.expose(None, types.uuid_or_name,
types.uuid_or_name, status_code=204)
def delete(self, service_ident, bay_ident):
"""Delete a service.
:param service_ident: UUID or logical name of a service.
:param bay_ident: UUID or logical name of the Bay.
"""
rpc_service = api_utils.get_rpc_resource('Service', service_ident)
pecan.request.rpcapi.service_delete(rpc_service.uuid)
|
apache-2.0
| -2,565,025,987,527,595,000
| 38.48368
| 79
| 0.55118
| false
| 4.40596
| false
| false
| false
|
Altair3/Tanks
|
bzagents/PigeonAgent.py
|
1
|
2822
|
import sys
import math
import time
import random
from bzrc import BZRC, Command
class PigeonAgent(object):
def __init__(self, bzrc, mode, time):
self.bzrc = bzrc
self.mode = mode
self.num_tanks = 1
self.cur_time = time
self.const_velocity = .5
self.time_move = self.cur_time
self.time_turn = self.cur_time
self.move_interval = 2.0
self.turn_interval = 1.0
def behave(self, time):
if self.mode == "sit":
return
elif self.mode == "const":
#self.mytanks = self.bzrc.get_mytanks()
for i in range(self.num_tanks):
self.bzrc.speed(i, self.const_velocity)
elif self.mode == "wild":
for i in range(self.num_tanks):
if (time - self.time_move) > self.move_interval:
for i in range(self.num_tanks):
speed = self.getRandomSpeed()
self.bzrc.speed(i, speed)
self.time_move = time
if (time - self.time_turn) > self.turn_interval:
for i in range(self.num_tanks):
angvel = self.getRandomAngvel()
self.bzrc.angvel(i, angvel)
self.time_turn = time
def getRandomAngvel(self):
rval = random.random()
rval *= self.getDirection()
return rval
def getDirection(self):
threshold = .5
n = random.random()
if n <= threshold:
direction = -1.0
else:
direction = 1.0
return direction
def getRandomSpeed(self):
rval = random.uniform(0.4, 1.0)
return rval
def stop(self):
for tank in self.bzrc.get_mytanks():
self.bzrc.speed(tank.index, 0)
self.bzrc.angvel(tank.index, 0)
def main():
# Process CLI arguments.
try:
execname, host, port, mode = sys.argv
except ValueError:
execname = sys.argv[0]
print >>sys.stderr, '%s: incorrect number of arguments' % execname
print >>sys.stderr, 'usage: %s hostname port [sit|const|wild]' % sys.argv[0]
sys.exit(-1)
bzrc = BZRC(host, int(port))
cur_time = time.time()
agent = PigeonAgent(bzrc, mode, cur_time)
# Run the agent
try:
while True:
cur_time = time.time()
agent.behave(cur_time)
except KeyboardInterrupt:
print "Exiting due to keyboard interrupt."
agent.stop()
bzrc.close()
if __name__ == '__main__':
main()
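# Example invocation (the port number is hypothetical; use the one your BZRC
# server listens on):
#   python PigeonAgent.py localhost 50100 wild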
|
gpl-3.0
| 8,886,061,043,608,434,000
| 26.398058
| 84
| 0.488661
| false
| 3.946853
| false
| false
| false
|
hybrid-storage-dev/cinder-fs-111t-hybrid-cherry
|
api/v2/views/volumes.py
|
1
|
5777
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.api import common
from cinder.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ViewBuilder(common.ViewBuilder):
"""Model a server API response as a python dictionary."""
_collection_name = "volumes"
def __init__(self):
"""Initialize view builder."""
super(ViewBuilder, self).__init__()
def summary_list(self, request, volumes):
"""Show a list of volumes without many details."""
return self._list_view(self.summary, request, volumes)
def detail_list(self, request, volumes):
"""Detailed view of a list of volumes."""
return self._list_view(self.detail, request, volumes,
coll_name=self._collection_name + '/detail')
def summary(self, request, volume):
"""Generic, non-detailed view of an volume."""
return {
'volume': {
'id': volume['id'],
'name': volume['display_name'],
'links': self._get_links(request,
volume['id']),
},
}
def detail(self, request, volume):
"""Detailed view of a single volume."""
return {
'volume': {
'id': volume.get('id'),
'status': volume.get('status'),
'size': volume.get('size'),
'availability_zone': volume.get('availability_zone'),
'created_at': volume.get('created_at'),
'attachments': self._get_attachments(volume),
'name': volume.get('display_name'),
'description': volume.get('display_description'),
'volume_type': self._get_volume_type(volume),
'snapshot_id': volume.get('snapshot_id'),
'source_volid': volume.get('source_volid'),
'metadata': self._get_volume_metadata(volume),
'links': self._get_links(request, volume['id']),
'user_id': volume.get('user_id'),
'bootable': str(volume.get('bootable')).lower(),
'encrypted': self._is_volume_encrypted(volume),
'replication_status': volume.get('replication_status'),
'consistencygroup_id': volume.get('consistencygroup_id'),
'shareable': str(volume.get('shareable')).lower(),
'updated_at': volume.get('updated_at')
}
}
def _is_volume_encrypted(self, volume):
"""Determine if volume is encrypted."""
return volume.get('encryption_key_id') is not None
def _get_attachments(self, volume):
"""Retrieve the attachments of the volume object."""
attachments = []
if volume['attach_status'] == 'attached':
attaches = volume.get('volume_attachment', [])
for attachment in attaches:
if attachment.get('attach_status') == 'attached':
a = {'id': attachment.get('volume_id'),
'attachment_id': attachment.get('id'),
'volume_id': attachment.get('volume_id'),
'server_id': attachment.get('instance_uuid'),
'host_name': attachment.get('attached_host'),
'device': attachment.get('mountpoint'),
}
attachments.append(a)
return attachments
def _get_volume_metadata(self, volume):
"""Retrieve the metadata of the volume object."""
if volume.get('volume_metadata'):
metadata = volume.get('volume_metadata')
return dict((item['key'], item['value']) for item in metadata)
# avoid circular ref when vol is a Volume instance
elif volume.get('metadata') and isinstance(volume.get('metadata'),
dict):
return volume['metadata']
return {}
def _get_volume_type(self, volume):
"""Retrieve the type the volume object."""
if volume['volume_type_id'] and volume.get('volume_type'):
return volume['volume_type']['name']
else:
return volume['volume_type_id']
def _list_view(self, func, request, volumes, coll_name=_collection_name):
"""Provide a view for a list of volumes.
:param func: Function used to format the volume data
:param request: API request
        :param volumes: List of volumes in dictionary format
:param coll_name: Name of collection, used to generate the next link
for a pagination query
:returns: Volume data in dictionary format
"""
volumes_list = [func(request, volume)['volume'] for volume in volumes]
volumes_links = self._get_collection_links(request,
volumes,
coll_name)
volumes_dict = dict(volumes=volumes_list)
if volumes_links:
volumes_dict['volumes_links'] = volumes_links
return volumes_dict
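# Illustrative sketch (not part of the original module): summary() only needs
# a request (for link building) and a dict-like volume; the field values
# below are fabricated.
def _demo_summary_view(request):
    fake_volume = {'id': 'fake-id', 'display_name': 'demo'}
    return ViewBuilder().summary(request, fake_volume)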
|
apache-2.0
| 6,775,183,563,880,163,000
| 40.561151
| 78
| 0.556344
| false
| 4.651369
| false
| false
| false
|
ragupta-git/ImcSdk
|
imcsdk/mometa/bios/BiosVfCDNEnable.py
|
1
|
3109
|
"""This module contains the general information for BiosVfCDNEnable ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class BiosVfCDNEnableConsts:
VP_CDNENABLE_DISABLED = "Disabled"
VP_CDNENABLE_ENABLED = "Enabled"
_VP_CDNENABLE_DISABLED = "disabled"
_VP_CDNENABLE_ENABLED = "enabled"
VP_CDNENABLE_PLATFORM_DEFAULT = "platform-default"
class BiosVfCDNEnable(ManagedObject):
"""This is BiosVfCDNEnable class."""
consts = BiosVfCDNEnableConsts()
naming_props = set([])
mo_meta = {
"classic": MoMeta("BiosVfCDNEnable", "biosVfCDNEnable", "CDN-Enable", VersionMeta.Version204c, "InputOutput", 0x1f, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"modular": MoMeta("BiosVfCDNEnable", "biosVfCDNEnable", "CDN-Enable", VersionMeta.Version2013e, "InputOutput", 0x1f, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"])
}
prop_meta = {
"classic": {
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"vp_cdn_enable": MoPropertyMeta("vp_cdn_enable", "vpCDNEnable", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
},
"modular": {
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"vp_cdn_enable": MoPropertyMeta("vp_cdn_enable", "vpCDNEnable", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
},
}
prop_map = {
"classic": {
"dn": "dn",
"rn": "rn",
"status": "status",
"vpCDNEnable": "vp_cdn_enable",
},
"modular": {
"dn": "dn",
"rn": "rn",
"status": "status",
"vpCDNEnable": "vp_cdn_enable",
},
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.status = None
self.vp_cdn_enable = None
ManagedObject.__init__(self, "BiosVfCDNEnable", parent_mo_or_dn, **kwargs)
|
apache-2.0
| -6,090,074,951,718,473,000
| 43.414286
| 236
| 0.602766
| false
| 3.307447
| false
| false
| false
|
deljus/predictor
|
MWUI/API/resources.py
|
1
|
37011
|
# -*- coding: utf-8 -*-
#
# Copyright 2016, 2017 Ramil Nugmanov <stsouko@live.ru>
# This file is part of MWUI.
#
# MWUI is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
from collections import defaultdict
from flask import url_for, request, Response
from flask_login import current_user, login_user
from flask_restful import reqparse, marshal, inputs, Resource
from functools import wraps
from importlib.util import find_spec
from os import path
from pony.orm import db_session, select, left_join
from typing import Dict, Tuple
from uuid import uuid4
from validators import url
from werkzeug import datastructures
from werkzeug.exceptions import HTTPException, Aborter
from .data import get_additives, get_model, get_models_list, format_results
from .redis import RedisCombiner
from .structures import (ModelRegisterFields, TaskPostResponseFields, TaskGetResponseFields, TaskStructureFields,
LogInFields, AdditivesListFields, ModelListFields)
from ..config import (UPLOAD_PATH, REDIS_HOST, REDIS_JOB_TIMEOUT, REDIS_PASSWORD, REDIS_PORT, REDIS_TTL, SWAGGER,
BLOG_POSTS_PER_PAGE)
from ..constants import (StructureStatus, TaskStatus, ModelType, TaskType, StructureType, UserRole, AdditiveType,
ResultType)
from ..logins import UserLogin
from ..models import Task, Structure, Additive, Model, Additiveset, Destination, User, Result
if SWAGGER and find_spec('flask_restful_swagger'):
from flask_restful_swagger import swagger
else:
class Swagger:
@staticmethod
def operation(*args, **kwargs):
def decorator(f):
return f
return decorator
swagger = Swagger()
redis = RedisCombiner(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWORD, result_ttl=REDIS_TTL,
job_timeout=REDIS_JOB_TIMEOUT)
task_types_desc = ', '.join('{0.value} - {0.name}'.format(x) for x in TaskType)
results_types_desc = ', '.join('{0.value} - {0.name}'.format(x) for x in ResultType)
additives_types_desc = ', '.join('{0.value} - {0.name}'.format(x) for x in AdditiveType)
class Abort512(HTTPException):
code = 512
description = 'task not ready'
original_flask_abort = Aborter(extra={512: Abort512})
def abort(http_status_code, **kwargs):
""" copy-paste from flask-restful
"""
try:
original_flask_abort(http_status_code)
except HTTPException as e:
if len(kwargs):
e.data = kwargs
raise
def fetch_task(task, status):
job = redis.fetch_job(task)
if job is None:
abort(404, message='invalid task id. perhaps this task has already been removed')
if not job:
abort(500, message='modeling server error')
if not job['is_finished']:
abort(512, message='PROCESSING.Task not ready')
if job['result']['status'] != status:
abort(406, message='task status is invalid. task status is [%s]' % job['result']['status'].name)
if job['result']['user'] != current_user.id:
abort(403, message='user access deny. you do not have permission to this task')
return job['result'], job['ended_at']
def dynamic_docstring(*sub):
def decorator(f):
f.__doc__ = f.__doc__.format(*sub)
return f
return decorator
def authenticate(f):
@wraps(f)
def wrapper(*args, **kwargs):
if current_user.is_authenticated:
return f(*args, **kwargs)
abort(401, message=dict(user='not authenticated'))
return wrapper
def auth_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
auth = request.authorization
if auth:
u = UserLogin.get(auth.username.lower(), auth.password)
if u and u.role_is(UserRole.ADMIN):
return f(*args, **kwargs)
return Response('access deny', 401, {'WWW-Authenticate': 'Basic realm="Login Required"'})
return wrapper
class AuthResource(Resource):
method_decorators = [authenticate]
class AdminResource(Resource):
method_decorators = [auth_admin]
class RegisterModels(AdminResource):
def post(self):
data = marshal(request.get_json(force=True), ModelRegisterFields.resource_fields)
models = data if isinstance(data, list) else [data]
available = {x['name']: [(d['host'], d['port'], d['name']) for d in x['destinations']]
for x in get_models_list(skip_prep=False).values()}
report = []
for m in models:
if m['destinations']:
if m['name'] not in available:
with db_session:
new_m = Model(type=m['type'], name=m['name'], description=m['description'],
example=m['example'])
for d in m['destinations']:
Destination(model=new_m, **d)
report.append(dict(model=new_m.id, name=new_m.name, description=new_m.description,
type=new_m.type.value,
example=new_m.example,
destinations=[dict(host=x.host, port=x.port, name=x.name)
for x in new_m.destinations]))
else:
tmp = []
with db_session:
model = Model.get(name=m['name'])
for d in m['destinations']:
if (d['host'], d['port'], d['name']) not in available[m['name']]:
tmp.append(Destination(model=model, **d))
if tmp:
report.append(dict(model=model.id, name=model.name, description=model.description,
type=model.type.value, example=model.example,
destinations=[dict(host=x.host, port=x.port, name=x.name)
for x in tmp]))
return report, 201
class AvailableModels(AuthResource):
@swagger.operation(
notes='Get available models',
nickname='modellist',
responseClass=ModelListFields.__name__,
responseMessages=[dict(code=200, message="models list"), dict(code=401, message="user not authenticated")])
@dynamic_docstring(ModelType.MOLECULE_MODELING, ModelType.REACTION_MODELING)
def get(self):
"""
Get available models list
response format:
        example - chemical structure in smiles or marvin or cml format
        description - description of the model, in markdown format.
name - model name
type - model type: {0.value} [{0.name}] or {1.value} [{1.name}]
model - id
"""
out = []
for x in get_models_list(skip_destinations=True, skip_example=False).values():
x['type'] = x['type'].value
out.append(x)
return out, 200
class AvailableAdditives(AuthResource):
@swagger.operation(
notes='Get available additives',
nickname='additives',
responseClass=AdditivesListFields.__name__,
responseMessages=[dict(code=200, message="additives list"), dict(code=401, message="user not authenticated")])
@dynamic_docstring(additives_types_desc)
def get(self):
"""
Get available additives list
response format:
additive - id
name - name of additive
structure - chemical structure in smiles or marvin or cml format
type - additive type: {0}
"""
out = []
for x in get_additives().values():
x['type'] = x['type'].value
out.append(x)
return out, 200
results_fetch = reqparse.RequestParser()
results_fetch.add_argument('page', type=inputs.positive)
class ResultsTask(AuthResource):
@swagger.operation(
notes='Get saved modeled task',
nickname='saved',
responseClass=TaskGetResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=200, message="modeled task"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
def get(self, task):
"""
Task with modeling results of structures with conditions
all structures include only models with nonempty results lists.
see /task/model get doc.
"""
try:
task = int(task)
except ValueError:
abort(404, message='invalid task id. Use int Luke')
page = results_fetch.parse_args().get('page')
with db_session:
result = Task.get(id=task)
if not result:
abort(404, message='Invalid task id. Perhaps this task has already been removed')
if result.user.id != current_user.id:
abort(403, message='User access deny. You do not have permission to this task')
models = get_models_list(skip_destinations=True)
for v in models.values():
v['type'] = v['type'].value
additives = get_additives()
s = select(s for s in Structure if s.task == result).order_by(Structure.id)
if page:
s = s.page(page, pagesize=BLOG_POSTS_PER_PAGE)
structures = {x.id: dict(structure=x.id, data=x.structure, temperature=x.temperature, pressure=x.pressure,
type=x.structure_type, status=x.structure_status, additives=[], models=[])
for x in s}
r = left_join((s.id, r.model.id, r.key, r.value, r.result_type)
for s in Structure for r in s.results if s.id in structures.keys() and r is not None)
a = left_join((s.id, a.additive.id, a.amount)
for s in Structure for a in s.additives if s.id in structures.keys() and a is not None)
            for s, a_id, amount in a:
                tmp = dict(amount=amount)
                tmp.update(additives[a_id])
                structures[s]['additives'].append(tmp)
tmp_models = defaultdict(dict)
for s, m, rk, rv, rt in r:
tmp_models[s].setdefault(m, []).append(dict(key=rk, value=rv, type=rt))
for s, mr in tmp_models.items():
for m, r in mr.items():
tmp = dict(results=r)
tmp.update(models[m])
structures[s]['models'].append(tmp)
return dict(task=task, status=TaskStatus.DONE.value, date=result.date.strftime("%Y-%m-%d %H:%M:%S"),
type=result.task_type, user=result.user.id, structures=list(structures.values())), 200
@swagger.operation(
notes='Save modeled task',
nickname='save',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=201, message="modeled task saved"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only modeled tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
def post(self, task):
"""
        Store a modeled task in the database
        only modeled tasks can be saved.
        failed models in structures are skipped.
"""
result, ended_at = fetch_task(task, TaskStatus.DONE)
with db_session:
_task = Task(type=result['type'], date=ended_at, user=User[current_user.id])
for s in result['structures']:
_structure = Structure(structure=s['data'], type=s['type'], temperature=s['temperature'],
pressure=s['pressure'], status=s['status'], task=_task)
for a in s['additives']:
Additiveset(additive=Additive[a['additive']], structure=_structure, amount=a['amount'])
for m in s['models']:
for r in m.get('results', []):
Result(model=m['model'], structure=_structure, type=r['type'], key=r['key'], value=r['value'])
return dict(task=_task.id, status=TaskStatus.DONE.value, date=ended_at.strftime("%Y-%m-%d %H:%M:%S"),
type=result['type'].value, user=current_user.id), 201
class ModelTask(AuthResource):
@swagger.operation(
notes='Get modeled task',
nickname='modeled',
responseClass=TaskGetResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=200, message="modeled task"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
@dynamic_docstring(results_types_desc)
def get(self, task):
"""
Task with results of structures with conditions modeling
all structures include models with results lists.
failed models contain empty results lists.
see also /task/prepare get doc.
available model results response types: {0}
"""
page = results_fetch.parse_args().get('page')
return format_results(task, fetch_task(task, TaskStatus.DONE), page=page), 200
@swagger.operation(
notes='Create modeling task',
nickname='modeling',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path'),
dict(name='structures', description='Conditions and selected models for structure[s]',
required=True, allowMultiple=False, dataType=TaskStructureFields.__name__, paramType='body')],
responseMessages=[dict(code=201, message="modeling task created"),
dict(code=400, message="invalid structure data"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
def post(self, task):
"""
Modeling task structures and conditions
        send only changed conditions or todelete marks. see the task/prepare doc.
        the data, status and type fields are unusable.
"""
data = marshal(request.get_json(force=True), TaskStructureFields.resource_fields)
result = fetch_task(task, TaskStatus.PREPARED)[0]
prepared = {s['structure']: s for s in result['structures']}
structures = data if isinstance(data, list) else [data]
tmp = {x['structure']: x for x in structures if x['structure'] in prepared}
if 0 in tmp:
abort(400, message='invalid structure data')
additives = get_additives()
models = get_models_list()
for s, d in tmp.items():
if d['todelete']:
prepared.pop(s)
else:
ps = prepared[s]
if d['additives'] is not None:
alist = []
for a in d['additives']:
if a['additive'] in additives and (0 < a['amount'] <= 1
if additives[a['additive']]['type'] == AdditiveType.SOLVENT
else a['amount'] > 0):
a.update(additives[a['additive']])
alist.append(a)
ps['additives'] = alist
if result['type'] != TaskType.MODELING: # for search tasks assign compatible models
ps['models'] = [get_model(ModelType.select(ps['type'], result['type']))]
elif d['models'] is not None and ps['status'] == StructureStatus.CLEAR:
ps['models'] = [models[m['model']].copy() for m in d['models'] if m['model'] in models and
models[m['model']]['type'].compatible(ps['type'], TaskType.MODELING)]
if d['temperature']:
ps['temperature'] = d['temperature']
if d['pressure']:
ps['pressure'] = d['pressure']
result['structures'] = list(prepared.values())
result['status'] = TaskStatus.MODELING
new_job = redis.new_job(result)
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=result['status'].value, type=result['type'].value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=result['user']), 201
class PrepareTask(AuthResource):
@swagger.operation(
notes='Get validated task',
nickname='prepared',
responseClass=TaskGetResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=200, message="validated task"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
@dynamic_docstring(ModelType.PREPARER, StructureStatus.CLEAR, StructureStatus.RAW, StructureStatus.HAS_ERROR,
ResultType.TEXT, StructureType.REACTION, StructureType.MOLECULE)
def get(self, task):
"""
Task with validated structure and conditions data
        every structure has check status = {1.value} [{1.name}] - all checks passed, {3.value} [{3.name}] - structure \
has errors, or {2.value} [{2.name}] - validation failed.
        the structure type is also autoassigned: {5.value} [{5.name}] or {6.value} [{6.name}].
        every newly validated structure includes a model with type = {0.value} [{0.name}] whose results contain \
error or warning information.
        if the task was not newly created via the upload file or create task api, it can contain models with types \
other than {0.value} [{0.name}] which were previously selected on revalidation for structures with status = \
{1.value} [{1.name}].
        these models contain an empty results list.
        if the preparer model failed [due to server lag etc], structures are returned with status = {2.value} \
[{2.name}] and a {0.name} model with an empty results list. in this case the task can be resent to revalidation as is.
        for an upload task, failed validation returns an empty structure list and resending is impossible.
model results response structure:
key: string - header
type: data type = {4.value} [{4.name}] - plain text information
value: string - body
"""
page = results_fetch.parse_args().get('page')
return format_results(task, fetch_task(task, TaskStatus.PREPARED), page=page), 200
@swagger.operation(
notes='Create revalidation task',
nickname='prepare',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path'),
dict(name='structures', description='Structure[s] of molecule or reaction with optional conditions',
required=True, allowMultiple=False, dataType=TaskStructureFields.__name__, paramType='body')],
responseMessages=[dict(code=201, message="revalidation task created"),
dict(code=400, message="invalid structure data"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
@dynamic_docstring(StructureStatus.CLEAR, StructureType.REACTION, ModelType.REACTION_MODELING,
StructureType.MOLECULE, ModelType.MOLECULE_MODELING)
def post(self, task):
"""
Revalidate task structures and conditions
        it is possible to send a list of TaskStructureFields.
        send only changed data and structure id's. e.g. if the user changed only the temperature in structure 4, the json should be
        {{"temperature": new_value, "structure": 4}} or in a list [{{"temperature": new_value, "structure": 4}}]
        the server keeps unchanged data as is.
        the structure status and type fields are not usable.
        the todelete field marks a structure for deletion.
        example json: [{{"structure": 5, "todelete": true}}]
        the structure with id 5 will be removed from the task's list.
        the data field should be a string containing a marvin document or cml or smiles/smirks.
        the models field is usable only if the structure has status = {0.value} [{0.name}] and its data was not changed.
        for structure type = {1.value} [{1.name}] only model types = {2.value} [{2.name}] are acceptable,
        and vice versa: for type = {3.value} [{3.name}] only model types = {4.value} [{4.name}].
        only the model id field is needed. e.g. [{{"models": [{{model: 1}}], "structure": 3}}]
        for SEARCH type tasks the models field is unusable.
        see also the task/create doc.
"""
data = marshal(request.get_json(force=True), TaskStructureFields.resource_fields)
result = fetch_task(task, TaskStatus.PREPARED)[0]
preparer = get_model(ModelType.PREPARER)
prepared = {s['structure']: s for s in result['structures']}
structures = data if isinstance(data, list) else [data]
tmp = {x['structure']: x for x in structures if x['structure'] in prepared}
if 0 in tmp:
abort(400, message='invalid structure data')
additives = get_additives()
models = get_models_list()
for s, d in tmp.items():
if d['todelete']:
prepared.pop(s)
else:
ps = prepared[s]
if d['additives'] is not None:
alist = []
for a in d['additives']:
if a['additive'] in additives and (0 < a['amount'] <= 1
if additives[a['additive']]['type'] == AdditiveType.SOLVENT
else a['amount'] > 0):
a.update(additives[a['additive']])
alist.append(a)
ps['additives'] = alist
if d['data']:
ps['data'] = d['data']
ps['status'] = StructureStatus.RAW
ps['models'] = [preparer.copy()]
                elif ps['status'] == StructureStatus.RAW:  # renew preparer model.
ps['models'] = [preparer.copy()]
elif ps['status'] == StructureStatus.CLEAR:
if d['models'] is not None:
ps['models'] = [models[m['model']].copy() for m in d['models'] if m['model'] in models and
models[m['model']]['type'].compatible(ps['type'], TaskType.MODELING)]
else: # recheck models for existing
ps['models'] = [m.copy() for m in ps['models'] if m['model'] in models]
if d['temperature']:
ps['temperature'] = d['temperature']
if d['pressure']:
ps['pressure'] = d['pressure']
result['structures'] = list(prepared.values())
result['status'] = TaskStatus.PREPARING
new_job = redis.new_job(result)
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=result['status'].value, type=result['type'].value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=result['user']), 201
class CreateTask(AuthResource):
@swagger.operation(
notes='Create validation task',
nickname='create',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='_type', description='Task type ID: %s' % task_types_desc, required=True,
allowMultiple=False, dataType='int', paramType='path'),
dict(name='structures', description='Structure[s] of molecule or reaction with optional conditions',
required=True, allowMultiple=False, dataType=TaskStructureFields.__name__, paramType='body')],
responseMessages=[dict(code=201, message="validation task created"),
dict(code=400, message="invalid structure data"),
dict(code=401, message="user not authenticated"),
dict(code=403, message="invalid task type"),
dict(code=500, message="modeling server error")])
@dynamic_docstring(AdditiveType.SOLVENT, TaskStatus.PREPARING,
TaskType.MODELING, TaskType.SIMILARITY, TaskType.SUBSTRUCTURE)
def post(self, _type):
"""
Create new task
        it is possible to send a list of TaskStructureFields.
        e.g. [TaskStructureFields1, TaskStructureFields2,...]
        the todelete, status, type and models fields are not usable.
        the data field is required. it should be a string containing a marvin document or cml or smiles/smirks.
        additives should be in the list of available additives.
        amount should be in the range 0 to 1 for additives of type = {0.value} [{0.name}], and positive for others.
        temperature in Kelvin and pressure in Bar should also be positive.
        the response includes the following information:
date: creation date time
status: {1.value} [{1.name}]
task: task id
type: {2.value} [{2.name}] or {3.value} [{3.name}] or {4.value} [{4.name}]
user: user id
"""
try:
_type = TaskType(_type)
except ValueError:
abort(403, message='invalid task type [%s]. valid values are %s' % (_type, task_types_desc))
data = marshal(request.get_json(force=True), TaskStructureFields.resource_fields)
additives = get_additives()
preparer = get_model(ModelType.PREPARER)
structures = data if isinstance(data, list) else [data]
data = []
for s, d in enumerate(structures, start=1):
if d['data']:
alist = []
for a in d['additives'] or []:
if a['additive'] in additives and (0 < a['amount'] <= 1
if additives[a['additive']]['type'] == AdditiveType.SOLVENT
else a['amount'] > 0):
a.update(additives[a['additive']])
alist.append(a)
data.append(dict(structure=s, data=d['data'], status=StructureStatus.RAW, type=StructureType.UNDEFINED,
pressure=d['pressure'], temperature=d['temperature'],
additives=alist, models=[preparer.copy()]))
if not data:
abort(400, message='invalid structure data')
new_job = redis.new_job(dict(status=TaskStatus.NEW, type=_type, user=current_user.id, structures=data))
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=TaskStatus.PREPARING.value, type=_type.value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=current_user.id), 201
uf_post = reqparse.RequestParser()
uf_post.add_argument('file.url', type=str)
uf_post.add_argument('file.path', type=str)
uf_post.add_argument('structures', type=datastructures.FileStorage, location='files')
class UploadTask(AuthResource):
@swagger.operation(
notes='Create validation task from uploaded structures file',
nickname='upload',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='_type', description='Task type ID: %s' % task_types_desc, required=True,
allowMultiple=False, dataType='int', paramType='path'),
dict(name='structures', description='RDF SDF MRV SMILES file', required=True,
allowMultiple=False, dataType='file', paramType='body')],
responseMessages=[dict(code=201, message="validation task created"),
dict(code=401, message="user not authenticated"),
dict(code=400, message="structure file required"),
dict(code=403, message="invalid task type"),
dict(code=500, message="modeling server error")])
def post(self, _type: int) -> Tuple[Dict, int]:
"""
Structures file upload
        Needed for batch mode.
        Any chemical structure format convertible with Chemaxon JChem can be passed.
        conditions in files should be present in the following key-value format:
        additive.amount.1 --> string = float [possible delimiters: :, :=, =]
        temperature --> float
        pressure --> float
        additive.2 --> string
        amount.2 --> float
        where .1[.2] is the index of the additive. multiple additives may be set.
example [RDF]:
$DTYPE additive.amount.1
$DATUM water = .4
$DTYPE temperature
$DATUM 298
$DTYPE pressure
$DATUM 0.9
$DTYPE additive.2
$DATUM DMSO
$DTYPE amount.2
$DATUM 0.6
parsed as:
temperature = 298
pressure = 0.9
additives = [{"name": "water", "amount": 0.4, "type": x, "additive": y1}, \
{"name": "DMSO", "amount": 0.6, "type": x, "additive": y2}]
where "type" and "additive" obtained from DataBase by name
see task/create doc about acceptable conditions values and additives types and response structure.
"""
try:
_type = TaskType(_type)
except ValueError:
abort(403, message='invalid task type [%s]. valid values are %s' % (_type, task_types_desc))
args = uf_post.parse_args()
file_url = None
if args['file.url']: # smart frontend
if url(args['file.url']):
file_url = args['file.url']
elif args['file.path']: # NGINX upload
file_name = path.basename(args['file.path'])
if path.exists(path.join(UPLOAD_PATH, file_name)):
file_url = url_for('.batch_file', file=file_name, _external=True)
elif args['structures']: # flask
file_name = str(uuid4())
args['structures'].save(path.join(UPLOAD_PATH, file_name))
file_url = url_for('.batch_file', file=file_name, _external=True)
if file_url is None:
abort(400, message='structure file required')
new_job = redis.new_job(dict(status=TaskStatus.NEW, type=_type, user=current_user.id,
structures=[dict(data=dict(url=file_url), status=StructureStatus.RAW,
type=StructureType.UNDEFINED,
models=[get_model(ModelType.PREPARER)])]))
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=TaskStatus.PREPARING.value, type=_type.value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=current_user.id), 201
class LogIn(Resource):
@swagger.operation(
notes='App login',
nickname='login',
parameters=[dict(name='credentials', description='User credentials', required=True,
allowMultiple=False, dataType=LogInFields.__name__, paramType='body')],
responseMessages=[dict(code=200, message="logged in"),
dict(code=400, message="invalid data"),
dict(code=403, message="bad credentials")])
def post(self):
"""
Get auth token
        The token is returned in the headers as remember_token.
        to use the task api, send Cookie: 'remember_token=_token_' in the request headers
"""
data = request.get_json(force=True)
if data:
username = data.get('user')
password = data.get('password')
if username and password:
user = UserLogin.get(username.lower(), password)
if user:
login_user(user, remember=True)
return dict(message='logged in'), 200
return dict(message='bad credentials'), 403
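# Illustrative client-side sketch (not part of the original API): obtaining
# the remember_token and reusing it; the base URL and route are hypothetical.
def _demo_login(base_url='http://localhost:5000'):
    import requests
    resp = requests.post(base_url + '/login',
                         json={'user': 'someuser', 'password': 'secret'})
    # the token comes back as a cookie named remember_token
    return resp.cookies.get('remember_token')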
class MagicNumbers(AuthResource):
@swagger.operation(
notes='Magic Numbers',
nickname='magic',
parameters=[],
responseMessages=[dict(code=200, message="magic numbers"),
dict(code=401, message="user not authenticated")])
def get(self):
"""
Get Magic numbers
Dict of all magic numbers with values.
"""
data = {x.__name__: self.__to_dict(x) for x in [TaskType, TaskStatus, StructureType, StructureStatus,
AdditiveType, ResultType]}
data['ModelType'] = {ModelType.MOLECULE_MODELING.name: ModelType.MOLECULE_MODELING.value,
ModelType.REACTION_MODELING.name: ModelType.REACTION_MODELING.value}
return data, 200
@staticmethod
def __to_dict(enum):
return {x.name: x.value for x in enum}
|
agpl-3.0
| -434,087,189,328,953,200
| 44.523985
| 120
| 0.574559
| false
| 4.285168
| false
| false
| false
|
emanuele-f/python-pesci
|
pesci/environment.py
|
1
|
3727
|
#!/bin/env python2
# -*- coding: utf-8 -*-
#
# Emanuele Faranda <black.silver@hotmail.it>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
import ast
import pesci.code
from pesci.errors import *
class ExecutionEnvironment:
def __init__(self):
self.reset()
def reset(self):
self.code = None
self.ip = -1
self._contexts = []
self._stack = []
self.iterator = None
# create global context
self.push_context()
def setup(self, code):
self.code = code
self.ip = 0
self.iterator = None
def setvar(self, vid, val):
if vid and vid[0] == "_":
raise EnvBadSymbolName(self, vid)
self._set_in_context(self._get_context(vid), vid, val)
def getvar(self, vid):
# Try to get defined function or name
try:
var = self._get_from_contexts(vid)
except KeyError:
raise EnvSymbolNotFound(self, vid)
return var
"""set multiple k->v at one """
def loadvars(self, vdict):
for k,v in vdict.items():
self.setvar(k, v)
def _set_in_context(self, context, key, val):
context[key] = val
def _get_from_contexts(self, key):
# search a key in the context stack
for context in reversed(self._contexts):
            if key in context:
return context[key]
raise KeyError(key)
"""push a value into the call stack"""
def push(self, val):
self._stack.append(val)
def pop(self):
return self._stack.pop()
def popall(self):
s = self._stack
self._stack = []
return s
"""context: additional names pool"""
def push_context(self):
# a trick to remember global variables
context = {'__globals__':[]}
self._contexts.append(context)
def pop_context(self):
if len(self._contexts) <= 1:
# NB: cannot pop the global context
raise EnvContextsEmpty(self)
return self._contexts.pop()
def _get_context(self, var):
cur = self.get_current_context()
if var in cur['__globals__']:
return self.get_global_context()
return cur
def get_global_context(self):
return self._contexts[0]
def get_current_context(self):
return self._contexts[-1]
def get_visible_context(self):
# determine the currently visible context variables
ctx = {}
for env in self._contexts:
for key,val in env.items():
if key[0] != "_":
ctx[key] = val
return ctx
def add_global(self, name):
gls = self.get_current_context()['__globals__']
if not name in gls:
gls.append(name)
def get_description(self):
env = self.get_visible_context()
return "ENV :%d:\n%s\n%s\n%s" % (self.ip, "-" * 10,
"\n".join(["%s: %s" % (key, env[key]) for key in sorted(env.keys())]),
"-" * 10)
|
gpl-3.0
| 7,393,122,344,925,203,000
| 28.346457
| 82
| 0.579823
| false
| 3.842268
| false
| false
| false
|
pudo-attic/docstash
|
docstash/util.py
|
1
|
2772
|
from os import path
from hashlib import sha1
from httplib import HTTPResponse
from urllib2 import urlopen
from StringIO import StringIO
from urlparse import urlparse
from werkzeug import secure_filename
MANIFEST_FILE = 'manifest.yaml'
def fullpath(filename):
# a happy tour through stdlib
filename = path.expanduser(filename)
filename = path.expandvars(filename)
filename = path.normpath(filename)
return path.abspath(filename)
def filename(filename, default='data'):
if filename is None:
return filename
basename = path.basename(filename)
return secure_filename(basename) or default
def checksum(filename):
hash = sha1()
with open(filename, 'rb') as fh:
while True:
block = fh.read(2 ** 10)
if not block:
break
hash.update(block)
return hash.hexdigest()
def clean_headers(headers):
result = {}
for k, v in dict(headers).items():
k = k.lower().replace('-', '_')
result[k] = v
return result
def ingest_misc(coll, obj, **kwargs):
if isinstance(obj, basestring):
# Treat strings as paths or URLs
url = urlparse(obj)
if url.scheme.lower() in ['http', 'https']:
try:
import requests
obj = requests.get(obj)
except ImportError:
obj = urlopen(obj)
elif url.scheme.lower() in ['file', '']:
if path.isdir(url.path):
return coll.ingest_dir(url.path)
return coll.ingest_file(url.path)
# Python requests
try:
from requests import Response
if isinstance(obj, Response):
kwargs['source_status'] = obj.status_code
kwargs['headers'] = clean_headers(obj.headers)
kwargs['source_url'] = obj.url
kwargs['file'] = obj.url
fd = StringIO(obj.content)
return coll.ingest_fileobj(fd, **kwargs)
except ImportError:
pass
if isinstance(obj, HTTPResponse):
# Can't tell the URL for HTTPResponses
kwargs['source_status'] = obj.status
# TODO handle lists:
kwargs['headers'] = clean_headers(obj.getheaders())
return coll.ingest_fileobj(obj, **kwargs)
elif hasattr(obj, 'geturl') and hasattr(obj, 'info'):
# assume urllib or urllib2
kwargs['source_url'] = obj.url
kwargs['file'] = obj.url
kwargs['source_status'] = obj.getcode()
kwargs['headers'] = clean_headers(obj.headers)
return coll.ingest_fileobj(obj, **kwargs)
elif hasattr(obj, 'read'):
# Fileobj will be a bit bland
return coll.ingest_fileobj(obj, **kwargs)
raise ValueError("Can't ingest: %r" % obj)
|
mit
| -3,031,235,047,868,030,000
| 26.445545
| 59
| 0.599206
| false
| 4.137313
| false
| false
| false
|
Vrekrer/PycuBLAS
|
codeUtils.py
|
1
|
5234
|
#Published symbols @7.0
#readelf -Ds /usr/lib/x86_64-linux-gnu/libcublas.so.7.0
from subprocess import Popen, PIPE
import pyperclip
libDir = '/usr/lib/x86_64-linux-gnu/'
c_types_reps = {'int' :'c_int',
'size_t' :'c_size_t',
'char' :'c_char',
'unsigned int' :'c_uint',
'void' :'',
'char*' :'c_char_p',
'void*' :'c_void_p'
}
class XX():
    # bare container object used as a simple attribute namespace
    pass
def getSymbolTable(libname):
(stdout, stderr) = Popen(["readelf", "-Ds",
libDir + libname], stdout=PIPE).communicate()
lines = stdout.splitlines()[3:]
return [l.split()[8] for l in lines]
def getNotDefined(fileName, base, symbolTable):
with open(fileName,'r') as pyfile:
fileText = pyfile.read()
return [s for s in symbolTable if not(base+'.'+s in fileText)]
# Function to help construct the headers
def header(funct):
fS = 'cublasS' + funct
fD = 'cublasD' + funct
fC = 'cublasC' + funct
fZ = 'cublasZ' + funct
for f in [fS, fD, fC, fZ]:
print '%s = libcublas.%s_v2' % (f, f)
print 'for funct in [%s, %s, %s, %s]:' % (fS, fD, fC, fZ)
print ' funct.restype = cublasStatus_t'
print ' #funct.argtypes = [cublasHandle_t,'
def pharseFunct(doc):
FunctData = XX()
#remove unicode chars
doc = doc.decode('unicode_escape').encode('ascii', 'ignore')
#split at "("
data = doc.rsplit('(')
#get retType and function Name
FunctData.retType, FunctData.Name = data[0].strip().split()[-2:]
    #get the parameter list
pars = data[1].rsplit(')')[0].strip().split(',')
FunctData.pars = [p.rsplit() for p in pars]
return FunctData
def codeFunct(FunctData, libname):
code = ''
c_header = '# ' + FunctData.retType + ' ' + FunctData.Name + ' ( '
lenH = len(c_header) - 1
for i, p in enumerate(FunctData.pars):
c_header += ' '.join(p)
if (i+1) != len(FunctData.pars):
c_header += ( ',\n#' + lenH*' ' )
else:
c_header += ' )'
code += c_header + '\n'
code += FunctData.Name + ' = ' + libname + '.' + FunctData.Name + '\n'
code += FunctData.Name + '.restype = ' + FunctData.retType + '\n'
args = FunctData.Name + '.argtypes = ['
lenA = len(args)
argtypes = []
argNames = []
for pars in FunctData.pars:
if len(pars) == 1:
argtypes.append(pars[0])
argNames.append('')
elif len(pars) == 2:
argtypes.append(pars[0])
argNames.append(pars[1])
elif len(pars) == 3:
if pars[0] == 'const':
argtypes.append(pars[1])
else:
argtypes.append(' '.join(pars[:2]))
argNames.append(pars[2])
elif '=' in pars:
argtypes.append(' '.join(pars[:-3]))
argNames.append(' '.join(pars[-3:]))
for i, t in enumerate(argtypes):
if t in c_types_reps.keys():
argtypes[i] = c_types_reps[t]
elif (t[:-1] in c_types_reps.keys()) & (t[-1]=='*'):
argtypes[i] = 'POINTER(' + c_types_reps[t[:-1]] + ')'
elif t[-1]=='*':
argtypes[i] = 'POINTER(' + t[:-1] + ')'
else:
argtypes[i] = t
maxArgTypeName = max([len(t) for t in argtypes])+1
for i, argT in enumerate(argtypes):
args += argT
if (i+1) != len(argtypes):
args += ','
else:
args += ' '
if argNames[i] != '':
args += ' '*(maxArgTypeName-len(argT))
args += '# ' + argNames[i]
args += ( '\n' + lenA*' ' )
args += ']\n'
code += args
pyperclip.copy(code)
return code
def codeClipBoardFunct(libname):
source = pyperclip.paste().splitlines()
out = '\n'.join([codeFunct(pharseFunct(l), libname) for l in source])
pyperclip.copy(out)
def pharseStructFields(c_code):
S = XX()
lines = c_code.splitlines()
lines = [line.rsplit(';')[0].strip() for line in lines]
S.datatypes = [' '.join(l.split()[:-1]) for l in lines]
S.dataNames = [l.split()[-1].rsplit('[')[0] for l in lines]
S.arraySize = [(l.split()[-1]+'[').rsplit('[')[1].rsplit(']')[0] for l in lines]
S.size = len(S.datatypes)
S.maxDataNameSize = max([len(a) for a in S.dataNames])
return S
def codeStruct(sData):
code = ' _fields_ = ['
lenH = len(code)
for i in range(sData.size):
name_spaces = (sData.maxDataNameSize - len(sData.dataNames[i]) + 1)*' '
code += "('" + sData.dataNames[i] + "'," +name_spaces
if sData.datatypes[i] in c_types_reps.keys():
code += c_types_reps[sData.datatypes[i]]
else:
code += sData.datatypes[i]
if sData.arraySize[i] != '':
code += '*'+sData.arraySize[i]+')'
else:
code += ')'
if (i+1) != sData.size:
code += ',\n' + lenH*' '
else:
code += ']'
pyperclip.copy(code)
return code
def codeClipBoardStruct():
source = pyperclip.paste()
out = codeStruct(pharseStructFields(source))
pyperclip.copy(out)
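# Illustrative sketch (not part of the original module): parsing a single C
# prototype directly, without the clipboard round-trip; the prototype and
# library name are hypothetical.
def _demo_parse_prototype():
    proto = 'int cublasGetVersion_v2 (void* handle, int* version)'
    return codeFunct(pharseFunct(proto), 'libcublas')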
|
bsd-3-clause
| 1,705,460,581,328,154,400
| 31.7125
| 84
| 0.509171
| false
| 3.13789
| false
| false
| false
|
mostaphaRoudsari/Honeybee
|
src/Honeybee_Load OpenStudio Measure.py
|
1
|
15679
|
#
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2020, Mostapha Sadeghipour Roudsari <mostapha@ladybug.tools>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
This component loads OpenStudio measureds into Honeybee. The measure can be applied to an OpenStudio model.
Read more about OpenStudio measures here: http://nrel.github.io/OpenStudio-user-documentation/reference/measure_writing_guide/
You can download several measures from here: https://bcl.nrel.gov/nrel/types/measure
-
Provided by Honeybee 0.0.66
Args:
_OSMeasure: Path to measure directory [NOT THE FILE]. This input will be removed once measure is loaded
Returns:
OSMeasure: Loaded OpenStudio measure
"""
ghenv.Component.Name = "Honeybee_Load OpenStudio Measure"
ghenv.Component.NickName = 'importOSMeasure'
ghenv.Component.Message = 'VER 0.0.66\nJUL_07_2020'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "HB-Legacy"
ghenv.Component.SubCategory = "09 | Energy | HVACSystems"
#compatibleHBVersion = VER 0.0.56\nJUL_25_2017
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "2"
except: pass
import os
import Grasshopper.Kernel as gh
import scriptcontext as sc
if sc.sticky.has_key('honeybee_release'):
if sc.sticky["honeybee_folders"]["OSLibPath"] != None:
# openstudio is there
openStudioLibFolder = sc.sticky["honeybee_folders"]["OSLibPath"]
openStudioIsReady = True
# check to see that it's version 2.0 or above.
rightVersion = False
try:
osVersion = openStudioLibFolder.split('-')[-1]
if osVersion.startswith('2'):
rightVersion = True
except:
pass
if rightVersion == False:
openStudioIsReady = False
msg = "Your version of OpenStudio must be 2.0 or above to use the measures components."
print msg
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
import clr
clr.AddReferenceToFileAndPath(openStudioLibFolder+"\\openStudio.dll")
import sys
if openStudioLibFolder not in sys.path:
sys.path.append(openStudioLibFolder)
import OpenStudio
else:
openStudioIsReady = False
# let the user know that they need to download OpenStudio libraries
msg1 = "You do not have OpenStudio installed on Your System.\n" + \
"You wont be able to use this component until you install it.\n" + \
"Download the latest OpenStudio for Windows from:\n"
msg2 = "https://www.openstudio.net/downloads"
print msg1
print msg2
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg1)
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg2)
else:
openStudioIsReady = False
class OPSChoice:
def __init__(self, originalString):
self.originalString = originalString
self.value = self.get_value()
self.display_name = self.get_display_name()
def get_display_name(self):
return self.originalString.split("<display_name>")[-1].split("</display_name>")[0]
def get_value(self):
return self.originalString.split("<value>")[-1].split("</value>")[0]
def __repr__(self):
return self.display_name
class OPSMeasureArg:
def __init__(self, originalString):
self.originalString = originalString
self.name = self.get_name()
self.display_name = self.get_display_name()
self.description = self.get_description()
self.type = self.get_type()
self.required = self.get_required()
if self.required == True:
self.display_name = "_" + self.display_name
else:
self.display_name = self.display_name + "_"
self.model_dependent = self.get_model_dependent()
self.default_value = self.get_default_value()
self.choices = self.get_choices()
self.validChoices = [choice.value.lower() for choice in self.choices]
self.userInput = None
def get_name(self):
return self.originalString.split("<name>")[-1].split("</name>")[0]
def get_display_name(self):
return self.originalString.split("</display_name>")[0].split("<display_name>")[-1]
def get_description(self):
return self.originalString.split("<description>")[-1].split("</description>")[0]
def get_type(self):
return self.originalString.split("<type>")[-1].split("</type>")[0]
def get_required(self):
req = self.originalString.split("<required>")[-1].split("</required>")[0]
return True if req.strip() == "true" else False
def get_model_dependent(self):
depends = self.originalString.split("<model_dependent>")[-1].split("</model_dependent>")[0]
return True if depends.strip() == "true" else False
def get_default_value(self):
if not "<default_value>" in self.originalString:
return None
else:
value = self.originalString.split("<default_value>")[-1].split("</default_value>")[0]
if self.type.lower() != "boolean": return value
return True if value.strip() == "true" else False
def get_choices(self):
choicesContainer = self.originalString.split("<choices>")[-1].split("</choices>")[0]
choices = [arg.split("<choice>")[-1] for arg in choicesContainer.split("</choice>")][:-1]
return [OPSChoice(choice) for choice in choices]
def update_value(self, userInput):
#currently everything is string
if len(self.validChoices) == 0:
self.userInput = userInput
elif str(userInput).lower() not in self.validChoices:
#give warning
msg = str(userInput) + " is not a valid input for " + self.display_name + ".\nValid inputs are: " + str(self.choices)
give_warning(msg)
else:
self.userInput = userInput
def __repr__(self):
return (self.display_name + "<" + self.type + "> " + str(self.choices) + \
" Current Value: {}").format(self.default_value if not self.userInput else self.userInput)
def give_warning(msg):
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, msg)
def get_measureArgs(xmlFile):
# there is no good XML parser for IronPython,
# so the measure.xml file is parsed manually with string splits
with open(xmlFile, "r") as measure:
lines = measure.readlines()
argumentsContainer = "".join(lines).split("<arguments>")[-1].split("</arguments>")[0]
arguments = [arg.split("<argument>")[-1] for arg in argumentsContainer.split("</argument>")][:-1]
#collect arguments in a dictionary so I can map the values on update
args = dict()
for count, arg in enumerate(arguments):
args[count+1] = OPSMeasureArg(arg)
return args
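# Illustrative sketch (not part of the original component): the same split
# logic used by OPSMeasureArg/OPSChoice above, applied to a hand-written
# fragment; the tag value here is hypothetical.
#   "<name>wwr</name>".split("<name>")[-1].split("</name>")[0]  ->  'wwr'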
def addInputParam(arg, path, i=None):
if i == None:
param = gh.Parameters.Param_ScriptVariable()
else:
param = ghenv.Component.Params.Input[i]
param.NickName = arg.display_name
param.Name = arg.name
param.Description = str(arg)
param.Optional = True # even if it is required it has a default value
param.AllowTreeAccess = False
param.Access = gh.GH_ParamAccess.item # I assume this can't be a list
if arg.default_value != None:
param.AddVolatileData(path, 0, arg.default_value)
if i == None:
index = ghenv.Component.Params.Input.Count
ghenv.Component.Params.RegisterInputParam(param,index)
ghenv.Component.Params.OnParametersChanged()
def cleanInputNames():
# I couldn't find a clean way to remove the input so I just change the name
for paramCount in range(1,ghenv.Component.Params.Input.Count):
param = ghenv.Component.Params.Input[paramCount]
param.NickName = "_"
param.Name = "_"
param.Description = "_"
param.Optional = False
ghenv.Component.Params.OnParametersChanged()
def cleanFirstInput():
ghenv.Component.Params.Input[0].NickName = "_"
ghenv.Component.Params.Input[0].Name = "_"
# ghenv.Component.Params.Input[0].RemoveAllSources()
def updateComponentDescription(xmlFile):
# get name of measure and description
nickName = os.path.normpath(xmlFile).split("\\")[-2]
ghenv.Component.NickName = nickName
measureType = 'OpenStudio'
with open(xmlFile, "r") as measure:
lines = "".join(measure.readlines())
ghenv.Component.Name = lines.split("</display_name>")[0].split("<display_name>")[-1]
ghenv.Component.Description = lines.split("</description>")[0].split("<description>")[-1]
if 'EnergyPlusMeasure' in lines:
measureType = 'EnergyPlus'
elif 'ModelMeasure' in lines:
measureType = 'OpenStudio'
elif 'ReportingMeasure' in lines:
measureType = 'Reporting'
return measureType
class OpenStudioMeasure:
def __init__(self, name, nickName, description, measurePath, args, measureType):
self.name = name
self.nickName = nickName
self.description = description
self.path = os.path.normpath(measurePath)
self.args = args
self.type = measureType
def updateArguments(self):
#iterate over inputs and assign the new values in case there is any new values
for i in range(1, ghenv.Component.Params.Input.Count):
try:
value = ghenv.Component.Params.Input[i].VolatileData[0][0]
except:
value = self.args[i].default_value
path = gh.Data.GH_Path(0)
ghenv.Component.Params.Input[i].AddVolatileData(path, 0, value)
self.args[i].update_value(value)
def __repr__(self):
return "OpenStudio " + self.name
def loadMeasureFromFile(xmlFile):
if not os.path.isfile(xmlFile): raise Exception("Can't find measure at " + xmlFile)
directory, f_name = os.path.split(xmlFile)
measure = OpenStudio.BCLMeasure(tryGetOSPath(directory))
if measure.arguments().Count == 0:
print "Measure contains no arguments."
measureType = updateComponentDescription(xmlFile)
# load arguments
args = get_measureArgs(xmlFile)
# create an OSMeasure based on default values
OSMeasure = OpenStudioMeasure(ghenv.Component.Name, ghenv.Component.NickName, ghenv.Component.Description, directory, args, measureType)
OSMeasure.updateArguments()
# add the measure to sticky to be able to load and update it
key = ghenv.Component.InstanceGuid.ToString()
if "osMeasures" not in sc.sticky.keys():
sc.sticky["osMeasures"] = dict()
sc.sticky["osMeasures"][key] = OSMeasure
return OSMeasure
def tryGetOSPath(path):
"""Try to convert a string path to OpenStudio Path."""
try:
return OpenStudio.Path(path)
except TypeError:
# OpenStudio 2.6.1
ospath = OpenStudio.OpenStudioUtilitiesCore.toPath(path)
return OpenStudio.Path(ospath)
def loadMeasureFromMem():
try:
key = ghenv.Component.InstanceGuid.ToString()
OSMeasure = sc.sticky["osMeasures"][key]
OSMeasure.updateArguments()
ghenv.Component.Name = OSMeasure.name
ghenv.Component.NickName = OSMeasure.nickName
ghenv.Component.Description = OSMeasure.description
return OSMeasure
except Exception , e:
msg = "Couldn't load the measure!\n%s" % str(e)
if ghenv.Component.Params.Input.Count!=1:
msg += "\nTry to reload the measure with a fresh component."
raise Exception(msg)
print msg
return None
fileLoad = False
try:
key = ghenv.Component.InstanceGuid.ToString()
OSMeasure = sc.sticky["osMeasures"][key]
except:
try:
# '_' is the renamed first input (see cleanFirstInput); on re-runs it still holds the measure directory path
xmlFile = os.path.join(_, "measure.xml")
OSMeasure = loadMeasureFromFile(xmlFile)
fileLoad = True
except Exception as e:
print e
#Honeybee check.
initCheck = True
if not sc.sticky.has_key('honeybee_release'):
initCheck = False
print "You should first let Honeybee fly..."
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, "You should first let Honeybee fly...")
else:
try:
if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component): initCheck = False
hb_hvacProperties = sc.sticky['honeybee_hvacProperties']()
hb_airDetail = sc.sticky["honeybee_hvacAirDetails"]
hb_heatingDetail = sc.sticky["honeybee_hvacHeatingDetails"]
hb_coolingDetail = sc.sticky["honeybee_hvacCoolingDetails"]
except:
initCheck = False
warning = "You need a newer version of Honeybee to use this compoent." + \
"Use updateHoneybee component to update userObjects.\n" + \
"If you have already updated userObjects drag Honeybee_Honeybee component " + \
"into canvas and try again."
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
if openStudioIsReady == True and initCheck == True and fileLoad == False:
if ghenv.Component.Params.Input.Count==1 and _OSMeasure:
# first time loading
xmlFile = os.path.join(_OSMeasure, "measure.xml")
if not os.path.isfile(xmlFile): raise Exception("Can't find measure at " + xmlFile)
measure = OpenStudio.BCLMeasure(tryGetOSPath(_OSMeasure))
if measure.arguments().Count == 0:
print "Measure contains no arguments."
# load arguments
args = get_measureArgs(xmlFile)
# add arguments to component
path = gh.Data.GH_Path(0)
for key in sorted(args.keys()):
addInputParam(args[key], path)
measureType = updateComponentDescription(xmlFile)
# create an OSMeasure based on default values
OSMeasure = OpenStudioMeasure(ghenv.Component.Name, ghenv.Component.NickName, ghenv.Component.Description, _OSMeasure, args, measureType)
# add the measure to sticky to be able to load and update it
key = ghenv.Component.InstanceGuid.ToString()
if "osMeasures" not in sc.sticky.keys():
sc.sticky["osMeasures"] = dict()
sc.sticky["osMeasures"][key] = OSMeasure
_OSMeasure = False
# clean first input
cleanFirstInput()
if sc.sticky['honeybee_release'].isInputMissing(ghenv.Component):
OSMeasure = None
elif ghenv.Component.Params.Input.Count==1 and not _OSMeasure == False:
sc.sticky['honeybee_release'].isInputMissing(ghenv.Component)
else:
OSMeasure = loadMeasureFromMem()
sc.sticky['honeybee_release'].isInputMissing(ghenv.Component)
|
gpl-3.0
| 6,137,310,421,724,974,000
| 38.696203
| 145
| 0.649085
| false
| 3.848552
| false
| false
| false
|
openstack/rally
|
tests/unit/doc/test_format.py
|
1
|
3089
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fnmatch
import io
import os
import re
import testtools
class TestFormat(testtools.TestCase):
def _check_lines_wrapping(self, doc_file, raw):
code_block = False
text_inside_simple_tables = False
lines = raw.split("\n")
for i, line in enumerate(lines):
if code_block:
if not line or line.startswith(" "):
continue
else:
code_block = False
if "::" in line:
code_block = True
# simple style tables also can fit >=80 symbols
# open simple style table
if ("===" in line or "---" in line) and not lines[i - 1]:
text_inside_simple_tables = True
if "http://" in line or "https://" in line or ":ref:" in line:
continue
# Allow lines which do not contain any whitespace
if re.match(r"\s*[^\s]+$", line):
continue
if not text_inside_simple_tables:
self.assertTrue(
len(line) < 80,
msg="%s:%d: Line limited to a maximum of 79 characters." %
(doc_file, i + 1))
# close simple style table
if "===" in line and not lines[i + 1]:
text_inside_simple_tables = False
def _check_no_cr(self, doc_file, raw):
matches = re.findall("\r", raw)
self.assertEqual(
len(matches), 0,
"Found %s literal carriage returns in file %s" %
(len(matches), doc_file))
def _check_trailing_spaces(self, doc_file, raw):
for i, line in enumerate(raw.split("\n")):
trailing_spaces = re.findall(r"\s+$", line)
self.assertEqual(
len(trailing_spaces), 0,
"Found trailing spaces on line %s of %s" % (i + 1, doc_file))
def test_lines(self):
files = []
docs_dir = os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir, os.pardir, "doc")
for root, dirnames, filenames in os.walk(docs_dir):
for filename in fnmatch.filter(filenames, "*.rst"):
files.append(os.path.join(root, filename))
for filename in files:
with io.open(filename, encoding="utf-8") as f:
data = f.read()
self._check_lines_wrapping(filename, data)
self._check_no_cr(filename, data)
self._check_trailing_spaces(filename, data)
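# Usage sketch (assumption: executed from the repository root with testtools
# installed):
#   python -m testtools.run tests.unit.doc.test_format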
|
apache-2.0
| -7,617,275,831,751,958,000
| 37.135802
| 78
| 0.556167
| false
| 4.163073
| false
| false
| false
|
CXWorks/compilerLab
|
lab1/lex/recore.py
|
1
|
7297
|
import networkx as nx
from collections import deque
import matplotlib.pyplot as plt
def re2dfa(re,debug=False):
def isChar(c):
return (c>='a' and c<='z') or (c>='A' and c<='Z') or (c>='0' and c<='9')
way=[]
def re2nfa(re):
_op_={'.':6,'|':7,'(':10}
#add .
full=[]
skip=False
for i in range(len(re)):
if skip:
skip=False
continue
full.append(re[i])
if re[i]=='\\':
i+=1
full.append(re[i])
skip=True
if re[i] not in _op_.keys() and i+1<len(re) and (isChar(re[i+1]) or re[i+1]=='(' or re[i+1]=='\\'):
full.append('.')
full.append('$')
# back
back=[]
symb=[]
skip=False
for i in range(len(full)):
if skip:
skip=False
continue
c=full[i]
if isChar(c):
back.append(c)
if c not in way:
way.append(c)
elif c==')':
while symb[len(symb)-1]!= '(':
back.append(symb.pop())
symb.pop()
elif c=='$':
while len(symb)>0:
back.append(symb.pop())
elif c in ['*','+','?']:
back.append(c)
elif c =='\\':
back.append(c)
i+=1
back.append(full[i])
skip=True
if full[i] not in way:
way.append(full[i])
elif c in _op_.keys():
while len(symb)>0 and symb[len(symb)-1]!='(' and _op_[symb[len(symb)-1]] >= _op_[c]:
back.append(symb.pop())
symb.append(c)
else:
back.append(c)
if c not in way:
way.append(c)
#build nfa
stack=[]
skip=False
for i in range(len(back)):
if skip:
skip=False
continue
c=back[i]
if isChar(c):
g=nx.DiGraph()
g.add_edge(0,1,c=c)
stack.append(g)
elif c=='\\':
i+=1
g=nx.DiGraph()
g.add_edge(0,1,c=back[i])
stack.append(g)
skip=True
elif c== '.':
g2=stack.pop()
g1=stack.pop()
n=len(g1)
g=nx.disjoint_union(g1,g2)
g.add_edge(n-1,n,e='1')
stack.append(g)
elif c=='*':
g=stack[len(stack)-1]
n=len(g)
g.add_edge(0,n-1,e='1')
g.add_edge(n-1,0,e='1')
elif c=='+':
g = stack[len(stack)-1]
n = len(g)
g.add_edge(n - 1, 0, e='1')
elif c=='?':
g = stack[len(stack) - 1]
n = len(g)
g.add_edge(0, n - 1, e='1')
elif c=='|':
g1 = stack.pop()
g2 = stack.pop()
n1 = len(g1)
n2 = len(g2)
s=nx.DiGraph()
s.add_node(0)
s1=nx.disjoint_union(s,g1)
s1.add_edge(0,1,e='1')
e=nx.DiGraph()
e.add_node(0)
e1=nx.disjoint_union(g2,e)
e1.add_edge(n2-1,n2,e='1')
ans=nx.disjoint_union(s1,e1)
ans.add_edge(0,n1+1,e='1')
ans.add_edge(n1,n1+n2+1,e='1')
stack.append(ans)
else:
g = nx.DiGraph()
g.add_edge(0, 1, c=c)
stack.append(g)
return stack.pop()
def findClo(g,node):
ans=[node]
#dfs
stack=[node]
while len(stack)>0:
n=stack.pop()
edge = g.edge[n]
for no,dic in edge.items():
if no not in ans and dic.has_key('e'):
stack.append(no)
ans.append(no)
return ans
def findWay(g,ns,w):
ans=[]
for n in ns:
edge=g.edge[n]
for no,dic in edge.items():
if no not in ans and dic.has_key('c') and dic['c']==w:
#find clo
temp=findClo(g,no)
ans.extend(temp)
return ans
def minDFA(node,index):
ans=[]
log=[]
for i in range(len(node)):
n=node[i]
if n in log:
continue
nto=index[n].values()
notin=[x for x in nto if x not in node]
if len(notin)>0 :
ans.append([n])
continue
t=[n]
for j in range(i+1,len(node)):
jto=index[node[j]].values()
if nto==jto and len(nto)!=0:
t.append(node[j])
log.append(node[j])
ans.append(t)
return ans
def delnode(n,conn,t,to):
del conn[n]
t[to].extend([x for x in t[n] if x not in t[to]])
del t[n]
for k,v in conn.items():
if k != n :
for w in way:
if v.has_key(w) and v[w]==n :
v[w]=to
return conn
def nfa2dfa(nfa):
table={}
#init
t=findClo(nfa,0)
t.sort()
table[0]=t
conn={}
queue=deque([0])
while len(queue)>0:
n=queue.popleft()
n2c={}
n_n=table[n]
for c in way:
te=findWay(nfa,n_n,c)
if len(te)==0:
continue
te.sort()
if te not in table.values():
idd=len(table)
table[idd]=te
queue.append(idd)
else:
idd=table.keys()[table.values().index(te)]
n2c[c]=idd
conn[n]=n2c
#minimise
s=[]
e=[]
for k,v in table.items():
if len(nfa.node)-1 in v:
e.append(k)
else:
s.append(k)
s2=minDFA(s,conn)
e2=minDFA(e,conn)
s2.extend(e2)
for l in s2:
if len(l) == 1:
continue
for i in range(1,len(l)):
conn=delnode(l[i],conn,table,l[0])
#build graph
g=nx.DiGraph()
for k,v in table.items():
g.add_node(k)
if len(nfa.node) - 1 in v:
g.node[k]['e']=1
for node,di in conn.items():
for c,t in di.items():
# g.add_edge(node,t,)
if g.has_edge(node,t):
g.edge[node][t]['c'].append(c)
else:
g.add_edge(node, t,c=[c] )
return g
nfa = re2nfa(re)
g = nfa2dfa(nfa)
if debug:
return g
else:
return [g.node,g.edge]
if __name__ == '__main__':
g=re2dfa('(a|b)*a(a|b)(a|b)',debug=True)
print g.node
print g.edge
nx.draw_networkx(g)
plt.show()
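# Return-shape sketch (debug=False): re2dfa returns [nodes, edges], where
# nodes maps state id -> attribute dict ({'e': 1} marks accepting states) and
# edges maps state id -> {target: {'c': [chars]}} labelled adjacency:
#   nodes, edges = re2dfa('(a|b)*a(a|b)(a|b)')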
|
mit
| 3,387,408,629,161,329,000
| 27.396887
| 111
| 0.371386
| false
| 3.531946
| false
| false
| false
|
OndinaHQ/Tracker
|
plugins/s3.py
|
1
|
3045
|
# Copyright (C) 2012 Stefano Palazzo <stefano.palazzo@gmail.com>
# Copyright (C) 2012 Ondina, LLC. <http://ondina.co>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
import hmac
import hashlib
import http.client
import urllib.parse
import base64
import collections
class S3Error (Exception):
def __init__(self, status, response):
self.status, self.response = status, response
def __str__(self):
return "{}: {}".format(self.status, self.response)
def __repr__(self):
return "S3Error({}, {})".format(repr(self.status), repr(self.response))
class S3 (object):
'''
Usage:
>>> s3 = S3(YOUR_ACCESS_KEY_ID, YOUR_SECRET_ACCESS_KEY)
>>> s3.upload("some-bucket", open("image.png", "rb").read(),
"image/png", "image3838838.png")
https://s3.amazonaws.com/some-bucket/image3838838.png
'''
def __init__(self, access_key, secret_key):
self.__access_key, self.__secret_key = access_key, secret_key
def __request(self, method, bucket, host, action, body, content_type, fn):
date = time.strftime("%c GMT", time.gmtime())
headers = collections.OrderedDict((
("x-amz-acl", "public-read"),
("Content-Type", content_type),
("Content-Length", len(body)),
("Host", bucket + "." + host),
("Date", date),
))
string_to_sign = (method + "\n" +
"\n" +
content_type + "\n" +
date + "\n" +
"x-amz-acl:public-read\n" +
"/" + bucket + "/" + fn)
signature = base64.b64encode(hmac.new(self.__secret_key.encode(),
string_to_sign.encode(), hashlib.sha1).digest()).decode()
authorization = "AWS " + self.__access_key + ":" + signature
headers.update({"Authorization": authorization})
connection = http.client.HTTPSConnection(bucket + "." + host)
action = action + "?" + urllib.parse.urlencode({})
connection.request(method, action, body, headers)
response = connection.getresponse()
if response.status != 200:
raise S3Error(response.status, response.read())
return "https://s3.amazonaws.com/{}/{}".format(bucket, fn)
def upload(self, bucket, data, content_type, filename):
return self.__request("PUT", bucket, "s3.amazonaws.com", "/" +
filename, data, content_type, filename)
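# Error-handling sketch (bucket name and credentials are placeholders):
#   try:
#       s3 = S3("ACCESS_KEY_ID", "SECRET_ACCESS_KEY")
#       url = s3.upload("some-bucket", b"hello", "text/plain", "hello.txt")
#   except S3Error as exc:
#       print(exc.status, exc.response)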
|
gpl-3.0
| 6,556,845,181,018,682,000
| 36.592593
| 79
| 0.611494
| false
| 3.754624
| false
| false
| false
|
gioman/QGIS
|
python/plugins/processing/algs/gdal/buildvrt.py
|
1
|
4226
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
merge.py
---------------------
Date : October 2014
Copyright : (C) 2014 by Radoslaw Guzinski
Email : rmgu at dhi-gras dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Radoslaw Guzinski'
__date__ = 'October 2014'
__copyright__ = '(C) 2014, Radoslaw Guzinski'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.core.outputs import OutputRaster
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterMultipleInput
from processing.core.parameters import ParameterSelection
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import tempFolder
from processing.tools import dataobjects
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class buildvrt(GdalAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
RESOLUTION = 'RESOLUTION'
SEPARATE = 'SEPARATE'
PROJ_DIFFERENCE = 'PROJ_DIFFERENCE'
RESOLUTION_OPTIONS = ['average', 'highest', 'lowest']
def name(self):
return 'buildvirtualraster'
def displayName(self):
return self.tr('Build Virtual Raster')
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'vrt.png'))
def group(self):
return self.tr('Raster miscellaneous')
def defineCharacteristics(self):
self.addParameter(ParameterMultipleInput(self.INPUT,
self.tr('Input layers'), dataobjects.TYPE_RASTER))
self.addParameter(ParameterSelection(self.RESOLUTION,
self.tr('Resolution'), self.RESOLUTION_OPTIONS, 0))
self.addParameter(ParameterBoolean(self.SEPARATE,
self.tr('Layer stack'), True))
self.addParameter(ParameterBoolean(self.PROJ_DIFFERENCE,
self.tr('Allow projection difference'), False))
self.addOutput(OutputRaster(buildvrt.OUTPUT, self.tr('Virtual')))
def getConsoleCommands(self):
arguments = []
arguments.append('-resolution')
arguments.append(self.RESOLUTION_OPTIONS[self.getParameterValue(self.RESOLUTION)])
if self.getParameterValue(buildvrt.SEPARATE):
arguments.append('-separate')
if self.getParameterValue(buildvrt.PROJ_DIFFERENCE):
arguments.append('-allow_projection_difference')
# Always write input files to a text file, in case there are many of them and
# the command would then be longer than the command prompt allows
listFile = os.path.join(tempFolder(), 'buildvrtInputFiles.txt')
with open(listFile, 'w') as f:
f.write(self.getParameterValue(buildvrt.INPUT).replace(';', '\n'))
arguments.append('-input_file_list')
arguments.append(listFile)
out = self.getOutputValue(buildvrt.OUTPUT)
# Ideally the output file extension would be restricted to just .vrt in the UI,
# but since that is not straightforward, the extension is checked and corrected here instead.
_, ext = os.path.splitext(out)
if not ext.lower() == '.vrt':
# use splitext rather than str.replace so an empty or repeated extension cannot corrupt the path
out = os.path.splitext(out)[0] + '.vrt'
self.setOutputValue(self.OUTPUT, out)
arguments.append(out)
return ['gdalbuildvrt', GdalUtils.escapeAndJoin(arguments)]
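# Illustrative result (paths are placeholders): with the default parameters
# getConsoleCommands() returns something like
#   ['gdalbuildvrt', '-resolution average -separate -input_file_list
#    /tmp/processing/buildvrtInputFiles.txt /out/mosaic.vrt']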
|
gpl-2.0
| 2,611,517,822,219,813,400
| 40.841584
| 99
| 0.58424
| false
| 4.343268
| false
| false
| false
|
boffi/boffi.github.io
|
dati_2014/08/exercise1.py
|
1
|
3923
|
######################################################################
# Preliminaries,
import scipy as sp
mat=sp.matrix
from scipy.linalg import inv
######################################################################
# an utility function to format a matrix for inclusion in a LaTeX file
def latex_print(data,name,fmt="%10.4f",title=""):
delim={"mat":"b",
"vet":"B",
"det":"V",
"norm":"v"}
if title:
print "% ----- "+title+" -----"
print "\\begin{"+delim[name]+"matrix}"
print "\\\\\n".join(["&".join(map(lambda x: fmt%(x,),line)) for line in sp.asarray(data)])
print "\\end{"+delim[name]+"matrix}"
######################################################################
Mass=mat(((2,0,0,),
(0,3,0,),
(0,0,4,),));
Mass=100000.*Mass
latex_print(Mass,"mat",title="Mass Matrix")
######################################################################
Stif=mat(((+1,-1,+0),
(-1,+3,-2),
(+0,-2,+5)))
Stif=120e6*Stif
latex_print(Stif,"mat",title="Stiffness Matrix")
######################################################################
# roots finds the roots of the polynomial defined by
# the list of coefficients (1, -11/4., 15/8., -1/4.)
Omegas=mat(sorted(sp.roots((1,-11/4.,15/8.,-1/4.))))*1200.
Eigenv=mat(sp.zeros((3,3)))
# This sets the 0 row of the eigenv matrix to ones
Eigenv[0,:]=1.,1.,1.
# this is a {0,1} column vector
known=mat(((1,),(0,)))
# solve the eq. of free vibrations for psi_0i = 1
for i in range(3):
Omega2=Omegas[0,i]/1200
coef=mat(((3.-3.*Omega2,-2.),(-2.,5.-4.*Omega2)))
bottom=coef.I*known
# this sets the bottom part of each eigenvector
Eigenv[1:,i]=bottom
latex_print(Eigenv,"mat",title="Eigenvectors Matrix")
MStar=Eigenv.T*Mass*Eigenv
latex_print(MStar,"mat",title="Modal Masses Matrix")
KStar=Eigenv.T*Stif*Eigenv
latex_print(KStar,"mat","%10.5e",title="Modal Stiffnesses Matrix")
MStar=Eigenv.T*Mass*Eigenv
latex_print(MStar/1000.,"mat",title="Modal Masses Matrix, in tons")
KStar=Eigenv.T*Stif*Eigenv
latex_print(KStar/1E6,"mat","%10.2f",title="Modal Stiffnesses Matrix, in MN/m")
q_0=MStar.I*Eigenv.T*Mass*mat((5,4,3)).T
latex_print(sp.mat(((5,4,3),)).T,"vet",title="Initial displacements, nodal coo.")
latex_print(q_0,"vet",title="Initial displacements, modal coo.")
qdot_0=MStar.I*Eigenv.T*Mass*mat((0,9,0)).T
latex_print(mat((0,9,0)).T,"vet",title="Initial velocities, nodal coo.")
latex_print(qdot_0,"vet",title="Initial velocities, modal coo.")
# q_i = A_i sin(w_i t) + B_i cos(w_i t)
# qdot_i = w_i(A_i cos(w_i t) - B_i sin(w_i t))
Bs=q_0
As=mat(sp.diagonal(qdot_0/sp.sqrt(Omegas))).T
latex_print(As,"vet",title="Sine coefficients for modal disp.s")
latex_print(Bs,"vet",title="Cosine coefficients for modal disp.s")
ampli=sp.real(sp.sqrt(sp.power(As,2)+(sp.power(Bs,2))))
phase=sp.arctan2(As,Bs)
latex_print(ampli,"vet",title="Cosine only amplitudes for modal disp.s")
latex_print(phase,"vet",title="Cosine only phases for modal disp.s")
# q_i(t) = ampli_i*cos(w_i-phase)
print "% Nodal displacements, in mm\n\\begin{align*}"
for i in range(3):
print r" x_%d & = " % (i+1,),
for j in range(3):
print r"%+6.3f \cos(%10.3f t %+10.3f) " % (Eigenv[i,j]*ampli[j], sp.sqrt(Omegas[0,j]), phase[j]),
print r"\\"
print "\\end{align*}"
print "% Nodal forces, in kN\n\\begin{align*}"
for i in range(3):
print r"x_%d & = " % (i+1,),
for j in range(3):
print r"%+6.3f \cos(%10.3f t %+10.3f) " % (Mass[i,i]*Omegas[0,j]*Eigenv[i,j]*ampli[j]/1E6, sp.sqrt(Omegas[0,j]), phase[j]),
print r"\\"
print "\\end{align*}"
## half-sine
#t1=0.02 # seconds
#p=mat((2.5e6,5e6,5e6)).T # Newtons
## modal loads, normalized
#pl=MStar.I*Eigenv.T*p
##the impulse, and the final velocity, as pl was normalized, is
#qdot_0 = pl*t1/(sp.pi/2)
#print qdot_0, sp.diagonal(qdot_0/sp.sqrt(Omegas))
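# Recap (my summary, matching the printed output above): each nodal response is
# the modal superposition
#   x_i(t) = sum_j psi_ij * ampli_j * cos(omega_j t + phase_j)
# with omega_j = sqrt(Omegas[0, j]).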
|
mit
| -4,355,237,838,158,614,000
| 32.818966
| 131
| 0.559266
| false
| 2.657859
| false
| false
| false
|
mattyowl/fitScalingRelation
|
fitScalingRelation/fitScalingRelationLib.py
|
1
|
62047
|
"""
The MCMC fitting code used in Hilton et al. (2012), in a more general purpose form
Copyright 2015 Matt Hilton (matt.hilton@mykolab.com)
This file is part of fitScalingRelation.
fitScalingRelation is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
fitScalingRelation is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with fitScalingRelation. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
import math
import string
from astLib import *
import pylab as plt
import numpy as np
import astropy.table as atpy
import popen2
from scipy import stats
from scipy import special
from scipy import interpolate
from scipy import ndimage
import pyximport; pyximport.install()
import cythonScalingRelation as csr
import time
import pickle
import matplotlib
import IPython
np.random.seed()
plt.matplotlib.interactive(False)
# For some unknown reason, mathtext in matplotlib is behaving weirdly since Ubuntu 16.10 upgrade
#try:
#plt.matplotlib.rc('text', usetex=True)
#except:
#pass
#-------------------------------------------------------------------------------------------------------------
# Adopt Ed's cosmology
#astCalc.OMEGA_M0=0.27
#astCalc.OMEGA_L=0.73
#-------------------------------------------------------------------------------------------------------------
def ask_for( key ):
s = raw_input( "ParametersDict: enter value for '%s': " % key )
try:
val = eval(s)
except NameError:
# allow people to enter unquoted strings
val = s
return val
class ParametersDict( dict ):
def __getitem__( self, key ):
if key not in self:
print "ParametersDict: parameter '%s' not found" % key
val = ask_for( key )
print "ParametersDict: setting '%s' = %s" % (key,repr(val))
dict.__setitem__( self, key, val )
return dict.__getitem__( self, key )
def read_from_file( self, filename ):
f = open( filename )
old = ''
for line in f:
line = line.strip()
if len(line) == 0 or line[0] == '#':
continue
s = line.split('#')
line = s[0]
#if line[-1] == '\\':
#s = line.split('\\')
#if len(s) > 1:
#old = string.join([old, s[0]])
#continue
#else:
#line = string.join([old, s[0]])
#old = ''
##IPython.embed()
##sys.exit()
s = line.split('=')
if len(s) != 2:
print "Error parsing line:"
print line
IPython.embed()
sys.exit()
try:
key = s[0].strip()
val = eval(s[1].strip()) # XXX:make safer
except:
raise Exception, "can't parse line: %s" % (line)
self[key] = val
f.close()
def write_to_file( self, filename, mode = 'w' ):
f = open( filename, mode )
keys = self.keys()
keys.sort()
for key in keys:
f.write( "%s = %s\n" % (key,repr(self[key])) )
f.close()
def cmp( self, otherDict ):
# returns the list of keys whose values differ from (or are missing in) otherDict
diff = []
ks = self.keys()
for k in ks:
try:
if otherDict[k] == self[k]:
continue
diff += [k]
except KeyError:
diff += [k]
return diff
#-------------------------------------------------------------------------------------------------------------
def selectStartParsFromPriors(settingsDict):
"""Choose random starting values for the MCMC from the priors we're placing on the parameters.
"""
variables=settingsDict['variables']
pars=np.zeros(len(variables))
for i in range(len(variables)):
v=variables[i]
if settingsDict['%sFit' % (v)] == 'fixed':
pars[i]=settingsDict['%s0' % (v)]
else:
pars[i]=np.random.uniform(settingsDict['prior_%s_MIN' % (v)], settingsDict['prior_%s_MAX' % (v)])
# This makes sure that if we're testing by swapping axes, we can use the same prior ranges
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
b=1.0/pars[1]
a=-pars[0]/pars[1]
pars[0]=a
pars[1]=b
return pars
#-------------------------------------------------------------------------------------------------------------
def getPPrior(pPars, settingsDict):
"""Gets prior probability.
"""
variables=settingsDict['variables']
# This makes sure that if we're testing by swapping axes, we can use the same prior ranges
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
b=1.0/pPars[1]
a=-pPars[0]/pPars[1]
pPars[0]=a
pPars[1]=b
priors=np.zeros(len(variables))
for i in range(len(variables)):
v=variables[i]
if pPars[i] > settingsDict['prior_%s_MIN' % (v)] and pPars[i] < settingsDict['prior_%s_MAX' % (v)]:
priors[i]=1.0
else:
priors[i]=0.0
# Fixed parameters must surely be within the priors...
if settingsDict['%sFit' % (v)] == 'fixed':
priors[i]=1.0
pPrior=np.product(priors)
return pPrior
#-------------------------------------------------------------------------------------------------------------
def byteSwapArr(arr):
"""FITS is big-endian, but cython likes native-endian arrays (little-endian for x86)... so, byteswap
if needed.
"""
if arr.dtype.byteorder == '>':
arr=arr.byteswap().newbyteorder('=')
return arr
#-------------------------------------------------------------------------------------------------------------
def sampleGetter(settingsDict, sampleDef, outDir):
"""Loads in catalogue in .fits table format, and add columns xToFit, yToFit, xErrToFit, yErrToFit,
which are fed into the MCMCFit routine. Applies any asked for scalings and cuts according to the
contents of settingsDict and sampleDef.
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
yPivot=settingsDict['yPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
newTab=atpy.Table().read(settingsDict['inFileName'])
# Make a new table here with cuts applied
# NOTE: we really need a better way of labelling constraints
for key in sampleDef:
if key not in ['label', 'plotLabel']:
if key[-4:] == '_MIN':
col=key[:-4]
newTab=newTab[np.where(newTab[col] > sampleDef[key])]
elif key[-4:] == '_MAX':
col=key[:-4]
newTab=newTab[np.where(newTab[col] < sampleDef[key])]
else:
if type(sampleDef[key]) != list:
newTab=newTab[np.where(newTab[key] == sampleDef[key])]
else:
print "Need to add more sampleDef key handling code"
IPython.embed()
sys.exit()
if len(newTab) == 0:
print "Hmm... all objects cut? empty newTab"
IPython.embed()
sys.exit()
# Value added useful columns
Ez=[]
for row in newTab:
Ez.append(astCalc.Ez(row[redshiftColumnName]))
newTab.add_column(atpy.Column(Ez, 'E(z)'))
# Add columns we will fit to, scaling and applying log10 as necessary
# We apply pivots here also (undo them, if necessary, elsewhere)
stab=newTab
# We should probably make this default
if xPivot == "median":
xPivot=np.median(newTab[xColumnName])
settingsDict['xPivot']=xPivot
if yPivot == "median":
yPivot=np.median(newTab[yColumnName])
settingsDict['yPivot']=yPivot
if yScaleFactor == "E(z)":
yScaling=np.power(stab["E(z)"], yScaleFactorPower)
elif yScaleFactor == None:
yScaling=np.ones(len(stab))
else:
raise Exception, "didn't understand yScaleFactor"
if xTakeLog10 == True:
xToFit=np.log10(stab[xColumnName]/xPivot)
xErrToFitPlus=np.log10((stab[xColumnName]+stab[xPlusErrColumnName])/xPivot)-xToFit
xErrToFitMinus=xToFit-np.log10((stab[xColumnName]-stab[xMinusErrColumnName])/xPivot)
else:
xToFit=stab[xColumnName]
xErrToFitPlus=stab[xPlusErrColumnName]
xErrToFitMinus=stab[xMinusErrColumnName]
if yTakeLog10 == True:
yToFit=np.log10(yScaling*stab[yColumnName]/yPivot)
yErrToFitPlus=np.log10(yScaling*(stab[yColumnName]+stab[yPlusErrColumnName])/yPivot)-yToFit
yErrToFitMinus=yToFit-np.log10(yScaling*(stab[yColumnName]-stab[yMinusErrColumnName])/yPivot)
else:
yToFit=stab[yColumnName]
yErrToFitPlus=stab[yPlusErrColumnName]
yErrToFitMinus=stab[yMinusErrColumnName]
# Swap
if xToFit.dtype.byteorder == '>':
xToFit=xToFit.byteswap().newbyteorder('=')
stab.add_column(atpy.Column(xToFit, 'xToFit'))
stab.add_column(atpy.Column(xErrToFitPlus, 'xErrToFitPlus'))
stab.add_column(atpy.Column(xErrToFitMinus, 'xErrToFitMinus'))
stab.add_column(atpy.Column(yToFit, 'yToFit'))
stab.add_column(atpy.Column(yErrToFitPlus, 'yErrToFitPlus'))
stab.add_column(atpy.Column(yErrToFitMinus, 'yErrToFitMinus'))
# If we ever get around to fiddling with detection probabilities again, change this...
if 'detPColumnName' in settingsDict.keys():
if settingsDict['detPColumnName'] != 'detP':
stab.add_column(atpy.Column(stab[settingsDict['detPColumnName']], 'detP'))
#stab['detP']=np.ones(len(stab))
#stab['detP']=stab['detP'].byteswap().newbyteorder()
#IPython.embed()
#sys.exit()
else:
stab.add_column(atpy.Column([1.0]*len(stab), 'detP'))
if 'ignoreSelectionFunction' in settingsDict.keys() and settingsDict['ignoreSelectionFunction'] == True:
stab['detP']=np.ones(len(stab))
if settingsDict['symmetriseErrors'] == True:
xAvErr=(stab['xErrToFitPlus']+stab['xErrToFitMinus'])/2.0
yAvErr=(stab['yErrToFitPlus']+stab['yErrToFitMinus'])/2.0
stab['xErrToFitPlus']=xAvErr
stab['xErrToFitMinus']=xAvErr
stab['yErrToFitPlus']=yAvErr
stab['yErrToFitMinus']=yAvErr
# Histograms of redshift and x property distribution, one above the other
# Fiddle with this later...
#print "plots"
#IPython.embed()
#sys.exit()
#fontDict={'size': 16}
#cols=1
#pylab.figure(figsize=(6, 8*cols))
#pylab.subplots_adjust(0.1, 0.06, 0.97, 0.97, 0.03, 0.12)
#pylab.subplot(2, 1, 1)
#pylab.hist(stab['redshift'], bins = numpy.linspace(0.0, 1.5, 16), histtype = 'stepfilled', color =
#'#A0A0A0', ec = '#A0A0A0')
#pylab.xlabel("$z$", fontdict = fontDict)
#pylab.ylabel("N", fontdict = fontDict)
#pylab.ylim(0, 60)
#pylab.subplot(2, 1, 2)
#pylab.hist(stab['temp'], bins = numpy.linspace(0, 12, 13), histtype = 'stepfilled', color =
#'#A0A0A0', ec = '#A0A0A0')
#pylab.xlabel("$T$ (keV)", fontdict = fontDict)
#pylab.ylabel("N", fontdict = fontDict)
##pylab.yticks(ylocs, [""]*len(ylabels))
#pylab.ylim(0, 60)
#pylab.savefig(outDir+os.path.sep+"zT_histograms.pdf")
#pylab.close()
return stab
#-------------------------------------------------------------------------------------------------------------
def MCMCFit(settingsDict, tab):
"""My attempt at fitting using MCMC and maximum likelihood.
settingsDict = dictionary containing MCMC parameters and settings
You can choose whether to use the likelihood for 'bisector' or 'orthogonal' fitting using the 'method' key.
"""
# Can now swap axes for testing purposes
if 'swapAxes' in settingsDict.keys():
swapAxes=settingsDict['swapAxes']
else:
swapAxes=False
print "... swapAxes = ", swapAxes
# Choice of method
method=settingsDict['method']
if method == 'orthogonal':
likelihood=csr.fastOrthogonalLikelihood
variables=['A', 'B', 'C', 'S']
numFreePars=4
elif method == 'bisector':
likelihood=csr.fastBisectorLikelihood
variables=['A', 'B', 'C', 'Sx', 'Sy']
numFreePars=5
settingsDict['variables']=variables # A handy place to store this for cutting down code elsewhere
scales=[]
for v in variables:
scales.append(settingsDict['%sScale' % (v)])
# Start by writing this in python, but calling the likelihood function in cython
# MCMC parameters
numSamples=settingsDict['numSamples'] # Total number of random steps over likelihood surface
burnSamples=settingsDict['burnSamples'] # Throw away initial bunch of this many samples
thinning=settingsDict['thinning'] # Keep only every ith sample - good in some ways, bad in others
# Choice of evolution models
if settingsDict['evoModel'] == '1+z':
log10RedshiftEvo=np.log10(tab[settingsDict['redshiftColumnName']]+1)
elif settingsDict['evoModel'] == 'E(z)':
log10RedshiftEvo=np.log10(tab['E(z)'])
else:
raise Exception, "didn't understand evoModel '%s'" % (evoModel)
#log10RedshiftEvo=np.array(log10RedshiftEvo, dtype = float)
# To start with, we're going to use the same proposal distribution for everything
# But later on we could dig out the correlated random numbers code to generate random parameter values that
# satisfy the covariance we see between parameters, which would speed things up.
cPars=selectStartParsFromPriors(settingsDict)
#print "... starting values [A, B, C, S] = [%.2f, %.2f, %.2f, %.2f]" % (cA, cB, cC, cS)
# Byte swapping festival to keep cython happy
yToFit=byteSwapArr(tab['yToFit'])
yErrToFitPlus=byteSwapArr(tab['yErrToFitPlus'])
yErrToFitMinus=byteSwapArr(tab['yErrToFitMinus'])
xToFit=byteSwapArr(tab['xToFit'])
xErrToFitPlus=byteSwapArr(tab['xErrToFitPlus'])
xErrToFitMinus=byteSwapArr(tab['xErrToFitMinus'])
detP=byteSwapArr(tab['detP'])
# Another thing... fix this later properly... but if everything isn't same data type, cython falls over
yToFit=np.array(tab['yToFit'], dtype = np.float64)
yErrToFitPlus=np.array(tab['yErrToFitPlus'], dtype = np.float64)
yErrToFitMinus=np.array(tab['yErrToFitMinus'], dtype = np.float64)
xToFit=np.array(tab['xToFit'], dtype = np.float64)
xErrToFitPlus=np.array(tab['xErrToFitPlus'], dtype = np.float64)
xErrToFitMinus=np.array(tab['xErrToFitMinus'], dtype = np.float64)
log10RedshiftEvo=np.array(log10RedshiftEvo, dtype = np.float64)
detP=np.array(tab['detP'], dtype = np.float64)
if swapAxes == False:
try:
cProb, probArray=likelihood(cPars, yToFit, yErrToFitPlus, yErrToFitMinus, xToFit, xErrToFitPlus,
xErrToFitMinus, log10RedshiftEvo, detP)
except:
print "byte swapping problem?"
IPython.embed()
sys.exit()
else:
cProb, probArray=likelihood(cPars, xToFit, xErrToFitPlus, xErrToFitMinus, yToFit, yErrToFitPlus,
yErrToFitMinus, log10RedshiftEvo, detP)
if cProb == 0:
raise Exception, "initial position in MCMC chain has zero probability - change initial values/fiddle with priors in .par file?"
allPars=[] # == 'the Markov chain'
likelihoods=[]
# Metropolis-Hastings (actually just Metropolis since our candidate distribution is symmetric)
for k in range(numSamples):
# Progress update
tenPercent=numSamples/10
for j in range(0,11):
if k == j*tenPercent:
print "... "+str(j*10)+"% complete ..."
pPars=makeProposal(cPars, scales, settingsDict)
if swapAxes == False:
pProb, probArray=likelihood(pPars, yToFit, yErrToFitPlus, yErrToFitMinus, xToFit, xErrToFitPlus,
xErrToFitMinus, log10RedshiftEvo, detP)
else:
pProb, probArray=likelihood(pPars, xToFit, xErrToFitPlus, xErrToFitMinus, yToFit, yErrToFitPlus,
yErrToFitMinus, log10RedshiftEvo, detP)
if np.isinf(pProb) == True:
print "Hmm - infinite probability?"
IPython.embed()
sys.exit()
# Changed below because we're now dealing with log10 probabilities instead of the actual numbers
alpha=pProb-cProb
acceptProposal=False
if alpha > 0:
acceptProposal=True
else:
U=math.log10(np.random.uniform(0, 1))
if U <= alpha:
acceptProposal=True
# Our prior is uniform, so we're really just using it to force the answer into a range
# i.e. if it's not 1.0, then something has strayed out of the box.
pPrior=getPPrior(pPars, settingsDict)
if acceptProposal == True and pPrior == 1.0:
cPars=pPars
cProb=pProb
# Only keep samples after burning in and also thin as we go along
if k > burnSamples and k % thinning == 0:
# If we want to plot the trace (i.e. to check mixing) then we want to store these always in some fashion
# As it is, we're only keeping the ones that are drawn from the probability distributions
allPars.append(cPars)
likelihoods.append(pProb)
allPars=np.array(allPars)
likelihoods=np.array(likelihoods)
# If we swap axes, it's just easier to transform back into a form we know
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
a=-allPars[:, 0]/allPars[:, 1]
b=1.0/allPars[:, 1]
allPars[:, 0]=a
allPars[:, 1]=b
# Geweke test to check if the chain has converged
# If z < 2 then we're converged
index10Percent=int(len(allPars)*0.1)
index50Percent=int(len(allPars)*0.5)
mean10Percent=allPars[:index10Percent].mean(axis = 0)
mean50Percent=allPars[::-1][:index50Percent].mean(axis = 0)
var10Percent=allPars[:index10Percent].var(axis = 0)
var50Percent=allPars[::-1][:index50Percent].var(axis = 0)
zStatistic=(mean10Percent-mean50Percent)/np.sqrt(var10Percent+var50Percent)
zStatistic=np.nan_to_num(zStatistic)
# Zap entries in here that are fixed (avoids round off or div 0 making them look large when we don't care)
for i in range(len(variables)):
v=variables[i]
if settingsDict['%sFit' % (v)] == 'fixed':
zStatistic[i]=0.0
numFreePars=numFreePars-1
# Max likelihood values are simply the mean of the values in the probability distribution
# 1-sigma errors are similarly easy (could also use calc1SigmaError routine, but this is quicker)
resultsDict={}
for i in range(len(variables)):
v=variables[i]
resultsDict['%s' % (v)]=allPars[:, i].mean()
resultsDict['%sErr' % (v)]=calc68Percentile(allPars[:, i])
# Scott's translation of orthogonal scatter S into scatter in y-variable at fixed x-variable
if method == 'orthogonal':
s=allPars[:, 3]/np.cos(np.arctan(allPars[:, 1]))
resultsDict['s']=s.mean()
resultsDict['sErr']=calc68Percentile(s)
# We have numFreePars above
lnL=np.log(np.power(10, likelihoods))
resultsDict['AIC']=2*numFreePars-2*lnL.max()
resultsDict['AICc']=resultsDict['AIC']+(2*numFreePars*(numFreePars+1))/(float(len(tab))-numFreePars-1)
resultsDict['pars']=allPars
resultsDict['zStatistic']=zStatistic
# chi-sq
#yMod=(xToFit*resultsDict['B'])+resultsDict['A']+resultsDict['C']*log10RedshiftEvo
#chiSq=np.sum(np.power(yToFit-yMod, 2)/np.power(yErrToFitPlus, 2))
#resultsDict['chiSq']=chiSq
#print "check chiSq"
#IPython.embed()
#sys.exit()
return resultsDict
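#-------------------------------------------------------------------------------------------------------------
# Minimal sketch of the log10-space Metropolis acceptance rule used above in
# MCMCFit (illustration only; nothing in this module calls it):
#   def acceptProposalLog10Sketch(cLogProb, pLogProb):
#       alpha=pLogProb-cLogProb          # log10 of the usual ratio p/c
#       if alpha > 0:
#           return True                  # uphill moves are always accepted
#       U=math.log10(np.random.uniform(0, 1))
#       return U <= alpha                # downhill accepted with prob 10**alpha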
#-------------------------------------------------------------------------------------------------------------
def makeProposal(pars, scales, settingsDict):
"""Generates random set of parameters in format [A, B, C, S] for feeding into likelihood function.
Proposal distributions are assumed Gaussian with scales [AScale, BScale, CScale, SScale].
"""
# This makes sure that if we're testing by swapping axes, we can use the same prior scales
# To the same space as our scales
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
b=1.0/pars[1]
a=-pars[0]/pars[1]
pars[0]=a
pars[1]=b
prop=np.random.normal(pars, scales)
# And back...
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
b=1.0/prop[1]
a=-prop[0]/prop[1]
prop[0]=a
prop[1]=b
# Force scatters +ve
prop[3:]=abs(prop[3:])
if settingsDict['AFit'] == 'fixed':
prop[0]=settingsDict['A0']
if settingsDict['BFit'] == 'fixed':
prop[1]=settingsDict['B0']
if settingsDict['CFit'] == 'fixed':
prop[2]=settingsDict['C0']
if settingsDict['method'] == 'orthogonal':
if settingsDict['SFit'] == 'fixed':
prop[3]=settingsDict['S0']
elif settingsDict['method'] == 'bisector':
if settingsDict['SxFit'] == 'fixed':
prop[3]=settingsDict['Sx0']
if settingsDict['SyFit'] == 'fixed':
prop[4]=settingsDict['Sy0']
return prop
#-------------------------------------------------------------------------------------------------------------
def make1DProbDensityPlots(fitResults, settingsDict, outDir):
"""Makes 1D plots of probability density distributions
"""
sigmaScale=5.0
bins=30
variables=settingsDict['variables']
axes=range(len(variables))
# Individual plots
#for v, a in zip(variables, axes):
#if settingsDict['%sFit' % (v)] == 'free':
#x=np.linspace(fitResults['%s' % (v)]-sigmaScale*fitResults['%sErr' % (v)],
#fitResults['%s' % (v)]+sigmaScale*fitResults['%sErr' % (v)], bins)
#P1D=LTCythonMCMC.fast1DProbProjection(x, a, fitResults['pars'])
#make1DPlot(x, P1D, '%s' % (v), '%s = %.3f $\pm$ %.3f' % (v, fitResults['%s' % (v)], fitResults['%sErr' % (v)]),
#outDir+os.path.sep+"1DProb_%s.pdf" % (v))
# Make an uber plot with multiple panels
cols=0
for v, a in zip(variables, axes):
if settingsDict['%sFit' % (v)] == 'free':
cols=cols+1
plt.figure(figsize=(4.5*cols, 3.94))
plt.subplots_adjust(0.02, 0.12, 0.98, 0.92, 0.1, 0.1)
count=0
for v, a in zip(variables, axes):
if settingsDict['%sFit' % (v)] == 'free':
count=count+1
x=np.linspace(fitResults['%s' % (v)]-sigmaScale*fitResults['%sErr' % (v)],
fitResults['%s' % (v)]+sigmaScale*fitResults['%sErr' % (v)], bins)
P1D=csr.fast1DProbProjection(x, a, fitResults['pars'])
P1D=P1D/P1D.max()
plt.subplot(1, cols, count)
ax=plt.gca()
y=P1D
fitLabel='%s = %.3f $\pm$ %.3f' % (v, fitResults['%s' % (v)], fitResults['%sErr' % (v)])
xLabel='%s' % (v)
plt.plot(x, y, 'k-', label = fitLabel)
plt.xlabel(xLabel, fontdict = {'size': 14})
plt.ylabel("")
plt.yticks([], [])
ax.xaxis.set_major_locator(matplotlib.ticker.MaxNLocator(6))
plt.ylim(0, 1.2)
leg=plt.legend(prop = {'size': 12})
leg.draw_frame(False)
plt.draw()
plt.savefig(outDir+os.path.sep+"1DProb_allPars.pdf")
plt.close()
#-------------------------------------------------------------------------------------------------------------
def make1DPlot(x, y, xLabel, fitLabel, outFileName):
"""Actually makes the 1D probability plots
"""
plt.plot(x, y, label = fitLabel)
plt.xlabel(xLabel)
plt.ylabel("")
plt.legend()
plt.savefig(outFileName)
plt.close()
#-------------------------------------------------------------------------------------------------------------
def makeContourPlots(fitResults, outDir, sampleLabel):
"""This takes fit results and turns it into contour plots.
"""
mlA, mlAErr=fitResults['A'], fitResults['AErr']
mlB, mlBErr=fitResults['B'], fitResults['BErr']
mlC, mlCErr=fitResults['C'], fitResults['CErr']
mlS, mlSErr=fitResults['S'], fitResults['SErr']
pars=fitResults['pars']
# Make 2d contour plots of valid combinations, determined by if they have a non null 1 sigma error
As=np.linspace(mlA-5.0*mlAErr-math.fmod(mlA-5.0*mlAErr, 0.1), mlA+7.0*mlAErr-math.fmod(mlA+7.0*mlAErr, 0.1), 81)
Bs=np.linspace(mlB-5.0*mlBErr-math.fmod(mlB-5.0*mlBErr, 0.1), mlB+7.0*mlBErr-math.fmod(mlB+7.0*mlBErr, 0.1), 81)
Cs=np.linspace(mlC-5.0*mlCErr-math.fmod(mlC-5.0*mlCErr, 0.1), mlC+7.0*mlCErr-math.fmod(mlC+7.0*mlCErr, 0.1), 81)
Ss=np.linspace(mlS-5.0*mlSErr-math.fmod(mlS-5.0*mlSErr, 0.05), mlS+7.0*mlSErr-math.fmod(mlS+7.0*mlSErr, 0.05), 81)
if mlAErr > 0 and mlBErr > 0:
outFileName=outDir+os.path.sep+"contours_AvB_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(As, Bs, 0, 1, pars)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
probContourPlot(As, Bs, "A", "B", 0.1, 0.1, mlA, mlB, mlAErr, mlBErr, PDist2D, outFileName)
if mlAErr > 0 and mlCErr > 0:
outFileName=outDir+os.path.sep+"contours_AvC_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(As, Cs, 0, 2, pars)
probContourPlot(As, Cs, "A", "C", 0.1, 0.5, mlA, mlC, mlAErr, mlCErr, PDist2D, outFileName)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
if mlAErr > 0 and mlSErr > 0:
outFileName=outDir+os.path.sep+"contours_AvS_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(As, Ss, 0, 3, pars)
probContourPlot(As, Ss, "A", "S", 0.1, 0.05, mlA, mlS, mlAErr, mlSErr, PDist2D, outFileName)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
if mlBErr > 0 and mlCErr > 0:
outFileName=outDir+os.path.sep+"contours_BvC_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(Bs, Cs, 1, 2, pars)
probContourPlot(Bs, Cs, "B", "C", 0.1, 0.5, mlB, mlC, mlBErr, mlCErr, PDist2D, outFileName)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
#-------------------------------------------------------------------------------------------------------------
def probContourPlot(par1Values, par2Values, par1Label, par2Label, par1TickStep, par2TickStep, mlPar1, mlPar2,
mlPar1Err, mlPar2Err, PDist2D, outFileName):
"""Make a 2d contour plot of probability surface of given parameters.
par1Values = values for parameter 1 (plotted on Y axis)
par2Values = values for parameter 2 (plotted on X axis)
par1Label = text label for Y axis
par2Label = text label for X axis
par1TickStep = tick step along Y axis
par2TickStep = tick step along X axis
mlPar1 = maximum likelihood value for parameter 1
mlPar2 = maximum likelihood value for parameter 2
mlPar1Err = 1d 1-sigma error in parameter 1
mlPar2Err = 1d 1-sigma error in parameter 2
PDist2D = 2d likelihood surface, made using fast2DProbProjection
"""
tck1=interpolate.splrep(par1Values, np.arange(par1Values.shape[0]))
par1TickLabels=np.arange(par1Values.min(), par1Values.max(), par1TickStep)
par1TickIndices=interpolate.splev(par1TickLabels, tck1)
plt.yticks(par1TickIndices, par1TickLabels)
tck2=interpolate.splrep(par2Values, np.arange(par2Values.shape[0]))
par2TickLabels=np.arange(par2Values.min(), par2Values.max(), par2TickStep)
par2TickIndices=interpolate.splev(par2TickLabels, tck2)
plt.xticks(par2TickIndices, par2TickLabels)
# We have to smooth to get decent looking contours
# Gaussian smoothing preserves the normalisation
# NOTE: smoothing only needed if very fine grid
PDist2D=ndimage.gaussian_filter(PDist2D, 1)
# Work out where to put contours
sigma1Level=calc2DProbThreshold(PDist2D, 0.683)
sigma2Level=calc2DProbThreshold(PDist2D, 0.95)
plt.contour(PDist2D, [sigma1Level, sigma2Level], colors = 'b')
# Save plot - trim down area first (?) and add axes labels
plt.plot(interpolate.splev(mlPar2, tck2), interpolate.splev(mlPar1, tck1), 'r*',
label = "%s = %.2f $\pm$ %.2f, %s = %.2f $\pm$ %.2f" % (par1Label, mlPar1, mlPar1Err, par2Label, mlPar2, mlPar2Err))
plt.legend(numpoints = 1)
plt.xlabel(par2Label)
plt.ylabel(par1Label)
if outFileName != None:
plt.savefig(outFileName)
plt.close()
#-------------------------------------------------------------------------------------------------------------
def calc1SigmaError(par1d, prob1d, mlParValue):
"""Calculates 1d 1-sigma error on a parameter (marginalised, is the word I'm looking for I think) relative
to the maximum likelihood value.
NOTE: Now we're using MCMC, the regular calc68Percentile routine below works just fine, and is quicker
than this.
"""
norm=np.trapz(prob1d, par1d)
prob1d=prob1d/norm
tckPDist=interpolate.splrep(par1d, prob1d)
target=0.683 # 1 sigma
dRange=np.linspace(0.0, par1d.max()-mlParValue, 1000) # we need to work out how to choose sensible values
bestDiff=1e6
dBest=1e6
for d in dRange:
integrationRange=np.linspace(mlParValue-d, mlParValue+d, 1000)
diff=abs(target-np.trapz(interpolate.splev(integrationRange, tckPDist), integrationRange))
if diff < bestDiff:
bestDiff=diff
dBest=d
return dBest
#-------------------------------------------------------------------------------------------------------------
def calc2DProbThreshold(PDist2D, probThresh):
"""Calculates threshold probability per pixel in PDist2D needed to draw confidence contours at e.g.
1-sigma, 2-sigma level
"""
p=PDist2D.flatten()
p.sort()
p=p[::-1]
pCumSum=p.cumsum()
diff=abs(pCumSum-probThresh)
pIndex=diff.tolist().index(diff.min())
pLevel=p[pIndex]
return pLevel
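# Usage sketch: for a normalised 2d probability grid P (P.sum() ~ 1.0),
#   calc2DProbThreshold(P, 0.683)
# returns the per-pixel level whose super-level set encloses ~68.3% of the
# total probability, i.e. the 1-sigma contour level fed to plt.contour above.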
#------------------------------------------------------------------------------------------------------------
def calc68Percentile(arr):
"""Calculates the 68-percentile (i.e. equivalent to 1-sigma error) from an array.
"""
res=np.abs(arr-np.median(arr))
res=np.sort(res)
index=int(round(0.683*arr.shape[0]))
try:
err=res[index]
except:
print "index error?"
IPython.embed()
sys.exit()
return err
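# Sanity-check sketch: for draws from a unit normal the 68.3-percentile of
# |x - median(x)| is ~1.0, so
#   calc68Percentile(np.random.normal(0.0, 1.0, 100000))
# should return approximately 1.0.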
#-------------------------------------------------------------------------------------------------------------
def makeScalingRelationPlot(sampleTab, fitResults, outDir, sampleDict, settingsDict):
"""Make a scaling relation plot.
sampleDict = the dictionary defining the sample (e.g. min z, max z etc.)
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
# The plot
plt.figure(figsize=(10, 10))
plt.axes([0.1, 0.1, 0.85, 0.85])
if yScaleFactor != None:
yPlot=np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yColumnName]
yPlotErrs=np.array([np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yMinusErrColumnName],
np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yPlusErrColumnName]])
else:
yPlot=sampleTab[yColumnName]
yPlotErrs=np.array([sampleTab[yMinusErrColumnName],
sampleTab[yPlusErrColumnName]])
plt.errorbar(sampleTab[xColumnName], yPlot,
yerr = yPlotErrs,
xerr = np.array([sampleTab[xMinusErrColumnName],
sampleTab[xPlusErrColumnName]]),
fmt = 'kD', mec = 'k', label = sampleDict['label']+" (N=%d)" % (len(sampleTab)))
if xTakeLog10 == True and yTakeLog10 == True:
plt.loglog()
elif xTakeLog10 == True and yTakeLog10 == False:
plt.semilogx()
elif xTakeLog10 == False and yTakeLog10 == True:
plt.semilogy()
#cmdata=np.outer(np.linspace(0, 1, 10), np.linspace(0, 1, 10)) # to easily make a colorbar 0-1
#cmim=plt.imshow(cmdata, cmap = "gray")
#ax=plt.axes([0.1, 0.17, 0.85, 0.78])
if np.sum(np.equal(sampleTab['detP'], 1.0)) == len(sampleTab):
shadeByDetP=False
else:
shadeByDetP=True
if shadeByDetP == True:
for row, pY in zip(sampleTab, yPlot):
plt.plot(row[xColumnName], [pY], 'D', color = (row['detP'], row['detP'], row['detP']))
plotRange=np.linspace(settingsDict['xPlotMin'], settingsDict['xPlotMax'], 100)
if xTakeLog10 == True and yTakeLog10 == True:
yFit=settingsDict['yPivot']*np.power(10, fitResults['A'])*np.power((plotRange/xPivot), fitResults['B'])
elif xTakeLog10 == False and yTakeLog10 == False:
yFit=settingsDict['yPivot']*(fitResults['A']+fitResults['B']*(plotRange/xPivot))
else:
raise Exception, "add semilogx, semilogy fit line code"
if xPivot != 1.0:
fitLabel='%s (%s) = 10$^{%.2f \pm %.2f}$ (%s/%.1f %s)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], xPivot, settingsDict['xPlotLabelUnits'], fitResults['B'], fitResults['BErr'])
else:
fitLabel='%s (%s) = 10$^{%.2f \pm %.2f}$ (%s)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], fitResults['B'], fitResults['BErr'])
yLabel="%s (%s)" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'])
if settingsDict['yScaleFactor'] == "E(z)":
fitLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+fitLabel
yLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+yLabel
plt.plot(plotRange, yFit, 'b--', label = fitLabel)
## Below is just diagnostic
#if sampleLabel == 'REXCESS':
#prattLabel='$L_{\sf X}$ (erg s$^{-1}$) = 10$^{44.85 \pm 0.06}$ ($T/5.0$ keV)$^{3.35 \pm 0.32}$'
#prattLabel="$E^{-1}(z)$ "+prattLabel
#prattLabel="P09: "+prattLabel
#prattLX=np.power(10, 44.85)*np.power((plotRange/5.0), 3.35)
#plt.plot(plotRange, prattLX, 'r:', label = prattLabel)
#sample['plotLabel']=""
plt.ylabel(yLabel, size = 16)
plt.xlabel("%s (%s)" % (settingsDict['xPlotLabel'], settingsDict['xPlotLabelUnits']), size = 16)
plt.xlim(settingsDict['xPlotMin'], settingsDict['xPlotMax'])
plt.ylim(settingsDict['yPlotMin'], settingsDict['yPlotMax'])
if settingsDict['showPlotLegend'] == True:
leg=plt.legend(loc = 'upper left', prop = {'size': 16}, scatterpoints = 1, numpoints = 1)
leg.draw_frame(False)
plt.draw()
ax=plt.gca()
plt.text(0.95, 0.05, sampleDict['plotLabel'], ha = 'right', va = 'center', transform = ax.transAxes,
fontdict = {"size": 16, "linespacing" : 1.2, 'family': 'serif'})
outFileName=outDir+os.path.sep+"scalingRelation_%s_%s.pdf" % (yColumnName, xColumnName)
plt.savefig(outFileName)
plt.close()
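#-------------------------------------------------------------------------------------------------------------
def _exampleEzScalingSketch():
    """Minimal usage sketch of the E(z) self-similar scaling applied above (illustration only,
    not part of the original pipeline). Assumes numpy is imported as np at module level, as
    elsewhere in this module; the values below are made up (E(z) roughly matches flat LCDM
    with Omega_m = 0.3 at z = 0.0, 0.25, 0.5).
    """
    Ez=np.array([1.0, 1.13, 1.31])      # E(z) at z ~ 0.0, 0.25, 0.5
    rawY=np.array([1e44, 2e44, 4e44])   # e.g. luminosities in erg/s
    yScaleFactorPower=-1                # as set via settingsDict['yScaleFactorPower']
    return np.power(Ez, yScaleFactorPower)*rawY   # this is what gets plotted as yPlot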
#-------------------------------------------------------------------------------------------------------------
def makeScalingRelationPlot_ABC(sampleTab, fitResults, outDir, sampleDict, settingsDict, mode = 'normal'):
"""Make a scaling relation plot with y values scaling by normalisation and z evolution.
sampleDict = the dictionary defining the sample (e.g. min z, max z etc.)
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
# The plot...
    if yScaleFactor is not None:
yPlot=np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yColumnName]
yPlotErrs=np.array([np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yMinusErrColumnName],
np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yPlusErrColumnName]])
else:
yPlot=sampleTab[yColumnName]
yPlotErrs=np.array([sampleTab[yMinusErrColumnName],
sampleTab[yPlusErrColumnName]])
fitLabel='%s = 10$^{%.2f \pm %.2f}$ (%s/%d)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], xPivot, fitResults['B'], fitResults['BErr'])
yLabel="%s (%s)" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'])
if settingsDict['evoModel'] == '1+z':
yPlot=np.power(sampleTab[redshiftColumnName]+1, -fitResults['C'])*yPlot
yPlotErrs=np.power(sampleTab[redshiftColumnName]+1, -fitResults['C'])*yPlotErrs
fitLabel=fitLabel+' (1+$z$)$^{%s}$' % (fitResults['plotLabel_C'])
yLabel=yLabel.replace("(%s)" % (settingsDict['yPlotLabelUnits']), "(1+$z$)$^{%.1f}$ (%s)" % (-1*fitResults['C'], settingsDict['yPlotLabelUnits']))
elif settingsDict['evoModel'] == 'E(z)':
yPlot=np.power(sampleTab['E(z)'], -fitResults['C'])*yPlot
yPlotErrs=np.power(sampleTab['E(z)'], -fitResults['C'])*yPlotErrs
fitLabel=fitLabel+' $E(z)^{%s}$' % (fitResults['plotLabel_C'])
yLabel=yLabel.replace("(%s)" % (settingsDict['yPlotLabelUnits']), "$E(z)^{%.1f}$ (%s)" % (-1*fitResults['C'], settingsDict['yPlotLabelUnits']))
if settingsDict['yScaleFactor'] == "E(z)":
fitLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+fitLabel
yLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+yLabel
if mode == 'normal':
plt.figure(figsize=(8, 8))
ax=plt.axes([0.11, 0.1, 0.86, 0.85])
plotRange=np.linspace(0.1*sampleTab[xColumnName].min(), 10*sampleTab[xColumnName].max(), 100)
yFit=np.power(10, fitResults['A'])*np.power((plotRange/xPivot), fitResults['B'])
plt.plot(plotRange, yFit, 'b--', label = fitLabel)
outFileName=outDir+os.path.sep+"scalingRelation_%s_%s_ABC.pdf" % (settingsDict['yColumnName'], settingsDict['xColumnName'])
# Old
#plt.errorbar(sampleTab['temp'], plotLXs,
#yerr = plotLXErrs,
#xerr = np.array([sampleTab['temp_min'],
#sampleTab['temp_max']]),
#fmt = 'kD', mec = 'k', label = sampleLabel+" (N=%d)" % (len(sampleTab)))
# New (coding by redshift)
zBins=[[0.0, 0.25], [0.25, 0.5], [0.5, 1.5]]
labels=["0.0 < $z$ < 0.25", "0.25 < $z$ < 0.5", "0.5 < $z$ < 1.5"]
#colours=['k', [0.5, 0, 1], [1, 0.5, 0]]
colours=['k', 'c', 'r']
symbols=['D', 'o', '^']
for zBin, col, s, l in zip(zBins, colours, symbols, labels):
mask=np.logical_and(np.greater(sampleTab[redshiftColumnName], zBin[0]), np.less_equal(sampleTab[redshiftColumnName], zBin[1]))
plt.errorbar(sampleTab[xColumnName][mask], yPlot[mask],
yerr = yPlotErrs[:, mask],
xerr = np.array([sampleTab[xMinusErrColumnName][mask],
sampleTab[xPlusErrColumnName][mask]]),
fmt = s, ecolor = col, mfc = col, mec = col, label = l)
    elif mode == 'PDetCoded':
        # figure and colorbar axes must exist before plotting (order fixed)
        plt.figure(figsize=(8, 8))
        plt.axes([0.5, 0.5, 0.1, 0.1])
        cmdata=np.outer(np.linspace(0, 1, 10), np.linspace(0, 1, 10)) # to easily make a colorbar 0-1
        cmim=plt.imshow(cmdata, cmap = "gray")
        ax=plt.axes([0.1, 0.17, 0.85, 0.78])
        plotRange=np.linspace(0.1, 22.0, 100)
        fitLXs=np.power(10, fitResults['A'])*np.power((plotRange/xPivot), fitResults['B'])    # xPivot (pivotT was undefined)
        #fitLabel='$L_{\sf X}$ (erg s$^{-1}$) = 10$^{%.2f \pm %.2f}$ ($T/%.1f$ keV)$^{%.2f \pm %.2f}$ (1+$z$)$^{%.2f \pm %.2f}$' % (fitResults['A'], fitResults['AErr'], pivotT, fitResults['B'], fitResults['BErr'], fitResults['C'], fitResults['CErr'])
        plt.plot(plotRange, fitLXs, 'b--', label = fitLabel)
        outFileName=outDir+os.path.sep+"L-T_ABC_PDetCoded.pdf"
        for row, pY in zip(sampleTab, yPlot):    # yPlot (plotLXs was undefined)
            plt.plot(row[xColumnName], [pY], 'D', color = (row['detP'], row['detP'], row['detP']))
        cmax=plt.axes([0.1, 0.075, 0.85, 0.1], frameon=False)
        plt.xticks([], [])
        plt.yticks([], [])
        plt.colorbar(cmim, orientation = 'vertical', aspect = 40.0)
        plt.figtext(0.52, 0.03, "P$_{\sf det}$", va = 'center', ha = 'center')
        plt.axes(ax)
else:
raise Exception, "didn't understand mode"
plt.loglog()
plt.ylabel(yLabel, size = 16)
plt.xlabel("%s (%s)" % (settingsDict['xPlotLabel'], settingsDict['xPlotLabelUnits']), size = 16)
plt.xlim(settingsDict['xPlotMin'], settingsDict['xPlotMax'])
plt.ylim(settingsDict['yPlotMin'], settingsDict['yPlotMax'])
#leg=plt.legend(loc = 'upper left', prop = {'size': 16}, scatterpoints = 1, numpoints = 1)
#leg.draw_frame(False)
plt.draw()
ax=plt.gca()
plt.text(0.95, 0.05, sampleDict['plotLabel'], ha = 'right', va = 'center', transform = ax.transAxes,
fontdict = {"size": 16, "linespacing" : 1.2, 'family': 'serif'})
plt.savefig(outFileName)
plt.close()
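#-------------------------------------------------------------------------------------------------------------
def _exampleEvoDescalingSketch():
    """Minimal sketch of the evolution de-scaling used above for the '1+z' evoModel option
    (illustration only, not part of the original pipeline). The fitted C and the data values
    are made up; the 'E(z)' option works the same way with np.power(Ez, -C). Assumes numpy is
    imported as np at module level, as elsewhere in this module.
    """
    z=np.array([0.1, 0.4, 0.8])
    yPlot=np.array([1.2e44, 3.1e44, 7.5e44])
    C=2.0                              # plays the role of fitResults['C'] above
    return np.power(1+z, -C)*yPlot     # divides out the fitted (1+z)^C evolution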
#-------------------------------------------------------------------------------------------------------------
def makeScalingRelationPlots_sideBySide(sampleDefs, outDir, settingsDict):
"""Makes side by side subpanel plots of all the scaling relations in sampleDefs
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
# Make an uber plot with multiple panels
# NOTE: add adjustable layout later...
cols=len(sampleDefs)
plt.figure(figsize=(6*cols, 6))
plt.subplots_adjust(0.05, 0.1, 0.99, 0.99, 0.02, 0.02)
count=0
for s in sampleDefs:
sampleTab=s['stab']
fitResults=s['fitResults']
count=count+1
plt.subplot(1, cols, count)
        if yScaleFactor is not None:
yPlot=np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yColumnName]
yPlotErrs=np.array([np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yMinusErrColumnName],
np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yPlusErrColumnName]])
else:
yPlot=sampleTab[yColumnName]
yPlotErrs=np.array([sampleTab[yMinusErrColumnName],
sampleTab[yPlusErrColumnName]])
plt.errorbar(sampleTab[xColumnName], yPlot,
yerr = yPlotErrs,
xerr = np.array([sampleTab[xMinusErrColumnName],
sampleTab[xPlusErrColumnName]]),
fmt = 'kD', mec = 'k', label = s['label']+" (N=%d)" % (len(sampleTab)))
plt.loglog()
plotRange=np.linspace(0.1*sampleTab[xColumnName].min(), 10*sampleTab[xColumnName].max(), 100)
yFit=settingsDict['yPivot']*np.power(10, fitResults['A'])*np.power((plotRange/xPivot), fitResults['B'])
fitLabel='%s (%s) = 10$^{%.2f \pm %.2f}$ (%s/%.1f %s)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], xPivot, settingsDict['xPlotLabelUnits'], fitResults['B'], fitResults['BErr'])
yLabel="%s (%s)" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'])
if settingsDict['yScaleFactor'] == "E(z)":
fitLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+fitLabel
yLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+yLabel
plt.plot(plotRange, yFit, 'b--', label = fitLabel)
plt.ylabel(yLabel, size = 16)
plt.xlabel("%s (%s)" % (settingsDict['xPlotLabel'], settingsDict['xPlotLabelUnits']), size = 16)
ax=plt.gca()
plt.text(0.95, 0.05, s['plotLabel'], ha = 'right', va = 'center', transform = ax.transAxes,
fontdict = {"size": 16, "linespacing" : 1.2, 'family': 'serif'})
if count > 1:
ylocs, ylabels=plt.yticks()
plt.ylabel("")
plt.yticks(ylocs, [""]*len(ylabels))
plt.xlim(settingsDict['xPlotMin'], settingsDict['xPlotMax'])
plt.ylim(settingsDict['yPlotMin'], settingsDict['yPlotMax'])
outFileName=outDir+os.path.sep+"scalingRelation_multiPlot_%s_%s.pdf" % (yColumnName, xColumnName)
plt.savefig(outFileName)
plt.close()
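#-------------------------------------------------------------------------------------------------------------
def _exampleSampleDefsSketch():
    """Minimal sketch of the sampleDefs structure consumed by makeScalingRelationPlots_sideBySide
    (illustration only). The keys shown are inferred from how each entry is accessed above; 'stab'
    would normally be a table object with the x/y columns plus 'E(z)', and 'fitResults' the output
    of the fitting routine. All values below are made up.
    """
    return [{'stab': None,   # table with the data columns and 'E(z)'
             'fitResults': {'A': 0.15, 'AErr': 0.05, 'B': 2.8, 'BErr': 0.3},
             'label': 'example sample',
             'plotLabel': 'z < 0.5'}]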
#-------------------------------------------------------------------------------------------------------------
def makeRoundedPlotLabelStrings(fitResults, variables, numSigFig = 1):
"""Add plot labels to fitResults, to given number of sig fig, taking care of rounding
NOTE: disabled the rounding for now
"""
# Not rounding, just dp not sf
dps=[2, 2, 1, 3, 3]
for p, dp in zip(variables, dps):
if fitResults['%sErr' % (p)] != 0:
fmt="%."+str(dp)+"f"
valStr=fmt % (fitResults['%s' % (p)])
errStr=fmt % (fitResults['%sErr' % (p)])
fitResults['plotLabel_%s' % (p)]="%s \pm %s" % (valStr, errStr)
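#-------------------------------------------------------------------------------------------------------------
def _exampleRoundedPlotLabelsSketch():
    """Minimal usage sketch for makeRoundedPlotLabelStrings (illustration only; the fit values
    below are made up). With the dps list above, 'A' and 'B' are formatted to 2 decimal places.
    """
    fitResults={'A': 44.853, 'AErr': 0.061, 'B': 3.352, 'BErr': 0.318}
    makeRoundedPlotLabelStrings(fitResults, ['A', 'B'])
    # fitResults['plotLabel_A'] is now '44.85 \pm 0.06'
    # fitResults['plotLabel_B'] is now '3.35 \pm 0.32'
    return fitResults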
#-------------------------------------------------------------------------------------------------------------
def makeNormEvoPlot(stab, fitResults, outDir, settingsDict):
"""Makes plot of evolution of the normalisation.
"""
zs=np.linspace(0, 2.0, 100)
Ez=[]
for z in zs:
Ez.append(astCalc.Ez(z))
Ez=np.array(Ez)
plt.figure(figsize=(8,6))
plt.axes([0.13, 0.1, 0.85, 0.86])
xColumnName=settingsDict['xColumnName']
yColumnName=settingsDict['yColumnName']
redshiftColumnName=settingsDict['redshiftColumnName']
yLabel="%s / %s$_{Fit (z=0)}$" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabel'])
# If we have applied E(z)^{some power}, we want to plot that expected scaling,
# as well as a null line for no evolution
if settingsDict['yScaleFactor'] == 'E(z)':
dataNormalisation=((np.power(stab['E(z)'], settingsDict['yScaleFactorPower'])*stab[yColumnName])/np.power(stab[xColumnName]/settingsDict['xPivot'], fitResults['B']))/np.power(10, fitResults['A'])
nullLine=np.power(Ez, settingsDict['yScaleFactorPower']) # because E(z)^{some power} is flat in this form, null line is not
yScalingLine=np.ones(len(Ez)) # because we've scaled it out it's flat
yLabel="($E^{-1}(z)$ %s) / %s$_{Fit (z=0)}$" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabel'])
else:
dataNormalisation=(stab[yColumnName]/np.power(stab[xColumnName]/settingsDict['xPivot'], fitResults['B']))/np.power(10, fitResults['A'])
        nullLine=np.ones(len(Ez)) # flat at unity: no evolution relative to the z=0 fit (zeros would be invisible on the log axis)
yScalingLine=None
yLabel="%s / %s$_{Fit (z=0)}$" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabel'])
dataLabel='%s$_{Fit (z=0)}$ = (%s/%d)$^{%.2f}$ / 10$^{%.2f}$' % (settingsDict['yPlotLabel'], settingsDict['xPlotLabel'], settingsDict['xPivot'], fitResults['B'], fitResults['A'])
if settingsDict['yScaleFactor'] == 'E(z)':
# Look for fractions
if settingsDict['yScaleFactorPower'] == -1:
yScalingLineLabel='$E(z)$'
elif abs(settingsDict['yScaleFactorPower']) == 2/3.0:
yScalingLineLabel='$E(z)$'
powerFactor=settingsDict['yScaleFactorPower']
# Need to swap power, remember we scaled these out...
if powerFactor > 0:
yScalingLineLabel=yScalingLineLabel+"$^{-2/3}$"
else:
yScalingLineLabel=yScalingLineLabel+"$^{2/3}$"
else:
print "yScalingLineLabel fraction handling?"
IPython.embed()
sys.exit()
plt.plot(stab[redshiftColumnName], dataNormalisation, 'kD', label = dataLabel)
    if yScalingLine is not None:
plt.plot(zs, yScalingLine, 'b--', label = yScalingLineLabel, lw = 2)
plt.plot(zs, nullLine, 'g-.', label = 'no evolution', lw = 2)
if settingsDict['evoModel'] == '1+z':
plt.plot(zs, np.power(1+zs, fitResults['C']), 'r', lw = 2, label = '(1+z)$^{%.2f \pm %.2f}$' % (fitResults['C'], fitResults['CErr']))
shadedX=np.linspace(0, 2.0, 100)
shadedYPlus=np.power(shadedX+1, fitResults['C']+fitResults['CErr'])
shadedYMinus=np.power(shadedX+1, fitResults['C']-fitResults['CErr'])
elif settingsDict['evoModel'] == 'E(z)':
plt.plot(zs, np.power(Ez, fitResults['C']), 'r', lw = 2, label = '$E(z)^{%.2f \pm %.2f}$' % (fitResults['C'], fitResults['CErr']))
shadedX=np.linspace(0, 2.0, len(Ez))
shadedYPlus=np.power(Ez, fitResults['C']+fitResults['CErr'])
shadedYMinus=np.power(Ez, fitResults['C']-fitResults['CErr'])
if fitResults['C'] < 0:
loc="upper right"
else:
loc="lower left"
leg=plt.legend(loc = loc, prop = {'size': 14}, numpoints = 1)
leg.draw_frame(False)
plt.draw()
plt.xlabel("$z$", fontdict = {'size': 20})
plt.ylabel(yLabel, fontdict = {'size': 20})
xs=shadedX.tolist()+shadedX[::-1].tolist()
ys=shadedYPlus.tolist()+shadedYMinus[::-1].tolist()
plt.fill(xs, ys, 'b', alpha=0.2, edgecolor='none', label = "None", lw = 0.1)
plt.semilogy()
#plt.loglog()
plt.xlim(0, 1.6)
plt.ylim(1e-2, 1e2)
plt.savefig(outDir+os.path.sep+"normEvo_%s_%s.pdf" % (yColumnName, xColumnName))
plt.close()
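#-------------------------------------------------------------------------------------------------------------
def _exampleShadedBandSketch():
    """Minimal sketch of the filled 1-sigma band construction used in makeNormEvoPlot above: the
    polygon runs left-to-right along the upper envelope, then right-to-left along the lower
    envelope (illustration only; made-up C +/- CErr values, and assumes numpy as np and
    matplotlib's pyplot as plt are imported at module level, as elsewhere in this module).
    """
    shadedX=np.linspace(0, 2.0, 100)
    shadedYPlus=np.power(shadedX+1, 2.0+0.3)   # upper envelope: (1+z)^(C+CErr)
    shadedYMinus=np.power(shadedX+1, 2.0-0.3)  # lower envelope: (1+z)^(C-CErr)
    xs=shadedX.tolist()+shadedX[::-1].tolist()
    ys=shadedYPlus.tolist()+shadedYMinus[::-1].tolist()
    plt.fill(xs, ys, 'b', alpha=0.2, edgecolor='none', lw=0.1)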
#-------------------------------------------------------------------------------------------------------------
def makePaperContourPlots(fitResults, parDict, outDir):
"""Special case of plots, for 4 parameter fits, for the paper.
"""
    if 'S' not in fitResults:
        print("... using bisector method - 2D contour plots disabled ...")
        return None
mlA, mlAErr=fitResults['A'], fitResults['AErr']
mlB, mlBErr=fitResults['B'], fitResults['BErr']
mlC, mlCErr=fitResults['C'], fitResults['CErr']
mlS, mlSErr=fitResults['S'], fitResults['SErr']
pars=fitResults['pars']
# We only want to go on if we have a full set...
if mlAErr == 0 or mlBErr == 0 or mlCErr == 0 or mlSErr == 0:
return None
plt.figure(figsize=(10, 10))
plt.subplots_adjust(0.08, 0.07, 0.97, 0.97, 0.0, 0.0)
# Make 2d contour plots of valid combinations, determined by if they have a non null 1 sigma error
# NOTE: here steps have to be smaller than AStep, BStep, CStep, SStep below
# NOTE: any strange numbers in here are fiddling to get non-overlapping plot labels
As=np.linspace(mlA-5.0*mlAErr-math.fmod(mlA-5.0*mlAErr, 0.1), mlA+5.0*mlAErr-math.fmod(mlA+5.0*mlAErr, 0.1), 81)
Bs=np.linspace(mlB-5.0*mlBErr-math.fmod(mlB-5.0*mlBErr, 0.1), mlB+5.0*mlBErr-math.fmod(mlB+5.0*mlBErr, 0.1), 81)
Cs=np.linspace(mlC-5.0*mlCErr-math.fmod(mlC-5.0*mlCErr, 0.1), mlC+5.0*mlCErr-math.fmod(mlC+5.0*mlCErr, 0.1), 81)
Ss=np.linspace(mlS-5.0*mlSErr-math.fmod(mlS-5.0*mlSErr, 0.01), mlS+5.0*mlSErr-math.fmod(mlS+5.0*mlSErr, 0.01), 81)
# Steps for tick label plotting adjustment
AStep=0.2
BStep=0.4
CStep=1.0
SStep=0.02
# Bottom row
# AB
plt.subplot(4, 4, 15)
PDist2D=csr.fast2DProbProjection(As, Bs, 0, 1, pars)
probContourPlot_subPlot(As, Bs, "A", "B", AStep, BStep, mlA, mlB, mlAErr, mlBErr, PDist2D, noYLabels = True)
# AC
plt.subplot(4, 4, 14)
PDist2D=csr.fast2DProbProjection(As, Cs, 0, 2, pars)
probContourPlot_subPlot(As, Cs, "A", "C", AStep, CStep, mlA, mlC, mlAErr, mlCErr, PDist2D, noYLabels = True)
# AS
plt.subplot(4, 4, 13)
PDist2D=csr.fast2DProbProjection(As, Ss, 0, 3, pars)
probContourPlot_subPlot(As, Ss, "A", "S", AStep, SStep, mlA, mlS, mlAErr, mlSErr, PDist2D)
# Middle row
# BC
plt.subplot(4, 4, 10)
PDist2D=csr.fast2DProbProjection(Bs, Cs, 1, 2, pars)
probContourPlot_subPlot(Bs, Cs, "B", "C", BStep, CStep, mlB, mlC, mlBErr, mlCErr, PDist2D, noXLabels = True, noYLabels = True)
# BS
plt.subplot(4, 4, 9)
PDist2D=csr.fast2DProbProjection(Bs, Ss, 1, 3, pars)
probContourPlot_subPlot(Bs, Ss, "B", "S", BStep, SStep, mlB, mlS, mlBErr, mlSErr, PDist2D, noXLabels = True)
# Top row
# CS
plt.subplot(4, 4, 5)
PDist2D=csr.fast2DProbProjection(Cs, Ss, 2, 3, pars)
probContourPlot_subPlot(Cs, Ss, "C", "S", CStep, SStep, mlC, mlS, mlCErr, mlSErr, PDist2D, noXLabels = True)
# 1D plots
# S
plt.subplot(4, 4, 1)
PDist1D=csr.fast1DProbProjection(Ss, 3, pars)
probPlot1D_subPlot(Ss, "S", SStep, mlS, mlSErr, PDist1D, fitResults['plotLabel_S'], noYLabels = True, noXLabels = True)
# C
plt.subplot(4, 4, 6)
PDist1D=csr.fast1DProbProjection(Cs, 2, pars)
probPlot1D_subPlot(Cs, "C", CStep, mlC, mlCErr, PDist1D, fitResults['plotLabel_C'], noYLabels = True, noXLabels = True)
# B
plt.subplot(4, 4, 11)
PDist1D=csr.fast1DProbProjection(Bs, 1, pars)
probPlot1D_subPlot(Bs, "B", BStep, mlB, mlBErr, PDist1D, fitResults['plotLabel_B'], noYLabels = True, noXLabels = True)
# A
plt.subplot(4, 4, 16)
PDist1D=csr.fast1DProbProjection(As, 0, pars)
probPlot1D_subPlot(As, "A", AStep, mlA, mlAErr, PDist1D, fitResults['plotLabel_A'], noYLabels = True, noXLabels = False)
plt.savefig(outDir+os.path.sep+"2DProb_allPars.pdf")
plt.close()
#-------------------------------------------------------------------------------------------------------------
def probPlot1D_subPlot(par1Values, par1Label, par1TickStep, mlPar1, mlPar1Err, PDist1D, resultLabel,
noXLabels = False, noYLabels = False):
"""Make a 1d contour plot of marginalised probability for a parameter.
par1Values = values for parameter 1 (plotted on Y axis)
par1Label = text label for Y axis
par1TickStep = tick step along Y axis
mlPar1 = maximum likelihood value for parameter 1
mlPar1Err = 1d 1-sigma error in parameter 1
PDist1D = 1d prob distribution for parameter 1
"""
par1TickLabels=np.arange(par1Values.min(), par1Values.max(), par1TickStep)
plt.xticks(par1TickLabels, par1TickLabels)
PDist1D=PDist1D/PDist1D.max()
ax=plt.gca()
fitLabel='%s = %s' % (par1Label, resultLabel.replace("\pm", "$\pm$"))
plt.plot(par1Values, PDist1D, 'k-', label = fitLabel)
plt.ylabel("")
plt.yticks([], [])
#ax.xaxis.set_major_locator(matplotlib.ticker.MaxNLocator(6))
plt.ylim(0, 1.2)
leg=plt.legend(loc = (0.0, 0.86), prop = {'size': 12})
leg.draw_frame(False)
plt.draw()
plt.xlabel(par1Label)
if noYLabels == True:
ylocs, ylabels=plt.yticks()
plt.ylabel("")
plt.yticks(ylocs, [""]*len(ylabels))
if noXLabels == True:
xlocs, xlabels=plt.xticks()
plt.xlabel("")
plt.xticks(xlocs, [""]*len(xlabels))
#-------------------------------------------------------------------------------------------------------------
def probContourPlot_subPlot(par1Values, par2Values, par1Label, par2Label, par1TickStep, par2TickStep, mlPar1, mlPar2,
mlPar1Err, mlPar2Err, PDist2D, noXLabels = False, noYLabels = False):
"""Make a 2d contour plot of probability surface of given parameters. Somewhat needless duplication of
code, for makePaperContourPlots
par1Values = values for parameter 1 (plotted on Y axis)
par2Values = values for parameter 2 (plotted on X axis)
par1Label = text label for Y axis
par2Label = text label for X axis
par1TickStep = tick step along Y axis
par2TickStep = tick step along X axis
mlPar1 = maximum likelihood value for parameter 1
mlPar2 = maximum likelihood value for parameter 2
mlPar1Err = 1d 1-sigma error in parameter 1
mlPar2Err = 1d 1-sigma error in parameter 2
PDist2D = 2d likelihood surface, made using fast2DProbProjection
"""
tck1=interpolate.splrep(par1Values, np.arange(par1Values.shape[0]))
par1TickLabels=np.arange(par1Values.min(), par1Values.max(), par1TickStep)
par1TickIndices=interpolate.splev(par1TickLabels, tck1)
plt.yticks(par1TickIndices, par1TickLabels)
tck2=interpolate.splrep(par2Values, np.arange(par2Values.shape[0]))
par2TickLabels=np.arange(par2Values.min(), par2Values.max(), par2TickStep)
par2TickIndices=interpolate.splev(par2TickLabels, tck2)
plt.xticks(par2TickIndices, par2TickLabels)
# We have to smooth to get decent looking contours
# Gaussian smoothing preserves the normalisation
# NOTE: smoothing only needed if very fine grid
PDist2D=ndimage.gaussian_filter(PDist2D, 1)
# Work out where to put contours
sigma1Level=calc2DProbThreshold(PDist2D, 0.683)
sigma2Level=calc2DProbThreshold(PDist2D, 0.95)
# Apparently, we need to switch the order in newer versions of matplotlib
    try:
        plt.contour(PDist2D, [sigma2Level, sigma1Level], colors = 'k')
    except Exception:
        print("contour problem")
        IPython.embed()
        sys.exit()
# Save plot - trim down area first (?) and add axes labels
plt.plot(interpolate.splev(mlPar2, tck2), interpolate.splev(mlPar1, tck1), 'k*',
label = "%s = %.2f $\pm$ %.2f, %s = %.2f $\pm$ %.2f" % (par1Label, mlPar1, mlPar1Err, par2Label, mlPar2, mlPar2Err))
#plt.legend(numpoints = 1)
plt.xlabel(par2Label)
plt.ylabel(par1Label)
if noYLabels == True:
ylocs, ylabels=plt.yticks()
plt.ylabel("")
plt.yticks(ylocs, [""]*len(ylabels))
if noXLabels == True:
xlocs, xlabels=plt.xticks()
plt.xlabel("")
plt.xticks(xlocs, [""]*len(xlabels))
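#-------------------------------------------------------------------------------------------------------------
def _exampleProbThresholdSketch(PDist2D, probLevel):
    """Minimal sketch of what calc2DProbThreshold (defined elsewhere in this module) presumably
    does, inferred from how it is called above: return the density value whose super-level set
    encloses a fraction probLevel (e.g. 0.683 for 1 sigma) of the total probability. This is an
    assumption for illustration, not the actual implementation.
    """
    flat=np.sort(PDist2D.flatten())[::-1]   # density values, highest first
    enclosed=np.cumsum(flat)/flat.sum()     # fraction enclosed above each density value
    return flat[np.argmin(np.abs(enclosed-probLevel))]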
|
gpl-3.0
| 6,037,897,584,997,941,000
| 42.088194
| 287
| 0.585862
| false
| 3.322107
| false
| false
| false
|
mtwestra/akvo-wandelenvoorwater
|
wvw/urls.py
|
1
|
1878
|
from django.conf.urls.defaults import *
from django.views.static import serve
from W4W.models import school, inschrijving,steunpunt
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
info_dict_list_scholen = {
'queryset': school.objects.all().order_by('NAAM_VOLLEDIG'),
'paginate_by': 20,
'extra_context':{'order_by':'nd'}
}
result=inschrijving.objects.filter(ACTIEF=True).order_by('-id')
numschools=len(result)
info_dict_list_inschrijvingen = {
'queryset': result,
'paginate_by': 20,
'extra_context':{'order_by':'id','numschools':numschools}
}
info_dict_list_steunpunten = {
'queryset': steunpunt.objects.filter(ACTIEF=True).exclude(NAAM__contains='onbekend').order_by('id'),
'paginate_by': 20,
'extra_context':{'order_by':'id'}
}
info_dict_detail={
'queryset': school.objects.all(),
}
urlpatterns = patterns('',
(r'^scholen/$', 'django.views.generic.list_detail.object_list', info_dict_list_scholen),
(r'^inschrijvingen/$', 'django.views.generic.list_detail.object_list', info_dict_list_inschrijvingen),
(r'^steunpunten/$', 'django.views.generic.list_detail.object_list', info_dict_list_steunpunten),
(r'^scholen/(?P<object_id>\d+)/$', 'django.views.generic.list_detail.object_detail', info_dict_detail),
(r'^scholen/query/$', 'W4W.views.query_school'),
(r'^inschrijvingen/query/$', 'W4W.views.query_inschrijving'),
(r'^steunpunten/query/$', 'W4W.views.query_steunpunt'),
(r'^inschrijf/$', 'W4W.views.inschrijf'),
(r'^admin/', include(admin.site.urls)),
(r'^scholen/export/$','W4W.views.export_to_excel_school' ),
(r'^inschrijvingen/export/$','W4W.views.export_to_excel_inschrijf' ),
(r'^WvW_media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_DOC_ROOT}),
)
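# Minimal usage sketch (illustration only): the generic object_list views above paginate
# 20 rows per page via the ?page= query parameter, so under this URLconf e.g.:
# /scholen/?page=2 -> second page of schools, ordered by NAAM_VOLLEDIG
# /scholen/15/ -> detail view for the school with primary key 15
# /scholen/export/ -> Excel export handled by W4W.views.export_to_excel_school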
|
agpl-3.0
| -2,006,749,946,259,257,000
| 37.326531
| 108
| 0.685836
| false
| 2.79049
| false
| true
| false
|
junwoo091400/MyCODES
|
Projects/FootPad_Logger/logged_data_analyzer_LSTM/Data_manipulation.py
|
1
|
1131
|
from Base import FootLog
def Datestr_to_Int(datestr):
    bigStr = ''.join(datestr.split('-'))
    return int(bigStr[2:])    # leading '20' is dropped (21st-century dates assumed)
'''
def Time2Float(timestr,state):
if(state == DateHandler.BREAKFAST):
elif(state == DateHandler.LUNCH):
elif(state == DateHandler.DINNER):
return TimeDiff('')
else:
return -1
'''
def find_1_maxIdx(arr):
for i in range(len(arr)-1,-1,-1):
if(arr[i] == 1):
return i
return -1 # Not found.
def find_MaxValue_Idx(predict):
    maxVal = 0    # renamed from 'max' to avoid shadowing the builtin
    idx = -1
    for i in range(len(predict)):
        if(predict[i]>maxVal):
            maxVal = predict[i]
            idx = i
    return idx
FLAG_1 = 1<<0
FLAG_2 = 1<<1
FLAG_3 = 1<<2
FLAG_4 = 1<<3
FLAG_5 = 1<<4
def Encrpted_to_List5(ts):
retArr = [0,0,0,0,0]
if(ts & FLAG_1):
retArr[0] = 1
if(ts & FLAG_2):
retArr[1] = 1
if(ts & FLAG_3):
retArr[2] = 1
if(ts & FLAG_4):
retArr[3] = 1
if(ts & FLAG_5):
retArr[4] = 1
return retArr
def Timestr_difference_Seconds(start,stop):
stt = [int(x) for x in start.split(':')]
stp = [int(x) for x in stop.split(':')]
delta = (stp[0]-stt[0])*3600 + (stp[1]-stt[1])*60 + (stp[2]-stt[2])
return delta
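# Minimal usage sketch of the helpers above (illustration only; the inputs follow the
# 'YYYY-MM-DD' and 'HH:MM:SS' formats the parsers expect).
if __name__ == '__main__':
    print(Datestr_to_Int('2017-03-15'))                        # -> 170315
    print(Encrpted_to_List5(FLAG_1 | FLAG_3))                  # -> [1, 0, 1, 0, 0]
    print(Timestr_difference_Seconds('08:30:00', '09:45:30'))  # -> 4530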
|
gpl-3.0
| -3,336,045,622,675,393,500
| 19.214286
| 69
| 0.609195
| false
| 2.252988
| false
| false
| false
|
MrNeon/qbittorrent-search-plugins
|
strikesearch.py
|
1
|
1268
|
#VERSION: 1.03
#AUTHORS: MrNeon
from novaprinter import prettyPrinter
from helpers import retrieve_url, download_file
import json
class strikesearch(object):
url = 'https://getstrike.net/'
name = 'Strike Search'
supported_categories = {'all': '', 'movies': 'Movies', 'tv': 'TV', 'anime': 'Anime', 'books': 'Books',
'music': 'Music', 'games': 'Games', 'software': 'Applications'}
def __init__(self):
pass
def download_torrent(self, info):
print(download_file(info))
def search(self, what, cat='all'):
json_data = retrieve_url("".join((self.url, 'api/v2/torrents/search/?phrase=', what,
'&category=', self.supported_categories.get(cat, ''))))
json_dict = json.loads(json_data)
if json_dict['results'] < 1:
return
for r in json_dict['torrents']:
r_dict = {'link': r['magnet_uri'],
'name': r['torrent_title'],
'size': str(r['size']) + 'B',
'seeds': r['seeds'],
'leech': r['leeches'],
'desc_link': r['page'],
'engine_url': self.url}
prettyPrinter(r_dict)
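# Minimal usage sketch (illustration only): qBittorrent's nova2 engine normally drives this
# class, but it can be exercised standalone inside the plugin environment, where novaprinter
# and helpers are importable; prettyPrinter writes each result dict to stdout.
if __name__ == "__main__":
    engine = strikesearch()
    engine.search('ubuntu', cat='software')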
|
mit
| 2,467,185,917,792,230,400
| 33.27027
| 106
| 0.501577
| false
| 3.77381
| false
| false
| false
|
szarroug3/X-Ray_Calibre_Plugin
|
lib/book.py
|
1
|
48525
|
# Book.py
'''Controls book functions and holds book data'''
import os
import json
import struct
from sqlite3 import connect
from datetime import datetime
from cStringIO import StringIO
from shutil import copy
from calibre.ebooks.mobi import MobiError
from calibre.ebooks.metadata.mobi import MetadataUpdater
from calibre_plugins.xray_creator.lib.utilities import LIBRARY
from calibre_plugins.xray_creator.lib.status_info import StatusInfo
from calibre_plugins.xray_creator.lib.book_parser import BookParser
from calibre_plugins.xray_creator.lib.book_settings import BookSettings
from calibre_plugins.xray_creator.lib.exceptions import PageDoesNotExist
from calibre_plugins.xray_creator.lib.xray_db_writer import XRayDBWriter
from calibre_plugins.xray_creator.lib.goodreads_parser import GoodreadsParser
class Book(object):
'''Class to hold book information and creates/sends files depending on user settings'''
def __init__(self, database, book_id, connections, settings):
self._basic_info = {'book_id': book_id, 'xray_send_fmt': None}
self._goodreads_conn = connections['goodreads']
self._settings = settings
self._xray_format_information = None
self._statuses = {'general': StatusInfo(status=StatusInfo.IN_PROGRESS),
'xray': StatusInfo(), 'xray_send': StatusInfo(),
'author_profile': StatusInfo(), 'author_profile_send': StatusInfo(),
'start_actions': StatusInfo(), 'start_actions_send': StatusInfo(),
'end_actions': StatusInfo(), 'end_actions_send': StatusInfo()}
self._goodreads_data = {}
self._book_settings = BookSettings(database, book_id, connections)
self._get_basic_information(database, settings['formats'])
if self._statuses['general'].status != StatusInfo.FAIL:
self._statuses['general'].status = StatusInfo.SUCCESS
@property
def status(self):
return self._statuses['general']
@property
def xray_status(self):
return self._statuses['xray']
@property
def xray_send_status(self):
return self._statuses['xray_send']
@property
def xray_send_fmt(self):
return self._basic_info['xray_send_fmt']
@property
def author_profile_status(self):
return self._statuses['author_profile']
@property
def author_profile_send_status(self):
return self._statuses['author_profile_send']
@property
def start_actions_status(self):
return self._statuses['start_actions']
@property
def start_actions_send_status(self):
return self._statuses['start_actions_send']
@property
def end_actions_status(self):
return self._statuses['end_actions']
@property
def end_actions_send_status(self):
return self._statuses['end_actions_send']
@property
def book_id(self):
return self._basic_info['book_id']
@property
def title(self):
return self._basic_info['title']
@property
def author(self):
return self._basic_info['author']
@property
def title_and_author(self):
return '{0} - {1}'.format(self._basic_info['title'], self._basic_info['author'])
def xray_formats_failing(self):
'''Yields x-ray formats that are failing'''
for fmt, info in self._xray_format_information.items():
if info['status'].status is StatusInfo.FAIL:
yield (fmt, info)
def xray_formats_not_failing(self):
'''Yields x-ray formats that are not failing'''
for fmt, info in self._xray_format_information.items():
if info['status'].status is not StatusInfo.FAIL:
yield (fmt, info)
def xray_formats_not_failing_exist(self):
'''Checks if any formats that aren't failing exist'''
return any(self.xray_formats_not_failing())
def _get_basic_information(self, database, formats):
'''Gets title, author, goodreads url, ASIN, and file specific info for the book'''
self._basic_info['title'] = database.field_for('title', self._basic_info['book_id'])
self._basic_info['author'] = ' & '.join(database.field_for('authors', self._basic_info['book_id']))
if self._basic_info['title'] == 'Unknown' or self._basic_info['author'] == 'Unknown':
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_BASIC_INFORMATION_MISSING)
return
if not self._book_settings.prefs['goodreads_url'] or self._book_settings.prefs['goodreads_url'] == '':
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_COULD_NOT_FIND_GOODREADS_PAGE)
return
if not self._book_settings.prefs['asin'] or self._book_settings.prefs['asin'] == '':
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_COULD_NOT_FIND_ASIN)
return
self._basic_info['goodreads_url'] = self._book_settings.prefs['goodreads_url']
self._basic_info['asin'] = self._book_settings.prefs['asin']
if os.path.isfile(self._book_settings.prefs['sample_xray']):
self._basic_info['sample_xray'] = self._book_settings.prefs['sample_xray']
else:
self._basic_info['sample_xray'] = None
if self._settings['create_send_xray']:
self._get_basic_xray_information(database, formats)
if (self._settings['create_send_author_profile']
or self._settings['create_send_start_actions']
or self._settings['create_send_end_actions']):
self._get_basic_non_xray_information(database)
def _get_basic_xray_information(self, database, formats):
'''Gets aliases and format information for the book and initializes x-ray variables'''
self._basic_info['aliases'] = self._book_settings.prefs['aliases']
self._xray_format_information = {}
self._statuses['xray'].status = StatusInfo.IN_PROGRESS
for fmt in formats:
info = {'status': StatusInfo(status=StatusInfo.IN_PROGRESS)}
# find local book if it exists; fail if it doesn't
local_book = database.format_abspath(self._basic_info['book_id'], fmt.upper())
if not local_book or not os.path.exists(local_book):
info['status'].set(StatusInfo.FAIL, StatusInfo.F_LOCAL_BOOK_NOT_FOUND)
else:
info['local_book'] = local_book
local_xray = '.'.join(local_book.split('.')[:-1]) + '.sdr'
if not os.path.exists(local_xray):
os.mkdir(local_xray)
info['local_xray'] = os.path.join(local_xray, fmt)
if not os.path.exists(info['local_xray']):
os.mkdir(info['local_xray'])
self._xray_format_information[fmt.lower()] = info
if not self.xray_formats_not_failing_exist():
self._statuses['xray'].set(StatusInfo.FAIL, StatusInfo.F_NO_APPROPRIATE_LOCAL_BOOK_FOUND)
def _get_basic_non_xray_information(self, database):
'''Gets local book's directory and initializes non-xray variables'''
book_path = database.field_for('path', self._basic_info['book_id']).replace('/', os.sep)
local_book_directory = os.path.join(LIBRARY, book_path)
self._basic_info['local_non_xray'] = os.path.join(local_book_directory, 'non_xray')
if not os.path.exists(self._basic_info['local_non_xray']):
os.mkdir(self._basic_info['local_non_xray'])
if self._settings['create_send_author_profile']:
self._statuses['author_profile'].status = StatusInfo.IN_PROGRESS
if self._settings['create_send_start_actions']:
self._statuses['start_actions'].status = StatusInfo.IN_PROGRESS
if self._settings['create_send_end_actions']:
self._statuses['end_actions'].status = StatusInfo.IN_PROGRESS
def create_files_event(self, create_file_params, log, notifications, abort):
'''Creates and sends files depending on user's settings'''
title_and_author = self.title_and_author
device_books, perc, total = create_file_params
# Prep
if not self._settings['overwrite_when_creating']:
notifications.put((self._calculate_percentage(perc, total),
'Checking for {0} existing files'.format(title_and_author)))
log('{0} Checking for existing files...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._check_for_existing_files()
perc += 1
if abort.isSet():
return
create_xray = self._settings['create_send_xray'] and self.xray_formats_not_failing_exist()
author_profile = (self._settings['create_send_author_profile'] and
self._statuses['author_profile'].status != StatusInfo.FAIL)
start_actions = (self._settings['create_send_start_actions'] and
self._statuses['start_actions'].status != StatusInfo.FAIL)
end_actions = self._settings['create_send_end_actions'] and self._statuses['end_actions'].status != StatusInfo.FAIL
if create_xray or author_profile or start_actions or end_actions:
if self._basic_info['sample_xray'] and create_xray:
notifications.put((self._calculate_percentage(perc, total),
'Parsing {0} given data'.format(title_and_author)))
log('{0} Parsing given data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._parse_input_file()
self._parse_goodreads_data(create_xray=False, create_author_profile=author_profile,
create_start_actions=start_actions, create_end_actions=end_actions)
else:
notifications.put((self._calculate_percentage(perc, total),
'Parsing {0} Goodreads data'.format(title_and_author)))
log('{0} Parsing Goodreads data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._parse_goodreads_data(create_xray=create_xray, create_author_profile=author_profile,
create_start_actions=start_actions, create_end_actions=end_actions)
perc += 1
if self._statuses['general'].status is StatusInfo.FAIL:
return
# Creating Files
if abort.isSet():
return
files_to_send = self._create_files(perc, total, notifications, log)
self._update_general_statuses()
# Sending Files
if self._settings['send_to_device'] and device_books is not None:
send_files = False
if self._settings['create_send_xray'] and self.xray_formats_not_failing_exist():
send_files = True
elif (self._settings['create_send_author_profile'] and
self._statuses['author_profile'].status != StatusInfo.FAIL):
send_files = True
elif (self._settings['create_send_start_actions'] and
self._statuses['start_actions'].status != StatusInfo.FAIL):
send_files = True
elif self._settings['create_send_end_actions'] and self._statuses['end_actions'].status != StatusInfo.FAIL:
send_files = True
if send_files:
notifications.put((self._calculate_percentage(perc, total),
'Sending {0} files to device'.format(self.title_and_author)))
log('{0} Sending files to device...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._check_fmts_for_create_event(device_books, files_to_send)
if len(files_to_send) > 0:
self._send_files(device_books, files_to_send)
perc += 1
def _create_files(self, perc, total, notifications, log):
'''Create files for create_files_event'''
files_to_send = {}
if self._settings['create_send_xray']:
if self.xray_formats_not_failing_exist() and self._statuses['xray'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Parsing {0} book data'.format(self.title_and_author)))
log('{0} Creating x-ray...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
log('{0} Parsing book data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
for fmt, info in self.xray_formats_not_failing():
self._parse_book(fmt, info)
perc += 1
if self.xray_formats_not_failing_exist():
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} x-ray'.format(self.title_and_author)))
log('{0} Writing x-ray...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
for fmt, info in self.xray_formats_not_failing():
self._write_xray(info)
perc += 1
if self._settings['create_send_author_profile']:
if self._statuses['author_profile'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} author profile'.format(self.title_and_author)))
log('{0} Writing author profile...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._write_author_profile(files_to_send)
perc += 1
if self._settings['create_send_start_actions']:
if self._statuses['start_actions'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} start actions'.format(self.title_and_author)))
log('{0} Writing start actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._write_start_actions(files_to_send)
perc += 1
if self._settings['create_send_end_actions']:
if self._statuses['end_actions'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} end actions'.format(self.title_and_author)))
log('{0} Writing end actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._write_end_actions(files_to_send)
perc += 1
return files_to_send
def send_files_event(self, send_file_params, log, notifications, abort):
'''Sends files to device depending on user's settings'''
device_books, book_num, total = send_file_params
if abort.isSet():
return
notifications.put((self._calculate_percentage(book_num, total), self.title_and_author))
files_to_send = {}
checked_data = self._check_fmts_for_send_event(device_books, files_to_send)
create_xray_format_info, create_author_profile, create_start_actions, create_end_actions = checked_data
if create_xray_format_info or create_author_profile or create_start_actions or create_end_actions:
log('{0} Parsing {1} Goodreads data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
            create_xray = create_xray_format_info is not None
if create_xray and self._basic_info['sample_xray']:
self._parse_input_file()
else:
self._parse_goodreads_data(create_xray=create_xray, create_author_profile=create_author_profile,
create_start_actions=create_start_actions, create_end_actions=create_end_actions)
if self._statuses['general'].status is StatusInfo.FAIL:
return
if create_xray and self._statuses['xray'].status != StatusInfo.FAIL:
log('{0} Creating {1} x-ray...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._parse_book(create_xray_format_info['format'],
self._xray_format_information[create_xray_format_info['format']])
if self._xray_format_information[create_xray_format_info['format']]['status'].status != StatusInfo.FAIL:
self._write_xray(self._xray_format_information[create_xray_format_info['format']])
if os.path.exists(create_xray_format_info['local']):
files_to_send['xray'] = create_xray_format_info
if create_author_profile and self._statuses['author_profile'].status != StatusInfo.FAIL:
log('{0} Creating {1} author profile...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._write_author_profile(files_to_send)
if create_start_actions and self._statuses['start_actions'].status != StatusInfo.FAIL:
log('{0} Creating {1} start actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._write_start_actions(files_to_send)
if create_end_actions and self._statuses['end_actions'].status != StatusInfo.FAIL:
log('{0} Creating {1} end actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._write_end_actions(files_to_send)
self._update_general_statuses()
if len(files_to_send) > 0:
log('{0} Sending files to device...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._send_files(device_books, files_to_send)
def _update_general_statuses(self):
if self._settings['create_send_xray'] and self._statuses['xray'].status != StatusInfo.FAIL:
self._statuses['xray'].status = StatusInfo.SUCCESS
if self._settings['create_send_author_profile'] and self._statuses['author_profile'].status != StatusInfo.FAIL:
self._statuses['author_profile'].status = StatusInfo.SUCCESS
if self._settings['create_send_start_actions'] and self._statuses['start_actions'].status != StatusInfo.FAIL:
self._statuses['start_actions'].status = StatusInfo.SUCCESS
if self._settings['create_send_end_actions'] and self._statuses['end_actions'].status != StatusInfo.FAIL:
self._statuses['end_actions'].status = StatusInfo.SUCCESS
@staticmethod
def _calculate_percentage(amt_completed, total):
        '''Calculates the completed fraction of amt_completed over total; minimum returned is .01'''
        fraction = float(amt_completed)/total  # float() guards against integer division under Python 2
        return fraction if fraction >= .01 else .01
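    # Worked example (illustration only): with amt_completed=3 and total=200 the fraction
    # 3/200. = 0.015 is returned as-is, while amt_completed=1 gives 1/200. = 0.005, which
    # falls below the floor, so .01 is returned instead.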
def _parse_input_file(self):
'''Checks input file type and calls appropriate parsing function'''
filetype = os.path.splitext(self._basic_info['sample_xray'])[1][1:].lower()
if filetype == 'asc':
characters, settings = self._parse_input_asc()
quotes = []
elif filetype == 'json':
characters, settings, quotes = self._parse_input_json()
else:
return
self._process_goodreads_xray_results({'characters': characters, 'settings': settings, 'quotes': quotes})
def _parse_input_asc(self):
'''Gets character and setting information from sample x-ray file'''
cursor = connect(self._basic_info['sample_xray']).cursor()
characters = {}
settings = {}
for entity_desc in cursor.execute('SELECT * FROM entity_description').fetchall():
entity_id = entity_desc[3]
description = entity_desc[0]
entity = cursor.execute('SELECT * FROM entity WHERE id = "{0}"'.format(entity_id)).fetchall()
if not entity:
continue
entity_label = entity[0][1]
entity_type = entity[0][3]
if entity_type == 1:
aliases = self._basic_info['aliases'][entity_label] if entity_label in self._basic_info['aliases'] else []
characters[entity_id] = {'label': entity_label, 'description': description, 'aliases': aliases}
elif entity_type == 2:
settings[entity_id] = {'label': entity_label, 'description': description, 'aliases': []}
return characters, settings
def _parse_input_json(self):
'''Gets characters, setting, and quote data from json file'''
entity_num = 1
characters = {}
settings = {}
        with open(self._basic_info['sample_xray']) as sample_file:
            data = json.load(sample_file)
if 'characters' in data:
for name, char_data in data['characters'].items():
description = char_data['description'] if 'description' in char_data else 'No description found.'
aliases = self._basic_info['aliases'][name] if name in self._basic_info['aliases'] else []
characters[entity_num] = {'label': name, 'description': description, 'aliases': aliases}
entity_num += 1
if 'settings' in data:
for setting, char_data in data['settings'].items():
description = char_data['description'] if 'description' in char_data else 'No description found.'
aliases = self._basic_info['aliases'][setting] if setting in self._basic_info['aliases'] else []
settings[entity_num] = {'label': setting, 'description': description, 'aliases': aliases}
entity_num += 1
quotes = data['quotes'] if 'quotes' in data else []
return characters, settings, quotes
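    # Example of the JSON layout _parse_input_json expects (illustration only; the names,
    # descriptions, and quote below are made up):
    # {
    #     "characters": {"Alice": {"description": "The protagonist."}},
    #     "settings": {"Wonderland": {"description": "Where the story happens."}},
    #     "quotes": ["Curiouser and curiouser!"]
    # }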
def _parse_goodreads_data(self, create_xray=None, create_author_profile=None,
create_start_actions=None, create_end_actions=None):
if create_xray is None:
create_xray = self._settings['create_send_xray']
if create_author_profile is None:
create_author_profile = self._settings['create_send_author_profile']
if create_start_actions is None:
create_start_actions = self._settings['create_send_start_actions']
if create_end_actions is None:
create_end_actions = self._settings['create_send_end_actions']
try:
goodreads_data = GoodreadsParser(self._basic_info['goodreads_url'], self._goodreads_conn,
self._basic_info['asin'])
results = goodreads_data.parse(create_xray=create_xray, create_author_profile=create_author_profile,
create_start_actions=create_start_actions, create_end_actions=create_end_actions)
compiled_xray, compiled_author_profile, compiled_start_actions, compiled_end_actions = results
except PageDoesNotExist:
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_COULD_NOT_PARSE_GOODREADS_DATA)
return
if create_xray:
self._process_goodreads_xray_results(compiled_xray)
if create_author_profile:
self._process_goodreads_author_profile_results(compiled_author_profile)
if create_start_actions:
self._process_goodreads_start_actions_results(compiled_start_actions)
if create_end_actions:
self._process_goodreads_end_actions_results(compiled_end_actions)
def _process_goodreads_xray_results(self, compiled_xray):
'''Sets aliases in book settings and basic info if compiled xray has data; sets status to fail if it doesn't'''
if compiled_xray:
self._goodreads_data['xray'] = compiled_xray
for char in self._goodreads_data['xray']['characters'].values():
if char['label'] not in self._basic_info['aliases'].keys():
self._basic_info['aliases'][char['label']] = char['aliases']
self._book_settings.prefs['aliases'] = self._basic_info['aliases']
else:
self._statuses['xray'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_XRAY)
def _process_goodreads_author_profile_results(self, compiled_author_profile):
'''Sets author profile in goodreads data if compiled author profile has data; sets status to fail if it doesn't'''
if compiled_author_profile:
self._goodreads_data['author_profile'] = compiled_author_profile
else:
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_AUTHOR_PROFILE)
def _process_goodreads_start_actions_results(self, compiled_start_actions):
'''Sets start actions in goodreads data if compiled start actions has data; sets status to fail if it doesn't'''
if compiled_start_actions:
self._goodreads_data['start_actions'] = compiled_start_actions
else:
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_START_ACTIONS)
def _process_goodreads_end_actions_results(self, compiled_end_actions):
'''Sets end actions in goodreads data if compiled end actions has data; sets status to fail if it doesn't'''
if compiled_end_actions:
self._goodreads_data['end_actions'] = compiled_end_actions
else:
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_END_ACTIONS)
def _parse_book(self, fmt, info):
'''Will parse book using the format info given'''
try:
book_parser = BookParser(fmt, info['local_book'], self._goodreads_data['xray'], self._basic_info['aliases'])
info['parsed_book_data'] = book_parser.parse()
except MobiError:
info['status'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_PARSE_BOOK)
def _check_for_existing_files(self):
'''Checks if files exist and fails for that type if they do'''
if self._settings['create_send_xray']:
for fmt_info in self.xray_formats_not_failing():
info = fmt_info[1]
if os.path.exists(os.path.join(info['local_xray'],
'XRAY.entities.{0}.asc'.format(self._basic_info['asin']))):
info['status'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_XRAY)
if self._settings['create_send_author_profile']:
if os.path.exists(os.path.join(self._basic_info['local_non_xray'],
'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin']))):
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_AUTHOR_PROFILE)
if self._settings['create_send_start_actions']:
if os.path.exists(os.path.join(self._basic_info['local_non_xray'],
'StartActions.data.{0}.asc'.format(self._basic_info['asin']))):
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_START_ACTIONS)
if self._settings['create_send_end_actions']:
if os.path.exists(os.path.join(self._basic_info['local_non_xray'],
'EndActions.data.{0}.asc'.format(self._basic_info['asin']))):
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_END_ACTIONS)
def _write_xray(self, info):
'''Writes x-ray file using goodreads and parsed book data; Will save in local directory'''
try:
filename = os.path.join(info['local_xray'], 'XRAY.entities.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
info['status'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_XRAY)
xray_db_writer = XRayDBWriter(info['local_xray'], self._basic_info['goodreads_url'],
self._basic_info['asin'], info['parsed_book_data'])
xray_db_writer.write_xray()
if not os.path.exists(os.path.join(info['local_xray'], 'XRAY.entities.{0}.asc'.format(self._basic_info['asin']))):
info['status'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_XRAY)
return
info['status'].status = StatusInfo.SUCCESS
def _write_author_profile(self, files_to_send):
'''Writes author profile file using goodreads; Will save in local directory'''
try:
filename = os.path.join(self._basic_info['local_non_xray'],
'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_AUTHOR_PROFILE)
try:
with open(os.path.join(self._basic_info['local_non_xray'],
'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin'])),
'w+') as author_profile:
json.dump(self._goodreads_data['author_profile'], author_profile)
except OSError:
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_AUTHOR_PROFILE)
return
if self._settings['send_to_device']:
filename = 'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
files_to_send['author_profile'] = {'local': local_file, 'filename': filename}
def _write_start_actions(self, files_to_send):
'''Writes start actions file using goodreads; Will save in local directory'''
try:
filename = os.path.join(self._basic_info['local_non_xray'],
'StartActions.data.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_START_ACTIONS)
try:
with open(os.path.join(self._basic_info['local_non_xray'],
'StartActions.data.{0}.asc'.format(self._basic_info['asin'])),
'w+') as start_actions:
json.dump(self._goodreads_data['start_actions'], start_actions)
except OSError:
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_START_ACTIONS)
return
if self._settings['send_to_device']:
filename = 'StartActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
files_to_send['start_actions'] = {'local': local_file, 'filename': filename}
def _write_end_actions(self, files_to_send):
'''Writes end actions file using goodreads; Will save in local directory'''
try:
filename = os.path.join(self._basic_info['local_non_xray'],
'EndActions.data.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_END_ACTIONS)
try:
with open(os.path.join(self._basic_info['local_non_xray'],
'EndActions.data.{0}.asc'.format(self._basic_info['asin'])),
'w+') as end_actions:
json.dump(self._goodreads_data['end_actions'], end_actions)
except OSError:
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_END_ACTIONS)
return
if self._settings['send_to_device']:
filename = 'EndActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
files_to_send['end_actions'] = {'local': local_file, 'filename': filename}
def _check_fmts_for_create_event(self, device_books, files_to_send):
'''Compiles dict of file type info to use when creating files'''
        if len(device_books) == 0 or self._basic_info['book_id'] not in device_books:
if self._settings['create_send_xray'] and self.xray_formats_not_failing_exist():
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if (self._settings['create_send_author_profile'] and
self._statuses['author_profile'].status == StatusInfo.SUCCESS):
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
                if 'author_profile' in files_to_send:
del files_to_send['author_profile']
if self._settings['create_send_start_actions'] and self._statuses['start_actions'].status == StatusInfo.SUCCESS:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
                if 'start_actions' in files_to_send:
del files_to_send['start_actions']
if self._settings['create_send_end_actions'] and self._statuses['end_actions'].status == StatusInfo.SUCCESS:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
                if 'end_actions' in files_to_send:
del files_to_send['end_actions']
return
first_fmt = device_books[self._basic_info['book_id']].keys()[0]
self._basic_info['device_sdr'] = device_books[self._basic_info['book_id']][first_fmt]['device_sdr']
if not os.path.exists(self._basic_info['device_sdr']):
os.mkdir(self._basic_info['device_sdr'])
if self._settings['create_send_xray'] and self.xray_formats_not_failing_exist():
# figure out which format to send
self._check_xray_format_to_create(device_books, files_to_send)
def _check_xray_format_to_create(self, device_books, files_to_send):
'''Compiles dict of file type to use for x-ray'''
formats_not_failing = [fmt for fmt, info in self.xray_formats_not_failing()]
formats_on_device = device_books[self._basic_info['book_id']].keys()
common_formats = list(set(formats_on_device).intersection(formats_not_failing))
if len(common_formats) == 0:
for fmt, info in self.xray_formats_not_failing():
info['status'].status = StatusInfo.SUCCESS
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
else:
format_picked = self._settings['file_preference']
if len(common_formats) == 1:
format_picked = common_formats[0]
for fmt, info in self.xray_formats_not_failing():
if fmt != format_picked:
info['status'].status = StatusInfo.SUCCESS
continue
filename = 'XRAY.entities.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(info['local_xray'], filename)
files_to_send['xray'] = {'local': local_file, 'filename': filename, 'format': format_picked}
def _check_fmts_for_send_event(self, device_books, files_to_send):
'''Compiles dict of file type info to use when sending files'''
create_xray = None
create_author_profile = False
create_start_actions = False
create_end_actions = False
        if self._basic_info['book_id'] not in device_books:
if self._settings['create_send_xray']:
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if self._settings['create_send_author_profile']:
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if self._settings['create_send_start_actions']:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if self._settings['create_send_end_actions']:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
return create_xray, create_author_profile, create_start_actions, create_end_actions
first_fmt = device_books[self._basic_info['book_id']].keys()[0]
self._basic_info['device_sdr'] = device_books[self._basic_info['book_id']][first_fmt]['device_sdr']
if not os.path.exists(self._basic_info['device_sdr']):
os.mkdir(self._basic_info['device_sdr'])
if self._settings['create_send_xray']:
# figure out which format to send
create_xray = self._check_xray_fmt_for_send(device_books, files_to_send)
if self._settings['create_send_author_profile']:
create_author_profile = self._check_author_profile_for_send(files_to_send)
if self._settings['create_send_start_actions']:
create_start_actions = self._check_start_actions_for_send(files_to_send)
if self._settings['create_send_end_actions']:
create_end_actions = self._check_end_actions_for_send(files_to_send)
return create_xray, create_author_profile, create_start_actions, create_end_actions
def _check_xray_fmt_for_send(self, device_books, files_to_send):
'''Check if there's a valid x-ray to send'''
formats_not_failing = [fmt for fmt, info in self._xray_format_information.items()]
formats_on_device = device_books[self._basic_info['book_id']].keys()
common_formats = list(set(formats_on_device).intersection(formats_not_failing))
if len(common_formats) == 0:
for fmt, info in self._xray_format_information.items():
info['status'].status = StatusInfo.SUCCESS
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
else:
format_picked = self._settings['file_preference']
if len(common_formats) == 1:
format_picked = common_formats[0]
filename = 'XRAY.entities.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._xray_format_information[format_picked]['local_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_XRAY)
else:
if os.path.exists(local_file):
files_to_send['xray'] = {'local': local_file, 'filename': filename, 'format': format_picked}
else:
if not self._settings['create_files_when_sending']:
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return {'local': local_file, 'filename': filename, 'format': format_picked}
return None
def _check_author_profile_for_send(self, files_to_send):
'''Check if there's a valid author profile to send'''
filename = 'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['author_profile_send'].set(StatusInfo.FAIL,
StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_AUTHOR_PROFILE)
else:
if os.path.exists(local_file):
files_to_send['author_profile'] = {'local': local_file, 'filename': filename}
else:
if not self._settings['create_files_when_sending']:
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return True
return False
def _check_start_actions_for_send(self, files_to_send):
'''Check if there's a valid start actions file to send'''
filename = 'StartActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_START_ACTIONS)
else:
if os.path.exists(local_file):
files_to_send['start_actions'] = {'local': local_file, 'filename': filename}
else:
if not self._settings['create_files_when_sending']:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return True
return False
def _check_end_actions_for_send(self, files_to_send):
'''Check if there's a valid end actions file to send'''
filename = 'EndActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_END_ACTIONS)
else:
if os.path.exists(local_file):
files_to_send['end_actions'] = {'local': local_file, 'filename': filename}
else:
if not self._settings['create_files_when_sending']:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return True
return False
def _send_files(self, device_books, files_to_send):
'''Sends files to device depending on list compiled in files_to_send'''
number_of_failed_asin_updates = 0
formats_on_device = device_books[self._basic_info['book_id']].keys()
for fmt in formats_on_device:
    try:
        with open(device_books[self._basic_info['book_id']][fmt]['device_book'], 'r+b') as stream:
            mobi_updater = ASINUpdater(stream)
            mobi_updater.update(self._basic_info['asin'])
    except MobiError:
        # count failures per format so the all-formats-failed check below works
        number_of_failed_asin_updates += 1
        if (self._settings['create_send_xray'] and 'xray' in files_to_send and
                fmt == files_to_send['xray']['format']):
            self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
            self._basic_info['xray_send_fmt'] = files_to_send['xray']['format']
            del files_to_send['xray']
if number_of_failed_asin_updates == len(formats_on_device):
if self._settings['create_send_author_profile']:
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
if self._settings['create_send_start_actions']:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
if self._settings['create_send_end_actions']:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
return
# temporarily rename current file in case send fails
for filetype, info in files_to_send.items():
self._send_file(filetype, info)
def _send_file(self, filetype, info):
'''Send file to device and update status accordingly'''
device_filename = os.path.join(self._basic_info['device_sdr'], info['filename'])
if os.path.exists(device_filename):
os.rename(device_filename, '{0}.old'.format(device_filename))
copy(info['local'], self._basic_info['device_sdr'])
if os.path.exists(device_filename):
if os.path.exists('{0}.old'.format(device_filename)):
os.remove('{0}.old'.format(device_filename))
if filetype == 'xray':
self._statuses['xray_send'].status = StatusInfo.SUCCESS
self._basic_info['xray_send_fmt'] = info['format']
elif filetype == 'author_profile':
self._statuses['author_profile_send'].status = StatusInfo.SUCCESS
elif filetype == 'start_actions':
self._statuses['start_actions_send'].status = StatusInfo.SUCCESS
elif filetype == 'end_actions':
self._statuses['end_actions_send'].status = StatusInfo.SUCCESS
else:
os.rename('{0}.old'.format(device_filename), device_filename)
if filetype == 'xray':
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_XRAY)
self._basic_info['xray_send_fmt'] = info['format']  # record which format the failed send targeted
elif filetype == 'author_profile':
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_AUTHOR_PROFILE)
elif filetype == 'start_actions':
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_START_ACTIONS)
elif filetype == 'end_actions':
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_END_ACTIONS)
class ASINUpdater(MetadataUpdater):
'''Class to modify MOBI book'''
def update(self, asin):
'''This will update ASIN'''
def update_exth_record(rec):
'''Queues an EXTH record and drops any original record with the same id'''
recs.append(rec)
if rec[0] in self.original_exth_records:
self.original_exth_records.pop(rec[0])
if self.type != "BOOKMOBI":
raise MobiError("Setting ASIN only supported for MOBI files of type 'BOOK'.\n"
"\tThis is a '%s' file of type '%s'" % (self.type[0:4], self.type[4:8]))
recs = []
original = None
if 113 in self.original_exth_records:
original = self.original_exth_records[113]
elif 504 in self.original_exth_records:
original = self.original_exth_records[504]
if original == asin:
return
update_exth_record((113, asin.encode(self.codec, 'replace')))
update_exth_record((504, asin.encode(self.codec, 'replace')))
# Include remaining original EXTH fields
for record_id in sorted(self.original_exth_records):
recs.append((record_id, self.original_exth_records[record_id]))
recs = sorted(recs, key=lambda x: x[0])
exth = StringIO()
for code, data in recs:
exth.write(struct.pack('>II', code, len(data) + 8))
exth.write(data)
exth = exth.getvalue()
trail = len(exth) % 4
pad = '\0' * (4 - trail) # Always pad w/ at least 1 byte
exth = ''.join(['EXTH', struct.pack('>II', len(exth) + 12, len(recs)), exth, pad])
if getattr(self, 'exth', None) is None:
raise MobiError('No existing EXTH record. Cannot update ASIN.')
self.create_exth(exth=exth)
return
|
gpl-3.0
| 4,600,604,134,167,618,000
| 53.64527
| 124
| 0.593673
| false
| 3.900096
| false
| false
| false
|
gisdev-km/pyras
|
pyras/io/hecras/read_ras.py
|
1
|
2308
|
"""
"""
class SimpleAttribute:
"""
"""
def __init__(self, name, options=['SI Units', 'English Units']):
pass
class NamedAttribute:
"""
"""
def __init__(self, name, type_, value=None, separator='=',
max_length=None):
pass
class TagAttribute:
"""
"""
def __init__(self, name, start_tag, end_tag, type_, value=None,
max_length=None):
pass
def _generic_reader():
""" """
def read_project(filename):
"""
Proj Title=new_project
Default Exp/Contr=0.3,0.1
SI Units
Y Axis Title=Elevation
X Axis Title(PF)=Main Channel Distance
X Axis Title(XS)=Station
BEGIN DESCRIPTION:
Example text
END DESCRIPTION:
DSS Start Date=
DSS Start Time=
DSS End Date=
DSS End Time=
DSS Export Filename=
DSS Export Rating Curves= 0
DSS Export Rating Curve Sorted= 0
DSS Export Volume Flow Curves= 0
DXF Filename=
DXF OffsetX= 0
DXF OffsetY= 0
DXF ScaleX= 1
DXF ScaleY= 10
GIS Export Profiles= 0
"""
sep = '='
tags = {
'description': ['BEGIN DESCRIPTION:', 'END DESCRIPTION:']
}
fixed = {
'units': ['SI Units', 'English Units']
}
keys = {
'Proj Title': '',
'Default Exp/Contr': '=0.3,0.1',
'Current Plan': '=p03',
'Geom File': '=g01',
'Flow File': '=f01',
'Plan File': '=p01',
'Y Axis Title': '=Elevation',
'X Axis Title(PF)': '=Main Channel Distance',
'X Axis Title(XS)': '=Station',
'DSS Start Date': '=',
'DSS Start Time': '=',
'DSS End Date': '=',
'DSS End Time': '=',
'DSS Export Filename': '=',
'DSS Export Rating Curves': '= 0',
'DSS Export Rating Curve Sorted': '= 0',
'DSS Export Volume Flow Curves': '= 0',
'DXF Filename': '=',
'DXF OffsetX': '= 0',
'DXF OffsetY': '= 0',
'DXF ScaleX': '= 1',
'DXF ScaleY': '= 10',
'GIS Export Profiles': '= 0'
}
def read_geometry(filename):
""" """
def read_plan(filename):
""" """
def read_boundary(filename):
""" """
def test_project():
""" """
r'D:\Users\penac1\Dropbox (Personal)\it\repos\git\pyras\temp_examples\Steady Examples'  # placeholder sample-data path; raw string keeps the backslashes literal
if __name__ == '__main__':
test_project()
|
mit
| 6,103,303,954,101,331,000
| 18.726496
| 89
| 0.526863
| false
| 3.33526
| false
| false
| false
|
pism/pism
|
examples/python/ssa_tests/ssa_testj.py
|
1
|
4091
|
#! /usr/bin/env python3
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015, 2016, 2018 Ed Bueler and Constantine Khroulev and David Maxwell
#
# This file is part of PISM.
#
# PISM is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# PISM is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License
# along with PISM; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import PISM
from PISM.util import convert
class testj(PISM.ssa.SSAExactTestCase):
def _initGrid(self):
halfWidth = 300.0e3
Lx = halfWidth
Ly = halfWidth
ctx = PISM.Context().ctx
self.grid = PISM.IceGrid.Shallow(ctx, Lx, Ly, 0, 0,
self.Mx, self.My,
PISM.CELL_CENTER,
PISM.XY_PERIODIC)
def _initPhysics(self):
config = self.modeldata.config
config.set_flag("basal_resistance.pseudo_plastic.enabled", False)
enthalpyconverter = PISM.EnthalpyConverter(config)
config.set_string("stress_balance.ssa.flow_law", "isothermal_glen")
self.modeldata.setPhysics(enthalpyconverter)
def _initSSACoefficients(self):
self._allocStdSSACoefficients()
self._allocateBCs()
vecs = self.modeldata.vecs
vecs.tauc.set(0.0) # irrelevant for test J
# ensures that the ice is floating (max. thickness is 770 m)
vecs.bedrock_altitude.set(-1000.0)
vecs.mask.set(PISM.MASK_FLOATING)
vecs.bc_mask.set(0) # No dirichlet data.
EC = PISM.EnthalpyConverter(PISM.Context().config)
enth0 = EC.enthalpy(273.15, 0.01, 0) # 0.01 water fraction
vecs.enthalpy.set(enth0)
ocean_rho = self.config.get_number("constants.sea_water.density")
ice_rho = self.config.get_number("constants.ice.density")
# The PISM.vec.Access object ensures that we call beginAccess for each
# variable in 'vars', and that endAccess is called for each one on exiting
# the 'with' block.
with PISM.vec.Access(comm=[vecs.land_ice_thickness,
vecs.surface_altitude,
vecs.bc_mask,
vecs.vel_bc]):
grid = self.grid
for (i, j) in grid.points():
p = PISM.exactJ(grid.x(i), grid.y(j))
vecs.land_ice_thickness[i, j] = p.H
vecs.surface_altitude[i, j] = (1.0 - ice_rho / ocean_rho) * p.H # // FIXME task #7297
# special case at center point (Dirichlet BC)
if (i == grid.Mx() // 2) and (j == grid.My() // 2):
vecs.bc_mask[i, j] = 1
vecs.vel_bc[i, j] = [p.u, p.v]
def _initSSA(self):
# Test J has a viscosity that is independent of velocity. So we force a
# constant viscosity by setting the strength_extension
# thickness larger than the given ice thickness. (max = 770m).
nu0 = convert(30.0, "MPa year", "Pa s")
H0 = 500.0 # 500 m typical thickness
ssa = self.ssa
ssa.strength_extension.set_notional_strength(nu0 * H0)
ssa.strength_extension.set_min_thickness(800.)
def exactSolution(self, i, j, x, y):
p = PISM.exactJ(x, y)
return [p.u, p.v]
# The main code for a run follows:
if __name__ == '__main__':
context = PISM.Context()
config = context.config
tc = testj(int(config.get_number("grid.Mx")), int(config.get_number("grid.My")))
tc.run(config.get_string("output.file_name"))
|
gpl-3.0
| 8,439,416,760,993,107,000
| 37.233645
| 109
| 0.599364
| false
| 3.364309
| true
| false
| false
|
dimtion/jml
|
inputFiles/ourIA/old_ias/dijkstra.py
|
1
|
8663
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
##################################################################################################################
#################################################### PRE-DEFINED IMPORTS ####################################################
##################################################################################################################
# Imports that are necessary for the program architecture to work properly
# Do not edit this code
import ast
import sys
import os
##################################################################################################################
###################################################### YOUR IMPORTS ######################################################
##################################################################################################################
import utils as u
import algorithms as algo
#################################################### PRE-DEFINED CONSTANTS ###################################################
##################################################################################################################
# Possible characters to send to the maze application
# Any other will be ignored
# Do not edit this code
UP = 'U'
DOWN = 'D'
LEFT = 'L'
RIGHT = 'R'
##################################################################################################################
# Name of your team
# It will be displayed in the maze
# You have to edit this code
TEAM_NAME = "Your name here"
##################################################################################################################
##################################################### YOUR CONSTANTS #####################################################
##################################################################################################################
##################################################################################################################
##################################################### YOUR VARIABLES #####################################################
##################################################################################################################
route = []
##################################################################################################################
#################################################### PRE-DEFINED FUNCTIONS ###################################################
##################################################################################################################
# Writes a message to the shell
# Use for debugging your program
# Channels stdout and stdin are captured to enable communication with the maze
# Do not edit this code
def debug (text) :
# Writes to the stderr channel
sys.stderr.write(str(text) + "\n")
sys.stderr.flush()
##################################################################################################################
# Reads one line of information sent by the maze application
# This function is blocking, and will wait for a line to terminate
# The received information is automatically converted to the correct type
# Do not edit this code
def readFromPipe () :
# Reads from the stdin channel and returns the structure associated to the string
try :
text = sys.stdin.readline()
return ast.literal_eval(text.strip())
except :
os._exit(-1)
##################################################################################################################
# Sends the text to the maze application
# Do not edit this code
def writeToPipe (text) :
# Writes to the stdout channel
sys.stdout.write(text)
sys.stdout.flush()
##################################################################################################################
# Reads the initial maze information
# The function processes the text and returns the associated variables
# The dimensions of the maze are positive integers
# Maze map is a dictionary associating to a location its adjacent locations and the associated weights
# The preparation time gives the time during which 'initializationCode' can make computations before the game starts
# The turn time gives the time during which 'determineNextMove' can make computations before returning a decision
# Player locations are tuples (line, column)
# Coins are given as a list of locations where they appear
# A boolean indicates if the game is over
# Do not edit this code
def processInitialInformation () :
# We read from the pipe
data = readFromPipe()
return (data['mazeWidth'], data['mazeHeight'], data['mazeMap'], data['preparationTime'], data['turnTime'], data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
##################################################################################################################
# Reads the information after each player moved
# The maze map and allowed times are no longer provided since they do not change
# Do not edit this code
def processNextInformation () :
# We read from the pipe
data = readFromPipe()
return (data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
##################################################################################################################
# This is where you should write your code to do things during the initialization delay
# This function should not return anything, but should be used for a short preprocessing
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def initializationCode (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
global route
routingTable = algo.dijkstra(mazeMap, playerLocation)
route = u.way_width(routingTable, playerLocation, (0, mazeWidth - 1))
##################################################################################################################
# This is where you should write your code to determine the next direction
# This function should return one of the directions defined in the CONSTANTS section
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def determineNextMove (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins):
next_pos = route.pop(0)
return u.direction(playerLocation, next_pos)
##################################################################################################################
####################################################### MAIN LOOP ######################################################
##################################################################################################################
# This is the entry point when executing this file
# We first send the name of the team to the maze
# The first message we receive from the maze includes its dimensions and map, the times allowed to the various steps, and the players and coins locations
# Then, at every loop iteration, we get the maze status and determine a move
# Do not edit this code
if __name__ == "__main__" :
# We send the team name
writeToPipe(TEAM_NAME + "\n")
# We process the initial information and have a delay to compute things using it
(mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime, playerLocation, opponentLocation, coins, gameIsOver) = processInitialInformation()
initializationCode(mazeWidth, mazeHeight, mazeMap, preparationTime, playerLocation, opponentLocation, coins)
# We decide how to move and wait for the next step
while not gameIsOver :
(playerLocation, opponentLocation, coins, gameIsOver) = processNextInformation()
if gameIsOver :
break
nextMove = determineNextMove(mazeWidth, mazeHeight, mazeMap, turnTime, playerLocation, opponentLocation, coins)
writeToPipe(nextMove)
##################################################################################################################
##################################################################################################################
|
mit
| 4,362,299,323,070,154,000
| 48.221591
| 195
| 0.461618
| false
| 5.9295
| false
| false
| false
|
CuonDeveloper/cuon
|
cuon_client/cuon_newclient/bin/cuon/Bank/bank.py
|
1
|
8104
|
# -*- coding: utf-8 -*-
##Copyright (C) [2005] [Jürgen Hamel, D-32584 Löhne]
##This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as
##published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
##This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
##warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
##for more details.
##You should have received a copy of the GNU General Public License along with this program; if not, write to the
##Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import sys
import os
import os.path
from types import *
import pygtk
pygtk.require('2.0')
import gtk
import gtk.glade
import gobject
import string
import logging
from cuon.Windows.chooseWindows import chooseWindows
import cPickle
#import cuon.OpenOffice.letter
# localisation
import locale, gettext
locale.setlocale (locale.LC_NUMERIC, '')
import threading
import datetime as DateTime
import SingleBank
import cuon.Addresses.addresses
import cuon.Addresses.SingleAddress
class bankwindow(chooseWindows):
def __init__(self, allTables):
chooseWindows.__init__(self)
self.singleBank = SingleBank.SingleBank(allTables)
self.singleAddress = cuon.Addresses.SingleAddress.SingleAddress(allTables)
self.loadGlade('bank.xml')
self.win1 = self.getWidget('BankMainwindow')
#self.setStatusBar()
self.allTables = allTables
self.EntriesBank = 'bank.xml'
self.loadEntries(self.EntriesBank)
self.singleBank.setEntries(self.getDataEntries(self.EntriesBank) )
self.singleBank.setGladeXml(self.xml)
self.singleBank.setTreeFields( ['address.lastname as address_name', \
'address.city as city','bcn'] )
self.singleBank.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_UINT) )
self.singleBank.setTreeOrder('bcn')
self.singleBank.setListHeader([_('Lastname'), _('City'),_('BCN')])
self.singleBank.setTree(self.xml.get_widget('tree1') )
self.singleBank.sWhere = 'where address.id = address_id '
# set values for comboBox
# Menu-items
self.initMenuItems()
# Close Menus for Tab
self.addEnabledMenuItems('tabs','bank11')
# seperate Menus
self.addEnabledMenuItems('address','bank1')
# enabledMenues for Address
self.addEnabledMenuItems('editAddress','mi_new1')
self.addEnabledMenuItems('editAddress','mi_clear1')
self.addEnabledMenuItems('editAddress','mi_print1')
self.addEnabledMenuItems('editAddress','mi_edit1')
# tabs from notebook
self.tabClients = 0
self.tabChanged()
def checkClient(self):
pass
#Menu File
def on_quit1_activate(self, event):
self.out( "exit clients V1")
self.closeWindow()
def on_tree1_row_activated(self, event, data1, data2):
print event
print data1
print data2
self.activateClick('bChooseClient', event, 'clicked')
def on_save1_activate(self, event):
self.out( "save addresses v2")
self.singleBank.save()
self.setEntriesEditable(self.EntriesBank, False)
self.tabChanged()
def on_new1_activate(self, event):
self.out( "new addresses v2")
self.singleBank.newRecord()
self.setEntriesEditable(self.EntriesBank, True)
def on_edit1_activate(self, event):
self.out( "edit addresses v2")
self.setEntriesEditable(self.EntriesBank, True)
def on_delete1_activate(self, event):
self.out( "delete addresses v2")
self.singleBank.deleteRecord()
# Button choose address
def on_bChooseAddressOfBank_clicked(self, event):
adr = cuon.Addresses.addresses.addresswindow(self.allTables)
adr.setChooseEntry('chooseAddress', self.getWidget( 'eAddressID'))
# signals from entry eAddressNumber
def on_eAddressID_changed(self, event):
print 'eAdrnbr changed'
iAdrNumber = self.getChangedValue('eAddressID')
eAdrField = self.getWidget('tvAddress')
liAdr = self.singleAddress.getAddress(iAdrNumber)
self.setTextbuffer(eAdrField,liAdr)
# search button
def on_bSearch_clicked(self, event):
self.out( 'Searching ....', self.ERROR)
sName = self.getWidget('eFindName').get_text()
sCity = self.getWidget('eFindCity').get_text()
self.out('Name and City = ' + sName + ', ' + sCity, self.ERROR)
self.singleBank.sWhere = 'where lastname ~* \'.*' + sName + '.*\' and city ~* \'.*' + sCity + '.*\''
self.out(self.singleBank.sWhere, self.ERROR)
self.refreshTree()
def refreshTree(self):
self.singleBank.disconnectTree()
if self.tabOption == self.tabClients:
self.singleBank.connectTree()
self.singleBank.refreshTree()
elif self.tabOption == self.tabMisc:
self.singleMisc.sWhere = 'where address_id = ' + str(int(self.singleBank.ID))
self.singleMisc.fillEntries(self.singleMisc.findSingleId())
elif self.tabOption == self.tabPartner:
self.singlePartner.sWhere = 'where addressid = ' + str(int(self.singleBank.ID))
self.singlePartner.connectTree()
self.singlePartner.refreshTree()
elif self.tabOption == self.tabSchedul:
self.singleSchedul.sWhere = 'where partnerid = ' + str(int(self.singlePartner.ID))
self.singleSchedul.connectTree()
self.singleSchedul.refreshTree()
def tabChanged(self):
self.out( 'tab changed to :' + str(self.tabOption))
if self.tabOption == self.tabClients:
#Address
self.disableMenuItem('tabs')
self.enableMenuItem('address')
self.actualEntries = self.singleBank.getEntries()
self.editAction = 'editAddress'
#self.setStatusbarText([''])
self.setTreeVisible(True)
self.out( 'Seite 0')
elif self.tabOption == self.tabBank:
self.out( 'Seite 2')
self.disableMenuItem('tabs')
self.enableMenuItem('bank')
self.editAction = 'editBank'
self.setTreeVisible(False)
#self.setStatusbarText([self.singleBank.sStatus])
elif self.tabOption == self.tabMisc:
self.out( 'Seite 3')
self.disableMenuItem('tabs')
self.enableMenuItem('misc')
self.editAction = 'editMisc'
self.setTreeVisible(False)
#self.setStatusbarText([self.singleBank.sStatus])
elif self.tabOption == self.tabPartner:
#Partner
self.disableMenuItem('tabs')
self.enableMenuItem('partner')
self.out( 'Seite 1')
self.editAction = 'editPartner'
self.setTreeVisible(True)
#self.setStatusbarText([self.singleBank.sStatus])
elif self.tabOption == self.tabSchedul:
#Scheduling
self.disableMenuItem('tabs')
self.enableMenuItem('schedul')
self.out( 'Seite 4')
self.editAction = 'editSchedul'
self.setTreeVisible(True)
self.setStatusbarText([self.singlePartner.sStatus])
# refresh the Tree
self.refreshTree()
self.enableMenuItem(self.editAction)
self.editEntries = False
|
gpl-3.0
| 6,315,108,617,176,841,000
| 28.786765
| 134
| 0.613182
| false
| 3.938746
| false
| false
| false
|
rtucker-mozilla/mozilla_inventory
|
api_v1/keyvalue_handler.py
|
1
|
24912
|
from piston.handler import BaseHandler, rc
from systems.models import System, SystemRack, SystemStatus, NetworkAdapter, KeyValue
from truth.models import Truth, KeyValue as TruthKeyValue
from dhcp.DHCP import DHCP as DHCPInterface
from dhcp.models import DHCP
from MacroExpansion import MacroExpansion
from KeyValueTree import KeyValueTree
import re
try:
import json
except:
from django.utils import simplejson as json
from django.test.client import Client
from settings import API_ACCESS
class KeyValueHandler(BaseHandler):
allowed_methods = API_ACCESS
def create(self, request, key_value_id=None):
if 'system_id' in request.POST:
n = KeyValue()
system = System.objects.get(id=request.POST['system_id'])
n.system = system
if 'key' in request.POST:
n.key = request.POST['key']
if 'value' in request.POST:
n.value = request.POST['value']
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except:
resp = rc.NOT_FOUND
resp.write('Unable to Create Key/Value Pair')
return resp
elif 'truth_name' in request.POST:
n = TruthKeyValue()
truth = Truth.objects.get(name=request.POST['truth_name'])
n.truth = truth
if 'key' in request.POST:
n.key = request.POST['key']
if 'value' in request.POST:
n.value = request.POST['value']
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except:
resp = rc.NOT_FOUND
resp.write('Unable to Create Key/Value Pair')
return resp
else:
resp = rc.NOT_FOUND
resp.write('system_id or truth_name required')
return resp
def build_validation_array(self):
input_regex_array = []
output_regex_array = []
error_message_array = []
ipv4_regex = re.compile(r'((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9])')
true_false_regex = re.compile('(^True$|^False$)')
input_regex_array.append(re.compile('nic\.\d+\.ipv4_address\.\d+'))
output_regex_array.append(ipv4_regex)
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.scope\.netmask$'))
output_regex_array.append(ipv4_regex)
error_message_array.append('Requires Subnet Mask')
input_regex_array.append(re.compile('^is_dhcp_scope$'))
output_regex_array.append(re.compile(true_false_regex))
error_message_array.append('Requires True|False')
input_regex_array.append(re.compile('^dhcp\.scope\.start$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.scope\.end$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.pool\.start$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.pool\.end$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.option\.ntp_server\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.dns_server\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.option_router\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.option\.subnet_mask\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.pool\.allow_booting\.\d+$'))
output_regex_array.append(re.compile(true_false_regex))
error_message_array.append('Requires True|False')
input_regex_array.append(re.compile('^dhcp\.pool\.allow_bootp\.\d+$'))
output_regex_array.append(re.compile(true_false_regex))
error_message_array.append('Requires True|False')
input_regex_array.append(re.compile('^nic\.\d+\.mac_address\.\d+$'))
output_regex_array.append(re.compile('^([0-9a-f]{2}([:-]|$)){6}$', re.I))
error_message_array.append('Requires Mac Address XX:XX:XX:XX:XX:XX')
return input_regex_array, output_regex_array, error_message_array
def validate(self, key, passed_value):
error_message = None
return_regex = None
return_bool = True
input_regex_array, output_regex_array, error_message_array = self.build_validation_array()
## Here we loop through all of the possible input validation patterns. If the key matches one, then we need to validate the value of the key/value pair
for i in range(0, len(input_regex_array)):
if input_regex_array[i].match(key):
return_regex = output_regex_array[i]
error_message = error_message_array[i]
continue
## Check if we should validate the value portion of the key/value pair. No use validating it if the key doesn't require it
if return_regex is not None:
if return_regex.match(passed_value) is None:
return_bool = False
else:
error_message = None
return return_bool, error_message
def update(self, request, key_value_id=None):
###TODO This whole method is not functioning correctly (fix in version 2): the system_id or truth_id is not coming through from the Poster Firefox plugin
if 'system_id' in request.POST:
n = None
key_validated, validation_error_string = self.validate(request.POST['key'], request.POST['value'])
if key_validated is False:
resp = rc.FORBIDDEN
resp.write('Validation Failed for %s %s' % (request.POST['key'], validation_error_string) )
return resp
try:
n = KeyValue.objects.get(id=key_value_id,key=request.POST['key'])
system = System.objects.get(id=request.POST['system_id'])
except:
resp = rc.NOT_FOUND
resp.write('Neither system_id nor truth_id found')
if n is not None:
n.system = system
if 'value' in request.POST and n is not None:
n.value = request.POST['value']
if n is not None:
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except:
resp = rc.NOT_FOUND
resp.write('Unable to Create Key/Value Pair')
return resp
elif 'truth_id' in request.POST:
try:
truth = Truth.objects.get(name=key_value_id)
n = TruthKeyValue.objects.get(truth=truth, key=request.POST['key'])
if 'value' in request.POST:
n.value = request.POST['value']
except:
pass
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except Exception, e:
resp = rc.NOT_FOUND
resp.write('Unable to Update Key/Value Pair %s' % e)
return resp
else:
resp = rc.NOT_FOUND
resp.write('Neither system_id nor truth_id found')
return resp
def read(self, request, key_value_id=None):
#if keystore get var is set return the whole keystore
if 'keystore' in request.GET:
#if key get var is set return the keystore based on the existence of this key
if 'key' in request.GET:
base = KeyValue.objects.filter(key=request.GET['keystore']).filter(keyvalue_set__contains=request.GET['key'])
tmp_list = {}  # keyed by 'host:<hostname>:<key>' below, so a dict is needed
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
    key_name = 'host:%s:%s' % (r.system.hostname, r.key)
    tmp_list[key_name] = r.value
return tmp_list
if 'key' not in request.GET:
tree = KeyValueTree(request.GET['keystore']).final
return tree
elif 'key_type' in request.GET:
key_type = request.GET['key_type']
tmp_list = []
if key_type == 'dhcp_scopes':
#Get keystores from truth that have dhcp.is_scope = True
base = TruthKeyValue.objects.filter(key='dhcp.is_scope',value='True')
#Iterate through the list and get all of the key/value pairs
for row in base:
keyvalue = TruthKeyValue.objects.filter(truth=row.truth)
tmp_dict = {}
for kv in keyvalue:
tmp_dict[kv.key] = kv.value
tmp_list.append(tmp_dict)
return tmp_list
if key_type == 'system_by_reverse_dns_zone':
#Find systems that have an adapter in the given reverse DNS zone
keyvalue_pairs = KeyValue.objects.filter(key__contains='reverse_dns_zone',value=request.GET['zone']).filter(key__startswith='nic.')
#Iterate through the list and get all of the key/value pairs
tmp_list = []
for row in keyvalue_pairs:
keyvalue = KeyValue.objects.filter(system=row.system)
tmp_dict = {}
for kv in keyvalue:
tmp_dict[kv.key] = kv.value
tmp_dict['hostname'] = row.system.hostname
appendable = True
for the_items in tmp_list:
if 'hostname' not in the_items:
appendable = True
elif the_items['hostname'] == row.system.hostname:
appendable = False
if appendable is True:
tmp_list.append(tmp_dict)
#tmp_list = list(set(tmp_list))
return tmp_list
if key_type == 'system_by_scope':
#Find systems that have an adapter in the given DHCP scope
keyvalue_pairs = KeyValue.objects.filter(key__contains='dhcp_scope',value=request.GET['scope']).filter(key__startswith='nic.')
#Iterate through the list and get all of the key/value pairs
tmp_list = []
for row in keyvalue_pairs:
keyvalue = KeyValue.objects.filter(system=row.system)
tmp_dict = {}
for kv in keyvalue:
tmp_dict[kv.key] = kv.value
tmp_dict['hostname'] = row.system.hostname
appendable = True
for the_items in tmp_list:
if 'hostname' not in the_items:
appendable = True
elif the_items['hostname'] == row.system.hostname:
appendable = False
if appendable is True:
tmp_list.append(tmp_dict)
#tmp_list = list(set(tmp_list))
return tmp_list
if key_type == 'adapters_by_system':
#Collect all nic.* key/value pairs for the given system
system = System.objects.get(hostname=request.GET['system'])
keyvalue_pairs = KeyValue.objects.filter(key__startswith='nic.').filter(system=system).order_by('key')
#Iterate through the list and get all of the key/value pairs
tmp_dict = {}
adapter_ids = []
final_list = []
for kv in keyvalue_pairs:
tmp_dict[kv.key] = kv.value
for k in tmp_dict.iterkeys():
matches = re.match('nic\.(\d+).*',k)
if matches is not None:
if matches.group(1) not in adapter_ids:
adapter_ids.append(matches.group(1))
adapter_ids.sort()
for a in adapter_ids:
adapter_name = ''
mac_address = ''
dhcp_hostname = ''
dhcp_filename = ''
ipv4_address = ''
if 'nic.%s.ipv4_address.0' % a in tmp_dict:
ipv4_address = tmp_dict['nic.%s.ipv4_address.0' % a]
if 'nic.%s.name.0' % a in tmp_dict:
adapter_name = tmp_dict['nic.%s.name.0' % a]
if 'nic.%s.mac_address.0' % a in tmp_dict:
mac_address = tmp_dict['nic.%s.mac_address.0' % a]
if 'nic.%s.dhcp_hostname.0' % a in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.dhcp_hostname.0' % a]
if 'nic.%s.dhcp_filename.0' % a in tmp_dict:
dhcp_filename = tmp_dict['nic.%s.dhcp_filename.0' % a]
try:
final_list.append({
'system_hostname':system.hostname,
'ipv4_address':ipv4_address,
'adapter_name':adapter_name,
'mac_address':mac_address,
'dhcp_hostname':dhcp_hostname,
'dhcp_filename':dhcp_filename}
)
except:
pass
#tmp_list.append(tmp_dict)
return final_list
if key_type == 'adapters_by_system_and_zone':
#Collect nic.* pairs for the system, limited to the given reverse DNS zone
zone = request.GET['zone']
system = System.objects.get(hostname=request.GET['system'])
keyvalue_pairs = KeyValue.objects.filter(key__startswith='nic.').filter(system=system).order_by('key')
#Iterate through the list and get all of the key/value pairs
tmp_dict = {}
adapter_ids = []
final_list = []
for kv in keyvalue_pairs:
tmp_dict[kv.key] = kv.value
for k in tmp_dict.iterkeys():
matches = re.match('nic\.(\d+).*',k)
if matches is not None:
dhcp_scope_match = 'nic.%s.reverse_dns_zone.0' % matches.group(1)
if matches.group(1) not in adapter_ids and dhcp_scope_match in tmp_dict and tmp_dict[dhcp_scope_match] == zone:
#if matches.group(1) not in adapter_ids and 'nic.%s.dhcp_scope.0' % matches.group(1) in tmp_dict and tmp_dict['nic.%s.dhcp_scope.0' % matches.group(1)] == dhcp_scope:
adapter_ids.append(matches.group(1))
adapter_ids.sort()
for a in adapter_ids:
adapter_name = ''
mac_address = ''
dhcp_hostname = ''
dhcp_filename = ''
dhcp_domain_name = ''
ipv4_address = ''
if 'nic.%s.ipv4_address.0' % a in tmp_dict:
ipv4_address = tmp_dict['nic.%s.ipv4_address.0' % a]
if 'nic.%s.name.0' % a in tmp_dict:
adapter_name = tmp_dict['nic.%s.name.0' % a]
if 'nic.%s.mac_address.0' % a in tmp_dict:
mac_address = tmp_dict['nic.%s.mac_address.0' % a]
if 'nic.%s.dhcp_hostname.0' % a in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.dhcp_hostname.0' % a]
if 'nic.%s.dhcp_filename.0' % a in tmp_dict:
dhcp_filename = tmp_dict['nic.%s.dhcp_filename.0' % a]
if 'nic.%s.dhcp_domain_name.0' % a in tmp_dict:
dhcp_domain_name = tmp_dict['nic.%s.dhcp_domain_name.0' % a]
final_list.append({'system_hostname':system.hostname, 'ipv4_address':ipv4_address})
#tmp_list.append(tmp_dict)
return final_list
if key_type == 'adapters_by_system_and_scope':
#Collect nic.* pairs for the system, limited to the given DHCP scope
dhcp_scope = request.GET['dhcp_scope']
system = System.objects.get(hostname=request.GET['system'])
keyvalue_pairs = KeyValue.objects.filter(key__startswith='nic.').filter(system=system).order_by('key')
#Iterate through the list and get all of the key/value pairs
tmp_dict = {}
adapter_ids = []
final_list = []
for kv in keyvalue_pairs:
tmp_dict[kv.key] = kv.value
for k in tmp_dict.iterkeys():
matches = re.match('nic\.(\d+).*',k)
if matches is not None:
dhcp_scope_match = 'nic.%s.dhcp_scope.0' % matches.group(1)
if matches.group(1) not in adapter_ids and dhcp_scope_match in tmp_dict and tmp_dict[dhcp_scope_match] == dhcp_scope:
#if matches.group(1) not in adapter_ids and 'nic.%s.dhcp_scope.0' % matches.group(1) in tmp_dict and tmp_dict['nic.%s.dhcp_scope.0' % matches.group(1)] == dhcp_scope:
adapter_ids.append(matches.group(1))
adapter_ids.sort()
for a in adapter_ids:
adapter_name = ''
mac_address = ''
dhcp_hostname = ''
dhcp_filename = ''
dhcp_domain_name = ''
ipv4_address = ''
if 'nic.%s.ipv4_address.0' % a in tmp_dict:
ipv4_address = tmp_dict['nic.%s.ipv4_address.0' % a]
if 'nic.%s.name.0' % a in tmp_dict:
adapter_name = tmp_dict['nic.%s.name.0' % a]
if 'nic.%s.mac_address.0' % a in tmp_dict:
mac_address = tmp_dict['nic.%s.mac_address.0' % a]
if 'nic.%s.dhcp_hostname.0' % a in tmp_dict and 'nic.%s.option_hostname.0' % a not in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.dhcp_hostname.0' % a]
if 'nic.%s.option_hostname.0' % a in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.option_hostname.0' % a]
if 'nic.%s.dhcp_filename.0' % a in tmp_dict:
dhcp_filename = tmp_dict['nic.%s.dhcp_filename.0' % a]
if 'nic.%s.dhcp_domain_name.0' % a in tmp_dict:
dhcp_domain_name = tmp_dict['nic.%s.dhcp_domain_name.0' % a]
final_list.append({'system_hostname':system.hostname, 'ipv4_address':ipv4_address, 'adapter_name':adapter_name, 'mac_address':mac_address, 'dhcp_hostname':dhcp_hostname, 'dhcp_filename':dhcp_filename, 'dhcp_domain_name':dhcp_domain_name})
#tmp_list.append(tmp_dict)
return final_list
elif 'key' in request.GET and request.GET['key'] != '':
tmp_list = {}
try:
base = KeyValue.objects.filter(key=request.GET['key'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'host:%s:%s' % (r.system.hostname, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
try:
base = TruthKeyValue.objects.filter(key=request.GET['key'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'truth:%s:%s' % (r.truth.name, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
return tmp_list
elif 'value' in request.GET:
tmp_list = {}
try:
base = KeyValue.objects.filter(value=request.GET['value'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'host:%s:%s' % (r.system.hostname, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
try:
base = TruthKeyValue.objects.filter(value=request.GET['value'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'truth:%s:%s' % (r.truth.name, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
return tmp_list
def delete(self, request, key_value_id=None):
if 'key_type' in request.GET and request.GET['key_type'] == 'delete_all_network_adapters':
#Delete every nic.* key/value pair for the system
try:
system_hostname = request.GET['system_hostname']
system = System.objects.get(hostname=system_hostname)
KeyValue.objects.filter(key__startswith='nic', system=system).delete()
resp = rc.ALL_OK
resp.write('json = {"id":"0"}')
except:
resp = rc.NOT_FOUND
resp.write('json = {"error_message":"Unable to Delete}')
return resp
if 'key_type' in request.GET and request.GET['key_type'] == 'delete_network_adapter':
#Delete the nic.<adapter_number>.* key/value pairs for one adapter
try:
adapter_number = request.GET['adapter_number']
system_hostname = request.GET['system_hostname']
system = System.objects.get(hostname=system_hostname)
KeyValue.objects.filter(key__startswith='nic.%s' % adapter_number, system=system).delete()
#KeyValue.objects.filter(key__startswith='nic.0', system=system).delete()
resp = rc.ALL_OK
resp.write('json = {"id":"14"}')
except:
resp = rc.NOT_FOUND
resp.write('json = {"error_message":"Unable to Delete}')
return resp
if 'key_type' not in request.GET:
if 'system_id' in request.GET:
try:
n = KeyValue.objects.get(id=key_value_id)
n.delete()
resp = rc.ALL_OK
resp.write('json = {"id":"%s"}' % str(key_value_id))
except:
resp = rc.NOT_FOUND
return resp
if 'truth_id' in request.GET:
try:
n = TruthKeyValue.objects.get(id=key_value_id)
n.delete()
resp = rc.ALL_OK
resp.write('json = {"id":"%s"}' % str(key_value_id))
except:
resp = rc.NOT_FOUND
return resp
resp = rc.ALL_OK
resp.write('json = {"id":"1"}')
return resp
|
bsd-3-clause
| -5,242,016,272,190,501,000
| 47.943026
| 259
| 0.504978
| false
| 4.062622
| false
| false
| false
|
karstenw/nodebox-pyobjc
|
art/nodeboxlogo_larger.py
|
1
|
2485
|
size(512,512)
background(None)
def bar(x, y, w, depth, filled=1.0):
d1 = depth*filled
colormode(HSB)
f = fill()
s = stroke()
if f != None and f.brightness != 1:
s = color(f.hue, f.saturation+0.2, f.brightness-0.4)
nostroke()
#front
if f != None: fill(f)
rect(x, y, w, w)
#bottom
beginpath(x, y+w)
lineto(x-d1, y+w+d1)
lineto(x-d1+w, y+w+d1)
lineto(x+w, y+w)
endpath()
#left
beginpath(x, y)
lineto(x-d1, y+d1)
lineto(x-d1, y+w+d1)
lineto(x, y+w)
endpath()
#top
if f != None: fill(f.hue, f.saturation-0, f.brightness-0.15)
beginpath(x, y)
lineto(x+w, y)
lineto(x+w-d1, y+d1)
lineto(x-d1, y+d1)
endpath()
#right
if f != None: fill(f.hue, f.saturation-0, f.brightness-0.15)
beginpath(x+w, y)
lineto(x+w-d1, y+d1)
lineto(x+w-d1, y+w+d1)
lineto(x+w, y+w)
endpath()
if s != None: stroke(s)
line(x, y, x+w, y)
line(x, y, x-d1, y+d1)
line(x+w, y, x+w, y+w)
line(x+w, y+w, x+w-d1, y+w+d1)
line(x, y+w, x-d1, y+w+d1)
line(x+w, y, x+w-d1, y+d1)
#front
if f != None: fill(f)
rect(x-d1, y+d1, w, w)
x += d1
y += d1
d2 = depth*(1-filled)
if d2 != 0:
line(x, y, x+d2, y+d2)
line(x+w, y, x+w+d2, y+d2)
line(x+w, y+w, x+w+d2, y+w+d2)
line(x, y+w, x+d2, y+w+d2)
f = fill()
nofill()
rect(x+d2, y+d2, w, w)
if f != None: fill(f)
def cube(x, y, w, filled=1.0):
bar(x, y, w, w*0.5, filled)
from random import seed
seed(55)
w = 112
n = 3
strokewidth(0.5)
colormode(RGB)
c = color(0.05,0.65,0.85)
c.brightness += 0.2
for x in range(n):
for y in range(n):
bottom = w * n
for z in range(n):
stroke(0.1)
strokewidth(2)
colormode(RGB)
dr = (1-c.r)/(n-1) * (x*0.85+y*0.15+z*0.05) * 1.1
dg = (1-c.g)/(n-1) * (x*0.85+y*0.15+z*0.05) * 1.2
db = (1-c.b)/(n-1) * (x*0.85+y*0.15+z*0.05) * 1.1
fill(1.2-dr, 1.1-dg, 1.2-db)
if random() > 0.5:
nostroke()
nofill()
dx = w*x - w/2*z
dy = bottom-w*y + w/2*z
transform(CORNER)
translate(171,-112)
scale(1.01)
cube(dx, dy, w)
reset()
|
mit
| -5,216,147,913,592,614,000
| 19.890756
| 64
| 0.440241
| false
| 2.326779
| false
| false
| false
|
notapresent/rbm2m
|
rbm2m/action/scan_manager.py
|
1
|
3426
|
# -*- coding: utf-8 -*-
import datetime
import logging
from sqlalchemy import and_, func
from base_manager import BaseManager
from ..models import Scan, scan_records, Genre
# All scans with no activity for this long are considered stalled
INACTIVITY_PERIOD = datetime.timedelta(seconds=600)
# Update interval
UPDATE_INTERVAL = datetime.timedelta(days=1)
logger = logging.getLogger(__name__)
class ScanManager(BaseManager):
"""
Handles all DB interactions regarding scans
"""
__model__ = Scan
def get_current_scans(self, genre_id=None):
"""
Returns currently running scans for genre (or all genres if genre_id is None)
:return: list of Scans
"""
query = (
self.session.query(Scan)
.filter(Scan.status.in_(['queued', 'running']))
)
if genre_id:
query = query.filter(Scan.genre_id == genre_id)
return query.all()
def last_scans(self):
return (
self.session.query(Scan)
.order_by(Scan.started_at.desc())
.limit(50)
.all()
)
def records_not_in_scan(self, scan_id, rec_ids):
result = (
self.session.query(scan_records.c.record_id)
.filter(scan_records.c.scan_id == scan_id)
.filter(scan_records.c.record_id.in_(rec_ids))
.all()
)
in_scan = [rec_id for (rec_id,) in result]  # each result row is a 1-tuple
return list(set(rec_ids) - set(in_scan))
def get_stalled_scans(self):
"""
Find scans with no activity during the last INACTIVITY_PERIOD (callers mark them as failed)
:return: List of stalled scans
"""
threshold = datetime.datetime.utcnow() - INACTIVITY_PERIOD
active_scans = (
self.session.query(Scan)
.filter(Scan.status.in_(['queued', 'running']))
.all()
)
rv = [s for s in active_scans if s.last_action < threshold]
return rv
def get_genre_with_no_scans_in_24h(self):
"""
Find one imported genre for which there were no successful scans in the last day
:return: Genre
"""
threshold = datetime.datetime.utcnow() - UPDATE_INTERVAL
q = (
self.session.query(Genre)
.select_from(Scan)
.join(Genre)
.filter(Scan.status == 'success')
.filter(Genre.import_enabled.is_(True))
.group_by(Scan.genre_id)
.having(func.max(Scan.started_at) < threshold)
)
return q.first()
def get_genre_with_no_scans(self):
"""
Find one imported genre for which there were no successful scans at all
:return: Genre
"""
q = (
self.session.query(Genre)
.outerjoin(Scan,
and_(
Scan.genre_id == Genre.id,
Scan.status == 'success')
)
.filter(Genre.import_enabled.is_(True))
.filter(Scan.id.is_(None))
)
return q.first()
def clean_up_old_scans(self):
"""
Delete all scans older than 7 days from now
"""
threshold = datetime.datetime.utcnow() - datetime.timedelta(days=7)
self.session.query(Scan).filter(Scan.started_at < threshold).delete()
|
apache-2.0
| 1,392,480,987,541,680,600
| 28.534483
| 90
| 0.542323
| false
| 3.969873
| false
| false
| false
|
pdarragh/EMC-Mars-Challenge
|
Hackpack/db.py
|
1
|
2571
|
import json
import pymongo
from pymongo import MongoClient
client = MongoClient('mongodb://107.170.244.164/', 27017)
db = client.mars_db
game_coll = db.game_data
sensor_coll = db.sensor_data
log_coll = db.log_data
db2 = client.mars_db2
game_coll2 = db2.game_data
sensor_coll2 = db2.sensor_data
log_coll2 = db2.log_data
db3 = client.mars_db3
game_coll3 = db3.game_data
sensor_coll3 = db3.sensor_data
log_coll3 = db3.log_data
# Inserts the json data into the game_data collection
# Returns the inserted_id
def game_insert(json_data):
result = game_coll.insert_one(json_data)
game_coll2.insert_one(json_data)
game_coll3.insert_one(json_data)
return result.inserted_id
# Gets the data based on the json query
def game_get(json_query):
return game_coll.find(json_query)
# Returns an array of all of the data in the game_data
# collection
def game_get_all():
return game_coll.find()
# Gets the records where all of the readings are greater than
# the specified readings
#
# Give the radiation value first, then the temperature value
# then the flare value
def game_get_threshold(rad, temp, flare):
new_rad = "$gt: " + rad
new_temp = "$gt: " + temp
return game_coll.find({"readings.radiation": new_rad,
"readings.temperature": new_temp,
"readings.solarFlare": flare})
def game_reset():
game_coll.drop()
# Inserts the json data into the sensor_data collection
# Returns the inserted_id
def sensor_insert(json_data):
result = sensor_coll.insert_one(json_data)
sensor_coll2.insert_one(json_data)
sensor_coll3.insert_one(json_data)
return result.inserted_id
# Gets the data based on the json query
def sensor_get(json_query):
return sensor_coll.find(json_query)
# Returns an array of all of the data in the sensor_data
# collection
def sensor_get_all():
return sensor_coll.find()
# Gets the records where all of the readings are greater than
# the specified readings
#
# Give the radiation value first, then the temperature value
# then the flare value
def sensor_get_threshold(rad, temp, flare):
new_rad = "$gt: " + rad
new_temp = "$gt: " + temp
return sensor_coll.find({"readings.radiation": new_rad,
"readings.temperature": new_temp,
"readings.solarFlare": flare})
def sensor_reset():
sensor_coll.drop()
def log_insert(json_data):
log_coll.insert_one(json_data)
log_coll2.insert_one(json_data)
log_coll3.insert_one(json_data)
def log_get(json_query):
return log_coll.find(json_query)
def log_get_all():
return log_coll.find()
|
mit
| -148,093,916,572,310,200
| 22.381818
| 61
| 0.702839
| false
| 3.105072
| false
| false
| false
|
kustomzone/Fuzium
|
core/src/Site/SiteStorage.py
|
1
|
18598
|
import os
import re
import shutil
import json
import time
import sys
import sqlite3
import gevent.event
from Db import Db
from Debug import Debug
from Config import config
from util import helper
from Plugin import PluginManager
@PluginManager.acceptPlugins
class SiteStorage(object):
def __init__(self, site, allow_create=True):
self.site = site
self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
self.allowed_dir = os.path.abspath(self.directory.decode(sys.getfilesystemencoding())) # Only serve file within this dir
self.log = site.log
self.db = None # Db class
self.db_checked = False # Checked db tables since startup
self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
self.has_db = self.isFile("dbschema.json") # The site has schema
if not os.path.isdir(self.directory):
if allow_create:
os.mkdir(self.directory) # Create directory if not found
else:
raise Exception("Directory not exists: %s" % self.directory)
# Load db from dbschema.json
def openDb(self, check=True):
try:
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
except Exception, err:
raise Exception("dbschema.json is not a valid JSON: %s" % err)
if check:
if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:  # Does not exist or is empty
self.rebuildDb()
if not self.db:
self.db = Db(schema, db_path)
if check and not self.db_checked:
changed_tables = self.db.checkTables()
if changed_tables:
self.rebuildDb(delete_db=False)  # TODO: only update the changed tables' data
def closeDb(self):
if self.db:
self.db.close()
self.event_db_busy = None
self.db = None
# Return db class
def getDb(self):
if not self.db:
self.log.debug("No database, waiting for dbschema.json...")
self.site.needFile("dbschema.json", priority=3)
self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
if self.has_db:
self.openDb()
return self.db
# Return possible db files for the site
def getDbFiles(self):
for content_inner_path, content in self.site.content_manager.contents.iteritems():
# content.json file itself
if self.isFile(content_inner_path):
    yield self.getPath(content_inner_path), self.open(content_inner_path)
else:  # content.json file itself is missing
    self.log.error("[MISSING] %s" % content_inner_path)
# Data files in content.json
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
for file_relative_path in content["files"].keys():
if not file_relative_path.endswith(".json"):
continue  # We are only interested in json files
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
if self.isFile(file_inner_path):
yield self.getPath(file_inner_path), self.open(file_inner_path)
else:
self.log.error("[MISSING] %s" % file_inner_path)
# Rebuild sql cache
def rebuildDb(self, delete_db=True):
self.has_db = self.isFile("dbschema.json")
if not self.has_db:
return False
self.event_db_busy = gevent.event.AsyncResult()
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path) and delete_db:
if self.db:
self.db.close() # Close db if open
time.sleep(0.5)
self.log.info("Deleting %s" % db_path)
try:
os.unlink(db_path)
except Exception, err:
self.log.error("Delete error: %s" % err)
self.db = None
self.openDb(check=False)
self.log.info("Creating tables...")
self.db.checkTables()
self.log.info("Importing data...")
cur = self.db.getCursor()
cur.execute("BEGIN")
cur.logging = False
found = 0
s = time.time()
try:
for file_inner_path, file in self.getDbFiles():
try:
if self.db.loadJson(file_inner_path, file=file, cur=cur):
found += 1
except Exception, err:
self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err)))
finally:
cur.execute("END")
self.log.info("Imported %s data file in %ss" % (found, time.time() - s))
self.event_db_busy.set(True) # Event done, notify waiters
self.event_db_busy = None # Clear event
# Execute sql query or rebuild on dberror
def query(self, query, params=None):
if self.event_db_busy: # Db not ready for queries
self.log.debug("Wating for db...")
self.event_db_busy.get() # Wait for event
try:
res = self.getDb().execute(query, params)
except sqlite3.DatabaseError, err:
if err.__class__.__name__ == "DatabaseError":
self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
self.rebuildDb()
res = self.db.cur.execute(query, params)
else:
raise err
return res
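# Illustrative call (not in the original; table and column names are
# hypothetical): a parametrised query against the site's sqlite cache.
# One sqlite3 DatabaseError triggers rebuildDb() plus a single retry;
# any other error propagates unchanged.
#   storage.query("SELECT * FROM json WHERE directory = ?", ["data/users"])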
# Open file object
def open(self, inner_path, mode="rb"):
return open(self.getPath(inner_path), mode)
# Open file object
def read(self, inner_path, mode="r"):
return open(self.getPath(inner_path), mode).read()
# Write content to file
def write(self, inner_path, content):
file_path = self.getPath(inner_path)
# Create dir if not exist
file_dir = os.path.dirname(file_path)
if not os.path.isdir(file_dir):
os.makedirs(file_dir)
# Write file
if hasattr(content, 'read'): # File-like object
with open(file_path, "wb") as file:
shutil.copyfileobj(content, file) # Write buff to disk
else: # Simple string
if inner_path == "content.json" and os.path.isfile(file_path):
helper.atomicWrite(file_path, content)
else:
with open(file_path, "wb") as file:
file.write(content)
del content
self.onUpdated(inner_path)
# Remove file from filesystem
def delete(self, inner_path):
file_path = self.getPath(inner_path)
os.unlink(file_path)
self.onUpdated(inner_path, file=False)
def deleteDir(self, inner_path):
dir_path = self.getPath(inner_path)
os.rmdir(dir_path)
def rename(self, inner_path_before, inner_path_after):
for retry in range(3):
# To work around "The process cannot access the file because it is being used by another process." error
try:
os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
err = None
break
except Exception, err:
self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
time.sleep(0.1 + retry)
if err:
raise err
# List files from a directory
def list(self, dir_inner_path):
directory = self.getPath(dir_inner_path)
for root, dirs, files in os.walk(directory):
root = root.replace("\\", "/")
root_relative_path = re.sub("^%s" % re.escape(directory), "", root).lstrip("/")
for file_name in files:
if root_relative_path: # Not root dir
yield root_relative_path + "/" + file_name
else:
yield file_name
# Site content updated
def onUpdated(self, inner_path, file=None):
file_path = self.getPath(inner_path)
# Update Sql cache
if inner_path == "dbschema.json":
self.has_db = self.isFile("dbschema.json")
# Reopen DB to check changes
if self.has_db:
self.closeDb()
self.openDb()
elif not config.disable_db and inner_path.endswith(".json") and self.has_db: # Load json file to db
if config.verbose:
self.log.debug("Loading json file to db: %s" % inner_path)
try:
self.getDb().loadJson(file_path, file)
except Exception, err:
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
self.closeDb()
# Load and parse json file
def loadJson(self, inner_path):
with self.open(inner_path) as file:
return json.load(file)
# Write formatted json file
def writeJson(self, inner_path, data):
content = json.dumps(data, indent=1, sort_keys=True)
# Make it a little more compact by removing unnecessary white space
def compact_dict(match):
if "\n" in match.group(0):
return match.group(0).replace(match.group(1), match.group(1).strip())
else:
return match.group(0)
content = re.sub("\{(\n[^,\[\{]{10,100}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL)
# Remove end of line whitespace
content = re.sub("(?m)[ ]+$", "", content)
# Write to disk
self.write(inner_path, content)
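# Illustrative effect of the compaction above (not in the original): a short,
# comma-free object that json.dumps(indent=1) spreads over three lines, e.g.
#   {
#    "name": "value"
#   },
# is collapsed back to {"name": "value"}, while longer or nested objects keep
# their indentation.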
# Get file size
def getSize(self, inner_path):
path = self.getPath(inner_path)
try:
return os.path.getsize(path)
except:
return 0
# File exist
def isFile(self, inner_path):
return os.path.isfile(self.getPath(inner_path))
# File or directory exist
def isExists(self, inner_path):
return os.path.exists(self.getPath(inner_path))
# Dir exist
def isDir(self, inner_path):
return os.path.isdir(self.getPath(inner_path))
# Security check and return path of site's file
def getPath(self, inner_path):
inner_path = inner_path.replace("\\", "/") # Windows separator fix
if not inner_path:
return self.directory
if ".." in inner_path:
raise Exception(u"File not allowed: %s" % inner_path)
return u"%s/%s" % (self.directory, inner_path)
# Get site dir relative path
def getInnerPath(self, path):
if path == self.directory:
inner_path = ""
else:
inner_path = re.sub("^%s/" % re.escape(self.directory), "", path)
return inner_path
# Verify all files sha512sum using content.json
def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True):
bad_files = []
i = 0
if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first
self.log.debug("VerifyFile content.json not exists")
self.site.needFile("content.json", update=True) # Force update to fix corrupt file
self.site.content_manager.loadContent() # Reload content.json
for content_inner_path, content in self.site.content_manager.contents.items():
i += 1
if i % 50 == 0:
time.sleep(0.0001) # Context switch to avoid gevent hangs
if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
self.log.debug("[MISSING] %s" % content_inner_path)
bad_files.append(content_inner_path)
for file_relative_path in content.get("files", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path):
self.log.debug("[MISSING] %s" % file_inner_path)
bad_files.append(file_inner_path)
continue
if quick_check:
ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
else:
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
if not ok:
self.log.debug("[CHANGED] %s" % file_inner_path)
if add_changed or content.get("cert_user_id"): # If updating own site only add changed user files
bad_files.append(file_inner_path)
# Optional files
optional_added = 0
optional_removed = 0
for file_relative_path in content.get("files_optional", {}).keys():
file_node = content["files_optional"][file_relative_path]
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path):
if self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"])
if add_optional:
bad_files.append(file_inner_path)
continue
if quick_check:
ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"]
else:
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
if ok:
if not self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
self.site.content_manager.optionalDownloaded(file_inner_path, file_node["sha512"], file_node["size"])
optional_added += 1
else:
if self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"])
optional_removed += 1
bad_files.append(file_inner_path)
self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path)
if config.verbose:
self.log.debug(
"%s verified: %s, quick: %s, optionals: +%s -%s" %
(content_inner_path, len(content["files"]), quick_check, optional_added, optional_removed)
)
time.sleep(0.0001) # Context switch to avoid gevent hangs
return bad_files
# Check and try to fix site files integrity
def updateBadFiles(self, quick_check=True):
s = time.time()
bad_files = self.verifyFiles(
quick_check,
add_optional=self.site.isDownloadable(""),
add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned
)
self.site.bad_files = {}
if bad_files:
for bad_file in bad_files:
self.site.bad_files[bad_file] = 1
self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check))
# Delete all of the site's files
def deleteFiles(self):
self.log.debug("Deleting files from content.json...")
files = [] # Get filenames
for content_inner_path in self.site.content_manager.contents.keys():
content = self.site.content_manager.contents[content_inner_path]
files.append(content_inner_path)
# Add normal files
for file_relative_path in content.get("files", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
# Add optional files
for file_relative_path in content.get("files_optional", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
if self.isFile("dbschema.json"):
self.log.debug("Deleting db file...")
self.closeDb()
self.has_db = False
try:
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path):
os.unlink(db_path)
except Exception, err:
self.log.error("Db file delete error: %s" % err)
for inner_path in files:
path = self.getPath(inner_path)
if os.path.isfile(path):
for retry in range(5):
try:
os.unlink(path)
break
except Exception, err:
self.log.error("Error removing %s: %s, try #%s" % (path, err, retry))
time.sleep(float(retry) / 10)
self.onUpdated(inner_path, False)
self.log.debug("Deleting empty dirs...")
for root, dirs, files in os.walk(self.directory, topdown=False):
for dir in dirs:
path = os.path.join(root, dir)
if os.path.isdir(path) and os.listdir(path) == []:
os.removedirs(path)
self.log.debug("Removing %s" % path)
if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
os.removedirs(self.directory) # Remove sites directory if empty
if os.path.isdir(self.directory):
self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)
return False # Some files not deleted
else:
self.log.debug("Site data directory deleted: %s..." % self.directory)
return True # All clean
|
mit
| -2,596,954,781,683,501,000
| 41.364465
| 130
| 0.561136
| false
| 3.941089
| true
| false
| false
|
migasfree/migasfree-backend
|
migasfree/settings/base.py
|
1
|
7965
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2021 Jose Antonio Chavarría <jachavar@gmail.com>
# Copyright (c) 2015-2021 Alberto Gacías <alberto@migasfree.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import django
import django.conf.global_settings as DEFAULT_SETTINGS
from corsheaders.defaults import default_headers
from .migasfree import BASE_DIR, MIGASFREE_TMP_DIR
if django.VERSION < (3, 1, 0, 'final'):
print('Migasfree requires at least Django 3.1.0. Please update it.')
exit(1)
ADMINS = (
('Your name', 'your_name@example.com'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Madrid'
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
FIRST_DAY_OF_WEEK = 1
DATE_FORMAT = 'Y-m-d'
DATETIME_FORMAT = 'Y-m-d H:i:s'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = False
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'django.contrib.staticfiles.finders.FileSystemFinder',
)
STATIC_URL = '/static/'
MEDIA_URL = '/pub/'
FILE_UPLOAD_TEMP_DIR = MIGASFREE_TMP_DIR
LOGIN_REDIRECT_URL = '/'
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'i18n'),
)
ADMIN_SITE_ROOT_URL = '/admin/'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.gzip.GZipMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
DEFAULT_CHARSET = 'utf-8'
ROOT_URLCONF = 'migasfree.urls'
ASGI_APPLICATION = 'migasfree.asgi.application'
WSGI_APPLICATION = 'migasfree.wsgi.application'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.humanize',
'django.contrib.admindocs',
'django.contrib.messages',
'django.contrib.staticfiles',
'graphene_django',
'rest_framework',
'rest_framework.authtoken',
'drf_yasg',
'rest_framework_filters',
'dj_rest_auth',
'django_filters',
'corsheaders',
'djoser',
'import_export',
'markdownx',
'channels',
'migasfree.core',
'migasfree.app_catalog',
'migasfree.client',
'migasfree.stats',
'migasfree.hardware',
'migasfree.device',
'migasfree.api_v4',
)
DATA_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024 * 100 # 100 MB
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
'rest_framework_simplejwt.authentication.JWTAuthentication',
),
'DEFAULT_FILTER_BACKENDS': (
'rest_framework_filters.backends.RestFrameworkFilterBackend',
'rest_framework.filters.OrderingFilter',
'rest_framework.filters.SearchFilter',
),
'DEFAULT_PAGINATION_CLASS': 'migasfree.paginations.DefaultPagination',
'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema',
}
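# Hypothetical client-side sketch (not part of these settings): with
# TokenAuthentication enabled above, an API client sends a header such as
#   Authorization: Token <key>
# while browser-based access falls back to SessionAuthentication.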
REST_AUTH_SERIALIZERS = {
'USER_DETAILS_SERIALIZER': 'migasfree.core.serializers.UserProfileSerializer',
}
GRAPHENE = {
'SCHEMA': 'migasfree.schema.schema'
}
CORS_ALLOW_HEADERS = list(default_headers) + [
'accept-language',
]
# http://docs.python.org/2/howto/logging-cookbook.html
# http://docs.python.org/2/library/logging.html#logrecord-attributes
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s - %(levelname)s - %(module)s - %(lineno)d '
'- %(funcName)s - %(message)s',
},
'simple': {
'format': '%(asctime)s - %(levelname)s - %(filename)s - %(message)s'
},
},
'handlers': {
'console': {
'level': 'ERROR',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'file': {
'level': 'ERROR',
'class': 'logging.handlers.RotatingFileHandler',
'formatter': 'verbose',
'filename': os.path.join(MIGASFREE_TMP_DIR, 'migasfree-backend.log'),
'maxBytes': 1024 * 1024 * 10, # 10 MB
},
'celery': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(MIGASFREE_TMP_DIR, 'migasfree-celery.log'),
'formatter': 'simple',
'maxBytes': 1024 * 1024 * 100, # 100 MB
},
},
'loggers': {
'migasfree': {
'handlers': ['console', 'file'],
'level': 'ERROR',
},
'celery': {
'handlers': ['celery', 'console'],
'level': 'DEBUG',
},
},
}
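# Hypothetical usage sketch (not part of the original settings). Django applies
# the LOGGING dict at startup; the guard keeps this demo inert on import, and it
# assumes MIGASFREE_TMP_DIR already exists so the file handler can be created.
if __name__ == '__main__':
    import logging.config
    logging.config.dictConfig(LOGGING)
    log = logging.getLogger('migasfree')
    log.error('reaches both the console and the rotating file handler')
    log.debug('dropped: the migasfree logger is configured at ERROR level')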
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
'django.contrib.auth.hashers.CryptPasswordHasher',
)
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.request',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
'debug': False,
}
}
]
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
SWAGGER_SETTINGS = {
'LOGIN_URL': '/rest-auth/login/',
'LOGOUT_URL': '/rest-auth/logout/'
}
SESSION_COOKIE_NAME = 'migasfree_backend'
# CSRF_COOKIE_NAME = 'csrftoken_migasfree_backend' # issue with markdownx component :_(
|
gpl-3.0
| -5,171,708,381,460,351,000
| 29.864341
| 91
| 0.656662
| false
| 3.585322
| false
| false
| false
|
Spanarchie/pyRest
|
pyRest/lib/python2.7/site-packages/py/_path/common.py
|
1
|
12572
|
"""
"""
import os, sys
import py
class Checkers:
_depend_on_existence = 'exists', 'link', 'dir', 'file'
def __init__(self, path):
self.path = path
def dir(self):
raise NotImplementedError
def file(self):
raise NotImplementedError
def dotfile(self):
return self.path.basename.startswith('.')
def ext(self, arg):
if not arg.startswith('.'):
arg = '.' + arg
return self.path.ext == arg
def exists(self):
raise NotImplementedError
def basename(self, arg):
return self.path.basename == arg
def basestarts(self, arg):
return self.path.basename.startswith(arg)
def relto(self, arg):
return self.path.relto(arg)
def fnmatch(self, arg):
return self.path.fnmatch(arg)
def endswith(self, arg):
return str(self.path).endswith(arg)
def _evaluate(self, kw):
for name, value in kw.items():
invert = False
meth = None
try:
meth = getattr(self, name)
except AttributeError:
if name[:3] == 'not':
invert = True
try:
meth = getattr(self, name[3:])
except AttributeError:
pass
if meth is None:
raise TypeError(
"no %r checker available for %r" % (name, self.path))
try:
if py.code.getrawcode(meth).co_argcount > 1:
if (not meth(value)) ^ invert:
return False
else:
if bool(value) ^ bool(meth()) ^ invert:
return False
except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
# EBUSY feels not entirely correct,
# but it's kind of necessary since ENOMEDIUM
# is not accessible in python
for name in self._depend_on_existence:
if name in kw:
if kw.get(name):
return False
name = 'not' + name
if name in kw:
if not kw.get(name):
return False
return True
class NeverRaised(Exception):
pass
class PathBase(object):
""" shared implementation for filesystem path objects."""
Checkers = Checkers
def __div__(self, other):
return self.join(str(other))
__truediv__ = __div__ # py3k
def basename(self):
""" basename part of path. """
return self._getbyspec('basename')[0]
basename = property(basename, None, None, basename.__doc__)
def dirname(self):
""" dirname part of path. """
return self._getbyspec('dirname')[0]
dirname = property(dirname, None, None, dirname.__doc__)
def purebasename(self):
""" pure base name of the path."""
return self._getbyspec('purebasename')[0]
purebasename = property(purebasename, None, None, purebasename.__doc__)
def ext(self):
""" extension of the path (including the '.')."""
return self._getbyspec('ext')[0]
ext = property(ext, None, None, ext.__doc__)
def dirpath(self, *args, **kwargs):
""" return the directory Path of the current Path joined
with any given path arguments.
"""
return self.new(basename='').join(*args, **kwargs)
def read_binary(self):
""" read and return a bytestring from reading the path. """
with self.open('rb') as f:
return f.read()
def read_text(self, encoding):
""" read and return a Unicode string from reading the path. """
with self.open("r", encoding=encoding) as f:
return f.read()
def read(self, mode='r'):
""" read and return a bytestring from reading the path. """
with self.open(mode) as f:
return f.read()
def readlines(self, cr=1):
""" read and return a list of lines from the path. if cr is False, the
newline will be removed from the end of each line. """
if not cr:
content = self.read('rU')
return content.split('\n')
else:
f = self.open('rU')
try:
return f.readlines()
finally:
f.close()
def load(self):
""" (deprecated) return object unpickled from self.read() """
f = self.open('rb')
try:
return py.error.checked_call(py.std.pickle.load, f)
finally:
f.close()
def move(self, target):
""" move this path to target. """
if target.relto(self):
raise py.error.EINVAL(target,
"cannot move path into a subdirectory of itself")
try:
self.rename(target)
except py.error.EXDEV: # invalid cross-device link
self.copy(target)
self.remove()
def __repr__(self):
""" return a string representation of this path. """
return repr(str(self))
def check(self, **kw):
""" check a path for existence and properties.
Without arguments, return True if the path exists, otherwise False.
valid checkers::
file=1 # is a file
file=0 # is not a file (may not even exist)
dir=1 # is a dir
link=1 # is a link
exists=1 # exists
You can specify multiple checker definitions, for example::
path.check(file=1, link=1) # a link pointing to a file
"""
if not kw:
kw = {'exists' : 1}
return self.Checkers(self)._evaluate(kw)
def fnmatch(self, pattern):
"""return true if the basename/fullname matches the glob-'pattern'.
valid pattern characters::
* matches everything
? matches any single character
[seq] matches any character in seq
[!seq] matches any char not in seq
If the pattern contains a path-separator then the full path
is used for pattern matching and a '*' is prepended to the
pattern.
if the pattern doesn't contain a path-separator the pattern
is only matched against the basename.
"""
return FNMatcher(pattern)(self)
def relto(self, relpath):
""" return a string which is the relative part of the path
to the given 'relpath'.
"""
if not isinstance(relpath, (str, PathBase)):
raise TypeError("%r: not a string or path object" %(relpath,))
strrelpath = str(relpath)
if strrelpath and strrelpath[-1] != self.sep:
strrelpath += self.sep
#assert strrelpath[-1] == self.sep
#assert strrelpath[-2] != self.sep
strself = str(self)
if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
if os.path.normcase(strself).startswith(
os.path.normcase(strrelpath)):
return strself[len(strrelpath):]
elif strself.startswith(strrelpath):
return strself[len(strrelpath):]
return ""
def ensure_dir(self, *args):
""" ensure the path joined with args is a directory. """
return self.ensure(*args, **{"dir": True})
def bestrelpath(self, dest):
""" return a string which is a relative path from self
(assumed to be a directory) to dest such that
self.join(bestrelpath) == dest and if not such
path can be determined return dest.
"""
try:
if self == dest:
return os.curdir
base = self.common(dest)
if not base: # can be the case on windows
return str(dest)
self2base = self.relto(base)
reldest = dest.relto(base)
if self2base:
n = self2base.count(self.sep) + 1
else:
n = 0
l = [os.pardir] * n
if reldest:
l.append(reldest)
target = dest.sep.join(l)
return target
except AttributeError:
return str(dest)
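# Illustrative results (not in the original docstring):
#   local('/a/b').bestrelpath(local('/a/c')) -> '../c'
#   local('/a/b').bestrelpath(local('/a/b')) -> '.'    (os.curdir)
#   with no common base (e.g. different Windows drives) -> str(dest)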
def exists(self):
return self.check()
def isdir(self):
return self.check(dir=1)
def isfile(self):
return self.check(file=1)
def parts(self, reverse=False):
""" return a root-first list of all ancestor directories
plus the path itself.
"""
current = self
l = [self]
while 1:
last = current
current = current.dirpath()
if last == current:
break
l.append(current)
if not reverse:
l.reverse()
return l
def common(self, other):
""" return the common part shared with the other path
or None if there is no common part.
"""
last = None
for x, y in zip(self.parts(), other.parts()):
if x != y:
return last
last = x
return last
def __add__(self, other):
""" return new path object with 'other' added to the basename"""
return self.new(basename=self.basename+str(other))
def __cmp__(self, other):
""" return sort value (-1, 0, +1). """
try:
return cmp(self.strpath, other.strpath)
except AttributeError:
return cmp(str(self), str(other)) # self.path, other.path)
def __lt__(self, other):
try:
return self.strpath < other.strpath
except AttributeError:
return str(self) < str(other)
def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
""" yields all paths below the current one
fil is a filter (glob pattern or callable), if not matching the
path will not be yielded, defaulting to None (everything is
returned)
rec is a filter (glob pattern or callable) that controls whether
a node is descended, defaulting to None
ignore is an Exception class that is ignored when calling dirlist()
on any of the paths (by default, all exceptions are reported)
bf if True will cause a breadth-first search instead of the
default depth-first. Default: False
sort if True will sort entries within each directory level.
"""
for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
yield x
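# Illustrative call (not in the original): yield all .py files below a
# directory, skipping any '.svn' subtrees.
#   for p in some_dir.visit(fil='*.py', rec=lambda d: d.basename != '.svn'):
#       ...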
def _sortlist(self, res, sort):
if sort:
if hasattr(sort, '__call__'):
res.sort(sort)
else:
res.sort()
def samefile(self, other):
""" return True if other refers to the same stat object as self. """
return self.strpath == str(other)
class Visitor:
def __init__(self, fil, rec, ignore, bf, sort):
if isinstance(fil, str):
fil = FNMatcher(fil)
if isinstance(rec, str):
self.rec = FNMatcher(rec)
elif not hasattr(rec, '__call__') and rec:
self.rec = lambda path: True
else:
self.rec = rec
self.fil = fil
self.ignore = ignore
self.breadthfirst = bf
self.optsort = sort and sorted or (lambda x: x)
def gen(self, path):
try:
entries = path.listdir()
except self.ignore:
return
rec = self.rec
dirs = self.optsort([p for p in entries
if p.check(dir=1) and (rec is None or rec(p))])
if not self.breadthfirst:
for subdir in dirs:
for p in self.gen(subdir):
yield p
for p in self.optsort(entries):
if self.fil is None or self.fil(p):
yield p
if self.breadthfirst:
for subdir in dirs:
for p in self.gen(subdir):
yield p
class FNMatcher:
def __init__(self, pattern):
self.pattern = pattern
def __call__(self, path):
pattern = self.pattern
if pattern.find(path.sep) == -1:
name = path.basename
else:
name = str(path) # path.strpath # XXX svn?
if not os.path.isabs(pattern):
pattern = '*' + path.sep + pattern
return py.std.fnmatch.fnmatch(name, pattern)
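# Illustrative sketch (not part of the original module): exercising the
# check()/fnmatch() machinery above through py.path.local. The guard keeps
# the demo inert on import.
if __name__ == '__main__':
    import tempfile
    tmp = py.path.local(tempfile.gettempdir())
    print(tmp.check(dir=1))                    # True: exists and is a directory
    print(tmp.check(file=1))                   # False: not a regular file
    print(tmp.join('a.txt').fnmatch('*.txt'))  # True: matched against the basename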
|
unlicense
| -194,356,141,815,722,620
| 30.989822
| 82
| 0.529271
| false
| 4.37287
| false
| false
| false
|
jgr208/stig-fix-el6-kickstart
|
config/stig-fix/menu.py
|
1
|
49277
|
#!/usr/bin/python
# Graphical Kickstart Script
#
# This script was written by Frank Caviggia, Red Hat Consulting
# Edited by Jason Ricles, Mikros Systems Corp
# Last update was 7 December 2015
# This script is NOT SUPPORTED by Red Hat Global Support Services.
# Please contact Josh Waldman for more information.
#
# Author: Frank Caviggia (fcaviggi@redhat.com)
# Copyright: Red Hat, (C) 2013
# Version: 1.3
# License: GPLv2
import os,sys,re,crypt,random
try:
os.environ['DISPLAY']
import pygtk,gtk
except:
print "Error: DISPLAY environment varible not set."
sys.exit(1)
# Class containing verification items
class Verification:
# Name/Comment Check
def check_name(self,name):
pattern = re.compile(r"^[ a-zA-Z']+$",re.VERBOSE)
if re.match(pattern,name):
return True
else:
return False
# Check for valid Unix username
def check_username(self,username):
pattern = re.compile(r"^\w{5,255}$",re.VERBOSE)
if re.match(pattern,username):
return True
else:
return False
# Check for valid Unix UID
def check_uid(self,uid):
pattern = re.compile(r"^\d{1,10}$",re.VERBOSE)
if re.match(pattern,uid):
return True
else:
return False
# Check for valid IP address
def check_ip(self,ip):
pattern = re.compile(r"\b(([01]?\d?\d|2[0-4]\d|25[0-5])\.){3}([01]?\d?\d|2[0-4]\d|25[0-3])\b",re.VERBOSE)
if re.match(pattern,ip) and ip != "0.0.0.0":
return True
else:
return False
# Check for valid system hostname
def check_hostname(self,hostname):
pattern = re.compile(r"^[a-zA-Z0-9\-\.]{1,100}$",re.VERBOSE)
if re.match(pattern,hostname):
return True
else:
return False
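# Hypothetical self-test sketch (not part of the original script): exercises the
# Verification regexes above; defined as a function so nothing runs at import time.
def _verification_examples():
    v = Verification()
    assert v.check_ip('192.168.1.10')        # in-range dotted quad
    assert not v.check_ip('0.0.0.0')         # explicitly rejected above
    assert v.check_username('admin_user')    # 5-255 word characters
    assert not v.check_uid('12a')            # UID must be 1-10 digits
    assert v.check_hostname('node-01.example.mil')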
# Display Menu
class Display_Menu:
def __init__(self):
# Initialize Additional Configuration Files
f = open('/tmp/stig-fix-post','w')
f.write('')
f.close()
f = open('/tmp/stig-fix-packages','w')
f.write('')
f.close()
# Data Storage
self.data = {}
# Verification Functions
self.verify = Verification()
# Create Main Window
self.window = gtk.Window()
self.window.set_title("Red Hat Enterprise Linux - DISA STIG Installation")
self.window.set_position(gtk.WIN_POS_CENTER)
self.window.connect("delete_event",gtk.main_quit)
self.display = gtk.gdk.display_get_default()
self.screen = self.display.get_default_screen()
self.hres = self.screen.get_width()
self.vres = self.screen.get_height()
self.window.connect("key-release-event",self.event_key)
# Create Main Vertical Box to Populate
self.vbox = gtk.VBox()
if self.hres == 640:
self.window.resize(640,480)
elif self.hres > 640:
self.window.resize(800,600)
# RedHat Logo
self.logo = gtk.Image()
self.logo.set_from_file("/usr/share/anaconda/pixmaps/anaconda_header.png")
self.logo.set_alignment(0,0)
self.logo.set_padding(0,0)
self.vbox.add(self.logo)
# Creates Header
self.header = gtk.HBox()
self.label = gtk.Label("<span font_family='liberation-sans' weight='bold' foreground='red' size='large'> Red Hat Enterprise Linux - DISA STIG Installation </span>")
self.label.set_use_markup(True)
self.header.add(self.label)
self.vbox.add(self.header)
# Creates Information Message
self.label = gtk.Label('This DVD installs Red Hat Enterprise Linux 6 with configurations required by the DISA STIG.')
self.vbox.add(self.label)
self.label = gtk.Label('RHEL 6 (STIG Installer v.1.3)')
self.vbox.add(self.label)
# Blank Label
self.label = gtk.Label("")
self.vbox.add(self.label)
# System Configuration
self.system = gtk.HBox()
self.label = gtk.Label(" Hostame: ")
self.system.pack_start(self.label,False,True, 0)
self.hostname = gtk.Entry(100)
self.hostname.set_size_request(225,-1)
self.system.pack_start(self.hostname,False,True,0)
try:
if os.environ['HOSTNAME'] != '':
self.hostname.set_text(os.environ['HOSTNAME'])
else:
self.hostname.set_text('localhost.localdomain')
except:
self.hostname.set_text('localhost.localdomain')
self.label = gtk.Label(" System Profile: ")
self.system.pack_start(self.label,False,True, 0)
self.system_profile = gtk.combo_box_new_text()
self.system_profile.append_text("Minimal Installation")
self.system_profile.append_text("User Workstation")
self.system_profile.append_text("Developer Workstation")
self.system_profile.append_text("RHN Satellite Server")
self.system_profile.append_text("Proprietary Database Server")
self.system_profile.append_text("RHEV-Attached KVM Server")
#self.system_profile.append_text("Standalone KVM Server")
#self.system_profile.append_text("Apache Web Server")
#self.system_profile.append_text("Tomcat Web Server")
#self.system_profile.append_text("PostgreSQL Database Server")
#self.system_profile.append_text("MySQL Database Server")
self.system_profile.set_active(0)
self.system_profile.connect('changed',self.configure_system_profile)
self.system.pack_start(self.system_profile,False,True,0)
self.vbox.add(self.system)
self.classification = gtk.HBox()
self.label = gtk.Label(" System Classification: ")
self.classification.pack_start(self.label,False,True, 0)
self.system_classification = gtk.combo_box_new_text()
self.system_classification.append_text("UNCLASSIFIED")
self.system_classification.append_text("UNCLASSIFIED//FOUO")
self.system_classification.append_text("CONFIDENTIAL")
self.system_classification.append_text("SECRET")
self.system_classification.append_text("TOP SECRET")
self.system_classification.append_text("TOP SECRET//SCI")
self.system_classification.append_text("TOP SECRET//SCI//NOFORN")
self.system_classification.set_active(0)
self.classification.pack_start(self.system_classification,False,True,0)
self.vbox.add(self.classification)
# Blank Label
self.label = gtk.Label("")
self.vbox.add(self.label)
# System Information
self.cpu_cores = 0
self.cpu_model = ''
self.cpu_arch = ''
self.system_memory = {}
with open('/proc/cpuinfo') as f:
for line in f:
if line.strip():
if line.rstrip('\n').startswith('model name'):
self.cpu_model = line.rstrip('\n').split(':')[1]
self.cpu_cores += 1
elif line.rstrip('\n').startswith('flags') or line.rstrip('\n').startswith('Features'):
if 'lm' in line.rstrip('\n').split():
self.cpu_arch = '64-bit'
else:
self.cpu_arch = '32-bit'
f.close()
with open('/proc/meminfo') as f:
for line in f:
self.system_memory[line.split(':')[0]] = line.split(':')[1].strip()
f.close()
self.cpu_information = gtk.HBox()
self.label = gtk.Label(" CPU Model: ")
self.cpu_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" %s "%(self.cpu_model))
self.cpu_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" CPU Threads: ")
self.cpu_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" %d "%(self.cpu_cores))
self.cpu_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" Architecure: ")
self.cpu_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" %s "%(self.cpu_arch))
self.cpu_information.pack_start(self.label,False,True, 0)
self.vbox.add(self.cpu_information)
self.memory_information = gtk.HBox()
self.label = gtk.Label(" Total System Memory: ")
self.memory_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" %s "%(self.system_memory['MemTotal']))
self.memory_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" Free Memory: ")
self.memory_information.pack_start(self.label,False,True, 0)
self.label = gtk.Label(" %s "%(self.system_memory['MemFree']))
self.memory_information.pack_start(self.label,False,True, 0)
self.vbox.add(self.memory_information)
# Disk Partitioning Section
self.label = gtk.Label("\n<span font_family='liberation-sans' weight='bold'>Disk Partitioning</span>")
self.label.set_use_markup(True)
self.vbox.add(self.label)
# Blank Label
self.label = gtk.Label("")
self.vbox.add(self.label)
# List Disks
self.disk_list = gtk.HBox()
self.disk_info = []
self.disk_total = 0
self.output = os.popen('list-harddrives')
for self.line in self.output:
self.line = self.line.strip()
if not ('fd0' in self.line or 'sr0' in self.line):
self.disk_info.append(self.line.split(' '))
self.label = gtk.Label(" Available Disks: ")
self.disk_list.pack_start(self.label, False, True, 0)
if len(self.disk_info) == 0:
self.label = gtk.Label("No Drives Available.")
self.disk_list.pack_start(self.label,False,True,0)
else:
for i in range(len(self.disk_info)):
if len(self.disk_info) > 5:
exec("self.disk%d = gtk.CheckButton(self.disk_info[%d][0])"%(i,i))
else:
exec("self.disk%s = gtk.CheckButton(self.disk_info[%d][0] +' ('+ str(int(float(self.disk_info[%d][1]))/1024) +'Gb)')"%(i,i,i))
exec("self.disk%d.set_active(True)"%(i))
exec("self.disk_list.pack_start(self.disk%d, False, True, 0)"%(i))
self.disk_total += int(float(self.disk_info[i][1])/1024)
self.vbox.add(self.disk_list)
# Disk Encryption (Ability to disable LUKS for self encrypting drives)
self.encrypt = gtk.HBox()
self.core = gtk.HBox()
self.tim = gtk.HBox()
self.label = gtk.Label(" ")
self.encrypt.pack_start(self.label, False, True, 0)
self.label = gtk.Label(" ")
self.core.pack_start(self.label, False, True, 0)
self.label = gtk.Label(" ")
self.tim.pack_start(self.label, False, True, 0)
self.encrypt_disk = gtk.CheckButton('Encrypt Drives with LUKS')
self.core_install = gtk.CheckButton('CORE')
self.tim_install = gtk.CheckButton('TIM')
self.encrypt_disk.set_active(True)
self.core_install.set_active(False)
self.tim_install.set_active(False)
self.encrypt.pack_start(self.encrypt_disk, False, True, 0)
self.core.pack_start(self.core_install, False, True, 0)
self.tim.pack_start(self.tim_install, False, True, 0)
self.tim_install.connect("clicked",self.choose)
self.core_install.connect("clicked",self.choose)
self.vbox.add(self.encrypt)
self.vbox.add(self.core)
self.vbox.add(self.tim)
# Minimal Installation Warning
if self.disk_total < 8:
self.MessageBox(self.window,"<b>Recommended minimum of 8Gb disk space for a Minimal Install!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
# Blank Label
self.label = gtk.Label("")
self.vbox.add(self.label)
# Partitioning
self.label = gtk.Label('Required LVM Partitioning Percentage')
self.vbox.add(self.label)
self.partitioning1 = gtk.HBox()
self.label = gtk.Label(" ROOT (/) ")
self.partitioning1.pack_start(self.label,False,True,0)
self.root_range = gtk.Adjustment(45,1,95,1,0, 0)
self.root_partition = gtk.SpinButton(adjustment=self.root_range,climb_rate=1,digits=0)
self.root_partition.connect('value-changed',self.lvm_check)
self.partitioning1.pack_start(self.root_partition,False,True,0)
self.label = gtk.Label("% HOME (/home) ")
self.partitioning1.pack_start(self.label,False,True,0)
self.home_range = gtk.Adjustment(15,1,95,1,0, 0)
self.home_partition = gtk.SpinButton(adjustment=self.home_range,climb_rate=1,digits=0)
self.home_partition.connect('value-changed',self.lvm_check)
self.partitioning1.pack_start(self.home_partition,False,True,0)
self.label = gtk.Label("% TMP (/tmp) ")
self.partitioning1.pack_start(self.label,False,True,0)
self.tmp_range = gtk.Adjustment(10,1,60,1,0, 0)
self.tmp_partition = gtk.SpinButton(adjustment=self.tmp_range,climb_rate=1,digits=0)
self.tmp_partition.connect('value-changed',self.lvm_check)
self.partitioning1.pack_start(self.tmp_partition,False,True,0)
self.label = gtk.Label("% VAR (/var) ")
self.partitioning1.pack_start(self.label,False,True,0)
self.var_range = gtk.Adjustment(10,1,95,1,0, 0)
self.var_partition = gtk.SpinButton(adjustment=self.var_range,climb_rate=1,digits=0)
self.var_partition.connect('value-changed',self.lvm_check)
self.partitioning1.pack_start(self.var_partition,False,True,0)
self.label = gtk.Label("%")
self.partitioning1.pack_start(self.label,False,True,0)
self.vbox.add(self.partitioning1)
self.partitioning2 = gtk.HBox()
self.label = gtk.Label(" LOG (/var/log) ")
self.partitioning2.pack_start(self.label,False,True,0)
self.log_range = gtk.Adjustment(10,1,75,1,0, 0)
self.log_partition = gtk.SpinButton(adjustment=self.log_range,climb_rate=1,digits=0)
self.log_partition.connect('value-changed',self.lvm_check)
self.partitioning2.pack_start(self.log_partition,False,True,0)
self.label = gtk.Label("% AUDIT (/var/log/audit) ")
self.partitioning2.pack_start(self.label,False,True,0)
self.audit_range = gtk.Adjustment(10,1,75,1,0, 0)
self.audit_partition = gtk.SpinButton(adjustment=self.audit_range,climb_rate=1,digits=0)
self.audit_partition.connect('value-changed',self.lvm_check)
self.partitioning2.pack_start(self.audit_partition,False,True,0)
self.label = gtk.Label("% SWAP ")
self.partitioning2.pack_start(self.label,False,True,0)
self.swap_range = gtk.Adjustment(0,0,25,1,0, 0)
self.swap_partition = gtk.SpinButton(adjustment=self.swap_range,climb_rate=1,digits=0)
self.swap_partition.connect('value-changed',self.lvm_check)
self.partitioning2.pack_start(self.swap_partition,False,True,0)
self.label = gtk.Label("%")
self.partitioning2.pack_start(self.label,False,True,0)
self.vbox.add(self.partitioning2)
# Blank Label
self.label = gtk.Label("")
self.vbox.add(self.label)
self.label = gtk.Label('Optional LVM Partitioning Percentage')
self.vbox.add(self.label)
self.partitioning3 = gtk.HBox()
self.label = gtk.Label(" WWW (/var/www) ")
self.partitioning3.pack_start(self.label,False,True,0)
self.www_range = gtk.Adjustment(0,0,90,1,0, 0)
self.www_partition = gtk.SpinButton(adjustment=self.www_range,climb_rate=1,digits=0)
self.www_partition.connect('value-changed',self.lvm_check)
self.partitioning3.pack_start(self.www_partition,False,True,0)
self.label = gtk.Label("% OPT (/opt) ")
self.partitioning3.pack_start(self.label,False,True,0)
self.opt_range = gtk.Adjustment(0,0,90,1,0, 0)
self.opt_partition = gtk.SpinButton(adjustment=self.opt_range,climb_rate=1,digits=0)
self.opt_partition.connect('value-changed',self.lvm_check)
self.partitioning3.pack_start(self.opt_partition,False,True,0)
self.label = gtk.Label("%")
self.partitioning3.pack_start(self.label,False,True,0)
self.vbox.add(self.partitioning3)
# Blank Label
self.label = gtk.Label("")
self.vbox.add(self.label)
self.partition_message = gtk.HBox()
self.label = gtk.Label(' Note: LVM Partitions should add up to 100% or less before proceeding. <b>Currently Used:</b> ')
self.label.set_use_markup(True)
self.partition_message.pack_start(self.label,False,True,0)
self.partition_used = gtk.Label('100%')
self.partition_message.pack_start(self.partition_used,False,True,0)
self.vbox.add(self.partition_message)
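# Worked example (illustrative): the defaults above allocate
# 45 (/) + 15 (/home) + 10 (/tmp) + 10 (/var) + 10 (/var/log) + 10 (/var/log/audit)
# + 0 (swap) = 100%, which is why the 'Currently Used' label starts at 100%.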
# Button Bar at the Bottom of the Window
self.label = gtk.Label("")
self.vbox.add(self.label)
self.button_bar = gtk.HBox()
# Apply Configurations
self.button1 = gtk.Button(None,gtk.STOCK_OK)
self.button1.connect("clicked",self.apply_configuration)
self.button_bar.pack_end(self.button1,False,True,0)
# Help
self.button2 = gtk.Button(None,gtk.STOCK_HELP)
self.button2.connect("clicked",self.show_help_main)
self.button_bar.pack_end(self.button2,False,True,0)
self.vbox.add(self.button_bar)
self.window.add(self.vbox)
self.window.show_all()
## STOCK CONFIGURATIONS (Minimal Install)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('-telnet-server\n')
f.write('-java-1.7.0-openjdk-devel\n')
f.write('-java-1.6.0-openjdk-devel\n')
f.write('gcc-c++\n')
f.write('dos2unix\n')
f.write('kernel-devel\n')
f.write('gcc\n')
f.write('dialog\n')
f.write('dmidecode\n')
f.write('aide\n')
f.close()
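# Note (illustrative): this package list is injected into the kickstart
# %packages section, where '@name' selects a package group and a leading '-'
# excludes a package that would otherwise be installed.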
# Key Press Event
def event_key(self,args,event):
if event.keyval == gtk.keysyms.F12:
self.apply_configuration(args)
elif event.keyval == gtk.keysyms.F1:
self.show_help_main(args)
# Shows Help for Main Install
def show_help_main(self,args):
self.help_text = ("<b>Install Help</b>\n\n- All LVM partitions need to take less than or equal to 100% of the LVM Volume Group.\n\n- Pressing OK prompts for a password to encrypt Disk (LUKS) and Root password. GRUB is installed with a randomly generated password. Use the 'grubby' command to modify grub configuration and the 'grub-crypt' command to generate a new password for grub.\n\n- To access root remotely via ssh you need to create a user and add them to the wheel and sshusers groups.\n\n- Minimum password length is 15 characters, using a strong password is recommended.\n")
self.MessageBox(self.window,self.help_text,gtk.MESSAGE_INFO)
# System Profile Configuration
def configure_system_profile(self,args):
# Zero out partitioning
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(0)
self.var_partition.set_value(0)
self.log_partition.set_value(0)
self.audit_partition.set_value(0)
self.home_partition.set_value(0)
self.root_partition.set_value(0)
################################################################################################################
# Minimal (Defaults to Kickstart)
################################################################################################################
if int(self.system_profile.get_active()) == 0:
# Partitioning
if self.disk_total < 8:
self.MessageBox(self.window,"<b>Recommended minimum of 8Gb disk space for a Minimal Install!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(15)
self.root_partition.set_value(45)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('-telnet-server\n')
f.write('-java-1.7.0-openjdk-devel\n')
f.write('-java-1.6.0-openjdk-devel\n')
f.write('gcc-c++\n')
f.write('dos2unix\n')
f.write('kernel-devel\n')
f.write('gcc\n')
f.write('dialog\n')
f.write('dmidecode\n')
f.write('aide\n')
f.close()
################################################################################################################
# User Workstation
################################################################################################################
if int(self.system_profile.get_active()) == 1:
# Partitioning
if self.disk_total < 12:
self.MessageBox(self.window,"<b>Recommended minimum of 12Gb disk space for a User Workstation!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(5)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(10)
self.root_partition.set_value(45)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('cp /mnt/source/stig-fix/classification-banner.py /mnt/sysimage/usr/local/bin/\n')
f.write('chmod a+rx /mnt/sysimage/usr/local/bin/classification-banner.py\n')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('@additional-devel\n')
f.write('@basic-desktop\n')
f.write('@desktop-platform\n')
f.write('@directory-client\n')
f.write('@general-desktop\n')
f.write('@graphical-admin-tools\n')
f.write('@input-methods\n')
f.write('@internet-browser\n')
f.write('@legacy-x\n')
f.write('@x11\n')
f.write('pcsc*\n')
f.write('aide\n')
f.write('coolkey\n')
f.write('liberation-*\n')
f.write('dejavu-*\n')
f.write('krb5-auth-dialog\n')
f.write('seahorse-plugins\n')
f.write('vim-X11\n')
f.write('gcc-c++\n')
f.write('dos2unix\n')
f.write('kernel-devel\n')
f.write('gcc\n')
f.write('dialog\n')
f.write('dmidecode\n')
f.write('policycoreutils-gui\n')
f.write('system-config-lvm\n')
f.write('audit-viewer\n')
f.write('openmotif\n')
f.write('libXmu\n')
f.write('libXp\n')
f.write('openmotif22\n')
f.write('-samba-winbind\n')
f.write('-certmonger\n')
f.write('-gnome-applets\n')
f.write('-vino\n')
f.write('-ypbind\n')
f.write('-cheese\n')
f.write('-gnome-backgrounds\n')
f.write('-compiz-gnome\n')
f.write('-gnome-bluetooth\n')
f.write('-gnome-user-share\n')
f.write('-sound-juicer\n')
f.write('-rhythmbox\n')
f.write('-brasero\n')
f.write('-brasero-nautilus\n')
f.write('-brasero-libs\n')
f.write('-NetworkManager\n')
f.write('-NetworkManager-gnome\n')
f.write('-evolution-data-server\n')
f.write('-NetworkManager-glib\n')
f.write('-m17n-contrib-bengali\n')
f.write('-m17n-contrib-punjabi\n')
f.write('-ibus-sayura\n')
f.write('-m17n-contrib-assamese\n')
f.write('-m17n-contrib-oriya\n')
f.write('-m17n-contrib-kannada\n')
f.write('-m17n-contrib-telugu\n')
f.write('-m17n-contrib-hindi\n')
f.write('-m17n-contrib-maithili\n')
f.write('-m17n-db-sinhala\n')
f.write('-m17n-contrib-marathi\n')
f.write('-m17n-db-thai\n')
f.write('-ibus-pinyin\n')
f.write('-m17n-contrib-urdu\n')
f.write('-m17n-contrib-tamil\n')
f.write('-ibus-chewing\n')
f.write('-ibus-hangul\n')
f.write('-ibus-anthy\n')
f.write('-m17n-contrib-malayalam\n')
f.write('-m17n-contrib-gujarati\n')
f.write('-telnet-server\n')
f.write('-java-1.7.0-openjdk-devel\n')
f.write('-java-1.6.0-openjdk-devel\n')
f.close()
################################################################################################################
# Developer Workstation
################################################################################################################
if int(self.system_profile.get_active()) == 2:
# Partitioning
if self.disk_total < 16:
self.MessageBox(self.window,"<b>Recommended minimum 16Gb disk space for a Developer Workstation!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(25)
self.root_partition.set_value(30)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('cp /mnt/source/stig-fix/classification-banner.py /mnt/sysimage/usr/local/bin/\n')
f.write('chmod a+rx /mnt/sysimage/usr/local/bin/classification-banner.py\n')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('@additional-devel\n')
f.write('@basic-desktop\n')
f.write('@desktop-platform\n')
f.write('@desktop-platform-devel\n')
f.write('@development\n')
f.write('@directory-client\n')
f.write('@eclipse\n')
f.write('@general-desktop\n')
f.write('@graphical-admin-tools\n')
f.write('@input-methods\n')
f.write('@internet-browser\n')
f.write('@legacy-x\n')
f.write('@server-platform-devel\n')
f.write('@x11\n')
f.write('pcsc*\n')
f.write('coolkey\n')
f.write('liberation-*\n')
f.write('dejavu-*\n')
f.write('libXinerama-devel\n')
f.write('openmotif-devel\n')
f.write('libXmu-devel\n')
f.write('xorg-x11-proto-devel\n')
f.write('startup-notification-devel\n')
f.write('libgnomeui-devel\n')
f.write('libbonobo-devel\n')
f.write('junit\n')
f.write('libXau-devel\n')
f.write('libgcrypt-devel\n')
f.write('popt-devel\n')
f.write('gnome-python2-desktop\n')
f.write('libdrm-devel\n')
f.write('libXrandr-devel\n')
f.write('libxslt-devel\n')
f.write('libglade2-devel\n')
f.write('gnutls-devel\n')
f.write('desktop-file-utils\n')
f.write('ant\n')
f.write('rpmdevtools\n')
f.write('jpackage-utils\n')
f.write('rpmlint\n')
f.write('krb5-auth-dialog\n')
f.write('seahorse-plugins\n')
f.write('vim-X11\n')
f.write('system-config-lvm\n')
f.write('audit-viewer\n')
f.write('openmotif\n')
f.write('libXmu\n')
f.write('libXp\n')
f.write('openmotif22\n')
f.write('-samba-winbind\n')
f.write('-certmonger\n')
f.write('-gnome-applets\n')
f.write('-vino\n')
f.write('-ypbind\n')
f.write('-cheese\n')
f.write('-gnome-backgrounds\n')
f.write('-compiz-gnome\n')
f.write('-gnome-bluetooth\n')
f.write('-gnome-user-share\n')
f.write('-sound-juicer\n')
f.write('-rhythmbox\n')
f.write('-brasero\n')
f.write('-brasero-nautilus\n')
f.write('-brasero-libs\n')
f.write('-NetworkManager\n')
f.write('-NetworkManager-gnome\n')
f.write('-evolution-data-server\n')
f.write('-evolution-data-server-devel\n')
f.write('-NetworkManager-glib\n')
f.write('-m17n-contrib-bengali\n')
f.write('-m17n-contrib-punjabi\n')
f.write('-ibus-sayura\n')
f.write('-m17n-contrib-assamese\n')
f.write('-m17n-contrib-oriya\n')
f.write('-m17n-contrib-kannada\n')
f.write('-m17n-contrib-telugu\n')
f.write('-m17n-contrib-hindi\n')
f.write('-m17n-contrib-maithili\n')
f.write('-m17n-db-sinhala\n')
f.write('-m17n-contrib-marathi\n')
f.write('-m17n-db-thai\n')
f.write('-ibus-pinyin\n')
f.write('-m17n-contrib-urdu\n')
f.write('-m17n-contrib-tamil\n')
f.write('-ibus-chewing\n')
f.write('-ibus-hangul\n')
f.write('-ibus-anthy\n')
f.write('-m17n-contrib-malayalam\n')
f.write('-m17n-contrib-gujarati\n')
f.close()
################################################################################################################
# RHN Satellite Install
################################################################################################################
if int(self.system_profile.get_active()) == 3:
# Partitioning
if self.disk_total < 120:
self.MessageBox(self.window,"<b>Recommended minimum of 120Gb disk space for a RHN Satelite Server!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(3)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(2)
self.var_partition.set_value(80)
self.log_partition.set_value(3)
self.audit_partition.set_value(3)
self.home_partition.set_value(3)
self.root_partition.set_value(5)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
# RHN Satellite requires umask of 022 for installation
f.write('sed -i "/umask/ c\umask 022" /etc/profile\n')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('')
f.close()
################################################################################################################
# Proprietary Database
################################################################################################################
if int(self.system_profile.get_active()) == 4:
# Partitioning
if self.disk_total < 60:
self.MessageBox(self.window,"<b>Recommended minimum of 60Gb disk space for a Proprietary Database Server!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.www_partition.set_value(0)
self.home_partition.set_value(5)
self.swap_partition.set_value(0)
self.var_partition.set_value(7)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.tmp_partition.set_value(15)
self.opt_partition.set_value(30)
self.root_partition.set_value(18)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('cp /mnt/source/stig-fix/classification-banner.py /mnt/sysimage/usr/local/bin/\n')
f.write('chmod a+rx /mnt/sysimage/usr/local/bin/classification-banner.py\n')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('xorg-x11-server-Xorg\n')
f.write('xorg-x11-xinit\n')
f.write('xterm\n')
f.write('twm\n')
f.write('liberation-*\n')
f.write('dejavu-*\n')
f.write('openmotif\n')
f.write('libXmu\n')
f.write('libXp\n')
f.write('openmotif22\n')
f.write('kernel-devel\n')
f.write('kernel-headers\n')
f.write('gcc\n')
f.write('gcc-c++\n')
f.write('libgcc\n')
f.write('autoconf\n')
f.write('make\n')
f.write('libstdc++\n')
f.write('compat-libstdc++\n')
f.write('libaio\n')
f.write('libaio-devel\n')
f.write('unixODBC\n')
f.write('unixODBC-devel\n')
f.write('sysstat\n')
f.write('ksh\n')
f.close()
################################################################################################################
# RHEV-Attached KVM Server (HARDENING SCRIPT NOT RUN UNTIL AFTER CONNECTION TO RHEVM SERVER)
################################################################################################################
if int(self.system_profile.get_active()) == 5:
# WARNING - HARDENING SCRIPT NOT RUN!
self.MessageBox(self.window,"<b>THIS PROFILE WILL NOT RUN THE HARDENING SCRIPT!</b>\n\nPlease run the system hardening script after system has been attached to the RHEV-M server using the following command:\n\n # stig-fix",gtk.MESSAGE_WARNING)
# Partitioning
if self.disk_total < 60:
self.MessageBox(self.window,"<b>Recommended minimum of 60Gb disk space for a RHEV-Attached KVM Server Install!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(25)
self.root_partition.set_value(30)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Allow 'root' to login via SSH - Required by RHEV-M
f.write('sed -i "/^PermitRootLogin/ c\PermitRootLogin yes" /etc/ssh/sshd_config')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('')
f.close()
################################################################################################################
# Standalone KVM Installation
################################################################################################################
if int(self.system_profile.get_active()) == 6:
# Partitioning
if self.disk_total < 60:
self.MessageBox(self.window,"<b>Recommended minimum 60Gb disk space for a RHEL/KVM Server!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(3)
self.var_partition.set_value(65)
self.log_partition.set_value(5)
self.audit_partition.set_value(5)
self.home_partition.set_value(5)
self.root_partition.set_value(15)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('@storage-client-iscsi\n')
f.write('@virtualization\n')
f.write('@virtualization-client\n')
f.write('@virtualization-platform\n')
f.write('@virtualization-tools\n')
f.write('perl-Sys-Virt\n')
f.write('qemu-kvm-tools\n')
f.write('fence-virtd-libvirt\n')
f.write('virt-v2v\n')
f.write('libguestfs-tools\n')
f.close()
################################################################################################################
# Apache HTTP (Web Server)
################################################################################################################
if int(self.system_profile.get_active()) == 7:
# Partitioning
if self.disk_total < 10:
self.MessageBox(self.window,"<b>Recommended minimum of 10Gb disk space for a Web Server!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(25)
self.root_partition.set_value(30)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('httpd\n')
f.close()
################################################################################################################
# Apache Tomcat
################################################################################################################
if int(self.system_profile.get_active()) == 8:
# Partitioning
if self.disk_total < 10:
self.MessageBox(self.window,"<b>Recommended minimum of 10Gb disk space for an Apache Tomcat Web Server!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(25)
self.root_partition.set_value(30)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('tomcat6\n')
f.close()
################################################################################################################
# PostgreSQL Database
################################################################################################################
if int(self.system_profile.get_active()) == 9:
# Partitioning
if self.disk_total < 16:
self.MessageBox(self.window,"<b>Recommended minimum of 16Gb disk space for a PostgreSQL Database Server!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(25)
self.root_partition.set_value(30)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('postgresql\n')
f.close()
################################################################################################################
# MySQL Database
################################################################################################################
if int(self.system_profile.get_active()) == 10:
# Partitioning
if self.disk_total < 16:
self.MessageBox(self.window,"<b>Recommended minimum of 16Gb disk space for a MariaDB Database Server!</b>\n\n You have "+str(self.disk_total)+"Gb available.",gtk.MESSAGE_WARNING)
self.opt_partition.set_value(0)
self.www_partition.set_value(0)
self.swap_partition.set_value(0)
self.tmp_partition.set_value(10)
self.var_partition.set_value(10)
self.log_partition.set_value(10)
self.audit_partition.set_value(10)
self.home_partition.set_value(25)
self.root_partition.set_value(30)
# Post Configuration (nochroot)
f = open('/tmp/stig-fix-post-nochroot','w')
f.write('')
f.close()
# Post Configuration
f = open('/tmp/stig-fix-post','w')
# Run Hardening Script
f.write('/sbin/stig-fix -q &> /dev/null')
f.close()
# Package Selection
f = open('/tmp/stig-fix-packages','w')
f.write('mysql-server\n')
f.close()
# Check LVM Partitioning
def lvm_check(self,args):
self.lvm = self.root_partition.get_value_as_int()+self.home_partition.get_value_as_int()+self.tmp_partition.get_value_as_int()+self.var_partition.get_value_as_int()+self.log_partition.get_value_as_int()+self.audit_partition.get_value_as_int()+self.swap_partition.get_value_as_int()+self.www_partition.get_value_as_int()+self.opt_partition.get_value_as_int()
self.partition_used.set_label(str(self.lvm)+'%')
if int(self.lvm) > 100:
self.MessageBox(self.window,"<b>Verify that LVM configuration is not over 100%!</b>",gtk.MESSAGE_ERROR)
return False
else:
return True
def choose(self, widget):
if self.tim_install.get_active() and self.core_install.get_active():
self.MessageBox(self.window,"<b>Cannot have both TIM and CORE installs!</b>",gtk.MESSAGE_ERROR)
self.tim_install.set_active(False)
self.core_install.set_active(False)
# Display Message Box (e.g. Help Screen, Warning Screen, etc.)
def MessageBox(self,parent,text,type=gtk.MESSAGE_INFO):
message = gtk.MessageDialog(parent,0,type,gtk.BUTTONS_OK)
message.set_markup(text)
response = message.run()
if response == gtk.RESPONSE_OK:
message.destroy()
# Get Password
def get_password(self,parent):
dialog = gtk.Dialog("Configure System Password",parent,gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT,(gtk.STOCK_CANCEL,gtk.RESPONSE_REJECT,gtk.STOCK_OK,gtk.RESPONSE_ACCEPT))
self.pass1 = gtk.HBox()
self.label1 = gtk.Label(" Passsword: ")
self.pass1.pack_start(self.label1,False,True,0)
self.password1 = gtk.Entry()
self.password1.set_visibility(False)
self.pass1.pack_start(self.password1,False,True,0)
dialog.vbox.add(self.pass1)
self.pass2 = gtk.HBox()
self.label2 = gtk.Label(" Verify Password: ")
self.pass2.pack_start(self.label2,False,True,0)
self.password2 = gtk.Entry()
self.password2.set_visibility(False)
self.pass2.pack_start(self.password2,False,True,0)
dialog.vbox.add(self.pass2)
dialog.show_all()
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
self.a = self.password1.get_text()
self.b = self.password2.get_text()
dialog.destroy()
else:
self.a = ''
self.b = ''
dialog.destroy()
# Apply Configurations to Kickstart File
def apply_configuration(self,args):
# Set system password
while True:
self.get_password(self.window)
if self.a == self.b:
if len(self.a) == 0:
return
elif len(self.a) >= 15:
self.passwd = self.a
break
else:
self.MessageBox(self.window,"<b>Password too short! 15 Characters Required.</b>",gtk.MESSAGE_ERROR)
else:
self.MessageBox(self.window,"<b>Passwords Don't Match!</b>",gtk.MESSAGE_ERROR)
self.error = 0
if self.verify.check_hostname(self.hostname.get_text()) == False:
self.MessageBox(self.window,"<b>Invalid Hostname!</b>",gtk.MESSAGE_ERROR)
self.error = 1
# Check Install Disks
self.install_disks = ""
self.ignore_disks = ""
for i in range(len(self.disk_info)):
if eval("self.disk%d.get_active()"%(i)) == True:
self.install_disks += self.disk_info[i][0]+","
else:
self.ignore_disks += self.disk_info[i][0]+","
self.data["INSTALL_DRIVES"] = self.install_disks[:-1]
self.data["IGNORE_DRIVES"] = self.ignore_disks[:-1]
if self.install_disks == "":
self.MessageBox(self.window,"<b>Please select at least one install disk!</b>",gtk.MESSAGE_ERROR)
self.error = 1
# Check LVM Partitioning
if self.lvm_check(args) == False:
self.error = 1
# Write Kickstart File
if self.error == 0:
# Generate Salt
self.salt = ''
self.alphabet = '.abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
for self.i in range(16):
self.index = random.randrange(len(self.alphabet))
self.salt = self.salt+self.alphabet[self.index]
# Encrypt Password
self.salt = '$6$'+self.salt
self.password = crypt.crypt(self.passwd,self.salt)
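# The '$6$' prefix selects SHA-512 crypt (on glibc-based systems), so
# self.password is a shadow-style hash of the form '$6$<salt>$<hash>',
# which is what the kickstart 'rootpw --iscrypted' line below expects.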
# Write Classification Banner Settings
if int(self.system_profile.get_active()) == 1 or int(self.system_profile.get_active()) == 2:
f = open('/tmp/classification-banner','w')
f.write('message = "'+str(self.system_classification.get_active_text())+'"\n')
if int(self.system_classification.get_active()) == 0 or int(self.system_classification.get_active()) == 1:
f.write('fgcolor = "#000000"\n')
f.write('bgcolor = "#00CC00"\n')
elif int(self.system_classification.get_active()) == 2:
f.write('fgcolor = "#000000"\n')
f.write('bgcolor = "#33FFFF"\n')
elif int(self.system_classification.get_active()) == 3:
f.write('fgcolor = "#FFFFFF"\n')
f.write('bgcolor = "#FF0000"\n')
elif int(self.system_classification.get_active()) == 4:
f.write('fgcolor = "#FFFFFF"\n')
f.write('bgcolor = "#FF9900"\n')
elif int(self.system_classification.get_active()) == 5:
f.write('fgcolor = "#000000"\n')
f.write('bgcolor = "#FFFF00"\n')
elif int(self.system_classification.get_active()) == 6:
f.write('fgcolor = "#000000"\n')
f.write('bgcolor = "#FFFF00"\n')
else:
f.write('fgcolor = "#000000"\n')
f.write('bgcolor = "#FFFFFF"\n')
f.close()
# Write Kickstart Configuration
f = open('/tmp/stig-fix','w')
if int(self.system_profile.get_active()) > 0:
f.write('network --device eth0 --bootproto dhcp --noipv6 --hostname '+self.hostname.get_text()+'\n')
else:
f.write('network --device eth0 --bootproto static --ip=192.168.1.101 --netmask=255.255.255.0 --onboot=on --noipv6 --hostname '+self.hostname.get_text()+'\n')
f.write('rootpw --iscrypted '+str(self.password)+'\n')
f.write('bootloader --location=mbr --driveorder='+str(self.data["INSTALL_DRIVES"])+' --append="crashkernel=auto rhgb quiet audit=1" --password='+str(self.password)+'\n')
#f.close()
# Write Kickstart Configuration (Hostname/Passwords)
#f = open('/tmp/partitioning','w')
if self.data["IGNORE_DRIVES"] != "":
f.write('ignoredisk --drives='+str(self.data["IGNORE_DRIVES"])+'\n')
f.write('zerombr\n')
f.write('clearpart --all --drives='+str(self.data["INSTALL_DRIVES"])+'\n')
if self.encrypt_disk.get_active():
f.write('part pv.01 --grow --size=200 --encrypted --cipher=\'aes-xts-plain64\' --passphrase='+str(self.passwd)+'\n')
else:
f.write('part pv.01 --grow --size=200\n')
f.write('part /boot --fstype=ext4 --size=300\n')
f.write('volgroup vg1 --pesize=4096 pv.01\n')
f.write('logvol / --fstype=ext4 --name=lv_root --vgname=vg1 --size=2048 --grow --percent='+str(self.root_partition.get_value_as_int())+'\n')
f.write('logvol /home --fstype=ext4 --name=lv_home --vgname=vg1 --size=1024 --grow --percent='+str(self.home_partition.get_value_as_int())+'\n')
f.write('logvol /tmp --fstype=ext4 --name=lv_tmp --vgname=vg1 --size=512 --grow --percent='+str(self.tmp_partition.get_value_as_int())+'\n')
f.write('logvol /var --fstype=ext4 --name=lv_var --vgname=vg1 --size=512 --grow --percent='+str(self.var_partition.get_value_as_int())+'\n')
f.write('logvol /var/log --fstype=ext4 --name=lv_log --vgname=vg1 --size=512 --grow --percent='+str(self.log_partition.get_value_as_int())+'\n')
f.write('logvol /var/log/audit --fstype=ext4 --name=lv_audit --vgname=vg1 --size=512 --grow --percent='+str(self.audit_partition.get_value_as_int())+'\n')
if self.swap_partition.get_value_as_int() >= 1:
f.write('logvol swap --fstype=swap --name=lv_swap --vgname=vg1 --size=256 --maxsize=4096 --percent='+str(self.swap_partition.get_value_as_int())+'\n')
if self.opt_partition.get_value_as_int() >= 1:
f.write('logvol /opt --fstype=ext4 --name=lv_opt --vgname=vg1 --size=512 --grow --percent='+str(self.opt_partition.get_value_as_int())+'\n')
if self.www_partition.get_value_as_int() >= 1:
f.write('logvol /var/www --fstype=ext4 --name=lv_www --vgname=vg1 --size=512 --grow --percent='+str(self.www_partition.get_value_as_int())+'\n')
f.close()
f = open('/tmp/system-choice','w')
if self.tim_install.get_active():
f.write('echo Installing tim config\n')
f.write('/opt/tim_config/install\n')
if self.core_install.get_active():
f.write('echo Installing core config\n')
f.write('/opt/core_config/install\n')
f.close()
gtk.main_quit()
# Executes Window Display
if __name__ == "__main__":
window = Display_Menu()
gtk.main()
|
apache-2.0
| 9,098,609,482,478,488,000
| 40.409244
| 586
| 0.604542
| false
| 3.025356
| true
| false
| false
|
spooky/lobby
|
src/widgets.py
|
1
|
6282
|
import logging
import os
import re
import asyncio
from collections import OrderedDict
from PyQt5.QtCore import QObject, QCoreApplication, QUrl, pyqtSignal, pyqtSlot
from PyQt5.QtQml import QQmlApplicationEngine
from PyQt5.QtGui import QGuiApplication, QIcon
from PyQt5.QtQuick import QQuickItem
import settings
import factories
from utils.async import asyncSlot
from view_models.chrome import MainWindowViewModel, LoginViewModel, TaskStatusViewModel
class Application(QGuiApplication):
logChanged = pyqtSignal(str)
initComplete = pyqtSignal()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
try:
self.setWindowIcon(QIcon('views/icons/faf.ico'))
except AttributeError: # setWindowIcon is available on windows only
pass
self.mapLookup = {}
self.modLookup = {}
@asyncio.coroutine
def __initMapLookup(self):
local = yield from factories.localMapLookup(settings.getMapDirs())
self.mapLookup.update(local)
@asyncio.coroutine
def __initModLookup(self):
local = yield from factories.localModLookup(settings.getModDirs())
self.modLookup.update(local)
@asyncio.coroutine
def __queueTask(self, asyncCoroutine, text='', indefinite=True, progress=0.0, running=False):
with self.report(text, indefinite, progress, running):
yield from asyncCoroutine()
@asyncSlot
def start(self):
logger = logging.getLogger(__name__)
try:
self.mainWindow = MainWindow(self)
self.mainWindow.show()
except Exception as e:
logger.critical('error during init: {}'.format(e))
self.quit()
else:
try:
logger.info('loading maps')
yield from self.__queueTask(self.__initMapLookup, QCoreApplication.translate('Application', 'loading maps'))
logger.info('loading mods')
yield from self.__queueTask(self.__initModLookup, QCoreApplication.translate('Application', 'loading mods'))
except Exception as e:
logger.error(e)
finally:
logger.debug('init complete')
self.initComplete.emit()
# Required for QtHandler to propagate log messages to client 'console'
def log(self, msg):
self.logChanged.emit(msg)
def report(self, text='', indefinite=True, progress=0.0, running=False):
status = TaskStatusViewModel(text, indefinite, progress, running)
self.mainWindow.windowModel.taskList.append(status)
return status
class MainWindow(QObject):
def __init__(self, parent=None):
super().__init__(parent)
self.log = logging.getLogger(__name__)
self.app = Application.instance()
self.windowModel = MainWindowViewModel(parent=self)
self.windowModel.switchView.connect(self._onSwitchView)
self.loginModel = LoginViewModel(self.app, parent=self)
self.loginModel.readCredentials()
self.loginModel.panelVisible = not self.loginModel.remember
if self.loginModel.remember:
self.loginModel.autologin()
self.engine = QQmlApplicationEngine(self)
self.engine.rootContext().setContextProperty('windowModel', self.windowModel)
self.engine.rootContext().setContextProperty('loginModel', self.loginModel)
self.engine.quit.connect(parent.quit)
self.engine.load(QUrl.fromLocalFile('views/Chrome.qml'))
self.viewManager = ViewManager(self.engine.rootContext(), self.windowModel, parent=self)
first = self._registerViews(settings.MODULES, self.app)
self.viewManager.loadView(first)
self.window = self.engine.rootObjects()[0]
# wire up logging console
self.console = self.window.findChild(QQuickItem, 'console')
parent.logChanged.connect(self._onLogChanged)
def show(self):
if not self.windowModel.currentView:
raise Exception('currentView not set')
self.window.show()
self.log.debug('client up')
def _registerViews(self, views, app):
for view in views:
self.viewManager.registerView(view)
# TODO need nicer solution - would be nice if the list was notifiable
self.windowModel.registeredViews = list(self.viewManager.views)
return views[0]
@pyqtSlot(str)
def _onSwitchView(self, name):
self.viewManager.loadView(name)
@pyqtSlot(str)
def _onLogChanged(self, msg):
# replace with collections.deque binding(ish)?
if self.console.property('lineCount') == settings.LOG_BUFFER_SIZE:
line_end = self.console.property('text').find('\n') + 1
self.console.remove(0, line_end)
self.console.append(msg)
class ViewManager(QObject):
def __init__(self, context, windowViewModel, parent=None):
super().__init__(parent)
self._context = context
self._window = windowViewModel
self._views = OrderedDict()
def registerView(self, name, *args, **kwargs):
'''
Works on a convention. The view requires two things:
1) the ui file which should be the camel cased .qml file in the ui directory. Path should be relative to Chrome.qml
2) the view model which should be a class in the view_models module
'''
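# A hypothetical example of the convention: registerView('games') imports
# GamesViewModel from the games.view_models module, and loadView('games')
# later resolves the UI file to ../games/views/Games(.qml).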
if self._views.get(name):
raise Exception('{} already registered'.format(name))
n = self._convertName(name)
vm_name = '{}ViewModel'.format(n)
# equivalent of from <name>.view_models import <vm_name>
vm = __import__(name + '.view_models', globals(), locals(), [vm_name], 0)
self._views[name] = (n, (getattr(vm, vm_name))(*args, parent=self, **kwargs))
def getView(self, name):
return self._views[name]
def loadView(self, name):
viewName, viewModel = self.getView(name)
self._context.setContextProperty('contentModel', viewModel)
self._window.currentView = os.path.join('..', name, 'views', viewName)
@property
def views(self):
return self._views
def _convertName(self, name):
return re.sub('([_\s]?)([A-Z]?[a-z]+)', lambda m: m.group(2).title(), name)
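# e.g. _convertName('match_maker') and _convertName('match maker')
# both yield 'MatchMaker' (a hypothetical view name).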
|
gpl-3.0
| -6,388,136,681,272,199,000
| 34.491525
| 124
| 0.646769
| false
| 4.05029
| false
| false
| false
|
aristanetworks/arista-ovs-nova
|
nova/virt/netutils.py
|
1
|
3524
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Network-releated utilities for supporting libvirt connection code."""
import netaddr
from nova.openstack.common import cfg
CONF = cfg.CONF
CONF.import_opt('use_ipv6', 'nova.config')
CONF.import_opt('injected_network_template', 'nova.virt.disk.api')
Template = None
def _late_load_cheetah():
global Template
if Template is None:
t = __import__('Cheetah.Template', globals(), locals(),
['Template'], -1)
Template = t.Template
def get_net_and_mask(cidr):
net = netaddr.IPNetwork(cidr)
return str(net.ip), str(net.netmask)
def get_net_and_prefixlen(cidr):
net = netaddr.IPNetwork(cidr)
return str(net.ip), str(net._prefixlen)
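# e.g. get_net_and_mask('192.168.1.0/24') returns ('192.168.1.0', '255.255.255.0'),
# while get_net_and_prefixlen('192.168.1.0/24') returns ('192.168.1.0', '24').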
def get_ip_version(cidr):
net = netaddr.IPNetwork(cidr)
return int(net.version)
def get_injected_network_template(network_info, use_ipv6=CONF.use_ipv6,
template=CONF.injected_network_template):
"""
return a rendered network template for the given network_info
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
Note: this code actually depends on the legacy network_info, but will
convert the type itself if necessary.
"""
# the code below depends on the legacy 'network_info'
if hasattr(network_info, 'legacy'):
network_info = network_info.legacy()
nets = []
ifc_num = -1
have_injected_networks = False
for (network_ref, mapping) in network_info:
ifc_num += 1
if not network_ref['injected']:
continue
have_injected_networks = True
address = mapping['ips'][0]['ip']
netmask = mapping['ips'][0]['netmask']
address_v6 = None
gateway_v6 = None
netmask_v6 = None
if use_ipv6:
address_v6 = mapping['ip6s'][0]['ip']
netmask_v6 = mapping['ip6s'][0]['netmask']
gateway_v6 = mapping['gateway_v6']
net_info = {'name': 'eth%d' % ifc_num,
'address': address,
'netmask': netmask,
'gateway': mapping['gateway'],
'broadcast': mapping['broadcast'],
'dns': ' '.join(mapping['dns']),
'address_v6': address_v6,
'gateway_v6': gateway_v6,
'netmask_v6': netmask_v6}
nets.append(net_info)
if have_injected_networks is False:
return None
if not template:
return None
_late_load_cheetah()
ifc_template = open(template).read()
return str(Template(ifc_template,
searchList=[{'interfaces': nets,
'use_ipv6': use_ipv6}]))
|
apache-2.0
| -1,055,264,008,260,136,000
| 29.37931
| 78
| 0.61975
| false
| 3.859803
| false
| false
| false
|
Outernet-Project/tekhenu
|
tekhenu/routes/content_list.py
|
1
|
5442
|
"""
content_list.py: Content list request handler, and suggestion form handler
Tekhenu
(c) 2014, Outernet Inc
All rights reserved.
This software is free software licensed under the terms of GPLv3. See COPYING
file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
"""
from __future__ import unicode_literals, division
import math
import logging
from urlparse import urlparse
from bottle_utils import csrf
from bottle_utils.i18n import i18n_path
from google.appengine.ext import ndb
from bottle_utils.i18n import lazy_gettext as _
from bottle import view, default_app, request, response, redirect
from db.models import Content
from . import QueryResult
app = default_app()
PREFIX = '/'
def get_content_list(per_page=20):
"""
Create a query over ``Content`` objects using query string parameters.
:param per_page: number of items to return per page
:returns: ``QueryResult`` object
"""
search = request.params.getunicode('q', '').strip()
status = request.params.get('status')
license = request.params.get('license')
votes = request.params.get('votes')
page = int(request.params.get('p', '1'))
q = Content.query()
if search:
keywords = Content.get_keywords(search)
if len(keywords) > 1:
q = q.filter(ndb.AND(*[Content.keywords == kw for kw in keywords]))
if len(keywords) == 1:
q = q.filter(Content.keywords == keywords[0])
if status:
q = q.filter(Content.status == status)
if license == 'free':
q = q.filter(Content.is_free == True)
elif license == 'nonfree':
q = q.filter(Content.is_free == False)
elif license == 'unknown':
q = q.filter(Content.license == None)
if votes == 'asc':
q = q.order(+Content.votes)
elif votes == 'desc':
q = q.order(-Content.votes)
q = q.order(-Content.updated)
count = q.count()
if not count:
return QueryResult([], count, 1, 1)
npages = int(math.ceil(count / per_page))
if page * per_page > count:
page = npages
offset = int(per_page * (page - 1))
return QueryResult(q.fetch(per_page, offset=offset), count, page, npages)
@app.get(PREFIX)
@csrf.csrf_token
@view('content_list', errors={}, Content=Content)
def show_content_list():
"""
Show a list of the 20 most recently updated pieces of content and a suggestion form.
"""
return dict(vals=request.params, content=get_content_list())
@app.post(PREFIX)
@csrf.csrf_protect
@view('content_list', Content=Content)
def add_content_suggestion():
"""
Handle a content suggestion request.
"""
# TODO: Handle Unicode URLs
url = Content.validate_url(request.forms.get('url', ''))
license = request.forms.get('license') or None
errors = {}
if not url:
# Translators, used as error message on failure to submit suggestion
errors['url'] = _('This URL is invalid')
if license:
license = license.strip().upper()
if license not in Content.LICENSE_CHOICES:
# Translators, used as error message on failure to submit
# suggestion
errors['license'] = _('Please select a license from provided '
'choices')
if not url:
# Translators, used as error message on failure to submit suggestion
errors['url'] = _('Please type in a valid URL')
if not errors:
try:
content = Content.create(url=url, license=license)
logging.info("Created content for '%s' (real url: '%s')", url,
content.url)
response.flash(_('Your suggestion has been added'))
redirect(i18n_path(content.path))
except Content.InvalidURLError as err:
logging.debug("URL error while parsing '%s': %s", url, err)
# Translators, used as error message on failure to submit suggestion
errors['url'] = _('This URL is invalid')
except Content.FetchError as err:
logging.debug("Fetch error while parsing '%s': %s (%s)",
url, err, err.error)
# Translators, used as error message on failure to submit suggestion
errors['url'] = _('The page at specified URL does not exist or '
'the domain cannot be reached.')
except Content.NotAllowedError as err:
logging.debug("Access error while parsing '%s': %s", url, err)
# Translators, used as error message on failure to submit suggestion
errors['url'] = _('The page must be accessible to robots')
except Content.ContentError as err:
logging.debug("Content error while parsing '%s': %s (%s)", url,
err, err.error)
# Translators, used as error message on failure to submit suggestion
errors['url'] = _('The content on the page could not be '
'understood, please provide a URL to a valid '
'web page')
except Exception as err:
logging.debug("Unknown error fetching '%s': %s", url, err)
# Translators, used as error message on failure to submit suggestion
errors['url'] = _('There was an unknown error with the URL')
return dict(vals=request.forms, errors=errors, Content=Content,
content=get_content_list())
|
gpl-3.0
| 883,379,827,784,138,000
| 34.109677
| 79
| 0.610437
| false
| 4.07946
| false
| false
| false
|
pearpai/TensorFlow-action
|
deep_learning_with_tensorFlow/Chapter04/p7902.py
|
1
|
1719
|
import tensorflow as tf
from numpy.random import RandomState
batch_size = 8
# Two input nodes
x = tf.placeholder(tf.float32, shape=(None, 2), name='x-input')
# A regression problem generally has only one output node
y_ = tf.placeholder(tf.float32, shape=(None, 1), name='y-input')
# Define the forward propagation of a single-layer neural network; here it is just a simple weighted sum
w1 = tf.Variable(tf.random_normal([2, 1], stddev=1, seed=1))
y = tf.matmul(x, w1)
# Define the costs of predicting too much and of predicting too little
loss_less = 10
loss_more = 1
loss = tf.reduce_sum(tf.where(tf.greater(y, y_), (y - y_) * loss_more, (y_ - y) * loss_less))
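# With loss_less=10 and loss_more=1, under-prediction is penalized ten times
# more heavily than over-prediction, so training should push the learned
# weights slightly above the noise-free optimum of w1 = [1, 1].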
train_step = tf.train.AdamOptimizer(0.001).minimize(loss)
# Generate a simulated dataset with random numbers
rdm = RandomState(1)
dataset_size = 128
X = rdm.rand(dataset_size, 2)
# Set the ground-truth regression value to the sum of the two inputs plus a
# random term. The random term injects unpredictable noise; without it,
# comparing different loss functions would not mean much, since every loss
# function reaches its minimum when predictions are exactly correct. The noise
# is generally a small quantity with zero mean, so here it is set to a
# random number in -0.05 ~ 0.05
Y = [[x1 + x2 + (rdm.rand() / 10.0 - 0.05)] for (x1, x2) in X]
# Train the neural network
with tf.Session() as sess:
init_op = tf.global_variables_initializer()
sess.run(init_op)
STEPS = 5000
for i in range(STEPS):
start = (i * batch_size) % 128
end = (i * batch_size) % 128 + batch_size
sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})
if i % 1000 == 0:
print("After %d training step(s), w1 is: " % (i))
print(sess.run(w1), "\n")
print("Final w1 is: \n", sess.run(w1))
|
apache-2.0
| -4,511,052,033,741,601,000
| 29.813953
| 93
| 0.638491
| false
| 1.743421
| false
| false
| false
|
ThoriumGroup/thorium
|
thorium/utils/flags.py
|
1
|
13209
|
#!/usr/bin/env python
"""
Thorium Utils Flags
===================
Nuke Knob Flags which can be difficult to access due to Nuke not storing
readily available variables for them, forcing the use the integer values as
seen below.
Any of these flags can now be used with:
::
from thorium.utils import flags
And then when needed:
::
gain = nuke.Array_Knob('gain')
gain.setFlag(flags.SLIDER)
gain.setFlag(flags.LOG_SLIDER)
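To clear a previously set flag, Nuke's ``clearFlag`` counterpart can be used
in the same way (assuming the standard Knob API):
::
gain.clearFlag(flags.LOG_SLIDER)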
Non-PEP8 Styling is used within this script to preserve readability.
## Version Restrictions
Flags new in 6.3:
* KNOB_CHANGED_RECURSIVE
* MODIFIES_GEOMETRY
* OUTPUT_ONLY
* NO_KNOB_CHANGED_FINISHED
* SET_SIZE_POLICY
* EXPAND_TO_WIDTH
Flags new in 6.2:
* READ_ONLY
* GRANULAR_UNDO
* NO_RECURSIVE_PATHS
## License
The MIT License (MIT)
Flags
Copyright (c) 2010-2014 John R.A. Benson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# =============================================================================
# GLOBALS
# =============================================================================
# General Flags ===============================================================
# Must not intersect any class-specific flags
DISABLED = 0x0000000000000080 # DISABLED Set by disable(), cleared by enable().
NO_ANIMATION = 0x0000000000000100 # NO_ANIMATION Prevent the value from being animated.
# This removes any animation or view buttons, and
# it stops tcl expressions from being evaluated in
# string knobs, and may make it ignore attempts to
# set expressions or key frames (nyi).
DO_NOT_WRITE = 0x0000000000000200 # DO_NOT_WRITE Don't ever save this knob to a script
# (including copy & paste!)
INVISIBLE = 0x0000000000000400 # INVISIBLE The knob does not appear in the panels.
# No widgets are created. This is not the same
# as hide(), and show() will not undo it!
RESIZABLE = 0x0000000000000800 # RESIZABLE The knob can stretch in the panel so
# that it fills up all the remaining space in the line.
# Defaults to true for most of the complex knobs,
# but off for buttons, checkmarks, and pulldown lists.
STARTLINE = 0x0000000000001000 # STARTLINE This knob starts a new row in the panel.
# The default is true unless a zero-length (not NULL)
# string is passed as the label. Currently the default
# is false for checkmarks and buttons but this may
# change in future versions.
ENDLINE = 0x0000000000002000 # ENDLINE This knob will end a row, acts exactly
# like STARTLINE was set on the next knob.
# Set true for divider lines.
NO_RERENDER = 0x0000000000004000 # NO_RERENDER This knob does not contribute to the
# hash value for the op. This should be used on knobs
# that have no effect on the op's output.
NO_HANDLES = 0x0000000000008000 # NO_HANDLES Don't draw anything in the viewer,
# this is useful if the Op draws it's own indicators.
KNOB_CHANGED_ALWAYS = 0x0000000000010000 # KNOB_CHANGED_ALWAYS will call node()->knob_changed()
# every time the value of the knob changes. Normally
# it is only called if the user changes the value with
# the panel open. This allows you to track all changes to
# the value. Be careful as knob_changed() will be called
# without storing the new values into your structure.
NO_KNOB_CHANGED = 0x0000000000020000 # NO_KNOB_CHANGED: Don't bother calling Op::knob_changed()
# with this knob. This is turned on automatically
# if the knob_changed() returns false.
HIDDEN = 0x0000000000040000 # HIDDEN Set by hide(), cleared by show().
NO_UNDO = 0x0000000000080000 # NO_UNDO Don't undo/redo any changes to this knob.
# May be replaced with "output knob" in the future.
ALWAYS_SAVE = 0x0000000000100000 # ALWAYS_SAVE save the knob to a script even if not_default()
# returns false. *Deprecated*, instead override
# not_default() and make it return true!
NODE_KNOB = 0x0000000000200000 # NODE_KNOB is used by Nuke internally for controls on
# the DAG appearance such as xpos and ypos.
HANDLES_ANYWAY = 0x0000000000400000 # HANDLES_ANYWAY makes the handles appear in the viewer when
# the panel is open even if a different tab is selected.
READ_ONLY = 0x0000000010000000 # knob cannot be modified by UI intervention but can
# still be copied from etc
# Internal Use Flags ==========================================================
INDETERMINATE = 0x0000000000800000
COLOURCHIP_HAS_UNSET = 0x0000000001000000 #/< whether a color chip can be in the 'unset' state,
# DEFAULTS TO FALSE
SMALL_UI = 0x0000000002000000
NO_NUMERIC_FIELDS = 0x0000000004000000
NO_CURVE_EDITOR = 0x0000000020000000
NO_MULTIVIEW = 0x0000000040000000
EARLY_STORE = 0x0000000080000000
KNOB_CHANGED_RECURSIVE = 0x0000000008000000 # 6.3 recursive knobChanged calls are guarded against.
# To override the non-recursion on a particular knob,
# specify this flag
MODIFIES_GEOMETRY = 0x0000000100000000 # 6.3 MODIFIES_GEOMETRY should be set for any knob
# that modifies geometry, either by affecting the
# internal geometry directly or by changing its transform
OUTPUT_ONLY = 0x0000000200000000 # 6.3
NO_KNOB_CHANGED_FINISHED = 0x0000000400000000 # 6.3
SET_SIZE_POLICY = 0x0000000800000000 # 6.3
EXPAND_TO_WIDTH = 0x0000001000000000 # 6.3 Just for enum knobs currently
# Numeric Knob Flags ==========================================================
MAGNITUDE = 0x0000000000000001 # MAGNITUDE If there are several numbers, this enables a
# button to only show a single number, and all are set
# equal to this number. Default is true for WH_knob()
# and Color_knob().
SLIDER = 0x0000000000000002 # SLIDER Turns on the slider. Currently this only works if
# the size is 1 or MAGNITUDE is enabled and it is set
# to single numbers.
# Defaults to on for most non-integer numerical controls.
LOG_SLIDER = 0x0000000000000004 # LOG_SLIDER Tick marks on the slider (if enabled with SLIDER)
# are spaced logarithmically. This is turned on for
# WH_knob() and Color_knob(), and if the range has both
# ends greater than zero. If you turn this on and the
# range passes through zero, the scale is actually the cube
# root of the number, not the logarithm.
STORE_INTEGER = 0x0000000000000008 # STORE_INTEGER Only integer values should be displayed/stored
FORCE_RANGE = 0x0000000000000010 # FORCE_RANGE Clamps the value to the range when storing.
ANGLE = 0x0000000000000020 # ANGLE Turn on a widget depicting this number as an angle.
NO_PROXYSCALE = 0x0000000000000040 # NO_PROXYSCALE disables proxy scaling for XY or WH knobs.
# Useful if you just want two numbers called "x" and "y"
# that are not really a position.
# You probably also want to do NO_HANDLES.
# String Knob Flags ===========================================================
GRANULAR_UNDO = 0x0000000000000001
NO_RECURSIVE_PATHS = 0x0000000000000002
# Enumeration Knob Flags ======================================================
SAVE_MENU = 0x0000000002000000 # SAVE_MENU writes the contents of the menu to the saved
# script. Useful if your plugin modifies the list of items.
# BeginGroup Knob Flags =======================================================
CLOSED = 0x0000000000000001 # CLOSED True for a BeginGroup knob that is closed
TOOLBAR_GROUP = 0x0000000000000002 # Make the group into a viewer toolbar
TOOLBAR_LEFT = 0x0000000000000000 # Position in the viewer. Only set one of these:
TOOLBAR_TOP = 0x0000000000000010
TOOLBAR_BOTTOM = 0x0000000000000020
TOOLBAR_RIGHT = 0x0000000000000030
TOOLBAR_POSITION = 0x0000000000000030 # A mask for the position part of the flags
# ChannelSet/Channel Knob Flags ===============================================
NO_CHECKMARKS = 0x0000000000000001 # NO_CHECKMARKS Get rid of the individual channel checkmarks.
NO_ALPHA_PULLDOWN = 0x0000000000000002 # NO_ALPHA_PULLDOWN Get rid of the extra pulldown that lets
# you set the 4th channel to an arbitrary different layer
# than the first 3.
# Format Knob Flags ===========================================================
PROXY_DEFAULT = 0x0000000000000001 # PROXY_DEFAULT makes the default value be the
# root.proxy_format rather than the root.format.
# =============================================================================
# EXPORTS
# =============================================================================
__all__ = [
'ALWAYS_SAVE',
'ANGLE',
'CLOSED',
'COLOURCHIP_HAS_UNSET',
'DISABLED',
'DO_NOT_WRITE',
'EARLY_STORE',
'ENDLINE',
'EXPAND_TO_WIDTH',
'FORCE_RANGE',
'GRANULAR_UNDO',
'HANDLES_ANYWAY',
'HIDDEN',
'INDETERMINATE',
'INVISIBLE',
'KNOB_CHANGED_ALWAYS',
'KNOB_CHANGED_RECURSIVE',
'LOG_SLIDER',
'MAGNITUDE',
'MODIFIES_GEOMETRY',
'NODE_KNOB',
'NO_ALPHA_PULLDOWN',
'NO_ANIMATION',
'NO_CHECKMARKS',
'NO_CURVE_EDITOR',
'NO_HANDLES',
'NO_KNOB_CHANGED',
'NO_KNOB_CHANGED_FINISHED',
'NO_MULTIVIEW',
'NO_NUMERIC_FIELDS',
'NO_PROXYSCALE',
'NO_RECURSIVE_PATHS',
'NO_RERENDER',
'NO_UNDO',
'OUTPUT_ONLY',
'PROXY_DEFAULT',
'READ_ONLY',
'RESIZABLE',
'SAVE_MENU',
'SET_SIZE_POLICY',
'SLIDER',
'SMALL_UI',
'STARTLINE',
'STORE_INTEGER',
'TOOLBAR_BOTTOM',
'TOOLBAR_GROUP',
'TOOLBAR_LEFT',
'TOOLBAR_POSITION',
'TOOLBAR_RIGHT',
'TOOLBAR_TOP',
]
|
mit
| -3,900,856,096,993,762,300
| 44.705882
| 106
| 0.532516
| false
| 4.491329
| false
| false
| false
|
jianmingtang/PIC-tools
|
Python/PIC/DistNASA.py
|
1
|
4991
|
# Copyright (C) 2014 Jian-Ming Tang <jmtang@mailaps.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Distribution
------------
"""
import numpy
class DistNASA:
"""
This class is used to store data in ndarray from a NASA PIC data file.
Methods for data slicing and summation are provided.
"""
data_t = {}
def __init__(self, fname, grid, nss=4):
"""
fname: data filename
grid: number of grid points
nss: number of species
"""
self.grid = grid
self.nss = nss
datatype = numpy.dtype([
('pad1', 'i4'),
('axes', 'f4', (nss, grid)),
('xlo', 'f4'), ('xhi', 'f4'), ('zlo', 'f4'), ('zhi', 'f4'),
('ic', 'i4', (nss,)),
('fxyz', 'f4', (nss, grid, grid, grid)),
('fxy', 'f4', (nss, grid, grid)),
('fxz', 'f4', (nss, grid, grid)),
('fyz', 'f4', (nss, grid, grid)),
('vxa', 'f4', (nss,)),
('vya', 'f4', (nss,)),
('vza', 'f4', (nss,)),
('pad2', 'i4')
])
self.data = numpy.fromfile(fname, datatype)[0]
self.truncate([0, grid])
def __getitem__(self, key):
return self.data_t[key]
def __str__(self):
"""
"""
s = '\n'
s += 'Bin location: '
s += 'x=(%4g,%4g), z=(%4g,%4g)\n' % (
self.data['xlo'], self.data['xhi'],
self.data['zlo'], self.data['zhi'])
# This is broken due to truncation
# This is hard coded to species 1
# s += '(Hard coded) Axes max: %4g\n' % self['axes'][1][-1]
# s += '\n'
# for i in range(self.nss):
# s += 'v['+str(i)+'] = ({0:g}, {1:g}, {2:g})\n'.format(
# self['vxa'][i], self['vya'][i], self['vza'][i])
return s
def truncate(self, r):
""" We do basic slicing here, so that no copies are made.
"""
b = r[0]
e = r[1]
for k in ['fxy', 'fxz', 'fyz']:
self.data_t[k] = self.data[k][:, b:e, b:e]
self.data_t['fxyz'] = self.data['fxyz'][:, b:e, b:e, b:e]
self.data_t['axes'] = self.data['axes'][:, b:e]
# print help(dict(self.data))
# print self.data.has_key('cut')
# if self.data.has_key('cut'):
# self.data_t['cut'] = self.data['cut'][:,b:e,b:e]
def cut(self, p):
"""
Cut out a 2D slice from the 3D data
p = [dir,rmin,rmax]
"""
rmin = int(p[1])
rmax = int(p[2])
A = self['fxyz']
if p[0] == 'x':
self.dataCUT = A[:,:,:, rmin]
for i in range(rmin+1, rmax+1):
self.dataCUT += A[:,:,:, i]
elif p[0] == 'y':
self.dataCUT = A[:,:, rmin,:]
for i in range(rmin+1, rmax+1):
self.dataCUT += A[:,:, i,:]
elif p[0] == 'z':
self.dataCUT = A[:, rmin,:,:]
for i in range(rmin+1, rmax+1):
self.dataCUT += A[:, i,:,:]
else:
raise IndexError
self.data['cut'] = self.dataCUT
def _check_add(self, sps):
# Check the ranges of velocities are consistent.
allowed_error = [1.e-6] * self.grid
self.axes = self['axes'][int(sps[0])]
for s in sps[1:]:
diff = self['axes'][int(s)] - self.axes
if numpy.any(diff > allowed_error):
print sps, ' cannot be combined!!!'
raise IndexError
def add2D(self, sps):
"""
Combine species for a 2D slice
sps = [s1,s2,...]
"""
self._check_add(sps)
self.data2D = self.dataCUT[int(sps[0])]
for s in sps[1:]:
self.data2D += self.dataCUT[int(s)]
def add_reduced(self, sps):
"""
Combine species for reduced data sets
sps = [s1,s2,...]
"""
self._check_add(sps)
self.dataR = {}
for f in ['fxy', 'fxz', 'fyz']:
self.dataR[f] = self[f][int(sps[0])]
for s in sps[1:]:
self.dataR[f] += self[f][int(s)]
def add3D(self, sps):
"""
Combine species for 3D data
sps = [s1,s2,...]
"""
self._check_add(sps)
self.data3D = self['fxyz'][int(sps[0])]
for s in sps[1:]:
self.data3D += self['fxyz'][int(s)]
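# A minimal usage sketch (the file name and grid size are hypothetical):
#
#   dist = DistNASA('dist.dat', grid=101)
#   dist.cut(['x', 10, 20])       # sum the 3D data over x bins 10..20
#   dist.add2D(['0', '2'])        # combine species 0 and 2 in that cut
#   dist.add_reduced(['0', '2'])  # combine the same species in fxy/fxz/fyz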
|
gpl-3.0
| 4,494,715,066,244,990,000
| 31.620915
| 75
| 0.47766
| false
| 3.156863
| false
| false
| false
|
porolakka/motioneye-jp
|
src/v4l2ctl.py
|
1
|
11358
|
# Copyright (c) 2013 Calin Crisan
# This file is part of motionEye.
#
# motionEye is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import fcntl
import logging
import os.path
import re
import stat
import subprocess
import time
_resolutions_cache = {}
_ctrls_cache = {}
_ctrl_values_cache = {}
_DEV_V4L_BY_ID = '/dev/v4l/by-id/'
def find_v4l2_ctl():
try:
return subprocess.check_output('which v4l2-ctl', shell=True).strip()
except subprocess.CalledProcessError: # not found
return None
def list_devices():
global _resolutions_cache, _ctrls_cache, _ctrl_values_cache
logging.debug('listing v4l devices...')
try:
output = ''
started = time.time()
p = subprocess.Popen('v4l2-ctl --list-devices', shell=True, stdout=subprocess.PIPE, bufsize=1)
fd = p.stdout.fileno()
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
while True:
try:
data = p.stdout.read(1024)
if not data:
break
except IOError:
data = ''
time.sleep(0.01)
output += data
if len(output) > 10240:
logging.warn('v4l2-ctl command returned more than 10k of output')
break
if time.time() - started > 3:
logging.warn('v4l2-ctl command ran for more than 3 seconds')
break
except subprocess.CalledProcessError:
logging.debug('failed to list devices (probably no devices installed)')
return []
try:
# try to kill the v4l2-ctl subprocess
p.kill()
except:
pass # nevermind
name = None
devices = []
for line in output.split('\n'):
if line.startswith('\t'):
device = line.strip()
device = find_persistent_device(device)
devices.append((device, name))
logging.debug('found device %(name)s: %(device)s' % {
'name': name, 'device': device})
else:
name = line.split('(')[0].strip()
# clear the cache
_resolutions_cache = {}
_ctrls_cache = {}
_ctrl_values_cache = {}
return devices
def list_resolutions(device):
global _resolutions_cache
if device in _resolutions_cache:
return _resolutions_cache[device]
logging.debug('listing resolutions of device %(device)s...' % {'device': device})
resolutions = set()
output = ''
started = time.time()
p = subprocess.Popen('v4l2-ctl -d %(device)s --list-formats-ext | grep -vi stepwise | grep -oE "[0-9]+x[0-9]+" || true' % {
'device': device}, shell=True, stdout=subprocess.PIPE, bufsize=1)
fd = p.stdout.fileno()
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
while True:
try:
data = p.stdout.read(1024)
if not data:
break
except IOError:
data = ''
time.sleep(0.01)
output += data
if len(output) > 10240:
logging.warn('v4l2-ctl command returned more than 10k of output')
break
if time.time() - started > 3:
logging.warn('v4l2-ctl command ran for more than 3 seconds')
break
try:
# try to kill the v4l2-ctl subprocess
p.kill()
except:
pass # nevermind
for pair in output.split('\n'):
pair = pair.strip()
if not pair:
continue
width, height = pair.split('x')
width = int(width)
height = int(height)
if (width, height) in resolutions:
continue # duplicate resolution
if width < 96 or height < 96: # some reasonable minimal values
continue
if width % 16 or height % 16: # ignore non-modulo 16 resolutions
continue
resolutions.add((width, height))
logging.debug('found resolution %(width)sx%(height)s for device %(device)s' % {
'device': device, 'width': width, 'height': height})
if not resolutions:
logging.debug('no resolutions found for device %(device)s, adding the defaults' % {'device': device})
# no resolution returned by v4l2-ctl call, add common default resolutions
resolutions.add((320, 240))
resolutions.add((640, 480))
resolutions.add((800, 480))
resolutions.add((1024, 576))
resolutions.add((1024, 768))
resolutions.add((1280, 720))
resolutions.add((1280, 800))
resolutions.add((1280, 960))
resolutions.add((1280, 1024))
resolutions.add((1440, 960))
resolutions.add((1440, 1024))
resolutions.add((1600, 1200))
resolutions = list(sorted(resolutions, key=lambda r: (r[0], r[1])))
_resolutions_cache[device] = resolutions
return resolutions
def device_present(device):
try:
st = os.stat(device)
return stat.S_ISCHR(st.st_mode)
except:
return False
def find_persistent_device(device):
try:
devs_by_id = os.listdir(_DEV_V4L_BY_ID)
except OSError:
return device
for p in devs_by_id:
p = os.path.join(_DEV_V4L_BY_ID, p)
if os.path.realpath(p) == device:
return p
return device
def get_brightness(device):
return _get_ctrl(device, 'brightness')
def set_brightness(device, value):
_set_ctrl(device, 'brightness', value)
def get_contrast(device):
return _get_ctrl(device, 'contrast')
def set_contrast(device, value):
_set_ctrl(device, 'contrast', value)
def get_saturation(device):
return _get_ctrl(device, 'saturation')
def set_saturation(device, value):
_set_ctrl(device, 'saturation', value)
def get_hue(device):
return _get_ctrl(device, 'hue')
def set_hue(device, value):
_set_ctrl(device, 'hue', value)
def _get_ctrl(device, control):
global _ctrl_values_cache
if not device_present(device):
return None
if device in _ctrl_values_cache and control in _ctrl_values_cache[device]:
return _ctrl_values_cache[device][control]
controls = _list_ctrls(device)
properties = controls.get(control)
if properties is None:
logging.warn('control %(control)s not found for device %(device)s' % {
'control': control, 'device': device})
return None
value = int(properties['value'])
# adjust the value range
if 'min' in properties and 'max' in properties:
min_value = int(properties['min'])
max_value = int(properties['max'])
value = int(round((value - min_value) * 100.0 / (max_value - min_value)))
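# e.g. min=-64, max=64, value=0 maps to 50 (percent of the control's range)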
else:
logging.warn('min and max values not found for control %(control)s of device %(device)s' % {
'control': control, 'device': device})
logging.debug('control %(control)s of device %(device)s is %(value)s%%' % {
'control': control, 'device': device, 'value': value})
return value
def _set_ctrl(device, control, value):
global _ctrl_values_cache
if not device_present(device):
return
controls = _list_ctrls(device)
properties = controls.get(control)
if properties is None:
logging.warn('control %(control)s not found for device %(device)s' % {
'control': control, 'device': device})
return
_ctrl_values_cache.setdefault(device, {})[control] = value
# adjust the value range
if 'min' in properties and 'max' in properties:
min_value = int(properties['min'])
max_value = int(properties['max'])
value = int(round(min_value + value * (max_value - min_value) / 100.0))
else:
logging.warn('min and max values not found for control %(control)s of device %(device)s' % {
'control': control, 'device': device})
logging.debug('setting control %(control)s of device %(device)s to %(value)s' % {
'control': control, 'device': device, 'value': value})
output = ''
started = time.time()
p = subprocess.Popen('v4l2-ctl -d %(device)s --set-ctrl %(control)s=%(value)s' % {
'device': device, 'control': control, 'value': value}, shell=True, stdout=subprocess.PIPE, bufsize=1)
fd = p.stdout.fileno()
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
while True:
try:
data = p.stdout.read(1024)
if not data:
break
except IOError:
data = ''
time.sleep(0.01)
output += data
if len(output) > 10240:
logging.warn('v4l2-ctl command returned more than 10k of output')
break
if time.time() - started > 3:
logging.warn('v4l2-ctl command ran for more than 3 seconds')
break
try:
# try to kill the v4l2-ctl subprocess
p.kill()
except:
pass # nevermind
def _list_ctrls(device):
global _ctrls_cache
if device in _ctrls_cache:
return _ctrls_cache[device]
output = ''
started = time.time()
p = subprocess.Popen('v4l2-ctl -d %(device)s --list-ctrls' % {
'device': device}, shell=True, stdout=subprocess.PIPE, bufsize=1)
fd = p.stdout.fileno()
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
while True:
try:
data = p.stdout.read(1024)
if not data:
break
except IOError:
data = ''
time.sleep(0.01)
output += data
if len(output) > 10240:
logging.warn('v4l2-ctl command returned more than 10k of output')
break
if time.time() - started > 3:
logging.warn('v4l2-ctl command ran for more than 3 seconds')
break
try:
# try to kill the v4l2-ctl subprocess
p.kill()
except:
pass # nevermind
controls = {}
for line in output.split('\n'):
if not line:
continue
match = re.match('^\s*(\w+)\s+\(\w+\)\s+\:\s*(.+)', line)
if not match:
continue
(control, properties) = match.groups()
properties = dict([v.split('=', 1) for v in properties.split(' ') if v.count('=')])
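# e.g. a line such as
#   "brightness (int) : min=0 max=255 step=1 default=128 value=128"
# becomes controls['brightness'] = {'min': '0', 'max': '255', ...}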
controls[control] = properties
_ctrls_cache[device] = controls
return controls
|
gpl-3.0
| -6,950,108,781,523,003,000
| 26.302885
| 127
| 0.569202
| false
| 3.817815
| false
| false
| false
|
intelxed/xed
|
pysrc/regmap.py
|
1
|
3763
|
#!/usr/bin/env python
#BEGIN_LEGAL
#
#Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#END_LEGAL
from __future__ import print_function
import re
import sys
def die(s):
sys.stderr.write(s+"\n")
sys.exit(1)
class regmap_t(object):
"""This converts register indices to register enumerations. And
vice versa. This replaces some clunkier register lookup machinery
in XED2."""
def __init__(self, dst, ntname,base,index):
self.name = dst
self.ntname = ntname
self.base_reg = base
self.index = index
self.decode_output = ''
self.encode_output = ''
def activate(self):
self.emit_decoder_code()
self.emit_encoder_code()
def dump(self):
print(" DECODER OUTPUT: ", self.decode_output)
print(" ENCODER OUTPUT: ", self.encode_output)
def emit_decoder_code(self):
self.decode_preamble()
self.decode_emit()
self.decode_epilogue()
def emit_encoder_code(self):
self.encode_preamble()
self.encode_emit()
self.encode_epilogue()
def decode_preamble(self):
pass
def decode_emit(self):
d = {}
d['base_reg'] = self.base_reg
d['index'] = self.index
d['name'] = self.name # bypass OUTREG!
c = 'ov[XED_OPERAND_%(name)s]= %(base_reg)s + %(index)s'
self.decode_output += (c%d)
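# e.g. for the XMM_1 regmap in __main__ below this produces:
#   ov[XED_OPERAND_OUTREG]= XED_REG_XMM0 + REGIDX1;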
def decode_epilogue(self):
self.decode_output += ";"
def encode_preamble(self):
pass
def encode_emit(self):
d = {}
d['operand_name'] = self.name
d['base_reg'] = self.base_reg
d['index_name'] = self.index
c = "ov[XED_OPERAND_%(index_name)s]= ov[XED_OPERAND_%(operand_name)s] - %(base_reg)s;"
self.encode_output += (c%d)
def encode_epilogue(self):
pass
class parse_regmap_t(object):
def __init__(self):
self.regmaps = {}
def read_line(self,line):
""" Lines have the following very simple format
XMM_1 XMM0 REGINDEX1
"""
a = line.split()
if len(a) != 3:
die("Wrong number of fields on line: " + line)
try:
(ntname, base, index) = a
except:
die("Could not parse " + line)
regmap = regmap_t('OUTREG', ntname, 'XED_REG_'+base, index)
regmap.activate()
if ntname in self.regmaps:
die("Attempting to duplication regmap " + ntname)
self.regmaps[ntname] = regmap
def read_lines(self,lines):
for line in lines:
line = re.sub(r'#.*','',line)
line = line.strip()
if line:
self.read_line(line)
def dump(self):
for g,v in self.regmaps.items():
print(g, ": ")
v.dump()
print("\n\n")
if __name__ == "__main__":
o = regmap_t('OUTREG', 'XMM_1','XED_REG_XMM0','REGIDX1')
o.activate()
o.dump()
p = parse_regmap_t()
lines = ['XMM_1 XMM0 REGIDX1',
'XMM_2 XMM0 REGIDX2',
'YMM_1 YMM0 REGIDX1',
'YMM_2 YMM0 REGIDX2' ]
p.read_lines(lines)
p.dump()
|
apache-2.0
| -6,261,212,164,540,289,000
| 27.725191
| 97
| 0.560723
| false
| 3.474608
| false
| false
| false
|
gaoxiaofeng/troubleShooting
|
templates/keywords/Disk.py
|
1
|
1949
|
# -*- coding: utf-8 -*-
from troubleshooting.framework.libraries.library import singleton
import re
from troubleshooting.framework.template.Keyword import *
import traceback
@singleton
class Disk(Keyword):
def __init__(self):
super(self.__class__,self).__init__()
self._diskSize = {}
self._diskInodes = {}
def _listSize(self):
command = "df -hP | awk '{print $5 $NF}'"
stdout = self.execute_command(command)
pattern = re.compile(r"(^\d+)%(\S+)",re.I|re.M)
_list = pattern.findall(stdout)
__list = []
for _tuple in _list:
if len(_tuple) != 2:
continue
__tuple = (_tuple[1],_tuple[0])
__list.append(__tuple)
self._diskSize = dict(__list)
def _listInodes(self):
command = "df -iP | awk '{print $5 $NF}'"
stdout = self.execute_command(command)
pattern = re.compile(r"(^\d+)%(\S+)",re.I|re.M)
_list = pattern.findall(stdout)
__list = []
for _tuple in _list:
if len(_tuple) != 2:
continue
__tuple = (_tuple[1],_tuple[0])
__list.append(__tuple)
self._diskInodes = dict(__list)
def _list(self):
if self._diskInodes == {}:
self._listInodes()
if self._diskSize == {}:
self._listSize()
def get_disk_usage_size(self):
self._list()
return self._diskSize
def get_disk_usage_inodes(self):
self._list()
return self._diskInodes
    def is_exist_file(self, path):
        command = "ls %s" % path
        try:
            stdout = self.execute_command(command, checkerr=True)
        except Exception:
            print("caught exception: %s" % traceback.format_exc())
            # file does not exist
            return False
        else:
            # file exists
            return True
if __name__ == "__main__":
disk = Disk()
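    # Hypothetical smoke check (assumes execute_command is wired to a host):
    # print(disk.get_disk_usage_size())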
|
apache-2.0
| 1,576,557,107,511,169,300
| 30.435484
| 66
| 0.518214
| false
| 3.68431
| false
| false
| false
|
bbc/ebu-tt-live-toolkit
|
testing/bdd/test_ebuttd_multiple_active_regions_overlapping.py
|
1
|
5826
|
import pytest
from pytest_bdd import scenarios, when, then, parsers
from ebu_tt_live.errors import OverlappingActiveElementsError, RegionExtendingOutsideDocumentError
from ebu_tt_live.documents.converters import EBUTT3EBUTTDConverter
from ebu_tt_live.documents import EBUTT3Document
from ebu_tt_live.documents import EBUTTDDocument
scenarios('features/timing/ebuttd_multiple_active_regions_overlapping.feature')
@when(parsers.parse('it has region "{region_id}"'))
def when_it_contains_region(test_context, template_dict, region_id):
if 'regions' not in template_dict:
template_dict['regions'] = list()
region = {"id": region_id}
template_dict['regions'].append(region)
test_context[region_id] = region
@when(parsers.parse('it has p_element "{p_id}"'))
def when_it_contains_p_element(test_context, template_dict, p_id):
if 'p_elements' not in template_dict:
template_dict['p_elements'] = list()
p_element = {"id": p_id}
template_dict['p_elements'].append(p_element)
test_context[p_id] = p_element
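# Note: pytest-bdd passes example-table columns (<p1_begin>, <r1_origin>, ...)
# into step functions as arguments named after the column, which is why each
# placeholder below needs its own step definition.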
@when(parsers.parse('p_element "{p_id}" has attribute "{attribute}" set to <p1_begin>'))
def when_p1_has_attribute_begin(test_context, p_id, attribute, p1_begin):
    test_context[p_id][attribute] = p1_begin
@when(parsers.parse('p_element "{p_id}" has attribute "{attribute}" set to <p1_end>'))
def when_p1_has_attribute_end(test_context, p_id, attribute, p1_end):
    test_context[p_id][attribute] = p1_end
@when(parsers.parse('p_element "{p_id}" has attribute "{attribute}" set to <p2_begin>'))
def when_p2_has_attribute_begin(test_context, p_id, attribute, p2_begin):
    test_context[p_id][attribute] = p2_begin
@when(parsers.parse('p_element "{p_id}" has attribute "{attribute}" set to <p2_end>'))
def when_p2_has_attribute_end(test_context, p_id, attribute, p2_end):
    test_context[p_id][attribute] = p2_end
@when(parsers.parse('p_element "{p_id}" has attribute "{attribute}" set to "{value}"'))
def when_p_element_has_attribute(test_context, p_id, attribute, value):
    test_context[p_id][attribute] = value
@when(parsers.parse('region "{region_id}" has attribute "{attribute}" set to <r1_origin>'))
def when_region1_has_attribute_origin(test_context, region_id, attribute, r1_origin):
    test_context[region_id][attribute] = r1_origin
@when(parsers.parse('region "{region_id}" has attribute "{attribute}" set to <r1_extent>'))
def when_region1_has_attribute_extent(test_context, region_id, attribute, r1_extent):
    test_context[region_id][attribute] = r1_extent
@when(parsers.parse('region "{region_id}" has attribute "{attribute}" set to <r2_origin>'))
def when_region2_has_attribute_origin(test_context, region_id, attribute, r2_origin):
    test_context[region_id][attribute] = r2_origin
@when(parsers.parse('region "{region_id}" has attribute "{attribute}" set to <r2_extent>'))
def when_region2_has_attribute_extent(test_context, region_id, attribute, r2_extent):
    test_context[region_id][attribute] = r2_extent
@when(parsers.parse('region "{region_id}" has attribute "{attribute}" set to <r3_origin>'))
def when_region3_has_attribute_origin(test_context, region_id, attribute, r3_origin):
    test_context[region_id][attribute] = r3_origin
@when(parsers.parse('region "{region_id}" has attribute "{attribute}" set to <r3_extent>'))
def when_region3_has_attribute_extent(test_context, region_id, attribute, r3_extent):
    test_context[region_id][attribute] = r3_extent
@when(parsers.parse('it contains element with region1 "{region_id}"'))
def when_element_has_attribute_region1(template_dict, region_id):
template_dict['text_region1'] = region_id
@when(parsers.parse('it contains element with region2 "{region_id}"'))
def when_element_has_attribute_region2(template_dict, region_id):
template_dict['text_region2'] = region_id
@then(parsers.parse('application should exit with error OverlappingActiveElementsError'))
def then_application_should_exit_overlapping_active_region_error(
test_context, template_dict):
match_string = "The EBU-TT-D spec forbids overlapping active areas. " \
"Element {elem1_id} references region" \
"id={region1_id}, origin={region1_origin}, extent={region1_extent}" \
" and Element {elem2_id} references region" \
"id={region2_id}, origin={region2_origin}, extent={region2_extent}.".format(
elem1_id=template_dict['p_elements'][0]['id'],
elem2_id=template_dict['p_elements'][1]['id'],
region1_id=template_dict['regions'][0]['id'],
region2_id=template_dict['regions'][1]['id'],
region1_origin=template_dict['regions'][0]['origin'],
region1_extent=template_dict['regions'][0]['extent'],
region2_origin=template_dict['regions'][1]['origin'],
region2_extent=template_dict['regions'][1]['extent'],
)
with pytest.raises(
OverlappingActiveElementsError,
match=match_string):
ebuttd_document = EBUTTDDocument.create_from_raw_binding(
test_context["converted_bindings"])
ebuttd_document.validate()
@when('the EBU-TT-Live document is converted to a EBU-TT-D')
def convert_to_ebuttd(test_context):
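    # Conversion itself is not expected to raise here; the overlap/extent
    # errors surface when the converted document is validated in the
    # "then" steps above.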
ebuttd_converter = EBUTT3EBUTTDConverter(None)
converted_bindings = ebuttd_converter.convert_document(test_context['document'].binding)
test_context["converted_bindings"] = converted_bindings
@then(parsers.parse('application should exit with error RegionExtendingOutsideDocumentError'))
def then_application_should_exit_with_region_error(test_context, template_dict, template_file):
with pytest.raises(RegionExtendingOutsideDocumentError) as e:
ebuttd_document = EBUTTDDocument.create_from_raw_binding(test_context["converted_bindings"])
ebuttd_document.validate()
|
bsd-3-clause
| -1,952,068,880,815,068,000
| 50.105263
| 100
| 0.706831
| false
| 3.297114
| true
| false
| false
|