| repo_name (string) | path (string) | copies (string) | size (string) | content (string) | license (string) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool) | ratio (float64) | config_test (bool) | has_no_keywords (bool) | few_assignments (bool) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
christiansandberg/canopen
|
canopen/emcy.py
|
1
|
4163
|
import struct
import logging
import threading
import time
# Error code, error register, vendor specific data
EMCY_STRUCT = struct.Struct("<HB5s")
logger = logging.getLogger(__name__)
class EmcyConsumer(object):
def __init__(self):
#: Log of all received EMCYs for this node
self.log = []
#: Only active EMCYs. Will be cleared on Error Reset
self.active = []
self.callbacks = []
self.emcy_received = threading.Condition()
def on_emcy(self, can_id, data, timestamp):
code, register, data = EMCY_STRUCT.unpack(data)
entry = EmcyError(code, register, data, timestamp)
with self.emcy_received:
if code & 0xFF00 == 0:
# Error reset
self.active = []
else:
self.active.append(entry)
self.log.append(entry)
self.emcy_received.notify_all()
for callback in self.callbacks:
callback(entry)
def add_callback(self, callback):
"""Get notified on EMCY messages from this node.
:param callback:
Callable which must take one argument of an
:class:`~canopen.emcy.EmcyError` instance.
"""
self.callbacks.append(callback)
def reset(self):
"""Reset log and active lists."""
self.log = []
self.active = []
def wait(self, emcy_code=None, timeout=10):
"""Wait for a new EMCY to arrive.
:param int emcy_code: EMCY code to wait for
:param float timeout: Max time in seconds to wait
:return: The EMCY exception object or None if timeout
:rtype: canopen.emcy.EmcyError
"""
end_time = time.time() + timeout
while True:
with self.emcy_received:
prev_log_size = len(self.log)
self.emcy_received.wait(timeout)
if len(self.log) == prev_log_size:
# Resumed due to timeout
return None
# Get last logged EMCY
emcy = self.log[-1]
logger.info("Got %s", emcy)
if time.time() > end_time:
# No valid EMCY received on time
return None
if emcy_code is None or emcy.code == emcy_code:
# This is the one we're interested in
return emcy
class EmcyProducer(object):
def __init__(self, cob_id):
self.network = None
self.cob_id = cob_id
def send(self, code, register=0, data=b""):
payload = EMCY_STRUCT.pack(code, register, data)
self.network.send_message(self.cob_id, payload)
def reset(self, register=0, data=b""):
payload = EMCY_STRUCT.pack(0, register, data)
self.network.send_message(self.cob_id, payload)
class EmcyError(Exception):
"""EMCY exception."""
DESCRIPTIONS = [
# Code Mask Description
(0x0000, 0xFF00, "Error Reset / No Error"),
(0x1000, 0xFF00, "Generic Error"),
(0x2000, 0xF000, "Current"),
(0x3000, 0xF000, "Voltage"),
(0x4000, 0xF000, "Temperature"),
(0x5000, 0xFF00, "Device Hardware"),
(0x6000, 0xF000, "Device Software"),
(0x7000, 0xFF00, "Additional Modules"),
(0x8000, 0xF000, "Monitoring"),
(0x9000, 0xFF00, "External Error"),
(0xF000, 0xFF00, "Additional Functions"),
(0xFF00, 0xFF00, "Device Specific")
]
def __init__(self, code, register, data, timestamp):
#: EMCY code
self.code = code
#: Error register
self.register = register
#: Vendor specific data
self.data = data
#: Timestamp of message
self.timestamp = timestamp
def get_desc(self):
for code, mask, description in self.DESCRIPTIONS:
if self.code & mask == code:
return description
return ""
def __str__(self):
text = "Code 0x{:04X}".format(self.code)
description = self.get_desc()
if description:
text = text + ", " + description
return text
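# Illustrative usage sketch (editorial addition, not part of the original module). It assumes
# the canopen library's Network/RemoteNode API, where a remote node exposes this EmcyConsumer
# as node.emcy; the node id, EDS file name and CAN channel below are placeholders.
def _example_emcy_usage():
    import canopen
    network = canopen.Network()
    network.connect(bustype='socketcan', channel='can0')
    node = network.add_node(6, 'example.eds')
    # Print every EMCY as it arrives, then block for up to 5 s waiting for the next one.
    node.emcy.add_callback(lambda error: print("EMCY:", error))
    error = node.emcy.wait(timeout=5)  # EmcyError instance, or None on timeout
    if error is not None:
        print(hex(error.code), error.get_desc())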
|
mit
| 6,766,806,848,442,568,000
| 29.837037
| 63
| 0.554648
| false
| 3.858202
| false
| false
| false
|
benjsmith/mubiomics
|
MPSDemultiplexer/patricia.py
|
1
|
3563
|
#!/usr/local/bin/python
#patricia.py
#Python class definitions for creating a radix-like trie.
# Copyright (C) <2012> <Benjamin C. Smith>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class patricia:
"""Creates Patricia(Radix)-like Tries using dictionaries. Code found on
StackOverflow.com"""
def __init__(self, *args, **kwargs):
self._d = {}
def addWord(self,w):
d = self._d
i = 0
while 1:
try:
node = d[w[i:i+1]]
except KeyError:
if d:
d[w[i:i+1]] = [w[i+1:],{}]
else:
if w[i:i+1] == '':
return
else:
if i != 0:
d[''] = ['',{}]
d[w[i:i+1]] = [w[i+1:],{}]
return
i += 1
if w.startswith(node[0],i):
if len(w[i:]) == len(node[0]):
if node[1]:
try:
node[1]['']
except KeyError:
d = node[1]
d[''] = ['',{}]
return
else:
i += len(node[0])
d = node[1]
else:
ii = i
j = 0
while ii != len(w) and j != len(node[0]) and \
w[ii:ii+1] == node[0][j:j+1]:
ii += 1
j += 1
tmpd = {}
tmpd[node[0][j:j+1]] = [node[0][j+1:],node[1]]
tmpd[w[ii:ii+1]] = [w[ii+1:],{}]
d[w[i-1:i]] = [node[0][:j],tmpd]
return
def isWord(self,w):
d = self._d
i = 0
while 1:
try:
node = d[w[i:i+1]]
except KeyError:
return False
i += 1
if w.startswith(node[0],i):
if len(w[i:]) == len(node[0]):
if node[1]:
try:
node[1]['']
except KeyError:
return False
return True
else:
i += len(node[0])
d = node[1]
else:
return False
def isPrefix(self,w):
d = self._d
i = 0
wlen = len(w)
while 1:
try:
node = d[w[i:i+1]]
except KeyError:
return False
i += 1
if w.startswith(node[0][:wlen-i],i):
if wlen - i > len(node[0]):
i += len(node[0])
d = node[1]
else:
return True
else:
return False
__getitem__ = isWord
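# Illustrative usage sketch (editorial addition, not part of the original module), exercising
# the trie with a few short words.
if __name__ == '__main__':
    trie = patricia()
    for word in ("cat", "car", "cart"):
        trie.addWord(word)
    print(trie.isWord("car"))    # True
    print(trie["cart"])          # True (__getitem__ is an alias for isWord)
    print(trie.isWord("ca"))     # False
    print(trie.isPrefix("ca"))   # True
    print(trie.isPrefix("dog"))  # False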
|
gpl-3.0
| 3,643,960,447,306,891,000
| 30.263158
| 75
| 0.386191
| false
| 4.147846
| false
| false
| false
|
Vaei/ModularChannelBox
|
jtChannelBox_Menu_Rigging.py
|
1
|
19478
|
# jtChannelBox - Modular / Customizable Channel Box
# Copyright (C) 2016 Jared Taylor
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------------
# To request a commercial license please email me at my address:
# jaredtaylor.99@gmail.com
# --------------------------------------------------------------------------
from collections import OrderedDict
import jtChannelBox_Commands_Default as cbc
reload(cbc)
# --------------------------------------------------------------------------
# HELPER FUNCTIONS
# Used for menu creation
# --------------------------------------------------------------------------
# Variables used for helper functions
divider_step = [0]  # a list containing one int: a list is mutable and therefore passed by reference,
# so the function can modify it directly without reassigning the variable
menu_step = [0, "genericKey"]  # the key built off this becomes "genericKey_0"; +1 is added to the 0 each
# time; it is used to generate unique keys
# --------------------------------------------------------------------------
# Helper function for creating dividers.
# USAGE: divider(menu_name)
def divider(_menu, step=divider_step):
_menu["divider_" + str(step[0])] = ["", 0, "divider", ""]
step[0] += 1
# --------------------------------------------------------------------------
# Helper function for creating menu items.
# USAGE:
# _menu : this is the OrderedDict storing the menu
# _label : the label for the menu item that the user sees on the menu
# _has_enable_conditions : if 0/False will always be available, if 1/True then will have conditions to meet before being
# enabled, by default this is whether an
# attribute is selected or not, you can override it in jtChannelBox.py function channelBox_Menu_States
# _type : various types are available and will be listed after the definition below, however for a default menu item
# simply enter "" with nothing in the string (empty string)
# _command : the function that is executed when the menu item is pressed
# _tooltip : (optional if no _key entered) this is assigned a default value of "" which equates to no tooltip, it is
# optional unless you enter a menu key
# _key : (optional) set a custom menu key, only required if you need to refer to the menu item later, will always need
# this for a checkbox or optionbox to query
# the state or when there's a corresponding variable attached in saved_states, if you enter a key, you must also
# enter a tooltip (can simply be "" for no tooltip)
# or the system will think the key is the tooltip
# Without using the function it would be: menu_channels["keyItem"] = ["Key Selected", 1, "", cbc.channelbox_command_
# keyItem]
# With the function the equivalent is: menu(menu_channels, "Key Selected", 1, "", cbc.channelbox_command_keyItem) -
# but the key will be set automatically to genericKey_0, which is OK,
# we don't need to refer back to this menu item
# all KEYS must be UNIQUE per dict in python. This function handles it for you unless you need a specific key.
# Duplicate keys will be excluded/ignored.
def menu(_menu, _label, _has_enable_conditions, _type, _command, _tooltip="", _key=menu_step):
key = _key[1] + "_" + str(_key[0]) # build key based off menu_step
if _key is menu_step: # check if no custom key entered, increment step if true
_key[0] += 1
else: # custom key was entered, use that instead
key = _key
    _menu[key] = [_label, _has_enable_conditions, _type, _command, _tooltip if _tooltip != "" else None]
# TYPES for _type:
# "checkbox" : can be enabled or disabled with a box to the left of the item, you will need to set a custom key and
# add it also to the saved_states
# "optionbox" : has a secondary function that is used when clicking the option box, which is placed to the right of
# the item, you do not have to set a custom key
# "submenu" : replace the _command with integer defining how many of the following menu items are placed in this
# submenu, you do not have to set a custom key
# "radio" : replace the _command with integer defining how many of following menu items are a part of this radio
# collection, you will need to set a custom key and add it also to the saved_states
# "custom" : for behaviour that is not defined here, add to the function in jtChannelBox.py called channelBox_Menu
# _Custom for what happens for this specific key, you will need to set a custom key - for example, look at
# "selectFilterSet" and the specified function
# "divider" : this is also a type, but you would usually use the divider() function instead
# ----------------------End : Helper Functions End--------------------------
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
# MENU ITEMS DICTS
# This is where you add your own menus
# --------------------------------------------------------------------------
# Read the "USAGE" for the helper functions if you don't know what to do
# --------------------------------------------------------------------------
# ------------------------------CHANNEL MENU--------------------------------
menu_channels = OrderedDict()
# -------------------------------------------------------------------------
# +submenu
menu(menu_channels, "Freeze", 0, "submenu", 4)
menu(menu_channels, "Translate", 0, "", cbc.channelbox_command_freezeTranslate)
menu(menu_channels, "Rotate", 0, "", cbc.channelbox_command_freezeRotate)
menu(menu_channels, "Scale", 0, "", cbc.channelbox_command_freezeScale)
menu(menu_channels, "All", 0, "optionbox", cbc.channelbox_command_freezeAll)
# -submenu
divider(menu_channels)
menu(menu_channels, "Key Selected", 1, "", cbc.channelbox_command_keyItem)
# menu(menu_channels, "Key All Keyable", 0, "", cbc.channelbox_command_keyAll)
# menu(menu_channels, "Breakdown Selected", 1, "", cbc.channelbox_command_breakdown)
# menu(menu_channels, "Breakdown All", 0, "", cbc.channelbox_command_breakdownAll)
# menu(menu_channels, "Mute Selected", 1, "", cbc.channelbox_command_mute)
# menu(menu_channels, "Mute All", 0, "", cbc.channelbox_command_muteAll)
# menu(menu_channels, "Unmute Selected", 1, "", cbc.channelbox_command_unmute)
# menu(menu_channels, "Unmute All", 0, "", cbc.channelbox_command_unmuteAll)
divider(menu_channels)
# menu(menu_channels, "Sync Timeline Display", 0, "checkbox", cbc.channelbox_command_syncTimeline,
# "Update timeline ticks based on selected channel box entries. Active list is used when there is no
# channel box selection",
# "syncTimeline")
# divider(menu_channels)
# menu(menu_channels, "Cut Selected", 1, "", cbc.channelbox_command_cut, "Cut selected keyframes")
menu(menu_channels, "Copy Selected", 1, "", cbc.channelbox_command_copy, "Copy selected keyframes")
menu(menu_channels, "Paste Selected", 1, "", cbc.channelbox_command_paste, "Paste selected keyframes")
menu(menu_channels, "Delete Selected", 1, "", cbc.channelbox_command_delete, "Delete selected keyframes")
divider(menu_channels)
# menu(menu_channels, "Duplicate Values", 1, "", cbc.channelbox_command_duplicateAttrValues)
menu(menu_channels, "Break Connections", 1, "", cbc.channelbox_command_break)
# menu(menu_channels, "Select Connection", 1, "", cbc.channelbox_command_selectConnection)
divider(menu_channels)
menu(menu_channels, "Lock Selected", 1, "", cbc.channelbox_command_lock)
menu(menu_channels, "Unlock Selected", 1, "", cbc.channelbox_command_unlock)
menu(menu_channels, "Hide Selected", 1, "", cbc.channelbox_command_unkeyable)
menu(menu_channels, "Lock and Hide Selected", 1, "", cbc.channelbox_command_lockUnkeyable)
menu(menu_channels, "Make Selected Nonkeyable", 1, "", cbc.channelbox_command_unkeyableDisplayed)
menu(menu_channels, "Make Selected Keyable", 1, "", cbc.channelbox_command_keyable)
divider(menu_channels)
# menu(menu_channels, "Add to Selected Layers", 1, "", cbc.channelbox_command_addToLayers,
# "Add selected attributes to selected Animation Layer")
# menu(menu_channels, "Remove From Selected Layers", 1, "", cbc.channelbox_command_removeFromLayers,
# "Remove selected attributes from selected Animation Layer")
menu(menu_channels, "Sync Graph Editor Display", 0, "checkbox", cbc.channelbox_command_syncGraph,
"Update Graph Editor based on selected channel box entries and set keyframes only on selected entries."
" Active list is used when there is no channel box selection",
"syncGraphEditor")
# -------------------------------EDIT MENU---------------------------------
menu_edit = OrderedDict()
# -------------------------------------------------------------------------
menu(menu_edit, "Expressions...", 1, "", cbc.channelbox_command_expression)
menu(menu_edit, "Set Driven Key...", 1, "", cbc.channelbox_command_driven)
menu(menu_edit, "Connection Editor", 0, "", cbc.channelbox_command_connectionEditor)
menu(menu_edit, "Graph Editor", 0, "", cbc.channelbox_command_animCurve)
menu(menu_edit, "Channel Control", 0, "", cbc.channelbox_command_channelControlEditor)
# menu(menu_edit, "Attribute Editor", 0, "", cbc.channelbox_command_attributeEditor)
# menu(menu_edit, "Material Attributes", 0, "", cbc.channelbox_command_materialAttributes)
divider(menu_edit)
menu(menu_edit, "Add Attribute", 0, "", cbc.channelbox_command_addAttribute)
menu(menu_edit, "Edit Attribute", 1, "", cbc.channelbox_command_renameAttribute)
menu(menu_edit, "Duplicate Attribute", 1, "", cbc.channelbox_command_duplicateAttr)
menu(menu_edit, "Delete Attributes", 1, "", cbc.channelbox_command_deleteAttributes)
divider(menu_edit)
# menu(menu_edit, "Select Node", 0, "", cbc.channelbox_command_selectNode)
# menu(menu_edit, "Delete Node", 0, "", cbc.channelbox_command_deleteNode)
# menu(menu_edit, "Delete History", 0, "", cbc.channelbox_command_deleteHistory)
# +submenu
menu(menu_edit, "Settings", 0, "submenu", 9)
# +radio
menu(menu_edit, "", 0, "radio", 2, "", "speedState")
menu(menu_edit, "Slow", 0, "", cbc.channelbox_command_setSpeed, "Channel box attributes move in increments of "
"0.1","speedSlow")
menu(menu_edit, "Medium", 0, "", cbc.channelbox_command_setSpeed, "Channel box attributes move in increments of "
"1.0", "speedMedium")
menu(menu_edit, "Fast", 0, "", cbc.channelbox_command_setSpeed, "Channel box attributes move in increments of "
"10.0", "speedFast")
# -radio
# divider(menu_edit)
# menu(menu_edit, "Hyperbolic", 0, "checkbox", cbc.channelbox_command_setHyperbolic,
# "Switch between increments acting as linear (unchecked) or curve-based", "hyperbolic")
divider(menu_edit)
menu(menu_edit, "Show Namespace", 0, "checkbox", cbc.channelbox_command_setNamespace, "", "showNamespace")
divider(menu_edit)
# +radio
menu(menu_edit, "", 0, "radio", 2, "", "manipsState")
menu(menu_edit, "No Manips", 0, "", cbc.channelbox_command_setManip, "", "noManips")
menu(menu_edit, "Invisible Manips", 0, "", cbc.channelbox_command_setManip, "", "invisibleManips")
menu(menu_edit, "Standard Manips", 0, "", cbc.channelbox_command_setManip, "", "standardManips")
# -radio
divider(menu_edit)
menu(menu_edit, "Change Precision...", 0, "", cbc.channelbox_command_precision,
"How many floating point values are displayed in the Channel Box", "changePrecision")
menu(menu_edit, "Reset to Default", 0, "", cbc.channelbox_command_reset)
# -submenu, +submenu
menu(menu_edit, "Channel Names", 0, "submenu", 3)
# +radio
menu(menu_edit, "", 0, "radio", 3, "", "namesState")
menu(menu_edit, "Nice", 0, "", cbc.channelbox_command_setChannelName, "", "nameNice")
menu(menu_edit, "Long", 0, "", cbc.channelbox_command_setChannelName, "", "nameLong")
menu(menu_edit, "Short", 0, "", cbc.channelbox_command_setChannelName, "", "nameShort")
# ------------------------------SHOW MENU----------------------------------
menu_show = OrderedDict()
# -------------------------------------------------------------------------
# +submenu
menu(menu_show, "Attributes", 0, "submenu", 8)
menu(menu_show, "Driven by Anim Curve", 0, "checkbox", cbc.channelbox_command_filter_itemCB, "", "attr_animCurve")
menu(menu_show, "Driven by Expression", 0, "checkbox", cbc.channelbox_command_filter_itemCB,
"View->Show Results in Graph Editor must be on to see curves driven by expressions", "attr_expression")
menu(menu_show, "Driven by Driven Key", 0, "checkbox", cbc.channelbox_command_filter_itemCB, "", "attr_drivenKey")
menu(menu_show, "Scale", 0, "checkbox", cbc.channelbox_command_filter_itemCB, "", "attr_scale")
menu(menu_show, "Rotate", 0, "checkbox", cbc.channelbox_command_filter_itemCB, "", "attr_rotate")
menu(menu_show, "Translate", 0, "checkbox", cbc.channelbox_command_filter_itemCB, "", "attr_translate")
menu(menu_show, "Scale Rotate Translate", 0, "checkbox", cbc.channelbox_command_filter_itemCB, "",
"attr_scaleRotateTranslate")
menu(menu_show, "User Defined", 0, "checkbox", cbc.channelbox_command_filter_itemCB,
"No effect if there are no user-defined attributes present", "attr_userDefined")
# -submenu
menu(menu_show, "Isolate Selected", 0, "optionbox", cbc.channelbox_command_isolateAttr, "", "selectAttr")
menu(menu_show, "Invert Shown", 1, "checkbox", cbc.channelbox_command_filter_invertShown,
"Toggle between isolating/hiding", "invertShown")
divider(menu_show)
menu(menu_show, "Show All", 0, "", cbc.channelbox_command_filter_filterShowAll, "Reset all attribute filters")
divider(menu_show)
menu(menu_show, "Select Filter Set", 1, "custom", cbc.channelbox_command_selectFilterSet, "", "selectFilterSet")
menu(menu_show, "Create Filter Set...", 1, "", cbc.channelbox_command_createFilterSet, "", "createFilterSet")
divider(menu_show)
menu(menu_show, "Channel Box Settings", 0, "submenu", 4)
menu(menu_show, "Label on Right-Click Menu", 0, "checkbox", cbc.channelbox_command_popupLabel,
"Show the menu label at top of right-click menu", "popupLabel")
menu(menu_show, "Show Icons", 0, "checkbox", cbc.channelbox_command_showIcons,
"Show the Manipulator, Speed, and Hyperbolic icons above the menu bar", "showIcons")
menu(menu_show, "Hide Unavailable Menu Items", 0, "checkbox", cbc.channelbox_command_hideUnavailable,
"Hide unavailable menu options instead of disabling them", "hideUnavailable")
divider(menu_show)
menu(menu_show, "Delete All Stored Settings (Full Reset)", 0, "", cbc.channelbox_command_cboxReset,
"Re-initialize this channel box at the default state")
# -------------------------------End : Menus-------------------------------
# -------------------------------------------------------------------------
# ------------------------------MENUS DICT---------------------------------
menus = OrderedDict() # Add your custom menus here too
# -------------------------------------------------------------------------
menus["Channels"] = menu_channels
menus["Edit"] = menu_edit
menus["Objects"] = "" # this is a custom menu and it's behaviour is defined (differently) in jtChannelBox.py
menus["Show"] = menu_show
# ----------------------------End : Menus Dict-----------------------------
# -------------------------------------------------------------------------
# ----------------------------SYMBOL COMMANDS------------------------------
symbol_commands = {}
# -------------------------------------------------------------------------
symbol_commands["pressed"] = cbc.channelbox_command_Symbol_pressed
symbol_commands["update"] = cbc.channelbox_command_Symbol_update
# --------------------------End : Symbol Commands--------------------------
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# SAVED STATES
# Variables stored by the system
# [x, 0] - First element is the saved data, second element is whether or #
# not this state is saved/serialized persistently to disk and restored
# when the script or maya is restarted
saved_states = {}
# -------------------------------------------------------------------------
# checkbox states
saved_states["syncGraphEditor"] = [0, 0]
saved_states["syncTimeline"] = [0, 0]
saved_states["hyperbolic"] = [0, 1]
saved_states["showNamespace"] = [1, 1]
# radio button collection states
saved_states["speedState"] = [2, 1]
saved_states["manipsState"] = [3, 1]
saved_states["namesState"] = [1, 1]
# serialized settings
saved_states["changePrecision"] = [3, 1]
saved_states["fieldWidth"] = [65, 1]
saved_states["channelWidth"] = [230, 1]
saved_states["hideUnavailable"] = [0, 1]
saved_states["showIcons"] = [1, 1]
saved_states["popupLabel"] = [1, 1]
# filter checkbox states
saved_states["attr_animCurve"] = [0, 0]
saved_states["attr_expression"] = [0, 0]
saved_states["attr_drivenKey"] = [0, 0]
saved_states["attr_scaleRotateTranslate"] = [0, 0]
saved_states["attr_userDefined"] = [0, 0]
saved_states["attr_scale"] = [0, 0]
saved_states["attr_rotate"] = [0, 0]
saved_states["attr_translate"] = [0, 0]
saved_states["invertShown"] = [0, 0]
saved_states["savedFilters"] = [OrderedDict(), 1] # Used to store filter sets, you probably don't want to modify this
# --------------------------End : Saved States-----------------------------
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# SCRIPT JOB IDs
# Saved for later removal of script jobs
# Script jobs end automatically when the parent UI is closed
# -1 almost always is the default value, -1 means not currently running #
jobIDs = {}
# -------------------------------------------------------------------------
jobIDs["syncGraphEditor"] = -1
jobIDs["syncTimeline"] = -1
# -------------------------End : Script Job IDs----------------------------
# -------------------------------------------------------------------------
|
agpl-3.0
| 2,335,228,321,807,812,600
| 55.294118
| 118
| 0.595595
| false
| 3.712217
| false
| false
| false
|
iansealy/projecteuler
|
optimal/9.py
|
1
|
1324
|
#!/usr/bin/env python
"""This script solves the Project Euler problem "Special Pythagorean triplet".
The problem is: There exists exactly one Pythagorean triplet for which
a + b + c = 1000. Find the product abc.
"""
from __future__ import division
import math
def main():
"""Special Pythagorean triplet"""
# Constants
SUM = 1000
a, b, c = get_pythagorean_triplet_by_sum(SUM)
print(a * b * c)
def get_pythagorean_triplet_by_sum(s):
"""Get Pythagorean triplet"""
s2 = s // 2
mlimit = int(math.ceil(math.sqrt(s2))) - 1
for m in range(2, mlimit + 1):
if s2 % m == 0:
sm = s2 // m
while sm % 2 == 0:
sm = sm // 2
k = m + 1
if m % 2 == 1:
k = m + 2
while k < 2 * m and k <= sm:
if sm % k == 0 and gcd(k, m) == 1:
d = s2 // (k * m)
n = k - m
a = d * (m * m - n * n)
b = 2 * d * m * n
c = d * (m * m + n * n)
return(a, b, c)
k += 2
return(0, 0, 0)
def gcd(a, b):
"""Get greatest common divisor"""
if a > b:
a, b = b, a
while a:
a, b = b % a, a
return(b)
if __name__ == '__main__':
main()
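# Editorial note, not part of the original script: for SUM = 1000 the search above yields the
# well-known triplet {200, 375, 425} (returned as (375, 200, 425)), so main() prints 31875000.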
|
gpl-3.0
| 8,416,430,042,208,403,000
| 21.066667
| 78
| 0.428248
| false
| 3.167464
| false
| false
| false
|
JaneliaSciComp/janelia-parking-manager
|
ParkingPermit/models.py
|
1
|
11541
|
"""
Holds all of the data models for the site for managing campus visitors.
TODO:
Triggers for history so we don't lose old data - document
Load new data - Update to use user defined make/model
Future:
history
Deploy notes:
"""
import datetime
import re
from django.contrib.auth.models import Group, User
from django.db import models
from django.contrib.admin.models import LogEntry
#from django.contrib.localflavor.us.models import USStateField
from django.contrib.localflavor.us.us_states import STATE_CHOICES
from django.core.mail import EmailMultiAlternatives
from general.models import AuditableTable,LookUp
from django.db.models.signals import post_save
from general.utilities import memoize
from django.conf import settings
#fix Django bug, uggh
User._meta.ordering=["username"]
class VehicleType(LookUp):
class Meta:
ordering = ['name']
class VehicleColor(LookUp):
class Meta:
ordering = ['name']
class VehicleMake(LookUp):
class Meta:
ordering = ['name']
class ParkingLocation(LookUp):
class Meta:
ordering = ['name']
class ViolationReason(LookUp):
class Meta:
ordering = ['name']
class LivingArrangement(LookUp):
class Meta:
ordering = ['name']
class Vehicle(AuditableTable):
""" """
vehicle_type = models.ForeignKey(VehicleType)
make = models.ForeignKey(VehicleMake)
model = models.CharField(max_length=500)
class Meta:
unique_together = ("vehicle_type", "make", "model", )
ordering = ['vehicle_type', 'make', 'model']
def __unicode__(self):
ret_str = "%s %s" % (self.make, self.model)
return ret_str
class VehicleRegistration(AuditableTable):
"""This is the main table to record the issue of a parking permit
(registration) for a user/vehicle combination for one parking year.
Note: These records should generally not be deleted since we want to keep a
historical record.
"""
class Meta:
ordering = ['-created_datetime']
user = models.ForeignKey(User, null=True, blank=True)
vehicle = models.ForeignKey(Vehicle, null=True, blank=True,
help_text="If you see your vehicle in the dropdown list, select it. Otherwise <a href='#' id='show_user_vehicle'>click here</a>.")
user_entered_vehicle_make = models.CharField(max_length=50, blank=True)
user_entered_vehicle_model = models.CharField(max_length=50, blank=True)
color = models.ForeignKey(VehicleColor,
help_text="Choose closest matching color from the list.")
license_plate = models.CharField(max_length=20, help_text="Please no spaces or dashes")
#Django hack, update states to include a *foreign option* (instead of using USStateField)
license_plate_state = models.CharField(max_length=2, choices=(('ZZ', '*Non - US*'),) + STATE_CHOICES)
parking_location = models.ForeignKey(ParkingLocation, null=True, blank=True)
current_living_arrangement = models.ForeignKey(LivingArrangement,
verbose_name="Where do you live?")
current_apt_number = models.CharField(max_length=20, blank=True, help_text="Apartment Number (if applicable)")
parking_number = models.CharField(max_length=200, blank=True)
parking_number_year = models.IntegerField(blank=True, null=True)
notes = models.CharField(max_length=500, blank=True)
agree_to_TOS = models.BooleanField("Policy Agreement", blank=False,
help_text="I acknowledge that I have read and understand the <a href='http://wiki/wiki/display/policy/Parking+on+Campus'>rules</a> for parking " \
"on the Janelia Farm Research Campus. I agree to abide by these rules. I understand " \
"that failure to follow these rules may result in loss of parking privileges on campus.")
active = models.BooleanField(default=True, help_text="Uncheck to remove this vehicle.")
#Fields to collect data for non credentialed employees who won't have their own user
#accounts. The parking system gamekeeper will enter their registrations manually into
#the system.
non_cred_first_name = models.CharField(max_length=255, blank=True,
verbose_name="Non-Credentialed User - First Name")
non_cred_last_name = models.CharField(max_length=255, blank=True,
verbose_name="Non-Credentialed User - Last Name")
non_cred_dept_company = models.CharField(max_length=255, blank=True,
verbose_name="Non-Credentialed User - Dept. or Company")
def vehicle_for_display(self):
if self.vehicle:
return str(self.vehicle)
else:
return "%s %s" % (self.user_entered_vehicle_make,self.user_entered_vehicle_model)
def user_display_name(self):
if self.user:
return self.user.get_profile().display_name
else:
return str(self.non_cred_first_name) + ' ' + str(self.non_cred_last_name)
def user_dept_company(self):
if self.user:
return self.user.get_profile().description
else:
return self.non_cred_dept_company
def user_phone_email(self):
if self.user:
return "%s / %s" % (self.user.get_profile().work_phone,
self.user.email)
else:
return ""
def __unicode__(self):
if self.user:
user_str = str(self.user)
else:
user_str = str(self.non_cred_first_name) + ' ' + str(self.non_cred_last_name)
return "%s, %s, Tags: %s %s Parking #: %s" % (
user_str,
self.vehicle,
self.license_plate_state,
self.license_plate,
#self.parking_location, #doesn't get included in selected related so leave out
self.parking_number)
def get_edit_url(self,include_base=False):
url = '/ParkingPermit/vehicleregistration/%s/' % self.id
if include_base:
url = settings.BASE_URL + url
return url
def save(self, *args, **kwargs):
"""Clean up, replace spaces and dashes in license plate"""
if self.license_plate:
self.license_plate = self.license_plate.replace('-','').replace(' ','')
super(VehicleRegistration,self).save(*args, **kwargs)
def send_created_email(self):
"""Send an email when a new registration is added"""
if settings.NOTIFY_NEW_REG:
to = settings.NOTIFY_NEW_REG
message = """\
Greetings,<br><br>
A new vehicle registration has been submitted by %s.<br><br>
Go here to view or edit the request: <br>
<a href="%s">%s</a>
<br><br>
Sincerely,<br><br>
The Janelia Parking Permit Program
""" % (self.user_display_name(), self.get_edit_url(True), self.get_edit_url(True))
subject = 'A new parking permit request has been entered'
from_email = 'parkingpermit-donotreply@janelia.hhmi.org'
text_content = re.sub(r'<[^>]+>','',message)
html_content = message
msg = EmailMultiAlternatives(subject, text_content, from_email, to)
msg.attach_alternative(html_content, "text/html")
msg.send()
class Violation(AuditableTable):
""" """
class Meta:
ordering = ['serial_number']
serial_number = models.CharField(max_length=50, unique=True)
vehicle_registration = models.ForeignKey(VehicleRegistration,) # limit_choices_to = {'active': True}) .. would be good but breaks old records:-(
reason = models.ForeignKey(ViolationReason)
location = models.ForeignKey(ParkingLocation, null=True, blank=True)
violation_datetime = models.DateTimeField(blank=True)
notes = models.CharField(max_length=500, blank=True, help_text="Optional notes")
photo = models.ImageField(blank=True, upload_to='violation_photos',
help_text="Optional image of infraction")
def __unicode__(self):
ret_str = "%s / %s - %s" % (self.reason, self.created_datetime,
self.vehicle_registration)
return ret_str
def user_display_name(self):
if self.vehicle_registration.user:
return self.vehicle_registration.user.get_profile().display_name
else:
return str(self.vehicle_registration.non_cred_first_name) + ' ' + str(self.vehicle_registration.non_cred_last_name)
def user_dept_company(self):
if self.vehicle_registration.user:
return self.vehicle_registration.user.get_profile().description
else:
return self.vehicle_registration.non_cred_dept_company
class UserProfile(models.Model):
"""Additional information to be stored with each user"""
# This field is required.
user = models.OneToOneField(User)
work_phone = models.CharField(max_length=255, blank=True)
job_title = models.CharField(max_length=255, blank=True)
department = models.CharField(max_length=255, blank=True)
employee_num = models.CharField(max_length=30)
LDAP_name = models.CharField(max_length=255)
description = models.CharField(max_length=500, blank=True)
company = models.CharField(max_length=500, blank=True)
display_name = models.CharField(max_length=100, blank=True)
room = models.CharField(max_length=100, help_text="Location", blank=True)
is_active_employee = models.BooleanField(default=True)
date_severed = models.DateField(blank=True)
employee_type = models.CharField(max_length=100, blank=True,
choices=(('SR','Shared Resource'),('RESEARCH','Research')))
gender = models.CharField(max_length=100, blank=True,
choices=(('m','Male'),('f','Female')))
def date_joined(self):
return self.user.date_joined
#Make sure a user profile gets created if a user doesn't have one
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
#Use any string with dispatch_uid to prevent signal from being fired once for every
#time the module is imported. Stupid Django bug ...
post_save.connect(create_user_profile, sender=User, dispatch_uid="models.py")
class MyVehicalsProxy(VehicleRegistration):
"""This is a dummy model for a different object view in admin interface
see: http://stackoverflow.com/questions/1861897/django-filtering-or-displaying-a-model-method-in-django-admin
"""
class Meta:
proxy=True
verbose_name = "Registered Vehicle"
verbose_name_plural = "My Registered Vehicles"
ordering = ['-active','vehicle']
class PendingVehicalsProxy(VehicleRegistration):
"""This is a dummy model for a different object view in admin interface
see: http://stackoverflow.com/questions/1861897/django-filtering-or-displaying-a-model-method-in-django-admin
This displays any registrations without a year or parking number.
"""
class Meta:
proxy=True
verbose_name = "Pending Registered Vehicle"
verbose_name_plural = "Pending Registered Vehicles"
ordering = ['-updated_datetime']
class OffboardedVehicalsProxy(VehicleRegistration):
"""This is a dummy model for a different object view in admin interface
see: http://stackoverflow.com/questions/1861897/django-filtering-or-displaying-a-model-method-in-django-admin
This displays any active registrations for employees who have been offboarded.
"""
class Meta:
proxy=True
verbose_name = "Offboarded Employee - Registered Vehicle"
verbose_name_plural = "Offboarded Employees - Registered Vehicles"
ordering = ['-updated_datetime']
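# Illustrative sketch (editorial addition, not part of this module): proxy models like the
# ones above are normally registered separately in the project's admin.py so that each gets
# its own list view, e.g.:
#
#   from django.contrib import admin
#   from ParkingPermit.models import MyVehicalsProxy
#   admin.site.register(MyVehicalsProxy)
#
# Any per-view filtering or permissions would live in a ModelAdmin subclass; the project's
# actual admin configuration is not shown here.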
|
bsd-3-clause
| 6,460,956,195,560,898,000
| 40.071174
| 155
| 0.673858
| false
| 3.742218
| false
| false
| false
|
0x7678/binwalk
|
src/binwalk/modules/binvis.py
|
1
|
10370
|
# Generates 3D visualizations of input files.
import os
from binwalk.core.compat import *
from binwalk.core.common import BlockFile
from binwalk.core.module import Module, Option, Kwarg
class Plotter(Module):
'''
Base class for visualizing binaries in Qt.
Other plotter classes are derived from this.
'''
VIEW_DISTANCE = 1024
MAX_2D_PLOT_POINTS = 12500
MAX_3D_PLOT_POINTS = 25000
TITLE = "Binary Visualization"
CLI = [
Option(short='3',
long='3D',
kwargs={'axis' : 3, 'enabled' : True},
description='Generate a 3D binary visualization'),
Option(short='2',
long='2D',
kwargs={'axis' : 2, 'enabled' : True},
description='Project data points onto 3D cube walls only'),
Option(short='Z',
long='points',
type=int,
kwargs={'max_points' : 0},
description='Set the maximum number of plotted data points'),
# Option(short='V',
# long='grids',
# kwargs={'show_grids' : True},
# description='Display the x-y-z grids in the resulting plot'),
]
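    # Editorial note: with these options registered, running e.g. "binwalk -3 firmware.bin"
    # (or "binwalk -2 firmware.bin") would enable this module; the file name is a placeholder.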
KWARGS = [
Kwarg(name='axis', default=3),
Kwarg(name='max_points', default=0),
Kwarg(name='show_grids', default=False),
Kwarg(name='enabled', default=False),
]
# There isn't really any useful data to print to console. Disable header and result output.
HEADER = None
RESULT = None
def init(self):
import pyqtgraph.opengl as gl
from pyqtgraph.Qt import QtGui
self.verbose = self.config.verbose
self.offset = self.config.offset
self.length = self.config.length
self.plane_count = -1
self.plot_points = None
if self.axis == 2:
self.MAX_PLOT_POINTS = self.MAX_2D_PLOT_POINTS
self._generate_data_point = self._generate_2d_data_point
elif self.axis == 3:
self.MAX_PLOT_POINTS = self.MAX_3D_PLOT_POINTS
self._generate_data_point = self._generate_3d_data_point
else:
raise Exception("Invalid Plotter axis specified: %d. Must be one of: [2,3]" % self.axis)
if not self.max_points:
self.max_points = self.MAX_PLOT_POINTS
self.app = QtGui.QApplication([])
self.window = gl.GLViewWidget()
self.window.opts['distance'] = self.VIEW_DISTANCE
if len(self.config.target_files) == 1:
self.window.setWindowTitle(self.config.target_files[0].name)
def _print(self, message):
'''
Print console messages. For internal use only.
'''
if self.verbose:
print(message)
def _generate_plot_points(self, data_points):
'''
Generates plot points from a list of data points.
        @data_points - A dictionary containing each unique point and its frequency of occurrence.
Returns a set of plot points.
'''
total = 0
min_weight = 0
weightings = {}
plot_points = {}
# If the number of data points exceeds the maximum number of allowed data points, use a
        # weighting system to eliminate data points that occur less frequently.
if sum(data_points.values()) > self.max_points:
# First, generate a set of weight values 1 - 10
for i in range(1, 11):
weightings[i] = 0
            # Go through every data point and the number of times that point occurs
for (point, count) in iterator(data_points):
# For each data point, compare it to each remaining weight value
for w in get_keys(weightings):
# If the number of times this data point occurred is >= the weight value,
# then increment the weight value. Since weight values are ordered lowest
# to highest, this means that more frequent data points also increment lower
# weight values. Thus, the more high-frequency data points there are, the
# more lower-frequency data points are eliminated.
if count >= w:
weightings[w] += 1
else:
break
# Throw out weight values that exceed the maximum number of data points
if weightings[w] > self.max_points:
del weightings[w]
# If there's only one weight value left, no sense in continuing the loop...
if len(weightings) == 1:
break
# The least weighted value is our minimum weight
min_weight = min(weightings)
# Get rid of all data points that occur less frequently than our minimum weight
for point in get_keys(data_points):
if data_points[point] < min_weight:
del data_points[point]
for point in sorted(data_points, key=data_points.get, reverse=True):
plot_points[point] = data_points[point]
# Register this as a result in case future modules need access to the raw point information,
# but mark plot as False to prevent the entropy module from attempting to overlay this data on its graph.
self.result(point=point, plot=False)
total += 1
if total >= self.max_points:
break
return plot_points
def _generate_data_point(self, data):
'''
Subclasses must override this to return the appropriate data point.
@data - A string of data self.axis in length.
Returns a data point tuple.
'''
return (0,0,0)
def _generate_data_points(self, fp):
'''
        Generates a dictionary of data points and their frequency of occurrence.
@fp - The BlockFile object to generate data points from.
Returns a dictionary.
'''
i = 0
data_points = {}
self._print("Generating data points for %s" % fp.name)
# We don't need any extra data from BlockFile
fp.set_block_size(peek=0)
while True:
(data, dlen) = fp.read_block()
if not data or not dlen:
break
i = 0
while (i+(self.axis-1)) < dlen:
point = self._generate_data_point(data[i:i+self.axis])
if has_key(data_points, point):
data_points[point] += 1
else:
data_points[point] = 1
i += 3
return data_points
def _generate_plot(self, plot_points):
import numpy as np
import pyqtgraph.opengl as gl
nitems = float(len(plot_points))
pos = np.empty((nitems, 3))
size = np.empty((nitems))
color = np.empty((nitems, 4))
i = 0
for (point, weight) in iterator(plot_points):
r = 0.0
g = 0.0
b = 0.0
pos[i] = point
frequency_percentage = (weight / nitems)
# Give points that occur more frequently a brighter color and larger point size.
# Frequency is determined as a percentage of total unique data points.
if frequency_percentage > .010:
size[i] = .20
r = 1.0
elif frequency_percentage > .005:
size[i] = .15
b = 1.0
elif frequency_percentage > .002:
size[i] = .10
g = 1.0
r = 1.0
else:
size[i] = .05
g = 1.0
color[i] = (r, g, b, 1.0)
i += 1
scatter_plot = gl.GLScatterPlotItem(pos=pos, size=size, color=color, pxMode=False)
scatter_plot.translate(-127.5, -127.5, -127.5)
return scatter_plot
def plot(self, wait=True):
import pyqtgraph.opengl as gl
self.window.show()
if self.show_grids:
xgrid = gl.GLGridItem()
ygrid = gl.GLGridItem()
zgrid = gl.GLGridItem()
self.window.addItem(xgrid)
self.window.addItem(ygrid)
self.window.addItem(zgrid)
# Rotate x and y grids to face the correct direction
xgrid.rotate(90, 0, 1, 0)
ygrid.rotate(90, 1, 0, 0)
# Scale grids to the appropriate dimensions
xgrid.scale(12.8, 12.8, 12.8)
ygrid.scale(12.8, 12.8, 12.8)
zgrid.scale(12.8, 12.8, 12.8)
for fd in iter(self.next_file, None):
data_points = self._generate_data_points(fd)
self._print("Generating plot points from %d data points" % len(data_points))
self.plot_points = self._generate_plot_points(data_points)
del data_points
self._print("Generating graph from %d plot points" % len(self.plot_points))
self.window.addItem(self._generate_plot(self.plot_points))
if wait:
self.wait()
def wait(self):
from pyqtgraph.Qt import QtCore, QtGui
t = QtCore.QTimer()
t.start(50)
QtGui.QApplication.instance().exec_()
def _generate_3d_data_point(self, data):
'''
Plot data points within a 3D cube.
'''
return (ord(data[0]), ord(data[1]), ord(data[2]))
def _generate_2d_data_point(self, data):
'''
Plot data points projected on each cube face.
'''
self.plane_count += 1
if self.plane_count > 5:
self.plane_count = 0
if self.plane_count == 0:
return (0, ord(data[0]), ord(data[1]))
elif self.plane_count == 1:
return (ord(data[0]), 0, ord(data[1]))
elif self.plane_count == 2:
return (ord(data[0]), ord(data[1]), 0)
elif self.plane_count == 3:
return (255, ord(data[0]), ord(data[1]))
elif self.plane_count == 4:
return (ord(data[0]), 255, ord(data[1]))
elif self.plane_count == 5:
return (ord(data[0]), ord(data[1]), 255)
def run(self):
self.plot()
return True
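# Illustrative sketch (editorial addition, not part of binwalk): the weighting idea used in
# Plotter._generate_plot_points, reduced to a standalone helper. Given per-point frequencies
# and a point budget, it keeps only points at least as frequent as the smallest weight (1-10)
# whose bucket still fits within the budget.
def _toy_weight_filter(data_points, max_points):
    # weightings[w] counts how many points occur at least w times
    weightings = dict((w, 0) for w in range(1, 11))
    for count in data_points.values():
        for w in sorted(weightings):
            if count >= w:
                weightings[w] += 1
            else:
                break
    surviving = [w for (w, n) in weightings.items() if n <= max_points]
    min_weight = min(surviving) if surviving else max(weightings)
    return dict((p, c) for (p, c) in data_points.items() if c >= min_weight)
# Example: _toy_weight_filter({'a': 1, 'b': 4, 'c': 9}, max_points=2) returns {'b': 4, 'c': 9}.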
|
mit
| -8,027,040,989,788,108,000
| 32.451613
| 117
| 0.539923
| false
| 4.10368
| false
| false
| false
|
dmilith/SublimeText3-dmilith
|
Packages/pymdownx/st3/pymdownx/mark.py
|
1
|
2825
|
"""
Mark.
pymdownx.mark
Really simple plugin to add support for
<mark>test</mark> tags as ==test==
MIT license.
Copyright (c) 2014 - 2017 Isaac Muse <isaacmuse@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import re
from markdown import Extension
from . import util
SMART_CONTENT = r'((?:(?<=\s)=+?(?=\s)|.)+?=*?)'
CONTENT = r'((?:[^=]|(?<!={2})=)+?)'
# ==mark==
MARK = r'(={2})(?!\s)%s(?<!\s)\1' % CONTENT
# ==mark==
SMART_MARK = r'(?:(?<=_)|(?<![\w=]))(={2})(?![\s=])%s(?<!\s)\1(?:(?=_)|(?![\w=]))' % SMART_CONTENT
class MarkProcessor(util.PatternSequenceProcessor):
"""Handle mark patterns."""
PATTERNS = [
util.PatSeqItem(re.compile(MARK, re.DOTALL | re.UNICODE), 'single', 'mark')
]
class MarkSmartProcessor(util.PatternSequenceProcessor):
"""Handle smart mark patterns."""
PATTERNS = [
util.PatSeqItem(re.compile(SMART_MARK, re.DOTALL | re.UNICODE), 'single', 'mark')
]
class MarkExtension(Extension):
"""Add the mark extension to Markdown class."""
def __init__(self, *args, **kwargs):
"""Initialize."""
self.config = {
'smart_mark': [True, "Treat ==connected==words== intelligently - Default: True"]
}
super(MarkExtension, self).__init__(*args, **kwargs)
def extendMarkdown(self, md):
"""Insert `<mark>test</mark>` tags as `==test==`."""
config = self.getConfigs()
smart = bool(config.get('smart_mark', True))
md.registerExtension(self)
escape_chars = []
escape_chars.append('=')
util.escape_chars(md, escape_chars)
mark = MarkSmartProcessor(r'=') if smart else MarkProcessor(r'=')
md.inlinePatterns.register(mark, "mark", 65)
def makeExtension(*args, **kwargs):
"""Return extension."""
return MarkExtension(*args, **kwargs)
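# Illustrative usage sketch (editorial addition, not part of this module); assumes the
# pymdownx package is installed so the extension can be loaded by name.
def _example_mark_usage():
    import markdown
    html = markdown.markdown("This is ==important== text.", extensions=["pymdownx.mark"])
    # Expected output: <p>This is <mark>important</mark> text.</p>
    return html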
|
mit
| -617,774,133,755,232,400
| 32.235294
| 111
| 0.662655
| false
| 3.746684
| false
| false
| false
|
agapow/egas
|
egas/models.py
|
1
|
2568
|
### IMPORTS
from flask_appbuilder import Model
from flask_appbuilder.models.mixins import AuditMixin
from sqlalchemy import Table, ForeignKey, Column, Integer, String, Enum, Float, Text
from sqlalchemy.orm import relationship
#from sqlalchemy import UniqueConstraint
from . import consts
from . import utils
### CODE ###
## Linking table between tags and associations
tag_membership_table = Table ('tag_membership', Model.metadata,
Column ('assoc_id', String(48), ForeignKey ('associations.id')),
Column ('tag_id', Integer, ForeignKey ('tags.id'))
)
def gen_assoc_id (context):
return "%s.%s" % (context.current_parameters['snp_id'],
context.current_parameters['cpg_id'])
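# Editorial note: with current_parameters {'snp_id': 'rs123', 'cpg_id': 'cg00000029'},
# gen_assoc_id returns 'rs123.cg00000029'; the ids shown are placeholders.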
class Association (AuditMixin, Model):
"""
A SNP and methylation pairing with statistical support.
"""
__tablename__ = 'associations'
## Properties:
id = Column (String (48), primary_key=True, default=gen_assoc_id)
snp_id = Column (String (16), nullable=False)
snp_locn_chr = Column (Enum (*consts.chromosomes), nullable=False)
snp_locn_posn = Column (Integer, nullable=False)
snp_base_wild = Column (String (1), nullable=False)
snp_base_var = Column (String (1), nullable=False)
cpg_id = Column (String (16), nullable=False)
cpg_locn_chr = Column (Enum (*consts.chromosomes), nullable=False)
cpg_locn_posn = Column (Integer, nullable=False)
stat_beta = Column (Float)
stat_stderr = Column (Float)
stat_pval = Column (Float)
tags = relationship ('Tag', secondary=tag_membership_table, back_populates='associations')
def __repr__(self):
return utils.simple_repr (self, 'id', 'snp_id', 'cpg_id')
class Tag (AuditMixin, Model):
"""
A group of associations, implemented as tagging.
"""
__tablename__ = 'tags'
## Properties:
id = Column (Integer, autoincrement=True, primary_key=True)
title = Column (String (64), nullable=False)
description = Column (Text())
associations = relationship ('Association', secondary=tag_membership_table, back_populates='tags')
def __repr__(self):
return utils.simple_repr (self, 'id', 'title', 'description')
class News (AuditMixin, Model):
"""
News items and updates for the website.
"""
__tablename__ = 'news'
## Properties:
id = Column (Integer, autoincrement=True, primary_key=True)
title = Column (String (64), nullable=False)
body = Column (Text(), nullable=False)
## Accessors:
## Utils:
def __repr__(self):
return utils.simple_repr (self, 'id', 'title', 'body')
### END ###
|
mit
| 7,409,194,600,156,728,000
| 24.425743
| 101
| 0.669782
| false
| 3.542069
| false
| false
| false
|
PalisadoesFoundation/switchmap-ng
|
switchmap/test/test_general.py
|
1
|
15337
|
#!/usr/bin/env python3
"""Test the general module."""
import getpass
import unittest
import random
import os
import sys
import string
import tempfile
import yaml
import shutil
# Try to create a working PYTHONPATH
TEST_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
SWITCHMAP_DIRECTORY = os.path.abspath(os.path.join(TEST_DIRECTORY, os.pardir))
ROOT_DIRECTORY = os.path.abspath(os.path.join(SWITCHMAP_DIRECTORY, os.pardir))
if TEST_DIRECTORY.endswith('/switchmap-ng/switchmap/test') is True:
sys.path.append(ROOT_DIRECTORY)
else:
print(
        'This script is not installed in the "switchmap-ng/switchmap/test" directory. '
        'Please fix.')
sys.exit(2)
from switchmap.utils import general
from switchmap import switchmap
class KnownValues(unittest.TestCase):
"""Checks all functions and methods."""
#########################################################################
# General object setup
#########################################################################
# Required
maxDiff = None
random_string = ''.join([random.choice(
string.ascii_letters + string.digits) for n in range(9)])
def test_cli_help(self):
"""Testing method / function cli_help."""
pass
def test_systemd_daemon(self):
"""Testing function systemd_daemon."""
pass
def test_systemd_exists(self):
"""Testing function systemd_exists."""
# Get result for unknown service
agent_name = self.random_string
result = general.systemd_exists(agent_name)
self.assertEqual(result, False)
def test_check_sudo(self):
"""Testing function check_sudo."""
# Test with sudo variable set
result = 'SUDO_UID' in os.environ
self.assertEqual(result, False)
# Test with sudo variable set
os.environ['SUDO_UID'] = getpass.getuser()
with self.assertRaises(SystemExit):
general.check_sudo()
def test_check_user(self):
"""Testing function check_user."""
pass
def test_root_directory(self):
"""Testing method / function root_directory."""
# Initializing key variables
# Determine root directory for switchmap
switchmap_dir = switchmap.__path__[0]
components = switchmap_dir.split(os.sep)
# Determine root directory 2 levels above
root_dir = os.sep.join(components[0:-2])
result = general.root_directory()
self.assertEqual(result, root_dir)
def test_get_hosts(self):
"""Testing method / function get_hosts."""
# Initializing key variables
pass
def test_read_yaml_file(self):
"""Testing method / function read_yaml_file."""
# Initializing key variables
dict_1 = {
'key1': 1,
'key2': 2,
'key3': 3,
'key4': 4,
}
# Create temp file with known data
directory = tempfile.mkdtemp()
file_data = [
(('{}/file_1.yaml').format(directory), dict_1)
]
for item in file_data:
filename = item[0]
data_dict = item[1]
with open(filename, 'w') as filehandle:
yaml.dump(data_dict, filehandle, default_flow_style=False)
# Get Results
result = general.read_yaml_file(filename)
# Test equivalence
for key in result.keys():
self.assertEqual(data_dict[key], result[key])
# Clean up
filelist = [
next_file for next_file in os.listdir(
directory) if next_file.endswith('.yaml')]
for delete_file in filelist:
delete_path = ('{}/{}').format(directory, delete_file)
os.remove(delete_path)
os.removedirs(directory)
def test_read_yaml_files(self):
"""Testing method / function read_yaml_files."""
# Initializing key variables
dict_1 = {
'key1': 1,
'key2': 2,
'key3': 3,
'key4': 4,
}
dict_2 = {
'key6': 6,
'key7': 7,
}
dict_3 = {}
# Populate a third dictionary with contents of other dictionaries.
for key, value in dict_1.items():
dict_3[key] = value
for key, value in dict_2.items():
dict_3[key] = value
# Create temp file with known data
directory = tempfile.mkdtemp()
filenames = {
('%s/file_1.yaml') % (directory): dict_1,
('%s/file_2.yaml') % (directory): dict_2
}
for filename, data_dict in filenames.items():
with open(filename, 'w') as filehandle:
yaml.dump(data_dict, filehandle, default_flow_style=False)
# Get Results
result = general.read_yaml_files([directory])
# Clean up
for key in result.keys():
self.assertEqual(dict_3[key], result[key])
filelist = [
next_file for next_file in os.listdir(
directory) if next_file.endswith('.yaml')]
for delete_file in filelist:
delete_path = ('%s/%s') % (directory, delete_file)
os.remove(delete_path)
os.removedirs(directory)
def test_run_script(self):
"""Testing method / function run_script."""
# Initializing key variables
pass
def test_delete_files(self):
"""Testing method / function delete_files."""
# Testing with a known invalid directory
directory = self.random_string
with self.assertRaises(SystemExit):
general.delete_files(directory)
# Creating temporary yaml and json files for testing
directory = tempfile.mkdtemp()
testfiles = ['test1.yaml', 'test2.yaml', 'test3.json']
for filename in testfiles:
filepath = '{}/{}'.format(directory, filename)
open(filepath, 'a').close()
# Testing if all yaml files were created
count = len([name for name in os.listdir(
directory) if name.endswith('.yaml')])
self.assertEqual(count, 2)
# Test if json file was created
jcount = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(jcount, 1)
# Deleting all yaml files using function
general.delete_files(directory)
# Test if all yaml files were deleted
result = len([name for name in os.listdir(
directory) if name.endswith('.yaml')])
self.assertEqual(result, 0)
# Test if json file was not deleted
jcount = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(jcount, 1)
# Delete json file
general.delete_files(directory, extension='.json')
# Test if json file was deleted
jcount = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(jcount, 0)
# Removing test directory
os.removedirs(directory)
# Test if directory has been deleted
self.assertEqual(os.path.isdir(directory), False)
def test_config_directories(self):
"""Testing method / function config_directories."""
# Initializing key variables
# Initialize key variables
save_directory = None
if 'SWITCHMAP_CONFIGDIR' in os.environ:
save_directory = os.environ['SWITCHMAP_CONFIGDIR']
# Try with no SWITCHMAP_CONFIGDIR
os.environ.pop('SWITCHMAP_CONFIGDIR', None)
directory = '{}/etc'.format(general.root_directory())
result = general.config_directories()
self.assertEqual(result, [directory])
# Test with SWITCHMAP_CONFIGDIR set
directory = tempfile.mkdtemp()
os.environ['SWITCHMAP_CONFIGDIR'] = directory
result = general.config_directories()
self.assertEqual(result, [directory])
# Restore state
if save_directory is not None:
os.environ['SWITCHMAP_CONFIGDIR'] = save_directory
def test_search_file(self):
"""Testing method / function search_file."""
# Initializing key variables
result = general.search_file('cp')
self.assertEqual(result, '/bin/cp')
def test_move_files(self):
"""Testing method / function move_files."""
# Initialize key variables
source_filenames = {}
target_filenames = {}
#################################################
# Test with invalid source directory
#################################################
invalid_path = ('/tmp/%s.%s') % (
self.random_string,
self.random_string)
with self.assertRaises(SystemExit):
general.move_files(invalid_path, '/tmp')
#################################################
# Test with invalid destination directory
#################################################
invalid_path = ('/tmp/%s.%s') % (
self.random_string,
self.random_string)
with self.assertRaises(SystemExit):
general.move_files('/tmp', invalid_path)
#################################################
# Test with valid directory
#################################################
# Create a source directory
source_dir = ('/tmp/%s.1') % (self.random_string)
if os.path.exists(source_dir) is False:
os.makedirs(source_dir)
# Create a target directory
target_dir = ('/tmp/%s.2') % (self.random_string)
if os.path.exists(target_dir) is False:
os.makedirs(target_dir)
# Place files in the directory
for count in range(0, 4):
filename = ''.join([random.choice(
string.ascii_letters + string.digits) for n in range(15)])
source_filenames[count] = ('%s/%s') % (source_dir, filename)
target_filenames[count] = ('%s/%s') % (target_dir, filename)
open(source_filenames[count], 'a').close()
# Check files in directory
self.assertEqual(os.path.isfile(source_filenames[count]), True)
# Delete files in directory
general.move_files(source_dir, target_dir)
# Check that files are not in source_dir
for filename in source_filenames.values():
self.assertEqual(os.path.isfile(filename), False)
        # Check that files are in target_dir
for filename in target_filenames.values():
self.assertEqual(os.path.isfile(filename), True)
# Delete directory
shutil.rmtree(source_dir)
# Delete directory
shutil.rmtree(target_dir)
def test_create_yaml_file(self):
"""Testing method / function create_yaml_file."""
# Initializing key variables
pass
def test_dict2yaml(self):
"""Testing method / function dict2yaml."""
# Initializing key variables
data_dict = {
'1': 'test 1',
'two': 'test 2'
}
data_yaml = """'1': test 1
two: test 2
"""
# Do test with good dict
yaml_result = general.dict2yaml(data_dict)
self.assertEqual(yaml_result, data_yaml)
def test_delete_file(self):
"""Test function delete_file."""
# Testing with a known invalid directory
directory = self.random_string
with self.assertRaises(SystemExit):
general.delete_files(directory)
# Creating temporary yaml and json files to test with
directory = tempfile.mkdtemp()
filenames = ['test1.yaml', 'test2.yaml', 'test3.json']
for filename in filenames:
filepath = '{}/{}'.format(directory, filename)
open(filepath, 'a').close()
# Testing if all files were created
yamlcount = len([name for name in os.listdir(
directory) if name.endswith('.yaml')])
self.assertEqual(yamlcount, 2)
jsoncount = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(jsoncount, 1)
# Testing if all json files are deleted
general.delete_files(directory, extension='.json')
result = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(result, 0)
# Testing if all yaml files are deleted
general.delete_files(directory, extension='.yaml')
result = len([name for name in os.listdir(
directory) if name.endswith('.yaml')])
self.assertEqual(result, 0)
# Removing test directory
os.removedirs(directory)
# Test if directory has been deleted
self.assertEqual(os.path.isdir(directory), False)
def test_delete_yaml_files(self):
"""Test function delete_yaml_files."""
# Testing with a known invalid directory
directory = self.random_string
with self.assertRaises(SystemExit):
general.delete_files(directory)
# Creating temporary yaml and json files for testing
directory = tempfile.mkdtemp()
testfiles = ['test1.yaml', 'test2.yaml', 'test3.json']
for filename in testfiles:
filepath = '{}/{}'.format(directory, filename)
open(filepath, 'a').close()
# Testing if all yaml files were created
count = len([name for name in os.listdir(
directory) if name.endswith('.yaml')])
self.assertEqual(count, 2)
# Test if json file was created
jcount = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(jcount, 1)
# Deleting all yaml files using function
general.delete_yaml_files(directory)
# Test if all yaml files were deleted
result = len([name for name in os.listdir(
directory) if name.endswith('.yaml')])
self.assertEqual(result, 0)
# Test if json file was not deleted
jcount = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(jcount, 1)
# Delete json file
general.delete_files(directory, extension='.json')
# Test if json file was deleted
jcount = len([name for name in os.listdir(
directory) if name.endswith('.json')])
self.assertEqual(jcount, 0)
# Removing test directory
os.removedirs(directory)
# Test if directory has been deleted
self.assertEqual(os.path.isdir(directory), False)
def test_cleanstring(self):
"""Testing method / function cleanstring."""
# Initializing key variables
dirty_string = (' %s\n \r %s \n %s ') % (
self.random_string, self.random_string, self.random_string)
clean_string = ('%s %s %s') % (
self.random_string, self.random_string, self.random_string)
# Test result
result = general.cleanstring(dirty_string)
self.assertEqual(result, clean_string)
if __name__ == '__main__':
# Do the unit test
unittest.main()
|
apache-2.0
| -1,092,549,414,544,443,000
| 32.341304
| 78
| 0.569407
| false
| 4.390782
| true
| false
| false
|
edonyM/toolkitem
|
fileprocess/emgui/filebrowser.py
|
1
|
14274
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""
# .---. .-----------
# / \ __ / ------
# / / \( )/ ----- (`-') _ _(`-') <-. (`-')_
# ////// '\/ ` --- ( OO).-/( (OO ).-> .-> \( OO) ) .->
# //// / // : : --- (,------. \ .'_ (`-')----. ,--./ ,--/ ,--.' ,-.
# // / / / `\/ '-- | .---' '`'-..__)( OO).-. ' | \ | | (`-')'.' /
# // //..\\ (| '--. | | ' |( _) | | | | . '| |)(OO \ /
# ============UU====UU==== | .--' | | / : \| |)| | | |\ | | / /)
# '//||\\` | `---. | '-' / ' '-' ' | | \ | `-/ /`
# ''`` `------' `------' `-----' `--' `--' `--'
# ######################################################################################
#
# Author: edony - edonyzpc@gmail.com
#
# twitter : @edonyzpc
#
# Last modified: 2015-07-03 21:38
#
# Filename: project_file_browser.py
#
# Description: All Rights Are Reserved
#
"""
#import scipy as sp
#import math as m
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D as Ax3
#from scipy import stats as st
#from matplotlib import cm
#import numpy as np
import os
import sys
if sys.version.startswith('3.4'):
import tkinter
else:
import Tkinter as tkinter
import tkMessageBox
import shutil
from packages.filesline.getdirections import GetDirections as GD
class PyColor(object):
""" This class is for colored print in the python interpreter!
"F3" call Addpy() function to add this class which is defined
in the .vimrc for vim Editor."""
def __init__(self):
self.self_doc = r"""
        STYLE: \033['display mode';'foreground';'background'm
DETAILS:
FOREGROUND BACKGOUND COLOR
---------------------------------------
30 40 black
31 41 red
32 42 green
33 43 yellow
34 44 blue
35 45 purple
36 46 cyan
37 47 white
        DISPLAY MODE     DETAILS
-------------------------
0 default
1 highlight
4 underline
5 flicker
7 reverse
        8                non-visible
e.g:
\033[1;31;40m <!--1-highlight;31-foreground red;40-background black-->
\033[0m <!--set all into default-->
"""
self.warningcolor = '\033[0;31m'
self.tipcolor = '\033[0;32m'
self.endcolor = '\033[0m'
self._newcolor = ''
@property
def new(self):
"""
Customized Python Print Color.
"""
return self._newcolor
@new.setter
def new(self, color_str):
"""
New Color.
"""
self._newcolor = color_str
def disable(self):
"""
Disable Color Print.
"""
self.warningcolor = ''
self.endcolor = ''
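# A minimal usage sketch for PyColor (illustrative only, added for clarity; it is not
# part of the original file). The attributes simply wrap text in ANSI escape sequences:
#   pc = PyColor()
#   print(pc.warningcolor + "something went wrong" + pc.endcolor)
#   pc.new = '\033[0;34m'                     # customise the spare colour slot (blue)
#   print(pc.new + "note" + pc.endcolor)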
class GUI(tkinter.Frame):
def __init__(self, root):
tkinter.Frame.__init__(self, root, background="white")
self.root = root
self.path = ""
self.files = {}
self.pc_file = ""
self.stl_file = ""
self.igs_file = ""
self.init_gui()
self.pack(fill=tkinter.BOTH, expand=1)
def get_files(self, path=None):
if path:
path = GD.normal_path(path)
files_list = GD(path)
else:
self.path = GD.normal_path(self.path)
files_list = GD(self.path)
files_list.get_dir()
files_list.all_files()
self.files = files_list.files
def update_files(self, path):
if self.path != path:
self.get_files(path)
else:
self.get_files(self.path)
def update_listbox(self):
self.lsbox_pc.delete(0, self.lsbox_pc.size())
self.lsbox_stl.delete(0, self.lsbox_stl.size())
self.lsbox_igs.delete(0, self.lsbox_igs.size())
for file in self.files:
for name in self.files[file]:
if name.endswith(".sp"):
self.lsbox_pc.insert(tkinter.END, name)
if name.endswith(".stl"):
self.lsbox_stl.insert(tkinter.END, name)
if name.endswith(".igs"):
self.lsbox_igs.insert(tkinter.END, name)
def update_listbox_search(self, search_txt):
self.lsbox_pc.delete(0, self.lsbox_pc.size())
for file in self.files:
for name in self.files[file]:
if name.endswith(".sp") and search_txt in name:
self.lsbox_pc.insert(tkinter.END, name)
def match_files(self, match_re):
self.lsbox_stl.delete(0, self.lsbox_stl.size())
self.lsbox_igs.delete(0, self.lsbox_igs.size())
for file in self.files:
for name in self.files[file]:
if name.startswith(match_re+".") and name.endswith("stl"):
self.lsbox_stl.insert(tkinter.END, name)
if name.startswith(match_re+".") and name.endswith("igs"):
self.lsbox_igs.insert(tkinter.END, name)
def full_path_file(self, name):
for path, item in self.files.items():
if name in item:
return path + "/" + name
def init_gui(self):
#self.get_files()
# main frame
self.frame_top = tkinter.Frame(self.root, height=400, width=800, background="black")
self.frame_top.pack(side=tkinter.TOP, fill=tkinter.BOTH)
self.frame_bottom = tkinter.Frame(self.root, height=100, width=400, background="black")
self.frame_bottom.pack(side=tkinter.RIGHT, fill=tkinter.BOTH)
self.frame_bottom_l = tkinter.Frame(self.root, height=100, width=400, background="black")
self.frame_bottom_l.pack(side=tkinter.LEFT, fill=tkinter.BOTH)
# labelframe of bottom frame
self.labframe = tkinter.LabelFrame(self.frame_bottom, text="Console",
height=50, width=400, background="white")
self.labframe.pack(side=tkinter.RIGHT, fill=tkinter.BOTH, expand=1)
# labelframel of bottom frame
self.labframel = tkinter.LabelFrame(self.frame_bottom_l, text="Enter",
height=50, width=400, background="white")
self.labframel.pack(side=tkinter.LEFT, fill=tkinter.BOTH, expand=1)
# labelframe of top frame
self.labframe_bottom = tkinter.LabelFrame(self.frame_top, text="Point Cloud",
height=400, width=800, background="cyan")
self.labframe_bottom.pack(side=tkinter.BOTTOM, fill=tkinter.BOTH)
self.labframe_left = tkinter.LabelFrame(self.frame_top, text="STL",
height=400, width=400, background="cyan")
self.labframe_left.pack(side=tkinter.LEFT, fill=tkinter.BOTH, expand=1)
self.labframe_right = tkinter.LabelFrame(self.frame_top, text="IGS",
height=400, width=400, background="cyan")
self.labframe_right.pack(side=tkinter.RIGHT, fill=tkinter.BOTH, expand=1)
# message
# message of labframe
txt = tkinter.StringVar()
msm_status = tkinter.Message(self.labframe, textvariable=txt, width=200, background="white")
msm_status.pack(side=tkinter.LEFT, fill=tkinter.BOTH)
txt.set("FILE MANAGEMENT START...")
# button
# quit button
quit_button = tkinter.Button(self.labframe, text="Browser", relief=tkinter.SUNKEN,
fg="blue", height=50,
activebackground="green", command=self.root.destroy)
quit_button.pack(side=tkinter.RIGHT, fill=tkinter.BOTH, expand=1)
# entry
# entry of labframe
## enter event handler
def getin(content):
content = enter.get()
self.path = content
self.update_files(content)
self.update_listbox()
txt.set(enter.get())
enter_str = tkinter.StringVar()
enter = tkinter.Entry(self.labframel, textvariable=enter_str, width=400, background="red")
enter.pack(side=tkinter.TOP, fill=tkinter.BOTH)
enter.bind("<Return>", getin)
def rmpcfile(event):
if tkMessageBox.askokcancel("Remove", "Are you sure to remove the file?"):
event = self.lsbox_pc.get(self.lsbox_pc.curselection())
remove_file = self.full_path_file(event)
os.remove(remove_file)
self.get_files()
self.update_listbox()
def rmstlfile(event):
if tkMessageBox.askokcancel("Remove", "Are you sure to remove the file?"):
event = self.lsbox_stl.get(self.lsbox_stl.curselection())
remove_file = self.full_path_file(event)
os.remove(remove_file)
self.get_files()
self.update_listbox()
def rmigsfile(event):
if tkMessageBox.askokcancel("Remove", "Are you sure to remove the file?"):
event = self.lsbox_igs.get(self.lsbox_igs.curselection())
remove_file = self.full_path_file(event)
os.remove(remove_file)
self.get_files()
self.update_listbox()
def addfile(evect):
topdlg = tkinter.Toplevel(self.root)
topdlg.title("Add Files")
topdlg.geometry("250x80+300+200")
def mvfile():
if self.path:
event = enter_add_file.get()
print(event)
filename = event[event.rfind("/"):]
shutil.move(event, self.path+"/"+filename)
self.get_files()
self.update_listbox()
topdlg.destroy()
else:
txt.set("Please Set The Root Path")
topdlg.destroy()
enter_add_file = tkinter.Entry(topdlg, width=250)
label = tkinter.Label(topdlg, text="New File Name With Path", width=250, anchor="w", justify="left")
label.pack(side=tkinter.TOP)
#enter_add_file.bind("<Retrun>", mvfile)
enter_add_file.pack()
button_add_file = tkinter.Button(topdlg, text="Add Single File", command=mvfile)
button_add_file.pack(side=tkinter.LEFT)
button_add_file_list = tkinter.Button(topdlg, text="Add Multiple File", command=mvfile)
button_add_file_list.pack(side=tkinter.RIGHT)
# listbox
# listbox of point cloud labelframe
## lsbox_pc event handler
def selectpcfile(event):
event = self.lsbox_pc.get(self.lsbox_pc.curselection())
name_without = event.split(".")[0]
self.match_files(name_without)
self.pc_file = self.full_path_file(event)
txt.set(self.lsbox_pc.get(self.lsbox_pc.curselection()))
txt.set(self.pc_file)
self.lsbox_pc = tkinter.Listbox(self.labframe_bottom,
selectmode=tkinter.BROWSE, background="yellow")
self.lsbox_pc.bind("<Double-Button-1>", selectpcfile)
self.lsbox_pc.bind("<Double-Button-3>", rmpcfile)
self.lsbox_pc.bind("<Button-2>", addfile)
for file in self.files:
for name in self.files[file]:
if name.endswith(".sp"):
self.lsbox_pc.insert(tkinter.END, name)
self.lsbox_pc.pack(side=tkinter.LEFT, fill=tkinter.BOTH, expand=1)
## entry for lsbox_pc search
### enter for lsbox_pc event handler
def getsearch(content):
content = enter_pc.get()
self.update_listbox_search(content)
txt.set(enter_pc.get())
enter_str_pc = tkinter.StringVar()
enter_pc = tkinter.Entry(self.labframe, textvariable=enter_str_pc, background="cyan")
enter_pc.pack(side=tkinter.BOTTOM, fill=tkinter.BOTH, expand=1)
enter_pc.bind("<Return>", getsearch)
# listbox of STL labelframe
def selectstlfile(event):
event = self.lsbox_stl.get(self.lsbox_stl.curselection())
self.stl_file = self.full_path_file(event)
txt.set(self.stl_file)
self.lsbox_stl = tkinter.Listbox(self.labframe_left,
selectmode=tkinter.BROWSE, background="yellow")
self.lsbox_stl.bind("<Double-Button-1>", selectstlfile)
self.lsbox_stl.bind("<Double-Button-3>", rmstlfile)
for file in self.files:
for name in self.files[file]:
if name.endswith(".stl"):
self.lsbox_stl.insert(tkinter.END, name)
self.lsbox_stl.pack(side=tkinter.TOP, fill=tkinter.BOTH)
# listbox of IGS labelframe
def selectigsfile(event):
event = self.lsbox_igs.get(self.lsbox_igs.curselection())
self.igs_file = self.full_path_file(event)
txt.set(self.igs_file)
self.lsbox_igs = tkinter.Listbox(self.labframe_right,
selectmode=tkinter.BROWSE, background="yellow")
self.lsbox_igs.bind("<Double-Button-1>", selectigsfile)
self.lsbox_igs.bind("<Double-Button-3>", rmigsfile)
for file in self.files:
for name in self.files[file]:
if name.endswith(".igs"):
self.lsbox_igs.insert(tkinter.END, name)
self.lsbox_igs.pack(side=tkinter.TOP, fill=tkinter.BOTH)
if __name__ == "__main__":
WIN = tkinter.Tk()
WIN.geometry("800x450+200+300")
WIN.title("File Manager")
# root.resizable(width=False, height=False)
GUI = GUI(WIN)
WIN.mainloop()
STL = GUI.stl_file
IGS = GUI.igs_file
#print(STL)
#print(IGS)
|
mit
| -9,219,411,283,376,683,000
| 41.230769
| 112
| 0.519756
| false
| 3.684564
| false
| false
| false
|
DoraemonShare/yuqing
|
src/utils/utils.py
|
1
|
4066
|
#-*-coding:utf-8 -*-
cartypeMap = {
'"******************"': '"******************"',
'"******************"': '"******************"',
'"******************"': '"******************"',
'"******************"': '"******************"'
}
#Columns of interest for the current sentiment analysis; column names come from the postgresql table
concern_colcumns = ['topic', 'publicationDate', 'replyNum',
'clicks', 'postUrl', 'postContent', 'qaClassification', 'postType']
#Excel sheet header row
EXCEL_HEADER = [u'贴名', u'发帖日期', u'回复数', u'点击数', u'贴URL', u'正文', u'算法标注结果']
not_concern_postType = ['icon_01', 'icon_02', 'icon_03', 'icon_04', 'icon_05',
'read-box', 'icon_04', 'icon-jian-grey', 'icon_business',
'icon-piao-grey', 'icon_buy', 'icon_video', 'icon_official',
'icon_zuan', 'icon_zhibo', 'icon_jing']
concern_postType = ['','icon_tu icon-tu-grey', 'icon_hot', 'icon_new', 'icon-wen-grey', None, 'None']
# -*- coding:utf-8 -*-
import csv
import codecs
# import pandas
import sys
# try to fix '_csv.Error: field larger than field limit (131072)'
csv.field_size_limit(sys.maxint) #field larger than field limit (131072)
# from sqlalchemy import create_engine
# from sqlalchemy.orm import sessionmaker
# # from sqlalchemy.exc import OperationalError
# import psycopg2
# engine = create_engine('postgresql+psycopg2://postgres:oio@139.159.218.12:5432/postgres')
# engine = create_engine('postgresql+psycopg2://postgres:909@localhost:5432/postgres')
# session = sessionmaker(bind = engine)
sentence_delimiters = ['?', '!', ';', '?', '!', '。', ';', '……', '…', '\n']
def as_text(v): ## return a str / unicode string
if v is None:
return None
elif isinstance(v, bytes):
return v.decode('utf-8', errors='ignore')
elif isinstance(v, str):
return v
elif isinstance(v, unicode):
return v
else:
raise ValueError('Unknown type %r' % type(v))
def read_by_line(filename=None, delimiter=',', field=None):
    '''
    Read a csv file row by row (as a generator), splitting on the specified delimiter.
    Returns the specified field(s) or the whole csv line.
    params:
        delimiter=',', the csv delimiter, ',' by default
        field=None, if None: return the whole csv line; if a list, len(field) must be 1 or 2:
            len(field)==1 returns the specified field, len(field)==2 returns the line slice (the end field is included)
    '''
with codecs.open(filename, mode='rb', encoding='utf-8') as f:
try:
for row in f:
row = row.split(delimiter)
if field:
if len(field) == 1:
yield row[field[0]]
elif len(field) == 2:
yield row[field[0]:(field[1]+1)] #include the end field
else:
yield row
except Exception, e:
raise
TOPICS = ['油耗', '操控', '最满意的一点', '最不满意的一点',
'舒适性', '性价比', '内饰', '外观', '动力', '空间', '故障']
import pinyin
TOPICS_PINYIN = [pinyin.get(item, format='strip') for item in TOPICS]
import re
def cleaner(text):
'''
to clean text and return the text
'''
dirty_tag = [' ', '\n', '=', ' ', '&', 'pos108', '\N',
'&', 'http://club.autohome.com.cn/bbs/thread-c-.+-.+.html;',
'http://club.autohome.com.cn/bbs/thread-c-.+-.+.html', '-', '\r']
text = re.sub('|'.join(dirty_tag), '', text)
if len(text) > 10:
        #discard text that contains no Chinese characters
if re.findall(u'[\u4e00-\u9fa5]+', text):
return text
else:
return None
# if text == '\n' or text == ' ' or text == ' \n':
# return
# else:
# text = re.sub('|'.join(dirty_tag), '', text)
# return text
def str2unicode(s):
'''
    Convert a str to unicode, assuming utf-8 encoding
'''
if isinstance(s, str):
return unicode(s, 'utf-8')
else:
return s
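# --- Illustrative usage sketch (added for clarity; not part of the original module). ---
# 'posts.csv' is a hypothetical file name: read_by_line() only needs a utf-8 encoded,
# comma-separated text file. field=[0, 1] asks for an inclusive slice of columns 0..1.
if __name__ == '__main__':
    print repr(as_text(b'hello'))            # bytes are decoded to a unicode string
    for row in read_by_line('posts.csv', delimiter=',', field=[0, 1]):
        topic = row[0]
        # cleaner() returns None when the cleaned text is too short
        # or contains no Chinese characters
        print repr(cleaner(topic))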
|
bsd-3-clause
| 7,075,182,384,411,835,000
| 29.936
| 189
| 0.528712
| false
| 3.034537
| false
| false
| false
|
aewallin/openvoronoi
|
python_examples/offset/offset_2_ttt.py
|
1
|
8593
|
import openvoronoi as ovd # https://github.com/aewallin/openvoronoi
import ovdvtk # for VTK visualization, https://github.com/aewallin/openvoronoi
import truetypetracer as ttt # https://github.com/aewallin/truetype-tracer
import offset2vtk # vtk visualization helper https://github.com/aewallin/openvoronoi
import time
import vtk
import math
# insert points into VoronoiDiagram, return list of point IDs
# polygon = [p1,p2,p3,p4,p5]
# where we assume the polygon is closed, e.g. p5 connects to p1
# each point is a 2D point (p[0], p[1])
def insert_polygon_points(vd, polygon):
pts = []
for p in polygon:
pts.append(ovd.Point(p[0], p[1])) # this just converts (p[0], p[1]) format points into ovd.Point
id_list = []
print "inserting ", len(pts), " point-sites:"
m = 0
# pts = [pt1, pt2, pt3, pt4, pt5] where each pt is of class ovd.Point
for p in pts:
id_list.append(vd.addVertexSite(p)) # note we store and return the Point ID returned here!
print " ", m, " added vertex ", id_list[len(id_list) - 1]
m = m + 1
return id_list
# insert polygon line-segments based on a list of IDs returned by insert_polygon_points()
# id_list = [0, 1, 2, 3, 4, 5] defines a closed polygon; the segments are inserted as
# 0->1->2->3->4->5->0
# the IDs _must_ refer to points that have been previously inserted with vd.addVertexSite()
#
def insert_polygon_segments(vd, id_list):
j = 0
print "inserting ", len(id_list), " line-segments:"
for n in range(len(id_list)):
n_nxt = n + 1
if n == (len(id_list) - 1):
n_nxt = 0
print " ", j, "inserting segment ", id_list[n], " - ", id_list[n_nxt]
# this inserts a line-segment id_list[n] -> id_list[n_nxt] into the VoronoiDiagram
vd.addLineSite(id_list[n], id_list[n_nxt])
j = j + 1
# insert many polygons into vd
# segs is a list of polygons:
# segs = [poly1, poly2, poly3, ...]
# poly defines a closed polygon as a a list of points
# poly1 = [ [x1,y1], [x2,y2], [x3,y3], ..., [xN,yN] ]
# where the last point [xN,yN] in the list connects to the first [x1,y1]
#
def insert_many_polygons(vd, segs):
polygon_ids = []
t_before = time.time()
# all points must be inserted into the vd first!
for poly in segs:
poly_id = insert_polygon_points(vd, poly)
polygon_ids.append(poly_id)
t_after = time.time()
pt_time = t_after - t_before
# all line-segments are inserted then
t_before = time.time()
for ids in polygon_ids:
insert_polygon_segments(vd, ids)
t_after = time.time()
seg_time = t_after - t_before
return [pt_time, seg_time] # return timing-info, for benchmarking
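# Example of the expected "segs" format (illustrative only, added for clarity; the
# coordinates are made up). A single closed square polygon, well inside the far-circle:
#
#   square = [[-0.2, -0.2], [0.2, -0.2], [0.2, 0.2], [-0.2, 0.2]]
#   vd = ovd.VoronoiDiagram(1, 120)
#   insert_many_polygons(vd, [square])   # note: a *list* of polygons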
# translate all segments by x,y
def translate(segs, x, y):
out = []
for seg in segs:
seg2 = []
for p in seg:
p2 = []
p2.append(p[0] + x)
p2.append(p[1] + y)
seg2.append(p2)
out.append(seg2)
return out
# call truetype-tracer to get font input geometry
# text = the text-string we want
# scale = used to scale the geometry to fit within a unit-circle
#
# output is a list of lists:
# [ [p1,p2,p3,p4,p5,p1] ,
# [p6,p7,p8,p9,p10, ... ,pN, p6],
# ...
# ]
# each point is a 2D point ( p[0], p[1] )
# each sub-list corresponds to a closed loop of line-segments
# e.g. p1->p2->p3->p4->p5->p1
#
# If the main(outer) geometry is given in e.g. CW orientation, then
# islands (closed loops within the main geometry) are given in CCW orientation
def ttt_segments(text, scale):
wr = ttt.SEG_Writer()
wr.arc = False
wr.conic = False
wr.cubic = False
wr.conic_biarc_subdivision = 10 # this has no effect?
wr.conic_line_subdivision = 50 # this increases nr of points
wr.cubic_biarc_subdivision = 10 # no effect?
wr.cubic_line_subdivision = 10 # no effect?
wr.setFont(3)
wr.scale = float(1) / float(scale)
ttt.ttt(text, wr)
segs = wr.get_segments()
return segs
# the segments come out of truetype-tracer in a slightly wrong format
# truetype-tracer outputs closed polygons with identical points
# at the start and end of the point-list. here we get rid of this repetition.
# input:
# [ [p1,p2,p3,p4,p5,p1] ,
# [p6,p7,p8,p9,p10, ... ,pN, p6],
# ...
# ]
# this functions simply removes the repeated end-point from each segment
# output:
# [ [p1,p2,p3,p4,p5] ,
# [p6,p7,p8,p9,p10, ... ,pN],
# ...
# ]
def modify_segments(segs):
segs_mod = []
for seg in segs:
first = seg[0]
last = seg[len(seg) - 1]
assert (first[0] == last[0] and first[1] == last[1])
seg.pop()
seg.reverse() # to get interior or exterior offsets
segs_mod.append(seg)
# drawSegment(myscreen, seg)
return segs_mod
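# Worked example for modify_segments (illustrative only, added for clarity):
# with p1 = [0, 0], p2 = [1, 0], p3 = [0, 1],
#   modify_segments([[p1, p2, p3, p1]])  ->  [[p3, p2, p1]]
# i.e. the repeated end-point is dropped and the loop is reversed.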
if __name__ == "__main__":
# this sets up a VTK viewport where we can draw in 3D
w = 1920 # width and height of VTK viewport
h = 1080
# w=1024
# h=1024
myscreen = ovdvtk.VTKScreen(width=w, height=h)
ovdvtk.drawOCLtext(myscreen, rev_text=ovd.version())
myscreen.render()
scale = 1
far = 1
camPos = far
zmult = 3
myscreen.camera.SetPosition(0, -camPos / float(1000), zmult * camPos)
myscreen.camera.SetClippingRange(-(zmult + 1) * camPos, (zmult + 1) * camPos)
myscreen.camera.SetFocalPoint(0.0, 0, 0)
# create a VoronoiDiagram
# use far=1.0 for now. This means all input geometry should fit within a unit circle!
# 120 is a binning-parameter for nearest neighbor search. sqrt(n) where we have n points should be optimal
vd = ovd.VoronoiDiagram(far, 120)
# for vtk visualization of the VoronoiDiagram
# (not required for offsetting or drawing offsets)
vod = ovdvtk.VD(myscreen, vd, float(scale), textscale=0.01, vertexradius=0.003)
vod.drawFarCircle()
vod.textScale = 0.02
vod.vertexRadius = 0.0031
vod.drawVertices = 0
vod.drawVertexIndex = 0
vod.drawGenerators = 0
vod.offsetEdges = 0
vd.setEdgeOffset(0.05) # for visualization only. NOT offsetting!
# get segments from ttt
# this is the input geometry to VoronoiDiagram. It could also come from a text-file
# see the description of each function for details on the format
segs = ttt_segments("LinuxCNC", 40000)
segs = translate(segs, -0.06, 0.05)
segs = modify_segments(segs)
# build a VD from the input geometry
times = insert_many_polygons(vd, segs)
print "all sites inserted. "
print "VD check: ", vd.check() # sanity check
# this filters the diagram so we are left with only the interior or the exterior
# of the polygon. If the filtering is omitted we get offsets on both sides of the input geometry.
# try True/False here and see what happens
pi = ovd.PolygonInterior(False)
vd.filter_graph(pi)
# Create an Offset class, for offsetting.
of = ovd.Offset(vd.getGraph()) # pass the created graph to the Offset class
ofs_list = []
t_before = time.time()
for t in [0.002 * x for x in range(1, 20)]:
ofs = of.offset(t) # produce offsets at distance t
ofs_list.append(ofs)
t_after = time.time()
oftime = t_after - t_before
# offset output format
# ofs will be a list of offset-loops.
# [loop1, loop2, loop3, ...]
# each offset-loop contains offset-elements
# loop1 = [ofs1, ofs2, ofs3, ...]
# offset elements can be either lines or arcs
# an offset element is a list:
# ofs1 = [p, r, cen, cw]
# p = the end-point of the offset-element
# r = the radius if it is an arc, -1 for lines
# cen = the center-point if it is an arc
# cw = clockwise/anticlockwise True/False flag for arcs
# now we draw the offsets in VTK
print len(ofs_list), " offsets to draw:"
m = 0
for ofs in ofs_list:
print m, " / ", len(ofs_list)
offset2vtk.drawOffsets2(myscreen, ofs)
m = m + 1
# draw some text on how long Offset ran
oftext = ovdvtk.Text()
oftext.SetPos((50, 100))
oftext_text = "Offset in {0:.3f} s CPU time.".format(oftime)
oftext.SetText(oftext_text)
myscreen.addActor(oftext)
# turn off the whole VD so we can more clearly see the offsets
# a VD filtered with both True and False is essentially invisible (both the interior and exterior of a polygon removed)
pi = ovd.PolygonInterior(True)
vd.filter_graph(pi)
# display timing-info on how long the VD build took
vod.setVDText2(times)
vod.setAll()
print "PYTHON All DONE."
myscreen.render()
myscreen.iren.Start()
|
lgpl-2.1
| -6,805,446,223,300,510,000
| 33.23506
| 123
| 0.635285
| false
| 3.004545
| false
| false
| false
|
apmechev/GRID_LRT
|
docs/conf.py
|
1
|
5577
|
# -*- coding: utf-8 -*-
#
# GRID_LRT documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 5 09:40:38 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'GRID_LRT'
copyright = u'2019, Alexandar Mechev'
author = u'Alexandar Mechev'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import GRID_LRT
version = GRID_LRT.__version__
# The full version, including alpha/beta/rc tags.
release = GRID_LRT.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = "classic"
html_theme = "sphinx_rtd_theme"
#html_theme_options = {
# "rightsidebar": "false",
# "sidebarwidth": 300,
# "relbarbgcolor": "black"
#}
def skip(app, what, name, obj, skip, options):
if name == "__init__":
return False
return skip
def setup(app):
app.connect("autodoc-skip-member", skip)
app.add_javascript('copybutton.js')
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GRID_LRTdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'GRID_LRT.tex', u'GRID\\_LRT Documentation',
u'Alexandar Mechev', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grid_lrt', u'GRID_LRT Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GRID_LRT', u'GRID_LRT Documentation',
author, 'GRID_LRT', 'Distributing LOFAR processing on the Dutch Grid infrastructure',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None,
'couchdb':('http://couchdb-python.readthedocs.io/en/latest/',None)}
|
gpl-3.0
| 2,674,878,722,036,451,300
| 29.47541
| 90
| 0.671149
| false
| 3.742953
| true
| false
| false
|
xclaeys/ElastoPhi
|
postprocessing/graphes_output_err_decrease.py
|
1
|
2917
|
# -*- coding: utf-8 -*-
import Vue.Figure as figure
import Vue.Donnee as donnee
import Lecture.FonctionLectureClassique as classique
import numpy as np
import sys
########################################################################################
#------------------------------- Input -----------------------------
########################################################################################
if (len(sys.argv)==2):
filename=sys.argv[1]
print(filename+" will be printed")
else:
print("You must give the name of the output file")
sys.exit(1)
outputname_err="/".join(filename.split("/")[0:-1])+"/graphe_"+(filename.split("/")[-1]).split(".")[0]
########################################################################################
#------------------------------- Figure -----------------------------
########################################################################################
colors=["m","b","c","r","g","y","k","firebrick","purple"]
markers=["^","o",".","v"]
(dist,rank) = classique.lecture(filename,0,1)
(err1,err2) = classique.lecture(filename,2,3)
Dist = []
Rank = []
Err1 = []
Err2 = []
Courbes_dist = []
Courbes_rank = []
Courbes_err1 = []
Courbes_err2 = []
compt=0
ymax_err=0
ymin_err=1e30
offset = 49
for i in range(1,6):
Rank.append(rank[compt+0:compt+offset])
Dist.append(dist[compt+0])
Err1.append(err1[compt+0:compt+offset])
Err2.append(err2[compt+0:compt+offset])
ymax_err=max(ymax_err,max(err1[compt+0:compt+offset]))
ymax_err=max(ymax_err,max(err2[compt+0:compt+offset]))
ymin_err=min(ymin_err,min(err1[compt+0:compt+offset]))
ymin_err=min(ymin_err,min(err2[compt+0:compt+offset]))
compt+=offset
ncolor=0
for i in range(0,len(Dist)):
line1={"linestyle":"-","linewidth":3,"linecolor":colors[ncolor]}
line2={"linestyle":"--","linewidth":3,"linecolor":colors[ncolor]}
marker={"markerstyle":"None","markersize":10,"fillstyle":"full"}
Courbes_err1.append(donnee.Ligne(nom=r"ACA - distance="+str(Dist[i]),ordonnee=Err1[i],abscisse=Rank[i],line=line1,marker=marker))
Courbes_err2.append(donnee.Ligne(nom=r"SVD - distance="+str(Dist[i]),ordonnee=Err2[i],abscisse=Rank[i],line=line2,marker=marker))
ncolor+=1
xlim=[min(Rank[0])*0.75,max(Rank[0])*1.01]
ylim_erro=[ymin_err*0.75,ymax_err*1.25]
xlabel={"label":"Rank","fontsize":20}
ylabel_erro={"label":"Relative error","fontsize":20}
# titre={"titre":"Test","fontsize":20,"loc":"center"}
legende={"loc":"upper left","bbox_to_anchor":(1.01,1),"ncol":1,"fontsize":12}
Figure_erro=figure.Graphe1D(id=0,legende=legende,xlim=xlim,ylim=ylim_erro,xlabel=xlabel,ylabel=ylabel_erro,yscale="log",axis="off",format="pdf")
for courbe in Courbes_err1:
Figure_erro.AjoutCourbe(courbe)
for courbe in Courbes_err2:
Figure_erro.AjoutCourbe(courbe)
Figure_erro.TraceGraphe1D()
Figure_erro.EnregistreFigure(outputname_err)
Figure_erro.FermeFigure()
|
lgpl-3.0
| -2,422,340,530,487,132,000
| 27.598039
| 144
| 0.569421
| false
| 2.829292
| false
| false
| false
|
diffeo/Datawake
|
memex-datawake-stream/src/datawakeio/HBASEEntityDataConnector.py
|
1
|
1852
|
import happybase
from datawakeio.data_connector import ExtractedDataConnector
class HBASEDataConnector(ExtractedDataConnector):
def __init__(self, hbase_host):
ExtractedDataConnector.__init__(self)
self.hbase_host = hbase_host
self.hbase_conn = None
def open(self):
self.hbase_conn = happybase.Connection(self.hbase_host)
def close(self):
if self.hbase_conn is not None:
self.hbase_conn.close()
def _checkConn(self):
self.open()
def insertHBASE(self, rowkey_prefix, items, table):
try:
self._checkConn()
hbase_table = self.hbase_conn.table(table)
batch_put = hbase_table.batch(batch_size=len(items))
for i in items:
batch_put.put(row="%s%s" % (rowkey_prefix, i), data={"colFam:c": ""})
batch_put.send()
finally:
self.close()
def insert_entities(self, url, entity_type, entity_values):
rowkey_prefix = "%s\0%s\0" % (url, entity_type)
self.insertHBASE(rowkey_prefix, entity_values, "general_extractor_web_index_hbase")
def insert_domain_entities(self, domain, url, entity_type, entity_values):
rowkey_prefix = "%s\0%s\0%s\0" % (domain, url, entity_type)
self.insertHBASE(rowkey_prefix, entity_values, "domain_extractor_web_index_hbase")
def get_domain_entity_matches(self, domain, type, values):
try:
self._checkConn()
hbase_table = self.hbase_conn.table("datawake_domain_entities_hbase")
rowkey = "%s\0%s\0" % (domain, type)
found = []
for value in values:
for item in hbase_table.scan(row_prefix="%s%s" % (rowkey, value)):
found.append(value)
return found
finally:
self.close()
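# --- Illustrative usage sketch (added for clarity; not part of the original module). ---
# 'hbase.example.com', the domain and the URLs below are hypothetical; a reachable HBase
# Thrift server and the tables referenced above must already exist for this to run.
if __name__ == '__main__':
    connector = HBASEDataConnector('hbase.example.com')
    connector.insert_domain_entities('acme', 'http://example.com/page', 'email',
                                     ['alice@example.com', 'bob@example.com'])
    matches = connector.get_domain_entity_matches('acme', 'email', ['alice@example.com'])
    print(matches)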
|
apache-2.0
| -8,978,908,006,801,545,000
| 33.962264
| 91
| 0.591793
| false
| 3.481203
| false
| false
| false
|
FrodeSolheim/fs-uae-launcher
|
amitools/vamos/lib/lexec/Pool.py
|
1
|
1784
|
from amitools.vamos.log import log_exec
from amitools.vamos.error import *
from .Puddle import Puddle
class Pool:
def __init__(self, mem, alloc, flags, size, thresh, poolid):
self.alloc = alloc
self.mem = mem
self.minsize = size
self.flags = flags
self.thresh = thresh
self.name = " in Pool %x" % poolid
self.puddles = []
def __del__(self):
while len(self.puddles) > 0:
puddle = self.puddles.pop()
puddle.__del__()
def __str__(self):
poolstr = ""
for puddle in self.puddles:
if poolstr == "":
poolstr = "{%s}" % puddle
else:
poolstr = "%s,{%s}" % (poolstr,puddle)
return poolstr
def AllocPooled(self, label_mgr, name, size):
result = None
if size >= self.thresh:
puddle = Puddle(self.mem, self.alloc, label_mgr, name, size)
if puddle != None:
self.puddles.append(puddle)
result = puddle.AllocPooled(name + self.name, size)
else:
for puddle in self.puddles:
result = puddle.AllocPooled(name + self.name, size)
if result != None:
break
# none of the puddles had enough memory
if result == None:
puddle = Puddle(self.mem, self.alloc, label_mgr, name, self.minsize)
if puddle != None:
self.puddles.append(puddle)
result = puddle.AllocPooled(name + self.name, size)
if result == None:
log_exec.info("AllocPooled: Unable to allocate memory (%x)", size)
return result
def FreePooled(self, mem, size):
if mem != 0:
for puddle in self.puddles:
if puddle.contains(mem,size):
puddle.FreePooled(mem,size)
return
raise VamosInternalError("FreePooled: invalid memory, not in any puddle : ptr=%06x" % mem)
|
gpl-2.0
| -7,964,768,728,106,835,000
| 29.237288
| 96
| 0.598094
| false
| 3.081174
| false
| false
| false
|
Kagami/shitsu
|
shitsu/utils/__init__.py
|
1
|
4409
|
##################################################
# shitsu - tiny and flexible xmpp bot framework
# Copyright (C) 2008-2012 Kagami Hiiragi <kagami@genshiken.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##################################################
# unescape function by Fredrik Lundh
# <http://effbot.org/zone/re-sub.htm#unescape-html>
##################################################
import re
import random
import urllib2
import urlparse
import traceback
import htmlentitydefs
from shitsu.utils import fix_socket
reload(fix_socket)
def trim(docstring):
docstring = docstring.strip()
return "\n".join([line.strip() for line in docstring.splitlines()])
def sandbox(fn):
def new(*args, **kwargs):
try:
fn(*args, **kwargs)
except Exception:
traceback.print_exc()
return new
host_rec = re.compile(r"^([-A-Za-z0-9]{1,63}\.)*[-A-Za-z0-9]{1,63}\.?$")
private_hosts_rec = re.compile(
r"^("
r"127\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}|localhost(\.localdomain)?\.?|"
r"192\.168\.[0-9]{1,3}\.[0-9]{1,3}|10\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}|"
r"172\.(1[6-9]|2[0-9]|3[0-1])\.[0-9]{1,3}\.[0-9]{1,3}"
r")$", re.I)
def fix_host(host, forbid_private=True):
"""Check validness of hostname and fix idna hosts.
Optionally forbid private hosts.
"""
if len(host) > 255:
return
try:
host = host.encode("idna")
except UnicodeError:
return
if not host_rec.match(host):
return
if forbid_private and private_hosts_rec.match(host):
return
return host
def fix_url(url, forbid_private=True):
"""Check and fix url's hostname via fix_host."""
p = urlparse.urlsplit(url)
userpass, at, hostport = p.netloc.partition("@")
if not at: userpass, hostport = "", userpass
host, colon, port = hostport.partition(":")
host = fix_host(host, forbid_private)
if not host:
return
netloc = "".join([userpass, at, host, colon, port])
url_out = urlparse.urlunsplit(
(p.scheme, netloc, p.path, p.query, p.fragment))
return url_out.encode("utf-8")
default_url_timeout = 4
default_max_page_size = 1 * 1024 * 1024
request_headers = {
"User-Agent": ("Mozilla/5.0 (Windows NT 6.1; rv:9.0) "
"Gecko/20100101 Firefox/9.0")
}
def get_url(url, max_page_size=default_max_page_size, return_headers=False,
timeout=default_url_timeout, forbid_private=True):
url = fix_url(url, forbid_private)
if not url:
return ""
request = urllib2.Request(url, None, request_headers)
try:
f = urllib2.urlopen(request, timeout=timeout)
data = f.read(max_page_size)
except Exception:
return ""
else:
if return_headers:
return data, f.info()
else:
return data
def unescape(text):
"""Removes HTML or XML character references and
entities from a text string.
@param text The HTML (or XML) source text.
@return The plain text, as a Unicode string, if necessary.
Source: http://effbot.org/zone/re-sub.htm#unescape-html
"""
def fixup(m):
text = m.group(0)
if text[:2] == "&#":
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16))
else:
return unichr(int(text[2:-1]))
except ValueError:
pass
else:
# named entity
try:
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
except KeyError:
pass
return text # leave as is
return re.sub("&#?\w+;", fixup, text)
def get_id():
return "".join(map(lambda _: str(random.randint(0, 9)), xrange(10)))
|
gpl-3.0
| -3,346,007,296,900,773,400
| 30.492857
| 79
| 0.585167
| false
| 3.488133
| false
| false
| false
|
kevinconway/require.py
|
require/__init__.py
|
1
|
7262
|
"""Alternate import logic that provides for multiple dependency versions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from collections import defaultdict
import contextlib
import inspect
import os
import sys
from os import path
# If this is the first import of this module then store a reference to the
# original, builtin import statement. This is used later for the optional
# patching, and restoration, of the import command.
BUILTINS_NAME = '__builtin__' if '__builtin__' in sys.modules else 'builtins'
if '__original__import' not in sys.modules:
sys.modules['__original__import'] = sys.modules[BUILTINS_NAME].__import__
class ModuleCache(object):
"""Replacment for sys.modules that respects the physical path of an import.
The standard sys.modules cache can only cache on version of a module that
has been imported. This replacement uses the file path of the requesting
module (the one performing the import) as a secondary key when drawing
from the cache.
"""
def __init__(self):
"""Initialize the module cache."""
self._cache = defaultdict(dict)
def set(self, name, path, module):
"""Store a module in the cache with the given path key.
Args:
name (str): The name of the import.
path (str): The absolute path of the requesting module directory.
module (object): The Python module object to store.
"""
self._cache[name][path] = module
def cached(self, name, path):
"""Determine if an import is already cached.
Args:
name (str): The name of the import.
path (str): The absolute path of the requesting module directory.
Returns:
Bool: True if cached else False.
"""
return name in self._cache and path in self._cache[name]
def get(self, name, path, default=None):
"""Fetch a module from the cache with a given path key.
Args:
name (str): The name of the import.
path (str): The absolute path of the requesting module directory.
default: The value to return if not found. Defaults to None.
"""
return self._cache[name].get(path, default)
def get_nearest(self, name, path, default=None):
"""Fetch the module from the cache nearest the given path key.
Args:
name (str): The name of the import.
path (str): The absolute path of the requesting module directory.
default: The value to return if not found. Defaults to None.
If the specific path key is not present in the cache, this method will
search the cache for the nearest parent path with a cached value. If
a parent cache is found it is returned. Otherwise the given default
value is returned.
"""
if self.cached(name, path):
return self.get(name, path, default)
for parent in sorted(self._cache[name], key=len, reverse=True):
if path.startswith(parent):
# Set the cache for quicker lookups later.
self.set(name, path, self.get(name, parent))
return self.get(name, path, default)
return default
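# A minimal sketch of the two-key cache behaviour (illustrative only; ``module_a``
# stands for any module object you want to cache):
#
#   cache = ModuleCache()
#   cache.set('dep', '/project/pkg', module_a)
#   cache.get('dep', '/project/pkg')              # -> module_a
#   cache.get_nearest('dep', '/project/pkg/sub')  # -> module_a (nearest parent path)
#   cache.get('dep', '/elsewhere')                # -> None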
@contextlib.contextmanager
def local_modules(path, pymodules='.pymodules'):
"""Set the nearest pymodules directory to the first sys.path element.
Args:
path (str): The path to start the search in.
pymodules (str): The name of the pymodules directory to search for.
The default value is .pymodules.
If no valid pymodules directory is found in the path no sys.path
manipulation will take place.
"""
path = os.path.abspath(path)
previous_path = None
target_path = None
while previous_path != path:
if os.path.isdir(os.path.join(path, pymodules)):
target_path = path
break
previous_path, path = path, os.path.dirname(path)
if target_path:
sys.path.insert(1, os.path.join(target_path, pymodules))
try:
yield target_path
finally:
if target_path:
sys.path.pop(1)
class Importer(object):
"""An import statement replacement.
This import statement alternative uses a custom module cache and path
manipulation to override the default Python import behaviour.
"""
def __init__(self, cache=None, pymodules='.pymodules'):
"""Initialize the importer with a custom cache.
Args:
cache (ModuleCache): An instance of ModuleCache.
pymodules (str): The name to use when searching for pymodules.
"""
self._cache = cache or ModuleCache()
self._pymodules = pymodules or '.pymodules'
@staticmethod
def _calling_dir():
"""Get the directory containing the code that called require.
This function will look 2 or 3 frames up from the stack in order to
resolve the directory depending on whether require was called
directly or proxied through __call__.
"""
stack = inspect.stack()
current_file = __file__
if not current_file.endswith('.py'):
current_file = current_file[:-1]
calling_file = inspect.getfile(stack[2][0])
if calling_file == current_file:
calling_file = inspect.getfile(stack[3][0])
return path.dirname(path.abspath(calling_file))
def require(
self,
name,
locals=None,
globals=None,
fromlist=None,
level=None,
):
"""Import modules using the custom cache and path manipulations."""
# Default and allowed values change after 3.3.
level = -1 if sys.version_info[:2] < (3, 3) else 0
calling_dir = self._calling_dir()
module = self._cache.get_nearest(name, calling_dir)
if module:
return module
with local_modules(calling_dir, self._pymodules) as pymodules:
module = sys.modules['__original__import'](
name,
locals,
globals,
fromlist,
level,
)
if self._pymodules in repr(module):
del sys.modules[name]
# Create the module cache key if it doesn't already exist.
self._cache.set(name, pymodules, module)
# Enjoy your fresh new module object.
return module
def __call__(self, *args, **kwargs):
"""Proxy functions for require."""
return self.require(*args, **kwargs)
require = Importer()
def patch_import(importer=require):
"""Replace the builtin import statement with the wrapped version.
This function may be called multiple times without having negative side
effects.
"""
sys.modules[BUILTINS_NAME].__import__ = importer
def unpatch_import():
"""Restore the builtin import statement to the original version.
    This function may be called multiple times without having negative side
effects.
"""
sys.modules[BUILTINS_NAME].__import__ = sys.modules['__original__import']
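# --- Illustrative usage sketch (added for clarity; not part of the original module). ---
# A standard-library import works even when no .pymodules directory exists anywhere on
# the path; only modules found inside .pymodules get the version-per-path treatment.
if __name__ == '__main__':
    json_module = require('json')             # explicit call, bypassing ``import``
    print(json_module.dumps({'answer': 42}))
    patch_import()                            # plain ``import`` now uses the Importer
    import string                             # resolved through Importer.require
    print(string.ascii_lowercase)
    unpatch_import()                          # restore the builtin import statement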
|
apache-2.0
| 8,249,302,086,919,257,000
| 29.512605
| 79
| 0.623382
| false
| 4.493812
| false
| false
| false
|
Vrekrer/magdynlab
|
instruments/srs_ds335.py
|
1
|
4144
|
# coding=utf-8
# Author: Diego González Chávez
# email : diegogch@cbpf.br / diego.gonzalez.chavez@gmail.com
#
# This class controls the:
# Signal Generator
# Stanford Research Systems : DS335
#
# TODO:
# Make documentation
from .instruments_base import InstrumentBase as _InstrumentBase
__all__ = ['SRS_DS335']
class SRS_DS335(_InstrumentBase):
def __init__(self,
GPIB_Address=15, GPIB_Device=0,
ResourceName=None, logFile=None):
if ResourceName is None:
ResourceName = 'GPIB%d::%d::INSTR' % (GPIB_Device, GPIB_Address)
super().__init__(ResourceName, logFile)
        self._IDN = 'SRS_DS335'
self.VI.write_termination = self.VI.LF
self.VI.read_termination = self.VI.LF
self._unit = 'VP' # Volts peak to peak
@property
def amplitude(self):
'''
        Sets or returns the output voltage amplitude.
Use the "unit" property to set the units used (Vpp or Vrms).
'''
        amp_str = self.query('AMPL?')
self._unit = amp_str[-2:]
return float(amp_str[:4])
@amplitude.setter
def amplitude(self, vAmp):
self.write('AMPL %0.2f%s' % (vAmp, self._unit))
@property
def unit(self):
'''
        Sets or returns the voltage units (Vpp or Vrms).
        Changing the unit corrects the output voltage value
        to keep it at the same physical value.
'''
self.amplitude # read unit from hardware
return {'VP': 'Vpp', 'VR': 'Vrms'}[self._unit]
@unit.setter
def unit(self, vUnit):
newUnit = {'Vpp': 'VP', 'Vrms': 'VR'}.get(vUnit, 'VP')
amp = self.amplitude # read amplitude and unit from hardware
oldUnit = self._unit
self._unit = newUnit
unitChange_str = '%sto%s' % (oldUnit, newUnit)
unitChange_factors = {'VPtoVR': 0.5**0.5, 'VRtoVP': 2**0.5}
if unitChange_str in unitChange_factors:
self.amplitude = amp * unitChange_factors[unitChange_str]
@property
def frequency(self):
        '''Sets or returns the output frequency in Hz'''
return self.query_float('FREQ?')
@frequency.setter
def frequency(self, vFreq):
self.write('FREQ %0.6f' % vFreq)
@property
def offset(self):
        '''Sets or returns the output offset in volts'''
return self.query_float('OFFS?')
@offset.setter
def offset(self, vOffset):
self.write('OFFS %0.2f' % vOffset)
@property
def loadImpedance(self):
'''
        Sets or returns the output source impedance mode
"HighZ" or "50 Ohm"
'''
val = self.query('TERM?')
return {'1': 'HighZ', '0': '50 Ohm'}[val]
@loadImpedance.setter
def loadImpedance(self, vTerm):
term_str = {'HighZ': '1', '50 Ohm': '0'}.get(vTerm, '1')
self.write('TERM %s' % term_str)
@property
def syncOutput(self):
'''
        Returns the sync output state or sets it to "ON" or "OFF"
'''
val = self.query('SYNC?')
return {'1': 'ON', '0': 'OFF'}[val]
@syncOutput.setter
def syncOutput(self, vSync):
sync_str = {'ON': '1', 'OFF': '0'}.get(vSync, '1')
self.write('SYNC %s' % sync_str)
@property
def function(self):
val = self.query('FUNC?')
        return {'0': 'Sine', '1': 'Square', '2': 'Triangle',
                '3': 'Ramp', '4': 'Noise'}[val]
@function.setter
def function(self, vFunct):
'''
        Sets or returns the output function
        "Sine", "Square", "Triangle", "Ramp" or "Noise"
        '''
        funct_str = {'Sine': '0', 'Square': '1', 'Triangle': '2',
                     'Ramp': '3', 'Noise': '4'}.get(vFunct, '0')
self.write('FUNC %s' % funct_str)
def Display(self, show_funct='Amp'):
'''
        Changes the hardware display to show:
        "Amplitude" ('Amp'), "Frequency" ('Freq') or "Offset" ('Offs')
'''
dps_str = {'Amplitude': '2', 'Frequency': '1', 'Offset': '3',
'Amp': '2', 'Freq': '1', 'Offs': '3'}.get(show_funct, '2')
self.write('KEYS %s' % dps_str)
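# --- Illustrative usage sketch (added for clarity; not part of the original module). ---
# Requires a DS335 reachable over GPIB and a working VISA backend; GPIB address 15 is
# just the class default, adjust it to match the instrument.
if __name__ == '__main__':
    generator = SRS_DS335(GPIB_Address=15)
    generator.unit = 'Vpp'
    generator.amplitude = 0.5        # 0.5 Vpp
    generator.frequency = 1000.0     # 1 kHz
    generator.function = 'Sine'
    generator.syncOutput = 'ON'
    print(generator.loadImpedance)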
|
mit
| 5,892,871,699,193,613,000
| 29.681481
| 77
| 0.549734
| false
| 3.30303
| false
| false
| false
|
PALab/pyjamaseis
|
pyjamaseis/pyjamaseisv2.0.py
|
1
|
89157
|
#=================================================================================================================
# Structure of PyjAmaseis
#
# - IMPORTS
# - STATION INFORMATION USER INTERFACE CODE (class myFrame4)
# - SECONDARY OPTIONS UI WINDOW CODE (class selectionWindow)
# - CODE FOR FRAME WHICH APPEARS AT BOTTOM OF PLOTTING WINDOW (class lowerFrame)
# - DATA SELECTION AND EXTRACTION CODE (class dataHandler)
# - INITIALIZATION OF PLOTTING CODE (class Plotting)
# - CLASS FOR HANDLING TK FRAMES WHICH MUST BE IN MAIN THREAD (class mFrame)
# - alignToBottomRight Function - aligns secondary window to bottom right hand corner of screen
# - secondaryWindow Function - creates the Options window
#    - Collecting Function - collects and processes data read from the TC1
# - plotPrevious Function - loads and plots pre-recorded data
# - saveHourData Function - saves data recorded by TC1
# - getSerialPort Function - finds the serial port the TC1 is connected to
# - serial_ports Functions - returns all active serial ports
# - initializeHeader Function used to create a header object for headers in a SAC object
# - plotData - called by the Plotting function to plot data
# - calculateYAxisLabels - creates 24 hour UTC labels for the y axis, these are saved in an array
# - calculateYAxisLabelsOneHour - creates y axis labels for the current hour in UTC divided into 5 minute sections
#    - xAxisLabels Function - Creates the labels which appear on the x axis of the plotting window
# - window_close Function - causes the collecting and plotting processes to stop before closing windows
# - directory_handler Function - checks for a directory or creates a new one
# - getHourData Function - looks for an loads previously recorded data
# - if __name__ == '__main__': - the is where the code starts
#
#=================================================================================================================
### Importing all required libraries for running PyjAmaseis
### v1.0 change: The cross-platform screenshot module pyscreenshot is imported instead of the PIL module ImageGrab
### which is Windows-only. Tkinter messagebox is also imported.
import matplotlib
matplotlib.use('TkAgg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
import numpy as np
import sys
import platform
import Tkinter as tk
import tkMessageBox as msgbx
import time as Time
import serial
from obspy import read, Trace, UTCDateTime
from obspy.core.stream import Stream
from obspy.core import AttribDict
from obspy.core.trace import Stats
import datetime as dt
from datetime import datetime
from decimal import *
from multiprocessing import Queue
import pyscreenshot
from threading import Thread
import wx
from pygeocoder import Geocoder
import os
import errno
import glob
import fileinput
import pycurl
import base64
#### Initial window presented to user when launching PyjAmaseis for the first time
#### This window will require the user to enter the station information which will be later used when saving SAC files
#### Class was auto-generated using wxGlade
class MyFrame4(wx.Frame):
def __init__(self, *args, **kwds):
# begin wxGlade: MyFrame4.__init__
kwds["style"] = wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
self.iconFile = "icons/icon.ico"
self.icon = wx.Icon(self.iconFile, wx.BITMAP_TYPE_ICO)
self.SetIcon(self.icon)
self.bitmap_1 = wx.StaticBitmap(self, wx.ID_ANY, wx.Bitmap("logo.gif", wx.BITMAP_TYPE_ANY))
self.label_4 = wx.StaticText(self, wx.ID_ANY, ("Station Information\n"))
self.label_6 = wx.StaticText(self, wx.ID_ANY, ("Station ID:"))
self.text_ctrl_2 = wx.TextCtrl(self, wx.ID_ANY, "")
self.label_7 = wx.StaticText(self, wx.ID_ANY, ("Station Name:"))
self.text_ctrl_3 = wx.TextCtrl(self, wx.ID_ANY, "")
self.label_8 = wx.StaticText(self, wx.ID_ANY, ("Street Address:"))
self.text_ctrl_4 = wx.TextCtrl(self, wx.ID_ANY, "")
self.label_5 = wx.StaticText(self, wx.ID_ANY, ("Geographic Coordinates\n"))
self.label_9 = wx.StaticText(self, wx.ID_ANY, ("Longitude:"))
self.text_ctrl_6 = wx.TextCtrl(self, wx.ID_ANY, "")
self.label_10 = wx.StaticText(self, wx.ID_ANY, ("Latitude:"))
self.text_ctrl_7 = wx.TextCtrl(self, wx.ID_ANY, "")
self.label_11 = wx.StaticText(self, wx.ID_ANY, ("Elevation:"))
self.text_ctrl_8 = wx.TextCtrl(self, wx.ID_ANY, "")
self.panel_1 = wx.Panel(self, wx.ID_ANY)
self.button_2 = wx.Button(self, wx.ID_ANY, ("Begin"))
self.__set_properties()
self.__do_layout()
# end wxGlade
self.Bind(wx.EVT_BUTTON, self.begin, id = self.button_2.Id)
self.Bind(wx.EVT_TEXT, self.checkAddress, id = self.text_ctrl_4.Id)
def checkAddress(self, e):
## This method makes calls to get the geographic coordinates of the address entered in the street address field
try:
results = Geocoder.geocode(self.text_ctrl_4.GetValue())
longitude, latitude = results[0].coordinates
self.text_ctrl_6.SetValue(str(longitude))
self.text_ctrl_7.SetValue(str(latitude))
self.text_ctrl_8.SetValue(str(0.0))
except:
pass
def begin(self, e):
#### The station information entered is saved into a text file; every time the application is loaded,
#### the information stored in this file is read and kept in memory for use when saving SAC files -
#### this information goes into the SAC headers (see the parsing sketch after this class)
#writing user entered information to text file
file = open("Station Information.txt", "w")
file.write("Station ID:"+self.text_ctrl_2.GetValue()+"\n")
file.write("Station Name:"+self.text_ctrl_3.GetValue()+"\n")
file.write("Station Address:"+self.text_ctrl_4.GetValue()+"\n")
file.write("Latitude:"+self.text_ctrl_6.GetValue()+"\n")
file.write("Longitude:"+self.text_ctrl_7.GetValue()+"\n")
file.write("Elevation:"+self.text_ctrl_8.GetValue()+"\n")
file.write("DCShift:0"+"\n")
file.close()
self.Close()
#close and exit mainloop
def __set_properties(self):
# begin wxGlade: MyFrame4.__set_properties
self.SetTitle("PyjAmaseis v1.0")
self.SetSize((804, 456))
self.SetBackgroundColour(wx.Colour(255, 255, 255))
self.SetForegroundColour(wx.Colour(0, 0, 0))
self.label_4.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 1, ""))
self.label_5.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 1, ""))
# end wxGlade
def __do_layout(self):
# begin wxGlade: MyFrame4.__do_layout
#--- Initial GUI setup. Creates a grid and button layout functionalities ---
sizer_10 = wx.BoxSizer(wx.HORIZONTAL)
sizer_11 = wx.BoxSizer(wx.VERTICAL)
sizer_4 = wx.BoxSizer(wx.VERTICAL)
sizer_7 = wx.BoxSizer(wx.HORIZONTAL)
sizer_6 = wx.BoxSizer(wx.HORIZONTAL)
sizer_5 = wx.BoxSizer(wx.HORIZONTAL)
sizer_12 = wx.BoxSizer(wx.VERTICAL)
sizer_15 = wx.BoxSizer(wx.HORIZONTAL)
sizer_14 = wx.BoxSizer(wx.HORIZONTAL)
sizer_13 = wx.BoxSizer(wx.HORIZONTAL)
sizer_10.Add(self.bitmap_1, 0, 0, 0)
sizer_11.Add(self.label_4, 0, wx.LEFT | wx.TOP | wx.EXPAND, 5)
sizer_13.Add(self.label_6, 1, wx.LEFT | wx.EXPAND, 5)
sizer_13.Add(self.text_ctrl_2, 2, wx.RIGHT, 5)
sizer_12.Add(sizer_13, 1, wx.EXPAND, 0)
sizer_14.Add(self.label_7, 1, wx.LEFT | wx.EXPAND, 5)
sizer_14.Add(self.text_ctrl_3, 2, wx.RIGHT | wx.ALIGN_CENTER_HORIZONTAL, 5)
sizer_12.Add(sizer_14, 1, wx.EXPAND, 0)
sizer_15.Add(self.label_8, 1, wx.LEFT | wx.EXPAND, 5)
sizer_15.Add(self.text_ctrl_4, 2, wx.RIGHT, 5)
sizer_12.Add(sizer_15, 1, wx.EXPAND, 0)
sizer_11.Add(sizer_12, 1, wx.EXPAND, 0)
sizer_11.Add(self.label_5, 0, wx.LEFT | wx.TOP | wx.EXPAND, 5)
sizer_5.Add(self.label_9, 1, wx.LEFT, 5)
sizer_5.Add(self.text_ctrl_6, 2, wx.RIGHT | wx.EXPAND, 5)
sizer_4.Add(sizer_5, 1, wx.EXPAND, 0)
sizer_6.Add(self.label_10, 1, wx.LEFT, 5)
sizer_6.Add(self.text_ctrl_7, 2, wx.RIGHT | wx.EXPAND, 5)
sizer_4.Add(sizer_6, 1, wx.EXPAND, 0)
sizer_7.Add(self.label_11, 1, wx.LEFT, 5)
sizer_7.Add(self.text_ctrl_8, 2, wx.RIGHT | wx.EXPAND, 5)
sizer_4.Add(sizer_7, 1, wx.EXPAND, 0)
sizer_11.Add(sizer_4, 1, wx.EXPAND, 0)
sizer_11.Add(self.panel_1, 1, wx.EXPAND, 0)
sizer_11.Add(self.button_2, 1, wx.RIGHT | wx.TOP | wx.BOTTOM | wx.EXPAND | wx.ALIGN_RIGHT, 5)
sizer_10.Add(sizer_11, 1, wx.EXPAND, 0)
self.SetSizer(sizer_10)
self.Layout()
self.Centre()
# end wxGlade
# end of class MyFrame4
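#### Illustrative sketch (not called anywhere in the program): "Station Information.txt", written by
#### MyFrame4.begin above, is a plain "Key:Value" text file. The hypothetical helper below shows one way
#### the same fields could be parsed into a dictionary; the Collecting function further down parses the
#### file line by line with string searches instead.
def read_station_info(path="Station Information.txt"):
    info = {}
    with open(path) as f:
        for line in f:
            line = line.rstrip("\n")
            if ":" in line:
                key, value = line.split(":", 1)
                info[key] = value
    return info
#### e.g. read_station_info()["DCShift"] returns the string "0" for a freshly written file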
#### This class represents the secondary options window that is launching when the real time plotting of data begins
#### Signals are sent over a secondary queue that listens for when the user wants to change between a 24 Hour plot to a 1 hour plot
#### A Y Shift is also signaled to shift the graph up or down on the y axis
#### Class was auto-generated using wxGlade
class selectionWindow(wx.Frame):
def __init__(self, *args, **kwds):
# begin wxGlade: SecondaryWindow.__init__
kwds["style"] = wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, None, wx.ID_ANY, "", style= wx.SYSTEM_MENU | wx.CAPTION | wx.CLOSE_BOX)
self.iconFile = "icons/icon.ico"
self.icon = wx.Icon(self.iconFile, wx.BITMAP_TYPE_ICO)
self.SetIcon(self.icon)
#For Plotting Options
self.label_2 = wx.StaticText(self, wx.ID_ANY, ("Plotting Options: "), style=wx.ALIGN_LEFT)
self.button_3 = wx.Button(self, wx.ID_ANY, ("24 Hour Plotting"))
self.panel_3 = wx.Panel(self, wx.ID_ANY)
self.button_4 = wx.Button(self, wx.ID_ANY, ("1 Hour Plotting"))
self.panel_4 = wx.Panel(self, wx.ID_ANY)
self.spin_button_1 = wx.SpinButton(self, wx.ID_ANY , style=wx.SP_VERTICAL)
self.label_1 = wx.StaticText(self, wx.ID_ANY, ("Graph Shift"), style=wx.ALIGN_CENTRE)
self.panel_5 = wx.Panel(self, wx.ID_ANY)
#For dividing lines
self.div_lin1 = wx.StaticLine(self, -1, size=(3,210),style=wx.LI_VERTICAL)
#For Data Options
self.dat_label = wx.StaticText(self, wx.ID_ANY, ("Data Options: "), style=wx.ALIGN_LEFT)
self.extract_button = wx.Button(self, wx.ID_ANY, ("Extract Data"))
self.extract_button.Disable()
self.dataAccess = None
self.hourData = None
self.extrSaveOnly = wx.RadioButton(self, wx.ID_ANY, label='Save Selection',style=wx.RB_GROUP)
self.extrDisplayOnly = wx.RadioButton(self, wx.ID_ANY, label='Display Selection')
self.extrBoth = wx.RadioButton(self, wx.ID_ANY, label='Save and Display\nSelection')
self.display_button = wx.Button(self, wx.ID_ANY, ("Display Data\n from File"))
#Bindings of buttons and boxes
self.Bind(wx.EVT_BUTTON, self.twentyFourHourPlot, id = self.button_3.Id)
self.Bind(wx.EVT_BUTTON, self.oneHourPlot, id = self.button_4.Id)
self.Bind(wx.EVT_SPIN_UP, self.graphMoveUp, id = self.spin_button_1.Id)
self.Bind(wx.EVT_SPIN_DOWN, self.graphMoveDown, id = self.spin_button_1.Id)
self.Bind(wx.EVT_BUTTON, self.extractData, id = self.extract_button.Id)
self.Bind(wx.EVT_BUTTON, self.displayData, id = self.display_button.Id)
self.Bind(wx.EVT_CLOSE, self.doNothingIfExitButtonPressed)
self.Bind(wx.EVT_MAXIMIZE, self.doNothingIfExitButtonPressed)
self.__set_properties()
self.__do_layout()
# end wxGlade
def doNothingIfExitButtonPressed(self,e):
pass #Swallow close/maximize events so the options window stays open alongside the main plot
def close(self):
self.Destroy()
def twentyFourHourPlot(self, e):
#Send signal via queue 2 to the collecting process to inform the plotting process to re adjust the axis to show 24 hour
queue2.put("24-Hour-Plot")
def oneHourPlot(self, e):
#Send signal via queue 2 to the collecting process to inform the plotting process to re adjust the axis to show 1 hour
queue2.put("1-Hour-Plot")
def graphMoveUp(self, e):
#Send signal via queue 2 to the collecting process to change the dcshift value
queue2.put("UP")
def graphMoveDown(self, e):
#Send signal via queue 2 to the collecting process to change the dcshift value
queue2.put("DOWN")
#This is the main method for collecting and saving a region of data selected with dataHandler. It is invoked
#when the Extract Data button is pressed. It gets the start and end times of the selection, finds how many hours are included, builds a list of file paths
#where this data exists (or gets it from hourSeismicData via the 'now' marker), then puts all this data in an array which is saved in a .sac file.
#The path naming convention is restated in the sketch after this class. Note that this method only supports selection intervals of up to 24 hours.
def extractData(self, e):
global stationId, mainWin
if self.dataAccess != None:
start = self.dataAccess.initialTime
end = self.dataAccess.endTime
interval = end[1]-start[1]
if interval < 0:
interval = interval+24
interval += 1 #Total number of hours selected (where an hour is counted even if only part of it is selected)
directoryList = []
for hour in range(int(start[1]), int(start[1]+interval)):
if hour < 24:
year, month, day = start[0].year, start[0].month, start[0].day
else:
year, month, day, hour = end[0].year, end[0].month, end[0].day, hour-24
timeTuple = (int(year), int(month), int(day), int(hour))
if len(str(hour)) < 2:
hour = '0'+str(hour)
if len(str(day)) < 2:
day = '0'+str(day)
if len(str(month)) < 2:
month = '0'+str(month)
directory = [timeTuple, stationId+'/'+str(year)+'/'+str(month)+'/'+str(day)+'/'+str(year)[-2:]+str(month)+str(day)+str(hour)+stationId+'.sac']
directoryList.append(directory)
now = datetime.utcnow()
for i in range(len(directoryList)):
if not os.path.exists(directoryList[i][1]):
if (end[0].year, end[0].month, end[0].day, end[1]) == (now.year, now.month, now.day, now.hour):
directoryList[i][1] = 'now'
else:
msgbx.showerror("Error", "Some or all of the selected time\ndoes not have recorded data. Please\nselect a region of time which has\ncontinuous data.")
return
elif directoryList[i][0] == (int(now.year), int(now.month), int(now.day), int(now.hour)):
directoryList[i][1] = directoryList[i][1] + 'now'
hourSeisDat, hourTime = self.hourData[0], self.hourData[1]
extrxtData, tot_time = np.array([], dtype=np.float64), 0
for i in range(len(directoryList)):
if i == 0:
if directoryList[i][1][-3:] != 'now':
trace = read(pathname_or_url = directoryList[0][1], format = 'SAC')
trace = trace.pop(0)
trace_dat = trace.data
extrxtData = np.concatenate((extrxtData, trace_dat[int(start[2]*len(trace_dat)):]))
tot_time += 3600-start[2]*3600
else:
total_time = hourTime.minute*60+hourTime.second+hourTime.microsecond/1000000.0
start_index = int(start[2]*3600/total_time*len(hourSeisDat))
end_index = int(end[2]*3600/total_time*len(hourSeisDat))
print 'Ind', start_index, end_index
if len(directoryList[i][1]) > 3:
trace = read(pathname_or_url = directoryList[0][1][:-3], format = 'SAC')
trace = trace.pop(0)
hourSeisDat = np.concatenate((trace.data, hourSeisDat))
extrxtData = np.concatenate((extrxtData, hourSeisDat[start_index:end_index]))
tot_time += (end[2]-start[2])*3600
elif i != len(directoryList)-1:
trace = read(pathname_or_url = directoryList[i][1], format = 'SAC')
trace = trace.pop(0)
trace_dat = trace.data
extrxtData = np.concatenate((extrxtData, trace_dat[:]))
tot_time += 3600
elif i == len(directoryList)-1:
if directoryList[i][1][-3:] != 'now':
trace = read(pathname_or_url = directoryList[i][1], format = 'SAC')
trace = trace.pop(0)
trace_dat = trace.data
extrxtData = np.concatenate((extrxtData, trace_dat[:int(end[2]*len(trace_dat))]))
else:
total_time = hourTime.minute*60+hourTime.second+hourTime.microsecond/1000000.0
end_index = int(end[2]*3600/total_time*len(hourSeisDat))
if len(directoryList[i][1]) > 3:
trace = read(pathname_or_url = directoryList[0][1][:-3], format = 'SAC')
trace = trace.pop(0)
hourSeisDat = np.concatenate((trace.data, hourSeisDat))
extrxtData = np.concatenate((extrxtData, hourSeisDat[:end_index]))
tot_time += end[2]*3600
latitude, longitude, elevation = self.hourData[2][0], self.hourData[2][1], self.hourData[2][2]
sampling_rate = len(extrxtData)/tot_time
stats = initializeHeader(longitude, latitude , elevation, start[0])
stats.npts = len(extrxtData)
stats.sampling_rate = sampling_rate
stats.delta = 1/sampling_rate
st = Stream(Trace(data=extrxtData, header=stats))
self.dataAccess.dataDeselector('resize')
if self.extrSaveOnly.GetValue() or self.extrBoth.GetValue():
filename = self.file_dialog('save', start[0], end[0])
st.write(filename, format='SAC')
if self.extrDisplayOnly.GetValue() or self.extrBoth.GetValue():
queue3.put(st)
tkframes.data_ready()
#Method for handling the file saving dialog box when data is extracted (13/01/16)
def file_dialog(self, mode, start=None, end=None):
if mode == 'save':
start = str(start.year)+'-'+str(start.month)+'-'+str(start.day)+'-'+str(start.hour)+'.'+str(start.minute)+'.'+str(round(start.second,2))
end = str(end.year)+'-'+str(end.month)+'-'+str(end.day)+'-'+str(end.hour)+'.'+str(end.minute)+'.'+str(round(end.second,2))
fileBrowser = wx.FileDialog(self, 'Select Location to Save Data', os.path.expanduser('~'), start+'_to_'+end+'.sac', 'SAC files (*.sac)|*.sac', wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
elif mode == 'open':
fileBrowser = wx.FileDialog(self, 'Select Data File to Display', os.path.expanduser('~'), '', 'SAC files (*.sac)|*.sac', wx.FD_OPEN)
fileBrowser.ShowModal()
path = fileBrowser.GetPath()
if mode == 'save' and path[-4:] != '.sac':
path += '.sac'
return path
def displayData(self, e=None):
pathName = self.file_dialog('open')
stream = read(pathname_or_url = pathName, format = 'SAC')
queue3.put(stream)
tkframes.data_ready()
def __set_properties(self):
# begin wxGlade: MyFrame.__set_properties
self.SetTitle(("Options"))
self.SetSize((325, 240))
self.SetBackgroundColour(wx.Colour(240, 240, 240))
self.panel_3.SetBackgroundColour(wx.Colour(240, 240, 240))
self.panel_4.SetBackgroundColour(wx.Colour(240, 240, 240))
self.panel_5.SetBackgroundColour(wx.Colour(240, 240, 240))
self.label_2.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.dat_label.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.label_1.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
self.extrSaveOnly.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.extrDisplayOnly.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
self.extrBoth.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, ""))
# end wxGlade
def __do_layout(self):
# begin wxGlade: MyFrame.__do_layout
#Main Sizer
sizer_1 = wx.BoxSizer(wx.HORIZONTAL)
#Plotting Options
sizer_2 = wx.BoxSizer(wx.VERTICAL)
sizer_2a = wx.BoxSizer(wx.HORIZONTAL)
sizer_2b = wx.BoxSizer(wx.HORIZONTAL)
sizer_2.Add((4,6), 0, wx.EXPAND, 0)
sizer_2a.Add(self.label_2, 1, wx.ALIGN_CENTER_VERTICAL, 8)
sizer_2.Add(sizer_2a, 0, wx.LEFT, 9)
sizer_2.Add((4,10), 0, wx.EXPAND, 0)
sizer_2.Add(self.button_3, 1, wx.LEFT | wx.RIGHT | wx.EXPAND, 8)
sizer_2.Add(self.panel_3, 1, wx.EXPAND, 0)
sizer_2.Add(self.button_4, 1, wx.LEFT | wx.RIGHT | wx.EXPAND, 8)
sizer_2.Add(self.panel_4, 1, wx.EXPAND, 0)
sizer_2b.Add(self.spin_button_1, 2, wx.LEFT | wx.EXPAND, 20)
sizer_2b.Add(self.label_1, 4, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_2.Add(sizer_2b, 1, wx.EXPAND, 0)
sizer_2.Add(self.panel_5, 1, wx.EXPAND, 0)
#First dividing line
sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
sizer_3.Add(self.div_lin1, 1, wx.ALIGN_CENTER_VERTICAL, 0)
#Data Options
sizer_4 = wx.BoxSizer(wx.VERTICAL)
sizer_4a = wx.BoxSizer(wx.HORIZONTAL)
sizer_4.Add((4,6), 0, wx.EXPAND, 0)
sizer_4a.Add(self.dat_label, 1, wx.ALIGN_CENTER_VERTICAL, 8)
sizer_4.Add(sizer_4a, 0, wx.LEFT, 3)
sizer_4.Add((4,6), 0, wx.EXPAND, 0)
sizer_4.Add(self.extrSaveOnly, 0, wx.LEFT | wx.RIGHT, 0)
sizer_4.Add(self.extrDisplayOnly, 0, wx.LEFT | wx.RIGHT, 0)
sizer_4.Add(self.extrBoth, 0, wx.LEFT | wx.RIGHT, 0)
sizer_4.Add((4,5), 0, wx.EXPAND, 0)
sizer_4.Add(self.extract_button, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 5)
sizer_4.Add((4,20), 0, wx.EXPAND, 0)
sizer_4.Add(self.display_button, 1, wx.LEFT | wx.RIGHT | wx.EXPAND, 10)
#Putting everything in main sizer
sizer_1.Add((4,1), 0, wx.EXPAND, 0)
sizer_1.Add(sizer_2, 5, wx.RIGHT | wx.EXPAND, 0)
sizer_1.Add(sizer_3, 1, wx.RIGHT | wx.EXPAND, 0)
sizer_1.Add(sizer_4, 5, wx.RIGHT, 2)
sizer_1.Add((4,1), 0, wx.EXPAND, 0)
self.SetSizer(sizer_1)
self.Layout()
# end wxGlade
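#### Illustrative sketch (for explanation only, not used by the program): extractData above builds one path per
#### selected hour using the same naming convention as saveHourData further down, i.e.
#### <stationId>/<year>/<month>/<day>/<YY><MM><DD><HH><stationId>.sac with zero-padded month, day and hour.
#### The hypothetical helper below restates that rule in isolation.
def hourly_sac_path(station_id, year, month, day, hour):
    month, day, hour = "%02d" % month, "%02d" % day, "%02d" % hour
    return "%s/%d/%s/%s/%s%s%s%s%s.sac" % (station_id, year, month, day, str(year)[-2:], month, day, hour, station_id)
#### e.g. hourly_sac_path('01', 2016, 1, 5, 3) -> '01/2016/01/05/1601050301.sac'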
### Class to handle the frame which appears at the bottom of the main plotting window. New v2.0 (18/12/15).
class lowerFrame():
def __init__(self, master):
bckgrnd = '#E6E6E6'
self.frame = tk.Frame(master, bd=1, relief='sunken', bg=bckgrnd)
time_label = tk.Label(self.frame, text='Current Time: ', bg=bckgrnd)
time_label.pack(side='left', pady=1, padx=4)
self.currentLabel = tk.Label(self.frame, text="", bg=bckgrnd)
self.currentLabel.pack(side='left', pady=1)
self.mouselocLabel = tk.Label(self.frame, text=" "*20, bg=bckgrnd)
self.mouselocLabel.pack(side='right', pady=1, padx=4)
loc_time_label = tk.Label(self.frame, text='Time at mouse location: ', bg=bckgrnd)
loc_time_label.pack(side='right', pady=1)
self.mode = "24-Hour-Plot" #Changed in plotData when mode is changed. Makes it easy to tell mode when mouse_move is called.
self.firstHour = datetime.utcnow()
##Function to display the time at the mouse location on the plot. This is called when the mouse is moved over the plot (see mpl_connect binding of fig in Plotting). (18/12/15)
def mouse_move(self, event, graph_constant):
x_pos, y_pos, time = event.xdata, event.ydata, False
if x_pos and y_pos and self.mode == "24-Hour-Plot":
hour = 23-int(((y_pos-32750)+graph_constant/2)//graph_constant)
hour = hour+self.firstHour.hour
if hour > 23:
hour = hour-24
if (y_pos+graph_constant-32750) < (graph_constant/2):
hour = hour-1
elif (y_pos+graph_constant-32750) > graph_constant*24+(graph_constant/2):
hour = hour+1
minute = int(x_pos)
second = round((x_pos%1)*60, 4)
time = True
elif x_pos and y_pos and self.mode == "1-Hour-Plot":
hour = self.firstHour.hour
minute = int(x_pos)+(11-int(((y_pos-32750)+graph_constant/2)//graph_constant))*5
if (y_pos+graph_constant-32750) < (graph_constant/2):
minute = minute-5
elif (y_pos+graph_constant-32750) > graph_constant*12+(graph_constant/2):
minute = minute+5
second = round((x_pos%1)*60, 4)
time = True
if time:
hour, minute, second = str(hour), str(minute), str(second)
if int(hour) < 10:
hour = '0'+hour
if int(minute) < 10:
minute = '0'+minute
if float(second) < 10:
second = '0'+second
if len(str(second)) < 7:
second = second + '0'*(7-len(second))
time = hour+':'+minute+':'+second
self.mouselocLabel.config(text=time)
if not x_pos and not y_pos:
self.mouselocLabel.config(text='Not Available')
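#### Illustrative sketch (for explanation only, not called by the program): mouse_move above converts a y data
#### coordinate into a trace-row index by snapping to the nearest row centre; rows are spaced graphHeightConst
#### (2500) apart and the lowest row is centred on 32750. A minimal version of that snap:
def y_to_row(y_pos, graph_constant=2500, baseline=32750):
    #Adding graph_constant/2 before the floor division snaps to the nearest row centre rather than always rounding down
    return int(((y_pos - baseline) + graph_constant / 2) // graph_constant)
#### In 24-hour mode mouse_move labels the row as 23 - y_to_row(y_pos) plus firstHour.hour; in 1-hour mode the
#### same snap with 11 - y_to_row(y_pos) selects one of the twelve 5-minute rows.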
### Class responsible for data selection and extraction and associated bindings (05/01/16)
class dataHandler():
def __init__(self, fig, ax, graphHeightConst, mode_getter_class):
self.fig = fig
self.canvas = fig.canvas
self.ax = ax
self.pressId = self.canvas.mpl_connect('button_press_event', self.dataSelector)
self.graphHeightConst = graphHeightConst
self.mode_getter = mode_getter_class
self.activeSelection=False
self.hourAccess = None
self.hourData = None
self.displayItems = None
def dataSelector(self, event):
global options_window
if event.button == 1:
x_dat_pos, y_dat_pos = event.xdata, event.ydata
x_pixel, y_pixel = event.x, event.y #Measured from bottom left hand corner of TkAgg Canvas.
if x_dat_pos and y_dat_pos:
self.mode = mode = self.mode_getter.mode
data_buffer = self.data_buffer(y_dat_pos)
self.initialTime = self.findTime(x_dat_pos, y_dat_pos, data_buffer)
now, then = datetime.utcnow(), self.initialTime[0]
if then < now:
self.activeSelection=True
options_window.extract_button.Enable()
bbox = self.ax.get_window_extent().transformed(self.fig.dpi_scale_trans.inverted())
width, height = bbox.width*self.fig.dpi, bbox.height*self.fig.dpi
if mode == "24-Hour-Plot":
self.frame_height = height/25
self.plot_width = width/60
self.mode_no = 60
elif mode == "1-Hour-Plot":
self.frame_height = height/13
self.plot_width = width/5
self.mode_no = 5
self.window_height = self.canvas._tkcanvas.winfo_height()
self.originalx = x_pixel
self.original_xdat = x_dat_pos
self.originaly = self.window_height-y_pixel-(self.frame_height/self.graphHeightConst*data_buffer) #self.frame_height/self.graphHeightConst is pixels/data
self.initialTime = self.findTime(x_dat_pos, y_dat_pos, data_buffer)
self.drawFrame()
self.moveId = self.canvas.mpl_connect('motion_notify_event', self.resizeFrame)
self.releaseId = self.canvas.mpl_connect('button_release_event', self.dataExtractor)
#Function to find the times which correspond to the ends of the selection area
def findTime(self, x_dat_pos, y_dat_pos, data_buffer):
time = []
if self.mode == "24-Hour-Plot":
hour = 23-((y_dat_pos+data_buffer-self.graphHeightConst/4-32750)//self.graphHeightConst)+self.hourAccess.firstHour.hour
date = self.hourAccess.firstHour
if hour>23:
hour = hour-24
date = self.hourAccess.firstHour + dt.timedelta(days=1)
minute = (int(x_dat_pos)*60+(x_dat_pos%1*60))/3600 #Decimal fraction of hour where initial selection is
time.append(datetime(date.year, date.month, date.day, int(hour), int(minute*60), int(minute*3600)-int(minute*60)*60, int(minute*3600000000)-int(minute*3600)*1000000))
time.append(hour)
time.append(minute)
elif self.mode == "1-Hour-Plot":
minute = ((11-((y_dat_pos+data_buffer-self.graphHeightConst/4-32750)//self.graphHeightConst))*5*60+(x_dat_pos//1*60+(x_dat_pos%1*60)))/3600 #Decimal fraction of hour where initial selection is
date = self.hourAccess.firstHour
time.append(datetime(date.year, date.month, date.day, date.hour, int(minute*60), int(minute*3600)-int(minute*60)*60, int(minute*3600000000)-int(minute*3600)*1000000))
time.append(self.hourAccess.firstHour.hour)
time.append(minute)
return time
#This is the primary function for matching the selection area to where the mouse is moved. The selection area is drawn from 1-pixel thick
#frames. There are either four frames (for one-line selections) or eight (for multiple-line selections--extras are initiated and included in self.extra_frames).
#Several reference points are established in the make_extra_frames function (the x-pixel for the left of the plot, and the left and right distances to the
#edge of the plot from the original position where the selection was first started). As the mouse is moved, the frames are configured (in size and position).
def resizeFrame(self, event):
x_pixel, y_pixel = event.x, event.y
x_dat_pos, y_dat_pos, newy = event.xdata, event.ydata, None
x_win, y_win = mainWin.winfo_rootx(), mainWin.winfo_rooty()
if y_dat_pos:
newy = self.window_height-y_pixel-(self.frame_height/self.graphHeightConst*self.data_buffer(y_dat_pos))
if self.findTime(x_dat_pos, y_dat_pos, self.data_buffer(y_dat_pos))[0] < datetime.utcnow():
if x_dat_pos and self.originaly < self.window_height-y_pixel < self.originaly+self.frame_height: #For selection of one line of the trace only
self.currentEndy = y_dat_pos
self.currentEndx = x_dat_pos
self.clear_extras()
self.leftVert.config(height = self.frame_height+1)
self.rightVert.config(height = self.frame_height+1)
if x_pixel > self.originalx:
self.rightVert.place_configure(x = x_pixel, y = self.originaly)
self.topHoriz.config(width = x_pixel-self.originalx)
self.botHoriz.config(width = x_pixel-self.originalx)
self.botHoriz.place_configure(anchor = 'nw', y=self.originaly+self.frame_height,x=self.originalx)
self.topHoriz.place_configure(anchor = 'nw', x=self.originalx, y=self.originaly)
elif x_pixel < self.originalx:
self.rightVert.place_configure(x = x_pixel, y = self.originaly)
self.topHoriz.config(width = self.originalx-x_pixel)
self.botHoriz.config(width = self.originalx-x_pixel)
self.botHoriz.place_configure(anchor = 'ne', y=self.originaly+self.frame_height, x=self.originalx)
self.topHoriz.place_configure(anchor = 'ne', y=self.originaly, x=self.originalx)
elif x_dat_pos and (self.mode=='24-Hour-Plot' and 32750-self.graphHeightConst/2<y_dat_pos<32750+self.graphHeightConst*24-self.graphHeightConst/2)\
or (self.mode=='1-Hour-Plot' and 32750-self.graphHeightConst/2<y_dat_pos<32750+self.graphHeightConst*12-self.graphHeightConst/2): #For selection of multiple lines of the trace.
try:
if self.extra_frames:
pass
except:
self.extra_frames = self.make_extra_frames()
self.currentEndy = y_dat_pos
self.currentEndx = x_dat_pos
side_height = abs(self.originaly-newy)
frames = self.extra_frames
self.leftVert.config(height = self.frame_height) #Height of verticals has to be reduced by one for an unknown reason
self.rightVert.config(height = self.frame_height)
if newy > self.originaly:
self.rightVert.place_configure(x = x_pixel, y = newy)
self.topHoriz.config(width = self.to_right_width)
self.botHoriz.config(width = self.to_left_width+(x_pixel-self.originalx))
self.botHoriz.place_configure(anchor = 'nw', y = newy+self.frame_height-1, x = self.left_of_plot)
self.topHoriz.place_configure(anchor = 'nw', x=self.originalx, y=self.originaly)
frames[2].config(width = self.to_left_width)
frames[3].config(width = self.to_right_width-(x_pixel-self.originalx), bg = 'red')
frames[0].config(height=side_height), frames[1].config(height=side_height)
frames[0].place_configure(anchor = 'nw', x = self.left_of_plot, y = self.originaly+self.frame_height)
frames[1].place_configure(anchor = 'ne', x = self.left_of_plot+self.mode_no*self.plot_width, y = self.originaly)
frames[2].place_configure(anchor = 'nw', x = self.left_of_plot, y = self.originaly+self.frame_height-1)
frames[3].place_configure(anchor = 'nw', x = x_pixel, y = newy)
elif newy < self.originaly:
self.rightVert.place_configure(x = x_pixel, y = newy)
self.topHoriz.config(width = self.to_right_width-(x_pixel-self.originalx))
self.botHoriz.config(width = self.to_left_width)
self.botHoriz.place_configure(anchor = 'ne', y=self.originaly+self.frame_height-1, x=self.originalx)
self.topHoriz.place_configure(anchor = 'ne', y = newy, x = self.left_of_plot+self.mode_no*self.plot_width)
frames[2].config(width = self.to_left_width+(x_pixel-self.originalx), bg = 'red')
frames[3].config(width = self.to_right_width, bg = 'red')
frames[0].config(height=side_height), frames[1].config(height=side_height)
frames[0].place_configure(anchor = 'nw', x = self.left_of_plot, y = newy+self.frame_height)
frames[1].place_configure(anchor = 'ne', x = self.left_of_plot+self.mode_no*self.plot_width, y = newy)
frames[2].place_configure(anchor = 'nw', x = self.left_of_plot, y = newy+self.frame_height-1)
frames[3].place_configure(anchor = 'nw', x = self.originalx, y = self.originaly)
def clear_extras(self):
try:
for widget in self.extra_frames:
tkframes.destroy(widget)
del self.extra_frames
except:
pass
def make_extra_frames(self):
self.to_left_width = int(self.original_xdat*self.plot_width)
self.to_right_width = int((self.mode_no-self.original_xdat)*self.plot_width)
self.left_of_plot = np.ceil(self.originalx-self.original_xdat*self.plot_width)
left_vert = tkframes.Frame(height = self.frame_height, width = 1, bg = 'red')
right_vert = tkframes.Frame(height = self.frame_height, width = 1, bg = 'red')
top_to_left = tkframes.Frame(height = 1, width = 1, bg = 'red')
bottom_to_right = tkframes.Frame(bg = 'red', height = 1, width = 1)
bottom_to_right.place(), top_to_left.place(), right_vert.place(), left_vert.place()
return (left_vert, right_vert, top_to_left, bottom_to_right)
#Function which handles the mouse release event after the data area has been selected. From here, extraction and saving is handled in selectionWindow.
def dataExtractor(self, event):
global options_window
if event.button == 1:
self.canvas.mpl_disconnect(self.pressId)
self.canvas.mpl_disconnect(self.moveId)
self.deselectId = self.canvas.mpl_connect('button_press_event', self.dataDeselector)
self.canvas.mpl_disconnect(self.releaseId)
self.endTime = self.findTime(self.currentEndx, self.currentEndy, self.data_buffer(self.currentEndy))
#Now that start and end times are established (the aim of this class), the data can be extracted and saved or displayed by the extracting and
#saving functions in the selectionWindow class, which contains the button to initiate this task.
#To clear the selection on click (or other call such as a resize or a mode change)
def dataDeselector(self, event):
global options_window, tkframes
if self.activeSelection and (event == 'resize' or event.button == 1):
self.clear_extras()
try:
for widget in self.selection_frame:
tkframes.destroy(widget)
self.canvas.mpl_disconnect(self.deselectId)
self.pressId = self.canvas.mpl_connect('button_press_event', self.dataSelector)
except AttributeError:
print 'Attribute Error Occurred'
self.activeSelection = False
options_window.extract_button.Disable()
#Function to initiate the four 1-pixel width frames which make up the selection area. (Note extra frames initiated in make_extra_frames when required)
def drawFrame(self):
global tkframes
self.topHoriz = tkframes.Frame(height = 1, width = 3, bg = 'red')
self.botHoriz = tkframes.Frame(height = 1, width = 3, bg = 'red')
self.leftVert = tkframes.Frame(height = self.frame_height+1, width = 1, bg = 'red')
self.rightVert = tkframes.Frame(height = self.frame_height+1, width = 1, bg = 'red')
self.topHoriz.place(x = self.originalx, y = self.originaly)
self.leftVert.place(x = self.originalx, y = self.originaly)
self.botHoriz.place(x = self.originalx, y = self.originaly+self.frame_height-1)
self.rightVert.place(x = self.originalx+3, y = self.originaly)
self.selection_frame = (self.topHoriz, self.botHoriz, self.leftVert, self.rightVert)
#For finding the difference (in terms of yaxis height, not pixels) between the clicked location and the next highest mid-point between traces.
def data_buffer(self, y_dat_pos):
if y_dat_pos:
data_buffer = (np.ceil((y_dat_pos-32750)*2/self.graphHeightConst)*(self.graphHeightConst/2))
mode = self.mode
if data_buffer < 0:
data_buffer = self.graphHeightConst/2
elif mode == "24-Hour-Plot" and y_dat_pos > 23*self.graphHeightConst+32750:
data_buffer = 23*self.graphHeightConst+self.graphHeightConst/2
elif mode == "1-Hour-Plot" and y_dat_pos > 11*self.graphHeightConst+32750:
data_buffer = 11*self.graphHeightConst+self.graphHeightConst/2
elif data_buffer/self.graphHeightConst%1==0:
data_buffer = data_buffer+self.graphHeightConst/2
data_buffer = data_buffer+32750-y_dat_pos
return data_buffer
return None
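#### Illustrative sketch (standalone, not used by the program): dataHandler above drives its selection state
#### machine by connecting and disconnecting matplotlib canvas callbacks. The same pattern in isolation:
def _selection_binding_demo(fig):
    state = {"cid": None}
    def on_press(event):
        #Swap the press handler for a release handler while a selection is in progress
        fig.canvas.mpl_disconnect(state["cid"])
        state["cid"] = fig.canvas.mpl_connect('button_release_event', on_release)
    def on_release(event):
        #Selection finished - restore the press handler
        fig.canvas.mpl_disconnect(state["cid"])
        state["cid"] = fig.canvas.mpl_connect('button_press_event', on_press)
    state["cid"] = fig.canvas.mpl_connect('button_press_event', on_press)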
###Initialization of the main plotting window. This is a class which is called from the __main__ thread so that the mainloop() is in the main thread.
###Previously, this was the target for the plottingProcess thread, but the current architecture achieves the same plotting functionality (via root.after)
###while allowing for the mainloop of the plotting window to be in the main thread.
class Plotting():
def __init__(self, queue, queue2):
global mainWin, plotting_loop, options_window
looping = True
while looping:
if not queue.empty():
value = queue.get()
if value == "Start Plotting Process":
looping = False
elif not plotting_loop:
looping = False
if plotting_loop: #In case program has been closed before now (i.e. if no TC1 connected and user has selected to exit).
timeNow = datetime.time(datetime.now())
time = timeNow.minute + (timeNow.second + timeNow.microsecond/1000000.0)/60.0
lastX = time
lastY = 90250
connect = True
step = 0
x=[]
y=[]
mode = "24-Hour-Plot"
self.root = tk.Tk()
mainWin = self.root
mainWin.protocol("WM_DELETE_WINDOW", window_close) #Closes options window and ends processes. New in v2.0.
mainWin.wm_title("PyjAmaseis v1.0")
### v1.0 change: Conditional added. .ico not supported on Linux. zoomed not
### supported on linux.
if platform.system() == 'Linux':
mainWin.iconbitmap(r'@icons/icon1.xbm')
else:
mainWin.iconbitmap(r'icons/icon.ico')
mainWin.wm_state('zoomed')
graphHeightConst = 2500
fig = plt.figure(figsize=(13,9)) #15,10
# v1.0 change: AttributeError: 'Figure' object has no attribute 'set_tight_layout' on Linux
if platform.system() != 'Linux':
fig.set_tight_layout(0.4)
ax = fig.add_subplot(1,1,1)
ax.set_xlim(0,60)
ax.set_ylim(30250,92750)
ax.set_xlabel('Minute')
ax.set_ylabel('Hour (UTC)')
yAxis = [30250,92750]
y1 = (np.arange(min(yAxis), max(yAxis)+1,graphHeightConst))
y2 = calculateYAxisLabels()
ax = xAxisLabels(ax, 24)
plt.yticks(y1, y2)
ax.yaxis.grid(color = '#0000FF', linestyle = '-')
ax.set_axisbelow(True)
line, = ax.plot(x, y, color='k')
canvas = FigureCanvasTkAgg(fig, master=mainWin)
canvas._tkcanvas.config(highlightthickness=0)
bottomFrame = lowerFrame(mainWin)
bottomFrame.frame.pack(side='bottom', expand=1, fill = tk.BOTH)
canvas._tkcanvas.pack(side=tk.TOP, expand=1, fill = tk.BOTH)
canvas.draw()
dataInteractive = dataHandler(fig, ax, graphHeightConst, bottomFrame)
options_window.dataAccess = dataInteractive
dataInteractive.hourAccess = bottomFrame
self.displayItems = None
dataInteractive.displayItems = self.displayItems
fig.canvas.mpl_connect('motion_notify_event', lambda event: bottomFrame.mouse_move(event, graphHeightConst))
mainWin.update_idletasks()
geometry = mainWin.geometry()
geometry = geometry[:geometry.find('+')]
mainWin.after(0, plotData,queue, queue2, fig, ax, canvas, bottomFrame, mainWin, lastY, lastX, connect, line, mode, geometry, dataInteractive)
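#### Illustrative sketch (simplified stand-in; the real work is done by plotData below): the plotting architecture
#### keeps Tk's mainloop in the __main__ thread and polls the inter-process queue with root.after, re-scheduling
#### itself on every call, roughly like this:
def _poll_queue_demo(root, q):
    if not q.empty():
        values = q.get()
        #...update the plot with the received values here...
    root.after(0, _poll_queue_demo, root, q)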
###Any tk Frames used in this program must originate from the __main__ thread. Hence, this class, which is only called from the __main__ thread, initiates a
###list of tk frames that can be used from other threads but still have their mainloops in the __main__ thread. The frames are mostly used in dataHandler.
class mFrame(tk.Frame):
def __init__(self, queue3, root):
tk.Frame.__init__(self)
self.max_no = 20
self.frames = []
self.root = root
for i in range(self.max_no):
self.frames.append(tk.Frame(mainWin))
self.frame_index = 0
self.queue3 = queue3
self.figureCount = 0
self.windows = []
def Frame(self, **kwargs):
frame = self.frames[self.frame_index]
self.frame_index+=1
frame.config(**kwargs)
return frame
def destroy(self, widget):
widget.destroy()
index = self.frames.index(widget)
del self.frames[index]
self.frames.append(tk.Frame(mainWin))
self.frame_index = self.frame_index-1
def data_ready(self):
self.current_data = queue3.get()
self.plot()
def plot(self):
if self.figureCount < 3:
self.figureCount += 1
window = tk.Toplevel(master=self.root)
window.lower()
self.windows.append(window)
window.protocol("WM_DELETE_WINDOW", lambda: self.toplevel_close(window))
if platform.system() == 'Linux':
mainWin.iconbitmap(r'@icons/icon1.xbm')
else:
mainWin.iconbitmap(r'icons/icon.ico')
window.title('Data Display')
fig = matplotlib.figure.Figure()
start = str(self.current_data[0].stats['starttime'])
end = str(self.current_data[0].stats['endtime'])
fig.suptitle("Data Extraction: "+start[:start.find('T')]+', '+start[start.find('T')+1:-1]+'\nto '+end[:end.find('T')]+', '+end[end.find('T')+1:-1])
ax = fig.add_subplot(1,1,1)
ax.xaxis.set_visible(False)
ax.yaxis.set_visible(False)
canvas = FigureCanvasTkAgg(fig, master=window)
toolbarFrame = tk.Frame(window)
toolbar = NavigationToolbar2TkAgg(canvas, toolbarFrame)
toolbarFrame.pack(side=tk.BOTTOM, expand=1, fill = tk.BOTH)
canvas._tkcanvas.pack(side=tk.TOP, expand=1, fill = tk.BOTH)
self.current_data.plot(fig=fig)
window.lift()
else:
msgbx.showinfo("Maximum Reached", "The maximum number of data displays has been reached. Close an open data display before proceeding.")
def toplevel_close(self, window):
deleted = False
for i in range(len(self.windows)):
if not deleted and self.windows[i] == window:
self.windows[i].destroy()
del self.windows[i]
deleted = True
self.figureCount = self.figureCount-1
#### This method aligns the Options window to the bottom right-hand corner of the screen so it doesn't get in the way of the plotting window
def alignToBottomRight(win):
dw, dh = wx.DisplaySize()
w, h = win.GetSize()
x = dw - w
y = dh - h
win.SetPosition((x-20, y-65))
#### This method creates the Options window
def secondaryWindow(queue2, queue3):
global options_window #New in v2.0.
app = wx.App(False)
options_window = selectionWindow()
app.SetTopWindow(options_window)
alignToBottomRight(options_window)
options_window.Show()
options_window.Raise()
app.MainLoop()
#### This is the Collecting method (Thread) responsible for reading data from the TC1, sending this data via a queue to plotting thread/method, saving data into SAC, listening to commands from Options window, and uploading SAC files to NZSeis server after saving them
def Collecting(queue, queue2, queue3):
global collecting_loop, stationId, options_window
#Stats header information initialization
stationId = '01' #default station ID kept as a string, matching how it is used in file paths below
stationName = 'Unknown'
stationAddress = 'Unknown'
longitude = 0.0
latitude = 0.0
elevation = 0.0
dcShift = 0
oldDCShift = 0
#Check if user has already entered Station information, if yes, then go straight into 24 hour live plotting, if no create the initial station information input window
if(os.path.exists('Station Information.txt') == False):
app = wx.App(False)
frame_5 = MyFrame4(None, wx.ID_ANY, "")
app.SetTopWindow(frame_5)
frame_5.Center()
frame_5.Show()
app.MainLoop()
else:
pass
#Once the user has entered the station information and it has been saved into a txt file, the file is read line by line by the following code and parsed to extract the required header information
file = open("Station Information.txt", "r")
informationArray = file.readlines()
for line in informationArray:
if "Station ID" in line:
stationId = line[line.find(":")+1:line.find("\n")]
if "Station Name" in line:
stationName = line[line.find(":")+1:line.find("\n")]
if "Station Address" in line:
stationAddress = line[line.find(":")+1:line.find("\n")]
if "Longitude" in line:
longitude = line[line.find(":")+1:line.find("\n")]
if "Latitude" in line:
latitude = line[line.find(":")+1:line.find("\n")]
if "Elevation" in line:
elevation = line[line.find(":")+1:line.find("\n")]
if "DCShift" in line:
dcShift = int(line[line.find(":")+1::])
oldDCShift = int(line[line.find(":")+1::])
file.close()
#initializing further required variables
mode = "None"
currentMode = "24Hour"
graphHeightConst = 2500 #distance between each 1 hour plot on the 24 hour plot
totalHoursConst = 23 #used to decrement the hour so that once the plot reaches the end of 24 hours the plot is cleared and plotting starts from the top
skipConst = 1 #currently not used, but in place to skip reading values coming in from the TC1 - eg. if it is 2, then it will read every second value
count = 0
lastHour = datetime.time(datetime.now()).hour
hasHourChanged = False
plotLimit = graphHeightConst*7
goldenNumber = 32750 #the centre line about which each hourly trace oscillates - the y axis limits are derived from it (lower limit = 32750 - graphHeightConst, upper limit = 32750 + graphHeightConst * the number of hours shown)
upperLim = 36000 #the top limit of each plot
lowerLim = 28000 #bottom limit of each plot
plotClear = False
#hourMillisecondData = np.array([], dtype = np.float64)
tempMillisecond = np.array([], dtype = np.float64)
serialNumber = None
serialPort = None
#Returns the serialPort that the TC1 is connected to
serialPort = getSerialPort()
#This while loop ensures user has connected the TC1 before continuing
while serialPort == None:
redundantRoot = tk.Tk() #Parent for error dialog to display on top of. This is done so it can then be hidden and destroyed.
redundantRoot.withdraw()
yes_or_no = msgbx.askokcancel(message="Please Connect TC-1 Seismometer", title="Error", parent=redundantRoot)
redundantRoot.destroy()
if yes_or_no:
serialPort = getSerialPort()
else:
window_close(True)
return
serialPort = serial.Serial(serialPort)
serialPort.flushInput()
serialPort.flushOutput()
#The following two lines create the secondary options window
secondaryWindowProcess = Thread(target= secondaryWindow, args=(queue2,queue3,))
secondaryWindowProcess.start()
queue.put("Start Plotting Process")
#create a stats object that holds all the station information retrieved from the txt file
stats = initializeHeader(longitude, latitude , elevation)
hourSeismicData, stats = getHourData(stats) #stores the data from the hour, populated with data from previous recordings in the hour or zeroes
hourTimeData = np.array([], dtype = np.float64)
tempSeismicData = np.array([]) #used to store 18 value read from the tc1 and sent is sent to the plotting array, then cleared for next 18 values
queue.put(['prev', hourSeismicData, currentMode, 'None', graphHeightConst, dcShift, skipConst, stats]) #TODO: revisit - this may take too long, and the data array may be too short by the time the collecting loop starts.
while collecting_loop:
try:
#Checks whether the user has changed the view selection in the options window from 24 hour to 1 hour or has increased or decreased the graphShift
if(queue2.empty() == False):
readingQueue2 = queue2.get()
if readingQueue2 == "24-Hour-Plot":
currentMode = "24Hour"
now = Time.time()
queue.put(['prev', hourSeismicData, currentMode, '24-Hour-Plot', graphHeightConst, dcShift, skipConst, stats])
totalHoursConst = 23
tempSeismicData = np.array([])
tempMillisecond = np.array([])
if readingQueue2 == "1-Hour-Plot":
currentMode = "1Hour"
now = Time.time()
queue.put(['prev', hourSeismicData, currentMode, '1-Hour-Plot', graphHeightConst, dcShift, skipConst, stats])
tempSeismicData = np.array([])
tempMillisecond = np.array([])
if readingQueue2 == "UP":
tempSeismicData = np.array([])
tempMillisecond = np.array([])
dcShift += 100
for line in fileinput.input('Station Information.txt', inplace=True):
print line.replace('DCShift:'+str(oldDCShift), 'DCShift:'+str(dcShift)),
oldDCShift = dcShift
if readingQueue2 == "DOWN":
tempSeismicData = np.array([])
tempMillisecond = np.array([])
dcShift -= 100
#Every time the user changes the graphshift - the value in against the graphShift header in the StationInformation.txt file is updated
for line in fileinput.input('Station Information.txt', inplace=True):
print line.replace('DCShift:'+str(oldDCShift), 'DCShift:'+str(dcShift)),
oldDCShift = dcShift
#Read from the TC1 seismometer.
#Causes problems if seismometer not connected properly or if python is run multiple times?? (09/12/15). See exception handler below.
reading = int(serialPort.readline())
timeNow = datetime.time(datetime.now())
time = timeNow.minute + (timeNow.second + timeNow.microsecond/1000000.0)/60.0
hourTime = timeNow.minute*60+timeNow.second + timeNow.microsecond/1000000.0
hour = timeNow.hour
plotClear = False
if currentMode == "24Hour":
#Depending on the hour and viewMode which is 24 or 1 hour plotting, the data value that is read is translated to the appropriate height
data = [int(reading+(graphHeightConst*totalHoursConst))+dcShift]
if currentMode == "1Hour":
minute = (datetime.time(datetime.now())).minute
if minute < 5:
data = [int(reading+(graphHeightConst*11))+dcShift]
if minute < 10 and minute >= 5:
data = [int(reading+(graphHeightConst*10))+dcShift]
if minute < 15 and minute >= 10:
data = [int(reading+(graphHeightConst*9))+dcShift]
if minute < 20 and minute >= 15:
data = [int(reading+(graphHeightConst*8))+dcShift]
if minute < 25 and minute >= 20:
data = [int(reading+(graphHeightConst*7))+dcShift]
if minute < 30 and minute >= 25:
data = [int(reading+(graphHeightConst*6))+dcShift]
if minute < 35 and minute >= 30:
data = [int(reading+(graphHeightConst*5))+dcShift]
if minute < 40 and minute >= 35:
data = [int(reading+(graphHeightConst*4))+dcShift]
if minute < 45 and minute >= 40:
data = [int(reading+(graphHeightConst*3))+dcShift]
if minute < 50 and minute >= 45:
data = [int(reading+(graphHeightConst*2))+dcShift]
if minute < 55 and minute >= 50:
data = [int(reading+(graphHeightConst*1))+dcShift]
if minute < 60 and minute >= 55:
data = [int(reading+(graphHeightConst*0))+dcShift]
if (hour != lastHour):
## Every time the hour changes the following code saves hour-long SAC files
lastHour = hour
fileName, stats, directory = saveHourData(stats, hourSeismicData, stationId,longitude, latitude , elevation)
hourSeismicData = np.array([])
hourTimeData = np.array([], dtype = np.float64)
##Uploads SAC file right after creating it
contentType = "application/octet-stream" #image/png
c = pycurl.Curl()
c.setopt(c.URL, 'https://nzseis.phy.auckland.ac.nz/pyjamaseis/upload/')
c.setopt(c.HTTPHEADER, ['Authorization:'+'Basic %s' % base64.b64encode("kofi:pyjamaseis")])
c.setopt(c.HTTPPOST, [("payload",(c.FORM_FILE, directory+fileName, c.FORM_CONTENTTYPE, contentType)), ("mode","sac")])
try:
c.perform()
c.close()
except pycurl.error, error:
errno, errstr = error
print 'An error occurred: ', errstr
totalHoursConst = totalHoursConst-1
if(totalHoursConst == -1):
plotClear = True
totalHoursConst = 23
hasHourChanged = True
if ((count % skipConst == 0) or hasHourChanged):
if ((tempSeismicData.size >= 18) or hasHourChanged):
##After every 18 values are read from the TC1 seismometer, the array containing these values, along with the tempMillisecond array which contains the exact time each value was read, is put on the queue for the plotting process to read
queue.put([tempSeismicData, tempMillisecond, hasHourChanged, plotClear, mode])
mode = "None"
#the arrays are cleared
tempSeismicData = np.array([])
tempMillisecond = np.array([])
hasHourChanged = False
options_window.hourData = (hourSeismicData, datetime.utcnow(), (latitude,longitude,elevation))
else:
if currentMode == "1Hour":
tempSeismicData = np.append(tempSeismicData,data)
if time < 5:
tempMillisecond = np.append(tempMillisecond,time)
elif time < 10:
tempMillisecond = np.append(tempMillisecond,time - 5)
elif time < 15:
tempMillisecond = np.append(tempMillisecond,time - 10)
elif time < 20:
tempMillisecond = np.append(tempMillisecond,time - 15)
elif time < 25:
tempMillisecond = np.append(tempMillisecond,time - 20)
elif time < 30:
tempMillisecond = np.append(tempMillisecond,time - 25)
elif time < 35:
tempMillisecond = np.append(tempMillisecond,time - 30)
elif time < 40:
tempMillisecond = np.append(tempMillisecond,time - 35)
elif time < 45:
tempMillisecond = np.append(tempMillisecond,time - 40)
elif time < 50:
tempMillisecond = np.append(tempMillisecond,time - 45)
elif time < 55:
tempMillisecond = np.append(tempMillisecond,time - 50)
elif time < 60:
tempMillisecond = np.append(tempMillisecond,time - 55)
hourSeismicData = np.append(hourSeismicData,reading)
hourTimeData = np.append(hourTimeData, hourTime)
else:
tempSeismicData = np.append(tempSeismicData,data)
tempMillisecond = np.append(tempMillisecond,time)
hourSeismicData = np.append(hourSeismicData,reading)
hourTimeData = np.append(hourTimeData, hourTime)
count += 1
except:
#Exception handler for seismometer connection error mentioned above. (09/12/15)
exc_type = sys.exc_info()[0]
if str(exc_type).find('SerialException') != -1:
msgbx.showerror("Error", "PyjAmaSeis has detected a seismometer connection error.\nPlease exit PyjAmaSeis and reconnect seismometer.")
window_close()
else:
print exc_type
queue.put((stats, hourSeismicData, stationId, longitude, latitude , elevation, hourTimeData)) #saves data when program closes.
return
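#### Illustrative sketch (hypothetical helper, not called by the program): Collecting above offsets every raw TC1
#### reading onto the trace row it belongs to before queueing it for plotData. In 24-hour mode the offset is
#### graphHeightConst*totalHoursConst; in 1-hour mode it is graphHeightConst*(11 - minute//5); dcShift is the
#### user-controlled vertical trim in both cases.
def plotted_value(reading, dc_shift, graph_height_const=2500, total_hours_const=None, minute=None):
    if total_hours_const is not None:   #24-hour mode
        return int(reading + graph_height_const * total_hours_const) + dc_shift
    return int(reading + graph_height_const * (11 - minute // 5)) + dc_shift   #1-hour mode
#### Likewise the x value queued in 1-hour mode is the minute-of-hour reduced modulo 5 (time % 5), which is what
#### the elif ladder over tempMillisecond computes branch by branch.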
##This function is responsible for plotting data which is pre-loaded and has not been read from the seismometer in real time. (11/12/15)
def plotPrevious(hour_data=None, currentMode=None, mode=None, graphHeightConst=None, dcShift=None, skipConst=None, stats=None):
data_array = hour_data
delta = stats['delta']
if currentMode == "24Hour":
data_array = data_array+(graphHeightConst*23+dcShift)
time_array = np.arange(0,len(data_array))*delta/60
queue.put([data_array, time_array, False, False, mode])
if currentMode == "1Hour":
tot_length = 0
for i in range(12):
i = i+1
if ((i)*300/delta) <= len(data_array):
data = np.array(data_array[tot_length:int(((i)*300/delta))])+(graphHeightConst*(12-i))+dcShift
time_array = np.arange(0,5,delta/60) #Want one less than 5
if len(time_array) == len(data)+1:
time_array = time_array[:len(data)]
if tot_length == 0:
queue.put([data, time_array, False, False, "1st-1-Hour-Plot"])
else:
queue.put([data, time_array, False, False, mode])
tot_length += len(data)
elif ((i-1)*300/delta) <= len(data_array):
data = np.array(data_array[int(((i-1)*300/delta)):])+(graphHeightConst*(12-i))+dcShift
if i != 1:
time_array = np.arange(0,(len(data_array)-tot_length))*delta/60
else:
time_array = np.arange(0,len(data_array))*delta/60
mode = "1st-1-Hour-Plot"
if len(time_array) == len(data)+1:
print len(time_array), len(data)
time_array = time_array[:len(data)]
queue.put([data, time_array, False, False, mode])
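#### Illustrative sketch (for explanation only): plotPrevious above slices the hour's samples into 5-minute rows.
#### With delta seconds between samples, row i covers sample indices [int(i*300/delta), int((i+1)*300/delta)).
#### A minimal restatement of that index arithmetic:
def five_minute_slices(n_samples, delta):
    bounds = [int(i * 300 / delta) for i in range(13)]
    return [(bounds[i], min(bounds[i + 1], n_samples)) for i in range(12)]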
##This function (new v2.0) saves the seismic data from the hour. (11/12/15)
def saveHourData(stats, hourSeismicData, stationId, longitude, latitude , elevation):
now = UTCDateTime()
diff = now-stats['starttime']
sampling_rate = len(hourSeismicData)/diff
delta = 1/sampling_rate
stats['npts'] = len(hourSeismicData)
stats['sampling_rate'] = sampling_rate
stats['delta'] = delta
st = Stream([Trace(data=hourSeismicData, header=stats)])
print 'Start:', stats['starttime'], 'End:', now, 'Length:', len(hourSeismicData)
sacdateAndTime = str(stats['starttime']).split('T')
sacdate = sacdateAndTime[0].split('-')
sactime = sacdateAndTime[1].split(':')
sacyear = sacdate[0][2:]
sacmonth = sacdate[1]
sacday = sacdate[2]
sachour = sactime[0]
fileName = str(sacyear+sacmonth+sacday+sachour+stats['station']+".sac") #v1.0 change. Removed minute from filename.
directory = stationId+'/'+str(sacdate[0])+'/'+sacmonth+'/'+sacday+'/'
directory_handler(directory)
st.write(directory+fileName, format='SAC')
stats = initializeHeader(longitude, latitude , elevation)
return fileName, stats, directory
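#### Illustrative sketch (standalone helper, assumes the file already exists): an hourly file written by
#### saveHourData above can be read back with ObsPy exactly as extractData and displayData do.
def _read_hour_file_demo(path):
    st = read(pathname_or_url=path, format='SAC')
    tr = st.pop(0)          #single Trace: tr.data holds the samples, tr.stats the header
    return tr.data, tr.stats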
#### This method gets all the active usb ports and selects the port the TC1 is connected to by comparing serial-port properties that are characteristic of the TC1
def getSerialPort():
try:
activePorts = serial_ports()
for port in activePorts:
serialPort = serial.Serial(port)
if (serialPort.baudrate == 9600):
if (serialPort.parity == 'N'):
if (serialPort.timeout == None):
if (serialPort.xonxoff == False):
if platform.system() == 'Linux': #new v2.0. TC1 will be a /dev/ttyACM* port on linux.
if serialPort.port.find('/dev/ttyACM') != -1:
serialPort.close()
return port
else:
serialPort.close()
return port
#if(serialPort.inWaiting() != 0):
# return port
except:
print("Device not found")
#### Method Returns all active usb ports
def serial_ports():
"""Lists serial ports
:raises EnvironmentError:
On unsupported or unknown platforms
:returns:
A list of available serial ports
"""
if sys.platform.startswith('win'):
ports = ['COM' + str(i + 1) for i in range(256)]
elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
# this is to exclude your current terminal "/dev/tty"
ports = glob.glob('/dev/tty[A-Za-z]*')
elif sys.platform.startswith('darwin'):
ports = glob.glob('/dev/tty.*')
else:
raise EnvironmentError('Unsupported platform')
result = []
for port in ports:
try:
s = serial.Serial(port)
s.close()
result.append(port)
except (OSError, serial.SerialException):
pass
return result
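#### Illustrative sketch (standalone, assumes a TC1 is attached to the given port): once getSerialPort above has
#### identified the port, Collecting opens it and treats every line it sends as one integer sample, essentially:
def _read_one_sample_demo(port_name):
    port = serial.Serial(port_name)
    port.flushInput()
    sample = int(port.readline())
    port.close()
    return sample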
#### Initializes the Header information for the SAC File
def initializeHeader(longitude, latitude , elevation, start=None):
global stationId
stats = Stats()
stats.network = 'RU'
stats.station = stationId
stats.location = latitude+'N.'+longitude+'E'
stats.channel = ' '
stats._format = 'SAC'
stats.calib = 1.0
stats.sampling_rate = 18.7647228241 #This is just a preliminary value (for get_hour_data). This is changed before saving with stats as header.
if start:
stats.starttime = UTCDateTime(start)
else:
#starttime in stats is no longer the current time this function is called, but the start of the current hour (11/12/15)
time = str(datetime.utcnow())
year, month, day = time.split('-')[0], time.split('-')[1], time.split('-')[2].split()[0] #utcnow() in form of 2015-12-10 03:21:24.769079
hour = time.split()[1].split(':')[0]
start = UTCDateTime(int(year),int(month),int(day),int(hour),0,0)
stats.starttime = UTCDateTime(start)
return stats
###Plotting process responsible for plotting data sent from the Collecting process, managing the plotting window, and changing and refreshing the axis between 24 hour and 1 hour plots. This process also saves a screenshot of the plot every hour and uploads it to the NZSeis server
### This method receives several input parameters such as queue figure axis... the queue is read for arrays of values sent by the collecting process
### This data is then plotted according to the plot selection (24 or 1 hour) on the ax object
### This method is also responsible for managing the connectivity between the lines drawn
def plotData(queue, queue2, fig, ax, canvas, bottomFrame, root, lastY, lastX, connect, line, mode, geometry, dataInteractive):
global plotting_loop, options_window, mainWin
#Embedded callback function (also see code below) to make sure previously recorded data is plotted after the window has been resized. (17/12/15)
def resize(root, geometry, mode):
root.update_idletasks()
new_geometry = root.geometry()
new_geometry = new_geometry[:new_geometry.find('+')] #Only concerned about when window is resized, not moved. (18/12/15)
if new_geometry != geometry:
dataInteractive.dataDeselector('resize') #Must be in this if statement
queue2.put(mode)
return new_geometry
if(queue.empty() == False):
#read the arrays and values sent by the collecting process. _continue is changed to False if this turns out to be a call to plot previous data.
values, _continue = queue.get(), True
geometry = resize(root, geometry, mode)
##
if values[4] == "24-Hour-Plot": #Only when data is put in queue by plotPrevious (15/12/15)
dataInteractive.dataDeselector('resize')
connect = True
lastX = 0
lastY = 0
mode = "24-Hour-Plot" #This variable is local to plotData and is not the same as mode in Collecting (that's values[4])
bottomFrame.mode = "24-Hour-Plot"
bottomFrame.firstHour = datetime.utcnow()
graphHeightConst = 2500
ax.cla()
ax.set_xlim(0,60)
ax.set_ylim(30250,92750)
ax.set_xlabel('Minute')
ax.set_ylabel('Hour (UTC)')
yAxis = [30250,92750]
y1 = (np.arange(min(yAxis), max(yAxis)+1,graphHeightConst))
y2 = calculateYAxisLabels()
ax = xAxisLabels(ax, 24)
plt.yticks(y1, y2)
ax.yaxis.grid(color = '#0000FF', linestyle = '-')
ax.set_axisbelow(True)
canvas.draw()
if values[4] == "1-Hour-Plot" or values[4] == "1st-1-Hour-Plot": #Only when data is put in queue by plotPrevious (15/12/15)
dataInteractive.dataDeselector('resize')
connect = True
lastX = 0
lastY = 0
mode = "1-Hour-Plot" #This variable is local to plotData and is not the same as mode in Collecting (that's values[4])
bottomFrame.mode = "1-Hour-Plot"
bottomFrame.firstHour = datetime.utcnow()
if values[4] == "1st-1-Hour-Plot":
values[4] = "1-Hour-Plot"
graphHeightConst = 2500
ax.cla()
ax.set_xlim(0,5)
ax.set_ylim(30250,62750)
ax.set_xlabel('Minute')
ax.set_ylabel('Hour (UTC)')
yAxis = [30250,62750]
y1 = (np.arange(min(yAxis), max(yAxis)+1,graphHeightConst))
y2 = calculateYAxisLabelsOneHour()
ax = xAxisLabels(ax, 1)
plt.yticks(y1, y2)
ax.yaxis.grid(color = '#0000FF', linestyle = '-')
ax.set_axisbelow(True)
canvas.draw()
if values[0] == 'prev':
plotPrevious(*values[1:])
_continue = False #Don't continue executing function
##
if _continue:
y = values[0]
x = values[1]
#The following if statement and its content are in charge of inserting the last value of the previous array at the front of the new array so the line starts from the last point, keeping successive segments connected
if(values[0].size != 0 and mode == "1-Hour-Plot" and values[4] != "1-Hour-Plot"):
if(lastX != 0 and lastY != 0):
y = np.insert(y, 0, lastY)
x = np.insert(x, 0, lastX)
lastY = values[0][-1]
lastX = values[1][-1]
for value in x:
if value > 4.998 or ((value > 4.9) and (str(datetime.utcnow()).split(':')[1] == '00')): #Addition to conditional to prevent problems if the plotting of the last set is actually slightly after the hour has changed. (10/12/15)
lastX = 0
lastY = 0
x = np.array([])
y = np.array([])
#The following if statement and its content are in charge of inserting the last value of the previous array at the front of the new array so the line starts from the last point, keeping successive segments connected
if (connect == True and mode == "24-Hour-Plot"):
if(lastX != 0 and lastY != 0):
y = np.insert(y, 0, lastY)
x = np.insert(x, 0, lastX)
if (values[0].size != 0 and mode == "24-Hour-Plot"):
lastY = values[0][-1]
lastX = values[1][-1]
#print 'Last:', lastY, lastX
if (values[2] == True and mode == "24-Hour-Plot"):
timestamp = open('timestamp.txt', 'a')
connect = False
# calculating time for the screenshot name when saving it
# v1.0 change: pyscreenshot.grab_to_file used instead of ImageGrab.grab().save()
now = str(datetime.utcnow())
now2 = now.split(' ',1 )
now3 = now2[1].split(':',1)
now3 = int(now3[0])-1
if (now3 == -1):
now3 = 23
name = str(now2[0]+'-'+str(now3)+".png")
timestamp.write(str(now2[0]+'-'+str(now3)))
timestamp.close()
yr_mnth_day = now2[0].split('-')
directory = stationId+'/'+yr_mnth_day[0]+'/'+yr_mnth_day[1]+'/'+yr_mnth_day[2]+'/'
directory_handler(directory)
#New Conditional v2.0. Screenshots causing problems with X server on ubuntu.
if platform.system() != 'Linux':
pyscreenshot.grab_to_file(directory+now2[0]+'-'+str(now3)+".png")
#upload image to NZSeis server - using the password and user name - kofi:pyjamaseis
contentType = 'image/png'
c = pycurl.Curl()
c.setopt(c.URL, 'https://nzseis.phy.auckland.ac.nz/pyjamaseis/upload/')
c.setopt(c.HTTPHEADER, ['Authorization:'+'Basic %s' % base64.b64encode("kofi:pyjamaseis")])
c.setopt(c.HTTPPOST, [("payload",(c.FORM_FILE, name, c.FORM_CONTENTTYPE, contentType)), ("mode","image")])
try:
c.perform()
c.close()
except pycurl.error, error:
errno, errstr = error
print 'An error occurred: ', errstr
else:
connect = True
if (values[2] == True and mode == "1-Hour-Plot"):
timestamp = open('timestamp.txt', 'a')
# calculating time for the screenshot name when saving it
# v1.0 change: pyscreenshot.grab_to_file used instead of ImageGrab.grab().save()
now = str(datetime.utcnow())
now2 = now.split(' ',1 )
now3 = now2[1].split(':',1)
now3 = int(now3[0])-1
if (now3 == -1):
now3 = 23
name = str(now2[0]+'-'+str(now3)+".png")
timestamp.write(str(now2[0]+'-'+str(now3)))
timestamp.close()
yr_mnth_day = now2[0].split('-')
directory = stationId+'/'+yr_mnth_day[0]+'/'+yr_mnth_day[1]+'/'+yr_mnth_day[2]+'/'
directory_handler(directory)
#New Conditional v2.0. Screenshots causing problems with X server on ubuntu.
if platform.system() != 'Linux':
pyscreenshot.grab_to_file(directory+now2[0]+'-'+str(now3)+".png")
#upload image to NZSeis server - using the password and user name - kofi:pyjamaseis
contentType = 'image/png'
c = pycurl.Curl()
c.setopt(c.URL, 'https://nzseis.phy.auckland.ac.nz/pyjamaseis/upload/')
c.setopt(c.HTTPHEADER, ['Authorization:'+'Basic %s' % base64.b64encode("kofi:pyjamaseis")])
c.setopt(c.HTTPPOST, [("payload",(c.FORM_FILE, name, c.FORM_CONTENTTYPE, contentType)), ("mode","image")])
try:
c.perform()
c.close()
except pycurl.error, error:
errno, errstr = error
print 'An error occurred: ', errstr
graphHeightConst = 2500
dataInteractive.dataDeselector('resize')
bottomFrame.firstHour = datetime.utcnow()
ax.cla()
ax.set_xlim(0,5)
ax.set_ylim(30250,62750)
ax.set_xlabel('Minute')
ax.set_ylabel('Hour (UTC)')
yAxis = [30250,62750]
y1 = (np.arange(min(yAxis), max(yAxis)+1,graphHeightConst))
y2 = calculateYAxisLabelsOneHour()
ax = xAxisLabels(ax, 1)
plt.yticks(y1, y2)
ax.yaxis.grid(color = '#0000FF', linestyle = '-')
ax.set_axisbelow(True)
canvas.draw()
fig.canvas.mpl_connect('motion_notify_event', lambda event: bottomFrame.mouse_move(event, graphHeightConst))
x = np.array([])
y = np.array([])
##
#Get the current time to display on the main plotting window
now = str(datetime.utcnow())
now1 = now.split('.',1)
timeNow = now1[0]+' - UTC'
bottomFrame.currentLabel.configure(text=timeNow) #sets the time as a label on the plot
if(values[3] == True and mode == "24-Hour-Plot"):
graphHeightConst = 2500
dataInteractive.dataDeselector('resize')
ax.cla()
ax.set_xlim(0,60) #05/01/16
ax.set_ylim(30250,92750)
ax.set_xlabel('Minute')
ax.set_ylabel('Hour (UTC)')
yAxis = [30250,92750]
y1 = (np.arange(min(yAxis), max(yAxis)+1,graphHeightConst))
y2 = calculateYAxisLabels()
ax = xAxisLabels(ax, 24)
plt.yticks(y1, y2)
ax.yaxis.grid(color = '#0000FF', linestyle = '-')
ax.set_axisbelow(True)
line, = ax.plot(x, y, color='k')
canvas.draw()
fig.canvas.mpl_connect('motion_notify_event', lambda event: bottomFrame.mouse_move(event, graphHeightConst))
x = np.array([])
y = np.array([])
line.set_data(x,y)
ax.draw_artist(line)
canvas.blit(ax.bbox) #Makes motion_notify events much faster. If this is tabbed in 2, then motion_notify events only update every second. Hopefully no adverse memory effects. (09/01/16)
if plotting_loop:
root.after(0, plotData,queue, queue2, fig, ax, canvas, bottomFrame, root, lastY, lastX, connect, line, mode, geometry, dataInteractive)
### Calculates labels required to represent the y axis for a 24 hour plot
def calculateYAxisLabels():
#24 hour labels
yaxislabels = []
#Gets current hour and generates an array containing values of the following 24 hours
now = str(datetime.utcnow())
now = now.split(' ',1)
now = now[1].split(':',1)
d = datetime.strptime(now[0], "%H")
d = str(d.strftime("%I %p")).split(' ',1)
currentHour = int(d[0])
ampm = str(" "+d[1])
hourAfter = currentHour + 1
hourAfterAmPm = ampm
if hourAfter == 12:
if(hourAfterAmPm == ' AM'):
hourAfterAmPm = ' PM'
else:
hourAfterAmPm = ' AM'
if hourAfter == 13:
hourAfter = 1
yaxislabels.append(str(currentHour)+ampm)
while currentHour != hourAfter or ampm != hourAfterAmPm:
yaxislabels.append(str(hourAfter)+ hourAfterAmPm)
hourAfter += 1
if hourAfter == 12:
if(hourAfterAmPm == ' AM'):
hourAfterAmPm = ' PM'
else:
hourAfterAmPm = ' AM'
if hourAfter == 13:
hourAfter = 1
yaxislabels.append('')
return yaxislabels[::-1]
### Calculates labels required to represent the y axis for a 1 hour plot
def calculateYAxisLabelsOneHour():
#24 hour labels
yaxislabels = []
#Gets current hour and generates an array containing values of that hour divided into 5 minute sections
now = str(datetime.utcnow())
now = now.split(' ',1)
now = now[1].split(':',1)
d = datetime.strptime(now[0], "%H")
d = str(d.strftime("%I %p")).split(' ',1)
start = 00
currentHour = int(d[0])
for i in range(0, 12):
if(start<10):
yaxislabels.append(str(currentHour)+':0'+str(start))
else:
yaxislabels.append(str(currentHour)+':'+str(start))
start = start+5
yaxislabels.append('')
return yaxislabels[::-1]
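# Illustrative output (not part of the original script): if the current hour renders as 3,
# the loop above builds ['3:00', '3:05', ..., '3:55'] plus a trailing '', and the reversed
# list that is returned labels the y axis top-to-bottom in 5-minute steps.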
## Function to find the labels for the x axis and draw grid.
def xAxisLabels(ax, mode):
if mode == 24:
x_list = []
for i in range(61): #(17/12/15)
if i%5 == 0:
x_list.append('+'+str(i))
else:
x_list.append('')
ax.set_xticks(np.arange(0,61,5))
ax.set_xticks(np.arange(0,61,1), minor=True)
ax.set_xticklabels([':00',':05',':10',':15',':20',':25',':30',':35',':40',':45',':50',':55',''])
ax.set_xticklabels(['']*61, minor=True)
ax.xaxis.grid(which = 'minor', color = '#7DCEA0', linestyle = ':')
ax.xaxis.grid(which = 'major', color = '#51bd80', linestyle = ':')
ax.xaxis.set_tick_params(labeltop='on')
return ax
elif mode == 1:
x_list = []
for i in range(31): #(17/12/15)
if i%6 == 0:
x_list.append('+'+str(i/6))
else:
x_list.append('')
ax.set_xticks(np.arange(0,6,1))
ax.set_xticks(np.arange(0,5.1,0.1666666666666666666666666666666), minor=True)
ax.set_xticklabels(['+0','+1','+2','+3','+4','+5'])
ax.set_xticklabels(['']*31, minor=True)
ax.xaxis.grid(which = 'minor', color = '#7DCEA0', linestyle = ':')
ax.xaxis.grid(which = 'major', color = '#51bd80', linestyle = ':')
ax.xaxis.set_tick_params(labeltop='on')
return ax
###Function to define what occurs when the main plotting window is closed. This is taken as exiting PyjAmaseis, so all windows and processes are ended. (07/12/15)
def window_close(condition=False):
global plotting_loop, collecting_loop, mainWin, options_window
plotting_loop, collecting_loop = False, False
if not condition: #Condition is True if program has not yet fully started (TC1 not connected error dialog exit press)
options_window.close()
mainWin.quit()
##Function (new v2.0) to support the new file saving system. Tries to make directory, and if directory already exists, ignores the exception raised. All other exceptions are reported. (09/12/15)
def directory_handler(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
##Function to populate hourSeismicData array with any previous readings in that hour before readings start.
def getHourData(stats):
sampling_rate = stats['sampling_rate']
time = str(datetime.utcnow())
year, month, day = time.split('-')[0], time.split('-')[1], time.split('-')[2].split()[0] #utcnow() in form of 2015-12-10 03:21:24.769079
hour = time.split()[1].split(':')[0]
filename = year[2:]+month+day+hour+stationId+'.sac'
directory = stationId+'/'+year+'/'+month+'/'+day+'/'+filename
if not os.path.exists(directory): #returns an array with appropriate number of zeroes since beginning of hour
hour_seconds = (datetime(int(year),int(month),int(day),int(hour),0,0) - datetime(1970,1,1)).total_seconds()
number_of_zeroes = int((Time.time()-hour_seconds)*sampling_rate)
return np.array([32750]*number_of_zeroes), stats
else: #returns array with data previously recorded in that hour
trace = read(pathname_or_url = directory, format = 'SAC')
trace = trace.pop(0)
data = trace.data
hour_seconds = (datetime(int(year),int(month),int(day),int(hour),0,0) - datetime(1970,1,1)).total_seconds()
number_of_zeroes = int((Time.time()-hour_seconds)*sampling_rate)-len(data)
return np.append(data, [32750]*number_of_zeroes), stats
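# Worked example of the padding arithmetic above (figures are illustrative only):
# starting 15 minutes into the hour with sampling_rate = 20 samples/s gives
# Time.time() - hour_seconds = 900 s, so number_of_zeroes = 900 * 20 = 18000
# placeholder samples of 32750 at the front of the returned array.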
### Main Method, this is where the application starts - queues are created for passing data between the threads, and two workers are set up: one collects the data and the other plots it
if __name__ == '__main__':
global collecting_loop, plotting_loop, options_window, tkframes #(09/12/15)
#Create the queues: one is used for communication between the collecting and plotting threads, the second between the collecting process and the options window to pass along the selections the user makes
queue = Queue()
queue2 = Queue()
queue3 = Queue()
#Create the data-collection thread (plotting runs in the main thread, see below)
collecting_loop, plotting_loop = True, True
collectionProcess = Thread(target= Collecting, args=(queue,queue2,queue3,))
#Making threads daemons so that the program closes when processes in them stop (09/12/15).
collectionProcess.daemon = True
##Starting everything
collectionProcess.start()
#Previously, Plotting was the target for the plotting thread (plottingProcess, v1.0). This has been changed (14/01/16), as TkInter does not behave well when
#the mainloop is not in the Main Thread. Once the main window has been closed (see window_close, initiated by protocol of main window), the code after the
#mainloop can be executed to save the data before the entire program is closed. The while loops wait for the tuple of data from collecting to be placed in the
#queue.
window = Plotting(queue,queue2)
if plotting_loop: #This conditional is only False if the TC-1 is not connected on startup. No windows will have been created if this is the case, and the user has chosen to exit (see while loop near beginning of Collecting).
tkframes = mFrame(queue3, window.root)
window.root.mainloop()
#Wait until data is put into queue by Collecting, then save data and close.
while queue.empty():
waiting = 'Waiting for final data from Collecting'
trying = True
while trying:
if not queue.empty():
data = queue.get()
if type(data) == type((1,)):
trying = False
print 'Saving:'
print ''
saveHourData(data[0], data[1], data[2], data[3], data[4] , data[5])
print ''
print 'Done'
|
gpl-2.0
| 3,916,063,271,660,287,500
| 49.946857
| 281
| 0.582119
| false
| 3.772883
| true
| false
| false
|
luhn/pubsubclub
|
test2.py
|
1
|
1562
|
from twisted.internet import reactor
from autobahn.wamp1 import protocol as wamp
from autobahn.twisted.websocket import listenWS
from pubsubclub import (
ConsumerMixin, ProducerMixin, ConsumerServer, ProducerClient, consul,
)
class WampServerProtocol(wamp.WampServerProtocol):
def onSessionOpen(self):
print("Whoa")
self.registerForRpc(self, "http://example.com/pubsub#")
self.registerForPubSub('http://example.com/mytopic')
@wamp.exportRpc('publish')
def publish(self, data):
try:
self.dispatch(
data['channel'],
data['content'],
exclude=[self],
)
except:
import traceback
traceback.print_exc()
return {}
class WampServerFactory(ConsumerMixin, ProducerMixin, wamp.WampServerFactory):
protocol = WampServerProtocol
if __name__ == '__main__':
# import logging
# logging.basicConfig(level=logging.INFO)
from twisted.python import log
import sys
log.startLogging(sys.stderr)
consumer = ConsumerServer('0.0.0.0', 19001)
WampServerFactory.consumer = consumer
producer = ProducerClient([
('127.0.0.1', 19000),
])
WampServerFactory.producer = producer
server = WampServerFactory('ws://localhost:9901')
listenWS(server)
consumer.processor = server
"""
discovery = consul.ConsulDiscovery(
'http://localhost:8500/', 'pubsubclub', producer,
)
discovery.start()
"""
print('Starting...')
reactor.run()
|
mit
| -5,059,213,242,148,466,000
| 24.606557
| 78
| 0.638284
| false
| 3.914787
| false
| false
| false
|
Johnzero/OE7
|
OE-debug文件/PyWapFetion-master/PyWapFetion/Fetion.py
|
1
|
5245
|
#coding=utf-8
from cookielib import MozillaCookieJar
from urllib2 import Request, build_opener, HTTPHandler, HTTPCookieProcessor
from urllib import urlencode
import base64
import os
from Errors import *
from re import compile
from Cache import Cache
from gzip import GzipFile
try:
from cStringIO import StringIO
except:
from StringIO import StringIO
idfinder = compile('touserid=(\d*)')
idfinder2 = compile('name="internalid" value="(\d+)"')
csrf_token = compile('<postfield name="csrfToken" value="(\w+)"/>')
codekey = compile('<img src="/im5/systemimage/verifycode(.*?).jpeg"')
__all__ = ['Fetion']
class Fetion(object):
def __init__(self, mobile, password=None, status='0',
cachefile='Fetion.cache', cookiesfile=''):
'''Login status codes:
Online: 400  Invisible: 0  Busy: 600  Away: 100
'''
if cachefile:
self.cache = Cache(cachefile)
if not cookiesfile:
cookiesfile = '%s.cookies' % mobile
cookiejar = MozillaCookieJar(filename=cookiesfile)
if not os.path.isfile(cookiesfile):
open(cookiesfile, 'w').write(MozillaCookieJar.header)
cookiejar.load(filename=cookiesfile)
cookie_processor = HTTPCookieProcessor(cookiejar)
self.opener = build_opener(cookie_processor,
HTTPHandler)
self.mobile, self.password = mobile, password
if not self.alive():
self._login()
cookiejar.save()
self.changestatus(status)
def send2self(self, message, time=None):
if time:
htm = self.open('im/user/sendTimingMsgToMyselfs.action',
{'msg': message, 'timing': time})
else:
htm = self.open('im/user/sendMsgToMyselfs.action',
{'msg': message})
return '成功' in htm
def send(self, mobile, message, sm=False):
if mobile == self.mobile:
return self.send2self(message)
return self.sendBYid(self.findid(mobile), message, sm)
def addfriend(self, mobile, name='xx'):
htm = self.open('im/user/insertfriendsubmit.action',
{'nickname': name, 'number': mobile, 'type': '0'})
return '成功' in htm
def alive(self):
htm = self.open('im/index/indexcenter.action')
return '心情' in htm or '正在登陆' in htm
def deletefriend(self, id):
htm = self.open('im/user/deletefriendsubmit.action?touserid=%s' % id)
return '删除好友成功!' in htm
def changestatus(self, status='0'):
url = 'im5/index/setLoginStatus.action?loginstatus=' + status
for x in range(2):
htm = self.open(url)
return 'success' in htm
def logout(self, *args):
self.opener.open('http://f.10086.cn/im/index/logoutsubmit.action')
__enter__ = lambda self: self
__exit__ = __del__ = logout
def _login(self):
htm = ''
data = {
'm': self.mobile,
'pass': self.password,
}
while '图形验证码错误' in htm or not htm:
page = self.open('/im5/login/loginHtml5.action')
matches = codekey.findall(page)
if matches:
captcha = matches[0]
img = self.open('/im5/systemimage/verifycode%s.jpeg' % captcha)
open('verifycode.jpeg', 'wb').write(img)
captchacode = raw_input('captchaCode:')
data['captchaCode'] = captchacode
htm = self.open('/im5/login/loginHtml5.action', data)
self.alive()
return '登录' in htm
def sendBYid(self, id, message, sm=False):
url = 'im/chat/sendShortMsg.action?touserid=%s' % id
if sm:
url = 'im/chat/sendMsg.action?touserid=%s' % id
htm = self.open(url,
{'msg': message, 'csrfToken': self._getcsrf(id)})
if '对方不是您的好友' in htm:
raise FetionNotYourFriend
return '成功' in htm
def _getid(self, mobile):
htm = self.open('im/index/searchOtherInfoList.action',
{'searchText': mobile})
try:
return idfinder.findall(htm)[0]
except IndexError:
try:
return idfinder2.findall(htm)[0]
except:
return None
except:
return None
def findid(self, mobile):
if hasattr(self, 'cache'):
id = self.cache[mobile]
if not id:
self.cache[mobile] = id = self._getid(mobile)
return id
return self._getid(mobile)
def open(self, url, data=''):
request = Request('http://f.10086.cn/%s' % url, data=urlencode(data))
htm = self.opener.open(request).read()
try:
htm = GzipFile(fileobj=StringIO(htm)).read()
finally:
return htm
def _getcsrf(self, id=''):
if hasattr(self, 'csrf'):
return self.csrf
url = ('im/chat/toinputMsg.action?touserid=%s&type=all' % id)
htm = self.open(url)
try:
self.csrf = csrf_token.findall(htm)[0]
return self.csrf
except IndexError:
print htm
raise FetionCsrfTokenFail
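# Illustrative usage sketch (not part of the library; the number and password below are placeholders):
#     with Fetion('13800138000', 'password', status='400') as fetion:
#         fetion.send2self(u'note to self')
#         fetion.send('13900139000', u'hello')
#     # __exit__ above maps to logout(), so leaving the with-block logs out.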
|
agpl-3.0
| -1,393,321,632,573,504,300
| 31.13125
| 79
| 0.571873
| false
| 3.656472
| false
| false
| false
|
umangv/LitHub
|
LitHub/fbconnect/utils.py
|
1
|
5141
|
# Copyright 2011 Kalamazoo College Computer Science Club
# <kzoo-cs-board@googlegroups.com>
# This file is part of LitHub.
#
# LitHub is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LitHub is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LitHub. If not, see <http://www.gnu.org/licenses/>.
from django.utils.http import urlencode
from django.conf import settings
from django.core.urlresolvers import reverse
import urllib2
from urlparse import parse_qs
import json
def lazy_prop(func):
"""Wrapper for properties that should be evaluated lazily
This calls the actual method only once per instance. On the first time
the property is read, its value is stored in self.__dict__. The next
time onwards, the stored value is returned.
Note that this wrapper also wraps the property wrapper on the method, so
only the @lazy_prop wrapper needs to be used.
"""
def wrap(self, *args, **kwargs):
if not func.__name__ in self.__dict__:
self.__dict__[func.__name__] = func(self, *args, **kwargs)
return self.__dict__[func.__name__]
return property(wrap)
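# Illustrative use of lazy_prop (the Profile class and expensive_api_call are hypothetical,
# not part of this module):
#     class Profile(object):
#         @lazy_prop
#         def friends(self):
#             return expensive_api_call()
#     profile.friends   # computed on first access, then served from self.__dict__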
class FBConnect(object):
"""Access and run queries using the Facebook Connect API"""
def __init__(self, code=None, view=None, access_token=None):
if code != None:
self.access_token = ""
self._get_access_token(code, view)
elif access_token != None:
self.access_token = access_token
elif access_token==None and code==None:
raise ValueError('code and access_token cannot both be None.')
def _get_access_token(self, code, view=None):
LOOKUP_URL = "https://graph.facebook.com/oauth/access_token?"
opts = {'client_id':settings.FB_APP_ID,
'redirect_uri':_url_receiving_code(view),
'client_secret':settings.FB_APP_SECRET,
'code':code}
try:
fb_resp = urllib2.urlopen(LOOKUP_URL + urlencode(opts))
result = fb_resp.read()
fb_resp.close()
except urllib2.HTTPError:
raise ValueError("The code was invalid or there was a problem" \
+ " connecting to Facebook")
resp = parse_qs(result)
if not resp.has_key('access_token'):
raise ValueError("No access token returned")
self.access_token = resp['access_token'][0]
@lazy_prop
def basic_info(self):
LOOKUP_URL = "https://graph.facebook.com/me?"
opts = {'access_token':self.access_token,}
try:
fb_resp = urllib2.urlopen(LOOKUP_URL + urlencode(opts))
results = fb_resp.read()
fb_resp.close()
except urllib2.HTTPError:
raise ValueError("The token was invalid or there was a " +\
"problem connecting to facebook")
return json.loads(results)
@lazy_prop
def networks(self):
LOOKUP_URL = "https://api.facebook.com/method/fql.query?"
opts = {'query':"SELECT affiliations FROM user WHERE uid=%s"%\
self.userid, 'access_token':self.access_token,
'format':'json'}
try:
fb_resp = urllib2.urlopen(LOOKUP_URL + urlencode(opts))
results = fb_resp.read()
fb_resp.close()
except urllib2.HTTPError:
raise ValueError("The token was invalid or there was a " + \
"problem connecting to facebook")
return json.loads(results)[0]['affiliations']
@lazy_prop
def userid(self):
return self.basic_info['id']
def publish_og(self, action, obj_type, obj, params=None):
opts = {'access_token':self.access_token,
obj_type:obj}
if params:
opts.update(params)
# Allows overriding any of the options in opts
try:
fb_resp = urllib2.urlopen(\
'https://graph.facebook.com/me/%s:%s'%(\
settings.FB_APP_NAMESPACE, action),
urlencode(opts))
id = fb_resp.read()
fb_resp.close()
except urllib2.HTTPError as e:
raise ValueError("There was a problem connecting to facebook.")
return id
def _url_receiving_code(view=None):
view = view or 'fbconnect.views.receive_code'
extra = reverse(view)
return settings.FB_REDIRECT_URL + extra
def redirect_to_fb_url(view=None):
base_url = "https://www.facebook.com/dialog/oauth?"
opts = {'client_id':settings.FB_APP_ID,
'redirect_uri':_url_receiving_code(view),
'scope':'email,publish_actions',}
return base_url + urlencode(opts)
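# Illustrative flow for the helpers above (the receiving view and its parameter handling
# are project-specific; this only sketches the intended order of calls):
#     return HttpResponseRedirect(redirect_to_fb_url())   # 1. send user to the OAuth dialog
#     fb = FBConnect(code=request.GET['code'])             # 2. exchange the returned code
#     uid = fb.userid                                       # 3. lazily query the Graph API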
|
gpl-3.0
| -8,131,026,453,522,601,000
| 38.244275
| 76
| 0.613888
| false
| 3.951576
| false
| false
| false
|
georgejhunt/HaitiDictionary.activity
|
palettes.py
|
1
|
10881
|
# Copyright (C) 2008, One Laptop Per Child
# Copyright (C) 2009, Tomeu Vizoso, Simon Schampijer
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import tempfile
import urlparse
from gettext import gettext as _
from gi.repository import Gtk
from gi.repository import GObject
from sugar3.graphics.palette import Palette, Invoker
from sugar3.graphics.menuitem import MenuItem
from sugar3.graphics.icon import Icon
from sugar3 import profile
from sugar3.activity import activity
import downloadmanager
class MouseOutListener(GObject.GObject):
_com_interfaces_ = interfaces.nsIDOMEventListener
__gsignals__ = {
'mouse-out': (GObject.SignalFlags.RUN_FIRST,
None,
([])),
}
def __init__(self, target):
GObject.GObject.__init__(self)
self.target = target
def handleEvent(self, event):
self.emit('mouse-out')
class ContentInvoker(Invoker):
_com_interfaces_ = interfaces.nsIDOMEventListener
def __init__(self, browser):
Invoker.__init__(self)
self._position_hint = self.AT_CURSOR
self._browser = browser
self._mouseout_listener = None
self._popdown_handler_id = None
def get_default_position(self):
return self.AT_CURSOR
def get_rect(self):
return ()
def get_toplevel(self):
return None
def handleEvent(self, event):
if event.button != 2:
return
target = event.target
if target.tagName.lower() == 'a':
if target.firstChild:
title = target.firstChild.nodeValue
else:
title = None
self.palette = LinkPalette(self._browser, title, target.href,
target.ownerDocument)
self.notify_right_click()
elif target.tagName.lower() == 'img':
if target.title:
title = target.title
elif target.alt:
title = target.alt
elif target.name:
title = target.name
else:
title = os.path.basename(urlparse.urlparse(target.src).path)
self.palette = ImagePalette(title, target.src,
target.ownerDocument)
self.notify_right_click()
else:
return
if self._popdown_handler_id is not None:
self.palette.disconnect(self._popdown_handler_id)
self._popdown_handler_id = self.palette.connect( \
'popdown', self.__palette_popdown_cb)
self._mouseout_listener = MouseOutListener(target)
wrapper = xpcom.server.WrapObject(self._mouseout_listener,
interfaces.nsIDOMEventListener)
target.addEventListener('mouseout', wrapper, False)
self._mouseout_listener.connect('mouse-out', self.__moved_out_cb)
def __moved_out_cb(self, listener):
self.palette.popdown()
def __palette_popdown_cb(self, palette):
if self._mouseout_listener is not None:
wrapper = xpcom.server.WrapObject(self._mouseout_listener,
interfaces.nsIDOMEventListener)
self._mouseout_listener.target.removeEventListener('mouseout',
wrapper, False)
del self._mouseout_listener
class LinkPalette(Palette):
def __init__(self, browser, title, url, owner_document):
Palette.__init__(self)
self._browser = browser
self._title = title
self._url = url
self._owner_document = owner_document
if title is not None:
self.props.primary_text = title
self.props.secondary_text = url
else:
self.props.primary_text = url
menu_item = MenuItem(_('Follow link'), 'browse-follow-link')
menu_item.connect('activate', self.__follow_activate_cb)
self.menu.append(menu_item)
menu_item.show()
menu_item = MenuItem(_('Follow link in new tab'),
'browse-follow-link-new-tab')
menu_item.connect('activate', self.__follow_activate_cb, True)
self.menu.append(menu_item)
menu_item.show()
menu_item = MenuItem(_('Keep link'))
icon = Icon(icon_name='document-save', xo_color=profile.get_color(),
icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
menu_item.connect('activate', self.__download_activate_cb)
self.menu.append(menu_item)
menu_item.show()
menu_item = MenuItem(_('Copy link'))
icon = Icon(icon_name='edit-copy', xo_color=profile.get_color(),
icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
menu_item.connect('activate', self.__copy_activate_cb)
self.menu.append(menu_item)
menu_item.show()
def __follow_activate_cb(self, menu_item, new_tab=False):
if new_tab:
new_browser = self._browser.open_new_tab(self._url)
else:
self._browser.load_uri(self._url)
self._browser.grab_focus()
def __copy_activate_cb(self, menu_item):
clipboard = Gtk.Clipboard()
targets = Gtk.target_list_add_uri_targets()
targets = Gtk.target_list_add_text_targets(targets)
targets.append(('text/x-moz-url', 0, 0))
clipboard.set_with_data(targets,
self.__clipboard_get_func_cb,
self.__clipboard_clear_func_cb)
def __clipboard_get_func_cb(self, clipboard, selection_data, info, data):
uri_targets = \
[target[0] for target in Gtk.target_list_add_uri_targets()]
text_targets = \
[target[0] for target in Gtk.target_list_add_text_targets()]
if selection_data.target in uri_targets:
selection_data.set_uris([self._url])
elif selection_data.target in text_targets:
selection_data.set_text(self._url)
elif selection_data.target == 'text/x-moz-url':
selection_data.set('text/x-moz-url', 8, self._url)
def __clipboard_clear_func_cb(self, clipboard, data):
pass
def __download_activate_cb(self, menu_item):
downloadmanager.save_link(self._url, self._title, self._owner_document)
class ImagePalette(Palette):
def __init__(self, title, url, owner_document):
Palette.__init__(self)
self._title = title
self._url = url
self._owner_document = owner_document
self.props.primary_text = title
self.props.secondary_text = url
menu_item = MenuItem(_('Keep image'))
icon = Icon(icon_name='document-save', xo_color=profile.get_color(),
icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
menu_item.connect('activate', self.__download_activate_cb)
self.menu.append(menu_item)
menu_item.show()
menu_item = MenuItem(_('Copy image'))
icon = Icon(icon_name='edit-copy', xo_color=profile.get_color(),
icon_size=Gtk.IconSize.MENU)
menu_item.set_image(icon)
menu_item.connect('activate', self.__copy_activate_cb)
self.menu.append(menu_item)
menu_item.show()
def __copy_activate_cb(self, menu_item):
file_name = os.path.basename(urlparse.urlparse(self._url).path)
if '.' in file_name:
base_name, extension = file_name.split('.')
extension = '.' + extension
else:
base_name = file_name
extension = ''
temp_path = os.path.join(activity.get_activity_root(), 'instance')
fd, temp_file = tempfile.mkstemp(dir=temp_path, prefix=base_name,
suffix=extension)
os.close(fd)
os.chmod(temp_file, 0664)
cls = components.classes['@mozilla.org/network/io-service;1']
io_service = cls.getService(interfaces.nsIIOService)
uri = io_service.newURI(self._url, None, None)
cls = components.classes['@mozilla.org/file/local;1']
target_file = cls.createInstance(interfaces.nsILocalFile)
target_file.initWithPath(temp_file)
cls = components.classes[ \
'@mozilla.org/embedding/browser/nsWebBrowserPersist;1']
persist = cls.createInstance(interfaces.nsIWebBrowserPersist)
persist.persistFlags = 1 # PERSIST_FLAGS_FROM_CACHE
listener = xpcom.server.WrapObject(_ImageProgressListener(temp_file),
interfaces.nsIWebProgressListener)
persist.progressListener = listener
persist.saveURI(uri, None, None, None, None, target_file)
def __download_activate_cb(self, menu_item):
downloadmanager.save_link(self._url, self._title, self._owner_document)
class _ImageProgressListener(object):
_com_interfaces_ = interfaces.nsIWebProgressListener
def __init__(self, temp_file):
self._temp_file = temp_file
def onLocationChange(self, webProgress, request, location):
pass
def onProgressChange(self, webProgress, request, curSelfProgress,
maxSelfProgress, curTotalProgress, maxTotalProgress):
pass
def onSecurityChange(self, webProgress, request, state):
pass
def onStatusChange(self, webProgress, request, status, message):
pass
def onStateChange(self, webProgress, request, stateFlags, status):
if (stateFlags & interfaces.nsIWebProgressListener.STATE_IS_REQUEST and
stateFlags & interfaces.nsIWebProgressListener.STATE_STOP):
clipboard = Gtk.Clipboard()
clipboard.set_with_data([('text/uri-list', 0, 0)],
_clipboard_get_func_cb,
_clipboard_clear_func_cb,
self._temp_file)
def _clipboard_get_func_cb(clipboard, selection_data, info, temp_file):
selection_data.set_uris(['file://' + temp_file])
def _clipboard_clear_func_cb(clipboard, temp_file):
if os.path.exists(temp_file):
os.remove(temp_file)
|
gpl-2.0
| 158,557,187,725,631,200
| 35.029801
| 79
| 0.602151
| false
| 3.982796
| false
| false
| false
|
mbohlool/client-python
|
kubernetes/client/models/v1beta2_stateful_set_list.py
|
1
|
6301
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta2StatefulSetList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'items': 'list[V1beta2StatefulSet]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None):
"""
V1beta2StatefulSetList - a model defined in Swagger
"""
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""
Gets the api_version of this V1beta2StatefulSetList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this V1beta2StatefulSetList.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1beta2StatefulSetList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this V1beta2StatefulSetList.
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""
Gets the items of this V1beta2StatefulSetList.
:return: The items of this V1beta2StatefulSetList.
:rtype: list[V1beta2StatefulSet]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this V1beta2StatefulSetList.
:param items: The items of this V1beta2StatefulSetList.
:type: list[V1beta2StatefulSet]
"""
if items is None:
raise ValueError("Invalid value for `items`, must not be `None`")
self._items = items
@property
def kind(self):
"""
Gets the kind of this V1beta2StatefulSetList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this V1beta2StatefulSetList.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1beta2StatefulSetList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1beta2StatefulSetList.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1beta2StatefulSetList.
:return: The metadata of this V1beta2StatefulSetList.
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1beta2StatefulSetList.
:param metadata: The metadata of this V1beta2StatefulSetList.
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta2StatefulSetList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
apache-2.0
| -815,164,855,839,261,300
| 29.439614
| 281
| 0.594509
| false
| 4.164574
| false
| false
| false
|
tkw1536/PythonCaseClass
|
case_class/case_class.py
|
1
|
8346
|
"""
CaseClass implementation for the case_class module
Copyright (c) 2016 Tom Wiesing -- licensed under MIT, see LICENSE
"""
import inspect
from . import exceptions, clsutils, signature
#
# Meta-classes for the case class
#
class CaseClassMeta(type):
""" Meta-Class for case classes. """
instance_keys = {}
instance_values = {}
instance_list = []
def __new__(mcs, name, bases, attrs):
""" Creates a new class with MetaClass CaseClassMeta.
:param name: Name of the class to create.
:type name: str
:param bases: Base classes for the class.
:type bases: list
:param attrs: Attributes of this class.
:type attrs: dict
:rtype: CaseClassMeta
"""
# no case-to-case inheritance outside of the base classes
if _CaseClass not in bases and \
CaseClassMeta.inherits_from_case_class(bases):
raise exceptions.NoCaseToCaseInheritanceException(name)
# now we can just create it normally.
return super(CaseClassMeta, mcs).__new__(mcs, name, bases, attrs)
def __call__(cls, *args, **kwargs):
""" Creates a new CaseClass() instance.
:param args: Arguments to this CaseClass instance.
:type args: list
:param kwargs: Keyword arguments to this CaseClass instance.
:type kwargs: dict
:rtype: CaseClass
"""
# Can not instantiate Abstract Case Class
if AbstractCaseClass in cls.__bases__:
raise exceptions.NotInstantiableAbstractCaseClassException(cls)
# may not instantiate sub classes of _CaseClass
if _CaseClass in cls.__bases__:
raise exceptions.NotInstantiableClassException(
"Cannot instantiate %s: " % (cls.__name__,) +
"Classes inheriting directly from _CaseClass may not be " +
"instantiated. ", cls)
# make sure we have the dictionary
if cls not in CaseClassMeta.instance_keys:
CaseClassMeta.instance_keys[cls] = []
CaseClassMeta.instance_values[cls] = {}
# Extract the instances for this class
ckey = CaseClassMeta.instance_keys[cls]
cval = CaseClassMeta.instance_values[cls]
# key we will use for this instance.
key = clsutils.get_class_parameters(cls, *args, **kwargs)
# try and return an existing instance.
try:
return cval[ckey.index(key)]
except ValueError:
pass
# create a new instance
instance = super(CaseClassMeta, cls).__call__(*args, **kwargs)
# store the instance
idx = len(ckey)
ckey.append(key)
cval[idx] = instance
# and return it
return instance
def __getitem__(cls, item):
""" Syntactic sugar to create new CaseClass instances.
:param item: Tuple representing parameters or slice instance.
:type item: Any
:rtype: CaseClass
"""
# allow CaseClass[:] to create a new CaseClass()
if isinstance(item, slice):
if item.start is None and item.stop is None and item.step is None:
return CaseClassMeta.__call__(cls)
# if we get a single item, it needs to be turned into a tuple.
elif not isinstance(item, tuple):
item = (item,)
# finally just do the same as in call.
return CaseClassMeta.__call__(cls, *item)
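# Illustrative sugar provided by __getitem__ above (SomePoint is a hypothetical subclass):
#     SomePoint[1, 2]   is equivalent to   SomePoint(1, 2)
#     SomePoint[:]      is equivalent to   SomePoint()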
@staticmethod
def get_hash(cc):
""" Gets a hash for a CaseClass or None.
:param cc: CaseClass instance to get hash for
:type cc: CaseClass
:rtype: int
"""
if not isinstance(cc, CaseClass):
raise ValueError("Argument is not a CaseClass, can not get hash. ")
# get a key for the instance
cls = cc.__class__
key = (cc.case_args, cc.case_kwargs)
# extract the key
ckey = CaseClassMeta.instance_keys[cls]
idx = ckey.index(key)
# and return a hash of it
return hash((CaseClassMeta, ckey, idx))
@staticmethod
def is_concrete_caseclass(cls):
""" Checks if a class is a concrete case class via inheritance.
:param cls: Class to check.
:type cls: type
:rtype: bool
"""
return cls != AbstractCaseClass and CaseClass in cls.__bases__
@staticmethod
def inherits_from_case_class(bases):
""" Checks if this class inherits from a non-inheritable case class.
:param bases: List of bases of the class to check
:type bases: list
:rtype: bool
"""
# if we can inherit from it, we are already done.
if InheritableCaseClass in bases:
return False
for b in bases:
if CaseClassMeta.is_concrete_caseclass(b):
return True
return False
class _CaseClass(object):
""" A class used as base for all CaseClasses"""
pass
@clsutils.add_metaclass(CaseClassMeta)
class CaseClass(_CaseClass):
""" Represents a normal CaseClass. """
def __new__(cls, *args, **kwargs):
""" Creates a new CaseClass instance.
:param args: Parameters for this CaseClass instance.
:type args: list
:param kwargs: Keyword Arguments for this CaseClass instance.
:type kwargs: dict
:rtype: CaseClass
"""
# create a new instance
inst = super(CaseClass, cls).__new__(cls)
# set the class name
inst.__name = inst.__class__.__name__
# get the init signature
inst.__sig = clsutils.get_init_signature(inst.__class__)
# and the arguments
inst.__applied = inst.__sig(*args, **kwargs)
# and return the instance
return inst
def __hash__(self):
""" Returns a hash representing this case class.
:rtype: int
"""
return CaseClassMeta.get_hash(self)
def copy(self, *args, **kwargs):
""" Makes a copy of this CaseClass instance and exchanges the given
values.
:rtype: CaseClass
"""
updated = self.case_params.signature(*args, **kwargs)
return updated.call(self.__class__)
@property
def case_params(self):
""" Returns the parameters originally given to this CaseClass.
:rtype: CaseParameters
"""
return CaseParameters(self.__applied)
def __repr__(self):
""" Implements a representation for CaseClass instances. This is given
by the class name and the representation of all the parameters.
:rtype: str
"""
# name of the class and parameters
return "%s(%s)" % (self.__name, self.case_params)
class AbstractCaseClass(CaseClass, _CaseClass):
""" Represents a CaseClass that may not be instantiated but only inherited
from. """
pass
class InheritableCaseClass(CaseClass, _CaseClass):
""" Represent a CaseClass that may be inherited from. """
pass
class CaseParameters(CaseClass, dict):
""" Represents arguments given to a CaseClass. """
def __init__(self, sig):
""" Creates a new CaseArguments() instance.
:param sig: Applied Signature of the original init function.
:type sig: signature.AppliedSignature
"""
self.__sig = sig
# super(CaseParameters, self).__init__(self.__params)
def __getitem__(self, n):
""" Returns a positional CaseClass parameter.
:param n: Number of item to get.
:type n: int
:rtype: object
"""
# TODO: Check into the numerical things
return self.__sig[n]
def __getattr__(self, name):
""" Gets a parameter given to this CaseParameters instance by name.
:param name: Name of parameter to get
:type name: str
"""
return self.__sig[name]
@property
def signature(self):
""" Returns the applied Signature belonging to this CaseClasss.
:rtype: signature.AppliedSignature
"""
return self.__sig
def __str__(self):
""" Turns this CaseParameters instance into a string.
:rtype: str
"""
return str(self.__sig)
__all__ = ["AbstractCaseClass", "CaseClass", "InheritableCaseClass"]
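# Illustrative use of the module (the Point class is hypothetical, not part of the library):
#     class Point(CaseClass):
#         def __init__(self, x, y):
#             self.x, self.y = x, y
#     Point(1, 2) is Point(1, 2)   # True per the per-argument instance cache in CaseClassMeta.__call__
#     Point(1, 2).copy(y=5)        # intended: a new Point with x=1, y=5 (see copy() above)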
|
mit
| -7,552,289,889,035,847,000
| 25.495238
| 79
| 0.593937
| false
| 4.385707
| false
| false
| false
|
yakky/django-form-designer
|
form_designer/migrations/0011_auto__add_field_formdefinitionfield_choice_model_queryset.py
|
1
|
10609
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'FormDefinitionField.choice_model_queryset'
db.add_column(u'form_designer_formdefinitionfield', 'choice_model_queryset',
self.gf('django.db.models.fields.CharField')(default='objects', max_length=255, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'FormDefinitionField.choice_model_queryset'
db.delete_column(u'form_designer_formdefinitionfield', 'choice_model_queryset')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'form_designer.formdefinition': {
'Meta': {'object_name': 'FormDefinition'},
'action': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'allow_get_initial': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'body': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'display_logged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'error_message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'form_template_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log_data': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mail_from': ('form_designer.fields.TemplateCharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'mail_subject': ('form_designer.fields.TemplateCharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'mail_to': ('form_designer.fields.TemplateCharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'mail_uploaded_files': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'message_template': ('form_designer.fields.TemplateTextField', [], {'null': 'True', 'blank': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'default': "'POST'", 'max_length': '10'}),
'name': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'private_hash': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'public_hash': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'redirect_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'require_hash': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'save_uploaded_files': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'submit_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'success_clear': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'success_message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'success_redirect': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'form_designer.formdefinitionfield': {
'Meta': {'ordering': "['position']", 'object_name': 'FormDefinitionField'},
'choice_labels': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'choice_model': ('form_designer.fields.ModelNameField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'choice_model_empty_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'choice_model_queryset': ('django.db.models.fields.CharField', [], {'default': "'objects'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'choice_values': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'decimal_places': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'field_class': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'form_definition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['form_designer.FormDefinition']"}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_result': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'initial': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'max_digits': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_length': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'min_length': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'min_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'position': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'regex': ('form_designer.fields.RegexpExpressionField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'widget': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'form_designer.formlog': {
'Meta': {'object_name': 'FormLog'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'form_definition': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': u"orm['form_designer.FormDefinition']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'form_designer.formvalue': {
'Meta': {'object_name': 'FormValue'},
'field_name': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'form_log': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'values'", 'to': u"orm['form_designer.FormLog']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['form_designer']
|
bsd-3-clause
| -4,493,967,210,291,561,500
| 81.248062
| 187
| 0.557923
| false
| 3.62453
| false
| false
| false
|
swharden/SWHLab
|
doc/uses/EPSCs-and-IPSCs/variance method/2016-12-16 tryout.py
|
1
|
3941
|
"""
This script investigates how calculating phasic currents from voltage clamp
recordings may benefit from subtracting-out the "noise" determined from a
subset of the quietest pieces of the recording, rather than using smoothing
or curve fitting to guess a guassian-like RMS noise function.
"""
import os
import swhlab
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.mlab as mlab
POINTS_PER_SEC=20000
POINTS_PER_MS=int(POINTS_PER_SEC/1000)
CHUNK_POINTS=POINTS_PER_MS*10 # size of Y pieces to calculate variance from
PERCENT_STEP=10 # percentile steps to display
HIST_RESOLUTION=.1 # pA per bin
COLORMAP=plt.get_cmap('jet') # which color scheme do we want to use?
#COLORMAP=plt.get_cmap('winter') # which color scheme do we want to use?
def quietParts(data,percentile=10):
"""
Given some data (Y) break it into chunks and return just the quiet ones.
Returns data where the variance for its chunk size is below the given percentile.
CHUNK_POINTS should be adjusted so it's about 10ms of data.
"""
nChunks=int(len(data)/CHUNK_POINTS)
chunks=np.reshape(data[:nChunks*CHUNK_POINTS],(nChunks,CHUNK_POINTS))
variances=np.var(chunks,axis=1)
percentiles=np.empty(len(variances))
for i,variance in enumerate(variances):
percentiles[i]=sorted(variances).index(variance)/len(variances)*100
selected=chunks[np.where(percentiles<=percentile)[0]].flatten()
return selected
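# Illustrative call (not in the original script): with Y loaded as in __main__ below,
#     quiet = quietParts(Y, percentile=10)
# keeps only the 10 ms chunks whose variance falls in the lowest 10% and returns their
# samples flattened -- the "quiet" baseline used for the distribution fit in __main__.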
def ndist(data,Xs):
"""
given some data and a list of X posistions, return the normal
distribution curve as a Y point at each of those Xs.
"""
sigma=np.sqrt(np.var(data))
center=np.average(data)
curve=mlab.normpdf(Xs,center,sigma)
curve*=len(data)*HIST_RESOLUTION
return curve
if __name__=="__main__":
Y=np.load("sweepdata.npy")
# predict what our histogram will look like
padding=50
histCenter=int(np.average(Y))
histRange=(histCenter-padding,histCenter+padding)
histBins=int(abs(histRange[0]-histRange[1])/HIST_RESOLUTION)
# FIRST CALCULATE THE 10-PERCENTILE CURVE
data=quietParts(Y,10) # assume 10% is a good percentile to use
hist,bins=np.histogram(data,bins=histBins,range=histRange,density=False)
hist=hist.astype(np.float) # histogram of data values
curve=ndist(data,bins[:-1]) # normal distribution curve
hist[hist == 0] = np.nan
histValidIs=np.where(~np.isnan(hist))
histX,histY=bins[:-1][histValidIs],hist[histValidIs] # remove nans
baselineCurve=curve/np.max(curve) # max is good for smooth curve
# THEN CALCULATE THE WHOLE-SWEEP HISTOGRAM
hist,bins=np.histogram(Y,bins=histBins,range=histRange,density=False)
hist=hist.astype(np.float) # histogram of data values
hist[hist == 0] = np.nan
histValidIs=np.where(~np.isnan(hist))
histX,histY=bins[:-1][histValidIs],hist[histValidIs] # remove nans
histY/=np.percentile(histY,98) # percentile is needed for noisy data
# DETERMINE THE DIFFERENCE
diffX=bins[:-1][histValidIs]
diffY=histY-baselineCurve[histValidIs]
diffY[diffY<0]=np.nan
# NOW PLOT THE DIFFERENCE
plt.figure(figsize=(10,10))
plt.subplot(211)
plt.grid()
plt.plot(histX,histY,'b.',ms=10,alpha=.5,label="data points")
plt.plot(bins[:-1],baselineCurve,'r-',lw=3,alpha=.5,label="10% distribution")
plt.legend(loc='upper left',shadow=True)
plt.ylabel("normalized distribution")
plt.axis([histCenter-20,histCenter+20,0,1.5])
plt.subplot(212)
plt.grid()
plt.plot(diffX,diffY,'.',ms=10,alpha=.5,color='b')
plt.axvline(histCenter,color='r',lw=3,alpha=.5,ls='--')
plt.legend(loc='upper left',shadow=True)
plt.ylabel("difference")
plt.xlabel("histogram data points (pA)")
plt.margins(0,.1)
plt.axis([histCenter-20,histCenter+20,0,None])
plt.tight_layout()
plt.savefig("2016-12-16-tryout.png")
plt.show()
print("DONE")
|
mit
| 2,619,447,392,169,634,000
| 36.903846
| 85
| 0.698046
| false
| 3.160385
| false
| false
| false
|
gaasedelen/lighthouse
|
plugins/lighthouse/ui/coverage_settings.py
|
1
|
4506
|
import logging
from lighthouse.util.qt import *
from lighthouse.util.disassembler import disassembler
logger = logging.getLogger("Lighthouse.UI.Settings")
class TableSettingsMenu(QtWidgets.QMenu):
"""
A quick-access settings menu for Lighthouse.
"""
def __init__(self, parent=None):
super(TableSettingsMenu, self).__init__(parent)
self._visible_action = None
self._ui_init_actions()
self.setToolTipsVisible(True)
#--------------------------------------------------------------------------
# QMenu Overloads
#--------------------------------------------------------------------------
def event(self, event):
"""
Hook the QMenu event stream.
"""
action = self.activeAction()
# swallow clicks to checkbox/radiobutton actions to keep qmenu open
if event.type() == QtCore.QEvent.MouseButtonRelease:
if action and action.isEnabled() and action.isCheckable():
action.trigger()
event.accept()
return True
# handle any other events as we normally should
return super(TableSettingsMenu, self).event(event)
#--------------------------------------------------------------------------
# Initialization - UI
#--------------------------------------------------------------------------
def _ui_init_actions(self):
"""
Initialize the menu actions.
"""
# lighthouse colors
self._action_change_theme = QtWidgets.QAction("Change theme", None)
self._action_change_theme.setToolTip("Lighthouse color & theme customization")
self.addAction(self._action_change_theme)
self.addSeparator()
# painting
self._action_force_clear = QtWidgets.QAction("Force clear paint (slow!)", None)
self._action_force_clear.setToolTip("Attempt to forcefully clear stuck paint from the database")
self.addAction(self._action_force_clear)
self._action_disable_paint = QtWidgets.QAction("Disable painting", None)
self._action_disable_paint.setCheckable(True)
self._action_disable_paint.setToolTip("Disable the coverage painting subsystem")
self.addAction(self._action_disable_paint)
self.addSeparator()
# table actions
self._action_refresh_metadata = QtWidgets.QAction("Rebuild coverage mappings", None)
self._action_refresh_metadata.setToolTip("Refresh the database metadata and coverage mapping")
self.addAction(self._action_refresh_metadata)
self._action_dump_unmapped = QtWidgets.QAction("Dump unmapped coverage", None)
self._action_dump_unmapped.setToolTip("Print all coverage data not mapped to a function")
self.addAction(self._action_dump_unmapped)
self._action_export_html = QtWidgets.QAction("Generate HTML report", None)
self._action_export_html.setToolTip("Export the coverage table to HTML")
self.addAction(self._action_export_html)
self._action_hide_zero = QtWidgets.QAction("Hide 0% coverage", None)
self._action_hide_zero.setToolTip("Hide table entries with no coverage data")
self._action_hide_zero.setCheckable(True)
self.addAction(self._action_hide_zero)
def connect_signals(self, controller, lctx):
"""
Connect UI signals.
"""
self._action_change_theme.triggered.connect(lctx.core.palette.interactive_change_theme)
self._action_refresh_metadata.triggered.connect(lctx.director.refresh)
self._action_hide_zero.triggered[bool].connect(controller._model.filter_zero_coverage)
self._action_disable_paint.triggered[bool].connect(lambda x: lctx.painter.set_enabled(not x))
self._action_force_clear.triggered.connect(lctx.painter.force_clear)
self._action_export_html.triggered.connect(controller.export_to_html)
self._action_dump_unmapped.triggered.connect(lctx.director.dump_unmapped)
lctx.painter.status_changed(self._ui_painter_changed_status)
#--------------------------------------------------------------------------
# Signal Handlers
#--------------------------------------------------------------------------
@disassembler.execute_ui
def _ui_painter_changed_status(self, painter_enabled):
"""
Handle an event from the painter being enabled/disabled.
"""
self._action_disable_paint.setChecked(not painter_enabled)
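# --- hypothetical usage sketch (not part of the original module) --------------
# A minimal illustration of how a widget holding a settings tool-button might
# pop this menu; 'controller' and 'lctx' stand in for the real Lighthouse
# objects normally handed to connect_signals() above.
def _example_show_settings_menu(settings_button, controller, lctx):
    menu = TableSettingsMenu(settings_button)
    menu.connect_signals(controller, lctx)
    # anchor the popup just below the button that was clicked
    menu.popup(settings_button.mapToGlobal(settings_button.rect().bottomLeft()))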
|
mit
| -7,577,214,251,421,822,000
| 41.509434
| 104
| 0.60253
| false
| 4.461386
| false
| false
| false
|
fatiherikli/komposto.org
|
sketches/views.py
|
1
|
5951
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import json
import markdown
import base64
from uuid import uuid4
from PIL import Image
from django.shortcuts import render, get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.list import ListView
from django.http import JsonResponse
from django.views.generic.detail import DetailView
from django.views.generic import TemplateView, View
from django.views.generic import CreateView
from django.http import HttpResponse
from django.views.decorators.clickjacking import xframe_options_sameorigin
from django.core.files.base import ContentFile
from auth.mixins import LoginRequiredMixin
from sketches.models import Sketch
from sketches.forms import SketchCreationForm
from sketches.mixins import (
JSONResponseListMixin, JSONResponseDetailMixin,
PaginationMixin
)
class SketchListView(PaginationMixin, JSONResponseListMixin, ListView):
model = Sketch
def get_queryset(self):
return (
self.model.objects.all()[
self.get_offset():
self.get_limit()
]
)
class SketchDetailView(JSONResponseDetailMixin, DetailView):
model = Sketch
@csrf_exempt
def dispatch(self, *args, **kwargs):
return super(SketchDetailView, self).dispatch(*args, **kwargs)
def post(self, request, pk):
sketch = get_object_or_404(Sketch, pk=pk)
if sketch.user.pk != request.user.pk:
return HttpResponse(status=403)
payload = json.loads(request.body)
sketch.content = payload.get('content')
sketch.title = payload.get('title')
sketch.description = payload.get('description')
sketch.save();
if payload.get('snapshot'):
snapshot = payload.get('snapshot')
binary = base64.b64decode(snapshot)
content = ContentFile(
binary,
name='%s.png' % sketch.slug
)
sketch.snapshots.create(content=content)
return HttpResponse(status=202)
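# Illustrative request body for SketchDetailView.post above (hypothetical
# values); the keys mirror exactly what the handler reads, and "snapshot" is an
# optional base64-encoded PNG that becomes a new snapshot row:
#
#   {
#       "title": "My sketch",
#       "description": "A rotating cube",
#       "content": "function draw() { ... }",
#       "snapshot": "<base64-encoded PNG bytes>"
#   }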
class SketchForkView(LoginRequiredMixin, View):
@csrf_exempt
def dispatch(self, *args, **kwargs):
return super(SketchForkView, self).dispatch(*args, **kwargs)
def post(self, request, pk):
fork_of = get_object_or_404(Sketch, pk=pk)
payload = json.loads(request.body)
sketch = Sketch.objects.create(
user=request.user,
title=payload.get('title'),
description=payload.get('description'),
content=payload.get('content'),
fork_of=fork_of
)
if payload.get('snapshot'):
snapshot = payload.get('snapshot')
binary = base64.b64decode(snapshot)
content = ContentFile(
binary,
name='%s.png' % sketch.slug
)
sketch.snapshots.create(content=content)
return JsonResponse(sketch.serialize(), status=201)
class HomeView(PaginationMixin, TemplateView):
template_name = 'sketches/index.html'
model = Sketch
def get_queryset(self):
return (
self.model.objects.filter(
is_featured=True
)[
self.get_offset():
self.get_limit()
]
)
def get_context_data(self, **kwargs):
return super(HomeView, self).get_context_data(
sketches=self.get_queryset(),
next_page_url=self.get_next_page_url(),
**kwargs
)
class HelpView(TemplateView):
def get_template_names(self):
if self.request.GET.get('only-content'):
return ['sketches/help-content.html']
return ['sketches/help.html']
def get_context_data(self, **kwargs):
path = os.path.join(os.path.dirname(__file__), '../docs/help.md')
content = markdown.markdown(open(path).read())
return super(HelpView, self).get_context_data(
content=content,
**kwargs
)
class AboutView(TemplateView):
template_name = "about.html"
def get_context_data(self, **kwargs):
path = os.path.join(os.path.dirname(__file__), '../docs/about.md')
content = markdown.markdown(open(path).read())
return super(AboutView, self).get_context_data(
content=content,
**kwargs
)
class PlayView(DetailView):
template_name = 'sketches/detail.html'
model = Sketch
def dispatch(self, *args, **kwargs):
nonce = uuid4()
self.request.nonce = nonce
response = super(PlayView, self).dispatch(*args, **kwargs)
response.set_cookie('nonce', nonce)
return response
def get_context_data(self, **kwargs):
return super(PlayView, self).get_context_data(
nonce=self.request.nonce,
**kwargs
)
class SandboxView(DetailView):
template_name = 'sketches/sandbox.html'
model = Sketch
@xframe_options_sameorigin
def dispatch(self, request, *args, **kwargs):
if request.COOKIES.get('nonce') != request.GET.get('nonce'):
return HttpResponse(status=403)
return super(SandboxView, self).dispatch(request, *args, **kwargs)
class NewSketchView(CreateView):
form_class = SketchCreationForm
template_name = "sketches/new.html"
def form_valid(self, form):
form.instance.user = self.request.user
return super(NewSketchView, self).form_valid(form)
class SnapshotView(DetailView):
model = Sketch
def render_to_response(self, context, **response_kwargs):
snapshot = self.object.snapshots.latest('id')
image = Image.new("RGBA", (360, 640))
import pdb; pdb.set_trace();
image.putdata(snapshot.content)
response = HttpResponse(content_type="image/jpg")
image.save(response, "JPEG")
return response
|
mit
| -3,792,996,439,892,349,400
| 28.315271
| 74
| 0.625945
| false
| 3.956782
| false
| false
| false
|
dhuang/incubator-airflow
|
airflow/operators/jdbc_operator.py
|
1
|
1847
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.hooks.jdbc_hook import JdbcHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class JdbcOperator(BaseOperator):
"""
Executes sql code in a database using jdbc driver.
Requires jaydebeapi.
:param jdbc_conn_id: reference to a predefined database
:type jdbc_conn_id: string
:param sql: the sql code to be executed
:type sql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
Template reference are recognized by str ending in '.sql'
"""
template_fields = ('sql',)
template_ext = ('.sql',)
ui_color = '#ededed'
@apply_defaults
def __init__(
self, sql,
jdbc_conn_id='jdbc_default', autocommit=False, parameters=None,
*args, **kwargs):
super(JdbcOperator, self).__init__(*args, **kwargs)
self.parameters = parameters
self.sql = sql
self.jdbc_conn_id = jdbc_conn_id
self.autocommit = autocommit
def execute(self, context):
self.log.info('Executing: %s', self.sql)
self.hook = JdbcHook(jdbc_conn_id=self.jdbc_conn_id)
self.hook.run(self.sql, self.autocommit, parameters=self.parameters)
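# --- hypothetical usage sketch (illustrative only, not from the docs) ---------
# How the operator above might be wired into a DAG; the connection id, SQL and
# `dag` object are placeholders.
#
# run_report = JdbcOperator(
#     task_id='run_report_sql',
#     jdbc_conn_id='my_jdbc_conn',
#     sql='SELECT COUNT(*) FROM sales',
#     autocommit=True,
#     dag=dag,
# )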
|
apache-2.0
| -3,436,255,019,614,622,700
| 34.519231
| 76
| 0.682729
| false
| 3.921444
| false
| false
| false
|
moriyoshi/payjp-python
|
payjp/error.py
|
1
|
1415
|
# coding: utf-8
class PayjpException(Exception):
def __init__(self, message=None, http_body=None, http_status=None,
json_body=None):
super(PayjpException, self).__init__(message)
if http_body and hasattr(http_body, 'decode'):
try:
http_body = http_body.decode('utf-8')
except:
http_body = ('<Could not decode body as utf-8. '
'Please report to support@pay.jp>')
self.http_body = http_body
self.http_status = http_status
self.json_body = json_body
class APIError(PayjpException):
pass
class APIConnectionError(PayjpException):
pass
class CardError(PayjpException):
def __init__(self, message, param, code, http_body=None,
http_status=None, json_body=None):
super(CardError, self).__init__(message,
http_body, http_status, json_body)
self.param = param
self.code = code
class AuthenticationError(PayjpException):
pass
class InvalidRequestError(PayjpException):
def __init__(self, message, param, http_body=None,
http_status=None, json_body=None):
super(InvalidRequestError, self).__init__(
message, http_body, http_status, json_body)
self.param = param
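# --- hypothetical usage sketch -------------------------------------------------
# How calling code might branch on these exception types; `payjp.Charge.create`
# stands in for whatever API call is actually being made.
#
# try:
#     charge = payjp.Charge.create(amount=1000, currency='jpy', card=token)
# except CardError as e:
#     print('Card declined: %s (param=%s, code=%s)' % (e, e.param, e.code))
# except InvalidRequestError as e:
#     print('Bad request: %s' % e)
# except APIConnectionError:
#     print('Could not reach pay.jp')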
|
mit
| -915,718,673,730,715,300
| 25.698113
| 74
| 0.587279
| false
| 4.019886
| false
| false
| false
|
wfpinedar/fpm_data_cube
|
src/installer/dc_tasks.py
|
1
|
62143
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 24 14:01:33 2015
@author: jdh
Tile task system for celery
"""
from datacube.api.query import SortType
from matplotlib.mlab import PCA
from datetime import datetime,timedelta
import logging
import os
from osgeo import gdal
import osr  # added to export the PCA file (TODO: avoid this)
import numpy
import numpy as np
import numexpr as ne
import Image
import sklearn
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import cPickle as pickl
from sklearn.preprocessing import normalize
from datacube.api.model import DatasetType, Ls57Arg25Bands, Satellite, Ls8Arg25Bands
from datacube.api.utils import NDV, empty_array, get_dataset_metadata, get_dataset_data_with_pq, raster_create,get_dataset_data, PqaMask
from datacube.api.query import list_tiles
from datacube.api.model import TciBands, NdviBands, EviBands
from datacube.api.utils import get_mask_pqa, get_dataset_data_masked, OutputFormat
import time
from pprint import pprint
import itertools
import random
import string
from gdalconst import *
from datacube_worker import celery, cache, database
import math
from scipy.cluster.vq import kmeans,vq
#app = Celery('tasks',backend='redis://localhost',broker='amqp://')
satellites = {'ls7':Satellite.LS7,'ls8':Satellite.LS8}
FILE_EXT = {"png":".png","GTiff":".tif","VRT":".vrt","JPEG":".jpeg"}
@celery.task()
def get_tile_info(xa,ya,start,end,satellite,datasets,months=None):
"""
Get Tile Info
"""
tiles = list_tiles(x=xa,y=ya,acq_min=start,acq_max=end,satellites = satellite,dataset_types=datasets)
data = "{\"request\":\"DONE\",\"tiles\":["
data_arr = []
for tile in tiles:
if months:
print tile.start_datetime.month
if tile.start_datetime.month in months:
                data_arr.append("{\"x\":"+str(tile.x)+",\"y\":"+str(tile.y)+",\"date\":\""+str(tile.start_datetime)+"\"}")
else:
data_arr.append("{\"x\":"+str(tile.x)+",\"y\":"+str(tile.y)+",\"date\":\""+str(tile.start_datetime)+"\"}")
data+=','.join(data_arr)+"]}"
return data
@celery.task()
def get_tile_listing(xa,ya,start,end,satellite,datasets,months=None):
"""
List tiles. Months will only show the requested months
"""
tiles = list_tiles(x=xa,y=ya,acq_min=start,acq_max=end,satellites = satellite,dataset_types=datasets)
data = "{\"request\":\"DONE\",\"tiles\":["
data_arr = []
for tile in tiles:
if months:
print tile.start_datetime.month
if tile.start_datetime.month in months:
data_arr.append("{\"x\":"+str(tile.x)+",\"y\":"+str(tile.y)+",\"date\":\""+str(tile.start_datetime)+"\"}")
else:
data_arr.append("{\"x\":"+str(tile.x)+",\"y\":"+str(tile.y)+",\"date\":\""+str(tile.start_datetime)+"\"}")
data+=','.join(data_arr)+"]}"
return data
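# Hypothetical invocation sketch for the task above (arguments are illustrative
# placeholders mirroring the signature; the coordinates name a cube cell and the
# result is the JSON string built above):
#
# from datetime import datetime
# result = get_tile_listing.delay(
#     [10], [-73], datetime(2014, 1, 1), datetime(2014, 12, 31),
#     [Satellite.LS7], [DatasetType.ARG25, DatasetType.PQ25])
# print result.get()   # '{"request":"DONE","tiles":[{"x":10,"y":-73,...}]}'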
@celery.task()
def obtain_cloudfree_mosaic(x,y,start,end, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None):
StartDate = start
EndDate = end
print "starting cloudfree mosaic"
best_data = {}
band_str = "+".join([band.name for band in bands])
sat_str = "+".join([sat.name for sat in satellite])
cache_id = [str(x),str(y),str(start),str(end),band_str,sat_str,str(xsize),str(ysize),file_format,str(iterations)]
f_name = "_".join(cache_id)
f_name = f_name.replace(" ","_")
c_name = f_name
cached_res = cache.get(c_name)
if cached_res:
return str(cached_res)
f_name = os.path.join("/tilestore/tile_cache",f_name)
tiles = list_tiles(x=[x], y=[y],acq_min=StartDate,acq_max=EndDate,satellites=satellite,dataset_types=[DatasetType.ARG25,DatasetType.PQ25], sort=SortType.ASC)
tile_metadata = None
tile_count = 0
tile_filled = False
stats_file = open(f_name+'.csv','w+')
for tile in tiles:
if tile_filled:
break
if months:
print tile.start_datetime.month
if not tile.start_datetime.month in months:
continue
#print "merging on tile "+str(tile.x)+", "+str(tile.y)
tile_count+=1
dataset = DatasetType.ARG25 in tile.datasets and tile.datasets[DatasetType.ARG25] or None
if dataset is None:
print "No dataset availible"
tile_count-=1
continue
tile_metadata = get_dataset_metadata(dataset)
if tile_metadata is None:
print "NO METADATA"
tile_count-=1
continue
pqa = DatasetType.PQ25 in tile.datasets and tile.datasets[DatasetType.PQ25] or None
mask = None
mask = get_mask_pqa(pqa,[PqaMask.PQ_MASK_CLEAR],mask=mask)
if tile.dataset.find('LC8') >= 0:
nbands = map(lambda x: Ls8Arg25Bands(x.value+1),bands)
else:
nbands = bands
band_data = get_dataset_data_masked(dataset, mask=mask,bands=nbands)
if tile.dataset.find('LC8') >= 0:
band_data = dict(map(lambda (k,v): (Ls57Arg25Bands(k.value-1),v), band_data.iteritems()))
swap_arr = None
best = None
for band in bands:
if not band in best_data:
#print "Adding "+band.name
#print band_data[band]
best_data[band]=band_data[band]
best = numpy.array(best_data[band])
swap_arr=numpy.in1d(best.ravel(),-999).reshape(best.shape)
else:
best = numpy.array(best_data[band])
swap_arr=numpy.in1d(best.ravel(),-999).reshape(best.shape)
b_data = numpy.array(band_data[band])
# extend array if source data is smaller than best data
while b_data.shape[1] < swap_arr.shape[1]:
col = numpy.zeros((b_data.shape[0],1))
col.fill(-999)
b_data = numpy.append(b_data,col,axis=1)
while b_data.shape[0] < swap_arr.shape[0]:
row = numpy.zeros((1,b_data.shape[1]))
row.fill(-999)
b_data = numpy.append(b_data,row,axis=0)
best[swap_arr]=b_data[swap_arr]
best_data[band]=numpy.copy(best)
del b_data
stats_file.write(str(tile.start_datetime.year)+','+str(tile.start_datetime.month)+','+str(len(best[swap_arr]))+"\n")
del swap_arr
del best
if iterations > 0:
if tile_count>iterations:
print "Exiting after "+str(iterations)+" iterations"
break
numberOfBands=len(bands)
if numberOfBands == 0:
return "None"
if bands[0] not in best_data:
print "No data was merged for "+str(x)+", "+str(y)
return "None"
print "mosaic created"
numberOfPixelsInXDirection=len(best_data[bands[0]])
print numberOfPixelsInXDirection
numberOfPixelsInYDirection=len(best_data[bands[0]][0])
print numberOfPixelsInYDirection
pixels = numberOfPixelsInXDirection
if numberOfPixelsInYDirection > numberOfPixelsInXDirection:
pixels = numberOfPixelsInYDirection
if tile_count <1:
print "No tiles found for "+str(x)+", "+str(y)
return "None"
driver = gdal.GetDriverByName(file_format)
if driver is None:
print "No driver found for "+file_format
return "None"
#print f_name+'.tif'
raster = driver.Create(f_name+'.tif', pixels, pixels, numberOfBands, data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
raster.SetGeoTransform(tile_metadata.transform)
raster.SetProjection(tile_metadata.projection)
index = 1
stats_file.close()
for band in bands:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(-999)
stack_band.WriteArray(best_data[band])
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
cache.set(c_name,f_name+".tif")
return f_name+".tif"
@celery.task()
def matrix_obtain_mosaic(x,y,start,end, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None, normalized=False):
"""
Obtains a dict with the query results, one matrix per band
MATRIX OBTAIN MOSAIC
"""
StartDate = start
EndDate = end
print("____________________matriz_obtain_mosaic____________________")
tiles = list_tiles(x=[x], y=[y],acq_min=StartDate,acq_max=EndDate,satellites=satellite,dataset_types=[DatasetType.ARG25,DatasetType.PQ25], sort=SortType.ASC)
tile_metadata = None
tile_count = 0
tile_filled = False
total_ins = 0
all_bands={}
avgs_band={}
st_band={}
count_band={}
for tile in tiles:
if tile_filled:
break
if months:
print tile.start_datetime.month
if not tile.start_datetime.month in months:
continue
tile_count+=1
dataset = DatasetType.ARG25 in tile.datasets and tile.datasets[DatasetType.ARG25] or None
if dataset is None:
print "No dataset availible"
tile_count-=1
continue
tile_metadata = get_dataset_metadata(dataset)
if tile_metadata is None:
print "NO METADATA"
tile_count-=1
continue
pqa = DatasetType.PQ25 in tile.datasets and tile.datasets[DatasetType.PQ25] or None
mask = None
mask = get_mask_pqa(pqa,[PqaMask.PQ_MASK_CLEAR],mask=mask)
band_data = get_dataset_data_masked(dataset, mask=mask,bands=bands)
del mask
for band in band_data:
# print "Adding "+band.name
data = numpy.array(band_data[band]).astype(numpy.float32)
non_data=numpy.in1d(data.ravel(),-999).reshape(data.shape)
data[non_data]=numpy.NaN
if normalized:
m=np.nanmean(data)
st=np.nanstd(data)
if not np.isnan(m):
avgs_band[band.name]=avgs_band[band.name]+m if avgs_band.has_key(band.name) else m
st_band[band.name]=st_band[band.name]+st if st_band.has_key(band.name) else st
count_band[band.name] =(count_band[band.name]+1) if count_band.has_key(band.name) else 1
if not np.isnan(m):
# print ("Media: "+str(m)+" STD: "+str(st))
data=np.true_divide(np.subtract(data,m),st)
if not np.isnan(data).all():
if all_bands.has_key(band.name):
all_bands[band.name]=numpy.dstack((all_bands[band.name], data))
else:
all_bands[band.name]=data
if normalized:
for band in bands:
if count_band.has_key(band.name):
all_bands[band.name]=(all_bands[band.name]*(st_band[band.name]/count_band[band.name]))+(avgs_band[band.name]/count_band[band.name])
return all_bands,tile_metadata
@celery.task()
def obtain_median(validate_range,x,y,start,end, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_Float32,months=None):
median_bands,meta=matrix_obtain_mosaic(x,y,start,end, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=data_type,months=None,normalized=True)
print "OBTAIN MEDIAN"
print "Terminó consulta"
median_data=None
for bandCONST in bands:
#b =np.apply_along_axis(median_min,2,median_bands[band],validate_range)
band=bandCONST.name
print band
if not band in median_bands:
continue
print median_bands[band].shape
if len(median_bands[band].shape)>2:
b=np.nanmedian(median_bands[band],2)
allNan=~np.isnan(median_bands[band])
b[np.sum(allNan,2)<validate_range]=np.nan
del allNan
else:
b=median_bands[band]
if validate_range>1:
b[:]=np.nan
if median_data is None:
median_data=b
else:
median_data=np.dstack((median_data, b))
#print median_data.shape
del median_bands
return median_data,meta
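# Behaviour note (added for clarity): obtain_median returns, per band, the
# per-pixel median of every valid (cloud-free, non -999) observation in the
# period, stacked along the third axis in band order. Pixels observed fewer
# than `validate_range` times are set to NaN, so with validate_range=2 a pixel
# seen cleanly only once in the whole period is discarded.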
@celery.task()
def obtain_median_mosaic(validate_range,x,y,start,end, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CFloat32,months=None):
medians,meta=obtain_median(validate_range,x,y,start,end, bands, satellite,iterations,xsize,ysize,file_format,data_type,months)
if medians is None:
return "None"
pprint(medians.shape)
pprint(len(medians.shape))
nf=medians.shape[0]
nc=medians.shape[1]
if len(medians.shape)>=3:
nb=medians.shape[2]
else:
nb=1
band_str = "+".join([band.name for band in bands])
sat_str = "+".join([sat.name for sat in satellite])
cache_id = [str(x),str(y),str(start),str(end),band_str,sat_str,str(xsize),str(ysize),file_format,str(iterations)]
f_name = "_".join(cache_id)
f_name = "res_median_"+f_name.replace(" ","_")
c_name = f_name
f_name = os.path.join("/tilestore/tile_cache",f_name)
tile_metadata=meta
numberOfBands=nb
if numberOfBands == 0:
return "None"
numberOfPixelsInXDirection=nc
print numberOfPixelsInXDirection
numberOfPixelsInYDirection=nf
print numberOfPixelsInYDirection
pixels = numberOfPixelsInXDirection
if numberOfPixelsInYDirection > numberOfPixelsInXDirection:
pixels = numberOfPixelsInYDirection
driver = gdal.GetDriverByName(file_format)
if driver is None:
print "No driver found for "+file_format
return "None"
raster = driver.Create(f_name+'.tif', pixels, pixels, numberOfBands, data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
raster.SetGeoTransform(tile_metadata.transform)
raster.SetProjection(tile_metadata.projection)
index = 1
#medians[np.isnan(medians)]=-999
for band in range (0,nb):
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(-999)
if nb==1:
stack_band.WriteArray(medians)
else:
stack_band.WriteArray(medians[:,:,band])
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
cache.set(c_name,f_name+".tif")
return f_name+".tif"
def obtain_histogram_info(x,y,start,end, selectedBand, satellite):
median_bands,meta=matrix_obtain_mosaic(x,y,start,end, [selectedBand], satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None)
median_data=None
band=selectedBand.name
if not(median_bands.has_key(band)):
pprint('No data for period'+str(x)+' '+str(y)+' '+str(start)+' '+str(end))
return None,[],0,0,0
allNan=~np.isnan(median_bands[band])
tileSizeArray=allNan.shape
numberTiles=1
if len(tileSizeArray)>=3:
numberTiles=tileSizeArray[2]
if numberTiles>1:
matrixCount=np.sum(allNan,2)
else:
matrixCount=np.sum(allNan)
del allNan
histogram=np.histogram(np.ravel(matrixCount),density=False)
bincount=np.bincount(np.ravel(matrixCount))
min=np.min(matrixCount)
max=np.max(matrixCount)
return histogram,bincount,min,max,numberTiles
@celery.task()
def obtain_forest_noforest(x, y, start_date, end_date, satellite = [Satellite.LS7], months = None, min_ok = 1, vegetation_rate = 0.5, ndvi_threshold = 0.7, slice_size = 3):
period_ndvi,metadata = obtain_ndvi(x, y, start_date, end_date, satellite = satellite, months = months, min_ok = min_ok)
if period_ndvi is None:
return "None"
height = period_ndvi.shape[0]
width = period_ndvi.shape[1]
nan_mask=np.isnan(period_ndvi)
original_ndvi=period_ndvi.astype(float)
    original_ndvi=np.clip(original_ndvi,-1,1)
for y1 in xrange(0, height, slice_size):
for x1 in xrange(0, width, slice_size):
x2 = x1 + slice_size
y2 = y1 + slice_size
if(x2 > width):
x2 = width
if(y2 > height):
y2 = height
submatrix = period_ndvi[y1:y2,x1:x2]
ok_pixels = np.count_nonzero(~np.isnan(submatrix))
submatrix[np.isnan(submatrix)]=-1
if ok_pixels==0:
period_ndvi[y1:y2,x1:x2] = 1
elif float(np.sum(submatrix>ndvi_threshold))/float(ok_pixels) >= vegetation_rate :
period_ndvi[y1:y2,x1:x2] = 2
else:
period_ndvi[y1:y2,x1:x2] = 1
period_ndvi[nan_mask] = np.nan
composite_all=np.dstack((period_ndvi,original_ndvi))
pprint("Max nvdi es:"+str(np.nanmax(original_ndvi)))
pprint("Min nvdi es:"+str(np.nanmin(original_ndvi)))
    # Prepare the base name for the output files
bands = [ Ls57Arg25Bands.RED, Ls57Arg25Bands.NEAR_INFRARED ]
bands_str = '+'.join(each_band.name for each_band in bands)
satellites_str = '+'.join(each_satellite.name for each_satellite in satellite)
image_filename = ("_".join([str(x), str(y), str(start_date), str(end_date), bands_str, satellites_str])).replace(" ","_")
# generate_rgb_image(period_ndvi, period_ndvi, period_ndvi, temp_directory, output_name = "FOREST_NOFOREST_" + image_filename, width = width, height = height, scale = 0.3)
file=generate_geotiff_image(composite_all, width, height, "/tilestore/tile_cache/", metadata = metadata, output_name = "FOREST_NOFOREST_" + image_filename)
return file
def obtain_ndvi(x, y, start_date, end_date, satellite = [Satellite.LS7], months = None, min_ok = 2):
print "BEGIN NDVI PROCESS"
    # List the bands needed to compute NDVI
bands = [ Ls57Arg25Bands.RED, Ls57Arg25Bands.NEAR_INFRARED ]
    # Obtain the median composite for period 1
period, metadata = obtain_median(min_ok,x, y, start_date, end_date,bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_Float32,months=None)
if period is None:
return None, metadata
mask_nan=np.any(np.isnan(period),axis=2)
    # Split out the red and near-infrared channels
period_red = period[:,:,0]
period_red[mask_nan]=0
period_nir = period[:,:,1]
period_nir[mask_nan]=0
    # Compute NDVI for period 1
period_ndvi = np.true_divide( np.subtract(period_nir,period_red) , np.add(period_nir,period_red) )
period_nvdi2=np.copy(period_ndvi)
np.clip(period_ndvi,0,1,out=period_nvdi2)
period_nvdi2[mask_nan]=np.nan
return period_nvdi2, metadata
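# Worked NDVI example (illustrative numbers): with a near-infrared value of
# 0.50 and a red value of 0.10,
#     NDVI = (NIR - RED) / (NIR + RED) = (0.50 - 0.10) / (0.50 + 0.10) ~= 0.67
# Dense vegetation tends toward 1 and bare soil toward 0; the clip to [0, 1]
# above discards the negative (water/no-data) range before any forest
# threshold is applied by the caller.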
def obtain_bands_dict(x, y, start, end, bands, satellite, months=None):
"""
Obtains a dict with the query results, one matrix per band
"""
tiles = list_tiles(x=[x], y=[y],acq_min=start,acq_max=end,satellites=satellite,dataset_types=[DatasetType.ARG25,DatasetType.PQ25], sort=SortType.ASC)
tile_metadata = None
tile_count = 0
tile_filled = False
total_ins = 0
all_bands={}
for tile in tiles:
if tile_filled:
break
if months:
print tile.start_datetime.month
if not tile.start_datetime.month in months:
continue
tile_count+=1
dataset = DatasetType.ARG25 in tile.datasets and tile.datasets[DatasetType.ARG25] or None
if dataset is None:
print "No dataset availible"
tile_count-=1
continue
tile_metadata = get_dataset_metadata(dataset)
if tile_metadata is None:
print "NO METADATA"
tile_count-=1
continue
pqa = DatasetType.PQ25 in tile.datasets and tile.datasets[DatasetType.PQ25] or None
mask = None
mask = get_mask_pqa(pqa,[PqaMask.PQ_MASK_CLEAR],mask=mask)
band_data = get_dataset_data_masked(dataset, mask=mask,bands=bands)
for band in band_data:
data = np.array(band_data[band]).astype(np.float32)
non_data=np.in1d(data.ravel(),-999).reshape(data.shape)
data[non_data]=np.NaN
if all_bands.has_key(band.name):
all_bands[band.name]=np.dstack((all_bands[band.name], data))
else:
all_bands[band.name]=np.array(data)
return all_bands, tile_metadata
def ravel_compounds(compounds):
flattened_compounds = None
for compound in xrange(0, compounds.shape[2]):
flattened_compound = compounds[:,:,compound].ravel()
if flattened_compounds is None:
flattened_compounds = flattened_compound
else:
flattened_compounds = np.vstack((flattened_compounds, flattened_compound))
return flattened_compounds.T
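# Shape note: ravel_compounds turns a (rows, cols, n_compounds) stack into a
# (rows*cols, n_compounds) matrix with one row per pixel, e.g. an input of
# shape (4000, 4000, 3) becomes (16000000, 3) -- the layout PCA expects.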
def obtain_medians_compound(x, y, start, end, bands, satellite, months = None, validate_range = 2):
median_bands, metadata = obtain_bands_dict(x, y, start, end, bands, satellite, months)
print "Terminó consulta"
if median_bands is None:
return None, metadata
median_data=None
for bandCONST in bands:
#b =np.apply_along_axis(median_min,2,median_bands[band],validate_range)
band=bandCONST.name
print band
print median_bands[band].shape
if len(median_bands[band].shape)>2:
b=np.nanmedian(median_bands[band],2)
allNan=~np.isnan(median_bands[band])
b[np.sum(allNan,2)<validate_range]=np.nan
del allNan
else:
b=median_bands[band]
if validate_range>1:
b[:]=np.nan
if median_data is None:
median_data=b
else:
median_data=np.dstack((median_data, b))
#print median_data.shape
del median_bands
return median_data,metadata
@celery.task()
def obtain_convolution_nvdi(prueba,NDVI_result_final,percetage_ndvi=0.3,threshold_ndvi=0.7):
print ("_______________obtain_convolution_nvdiL____________")
[height,weight]=NDVI_result_final.shape
#print ("Alto",height)
#print ("Ancho",weight)
test=(prueba+"entro convolucion")
nueva_matriz=None
for x1 in xrange(0,height,3):
for y1 in xrange(0,weight,3):
auxX=x1+3
auxY=y1+3
if(auxX>=height):
auxX=height-1
if(auxY>=weight):
auxY=weight-1
auxMatriz=NDVI_result_final[xrange(x1,auxX),:] [:,xrange(y1,auxY)]
#print auxMatriz.shape
count_pixel=auxMatriz.shape[0]*auxMatriz.shape[1]
pixel_nan=np.count_nonzero(np.isnan(auxMatriz))
pixel_forest=np.sum(np.where(auxMatriz>threshold_ndvi,1,0))
if(x1==0 and y1==0):
print("AUX_X______",auxX)
print("AUX_Y_______",auxY)
print("AUX_AUXM______",auxMatriz)
print("AUX_COUPIX______",count_pixel)
print("AUX_COU_NAN______",pixel_nan)
print("AUX_PIX_FOR______",pixel_forest)
if(count_pixel-pixel_nan>0):
auxResult=(pixel_forest)/(count_pixel-pixel_nan)
if(auxResult>percetage_ndvi):
#print ("ENTRO ERROR")
NDVI_result_final[x1:auxX, y1:auxY]=1
else:
NDVI_result_final[x1:auxX, y1:auxY]=0
else:
NDVI_result_final[x1:auxX, y1:auxY]=np.nan
if(x1==0 and y1==0):
print ("FINAL TEST",NDVI_result_final[xrange(x1,auxX),:] [:,xrange(y1,auxY)])
print NDVI_result_final
return test
def generate_geotiff_image(input_array, width, height, output_path, metadata, output_name = "oimage4", data_type = gdal.GDT_Float32 ):
n_bands=1
if len(input_array.shape)>=3:
n_bands = input_array.shape[2]
gtiff_driver = gdal.GetDriverByName('GTiff')
f_name=output_path + output_name
raster = gtiff_driver.Create( f_name+ '.tif', width, height, n_bands, eType = data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
if metadata:
raster.SetGeoTransform(metadata.transform)
srs = osr.SpatialReference()
srs.SetWellKnownGeogCS("WGS84")
raster.SetProjection(srs.ExportToWkt())
for band in xrange(0,n_bands):
raster_band = raster.GetRasterBand(band+1)
raster_band.SetNoDataValue(-999)
if n_bands==1:
raster_band.WriteArray(input_array)
else:
raster_band.WriteArray(input_array[:,:,band])
raster_band.ComputeStatistics(True)
raster_band.FlushCache()
raster.FlushCache()
return f_name+ '.tif'
def generate_rgb_image(r_array, g_array, b_array, output_path, output_name = "oimage", width = None, height = None, scale = 1, format = "jpg"):
input_array = np.zeros(((width*height),3))
input_array[:,0] = r_array
input_array[:,1] = g_array
input_array[:,2] = b_array
if len(input_array.shape) == 2:
input_array = input_array.reshape((height, width, 3))
max_value = np.nanmax(input_array)
input_array = (input_array/max_value)*255
output_img = Image.fromarray(np.uint8(input_array), 'RGB')
width = int(np.ceil(output_img.size[0]*scale))
height = int(np.ceil(output_img.size[1]*scale))
output_img = output_img.resize((width, height))
output_img.save(output_path + output_name + "." + format)
@celery.task()
def obtain_pca_png(validate_range,x,y,start1,end1,start2,end2, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None):
median_p1=obtain_median(validate_range,x,y,start1,end1, bands, satellite,iterations,xsize,ysize,file_format,data_type,months)
median_p2=obtain_median(validate_range,x,y,start2,end2, bands, satellite,iterations,xsize,ysize,file_format,data_type,months)
pickl.dump( median_p1, open( "median_p_1.p", "wb" ) )
pickl.dump( median_p2, open( "median_p_2.p", "wb" ) )
    ## SAVING MEDIAN DATA
component_p1=pre_process_ravel(median_p1)
component_p2=pre_process_ravel(median_p2)
    #________________ normalization loop ______________________________
for x in xrange(0,component_p1.shape[1]):
component_p2[:,x]=normalize(component_p1[:,x],component_p2[:,x])
    #________________ mask loop ______________________________
mask_comp = None
for x in xrange(0,component_p1.shape[1]):
if(mask_comp is None) :
mask_comp = combine_masks(np.zeros(len(component_p1[:,x])),component_p1[:,x])
mask_comp = combine_masks(mask_comp,component_p2[:,x])
else:
mask_comp = combine_masks(mask_comp,(combine_masks(component_p1[:,x],component_p2[:,x])))
    #________________ replace-NaN loop ______________________________
pre_pca_bands=numpy.concatenate((component_p1,component_p2),1)
a= pre_pca_bands.flatten()
median_array_pre_pca=np.nanmedian(a)
print("MEDIANA PREPCA",median_array_pre_pca)
for x in xrange(0,pre_pca_bands.shape[1]):
pre_pca_bands[:,x]=convert_nan_to_median(pre_pca_bands[:,x],median_array_pre_pca)
print ("RESULTADO FINAL",pre_pca_bands.shape)
print("COMPUESTO SIN NAN",pre_pca_bands)
print ("RESULTADO MASCARA PARA COMPARAR DATOS ",mask_comp)
    ## SAVING TEST DATA
print ("GUARDE LOS DATOS")
f_pca=PCA(pre_pca_bands)
size_ma=f_pca.Y.T.shape
pickl.dump( f_pca, open( "f_pca2.p", "wb" ) )
pickl.dump( mask_comp, open( "mask_comp2.p", "wb" ) )
presult=f_pca.Y[:,0].reshape(3705,3705)
presult2=f_pca.Y[:,2].reshape(3705,3705)
    # normalization
presult *= (255.0/presult.max())
im = Image.fromarray(np.uint8(cm.gist_earth(presult)*255))
im2 = Image.fromarray(np.uint8(cm.gist_earth(presult2)*255))
print ("MATRIX ok2",im)
im.save('test__TEST2.jpeg')
im2.save('test72.png')
return 0
@celery.task()
def obtain_median_png(validate_range,x,y,start1,end1,start2,end2, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None):
mediana= pickl.load( open( "median_p1.p", "rb" ) )
print("PRUEBA",prueba)
print("PRUEBA2",prueba.shape)
print mediana
print mediana.shape
#rgbArray = np.zeros((512,512,3), 'uint8')
r=mediana[..., 0]
g=mediana[..., 1]
    b=mediana[..., 2]
print("PRUEBA",mediana)
print("R",r)
print("G",g)
print("B",b)
return 0
def obtain_pca_all(validate_range,x,y,start1,end1,start2,end2, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None):
print("OBTAIN PCA_ALL")
raw_b1,meta=obtain_median(validate_range,x,y,start1,end1, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None)
median_p1=raw_b1
nf=raw_b1.shape[0]
nc=raw_b1.shape[1]
nb=raw_b1.shape[2]*2
median_p2,meta2=obtain_median(validate_range,x,y,start2,end2, bands, satellite,iterations,xsize,ysize,file_format,data_type,months)
pickl.dump( median_p1, open( "26_median_p_1_all_f.p", "wb" ) )
pickl.dump( median_p2, open( "26_median_p_2_all_f.p", "wb" ) )
    ## SAVING MEDIAN DATA
component_p1=pre_process_ravel(raw_b1)
component_p2=pre_process_ravel(median_p2)
    #________________ normalization loop ______________________________
for x in xrange(0,component_p1.shape[1]):
component_p2[:,x]=normalize(component_p1[:,x],component_p2[:,x])
    #________________ mask loop ______________________________
mask_comp = None
for x in xrange(0,component_p1.shape[1]):
if(mask_comp is None) :
mask_comp = component_p1[:,x]
mask_comp = combine_masks(mask_comp,component_p2[:,x])
else:
mask_comp = combine_masks(mask_comp,(combine_masks(component_p1[:,x],component_p2[:,x])))
    #________________ replace-NaN loop ______________________________
pre_pca_bands=numpy.concatenate((component_p1,component_p2),1)
a= pre_pca_bands.flatten()
median_array_pre_pca=np.nanmedian(a)
print("MEDIANA PREPCA",median_array_pre_pca)
for x in xrange(0,pre_pca_bands.shape[1]):
pre_pca_bands[:,x]=convert_nan_to_median(pre_pca_bands[:,x],median_array_pre_pca)
print ("RESULTADO FINAL",pre_pca_bands.shape)
print("COMPUESTO SIN NAN",pre_pca_bands)
print ("RESULTADO MASCARA PARA COMPARAR DATOS ",mask_comp)
    ## SAVING TEST DATA
print ("GUARDE LOS DATOS")
f_pca=PCA(pre_pca_bands)
size_ma=f_pca.Y.T.shape
presult=f_pca.Y.T
pickl.dump( f_pca, open( "26_pca_final_25.p", "wb" ) )
pickl.dump( presult, open( "26_pca_final_trasn.p", "wb" ) )
presult1=f_pca.Y[:,0].reshape(3705,3705)
presult2=f_pca.Y[:,2].reshape(3705,3705)
    # normalization
presult1 *= (255.0/presult1.max())
im = Image.fromarray(np.uint8(cm.gist_earth(presult1)*255))
im2 = Image.fromarray(np.uint8(cm.gist_earth(presult2)*255))
print ("MATRIX ok2",im)
im.save('26_presentacion.jpeg')
im2.save('26_presentacion_norma.jpeg')
#-_-------------------_-----------------------
km_centroids,_=kmeans(f_pca.Y, 2) #Generar los centroides
print km_centroids
"""
Guardar el archivo:
"""
band_str = "+".join([band.name for band in bands])
sat_str = "+".join([sat.name for sat in satellite])
cache_id = [str(x),str(y),str(start1),str(end1),str(start2),str(end2),band_str,sat_str,str(xsize),str(ysize),file_format,str(iterations)]
f_name = "_".join(cache_id)
f_name = "26_celery"+f_name.replace(" ","_")
c_name = f_name
driver = gdal.GetDriverByName(file_format)
if driver is None:
print "No driver found for "+file_format
return "None"
c_file=os.path.join("/tilestore/tile_cache","centroids_"+f_name+".csv")
print c_file
numpy.savetxt(c_file,km_centroids)
f_name = os.path.join("/tilestore/tile_cache",f_name)
raster = driver.Create(f_name+'.tif', nf, nc, nb, data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
    raster.SetGeoTransform((x-0.00025, 0.00027, 0.0, y+1.0002400000000002, 0.0, -0.00027)) # we should take this from the original dataset, or compute it properly
srs = osr.SpatialReference()
srs.SetWellKnownGeogCS("WGS84")
raster.SetProjection(srs.ExportToWkt())
index = 1
for bn in presult:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(-999)
bn[numpy.isnan(bn)]=-999
stack_band.WriteArray(bn.reshape(nf,nc))
stack_band.ComputeStatistics(False)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
cache.set(c_name,f_name+".tif")
return f_name+".tif"
# Function that applies the NaN elements of mask2 to array1
@celery.task()
def apply_nan(array1,mask2):
if (len(array1)==len(mask2)):
i = 0
while i < len(array1):
if(np.isnan(mask2[i])):
array1[i] = np.nan
i+=1
return array1
else:
print("ERROR DE TAMANOS DE MASCARA DIFERENTES DIFERENTES")
def generate_component_kmean(km_centroids,pca_final_with_nan):
indices = [numpy.where(km_centroids<=x)[0][0] for x in pca_final_with_nan]
print indices
return 99
@celery.task()
def convert_nan_to_median(array1,median_array_pre_pca):
f_result=[]
i=0
media=median_array_pre_pca
#print ("media ",media)
while i<len(array1) :
if(np.isnan(array1[i])):
f_result.append(media)
else:
f_result.append(array1[i])
i+=1
return f_result
@celery.task()
def combine_masks(mask1, mask2):
if (len(mask1)==len(mask2)):
i = 0
while i < len(mask1):
if(np.isnan(mask2[i])):
mask1[i] = np.nan
i+=1
return mask1
else:
print("ERROR DE TAMANOS DE MASCARA DIFERENTES DIFERENTES")
@celery.task()
def normalize(final_composite1,final_composite2):
desv_final_mask2=np.nanstd(final_composite2)
mean_final_1=np.nanmean(final_composite1)
mean_final_2=np.nanmean(final_composite2)
temp_mask2=((final_composite2-mean_final_2)/desv_final_mask2)+mean_final_1
return temp_mask2
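# Normalisation sketch: composite 2 is standardised with its own statistics and
# re-centred on composite 1's mean so both periods share a comparable scale
# before PCA. E.g. with mean2=180, std2=40 and mean1=120, a pixel value of 220
# maps to ((220 - 180) / 40) + 120 = 121.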
@celery.task()
def pre_process_ravel(pre_pca):
new_pca_input=None
for d in xrange(0,pre_pca.shape[2]):
b=pre_pca[:,:,d].ravel()
if new_pca_input is None:
new_pca_input=b
else:
new_pca_input=numpy.vstack((new_pca_input,b))
#print ("ENVIO_VSTACK",new_pca_input.T.shape)
return new_pca_input.T
@celery.task()
def median_min(array_bands,validate_range):
count_no_nan=np.count_nonzero(np.isnan(array_bands))
len_data=len(array_bands)
if((len_data - count_no_nan)<=validate_range):
return np.nanmedian(array_bands)
else:
return np.nan
@celery.task()
def mask_range(array_bands,validate_range):
count_nan=np.count_nonzero(np.isnan(array_bands))
len_data=len(array_bands)
if((len_data - count_nan)>validate_range):
return True
else:
return False
@celery.task()
def validate_mask(array_bands):
count_nan=np.count_nonzero(np.isnan(array_bands))
len_data=len(array_bands)
if count_nan!=len_data :
return False
else:
return True
@celery.task()
def obtain_mask(validate_range,x,y,start,end, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None):
mosaic_bands,meta=matrix_obtain_mosaic(x,y,start,end, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CInt16,months=None)
mask_data=None
for band in mosaic_bands:
b =np.apply_along_axis(mask_range,2,mosaic_bands[band],validate_range)
if mask_data is None:
mask_data=b
else:
mask_data=np.dstack((mask_data, b))
print mask_data.shape
return mask_data
@celery.task()
def assemble_mosaic(file_list):
print "Assembling mosaic"
print file_list
fl = None
try:
if type(file_list) is list:
fl = [f for f in file_list if f!="None"]
else:
fl = [file_list]
except:
fl = [file_list]
if len(fl) <1:
return "None"
c_name = hashlib.sha512("_".join(fl)).hexdigest()[0:32]
cmd = "gdalbuildvrt -hidenodata /tilestore/tile_cache/"+c_name+".vrt "+" ".join(fl)
print cmd
os.system(cmd)
if not os.path.exists("/tilestore/tile_cache/"+c_name+".vrt"):
return "None"
res = "/tilestore/tile_cache/"+c_name+".vrt"
ret_prod = []
ret_prod.append(res)
for fi in fl:
ret_prod.append(fi)
return ret_prod
@celery.task()
def get_bounds(input_file):
in_file = None
print input_file
if isinstance(input_file,(str)):
if input_file == "None":
return "None"
else:
in_file = input_file
else:
in_file = input_file[0]
ds = gdal.Open(in_file)
cols = ds.RasterXSize
rows = ds.RasterYSize
gt = ds.GetGeoTransform()
bb1 = originx = gt[0]
bb4 = originy = gt[3]
pixelWidth = gt[1]
pixelHeight = gt[5]
width = cols*pixelWidth
height = rows*pixelHeight
bb3 = originx+width
bb2 = originy+height
del ds
return str(bb2)+","+str(bb1)+","+str(bb4)+","+str(bb3)
@celery.task()
def translate_files(file_list,file_format,output_scale,output_size,output_datatype,output_bands,additional_arguments=None):
print file_list
fl = None
try:
if type(file_list) is list:
fl = [f for f in file_list if f!="None"]
else:
fl = [file_list]
except:
fl = [file_list]
addy = ""
b_arg= ""
if output_bands is not None:
b_arg = " ".join(["-b "+str(b) for b in output_bands])
res = []
if additional_arguments:
addy = " "+" ".join(additional_arguments)
for f in fl:
print "Translating "+f
ds = gdal.Open(f)
rc = ds.RasterCount
if output_bands is not None:
if rc < len(output_bands):
print "Less bands than requested!"
b_arg = "-b 1"
del ds
out_scale = ""
out_dt = ""
out_size = ""
b_l_arg = ""
if output_scale is not None and b_arg != "-b 1":
out_scale = " -scale "+output_scale
if output_datatype is not None:
out_dt = " -ot "+output_datatype
if output_size is not None:
out_size = " -outsize "+output_size
if output_bands is not None and b_arg != "-b 1":
b_l_arg = " "+b_arg
b_tmp = ""
if output_bands is not None:
b_tmp = "_".join([str(b) for b in output_bands])
c_arr = [f,str(file_format),str(output_scale),str(output_size),str(output_datatype),b_tmp,addy]
c_name = "_".join(c_arr)
c_name = hashlib.sha512(c_name).hexdigest()[0:32]
tar_img = os.path.join("/tilestore/tile_cache/",c_name+FILE_EXT[file_format])
tar_img_marked = os.path.join("/tilestore/tile_cache/",c_name+"_marked"+FILE_EXT[file_format])
cmd = "gdal_translate -of "+file_format+out_dt+out_scale+out_size+b_l_arg+addy+" "+f+" "+tar_img
print cmd
os.system(cmd)
if os.path.exists(tar_img):
if file_format == "png" or file_format == "PNG":
cmd = "convert -transparent \"#000000\" "+tar_img+" "+tar_img
os.system(cmd);
cmd = "convert "+tar_img+" -background red -alpha remove "+tar_img_marked
os.system(cmd)
res.append(tar_img)
res.append(tar_img_marked)
return res
@celery.task()
def apply_color_table_to_files(file_list,output_band,color_table):
print file_list
fl = None
try:
if type(file_list) is list:
fl = [f for f in file_list if f!="None"]
else:
fl = [file_list]
except:
fl = [file_list]
res = []
for f in fl:
print "Coloring "+f
c_arr = [f,str(output_band),color_table]
c_name = "_".join(c_arr)
c_name = hashlib.sha512(c_name).hexdigest()[0:32]
tar_img = os.path.join("/tilestore/tile_cache/",c_name+".tif")
tmp_img = os.path.join("/tilestore/tile_cache/",c_name)
cmd = "gdal_translate "+f+" "+tmp_img+"_"+str(output_band)+".tif"+" -b "+str(output_band)
os.system(cmd)
print "Applying color table"
cmd = "gdaldem color-relief -of GTiff "+tmp_img+"_"+str(output_band)+".tif"+" "+color_table+" "+tar_img
print cmd
os.system(cmd)
if os.path.exists(tar_img):
#cmd = "convert -transparent \"#000000\" "+tar_img+" "+tar_img
#os.system(cmd);
res.append(tar_img)
return res
@celery.task()
def preview_cloudfree_mosaic(x,y,start,end, bands, satellite,iterations=0,xsize=2000,ysize=2000,file_format="GTiff",data_type=gdal.GDT_CInt16):
def resize_array(arr,size):
r = numpy.array(arr).astype(numpy.int16)
i = Image.fromarray(r)
i2 = i.resize(size,Image.NEAREST)
r2 = numpy.array(i2)
del i2
del i
del r
return r2
StartDate = start
EndDate = end
best_data = {}
band_str = "+".join([band.name for band in bands])
sat_str = "+".join([sat.name for sat in satellite])
cache_id = ["preview",str(x),str(y),str(start),str(end),band_str,sat_str,str(xsize),str(ysize),file_format,str(iterations)]
f_name = "_".join(cache_id)
f_name = f_name.replace(" ","_")
c_name = f_name
cached_res = cache.get(c_name)
if cached_res:
return str(cached_res)
f_name = os.path.join("/tilestore/tile_cache",f_name)
tiles = list_tiles(x=[x], y=[y],acq_min=StartDate,acq_max=EndDate,satellites=satellite,dataset_types=[DatasetType.ARG25,DatasetType.PQ25], sort=SortType.ASC)
tile_metadata = None
tile_count = 0
tile_filled = False
for tile in tiles:
if tile_filled:
break
print "merging on tile "+str(tile.x)+", "+str(tile.y)
tile_count+=1
dataset = DatasetType.ARG25 in tile.datasets and tile.datasets[DatasetType.ARG25] or None
if dataset is None:
print "No dataset availible"
tile_count-=1
continue
tile_metadata = get_dataset_metadata(dataset)
if tile_metadata is None:
print "NO METADATA"
tile_count-=1
continue
pqa = DatasetType.PQ25 in tile.datasets and tile.datasets[DatasetType.PQ25] or None
mask = None
mask = get_mask_pqa(pqa,[PqaMask.PQ_MASK_CLEAR],mask=mask)
band_data = get_dataset_data_masked(dataset, mask=mask,bands=bands)
swap_arr = None
for band in band_data:
if not band in best_data:
print "Adding "+band.name
bd = resize_array(band_data[band],(2000,2000))
print bd
best_data[band]=bd
del bd
else:
best = resize_array(best_data[band],(2000,2000))
swap_arr=numpy.in1d(best.ravel(),-999).reshape(best.shape)
b_data = numpy.array(band_data[band])
best[swap_arr]=b_data[swap_arr]
best_data[band]=numpy.copy(best)
del b_data
del best
del swap_arr
if iterations > 0:
if tile_count>iterations:
print "Exiting after "+str(iterations)+" iterations"
break
numberOfBands=len(bands)
if numberOfBands == 0:
return "None"
if bands[0] not in best_data:
print "No data was merged for "+str(x)+", "+str(y)
return "None"
numberOfPixelsInXDirection=len(best_data[bands[0]])
numberOfPixelsInYDirection=len(best_data[bands[0]][0])
if tile_count <1:
print "No tiles found for "+str(x)+", "+str(y)
return "None"
driver = gdal.GetDriverByName(file_format)
if driver is None:
print "No driver found for "+file_format
return "None"
print f_name+'.tif'
raster = driver.Create(f_name+'.tif', numberOfPixelsInXDirection, numberOfPixelsInYDirection, numberOfBands, data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
gt = tile_metadata.transform
gt2 = (gt[0],gt[1]*2.0,gt[2],gt[3],gt[4],gt[5]*2.0)
tile_metadata.transform = gt2
raster.SetGeoTransform(tile_metadata.transform)
print tile_metadata.transform
raster.SetProjection(tile_metadata.projection)
index = 1
for band in bands:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(-999)
stack_band.WriteArray(best_data[band])
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
cache.set(c_name,f_name+".tif")
return f_name+".tif"
import hashlib
#TODO: Implement proper masking support
@celery.task()
def obtain_file_from_math(input_file,expressions_list,file_format="GTiff",data_type=gdal.GDT_CFloat32,input_ndv=-999,output_ndv=-999):
"""
ex. band4,band3, (band4-band3)/(band4+band3) AKA NDVI
"""
"""
Read in file
"""
if input_file == "None":
return "None"
driver = gdal.GetDriverByName(file_format)
ds = gdal.Open(input_file,0)
if ds is None:
return "None"
arrays = []
band_count = ds.RasterCount
xsize = ds.RasterXSize
ysize = ds.RasterYSize
gt = ds.GetGeoTransform()
proj = ds.GetProjection()
exp_str = "_".join(expressions_list)
cache_id = [os.path.splitext(os.path.basename(input_file))[0],exp_str,str(xsize),str(ysize),file_format]
f_name = "_".join(cache_id)
f_name = hashlib.sha512(f_name).hexdigest()[0:32]
c_name = f_name
cached_res = cache.get(c_name)
if cached_res:
return cached_res
f_name = os.path.join("/tilestore/tile_cache",f_name)
for i in range(band_count):
RB = ds.GetRasterBand(i+1)
arrays.append(RB.ReadAsArray(0,0,xsize,ysize).astype(numpy.float32))
del RB
var_identifier = "A"+''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
#test if we've used this id in this scope
var_test = var_identifier+"_band1"
while var_test in globals():
var_identifier = "A"+''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
var_test = var_identifier+"_band1"
for band_num in range(len(arrays)):
globals()[var_identifier+'_band'+str(band_num+1)]=arrays[band_num]
results = []
expressions = [expression.replace("band",var_identifier+"_band") for expression in expressions_list]
for expression in expressions:
results.append(ne.evaluate(expression))
raster = driver.Create(f_name+'.tif', xsize, ysize, len(expressions_list), data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
raster.SetGeoTransform(gt)
raster.SetProjection(proj)
index = 1
for band in results:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(output_ndv)
stack_band.WriteArray(band)
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
del ds
del results
cache.set(c_name,f_name+".tif")
return f_name+".tif"
@celery.task()
def shrink_raster_file(input_file,size=(2000,2000)):
if len(size)!=2:
return "None"
if input_file=="None":
return "None"
file_name = os.path.splitext(os.path.basename(input_file))[0]
if size[0] ==0 or size[1]==0:
return "None"
gdal.AllRegister()
c_arr = [file_name,str(size)]
c_name = "_".join(c_arr)
c_name = c_name.replace(" ","_")
c_name = c_name.replace(",","")
c_name = c_name.replace("(","")
c_name = c_name.replace(")","")
f_name = c_name+".tif"
f_name = os.path.join("/tilestore/tile_cache",f_name)
ds = gdal.Open(input_file,0)
band_count = ds.RasterCount
if band_count == 0:
return "None"
xsize = ds.RasterXSize
ysize = ds.RasterYSize
gt = ds.GetGeoTransform()
proj = ds.GetProjection()
ndv = ds.GetRasterBand(1).GetNoDataValue()
dt = ds.GetRasterBand(1).DataType
bands = []
for i in range(band_count):
RB = ds.GetRasterBand(i+1)
r = numpy.array(RB.ReadAsArray(0,0,xsize,ysize)).astype(numpy.float32)
print r
i = Image.fromarray(r)
i2 = i.resize(size,Image.NEAREST)
bands.append(numpy.array(i2))
del i2
del i
del r
driver = gdal.GetDriverByName("GTiff")
raster = driver.Create(f_name, size[0], size[1], band_count, dt, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
raster.SetGeoTransform(gt)
raster.SetProjection(proj)
index = 1
for band in bands:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(ndv)
stack_band.WriteArray(band)
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
return f_name
@celery.task()
def merge_files_on_value(input_files_list,merge_value=-999, input_ndv=-999,output_ndv=-999):
input_files = input_files_list
input_files = [fl for fl in input_files if fl != "None"]
if len(input_files)<2:
if len(input_files)==1:
return input_files[0]
else:
return "None"
file_name_list = [os.path.splitext(os.path.basename(in_file))[0] for in_file in input_files]
file_names_str = "_".join(file_name_list)
c_name_arr = [file_names_str,str(merge_value),str(input_ndv),str(output_ndv)]
c_name= "_".join(c_name_arr)
f_name = c_name+".tif"
f_name = os.path.join("/tilestore/tile_cache",f_name)
gdal.AllRegister()
arrays = []
ds = None
ndv_array = None
swap_array = None
xsize = 0
ysize = 0
gt = None
proj = None
band_count = 0
    ds = gdal.Open(input_files[0],0)
gt = ds.GetGeoTransform()
proj = ds.GetProjection()
band_count = ds.RasterCount
xsize = ds.RasterXSize
ysize = ds.RasterYSize
"""
Load the first file
"""
for i in range(band_count):
RB = ds.GetRasterBand(i+1)
arrays.append(RB.ReadAsArray(0,0,xsize,ysize))
del RB
ds = None
for file_path in input_files[1:]:
ds = gdal.Open(file_path,0)
if ds.RasterCount == band_count:
for i in range(band_count):
RB = ds.GetRasterBand(i+1)
RA = RB.ReadAsArray(0,0,xsize,ysize)
                ndv_array = numpy.in1d(arrays[0].ravel(),input_ndv).reshape(arrays[0].shape)
swap_array = numpy.in1d(arrays[0].ravel(),merge_value).reshape(arrays[0].shape)
arrays[i][swap_array]=RA[swap_array]
arrays[i][ndv_array]=output_ndv
del RB
del RA
ndv_array = None
swap_array = None
ds = None
"""
Write the merged file
"""
    driver = gdal.GetDriverByName("GTiff")
    # f_name already carries the .tif extension (see above), so create at f_name
    raster = driver.Create(f_name, xsize, ysize, band_count, gdal.GDT_CFloat32, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
raster.SetGeoTransform(gt)
raster.SetProjection(proj)
index = 1
for band in arrays:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(output_ndv)
stack_band.WriteArray(band)
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
return f_name
@celery.task()
def merge_2files_on_value(input_file1, input_file2, merge_value=-999, input_ndv=-999,output_ndv=-999):
    driver = gdal.GetDriverByName("GTiff")  # output below is always written as GeoTIFF
ds1 = gdal.Open(input_file1,0)
if ds1 is None:
return "None"
ds2 = gdal.Open(input_file2,0)
if ds2 is None:
return "None"
arrays1 = []
arrays2 = []
band_count = ds1.RasterCount
xsize = ds1.RasterXSize
ysize = ds1.RasterYSize
gt = ds1.GetGeoTransform()
proj = ds1.GetProjection()
for i in range(band_count):
RB = ds1.GetRasterBand(i+1)
arrays1.append(RB.ReadAsArray(0,0,xsize,ysize))
del RB
for i in range(band_count):
RB = ds2.GetRasterBand(i+1)
arrays2.append(RB.ReadAsArray(0,0,xsize,ysize))
del RB
    for i in range(band_count):
        ndv_array = numpy.in1d(arrays1[0].ravel(),input_ndv).reshape(arrays1[0].shape)
swap_array = numpy.in1d(arrays1[0].ravel(),merge_value).reshape(arrays1[0].shape)
arrays1[i][swap_array]=arrays2[i][swap_array]
arrays1[i][ndv_array]=output_ndv
del ndv_array
del swap_array
del arrays2
cache_id = [os.path.splitext(os.path.basename(input_file1))[0],os.path.splitext(os.path.basename(input_file2))[0],str(merge_value),str(input_ndv),str(output_ndv)]
f_name = "_".join(cache_id)
f_name = hashlib.sha512(f_name).hexdigest()[0:32]
f_name = os.path.join("/tilestore/tile_cache",f_name)
    raster = driver.Create(f_name+'.tif', xsize, ysize, band_count, gdal.GDT_CFloat32, options=["BIGTIFF=YES", "INTERLEAVE=BAND"]) # data type assumed; the signature does not take one
raster.SetGeoTransform(gt)
raster.SetProjection(proj)
index = 1
for band in arrays1:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(output_ndv)
stack_band.WriteArray(band)
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del raster
del ds1
del ds2
return f_name+".tif"
@celery.task()
def obtain_pca_test(validate_range,x,y,start1,end1,start2,end2, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CFloat32,months=None):
print("OBTAIN PCA_ALL")
medians,meta=obtain_median(validate_range,x,y,start1,end1, bands, satellite,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CFloat32,months=None)
median_p2,meta2=obtain_median(validate_range,x,y,start2,end2, bands, satellite,iterations,xsize,ysize,file_format,data_type,months)
if medians is None or median_p2 is None:
return "None"
nf=medians.shape[0]
nc=medians.shape[1]
nb=medians.shape[2]*2
mask_nan=np.any(np.isnan(np.concatenate((medians, median_p2),axis=2)),axis=2)
    ## SAVING MEDIAN DATA (FLATTENING)
component_p1=pre_process_ravel(medians)
component_p2=pre_process_ravel(median_p2)
    #________________ normalization loop ______________________________
for xat in xrange(0,component_p1.shape[1]):
component_p2[:,xat]=normalize(component_p1[:,xat],component_p2[:,xat])
pre_pca_bands=numpy.concatenate((component_p1,component_p2),1)
for xat in xrange(0,pre_pca_bands.shape[1]):
a=pre_pca_bands[:,xat]
a[np.isnan(a)]=np.nanmedian(a)
pre_pca_bands[:,xat]=a
f_pca=PCA(pre_pca_bands)
del medians
del median_p2
presult=f_pca.Y.T
#-_-------------------_-----------------------
"""
Guardar el archivo:
"""
band_str = "+".join([band.name for band in bands])
sat_str = "+".join([sat.name for sat in satellite])
cache_id = [str(x),str(y),str(start1),str(end1),str(start2),str(end2),band_str,sat_str,str(xsize),str(ysize),file_format,str(iterations)]
f_name = "_".join(cache_id)
f_name = "pca_"+f_name.replace(" ","_")
c_name = f_name
driver = gdal.GetDriverByName(file_format)
if driver is None:
print "No driver found for "+file_format
return "None"
f_name = os.path.join("/tilestore/tile_cache/",f_name)
t=max(nf,nc)
raster = driver.Create(f_name+'.tif', t, t, nb, data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
    #raster.SetGeoTransform((x-0.00025, 0.00027, 0.0, y+1.0002400000000002, 0.0, -0.00027)) # we should take this from the original dataset, or compute it properly
srs = osr.SpatialReference()
raster.SetGeoTransform(meta.transform)
#raster.SetProjection(tile_metadata.projection)
srs.SetWellKnownGeogCS("WGS84")
raster.SetProjection(srs.ExportToWkt())
index = 1
for bn in presult:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(-999)
bn=bn.reshape(nf,nc)
bn[mask_nan]=np.nan
stack_band.WriteArray(bn)
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del presult
del f_pca
cache.set(c_name,f_name+".tif")
return f_name+".tif"
@celery.task()
def obtain_pca_2002_2014L8(x,y):
validate_range=1
st = datetime.strptime('2002-01-01','%Y-%m-%d')
en = datetime.strptime('2002-12-31','%Y-%m-%d')
st2 = datetime.strptime('2014-01-01','%Y-%m-%d')
en2 = datetime.strptime('2014-12-31','%Y-%m-%d')
file_format="GTiff"
data_type=gdal.GDT_CFloat32
iterations=0
bands1=[ Ls57Arg25Bands.RED, Ls57Arg25Bands.NEAR_INFRARED, Ls57Arg25Bands.SHORT_WAVE_INFRARED_1,Ls57Arg25Bands.SHORT_WAVE_INFRARED_2]
satellite1=[Satellite.LS7]
medians,meta=obtain_median(validate_range,x,y,st,en, bands1, satellite1,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CFloat32,months=None)
    print "query 1"
nf=medians.shape[0]
nc=medians.shape[1]
nb=medians.shape[2]*2
bands2=[Ls8Arg25Bands.RED, Ls8Arg25Bands.NEAR_INFRARED, Ls8Arg25Bands.SHORT_WAVE_INFRARED_1, Ls8Arg25Bands.SHORT_WAVE_INFRARED_2]
satellite2=[Satellite.LS8]
median_p2,meta2=obtain_median(validate_range,x,y,st2,en2, bands2, satellite2,iterations=0,xsize=4000,ysize=4000,file_format="GTiff",data_type=gdal.GDT_CFloat32,months=None)
    print "query 2"
mask_nan=np.any(np.isnan(np.concatenate((medians, median_p2),axis=2)),axis=2)
    ## Saving median data (flattening)
component_p1=pre_process_ravel(medians)
component_p2=pre_process_ravel(median_p2)
    #________________Normalization loop______________________________
for xat in xrange(0,component_p1.shape[1]):
component_p2[:,xat]=normalize(component_p1[:,xat],component_p2[:,xat])
pre_pca_bands=numpy.concatenate((component_p1,component_p2),1)
for xat in xrange(0,pre_pca_bands.shape[1]):
a=pre_pca_bands[:,xat]
a[np.isnan(a)]=np.nanmedian(a)
pre_pca_bands[:,xat]=a
f_pca=PCA(pre_pca_bands)
del medians
del median_p2
presult=f_pca.Y.T
#-_-------------------_-----------------------
"""
    Save the file:
"""
band_str = "+".join([band.name for band in bands1])
sat_str = "+".join([sat.name for sat in satellite1])
cache_id = [str(x),str(y),str(st),str(en),str(st2),str(en2),band_str,sat_str,file_format,str(iterations)]
f_name = "_".join(cache_id)
f_name = "pca_"+f_name.replace(" ","_")
c_name = f_name
driver = gdal.GetDriverByName(file_format)
if driver is None:
print "No driver found for "+file_format
return "None"
f_name = os.path.join("/tilestore/tile_cache/",f_name)
t=max(nf,nc)
raster = driver.Create(f_name+'.tif', t, t, nb, data_type, options=["BIGTIFF=YES", "INTERLEAVE=BAND"])
srs = osr.SpatialReference()
raster.SetGeoTransform(meta.transform)
#raster.SetProjection(tile_metadata.projection)
srs.SetWellKnownGeogCS("WGS84")
raster.SetProjection(srs.ExportToWkt())
index = 1
for bn in presult:
stack_band = raster.GetRasterBand(index)
stack_band.SetNoDataValue(-999)
bn=bn.reshape(nf,nc)
bn[mask_nan]=np.nan
stack_band.WriteArray(bn)
stack_band.ComputeStatistics(True)
index+=1
stack_band.FlushCache()
del stack_band
raster.FlushCache()
del presult
del f_pca
cache.set(c_name,f_name+".tif")
return f_name+".tif"
|
agpl-3.0
| 1,160,426,004,678,091,800
| 35.57681
| 182
| 0.593422
| false
| 3.220011
| false
| false
| false
|
vaginessa/inception
|
inception/argparsers/makers/submakers/submaker_property.py
|
1
|
1852
|
from .submaker import Submaker
import collections
import os
class PropertySubmaker(Submaker):
def make(self, workDir):
props = self.getValue(".", {})
if "__make__" in props:
del props["__make__"]
if "__depend__" in props:
del props["__depend__"]
propsFlat = self.flatten(props)
outDir = os.path.join("data", "property")
localOutDir = os.path.join(workDir, outDir)
if len(propsFlat):
os.makedirs(localOutDir)
self.setValue("update.files.add.data/property", {
"destination": "/data/property",
"uid": "0",
"gid": "0",
"mode": "0600",
"mode_dirs": "0700"
})
for fname, val in propsFlat.items():
if not val:
continue
if fname.endswith("__val__"):
fname = fname.replace(".__val__", "")
fname = "persist.%s" % fname
with open(os.path.join(localOutDir, fname), "w") as propFile:
propFile.write(val)
#escapedFname = fname.replace(".", "\.")
#self.setConfigValue("update.files.add.data/property/%s" % escapedFname, self._getPropFileData(fname))
def _getPropFileData(self, fname):
return {
"destination": "/data/property/%s" % fname,
"uid": "0",
"gid": "0",
"mode": "0600"
}
def flatten(self, d, parent_key='', sep='.'):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, collections.MutableMapping):
items.extend(self.flatten(v, new_key, sep=sep).items())
else:
items.append((new_key, v))
return dict(items)
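# Illustrative sketch (not part of the original submaker): flatten() collapses a
# nested dict into dot-separated keys, which is where the property file names
# written by make() come from. The sample values below are made up for demonstration:
#   submaker.flatten({"sys": {"usb": {"__val__": "1"}}})
#   # -> {"sys.usb.__val__": "1"}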
|
gpl-3.0
| -1,622,258,097,428,729,000
| 30.931034
| 114
| 0.49568
| false
| 4.008658
| false
| false
| false
|
vmware/pyvmomi-community-samples
|
samples/sessions_list.py
|
1
|
2110
|
#!/usr/bin/env python
# VMware vSphere Python SDK
# Copyright (c) 2008-2021 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tools import cli, service_instance
# Demonstrates some simple working with sessions actions. By common sense
# you should expect that the session is None when you've logged out and
# you will lose the ability to see any session ID. It would be a massive
# security hole to allow people to see these when they were not authenticated
# since the session ID is all you need to spoof another user's login.
# Example output:
# > logged in to vcsa
# > current pyVmomi session id: 523ea3ee-865b-fc7e-3486-bd380c3ab4a2
# > Listing all sessions I can see:
# > session 5205c9e7-8f79-6597-f1d9-e06583cb5089
# > session 523ea3ee-865b-fc7e-3486-bd380c3ab4a2
# > session 52500401-b1e7-bb05-c6b1-05d903d32dcb
# > session 5284cc12-f15c-363a-4455-ae8dbeb8bc3b
# > logout
# > current pyVmomi session: None
parser = cli.Parser()
args = parser.get_args()
si = service_instance.connect(args)
print("logged in to %s" % args.host)
session_id = si.content.sessionManager.currentSession.key
print("current pyVmomi session id: %s" % session_id)
print("Listing all sessions I can see:")
for session in si.content.sessionManager.sessionList:
print(
"session key={0.key}, "
"username={0.userName}, "
"ip={0.ipAddress}".format(session)
)
print("logout")
si.content.sessionManager.Logout()
# The current session will be None after logout
session = si.content.sessionManager.currentSession
print("current pyVmomi session: %s" % session)
|
apache-2.0
| 6,791,903,590,379,356,000
| 36.017544
| 77
| 0.746919
| false
| 3.296875
| false
| false
| false
|
tallypokemap/PokeAlarm
|
PokeAlarm/Discord/DiscordAlarm.py
|
1
|
8686
|
# Standard Library Imports
import logging
import requests
# 3rd Party Imports
# Local Imports
from ..Alarm import Alarm
from ..Utils import parse_boolean, get_static_map_url, reject_leftover_parameters, require_and_remove_key
log = logging.getLogger('Discord')
try_sending = Alarm.try_sending
replace = Alarm.replace
##################################################### ATTENTION! #####################################################
# You DO NOT NEED to edit this file to customize messages for services! Please see the Wiki on the correct way to
# customize services. In fact, doing so will likely NOT work correctly with many features included in PokeAlarm.
# PLEASE ONLY EDIT IF YOU KNOW WHAT YOU ARE DOING!
##################################################### ATTENTION! #####################################################
class DiscordAlarm(Alarm):
_defaults = {
'pokemon': {
'username': "<pkmn>",
'content':"",
'icon_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/<pkmn_id>.png",
'avatar_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/<pkmn_id>.png",
'title': "A wild <pkmn> has appeared!",
'url': "<gmaps>",
'body': "Available until <24h_time> (<time_left>)."
},
'pokestop': {
'username': "Pokestop",
'content': "",
'icon_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/pokestop.png",
'avatar_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/pokestop.png",
'title': "Someone has placed a lure on a Pokestop!",
'url': "<gmaps>",
'body': "Lure will expire at <24h_time> (<time_left>)."
},
'gym': {
'username': "<new_team> Gym Alerts",
'content': "",
'icon_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/gym_<new_team_id>.png",
'avatar_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/gym_leader_<new_team_id>.png",
'title': "A Team <old_team> gym has fallen!",
'url': "<gmaps>",
'body': "It is now controlled by <new_team>."
},
'egg': {
'username': "Egg",
'content': "",
'icon_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/egg_<raid_level>.png",
'avatar_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/egg_<raid_level>.png",
'title': "Raid is incoming!",
'url': "<gmaps>",
'body': "A level <raid_level> raid will hatch <begin_24h_time> (<begin_time_left>)."
},
'raid': {
'username': "Raid",
'content': "",
'icon_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/<pkmn_id>.png",
'avatar_url': "https://raw.githubusercontent.com/RocketMap/PokeAlarm/master/icons/egg_<raid_level>.png",
'title': "Level <raid_level> Raid is available against <pkmn>!",
'url': "<gmaps>",
'body': "The raid is available until <24h_time> (<time_left>)."
}
}
# Gather settings and create alarm
def __init__(self, settings, max_attempts, static_map_key):
# Required Parameters
self.__webhook_url = require_and_remove_key('webhook_url', settings, "'Discord' type alarms.")
self.__max_attempts = max_attempts
# Optional Alarm Parameters
self.__startup_message = parse_boolean(settings.pop('startup_message', "True"))
self.__disable_embed = parse_boolean(settings.pop('disable_embed', "False"))
self.__avatar_url = settings.pop('avatar_url', "")
self.__map = settings.pop('map', {}) # default for the rest of the alerts
self.__static_map_key = static_map_key
# Set Alert Parameters
self.__pokemon = self.create_alert_settings(settings.pop('pokemon', {}), self._defaults['pokemon'])
self.__pokestop = self.create_alert_settings(settings.pop('pokestop', {}), self._defaults['pokestop'])
self.__gym = self.create_alert_settings(settings.pop('gym', {}), self._defaults['gym'])
self.__egg = self.create_alert_settings(settings.pop('egg', {}), self._defaults['egg'])
self.__raid = self.create_alert_settings(settings.pop('raid', {}), self._defaults['raid'])
# Warn user about leftover parameters
reject_leftover_parameters(settings, "'Alarm level in Discord alarm.")
log.info("Discord Alarm has been created!")
# (Re)connect with Discord
def connect(self):
pass
# Send a message letting the channel know that this alarm has started
def startup_message(self):
if self.__startup_message:
args = {
'url': self.__webhook_url,
'payload': {
'username': 'PokeAlarm',
'content': 'PokeAlarm activated!'
}
}
try_sending(log, self.connect, "Discord", self.send_webhook, args, self.__max_attempts)
log.info("Startup message sent!")
# Set the appropriate settings for each alert
def create_alert_settings(self, settings, default):
alert = {
'webhook_url': settings.pop('webhook_url', self.__webhook_url),
'username': settings.pop('username', default['username']),
'avatar_url': settings.pop('avatar_url', default['avatar_url']),
'disable_embed': parse_boolean(settings.pop('disable_embed', self.__disable_embed)),
'content': settings.pop('content', default['content']),
'icon_url': settings.pop('icon_url', default['icon_url']),
'title': settings.pop('title', default['title']),
'url': settings.pop('url', default['url']),
'body': settings.pop('body', default['body']),
'map': get_static_map_url(settings.pop('map', self.__map), self.__static_map_key)
}
reject_leftover_parameters(settings, "'Alert level in Discord alarm.")
return alert
# Send Alert to Discord
def send_alert(self, alert, info):
log.debug("Attempting to send notification to Discord.")
payload = {
'username': replace(alert['username'], info)[:32], # Username must be 32 characters or less
'content': replace(alert['content'], info),
'avatar_url': replace(alert['avatar_url'], info),
}
if alert['disable_embed'] is False:
payload['embeds'] = [{
'title': replace(alert['title'], info),
'url': replace(alert['url'], info),
'description': replace(alert['body'], info),
'thumbnail': {'url': replace(alert['icon_url'], info)}
}]
if alert['map'] is not None:
payload['embeds'][0]['image'] = {'url': replace(alert['map'], {'lat': info['lat'], 'lng': info['lng']})}
args = {
'url': alert['webhook_url'],
'payload': payload
}
try_sending(log, self.connect, "Discord", self.send_webhook, args, self.__max_attempts)
# Trigger an alert based on Pokemon info
def pokemon_alert(self, pokemon_info):
log.debug("Pokemon notification triggered.")
self.send_alert(self.__pokemon, pokemon_info)
# Trigger an alert based on Pokestop info
def pokestop_alert(self, pokestop_info):
log.debug("Pokestop notification triggered.")
self.send_alert(self.__pokestop, pokestop_info)
# Trigger an alert based on Pokestop info
def gym_alert(self, gym_info):
log.debug("Gym notification triggered.")
self.send_alert(self.__gym, gym_info)
# Trigger an alert when a raid egg has spawned (UPCOMING raid event)
def raid_egg_alert(self, raid_info):
self.send_alert(self.__egg, raid_info)
def raid_alert(self, raid_info):
self.send_alert(self.__raid, raid_info)
# Send a payload to the webhook url
def send_webhook(self, url, payload):
log.debug(payload)
resp = requests.post(url, json=payload, timeout=5)
if resp.ok is True:
log.debug("Notification successful (returned {})".format(resp.status_code))
else:
log.debug("Discord response was {}".format(resp.content))
raise requests.exceptions.RequestException(
"Response received {}, webhook not accepted.".format(resp.status_code))
|
agpl-3.0
| -7,167,528,100,492,597,000
| 46.206522
| 124
| 0.575869
| false
| 3.87941
| false
| false
| false
|
pfsmorigo/minecraft
|
overviewer_config.py
|
1
|
12108
|
#!/bin/python
world_name = 'SmoWorld'
worlds[world_name] = '/home/pfsmorigo/.minecraft/saves/'+world_name
outputdir = '/mnt/disk/beirut/minecraft/overviewer/'+world_name
rendermode = "smooth_lighting"
MC_IDS = {
0: "air",
1: "stone",
2: "grass",
3: "dirt",
4: "cobblestone",
5: "planks",
6: "sapling",
7: "bedrock",
8: "flowing water",
9: "water",
10: "flowing lava",
11: "lava",
12: "sand",
13: "gravel",
14: "gold ore",
15: "iron ore",
16: "coal ore",
17: "log",
18: "leaves",
19: "sponge",
20: "glass",
21: "lapis ore",
22: "lapis block",
23: "dispenser",
24: "sandstone",
25: "noteblock",
26: "bed",
27: "golden rail",
28: "detector rail",
29: "sticky piston",
30: "web",
31: "tallgrass",
32: "deadbush",
33: "piston",
34: "piston head",
35: "wool",
36: "piston extension",
37: "yellow flower",
38: "red flower",
39: "brown mushroom",
40: "red mushroom",
41: "gold block",
42: "iron block",
43: "double stone slab",
44: "stone slab",
45: "brick block",
46: "tnt",
47: "bookshelf",
48: "mossy cobblestone",
49: "obsidian",
50: "torch",
51: "fire",
52: "mob spawner",
53: "oak stairs",
54: "chest",
55: "redstone wire",
56: "diamond ore",
57: "diamond block",
58: "crafting table",
59: "wheat",
60: "farmland",
61: "furnace",
62: "lit furnace",
63: "standing sign",
64: "wooden door",
65: "ladder",
66: "rail",
67: "stone stairs",
68: "wall sign",
69: "lever",
70: "stone pressure plate",
71: "iron door",
72: "wooden pressure plate",
73: "redstone ore",
74: "lit redstone ore",
75: "unlit redstone torch",
76: "redstone torch",
77: "stone button",
78: "snow layer",
79: "ice",
80: "snow",
81: "cactus",
82: "clay",
83: "reeds",
84: "jukebox",
85: "fence",
86: "pumpkin",
87: "netherrack",
88: "soul sand",
89: "glowstone",
90: "portal",
91: "lit pumpkin",
92: "cake",
93: "unpowered repeater",
94: "powered repeater",
95: "stained glass",
96: "trapdoor",
97: "monster egg",
98: "stonebrick",
99: "brown mushroom block",
100: "red mushroom block",
101: "iron bars",
102: "glass pane",
103: "melon block",
104: "pumpkin stem",
105: "melon stem",
106: "vine",
107: "fence gate",
108: "brick stairs",
109: "stone brick stairs",
110: "mycelium",
111: "waterlily",
112: "nether brick",
113: "nether brick fence",
114: "nether brick stairs",
115: "nether wart",
116: "enchanting table",
117: "brewing stand",
118: "cauldron",
119: "end portal",
120: "end portal frame",
121: "end stone",
122: "dragon egg",
123: "redstone lamp",
124: "lit redstone lamp",
125: "double wooden slab",
126: "wooden slab",
127: "cocoa",
128: "sandstone stairs",
129: "emerald ore",
130: "ender chest",
131: "tripwire hook",
132: "tripwire",
133: "emerald block",
134: "spruce stairs",
135: "birch stairs",
136: "jungle stairs",
137: "command block",
138: "beacon",
139: "cobblestone wall",
140: "flower pot",
141: "carrots",
142: "potatoes",
143: "wooden button",
144: "skull",
145: "anvil",
146: "trapped chest",
147: "light weighted pressure plate",
148: "heavy weighted pressure plate",
149: "unpowered comparator",
150: "powered comparator",
151: "daylight detector",
152: "redstone block",
153: "quartz ore",
154: "hopper",
155: "quartz block",
156: "quartz stairs",
157: "activator rail",
158: "dropper",
159: "stained hardened clay",
160: "stained glass pane",
161: "leaves2",
162: "log2",
163: "acacia stairs",
164: "dark oak stairs",
165: "slime",
166: "barrier",
167: "iron trapdoor",
168: "prismarine",
169: "sea lantern",
170: "hay block",
171: "carpet",
172: "hardened clay",
173: "coal block",
174: "packed ice",
175: "double plant",
176: "standing banner",
177: "wall banner",
178: "daylight detector inverted",
179: "red sandstone",
180: "red sandstone stairs",
181: "double stone slab2",
182: "stone slab2",
183: "spruce fence gate",
184: "birch fence gate",
185: "jungle fence gate",
186: "dark oak fence gate",
187: "acacia fence gate",
188: "spruce fence",
189: "birch fence",
190: "jungle fence",
191: "dark oak fence",
192: "acacia fence",
193: "spruce door",
194: "birch door",
195: "jungle door",
196: "acacia door",
197: "dark oak door",
# ...and items.
256: "iron shovel",
257: "iron pickaxe",
258: "iron axe",
259: "flint and steel",
260: "apple",
261: "bow",
262: "arrow",
263: "coal",
264: "diamond",
265: "iron ingot",
266: "gold ingot",
267: "iron sword",
268: "wooden sword",
269: "wooden shovel",
270: "wooden pickaxe",
271: "wooden axe",
272: "stone sword",
273: "stone shovel",
274: "stone pickaxe",
275: "stone axe",
276: "diamond sword",
277: "diamond shovel",
278: "diamond pickaxe",
279: "diamond axe",
280: "stick",
281: "bowl",
282: "mushroom stew",
283: "golden sword",
284: "golden shovel",
285: "golden pickaxe",
286: "golden axe",
287: "string",
288: "feather",
289: "gunpowder",
290: "wooden hoe",
291: "stone hoe",
292: "iron hoe",
293: "diamond hoe",
294: "golden hoe",
295: "wheat seeds",
296: "wheat",
297: "bread",
298: "leather helmet",
299: "leather chestplate",
300: "leather leggings",
301: "leather boots",
302: "chainmail helmet",
303: "chainmail chestplate",
304: "chainmail leggings",
305: "chainmail boots",
306: "iron helmet",
307: "iron chestplate",
308: "iron leggings",
309: "iron boots",
310: "diamond helmet",
311: "diamond chestplate",
312: "diamond leggings",
313: "diamond boots",
314: "golden helmet",
315: "golden chestplate",
316: "golden leggings",
317: "golden boots",
318: "flint",
319: "porkchop",
320: "cooked porkchop",
321: "painting",
322: "golden apple",
323: "sign",
324: "wooden door",
325: "bucket",
326: "water bucket",
327: "lava bucket",
328: "minecart",
329: "saddle",
330: "iron door",
331: "redstone",
332: "snowball",
333: "boat",
334: "leather",
335: "milk bucket",
336: "brick",
337: "clay ball",
338: "reeds",
339: "paper",
340: "book",
341: "slime ball",
342: "chest minecart",
343: "furnace minecart",
344: "egg",
345: "compass",
346: "fishing rod",
347: "clock",
348: "glowstone dust",
349: "fish",
350: "cooked fish",
351: "dye",
352: "bone",
353: "sugar",
354: "cake",
355: "bed",
356: "repeater",
357: "cookie",
358: "filled map",
359: "shears",
360: "melon",
361: "pumpkin seeds",
362: "melon seeds",
363: "beef",
364: "cooked beef",
365: "chicken",
366: "cooked chicken",
367: "rotten flesh",
368: "ender pearl",
369: "blaze rod",
370: "ghast tear",
371: "gold nugget",
372: "nether wart",
373: "potion",
374: "glass bottle",
375: "spider eye",
376: "fermented spider eye",
377: "blaze powder",
378: "magma cream",
379: "brewing stand",
380: "cauldron",
381: "ender eye",
382: "speckled melon",
383: "spawn egg",
384: "experience bottle",
385: "fire charge",
386: "writable book",
387: "written book",
388: "emerald",
389: "item frame",
390: "flower pot",
391: "carrot",
392: "potato",
393: "baked potato",
394: "poisonous potato",
395: "map",
396: "golden carrot",
397: "skull",
398: "carrot on a stick",
399: "nether star",
400: "pumpkin pie",
401: "fireworks",
402: "firework charge",
403: "enchanted book",
404: "comparator",
405: "netherbrick",
406: "quartz",
407: "tnt minecart",
408: "hopper minecart",
409: "prismarine shard",
410: "prismarine crystals",
411: "rabbit",
412: "cooked rabbit",
413: "rabbit stew",
414: "rabbit foot",
415: "rabbit hide",
416: "armor stand",
417: "iron horse armor",
418: "golden horse armor",
419: "diamond horse armor",
420: "lead",
421: "name tag",
422: "command block minecart",
423: "mutton",
424: "cooked mutton",
425: "banner",
427: "spruce door",
428: "birch door",
429: "jungle door",
430: "acacia door",
431: "dark oak door",
2256: "record 13",
2257: "record cat",
2258: "record blocks",
2259: "record chirp",
2260: "record far",
2261: "record mall",
2262: "record mellohi",
2263: "record stal",
2264: "record strad",
2265: "record ward",
2266: "record 11",
2267: "record wait"
}
def signFilter(poi):
if poi['id'] == 'Sign':
return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']])
def chestFilter(poi):
global MC_IDS
if poi['id'] == 'Chest':
items = ''
for item in poi['Items']:
item_name = MC_IDS[item['id']]
items += '\n%s (%d)' % (item_name, item['Count'])
if 'diamond' in item_name:
print 'FOUND: %s at %d, %d, %d' % (item_name, poi['x'], poi['y'], poi['z'])
return ('Chest', 'Chest with %d items (%d, %d, %d):%s' % (len(poi['Items']), poi['x'], poi['y'], poi['z'], items))
def playerFilter(poi):
if poi['id'] == 'Player':
poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId']
return "Last known location for %s" % poi['EntityId']
def townFilter(poi):
if poi['id'] == 'Town':
try:
return (poi['name'], poi['description'])
except KeyError:
return poi['name'] + '\n'
def get_crop(radius = 0):
center_x = -16
center_z = -76
castle_radius = 30
min_x = center_x-castle_radius-radius
min_z = center_z-castle_radius-radius
max_x = center_x+castle_radius+radius+1
max_z = center_z+castle_radius+radius+1
return (min_x, min_z, max_x, max_z)
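# For reference (values derived from the constants above; illustrative comment,
# not part of the original config):
#   get_crop()    # -> (-46, -106, 15, -45), the castle area itself
#   get_crop(100) # -> (-146, -206, 115, 55), the castle area plus a 100-block margin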
filters = [dict(name = "Signs", filterFunction = signFilter),
dict(name = "Chests", filterFunction = chestFilter, checked = True),
dict(name = "Players", filterFunction = playerFilter)]
renders['overworld_castle'] = {
'world': world_name,
'title': 'Castle',
'dimension': 'overworld',
'northdirection': 'upper-left',
'crop': get_crop(100),
'markers': filters
}
renders['overworld_daytime'] = {
'world': world_name,
'title': 'Day',
'dimension': 'overworld',
'northdirection': 'upper-left',
'crop': get_crop(100),
'markers': filters
}
renders['overworld_night'] = {
'world': world_name,
'title': 'Night',
'rendermode': 'smooth_night',
'dimension': 'overworld',
'northdirection': 'upper-left',
'crop': get_crop(100),
'markers': filters
}
for i in range(0, 15):
renders['overworld_level_'+str(i)] = {
'world': world_name,
'title': 'Level '+str(i),
'rendermode': [Base(), EdgeLines(), SmoothLighting(), Depth(max = (i*8)+3)],
'dimension': 'overworld',
'northdirection': 'upper-left',
'crop': get_crop(),
'markers': filters
}
renders['nether'] = {
'world': world_name,
'title': 'Nether',
'rendermode': 'nether',
'dimension': 'nether',
'northdirection': 'upper-left',
'crop': get_crop(),
'markers': filters
}
|
gpl-2.0
| -4,839,259,828,219,592,000
| 23.811475
| 122
| 0.537248
| false
| 2.793724
| false
| false
| false
|
richbrowne/f5-openstack-agent
|
f5_openstack_agent/lbaasv2/drivers/bigip/vlan_binding.py
|
1
|
6094
|
# coding=utf-8
# Copyright 2014-2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class VLANBindingBase(object):
"""Base Class for device interface to port binding """
def __init__(self, conf, driver):
self.conf = conf
self.driver = driver
self.interface_binding_mappings = {}
self.__initialized__bigip_ports = False
LOG.debug('reading static device interface port bindings')
if self.conf.interface_port_static_mappings:
LOG.debug('bindings: %s '
% self.conf.interface_port_static_mappings)
interface_binding_static_mappings = \
json.loads(self.conf.interface_port_static_mappings)
if isinstance(interface_binding_static_mappings, dict):
for device in interface_binding_static_mappings:
if isinstance(device, dict):
self.interface_binding_mappings[device] = \
interface_binding_static_mappings[device]
else:
LOG.debug('interface_port_static_mappings not configured')
def register_bigip_interfaces(self):
# Delayed binding BIG-IP ports will be called
# after BIG-IP endpoints are registered.
if not self.__initialized__bigip_ports:
for bigip in self.driver.get_all_bigips():
LOG.debug('Request Port information for MACs: %s'
% bigip.device_interfaces)
if self.driver.plugin_rpc:
ports = self.driver.plugin_rpc.get_ports_for_mac_addresses(
mac_addresses=bigip.mac_addresses)
LOG.debug('Neutron returned Port Info: %s' % ports)
for port in ports:
for interface in bigip.device_interfaces:
if not interface == 'mgmt':
if bigip.device_interfaces[interface] == \
port['mac_address']:
mapping = {interface: port['id']}
self.interface_binding_mappings[
bigip.device_name] = mapping
LOG.debug('adding mapping information device'
'%s interface %s to port: %s'
% (bigip.device_name,
interface,
port['id']))
self.__initialized__bigip_ports = True
LOG.debug('interface bindings after initialization are: %s'
% self.interface_binding_mappings)
for bigip in self.driver.get_all_bigips():
if bigip.device_name not in self.interface_binding_mappings:
example = {bigip.device_name: {}}
for interface in bigip.device_interfaces:
example[bigip.device_name][interface] = \
"port_id_for_%s" % interface
                json_example = json.dumps(example)
                LOG.warning(
                    'The device %s at %s does not have interface bindings'
                    ' even though VLAN binding has been requested'
                    % (bigip.device_name, bigip.hostname)
                )
LOG.warning(
'An example static mapping would be: %s' % json_example
)
def allow_vlan(self, device_name=None, interface=None, vlanid=0):
        raise NotImplementedError(
            "A VLAN binding class must implement allow_vlan"
        )
def prune_vlan(self, device_name=None, interface=None, vlanid=0):
        raise NotImplementedError(
            "A VLAN binding class must implement prune_vlan"
        )
class NullBinding(VLANBindingBase):
# Class for configuring VLAN lists on ports.
def __init__(self, conf, driver):
super(NullBinding, self).__init__(conf, driver)
def allow_vlan(self, device_name=None, interface=None, vlanid=0):
if not device_name:
return
if not interface:
return
if vlanid == 0:
return
LOG.debug('checking for port bindings '
'device_name: %s interface %s'
% (device_name, interface))
if device_name in self.interface_binding_mappings:
if interface in self.interface_binding_mappings[device_name]:
LOG.debug(
'allowing VLAN %s on port %s'
% (vlanid,
self.interface_binding_mappings[device_name][interface])
)
def prune_vlan(self, device_name=None, interface=None, vlanid=None):
if not device_name:
return
if not interface:
return
if vlanid == 0:
return
LOG.debug('checking for port bindings '
'device_name: %s interface %s'
% (device_name, interface))
if device_name in self.interface_binding_mappings:
if interface in self.interface_binding_mappings[device_name]:
LOG.debug(
'pruning VLAN %s from port %s'
% (vlanid,
self.interface_binding_mappings[device_name][interface])
)
|
apache-2.0
| -6,098,282,063,651,757,000
| 42.528571
| 79
| 0.541516
| false
| 4.844197
| false
| false
| false
|
xArm-Developer/xArm-Python-SDK
|
example/wrapper/common/3002-record_trajectory.py
|
1
|
1366
|
#!/usr/bin/env python3
# Software License Agreement (BSD License)
#
# Copyright (c) 2019, UFACTORY, Inc.
# All rights reserved.
#
# Author: Vinman <vinman.wen@ufactory.cc> <vinman.cub@gmail.com>
"""
Description: Record trajectory
1. requires firmware 1.2.0 and above support
"""
import os
import sys
import time
sys.path.append(os.path.join(os.path.dirname(__file__), '../../..'))
from xarm.wrapper import XArmAPI
#######################################################
"""
Just for test example
"""
if len(sys.argv) >= 2:
ip = sys.argv[1]
else:
try:
from configparser import ConfigParser
parser = ConfigParser()
parser.read('../robot.conf')
ip = parser.get('xArm', 'ip')
except:
ip = input('Please input the xArm ip address:')
if not ip:
print('input error, exit')
sys.exit(1)
########################################################
arm = XArmAPI(ip, is_radian=True)
arm.motion_enable(enable=True)
arm.set_mode(0)
arm.set_state(state=0)
# Turn on manual mode before recording
arm.set_mode(2)
arm.set_state(0)
arm.start_record_trajectory()
# Analog recording process, here with delay instead
time.sleep(20)
arm.stop_record_trajectory()
arm.save_record_trajectory('test.traj')
time.sleep(1)
# Turn off manual mode after recording
arm.set_mode(0)
arm.set_state(0)
|
bsd-3-clause
| -4,354,373,712,127,189,500
| 20.015385
| 68
| 0.60981
| false
| 3.291566
| false
| false
| false
|
dladd/pyFormex
|
pyformex/examples/TrussFrame.py
|
1
|
2483
|
# $Id$
##
## This file is part of pyFormex 0.8.9 (Fri Nov 9 10:49:51 CET 2012)
## pyFormex is a tool for generating, manipulating and transforming 3D
## geometrical models by sequences of mathematical operations.
## Home page: http://pyformex.org
## Project page: http://savannah.nongnu.org/projects/pyformex/
## Copyright 2004-2012 (C) Benedict Verhegghe (benedict.verhegghe@ugent.be)
## Distributed under the GNU General Public License version 3 or later.
##
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see http://www.gnu.org/licenses/.
##
"""TrussFrame
"""
from __future__ import print_function
_status = 'checked'
_level = 'normal'
_topics = ['geometry']
_techniques = ['color']
from gui.draw import *
def run():
clear()
yf = [ 0.0, 0.2, 1.2, 2.2, 3.2, 4.2, 4.5 ] # y of nodes in frame columns
a = Formex([[[0.0,y]] for y in yf ])
b = connect([a,a],bias=[0,1]).translate([0.5,0.0,0.0])
b.setProp(3)
c = b.reflect(0)
d = connect([b,c],bias=[1,1])
d.setProp(2)
e = connect([b,c],bias=[1,2]).select([0,2]) + connect([b,c],bias=[2,1]).select([1,3])
e.setProp(1)
col = b+c+d+e
frame = col.translate([-4.0,0.0,0.0]) + col.translate([+4.0,0.0,0.0])
    # Roof girder
    h0 = 1.2 # height at mid-span
    h1 = 0.5 # height at the ends
    xd = [ 0, 0.6 ] + [ 0.6+i*1.2 for i in range(5)] # horizontal positions of the nodes
ko = Formex([[[x,0.0]] for x in xd])
ond = connect([ko,ko],bias=[0,1])
bov = ond.translate(1,h0).shear(1,0,(h1-h0)/xd[-1])
tss = connect([ond,bov],bias=[1,1])
ond.setProp(2)
bov.setProp(4)
tss.setProp(5)
dakligger = (ond+bov+tss)
dakligger += dakligger.reflect(0)
frame += dakligger.translate([0,yf[-1],0])
draw(frame)
structure = frame.replic2(2,6,12.,3.,0,2)
clear()
draw(structure)
view('top')
view('right')
view('iso')
if __name__ == 'draw':
run()
# End
|
gpl-3.0
| -147,417,965,538,401,250
| 32.106667
| 89
| 0.631091
| false
| 2.707743
| false
| false
| false
|
BigEgg/LeetCode
|
Python/LeetCode/_051_100/_068_TextJustification.py
|
1
|
1097
|
class Solution:
def fullJustify(self, words: [str], maxWidth: int) -> [str]:
result = []
start_index, current_length = 0, 0
for i in range(len(words)):
if current_length + len(words[i]) > maxWidth:
space = maxWidth - current_length + (i - start_index)
new_line = ''
for j in range(start_index, i):
new_line += words[j]
space_count = maxWidth - len(new_line) if j == i - 1 else space // (i - start_index - 1) + (1 if (j - start_index < (space % (i - start_index - 1))) else 0)
new_line += ' ' * space_count
result.append(new_line)
current_length = 0
start_index = i
current_length += len(words[i]) + 1
new_line = ''
for j in range(start_index, len(words)):
new_line += words[j]
space_count = 1 if j != len(words) - 1 else maxWidth - len(new_line)
new_line += ' ' * space_count
result.append(new_line)
return result
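# Minimal usage sketch (not part of the original solution); the words and width
# below are the sample inputs from the classic problem statement:
if __name__ == "__main__":
    words = ["This", "is", "an", "example", "of", "text", "justification."]
    # Expected: ["This    is    an", "example  of text", "justification.  "]
    print(Solution().fullJustify(words, 16))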
|
mit
| -1,006,641,651,837,325,300
| 35.566667
| 176
| 0.483136
| false
| 3.756849
| false
| false
| false
|
ehuelsmann/openipam
|
openIPAM/openipam/web/access.py
|
1
|
3170
|
import cherrypy
from basepage import BasePage
import framework
from openipam.web.resource.submenu import submenu
from openipam.config import frontend
perms = frontend.perms
class Access(BasePage):
'''The access class. This includes all pages that are /access/*'''
def __init__(self):
BasePage.__init__(self)
# Object for wrapping HTML into the template
self.__template = framework.Basics("access")
#-----------------------------------------------------------------
# PUBLISHED FUNCTIONS
#-----------------------------------------------------------------
#-----------------------------------------------------------------
# EXPOSED FUNCTIONS
#-----------------------------------------------------------------
@cherrypy.expose
def index(self):
"""The user management page"""
# Confirm user authentication
self.check_session()
domains_text = []
networks_text = []
hosts_text = []
domains = self.webservice.get_domains( { 'additional_perms' : str(perms.ADD) } )
if not domains:
domains_text.append("<p>You do not have access to add hosts in any domains.</p>")
else:
domains_text.append("<p>You have access to add hosts in the following domains:</p>")
rows = []
# The template HTML for every item
item_template = '''<tr class="info">
<td>%(name)s</td>
<td>%(description)s</td>
</tr>
'''
# Go through the query and make the table HTML using the template
for domain in domains:
rows.append(item_template % (domain))
# Combine all the parts into the table
domains_text.append('''
<table class="infoTable">
<thead>
<tr>
<th>Name</th>
<th>Description</th>
</tr>
</thead>
<tbody>
%s
</tbody>
</table>
''' % ''.join(rows))
networks = self.webservice.get_networks( { 'additional_perms' : str(perms.ADD) } )
if not networks:
networks_text.append("<p>You do not have access to add static IP addresses to any networks.</p>")
else:
networks_text.append("<p>You have access to add static IP addresses to these networks:</p>")
rows = []
# The template HTML for every item
item_template = '''<tr class="info">
<td>%(network)s</td>
<td>%(name)s</td>
<td>%(gateway)s</td>
<td>%(description)s</td>
</tr>
'''
# Go through the query and make the table HTML using the template
for network in networks:
rows.append(item_template % (network))
# Combine all the parts into the table
networks_text.append('''
<table class="infoTable">
<thead>
<tr>
<th>Network (CIDR)</th>
<th>Name</th>
<th>Gateway</th>
<th>Description</th>
</tr>
</thead>
<tbody>
%s
</tbody>
</table>
''' % ''.join(rows))
maincontent = '''
<h1>My Access</h1>
%s
<h2>Domains</h2>
%s
<h2>Networks</h2>
%s
''' % (frontend.my_access_text, ''.join(domains_text), ''.join(networks_text))
return self.__template.wrap(maincontent)
#-----------------------------------------------------------------
|
gpl-3.0
| 1,357,530,256,039,712,000
| 24.15873
| 100
| 0.529022
| false
| 3.573844
| false
| false
| false
|
vtsuperdarn/davitpy
|
davitpy/pydarn/sdio/radDataRead.py
|
1
|
13124
|
# Copyright (C) 2012 VT SuperDARN Lab
# Full license can be found in LICENSE.txt
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
.. module:: radDataRead
:synopsis: A module for reading radar data (iqdat, raw, fit)
.. moduleauthor:: AJ, 20130110
************************************
**Module**: pydarn.sdio.radDataRead
************************************
Functions
----------
:func:`pydarn.sdio.radDataRead.radDataOpen`
:func:`pydarn.sdio.radDataRead.radDataReadRec`
:func:`pydarn.sdio.radDataRead.radDataReadScan`
:func:`pydarn.sdio.radDataRead.radDataReadAll`
:func:`pydarn.sdio.radDataRead.radDataCreateIndex`
"""
import logging
def radDataOpen(sTime, radcode, eTime=None, channel=None, bmnum=None, cp=None,
fileType='fitex', filtered=False, src=None, fileName=None,
noCache=False, local_dirfmt=None, local_fnamefmt=None,
local_dict=None, remote_dirfmt=None, remote_fnamefmt=None,
remote_dict=None, remote_site=None, username=None,
password=None, port=None, tmpdir=None, remove=False,
try_file_types=True):
"""A function to establish a pipeline through which we can read radar data.
first it tries the mongodb, then it tries to find local files, and lastly
it sftp's over to the VT data server.
Parameters
-----------
sTime : (datetime)
The beginning time for which you want data
radcode : (str)
The 3-letter radar code with optional channel extension for which you
want data
eTime : (datetime/NoneType)
The last time that you want data for. If this is set to None, it will
be set to 1 day after sTime. (default=None)
channel : (str/NoneType)
The 1-letter code for what channel you want data from, eg 'a','b',...
if this is set to None, data from ALL channels will be read.
(default=None)
bmnum : (int/NoneType)
The beam number which you want data for. If this is set to None, data
from all beams will be read. (default=None)
cp : (int)
The control program which you want data for. If this is set to None,
data from all cp's will be read. (default=None)
fileType : (str)
The type of data you want to read. valid inputs are: 'fitex','fitacf',
'fitacf3','lmfit','rawacf','iqdat'. If you choose a fit file format
and the specified one isn't found, we will search for one of the
others. Beware: if you ask for rawacf/iq data, these files are large
and the data transfer might take a long time. (default='fitex')
filtered : (boolean)
A boolean specifying whether you want the fit data to be boxcar
filtered. ONLY VALID FOR FIT. (default=False)
src : (str/NoneType)
The source of the data. Valid inputs are 'local' 'sftp'. If this is
set to None, it will try all possibilites sequentially. (default=None)
fileName : (str/NoneType)
The name of a specific file which you want to open. (default=None)
noCache : (boolean)
Flag to indicate that you do not want to check first for cached files.
(default=False)
remote_site : (str/NoneType)
The remote data server's address. If None, the rcParam value DB will be
used. (default=None)
port : (str/NoneType)
The port number to use for remote_site. If None, the rcParam value
DB_PORT will be used. (default=None)
username : (str/NoneType)
Username for remote_site. If None, the rcParam value DBREADUSER will
be used. (default=None)
password : (str/bool/NoneType)
Password for remote_site. If password is set to True, the user is
prompted for the remote_site password. If set to None, the rcParam
value DBREADPASS will be used (default=None)
remote_dirfmt : (str/NoneType)
The remote_site directory structure. Can include keywords to be
replaced by dictionary keys in remote_dict. If None, the rcParam value
DAVIT_REMOTE_DIRFORMAT will be used. (default=None)
Ex) remote_dirfmt='/{year}/{month}'
remote_fnamefmt : (str/list/NoneType)
The remote_site file naming format. Can include keywords to be replaced
by dictionary keys in remote_dict. If None, the rcParam value
DAVIT_REMOTE_FNAMEFMT will be used. (default=None)
Ex) remote_fnamefmt=['{date}.{radar}.{ftype}',
'{date}.{channel}.{radar}.{ftype}']
local_dirfmt : (str/None)
The local directory structure. Can include keywords to be replaced by
dictionary keys in remote_dict. If None, the rcParam value
DAVIT_LOCAL_DIRFORMAT will be used. (default=None)
Ex) local_dirfmt='/{year}/{month}'
local_fnamefmt : (str/list/NoneType)
The local file naming format. Can include keywords to be replaced by
dictionary keys in remote_dict. If None, the rcParam value
DAVIT_LOCAL_FNAMEFMT will be used. (default=None)
Ex) local_fnamefmt=['{date}.{radar}.{ftype}',
'{date}.{channel}.{radar}.{ftype}']
tmpdir : (str/NoneType)
The directory in which to store temporary files. If None, the rcParam
value DAVIT_TMPDIR will be used. (default=None)
remove : (bool)
Remove compressed file after uncompression (default=False)
try_file_types : (bool)
If desired file type could not be found, try to download others
(default=True)
Returns
--------
myPtr : (pydarn.sdio.radDataTypes.radDataPtr)
A radDataPtr object which contains a link to the data to be read.
This can then be passed to radDataReadRec in order to actually read the
data.
Notes
-------
    The environment variables are python dictionary-capable format strings
    that encode radar name, channel, and/or date information. Currently
    supported dictionary keys which can be used are:
    "date" : datetime.datetime.strftime("%Y%m%d")
    "year" : 0 padded 4 digit year
    "month" : 0 padded 2 digit month
    "day" : 0 padded 2 digit day
    "hour" : 0 padded 2 digit hour
"ftype" : filetype string
"radar" : 3-chr radarcode
"channel" : single character string, ex) 'a'
Example
----------
::
import datetime as dt
myPtr = pydarn.sdio.radDataOpen(dt.datetime(2011,1,1),'bks', \
eTime=dt.datetime(2011,1,1,2),channel=None, bmnum=7,cp=153, \
fileType='fitex',filtered=False, src=None)
Written by AJ 20130110
"""
from davitpy.pydarn.sdio import radDataPtr
from davitpy.pydarn.radar import network
myPtr = radDataPtr(sTime=sTime, radcode=radcode, eTime=eTime,
channel=channel, bmnum=bmnum, cp=cp, fileType=fileType,
filtered=filtered, src=src, fileName=fileName,
noCache=noCache, local_dirfmt=local_dirfmt,
local_fnamefmt=local_fnamefmt, local_dict=local_dict,
remote_dirfmt=remote_dirfmt, remote_dict=remote_dict,
remote_fnamefmt=remote_fnamefmt, remote_site=remote_site,
username=username, port=port, password=password,
stid=int(network().getRadarByCode(radcode).id),
tmpdir=tmpdir, remove=remove,
try_file_types=try_file_types)
return myPtr
def radDataReadRec(my_ptr):
"""A function to read a single record of radar data from a
:class:`pydarn.sdio.radDataTypes.radDataPtr` object
Parameters
------------
my_ptr : (pydarn.sdio.radDataTypes.radDataPtr)
Contains the pipeline to the data we are after.
Returns
---------
my_beam : (pydarn.sdio.radDataTypes.beamData/NoneType)
An object filled with the data we are after. Will return None when
finished reading.
Example
---------
::
import datetime as dt
my_ptr = radDataOpen(dt.datetime(2011,1,1),'bks', \
eTime=dt.datetime(2011,1,1,2),channel=None,bmnum=7,cp=153,
fileType='fitex',filtered=False,src=None)
my_beam = radDataReadRec(my_ptr)
Notes
------
To use this, you must first create a
:class:`pydarn.sdio.radDataTypes.radDataPtr` object with
:func:`radDataOpen`
Written by AJ 20130110
"""
from davitpy.pydarn.sdio import radDataPtr
assert isinstance(my_ptr, radDataPtr), \
logging.error('input must be of type radDataPtr')
return my_ptr.readRec()
def radDataReadScan(my_ptr):
"""A function to read a full scan of data from a
:class:`pydarn.sdio.radDataTypes.radDataPtr` object
Parameters
-----------
my_ptr : (pydarn.sdio.radDataTypes.radDataPtr)
Contains the pipeline to the data we are after
Returns
--------
my_scan : (pydarn.sdio.radDataTypes.scanData)
A class created to define a list of pydarn.sdio.radDataTypes.beamData
objects, filled with a scan (pattern of beams) of data from the
specified pipeline. The pointer will return None when finished reading.
Example
--------
::
import datetime as dt
my_ptr = radDataOpen(dt.datetime(2011,1,1),'bks', \
eTime=dt.datetime(2011,1,1,2),channel=None, bmnum=7,cp=153, \
fileType='fitex',filtered=False, src=None):
my_scan = radDataReadScan(my_ptr)
Notes
-------
To use this, you must first create a
:class:`pydarn.sdio.radDataTypes.radDataPtr` object with :func:`radDataOpen`
This will ignore any beam number (bmnum) request. Also, if no channel was
specified in radDataOpen, it will only read channel 'a'
Written by AJ 20130110
"""
from davitpy.pydarn.sdio import radDataPtr
# check input
assert isinstance(my_ptr, radDataPtr), \
logging.error('input must be of type radDataPtr')
return my_ptr.readScan()
def radDataCreateIndex(my_ptr):
"""A function to index radar data into dict from a
:class:`pydarn.sdio.radDataTypes.radDataPtr` object
Parameters
-----------
my_ptr : (pydarn.sdio.radDataTypes.radDataPtr)
Contains the pipeline to the data we are after
Returns
--------
my_index : (dict)
        A dictionary with keys recording the time of each beam in the specified
pointer and the value corresponding to the location for that record
in the data file (byte offsets in the file).
Example
---------
::
import datetime as dt
my_ptr = radDataOpen(dt.datetime(2011,1,1),'bks', \
eTime=dt.datetime(2011,1,1,2),channel=None, bmnum=7,cp=153, \
fileType='fitex',filtered=False, src=None)
my_index = radDataCreateIndex(my_ptr)
Notes
------
To use this, you must first create a
:class:`pydarn.sdio.radDataTypes.radDataPtr` object with :func:`radDataOpen`
Written by JDS 20140606
"""
from davitpy.pydarn.sdio.radDataTypes import radDataPtr
assert isinstance(my_ptr, radDataPtr), \
logging.error('input must be of type radDataPtr')
return my_ptr.createIndex()
def radDataReadAll(my_ptr):
"""A function to read a large amount (to the end of the request) of radar
data into a list from a :class:`pydarn.sdio.radDataTypes.radDataPtr` object
Parameters
-----------
my_ptr : (pydarn.sdio.radDataTypes.radDataPtr)
Contains the pipeline to the data we are after
Returns
----------
my_list : (list)
A list filled with pydarn.sdio.radDataTypes.scanData objects holding
the data we are after. The list will contain None if nothing is found.
Example
-----------
::
import datetime as dt
my_ptr = radDataOpen(dt.datetime(2011,1,1),'bks', \
eTime=dt.datetime(2011,1,1,2),channel=None, bmnum=7,cp=153, \
fileType='fitex',filtered=False, src=None)
my_list = radDataReadAll(my_ptr)
Notes
------
To use this, you must first create a
:class:`pydarn.sdio.radDataTypes.radDataPtr` object with :func:`radDataOpen`
Written by AJ 20130606
"""
from davitpy.pydarn.sdio import radDataPtr
# check input
assert isinstance(my_ptr, radDataPtr), \
logging.error('input must be of type radDataPtr')
my_list = [beam for beam in my_ptr]
return my_list
|
gpl-3.0
| 7,147,466,849,340,686,000
| 37.262391
| 80
| 0.639973
| false
| 3.710489
| false
| false
| false
|
mnr/rubberfish
|
unneeded_stuff/countSyllables.py
|
1
|
1467
|
def count_syllables(word):
# thanks to https://github.com/akkana
verbose = False #print debugging?
vowels = ['a', 'e', 'i', 'o', 'u']
on_vowel = False
in_diphthong = False
minsyl = 0
maxsyl = 0
lastchar = None
word = word.lower()
for c in word:
is_vowel = c in vowels
        if on_vowel is None:
on_vowel = is_vowel
# y is a special case
if c == 'y':
is_vowel = not on_vowel
if is_vowel:
if verbose: print (c, "is a vowel")
if not on_vowel:
# We weren't on a vowel before.
# Seeing a new vowel bumps the syllable count.
if verbose: print ("new syllable")
minsyl += 1
maxsyl += 1
elif on_vowel and not in_diphthong and c != lastchar:
# We were already in a vowel.
# Don't increment anything except the max count,
# and only do that once per diphthong.
if verbose: print (c, "is a diphthong")
in_diphthong = True
maxsyl += 1
elif verbose: print ("[consonant]")
on_vowel = is_vowel
lastchar = c
# Some special cases:
if word[-1] == 'e':
minsyl -= 1
# if it ended with a consonant followed by y, count that as a syllable.
if word[-1] == 'y' and not on_vowel:
maxsyl += 1
return minsyl, maxsyl
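# Minimal usage sketch (not part of the original file):
if __name__ == "__main__":
    # Expected estimates from the algorithm above: "python" -> (2, 2),
    # "cheese" -> (1, 2) (the trailing silent 'e' lowers the minimum).
    for sample in ("python", "cheese"):
        print (sample, count_syllables(sample))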
|
mit
| 3,068,843,860,149,435,000
| 27.764706
| 75
| 0.503749
| false
| 3.6133
| false
| false
| false
|
mcoughli/root_of_trust
|
operational_os/plot_data.py
|
1
|
4710
|
#!/usr/bin/python
import pylab
import matplotlib
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
import numpy
import csv
import os
import sys
# TIMESTAMPS = ["2017-03-30T06:35:54.726002", "2017-03-30T09:45:20.551204"]
TIMESTAMPS = ["2017-04-02T03:35:32.431002"]
# TIMESTAMP_LABELS = {"2017-03-30T06:35:54.726002":"SDCARD",
# "2017-03-30T09:45:20.551204":"TMPFS"}
TIMESTAMP_LABELS = {"2017-04-02T03:35:32.431002":"TMPFS"}
SUB_EXPERIMENTS = ["python", "fpga"]
EXPERIMENT_LABELS = {"python":"Python FS",
"fpga":"FPGA FS"}
EXPERIMENT_MARKER = {"python":"o-",
"fpga":"D-"}
# TRIALS = range(1000, 30000, 1000)
TRIALS = [(1024*2**x) for x in range(1,13)]
def get_sub_experiment_data(sub_experiment, timestamp):
data = {}
averages = []
times_averages = []
stdevs = []
data_bytes = []
throughputs_averages = []
for trial in TRIALS:
filename = "data/{1}/{0}/{0}_{1}_{2}.csv".format(sub_experiment, timestamp, trial)
if not os.path.exists(filename):
print "Skipping file {}".format(filename)
continue
data[trial] = {}
with open(filename) as data_file:
reader = csv.DictReader(data_file)
data[trial]['data'] = list(reader)
throughputs = numpy.array([])
times = numpy.array([])
data_sum = 0
data_total = 0
for item in data[trial]['data']:
num_bytes = int(item["Bytes"])#/1024.0
num_time = float(item["Time"])
print "Num bytes: {}".format(num_bytes)
throughputs = numpy.append(throughputs, (num_bytes/num_time)/1024.0)
# print "{},{}".format(int(item["Bytes"]), float(item["Time"]))
data_sum = data_sum + num_bytes
data_total = data_total + 1
times = numpy.append(times, num_time)
data_average = data_sum/data_total
if data_average not in TRIALS:
print "Data average {} not in trials".format(data_average)
continue
data[trial]['average'] = numpy.average(throughputs)
data[trial]['std'] = numpy.std(throughputs)#numpy.std(times_averages)
data[trial]['time_average'] = numpy.average(times)
averages.append(data[trial]['average'])
stdevs.append(data[trial]['std'])
times_averages.append(data[trial]['time_average'])
throughput_average = numpy.average(throughputs)
throughputs_averages.append(throughput_average)
data_bytes.append(data_average)
data[trial]['throughputs'] = throughputs
print "Throughputs average for {} {},{}: {}".format(TIMESTAMP_LABELS[timestamp], sub_experiment, trial, throughput_average)
# print "Throughputs mean for {} {},{}: {}".format(TIMESTAMP_LABELS[timestamp], sub_experiment, trial, numpy.mean(throughputs))
# print "Throughputs stdev for {} {},{}: {}".format(TIMESTAMP_LABELS[timestamp], sub_experiment, trial, numpy.std(throughputs))
# pylab.figure()
# pylab.plot(TRIALS, averages)
# pylab.show()
# print "\n\n\n\n\n"
return times_averages, stdevs, data_bytes, throughputs_averages
def plot_experiment():
for timestamp in TIMESTAMPS:
throughputs = {}
# all_averages = []
# lengend = []
# if TIMESTAMP_LABELS[timestamp] != "TMPFS":
# continue
for sub_experiment in SUB_EXPERIMENTS:
# legend.append(sub_experiment)
time_averages, std, data, throughputs_averages = get_sub_experiment_data(sub_experiment, timestamp)
pylab.errorbar(data, throughputs_averages, yerr=std, fmt=EXPERIMENT_MARKER[sub_experiment], label=EXPERIMENT_LABELS[sub_experiment])
throughputs[sub_experiment] = list(throughputs_averages)
# pylab.bar(data, throughputs_averages, 1000, yerr=std)
overheads = []
for i in range(len(throughputs["python"])):
overheads.append(float(throughputs["fpga"][i])/float(throughputs["python"][i]))
overhead_sum = 0
for overhead in overheads:
overhead_sum = overhead_sum + overhead
overhead_average = overhead_sum/len(overheads)
print "Overhead average: {}".format((1-overhead_average)*100.0)
pylab.xscale("log", nonposx='clip')
pylab.xlabel("Data Processed (bytes)")
# pylab.ylabel("Time (s)")
pylab.ylabel("Throughput (KB/s)")
pylab.legend(loc=4)
pylab.savefig("{}.png".format(TIMESTAMP_LABELS[timestamp]))
pylab.savefig("{}.pdf".format(TIMESTAMP_LABELS[timestamp]))
pylab.show()
if __name__ == "__main__":
plot_experiment()
|
gpl-3.0
| 8,308,318,929,500,350,000
| 40.681416
| 144
| 0.608068
| false
| 3.45815
| false
| false
| false
|
adafruit/Adafruit_Python_BluefruitLE
|
setup.py
|
1
|
1037
|
from setuptools import setup, find_packages
import platform
platform_install_requires = []
if platform.system() == 'Darwin':
platform_install_requires += ['pyobjc-framework-CoreBluetooth']
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(name = 'Adafruit_BluefruitLE',
version = '0.9.10',
author = 'Tony DiCola',
author_email = 'tdicola@adafruit.com',
description = 'Python library for interacting with Bluefruit LE (Bluetooth low energy) devices on Linux or OSX.',
long_description = long_description,
license = 'MIT',
url = 'https://github.com/adafruit/Adafruit_Python_BluefruitLE/',
install_requires = ['future'] + platform_install_requires,
packages = find_packages())
|
mit
| -1,287,049,547,496,601,900
| 36.035714
| 125
| 0.642237
| false
| 3.716846
| false
| false
| false
|
silenteddie/Landsat8LST_SWA
|
modis_water_vapor_interface.py
|
1
|
5279
|
# coding=utf-8
from PyQt4.QtGui import QApplication
import modis_extent_generator
from qgis.core import *
import l8_lst_swa_common_lib
import processing
import datetime
from urllib2 import urlopen
from ftplib import FTP
import shutil
from PyQt4.QtCore import QUrl
from PyQt4.QtNetwork import QNetworkAccessManager, QNetworkRequest
def getWaterVaporForGivenRaster (inputRaster, year, month, day, outputPath,tle1,tle2,processLabel, tempDir):
"""
Find needed MOD09 file for raster (e.g. Landsat scene) and download everything needed from NASA FTP'
Then cut MOD09 by input raster and fix resolition.
:param inputRaster: raster, for which's extent MOD09 will be searched
:param year: year of aquisition
:param month: month of aquisition
:param day: day of aquisition
:param outputPath: path, where final Water Vapor grid will be saved
:param tle1: TLE line 1
:param tle2: TLE line 2
:param processLabel: qt label from interface to show status of progress
:param tempDir: temporary directory where files will be downloaded
:return: 1 or error code
"""
processLabel.setText('Calculating TERRA track for day')
QApplication.processEvents()
scenesExtent = modis_extent_generator.generateScenesExtentLayerForDay(year,month,day,tle1,tle2,'Terra', True)
processLabel.setText('Searching suitable scene for raster')
QApplication.processEvents()
WGS84 = QgsCoordinateReferenceSystem(4326, QgsCoordinateReferenceSystem.PostgisCrsId)
rasterExtent = l8_lst_swa_common_lib.getRasterLayerExtent(inputRaster, WGS84)
rasterExtentGeom = [[QgsPoint(rasterExtent['xMin'],rasterExtent['yMin']),
QgsPoint(rasterExtent['xMin'],rasterExtent['yMax']),
QgsPoint(rasterExtent['xMax'],rasterExtent['yMax']),
QgsPoint(rasterExtent['xMax'],rasterExtent['yMin'])]]
rasterMaskLayer = QgsVectorLayer("Polygon", 'Raster mask', "memory")
rasterMaskLayerDP = rasterMaskLayer.dataProvider()
rasterMaskLayer.startEditing()
maskFeature = QgsFeature()
maskFeature.setGeometry(QgsGeometry.fromPolygon(rasterExtentGeom))
rasterMaskLayerDP.addFeatures([maskFeature])
rasterMaskLayer.commitChanges()
rasterMaskLayer.updateExtents()
QgsMapLayerRegistry.instance().addMapLayer(rasterMaskLayer)
QgsMapLayerRegistry.instance().addMapLayer(scenesExtent)
try:
processing.runalg('qgis:selectbylocation',scenesExtent,rasterMaskLayer,u'contains',0)
except:
raise
containingScene = scenesExtent.selectedFeatures()[0]
# Suitable scene time
containingSceneTime = str(containingScene[1]).split(':')[0]+str(containingScene[1]).split(':')[1]
processLabel.setText('Downloading MOD03...')
QApplication.processEvents()
MOD03 = downloadMODL2ForGivenDateAndTime(year,month,day,containingSceneTime,'MOD03',tempDir+'\\MOD03A.'+str(year)+str(month)+str(day)+'.'+str(containingSceneTime)+'.hdf')
if MOD03 != 1:
return MOD03
processLabel.setText('Downloading MOD09...')
QApplication.processEvents()
MOD09 = downloadMODL2ForGivenDateAndTime(year,month,day,containingSceneTime,'MOD09',tempDir+'\\MOD09A.'+str(year)+str(month)+str(day)+'.'+str(containingSceneTime)+'.hdf')
if MOD09 != 1:
return MOD09
QgsMapLayerRegistry.instance().removeMapLayer(rasterMaskLayer.id())
QgsMapLayerRegistry.instance().removeMapLayer(scenesExtent.id())
### TO BE CONTINUED
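# --- Editor's illustration (hedged sketch; every name below is hypothetical and not
# defined in this module): the function above is meant to be driven from the plugin
# GUI, roughly like this:
#
#     result = getWaterVaporForGivenRaster(
#         inputRaster=landsatLayer,        # a loaded QgsRasterLayer
#         year=2015, month=7, day=14,
#         outputPath='C:/temp/water_vapor.tif',
#         tle1=tleLine1, tle2=tleLine2,    # TERRA two-line elements
#         processLabel=statusLabel,        # a QLabel used for progress messages
#         tempDir='C:/temp')
#     if result != 1:
#         pass  # handle the numeric error code propagated from the download step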
def downloadMODL2ForGivenDateAndTime(year, month, day, time, product, rasterFullPath):
"""
Downloads a MODIS L2 product for the given date. Returns 1 on success, otherwise an error code from 2 to 6.
:param year: year of acquisition
:param month: month of acquisition
:param day: day of acquisition
:param time: time in format hhmm (e.g. 0845)
:param product: Product code. MOD09, MOD03 etc.
:param rasterFullPath: full path where the downloaded file will be saved
:return: 1 or error code
"""
currentDate = datetime.date(year,month,day)
currentDayOfYear = currentDate.timetuple().tm_yday
currentDayOfYear = '0'*(3-len(str(currentDayOfYear))) + str(currentDayOfYear)
try:
ftp = FTP('ladsweb.nascom.nasa.gov') # MODIS NASA FTP
ftp.login()
except:
return 2 # Bad connection
try:
ftp.cwd('allData/6/'+product+'/')
ftp.cwd(str(year))
ftp.cwd(str(currentDayOfYear))
except:
return 3 # Date is unavailable
pathString = 'ftp://ladsweb.nascom.nasa.gov/allData/6/' + product + '/' + str(year) + '/' +\
str(currentDayOfYear) + '/'
try:
files = ftp.nlst()
except:
return 4 # File list is not available
timestamp = str(year) + str(currentDayOfYear) + '.' + str(time)
fileFlag = False
for file in files:
if (file[-3:] == 'hdf') and (file.find(timestamp) != -1):
fileFlag = True
pathString += file
try:
req = urlopen(pathString)
dist = open(rasterFullPath, 'wb')
shutil.copyfileobj(req, dist)
dist.close()
except:
return 5 # Cannot download file
if not fileFlag:
return 6 # No needed file
return 1
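# --- Editor's illustration (hedged sketch, not part of the original plugin): the
# function above reports failures through the numeric codes listed in its docstring;
# a caller might translate them into user-facing messages like this. The helper name
# is hypothetical.
_MOD_DOWNLOAD_ERRORS = {
    2: 'Bad connection to the NASA FTP server',
    3: 'Requested date is unavailable on the server',
    4: 'File list is not available',
    5: 'Cannot download the file',
    6: 'No file found for the requested timestamp',
}
def describeModisDownloadResult(code):
    """Return a readable message for a downloadMODL2ForGivenDateAndTime result code."""
    if code == 1:
        return 'Download succeeded'
    return _MOD_DOWNLOAD_ERRORS.get(code, 'Unknown error code: %s' % code)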
|
gpl-2.0
| 4,448,369,078,451,487,000
| 36.707143
| 174
| 0.684599
| false
| 3.817064
| false
| false
| false
|
dwagon/pymoo
|
moo/moo/settings.py
|
1
|
2358
|
"""
Django settings for moo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'u@g=^v7)hrcffe-1p82f!q38v#1!w6(b!6p1=61m-$osx2w%!h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = []
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'moo/templates',),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'building',
'game',
'planet',
'race',
'ship',
'system',
'tech',
'player'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'moo.urls'
WSGI_APPLICATION = 'moo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
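# Editor's note (hedged example, not part of the original settings): for production
# deployments the sqlite default is commonly replaced by PostgreSQL, e.g.
#
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.postgresql_psycopg2',
#         'NAME': 'moo',
#         'USER': 'moo',
#         'PASSWORD': 'change-me',
#         'HOST': 'localhost',
#         'PORT': '5432',
#     }
# }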
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
gpl-2.0
| -6,647,317,063,238,338,000
| 22.58
| 71
| 0.704411
| false
| 3.243466
| false
| false
| false
|
D-Vaillant/julius
|
setup.py
|
1
|
5140
|
""" setup.py:
Real simple utility.
No "install to command line" option, because nobody has requested one.
"""
from typing import Union
from io import TextIOBase
import string
import argparse
import julius
# Some utilities.
num_to_word = {'1': 'one', '2': 'two', '3': 'three', '4': 'four', '5': 'five',
'6': 'six', '7': 'seven', '8': 'eight', '9': 'nine', '0': 'zero'}
class MissingKeyError(Exception):
pass
# Compatibility, I guess?
try:
FileNotFoundError()
except NameError:
FileNotFoundError = IOError
def file_wrapper(file_loc: str) -> str:
"""
Tries to treat a string as a file location.
If successful, return the contents of the file.
Otherwise, return the input string.
"""
try:
out = open(file_loc, 'r').read()
except FileNotFoundError:
out = file_loc
return out
def safety_wrapper(key: str, safety_level = 0) -> str:
    """
    Strips a string of non-alphabetical characters, depending on the safety.
    PUNCTUATION, CAPITALIZATION
    """
    if safety_level > 2:
        safety_level -= 2
    # Only safety level 0 (the level used by the caller below) is implemented:
    # keep ASCII letters and lowercase them. Higher levels pass the key through.
    if safety_level == 0:
        key = ''.join(char for char in key if char in string.ascii_letters).lower()
    return key
# The argparse definition.
parser = argparse.ArgumentParser(prog='julius',
description="Implements a Vigenere cipher.\n"
"Sends the text to sys.out.")
# The text to be ciphered through.
parser.add_argument('plain_text',
nargs=1,
help="It really should be a file location, but you can "
"input strings as well.")
# The key that we're ciphering with!
parser.add_argument('key',
nargs='?',
default=None,
help="If omitted, looks to optional KEY arguments.\n"
"Please only use lowercase letters! "
"Can also open files.")
# Key arguments.
parser.add_argument('--key_length',
nargs='?',
default=0,
type=int,
help="If key is omitted, generate a random key of given "
"length and use that. This is a KEY argument.")
parser.add_argument('--caesar',
action='store_true',
help="If key is omitted, generate a random key of length 1 "
"and use that. This is a KEY argument.")
parser.add_argument('--otp',
action='store_true',
help="If key is omitted, generate a random key of length "
"equal to the length of the plain_text and save it to "
"the given file location.\nStores a file containing "
"key. This is a KEY argument.")
# Use-case arguments.
parser.add_argument('--decrypt',
action='store_true',
help="Key cannot be omitted. Decrypts a text encrypted "
"with the given key.")
'''
parser.add_argument(
'--unsafe',
nargs='?',
type=int,
default=0,
help="Allows for the preservation of non-alphanumeric characters.\n"
"Controls punctuation, capitalization, and spaces.\r\n"
"It's a binary notation: SPACES - CAPITALIZATION - PUNCTUATION.\r\n"
"001 -> 1 => strip spaces and capitalization, keep punctuation\r\n"
"111 -> 1 + 2 + 4 = 7 => keep all\r\n"
"101 -> 4 + 0 + 1 = 5 => strip capitalization"
)
'''
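# Editor's note (hedged usage sketch; the file names are only illustrative):
#
#     python setup.py message.txt key.txt             encrypt message.txt with key.txt
#     python setup.py cipher.txt key.txt --decrypt    decrypt it again
#     python setup.py message.txt --caesar            random one-letter key
#     python setup.py message.txt --otp               one-time pad; key saved to key_<random>.txt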
if __name__ == "__main__":
args = parser.parse_args()
# Some plain_text munging.
plain_text = file_wrapper(args.plain_text[0])
# Turn numerals into words.
for k, v in num_to_word.items():
plain_text = plain_text.replace(k, v)
# Forcefully remove all non-alphabetical characters, make 'em lowercase.
# TODO: Remove this in lieu of safety_wrapper.
plain_text = ''.join(char for char in plain_text
if char in string.ascii_letters).lower()
# This is the part that deals with keys.
if args.key is not None:
# strip the key of punctuation and capitalization
key = safety_wrapper(file_wrapper(args.key), 0)
else:
# Decryption requires a key to decrypt with, of course.
if args.decrypt:
raise MissingKeyError("Decryption requires a key!")
# One-time pad.
if args.otp:
key = julius.create_random_key(length=len(plain_text))
# Save the key to a keyfile of random name.
with open("key_{}.txt".format(julius.create_random_key(5)), 'w')\
as key_file:
key_file.write(key)
print("Saved key to {}.".format(key_file.name))
elif args.key_length > 0:
key = julius.create_random_key(length=args.key_length)
elif args.caesar:
key = julius.create_random_key(length=1)
else:
raise MissingKeyError("Either specify a key textfile location, a "
"key, or use one of the KEY flags.")
print(julius.vigenere(plain_text, key, decrypting=args.decrypt))
|
gpl-3.0
| -5,746,411,298,161,874,000
| 31.948718
| 80
| 0.560506
| false
| 4.04406
| false
| false
| false
|
pmeier82/BOTMpy
|
botmpy/common/spike_alignment.py
|
1
|
7458
|
# -*- coding: utf-8 -*-
#_____________________________________________________________________________
#
# Copyright (c) 2012 Berlin Institute of Technology
# All rights reserved.
#
# Developed by: Philipp Meier <pmeier82@gmail.com>
# Neural Information Processing Group (NI)
# School for Electrical Engineering and Computer Science
# Berlin Institute of Technology
# MAR 5-6, Marchstr. 23, 10587 Berlin, Germany
# http://www.ni.tu-berlin.de/
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal with the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimers.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimers in the documentation
# and/or other materials provided with the distribution.
# * Neither the names of Neural Information Processing Group (NI), Berlin
# Institute of Technology, nor the names of its contributors may be used to
# endorse or promote products derived from this Software without specific
# prior written permission.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# WITH THE SOFTWARE.
#_____________________________________________________________________________
#
# Acknowledgements:
# Philipp Meier <pmeier82@gmail.com>
#_____________________________________________________________________________
#
"""spikes alignment functions"""
__docformat__ = 'restructuredtext'
__all__ = ['sinc_interp1d', 'get_tau_for_alignment', 'get_tau_align_min',
'get_tau_align_max', 'get_tau_align_energy', 'get_aligned_spikes']
##--- IMPORTS
import scipy as sp
from scipy.signal import resample
from .util import INDEX_DTYPE
from .funcs_spike import epochs_from_spiketrain, get_cut, extract_spikes
##---FUNCTIONS
def sinc_interp1d(x, s, r):
"""Interpolates `x`, sampled at times `s`
Output `y` is sampled at times `r`
inspired by the Matlab approach described at:
http://phaseportrait.blogspot.com/2008/06/sinc-interpolation-in-matlab.html
:param ndarray x: input data time series
:param ndarray s: input sampling time series (regular sample interval)
:param ndarray r: output sampling time series
:return ndarray: output data time series (regular sample interval)
"""
# init
s = sp.asarray(s)
r = sp.asarray(r)
x = sp.asarray(x)
if x.ndim == 1:
x = sp.atleast_2d(x)
else:
if x.shape[0] == len(s):
x = x.T
else:
if x.shape[1] != s.shape[0]:
raise ValueError('x and s must have the same temporal extent')
# guard the early-exit against broadcast errors when r and s differ in length
if len(s) == len(r) and sp.allclose(s, r):
return x.T
T = s[1] - s[0]
# resample
sincM = sp.tile(r, (len(s), 1)) - sp.tile(s[:, sp.newaxis], (1, len(r)))
return sp.vstack([sp.dot(xx, sp.sinc(sincM / T)) for xx in x]).T
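# --- Editor's illustration: a hedged usage sketch for sinc_interp1d, not part of the
# original BOTMpy API. It shifts a 3 Hz test tone by half a sample period; the output
# keeps one column per input channel.
def _sinc_interp1d_demo():
    s = sp.linspace(0.0, 1.0, 50, endpoint=False)  # original sample times
    r = s + 0.5 * (s[1] - s[0])                    # same rate, shifted by half a sample
    x = sp.sin(2 * sp.pi * 3.0 * s)                # 3 Hz test tone
    return sinc_interp1d(x, s, r)                  # ndarray of shape (len(r), 1)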
def get_tau_for_alignment(spikes, align_at):
"""return the per spike offset in samples (taus) of the maximum values to
the desired alignment sample within the spike waveform.
:type spikes: ndarray
:param spikes: stacked mc spike waveforms [ns, tf, nc]
:type align_at: int
:param align_at: sample to align the maximum at
:returns: ndarray - offset per spike
"""
# checks
ns, tf, nc = spikes.shape
if 0 < align_at >= tf:
return sp.zeros(ns)
# offsets
dchan = [spike.max(0).argmax() for spike in spikes]
tau = [spikes[i, :, dchan[i]].argmax() - align_at for i in xrange(ns)]
return sp.asarray(tau, dtype=INDEX_DTYPE)
get_tau_align_min = lambda spks, ali: get_tau_for_alignment(-spks, ali)
get_tau_align_max = lambda spks, ali: get_tau_for_alignment(spks, ali)
get_tau_align_energy = lambda spks, ali: get_tau_for_alignment(spks * spks, ali)
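# --- Editor's illustration: a hedged sketch of the tau computation, not part of the
# original BOTMpy API. Three fake single-channel spikes peak at samples 3, 5 and 7;
# aligning the peaks at sample 5 yields offsets of -2, 0 and 2.
def _tau_alignment_demo():
    spikes = sp.zeros((3, 10, 1))  # [ns, tf, nc]
    for i, peak in enumerate([3, 5, 7]):
        spikes[i, peak, 0] = 1.0
    return get_tau_for_alignment(spikes, 5)  # -> array([-2, 0, 2])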
def get_aligned_spikes(data, spike_train, align_at=-1, tf=47, look_ahead=0, mc=True,
kind='none', rsf=1., sample_back=True):
"""return the set of aligned spikes waveforms and the aligned spike train
:type data: ndarray
:param data: data with channels in the columns
:type spike_train: ndarray or list
:param spike_train: spike train of events in data
:type align_at: int
:param align_at: align feature at this sample in the waveform
:type tf: int
:param tf: temporal extend of the waveform in samples
:type look_ahead: int
:param look_ahead: samples to look beyond the cut window for finding the align feature
:type mc: bool
:param mc: if True, return mc waveforms, else return concatenated waveforms.
Default=True
:type kind: str
:param kind: String giving the type of alignment to conduct. One of:
- "max" - align on maximum of the waveform
- "min" - align on minimum of the waveform
- "energy" - align on peak of energy
- "none" - no alignment
Default='none'
:type rsf: float
:param rsf: resampling factor (use integer values of powers of 2)
:param bool sample_back: if True, resample spikes to original length after resampling
:rtype: ndarray, ndarray
:returns: stacked spike events, spike train with events corrected for
alignment
"""
# resample?
if rsf != 1.0:
data = resample(data, rsf * data.shape[0])
tf *= rsf
align_at *= rsf
spike_train *= rsf
look_ahead *= rsf
# init
ep, st = epochs_from_spiketrain(
spike_train,
(align_at + look_ahead, tf - align_at + look_ahead),
end=data.shape[0],
with_corrected_st=True)
# align spikes
if ep.shape[0] > 0:
if kind in ['min', 'max', 'energy']:
spikes = extract_spikes(data, ep, mc=True)
if rsf != 1.0:
print spikes.shape
tau = {'min': get_tau_align_min,
'max': get_tau_align_max,
'energy': get_tau_align_energy}[kind](spikes, align_at)
st += tau
st -= look_ahead
ep, st = epochs_from_spiketrain(
st,
(align_at, tf - align_at),
end=data.shape[0],
with_corrected_st=True)
spikes = extract_spikes(data, ep, mc=mc)
else:
if mc is True:
size = 0, tf, data.shape[1]
else:
size = 0, tf * data.shape[1]
spikes = sp.zeros(size)
# re-resample?
if sample_back and rsf != 1.0:
spikes = resample(spikes, spikes.shape[1] * 1. / rsf, axis=1)
st *= 1. / rsf
# return
return spikes, st
##--- MAIN
if __name__ == '__main__':
pass
|
mit
| -8,748,461,185,882,833,000
| 35.028986
| 90
| 0.61786
| false
| 3.634503
| false
| false
| false
|
ryansturmer/cuttlebug
|
cuttlebug/ui/views/runtime_view.py
|
1
|
29660
|
import view
import wx
import wx.gizmos as gizmos
from cuttlebug.ui.controls import DictListCtrl
from cuttlebug.util import ArtListMixin, has_icon, bidict, KeyTree, str2int
from functools import partial
import cuttlebug.gdb as gdb
import os, threading
import cuttlebug.ui.menu as menu
import cuttlebug.settings as settings
import cuttlebug.project as project
import cuttlebug.ui.controls as controls
MNU_ENABLE_BKPT = 0
MNU_DISABLE_BKPT = 1
class RuntimeTree(gizmos.TreeListCtrl, ArtListMixin, KeyTree):
def __init__(self, parent):
self.parent = parent
gizmos.TreeListCtrl.__init__(self, id=-1, parent=parent, style=wx.TR_DEFAULT_STYLE | wx.TR_FULL_ROW_HIGHLIGHT | wx.TR_HIDE_ROOT | wx.TR_HAS_BUTTONS | wx.TR_LINES_AT_ROOT | wx.TR_EDIT_LABELS)
ArtListMixin.__init__(self)
KeyTree.__init__(self)
self.SetFont(wx.Font(8, wx.FONTFAMILY_MODERN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
self.parent = parent
self.Bind(wx.EVT_TREE_ITEM_EXPANDING, self.on_expanding)
self.Bind(wx.EVT_TREE_ITEM_GETTOOLTIP, self.on_get_tooltip)
self.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self.on_begin_label_edit)
self.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.on_end_label_edit)
self.Bind(wx.EVT_TREE_SEL_CHANGED, self.on_select_item)
#self.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.Bind(wx.EVT_LEFT_DCLICK, self.on_dclick)
self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.on_dclick)
self.Bind(wx.EVT_TREE_ITEM_RIGHT_CLICK, self.on_item_right_click)
self.Bind(wx.EVT_LIST_COL_END_DRAG, self.on_col_resize)
self.model = None
self.AddColumn('Context')
self.AddColumn('Value')
self.SetColumnEditable(1, True)
self.SetColumnAlignment(1, wx.ALIGN_RIGHT)
self.lock = threading.RLock()
self.__var_idx = 0
self.create_popup_menus()
self.clear()
self.load_positions()
def on_col_resize(self, evt):
self.save_positions()
def save_positions(self):
cols = self.GetColumnCount()
widths = [self.GetColumnWidth(i) for i in range(cols)]
settings.session_set('runtime_view_col_widths', widths)
def load_positions(self):
try:
widths = settings.session_get('runtime_view_col_widths')
cols = self.GetColumnCount()
if len(widths) != cols:
raise Exception("Mismatch of stored column widths")
for i, width in enumerate(widths):
self.SetColumnWidth(i, width)
except:
pass
def create_popup_menus(self):
self.menu_manager = menu.MenuManager()
m = self.menu_manager.menu()
m.item("Enable", func=self.on_enable_breakpoint, icon='stop.png', show=MNU_ENABLE_BKPT, hide=MNU_DISABLE_BKPT)
m.item("Disable", func=self.on_disable_breakpoint, icon='stop_disabled.png', show=MNU_DISABLE_BKPT, hide=MNU_ENABLE_BKPT)
m.item("Remove", func=self.on_remove_breakpoint, icon='ex.png')
self.menu_breakpoint_item = m
m = self.menu_manager.menu()
m.item("Enable All Breakpoints", func=self.on_enable_all_breakpoints, icon='stop.png')
m.item("Disable All Breakpoints", func=self.on_disable_all_breakpoints, icon='stop_disabled.png')
m.item("Remove All Breakpoints", func=self.on_remove_all_breakpoints, icon='ex.png')
self.menu_breakpoints = m
m = self.menu_manager.menu()
m.item("Show this Frame in Source", func=self.on_show_frame, icon='find.png')
m.step_out = m.item("Step Out\tShift+F6", func=self.on_step_out, icon='control_play_blue.png')
self.menu_frame_item = m
m = self.menu_manager.menu()
m.item("Add Watch...", func=self.on_add_watch, icon='magnifier_zoom_in.png')
self.menu_watches = m
m = self.menu_manager.menu()
m.item("Remove Watch", func=self.on_remove_watch, icon='ex.png')
self.menu_watch_item = m
def set_model(self, model):
self.model = model
self.model.Bind(gdb.EVT_GDB_UPDATE_VARS, self.on_var_update)
self.model.Bind(gdb.EVT_GDB_UPDATE_STACK, self.on_stack_update)
self.model.Bind(gdb.EVT_GDB_UPDATE_BREAKPOINTS, self.on_breakpoint_update)
self.model.Bind(gdb.EVT_GDB_UPDATE_REGISTERS, self.on_register_update)
self.model.Bind(gdb.EVT_GDB_FINISHED, self.on_gdb_finished)
self.model.Bind(gdb.EVT_GDB_STOPPED, self.on_gdb_stopped)
wx.CallAfter(self.build_sfr_tree)
def get_var_name(self):
name = "rtv_%d" % self.__var_idx
self.__var_idx += 1
return name
def on_breakpoint_update(self, evt):
wx.CallAfter(self.update_breakpoints)
def on_register_update(self, evt):
wx.CallAfter(self.update_registers, evt.data)
self.save_positions()
def on_var_update(self, evt):
names = evt.data
for name in names:
if name in self.pending_var_additions:
self.lock.acquire()
parent = self.pending_var_additions.pop(name)
self.lock.release()
wx.CallAfter(self.add_var_item, parent, name, self.model.vars[name])
if parent == self.watch_item:
self.expand(self.watch_item)
elif name in self.pending_var_updates:
self.lock.acquire()
var_item = self.pending_var_updates.pop(name)
old_name= self.get_item_data(var_item)
if old_name in self.var_registry:
self.var_registry.pop(old_name)
self.lock.release()
wx.CallAfter(self.update_var_item, var_item, name, self.model.vars[name])
elif name in self.var_registry and name in self.model.vars:
var_item = self.var_registry[name]
wx.CallAfter(self.update_var_item, var_item, name, self.model.vars[name])
else:
pass
def on_stack_update(self, evt):
#print self.model.stack.pretty()
if self.model:
if self.__check_stack():
wx.CallAfter(self.update_stack)
else:
wx.CallAfter(self.rebuild_stack)
evt.Skip()
def on_gdb_finished(self, evt):
self.clear()
self.model = None
def on_item_right_click(self, evt):
item = self.__get_evt_item(evt)
if item.is_ok():
self.select_item(item)
if self.model:
if item == self.breakpoints_item and self.get_children_count(self.breakpoints_item) > 0:
self.PopupMenu(self.menu_breakpoints.build(self), evt.GetPoint())
elif self.is_descendent(item, self.breakpoints_item):
bkpt = self.get_item_data(item)
self.breakpoint = bkpt
self.menu_manager.publish(MNU_DISABLE_BKPT) if bkpt.enabled else self.menu_manager.publish(MNU_ENABLE_BKPT)
self.PopupMenu(self.menu_breakpoint_item.build(self), evt.GetPoint())
elif self.is_frame_item(item):
frame = self.get_item_data(item)
self.frame = frame
if frame.level == 0 and len(self.frames) > 1:
self.menu_frame_item.step_out.show()
else:
self.menu_frame_item.step_out.hide()
self.PopupMenu(self.menu_frame_item.build(self), evt.GetPoint())
elif item == self.watch_item:
self.PopupMenu(self.menu_watches.build(self), evt.GetPoint())
elif self.is_descendent(item, self.watch_item):
self.selected_item = item
self.PopupMenu(self.menu_watch_item.build(self), evt.GetPoint())
evt.Skip()
def on_dclick(self, evt):
id = self.__get_evt_item(evt)
if self.model and self.is_descendent(id, self.breakpoints_item):
bkpt = self.get_item_data(id)
if bkpt.enabled:
self.model.break_disable(bkpt)
else:
self.model.break_enable(bkpt)
elif self.model and self.is_descendent(id, self.sfr_item):
reg = self.get_item_data(id)
if reg:
old_value = reg.value
try:
response = controls.RegisterEditDialog.show(self, reg)
except Exception, e:
print e
if response == wx.ID_OK:
self.model.data_evaluate_expression("%s=%s" % (reg.expression, reg.value), callback=partial(self.on_sfr_data, id,True))
else:
reg.value = old_value
elif self.model and self.is_descendent(id, self.registers_item):
name = self.get_item_data(id)
target_model = self.parent.controller.project.target
reg = target_model.find_by_name(name)
if not reg:
reg = project.CPURegister(name, name, 4)
reg.add_field(project.Field(0, 32, name))
reg.value = str2int(self.register_registry[name])
response = controls.RegisterEditDialog.show(self, reg)
if response == wx.ID_OK:
self.model.data_evaluate_expression("%s=%s" % (reg.expression, reg.value),callback=self.on_register_data)
evt.Skip()
def on_register_data(self, evt):
self.model.update()
def on_begin_label_edit(self, evt):
item = self.get_event_item(evt)
name = self.get_item_data(item)
if name in self.var_registry:
if self.is_descendent(item, self.get_frame_items()[-1]):
evt.Skip()
return
if self.is_descendent(item, self.sfr_item) or self.is_descendent(item, self.watch_item):
evt.Skip()
return
evt.Veto()
def on_select_item(self, evt):
#item = self.get_event_item(evt)
#print self.get_item_data(item)
evt.Veto()
#evt.Skip()
def on_end_label_edit(self, evt):
item = self.get_event_item(evt)
name = self.get_item_data(item)
if name in self.var_registry and name in self.model.vars:
new_var_value = evt.GetLabel()
self.model.var_assign(name, new_var_value)
if self.is_descendent(item, self.sfr_item) or self.is_descendent(item, self.watch_item):
reg = self.get_item_data(item)
if hasattr(reg, 'expression'):
self.model.data_evaluate_expression('%s=%s' % (reg.expression, evt.GetLabel()), callback=partial(self.on_sfr_data, item,True))
evt.Veto()
def on_get_tooltip(self, evt):
item = self.get_event_item(evt)
if self.model and item:
if item == self.stack_item:
evt.SetToolTip(wx.ToolTip("Stack Depth: %d frames" % self.model.stack.depth))
data = self.get_item_data(item)
if hasattr(data, 'file'): # This is a stack frame
evt.SetToolTip(wx.ToolTip("Stack frame %s() at 0x%x %s" % (data.func, data.addr, "in file %s" % data.file if data.file else "")))
elif data in self.var_registry:
evt.SetToolTip(wx.ToolTip(self.model.vars[data].expression))
def on_expanding(self, evt):
item=self.get_event_item(evt)
item_data=self.get_item_data(item)
if self.is_descendent(item, self.sfr_item):
self.update_sfr_tree(item, force_root=True, colorize=False)
return
if hasattr(item_data, 'level') and self.get_children_count(item, False) == 0: #item_data is a stack frame, and we wish to list its locals
if not self.model.running:
self.model.stack_list_arguments(frame=item_data.level, callback=partial(self.__on_listed_arguments, item))
else:
evt.Veto()
elif item_data in self.var_registry and self.get_children_count(item, False) == 0:
if not self.model.running:
self.model.var_list_children(item_data, callback=partial(self.__on_listed_children, item))
else:
evt.Veto()
evt.Skip()
def __on_listed_children(self, parent, result):
names = []
if hasattr(result, 'children'):
for child in result.children:
varname= child['child']['name']
self.lock.acquire()
self.pending_var_additions[varname] = parent
self.lock.release()
names.append(varname)
class Dummy(object): pass
evt = Dummy()
evt.data = names
wx.CallAfter(self.on_var_update, evt)
def __on_listed_locals(self, frame_item, args, result):
if result.cls != 'error':
if hasattr(result, 'locals') and frame_item.is_ok():
frame = self.get_item_data(frame_item)
if self.get_children_count(frame_item, recursive=False) == 0:
for item in args + result.locals:
varname = self.get_var_name()
self.lock.acquire()
self.pending_var_additions[varname] = frame_item
self.lock.release()
self.model.var_create(item['name'], frame=frame.level, callback=self.__on_created_var, name=varname)
def __on_listed_arguments(self, frame_item, result):
if result.cls != 'error':
if 'stack-args' in result and frame_item.is_ok():
frame = self.get_item_data(frame_item)
f = result['stack-args'][frame.level]['frame']
if int(f['level']) != frame.level:
raise ValueError("Failed Sanity Check!")
args = f['args']
self.model.stack_list_locals(frame=frame.level, callback=partial(self.__on_listed_locals, frame_item, args))
def __on_created_var(self, result):
if hasattr(result, 'name'):
self.model.var_update(result.name)
def add_var_item(self, parent, name, var):
if parent.is_ok():
var_item = self.append_item(parent, var.expression.strip('"'))
self.update_var_item(var_item, name, var)
def update_var_item(self, var_item, name, var):
if var_item.is_ok():
self.set_item_data(var_item, name)
if var.children:
self.set_item_has_children(var_item, bool(var.children))
else:
self.set_item_has_children(var_item, False)
self.set_item_text(var_item, var.data, 1)
icon_name = var.type.icon_name
if has_icon(icon_name):
self.set_item_art(var_item, icon_name)
self.lock.acquire()
self.var_registry[name] = var_item
self.lock.release()
def add_watch(self, s):
vn = self.get_var_name()
self.lock.acquire()
self.pending_var_additions[vn] = self.watch_item
self.lock.release()
self.model.var_create(s, floating=True, callback=self.__on_created_var, name=vn)
def on_add_watch(self, evt):
dlg = wx.TextEntryDialog(self, "Watch Variable", self.last_watch)
if dlg.ShowModal() == wx.ID_OK:
var = dlg.GetValue().strip()
self.add_watch('"%s"' % var) # Quoting the watch allows spaces
def on_remove_watch(self, evt):
item = self.get_item_data(self.selected_item)
self.model.var_delete(item, callback=partial(self.on_watch_deleted, self.selected_item))
def on_watch_deleted(self, watch_item, evt):
self.delete(watch_item)
def scrub_vars(self, all_vars=False):
#TODO use a list
to_update = {}
if self.get_frame_count() > 0:
frame_items = self.get_frame_items()
for name, var_item in self.var_registry.iteritems():
if (not self.is_descendent(var_item, frame_items[-1]) or all_vars) and name in self.model.vars:
var = self.model.vars[name]
frame = self.get_var_frame(name)
if frame:
varname = self.get_var_name()
to_update[(name, varname)] = (frame, var)
self.pending_var_updates[varname] = var_item
for (old_name, new_name), (frame, var)in to_update.iteritems():
self.model.var_delete(old_name)
self.model.var_create(var.expression, frame=frame.level, callback=self.__on_created_var, name=new_name)
def get_frame_items(self):
return list(self.children(self.stack_item)) if self.stack_item.is_ok() else []
def get_frames(self):
return [self.get_item_data(frame_item) for frame_item in self.get_frame_items()]
def get_frame_count(self):
if self.stack_item.is_ok():
return self.get_children_count(self.stack_item, recursive=False)
else:
return 0
def is_frame_item(self, item):
return item.is_ok() and isinstance(self.get_item_data(item), gdb.GDBStackFrame)
def add_frame_item(self, frame):
item = self.append_item(self.stack_item, frame.func + "( )")
self.update_frame_item(item, frame)
def update_frame_item(self, frame_item, frame):
self.set_item_data(frame_item, frame)
self.set_item_art(frame_item, 'frame.png' if frame.level != 0 else 'frame_active.png')
self.set_item_has_children(frame_item, True)
self.set_item_bold(frame_item, True)
self.set_item_data(frame_item, frame)
def on_show_frame(self, evt):
if self.model and self.frame:
self.GetParent().controller.goto(self.frame.file, self.frame.line)
self.frame = None
def __check_stack(self):
if self.model:
# Our list of frames is reversed from the model's, because that's how we view it.
for model_frame, view_frame in zip(reversed(self.model.stack), self.get_frames()):
if model_frame.key != view_frame.key: return False
return True
def get_var_frame(self, name):
frame = None
item = self.var_registry[name]
frames = self.get_frames()
while frame not in frames:
item = self.get_parent(item)
if item.is_ok():
frame = self.get_item_data(item)
else:
return None
return frame
def on_step_out(self, evt):
self.parent.controller.step_out()
def clear_stack(self):
n = self.get_frame_count()
for i in range(n):
self.pop_stack_frame()
def rebuild_stack(self):
self.clear_stack()
self.update_stack()
def update_stack(self):
stack = self.model.stack
stack_changed=False
# If the frame count in the view is > the frame count in the model, pop off until they match (tossing frames that no longer exist)
n = self.get_frame_count()-len(stack)
if n > 0:
for i in range(n):
self.pop_stack_frame()
stack_changed = True
for frame_item, frame in zip(self.get_frame_items(), reversed(self.model.stack)):
self.update_frame_item(frame_item, frame)
# Otherwise add frames until we're all in sync
idx = self.get_frame_count()+1
while self.get_frame_count() < len(self.model.stack):
frame = stack[len(stack)-idx]
self.add_frame_item(frame)
idx += 1
self.scrub_vars(all_vars=stack_changed)
def pop_stack_frame(self):
frame_item = self.get_frame_items()[-1]
if frame_item.is_ok():
for child in self.walk(frame_item):
name = self.get_item_data(child)
if name in self.var_registry:
self.var_registry.pop(name)
self.model.var_delete(name)
self.delete(frame_item)
else:
print "Can't remove frame. Frame item is NOT ok."
def update_breakpoints(self):
if self.model and self.breakpoints_item.is_ok():
breakpoints = self.model.breakpoints
self.delete_children(self.breakpoints_item)
for bp in breakpoints:
if bp.fullname:
name = os.path.split(os.path.abspath(bp.fullname))[1]
else:
name = '0x%x' % bp.address
item = self.append_item(self.breakpoints_item, name)
self.set_item_data(item, bp)
self.set_item_text(item, str(bp.line), 1)
self.set_item_art(item, 'stop.png' if bp.enabled else 'stop_disabled.png')
def on_enable_breakpoint(self, evt):
if self.breakpoint and self.model:
self.model.break_enable(self.breakpoint)
self.breakpoint = None
def on_disable_breakpoint(self, evt):
if self.breakpoint and self.model:
self.model.break_disable(self.breakpoint)
self.breakpoint = None
def on_remove_breakpoint(self, evt):
if self.breakpoint and self.model:
self.model.break_delete(self.breakpoint)
self.breakpoint = None
def on_enable_all_breakpoints(self, evt):
if self.model:
for bkpt in self.model.breakpoints:
self.model.break_enable(bkpt)
def on_disable_all_breakpoints(self, evt):
if self.model:
for bkpt in self.model.breakpoints:
self.model.break_disable(bkpt)
def on_remove_all_breakpoints(self, evt):
if self.model:
for bkpt in self.model.breakpoints:
self.model.break_delete(bkpt)
def update_registers(self, names):
'''
if self.model and self.registers_item.is_ok():
registers = self.model.registers
if len(registers) != self.get_children_count(self.registers_item, recursive=False):
self.delete_children(self.registers_item)
for key, value in registers.iteritems():
item = self.append_item(self.registers_item, key)
self.set_item_text(item, value, 1)
self.set_item_data(item, key)
self.register_registry[key] = value
else:
for child in self.children(self.registers_item):
self.set_item_text_colour(child, wx.BLACK)
for name in names:
item = self.register_registry[name]
print item
self.set_item_text(item, registers[name], 1)
self.set_item_text_colour(item, wx.RED)
'''
def build_sfr_tree(self):
if not self.parent.controller.project:
return
self.delete_children(self.sfr_item)
target_model = self.parent.controller.project.target
def walk(self, tree_item, item):
if isinstance(item, project.Group):
group_item = self.append_item(tree_item, item.name)
for child in item.items:
walk(self, group_item, child)
elif isinstance(item, project.Peripheral):
peripheral_item = self.append_item(tree_item, item.name)
for child in item.registers:
walk(self, peripheral_item, child)
elif isinstance(item, project.SpecialFunctionRegister):
sfr_item = self.append_item(tree_item, item.fullname)
self.set_item_data(sfr_item, item)
tree_item = self.sfr_item
for item in target_model.items:
walk(self, tree_item, item)
def on_gdb_stopped(self, evt):
self.update_sfr_tree(self.sfr_item)
evt.Skip()
def update_sfr_tree(self, sfr_item, force_root=False, colorize=True):
if force_root:
items = self.children(sfr_item)
else:
items = [sfr_item]
for i in items:
for tree_item in self.walk_expanded(i, False):
item = self.get_item_data(tree_item)
if hasattr(item, 'expression'):
self.model.data_evaluate_expression(item.expression, callback=partial(self.on_sfr_data, tree_item, colorize))
def on_sfr_data(self, item, colorize, data):
if data.cls == "done" and hasattr(data, 'value'):
wx.CallAfter(self.update_sfr_value, item, data.value, colorize)
def update_sfr_value(self, item, value, colorize=True):
current_value = self.get_item_text(item, 1)
try:
reg = self.get_item_data(item)
reg.value = int(value)
text = "0x%08x" % int(value)
except:
text = value
self.set_item_text(item, text, 1)
if current_value != text and colorize:
self.set_item_text_colour(item, wx.RED)
else:
self.set_item_text_colour(item, wx.BLACK)
def update(self):
pass
def clear(self):
self.last_watch = ""
self.DeleteAllItems()
self.root_item = self.add_root('root')
self.stack_item = self.append_item(self.root_item,'Call Stack')
self.breakpoints_item = self.append_item(self.root_item, 'Breakpoints')
self.registers_item = self.append_item(self.root_item, 'CPU Registers')
self.watch_item = self.append_item(self.root_item, 'Watch')
self.sfr_item = self.append_item(self.root_item, 'HW Registers')
self.set_item_art(self.registers_item, 'chip.png')
self.set_item_art(self.stack_item, 'stack.png')
self.set_item_art(self.breakpoints_item, 'breakpoint.png')
self.set_item_art(self.watch_item, 'magnifier.png')
self.set_item_art(self.sfr_item, 'application_view_list.png')
self.lock.acquire()
self.frames = [] # Frame keys to tree items
self.var_registry = bidict() # Var names to tree items
self.pending_var_additions = {}
self.pending_var_updates = {}
self.register_registry = bidict()
self.lock.release()
self.breakpoint = None
def __get_evt_item(self, evt):
item = evt.GetItem()
if item and item.IsOk():
try:
return self.get_key(item)
except:
return None
pt = evt.GetPosition()
items = self.HitTest(pt)
try:
return self.get_key(items[0])
except:
return None
def set_item_art(self, item, name, style=wx.TreeItemIcon_Normal):
if name not in self.art:
self.add_art(name)
if item.is_ok():
self.set_item_image(item, self.art[name], style)
else:
print "Tried to set art for item that's NOT ok?"
class RuntimeView(view.View):
def __init__(self, *args, **kwargs):
super(RuntimeView, self).__init__(*args, **kwargs)
self.tree = RuntimeTree(self)
# self.tree.Bind(wx.EVT_KEY_DOWN, self.tree.on_key_down)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.tree, 1, wx.EXPAND)
self.SetSizer(sizer)
def set_model(self, model):
self.tree.set_model(model)
def update(self, stack):
self.tree.update()
def add_watch(self, s):
self.tree.add_watch(s)
class GDBDebugView(view.View):
def __init__(self, *args, **kwargs):
super(GDBDebugView, self).__init__(*args, **kwargs)
self.list = DictListCtrl(self, color_changes=False)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.list, 1, wx.EXPAND)
self.SetSizer(sizer)
def set_model(self, model):
self.model = model
print "Binding the var update"
self.model.Bind(gdb.EVT_GDB_UPDATE_VARS, self.on_var_update)
def on_var_update(self, evt):
for name in evt.data:
if name in self.model.vars:
self.list[name] = self.model.vars[name].data
else:
del self.list[name]
evt.Skip()
|
mit
| 3,711,950,667,816,855,000
| 41.050798
| 199
| 0.54973
| false
| 3.742587
| false
| false
| false
|
caveatemptors-2015/special-garbanzo
|
project/portfolioX/migrations/0001_initial.py
|
1
|
2621
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-09 23:49
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Holding',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField()),
('avg_price', models.DecimalField(decimal_places=2, max_digits=6)),
],
),
migrations.CreateModel(
name='Portfolio',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('portfolio_name', models.CharField(max_length=255)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Security',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ticker', models.CharField(max_length=100)),
('company_name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Transaction',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField()),
('txn_date', models.DateTimeField(auto_now_add=True)),
('price', models.DecimalField(decimal_places=2, max_digits=6)),
('portfolio', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='portfolioX.Portfolio')),
('symbol', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='portfolioX.Security')),
],
),
migrations.AddField(
model_name='holding',
name='portfolio',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='portfolioX.Portfolio'),
),
migrations.AddField(
model_name='holding',
name='security',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='portfolioX.Security'),
),
]
|
mit
| -8,121,466,650,044,240,000
| 39.953125
| 121
| 0.581457
| false
| 4.296721
| false
| false
| false
|
polyaxon/polyaxon
|
platform/coredb/tests/test_create_e2e/test_create_jobs_e2e.py
|
1
|
3730
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.test import TestCase
from coredb import operations
from coredb.factories.projects import ProjectFactory
from coredb.factories.users import UserFactory
from coredb.models.runs import Run
from polyaxon.polyaxonfile import CompiledOperationSpecification, OperationSpecification
from polyaxon.polyflow import V1RunKind
from polycommon.test_cases.fixtures import get_fxt_job, get_fxt_job_with_inputs
class TestCreateJobs(TestCase):
def setUp(self):
super().setUp()
self.user = UserFactory()
self.project = ProjectFactory()
def test_create_run_with_job_spec(self):
count = Run.objects.count()
config_dict = get_fxt_job()
spec = OperationSpecification.read(values=config_dict)
run = operations.init_and_save_run(
project_id=self.project.id, user_id=self.user.id, op_spec=spec
)
assert Run.objects.count() == count + 1
assert run.pending is None
assert run.kind == V1RunKind.JOB
assert run.name == "foo"
assert run.description == "a description"
assert set(run.tags) == {"tag1", "tag2"}
# Check compiled operation passes
compiled_operation = CompiledOperationSpecification.read(run.content)
compiled_operation = CompiledOperationSpecification.apply_params(
compiled_operation
)
CompiledOperationSpecification.apply_runtime_contexts(compiled_operation)
# Check job
job_spec = CompiledOperationSpecification.read(run.content)
assert job_spec.run.container.image == "test"
job_spec = CompiledOperationSpecification.apply_operation_contexts(job_spec)
assert job_spec.run.container.image == "test"
def test_create_run_with_templated_job_spec(self):
count = Run.objects.count()
config_dict = get_fxt_job_with_inputs()
spec = OperationSpecification.read(values=config_dict)
run = operations.init_and_save_run(
project_id=self.project.id, user_id=self.user.id, op_spec=spec
)
assert Run.objects.count() == count + 1
assert run.pending is None
assert run.kind == V1RunKind.JOB
assert run.name == "foo"
assert run.description == "a description"
assert set(run.tags) == {"tag1", "tag2"} # From template
compiled_operation = CompiledOperationSpecification.read(run.content)
compiled_operation = CompiledOperationSpecification.apply_params(
compiled_operation, params=spec.params
)
compiled_operation = CompiledOperationSpecification.apply_operation_contexts(
compiled_operation
)
CompiledOperationSpecification.apply_runtime_contexts(compiled_operation)
run.content = compiled_operation.to_dict(dump=True)
run.save(update_fields=["content"])
job_spec = CompiledOperationSpecification.read(run.content)
assert job_spec.run.container.image == "{{ image }}"
job_spec = CompiledOperationSpecification.apply_runtime_contexts(job_spec)
assert job_spec.run.container.image == "foo/bar"
|
apache-2.0
| -2,776,431,568,413,844,500
| 42.882353
| 88
| 0.694102
| false
| 4.023732
| true
| false
| false
|
ohsu-qin/qipipe
|
qipipe/helpers/logging.py
|
1
|
5667
|
# Absolute import (the default in a future Python release) resolves
# the logging import as the Python standard logging module rather
# than this module of the same name.
from __future__ import absolute_import
import os
import sys
import tempfile
from datetime import datetime
import logging
import qiutil
NIPYPE_LOG_DIR_ENV_VAR = 'NIPYPE_LOG_DIR'
"""The environment variable used by Nipype to set the log directory."""
def configure(**opts):
"""
Configures the logger as follows:
- If there is a *log* option,
then the logger is a conventional ``qiutil.logging`` logger
which writes to the given log file.
- Otherwise, the logger delegates to a mock logger that
writes to stdout.
.. Note:: In a cluster environment, Nipype clobbers the dispatched job's
log configuration and logging falls back to the default. For this reason,
the default mock logger level is ``DEBUG`` rather than ``INFO``.
The dispatched node's log is the stdout captured in the file
*work*\ ``/batch/``\ *node_name*\ ``.o``\ *node_id*, where
*work* is the execution work directory.
:param opts: the ``qiutil.command.configure_log`` options
:return: the logger factory
"""
# The log file option.
log_file_opt = opts.get('log')
# Set the Nipype log directory environment variable before importing
# any nipype module. The code below works around the following Nipype
# bug:
# * Nipype requires a log directory. If the Nipype log directory is
# set to /dev/null, then Nipype raises an error. The work-around
# is to set the NIPYPE_LOG_DIR environment variable to a new temp
# directory.
log_dir = None
if log_file_opt:
# Configure the qiutil logger for the auxiliary qi* modules.
# The non-Nipype log messages will be ignored in a cluster
# job context since Nipype stomps on the Python logger, but
# we will go through the motions anyway.
qiutil.command.configure_log('qixnat', 'qidicom',
'qiutil', **opts)
log_file = os.path.abspath(log_file_opt)
if log_file == '/dev/null':
# Work around the Nipype bug described above.
log_dir = tempfile.mkdtemp(prefix='qipipe_')
else:
log_dir = os.path.dirname(log_file)
# Make the log file parent directory, if necessary.
if not os.path.exists(log_dir):
os.makedirs(log_dir)
# Nipype always needs a log directory to work around the
# following Nipype bug:
# * If the Nipype log directory is not set, then Nipype still
# logs to the default log file ./log/pypeline.log, but also
# logs to stdout, which stomps on the qipipe logging.
if not log_dir:
log_dir = '/'.join([os.getcwd(), 'log'])
# Set the Nipype log directory environment variable.
os.environ[NIPYPE_LOG_DIR_ENV_VAR] = log_dir
# Print qipipe log messages to stdout to work around the
# Nipype bug described in the logger method apidoc.
mock_log_opts = {}
level = opts.get('log_level')
if level:
mock_log_opts['level'] = level
factory = MockLoggerFactory(**mock_log_opts).logger
# Set the qipipe logger factory.
logger._factory = factory
# Print a log message.
log_dest = log_file_opt if log_file_opt else 'stdout'
log_level = opts.get('log_level', 'DEBUG')
factory(__name__).info("Logging qipipe to %s with level %s." %
(log_dest, log_level))
factory(__name__).info("Logging nipype to the %s directory." %
log_dir)
return factory
def logger(name):
"""
This method overrides ``qiutil.logging.logger`` to work
around the following Nipype bug:
* Nipype stomps on any other application's logging.
The work-around is to mock a "logger" that writes
to stdout.
:param name: the caller's context ``__name__``
:return: the logger facade
"""
# Make a default logger factory on demand.
if not hasattr(logger, '_factory'):
logger._factory = configure()
return logger._factory(name)
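# Editor's sketch (hedged illustration, not from the original qipipe docs): with no
# *log* option, configure() installs the stdout mock logger; per-module loggers are
# then obtained from the returned factory or from logger().
def _mock_logging_demo():
    factory = configure(log_level='INFO')
    log = factory('qipipe.demo')
    log.info("written to stdout by the mock logger")
    log.debug("suppressed, because the configured level is INFO")
    logger('qipipe.demo').warn("the module-level logger() uses the same factory")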
class MockLoggerFactory(object):
def __init__(self, **opts):
self.writer = MockLogWriter(**opts)
def logger(self, name):
return MockLogger(self.writer, name)
class MockLogger(object):
def __init__(self, writer, name):
self.writer = writer
self.name = name
@property
def level(self):
return self.writer.level
def info(self, message):
self.writer.info(self.name, message)
def error(self, message):
self.writer.error(self.name, message)
def warn(self, message):
self.writer.warn(self.name, message)
def debug(self, message):
self.writer.debug(self.name, message)
class MockLogWriter(object):
def __init__(self, level=None):
if not level:
level = 'DEBUG'
self.level = getattr(logging, level)
def info(self, name, message):
if self.level <= logging.INFO:
self._write(name, 'INFO', message)
def debug(self, name, message):
if self.level <= logging.DEBUG:
self._write(name, 'DEBUG', message)
def warn(self, name, message):
if self.level <= logging.WARN:
self._write(name, 'WARN', message)
def error(self, name, message):
if self.level <= logging.ERROR:
self._write(name, 'ERROR', message)
def _write(self, name, level, message):
dt = datetime.now().strftime("%m/%d/%Y %H:%M:%S")
print "%s %s %s %s" % (dt, name, level, message)
sys.stdout.flush()
|
bsd-2-clause
| 5,169,502,066,354,706,000
| 32.934132
| 73
| 0.627845
| false
| 3.839431
| true
| false
| false
|
briend/mypaint
|
lib/palette.py
|
1
|
31230
|
# This file is part of MyPaint.
# Copyright (C) 2013-2018 by the MyPaint Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Palette: user-defined lists of color swatches"""
# TODO: Make palettes part of the model, save as part of ORA documents.
## Imports
from __future__ import division, print_function
import re
from copy import copy
import logging
from lib.helpers import clamp
from lib.observable import event
from lib.color import RGBColor, CAM16Color, color_diff
from lib.color import YCbCrColor
from lib.pycompat import unicode
from lib.pycompat import xrange
from lib.pycompat import PY3
from io import open
logger = logging.getLogger(__name__)
## Class and function defs
class Palette (object):
"""A flat list of color swatches, compatible with the GIMP
As a (sideways-compatible) extension to the GIMP's format, MyPaint supports
empty slots in the palette. These slots are represented by pure black
swatches with the name ``__NONE__``.
Palette objects expose the position within the palette of a current color
match, which can be declared to be approximate or exact. This is used for
highlighting the user concept of the "current color" in the GUI.
Palette objects can be serialized in the GIMP's file format (the regular
`unicode()` function on a Palette will do this too), or converted to and
from a simpler JSON-ready representation for storing in the MyPaint prefs.
Support for loading and saving via modal dialogs is defined here too.
"""
## Class-level constants
_EMPTY_SLOT_ITEM = RGBColor(-1, -1, -1)
_EMPTY_SLOT_NAME = "__NONE__"
## Construction, loading and saving
def __init__(self, filehandle=None, filename=None, colors=None):
"""Instantiate, from a file or a sequence of colors
:param filehandle: Filehandle to load.
:param filename: Name of a file to load.
:param colors: Iterable sequence of colors (lib.color.UIColor).
The constructor arguments are mutually exclusive. With no args
specified, you get an empty palette.
>>> Palette()
<Palette colors=0, columns=0, name=None>
Palettes can be generated from interpolations, which is handy for
testing, at least.
>>> cols = RGBColor(1,1,0).interpolate(RGBColor(1,0,1), 10)
>>> Palette(colors=cols)
<Palette colors=10, columns=0, name=None>
"""
super(Palette, self).__init__()
#: Number of columns. 0 means "natural flow"
self._columns = 0
#: List of named colors
self._colors = []
#: Name of the palette as a Unicode string, or None
self._name = None
#: Current position in the palette. None=no match; integer=index.
self._match_position = None
#: True if the current match is approximate
self._match_is_approx = False
#: Set to true to keep position during palette shifting
self.keep_position = False
# Clear and initialize
self.clear(silent=True)
if colors:
for col in colors:
col = self._copy_color_in(col)
self._colors.append(col)
elif filehandle:
self.load(filehandle, silent=True)
elif filename:
with open(filename, "r", encoding="utf-8", errors="replace") as fp:
self.load(fp, silent=True)
def clear(self, silent=False):
"""Resets the palette to its initial state.
>>> grey16 = RGBColor(1,1,1).interpolate(RGBColor(0,0,0), 16)
>>> p = Palette(colors=grey16)
>>> p.name = "Greyscale"
>>> p.columns = 3
>>> p # doctest: +ELLIPSIS
<Palette colors=16, columns=3, name=...'Greyscale'>
>>> p.clear()
>>> p
<Palette colors=0, columns=0, name=None>
Fires the `info_changed()`, `sequence_changed()`, and `match_changed()`
events, unless the `silent` parameter tests true.
"""
self._colors = []
self._columns = 0
self._name = None
self._match_position = None
self._match_is_approx = False
if not silent:
self.info_changed()
self.sequence_changed()
self.match_changed()
def load(self, filehandle, silent=False):
"""Load contents from a file handle containing a GIMP palette.
:param filehandle: File-like object (.readline, line iteration)
:param bool silent: If true, don't emit any events.
>>> pal = Palette()
>>> with open("palettes/MyPaint_Default.gpl", "r") as fp:
... pal.load(fp)
>>> len(pal) > 1
True
If the file format is incorrect, a RuntimeError will be raised.
"""
comment_line_re = re.compile(r'^#')
field_line_re = re.compile(r'^(\w+)\s*:\s*(.*)$')
color_line_re = re.compile(r'^(\d+)\s+(\d+)\s+(\d+)\s*(?:\b(.*))$')
fp = filehandle
self.clear(silent=True) # method fires events itself
line = fp.readline()
if line.strip() != "GIMP Palette":
raise RuntimeError("Not a valid GIMP Palette")
header_done = False
line_num = 0
for line in fp:
line = line.strip()
line_num += 1
if line == '':
continue
if comment_line_re.match(line):
continue
if not header_done:
match = field_line_re.match(line)
if match:
key, value = match.groups()
key = key.lower()
if key == 'name':
self._name = value.strip()
elif key == 'columns':
self._columns = int(value)
else:
logger.warning("Unknown 'key:value' pair %r", line)
continue
else:
header_done = True
match = color_line_re.match(line)
if not match:
logger.warning("Expected 'R G B [Name]', not %r", line)
continue
r, g, b, col_name = match.groups()
col_name = col_name.strip()
r = clamp(int(r), 0, 0xff) / 0xff
g = clamp(int(g), 0, 0xff) / 0xff
b = clamp(int(b), 0, 0xff) / 0xff
if r == g == b == 0 and col_name == self._EMPTY_SLOT_NAME:
self.append(None)
else:
col = CAM16Color(color=RGBColor(r, g, b))
col.__name = col_name
self._colors.append(col)
if not silent:
self.info_changed()
self.sequence_changed()
self.match_changed()
def save(self, filehandle):
"""Saves the palette to an open file handle.
:param filehandle: File-like object (.write suffices)
>>> from lib.pycompat import PY3
>>> if PY3:
... from io import StringIO
... else:
... from cStringIO import StringIO
>>> fp = StringIO()
>>> cols = RGBColor(1,.7,0).interpolate(RGBColor(.1,.1,.5), 16)
>>> pal = Palette(colors=cols)
>>> pal.save(fp)
>>> fp.getvalue() == unicode(pal)
True
The file handle is not flushed, and is left open after the
write.
>>> fp.flush()
>>> fp.close()
"""
filehandle.write(unicode(self))
def update(self, other):
"""Updates all details of this palette from another palette.
Fires the `info_changed()`, `sequence_changed()`, and `match_changed()`
events.
"""
self.clear(silent=True)
for col in other._colors:
col = self._copy_color_in(col)
self._colors.append(col)
self._name = other._name
self._columns = other._columns
self.info_changed()
self.sequence_changed()
self.match_changed()
## Palette size and metadata
def get_columns(self):
"""Get the number of columns (0 means unspecified)."""
return self._columns
def set_columns(self, n):
"""Set the number of columns (0 means unspecified)."""
self._columns = int(n)
self.info_changed()
def get_name(self):
"""Gets the palette's name."""
return self._name
def set_name(self, name):
"""Sets the palette's name."""
if name is not None:
name = unicode(name)
self._name = name
self.info_changed()
def __bool__(self):
"""Palettes never test false, regardless of their length.
>>> p = Palette()
>>> bool(p)
True
"""
return True
def __len__(self):
"""Palette length is the number of color slots within it."""
return len(self._colors)
## PY2/PY3 compat
__nonzero__ = __bool__
## Match position marker
def get_match_position(self):
"""Return the position of the current match (int or None)"""
return self._match_position
def set_match_position(self, i):
"""Sets the position of the current match (int or None)
Fires `match_changed()` if the value is changed."""
if i is not None:
i = int(i)
if i < 0 or i >= len(self):
i = None
if i != self._match_position:
self._match_position = i
self.match_changed()
def get_match_is_approx(self):
"""Returns whether the current match is approximate."""
return self._match_is_approx
def set_match_is_approx(self, approx):
"""Sets whether the current match is approximate
Fires match_changed() if the boolean value changes."""
approx = bool(approx)
if approx != self._match_is_approx:
self._match_is_approx = approx
self.match_changed()
def match_color(self, col, exact=False, order=None):
"""Moves the match position to the color closest to the argument.
:param col: The color to match.
:type col: lib.color.UIColor
:param exact: Only consider exact matches, and not near-exact or
approximate matches.
:type exact: bool
:param order: a search order to use. Default is outwards from the
match position, or in order if the match is unset.
:type order: sequence or iterator of integer color indices.
:returns: Whether the match succeeded.
:rtype: bool
By default, the matching algorithm favours exact or near-exact matches
which are close to the current position. If the current position is
unset, this search starts at 0. If there are no exact or near-exact
matches, a looser approximate match will be used, again favouring
matches with nearby positions.
>>> red2blue = RGBColor(1, 0, 0).interpolate(RGBColor(0, 1, 1), 5)
>>> p = Palette(colors=red2blue)
>>> p.match_color(RGBColor(0.45, 0.45, 0.45))
True
>>> p.match_position
2
>>> p.match_is_approx
True
>>> p[p.match_position]
<CAM16, v=53.0488, s=2.4757, h=209.5203, illuminant=95.0456, 100.0000, 108.9058>
>>> p.match_color(RGBColor(0.5, 0.5, 0.5))
True
>>> p.match_is_approx
False
>>> p.match_color(RGBColor(0.45, 0.45, 0.45), exact=True)
False
>>> p.match_color(RGBColor(0.5, 0.5, 0.5), exact=True)
True
Fires the ``match_changed()`` event when changes happen.
"""
if self.keep_position:
return False
if order is not None:
search_order = order
elif self.match_position is not None:
search_order = _outwards_from(len(self), self.match_position)
else:
search_order = xrange(len(self))
bestmatch_i = None
bestmatch_d = None
is_approx = True
if not isinstance(col, CAM16Color):
col = CAM16Color(color=col)
for i in search_order:
c = self._colors[i]
if c is self._EMPTY_SLOT_ITEM:
continue
# Closest exact or near-exact match by index distance (according to
# the search_order). Considering near-exact matches as equivalent
# to exact matches improves the feel of PaletteNext and
# PalettePrev.
if exact:
if c == col:
bestmatch_i = i
is_approx = False
break
else:
d = color_diff(col, c)
if c == col or d < 1.0:
bestmatch_i = i
is_approx = False
break
if bestmatch_d is None or d < bestmatch_d:
bestmatch_i = i
bestmatch_d = d
# If there are no exact or near-exact matches, choose the most similar
# color anywhere in the palette.
if bestmatch_i is not None:
self._match_position = bestmatch_i
self._match_is_approx = is_approx
self.match_changed()
return True
return False
def move_match_position(self, direction, refcol, group=False):
"""Move the match position in steps, matching first if needed.
:param direction: Direction for moving, positive or negative
:type direction: int:, ``1`` or ``-1``
:param refcol: Reference color, used for initial matching when needed.
:type refcol: lib.color.UIColor
:param group: Whether to loop over groups separated by blank spaces
:type group: bool
:returns: the color newly matched, if the match position has changed
:rtype: lib.color.UIColor, or None
Invoking this method when there's no current match position will select
the color that's closest to the reference color, just like
`match_color()`
>>> greys = RGBColor(1,1,1).interpolate(RGBColor(0,0,0), 16)
>>> pal = Palette(colors=greys)
>>> refcol = RGBColor(0.5, 0.55, 0.45)
>>> pal.move_match_position(-1, refcol)
>>> pal.match_position
7
>>> pal.match_is_approx
True
When the current match is defined, but only an approximate match, this
method converts it to an exact match but does not change its position.
>>> pal.move_match_position(-1, refcol) is None
False
>>> pal.match_position
7
>>> pal.match_is_approx
False
When the match is initially exact, its position is stepped in the
direction indicated, either by +1 or -1. Blank palette entries are
skipped.
>>> pal.move_match_position(-1, refcol) is None
False
>>> pal.match_position
6
>>> pal.match_is_approx
False
Fires ``match_position_changed()`` and ``match_is_approx_changed()`` as
appropriate. The return value is the newly matched color whenever this
method produces a new exact match.
"""
# Normalize direction
direction = int(direction)
if direction < 0:
direction = -1
elif direction > 0:
direction = 1
else:
return None
# If nothing is selected, pick the closest match without changing
# the managed color.
old_pos = self._match_position
if old_pos is None:
self.match_color(refcol)
return None
# Otherwise, refine the match, or step it in the requested direction.
new_pos = None
if self._match_is_approx:
# Make an existing approximate match concrete.
new_pos = old_pos
else:
# Index reflects a close or identical match.
# Seek in the requested direction, skipping empty entries.
# Loop back around to the other end of the array if needed.
# If group=True, stop within a segment surrounded by blanks
pos = old_pos
assert direction != 0
pos += direction
if group is False:
looped = 0
while looped < 2:
if pos == len(self._colors) and direction == 1:
pos = 0
looped += 1
if pos == -1 and direction == -1:
pos = len(self._colors) - 1
looped += 1
if self._colors[pos] is not self._EMPTY_SLOT_ITEM:
new_pos = pos
break
pos += direction
else:
if ((pos == len(self._colors) and direction == 1)
or (pos == -1 and direction == -1)):
return None
elif self._colors[pos] is not self._EMPTY_SLOT_ITEM:
new_pos = pos
# Update the palette index and the managed color.
result = None
if new_pos is not None:
col = self._colors[new_pos]
if col is not self._EMPTY_SLOT_ITEM:
result = self._copy_color_out(col)
self.set_match_position(new_pos)
self.set_match_is_approx(False)
return result
## Property-style access for setters and getters
columns = property(get_columns, set_columns)
name = property(get_name, set_name)
match_position = property(get_match_position, set_match_position)
match_is_approx = property(get_match_is_approx, set_match_is_approx)
## Color access
def _copy_color_out(self, col):
if col is self._EMPTY_SLOT_ITEM:
return None
result = col
result.__name = col.__name
return result
def _copy_color_in(self, col, name=None):
if col is self._EMPTY_SLOT_ITEM or col is None:
result = self._EMPTY_SLOT_ITEM
else:
if name is None:
try:
name = col.__name
except AttributeError:
pass
if name is not None:
name = unicode(name)
if not isinstance(col, CAM16Color):
result = CAM16Color(color=col)
else:
result = col
result.__name = name
return result
def append(self, col, name=None, unique=False, match=False):
"""Appends a color, optionally setting a name for it.
:param col: The color to append.
:param name: Name of the color to insert.
:param unique: If true, don't append if the color already exists
in the palette. Only exact matches count.
:param match: If true, set the match position to the
appropriate palette entry.
"""
col = self._copy_color_in(col, name)
if unique:
# Find the final exact match, if one is present
for i in xrange(len(self._colors)-1, -1, -1):
if col == self._colors[i]:
if match:
self._match_position = i
self._match_is_approx = False
self.match_changed()
return
# Append new color, and select it if requested
end_i = len(self._colors)
self._colors.append(col)
if match:
self._match_position = end_i
self._match_is_approx = False
self.match_changed()
self.sequence_changed()
def insert(self, i, col, name=None):
"""Inserts a color, setting an optional name for it.
:param i: Target index. `None` indicates appending a color.
:param col: Color to insert. `None` indicates an empty slot.
:param name: Name of the color to insert.
>>> grey16 = RGBColor(1, 1, 1).interpolate(RGBColor(0, 0, 0), 16)
>>> p = Palette(colors=grey16)
>>> p.insert(5, RGBColor(1, 0, 0), name="red")
>>> p
<Palette colors=17, columns=0, name=None>
>>> p[5]
<CAM16, v=55.9620, s=104.0363, h=27.4858, illuminant=95.0456, 100.0000, 108.9058>
Fires the `sequence_changed()` event. If the match position changes as
a result, `match_changed()` is fired too.
"""
col = self._copy_color_in(col, name)
if i is None:
self._colors.append(col)
else:
self._colors.insert(i, col)
if i is not None and self.match_position is not None:
if self.match_position >= i:
self.match_position += 1
self.sequence_changed()
def reposition(self, src_i, targ_i):
"""Moves a color, or copies it to empty slots, or moves it the end.
:param src_i: Source color index.
:param targ_i: Target color index, or None to indicate the end.
This operation performs a copy if the target is an empty slot, and a
remove followed by an insert if the target slot contains a color.
>>> grey16 = RGBColor(1, 1, 1).interpolate(RGBColor(0, 0, 0), 16)
>>> p = Palette(colors=grey16)
>>> p[5] = None # creates an empty slot
>>> p.match_position = 8
>>> p[5] == p[0]
False
>>> p.reposition(0, 5)
>>> p[5] == p[0]
True
>>> p.match_position
8
>>> p[5] = RGBColor(1, 0, 0)
>>> p.reposition(14, 5)
>>> p.match_position # continues pointing to the same color
9
>>> len(p) # repositioning doesn't change the length
16
Fires the `color_changed()` event for copies to empty slots, or
`sequence_changed()` for moves. If the match position changes as a
result, `match_changed()` is fired too.
"""
assert src_i is not None
if src_i == targ_i:
return
try:
col = self._colors[src_i]
assert col is not None # just in case we change the internal repr
except IndexError:
return
# Special case: just copy if the target is an empty slot
match_pos = self.match_position
if targ_i is not None:
targ = self._colors[targ_i]
if targ is self._EMPTY_SLOT_ITEM:
self._colors[targ_i] = self._copy_color_in(col)
self.color_changed(targ_i)
# Copying from the matched color moves the match position.
# Copying to the match position clears the match.
if match_pos == src_i:
self.match_position = targ_i
elif match_pos == targ_i:
self.match_position = None
return
# Normal case. Remove...
self._colors.pop(src_i)
moving_match = False
updated_match = False
if match_pos is not None:
# Moving rightwards. Adjust for the pop().
if targ_i is not None and targ_i > src_i:
targ_i -= 1
# Similar logic for the match position, but allow it to follow
# the move if it started at the src position.
if match_pos == src_i:
match_pos = None
moving_match = True
updated_match = True
elif match_pos > src_i:
match_pos -= 1
updated_match = True
# ... then append or insert.
if targ_i is None:
self._colors.append(col)
if moving_match:
match_pos = len(self._colors) - 1
updated_match = True
else:
self._colors.insert(targ_i, col)
if match_pos is not None:
if moving_match:
match_pos = targ_i
updated_match = True
elif match_pos >= targ_i:
match_pos += 1
updated_match = True
# Announce changes
self.sequence_changed()
if updated_match:
self.match_position = match_pos
self.match_changed()
def pop(self, i):
"""Removes a color, returning it.
Fires the `match_changed()` event if the match index changes as a
result of the removal, and `sequence_changed()` if a color was removed,
prior to its return.
"""
i = int(i)
try:
col = self._colors.pop(i)
except IndexError:
return
if self.match_position == i:
self.match_position = None
elif self.match_position is not None and self.match_position > i:
self.match_position -= 1
self.sequence_changed()
return self._copy_color_out(col)
def get_color(self, i):
"""Looks up a color by its list index."""
if i is None:
return None
try:
col = self._colors[i]
return self._copy_color_out(col)
except IndexError:
return None
def __getitem__(self, i):
return self.get_color(i)
def __setitem__(self, i, col):
self._colors[i] = self._copy_color_in(col, None)
self.color_changed(i)
## Color name access
def get_color_name(self, i):
"""Looks up a color's name by its list index."""
try:
col = self._colors[i]
except IndexError:
return
if col is self._EMPTY_SLOT_ITEM:
return
return col.__name
def set_color_name(self, i, name):
"""Sets a color's name by its list index."""
try:
col = self._colors[i]
except IndexError:
return
if col is self._EMPTY_SLOT_ITEM:
return
col.__name = name
self.color_changed(i)
def get_color_by_name(self, name):
"""Looks up the first color with the given name.
>>> pltt = Palette()
>>> pltt.append(RGBColor(1,0,1), "Magenta")
>>> pltt.get_color_by_name("Magenta")
<CAM16, v=63.8320, s=96.7099, h=334.4049, illuminant=95.0456, 100.0000, 108.9058>
"""
for col in self:
if col is not None and col.__name == name:
return col
def __iter__(self):
return self.iter_colors()
def iter_colors(self):
"""Iterates across the palette's colors."""
for col in self._colors:
if col is self._EMPTY_SLOT_ITEM:
yield None
else:
yield col
## Observable events
@event
def info_changed(self):
"""Event: palette name, or number of columns was changed."""
@event
def match_changed(self):
"""Event: either match position or match_is_approx was updated."""
@event
def sequence_changed(self):
"""Event: the color ordering or palette length was changed."""
@event
def color_changed(self, i):
"""Event: the color in the given slot, or its name, was modified."""
## Dumping and cloning
def __unicode__(self):
"""Py2-era serialization as a Unicode string.
Used by the Py3 __str__() while we are in transition.
"""
result = u"GIMP Palette\n"
if self._name is not None:
result += u"Name: %s\n" % self._name
if self._columns > 0:
result += u"Columns: %d\n" % self._columns
result += u"#\n"
for col in self._colors:
if col is self._EMPTY_SLOT_ITEM:
col_name = self._EMPTY_SLOT_NAME
r = g = b = 0
else:
col_name = col.__name
# get sRGB D65 RGB values
col.illuminant = None
col.limit_purity = None
col.cachedrgb = None
r, g, b = [clamp(int(c*0xff), 0, 0xff) for c in col.get_rgb()]
if col_name is None:
result += u"%d %d %d\n" % (r, g, b)
else:
result += u"%d %d %d %s\n" % (r, g, b, col_name)
return result
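# Illustrative example of the serialized GIMP palette form produced above
# (the name and color values are hypothetical; empty slots are written as
# "0 0 0" followed by the palette's empty-slot marker name):
#
#     GIMP Palette
#     Name: Warm reds
#     Columns: 4
#     #
#     255 0 0 Red
#     204 51 0 Burnt orange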
def __str__(self):
"""Py3: serialize as str (=Unicode). Py2: as bytes (lossy!)."""
s = self.__unicode__()
if not PY3:
s = s.encode("utf-8", errors="replace")
return s
def __copy__(self):
clone = Palette()
clone.set_name(self.get_name())
clone.set_columns(self.get_columns())
for col in self._colors:
if col is self._EMPTY_SLOT_ITEM:
clone.append(None)
else:
clone.append(copy(col), col.__name)
return clone
def __deepcopy__(self, memo):
return self.__copy__()
def __repr__(self):
return "<Palette colors=%d, columns=%d, name=%r>" % (
len(self._colors),
self._columns,
self._name,
)
## Conversion to/from simple dict representation
def to_simple_dict(self):
"""Converts the palette to a simple dict form used in the prefs."""
simple = {}
simple["name"] = self.get_name()
simple["columns"] = self.get_columns()
entries = []
for col in self.iter_colors():
if col is None:
entries.append(None)
else:
name = col.__name
entries.append(((col.v, col.s, col.h), name))
simple["entries"] = entries
return simple
@classmethod
def new_from_simple_dict(cls, simple):
"""Constructs and returns a palette from the simple dict form."""
pal = cls()
pal.set_name(simple.get("name", None))
pal.set_columns(simple.get("columns", None))
for entry in simple.get("entries", []):
if entry is None:
pal.append(None)
else:
s, name = entry
# convert old format to CAM16
if "#" in s:
col = CAM16Color(color=RGBColor.new_from_hex_str(s))
else:
col = CAM16Color(vsh=s)
pal.append(col, name)
return pal
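# Minimal round-trip sketch for the simple dict form (illustrative only; the
# RGBColor value and the "Accent" name are invented for the example):
#
#     pal = Palette()
#     pal.append(RGBColor(1, 0, 0), "Accent")
#     simple = pal.to_simple_dict()   # {"name": ..., "columns": ..., "entries": [((v, s, h), "Accent")]}
#     clone = Palette.new_from_simple_dict(simple)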
## Helper functions
def _outwards_from(n, i):
"""Search order within the palette, outwards from a given index.
Defined for a sequence of len() `n`, outwards from index `i`.
"""
assert i < n and i >= 0
yield i
for j in xrange(n):
exhausted = True
if i - j >= 0:
yield i - j
exhausted = False
if i + j < n:
yield i + j
exhausted = False
if exhausted:
break
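# Illustrative search order, traced from the generator above: for a sequence of
# length 5 starting at index 2 it yields 2, 2, 2, 1, 3, 0, 4 -- the start index
# comes first (repeated by the j=0 step, which is harmless for matching) and the
# remaining indices fan outwards in both directions.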
## Module testing
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
import doctest
doctest.testmod()
|
gpl-2.0
| 846,482,513,551,410,600
| 33.093886
| 91
| 0.535767
| false
| 4.138616
| false
| false
| false
|
enriquepablo/terms.server
|
setup.py
|
1
|
2240
|
# Copyright (c) 2007-2012 by Enrique Pérez Arnaud <enriquepablo@gmail.com>
#
# This file is part of the terms project.
# https://github.com/enriquepablo/terms
#
# The terms project is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The terms project is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with any part of the terms project.
# If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
VERSION = '0.1.0a2'
setup(
name = 'terms.server',
version = VERSION,
author = 'Enrique Pérez Arnaud',
author_email = 'enriquepablo@gmail.com',
url = 'http://pypi.python.org/terms.server',
license = 'GNU GENERAL PUBLIC LICENSE Version 3',
description = 'Terms services',
long_description = (open('README.rst').read() +
'\n' + open('INSTALL.rst').read() +
'\n' + open('SUPPORT.rst').read()),
packages = find_packages(),
namespace_packages = ['terms'],
test_suite = 'nose.collector',
include_package_data = True,
entry_points = {
'console_scripts': [
'webserver = terms.server.scripts.webserver:serve',
'client = terms.server.scripts.client:client',
'initterms = terms.server.scripts.initialize:init_terms',
],
},
tests_require = [
'nose == 1.3.7',
'coverage == 4.3.4',
],
extras_require = {
'PG': ['psycopg2 == 2.7.1',],
},
install_requires = [
'setuptools==34.3.3',
'bottle==0.12.13',
'repoze.who==2.3',
'gevent==1.2.1',
'gevent-websocket==0.10.1',
'py-bcrypt==0.4',
'sqlalchemy==1.1.7',
'colander==1.3.2',
'colanderalchemy==0.3.3',
'deform==2.0.4',
'mako==1.0.6',
],
)
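# Local development install sketch (not part of the original file); the [PG]
# extra pulls in psycopg2 as declared in extras_require above:
#
#     pip install -e .
#     pip install -e .[PG]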
|
gpl-3.0
| 3,798,885,400,614,978,000
| 31.434783
| 75
| 0.60992
| false
| 3.475155
| false
| false
| false
|
dames57/multimarkdown_reader
|
mmd_reader.py
|
1
|
1283
|
import subprocess
from pelican import signals
from pelican.readers import BaseReader
from pelican.utils import pelican_open
class MmdReader(BaseReader):
enabled = True
file_extensions = ['md', 'markdown', 'mkd', 'mdown']
def read(self, filename):
with pelican_open(filename) as fp:
text = list(fp.splitlines())
metadata = {}
content = ""
for i, line in enumerate(text):
kv = line.split(':', 1)
if len(kv) == 2:
name, value = kv[0].lower(), kv[1].strip()
metadata[name] = self.process_metadata(name, value)
else:
content = "\n".join(text[i:])
break
mmd_cmd = ["multimarkdown"]
proc = subprocess.Popen(mmd_cmd,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE)
output = proc.communicate(content.encode('utf-8'))[0].decode('utf-8')
status = proc.wait()
if status:
raise subprocess.CalledProcessError(status, mmd_cmd)
return output, metadata
def add_reader(readers):
for ext in MmdReader.file_extensions:
readers.reader_classes[ext] = MmdReader
def register():
signals.readers_init.connect(add_reader)
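# Illustrative pelicanconf.py snippet for enabling this reader as a plugin
# (the plugin path and module name are assumptions about the local setup,
# not part of this module):
#
#     PLUGIN_PATHS = ["plugins"]
#     PLUGINS = ["mmd_reader"]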
|
agpl-3.0
| -7,180,659,168,199,426,000
| 29.547619
| 77
| 0.56742
| false
| 4.073016
| false
| false
| false
|
beproud/bpcommons
|
beproud/django/commons/templatetags/compat.py
|
1
|
5776
|
import string
import re
import json
import six
from django.utils.encoding import force_text
from django.core.serializers.json import DjangoJSONEncoder
try:
from django.utils.functional import keep_lazy # Django-1.8 doesn't have it.
except ImportError:
# allow_lazy has been deprecated at Django-1.10, will be removed at 2.0
from django.utils.functional import allow_lazy as keep_lazy
# copy from beproud.utils.strutils.abbrev
def abbrev(s, num=255, end="..."):
"""
Abbreviate a string.
Guarantees that the returned string is never longer than num characters.
>>> abbrev('spamspamspam', 6)
'spa...'
>>> abbrev('spamspamspam', 12)
'spamspamspam'
>>> abbrev('eggseggseggs', 1)
'e'
>>> abbrev('eggseggseggs', 2)
'eg'
>>> abbrev('eggseggseggs', 3)
'egg'
>>> abbrev('eggseggseggs', 4)
'e...'
>>> abbrev('eggseggseggs', 2, '.')
'e.'
"""
index = num - len(end)
if len(s) > num:
s = (s[:index] + end) if index > 0 else s[:num]
return s
# copy from beproud.utils.html.urlize
def escape(html):
"""
Returns the given HTML with ampersands, quotes and angle brackets encoded.
"""
return (force_text(html).replace('&', '&')
.replace('<', '<')
.replace('>', '>')
.replace('"', '"')
.replace("'", '''))
# copy from beproud.utils.html.urlize
HTTP_SCHEME_RE = 'http[s]*'
# See: http://www.ietf.org/rfc/rfc1738.txt
URL_SAFE = "$-_.+"
URL_EXTRA = "!*'(),"
URL_PATH_RESERVED = ';?'
URL_QUERY_RESERVED = '#'
URL_OTHER_RESERVED = ':@&=/'
URL_RESERVED = URL_PATH_RESERVED + URL_QUERY_RESERVED + URL_OTHER_RESERVED
URL_ESCAPE = '%'
URL_ALNUM = string.ascii_letters + string.digits
URL_PATH_VALID_CHARS = URL_ALNUM + URL_SAFE + URL_EXTRA + URL_OTHER_RESERVED + URL_ESCAPE
URL_QUERY_VALID_CHARS = URL_ALNUM + URL_SAFE + URL_EXTRA + URL_OTHER_RESERVED + URL_PATH_RESERVED + URL_ESCAPE
URL_FRAGMENT_VALID_CHARS = URL_ALNUM + URL_SAFE + URL_EXTRA + URL_RESERVED + URL_ESCAPE
# 0-65535
# See: http://www.regular-expressions.info/numericranges.html
PORT_RE = "%s" % "|".join([
"6553[0-5]",
"655[0-2][0^9]",
"65[0-4][0-9][0-9]",
"6[0-4][0-9][0-9][0-9]",
"[1-5][0-9][0-9][0-9][0-9]",
"[1-9][0-9][0-9][0-9]",
"[1-9][0-9][0-9]",
"[1-9][0-9]",
"[1-9]",
])
# See: http://www.shauninman.com/archive/2006/05/08/validating_domain_names
# See: http://www.iana.org/domains/root/db/
DOMAIN_RE = '(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\\.)+(?:(?:aero|arpa|a[cdefgilmnoqrstuwxz])|(?:cat|com|coop|b[abdefghijmnorstvwyz]|biz)|(?:c[acdfghiklmnorsuvxyz])|d[ejkmoz]|(?:edu|e[ceghrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:om|org)|(?:pro|p[aefghklmnrstwy])|qa|r[eouw]|s[abcdeghijklmnortvyz]|(?:travel|t[cdfghjklmnoprtvwz])|u[agkmsyz]|v[aceginu]|w[fs]|y[etu]|z[amw])'
# See: http://www.regular-expressions.info/regexbuddy/ipaccurate.html
IP_ADDRESS_RE = '(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
# Domain or IP address
IP_DOMAIN_RE = '(%s)|(%s)' % (DOMAIN_RE, IP_ADDRESS_RE)
# Domain or IP address with port number
URL_DOMAIN_RE = '(?:%s)(?::(%s))?' % (IP_DOMAIN_RE, PORT_RE)
URL_RE = r'(%s)\:\/\/(%s)(/[%s]*)?(?:\?([%s]*))?(?:\#([%s]*))?' % (
HTTP_SCHEME_RE,
URL_DOMAIN_RE,
re.escape(URL_PATH_VALID_CHARS),
re.escape(URL_QUERY_VALID_CHARS),
re.escape(URL_FRAGMENT_VALID_CHARS),
)
URL_RE_CMP = re.compile(URL_RE)
URLIZE_TMPL = '<a href="%(link_url)s"%(attrs)s>%(link_text)s</a>'
# copy from beproud.utils.html.urlize
def urlize(text, trim_url_limit=None, attrs={}, url_re=URL_RE_CMP, autoescape=False):
"""text内URLを抽出してアンカータグで囲む
URLのデリミタは半角カンマ、<>(エスケープ済み含む)、\s、全角スペース、行末で、これらが末尾にマッチしない場合はURLとして認識しません。
URL部分は.+の最小マッチ、もしくはtrim_url_limitが指定された場合は{,trim_url_limit}の最小マッチとなります。
-args
text: urlize対象文字列
trim_url_limit: urlとして認識する文字数に上限を設ける場合は数値をセット
nofollow: Trueを与えるとタグにrel="nofollow"を付加
autoescape: Trueを与えるとタグエスケープを行います。
"""
if autoescape:
text = escape(text)
def _repl(m):
return URLIZE_TMPL % {
"link_url": m.group(),
"attrs": "".join(map(lambda x: ' %s="%s"' % x, attrs.items())),
"link_text": (abbrev(m.group(), trim_url_limit)
if trim_url_limit is not None else m.group()),
}
return url_re.sub(_repl, text)
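# Illustrative call (the URL and attrs are made-up examples; output shown roughly):
#
#     urlize(u"docs at http://example.com/page", trim_url_limit=20,
#            attrs={"rel": "nofollow"})
#     # -> u'docs at <a href="http://example.com/page" rel="nofollow">http://example.co...</a>'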
# copy from django.utils.html.strip_entities(). it was removed
def strip_entities(value):
"""Returns the given HTML with all entities (&something;) stripped."""
return re.sub(r'&(?:\w+|#\d+);', '', force_text(value))
strip_entities = keep_lazy(strip_entities, six.text_type)
# copy from bputils: beproud.utils.javascript
JS_CONVERT_TYPES = {
'bool': bool,
'int': int,
'string': str,
'array': list,
}
# copy from bputils: beproud.utils.javascript
def force_js(value, typename=None, encoder=None):
"""
Changes a python value to javascript for use in templates
"""
if typename:
typename = typename.lower()
if typename in JS_CONVERT_TYPES:
value = JS_CONVERT_TYPES[typename](value)
return json.dumps(value, cls=(encoder or DjangoJSONEncoder))
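# Illustrative conversions (plain Python values; DjangoJSONEncoder only matters
# for dates/decimals, which are not shown here):
#
#     force_js(True)         # -> 'true'
#     force_js(True, 'int')  # -> '1'
#     force_js(['a', 'b'])   # -> '["a", "b"]'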
|
bsd-2-clause
| -7,532,070,724,396,816,000
| 31.557576
| 537
| 0.609643
| false
| 2.446266
| false
| false
| false
|
maartenbreddels/vaex
|
packages/vaex-ui/vaex/ui/plugin/dispersions.py
|
1
|
11670
|
__author__ = 'maartenbreddels'
import functools
import matplotlib.patches as patches
import numpy as np
import matplotlib.artist as artist
import vaex.ui.plugin
from vaex.ui.qt import *
import logging
logger = logging.getLogger("plugin.dispersions")
import matplotlib.transforms as transforms
from matplotlib.path import Path
class DispersionEllipse(patches.Patch):
"""
This ellipse has its center in user coordinates, and its width and height in device coordinates,
so that it is not deformed.
"""
def __str__(self):
return "DispersionEllipse(%s,%s;%sx%s)" % (self.center[0], self.center[1],
self.width, self.height)
#@docstring.dedent_interpd
def __init__(self, xy, width, height, scale=1.0, angle=0.0, **kwargs):
"""
*xy*
center of ellipse
*width*
total length (diameter) of horizontal axis
*height*
total length (diameter) of vertical axis
*angle*
rotation in degrees (anti-clockwise)
Valid kwargs are:
%(Patch)s
"""
patches.Patch.__init__(self, **kwargs)
self.center = xy
self.width, self.height = width, height
self.scale = scale
self.angle = angle
self._path = Path.unit_circle()
# Note: This cannot be calculated until this is added to an Axes
self._patch_transform = transforms.IdentityTransform()
def _recompute_transform(self):
"""NOTE: This cannot be called until after this has been added
to an Axes, otherwise unit conversion will fail. This
makes it very important to call the accessor method and
not directly access the transformation member variable.
"""
center = (self.convert_xunits(self.center[0]),
self.convert_yunits(self.center[1]))
width = self.width #self.convert_xunits(self.width)
height = self.height #self.convert_yunits(self.height)
trans = artist.Artist.get_transform(self)
self._patch_transform = transforms.Affine2D() \
.scale(width * 0.5 * self.scale, height * 0.5* self.scale) \
.rotate_deg(self.angle) \
.translate(*trans.transform(center))
def get_path(self):
"""
Return the vertices of the rectangle
"""
return self._path
def get_transform(self):
"""
Return the :class:`~matplotlib.transforms.Transform` applied
to the :class:`Patch`.
"""
return self.get_patch_transform()
def get_patch_transform(self):
self._recompute_transform()
return self._patch_transform
def contains(self, ev):
if ev.x is None or ev.y is None:
return False, {}
x, y = self.get_transform().inverted().transform_point((ev.x, ev.y))
return (x * x + y * y) <= 1.0, {}
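# Minimal usage sketch (mirrors how draw_grids() instantiates it below; the
# numbers are placeholders and `axes` is assumed to be a matplotlib Axes):
#
#     ellipse = DispersionEllipse(xy=(1.0, 2.0), width=24, height=12,
#                                 angle=30, scale=1.0,
#                                 alpha=0.4, facecolor="green", edgecolor="black")
#     axes.add_artist(ellipse)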
class DispersionPlugin(vaex.ui.plugin.PluginLayer):
name = "dispersion"
def __init__(self, parent, layer):
super(DispersionPlugin, self).__init__(parent, layer)
self.dialog.plug_page(self.plug_page, "Dispersions", 2.25, 1.0)
self.dialog.plug_grids(self.define_grids, self.draw_grids)
def define_grids(self, grids):
#grids.define_grid("counts_vector", self.dialog.gridsize_vector, "VZ*0+1")
# covariance matrix terms
# diagonals
for dimension in range(self.dialog.dimensions):
axis_name = self.dialog.axisnames[dimension].lower()
expression = self.expressions[dimension].strip()
if len(expression) > 0:
grids.define_grid(axis_name + "_mom1", self.dialog.vector_grid_size, expression)
grids.define_grid(axis_name + "_mom2", self.dialog.vector_grid_size, "(" + expression + ")**2")
else:
grids.define_grid(axis_name + "_mom1", self.dialog.vector_grid_size, None)
grids.define_grid(axis_name + "_mom2", self.dialog.vector_grid_size, None)
if 1:
for dimension1 in range(self.dialog.dimensions):
for dimension2 in range(dimension1+1, self.dialog.dimensions):
axis_name1 = self.dialog.axisnames[dimension1].lower()
axis_name2 = self.dialog.axisnames[dimension2].lower()
expression1 = self.expressions[dimension1].strip()
expression2 = self.expressions[dimension2].strip()
if len(expression1) > 0 and len(expression2) > 0:
grids.define_grid("cov_" + axis_name1 +"_" +axis_name2, self.dialog.vector_grid_size, "(" + expression1 + ")*(" + expression2 +")")
def draw_grids(self, axes, grid_map, grid_map_vector):
if not self.dispersions_draw:
return
self.ellipses = []
dispersions = []
counts = grid_map_vector["counts"]
#print "counts check", np.sum(counts), np.sum(grid_map["counts"])
#print counts
#print grid_map_vector.keys()
if self.dialog.dimensions == 2:
axis_name1 = self.dialog.axisnames[0].lower()
axis_name2 = self.dialog.axisnames[1].lower()
if len(self.expressions[0]) > 0 and len(self.expressions[1]) > 0:
meanx = grid_map_vector[axis_name1 + "_mom1"]/counts
meany = grid_map_vector[axis_name2 + "_mom1"]/counts
varx = grid_map_vector[axis_name1 + "_mom2"]/counts
vary = grid_map_vector[axis_name2 + "_mom2"]/counts
covxy = grid_map_vector["cov_" +axis_name1 + "_" +axis_name2]/counts - meanx*meany
sigmax = (varx-meanx**2)**0.5
sigmay = (vary-meany**2)**0.5
mask = counts > 0
x = grid_map_vector["x"]
y = grid_map_vector["y"]
x, y = np.meshgrid(x, y)
vmax = np.nanmax(np.sqrt(sigmax.reshape(-1)**2 + sigmay.reshape(-1)**2))
width, height = self.dialog.canvas.get_width_height()
#print "width,height", width, height
max_size = min(width, height) / float(self.dialog.vector_grid_size)# * 0.9
#print max_size
#identity_transform = matplotlib.transforms.IdentityTransform()
#deltax = self.dialog.ranges_show[0][1] - self.dialog.ranges_show[0][0]
#deltay = self.dialog.ranges_show[1][1] - self.dialog.ranges_show[1][0]
#aspect = deltay / float(height) / (deltax/float(width))
#for grid in [x, y, sigmax, sigmay, covxy, counts, mask]:
# print grid.shape
for x, y, sigmax, sigmay, covxy in zip(x[mask].reshape(-1), y[mask].reshape(-1), sigmax[mask].reshape(-1), sigmay[mask].reshape(-1), covxy[mask].reshape(-1)):
try:
covmatrix = [[sigmax**2, covxy], [covxy, sigmay**2]]
eigen_values, eigen_vectors = np.linalg.eig(covmatrix)
except:
pass
else:
scaling = 1./vmax * max_size
device_width = (np.sqrt(np.max(eigen_values)) * scaling)
device_height = (np.sqrt(np.min(eigen_values)) * scaling)
if self.dispersions_unit_length:
length = np.sqrt(device_width**2+device_height**2)
device_width /= float(length) / max_size
device_height /= float(length) / max_size
#ellipse_width = np.sqrt(np.max(eigen_values)) * scaling / width * deltax
#ellipse_height = np.sqrt(np.min(eigen_values)) * scaling / height * deltay
#ellipse_height /= aspect
if sigmax < sigmay: # if x was smaller, the largest eigenvalue corresponds to the y value
device_width, device_height = device_height, device_width
#ellipse_width, ellipse_height = ellipse_height, ellipse_width
#ellipse_height /= aspect
angle = np.arctan(2*covxy / (sigmax**2-sigmay**2))/2.
#angle2 = np.arctan(2*covxy / (sigmax**2-sigmay**2))/2.
#angle = angle2 = 0
#print aspect, sigmax, sigmay, sigmax/sigmay, covxy/(sigmax*sigmay), ellipse_width/ellipse_height
#aspect = 0.1
#m = [[np.cos(angle2), np.sin(angle2)*aspect], [-np.sin(angle2), np.cos(angle2)*aspect]]
#ellipse_width, ellipse_height = np.dot(m, [ellipse_width, ellipse_height])
#print covxy/(sigmax*sigmay), angle, sigmax, sigmay, covxy
#device_x, device_y = axes.transData.transform((x, y))
#print device_x, device_y, device_width, device_height
#ellipse = patches.Ellipse(xy=(device_x, device_y), width=device_width, height=device_height, angle=angle, transform=identity_transform,
# alpha=0.4, color="blue") #rand()*360
#ellipse = patches.Ellipse(xy=(x, y), width=ellipse_width, height=ellipse_height, angle=np.degrees(angle),
# alpha=0.4, color="blue") #rand()*360
ellipse = DispersionEllipse(xy=(x, y), width=device_width, height=device_height, angle=np.degrees(angle), scale=self.scale_dispersion,
alpha=0.4, facecolor="green", edgecolor="black") #rand()*360
axes.add_artist(ellipse)
self.ellipses.append(ellipse)
#axes.quiver()
#[Ellipse(xy=rand(2)*10, width=rand(), height=rand(), angle=rand()*360)
#@staticmethod
#def useon(dialog_class):
# return issubclass(dialog_class, vaex.plot_windows.VolumeRenderingPlotDialog)
def plug_page(self, page):
layout = self.layout = QtGui.QGridLayout()
page.setLayout(self.layout)
layout.setSpacing(0)
layout.setContentsMargins(0,0,0,0)
layout.setAlignment(QtCore.Qt.AlignTop)
row = 0
self.dispersions_draw = bool(eval(self.dialog.options.get("disp_draw", "True")))
def setter(value):
self.dispersions_draw = value
self.dialog.plot()
self.dispersions_draw_checkbox = self.dialog.create_checkbox(page, "Draw dispersion tensors", lambda : self.dispersions_draw, setter)
layout.addWidget(self.dispersions_draw_checkbox, row, 1)
row += 1
self.dispersions_unit_length = bool(eval(self.dialog.options.get("disp_unit", "False")))
def setter(value):
self.dispersions_unit_length = value
self.dialog.plot()
self.dispersions_unit_lengthcheckbox = self.dialog.create_checkbox(page, "Unit length", lambda : self.dispersions_unit_length, setter)
layout.addWidget(self.dispersions_unit_lengthcheckbox, row, 1)
row += 1
self.expressions = []
self.expression_boxes = []
for dimension in range(self.dialog.dimensions):
axis_name = self.dialog.axisnames[dimension]
expression_box = QtGui.QComboBox(page)
expression_box.setEditable(True)
expression_box.setMinimumContentsLength(10)
self.expression_boxes.append(expression_box)
self.layout.addWidget(QtGui.QLabel(axis_name + '-axis:', page), row, 0)
self.layout.addWidget(expression_box, row, 1, QtCore.Qt.AlignLeft)
expression = self.dialog.options.get("disp"+axis_name.lower(), "")
expression_box.lineEdit().setText(expression)
self.expressions.append(expression)
#self.onExpressionChangedPartials.append()
#expression_box.lineEdit().editingFinished.connect(self.onExpressionChangedPartials[axisIndex])
callback = functools.partial(self.onExpressionChanged, axis_index=dimension)
expression_box.lineEdit().editingFinished.connect(callback)
row += 1
self.scale_dispersion = eval(self.dialog.options.get("disp_scale", "1"))
def setter(value):
self.scale_dispersion = value
for ellipse in self.ellipses:
ellipse.scale = self.scale_dispersion
self.dialog.canvas.draw()
#self.dialog.plot()
self.scale_dispersion_label, self.scale_dispersion_slider, self.scale_dispersion_value_label =\
self.dialog.create_slider(page, "scale: ", 1./100, 100., lambda : self.scale_dispersion, setter, format=" {0:>05.2f}", transform=lambda x: 10**x, inverse=lambda x: np.log10(x))
layout.addWidget(self.scale_dispersion_label, row, 0)
layout.addWidget(self.scale_dispersion_slider, row, 1)
layout.addWidget(self.scale_dispersion_value_label, row, 2)
row += 1
def onExpressionChanged(self, _=None, axis_index=-1):
text = str(self.expression_boxes[axis_index].lineEdit().text())
logger.debug("text set for axis %i: %s" % (axis_index, text))
if text != self.expressions[axis_index]:
axis_name = self.dialog.axisnames[axis_index].lower()
self.expressions[axis_index] = text
if text == "": # check if we can replot without doing the whole calculation
self.dialog.plot()
else:
non_empty = [k for k in self.expressions if len(k) > 0]
if len(non_empty) == len(self.expressions):
self.dialog.compute()
self.dialog.jobsManager.execute()
else:
logger.debug("nothing changed")
|
mit
| 1,899,751,465,761,447,200
| 38.0301
| 180
| 0.68329
| false
| 3.020968
| false
| false
| false
|
marionleborgne/nupic
|
tests/swarming/nupic/swarming/experiments/dummyV2/description.py
|
1
|
15299
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'/Users/ronmarianetti/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.htmpredictionmodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "HTMPrediction",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [ (u'timestamp', 'first'),
(u'gym', 'first'),
(u'consumption', 'mean'),
(u'address', 'first')],
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalNextStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step; 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': {
'address': { 'fieldname': u'address',
'n': 300,
'name': u'address',
'type': 'SDRCategoryEncoder',
'w': 21},
'consumption': { 'clipInput': True,
'fieldname': u'consumption',
'maxval': 200,
'minval': 0,
'n': 1500,
'name': u'consumption',
'type': 'ScalarEncoder',
'w': 21},
'gym': { 'fieldname': u'gym',
'n': 600,
'name': u'gym',
'type': 'SDRCategoryEncoder',
'w': 21},
'timestamp_dayOfWeek': { 'dayOfWeek': (7, 3),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'},
'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (7, 8),
'type': 'DateEncoder'}},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
# Valid keys is the desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
# What percent of the column's receptive field is available
# for potential synapses. At initialization time, we will
# choose potentialPct * (2*potentialRadius+1)^2
'potentialPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tmEnable' : True,
'tmParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 15,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'SDRClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'verbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
predictionSteps = int(round(aggregationDivide(
config['predictAheadTime'], config['aggregationInfo'])))
assert (predictionSteps >= 1)
config['modelParams']['clParams']['steps'] = str(predictionSteps)
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'nupic',
# Input stream specification per py/nupicengine/cluster/database/StreamDef.json.
#
'dataset' : {u'info': u'test_NoProviders',
u'streams': [ { u'columns': [u'*'],
u'info': "test data",
u'source': "file://swarming/test_data.csv"}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
#'iterationCount' : ITERATION_COUNT,
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'consumption',inferenceElement=InferenceElement.prediction,
metric='rmse'),
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
}
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)
|
agpl-3.0
| 8,030,776,921,436,739,000
| 37.928753
| 110
| 0.589973
| false
| 4.550565
| true
| false
| false
|
aleosd/sfpy
|
client/gamedata.py
|
1
|
14314
|
# -*- coding: UTF-8 -*-
import datetime
import logging
from .settings import LOGGER_NAME
from .gameapi import APIManager
class Resources:
def __init__(self):
self.wallet = {}
def add(self, data):
self.wallet = data.get('wallet', {})
def is_enough_for_mission(self, mission):
for currency_data in mission.price['currencies']:
currency_id = str(currency_data['id'])
if self.wallet.get(currency_id, 0) < currency_data['amount']:
return False
return True
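# Illustrative data shapes, inferred from the checks above (the ids and
# amounts are invented, not taken from the game API):
#
#     resources.wallet            -> {"1": 500, "7": 120}
#     mission.price["currencies"] -> [{"id": 1, "amount": 250}]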
class Progress:
TYPE_MISSION = "FUSE"
TYPE_UPGRADE = "UPGRADE"
def __init__(self, **kwargs):
self.id = kwargs['id']
self.finished = kwargs['finished']
self.start_time = self.time_from_ms(kwargs['startTime'])
self.end_time = self.time_from_ms(kwargs['endTime'])
self.type = kwargs['type']
if self.is_mission():
self.mission_id = kwargs['fuseData']['missionId']
@staticmethod
def time_from_ms(ms):
return datetime.datetime.fromtimestamp(ms // 1000)
def is_finished(self):
return self.finished
def time_elapsed(self):
return self.end_time - datetime.datetime.now()
def time_elapsed_verbose(self):
eta = self.time_elapsed()
return "{:02d}:{:02d}:{:02d}".format(
eta.seconds // 3600,
(eta.seconds // 60) % 60,
eta.seconds % 60
)
def is_mission(self):
return self.type == self.TYPE_MISSION
class Mission:
def __init__(self, **kwargs):
self.id = kwargs['id']
self.name = kwargs['name']
self.in_progress = kwargs['inProgress']
self.is_success = kwargs['isSuccess']
self.difficulty = kwargs['difficulty']
self.duration = kwargs['duration']
self.experience = kwargs['experience']
self.price = kwargs['price']
self._professions = kwargs['professions']
self.slot_count = kwargs['slotCount']
self.quality_name = kwargs['missionQualityName']
self.mission_type = kwargs['missionType']
def is_free(self):
return not (self.price['currencies'] or self.price['resources'])
def is_available(self):
return not self.in_progress
def get_profession_ids(self):
return [i['id'] for i in self._professions]
def is_mining(self):
return self.quality_name == u"Добыча ресурсов" and self.is_free()
def is_battle(self):
return self.quality_name == u"Боевое задание"
def is_cult(self):
return self.quality_name == u"Развитие культа"
def is_invasion(self):
return self.quality_name == u"Вторжение"
def is_case(self):
return self.mission_type == "Case"
def result(self):
if self.is_success:
return u"успех"
return u"неудача"
class Follower:
def __init__(self, **kwargs):
self.id = kwargs['id']
self.efficiency = kwargs['efficiency']
self.in_progress = kwargs['inProgress']
self.profession = kwargs['profession']
def is_available(self):
return not self.in_progress
@property
def profession_id(self):
return self.profession['id']
class ProgressManager:
def __init__(self):
self.progresses = {}
self.logger = logging.getLogger(LOGGER_NAME)
def add_progress(self, data):
p = Progress(**data)
self.progresses[p.id] = p
self.logger.debug(u"Добавляем прогресс id {}".format(p.id))
return p
def remove_progress(self, progress):
del self.progresses[progress.id]
def add_many(self, data, clear=True):
self.logger.info(u"Добавляем информацию по прогрессам")
if clear:
self.clear()
for progress_data in data:
self.add_progress(progress_data)
def clear(self):
self.progresses = {}
def get_mission_progress_list(self):
return [p for p in self.progresses.values() if p.is_mission()]
class MissionManager:
def __init__(self):
self.missions = {}
self.logger = logging.getLogger(LOGGER_NAME)
def add_mission(self, data):
mission = Mission(**data)
self.missions[mission.id] = mission
self.logger.debug(u"Добавляем миссию id {}".format(mission.id))
def add_many(self, data, clear=True):
if clear:
self.clear()
self.logger.info(u"Добавляем миссии: {}".format(len(data)))
for mission_data in data:
self.add_mission(mission_data)
def clear(self):
self.missions = {}
def mining_missions(self):
u"""
Return the missions that are available and do not require resources.
The list is sorted by ascending mission duration and then by the
number of adepts required to run the mission.
:return: List of missions
"""
missions = [m for m in self.missions.values() if m.is_mining() and
m.is_available()]
return sorted(missions, key=lambda m: (m.duration, m.slot_count))
def invasion_missions(self):
missions = [m for m in self.missions.values() if m.is_invasion() and
m.is_available()]
return sorted(missions, key=lambda m: (m.duration, m.slot_count))
def case_missions(self):
return [m for m in self.missions.values() if m.is_case() and
m.is_available()]
def cult_missions(self):
return [m for m in self.missions.values() if m.is_cult() and
m.is_available()]
def get(self, id_):
return self.missions.get(id_)
class FollowerManager:
def __init__(self):
self.followers = {}
self.logger = logging.getLogger(LOGGER_NAME)
def add_follower(self, data):
follower = Follower(**data)
self.followers[follower.id] = follower
def add_many(self, data, clear=True):
if data and clear:
self.clear()
for follower in data:
self.add_follower(follower)
def clear(self):
self.followers = {}
def free_followers(self):
return {k: f for k, f in self.followers.items() if f.is_available()}
def get_for_profession(self, profession, free=False):
u"""
Return the followers that have the given profession.
:param free: bool, consider only followers that are not busy
:param profession: int, profession id
:return: list
"""
if free:
followers = self.free_followers().values()
else:
followers = self.followers.values()
if isinstance(profession, (list, tuple)):
return [f for f in followers if f.profession_id in profession]
if isinstance(profession, int):
return [f for f in followers if f.profession_id == profession]
raise ValueError(u"Profession must be an int or list or tuple")
def get_efficient(self, count=None, free=False, exclude=None):
u"""
Return the followers sorted by efficiency, most efficient first.
Use count to limit how many entries are returned.
:param free: bool, consider only followers that are not busy
:param count: int
:param exclude: followers list to exclude from result
:return: list
"""
if free:
followers = self.free_followers().values()
else:
followers = self.followers.values()
if exclude:
followers = [f for f in followers if f not in exclude]
fs = sorted(followers, key=lambda k: k.efficiency, reverse=True)
return fs[0:count]
class Game:
def __init__(self):
self.logger = logging.getLogger(LOGGER_NAME)
self.progress_manager = ProgressManager()
self.mission_manager = MissionManager()
self.follower_manager = FollowerManager()
self.resources = Resources()
self.api = APIManager()
self.data_has_changed = False
def start(self, session):
start_data = self.api.start(session)
self.update_state(start_data)
self.process_state()
def turn(self):
data = self.api.get_game_data()
self.update_state(data)
self.process_state()
def update_state(self, data):
self.resources.add(data)
self.progress_manager.add_many(data.get('progresses', []))
self.mission_manager.add_many(data.get('missions', []))
self.follower_manager.add_many(data.get('followers', []))
def process_state(self):
self.process_progresses(
self.progress_manager.get_mission_progress_list())
case_missions = self.mission_manager.case_missions()
if case_missions:
self.process_case_missions(case_missions)
mining_missions = self.mission_manager.mining_missions()
if mining_missions:
self.process_missions(mining_missions)
invasion_missions = self.mission_manager.invasion_missions()
if invasion_missions:
self.process_missions(invasion_missions)
cult_missions = self.mission_manager.cult_missions()
if cult_missions:
self.process_missions(cult_missions)
if self.data_has_changed:
self.logger.info(u"Данные изменились, обрабатываем повторно")
self.data_has_changed = False
self.process_state()
def process_progresses(self, progresses):
u"""
Check the state of the current progresses; for each finished one,
send the finishing request to the API.
:param progresses: list of Progress objects
"""
for p in progresses:
if self.data_has_changed:
break
mission = self.mission_manager.get(p.mission_id)
self.logger.info(u"Проверяем состояние прогресса {} по "
u"миссии \"{}\"".format(p.id, mission.name))
if p.is_finished():
self.logger.info(
u"Прогресс {} завершен, отправляем запрос".format(p.id))
status, result = self.api.finish_progress(p)
self._handle_call_result(status, result)
else:
self.logger.info(
u"До окончания прогресса {} еще {}, результат - {}".format(
p.id, p.time_elapsed_verbose(), mission.result()))
def process_missions(self, missions):
self.logger.info(u"Доступно миссий {}: {}".format(
missions[0].quality_name, len(missions)))
for mission in missions:
if self.data_has_changed:
break
status, result = self.process_mission(mission)
self._handle_call_result(status, result)
def process_mission(self, mission):
self.logger.info(u"Пробуем запустить миссию {}".format(mission.id))
followers = self.follower_manager.free_followers()
if mission.slot_count > len(followers):
return self.api.STATUS_ACTION_NOT_AVAILABLE, \
u"Недостаточно последователей"
if not self.resources.is_enough_for_mission(mission):
return self.api.STATUS_ACTION_NOT_AVAILABLE, \
u"Недостаточно ресурсов"
matched_followers = self.follower_manager.get_for_profession(
mission.get_profession_ids(), free=True)
if len(matched_followers) < mission.slot_count:
additional_followers = self.follower_manager.get_efficient(
mission.slot_count - len(matched_followers), free=True,
exclude=matched_followers
)
matched_followers = matched_followers + additional_followers
return self.api.start_mission(mission, matched_followers)
def process_case_missions(self, missions):
self.logger.info(u"Доступно ивентовых миссий: {}".format(len(missions)))
for mission in missions:
if self.data_has_changed:
break
status, result = self.process_case_mission(mission)
self._handle_call_result(status, result)
def process_case_mission(self, mission):
self.logger.info(u"Пробуем запустить миссию {}".format(mission.id))
followers = self.follower_manager.free_followers()
if mission.slot_count > len(followers):
return self.api.STATUS_ACTION_NOT_AVAILABLE, \
u"Недостаточно последователей"
followers = self.follower_manager.get_efficient(free=True)
return self.api.start_mission(mission, followers[-mission.slot_count:])
def _handle_call_result(self, status, result):
if status == self.api.STATUS_SUCCESS:
self.logger.info(u"Успешный запрос, сервер вернул \"{}\"".format(
result['operationResult']['actionFailCause']
))
self.update_state(result['updateData'])
self.data_has_changed = True
elif status == self.api.STATUS_ACTION_NOT_AVAILABLE:
self.logger.info(result)
elif status == self.api.STATUS_GAME_ERROR:
self.logger.error(u"Ошибка выполнения запроса: \"{}\"".format(
result['operationResult']['actionFailCause']
))
else:
self.logger.critical(result)
|
mit
| 4,507,464,201,464,738,000
| 33.430412
| 80
| 0.601916
| false
| 3.247994
| false
| false
| false
|
UManPychron/pychron
|
pychron/dvc/tasks/actions.py
|
1
|
7897
|
# ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
import os
from pyface.confirmation_dialog import confirm
from pyface.constant import YES
from pyface.message_dialog import warning, information
from pyface.tasks.action.task_action import TaskAction
from traitsui.menu import Action
from pychron.core.ui.progress_dialog import myProgressDialog
from pychron.dvc import repository_path
from pychron.envisage.resources import icon
from pychron.envisage.tasks.actions import restart
from pychron.pychron_constants import DVC_PROTOCOL
class LocalRepositoryAction(TaskAction):
enabled_name = 'selected_local_repository_name'
class RemoteRepositoryAction(TaskAction):
enabled_name = 'selected_repository'
class CloneAction(RemoteRepositoryAction):
method = 'clone'
name = 'Clone'
image = icon('repo-clone')
tooltip = 'Clone repository from remote. e.g. git clone https://github.com...'
class AddBranchAction(LocalRepositoryAction):
name = 'Add Branch'
method = 'add_branch'
image = icon('git-branch')
tooltip = 'Add branch to selected repository'
class CheckoutBranchAction(LocalRepositoryAction):
name = 'Checkout Branch'
method = 'checkout_branch'
image = icon('check')
tooltip = 'Checkout branch. e.g. git checkout <branch_name>'
class PushAction(LocalRepositoryAction):
name = 'Push'
method = 'push'
image = icon('repo-push')
tooltip = 'Push changes to remote. git push'
class PullAction(LocalRepositoryAction):
name = 'Pull'
method = 'pull'
image = icon('repo-pull')
tooltip = 'Pull changes from remote. git pull'
class RebaseAction(LocalRepositoryAction):
name = 'Rebase'
method = 'rebase'
image = icon('git-merge')
tooltip = 'Rebase commits from [master] onto current branch. git rebase'
class FindChangesAction(LocalRepositoryAction):
name = 'Find Changes'
method = 'find_changes'
tooltip = 'Search all local repositories for changes. e.g. git log <remote>/branch..HEAD'
image = icon('search')
class DeleteLocalChangesAction(LocalRepositoryAction):
name = 'Delete Local Changes'
method = 'delete_local_changes'
image = icon('trashcan')
class DeleteChangesAction(LocalRepositoryAction):
name = 'Delete Commits'
method = 'delete_commits'
image = icon('trashcan')
class ArchiveRepositoryAction(LocalRepositoryAction):
name = 'Archive Repository'
method = 'archive_repository'
image = icon('squirrel')
class LoadOriginAction(TaskAction):
name = 'Load Origin'
method = 'load_origin'
image = icon('cloud-download')
tooltip = 'Update the list of available repositories'
class SyncSampleInfoAction(LocalRepositoryAction):
name = 'Sync Repo/DB Sample Info'
method = 'sync_sample_info'
tooltip = 'Copy information from Central Database to the selected repository'
image = icon('octicon-database')
class SyncRepoAction(LocalRepositoryAction):
name = 'Sync'
method = 'sync_repo'
tooltip = 'Sync to Origin. aka Pull then Push'
image = icon('sync')
class RepoStatusAction(LocalRepositoryAction):
name = 'Status'
method = 'status'
tooltip = 'Report the repository status. e.g. git status'
image = icon('pulse')
class BookmarkAction(LocalRepositoryAction):
name = 'Bookmark'
method = 'add_bookmark'
tooltip = 'Add a bookmark to the data reduction history. e.g. git tag -a <name> -m <message>'
image = icon('git-bookmark')
class SortLocalReposAction(TaskAction):
name = 'Sort Repos'
method = 'sort_repos'
tooltip = 'Sort repos by most recently analyzed'
# class SyncMetaDataAction(Action):
# name = 'Sync Repo/DB Metadata'
#
# def perform(self, event):
# app = event.task.window.application
# app.information_dialog('Sync Repo disabled')
# return
#
# dvc = app.get_service('pychron.dvc.dvc.DVC')
# if dvc:
# dvc.repository_db_sync('IR986', dry_run=False)
class ShareChangesAction(Action):
name = 'Share Changes'
def perform(self, event):
from git import Repo
from git.exc import InvalidGitRepositoryError
from pychron.paths import paths
remote = 'origin'
branch = 'master'
repos = []
for d in os.listdir(paths.repository_dataset_dir):
if d.startswith('.') or d.startswith('~'):
continue
try:
r = Repo(repository_path(d))
except InvalidGitRepositoryError:
continue
repos.append(r)
n = len(repos)
pd = myProgressDialog(max=n - 1,
can_cancel=True,
can_ok=False)
pd.open()
shared = False
for r in repos:
pd.change_message('Fetch {}'.format(os.path.basename(r.working_dir)))
c = r.git.log('{}/{}..HEAD'.format(remote, branch), '--oneline')
if c:
r.git.pull()
d = os.path.basename(r.working_dir)
if confirm(None, 'Share changes made to {}.\n\n{}'.format(d, c)) == YES:
r.git.push(remote, branch)
shared = True
msg = 'Changes successfully shared' if shared else 'No changes to share'
information(None, msg)
class GenerateCurrentsAction(Action):
name = 'Generate Currents'
def perform(self, event):
app = event.task.window.application
dvc = app.get_service(DVC_PROTOCOL)
dvc.generate_currents()
class MapRunIDsAction(Action):
name = 'Map RunIDs'
def perform(self, event):
app = event.task.window.application
dvc = app.get_service(DVC_PROTOCOL)
from pychron.dvc.map_runid import MapRunID
mr = MapRunID()
mr.map(dvc)
class ClearCacheAction(Action):
name = 'Clear Cache'
def perform(self, event):
app = event.task.window.application
dvc = app.get_service(DVC_PROTOCOL)
dvc.clear_cache()
class WorkOfflineAction(Action):
name = 'Work Offline'
def perform(self, event):
app = event.task.window.application
dvc = app.get_service(DVC_PROTOCOL)
if dvc.db.kind != 'mysql':
warning(None, 'Your are not using a centralized MySQL database')
else:
from pychron.dvc.work_offline import WorkOffline
wo = WorkOffline(dvc=dvc, application=app)
if wo.initialize():
wo.edit_traits()
class UseOfflineDatabase(Action):
name = 'Use Offline Database'
def perform(self, event):
from pychron.dvc.work_offline import switch_to_offline_database
app = event.task.window.application
switch_to_offline_database(app.preferences)
ret = confirm(None, 'You are now using the offline database. Restart now for changes to take effect')
if ret == YES:
restart()
# ============= EOF =============================================
|
apache-2.0
| 155,290,666,617,389,280
| 29.141221
| 109
| 0.629859
| false
| 3.998481
| false
| false
| false
|
thombashi/DataProperty
|
dataproperty/_container.py
|
1
|
3704
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import abc
from typepy import RealNumber
class AbstractContainer(metaclass=abc.ABCMeta):
@abc.abstractproperty
def min_value(self): # pragma: no cover
pass
@abc.abstractproperty
def max_value(self): # pragma: no cover
pass
@abc.abstractmethod
def mean(self): # pragma: no cover
pass
@abc.abstractmethod
def update(self, value): # pragma: no cover
pass
@abc.abstractmethod
def merge(self, value): # pragma: no cover
pass
def __repr__(self):
if not self.has_value():
return "None"
return ", ".join(["min={}".format(self.min_value), "max={}".format(self.max_value)])
def has_value(self):
return self.min_value is not None and self.max_value is not None
def is_same_value(self):
return self.has_value() and self.min_value == self.max_value
def is_zero(self):
return self.has_value() and self.min_value == 0 and self.max_value == 0
class ListContainer(AbstractContainer):
__slots__ = ("__value_list",)
@property
def min_value(self):
try:
return min(self.__value_list)
except ValueError:
return None
@property
def max_value(self):
try:
return max(self.__value_list)
except ValueError:
return None
@property
def value_list(self):
return self.__value_list
def __init__(self, value_list=None):
if value_list is None:
self.__value_list = []
else:
self.__value_list = value_list
def mean(self):
try:
return sum(self.__value_list) / len(self.__value_list)
except ZeroDivisionError:
return float("nan")
def update(self, value):
store_value = RealNumber(value).try_convert()
if store_value is None:
return
self.__value_list.append(store_value)
def merge(self, value):
try:
self.__value_list.extend(value)
except TypeError:
pass
class MinMaxContainer(AbstractContainer):
__slots__ = ("__min_value", "__max_value")
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=None):
self.__min_value = None
self.__max_value = None
if value_list is None:
return
for value in value_list:
self.update(value)
def __eq__(self, other):
return all([self.min_value == other.min_value, self.max_value == other.max_value])
def __ne__(self, other):
return any([self.min_value != other.min_value, self.max_value != other.max_value])
def __contains__(self, x):
return self.min_value <= x <= self.max_value
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
def merge(self, value):
self.update(value.min_value)
self.update(value.max_value)
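# --- Editor's illustrative sketch (not part of the original module) ---------
# A minimal, hedged usage example of MinMaxContainer; the input values are
# invented for illustration. ListContainer.update() relies on
# typepy.RealNumber to filter non-numeric input, so it is not exercised here.
if __name__ == "__main__":  # pragma: no cover
    mm = MinMaxContainer([5, -2, 7])
    assert (mm.min_value, mm.max_value) == (-2, 7)
    assert 0 in mm                 # __contains__ checks min <= x <= max
    assert mm.diff() == 9          # max - min
    assert mm.mean() == 2.5        # (max + min) * 0.5

    mm.merge(MinMaxContainer([10]))
    assert (mm.min_value, mm.max_value) == (-2, 10)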
|
mit
| -3,326,360,890,995,376,000
| 23.368421
| 92
| 0.560475
| false
| 3.862357
| false
| false
| false
|
CodyKochmann/generators
|
generators/inline_tools.py
|
1
|
2039
|
#!/usr/bin/env python
from __future__ import print_function
_print = print
del print_function
from inspect import getsource
from strict_functions import strict_globals, noglobals
__all__ = 'asserts', 'print', 'attempt'
@strict_globals(getsource=getsource)
def asserts(input_value, rule, message=''):
""" this function allows you to write asserts in generators since there are
moments where you actually want the program to halt when certain values
are seen.
"""
assert callable(rule) or type(rule)==bool, 'asserts needs rule to be a callable function or a test boolean'
assert isinstance(message, str), 'asserts needs message to be a string'
# if the message is empty and rule is callable, fill message with rule's source code
if len(message)==0 and callable(rule):
try:
s = getsource(rule).splitlines()[0].strip()
except:
s = repr(rule).strip()
message = 'illegal input of {} breaks - {}'.format(input_value, s)
if callable(rule):
# if rule is a function, run the function and assign it to rule
rule = rule(input_value)
# now, assert the rule and return the input value
assert rule, message
return input_value
del getsource
@strict_globals(_print=_print)
def print(*a):
""" print just one that returns what you give it instead of None """
try:
_print(*a)
return a[0] if len(a) == 1 else a
except:
_print(*a)
del _print
@noglobals
def attempt(fn, default_output=None):
''' attempt running a function in a try block without raising exceptions '''
assert callable(fn), 'generators.inline_tools.attempt needs fn to be a callable function'
try:
return fn()
except:
return default_output
del strict_globals, noglobals
if __name__ == '__main__':
print(print(attempt(lambda:1/0)))
print(print(attempt(lambda:1/2)))
print(print(attempt(lambda:asserts(1, lambda i:1/i))))
print(asserts(0, lambda i:1>i))
asserts(2, lambda i:1>i)
|
mit
| -1,161,471,738,659,893,500
| 30.859375
| 111
| 0.659637
| false
| 3.782931
| false
| false
| false
|
njoubert/ardupilot
|
Tools/ardupilotwaf/px4.py
|
1
|
11068
|
#!/usr/bin/env python
# encoding: utf-8
"""
Waf tool for PX4 build
"""
from waflib import Errors, Logs, Task, Utils
from waflib.TaskGen import after_method, before_method, feature
import os
import shutil
import sys
_dynamic_env_data = {}
def _load_dynamic_env_data(bld):
bldnode = bld.bldnode.make_node('modules/PX4Firmware')
for name in ('cxx_flags', 'include_dirs', 'definitions'):
_dynamic_env_data[name] = bldnode.find_node(name).read().split(';')
@feature('px4_ap_library', 'px4_ap_program')
@before_method('process_source')
def px4_dynamic_env(self):
# The generated files from configuration possibly don't exist if it's just
# a list command (TODO: figure out a better way to address that).
if self.bld.cmd == 'list':
return
if not _dynamic_env_data:
_load_dynamic_env_data(self.bld)
self.env.append_value('INCLUDES', _dynamic_env_data['include_dirs'])
self.env.prepend_value('CXXFLAGS', _dynamic_env_data['cxx_flags'])
self.env.prepend_value('CXXFLAGS', _dynamic_env_data['definitions'])
# Single static library
# NOTE: This only works for local static library dependencies - fake
# libraries aren't supported yet
@feature('px4_ap_program')
@after_method('apply_link')
@before_method('process_use')
def px4_import_objects_from_use(self):
queue = list(Utils.to_list(getattr(self, 'use', [])))
names = set()
while queue:
name = queue.pop(0)
if name in names:
continue
names.add(name)
try:
tg = self.bld.get_tgen_by_name(name)
except Errors.WafError:
continue
tg.post()
for t in getattr(tg, 'compiled_tasks', []):
self.link_task.set_inputs(t.outputs)
queue.extend(Utils.to_list(getattr(tg, 'use', [])))
class px4_copy(Task.Task):
color = 'CYAN'
def run(self):
shutil.copy2(self.inputs[0].abspath(), self.outputs[0].abspath())
def keyword(self):
return "PX4: Copying %s to" % self.inputs[0].name
def __str__(self):
return self.outputs[0].path_from(self.generator.bld.bldnode)
class px4_add_git_hashes(Task.Task):
run_str = '${PYTHON} ${PX4_ADD_GIT_HASHES} --ardupilot ${PX4_APM_ROOT} --px4 ${PX4_ROOT} --nuttx ${PX4_NUTTX_ROOT} ${SRC} ${TGT}'
color = 'CYAN'
def keyword(self):
return "PX4: Copying firmware and adding git hashes"
def __str__(self):
return self.outputs[0].path_from(self.outputs[0].ctx.launch_node())
def _update_firmware_sig(fw_task, firmware, elf):
original_post_run = fw_task.post_run
def post_run():
original_post_run()
firmware.sig = firmware.cache_sig = Utils.h_file(firmware.abspath())
elf.sig = elf.cache_sig = Utils.h_file(elf.abspath())
fw_task.post_run = post_run
_cp_px4io = None
_firmware_semaphorish_tasks = []
_upload_task = []
@feature('px4_ap_program')
@after_method('process_source')
def px4_firmware(self):
global _cp_px4io, _firmware_semaphorish_tasks, _upload_task
board_name = self.env.get_flat('PX4_BOARD_NAME')
px4 = self.bld.cmake('px4')
px4.vars['APM_PROGRAM_LIB'] = self.link_task.outputs[0].abspath()
if self.env.PX4_PX4IO_NAME and not _cp_px4io:
px4io_task = self.create_cmake_build_task('px4', 'fw_io')
px4io = px4io_task.cmake.bldnode.make_node(
'src/modules/px4iofirmware/%s.bin' % self.env.PX4_PX4IO_NAME,
)
px4io_elf = px4.bldnode.make_node(
'src/modules/px4iofirmware/%s' % self.env.PX4_PX4IO_NAME
)
px4io_task.set_outputs([px4io, px4io_elf])
romfs = self.bld.bldnode.make_node(self.env.PX4_ROMFS_BLD)
romfs_px4io = romfs.make_node('px4io/px4io.bin')
romfs_px4io.parent.mkdir()
_cp_px4io = self.create_task('px4_copy', px4io, romfs_px4io)
_cp_px4io.keyword = lambda: 'PX4: Copying PX4IO to ROMFS'
px4io_elf_dest = self.bld.bldnode.make_node(self.env.PX4IO_ELF_DEST)
cp_px4io_elf = self.create_task('px4_copy', px4io_elf, px4io_elf_dest)
fw_task = self.create_cmake_build_task(
'px4',
'build_firmware_%s' % board_name,
)
fw_task.set_run_after(self.link_task)
# we need to synchronize in order to avoid the output expected by the
# previous ap_program being overwritten before used
for t in _firmware_semaphorish_tasks:
fw_task.set_run_after(t)
_firmware_semaphorish_tasks = []
if self.env.PX4_PX4IO_NAME and _cp_px4io.generator is self:
fw_task.set_run_after(_cp_px4io)
firmware = px4.bldnode.make_node(
'src/firmware/nuttx/nuttx-%s-apm.px4' % board_name,
)
fw_elf = px4.bldnode.make_node(
'src/firmware/nuttx/firmware_nuttx',
)
_update_firmware_sig(fw_task, firmware, fw_elf)
fw_dest = self.bld.bldnode.make_node(
os.path.join(self.program_dir, '%s.px4' % self.program_name)
)
git_hashes = self.create_task('px4_add_git_hashes', firmware, fw_dest)
git_hashes.set_run_after(fw_task)
_firmware_semaphorish_tasks.append(git_hashes)
fw_elf_dest = self.bld.bldnode.make_node(
os.path.join(self.program_dir, self.program_name)
)
cp_elf = self.create_task('px4_copy', fw_elf, fw_elf_dest)
cp_elf.set_run_after(fw_task)
_firmware_semaphorish_tasks.append(cp_elf)
self.build_summary = dict(
target=self.name,
binary=fw_elf_dest.path_from(self.bld.bldnode),
)
if self.bld.options.upload:
if _upload_task:
Logs.warn('PX4: upload for %s ignored' % self.name)
return
_upload_task = self.create_cmake_build_task('px4', 'upload')
_upload_task.set_run_after(fw_task)
_firmware_semaphorish_tasks.append(_upload_task)
def _px4_taskgen(bld, **kw):
if 'cls_keyword' in kw and not callable(kw['cls_keyword']):
cls_keyword = str(kw['cls_keyword'])
kw['cls_keyword'] = lambda tsk: 'PX4: ' + cls_keyword
if 'cls_str' in kw and not callable(kw['cls_str']):
cls_str = str(kw['cls_str'])
kw['cls_str'] = lambda tsk: cls_str
kw['color'] = 'CYAN'
return bld(**kw)
@feature('_px4_romfs')
def _process_romfs(self):
bld = self.bld
file_list = [
'init.d/rc.APM',
'init.d/rc.error',
'init.d/rcS',
(bld.env.PX4_BOOTLOADER, 'bootloader/fmu_bl.bin'),
]
if bld.env.PX4_BOARD_RC:
board_rc = 'init.d/rc.%s' % bld.env.get_flat('PX4_BOARD_NAME')
file_list.append((board_rc, 'init.d/rc.board'))
romfs_src = bld.srcnode.find_dir(bld.env.PX4_ROMFS_SRC)
romfs_bld = bld.bldnode.make_node(bld.env.PX4_ROMFS_BLD)
for item in file_list:
if isinstance(item, str):
src = romfs_src.make_node(item)
dst = romfs_bld.make_node(item)
else:
src = romfs_src.make_node(item[0])
dst = romfs_bld.make_node(item[1])
bname = os.path.basename(str(src))
if bname in bld.env.ROMFS_EXCLUDE:
print("Excluding %s" % bname)
continue
dst.parent.mkdir()
self.create_task('px4_copy', src, dst)
def configure(cfg):
cfg.env.CMAKE_MIN_VERSION = '3.2'
cfg.load('cmake')
bldnode = cfg.bldnode.make_node(cfg.variant)
env = cfg.env
env.AP_PROGRAM_FEATURES += ['px4_ap_program']
kw = env.AP_LIBRARIES_OBJECTS_KW
kw['features'] = Utils.to_list(kw.get('features', [])) + ['px4_ap_library']
def srcpath(path):
return cfg.srcnode.make_node(path).abspath()
def bldpath(path):
return bldnode.make_node(path).abspath()
board_name = env.get_flat('PX4_BOARD_NAME')
# TODO: we should move stuff from mk/PX4 to Tools/ardupilotwaf/px4 after
    # we stop using the make-based build system
env.PX4_ROMFS_SRC = 'mk/PX4/ROMFS'
env.PX4_ROMFS_BLD = 'px4-extra-files/ROMFS'
env.PX4_BOOTLOADER = '/../bootloader/%s' % env.PX4_BOOTLOADER_NAME
env.PX4_ADD_GIT_HASHES = srcpath('Tools/scripts/add_git_hashes.py')
env.PX4_APM_ROOT = srcpath('')
env.PX4_ROOT = srcpath('modules/PX4Firmware')
env.PX4_NUTTX_ROOT = srcpath('modules/PX4NuttX')
if env.PX4_PX4IO_NAME:
env.PX4IO_ELF_DEST = 'px4-extra-files/px4io'
nuttx_config='nuttx_%s_apm' % board_name
env.PX4_CMAKE_VARS = dict(
CONFIG=nuttx_config,
CMAKE_MODULE_PATH=srcpath('Tools/ardupilotwaf/px4/cmake'),
NUTTX_SRC=env.PX4_NUTTX_ROOT,
PX4_NUTTX_ROMFS=bldpath(env.PX4_ROMFS_BLD),
ARDUPILOT_BUILD='YES',
EXTRA_CXX_FLAGS=' '.join((
# NOTE: these "-Wno-error=*" flags should be removed as we update
# the submodule
'-Wno-error=double-promotion',
'-Wno-error=reorder',
# NOTE: *Temporarily* using this definition so that both
# PX4Firmware build systems (cmake and legacy make-based) can live
# together
'-DCMAKE_BUILD',
'-DARDUPILOT_BUILD',
'-I%s' % bldpath('libraries/GCS_MAVLink'),
'-I%s' % bldpath('libraries/GCS_MAVLink/include/mavlink'),
'-Wl,--gc-sections',
)),
EXTRA_C_FLAGS=' '.join((
# NOTE: *Temporarily* using this definition so that both
# PX4Firmware build systems (cmake and legacy make-based) can live
# together
'-DCMAKE_BUILD',
)),
)
def build(bld):
board_name = bld.env.get_flat('PX4_BOARD_NAME')
px4 = bld.cmake(
name='px4',
cmake_src=bld.srcnode.find_dir('modules/PX4Firmware'),
cmake_vars=bld.env.PX4_CMAKE_VARS,
cmake_flags=['-Wno-deprecated'],
)
px4.build(
'msg_gen',
group='dynamic_sources',
cmake_output_patterns='src/modules/uORB/topics/*.h',
)
px4.build(
'prebuild_targets',
group='dynamic_sources',
cmake_output_patterns='%s/NuttX/nuttx-export/**/*.h' % board_name,
)
bld(
name='px4_romfs_static_files',
group='dynamic_sources',
features='_px4_romfs',
)
bld.extra_build_summary = _extra_build_summary
def _extra_build_summary(bld, build_summary):
build_summary.text('')
build_summary.text('PX4')
build_summary.text('', '''
The ELF files are pointed by the path in the "%s" column. The .px4 files are in
the same directory of their corresponding ELF files.
''' % build_summary.header_text['target'])
if not bld.options.upload:
build_summary.text('')
build_summary.text('', '''
You can use the option --upload to upload the firmware to the PX4 board if you
have one connected.''')
if bld.env.PX4_PX4IO_NAME:
build_summary.text('')
build_summary.text('PX4IO')
summary_data_list = bld.size_summary([bld.env.PX4IO_ELF_DEST])
header = bld.env.BUILD_SUMMARY_HEADER[:]
try:
header.remove('target')
except ValueError:
pass
header.insert(0, 'binary_path')
build_summary.print_table(summary_data_list, header)
|
gpl-3.0
| -3,635,688,299,489,346,600
| 31.84273
| 133
| 0.615287
| false
| 3.007609
| false
| false
| false
|
nicproulx/mne-python
|
mne/simulation/evoked.py
|
2
|
5502
|
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Daniel Strohmeier <daniel.strohmeier@tu-ilmenau.de>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
import copy
import warnings
import numpy as np
from ..io.pick import pick_channels_cov
from ..forward import apply_forward
from ..utils import check_random_state, verbose, _time_mask
@verbose
def simulate_evoked(fwd, stc, info, cov, snr=3., tmin=None, tmax=None,
iir_filter=None, random_state=None, verbose=None):
"""Generate noisy evoked data.
.. note:: No projections from ``info`` will be present in the
output ``evoked``. You can use e.g.
:func:`evoked.add_proj <mne.Evoked.add_proj>` or
:func:`evoked.set_eeg_reference <mne.Evoked.set_eeg_reference>`
to add them afterward as necessary.
Parameters
----------
fwd : Forward
a forward solution.
stc : SourceEstimate object
The source time courses.
info : dict
Measurement info to generate the evoked.
cov : Covariance object
The noise covariance.
snr : float
signal to noise ratio in dB. It corresponds to
10 * log10( var(signal) / var(noise) ).
tmin : float | None
start of time interval to estimate SNR. If None first time point
is used.
tmax : float | None
        end of time interval to estimate SNR. If None last time point
is used.
iir_filter : None | array
IIR filter coefficients (denominator) e.g. [1, -1, 0.2].
random_state : None | int | np.random.RandomState
To specify the random generator state.
verbose : bool, str, int, or None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Returns
-------
evoked : Evoked object
The simulated evoked data
See Also
--------
simulate_raw
simulate_stc
simulate_sparse_stc
Notes
-----
.. versionadded:: 0.10.0
"""
evoked = apply_forward(fwd, stc, info)
if snr < np.inf:
noise = simulate_noise_evoked(evoked, cov, iir_filter, random_state)
evoked_noise = add_noise_evoked(evoked, noise, snr, tmin=tmin,
tmax=tmax)
else:
evoked_noise = evoked
return evoked_noise
def simulate_noise_evoked(evoked, cov, iir_filter=None, random_state=None):
"""Create noise as a multivariate Gaussian.
The spatial covariance of the noise is given from the cov matrix.
Parameters
----------
evoked : evoked object
an instance of evoked used as template
cov : Covariance object
The noise covariance
iir_filter : None | array
IIR filter coefficients (denominator)
random_state : None | int | np.random.RandomState
To specify the random generator state.
Returns
-------
noise : evoked object
an instance of evoked
Notes
-----
.. versionadded:: 0.10.0
"""
noise = evoked.copy()
noise.data = _generate_noise(evoked.info, cov, iir_filter, random_state,
evoked.data.shape[1])[0]
return noise
def _generate_noise(info, cov, iir_filter, random_state, n_samples, zi=None):
"""Create spatially colored and temporally IIR-filtered noise."""
from scipy.signal import lfilter
noise_cov = pick_channels_cov(cov, include=info['ch_names'], exclude=[])
if set(info['ch_names']) != set(noise_cov.ch_names):
raise ValueError('Evoked and covariance channel names are not '
'identical. Cannot generate the noise matrix. '
'Channels missing in covariance %s.' %
np.setdiff1d(info['ch_names'], noise_cov.ch_names))
rng = check_random_state(random_state)
c = np.diag(noise_cov.data) if noise_cov['diag'] else noise_cov.data
mu_channels = np.zeros(len(c))
# we almost always get a positive semidefinite warning here, so squash it
with warnings.catch_warnings(record=True):
noise = rng.multivariate_normal(mu_channels, c, n_samples).T
if iir_filter is not None:
if zi is None:
zi = np.zeros((len(c), len(iir_filter) - 1))
noise, zf = lfilter([1], iir_filter, noise, axis=-1, zi=zi)
else:
zf = None
return noise, zf
def add_noise_evoked(evoked, noise, snr, tmin=None, tmax=None):
"""Add noise to evoked object with specified SNR.
SNR is computed in the interval from tmin to tmax.
Parameters
----------
evoked : Evoked object
An instance of evoked with signal
noise : Evoked object
An instance of evoked with noise
snr : float
signal to noise ratio in dB. It corresponds to
10 * log10( var(signal) / var(noise) )
tmin : float
start time before event
tmax : float
end time after event
Returns
-------
evoked_noise : Evoked object
An instance of evoked corrupted by noise
"""
evoked = copy.deepcopy(evoked)
tmask = _time_mask(evoked.times, tmin, tmax, sfreq=evoked.info['sfreq'])
tmp = 10 * np.log10(np.mean((evoked.data[:, tmask] ** 2).ravel()) /
np.mean((noise.data ** 2).ravel()))
noise.data = 10 ** ((tmp - float(snr)) / 20) * noise.data
evoked.data += noise.data
return evoked
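# --- Editor's note (worked example of the scaling above; not in the original
# source) --------------------------------------------------------------------
# `tmp` is the current SNR in dB: 10 * log10(var(signal) / var(noise)).
# Scaling the noise amplitude by a factor k changes the SNR to
# tmp - 20 * log10(k), so solving tmp - 20 * log10(k) = snr gives
# k = 10 ** ((tmp - snr) / 20), which is exactly the factor applied to
# noise.data. For instance, if the current SNR is 20 dB and the target is
# 6 dB, the noise is multiplied by 10 ** ((20 - 6) / 20), about 5.01.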
|
bsd-3-clause
| -6,964,635,638,887,342,000
| 32.345455
| 77
| 0.613413
| false
| 3.690141
| false
| false
| false
|
okrt/horspool-string-matching
|
main.py
|
1
|
7710
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main.ui'
#
# Created: Wed Apr 08 10:31:45 2015
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(647, 735)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setStyleSheet(_fromUtf8(""))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.teMetin = QtGui.QTextEdit(self.centralwidget)
self.teMetin.setGeometry(QtCore.QRect(10, 30, 381, 151))
self.teMetin.setObjectName(_fromUtf8("teMetin"))
self.lePattern = QtGui.QLineEdit(self.centralwidget)
self.lePattern.setGeometry(QtCore.QRect(10, 210, 381, 20))
self.lePattern.setObjectName(_fromUtf8("lePattern"))
self.teAdimlar = QtGui.QTextEdit(self.centralwidget)
self.teAdimlar.setGeometry(QtCore.QRect(10, 310, 621, 151))
self.teAdimlar.setReadOnly(True)
self.teAdimlar.setObjectName(_fromUtf8("teAdimlar"))
self.btnBul = QtGui.QPushButton(self.centralwidget)
self.btnBul.setGeometry(QtCore.QRect(10, 240, 611, 41))
self.btnBul.setObjectName(_fromUtf8("btnBul"))
self.btnDosya = QtGui.QPushButton(self.centralwidget)
self.btnDosya.setGeometry(QtCore.QRect(220, 0, 121, 23))
self.btnDosya.setObjectName(_fromUtf8("btnDosya"))
self.lblMetin = QtGui.QLabel(self.centralwidget)
self.lblMetin.setGeometry(QtCore.QRect(10, 10, 121, 16))
self.lblMetin.setObjectName(_fromUtf8("lblMetin"))
self.lblPattern = QtGui.QLabel(self.centralwidget)
self.lblPattern.setGeometry(QtCore.QRect(10, 190, 121, 16))
self.lblPattern.setObjectName(_fromUtf8("lblPattern"))
self.lblAdimlar = QtGui.QLabel(self.centralwidget)
self.lblAdimlar.setGeometry(QtCore.QRect(10, 290, 121, 16))
self.lblAdimlar.setObjectName(_fromUtf8("lblAdimlar"))
self.lblHakkinda = QtGui.QLabel(self.centralwidget)
self.lblHakkinda.setGeometry(QtCore.QRect(440, 480, 181, 71))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lblHakkinda.sizePolicy().hasHeightForWidth())
self.lblHakkinda.setSizePolicy(sizePolicy)
self.lblHakkinda.setObjectName(_fromUtf8("lblHakkinda"))
self.lblPozisyon = QtGui.QLabel(self.centralwidget)
self.lblPozisyon.setGeometry(QtCore.QRect(410, 10, 101, 20))
self.lblPozisyon.setObjectName(_fromUtf8("lblPozisyon"))
self.lblDurum = QtGui.QLabel(self.centralwidget)
self.lblDurum.setGeometry(QtCore.QRect(10, 560, 531, 20))
self.lblDurum.setObjectName(_fromUtf8("lblDurum"))
self.lblIslem = QtGui.QLabel(self.centralwidget)
self.lblIslem.setGeometry(QtCore.QRect(10, 590, 621, 71))
self.lblIslem.setAutoFillBackground(False)
self.lblIslem.setStyleSheet(_fromUtf8("background-color:rgb(255, 255, 255)"))
self.lblIslem.setTextFormat(QtCore.Qt.RichText)
self.lblIslem.setScaledContents(False)
self.lblIslem.setObjectName(_fromUtf8("lblIslem"))
self.cbAdimlar = QtGui.QCheckBox(self.centralwidget)
self.cbAdimlar.setGeometry(QtCore.QRect(20, 510, 401, 21))
self.cbAdimlar.setObjectName(_fromUtf8("cbAdimlar"))
self.cbThDurumAl = QtGui.QCheckBox(self.centralwidget)
self.cbThDurumAl.setGeometry(QtCore.QRect(20, 490, 421, 21))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.cbThDurumAl.sizePolicy().hasHeightForWidth())
self.cbThDurumAl.setSizePolicy(sizePolicy)
self.cbThDurumAl.setChecked(True)
self.cbThDurumAl.setTristate(False)
self.cbThDurumAl.setObjectName(_fromUtf8("cbThDurumAl"))
self.lblDurum_2 = QtGui.QLabel(self.centralwidget)
self.lblDurum_2.setGeometry(QtCore.QRect(10, 470, 531, 20))
self.lblDurum_2.setObjectName(_fromUtf8("lblDurum_2"))
self.lvKonumlar = QtGui.QListView(self.centralwidget)
self.lvKonumlar.setGeometry(QtCore.QRect(410, 30, 211, 201))
self.lvKonumlar.setObjectName(_fromUtf8("lvKonumlar"))
self.cbRenklendir = QtGui.QCheckBox(self.centralwidget)
self.cbRenklendir.setGeometry(QtCore.QRect(20, 530, 391, 21))
self.cbRenklendir.setObjectName(_fromUtf8("cbRenklendir"))
self.label = QtGui.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(10, 670, 601, 16))
self.label.setObjectName(_fromUtf8("label"))
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 647, 21))
self.menubar.setObjectName(_fromUtf8("menubar"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Horspool String Matching @NKU", None))
self.btnBul.setText(_translate("MainWindow", "Find", None))
self.btnDosya.setText(_translate("MainWindow", "Import File", None))
self.lblMetin.setText(_translate("MainWindow", "Text:", None))
self.lblPattern.setText(_translate("MainWindow", "Pattern:", None))
self.lblAdimlar.setText(_translate("MainWindow", "Steps:", None))
self.lblHakkinda.setText(_translate("MainWindow", "<html><head/><body><p>Ferhat Yeşiltarla</p><p>Gökmen Güreşçi</p><p>Oğuz Kırat</p></body></html>", None))
self.lblPozisyon.setText(_translate("MainWindow", "Positions Found", None))
self.lblDurum.setText(_translate("MainWindow", "Status", None))
self.lblIslem.setText(_translate("MainWindow", "Ready", None))
self.cbAdimlar.setText(_translate("MainWindow", "Show steps (Not recommended on long texts)", None))
self.cbThDurumAl.setText(_translate("MainWindow", "Get info from string matching thread while processing.", None))
self.lblDurum_2.setText(_translate("MainWindow", "Options", None))
self.cbRenklendir.setText(_translate("MainWindow", "Colorize patterns found. (Not recommended on long texts)", None))
self.label.setText(_translate("MainWindow", "Quickly developed for \"Pattern Matching in Texts\" course assignment @ nku.edu.tr", None))
|
gpl-2.0
| -656,853,989,499,909,800
| 55.226277
| 163
| 0.704401
| false
| 3.520567
| false
| false
| false
|
repotvsupertuga/tvsupertuga.repository
|
script.module.streamtvsupertuga/lib/resources/lib/sources/it/ddown.py
|
1
|
2923
|
import re
import requests
import xbmc,xbmcaddon,time
import urllib
from ..common import get_rd_domains, filter_host,send_log,error_log
from ..scraper import Scraper
dev_log = xbmcaddon.Addon('script.module.universalscrapers').getSetting("dev_log")
s = requests.session()
User_Agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'
class ddown(Scraper):
domains = ['https://directdownload.tv/']
name = "Direct Download"
sources = []
def __init__(self):
self.base_link = 'https://directdownload.tv/'
self.sources = []
if dev_log=='true':
self.start_time = time.time()
def scrape_episode(self,title, show_year, year, season, episode, imdb, tvdb, debrid = False):
try:
if not debrid:
return []
season_url = "0%s"%season if len(season)<2 else season
episode_url = "0%s"%episode if len(episode)<2 else episode
start_url = 'https://directdownload.tv/api?key=4B0BB862F24C8A29&qualities/disk-480p,disk-1080p-x265,tv-480p,tv-720p,web-480p,web-720p,web-1080p,web-1080p-x265,movie-480p-x265,movie-1080p-x265&limit=50&keyword=%s+s%se%s' %(title.lower(),season_url,episode_url)
start_url=start_url.replace(' ','%20')
#SEND2LOG(start_url)
content = requests.get(start_url).content
#print 'content >> ' +content
links=re.compile('"http(.+?)"',re.DOTALL).findall(content)
count = 0
for url in links:
url = 'http' + url.replace('\/', '/')
if '720p' in url:
res = '720p'
elif '1080p' in url:
res = '1080p'
else:
res='480p'
host = url.split('//')[1].replace('www.','')
host = host.split('/')[0].lower()
rd_domains = get_rd_domains()
if host in rd_domains:
if 'k2s.cc' not in url:
count +=1
self.sources.append({'source': host,'quality': res,'scraper': self.name,'url': url,'direct': False, 'debridonly': True})
if dev_log=='true':
end_time = time.time() - self.start_time
send_log(self.name,end_time,count)
return self.sources
except Exception, argument:
if dev_log == 'true':
error_log(self.name,'Check Search')
return self.sources
# def resolve(self, url):
# return url
def SEND2LOG(Txt):
print ':::::::::::::::::::::::::::::::::::::::::::::::::'
print ':'
print ': LOG string: ' + (str(Txt))
print ':'
print ':::::::::::::::::::::::::::::::::::::::::::::::::'
return
|
gpl-2.0
| -5,524,192,560,469,390,000
| 36.974026
| 271
| 0.509408
| false
| 3.649189
| false
| false
| false
|
R2pChyou/starcheat
|
starcheat/assets/techs.py
|
1
|
1741
|
import os
import logging
from io import BytesIO
from PIL import Image
class Techs():
def __init__(self, assets):
self.assets = assets
self.starbound_folder = assets.starbound_folder
def is_tech(self, key):
return key.endswith(".tech")
def index_data(self, asset):
key = asset[0]
path = asset[1]
offset = asset[2]
length = asset[3]
name = os.path.basename(asset[0]).split(".")[0]
asset_data = self.assets.read(key, path, False, offset, length)
if asset_data is None:
return
# TODO: Switch over to new tech system
# item = self.assets.read(asset[0]+"item", asset[1])
# if item is None or "itemName" not in item:
# logging.warning("No techitem for %s in %s" % asset[0], asset[1])
# return
return (key, path, offset, length, "tech", "", name, "")
def all(self):
"""Return a list of all techs."""
c = self.assets.db.cursor()
c.execute("select desc from assets where type = 'tech' order by desc")
return [x[0] for x in c.fetchall()]
def get_tech(self, name):
q = "select key, path from assets where type = 'tech' and (name = ? or desc = ?)"
c = self.assets.db.cursor()
c.execute(q, (name, name))
tech = c.fetchone()
if tech is None:
return
asset = self.assets.read(tech[0], tech[1])
info = self.assets.read(tech[0]+"item", tech[1])
icon = self.assets.read(info["inventoryIcon"], tech[1], image=True)
if icon is None:
icon = self.assets.items().missing_icon()
return info, Image.open(BytesIO(icon)).convert("RGBA"), tech[0], asset
|
mit
| 2,986,662,716,974,814,000
| 30.089286
| 89
| 0.563469
| false
| 3.597107
| false
| false
| false
|
h-hwang/octodns
|
tests/helpers.py
|
1
|
1328
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
from shutil import rmtree
from tempfile import mkdtemp
class SimpleSource(object):
def __init__(self, id='test'):
pass
class SimpleProvider(object):
SUPPORTS_GEO = False
SUPPORTS = set(('A',))
def __init__(self, id='test'):
pass
def populate(self, zone, source=False, lenient=False):
pass
def supports(self, record):
return True
def __repr__(self):
return self.__class__.__name__
class GeoProvider(object):
SUPPORTS_GEO = True
def __init__(self, id='test'):
pass
def populate(self, zone, source=False, lenient=False):
pass
def supports(self, record):
return True
def __repr__(self):
return self.__class__.__name__
class NoSshFpProvider(SimpleProvider):
def supports(self, record):
return record._type != 'SSHFP'
class TemporaryDirectory(object):
def __init__(self, delete_on_exit=True):
self.delete_on_exit = delete_on_exit
def __enter__(self):
self.dirname = mkdtemp()
return self
def __exit__(self, *args, **kwargs):
if self.delete_on_exit:
rmtree(self.dirname)
else:
raise Exception(self.dirname)
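# --- Editor's illustrative sketch (not part of the original module) ---------
# Minimal usage of the TemporaryDirectory helper above; note that with
# delete_on_exit=False the directory is kept and an Exception carrying its
# path is raised on exit instead.
if __name__ == "__main__":  # pragma: no cover
    import os

    with TemporaryDirectory() as td:
        path = td.dirname
        assert os.path.isdir(path)
    # the directory is removed once the with-block exits
    assert not os.path.exists(path)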
|
mit
| -6,261,348,014,156,640,000
| 17.971429
| 67
| 0.593373
| false
| 3.905882
| false
| false
| false
|
jonasjberg/autonameow
|
autonameow/util/text/regexbatch.py
|
1
|
3842
|
# -*- coding: utf-8 -*-
# Copyright(c) 2016-2020 Jonas Sjöberg <autonameow@jonasjberg.com>
# Source repository: https://github.com/jonasjberg/autonameow
#
# This file is part of autonameow.
#
# autonameow is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# autonameow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with autonameow. If not, see <http://www.gnu.org/licenses/>.
import re
from collections import defaultdict
def replace(regex_replacement_tuples, strng, ignore_case=False):
assert isinstance(strng, str)
if not strng:
return strng
re_flags = 0
if ignore_case:
re_flags |= re.IGNORECASE
matches = list()
for regex, replacement in regex_replacement_tuples:
match = re.search(regex, strng, re_flags)
if match:
matches.append((regex, replacement))
sorted_by_longest_replacement = sorted(
matches, key=lambda x: len(x[1]), reverse=True
)
for regex, replacement in sorted_by_longest_replacement:
strng = re.sub(regex, replacement, strng, flags=re_flags)
return strng
def find_longest_match(regexes, strng, ignore_case=False):
"""
Searches a string with a list of regular expressions for the longest match.
NOTE: Does not handle groups!
Args:
regexes: List or set of regular expressions as Unicode strings or
compiled regular expressions.
strng (str): The string to search.
ignore_case: Whether to ignore letter case.
Returns:
The longest match found when searching the string with all given
regular expressions, as a Unicode string.
"""
assert isinstance(strng, str)
if not strng:
return None
re_flags = 0
if ignore_case:
re_flags |= re.IGNORECASE
matches = list()
for regex in regexes:
matches.extend(re.findall(regex, strng, re_flags))
if matches:
sorted_by_longest_match = sorted(
matches, key=lambda x: len(x), reverse=True
)
return sorted_by_longest_match[0]
return None
def find_replacement_value(value_regexes, strng, flags=0):
"""
Returns a value associated with one or more regular expressions.
The value whose associated regular expressions produced the longest total
substring match is returned.
    NOTE: Do not pass 'flags' if the regular expressions are already compiled.
Args:
value_regexes (dict): Dictionary keyed by any values, each storing
lists/tuples of regular expression patterns.
strng (str): The text to search.
flags: Regular expression flags applied to all regular expressions.
Returns:
The "best" matched key in the "value_regexes" dict, or None.
"""
assert isinstance(strng, str)
if not strng:
return strng
# Use canonical form with longest total length of matched substrings.
value_match_lengths = defaultdict(int)
for value, regexes in value_regexes.items():
for regex in regexes:
matches = re.finditer(regex, strng, flags)
for match in matches:
value_match_lengths[value] += len(match.group(0))
if value_match_lengths:
value_associated_with_longest_match = max(value_match_lengths,
key=value_match_lengths.get)
return value_associated_with_longest_match
return None
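# --- Editor's illustrative sketch (not part of the original module) ---------
# Hedged usage examples of the helpers above; all inputs are invented for
# illustration only.
if __name__ == '__main__':  # pragma: no cover
    # replace() applies the matched pattern with the longest replacement first.
    assert replace([(r'foo', 'FOO'), (r'bar', 'BAR')], 'foo bar') == 'FOO BAR'

    # The longest single match across all patterns wins.
    assert find_longest_match([r'\d+', r'[a-z]+'], 'ab 12345') == '12345'

    # The key whose patterns matched the most characters in total is returned.
    value_regexes = {'digits': [r'\d'], 'letters': [r'[a-z]']}
    assert find_replacement_value(value_regexes, 'abc123 de') == 'letters'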
|
gpl-2.0
| -8,631,422,745,035,632,000
| 30.227642
| 79
| 0.659984
| false
| 4.134553
| false
| false
| false
|
gios-asu/text-geolocator
|
docs/source/conf.py
|
1
|
9841
|
# -*- coding: utf-8 -*-
#
# Text-Geolocator documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 2 18:40:28 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../geolocator'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Text-Geolocator'
copyright = (u'2015, Adam McNabb, Weston Neal, Samantha Juntiff, '
'Christopher Silvia, Jack Workman, Jang Won')
author = (u'Adam McNabb, Weston Neal, Samantha Juntiff, Christopher Silvia, '
'Jack Workman, Jang Won')
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'classic'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Text-Geolocatordoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Text-Geolocator.tex', u'Text-Geolocator Documentation',
u'Adam McNabb, Weston Neal, Samantha Juntiff, Christopher Silvia, '
'Jack Workman, Jang Won', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'text-geolocator', u'Text-Geolocator Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Text-Geolocator', u'Text-Geolocator Documentation',
author, 'Text-Geolocator', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
|
mit
| -798,293,896,022,486,400
| 32.13468
| 79
| 0.700335
| false
| 3.665177
| true
| false
| false
|
wsmith323/frozenordereddict
|
frozenordereddict/__init__.py
|
1
|
1193
|
from collections import Mapping
try:
reduce
except NameError:
from functools import reduce
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
import operator
import os
with open(os.path.join(os.path.dirname(__file__), 'VERSION.txt')) as f:
__version__ = f.read().strip()
class FrozenOrderedDict(Mapping):
"""
Frozen OrderedDict.
"""
def __init__(self, *args, **kwargs):
self.__dict = OrderedDict(*args, **kwargs)
self.__hash = None
def __getitem__(self, item):
return self.__dict[item]
def __iter__(self):
return iter(self.__dict)
def __len__(self):
return len(self.__dict)
def __hash__(self):
if self.__hash is None:
self.__hash = reduce(operator.xor, map(hash, self.__dict.items()), 0)
return self.__hash
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self.__dict.items())
def copy(self, *args, **kwargs):
new_dict = self.__dict.copy()
if args or kwargs:
new_dict.update(OrderedDict(*args, **kwargs))
return self.__class__(new_dict)
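# --- Editor's illustrative sketch (not part of the original module) ---------
# Minimal usage of FrozenOrderedDict: it preserves insertion order, is
# hashable, and copy() layers updates without mutating the original.
if __name__ == "__main__":  # pragma: no cover
    d = FrozenOrderedDict([("a", 1), ("b", 2)])
    assert list(d) == ["a", "b"]
    assert d["a"] == 1 and len(d) == 2
    assert hash(d) == hash(FrozenOrderedDict([("a", 1), ("b", 2)]))

    d2 = d.copy(b=3)
    assert d2["b"] == 3 and d["b"] == 2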
|
mit
| -6,693,519,827,597,702,000
| 21.092593
| 81
| 0.582565
| false
| 3.911475
| false
| false
| false
|
opinkerfi/okconfig
|
tests/test_group.py
|
1
|
1924
|
# !/usr/bin/env python
"""Test adding objects"""
from __future__ import absolute_import
import os.path
import sys
# Make sure we import from working tree
okconfig_base = os.path.dirname(os.path.realpath(__file__ + "/.."))
sys.path.insert(0, okconfig_base)
import unittest2 as unittest
import okconfig
from pynag import Model
import tests
class Group(tests.OKConfigTest):
"""Template additions tests"""
def setUp(self):
super(Group, self).setUp()
okconfig.addhost("www.okconfig.org")
okconfig.addhost("okconfig.org")
okconfig.addhost("aliased.okconfig.org",
address="192.168.1.1",
group_name="testgroup")
def test_basic(self):
"""Add a group"""
okconfig.addgroup("testgroup1")
contacts = Model.Contactgroup.objects.filter(
contactgroup_name='testgroup1'
)
self.assertEqual(1, len(contacts), 'There can be only one')
hostgroups = Model.Hostgroup.objects.filter(
hostgroup_name='testgroup1'
)
self.assertEqual(1, len(hostgroups), 'There can be only one')
def test_alias(self):
"""Add a group with an alias"""
okconfig.addgroup("testgroup1", alias="the first testgroup")
contacts = Model.Contactgroup.objects.filter(
contactgroup_name='testgroup1',
alias='the first testgroup')
self.assertEqual(1, len(contacts))
def test_conflict(self):
"""Test adding a conflicting group"""
okconfig.addgroup("testgroup1")
self.assertRaises(okconfig.OKConfigError,
okconfig.addgroup,
"testgroup1")
def test_force(self):
"""Test force adding a group"""
okconfig.addgroup("testgroup1")
okconfig.addgroup("testgroup1", force=True)
if __name__ == "__main__":
unittest.main()
|
gpl-3.0
| 6,050,673,122,055,601,000
| 26.5
| 69
| 0.608628
| false
| 3.93456
| true
| false
| false
|
TaliesinSkye/evennia
|
wintersoasis-master/objects/examples/object.py
|
1
|
7393
|
"""
Template for Objects
Copy this module up one level and name it as you like, then
use it as a template to create your own Objects.
To make the default commands default to creating objects of your new
type (and also change the "fallback" object used when typeclass
creation fails), change settings.BASE_OBJECT_TYPECLASS to point to
your new class, e.g.
settings.BASE_OBJECT_TYPECLASS = "game.gamesrc.objects.myobj.MyObj"
Note that objects already created in the database will not notice
this change, you have to convert them manually e.g. with the
@typeclass command.
"""
from ev import Object
class ExampleObject(Object):
"""
This is the root typeclass object, implementing an in-game Evennia
game object, such as having a location, being able to be
manipulated or looked at, etc. If you create a new typeclass, it
must always inherit from this object (or any of the other objects
in this file, since they all actually inherit from BaseObject, as
seen in src.object.objects).
The BaseObject class implements several hooks tying into the game
engine. By re-implementing these hooks you can control the
system. You should never need to re-implement special Python
methods, such as __init__ and especially never __getattribute__ and
__setattr__ since these are used heavily by the typeclass system
of Evennia and messing with them might well break things for you.
* Base properties defined/available on all Objects
key (string) - name of object
     name (string) - same as key
aliases (list of strings) - aliases to the object. Will be saved to database as AliasDB entries but returned as strings.
dbref (int, read-only) - unique #id-number. Also "id" can be used.
dbobj (Object, read-only) - link to database model. dbobj.typeclass points back to this class
     typeclass (Object, read-only) - this links back to this class as an identifier only. Use self.swap_typeclass() to switch.
date_created (string) - time stamp of object creation
permissions (list of strings) - list of permission strings
player (Player) - controlling player (will also return offline player)
location (Object) - current location. Is None if this is a room
home (Object) - safety start-location
sessions (list of Sessions, read-only) - returns all sessions connected to this object
has_player (bool, read-only)- will only return *connected* players
contents (list of Objects, read-only) - returns all objects inside this object (including exits)
exits (list of Objects, read-only) - returns all exits from this object, if any
destination (Object) - only set if this object is an exit.
is_superuser (bool, read-only) - True/False if this user is a superuser
* Handlers available
locks - lock-handler: use locks.add() to add new lock strings
db - attribute-handler: store/retrieve database attributes on this self.db.myattr=val, val=self.db.myattr
ndb - non-persistent attribute handler: same as db but does not create a database entry when storing data
scripts - script-handler. Add new scripts to object with scripts.add()
cmdset - cmdset-handler. Use cmdset.add() to add new cmdsets to object
nicks - nick-handler. New nicks with nicks.add().
* Helper methods (see src.objects.objects.py for full headers)
search(ostring, global_search=False, attribute_name=None, use_nicks=False, location=None, ignore_errors=False, player=False)
execute_cmd(raw_string)
msg(message, from_obj=None, data=None)
msg_contents(message, exclude=None, from_obj=None, data=None)
move_to(destination, quiet=False, emit_to_obj=None, use_destination=True)
copy(new_key=None)
delete()
is_typeclass(typeclass, exact=False)
swap_typeclass(new_typeclass, clean_attributes=False, no_default=True)
access(accessing_obj, access_type='read', default=False)
check_permstring(permstring)
* Hooks (these are class methods, so their arguments should also start with self):
basetype_setup() - only called once, used for behind-the-scenes setup. Normally not modified.
basetype_posthook_setup() - customization in basetype, after the object has been created; Normally not modified.
at_object_creation() - only called once, when object is first created. Object customizations go here.
at_object_delete() - called just before deleting an object. If returning False, deletion is aborted. Note that all objects
inside a deleted object are automatically moved to their <home>, they don't need to be removed here.
at_init() - called whenever typeclass is cached from memory, at least once every server restart/reload
at_cmdset_get() - this is called just before the command handler requests a cmdset from this object
at_first_login() - (player-controlled objects only) called once, the very first time user logs in.
at_pre_login() - (player-controlled objects only) called every time the user connects, after they have identified, before other setup
at_post_login() - (player-controlled objects only) called at the end of login, just before setting the player loose in the world.
at_disconnect() - (player-controlled objects only) called just before the user disconnects (or goes linkless)
at_server_reload() - called before server is reloaded
at_server_shutdown() - called just before server is fully shut down
at_before_move(destination) - called just before moving object to the destination. If returns False, move is cancelled.
announce_move_from(destination) - called in old location, just before move, if obj.move_to() has quiet=False
announce_move_to(source_location) - called in new location, just after move, if obj.move_to() has quiet=False
at_after_move(source_location) - always called after a move has been successfully performed.
at_object_leave(obj, target_location) - called when an object leaves this object in any fashion
at_object_receive(obj, source_location) - called when this object receives another object
at_before_traverse(traversing_object) - (exit-objects only) called just before an object traverses this object
at_after_traverse(traversing_object, source_location) - (exit-objects only) called just after a traversal has happened.
at_failed_traverse(traversing_object) - (exit-objects only) called if traversal fails and property err_traverse is not defined.
at_msg_receive(self, msg, from_obj=None, data=None) - called when a message (via self.msg()) is sent to this obj.
If returns false, aborts send.
at_msg_send(self, msg, to_obj=None, data=None) - called when this objects sends a message to someone via self.msg().
return_appearance(looker) - describes this object. Used by "look" command by default
at_desc(looker=None) - called by 'look' whenever the appearance is requested.
at_get(getter) - called after object has been picked up. Does not stop pickup.
at_drop(dropper) - called when this object has been dropped.
at_say(speaker, message) - by default, called if an object inside this object speaks
"""
pass
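# A minimal illustrative sketch (hypothetical names), assuming the hooks and
# the `db` attribute handler described in the docstring above:
class ExampleLamp(ExampleObject):
    """Hypothetical example: a lamp that remembers whether it is lit."""
    def at_object_creation(self):
        # Runs only once, when the object is first created.
        self.db.is_lit = False
    def return_appearance(self, looker):
        # Used by the "look" command by default.
        state = "lit" if self.db.is_lit else "unlit"
        return "%s (%s)" % (self.key, state)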
|
bsd-3-clause
| 5,246,995,477,435,459,000
| 59.105691
| 144
| 0.714324
| false
| 4.114079
| false
| false
| false
|
dennishuo/dataproc-initialization-actions
|
kafka/test_kafka.py
|
1
|
1443
|
import os
import unittest
from parameterized import parameterized
from integration_tests.dataproc_test_case import DataprocTestCase
class KafkaTestCase(DataprocTestCase):
COMPONENT = 'kafka'
INIT_ACTIONS = ['kafka/kafka.sh']
TEST_SCRIPT_FILE_NAME = 'validate.sh'
def verify_instance(self, name):
self.upload_test_file(
os.path.join(os.path.dirname(os.path.abspath(__file__)),
self.TEST_SCRIPT_FILE_NAME), name)
self.__run_test_script(name)
self.remove_test_script(self.TEST_SCRIPT_FILE_NAME, name)
def __run_test_script(self, name):
self.assert_instance_command(
name, "bash {}".format(self.TEST_SCRIPT_FILE_NAME))
@parameterized.expand(
[
("HA", "1.2", ["m-0", "m-1", "m-2"]),
("HA", "1.3", ["m-0", "m-1", "m-2"]),
],
testcase_func_name=DataprocTestCase.generate_verbose_test_name)
def test_kafka(self, configuration, dataproc_version, machine_suffixes):
self.createCluster(configuration,
self.INIT_ACTIONS,
dataproc_version,
machine_type="n1-standard-2")
for machine_suffix in machine_suffixes:
self.verify_instance("{}-{}".format(self.getClusterName(),
machine_suffix))
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| 4,321,424,766,428,474,400
| 33.357143
| 76
| 0.568954
| false
| 3.858289
| true
| false
| false
|
rafaelvieiras/PseudoTV_Live
|
plugin.video.pseudotv.live/resources/lib/parsers/MKVParser.py
|
1
|
5919
|
# Copyright (C) 2020 Jason Anderson, Lunatixz
#
#
# This file is part of PseudoTV Live.
#
# PseudoTV Live is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PseudoTV Live is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PseudoTV Live. If not, see <http://www.gnu.org/licenses/>.
from resources.lib.globals import *
class MKVParser:
def determineLength(self, filename):
log("MKVParser: determineLength " + filename)
try:
self.File = xbmcvfs.File(filename, "r")
except:
log("MKVParser: Unable to open the file")
log(traceback.format_exc(), xbmc.LOGERROR)
return
size = self.findHeader()
if size == 0:
log('MKVParser: Unable to find the segment info')
dur = 0
else:
dur = self.parseHeader(size)
log("MKVParser: Duration is " + str(dur))
return dur
def parseHeader(self, size):
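        # Walk the SegmentInfo element looking for TimecodeScale (ID 0x2AD7B1)
        # and Duration (ID 0x4489); the duration in seconds is
        # Duration * TimecodeScale / 1e9.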
duration = 0
timecode = 0
fileend = self.File.tell() + size
datasize = 1
data = 1
while self.File.tell() < fileend and datasize > 0 and data > 0:
data = self.getEBMLId()
datasize = self.getDataSize()
if data == 0x2ad7b1:
timecode = 0
try:
for x in range(datasize):
timecode = (timecode << 8) + struct.unpack('B', self.getData(1))[0]
except:
timecode = 0
if duration != 0 and timecode != 0:
break
elif data == 0x4489:
try:
if datasize == 4:
duration = int(struct.unpack('>f', self.getData(datasize))[0])
else:
duration = int(struct.unpack('>d', self.getData(datasize))[0])
except:
log("MKVParser: Error getting duration in header, size is " + str(datasize))
duration = 0
if timecode != 0 and duration != 0:
break
else:
try:
self.File.seek(datasize, 1)
except:
log('MKVParser: Error while seeking')
return 0
if duration > 0 and timecode > 0:
dur = (duration * timecode) / 1000000000
return dur
return 0
def findHeader(self):
log("MKVParser: findHeader")
filesize = self.getFileSize()
if filesize == 0:
log("MKVParser: Empty file")
return 0
data = self.getEBMLId()
# Check for 1A 45 DF A3
if data != 0x1A45DFA3:
log("MKVParser: Not a proper MKV")
return 0
datasize = self.getDataSize()
try:
self.File.seek(datasize, 1)
except:
log('MKVParser: Error while seeking')
return 0
data = self.getEBMLId()
# Look for the segment header
while data != 0x18538067 and self.File.tell() < filesize and data > 0 and datasize > 0:
datasize = self.getDataSize()
try:
self.File.seek(datasize, 1)
except:
log('MKVParser: Error while seeking')
return 0
data = self.getEBMLId()
datasize = self.getDataSize()
data = self.getEBMLId()
# Find segment info
while data != 0x1549A966 and self.File.tell() < filesize and data > 0 and datasize > 0:
datasize = self.getDataSize()
try:
self.File.seek(datasize, 1)
except:
log('MKVParser: Error while seeking')
return 0
data = self.getEBMLId()
datasize = self.getDataSize()
if self.File.tell() < filesize:
return datasize
return 0
def getFileSize(self):
size = 0
try:
pos = self.File.tell()
self.File.seek(0, 2)
size = self.File.tell()
self.File.seek(pos, 0)
except:
pass
return size
def getData(self, datasize):
data = self.File.readBytes(datasize)
return data
def getDataSize(self):
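        # EBML sizes are variable-length integers: the position of the leading
        # 1 bit in the first byte gives the total length in bytes; that marker
        # bit is masked off and any remaining bytes are appended below.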
data = self.File.readBytes(1)
try:
firstbyte = struct.unpack('>B', data)[0]
datasize = firstbyte
mask = 0xFFFF
for i in range(8):
if datasize >> (7 - i) == 1:
mask = mask ^ (1 << (7 - i))
break
datasize = datasize & mask
if firstbyte >> 7 != 1:
for i in range(1, 8):
datasize = (datasize << 8) + struct.unpack('>B', self.File.readBytes(1))[0]
if firstbyte >> (7 - i) == 1:
break
except:
datasize = 0
return datasize
def getEBMLId(self):
data = self.File.readBytes(1)
try:
firstbyte = struct.unpack('>B', data)[0]
ID = firstbyte
if firstbyte >> 7 != 1:
for i in range(1, 4):
ID = (ID << 8) + struct.unpack('>B', self.File.readBytes(1))[0]
if firstbyte >> (7 - i) == 1:
break
except:
ID = 0
return ID
|
gpl-3.0
| 41,527,159,163,878,820
| 27.320574
| 96
| 0.497043
| false
| 4.18896
| false
| false
| false
|
koreyou/word_embedding_loader
|
setup.py
|
1
|
3965
|
import os
from setuptools import setup
from setuptools.extension import Extension
from setuptools.command.sdist import sdist as _sdist
cython_modules = [
["word_embedding_loader", "loader", "word2vec_bin"],
["word_embedding_loader", "saver", "word2vec_bin"]
]
def _cythonize(extensions, apply_cythonize):
import numpy
import six
ext = '.pyx' if apply_cythonize else '.cpp'
extensions = [
Extension(
'.'.join(mod), ['/'.join(mod) + ext],
language="c++"
) for mod in extensions
]
for i in six.moves.xrange(len(extensions)):
extensions[i].include_dirs.append(numpy.get_include())
        # Add signature for Sphinx
extensions[i].cython_directives = {"embedsignature": True}
if apply_cythonize:
from Cython.Build import cythonize
extensions = cythonize(extensions)
return extensions
class sdist(_sdist):
def run(self):
# Force cythonize for sdist
_cythonize(cython_modules, True)
_sdist.run(self)
class lazy_cythonize(list):
# Adopted from https://stackoverflow.com/a/26698408/7820599
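    # Creating the Extension objects (and running cythonize) is deferred until
    # setuptools first iterates this list, so Cython and numpy only need to be
    # importable at build time.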
def _cythonize(self):
self._list = _cythonize(self._list, self._apply_cythonize)
self._is_cythonized = True
def __init__(self, extensions, apply_cythonize=False):
super(lazy_cythonize, self).__init__()
self._list = extensions
self._apply_cythonize = apply_cythonize
self._is_cythonized = False
def c_list(self):
if not self._is_cythonized:
self._cythonize()
return self._list
def __iter__(self):
for e in self.c_list():
yield e
def __getitem__(self, ii):
return self.c_list()[ii]
def __len__(self):
return len(self.c_list())
try:
with open('README.rst') as f:
readme = f.read()
except IOError:
readme = ''
name = 'WordEmbeddingLoader'
exec(open('word_embedding_loader/_version.py').read())
release = __version__
version = '.'.join(release.split('.')[:2])
setup(
name=name,
author='Yuta Koreeda',
author_email='secret-email@example.com',
maintainer='Yuta Koreeda',
maintainer_email='secret-email@example.com',
version=release,
    description='Loaders and savers for different implementations of word embedding.',
long_description=readme,
url='https://github.com/koreyou/word_embedding_loader',
packages=['word_embedding_loader',
'word_embedding_loader.loader',
'word_embedding_loader.saver'
],
ext_modules=lazy_cythonize(
cython_modules,
os.environ.get('DEVELOP_WE', os.environ.get('READTHEDOCS')) is not None
),
license='MIT',
cmdclass = {'sdist': sdist},
install_requires=[
'Click',
'numpy>=1.10',
'six'
],
entry_points = {
'console_scripts': ['word-embedding-loader=word_embedding_loader.cli:cli'],
},
command_options={
'build_sphinx': {
'project': ('setup.py', name),
'version': ('setup.py', version),
'release': ('setup.py', release)}},
setup_requires = ['Cython',
'numpy>=1.10',
'six'
],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Cython",
"Topic :: Documentation :: Sphinx",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Information Analysis"
]
)
|
mit
| 8,672,868,639,906,950,000
| 28.589552
| 84
| 0.590668
| false
| 3.92186
| false
| false
| false
|
umeboshi2/vignewton
|
vignewton/views/admin/sitetext.py
|
1
|
7950
|
from cStringIO import StringIO
from datetime import datetime
import transaction
from pyramid.httpexceptions import HTTPFound, HTTPNotFound
from pyramid.security import authenticated_userid
from pyramid.renderers import render
from pyramid.response import Response
from trumpet.models.sitecontent import SiteText
from trumpet.resources import MemoryTmpStore
from trumpet.managers.admin.images import ImageManager
from trumpet.views.base import NotFound
from trumpet.views.menus import BaseMenu
from vignewton.views.base import AdminViewer, make_main_menu
from vignewton.managers.wiki import WikiArchiver
import colander
import deform
tmpstore = MemoryTmpStore()
def prepare_main_data(request):
layout = request.layout_manager.layout
menu = layout.ctx_menu
imgroute = 'admin_images'
url = request.route_url(imgroute, context='list', id=None)
menu.append_new_entry('List Images', url)
url = request.route_url(imgroute, context='add', id=None)
menu.append_new_entry('Add Image', url)
main_menu = make_main_menu(request)
layout.title = 'Manage Images'
layout.header = 'Manage Images'
layout.main_menu = main_menu.render()
layout.ctx_menu = menu
class EditSiteTextSchema(colander.Schema):
name = colander.SchemaNode(
colander.String(),
title='Name')
content = colander.SchemaNode(
colander.String(),
title='Content',
widget=deform.widget.TextAreaWidget(rows=10, cols=60))
class SiteTextViewer(AdminViewer):
def __init__(self, request):
super(SiteTextViewer, self).__init__(request)
#prepare_main_data(self.request)
self.images = ImageManager(self.request.db)
self._dispatch_table = dict(
list=self.list_site_text,
add=self.create_site_text,
delete=self.main,
confirmdelete=self.main,
viewentry=self.view_site_text,
editentry=self.edit_site_text,
create=self.create_site_text,
download_wiki_archive=self.download_wiki_archive,)
self.context = self.request.matchdict['context']
self._view = self.context
self.dispatch()
def _set_menu(self):
menu = self.layout.ctx_menu
menu.set_header('Site Text Menu')
url = self.url(context='list', id='all')
menu.append_new_entry('List Entries', url)
url = self.url(context='create', id='new')
menu.append_new_entry('Create New Entry', url)
url = self.url(context='download_wiki_archive', id='all')
menu.append_new_entry('Download Wiki Archive', url)
def main(self):
self._set_menu()
content = '<h1>Here is where we manage site text.</h1>'
self.layout.content = content
def manage_site_text(self):
self._set_menu()
action = None
if 'action' in self.request.GET:
action = self.request.GET['action']
return self._manage_site_text_action_map[action]()
def view_site_text(self):
self._set_menu()
id = int(self.request.matchdict['id'])
self.layout.footer = str(type(id))
entry = self.request.db.query(SiteText).get(id)
self.layout.subheader = entry.name
self.layout.content = '<pre width="80">%s</pre>' % entry.content
def list_site_text(self):
self._set_menu()
template = 'vignewton:templates/list-site-text.mako'
entries = self.request.db.query(SiteText).all()
env = dict(viewer=self, entries=entries)
self.layout.content = self.render(template, env)
def list_site_text_orig(self):
self._set_menu()
content = '<h1>Here is where we <b>list</b> site text.</h1>'
self.layout.content = content
anchors = []
edit_anchors = []
entries = self.request.db.query(SiteText).all()
for entry in entries:
getdata = dict(action='viewentry', id=entry.id)
href = self.url(context='viewentry', id=entry.id)
anchors.append('<a href="%s">%s</a>' % (href, entry.name))
getdata['action'] = 'editentry'
href = self.url(context='editentry', id=entry.id)
edit_anchors.append('<a href="%s">edit</a>' % href)
list_items = []
for index in range(len(anchors)):
list_item = '<li>%s(%s)</li>'
list_item = list_item % (anchors[index], edit_anchors[index])
list_items.append(list_item)
ul = '<ul>%s</ul>' % '\n'.join(list_items)
self.layout.content = ul
def _edit_site_text_form(self):
schema = EditSiteTextSchema()
submit_button = deform.form.Button(name='submit_site_text',
title='Update Content')
form = deform.Form(schema, buttons=(submit_button,))
self.layout.resources.deform_auto_need(form)
return form
def _validate_site_text(self, form, create=False):
controls = self.request.POST.items()
try:
data = form.validate(controls)
except deform.ValidationFailure, e:
self.layout.content = e.render()
return {}
if create:
db = self.request.db
query = db.query(SiteText).filter_by(name=data['name'])
rows = query.all()
if rows:
h1 = '<h1>Site Text "%s" already exists.</h1>'
h1 = h1 % data['name']
self.layout.content = h1 + form.render(data)
return {}
else:
self.layout.subheader = str(rows)
return data
def _submit_site_text(self, form, data={}):
rendered = form.render(data)
if 'submit_site_text' in self.request.params:
if not self._validate_site_text(form):
return
else:
self.layout.content = rendered
self.layout.subheader = 'Please edit content'
def create_site_text(self):
self._set_menu()
form = self._edit_site_text_form()
# check submission
if 'submit_site_text' in self.request.params:
valid = self._validate_site_text(form, create=True)
if not valid:
return
transaction.begin()
entry = SiteText(valid['name'], valid['content'])
self.request.db.add(entry)
transaction.commit()
self.layout.content = 'Submitted for approval.'
else:
self.layout.content = form.render()
self.layout.subheader = 'Please edit content'
def edit_site_text(self):
self._set_menu()
form = self._edit_site_text_form()
rendered = form.render()
id = int(self.request.matchdict['id'])
entry = self.request.db.query(SiteText).get(id)
data = dict(name=entry.name, content=entry.content)
if 'submit_site_text' in self.request.params:
valid = self._validate_site_text(form)
if not valid:
return
transaction.begin()
entry.content = valid['content']
self.request.db.add(entry)
transaction.commit()
self.layout.content = 'Submitted for approval.'
else:
self.layout.content = form.render(data)
self.layout.subheader = 'Please edit content'
def download_wiki_archive(self):
self._set_menu()
archiver = WikiArchiver(self.request.db)
archiver.create_new_zipfile()
archive = archiver.archive_pages()
content_type = 'application/zip'
r = Response(content_type=content_type, body=archive)
r.content_disposition = 'attachment; filename="tutwiki-archive.zip"'
self.response = r
|
unlicense
| -3,346,914,358,330,187,300
| 33.415584
| 76
| 0.588176
| false
| 3.861098
| false
| false
| false
|
silly-wacky-3-town-toon/SOURCE-COD
|
toontown/golf/GolfRewardDialog.py
|
1
|
13295
|
from panda3d.core import *
from panda3d.direct import *
from toontown.toonbase.ToonBaseGlobal import *
from direct.interval.IntervalGlobal import *
from direct.task import Task
from direct.directnotify import DirectNotifyGlobal
from math import *
from direct.distributed.ClockDelta import *
from toontown.golf import GolfGlobals
from toontown.shtiker.GolfPage import GolfTrophy
class GolfRewardDialog:
notify = directNotify.newCategory('GolfRewardDialog')
def __init__(self, avIdList, trophyList, rankingsList, holeBestList, courseBestList, cupList, localAvId, tieBreakWinner, aimTimesList, endMovieCallback = None):
self.avIdList = avIdList
self.trophyList = trophyList
self.rankingsList = rankingsList
self.holeBestList = holeBestList
self.courseBestList = courseBestList
self.cupList = cupList
self.tieBreakWinner = tieBreakWinner
self.movie = None
self.myPlace = 0
self.victory = None
self.endMovieCallback = endMovieCallback
self.aimTimesList = aimTimesList
self.setup(localAvId)
return
def calcTrophyTextListForOnePlayer(self, avId):
retval = []
av = base.cr.doId2do.get(avId)
if av and avId in self.avIdList:
playerIndex = self.avIdList.index(avId)
name = av.getName()
for trophyIndex in xrange(len(self.trophyList[playerIndex])):
wonTrophy = self.trophyList[playerIndex][trophyIndex]
if wonTrophy:
trophyName = TTLocalizer.GolfTrophyDescriptions[trophyIndex]
text = TTLocalizer.GolfAvReceivesTrophy % {'name': name,
'award': trophyName}
retval.append(text)
return retval
def calcCupTextListForAllPlayers(self, localAvId):
retval = []
for cupPlayerIndex in xrange(len(self.avIdList)):
if self.avIdList[cupPlayerIndex] != localAvId:
av = base.cr.doId2do.get(self.avIdList[cupPlayerIndex])
name = ''
if av:
name = av.getName()
cupIndex = 0
for cupIndex in xrange(len(self.cupList[cupPlayerIndex])):
if self.cupList[cupPlayerIndex][cupIndex]:
cupName = TTLocalizer.GolfCupDescriptions[cupIndex]
text = TTLocalizer.GolfAvReceivesCup % {'name': name,
'cup': cupName}
retval.append(text)
for cupPlayerIndex in xrange(len(self.avIdList)):
if self.avIdList[cupPlayerIndex] == localAvId:
av = base.cr.doId2do.get(self.avIdList[cupPlayerIndex])
name = av.getName()
cupIndex = 0
for cupIndex in xrange(len(self.cupList[cupPlayerIndex])):
if self.cupList[cupPlayerIndex][cupIndex]:
cupName = TTLocalizer.GolfCupDescriptions[cupIndex]
text = TTLocalizer.GolfAvReceivesCup % {'name': name,
'cup': cupName}
retval.append(text)
return retval
def calcRankings(self, localAvId):
retval = []
self.notify.debug('aimTimesList=%s' % self.aimTimesList)
for rank in xrange(len(self.rankingsList) + 1):
for avIndex in xrange(len(self.avIdList)):
if self.rankingsList[avIndex] == rank:
name = ' '
av = base.cr.doId2do.get(self.avIdList[avIndex])
if av:
name = av.getName()
text = '%d. ' % rank + ' ' + name
if GolfGlobals.TIME_TIE_BREAKER:
time = self.aimTimesList[avIndex]
minutes = int(time / 60)
time -= minutes * 60
seconds = int(time)
padding = (seconds < 10 and ['0'] or [''])[0]
time -= seconds
fraction = str(time)[2:4]
fraction = fraction + '0' * (2 - len(fraction))
timeStr = "%d'%s%d''%s" % (minutes,
padding,
seconds,
fraction)
text += ' - ' + timeStr
retval.append(text)
if self.avIdList[avIndex] == localAvId:
self.myPlace = rank
return retval
def calcHoleBestTextListForAllPlayers(self, localAvId):
retval = []
if GolfGlobals.CalcOtherHoleBest:
for hbPlayerIndex in xrange(len(self.avIdList)):
if self.avIdList[hbPlayerIndex] != localAvId:
av = base.cr.doId2do.get(self.avIdList[hbPlayerIndex])
name = av.getName()
for hbIndex in xrange(len(self.holeBestList[hbPlayerIndex])):
if self.holeBestList[hbPlayerIndex][hbIndex]:
hbName = TTLocalizer.GolfHoleNames[hbIndex]
text = TTLocalizer.GolfAvReceivesHoleBest % {'name': name,
'hole': hbName}
retval.append(text)
for hbPlayerIndex in xrange(len(self.avIdList)):
if self.avIdList[hbPlayerIndex] == localAvId:
av = base.cr.doId2do.get(self.avIdList[hbPlayerIndex])
name = av.getName()
for hbIndex in xrange(len(self.holeBestList[hbPlayerIndex])):
if self.holeBestList[hbPlayerIndex][hbIndex]:
hbName = TTLocalizer.GolfHoleNames[hbIndex]
text = TTLocalizer.GolfAvReceivesHoleBest % {'name': name,
'hole': hbName}
retval.append(text)
return retval
def calcCourseBestTextListForAllPlayers(self, localAvId):
retval = []
if GolfGlobals.CalcOtherCourseBest:
for cbPlayerIndex in xrange(len(self.avIdList)):
if self.avIdList[cbPlayerIndex] != localAvId:
av = base.cr.doId2do.get(self.avIdList[cbPlayerIndex])
name = av.getName()
for cbIndex in xrange(len(self.holeBestList[cbPlayerIndex])):
if self.holeBestList[cbPlayerIndex][cbIndex]:
cbName = TTLocalizer.GolfCourseNames[cbIndex]
text = TTLocalizer.GolfAvReceivesCourseBest % {'name': name,
'course': cbName}
retval.append(text)
for cbPlayerIndex in xrange(len(self.avIdList)):
if self.avIdList[cbPlayerIndex] == localAvId:
av = base.cr.doId2do.get(self.avIdList[cbPlayerIndex])
name = av.getName()
for cbIndex in xrange(len(self.courseBestList[cbPlayerIndex])):
if self.courseBestList[cbPlayerIndex][cbIndex]:
cbName = TTLocalizer.GolfCourseNames[cbIndex]
text = TTLocalizer.GolfAvReceivesCourseBest % {'name': name,
'course': cbName}
retval.append(text)
return retval
def createRewardMovie(self, localAvId):
retval = Sequence(name='Reward sequence', autoPause=1)
self.trophy = None
def setTrophyLabelText(text, playerIndex, trophyIndex):
self.rankLabel.hide()
self.rewardLabel.hide()
self.trophy = GolfTrophy(level=self.trophyList[playerIndex][trophyIndex], parent=self.trophyLabel, pos=(1.3, 0, -0.25))
self.trophy.setScale(0.65, 1, 0.65)
self.trophy.show()
self.trophyLabel['text'] = text
def setRewardLabelText(text):
self.rewardLabel.show()
self.rankLabel.hide()
self.trophyLabel.hide()
if self.trophy:
self.trophy.hide()
self.rewardLabel['text'] = text
def setRankLabelText(text):
self.rankLabel.show()
self.rewardLabel.hide()
self.trophyLabel.hide()
if self.trophy:
self.trophy.hide()
self.rankLabel['text'] = text
if len(self.avIdList) > 1:
self.victory = base.loadSfx('phase_6/audio/sfx/KART_Applause_%d.ogg' % self.myPlace)
self.victory.play()
for avId in self.avIdList:
if avId != localAvId:
rewardTextList = self.calcTrophyTextListForOnePlayer(avId)
trophyIndex = 0
for rewardText in rewardTextList:
playerIndex = self.avIdList.index(avId)
var = (rewardText, playerIndex, trophyIndex)
oneTrophyIval = Parallel(Func(setTrophyLabelText, rewardText, playerIndex, trophyIndex), LerpColorScaleInterval(self.trophyLabel, 4, Vec4(1, 1, 1, 0), startColorScale=Vec4(1, 1, 1, 1), blendType='easeIn'))
trophyIndex = trophyIndex + 1
retval.append(oneTrophyIval)
rewardTextList = self.calcTrophyTextListForOnePlayer(localAvId)
trophyIndex = 0
playerIndex = self.avIdList.index(localAvId)
for rewardText in rewardTextList:
if len(rewardTextList) > 0:
var = (rewardText, playerIndex, trophyIndex)
oneRewardIval = Parallel(Func(setTrophyLabelText, rewardText, playerIndex, trophyIndex), LerpColorScaleInterval(self.trophyLabel, 4, Vec4(1, 1, 1, 0), startColorScale=Vec4(1, 1, 1, 1), blendType='easeIn'))
retval.append(oneRewardIval)
rewardCupList = self.calcCupTextListForAllPlayers(localAvId)
if len(rewardCupList) > 0:
for rewardText in rewardCupList:
oneCupIval = Parallel(Func(setRewardLabelText, rewardText), LerpColorScaleInterval(self.rewardLabel, 4, Vec4(1, 1, 1, 0), startColorScale=Vec4(1, 1, 1, 1), blendType='noBlend'))
retval.append(oneCupIval)
if self.tieBreakWinner:
name = ''
av = base.cr.doId2do.get(self.tieBreakWinner)
if av:
name = av.getName()
if GolfGlobals.TIME_TIE_BREAKER:
rewardText = TTLocalizer.GolfTimeTieBreakWinner % {'name': name}
else:
rewardText = TTLocalizer.GolfTieBreakWinner % {'name': name}
randomWinnerIval = Parallel(Func(setRewardLabelText, rewardText), LerpColorScaleInterval(self.rewardLabel, 7, Vec4(1, 1, 1, 0), startColorScale=Vec4(1, 1, 1, 1), blendType='noBlend'))
retval.append(randomWinnerIval)
rankings = self.calcRankings(localAvId)
rankText = TTLocalizer.GolfRanking + '\n'
for rank in xrange(len(rankings)):
rankText = rankText + rankings[rank] + '\n'
oneRankIval = Parallel(Func(setRankLabelText, rankText), LerpColorScaleInterval(self.rankLabel, 8, Vec4(1, 1, 1, 1), startColorScale=Vec4(1, 1, 1, 1), blendType='easeIn'))
retval.append(oneRankIval)
rewardHoleList = self.calcHoleBestTextListForAllPlayers(localAvId)
if len(rewardHoleList) > 0:
for rewardText in rewardHoleList:
oneHoleIval = Parallel(Func(setRewardLabelText, rewardText), LerpColorScaleInterval(self.rewardLabel, 8, Vec4(1, 1, 1, 0), startColorScale=Vec4(1, 1, 1, 1), blendType='easeIn'))
retval.append(oneHoleIval)
rewardCourseList = self.calcCourseBestTextListForAllPlayers(localAvId)
if len(rewardCourseList) > 0:
for rewardText in rewardCourseList:
oneCourseIval = Parallel(Func(setRewardLabelText, rewardText), LerpColorScaleInterval(self.rewardLabel, 4, Vec4(1, 1, 1, 0), startColorScale=Vec4(1, 1, 1, 1), blendType='easeIn'))
retval.append(oneCourseIval)
if self.endMovieCallback:
retval.append(Func(self.endMovieCallback))
return retval
def setup(self, localAvId):
self.rewardBoard = DirectFrame(parent=aspect2d, relief=None, geom=DGG.getDefaultDialogGeom(), geom_color=ToontownGlobals.GlobalDialogColor, geom_scale=(1.75, 1, 0.6), pos=(0, 0, -0.6))
self.rewardLabel = DirectLabel(parent=self.rewardBoard, relief=None, pos=(-0, 0, 0), text_align=TextNode.ACenter, text='', text_scale=0.05, text_wordwrap=30)
self.rankLabel = DirectLabel(parent=self.rewardBoard, relief=None, pos=(-0, 0, 0.17), text_align=TextNode.ACenter, text='', text_scale=0.06)
self.trophyLabel = DirectLabel(parent=self.rewardBoard, relief=None, pos=(-0.7, 0, 0.05), text_align=TextNode.ALeft, text='', text_scale=0.06, text_wordwrap=20)
self.movie = self.createRewardMovie(localAvId)
return
def delete(self):
self.movie.pause()
self.notify.debug('Movie is paused')
self.rewardBoard.destroy()
self.notify.debug('Reward board is destroyed')
self.movie = None
self.notify.debug('Movie is deleted')
return
def getMovie(self):
return self.movie
|
apache-2.0
| 1,307,104,951,644,556,000
| 48.059041
| 225
| 0.581271
| false
| 3.73665
| false
| false
| false
|
openstack/oslo.vmware
|
oslo_vmware/service.py
|
1
|
18788
|
# Copyright (c) 2014-2020 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common classes that provide access to vSphere services.
"""
import http.client as httplib
import io
import logging
import netaddr
from oslo_utils import timeutils
from oslo_utils import uuidutils
import requests
import suds
from suds import cache
from suds import client
from suds import plugin
import suds.sax.element as element
from suds import transport
from oslo_vmware._i18n import _
from oslo_vmware import exceptions
from oslo_vmware import vim_util
CACHE_TIMEOUT = 60 * 60 # One hour cache timeout
ADDRESS_IN_USE_ERROR = 'Address already in use'
CONN_ABORT_ERROR = 'Software caused connection abort'
RESP_NOT_XML_ERROR = 'Response is "text/html", not "text/xml"'
SERVICE_INSTANCE = 'ServiceInstance'
LOG = logging.getLogger(__name__)
class ServiceMessagePlugin(plugin.MessagePlugin):
"""Suds plug-in handling some special cases while calling VI SDK."""
# list of XML elements which are allowed to be empty
EMPTY_ELEMENTS = ["VirtualMachineEmptyProfileSpec"]
def add_attribute_for_value(self, node):
"""Helper to handle AnyType.
Suds does not handle AnyType properly. But VI SDK requires type
attribute to be set when AnyType is used.
:param node: XML value node
"""
if node.name == 'value' or node.name == 'val':
node.set('xsi:type', 'xsd:string')
        # removeKey may be an 'int' or a 'string'
if node.name == 'removeKey':
try:
int(node.text)
node.set('xsi:type', 'xsd:int')
except (ValueError, TypeError):
node.set('xsi:type', 'xsd:string')
def prune(self, el):
pruned = []
for c in el.children:
self.prune(c)
if c.isempty(False) and c.name not in self.EMPTY_ELEMENTS:
pruned.append(c)
for p in pruned:
el.children.remove(p)
def marshalled(self, context):
"""Modifies the envelope document before it is sent.
This method provides the plug-in with the opportunity to prune empty
nodes and fix nodes before sending it to the server.
:param context: send context
"""
# Suds builds the entire request object based on the WSDL schema.
# VI SDK throws server errors if optional SOAP nodes are sent
# without values; e.g., <test/> as opposed to <test>test</test>.
self.prune(context.envelope)
context.envelope.walk(self.add_attribute_for_value)
class Response(io.BytesIO):
"""Response with an input stream as source."""
def __init__(self, stream, status=200, headers=None):
self.status = status
self.headers = headers or {}
self.reason = requests.status_codes._codes.get(
status, [''])[0].upper().replace('_', ' ')
io.BytesIO.__init__(self, stream)
@property
def _original_response(self):
return self
@property
def msg(self):
return self
def read(self, chunk_size, **kwargs):
return io.BytesIO.read(self, chunk_size)
def info(self):
return self
def get_all(self, name, default):
result = self.headers.get(name)
if not result:
return default
return [result]
def getheaders(self, name):
return self.get_all(name, [])
def release_conn(self):
self.close()
class LocalFileAdapter(requests.adapters.HTTPAdapter):
"""Transport adapter for local files.
See http://stackoverflow.com/a/22989322
"""
def __init__(self, pool_maxsize=10):
super(LocalFileAdapter, self).__init__(pool_connections=pool_maxsize,
pool_maxsize=pool_maxsize)
def _build_response_from_file(self, request):
file_path = request.url[7:]
with open(file_path, 'rb') as f:
file_content = f.read()
buff = bytearray(file_content.decode(), "utf-8")
resp = Response(buff)
return self.build_response(request, resp)
def send(self, request, stream=False, timeout=None,
verify=True, cert=None, proxies=None):
"""Sends request for a local file."""
return self._build_response_from_file(request)
class RequestsTransport(transport.Transport):
def __init__(self, cacert=None, insecure=True, pool_maxsize=10,
connection_timeout=None):
transport.Transport.__init__(self)
# insecure flag is used only if cacert is not
# specified.
self.verify = cacert if cacert else not insecure
self.session = requests.Session()
self.session.mount('file:///',
LocalFileAdapter(pool_maxsize=pool_maxsize))
self.session.mount('https://', requests.adapters.HTTPAdapter(
pool_connections=pool_maxsize, pool_maxsize=pool_maxsize))
self.cookiejar = self.session.cookies
self._connection_timeout = connection_timeout
def open(self, request):
resp = self.session.get(request.url, verify=self.verify)
return io.BytesIO(resp.content)
def send(self, request):
resp = self.session.post(request.url,
data=request.message,
headers=request.headers,
verify=self.verify,
timeout=self._connection_timeout)
return transport.Reply(resp.status_code, resp.headers, resp.content)
class MemoryCache(cache.ObjectCache):
def __init__(self):
self._cache = {}
def get(self, key):
"""Retrieves the value for a key or None."""
now = timeutils.utcnow_ts()
for k in list(self._cache):
(timeout, _value) = self._cache[k]
if timeout and now >= timeout:
del self._cache[k]
return self._cache.get(key, (0, None))[1]
def put(self, key, value, time=CACHE_TIMEOUT):
"""Sets the value for a key."""
timeout = 0
if time != 0:
timeout = timeutils.utcnow_ts() + time
self._cache[key] = (timeout, value)
return True
_CACHE = MemoryCache()
class CompatibilitySudsClient(client.Client):
"""suds client with added cookiejar attribute
The cookiejar properties allow reading/setting the cookiejar used by the
underlying transport.
"""
def __init__(self, *args, **kwargs):
super(CompatibilitySudsClient, self).__init__(*args, **kwargs)
@property
def cookiejar(self):
return self.options.transport.cookiejar
@cookiejar.setter
def cookiejar(self, cookies):
self.options.transport.session.cookies = cookies
self.options.transport.cookiejar = cookies
class Service(object):
"""Base class containing common functionality for invoking vSphere
services
"""
def __init__(self, wsdl_url=None, soap_url=None,
cacert=None, insecure=True, pool_maxsize=10,
connection_timeout=None, op_id_prefix='oslo.vmware'):
self.wsdl_url = wsdl_url
self.soap_url = soap_url
self.op_id_prefix = op_id_prefix
LOG.debug("Creating suds client with soap_url='%s' and wsdl_url='%s'",
self.soap_url, self.wsdl_url)
transport = RequestsTransport(cacert=cacert,
insecure=insecure,
pool_maxsize=pool_maxsize,
connection_timeout=connection_timeout)
self.client = CompatibilitySudsClient(self.wsdl_url,
transport=transport,
location=self.soap_url,
plugins=[ServiceMessagePlugin()],
cache=_CACHE)
self._service_content = None
self._vc_session_cookie = None
@staticmethod
def build_base_url(protocol, host, port):
proto_str = '%s://' % protocol
host_str = '[%s]' % host if netaddr.valid_ipv6(host) else host
port_str = '' if port is None else ':%d' % port
return proto_str + host_str + port_str
@staticmethod
def _retrieve_properties_ex_fault_checker(response):
"""Checks the RetrievePropertiesEx API response for errors.
Certain faults are sent in the SOAP body as a property of missingSet.
This method raises VimFaultException when a fault is found in the
response.
:param response: response from RetrievePropertiesEx API call
:raises: VimFaultException
"""
fault_list = []
details = {}
if not response:
# This is the case when the session has timed out. ESX SOAP
# server sends an empty RetrievePropertiesExResponse. Normally
# missingSet in the response objects has the specifics about
# the error, but that's not the case with a timed out idle
# session. It is as bad as a terminated session for we cannot
# use the session. Therefore setting fault to NotAuthenticated
# fault.
LOG.debug("RetrievePropertiesEx API response is empty; setting "
"fault to %s.",
exceptions.NOT_AUTHENTICATED)
fault_list = [exceptions.NOT_AUTHENTICATED]
else:
for obj_cont in response.objects:
if hasattr(obj_cont, 'missingSet'):
for missing_elem in obj_cont.missingSet:
f_type = missing_elem.fault.fault
f_name = f_type.__class__.__name__
fault_list.append(f_name)
if f_name == exceptions.NO_PERMISSION:
details['object'] = \
vim_util.get_moref_value(f_type.object)
details['privilegeId'] = f_type.privilegeId
if fault_list:
fault_string = _("Error occurred while calling "
"RetrievePropertiesEx.")
raise exceptions.VimFaultException(fault_list,
fault_string,
details=details)
def _set_soap_headers(self, op_id):
"""Set SOAP headers for the next remote call to vCenter.
SOAP headers may include operation ID and vcSessionCookie.
The operation ID is a random string which allows to correlate log
messages across different systems (OpenStack, vCenter, ESX).
vcSessionCookie is needed when making PBM calls.
"""
headers = []
if self._vc_session_cookie:
elem = element.Element('vcSessionCookie').setText(
self._vc_session_cookie)
headers.append(elem)
if op_id:
elem = element.Element('operationID').setText(op_id)
headers.append(elem)
if headers:
self.client.set_options(soapheaders=headers)
@property
def service_content(self):
if self._service_content is None:
self._service_content = self.retrieve_service_content()
return self._service_content
def get_http_cookie(self):
"""Return the vCenter session cookie."""
cookies = self.client.cookiejar
for cookie in cookies:
if cookie.name.lower() == 'vmware_soap_session':
return cookie.value
def __getattr__(self, attr_name):
"""Returns the method to invoke API identified by param attr_name."""
def request_handler(managed_object, **kwargs):
"""Handler for vSphere API calls.
Invokes the API and parses the response for fault checking and
other errors.
:param managed_object: managed object reference argument of the
API call
:param kwargs: keyword arguments of the API call
:returns: response of the API call
:raises: VimException, VimFaultException, VimAttributeException,
VimSessionOverLoadException, VimConnectionException
"""
try:
if isinstance(managed_object, str):
# For strings, use string value for value and type
# of the managed object.
managed_object = vim_util.get_moref(managed_object,
managed_object)
if managed_object is None:
return
skip_op_id = kwargs.pop('skip_op_id', False)
op_id = None
if not skip_op_id:
# Generate opID. It will appear in vCenter and ESX logs for
# this particular remote call.
op_id = '%s-%s' % (self.op_id_prefix,
uuidutils.generate_uuid())
LOG.debug('Invoking %s.%s with opID=%s',
vim_util.get_moref_type(managed_object),
attr_name,
op_id)
self._set_soap_headers(op_id)
request = getattr(self.client.service, attr_name)
response = request(managed_object, **kwargs)
if (attr_name.lower() == 'retrievepropertiesex'):
Service._retrieve_properties_ex_fault_checker(response)
return response
except exceptions.VimFaultException:
# Catch the VimFaultException that is raised by the fault
# check of the SOAP response.
raise
except suds.WebFault as excep:
fault_string = None
if excep.fault:
fault_string = excep.fault.faultstring
doc = excep.document
detail = None
if doc is not None:
detail = doc.childAtPath('/detail')
if not detail:
# NOTE(arnaud): this is needed with VC 5.1
detail = doc.childAtPath('/Envelope/Body/Fault/detail')
fault_list = []
details = {}
if detail:
for fault in detail.getChildren():
fault_type = fault.get('type')
# NOTE(vbala): PBM faults use vim25 namespace. Also,
# PBM APIs throw NotAuthenticated in vSphere 6.5 for
# session expiry.
if (fault_type.endswith(exceptions.SECURITY_ERROR) or
fault_type.endswith(
exceptions.NOT_AUTHENTICATED)):
fault_type = exceptions.NOT_AUTHENTICATED
fault_list.append(fault_type)
for child in fault.getChildren():
details[child.name] = child.getText()
raise exceptions.VimFaultException(fault_list, fault_string,
excep, details)
except AttributeError as excep:
raise exceptions.VimAttributeException(
_("No such SOAP method %s.") % attr_name, excep)
except (httplib.CannotSendRequest,
httplib.ResponseNotReady,
httplib.CannotSendHeader) as excep:
raise exceptions.VimSessionOverLoadException(
_("httplib error in %s.") % attr_name, excep)
except requests.RequestException as excep:
raise exceptions.VimConnectionException(
_("requests error in %s.") % attr_name, excep)
except Exception as excep:
# TODO(vbala) should catch specific exceptions and raise
# appropriate VimExceptions.
# Socket errors which need special handling; some of these
# might be caused by server API call overload.
                if (str(excep).find(ADDRESS_IN_USE_ERROR) != -1 or
                        str(excep).find(CONN_ABORT_ERROR) != -1):
raise exceptions.VimSessionOverLoadException(
_("Socket error in %s.") % attr_name, excep)
# Type error which needs special handling; it might be caused
# by server API call overload.
elif str(excep).find(RESP_NOT_XML_ERROR) != -1:
raise exceptions.VimSessionOverLoadException(
_("Type error in %s.") % attr_name, excep)
else:
raise exceptions.VimException(
_("Exception in %s.") % attr_name, excep)
return request_handler
def __repr__(self):
return "vSphere object"
def __str__(self):
return "vSphere object"
class SudsLogFilter(logging.Filter):
"""Filter to mask/truncate vCenter credentials in suds logs."""
def filter(self, record):
if not hasattr(record.msg, 'childAtPath'):
return True
# Suds will log vCenter credentials if SessionManager.Login or
# SessionManager.SessionIsActive fails.
login = (record.msg.childAtPath('/Envelope/Body/Login') or
record.msg.childAtPath('/Envelope/Body/SessionIsActive'))
if login is None:
return True
if login.childAtPath('userName') is not None:
login.childAtPath('userName').setText('***')
if login.childAtPath('password') is not None: # nosec
login.childAtPath('password').setText('***') # nosec
session_id = login.childAtPath('sessionID')
if session_id is not None:
session_id.setText(session_id.getText()[-5:])
return True
# Set log filter to mask/truncate vCenter credentials in suds logs.
suds.client.log.addFilter(SudsLogFilter())
|
apache-2.0
| 6,293,512,933,116,968,000
| 37.818182
| 79
| 0.568288
| false
| 4.54585
| false
| false
| false
|
dbiesecke/dbiesecke.github.io
|
repo/script.module.urlresolver/lib/urlresolver/lib/kodi.py
|
1
|
9725
|
"""
URLResolver Addon for Kodi
Copyright (C) 2016 t0mm0, tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmcaddon
import xbmcplugin
import xbmcgui
import xbmc
import xbmcvfs
import urllib
import urlparse
import sys
import os
import re
import time
import strings
import CustomProgressDialog
addon = xbmcaddon.Addon('script.module.urlresolver')
get_setting = addon.getSetting
show_settings = addon.openSettings
sleep = xbmc.sleep
_log = xbmc.log
def get_path():
return addon.getAddonInfo('path').decode('utf-8')
def get_profile():
return addon.getAddonInfo('profile').decode('utf-8')
def translate_path(path):
return xbmc.translatePath(path).decode('utf-8')
def set_setting(id, value):
if not isinstance(value, basestring):
value = str(value)
addon.setSetting(id, value)
def get_version():
return addon.getAddonInfo('version')
def get_id():
return addon.getAddonInfo('id')
def get_name():
return addon.getAddonInfo('name')
def open_settings():
return addon.openSettings()
def get_keyboard(heading, default=''):
keyboard = xbmc.Keyboard()
keyboard.setHeading(heading)
if default: keyboard.setDefault(default)
keyboard.doModal()
if keyboard.isConfirmed():
return keyboard.getText()
else:
return None
def i18n(string_id):
try:
return addon.getLocalizedString(strings.STRINGS[string_id]).encode('utf-8', 'ignore')
except Exception as e:
_log('Failed String Lookup: %s (%s)' % (string_id, e))
return string_id
def get_plugin_url(queries):
try:
query = urllib.urlencode(queries)
except UnicodeEncodeError:
for k in queries:
if isinstance(queries[k], unicode):
queries[k] = queries[k].encode('utf-8')
query = urllib.urlencode(queries)
return sys.argv[0] + '?' + query
def end_of_directory(cache_to_disc=True):
xbmcplugin.endOfDirectory(int(sys.argv[1]), cacheToDisc=cache_to_disc)
def set_content(content):
xbmcplugin.setContent(int(sys.argv[1]), content)
def create_item(queries, label, thumb='', fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
list_item = xbmcgui.ListItem(label, iconImage=thumb, thumbnailImage=thumb)
add_item(queries, list_item, fanart, is_folder, is_playable, total_items, menu_items, replace_menu)
def add_item(queries, list_item, fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
if menu_items is None: menu_items = []
if is_folder is None:
is_folder = False if is_playable else True
if is_playable is None:
playable = 'false' if is_folder else 'true'
else:
playable = 'true' if is_playable else 'false'
liz_url = get_plugin_url(queries)
if fanart: list_item.setProperty('fanart_image', fanart)
list_item.setInfo('video', {'title': list_item.getLabel()})
list_item.setProperty('isPlayable', playable)
list_item.addContextMenuItems(menu_items, replaceItems=replace_menu)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), liz_url, list_item, isFolder=is_folder, totalItems=total_items)
def parse_query(query):
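    # For example, '?mode=play&id=3' becomes {'mode': 'play', 'id': '3'};
    # 'mode' defaults to 'main' when absent.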
q = {'mode': 'main'}
if query.startswith('?'): query = query[1:]
queries = urlparse.parse_qs(query)
for key in queries:
if len(queries[key]) == 1:
q[key] = queries[key][0]
else:
q[key] = queries[key]
return q
def notify(header=None, msg='', duration=2000, sound=None):
if header is None: header = get_name()
if sound is None: sound = get_setting('mute_notifications') == 'false'
icon_path = os.path.join(get_path(), 'icon.png')
try:
xbmcgui.Dialog().notification(header, msg, icon_path, duration, sound)
except:
builtin = "XBMC.Notification(%s,%s, %s, %s)" % (header, msg, duration, icon_path)
xbmc.executebuiltin(builtin)
def close_all():
xbmc.executebuiltin('Dialog.Close(all)')
def get_current_view():
skinPath = translate_path('special://skin/')
xml = os.path.join(skinPath, 'addon.xml')
f = xbmcvfs.File(xml)
read = f.read()
f.close()
try:
src = re.search('defaultresolution="([^"]+)', read, re.DOTALL).group(1)
except:
src = re.search('<res.+?folder="([^"]+)', read, re.DOTALL).group(1)
src = os.path.join(skinPath, src, 'MyVideoNav.xml')
f = xbmcvfs.File(src)
read = f.read()
f.close()
match = re.search('<views>([^<]+)', read, re.DOTALL)
if match:
views = match.group(1)
for view in views.split(','):
if xbmc.getInfoLabel('Control.GetLabel(%s)' % view):
return view
class WorkingDialog(object):
def __init__(self):
xbmc.executebuiltin('ActivateWindow(busydialog)')
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
xbmc.executebuiltin('Dialog.Close(busydialog)')
def has_addon(addon_id):
return xbmc.getCondVisibility('System.HasAddon(%s)' % addon_id) == 1
class ProgressDialog(object):
def __init__(self, heading, line1='', line2='', line3='', background=False, active=True, timer=0, custom=False):
self.begin = time.time()
self.timer = timer
self.background = background
self.custom = custom
self.heading = heading
if active and not timer:
self.pd = self.__create_dialog(line1, line2, line3)
self.pd.update(0)
else:
self.pd = None
def __create_dialog(self, line1, line2, line3):
if self.background:
pd = xbmcgui.DialogProgressBG()
msg = line1 + line2 + line3
pd.create(self.heading, msg)
else:
if self.custom:
pd = CustomProgressDialog.ProgressDialog()
else:
pd = xbmcgui.DialogProgress()
pd.create(self.heading, line1, line2, line3)
return pd
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if self.pd is not None:
self.pd.close()
del self.pd
def is_canceled(self):
if self.pd is not None and not self.background:
return self.pd.iscanceled()
else:
return False
def update(self, percent, line1='', line2='', line3=''):
if self.pd is None and self.timer and (time.time() - self.begin) >= self.timer:
self.pd = self.__create_dialog(line1, line2, line3)
if self.pd is not None:
if self.background:
msg = line1 + line2 + line3
self.pd.update(percent, self.heading, msg)
else:
self.pd.update(percent, line1, line2, line3)
class CountdownDialog(object):
__INTERVALS = 5
def __init__(self, heading, line1='', line2='', line3='', active=True, countdown=60, interval=5, custom=False):
self.heading = heading
self.countdown = countdown
self.custom = custom
self.interval = interval
self.line3 = line3
if active:
if self.custom:
pd = CustomProgressDialog.ProgressDialog()
else:
pd = xbmcgui.DialogProgress()
if not self.line3: line3 = 'Expires in: %s seconds' % countdown
pd.create(self.heading, line1, line2, line3)
pd.update(100)
self.pd = pd
else:
self.pd = None
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if self.pd is not None:
self.pd.close()
del self.pd
def start(self, func, args=None, kwargs=None):
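        # Calls func right away, then keeps retrying (roughly every `interval`
        # seconds) until it returns a truthy result, the countdown expires, or
        # the dialog is cancelled, updating the remaining-time display between
        # attempts.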
if args is None: args = []
if kwargs is None: kwargs = {}
result = func(*args, **kwargs)
if result:
return result
if self.pd is not None:
start = time.time()
expires = time_left = self.countdown
interval = self.interval
while time_left > 0:
for _ in range(CountdownDialog.__INTERVALS):
sleep(interval * 1000 / CountdownDialog.__INTERVALS)
if self.is_canceled(): return
time_left = expires - int(time.time() - start)
if time_left < 0: time_left = 0
progress = time_left * 100 / expires
line3 = 'Expires in: %s seconds' % time_left if not self.line3 else ''
self.update(progress, line3=line3)
result = func(*args, **kwargs)
if result:
return result
def is_canceled(self):
if self.pd is None:
return False
else:
return self.pd.iscanceled()
def update(self, percent, line1='', line2='', line3=''):
if self.pd is not None:
self.pd.update(percent, line1, line2, line3)
|
mit
| -4,834,126,515,700,619,000
| 30.070288
| 139
| 0.601028
| false
| 3.792902
| false
| false
| false
|
lcy-seso/models
|
fluid/face_detection/widerface_eval.py
|
1
|
11459
|
import os
import time
import numpy as np
import argparse
import functools
from PIL import Image
import paddle.fluid as fluid
import reader
from pyramidbox import PyramidBox
from visualize import draw_bboxes
from utility import add_arguments, print_arguments
parser = argparse.ArgumentParser(description=__doc__)
add_arg = functools.partial(add_arguments, argparser=parser)
# yapf: disable
add_arg('use_gpu', bool, True, "Whether use GPU or not.")
add_arg('use_pyramidbox', bool, True, "Whether use PyramidBox model.")
add_arg('data_dir', str, 'data/WIDER_val/images/', "The validation dataset path.")
add_arg('model_dir', str, '', "The model path.")
add_arg('pred_dir', str, 'pred', "The path to save the evaluation results.")
add_arg('file_list', str, 'data/wider_face_split/wider_face_val_bbx_gt.txt', "The validation dataset path.")
add_arg('infer', bool, False, "Whether do infer or eval.")
add_arg('confs_threshold', float, 0.15, "Confidence threshold to draw bbox.")
add_arg('image_path', str, '', "The image used to inference and visualize.")
# yapf: enable
def infer(args, config):
model_dir = args.model_dir
pred_dir = args.pred_dir
if not os.path.exists(model_dir):
raise ValueError("The model path [%s] does not exist." % (model_dir))
if args.infer:
image_path = args.image_path
image = Image.open(image_path)
if image.mode == 'L':
            image = image.convert('RGB')
shrink, max_shrink = get_shrink(image.size[1], image.size[0])
det0 = detect_face(image, shrink)
det1 = flip_test(image, shrink)
[det2, det3] = multi_scale_test(image, max_shrink)
det4 = multi_scale_test_pyramid(image, max_shrink)
det = np.row_stack((det0, det1, det2, det3, det4))
dets = bbox_vote(det)
keep_index = np.where(dets[:, 4] >= args.confs_threshold)[0]
dets = dets[keep_index, :]
draw_bboxes(image_path, dets[:, 0:4])
else:
test_reader = reader.test(config, args.file_list)
for image, image_path in test_reader():
shrink, max_shrink = get_shrink(image.size[1], image.size[0])
det0 = detect_face(image, shrink)
det1 = flip_test(image, shrink)
[det2, det3] = multi_scale_test(image, max_shrink)
det4 = multi_scale_test_pyramid(image, max_shrink)
det = np.row_stack((det0, det1, det2, det3, det4))
dets = bbox_vote(det)
save_widerface_bboxes(image_path, dets, pred_dir)
print("Finish evaluation.")
def save_widerface_bboxes(image_path, bboxes_scores, output_dir):
"""
Save predicted results, including bbox and score into text file.
Args:
image_path (string): file name.
        bboxes_scores (np.array|list): the predicted bboxes and scores, layout
is (xmin, ymin, xmax, ymax, score)
output_dir (string): output directory.
"""
image_name = image_path.split('/')[-1]
image_class = image_path.split('/')[-2]
image_name = image_name.encode('utf-8')
image_class = image_class.encode('utf-8')
odir = os.path.join(output_dir, image_class)
if not os.path.exists(odir):
os.makedirs(odir)
ofname = os.path.join(odir, '%s.txt' % (image_name[:-4]))
f = open(ofname, 'w')
f.write('{:s}\n'.format(image_class + '/' + image_name))
f.write('{:d}\n'.format(bboxes_scores.shape[0]))
for box_score in bboxes_scores:
xmin, ymin, xmax, ymax, score = box_score
f.write('{:.1f} {:.1f} {:.1f} {:.1f} {:.3f}\n'.format(xmin, ymin, (
xmax - xmin + 1), (ymax - ymin + 1), score))
f.close()
print("The predicted result is saved as {}".format(ofname))
def detect_face(image, shrink):
image_shape = [3, image.size[1], image.size[0]]
if shrink != 1:
h, w = int(image_shape[1] * shrink), int(image_shape[2] * shrink)
image = image.resize((w, h), Image.ANTIALIAS)
image_shape = [3, h, w]
img = np.array(image)
img = reader.to_chw_bgr(img)
mean = [104., 117., 123.]
scale = 0.007843
img = img.astype('float32')
img -= np.array(mean)[:, np.newaxis, np.newaxis].astype('float32')
img = img * scale
img = [img]
img = np.array(img)
detection, = exe.run(infer_program,
feed={'image': img},
fetch_list=fetches,
return_numpy=False)
detection = np.array(detection)
    # layout: xmin, ymin, xmax, ymax, score
if detection.shape == (1, ):
print("No face detected")
return np.array([[0, 0, 0, 0, 0]])
det_conf = detection[:, 1]
det_xmin = image_shape[2] * detection[:, 2] / shrink
det_ymin = image_shape[1] * detection[:, 3] / shrink
det_xmax = image_shape[2] * detection[:, 4] / shrink
det_ymax = image_shape[1] * detection[:, 5] / shrink
det = np.column_stack((det_xmin, det_ymin, det_xmax, det_ymax, det_conf))
return det
def bbox_vote(det):
order = det[:, 4].ravel().argsort()[::-1]
det = det[order, :]
if det.shape[0] == 0:
dets = np.array([[10, 10, 20, 20, 0.002]])
det = np.empty(shape=[0, 5])
while det.shape[0] > 0:
# IOU
area = (det[:, 2] - det[:, 0] + 1) * (det[:, 3] - det[:, 1] + 1)
xx1 = np.maximum(det[0, 0], det[:, 0])
yy1 = np.maximum(det[0, 1], det[:, 1])
xx2 = np.minimum(det[0, 2], det[:, 2])
yy2 = np.minimum(det[0, 3], det[:, 3])
w = np.maximum(0.0, xx2 - xx1 + 1)
h = np.maximum(0.0, yy2 - yy1 + 1)
inter = w * h
o = inter / (area[0] + area[:] - inter)
# nms
merge_index = np.where(o >= 0.3)[0]
det_accu = det[merge_index, :]
det = np.delete(det, merge_index, 0)
if merge_index.shape[0] <= 1:
if det.shape[0] == 0:
try:
dets = np.row_stack((dets, det_accu))
except:
dets = det_accu
continue
det_accu[:, 0:4] = det_accu[:, 0:4] * np.tile(det_accu[:, -1:], (1, 4))
max_score = np.max(det_accu[:, 4])
det_accu_sum = np.zeros((1, 5))
det_accu_sum[:, 0:4] = np.sum(det_accu[:, 0:4],
axis=0) / np.sum(det_accu[:, -1:])
det_accu_sum[:, 4] = max_score
try:
dets = np.row_stack((dets, det_accu_sum))
except:
dets = det_accu_sum
dets = dets[0:750, :]
return dets
def flip_test(image, shrink):
img = image.transpose(Image.FLIP_LEFT_RIGHT)
det_f = detect_face(img, shrink)
det_t = np.zeros(det_f.shape)
# image.size: [width, height]
det_t[:, 0] = image.size[0] - det_f[:, 2]
det_t[:, 1] = det_f[:, 1]
det_t[:, 2] = image.size[0] - det_f[:, 0]
det_t[:, 3] = det_f[:, 3]
det_t[:, 4] = det_f[:, 4]
return det_t
def multi_scale_test(image, max_shrink):
    # The shrunken image is only used to detect big faces
st = 0.5 if max_shrink >= 0.75 else 0.5 * max_shrink
det_s = detect_face(image, st)
index = np.where(
np.maximum(det_s[:, 2] - det_s[:, 0] + 1, det_s[:, 3] - det_s[:, 1] + 1)
> 30)[0]
det_s = det_s[index, :]
    # Enlarge once
bt = min(2, max_shrink) if max_shrink > 1 else (st + max_shrink) / 2
det_b = detect_face(image, bt)
    # Keep enlarging the image to detect small faces
if max_shrink > 2:
bt *= 2
while bt < max_shrink:
det_b = np.row_stack((det_b, detect_face(image, bt)))
bt *= 2
det_b = np.row_stack((det_b, detect_face(image, max_shrink)))
# Enlarged images are only used to detect small faces.
if bt > 1:
index = np.where(
np.minimum(det_b[:, 2] - det_b[:, 0] + 1,
det_b[:, 3] - det_b[:, 1] + 1) < 100)[0]
det_b = det_b[index, :]
    # Shrunken images are only used to detect big faces.
else:
index = np.where(
np.maximum(det_b[:, 2] - det_b[:, 0] + 1,
det_b[:, 3] - det_b[:, 1] + 1) > 30)[0]
det_b = det_b[index, :]
return det_s, det_b
def multi_scale_test_pyramid(image, max_shrink):
# Use image pyramids to detect faces
det_b = detect_face(image, 0.25)
index = np.where(
np.maximum(det_b[:, 2] - det_b[:, 0] + 1, det_b[:, 3] - det_b[:, 1] + 1)
> 30)[0]
det_b = det_b[index, :]
st = [0.75, 1.25, 1.5, 1.75]
for i in range(len(st)):
if (st[i] <= max_shrink):
det_temp = detect_face(image, st[i])
# Enlarged images are only used to detect small faces.
if st[i] > 1:
index = np.where(
np.minimum(det_temp[:, 2] - det_temp[:, 0] + 1,
det_temp[:, 3] - det_temp[:, 1] + 1) < 100)[0]
det_temp = det_temp[index, :]
            # Shrunken images are only used to detect big faces.
else:
index = np.where(
np.maximum(det_temp[:, 2] - det_temp[:, 0] + 1,
det_temp[:, 3] - det_temp[:, 1] + 1) > 30)[0]
det_temp = det_temp[index, :]
det_b = np.row_stack((det_b, det_temp))
return det_b
def get_shrink(height, width):
"""
Args:
height (int): image height.
width (int): image width.
"""
# avoid out of memory
max_shrink_v1 = (0x7fffffff / 577.0 / (height * width))**0.5
max_shrink_v2 = ((678 * 1024 * 2.0 * 2.0) / (height * width))**0.5
def get_round(x, loc):
str_x = str(x)
if '.' in str_x:
str_before, str_after = str_x.split('.')
len_after = len(str_after)
            if len_after >= 3:
                str_final = str_before + '.' + str_after[0:loc]
                return float(str_final)
            else:
                return x
        else:
            return x
max_shrink = get_round(min(max_shrink_v1, max_shrink_v2), 2) - 0.3
if max_shrink >= 1.5 and max_shrink < 2:
max_shrink = max_shrink - 0.1
elif max_shrink >= 2 and max_shrink < 3:
max_shrink = max_shrink - 0.2
elif max_shrink >= 3 and max_shrink < 4:
max_shrink = max_shrink - 0.3
elif max_shrink >= 4 and max_shrink < 5:
max_shrink = max_shrink - 0.4
elif max_shrink >= 5:
max_shrink = max_shrink - 0.5
shrink = max_shrink if max_shrink < 1 else 1
return shrink, max_shrink
if __name__ == '__main__':
args = parser.parse_args()
print_arguments(args)
config = reader.Settings(data_dir=args.data_dir)
place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace()
exe = fluid.Executor(place)
main_program = fluid.Program()
startup_program = fluid.Program()
image_shape = [3, 1024, 1024]
with fluid.program_guard(main_program, startup_program):
network = PyramidBox(
image_shape, sub_network=args.use_pyramidbox, is_infer=True)
infer_program, nmsed_out = network.infer(main_program)
fetches = [nmsed_out]
fluid.io.load_persistables(
exe, args.model_dir, main_program=main_program)
infer(args, config)
|
apache-2.0
| 9,215,710,945,514,166,000
| 35.964516
| 116
| 0.53242
| false
| 3.167219
| true
| false
| false
|
lukipuki/obnam
|
obnamlib/bag_store.py
|
1
|
3767
|
# Copyright 2015 Lars Wirzenius
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# =*= License: GPL-3+ =*=
import errno
import os
import random
import obnamlib
class BagStore(object):
def __init__(self):
self._fs = None
self._dirname = None
self._id_inventor = IdInventor()
self._id_inventor.set_filename_maker(self._make_bag_filename)
def _make_bag_filename(self, bag_id):
return os.path.join(self._dirname, '%016x.bag' % bag_id)
def set_location(self, fs, dirname):
self._fs = fs
self._dirname = dirname
self._id_inventor.set_fs(fs)
def reserve_bag_id(self):
return self._id_inventor.reserve_id()
def put_bag(self, bag):
filename = self._make_bag_filename(bag.get_id())
serialised = serialise_bag(bag)
self._fs.overwrite_file(filename, serialised)
def get_bag(self, bag_id):
filename = self._make_bag_filename(bag_id)
serialised = self._fs.cat(filename)
return deserialise_bag(serialised)
def has_bag(self, bag_id):
filename = self._make_bag_filename(bag_id)
try:
st = self._fs.lstat(filename)
except (IOError, OSError): # pragma: no cover
return False
return st.st_size > 0
def get_bag_ids(self):
for pathname, _ in self._fs.scan_tree(self._dirname):
if self._is_bag_filename(pathname):
yield self._get_bag_id_from_filename(pathname)
def _is_bag_filename(self, pathname):
return pathname.endswith('.bag')
def _get_bag_id_from_filename(self, pathname):
basename = os.path.basename(pathname)
return int(basename[:-len('.bag')], 16)
def remove_bag(self, bag_id):
filename = self._make_bag_filename(bag_id)
self._fs.remove(filename)
class IdInventor(object):
def __init__(self):
self.set_fs(None)
self._filename_maker = None
def set_fs(self, fs):
self._fs = fs
self._prev_id = None
def set_filename_maker(self, maker):
self._filename_maker = maker
def reserve_id(self):
while True:
self._next_id()
if self._reserve_succeeds():
return self._prev_id
self._prev_id = None # pragma: no cover
def _next_id(self):
if self._prev_id is None:
self._prev_id = random.randint(0, obnamlib.MAX_ID)
else:
self._prev_id += 1 # pragma: no cover
def _reserve_succeeds(self):
filename = self._filename_maker(self._prev_id)
try:
self._fs.write_file(filename, '')
except OSError as e: # pragma: no cover
            if e.errno == errno.EEXIST:
return False
raise
return True
def serialise_bag(bag):
obj = {
'bag-id': bag.get_id(),
'blobs': [bag[i] for i in range(len(bag))],
}
return obnamlib.serialise_object(obj)
def deserialise_bag(serialised):
obj = obnamlib.deserialise_object(serialised)
bag = obnamlib.Bag()
bag.set_id(obj['bag-id'])
for blob in obj['blobs']:
bag.append(blob)
return bag
|
gpl-3.0
| -8,144,014,369,732,960,000
| 27.976923
| 71
| 0.606849
| false
| 3.510718
| false
| false
| false
|
noba3/KoTos
|
addons/plugin.video.movie25/resources/libs/plugins/tvrelease.py
|
1
|
14043
|
import urllib, urllib2,re,string,sys,os
import xbmc, xbmcgui, xbmcaddon, xbmcplugin
from resources.libs import main
from t0mm0.common.addon import Addon
addon_id = 'plugin.video.movie25'
selfAddon = xbmcaddon.Addon(id=addon_id)
addon = Addon(addon_id, sys.argv)
art = main.art
error_logo = art+'/bigx.png'
BASEURL = 'http://www.tv-release.net/'
prettyName = 'TVRelease'
def MAINMENU():
main.addDir('Search Tv-Release', BASEURL+'?seacher=', 1006,art+'/tvrsearch1.png')
main.addDir('TV 480', BASEURL+'?cat=TV-480p', 1001,art+'/TV480.png')
main.addDir('TV 720', BASEURL+'?cat=TV-720p', 1001,art+'/TV720.png')
main.addDir('TV MP4', BASEURL+'?cat=TV-Mp4', 1001,art+'/TVmp4.png')
main.addDir('TV Xvid', BASEURL+'?cat=TV-XviD', 1001,art+'/TVxvid.png')
#main.addDir('TV Packs', BASEURL+'category/tvshows/tvpack/', 1007,art+'/TVpacks.png')
main.addDir('TV Foreign', BASEURL+'?cat=TV-Foreign', 1001,art+'/TVforeign.png')
main.addDir('Movies 480', BASEURL+'?cat=Movies-480p', 1001,art+'/Movies480.png')
main.addDir('Movies 720', BASEURL+'?cat=Movies-720p', 1001,art+'/Movies720.png')
main.addDir('Movies Xvid', BASEURL+'?cat=Movies-XviD', 1001,art+'/Moviesxvid.png')
main.addDir('Movies Foreign', BASEURL+'?cat=Movies-Foreign', 1001,art+'/Moviesforeign.png')
main.addSpecial('Resolver Settings',BASEURL, 1004,art+'/tvrresolver.png')
main.VIEWSB()
def INDEX(url):
types = []
SearchType = None
if '!' in url:
r = url.rpartition('!')
print r
url = r[0]
SearchType = r[2]
else:
url = url
if 'cat=TV' in url:
types = 'tv'
elif 'cat=Movies' in url:
types = 'movie'
html = GETHTML(url)
if html == None:
return
pattern = '<tr><td[^>]*?><a [^>]*?>([^<]*?)</a></td><td[^>]*?><a href=\'([^\']*?)\'[^>]*?>([^<]*?)<'
r = re.findall(pattern, html, re.I|re.M|re.DOTALL)
dialogWait = xbmcgui.DialogProgress()
ret = dialogWait.create('Please wait until list is cached.')
totalLinks = len(r)
loadedLinks = 0
remaining_display = 'Media loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
for tag, url, name in r:
if re.search('(?i)WEB-DL',name): tag = tag.strip() + " WEB-DL"
if re.findall('\d{4}p', name):
r = re.findall('(.+?)\s(\d+p)', name)
for name, quality in r:
tag = tag.replace('720p',quality)
pass
if re.findall('\ss\d+e\d+\s', name, re.I|re.DOTALL):
r = re.findall('(.+?)\ss(\d+)e(\d+)\s', name, re.I)
for name, season, episode in r:
name = name+' S'+season+'E'+episode
elif re.findall('\s\d{4}\s\d{2}\s\d{2}\s', name):
r = re.findall('(.+?)\s(\d{4})\s(\d{2})\s(\d{2})\s',name)
for name, year, month, day in r:
name = name+' '+year+' '+month+' '+day
elif re.findall('\shdtv\sx', name, re.I):
r = re.findall('(.+?)\shdtv\sx',name, re.I)
for name in r:
pass
name = re.sub('\s\s+',' ',name).strip()
name = name+' [COLOR red]'+re.sub('(?sim)^(TV-|Movies-)(.*)','\\2',tag)+'[/COLOR]'
if SearchType == None:
if 'TV' in tag:
main.addDirTE(main.CleanTitle(name),url,1003,'','','','','','')
elif 'Movies' in tag:
if re.findall('\s\d+\s',name):
r = name.rpartition('\s\d{4}\s')
main.addDirM(main.CleanTitle(name),url,1003,'','','','','','')
elif SearchType == 'tv' and 'TV' in tag:
main.addDirTE(main.CleanTitle(name),url,1003,'','','','','','')
elif SearchType == 'movie' and 'Movies' in tag:
r = name.rpartition('\s\d{4}\s')
main.addDirM(main.CleanTitle(name),url,1003,'','','','','','')
loadedLinks = loadedLinks + 1
percent = (loadedLinks * 100)/totalLinks
remaining_display = 'Media loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
if dialogWait.iscanceled(): break
dialogWait.close()
del dialogWait
if "<div class='zmg_pn'" in html and loadedLinks >= totalLinks:
r = re.findall("""<span class='zmg_pn_current'>(\d+?)</span>[^<]*?<span class='zmg_pn_standar'><a href="([^"]+?)">""", html, re.I|re.DOTALL|re.M)
total = re.findall('">(\d+)</a></span>', html)
if total: total = total[-1]
else: total = "1"
for current, url in r:
name = 'Page '+current+' of '+total+' [COLOR green]Next Page >>>[/COLOR]'
main.addDir('[COLOR green]Go to Page[/COLOR]', url+':'+total, 1002, art+'/gotopagetr.png')
main.addDir(name, url.replace('%5C',''), 1001, art+'/nextpage.png')
main.VIEWS()
def LISTHOSTERS(name,url):
html = GETHTML(url)
if html == None: return
if selfAddon.getSetting("hide-download-instructions") != "true":
main.addLink("[COLOR red]For Download Options, Bring up Context Menu Over Selected Link.[/COLOR]",'','')
r = re.findall(r'class="td_cols"><a target=\'_blank\'.+?href=\'(.+?)\'>',html, re.M|re.DOTALL)
try:
t = re.findall(r'rel="nofollow">((?!.*\.rar).*)</a>', html, re.I)
r = r+t
except: pass
if len(r) == 0:
addon.show_ok_dialog(['No Playable Streams Found,','It Might Be That They Are Still Being Uploaded,',
'Or They Are Unstreamable Archive Files'],'MashUP: TV-Release')
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
return
from urlparse import urlparse
for url in r:
url = url.replace("180upload.nl","180upload.com")
host = urlparse(url).hostname.replace('www.','').partition('.')[0]
if main.supportedHost(host):
main.addDown2(name.strip()+" [COLOR blue]"+host.upper()+"[/COLOR]",url,1005,art+'/hosts/'+host+'.png',art+'/hosts/'+host+'.png')
def superSearch(encode,type):
try:
if type == 'Movies': cat = 'Movies-XviD,Movies-720p,Movies-480p,Movies-Foreign,Movies-DVDR,'
else: cat = 'TV-XviD,TV-Mp4,TV-720p,TV-480p,TV-Foreign,'
surl ='http://tv-release.net/?s='+encode+'&cat='+cat
returnList=[]
link=main.OPENURL(surl,verbose=False)
link=link.replace('\r','').replace('\n','').replace('\t','').replace(' ','')
pattern = '<tr><td[^>]*?><a [^>]*?>([^<]*?)</a></td><td[^>]*?><a href=\'([^\']*?)\'[^>]*?>([^<]*?)<'
r = re.findall(pattern, link, re.I|re.M|re.DOTALL)
for tag, url, name in r:
if re.search('(?i)WEB-DL',name): tag = tag.strip() + " WEB-DL"
if re.findall('\d+p\s', name):
r = re.findall('(.+?)\s(\d+p)\s', name)
for name, quality in r:
tag = tag.replace('720p',quality)
pass
if re.findall('\ss\d+e\d+\s', name, re.I|re.DOTALL):
r = re.findall('(.+?)\ss(\d+)e(\d+)\s', name, re.I)
for name, season, episode in r:
name = name+' S'+season+'E'+episode
elif re.findall('\s\d{4}\s\d{2}\s\d{2}\s', name):
r = re.findall('(.+?)\s(\d{4})\s(\d{2})\s(\d{2})\s',name)
for name, year, month, day in r:
name = name+' '+year+' '+month+' '+day
elif re.findall('\shdtv\sx', name, re.I):
r = re.findall('(.+?)\shdtv\sx',name, re.I)
for name in r:
pass
name = name+' [COLOR red]'+re.sub('(?sim)^(TV-|Movies-)(.*)','\\2',tag)+'[/COLOR]'
returnList.append((main.CleanTitle(name),prettyName,url,'',1003,True))
return returnList
except: return []
def SEARCHhistory():
dialog = xbmcgui.Dialog()
ret = dialog.select('[B]Choose A Search Type[/B]',['[B]TV Shows[/B]','[B]Movies[/B]'])
if ret == -1:
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
if ret == 0:
searchType = 'tv'
seapath=os.path.join(main.datapath,'Search')
SeaFile=os.path.join(seapath,'SearchHistoryTv')
if not os.path.exists(SeaFile):
SEARCH(searchType)
else:
main.addDir('Search',searchType,1008,art+'/search.png')
main.addDir('Clear History',SeaFile,128,art+'/cleahis.png')
thumb=art+'/link.png'
searchis=re.compile('search="(.+?)",').findall(open(SeaFile,'r').read())
for seahis in reversed(searchis):
url=seahis
seahis=seahis.replace('%20',' ')
url = 'http://tv-release.net/?s='+url+'&cat=TV-XviD,TV-Mp4,TV-720p,TV-480p,TV-Foreign,'
main.addDir(seahis,url,1001,thumb)
if ret == 1:
searchType = 'movie'
seapath=os.path.join(main.datapath,'Search')
SeaFile=os.path.join(seapath,'SearchHistory25')
if not os.path.exists(SeaFile):
SEARCH(searchType)
else:
main.addDir('Search',searchType,1008,art+'/search.png')
main.addDir('Clear History',SeaFile,128,art+'/cleahis.png')
thumb=art+'/link.png'
searchis=re.compile('search="(.+?)",').findall(open(SeaFile,'r').read())
for seahis in reversed(searchis):
url=seahis
seahis=seahis.replace('%20',' ')
url = 'http://tv-release.net/?s='+url+'&cat=Movies-XviD,Movies-720p,Movies-480p,Movies-Foreign,Movies-DVDR,'
main.addDir(seahis,url,1001,thumb)
def SEARCH(murl):
if murl == 'tv':
encode = main.updateSearchFile(murl,'TV',defaultValue=murl,searchMsg='Search For Shows or Episodes')
if not encode: return False
url = 'http://tv-release.net/?s='+encode+'&cat=TV-XviD,TV-Mp4,TV-720p,TV-480p,TV-Foreign,'
INDEX(url)
elif murl=='movie':
encode = main.updateSearchFile(murl,'Movies',defaultValue=murl,searchMsg='Search For Movies')
if not encode: return False
url = 'http://tv-release.net/?s='+encode+'&cat=Movies-XviD,Movies-720p,Movies-480p,Movies-Foreign,Movies-DVDR,'
INDEX(url)
def TVPACKS(url):
html = GETHTML(url)
if html == None:
return
pattern = '(?sim)Tv/Pack</a></span>.+?<a href="([^"]+?)"><b><font size="2px">([^<]+?)<'
r = re.findall(pattern,html)
for url, name in r:
main.addDir(name, url, 1001,'')
def GOTOP(url):
default = url
r = url.rpartition(':')
url = re.findall('^(.+page=)\d+(.*)$',r[0])
total = r[2]
keyboard = xbmcgui.Dialog().numeric(0, '[B][I]Goto Page Number[/B][/I]')
if not keyboard:
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
return False
if int(keyboard) > int(total) or keyboard == '0':
addon.show_ok_dialog(['Please Do Not Enter a Page Number bigger than',''+total+', Enter A Number Between 1 and '+total+'',
''], 'MashUP: TV-Release')
GOTOP(default)
url = url[0][0]+keyboard+str(url[0][1])
INDEX(url)
def PLAYMEDIA(name,url):
ok = True
r = re.findall(r'(.+?)\[COLOR', name)
name = r[0]
r=re.findall('Season(.+?)Episode([^<]+)',name)
if r:
infoLabels =main.GETMETAEpiT(name,'','')
video_type='episode'
season=infoLabels['season']
episode=infoLabels['episode']
else:
infoLabels =main.GETMETAT(name,'','','')
video_type='movie'
season=''
episode=''
img=infoLabels['cover_url']
fanart =infoLabels['backdrop_url']
imdb_id=infoLabels['imdb_id']
infolabels = { 'supports_meta' : 'true', 'video_type':video_type, 'name':str(infoLabels['title']), 'imdb_id':str(infoLabels['imdb_id']), 'season':str(season), 'episode':str(episode), 'year':str(infoLabels['year']) }
try:
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Resolving Link,3000)")
stream_url = main.resolve_url(url)
infoL={'Title': infoLabels['title'], 'Plot': infoLabels['plot'], 'Genre': infoLabels['genre']}
# play with bookmark
from resources.universal import playbackengine
player = playbackengine.PlayWithoutQueueSupport(resolved_url=stream_url, addon_id=addon_id, video_type=video_type, title=str(infoLabels['title']),season=str(season), episode=str(episode), year=str(infoLabels['year']),img=img,infolabels=infoL, watchedCallbackwithParams=main.WatchedCallbackwithParams,imdb_id=imdb_id)
#WatchHistory
if selfAddon.getSetting("whistory") == "true":
from resources.universal import watchhistory
wh = watchhistory.WatchHistory(addon_id)
wh.add_item(name+' '+'[COLOR=FF67cc33]TvRelease[/COLOR]', sys.argv[0]+sys.argv[2], infolabels=infolabels, img=str(img), fanart=str(fanart), is_folder=False)
player.KeepAlive()
return ok
except:
return ok
def GETHTML(url):
try:
h = main.OPENURL(url.replace(' ','%20'))
if '<h2>Under Maintenance</h2>' in h:
addon.show_ok_dialog(['[COLOR=FF67cc33][B]TV-Release is Down For Maintenance,[/COLOR][/B]',
'[COLOR=FF67cc33][B]Please Try Again Later[/COLOR][/B]',''],'MashUP: TV-Release')
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
return
return h
except urllib2.URLError, e:
addon.show_small_popup('MashUP: Tv-Release','TV-Release Web Site Failed To Respond, Check Log For Details', 9000, error_logo)
addon.log_notice(str(e))
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
return
|
gpl-2.0
| -4,429,823,489,364,764,700
| 47.760417
| 324
| 0.552304
| false
| 3.22161
| false
| false
| false
|
QuantCrimAtLeeds/PredictCode
|
open_cp/prohotspot.py
|
1
|
11749
|
"""
prohotspot
~~~~~~~~~~
Implements the "prospective hotspotting" technique from:
1. Bowers, Johnson, Pease,
"Prospective hot-spotting: The future of crime mapping?",
Brit. J. Criminol. (2004) 44 641--658. doi:10.1093/bjc/azh036
2. Johnson et al.,
"Prospective crime mapping in operational context",
Home Office Online Report 19/07
`Police online library <http://library.college.police.uk/docs/hordsolr/rdsolr1907.pdf>`_
The underlying idea is to start with a kernel / weight defined in space and
positive time. This typically has finite extent, and might be related to
discretised space and/or time. Weights used in the literature tend to be
of the form :math:`1/(1+d)`.
The classical algorithm assigns all events to cells in a gridding of space,
and a "grid" of time (typically the number of whole weeks before the current
time). Only events which are close enough in space and time to the grid cell
of interest are used. For these, the weight is evaluated on each one, and then
the sum taken.
It is important to note the coupling between the grid size used and the weight,
because it is the distance between grid cells which is used. Exactly what
"distance" here means is unclear, and we have provided a number of options.
Alternatively, we can just use the weight / kernel in a continuous kernel
density estimate scheme.
"""
from . import predictors as _predictors
import abc as _abc
import numpy as _np
class Weight(metaclass=_abc.ABCMeta):
"""Base class for weights / kernels. Classes implementing this algorithm
are responsible purely for providing weights. We leave the details of
possibly discretising data to other classes.
"""
@_abc.abstractmethod
def __call__(self, dt, dd):
"""Evaluate the weight given the potentially discretised input.
:param dt: The time distance from 0. May be a scalar or a numpy array;
should be of a number type, not `timedelta` or similar.
:param dd: Spatial distance. May be a scalar or a one-dimensional
numpy array.
:return: A scalar or one-dimensional numpy array as appropriate.
"""
pass
class ClassicWeight(Weight):
"""The classical weight, :math:`(1/(1+d))(1/(1+t))` where :math:`d` is
distance and :math:`t` is time. Default units are "grid cells" and "weeks",
respectively.
:param space_bandwidth: Distances greater than or equal to this set the
weight to 0.
:param time_bandwidth: Times greater than or equal to this set the weight
to 0.
"""
def __init__(self, space_bandwidth=8, time_bandwidth=8):
self.space_bandwidth = space_bandwidth
self.time_bandwidth = time_bandwidth
def __call__(self, dt, dd):
mask = (dt < self.time_bandwidth) & (dd < self.space_bandwidth)
return 1 / ( (1 + dd) * ( 1 + dt) ) * mask
def __repr__(self):
return "Classic(sb={}, tb={})".format(self.space_bandwidth, self.time_bandwidth)
@property
def args(self):
return "C{},{}".format(self.space_bandwidth, self.time_bandwidth)
class GridDistance(metaclass=_abc.ABCMeta):
"""Abstract base class to calculate the distance between grid cells"""
@_abc.abstractmethod
def __call__(self, x1, y1, x2, y2):
pass
class DistanceDiagonalsSame(GridDistance):
"""Distance in the grid. Diagonal distances are one, so (1,1) and
(2,2) are adjacent points. This equates to using an :math:`\ell^\infty`
norm.
"""
def __call__(self, x1, y1, x2, y2):
xx = _np.abs(x1 - x2)
yy = _np.abs(y1 - y2)
return _np.max(_np.vstack((xx, yy)), axis=0)
def __repr__(self):
return "DiagsSame"
class DistanceDiagonalsDifferent(GridDistance):
"""Distance in the grid. Now diagonal distances are two, so (1,1) and
(2,2) are two grid cells apart. This equates to using an :math:`\ell^1`
norm.
"""
def __call__(self, x1, y1, x2, y2):
return _np.abs(x1 - x2) + _np.abs(y1 - y2)
def __repr__(self):
return "DiagsDiff"
class DistanceCircle(GridDistance):
"""Distance in the grid using the usual Euclidean distance, i.e. the
:math:`\ell^2` norm. This will work better with the continuous version
of the predictor.
"""
def __call__(self, x1, y1, x2, y2):
return _np.sqrt((x1-x2)**2 + (y1-y2)**2)
def __repr__(self):
return "DiagsCircle"
class ProspectiveHotSpot(_predictors.DataTrainer):
"""Implements the classical, grid based algorithm. To calculate distances,
we consider the grid cell we are computing the risk intensity for, the grid
    cell the event falls into, and then delegate to an instance of
    :class:`GridDistance` to compute the distance. To compute time, we look at the
time difference between the prediction time and the timestamp of the event
and then divide by the :attr:`time_unit`, then round down to the
nearest whole number. So 6 days divided by 1 week is 0 whole units.
Set :attr:`distance` to change the computation of distance between
grid cells. Set :attr:`weight` to change the weight used.
:param region: The :class:`RectangularRegion` the data is in.
:param grid_size: The size of the grid to place the data into.
:param grid: Alternative to specifying the region and grid_size is to pass
a :class:`BoundedGrid` instance.
:param time_unit: A :class:`numpy.timedelta64` instance giving the time
unit.
"""
def __init__(self, region=None, grid_size=50, time_unit=_np.timedelta64(1, "W"), grid=None):
if grid is None:
self.grid = grid_size
self.region = region
else:
self.region = grid.region()
self.grid = grid.xsize
if grid.xsize != grid.ysize:
raise ValueError("Only supports *square* grid cells.")
self.time_unit = time_unit
self.weight = ClassicWeight()
self.distance = DistanceDiagonalsSame()
def _cell(self, x, y):
gridx = _np.floor((x - self.region.xmin) / self.grid)
gridy = _np.floor((y - self.region.ymin) / self.grid)
return gridx, gridy
def _total_weight(self, time_deltas, coords, cellx, celly):
gridx, gridy = self._cell(coords[0], coords[1])
distances = self.distance(gridx, gridy, cellx, celly)
return _np.sum(self.weight(time_deltas, distances))
def predict(self, cutoff_time, predict_time):
"""Calculate a grid based prediction.
:param cutoff_time: Ignore data with a timestamp after this time.
:param predict_time: Timestamp of the prediction. Used to calculate
the time difference between events and "now". Typically the same as
`cutoff_time`.
:return: An instance of :class:`GridPredictionArray`
"""
if not cutoff_time <= predict_time:
raise ValueError("Data cutoff point should be before prediction time")
events = self.data.events_before(cutoff_time)
time_deltas = _np.datetime64(predict_time) - events.timestamps
time_deltas = _np.floor(time_deltas / self.time_unit)
width = int(_np.rint((self.region.xmax - self.region.xmin) / self.grid))
height = int(_np.rint((self.region.ymax - self.region.ymin) / self.grid))
matrix = _np.empty((height, width))
for x in range(width):
for y in range(height):
matrix[y][x] = self._total_weight(time_deltas, events.coords, x, y)
return _predictors.GridPredictionArray(self.grid, self.grid, matrix,
self.region.xmin, self.region.ymin)
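# A minimal sketch of the time discretisation described in the class
# docstring above: the offset from the prediction time is divided by
# `time_unit` and rounded down, so six days against a one-week unit is zero.
def _whole_time_units_example():
    time_unit = _np.timedelta64(1, "W")
    delta = _np.timedelta64(6, "D")
    return _np.floor(delta / time_unit)   # 0.0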
class ProspectiveHotSpotContinuous(_predictors.DataTrainer):
"""Implements the prospective hotspot algorithm as a kernel density
estimation. A copy of the space/time kernel / weight is laid down over
each event and the result is summed. To allow compatibility with the grid
based method, we set a time unit and a grid size, but these are purely used
to scale the data appropriately.
"""
def __init__(self, grid_size=50, time_unit=_np.timedelta64(1, "W")):
self.grid = grid_size
self.time_unit = time_unit
self.weight = ClassicWeight()
def predict(self, cutoff_time, predict_time):
"""Calculate a continuous prediction.
:param cutoff_time: Ignore data with a timestamp after this time.
:param predict_time: Timestamp of the prediction. Used to calculate
the time difference between events and "now". Typically the same as
`cutoff_time`.
:return: An instance of :class:`ContinuousPrediction`
"""
if not cutoff_time <= predict_time:
raise ValueError("Data cutoff point should be before prediction time")
events = self.data.events_before(cutoff_time)
time_deltas = (_np.datetime64(predict_time) - events.timestamps) / self.time_unit
def kernel(points):
points = _np.asarray(points)
xdeltas = (points[0][:,None] - events.coords[0][None,:]) / self.grid
ydeltas = (points[1][:,None] - events.coords[1][None,:]) / self.grid
distances = _np.sqrt(xdeltas**2 + ydeltas**2)
times = time_deltas[None,:]
r = _np.sum(self.weight(times, distances), axis=-1)
# Return a scalar if input as scalar
return r[0] if len(r)==1 else r
return _predictors.KernelRiskPredictor(kernel, cell_width=self.grid,
cell_height=self.grid)
def grid_predict(self, cutoff_time, start, end, grid, samples=None):
"""Directly calculate a grid prediction, by taking the mean value over
both time and space. We also normalise the resulting grid prediction.
(But be aware that if you subsequently "mask" the grid, you will then
need to re-normalise).
:param cutoff_time: Ignore data with a timestamp after this time.
:param start: The start of the prediction time window. Typically the
same as `cutoff_time`.
:param end: The end of the prediction window. We will average the
kernel between `start` and `end`.
:param grid: An instance of :class:`data.BoundedGrid` to use as a basis
for the prediction.
:param samples: Number of samples to use, or `None` for auto-compute
:return: An instance of :class:`GridPredictionArray`.
"""
if not cutoff_time <= start:
raise ValueError("Data cutoff point should be before prediction time")
events = self.data.events_before(cutoff_time)
start, end = _np.datetime64(start), _np.datetime64(end)
# Rather than copy'n'paste a lot of code, we do this...
def kernel(points):
points = _np.asarray(points)
xdeltas = (points[0][:,None] - events.coords[0][None,:]) / self.grid
ydeltas = (points[1][:,None] - events.coords[1][None,:]) / self.grid
distances = _np.sqrt(xdeltas**2 + ydeltas**2)
num_points = points.shape[1] if len(points.shape) > 1 else 1
time_deltas = (end - start) * _np.random.random(num_points) + start
times = (time_deltas[:,None] - events.timestamps[None,:]) / self.time_unit
r = _np.sum(self.weight(times, distances), axis=-1)
# Return a scalar if input as scalar
return r[0] if len(r)==1 else r
krp = _predictors.KernelRiskPredictor(kernel, cell_width=self.grid,
cell_height=self.grid, samples=samples)
return _predictors.GridPredictionArray.from_continuous_prediction_grid(krp, grid)
|
artistic-2.0
| -8,806,066,455,696,376,000
| 41.568841
| 97
| 0.643204
| false
| 3.770539
| false
| false
| false
|
zionist/landing
|
landing/apps/core/migrations/0001_initial.py
|
1
|
2095
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Block',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', serialize=False, auto_created=True)),
('header', models.CharField(verbose_name='header', max_length=512)),
('content', models.CharField(verbose_name='content', max_length=1024)),
('block_image', models.ImageField(upload_to='', verbose_name='block_image')),
],
),
migrations.CreateModel(
name='EmailSettings',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', serialize=False, auto_created=True)),
('to', models.EmailField(verbose_name='to', max_length=1024)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
),
migrations.CreateModel(
name='LandingPage',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', serialize=False, auto_created=True)),
('header', models.CharField(verbose_name='header', max_length=2048)),
('content', models.TextField(verbose_name='content')),
('contacts', models.CharField(verbose_name='contacts', max_length=2048)),
('logo', models.ImageField(upload_to='', verbose_name='logo')),
('background', models.ImageField(upload_to='', verbose_name='background')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
),
migrations.AddField(
model_name='block',
name='landing_page',
field=models.ForeignKey(to='core.LandingPage'),
),
]
|
gpl-3.0
| -2,710,787,936,435,316,700
| 41.755102
| 114
| 0.570883
| false
| 4.419831
| false
| false
| false
|
tbpmig/mig-website
|
corporate/views.py
|
1
|
17269
|
from django.core.urlresolvers import reverse
from django.forms.models import modelformset_factory, modelform_factory
from django.http import HttpResponse
from django.shortcuts import redirect
from django.template import loader
from django_ajax.decorators import ajax
from corporate.auxiliary_scripts import update_resume_zips
from corporate.forms import AddContactForm, ContactFormSet
from corporate.models import CorporateTextField, CorporateResourceGuide
from corporate.models import CompanyContact, Company, JobField, CorporateEmail
from mig_main.utility import get_message_dict, Permissions
FORM_ERROR = 'Your submision contained errors, please correct and resubmit.'
def get_permissions(user):
permission_dict = {
'can_edit_corporate': Permissions.can_edit_corporate_page(user),
'can_add_contact': Permissions.can_add_corporate_contact(user),
'can_edit_contacts': Permissions.can_edit_corporate_page(user),
'can_add_company': Permissions.can_add_company(user),
}
return permission_dict
def get_common_context(request):
context_dict = get_message_dict(request)
contact_text = CorporateTextField.objects.filter(section='CT')
context_dict.update({
'request': request,
'contact_text': contact_text,
'main_nav': 'corporate',
})
return context_dict
def index(request):
request.session['current_page'] = request.path
template = loader.get_template('corporate/corporate.html')
involvement_text = CorporateTextField.objects.filter(section='OP')
context_dict = {
'involvement_text': involvement_text,
'subnav': 'index',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
return HttpResponse(template.render(context_dict, request))
def resumes(request):
request.session['current_page'] = request.path
template = loader.get_template('corporate/resume_book.html')
context_dict = {
'by_major_zip': 'TBP_resumes_by_major.zip',
'by_year_zip': 'TBP_resumes_by_year.zip',
'subnav': 'resumes',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
return HttpResponse(template.render(context_dict, request))
def update_corporate_page(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to edit the corporate page'
return redirect('corporate:index')
prefix = 'corporate_page'
CorporateTextForm = modelformset_factory(CorporateTextField,
extra=1, exclude=[])
formset = CorporateTextForm(request.POST or None,prefix=prefix)
if request.method == 'POST':
if formset.is_valid():
instances = formset.save()
request.session['success_message'] = 'Corporate page successfully updated.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'formset': formset,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Update Corporate Page',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Edit Corporate Page Text',
'help_text': ('The text shown on the corporate main page. This text '
'uses markdown syntax.'),
'can_add_row': False,
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_formset.html')
return HttpResponse(template.render(context_dict, request))
def update_resource_guide(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to edit the corporate page'
return redirect('corporate:index')
ResourceGuideForm = modelform_factory(CorporateResourceGuide, exclude=('active',))
if request.method == 'POST':
form = ResourceGuideForm(request.POST, request.FILES)
if form.is_valid():
instance = form.save(commit=False)
previously_active_guides = CorporateResourceGuide.objects.filter(active=True)
for guide in previously_active_guides:
guide.active = False
guide.save()
instance.active = True
instance.save()
update_resume_zips()
request.session['success_message'] = 'Corporate resource guide successfully updated.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
else:
form = ResourceGuideForm()
context_dict = {
'form': form,
'subnav': 'index',
'has_files': True,
'submit_name': 'Update Corporate Resource Guide',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Edit Corporate Resource Guide',
        'help_text': ('This guide is included in the resume zip files. Update '
'it when the information (or the officer) changes.'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def add_company_contact(request):
if not Permissions.can_add_corporate_contact(request.user):
request.session['error_message'] = 'You are not authorized to add company contacts'
return redirect('corporate:index')
prefix = 'corporate_page'
can_edit = Permissions.can_edit_corporate_page(request.user)
form = AddContactForm(request.POST or None,prefix=prefix,can_edit=can_edit)
if request.method == 'POST':
if form.is_valid():
if form.is_overdetermined():
request.session['warning_message'] = 'Name, email, phone, bio, and chapter are ignored when profile provided.'
instance = form.save()
request.session['success_message'] = 'Corporate contact successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
help_text = 'Add a contact to the company contacts database.'
if not can_edit:
help_text = help_text + (' Note: you are adding a suggested contact; '
'they will not be emailed unless approved by '
'the Corporate Relations Officer.')
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Add company contact',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Add company contact',
'help_text': help_text,
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def edit_company_contacts(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to add company contacts'
return redirect('corporate:index')
prefix = 'corporate_page'
formset = ContactFormSet(request.POST or None,prefix=prefix,initial=CompanyContact.get_contacts())
if request.method == 'POST':
if formset.is_valid():
overdetermined = formset.save()
if overdetermined:
request.session['warning_message'] = 'Name, email, phone, bio, and chapter are ignored when profile provided.'
request.session['success_message'] = 'Corporate contact successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'formset': formset,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Update company contacts',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Edit company contacts',
'help_text': ('Edit the list of company contacts. '
'Contact info is ignored if a profile is provided.'),
'can_add_row':True,
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_formset.html')
return HttpResponse(template.render(context_dict, request))
def add_company(request):
if not Permissions.can_add_company(request.user):
request.session['error_message'] = 'You are not authorized to add companies'
return redirect('corporate:index')
prefix = 'corporate_page'
AddCompanyForm = modelform_factory(Company, exclude=[])
form = AddCompanyForm(request.POST or None,prefix=prefix)
if request.method == 'POST':
if form.is_valid():
instance = form.save()
request.session['success_message'] = 'Company successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Add company',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Add company',
'help_text': ('Add company information. If the appropriate industry '
'is not present, you need to add that first'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def add_jobfield(request):
if not Permissions.can_add_company(request.user):
request.session['error_message'] = 'You are not authorized to add industries'
return redirect('corporate:index')
prefix = 'corporate_page'
AddIndustryForm = modelform_factory(JobField, exclude=[])
form = AddIndustryForm(request.POST or None,prefix=prefix)
if request.method == 'POST':
if form.is_valid():
instance = form.save()
request.session['success_message'] = 'Industry successfully added.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Add industry',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Add industry',
'help_text': ('Add industry information. Select all relevant majors.'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
def view_company_contacts(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to view company contacts'
return redirect('corporate:index')
context_dict = {
'contacts': CompanyContact.get_contacts(),
'subnav': 'index',
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('corporate/contacts_table.html')
return HttpResponse(template.render(context_dict, request))
def view_and_send_email(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to email companies'
return redirect('corporate:index')
existing_email = CorporateEmail.objects.filter(active=True)
if existing_email.exists():
existing_email = existing_email[0]
else:
request.session['error_message'] = 'No email specified'
return redirect('corporate:index')
contacts = CompanyContact.get_contacts(gets_email=True)
context_dict = {
'contacts': contacts,
'email':existing_email.preview_email(),
'mig_alum_email':existing_email.preview_email(mig_alum=True),
'other_alum_email':existing_email.preview_email(other_alum=True),
'previous_contact_email':existing_email.preview_email(previous_contact=True),
'personal_contact_email':existing_email.preview_email(personal_contact=True),
'subnav': 'index',
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('corporate/view_and_send_email.html')
return HttpResponse(template.render(context_dict, request))
@ajax
def send_corporate_email(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to email companies'
return {'fragments':{'#ajax-message':r'''<div id="ajax-message" class="alert alert-danger">
<button type="button" class="close" data-dismiss="alert">×</button>
<strong>Error:</strong>%s</div>'''%(request.session.pop('error_message'))}}
existing_email = CorporateEmail.objects.filter(active=True)
if existing_email.exists():
existing_email[0].send_corporate_email()
request.session['success_message']='Companies successfully emailed'
return {'fragments':{'#ajax-message':r'''<div id="ajax-message" class="alert alert-success">
<button type="button" class="close" data-dismiss="alert">×</button>
<strong>Success:</strong>%s</div>'''%(request.session.pop('success_message'))}}
else:
request.session['error_message'] = 'Company email text does not exist'
return {'fragments':{'#ajax-message':r'''<div id="ajax-message" class="alert alert-danger">
<button type="button" class="close" data-dismiss="alert">×</button>
<strong>Error:</strong>%s</div>'''%(request.session.pop('error_message'))}}
def update_corporate_email(request):
if not Permissions.can_edit_corporate_page(request.user):
request.session['error_message'] = 'You are not authorized to email companies'
return redirect('corporate:index')
prefix = 'corporate_email'
existing_email = CorporateEmail.objects.filter(active=True)
UpdateEmailForm = modelform_factory(CorporateEmail, exclude=[])
if existing_email.exists():
form = UpdateEmailForm(request.POST or None,prefix=prefix,instance=existing_email[0])
else:
form = UpdateEmailForm(request.POST or None,prefix=prefix)
if request.method == 'POST':
if form.is_valid():
instance = form.save(commit=False)
instance.id=None
instance.pk=None
instance.save()
if existing_email.exists():
ex=existing_email[0]
ex.active=False
ex.save()
request.session['success_message'] = 'Company email successfully updated.'
return redirect('corporate:index')
else:
request.session['error_message'] = FORM_ERROR
context_dict = {
'form': form,
'subnav': 'index',
'prefix': prefix,
'has_files': False,
'submit_name': 'Update corporate email',
'back_button': {'link': reverse('corporate:index'),
'text': 'To Corporate Page'},
'form_title': 'Update corporate email',
        'help_text': ('Update the email sent to companies to encourage their '
                      'participation in TBP corporate events.\n\nUse '
                      '{{company_name}} in the subject line as a placeholder '
                      'and {{extra_text}} in the body as a placeholder for the '
                      'extra text to members or personal contacts.'),
'base': 'corporate/base_corporate.html',
}
context_dict.update(get_common_context(request))
context_dict.update(get_permissions(request.user))
template = loader.get_template('generic_form.html')
return HttpResponse(template.render(context_dict, request))
|
apache-2.0
| -8,681,007,862,481,104,000
| 43.622739
| 128
| 0.642423
| false
| 3.976284
| false
| false
| false
|
Spiderlover/Toontown
|
toontown/battle/MovieDrop.py
|
1
|
17995
|
from direct.interval.IntervalGlobal import *
from BattleBase import *
from BattleProps import *
from BattleSounds import *
import MovieCamera
from direct.directnotify import DirectNotifyGlobal
import MovieUtil
import MovieNPCSOS
from MovieUtil import calcAvgSuitPos
from direct.showutil import Effects
notify = DirectNotifyGlobal.directNotify.newCategory('MovieDrop')
hitSoundFiles = ('AA_drop_flowerpot.ogg', 'AA_drop_sandbag.ogg', 'AA_drop_anvil.ogg', 'AA_drop_bigweight.ogg', 'AA_drop_safe.ogg', 'AA_drop_piano.ogg', 'AA_drop_boat.ogg')
missSoundFiles = ('AA_drop_flowerpot_miss.ogg', 'AA_drop_sandbag_miss.ogg', 'AA_drop_anvil_miss.ogg', 'AA_drop_bigweight_miss.ogg', 'AA_drop_safe_miss.ogg', 'AA_drop_piano_miss.ogg', 'AA_drop_boat_miss.ogg')
tDropShadow = 1.3
tSuitDodges = 2.45 + tDropShadow
tObjectAppears = 3.0 + tDropShadow
tButtonPressed = 2.44
dShrink = 0.3
dShrinkOnMiss = 0.1
dPropFall = 0.6
objects = ('flowerpot', 'sandbag', 'anvil', 'weight', 'safe', 'piano', 'ship')
objZOffsets = (0.75, 0.75, 0.0, 0.0, 0.0, 0.0, 0.0)
objStartingScales = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
landFrames = (12, 4, 1, 11, 11, 11, 2)
shoulderHeights = {'a': 13.28 / 4.0,
'b': 13.74 / 4.0,
'c': 10.02 / 4.0}
def doDrops(drops):
if len(drops) == 0:
return (None, None)
npcArrivals, npcDepartures, npcs = MovieNPCSOS.doNPCTeleports(drops)
suitDropsDict = {}
groupDrops = []
for drop in drops:
track = drop['track']
level = drop['level']
targets = drop['target']
if len(targets) == 1:
suitId = targets[0]['suit'].doId
if suitId in suitDropsDict:
suitDropsDict[suitId].append((drop, targets[0]))
else:
suitDropsDict[suitId] = [(drop, targets[0])]
elif level <= MAX_LEVEL_INDEX and attackAffectsGroup(track, level):
groupDrops.append(drop)
else:
for target in targets:
suitId = target['suit'].doId
if suitId in suitDropsDict:
otherDrops = suitDropsDict[suitId]
alreadyInList = 0
for oDrop in otherDrops:
if oDrop[0]['toon'] == drop['toon']:
alreadyInList = 1
if alreadyInList == 0:
suitDropsDict[suitId].append((drop, target))
else:
suitDropsDict[suitId] = [(drop, target)]
suitDrops = suitDropsDict.values()
def compFunc(a, b):
if len(a) > len(b):
return 1
elif len(a) < len(b):
return -1
return 0
suitDrops.sort(compFunc)
delay = 0.0
mtrack = Parallel(name='toplevel-drop')
npcDrops = {}
for st in suitDrops:
if len(st) > 0:
ival = __doSuitDrops(st, npcs, npcDrops)
if ival:
mtrack.append(Sequence(Wait(delay), ival))
delay = delay + TOON_DROP_SUIT_DELAY
dropTrack = Sequence(npcArrivals, mtrack, npcDepartures)
camDuration = mtrack.getDuration()
if groupDrops:
ival = __doGroupDrops(groupDrops)
dropTrack.append(ival)
camDuration += ival.getDuration()
enterDuration = npcArrivals.getDuration()
exitDuration = npcDepartures.getDuration()
camTrack = MovieCamera.chooseDropShot(drops, suitDropsDict, camDuration, enterDuration, exitDuration)
return (dropTrack, camTrack)
def __getSoundTrack(level, hitSuit, node = None):
if hitSuit:
soundEffect = globalBattleSoundCache.getSound(hitSoundFiles[level])
else:
soundEffect = globalBattleSoundCache.getSound(missSoundFiles[level])
soundTrack = Sequence()
if soundEffect:
buttonSound = globalBattleSoundCache.getSound('AA_drop_trigger_box.ogg')
fallingSound = None
buttonDelay = tButtonPressed - 0.3
fallingDuration = 1.5
if not level == UBER_GAG_LEVEL_INDEX:
fallingSound = globalBattleSoundCache.getSound('incoming_whistleALT.ogg')
soundTrack.append(Wait(buttonDelay))
soundTrack.append(SoundInterval(buttonSound, duration=0.67, node=node))
if fallingSound:
soundTrack.append(SoundInterval(fallingSound, duration=fallingDuration, node=node))
if not level == UBER_GAG_LEVEL_INDEX:
soundTrack.append(SoundInterval(soundEffect, node=node))
if level == UBER_GAG_LEVEL_INDEX:
if hitSuit:
uberDelay = tButtonPressed
else:
uberDelay = tButtonPressed - 0.1
oldSoundTrack = soundTrack
soundTrack = Parallel()
soundTrack.append(oldSoundTrack)
uberTrack = Sequence()
uberTrack.append(Wait(uberDelay))
uberTrack.append(SoundInterval(soundEffect, node=node))
soundTrack.append(uberTrack)
else:
soundTrack.append(Wait(0.1))
return soundTrack
def __doSuitDrops(dropTargetPairs, npcs, npcDrops):
toonTracks = Parallel()
delay = 0.0
alreadyDodged = 0
alreadyTeased = 0
for dropTargetPair in dropTargetPairs:
drop = dropTargetPair[0]
level = drop['level']
objName = objects[level]
target = dropTargetPair[1]
track = __dropObjectForSingle(drop, delay, objName, level, alreadyDodged, alreadyTeased, npcs, target, npcDrops)
if track:
toonTracks.append(track)
delay += TOON_DROP_DELAY
hp = target['hp']
if hp <= 0:
if level >= 3:
alreadyTeased = 1
else:
alreadyDodged = 1
return toonTracks
def __doGroupDrops(groupDrops):
toonTracks = Parallel()
delay = 0.0
alreadyDodged = 0
alreadyTeased = 0
for drop in groupDrops:
battle = drop['battle']
level = drop['level']
centerPos = calcAvgSuitPos(drop)
targets = drop['target']
numTargets = len(targets)
closestTarget = -1
nearestDistance = 100000.0
for i in xrange(numTargets):
suit = drop['target'][i]['suit']
suitPos = suit.getPos(battle)
displacement = Vec3(centerPos)
displacement -= suitPos
distance = displacement.lengthSquared()
if distance < nearestDistance:
closestTarget = i
nearestDistance = distance
track = __dropGroupObject(drop, delay, closestTarget, alreadyDodged, alreadyTeased)
if track:
toonTracks.append(track)
delay = delay + TOON_DROP_SUIT_DELAY
hp = drop['target'][closestTarget]['hp']
if hp <= 0:
if level >= 3:
alreadyTeased = 1
else:
alreadyDodged = 1
return toonTracks
def __dropGroupObject(drop, delay, closestTarget, alreadyDodged, alreadyTeased):
level = drop['level']
objName = objects[level]
target = drop['target'][closestTarget]
suit = drop['target'][closestTarget]['suit']
npcDrops = {}
npcs = []
returnedParallel = __dropObject(drop, delay, objName, level, alreadyDodged, alreadyTeased, npcs, target, npcDrops)
for i in xrange(len(drop['target'])):
target = drop['target'][i]
suitTrack = __createSuitTrack(drop, delay, level, alreadyDodged, alreadyTeased, target, npcs)
if suitTrack:
returnedParallel.append(suitTrack)
return returnedParallel
def __dropObjectForSingle(drop, delay, objName, level, alreadyDodged, alreadyTeased, npcs, target, npcDrops):
singleDropParallel = __dropObject(drop, delay, objName, level, alreadyDodged, alreadyTeased, npcs, target, npcDrops)
suitTrack = __createSuitTrack(drop, delay, level, alreadyDodged, alreadyTeased, target, npcs)
if suitTrack:
singleDropParallel.append(suitTrack)
return singleDropParallel
def __dropObject(drop, delay, objName, level, alreadyDodged, alreadyTeased, npcs, target, npcDrops):
toon = drop['toon']
repeatNPC = 0
battle = drop['battle']
if 'npc' in drop:
toon = drop['npc']
if toon in npcDrops:
repeatNPC = 1
else:
npcDrops[toon] = 1
origHpr = Vec3(0, 0, 0)
else:
origHpr = toon.getHpr(battle)
hpbonus = drop['hpbonus']
suit = target['suit']
hp = target['hp']
hitSuit = hp > 0
died = target['died']
leftSuits = target['leftSuits']
rightSuits = target['rightSuits']
kbbonus = target['kbbonus']
suitPos = suit.getPos(battle)
majorObject = level >= 3
if repeatNPC == 0:
button = globalPropPool.getProp('button')
buttonType = globalPropPool.getPropType('button')
button2 = MovieUtil.copyProp(button)
buttons = [button, button2]
hands = toon.getLeftHands()
object = globalPropPool.getProp(objName)
objectType = globalPropPool.getPropType(objName)
if objName == 'weight':
object.setScale(object.getScale() * 0.75)
elif objName == 'safe':
object.setScale(object.getScale() * 0.85)
node = object.node()
node.setBounds(OmniBoundingVolume())
node.setFinal(1)
soundTrack = __getSoundTrack(level, hitSuit, toon)
toonTrack = Sequence()
if repeatNPC == 0:
toonFace = Func(toon.headsUp, battle, suitPos)
toonTrack.append(Wait(delay))
toonTrack.append(toonFace)
toonTrack.append(ActorInterval(toon, 'pushbutton'))
toonTrack.append(Func(toon.loop, 'neutral'))
toonTrack.append(Func(toon.setHpr, battle, origHpr))
buttonTrack = Sequence()
if repeatNPC == 0:
buttonShow = Func(MovieUtil.showProps, buttons, hands)
buttonScaleUp = LerpScaleInterval(button, 1.0, button.getScale(), startScale=Point3(0.01, 0.01, 0.01))
buttonScaleDown = LerpScaleInterval(button, 1.0, Point3(0.01, 0.01, 0.01), startScale=button.getScale())
buttonHide = Func(MovieUtil.removeProps, buttons)
buttonTrack.append(Wait(delay))
buttonTrack.append(buttonShow)
buttonTrack.append(buttonScaleUp)
buttonTrack.append(Wait(2.5))
buttonTrack.append(buttonScaleDown)
buttonTrack.append(buttonHide)
objectTrack = Sequence()
def posObject(object, suit, level, majorObject, miss, battle = battle):
object.reparentTo(battle)
if battle.isSuitLured(suit):
suitPos, suitHpr = battle.getActorPosHpr(suit)
object.setPos(suitPos)
object.setHpr(suitHpr)
if level >= 3:
object.setY(object.getY() + 2)
else:
object.setPos(suit.getPos(battle))
object.setHpr(suit.getHpr(battle))
if miss and level >= 3:
object.setY(object.getY(battle) + 5)
if not majorObject:
if not miss:
shoulderHeight = shoulderHeights[suit.style.body] * suit.scale
object.setZ(object.getPos(battle)[2] + shoulderHeight)
object.setZ(object.getPos(battle)[2] + objZOffsets[level])
objectTrack.append(Func(battle.movie.needRestoreRenderProp, object))
objInit = Func(posObject, object, suit, level, majorObject, hp <= 0)
objectTrack.append(Wait(delay + tObjectAppears))
objectTrack.append(objInit)
if hp > 0 or level == 1 or level == 2:
if hasattr(object, 'getAnimControls'):
animProp = ActorInterval(object, objName)
shrinkProp = LerpScaleInterval(object, dShrink, Point3(0.01, 0.01, 0.01), startScale=object.getScale())
objAnimShrink = ParallelEndTogether(animProp, shrinkProp)
objectTrack.append(objAnimShrink)
else:
startingScale = objStartingScales[level]
object2 = MovieUtil.copyProp(object)
posObject(object2, suit, level, majorObject, hp <= 0)
endingPos = object2.getPos()
startPos = Point3(endingPos[0], endingPos[1], endingPos[2] + 5)
startHpr = object2.getHpr()
endHpr = Point3(startHpr[0] + 90, startHpr[1], startHpr[2])
animProp = LerpPosInterval(object, landFrames[level] / 24.0, endingPos, startPos=startPos)
shrinkProp = LerpScaleInterval(object, dShrink, Point3(0.01, 0.01, 0.01), startScale=startingScale)
bounceProp = Effects.createZBounce(object, 2, endingPos, 0.5, 1.5)
objAnimShrink = Sequence(Func(object.setScale, startingScale), Func(object.setH, endHpr[0]), animProp, bounceProp, Wait(1.5), shrinkProp)
objectTrack.append(objAnimShrink)
MovieUtil.removeProp(object2)
elif hasattr(object, 'getAnimControls'):
animProp = ActorInterval(object, objName, duration=landFrames[level] / 24.0)
def poseProp(prop, animName, level):
prop.pose(animName, landFrames[level])
poseProp = Func(poseProp, object, objName, level)
wait = Wait(1.0)
shrinkProp = LerpScaleInterval(object, dShrinkOnMiss, Point3(0.01, 0.01, 0.01), startScale=object.getScale())
objectTrack.append(animProp)
objectTrack.append(poseProp)
objectTrack.append(wait)
objectTrack.append(shrinkProp)
else:
startingScale = objStartingScales[level]
object2 = MovieUtil.copyProp(object)
posObject(object2, suit, level, majorObject, hp <= 0)
endingPos = object2.getPos()
startPos = Point3(endingPos[0], endingPos[1], endingPos[2] + 5)
startHpr = object2.getHpr()
endHpr = Point3(startHpr[0] + 90, startHpr[1], startHpr[2])
animProp = LerpPosInterval(object, landFrames[level] / 24.0, endingPos, startPos=startPos)
shrinkProp = LerpScaleInterval(object, dShrinkOnMiss, Point3(0.01, 0.01, 0.01), startScale=startingScale)
bounceProp = Effects.createZBounce(object, 2, endingPos, 0.5, 1.5)
objAnimShrink = Sequence(Func(object.setScale, startingScale), Func(object.setH, endHpr[0]), animProp, bounceProp, Wait(1.5), shrinkProp)
objectTrack.append(objAnimShrink)
MovieUtil.removeProp(object2)
objectTrack.append(Func(MovieUtil.removeProp, object))
objectTrack.append(Func(battle.movie.clearRenderProp, object))
dropShadow = MovieUtil.copyProp(suit.getShadowJoint())
if level == 0:
dropShadow.setScale(0.5)
elif level <= 2:
dropShadow.setScale(0.8)
elif level == 3:
dropShadow.setScale(2.0)
elif level == 4:
dropShadow.setScale(2.3)
else:
dropShadow.setScale(3.6)
def posShadow(dropShadow = dropShadow, suit = suit, battle = battle, hp = hp, level = level):
dropShadow.reparentTo(battle)
if battle.isSuitLured(suit):
suitPos, suitHpr = battle.getActorPosHpr(suit)
dropShadow.setPos(suitPos)
dropShadow.setHpr(suitHpr)
if level >= 3:
dropShadow.setY(dropShadow.getY() + 2)
else:
dropShadow.setPos(suit.getPos(battle))
dropShadow.setHpr(suit.getHpr(battle))
if hp <= 0 and level >= 3:
dropShadow.setY(dropShadow.getY(battle) + 5)
dropShadow.setZ(dropShadow.getZ() + 0.5)
shadowTrack = Sequence(Wait(delay + tButtonPressed), Func(battle.movie.needRestoreRenderProp, dropShadow), Func(posShadow), LerpScaleInterval(dropShadow, tObjectAppears - tButtonPressed, dropShadow.getScale(), startScale=Point3(0.01, 0.01, 0.01)), Wait(0.3), Func(MovieUtil.removeProp, dropShadow), Func(battle.movie.clearRenderProp, dropShadow))
return Parallel(toonTrack, soundTrack, buttonTrack, objectTrack, shadowTrack)
def __createSuitTrack(drop, delay, level, alreadyDodged, alreadyTeased, target, npcs):
toon = drop['toon']
if 'npc' in drop:
toon = drop['npc']
battle = drop['battle']
majorObject = level >= 3
suit = target['suit']
hp = target['hp']
hitSuit = hp > 0
died = target['died']
revived = target['revived']
leftSuits = target['leftSuits']
rightSuits = target['rightSuits']
kbbonus = target['kbbonus']
hpbonus = drop['hpbonus']
if hp > 0:
suitTrack = Sequence()
showDamage = Func(suit.showHpText, -hp, openEnded=0)
updateHealthBar = Func(suit.updateHealthBar, hp)
if majorObject:
anim = 'flatten'
else:
anim = 'drop-react'
suitReact = ActorInterval(suit, anim)
suitTrack.append(Wait(delay + tObjectAppears))
suitTrack.append(showDamage)
suitTrack.append(updateHealthBar)
suitGettingHit = Parallel(suitReact)
if level == UBER_GAG_LEVEL_INDEX:
gotHitSound = globalBattleSoundCache.getSound('AA_drop_boat_cog.ogg')
suitGettingHit.append(SoundInterval(gotHitSound, node=toon))
suitTrack.append(suitGettingHit)
bonusTrack = None
if hpbonus > 0:
bonusTrack = Sequence(Wait(delay + tObjectAppears + 0.75), Func(suit.showHpText, -hpbonus, 1, openEnded=0), Func(suit.updateHealthBar, hpbonus))
if revived != 0:
suitTrack.append(MovieUtil.createSuitReviveTrack(suit, toon, battle, npcs))
elif died != 0:
suitTrack.append(MovieUtil.createSuitDeathTrack(suit, toon, battle, npcs))
else:
suitTrack.append(Func(suit.loop, 'neutral'))
if bonusTrack != None:
suitTrack = Parallel(suitTrack, bonusTrack)
elif kbbonus == 0:
suitTrack = Sequence(Wait(delay + tObjectAppears), Func(MovieUtil.indicateMissed, suit, 0.6), Func(suit.loop, 'neutral'))
else:
if alreadyDodged > 0:
return
if level >= 3:
if alreadyTeased > 0:
return
else:
suitTrack = MovieUtil.createSuitTeaseMultiTrack(suit, delay=delay + tObjectAppears)
else:
suitTrack = MovieUtil.createSuitDodgeMultitrack(delay + tSuitDodges, suit, leftSuits, rightSuits)
return suitTrack
|
mit
| 155,016,294,914,021,400
| 40.367816
| 350
| 0.62973
| false
| 3.357276
| false
| false
| false
|
bhaveshAn/crisscross
|
crisscross/__init__.py
|
1
|
3025
|
'''
Crisscross
==========
'''
__all__ = ('accelerometer', 'audio', 'barometer', 'battery', 'call', 'camera',
'compass', 'email', 'filechooser', 'flash', 'gps', 'gravity',
'gyroscope', 'irblaster', 'light', 'orientation', 'notification',
'proximity', 'sms', 'tts', 'uniqueid', 'vibrator', 'wifi',
'temperature', 'bluetooth')
__version__ = '1.0.0dev'
from crisscross import facades
from crisscross.utils import Proxy
#: Accelerometer proxy to :class:`crisscross.facades.Accelerometer`
accelerometer = Proxy('accelerometer', facades.Accelerometer)
#: Audio proxy to :class:`crisscross.facades.Audio`
audio = Proxy('audio', facades.Audio)
#: Barometer proxy to :class:`crisscross.facades.Barometer`
barometer = Proxy('barometer', facades.Barometer)
#: Battery proxy to :class:`crisscross.facades.Battery`
battery = Proxy('battery', facades.Battery)
#: Call proxy to :class:`crisscross.facades.Call`
call = Proxy('call', facades.Call)
#: Compass proxy to :class:`crisscross.facades.Compass`
compass = Proxy('compass', facades.Compass)
#: Camera proxy to :class:`crisscross.facades.Camera`
camera = Proxy('camera', facades.Camera)
#: Email proxy to :class:`crisscross.facades.Email`
email = Proxy('email', facades.Email)
#: FileChooser proxy to :class:`crisscross.facades.FileChooser`
filechooser = Proxy('filechooser', facades.FileChooser)
#: GPS proxy to :class:`crisscross.facades.GPS`
gps = Proxy('gps', facades.GPS)
#: Gravity proxy to :class:`crisscross.facades.Gravity`
gravity = Proxy('gravity', facades.Gravity)
#: Gyroscope proxy to :class:`crisscross.facades.Gyroscope`
gyroscope = Proxy('gyroscope', facades.Gyroscope)
#: IrBlaster proxy to :class:`crisscross.facades.IrBlaster`
irblaster = Proxy('irblaster', facades.IrBlaster)
#: Light proxy to :class:`crisscross.facades.Light`
light = Proxy('light', facades.Light)
#: Orientation proxy to :class:`crisscross.facades.Orientation`
orientation = Proxy('orientation', facades.Orientation)
#: Notification proxy to :class:`crisscross.facades.Notification`
notification = Proxy('notification', facades.Notification)
#: Proximity proxy to :class:`crisscross.facades.Proximity`
proximity = Proxy('proximity', facades.Proximity)
#: Sms proxy to :class:`crisscross.facades.Sms`
sms = Proxy('sms', facades.Sms)
#: TTS proxy to :class:`crisscross.facades.TTS`
tts = Proxy('tts', facades.TTS)
#: UniqueID proxy to :class:`crisscross.facades.UniqueID`
uniqueid = Proxy('uniqueid', facades.UniqueID)
#: Vibrator proxy to :class:`crisscross.facades.Vibrator`
vibrator = Proxy('vibrator', facades.Vibrator)
#: Flash proxy to :class:`crisscross.facades.Flash`
flash = Proxy('flash', facades.Flash)
#: Wifi proxy to :class:`crisscross.facades.Wifi`
wifi = Proxy('wifi', facades.Wifi)
#: Temperature proxy to :class:`crisscross.facades.Temperature`
temperature = Proxy('temperature', facades.Temperature)
#: Bluetooth proxy to :class:`crisscross.facades.Bluetooth`
bluetooth = Proxy('bluetooth', facades.Bluetooth)
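# A minimal usage sketch. Whether a given facade is implemented depends on the
# platform backend; the notify() call below mirrors the plyer-style facade API
# that this package is modelled on, so the exact signature may differ here.
#
#     from crisscross import notification
#     notification.notify(title='Example', message='Hello from crisscross')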
|
mit
| 587,959,898,850,672,500
| 31.880435
| 78
| 0.727603
| false
| 2.853774
| false
| false
| false
|
Jozhogg/iris
|
lib/iris/fileformats/netcdf.py
|
1
|
69253
|
# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Module to support the loading of a NetCDF file into an Iris cube.
See also: `netCDF4 python <http://code.google.com/p/netcdf4-python/>`_.
Also refer to document 'NetCDF Climate and Forecast (CF) Metadata Conventions',
Version 1.4, 27 February 2009.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import filter, range
import collections
import os
import os.path
import string
import warnings
import biggus
import iris.proxy
iris.proxy.apply_proxy('netCDF4', globals())
import numpy as np
import numpy.ma as ma
from pyke import knowledge_engine
import iris.analysis
from iris.aux_factory import HybridHeightFactory, HybridPressureFactory, \
OceanSigmaZFactory, OceanSigmaFactory, OceanSFactory, OceanSg1Factory, \
OceanSg2Factory
import iris.coord_systems
import iris.coords
import iris.cube
import iris.exceptions
import iris.fileformats.cf
import iris.fileformats._pyke_rules
import iris.io
import iris.unit
import iris.util
# Show Pyke inference engine statistics.
DEBUG = False
# Pyke CF related file names.
_PYKE_RULE_BASE = 'fc_rules_cf'
_PYKE_FACT_BASE = 'facts_cf'
# Standard CML spatio-temporal axis names.
SPATIO_TEMPORAL_AXES = ['t', 'z', 'y', 'x']
# Pass through CF attributes:
# - comment
# - Conventions
# - flag_masks
# - flag_meanings
# - flag_values
# - history
# - institution
# - reference
# - source
# - title
# - positive
#
_CF_ATTRS = ['add_offset', 'ancillary_variables', 'axis', 'bounds', 'calendar',
'cell_measures', 'cell_methods', 'climatology', 'compress',
'coordinates', '_FillValue', 'formula_terms', 'grid_mapping',
'leap_month', 'leap_year', 'long_name', 'missing_value',
'month_lengths', 'scale_factor', 'standard_error_multiplier',
'standard_name', 'units', 'valid_max', 'valid_min', 'valid_range']
# CF attributes that should not be global.
_CF_DATA_ATTRS = ['flag_masks', 'flag_meanings', 'flag_values',
'instance_dimension', 'sample_dimension',
'standard_error_multiplier']
# CF attributes that should only be global.
_CF_GLOBAL_ATTRS = ['conventions', 'featureType', 'history', 'title']
# UKMO specific attributes that should not be global.
_UKMO_DATA_ATTRS = ['STASH', 'um_stash_source', 'ukmo__process_flags']
CF_CONVENTIONS_VERSION = 'CF-1.5'
_FactoryDefn = collections.namedtuple('_FactoryDefn', ('primary', 'std_name',
'formula_terms_format'))
_FACTORY_DEFNS = {
HybridHeightFactory: _FactoryDefn(
primary='delta',
std_name='atmosphere_hybrid_height_coordinate',
formula_terms_format='a: {delta} b: {sigma} orog: {orography}'),
HybridPressureFactory: _FactoryDefn(
primary='delta',
std_name='atmosphere_hybrid_sigma_pressure_coordinate',
formula_terms_format='ap: {delta} b: {sigma} '
'ps: {surface_air_pressure}'),
OceanSigmaZFactory: _FactoryDefn(
primary='zlev',
std_name='ocean_sigma_z_coordinate',
formula_terms_format='sigma: {sigma} eta: {eta} depth: {depth} '
'depth_c: {depth_c} nsigma: {nsigma} zlev: {zlev}'),
OceanSigmaFactory: _FactoryDefn(
primary='sigma',
std_name='ocean_sigma_coordinate',
formula_terms_format='sigma: {sigma} eta: {eta} depth: {depth}'),
OceanSFactory: _FactoryDefn(
primary='s',
std_name='ocean_s_coordinate',
formula_terms_format='s: {s} eta: {eta} depth: {depth} a: {a} b: {b} '
'depth_c: {depth_c}'),
OceanSg1Factory: _FactoryDefn(
primary='s',
std_name='ocean_s_coordinate_g1',
formula_terms_format='s: {s} c: {c} eta: {eta} depth: {depth} '
'depth_c: {depth_c}'),
OceanSg2Factory: _FactoryDefn(
primary='s',
std_name='ocean_s_coordinate_g2',
formula_terms_format='s: {s} c: {c} eta: {eta} depth: {depth} '
'depth_c: {depth_c}')
}
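# Illustrative example of how a formula_terms_format entry is rendered when the
# formula terms are written out (the variable names below are hypothetical):
#
#     defn = _FACTORY_DEFNS[HybridHeightFactory]
#     defn.formula_terms_format.format(delta='level_height', sigma='sigma',
#                                      orography='surface_altitude')
#     # -> 'a: level_height b: sigma orog: surface_altitude'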
class CFNameCoordMap(object):
"""Provide a simple CF name to CF coordinate mapping."""
_Map = collections.namedtuple('_Map', ['name', 'coord'])
def __init__(self):
self._map = []
def append(self, name, coord):
"""
Append the given name and coordinate pair to the mapping.
Args:
* name:
CF name of the associated coordinate.
* coord:
The coordinate of the associated CF name.
Returns:
None.
"""
self._map.append(CFNameCoordMap._Map(name, coord))
@property
def names(self):
"""Return all the CF names."""
return [pair.name for pair in self._map]
@property
def coords(self):
"""Return all the coordinates."""
return [pair.coord for pair in self._map]
def name(self, coord):
"""
Return the CF name, given a coordinate
Args:
* coord:
The coordinate of the associated CF name.
Returns:
The CF name.
"""
result = None
for pair in self._map:
if coord == pair.coord:
result = pair.name
break
if result is None:
msg = 'Coordinate is not mapped, {!r}'.format(coord)
raise KeyError(msg)
return result
def coord(self, name):
"""
Return the coordinate, given a CF name.
Args:
* name:
CF name of the associated coordinate.
Returns:
The coordinate.
"""
result = None
for pair in self._map:
if name == pair.name:
result = pair.coord
break
if result is None:
msg = 'Name is not mapped, {!r}'.format(name)
raise KeyError(msg)
return result
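# A minimal usage sketch of CFNameCoordMap (the names below are hypothetical):
#
#     mapping = CFNameCoordMap()
#     mapping.append('air_temperature', temperature_coord)
#     mapping.name(temperature_coord)    # -> 'air_temperature'
#     mapping.coord('air_temperature')   # -> temperature_coord
#     mapping.coord('unknown_name')      # raises KeyError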
def _pyke_kb_engine():
"""Return the PyKE knowledge engine for CF->cube conversion."""
pyke_dir = os.path.join(os.path.dirname(__file__), '_pyke_rules')
compile_dir = os.path.join(pyke_dir, 'compiled_krb')
engine = None
if os.path.exists(compile_dir):
tmpvar = [os.path.getmtime(os.path.join(compile_dir, fname)) for
fname in os.listdir(compile_dir) if not
fname.startswith('_')]
if tmpvar:
oldest_pyke_compile_file = min(tmpvar)
rule_age = os.path.getmtime(
os.path.join(pyke_dir, _PYKE_RULE_BASE + '.krb'))
if oldest_pyke_compile_file >= rule_age:
# Initialise the pyke inference engine.
engine = knowledge_engine.engine(
(None, 'iris.fileformats._pyke_rules.compiled_krb'))
if engine is None:
engine = knowledge_engine.engine(iris.fileformats._pyke_rules)
return engine
class NetCDFDataProxy(object):
"""A reference to the data payload of a single NetCDF file variable."""
__slots__ = ('shape', 'dtype', 'path', 'variable_name', 'fill_value')
def __init__(self, shape, dtype, path, variable_name, fill_value):
self.shape = shape
self.dtype = dtype
self.path = path
self.variable_name = variable_name
self.fill_value = fill_value
@property
def ndim(self):
return len(self.shape)
def __getitem__(self, keys):
dataset = netCDF4.Dataset(self.path)
try:
variable = dataset.variables[self.variable_name]
# Get the NetCDF variable data and slice.
data = variable[keys]
finally:
dataset.close()
return data
def __repr__(self):
fmt = '<{self.__class__.__name__} shape={self.shape}' \
' dtype={self.dtype!r} path={self.path!r}' \
' variable_name={self.variable_name!r}>'
return fmt.format(self=self)
def __getstate__(self):
return {attr: getattr(self, attr) for attr in self.__slots__}
def __setstate__(self, state):
for key, value in state.iteritems():
setattr(self, key, value)
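# Illustrative sketch of the proxy above (path and variable name are
# hypothetical): data access is deferred until the proxy is indexed, and each
# __getitem__ call opens the file, reads the requested slice and closes it
# again.
#
#     proxy = NetCDFDataProxy((12, 96, 73), np.dtype('f4'),
#                             '/tmp/example.nc', 'air_temperature', -1e30)
#     chunk = proxy[0, :10, :10]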
def _assert_case_specific_facts(engine, cf, cf_group):
# Initialise pyke engine "provides" hooks.
engine.provides['coordinates'] = []
# Assert facts for CF coordinates.
for cf_name in cf_group.coordinates.iterkeys():
engine.add_case_specific_fact(_PYKE_FACT_BASE, 'coordinate',
(cf_name,))
# Assert facts for CF auxiliary coordinates.
for cf_name in cf_group.auxiliary_coordinates.iterkeys():
engine.add_case_specific_fact(_PYKE_FACT_BASE, 'auxiliary_coordinate',
(cf_name,))
# Assert facts for CF grid_mappings.
for cf_name in cf_group.grid_mappings.iterkeys():
engine.add_case_specific_fact(_PYKE_FACT_BASE, 'grid_mapping',
(cf_name,))
# Assert facts for CF labels.
for cf_name in cf_group.labels.iterkeys():
engine.add_case_specific_fact(_PYKE_FACT_BASE, 'label',
(cf_name,))
# Assert facts for CF formula terms associated with the cf_group
# of the CF data variable.
formula_root = set()
for cf_var in cf.cf_group.formula_terms.itervalues():
for cf_root, cf_term in cf_var.cf_terms_by_root.iteritems():
# Only assert this fact if the formula root variable is
# defined in the CF group of the CF data variable.
if cf_root in cf_group:
formula_root.add(cf_root)
engine.add_case_specific_fact(_PYKE_FACT_BASE, 'formula_term',
(cf_var.cf_name, cf_root,
cf_term))
for cf_root in formula_root:
engine.add_case_specific_fact(_PYKE_FACT_BASE, 'formula_root',
(cf_root,))
def _pyke_stats(engine, cf_name):
if DEBUG:
print('-' * 80)
print('CF Data Variable: %r' % cf_name)
engine.print_stats()
print('Rules Triggered:')
for rule in sorted(list(engine.rule_triggered)):
print('\t%s' % rule)
print('Case Specific Facts:')
kb_facts = engine.get_kb(_PYKE_FACT_BASE)
for key in kb_facts.entity_lists.iterkeys():
for arg in kb_facts.entity_lists[key].case_specific_facts:
print('\t%s%s' % (key, arg))
def _set_attributes(attributes, key, value):
"""Set attributes dictionary, converting unicode strings appropriately."""
if isinstance(value, unicode):
try:
attributes[str(key)] = str(value)
except UnicodeEncodeError:
attributes[str(key)] = value
else:
attributes[str(key)] = value
def _load_cube(engine, cf, cf_var, filename):
"""Create the cube associated with the CF-netCDF data variable."""
# Figure out what the eventual data type will be after any scale/offset
# transforms.
dummy_data = np.zeros(1, dtype=cf_var.dtype)
if hasattr(cf_var, 'scale_factor'):
dummy_data = cf_var.scale_factor * dummy_data
if hasattr(cf_var, 'add_offset'):
dummy_data = cf_var.add_offset + dummy_data
# Create cube with deferred data, but no metadata
fill_value = getattr(cf_var.cf_data, '_FillValue',
netCDF4.default_fillvals[cf_var.dtype.str[1:]])
proxy = NetCDFDataProxy(cf_var.shape, dummy_data.dtype,
filename, cf_var.cf_name, fill_value)
data = biggus.OrthoArrayAdapter(proxy)
cube = iris.cube.Cube(data)
# Reset the pyke inference engine.
engine.reset()
# Initialise pyke engine rule processing hooks.
engine.cf_var = cf_var
engine.cube = cube
engine.provides = {}
engine.requires = {}
engine.rule_triggered = set()
engine.filename = filename
# Assert any case-specific facts.
_assert_case_specific_facts(engine, cf, cf_var.cf_group)
# Run pyke inference engine with forward chaining rules.
engine.activate(_PYKE_RULE_BASE)
# Populate coordinate attributes with the untouched attributes from the
# associated CF-netCDF variable.
coordinates = engine.provides.get('coordinates', [])
attribute_predicate = lambda item: item[0] not in _CF_ATTRS
for coord, cf_var_name in coordinates:
tmpvar = filter(attribute_predicate,
cf.cf_group[cf_var_name].cf_attrs_unused())
for attr_name, attr_value in tmpvar:
_set_attributes(coord.attributes, attr_name, attr_value)
tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused())
# Attach untouched attributes of the associated CF-netCDF data variable to
# the cube.
for attr_name, attr_value in tmpvar:
_set_attributes(cube.attributes, attr_name, attr_value)
# Show pyke session statistics.
_pyke_stats(engine, cf_var.cf_name)
return cube
def _load_aux_factory(engine, cube):
"""
Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory.
"""
formula_type = engine.requires.get('formula_type')
if formula_type in ['atmosphere_hybrid_height_coordinate',
'atmosphere_hybrid_sigma_pressure_coordinate',
'ocean_sigma_z_coordinate', 'ocean_sigma_coordinate',
'ocean_s_coordinate', 'ocean_s_coordinate_g1',
'ocean_s_coordinate_g2']:
def coord_from_term(term):
# Convert term names to coordinates (via netCDF variable names).
name = engine.requires['formula_terms'][term]
for coord, cf_var_name in engine.provides['coordinates']:
if cf_var_name == name:
return coord
warnings.warn('Unable to find coordinate for variable '
'{!r}'.format(name))
if formula_type == 'atmosphere_hybrid_height_coordinate':
delta = coord_from_term('a')
sigma = coord_from_term('b')
orography = coord_from_term('orog')
factory = HybridHeightFactory(delta, sigma, orography)
elif formula_type == 'atmosphere_hybrid_sigma_pressure_coordinate':
# Hybrid pressure has two valid versions of its formula terms:
# "p0: var1 a: var2 b: var3 ps: var4" or
# "ap: var1 b: var2 ps: var3" where "ap = p0 * a"
try:
# Attempt to get the "ap" term.
delta = coord_from_term('ap')
except (KeyError, ValueError):
# The "ap" term is unavailable, so try getting terms "p0"
# and "a" terms in order to derive an "ap" equivalent term.
coord_p0 = coord_from_term('p0')
if coord_p0.shape != (1,):
msg = 'Expecting {!r} to be a scalar reference pressure ' \
'coordinate, got shape {!r}'.format(coord_p0.var_name,
coord_p0.shape)
raise ValueError(msg)
if coord_p0.has_bounds():
msg = 'Ignoring atmosphere hybrid sigma pressure scalar ' \
'coordinate {!r} bounds.'.format(coord_p0.name())
warnings.warn(msg)
coord_a = coord_from_term('a')
delta = coord_a * coord_p0.points[0]
delta.units = coord_a.units * coord_p0.units
delta.rename('vertical pressure')
delta.var_name = 'ap'
cube.add_aux_coord(delta, cube.coord_dims(coord_a))
sigma = coord_from_term('b')
surface_air_pressure = coord_from_term('ps')
factory = HybridPressureFactory(delta, sigma, surface_air_pressure)
elif formula_type == 'ocean_sigma_z_coordinate':
sigma = coord_from_term('sigma')
eta = coord_from_term('eta')
depth = coord_from_term('depth')
depth_c = coord_from_term('depth_c')
nsigma = coord_from_term('nsigma')
zlev = coord_from_term('zlev')
factory = OceanSigmaZFactory(sigma, eta, depth,
depth_c, nsigma, zlev)
elif formula_type == 'ocean_sigma_coordinate':
sigma = coord_from_term('sigma')
eta = coord_from_term('eta')
depth = coord_from_term('depth')
factory = OceanSigmaFactory(sigma, eta, depth)
elif formula_type == 'ocean_s_coordinate':
s = coord_from_term('s')
eta = coord_from_term('eta')
depth = coord_from_term('depth')
a = coord_from_term('a')
depth_c = coord_from_term('depth_c')
b = coord_from_term('b')
factory = OceanSFactory(s, eta, depth, a, b, depth_c)
elif formula_type == 'ocean_s_coordinate_g1':
s = coord_from_term('s')
c = coord_from_term('c')
eta = coord_from_term('eta')
depth = coord_from_term('depth')
depth_c = coord_from_term('depth_c')
factory = OceanSg1Factory(s, c, eta, depth,
depth_c)
elif formula_type == 'ocean_s_coordinate_g2':
s = coord_from_term('s')
c = coord_from_term('c')
eta = coord_from_term('eta')
depth = coord_from_term('depth')
depth_c = coord_from_term('depth_c')
factory = OceanSg2Factory(s, c, eta, depth,
depth_c)
cube.add_aux_factory(factory)
def load_cubes(filenames, callback=None):
"""
Loads cubes from a list of NetCDF filenames/URLs.
Args:
* filenames (string/list):
One or more NetCDF filenames/DAP URLs to load from.
Kwargs:
* callback (callable function):
Function which can be passed on to :func:`iris.io.run_callback`.
Returns:
Generator of loaded NetCDF :class:`iris.cube.Cube`.
"""
# Initialise the pyke inference engine.
engine = _pyke_kb_engine()
if isinstance(filenames, basestring):
filenames = [filenames]
for filename in filenames:
# Ingest the netCDF file.
cf = iris.fileformats.cf.CFReader(filename)
# Process each CF data variable.
data_variables = cf.cf_group.data_variables.values() + \
cf.cf_group.promoted.values()
for cf_var in data_variables:
cube = _load_cube(engine, cf, cf_var, filename)
# Process any associated formula terms and attach
# the corresponding AuxCoordFactory.
try:
_load_aux_factory(engine, cube)
except ValueError as e:
warnings.warn('{}'.format(e))
# Perform any user registered callback function.
cube = iris.io.run_callback(callback, cube, cf_var, filename)
# Callback mechanism may return None, which must not be yielded
if cube is None:
continue
yield cube
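# A minimal usage sketch (the filename is hypothetical). load_cubes is a
# generator, so iterate over it, or wrap it in list(), to trigger the loading:
#
#     for cube in load_cubes('/tmp/example.nc'):
#         print(cube)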
class Saver(object):
"""A manager for saving netcdf files."""
def __init__(self, filename, netcdf_format):
"""
A manager for saving netcdf files.
Args:
* filename (string):
Name of the netCDF file to save the cube.
* netcdf_format (string):
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
Returns:
None.
For example::
# Initialise Manager for saving
with Saver(filename, netcdf_format) as sman:
# Iterate through the cubelist.
for cube in cubes:
sman.write(cube)
"""
if netcdf_format not in ['NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC', 'NETCDF3_64BIT']:
raise ValueError('Unknown netCDF file format, got %r' %
netcdf_format)
# All persistent variables
#: CF name mapping with iris coordinates
self._name_coord_map = CFNameCoordMap()
#: List of dimension coordinates added to the file
self._dim_coords = []
#: List of grid mappings added to the file
self._coord_systems = []
#: A dictionary, listing dimension names and corresponding length
self._existing_dim = {}
#: A dictionary, mapping formula terms to owner cf variable name
self._formula_terms_cache = {}
#: NetCDF dataset
try:
self._dataset = netCDF4.Dataset(filename, mode='w',
format=netcdf_format)
except RuntimeError:
dir_name = os.path.dirname(filename)
if not os.path.isdir(dir_name):
msg = 'No such file or directory: {}'.format(dir_name)
raise IOError(msg)
if not os.access(dir_name, os.R_OK | os.W_OK):
msg = 'Permission denied: {}'.format(filename)
raise IOError(msg)
else:
raise
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Flush any buffered data to the CF-netCDF file before closing."""
self._dataset.sync()
self._dataset.close()
def write(self, cube, local_keys=None, unlimited_dimensions=None,
zlib=False, complevel=4, shuffle=True, fletcher32=False,
contiguous=False, chunksizes=None, endian='native',
least_significant_digit=None):
"""
Wrapper for saving cubes to a NetCDF file.
Args:
* cube (:class:`iris.cube.Cube`):
A :class:`iris.cube.Cube` to be saved to a netCDF file.
Kwargs:
* local_keys (iterable of strings):
An iterable of cube attribute keys. Any cube attributes with
matching keys will become attributes on the data variable rather
than global attributes.
* unlimited_dimensions (iterable of strings and/or
:class:`iris.coords.Coord` objects):
Explicit list of coordinate names (or coordinate objects)
corresponding to coordinate dimensions of `cube` to save with the
NetCDF dimension variable length 'UNLIMITED'. By default, the
outermost (first) dimension for each cube is used. Only the
'NETCDF4' format supports multiple 'UNLIMITED' dimensions. To save
no unlimited dimensions, use `unlimited_dimensions=[]` (an empty
list).
* zlib (bool):
If `True`, the data will be compressed in the netCDF file using
gzip compression (default `False`).
* complevel (int):
An integer between 1 and 9 describing the level of compression
desired (default 4). Ignored if `zlib=False`.
* shuffle (bool):
If `True`, the HDF5 shuffle filter will be applied before
compressing the data (default `True`). This significantly improves
compression. Ignored if `zlib=False`.
* fletcher32 (bool):
If `True`, the Fletcher32 HDF5 checksum algorithm is activated to
detect errors. Default `False`.
* contiguous (bool):
If `True`, the variable data is stored contiguously on disk.
Default `False`. Setting to `True` for a variable with an unlimited
dimension will trigger an error.
* chunksizes (tuple of int):
Used to manually specify the HDF5 chunksizes for each dimension of
the variable. A detailed discussion of HDF chunking and I/O
performance is available here:
http://www.hdfgroup.org/HDF5/doc/H5.user/Chunking.html. Basically,
you want the chunk size for each dimension to match as closely as
possible the size of the data block that users will read from the
file. `chunksizes` cannot be set if `contiguous=True`.
* endian (string):
Used to control whether the data is stored in little or big endian
format on disk. Possible values are 'little', 'big' or 'native'
(default). The library will automatically handle endian conversions
when the data is read, but if the data is always going to be read
on a computer with the opposite format as the one used to create
the file, there may be some performance advantage to be gained by
setting the endian-ness.
* least_significant_digit (int):
If `least_significant_digit` is specified, variable data will be
truncated (quantized). In conjunction with `zlib=True` this
produces 'lossy', but significantly more efficient compression. For
example, if `least_significant_digit=1`, data will be quantized
using `numpy.around(scale*data)/scale`, where `scale = 2**bits`,
and `bits` is determined so that a precision of 0.1 is retained (in
this case `bits=4`). From
http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml:
"least_significant_digit -- power of ten of the smallest decimal
place in unpacked data that is a reliable value". Default is
`None`, or no quantization, or 'lossless' compression.
Returns:
None.
.. note::
The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
`chunksizes` and `endian` keywords are silently ignored for netCDF
3 files that do not use HDF5.
.. deprecated:: 1.8.0
NetCDF default saving behaviour currently assigns the outermost
dimension as unlimited. This behaviour is to be deprecated, in
favour of no automatic assignment. To switch to the new behaviour,
set `iris.FUTURE.netcdf_no_unlimited` to True.
"""
if unlimited_dimensions is None:
if iris.FUTURE.netcdf_no_unlimited:
unlimited_dimensions = []
else:
_no_unlim_dep_warning()
cf_profile_available = (iris.site_configuration.get('cf_profile') not
in [None, False])
if cf_profile_available:
# Perform a CF profile of the cube. This may result in an exception
# being raised if mandatory requirements are not satisfied.
profile = iris.site_configuration['cf_profile'](cube)
# Get suitable dimension names.
dimension_names = self._get_dim_names(cube)
# Create the CF-netCDF data dimensions.
self._create_cf_dimensions(cube, dimension_names, unlimited_dimensions)
# Create the associated cube CF-netCDF data variable.
cf_var_cube = self._create_cf_data_variable(
cube, dimension_names, local_keys, zlib=zlib, complevel=complevel,
shuffle=shuffle, fletcher32=fletcher32, contiguous=contiguous,
chunksizes=chunksizes, endian=endian,
least_significant_digit=least_significant_digit)
# Add coordinate variables.
self._add_dim_coords(cube, dimension_names)
# Add the auxiliary coordinate variable names and associate the data
# variable to them
self._add_aux_coords(cube, cf_var_cube, dimension_names)
# Add the formula terms to the appropriate cf variables for each
# aux factory in the cube.
self._add_aux_factories(cube, cf_var_cube, dimension_names)
# Add data variable-only attribute names to local_keys.
if local_keys is None:
local_keys = set()
else:
local_keys = set(local_keys)
local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS)
# Add global attributes taking into account local_keys.
global_attributes = {k: v for k, v in cube.attributes.iteritems() if k
not in local_keys and k.lower() != 'conventions'}
self.update_global_attributes(global_attributes)
if cf_profile_available:
cf_patch = iris.site_configuration.get('cf_patch')
if cf_patch is not None:
# Perform a CF patch of the dataset.
cf_patch(profile, self._dataset, cf_var_cube)
else:
msg = 'cf_profile is available but no {} defined.'.format(
'cf_patch')
warnings.warn(msg)
def update_global_attributes(self, attributes=None, **kwargs):
"""
Update the CF global attributes based on the provided
iterable/dictionary and/or keyword arguments.
Args:
* attributes (dict or iterable of key, value pairs):
CF global attributes to be updated.
"""
if attributes is not None:
# Handle sequence e.g. [('fruit', 'apple'), ...].
if not hasattr(attributes, 'keys'):
attributes = dict(attributes)
for attr_name in sorted(attributes):
self._dataset.setncattr(attr_name, attributes[attr_name])
for attr_name in sorted(kwargs):
self._dataset.setncattr(attr_name, kwargs[attr_name])
def _create_cf_dimensions(self, cube, dimension_names,
unlimited_dimensions=None):
"""
Create the CF-netCDF data dimensions.
Args:
* cube (:class:`iris.cube.Cube`):
A :class:`iris.cube.Cube` in which to lookup coordinates.
Kwargs:
* unlimited_dimensions (iterable of strings and/or
:class:`iris.coords.Coord` objects):
List of coordinates to make unlimited. By default, the
outermost dimension is made unlimited.
Returns:
None.
"""
unlimited_dim_names = []
if (unlimited_dimensions is None and
not iris.FUTURE.netcdf_no_unlimited):
if dimension_names:
unlimited_dim_names.append(dimension_names[0])
else:
for coord in unlimited_dimensions:
try:
coord = cube.coord(name_or_coord=coord, dim_coords=True)
except iris.exceptions.CoordinateNotFoundError:
# coordinate isn't used for this cube, but it might be
# used for a different one
pass
else:
dim_name = self._get_coord_variable_name(cube, coord)
unlimited_dim_names.append(dim_name)
for dim_name in dimension_names:
if dim_name not in self._dataset.dimensions:
if dim_name in unlimited_dim_names:
size = None
else:
size = self._existing_dim[dim_name]
self._dataset.createDimension(dim_name, size)
def _add_aux_coords(self, cube, cf_var_cube, dimension_names):
"""
Add auxiliary coordinates to the dataset and associate them with the data variable.
Args:
* cube (:class:`iris.cube.Cube`):
A :class:`iris.cube.Cube` to be saved to a netCDF file.
* cf_var_cube (:class:`netcdf.netcdf_variable`):
cf variable cube representation.
* dimension_names (list):
Names associated with the dimensions of the cube.
"""
auxiliary_coordinate_names = []
# Add CF-netCDF variables for the associated auxiliary coordinates.
for coord in sorted(cube.aux_coords, key=lambda coord: coord.name()):
# Create the associated coordinate CF-netCDF variable.
if coord not in self._name_coord_map.coords:
cf_name = self._create_cf_variable(cube, dimension_names,
coord)
self._name_coord_map.append(cf_name, coord)
else:
cf_name = self._name_coord_map.name(coord)
if cf_name is not None:
auxiliary_coordinate_names.append(cf_name)
# Add CF-netCDF auxiliary coordinate variable references to the
# CF-netCDF data variable.
if auxiliary_coordinate_names:
cf_var_cube.coordinates = ' '.join(
sorted(auxiliary_coordinate_names))
def _add_dim_coords(self, cube, dimension_names):
"""
Add coordinate variables to NetCDF dataset.
Args:
* cube (:class:`iris.cube.Cube`):
A :class:`iris.cube.Cube` to be saved to a netCDF file.
* dimension_names (list):
Names associated with the dimensions of the cube.
"""
# Ensure we create the netCDF coordinate variables first.
for coord in cube.dim_coords:
# Create the associated coordinate CF-netCDF variable.
if coord not in self._name_coord_map.coords:
cf_name = self._create_cf_variable(cube, dimension_names,
coord)
self._name_coord_map.append(cf_name, coord)
def _add_aux_factories(self, cube, cf_var_cube, dimension_names):
"""
Modifies the variables of the NetCDF dataset to represent
the presence of dimensionless vertical coordinates based on
the aux factories of the cube (if any).
Args:
* cube (:class:`iris.cube.Cube`):
A :class:`iris.cube.Cube` to be saved to a netCDF file.
* cf_var_cube (:class:`netcdf.netcdf_variable`)
CF variable cube representation.
* dimension_names (list):
Names associated with the dimensions of the cube.
"""
primaries = []
for factory in cube.aux_factories:
factory_defn = _FACTORY_DEFNS.get(type(factory), None)
if factory_defn is None:
msg = 'Unable to determine formula terms ' \
'for AuxFactory: {!r}'.format(factory)
warnings.warn(msg)
else:
# Override `standard_name`, `long_name`, and `axis` of the
# primary coord that signals the presence of a dimensionless
# vertical coord, then set the `formula_terms` attribute.
primary_coord = factory.dependencies[factory_defn.primary]
if primary_coord in primaries:
msg = 'Cube {!r} has multiple aux factories that share ' \
'a common primary coordinate {!r}. Unable to save ' \
'to netCDF as having multiple formula terms on a ' \
'single coordinate is not supported.'
raise ValueError(msg.format(cube, primary_coord.name()))
primaries.append(primary_coord)
cf_name = self._name_coord_map.name(primary_coord)
cf_var = self._dataset.variables[cf_name]
names = {key: self._name_coord_map.name(coord) for
key, coord in factory.dependencies.iteritems()}
formula_terms = factory_defn.formula_terms_format.format(
**names)
std_name = factory_defn.std_name
if hasattr(cf_var, 'formula_terms'):
if cf_var.formula_terms != formula_terms or \
cf_var.standard_name != std_name:
# TODO: We need to resolve this corner-case where
# the dimensionless vertical coordinate containing the
# formula_terms is a dimension coordinate of the
# associated cube and a new alternatively named
# dimensionless vertical coordinate is required with
# new formula_terms and a renamed dimension.
if cf_name in dimension_names:
msg = 'Unable to create dimensionless vertical ' \
'coordinate.'
raise ValueError(msg)
key = (cf_name, std_name, formula_terms)
name = self._formula_terms_cache.get(key)
if name is None:
# Create a new variable
name = self._create_cf_variable(cube,
dimension_names,
primary_coord)
cf_var = self._dataset.variables[name]
cf_var.standard_name = std_name
cf_var.axis = 'Z'
# Update the formula terms.
ft = formula_terms.split()
ft = [name if t == cf_name else t for t in ft]
cf_var.formula_terms = ' '.join(ft)
# Update the cache.
self._formula_terms_cache[key] = name
# Update the associated cube variable.
coords = cf_var_cube.coordinates.split()
coords = [name if c == cf_name else c for c in coords]
cf_var_cube.coordinates = ' '.join(coords)
else:
cf_var.standard_name = std_name
cf_var.axis = 'Z'
cf_var.formula_terms = formula_terms
def _get_dim_names(self, cube):
"""
Determine suitable CF-netCDF data dimension names.
Args:
* cube (:class:`iris.cube.Cube`):
A :class:`iris.cube.Cube` to be saved to a netCDF file.
Returns:
List of dimension names with length equal the number of dimensions
in the cube.
"""
dimension_names = []
for dim in range(cube.ndim):
coords = cube.coords(dimensions=dim, dim_coords=True)
if coords:
coord = coords[0]
dim_name = self._get_coord_variable_name(cube, coord)
# Add only dimensions that have not already been added.
if coord not in self._dim_coords:
# Determine unique dimension name
while (dim_name in self._existing_dim or
dim_name in self._name_coord_map.names):
dim_name = self._increment_name(dim_name)
# Update names added, current cube dim names used and
# unique coordinates added.
self._existing_dim[dim_name] = coord.shape[0]
dimension_names.append(dim_name)
self._dim_coords.append(coord)
else:
# Return the dim_name associated with the existing
# coordinate.
dim_name = self._name_coord_map.name(coord)
dimension_names.append(dim_name)
else:
# No CF-netCDF coordinates describe this data dimension.
dim_name = 'dim%d' % dim
if dim_name in self._existing_dim:
# Increment name if conflicted with one already existing.
if self._existing_dim[dim_name] != cube.shape[dim]:
while (dim_name in self._existing_dim and
self._existing_dim[dim_name] !=
cube.shape[dim] or
dim_name in self._name_coord_map.names):
dim_name = self._increment_name(dim_name)
# Update dictionary with new entry
self._existing_dim[dim_name] = cube.shape[dim]
else:
# Update dictionary with new entry
self._existing_dim[dim_name] = cube.shape[dim]
dimension_names.append(dim_name)
return dimension_names
def _cf_coord_identity(self, coord):
"""
Determine suitable units from a given coordinate.
Args:
* coord (:class:`iris.coords.Coord`):
A coordinate of a cube.
Returns:
The (standard_name, long_name, unit) of the given
:class:`iris.coords.Coord` instance.
"""
units = str(coord.units)
# TODO: Use #61 to get the units.
if isinstance(coord.coord_system, iris.coord_systems.GeogCS):
if "latitude" in coord.standard_name:
units = 'degrees_north'
elif "longitude" in coord.standard_name:
units = 'degrees_east'
elif isinstance(coord.coord_system, iris.coord_systems.RotatedGeogCS):
units = 'degrees'
elif isinstance(coord.coord_system,
iris.coord_systems.TransverseMercator):
units = 'm'
return coord.standard_name, coord.long_name, units
def _ensure_valid_dtype(self, values, src_name, src_object):
# NetCDF3 does not support int64 or unsigned ints, so we check
# if we can store them as int32 instead.
if ((np.issubdtype(values.dtype, np.int64) or
np.issubdtype(values.dtype, np.unsignedinteger)) and
self._dataset.file_format in ('NETCDF3_CLASSIC',
'NETCDF3_64BIT')):
# Cast to an integer type supported by netCDF3.
if not np.can_cast(values.max(), np.int32) or \
not np.can_cast(values.min(), np.int32):
msg = 'The data type of {} {!r} is not supported by {} and' \
' its values cannot be safely cast to a supported' \
' integer type.'
msg = msg.format(src_name, src_object,
self._dataset.file_format)
raise ValueError(msg)
values = values.astype(np.int32)
return values
def _create_cf_bounds(self, coord, cf_var, cf_name):
"""
Create the associated CF-netCDF bounds variable.
Args:
* coord (:class:`iris.coords.Coord`):
A coordinate of a cube.
* cf_var:
CF-netCDF variable
* cf_name (string):
name of the CF-NetCDF variable.
Returns:
None
"""
if coord.has_bounds():
# Get the values in a form which is valid for the file format.
bounds = self._ensure_valid_dtype(coord.bounds,
'the bounds of coordinate',
coord)
n_bounds = bounds.shape[-1]
if n_bounds == 2:
bounds_dimension_name = 'bnds'
else:
bounds_dimension_name = 'bnds_%s' % n_bounds
if bounds_dimension_name not in self._dataset.dimensions:
# Create the bounds dimension with the appropriate extent.
self._dataset.createDimension(bounds_dimension_name, n_bounds)
cf_var.bounds = cf_name + '_bnds'
cf_var_bounds = self._dataset.createVariable(
cf_var.bounds, bounds.dtype.newbyteorder('='),
cf_var.dimensions + (bounds_dimension_name,))
cf_var_bounds[:] = bounds
def _get_cube_variable_name(self, cube):
"""
Returns a CF-netCDF variable name for the given cube.
Args:
* cube (class:`iris.cube.Cube`):
An instance of a cube for which a CF-netCDF variable
name is required.
Returns:
A CF-netCDF variable name as a string.
"""
if cube.var_name is not None:
cf_name = cube.var_name
else:
# Convert to lower case and replace whitespace by underscores.
cf_name = '_'.join(cube.name().lower().split())
return cf_name
def _get_coord_variable_name(self, cube, coord):
"""
Returns a CF-netCDF variable name for the given coordinate.
Args:
* cube (:class:`iris.cube.Cube`):
The cube that contains the given coordinate.
* coord (:class:`iris.coords.Coord`):
An instance of a coordinate for which a CF-netCDF variable
name is required.
Returns:
A CF-netCDF variable name as a string.
"""
if coord.var_name is not None:
cf_name = coord.var_name
else:
name = coord.standard_name or coord.long_name
if not name or set(name).intersection(string.whitespace):
# Auto-generate name based on associated dimensions.
name = ''
for dim in cube.coord_dims(coord):
name += 'dim{}'.format(dim)
# Handle scalar coordinate (dims == ()).
if not name:
name = 'unknown_scalar'
# Convert to lower case and replace whitespace by underscores.
cf_name = '_'.join(name.lower().split())
return cf_name
def _create_cf_variable(self, cube, dimension_names, coord):
"""
Create the associated CF-netCDF variable in the netCDF dataset for the
given coordinate. If required, also create the CF-netCDF bounds
variable and associated dimension.
Args:
* cube (:class:`iris.cube.Cube`):
The associated cube being saved to CF-netCDF file.
* dimension_names (list):
Names for each dimension of the cube.
* coord (:class:`iris.coords.Coord`):
The coordinate to be saved to CF-netCDF file.
Returns:
The string name of the associated CF-netCDF variable saved.
"""
cf_name = self._get_coord_variable_name(cube, coord)
while cf_name in self._dataset.variables:
cf_name = self._increment_name(cf_name)
# Derive the data dimension names for the coordinate.
cf_dimensions = [dimension_names[dim] for dim in
cube.coord_dims(coord)]
if np.issubdtype(coord.points.dtype, np.str):
string_dimension_depth = coord.points.dtype.itemsize
string_dimension_name = 'string%d' % string_dimension_depth
# Determine whether to create the string length dimension.
if string_dimension_name not in self._dataset.dimensions:
self._dataset.createDimension(string_dimension_name,
string_dimension_depth)
# Add the string length dimension to dimension names.
cf_dimensions.append(string_dimension_name)
# Create the label coordinate variable.
cf_var = self._dataset.createVariable(cf_name, '|S1',
cf_dimensions)
# Add the payload to the label coordinate variable.
if len(cf_dimensions) == 1:
cf_var[:] = list('%- *s' % (string_dimension_depth,
coord.points[0]))
else:
for index in np.ndindex(coord.points.shape):
index_slice = tuple(list(index) + [slice(None, None)])
cf_var[index_slice] = list('%- *s' %
(string_dimension_depth,
coord.points[index]))
else:
# Identify the collection of coordinates that represent CF-netCDF
# coordinate variables.
cf_coordinates = cube.dim_coords
if coord in cf_coordinates:
# By definition of a CF-netCDF coordinate variable this
# coordinate must be 1-D and the name of the CF-netCDF variable
# must be the same as its dimension name.
cf_name = cf_dimensions[0]
# Get the values in a form which is valid for the file format.
points = self._ensure_valid_dtype(coord.points, 'coordinate',
coord)
# Create the CF-netCDF variable.
cf_var = self._dataset.createVariable(
cf_name, points.dtype.newbyteorder('='), cf_dimensions)
# Add the axis attribute for spatio-temporal CF-netCDF coordinates.
if coord in cf_coordinates:
axis = iris.util.guess_coord_axis(coord)
if axis is not None and axis.lower() in SPATIO_TEMPORAL_AXES:
cf_var.axis = axis.upper()
# Add the data to the CF-netCDF variable.
cf_var[:] = points
# Create the associated CF-netCDF bounds variable.
self._create_cf_bounds(coord, cf_var, cf_name)
# Deal with CF-netCDF units and standard name.
standard_name, long_name, units = self._cf_coord_identity(coord)
if units != 'unknown':
cf_var.units = units
if standard_name is not None:
cf_var.standard_name = standard_name
if long_name is not None:
cf_var.long_name = long_name
# Add the CF-netCDF calendar attribute.
if coord.units.calendar:
cf_var.calendar = coord.units.calendar
# Add any other custom coordinate attributes.
for name in sorted(coord.attributes):
value = coord.attributes[name]
if name == 'STASH':
# Adopting provisional Metadata Conventions for representing MO
# Scientific Data encoded in NetCDF Format.
name = 'um_stash_source'
value = str(value)
# Don't clobber existing attributes.
if not hasattr(cf_var, name):
setattr(cf_var, name, value)
return cf_name
def _create_cf_cell_methods(self, cube, dimension_names):
"""
Create CF-netCDF string representation of a cube cell methods.
Args:
* cube (:class:`iris.cube.Cube`) or cubelist
(:class:`iris.cube.CubeList`):
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of
cubes to be saved to a netCDF file.
* dimension_names (list):
Names associated with the dimensions of the cube.
Returns:
CF-netCDF string representation of a cube cell methods.
"""
cell_methods = []
# Identify the collection of coordinates that represent CF-netCDF
# coordinate variables.
cf_coordinates = cube.dim_coords
for cm in cube.cell_methods:
names = ''
for name in cm.coord_names:
coord = cube.coords(name)
if coord:
coord = coord[0]
if coord in cf_coordinates:
name = dimension_names[cube.coord_dims(coord)[0]]
names += '%s: ' % name
interval = ' '.join(['interval: %s' % interval for interval in
cm.intervals or []])
comment = ' '.join(['comment: %s' % comment for comment in
cm.comments or []])
extra = ' '.join([interval, comment]).strip()
if extra:
extra = ' (%s)' % extra
cell_methods.append(names + cm.method + extra)
return ' '.join(cell_methods)
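# For reference, a cube cell method of 'mean' over the 'time' coordinate with
# an interval of '1 hour' would be rendered by the method above as the string
# 'time: mean (interval: 1 hour)'; coordinate names are replaced by dimension
# names when the coordinate is a dimension coordinate of the cube.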
def _create_cf_grid_mapping(self, cube, cf_var_cube):
"""
Create CF-netCDF grid mapping variable and associated CF-netCDF
data variable grid mapping attribute.
Args:
* cube (:class:`iris.cube.Cube`) or cubelist
(:class:`iris.cube.CubeList`):
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of
cubes to be saved to a netCDF file.
* cf_var_cube (:class:`netcdf.netcdf_variable`):
cf variable cube representation.
Returns:
None
"""
cs = cube.coord_system('CoordSystem')
if cs is not None:
# Grid var not yet created?
if cs not in self._coord_systems:
while cs.grid_mapping_name in self._dataset.variables:
cs.grid_mapping_name = (
self._increment_name(cs.grid_mapping_name))
cf_var_grid = self._dataset.createVariable(
cs.grid_mapping_name, np.int32)
cf_var_grid.grid_mapping_name = cs.grid_mapping_name
def add_ellipsoid(ellipsoid):
cf_var_grid.longitude_of_prime_meridian = (
ellipsoid.longitude_of_prime_meridian)
semi_major = ellipsoid.semi_major_axis
semi_minor = ellipsoid.semi_minor_axis
if semi_minor == semi_major:
cf_var_grid.earth_radius = semi_major
else:
cf_var_grid.semi_major_axis = semi_major
cf_var_grid.semi_minor_axis = semi_minor
# latlon
if isinstance(cs, iris.coord_systems.GeogCS):
add_ellipsoid(cs)
# rotated latlon
elif isinstance(cs, iris.coord_systems.RotatedGeogCS):
if cs.ellipsoid:
add_ellipsoid(cs.ellipsoid)
cf_var_grid.grid_north_pole_latitude = (
cs.grid_north_pole_latitude)
cf_var_grid.grid_north_pole_longitude = (
cs.grid_north_pole_longitude)
cf_var_grid.north_pole_grid_longitude = (
cs.north_pole_grid_longitude)
# tmerc
elif isinstance(cs, iris.coord_systems.TransverseMercator):
if cs.ellipsoid:
add_ellipsoid(cs.ellipsoid)
cf_var_grid.longitude_of_central_meridian = (
cs.longitude_of_central_meridian)
cf_var_grid.latitude_of_projection_origin = (
cs.latitude_of_projection_origin)
cf_var_grid.false_easting = cs.false_easting
cf_var_grid.false_northing = cs.false_northing
cf_var_grid.scale_factor_at_central_meridian = (
cs.scale_factor_at_central_meridian)
# osgb (a specific tmerc)
elif isinstance(cs, iris.coord_systems.OSGB):
warnings.warn('OSGB coordinate system not yet handled')
# other
else:
warnings.warn('Unable to represent the horizontal '
'coordinate system. The coordinate system '
'type %r is not yet implemented.' % type(cs))
self._coord_systems.append(cs)
# Refer to grid var
cf_var_cube.grid_mapping = cs.grid_mapping_name
def _create_cf_data_variable(self, cube, dimension_names, local_keys=None,
**kwargs):
"""
Create CF-netCDF data variable for the cube and any associated grid
mapping.
Args:
* cube (:class:`iris.cube.Cube`):
The associated cube being saved to CF-netCDF file.
* dimension_names (list):
String names for each dimension of the cube.
Kwargs:
* local_keys (iterable of strings):
An iterable of cube attribute keys. Any cube attributes
with matching keys will become attributes on the data variable.
All other keywords are passed through to the dataset's `createVariable`
method.
Returns:
The newly created CF-netCDF data variable.
"""
cf_name = self._get_cube_variable_name(cube)
while cf_name in self._dataset.variables:
cf_name = self._increment_name(cf_name)
# if netcdf3 avoid streaming due to dtype handling
if (not cube.has_lazy_data()
or self._dataset.file_format in ('NETCDF3_CLASSIC',
'NETCDF3_64BIT')):
# Determine whether there is a cube MDI value.
fill_value = None
if isinstance(cube.data, ma.core.MaskedArray):
fill_value = cube.data.fill_value
# Get the values in a form which is valid for the file format.
data = self._ensure_valid_dtype(cube.data, 'cube', cube)
# Create the cube CF-netCDF data variable with data payload.
cf_var = self._dataset.createVariable(
cf_name, data.dtype.newbyteorder('='), dimension_names,
fill_value=fill_value, **kwargs)
cf_var[:] = data
else:
# Create the cube CF-netCDF data variable.
# Explicitly assign the fill_value, which will be the type default
# in the case of an unmasked array.
cf_var = self._dataset.createVariable(
cf_name, cube.lazy_data().dtype.newbyteorder('='),
dimension_names, fill_value=cube.lazy_data().fill_value,
**kwargs)
# stream the data
biggus.save([cube.lazy_data()], [cf_var], masked=True)
if cube.standard_name:
cf_var.standard_name = cube.standard_name
if cube.long_name:
cf_var.long_name = cube.long_name
if cube.units != 'unknown':
cf_var.units = str(cube.units)
# Add data variable-only attribute names to local_keys.
if local_keys is None:
local_keys = set()
else:
local_keys = set(local_keys)
local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS)
# Add any cube attributes whose keys are in local_keys as
# CF-netCDF data variable attributes.
attr_names = set(cube.attributes).intersection(local_keys)
for attr_name in sorted(attr_names):
# Do not output 'conventions' attribute.
if attr_name.lower() == 'conventions':
continue
value = cube.attributes[attr_name]
if attr_name == 'STASH':
# Adopting provisional Metadata Conventions for representing MO
# Scientific Data encoded in NetCDF Format.
attr_name = 'um_stash_source'
value = str(value)
if attr_name == "ukmo__process_flags":
value = " ".join([x.replace(" ", "_") for x in value])
if attr_name in _CF_GLOBAL_ATTRS:
msg = '{attr_name!r} is being added as CF data variable ' \
'attribute, but {attr_name!r} should only be a CF ' \
'global attribute.'.format(attr_name=attr_name)
warnings.warn(msg)
setattr(cf_var, attr_name, value)
# Create the CF-netCDF data variable cell method attribute.
cell_methods = self._create_cf_cell_methods(cube, dimension_names)
if cell_methods:
cf_var.cell_methods = cell_methods
# Create the CF-netCDF grid mapping.
self._create_cf_grid_mapping(cube, cf_var)
return cf_var
def _increment_name(self, varname):
"""
Increment string name or begin increment.
Avoids conflicts between variable names by giving the name an
incremented numeric suffix to distinguish it from others.
Args:
* varname (string):
Variable name to increment.
Returns:
Incremented varname.
"""
num = 0
try:
name, endnum = varname.rsplit('_', 1)
if endnum.isdigit():
num = int(endnum) + 1
varname = name
except ValueError:
pass
return '{}_{}'.format(varname, num)
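# For reference, _increment_name above behaves roughly as follows (assuming
# 'saver' is a Saver instance):
#
#     saver._increment_name('temp')     # -> 'temp_0'
#     saver._increment_name('temp_0')   # -> 'temp_1'
#     saver._increment_name('dim0')     # -> 'dim0_0'  (no '_<digits>' suffix yet)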
def save(cube, filename, netcdf_format='NETCDF4', local_keys=None,
unlimited_dimensions=None, zlib=False, complevel=4, shuffle=True,
fletcher32=False, contiguous=False, chunksizes=None, endian='native',
least_significant_digit=None):
"""
Save cube(s) to a netCDF file, given the cube and the filename.
* Iris will write CF 1.5 compliant NetCDF files.
* The attributes dictionaries on each cube in the saved cube list
will be compared and common attributes saved as NetCDF global
attributes where appropriate.
* Keyword arguments specifying how to save the data are applied
to each cube. To use different settings for different cubes, use
the NetCDF Context manager (:class:`~Saver`) directly.
* The save process will stream the data payload to the file using biggus,
enabling large data payloads to be saved and maintaining the 'lazy'
status of the cube's data payload, unless the netcdf_format is explicitly
specified to be 'NETCDF3' or 'NETCDF3_CLASSIC'.
Args:
* cube (:class:`iris.cube.Cube` or :class:`iris.cube.CubeList`):
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or other
iterable of cubes to be saved to a netCDF file.
* filename (string):
Name of the netCDF file to save the cube(s).
Kwargs:
* netcdf_format (string):
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
* local_keys (iterable of strings):
An iterable of cube attribute keys. Any cube attributes with
matching keys will become attributes on the data variable rather
than global attributes.
* unlimited_dimensions (iterable of strings and/or
:class:`iris.coords.Coord` objects):
Explicit list of coordinate names (or coordinate objects) corresponding
to coordinate dimensions of `cube` to save with the NetCDF dimension
variable length 'UNLIMITED'. By default, the outermost (first)
dimension for each cube is used. Only the 'NETCDF4' format supports
multiple 'UNLIMITED' dimensions. To save no unlimited dimensions, use
`unlimited_dimensions=[]` (an empty list).
* zlib (bool):
If `True`, the data will be compressed in the netCDF file using gzip
compression (default `False`).
* complevel (int):
An integer between 1 and 9 describing the level of compression desired
(default 4). Ignored if `zlib=False`.
* shuffle (bool):
If `True`, the HDF5 shuffle filter will be applied before compressing
the data (default `True`). This significantly improves compression.
Ignored if `zlib=False`.
* fletcher32 (bool):
If `True`, the Fletcher32 HDF5 checksum algorithm is activated to
detect errors. Default `False`.
* contiguous (bool):
If `True`, the variable data is stored contiguously on disk. Default
`False`. Setting to `True` for a variable with an unlimited dimension
will trigger an error.
* chunksizes (tuple of int):
Used to manually specify the HDF5 chunksizes for each dimension of the
variable. A detailed discussion of HDF chunking and I/O performance is
available here: http://www.hdfgroup.org/HDF5/doc/H5.user/Chunking.html.
Basically, you want the chunk size for each dimension to match as
closely as possible the size of the data block that users will read
from the file. `chunksizes` cannot be set if `contiguous=True`.
* endian (string):
Used to control whether the data is stored in little or big endian
format on disk. Possible values are 'little', 'big' or 'native'
(default). The library will automatically handle endian conversions
when the data is read, but if the data is always going to be read on a
computer with the opposite format as the one used to create the file,
there may be some performance advantage to be gained by setting the
endian-ness.
* least_significant_digit (int):
If `least_significant_digit` is specified, variable data will be
truncated (quantized). In conjunction with `zlib=True` this produces
'lossy', but significantly more efficient compression. For example, if
`least_significant_digit=1`, data will be quantized using
`numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits`
is determined so that a precision of 0.1 is retained (in this case
`bits=4`). From
http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml:
"least_significant_digit -- power of ten of the smallest decimal place
in unpacked data that is a reliable value". Default is `None`, or no
quantization, or 'lossless' compression.
Returns:
None.
.. note::
The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
`chunksizes` and `endian` keywords are silently ignored for netCDF 3
files that do not use HDF5.
.. seealso::
NetCDF Context manager (:class:`~Saver`).
.. deprecated:: 1.8.0
NetCDF default saving behaviour currently assigns the outermost
dimensions to unlimited. This behaviour is to be deprecated, in
favour of no automatic assignment. To switch to the new behaviour,
set `iris.FUTURE.netcdf_no_unlimited` to True.
"""
if unlimited_dimensions is None:
if iris.FUTURE.netcdf_no_unlimited:
unlimited_dimensions = []
else:
_no_unlim_dep_warning()
if isinstance(cube, iris.cube.Cube):
cubes = iris.cube.CubeList()
cubes.append(cube)
else:
cubes = cube
if local_keys is None:
local_keys = set()
else:
local_keys = set(local_keys)
# Determine the attribute keys that are common across all cubes and
# thereby extend the collection of local_keys for attributes
# that should be attributes on data variables.
attributes = cubes[0].attributes
common_keys = set(attributes)
for cube in cubes[1:]:
keys = set(cube.attributes)
local_keys.update(keys.symmetric_difference(common_keys))
common_keys.intersection_update(keys)
different_value_keys = []
for key in common_keys:
if np.any(attributes[key] != cube.attributes[key]):
different_value_keys.append(key)
common_keys.difference_update(different_value_keys)
local_keys.update(different_value_keys)
# Initialise Manager for saving
with Saver(filename, netcdf_format) as sman:
# Iterate through the cubelist.
for cube in cubes:
sman.write(cube, local_keys, unlimited_dimensions, zlib, complevel,
shuffle, fletcher32, contiguous, chunksizes, endian,
least_significant_digit)
conventions = CF_CONVENTIONS_VERSION
# Perform a CF patch of the conventions attribute.
cf_profile_available = (iris.site_configuration.get('cf_profile') not
in [None, False])
if cf_profile_available:
conventions_patch = iris.site_configuration.get(
'cf_patch_conventions')
if conventions_patch is not None:
conventions = conventions_patch(conventions)
else:
msg = 'cf_profile is available but no {} defined.'.format(
'cf_patch_conventions')
warnings.warn(msg)
# Add conventions attribute.
sman.update_global_attributes(Conventions=conventions)
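# A minimal usage sketch of the `save` function above; the file names and
# the cube source are hypothetical, not part of this module:
#
#     import iris
#     cubes = iris.load('input.pp')  # any Cube or CubeList
#     iris.save(cubes, 'output.nc', zlib=True, complevel=4,
#               least_significant_digit=2, unlimited_dimensions=['time'])
#
# With `zlib=True` and `least_significant_digit=2`, values are quantized to
# roughly two decimal places before gzip compression, trading precision for
# file size as described in the docstring above.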
def _no_unlim_dep_warning():
msg = ('NetCDF default saving behaviour currently assigns the '
'outermost dimensions to unlimited. This behaviour is to be '
'deprecated, in favour of no automatic assignment. To switch '
'to the new behaviour, set iris.FUTURE.netcdf_no_unlimited to '
'True.')
warnings.warn(msg)
|
lgpl-3.0
| -8,139,096,805,184,263,000
| 37.797199
| 79
| 0.569073
| false
| 4.284928
| false
| false
| false
|
winking324/ngxtop_rtmp_hls
|
ngxtop/rtmptop.py
|
1
|
9398
|
"""
Nginx-rtmp-module stat parser.
Need to install nginx-rtmp-module first.
"""
import xml.dom.minidom
import urllib2
if __package__ is None:
from utils import error_exit
else:
from .utils import error_exit
STAT_URL = "http://127.0.0.1:8080/stat"
def pass_for_node_value(root, node_name):
child = root.getElementsByTagName(node_name)
if len(child) >= 1 and child[0].firstChild:
return child[0].firstChild.data
return 0
class MetaInfo(object):
def __init__(self):
self.video_width = None
self.video_height = None
self.video_frame_rate = None
self.video_codec = None
self.video_profile = None
self.video_compat = None
self.video_level = None
self.audio_codec = None
self.audio_profile = None
self.audio_channels = None
self.audio_sample_rate = None
def parse_info(self, meta_root):
video_child = meta_root.getElementsByTagName('video')[0]
self.video_width = int(pass_for_node_value(video_child, 'width'))
self.video_height = int(pass_for_node_value(video_child, 'height'))
self.video_frame_rate = int(pass_for_node_value(video_child, 'frame_rate'))
self.video_codec = pass_for_node_value(video_child, 'codec')
self.video_profile = pass_for_node_value(video_child, 'profile')
self.video_compat = int(pass_for_node_value(video_child, 'compat'))
self.video_level = float(pass_for_node_value(video_child, 'level'))
audio_child = meta_root.getElementsByTagName('audio')[0]
self.audio_codec = pass_for_node_value(audio_child, 'codec')
self.audio_profile = pass_for_node_value(audio_child, 'profile')
self.audio_channels = int(pass_for_node_value(audio_child, 'channels'))
self.audio_sample_rate = int(pass_for_node_value(audio_child, 'sample_rate'))
def print_info(self, output):
output.append('\t\tVideo Meta: width %d, height %d, frame_rate %d, codec %s, profile %s, compat %d, level %f' %
(self.video_width, self.video_height, self.video_frame_rate, self.video_codec, self.video_profile,
self.video_compat, self.video_level))
output.append('\t\tAudio Meta: codec %s, profile %s, channels %d, sample rate %d' %
(self.audio_codec, self.audio_profile, self.audio_channels, self.audio_sample_rate))
class ClientInfo(object):
def __init__(self, client_root):
self.id = int(pass_for_node_value(client_root, 'id'))
self.address = pass_for_node_value(client_root, 'address')
self.time = int(pass_for_node_value(client_root, 'time'))
self.flashver = pass_for_node_value(client_root, 'flashver')
self.pageurl = None
self.swfurl = None
self.dropped = int(pass_for_node_value(client_root, 'dropped'))
self.avsync = int(pass_for_node_value(client_root, 'avsync'))
self.timestamp = int(pass_for_node_value(client_root, 'timestamp'))
self.is_publisher = False
def parse_info(self, client_root):
publish_child = client_root.getElementsByTagName('publishing')
if publish_child.length > 0:
self.is_publisher = True
if not self.is_publisher:
self.pageurl = pass_for_node_value(client_root, 'pageurl')
self.swfurl = pass_for_node_value(client_root, 'swfurl')
def print_info(self, output):
if self.is_publisher:
output.append('\t\tServer: addr %s, flashver %s' % (self.address, self.flashver))
else:
output.append('\t\tClient: addr %s, flashver %s, page %s, swf %s' %
(self.address, self.flashver, self.pageurl, self.swfurl))
class StreamInfo(object):
def __init__(self, stream_root):
self.name = pass_for_node_value(stream_root, 'name')
self.time = int(pass_for_node_value(stream_root, 'time'))
self.bw_in = int(pass_for_node_value(stream_root, 'bw_in'))
self.bytes_in = int(pass_for_node_value(stream_root, 'bytes_in'))
self.bw_out = int(pass_for_node_value(stream_root, 'bw_out'))
self.bytes_out = int(pass_for_node_value(stream_root, 'bytes_out'))
self.bw_audio = int(pass_for_node_value(stream_root, 'bw_audio'))
self.bw_video = int(pass_for_node_value(stream_root, 'bw_video'))
self.nclients = int(pass_for_node_value(stream_root, 'nclients'))
self.meta_info = None
self.clients = {}
def parse_info(self, stream_root):
meta_child = stream_root.getElementsByTagName('meta')
if meta_child.length > 0:
self.meta_info = MetaInfo()
self.meta_info.parse_info(meta_child[0])
client_child = stream_root.getElementsByTagName('client')
for client in client_child:
client_info = ClientInfo(client)
client_info.parse_info(client)
self.clients[client_info.id] = client_info
def print_info(self, output):
output.append('\tStream %s: time %d, bw_in %d, bytes_in %f, bw_out %d, '
                      'bytes_out %f, bw_audio %d, bw_video %d, clients %d' %
(self.name, self.time, self.bw_in, self.bytes_in, self.bw_out,
self.bytes_out, self.bw_audio, self.bw_video, self.nclients))
output.append('\tMeta info:')
if self.meta_info:
self.meta_info.print_info(output)
else:
            output.append('\t\tStream Idle')
output.append('\t\tClient Info:')
for client in self.clients.itervalues():
client.print_info(output)
class NginxRtmpInfo(object):
def __init__(self, arguments):
self.arguments = arguments
self.processor = None
self.rtmp_url = STAT_URL
self.nginx_version = None
self.rtmp_version = None
self.compiler = None
self.built = None
self.pid = None
self.uptime = None
self.accepted = None
self.bw_in = None
self.bw_out = None
self.bytes_in = None
self.bytes_out = None
self.stream_infos = {}
def set_processor(self, processor):
self.processor = processor
def get_rtmp_url(self):
rtmp_url = self.arguments['--rtmp-stat-url']
if rtmp_url:
self.rtmp_url = rtmp_url
return self.rtmp_url
def processor_process(self):
if self.processor is None:
return
records = {}
for stream_info in self.stream_infos.itervalues():
records['request'] = stream_info.name
records['in_bytes'] = stream_info.bytes_in
records['in_bw'] = stream_info.bw_in
records['out_bytes'] = stream_info.bytes_out
records['out_bw'] = stream_info.bw_out
for client in stream_info.clients.itervalues():
records['remote_addr'] = client.address
records['time'] = client.time
records['http_user_agent'] = client.flashver
self.processor.process(records)
def parse_info(self):
self.get_rtmp_url()
try:
response = urllib2.urlopen(self.rtmp_url)
except urllib2.URLError:
error_exit('Cannot access RTMP URL: %s' % self.rtmp_url)
dom = xml.dom.minidom.parseString(response.read())
root = dom.documentElement
self.nginx_version = pass_for_node_value(root, 'nginx_version')
self.rtmp_version = pass_for_node_value(root, 'nginx_rtmp_version')
self.compiler = pass_for_node_value(root, 'compiler')
self.built = pass_for_node_value(root, 'built')
self.pid = int(pass_for_node_value(root, 'pid'))
self.uptime = int(pass_for_node_value(root, 'uptime'))
self.accepted = int(pass_for_node_value(root, 'naccepted'))
self.bw_in = int(pass_for_node_value(root, 'bw_in'))
self.bw_out = int(pass_for_node_value(root, 'bw_out'))
self.bytes_in = int(pass_for_node_value(root, 'bytes_in'))
self.bytes_out = int(pass_for_node_value(root, 'bytes_out'))
live_child = root.getElementsByTagName('server')[0].getElementsByTagName(
'application')[0].getElementsByTagName('live')[0]
for stream_child in live_child.getElementsByTagName('stream'):
stream_info = StreamInfo(stream_child)
stream_info.parse_info(stream_child)
self.stream_infos[stream_info.name] = stream_info
self.processor_process()
def print_info(self):
output = list()
output.append('Summary:')
output.append('\tNginx version: %s, RTMP version: %s, Compiler: %s, Built: %s, PID: %d, Uptime: %ds.' %
(self.nginx_version, self.rtmp_version, self.compiler, self.built, self.pid, self.uptime))
output.append('\tAccepted: %d, bw_in: %f Kbit/s, bytes_in: %02f MByte, '
'bw_out: %02f Kbit/s, bytes_out: %02f MByte' %
(self.accepted, self.bw_in / 1024.0, self.bytes_in / 1024.0 / 1024,
self.bw_out / 1024.0, self.bytes_out / 1024.0 / 1024))
output.append('Detail:')
output.append('\tStreams: %d' % len(self.stream_infos))
for stream in self.stream_infos.itervalues():
stream.print_info(output)
return output
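# A minimal sketch of how this class might be driven; the `arguments` dict
# follows the docopt-style key read by get_rtmp_url and is illustrative only:
#
#     arguments = {'--rtmp-stat-url': 'http://127.0.0.1:8080/stat'}
#     rtmp_info = NginxRtmpInfo(arguments)
#     rtmp_info.parse_info()                 # fetch and parse the stat XML
#     print('\n'.join(rtmp_info.print_info()))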
|
mit
| -818,754,363,569,974,100
| 38.991489
| 120
| 0.60183
| false
| 3.485905
| false
| false
| false
|
srgblnch/Rijndael
|
Testing/_FIPS197_AES192.py
|
1
|
9943
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
__author__ = "Sergi Blanch-Torne"
__email__ = "srgblnchtrn@protonmail.ch"
__copyright__ = "Copyright 2016 Sergi Blanch-Torne"
__license__ = "GPLv3+"
__status__ = "development"
"""
This file stores the test vectors provided by the fips-197 for the AES192.
"""
aes192 = {}
aes192['key'] = 0x000102030405060708090a0b0c0d0e0f1011121314151617
aes192['input'] = 0x00112233445566778899aabbccddeeff
aes192['output'] = 0xdda97ca4864cdfe06eaf70a0ec0d7191
aes192_round = {}
aes192_round[0] = {}
aes192_round[0]['start'] = aes192['input']
aes192_round[0]['k_sch'] = 0x000102030405060708090a0b0c0d0e0f
aes192_round[0]['end'] = 0x00102030405060708090a0b0c0d0e0f0
aes192_round[1] = {}
aes192_round[1]['start'] = aes192_round[0]['end']
aes192_round[1]['s_box'] = 0x63cab7040953d051cd60e0e7ba70e18c
aes192_round[1]['s_row'] = 0x6353e08c0960e104cd70b751bacad0e7
aes192_round[1]['m_col'] = 0x5f72641557f5bc92f7be3b291db9f91a
aes192_round[1]['k_sch'] = 0x10111213141516175846f2f95c43f4fe
aes192_round[1]['end'] = 0x4f63760643e0aa85aff8c9d041fa0de4
aes192_round[2] = {}
aes192_round[2]['start'] = aes192_round[1]['end']
aes192_round[2]['s_box'] = 0x84fb386f1ae1ac977941dd70832dd769
aes192_round[2]['s_row'] = 0x84e1dd691a41d76f792d389783fbac70
aes192_round[2]['m_col'] = 0x9f487f794f955f662afc86abd7f1ab29
aes192_round[2]['k_sch'] = 0x544afef55847f0fa4856e2e95c43f4fe
aes192_round[2]['end'] = 0xcb02818c17d2af9c62aa64428bb25fd7
aes192_round[3] = {}
aes192_round[3]['start'] = aes192_round[2]['end']
aes192_round[3]['s_box'] = 0x1f770c64f0b579deaaac432c3d37cf0e
aes192_round[3]['s_row'] = 0x1fb5430ef0accf64aa370cde3d77792c
aes192_round[3]['m_col'] = 0xb7a53ecbbf9d75a0c40efc79b674cc11
aes192_round[3]['k_sch'] = 0x40f949b31cbabd4d48f043b810b7b342
aes192_round[3]['end'] = 0xf75c7778a327c8ed8cfebfc1a6c37f53
aes192_round[4] = {}
aes192_round[4]['start'] = aes192_round[3]['end']
aes192_round[4]['s_box'] = 0x684af5bc0acce85564bb0878242ed2ed
aes192_round[4]['s_row'] = 0x68cc08ed0abbd2bc642ef555244ae878
aes192_round[4]['m_col'] = 0x7a1e98bdacb6d1141a6944dd06eb2d3e
aes192_round[4]['k_sch'] = 0x58e151ab04a2a5557effb5416245080c
aes192_round[4]['end'] = 0x22ffc916a81474416496f19c64ae2532
aes192_round[5] = {}
aes192_round[5]['start'] = aes192_round[4]['end']
aes192_round[5]['s_box'] = 0x9316dd47c2fa92834390a1de43e43f23
aes192_round[5]['s_row'] = 0x93faa123c2903f4743e4dd83431692de
aes192_round[5]['m_col'] = 0xaaa755b34cffe57cef6f98e1f01c13e6
aes192_round[5]['k_sch'] = 0x2ab54bb43a02f8f662e3a95d66410c08
aes192_round[5]['end'] = 0x80121e0776fd1d8a8d8c31bc965d1fee
aes192_round[6] = {}
aes192_round[6]['start'] = aes192_round[5]['end']
aes192_round[6]['s_box'] = 0xcdc972c53854a47e5d64c765904cc028
aes192_round[6]['s_row'] = 0xcd54c7283864c0c55d4c727e90c9a465
aes192_round[6]['m_col'] = 0x921f748fd96e937d622d7725ba8ba50c
aes192_round[6]['k_sch'] = 0xf501857297448d7ebdf1c6ca87f33e3c
aes192_round[6]['end'] = 0x671ef1fd4e2a1e03dfdcb1ef3d789b30
aes192_round[7] = {}
aes192_round[7]['start'] = aes192_round[6]['end']
aes192_round[7]['s_box'] = 0x8572a1542fe5727b9e86c8df27bc1404
aes192_round[7]['s_row'] = 0x85e5c8042f8614549ebca17b277272df
aes192_round[7]['m_col'] = 0xe913e7b18f507d4b227ef652758acbcc
aes192_round[7]['k_sch'] = 0xe510976183519b6934157c9ea351f1e0
aes192_round[7]['end'] = 0x0c0370d00c01e622166b8accd6db3a2c
aes192_round[8] = {}
aes192_round[8]['start'] = aes192_round[7]['end']
aes192_round[8]['s_box'] = 0xfe7b5170fe7c8e93477f7e4bf6b98071
aes192_round[8]['s_row'] = 0xfe7c7e71fe7f807047b95193f67b8e4b
aes192_round[8]['m_col'] = 0x6cf5edf996eb0a069c4ef21cbfc25762
aes192_round[8]['k_sch'] = 0x1ea0372a995309167c439e77ff12051e
aes192_round[8]['end'] = 0x7255dad30fb80310e00d6c6b40d0527c
aes192_round[9] = {}
aes192_round[9]['start'] = aes192_round[8]['end']
aes192_round[9]['s_box'] = 0x40fc5766766c7bcae1d7507f09700010
aes192_round[9]['s_row'] = 0x406c501076d70066e17057ca09fc7b7f
aes192_round[9]['m_col'] = 0x7478bcdce8a50b81d4327a9009188262
aes192_round[9]['k_sch'] = 0xdd7e0e887e2fff68608fc842f9dcc154
aes192_round[9]['end'] = 0xa906b254968af4e9b4bdb2d2f0c44336
aes192_round[10] = {}
aes192_round[10]['start'] = aes192_round[9]['end']
aes192_round[10]['s_box'] = 0xd36f3720907ebf1e8d7a37b58c1c1a05
aes192_round[10]['s_row'] = 0xd37e3705907a1a208d1c371e8c6fbfb5
aes192_round[10]['m_col'] = 0x0d73cc2d8f6abe8b0cf2dd9bb83d422e
aes192_round[10]['k_sch'] = 0x859f5f237a8d5a3dc0c02952beefd63a
aes192_round[10]['end'] = 0x88ec930ef5e7e4b6cc32f4c906d29414
aes192_round[11] = {}
aes192_round[11]['start'] = aes192_round[10]['end']
aes192_round[11]['s_box'] = 0xc4cedcabe694694e4b23bfdd6fb522fa
aes192_round[11]['s_row'] = 0xc494bffae62322ab4bb5dc4e6fce69dd
aes192_round[11]['m_col'] = 0x71d720933b6d677dc00b8f28238e0fb7
aes192_round[11]['k_sch'] = 0xde601e7827bcdf2ca223800fd8aeda32
aes192_round[11]['end'] = 0xafb73eeb1cd1b85162280f27fb20d585
aes192_round[12] = {}
aes192_round[12]['start'] = aes192_round[11]['end']
aes192_round[12]['s_box'] = 0x79a9b2e99c3e6cd1aa3476cc0fb70397
aes192_round[12]['s_row'] = 0x793e76979c3403e9aab7b2d10fa96ccc
aes192_round[12]['k_sch'] = 0xa4970a331a78dc09c418c271e3a41d5d
aes192_round[12]['end'] = aes192['output']
aes192_round[0]['iinput'] = aes192['output']
aes192_round[0]['ik_sch'] = aes192_round[12]['k_sch']
aes192_round[0]['ik_add'] = aes192_round[12]['s_row']
aes192_round[0]['iend'] = aes192_round[12]['s_row']
aes192_round[1]['istart'] = aes192_round[0]['iend']
aes192_round[1]['is_row'] = aes192_round[12]['s_box']
aes192_round[1]['is_box'] = aes192_round[11]['end']
aes192_round[1]['ik_sch'] = aes192_round[11]['k_sch']
aes192_round[1]['ik_add'] = aes192_round[11]['m_col']
aes192_round[1]['iend'] = aes192_round[11]['s_row']
aes192_round[2]['istart'] = aes192_round[1]['iend']
aes192_round[2]['is_row'] = aes192_round[11]['s_box']
aes192_round[2]['is_box'] = aes192_round[10]['end']
aes192_round[2]['ik_sch'] = aes192_round[10]['k_sch']
aes192_round[2]['ik_add'] = aes192_round[10]['m_col']
aes192_round[2]['iend'] = aes192_round[10]['s_row']
aes192_round[3]['istart'] = aes192_round[2]['iend']
aes192_round[3]['is_row'] = aes192_round[10]['s_box']
aes192_round[3]['is_box'] = aes192_round[9]['end']
aes192_round[3]['ik_sch'] = aes192_round[9]['k_sch']
aes192_round[3]['ik_add'] = aes192_round[9]['m_col']
aes192_round[3]['iend'] = aes192_round[9]['s_row']
aes192_round[4]['istart'] = aes192_round[3]['iend']
aes192_round[4]['is_row'] = aes192_round[9]['s_box']
aes192_round[4]['is_box'] = aes192_round[8]['end']
aes192_round[4]['ik_sch'] = aes192_round[8]['k_sch']
aes192_round[4]['ik_add'] = aes192_round[8]['m_col']
aes192_round[4]['iend'] = aes192_round[8]['s_row']
aes192_round[5]['istart'] = aes192_round[4]['iend']
aes192_round[5]['is_row'] = aes192_round[8]['s_box']
aes192_round[5]['is_box'] = aes192_round[7]['end']
aes192_round[5]['ik_sch'] = aes192_round[7]['k_sch']
aes192_round[5]['ik_add'] = aes192_round[7]['m_col']
aes192_round[5]['iend'] = aes192_round[7]['s_row']
aes192_round[6]['istart'] = aes192_round[5]['iend']
aes192_round[6]['is_row'] = aes192_round[7]['s_box']
aes192_round[6]['is_box'] = aes192_round[6]['end']
aes192_round[6]['ik_sch'] = aes192_round[6]['k_sch']
aes192_round[6]['ik_add'] = aes192_round[6]['m_col']
aes192_round[6]['iend'] = aes192_round[6]['s_row']
aes192_round[7]['istart'] = aes192_round[6]['iend']
aes192_round[7]['is_row'] = aes192_round[6]['s_box']
aes192_round[7]['is_box'] = aes192_round[5]['end']
aes192_round[7]['ik_sch'] = aes192_round[5]['k_sch']
aes192_round[7]['ik_add'] = aes192_round[5]['m_col']
aes192_round[7]['iend'] = aes192_round[5]['s_row']
aes192_round[8]['istart'] = aes192_round[7]['iend']
aes192_round[8]['is_row'] = aes192_round[5]['s_box']
aes192_round[8]['is_box'] = aes192_round[4]['end']
aes192_round[8]['ik_sch'] = aes192_round[4]['k_sch']
aes192_round[8]['ik_add'] = aes192_round[4]['m_col']
aes192_round[8]['iend'] = aes192_round[4]['s_row']
aes192_round[9]['istart'] = aes192_round[8]['iend']
aes192_round[9]['is_row'] = aes192_round[4]['s_box']
aes192_round[9]['is_box'] = aes192_round[3]['end']
aes192_round[9]['ik_sch'] = aes192_round[3]['k_sch']
aes192_round[9]['ik_add'] = aes192_round[3]['m_col']
aes192_round[9]['iend'] = aes192_round[3]['s_row']
aes192_round[10]['istart'] = aes192_round[9]['iend']
aes192_round[10]['is_row'] = aes192_round[3]['s_box']
aes192_round[10]['is_box'] = aes192_round[2]['end']
aes192_round[10]['ik_sch'] = aes192_round[2]['k_sch']
aes192_round[10]['ik_add'] = aes192_round[2]['m_col']
aes192_round[10]['iend'] = aes192_round[2]['s_row']
aes192_round[11]['istart'] = aes192_round[10]['iend']
aes192_round[11]['is_row'] = aes192_round[2]['s_box']
aes192_round[11]['is_box'] = aes192_round[1]['end']
aes192_round[11]['ik_sch'] = aes192_round[1]['k_sch']
aes192_round[11]['ik_add'] = aes192_round[1]['m_col']
aes192_round[11]['iend'] = aes192_round[1]['s_row']
aes192_round[12]['istart'] = aes192_round[11]['iend']
aes192_round[12]['is_row'] = aes192_round[1]['s_box']
aes192_round[12]['is_box'] = aes192_round[0]['end']
aes192_round[12]['ik_sch'] = aes192_round[0]['k_sch']
aes192_round[12]['ik_add'] = aes192['input']
aes192_round[12]['ioutput'] = aes192['input']
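# A minimal sketch of how these vectors might be checked against an AES-192
# implementation; `encrypt_block` and `decrypt_block` are hypothetical
# helpers, not part of this repository:
#
#     assert encrypt_block(aes192['key'], aes192['input']) == aes192['output']
#     assert decrypt_block(aes192['key'], aes192['output']) == aes192['input']
#
# The per-round dictionaries (s_box, s_row, m_col, k_sch and their i*
# counterparts) mirror the intermediate states of the FIPS-197 Appendix C.2
# walkthrough, so each round transformation can be verified on its own.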
|
gpl-3.0
| 6,142,322,919,003,080,000
| 43.788288
| 78
| 0.708941
| false
| 2.141965
| false
| false
| false
|
assencess/myshop
|
shop/views.py
|
1
|
2754
|
from django.shortcuts import render, get_object_or_404
from .models import Category, Product
from .forms import EmailContactForm
from cart.forms import CartAddProductForm
from django.core.mail import send_mail
from django.views import View
from django.views.generic import DetailView, ListView
class ProductListView(ListView):
model = Category
template_name = 'shop/product/list.html'
def get_context_data(self, **kwargs):
# get context
context = super(ProductListView, self).get_context_data(**kwargs)
        if 'category_slug' not in self.kwargs:
self.kwargs['category_slug'] = None
category = None
categories = Category.objects.all()
products = Product.objects.all().filter(available=True)
if self.kwargs['category_slug']:
category = get_object_or_404(Category,
slug=self.kwargs['category_slug'])
products = products.filter(category=category)
context['category'] = category
context['categories'] = categories
context['products'] = products
return context
class ProductDetailView(DetailView):
model = Product
template_name = 'shop/product/detail.html'
def get_context_data(self, **kwargs):
# get context
context = super(ProductDetailView, self).get_context_data(**kwargs)
product = get_object_or_404(Product, id=self.kwargs['id'],
slug=self.kwargs['slug'],
available=True)
cart_product_form = CartAddProductForm()
context['product'] = product
context['cart_product_form'] = cart_product_form
return context
class ContactView(View):
form_class = EmailContactForm
initial = {'form': form_class()}
    template_name = 'shop/contact.html'
sent = False
def get(self, request, *args, **kwargs):
return render(request, 'shop/contact.html',
{'form': self.form_class()})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
cd = form.cleaned_data
# send message to client
client_email = cd['email']
subject = 'Support from www.localhost'
message = '{} your messages was sent to support of {}' \
.format(cd['name'], 'www.localhost')
send_mail(subject, message, 'www.localhost', [client_email])
# send message to support of localhost
subject = 'From client {}'.format(client_email)
send_mail(subject, cd['comments'], client_email,
['borodaa@gmail.com'])
            self.sent = True
return render(request, 'shop/contact.html', {'form': form})
|
gpl-3.0
| -1,827,950,606,063,933,700
| 35.72
| 75
| 0.617284
| false
| 4.230415
| false
| false
| false
|
rmariano/dotfiles
|
git-hooks/branch_ticket_name.py
|
1
|
2784
|
#!/usr/bin/python3
"""
From: https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
The commit-msg hook takes one parameter, which again is the path to a
temporary file that contains the commit message written by the developer.
If this script exits non-zero, Git aborts the commit process, so you can use
it to validate your project state or commit message before
allowing a commit to go through.
argv[1]: path to the temp file where to write the commit message
argv[2]: Type of commit
argv[3]: SHA-1 of commit, if it is an amend.
"""
import subprocess
import sys
def current_branch_name():
return subprocess.check_output(
('git', 'rev-parse', '--abbrev-ref', 'HEAD')).decode().strip('\n')
def ticket_name(branch_name):
"""
Assume the naming convention <ticket_no><underscore><description>
and return <ticket_no>
Where: <underscore> -> "_"
The delimiter is an <underscore>
    If the convention is not respected, the token up to the first
    <underscore> is returned, or the whole name if no underscore is found.
:param str branch_name: name of the branch we are currently in
:return: ticket number from the branch
"""
ticket_no, _, _ = branch_name.partition("_")
return ticket_no
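# A short illustration of the convention described above (hypothetical
# branch names):
#
#     >>> ticket_name('PROJ-123_fix_login_bug')
#     'PROJ-123'
#     >>> ticket_name('hotfix')   # no underscore: the whole name is returned
#     'hotfix'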
def ticket_from_branch():
return ticket_name(current_branch_name())
def header():
"""
Return the string that will compose the header of the commit msg
"""
ticket = ticket_from_branch()
return """{0}:""".format(ticket)
def is_merge():
"""
    Check that the second parameter indicates a merge, and that there are no
    more parameters (last index is 2, hence length 3).
"""
try:
commit_type = sys.argv[2]
except IndexError:
return False
else:
return commit_type.lower() == "merge" and len(sys.argv) == 3
def is_amend():
    """
    If the commit is an amend, its SHA-1 is passed in sys.argv[3], hence
the length is 4.
"""
return len(sys.argv) == 4
def should_write_header():
    return not (is_merge() or is_amend())
def write_commit_msg_template(commit_msg_file, header, content):
"""
:param file commit_msg_file: the file where to dump the new content
:param str header: the first line (title) in the commit msg
:param str content: Original content from the base template of the
commit msg.
"""
if should_write_header():
commit_msg_file.write(header)
commit_msg_file.write(content)
if __name__ == '__main__':
commit_msg_filename = sys.argv[1]
with open(commit_msg_filename, "r") as original:
content = original.read()
with open(commit_msg_filename, "w") as commit_msg_file:
write_commit_msg_template(commit_msg_file, header(), content)
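# A short sketch of the resulting header (assuming the current branch is
# named "PROJ-123_some_fix"):
#
#     >>> header()
#     'PROJ-123:'
#
# Git invokes this script as the commit-msg hook (typically installed as an
# executable .git/hooks/commit-msg), passing the message file path as
# sys.argv[1], so the header is prepended to the commit message template.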
|
mit
| -6,879,392,724,612,010,000
| 26.564356
| 79
| 0.651221
| false
| 3.746972
| false
| false
| false
|
google-research/google-research
|
cfq/evaluate_main.py
|
1
|
2144
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Given a list of questions, compare golden answers with inferred answers.
Writes accuracy (fraction of answers correct), and writes all correct and
incorrect output.
"""
import os
from absl import app
from absl import flags
from cfq import evaluate as evaluator
FLAGS = flags.FLAGS
flags.DEFINE_string('questions_path', None, 'Path to the input questions.')
flags.DEFINE_string('golden_answers_path', None,
'Path to the expected (golden) answers.')
flags.DEFINE_string('inferred_answers_path', None,
'Path to the inferred answers.')
flags.DEFINE_string('output_path', None, 'Path to write evaluation results to')
flags.mark_flag_as_required('output_path')
flags.register_validator('questions_path', os.path.exists,
'Questions path not found.')
flags.register_validator('golden_answers_path', os.path.exists,
'Golden answers path not found.')
flags.register_validator('inferred_answers_path', os.path.exists,
'Inferred answers path not found.')
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
accuracy_result = evaluator.get_accuracy_result(FLAGS.questions_path,
FLAGS.golden_answers_path,
FLAGS.inferred_answers_path)
evaluator.write_accuracy_result(
accuracy_result, FLAGS.output_path, print_output=True)
if __name__ == '__main__':
app.run(main)
|
apache-2.0
| 6,893,265,948,132,583,000
| 35.965517
| 79
| 0.680037
| false
| 4.138996
| false
| false
| false
|
jiwanlimbu/aura
|
keystone/token/persistence/backends/sql.py
|
1
|
12737
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import functools
from oslo_log import log
from oslo_utils import timeutils
from keystone.common import sql
import keystone.conf
from keystone import exception
from keystone.i18n import _LI
from keystone import token
from keystone.token.providers import common
CONF = keystone.conf.CONF
LOG = log.getLogger(__name__)
class TokenModel(sql.ModelBase, sql.DictBase):
__tablename__ = 'token'
attributes = ['id', 'expires', 'user_id', 'trust_id']
id = sql.Column(sql.String(64), primary_key=True)
expires = sql.Column(sql.DateTime(), default=None)
extra = sql.Column(sql.JsonBlob())
valid = sql.Column(sql.Boolean(), default=True, nullable=False)
user_id = sql.Column(sql.String(64))
trust_id = sql.Column(sql.String(64))
__table_args__ = (
sql.Index('ix_token_expires', 'expires'),
sql.Index('ix_token_expires_valid', 'expires', 'valid'),
sql.Index('ix_token_user_id', 'user_id'),
sql.Index('ix_token_trust_id', 'trust_id')
)
def _expiry_upper_bound_func():
# don't flush anything within the grace window
sec = datetime.timedelta(seconds=CONF.token.allow_expired_window)
return timeutils.utcnow() - sec
def _expiry_range_batched(session, upper_bound_func, batch_size):
"""Return the stop point of the next batch for expiration.
Return the timestamp of the next token that is `batch_size` rows from
being the oldest expired token.
"""
# This expiry strategy splits the tokens into roughly equal sized batches
# to be deleted. It does this by finding the timestamp of a token
# `batch_size` rows from the oldest token and yielding that to the caller.
# It's expected that the caller will then delete all rows with a timestamp
# equal to or older than the one yielded. This may delete slightly more
# tokens than the batch_size, but that should be ok in almost all cases.
LOG.debug('Token expiration batch size: %d', batch_size)
query = session.query(TokenModel.expires)
query = query.filter(TokenModel.expires < upper_bound_func())
query = query.order_by(TokenModel.expires)
query = query.offset(batch_size - 1)
query = query.limit(1)
while True:
try:
next_expiration = query.one()[0]
except sql.NotFound:
            # There are fewer than `batch_size` rows remaining, so fall
# through to the normal delete
break
yield next_expiration
yield upper_bound_func()
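# A minimal sketch of how the generator above is consumed; this mirrors
# flush_expired_tokens further below, with `session` purely illustrative:
#
#     for bound in _expiry_range_batched(session, _expiry_upper_bound_func,
#                                        batch_size=100):
#         session.query(TokenModel).filter(
#             TokenModel.expires <= bound).delete(synchronize_session=False)
#         session.commit()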
def _expiry_range_all(session, upper_bound_func):
"""Expire all tokens in one pass."""
yield upper_bound_func()
class Token(token.persistence.TokenDriverBase):
# Public interface
def get_token(self, token_id):
if token_id is None:
raise exception.TokenNotFound(token_id=token_id)
with sql.session_for_read() as session:
token_ref = session.query(TokenModel).get(token_id)
if not token_ref or not token_ref.valid:
raise exception.TokenNotFound(token_id=token_id)
return token_ref.to_dict()
def create_token(self, token_id, data):
data_copy = copy.deepcopy(data)
if not data_copy.get('expires'):
data_copy['expires'] = common.default_expire_time()
if not data_copy.get('user_id'):
data_copy['user_id'] = data_copy['user']['id']
token_ref = TokenModel.from_dict(data_copy)
token_ref.valid = True
with sql.session_for_write() as session:
session.add(token_ref)
return token_ref.to_dict()
def delete_token(self, token_id):
with sql.session_for_write() as session:
token_ref = session.query(TokenModel).get(token_id)
if not token_ref or not token_ref.valid:
raise exception.TokenNotFound(token_id=token_id)
token_ref.valid = False
def delete_tokens(self, user_id, tenant_id=None, trust_id=None,
consumer_id=None):
"""Delete all tokens in one session.
The user_id will be ignored if the trust_id is specified. user_id
will always be specified.
If using a trust, the token's user_id is set to the trustee's user ID
        or the trustor's user ID, so trust_id is used to query the tokens.
"""
token_list = []
with sql.session_for_write() as session:
now = timeutils.utcnow()
query = session.query(TokenModel)
query = query.filter_by(valid=True)
query = query.filter(TokenModel.expires > now)
if trust_id:
query = query.filter(TokenModel.trust_id == trust_id)
else:
query = query.filter(TokenModel.user_id == user_id)
for token_ref in query.all():
if tenant_id:
token_ref_dict = token_ref.to_dict()
if not self._tenant_matches(tenant_id, token_ref_dict):
continue
if consumer_id:
token_ref_dict = token_ref.to_dict()
if not self._consumer_matches(consumer_id, token_ref_dict):
continue
token_ref.valid = False
token_list.append(token_ref.id)
return token_list
def _tenant_matches(self, tenant_id, token_ref_dict):
return ((tenant_id is None) or
(token_ref_dict.get('tenant') and
token_ref_dict['tenant'].get('id') == tenant_id))
def _consumer_matches(self, consumer_id, ref):
if consumer_id is None:
return True
else:
try:
oauth = ref['token_data']['token'].get('OS-OAUTH1', {})
return oauth and oauth['consumer_id'] == consumer_id
except KeyError:
return False
def _list_tokens_for_trust(self, trust_id):
with sql.session_for_read() as session:
tokens = []
now = timeutils.utcnow()
query = session.query(TokenModel)
query = query.filter(TokenModel.expires > now)
query = query.filter(TokenModel.trust_id == trust_id)
token_references = query.filter_by(valid=True)
for token_ref in token_references:
token_ref_dict = token_ref.to_dict()
tokens.append(token_ref_dict['id'])
return tokens
def _list_tokens_for_user(self, user_id, tenant_id=None):
with sql.session_for_read() as session:
tokens = []
now = timeutils.utcnow()
query = session.query(TokenModel)
query = query.filter(TokenModel.expires > now)
query = query.filter(TokenModel.user_id == user_id)
token_references = query.filter_by(valid=True)
for token_ref in token_references:
token_ref_dict = token_ref.to_dict()
if self._tenant_matches(tenant_id, token_ref_dict):
tokens.append(token_ref['id'])
return tokens
def _list_tokens_for_consumer(self, user_id, consumer_id):
tokens = []
with sql.session_for_write() as session:
now = timeutils.utcnow()
query = session.query(TokenModel)
query = query.filter(TokenModel.expires > now)
query = query.filter(TokenModel.user_id == user_id)
token_references = query.filter_by(valid=True)
for token_ref in token_references:
token_ref_dict = token_ref.to_dict()
if self._consumer_matches(consumer_id, token_ref_dict):
tokens.append(token_ref_dict['id'])
return tokens
def _list_tokens(self, user_id, tenant_id=None, trust_id=None,
consumer_id=None):
if not CONF.token.revoke_by_id:
return []
if trust_id:
return self._list_tokens_for_trust(trust_id)
if consumer_id:
return self._list_tokens_for_consumer(user_id, consumer_id)
else:
return self._list_tokens_for_user(user_id, tenant_id)
def list_revoked_tokens(self):
with sql.session_for_read() as session:
tokens = []
now = timeutils.utcnow()
query = session.query(TokenModel.id, TokenModel.expires,
TokenModel.extra)
query = query.filter(TokenModel.expires > now)
token_references = query.filter_by(valid=False)
for token_ref in token_references:
token_data = token_ref[2]['token_data']
if 'access' in token_data:
# It's a v2 token.
audit_ids = token_data['access']['token']['audit_ids']
else:
# It's a v3 token.
audit_ids = token_data['token']['audit_ids']
record = {
'id': token_ref[0],
'expires': token_ref[1],
'audit_id': audit_ids[0],
}
tokens.append(record)
return tokens
def _expiry_range_strategy(self, dialect):
"""Choose a token range expiration strategy.
Based on the DB dialect, select an expiry range callable that is
appropriate.
"""
# DB2 and MySQL can both benefit from a batched strategy. On DB2 the
# transaction log can fill up and on MySQL w/Galera, large
# transactions can exceed the maximum write set size.
if dialect == 'ibm_db_sa':
# Limit of 100 is known to not fill a transaction log
# of default maximum size while not significantly
# impacting the performance of large token purges on
# systems where the maximum transaction log size has
# been increased beyond the default.
return functools.partial(_expiry_range_batched,
batch_size=100)
elif dialect == 'mysql':
# We want somewhat more than 100, since Galera replication delay is
# at least RTT*2. This can be a significant amount of time if
# doing replication across a WAN.
return functools.partial(_expiry_range_batched,
batch_size=1000)
return _expiry_range_all
def flush_expired_tokens(self):
# The DBAPI itself is in a "never autocommit" mode,
# BEGIN is emitted automatically as soon as any work is done,
# COMMIT is emitted when SQLAlchemy invokes commit() on the
# underlying DBAPI connection. So SQLAlchemy is only simulating
# "begin" here in any case, it is in fact automatic by the DBAPI.
with sql.session_for_write() as session: # Calls session.begin()
dialect = session.bind.dialect.name
expiry_range_func = self._expiry_range_strategy(dialect)
query = session.query(TokenModel.expires)
total_removed = 0
upper_bound_func = _expiry_upper_bound_func
for expiry_time in expiry_range_func(session, upper_bound_func):
delete_query = query.filter(TokenModel.expires <=
expiry_time)
row_count = delete_query.delete(synchronize_session=False)
# Explicitly commit each batch so as to free up
# resources early. We do not actually need
# transactional semantics here.
session.commit() # Emits connection.commit() on DBAPI
# Tells SQLAlchemy to "begin", e.g. hold a new connection
# open in a transaction
session.begin()
total_removed += row_count
LOG.debug('Removed %d total expired tokens', total_removed)
# When the "with: " block ends, the final "session.commit()"
# is emitted by enginefacade
session.flush()
LOG.info(_LI('Total expired tokens removed: %d'), total_removed)
|
apache-2.0
| 6,549,671,619,302,245,000
| 40.488599
| 79
| 0.593546
| false
| 4.154273
| false
| false
| false
|
mattclark/osf.io
|
tests/test_webtests.py
|
1
|
50803
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functional tests using WebTest."""
import datetime as dt
import httplib as http
import logging
import unittest
import markupsafe
import mock
import pytest
from nose.tools import * # noqa: F403
import re
from django.utils import timezone
from addons.wiki.utils import to_mongo_key
from framework.auth import exceptions as auth_exc
from framework.auth.core import Auth
from tests.base import OsfTestCase
from tests.base import fake
from osf_tests.factories import (
fake_email,
AuthUserFactory,
NodeFactory,
PreprintFactory,
PreprintProviderFactory,
PrivateLinkFactory,
ProjectFactory,
RegistrationFactory,
SubjectFactory,
UserFactory,
UnconfirmedUserFactory,
UnregUserFactory,
)
from osf.utils import permissions
from addons.wiki.models import WikiPage, WikiVersion
from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory
from website import settings, language
from addons.osfstorage.models import OsfStorageFile
from website.util import web_url_for, api_url_for
from api_tests import utils as test_utils
logging.getLogger('website.project.model').setLevel(logging.ERROR)
def assert_in_html(member, container, **kwargs):
"""Looks for the specified member in markupsafe-escaped HTML output"""
member = markupsafe.escape(member)
return assert_in(member, container, **kwargs)
def assert_not_in_html(member, container, **kwargs):
"""Looks for the specified member in markupsafe-escaped HTML output"""
member = markupsafe.escape(member)
return assert_not_in(member, container, **kwargs)
class TestDisabledUser(OsfTestCase):
def setUp(self):
super(TestDisabledUser, self).setUp()
self.user = UserFactory()
self.user.set_password('Korben Dallas')
self.user.is_disabled = True
self.user.save()
    def test_profile_disabled_returns_410(self):
res = self.app.get(self.user.url, expect_errors=True)
assert_equal(res.status_code, 410)
class TestAnUnregisteredUser(OsfTestCase):
def test_cant_see_profile_if_not_logged_in(self):
url = web_url_for('profile_view')
res = self.app.get(url)
res = res.follow()
assert_equal(res.status_code, 301)
assert_in('/login/', res.headers['Location'])
@pytest.mark.enable_bookmark_creation
@pytest.mark.enable_quickfiles_creation
class TestAUser(OsfTestCase):
def setUp(self):
super(TestAUser, self).setUp()
self.user = AuthUserFactory()
self.auth = self.user.auth
def test_can_see_profile_url(self):
res = self.app.get(self.user.url).maybe_follow()
assert_in(self.user.url, res)
# `GET /login/` without parameters is redirected to `/dashboard/` page which has `@must_be_logged_in` decorator
# if user is not logged in, she/he is further redirected to CAS login page
def test_is_redirected_to_cas_if_not_logged_in_at_login_page(self):
res = self.app.get('/login/').follow()
assert_equal(res.status_code, 302)
location = res.headers.get('Location')
assert_in('login?service=', location)
def test_is_redirected_to_dashboard_if_already_logged_in_at_login_page(self):
res = self.app.get('/login/', auth=self.user.auth)
assert_equal(res.status_code, 302)
assert 'dashboard' in res.headers.get('Location')
def test_register_page(self):
res = self.app.get('/register/')
assert_equal(res.status_code, 200)
def test_is_redirected_to_dashboard_if_already_logged_in_at_register_page(self):
res = self.app.get('/register/', auth=self.user.auth)
assert_equal(res.status_code, 302)
assert 'dashboard' in res.headers.get('Location')
def test_sees_projects_in_her_dashboard(self):
# the user already has a project
project = ProjectFactory(creator=self.user)
project.add_contributor(self.user)
project.save()
res = self.app.get('/myprojects/', auth=self.user.auth)
assert_in('Projects', res) # Projects heading
def test_does_not_see_osffiles_in_user_addon_settings(self):
res = self.app.get('/settings/addons/', auth=self.auth, auto_follow=True)
assert_not_in('OSF Storage', res)
def test_sees_osffiles_in_project_addon_settings(self):
project = ProjectFactory(creator=self.user)
project.add_contributor(
self.user,
permissions=permissions.ADMIN,
save=True)
res = self.app.get('/{0}/addons/'.format(project._primary_key), auth=self.auth, auto_follow=True)
assert_in('OSF Storage', res)
def test_sees_correct_title_on_dashboard(self):
# User goes to dashboard
res = self.app.get('/myprojects/', auth=self.auth, auto_follow=True)
title = res.html.title.string
assert_equal('OSF | My Projects', title)
def test_can_see_make_public_button_if_admin(self):
# User is a contributor on a project
project = ProjectFactory()
project.add_contributor(
self.user,
permissions=permissions.ADMIN,
save=True)
# User goes to the project page
res = self.app.get(project.url, auth=self.auth).maybe_follow()
assert_in('Make Public', res)
def test_cant_see_make_public_button_if_not_admin(self):
# User is a contributor on a project
project = ProjectFactory()
project.add_contributor(
self.user,
permissions=permissions.WRITE,
save=True)
# User goes to the project page
res = self.app.get(project.url, auth=self.auth).maybe_follow()
assert_not_in('Make Public', res)
def test_can_see_make_private_button_if_admin(self):
# User is a contributor on a project
project = ProjectFactory(is_public=True)
project.add_contributor(
self.user,
permissions=permissions.ADMIN,
save=True)
# User goes to the project page
res = self.app.get(project.url, auth=self.auth).maybe_follow()
assert_in('Make Private', res)
def test_cant_see_make_private_button_if_not_admin(self):
# User is a contributor on a project
project = ProjectFactory(is_public=True)
project.add_contributor(
self.user,
permissions=permissions.WRITE,
save=True)
# User goes to the project page
res = self.app.get(project.url, auth=self.auth).maybe_follow()
assert_not_in('Make Private', res)
def test_sees_logs_on_a_project(self):
project = ProjectFactory(is_public=True)
# User goes to the project's page
res = self.app.get(project.url, auth=self.auth).maybe_follow()
# Can see log event
assert_in('created', res)
def test_no_wiki_content_message(self):
project = ProjectFactory(creator=self.user)
# Goes to project's wiki, where there is no content
res = self.app.get('/{0}/wiki/home/'.format(project._primary_key), auth=self.auth)
# Sees a message indicating no content
assert_in('Add important information, links, or images here to describe your project.', res)
# Sees that edit panel is open by default when home wiki has no content
assert_in('panelsUsed: ["view", "menu", "edit"]', res)
def test_wiki_content(self):
project = ProjectFactory(creator=self.user)
wiki_page_name = 'home'
wiki_content = 'Kittens'
wiki_page = WikiFactory(
user=self.user,
node=project,
)
wiki = WikiVersionFactory(
wiki_page=wiki_page,
content=wiki_content
)
res = self.app.get('/{0}/wiki/{1}/'.format(
project._primary_key,
wiki_page_name,
), auth=self.auth)
assert_not_in('Add important information, links, or images here to describe your project.', res)
assert_in(wiki_content, res)
assert_in('panelsUsed: ["view", "menu"]', res)
def test_wiki_page_name_non_ascii(self):
project = ProjectFactory(creator=self.user)
non_ascii = to_mongo_key('WöRlÐé')
WikiPage.objects.create_for_node(project, 'WöRlÐé', 'new content', Auth(self.user))
wv = WikiVersion.objects.get_for_node(project, non_ascii)
assert wv.wiki_page.page_name.upper() == non_ascii.decode('utf-8').upper()
def test_noncontributor_cannot_see_wiki_if_no_content(self):
user2 = UserFactory()
# user2 creates a public project and adds no wiki content
project = ProjectFactory(creator=user2, is_public=True)
# self navigates to project
res = self.app.get(project.url).maybe_follow()
# Should not see wiki widget (since non-contributor and no content)
assert_not_in('Add important information, links, or images here to describe your project.', res)
def test_wiki_does_not_exist(self):
project = ProjectFactory(creator=self.user)
res = self.app.get('/{0}/wiki/{1}/'.format(
project._primary_key,
'not a real page yet',
), auth=self.auth, expect_errors=True)
assert_in('Add important information, links, or images here to describe your project.', res)
def test_sees_own_profile(self):
res = self.app.get('/profile/', auth=self.auth)
td1 = res.html.find('td', text=re.compile(r'Public(.*?)Profile'))
td2 = td1.find_next_sibling('td')
assert_equal(td2.text, self.user.display_absolute_url)
def test_sees_another_profile(self):
user2 = UserFactory()
res = self.app.get(user2.url, auth=self.auth)
td1 = res.html.find('td', text=re.compile(r'Public(.*?)Profile'))
td2 = td1.find_next_sibling('td')
assert_equal(td2.text, user2.display_absolute_url)
@pytest.mark.enable_bookmark_creation
class TestComponents(OsfTestCase):
def setUp(self):
super(TestComponents, self).setUp()
self.user = AuthUserFactory()
self.consolidate_auth = Auth(user=self.user)
self.project = ProjectFactory(creator=self.user)
self.project.add_contributor(contributor=self.user, auth=self.consolidate_auth)
        # A non-project component
self.component = NodeFactory(
category='hypothesis',
creator=self.user,
parent=self.project,
)
self.component.save()
self.component.set_privacy('public', self.consolidate_auth)
self.component.set_privacy('private', self.consolidate_auth)
self.project.save()
self.project_url = self.project.web_url_for('view_project')
def test_sees_parent(self):
res = self.app.get(self.component.url, auth=self.user.auth).maybe_follow()
parent_title = res.html.find_all('h2', class_='node-parent-title')
assert_equal(len(parent_title), 1)
assert_in(self.project.title, parent_title[0].text) # Bs4 will handle unescaping HTML here
def test_delete_project(self):
res = self.app.get(
self.component.url + 'settings/',
auth=self.user.auth
).maybe_follow()
assert_in(
'Delete {0}'.format(self.component.project_or_component),
res
)
def test_cant_delete_project_if_not_admin(self):
non_admin = AuthUserFactory()
self.component.add_contributor(
non_admin,
permissions=permissions.WRITE,
auth=self.consolidate_auth,
save=True,
)
res = self.app.get(
self.component.url + 'settings/',
auth=non_admin.auth
).maybe_follow()
assert_not_in(
'Delete {0}'.format(self.component.project_or_component),
res
)
def test_can_configure_comments_if_admin(self):
res = self.app.get(
self.component.url + 'settings/',
auth=self.user.auth,
).maybe_follow()
assert_in('Commenting', res)
def test_cant_configure_comments_if_not_admin(self):
non_admin = AuthUserFactory()
self.component.add_contributor(
non_admin,
permissions=permissions.WRITE,
auth=self.consolidate_auth,
save=True,
)
res = self.app.get(
self.component.url + 'settings/',
auth=non_admin.auth
).maybe_follow()
assert_not_in('Commenting', res)
def test_components_should_have_component_list(self):
res = self.app.get(self.component.url, auth=self.user.auth)
assert_in('Components', res)
@pytest.mark.enable_bookmark_creation
class TestPrivateLinkView(OsfTestCase):
def setUp(self):
super(TestPrivateLinkView, self).setUp()
self.user = AuthUserFactory() # Is NOT a contributor
self.project = ProjectFactory(is_public=False)
self.link = PrivateLinkFactory(anonymous=True)
self.link.nodes.add(self.project)
self.link.save()
self.project_url = self.project.web_url_for('view_project')
def test_anonymous_link_hide_contributor(self):
res = self.app.get(self.project_url, {'view_only': self.link.key})
assert_in('Anonymous Contributors', res.body)
assert_not_in(self.user.fullname, res)
def test_anonymous_link_hides_citations(self):
res = self.app.get(self.project_url, {'view_only': self.link.key})
assert_not_in('Citation:', res)
def test_no_warning_for_read_only_user_with_valid_link(self):
link2 = PrivateLinkFactory(anonymous=False)
link2.nodes.add(self.project)
link2.save()
self.project.add_contributor(
self.user,
permissions=permissions.READ,
save=True,
)
res = self.app.get(self.project_url, {'view_only': link2.key},
auth=self.user.auth)
assert_not_in(
'is being viewed through a private, view-only link. '
'Anyone with the link can view this project. Keep '
'the link safe.',
res.body
)
def test_no_warning_for_read_only_user_with_invalid_link(self):
self.project.add_contributor(
self.user,
permissions=permissions.READ,
save=True,
)
res = self.app.get(self.project_url, {'view_only': 'not_valid'},
auth=self.user.auth)
assert_not_in(
'is being viewed through a private, view-only link. '
'Anyone with the link can view this project. Keep '
'the link safe.',
res.body
)
@pytest.mark.enable_bookmark_creation
@pytest.mark.enable_quickfiles_creation
class TestMergingAccounts(OsfTestCase):
def setUp(self):
super(TestMergingAccounts, self).setUp()
self.user = UserFactory.build()
self.user.fullname = "tess' test string"
self.user.set_password('science')
self.user.save()
self.dupe = UserFactory.build()
self.dupe.set_password('example')
self.dupe.save()
def test_merged_user_is_not_shown_as_a_contributor(self):
project = ProjectFactory(is_public=True)
# Both the master and dupe are contributors
project.add_contributor(self.dupe, log=False)
project.add_contributor(self.user, log=False)
project.save()
# At the project page, both are listed as contributors
res = self.app.get(project.url).maybe_follow()
assert_in_html(self.user.fullname, res)
assert_in_html(self.dupe.fullname, res)
# The accounts are merged
self.user.merge_user(self.dupe)
self.user.save()
# Now only the master user is shown at the project page
res = self.app.get(project.url).maybe_follow()
assert_in_html(self.user.fullname, res)
assert_true(self.dupe.is_merged)
assert_not_in(self.dupe.fullname, res)
def test_merged_user_has_alert_message_on_profile(self):
# Master merges dupe
self.user.merge_user(self.dupe)
self.user.save()
# At the dupe user's profile there is an alert message at the top
# indicating that the user is merged
res = self.app.get('/profile/{0}/'.format(self.dupe._primary_key)).maybe_follow()
assert_in('This account has been merged', res)
@pytest.mark.enable_bookmark_creation
class TestShortUrls(OsfTestCase):
def setUp(self):
super(TestShortUrls, self).setUp()
self.user = AuthUserFactory()
self.auth = self.user.auth
self.consolidate_auth = Auth(user=self.user)
self.project = ProjectFactory(creator=self.user)
        # A non-project component
self.component = NodeFactory(parent=self.project, category='hypothesis', creator=self.user)
# Hack: Add some logs to component; should be unnecessary pending
# improvements to factories from @rliebz
self.component.set_privacy('public', auth=self.consolidate_auth)
self.component.set_privacy('private', auth=self.consolidate_auth)
self.wiki = WikiFactory(
user=self.user,
node=self.component,
)
def _url_to_body(self, url):
return self.app.get(
url,
auth=self.auth
).maybe_follow(
auth=self.auth,
).normal_body
def test_project_url(self):
assert_equal(
self._url_to_body(self.project.deep_url),
self._url_to_body(self.project.url),
)
def test_component_url(self):
assert_equal(
self._url_to_body(self.component.deep_url),
self._url_to_body(self.component.url),
)
def test_wiki_url(self):
assert_equal(
self._url_to_body(self.wiki.deep_url),
self._url_to_body(self.wiki.url),
)
@pytest.mark.enable_bookmark_creation
@pytest.mark.enable_implicit_clean
class TestClaiming(OsfTestCase):
def setUp(self):
super(TestClaiming, self).setUp()
self.referrer = AuthUserFactory()
self.project = ProjectFactory(creator=self.referrer, is_public=True)
def test_correct_name_shows_in_contributor_list(self):
name1, email = fake.name(), fake_email()
UnregUserFactory(fullname=name1, email=email)
name2, email = fake.name(), fake_email()
# Added with different name
self.project.add_unregistered_contributor(fullname=name2,
email=email, auth=Auth(self.referrer))
self.project.save()
res = self.app.get(self.project.url, auth=self.referrer.auth)
# Correct name is shown
assert_in_html(name2, res)
assert_not_in(name1, res)
def test_user_can_set_password_on_claim_page(self):
name, email = fake.name(), fake_email()
new_user = self.project.add_unregistered_contributor(
email=email,
fullname=name,
auth=Auth(self.referrer)
)
self.project.save()
claim_url = new_user.get_claim_url(self.project._primary_key)
res = self.app.get(claim_url)
self.project.reload()
assert_in('Set Password', res)
form = res.forms['setPasswordForm']
#form['username'] = new_user.username #Removed as long as E-mail can't be updated.
form['password'] = 'killerqueen'
form['password2'] = 'killerqueen'
res = form.submit().follow()
new_user.reload()
assert_true(new_user.check_password('killerqueen'))
def test_sees_is_redirected_if_user_already_logged_in(self):
name, email = fake.name(), fake_email()
new_user = self.project.add_unregistered_contributor(
email=email,
fullname=name,
auth=Auth(self.referrer)
)
self.project.save()
existing = AuthUserFactory()
claim_url = new_user.get_claim_url(self.project._primary_key)
# a user is already logged in
res = self.app.get(claim_url, auth=existing.auth, expect_errors=True)
assert_equal(res.status_code, 302)
def test_unregistered_users_names_are_project_specific(self):
name1, name2, email = fake.name(), fake.name(), fake_email()
project2 = ProjectFactory(creator=self.referrer)
# different projects use different names for the same unreg contributor
self.project.add_unregistered_contributor(
email=email,
fullname=name1,
auth=Auth(self.referrer)
)
self.project.save()
project2.add_unregistered_contributor(
email=email,
fullname=name2,
auth=Auth(self.referrer)
)
project2.save()
self.app.authenticate(*self.referrer.auth)
# Each project displays a different name in the contributor list
res = self.app.get(self.project.url)
assert_in_html(name1, res)
res2 = self.app.get(project2.url)
assert_in_html(name2, res2)
@unittest.skip('as long as E-mails cannot be changed')
def test_cannot_set_email_to_a_user_that_already_exists(self):
reg_user = UserFactory()
name, email = fake.name(), fake_email()
new_user = self.project.add_unregistered_contributor(
email=email,
fullname=name,
auth=Auth(self.referrer)
)
self.project.save()
# Goes to claim url and successfully claims account
claim_url = new_user.get_claim_url(self.project._primary_key)
res = self.app.get(claim_url)
self.project.reload()
assert_in('Set Password', res)
form = res.forms['setPasswordForm']
# Fills out an email that is the username of another user
form['username'] = reg_user.username
form['password'] = 'killerqueen'
form['password2'] = 'killerqueen'
res = form.submit().maybe_follow(expect_errors=True)
assert_in(
language.ALREADY_REGISTERED.format(email=reg_user.username),
res
)
def test_correct_display_name_is_shown_at_claim_page(self):
original_name = fake.name()
unreg = UnregUserFactory(fullname=original_name)
different_name = fake.name()
new_user = self.project.add_unregistered_contributor(
email=unreg.username,
fullname=different_name,
auth=Auth(self.referrer),
)
self.project.save()
claim_url = new_user.get_claim_url(self.project._primary_key)
res = self.app.get(claim_url)
# Correct name (different_name) should be on page
assert_in_html(different_name, res)
class TestConfirmingEmail(OsfTestCase):
def setUp(self):
super(TestConfirmingEmail, self).setUp()
self.user = UnconfirmedUserFactory()
self.confirmation_url = self.user.get_confirmation_url(
self.user.username,
external=False,
)
self.confirmation_token = self.user.get_confirmation_token(
self.user.username
)
def test_cannot_remove_another_user_email(self):
user1 = AuthUserFactory()
user2 = AuthUserFactory()
url = api_url_for('update_user')
header = {'id': user1.username, 'emails': [{'address': user1.username}]}
res = self.app.put_json(url, header, auth=user2.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_make_primary_email_for_another_user(self):
user1 = AuthUserFactory()
user2 = AuthUserFactory()
email = 'test@cos.io'
user1.emails.create(address=email)
user1.save()
url = api_url_for('update_user')
header = {'id': user1.username,
'emails': [{'address': user1.username, 'primary': False, 'confirmed': True},
{'address': email, 'primary': True, 'confirmed': True}
]}
res = self.app.put_json(url, header, auth=user2.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_add_email_for_another_user(self):
user1 = AuthUserFactory()
user2 = AuthUserFactory()
email = 'test@cos.io'
url = api_url_for('update_user')
header = {'id': user1.username,
'emails': [{'address': user1.username, 'primary': True, 'confirmed': True},
{'address': email, 'primary': False, 'confirmed': False}
]}
res = self.app.put_json(url, header, auth=user2.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_error_page_if_confirm_link_is_used(self):
self.user.confirm_email(self.confirmation_token)
self.user.save()
res = self.app.get(self.confirmation_url, expect_errors=True)
assert_in(auth_exc.InvalidTokenError.message_short, res)
assert_equal(res.status_code, http.BAD_REQUEST)
@pytest.mark.enable_implicit_clean
@pytest.mark.enable_bookmark_creation
class TestClaimingAsARegisteredUser(OsfTestCase):
def setUp(self):
super(TestClaimingAsARegisteredUser, self).setUp()
self.referrer = AuthUserFactory()
self.project = ProjectFactory(creator=self.referrer, is_public=True)
name, email = fake.name(), fake_email()
self.user = self.project.add_unregistered_contributor(
fullname=name,
email=email,
auth=Auth(user=self.referrer)
)
self.project.save()
def test_claim_user_registered_with_correct_password(self):
reg_user = AuthUserFactory() # NOTE: AuthUserFactory sets password as 'queenfan86'
url = self.user.get_claim_url(self.project._primary_key)
# Follow to password re-enter page
res = self.app.get(url, auth=reg_user.auth).follow(auth=reg_user.auth)
# verify that the "Claim Account" form is returned
assert_in('Claim Contributor', res.body)
form = res.forms['claimContributorForm']
form['password'] = 'queenfan86'
res = form.submit(auth=reg_user.auth)
res = res.follow(auth=reg_user.auth)
self.project.reload()
self.user.reload()
# user is now a contributor to the project
assert_in(reg_user, self.project.contributors)
# the unregistered user (self.user) is removed as a contributor, and their
assert_not_in(self.user, self.project.contributors)
# unclaimed record for the project has been deleted
assert_not_in(self.project, self.user.unclaimed_records)
def test_claim_user_registered_preprint_with_correct_password(self):
preprint = PreprintFactory(creator=self.referrer)
name, email = fake.name(), fake_email()
unreg_user = preprint.add_unregistered_contributor(
fullname=name,
email=email,
auth=Auth(user=self.referrer)
)
reg_user = AuthUserFactory() # NOTE: AuthUserFactory sets password as 'queenfan86'
url = unreg_user.get_claim_url(preprint._id)
# Follow to password re-enter page
res = self.app.get(url, auth=reg_user.auth).follow(auth=reg_user.auth)
# verify that the "Claim Account" form is returned
assert_in('Claim Contributor', res.body)
form = res.forms['claimContributorForm']
form['password'] = 'queenfan86'
res = form.submit(auth=reg_user.auth)
preprint.reload()
unreg_user.reload()
# user is now a contributor to the project
assert_in(reg_user, preprint.contributors)
# the unregistered user (unreg_user) is removed as a contributor, and their
assert_not_in(unreg_user, preprint.contributors)
# unclaimed record for the project has been deleted
assert_not_in(preprint, unreg_user.unclaimed_records)
class TestResendConfirmation(OsfTestCase):
def setUp(self):
super(TestResendConfirmation, self).setUp()
self.unconfirmed_user = UnconfirmedUserFactory()
self.confirmed_user = UserFactory()
self.get_url = web_url_for('resend_confirmation_get')
self.post_url = web_url_for('resend_confirmation_post')
# test that the resend confirmation page loads correctly
def test_resend_confirmation_get(self):
res = self.app.get(self.get_url)
assert_equal(res.status_code, 200)
assert_in('Resend Confirmation', res.body)
assert_in('resendForm', res.forms)
# test that unconfirmed user can receive resend confirmation email
@mock.patch('framework.auth.views.mails.send_mail')
def test_can_receive_resend_confirmation_email(self, mock_send_mail):
# load resend confirmation page and submit email
res = self.app.get(self.get_url)
form = res.forms['resendForm']
form['email'] = self.unconfirmed_user.unconfirmed_emails[0]
res = form.submit()
# check email, request and response
assert_true(mock_send_mail.called)
assert_equal(res.status_code, 200)
assert_equal(res.request.path, self.post_url)
assert_in_html('If there is an OSF account', res)
# test that confirmed user cannot receive resend confirmation email
@mock.patch('framework.auth.views.mails.send_mail')
def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail):
# load resend confirmation page and submit email
res = self.app.get(self.get_url)
form = res.forms['resendForm']
form['email'] = self.confirmed_user.emails.first().address
res = form.submit()
# check email, request and response
assert_false(mock_send_mail.called)
assert_equal(res.status_code, 200)
assert_equal(res.request.path, self.post_url)
assert_in_html('has already been confirmed', res)
# test that non-existing user cannot receive resend confirmation email
@mock.patch('framework.auth.views.mails.send_mail')
def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail):
# load resend confirmation page and submit email
res = self.app.get(self.get_url)
form = res.forms['resendForm']
form['email'] = 'random@random.com'
res = form.submit()
# check email, request and response
assert_false(mock_send_mail.called)
assert_equal(res.status_code, 200)
assert_equal(res.request.path, self.post_url)
assert_in_html('If there is an OSF account', res)
# test that user cannot submit resend confirmation request too quickly
@mock.patch('framework.auth.views.mails.send_mail')
def test_cannot_resend_confirmation_twice_quickly(self, mock_send_mail):
# load resend confirmation page and submit email
res = self.app.get(self.get_url)
form = res.forms['resendForm']
form['email'] = self.unconfirmed_user.email
res = form.submit()
res = form.submit()
# check request and response
assert_equal(res.status_code, 200)
assert_in_html('Please wait', res)
class TestForgotPassword(OsfTestCase):
def setUp(self):
super(TestForgotPassword, self).setUp()
self.user = UserFactory()
self.auth_user = AuthUserFactory()
self.get_url = web_url_for('forgot_password_get')
self.post_url = web_url_for('forgot_password_post')
self.user.verification_key_v2 = {}
self.user.save()
# log users out before they land on forgot password page
def test_forgot_password_logs_out_user(self):
# visit forgot password link while another user is logged in
res = self.app.get(self.get_url, auth=self.auth_user.auth)
# check redirection to CAS logout
assert_equal(res.status_code, 302)
location = res.headers.get('Location')
assert_not_in('reauth', location)
assert_in('logout?service=', location)
assert_in('forgotpassword', location)
# test that forgot password page is loaded correctly
def test_get_forgot_password(self):
res = self.app.get(self.get_url)
assert_equal(res.status_code, 200)
assert_in('Forgot Password', res.body)
assert_in('forgotPasswordForm', res.forms)
# test that existing user can receive reset password email
@mock.patch('framework.auth.views.mails.send_mail')
def test_can_receive_reset_password_email(self, mock_send_mail):
# load forgot password page and submit email
res = self.app.get(self.get_url)
form = res.forms['forgotPasswordForm']
form['forgot_password-email'] = self.user.username
res = form.submit()
# check mail was sent
assert_true(mock_send_mail.called)
# check http 200 response
assert_equal(res.status_code, 200)
# check request URL is /forgotpassword
assert_equal(res.request.path, self.post_url)
# check push notification
assert_in_html('If there is an OSF account', res)
assert_not_in_html('Please wait', res)
# check verification_key_v2 is set
self.user.reload()
assert_not_equal(self.user.verification_key_v2, {})
# test that non-existing user cannot receive reset password email
@mock.patch('framework.auth.views.mails.send_mail')
def test_cannot_receive_reset_password_email(self, mock_send_mail):
# load forgot password page and submit email
res = self.app.get(self.get_url)
form = res.forms['forgotPasswordForm']
form['forgot_password-email'] = 'fake' + self.user.username
res = form.submit()
# check mail was not sent
assert_false(mock_send_mail.called)
# check http 200 response
assert_equal(res.status_code, 200)
# check request URL is /forgotpassword
assert_equal(res.request.path, self.post_url)
# check push notification
assert_in_html('If there is an OSF account', res)
assert_not_in_html('Please wait', res)
# check verification_key_v2 is not set
self.user.reload()
assert_equal(self.user.verification_key_v2, {})
# test that a deactivated user cannot receive reset password email
@mock.patch('framework.auth.views.mails.send_mail')
def test_not_active_user_no_reset_password_email(self, mock_send_mail):
self.user.disable_account()
self.user.save()
# load forgot password page and submit email
res = self.app.get(self.get_url)
form = res.forms['forgotPasswordForm']
form['forgot_password-email'] = self.user.username
res = form.submit()
# check mail was not sent
assert_false(mock_send_mail.called)
# check http 200 response
assert_equal(res.status_code, 200)
# check request URL is /forgotpassword
assert_equal(res.request.path, self.post_url)
# check push notification
assert_in_html('If there is an OSF account', res)
assert_not_in_html('Please wait', res)
# check verification_key_v2 is not set
self.user.reload()
assert_equal(self.user.verification_key_v2, {})
# test that user cannot submit forgot password request too quickly
@mock.patch('framework.auth.views.mails.send_mail')
def test_cannot_reset_password_twice_quickly(self, mock_send_mail):
# load forgot password page and submit email
res = self.app.get(self.get_url)
form = res.forms['forgotPasswordForm']
form['forgot_password-email'] = self.user.username
res = form.submit()
res = form.submit()
# check http 200 response
assert_equal(res.status_code, 200)
# check push notification
assert_in_html('Please wait', res)
assert_not_in_html('If there is an OSF account', res)
@unittest.skip('Public projects/components are dynamically loaded now.')
class TestAUserProfile(OsfTestCase):
def setUp(self):
OsfTestCase.setUp(self)
self.user = AuthUserFactory()
self.me = AuthUserFactory()
self.project = ProjectFactory(creator=self.me, is_public=True, title=fake.bs())
self.component = NodeFactory(creator=self.me, parent=self.project, is_public=True, title=fake.bs())
# regression test for https://github.com/CenterForOpenScience/osf.io/issues/2623
def test_has_public_projects_and_components(self):
# I go to my own profile
url = web_url_for('profile_view_id', uid=self.me._primary_key)
# I see the title of both my project and component
res = self.app.get(url, auth=self.me.auth)
assert_in_html(self.component.title, res)
assert_in_html(self.project.title, res)
# Another user can also see my public project and component
url = web_url_for('profile_view_id', uid=self.me._primary_key)
# I see the title of both my project and component
res = self.app.get(url, auth=self.user.auth)
assert_in_html(self.component.title, res)
assert_in_html(self.project.title, res)
def test_shows_projects_with_many_contributors(self):
# My project has many contributors
for _ in range(5):
user = UserFactory()
self.project.add_contributor(user, auth=Auth(self.project.creator), save=True)
# I go to my own profile
url = web_url_for('profile_view_id', uid=self.me._primary_key)
res = self.app.get(url, auth=self.me.auth)
# I see '3 more' as a link
assert_in('3 more', res)
res = res.click('3 more')
assert_equal(res.request.path, self.project.url)
def test_has_no_public_projects_or_components_on_own_profile(self):
# User goes to their profile
url = web_url_for('profile_view_id', uid=self.user._id)
res = self.app.get(url, auth=self.user.auth)
# user has no public components/projects
assert_in('You have no public projects', res)
assert_in('You have no public components', res)
def test_user_no_public_projects_or_components(self):
# I go to other user's profile
url = web_url_for('profile_view_id', uid=self.user._id)
# User has no public components/projects
res = self.app.get(url, auth=self.me.auth)
assert_in('This user has no public projects', res)
assert_in('This user has no public components', res)
# regression test
def test_does_not_show_registrations(self):
project = ProjectFactory(creator=self.user)
component = NodeFactory(parent=project, creator=self.user, is_public=False)
# User has a registration with public components
reg = RegistrationFactory(project=component.parent_node, creator=self.user, is_public=True)
for each in reg.nodes:
each.is_public = True
each.save()
# I go to other user's profile
url = web_url_for('profile_view_id', uid=self.user._id)
# Registration does not appear on profile
res = self.app.get(url, auth=self.me.auth)
assert_in('This user has no public components', res)
assert_not_in(reg.title, res)
assert_not_in(reg.nodes[0].title, res)
@pytest.mark.enable_bookmark_creation
class TestPreprintBannerView(OsfTestCase):
def setUp(self):
super(TestPreprintBannerView, self).setUp()
self.admin = AuthUserFactory()
self.write_contrib = AuthUserFactory()
self.read_contrib = AuthUserFactory()
self.non_contrib = AuthUserFactory()
self.provider_one = PreprintProviderFactory()
self.project_one = ProjectFactory(creator=self.admin, is_public=True)
self.project_one.add_contributor(self.write_contrib, permissions.WRITE)
self.project_one.add_contributor(self.read_contrib, permissions.READ)
self.subject_one = SubjectFactory()
self.preprint = PreprintFactory(creator=self.admin, filename='mgla.pdf', provider=self.provider_one, subjects=[[self.subject_one._id]], project=self.project_one, is_published=True)
self.preprint.add_contributor(self.write_contrib, permissions.WRITE)
self.preprint.add_contributor(self.read_contrib, permissions.READ)
def test_public_project_published_preprint(self):
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_in('Has supplemental materials for', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_in('Has supplemental materials for', res.body)
def test_public_project_abandoned_preprint(self):
self.preprint.machine_state = 'initial'
self.preprint.save()
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_not_in('Has supplemental materials for', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_not_in('Has supplemental materials for', res.body)
def test_public_project_deleted_preprint(self):
self.preprint.deleted = timezone.now()
self.preprint.save()
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_not_in('Has supplemental materials for', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_not_in('Has supplemental materials for', res.body)
def test_public_project_private_preprint(self):
self.preprint.is_public = False
self.preprint.save()
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_in('Has supplemental materials for', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_not_in('Has supplemental materials for', res.body)
def test_public_project_orphaned_preprint(self):
self.preprint.primary_file = None
self.preprint.save()
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_in('Has supplemental materials for', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_not_in('Has supplemental materials for', res.body)
def test_public_project_unpublished_preprint(self):
self.preprint.is_published = False
self.preprint.save()
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_in('Has supplemental materials for', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_in('Has supplemental materials for', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_not_in('Has supplemental materials for', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_not_in('Has supplemental materials for', res.body)
def test_public_project_pending_preprint_post_moderation(self):
self.preprint.machine_state = 'pending'
provider = PreprintProviderFactory(reviews_workflow='post-moderation')
self.preprint.provider = provider
self.preprint.save()
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_in('Pending\n', res.body)
assert_in('This preprint is publicly available and searchable but is subject to removal by a moderator.', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_in('Pending\n', res.body)
assert_in('This preprint is publicly available and searchable but is subject to removal by a moderator.', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_in('Pending\n', res.body)
assert_in('This preprint is publicly available and searchable but is subject to removal by a moderator.', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_in('on {}'.format(self.preprint.provider.name), res.body)
assert_not_in('Pending\n', res.body)
assert_not_in('This preprint is publicly available and searchable but is subject to removal by a moderator.', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_in('on {}'.format(self.preprint.provider.name), res.body)
assert_not_in('Pending\n', res.body)
assert_not_in('This preprint is publicly available and searchable but is subject to removal by a moderator.', res.body)
def test_implicit_admins_can_see_project_status(self):
project = ProjectFactory(creator=self.admin)
component = NodeFactory(creator=self.admin, parent=project)
project.add_contributor(self.write_contrib, permissions.ADMIN)
project.save()
preprint = PreprintFactory(creator=self.admin, filename='mgla.pdf', provider=self.provider_one, subjects=[[self.subject_one._id]], project=component, is_published=True)
preprint.machine_state = 'pending'
provider = PreprintProviderFactory(reviews_workflow='post-moderation')
preprint.provider = provider
preprint.save()
url = component.web_url_for('view_project')
res = self.app.get(url, auth=self.write_contrib.auth)
assert_in('{}'.format(preprint.provider.name), res.body)
assert_in('Pending\n', res.body)
assert_in('This preprint is publicly available and searchable but is subject to removal by a moderator.', res.body)
def test_public_project_pending_preprint_pre_moderation(self):
self.preprint.machine_state = 'pending'
provider = PreprintProviderFactory(reviews_workflow='pre-moderation')
self.preprint.provider = provider
self.preprint.save()
url = self.project_one.web_url_for('view_project')
# Admin - preprint
res = self.app.get(url, auth=self.admin.auth)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_in('Pending\n', res.body)
assert_in('This preprint is not publicly available or searchable until approved by a moderator.', res.body)
# Write - preprint
res = self.app.get(url, auth=self.write_contrib.auth)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_in('Pending\n', res.body)
assert_in('This preprint is not publicly available or searchable until approved by a moderator.', res.body)
# Read - preprint
res = self.app.get(url, auth=self.read_contrib.auth)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_in('Pending\n', res.body)
assert_in('This preprint is not publicly available or searchable until approved by a moderator.', res.body)
# Noncontrib - preprint
res = self.app.get(url, auth=self.non_contrib.auth)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_not_in('Pending\n', res.body)
assert_not_in('This preprint is not publicly available or searchable until approved by a moderator.', res.body)
# Unauthenticated - preprint
res = self.app.get(url)
assert_in('{}'.format(self.preprint.provider.name), res.body)
assert_not_in('Pending\n', res.body)
assert_not_in('This preprint is not publicly available or searchable until approved by a moderator.', res.body)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| 6,935,636,752,297,277,000
| 38.716185
| 188
| 0.637931
| false
| 3.671895
| true
| false
| false
|
phalt/dj-twiml
|
tests/test_views.py
|
1
|
1400
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_dj_twiml_views
-------------------
Tests for the `dj_twiml` views module.
"""
from hmac import new
from hashlib import sha1
from base64 import encodestring
from django.test import Client, TestCase, RequestFactory
from django.conf import settings
from dj_twiml import views
class TestDj_twiml(TestCase):
fixtures = ['dj_twiml.json']
def setUp(self):
self.client = Client()
self.factory = RequestFactory()
self.uri = 'http://testserver/twiml/'
self.t1_uri = '/twiml/1/'
settings.TWILIO_AUTH_TOKEN = 'xxx'
settings.TWILIO_ACCOUNT_SID = 'xxx'
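# Expected X-Twilio-Signature for a request with no POST params: HMAC-SHA1 of
# the request URL, keyed with the auth token and base64-encoded.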
self.signature = encodestring(
new(settings.TWILIO_AUTH_TOKEN,
'%s1/' % self.uri, sha1).digest()).strip()
def test_detail_forgery_off(self):
request = self.factory.post(
self.t1_uri, HTTP_X_TWILIO_SIGNATURE=self.signature)
deets = views.detail(request, twiml_id=1)
self.assertIn('<Response><Dial>', deets)
def test_detail_forgery_on(self):
'''Same as above, but with forgery protection on.'''
settings.DJANGO_TWILIO_FORGERY_PROTECTION = True
request = self.factory.post(
self.t1_uri, HTTP_X_TWILIO_SIGNATURE=self.signature)
deets = views.detail(request, twiml_id=1)
self.assertIn('<Response><Dial>', deets)
|
bsd-3-clause
| 2,297,203,071,420,323,000
| 24.925926
| 64
| 0.628571
| false
| 3.317536
| true
| false
| false
|
klmitch/keystone
|
keystone/trust/controllers.py
|
1
|
11243
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_utils import timeutils
import six
from keystone import assignment
from keystone.common import controller
from keystone.common import dependency
from keystone.common import utils
from keystone.common import validation
from keystone import exception
from keystone.i18n import _
from keystone import notifications
from keystone.trust import schema
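# Access-control helpers: only the trustor or trustee may read a trust, and
# only the trustor (or an admin request) may delete one.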
def _trustor_trustee_only(trust, user_id):
if user_id not in [trust.get('trustee_user_id'),
trust.get('trustor_user_id')]:
raise exception.Forbidden()
def _admin_trustor_only(context, trust, user_id):
if user_id != trust.get('trustor_user_id') and not context['is_admin']:
raise exception.Forbidden()
@dependency.requires('assignment_api', 'identity_api', 'resource_api',
'role_api', 'token_provider_api', 'trust_api')
class TrustV3(controller.V3Controller):
collection_name = "trusts"
member_name = "trust"
@classmethod
def base_url(cls, context, path=None):
"""Construct a path and pass it to V3Controller.base_url method."""
# NOTE(stevemar): Overriding path to /OS-TRUST/trusts so that
# V3Controller.base_url handles setting the self link correctly.
path = '/OS-TRUST/' + cls.collection_name
return super(TrustV3, cls).base_url(context, path=path)
def _get_user_id(self, context):
try:
token_ref = utils.get_token_ref(context)
except exception.Unauthorized:
return None
return token_ref.user_id
def get_trust(self, context, trust_id):
user_id = self._get_user_id(context)
trust = self.trust_api.get_trust(trust_id)
_trustor_trustee_only(trust, user_id)
self._fill_in_roles(context, trust,
self.role_api.list_roles())
return TrustV3.wrap_member(context, trust)
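# Format expires_at with subsecond precision, expand bare role IDs into full
# role representations, and attach roles_links so the trust resource matches
# other API collections.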
def _fill_in_roles(self, context, trust, all_roles):
if trust.get('expires_at') is not None:
trust['expires_at'] = (utils.isotime
(trust['expires_at'],
subsecond=True))
if 'roles' not in trust:
trust['roles'] = []
trust_full_roles = []
for trust_role in trust['roles']:
if isinstance(trust_role, six.string_types):
trust_role = {'id': trust_role}
matching_roles = [x for x in all_roles
if x['id'] == trust_role['id']]
if matching_roles:
full_role = assignment.controllers.RoleV3.wrap_member(
context, matching_roles[0])['role']
trust_full_roles.append(full_role)
trust['roles'] = trust_full_roles
trust['roles_links'] = {
'self': (self.base_url(context) + "/%s/roles" % trust['id']),
'next': None,
'previous': None}
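# A trust-create request may name roles by id or by name; normalize everything
# to {'id': ...} dicts and reject role names that are not defined.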
def _normalize_role_list(self, trust, all_roles):
trust_roles = []
all_role_names = {r['name']: r for r in all_roles}
for role in trust.get('roles', []):
if 'id' in role:
trust_roles.append({'id': role['id']})
elif 'name' in role:
rolename = role['name']
if rolename in all_role_names:
trust_roles.append({'id':
all_role_names[rolename]['id']})
else:
raise exception.RoleNotFound(_("role %s is not defined") %
rolename)
else:
raise exception.ValidationError(attribute='id or name',
target='roles')
return trust_roles
@controller.protected()
@validation.validated(schema.trust_create, 'trust')
def create_trust(self, context, trust):
"""Create a new trust.
The user creating the trust must be the trustor.
"""
auth_context = context.get('environment',
{}).get('KEYSTONE_AUTH_CONTEXT', {})
# Check if delegated via trust
if auth_context.get('is_delegated_auth'):
# Redelegation case
src_trust_id = auth_context['trust_id']
if not src_trust_id:
raise exception.Forbidden(
_('Redelegation allowed for delegated by trust only'))
redelegated_trust = self.trust_api.get_trust(src_trust_id)
else:
redelegated_trust = None
if trust.get('project_id'):
self._require_role(trust)
self._require_user_is_trustor(context, trust)
self._require_trustee_exists(trust['trustee_user_id'])
all_roles = self.role_api.list_roles()
# Normalize roles
normalized_roles = self._normalize_role_list(trust, all_roles)
trust['roles'] = normalized_roles
self._require_trustor_has_role_in_project(trust)
trust['expires_at'] = self._parse_expiration_date(
trust.get('expires_at'))
trust_id = uuid.uuid4().hex
initiator = notifications._get_request_audit_info(context)
new_trust = self.trust_api.create_trust(trust_id, trust,
normalized_roles,
redelegated_trust,
initiator)
self._fill_in_roles(context, new_trust, all_roles)
return TrustV3.wrap_member(context, new_trust)
def _require_trustee_exists(self, trustee_user_id):
self.identity_api.get_user(trustee_user_id)
def _require_user_is_trustor(self, context, trust):
user_id = self._get_user_id(context)
if user_id != trust.get('trustor_user_id'):
raise exception.Forbidden(
_("The authenticated user should match the trustor."))
def _require_role(self, trust):
if not trust.get('roles'):
raise exception.Forbidden(
_('At least one role should be specified.'))
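# Walk the redelegation chain back to the original trust, then return the
# trustor's effective role IDs on its project (including domain-specific
# roles); an empty list is returned when no project is set.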
def _get_trustor_roles(self, trust):
original_trust = trust.copy()
while original_trust.get('redelegated_trust_id'):
original_trust = self.trust_api.get_trust(
original_trust['redelegated_trust_id'])
if not self._attribute_is_empty(trust, 'project_id'):
self.resource_api.get_project(original_trust['project_id'])
# Get a list of roles including any domain specific roles
assignment_list = self.assignment_api.list_role_assignments(
user_id=original_trust['trustor_user_id'],
project_id=original_trust['project_id'],
effective=True, strip_domain_roles=False)
return list(set([x['role_id'] for x in assignment_list]))
else:
return []
def _require_trustor_has_role_in_project(self, trust):
trustor_roles = self._get_trustor_roles(trust)
for trust_role in trust['roles']:
matching_roles = [x for x in trustor_roles
if x == trust_role['id']]
if not matching_roles:
raise exception.RoleNotFound(role_id=trust_role['id'])
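# Expiration dates are treated as UTC: a missing 'Z' suffix is appended before
# parsing, and timestamps that are already in the past are rejected.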
def _parse_expiration_date(self, expiration_date):
if expiration_date is None:
return None
if not expiration_date.endswith('Z'):
expiration_date += 'Z'
try:
expiration_time = timeutils.parse_isotime(expiration_date)
except ValueError:
raise exception.ValidationTimeStampError()
if timeutils.is_older_than(expiration_time, 0):
raise exception.ValidationExpirationError()
return expiration_time
def _check_role_for_trust(self, context, trust_id, role_id):
"""Checks if a role has been assigned to a trust."""
trust = self.trust_api.get_trust(trust_id)
user_id = self._get_user_id(context)
_trustor_trustee_only(trust, user_id)
if not any(role['id'] == role_id for role in trust['roles']):
raise exception.RoleNotFound(role_id=role_id)
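# Listing without a query filter requires admin; otherwise callers may only
# list trusts where they themselves are the trustor or trustee named in the
# query string.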
@controller.protected()
def list_trusts(self, context):
query = context['query_string']
trusts = []
if not query:
self.assert_admin(context)
trusts += self.trust_api.list_trusts()
if 'trustor_user_id' in query:
user_id = query['trustor_user_id']
calling_user_id = self._get_user_id(context)
if user_id != calling_user_id:
raise exception.Forbidden()
trusts += (self.trust_api.
list_trusts_for_trustor(user_id))
if 'trustee_user_id' in query:
user_id = query['trustee_user_id']
calling_user_id = self._get_user_id(context)
if user_id != calling_user_id:
raise exception.Forbidden()
trusts += self.trust_api.list_trusts_for_trustee(user_id)
for trust in trusts:
# get_trust returns roles, but list_trusts does not; in some circumstances
# 'roles' is absent from the query response, so check before deleting.
if 'roles' in trust:
del trust['roles']
if trust.get('expires_at') is not None:
trust['expires_at'] = (utils.isotime
(trust['expires_at'],
subsecond=True))
return TrustV3.wrap_collection(context, trusts)
@controller.protected()
def delete_trust(self, context, trust_id):
trust = self.trust_api.get_trust(trust_id)
user_id = self._get_user_id(context)
_admin_trustor_only(context, trust, user_id)
initiator = notifications._get_request_audit_info(context)
self.trust_api.delete_trust(trust_id, initiator)
@controller.protected()
def list_roles_for_trust(self, context, trust_id):
trust = self.get_trust(context, trust_id)['trust']
user_id = self._get_user_id(context)
_trustor_trustee_only(trust, user_id)
return {'roles': trust['roles'],
'links': trust['roles_links']}
@controller.protected()
def get_role_for_trust(self, context, trust_id, role_id):
"""Get a role that has been assigned to a trust."""
self._check_role_for_trust(context, trust_id, role_id)
role = self.role_api.get_role(role_id)
return assignment.controllers.RoleV3.wrap_member(context, role)
|
apache-2.0
| 7,273,288,418,649,957,000
| 40.640741
| 78
| 0.579383
| false
| 3.999644
| false
| false
| false
|