| repo_name (string, 5–92 chars) | path (string, 4–221 chars) | copies (string, 19 values) | size (string, 4–6 chars) | content (string, 766–896k chars) | license (string, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 32–997) | alpha_frac (float64, 0.25–0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5–13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Deledrius/korman
|
korman/properties/modifiers/region.py
|
1
|
15953
|
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import bpy
from bpy.props import *
from PyHSPlasma import *
from ...exporter import ExportError, ExportAssertionError
from ...helpers import TemporaryObject
from ... import idprops
from .base import PlasmaModifierProperties, PlasmaModifierLogicWiz
from ..prop_camera import PlasmaCameraProperties
from .physics import bounds_types
footstep_surface_ids = {
"dirt": 0,
# 1 = NULL
"puddle": 2,
# 3 = tile (NULL in MOUL)
"metal": 4,
"woodbridge": 5,
"rope": 6,
"grass": 7,
# 8 = NULL
"woodfloor": 9,
"rug": 10,
"stone": 11,
# 12 = NULL
# 13 = metal ladder (dupe of metal)
"woodladder": 14,
"water": 15,
# 16 = maintainer's glass (NULL in PotS)
# 17 = maintainer's metal grating (NULL in PotS)
# 18 = swimming (why would you want this?)
}
footstep_surfaces = [("dirt", "Dirt", "Dirt"),
("grass", "Grass", "Grass"),
("metal", "Metal", "Metal Catwalk"),
("puddle", "Puddle", "Shallow Water"),
("rope", "Rope", "Rope Ladder"),
("rug", "Rug", "Carpet Rug"),
("stone", "Stone", "Stone Tile"),
("water", "Water", "Deep Water"),
("woodbridge", "Wood Bridge", "Wood Bridge"),
("woodfloor", "Wood Floor", "Wood Floor"),
("woodladder", "Wood Ladder", "Wood Ladder")]
class PlasmaCameraRegion(PlasmaModifierProperties):
pl_id = "camera_rgn"
bl_category = "Region"
bl_label = "Camera Region"
bl_description = "Camera Region"
bl_icon = "CAMERA_DATA"
camera_type = EnumProperty(name="Camera Type",
description="What kind of camera should be used?",
items=[("auto_follow", "Auto Follow Camera", "Automatically generated follow camera"),
("manual", "Manual Camera", "User specified camera object")],
default="manual",
options=set())
camera_object = PointerProperty(name="Camera",
description="Switches to this camera",
type=bpy.types.Object,
poll=idprops.poll_camera_objects,
options=set())
auto_camera = PointerProperty(type=PlasmaCameraProperties, options=set())
def export(self, exporter, bo, so):
if self.camera_type == "manual":
if self.camera_object is None:
raise ExportError("Camera Modifier '{}' does not specify a valid camera object".format(self.id_data.name))
camera_so_key = exporter.mgr.find_create_key(plSceneObject, bl=self.camera_object)
camera_props = self.camera_object.data.plasma_camera.settings
else:
assert self.camera_type[:4] == "auto"
# Wheedoggy! We get to export the doggone camera now.
camera_props = self.auto_camera
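            # Strip the "auto_" prefix to recover the exporter camera type (e.g. "auto_follow" -> "follow")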
camera_type = self.camera_type[5:]
exporter.camera.export_camera(so, bo, camera_type, camera_props)
camera_so_key = so.key
# Setup physical stuff
phys_mod = bo.plasma_modifiers.collision
simIface, physical = exporter.physics.generate_physical(bo, so, phys_mod.bounds, self.key_name)
physical.memberGroup = plSimDefs.kGroupDetector
physical.reportGroup = 1 << plSimDefs.kGroupAvatar
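        # Pin the detector in place so the physics simulation cannot push it around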
simIface.setProperty(plSimulationInterface.kPinned, True)
physical.setProperty(plSimulationInterface.kPinned, True)
# I don't feel evil enough to make this generate a logic tree...
msg = plCameraMsg()
msg.BCastFlags |= plMessage.kLocalPropagate | plMessage.kBCastByType
msg.setCmd(plCameraMsg.kRegionPushCamera)
msg.setCmd(plCameraMsg.kSetAsPrimary, camera_props.primary_camera)
msg.newCam = camera_so_key
region = exporter.mgr.find_create_object(plCameraRegionDetector, so=so)
region.addMessage(msg)
def harvest_actors(self):
if self.camera_type == "manual":
if self.camera_object is None:
raise ExportError("Camera Modifier '{}' does not specify a valid camera object".format(self.id_data.name))
camera = self.camera_object.data.plasma_camera.settings
else:
camera = self.auto_camera
return camera.harvest_actors()
class PlasmaFootstepRegion(PlasmaModifierProperties, PlasmaModifierLogicWiz):
pl_id = "footstep"
bl_category = "Region"
bl_label = "Footstep"
bl_description = "Footstep Region"
surface = EnumProperty(name="Surface",
description="What kind of surface are we walking on?",
items=footstep_surfaces,
default="stone")
bounds = EnumProperty(name="Region Bounds",
description="Physical object's bounds",
items=bounds_types,
default="hull")
def export(self, exporter, bo, so):
# Generate the logic nodes now
self.logicwiz(bo)
# Now, export the node tree
self.node_tree.export(exporter, bo, so)
def logicwiz(self, bo):
tree = self.node_tree
nodes = tree.nodes
nodes.clear()
# Region Sensor
volsens = nodes.new("PlasmaVolumeSensorNode")
volsens.name = "RegionSensor"
volsens.region_object = bo
volsens.bounds = self.bounds
volsens.find_input_socket("enter").allow = True
volsens.find_input_socket("exit").allow = True
# Responder
respmod = nodes.new("PlasmaResponderNode")
respmod.name = "Resp"
respmod.link_input(volsens, "satisfies", "condition")
respstate = nodes.new("PlasmaResponderStateNode")
respstate.link_input(respmod, "state_refs", "resp")
# ArmatureEffectStateMsg
msg = nodes.new("PlasmaFootstepSoundMsgNode")
msg.link_input(respstate, "msgs", "sender")
msg.surface = self.surface
@property
def key_name(self):
return "{}_FootRgn".format(self.id_data.name)
class PlasmaPanicLinkRegion(PlasmaModifierProperties):
pl_id = "paniclink"
bl_category = "Region"
bl_label = "Panic Link"
bl_description = "Panic Link Region"
play_anim = BoolProperty(name="Play Animation",
description="Play the link-out animation when panic linking",
default=True)
def export(self, exporter, bo, so):
phys_mod = bo.plasma_modifiers.collision
simIface, physical = exporter.physics.generate_physical(bo, so, phys_mod.bounds, self.key_name)
# Now setup the region detector properties
physical.memberGroup = plSimDefs.kGroupDetector
physical.reportGroup = 1 << plSimDefs.kGroupAvatar
# Finally, the panic link region proper
reg = exporter.mgr.add_object(plPanicLinkRegion, name=self.key_name, so=so)
reg.playLinkOutAnim = self.play_anim
@property
def key_name(self):
return "{}_PanicLinkRgn".format(self.id_data.name)
@property
def requires_actor(self):
return True
class PlasmaSoftVolume(idprops.IDPropMixin, PlasmaModifierProperties):
pl_id = "softvolume"
bl_category = "Region"
bl_label = "Soft Volume"
bl_description = "Soft-Boundary Region"
# Advanced
use_nodes = BoolProperty(name="Use Nodes",
description="Make this a node-based Soft Volume",
default=False)
node_tree = PointerProperty(name="Node Tree",
description="Node Tree detailing soft volume logic",
type=bpy.types.NodeTree)
# Basic
invert = BoolProperty(name="Invert",
description="Invert the soft region")
inside_strength = IntProperty(name="Inside", description="Strength inside the region",
subtype="PERCENTAGE", default=100, min=0, max=100)
outside_strength = IntProperty(name="Outside", description="Strength outside the region",
subtype="PERCENTAGE", default=0, min=0, max=100)
soft_distance = FloatProperty(name="Distance", description="Soft Distance",
default=0.0, min=0.0, max=500.0)
def _apply_settings(self, sv):
sv.insideStrength = self.inside_strength / 100.0
sv.outsideStrength = self.outside_strength / 100.0
def get_key(self, exporter, so=None):
"""Fetches the key appropriate for this Soft Volume"""
if so is None:
so = exporter.mgr.find_create_object(plSceneObject, bl=self.id_data)
if self.use_nodes:
tree = self.get_node_tree()
output = tree.find_output("PlasmaSoftVolumeOutputNode")
if output is None:
raise ExportError("SoftVolume '{}' Node Tree '{}' has no output node!".format(self.key_name, tree.name))
return output.get_key(exporter, so)
else:
pClass = plSoftVolumeInvert if self.invert else plSoftVolumeSimple
return exporter.mgr.find_create_key(pClass, bl=self.id_data, so=so)
def export(self, exporter, bo, so):
if self.use_nodes:
self._export_sv_nodes(exporter, bo, so)
else:
self._export_convex_region(exporter, bo, so)
def _export_convex_region(self, exporter, bo, so):
if bo.type != "MESH":
raise ExportError("SoftVolume '{}': Simple SoftVolumes can only be meshes!".format(bo.name))
# Grab the SoftVolume KO
sv = self.get_key(exporter, so).object
self._apply_settings(sv)
# If "invert" was checked, we got a SoftVolumeInvert, but we need to make a Simple for the
        # region data to be exported into.
if isinstance(sv, plSoftVolumeInvert):
svSimple = exporter.mgr.find_create_object(plSoftVolumeSimple, bl=bo, so=so)
self._apply_settings(svSimple)
sv.addSubVolume(svSimple.key)
sv = svSimple
sv.softDist = self.soft_distance
# Initialize the plVolumeIsect. Currently, we only support convex isects. If you want parallel
# isects from empties, be my guest...
with TemporaryObject(bo.to_mesh(bpy.context.scene, True, "RENDER", calc_tessface=False), bpy.data.meshes.remove) as mesh:
mesh.transform(bo.matrix_world)
isect = plConvexIsect()
for i in mesh.vertices:
isect.addPlane(hsVector3(*i.normal), hsVector3(*i.co))
sv.volume = isect
def _export_sv_nodes(self, exporter, bo, so):
tree = self.get_node_tree()
if tree.name not in exporter.node_trees_exported:
exporter.node_trees_exported.add(tree.name)
tree.export(exporter, bo, so)
def get_node_tree(self):
if self.node_tree is None:
raise ExportError("SoftVolume '{}' does not specify a valid Node Tree!".format(self.key_name))
return self.node_tree
@classmethod
def _idprop_mapping(cls):
return {"node_tree": "node_tree_name"}
def _idprop_sources(self):
return {"node_tree_name": bpy.data.node_groups}
class PlasmaSubworldRegion(PlasmaModifierProperties):
pl_id = "subworld_rgn"
bl_category = "Region"
bl_label = "Subworld Region"
bl_description = "Subworld transition region"
subworld = PointerProperty(name="Subworld",
description="Subworld to transition into",
type=bpy.types.Object,
poll=idprops.poll_subworld_objects)
transition = EnumProperty(name="Transition",
description="When to transition to the new subworld",
items=[("enter", "On Enter", "Transition when the avatar enters the region"),
("exit", "On Exit", "Transition when the avatar exits the region")],
default="enter",
options=set())
def export(self, exporter, bo, so):
# Due to the fact that our subworld modifier can produce both RidingAnimatedPhysical
# and [HK|PX]Subworlds depending on the situation, this could get hairy, fast.
# Start by surveying the lay of the land.
from_sub, to_sub = bo.plasma_object.subworld, self.subworld
from_isded = exporter.physics.is_dedicated_subworld(from_sub)
to_isded = exporter.physics.is_dedicated_subworld(to_sub)
if 1:
def get_log_text(bo, isded):
main = "[Main World]" if bo is None else bo.name
sub = "Subworld" if isded or bo is None else "RidingAnimatedPhysical"
return main, sub
from_name, from_type = get_log_text(from_sub, from_isded)
to_name, to_type = get_log_text(to_sub, to_isded)
exporter.report.msg("Transition from '{}' ({}) to '{}' ({})",
from_name, from_type, to_name, to_type,
indent=2)
# I think the best solution here is to not worry about the excitement mentioned above.
# If we encounter anything truly interesting, we can fix it in CWE more easily IMO because
# the game actually knows more about the avatar's state than we do here in the exporter.
if to_isded or (from_isded and to_sub is None):
region = exporter.mgr.find_create_object(plSubworldRegionDetector, so=so)
if to_sub is not None:
region.subworld = exporter.mgr.find_create_key(plSceneObject, bl=to_sub)
region.onExit = self.transition == "exit"
else:
msg = plRideAnimatedPhysMsg()
msg.BCastFlags |= plMessage.kLocalPropagate | plMessage.kPropagateToModifiers
msg.sender = so.key
msg.entering = to_sub is not None
# In Cyan's PlasmaMAX RAP detector, it acts as more of a traditional region
# that changes us over to a dynamic character controller on region enter and
# reverts on region exit. We're going for an approach that is backwards compatible
# with subworlds, so our enter/exit regions are separate. Here, enter/exit message
# corresponds with when we should trigger the transition.
region = exporter.mgr.find_create_object(plRidingAnimatedPhysicalDetector, so=so)
if self.transition == "enter":
region.enterMsg = msg
elif self.transition == "exit":
region.exitMsg = msg
else:
raise ExportAssertionError()
# Fancy pants region collider type shit
simIface, physical = exporter.physics.generate_physical(bo, so, self.id_data.plasma_modifiers.collision.bounds, self.key_name)
physical.memberGroup = plSimDefs.kGroupDetector
physical.reportGroup |= 1 << plSimDefs.kGroupAvatar
|
gpl-3.0
| -1,248,840,267,233,175,300
| 41.769437
| 134
| 0.603021
| false
| 3.92931
| false
| false
| false
|
AuxinJeron/ACS-VRP
|
src/vrpmain.py
|
1
|
1857
|
from TsplibParser import parser as tspparser
from ArgParser import parser as argparser
from VRPCenter import VRPCenter
from TspPainter import tspPainter
import logging
# construct the logger
logger = logging.getLogger("logger")
logger.setLevel(logging.INFO)
logFormatter = logging.Formatter("%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
logger.addHandler(consoleHandler)
def run(tspparser):
center = VRPCenter(tspparser)
logger.info("Nodes: ")
for i in range(1, len(tspparser.cities_coord)):
logger.info("Node " + str(i) + " coordinate is " + str(tspparser.cities_coord[i][0]) + ", " + str(tspparser.cities_coord[i][1]))
tspPainter.coord_mat = tspparser.cities_coord
tspPainter.drawMap()
logger.info("Lockers: ")
for i in range(0, len(tspparser.lockers)):
logger.info(tspparser.lockers[i])
tspPainter.drawLockers(tspparser.lockers)
logger.info("Delivers: ")
for i in range(0, len(tspparser.delivers)):
logger.info(tspparser.delivers[i])
logger.info("Demands: ")
demands = 0
for i in range(0, len(tspparser.demands)):
demands += tspparser.demands[i]
logger.info("Node {} {}".format(i, tspparser.demands[i]))
logger.info("Total demands is: {}".format(demands))
center.start()
def main():
args = argparser.parse_args()
tspparser.read_file(args.tsp_file[0])
logger.info("-------------------------------------------")
logger.info("Problem formulation information")
logger.info("-------------------------------------------")
logger.info("Name: " + tspparser.name)
logger.info("Comment: " + tspparser.comment)
logger.info("Type: " + tspparser.type)
# run vrp center
run(tspparser)
if __name__ == "__main__":
main()
|
apache-2.0
| 7,920,056,264,688,182,000
| 33.407407
| 136
| 0.649435
| false
| 3.351986
| false
| false
| false
|
joshu/loan-eval-rabbit
|
async_logger.py
|
1
|
2525
|
from pika.adapters.twisted_connection import TwistedProtocolConnection
from pika.connection import ConnectionParameters
from twisted.internet import protocol, reactor, task
from twisted.python import log
import uuid
import json
import os
class Consumer(object):
def on_connected(self, connection):
d = connection.channel()
d.addCallback(self.got_channel)
d.addCallback(self.queue_declared)
d.addCallback(self.queue_bound)
d.addCallback(self.handle_deliveries)
d.addErrback(log.err)
def got_channel(self, channel):
self.channel = channel
return self.channel.queue_declare(exclusive=True)
def queue_declared(self, queue):
self._queue_name = queue.method.queue
self.channel.queue_bind(queue=self._queue_name,
exchange="topic_loan_eval",
routing_key="*.*.*")
def queue_bound(self, ignored):
return self.channel.basic_consume(queue=self._queue_name)
def handle_deliveries(self, queue_and_consumer_tag):
queue, consumer_tag = queue_and_consumer_tag
self.looping_call = task.LoopingCall(self.consume_from_queue, queue)
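        # start(0) re-fires as soon as each queue.get() deferred resolves, so messages drain continuously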
return self.looping_call.start(0)
def consume_from_queue(self, queue):
d = queue.get()
return d.addCallback(lambda result: self.handle_payload(*result))
def handle_payload(self, channel, method, properties, body):
self.logReceive(method.routing_key,body)
def serviceName(self):
file_name = (os.path.basename(__file__))
return file_name.split('.')[0]
def logReceive(self, routing_key, message):
eval_request = json.loads(message)
# print(message)
uid = uuid.UUID(eval_request["request_id"])
print " [%s] Received |%r|%r|%r" % (self.serviceName(),
str(uid),eval_request["timestamp"],routing_key)
if __name__ == "__main__":
consumer1 = Consumer()
# consumer2 = Consumer()
parameters = ConnectionParameters()
cc = protocol.ClientCreator(reactor,
TwistedProtocolConnection,
parameters)
d1 = cc.connectTCP("localhost", 5672)
d1.addCallback(lambda protocol: protocol.ready)
d1.addCallback(consumer1.on_connected)
d1.addErrback(log.err)
# d2 = cc.connectTCP("localhost", 5672)
# d2.addCallback(lambda protocol: protocol.ready)
# d2.addCallback(consumer2.on_connected)
# d2.addErrback(log.err)
reactor.run()
|
mit
| -1,238,929,204,795,590,700
| 31.371795
| 76
| 0.645545
| false
| 3.878648
| false
| false
| false
|
KeepSafe/translation-real-time-validaton
|
notifier/sync.py
|
1
|
2907
|
import asyncio
import logging
from . import const, compare
from .model import *
logger = logging.getLogger(__name__)
def _to_dc_items(wti_items, zendesk_items):
return [DynamicContentItem(key, wti_items.get(key), zendesk_item) for key, zendesk_item in zendesk_items.items()]
async def _get_all_translations(zendesk_dc, wti_client, dc_item, zendesk_locales):
tasks = [
wti_client.string(dc_item.wti_id, locale) for locale in zendesk_locales if locale != zendesk_dc.default_locale
]
data = await asyncio.gather(*tasks)
return [d for d in data if d]
def _default_translation(translations, default_locale):
for translation in translations:
if translation.locale == default_locale:
return translation.text
return ''
async def _update_item(zendesk_dc, wti_client, zendesk_locales, dc_item):
res = False
translations = await _get_all_translations(zendesk_dc, wti_client, dc_item, zendesk_locales)
text = dc_item.zendesk_item.text
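    # Only push the text back to WTI when the Zendesk default-locale copy has actually diverged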
if compare.is_different(_default_translation(translations, zendesk_dc.default_locale), text):
logger.info('updating wti item with key:%s', dc_item.key)
await wti_client.update_translation(dc_item.wti_id, text, zendesk_dc.default_locale, translations)
res = True
else:
logger.debug('item with key %s did not change', dc_item.key)
logger.info('updating dynamic content key:%s for locales:%s', dc_item.key,
list(map(lambda i: i.locale, translations)))
await zendesk_dc.update(dc_item, translations, zendesk_locales)
return res
async def _create_item(zendesk_dc, wti_client, zendesk_locales, dc_item):
logger.info('creating new wti item with key:%s', dc_item.key)
await wti_client.create_string(dc_item, zendesk_dc.default_locale)
return True
async def sync_zendesk(app):
zendesk_dc = app[const.ZENDESK_DC]
wti_client = app[const.WTI_DYNAMIC_CONTENT]
stats = app[const.STATS]
wti_items = await wti_client.strings_ids()
if not wti_items:
logger.error('no wti strings found')
return
zendesk_locales = await zendesk_dc.locales()
zendesk_items = await zendesk_dc.items(zendesk_locales)
dc_items = _to_dc_items(wti_items, zendesk_items)
logger.info('get %s items to process', len(dc_items))
stats.increment('sync.tick')
updated_keys = []
for dc_item in dc_items:
if dc_item.wti_id:
res = await _update_item(zendesk_dc, wti_client, zendesk_locales, dc_item)
if res:
updated_keys.append(dc_item.key)
else:
await _create_item(zendesk_dc, wti_client, zendesk_locales, dc_item)
updated_keys.append(dc_item.key)
if updated_keys:
await app[const.SLACK_NOTIFIER].notify(updated_keys)
stats.increment('sync.items', len(updated_keys))
logger.info('done updating content')
|
apache-2.0
| 4,294,154,491,269,031,000
| 36.753247
| 118
| 0.675611
| false
| 3.255319
| false
| false
| false
|
AnyChart/GraphicsJS
|
build.py
|
1
|
14952
|
#!/usr/bin/env python
# coding=utf-8
import os
import sys
import subprocess
import urllib
import zipfile
import platform
import shlex
import time
import json
import datetime
# =======================================================================================================================
# Project paths
# =======================================================================================================================
# COMPILER_VERSION = '20161024'
COMPILER_VERSION = '20180204'
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
CONTRIB_PATH = os.path.join(PROJECT_PATH, 'contrib')
COMPILER_PATH = os.path.join(CONTRIB_PATH, 'compiler', 'closure-compiler-v%s.jar' % COMPILER_VERSION)
SRC_PATH = os.path.join(PROJECT_PATH, 'src')
OUT_PATH = os.path.join(PROJECT_PATH, 'out')
CLOSURE_LIBRARY_PATH = os.path.join(CONTRIB_PATH, 'closure-library')
CLOSURE_SOURCE_PATH = os.path.join(CLOSURE_LIBRARY_PATH, 'closure', 'goog')
CLOSURE_LINTER_WRAPPER_PATH = os.path.join(CONTRIB_PATH, 'closure-linter-wrapper')
CLOSURE_BIN_PATH = os.path.join(CLOSURE_LIBRARY_PATH, 'closure', 'bin')
DEPS_WRITER_PATH = os.path.join(CLOSURE_BIN_PATH, 'build', 'depswriter.py')
PYTHON = 'python'
# =======================================================================================================================
# Synchronize contributions.
# =======================================================================================================================
def __has_closure_library():
return os.path.exists(CLOSURE_LIBRARY_PATH)
def __has_closure_compiler():
return os.path.exists(COMPILER_PATH)
def __has_closure_linter_wrapper():
return os.path.exists(CLOSURE_LINTER_WRAPPER_PATH)
def __has_closure_linter():
has_lint = True
try:
subprocess.Popen(['gjslint'], shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
except StandardError:
has_lint = False
return has_lint
def __ensure_dir_exists(path):
if not os.path.exists(path):
os.mkdir(path)
def __need_sync_contrib():
return not __has_closure_library() \
or not __has_closure_compiler() \
or not __has_closure_linter_wrapper() \
or not __has_closure_linter()
def __sync_contrib():
t = time.time()
__ensure_dir_exists(CONTRIB_PATH)
subprocess.call(['git', 'submodule', 'init'])
subprocess.call(['git', 'submodule', 'update'])
# Download closure compiler
if not os.path.exists(COMPILER_PATH):
print 'Downloading Google Closure Compiler v.' + COMPILER_VERSION
try:
__download_and_unzip_from_http(
"http://dl.google.com/closure-compiler/compiler-%s.zip" % COMPILER_VERSION,
'compiler'
)
except StandardError as e:
print e
print 'Failed'
return False
# Install closure linter
if not __has_closure_linter():
if not __install_closure_linter():
return False
print 'Environment ready. Time spent: {:.3f}s\n'.format(time.time() - t)
return True
def __download_and_unzip_from_http(from_url, dir_name):
z_obj_path = os.path.join(CONTRIB_PATH, dir_name + '.zip')
# download zip archive from url
if not os.path.exists(z_obj_path):
urllib.urlretrieve(
from_url,
z_obj_path
)
# extract zip archive
target_path = os.path.join(CONTRIB_PATH, dir_name)
__ensure_dir_exists(target_path)
z_obj = zipfile.ZipFile(z_obj_path)
z_obj.extractall(path=target_path)
z_obj.close()
# remove archive file
os.remove(z_obj_path)
return True
def __install_closure_linter():
print 'Installing Google Closure Linter v.2.3.9'
commands = [] if platform.system() == 'Windows' else ['sudo']
commands.append('easy_install')
commands.append('https://closure-linter.googlecode.com/files/closure_linter-2.3.9.tar.gz')
try:
subprocess.call(commands)
except StandardError:
print 'Failed: you should install easy_install module for python first'
return False
print 'Success'
return True
def sync_required(func):
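    # Decorator: ensure the contrib dependencies are synced before the wrapped build step runs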
def wrapper():
if __need_sync_contrib():
__sync_contrib()
return func()
return wrapper
# =======================================================================================================================
# Build project
# =======================================================================================================================
def __get_version():
    f = open(os.path.join(PROJECT_PATH, 'package.json'))
    package_json = json.loads(f.read())
f.close()
return package_json['version']
def __get_file_overview():
return "/**\n * GraphicsJS is a lightweight JavaScript graphics library with an intuitive API, based on SVG/VML technology.\n * Version: %s (%s)\n * License: BSD 3-clause\n * Copyright: AnyChart.com %s. All rights reserved.\n */\n" % (__get_version(), datetime.datetime.now().strftime("%Y-%m-%d"), str(datetime.datetime.now().year))
def __getNotOptimizedCompilerArgs():
compilerArgs = [
'--compilation_level WHITESPACE_ONLY',
'--formatting PRETTY_PRINT'
]
return compilerArgs
def __getOptimizedCompilerArgs():
compilerArgs = [
'--charset UTF-8',
'--compilation_level ADVANCED_OPTIMIZATIONS',
'--process_closure_primitives',
'--language_in ECMASCRIPT3',
'--language_out ECMASCRIPT3',
'--hide_warnings_for "contrib/closure-library"',
'--assume_function_wrapper',
'--use_types_for_optimization true',
'--output_wrapper "' + __get_file_overview() + '(function(){%output%})();"',
'--env BROWSER',
'--extra_annotation_name "includeDoc"',
'--extra_annotation_name "illustration"',
'--extra_annotation_name "illustrationDesc"',
'--extra_annotation_name "ignoreDoc"',
'--extra_annotation_name "propertyDoc"',
'--extra_annotation_name "shortDescription"',
'--warning_level VERBOSE',
'--jscomp_warning accessControls',
'--jscomp_warning ambiguousFunctionDecl',
'--jscomp_warning checkDebuggerStatement',
'--jscomp_warning checkEventfulObjectDisposal',
'--jscomp_warning checkRegExp',
'--jscomp_warning checkTypes',
'--jscomp_warning checkVars',
'--jscomp_warning closureDepMethodUsageChecks',
'--jscomp_warning conformanceViolations',
'--jscomp_warning const',
'--jscomp_warning constantProperty',
'--jscomp_warning deprecated',
'--jscomp_warning deprecatedAnnotations',
'--jscomp_warning duplicate',
'--jscomp_warning duplicateMessage',
'--jscomp_warning es3',
'--jscomp_warning es5Strict',
'--jscomp_warning externsValidation',
'--jscomp_off extraRequire',
'--jscomp_warning fileoverviewTags',
'--jscomp_warning functionParams',
'--jscomp_warning globalThis',
'--jscomp_warning internetExplorerChecks',
'--jscomp_warning invalidCasts',
'--jscomp_warning misplacedTypeAnnotation',
'--jscomp_warning missingGetCssName',
'--jscomp_off missingOverride',
'--jscomp_warning missingPolyfill',
'--jscomp_warning missingProperties',
'--jscomp_warning missingProvide',
'--jscomp_warning missingRequire',
'--jscomp_warning missingReturn',
'--jscomp_warning msgDescriptions',
'--jscomp_off newCheckTypes',
'--jscomp_off newCheckTypesExtraChecks',
'--jscomp_off nonStandardJsDocs',
'--jscomp_off reportUnknownTypes',
'--jscomp_warning suspiciousCode',
'--jscomp_warning strictModuleDepCheck',
'--jscomp_warning typeInvalidation',
'--jscomp_warning undefinedNames',
'--jscomp_warning undefinedVars',
'--jscomp_warning unknownDefines',
'--jscomp_off unusedLocalVariables',
'--jscomp_off unusedPrivateMembers',
'--jscomp_warning uselessCode',
'--jscomp_off useOfGoogBase',
'--jscomp_warning underscore',
'--jscomp_warning visibility',
'--jscomp_warning lintChecks',
]
return compilerArgs
def __getDefaultCompilerArgs(outputFile):
result = [
'java -jar',
COMPILER_PATH,
'--js="%s"' % os.path.join(SRC_PATH, '**.js'),
'--js="%s"' % os.path.join(CLOSURE_SOURCE_PATH, '**.js'),
'--define "goog.DEBUG=false"',
'--js_output_file ' + outputFile,
'--dependency_mode=STRICT',
'--entry_point acgraphentry',
'--hide_warnings_for="goog"'
]
return result
@sync_required
def __compileBinary():
__ensure_dir_exists(OUT_PATH)
t = time.time()
outputFileName = os.path.join(OUT_PATH, 'graphics.min.js')
print 'Building optimized Graphics library js to ' + outputFileName
commands = __getDefaultCompilerArgs(outputFileName) + \
__getOptimizedCompilerArgs()
success = (__call_compiler(commands) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
@sync_required
def __compilePlain():
__ensure_dir_exists(OUT_PATH)
t = time.time()
outputFileName = os.path.join(OUT_PATH, 'graphics.js')
print 'Building plain Graphics library js to ' + outputFileName
commands = __getDefaultCompilerArgs(outputFileName) + \
__getNotOptimizedCompilerArgs()
success = (__call_compiler(commands) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
def __call_compiler(commands):
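    # Flatten the argument list into one string (escaping backslashes), then re-split it with
    # shlex so quoted arguments such as --output_wrapper "..." survive as single tokens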
commands = " ".join(commands).replace('\\', '\\\\')
commands = shlex.split(commands)
# print commands
p = subprocess.Popen(commands, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
(output, err) = p.communicate()
retcode = p.poll()
if len(output) > 0:
print output
return retcode
# =======================================================================================================================
# Build deps
# =======================================================================================================================
@sync_required
def __buildDepsFromCommandLine():
t = time.time()
output_file = os.path.join(SRC_PATH, 'deps.js')
success = (__callDepsWriter(SRC_PATH, output_file, 'whole project') == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
def __callDepsWriter(root, output_file, bundle_name):
print 'Writing deps file to ' + output_file
return subprocess.call([
PYTHON,
DEPS_WRITER_PATH,
'--root_with_prefix=' + root + ' ' + os.path.relpath(root, CLOSURE_SOURCE_PATH),
'--output_file=' + output_file
])
# =======================================================================================================================
# Linter.
# =======================================================================================================================
@sync_required
def __lintFromCommandLine():
t = time.time()
success = (__callLinter(SRC_PATH) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return success
def __callLinter(root):
print 'Linting ' + root + ' directory'
return subprocess.call([
PYTHON,
os.path.join(CLOSURE_LINTER_WRAPPER_PATH, 'gjslint.py'),
'--flagfile',
'gjslint.cfg',
'-r',
root
])
# =======================================================================================================================
# JSDoc auto fix.
# =======================================================================================================================
@sync_required
def __autofixFromCommandLine():
t = time.time()
success = (__callAutoFix(SRC_PATH) == 0)
res = 'Success' if success else 'Failed'
print res + ". Time spent: {:.3f}s\n".format(time.time() - t)
return res
def __callAutoFix(root):
print 'Trying to fix ' + root + ' directory'
return subprocess.call([
PYTHON,
os.path.join(CLOSURE_LINTER_WRAPPER_PATH, 'fixjsstyle.py'),
'--flagfile',
'gjslint.cfg',
'-r',
root
])
# =======================================================================================================================
# Help
# =======================================================================================================================
def __printHelp():
print "Build script commands:\n" \
"\n" \
"without params Prepares the environment, than lints and builds everything.\n" \
"\n" \
"contrib Prepares buildin environment.\n" \
"\n" \
"deps Build ./src/deps.js file, needed to run the library in uncompiled mode.\n" \
"\n" \
"compile Builds the library minified js to ./out/ directory.\n" \
"\n" \
"plain Builds the library as one file pretty-printed js to ./out/ directory.\n" \
"\n" \
"lint Lints library sources.\n" \
"\n" \
"autofix Tries to fix lint errors in library sources.\n"
# =======================================================================================================================
# Main
# =======================================================================================================================
def __execMainScript():
print ''
args = sys.argv
if len(args) == 1:
success = __sync_contrib() and \
__lintFromCommandLine() and \
__buildDepsFromCommandLine() and \
__compilePlain() and \
__compileBinary()
elif args[1] == 'contrib':
success = __sync_contrib()
elif args[1] == 'compile':
success = __compileBinary()
elif args[1] == 'plain':
success = __compilePlain()
elif args[1] == 'deps':
success = __buildDepsFromCommandLine()
elif args[1] == 'lint':
success = __lintFromCommandLine()
elif args[1] == 'autofix':
success = __autofixFromCommandLine()
else:
__printHelp()
success = True
return success
if __name__ == '__main__':
try:
success = __execMainScript()
except StandardError as e:
print e
success = False
sys.exit(0 if success else 1)
|
bsd-3-clause
| -1,710,062,974,786,245,600
| 33.451613
| 336
| 0.524144
| false
| 4.154487
| false
| false
| false
|
evanhenri/memfog
|
src/database.py
|
1
|
1615
|
from sqlalchemy import Column, Integer, String, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Database:
def __init__(self, db_fp):
# Create an engine that stores data in db found at db_path
engine = create_engine('sqlite:///{}'.format(db_fp))
# Create all tables in the engine
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
self.session = DBSession()
def bulk_insert(self, context):
self.session.bulk_save_objects(context.record)
self.session.commit()
def insert(self, context):
self.session.add(context.record)
self.session.commit()
def delete(self, context):
self.session.query(RecordMap).filter_by(row_id=context.record.row_id).delete()
self.session.commit()
def update(self, context):
fields = { k:v for k,v in vars(context.record).items() if k in context.altered_fields }
if len(fields) > 0:
self.session.query(RecordMap).filter_by(row_id=context.record.row_id).update(fields)
self.session.commit()
class RecordMap(Base):
__tablename__ = 'record'
row_id = Column('row_id', Integer, primary_key=True)
title = Column('title', String, nullable=False)
keywords = Column('keywords', String)
body = Column('body', Text)
def __init__(self, row_id=None, title='', keywords='', body=''):
self.row_id = row_id
self.title = title
self.keywords = keywords
self.body = body
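# A minimal usage sketch (not part of the original module; the db path and the ad-hoc
# 'context' object are hypothetical stand-ins for memfog's own context type):
#
#   from types import SimpleNamespace
#   db = Database('/tmp/memfog.db')
#   ctx = SimpleNamespace(record=RecordMap(title='note', keywords='todo', body='...'))
#   db.insert(ctx)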
|
mit
| -738,783,469,334,043,900
| 32.645833
| 96
| 0.643344
| false
| 3.79108
| false
| false
| false
|
jeremydw/google-apputils-python
|
tests/datelib_unittest.py
|
1
|
8310
|
#!/usr/bin/env python
# Copyright 2002 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest for datelib.py module."""
import datetime
import random
import time
import pytz
from google_apputils import basetest
from google_apputils import datelib
class TimestampUnitTest(basetest.TestCase):
seed = 1979
def testTzAwareSuccession(self):
a = datelib.Timestamp.now()
b = datelib.Timestamp.utcnow()
self.assertLessEqual(a, b)
def testTzRandomConversion(self):
random.seed(self.seed)
for unused_i in xrange(100):
stz = pytz.timezone(random.choice(pytz.all_timezones))
a = datelib.Timestamp.FromString('2008-04-12T10:00:00', stz)
b = a
for unused_j in xrange(100):
b = b.astimezone(pytz.timezone(random.choice(pytz.all_timezones)))
self.assertEqual(a, b)
random.seed()
def testMicroTimestampConversion(self):
"""Test that f1(f2(a)) == a."""
def IsEq(x):
self.assertEqual(
x, datelib.Timestamp.FromMicroTimestamp(x).AsMicroTimestamp())
IsEq(0)
IsEq(datelib.MAXIMUM_MICROSECOND_TIMESTAMP)
random.seed(self.seed)
for _ in xrange(100):
IsEq(random.randint(0, datelib.MAXIMUM_MICROSECOND_TIMESTAMP))
def testMicroTimestampKnown(self):
self.assertEqual(0, datelib.Timestamp.FromString(
'1970-01-01T00:00:00', pytz.UTC).AsMicroTimestamp())
self.assertEqual(
datelib.MAXIMUM_MICROSECOND_TIMESTAMP,
datelib.MAXIMUM_MICROSECOND_TIMESTAMP_AS_TS.AsMicroTimestamp())
def testMicroTimestampOrdering(self):
"""Test that cmp(a, b) == cmp(f1(a), f1(b))."""
def IsEq(a, b):
self.assertEqual(
cmp(a, b),
cmp(datelib.Timestamp.FromMicroTimestamp(a),
datelib.Timestamp.FromMicroTimestamp(b)))
random.seed(self.seed)
for unused_i in xrange(100):
IsEq(
random.randint(0, datelib.MAXIMUM_MICROSECOND_TIMESTAMP),
random.randint(0, datelib.MAXIMUM_MICROSECOND_TIMESTAMP))
def testCombine(self):
for tz in (datelib.UTC, datelib.US_PACIFIC):
self.assertEqual(
datelib.Timestamp(1970, 1, 1, 0, 0, 0, 0, tz),
datelib.Timestamp.combine(
datelib.datetime.date(1970, 1, 1),
datelib.datetime.time(0, 0, 0),
tz))
self.assertEqual(
datelib.Timestamp(9998, 12, 31, 23, 59, 59, 999999, tz),
datelib.Timestamp.combine(
datelib.datetime.date(9998, 12, 31),
datelib.datetime.time(23, 59, 59, 999999),
tz))
def testFromString1(self):
for string_zero in (
'1970-01-01 00:00:00',
'19700101T000000',
'1970-01-01T00:00:00'
):
for testtz in (datelib.UTC, datelib.US_PACIFIC):
self.assertEqual(
datelib.Timestamp.FromString(string_zero, testtz),
datelib.Timestamp(1970, 1, 1, 0, 0, 0, 0, testtz))
self.assertEqual(
datelib.Timestamp.FromString(
'1970-01-01T00:00:00+0000', datelib.US_PACIFIC),
datelib.Timestamp(1970, 1, 1, 0, 0, 0, 0, datelib.UTC))
startdate = datelib.Timestamp(2009, 1, 1, 3, 0, 0, 0, datelib.US_PACIFIC)
for day in xrange(1, 366):
self.assertEqual(
datelib.Timestamp.FromString(startdate.isoformat()),
startdate,
'FromString works for day %d since 2009-01-01' % day)
startdate += datelib.datetime.timedelta(days=1)
def testFromString2(self):
"""Test correctness of parsing the local time in a given timezone.
The result shall always be the same as tz.localize(naive_time).
"""
baseday = datelib.datetime.date(2009, 1, 1).toordinal()
for day_offset in xrange(0, 365):
day = datelib.datetime.date.fromordinal(baseday + day_offset)
naive_day = datelib.datetime.datetime.combine(
day, datelib.datetime.time(0, 45, 9))
naive_day_str = naive_day.strftime('%Y-%m-%dT%H:%M:%S')
self.assertEqual(
datelib.US_PACIFIC.localize(naive_day),
datelib.Timestamp.FromString(naive_day_str, tz=datelib.US_PACIFIC),
'FromString localizes time incorrectly')
def testFromStringInterval(self):
expected_date = datetime.datetime.utcnow() - datetime.timedelta(days=1)
expected_s = time.mktime(expected_date.utctimetuple())
actual_date = datelib.Timestamp.FromString('1d')
actual_s = time.mktime(actual_date.timetuple())
diff_seconds = actual_s - expected_s
self.assertBetween(diff_seconds, 0, 1)
self.assertRaises(
datelib.TimeParseError, datelib.Timestamp.FromString, 'wat')
def _EpochToDatetime(t, tz=None):
if tz is not None:
return datelib.datetime.datetime.fromtimestamp(t, tz)
else:
return datelib.datetime.datetime.utcfromtimestamp(t)
class DatetimeConversionUnitTest(basetest.TestCase):
def setUp(self):
self.pst = pytz.timezone('US/Pacific')
self.utc = pytz.utc
self.now = time.time()
def testDatetimeToUTCMicros(self):
self.assertEqual(
0, datelib.DatetimeToUTCMicros(_EpochToDatetime(0)))
self.assertEqual(
1001 * long(datelib._MICROSECONDS_PER_SECOND),
datelib.DatetimeToUTCMicros(_EpochToDatetime(1001)))
self.assertEqual(long(self.now * datelib._MICROSECONDS_PER_SECOND),
datelib.DatetimeToUTCMicros(_EpochToDatetime(self.now)))
# tzinfo shouldn't change the result
self.assertEqual(
0, datelib.DatetimeToUTCMicros(_EpochToDatetime(0, tz=self.pst)))
def testDatetimeToUTCMillis(self):
self.assertEqual(
0, datelib.DatetimeToUTCMillis(_EpochToDatetime(0)))
self.assertEqual(
1001 * 1000L, datelib.DatetimeToUTCMillis(_EpochToDatetime(1001)))
self.assertEqual(long(self.now * 1000),
datelib.DatetimeToUTCMillis(_EpochToDatetime(self.now)))
# tzinfo shouldn't change the result
self.assertEqual(
0, datelib.DatetimeToUTCMillis(_EpochToDatetime(0, tz=self.pst)))
def testUTCMicrosToDatetime(self):
self.assertEqual(_EpochToDatetime(0), datelib.UTCMicrosToDatetime(0))
self.assertEqual(_EpochToDatetime(1.000001),
datelib.UTCMicrosToDatetime(1000001))
self.assertEqual(_EpochToDatetime(self.now), datelib.UTCMicrosToDatetime(
long(self.now * datelib._MICROSECONDS_PER_SECOND)))
# Check timezone-aware comparisons
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMicrosToDatetime(0, tz=self.pst))
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMicrosToDatetime(0, tz=self.utc))
def testUTCMillisToDatetime(self):
self.assertEqual(_EpochToDatetime(0), datelib.UTCMillisToDatetime(0))
self.assertEqual(_EpochToDatetime(1.001), datelib.UTCMillisToDatetime(1001))
t = time.time()
dt = _EpochToDatetime(t)
# truncate sub-milli time
dt -= datelib.datetime.timedelta(microseconds=dt.microsecond % 1000)
self.assertEqual(dt, datelib.UTCMillisToDatetime(long(t * 1000)))
# Check timezone-aware comparisons
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMillisToDatetime(0, tz=self.pst))
self.assertEqual(_EpochToDatetime(0, self.pst),
datelib.UTCMillisToDatetime(0, tz=self.utc))
class MicrosecondsToSecondsUnitTest(basetest.TestCase):
def testConversionFromMicrosecondsToSeconds(self):
self.assertEqual(0.0, datelib.MicrosecondsToSeconds(0))
self.assertEqual(7.0, datelib.MicrosecondsToSeconds(7000000))
self.assertEqual(1.234567, datelib.MicrosecondsToSeconds(1234567))
self.assertEqual(12345654321.123456,
datelib.MicrosecondsToSeconds(12345654321123456))
if __name__ == '__main__':
basetest.main()
|
apache-2.0
| 4,097,536,020,298,167,300
| 34.512821
| 80
| 0.676775
| false
| 3.494533
| true
| false
| false
|
qbeenslee/Nepenthes-Server
|
data/db.py
|
1
|
8333
|
# coding:utf-8
'''
Database models
Author : qbeenslee
Created : 2014/10/10
'''
import time
import datetime
import sqlalchemy
from sqlalchemy import *
from sqlalchemy.orm import sessionmaker, relationship
from config import setting
from data.base_clazz import Base
def get_db():
'''
    Get the collection of operation objects (engine and session)
:return:
'''
engine = sqlalchemy.create_engine(setting.DB_CONNECT_STRING, echo=setting.DEBUG)
Session = sessionmaker(bind=engine)
session = Session()
db = {'engine': engine, 'session': session}
return db
def get_session():
engine = sqlalchemy.create_engine(setting.DB_CONNECT_STRING, echo=setting.DEBUG)
Session = sessionmaker(bind=engine)
session = Session()
return session
class User(Base):
    '''
    User information
    '''
    __tablename__ = 'NEP_USER'
    uid = Column(String(10), primary_key=True)  # user ID, never changes (machine-generated)
    imei = Column(String(17), nullable=False)  # phone IMEI (one phone can register only one account)
    nickname = Column(String(20), nullable=True, unique=True)  # username
    email = Column(String(50), nullable=False, unique=True)  # email address
    motto = Column(String(50), nullable=True)  # personal signature
    wallpaper = Column(String(100), nullable=True)  # user wallpaper
    avatar = Column(String(100), nullable=True)  # user avatar
    verify_status = Column(Integer, nullable=True, default=0)  # verification status
    level = Column(Integer, nullable=True, default=0)  # level
    is_active = Column(Boolean, nullable=False, default=True)  # whether the account is usable
    last_loggin = Column(REAL, nullable=True)  # last login time
    join_datetime = Column(DateTime, nullable=False)  # registration time (datetime)
    join_time = Column(REAL, nullable=False)  # registration time (epoch seconds)
    password = Column(String(60), nullable=False)  # formatted (hashed) password
class Log(Base):
    '''
    Log of user operations
    '''
    __tablename__ = 'NEP_LOG'
    oid = Column(Integer, primary_key=True, autoincrement=True)  # operation ID, auto-increment
    master = Column(ForeignKey(User.uid), nullable=False)  # ID of the user who produced the operation
    otp = Column(Integer, nullable=True)  # operation type
    remark = Column(String(200), nullable=False)  # remarks
log_time = Column(REAL, nullable=False)
class EmailVerify(Base):
    '''
    Storage for email verification codes
    '''
    __tablename__ = 'NEP_ENAIL_VERIFY'
    vcode = Column(String(20), primary_key=True)  # verification code
    invalid_time = Column(REAL, nullable=False)  # expiry time
    master = Column(ForeignKey(User.uid), nullable=False)  # the user this code applies to
class LoginVerify(Base):
    '''
    Storage for login-state verification tokens
    '''
    __tablename__ = 'NEP_LOGIN_VERIFY'
    token = Column(String(20), primary_key=True)  # verification token
    master = Column(ForeignKey(User.uid), nullable=False)  # the user this token applies to
    invalid_time = Column(REAL, nullable=False)  # expiry time, default=now() + 604800.0
    client = Column(String(20), nullable=True, unique=False)  # client identifier
    imei = Column(String(10), nullable=True, unique=True)  # an encrypted token may only be used to log in on one phone
    phone_model = Column(String(30), nullable=True)  # phone model
class Share(Base):
    '''
    Image share
    '''
    __tablename__ = 'NEP_SHARE'
    sid = Column(String(20), primary_key=True)  # share link
    master = Column(ForeignKey(User.uid), nullable=False)  # poster ID
    submit_time = Column(REAL, nullable=False)  # submission time
    change_time = Column(REAL, nullable=True)  # time of last change
    status_code = Column(Integer, nullable=True, default=0)  # share status (banned or not: 0 normal; 1+ abnormal)
    see_count = Column(Integer, nullable=True, default=0)  # view count (popularity)
    cmt_count = Column(Integer, nullable=True, default=0)  # comment count
    img_hash = Column(String(20), nullable=False)  # image hash address
    img_type = Column(Integer, nullable=True, default=0)  # image post type (camera original, camera beautified, album image)
    description = Column(String(140), nullable=True)  # description
    dsp_type = Column(Integer, nullable=True)  # description type (from the share or from the system)
    subject = Column(String(20), nullable=True, default=u"分享")  # subject (default u"分享" means "share")
    hide_location = Column(Boolean, nullable=True, default=True)  # hide location
    place_name = Column(String(120), nullable=True)  # place name
    img_width = Column(Integer, nullable=True, default=0)  # image width
    img_height = Column(Integer, nullable=True, default=0)  # image height
    praise_count = Column(Integer, nullable=True, default=0)  # number of likes
    is_deleted = Column(Boolean, nullable=True, default=False)  # deletion flag
# children_1 = relationship("NEP_COMMENT", cascade="all, delete-orphan", passive_deletes=True)
# children_2 = relationship("NEP_FAVORITE", cascade="all, delete-orphan", passive_deletes=True)
# children_3 = relationship("NEP_PRAISE", cascade="all, delete-orphan", passive_deletes=True)
class Constact(Base):
    '''
    Contact backup (for future backups)
    '''
    __tablename__ = 'NEP_CONSTACT'
    cid = Column(String(20), primary_key=True)
    master = Column(ForeignKey(User.uid), nullable=False)  # phone owner's ID
    phone = Column(String(15), primary_key=True)  # phone number
    name = Column(String(30), nullable=False)  # contact name / note
class Location(Base):
    '''
    Geolocation information
    (sorted by time first, then distance; distance is weighted)
    '''
    __tablename__ = 'NEP_LOCATION'
    sid = Column(String(20), primary_key=True)  # share link
    master = Column(ForeignKey(User.uid), nullable=False)  # poster ID
    geohash = Column(String(100), nullable=True)  # geo hash
    ltime = Column(REAL, nullable=False)  # post time
    latitude = Column(String(10))  # north latitude
    longitude = Column(String(10))  # east longitude
    radius = Column(REAL, nullable=True, default=0.0)  # accuracy radius
    level = Column(Integer, nullable=True)  # accuracy level
class Comment(Base):
    '''
    Comment
    '''
    __tablename__ = 'NEP_COMMENT'
    cid = Column(String(20), primary_key=True)  # comment ID
    sid = Column(ForeignKey(Share.sid, ondelete="CASCADE"), nullable=False)  # Share ID
    master = Column(ForeignKey(User.uid), nullable=False)  # poster ID
    status_code = Column(Integer, nullable=True)  # comment status (banned or not)
    index_code = Column(Integer, nullable=False)  # order of the comment
    content = Column(String(140))  # comment content
    submit_time = Column(REAL, nullable=False)  # submission time
class Favorite(Base):
    '''
    Favorited shares
    '''
    __tablename__ = 'NEP_FAVORITE'
    fid = Column(String(20), primary_key=True)  # favorite ID
    sid = Column(ForeignKey(Share.sid, ondelete="CASCADE"), nullable=False)  # share link
    master = Column(ForeignKey(User.uid), nullable=False)  # collector's ID
    note = Column(String(140))  # user's note when favoriting, or the share's description
    submit_time = Column(REAL, nullable=False)  # favorite time
class Praise(Base):
    '''
    Like (praise)
    '''
    __tablename__ = 'NEP_PRAISE'
    pid = Column(String(20), primary_key=True)
    master = Column(ForeignKey(User.uid), nullable=False)
    sid = Column(ForeignKey(Share.sid, ondelete="CASCADE"), nullable=False)  # Share ID
    submit_time = Column(REAL, nullable=False)  # submission time
class Feedback(Base):
    '''
    Feedback information
    '''
    __tablename__ = 'NEP_FEEDBACK'
    fid = Column(String(20), primary_key=True)
    master = Column(ForeignKey(User.uid), nullable=False)  # poster ID
    ftime = Column(REAL, nullable=False)  # post time
    msg = Column(String(2000), nullable=False)  # feedback content, at most one thousand characters
    Warning_level = Column(String(20), nullable=True)  # warning type
def create_db():
'''
    Create the db file on first run
'''
# os.remove(settings.DB_PATH)
# Base.metadata.create_all(engine)
engine = get_db()['engine']
Base.metadata.create_all(engine)
def test():
session = get_db()['session']
    session.add(LoginVerify(master='dd', token='dd'))
if __name__ == '__main__':
create_db()
|
gpl-3.0
| 5,364,762,305,518,218,000
| 32.458333
| 99
| 0.634422
| false
| 2.653476
| false
| false
| false
|
akarol/cfme_tests
|
cfme/physical/physical_server.py
|
1
|
11597
|
# -*- coding: utf-8 -*-
"""A model of an Infrastructure PhysicalServer in CFME."""
import attr
from navmazing import NavigateToSibling, NavigateToAttribute
from cached_property import cached_property
from wrapanapi.lenovo import LenovoSystem
from cfme.common import PolicyProfileAssignable, WidgetasticTaggable
from cfme.common.physical_server_views import (
PhysicalServerDetailsView,
PhysicalServerManagePoliciesView,
PhysicalServersView,
PhysicalServerTimelinesView
)
from cfme.exceptions import (
ItemNotFound,
StatsDoNotMatch,
HostStatsNotContains,
ProviderHasNoProperty
)
from cfme.modeling.base import BaseEntity, BaseCollection
from cfme.utils.appliance.implementations.ui import CFMENavigateStep, navigate_to, navigator
from cfme.utils.log import logger
from cfme.utils.pretty import Pretty
from cfme.utils.providers import get_crud_by_name
from cfme.utils.update import Updateable
from cfme.utils.varmeth import variable
from cfme.utils.wait import wait_for
@attr.s
class PhysicalServer(BaseEntity, Updateable, Pretty, PolicyProfileAssignable, WidgetasticTaggable):
"""Model of an Physical Server in cfme.
Args:
name: Name of the physical server.
hostname: hostname of the physical server.
ip_address: The IP address as a string.
        custom_ident: The custom identifier.
Usage:
myhost = PhysicalServer(name='vmware')
myhost.create()
"""
pretty_attrs = ['name', 'hostname', 'ip_address', 'custom_ident']
name = attr.ib()
provider = attr.ib(default=None)
hostname = attr.ib(default=None)
ip_address = attr.ib(default=None)
custom_ident = attr.ib(default=None)
db_id = None
mgmt_class = LenovoSystem
INVENTORY_TO_MATCH = ['power_state']
STATS_TO_MATCH = ['cores_capacity', 'memory_capacity']
def load_details(self, refresh=False):
"""To be compatible with the Taggable and PolicyProfileAssignable mixins.
Args:
refresh (bool): Whether to perform the page refresh, defaults to False
"""
view = navigate_to(self, "Details")
if refresh:
view.browser.refresh()
view.flush_widget_cache()
def execute_button(self, button_group, button, handle_alert=False):
view = navigate_to(self, "Details")
view.toolbar.custom_button(button_group).item_select(button, handle_alert=handle_alert)
def power_on(self):
view = navigate_to(self, "Details")
view.toolbar.power.item_select("Power On", handle_alert=True)
def power_off(self):
view = navigate_to(self, "Details")
view.toolbar.power.item_select("Power Off", handle_alert=True)
@variable(alias='ui')
def power_state(self):
view = navigate_to(self, "Details")
return view.entities.summary("Power Management").get_text_of("Power State")
@variable(alias='ui')
def cores_capacity(self):
view = navigate_to(self, "Details")
return view.entities.summary("Properties").get_text_of("CPU total cores")
@variable(alias='ui')
def memory_capacity(self):
view = navigate_to(self, "Details")
return view.entities.summary("Properties").get_text_of("Total memory (mb)")
def refresh(self, cancel=False):
"""Perform 'Refresh Relationships and Power States' for the server.
Args:
cancel (bool): Whether the action should be cancelled, default to False
"""
view = navigate_to(self, "Details")
view.toolbar.configuration.item_select("Refresh Relationships and Power States",
handle_alert=cancel)
def wait_for_physical_server_state_change(self, desired_state, timeout=300):
"""Wait for PhysicalServer to come to desired state. This function waits just the needed amount of
time thanks to wait_for.
Args:
desired_state (str): 'on' or 'off'
timeout (int): Specify amount of time (in seconds) to wait until TimedOutError is raised
"""
view = navigate_to(self.parent, "All")
def _looking_for_state_change():
entity = view.entities.get_entity(name=self.name)
return "currentstate-{}".format(desired_state) in entity.data['state']
wait_for(_looking_for_state_change, fail_func=view.browser.refresh, num_sec=timeout)
@property
def exists(self):
"""Checks if the physical_server exists in the UI.
Returns: :py:class:`bool`
"""
view = navigate_to(self.parent, "All")
try:
view.entities.get_entity(name=self.name, surf_pages=True)
except ItemNotFound:
return False
else:
return True
    @cached_property
    def get_db_id(self):
        if self.db_id is None:
            self.db_id = self.appliance.physical_server_id(self.name)
        return self.db_id
def wait_to_appear(self):
"""Waits for the server to appear in the UI."""
view = navigate_to(self.parent, "All")
logger.info("Waiting for the server to appear...")
wait_for(
lambda: self.exists,
message="Wait for the server to appear",
num_sec=1000,
fail_func=view.browser.refresh
)
def wait_for_delete(self):
"""Waits for the server to remove from the UI."""
view = navigate_to(self.parent, "All")
logger.info("Waiting for the server to delete...")
wait_for(
lambda: not self.exists,
message="Wait for the server to disappear",
num_sec=500,
fail_func=view.browser.refresh
)
def validate_stats(self, ui=False):
""" Validates that the detail page matches the physical server's information.
This method logs into the provider using the mgmt_system interface and collects
a set of statistics to be matched against the UI. An exception will be raised
if the stats retrieved from the UI do not match those retrieved from wrapanapi.
"""
# Make sure we are on the physical server detail page
if ui:
self.load_details()
# Retrieve the client and the stats and inventory to match
client = self.provider.mgmt
stats_to_match = self.STATS_TO_MATCH
inventory_to_match = self.INVENTORY_TO_MATCH
# Retrieve the stats and inventory from wrapanapi
server_stats = client.stats(*stats_to_match, requester=self)
server_inventory = client.inventory(*inventory_to_match, requester=self)
# Refresh the browser
if ui:
self.browser.selenium.refresh()
# Verify that the stats retrieved from wrapanapi match those retrieved
# from the UI
for stat in stats_to_match:
try:
cfme_stat = int(getattr(self, stat)(method='ui' if ui else None))
server_stat = int(server_stats[stat])
if server_stat != cfme_stat:
msg = "The {} stat does not match. (server: {}, server stat: {}, cfme stat: {})"
raise StatsDoNotMatch(msg.format(stat, self.name, server_stat, cfme_stat))
except KeyError:
raise HostStatsNotContains(
"Server stats information does not contain '{}'".format(stat))
except AttributeError:
raise ProviderHasNoProperty("Provider does not know how to get '{}'".format(stat))
# Verify that the inventory retrieved from wrapanapi match those retrieved
# from the UI
for inventory in inventory_to_match:
try:
cfme_inventory = getattr(self, inventory)(method='ui' if ui else None)
server_inventory = server_inventory[inventory]
if server_inventory != cfme_inventory:
msg = "The {} inventory does not match. (server: {}, server inventory: {}, " \
"cfme inventory: {})"
raise StatsDoNotMatch(msg.format(inventory, self.name, server_inventory,
cfme_inventory))
except KeyError:
raise HostStatsNotContains(
"Server inventory information does not contain '{}'".format(inventory))
except AttributeError:
msg = "Provider does not know how to get '{}'"
raise ProviderHasNoProperty(msg.format(inventory))
@attr.s
class PhysicalServerCollection(BaseCollection):
"""Collection object for the :py:class:`cfme.infrastructure.host.PhysicalServer`."""
ENTITY = PhysicalServer
def select_entity_rows(self, physical_servers):
""" Select all physical server objects """
physical_servers = list(physical_servers)
checked_physical_servers = list()
view = navigate_to(self, 'All')
for physical_server in physical_servers:
view.entities.get_entity(name=physical_server.name, surf_pages=True).check()
checked_physical_servers.append(physical_server)
return view
def all(self, provider):
"""returning all physical_servers objects"""
physical_server_table = self.appliance.db.client['physical_servers']
ems_table = self.appliance.db.client['ext_management_systems']
physical_server_query = (
self.appliance.db.client.session
.query(physical_server_table.name, ems_table.name)
.join(ems_table, physical_server_table.ems_id == ems_table.id))
        # NB: any ``provider`` argument is superseded by the collection's
        # 'provider' filter when one is set
        provider = self.filters.get('provider')
        if provider:
            physical_server_query = physical_server_query.filter(ems_table.name == provider.name)
physical_servers = []
for name, ems_name in physical_server_query.all():
physical_servers.append(self.instantiate(name=name,
provider=provider or get_crud_by_name(ems_name)))
return physical_servers
def power_on(self, *physical_servers):
view = self.select_entity_rows(physical_servers)
view.toolbar.power.item_select("Power On", handle_alert=True)
def power_off(self, *physical_servers):
view = self.select_entity_rows(physical_servers)
view.toolbar.power.item_select("Power Off", handle_alert=True)
@navigator.register(PhysicalServerCollection)
class All(CFMENavigateStep):
VIEW = PhysicalServersView
prerequisite = NavigateToAttribute("appliance.server", "LoggedIn")
def step(self):
self.prerequisite_view.navigation.select("Compute", "Physical Infrastructure", "Servers")
@navigator.register(PhysicalServer)
class Details(CFMENavigateStep):
VIEW = PhysicalServerDetailsView
prerequisite = NavigateToAttribute("parent", "All")
def step(self):
self.prerequisite_view.entities.get_entity(name=self.obj.name, surf_pages=True).click()
@navigator.register(PhysicalServer)
class ManagePolicies(CFMENavigateStep):
VIEW = PhysicalServerManagePoliciesView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.toolbar.policy.item_select("Manage Policies")
@navigator.register(PhysicalServer)
class Timelines(CFMENavigateStep):
VIEW = PhysicalServerTimelinesView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.toolbar.monitoring.item_select("Timelines")
|
gpl-2.0
| -3,648,648,376,430,438,400
| 36.898693
| 106
| 0.644218
| false
| 4.135877
| false
| false
| false
|
symac/wikidata
|
rugby/01_getListeJoueurs.py
|
1
|
1035
|
# -*- coding: utf-8 -*-
#!/usr/bin/python
import MySQLdb
import pywikibot
import sys
import re
import mwparserfromhell
from pywikibot import pagegenerators
'''
This script retrieves every page that uses the "Infobox Rugbyman" template.
'''
site = pywikibot.Site("fr", "wikipedia")
def parse(title):
page = pywikibot.Page(site, title)
text = page.get()
return mwparserfromhell.parse(text)
liste = pagegenerators.ReferringPageGenerator(pywikibot.Page(site, u"Modèle:Infobox Rugbyman"), onlyTemplateInclusion=True)
for page in liste:
    print page.title().encode("utf-8")
sys.exit()
parsedText = parse("Mathieu Bourret")
templates = parsedText.filter_templates()
for tpl in templates:
if tpl.name.upper().strip() == "INFOBOX RUGBYMAN":
print ">>%s<<" % tpl.name.strip().encode("utf-8")
        saisons = re.split(r"<br ?/>", str(tpl.get("saison").value))
        clubs = re.split(r"<br ?/>", str(tpl.get("club").value))
print clubs
print "%s - %s" % (len(clubs), len(saisons))
# pywikibot.extract_templates_and_params
|
gpl-2.0
| -8,651,722,581,256,401,000
| 26.184211
| 123
| 0.710271
| false
| 2.923513
| false
| false
| false
|
pirate42/docc
|
docc/image.py
|
1
|
2823
|
# coding=utf-8
from docc.exceptions import APIError
class Image(object):
"""Represent an Image object (name and distribution information)"""
def __init__(self, identifier, name, distribution):
self.id = identifier
self.name = name
self.distribution = distribution
def __repr__(self):
return "<%s: %s>" % (self.id, self.name)
def __str__(self):
return "%s: %s, %s" % (self.id, self.name, self.distribution)
def __eq__(self, other):
return (
isinstance(other, self.__class__) and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self.__eq__(other)
def destroy(self, service):
"""Destroy this image"""
response = service.get("images/%s/destroy" % self.id)
status = response['status']
return status == 'OK'
@staticmethod
def get(service, identifier):
"""Return the Image given an identifier and None if not found.
:param identifier: TODO
:param service: The service object for the Digital Ocean account
that holds the images
"""
try:
response = service.get('images/%s' % identifier)
        except APIError:
return None
encoded_image = response['image']
i = Image(encoded_image['id'],
encoded_image['name'],
encoded_image['distribution']
)
return i
@staticmethod
def __images(service, my_filter=None):
"""Return the a list containing all the know images.
:param service: The service object for the Digital Ocean account that
holds the images
:param my_filter: Should be absent, 'my_images', 'global'. If 'all'
this will return all the images you have access to. 'my_images' will
return the images you stored and 'global' the images available to all
customers.
"""
if my_filter is None:
response = service.get("images")
else:
response = service.get("images", {'filter': my_filter})
encoded_images = response['images']
result = []
for encoded_image in encoded_images:
i = Image(encoded_image['id'], encoded_image['name'],
encoded_image['distribution'])
result.append(i)
return result
@staticmethod
def images(service):
"""Return all the known images included mine"""
return Image.__images(service)
@staticmethod
def my_images(service):
"""Return my images"""
return Image.__images(service, 'my_images')
@staticmethod
def global_images(service):
"""Return globally available images"""
return Image.__images(service, 'global')
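# Illustrative usage sketch (assumes a configured docc Service for a Digital
# Ocean account; the identifier is a placeholder):
#
#     image = Image.get(service, 12345)        # None if the id is unknown
#     mine = Image.my_images(service)          # images you stored yourself
#     public = Image.global_images(service)    # images available to everyone
#     if image is not None:
#         image.destroy(service)               # True if the API reports 'OK'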
|
mit
| 7,968,175,958,036,338,000
| 28.726316
| 77
| 0.57492
| false
| 4.502392
| false
| false
| false
|
Unity-Technologies/ml-agents
|
ml-agents-envs/mlagents_envs/registry/remote_registry_entry.py
|
1
|
3260
|
from sys import platform
from typing import Optional, Any, List
from mlagents_envs.environment import UnityEnvironment
from mlagents_envs.base_env import BaseEnv
from mlagents_envs.registry.binary_utils import get_local_binary_path
from mlagents_envs.registry.base_registry_entry import BaseRegistryEntry
class RemoteRegistryEntry(BaseRegistryEntry):
def __init__(
self,
identifier: str,
expected_reward: Optional[float],
description: Optional[str],
linux_url: Optional[str],
darwin_url: Optional[str],
win_url: Optional[str],
additional_args: Optional[List[str]] = None,
):
"""
A RemoteRegistryEntry is an implementation of BaseRegistryEntry that uses a
Unity executable downloaded from the internet to launch a UnityEnvironment.
__Note__: The url provided must be a link to a `.zip` file containing a single
compressed folder with the executable inside. There can only be one executable
in the folder and it must be at the root of the folder.
:param identifier: The name of the Unity Environment.
:param expected_reward: The cumulative reward that an Agent must receive
for the task to be considered solved.
:param description: A description of the Unity Environment. Contains human
readable information about potential special arguments that the make method can
take as well as information regarding the observation, reward, actions,
behaviors and number of agents in the Environment.
:param linux_url: The url of the Unity executable for the Linux platform
:param darwin_url: The url of the Unity executable for the OSX platform
:param win_url: The url of the Unity executable for the Windows platform
"""
super().__init__(identifier, expected_reward, description)
self._linux_url = linux_url
self._darwin_url = darwin_url
self._win_url = win_url
self._add_args = additional_args
def make(self, **kwargs: Any) -> BaseEnv:
"""
Returns the UnityEnvironment that corresponds to the Unity executable found at
the provided url. The arguments passed to this method will be passed to the
constructor of the UnityEnvironment (except for the file_name argument)
"""
url = None
if platform == "linux" or platform == "linux2":
url = self._linux_url
if platform == "darwin":
url = self._darwin_url
if platform == "win32":
url = self._win_url
if url is None:
raise FileNotFoundError(
f"The entry {self.identifier} does not contain a valid url for this "
"platform"
)
path = get_local_binary_path(self.identifier, url)
if "file_name" in kwargs:
kwargs.pop("file_name")
args: List[str] = []
if "additional_args" in kwargs:
if kwargs["additional_args"] is not None:
args += kwargs["additional_args"]
if self._add_args is not None:
args += self._add_args
kwargs["additional_args"] = args
return UnityEnvironment(file_name=path, **kwargs)
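# Hypothetical usage sketch (the identifier, reward and urls below are
# placeholders, not entries from a real registry):
#
#     entry = RemoteRegistryEntry(
#         identifier="Example3DBall",
#         expected_reward=100.0,
#         description="example entry",
#         linux_url="https://example.com/Example3DBall_linux.zip",
#         darwin_url=None,
#         win_url=None,
#     )
#     env = entry.make(no_graphics=True)  # extra kwargs go to UnityEnvironment
#     env.close()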
|
apache-2.0
| 8,832,754,937,924,894,000
| 44.915493
| 87
| 0.647546
| false
| 4.496552
| false
| false
| false
|
wq/wq.db
|
tests/test_router.py
|
1
|
2694
|
from .base import APITestCase
from django.core.exceptions import ImproperlyConfigured
try:
from django.urls import include
except ImportError:
from django.conf.urls import include
class RestRouterTestCase(APITestCase):
def test_rest_model_conflict(self):
from wq.db import rest
from tests.conflict_app.models import Item
# Register model with same name as existing model
with self.assertRaises(ImproperlyConfigured) as e:
rest.router.register_model(Item, fields="__all__")
self.assertEqual(
e.exception.args[0],
"Could not register <class 'tests.conflict_app.models.Item'>: "
"the name 'item' was already registered for "
"<class 'tests.rest_app.models.Item'>"
)
self.assertNotIn(Item, rest.router._models)
# Register model with different name, but same URL as existing model
with self.assertRaises(ImproperlyConfigured) as e:
rest.router.register_model(
Item, name="conflictitem", fields="__all__"
)
self.assertEqual(
e.exception.args[0],
"Could not register <class 'tests.conflict_app.models.Item'>: "
"the url 'items' was already registered for "
"<class 'tests.rest_app.models.Item'>"
)
self.assertNotIn(Item, rest.router._models)
# Register model with different name and URL
rest.router.register_model(
Item, name="conflictitem", url="conflictitems", fields="__all__"
)
self.assertIn(Item, rest.router._models)
self.assertIn("conflictitem", rest.router.get_config()['pages'])
def test_rest_old_config(self):
from wq.db import rest
from tests.conflict_app.models import TestModel
with self.assertRaises(ImproperlyConfigured):
rest.router.register_model(
TestModel,
partial=True,
fields="__all__"
)
self.assertNotIn(TestModel, rest.router._models)
with self.assertRaises(ImproperlyConfigured):
rest.router.register_model(
TestModel,
reversed=True,
fields="__all__"
)
self.assertNotIn(TestModel, rest.router._models)
with self.assertRaises(ImproperlyConfigured):
rest.router.register_model(
TestModel,
max_local_pages=0,
fields="__all__"
)
self.assertNotIn(TestModel, rest.router._models)
def test_rest_include(self):
from wq.db import rest
include(rest.router.urls)
|
mit
| -4,957,441,498,676,931,000
| 33.538462
| 76
| 0.598367
| false
| 4.475083
| true
| false
| false
|
pmitche/it3105-aiprogramming
|
project3/module6/deeplearning/layer.py
|
1
|
1224
|
import numpy as np
import theano
import theano.tensor as T
class HiddenLayer(object):
def __init__(self, input, num_in, number_of_nodes, activation):
self.num_in = num_in
self.number_of_nodes = number_of_nodes
self.weights = self.init_weights(activation)
self.output = activation(T.dot(input, self.weights))
self.params = [self.weights]
def init_weights(self, activation):
# Default for activation function tanh
weights = np.asarray(
np.random.uniform(
low=-np.sqrt(6. / (self.num_in + self.number_of_nodes)),
high=np.sqrt(6. / (self.num_in + self.number_of_nodes)),
size=(self.num_in, self.number_of_nodes)
),
dtype=theano.config.floatX
)
if activation == T.nnet.sigmoid:
weights *= 4
elif activation == T.nnet.softmax:
weights = np.zeros((self.num_in, self.number_of_nodes), dtype=theano.config.floatX)
elif activation == T.nnet.relu:
weights = np.random.uniform(low=0.0, high=0.1, size=(self.num_in, self.number_of_nodes))
return theano.shared(value=weights, name='weights', borrow=True)
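# Minimal usage sketch (layer sizes and activation are arbitrary choices for
# illustration):
#
#     x = T.matrix('x')
#     layer = HiddenLayer(input=x, num_in=784, number_of_nodes=100,
#                         activation=T.tanh)
#     forward = theano.function([x], layer.output)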
|
mit
| 273,362,223,681,299,230
| 33
| 100
| 0.596405
| false
| 3.589443
| false
| false
| false
|
BlackHole/enigma2-obh10
|
lib/python/Tools/Downloader.py
|
1
|
2535
|
from boxbranding import getMachineBrand, getMachineName
from twisted.web import client
from twisted.internet import reactor, defer
from urlparse import urlparse
class HTTPProgressDownloader(client.HTTPDownloader):
def __init__(self, url, outfile, headers=None):
client.HTTPDownloader.__init__(self, url, outfile, headers=headers, agent="%s %s Enigma2 HbbTV/1.1.1 (+PVR+RTSP+DL;OpenBh;;;)" % (getMachineBrand(), getMachineName()))
self.status = self.progress_callback = self.error_callback = self.end_callback = None
self.deferred = defer.Deferred()
def noPage(self, reason):
if self.status == "304":
client.HTTPDownloader.page(self, "")
else:
client.HTTPDownloader.noPage(self, reason)
if self.error_callback:
self.error_callback(reason.getErrorMessage(), self.status)
def gotHeaders(self, headers):
if self.status == "200":
if "content-length" in headers:
self.totalbytes = int(headers["content-length"][0])
else:
self.totalbytes = 0
self.currentbytes = 0.0
return client.HTTPDownloader.gotHeaders(self, headers)
def pagePart(self, packet):
if self.status == "200":
self.currentbytes += len(packet)
if self.totalbytes and self.progress_callback:
self.progress_callback(self.currentbytes, self.totalbytes)
return client.HTTPDownloader.pagePart(self, packet)
def pageEnd(self):
ret = client.HTTPDownloader.pageEnd(self)
if self.end_callback:
self.end_callback()
return ret
class downloadWithProgress:
def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs):
parsed = urlparse(url)
scheme = parsed.scheme
host = parsed.hostname
port = parsed.port or (443 if scheme == 'https' else 80)
self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs)
if scheme == 'https':
from twisted.internet import ssl
if contextFactory is None:
contextFactory = ssl.ClientContextFactory()
self.connection = reactor.connectSSL(host, port, self.factory, contextFactory)
else:
self.connection = reactor.connectTCP(host, port, self.factory)
def start(self):
return self.factory.deferred
def stop(self):
if self.connection:
self.factory.progress_callback = self.factory.end_callback = self.factory.error_callback = None
self.connection.disconnect()
def addProgress(self, progress_callback):
self.factory.progress_callback = progress_callback
def addEnd(self, end_callback):
self.factory.end_callback = end_callback
def addError(self, error_callback):
self.factory.error_callback = error_callback
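# Illustrative usage from inside a running twisted reactor (the url, path and
# callbacks are placeholders):
#
#     def on_progress(current, total):
#         pass  # e.g. update a progress bar
#     dl = downloadWithProgress("http://example.com/file.bin", "/tmp/file.bin")
#     dl.addProgress(on_progress)
#     dl.start().addCallbacks(on_success, on_error)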
|
gpl-2.0
| -6,097,105,769,769,699,000
| 32.8
| 169
| 0.737673
| false
| 3.318063
| false
| false
| false
|
klocey/DiversityTools
|
StatPak/ACE.py
|
1
|
3982
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
# Adapted from scikit-bio/skbio/diversity/alpha/_ace.py
# Greg Caporaso (gregcaporaso), Aug 7, 2014. API: moved base.py to _base.py
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import numpy as np
import os
import sys
mydir = os.path.expanduser("~/Desktop/Repos/rare-bio/tools/StatPak")
sys.path.append(mydir)
import DiversityMetrics as DM
from DiversityMetrics import _validate, osd
def ace(counts, rare_threshold=10):
"""Calculate the ACE metric (Abundance-based Coverage Estimator).
Parameters
----------
counts : 1-D array_like, int
Vector of counts.
rare_threshold : int, optional
Threshold at which an OTU containing as many or fewer individuals will
be considered rare.
Returns
-------
double
Computed ACE metric.
Raises
------
ValueError
If every rare OTU is a singleton.
Notes
-----
ACE was first introduced in [1]_ and [2]_. The implementation here is based
on the description given in the EstimateS manual [3]_.
If no rare OTUs exist, returns the number of abundant OTUs. The default
value of 10 for `rare_threshold` is based on [4]_.
If `counts` contains zeros, indicating OTUs which are known to exist in the
environment but did not appear in the sample, they will be ignored for the
purpose of calculating the number of rare OTUs.
References
----------
.. [1] Chao, A. & S.-M Lee. 1992 Estimating the number of classes via
sample coverage. Journal of the American Statistical Association 87,
210-217.
.. [2] Chao, A., M.-C. Ma, & M. C. K. Yang. 1993. Stopping rules and
estimation for recapture debugging with unequal failure rates.
Biometrika 80, 193-201.
.. [3] http://viceroy.eeb.uconn.edu/estimates/
.. [4] Chao, A., W.-H. Hwang, Y.-C. Chen, and C.-Y. Kuo. 2000. Estimating
the number of shared species in two communities. Statistica Sinica
10:227-246.
"""
counts = _validate(counts)
freq_counts = np.bincount(counts)
s_rare = _otus_rare(freq_counts, rare_threshold)
singles = freq_counts[1]
if singles > 0 and singles == s_rare:
raise ValueError("The only rare OTUs are singletons, so the ACE "
"metric is undefined. EstimateS suggests using "
"bias-corrected Chao1 instead.")
s_abun = _otus_abundant(freq_counts, rare_threshold)
if s_rare == 0:
return s_abun
n_rare = _number_rare(freq_counts, rare_threshold)
c_ace = 1 - singles / n_rare
top = s_rare * _number_rare(freq_counts, rare_threshold, gamma=True)
bottom = c_ace * n_rare * (n_rare - 1)
gamma_ace = (top / bottom) - 1
if gamma_ace < 0:
gamma_ace = 0
return s_abun + (s_rare / c_ace) + ((singles / c_ace) * gamma_ace)
def _otus_rare(freq_counts, rare_threshold):
"""Count number of rare OTUs."""
return freq_counts[1:rare_threshold + 1].sum()
def _otus_abundant(freq_counts, rare_threshold):
"""Count number of abundant OTUs."""
return freq_counts[rare_threshold + 1:].sum()
def _number_rare(freq_counts, rare_threshold, gamma=False):
"""Return number of individuals in rare OTUs.
``gamma=True`` generates the ``n_rare`` used for the variation coefficient.
"""
n_rare = 0
if gamma:
for i, j in enumerate(freq_counts[:rare_threshold + 1]):
n_rare = n_rare + (i * j) * (i - 1)
else:
for i, j in enumerate(freq_counts[:rare_threshold + 1]):
n_rare = n_rare + (i * j)
return n_rare
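# Illustrative call (the counts vector below is made up; _validate above must
# accept it as a 1-D integer array):
#
#     counts = np.array([1, 1, 2, 2, 4, 5, 15, 30, 25])
#     estimate = ace(counts)                     # default rare_threshold=10
#     estimate5 = ace(counts, rare_threshold=5)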
|
mit
| 2,663,075,253,608,483,000
| 33.626087
| 79
| 0.615018
| false
| 3.418026
| false
| false
| false
|
sanjayankur31/pyjigdo
|
pyJigdo/base.py
|
1
|
5381
|
#
# Copyright 2007-2009 Fedora Unity Project (http://fedoraunity.org)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import sys, os
from urlparse import urlparse
import pyJigdo.logger
import pyJigdo.pyasync
from pyJigdo.jigdo import JigdoFile
from pyJigdo.translate import _, N_
class PyJigdoBase:
""" PyJigdoBase is the primary object that should be called back to.
This object should be aware of all other objects and maintain their
    relationships and states. This class should be used to look up all
objects and their children. """
def __init__(self, pyjigdo_entry):
""" Initializes the PyJigdoBase class with the options specified
from the command line. Setup all basic options and get things
setup to start creating Jigdo objects. """
self.jigdo_files = {} # {JigdoFile().id: JigdoFile(),}
# FIXME: Populate these entry points.
#self.jigdo_templates = {} # {JigdoTemplate().id: JigdoTemplate(),}
#self.jigdo_slices = {} # {JigdoSlice().id: JigdoSlice(),}
#self.slice_sources = {} # {SliceSource().id: SliceSource(),}
# / FIXME
self.log = None # PyJigdoLogger()
self.async = None # PyJigdoReactor()
self.stats = None # PyJigdoStats()
self.interface = None # PyJigdoTextInterface()
self.scan_targets = [] # [PyJigdoScanTarget(),]
# Set our exit points to callback.
self.abort = pyjigdo_entry.abort
self.done = pyjigdo_entry.done
# Get the options parser, and bring it's options
# and args into this namespace.
self.parser = pyjigdo_entry.parser
self.settings = pyjigdo_entry.cli_options
self.args_jigdo_files = pyjigdo_entry.jigdo_files
# Setup Logging.
self.create_logger()
def run(self):
""" Start up the reactor and start performing operations to
put the Jigdo together. """
# Setup Reactor
self.async = pyJigdo.pyasync.PyJigdoReactor( self.log,
threads = self.settings.download_threads,
timeout = self.settings.download_timeout )
# Prepare Jigdo
if self.prep_jigdo_files():
# Seed Reactor
self.async.seed(self)
else:
self.log.critical(_("Seems there is nothing to do!"))
return self.done()
def create_logger(self):
""" Create a logger instance setting an appropriate loglevel
based on runtime options. """
loglevel = pyJigdo.logger.CRITICAL
if self.settings.verbose >= 3:
loglevel = pyJigdo.logger.DEBUG
elif self.settings.verbose == 2:
loglevel = pyJigdo.logger.INFO
elif self.settings.verbose == 1:
loglevel = pyJigdo.logger.WARNING
if self.settings.debug:
loglevel = pyJigdo.logger.DEBUG
# Initialize the logging object
self.log = pyJigdo.logger.pyJigdoLogger( self.settings.log_file,
loglevel = loglevel )
def prep_jigdo_files(self):
""" Prepare selected Jigdo downloads for injection into our reactor. """
for jigdo in self.args_jigdo_files:
self.log.info(_("Prepping Jigdo file %s ") % jigdo)
jigdo_url = urlparse(jigdo)
jigdo_filename = os.path.basename(jigdo_url.path)
if jigdo_url.scheme or \
(not jigdo_url.scheme and os.path.isfile(jigdo_url.path)):
jigdo_storage_location = os.path.join( self.settings.download_target,
jigdo_filename )
self.log.debug(_("Adding Jigdo file %s" % jigdo_url.geturl()))
self.log.debug(_("Storing Jigdo %s at %s" % ( jigdo_filename,
jigdo_storage_location )))
self.jigdo_files[jigdo] = JigdoFile( self.log,
self.async,
self.settings,
self,
jigdo_url.geturl(),
jigdo_storage_location )
if os.path.isfile(jigdo_url.path): self.jigdo_files[jigdo].has_data = True
else:
self.log.error(_("Jigdo file %s seems to not be valid." % jigdo))
self.log.error(_("Cowardly refusing to use/download."))
if not self.jigdo_files:
self.log.critical(_("Nothing given to download!"))
return False
return True
|
gpl-2.0
| 8,769,146,377,141,532,000
| 43.471074
| 90
| 0.584836
| false
| 4.027695
| false
| false
| false
|
nlamirault/portefaix
|
diagrams/certmanager.py
|
1
|
3182
|
#!/usr/bin/python3
# Copyright (C) 2020 Nicolas Lamirault <nicolas.lamirault@gmail.com>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import diagrams
from diagrams.k8s import compute
from diagrams.k8s import network
from diagrams.k8s import rbac
def architecture():
with diagrams.Diagram("cert-manager", show=False, direction="TB"):
with diagrams.Cluster("Cloud Platform"):
with diagrams.Cluster("Kubernetes Cluster"):
clusterRole_cainjector = rbac.ClusterRole()
clusterRoleBinding_cainjector = rbac.ClusterRoleBinding()
clusterRole_webhook = rbac.ClusterRole()
clusterRoleBinding_webhook = rbac.ClusterRoleBinding()
clusterRole_certmanager = rbac.ClusterRole()
clusterRoleBinding_certmanager = rbac.ClusterRoleBinding()
with diagrams.Cluster("cert-manager"):
sa_cainjector = rbac.ServiceAccount("cainjector")
role_cainjector = rbac.Role()
roleBinding_cainjector = rbac.RoleBinding()
role_cainjector << roleBinding_cainjector >> sa_cainjector
clusterRole_cainjector << clusterRoleBinding_cainjector >> sa_cainjector
sa_webhook = rbac.ServiceAccount("webhook")
role_webhook = rbac.Role()
roleBinding_webhook = rbac.RoleBinding()
role_webhook << roleBinding_webhook >> sa_webhook
clusterRole_webhook << clusterRoleBinding_webhook >> sa_webhook
sa_certmanager = rbac.ServiceAccount("certmanager")
role_certmanager = rbac.Role()
roleBinding_certmanager = rbac.RoleBinding()
role_certmanager << roleBinding_certmanager >> sa_certmanager
clusterRole_certmanager << clusterRoleBinding_certmanager >> sa_certmanager
deploy_certmanager = compute.Deployment("certmanager")
svc_certmanager = network.Service()
svc_certmanager << deploy_certmanager << sa_certmanager
deploy_cainjector = compute.Deployment("cainjector")
svc_cainjector = network.Service()
svc_cainjector << deploy_cainjector << sa_cainjector
deploy_webhook = compute.Deployment("webhook")
svc_webhook = network.Service()
svc_webhook << deploy_webhook << sa_webhook
# TODO:
# mutatingWebhookConfiguration
# validatingWebhookConfiguration
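# To render the diagram (a sketch; diagrams writes "cert-manager.png" into the
# working directory when the Diagram context above exits):
#
# if __name__ == "__main__":
#     architecture()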
|
apache-2.0
| -5,073,813,048,137,684,000
| 44.442857
| 95
| 0.621188
| false
| 4.512057
| false
| false
| false
|
yunojuno/django-inbound-email
|
inbound_email/backends/sendgrid.py
|
1
|
5539
|
import json
import logging
from email.utils import getaddresses
from django.core.mail import EmailMultiAlternatives
from django.http import HttpRequest
from django.utils.datastructures import MultiValueDictKeyError
from django.utils.encoding import smart_text
from ..backends import RequestParser
from ..errors import RequestParseError, AttachmentTooLargeError
logger = logging.getLogger(__name__)
def _decode_POST_value(request, field_name, default=None):
"""Helper to decode a request field into unicode based on charsets encoding.
Args:
request: the HttpRequest object.
field_name: the field expected in the request.POST
Kwargs:
default: if passed in then field is optional and default is used if not
found; if None, then assume field exists, which will raise an error
if it does not.
Returns: the contents of the string encoded using the related charset from
the requests.POST['charsets'] dictionary (or 'utf-8' if none specified).
"""
if default is None:
value = request.POST[field_name]
else:
value = request.POST.get(field_name, default)
# it's inefficient to load this each time it gets called, but we're
# not anticipating incoming email being a performance bottleneck right now!
charsets = json.loads(request.POST.get('charsets', "{}"))
charset = charsets.get(field_name, 'utf-8')
if charset.lower() != 'utf-8':
logger.debug("Incoming email field '%s' has %s encoding.", field_name, charset)
return smart_text(value, encoding=charset)
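# Illustrative behaviour (the request contents are made up): given
# request.POST = {'subject': <latin-1 bytes>, 'charsets': '{"subject": "iso-8859-1"}'}
# _decode_POST_value(request, 'subject') decodes the value as iso-8859-1;
# without a matching 'charsets' entry it falls back to utf-8.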
class SendGridRequestParser(RequestParser):
"""SendGrid request parser."""
def _get_addresses(self, address_data, retain_name=False):
"""
Takes RFC-compliant email addresses in both terse (email only)
and verbose (name + email) forms and returns a list of
email address strings
(TODO: breaking change that returns a tuple of (name, email) per string)
"""
if retain_name:
raise NotImplementedError(
"Not yet implemented, but will need client-code changes too"
)
        # We trust that an email address contains an "@" after
# email.utils.getaddresses has done the hard work. If we wanted
# to we could use a regex to check for greater email validity
# NB: getaddresses expects a list, so ensure we feed it appropriately
if isinstance(address_data, str):
if "[" not in address_data:
# Definitely turn these into a list
# NB: this is pretty assumptive, but still prob OK
address_data = [address_data]
output = [x[1] for x in getaddresses(address_data) if "@" in x[1]]
return output
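    # Illustrative behaviour (the addresses are made up):
    #
    #     self._get_addresses(['Fred <fred@example.com>, jim@example.com'])
    #     -> ['fred@example.com', 'jim@example.com']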
def parse(self, request):
"""Parse incoming request and return an email instance.
Args:
request: an HttpRequest object, containing the forwarded email, as
per the SendGrid specification for inbound emails.
Returns:
an EmailMultiAlternatives instance, containing the parsed contents
of the inbound email.
TODO: non-UTF8 charset handling.
TODO: handler headers.
"""
assert isinstance(request, HttpRequest), "Invalid request type: %s" % type(request)
try:
# from_email should never be a list (unless we change our API)
from_email = self._get_addresses([_decode_POST_value(request, 'from')])[0]
# ...but all these can and will be a list
to_email = self._get_addresses([_decode_POST_value(request, 'to')])
cc = self._get_addresses([_decode_POST_value(request, 'cc', default='')])
bcc = self._get_addresses([_decode_POST_value(request, 'bcc', default='')])
subject = _decode_POST_value(request, 'subject')
text = _decode_POST_value(request, 'text', default='')
html = _decode_POST_value(request, 'html', default='')
        except IndexError:
            raise RequestParseError(
                "Inbound request lacks a valid from address: %s." % request.POST.get('from')
            )
except MultiValueDictKeyError as ex:
raise RequestParseError("Inbound request is missing required value: %s." % ex)
if "@" not in from_email:
# Light sanity check for potential issues related to taking just the
# first element of the 'from' address list
raise RequestParseError("Could not get a valid from address out of: %s." % request)
email = EmailMultiAlternatives(
subject=subject,
body=text,
from_email=from_email,
to=to_email,
cc=cc,
bcc=bcc,
)
if html is not None and len(html) > 0:
email.attach_alternative(html, "text/html")
        # TODO: this won't cope with big files - should really read the file in chunks
for n, f in list(request.FILES.items()):
if f.size > self.max_file_size:
logger.debug(
"File attachment %s is too large to process (%sB)",
f.name,
f.size
)
raise AttachmentTooLargeError(
email=email,
filename=f.name,
size=f.size
)
else:
email.attach(f.name, f.read(), f.content_type)
return email
|
mit
| -6,551,924,801,335,674,000
| 36.938356
| 95
| 0.610399
| false
| 4.4312
| false
| false
| false
|
Lujeni/ansible
|
lib/ansible/modules/monitoring/zabbix/zabbix_host_info.py
|
1
|
8077
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) me@mimiko.me
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
RETURN = '''
---
hosts:
description: List of Zabbix hosts. See https://www.zabbix.com/documentation/4.0/manual/api/reference/host/get for list of host values.
returned: success
  type: list
sample: [ { "available": "1", "description": "", "disable_until": "0", "error": "", "flags": "0", "groups": ["1"], "host": "Host A", ... } ]
'''
DOCUMENTATION = '''
---
module: zabbix_host_info
short_description: Gather information about Zabbix host
description:
- This module allows you to search for Zabbix host entries.
- This module was called C(zabbix_host_facts) before Ansible 2.9. The usage did not change.
version_added: "2.7"
author:
- "Michael Miko (@RedWhiteMiko)"
requirements:
- "python >= 2.6"
- "zabbix-api >= 0.5.4"
options:
host_name:
description:
- Name of the host in Zabbix.
- host_name is the unique identifier used and cannot be updated using this module.
required: true
host_ip:
description:
- Host interface IP of the host in Zabbix.
required: false
exact_match:
description:
- Find the exact match
type: bool
default: no
remove_duplicate:
description:
- Remove duplicate host from host result
type: bool
default: yes
host_inventory:
description:
- List of host inventory keys to display in result.
- Whole host inventory is retrieved if keys are not specified.
type: list
required: false
version_added: 2.8
extends_documentation_fragment:
- zabbix
'''
EXAMPLES = '''
- name: Get host info
local_action:
module: zabbix_host_info
server_url: http://monitor.example.com
login_user: username
login_password: password
host_name: ExampleHost
host_ip: 127.0.0.1
timeout: 10
exact_match: no
remove_duplicate: yes
- name: Reduce host inventory information to provided keys
local_action:
module: zabbix_host_info
server_url: http://monitor.example.com
login_user: username
login_password: password
host_name: ExampleHost
host_inventory:
- os
- tag
host_ip: 127.0.0.1
timeout: 10
exact_match: no
remove_duplicate: yes
'''
import atexit
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
from zabbix_api import ZabbixAPI
HAS_ZABBIX_API = True
except ImportError:
ZBX_IMP_ERR = traceback.format_exc()
HAS_ZABBIX_API = False
class Host(object):
def __init__(self, module, zbx):
self._module = module
self._zapi = zbx
def get_hosts_by_host_name(self, host_name, exact_match, host_inventory):
""" Get host by host name """
search_key = 'search'
if exact_match:
search_key = 'filter'
host_list = self._zapi.host.get({'output': 'extend', 'selectParentTemplates': ['name'], search_key: {'host': [host_name]},
'selectInventory': host_inventory})
if len(host_list) < 1:
self._module.fail_json(msg="Host not found: %s" % host_name)
else:
return host_list
def get_hosts_by_ip(self, host_ips, host_inventory):
""" Get host by host ip(s) """
hostinterfaces = self._zapi.hostinterface.get({
'output': 'extend',
'filter': {
'ip': host_ips
}
})
if len(hostinterfaces) < 1:
self._module.fail_json(msg="Host not found: %s" % host_ips)
host_list = []
for hostinterface in hostinterfaces:
host = self._zapi.host.get({
'output': 'extend',
'selectGroups': 'extend',
'selectParentTemplates': ['name'],
'hostids': hostinterface['hostid'],
'selectInventory': host_inventory
})
host[0]['hostinterfaces'] = hostinterface
host_list.append(host[0])
return host_list
def delete_duplicate_hosts(self, hosts):
""" Delete duplicated hosts """
unique_hosts = []
listed_hostnames = []
for zabbix_host in hosts:
if zabbix_host['name'] in listed_hostnames:
continue
unique_hosts.append(zabbix_host)
listed_hostnames.append(zabbix_host['name'])
return unique_hosts
def main():
module = AnsibleModule(
argument_spec=dict(
server_url=dict(type='str', required=True, aliases=['url']),
login_user=dict(type='str', required=True),
login_password=dict(type='str', required=True, no_log=True),
host_name=dict(type='str', default='', required=False),
host_ip=dict(type='list', default=[], required=False),
http_login_user=dict(type='str', required=False, default=None),
http_login_password=dict(type='str', required=False, default=None, no_log=True),
validate_certs=dict(type='bool', required=False, default=True),
timeout=dict(type='int', default=10),
exact_match=dict(type='bool', required=False, default=False),
remove_duplicate=dict(type='bool', required=False, default=True),
host_inventory=dict(type='list', default=[], required=False)
),
supports_check_mode=True
)
if module._name == 'zabbix_host_facts':
module.deprecate("The 'zabbix_host_facts' module has been renamed to 'zabbix_host_info'", version='2.13')
if not HAS_ZABBIX_API:
module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)
server_url = module.params['server_url']
login_user = module.params['login_user']
login_password = module.params['login_password']
http_login_user = module.params['http_login_user']
http_login_password = module.params['http_login_password']
validate_certs = module.params['validate_certs']
host_name = module.params['host_name']
host_ips = module.params['host_ip']
timeout = module.params['timeout']
exact_match = module.params['exact_match']
is_remove_duplicate = module.params['remove_duplicate']
host_inventory = module.params['host_inventory']
if not host_inventory:
host_inventory = 'extend'
zbx = None
# login to zabbix
try:
zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
validate_certs=validate_certs)
zbx.login(login_user, login_password)
atexit.register(zbx.logout)
except Exception as e:
module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
host = Host(module, zbx)
if host_name:
hosts = host.get_hosts_by_host_name(host_name, exact_match, host_inventory)
if is_remove_duplicate:
hosts = host.delete_duplicate_hosts(hosts)
extended_hosts = []
for zabbix_host in hosts:
zabbix_host['hostinterfaces'] = host._zapi.hostinterface.get({
'output': 'extend', 'hostids': zabbix_host['hostid']
})
extended_hosts.append(zabbix_host)
module.exit_json(ok=True, hosts=extended_hosts)
elif host_ips:
extended_hosts = host.get_hosts_by_ip(host_ips, host_inventory)
if is_remove_duplicate:
            extended_hosts = host.delete_duplicate_hosts(extended_hosts)
module.exit_json(ok=True, hosts=extended_hosts)
else:
module.exit_json(ok=False, hosts=[], result="No Host present")
if __name__ == '__main__':
main()
|
gpl-3.0
| -9,156,222,259,647,895,000
| 32.936975
| 142
| 0.604432
| false
| 3.767257
| false
| false
| false
|
arbenson/mrtsqr
|
dumbo/BtA.py
|
1
|
2491
|
"""
Copyright (c) 2012-2014, Austin Benson and David Gleich
All rights reserved.
This file is part of MRTSQR and is under the BSD 2-Clause License,
which can be found in the LICENSE file in the root directory, or at
http://opensource.org/licenses/BSD-2-Clause
"""
"""
BtA.py
===========
Driver code for computing B^T * A, where both B and A are tall and skinny.
Usage:
dumbo start AtA.py -hadoop $HADOOP_INSTALL \
-matA [path to matrix A] \
-matB [path to matrix B] \
-B_id [unique identifier for path of B] \
-reduce_schedule [optional: number of reducers to use in each stage] \
-output [optional: name of output file] \
-blocksize [optional: block size for compression]
The option 'B_id' is a unique identifier for the path of the B matrix that
does not occur in the path to the A matrix.
TODO(arbenson): this should be automated
Example usage:
dumbo start BtA.py -hadoop $HADOOP_INSTALL -matA A_matrix.mseq \
-matB B_matrix.mseq -output BTA_OUT -B_id B_matrix -blocksize 10
"""
import os
import util
import sys
import dumbo
import time
import numpy
import mrmc
gopts = util.GlobalOptions()
def runner(job):
blocksize = gopts.getintkey('blocksize')
schedule = gopts.getstrkey('reduce_schedule')
schedule = int(schedule)
B_id = gopts.getstrkey('B_id')
if B_id == '':
print "'B_id' not specified"
sys.exit(-1)
job.additer(mapper=mrmc.BtAMapper(B_id=B_id),
reducer=mrmc.BtAReducer(blocksize=blocksize),
opts=[('numreducetasks', str(schedule))])
job.additer(mapper='org.apache.hadoop.mapred.lib.IdentityMapper',
reducer=mrmc.ArraySumReducer,
opts=[('numreducetasks','1')])
def starter(prog):
# set the global opts
gopts.prog = prog
matB = prog.delopt('matB')
if not matB:
return "'matB' not specified'"
matA = prog.delopt('matA')
if not matA:
return "'matA' not specified'"
gopts.getstrkey('B_id', '')
mrmc.starter_helper(prog)
prog.addopt('input', matB)
prog.addopt('input', matA)
matname, matext = os.path.splitext(matA)
gopts.getintkey('blocksize',3)
gopts.getstrkey('reduce_schedule','1')
output = prog.getopt('output')
if not output:
prog.addopt('output','%s-BtA%s'%(matname,matext))
gopts.save_params()
if __name__ == '__main__':
import dumbo
dumbo.main(runner, starter)
|
bsd-2-clause
| 7,643,623,938,800,354,000
| 26.076087
| 75
| 0.638298
| false
| 3.247718
| false
| false
| false
|
facebookresearch/faiss
|
benchs/bench_index_flat.py
|
1
|
2187
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import time
import os
import numpy as np
import faiss
from faiss.contrib.datasets import SyntheticDataset
os.system("grep -m1 'model name' < /proc/cpuinfo")
def format_tab(x):
return "\n".join("\t".join("%g" % xi for xi in row) for row in x)
faiss.cvar.distance_compute_min_k_reservoir = 5
# for have_threads in True, False:
for have_threads in False, :
if have_threads:
# good config for Intel(R) Xeon(R) CPU E5-2698 v4 @ 2.20GHz
nthread = 32
else:
nthread = 1
faiss.omp_set_num_threads(nthread)
print("************ nthread=", nthread)
for nq in 100, 10000:
print("*********** nq=", nq)
if nq == 100:
nrun = 500
unit = "ms"
else:
nrun = 20
unit = "s"
restab = []
for d in 16, 32, 64, 128:
print("========== d=", d)
nb = 10000
# d = 32
ds = SyntheticDataset(d, 0, nb, nq)
print(ds)
index = faiss.IndexFlatL2(d)
index.add(ds.get_database())
            nrun = 10  # NB: overrides the per-nq nrun chosen above
restab1 = []
restab.append(restab1)
for k in 1, 10, 100:
times = []
for run in range(nrun):
t0 = time.time()
index.search(ds.get_queries(), k)
t1 = time.time()
                    if run >= nrun // 5:  # the first nrun // 5 runs are warmup
times.append((t1 - t0))
times = np.array(times)
if unit == "ms":
times *= 1000
print("search k=%3d t=%.3f ms (± %.4f)" % (
k, np.mean(times), np.std(times)))
else:
print("search k=%3d t=%.3f s (± %.4f)" % (
k, np.mean(times), np.std(times)))
restab1.append(np.mean(times))
print("restab=\n", format_tab(restab))
|
mit
| -6,713,802,029,258,484,000
| 24.114943
| 72
| 0.474142
| false
| 3.576105
| false
| false
| false
|
nanomolina/controlDeGastos
|
Windows/openWindow.py
|
1
|
5551
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'new.ui'
#
# Created: Fri Aug 15 21:30:13 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import sys
from os import getcwd, listdir
from os.path import join, isfile
DB_PATH = "Database/.Database"
COLOR = "#F28F1D" #"#F57B00"
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog_Open(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(243, 397)
self.db_name = ""
self.button_accepted = False
style = "QDialog {background-color:" \
" QLinearGradient(x1:0, y1:0, x2:0, y2:1, stop:0 #616161," \
" stop: 0.5 #505050, stop: 0.6 #434343, stop:1 #656565);}"
Dialog.setStyleSheet(style)
self.initLayout(Dialog)
self.listdb = []
self.initLabel()
self.initListWidget()
self.initButtonBox()
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), self.buttonAccepted)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def initLayout(self, Dialog):
self.verticalLayoutWidget = QtGui.QWidget(Dialog)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 10, 221, 371))
self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
def initLabel(self):
self.label = QtGui.QLabel(self.verticalLayoutWidget)
self.label.setObjectName(_fromUtf8("label"))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
style = "QLabel {background-color:" \
" QLinearGradient( x1: 0, y1: 0, x2: 0, y2: 1," \
" stop: 0 #4d4d4d, stop: 0 #FF4000, stop: 1 #F57B00);" \
" top: 5px; border: 1px solid #656565;" \
" gridline-color: #BAB0A7} "
self.label.setStyleSheet(_fromUtf8(style))
self.verticalLayout.addWidget(self.label)
def initListWidget(self):
self.listWidget = QtGui.QListWidget(self.verticalLayoutWidget)
self.listWidget.setObjectName(_fromUtf8("listWidget"))
self.verticalLayout.addWidget(self.listWidget)
font = QtGui.QFont()
font.setPointSize(13)
font.setBold(True)
font.setWeight(75)
self.listWidget.setFont(font)
style = "QListWidget {background-color:" \
" QLinearGradient( x1: 0, y1: 0, x2: 0, y2: 1," \
" stop: 0 #4d4d4d, stop: 0 #646464, stop: 1 #BDB9B5);" \
" padding: 1px; border-style: solid;" \
" border: 1px solid #656565; border-radius: 5;}"
self.listWidget.setStyleSheet(style)
current_path = getcwd()
db_path = join(current_path, DB_PATH)
listOfdir = listdir(db_path)
for file_ in listOfdir:
dirOfFile = join(db_path, file_)
if isfile(dirOfFile) and ".db" in file_:
                file_ = file_.split(".db")[0]
item = QtGui.QListWidgetItem()
self.listWidget.addItem(item)
self.listdb.append(file_)
def initButtonBox(self):
self.buttonBox = QtGui.QDialogButtonBox(self.verticalLayoutWidget)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
style = "background-color: QLinearGradient(" \
" x1: 0, y1: 0, x2: 0, y2: 1," \
" stop: 0 #4d4d4d, stop: 0 #646464, stop: 1 #BDB9B5);"
self.buttonBox.setStyleSheet(style)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Abrir", None))
self.label.setText(_translate("Dialog", "Bases de Datos disponibles:", None))
__sortingEnabled = self.listWidget.isSortingEnabled()
self.listWidget.setSortingEnabled(False)
counter = 0
for file_ in self.listdb:
item = self.listWidget.item(counter)
item.setText(_translate("Form", file_, None))
counter += 1
self.listWidget.setSortingEnabled(__sortingEnabled)
def buttonAccepted(self):
currentItem = self.listWidget.currentItem()
name = currentItem.text()
self.db_name = name + ".db"
self.button_accepted = True
def getNameToOpen(self):
return join(DB_PATH, str(self.db_name))
def accepted(self):
return self.button_accepted
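# Illustrative usage, following the standard PyQt4 generated-UI pattern:
#
#     app = QtGui.QApplication(sys.argv)
#     dialog = QtGui.QDialog()
#     ui = Ui_Dialog_Open()
#     ui.setupUi(dialog)
#     dialog.exec_()
#     if ui.accepted():
#         db_path = ui.getNameToOpen()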
|
gpl-2.0
| -6,159,931,926,551,762,000
| 39.518248
| 107
| 0.631057
| false
| 3.666446
| false
| false
| false
|
mottosso/mindbender-setup
|
bin/pythonpath/raven/contrib/django/client.py
|
1
|
10355
|
# -*- coding: utf-8 -*-
"""
raven.contrib.django.client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import time
import logging
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.http import HttpRequest
from django.template import TemplateSyntaxError
from django.utils.datastructures import MultiValueDict
try:
# support Django 1.9
from django.template.base import Origin
except ImportError:
# backward compatibility
from django.template.loader import LoaderOrigin as Origin
from raven.base import Client
from raven.contrib.django.utils import get_data_from_template, get_host
from raven.contrib.django.middleware import SentryMiddleware
from raven.utils.compat import string_types, binary_type, iterlists
from raven.contrib.django.resolver import RouteResolver
from raven.utils.wsgi import get_headers, get_environ
from raven.utils import once
from raven import breadcrumbs
__all__ = ('DjangoClient',)
class _FormatConverter(object):
def __init__(self, param_mapping):
self.param_mapping = param_mapping
self.params = []
def __getitem__(self, val):
self.params.append(self.param_mapping.get(val))
return '%s'
def format_sql(sql, params):
rv = []
if isinstance(params, dict):
conv = _FormatConverter(params)
if params:
sql = sql % conv
params = conv.params
else:
params = ()
for param in params or ():
if param is None:
rv.append('NULL')
elif isinstance(param, string_types):
if isinstance(param, binary_type):
param = param.decode('utf-8', 'replace')
if len(param) > 256:
param = param[:256] + u'…'
rv.append("'%s'" % param.replace("'", "''"))
else:
rv.append(repr(param))
return sql, rv
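# Illustrative transformation (values are made up):
#
#     format_sql("SELECT * FROM t WHERE id = %(id)s", {'id': 1})
#     -> ("SELECT * FROM t WHERE id = %s", ['1'])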
@once
def install_sql_hook():
"""If installed this causes Django's queries to be captured."""
try:
from django.db.backends.utils import CursorWrapper
except ImportError:
from django.db.backends.util import CursorWrapper
try:
real_execute = CursorWrapper.execute
real_executemany = CursorWrapper.executemany
except AttributeError:
# XXX(mitsuhiko): On some very old django versions (<1.6) this
# trickery would have to look different but I can't be bothered.
return
def record_sql(vendor, alias, start, duration, sql, params):
def processor(data):
real_sql, real_params = format_sql(sql, params)
if real_params:
real_sql = real_sql % tuple(real_params)
# maybe category to 'django.%s.%s' % (vendor, alias or
# 'default') ?
data.update({
'message': real_sql,
'category': 'query',
})
breadcrumbs.record(processor=processor)
def record_many_sql(vendor, alias, start, sql, param_list):
duration = time.time() - start
for params in param_list:
record_sql(vendor, alias, start, duration, sql, params)
def execute(self, sql, params=None):
start = time.time()
try:
return real_execute(self, sql, params)
finally:
record_sql(self.db.vendor, getattr(self.db, 'alias', None),
start, time.time() - start, sql, params)
def executemany(self, sql, param_list):
start = time.time()
try:
return real_executemany(self, sql, param_list)
finally:
record_many_sql(self.db.vendor, getattr(self.db, 'alias', None),
start, sql, param_list)
CursorWrapper.execute = execute
CursorWrapper.executemany = executemany
breadcrumbs.ignore_logger('django.db.backends')
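# Note: DjangoClient below installs this hook automatically unless it is
# constructed with install_sql_hook=False.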
class DjangoClient(Client):
logger = logging.getLogger('sentry.errors.client.django')
resolver = RouteResolver()
def __init__(self, *args, **kwargs):
install_sql_hook = kwargs.pop('install_sql_hook', True)
Client.__init__(self, *args, **kwargs)
if install_sql_hook:
self.install_sql_hook()
def install_sql_hook(self):
install_sql_hook()
def get_user_info(self, user):
try:
if hasattr(user, 'is_authenticated'):
# is_authenticated was a method in Django < 1.10
if callable(user.is_authenticated):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
if not authenticated:
return None
user_info = {}
user_info['id'] = user.pk
if hasattr(user, 'email'):
user_info['email'] = user.email
if hasattr(user, 'get_username'):
user_info['username'] = user.get_username()
elif hasattr(user, 'username'):
user_info['username'] = user.username
return user_info
except Exception:
# We expect that user objects can be somewhat broken at times
# and try to just handle as much as possible and ignore errors
            # as well as possible here.
return None
def get_data_from_request(self, request):
result = {}
user = getattr(request, 'user', None)
if user is not None:
user_info = self.get_user_info(user)
if user_info:
result['user'] = user_info
try:
uri = request.build_absolute_uri()
except SuspiciousOperation:
# attempt to build a URL for reporting as Django won't allow us to
# use get_host()
if request.is_secure():
scheme = 'https'
else:
scheme = 'http'
host = get_host(request)
uri = '%s://%s%s' % (scheme, host, request.path)
if request.method not in ('GET', 'HEAD'):
try:
data = request.body
except Exception:
try:
data = request.raw_post_data
except Exception:
# assume we had a partial read.
try:
data = request.POST or '<unavailable>'
except Exception:
data = '<unavailable>'
else:
if isinstance(data, MultiValueDict):
data = dict(
(k, v[0] if len(v) == 1 else v)
for k, v in iterlists(data))
else:
data = None
environ = request.META
result.update({
'request': {
'method': request.method,
'url': uri,
'query_string': request.META.get('QUERY_STRING'),
'data': data,
'cookies': dict(request.COOKIES),
'headers': dict(get_headers(environ)),
'env': dict(get_environ(environ)),
}
})
return result
def build_msg(self, *args, **kwargs):
data = super(DjangoClient, self).build_msg(*args, **kwargs)
for frame in self._iter_frames(data):
module = frame.get('module')
if not module:
continue
if module.startswith('django.'):
frame['in_app'] = False
if not self.site and 'django.contrib.sites' in settings.INSTALLED_APPS:
try:
from django.contrib.sites.models import Site
site = Site.objects.get_current()
site_name = site.name or site.domain
data['tags'].setdefault('site', site_name)
except Exception:
# Database error? Fallback to the id
try:
data['tags'].setdefault('site', settings.SITE_ID)
except AttributeError:
# SITE_ID wasn't set, so just ignore
pass
return data
def capture(self, event_type, request=None, **kwargs):
if 'data' not in kwargs:
kwargs['data'] = data = {}
else:
data = kwargs['data']
if request is None:
request = getattr(SentryMiddleware.thread, 'request', None)
is_http_request = isinstance(request, HttpRequest)
if is_http_request:
data.update(self.get_data_from_request(request))
if kwargs.get('exc_info'):
exc_value = kwargs['exc_info'][1]
# As of r16833 (Django) all exceptions may contain a
# ``django_template_source`` attribute (rather than the legacy
# ``TemplateSyntaxError.source`` check) which describes
# template information. As of Django 1.9 or so the new
# template debug thing showed up.
if hasattr(exc_value, 'django_template_source') or \
((isinstance(exc_value, TemplateSyntaxError) and
isinstance(getattr(exc_value, 'source', None),
(tuple, list)) and
isinstance(exc_value.source[0], Origin))) or \
hasattr(exc_value, 'template_debug'):
source = getattr(exc_value, 'django_template_source',
getattr(exc_value, 'source', None))
debug = getattr(exc_value, 'template_debug', None)
if source is None:
self.logger.info('Unable to get template source from exception')
data.update(get_data_from_template(source, debug))
result = super(DjangoClient, self).capture(event_type, **kwargs)
if is_http_request and result:
# attach the sentry object to the request
request.sentry = {
'project_id': data.get('project', self.remote.project),
'id': result,
}
return result
def get_transaction_from_request(self, request):
return self.resolver.resolve(request.path)
|
mit
| -6,920,952,999,196,945,000
| 33.055921
| 84
| 0.552014
| false
| 4.437634
| false
| false
| false
|
JeremyOT/Toto
|
toto/zmqworkerconnection.py
|
1
|
10185
|
import toto
import zmq
import cPickle as pickle
import zlib
import logging
from toto.exceptions import *
from toto.workerconnection import WorkerConnection
from threading import Thread
from tornado.options import options
from tornado.gen import Task
from collections import deque
from zmq.eventloop.ioloop import ZMQPoller, IOLoop, PeriodicCallback
from zmq.eventloop.zmqstream import ZMQStream
from time import time
from uuid import uuid4
from traceback import format_exc
from toto.options import safe_define
WORKER_SOCKET_CONNECT = 'CONNECT'
WORKER_SOCKET_DISCONNECT = 'DISCONNECT'
class ZMQWorkerConnection(WorkerConnection):
'''Use a ``WorkerConnection`` to make RPCs to the remote worker service(s) or worker/router specified by ``address``.
  ``address`` may be either an enumerable of address strings or a string of comma separated addresses. RPC retries
  and timeouts are processed at most once every ``abs(timeout)`` seconds, when a periodic callback runs through all
  active messages and checks for prolonged requests. This is also the default timeout for any new calls. ``timeout``
  must not be ``0``.
Optionally pass any object or module with ``compress`` and ``decompress`` methods as the ``compression`` parameter to
compress messages. The module must implement the same algorithm used on the worker service. By default, messages are not
compressed.
Optionally pass any object or module with ``dumps`` and ``loads`` methods that convert an ``object`` to and from a
``str`` to replace the default ``cPickle`` serialization with a protocol of your choice.
Use ``auto_retry`` to specify whether or not messages should be retried by default. Retrying messages can cause substantial
congestion in your worker service. Use with caution.
'''
def __init__(self, address, timeout=10.0, compression=None, serialization=None, auto_retry=False):
if not address:
self.active_connections = set()
elif isinstance(address, str):
self.active_connections = {i.strip() for i in address.split(',')}
else:
self.active_connections = set(address)
self.message_address = 'inproc://WorkerConnection%s' % id(self)
self.__context = zmq.Context()
self.__queue_socket = self.__context.socket(zmq.PUSH)
self.__queue_socket.bind(self.message_address)
self.__thread = None
self.__timeout = timeout
self.__callbacks = {}
self.__queued_messages = {}
self.__message_auto_retry = {}
self.__message_timeouts = {}
self.__ioloop = None
self.__auto_retry = auto_retry
self.loads = serialization and serialization.loads or pickle.loads
self.dumps = serialization and serialization.dumps or pickle.dumps
self.compress = compression and compression.compress or (lambda x: x)
self.decompress = compression and compression.decompress or (lambda x: x)
def invoke(self, method, parameters={}, callback=None, timeout=0, auto_retry=None, await=False):
'''Invoke a ``method`` to be run on a remote worker process with the given ``parameters``. If specified, ``callback`` will be
invoked with any response from the remote worker. By default the worker will timeout or retry based on the settings of the
current ``WorkerConnection`` but ``timeout`` and ``auto_retry`` can be used for invocation specific behavior.
Note: ``callback`` will be invoked with ``{'error': 'timeout'}`` on ``timeout`` if ``auto_retry`` is false. Invocations
set to retry will never timeout and will instead be re-sent until a response is received. This behavior can be useful for
critical operations but has the potential to cause substantial congestion in the worker system. Use with caution. Negative
values of ``timeout`` will prevent messages from ever expiring or retrying regardless of ``auto_retry``. The default
values of ``timeout`` and ``auto_retry`` cause a fallback to the values used to initialize ``WorkerConnection``.
Passing ``await=True`` will wrap the call in a ``tornado.gen.Task`` allowing you to ``yield`` the response from the worker.
The ``Task`` replaces ``callback`` so any user supplied callback will be ignored when ``await=True``.
Alternatively, you can invoke methods with ``WorkerConnection.<module>.<method>(*args, **kwargs)``
where ``"<module>.<method>"`` will be passed as the ``method`` argument to ``invoke()``.
'''
if await:
return Task(lambda callback: self._queue_message(self.compress(self.dumps({'method': method, 'parameters': parameters})), callback, timeout, auto_retry))
self._queue_message(self.compress(self.dumps({'method': method, 'parameters': parameters})), callback, timeout, auto_retry)
def add_connection(self, address):
'''Connect to the worker at ``address``. Worker invocations will be round robin load balanced between all connected workers.'''
self._queue_message(address, command=WORKER_SOCKET_CONNECT)
def remove_connection(self, address):
'''Disconnect from the worker at ``address``. Worker invocations will be round robin load balanced between all connected workers.'''
self._queue_message(address, command=WORKER_SOCKET_DISCONNECT)
def set_connections(self, addresses):
'''A convenience method to set the connected addresses. A connection will be made to any new address included in the ``addresses``
enumerable and any currently connected address not included in ``addresses`` will be disconnected. If an address in ``addresses``
is already connected, it will not be affected.
'''
addresses = set(addresses)
to_remove = self.active_connections - addresses
to_add = addresses - self.active_connections
for a in to_remove:
self.remove_connection(a)
for a in to_add:
self.add_connection(a)
def __len__(self):
return len(self.__queued_messages)
def _queue_message(self, message, callback=None, timeout=0, auto_retry=None, command=''):
if not self.__ioloop:
self.start()
message_id = str(uuid4())
if callback:
self.__callbacks[message_id] = callback
if timeout != 0:
self.__message_timeouts[message_id] = timeout
if auto_retry is not None:
self.__message_auto_retry[message_id] = auto_retry
self.__queue_socket.send_multipart((command, message_id, message))
def log_error(self, error):
logging.error(repr(error))
def start(self):
if self.__ioloop:
return
def loop():
self.__ioloop = IOLoop()
queue_socket = self.__context.socket(zmq.PULL)
queue_socket.connect(self.message_address)
queue_stream = ZMQStream(queue_socket, self.__ioloop)
def receive_response(message, response_override=None):
self.__queued_messages.pop(message[1], None)
self.__message_timeouts.pop(message[1], None)
callback = self.__callbacks.pop(message[1], None)
if callback:
try:
callback(response_override or self.loads(self.decompress(message[2])))
except Exception as e:
self.log_error(e)
callback({'error': e})
def create_worker_stream():
def close_callback():
logging.info('Worker stream closed')
create_worker_stream()
worker_socket = self.__context.socket(zmq.DEALER)
for address in self.active_connections:
worker_socket.connect(address)
worker_stream = ZMQStream(worker_socket, self.__ioloop)
worker_stream.on_recv(receive_response)
worker_stream.set_close_callback(close_callback)
self._worker_stream = worker_stream
create_worker_stream()
def queue_message(message):
if message[0]:
if message[0] == WORKER_SOCKET_CONNECT and message[2] not in self.active_connections:
self.active_connections.add(message[2])
self._worker_stream.socket.connect(message[2])
elif message[0] == WORKER_SOCKET_DISCONNECT and message[2] in self.active_connections:
self.active_connections.remove(message[2])
self._worker_stream.socket.disconnect(message[2])
return
self.__queued_messages[message[1]] = (time(), message)
try:
self._worker_stream.send_multipart(message)
except IOError as e:
self.log_error(e)
logging.info('Reconnecting')
create_worker_stream()
except Exception as e:
self.log_error(e)
queue_stream.on_recv(queue_message)
def timeout_message():
now = time()
        expired = []
        for queued_time, message in self.__queued_messages.itervalues():
          timeout = self.__message_timeouts.get(message[1], self.__timeout)
          if timeout >= 0 and queued_time + timeout < now:
            retry = self.__message_auto_retry.get(message[1], self.__auto_retry)
            expired.append((message, retry))
        for message, retry in expired:
          if retry:
            logging.info('Worker timeout, requeuing ' + message[1])
            queue_message(message)
          else:
            receive_response(('', message[1]), {'error': 'timeout'})
timeout_callback = PeriodicCallback(timeout_message, int(abs(self.__timeout * 1000.0)), io_loop = self.__ioloop)
timeout_callback.start()
self.__ioloop.start()
self.__thread = None
self.__thread = Thread(target=loop)
self.__thread.daemon = True
self.__thread.start()
def stop(self):
if self.__ioloop:
self.__ioloop.stop()
def join(self):
if self.__thread:
self.__thread.join()
@classmethod
def instance(cls):
'''Returns the default instance of ``ZMQWorkerConnection`` as configured by the options prefixed
with ``worker_``, instantiating it if necessary. Import the ``workerconnection`` module within
your ``TotoService`` and run it with ``--help`` to see all available options.
'''
if not hasattr(cls, '_instance'):
cls._instance = cls(options.worker_address, timeout=options.worker_timeout, compression=options.worker_compression_module and __import__(options.worker_compression_module), serialization=options.worker_serialization_module and __import__(options.worker_serialization_module), auto_retry=options.worker_auto_retry)
return cls._instance
|
mit
| 51,147,384,794,014,200
| 47.5
| 319
| 0.68434
| false
| 4.179319
| false
| false
| false
|
coddingtonbear/django-location
|
location/signals.py
|
1
|
1394
|
from django.dispatch.dispatcher import Signal
from location.models import LocationSnapshot
location_updated = Signal(providing_args=['user', 'from_', 'to'])
location_changed = Signal(providing_args=['user', 'from_', 'to'])
class watch_location(object):
def __init__(self, user):
self.user = user
def _get_current_location(self):
return LocationSnapshot.objects.filter(
source__user=self.user,
).order_by('-date')[0]
def __enter__(self):
self.original_location = None
try:
self.original_location = self._get_current_location()
except IndexError:
pass
return self
def __exit__(self, *args):
current_location = self._get_current_location()
if self.original_location != current_location:
location_updated.send(
sender=self,
user=self.user,
from_=self.original_location,
to=current_location,
)
if (
self.original_location and
self.original_location.location
!= current_location.location
):
location_changed.send(
sender=self,
user=self.user,
from_=self.original_location,
to=current_location,
)
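# Illustrative usage sketch (not part of the original module): wrap code that
# may record a new LocationSnapshot and the matching signals fire on exit.
# ``record_checkin`` is a hypothetical helper.
#
#   with watch_location(user):
#       record_checkin(user, latitude, longitude)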
|
mit
| -3,250,612,997,930,834,400
| 29.304348
| 65
| 0.537303
| false
| 4.646667
| false
| false
| false
|
pck886/kicomav
|
Engine/plugins/emalware.py
|
1
|
12387
|
# -*- coding:utf-8 -*-
# Author: Kei Choi(hanul93@gmail.com)
import os
import re
import kernel
import kavutil
import cryptolib
# -------------------------------------------------------------------------
# KavMain class
# -------------------------------------------------------------------------
class KavMain:
# ---------------------------------------------------------------------
    # init(self, plugins_path, verbose=False)
    # Initialize the plugin engine.
    # Input  : plugins_path - location of the plugin engine
    #          verbose      - debug mode (True or False)
    # Return : 0 - success, nonzero - failure
    # ---------------------------------------------------------------------
    def init(self, plugins_path, verbose=False):  # initialize the plugin engine
pat = r'POST /cdn-cgi/\x00\x00 HTTP/1.1\r\nUser-Agent: \x00\r\nHost:' + \
r'[\d\D]+?GET\x00+/\x00+Cookie:[\d\D]+?http[\d\D]+?url=[\d\D]+?POST'
self.p_linux_mirai = re.compile(pat)
        # String patterns for virus variants
self.mirai_a_strings = [
'POST /cdn-cgi/',
'HTTP/1.1\r\nUser-Agent: ',
'Host:',
'GET',
'Cookie:',
'http',
'url=',
'proc/net/tcp'
]
self.aho_mirai_a = kavutil.AhoCorasick()
self.aho_mirai_a.make_tree(self.mirai_a_strings)
        return 0  # plugin engine initialized successfully
# ---------------------------------------------------------------------
# uninit(self)
    # Shut down the plugin engine.
    # Return : 0 - success, nonzero - failure
    # ---------------------------------------------------------------------
    def uninit(self):  # shut down the plugin engine
        return 0  # plugin engine shut down successfully
# ---------------------------------------------------------------------
# getinfo(self)
    # Report key information about the plugin engine (author, version, ...)
    # Return : plugin engine information
    # ---------------------------------------------------------------------
    def getinfo(self):  # key information about the plugin engine
        info = dict()  # declare the info dict
        info['author'] = 'Kei Choi'  # author
        info['version'] = '1.1'  # version
        info['title'] = 'eMalware Engine'  # engine description
        info['kmd_name'] = 'emalware'  # engine file name
        info['sig_num'] = kavutil.handle_pattern_md5.get_sig_num('emalware') + 2  # number of malware signatures that can be detected/disinfected
return info
# ---------------------------------------------------------------------
# listvirus(self)
    # Report the list of malware that can be detected/disinfected.
    # Return : list of malware names
    # ---------------------------------------------------------------------
    def listvirus(self):  # list of detectable malware
vlist = kavutil.handle_pattern_md5.get_sig_vlist('emalware')
vlist.append('Backdoor.Linux.Mirai.a.gen')
vlist = list(set(vlist))
vlist.sort()
vlists = []
for vname in vlist:
vlists.append(kavutil.normal_vname(vname))
vlists.append(kavutil.normal_vname('<n>AdWare.Win32.Sokuxuan.gen'))
return vlists
# ---------------------------------------------------------------------
    # scan(self, filehandle, filename, fileformat, filename_ex)
    # Scan for malware.
    # Input  : filehandle  - file handle
    #          filename    - file name
    #          fileformat  - file format
    #          filename_ex - file name (name inside an archive)
    # Return : (malware found?, malware name, malware ID, scan result), etc.
    # ---------------------------------------------------------------------
    def scan(self, filehandle, filename, fileformat, filename_ex):  # scan for malware
try:
mm = filehandle
            # Is a PE format among the pre-analyzed file formats?
if 'ff_pe' in fileformat:
ff = fileformat['ff_pe']
                # case 1: scan each whole section by MD5 hash
                for idx, section in enumerate(ff['pe']['Sections']):
                    # if (section['Characteristics'] & 0x20000000) == 0x20000000:  # executable attribute?
# print section['Name'], hex(section['SizeRawData'])
fsize = section['SizeRawData']
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
foff = section['PointerRawData']
fmd5 = cryptolib.md5(mm[foff:foff+fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
                # case 2: an executable embedded in the last section
                if len(ff['pe']['Sections']):
                    # the last section
sec = ff['pe']['Sections'][-1]
off = sec['PointerRawData']
size = sec['SizeRawData']
                    # Is an executable file present?
exe_offs = [m.start() for m in re.finditer('MZ', mm[off:off+size])]
for exe_pos in exe_offs:
fsize = 0x1d5
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
fmd5 = cryptolib.md5(mm[off + exe_pos:off + exe_pos + fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
# return True, vname, 0, kernel.INFECTED
idx = len(ff['pe']['Sections']) - 1
vname = kavutil.normal_vname(vname)
return True, vname, (0x80000000 + idx), kernel.INFECTED
                # case 3: detect malware via the PDB path
if 'PDB_Name' in ff['pe']:
pdb_sigs = {
':\\pz_git\\bin\\': '<n>AdWare.Win32.Sokuxuan.gen',
':\\CODE\\vitruvian\\': '<n>AdWare.Win32.Vitruvian.gen',
}
for pat in pdb_sigs.keys():
if ff['pe']['PDB_Name'].find(pat) != -1:
vname = kavutil.normal_vname(pdb_sigs[pat])
return True, vname, 0, kernel.INFECTED
            # Is an ELF format among the pre-analyzed file formats?
elif 'ff_elf' in fileformat:
ff = fileformat['ff_elf']
if len(ff['elf']['Sections']):
for section in ff['elf']['Sections']:
                        if (section['Type'] & 0x1) == 0x1 and (section['Flag'] & 0x4) == 0x4:  # program data with the executable flag?
# print section['Name'], section['Size'], section['Offset']
fsize = section['Size']
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
foff = section['Offset']
fmd5 = cryptolib.md5(mm[foff:foff + fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
elif len(ff['elf']['ProgramHeaders']):
for ph in ff['elf']['ProgramHeaders']:
if (ph['Type'] & 0x1) == 0x1 and (ph['Flag'] & 0x1) == 0x1:
fsize = ph['Size']
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
foff = ph['Offset']
fmd5 = cryptolib.md5(mm[foff:foff + fsize])
# print fsize, fmd5
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
                # Detect Mirai variants: all marker strings must appear near the start of .rodata
'''
for section in ff['elf']['Sections']:
if section['Name'] == '.rodata':
fsize = section['Size']
foff = section['Offset']
if self.p_linux_mirai.match(mm[foff:foff+fsize]):
return True, 'Backdoor.Linux.Mirai.gen', 0, kernel.SUSPECT
'''
for section in ff['elf']['Sections']:
if section['Name'] == '.rodata':
vstring = []
foff = section['Offset']
ret = self.aho_mirai_a.search(mm[foff:foff + 0x200])
for n in ret[:len(self.mirai_a_strings)]:
vstring.append(n[1])
# print vstring
# print len(set(vstring)), len(self.mirai_a_strings)
if set(vstring) == set(self.mirai_a_strings):
return True, 'Backdoor.Linux.Mirai.a.gen', 0, kernel.SUSPECT
            # Installers such as NSIS keep their payload in an attached area;
            # detect the original image there directly instead of decompiling.
if 'ff_attach' in fileformat:
foff = fileformat['ff_attach']['Attached_Pos']
buf = mm[foff:]
fsize = len(buf)
if fsize and kavutil.handle_pattern_md5.match_size('emalware', fsize):
                    fmd5 = cryptolib.md5(buf)  # from the attachment offset to the end
vname = kavutil.handle_pattern_md5.scan('emalware', fsize, fmd5)
if vname:
vname = kavutil.normal_vname(vname)
return True, vname, 0, kernel.INFECTED
except IOError:
pass
        # Report that no malware was found.
return False, '', -1, kernel.NOT_FOUND
# ---------------------------------------------------------------------
# disinfect(self, filename, malware_id)
    # Disinfect the malware.
    # Input  : filename   - file name
    #        : malware_id - ID of the malware to disinfect
    # Return : whether the malware was disinfected
    # ---------------------------------------------------------------------
    def disinfect(self, filename, malware_id):  # disinfect malware
        try:
            # Is the ID received from the scan result 0?
            if malware_id == 0:
                os.remove(filename)  # delete the file
                return True  # disinfection complete
if malware_id & 0x80000000 == 0x80000000:
idx = malware_id & 0x7fffffff
import pe
buf = open(filename, 'rb').read()
pe = pe.PE(buf, False, filename)
try:
                    pe_format = pe.parse()  # parse the PE file
except MemoryError:
pe_format = None
if pe_format is None:
return False
ff = {'pe': pe_format}
if len(ff['pe']['Sections']) > idx:
section = ff['pe']['Sections'][idx]
fsize = section['SizeRawData']
foff = section['PointerRawData']
data = buf[:foff] + ('\x00' * fsize) + buf[foff+fsize:]
open(filename, 'wb').write(data)
return True
except IOError:
pass
        return False  # disinfection failed
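# Illustrative lifecycle sketch (not part of the original module; the host
# engine normally drives these calls, and ``mm``, ``fname``, ``fileformat``
# and ``fname_ex`` are hypothetical scan inputs):
#
#   plugin = KavMain()
#   plugin.init(plugins_path='.')
#   found, vname, vid, result = plugin.scan(mm, fname, fileformat, fname_ex)
#   if found and result == kernel.INFECTED:
#       plugin.disinfect(fname, vid)
#   plugin.uninit()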
|
gpl-2.0
| 4,424,197,806,499,684,000
| 41.310861
| 115
| 0.409843
| false
| 2.812998
| false
| false
| false
|
DHTC-Tools/logstash-confs
|
condor/python/split-index.py
|
1
|
5102
|
#!/usr/bin/env python
import datetime
import argparse
import sys
import logging
import pytz
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = 'osg-connect-job-details'
def get_start_week(start_date):
"""
Return a datetime that starts at the beginning of the iso week that
start_date falls in (e.g. if start_date is in day 5 of an iso week
return a datetime object from 5 days ago)
    :param start_date: a UTC localized datetime object to use
    :return: a UTC localized datetime at the start of that iso week
"""
iso_datetime = start_date - datetime.timedelta(days=start_date.isoweekday())
return iso_datetime
def validate_date(arg):
"""
Validate that text string provided is a valid date
"""
if arg is None or len(arg) != 8:
return None
year = arg[0:4]
month = arg[4:6]
day = arg[6:8]
try:
year = int(year)
month = int(month)
day = int(day)
except ValueError:
return None
if year < 2000 or year > 2038:
return None
if month < 1 or month > 12:
return None
if day < 1 or day > 31:
return None
try:
utc = pytz.utc
temp = utc.localize(datetime.datetime(year, month, day, 0, 0, 0))
except ValueError:
return None
return temp
def reindex(source_index, target_index, start_date, end_date, client):
"""
Reindex documents that occur between start_date and end_date
from source index to target index
:param client: instantiated ES client to use
:param source_index: source index for documents
:param target_index: destination index for documents that match
:param start_date: UTC localized datetime that documents need to occur after
:param end_date: UTC localized datetime that documents need to occur before
:return: tuple of (# of successes, error messages) indicating any issues
"""
utc = pytz.utc
start_time = utc.localize(datetime.datetime.combine(start_date, datetime.time(0, 0, 0)))
end_time = utc.localize(datetime.datetime.combine(end_date, datetime.time(0, 0, 0)))
range_query = {"query": {
"filtered": {
"filter": {
"bool": {
"must": [
{"range":
{"@timestamp":
{"gte": start_time.isoformat(),
"lt": end_time.isoformat()}}}]}}}}}
sys.stdout.write("Reindexing into {0}\n".format(target_index))
results = elasticsearch.helpers.reindex(client,
source_index,
target_index,
range_query,
scroll='30m')
return results
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
def scan_and_reindex(start_date=None, end_date=None, client=None):
"""
Iterate through weeks between start and end date and
reindex documents to a weekly index
:param start_date: date to start reindexing
:param end_date: date to end indexing
:param client: instantiated ES client to use
:return: None
"""
current_date = get_start_week(start_date)
while current_date < end_date:
iso_year, iso_week, _ = current_date.isocalendar()
weekly_index = "{0}-{1}-{2:0>2}".format('osg-connect-job-details',
iso_year,
iso_week)
week_end_date = current_date + datetime.timedelta(days=7)
results = reindex(SOURCE_INDEX,
weekly_index,
current_date,
week_end_date,
client)
logging.warning("{0}".format(results))
current_date += datetime.timedelta(days=7)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'osg-connect-job-details ' +
'to weekly indices')
parser.add_argument('--start-date',
dest='start_date',
default=None,
required=True,
help='Reindex events that occur on this day or after')
parser.add_argument('--end-date',
dest='end_date',
default=None,
help='Reindex events that occur before this day')
args = parser.parse_args(sys.argv[1:])
start_date = validate_date(args.start_date)
end_date = validate_date(args.end_date)
client = get_es_client()
scan_and_reindex(start_date, end_date, client)
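# Example invocation (illustrative dates; the YYYYMMDD form is enforced by
# validate_date above):
#
#   ./split-index.py --start-date 20150101 --end-date 20150301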
|
apache-2.0
| -7,657,909,026,755,955,000
| 33.945205
| 92
| 0.546844
| false
| 4.338435
| false
| false
| false
|
phoebe-project/phoebe2-docs
|
2.0/tutorials/irrad_method_horvat.py
|
1
|
3065
|
#!/usr/bin/env python
# coding: utf-8
# Lambert Scattering (irrad_method='horvat')
# ============================
#
# Setup
# -----------------------------
# Let's first make sure we have the latest version of PHOEBE 2.0 installed. (You can comment out this line if you don't use pip for your installation or don't want to update to the latest release).
# In[ ]:
get_ipython().system('pip install -I "phoebe>=2.0,<2.1"')
# As always, let's do imports and initialize a logger and a new bundle. See [Building a System](../tutorials/building_a_system.html) for more details.
# In[1]:
get_ipython().run_line_magic('matplotlib', 'inline')
# In[2]:
import phoebe
from phoebe import u # units
import numpy as np
import matplotlib.pyplot as plt
logger = phoebe.logger('error')
b = phoebe.default_binary()
# Relevant Parameters
# ---------------------------------
# For parameters that affect reflection and heating (irrad_frac_\*) see the tutorial on [reflection and heating](./reflection_heating.ipynb).
#
# The 'irrad_method' compute option dictates whether irradiation is handled according to the new Horvat scheme which includes Lambert Scattering, Wilson's original reflection scheme, or ignored entirely.
# In[3]:
print(b['irrad_method'])
# Influence on Light Curves (fluxes)
# ---------------------------------
#
# Let's (roughly) reproduce Figure 8 from [Prsa et al. 2016](http://phoebe-project.org/publications/2016Prsa+) which shows the difference between Wilson and Horvat schemes for various inclinations.
#
# <img src="prsa+2016_fig8.png" alt="Figure 8" width="600px"/>
#
# First we'll roughly create an A0-K0 binary and set reasonable albedos.
# In[4]:
b['teff@primary'] = 11000
b['rpole@primary'] = 2.5
b['gravb_bol@primary'] = 1.0
b['teff@secondary'] = 5000
b['rpole@secondary'] = 0.85
b['q@binary'] = 0.8/3.0
b.flip_constraint('mass@primary', solve_for='sma@binary')
b['mass@primary'] = 3.0
# In[5]:
print(b.filter(qualifier=['mass', 'rpole', 'teff'], context='component'))
# In[6]:
b['irrad_frac_refl_bol@primary'] = 1.0
b['irrad_frac_refl_bol@secondary'] = 0.6
# Now we'll compute the light curves with wilson and horvat irradiation, and plot the relative differences between the two as a function of phase, for several different values of the inclination.
#
# Note that Figure 8 excluded eclipse effects, but that ability is not included in PHOEBE 2.0, so there will be a slight discrepancy for inclinations which exhibit eclipses.
# In[7]:
phases = np.linspace(0,1,101)
b.add_dataset('lc', times=b.to_time(phases))
# In[8]:
for incl in [0,30,60,90]:
b.set_value('incl@binary', incl)
b.run_compute(irrad_method='wilson')
fluxes_wilson = b.get_value('fluxes', context='model')
b.run_compute(irrad_method='horvat')
fluxes_horvat = b.get_value('fluxes', context='model')
plt.plot(phases, (fluxes_wilson-fluxes_horvat)/fluxes_wilson, label='i={}'.format(incl))
plt.xlabel('phase')
plt.ylabel('[F(wilson) - F(horvat)] / F(wilson)')
plt.legend(loc='upper center')
plt.show()
|
gpl-3.0
| -2,983,392,925,752,449,000
| 25.422414
| 203
| 0.675041
| false
| 2.987329
| false
| false
| false
|
Bharath-J/Mezzanine
|
setup.py
|
1
|
4939
|
# #import os
# #import sys
# #from setuptools import setup, find_packages
# #from shutil import rmtree
# #from mezzanine import __version__ as version
# #exclude = ["mezzanine/project_template/dev.db",
# # "mezzanine/project_template/project_name/local_settings.py"]
# #if sys.argv == ["setup.py", "test"]:
# # exclude = []
# exclude = dict([(e, None) for e in exclude])
# for e in exclude:
# if e.endswith(".py"):
# try:
# os.remove("%sc" % e)
# except:
# pass
# try:
# with open(e, "r") as f:
# exclude[e] = (f.read(), os.stat(e))
# os.remove(e)
# except:
# pass
# if sys.argv[:2] == ["setup.py", "bdist_wheel"]:
# # Remove previous build dir when creating a wheel build,
# # since if files have been removed from the project,
# # they'll still be cached in the build dir and end up
# # as part of the build, which is really neat!
# try:
# rmtree("build")
# except:
# pass
# try:
# setup(
# name="Mezzanine",
# version=version,
# author="Stephen McDonald",
# author_email="stephen.mc@gmail.com",
# description="An open source content management platform built using "
# "the Django framework.",
# long_description=open("README.rst", 'rb').read().decode('utf-8'),
# license="BSD",
# url="http://mezzanine.jupo.org/",
# zip_safe=False,
# include_package_data=True,
# packages=find_packages(),
# install_requires=[
# "django-contrib-comments",
# "django >= 1.7, < 1.9",
# "filebrowser_safe >= 0.4.0",
# "grappelli_safe >= 0.4.0",
# "tzlocal >= 1.0",
# "bleach >= 1.4",
# "beautifulsoup4 >= 4.1.3",
# "requests >= 2.1.0",
# "requests-oauthlib >= 0.4",
# "future >= 0.9.0",
# "pillow",
# "chardet",
# ],
# entry_points="""
# [console_scripts]
# mezzanine-project=mezzanine.bin.mezzanine_project:create_project
# """,
# test_suite="mezzanine.bin.runtests.main",
# tests_require=["pyflakes>=0.6.1", "pep8>=1.4.1"],
# classifiers=[
# "Development Status :: 5 - Production/Stable",
# "Environment :: Web Environment",
# "Framework :: Django",
# "Intended Audience :: Developers",
# "License :: OSI Approved :: BSD License",
# "Operating System :: OS Independent",
# "Programming Language :: Python",
# "Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3",
# "Programming Language :: Python :: 3.3",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5",
# "Topic :: Internet :: WWW/HTTP",
# "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
# "Topic :: Internet :: WWW/HTTP :: WSGI",
# "Topic :: Software Development :: Libraries :: "
# "Application Frameworks",
# "Topic :: Software Development :: Libraries :: Python Modules",
# ])
# finally:
# for e in exclude:
# if exclude[e] is not None:
# data, stat = exclude[e]
# try:
# with open(e, "w") as f:
# f.write(data)
# os.chown(e, stat.st_uid, stat.st_gid)
# os.chmod(e, stat.st_mode)
# except:
# pass
# My setup
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='mezzanine-bhj',
version='0.4',
packages=['mezzanine'],
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based polls.',
long_description=README,
url='https://www.example.com/',
author='Your Name',
author_email='yourname@example.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
# Replace these appropriately if you are stuck on Python 2.
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
bsd-2-clause
| 2,649,597,721,540,508,700
| 34.028369
| 79
| 0.524398
| false
| 3.802156
| false
| false
| false
|
mcclurmc/juju
|
juju/providers/ec2/tests/common.py
|
1
|
7904
|
from yaml import dump
from twisted.internet.defer import fail, succeed
from txaws.s3.client import S3Client
from txaws.s3.exception import S3Error
from txaws.ec2.client import EC2Client
from txaws.ec2.exception import EC2Error
from txaws.ec2.model import Instance, Reservation, SecurityGroup
from juju.lib.mocker import KWARGS, MATCH
from juju.providers.ec2 import MachineProvider
from juju.providers.ec2.machine import EC2ProviderMachine
MATCH_GROUP = MATCH(lambda x: x.startswith("juju-moon"))
class EC2TestMixin(object):
env_name = "moon"
service_factory_kwargs = None
def get_config(self):
return {"type": "ec2",
"juju-origin": "distro",
"admin-secret": "magic-beans",
"access-key": "0f62e973d5f8",
"secret-key": "3e5a7c653f59",
"control-bucket": self.env_name}
def get_provider(self):
"""Return the ec2 machine provider.
This should only be invoked after mocker is in replay mode so the
AWS service class will be appropriately replaced by the mock.
"""
return MachineProvider(self.env_name, self.get_config())
def get_instance(self,
instance_id, state="running", machine_id=42, **kwargs):
groups = kwargs.pop("groups",
["juju-%s" % self.env_name,
"juju-%s-%s" % (self.env_name, machine_id)])
reservation = Reservation("x", "y", groups=groups)
return Instance(instance_id, state, reservation=reservation, **kwargs)
def assert_machine(self, machine, instance_id, dns_name):
self.assertTrue(isinstance(machine, EC2ProviderMachine))
self.assertEquals(machine.instance_id, instance_id)
self.assertEquals(machine.dns_name, dns_name)
def get_ec2_error(self, entity_id,
format="The instance ID %r does not exist",
code=503):
"""Make a representative EC2Error for `entity_id`, eg AWS instance_id.
This error is paired with `get_wrapped_ec2_text` below. The
default format represents a fairly common error seen in
working with EC2. There are others."""
message = format % entity_id
return EC2Error(
"<error><Code>1</Code><Message>%s</Message></error>" % message,
code)
def setUp(self):
# mock out the aws services
service_factory = self.mocker.replace(
"txaws.service.AWSServiceRegion")
self._service = service_factory(KWARGS)
def store_factory_kwargs(**kwargs):
self.service_factory_kwargs = kwargs
self.mocker.call(store_factory_kwargs)
self.s3 = self.mocker.mock(S3Client)
self._service.get_s3_client()
self.mocker.result(self.s3)
self.ec2 = self.mocker.mock(EC2Client)
self._service.get_ec2_client()
self.mocker.result(self.ec2)
class EC2MachineLaunchMixin(object):
def _mock_launch_utils(self, ami_name="ami-default", **get_ami_kwargs):
get_public_key = self.mocker.replace(
"juju.providers.common.utils.get_user_authorized_keys")
def match_config(arg):
return isinstance(arg, dict)
get_public_key(MATCH(match_config))
self.mocker.result("zebra")
if not get_ami_kwargs:
return
get_ami = self.mocker.replace(
"juju.providers.ec2.utils.get_current_ami")
get_ami(KWARGS)
def check_kwargs(**kwargs):
self.assertEquals(kwargs, get_ami_kwargs)
return succeed(ami_name)
self.mocker.call(check_kwargs)
def _mock_create_group(self):
group_name = "juju-%s" % self.env_name
self.ec2.create_security_group(
group_name, "juju group for %s" % self.env_name)
self.mocker.result(succeed(True))
self.ec2.authorize_security_group(
group_name, ip_protocol="tcp", from_port="22",
to_port="22", cidr_ip="0.0.0.0/0")
self.mocker.result(succeed([self.env_name]))
self.ec2.describe_security_groups(group_name)
self.mocker.result(succeed(
[SecurityGroup(group_name, "", owner_id="123")]))
self.ec2.authorize_security_group(
group_name, source_group_name=group_name,
source_group_owner_id="123")
self.mocker.result(succeed(True))
def _mock_create_machine_group(self, machine_id):
machine_group_name = "juju-%s-%s" % (self.env_name, machine_id)
self.ec2.create_security_group(
machine_group_name, "juju group for %s machine %s" % (
self.env_name, machine_id))
self.mocker.result(succeed(True))
def _mock_delete_machine_group(self, machine_id):
machine_group_name = "juju-%s-%s" % (self.env_name, machine_id)
self.ec2.delete_security_group(machine_group_name)
self.mocker.result(succeed(True))
def _mock_delete_machine_group_was_deleted(self, machine_id):
machine_group_name = "juju-%s-%s" % (self.env_name, machine_id)
self.ec2.delete_security_group(machine_group_name)
self.mocker.result(fail(self.get_ec2_error(
machine_group_name,
"There are active instances using security group %r")))
def _mock_get_zookeeper_hosts(self, hosts=None):
"""
Try to encapsulate a variety of behaviors here..
if hosts is None, a default host is used.
if hosts is False, no s3 state is returned
if hosts are passed as a list of instances, they
are returned.
"""
if hosts is None:
hosts = [self.get_instance(
"i-es-zoo", private_dns_name="es.example.internal")]
self.s3.get_object(self.env_name, "provider-state")
if hosts is False:
error = S3Error("<error/>", 404)
error.errors = [{"Code": "NoSuchKey"}]
self.mocker.result(fail(error))
return
state = dump({
"zookeeper-instances":
[i.instance_id for i in hosts]})
self.mocker.result(succeed(state))
if hosts:
# connect grabs the first host of a set.
self.ec2.describe_instances(hosts[0].instance_id)
self.mocker.result(succeed([hosts[0]]))
class MockInstanceState(object):
"""Mock the result of ec2_describe_instances when called successively.
Each call of :method:`get_round` returns a list of mock `Instance`
objects, using the state for that round. Instance IDs not used in
the round (and passed in from ec2_describe_instances) are
automatically skipped."""
def __init__(self, tester, instance_ids, machine_ids, states):
self.tester = tester
self.instance_ids = instance_ids
self.machine_ids = machine_ids
self.states = states
self.round = 0
def get_round(self, *current_instance_ids):
result = []
for instance_id, machine_id, state in zip(
self.instance_ids, self.machine_ids, self.states[self.round]):
if instance_id not in current_instance_ids:
# Ignore instance_ids that are no longer being
# described, because they have since moved into a
# terminated state
continue
result.append(self.tester.get_instance(instance_id,
machine_id=machine_id,
state=state))
self.round += 1
return succeed(result)
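# Illustrative sketch (not part of the original module): feeding get_round into
# a mocked describe_instances so each call advances through the states. The
# instance/machine ids and states below are hypothetical.
#
#   states = MockInstanceState(self, ["i-a", "i-b"], [0, 1],
#                              [["running", "running"],
#                               ["shutting-down", "running"]])
#   self.ec2.describe_instances("i-a", "i-b")
#   self.mocker.call(states.get_round)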
class Observed(object):
"""Minimal wrapper just to ensure :method:`add` returns a `Deferred`."""
def __init__(self):
self.items = set()
def add(self, item):
self.items.add(item)
return succeed(True)
|
agpl-3.0
| -6,238,537,517,998,629,000
| 35.762791
| 78
| 0.602859
| false
| 3.770992
| false
| false
| false
|
jelmer/python-fastimport
|
fastimport/processors/filter_processor.py
|
1
|
11587
|
# Copyright (C) 2009 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Import processor that filters the input (and doesn't import)."""
from .. import (
commands,
helpers,
processor,
)
import stat
class FilterProcessor(processor.ImportProcessor):
"""An import processor that filters the input to include/exclude objects.
No changes to the current repository are made.
Here are the supported parameters:
* include_paths - a list of paths that commits must change in order to
be kept in the output stream
* exclude_paths - a list of paths that should not appear in the output
stream
    * squash_empty_commits - if set to False, commits that have no changes
      after the filter has been applied are kept rather than squashed
"""
known_params = [
b'include_paths',
b'exclude_paths',
b'squash_empty_commits'
]
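    # Illustrative usage sketch (not part of the original module; the stream
    # and parser wiring is an assumption based on fastimport's parser API):
    #
    #   import sys
    #   from fastimport import parser
    #   proc = FilterProcessor(params={b'include_paths': [b'doc/']})
    #   proc.process(parser.ImportParser(sys.stdin.buffer).iter_commands)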
def pre_process(self):
self.includes = self.params.get(b'include_paths')
self.excludes = self.params.get(b'exclude_paths')
self.squash_empty_commits = bool(
self.params.get(b'squash_empty_commits', True))
# What's the new root, if any
self.new_root = helpers.common_directory(self.includes)
# Buffer of blobs until we know we need them: mark -> cmd
self.blobs = {}
# These are the commits we've squashed so far
self.squashed_commits = set()
# Map of commit-id to list of parents
self.parents = {}
def pre_handler(self, cmd):
self.command = cmd
# Should this command be included in the output or not?
self.keep = False
# Blobs to dump into the output before dumping the command itself
self.referenced_blobs = []
def post_handler(self, cmd):
if not self.keep:
return
# print referenced blobs and the command
for blob_id in self.referenced_blobs:
self._print_command(self.blobs[blob_id])
self._print_command(self.command)
def progress_handler(self, cmd):
"""Process a ProgressCommand."""
# These always pass through
self.keep = True
def blob_handler(self, cmd):
"""Process a BlobCommand."""
# These never pass through directly. We buffer them and only
# output them if referenced by an interesting command.
self.blobs[cmd.id] = cmd
self.keep = False
def checkpoint_handler(self, cmd):
"""Process a CheckpointCommand."""
# These always pass through
self.keep = True
def commit_handler(self, cmd):
"""Process a CommitCommand."""
# These pass through if they meet the filtering conditions
interesting_filecmds = self._filter_filecommands(cmd.iter_files)
if interesting_filecmds or not self.squash_empty_commits:
# If all we have is a single deleteall, skip this commit
if len(interesting_filecmds) == 1 and isinstance(
interesting_filecmds[0], commands.FileDeleteAllCommand):
pass
else:
# Remember just the interesting file commands
self.keep = True
cmd.file_iter = iter(interesting_filecmds)
# Record the referenced blobs
for fc in interesting_filecmds:
if isinstance(fc, commands.FileModifyCommand):
if (fc.dataref is not None and
not stat.S_ISDIR(fc.mode)):
self.referenced_blobs.append(fc.dataref)
# Update from and merges to refer to commits in the output
cmd.from_ = self._find_interesting_from(cmd.from_)
cmd.merges = self._find_interesting_merges(cmd.merges)
else:
self.squashed_commits.add(cmd.id)
# Keep track of the parents
if cmd.from_ and cmd.merges:
parents = [cmd.from_] + cmd.merges
elif cmd.from_:
parents = [cmd.from_]
else:
parents = None
if cmd.mark is not None:
self.parents[b':' + cmd.mark] = parents
def reset_handler(self, cmd):
"""Process a ResetCommand."""
if cmd.from_ is None:
# We pass through resets that init a branch because we have to
# assume the branch might be interesting.
self.keep = True
else:
# Keep resets if they indirectly reference something we kept
cmd.from_ = self._find_interesting_from(cmd.from_)
self.keep = cmd.from_ is not None
def tag_handler(self, cmd):
"""Process a TagCommand."""
# Keep tags if they indirectly reference something we kept
cmd.from_ = self._find_interesting_from(cmd.from_)
self.keep = cmd.from_ is not None
def feature_handler(self, cmd):
"""Process a FeatureCommand."""
feature = cmd.feature_name
if feature not in commands.FEATURE_NAMES:
self.warning(
"feature %s is not supported - parsing may fail"
% (feature,))
# These always pass through
self.keep = True
def _print_command(self, cmd):
"""Wrapper to avoid adding unnecessary blank lines."""
text = bytes(cmd)
self.outf.write(text)
if not text.endswith(b'\n'):
self.outf.write(b'\n')
def _filter_filecommands(self, filecmd_iter):
"""Return the filecommands filtered by includes & excludes.
:return: a list of FileCommand objects
"""
if self.includes is None and self.excludes is None:
return list(filecmd_iter())
# Do the filtering, adjusting for the new_root
result = []
for fc in filecmd_iter():
if (isinstance(fc, commands.FileModifyCommand) or
isinstance(fc, commands.FileDeleteCommand)):
if self._path_to_be_kept(fc.path):
fc.path = self._adjust_for_new_root(fc.path)
else:
continue
elif isinstance(fc, commands.FileDeleteAllCommand):
pass
elif isinstance(fc, commands.FileRenameCommand):
fc = self._convert_rename(fc)
elif isinstance(fc, commands.FileCopyCommand):
fc = self._convert_copy(fc)
else:
self.warning(
"cannot handle FileCommands of class %s - ignoring",
fc.__class__)
continue
if fc is not None:
result.append(fc)
return result
def _path_to_be_kept(self, path):
"""Does the given path pass the filtering criteria?"""
if self.excludes and (
path in self.excludes
or helpers.is_inside_any(self.excludes, path)):
return False
if self.includes:
return (
path in self.includes
or helpers.is_inside_any(self.includes, path))
return True
def _adjust_for_new_root(self, path):
"""Adjust a path given the new root directory of the output."""
if self.new_root is None:
return path
elif path.startswith(self.new_root):
return path[len(self.new_root):]
else:
return path
def _find_interesting_parent(self, commit_ref):
while True:
if commit_ref not in self.squashed_commits:
return commit_ref
parents = self.parents.get(commit_ref)
if not parents:
return None
commit_ref = parents[0]
def _find_interesting_from(self, commit_ref):
if commit_ref is None:
return None
return self._find_interesting_parent(commit_ref)
def _find_interesting_merges(self, commit_refs):
if commit_refs is None:
return None
merges = []
for commit_ref in commit_refs:
parent = self._find_interesting_parent(commit_ref)
if parent is not None:
merges.append(parent)
if merges:
return merges
else:
return None
def _convert_rename(self, fc):
"""Convert a FileRenameCommand into a new FileCommand.
:return: None if the rename is being ignored, otherwise a
new FileCommand based on the whether the old and new paths
are inside or outside of the interesting locations.
"""
old = fc.old_path
new = fc.new_path
keep_old = self._path_to_be_kept(old)
keep_new = self._path_to_be_kept(new)
if keep_old and keep_new:
fc.old_path = self._adjust_for_new_root(old)
fc.new_path = self._adjust_for_new_root(new)
return fc
elif keep_old:
# The file has been renamed to a non-interesting location.
# Delete it!
old = self._adjust_for_new_root(old)
return commands.FileDeleteCommand(old)
elif keep_new:
# The file has been renamed into an interesting location
# We really ought to add it but we don't currently buffer
# the contents of all previous files and probably never want
# to. Maybe fast-import-info needs to be extended to
# remember all renames and a config file can be passed
# into here ala fast-import?
self.warning(
"cannot turn rename of %s into an add of %s yet" %
(old, new))
return None
def _convert_copy(self, fc):
"""Convert a FileCopyCommand into a new FileCommand.
:return: None if the copy is being ignored, otherwise a
new FileCommand based on the whether the source and destination
paths are inside or outside of the interesting locations.
"""
src = fc.src_path
dest = fc.dest_path
keep_src = self._path_to_be_kept(src)
keep_dest = self._path_to_be_kept(dest)
if keep_src and keep_dest:
fc.src_path = self._adjust_for_new_root(src)
fc.dest_path = self._adjust_for_new_root(dest)
return fc
elif keep_src:
# The file has been copied to a non-interesting location.
# Ignore it!
return None
elif keep_dest:
# The file has been copied into an interesting location
# We really ought to add it but we don't currently buffer
# the contents of all previous files and probably never want
# to. Maybe fast-import-info needs to be extended to
# remember all copies and a config file can be passed
# into here ala fast-import?
self.warning(
"cannot turn copy of %s into an add of %s yet" %
(src, dest))
return None
|
gpl-2.0
| -4,424,120,872,560,378,000
| 36.866013
| 77
| 0.586778
| false
| 4.387353
| false
| false
| false
|
wgkoro/Countdown-Reader-closing
|
source/libs/utils.py
|
1
|
1190
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import string
import re
import random
import hashlib
from flask import request, session
def generate_csrf_token():
rand_str = randstr(20)
session['_csrf_token'] = get_session_token(rand_str)
return rand_str
def get_session_token(rand_str):
    key = 'SaltOfToken'
    salted = key + rand_str  # avoid shadowing the imported string module
    return hashlib.sha1(salted).hexdigest()
def get_template_file():
if not is_mobile():
return 'top.html'
return 'mobile.html'
def is_mobile():
ua = request.headers.get('User-Agent', '')
if not ua:
return False
ua = ua.lower()
if re.match(r'.*(iphone|android).*', ua):
return True
return False
def check_pager(pager):
try:
pager = int(pager)
    except (TypeError, ValueError):
pager = 1
if pager > 50:
pager = 1
return pager
def randstr(n):
alphabets = string.digits + string.letters
return ''.join(random.choice(alphabets) for i in xrange(n))
def select_rand_img():
imgs = ['nature', 'flower', 'night', 'metro', 'tree']
background = random.choice(imgs)
if is_mobile():
return '%s_s.jpg' % background
return '%s.jpg' % background
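# Illustrative verification sketch (not part of the original module): the token
# returned by generate_csrf_token() is embedded in a form, and a POST handler
# re-derives the session token to compare. ``abort`` is flask.abort.
#
#   submitted = request.form.get('_csrf_token', '')
#   if get_session_token(submitted) != session.get('_csrf_token'):
#       abort(403)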
|
mit
| -7,334,088,037,797,551,000
| 19.517241
| 63
| 0.612605
| false
| 3.361582
| false
| false
| false
|
OnroerendErfgoed/skosprovider_heritagedata
|
setup.py
|
1
|
1309
|
import os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
packages = [
'skosprovider_heritagedata'
]
requires = [
'skosprovider>=0.6.0',
'requests',
'rdflib'
]
setup(
name='skosprovider_heritagedata',
version='0.3.1',
description='Skosprovider implementation of the heritagedata.org Vocabularies',
long_description=README,
packages=packages,
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
author='Flanders Heritage Agency',
author_email='ict@onroerenderfgoed.be',
url='https://github.com/OnroerendErfgoed/skosprovider_heritagedata',
keywords='heritagedata.org skos skosprovider thesauri vocabularies',
test_suite='nose.collector'
)
|
mit
| 516,847,866,655,250,750
| 27.456522
| 83
| 0.662338
| false
| 3.616022
| false
| true
| false
|
alexad2/XeRPI
|
Xe1T_Kr83m_Note/lce_helpers_v2.py
|
1
|
8006
|
import numpy as np
from collections import defaultdict
import ROOT
from subprocess import call
import pandas as pd
##################################################################################################
def atan(y, x):
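    # np.arctan2 returns angles in (-pi, pi]; shift negatives into [0, 2*pi)
    # so the azimuthal bins below are contiguous.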
phi = np.arctan2(y, x)
for i in range(len(phi)):
if phi[i] < 0:
phi[i] += 2*np.pi
return phi
##################################################################################################
def xe_to_lyBins(df,bin_settings,peak,bin_spec_dir='Bin_Hists'):
R, Z, A_r, N_phi, N_z = bin_settings
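    # Interpretation of bin_settings, inferred from how the values are used
    # below (treat as a sketch):
    #   R     - radial extent of the volume
    #   Z     - total z extent (the z selection implies Z is negative,
    #           i.e. z is measured downward)
    #   A_r   - ascending list of radial bin edges
    #   N_phi - number of azimuthal bins for each radial ring
    #   N_z   - number of z slices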
z_width = Z / N_z
phi_widths = []
for n in N_phi:
phi_widths.append(2*np.pi/n)
if peak == 's10':
s1_spec_max = 20000
s1_ene = 32.1498 # from nuclear data sheets a=83
position = 'i0'
elif peak == 's11':
s1_spec_max = 100
s1_ene = 9.4051
position = 'i0' # only ever consider position of 1st s1
else:
print('error: invalid peak')
return()
bin_data = defaultdict(list)
for z_i in range(int(N_z)):
z_min = z_i * z_width
z_max = (z_i+1) * z_width
df_z = df[ (df[position+'z']<z_min) & (df[position+'z']>=z_max) ]
for r_i in range(len(A_r)):
if r_i == 0:
r_min = 0
else:
r_min = A_r[r_i-1]
r_max = A_r[r_i]
df_r = df_z[ ( np.sqrt(df_z[position+'x']**2 + df_z[position+'y']**2)>r_min )
& ( np.sqrt(df_z[position+'x']**2 + df_z[position+'y']**2)<=r_max )]
for phi_i in range(N_phi[r_i]):
bin_data['z_i'].append(z_i)
bin_data['z'].append( (z_max + z_min)/2 )
bin_data['r_i'].append(r_i)
bin_data['r'].append( (r_max + r_min)/2 )
bin_data['phi_i'].append(phi_i)
phi_min = phi_i * phi_widths[r_i]
phi_max = (phi_i+1) * phi_widths[r_i]
bin_data['phi'].append( (phi_max + phi_min)/2 )
df_phi = df_r[ (atan(df_r[position+'y'].values, df_r[position+'x'].values) > phi_min)
& (atan(df_r[position+'y'].values, df_r[position+'x'].values) <= phi_max )]
bin_data['N'].append(len(df_phi))
c1 = ROOT.TCanvas('','', 800, 700)
hist = ROOT.TH1D('','', 100, 0, s1_spec_max)
for i in range(len(df_phi[peak+'Area'])):
hist.Fill(df_phi[peak+'Area'].values[i])
if hist.GetEntries() < 1:
bin_data['ly'].append(-1)
bin_data['errly'].append(-1)
bin_data['S1AreaMean'].append(-1)
bin_data['S1AreaMeanError'].append(-1)
continue
hist.SetTitle(peak+' Spectrum: \
%.1f > z > %.1f, %.1f < r < %.1f, %.1f < phi < %.1f,'
%(z_min, z_max, r_min, r_max, phi_min, phi_max))
hist.GetXaxis().SetTitle(peak+'Area (pe)')
hist.GetXaxis().CenterTitle()
hist.Sumw2()
hist.SetStats(False)
hist.Draw()
hist.Fit('gaus')
fit = hist.GetFunction('gaus')
p1 = fit.GetParameter(1)
e1 = fit.GetParError(1)
bin_data['S1AreaMean'].append(p1)
bin_data['S1AreaMeanError'].append(e1)
bin_data['ly'].append(p1/s1_ene)
bin_data['errly'].append(e1/s1_ene)
if bin_spec_dir != 'none':
call('mkdir '+bin_spec_dir,shell=True)
chi2 = fit.GetChisquare()
ndf = fit.GetNDF()
p0 = fit.GetParameter(0)
e0 = fit.GetParError(0)
p2 = fit.GetParameter(2)
e2 = fit.GetParError(2)
pt = ROOT.TPaveText(.58, .68, .88, .88, 'NDC')
pt.AddText('Entries = %d'%len(df_phi))
pt.AddText('#mu = %1.3f #pm %1.3f'%(p1, e1))
pt.AddText('#sigma = %1.3f #pm %1.3f' %(p2, e2))
pt.AddText('Amplitude = %1.3f #pm %1.3f' %(p0, e0))
pt.AddText('#chi^{2}/NDF = %1.3f/%1.3f' %(chi2, ndf))
pt.Draw()
c1.Print(bin_spec_dir+'/f_'+peak+'_z%d_r%d_phi%d.png' %(z_i, r_i, phi_i))
c1.Clear()
hist.Delete()
return bin_data
##################################################################################################
def lyBins_to_txt(bin_data,out_file):
f = open(out_file, 'w')
header = 'z t r zmid tmid rmid ly errly\n'
f.write(header)
for i in range(len(bin_data['z'])):
bin_values = (str(bin_data['z_i'][i])+' '+str(bin_data['phi_i'][i])+' '+str(bin_data['r_i'][i])+' '
+str(bin_data['z'][i])+' '+str(bin_data['phi'][i])+' '+str(bin_data['r'][i])+' '
+str(bin_data['ly'][i])+' '+str(bin_data['errly'][i])+'\n')
f.write(bin_values)
f.close()
return
##################################################################################################
def bins_to_plot(bin_dict, peak, bin_settings, outfile, diff = False):
if False:
x=1
else:
df = pd.DataFrame(bin_dict)
dummyH_list=[]
c1 = ROOT.TCanvas( '','', 2400, 3200 )
ROOT.gStyle.SetOptStat(0)
c1.Divide(3,4,0.02,0.02)
z_hists = []
max_ly = max(df['ly'])
min_ly = min(df['ly'])
zjump = bin_settings[1]/bin_settings[4]
for z_i in range(int(bin_settings[4])):
dummyH_list.append(ROOT.TH2D("","",100,-1*bin_settings[0],bin_settings[0],100,-1*bin_settings[0],bin_settings[0]))
df_new = df[ df['z_i'] == z_i ]
r_hists = []
for r_i in range(len(bin_settings[2])):
r_hists.append(ROOT.TH2D('','', bin_settings[3][r_i], 0, 2*np.pi, len(bin_settings[2]), 0, bin_settings[0]))
df_newer = df_new[ df_new['r_i'] == r_i ]
for i in range(len(df_newer)):
r_hists[r_i].Fill(df_newer['phi'].values[i], df_newer['r'].values[i],
df_newer['ly'].values[i] )
z_hists.append(r_hists)
c1.cd(z_i+1)
dummyH_list[z_i].Draw('colz')
dummyH_list[z_i].SetTitle("%.2fcm < z < %.2fcm" %(z_i*zjump, (z_i+1)*zjump ))
dummyH_list[z_i].GetZaxis().SetTitle("<s1Area>")
dummyH_list[z_i].GetXaxis().SetTitle("x position [cm]")
dummyH_list[z_i].GetXaxis().CenterTitle()
dummyH_list[z_i].GetYaxis().SetTitle("y position [cm]")
dummyH_list[z_i].GetYaxis().CenterTitle()
# c1.SetTopMargin(0.2)
c1.SetRightMargin(0.2)
for i in range(len(z_hists[z_i])):
z_hists[z_i][i].GetZaxis().SetRangeUser(0, max_ly)
if diff:
z_hists[z_i][i].GetZaxis().SetTitle("(pax_ly - xerawdp_ly)^{2} [pe/keV]")
else:
z_hists[z_i][i].GetZaxis().SetTitle(peak + " ly [pe/keV]")
z_hists[z_i][i].GetZaxis().SetTitleOffset(1.8)
z_hists[z_i][i].Draw('pol colz a same')
c1.Print(outfile)
c1.Clear()
return
##################################################################################################
|
gpl-3.0
| -3,472,338,365,560,601,000
| 36.586854
| 126
| 0.402823
| false
| 3.306898
| false
| false
| false
|
adobe-type-tools/robofont-scripts
|
Anchors/AnchorsOutput.py
|
1
|
3389
|
from __future__ import print_function
__copyright__ = __license__ = """
Copyright (c) 2013-2019 Adobe Systems Incorporated. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
__doc__ = """
Anchors Output v1.3 - 30 Jul 2019
Updates print commands to use python 3 syntax.
Anchors Output v1.1 - 26 Apr 2016
Outputs all the anchor data to external text file(s) named 'anchors'.
If the family has more than one master, '_X' is appended to the name,
where 'X' represents the index of the font master, counting from 0.
==================================================
Versions:
v1.3 - 30 Jul 2019 - Updates print commands to use python 3 syntax
v1.1 - 26 Apr 2016 - Use the same file naming logic as the derivedchars files
v1.0 - 21 Feb 2013 - Initial release
"""
#----------------------------------------
kAnchorsFileName = "anchors"
#----------------------------------------
import os
def run(font, masterNumber):
anchorsList = []
glyphOrder = font.lib['public.glyphOrder']
if len(glyphOrder) != len(font.keys()):
glyphOrder = font.keys()
# Collect anchors data
for glyphName in glyphOrder:
glyph = font[glyphName]
for anchorIndex in range(len(glyph.anchors)):
anchor = glyph.anchors[anchorIndex]
# Skip nameless anchors
            if not anchor.name:
print('ERROR: Glyph %s has a nameless anchor. Skipped.' % glyphName)
continue
anchorData = "%s\t%s\t%d\t%d\n" % (glyphName, anchor.name, anchor.x, anchor.y)
anchorsList.append(anchorData)
if not len(anchorsList):
print('The font has no anchors.')
return
# Write file
if masterNumber:
filename = "%s_%s" % (kAnchorsFileName, masterNumber)
else:
filename = kAnchorsFileName
print('Writing file %s ...' % filename)
outfile = open(filename, 'w')
outfile.writelines(anchorsList)
outfile.close()
print('Done!')
if __name__ == "__main__":
font = CurrentFont()
    if font is None:
print('Open a font first.')
else:
if not font.path:
print('Save the font first.')
elif not len(font):
print('The font has no glyphs.')
else:
folderPath, fileName = os.path.split(font.path)
fileNameNoExtension, fileExtension = os.path.splitext(fileName)
masterNumber = fileNameNoExtension.split('_')[-1]
if not masterNumber.isdigit():
masterNumber = None
os.chdir(folderPath) # Change current directory to the location of the opened font
run(font, masterNumber)
|
mit
| 5,904,105,215,958,060,000
| 29.258929
| 85
| 0.701682
| false
| 3.720088
| false
| false
| false
|
infothrill/python-viscosity-app
|
viscosity_app/vpn.py
|
1
|
4333
|
"""
This module provides procedures to interact in a programmatic way with the
application "Viscosity" from http://www.sparklabs.com/viscosity/ using the
OS X applescripting interface.
"""
import logging
import time
import applescript
from .observer import Subject
EVT_VPN_STOPPED = 100
EVT_VPN_STARTED = 101
def connect(connection_name):
thescript = """tell application "Viscosity" to connect \"%s\"""" % connection_name
logging.info("VPN: connecting to '%s'", connection_name)
return applescript.AppleScript(thescript).run()
def disconnect_all():
thescript = """tell application "Viscosity" to disconnectall\n"""
logging.debug("disconnecting all viscosity connections")
return applescript.AppleScript(thescript).run()
def disconnect(connection_name):
thescript = """tell application "Viscosity" to disconnect \"%s\"\n""" % connection_name
logging.debug("disconnecting viscosity connection '%s'", connection_name)
return applescript.AppleScript(thescript).run()
def get_active_connection_names():
thescript = """tell application "Viscosity"
set connames to name of connections where state is equal to "Connected"
return connames
end tell"""
try:
names = applescript.AppleScript(thescript).run()
except applescript.ScriptError as exc:
logging.debug("An Apple script error occured while querying active connections", exc_info=exc)
return ()
else:
return names
def get_all_connection_names():
thescript = """tell application "Viscosity"
set connames to name of connections
end tell
return connames"""
logging.debug("getting viscosity connection names")
return applescript.AppleScript(thescript).run()
class VpnConnection(object):
'''
An Applescript based controller for Viscosity.app
(http://www.sparklabs.com/viscosity/)
'''
def __init__(self, connection_name):
super(VpnConnection, self).__init__()
if connection_name not in get_all_connection_names():
raise ValueError("Connection '%s' not found in Viscosity!" % connection_name)
self.__connection_name = connection_name
@property
def name(self):
return self.__connection_name
def connect(self):
_cur_conns = get_active_connection_names()
if self.__connection_name in _cur_conns:
return True
elif len(_cur_conns) > 0:
logging.info("VPN connect(%s): already connected to non-preferred VPN(s): %r", self.__connection_name, _cur_conns)
connect(self.__connection_name)
# wait for it to connect
        max_wait = 30  # seconds
        current_wait = 0
        while current_wait < max_wait:
            _cur_conns = get_active_connection_names()
            if self.__connection_name in _cur_conns:
                break
            time.sleep(0.5)
            current_wait += 0.5  # advance the counter so the loop can expire
if self.__connection_name in _cur_conns:
logging.info("VPN: connected to '%s'", self.__connection_name)
return True
else:
            logging.warning("VPN: failed to connect to '%s'", self.__connection_name)
return False
def disconnect(self):
if self.is_connected():
disconnect(self.__connection_name)
def is_connected(self):
return self.__connection_name in get_active_connection_names()
class VpnControllerSubject(Subject):
'''
A class capable of monitoring a specific Viscosity VPN connection and
notifying observers about changes in the status of the connection.
'''
def __init__(self, vpn):
super(VpnControllerSubject, self).__init__()
self.connection = vpn
def refresh(self):
self.connected = self.connection.is_connected()
@property
def connected(self):
if not hasattr(self, '_connected'):
return None
else:
return self._connected
@connected.setter
def connected(self, value):
oldvalue = self.connected
self._connected = value # pylint: disable=W0201
if oldvalue != value:
if value is True:
self.notifyObservers(EVT_VPN_STARTED, "VPN('%s') is connected" % self.connection.name)
else:
self.notifyObservers(EVT_VPN_STOPPED, "VPN('%s') is disconnected" % self.connection.name)
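# A minimal end-to-end sketch (illustrative only; "Office" is a placeholder
# connection name that must already exist in Viscosity):
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    vpn = VpnConnection('Office')
    if vpn.connect():
        logging.info("'%s' is up: %s", vpn.name, vpn.is_connected())
        vpn.disconnect()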
|
mit
| 6,369,262,081,582,387,000
| 31.825758
| 126
| 0.648973
| false
| 4.049533
| false
| false
| false
|
klnusbaum/UDJ-Server
|
udjserver/udj/views/views07/user_modification.py
|
1
|
2613
|
import json
import re
from udj.views.views07.decorators import NeedsJSON
from udj.views.views07.decorators import AcceptsMethods
from udj.views.views07.decorators import HasNZJSONParams
from udj.views.views07.authdecorators import NeedsAuth
from udj.views.views07.responses import HttpResponseConflictingResource
from udj.views.views07.responses import HttpResponseNotAcceptable
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpRequest
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.auth.models import User
from django.db import transaction
@NeedsJSON
@AcceptsMethods(['PUT', 'POST'])
@HasNZJSONParams(['username', 'email', 'password'])
def userMod(request, json_params):
  # Validate inputs
username = json_params['username']
email = json_params['email']
password = json_params['password']
first_name = json_params.get('first_name', '')
last_name = json_params.get('last_name', '')
if len(password) < 8:
return HttpResponseNotAcceptable("password")
try:
validate_email(email)
except ValidationError:
return HttpResponseNotAcceptable("email")
  # Actually dispatch to create or modify the user
if request.method == 'PUT':
return createUser(request, username, email, password, first_name, last_name)
else:
return modifyUser(request, username, email, password, first_name, last_name)
@NeedsAuth
@transaction.commit_on_success
def modifyUser(request, username, email, password, first_name, last_name):
user = request.udjuser
if user.email != email and User.objects.filter(email=email).exists():
return HttpResponseConflictingResource('email')
if username != user.username:
return HttpResponseNotAcceptable('username')
user.email = email
user.first_name = first_name
user.last_name = last_name
  # set_password only updates the in-memory hash, so save afterwards to persist it
  user.set_password(password)
  user.save()
return HttpResponse()
@transaction.commit_on_success
def createUser(request, username, email, password, first_name, last_name):
if User.objects.filter(username=username).exists():
return HttpResponseConflictingResource('username')
if User.objects.filter(email=email).exists():
return HttpResponseConflictingResource('email')
if not re.compile(r'^[\w.@+-]+$').match(username):
return HttpResponseNotAcceptable("username")
newUser = User.objects.create_user(
username,
email,
password
)
newUser.first_name = first_name
newUser.last_name = last_name
newUser.save()
return HttpResponse(status=201)
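# For reference, userMod expects a JSON body like the following (the values
# and the endpoint path shown are illustrative):
#
#   PUT /users
#   {"username": "kurt", "email": "kurt@example.com",
#    "password": "atleast8chars", "first_name": "Kurt", "last_name": "N"}
#
# PUT creates the user (201 on success); POST modifies the authenticated user.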
|
gpl-2.0
| 8,605,335,649,776,238,000
| 29.741176
| 80
| 0.760046
| false
| 3.853982
| false
| false
| false
|
StellarCN/py-stellar-base
|
stellar_sdk/base_transaction_envelope.py
|
1
|
5708
|
from abc import abstractmethod
from typing import Generic, List, Optional, TypeVar, Union
from . import xdr as stellar_xdr
from .exceptions import SignatureExistError
from .keypair import Keypair
from .network import Network
from .utils import hex_to_bytes, sha256
T = TypeVar("T")
class BaseTransactionEnvelope(Generic[T]):
def __init__(
self,
network_passphrase: str,
        signatures: Optional[List[stellar_xdr.DecoratedSignature]] = None,
) -> None:
self.network_passphrase: str = network_passphrase
self.signatures: List[stellar_xdr.DecoratedSignature] = signatures or []
self._network_id: bytes = Network(network_passphrase).network_id()
def hash(self) -> bytes:
"""Get the XDR Hash of the signature base.
This hash is ultimately what is signed before transactions are sent
over the network. See :meth:`signature_base` for more details about
this process.
:return: The XDR Hash of this transaction envelope's signature base.
"""
return sha256(self.signature_base())
def hash_hex(self) -> str:
"""Return a hex encoded hash for this transaction envelope.
:return: A hex encoded hash for this transaction envelope.
"""
return self.hash().hex()
def sign(self, signer: Union[Keypair, str]) -> None:
"""Sign this transaction envelope with a given keypair.
Note that the signature must not already be in this instance's list of
signatures.
:param signer: The keypair or secret to use for signing this transaction
envelope.
:raise: :exc:`SignatureExistError <stellar_sdk.exception.SignatureExistError>`:
if this signature already exists.
"""
if isinstance(signer, str):
signer = Keypair.from_secret(signer)
tx_hash = self.hash()
sig = signer.sign_decorated(tx_hash)
sig_dict = [signature.__dict__ for signature in self.signatures]
if sig.__dict__ in sig_dict:
raise SignatureExistError("The keypair has already signed.")
else:
self.signatures.append(sig)
@abstractmethod
def signature_base(self) -> bytes:
"""Get the signature base of this transaction envelope.
Return the "signature base" of this transaction, which is the value
that, when hashed, should be signed to create a signature that
validators on the Stellar Network will accept.
        It is composed of a 4-byte prefix followed by the XDR-encoded form of
        this transaction.
:return: The signature base of this transaction envelope.
"""
raise NotImplementedError("The method has not been implemented.")
def sign_hashx(self, preimage: Union[bytes, str]) -> None:
"""Sign this transaction envelope with a Hash(x) signature.
See Stellar's documentation on `Multi-Sig
<https://www.stellar.org/developers/guides/concepts/multi-sig.html>`_
for more details on Hash(x) signatures.
:param preimage: Preimage of hash used as signer, byte hash or hex encoded string
"""
preimage_bytes: bytes = hex_to_bytes(preimage)
hash_preimage = sha256(preimage_bytes)
hint = stellar_xdr.SignatureHint(hash_preimage[-4:])
sig = stellar_xdr.DecoratedSignature(
hint, stellar_xdr.Signature(preimage_bytes)
)
sig_dict = [signature.__dict__ for signature in self.signatures]
if sig.__dict__ in sig_dict:
raise SignatureExistError("The preimage has already signed.")
else:
self.signatures.append(sig)
def to_xdr_object(self) -> stellar_xdr.TransactionEnvelope:
"""Get an XDR object representation of this :class:`BaseTransactionEnvelope`.
:return: XDR TransactionEnvelope object
"""
raise NotImplementedError("The method has not been implemented.")
def to_xdr(self) -> str:
"""Get the base64 encoded XDR string representing this
:class:`BaseTransactionEnvelope`.
:return: XDR TransactionEnvelope base64 string object
"""
return self.to_xdr_object().to_xdr()
@classmethod
def from_xdr_object(
cls, xdr_object: stellar_xdr.TransactionEnvelope, network_passphrase: str
) -> T:
"""Create a new :class:`BaseTransactionEnvelope` from an XDR object.
:param xdr_object: The XDR object that represents a transaction envelope.
:param network_passphrase: The network to connect to for verifying and retrieving additional attributes from.
:return: A new :class:`TransactionEnvelope` object from the given XDR TransactionEnvelope object.
"""
raise NotImplementedError("The method has not been implemented.")
@classmethod
def from_xdr(cls, xdr: str, network_passphrase: str) -> T:
"""Create a new :class:`BaseTransactionEnvelope` from an XDR string.
:param xdr: The XDR string that represents a transaction
envelope.
:param network_passphrase: which network this transaction envelope is associated with.
:return: A new :class:`BaseTransactionEnvelope` object from the given XDR TransactionEnvelope base64 string object.
"""
xdr_object = stellar_xdr.TransactionEnvelope.from_xdr(xdr)
return cls.from_xdr_object(xdr_object, network_passphrase)
@abstractmethod
def __eq__(self, other: object) -> bool:
pass # pragma: no cover
def __str__(self):
return (
f"<BaseTransactionEnvelope [network_passphrase={self.network_passphrase}, "
f"signatures={self.signatures}]>"
)
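# A brief usage sketch with the concrete TransactionEnvelope subclass; the
# XDR string and secret seed below are placeholders, not working values:
#
#   from stellar_sdk import Keypair, Network, TransactionEnvelope
#   te = TransactionEnvelope.from_xdr("AAAA...", Network.TESTNET_NETWORK_PASSPHRASE)
#   te.sign(Keypair.from_secret("SB..."))
#   print(te.to_xdr())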
|
apache-2.0
| -428,335,434,075,978,200
| 37.567568
| 123
| 0.658549
| false
| 4.298193
| false
| false
| false
|
hail-is/hail
|
hail/python/hailtop/cleanup_gcr/__main__.py
|
1
|
2922
|
import sys
import time
import logging
import asyncio
import aiohttp
import hailtop.aiogoogle as aiogoogle
log = logging.getLogger(__name__)
class AsyncIOExecutor:
def __init__(self, parallelism):
self._semaphore = asyncio.Semaphore(parallelism)
async def _run(self, fut, aw):
async with self._semaphore:
try:
fut.set_result(await aw)
except asyncio.CancelledError: # pylint: disable=try-except-raise
raise
except Exception as e: # pylint: disable=broad-except
fut.set_exception(e)
def submit(self, aw):
fut = asyncio.Future()
asyncio.ensure_future(self._run(fut, aw))
return fut
async def gather(self, aws):
futs = [self.submit(aw) for aw in aws]
return [await fut for fut in futs]
class CleanupImages:
def __init__(self, client):
self._executor = AsyncIOExecutor(8)
self._client = client
async def cleanup_digest(self, image, digest, tags):
log.info(f'cleaning up digest {image}@{digest}')
await self._executor.gather([
self._client.delete(f'/{image}/manifests/{tag}')
for tag in tags])
await self._executor.submit(self._client.delete(f'/{image}/manifests/{digest}'))
log.info(f'cleaned up digest {image}@{digest}')
async def cleanup_image(self, image):
log.info(f'cleaning up image {image}')
log.info(f'listing tags for {image}')
result = await self._executor.submit(self._client.get(f'/{image}/tags/list'))
manifests = result['manifest']
manifests = [(digest, int(data['timeUploadedMs']) / 1000, data['tag']) for digest, data in manifests.items()]
log.info(f'got {len(manifests)} manifests for {image}')
# sort is ascending, oldest first
manifests = sorted(manifests, key=lambda x: x[1])
        # drop the 10 newest manifests from the cleanup candidates (they are kept)
        manifests = manifests[:-10]
now = time.time()
await asyncio.gather(*[
self.cleanup_digest(image, digest, tags)
for digest, time_uploaded, tags in manifests
if (now - time_uploaded) >= (7 * 24 * 60 * 60) or len(tags) == 0])
log.info(f'cleaned up image {image}')
async def run(self):
images = await self._executor.submit(self._client.get('/tags/list'))
await asyncio.gather(*[
self.cleanup_image(image)
for image in images['child']
])
async def main():
logging.basicConfig(level=logging.INFO)
if len(sys.argv) != 2:
raise ValueError('usage: cleanup_gcr <project>')
project = sys.argv[1]
async with aiogoogle.ContainerClient(
project=project,
timeout=aiohttp.ClientTimeout(total=60)) as client:
cleanup_images = CleanupImages(client)
await cleanup_images.run()
asyncio.run(main())
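# Example invocation (assumes application-default Google credentials are
# available in the environment; the project name is illustrative):
#
#   python3 -m hailtop.cleanup_gcr my-project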
|
mit
| 2,255,893,551,007,446,000
| 29.123711
| 117
| 0.605065
| false
| 3.809648
| false
| false
| false
|
partofthething/home-assistant
|
tests/components/unifi/conftest.py
|
1
|
1068
|
"""Fixtures for UniFi methods."""
from typing import Optional
from unittest.mock import patch
from aiounifi.websocket import SIGNAL_CONNECTION_STATE, SIGNAL_DATA
import pytest
@pytest.fixture(autouse=True)
def mock_unifi_websocket():
"""No real websocket allowed."""
with patch("aiounifi.controller.WSClient") as mock:
def make_websocket_call(data: Optional[dict] = None, state: str = ""):
"""Generate a websocket call."""
if data:
mock.return_value.data = data
mock.call_args[1]["callback"](SIGNAL_DATA)
elif state:
mock.return_value.state = state
mock.call_args[1]["callback"](SIGNAL_CONNECTION_STATE)
else:
raise NotImplementedError
yield make_websocket_call
@pytest.fixture(autouse=True)
def mock_discovery():
"""No real network traffic allowed."""
with patch(
"homeassistant.components.unifi.config_flow.async_discover_unifi",
return_value=None,
) as mock:
yield mock
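# Sketch of how a test might drive these autouse fixtures once the UniFi
# config entry has instantiated WSClient (the payload is illustrative):
#
#   mock_unifi_websocket(data={"meta": {"message": "sta:sync"}, "data": []})
#   mock_unifi_websocket(state="disconnected")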
|
mit
| -5,269,824,949,856,575,000
| 29.514286
| 78
| 0.627341
| false
| 4.139535
| false
| false
| false
|
DayGitH/Family-Tree
|
worker.py
|
1
|
28764
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'worker.ui'
#
# Created: Tue Jul 12 16:21:50 2016
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setEnabled(True)
MainWindow.resize(1125, 653)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.groupBox = QtGui.QGroupBox(self.centralwidget)
self.groupBox.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.groupBox.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.groupBox.setObjectName("groupBox")
self.horizontalLayout = QtGui.QHBoxLayout(self.groupBox)
self.horizontalLayout.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
self.horizontalLayout.setObjectName("horizontalLayout")
self.maleRadio = QtGui.QRadioButton(self.groupBox)
self.maleRadio.setObjectName("maleRadio")
self.horizontalLayout.addWidget(self.maleRadio)
self.femaleRadio = QtGui.QRadioButton(self.groupBox)
self.femaleRadio.setObjectName("femaleRadio")
self.horizontalLayout.addWidget(self.femaleRadio)
self.gridLayout.addWidget(self.groupBox, 4, 2, 1, 1)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.newFamilyButton = QtGui.QPushButton(self.centralwidget)
self.newFamilyButton.setObjectName("newFamilyButton")
self.horizontalLayout_7.addWidget(self.newFamilyButton)
self.saveFamilyButton = QtGui.QPushButton(self.centralwidget)
self.saveFamilyButton.setObjectName("saveFamilyButton")
self.horizontalLayout_7.addWidget(self.saveFamilyButton)
self.loadFamilyButton = QtGui.QPushButton(self.centralwidget)
self.loadFamilyButton.setObjectName("loadFamilyButton")
self.horizontalLayout_7.addWidget(self.loadFamilyButton)
self.gridLayout.addLayout(self.horizontalLayout_7, 11, 0, 1, 1)
self.primaryList = QtGui.QListWidget(self.centralwidget)
self.primaryList.setMaximumSize(QtCore.QSize(16777215, 120))
self.primaryList.setObjectName("primaryList")
self.gridLayout.addWidget(self.primaryList, 10, 4, 1, 1)
self.secondaryList = QtGui.QListWidget(self.centralwidget)
self.secondaryList.setMaximumSize(QtCore.QSize(16777215, 120))
self.secondaryList.setObjectName("secondaryList")
self.gridLayout.addWidget(self.secondaryList, 10, 2, 1, 1)
self.peopleList = QtGui.QListWidget(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.peopleList.sizePolicy().hasHeightForWidth())
self.peopleList.setSizePolicy(sizePolicy)
self.peopleList.setMinimumSize(QtCore.QSize(180, 0))
self.peopleList.setObjectName("peopleList")
self.gridLayout.addWidget(self.peopleList, 1, 0, 9, 1)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.attachSpouseButton = QtGui.QPushButton(self.centralwidget)
self.attachSpouseButton.setObjectName("attachSpouseButton")
self.horizontalLayout_3.addWidget(self.attachSpouseButton)
self.attachParentsButton = QtGui.QPushButton(self.centralwidget)
self.attachParentsButton.setObjectName("attachParentsButton")
self.horizontalLayout_3.addWidget(self.attachParentsButton)
self.attachChildButton = QtGui.QPushButton(self.centralwidget)
self.attachChildButton.setObjectName("attachChildButton")
self.horizontalLayout_3.addWidget(self.attachChildButton)
self.gridLayout.addLayout(self.horizontalLayout_3, 11, 4, 1, 1)
self.bdayEdit = QtGui.QLineEdit(self.centralwidget)
self.bdayEdit.setObjectName("bdayEdit")
self.gridLayout.addWidget(self.bdayEdit, 3, 2, 1, 1)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.moveUpSpouse = QtGui.QToolButton(self.centralwidget)
self.moveUpSpouse.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveUpSpouse.setObjectName("moveUpSpouse")
self.verticalLayout_2.addWidget(self.moveUpSpouse)
self.moveDnSpouse = QtGui.QToolButton(self.centralwidget)
self.moveDnSpouse.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveDnSpouse.setObjectName("moveDnSpouse")
self.verticalLayout_2.addWidget(self.moveDnSpouse)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem)
self.gridLayout.addLayout(self.verticalLayout_2, 3, 5, 3, 1)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.moveUpChild = QtGui.QToolButton(self.centralwidget)
self.moveUpChild.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveUpChild.setObjectName("moveUpChild")
self.verticalLayout.addWidget(self.moveUpChild)
self.moveDnChild = QtGui.QToolButton(self.centralwidget)
self.moveDnChild.setMaximumSize(QtCore.QSize(25, 16777215))
self.moveDnChild.setObjectName("moveDnChild")
self.verticalLayout.addWidget(self.moveDnChild)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem1)
self.gridLayout.addLayout(self.verticalLayout, 8, 5, 2, 1)
self.spouseList = QtGui.QListWidget(self.centralwidget)
self.spouseList.setMaximumSize(QtCore.QSize(16777215, 100))
self.spouseList.setObjectName("spouseList")
self.gridLayout.addWidget(self.spouseList, 3, 4, 3, 1)
self.spouseInfo = QtGui.QLineEdit(self.centralwidget)
self.spouseInfo.setObjectName("spouseInfo")
self.gridLayout.addWidget(self.spouseInfo, 6, 4, 1, 1)
self.childrenList = QtGui.QListWidget(self.centralwidget)
self.childrenList.setObjectName("childrenList")
self.gridLayout.addWidget(self.childrenList, 8, 4, 2, 1)
self.notesEdit = QtGui.QTextEdit(self.centralwidget)
self.notesEdit.setObjectName("notesEdit")
self.gridLayout.addWidget(self.notesEdit, 8, 2, 2, 1)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.createSpouseButton = QtGui.QPushButton(self.centralwidget)
self.createSpouseButton.setObjectName("createSpouseButton")
self.horizontalLayout_4.addWidget(self.createSpouseButton)
self.createParentsButton = QtGui.QPushButton(self.centralwidget)
self.createParentsButton.setObjectName("createParentsButton")
self.horizontalLayout_4.addWidget(self.createParentsButton)
self.createChildButton = QtGui.QPushButton(self.centralwidget)
self.createChildButton.setObjectName("createChildButton")
self.horizontalLayout_4.addWidget(self.createChildButton)
self.gridLayout.addLayout(self.horizontalLayout_4, 11, 2, 1, 1)
self.deletePersonButton = QtGui.QPushButton(self.centralwidget)
self.deletePersonButton.setObjectName("deletePersonButton")
self.gridLayout.addWidget(self.deletePersonButton, 12, 2, 1, 1)
self.nicknameEdit = QtGui.QLineEdit(self.centralwidget)
self.nicknameEdit.setObjectName("nicknameEdit")
self.gridLayout.addWidget(self.nicknameEdit, 1, 2, 1, 1)
self.keyEdit = QtGui.QLineEdit(self.centralwidget)
self.keyEdit.setReadOnly(True)
self.keyEdit.setObjectName("keyEdit")
self.gridLayout.addWidget(self.keyEdit, 0, 2, 1, 1)
self.realnameEdit = QtGui.QLineEdit(self.centralwidget)
self.realnameEdit.setObjectName("realnameEdit")
self.gridLayout.addWidget(self.realnameEdit, 2, 2, 1, 1)
self.peopleListLabel = QtGui.QLabel(self.centralwidget)
self.peopleListLabel.setObjectName("peopleListLabel")
self.gridLayout.addWidget(self.peopleListLabel, 0, 0, 1, 1)
self.fatherButton = QtGui.QPushButton(self.centralwidget)
self.fatherButton.setText("")
self.fatherButton.setObjectName("fatherButton")
self.gridLayout.addWidget(self.fatherButton, 1, 4, 1, 1)
self.motherButton = QtGui.QPushButton(self.centralwidget)
self.motherButton.setText("")
self.motherButton.setObjectName("motherButton")
self.gridLayout.addWidget(self.motherButton, 2, 4, 1, 1)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.unattachSpouseButton = QtGui.QPushButton(self.centralwidget)
self.unattachSpouseButton.setObjectName("unattachSpouseButton")
self.horizontalLayout_5.addWidget(self.unattachSpouseButton)
self.unattachParentsButton = QtGui.QPushButton(self.centralwidget)
self.unattachParentsButton.setObjectName("unattachParentsButton")
self.horizontalLayout_5.addWidget(self.unattachParentsButton)
self.unattachChildButton = QtGui.QPushButton(self.centralwidget)
self.unattachChildButton.setObjectName("unattachChildButton")
self.horizontalLayout_5.addWidget(self.unattachChildButton)
self.gridLayout.addLayout(self.horizontalLayout_5, 12, 4, 1, 1)
self.exitButton = QtGui.QPushButton(self.centralwidget)
self.exitButton.setObjectName("exitButton")
self.gridLayout.addWidget(self.exitButton, 12, 0, 1, 1)
self.saveBox = QtGui.QDialogButtonBox(self.centralwidget)
self.saveBox.setOrientation(QtCore.Qt.Horizontal)
self.saveBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Save)
self.saveBox.setCenterButtons(False)
self.saveBox.setObjectName("saveBox")
self.gridLayout.addWidget(self.saveBox, 13, 4, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.ddayRadio = QtGui.QRadioButton(self.centralwidget)
self.ddayRadio.setText("")
self.ddayRadio.setAutoExclusive(False)
self.ddayRadio.setObjectName("ddayRadio")
self.horizontalLayout_2.addWidget(self.ddayRadio)
self.ddayEdit = QtGui.QLineEdit(self.centralwidget)
self.ddayEdit.setEnabled(False)
self.ddayEdit.setObjectName("ddayEdit")
self.horizontalLayout_2.addWidget(self.ddayEdit)
self.gridLayout.addLayout(self.horizontalLayout_2, 6, 2, 1, 1)
self.impRadio = QtGui.QRadioButton(self.centralwidget)
self.impRadio.setAutoExclusive(False)
self.impRadio.setObjectName("impRadio")
self.gridLayout.addWidget(self.impRadio, 7, 2, 1, 1)
self.bdayLabel = QtGui.QLabel(self.centralwidget)
self.bdayLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.bdayLabel.setObjectName("bdayLabel")
self.gridLayout.addWidget(self.bdayLabel, 3, 1, 1, 1)
self.createLabel = QtGui.QLabel(self.centralwidget)
self.createLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.createLabel.setObjectName("createLabel")
self.gridLayout.addWidget(self.createLabel, 11, 1, 1, 1)
self.notesLabel = QtGui.QLabel(self.centralwidget)
self.notesLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.notesLabel.setObjectName("notesLabel")
self.gridLayout.addWidget(self.notesLabel, 8, 1, 1, 1)
self.IDLabel = QtGui.QLabel(self.centralwidget)
self.IDLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.IDLabel.setObjectName("IDLabel")
self.gridLayout.addWidget(self.IDLabel, 0, 1, 1, 1)
self.genderLabel = QtGui.QLabel(self.centralwidget)
self.genderLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.genderLabel.setObjectName("genderLabel")
self.gridLayout.addWidget(self.genderLabel, 4, 1, 1, 1)
self.realnameLabel = QtGui.QLabel(self.centralwidget)
self.realnameLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.realnameLabel.setObjectName("realnameLabel")
self.gridLayout.addWidget(self.realnameLabel, 2, 1, 1, 1)
self.statusLabel = QtGui.QLabel(self.centralwidget)
self.statusLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.statusLabel.setObjectName("statusLabel")
self.gridLayout.addWidget(self.statusLabel, 5, 1, 1, 1)
self.ddayLabel = QtGui.QLabel(self.centralwidget)
self.ddayLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.ddayLabel.setObjectName("ddayLabel")
self.gridLayout.addWidget(self.ddayLabel, 6, 1, 1, 1)
self.primaryListLabel = QtGui.QLabel(self.centralwidget)
self.primaryListLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.primaryListLabel.setObjectName("primaryListLabel")
self.gridLayout.addWidget(self.primaryListLabel, 10, 3, 1, 1)
self.secondaryListLabel = QtGui.QLabel(self.centralwidget)
self.secondaryListLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.secondaryListLabel.setObjectName("secondaryListLabel")
self.gridLayout.addWidget(self.secondaryListLabel, 10, 1, 1, 1)
self.attachLabel = QtGui.QLabel(self.centralwidget)
self.attachLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.attachLabel.setObjectName("attachLabel")
self.gridLayout.addWidget(self.attachLabel, 11, 3, 1, 1)
self.anniversaryLabel = QtGui.QLabel(self.centralwidget)
self.anniversaryLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.anniversaryLabel.setObjectName("anniversaryLabel")
self.gridLayout.addWidget(self.anniversaryLabel, 6, 3, 1, 1)
self.nicknameLabel = QtGui.QLabel(self.centralwidget)
self.nicknameLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.nicknameLabel.setObjectName("nicknameLabel")
self.gridLayout.addWidget(self.nicknameLabel, 1, 1, 1, 1)
self.unattachLabel = QtGui.QLabel(self.centralwidget)
self.unattachLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.unattachLabel.setObjectName("unattachLabel")
self.gridLayout.addWidget(self.unattachLabel, 12, 3, 1, 1)
self.childrenLabel = QtGui.QLabel(self.centralwidget)
self.childrenLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.childrenLabel.setObjectName("childrenLabel")
self.gridLayout.addWidget(self.childrenLabel, 8, 3, 1, 1)
self.marriageLabel = QtGui.QLabel(self.centralwidget)
self.marriageLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.marriageLabel.setObjectName("marriageLabel")
self.gridLayout.addWidget(self.marriageLabel, 3, 3, 1, 1)
self.motherLabel = QtGui.QLabel(self.centralwidget)
self.motherLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.motherLabel.setObjectName("motherLabel")
self.gridLayout.addWidget(self.motherLabel, 2, 3, 1, 1)
self.fatherLabel = QtGui.QLabel(self.centralwidget)
self.fatherLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.fatherLabel.setObjectName("fatherLabel")
self.gridLayout.addWidget(self.fatherLabel, 1, 3, 1, 1)
self.groupBox_1 = QtGui.QGroupBox(self.centralwidget)
self.groupBox_1.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.groupBox_1.setObjectName("groupBox_1")
self.horizontalLayout_6 = QtGui.QHBoxLayout(self.groupBox_1)
self.horizontalLayout_6.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.secondStatusRadio = QtGui.QRadioButton(self.groupBox_1)
self.secondStatusRadio.setObjectName("secondStatusRadio")
self.horizontalLayout_6.addWidget(self.secondStatusRadio)
self.firstStatusRadio = QtGui.QRadioButton(self.groupBox_1)
self.firstStatusRadio.setObjectName("firstStatusRadio")
self.horizontalLayout_6.addWidget(self.firstStatusRadio)
self.thirdStatusRadio = QtGui.QRadioButton(self.groupBox_1)
self.thirdStatusRadio.setObjectName("thirdStatusRadio")
self.horizontalLayout_6.addWidget(self.thirdStatusRadio)
self.gridLayout.addWidget(self.groupBox_1, 5, 2, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1125, 26))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionNew_Family = QtGui.QAction(MainWindow)
self.actionNew_Family.setObjectName("actionNew_Family")
self.actionParents = QtGui.QAction(MainWindow)
self.actionParents.setObjectName("actionParents")
self.actionSpouse = QtGui.QAction(MainWindow)
self.actionSpouse.setObjectName("actionSpouse")
self.actionChildren = QtGui.QAction(MainWindow)
self.actionChildren.setObjectName("actionChildren")
self.actionDelete_Person = QtGui.QAction(MainWindow)
self.actionDelete_Person.setObjectName("actionDelete_Person")
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setObjectName("actionExit")
self.actionSave_Family = QtGui.QAction(MainWindow)
self.actionSave_Family.setObjectName("actionSave_Family")
self.actionOpen_Family = QtGui.QAction(MainWindow)
self.actionOpen_Family.setObjectName("actionOpen_Family")
self.actionMerge_Person = QtGui.QAction(MainWindow)
self.actionMerge_Person.setObjectName("actionMerge_Person")
self.actionEdit_Spouse = QtGui.QAction(MainWindow)
self.actionEdit_Spouse.setObjectName("actionEdit_Spouse")
self.actionUnParents = QtGui.QAction(MainWindow)
self.actionUnParents.setObjectName("actionUnParents")
self.actionMother = QtGui.QAction(MainWindow)
self.actionMother.setObjectName("actionMother")
self.actionUnSpouse = QtGui.QAction(MainWindow)
self.actionUnSpouse.setObjectName("actionUnSpouse")
self.actionUnChild = QtGui.QAction(MainWindow)
self.actionUnChild.setObjectName("actionUnChild")
self.actionUnAll = QtGui.QAction(MainWindow)
self.actionUnAll.setObjectName("actionUnAll")
self.actionAttach = QtGui.QAction(MainWindow)
self.actionAttach.setObjectName("actionAttach")
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.ddayRadio, QtCore.SIGNAL("toggled(bool)"), self.ddayEdit.setEnabled)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.keyEdit, self.nicknameEdit)
MainWindow.setTabOrder(self.nicknameEdit, self.realnameEdit)
MainWindow.setTabOrder(self.realnameEdit, self.bdayEdit)
MainWindow.setTabOrder(self.bdayEdit, self.maleRadio)
MainWindow.setTabOrder(self.maleRadio, self.femaleRadio)
MainWindow.setTabOrder(self.femaleRadio, self.ddayRadio)
MainWindow.setTabOrder(self.ddayRadio, self.ddayEdit)
MainWindow.setTabOrder(self.ddayEdit, self.impRadio)
MainWindow.setTabOrder(self.impRadio, self.motherButton)
MainWindow.setTabOrder(self.motherButton, self.fatherButton)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
self.maleRadio.setText(QtGui.QApplication.translate("MainWindow", "Male", None, QtGui.QApplication.UnicodeUTF8))
self.femaleRadio.setText(QtGui.QApplication.translate("MainWindow", "Female", None, QtGui.QApplication.UnicodeUTF8))
self.newFamilyButton.setText(QtGui.QApplication.translate("MainWindow", "New Family", None, QtGui.QApplication.UnicodeUTF8))
self.saveFamilyButton.setText(QtGui.QApplication.translate("MainWindow", "Save Family", None, QtGui.QApplication.UnicodeUTF8))
self.loadFamilyButton.setText(QtGui.QApplication.translate("MainWindow", "Load Family", None, QtGui.QApplication.UnicodeUTF8))
self.attachSpouseButton.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.attachParentsButton.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.attachChildButton.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.bdayEdit.setToolTip(QtGui.QApplication.translate("MainWindow", "<html><head/><body><p><br/></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.moveUpSpouse.setText(QtGui.QApplication.translate("MainWindow", "Up", None, QtGui.QApplication.UnicodeUTF8))
self.moveDnSpouse.setText(QtGui.QApplication.translate("MainWindow", "Dn", None, QtGui.QApplication.UnicodeUTF8))
self.moveUpChild.setText(QtGui.QApplication.translate("MainWindow", "Up", None, QtGui.QApplication.UnicodeUTF8))
self.moveDnChild.setText(QtGui.QApplication.translate("MainWindow", "Dn", None, QtGui.QApplication.UnicodeUTF8))
self.createSpouseButton.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.createParentsButton.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.createChildButton.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.deletePersonButton.setText(QtGui.QApplication.translate("MainWindow", "Delete Person", None, QtGui.QApplication.UnicodeUTF8))
self.peopleListLabel.setText(QtGui.QApplication.translate("MainWindow", "List", None, QtGui.QApplication.UnicodeUTF8))
self.unattachSpouseButton.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.unattachParentsButton.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.unattachChildButton.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.exitButton.setText(QtGui.QApplication.translate("MainWindow", "Exit Program", None, QtGui.QApplication.UnicodeUTF8))
self.impRadio.setText(QtGui.QApplication.translate("MainWindow", "Important", None, QtGui.QApplication.UnicodeUTF8))
self.bdayLabel.setText(QtGui.QApplication.translate("MainWindow", "Birthday:", None, QtGui.QApplication.UnicodeUTF8))
self.createLabel.setText(QtGui.QApplication.translate("MainWindow", "Create:", None, QtGui.QApplication.UnicodeUTF8))
self.notesLabel.setText(QtGui.QApplication.translate("MainWindow", "Notes:", None, QtGui.QApplication.UnicodeUTF8))
self.IDLabel.setText(QtGui.QApplication.translate("MainWindow", "ID:", None, QtGui.QApplication.UnicodeUTF8))
self.genderLabel.setText(QtGui.QApplication.translate("MainWindow", "Gender:", None, QtGui.QApplication.UnicodeUTF8))
self.realnameLabel.setText(QtGui.QApplication.translate("MainWindow", "Real Name:", None, QtGui.QApplication.UnicodeUTF8))
self.statusLabel.setText(QtGui.QApplication.translate("MainWindow", "Status:", None, QtGui.QApplication.UnicodeUTF8))
self.ddayLabel.setText(QtGui.QApplication.translate("MainWindow", "Death:", None, QtGui.QApplication.UnicodeUTF8))
self.primaryListLabel.setText(QtGui.QApplication.translate("MainWindow", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
self.secondaryListLabel.setText(QtGui.QApplication.translate("MainWindow", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
self.attachLabel.setText(QtGui.QApplication.translate("MainWindow", "Attach:", None, QtGui.QApplication.UnicodeUTF8))
self.anniversaryLabel.setText(QtGui.QApplication.translate("MainWindow", "Anniversary:", None, QtGui.QApplication.UnicodeUTF8))
self.nicknameLabel.setText(QtGui.QApplication.translate("MainWindow", "Spoken Name:", None, QtGui.QApplication.UnicodeUTF8))
self.unattachLabel.setText(QtGui.QApplication.translate("MainWindow", "Unattach:", None, QtGui.QApplication.UnicodeUTF8))
self.childrenLabel.setText(QtGui.QApplication.translate("MainWindow", "Children:", None, QtGui.QApplication.UnicodeUTF8))
self.marriageLabel.setText(QtGui.QApplication.translate("MainWindow", "Marriage:", None, QtGui.QApplication.UnicodeUTF8))
self.motherLabel.setText(QtGui.QApplication.translate("MainWindow", "Mother:", None, QtGui.QApplication.UnicodeUTF8))
self.fatherLabel.setText(QtGui.QApplication.translate("MainWindow", "Father:", None, QtGui.QApplication.UnicodeUTF8))
self.secondStatusRadio.setText(QtGui.QApplication.translate("MainWindow", "Engaged", None, QtGui.QApplication.UnicodeUTF8))
self.firstStatusRadio.setText(QtGui.QApplication.translate("MainWindow", "Single", None, QtGui.QApplication.UnicodeUTF8))
self.thirdStatusRadio.setText(QtGui.QApplication.translate("MainWindow", "Divorced", None, QtGui.QApplication.UnicodeUTF8))
self.actionNew_Family.setText(QtGui.QApplication.translate("MainWindow", "New Family", None, QtGui.QApplication.UnicodeUTF8))
self.actionParents.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.actionSpouse.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.actionChildren.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.actionDelete_Person.setText(QtGui.QApplication.translate("MainWindow", "Delete Person", None, QtGui.QApplication.UnicodeUTF8))
self.actionExit.setText(QtGui.QApplication.translate("MainWindow", "Exit", None, QtGui.QApplication.UnicodeUTF8))
self.actionSave_Family.setText(QtGui.QApplication.translate("MainWindow", "Save Family", None, QtGui.QApplication.UnicodeUTF8))
self.actionOpen_Family.setText(QtGui.QApplication.translate("MainWindow", "Open Family", None, QtGui.QApplication.UnicodeUTF8))
self.actionMerge_Person.setText(QtGui.QApplication.translate("MainWindow", "Merge Person", None, QtGui.QApplication.UnicodeUTF8))
self.actionEdit_Spouse.setText(QtGui.QApplication.translate("MainWindow", "Edit Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnParents.setText(QtGui.QApplication.translate("MainWindow", "Parents", None, QtGui.QApplication.UnicodeUTF8))
self.actionMother.setText(QtGui.QApplication.translate("MainWindow", "Mother", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnSpouse.setText(QtGui.QApplication.translate("MainWindow", "Spouse", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnChild.setText(QtGui.QApplication.translate("MainWindow", "Child", None, QtGui.QApplication.UnicodeUTF8))
self.actionUnAll.setText(QtGui.QApplication.translate("MainWindow", "All", None, QtGui.QApplication.UnicodeUTF8))
self.actionAttach.setText(QtGui.QApplication.translate("MainWindow", "Attach", None, QtGui.QApplication.UnicodeUTF8))
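# Standard PySide boilerplate for showing this generated form; this block is
# a sketch added for illustration and is not emitted by pyside-uic itself:
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    window = QtGui.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())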
|
cc0-1.0
| -7,275,819,988,399,845,000
| 71.453401
| 163
| 0.742212
| false
| 3.918267
| false
| false
| false
|
rasbt/advent-of-code-2016
|
python_code/aoc_08_02.py
|
1
|
10351
|
# Sebastian Raschka, 2016
"""
source: http://adventofcode.com/2016/day/8
DESCRIPTION
You come across a door implementing what you can only assume is an
implementation of two-factor authentication after a long
game of requirements telephone.
To get past the door, you first swipe a keycard (no problem; there was one on
a nearby desk). Then, it displays a code on a little screen, and you type
that code on a keypad. Then, presumably, the door unlocks.
Unfortunately, the screen has been smashed. After a few minutes, you've taken
everything apart and figured out how it works. Now you just have to work out
what the screen would have displayed.
The magnetic strip on the card you swiped encodes a series of instructions for
the screen; these instructions are your puzzle input. The screen is 50 pixels
wide and 6 pixels tall, all of which start off, and is capable of three
somewhat peculiar operations:
rect AxB turns on all of the pixels in a rectangle at the top-left of the
screen which is A wide and B tall.
rotate row y=A by B shifts all of the pixels in row A (0 is the top row)
right by B pixels. Pixels that would fall off the right end appear at the
left end of the row.
rotate column x=A by B shifts all of the pixels in column A
(0 is the left column) down by B pixels. Pixels that would fall
off the bottom appear at the top of the column.
For example, here is a simple sequence on a smaller screen:
rect 3x2 creates a small rectangle in the top-left corner:
###....
###....
.......
rotate column x=1 by 1 rotates the second column down by one pixel:
#.#....
###....
.#.....
rotate row y=0 by 4 rotates the top row right by four pixels:
....#.#
###....
.#.....
rotate column x=1 by 1 again rotates the second column down by one pixel,
causing the bottom pixel to wrap back to the top:
.#..#.#
#.#....
.#.....
As you can see, this display technology is extremely powerful, and will soon
dominate the tiny-code-displaying-screen market. That's what the advertisement
on the back of the display tries to convince you, anyway.
There seems to be an intermediate check of the voltage used by the display:
after you swipe your card, if the screen did work,
how many pixels should be lit?
--- Part Two ---
You notice that the screen is only capable of displaying capital letters; in the font it uses, each letter is 5 pixels wide and 6 tall.
After you swipe your card, what code is the screen trying to display?
"""
from collections import deque
def init_screen(screen, rect_str):
rect_str = rect_str.split(' ')[-1]
x, y = rect_str.strip().split('x')
x, y = int(x), int(y)
for i in range(y):
for j in range(x):
screen[i][j] = '#'
def rotate(screen, rot_str):
s = rot_str.split()[1:]
idx = int(s[1].split('=')[-1])
by = int(s[-1])
if s[0] == 'row':
screen[idx].rotate(by)
else:
dq = deque([i[idx] for i in screen])
dq.rotate(by)
for i, j in zip(screen, dq):
i[idx] = j
if __name__ == '__main__':
data = """rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 3
rect 1x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 5
rect 4x1
rotate row y=0 by 5
rotate column x=0 by 1
rect 4x1
rotate row y=0 by 10
rotate column x=5 by 2
rotate column x=0 by 1
rect 9x1
rotate row y=2 by 5
rotate row y=0 by 5
rotate column x=0 by 1
rect 4x1
rotate row y=2 by 5
rotate row y=0 by 5
rotate column x=0 by 1
rect 4x1
rotate column x=40 by 1
rotate column x=27 by 1
rotate column x=22 by 1
rotate column x=17 by 1
rotate column x=12 by 1
rotate column x=7 by 1
rotate column x=2 by 1
rotate row y=2 by 5
rotate row y=1 by 3
rotate row y=0 by 5
rect 1x3
rotate row y=2 by 10
rotate row y=1 by 7
rotate row y=0 by 2
rotate column x=3 by 2
rotate column x=2 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=2 by 5
rotate row y=1 by 3
rotate row y=0 by 3
rect 1x3
rotate column x=45 by 1
rotate row y=2 by 7
rotate row y=1 by 10
rotate row y=0 by 2
rotate column x=3 by 1
rotate column x=2 by 2
rotate column x=0 by 1
rect 4x1
rotate row y=2 by 13
rotate row y=0 by 5
rotate column x=3 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=3 by 10
rotate row y=2 by 10
rotate row y=0 by 5
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=3 by 8
rotate row y=0 by 5
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=0 by 1
rect 4x1
rotate row y=3 by 17
rotate row y=2 by 20
rotate row y=0 by 15
rotate column x=13 by 1
rotate column x=12 by 3
rotate column x=10 by 1
rotate column x=8 by 1
rotate column x=7 by 2
rotate column x=6 by 1
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 2
rotate column x=0 by 1
rect 14x1
rotate row y=1 by 47
rotate column x=9 by 1
rotate column x=4 by 1
rotate row y=3 by 3
rotate row y=2 by 10
rotate row y=1 by 8
rotate row y=0 by 5
rotate column x=2 by 2
rotate column x=0 by 2
rect 3x2
rotate row y=3 by 12
rotate row y=2 by 10
rotate row y=0 by 10
rotate column x=8 by 1
rotate column x=7 by 3
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=1 by 1
rotate column x=0 by 1
rect 9x1
rotate row y=0 by 20
rotate column x=46 by 1
rotate row y=4 by 17
rotate row y=3 by 10
rotate row y=2 by 10
rotate row y=1 by 5
rotate column x=8 by 1
rotate column x=7 by 1
rotate column x=6 by 1
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 2
rotate column x=1 by 1
rotate column x=0 by 1
rect 9x1
rotate column x=32 by 4
rotate row y=4 by 33
rotate row y=3 by 5
rotate row y=2 by 15
rotate row y=0 by 15
rotate column x=13 by 1
rotate column x=12 by 3
rotate column x=10 by 1
rotate column x=8 by 1
rotate column x=7 by 2
rotate column x=6 by 1
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=1 by 1
rotate column x=0 by 1
rect 14x1
rotate column x=39 by 3
rotate column x=35 by 4
rotate column x=20 by 4
rotate column x=19 by 3
rotate column x=10 by 4
rotate column x=9 by 3
rotate column x=8 by 3
rotate column x=5 by 4
rotate column x=4 by 3
rotate row y=5 by 5
rotate row y=4 by 5
rotate row y=3 by 33
rotate row y=1 by 30
rotate column x=48 by 1
rotate column x=47 by 5
rotate column x=46 by 5
rotate column x=45 by 1
rotate column x=43 by 1
rotate column x=38 by 3
rotate column x=37 by 3
rotate column x=36 by 5
rotate column x=35 by 1
rotate column x=33 by 1
rotate column x=32 by 5
rotate column x=31 by 5
rotate column x=30 by 1
rotate column x=23 by 4
rotate column x=22 by 3
rotate column x=21 by 3
rotate column x=20 by 1
rotate column x=12 by 2
rotate column x=11 by 2
rotate column x=3 by 5
rotate column x=2 by 5
rotate column x=1 by 3
rotate column x=0 by 4"""
screen = [deque(50 * '.') for _ in range(6)]
for row in data.split('\n'):
row = row.strip()
if not row:
continue
elif row.startswith('rect'):
init_screen(screen, rect_str=row)
else:
rotate(screen, rot_str=row)
for row in screen:
print(''.join(row))
|
mit
| -4,137,628,623,510,542,300
| 33.274834
| 135
| 0.495314
| false
| 4.252671
| false
| false
| false
|
SCUT16K/SmsSender
|
server/utils/commands.py
|
1
|
1286
|
# -*- coding: utf-8 -*-
import sys
import gevent.wsgi
import gevent.monkey
from werkzeug.contrib import profiler
from flask_script import Command
class ProfileServer(Command):
"""
Run the server with profiling tools
"""
def __init__(self, host='localhost', port=9000, **options):
self.port = port
self.host = host
self.server_options = options
def __call__(self, app, **kwargs):
f = open('profiler.log', 'w')
stream = profiler.MergeStream(sys.stdout, f)
app.config['PROFILE'] = True
app.wsgi_app = profiler.ProfilerMiddleware(app.wsgi_app, stream,
restrictions=[30])
app.run(debug=True)
class GEventServer(Command):
"""
Run the server with gevent
"""
def __init__(self, host='127.0.0.1', port=5000, **options):
self.port = port
self.host = host
self.server_options = options
def __call__(self, app, **kwargs):
gevent.monkey.patch_all()
ws = gevent.wsgi.WSGIServer(listener=(self.host, self.port),
application=app)
print "* Running on http://{}:{}/ (Press CTRL+C to quit)".format(self.host, self.port)
ws.serve_forever()
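# Hedged wiring example with Flask-Script; "create_app" is a hypothetical
# application factory, not defined in this module:
#
#   from flask_script import Manager
#   manager = Manager(create_app)
#   manager.add_command('profile', ProfileServer(port=9000))
#   manager.add_command('gevent', GEventServer(host='0.0.0.0'))
#   manager.run()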
|
apache-2.0
| -2,121,802,192,813,314,800
| 26.956522
| 94
| 0.566874
| false
| 3.981424
| false
| false
| false
|
matt77hias/Clipping
|
src/intersection.py
|
1
|
1377
|
import numpy as np
###############################################################################
## Intersection utilities 2D
###############################################################################
def intersect2D(c_v1, c_v2, p_v1, p_v2):
    # Write each line in implicit form A*x + B*y = C, built from its two
    # points, and solve the resulting 2x2 system with Cramer's rule.
    A1 = c_v2[1] - c_v1[1]
    B1 = c_v1[0] - c_v2[0]
    C1 = c_v1[0] * A1 + c_v1[1] * B1
    A2 = p_v2[1] - p_v1[1]
    B2 = p_v1[0] - p_v2[0]
    C2 = p_v1[0] * A2 + p_v1[1] * B2
    det = A1 * B2 - B1 * A2
    X1 = (C1 * B2 - B1 * C2) / det
    X2 = (A1 * C2 - C1 * A2) / det
    return np.array([X1, X2])
###############################################################################
## Intersection utilities 3D
###############################################################################
def intersect3D(c_v1, c_v2, p_v1, p_v2, a0, a1):
A1 = c_v2[a1] - c_v1[a1]
B1 = c_v1[a0] - c_v2[a0]
C1 = c_v1[a0] * A1 + c_v1[a1] * B1
A2 = p_v2[a1] - p_v1[a1]
B2 = p_v1[a0] - p_v2[a0]
C2 = p_v1[a0] * A2 + p_v1[a1] * B2
det = A1 * B2 - B1 * A2
    X1 = (C1 * B2 - B1 * C2) / det
    X2 = (A1 * C2 - C1 * A2) / det
    # Recover the interpolation parameter alpha along the (p_v1, p_v2) edge
    # from whichever coordinate equation has a non-zero denominator.
    alpha = -1.0
    if B2 != 0:
        alpha = (X1 - p_v2[a0]) / B2
    else:
        alpha = (p_v2[a1] - X2) / A2
a2 = 3 - (a1 + a0)
X3 = alpha * p_v1[a2] + (1.0 - alpha) * p_v2[a2]
X = np.zeros((3))
X[a0] = X1
X[a1] = X2
X[a2] = X3
return X
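if __name__ == '__main__':
    # Quick sanity check (illustrative): the two diagonals of the square
    # spanned by (0,0) and (2,2) cross at (1, 1).
    p = intersect2D(np.array([0.0, 0.0]), np.array([2.0, 2.0]),
                    np.array([0.0, 2.0]), np.array([2.0, 0.0]))
    print(p)  # expected: [ 1.  1.]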
|
gpl-3.0
| 6,211,066,492,931,171,000
| 26
| 79
| 0.335512
| false
| 2.257377
| false
| false
| false
|
robmcmullen/peppy
|
peppy/project/editra/BZR.py
|
1
|
9992
|
###############################################################################
# Name: Cody Precord #
# Purpose: SourceControl implementation for Bazaar #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2008 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""Bazaar implementation of the SourceControl object """
__author__ = "Cody Precord <cprecord@editra.org>"
__revision__ = "$Revision: 867 $"
__scid__ = "$Id: BZR.py 867 2009-05-06 12:10:55Z CodyPrecord $"
#------------------------------------------------------------------------------#
# Imports
import os
import datetime
import re
import time
# Local imports
from SourceControl import SourceControl, DecodeString
#------------------------------------------------------------------------------#
class BZR(SourceControl):
""" Bazaar source control class """
name = 'Bazaar'
command = 'bzr'
ccache = list() # Cache of paths that are under bazaar control
repocache = dict()
def __repr__(self):
return 'BZR.BZR()'
def getAuthOptions(self, path):
""" Get the repository authentication info """
output = []
return output
def getRepository(self, path):
""" Get the repository of a given path """
if path in self.repocache:
return self.repocache[path]
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
while True:
if not root:
break
if os.path.exists(os.path.join(root, '.bzr')):
break
else:
root = os.path.split(root)[0]
# Cache the repo of this path for faster lookups next time
self.repocache[path] = root
return root
def isControlled(self, path):
""" Is the path controlled by BZR? """
t1 = time.time()
# Check for cached paths to speed up lookup
if path in self.ccache:
return True
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
last = False
while True:
if os.path.exists(os.path.join(root, '.bzr')):
# If a containing directory of the given path has a .bzr
# directory in it run status to find out if the file is being
# tracked or not.
retval = False
out = self.run(root + os.sep, ['status', '-S', path])
if out:
lines = out.stdout.readline()
if lines.startswith('?'):
fname = lines.split(None, 1)[1].strip()
fname = fname.rstrip(os.sep)
retval = not path.endswith(fname)
else:
retval = True
self.closeProcess(out)
if retval:
self.ccache.append(path)
return retval
elif last:
break
else:
root, tail = os.path.split(root)
# If tail is None or '' then this has gotten to the root
# so mark it as the last run
if not tail:
last = True
return False
def add(self, paths):
""" Add paths to the repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['add'] + files)
self.logOutput(out)
self.closeProcess(out)
def checkout(self, paths):
""" Checkout files at the given path """
root, files = self.splitFiles(paths)
        out = self.run(root, ['checkout'] + files)
self.logOutput(out)
self.closeProcess(out)
def commit(self, paths, message=''):
""" Commit paths to the repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['commit', '-m', message] + files)
self.logOutput(out)
self.closeProcess(out)
def diff(self, paths):
""" Run the diff program on the given files """
root, files = self.splitFiles(paths)
out = self.run(root, ['diff'] + files)
self.closeProcess(out)
def makePatch(self, paths):
""" Make a patch of the given paths """
root, files = self.splitFiles(paths)
patches = list()
for fname in files:
out = self.run(root, ['diff', fname])
lines = [ line for line in out.stdout ]
self.closeProcess(out)
patches.append((fname, ''.join(lines)))
return patches
def history(self, paths, history=None):
""" Get the revision history of the given paths """
if history is None:
history = []
root, files = self.splitFiles(paths)
for fname in files:
out = self.run(root, ['log', fname])
logstart = False
if out:
for line in out.stdout:
self.log(line)
if line.strip().startswith('-----------'):
logstart = False
current = dict(path=fname, revision=None,
author=None, date=None, log=u'')
history.append(current)
elif line.startswith('message:'):
logstart = True
elif logstart:
current['log'] += DecodeString(line)
elif line.startswith('revno:'):
current['revision'] = DecodeString(line.split(None, 1)[-1].strip())
elif line.startswith('committer:'):
author = line.split(None, 1)[-1]
current['author'] = DecodeString(author.strip())
elif line.startswith('timestamp:'):
date = line.split(None, 1)[-1]
current['date'] = self.str2datetime(date.strip())
else:
pass
self.logOutput(out)
self.closeProcess(out)
return history
def str2datetime(self, tstamp):
""" Convert a timestamp string to a datetime object """
parts = tstamp.split()
ymd = [int(x.strip()) for x in parts[1].split('-')]
hms = [int(x.strip()) for x in parts[2].split(':')]
date = ymd + hms
return datetime.datetime(*date)
def remove(self, paths):
""" Recursively remove paths from repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['remove', '--force'] + files)
self.logOutput(out)
    def status(self, paths, recursive=False, status=None):
        """ Get BZR status information from given file/directory """
        if status is None:
            status = dict()
codes = {' ':'uptodate', 'N':'added', 'C':'conflict', 'D':'deleted',
'M':'modified'}
root, files = self.splitFiles(paths)
# -S gives output similar to svn which is a little easier to work with
out = self.run(root, ['status', '-S'] + files)
repo = self.getRepository(paths[0])
relpath = root.replace(repo, '', 1).lstrip(os.sep)
unknown = list()
if out:
for line in out.stdout:
self.log(line)
txt = line.lstrip(' +-')
# Split the status code and relative file path
code, fname = txt.split(None, 1)
fname = fname.replace(u'/', os.sep).strip().rstrip(os.sep)
fname = fname.replace(relpath, '', 1).lstrip(os.sep)
code = code.rstrip('*')
# Skip unknown files
if code == '?':
unknown.append(fname)
continue
# Get the absolute file path
current = dict()
try:
current['status'] = codes[code]
status[fname] = current
except KeyError:
pass
# Find up to date files
unknown += status.keys()
for path in os.listdir(root):
if path not in unknown:
status[path] = dict(status='uptodate')
self.logOutput(out)
return status
def update(self, paths):
""" Recursively update paths """
root, files = self.splitFiles(paths)
out = self.run(root, ['update'] + files)
self.logOutput(out)
def revert(self, paths):
""" Recursively revert paths to repository version """
root, files = self.splitFiles(paths)
if not files:
files = ['.']
out = self.run(root, ['revert'] + files)
self.logOutput(out)
def fetch(self, paths, rev=None, date=None):
""" Fetch a copy of the paths' contents """
output = []
for path in paths:
if os.path.isdir(path):
continue
root, files = self.splitFiles(path)
options = []
if rev:
options.append('-r')
options.append(str(rev))
if date:
# Date format YYYY-MM-DD,HH:MM:SS
options.append('-r')
options.append('date:%s' % date)
out = self.run(root, ['cat'] + options + files)
if out:
output.append(out.stdout.read())
self.logOutput(out)
else:
output.append(None)
return output
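# Illustrative standalone use (outside the editor's plugin machinery); the
# checkout path is a placeholder:
#
#   bzr = BZR()
#   if bzr.isControlled('/path/to/branch/file.py'):
#       print bzr.status(['/path/to/branch/file.py'])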
|
gpl-2.0
| 5,564,929,580,805,322,000
| 34.942446
| 91
| 0.470877
| false
| 4.608856
| false
| false
| false
|
fafaschiavo/lol-api-webapp
|
lolapiwebapp/stock/views.py
|
1
|
17940
|
from pprint import pprint
from django.shortcuts import render
from django.http import HttpResponse
from django.conf import settings
from stock.models import Hero, mastery, Rune
import json, requests, grequests
# Create your procedures here.
def searchSummonerStats(summoner_id):
context = {}
    if type(summoner_id) != list:
        summoner_id = [summoner_id]
    urls = []
    for summoner in summoner_id:
        urls.append('https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.3/stats/by-summoner/'+ str(summoner) +'/summary?api_key=' + settings.LOL_API_KEY2)
rs = (grequests.get(u) for u in urls)
resp = grequests.map(rs)
    stat_success = 1
    for response in resp:
        values_json = json.loads(response.text)
        context[values_json['summonerId']] = values_json
        if response.status_code != 200:
            stat_success = 0
return (context, stat_success)
def searchSummonnerId(summoner_name):
context = {}
summoner_name = summoner_name.lower()
summoner_name = summoner_name.replace(" ", "")
url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.4/summoner/by-name/'+ summoner_name +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
if resp.status_code == 200:
data = json.loads(resp.text)
try:
context['success'] = 1
context['summonerName'] = summoner_name
context['summonerLevel'] = data[summoner_name]['summonerLevel']
context['id'] = data[summoner_name]['id']
context['profileIcon'] = data[summoner_name]['profileIconId']
return context
except KeyError, e:
context['success'] = 0
return context
else:
context['success'] = 0
return context
def searchSummonerName(summoner_id):
if type(summoner_id) != list:
id_list = str(summoner_id)
else:
id_list = ''
for summoner in summoner_id:
id_list = id_list + str(summoner) + ','
url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v1.4/summoner/'+ id_list +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
return data
def searchSummonerRank(summoner_id):
if type(summoner_id) != list:
id_list = str(summoner_id)
else:
id_list = ''
for summoner in summoner_id:
id_list = id_list + str(summoner) + ','
url = 'https://na.api.pvp.net/api/lol/'+ settings.LOL_REGION +'/v2.5/league/by-summoner/'+ id_list +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
return data
def searchSummonerChampionMastery(summoner_id, champion_id):
url = 'https://na.api.pvp.net/championmastery/location/'+ settings.LOL_PLATFORM_ID +'/player/'+ str(summoner_id) +'/champion/'+ str(champion_id) +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
try:
data = json.loads(resp.text)
except ValueError, e:
data = {}
data['championLevel'] = 0
return data
def searchTierImage(tier):
tier = tier.lower()
tier = tier.title()
    image_dict = {
'Unranked': 'http://s18.postimg.org/5t36g8pf9/unranked_1_92a5f4dfbb5ffab13f901c80a9d14384.png',
'Bronze': 'https://s3.amazonaws.com/f.cl.ly/items/3q1f0B2j1E0Y0a3P310V/Bronze.png',
'Silver': 'https://s3.amazonaws.com/f.cl.ly/items/0J253J1z3o1d2Z152M2b/Silver.png',
'Gold': 'https://s3.amazonaws.com/f.cl.ly/items/1Y360o3N261b020g0h1r/Gold.png',
'Platinum': 'https://s3.amazonaws.com/f.cl.ly/items/3F2j1u2d3f0w0l260m3E/Platinum.png',
'Diamond': 'https://s3.amazonaws.com/f.cl.ly/items/2X2F2r192B3K1j0p0n3d/Diamond.png',
'Master': 'https://s3.amazonaws.com/f.cl.ly/items/083C392i0t1p1a3h1C3i/Master.png',
'Challenger': 'https://s3.amazonaws.com/f.cl.ly/items/0K350Q2C0b0E0n043e0L/Challenger.png',
}
    return image_dict.get(tier, 'http://s18.postimg.org/5t36g8pf9/unranked_1_92a5f4dfbb5ffab13f901c80a9d14384.png')
def refreshRuneDatabase(request):
    context = {}
    # request the rune list from the riot API
url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/rune?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
    # delete all the existing runes so the new information can be added
old_runes = Rune.objects.all()
old_runes.delete()
for rune in data['data']:
rune_id_riot = data['data'][rune]['id']
rune_name = data['data'][rune]['name']
rune_description = data['data'][rune]['description'].encode('ascii', 'ignore')
rune_tier = data['data'][rune]['rune']['tier']
rune_type_data = data['data'][rune]['rune']['type']
rune_bonus = rune_description.split(' de')[0]
rune_honest_text = rune_description.split(rune_bonus)[1]
rune_honest_text = rune_honest_text.split(' (')[0]
try:
rune_bonus = rune_bonus.split('+')[1]
except:
rune_bonus = rune_bonus.split('-')[1]
try:
rune_is_percentage = rune_bonus.split('%')[1]
rune_bonus = rune_bonus.split('%')[0]
rune_is_percentage = 1
except:
rune_is_percentage = 0
# rune_bonus = rune_bonus.replace(' ', '')
rune_bonus = rune_bonus.split(' ')[0]
rune_bonus = rune_bonus.replace(',', '.')
rune_bonus = rune_bonus.replace(' ', '')
new_rune = Rune(id_riot = rune_id_riot, name = rune_name, description = rune_description, tier = rune_tier, rune_type = rune_type_data, bonus = float(rune_bonus), honest_text = rune_honest_text, is_percentage = rune_is_percentage)
new_rune.save()
return render(request, 'refresh-rune-database.html', context)
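# A minimal sketch of the bonus-extraction logic in refreshRuneDatabase,
# shown on a made-up description string; purely illustrative.
def _demo_parse_bonus(description):
    head = description.split(' de')[0]
    sign = '+' if '+' in head else '-'
    bonus = head.split(sign)[1]
    is_percentage = 1 if '%' in bonus else 0
    bonus = bonus.split('%')[0].split(' ')[0].replace(',', '.')
    return float(bonus), is_percentage
# _demo_parse_bonus('+5,3% de dano') -> (5.3, 1)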
def refreshMasteryDatabase(request):
    context = {}
# request the mastery list from the riot API
url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/mastery?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
# delete all the existing masteries so the new information can be added
old_masteries = mastery.objects.all()
old_masteries.delete()
for mastery_item in data['data']:
mastery_id_riot = data['data'][mastery_item]['id']
mastery_name = data['data'][mastery_item]['name']
mastery_description = data['data'][mastery_item]['description']
table_position = str(mastery_id_riot)[1]
for item in mastery_description:
mastery_description_single_var = item
new_mastery = mastery(id_riot = mastery_id_riot, name = mastery_name, description = mastery_description_single_var, position = table_position)
new_mastery.save()
return render(request, 'refresh-mastery-database.html', context)
def refreshChampionDatabase(request):
    context = {}
# request the champion list from the riot API
url = 'https://na.api.pvp.net/api/lol/static-data/'+ settings.LOL_REGION +'/v1.2/champion?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
# delete all the existing heroes so the new information can be added
old_heroes = Hero.objects.all()
old_heroes.delete()
for champion in data['data']:
champion_id_riot = data['data'][champion]['id']
champion_name = data['data'][champion]['name']
champion_title = data['data'][champion]['title']
champion_key = data['data'][champion]['key']
new_champion = Hero(id_riot = champion_id_riot, name = champion_name, title = champion_title, key = champion_key)
new_champion.save()
return render(request, 'refresh-champion-database.html', context)
# Create your views here.
def index(request):
context = {}
return render(request, 'index.html', context)
def getSummonerId(request):
context = {}
return render(request, 'getid.html', context)
def requestId(request):
#receive data from the template
template_form = request.POST['requestId']
    # Cast the posted value to a string; lowercasing and whitespace removal
    # happen inside searchSummonerId
    summoner_name = str(template_form)
    context = searchSummonerId(summoner_name)
return render(request, 'requestid.html', context)
def getmatchhistory(request):
context = {}
return render(request, 'getmatchhistory.html', context)
def requestmatchhistory(request):
#receive data from the template
template_form = request.POST['requestmatchhistory']
    # Cast the posted value to a string; lowercasing and whitespace removal
    # happen inside searchSummonerId
    summoner_name = str(template_form)
    summoner_info = searchSummonerId(summoner_name)
context = {}
context['summoner_name'] = summoner_name
try:
url = 'https://na.api.pvp.net/api/lol/' + settings.LOL_REGION + '/v2.2/matchlist/by-summoner/' + str(summoner_info['id']) + '?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
data = json.loads(resp.text)
context['header'] = []
context['header'].append('Lane')
context['header'].append('Champion')
context['header'].append('Season')
context['header'].append('Match ID')
context['header'].append('Duration')
context['matches'] = []
match_data_to_context = []
for match in data['matches']:
match_data_to_context = []
match_data_to_context.append(match['lane'])
champion_name = Hero.objects.filter(id_riot = match['champion'])
try:
match_data_to_context.append(champion_name[0].name)
except IndexError:
match_data_to_context.append('-')
match_data_to_context.append(match['season'])
match_data_to_context.append(match['matchId'])
match_data_to_context.append(match['timestamp'])
context['matches'].append(match_data_to_context)
return render(request, 'requestmatchhistory.html', context)
except KeyError:
context['success'] = 'false'
return render(request, 'requestmatchhistory.html', context)
def getcurrentgame(request):
context = {}
return render(request, 'getcurrentgame.html', context)
def requestcurrentgame(request):
#receive data from the template
template_form = request.POST['requestcurrentgame']
    # Cast the posted value to a string; lowercasing and whitespace removal
    # happen inside searchSummonerId
    summoner_name = str(template_form)
    summoner_info = searchSummonerId(summoner_name)
context = {}
context2 = {}
    # check if the player name was found in the lol database (1)
if summoner_info['success'] == 1:
url = 'https://na.api.pvp.net/observer-mode/rest/consumer/getSpectatorGameInfo/'+ settings.LOL_PLATFORM_ID +'/'+ str(summoner_info['id']) +'?api_key=' + settings.LOL_API_KEY
resp = requests.get(url=url)
# check if this player is currently in game (2)
if resp.status_code == 200:
data = json.loads(resp.text)
            data_formated = {}
#search for the participant names based on their IDs
players_ids_list = []
for player in data['participants']:
players_ids_list.append(player['summonerId'])
player_objects = searchSummonerName(players_ids_list)
player_ranks = searchSummonerRank(players_ids_list)
player_stats, stat_success = searchSummonerStats(players_ids_list)
# fill the data array with the name
for player in player_objects:
data_formated[player] ={}
data_formated[player]['name'] = player_objects[player]['name']
for player in data['participants']:
data_formated[str(player['summonerId'])]['side'] = player['teamId']
if stat_success == 1:
for stat in player_stats[int(player['summonerId'])]['playerStatSummaries']:
if stat['playerStatSummaryType'] == 'Unranked':
data_formated[str(player['summonerId'])]['wins'] = stat['wins']
# fill the data array with the tier
for player in player_ranks:
data_formated[player]['tier'] = player_ranks[player][0]['tier']
#fill the data array with the champion name
for player in data['participants']:
heroes_ids = player['championId']
champion = Hero.objects.filter(id_riot = heroes_ids)
data_formated[str(player['summonerId'])]['champion'] = champion[0].__str__()
# champion_name_process = champion[0].__str__()
# champion_name_process = champion_name_process.replace(' ', '')
# champion_name_process = champion_name_process.replace('.', '')
champion_name_process = champion[0].__key__()
data_formated[str(player['summonerId'])]['champion'] = '<span style="margin-left: 12px;"><img style="margin-right: 6px;" src="http://ddragon.leagueoflegends.com/cdn/6.6.1/img/champion/' + champion_name_process + '.png" class="rank--img tier-img"><a style="color: rgba(0,0,0,.87);" href="http://champion.gg/champion/' + champion_name_process + '">' + data_formated[str(player['summonerId'])]['champion'] + '</a><span>'
try:
data_formated[str(player['summonerId'])]['tier']
data_formated[str(player['summonerId'])]['tier'] = '<span style="margin-left: 12px;"><img style="margin-right: 2px;" src="'+ searchTierImage(data_formated[str(player['summonerId'])]['tier']) +'" class="rank--img tier-img">' + data_formated[str(player['summonerId'])]['tier'] + '<span>'
except:
data_formated[str(player['summonerId'])]['tier'] = 'UNRANKED'
data_formated[str(player['summonerId'])]['tier'] = '<span style="margin-left: 12px;"><img style="margin-right: 2px;" src="'+ searchTierImage(data_formated[str(player['summonerId'])]['tier']) +'" class="rank--img tier-img">' + data_formated[str(player['summonerId'])]['tier'] + '<span>'
mastery_set = {}
# fill the data array with the masteries stats
for player in data['participants']:
mastery_set[1] = 0
mastery_set[2] = 0
mastery_set[3] = 0
masteries = player['masteries']
for diff_mastery in masteries:
mastery_object = mastery.objects.get(id_riot = diff_mastery['masteryId'])
mastery_set[mastery_object.__position__()] = mastery_set[mastery_object.__position__()] + diff_mastery['rank']
data_formated[str(player['summonerId'])]['masteries'] = str(mastery_set[1]) + ' / ' + str(mastery_set[3]) + ' / ' +str(mastery_set[2])
context['header'] = []
context['header'].append('Champion')
context['header'].append('Name')
context['header'].append('Tier')
if stat_success == 1:
context['header'].append('Wins')
context['header'].append('Masteries')
context['players'] = []
player_data_to_context = []
for player in data_formated:
if data_formated[player]['side'] == 100:
player_data_to_context = []
player_data_to_context.append(data_formated[player]['champion'])
player_data_to_context.append(data_formated[player]['name'])
player_data_to_context.append(data_formated[player]['tier'])
if stat_success == 1:
player_data_to_context.append(data_formated[player]['wins'])
player_data_to_context.append(data_formated[player]['masteries'])
context['players'].append(player_data_to_context)
context2['header'] = []
context2['header'].append('Champion')
context2['header'].append('Name')
context2['header'].append('Tier')
if stat_success == 1:
context2['header'].append('Wins')
context2['header'].append('Masteries')
context2['players'] = []
player_data_to_context = []
for player in data_formated:
if data_formated[player]['side'] == 200:
player_data_to_context = []
player_data_to_context.append(data_formated[player]['champion'])
player_data_to_context.append(data_formated[player]['name'])
player_data_to_context.append(data_formated[player]['tier'])
if stat_success == 1:
player_data_to_context.append(data_formated[player]['wins'])
player_data_to_context.append(data_formated[player]['masteries'])
context2['players'].append(player_data_to_context)
return render(request, 'requestcurrentgame.html', {'context': context, 'context2': context2, 'summoner_name': summoner_name, 'summoner_info': summoner_info})
# check if this player is currently in game (2)
else:
return render(request, 'general-error.html', context)
    # check if the player name was found in the lol database (1)
else:
return render(request, 'general-error.html', context)
#settings.LOL_PLATFORM_ID
#str(summoner_info['id'])
#settings.LOL_API_KEY
#id do bazetinho 7523004
#id do fafis 454451
#id do leo 514850
|
gpl-3.0
| -8,273,637,906,974,307,000
| 39.046875
| 434
| 0.608696
| false
| 3.332714
| false
| false
| false
|
cfusting/arctic-browning
|
utilities/learning_data.py
|
1
|
4957
|
import os
import ntpath
import re
from functools import partial
import h5py
import design_matrix as dm
class LearningData:
DEFAULT_PREFIX = 'ARG'
CSV = '.csv'
HDF = '.hdf'
def __init__(self):
self.num_variables = None
self.num_observations = None
self.predictors = None
self.response = None
self.variable_names = None
self.unique_variable_prefixes = None
self.variable_type_indices = None
self.variable_dict = None
self.name = None
self.design_matrix = None
self.attributes = {}
self.meta_layers = {}
def from_file(self, file_name, header=False):
file_type = os.path.splitext(file_name)[1]
if file_type == self.HDF:
self.from_hdf(file_name)
elif file_type == self.CSV and header:
self.from_headed_csv(file_name)
elif file_type == self.CSV:
self.from_csv(file_name)
else:
raise ValueError("Bad file: " + file_name + ". File extension must be one of csv, hdf.")
def init_common(self, file_name):
self.name = os.path.splitext(ntpath.basename(file_name))[0]
self.predictors = self.design_matrix.predictors
self.response = self.design_matrix.response
self.num_observations, self.num_variables = self.predictors.shape
self.variable_names = self.design_matrix.variable_names
self.unique_variable_prefixes = get_unique_variable_prefixes(self.variable_names)
variable_groups = get_variable_groups(self.variable_names, self.unique_variable_prefixes)
self.variable_type_indices = get_variable_type_indices(variable_groups)
self.variable_dict = get_variable_dict(self.variable_names, self.DEFAULT_PREFIX)
def from_csv(self, csv_file):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_csv(csv_file)
self.init_common(csv_file)
def from_headed_csv(self, csv_file):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_headed_csv(csv_file)
self.init_common(csv_file)
def from_hdf(self, hdf_file):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_hdf(hdf_file)
self.init_common(hdf_file)
self.get_meta_layers(hdf_file)
self.get_layer_attributes(hdf_file, 'design_matrix')
def from_data(self, matrix, variable_names, name):
self.design_matrix = dm.DesignMatrix()
self.design_matrix.from_data(matrix, variable_names)
self.init_common(name)
def to_hdf(self, file_name):
self.design_matrix.to_hdf(file_name)
self.save_meta_layers(file_name)
self.save_layer_attributes(file_name, 'design_matrix')
def to_headed_csv(self, file_name):
self.design_matrix.to_headed_csv(file_name)
def get_meta_layers(self, file_name):
with h5py.File(file_name, 'r') as f:
layers = filter(lambda x: x != 'design_matrix', f.keys())
for layer in layers:
self.meta_layers[layer] = f[layer][:]
def save_meta_layers(self, file_name):
with h5py.File(file_name, 'r+') as f:
for k, v in self.meta_layers.items():
f.create_dataset(k, data=v)
def get_layer_attributes(self, file_name, layer):
with h5py.File(file_name, 'r') as f:
dset = f[layer]
for k, v in dset.attrs.iteritems():
self.attributes[k] = v
def save_layer_attributes(self, file_name, layer):
with h5py.File(file_name, 'r+') as f:
dset = f[layer]
for k, v in self.attributes.items():
dset.attrs[k] = v
def get_variable_dict(names, default_prefix):
args = [default_prefix + str(x) for x in range(0, len(names))]
return dict(zip(args, names))
def get_variable_type_indices(variable_groups):
indices = []
previous = 0
for i in variable_groups:
current = previous + len(i)
if len(i) != 0:
indices.append(current - 1)
previous = current
return indices
def get_unique_variable_prefixes(variable_names):
"""
Assumes the form prefixnumber.
:param variable_names:
:return:
"""
expr = re.compile('([a-zA-Z]+)')
def get_prefix(name, expression):
result = re.match(expression, name)
if result:
return result.group(1)
return ''
prefixes = map(partial(get_prefix, expression=expr), variable_names)
unique_prefixes = []
seen = []
for prefix in prefixes:
if prefix not in seen:
unique_prefixes.append(prefix)
seen.append(prefix)
return unique_prefixes
def get_variable_groups(variable_names, unique_prefixes):
variable_groups = []
for prefix in unique_prefixes:
variable_groups.append(filter(lambda x: prefix in x, variable_names))
return variable_groups
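# A minimal sketch of the prefix helpers above on made-up variable names;
# illustrative only.
def _demo_prefix_helpers():
    names = ['ndvi1', 'ndvi2', 'temp1']
    prefixes = get_unique_variable_prefixes(names)  # ['ndvi', 'temp']
    groups = get_variable_groups(names, prefixes)   # [['ndvi1', 'ndvi2'], ['temp1']]
    return get_variable_type_indices(groups)        # [1, 2]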
|
gpl-3.0
| 630,932,586,585,630,200
| 32.268456
| 100
| 0.617511
| false
| 3.573901
| false
| false
| false
|
nonemaw/Flask_nonemaw
|
app/science/views.py
|
1
|
2703
|
from flask import render_template, request, redirect, url_for
from flask_login import current_user, login_required
from bson import ObjectId
from . import science
from .compute import compute
from .forms import populate_form_from_instance, ComputeForm
from .. import db_s
from ..models_science import Compute
# http://hplgit.github.io/web4sciapps/doc/pub/._web4sa_flask015.html
@science.route('/', methods=['GET', 'POST'])
@login_required
def index_science():
result = None
form = ComputeForm(request.form)
if request.method == "POST" and form.validate():
        result = compute(form.A.data, form.b.data, form.w.data, form.T.data)
if current_user.is_authenticated:
            Compute(form.A.data, form.b.data, form.w.data, form.T.data,
                    form.resolution.data, result, current_user.id).insert_doc()
elif current_user.is_authenticated:
if db_s.Compute.count() > 0:
# get first item of cursor after sorting
latest = db_s.Compute.find({}).sort([('timestamp', -1)]).next()
result = latest.get('result')
form = populate_form_from_instance(latest)
return render_template("science/index.html", form=form, result=result,
user=current_user)
@science.route('/old')
@login_required
def old():
data = []
if current_user.is_authenticated:
instances = db_s.Compute.find({}).sort([('timestamp', -1)])
for instance_dict in instances:
form = populate_form_from_instance(instance_dict)
result = instance_dict.get('result')
if instance_dict.get('comments'):
comments = "<h3>Comments</h3>" + instance_dict.get('comments')
else:
comments = ''
data.append(
{'form': form, 'result': result,
'id': str(instance_dict.get('_id')),
'comments': comments})
return render_template("science/old.html", data=data)
# @science.route('/add_comment', methods=['GET', 'POST'])
# @login_required
# def add_comment():
# if request.method == 'POST' and current_user.is_authenticated():
# instance = user.Compute.order_by('-id').first()
# instance.comments = request.form.get("comments", None)
# db.session.commit()
# return redirect(url_for('index'))
@science.route('/delete/<id>', methods=['GET', 'POST'])
@login_required
def delete_post(id):
if current_user.is_authenticated:
db_s.Compute.delete_one({'_id': ObjectId(id)})
return redirect(url_for('old'))
@science.route('/graph')
@login_required
def graph():
return render_template('science/graph.html')
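# A minimal sketch of the "latest result" lookup used in index_science();
# the collection and field names below are illustrative.
def _demo_latest_result(collection):
    if collection.count() > 0:
        latest = collection.find({}).sort([('timestamp', -1)]).next()
        return latest.get('result')
    return None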
|
mit
| -692,888,562,903,513,600
| 34.565789
| 79
| 0.616352
| false
| 3.637954
| false
| false
| false
|
IanLewis/homepage
|
homepage/runner.py
|
1
|
2657
|
# :coding=utf-8:
import os
import argparse
import django
from django.core.management import call_command
from waitress import serve
from homepage import __version__ as VERSION
from homepage.wsgi import application
def start(args):
"""
Starts the homepage application server.
"""
serve(application, host=args.addr, port=args.port)
def migrate(args):
"""
Runs migrations for the homepage server.
"""
call_command(
"migrate", fake=args.fake, interactive=False,
)
def createsuperuser(args):
"""
Creates a superuser.
"""
from django.contrib.auth.models import User
User.objects.create_superuser(
username=args.username, email=args.email, password=args.password,
)
def main():
os.environ["DJANGO_SETTINGS_MODULE"] = "homepage.settings"
django.setup()
parser = argparse.ArgumentParser(description="The Homepage App")
parser.add_argument(
"--version",
action="version",
version=VERSION,
help="Print the version number and exit.",
)
subparsers = parser.add_subparsers(help="Sub-command help")
# start
start_parser = subparsers.add_parser("start", help="Run the app server.")
start_parser.add_argument(
"--addr", default="0.0.0.0", help="Optional IP address to bind to"
)
start_parser.add_argument("--port", default=8000, type=int, help="Port to bind to")
# migrate
start_parser.set_defaults(func=start)
migrate_parser = subparsers.add_parser("migrate", help="Migrate the database.")
migrate_parser.add_argument(
"--fake",
action="store_true",
dest="fake",
default=False,
help="Mark migrations as run without actually " "running them.",
)
migrate_parser.set_defaults(func=migrate)
# createsuperuser
createsuperuser_parser = subparsers.add_parser(
"createsuperuser", help="Create a superuser."
)
createsuperuser_parser.add_argument(
"--username",
default="admin",
help="Specifies the username for the " "superuser. [Default: admin]",
)
createsuperuser_parser.add_argument(
"--email",
default="admin@example.com",
help="Specifies the email address for "
"the superuser. [Default: admin@example.com]",
)
createsuperuser_parser.add_argument(
"--password",
default="admin",
help="Specifies the password for the " "superuser. [Default: admin]",
)
createsuperuser_parser.set_defaults(func=createsuperuser)
args = parser.parse_args()
args.func(args)
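# A minimal sketch of the argparse sub-command dispatch pattern used in
# main() (set_defaults(func=...) then args.func(args)); names illustrative.
def _demo_dispatch(argv):
    parser = argparse.ArgumentParser()
    sub = parser.add_subparsers()
    hello = sub.add_parser("hello")
    hello.add_argument("--name", default="world")
    hello.set_defaults(func=lambda a: "hello " + a.name)
    args = parser.parse_args(argv)
    return args.func(args)
# _demo_dispatch(["hello", "--name", "docs"]) -> "hello docs"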
if __name__ == "__main__":
main()
|
mit
| 8,793,610,769,817,218,000
| 23.601852
| 87
| 0.641325
| false
| 4.04414
| false
| false
| false
|
GoeGaming/lutris
|
lutris/config.py
|
1
|
10735
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Handle the basic configuration of Lutris."""
import os
import sys
import yaml
import logging
from os.path import join
from gi.repository import Gio
from lutris import pga, settings, sysoptions
from lutris.runners import import_runner
from lutris.util.log import logger
def register_handler():
"""Register the lutris: protocol to open with the application."""
logger.debug("registering protocol")
executable = os.path.abspath(sys.argv[0])
base_key = "desktop.gnome.url-handlers.lutris"
schema_directory = "/usr/share/glib-2.0/schemas/"
schema_source = Gio.SettingsSchemaSource.new_from_directory(
schema_directory, None, True
)
schema = schema_source.lookup(base_key, True)
if schema:
settings = Gio.Settings.new(base_key)
settings.set_string('command', executable)
else:
logger.warning("Schema not installed, cannot register url-handler")
def check_config(force_wipe=False):
"""Check if initial configuration is correct."""
directories = [settings.CONFIG_DIR,
join(settings.CONFIG_DIR, "runners"),
join(settings.CONFIG_DIR, "games"),
settings.DATA_DIR,
join(settings.DATA_DIR, "covers"),
settings.ICON_PATH,
join(settings.DATA_DIR, "banners"),
join(settings.DATA_DIR, "runners"),
join(settings.DATA_DIR, "lib"),
settings.RUNTIME_DIR,
settings.CACHE_DIR,
join(settings.CACHE_DIR, "installer"),
join(settings.CACHE_DIR, "tmp")]
for directory in directories:
if not os.path.exists(directory):
logger.debug("creating directory %s" % directory)
os.makedirs(directory)
if force_wipe:
os.remove(settings.PGA_DB)
pga.syncdb()
def read_yaml_from_file(filename):
"""Read filename and return parsed yaml"""
if not filename or not os.path.exists(filename):
return {}
try:
content = file(filename, 'r').read()
yaml_content = yaml.load(content) or {}
except (yaml.scanner.ScannerError, yaml.parser.ParserError):
logger.error("error parsing file %s", filename)
yaml_content = {}
return yaml_content
def write_yaml_to_file(filepath, config):
if not filepath:
raise ValueError('Missing filepath')
yaml_config = yaml.dump(config, default_flow_style=False)
with open(filepath, "w") as filehandler:
filehandler.write(yaml_config)
class LutrisConfig(object):
"""Class where all the configuration handling happens.
Description
===========
    Lutris' configuration uses a cascading mechanism where
each higher, more specific level overrides the lower ones
The levels are (highest to lowest): `game`, `runner` and `system`.
    Each level has its own set of options (config section), available to and
    overridden by upper levels:
```
level | Config sections
-------|----------------------
game | system, runner, game
runner | system, runner
system | system
```
Example: if requesting runner options at game level, their returned value
will be from the game level config if it's set at this level; if not it
will be the value from runner level if available; and if not, the default
value set in the runner's module, or None.
The config levels are stored in separate YAML format text files.
Usage
=====
The config level will be auto set depending on what you pass to __init__:
    - For game level, pass game slug and optionally runner_slug (better performance)
- For runner level, pass runner_slug
- For system level, pass nothing
If need be, you can pass the level manually.
To read, use the config sections dicts: game_config, runner_config and
system_config.
To write, modify the relevant `raw_XXXX_config` section dict, then run
`save()`.
"""
def __init__(self, runner_slug=None, game_slug=None, level=None):
self.game_slug = game_slug
self.runner_slug = runner_slug
if game_slug and not runner_slug:
self.runner_slug = pga.get_game_by_slug(game_slug).get('runner')
# Cascaded config sections (for reading)
self.game_config = {}
self.runner_config = {}
self.system_config = {}
# Raw (non-cascaded) sections (for writing)
self.raw_game_config = {}
self.raw_runner_config = {}
self.raw_system_config = {}
self.raw_config = {}
# Set config level
self.level = level
if not level:
if game_slug:
self.level = 'game'
elif runner_slug:
self.level = 'runner'
else:
self.level = 'system'
# Init and load config files
self.game_level = {'system': {}, self.runner_slug: {}, 'game': {}}
self.runner_level = {'system': {}, self.runner_slug: {}}
self.system_level = {'system': {}}
self.game_level.update(read_yaml_from_file(self.game_config_path))
self.runner_level.update(read_yaml_from_file(self.runner_config_path))
self.system_level.update(read_yaml_from_file(self.system_config_path))
self.update_cascaded_config()
self.update_raw_config()
@property
def system_config_path(self):
return os.path.join(settings.CONFIG_DIR, "system.yml")
@property
def runner_config_path(self):
if not self.runner_slug:
return
return os.path.join(settings.CONFIG_DIR, "runners/%s.yml" %
self.runner_slug)
@property
def game_config_path(self):
if not self.game_slug:
return
return os.path.join(settings.CONFIG_DIR, "games/%s.yml" %
self.game_slug)
def update_cascaded_config(self):
if self.system_level.get('system') is None:
self.system_level['system'] = {}
self.system_config.clear()
self.system_config.update(self.get_defaults('system'))
self.system_config.update(self.system_level.get('system'))
if self.level in ['runner', 'game'] and self.runner_slug:
if self.runner_level.get(self.runner_slug) is None:
self.runner_level[self.runner_slug] = {}
if self.runner_level.get('system') is None:
self.runner_level['system'] = {}
self.runner_config.clear()
self.runner_config.update(self.get_defaults('runner'))
self.runner_config.update(self.runner_level.get(self.runner_slug))
self.system_config.update(self.runner_level.get('system'))
if self.level == 'game' and self.runner_slug:
if self.game_level.get('game') is None:
self.game_level['game'] = {}
if self.game_level.get(self.runner_slug) is None:
self.game_level[self.runner_slug] = {}
if self.game_level.get('system') is None:
self.game_level['system'] = {}
self.game_config.clear()
self.game_config.update(self.get_defaults('game'))
self.game_config.update(self.game_level.get('game'))
self.runner_config.update(self.game_level.get(self.runner_slug))
self.system_config.update(self.game_level.get('system'))
def update_raw_config(self):
# Select the right level of config
if self.level == 'game':
raw_config = self.game_level
elif self.level == 'runner':
raw_config = self.runner_level
else:
raw_config = self.system_level
# Load config sections
self.raw_system_config = raw_config['system']
if self.level in ['runner', 'game']:
self.raw_runner_config = raw_config[self.runner_slug]
if self.level == 'game':
self.raw_game_config = raw_config['game']
self.raw_config = raw_config
def remove(self, game=None):
"""Delete the configuration file from disk."""
if game is None:
game = self.game_slug
logging.debug("removing config for %s", game)
if os.path.exists(self.game_config_path):
os.remove(self.game_config_path)
else:
logger.debug("No config file at %s" % self.game_config_path)
def save(self):
"""Save configuration file according to its type"""
if self.level == "system":
config = self.system_level
config_path = self.system_config_path
elif self.level == "runner":
config = self.runner_level
config_path = self.runner_config_path
elif self.level == "game":
config = self.game_level
config_path = self.game_config_path
else:
raise ValueError("Invalid config level '%s'" % self.level)
write_yaml_to_file(config_path, config)
self.update_cascaded_config()
def get_defaults(self, options_type):
"""Return a dict of options' default value."""
options_dict = self.options_as_dict(options_type)
defaults = {}
for option, params in options_dict.iteritems():
if 'default' in params:
defaults[option] = params['default']
return defaults
def options_as_dict(self, options_type):
"""Convert the option list to a dict with option name as keys"""
options = {}
runner = (import_runner(self.runner_slug)()
if self.runner_slug
else None)
if options_type == 'system':
options = (sysoptions.with_runner_overrides(runner)
if runner
else sysoptions.system_options)
elif options_type == 'runner' and runner:
options = runner.runner_options
elif options_type == 'game' and runner:
options = runner.game_options
return dict((opt['option'], opt) for opt in options)
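# A minimal sketch of the cascading merge described in LutrisConfig's
# docstring, using plain dicts; illustrative only.
def _demo_cascade(system_opts, runner_opts, game_opts):
    merged = {}
    for level in (system_opts, runner_opts, game_opts):  # lowest to highest priority
        merged.update(level)
    return merged
# _demo_cascade({'resolution': 'auto'}, {'resolution': '1080p'}, {})
#   -> {'resolution': '1080p'}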
|
gpl-3.0
| 1,786,690,622,427,660,800
| 36.274306
| 78
| 0.606148
| false
| 3.993676
| true
| false
| false
|
geophysics/mtpy
|
mtpy/uofa/bayesian1d.py
|
1
|
2126
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on 31.07.2013
@author: LK@UofA
mtpy/uofa/bayesian1d.py
Module for handling the UofA Bayesian 1D inversion/modelling code.
"""
import os
import sys
import os.path as op
import mtpy.utils.filehandling as MTfh
import mtpy.core.edi as EDI
import mtpy.utils.exceptions as MTex
import numpy as np
def generate_input_file(edifilename, outputdir=None):
eo = EDI.Edi()
eo.readfile(edifilename)
filebase = op.splitext(op.split(edifilename)[-1])[0]
outfilename1 = '{0}_bayesian1d_z.in'.format(filebase)
outfilename2 = '{0}_bayesian1d_zvar.in'.format(filebase)
outdir = op.split(edifilename)[0]
if outputdir is not None:
try:
if not op.isdir(outputdir):
os.makedirs(outputdir)
outdir = outputdir
except:
pass
outfn1 = op.join(outdir,outfilename1)
outfn2 = op.join(outdir,outfilename2)
outfn1 = MTfh.make_unique_filename(outfn1)
outfn2 = MTfh.make_unique_filename(outfn2)
freqs = eo.freq
z_array = eo.Z.z
zerr_array = eo.Z.zerr
if len(freqs) != len(z_array):
raise MTex.MTpyError_edi_file('ERROR in Edi file {0} - number of '\
'freqs different from length of Z array'.format(eo.filename))
sorting = np.argsort(freqs)
outstring1 = ''
outstring2 = ''
for idx in sorting:
z = z_array[idx]
zerr = zerr_array[idx]
f = freqs[idx]
outstring1 += '{0}\t'.format(f)
outstring2 += '{0}\t'.format(f)
        for i in np.arange(2):
            for j in np.arange(2):
                # for i, j in {0, 1} this indexing reduces to element (i, j):
                # i%2 == i and (j+1)/2 == j under Python 2 integer division
                if np.imag(z[i%2,(j+1)/2]) < 0:
z_string = '{0}-{1}i'.format(np.real(z[i%2,(j+1)/2]),
np.abs(np.imag(z[i%2,(j+1)/2])))
else:
z_string = '{0}+{1}i'.format(np.real(z[i%2,(j+1)/2]),
np.imag(z[i%2,(j+1)/2]))
zerr_string = '{0}'.format(zerr[i%2,(j+1)/2])
outstring1 += '{0}\t'.format(z_string)
outstring2 += '{0}\t'.format(zerr_string)
outstring1 = outstring1.rstrip() + '\n'
outstring2 = outstring2.rstrip() + '\n'
Fout1 = open(outfn1,'w')
Fout2 = open(outfn2,'w')
Fout1.write(outstring1.expandtabs(4))
Fout2.write(outstring2.expandtabs(4))
Fout1.close()
Fout2.close()
return outfn1,outfn2
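# A minimal sketch of the complex-number formatting used above;
# illustrative only.
def _demo_format_complex(z):
    if np.imag(z) < 0:
        return '{0}-{1}i'.format(np.real(z), np.abs(np.imag(z)))
    return '{0}+{1}i'.format(np.real(z), np.imag(z))
# _demo_format_complex(1 - 2j) -> '1.0-2.0i'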
|
gpl-3.0
| 2,121,404,373,755,960,000
| 20.049505
| 69
| 0.648636
| false
| 2.421412
| false
| false
| false
|
di/vladiate
|
vladiate/inputs.py
|
1
|
2379
|
import io
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from vladiate.exceptions import MissingExtraException
class VladInput(object):
""" A generic input class """
def __init__(self):
raise NotImplementedError
def open(self):
raise NotImplementedError
def __repr__(self):
raise NotImplementedError
class LocalFile(VladInput):
""" Read from a local file path """
def __init__(self, filename):
self.filename = filename
def open(self):
with open(self.filename, "r") as f:
return f.readlines()
def __repr__(self):
return "{}('{}')".format(self.__class__.__name__, self.filename)
class S3File(VladInput):
""" Read from a file in S3 """
def __init__(self, path=None, bucket=None, key=None):
try:
import boto # noqa
self.boto = boto
except ImportError:
# 2.7 workaround, should just be `raise Exception() from None`
exc = MissingExtraException()
exc.__context__ = None
raise exc
if path and not any((bucket, key)):
self.path = path
parse_result = urlparse(path)
self.bucket = parse_result.netloc
self.key = parse_result.path
elif all((bucket, key)):
self.bucket = bucket
self.key = key
self.path = "s3://{}{}"
else:
raise ValueError(
"Either 'path' argument or 'bucket' and 'key' argument must " "be set."
)
def open(self):
s3 = self.boto.connect_s3()
bucket = s3.get_bucket(self.bucket)
key = bucket.new_key(self.key)
contents = key.get_contents_as_string()
ret = io.BytesIO(bytes(contents))
return ret
def __repr__(self):
return "{}('{}')".format(self.__class__.__name__, self.path)
class String(VladInput):
""" Read a file from a string """
def __init__(self, string_input=None, string_io=None):
self.string_io = string_io if string_io else StringIO(string_input)
def open(self):
return self.string_io
def __repr__(self):
return "{}('{}')".format(self.__class__.__name__, "...")
|
mit
| 376,685,929,278,033,800
| 24.858696
| 87
| 0.565784
| false
| 4.087629
| false
| false
| false
|
dagon666/napi
|
tests/integration_tests/napi/scpmocker.py
|
1
|
1963
|
import os
import subprocess
class ScpMocker(object):
"""
This class interfaces to scpmocker - a programmable command mock.
"""
def __init__(self, scpMockerPath, sandboxPath):
self.scpMockerPath = scpMockerPath
self.sandboxPath = sandboxPath
self.binPath = os.path.join(self.sandboxPath, 'bin')
self.dbPath = os.path.join(self.sandboxPath, 'db')
def __enter__(self):
os.mkdir(self.binPath)
os.mkdir(self.dbPath)
self.envOrig = os.environ.copy()
os.environ["PATH"] = ':'.join((self.binPath, os.environ["PATH"]))
os.environ["SCPMOCKER_BIN_PATH"] = self.binPath
os.environ["SCPMOCKER_DB_PATH"] = self.dbPath
return self
def __exit__(self, *args):
os.environ.clear()
os.environ.update(self.envOrig)
def getPath(self, cmd):
return os.path.join(self.binPath, cmd)
def patchCmd(self, cmd):
cmdPath = self.getPath(cmd)
os.symlink(self.scpMockerPath, cmdPath)
def getCallCount(self, cmd):
inv = [ self.scpMockerPath, '-c', cmd, 'status', '-C' ]
output = subprocess.check_output(inv).strip()
return int(output.strip())
def getCallArgs(self, cmd, n):
inv = [ self.scpMockerPath, '-c', cmd, 'status', '-A', str(n) ]
output = subprocess.check_output(inv).strip()
return output
def program(self, cmd, stdoutStr = "", exitStatus = 0, n = 0):
inv = [ self.scpMockerPath, '-c', cmd, 'program',
'-e', str(exitStatus),
'-s', stdoutStr,
]
if n == 0:
inv.append('-a')
subprocess.call(inv)
else:
for _ in xrange(n):
subprocess.call(inv)
def unPatchCmd(self, cmd):
cmdPath = self.getPath(cmd)
try:
os.unlink(cmdPath)
        except OSError:
            # the command may never have been patched; ignore
            # TODO add logging?
            pass
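# A minimal sketch of driving ScpMocker from a test; the mocked command
# and return-value checks are illustrative only.
def _demo_usage(scpMockerPath, sandboxPath):
    with ScpMocker(scpMockerPath, sandboxPath) as scpm:
        scpm.patchCmd('curl')
        scpm.program('curl', stdoutStr='ok', exitStatus=0)
        # ...run the code under test here...
        return scpm.getCallCount('curl')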
|
gpl-3.0
| -5,285,952,767,629,290,000
| 27.449275
| 73
| 0.55731
| false
| 3.543321
| false
| false
| false
|
esrf-bliss/Lima-camera-andor3
|
tango/Andor3.py
|
1
|
13277
|
############################################################################
# This file is part of LImA, a Library for Image Acquisition
#
# Copyright (C) : 2009-2014
# European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
############################################################################
#=============================================================================
#
# file : Andor3.py
#
# description : Python source for the Andor3 and its commands.
# The class is derived from Device. It represents the
# CORBA servant object which will be accessed from the
# network. All commands which can be executed on the
# Pilatus are implemented in this file.
#
# project : TANGO Device Server
#
# copyleft : European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
#=============================================================================
# (c) - Bliss - ESRF
#=============================================================================
#
import PyTango
import sys, types, os, time
from Lima import Core
from Lima import Andor3 as Andor3Module
# import some useful helpers to create direct mapping between tango attributes
# and Lima interfaces.
from Lima.Server import AttrHelper
#==================================================================
# Andor3 Class Description:
#
#
#==================================================================
class Andor3(PyTango.Device_4Impl):
#--------- Add your global variables here --------------------------
Core.DEB_CLASS(Core.DebModApplication, 'LimaCCDs')
#------------------------------------------------------------------
# Device constructor
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def __init__(self,cl, name):
PyTango.Device_4Impl.__init__(self,cl,name)
# dictionnaries to be used with AttrHelper.get_attr_4u
self.__AdcGain = {'B11_HI_GAIN': _Andor3Camera.b11_hi_gain,
'B11_LOW_GAIN': _Andor3Camera.b11_low_gain,
'B16_LH_GAIN': _Andor3Camera.b16_lh_gain,
}
self.__AdcRate = {'MHZ10': _Andor3Camera.MHz10,
'MHZ100': _Andor3Camera.MHz100,
'MHZ200': _Andor3Camera.MHz200,
'MHZ280': _Andor3Camera.MHz280,
}
self.__Cooler = {'ON': True,
'OFF': False}
self.__FanSpeed = {'OFF': _Andor3Camera.Off,
'LOW': _Andor3Camera.Low,
'HIGH': _Andor3Camera.On,
}
self.__ElectronicShutterMode = {'ROLLING': _Andor3Camera.Rolling,
'GLOBAL': _Andor3Camera.Global,
}
self.__Overlap = {'ON': True,
'OFF': False}
self.__SpuriousNoiseFilter = {'ON': True,
'OFF': False}
self.__Attribute2FunctionBase = {'adc_gain': 'SimpleGain',
'adc_rate': 'AdcRate',
'temperature': 'Temperature',
'temperature_sp': 'TemperatureSP',
'cooler': 'Cooler',
'cooling_status': 'CoolingStatus',
'fan_speed': 'FanSpeed',
'electronic_shutter_mode': 'ElectronicShutterMode',
'frame_rate': 'FrameRate',
'max_frame_rate_transfer': 'MaxFrameRateTransfer',
'readout_time': 'ReadoutTime',
'overlap': 'Overlap',
'spurious_noise_filter': 'SpuriousNoiseFilter',
}
self.init_device()
#------------------------------------------------------------------
# Device destructor
#------------------------------------------------------------------
def delete_device(self):
pass
#------------------------------------------------------------------
# Device initialization
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def init_device(self):
self.set_state(PyTango.DevState.ON)
# Load the properties
self.get_device_properties(self.get_device_class())
# Apply properties if any
if self.adc_gain:
_Andor3Interface.setAdcGain(self.__AdcGain[self.adc_gain])
if self.adc_rate:
_Andor3Interface.setAdcRate(self.__AdcRate[self.adc_rate])
if self.temperature_sp:
_Andor3Camera.setTemperatureSP(self.temperature_sp)
if self.cooler:
_Andor3Camera.setCooler(self.__Cooler[self.cooler])
#==================================================================
#
# Andor3 read/write attribute methods
#
#==================================================================
def __getattr__(self,name) :
try:
return AttrHelper.get_attr_4u(self, name, _Andor3Interface)
except:
return AttrHelper.get_attr_4u(self, name, _Andor3Camera)
#==================================================================
#
# Andor3 command methods
#
#==================================================================
#------------------------------------------------------------------
# getAttrStringValueList command:
#
# Description: return a list of authorized values if any
# argout: DevVarStringArray
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def getAttrStringValueList(self, attr_name):
return AttrHelper.get_attr_string_value_list(self, attr_name)
#==================================================================
#
# Andor3 class definition
#
#==================================================================
class Andor3Class(PyTango.DeviceClass):
# Class Properties
class_property_list = {
}
# Device Properties
device_property_list = {
'config_path':
[PyTango.DevString,
'configuration path directory', []],
'camera_number':
[PyTango.DevShort,
'Camera number', []],
'adc_gain':
[PyTango.DevString,
'Adc Gain', []],
'adc_rate':
[PyTango.DevString,
'Adc readout rate', []],
'temperature_sp':
[PyTango.DevShort,
'Temperature set point in Celsius', []],
'cooler':
[PyTango.DevString,
'Start or stop the cooler ("ON"/"OFF")', []],
}
# Command definitions
cmd_list = {
'getAttrStringValueList':
[[PyTango.DevString, "Attribute name"],
[PyTango.DevVarStringArray, "Authorized String value list"]]
}
# Attribute definitions
attr_list = {
'temperature_sp':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Set/get the temperature set-point',
'unit': 'C',
'format': '%f',
'description': 'in Celsius',
}],
'temperature':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'get the current temperature sensor',
'unit': 'C',
'format': '%f',
'description': 'in Celsius',
}],
'cooler':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Start/stop the cooler',
'unit': 'N/A',
'format': '',
'description': 'OFF or ON',
}],
'cooling_status':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Fast trigger mode, see manual for usage',
'unit': 'N/A',
'format': '',
'description': '0-OFF / 1-ON',
}],
'adc_gain':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'ADC Gain',
'unit': 'N/A',
'format': '',
'description': 'ADC Gain which can be apply to the preamplifier',
}],
'adc_rate':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label': 'ADC Rate',
'unit': 'N/A',
'format': '',
'description': 'ADC Readout Rate',
}],
'electronic_shutter_mode':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Electronic Shutter Mode',
'unit': 'N/A',
'format': '',
'description': 'Electronic shutter mode, Rolling or Global',
}],
'fan_speed':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Fan speed',
'unit': 'N/A',
'format': '',
'description': 'Fan speed, off, low or High',
}],
'frame_rate':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Frame rate',
'unit': 'Hz',
'format': '%f',
'description': 'the rate at which frames are delivered to the use',
}],
'max_frame_rate_transfer':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Maximum frame rate transfer',
'unit': 'byte per sec.',
'format': '%f',
'description': 'Returns the maximum sustainable transfer rate of the interface for the current shutter mode and ROI',
}],
'readout_time':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ],
{
'label':'Readout time',
'unit': 'sec',
'format': '%f',
'description': 'return the time to readout data from the sensor',
}],
'overlap':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':' Enable/Disable overlap mode',
'unit': 'N/A',
'format': '',
'description': 'OFF or ON',
}],
'spurious_noise_filter':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':'Enable/Disable spurious noise filter',
'unit': 'N/A',
'format': '',
'description': 'OFF or ON',
}],
}
#------------------------------------------------------------------
# Andor3Class Constructor
#------------------------------------------------------------------
def __init__(self, name):
PyTango.DeviceClass.__init__(self, name)
self.set_type(name)
#----------------------------------------------------------------------------
# Plugins
#----------------------------------------------------------------------------
from Lima import Andor3 as Andor3Acq
_Andor3Camera = None
_Andor3Interface = None
def get_control(config_path='/users/blissadm/local/Andor3/andor/bitflow', camera_number='0', **keys):
#properties are passed here as string
global _Andor3Camera
global _Andor3Interface
if _Andor3Camera is None:
print ('\n\nStarting and configuring the Andor3 camera ...')
_Andor3Camera = Andor3Acq.Camera(config_path, int(camera_number))
_Andor3Interface = Andor3Acq.Interface(_Andor3Camera)
print ('\n\nAndor3 Camera #%s (%s:%s) is started'%(camera_number,_Andor3Camera.getDetectorType(),_Andor3Camera.getDetectorModel()))
return Core.CtControl(_Andor3Interface)
def get_tango_specific_class_n_device():
return Andor3Class,Andor3
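# A minimal sketch of the module-level singleton pattern get_control()
# relies on; names are illustrative only.
_demo_singleton = None
def _demo_get_singleton(factory):
    global _demo_singleton
    if _demo_singleton is None:
        _demo_singleton = factory()  # expensive construction runs once
    return _demo_singleton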
|
gpl-3.0
| -8,108,087,734,343,641,000
| 34.031662
| 139
| 0.442269
| false
| 4.386191
| false
| false
| false
|
gadeleon/chromatic_circle
|
questions.py
|
1
|
1751
|
'''
Question-generation functions
'''
import random
def degree(note, scale, degree):
'''
What is the <Number> of <Note> <Scale>?
'''
try:
answer = raw_input('What is the {} of {} {}: '.format(str(degree + 1), note, scale.capitalize()))
return answer, degree
except KeyboardInterrupt:
print '\nQUITTER!'
raise SystemExit
def grade_degree(key, note, scale):
deg = random.randint(0, 6)
answer = key[deg]
correct = False
while not correct:
my_answer, my_degree = degree(note, scale, deg)
if my_answer == answer:
print 'You Done got it Right!'
correct = True
else:
continue
def triad(note, scale):
'''
What are the notes in a <NOTE> <Scale> triad?
'''
try:
answer = raw_input('What notes are in a {} {} triad: '.format(note, scale.capitalize()))
return answer
except KeyboardInterrupt:
print '\nQUITTER!'
raise SystemExit
def grade_triad(key, note, scale):
correct = False
answer_triad = [key[0], key[2], key[4]]
my_triad = []
while not correct:
answer = triad(note, scale)
if ',' in answer:
my_triad = answer.split(', ')
print my_triad
if len(my_triad) != 3:
my_triad = answer.split(',')
else:
my_triad = answer.split(' ')
if len(my_triad) != 3:
print 'Answer with commas or spaces between notes'
raise SystemExit
validation = [i for i, x in zip(answer_triad, my_triad) if i == x]
if len(validation) == 3:
print 'You Done got it Right! '
correct = True
else:
continue
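# A minimal sketch of the zip-based answer check in grade_triad;
# the note values are illustrative only.
def _demo_matches(expected, given):
    return [i for i, x in zip(expected, given) if i == x]
# len(_demo_matches(['C', 'E', 'G'], ['C', 'E', 'G'])) == 3 -> correct answer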
|
mit
| -1,124,495,998,507,797,200
| 25.530303
| 105
| 0.537978
| false
| 3.848352
| false
| false
| false
|
jupyter-widgets/ipywidgets
|
ipywidgets/widgets/widget_media.py
|
1
|
7783
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import mimetypes
from .widget_core import CoreWidget
from .domwidget import DOMWidget
from .valuewidget import ValueWidget
from .widget import register
from traitlets import Unicode, CUnicode, Bool
from .trait_types import CByteMemoryView
@register
class _Media(DOMWidget, ValueWidget, CoreWidget):
"""Base class for Image, Audio and Video widgets.
The `value` of this widget accepts a byte string. The byte string is the
raw data that you want the browser to display.
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
# Define the custom state properties to sync with the front-end
value = CByteMemoryView(help="The media data as a memory view of bytes.").tag(sync=True)
@classmethod
def _from_file(cls, tag, filename, **kwargs):
"""
Create an :class:`Media` from a local file.
Parameters
----------
filename: str
The location of a file to read into the value from disk.
**kwargs:
The keyword arguments for `Media`
Returns an `Media` with the value set from the filename.
"""
value = cls._load_file_value(filename)
if 'format' not in kwargs:
format = cls._guess_format(tag, filename)
if format is not None:
kwargs['format'] = format
return cls(value=value, **kwargs)
@classmethod
def from_url(cls, url, **kwargs):
"""
Create an :class:`Media` from a URL.
:code:`Media.from_url(url)` is equivalent to:
.. code-block: python
med = Media(value=url, format='url')
But both unicode and bytes arguments are allowed for ``url``.
Parameters
----------
url: [str, bytes]
The location of a URL to load.
"""
if isinstance(url, str):
# If str, it needs to be encoded to bytes
url = url.encode('utf-8')
return cls(value=url, format='url', **kwargs)
def set_value_from_file(self, filename):
"""
Convenience method for reading a file into `value`.
Parameters
----------
filename: str
The location of a file to read into value from disk.
"""
value = self._load_file_value(filename)
self.value = value
@classmethod
def _load_file_value(cls, filename):
if getattr(filename, 'read', None) is not None:
return filename.read()
else:
with open(filename, 'rb') as f:
return f.read()
@classmethod
def _guess_format(cls, tag, filename):
# file objects may have a .name parameter
name = getattr(filename, 'name', None)
name = name or filename
try:
mtype, _ = mimetypes.guess_type(name)
if not mtype.startswith('{}/'.format(tag)):
return None
return mtype[len('{}/'.format(tag)):]
except Exception:
return None
def _get_repr(self, cls):
# Truncate the value in the repr, since it will
# typically be very, very large.
class_name = self.__class__.__name__
# Return value first like a ValueWidget
signature = []
sig_value = 'value={!r}'.format(self.value[:40].tobytes())
if self.value.nbytes > 40:
sig_value = sig_value[:-1]+"..."+sig_value[-1]
signature.append(sig_value)
for key in super(cls, self)._repr_keys():
if key == 'value':
continue
value = str(getattr(self, key))
signature.append('{}={!r}'.format(key, value))
signature = ', '.join(signature)
return '{}({})'.format(class_name, signature)
@register
class Image(_Media):
"""Displays an image as a widget.
The `value` of this widget accepts a byte string. The byte string is the
raw image data that you want the browser to display. You can explicitly
define the format of the byte string using the `format` trait (which
defaults to "png").
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
_view_name = Unicode('ImageView').tag(sync=True)
_model_name = Unicode('ImageModel').tag(sync=True)
# Define the custom state properties to sync with the front-end
format = Unicode('png', help="The format of the image.").tag(sync=True)
width = CUnicode(help="Width of the image in pixels. Use layout.width "
"for styling the widget.").tag(sync=True)
height = CUnicode(help="Height of the image in pixels. Use layout.height "
"for styling the widget.").tag(sync=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@classmethod
def from_file(cls, filename, **kwargs):
return cls._from_file('image', filename, **kwargs)
def __repr__(self):
return self._get_repr(Image)
@register
class Video(_Media):
"""Displays a video as a widget.
The `value` of this widget accepts a byte string. The byte string is the
raw video data that you want the browser to display. You can explicitly
define the format of the byte string using the `format` trait (which
defaults to "mp4").
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
_view_name = Unicode('VideoView').tag(sync=True)
_model_name = Unicode('VideoModel').tag(sync=True)
# Define the custom state properties to sync with the front-end
format = Unicode('mp4', help="The format of the video.").tag(sync=True)
width = CUnicode(help="Width of the video in pixels.").tag(sync=True)
height = CUnicode(help="Height of the video in pixels.").tag(sync=True)
autoplay = Bool(True, help="When true, the video starts when it's displayed").tag(sync=True)
loop = Bool(True, help="When true, the video will start from the beginning after finishing").tag(sync=True)
controls = Bool(True, help="Specifies that video controls should be displayed (such as a play/pause button etc)").tag(sync=True)
@classmethod
def from_file(cls, filename, **kwargs):
return cls._from_file('video', filename, **kwargs)
def __repr__(self):
return self._get_repr(Video)
@register
class Audio(_Media):
"""Displays a audio as a widget.
The `value` of this widget accepts a byte string. The byte string is the
raw audio data that you want the browser to display. You can explicitly
define the format of the byte string using the `format` trait (which
defaults to "mp3").
If you pass `"url"` to the `"format"` trait, `value` will be interpreted
as a URL as bytes encoded in UTF-8.
"""
_view_name = Unicode('AudioView').tag(sync=True)
_model_name = Unicode('AudioModel').tag(sync=True)
# Define the custom state properties to sync with the front-end
format = Unicode('mp3', help="The format of the audio.").tag(sync=True)
autoplay = Bool(True, help="When true, the audio starts when it's displayed").tag(sync=True)
loop = Bool(True, help="When true, the audio will start from the beginning after finishing").tag(sync=True)
controls = Bool(True, help="Specifies that audio controls should be displayed (such as a play/pause button etc)").tag(sync=True)
@classmethod
def from_file(cls, filename, **kwargs):
return cls._from_file('audio', filename, **kwargs)
def __repr__(self):
return self._get_repr(Audio)
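# A minimal usage sketch, assuming the URL below exists; the byte string
# is only a PNG header stub, purely illustrative.
def _demo_widgets():
    img = Image(value=b'\x89PNG\r\n\x1a\n', format='png', width='100')
    vid = Video.from_url('https://example.com/clip.mp4')
    return img, vid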
|
bsd-3-clause
| -1,439,636,317,653,839,400
| 33.745536
| 132
| 0.620326
| false
| 4.009789
| false
| false
| false
|
BorgERP/borg-erp-6of3
|
l10n_hr/l10n_hr_account/account_invoice.py
|
1
|
8374
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012 Slobodni programi d.o.o. (<http://www.slobodni-programi.com>).
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from tools.translate import _
import poziv_na_broj as pnbr
from tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
import time
class account_invoice(osv.Model):
_inherit = "account.invoice"
def _get_reference_type(self, cursor, user, context=None):
"""Function used by the function field reference_type in order to initalise available Reference Types"""
res = super(account_invoice, self)._get_reference_type(cursor, user, context=context)
res.append(('pnbr', 'Poziv na br.(HR)'))
return res
def _get_default_reference_type(self, cr, uid, context=None):
if context is None:
context = {}
type_inv = context.get('type', 'out_invoice')
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        if user.company_id.country_id and user.company_id.country_id.code in ('HR',):
            if type_inv in ('out_invoice',):
return 'pnbr'
return 'none'
def _convert_ref(self, cr, uid, ref):
ref = super(account_invoice, self)._convert_ref(cr, uid, ref)
res = ''
for ch in ref:
res = res + (ch.isdigit() and ch or '')
return res
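    # Illustrative effect of the digit filter above (example values are
    # hypothetical): after the parent class has normalised the reference,
    # only digits survive, e.g. 'INV/2012/0042' reduces to '20120042'.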
_columns = {
'reference_type': fields.selection(_get_reference_type,
'Reference Type',
required=True, readonly=True,
states={'draft': [('readonly', False)]}),
'date_delivery': fields.date('Delivery Date', readonly=True,
states={'draft': [('readonly', False)]},
select=True,
help="Keep empty to use the current date"),
'supplier_number': fields.char('Supplier ref', size=32, select=True, readonly=True,
states={'draft': [('readonly', False)]}),
}
_defaults = {
'reference_type': _get_default_reference_type,
}
def copy(self, cr, uid, id, default=None, context=None):
default = default or {}
if 'date_delivery' not in default:
default.update({'date_delivery': False})
return super(account_invoice, self).copy(cr, uid, id, default, context)
def pnbr_get(self, cr, uid, inv_id, context=None):
invoice = self.browse(cr, uid, inv_id, context=context)
res = invoice.reference or ''
def getP1_P4data(what):
res = ""
if what == 'partner_code':
res = invoice.partner_id.code or invoice.partner_id.id
if what == 'partner_id':
res = str(invoice.partner_id.id)
if what == 'invoice_no':
res = invoice.number
if what == 'invoice_ym':
res = invoice.date_invoice[2:4] + invoice.date_invoice[5:7]
if what == 'delivery_ym':
res = invoice.date_delivery[2:4] + invoice.date_delivery[5:7]
return self._convert_ref(cr, uid, res)
if invoice.journal_id.model_pnbr and invoice.journal_id.model_pnbr > 'HR':
model = invoice.journal_id.model_pnbr
P1 = getP1_P4data(invoice.journal_id.P1_pnbr or '')
P2 = getP1_P4data(invoice.journal_id.P2_pnbr or '')
P3 = getP1_P4data(invoice.journal_id.P3_pnbr or '')
P4 = getP1_P4data(invoice.journal_id.P4_pnbr or '')
res = pnbr.reference_number_get(model, P1, P2, P3, P4)
return model + ' ' + res
# KGB Copy
def action_number(self, cr, uid, ids, context=None):
if context is None:
context = {}
#TODO: not correct fix but required a fresh values before reading it.
self.write(cr, uid, ids, {})
for obj_inv in self.browse(cr, uid, ids, context=context):
id = obj_inv.id
invtype = obj_inv.type
number = obj_inv.number
move_id = obj_inv.move_id and obj_inv.move_id.id or False
reference = obj_inv.reference or ''
self.write(cr, uid, id, {'internal_number': number}) # kgb ids?
            # Both branches of the original invtype check computed the same
            # value (the reference fallback was commented out upstream), so a
            # single assignment is equivalent.
            ref = self._convert_ref(cr, uid, number)
#KGB - start
if not obj_inv.date_invoice:
self.write(cr, uid, [id],
{'date_invoice': time.strftime(DEFAULT_SERVER_DATE_FORMAT)},
context=context)
# TODO: need to? self.action_date_assign( cr, uid, [id])
if not obj_inv.date_delivery: # mandatory in Croatia for services
self.write(cr, uid, [id], {'date_delivery': obj_inv.date_invoice}, context=context)
ref = self.pnbr_get(cr, uid, id, context)
self.write(cr, uid, id, {'reference': ref})
#KGB - end
cr.execute('UPDATE account_move SET ref=%s ' \
'WHERE id=%s -- AND (ref is null OR ref = \'\')',
(ref, move_id))
cr.execute('UPDATE account_move_line SET ref=%s ' \
'WHERE move_id=%s -- AND (ref is null OR ref = \'\')',
(ref, move_id))
cr.execute('UPDATE account_analytic_line SET ref=%s ' \
'FROM account_move_line ' \
'WHERE account_move_line.move_id = %s ' \
'AND account_analytic_line.move_id = account_move_line.id',
(ref, move_id))
for inv_id, name in self.name_get(cr, uid, [id]):
ctx = context.copy()
if obj_inv.type in ('out_invoice', 'out_refund'):
ctx = self.get_log_context(cr, uid, context=ctx)
message = _("Invoice '%s' is validated.") % name
self.log(cr, uid, inv_id, message, context=ctx)
return True
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
types = {
'out_invoice': 'IR: ', # KGB CI
'in_invoice': 'UR: ', # KGB SI
'out_refund': 'IO: ', # KGB OR
'in_refund': 'UO: ', # KGB SR
}
return [(r['id'], (r['number']) or types[r['type']] + (r['name'] or ''))
for r in self.read(cr, uid, ids, ['type', 'number', 'name'], context, load='_classic_write')]
def button_change_fiscal_position(self, cr, uid, ids, context=None):
if context is None:
context = {}
fpos_obj = self.pool.get('account.fiscal.position')
inv_line_obj = self.pool.get('account.invoice.line')
for inv in self.browse(cr, uid, ids):
for line in inv.invoice_line:
new_taxes = fpos_obj.map_tax(cr, uid, inv.fiscal_position, line.product_id.taxes_id)
inv_line_obj.write(cr, uid, [line.id], {'invoice_line_tax_id': [(6, 0, new_taxes)]})
return True
|
agpl-3.0
| -3,190,680,389,484,607,500
| 44.759563
| 112
| 0.524242
| false
| 3.884045
| false
| false
| false
|
jmpews/torweb
|
tests/test_blog_load_from_md.py
|
1
|
2367
|
# coding:utf-8
import sys, os
import os.path
sys.path.append(os.path.dirname(sys.path[0]))
from settings.config import config
from peewee import Model, MySQLDatabase
mysqldb = MySQLDatabase('',
user=config.BACKEND_MYSQL['user'],
password=config.BACKEND_MYSQL['password'],
host=config.BACKEND_MYSQL['host'],
port=config.BACKEND_MYSQL['port'])
from db.mysql_model.blog import BlogPostCategory, BlogPostLabel, BlogPost
md_path = './docs/articles'
def check_md_format(file_path):
fd = open(file_path)
md_info = {}
while True:
line = fd.readline().strip()
if len(line) == 0:
break
try:
i = line.index(':')
k = line[:i]
v = line[i+1:]
except:
fd.close()
return None
md_info[k.strip().lower()] = v.strip()
    # Check that the required fields are present
# Necessary Args: title, tags
# Optional Args: date, category, auth, slug
keys = md_info.keys()
if 'title' in keys and 'tags' in keys and 'slug' in keys:
md_info['content'] = fd.read(-1)
fd.close()
return md_info
else:
fd.close()
return None
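# Illustrative front-matter layout accepted by check_md_format (field names
# come from the parser above; the values are hypothetical). The header block
# ends at the first blank line; everything after it becomes the content:
#
#     Title: Hello World
#     Slug: hello-world
#     Tags: python, tornado
#     Category: Dev
#
#     Body of the article in Markdown...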
def convert_md_2_post(md_info):
category = md_info.get('category')
if not category:
category = 'UnClassified'
cate = BlogPostCategory.get_by_name(category)
post = BlogPost.create(title=md_info['title'],
category=cate,
slug=md_info['slug'],
content=md_info['content'])
BlogPostLabel.add_post_label(md_info['tags'], post)
def get_files(root_path):
files = os.listdir(root_path)
print(files)
for file_name in files:
_, suffix = os.path.splitext(file_name)
if suffix == '.md':
md_file_path = os.path.join(root_path, file_name)
md_info = check_md_format(md_file_path)
if md_info:
print(md_info['title'])
convert_md_2_post(md_info)
if __name__ == '__main__':
mysqldb.create_tables([BlogPostLabel, BlogPost, BlogPostCategory], safe=True)
t = BlogPostLabel.delete()
t.execute()
t = BlogPost.delete()
t.execute()
t = BlogPostCategory.delete()
t.execute()
get_files(md_path)
|
mit
| 728,075,483,062,490,800
| 27.670732
| 81
| 0.553382
| false
| 3.53003
| true
| false
| false
|
ageron/tensorflow
|
tensorflow/python/ops/custom_gradient.py
|
1
|
12977
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Decorator to overrides the gradient for a function."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import tape as tape_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.tf_export import tf_export
def copy_handle_data(source_t, target_t):
"""Copies HandleData for variant and resource type tensors if available.
The CppShapeInferenceResult::HandleData proto contains information about the
shapes and types of the element tensors of resource/variant type tensors.
We need to copy this across function boundaries, i.e., when capturing a
placeholder or when returning a function tensor as output. If we don't do this
the element tensors will have unknown shapes, e.g., if a TensorList variant
tensor is captured as a placeholder, elements popped from that list would have
unknown shape.
Args:
source_t: The tensor to copy HandleData from.
target_t: The tensor to copy HandleData to.
"""
if (target_t.dtype == dtypes.resource or
target_t.dtype == dtypes.variant):
if isinstance(source_t, ops.EagerTensor):
handle_data = source_t._handle_data # pylint: disable=protected-access
else:
handle_data = resource_variable_ops.get_resource_handle_data(source_t)
if (handle_data is not None
and handle_data.is_set
and handle_data.shape_and_type):
# pylint: disable=protected-access
pywrap_tensorflow.SetHandleShapeAndType(target_t.graph._c_graph,
target_t._as_tf_output(),
handle_data.SerializeToString())
# pylint: enable=protected-access
# Ensure that shapes and dtypes are propagated.
shapes, types = zip(*[(pair.shape, pair.dtype)
for pair in handle_data.shape_and_type])
ranks = [len(s.dim) if not s.unknown_rank else -1 for s in shapes]
shapes = [[d.size for d in s.dim]
if not s.unknown_rank else None for s in shapes]
pywrap_tensorflow.TF_GraphSetOutputHandleShapesAndTypes_wrapper(
target_t._op._graph._c_graph, # pylint: disable=protected-access
target_t._as_tf_output(), # pylint: disable=protected-access
shapes, ranks, types)
@tf_export("custom_gradient")
def custom_gradient(f):
"""Decorator to define a function with a custom gradient.
  This decorator allows fine grained control over the gradients of a sequence
  of operations. This may be useful for multiple reasons, including providing
a more efficient or numerically stable gradient for a sequence of operations.
For example, consider the following function that commonly occurs in the
computation of cross entropy and log likelihoods:
```python
def log1pexp(x):
return tf.log(1 + tf.exp(x))
```
  Due to numerical instability, the gradient of this function evaluated at
  x=100 is NaN. For example:
```python
x = tf.constant(100.)
y = log1pexp(x)
dy = tf.gradients(y, x) # Will be NaN when evaluated.
```
The gradient expression can be analytically simplified to provide numerical
stability:
```python
@tf.custom_gradient
def log1pexp(x):
e = tf.exp(x)
def grad(dy):
return dy * (1 - 1 / (1 + e))
return tf.log(1 + e), grad
```
With this definition, the gradient at x=100 will be correctly evaluated as
1.0.
See also `tf.RegisterGradient` which registers a gradient function for a
primitive TensorFlow operation. `tf.custom_gradient` on the other hand allows
for fine grained control over the gradient computation of a sequence of
operations.
Note that if the decorated function uses `Variable`s, the enclosing variable
scope must be using `ResourceVariable`s.
Args:
f: function `f(*x)` that returns a tuple `(y, grad_fn)` where:
- `x` is a sequence of `Tensor` inputs to the function.
- `y` is a `Tensor` or sequence of `Tensor` outputs of applying
TensorFlow operations in `f` to `x`.
- `grad_fn` is a function with the signature `g(*grad_ys)` which returns
a list of `Tensor`s - the derivatives of `Tensor`s in `y` with respect
to the `Tensor`s in `x`. `grad_ys` is a `Tensor` or sequence of
`Tensor`s the same size as `y` holding the initial value gradients for
each `Tensor` in `y`. In a pure mathematical sense, a vector-argument
vector-valued function `f`'s derivatives should be its Jacobian matrix
`J`. Here we are expressing the Jacobian `J` as a function `grad_fn`
which defines how `J` will transform a vector `grad_ys` when
left-multiplied with it (`grad_ys * J`). This functional representation
of a matrix is convenient to use for chain-rule calculation
(in e.g. the back-propagation algorithm).
If `f` uses `Variable`s (that are not part of the
inputs), i.e. through `get_variable`, then `grad_fn` should have
signature `g(*grad_ys, variables=None)`, where `variables` is a list of
the `Variable`s, and return a 2-tuple `(grad_xs, grad_vars)`, where
`grad_xs` is the same as above, and `grad_vars` is a `list<Tensor>`
with the derivatives of `Tensor`s in `y` with respect to the variables
(that is, grad_vars has one Tensor per variable in variables).
Returns:
A function `h(x)` which returns the same value as `f(x)[0]` and whose
gradient (as calculated by `tf.gradients`) is determined by `f(x)[1]`.
"""
def decorated(*args, **kwargs):
"""Decorated function with custom gradient."""
if context.executing_eagerly():
return _eager_mode_decorator(f, *args, **kwargs)
else:
return _graph_mode_decorator(f, *args, **kwargs)
return tf_decorator.make_decorator(f, decorated)
def _graph_mode_decorator(f, *args, **kwargs):
"""Implement custom gradient decorator for graph mode."""
# TODO(rsepassi): Add support for kwargs
if kwargs:
raise ValueError(
"The custom_gradient decorator currently supports keywords "
"arguments only when eager execution is enabled.")
name = "CustomGradient-%s" % ops.uid()
args = [ops.convert_to_tensor(x) for x in args]
# Checking global and local variables attempts to ensure that no non-resource
# Variables are added to the graph.
current_var_scope = variable_scope.get_variable_scope()
before_vars = set(current_var_scope.global_variables() +
current_var_scope.local_variables())
with backprop.GradientTape() as tape:
result, grad_fn = f(*args)
after_vars = set(current_var_scope.global_variables() +
current_var_scope.local_variables())
new_vars = after_vars - before_vars
for v in new_vars:
if not resource_variable_ops.is_resource_variable(v):
raise TypeError(
"All variables used by a function wrapped with @custom_gradient must "
"be `ResourceVariable`s. Ensure that no `variable_scope` is created "
"with `use_resource=False`.")
# The variables that grad_fn needs to return gradients for are the set of
# variables used that are *not* part of the inputs.
variables = list(set(tape.watched_variables()) - set(args))
grad_argspec = tf_inspect.getfullargspec(grad_fn)
variables_in_signature = ("variables" in grad_argspec.args or
grad_argspec.varkw)
if variables and not variables_in_signature:
raise TypeError("If using @custom_gradient with a function that "
"uses variables, then grad_fn must accept a keyword "
"argument 'variables'.")
if variables_in_signature and not variables:
# User seems to intend to use variables but none were captured.
if not variable_scope.get_variable_scope().use_resource:
raise TypeError("If using @custom_gradient with a function that "
"uses variables, the enclosing variable scope must "
"have use_resource=True.")
else:
logging.warn("@custom_gradient grad_fn has 'variables' in signature, but "
"no ResourceVariables were used on the forward pass.")
flat_result = nest.flatten(result)
all_tensors = flat_result + args + variables
def tape_grad_fn(*result_grads):
"""Custom grad fn wrapper."""
result_grads = result_grads[:len(flat_result)]
if variables:
input_grads, variable_grads = grad_fn(*result_grads, variables=variables)
if len(variable_grads) != len(variables):
raise ValueError("Must return gradient for each variable from "
"@custom_gradient grad_fn.")
else:
input_grads = grad_fn(*result_grads)
variable_grads = []
# Need to return one value per input to the IdentityN, so pad the
# gradients of the inputs of the custom_gradient function with the
# gradients of the outputs as well.
input_grads = nest.flatten(input_grads)
return ([None] * len(flat_result)) + input_grads + variable_grads
@ops.RegisterGradient(name)
def internal_grad_fn(unused_op, *result_grads): # pylint: disable=unused-variable
"""Custom grad fn wrapper."""
return tape_grad_fn(*result_grads)
original_tensors = all_tensors
with ops.get_default_graph().gradient_override_map({"IdentityN": name}):
all_tensors = array_ops.identity_n(all_tensors)
# Propagate handle data for happier shape inference for resource variables.
for i, t in enumerate(original_tensors):
if t.dtype == dtypes.resource and hasattr(t, "_handle_data"):
all_tensors[i]._handle_data = t._handle_data # pylint: disable=protected-access
tape_lib.record_operation(
f.__name__, all_tensors, original_tensors, tape_grad_fn)
for ot, t in zip(original_tensors, all_tensors):
copy_handle_data(ot, t)
return nest.pack_sequence_as(
structure=result, flat_sequence=all_tensors[:len(flat_result)])
def _eager_mode_decorator(f, *args, **kwargs):
"""Implement custom gradient decorator for eager mode."""
with backprop.GradientTape() as tape:
result, grad_fn = f(*args, **kwargs)
all_inputs = list(args) + list(kwargs.values())
# The variables that grad_fn needs to return gradients for are the set of
# variables used that are *not* part of the inputs.
variables = [v for v in set(tape.watched_variables()) if v not in all_inputs]
grad_argspec = tf_inspect.getfullargspec(grad_fn)
if (variables and ("variables" not in grad_argspec.args) and
not grad_argspec.varkw):
raise TypeError("If using @custom_gradient with a function that "
"uses variables, then grad_fn must accept a keyword "
"argument 'variables'.")
flat_result = nest.flatten(result)
# TODO(apassos) consider removing the identity below.
flat_result = [gen_array_ops.identity(x) for x in flat_result]
def actual_grad_fn(*result_grads):
"""Custom grad fn wrapper."""
if variables:
input_grads, variable_grads = grad_fn(*result_grads, variables=variables)
if len(variable_grads) != len(variables):
raise ValueError("Must return gradient for each variable from "
"@custom_gradient grad_fn.")
else:
input_grads = grad_fn(*result_grads)
variable_grads = []
return nest.flatten(input_grads) + variable_grads
input_tensors = [ops.convert_to_tensor(x) for x
in list(args) + list(variables)]
tape_lib.record_operation(f.__name__, flat_result, input_tensors,
actual_grad_fn)
flat_result = list(flat_result)
return nest.pack_sequence_as(result, flat_result)
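# A sketch of the `variables` keyword form described in the custom_gradient
# docstring above (assumes graph mode with resource variables; the names are
# illustrative and not part of this module):
#
#   @custom_gradient
#   def scale(x):
#     w = variable_scope.get_variable("w", shape=[], use_resource=True)
#     def grad(dy, variables=None):
#       return dy * w, [dy * x]  # (grad_xs, grad_vars) for y = w * x
#     return w * x, grad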
|
apache-2.0
| -7,386,745,741,267,146,000
| 44.059028
| 86
| 0.681282
| false
| 3.988015
| false
| false
| false
|
alexsalo/genenetwork2
|
wqflask/base/webqtlConfig.py
|
1
|
3470
|
#########################################
# Environment Variables - public
#########################################
#Debug Level
#1 for debug; mod_python will reload imports each time
DEBUG = 1
#USER privilege
USERDICT = {'guest':1,'user':2, 'admin':3, 'root':4}
#minimum number of informative strains
KMININFORMATIVE = 5
#maximum number of traits for interval mapping
MULTIPLEMAPPINGLIMIT = 11
#maximum number of traits for correlation
MAXCORR = 100
#Daily download limit from one IP
DAILYMAXIMUM = 1000
#maximum LRS value
MAXLRS = 460.0
#temporary data life span
MAXLIFE = 86400
#MINIMUM Database public value
PUBLICTHRESH = 0
#NCBI address
NCBI_LOCUSID = "http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=gene&cmd=Retrieve&dopt=Graphics&list_uids=%s"
UCSC_REFSEQ = "http://genome.cse.ucsc.edu/cgi-bin/hgGene?db=%s&hgg_gene=%s&hgg_chrom=chr%s&hgg_start=%s&hgg_end=%s"
GENBANK_ID = "http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=Nucleotide&cmd=search&doptcmdl=DocSum&term=%s"
OMIM_ID = "http://www.ncbi.nlm.nih.gov/omim/%s"
UNIGEN_ID = "http://www.ncbi.nlm.nih.gov/UniGene/clust.cgi?ORG=%s&CID=%s"
HOMOLOGENE_ID = "http://www.ncbi.nlm.nih.gov/sites/entrez?Db=homologene&Cmd=DetailsSearch&Term=%s"
PUBMEDLINK_URL = "http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&db=PubMed&list_uids=%s&dopt=Abstract"
UCSC_POS = "http://genome.ucsc.edu/cgi-bin/hgTracks?clade=mammal&org=%s&db=%s&position=chr%s:%s-%s&pix=800&Submit=submit"
UCSC_BLAT = 'http://genome.ucsc.edu/cgi-bin/hgBlat?org=%s&db=%s&type=0&sort=0&output=0&userSeq=%s'
UTHSC_BLAT = 'http://ucscbrowser.genenetwork.org/cgi-bin/hgBlat?org=%s&db=%s&type=0&sort=0&output=0&userSeq=%s'
UCSC_GENOME = "http://genome.ucsc.edu/cgi-bin/hgTracks?db=%s&position=chr%s:%d-%d&hgt.customText=http://web2qtl.utmem.edu:88/snp/chr%s"
ENSEMBLE_BLAT = 'http://www.ensembl.org/Mus_musculus/featureview?type=AffyProbe&id=%s'
DBSNP = 'http://www.ncbi.nlm.nih.gov/SNP/snp_ref.cgi?type=rs&rs=%s'
UCSC_RUDI_TRACK_URL = " http://genome.cse.ucsc.edu/cgi-bin/hgTracks?org=%s&db=%s&hgt.customText=http://gbic.biol.rug.nl/~ralberts/tracks/%s/%s"
GENOMEBROWSER_URL="http://ucscbrowser.genenetwork.org/cgi-bin/hgTracks?clade=mammal&org=Mouse&db=mm9&position=%s&hgt.suggest=&pix=800&Submit=submit"
ENSEMBLETRANSCRIPT_URL="http://useast.ensembl.org/Mus_musculus/Lucene/Details?species=Mus_musculus;idx=Transcript;end=1;q=%s"
GNROOT = "/home/zas1024/gene/" # Will remove this and dependent items later
SECUREDIR = GNROOT + 'secure/'
COMMON_LIB = GNROOT + 'support/admin'
HTMLPATH = GNROOT + 'genotype_files/'
PYLMM_PATH = '/home/zas1024/plink_gemma/'
SNP_PATH = '/home/zas1024/snps/'
IMGDIR = GNROOT + '/wqflask/wqflask/images/'
IMAGESPATH = HTMLPATH + 'images/'
UPLOADPATH = IMAGESPATH + 'upload/'
TMPDIR = '/home/zas1024/tmp/' # Will remove this and dependent items later
GENODIR = HTMLPATH + 'genotypes/'
NEWGENODIR = HTMLPATH + 'new_genotypes/'
GENO_ARCHIVE_DIR = GENODIR + 'archive/'
TEXTDIR = HTMLPATH + 'ProbeSetFreeze_DataMatrix/'
CMDLINEDIR = HTMLPATH + 'webqtl/cmdLine/'
ChangableHtmlPath = GNROOT + 'web/'
SITENAME = 'GN'
PORTADDR = "http://50.16.251.170"
BASEHREF = '<base href="http://50.16.251.170/">'
INFOPAGEHREF = '/dbdoc/%s.html'
GLOSSARYFILE = "/glossary.html"
CGIDIR = '/webqtl/' #XZ: The variable name 'CGIDIR' should be changed to 'PYTHONDIR'
SCRIPTFILE = 'main.py'
REFRESHSTR = '<meta http-equiv="refresh" content="5;url=%s' + SCRIPTFILE +'?sid=%s">'
REFRESHDIR = '%s' + SCRIPTFILE +'?sid=%s'
|
agpl-3.0
| -4,796,549,076,374,986,000
| 44.064935
| 148
| 0.711816
| false
| 2.441942
| false
| false
| false
|
MisanthropicBit/pygments-sisal
|
setup.py
|
1
|
1494
|
"""pygments-sisal module setup script for distribution."""
from __future__ import with_statement
import os
import setuptools
def get_version(filename):
with open(filename) as fh:
for line in fh:
if line.startswith('__version__'):
return line.split('=')[-1].strip()[1:-1]
setuptools.setup(
name='pygments-sisal',
version=get_version(os.path.join('pygments_sisal', '__init__.py')),
author='Alexander Asp Bock',
author_email='alexander.asp.bock@gmail.com',
platforms='All',
description=('A pygments lexer for SISAL'),
install_requires=['Pygments>=2.0'],
license='MIT',
keywords='pygments, lexer, sisal',
url='https://github.com/MisanthropicBit/pygments-sisal',
packages=setuptools.find_packages(),
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
# Pygments entry point
entry_points="[pygments.lexers]\n"
"sisal=pygments_sisal:SisalLexer"
)
|
mit
| 8,996,431,354,112,685,000
| 32.2
| 71
| 0.613119
| false
| 3.973404
| false
| false
| false
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-compute/azure/mgmt/compute/v2015_06_15/models/virtual_machine_scale_set_instance_view_py3.py
|
1
|
1931
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineScaleSetInstanceView(Model):
"""The instance view of a virtual machine scale set.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar virtual_machine: The instance view status summary for the virtual
machine scale set.
:vartype virtual_machine:
~azure.mgmt.compute.v2015_06_15.models.VirtualMachineScaleSetInstanceViewStatusesSummary
:ivar extensions: The extensions information.
:vartype extensions:
list[~azure.mgmt.compute.v2015_06_15.models.VirtualMachineScaleSetVMExtensionsSummary]
:param statuses: The resource status information.
:type statuses:
list[~azure.mgmt.compute.v2015_06_15.models.InstanceViewStatus]
"""
_validation = {
'virtual_machine': {'readonly': True},
'extensions': {'readonly': True},
}
_attribute_map = {
'virtual_machine': {'key': 'virtualMachine', 'type': 'VirtualMachineScaleSetInstanceViewStatusesSummary'},
'extensions': {'key': 'extensions', 'type': '[VirtualMachineScaleSetVMExtensionsSummary]'},
'statuses': {'key': 'statuses', 'type': '[InstanceViewStatus]'},
}
def __init__(self, *, statuses=None, **kwargs) -> None:
super(VirtualMachineScaleSetInstanceView, self).__init__(**kwargs)
self.virtual_machine = None
self.extensions = None
self.statuses = statuses
|
mit
| -237,297,689,070,112,480
| 39.229167
| 114
| 0.65044
| false
| 4.608592
| false
| false
| false
|
larryhq/railguns
|
railguns/django/generics.py
|
1
|
1295
|
from django.conf import settings
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from django.views.generic.base import TemplateView
# headers = get_headers(request, ['HTTP_APP_SCHEME', 'HTTP_USER_AGENT', 'HTTP_HOST'])
# print(headers)
def get_headers(request, keys=()):
return dict((key, value) for (key, value) in request.META.items() if key in keys)
class BaseView(TemplateView):
name = None
class WebView(BaseView):
def get(self, request, *args, **kwargs):
title = kwargs.get('title', '{} - {}'.format(_(self.name), _('app_name')))
endpoint = kwargs.get('endpoint', '/{}{}'.format(settings.API_VERSION, request.get_full_path()))
template_name = self.template_name if self.template_name else '{}.html'.format(self.name)
return render(request, template_name, locals())
class MobileView(BaseView):
def get(self, request, *args, **kwargs):
title = kwargs.get('title', _(self.name))
endpoint = kwargs.get('endpoint', '/{}{}'.format(settings.API_VERSION, request.get_full_path().replace(kwargs.get('path', '/m/'), '/')))
template_name = self.template_name if self.template_name else 'mobile/{}.html'.format(self.name)
return render(request, template_name, locals())
|
mit
| 7,655,970,432,009,576,000
| 42.166667
| 144
| 0.671042
| false
| 3.63764
| false
| false
| false
|
GuLinux/PySpectrum
|
reference_catalogues.py
|
1
|
1870
|
from PyQt5.QtCore import QStandardPaths
import os
import json
import urllib.request
import gzip
import collections
class ReferenceCatalogues:
def __init__(self, database):
self.database = database
c = database.cursor()
cats = c.execute('SELECT id, "table", "name", spectra_url, gzipped, file_column, sptype_column FROM spectra_catalogues ORDER BY id ASC')
self.catalogues = collections.OrderedDict([(c[2], {'id':c[0],'table':c[1],'name':c[2],'url':c[3],'gzipped':c[4]==1, 'columns': {'sptype': c[6], 'file':c[5]} }) for c in cats])
def spectra(self, catalog):
cat_info = self.catalogues[catalog]
query = "SELECT {0}, {1} FROM {2} WHERE {1} <> '' ORDER BY {1} ASC".format(cat_info['columns']['file'], cat_info['columns']['sptype'], cat_info['table'])
c = self.database.cursor()
return [{'catalog': catalog, 'sptype': r[1], 'file': r[0]} for r in c.execute(query)]
def fits(self, entry):
catname = entry['catalog']
catalog = self.catalogues[catname]
return ReferenceCatalogues.get_fits(catname, entry['file'], catalog['url'], catalog['gzipped'])
    @staticmethod
    def get_fits(catname, filename, url, is_gzipped):
cache_path = os.path.join(QStandardPaths.writableLocation(QStandardPaths.CacheLocation), catname)
file_path = os.path.join(cache_path, '{}.gz'.format(filename))
try:
os.makedirs(cache_path)
except FileExistsError:
pass
if not os.path.exists(file_path):
if is_gzipped:
urllib.request.urlretrieve(url.format("{}.gz".format(filename)), file_path )
else:
request = urllib.request.urlopen(url.format(filename))
with gzip.open(file_path, 'wb') as f:
f.write(request.read())
return file_path
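# Illustrative call (catalogue name, file name and URL template are
# hypothetical; the URL template must contain a '{}' placeholder for the
# file name):
#
#   path = ReferenceCatalogues.get_fits(
#       'miles', 'star0001.fits', 'http://example.org/spectra/{}', True)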
|
gpl-3.0
| -121,556,637,745,708,210
| 45.775
| 183
| 0.605882
| false
| 3.610039
| false
| false
| false
|
TemoaProject/temoa
|
data_processing/GraphVizFormats.py
|
1
|
5096
|
# SVG Formats
results_dot_fmt = """\
strict digraph model {
label = "Results for %(period)s"
rankdir = "LR" ;
smoothtype = "power_dist" ;
splines = "%(splinevar)s" ;
node [ style="filled" ] ;
edge [ arrowhead="vee" ] ;
subgraph unused_techs {
node [
color = "%(unused_color)s",
fontcolor = "%(unusedfont_color)s",
shape = "box",
fontcolor = "%(font_color)s"
] ;
%(dtechs)s
}
subgraph unused_energy_carriers {
node [
color = "%(unused_color)s",
fontcolor = "%(unusedfont_color)s",
shape = "circle",
fillcolor = "%(fill_color)s"
] ;
%(dcarriers)s
}
subgraph unused_emissions {
node [
color = "%(unused_color)s",
fontcolor = "%(unusedfont_color)s",
shape = "circle",
fillcolor = "%(fill_color)s"
]
%(demissions)s
}
subgraph in_use_techs {
node [
color = "%(tech_color)s",
fontcolor = "%(usedfont_color)s",
shape = "box"
fontcolor = "%(font_color)s"
] ;
%(etechs)s
}
subgraph in_use_energy_carriers {
node [
color = "%(commodity_color)s",
fontcolor = "%(usedfont_color)s",
shape = "circle",
fillcolor = "%(fill_color)s"
] ;
%(ecarriers)s
}
subgraph in_use_emissions {
node [
color = "%(commodity_color)s",
fontcolor = "%(usedfont_color)s",
shape = "circle",
fillcolor = "%(fill_color)s"
] ;
%(eemissions)s
}
subgraph unused_flows {
edge [ color="%(unused_color)s" ]
%(dflows)s
}
subgraph in_use_flows {
subgraph inputs {
edge [ color="%(arrowheadin_color)s" ] ;
%(eflowsi)s
}
subgraph outputs {
edge [ color="%(arrowheadout_color)s" ] ;
%(eflowso)s
}
}
{rank = same; %(xnodes)s}
}
"""
tech_results_dot_fmt = """\
strict digraph model {
label = "Results for %(inp_technology)s in %(period)s" ;
compound = "True" ;
concentrate = "True";
rankdir = "LR" ;
splines = "%(splinevar)s" ;
node [ style="filled" ] ;
edge [ arrowhead="vee" ] ;
subgraph cluster_vintages {
label = "Vintages\\nCapacity: %(total_cap).2f" ;
href = "%(cluster_vintage_url)s" ;
style = "filled"
color = "%(sb_vpbackg_color)s"
node [ color="%(sb_vp_color)s", shape="box", fontcolor="%(usedfont_color)s" ] ;
%(vnodes)s
}
subgraph energy_carriers {
node [
color = "%(commodity_color)s",
fontcolor = "%(usedfont_color)s",
shape = "circle",
fillcolor = "%(fill_color)s"
] ;
%(enodes)s
}
subgraph inputs {
edge [ color="%(arrowheadin_color)s" ] ;
%(iedges)s
}
subgraph outputs {
edge [ color="%(arrowheadout_color)s" ] ;
%(oedges)s
}
}
"""
slice_dot_fmt = """\
strict digraph model {
label = "Activity split of process %(inp_technology)s, %(vintage)s in year %(period)s" ;
compound = "True" ;
concentrate = "True";
rankdir = "LR" ;
splines = "%(splinevar)s" ;
node [ style="filled" ] ;
edge [ arrowhead="vee" ] ;
subgraph cluster_slices {
label = "%(vintage)s Capacity: %(total_cap).2f" ;
color = "%(vintage_cluster_color)s" ;
rank = "same" ;
style = "filled" ;
node [ color="%(vintage_color)s", shape="box", fontcolor="%(usedfont_color)s" ] ;
%(snodes)s
}
subgraph energy_carriers {
node [
color = "%(commodity_color)s",
fontcolor = "%(usedfont_color)s",
shape = "circle",
fillcolor = "%(fill_color)s"
] ;
%(enodes)s
}
subgraph inputs {
edge [ color="%(input_color)s" ] ;
%(iedges)s
}
subgraph outputs {
edge [ color="%(output_color)s" ] ;
%(oedges)s
}
}
"""
commodity_dot_fmt = """\
strict digraph result_commodity_%(inp_commodity)s {
label = "%(inp_commodity)s - %(period)s" ;
compound = "True" ;
concentrate = "True" ;
rankdir = "LR" ;
splines = "True" ;
node [ shape="box", style="filled", fontcolor="%(font_color)s" ] ;
edge [
arrowhead = "vee",
fontsize = "8",
label = " ",
labelfloat = "False",
labelfontcolor = "lightgreen"
len = "2",
weight = "0.5",
] ;
%(resource_node)s
subgraph used_techs {
node [ color="%(tech_color)s" ] ;
%(used_nodes)s
}
subgraph used_techs {
node [ color="%(unused_color)s" ] ;
%(unused_nodes)s
}
subgraph in_use_flows {
edge [ color="%(sb_arrow_color)s" ] ;
%(used_edges)s
}
subgraph unused_flows {
edge [ color="%(unused_color)s" ] ;
%(unused_edges)s
}
}
"""
quick_run_dot_fmt = """\
strict digraph model {
rankdir = "LR" ;
// Default node and edge attributes
node [ style="filled" ] ;
edge [ arrowhead="vee", labelfontcolor="lightgreen" ] ;
// Define individual nodes
subgraph techs {
node [ color="%(tech_color)s", shape="box", fontcolor="%(font_color)s" ] ;
%(tnodes)s
}
subgraph energy_carriers {
node [ color="%(commodity_color)s", shape="circle", fillcolor="%(fill_color)s" ] ;
%(enodes)s
}
// Define edges and any specific edge attributes
subgraph inputs {
edge [ color="%(arrowheadin_color)s" ] ;
%(iedges)s
}
subgraph outputs {
edge [ color="%(arrowheadout_color)s" ] ;
%(oedges)s
}
{rank = same; %(snodes)s}
}
"""
|
gpl-2.0
| -627,595,549,209,957,600
| 16.697917
| 89
| 0.57084
| false
| 2.643154
| false
| false
| false
|
GNOME/dia
|
plug-ins/python/dot2dia.py
|
1
|
9213
|
# PyDia DOT Import
# Copyright (c) 2009 Hans Breuer <hans@breuer.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
##
# \file dot2dia.py \brief translate dot ( http://www.graphviz.org/ ) to Dia format
# \ingroup ImportFilters
import re, sys
import gettext
_ = gettext.gettext
# FIXME: keywords are case independent
keywords = ['node', 'edge', 'graph', 'digraph', 'strict']
# starts with either a keyword or a node name in quotes.
# BEWARE: (?<!-> ) - negative lookbehind so nodes are not found a second time in a connection definition (and their params misinterpreted)
rDecl = re.compile(r'\s*(?<!-> )(?P<cmd>(?:' + ')|(?:'.join(keywords) + r')|(?:\w+' + r')|(?:"[^"]+"))\s+\[(?P<dict>[^\]]+)\];', re.DOTALL | re.MULTILINE)
# don't assume that all node names are in quotes
rEdge = re.compile(r'\s*(?P<n1>("[^"]+")|(\w+))\s*->\s*(?P<n2>("[^"]+")|(\w+))\s+\[(?P<dict>[^\]]+)\];*', re.DOTALL | re.MULTILINE)
# a list of key=value
rParam = re.compile(r'(?P<key>\w+)\s*=(?P<val>(\w+)|("[^"]+")),?\s*', re.DOTALL | re.MULTILINE)
# units in dot are either points or inch
cmInch = 2.54
cmPoints = cmInch/72.0
# dot y up, dia y down
def StripQuotes(s) :
"strip quotes if any"
if s[0] == '"' :
s = s[1:-1]
return s
def DictFromString (s) :
#print "KVs", s
d = {}
for m in rParam.finditer (s) :
if m :
d[m.group ("key")] = StripQuotes(m.group ("val"))
return d
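# Illustrative parse (values are hypothetical): quoted values lose their
# quotes, e.g. DictFromString('width="0.75", height="0.5"') returns
# {'width': '0.75', 'height': '0.5'}.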
##
# \brief Accumulating information with _DiaObject
class Object :
""" will end as a Dia Object """
def __init__ (self, typename, parms) :
self.typename = typename
self.parms = parms
def FontSize (self) :
try :
return float(self.parms['fontsize']) * cmPoints
except :
return 0.6
##
# \brief The nodes of the graph - finally represented as _Ellipse
class Node(Object) :
def __init__ (self, name, parms) :
Object.__init__(self, "Standard - Ellipse", parms)
self.name = name
def Pos(self) :
'deliver scaled X,Y coordinate'
x, y = 0.0, 0.0
try :
xy = self.parms['pos'].split(',')
x = float(xy[0]) * cmPoints
y = float(xy[1]) * cmPoints
except :
print("No position on '%s'" % (self.name,))
return x,-y
def Size(self) :
'deliver scaled W,H coordinate'
w, h = 0.5, 0.5
try :
w = float(self.parms['width']) * cmInch #? maybe this is relative to the font size?
h = float(self.parms['height']) * cmInch
except :
print("No size on '%s'" % (self.name,))
return w,h
##
# \brief The edges of the graph - finally represented as _Bezierline
class Edge(Object) :
def __init__ (self, src, dest, parms) :
Object.__init__(self, "Standard - BezierLine", parms)
self.src = src
self.dest = dest
def LabelPos (self) :
x, y = 0.0, 0.0
try :
xy = self.parms['lp'].split(',')
x = float(xy[0]) * cmPoints
y = float(xy[1]) * cmPoints
except :
if 'label' in self.parms :
# should be optional otherwise
print("No label pos on %s" % (self.src + '->' + self.dest,))
return x, -y
def Pos (self) :
# no need to do something smart, it get adjusted anyway
return 0.0, 0.0
def SetPoints (self, diaobj) :
'the property to set must match the type'
pts = []
if 'pos' in self.parms :
s = self.parms['pos']
if s[:2] == 'e,' :
sp = s[2:].split(" ")
# apparently the first point is the end? just put it there!
sp.append(sp[-1])
del sp[0]
bp = []
for i in range(0, len(sp)) :
xy = sp[i].replace("\n", "").replace("\\", "").split(",")
try :
x = float(xy[0]) * cmPoints
y = float(xy[1]) * (-cmPoints)
except ValueError :
print(xy)
continue
bp.append((x,y))
# must convert to _one_ tuple
if i == 0 : # first must be move to
pts.append ((0, bp[0][0], bp[0][1]))
bp = [] # reset to new point
elif len(bp) == 3 : # rest is curveto ==2
pts.append ((2, bp[0][0], bp[0][1], bp[1][0], bp[1][1], bp[2][0], bp[2][1]))
bp = [] # reset
if len(bp) > 0 :
print(len(bp), "bezier points left!")
if len(pts) > 1 :
diaobj.properties['bez_points'] = pts
else :
print("BezPoints", pts)
def MergeParms (d, extra) :
for k in list(extra.keys()) :
if k not in d :
d[k] = extra[k]
##
# \brief Parsing the given dot file
def Parse(sFile) :
f = open(sFile, 'r')
s = f.read()
extra = {}
nodes = {}
edges = []
if 0 : # debug regex
dbg = rDecl.findall(s)
for db in dbg :
print(db)
for m in rDecl.finditer(s) :
if m :
name = StripQuotes(m.group("cmd"))
if name in keywords :
if name in extra :
MergeParms(extra[name], DictFromString(m.group("dict")))
else :
extra[name] = DictFromString(m.group("dict"))
else : # must be a node
n = Node(name, DictFromString(m.group("dict")))
if 'node' in extra :
MergeParms(n.parms, extra['node'])
nodes[name] = n
for m in rEdge.finditer(s) :
if m :
# the names given are not necessarily registered as nodes already
defparams = {}
if 'node' in extra :
defparams = extra['node']
for k in ["n1", "n2"] :
name = StripQuotes(m.group(k))
if name in nodes :
pass # defparms should be set above
else :
nodes[name] = Node(name, defparams)
# remember connection
edges.append(Edge(StripQuotes(m.group("n1")), StripQuotes(m.group("n2")), DictFromString(m.group("dict"))))
return [nodes, edges]
##
# \brief Adding a label for the edges
#
# This function could be improved if Dia would allow to
# attach labels to arbitrary objects. For the time being
# only the initial postion does match, but relayouting the
# graph in Dia will loose the position
def AddLabel (layer, pos, label, fontsize, center=0) :
""" create a Text object an put it into the layer """
textType = dia.get_object_type("Standard - Text")
obj, h1, h2 = textType.create(pos[0], pos[1])
#TODO: transfer font-size
obj.properties["text"] = label
obj.properties["text_height"] = fontsize
if center :
obj.properties["text_alignment"] = 1
obj.properties["text_vert_alignment"] = 2
layer.add_object(obj)
##
# \brief Callback registered for the ImportFilter
def ImportFile (sFile, diagramData) :
""" read the dot file and create diagram objects """
nodes, edges = Parse(sFile)
layer = diagramData.active_layer # can do better, e.g. layer per graph
for key in list(nodes.keys()) :
n = nodes[key]
nodeType = dia.get_object_type(n.typename) # could be optimized out of loop
x, y = n.Pos()
w, h = n.Size()
obj, h1, h2 = nodeType.create(x-w/2, y-h/2) # Dot pos is center, Dia (usually) uses top/left
# resizing the Ellipse by handle is screwed
# obj.move_handle(h2, (x+w/2, y+h/2), 0, 0) # resize the object
obj.properties["elem_width"] = w
obj.properties["elem_height"] = h
if 'fillcolor' in n.parms :
try :
obj.properties['fill_colour'] = n.parms['fillcolor'] # same color syntax?
except :
print("Failed to apply:", n.parms['fillcolor'])
layer.add_object(obj)
AddLabel (layer, (x,y), n.name, n.FontSize(), 1)
obj.properties['meta'] = n.parms # copy all (remaining) parameters
# after creation replace the node with the object (needed to connect them)
nodes[key] = obj
for e in edges :
edgeType = dia.get_object_type(e.typename) # could be optimized out of loop
x, y = e.Pos() # just to have a start
con, h1, h2 = edgeType.create(x,y)
e.SetPoints(con)
if 'style' in e.parms : # set line style
con.properties['line_style'] = (4, 0.5) #FIXME: hard-coded dotted
if 'weight' in e.parms :
con.properties['line_width'] = float(e.parms['weight']) / 10.0 # arbitray anyway
layer.add_object(con)
if e.src in nodes :
h = con.handles[0]
obj = nodes[e.src]
# by moving to the cp position first, the connection's points get recalculated
pos = obj.connections[8].pos
con.move_handle(h, pos, 0, 0)
h.connect(obj.connections[8]) # connect to mid-point
if e.dest in nodes :
h = con.handles[-1]
obj = nodes[e.dest]
pos = obj.connections[8].pos
con.move_handle(h, pos, 0, 0)
h.connect (obj.connections[8]) # connect to mid-point
if 'label' in e.parms :
AddLabel (layer, e.LabelPos(), e.parms['label'], e.FontSize())
diagram = None # FIXME: get it
if diagram :
for n, o in nodes.items() :
diagram.update_connections(o)
diagram.update_extents()
return diagramData
if __name__ == '__main__':
# just testing at the moment
nodes, edges = Parse(sys.argv[1])
for k, n in nodes.items() :
print("Name:", n.name, "Pos:", n.Pos(), "WxH:", n.Size())
for e in edges :
print(e.src, "->", e.dest, e.LabelPos(), e.parms)
else :
# run as a Dia plug-in
import dia
dia.register_import (_("Graphviz Dot"), "dot", ImportFile)
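# When run standalone (the else branch above handles the Dia plug-in case),
# the module parses the dot file named on the command line and prints the
# recovered nodes and edges, e.g.:
#
#   python dot2dia.py layout.dot
#
# ("layout.dot" is an illustrative file name.)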
|
gpl-2.0
| -8,080,859,749,009,912,000
| 31.670213
| 152
| 0.628568
| false
| 2.833897
| false
| false
| false
|
EmanueleCannizzaro/scons
|
test/Interactive/help.py
|
1
|
2710
|
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/Interactive/help.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Verify the behavior of the "help" subcommand (and its "h" and "?" aliases).
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """\
Command('foo.out', 'foo.in', Copy('$TARGET', '$SOURCE'))
Command('1', [], Touch('$TARGET'))
""")
test.write('foo.in', "foo.in 1\n")
scons = test.start(arguments = '-Q --interactive')
scons.send("build foo.out 1\n")
test.wait_for(test.workpath('1'))
test.must_match(test.workpath('foo.out'), "foo.in 1\n")
scons.send('help\n')
scons.send('h\n')
scons.send('?\n')
help_text = """\
build [TARGETS] Build the specified TARGETS and their dependencies.
'b' is a synonym.
clean [TARGETS] Clean (remove) the specified TARGETS and their
dependencies. 'c' is a synonym.
exit Exit SCons interactive mode.
help [COMMAND] Prints help for the specified COMMAND. 'h' and
'?' are synonyms.
shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and '!'
are synonyms.
version Prints SCons version information.
"""
expect_stdout = """\
scons>>> Copy("foo.out", "foo.in")
Touch("1")
scons>>> %(help_text)s
scons>>> %(help_text)s
scons>>> %(help_text)s
scons>>>
""" % locals()
test.finish(scons, stdout = expect_stdout)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
mit
| -3,640,869,122,940,750,000
| 28.78022
| 98
| 0.675646
| false
| 3.594164
| true
| false
| false
|
delattreb/TemperatureHumidityServer
|
Doc/ssd1306-master/tests/test_sh1106.py
|
1
|
2109
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from unittest.mock import call, Mock
except ImportError:
from mock import call, Mock
import pytest
from oled.device import sh1106
from oled.render import canvas
import baseline_data
serial = Mock(unsafe=True)
def setup_function(function):
serial.reset_mock()
serial.command.side_effect = None
def test_init_128x64():
sh1106(serial)
serial.command.assert_has_calls([
# Initial burst are initialization commands
call(174, 32, 16, 176, 200, 0, 16, 64, 161, 166, 168, 63, 164,
211, 0, 213, 240, 217, 34, 218, 18, 219, 32, 141, 20),
# set contrast
call(129, 127),
# reset the display
call(176, 2, 16),
call(177, 2, 16),
call(178, 2, 16),
call(179, 2, 16),
call(180, 2, 16),
call(181, 2, 16),
call(182, 2, 16),
call(183, 2, 16)
])
    # Next 1024 are all data: zeros to clear the RAM
# (1024 = 128 * 64 / 8)
serial.data.assert_has_calls([call([0] * 128)] * 8)
def test_init_invalid_dimensions():
with pytest.raises(ValueError) as ex:
sh1106(serial, width=77, height=105)
assert "Unsupported display mode: 77x105" in str(ex.value)
def test_init_handle_ioerror():
serial.command.side_effect = IOError(-99, "Test exception")
with pytest.raises(IOError) as ex:
sh1106(serial)
assert "Failed to initialize SH1106 display driver" in str(ex.value)
def test_display():
device = sh1106(serial)
serial.reset_mock()
recordings = []
def data(data):
recordings.append({'data': data})
def command(*cmd):
recordings.append({'command': list(cmd)})
serial.command = Mock(side_effect=command, unsafe=True)
serial.data = Mock(side_effect=data, unsafe=True)
# Use the same drawing primitives as the demo
with canvas(device) as draw:
baseline_data.primitives(device, draw)
serial.data.assert_called()
serial.command.assert_called()
print(recordings)
assert recordings == baseline_data.demo_sh1106
|
gpl-3.0
| -2,187,029,735,590,444,800
| 24.409639
| 72
| 0.626363
| false
| 3.379808
| true
| false
| false
|
ICTU/quality-time
|
docs/src/create_metrics_and_sources_md.py
|
1
|
8299
|
"""Script to convert the data model in a Markdown file."""
import json
import pathlib
import re
import sys
from typing import Optional
TYPE_DESCRIPTION = dict(
url="URL",
string="String",
multiple_choice="Multiple choice",
password="Password",
integer="Integer",
date="Date",
single_choice="Single choice",
multiple_choice_with_addition="Multiple choice with addition",
)
def html_escape(text: str) -> str:
"""Escape < and >."""
return text.replace("<", "<").replace(">", ">")
def get_data_model():
"""Return the data model."""
module_dir = pathlib.Path(__file__).resolve().parent
server_src_path = module_dir.parent.parent / "components" / "server" / "src"
sys.path.insert(0, str(server_src_path))
from data_model import DATA_MODEL_JSON # pylint: disable=import-error,import-outside-toplevel
return json.loads(DATA_MODEL_JSON)
def markdown_link(url: str, anchor: Optional[str] = None) -> str:
"""Return a Markdown link."""
anchor = anchor or url
return f"[{anchor}]({url})"
def markdown_table_row(*cells: str) -> str:
"""Return a Markdown table row."""
return f"| {' | '.join([html_escape(cell) for cell in cells])} |\n"
def markdown_table_header(*column_headers: str) -> str:
"""Return a Markdown table header."""
headers = markdown_table_row(*column_headers)
separator = markdown_table_row(*[":" + "-" * (len(column_header) - 1) for column_header in column_headers])
return "\n" + headers + separator
def markdown_header(header: str, level: int = 1) -> str:
"""Return a Markdown header."""
return ("\n" if level > 1 else "") + "#" * level + f" {header}\n"
def metrics_table(data_model, universal_sources: list[str]) -> str:
"""Return the metrics as Markdown table."""
markdown = markdown_table_header("Name", "Description", "Default target", "Scale(s)", "Default tags", "Sources¹")
for metric in sorted(data_model["metrics"].values(), key=lambda item: str(item["name"])):
direction = {"<": "≦", ">": "≧"}[metric["direction"]]
unit = "% of the " + metric["unit"] if metric["default_scale"] == "percentage" else " " + metric["unit"]
target = f"{direction} {metric['target']}{unit}"
if len(metric["scales"]) == 1:
scales = metric["default_scale"]
else:
scales = ", ".join(
[
f"{scale} (default)" if scale == metric["default_scale"] else scale
for scale in sorted(metric["scales"])
]
)
tags = ", ".join(metric["tags"])
sources = []
for source in metric["sources"]:
if source not in universal_sources:
source_name = data_model["sources"][source]["name"]
sources.append(f"[{source_name}]({metric_source_slug(data_model, metric, source)})")
markdown += markdown_table_row(
metric["name"], metric["description"], target, scales, tags, ", ".join(sorted(sources))
)
markdown += "\n"
return markdown
def sources_table(data_model, universal_sources: list[str]) -> str:
"""Return the sources as Markdown table."""
markdown = markdown_table_header("Name", "Description", "Metrics")
for source_key, source in sorted(data_model["sources"].items(), key=lambda item: str(item[1]["name"])):
source_name = f"[{source['name']}]({source['url']})" if "url" in source else source["name"]
if source_key in universal_sources:
metrics = "¹"
else:
metrics = ", ".join(
[
f"[{metric['name']}]({metric_source_slug(data_model, metric, source_key)})"
for metric in data_model["metrics"].values()
if source_key in metric["sources"]
]
)
markdown += markdown_table_row(source_name, source["description"], metrics)
markdown += "\n"
return markdown
def metric_source_slug(data_model, metric, source) -> str:
"""Return a slug for the metric source combination."""
source_name = data_model["sources"][source]["name"]
return f"#{metric['name']} from {source_name}".lower().replace(" ", "-")
def metric_source_table(data_model, metric_key, source_key) -> str:
"""Return the metric source combination as Markdown table."""
markdown = markdown_table_header("Parameter", "Type", "Values", "Default value", "Mandatory", "Help")
for parameter in sorted(
data_model["sources"][source_key]["parameters"].values(), key=lambda parameter: str(parameter["name"])
):
if metric_key in parameter["metrics"]:
name = parameter["name"]
parameter_type = TYPE_DESCRIPTION[parameter["type"]]
default_value = parameter["default_value"]
if isinstance(default_value, list):
if not default_value and parameter["type"] in ("single_choice", "multiple_choice"):
default_value = f"_all {parameter['short_name']}_"
else:
default_value = ", ".join(default_value)
if parameter["type"] in ("single_choice", "multiple_choice"):
values = ", ".join(sorted(parameter["values"]))
else:
values = ""
mandatory = "Yes" if parameter["mandatory"] else "No"
help_url = markdown_link(parameter["help_url"]) if "help_url" in parameter else parameter.get("help", "")
markdown += markdown_table_row(name, parameter_type, values, default_value, mandatory, help_url)
markdown += "\n"
return markdown
def metric_source_configuration_table(data_model, metric_key, source_key) -> str:
"""Return the metric source combination's configuration as Markdown table."""
configurations = data_model["sources"][source_key].get("configuration", {}).values()
relevant_configurations = [config for config in configurations if metric_key in config["metrics"]]
if not relevant_configurations:
return ""
markdown = markdown_table_header("Configuration", "Value")
for configuration in sorted(relevant_configurations, key=lambda config: str(config["name"])):
name = configuration["name"]
values = ", ".join(sorted(configuration["value"], key=lambda value: value.lower()))
markdown += markdown_table_row(name, values)
markdown += "\n"
return markdown
def data_model_as_table(data_model) -> str:
"""Return the data model as Markdown table."""
markdown = markdown_header("Quality-time metrics and sources")
markdown += (
"\nThis document lists all [metrics](#metrics) that *Quality-time* can measure and all "
"[sources](#sources) that *Quality-time* can use to measure the metrics. For each "
"[supported combination of metric and source](#supported-metric-source-combinations), it lists the "
"parameters that can be used to configure the source.\n"
)
markdown += markdown_header("Metrics", 2)
markdown += metrics_table(data_model, universal_sources := ["manual_number"])
markdown += markdown_header("Sources", 2)
markdown += sources_table(data_model, universal_sources)
markdown += "¹) All metrics with the count or percentage scale can be measured using the 'Manual number' source.\n"
markdown += markdown_header("Supported metric-source combinations", 2)
for metric_key, metric in data_model["metrics"].items():
for source_key in metric["sources"]:
if source_key not in universal_sources:
markdown += markdown_header(f"{metric['name']} from {data_model['sources'][source_key]['name']}", 3)
markdown += metric_source_table(data_model, metric_key, source_key)
markdown += metric_source_configuration_table(data_model, metric_key, source_key)
markdown = re.sub(r"\n{3,}", "\n\n", markdown) # Replace multiple consecutive empty lines with one empty line
return re.sub(r"\n\n$", "\n", markdown) # Remove final empty line
if __name__ == "__main__":
data_model_md_path = pathlib.Path(__file__).resolve().parent.parent / "METRICS_AND_SOURCES.md"
with data_model_md_path.open("w") as data_model_md:
data_model_md.write(data_model_as_table(get_data_model()))
|
apache-2.0
| 7,844,623,377,795,428,000
| 44.56044
| 119
| 0.614689
| false
| 4.003863
| true
| false
| false
|
OpenKMIP/PyKMIP
|
kmip/services/server/monitor.py
|
1
|
6822
|
# Copyright (c) 2018 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import multiprocessing
import os
import signal
import time
from kmip.core import policy as operation_policy
def get_json_files(p):
"""
Scan the provided policy directory for all JSON policy files.
"""
f = [os.path.join(p, x) for x in os.listdir(p) if x.endswith(".json")]
return sorted(f)
class PolicyDirectoryMonitor(multiprocessing.Process):
"""
A file monitor that tracks modifications made within the policy directory.
"""
def __init__(self, policy_directory, policy_store, live_monitoring=True):
"""
Set up the file monitor with the policy directory to track.
Args:
policy_directory (string): The system path of the policy directory
that should be monitored. Required.
policy_store (DictProxy): A dictionary proxy created by the server
multiprocessing resource manager. Used to store and share the
policy information across server processes and threads.
Required.
live_monitoring (boolean): A boolean indicating whether or not
live monitoring should continue indefinitely. Optional,
defaults to True.
"""
super(PolicyDirectoryMonitor, self).__init__()
self.halt_trigger = multiprocessing.Event()
self.policy_directory = policy_directory
self.live_monitoring = live_monitoring
self.file_timestamps = None
self.policy_cache = None
self.policy_files = None
self.policy_map = None
self.policy_store = policy_store
self.reserved_policies = ['default', 'public']
def interrupt_handler(trigger, frame):
self.stop()
signal.signal(signal.SIGINT, interrupt_handler)
signal.signal(signal.SIGTERM, interrupt_handler)
self.logger = logging.getLogger("kmip.server.monitor")
self.initialize_tracking_structures()
def stop(self):
self.halt_trigger.set()
def scan_policies(self):
"""
Scan the policy directory for policy data.
"""
policy_files = get_json_files(self.policy_directory)
for f in set(policy_files) - set(self.policy_files):
self.file_timestamps[f] = 0
for f in set(self.policy_files) - set(policy_files):
self.logger.info("Removing policies for file: {}".format(f))
self.file_timestamps.pop(f, None)
for p in self.policy_cache.keys():
self.disassociate_policy_and_file(p, f)
for p in [k for k, v in self.policy_map.items() if v == f]:
self.restore_or_delete_policy(p)
self.policy_files = policy_files
for f in sorted(self.file_timestamps.keys()):
t = os.path.getmtime(f)
if t > self.file_timestamps[f]:
self.logger.info("Loading policies for file: {}".format(f))
self.file_timestamps[f] = t
old_p = [k for k, v in self.policy_map.items() if v == f]
try:
new_p = operation_policy.read_policy_from_file(f)
except ValueError:
self.logger.error("Failure loading file: {}".format(f))
self.logger.debug("", exc_info=True)
continue
for p in new_p.keys():
self.logger.info("Loading policy: {}".format(p))
if p in self.reserved_policies:
self.logger.warning(
"Policy '{}' overwrites a reserved policy and "
"will be thrown out.".format(p)
)
continue
if p in sorted(self.policy_store.keys()):
self.logger.debug(
"Policy '{}' overwrites an existing "
"policy.".format(p)
)
if f != self.policy_map.get(p):
self.policy_cache.get(p).append(
(
time.time(),
self.policy_map.get(p),
self.policy_store.get(p)
)
)
else:
self.policy_cache[p] = []
self.policy_store[p] = new_p.get(p)
self.policy_map[p] = f
for p in set(old_p) - set(new_p.keys()):
self.disassociate_policy_and_file(p, f)
self.restore_or_delete_policy(p)
def run(self):
"""
Start monitoring operation policy files.
"""
self.initialize_tracking_structures()
if self.live_monitoring:
self.logger.info("Starting up the operation policy file monitor.")
while not self.halt_trigger.is_set():
time.sleep(1)
self.scan_policies()
self.logger.info("Stopping the operation policy file monitor.")
else:
self.scan_policies()
def initialize_tracking_structures(self):
self.file_timestamps = {}
self.policy_cache = {}
self.policy_files = []
self.policy_map = {}
for k in self.policy_store.keys():
if k not in self.reserved_policies:
self.policy_store.pop(k, None)
def disassociate_policy_and_file(self, policy, file_name):
c = self.policy_cache.get(policy, [])
for i in [c.index(e) for e in c if e[1] == file_name][::-1]:
c.pop(i)
def restore_or_delete_policy(self, policy):
c = self.policy_cache.get(policy, [])
if len(c) == 0:
self.logger.info("Removing policy: {}".format(policy))
self.policy_store.pop(policy, None)
self.policy_map.pop(policy, None)
self.policy_cache.pop(policy, None)
else:
e = c.pop()
self.policy_store[policy] = e[2]
self.policy_map[policy] = e[1]
|
apache-2.0
| -7,231,256,344,903,181,000
| 37.982857
| 78
| 0.552331
| false
| 4.361893
| false
| false
| false
|
rthallisey/clapper
|
ansible-tests/mistral/tripleo_validations/actions/load_validations.py
|
1
|
2621
|
import glob
import logging
import os
import yaml
from mistral.actions import base
LOG = logging.getLogger(__name__)
DEFAULT_METADATA = {
'name': 'Unnamed',
'description': 'No description',
'stage': 'No stage',
'require_plan': True,
'groups': [],
}
VALIDATIONS_DIR = '/usr/share/tripleo-validations/validations'
def get_validation_metadata(validation, key):
try:
return validation[0]['vars']['metadata'][key]
except KeyError:
return DEFAULT_METADATA.get(key)
except TypeError:
LOG.exception("Failed to get validation metadata.")
def load_validations(groups=None):
'''Loads all validations.'''
paths = glob.glob('{}/*.yaml'.format(VALIDATIONS_DIR))
results = []
    for validation_path in sorted(paths):
with open(validation_path) as f:
validation = yaml.safe_load(f.read())
validation_groups = get_validation_metadata(validation, 'groups')
if not groups or \
set.intersection(set(groups), set(validation_groups)):
results.append({
'id': os.path.splitext(
os.path.basename(validation_path))[0],
'name': get_validation_metadata(validation, 'name'),
'groups': get_validation_metadata(validation, 'groups'),
'description': get_validation_metadata(validation,
'description'),
'require_plan': get_validation_metadata(validation,
'require_plan'),
'metadata': get_remaining_metadata(validation)
})
return results
def get_remaining_metadata(validation):
try:
for (k, v) in validation[0]['vars']['metadata'].items():
if len(bytes(k)) > 255 or len(bytes(v)) > 255:
LOG.error("Metadata is too long.")
return dict()
return {k: v for k, v in validation[0]['vars']['metadata'].items()
if k not in ['name', 'description', 'require_plan', 'groups']}
except KeyError:
return dict()
class ListValidations(base.Action):
def __init__(self, groups=None):
self.groups = groups
def run(self):
return load_validations(self.groups)
class ListGroups(base.Action):
def __init__(self):
pass
def run(self):
validations = load_validations()
return { group for validation in validations
for group in validation['groups'] }
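# A minimal usage sketch, assuming VALIDATIONS_DIR exists and holds playbooks
# with the metadata layout parsed above ('pre-deployment' is a hypothetical
# group name used purely for illustration):
if __name__ == '__main__':
    all_validations = ListValidations().run()
    pre_deploy = ListValidations(groups=['pre-deployment']).run()
    print('%d validations, %d tagged pre-deployment'
          % (len(all_validations), len(pre_deploy)))
    print(ListGroups().run())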
|
apache-2.0
| -3,117,070,980,614,347,300
| 30.578313
| 78
| 0.563144
| false
| 4.419899
| false
| false
| false
|
mcrav/XDToolkit
|
src/emailfuncs.py
|
1
|
1465
|
'''
#####################################################################
#-------------------EMAIL--------------------------------------------
#####################################################################
'''
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
from email.mime.text import MIMEText
import os
def sendEmail(body='', email='', attachments=None, subject='', toaddr=''):
    '''
    Send email to my email account from xdtoolkit@gmail.com
    '''
    # avoid a shared mutable default for the attachment list
    if attachments is None:
        attachments = []
print('Sending email')
fromaddr = "xdtoolkit@gmail.com"
if not toaddr:
toaddr = "mcrav@chem.au.dk"
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = subject
for f in attachments:
with open(f, "rb") as fil:
part = MIMEApplication(
fil.read(),
Name=os.path.basename(f)
)
part['Content-Disposition'] = 'attachment; filename="%s"' % os.path.basename(f)
msg.attach(part)
bodyText = '{0}<br><br>Email Address: {1}'.format(body,email)
msg.attach(MIMEText(bodyText, 'html'))
print('starting smtp')
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login("xdtoolkit@gmail.com", '****')
print('logged in')
text = msg.as_string()
server.sendmail("xdtoolkit@gmail.com", toaddr, text)
print('sent')
server.quit()
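# A minimal usage sketch, assuming the SMTP password above has been filled in
# and that 'report.txt' exists (both values here are placeholders):
if __name__ == '__main__':
    sendEmail(body='XD Toolkit feedback',
              email='user@example.com',    # hypothetical reply address
              attachments=['report.txt'],  # hypothetical attachment
              subject='Feedback',
              toaddr='mcrav@chem.au.dk')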
|
gpl-3.0
| 5,255,107,755,861,065,000
| 30.847826
| 91
| 0.519454
| false
| 4.046961
| false
| false
| false
|
tavy14t/tw_project
|
Testing/Pocket_Testing.py
|
1
|
4909
|
import os
import sys
import requests
import time
from pocket import Pocket
import webbrowser
import collections
import json
sys.dont_write_bytecode = True
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "trex.settings")
import django
django.setup()
from restapi.models import *
from random import randint
# POCKET_CONSUMER_KEY = '67853-fa80baf944f56fd495dab319' #Desktop
POCKET_CONSUMER_KEY = '67853-17e07228b29f7c44ef6d2784' # Web
REDIRECT_URI = 'http://localhost:8000/home/about'
'''
# connecting to pocket API; pocket_api stores the http response
pocket_api = requests.post('https://getpocket.com/v3/oauth/request',
data={'consumer_key': POCKET_CONSUMER_KEY,
'redirect_uri': 'http://localhost:8000/home/about'})
print pocket_api.status_code # if 200, it means all ok.
print pocket_api.headers # prints in JSON format
print pocket_api.text
code = pocket_api.text.split('=')[1]
print code
os.system('chrome "https://getpocket.com/auth/authorize?request_token={}&redirect_uri={}"'.format(code, 'http://localhost:8000/home/about'))
time.sleep(5)
print '--------------------------------------------'
pocket_auth = requests.post('https://getpocket.com/v3/oauth/authorize',
data={'consumer_key': POCKET_CONSUMER_KEY,
'code': code})
print pocket_auth.status_code
print pocket_auth.text
pocket_access_token = pocket_auth.text.split('=')[1].split('&')[0]
print '--------------------------------------------'
request_token = Pocket.get_request_token(consumer_key=POCKET_CONSUMER_KEY, redirect_uri=REDIRECT_URI)
print 1
# URL to redirect user to, to authorize your app
auth_url = Pocket.get_auth_url(code=request_token, redirect_uri=REDIRECT_URI)
print 2
# os.system('chrome "{}"'.format(auth_url))
print auth_url
webbrowser.open_new_tab(auth_url)
user_credentials = Pocket.get_credentials(consumer_key=POCKET_CONSUMER_KEY, code=request_token)
time.sleep(3)
print 3
access_token = user_credentials['access_token']
print 4
pocket_instance = Pocket(POCKET_CONSUMER_KEY, access_token)
pocket_get = open('pocket_get.txt', 'w')
def recursive_keys(d, depth=0):
for key in d:
if isinstance(d[key], collections.Mapping):
print ' ' * depth + key
pocket_get.write(' ' * depth + key + '\n')
recursive_keys(d[key], depth + 1)
else:
print ' ' * depth + key + ' ->' + unicode(d[key])
pocket_get.write(' ' * depth + key + ' ->' + unicode(d[key]) + '\n')
d = pocket_instance.get()[0]['list']
for key in d:
print d[key]['resolved_title'], d[key]['given_url']
# open('test.txt', 'w').write(str(pocket_instance.get()))
print '--------------------------------'
#access_token = 'd8830338-65cd-ef39-64db-ec5b99'
#pocket_instance = Pocket(POCKET_CONSUMER_KEY, access_token)
#sample = pocket_instance.get(detailType='complete')[0]
'''
with open('../result.json', 'r') as fp:
pocket_request = json.load(fp)
pocket_posts = pocket_request['list']
def pretty(d, indent=0):
for key, value in d.iteritems():
print ' ' * indent + unicode(key)
if isinstance(value, dict):
pretty(value, indent + 1)
else:
print ' ' * (indent + 1) + unicode(value)
data = {'posts': {}}
for post in pocket_posts:
data['posts'][post] = {}
data['posts'][post]['name'] = pocket_posts[post]['given_title']
data['posts'][post]['embed_link'] = pocket_posts[post]['resolved_url']
if 'tags' in pocket_posts[post]:
data['posts'][post]['tags'] = [tag for tag in pocket_posts[post]['tags']]
else:
data['posts'][post]['tags'] = []
print pocket_posts[pocket_posts.keys()[0]]
# print [tag for tag in pocket_posts[post]]
'''
tags = []
for post in pocket_posts:
#print post
if 'tags' in pocket_posts[post]:
tags.append(pocket_posts[post]['tags'])
print tags
pocket_api = requests.post('https://getpocket.com/v3/get',
data={'consumer_key': POCKET_CONSUMER_KEY,
'access_token': access_token,
'count': 30,
'state': 'unread',
'detailType': 'complete',
})
# print pocket_api.headers
print pocket_api.text
e = json.loads(requests.post('https://getpocket.com/v3/get',
data={'consumer_key': POCKET_CONSUMER_KEY,
'access_token': access_token,
'count': 30,
'state': 'unread',
}).text)['list']
d = json.loads(pocket_api.text)['list']
for key in d:
print set(d[key].keys()).difference(set(e[key].keys()))
e = [key]
# print d
# recursive_keys(pocket_instance.get()[0])
'''
|
mit
| 6,632,174,290,124,783,000
| 29.302469
| 140
| 0.586881
| false
| 3.314652
| false
| false
| false
|
stryku/hb
|
image_processing/tesseract/trainer.py
|
1
|
1576
|
from subprocess import call
BUILD_DIR='build'
FONTa='Fake Receipt'
LANG='hb'
OUTPUTBASE = LANG + '.' + FONTa
def call_shell(command):
splitted = command.split()
call(splitted)
print(command)
def text2image(text_file):
splitted = str('text2image --text=' + text_file + ' --fonts_dir ..').split()
splitted.append('--outputbase=' + OUTPUTBASE)
splitted.append('--font=Fake Receipt')
call(splitted)
def training():
command = ['tesseract', OUTPUTBASE + '.tif', OUTPUTBASE, 'box.train.stderr']
call(command)
def unicharset():
command = ['unicharset_extractor',
OUTPUTBASE + '.box']
call(command)
def clustering():
command = ['mftraining',
'-F', '../font_properties',
'-U', 'unicharset',
OUTPUTBASE + '.tr']
call(command)
def cntraining():
command = ['cntraining', OUTPUTBASE + '.tr']
call(command)
def cp_with_prefix(filename, prefix):
call_shell('cp ' + filename + ' ' + prefix + '.' + filename)
def prepare_for_combine():
cp_with_prefix('unicharset', LANG)
cp_with_prefix('shapetable', LANG)
cp_with_prefix('normproto', LANG)
cp_with_prefix('inttemp', LANG)
cp_with_prefix('pffmtable', LANG)
def combine():
command = ['combine_tessdata', LANG + '.']
call(command)
def copy_combined():
name = LANG + '.traineddata'
call_shell('cp ' + name + ' ../tessdata/' + name)
text2image('../training_text.txt')
training()
unicharset()
clustering()
cntraining()
prepare_for_combine()
combine()
copy_combined()
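# Expected working layout for the pipeline above, inferred from the relative
# paths this script uses (treat it as an assumption, not documentation):
#   ./                     build directory the script runs in
#   ../training_text.txt   text rendered to a .tif/.box pair by text2image()
#   ../font_properties     font description consumed by mftraining
#   ../tessdata/           destination of the combined hb.traineddata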
|
mit
| 1,929,229,213,959,798,300
| 20.013333
| 80
| 0.613579
| false
| 3.317895
| false
| false
| false
|
frankyrumple/smc
|
modules/ednet/sequentialguid.py
|
1
|
5058
|
###### SequentialGUID
import os
import datetime
import sys
from binascii import unhexlify, hexlify
import uuid
class SequentialGUID:
SEQUENTIAL_GUID_AS_STRING = 0
SEQUENTIAL_GUID_AS_BINARY = 1
SEQUENTIAL_GUID_AT_END = 2
def __init__(self):
pass
@staticmethod
def NewGUID(guid_type = SEQUENTIAL_GUID_AS_STRING):
        # What type of machine are we running on?
endian = sys.byteorder # will be 'little' or 'big'
# Need some random info
rand_bytes = bytearray()
rand_bytes += os.urandom(10) #Get 10 random bytes
        # Get the current timestamp in milliseconds - makes this sequential
ts = long((datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds() * 1000)
tsbytes = bytearray()
# NOTE: we don't pass endian into long_to_bytes
tsbytes += long_to_bytes(ts) # Convert long to byte array
        while (len(tsbytes) < 8): # Make sure to pad some 0s on the front so it is 64 bits
tsbytes.insert(0, 0) # Python will most likely make it a byte array
        guid_bytes = bytearray(16) # 16 bytes is 128 bits
# Combine the random and timestamp bytes into a GUID
if(guid_type != SequentialGUID.SEQUENTIAL_GUID_AT_END):
guid_bytes[0] = tsbytes[2] # Copy timestamp into guid
guid_bytes[1] = tsbytes[3]
guid_bytes[2] = tsbytes[4]
guid_bytes[3] = tsbytes[5]
guid_bytes[4] = tsbytes[6]
guid_bytes[5] = tsbytes[7]
guid_bytes[6] = rand_bytes[0] # Copy rand bytes into guid
guid_bytes[7] = rand_bytes[1]
guid_bytes[8] = rand_bytes[2]
guid_bytes[9] = rand_bytes[3]
guid_bytes[10] = rand_bytes[4]
guid_bytes[11] = rand_bytes[5]
guid_bytes[12] = rand_bytes[6]
guid_bytes[13] = rand_bytes[7]
guid_bytes[14] = rand_bytes[8]
guid_bytes[15] = rand_bytes[9]
if (guid_type == SequentialGUID.SEQUENTIAL_GUID_AS_STRING and endian == "little" and 1!=1):
## TODO: This is mucking things up for some reason hence the 1!=1
# Need to swap stuff around if this is going to be string on little endian machines
b = guid_bytes[0:4] # First data chunk (4 items)
b.reverse()
guid_bytes[0] = b[0]
guid_bytes[1] = b[1]
guid_bytes[2] = b[2]
guid_bytes[3] = b[3]
b = guid_bytes[4:6] # 2nd data chunk (2 items)
b.reverse()
guid_bytes[4] = b[0]
guid_bytes[5] = b[1]
pass
pass
else:
# Same as above, but different order - timestamp at end not beginning
guid_bytes[10] = tsbytes[2] # Copy timestamp into guid
guid_bytes[11] = tsbytes[3]
guid_bytes[12] = tsbytes[4]
guid_bytes[13] = tsbytes[5]
guid_bytes[14] = tsbytes[6]
guid_bytes[15] = tsbytes[7]
guid_bytes[0] = rand_bytes[0] # Copy rand bytes into guid
guid_bytes[1] = rand_bytes[1]
guid_bytes[2] = rand_bytes[2]
guid_bytes[3] = rand_bytes[3]
guid_bytes[4] = rand_bytes[4]
guid_bytes[5] = rand_bytes[5]
guid_bytes[6] = rand_bytes[6]
guid_bytes[7] = rand_bytes[7]
guid_bytes[8] = rand_bytes[8]
guid_bytes[9] = rand_bytes[9]
pass
# Create the guid and return it
guid = uuid.UUID(hex=hexlify(guid_bytes))
return guid
def long_to_bytes (val, endianness='big'):
""" Pulled from http://stackoverflow.com/questions/8730927/convert-python-long-int-to-fixed-size-byte-array
Use :ref:`string formatting` and :func:`~binascii.unhexlify` to
convert ``val``, a :func:`long`, to a byte :func:`str`.
:param long val: The value to pack
:param str endianness: The endianness of the result. ``'big'`` for
big-endian, ``'little'`` for little-endian.
If you want byte- and word-ordering to differ, you're on your own.
Using :ref:`string formatting` lets us use Python's C innards.
"""
# one (1) hex digit per four (4) bits
width = val.bit_length()
# unhexlify wants an even multiple of eight (8) bits, but we don't
# want more digits than we need (hence the ternary-ish 'or')
width += 8 - ((width % 8) or 8)
# format width specifier: four (4) bits per hex digit
fmt = '%%0%dx' % (width // 4)
# prepend zero (0) to the width, to zero-pad the output
s = unhexlify(fmt % val)
if endianness == 'little':
# see http://stackoverflow.com/a/931095/309233
s = s[::-1]
return s
### Usage
### guid = SequentialGUID.NewGUID(SequentialGUID.SEQUENTIAL_GUID_AS_STRING)
### Use String for most dbs, and At End for MSSQL if you use their GUID field type
### REQUIRES: Python 2.6+ with bytearray support
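### A minimal demonstration, assuming Python 2 as noted above; the random
### tail bytes differ on every call, so the printed values are illustrative:
if __name__ == '__main__':
    g1 = SequentialGUID.NewGUID(SequentialGUID.SEQUENTIAL_GUID_AS_STRING)
    g2 = SequentialGUID.NewGUID(SequentialGUID.SEQUENTIAL_GUID_AS_STRING)
    print g1  # the leading bytes are the timestamp ...
    print g2  # ... so later GUIDs usually sort after earlier ones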
###### End SequentialGUID
|
mit
| 5,169,569,458,561,843,000
| 36.466667
| 111
| 0.574733
| false
| 3.522284
| false
| false
| false
|
suttond/MODOI
|
SimulationClient/SimulationClient.py
|
1
|
12355
|
from multiprocessing.connection import Client
import time
import logging
import socket
import numpy as np
from SimulationUtilities import Configuration_Processing
from SimulationUtilities.Communication_Codes import comm_code
import LinearAlgebra as la
from CustomBFGS import find_geodesic_midpoint
from MetricValues import shutdown_metric
class SimulationClient:
"""
The purpose of this object is to compute local geodesics using a modified BFGS method. The object receives a pair of
end points to compute the local geodesic between. The simulation client then returns a new position for the node.
    The simulation client needs the value of the potential and its gradient function; to obtain these it makes
    calls to its assigned SimulationPotential servers.
Attributes:
CONFIGURATION (dict) :
A dictionary containing the parsed values from the file in configuration_file.
CURVE_ADDRESS (str, int) :
A tuple containing a string representing the hostname/IP and an integer for the running SimulationServer.
AUTHKEY (str) :
A string containing the authorisation key for the listener method.
DELAY (float) :
            The length of time the SimulationClient should wait, when there are no new jobs available, before
            attempting to contact the SimulationServer again.
METRIC_SERVERS :
A list containing tuples of addresses for SimulationPotential instances.
ID (str) :
A string that uniquely identifies the client amongst all other clients in the computation.
MASS_MATRIX (numpy.array) :
A NumPy matrix containing the mass matrix of the molecular system. Produced automatically from the Atomistic
Simulation Environment.
"""
def __init__(self, simulation_client_id, server_host, server_port, authkey, metric_server_addresses,
configuration_file, logfile=None, log_level=logging.INFO, callback_delay=1.0):
"""The constructor for the SimulationClient class.
Note:
This class is intended to be used in conjunction with running SimulationServer and SimulationPotential
objects. It will cause a runtime exception if this condition isn't satisfied.
Args:
simulation_client_id (str) :
A string that uniquely identifies the client amongst all other clients in the computation.
server_host (str) :
The TCP/IP hostname of the running SimulationServer instance.
server_port (int) :
The port number that the SimulationServer instance is communicating on.
authkey (str, optional) :
                Authentication key used to secure process communications. Defaults to None for local computations
                to increase speed.
metric_server_addresses :
A list containing tuples of the type (str, int) containing the hostnames and ports for the running
SimulationPotential instances.
configuration_file (str) :
Directory and filename of the configuration file.
logfile (str, optional) :
Directory and filename of the log file. Is created if doesn't exist, overwritten if it does.
log_level (int, optional) :
Specify level of logging required as described in the logging package documentation.
callback_delay (float) :
                The length of time the SimulationClient should wait, when there are no new jobs available, before
                attempting to contact the SimulationServer again.
"""
# Set the SimulationClient log output to write to logfile at prescribed log level if specified. Otherwise write
# to console output. Setting to DEBUG will cause poor performance and should only be used to debug.
if logfile is not None:
logging.basicConfig(filename=logfile, level=log_level, filemode='w')
else:
logging.basicConfig(level=logging.INFO)
# Read configuration from configuration_file and store in SimulationPotential's CONFIGURATION attribute.
self.CONFIGURATION = Configuration_Processing.read_configuration_file(configuration_file)
# Set ADDRESS and AUTHKEY attributes for Client object in the start_client method.
self.CURVE_ADDRESS = (server_host, server_port)
self.AUTHKEY = authkey
# Set the callback delay as described in the attributes.
self.DELAY = callback_delay
# Store the ADDRESS and AUTHKEY attributes for Client objects in the start_client method used to compute the
# metric values.
self.METRIC_SERVERS = metric_server_addresses
# Set the client's unique identifier.
self.ID = simulation_client_id
# Compute the mass matrix for the molecular system.
self.MASS_MATRIX = np.diag(np.dstack((self.CONFIGURATION['molecule'].get_masses(),) *
(self.CONFIGURATION['dimension'] /
len(self.CONFIGURATION['molecule'].get_masses()))).flatten())
def start_client(self):
"""Start the instance of SimulationClient and begin computing local geodesics.
"""
# Define a flag to indicate if contact with the SimulationServer instance is possible.
connection_made = False
# Create a response to send to the SimulationServer indicating that this is the first time this SimulationClient
# has attempted to get a task.
client_response = {'status_code': comm_code('CLIENT_FIRST_CONTACT'),
'client_name': self.ID}
# Attempt to connect to the SimulationServer instance.
try:
# Create a Client object that communicates with the listener on CURVE_ADDRESS using password AUTHKEY.
server = Client(self.CURVE_ADDRESS, authkey=self.AUTHKEY)
# When a connection is made send the client message.
server.send(client_response)
# The client assumes the server will respond with a message, either a local geodesic to compute or a message
# asking the client to try again after DELAY seconds.
server_response = server.recv()
# Interpret the servers response by first extracting the status_code variable from the response.
server_response_code = server_response['status_code']
# Close the connection to the server at this point to allow other clients to communicate with the
# SimulationServer.
server.close()
# Store in the connection_made flag that it was possible to create a connection.
connection_made = True
        # If it isn't possible to connect to the server then a socket.error exception is raised.
except socket.error:
# Write an error to the log for this client indicating that the connection couldn't be made.
logging.warning('Failed to Make Connection to SimulationServer. Shutting down client.')
# Send a signal to the running instances of SimulationPotential that the SimulationClient would have used
# indicating that they should also shutdown.
shutdown_metric(self.METRIC_SERVERS, self.AUTHKEY)
# This is the main loop of the SimulationClient - the program stops running when it is no longer possible to
# communicate with the SimulationServer. This is decided by the connection_made flag.
while connection_made:
# At this point in the code a new server_response should have been received. How the SimulationClient reacts
# depends on the communication code received.
# If the server has indicated it is giving the SimulationClient a new geodesic to compute then...
if server_response_code == comm_code('SERVER_GIVES_NEW_TASK'):
# Compute the rescaled tangent direction of the curve as store as a NumPy array.
tangent_direction = (1 / float(self.CONFIGURATION['local_number_of_nodes'] + 1)) * \
np.subtract(server_response['right_end_point'], server_response['left_end_point'], dtype='float64')
# Compute the local geodesic using the BFGS method and store the NumPy array in result
result = \
find_geodesic_midpoint(server_response['left_end_point'],
server_response['right_end_point'],
self.CONFIGURATION['local_number_of_nodes'],
la.orthonormal_tangent_basis(tangent_direction,
self.CONFIGURATION['dimension']),
tangent_direction, self.CONFIGURATION['codimension'],
self.METRIC_SERVERS,
self.MASS_MATRIX,
self.AUTHKEY)
                # If the function find_geodesic_midpoint returned a None object then it couldn't contact its
# SimulationPotential instances and should be restarted.
if result is None:
# Tell the user via the log that the SimulationPotential instances couldn't be contacted.
logging.warning('Failed to Make Connection to SimulationPotential. Shutting down client.')
# Exit the main loop of the SimulationClient.
break
# If there is a midpoint then construct a client response to tell the server which node has which new
# position.
client_response = {'status_code': comm_code('CLIENT_HAS_MIDPOINT_DATA'),
'node_number': server_response['node_number'],
'new_node_position': result,
'client_name': self.ID
}
# Otherwise if the server has asked the SimulationClient to try again later...
elif server_response_code == comm_code('SERVER_REQUEST_CALLBACK'):
# Make the SimulationClient wait for DELAY seconds
time.sleep(self.DELAY)
# Create a response to tell the SimulationServer that the SimulationClient would like a new job.
client_response = {'status_code': comm_code('CLIENT_HAS_NO_TASK'), 'client_name': self.ID}
# Attempt to connect to the SimulationServer instance.
try:
# Create a Client object that communicates with the listener on CURVE_ADDRESS using password AUTHKEY.
server = Client(self.CURVE_ADDRESS, authkey=self.AUTHKEY)
# When a connection is made send the client message.
server.send(client_response)
# The client assumes the server will respond with a message, either a local geodesic to compute or a
# message asking the client to try again after DELAY seconds.
server_response = server.recv()
# Interpret the servers response by first extracting the status_code variable from the response.
server_response_code = server_response['status_code']
# Close the connection to the server at this point to allow other clients to communicate with the
# SimulationServer.
server.close()
            # If it isn't possible to connect to the server then a socket.error exception is raised.
except (socket.error, EOFError):
# Write an error to the log for this client indicating that the connection couldn't be made.
logging.warning('Failed to Make Connection to SimulationServer. Shutting down client.')
# Send a signal to the running instances of SimulationPotential that the SimulationClient would have
# used indicating that they should also shutdown.
shutdown_metric(self.METRIC_SERVERS, self.AUTHKEY)
# Exit the main loop of the SimulationClient.
break
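# A minimal construction sketch, assuming a SimulationServer on localhost:24000
# and two SimulationPotential instances; every address, port, key and filename
# below is a hypothetical placeholder:
if __name__ == '__main__':
    client = SimulationClient(
        simulation_client_id='client-0',
        server_host='localhost',
        server_port=24000,
        authkey='shared-secret',
        metric_server_addresses=[('localhost', 24001), ('localhost', 24002)],
        configuration_file='simulation.cfg',
        logfile='client-0.log',
        callback_delay=1.0,
    )
    client.start_client()  # blocks until the server becomes unreachable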
|
lgpl-3.0
| 1,447,833,398,627,233,500
| 53.192982
| 120
| 0.637556
| false
| 5.071839
| true
| false
| false
|
7sDream/zhihu-py3
|
zhihu/topic.py
|
1
|
17794
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time
from datetime import datetime
from .common import *
from .base import BaseZhihu
class Topic(BaseZhihu):
"""答案类,请使用``ZhihuClient.topic``方法构造对象."""
@class_common_init(re_topic_url)
def __init__(self, url, name=None, session=None):
"""创建话题类实例.
:param url: 话题url
:param name: 话题名称,可选
:return: Topic
"""
self.url = url
self._session = session
self._name = name
self._id = int(re_topic_url.match(self.url).group(1))
@property
def id(self):
"""获取话题Id(网址最后那串数字)
:return: 话题Id
:rtype: int
"""
return self._id
@property
@check_soup('_xsrf')
def xsrf(self):
"""获取知乎的反xsrf参数(用不到就忽视吧~)
:return: xsrf参数
:rtype: str
"""
return self.soup.find('input', attrs={'name': '_xsrf'})['value']
@property
@check_soup('_tid')
def tid(self):
"""话题内部Id,有时候要用到
:return: 话题内部Id
:rtype: int
"""
return int(self.soup.find(
'div', id='zh-topic-desc')['data-resourceid'])
@property
@check_soup('_name')
def name(self):
"""获取话题名称.
:return: 话题名称
:rtype: str
"""
return self.soup.find('h1').text
@property
def parents(self):
"""获取此话题的父话题。
注意:由于没找到有很多父话题的话题来测试,
所以本方法可能再某些时候出现问题,请不吝反馈。
:return: 此话题的父话题,返回生成器
:rtype: Topic.Iterable
"""
self._make_soup()
parent_topic_tag = self.soup.find('div', class_='parent-topic')
if parent_topic_tag is None:
yield []
else:
for topic_tag in parent_topic_tag.find_all('a'):
yield Topic(Zhihu_URL + topic_tag['href'],
topic_tag.text.strip(),
session=self._session)
@property
def children(self):
"""获取此话题的子话题
:return: 此话题的子话题, 返回生成器
:rtype: Topic.Iterable
"""
self._make_soup()
child_topic_tag = self.soup.find('div', class_='child-topic')
if child_topic_tag is None:
return []
elif '共有' not in child_topic_tag.contents[-2].text:
for topic_tag in child_topic_tag.div.find_all('a'):
yield Topic(Zhihu_URL + topic_tag['href'],
topic_tag.text.strip(),
session=self._session)
else:
flag = 'load'
child = ''
data = {'_xsrf': self.xsrf}
params = {
'parent': self.id
}
while flag == 'load':
params['child'] = child
res = self._session.post(Topic_Get_Children_Url,
params=params, data=data)
j = map(lambda x: x[0], res.json()['msg'][1])
*topics, last = j
for topic in topics:
yield Topic(Zhihu_URL + '/topic/' + topic[2], topic[1],
session=self._session)
flag = last[0]
child = last[2]
if flag == 'topic':
yield Topic(Zhihu_URL + '/topic/' + last[2], last[1],
session=self._session)
@property
@check_soup('_follower_num')
def follower_num(self):
"""获取话题关注人数.
:return: 关注人数
:rtype: int
"""
follower_num_block = self.soup.find(
'div', class_='zm-topic-side-followers-info')
        # no followers: the corresponding block is missing, so return 0 directly (thanks to Zhihu user 段晓晨 for reporting this)
if follower_num_block.strong is None:
return 0
return int(follower_num_block.strong.text)
@property
def followers(self):
"""获取话题关注者
:return: 话题关注者,返回生成器
:rtype: Author.Iterable
"""
from .author import Author, ANONYMOUS
self._make_soup()
gotten_data_num = 20
data = {
'_xsrf': self.xsrf,
'start': '',
'offset': 0
}
while gotten_data_num == 20:
res = self._session.post(
Topic_Get_More_Follower_Url.format(self.id), data=data)
j = res.json()['msg']
gotten_data_num = j[0]
data['offset'] += gotten_data_num
soup = BeautifulSoup(j[1])
divs = soup.find_all('div', class_='zm-person-item')
for div in divs:
h2 = div.h2
url = Zhihu_URL + h2.a['href']
name = h2.a.text
motto = h2.parent.div.text.strip()
try:
yield Author(url, name, motto, session=self._session)
except ValueError: # invalid url
yield ANONYMOUS
data['start'] = int(re_get_number.match(divs[-1]['id']).group(1))
@property
@check_soup('_photo_url')
def photo_url(self):
"""获取话题头像图片地址.
:return: 话题头像url
:rtype: str
"""
img = self.soup.find('a', id='zh-avartar-edit-form').img['src']
return img.replace('_m', '_r')
@property
@check_soup('_description')
def description(self):
"""获取话题描述信息.
:return: 话题描述信息
:rtype: str
"""
desc = self.soup.find('div', class_='zm-editable-content').text
return desc
@property
def top_authors(self):
"""获取最佳回答者
:return: 此话题下最佳回答者,一般来说是5个,要不就没有,返回生成器
:rtype: Author.Iterable
"""
from .author import Author, ANONYMOUS
self._make_soup()
t = self.soup.find('div', id='zh-topic-top-answerer')
if t is None:
return
for d in t.find_all('div', class_='zm-topic-side-person-item-content'):
url = Zhihu_URL + d.a['href']
name = d.a.text
motto = d.find('span', class_='bio')['title']
try:
yield Author(url, name, motto, session=self._session)
except ValueError: # invalid url
yield ANONYMOUS
@property
def top_answers(self):
"""获取话题下的精华答案.
:return: 话题下的精华答案,返回生成器.
:rtype: Answer.Iterable
"""
from .question import Question
from .answer import Answer
from .author import Author, ANONYMOUS
top_answers_url = Topic_Top_Answers_Url.format(self.id)
params = {'page': 1}
while True:
            # return once past page 50
if params['page'] > 50:
return
res = self._session.get(top_answers_url, params=params)
params['page'] += 1
soup = BeautifulSoup(res.content)
            # fewer than 50 pages: an error page was reached, return
if soup.find('div', class_='error') is not None:
return
questions = soup.find_all('a', class_='question_link')
answers = soup.find_all('a', class_='answer-date-link')
authors = soup.find_all('div', class_='zm-item-answer-author-info')
upvotes = soup.find_all('a', class_='zm-item-vote-count')
for ans, up, q, au in zip(answers, upvotes, questions, authors):
answer_url = Zhihu_URL + ans['href']
question_url = Zhihu_URL + q['href']
question_title = q.text.strip()
upvote = up.text
if upvote.isdigit():
upvote = int(upvote)
else:
upvote = None
question = Question(question_url, question_title,
session=self._session)
if au.a is None:
author = ANONYMOUS
else:
author_url = Zhihu_URL + au.a['href']
author_name = au.a.text
author_motto = au.strong['title'] if au.strong else ''
author = Author(author_url, author_name, author_motto,
session=self._session)
yield Answer(answer_url, question, author, upvote,
session=self._session)
@property
def questions(self):
"""获取话题下的所有问题(按时间降序排列)
:return: 话题下所有问题,返回生成器
:rtype: Question.Iterable
"""
from .question import Question
question_url = Topic_Questions_Url.format(self.id)
params = {'page': 1}
older_time_stamp = int(time.time()) * 1000
while True:
res = self._session.get(question_url, params=params)
soup = BeautifulSoup(res.content)
if soup.find('div', class_='error') is not None:
return
questions = soup.find_all('div', class_='question-item')
questions = list(filter(
lambda x: int(x.h2.span['data-timestamp']) < older_time_stamp,
questions))
for qu_div in questions:
url = Zhihu_URL + qu_div.h2.a['href']
title = qu_div.h2.a.text.strip()
creation_time = datetime.fromtimestamp(
int(qu_div.h2.span['data-timestamp']) // 1000)
yield Question(url, title, creation_time=creation_time,
session=self._session)
older_time_stamp = int(questions[-1].h2.span['data-timestamp'])
params['page'] += 1
@property
def unanswered_questions(self):
"""获取话题下的等待回答的问题
什么是「等待回答」的问题:https://www.zhihu.com/question/40470324
:return: 话题下等待回答的问题,返回生成器
:rtype: Question.Iterable
"""
from .question import Question
question_url = Topic_Unanswered_Question_Url.format(self.id)
params = {'page': 1}
while True:
res = self._session.get(question_url, params=params)
soup = BeautifulSoup(res.content)
if soup.find('div', class_='error') is not None:
return
questions = soup.find_all('div', class_='question-item')
for qu_div in questions:
url = Zhihu_URL + qu_div.h2.a['href']
title = qu_div.h2.a.text.strip()
yield Question(url, title, session=self._session)
params['page'] += 1
@property
def answers(self):
"""获取话题下所有答案(按时间降序排列)
:return: 话题下所有答案,返回生成器
:rtype: Answer.Iterable
"""
from .question import Question
from .answer import Answer
from .author import Author, ANONYMOUS
newest_url = Topic_Newest_Url.format(self.id)
params = {'start': 0, '_xsrf': self.xsrf}
res = self._session.get(newest_url)
soup = BeautifulSoup(res.content)
while True:
divs = soup.find_all('div', class_='folding')
            # if the topic has no answers, return directly
if len(divs) == 0:
return
last_score = divs[-1]['data-score']
for div in divs:
q = div.find('a', class_="question_link")
question_url = Zhihu_URL + q['href']
question_title = q.text.strip()
question = Question(question_url, question_title,
session=self._session)
ans = div.find('a', class_='answer-date-link')
answer_url = Zhihu_URL + ans['href']
upvote = div.find('a', class_='zm-item-vote-count').text
if upvote.isdigit():
upvote = int(upvote)
else:
upvote = None
au = div.find('div', class_='zm-item-answer-author-info')
if au.a is None:
author = ANONYMOUS
else:
author_url = Zhihu_URL + au.a['href']
author_name = au.a.text
author_motto = au.strong['title'] if au.strong else ''
author = Author(author_url, author_name, author_motto,
session=self._session)
yield Answer(answer_url, question, author, upvote,
session=self._session)
params['offset'] = last_score
res = self._session.post(newest_url, data=params)
gotten_feed_num = res.json()['msg'][0]
            # return if zero feed items were received
if gotten_feed_num == 0:
return
soup = BeautifulSoup(res.json()['msg'][1])
@property
def hot_questions(self):
"""获取话题下热门的问题
:return: 话题下的热门动态中的问题,按热门度顺序返回生成器
:rtype: Question.Iterable
"""
from .question import Question
hot_questions_url = Topic_Hot_Questions_Url.format(self.id)
params = {'start': 0, '_xsrf': self.xsrf}
res = self._session.get(hot_questions_url)
soup = BeautifulSoup(res.content)
while True:
questions_duplicate = soup.find_all('a', class_='question_link')
            # if the topic has no questions, return directly
if len(questions_duplicate) == 0:
return
            # remove duplicate questions
questions = list(set(questions_duplicate))
questions.sort(key=self._get_score, reverse=True)
last_score = soup.find_all(
'div', class_='feed-item')[-1]['data-score']
for q in questions:
question_url = Zhihu_URL + q['href']
question_title = q.text.strip()
question = Question(question_url, question_title,
session=self._session)
yield question
params['offset'] = last_score
res = self._session.post(hot_questions_url, data=params)
gotten_feed_num = res.json()['msg'][0]
            # return if zero questions were received
if gotten_feed_num == 0:
return
soup = BeautifulSoup(res.json()['msg'][1])
@property
def hot_answers(self):
"""获取话题下热门的回答
:return: 话题下的热门动态中的回答,按热门度顺序返回生成器
:rtype: Question.Iterable
"""
from .question import Question
from .author import Author
from .answer import Answer
hot_questions_url = Topic_Hot_Questions_Url.format(self.id)
params = {'start': 0, '_xsrf': self.xsrf}
res = self._session.get(hot_questions_url)
soup = BeautifulSoup(res.content)
while True:
answers_div = soup.find_all('div', class_='feed-item')
last_score = answers_div[-1]['data-score']
for div in answers_div:
                # a missing textarea means the answer has been censored.
if not div.textarea:
continue
question_url = Zhihu_URL + div.h2.a['href']
question_title = div.h2.a.text.strip()
question = Question(question_url, question_title,
session=self._session)
author_link = div.find('a', class_='author-link')
if not author_link:
author_url = None
author_name = '匿名用户'
author_motto = ''
else:
author_url = Zhihu_URL + author_link['href']
author_name = author_link.text
author_motto_span = div.find('span', class_='bio')
author_motto = author_motto_span['title'] \
if author_motto_span else ''
author = Author(author_url, author_name, author_motto,
session=self._session)
body = div.find('div', class_='zm-item-rich-text')
answer_url = Zhihu_URL + body['data-entry-url']
upvote_num = int(div.find(
'div', class_='zm-item-vote-info')['data-votecount'])
yield Answer(answer_url, question, author, upvote_num,
session=self._session)
params['offset'] = last_score
res = self._session.post(hot_questions_url, data=params)
gotten_feed_num = res.json()['msg'][0]
            # return if zero feed items were received
if gotten_feed_num == 0:
return
soup = BeautifulSoup(res.json()['msg'][1])
@staticmethod
def _get_score(tag):
h2 = tag.parent
div = h2.parent
try:
_ = h2['class']
return div['data-score']
except KeyError:
return div.parent.parent['data-score']
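# A minimal usage sketch, assuming a logged-in ZhihuClient as mentioned in the
# class docstring (the cookies file and topic URL below are placeholders):
#
#     client = ZhihuClient('cookies.json')
#     topic = client.topic('https://www.zhihu.com/topic/19552832')
#     print(topic.name, topic.follower_num)
#     for answer in topic.top_answers:
#         print(answer.url)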
|
mit
| -5,743,376,127,175,184,000
| 33.827731
| 79
| 0.495597
| false
| 3.359951
| false
| false
| false
|
simonzhangsm/voltdb
|
tools/kit_tools/build_kits.py
|
1
|
13604
|
#!/usr/bin/env python
import argparse, datetime, getpass, os, sys, shutil, traceback
from fabric.api import run, cd, local, get, settings, lcd, put
from fabric_ssh_config import getSSHInfoForHost
from fabric.context_managers import shell_env
from fabric.utils import abort
#Log in as user test, but build in a directory named for the real username
username = 'test'
builddir = "/tmp/" + getpass.getuser() + "Kits/buildtemp"
version = "UNKNOWN"
nativelibdir = "/nativelibs/obj" # ~test/libs/... usually
defaultlicensedays = 70 #default trial license length
################################################
# CHECKOUT CODE INTO A TEMP DIR
################################################
def checkoutCode(voltdbGit, proGit, rbmqExportGit, gitloc):
    global builddir
# clean out the existing dir
run("rm -rf " + builddir)
# make the build dir again
run("mkdir -p " + builddir)
# change to it
with cd(builddir):
# do the checkouts, collect checkout errors on both community &
# pro repos so user gets status on both checkouts
message = ""
run("git clone -q %s/voltdb.git" % gitloc)
result = run("cd voltdb; git checkout %s" % voltdbGit, warn_only=True)
if result.failed:
message = "VoltDB checkout failed. Missing branch %s." % rbmqExportGit
run("git clone -q %s/pro.git" % gitloc)
result = run("cd pro; git checkout %s" % proGit, warn_only=True)
if result.failed:
message += "\nPro checkout failed. Missing branch %s." % rbmqExportGit
#rabbitmq isn't mirrored internally, so don't use gitloc
run("git clone -q git@github.com:VoltDB/export-rabbitmq.git")
result = run("cd export-rabbitmq; git checkout %s" % rbmqExportGit, warn_only=True)
# Probably ok to use master for export-rabbitmq.
if result.failed:
print "\nExport-rabbitmg branch %s checkout failed. Defaulting to master." % rbmqExportGit
if len(message) > 0:
abort(message)
return run("cat voltdb/version.txt").strip()
################################################
# MAKE A RELEASE DIR
################################################
def makeReleaseDir(releaseDir):
# handle the case where a release dir exists for this version
if os.path.exists(releaseDir):
shutil.rmtree(releaseDir)
# create a release dir
os.makedirs(releaseDir)
print "Created dir: " + releaseDir
################################################
# BUILD THE COMMUNITY VERSION
################################################
def buildCommunity():
if build_mac:
packageMacLib="true"
else:
packageMacLib="false"
with cd(builddir + "/voltdb"):
run("pwd")
run("git status")
run("git describe --dirty")
run("ant -Djmemcheck=NO_MEMCHECK -Dkitbuild=%s %s clean default dist" % (packageMacLib, build_args))
################################################
# BUILD THE ENTERPRISE VERSION
################################################
def buildEnterprise():
if build_mac:
packageMacLib="true"
else:
packageMacLib="false"
with cd(builddir + "/pro"):
run("pwd")
run("git status")
run("git describe --dirty")
run("VOLTCORE=../voltdb ant -f mmt.xml -Djmemcheck=NO_MEMCHECK -Dallowreplication=true -DallowDrActiveActive=true -Dlicensedays=%d -Dkitbuild=%s %s clean dist.pro" % (defaultlicensedays, packageMacLib, build_args))
################################################
# BUILD THE PRO VERSION
################################################
#
def packagePro(version):
print "Making license"
makeTrialLicense(days=defaultlicensedays, dr_and_xdcr=False, nodes=3)
print "Repacking pro kit"
with cd(builddir + "/pro/obj/pro"):
run("mkdir pro_kit_staging")
with cd(builddir + "/pro/obj/pro/pro_kit_staging"):
run("tar xf ../voltdb-ent-%s.tar.gz" % version)
run("mv voltdb-ent-%s voltdb-pro-%s" % (version, version))
run("cp %s/pro/trial_*.xml voltdb-pro-%s/voltdb/license.xml" % (builddir, version))
run("tar cvfz ../voltdb-pro-%s.tar.gz voltdb-pro-%s" % (version, version))
################################################
# BUILD THE RABBITMQ EXPORT CONNECTOR
################################################
#Build rabbit MQ Exporter
def buildRabbitMQExport(version, dist_type):
# Paths to the final kit for unpacking/repacking with rmq export
paths = {
'community': builddir + "/voltdb/obj/release",
'ent' : builddir + "/pro/obj/pro/"
}
# Untar
with cd(paths[dist_type]):
run ("pwd")
run ("mkdir -p restage")
run ("tar xf voltdb-%s-%s.tar.gz -C restage" % (dist_type, version))
run ("rm -f voltdb-%s-%s.tar.gz" % (dist_type, version))
# Build RabbitMQ export jar and put it into the untarred kit
with cd(builddir + "/export-rabbitmq"):
run("pwd")
run("git status")
run("git describe --dirty", warn_only=True)
run("VOLTDIST=%s/restage/voltdb-%s-%s ant" % (paths[dist_type], dist_type, version))
# Retar
with cd(paths[dist_type]):
run("pwd")
run("tar -C restage -czf voltdb-%s-%s.tar.gz voltdb-%s-%s" % (dist_type, version, dist_type, version))
run ("rm -Rf restage")
################################################
# MAKE AN ENTERPRISE TRIAL LICENSE
################################################
# Must be called after buildEnterprise has been done
def makeTrialLicense(days=30, dr_and_xdcr="true", nodes=12):
with cd(builddir + "/pro/tools"):
run("./make_trial_licenses.pl -t %d -H %d -W %s" % (days, nodes, dr_and_xdcr ))
################################################
# MAKE A SHA256 checksum
################################################
def makeSHA256SUM(version, type):
with cd(builddir + "/pro/obj/pro"):
kitname="voltdb-" + type + "-" + version
run("sha256sum -b %s.tar.gz > %s.SHA256SUM" % (kitname, kitname))
################################################
# MAKE AN JAR FILES NEEDED TO PUSH TO MAVEN
################################################
def makeMavenJars():
with cd(builddir + "/voltdb"):
run("VOLTCORE=../voltdb ant -f build-client.xml maven-jars")
################################################
# COPY FILES
################################################
def copyFilesToReleaseDir(releaseDir, version, type=None):
print "Copying files to releaseDir"
if type:
typeString="-" + type
else:
typeString=""
get("%s/pro/obj/pro/voltdb%s-%s.tar.gz" % (builddir, typeString, version),
"%s/voltdb%s-%s.tar.gz" % (releaseDir, typeString, version))
get("%s/pro/obj/pro/voltdb%s-%s.SHA256SUM" % (builddir, typeString, version),
"%s/voltdb%s-%s.SHA256SUM" % (releaseDir, typeString, version))
def copyCommunityFilesToReleaseDir(releaseDir, version, operatingsys):
get("%s/voltdb/obj/release/voltdb-community-%s.tar.gz" % (builddir, version),
"%s/voltdb-community-%s.tar.gz" % (releaseDir, version))
# add stripped symbols
if operatingsys == "LINUX":
os.makedirs(releaseDir + "/other")
get("%s/voltdb/obj/release/voltdb-%s.sym" % (builddir, version),
"%s/other/%s-voltdb-voltkv-%s.sym" % (releaseDir, operatingsys, version))
def copyTrialLicenseToReleaseDir(releaseDir):
get("%s/pro/trial_*.xml" % (builddir),
"%s/license.xml" % (releaseDir))
def copyMavenJarsToReleaseDir(releaseDir, version):
#The .jars and upload file must be in a directory called voltdb - it is the projectname
mavenProjectDir = releaseDir + "/mavenjars/voltdb"
if not os.path.exists(mavenProjectDir):
os.makedirs(mavenProjectDir)
#Get the voltdbclient-n.n.jar from the recently built community build
get("%s/voltdb/obj/release/dist-client-java/voltdb/voltdbclient-%s.jar" % (builddir, version),
"%s/voltdbclient-%s.jar" % (mavenProjectDir, version))
#Get the upload.gradle file
get("%s/voltdb/tools/kit_tools/upload.gradle" % (builddir),
"%s/upload.gradle" % (mavenProjectDir))
#Get the src and javadoc .jar files
get("%s/voltdb/obj/release/voltdbclient-%s-javadoc.jar" % (builddir, version),
"%s/voltdbclient-%s-javadoc.jar" % (mavenProjectDir, version))
get("%s/voltdb/obj/release/voltdbclient-%s-sources.jar" % (builddir, version),
"%s/voltdbclient-%s-sources.jar" % (mavenProjectDir, version))
################################################
# CREATE CANDIDATE SYMLINKS
################################################
def createCandidateSysmlink(releaseDir):
    candidateDir = os.getenv('HOME') + "/releases/candidate"
local("rm -rf " + candidateDir)
local("ln -s %s %s" % (releaseDir, candidateDir))
################################################
# BACKUP RELEASE DIR
################################################
def backupReleaseDir(releaseDir,archiveDir,version):
if not os.path.exists(archiveDir):
os.makedirs(archiveDir)
    # make a backup with the timestamp of the build
timestamp = datetime.datetime.now().strftime("%y%m%d-%H%M%S")
local("tar -czf %s/%s-%s.tgz %s" \
% (archiveDir, version, timestamp, releaseDir))
################################################
# REMOVE NATIVE LIBS FROM SHARED DIRECTORY
################################################
def rmNativeLibs():
# local("ls -l ~" + username + nativelibdir)
local("rm -rf ~" + username + nativelibdir)
################################################
# GET THE GIT TAGS OR SHAS TO BUILD FROM
################################################
parser = argparse.ArgumentParser(description = "Create a full kit. With no args, will do build of master")
parser.add_argument('voltdb_sha', nargs="?", default="master", help="voltdb repository commit, tag or branch" )
parser.add_argument('pro_sha', nargs="?", default="master", help="pro repository commit, tag or branch" )
parser.add_argument('rabbitmq_sha', nargs="?", default="master", help="rabbitmq repository commit, tag or branch" )
parser.add_argument('-g','--gitloc', default="git@github.com:VoltDB", help="Repository location. For example: /home/github-mirror")
parser.add_argument('--nomac', action='store_true', help="Don't build Mac OSX")
parser.add_argument('--nocommunity', action='store_true', help="Don't build community")
args = parser.parse_args()
proTreeish = args.pro_sha
voltdbTreeish = args.voltdb_sha
rbmqExportTreeish = args.rabbitmq_sha
print args
build_community = not args.nocommunity
build_mac = not args.nomac
#If anything is missing we're going to dump this in oneoffs dir.
build_all = build_community and build_mac
if voltdbTreeish != proTreeish or not build_all:
oneOff = True
else:
oneOff = False
rmNativeLibs()
try:
    build_args = os.environ['VOLTDB_BUILD_ARGS']
except KeyError:
    build_args = ""
print "Building with pro: %s and voltdb: %s" % (proTreeish, voltdbTreeish)
build_errors=False
versionCentos = "unknown"
versionMac = "unknown"
releaseDir = "unknown"
# get ssh config [key_filename, hostname]
CentosSSHInfo = getSSHInfoForHost("volt15a")
MacSSHInfo = getSSHInfoForHost("voltmini")
UbuntuSSHInfo = getSSHInfoForHost("volt12d")
# build community kit on the mini so that .so can be picked up for unified kit
if build_mac or build_community:
try:
with settings(user=username,host_string=MacSSHInfo[1],disable_known_hosts=True,key_filename=MacSSHInfo[0]):
versionMac = checkoutCode(voltdbTreeish, proTreeish, rbmqExportTreeish, args.gitloc)
buildCommunity()
except Exception as e:
print traceback.format_exc()
print "Could not build MAC kit. Exception: " + str(e) + ", Type: " + str(type(e))
build_errors=True
# build kits on volt15a
try:
with settings(user=username,host_string=CentosSSHInfo[1],disable_known_hosts=True,key_filename=CentosSSHInfo[0]):
versionCentos = checkoutCode(voltdbTreeish, proTreeish, rbmqExportTreeish, args.gitloc)
if build_mac:
assert versionCentos == versionMac
if oneOff:
releaseDir = "%s/releases/one-offs/%s-%s-%s" % \
(os.getenv('HOME'), versionCentos, voltdbTreeish, proTreeish)
else:
releaseDir = os.getenv('HOME') + "/releases/" + voltdbTreeish
makeReleaseDir(releaseDir)
print "VERSION: " + versionCentos
if build_community:
buildCommunity()
buildRabbitMQExport(versionCentos, "community")
copyCommunityFilesToReleaseDir(releaseDir, versionCentos, "LINUX")
buildEnterprise()
buildRabbitMQExport(versionCentos, "ent")
makeSHA256SUM(versionCentos,"ent")
copyFilesToReleaseDir(releaseDir, versionCentos, "ent")
packagePro(versionCentos)
makeSHA256SUM(versionCentos,"pro")
copyFilesToReleaseDir(releaseDir, versionCentos, "pro")
makeTrialLicense()
copyTrialLicenseToReleaseDir(releaseDir)
makeMavenJars()
copyMavenJarsToReleaseDir(releaseDir, versionCentos)
except Exception as e:
print traceback.format_exc()
print "Could not build LINUX kit. Exception: " + str(e) + ", Type: " + str(type(e))
build_errors=True
rmNativeLibs() # cleanup imported native libs so not picked up unexpectedly by other builds
exit(build_errors)
#archiveDir = os.path.join(os.getenv('HOME'), "releases", "archive", voltdbTreeish, versionCentos)
#backupReleaseDir(releaseDir, archiveDir, versionCentos)
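# Example invocations (tags and branch names below are hypothetical; see the
# argparse definitions above for the accepted arguments):
#   ./build_kits.py                                   # master voltdb/pro/rabbitmq
#   ./build_kits.py voltdb-6.9 pro-6.9                # tagged kits, master rabbitmq
#   ./build_kits.py --nomac --nocommunity my-branch   # Linux enterprise-only one-off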
|
agpl-3.0
| -8,301,504,353,190,448,000
| 38.204611
| 222
| 0.600706
| false
| 3.564056
| false
| false
| false
|
boland1992/SeisSuite
|
bin/SNR_plots.py
|
1
|
4053
|
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 16 23:34:00 2015
@author: boland
"""
from seissuite.ant import (pscrosscorr)
import glob
import os
import pickle
#PICKLE_PATH = '/storage/ANT/PROGRAMS/ANT_OUTPUT/OUTPUT/CROSS/06.05.2015-15:53:28/XCORR-STACK_01.01.2014-31.12.2014_datalesspaz.pickle'
#PICKLE_PATH = '/home/boland/Desktop/XCORR-STACK_01.08.1999-10.06.2000_datalesspaz.part.pickle'
# import CONFIG class initalised in ./configs/tmp_config.pickle
config_pickle = 'configs/tmp_config.pickle'
f = open(name=config_pickle, mode='rb')
CONFIG = pickle.load(f)
f.close()
# import variables from initialised CONFIG class.
MSEED_DIR = CONFIG.MSEED_DIR
DATABASE_DIR = CONFIG.DATABASE_DIR
DATALESS_DIR = CONFIG.DATALESS_DIR
STATIONXML_DIR = CONFIG.STATIONXML_DIR
CROSSCORR_DIR = CONFIG.CROSSCORR_DIR
USE_DATALESSPAZ = CONFIG.USE_DATALESSPAZ
USE_STATIONXML = CONFIG.USE_STATIONXML
CROSSCORR_STATIONS_SUBSET = CONFIG.CROSSCORR_STATIONS_SUBSET
CROSSCORR_SKIPLOCS = CONFIG.CROSSCORR_SKIPLOCS
FIRSTDAY = CONFIG.FIRSTDAY
LASTDAY = CONFIG.LASTDAY
MINFILL = CONFIG.MINFILL
FREQMIN = CONFIG.FREQMIN
FREQMAX = CONFIG.FREQMAX
CORNERS = CONFIG.CORNERS
ZEROPHASE = CONFIG.ZEROPHASE
PERIOD_RESAMPLE = CONFIG.PERIOD_RESAMPLE
ONEBIT_NORM = CONFIG.ONEBIT_NORM
FREQMIN_EARTHQUAKE = CONFIG.FREQMIN_EARTHQUAKE
FREQMAX_EARTHQUAKE = CONFIG.FREQMAX_EARTHQUAKE
WINDOW_TIME = CONFIG.WINDOW_TIME
WINDOW_FREQ = CONFIG.WINDOW_FREQ
XCORR_INTERVAL = CONFIG.XCORR_INTERVAL
CROSSCORR_TMAX = CONFIG.CROSSCORR_TMAX
PLOT_CLASSIC = CONFIG.PLOT_CLASSIC
PLOT_DISTANCE = CONFIG.PLOT_DISTANCE
MAX_DISTANCE = CONFIG.MAX_DISTANCE
pickle_list = []
folder_list = sorted(glob.glob(os.path.join(CROSSCORR_DIR, '*')))
print MSEED_DIR
print CROSSCORR_TMAX
for folder in folder_list:
#check to see if there are any pickle files in the xcorr time folder
if len(glob.glob(os.path.join(folder, '*.pickle'))) < 1:
#print("There are no .pickle files in this folder. Skipping ...")
continue
else:
for file_ in glob.glob(os.path.join(folder, '*.pickle')):
if 'metadata' not in file_ and '.part' not in file_:
pickle_list.append(file_)
if len(pickle_list) < 1:
print("\nThere are no pickle files to begin from.")
raise Exception("No pickle files to process, first run the programme.")
res = ""
else:
print "\nPlease choose a file to process."
#print combinations of partial pickle files available
print '\n'.join('{} - {}'.format(i + 1, f.split('/')[-2])
for i, f in enumerate(pickle_list))
#change folder_list to pickle_list if this gives problems
res = raw_input('\n')
if not res:
raise Exception("You must choose one a number betwen {} and {}"\
.format(1, len(pickle_list)))
else:
PICKLE_PATH = pickle_list[int(res)-1]
OUTFILESPATH = PICKLE_PATH[:-7]
out_basename = os.path.basename(OUTFILESPATH)
OUTPATH = os.path.dirname(OUTFILESPATH)
OUT_SNR = os.path.join(OUTPATH, 'SNR_PLOTS')
print "\nOpening {} file to process ... ".format(OUT_SNR)
# re-initialising .part.pickle collection of cross-correlations
xc = pscrosscorr.load_pickled_xcorr(PICKLE_PATH)
# optimizing time-scale: max time = max distance / vmin (vmin = 2.5 km/s)
maxdist = max([xc[s1][s2].dist() for s1, s2 in xc.pairs()])
maxt = min(CROSSCORR_TMAX, maxdist / 2.5)
#plot distance plot of cross-correlations
#xc.plot(plot_type='distance', xlim=(-maxt, maxt),
#outfile="/home/boland/Desktop/something1342.png", showplot=False)
#plot individual cross-correlations
#xc.plot(plot_type='classic', xlim=(-maxt, maxt),
# outfile="/home/boland/Desktop/something1342.png", showplot=False)
#xc.plot_SNR(plot_type='all', outfile=OUT_SNR,
# config=os.path.basename(config_file))
xc.plot_SNR(plot_type='individual', outfile=OUT_SNR)
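# Running notes, inferred from the relative paths above (assumptions, not
# documentation): launch from the package root so configs/tmp_config.pickle
# resolves, with stacked cross-correlation .pickle files under CROSSCORR_DIR;
# the chosen stack's SNR plots are written to its SNR_PLOTS subdirectory.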
|
gpl-3.0
| 5,854,662,131,405,317,000
| 33.649573
| 135
| 0.681964
| false
| 3.100995
| true
| false
| false
|
fras2560/mlsb-platform
|
api/basic/bat.py
|
1
|
8517
|
'''
@author: Dallas Fraser
@date: 2016-04-12
@organization: MLSB API
@summary: The basic bat API
'''
from flask_restful import Resource, reqparse
from flask import Response, request
from json import dumps
from api import DB
from api.model import Bat
from api.authentication import requires_admin
from api.errors import BatDoesNotExist
from api.variables import PAGE_SIZE
from api.routes import Routes
from api.helper import pagination_response
from api.cached_items import handle_table_change
from api.tables import Tables
parser = reqparse.RequestParser()
parser.add_argument('player_id', type=int)
parser.add_argument('rbi', type=int)
parser.add_argument('game_id', type=int)
parser.add_argument('hit', type=str)
parser.add_argument('inning', type=int)
parser.add_argument('team_id', type=str)
post_parser = reqparse.RequestParser(bundle_errors=True)
post_parser.add_argument('player_id', type=int, required=True)
post_parser.add_argument('rbi', type=int)
post_parser.add_argument('game_id', type=int, required=True)
post_parser.add_argument('hit', type=str, required=True)
post_parser.add_argument('inning', type=int)
post_parser.add_argument('team_id', type=str, required=True)
class BatAPI(Resource):
def get(self, bat_id):
"""
GET request for Bat Object matching given bat_id
Route: Routes['bat']/<bat_id: int>
Returns:
if found
status: 200
mimetype: application/json
data:
{
'bat_id': int,
'game_id': int,
'team_id': int,
'team': string,
'rbi': int,
'hit': string,
'inning': int,
'player_id': int,
'player': string
}
otherwise
status: 404
mimetype: application/json
data: None
"""
entry = Bat.query.get(bat_id)
if entry is None:
raise BatDoesNotExist(payload={'details': bat_id})
response = Response(dumps(entry.json()), status=200,
mimetype="application/json")
return response
@requires_admin
def delete(self, bat_id):
"""
DELETE request for Bat
Route: Routes['bat']/<bat_id: int>
Returns:
status: 200
mimetype: application/json
data:
success: tells if request was successful (boolean)
message: the status message (string)
"""
bat = Bat.query.get(bat_id)
if bat is None:
raise BatDoesNotExist(payload={'details': bat_id})
# delete a single bat
DB.session.delete(bat)
DB.session.commit()
response = Response(dumps(None),
status=200,
mimetype="application/json")
handle_table_change(Tables.BAT, item=bat.json())
return response
@requires_admin
def put(self, bat_id):
"""
PUT request for Bat
Route: Routes['bat']/<bat_id: int>
Parameters :
game_id: the id of the game (int)
player_id: the id of the batter (int)
rbi: the number of runs batted in (int)
hit: the type of hit (string)
inning: the inning the hit occurred (int)
team_id: the id of the team (int)
Returns:
status: 200
mimetype: application/json
data:
success: tells if request was successful (boolean)
message: the status message (string)
failures: a list of parameters that failed to update
(list of string)
"""
# update a single bat
args = parser.parse_args()
bat = Bat.query.get(bat_id)
player_id = None
team_id = None
game_id = None
rbi = None
hit = None
inning = None
if bat is None:
raise BatDoesNotExist(payload={'details': bat_id})
if args['team_id']:
team_id = args['team_id']
if args['game_id']:
game_id = args['game_id']
if args['player_id']:
player_id = args['player_id']
if args['rbi']:
rbi = args['rbi']
if args['hit']:
hit = args['hit']
if args['inning']:
inning = args['inning']
bat.update(player_id=player_id,
team_id=team_id,
game_id=game_id,
rbi=rbi,
hit=hit,
inning=inning)
DB.session.commit()
response = Response(dumps(None), status=200,
mimetype="application/json")
handle_table_change(Tables.BAT, item=bat.json())
return response
def options(self):
return {'Allow': 'PUT'}, 200, \
{'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'PUT,GET'}
class BatListAPI(Resource):
def get(self):
"""
GET request for Bats List
Route: Routes['bat']
Parameters :
Returns:
status: 200
mimetype: application/json
data:
games: [ {
'bat_id': int,
'game_id': int,
'team_id': int,
'team': string,
'rbi': int,
'hit': string,
'inning': int,
'player_id': int,
'player': string
}
,{...}
]
"""
# return a pagination of bats
page = request.args.get('page', 1, type=int)
pagination = Bat.query.paginate(page, PAGE_SIZE, False)
result = pagination_response(pagination, Routes['bat'])
resp = Response(dumps(result), status=200,
mimetype="application/json")
return resp
@requires_admin
def post(self):
"""
POST request for Bats List
Route: Routes['bat']
Parameters :
game_id: the id of the game (int)
player_id: the id of the batter (int)
rbi: the number of runs batted in (int)
hit: the type of hit (string)
inning: the inning the hit occurred (int)
team_id: the id of the team (int)
Returns:
if successful
status: 201
mimetype: application/json
data: the created bat id (int)
otherwise possible errors
status: 400, GDNESC, PDNESC, TDNESC
mimetype: application/json
data: None
"""
# create a new bat
args = post_parser.parse_args()
game_id = None
player_id = None
team_id = None
rbi = 0
hit = None
inning = 1 # just assume some first inning
if args['game_id']:
game_id = args['game_id']
if args['player_id']:
player_id = args['player_id']
if args['team_id']:
team_id = args['team_id']
if args['hit']:
hit = args['hit']
if args['rbi']:
rbi = args['rbi']
if args['inning']:
inning = args['inning']
bat = Bat(player_id,
team_id,
game_id,
hit,
inning=inning,
rbi=rbi)
DB.session.add(bat)
DB.session.commit()
bat_id = bat.id
resp = Response(dumps(bat_id), status=201, mimetype="application/json")
handle_table_change(Tables.BAT, item=bat.json())
return resp
def options(self):
return {'Allow': 'PUT'}, 200, \
{'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'PUT,GET'}
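# Hedged usage sketch (added): exercising the endpoints with the
# `requests` library. The URL prefix and route are assumptions; the
# real routes come from api.routes.Routes['bat'].
#
# import requests
# resp = requests.get("http://localhost:5000/bats/1")
# if resp.status_code == 200:
# print(resp.json()) # bat object as documented in BatAPI.get
# resp = requests.post("http://localhost:5000/bats",
# json={"player_id": 7, "game_id": 12,
# "hit": "s", "team_id": "3"})
# print(resp.status_code) # 201 on success, with the new bat id as body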
|
apache-2.0
| -8,683,075,583,298,692,000
| 33.905738
| 79
| 0.480099
| false
| 4.352069
| false
| false
| false
|
sani-coop/tinjaca
|
doc/informe1/_graphviz/fomdes_proc4.py
|
1
|
2522
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
BPMN diagram for FOMDES process 4
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from bpmn_pgv import *
import pygraphviz as pgv
__author__ = 'mapologo'
PROCESS_LABEL = "Liquidación de Créditos"
# A graph for FOMDES processes
F = pgv.AGraph(strict=False, directed=True)
F.graph_attr.update(label="", rankdir="TB", splines="ortho", labelloc="b",
size="8, 7.5", forcelabels="true", ranksep="0.25", fontname="Liberation Sans Narrow Condensed")
F.node_attr.update(fontname="Liberation Sans Narrow Condensed")
F.edge_attr.update(fontname="Liberation Sans Narrow Condensed", fontsize="10")
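# Note (added): each *_cluster dict below maps a node id to a
# (label, kind) tuple; kinds such as "start", "human", "message" and
# "end" appear to select the BPMN glyph drawn by bpmn_pgv.add_cluster
# (an assumption based on usage, since bpmn_pgv is not shown here).
# The *_edges dicts map source id -> {target id: edge attributes}.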
se_cluster = {"se7": ("Recibir el documento protocolizado", "start"),
"se8": ("Revisar el documento protocolizado", "human"),
"se9": ("", "end")}
se_edges = {"se7": {"se8": {}},
"se8": {"se9": {"style": "invis"}}}
SE = add_cluster(F, "se", "Secretaría Ejecutiva", se_cluster, se_edges)
p_cluster = {"p1": ("Firma del cheque", "human"),
"p2": ("Entregar cheque a beneficiario", "message")}
p_edges = {"p1":{"p2": {}}}
P = add_cluster(F, "p", "Presidencia", p_cluster, p_edges)
pr_cluster = {"pr2": ("Verificar documentación legal y elaborar certificación de disponibilidad", "human"),
"pr3": ("Crear las cuentas por cobrar", "human"),
"pr4": ("Generar tablas de amortización", "human"),
"pr5": ("Imprimir y firmar orden de liquidación y cheque", "human")}
pr_edges = {"pr2": {"pr3": {}},
"pr3": {"pr4": {}},
"pr4": {"pr5": {}}}
PR = add_cluster(F, "pr", "Presupuesto/Administración", pr_cluster, pr_edges)
F.add_node("SIGEFOMDES Administración", image=IMAGE_PATH + "database.png", shape="plaintext", label="", xlabel="SIGEFOMDES Administración")
F.add_node("SISAC", image=IMAGE_PATH + "database.png", shape="plaintext", label="", xlabel="SISAC")
global_edges = {"Beneficiario": {"se7": {"style": "dashed"}},
"se8": {"pr2": {"style": "dashed"}},
"pr3": {"SIGEFOMDES Administración": {"style": "dashed"}},
"pr4": {"SISAC": {"style": "dashed"}},
"pr5": {"p1": {"style": "dashed"}},
"p2": {"se9": {"style": "dashed"}, "Beneficiario": {"style": "dashed"}}}
add_edges(F, global_edges)
F.draw("proc4.png", prog='dot')
F.write("proc4.dot")
|
gpl-2.0
| -3,765,425,523,692,363,000
| 36.477612
| 139
| 0.597372
| false
| 2.856655
| false
| false
| false
|
shashwat91/Wireless_Networking-ET4394
|
GNU_Radio/Output/Output_window.py
|
1
|
8961
|
#!/usr/bin/env python2
##################################################
# GNU Radio Python Flow Graph
# Title: Output Window
# Generated: Sat Apr 30 16:45:27 2016
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from gnuradio import analog
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import fft
from gnuradio import gr
from gnuradio import wxgui
from gnuradio.eng_option import eng_option
from gnuradio.fft import window
from gnuradio.filter import firdes
from gnuradio.wxgui import fftsink2
from gnuradio.wxgui import forms
from gnuradio.wxgui import numbersink2
from grc_gnuradio import wxgui as grc_wxgui
from optparse import OptionParser
import osmosdr
import time
import wx
class Output_window(grc_wxgui.top_block_gui):
def __init__(self):
grc_wxgui.top_block_gui.__init__(self, title="Output Window")
_icon_path = "/usr/share/icons/hicolor/32x32/apps/gnuradio-grc.png"
self.SetIcon(wx.Icon(_icon_path, wx.BITMAP_TYPE_ANY))
##################################################
# Variables
##################################################
self.threshold = threshold = -55
self.samp_rate = samp_rate = 2.048e6
self.freq = freq = 658e6
self.fft_size = fft_size = 1.024e3
##################################################
# Blocks
##################################################
self.notebook = self.notebook = wx.Notebook(self.GetWin(), style=wx.NB_TOP)
self.notebook.AddPage(grc_wxgui.Panel(self.notebook), "Spectrum")
self.notebook.AddPage(grc_wxgui.Panel(self.notebook), "Output")
self.notebook.AddPage(grc_wxgui.Panel(self.notebook), "Stream")
self.Add(self.notebook)
_threshold_sizer = wx.BoxSizer(wx.VERTICAL)
self._threshold_text_box = forms.text_box(
parent=self.notebook.GetPage(1).GetWin(),
sizer=_threshold_sizer,
value=self.threshold,
callback=self.set_threshold,
label="Threshold",
converter=forms.float_converter(),
proportion=0,
)
self._threshold_slider = forms.slider(
parent=self.notebook.GetPage(1).GetWin(),
sizer=_threshold_sizer,
value=self.threshold,
callback=self.set_threshold,
minimum=-100,
maximum=0,
num_steps=100,
style=wx.SL_HORIZONTAL,
cast=float,
proportion=1,
)
self.notebook.GetPage(1).Add(_threshold_sizer)
_freq_sizer = wx.BoxSizer(wx.VERTICAL)
self._freq_text_box = forms.text_box(
parent=self.notebook.GetPage(0).GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
label="freq",
converter=forms.float_converter(),
proportion=0,
)
self._freq_slider = forms.slider(
parent=self.notebook.GetPage(0).GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
minimum=10e6,
maximum=10e9,
num_steps=100,
style=wx.SL_HORIZONTAL,
cast=float,
proportion=1,
)
self.notebook.GetPage(0).Add(_freq_sizer)
self.wxgui_numbersink2_1 = numbersink2.number_sink_f(
self.notebook.GetPage(1).GetWin(),
unit="signal present",
minval=0,
maxval=1,
factor=1,
decimal_places=0,
ref_level=0,
sample_rate=samp_rate,
number_rate=15,
average=False,
avg_alpha=None,
label="Signal Detection",
peak_hold=False,
show_gauge=True,
)
self.notebook.GetPage(1).Add(self.wxgui_numbersink2_1.win)
self.wxgui_numbersink2_0 = numbersink2.number_sink_f(
self.notebook.GetPage(1).GetWin(),
unit="dB",
minval=-120,
maxval=0,
factor=1.0,
decimal_places=10,
ref_level=0,
sample_rate=samp_rate,
number_rate=15,
average=False,
avg_alpha=30e-3,
label="level",
peak_hold=False,
show_gauge=False,
)
self.notebook.GetPage(1).Add(self.wxgui_numbersink2_0.win)
self.wxgui_fftsink2_0 = fftsink2.fft_sink_c(
self.notebook.GetPage(0).GetWin(),
baseband_freq=freq,
y_per_div=5,
y_divs=10,
ref_level=0,
ref_scale=2.0,
sample_rate=samp_rate,
fft_size=1024,
fft_rate=15,
average=True,
avg_alpha=30e-3,
title="Spectrum",
peak_hold=False,
win=window.rectangular,
)
self.notebook.GetPage(0).Add(self.wxgui_fftsink2_0.win)
self.rtlsdr_source_0 = osmosdr.source( args="numchan=" + str(1) + " " + "" )
self.rtlsdr_source_0.set_sample_rate(samp_rate)
self.rtlsdr_source_0.set_center_freq(freq, 0)
self.rtlsdr_source_0.set_freq_corr(0, 0)
self.rtlsdr_source_0.set_dc_offset_mode(0, 0)
self.rtlsdr_source_0.set_iq_balance_mode(0, 0)
self.rtlsdr_source_0.set_gain_mode(False, 0)
self.rtlsdr_source_0.set_gain(20, 0)
self.rtlsdr_source_0.set_if_gain(10, 0)
self.rtlsdr_source_0.set_bb_gain(5, 0)
self.rtlsdr_source_0.set_antenna("", 0)
self.rtlsdr_source_0.set_bandwidth(0, 0)
self.fft_1 = fft.fft_vcc(1024, True, (window.rectangular(1024)), True, 1)
self.blocks_vector_to_stream_0 = blocks.vector_to_stream(gr.sizeof_float*1, 1024)
self.blocks_threshold_ff_0 = blocks.threshold_ff(-100, threshold, 0)
self.blocks_stream_to_vector_0 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 1024)
self.blocks_nlog10_ff_0 = blocks.nlog10_ff(10, 1, 0)
self.blocks_file_sink_0 = blocks.file_sink(gr.sizeof_float*1, "/media/shashwat/DATA/Q3/Wireless Networking/gnu codes/Outputs/db_498", False)
self.blocks_file_sink_0.set_unbuffered(False)
self.blocks_divide_xx_0 = blocks.divide_ff(1)
self.blocks_complex_to_mag_squared_0 = blocks.complex_to_mag_squared(1024)
self.analog_const_source_x_0 = analog.sig_source_f(0, analog.GR_CONST_WAVE, 0, 0, 1.04858e6)
##################################################
# Connections
##################################################
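# Signal path (added comment, inferred from the connections below):
# rtlsdr source -> stream_to_vector(1024) -> FFT -> |.|^2
# -> vector_to_stream -> divide by 1.04858e6 (presumably 1024^2, to
# normalise the FFT magnitude) -> 10*log10 -> file sink, GUI number
# sink, and a threshold block that flags "signal present" when the
# level in dB exceeds the slider value.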
self.connect((self.analog_const_source_x_0, 0), (self.blocks_divide_xx_0, 1))
self.connect((self.blocks_complex_to_mag_squared_0, 0), (self.blocks_vector_to_stream_0, 0))
self.connect((self.blocks_divide_xx_0, 0), (self.blocks_nlog10_ff_0, 0))
self.connect((self.blocks_nlog10_ff_0, 0), (self.blocks_file_sink_0, 0))
self.connect((self.blocks_nlog10_ff_0, 0), (self.blocks_threshold_ff_0, 0))
self.connect((self.blocks_nlog10_ff_0, 0), (self.wxgui_numbersink2_0, 0))
self.connect((self.blocks_stream_to_vector_0, 0), (self.fft_1, 0))
self.connect((self.blocks_threshold_ff_0, 0), (self.wxgui_numbersink2_1, 0))
self.connect((self.blocks_vector_to_stream_0, 0), (self.blocks_divide_xx_0, 0))
self.connect((self.fft_1, 0), (self.blocks_complex_to_mag_squared_0, 0))
self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_0, 0))
self.connect((self.rtlsdr_source_0, 0), (self.wxgui_fftsink2_0, 0))
def get_threshold(self):
return self.threshold
def set_threshold(self, threshold):
self.threshold = threshold
self._threshold_slider.set_value(self.threshold)
self._threshold_text_box.set_value(self.threshold)
self.blocks_threshold_ff_0.set_hi(self.threshold)
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.rtlsdr_source_0.set_sample_rate(self.samp_rate)
self.wxgui_fftsink2_0.set_sample_rate(self.samp_rate)
def get_freq(self):
return self.freq
def set_freq(self, freq):
self.freq = freq
self._freq_slider.set_value(self.freq)
self._freq_text_box.set_value(self.freq)
self.rtlsdr_source_0.set_center_freq(self.freq, 0)
self.wxgui_fftsink2_0.set_baseband_freq(self.freq)
def get_fft_size(self):
return self.fft_size
def set_fft_size(self, fft_size):
self.fft_size = fft_size
if __name__ == '__main__':
parser = OptionParser(option_class=eng_option, usage="%prog: [options]")
(options, args) = parser.parse_args()
tb = Output_window()
tb.Start(True)
tb.Wait()
|
gpl-3.0
| -1,901,618,430,034,386,400
| 36.810127
| 148
| 0.579734
| false
| 3.224541
| false
| false
| false
|
AMOboxTV/AMOBox.LegoBuild
|
plugin.video.salts/scrapers/myvideolinks_scraper.py
|
1
|
4919
|
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import urlparse
from salts_lib import kodi
from salts_lib import log_utils
from salts_lib import scraper_utils
from salts_lib import dom_parser
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.kodi import i18n
import scraper
BASE_URL = 'http://myvideolinks.xyz'
class MyVidLinks_Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.MOVIE, VIDEO_TYPES.EPISODE])
@classmethod
def get_name(cls):
return 'MyVideoLinks.eu'
def resolve_link(self, link):
return link
def format_source_label(self, item):
label = '[%s] %s' % (item['quality'], item['host'])
if 'views' in item and item['views']:
label += ' (%s Views)' % (item['views'])
return label
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
url = urlparse.urljoin(self.base_url, source_url)
html = self._http_get(url, cache_limit=.5)
views = None
pattern = '<span[^>]+>(\d+)\s+Views'
match = re.search(pattern, html)
if match:
views = int(match.group(1))
if video.video_type == VIDEO_TYPES.MOVIE:
return self.__get_movie_links(video, views, html)
else:
return self.__get_episode_links(video, views, html)
return hosters
def __get_movie_links(self, video, views, html):
q_str = ''
fragment = dom_parser.parse_dom(html, 'div', {'class': 'post-title'})
if fragment:
q_str = fragment[0]
match = re.search('<p>Size:(.*)', html, re.DOTALL)
if match:
fragment = match.group(1)
else:
fragment = html
return self.__get_links(video, views, fragment, q_str)
def __get_episode_links(self, video, views, html):
pattern = '<h4>(.*?)</h4>(.*?)</ul>'
hosters = []
for match in re.finditer(pattern, html, re.DOTALL):
q_str, fragment = match.groups()
hosters += self.__get_links(video, views, fragment, q_str)
return hosters
def __get_links(self, video, views, html, q_str):
pattern = 'li>\s*<a\s+href="(http[^"]+)'
hosters = []
for match in re.finditer(pattern, html, re.DOTALL):
url = match.group(1)
hoster = {'multi-part': False, 'class': self, 'views': views, 'url': url, 'rating': None, 'quality': None, 'direct': False}
hoster['host'] = urlparse.urlsplit(url).hostname
hoster['quality'] = scraper_utils.blog_get_quality(video, q_str, hoster['host'])
hosters.append(hoster)
return hosters
def get_url(self, video):
return self._blog_get_url(video)
@classmethod
def get_settings(cls):
settings = super(cls, cls).get_settings()
settings = scraper_utils.disable_sub_check(settings)
name = cls.get_name()
settings.append(' <setting id="%s-filter" type="slider" range="0,180" option="int" label=" %s" default="30" visible="eq(-4,true)"/>' % (name, i18n('filter_results_days')))
settings.append(' <setting id="%s-select" type="enum" label=" %s" lvalues="30636|30637" default="0" visible="eq(-5,true)"/>' % (name, i18n('auto_select')))
return settings
def search(self, video_type, title, year, season=''):
search_url = urlparse.urljoin(self.base_url, '/?s=')
search_url += urllib.quote_plus(title)
html = self._http_get(search_url, cache_limit=1)
pattern = '<h\d+>.*?<a\s+href="(?P<url>[^"]*/(?P<date>\d{4}/\d{2}/\d{2})/[^"]*)"\s+rel="bookmark"\s+title="(?:Permanent Link to )?(?P<post_title>[^"]+)'
date_format = '%Y/%m/%d'
return self._blog_proc_results(html, pattern, date_format, video_type, title, year)
|
gpl-2.0
| -8,640,232,997,686,215,000
| 37.429688
| 191
| 0.599106
| false
| 3.531228
| false
| false
| false
|
IA-MP/KnightTour
|
libs/IO/txt_generator.py
|
1
|
2729
|
import os
from time import time
from libs.IO.input_generator import generate_input
def generate_name_file(kind, path):
"""
This function generates a sequential input or output file name.
@param kind: int, 0 for input, 1 for output
@param path: string, the directory in which the file will be created
@return: string, a file name carrying the next free incremental number
"""
global filename
i = 0
while True:
if kind == 0:
filename = "input_%d" % i
elif kind == 1:
filename = "output_%d" % i
if not os.path.exists(path + filename + ".txt"):
return filename
i += 1
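# Example (added): with kind=0 and input_0.txt, input_1.txt already in
# `path`, generate_name_file(0, path) returns "input_2".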
def generate_file(kind, text, path, num=None):
"""
This function generates an input or output txt file.
@param kind: int, the kind of file to generate: 0 for input, 1 for output
@param text: string, the content to store in the file
@param path: string, the directory where the file should be generated
@param num: int, optional file number; if given, the file with that number is overwritten, otherwise a new incremental name is used
"""
global file
file = None
final_path = ""
if kind == 0: # Generate input file
try:
if num is None:
name = generate_name_file(0, path)
final_path = path + name + '.txt'
file = open(final_path, 'w')
else:
final_path = path + 'input_' + str(num) + '.txt'
file = open(final_path, 'w')
for row in text:
for col in row:
for elem in col:
file.writelines(str(elem))
file.write("\n")
except Exception:
print("Si e' verificato un problema con il path, seguire le istruzioni per favore e scrivere un path regolare")
raise SystemExit()
finally:
if file is not None:
file.close()
elif kind == 1: # Generate output file
try:
if num is None:
name = generate_name_file(1, path)
final_path = path + name + '.txt'
file = open(final_path, 'w')
else:
final_path = path + 'output_' + str(num) + '.txt'
file = open(final_path, 'w')
i = 1
for elem in text:
file.write("Caso " + str(i) + ": " + elem + "\n")
i += 1
finally:
file.close()
return final_path
if __name__ == "__main__":
start = time()
#generate_file(0, generate_input(100, False), "../../dataset/")
#generate_file(1, ["0", "4", "14", "impossibile", "150"], "../../dataset/")
print(time() - start)
|
mit
| 5,538,565,594,786,700,000
| 33.1125
| 123
| 0.512642
| false
| 4.048961
| false
| false
| false
|
sbarton272/AcousticBarcodes-Explorations
|
barcodes/dxfwrite/tests/test_viewport_entity.py
|
1
|
1662
|
#!/usr/bin/env python
#coding:utf-8
# Created: 10.02.2010
# Copyright (C) 2010, Manfred Moitzi
# License: MIT License
__author__ = "mozman <mozman@gmx.at>"
import unittest
from dxfwrite.entities import Viewport
from dxfwrite import dxfstr, DXFEngine
class TestViewportEntity(unittest.TestCase):
expected = " 0\nVIEWPORT\n 8\nVIEWPORTS\n 67\n1\n 10\n0.0\n 20\n0.0\n 30\n0.0\n 40\n3.0\n"\
" 41\n2.0\n 68\n1\n 69\n1\n" \
"1001\nACAD\n1000\nMVIEW\n1002\n{\n" \
"1070\n16\n" \
"1010\n0.0\n1020\n0.0\n1030\n0.0\n" \
"1010\n0.0\n1020\n0.0\n1030\n0.0\n" \
"1040\n0.0\n1040\n1.0\n"\
"1040\n0.0\n1040\n0.0\n"\
"1040\n50.0\n1040\n0.0\n1040\n0.0\n"\
"1070\n0\n1070\n100\n1070\n1\n"\
"1070\n3\n1070\n0\n1070\n0\n1070\n0\n1070\n0\n"\
"1040\n0.0\n1040\n0.0\n1040\n0.0\n"\
"1040\n0.1\n1040\n0.1\n1040\n0.1\n1040\n0.1\n"\
"1070\n0\n"\
"1002\n{\n1002\n}\n1002\n}\n"
def test_create_viewport_entity(self):
viewport = Viewport((0,0,0), 3, 2, id=1)
self.assertEqual(dxfstr(viewport), self.expected)
def test_viewport_by_factory(self):
viewport = DXFEngine.viewport((0,0,0), 3, 2, id=1)
self.assertEqual(dxfstr(viewport), self.expected)
def test_get_extended_data(self):
viewport = DXFEngine.viewport((0,0,0), 3, 2)
result = viewport['perspective_lens_length']
self.assertEqual(50, result)
def test_set_extended_data(self):
viewport = DXFEngine.viewport((0,0,0), 3, 2, perspective_lens_length=75.)
result = viewport['perspective_lens_length']
self.assertEqual(75, result)
if __name__=='__main__':
unittest.main()
|
mit
| 3,518,769,548,850,778,000
| 32.24
| 97
| 0.633574
| false
| 2.236878
| true
| false
| false
|
raelga/gtav_crew_exporter
|
gtav_crew_exporter.py
|
1
|
10812
|
#!/usr/bin/python
#### Import modules
from selenium import selenium
from selenium import webdriver
import sys, time, re, string, getopt
#### Constants
default_crew = 'elotrolado'
login_url = 'https://socialclub.rockstargames.com/profile/signin'
base_crew_url = 'http://socialclub.rockstargames.com/crew'
path_gtav_base_url = '/games/gtav'
path_gtav_overview_url = '/career/overview/gtaonline'
#### Global
username = ''
password = ''
crew_name = ''
output_file = ''
verbose_flag = ''
#### Class definition
class crew_member:
def __init__(self):
self.id = ''
self.psn = ''
self.url = ''
self.level = ''
self.playtime = ''
self.country = ''
self.rank = ''
self.crew = ''
self.platform = ''
self.error = 'All ok.'
#### Function definitions
def print_help():
print 'gtav_crew_exporter.py -c <crew_name> [-u <username> -p <password>] [-o <output_file>] [-v]'
def arg_parser(argv):
global crew_name
global username
global password
global output_file
global verbose_flag
try:
opts, args = getopt.getopt(argv,"hvu:p:c:o:",["verbose","username","password","crew=","ofile="])
except getopt.GetoptError:
print_help()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print_help()
sys.exit()
elif opt in ("-c", "--crew"):
crew_name = arg
elif opt in ("-o", "--ofile"):
output_file = arg
if not output_file: print_help()
elif opt in ("-v", "--verbose"):
verbose_flag = 1
elif opt in ("-u", "--username"):
username = arg
if not username: print_help()
elif opt in ("-p", "--password"):
password = arg
if not password: print_help()
if not crew_name:
crew_name = default_crew
return 0
def debug(msg):
global verbose_flag
if verbose_flag: print 'DBG : ' + msg
def WaitForElement(webdriver, path):
limit = 10 # waiting limit in seconds
inc = 1 # in seconds; sleep for 500ms
c = 0
while (c < limit):
try:
webdriver.find_element_by_xpath(path)
return 1 # Success
except:
time.sleep(inc)
c = c + inc
# print sys.exc_info()
return 0
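# Example (added): WaitForElement(driver, '//*[@id="submitBtn"]')
# returns 1 as soon as the matching element exists in the DOM, or 0
# after polling for roughly 10 seconds.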
####
def LoginSocialClub(driver):
if not username or not password:
print '!! Without login and password, only username and rank are available:'
return 1
driver.get(login_url)
path = '//*[@id="submitBtn"]'
result = WaitForElement(driver, path)
if not result: # interprets returned value
# driver.close()
debug("\nThe page is not loaded yet.")
else:
debug('web - page fully loaded!')
path='//input[@id="login-field"]'
driver.find_element_by_xpath(path).clear()
driver.find_element_by_xpath(path).send_keys(username)
path='//input[@id="password-field"]'
driver.find_element_by_xpath(path).clear()
driver.find_element_by_xpath(path).send_keys(password)
path = '//*[@id="submitBtn"]'
driver.find_element_by_xpath(path).click()
driver.get(login_url)
path = '//*[@id="panelaccounts"]'
result = WaitForElement(driver, path)
if not result: # interprets returned value
# driver.close()
debug("\nThe page is not loaded yet.")
else:
debug('web - page fully loaded!')
return 0
####
def GetMembersList(driver):
crew_url = base_crew_url + '/' + crew_name + '/hierarchy'
driver.get(crew_url)
path = '//*[@id="muscleList"]'
result = WaitForElement(driver, path)
if not result: # interprets returned value
# driver.close()
debug("\nThe page is not loaded yet.")
else:
debug('web - page fully loaded!')
path = '//a[@data-ga="footer_selectlanguage_en"]'
viewall = driver.find_element_by_xpath(path)
if not viewall:
debug("meh.")
else:
debug("web - set page in english.")
# viewall.click()
path = '//a[@class="viewAll"]'
try:
viewall = driver.find_element_by_xpath(path)
debug("web - unfold users.")
viewall.click()
except:
debug("web - all users visible.")
path = '//div[contains(@id, "crewRank_")]'
hierarchy = driver.find_elements_by_xpath(path)
crew_members = list()
for rank in hierarchy:
# print rank.get_attribute('id')
path = '//div[@id="' + rank.get_attribute('id') + '"]//i[@class="icon-info"]'
rank_name = rank.find_element_by_xpath(path).get_attribute('data-name')
# print rank_name
path = '//div[@id="' + rank.get_attribute('id') + '"]//ul[@id="' + rank_name + 'List"]//div[@class="member"]//img'
members = rank.find_elements_by_xpath(path)
for member in members:
cm = crew_member()
cm.id = member.get_attribute('data-original-title')
cm.url = member.find_element_by_xpath('..').get_attribute('href')
cm.rank = rank_name
crew_members.append(cm)
return crew_members
#### Function definitions
def GetMemberInfo(driver, member):
debug('[' + member.id + ']')
retry = 0
max_retry = 5
# Add retry to avoid errors
for _ in range(max_retry):
## Load profile page
driver.get(member.url)
path = '//*[@id="cardInfoVitals"]'
result = WaitForElement(driver, path)
if not result: # interprets returned value
# driver.close()
debug("web - The page is not loaded yet. [" + str(retry) + "]")
retry += 1
else:
debug('web - page fully loaded! [' + str(retry) + ']')
break
## Check if profile is private
try:
path = '//div[@id="no-profile"]'
profail = driver.find_element_by_xpath(path)
debug('[' + member.id + '] Profile is private!')
member.error = 'Private profile.'
return 1 # Success
except:
## Crew Principal
path = '//div[@class="crew-info"]/a'
member.crew = driver.find_element_by_xpath(path).get_attribute("href").rsplit('/',1)[1]
debug('[' + member.id + '] main crew: ' + member.crew)
try:
## PSN ID
path = '//div[@class="PSN"]/h5'
member.psn = driver.find_element_by_xpath(path).text
except:
member.psn = ''
debug('[' + member.id + '] PSN ID: ' + member.psn)
try:
## Language
path = '//div[@id="cardInfoFooter"]//span[contains(@class,"Country")]'
member.country = driver.find_element_by_xpath(path).get_attribute("data-original-title")
except:
member.country = ''
debug('[' + member.id + '] country: ' + member.country)
driver.get(member.url + '/'+ path_gtav_base_url + '/ps3' + path_gtav_overview_url)
path = '//div[@id="freemodeRank"]'
result = WaitForElement(driver, path)
if not result: # interprets returned value
# driver.close()
debug("\nThe page is not loaded yet.")
else:
debug('web - page fully loaded!')
try:
path = '//div[@id="freemodeRank"]//h3'
member.level = driver.find_element_by_xpath(path).text
except:
member.level = ''
if member.level == '0':
member.platform = 'XBOX360'
driver.get(member.url + '/'+ path_gtav_base_url + '/xbox' + path_gtav_overview_url)
path = '//div[@id="freemodeRank"]'
result = WaitForElement(driver, path)
if not result: # interprets returned value
# driver.close()
debug("\nThe page is not loaded yet.")
else:
debug('web - page fully loaded!')
try:
path = '//div[@id="freemodeRank"]//h3'
member.level = driver.find_element_by_xpath(path).text
except:
member.level = ''
else:
member.platform = 'PS3'
debug('[' + member.id + '] rank: ' + member.rank)
try:
## Language
path = '//div[@id="freemodeRank"]//div[@class="rankBar"]/h4'
member.playtime = driver.find_element_by_xpath(path).text.rsplit(':',1)[1]
except:
member.playtime = ''
debug('[' + member.id + '] playtime: ' + member.playtime)
# print sys.exc_info()
return member
#### Main function
if __name__ == "__main__":
arg_parser(sys.argv[1:])
debug('web - starting browser')
driver = webdriver.Firefox()
print 'Crew: ' + crew_name
crew_members = GetMembersList(driver)
print 'Crew Size: ' + str(len(crew_members)) + ' members'
error = LoginSocialClub(driver)
if error:
print 'Crew Members :'
for cm in crew_members:
print cm.rank + ", " + cm.id + ", " + cm.url
debug('You need to provide login information to view each member info.')
for cm in crew_members:
cm = GetMemberInfo(driver, cm)
if output_file:
f = open(output_file,'w')
for cm in crew_members:
member_csv = str(cm.id) + ', ' \
+ str(cm.country) + ', ' \
+ str(cm.psn) + ', ' \
+ str(cm.platform) + ', ' \
+ str(cm.crew) + ', ' \
+ str(cm.rank) + ', ' \
+ str(cm.level) + ', ' \
+ str(cm.playtime) + ', ' \
+ str(cm.error)
if output_file:
f.write(member_csv + '\n')
else:
print member_csv
if output_file:
print 'Output saved as ' + output_file + '.'
f.close() # you can omit this in most cases as the destructor will call close()
driver.close()
sys.exit()
# Grab URL
#url = str(sys.argv[1])
# Check if it's malformed
#regex = re.compile(
# r'^(?:http|ftp)s?://' # http:// or https://
# r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
# r'localhost|' #localhost...
# r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
# r'(?::\d+)?' # optional port
# r'(?:/?|[/?]\S+)$', re.IGNORECASE)
#vurl = regex.match(url)
#if vurl:
# print ("Good url : %s" % url)
#else:
# debug ("Malformed url : %s" % url)
|
gpl-2.0
| 1,753,025,060,716,105,000
| 27.452632
| 122
| 0.515631
| false
| 3.616054
| false
| false
| false
|
ActiveState/code
|
recipes/Python/360698_Extending_pythprolog_syntax/recipe-360698.py
|
1
|
8888
|
#
# pythologic2.py
#
# Add logic programming (Prolog) syntax and *resolution* into Python.
#
# (c) 2004 Francisco Coelho
# after (c) 2004 Shai Berger
# and AIMA examples
#
import string
import copy
class Struct:
def __init__(self, database, head, subs):
"""
The head and subs are essential - what makes this struct.
The database should only be used while structs are constructed,
and later removed.
"""
self.database = database
self.head = head
self.subs = subs
def __pos__(self):
"""
unary + means insert into database as fact
"""
self.database.add_fact(self)
def __invert__(self):
"""
unary ~ means insert into database as query
"""
self.database.add_query(self)
def __lshift__(self, requisites):
"""
The ideal is
consequent(args) << cond1(args1),...
for now we must do with
consequent(args) << [cond1(args1),...]
"""
self.database.add_conditional(self, requisites)
def __str__(self):
subs = map (str, self.subs)
return str(self.head) + "(" + string.join(subs,',') + ")"
class Symbol:
def __init__ (self, name, database):
self.name = name
self.database = database
def __call__(self, *args):
return Struct(self.database, self, args)
def __str__(self):
return self.name
class Constant(Symbol):
"""
A constant is a name. Its value is its name too.
"""
def value(self): return self.name
class Variable(Symbol):
pass
def symbol(name, database):
if (name[0] in string.uppercase):
return Variable(name,database)
else:
return Constant(name, database)
class Database:
def __init__(self, name):
self.name= name
self.facts = []
self.conditionals = []
self.queries = []
def add_fact(self, fact):
self.facts.append(fact)
def add_query(self, query):
self.queries.append(query)
def add_conditional(self,head,requisites):
if not(isinstance(requisites, list)):
requisites = [requisites]
self.conditionals.append((head,requisites))
def __str__(self):
factsStr= string.join(map(str, self.facts),'\n')
condsStr= ''
for (h,r) in self.conditionals:
condsStr = condsStr + "%s << %s\n"%(h,string.join( map(str, r), ', '))
queryStr= string.join( map(str, self.queries),'\n')
return self.name + ' facts\n' + factsStr +'\n'+self.name + ' conditionals\n'+ condsStr + '\n'+self.name + ' queries\n'+queryStr + '\n'
def append(self, func):
"""
Include definitions from func into database
"""
try:
code = func.func_code
except:
raise TypeError, "function or method argument expected"
names = code.co_names
locally_defined = code.co_varnames
globally_defined = func.func_globals.keys()
defined = locally_defined+tuple(globally_defined)
undefined = [name for name in names if name not in defined]
newglobals = func.func_globals.copy()
for name in undefined:
newglobals[name] = symbol(name, self)
exec code in newglobals
def __lshift__(self, func):
"""
A helper for decorator implementation
"""
self.append(func)
return LogicalFunction(self, func)
def solve(self, V = [{}]):
"""
The query queue is LIFO:
Extend valuations in V satisfying the last query.
"""
def solve1( v ):
# get solutions from facts
unify_facts = [unify(query, fact, v) for fact in self.facts]
# look for solutions from conditionals
unify_conditionals = []
for ( header , condition_list ) in self.conditionals:
u = unify(query, header , v) # unify headers
U = [ u ]
if u != None:
# remember query queue
oldQueries = copy.deepcopy(self.queries)
# we want to start by the first conditional
D = copy.copy( condition_list )
D.reverse()
# phase 1: append the conditionals to query queue
for condition in D:
if type( condition ) == type('string'):
# process python code
# should return True or False
self.queries.append( condition )
#eval_python_string( condition , u)
else:
# append the conditional,
# with variables replaced according to u
# to the query queue
unified_condition = subst(u, condition )
self.queries.append( unified_condition )
# phase 2: solve the appended conditionals
for condition in D:
U = self.solve( U )
# restore query queue
self.queries = oldQueries
# grow the list of solutions
unify_conditionals = unify_conditionals + U
return [ u for u in (unify_facts + unify_conditionals) if not u in [None, {}] ]
if self.queries:
query = self.queries[-1]
del self.queries[-1]
else:
return []
if type( query ) == type( 'string' ):
U = [ v for v in V if python_eval_string(query, v) ]
else:
U = []
for v in V:
U = U + solve1(v)
return U
def python_eval_string(s, v):
for k in v:
s=string.replace(s, str(k), str(v[k]))
return eval( s, {} )
def subst(v, x):
if v.has_key(x):
return v[x]
elif isinstance(x, Variable):
return x
elif isinstance(x, Struct):
return Struct( x.database, x.head, [subst(v, xi) for xi in x.subs])
def unify(x,y,v={}):
"""
Find one valuation extending v and unifying x with y
"""
def extend(v, x, t):
"""
Extend valuation v with v[x] = t
"""
v1 = copy.copy(v)
v1[x] = t
return v1
def occur_check(x, t):
"""
Test if the variable x occurs in structure t
"""
if x == t:
return True
elif isinstance(t, Struct):
# recurse into the subterms; the original compared x.head,
# which Variable objects do not define
return any(occur_check(x, ti) for ti in t.subs)
return False
def unify_var(x, t, v):
"""
Test if v can be extended with v[x] = t;
In that case return the extention
Else return None
"""
if x in v:
return unify( v[ x ], t, v)
elif occur_check(x, t):
return None
else:
return extend(v, x, t)
if v == None:
return None
elif x == y:
return v
elif isinstance(x, Variable):
return unify_var(x, y, v)
elif isinstance(y, Variable):
return unify_var(y, x, v)
elif isinstance(x, Struct) and isinstance(y, Struct) and (x.head == y.head):
z = v
n = len(x.subs)
m = len(y.subs)
if n == m:
for i in range( n ):
z = unify( x.subs[i], y.subs[i], z)
return z
else:
return None
else:
return None
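# Worked example (added): with Variables X, Y and Constants 1, 2,
# unify(test(X, 1), test(2, Y)) returns {X: 2, Y: 1}; unification
# returns None when heads or arities differ.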
class LogicalFunction:
"""
This class replaces a logical function once it has
been consulted, to avoid erroneous use
"""
def __init__(self, database, func):
self.database=database
self.logical_function=func
def __call__(self):
raise TypeError, "Logical functions are not really callable"
if __name__ == "__main__":
db = Database('TEST')
print "Defining a prolog program... ",
def prolog_func():
# prolog facts are prefixed with "+"
+ number(0)
+ number(1)
+ number(2)
+ number(3)
+ number(4)
# prolog conditionals have the pattern p << [q1, ..., qn]
test(X, Y) << [number(X), number(Y), 'X==2*Y' ]
# prolog queries are prefixed with "~"
~ test(X, Y)
# Update the database
db << prolog_func
print "done"
print "Before solving"
print db
# Solve the queries
x = db.solve()
print 'Solutions'
for v in x:
for k in v: print k,"=", v[k],' ',
print
print "After solving"
print db
|
mit
| 1,482,748,753,242,608,400
| 27.670968
| 143
| 0.502813
| false
| 4.107209
| false
| false
| false
|
adamlwgriffiths/pyfilesystem
|
fs/watch.py
|
1
|
23532
|
"""
fs.watch
========
Change notification support for FS.
This module defines a standard interface for FS subclasses that support change
notification callbacks. It also offers some WrapFS subclasses that can
simulate such an ability on top of an ordinary FS object.
An FS object that wants to be "watchable" must provide the following methods:
* ``add_watcher(callback,path="/",events=None,recursive=True)``
Request that the given callback be executed in response to changes
to the given path. A specific set of change events can be specified.
This method returns a Watcher object.
* ``del_watcher(watcher_or_callback)``
Remove the given watcher object, or any watchers associated with
the given callback.
If you would prefer to read changes from a filesystem in a blocking fashion
rather than using callbacks, you can use the function 'iter_changes' to obtain
an iterator over the change events.
"""
import sys
import weakref
import threading
import Queue
import traceback
from fs.path import *
from fs.errors import *
from fs.wrapfs import WrapFS
from fs.base import FS
from fs.filelike import FileWrapper
from six import b
class EVENT(object):
"""Base class for change notification events."""
def __init__(self,fs,path):
super(EVENT, self).__init__()
self.fs = fs
if path is not None:
path = abspath(normpath(path))
self.path = path
def __str__(self):
return unicode(self).encode("utf8")
def __unicode__(self):
return u"<fs.watch.%s object (path='%s') at %s>" % (self.__class__.__name__,self.path,hex(id(self)))
def clone(self,fs=None,path=None):
if fs is None:
fs = self.fs
if path is None:
path = self.path
return self.__class__(fs,path)
class ACCESSED(EVENT):
"""Event fired when a file's contents are accessed."""
pass
class CREATED(EVENT):
"""Event fired when a new file or directory is created."""
pass
class REMOVED(EVENT):
"""Event fired when a file or directory is removed."""
pass
class MODIFIED(EVENT):
"""Event fired when a file or directory is modified."""
def __init__(self,fs,path,data_changed=False, closed=False):
super(MODIFIED,self).__init__(fs,path)
self.data_changed = data_changed
self.closed = closed
def clone(self,fs=None,path=None,data_changed=None):
evt = super(MODIFIED,self).clone(fs,path)
if data_changed is None:
data_changed = self.data_changed
evt.data_changed = data_changed
return evt
class MOVED_DST(EVENT):
"""Event fired when a file or directory is the target of a move."""
def __init__(self,fs,path,source=None):
super(MOVED_DST,self).__init__(fs,path)
if source is not None:
source = abspath(normpath(source))
self.source = source
def __unicode__(self):
return u"<fs.watch.%s object (path=%r,src=%r) at %s>" % (self.__class__.__name__,self.path,self.source,hex(id(self)))
def clone(self,fs=None,path=None,source=None):
evt = super(MOVED_DST,self).clone(fs,path)
if source is None:
source = self.source
evt.source = source
return evt
class MOVED_SRC(EVENT):
"""Event fired when a file or directory is the source of a move."""
def __init__(self,fs,path,destination=None):
super(MOVED_SRC,self).__init__(fs,path)
if destination is not None:
destination = abspath(normpath(destination))
self.destination = destination
def __unicode__(self):
return u"<fs.watch.%s object (path=%r,dst=%r) at %s>" % (self.__class__.__name__,self.path,self.destination,hex(id(self)))
def clone(self,fs=None,path=None,destination=None):
evt = super(MOVED_SRC,self).clone(fs,path)
if destination is None:
destination = self.destination
evt.destination = destination
return evt
class CLOSED(EVENT):
"""Event fired when the filesystem is closed."""
pass
class ERROR(EVENT):
"""Event fired when some miscellaneous error occurs."""
pass
class OVERFLOW(ERROR):
"""Event fired when some events could not be processed."""
pass
class Watcher(object):
"""Object encapsulating filesystem watch info."""
def __init__(self,fs,callback,path="/",events=None,recursive=True):
if events is None:
events = (EVENT,)
else:
events = tuple(events)
# Since the FS probably holds a reference to the Watcher, keeping
# a reference back to the FS would create a cycle containing a
# __del__ method. Use a weakref to avoid this.
self._w_fs = weakref.ref(fs)
self.callback = callback
self.path = abspath(normpath(path))
self.events = events
self.recursive = recursive
@property
def fs(self):
return self._w_fs()
def delete(self):
fs = self.fs
if fs is not None:
fs.del_watcher(self)
def handle_event(self,event):
if not isinstance(event,self.events):
return
if event.path is not None:
if not isprefix(self.path,event.path):
return
if not self.recursive:
if event.path != self.path:
if dirname(event.path) != self.path:
return
try:
self.callback(event)
except Exception:
print >>sys.stderr, "error in FS watcher callback", self.callback
traceback.print_exc()
class WatchableFSMixin(FS):
"""Mixin class providing watcher management functions."""
def __init__(self,*args,**kwds):
self._watchers = PathMap()
super(WatchableFSMixin,self).__init__(*args,**kwds)
def __getstate__(self):
state = super(WatchableFSMixin,self).__getstate__()
state.pop("_watchers",None)
return state
def __setstate__(self,state):
super(WatchableFSMixin,self).__setstate__(state)
self._watchers = PathMap()
def add_watcher(self,callback,path="/",events=None,recursive=True):
"""Add a watcher callback to the FS."""
w = Watcher(self,callback,path,events,recursive=recursive)
self._watchers.setdefault(path,[]).append(w)
return w
def del_watcher(self,watcher_or_callback):
"""Delete a watcher callback from the FS."""
if isinstance(watcher_or_callback,Watcher):
self._watchers[watcher_or_callback.path].remove(watcher_or_callback)
else:
for watchers in self._watchers.itervalues():
for i,watcher in enumerate(watchers):
if watcher.callback is watcher_or_callback:
del watchers[i]
break
def _find_watchers(self,callback):
"""Find watchers registered with the given callback."""
for watchers in self._watchers.itervalues():
for watcher in watchers:
if watcher.callback is callback:
yield watcher
def notify_watchers(self,event_or_class,path=None,*args,**kwds):
"""Notify watchers of the given event data."""
if isinstance(event_or_class,EVENT):
event = event_or_class
else:
event = event_or_class(self,path,*args,**kwds)
if path is None:
path = event.path
if path is None:
for watchers in self._watchers.itervalues():
for watcher in watchers:
watcher.handle_event(event)
else:
for prefix in recursepath(path):
if prefix in self._watchers:
for watcher in self._watchers[prefix]:
watcher.handle_event(event)
class WatchedFile(FileWrapper):
"""File wrapper for use with WatchableFS.
This file wrapper provides access to a file opened from a WatchableFS
instance, and fires MODIFIED events when the file is modified.
"""
def __init__(self,file,fs,path,mode=None):
super(WatchedFile,self).__init__(file,mode)
self.fs = fs
self.path = path
self.was_modified = False
def _write(self,string,flushing=False):
self.was_modified = True
return super(WatchedFile,self)._write(string,flushing=flushing)
def _truncate(self,size):
self.was_modified = True
return super(WatchedFile,self)._truncate(size)
def flush(self):
super(WatchedFile,self).flush()
# Don't bother if Python is being torn down
if Watcher is not None:
if self.was_modified:
self.fs.notify_watchers(MODIFIED,self.path,True)
def close(self):
super(WatchedFile,self).close()
# Don't bother if Python is being torn down
if Watcher is not None:
if self.was_modified:
self.fs.notify_watchers(MODIFIED,self.path,True)
class WatchableFS(WatchableFSMixin,WrapFS):
"""FS wrapper simulating watcher callbacks.
This FS wrapper intercepts method calls that modify the underlying FS
and generates appropriate notification events. It thus allows watchers
to monitor changes made through the underlying FS object, but not changes
that might be made through other interfaces to the same filesystem.
"""
def __init__(self, *args, **kwds):
super(WatchableFS, self).__init__(*args, **kwds)
def close(self):
super(WatchableFS, self).close()
self.notify_watchers(CLOSED)
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
existed = self.wrapped_fs.isfile(path)
f = super(WatchableFS, self).open(path,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
line_buffering=line_buffering,
**kwargs)
if not existed:
self.notify_watchers(CREATED, path)
self.notify_watchers(ACCESSED, path)
return WatchedFile(f, self, path, mode)
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
existed = self.wrapped_fs.isfile(path)
ret = super(WatchableFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
if not existed:
self.notify_watchers(CREATED, path)
self.notify_watchers(ACCESSED, path)
if data:
self.notify_watchers(MODIFIED, path, True)
return ret
def createfile(self, path, wipe=False):
existed = self.wrapped_fs.isfile(path)
ret = super(WatchableFS, self).createfile(path, wipe=wipe)
if not existed:
self.notify_watchers(CREATED,path)
self.notify_watchers(ACCESSED,path)
return ret
def makedir(self,path,recursive=False,allow_recreate=False):
existed = self.wrapped_fs.isdir(path)
try:
super(WatchableFS,self).makedir(path,allow_recreate=allow_recreate)
except ParentDirectoryMissingError:
if not recursive:
raise
parent = dirname(path)
if parent != path:
self.makedir(dirname(path),recursive=True,allow_recreate=True)
super(WatchableFS,self).makedir(path,allow_recreate=allow_recreate)
if not existed:
self.notify_watchers(CREATED,path)
def remove(self,path):
super(WatchableFS,self).remove(path)
self.notify_watchers(REMOVED,path)
def removedir(self,path,recursive=False,force=False):
if not force:
for nm in self.listdir(path):
raise DirectoryNotEmptyError(path)
else:
for nm in self.listdir(path,dirs_only=True):
try:
self.removedir(pathjoin(path,nm),force=True)
except ResourceNotFoundError:
pass
for nm in self.listdir(path,files_only=True):
try:
self.remove(pathjoin(path,nm))
except ResourceNotFoundError:
pass
super(WatchableFS,self).removedir(path)
self.notify_watchers(REMOVED,path)
if recursive:
parent = dirname(path)
while parent and not self.listdir(parent):
super(WatchableFS,self).removedir(parent)
self.notify_watchers(REMOVED,parent)
parent = dirname(parent)
def rename(self,src,dst):
d_existed = self.wrapped_fs.exists(dst)
super(WatchableFS,self).rename(src,dst)
if d_existed:
self.notify_watchers(REMOVED,dst)
self.notify_watchers(MOVED_DST,dst,src)
self.notify_watchers(MOVED_SRC,src,dst)
def copy(self,src,dst,**kwds):
d = self._pre_copy(src,dst)
super(WatchableFS,self).copy(src,dst,**kwds)
self._post_copy(src,dst,d)
def copydir(self,src,dst,**kwds):
d = self._pre_copy(src,dst)
super(WatchableFS,self).copydir(src,dst,**kwds)
self._post_copy(src,dst,d)
def move(self,src,dst,**kwds):
d = self._pre_copy(src,dst)
super(WatchableFS,self).move(src,dst,**kwds)
self._post_copy(src,dst,d)
self._post_move(src,dst,d)
def movedir(self,src,dst,**kwds):
d = self._pre_copy(src,dst)
super(WatchableFS,self).movedir(src,dst,**kwds)
self._post_copy(src,dst,d)
self._post_move(src,dst,d)
def _pre_copy(self,src,dst):
dst_paths = {}
try:
for (dirnm,filenms) in self.wrapped_fs.walk(dst):
dirnm = dirnm[len(dst)+1:]
dst_paths[dirnm] = True
for filenm in filenms:
dst_paths[filenm] = False
except ResourceNotFoundError:
pass
except ResourceInvalidError:
dst_paths[""] = False
src_paths = {}
try:
for (dirnm,filenms) in self.wrapped_fs.walk(src):
dirnm = dirnm[len(src)+1:]
src_paths[dirnm] = True
for filenm in filenms:
src_paths[pathjoin(dirnm,filenm)] = False
except ResourceNotFoundError:
pass
except ResourceInvalidError:
src_paths[""] = False
return (src_paths,dst_paths)
def _post_copy(self,src,dst,data):
(src_paths,dst_paths) = data
for src_path,isdir in sorted(src_paths.items()):
path = pathjoin(dst,src_path)
if src_path in dst_paths:
self.notify_watchers(MODIFIED,path,not isdir)
else:
self.notify_watchers(CREATED,path)
for dst_path,isdir in sorted(dst_paths.items()):
path = pathjoin(dst,dst_path)
if not self.wrapped_fs.exists(path):
self.notify_watchers(REMOVED,path)
def _post_move(self,src,dst,data):
(src_paths,dst_paths) = data
for src_path,isdir in sorted(src_paths.items(),reverse=True):
path = pathjoin(src,src_path)
self.notify_watchers(REMOVED,path)
def setxattr(self,path,name,value):
super(WatchableFS,self).setxattr(path,name,value)
self.notify_watchers(MODIFIED,path,False)
def delxattr(self,path,name):
super(WatchableFS,self).delxattr(path,name)
self.notify_watchers(MODIFIED,path,False)
class PollingWatchableFS(WatchableFS):
"""FS wrapper simulating watcher callbacks by periodic polling.
This FS wrapper augments the functionality of WatchableFS by periodically
polling the underlying FS for changes. It is thus capable of detecting
changes made to the underlying FS via other interfaces, albeit with a
(configurable) delay to account for the polling interval.
"""
def __init__(self,wrapped_fs,poll_interval=60*5):
super(PollingWatchableFS,self).__init__(wrapped_fs)
self.poll_interval = poll_interval
self.add_watcher(self._on_path_modify,"/",(CREATED,MOVED_DST,))
self.add_watcher(self._on_path_modify,"/",(MODIFIED,ACCESSED,))
self.add_watcher(self._on_path_delete,"/",(REMOVED,MOVED_SRC,))
self._path_info = PathMap()
self._poll_thread = threading.Thread(target=self._poll_for_changes)
self._poll_cond = threading.Condition()
self._poll_close_event = threading.Event()
self._poll_thread.start()
def close(self):
self._poll_close_event.set()
self._poll_thread.join()
super(PollingWatchableFS,self).close()
def _on_path_modify(self,event):
path = event.path
try:
try:
self._path_info[path] = self.wrapped_fs.getinfo(path)
except ResourceNotFoundError:
self._path_info.clear(path)
except FSError:
pass
def _on_path_delete(self,event):
self._path_info.clear(event.path)
def _poll_for_changes(self):
try:
while not self._poll_close_event.isSet():
# Walk all directories looking for changes.
# Come back to any that give us an error.
error_paths = set()
for dirnm in self.wrapped_fs.walkdirs():
if self._poll_close_event.isSet():
break
try:
self._check_for_changes(dirnm)
except FSError:
error_paths.add(dirnm)
# Retry the directories that gave us an error, until
# we have successfully updated them all
while error_paths and not self._poll_close_event.isSet():
dirnm = error_paths.pop()
if self.wrapped_fs.isdir(dirnm):
try:
self._check_for_changes(dirnm)
except FSError:
error_paths.add(dirnm)
# Notify that we have completed a polling run
self._poll_cond.acquire()
self._poll_cond.notifyAll()
self._poll_cond.release()
# Sleep for the specified interval, or until closed.
self._poll_close_event.wait(timeout=self.poll_interval)
except FSError:
if not self.closed:
raise
def _check_for_changes(self,dirnm):
# Check the metadata for the directory itself.
new_info = self.wrapped_fs.getinfo(dirnm)
try:
old_info = self._path_info[dirnm]
except KeyError:
self.notify_watchers(CREATED,dirnm)
else:
if new_info != old_info:
self.notify_watchers(MODIFIED,dirnm,False)
# Check the metadata for each file in the directory.
# We assume that if the file's data changes, something in its
# metadata will also change; don't want to read through each file!
# Subdirectories will be handled by the outer polling loop.
for filenm in self.wrapped_fs.listdir(dirnm,files_only=True):
if self._poll_close_event.isSet():
return
fpath = pathjoin(dirnm,filenm)
new_info = self.wrapped_fs.getinfo(fpath)
try:
old_info = self._path_info[fpath]
except KeyError:
self.notify_watchers(CREATED,fpath)
else:
was_accessed = False
was_modified = False
for (k,v) in new_info.iteritems():
if k not in old_info:
was_modified = True
break
elif old_info[k] != v:
if k in ("accessed_time","st_atime",):
was_accessed = True
elif k:
was_modified = True
break
else:
for k in old_info:
if k not in new_info:
was_modified = True
break
if was_modified:
self.notify_watchers(MODIFIED,fpath,True)
elif was_accessed:
self.notify_watchers(ACCESSED,fpath)
# Check for deletion of cached child entries.
for childnm in self._path_info.iternames(dirnm):
if self._poll_close_event.isSet():
return
cpath = pathjoin(dirnm,childnm)
if not self.wrapped_fs.exists(cpath):
self.notify_watchers(REMOVED,cpath)
def ensure_watchable(fs,wrapper_class=PollingWatchableFS,*args,**kwds):
"""Ensure that the given fs supports watching, simulating it if necessary.
Given an FS object, this function returns an equivalent FS that has support
for watcher callbacks. This may be the original object if it supports them
natively, or a wrapper class if they must be simulated.
"""
if isinstance(fs,wrapper_class):
return fs
try:
w = fs.add_watcher(lambda e: None,"/",recursive=False)
except (AttributeError,FSError):
return wrapper_class(fs,*args,**kwds)
else:
fs.del_watcher(w)
return fs
class iter_changes(object):
"""Blocking iterator over the change events produced by an FS.
This class can be used to transform the callback-based watcher mechanism
into a blocking stream of events. It operates by having the callbacks
push events onto a queue as they come in, then reading them off one at a
time.
"""
def __init__(self,fs=None,path="/",events=None,**kwds):
self.closed = False
self._queue = Queue.Queue()
self._watching = set()
if fs is not None:
self.add_watcher(fs,path,events,**kwds)
def __iter__(self):
return self
def __del__(self):
self.close()
def next(self,timeout=None):
if not self._watching:
raise StopIteration
try:
event = self._queue.get(timeout=timeout)
except Queue.Empty:
raise StopIteration
if event is None:
raise StopIteration
if isinstance(event,CLOSED):
event.fs.del_watcher(self._enqueue)
self._watching.remove(event.fs)
return event
def close(self):
if not self.closed:
self.closed = True
for fs in self._watching:
fs.del_watcher(self._enqueue)
self._queue.put(None)
def add_watcher(self,fs,path="/",events=None,**kwds):
w = fs.add_watcher(self._enqueue,path,events,**kwds)
self._watching.add(fs)
return w
def _enqueue(self,event):
self._queue.put(event)
def del_watcher(self,watcher):
for fs in self._watching:
try:
fs.del_watcher(watcher)
break
except ValueError:
pass
else:
raise ValueError("watcher not found: %s" % (watcher,))
|
bsd-3-clause
| -2,816,136,849,653,434,000
| 34.386466
| 130
| 0.581464
| false
| 4.07269
| false
| false
| false
|
mudyc/deftity
|
text.py
|
1
|
2784
|
# deftity - a tool for interaction architect
#
# Copyright (C) 2011 Matti Katila
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Written by Matti J. Katila, 2011
import pango
import pangocairo
import cairo
import tool
import actions
class TextComp(tool.Component, actions.KeyHandler):
def __init__(self):
self.wh = [100, 40]
self.data = { 'text': 'Text..', 'size': '' }
self.modelF = self.get_data
self.name = 'text'
def save_data(self):
ret = tool.Component.save_data(self)
ret['data'] = self.data
return ret
def get_data(self): return self.data
def pos(self, x,y): self.xy = [ x,y]
def size(self, w,h): self.wh = [w,h]
def xywh(self): return (self.xy[0], self.xy[1], self.wh[0], self.wh[1])
def draw(self, c, tc, mx, my):
x,y,w,h = self.xywh()
if self.is_close(mx, my):
c.new_path()
c.rectangle(x,y,w,h)
c.close_path()
c.set_source(cairo.SolidPattern(1,0,.7, .2))
c.fill_preserve()
c.move_to(x, y)
pctx = pangocairo.CairoContext(c)
pctx.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
layout = pctx.create_layout()
self.layout = layout
fontname = "Sans "+str(self.data['size'])
font = pango.FontDescription(fontname)
layout.set_font_description(font)
layout.set_width(int(w*pango.SCALE))
layout.set_wrap(pango.WRAP_WORD_CHAR)
layout.set_justify(True)
layout.set_text(self.modelF()[self.name])
if self in tc.selected_comps:
c.set_source_rgb(1, 0, 0)
else:
c.set_source_rgb(0, 0, 0)
pctx.update_layout(layout)
pctx.show_layout(layout)
def mouse_released(self, tc, mx,my):
x,y,w,h = self.xywh()
tc.cursor.set_obj(self, self.layout.xy_to_index(
int((mx-x)*pango.SCALE), int((my-y)*pango.SCALE))[0])
def key(self, k, cur=None):
actions.KeyHandler.key(self, k, cur, {'Return': '\n'})
if self.modelF()[self.name] == '':
self.tool.comps.remove(self)
|
gpl-2.0
| 6,217,041,692,376,003,000
| 32.95122
| 80
| 0.619253
| false
| 3.233449
| false
| false
| false
|
minesense/VisTrails
|
vistrails/db/versions/v0_8_1/translate/v0_8_0.py
|
2
|
4302
|
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
from vistrails.db import VistrailsDBException
from vistrails.db.versions.v0_8_0.domain import DBAdd, DBAnnotation, DBChange, DBDelete
# two step process
# 1. remap all the old "notes" so that they exist in the id scope
# 2. remap all the annotations that were numbered correctly
# note that for 2, we don't need to worry about uniqueness -- they are unique
# but step 1 may have taken some of their ids...
def translateVistrail(vistrail):
id_remap = {}
for action in vistrail.db_get_actions():
# don't need to change key idx since none of that changes
new_action_idx = {}
for annotation in action.db_get_annotations():
annotation.db_id = vistrail.idScope.getNewId(DBAnnotation.vtType)
new_action_idx[annotation.db_id] = annotation
action.db_annotations_id_index = new_action_idx
for operation in action.db_get_operations():
# never have annotations as parent objs so
# don't have to worry about those ids
if operation.db_what == DBAnnotation.vtType:
if operation.vtType == 'add':
new_id = vistrail.idScope.getNewId(DBAnnotation.vtType)
old_id = operation.db_objectId
operation.db_objectId = new_id
operation.db_data.db_id = new_id
id_remap[old_id] = new_id
elif operation.vtType == 'change':
changed_id = operation.db_oldObjId
if id_remap.has_key(changed_id):
operation.db_oldObjId = id_remap[changed_id]
else:
raise VistrailsDBException('cannot translate')
new_id = vistrail.idScope.getNewId(DBAnnotation.vtType)
old_id = operation.db_newObjId
operation.db_newObjId = new_id
operation.db_data.db_id = new_id
id_remap[old_id] = new_id
elif operation.vtType == 'delete':
old_id = operation.db_objectId
if id_remap.has_key(old_id):
operation.db_objectId = id_remap[old_id]
else:
raise VistrailsDBException('cannot translate')
vistrail.db_version = '0.8.1'
return vistrail
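# Illustrative example of the remapping above (ids invented): if an 'add'
# operation originally created annotation 3 and a later 'change' renamed
# 3 -> 5, translation draws fresh ids from the vistrail's id scope, e.g.
# id_remap == {3: 17, 5: 18}, so every subsequent 'change'/'delete' that
# refers to an old id is rewritten consistently.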
|
bsd-3-clause
| -7,632,964,265,899,056,000
| 48.448276
| 87
| 0.630404
| false
| 4.34107
| false
| false
| false
|
bohdan-shramko/learning-python
|
source/chapter05/geek_translator.py
|
1
|
2413
|
# Geek Translator
# Demonstrates using dictionaries
geek = {"404": "clueless. From the web error message 404, meaning page not found.",
"Googling": "searching the Internet for background information on a person.",
"Keyboard Plaque" : "the collection of debris found in computer keyboards.",
"Link Rot" : "the process by which web page links become obsolete.",
"Percussive Maintenance" : "the act of striking an electronic device to make it work.",
"Uninstalled" : "being fired. Especially popular during the dot-bomb era."}
choice = None
while choice != "0":
print(
"""
Geek Translator
0 - Quit
1 - Look Up a Geek Term
2 - Add a Geek Term
3 - Redefine a Geek Term
4 - Delete a Geek Term
"""
)
choice = input("Choice: ")
print()
# exit
if choice == "0":
print("Good-bye.")
# get a definition
elif choice == "1":
term = input("What term do you want me to translate?: ")
if term in geek:
definition = geek[term]
print("\n", term, "means", definition)
else:
print("\nSorry, I don't know", term)
# add a term-definition pair
elif choice == "2":
term = input("What term do you want me to add?: ")
if term not in geek:
definition = input("\nWhat's the definition?: ")
geek[term] = definition
print("\n", term, "has been added.")
else:
print("\nThat term already exists! Try redefining it.")
# redefine an existing term
elif choice == "3":
term = input("What term do you want me to redefine?: ")
if term in geek:
definition = input("What's the new definition?: ")
geek[term] = definition
print("\n", term, "has been redefined.")
else:
print("\nThat term doesn't exist! Try adding it.")
# delete a term-definition pair
elif choice == "4":
term = input("What term do you want me to delete?: ")
if term in geek:
del geek[term]
print("\nOkay, I deleted", term)
else:
print("\nI can't do that!", term, "doesn't exist in the dictionary.")
# some unknown choice
else:
print("\nSorry, but", choice, "isn't a valid choice.")
input("\n\nPress the enter key to exit.")
|
mit
| -6,250,750,825,611,069,000
| 31.173333
| 95
| 0.56237
| false
| 3.917208
| false
| false
| false
|
kacchan822/django-chatwork
|
chatwork/utils.py
|
1
|
1552
|
from django.template.loader import render_to_string
from .api import ChatworkApiClient
client = ChatworkApiClient()
api_account_info = client.get_my_profile()
api_account_id = getattr(api_account_info, 'account_id', '0')
api_room_id = getattr(api_account_info, 'room_id', '0')
def get_rooms(room_type='group'):
""" 所属するルームを取得する """
rooms = client.get_rooms()
return [room for room in rooms if room['type'] == room_type]
def send_chatwork(text, room, title=None, to_all=None):
""" 一つのルームにメッセージを送信する """
context = {
'body': text,
'title': title,
'to_all': to_all,
}
message = render_to_string('chatwork/message.txt', context)
return client.add_messages(room, message.strip())
def send_chatwork_many(text, rooms, title=None, to_all=None):
""" 複数のルームにメッセージを送信する """
results = []
for room in rooms:
result = send_chatwork(text, room, title=title, to_all=to_all)
results.append(result)
return results
def delete_message(room_id, message_id):
""" 指定したメッセージを削除する """
return client.delete_message(room_id, message_id)
def create_task(text, room, assigned_to, limit=None, **kwargs):
""" タスクを依頼する """
data = {
'body': text,
'to_ids': ','.join(list(map(str, assigned_to))),
}
if limit is not None:
data['limit'] = int(limit.timestamp())
return client.add_tasks(room, **data)
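# A short usage sketch (assumptions: ChatworkApiClient credentials are already
# configured in Django settings, and the room/account ids below are
# placeholders):
#
#     rooms = get_rooms('group')
#     send_chatwork('Deploy finished', rooms[0]['room_id'], title='CI')
#     create_task('Review the release notes', rooms[0]['room_id'],
#                 assigned_to=[12345])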
|
mit
| 5,175,957,696,668,686,000
| 26.764706
| 70
| 0.632768
| false
| 2.71785
| false
| false
| false
|
COCS4950G7/COSC4950
|
Source/demoCrack2.py
|
1
|
6002
|
# Chris Bugg
# 10/1/14
# NOTE: Runs on Python 2.7.6
# UPDATE:
# 10/10/14
# -> Now runs with 8 sub-processes using
# the [a-z] alphabet
import hashlib
from time import time
from multiprocessing import Process, Pipe, Lock
import os
class DemoCrack():
algorithm = "sha256"
origHash = ''
alphabet = list("abcdefghijklmnopqrstuvwxyz")
chunk1 = 1
chunk2 = 1
key = ''
alphaChoice = "abcdefghijklmnopqrstuvwxyz"
countey = 0
def __init__(self):
os.system('cls' if os.name == 'nt' else 'clear')
self.whatWeGot()
self.getHash()
os.system('cls' if os.name == 'nt' else 'clear')
self.whatWeGot()
self.chunkIt()
start = time()
self.countey += 1
lock = Lock()
parentPipe, childPipe = Pipe()
child1 = Process(target=self.subProcess, args=(childPipe, lock, ))
child2 = Process(target=self.subProcess, args=(childPipe, lock, ))
child3 = Process(target=self.subProcess, args=(childPipe, lock, ))
child4 = Process(target=self.subProcess, args=(childPipe, lock, ))
child5 = Process(target=self.subProcess, args=(childPipe, lock, ))
child6 = Process(target=self.subProcess, args=(childPipe, lock, ))
child7 = Process(target=self.subProcess, args=(childPipe, lock, ))
child8 = Process(target=self.subProcess, args=(childPipe, lock, ))
child1.start()
child2.start()
child3.start()
child4.start()
child5.start()
child6.start()
child7.start()
child8.start()
parentPipe.send("6")
parentPipe.send(self.chunk1)
parentPipe.send("6")
parentPipe.send(self.chunk2)
parentPipe.send("6")
parentPipe.send(self.chunk3)
parentPipe.send("6")
parentPipe.send(self.chunk4)
parentPipe.send("6")
parentPipe.send(self.chunk5)
parentPipe.send("6")
parentPipe.send(self.chunk6)
parentPipe.send("6")
parentPipe.send(self.chunk7)
parentPipe.send("6")
parentPipe.send(self.chunk8)
count = 0
done = False
rec = 0
while not done:
if count > 7:
child1.join()
child2.join()
child3.join()
child4.join()
child5.join()
child6.join()
child7.join()
child8.join()
print "No Dice!"
done = True
else:
rec = parentPipe.recv()
if rec == "found":
self.countey = parentPipe.recv()
child1.terminate()
child2.terminate()
child3.terminate()
child4.terminate()
child5.terminate()
child6.terminate()
child7.terminate()
child8.terminate()
done = True
count += 1
elapsed = (time() - start)
print "That took: ", elapsed, " seconds."
speed = (8 * int(self.countey)) / elapsed
if rec == "found":
print "At about: ", speed, " hashes per second."
exit = raw_input("Hit (Enter/Return) to quit ")
def subProcess(self, pipe, lock):
lock.acquire()
loops = pipe.recv()
alphabet = pipe.recv()
lock.release()
if self.looper6(alphabet) == True:
lock.acquire()
pipe.send("found")
pipe.send(self.countey)
pipe.close()
            lock.release()
else:
lock.acquire()
pipe.send("not found")
pipe.close()
            lock.release()
def chunkIt(self):
chunky = [self.alphabet[i::8] for i in range(8)]
self.chunk1 = chunky.pop()
self.chunk2 = chunky.pop()
self.chunk3 = chunky.pop()
self.chunk4 = chunky.pop()
self.chunk5 = chunky.pop()
self.chunk6 = chunky.pop()
self.chunk7 = chunky.pop()
self.chunk8 = chunky.pop()
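    # Illustration of the slicing used in chunkIt (not part of the original
    # program): alphabet[i::8] deals the 26 letters round-robin into 8
    # near-equal chunks, e.g. alphabet[0::8] == ['a', 'i', 'q', 'y'] and
    # alphabet[1::8] == ['b', 'j', 'r', 'z'].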
def getHash(self):
key = raw_input("What's the 6 LowerCase-Letter Key: ")
self.key = key
tempKey = hashlib.sha256()
byteKey = str.encode(key)
type(byteKey)
tempKey.update(byteKey)
self.origHash = tempKey.hexdigest()
print "The Key you entered was: ", key
print "Which has a hash of: ", self.origHash
def whatWeGot(self):
print "**********************************"
print "Here's what we've got so far: "
print
print "Key is: ", self.key
print "Hash is: ", self.origHash
print "Searching: ", self.alphaChoice
print "**********************************"
def isSolution(self, key):
tempKey = hashlib.sha256()
byteKey = str.encode(key)
type(byteKey)
tempKey.update(byteKey)
possible = tempKey.hexdigest()
if possible == self.origHash:
print
print"Solution found!"
print "Key is: ", key
print "Which has a hash of: ", possible
return True
else:
return False
def looper6(self, alphabet):
for x in alphabet:
print "Searching ...", x, "*****"
for y in self.alphabet:
for z in self.alphabet:
for a in self.alphabet:
for b in self.alphabet:
for c in self.alphabet:
self.countey += 1
key = x + y + z + a + b + c
if self.isSolution(key):
return True
return False
DemoCrack()
|
gpl-3.0
| -6,494,871,366,440,019,000
| 18.178914
| 74
| 0.489837
| false
| 4.147892
| false
| false
| false
|
home-assistant/home-assistant
|
homeassistant/components/juicenet/__init__.py
|
1
|
3040
|
"""The JuiceNet integration."""
from datetime import timedelta
import logging
import aiohttp
from pyjuicenet import Api, TokenError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR
from .device import JuiceNetApi
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["sensor", "switch"]
CONFIG_SCHEMA = vol.Schema(
vol.All(
cv.deprecated(DOMAIN),
{DOMAIN: vol.Schema({vol.Required(CONF_ACCESS_TOKEN): cv.string})},
),
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the JuiceNet component."""
conf = config.get(DOMAIN)
hass.data.setdefault(DOMAIN, {})
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up JuiceNet from a config entry."""
config = entry.data
session = async_get_clientsession(hass)
access_token = config[CONF_ACCESS_TOKEN]
api = Api(access_token, session)
juicenet = JuiceNetApi(api)
try:
await juicenet.setup()
except TokenError as error:
_LOGGER.error("JuiceNet Error %s", error)
return False
except aiohttp.ClientError as error:
_LOGGER.error("Could not reach the JuiceNet API %s", error)
raise ConfigEntryNotReady from error
if not juicenet.devices:
_LOGGER.error("No JuiceNet devices found for this account")
return False
_LOGGER.info("%d JuiceNet device(s) found", len(juicenet.devices))
async def async_update_data():
"""Update all device states from the JuiceNet API."""
for device in juicenet.devices:
await device.update_state(True)
return True
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="JuiceNet",
update_method=async_update_data,
update_interval=timedelta(seconds=30),
)
hass.data[DOMAIN][entry.entry_id] = {
JUICENET_API: juicenet,
JUICENET_COORDINATOR: coordinator,
}
await coordinator.async_config_entry_first_refresh()
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
apache-2.0
| 7,707,611,442,826,321,000
| 27.679245
| 82
| 0.694079
| false
| 3.753086
| true
| false
| false
|
gt-ros-pkg/hrl-pr2
|
hrl_pr2_lib/src/hrl_pr2_lib/pr2.py
|
1
|
22369
|
import roslib; roslib.load_manifest('hrl_pr2_lib')
import rospy
import actionlib
import actionlib_msgs.msg as amsg
import move_base_msgs.msg as mm
import sensor_msgs.msg as sm
import pr2_controllers_msgs.msg as pm
import trajectory_msgs.msg as tm
import pr2_mechanism_msgs.srv as pmm
import std_msgs.msg as stdm
import geometry_msgs.msg as gm
import dynamic_reconfigure.client as dc
import tf
import tf.transformations as tr
import hrl_lib.tf_utils as tfu
import hrl_lib.rutils as ru
import hrl_lib.util as ut
import functools as ft
import numpy as np
import math
import time
import hrl_pr2_lib.msg as hm
#from sound_play.libsoundplay import SoundClient
#from interpolated_ik_motion_planner import ik_utilities as iku
import pr2_kinematics as pr2k
import os
import os.path as pt
import pdb
#Test this
def unwrap2(cpos, npos):
two_pi = 2*np.pi
nin = npos % two_pi
n_multiples_2pi = np.floor(cpos/two_pi)
return nin + n_multiples_2pi*two_pi
def unwrap(cpos, npos):
two_pi = 2*np.pi
if cpos < npos:
while cpos < npos:
npos = npos - two_pi
npos = npos + two_pi
elif cpos > npos:
while cpos > npos:
npos = npos + two_pi
npos = npos - two_pi
return npos
def diff_arm_pose(pose1, pose2):
pcpy = pose2.copy()
pcpy[4,0] = unwrap2(pose1[4,0], pose2[4,0])
pcpy[6,0] = unwrap2(pose1[6,0], pose2[6,0])
diff = pose1 - pose2
for i in range(pose1.shape[0]):
diff[i,0] = ut.standard_rad(diff[i,0])
return diff
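# A small illustrative check of the unwrapping helpers above (values computed
# by hand, not taken from the robot): unwrap2 shifts the target angle into the
# same 2*pi band as the current joint position, so continuous wrist joints do
# not spin the long way around.
#
#     >>> unwrap2(7.0, 0.5)     # current pose is about one revolution up
#     6.783185307179586         # i.e. 0.5 + 2*pi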
class KinematicsError(Exception):
def __init__(self, value):
self.parameter = value
def __str__(self):
return repr(self.parameter)
class Joint:
def __init__(self, name, joint_provider):
self.joint_provider = joint_provider
self.joint_names = rospy.get_param('/%s/joints' % name)
self.pub = rospy.Publisher('%s/command' % name, tm.JointTrajectory)
self.names_index = None
self.zeros = [0 for j in range(len(self.joint_names))]
def pose(self, joint_states=None):
if joint_states == None:
joint_states = self.joint_provider()
if self.names_index == None:
self.names_index = {}
for i, n in enumerate(joint_states.name):
self.names_index[n] = i
self.joint_idx = [self.names_index[n] for n in self.joint_names]
return (np.matrix(joint_states.position).T)[self.joint_idx, 0]
def _create_trajectory(self, pos_mat, times, vel_mat=None):
#Make JointTrajectoryPoints
points = [tm.JointTrajectoryPoint() for i in range(pos_mat.shape[1])]
for i in range(pos_mat.shape[1]):
points[i].positions = pos_mat[:,i].A1.tolist()
points[i].accelerations = self.zeros
if vel_mat == None:
points[i].velocities = self.zeros
else:
points[i].velocities = vel_mat[:,i].A1.tolist()
for i in range(pos_mat.shape[1]):
points[i].time_from_start = rospy.Duration(times[i])
#Create JointTrajectory
jt = tm.JointTrajectory()
jt.joint_names = self.joint_names
jt.points = points
jt.header.stamp = rospy.get_rostime()
return jt
def set_poses(self, pos_mat, times):
joint_trajectory = self._create_trajectory(pos_mat, times)
self.pub.publish(joint_trajectory)
class PR2Arm(Joint):
def __init__(self, joint_provider, tf_listener, arm, use_kinematics=True):
joint_controller_name = arm + '_arm_controller'
cart_controller_name = arm + '_arm_cartesian_pose_controller'
Joint.__init__(self, joint_controller_name, joint_provider)
self.arm = arm
self.tf_listener = tf_listener
self.client = actionlib.SimpleActionClient('/%s/joint_trajectory_action' % joint_controller_name, pm.JointTrajectoryAction)
rospy.loginfo('pr2arm: waiting for server %s' % joint_controller_name)
self.client.wait_for_server()
self.joint_controller_name = joint_controller_name
self.cart_posture_pub = rospy.Publisher("/%s/command_posture" % cart_controller_name, stdm.Float64MultiArray).publish
self.cart_pose_pub = rospy.Publisher("/%s/command" % cart_controller_name, gm.PoseStamped).publish
if arm == 'l':
self.full_arm_name = 'left'
else:
self.full_arm_name = 'right'
if use_kinematics:
self.kinematics = pr2k.PR2ArmKinematics(self.full_arm_name,
self.tf_listener)
#self.ik_utilities = iku.IKUtilities(self.full_arm_name, self.tf_listener)
self.POSTURES = {
'off': np.matrix([]),
'mantis': np.matrix([0, 1, 0, -1, 3.14, -1, 3.14]).T,
'elbowupr': np.matrix([-0.79,0,-1.6, 9999, 9999, 9999, 9999]).T,
'elbowupl': np.matrix([0.79,0,1.6 , 9999, 9999, 9999, 9999]).T,
'old_elbowupr': np.matrix([-0.79,0,-1.6, -0.79,3.14, -0.79,5.49]).T,
'old_elbowupl': np.matrix([0.79,0,1.6, -0.79,3.14, -0.79,5.49]).T,
'elbowdownr': np.matrix([-0.028262077316910873, 1.2946342642324222, -0.25785640577652386, -1.5498884526859626]).T,
'elbowdownl': np.matrix([-0.0088195719039858515, 1.2834828245284853, 0.20338442004843196, -1.5565279256852611]).T
}
def set_posture(self, posture_mat):
self.cart_posture_pub(stdm.Float64MultiArray(data=posture_mat.A1.tolist()))
##
# Send a cartesian pose to *_cart controllers
# @param trans len 3 list
# @param rot len 4 list
# @param frame string
# @param msg_time float
def set_cart_pose(self, trans, rot, frame, msg_time):
ps = gm.PoseStamped()
for i, field in enumerate(['x', 'y', 'z']):
exec('ps.pose.position.%s = trans[%d]' % (field, i))
for i, field in enumerate(['x', 'y', 'z', 'w']):
exec('ps.pose.orientation.%s = rot[%d]' % (field, i))
ps.header.frame_id = frame
ps.header.stamp = rospy.Time(msg_time)
self.cart_pose_pub(ps)
##
# @param pos_mat column matrix of poses
# @param times array of times
def set_poses(self, pos_mat, times, vel_mat=None, block=True):
p = self.pose()
for i in range(pos_mat.shape[1]):
pos_mat[4,i] = unwrap2(p[4,0], pos_mat[4,i])
pos_mat[6,i] = unwrap2(p[6,0], pos_mat[6,i])
p = pos_mat[:,i]
joint_traj = Joint._create_trajectory(self, pos_mat, times, vel_mat)
#Create goal msg
joint_traj.header.stamp = rospy.get_rostime() + rospy.Duration(1.)
g = pm.JointTrajectoryGoal()
g.trajectory = joint_traj
self.client.send_goal(g)
if block:
return self.client.wait_for_result()
return self.client.get_state()
def stop_trajectory_execution(self):
self.client.cancel_all_goals()
def has_active_goal(self):
s = self.client.get_state()
if s == amsg.GoalStatus.ACTIVE or s == amsg.GoalStatus.PENDING:
return True
else:
return False
def set_poses_monitored(self, pos_mat, times, vel_mat=None, block=True, time_look_ahead=.050):
joint_traj = Joint._create_trajectory(self, pos_mat, times, vel_mat)
#Create goal msg
joint_traj.header.stamp = rospy.get_rostime() + rospy.Duration(1.)
g = pm.JointTrajectoryGoal()
g.trajectory = joint_traj
self.client.send_goal(g)
if block:
return self.client.wait_for_result()
return self.client.get_state()
def set_pose(self, pos, nsecs=5., block=True):
for i in range(2):
cpos = self.pose()
pos[4,0] = unwrap(cpos[4,0], pos[4,0])
pos[6,0] = unwrap(cpos[6,0], pos[6,0])
self.set_poses(np.column_stack([pos]), np.array([nsecs]), block=block)
#self.set_poses(np.column_stack([cpos, pos]), np.array([min_time, min_time+nsecs]), block=block)
def pose_cartesian(self, frame='base_link'):
gripper_tool_frame = self.arm + '_gripper_tool_frame'
return tfu.transform(frame, gripper_tool_frame, self.tf_listener)
def pose_cartesian_tf(self, frame='base_link'):
p, r = tfu.matrix_as_tf(self.pose_cartesian(frame))
return np.matrix(p).T, np.matrix(r).T
class PR2Head(Joint):
def __init__(self, name, joint_provider):
Joint.__init__(self, name, joint_provider)
self.head_client = actionlib.SimpleActionClient('head_traj_controller/point_head_action',
pm.PointHeadAction)
def look_at(self, pt3d, frame='base_link', pointing_frame="wide_stereo_link",
pointing_axis=np.matrix([1, 0, 0.]).T, wait=True):
g = pm.PointHeadGoal()
g.target.header.frame_id = frame
g.target.point = gm.Point(*pt3d.T.A1.tolist())
#pdb.set_trace()
g.pointing_frame = pointing_frame
g.pointing_axis.x = pointing_axis[0,0]
g.pointing_axis.y = pointing_axis[1,0]
g.pointing_axis.z = pointing_axis[2,0]
g.min_duration = rospy.Duration(1.0)
g.max_velocity = 10.
self.head_client.send_goal(g)
if wait:
self.head_client.wait_for_result(rospy.Duration(1.))
if self.head_client.get_state() == amsg.GoalStatus.SUCCEEDED:
return True
else:
return False
def set_pose(self, pos, nsecs=5.):
for i in range(2):
cpos = self.pose()
min_time = .1
self.set_poses(np.column_stack([cpos, pos]), np.array([min_time, min_time+nsecs]))
###
# DANGER. DON"T RUN STOP AND WALK AWAY.
##
class PR2Base:
def __init__(self, tflistener):
self.tflistener = tflistener
self.client = actionlib.SimpleActionClient('move_base', mm.MoveBaseAction)
rospy.loginfo('pr2base: waiting for move_base')
self.client.wait_for_server()
rospy.loginfo('pr2base: waiting transforms')
try:
self.tflistener.waitForTransform('map', 'base_footprint', rospy.Time(), rospy.Duration(20))
except Exception, e:
rospy.loginfo('pr2base: WARNING! Transform from map to base_footprint not found! Did you launch the nav stack?')
# pass
self.go_angle_client = actionlib.SimpleActionClient('go_angle', hm.GoAngleAction)
self.go_xy_client = actionlib.SimpleActionClient('go_xy', hm.GoXYAction)
##
# Turns to given angle using pure odometry
def turn_to(self, angle, block=True):
goal = hm.GoAngleGoal()
goal.angle = angle
self.go_angle_client.send_goal(goal)
print 'SENT TURN GOAL'
if block:
rospy.loginfo('turn_to: waiting for turn..')
self.go_angle_client.wait_for_result()
rospy.loginfo('turn_to: done.')
##
# Turns a relative amount given angle using pure odometry
def turn_by(self, delta_ang, block=True, overturn=False):
#overturn
if overturn and (abs(delta_ang) < math.radians(10.)):
#turn in that direction by an extra 15 deg
turn1 = np.sign(delta_ang) * math.radians(15.) + delta_ang
turn2 = -np.sign(delta_ang) * math.radians(15.)
rospy.loginfo('Requested really small turn angle. Using overturn trick.')
#pdb.set_trace()
self._turn_by(turn1, block=True)
time.sleep(3) #TODO remove this restriction
self._turn_by(turn2, block)
else:
self._turn_by(delta_ang, block)
def _turn_by(self, delta_ang, block=True):
current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint',\
'odom_combined', self.tflistener)[0:3, 0:3], 'sxyz')[2]
self.turn_to(current_ang_odom + delta_ang, block)
##
# Move to xy_loc_bf
def move_to(self, xy_loc_bf, block=True):
goal = hm.GoXYGoal()
goal.x = xy_loc_bf[0,0]
goal.y = xy_loc_bf[1,0]
self.go_xy_client.send_goal(goal)
if block:
self.go_xy_client.wait_for_result()
def set_pose(self, t, r, frame, block=True):
g = mm.MoveBaseGoal()
p = g.target_pose
p.header.frame_id = frame
p.header.stamp = rospy.get_rostime()
p.pose.position.x = t[0]
p.pose.position.y = t[1]
p.pose.position.z = 0
p.pose.orientation.x = r[0]
p.pose.orientation.y = r[1]
p.pose.orientation.z = r[2]
p.pose.orientation.w = r[3]
self.client.send_goal(g)
if block:
self.client.wait_for_result()
return self.client.get_state()
def get_pose(self):
p_base = tfu.transform('map', 'base_footprint', self.tflistener) \
* tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
return tfu.matrix_as_tf(p_base)
class PR2Torso(Joint):
def __init__(self, joint_provider):
Joint.__init__(self, 'torso_controller', joint_provider)
self.torso = actionlib.SimpleActionClient('torso_controller/position_joint_action', pm.SingleJointPositionAction)
rospy.loginfo('waiting for torso_controller')
self.torso.wait_for_server()
def set_pose(self, p, block=True):
self.torso.send_goal(pm.SingleJointPositionGoal(position = p))
if block:
self.torso.wait_for_result()
return self.torso.get_state()
class PR2Gripper:
def __init__(self, gripper, joint_provider):
self.gripper = gripper
self.joint_provider = joint_provider
if gripper == 'l':
self.client = actionlib.SimpleActionClient(
'l_gripper_controller/gripper_action', pm.Pr2GripperCommandAction)
self.full_gripper_name = 'left_gripper'
self.joint_names = [rospy.get_param('/l_gripper_controller/joint')]
else:
self.client = actionlib.SimpleActionClient(
'r_gripper_controller/gripper_action', pm.Pr2GripperCommandAction)
self.full_gripper_name = 'right_gripper'
self.joint_names = [rospy.get_param('/r_gripper_controller/joint')]
self.client.wait_for_server()
self.names_index = None
def pose(self, joint_states=None):
if joint_states == None:
joint_states = self.joint_provider()
if self.names_index == None:
self.names_index = {}
for i, n in enumerate(joint_states.name):
self.names_index[n] = i
self.joint_idx = [self.names_index[n] for n in self.joint_names]
return (np.matrix(joint_states.position).T)[self.joint_idx, 0]
def close(self, block, position=0.0, effort=-1):
self.client.send_goal(pm.Pr2GripperCommandGoal(
pm.Pr2GripperCommand(position = position, max_effort = effort)))
if block:
self.client.wait_for_result()
return self.client.get_state()
def open(self, block, position=0.1, effort = -1):
self.client.send_goal(pm.Pr2GripperCommandGoal(
pm.Pr2GripperCommand(position = position, max_effort = effort)))
if block:
self.client.wait_for_result()
return self.client.get_state()
class StructuredLightProjector:
def __init__(self):
self.client = dc.Client("camera_synchronizer_node")
def set(self, on):
config = {"projector_mode":2}
if on:
config["narrow_stereo_trig_mode"] = 3
else:
config["narrow_stereo_trig_mode"] = 2
self.client.update_configuration(config)
def set_prosilica_inhibit(self, on):
self.node_config['prosilica_projector_inhibit'] = on
self.client.update_configuration(self.node_config)
class ControllerManager:
def __init__(self):
# LoadController
self.load = rospy.ServiceProxy('pr2_controller_manager/load_controller', pmm.LoadController)
# UnloadController
self.unload = rospy.ServiceProxy('pr2_controller_manager/unload_controller', pmm.UnloadController)
# SwitchController
self._switch_controller = rospy.ServiceProxy('pr2_controller_manager/switch_controller', pmm.SwitchController)
def switch(self, start_con, stop_con):
for n in start_con:
self.load(n)
resp = self._switch_controller(start_con, stop_con, pmm.SwitchControllerRequest.STRICT)
for n in stop_con:
self.unload(n)
return resp.ok
class SoundPlay:
def __init__(self):
self.ros_home = pt.join(os.getenv("HOME"), '.ros')
def say(self, phrase):
wav_file_name = pt.join(self.ros_home, 'soundplay_temp.wav')
os.system("text2wave %s -o %s" % (phrase, wav_file_name))
os.system("aplay %s" % (wav_file_name))
def play(self, filename):
os.system("aplay %s" % filename)
class PR2:
def __init__(self, tf_listener=None, arms=True, base=False, grippers=True,
use_kinematics=True, use_projector=True):
try:
rospy.init_node('pr2', anonymous=True)
except rospy.exceptions.ROSException, e:
pass
if tf_listener == None:
self.tf_listener = tf.TransformListener()
else:
self.tf_listener = tf_listener
jl = ru.GenericListener('joint_state_listener', sm.JointState, 'joint_states', 100)
self.joint_provider = ft.partial(jl.read, allow_duplication=False, willing_to_wait=True, warn=False, quiet=True)
if arms:
self.left = PR2Arm(self.joint_provider, self.tf_listener, 'l',
use_kinematics)
self.right = PR2Arm(self.joint_provider, self.tf_listener, 'r',
use_kinematics)
if grippers:
self.left_gripper = PR2Gripper('l', self.joint_provider)
self.right_gripper = PR2Gripper('r', self.joint_provider)
self.head = PR2Head('head_traj_controller', self.joint_provider)
if base:
self.base = PR2Base(self.tf_listener)
self.torso = PR2Torso(self.joint_provider)
self.controller_manager = ControllerManager()
self.sound = SoundPlay()
#SoundClient()
if use_projector:
self.projector = StructuredLightProjector()
def pose(self):
s = self.joint_provider()
return {'larm': self.left.pose(s), 'rarm': self.right.pose(s), 'head_traj': self.head.pose(s)}
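# A short usage sketch (assumes a running PR2 or simulation with the standard
# controllers up; values are illustrative):
#
#     pr2 = PR2(base=False)
#     pr2.torso.set_pose(0.1)
#     pr2.head.look_at(np.matrix([1., 0., 1.]).T, frame='base_link')
#     pr2.left_gripper.open(block=True)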
#if __name__ == '__main__':
# #pr2 = PR2()
# #pr2.controller_manager
#
# raw_input('put robot in final pose')
# pose2 = pr2.left.pose_cartesian()
#
# raw_input('put robot in initial pose')
# pose1 = pr2.left.pose_cartesian()
# pose2 = pose1.copy()
# pose2[0,3] = pose2[0,3] + .2
# r = rospy.Rate(4)
# while not rospy.is_shutdown():
# cart = pr2.left.pose_cartesian()
# ik_sol = pr2.left.kinematics.ik(cart, 'base_link')
# if ik_sol != None:
# diff = pr2.left.kinematics.fk(ik_sol, 'base_link') - cart
# pos_diff = diff[0:3,3]
# print '%.2f %.2f %.2f' % (pos_diff[0,0], pos_diff[1,0], pos_diff[2,0])
#
# pdb.set_trace()
# print 'going to final pose'
# pr2.left.set_cart_pose_ik(pose2, 2.5)
#
# print 'going back to initial pose'
# pr2.left.set_cart_pose_ik(pose1, 2.5)
#
#
# r = rospy.Rate(4)
# while not rospy.is_shutdown():
# cart = pr2.left.pose_cartesian()
# ik_sol = pr2.left.kinematics.ik(cart, 'base_link', seed=pr2.left.pose())
# if ik_sol != None:
# print ik_sol.T
# r.sleep()
#from class PR2Arm
#def set_cart_pose_ik(self, cart, total_time, frame='base_link', block=True,
# seed=None, pos_spacing=.001, rot_spacing=.001, best_attempt=True):
# cpos = self.pose()
# start_pos, start_rot = tfu.matrix_as_tf(self.pose_cartesian(frame))
# #Check to see if there is an IK solution at end point.
# target_pose = None
# alpha = 1.
# dir_endpoint = cart[0:3,3] - start_pos
# while target_pose == None:
# target_pose = self.kinematics.ik(perturbed_cart, frame, seed)
# if target_pose == None:
# raise KinematicsError('Unable to reach goal at %s.' % str(cart))
# cpos = self.pose()
# start_pos, start_rot = tfu.matrix_as_tf(self.pose_cartesian(frame))
# end_pos, end_rot = tfu.matrix_as_tf(cart)
# interpolated_poses = self.ik_utilities.interpolate_cartesian(start_pos, start_rot, end_pos, end_rot, pos_spacing, rot_spacing)
# nsteps = len(interpolated_poses)
# tstep = total_time / nsteps
# tsteps = np.array(range(nsteps+1)) * tstep
# valid_wps = []
# valid_times = []
# #last_valid = seed
# #all_sols = []
# if seed == None:
# seed = cpos
# for idx, pose in enumerate(interpolated_poses):
# pos, rot = pose
# #sol = self.kinematics.ik(tfu.tf_as_matrix((pos,rot)), frame, seed=last_valid)
# sol = self.kinematics.ik(tfu.tf_as_matrix((pos,rot)), frame, seed=seed)
# if sol != None:
# sol_cpy = sol.copy()
# sol_cpy[4,0] = unwrap2(cpos[4,0], sol[4,0])
# sol_cpy[6,0] = unwrap2(cpos[6,0], sol[6,0])
# valid_wps.append(sol_cpy)
# valid_times.append(tsteps[idx])
# #cpos = sol_cpy
# #all_sols.append(sol)
# #last_valid = sol_cpy
# #valid_wps.reverse()
# #all_sols = np.column_stack(all_sols)
# #pdb.set_trace()
# if len(valid_wps) > 2:
# rospy.loginfo('set_cart_pose_ik: number of waypoints %d' % len(valid_wps))
# valid_wps_mat = np.column_stack(valid_wps)
# valid_times_arr = np.array(valid_times) + .3
# #self.set_pose(valid_wps_mat[:,0])
# #pdb.set_trace()
# self.set_poses(valid_wps_mat, valid_times_arr, block=block)
# else:
# raise KinematicsError('Unable to reach goal at %s. Not enough valid IK solutions.' % str(cart))
|
bsd-3-clause
| 4,273,235,845,285,186,600
| 34.338073
| 137
| 0.589611
| false
| 3.146575
| false
| false
| false
|
qenops/dGraph
|
test/test5.py
|
1
|
5488
|
#!/usr/bin/python
'''Test for an openGL based stereo renderer - test binocular rendering to a single window
David Dunn
Feb 2017 - created
www.qenops.com
'''
__author__ = ('David Dunn')
__version__ = '1.0'
import OpenGL
OpenGL.ERROR_CHECKING = False # Uncomment for 2x speed up
OpenGL.ERROR_LOGGING = False # Uncomment for speed up
#OpenGL.FULL_LOGGING = True # Uncomment for verbose logging
#OpenGL.ERROR_ON_COPY = True # Comment for release
import OpenGL.GL as GL
import math, os
import numpy as np
import dGraph as dg
import dGraph.ui as ui
import dGraph.cameras as dgc
import dGraph.shapes as dgs
import dGraph.materials as dgm
import dGraph.shaders as dgshdr
import dGraph.config as config
import dGraph.util.imageManip as im
import time
MODELDIR = '%s/data'%os.path.dirname(__file__)
WINDOWS = [{
"name": 'Test 5',
"location": (0, 0),
#"location": (2436, 1936), # px coordinates of the startup screen for window location
#"size": (1920, 1080),
"size": (1600,800), # px size of the startup screen for centering
"center": (400,400), # center of the display
"refresh_rate": 60, # refreshrate of the display for precise time measuring
"px_size_mm": 0.09766, # px size of the display in mm
"distance_cm": 20, # distance from the viewer in cm,
#"is_hmd": False,
#"warp_path": 'data/calibration/newRight/',
},
]
def loadScene(renderStack, file=None, cross=False):
'''Load or create our sceneGraph'''
scene = dg.SceneGraph(file)
stereoCam = dgc.StereoCamera('front', scene)
stereoCam.setResolution((renderStack.width/2, renderStack.height))
stereoCam.setTranslate(0.,-.06,0.)
stereoCam.setRotate(20.,0.,0.)
stereoCam.setFOV(50.)
stereoCam.IPD = .062
crosses = [
#np.array((.031,.0,-10.)),
#np.array((-.031,.0,-10.)),
np.array((-.2,-.2,-10.)),
np.array((-.2,.0,-10.)),
np.array((-.2,.2,-10.)),
np.array((.0,-.2,-10.)),
np.array((.0,.0,-10.)),
np.array((.0,.2,-10.)),
np.array((.2,-.2,-10.)),
np.array((.2,.0,-10.)),
np.array((.2,.2,-10.)),
]
for idx, position in enumerate(crosses):
cross = dgs.PolySurface('cross%s'%idx, scene, file = '%s/cross.obj'%MODELDIR)
cross.setScale(.01,.01,.01)
cross.translate = position
renderStack.objects[cross.name] = cross
print(1,(idx/3.)/3.+1/3.,(idx%3)/3.+1/3.)
material = dgm.Material('material%s'%idx,ambient=(1,(idx/3.)/3.+1/3.,(idx%3)/3.+1/3.), amb_coeff=.5)
#material = dgm.Lambert('material%s'%idx,ambient=(1,0,0), amb_coeff=.5, diffuse=(1,1,1), diff_coeff=1)
cross.setMaterial(material)
renderStack.cameras = [stereoCam]
renderStack.append(stereoCam)
return True
def animateScene(renderStack, frame):
''' Create motion in our scene '''
# infinity rotate:
y = 1
x = math.cos(frame*math.pi/60)
for obj in renderStack.objects.itervalues():
obj.rotate += np.array((x,y,0.))
def addInput():
for rs in renderStack:
ui.add_key_callback(arrowKey, ui.KEY_RIGHT, renderStack=rs, direction=3)
ui.add_key_callback(arrowKey, ui.KEY_LEFT, renderStack=rs, direction=2)
ui.add_key_callback(arrowKey, ui.KEY_UP, renderStack=rs, direction=1)
ui.add_key_callback(arrowKey, ui.KEY_DOWN, renderStack=rs, direction=0)
def arrowKey(window,renderStack,direction):
for o in renderStack.objects:
if direction == 3: # print "right"
o.rotate(np.array((0.,5.,0.)))
elif direction == 2: # print "left"
o.rotate(-np.array((0.,5.,0.)))
elif direction == 1: # print 'up'
o.translate(np.array((0.,.01,0.)))
else: # print "down"
o.translate(-np.array((0.,.01,0.)))
def drawScene(renderStack):
''' Render the stack '''
myStack = list(renderStack) # copy the renderStack so we can pop and do it again next frame
temp = myStack.pop()
temp.render(renderStack.width, renderStack.height, myStack) # Render our warp to screen
def setup():
winData = WINDOWS[0]
renderStack = ui.RenderStack()
renderStack.display = ui.Display(resolution=winData['size'])
ui.init()
mainWindow = renderStack.addWindow(ui.open_window(winData['name'], winData['location'][0], winData['location'][1], renderStack.display.width, renderStack.display.height))
if not mainWindow:
ui.terminate()
exit(1)
ui.make_context_current(mainWindow)
ui.add_key_callback(ui.close_window, ui.KEY_ESCAPE)
    renderStacks = [renderStack]
    windows = [mainWindow]
    scenes = [loadScene(rs) for rs in renderStacks]
    for rs in renderStacks:
        rs.graphicsCardInit()
    return renderStacks, scenes, windows
def runLoop(renderStack, mainWindow):
# Print message to console, and kick off the loop to get it rolling.
print("Hit ESC key to quit.")
frame = 0
start = time.time()
while not ui.window_should_close(mainWindow):
ui.make_context_current(mainWindow)
drawScene(renderStack)
now = time.time()
time.sleep(max((frame+1)/config.maxFPS+start-now,0))
ui.swap_buffers(mainWindow)
ui.poll_events()
#animateScene(renderStack, frame)
frame += 1
ui.terminate()
exit(0)
if __name__ == '__main__':
renderStack, scene, windows = setup()
addInput()
    runLoop(renderStack[0], windows[0])
|
apache-2.0
| 1,592,792,672,699,302,100
| 35.586667
| 174
| 0.622449
| false
| 3.218768
| false
| false
| false
|
taxpon/pyomni
|
pyomni/pyomni.py
|
1
|
5467
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
import io
import zipfile
import logging
import datetime
from pyomni import util
from pyomni.webdav.WebdavClient import CollectionStorer
from pyomni.webdav.WebdavClient import ResourceStorer
from pyomni.webdav.WebdavClient import parseDigestAuthInfo
from pyomni.webdav.Connection import AuthorizationError
OMNI_SERVER_BASE_URL = "https://sync1.omnigroup.com/"
logging.disable(logging.INFO)
def digest(storer):
def _digest(func):
def __digest(*args, **kwargs):
self = args[0]
if len(args) > 1 and isinstance(args[1], (str, unicode)):
if args[1][0] != "/":
url = self.base_url + "/" + args[1]
else:
url = self.base_url + args[1]
else:
url = self.base_url
conn = storer(url, validateResourceNames=False)
try:
conn.readAllProperties()
except AuthorizationError as e:
if e.authType == "Digest":
info = parseDigestAuthInfo(e.authInfo)
conn.connection.addDigestAuthorization(
self.username, self.password,
realm=info["realm"],
qop=info["qop"],
nonce=info["nonce"]
)
else:
raise
args = args + (conn, )
result = func(*args, **kwargs)
return result
return __digest
return _digest
class PyOmni(object):
BASE_HOST = "https://sync1.omnigroup.com"
BASE_URL = BASE_HOST + "/{}/OmniFocus.ofocus/"
def __init__(self, username, password):
"""Create PyOmni instance
:param str username:
:param str password:
:return:
"""
self._username = username
self._password = password
self._base_url = PyOmni.BASE_URL.format(username)
return
@property
def username(self):
return self._username
@property
def password(self):
return self._password
@property
def base_url(self):
return self._base_url
@staticmethod
def create_zip_name(last_id):
now = datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S")
return "{}={}+{}.zip".format(
now, last_id, util.generate_random_code()
)
@staticmethod
def create_zip_body(file_list, write=False):
if write:
fo = "sample.zip"
else:
fo = io.BytesIO()
with zipfile.ZipFile(fo, "w") as fh:
for fd in file_list:
fh.writestr(fd[0], fd[1])
if write:
return
return fo.getvalue()
@staticmethod
def unzip_body(buf):
fo = io.BytesIO(buf)
zf = zipfile.ZipFile(fo)
file_list = []
for zip_file in zf.infolist():
with zf.open(zip_file.filename) as fh:
file_info = [
zip_file.filename,
fh.read()
]
file_list.append(file_info)
return file_list
@digest(CollectionStorer)
def ls(self, conn):
file_list = []
for resource, properties in conn.getCollectionContents():
file_list.append(resource.path.encode(sys.getfilesystemencoding()))
return file_list
@digest(CollectionStorer)
def get_last_id(self, conn):
"""Return latest transaction id
:param conn:
:rtype: str | None
"""
zip_file_list = []
for resource, properties in conn.getCollectionContents():
ext = resource.path.encode(sys.getfilesystemencoding()).split('.')[-1]
if ext == "zip":
zip_file_list.append(resource.path.encode(sys.getfilesystemencoding()))
if len(zip_file_list) > 0:
zip_file_list.sort()
return zip_file_list[-1].split("+")[1].split(".")[0]
return None
@digest(ResourceStorer)
def get_content(self, file_path, conn):
buf = conn.downloadContent().read()
if file_path.split('.')[-1] == "zip":
fo = io.BytesIO(buf)
zf = zipfile.ZipFile(fo)
file_list = []
for zip_file in zf.infolist():
with zf.open(zip_file.filename) as fh:
file_info = [
zip_file.filename,
fh.read()
]
file_list.append(file_info)
else:
file_list = [
file_path.split('/')[-1],
buf
]
return file_list
@digest(ResourceStorer)
def upload_content(self, file_path, buf, conn):
conn.uploadContent(buf)
@digest(ResourceStorer)
def delete_content(self, file_path, conn):
conn.deleteContent()
@digest(ResourceStorer)
def rm(self, file_path, conn):
conn.delete()
def add_task(self, task):
"""Add task to Omnifocus sync server
:param pyomni.object.task.OmniTask task:
:rtype: None
"""
last_id = self.get_last_id()
zip_name = PyOmni.create_zip_name(last_id)
zip_buf = PyOmni.create_zip_body([[
"contents.xml",
task.get_xml()
]])
self.upload_content(zip_name, zip_buf)
return
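# A short usage sketch (hypothetical credentials; network access to the Omni
# Sync Server is assumed):
#
#     omni = PyOmni("username", "password")
#     print(omni.ls())            # list files under OmniFocus.ofocus/
#     print(omni.get_last_id())   # id of the latest transaction zip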
|
mit
| -726,213,792,981,818,500
| 28.235294
| 87
| 0.527529
| false
| 4.037666
| false
| false
| false
|
carloderamo/mushroom
|
mushroom_rl/environments/mujoco_envs/humanoid_gait/_external_simulation/mtc_model.py
|
1
|
7802
|
import numpy as np
"""
This model implements a muscle-tendon complex (MTC) for use in MuJoCo. The dynamics in this model are not
continuous, so integration error accumulates over time and the system may become unstable if the timestep
is too large. It is recommended to set the timestep below 5e-4 to get decent results.
The model is created based on Song's and Geyer's 2015 paper:
Song, S. and Geyer, H., 2015. A neural circuitry that emphasizes spinal feedback generates diverse behaviours of human
locomotion. The Journal of physiology, 593(16), pp.3493-3511.
V0.1
Passed basic tests. There are slight differences compared to the simmechanics model.
V0.2
1. Verified against the simmechanics model. The differences can be ignored in most cases.
2. Changed the integration method from forward Euler to trapezoidal.
3. Muscle force, vce, etc. might vibrate/jitter in some cases if the timestep is not low enough.
   This needs to be improved in the next version.
"""
class MuscleTendonComplex:
def __init__(self, paraMuscle, stateMuscle, paraMusAttach, offsetCorr, timestep, nameMuscle, angJoi):
self.frcmax, self.vmax, self.eref, self.lslack, self.lopt, self.tau, self.w, self.c, self.N, self.K = paraMuscle
self.stim, self.act, self.lmtc, self.lce, self.vce, self.frcmtc = stateMuscle
self.timestep = timestep
self.nameMuscle = nameMuscle
self.angJoi = angJoi
self.offsetCorr = offsetCorr
self.r, self.phiref, self.phimaxref, self.rho, self.dirAng, self.phiScale = paraMusAttach
self.MR = 0.01
self.typeMuscle = self.angJoi.size
nJoi = self.typeMuscle
self.levelArm = np.zeros(nJoi)
tmpL = np.zeros(nJoi)
for i in range(0, nJoi):
if self.offsetCorr[i] == 0:
tmpL[i] = self.dirAng[i] * (self.angJoi[i] - self.phiref[i]) * self.r[i] * self.rho[i]
self.levelArm[i] = self.r[i]
elif self.offsetCorr[i] == 1:
tmp1 = np.sin((self.phiref[i] - self.phimaxref[i]) * self.phiScale[i])
tmp2 = np.sin((self.angJoi[i] - self.phimaxref[i]) * self.phiScale[i])
tmpL[i] = self.dirAng[i] * (tmp2 - tmp1) * self.r[i] * self.rho[i] / self.phiScale[i]
self.levelArm[i] = np.cos((self.angJoi[i] - self.phimaxref[i]) * self.phiScale[i]) * self.r[i]
else:
raise ValueError('Invalid muscle level arm offset correction type. ')
self.lmtc = self.lslack + self.lopt + np.sum(tmpL)
self.lce = self.lmtc - self.lslack
self.lse = self.lmtc - self.lce
# unitless parameters
self.Lse = self.lse / self.lslack
self.Lce = self.lce / self.lopt
self.actsubstep = (self.stim - self.act) * self.timestep / 2.0 / self.tau + self.act
self.lcesubstep = self.vce * self.timestep / 2.0 + self.lce
# test
self.lce_avg = self.lce
self.vce_avg = self.vce
self.frcmtc_avg = 0
self.act_avg = self.act
self.frame = 0
# self.Fse = 0.0
# self.Fbe = 0.0
# self.Fpe = 0.0
# self.Fce = 0.0
def stepUpdateState(self, angJoi):
"""
Muscle Tendon Complex Dynamics
update muscle states based on the muscle dynamics
Muscle state stim has to be updated outside before this function is called
"""
# update lmtc and level arm based on the geometry
self.angJoi = angJoi
nJoi = self.typeMuscle
tmpL = np.zeros(nJoi)
for i in range(0, nJoi):
if self.offsetCorr[i] == 0:
tmpL[i] = self.dirAng[i] * (self.angJoi[i] - self.phiref[i]) * self.r[i] * self.rho[i]
self.levelArm[i] = self.r[i]
elif self.offsetCorr[i] == 1:
tmp1 = np.sin((self.phiref[i] - self.phimaxref[i]) * self.phiScale[i])
tmp2 = np.sin((self.angJoi[i] - self.phimaxref[i]) * self.phiScale[i])
tmpL[i] = self.dirAng[i] * (tmp2 - tmp1) * self.r[i] * self.rho[i] / self.phiScale[i]
self.levelArm[i] = np.cos((self.angJoi[i] - self.phimaxref[i]) * self.phiScale[i]) * self.r[i]
else:
raise ValueError('Invalid muscle level arm offset correction type. ')
self.lmtc = self.lslack + self.lopt + np.sum(tmpL)
# update muscle activation
# integration, forward-Euler method
# self.act = (self.stim - self.act) * self.timestep / self.tau + self.act
# integration, trapezoidal method, 2-step
self.act = (self.stim - self.actsubstep) * self.timestep / 2.0 / self.tau + self.actsubstep
self.actsubstep = (self.stim - self.act) * self.timestep / 2.0 / self.tau + self.act
# update lce and lse based on the lmtc
# integration, forward-Euler method
# self.lce = self.vce * self.timestep + self.lce
# integration, trapezoidal method, 2-step
self.lce = self.vce * self.timestep / 2.0 + self.lcesubstep
self.lcesubstep = self.vce * self.timestep / 2.0 + self.lce
self.lse = self.lmtc - self.lce
self.Lse = self.lse / self.lslack
self.Lce = self.lce / self.lopt
# Serial Elastic element (tendon) force-length relationship
if self.Lse > 1.0:
Fse = np.power((self.Lse - 1.0) / self.eref, 2)
else:
Fse = 0.0
# Parallel Elasticity PE
if self.Lce > 1.0:
Fpe = np.power((self.Lce - 1.0) / self.w, 2)
else:
Fpe = 0.0
# update frcmtc
self.frcmtc = Fse * self.frcmax
#self.frcmtc = np.clip(self.frcmtc, 0, self.frcmax)
# Buffer Elasticity BE
if (self.Lce - (1.0 - self.w)) < 0:
Fbe = np.power((self.Lce - (1.0 - self.w)) / (self.w / 2), 2)
else:
Fbe = 0.0
# Contractile Element force-length relationship
tmp = np.power(np.absolute(self.Lce - 1.0) / self.w, 3)
Fce = np.exp(tmp * np.log(self.c))
#Fv = (Fse + Fbe) / (Fpe + Fce * self.act)
if (Fpe + Fce * self.act) < 1e-10: # avoid numerical error
if (Fse + Fbe) < 1e-10:
Fv = 1.0
else:
Fv = (Fse + Fbe) / 1e-10
else:
Fv = (Fse + Fbe) / (Fpe + Fce * self.act)
# Contractile Element inverse force-velocity relationship
if Fv <= 1.0:
# Concentric
v = (Fv - 1) / (Fv * self.K + 1.0)
elif Fv <= self.N:
# excentric
tmp = (Fv - self.N) / (self.N - 1.0)
v = (tmp + 1.0) / (1.0 - tmp * 7.56 * self.K)
else:
# excentric overshoot
v = ((Fv - self.N) * 0.01 + 1)
self.vce = v * self.lopt * self.vmax
v_frac = self.vce / self.vmax
mr_scale = self.act * np.absolute(self.frcmax*self.vmax) *self.timestep
if self.vce <= 1:
self.MR = 0.01 - 0.11*(v_frac) + 0.06*np.exp(-8*v_frac)
else:
self.MR = 0.23 - 0.16*np.exp(-8*v_frac)
self.MR *= mr_scale
self.frame += 1
self.lce_avg = (self.lce_avg*(self.frame - 1) + self.lce) / self.frame
self.vce_avg = (self.vce_avg*(self.frame - 1) + self.vce) / self.frame
self.frcmtc_avg = (self.frcmtc_avg*(self.frame - 1) + self.frcmtc) / self.frame
self.act_avg = (self.act_avg*(self.frame - 1) + self.act) / self.frame
#self.MR = np.exp(-self.MR)
# print(self.MR, np.exp(-self.MR))
# self.Fv = Fv
# self.Fse = Fse
# self.Fbe = Fbe
# self.Fpe = Fpe
# self.Fce = Fce
def reset_state(self):
self.frame = 0
self.lce_avg = 0
self.frcmtc_avg = 0
self.act_avg = 0
self.vce_avg = 0
|
mit
| -4,500,805,567,716,075,500
| 40.721925
| 120
| 0.566265
| false
| 2.929778
| false
| false
| false
|
m4nh/roars
|
scripts/roars/training/meta_trainer.py
|
1
|
4030
|
from abc import ABCMeta, abstractmethod, abstractproperty
import os
# TODO: do we already have these defined somewhere?
LABEL_FOLDER_NAME = 'labels'
LABEL_FILES_EXTENSION = 'txt'
IMAGE_FOLDER_NAME = 'images'
IMAGE_FILES_EXTENSION = 'jpg'
CLASS_FILE = 'class_list.txt'
class meta_trainer():
"""
Meta class for a generic CNN trainer, it will have different implementation according to the framework used
"""
__metaclass__=ABCMeta
def __init__(self, **kwargs):
self._ready=False
self._setup_trainer(kwargs)
#abstract stuff that should be implemented in the inheriting classes
@abstractproperty
def _default_batch_size(self):
pass
@abstractproperty
def _default_max_iteration(self):
pass
@abstractmethod
def _prepare_dataset(self):
"""Setup the dataset to be used during the training phase, return False if the dataset already exist in the detsination folder, True otherways"""
pass
@abstractmethod
def _setup_training_parameters(self):
"""Create additional configuration file if needed"""
pass
@abstractmethod
def _train(self):
"""Proper training"""
pass
@abstractmethod
def _export(self):
"""Save the best model found"""
pass
#Common high order methods
def _setup_trainer(self,args):
"""
Check the args to see if everything is properly configured and save needed info in internal fields
"""
print('='*50)
print('= Checking Arguments =')
print('='*50)
if 'input_folder' not in args:
print('ERROR: Please specify an input directory')
raise Exception('"input_folder" is missing')
if 'detector_name' not in args:
print('ERROR: "detector_name" not specified, this should not happen')
raise Exception('Detector_name not specified')
if 'batch_size' not in args:
print('WARNING: "batch_size" not specified, using default {}'.format(self._default_batch_size))
args['batch_size']=self._default_batch_size
if 'max_iteration' not in args:
print('WARNING: "max_iteration" not specified, using default {}'.format(self._default_max_iteration))
args['max_iteration']=self._default_max_iteration
        # map args to object fields
self._input_folder = args['input_folder']
self._detector_name = args['detector_name']
self._batch_size = args['batch_size']
self._max_iteration = args['max_iteration']
def _check_ready(self):
"""
If the trainer is not ready raise an exception
"""
if not self._ready:
print('ERROR: trainer not correctly configured, you are not supposed to end here!')
raise Exception('trainer not correctly configured')
@property
def _class_map(self):
"""
Read class list files and return a list containing class names
"""
self._check_ready()
if not hasattr(self,'_c_map'):
if os.path.exists(os.path.join(self._input_folder,CLASS_FILE)):
with open(os.path.join(self._input_folder,CLASS_FILE)) as f_in:
classes =[c.strip() for c in f_in.readlines()]
self._c_map = classes
else:
raise Exception('Unable to find {}'.format(CLASS_FILE))
return self._c_map
def train_detector(self,output_folder):
self._check_ready()
self._output_folder = output_folder
#setup data
self._prepare_dataset()
#setup config parameters
self._setup_training_parameters()
        # launch training
result=self._train()
if result==0:
#save final model
self._export()
print('All DONE!')
else:
raise Exception('Train Failed')
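# A minimal subclass sketch (hypothetical, for illustration only): a concrete
# trainer must implement every abstract member, and is expected to set
# self._ready = True once its own setup succeeds, before train_detector() is
# usable.
#
#     class dummy_trainer(meta_trainer):
#         _default_batch_size = 32
#         _default_max_iteration = 1000
#         def _prepare_dataset(self):
#             return True
#         def _setup_training_parameters(self):
#             pass
#         def _train(self):
#             return 0    # 0 signals success to train_detector()
#         def _export(self):
#             pass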
|
gpl-3.0
| 7,888,146,045,697,104,000
| 29.308271
| 153
| 0.588337
| false
| 4.438326
| false
| false
| false
|