repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
import sys
import os

# Simple interactive front-end for steghide: print usage examples and
# then spawn a terminal in which the user can run the commands.
# (Parenthesized print works identically under Python 2 and 3 here.)
print("-------------------------")
print("StegHide Options")
print("-------------------------")
print("Usage Example :")
print("")
print("To embed emb.txt in cvr.jpg: steghide embed -cf cvr.jpg -ef emb.txt")
print("")
print("To extract embedded data from stg.jpg: steghide extract -sf stg.jpg")
# os.system blocks until the spawned terminal exits; its exit status is kept.
cmd1 = os.system("xterm ")
|
hyunoklee/turtlebot3_RBIZ | line_detect/include/duckietown_utils/bag_logs.py | Python | gpl-2.0 | 6,453 | 0.004029 | import numpy as np
from . import logger
from duckietown_utils.expand_variables import expand_environment
import os
__all__ = [
'd8n_read_images_interval',
'd8n_read_all_images',
'd8n_get_all_images_topic',
]
def d8n_read_images_interval(filename, t0, t1):
    """
    Reads all the RGB data from the bag,
    in the interval [t0, t1], where t0 = 0 indicates
    the first image.

    Returns the structured numpy array produced by d8n_read_all_images,
    with the 'timestamp' column shifted so the first image is at t = 0.
    Raises ValueError (via d8n_read_all_images) if no images are found.
    """
    data = d8n_read_all_images(filename, t0, t1)
    logger.info('Read %d images from %s.' % (len(data), filename))
    timestamps = data['timestamp']
    # normalize timestamps: 'timestamps' is a view into 'data', so this
    # in-place subtraction rewrites the array's timestamp column as well.
    first = data['timestamp'][0]
    timestamps -= first
    logger.info('Sequence has length %.2f seconds.' % timestamps[-1])
    return data
def d8n_read_all_images(filename, t0=None, t1=None):
    """
    Reads all frames from the bag's main camera topic.

    Raises a ValueError if not data could be read.
    Returns a numpy array.

        data = d8n_read_all_images(bag)
        print data.shape # (928,)
        print data.dtype # [('timestamp', '<f8'), ('rgb', 'u1', (480, 640, 3))]

    t0/t1 (optional) restrict the result to the interval [t0, t1] in
    seconds relative to the first image seen on the topic.
    """
    import rosbag  # @UnresolvedImport
    filename = expand_environment(filename)
    if not os.path.exists(filename):
        msg = 'File does not exist: %r' % filename
        raise ValueError(msg)
    # First open: only used to discover the camera topic name.
    bag = rosbag.Bag(filename)
    that_topic = get_image_topic(bag)
    data = []
    first_timestamp = None
    with rosbag.Bag(filename, 'r') as bag:
        for j, (topic, msg, t) in enumerate(bag.read_messages()):
            if topic == that_topic:
                float_time = t.to_sec()
                if first_timestamp is None:
                    first_timestamp = float_time
                rel_time = float_time - first_timestamp
                if t0 is not None:
                    if rel_time < t0:
                        continue
                if t1 is not None:
                    if rel_time > t1:
                        continue
                rgb = numpy_from_ros_compressed(msg)
                data.append({'timestamp': float_time, 'rgb': rgb})
                if j % 10 == 0:
                    # NOTE(review): j counts *all* bag messages, not only
                    # images, so this progress figure can overshoot.
                    print('Read %d images from topic %s' % (j, topic))
    print('Returned %d images' % len(data))
    if not data:
        raise ValueError('no data found')
    # Every frame is assumed to share the shape of the last decoded one.
    H, W, _ = rgb.shape  # (480, 640, 3)
    print('Detected image shape: %s x %s' % (W, H))
    # Pack the frames into one structured array (timestamp + raw RGB).
    n = len(data)
    dtype = [
        ('timestamp', 'float'),
        ('rgb', 'uint8', (H, W, 3)),
    ]
    x = np.zeros((n,), dtype=dtype)
    for i, v in enumerate(data):
        x[i]['timestamp'] = v['timestamp']
        x[i]['rgb'][:] = v['rgb']
    return x
def d8n_get_all_images_topic(bag_filename):
    """ Returns the (name, type) of all topics that look like images """
    import rosbag  # @UnresolvedImport
    bag = rosbag.Bag(bag_filename)
    tat = bag.get_type_and_topic_info()
    # tat is a TypesAndTopicsTuple(msg_types={...}, topics={name:
    # TopicTuple(msg_type=..., message_count=..., ...)}); only the
    # per-topic msg_type is used below.
    consider_images = [
        'sensor_msgs/Image',
        'sensor_msgs/CompressedImage',
    ]
    all_types = set()
    found = []
    topics = tat.topics
    for t,v in topics.items():
        msg_type = v.msg_type
        all_types.add(msg_type)
        # message_count is fetched but currently unused.
        message_count = v.message_count
        if msg_type in consider_images:
            # quick fix: ignore image_raw if we have image_compressed version
            if 'raw' in t:
                other = t.replace('raw', 'compressed')
                if other in topics:
                    continue
            found.append((t,msg_type))
    print('all_types: %s' % all_types)
    print('found: %s' % found)
    return found
def get_image_topic(bag):
    """Return the name of the main camera topic found in *bag*.

    Raises ValueError when no topic name contains
    'camera_node/image/compressed'.
    """
    topics = bag.get_type_and_topic_info()[1].keys()
    match = next(
        (name for name in topics if 'camera_node/image/compressed' in name),
        None)
    if match is not None:
        return match
    raise ValueError('Cannot find the topic: %s' % topics)
def numpy_from_ros_compressed(msg):
    """Decode a ROS CompressedImage message into an RGB uint8 array."""
    kind = msg.__class__.__name__
    # Only CompressedImage-like messages are supported.
    assert 'CompressedImage' in kind, kind
    return rgb_from_pil(pil_from_CompressedImage(msg))
def pil_from_CompressedImage(msg):
    """Decode the raw bytes of a CompressedImage into a PIL image."""
    from PIL import ImageFile  # @UnresolvedImport
    decoder = ImageFile.Parser()
    decoder.feed(msg.data)
    # close() finalizes the incremental parse and returns the image.
    return decoder.close()
def rgb_from_pil(im):
    """Return *im* as a numpy array of dtype uint8."""
    arr = np.asarray(im)
    return arr.astype(np.uint8)
|
jimmy201602/django-gateone | applications/plugins/playback/playback.py | Python | gpl-3.0 | 3,927 | 0.009422 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Liftoff Software Corporation
#
__doc__ = """\
playback.py - A plugin for Gate One that adds support for saving and playing
back session recordings.
.. note:: Yes this only contains one function and it is exposed to clients through a WebSocket hook.
Hooks
-----
This Python plugin file implements the following hooks::
hooks = {
'WebSocket': {
'playback_save_recording': save_recording,
}
}
Docstrings
----------
"""
# Meta
__version__ = '1.0'
__license__ = "GNU AGPLv3 or Proprietary (see LICENSE.txt)"
__version_info__ = (1, 0)
__author__ = 'Dan McDougall <daniel.mcdougall@liftoffsoftware.com>'
# Python stdlib
import os
from applications.locale import get_translation
from applications.utils import render_string
import io
_ = get_translation()
# Globals
PLUGIN_PATH = os.path.split(__file__)[0]
def get_256_colors(self):
    """
    Returns the rendered 256-color CSS.

    The rendered file is looked up in the cache directory under a name
    that embeds the source file's mtime, so a cache entry is bypassed
    once the source changes.
    """
    colors_256_path = self.render_256_colors()
    mtime = os.stat(colors_256_path).st_mtime
    # Cache key: path (slashes flattened) + mtime.
    cached_filename = "%s:%s" % (colors_256_path.replace('/', '_'), mtime)
    cache_dir = self.ws.settings['cache_dir']
    cached_file_path = os.path.join(cache_dir, cached_filename)
    if os.path.exists(cached_file_path):
        with open(cached_file_path) as f:
            colors_256 = f.read()
    else:
        # Debug mode is enabled
        # NOTE(review): falls back to a fixed '256_colors.css' in the
        # cache dir; presumably written by the debug pipeline -- confirm.
        with open(os.path.join(cache_dir, '256_colors.css')) as f:
            colors_256 = f.read()
    return colors_256
def save_recording(self, settings):
    """
    Handles uploads of session recordings and returns them to the client in a
    self-contained HTML file that will auto-start playback.

    ..note:: The real crux of the code that handles this is in the template.

    *settings* must provide 'recording', 'container', 'prefix',
    'theme_css' and 'colors_css'.  The rendered HTML is sent back to the
    client via a 'go:save_file' WebSocket message.
    """
    from datetime import datetime
    # BUG FIX: was '%Y%m%d%H%m%S' -- %m is the month; %M is minutes.
    now = datetime.now().strftime('%Y%m%d%H%M%S')  # e.g. '20120208200222'
    out_dict = {
        'result': 'Success',
        'filename': 'GateOne_recording-%s.html' % now,
        'data': None,
        'mimetype': 'text/html'
    }
    recording = settings["recording"]
    container = settings["container"]
    prefix = settings["prefix"]
    theme_css = settings['theme_css']
    colors_css = settings['colors_css']
    colors_256 = get_256_colors(self)
    templates_path = os.path.join(PLUGIN_PATH, "templates")
    recording_template_path = os.path.join(
        templates_path, "self_contained_recording.html")
    # extra_theme works around missing theme content in the template.
    extra_theme_path = os.path.join(templates_path, 'themes/black.css')
    with io.open(extra_theme_path, mode='r', encoding='UTF-8') as f:
        extra_theme = f.read()
    rendered_recording = render_string(
        recording_template_path,
        recording=recording,
        container=container,
        prefix=prefix,
        theme=theme_css,
        colors=colors_css,
        colors_256=colors_256,
        extra_theme=extra_theme)
    out_dict['data'] = rendered_recording
    message = {'go:save_file': out_dict}
    self.write_message(message)
hooks = {
'WebSocket': {
'terminal:playback_save_recording': save_recording,
}
}
|
tbpmig/mig-website | bookswap/urls.py | Python | apache-2.0 | 747 | 0.022758 | from django.conf.urls import patterns, url
from bookswap import views
# Admin URL routes for the bookswap app (stray extraction artifacts removed).
urlpatterns = patterns(
    '',
    #url(r'^$', views.index, name='index'),
    url(r'^admin/start_transaction/$',
        views.start_transaction, name='start_transaction'),
    url(r'^admin/update_person/$',
        views.update_person, name='update_person'),
    url(r'^admin/create_book_type/$',
        views.create_book_type, name='create_book_type'),
    url(r'^admin/receive_book_start/(?P<uniqname>[a-z]{3,8})/$',
        views.receive_book_start, name='receive_book_start'),
    url(r'^admin/receive_book/(?P<uniqname>[a-z]{3,8})-(?P<book_type_id>\d+)/$',
        views.receive_book, name='receive_book'),
    url(r'^admin/$',
        views.admin_index, name='admin_index'),
)
|
georgemarshall/django | tests/middleware/urls.py | Python | bsd-3-clause | 299 | 0 | from django.urls import path, re_path
from . import views
# Test routes exercising the common middleware (artifact " | " removed).
urlpatterns = [
    path('noslash', views.empty_view),
    path('slash/', views.empty_view),
    path('needsquoting#/', views.empty_view),
    # Accepts paths with two leading slashes.
    re_path(r'^(.+)/security/$', views.empty_view),
]
|
vermaete/ipxact2systemverilog | ipxact2systemverilog/ipxact2hdlCommon.py | Python | gpl-2.0 | 46,497 | 0.003054 | #!/usr/bin/env python3
# This file is part of ipxact2systemverilog
# Copyright (C) 2013 Andreas Lindh
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# andreas.lindh (a) hiced.com
import math
import os
import sys
import xml.etree.ElementTree as ETree
import tabulate
from mdutils.mdutils import MdUtils
# Defaults used when no .ini configuration is supplied.
# 'unusedholes' drives the gap-filling in sortRegisterAndFillHoles;
# 'onebitenum' presumably gates enum generation for 1-bit fields --
# TODO(review): confirm against the consumers of this setting.
DEFAULT_INI = {'global': {'unusedholes': 'yes',
                          'onebitenum': 'no'}}
def sortRegisterAndFillHoles(regName,
                             fieldNameList,
                             bitOffsetList,
                             bitWidthList,
                             fieldDescList,
                             enumTypeList,
                             unusedHoles=True):
    """Sort a register's fields by ascending bit offset and, optionally,
    fill the gaps between fields with synthetic 'unusedN' fields.

    :param regName: register name, returned unchanged.
    :param fieldNameList: field names, parallel to the other lists.
    :param bitOffsetList: per-field bit offset (int or int-like string).
    :param bitWidthList: per-field bit width (int or int-like string).
    :param fieldDescList: per-field description.
    :param enumTypeList: per-field enum type (or '').
    :param unusedHoles: when True, insert 'unusedN' filler fields so the
        result covers [0, last_offset + last_width) without gaps.
    :return: tuple (regName, fieldNameList, bitOffsetList, bitWidthList,
        fieldDescList, enumTypeList), all lists sorted by offset.
    """
    # Coerce offsets/widths to int once and sort all the parallel lists
    # together by ascending bit offset (stable for equal offsets).
    rows = list(zip([int(x) for x in bitOffsetList],
                    fieldNameList,
                    [int(x) for x in bitWidthList],
                    fieldDescList,
                    enumTypeList))
    if not rows:
        # Empty register: nothing to sort or fill (previously crashed).
        return regName, [], [], [], [], []
    rows.sort(key=lambda row: row[0])
    bitOffsetList, fieldNameList, bitWidthList, fieldDescList, enumTypeList = \
        [list(col) for col in zip(*rows)]
    if unusedHoles:
        unusedCnt = 0
        nextFieldStart = 0
        index = 0
        register_width = bitOffsetList[-1] + bitWidthList[-1]
        while register_width > nextFieldStart:
            if nextFieldStart != bitOffsetList[index]:
                # Gap before the current field: synthesize an 'unusedN'
                # filler covering [nextFieldStart, current offset).
                bitWidthList.insert(index, bitOffsetList[index] - nextFieldStart)
                bitOffsetList.insert(index, nextFieldStart)
                fieldNameList.insert(index, 'unused' + str(unusedCnt))
                fieldDescList.insert(index, 'unused')
                enumTypeList.insert(index, '')
                unusedCnt += 1
            nextFieldStart = bitOffsetList[index] + bitWidthList[index]
            index += 1
    return regName, fieldNameList, bitOffsetList, bitWidthList, fieldDescList, enumTypeList
class documentClass():
    """Top-level container: a named document holding memory maps."""

    def __init__(self, name):
        self.name = name
        self.memoryMapList = []

    def addMemoryMap(self, memoryMap):
        """Append one memory map to the document."""
        self.memoryMapList.append(memoryMap)
class memoryMapClass():
    """A named memory map holding a list of address blocks."""

    def __init__(self, name):
        self.name = name
        self.addressBlockList = []

    def addAddressBlock(self, addressBlock):
        """Append one address block to the memory map."""
        self.addressBlockList.append(addressBlock)
class addressBlockClass():
    """Base class for address-block renderers.

    Subclasses implement returnAsString() to emit their output format
    and set a matching file suffix.
    """

    def __init__(self, name, addrWidth, dataWidth):
        self.name = name
        self.addrWidth = addrWidth
        self.dataWidth = dataWidth
        self.registerList = []
        self.suffix = ""

    def addRegister(self, reg):
        """Append one registerClass instance to the block."""
        assert isinstance(reg, registerClass)
        self.registerList.append(reg)

    def setRegisterList(self, registerList):
        """Replace the whole register list at once."""
        self.registerList = registerList

    def returnAsString(self):
        raise NotImplementedError("method returnAsString() is virutal and must be overridden.")
class registerClass():
    """Plain data holder describing one register and its fields.

    All the field lists are parallel (same length, same ordering).
    Stray ' | ' extraction artifacts in the class header and the
    signature were removed.
    """

    def __init__(self, name, address, resetValue, size, access, desc, fieldNameList,
                 bitOffsetList, bitWidthList, fieldDescList, enumTypeList):
        assert isinstance(enumTypeList, list), 'enumTypeList is not a list'
        self.name = name
        self.address = address
        self.resetValue = resetValue
        self.size = size
        self.access = access
        self.desc = desc
        self.fieldNameList = fieldNameList
        self.bitOffsetList = bitOffsetList
        self.bitWidthList = bitWidthList
        self.fieldDescList = fieldDescList
        self.enumTypeList = enumTypeList
class enumTypeClassRegistry():
    """Remembers every enum seen so far so duplicates can be detected."""
    # should perhaps be a singleton instead

    def __init__(self):
        self.listOfEnums = []

    def enumAllReadyExist(self, enum):
        """Mark *enum* if an equivalent enum was registered earlier.

        The enum is always appended to the registry; when a match is
        found, enum.allReadyExist is set and enum.enumName points at the
        earlier enum's name.
        """
        for registered in self.listOfEnums:
            if registered.compare(enum):
                enum.allReadyExist = True
                enum.enumName = registered.name
                break
        self.listOfEnums.append(enum)
        return enum
class enumTypeClass():
    """One enumerated type: parallel key/value/description lists kept
    sorted by ascending value."""

    def __init__(self, name, bitWidth, keyList, valueList, descrList):
        self.name = name
        self.bitWidth = bitWidth
        # Sort all three parallel sequences together by value.
        rows = sorted(zip(valueList, keyList, descrList), key=lambda r: r[0])
        valueList, keyList, descrList = zip(*rows)
        self.keyList = list(keyList)
        self.valueList = list(valueList)
        self.allReadyExist = False
        self.enumName = None
        self.descrList = descrList

    def compare(self, other):
        """True when *other* has the same bit width and shares at least
        one key with this enum.

        NOTE(review): this is an overlap test, not full equality --
        confirm that is the intended duplicate criterion.
        """
        return self.bitWidth == other.bitWidth and \
            self.compareLists(self.keyList, other.keyList)

    def compareLists(self, list1, list2):
        """True when the two lists share at least one common element."""
        return any(val in list2 for val in list1)
class rstAddressBlock(addressBlockClass):
"""Generates a ReStructuredText file from a IP-XACT register description"""
def __init__(self, name, addrWidth, dataWidth):
self.name = name
self.addrWidth = addrWidth
self.dataWidth = dataWidth
self.registerList = []
self.suffix = ".rst"
def returnEnumValueString(self, enumTypeObj):
if isinstance(enumTypeObj, enumTypeClass):
l = []
for i in range(len(enumTypeObj.keyList)):
l.append(enumTypeObj.keyList[i] + '=' + enumTypeObj.valueList[i])
s = ", ".join(l)
else:
s = ''
return s
def returnAsString(self):
r = ""
regNameList = [reg.name for reg in self.registerList]
regAddressList = [reg.address for reg in self.registerList]
regDescrList = [reg.desc for reg in self.registerList]
r += self.returnRstTitle()
r += self.returnRstSubTitle()
summary_table = []
for i in range(len(regNameList)):
summary_table.append(["%#04x" % regAddressList[i], str(regNameList[i]) + "_", str(regDescrList[i])])
r += tabulate.tabulate(summary_table,
headers=['Address', 'Register Name', 'Description'],
tablefmt="grid")
r += "\n"
r += "\n"
for reg in self.registerList:
r += self.returnRstRegDesc(reg.name, reg.address, reg.size, reg.resetValue, reg.desc, reg.access)
reg_table = []
for fieldIndex in reversed(list(range(len(reg.fieldNameList)))):
bits = "[" + str(reg.bitOffsetList[fieldIndex] + reg.bitWidthList[fieldIndex] - 1) + \
":" + str(reg.bitOffsetList[fieldIndex]) + "]"
_line = [bits,
reg.fieldNameList[fieldIndex]]
if reg.resetValue:
temp = (int(reg.resetValue, 0) >> reg.bitOffsetList[fieldIndex])
mask = (2 ** reg.bitWidthList[fieldIndex]) - 1
temp &= mask
temp = "{value:#0{width}x}".format(value=temp,
width=math.ceil(reg.bitWidthList[fieldIndex] / 4) |
polarkac/TaskTracker | tasks/utils.py | Python | mit | 837 | 0.003584 | from django.db.models import Sum
from django.contrib.auth.mixins import LoginRequiredMixin
from tasks.models import TimeLog
def get_total_project_spend_time(tasks):
    """Sum the spend_time logged against the unpaid tasks in *tasks*.

    Returns None when no time has been logged at all (Sum aggregates
    to None over an empty queryset).  Stray ' | ' artifact removed
    from the subscript.
    """
    unpaid_tasks = tasks.filter(paid=False)
    total_time = (
        TimeLog.objects.filter(comment__task__in=unpaid_tasks)
        .aggregate(Sum('spend_time'))
    )['spend_time__sum']
    return total_time
def annotate_total_time_per_task(tasks):
    """Attach a total_spend_time attribute to every task in *tasks*.

    Tasks without any logged time get 0.
    """
    # One grouped query: total spend_time per task id.
    spend_by_task = {
        row['comment__task']: row['spend_time__sum']
        for row in TimeLog.objects.filter(comment__task__in=tasks)
                                  .values('comment__task')
                                  .annotate(Sum('spend_time'))
    }
    for task in tasks:
        task.total_spend_time = spend_by_task.get(task.id, 0)
class LoginRequired(LoginRequiredMixin):
    """LoginRequiredMixin variant that redirects without a ?next= parameter."""
    # Stray ' | ' extraction artifact removed from the attribute name.
    redirect_field_name = None
|
#!/usr/bin/env python
import sys, os

# get the location of this script
app_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))

# Set the name of the application here and moose directory relative to the application
app_name = 'falcon'
# BUG FIX: was os.path.join(app_path, '..' 'moose') -- the missing comma
# concatenated the strings into '..moose' (cf. FRAMEWORK_DIR below).
MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', 'moose'))
FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', 'moose', 'framework'))

#### See if MOOSE_DIR is already in the environment instead
# (dict.has_key is Python-2-only; the `in` operator works everywhere)
if "MOOSE_DIR" in os.environ:
    MOOSE_DIR = os.environ['MOOSE_DIR']
    FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework')
if "FRAMEWORK_DIR" in os.environ:
    FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR']

sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML')
import genInputFileSyntaxHTML

# this will automatically copy the documentation to the base directory
# in a folder named syntax
genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR)
|
wayfinder/Wayfinder-CppCore-v3 | ngplib/generator_print.py | Python | bsd-3-clause | 2,826 | 0.010262 | #!/bin/env python
#
# Copyright (c) 1999 - 2010, Vodafone Group Services Ltd
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# * Neither the name of the Vodafone Group Services Ltd nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import parser_xml
def print_enum(enum):
print "Enum: ",enum.name
for value in enum.values:
prin | t value[0],"=",value[1]
def print_param(param):
    # One-line dump of a parameter's attributes.
    print "id:",param.id,"name:",param.name,"default:",param.default, \
        "description:",param.description
def print_member(member):
    # Dump one struct member, one attribute per line.
    print "Name:",member.name
    print "Type:",member.type
    print "Comment:",member.comment
def print_struct(struct):
    # Dump a struct: its name followed by all of its members.
    print "Name:",struct.name
    for member in struct.members:
        print_member(member)
def print_request(req):
    # Dump a request/reply: header line, then its params, structs, enums.
    print "Name:",req.name,"version:",req.version
    for param in req.params.values():
        print_param(param)
    for struct in req.structs.values():
        print_struct(struct)
    for enum in req.enums.values():
        print_enum(enum)
def print_doc(doc):
    # Dump the whole parsed protocol document, section by section.
    print "---- Global Params ----"
    for param in doc.params.values():
        print_param(param)
    print "---- Global Enums ----"
    for enum in doc.enums.values():
        print_enum(enum)
    print "---- Requests ----"
    for request in doc.requests.values():
        print_request(request)
    print "---- Replies ----"
    for reply in doc.replies.values():
        print_request(reply)
# Parse the protocol description and print every section of it.
doc = parser_xml.parse("protocol.xml")
print_doc(doc)
|
raghavsub/gtkpass | gtkpass/main.py | Python | mit | 3,474 | 0.001727 | import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk
import os
from subprocess import call, Popen, PIPE, STDOUT
class GtkPassWindow(Gtk.Window):
    """Single-window GUI for fuzzy-searching a pass(1) password store.

    Typing in the entry fuzzy-matches (via fzf in filter mode) against
    the names of all .gpg files under ~/.password-store; pressing Enter
    copies the best match's password to the clipboard with `pass -c`.
    """

    def __init__(self):
        self.search_text = ''
        self.search_result_text = ''
        self.get_pass_path()
        self.build_gui()
        self.build_data_structures()

    def get_pass_path(self):
        # Standard pass(1) store location.
        self.pass_path = os.path.expanduser('~/.password-store')

    def build_gui(self):
        """Create the window: a read-only result row above the search entry."""
        Gtk.Window.__init__(self, title='pass')
        self.set_border_width(10)
        self.set_default_size(300, -1)
        self.text_view = Gtk.Entry()
        self.text_view.set_editable(False)
        self.text_view.set_can_focus(False)
        self.text_entry = Gtk.Entry()
        self.text_entry.connect('key-release-event', self.on_key_release)
        self.text_entry.connect('activate', self.on_activate)
        self.text_entry.set_icon_from_icon_name(Gtk.EntryIconPosition.PRIMARY,
                                                'system-search-symbolic')
        self.box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)
        self.box.pack_start(self.text_view, True, True, 0)
        self.box.pack_start(self.text_entry, True, True, 0)
        self.add(self.box)
        self.text_entry.grab_focus()

    def build_data_structures(self):
        """Collect the store-relative name of every .gpg entry."""
        self.pass_list = []
        for root, dirs, files in os.walk(self.pass_path):
            for file_ in files:
                file_ = os.path.join(root, file_)
                if os.path.splitext(file_)[1] == '.gpg':
                    pass_list_item = os.path.relpath(file_, self.pass_path)
                    pass_list_item = os.path.splitext(pass_list_item)[0]
                    self.pass_list.append(pass_list_item)

    def fuzzy_find(self):
        """Run fzf in filter mode over the entry names; return matches."""
        env = os.environ.copy()
        fzf_bin = os.path.expanduser('~/.fzf/bin')
        if fzf_bin not in env['PATH']:
            # BUG FIX: was `env['PATH'] += '{}:{}'.format(env['PATH'], fzf_bin)`,
            # which duplicated the whole PATH before appending fzf's dir.
            env['PATH'] = '{}:{}'.format(env['PATH'], fzf_bin)
        p = Popen(['fzf', '-f', self.search_text], env=env,
                  stdin=PIPE, stdout=PIPE, stderr=STDOUT)
        fzf_in = '\n'.join(self.pass_list).encode('utf-8')
        return p.communicate(fzf_in)[0].decode().strip().split('\n')

    def on_key_release(self, widget, event):
        """Re-run the fuzzy search after every keystroke; Esc quits."""
        if event.keyval == Gdk.KEY_Escape:
            Gtk.main_quit()
        self.search_text = self.text_entry.get_text().strip()
        if self.search_text == '':
            self.search_result_text = None
        else:
            search_result = self.fuzzy_find()
            if search_result == []:
                self.search_result_text = None
            else:
                # Best match is fzf's first output line.
                self.search_result_text = search_result[0]
        if self.search_result_text:
            self.text_view.set_text(self.search_result_text)
        else:
            self.text_view.set_text('')

    def on_button_release(self, widget, event):
        # NOTE(review): never connected to a signal in build_gui --
        # confirm whether this handler is dead code.
        self.copy_to_clipboard()

    def on_activate(self, event):
        """Enter pressed: copy the current match's password."""
        self.copy_to_clipboard()

    def copy_to_clipboard(self):
        """Invoke `pass -c <entry>` and show a paste icon as feedback."""
        if self.search_result_text:
            call(['pass', '-c', self.search_result_text])
            self.text_entry.set_icon_from_icon_name(
                Gtk.EntryIconPosition.SECONDARY,
                'edit-paste-symbolic')
def main():
    """Create the window, quit on close, and enter the GTK main loop."""
    win = GtkPassWindow()
    win.connect('delete-event', Gtk.main_quit)
    win.show_all()
    Gtk.main()

if __name__ == '__main__':
    main()
|
dimtruck/magnum | magnum/tests/unit/conductor/test_monitors.py | Python | apache-2.0 | 11,331 | 0 | # Copyright 2015 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_serialization import jsonutils
from magnum.conductor import k8s_monitor
from magnum.conductor import mesos_monitor
from magnum.conductor import monitors
from magnum.conductor import swarm_monitor
from magnum import objects
from magnum.tests import base
from magnum.tests.unit.db import utils
class MonitorsTestCase(base.TestCase):
test_metrics_spec = {
'metric1': {
'unit': 'metric1_unit',
'func': 'metric1_func',
},
'metric2': {
'unit': 'metric2_unit',
'func': 'metric2_func',
| },
}
def setUp(self):
super(MonitorsTestCase, self).setUp()
bay = utils.get_test_bay(node_addresses=['1.2.3.4'],
api_address='https://5.6.7.8:2376')
self.bay = objects.Bay(self.context, **bay)
self.monitor = swarm_monitor.SwarmMonitor(self.context, self.bay)
self.k8s_monitor = k8s_monitor.K8sMonitor(self.context, self.bay)
self.mesos_monitor = mesos_monitor.MesosMonitor(self.context,
| self.bay)
p = mock.patch('magnum.conductor.swarm_monitor.SwarmMonitor.'
'metrics_spec', new_callable=mock.PropertyMock)
self.mock_metrics_spec = p.start()
self.mock_metrics_spec.return_value = self.test_metrics_spec
self.addCleanup(p.stop)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_success(self, mock_baymodel_get_by_uuid):
baymodel = mock.MagicMock()
baymodel.coe = 'swarm'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, swarm_monitor.SwarmMonitor)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_k8s_bay(self, mock_baymodel_get_by_uuid):
baymodel = mock.MagicMock()
baymodel.coe = 'kubernetes'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, k8s_monitor.K8sMonitor)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_mesos_bay(self, mock_baymodel_get_by_uuid):
baymodel = mock.MagicMock()
baymodel.coe = 'mesos'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, mesos_monitor.MesosMonitor)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_unsupported_coe(self, mock_baymodel_get_by_uuid):
baymodel = mock.MagicMock()
baymodel.coe = 'unsupported'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsNone(monitor)
@mock.patch('magnum.common.docker_utils.docker_for_bay')
def test_swarm_monitor_pull_data_success(self, mock_docker_for_bay):
mock_docker = mock.MagicMock()
mock_docker.info.return_value = {'DriverStatus': [[
u' \u2514 Reserved Memory', u'0 B / 1 GiB']]}
mock_docker.containers.return_value = [mock.MagicMock()]
mock_docker.inspect_container.return_value = 'test_container'
mock_docker_for_bay.return_value.__enter__.return_value = mock_docker
self.monitor.pull_data()
self.assertEqual([{'MemTotal': 1073741824.0}],
self.monitor.data['nodes'])
self.assertEqual(['test_container'], self.monitor.data['containers'])
@mock.patch('magnum.common.docker_utils.docker_for_bay')
def test_swarm_monitor_pull_data_raise(self, mock_docker_for_bay):
mock_container = mock.MagicMock()
mock_docker = mock.MagicMock()
mock_docker.info.return_value = {'DriverStatus': [[
u' \u2514 Reserved Memory', u'0 B / 1 GiB']]}
mock_docker.containers.return_value = [mock_container]
mock_docker.inspect_container.side_effect = Exception("inspect error")
mock_docker_for_bay.return_value.__enter__.return_value = mock_docker
self.monitor.pull_data()
self.assertEqual([{'MemTotal': 1073741824.0}],
self.monitor.data['nodes'])
self.assertEqual([mock_container], self.monitor.data['containers'])
def test_swarm_monitor_get_metric_names(self):
names = self.monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_swarm_monitor_get_metric_unit(self):
unit = self.monitor.get_metric_unit('metric1')
self.assertEqual('metric1_unit', unit)
def test_swarm_monitor_compute_metric_value(self):
mock_func = mock.MagicMock()
mock_func.return_value = 'metric1_value'
self.monitor.metric1_func = mock_func
value = self.monitor.compute_metric_value('metric1')
self.assertEqual('metric1_value', value)
def test_swarm_monitor_compute_memory_util(self):
test_data = {
'nodes': [
{
'Name': 'node',
'MemTotal': 20,
},
],
'containers': [
{
'Name': 'container',
'HostConfig': {
'Memory': 10,
},
},
],
}
self.monitor.data = test_data
mem_util = self.monitor.compute_memory_util()
self.assertEqual(50, mem_util)
test_data = {
'nodes': [],
'containers': [],
}
self.monitor.data = test_data
mem_util = self.monitor.compute_memory_util()
self.assertEqual(0, mem_util)
@mock.patch('magnum.conductor.k8s_api.create_k8s_api')
def test_k8s_monitor_pull_data_success(self, mock_k8s_api):
mock_nodes = mock.MagicMock()
mock_node = mock.MagicMock()
mock_node.status = mock.MagicMock()
mock_node.status.capacity = "{'memory': '2000Ki'}"
mock_nodes.items = [mock_node]
mock_k8s_api.return_value.list_namespaced_node.return_value = (
mock_nodes)
mock_pods = mock.MagicMock()
mock_pod = mock.MagicMock()
mock_pod.spec = mock.MagicMock()
mock_container = mock.MagicMock()
mock_container.resources = mock.MagicMock()
mock_container.resources.limits = "{'memory':'100Mi'}"
mock_pod.spec.containers = [mock_container]
mock_pods.items = [mock_pod]
mock_k8s_api.return_value.list_namespaced_pod.return_value = mock_pods
self.k8s_monitor.pull_data()
self.assertEqual(self.k8s_monitor.data['nodes'],
[{'Memory': 2048000.0}])
self.assertEqual(self.k8s_monitor.data['pods'],
[{'Memory': 104857600.0}])
def test_k8s_monitor_get_metric_names(self):
k8s_metric_spec = 'magnum.conductor.k8s_monitor.K8sMonitor.'\
'metrics_spec'
with mock.patch(k8s_metric_spec,
new_callable=mock.PropertyMock) as mock_k8s_metric:
mock_k8s_metric.return_value = self.test_metrics_spec
names = self.k8s_monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_k8s_monitor_get_metric_unit(self):
k8s_metric_spec = 'magnum.conductor.k8s_monitor.K8sMonitor.' \
|
rojassergio/Aprendiendo-a-programar-en-Python-con-mi-computador | Programas_Capitulo_01/Cap01_pagina_12.py | Python | mit | 407 | 0.007389 | '''
@author: Sergio Rojas
@contact: rr.sergio@gmail.com
--------------------------
Contenido bajo
Atribución-NoComercial-CompartirIgual 3.0 Venezuela (CC BY-NC-SA 3.0 VE)
http://creativecommons.org/licenses/by-nc-sa/3.0/ve/
Creado en abril 18, 2016
'''
from sympy import *
a, b, | c, d, e, f, x, y = symbols ('a b c d e f x y ')
eqs = (a*x + b*y - e, | c*x + d*y -f)
res = solve(eqs , x, y)
print(res)
|
ppb/ppb-vector | tests/test_normalize.py | Python | artistic-2.0 | 477 | 0 | from hypothesis import assume, given
from utils import angle_isclose, isclose, vectors
@given(v=vectors())
def test_normal | ize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normali | ze())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
fedora-infra/bodhi | bodhi-server/bodhi/server/validators.py | Python | gpl-2.0 | 51,555 | 0.00128 | # Copyright © 2007-2019 Red Hat, Inc. and others.
#
# This file is part of Bodhi.
#
# This program is free software; you can redist | ribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundati | on; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""A collection of validators for Bodhi requests."""
from datetime import date, datetime, timedelta
from functools import wraps
from pyramid.exceptions import HTTPNotFound, HTTPBadRequest
from pyramid.httpexceptions import HTTPFound, HTTPNotImplemented
from sqlalchemy.sql import or_, and_
import colander
import koji
import pyramid.threadlocal
import rpm
from bodhi.server.config import config
from bodhi.server.exceptions import BodhiException
from . import buildsys, log
from .models import (
Build,
Bug,
Comment,
ContentType,
Group,
Package,
PackageManager,
Release,
RpmBuild,
ReleaseState,
TestCase,
TestGatingStatus,
Update,
UpdateStatus,
UpdateRequest,
UpdateSeverity,
UpdateType,
UpdateSuggestion,
User,
)
from .util import (
splitter,
tokenize,
taskotron_results,
)
csrf_error_message = """CSRF tokens do not match. This happens if you have
the page open for a long time. Please reload the page and try to submit your
data again. Make sure to save your input somewhere before reloading.
""".replace('\n', ' ')
def postschema_validator(f):
"""
Modify a validator function, so that it is skipped if schema validation already failed.
Args:
f (callable): The function we are wrapping.
Returns:
callable: The wrapped function.
"""
@wraps(f)
def validator(request, **kwargs):
"""
Run the validator, but only if there aren't errors and there is validated data.
Args:
request (pyramid.request.Request): The current web request.
kwargs (dict): The other arguments to pass on to the wrapped validator.
"""
# The check on request.errors is to make sure we don't bypass other checks without
# failing the request
if len(request.validated) == 0 and len(request.errors) > 0:
return
f(request, **kwargs)
return validator
# This one is a colander validator which is different from the cornice
# validators defined elsewhere.
def validate_csrf_token(node, value):
"""
Ensure that the value is the expected CSRF token.
Args:
node (colander.SchemaNode): The Colander Schema Node that validates the token.
value (str): The value of the CSRF to be validated.
Raises:
colander.Invalid: If the CSRF token does not match the expected value.
"""
request = pyramid.threadlocal.get_current_request()
expected = request.session.get_csrf_token()
if value != expected:
raise colander.Invalid(node, csrf_error_message)
def cache_tags(request, build):
"""
Cache the tags for a koji build.
Args:
request (pyramid.request.Request): The current request.
build (str): The NVR of the build to cache.
Returns:
list or None: The list of tags, or None if there was a failure communicating with koji.
"""
if build in request.buildinfo and 'tags' in request.buildinfo[build]:
return request.buildinfo[build]['tags']
tags = None
try:
tags = [tag['name'] for tag in request.koji.listTags(build)]
if len(tags) == 0:
request.errors.add('body', 'builds',
'Cannot find any tags associated with build: %s' % build)
except koji.GenericError:
request.errors.add('body', 'builds',
'Invalid koji build: %s' % build)
# This might end up setting tags to None. That is expected, and indicates it failed.
request.buildinfo[build]['tags'] = tags + request.from_tag_inherited
return tags + request.from_tag_inherited
def cache_release(request, build):
"""
Cache the builds release from the request.
Args:
request (pyramid.request.Request): The current request.
build (str): The NVR of the build to cache.
Returns:
Release or None: The release object, or None if no release can be matched to the tags
associated with the build.
"""
if build in request.buildinfo and 'release' in request.buildinfo[build]:
return request.buildinfo[build]['release']
tags = cache_tags(request, build)
if tags is None:
return None
build_rel = Release.from_tags(tags, request.db)
if not build_rel:
msg = 'Cannot find release associated with ' + \
'build: {}, tags: {}'.format(build, tags)
log.warning(msg)
request.errors.add('body', 'builds', msg)
# This might end up setting build_rel to None. That is expected, and indicates it failed.
request.buildinfo[build]['release'] = build_rel
return build_rel
def cache_nvrs(request, build):
"""
Cache the NVR from the given build on the request, and the koji getBuild() response.
Args:
request (pyramid.request.Request): The current request.
build (str): The NVR of the build to cache.
Raises:
ValueError: If the build could not be found in koji.
koji.GenericError: If an error was thrown by koji's getBuild() call.
"""
if build in request.buildinfo and 'nvr' in request.buildinfo[build]:
return
if build not in request.buildinfo:
request.buildinfo[build] = {}
# Request info from koji, used to split NVR and determine type
# We use Koji's information to get the NVR split, because modules can have dashes in their
# stream.
kbinfo = request.koji.getBuild(build)
if not kbinfo:
request.buildinfo[build]['info'] = None
request.buildinfo[build]['nvr'] = None
raise ValueError('Build %s did not exist' % build)
request.buildinfo[build]['info'] = kbinfo
request.buildinfo[build]['nvr'] = kbinfo['name'], kbinfo['version'], kbinfo['release']
@postschema_validator
def validate_build_nvrs(request, **kwargs):
"""
Ensure that the given builds reference valid Build objects.
Args:
request (pyramid.request.Request): The current request.
kwargs (dict): The kwargs of the related service definition. Unused.
"""
for build in request.validated.get('builds') or []: # cope with builds being None
try:
cache_nvrs(request, build)
if request.validated.get('from_tag'):
n, v, r = request.buildinfo[build]['nvr']
release = request.db.query(Release).filter(or_(Release.name == r,
Release.name == r.upper(),
Release.version == r)).first()
if release and release.composed_by_bodhi:
request.errors.add(
'body', 'builds',
f"Can't create update from tag for release"
f" '{release.name}' composed by Bodhi.")
except ValueError:
request.validated['builds'] = []
request.errors.add('body', 'builds', 'Build does not exist: %s' % build)
return
except koji.GenericError:
log.exception("Error retrieving koji build for %s" % build)
request.validated['builds'] = []
request.errors.add('body', 'builds',
'Koji error getting build: %s' % build)
return
@postschema_validator
def validate_builds_or_from_tag_exist( |
R-daneel-olivaw/mutation-tolerance-voting | code/sf/sf_stv.py | Python | lgpl-3.0 | 1,610 | 0.004348 | '''
Created on Mar 13, 2015
@author: Akshat
'''
from pyvotecore.stv import STV
from code.pref_sf_conv import PrefSFConverter
class ImplSTV(object):
'''
classdocs
'''
def __init__(self, raw_pref):
'''
Constructor
'''
self.processed_pref = PrefSFConverter(raw_pref).convert()
# ballots = [
# { "count":3, "ballot":[["A"], ["C"], ["D"], ["B"]] },
# { "count":9, "ballot":[["B"], ["A"], ["C"], ["D"]] },
# { "count":8, "ballot":[["C"], ["D"], ["A"], ["B"]] },
# { "count":5, "ballot":[["D"], ["A"], ["B"], ["C"]] },
# { "count":5, "ballot":[["D"], ["B"], ["C"], ["A"]] }
# ]
# print(SchulzeMethod(ballots, ballot_notation=0).as_dict())
# Generate data
# input_pr = [
# {"count": 56, "ballot": ["c1", "c2", "c3", "c4"]},
# {"count": 40, "ballot": ["c4", "c2", "c3", "c1"]},
# {"count": 20, "ballot": ["c3", "c4", "c1", "c2"]},
# {"count": 20, "ballot": ["c3", "c1", "c4", "c2"]}
# ]
# output = STV(input_pr, required_winners=2).as_dict()
# print(output)
def run_stv(self):
output = STV(self.processed_pref, required_winners=3).as_dict()
# print('##STV Winners##')
# #print(output)
# | print(output['winners'])
# print(output['rounds'])
# print('##STV Winners##')
| # print()
return output
|
crypt3lx2k/Imageboard-Web-Interface | iwi/web/Links.py | Python | mit | 1,469 | 0.017699 | import re
import urlparse
__all__ = ['Links']
class Links (ob | ject):
"""
Utility class for URL creation.
"""
scheme = 'http'
netloc = 'boards.4chan.org'
apiloc = 'a.4cdn.org'
imgloc = 'i.4cdn.org'
board_pattern = re.compile(r'/(\w+)$')
page_pattern = re.compile(r'/(\w+)/(\d+)$')
thread_pattern = re.compile(r'/(\w+)/thread/(\d+)')
@classmethod
def __makeURL (cls, path, netloc, fragment=''):
| """
Creates an URL based on path, whether it is an API URL and optionally
a fragment for a specific post.
"""
return urlparse.urlunparse (
urlparse.ParseResult (
scheme = cls.scheme,
netloc = netloc,
path = path,
params = '',
query = '',
fragment = fragment
)
)
@classmethod
def createURL (cls, path, fragment=''):
"""
Generates an URL based on a specific path and an optional fragment.
"""
return cls.__makeURL(path, cls.netloc, fragment)
@classmethod
def createAPIURL (cls, path):
"""
Generates an API URL based on a specific path.
"""
return cls.__makeURL(path, cls.apiloc)
@classmethod
def createImageURL (cls, path):
"""
Generates an Image URL based on a specific path.
"""
return cls.__makeURL(path, cls.imgloc)
|
architecture-building-systems/CEAforArcGIS | cea/interfaces/dashboard/api/glossary.py | Python | mit | 692 | 0.00289 |
from flask_restplus import Namespace, Resourc | e
from flask import current_app
from cea.glossary import read_glossary_df
api = Namespace('Glossary', description='Glossary for variables used in CEA')
@api.route('/')
class Glossary(Resource):
def get(self):
glossary = read_glossary_df(plugins=current_app.cea_config.plugins)
groups = glossary.groupby('SCRIPT')
data = []
for group in groups.groups:
df = groups.get_group(group)
result = df[~df.index.duplicated(keep='first')].filln | a('-')
data.append({'script': group if group != '-' else 'inputs', 'variables': result.to_dict(orient='records')})
return data
|
apollo17march/TweePY | json2csv/gen_outline.py | Python | gpl-3.0 | 2,679 | 0.004479 | #!/usr/bin/env python
import json
import os, os.path
def key_paths(d):
def helper(path, x):
if isinstance(x, dict):
for k, v in x.iteritems():
for ret in helper(path + [k], v):
yield ret
elif isinstance(x, list):
for i, item in enumerate(x):
for ret in helper(path + [i], item):
yield ret
else:
yield path
return helper([], d)
def line_iter(f):
for line in f:
yield json.loads(line)
def coll_iter(f, coll_key):
data = json.load(f)
for obj in data[coll_key]:
yield obj
def gather_key_map(iterator):
key_map = {}
for d in iterator:
for path in key_paths(d):
key_map[tuple(path)] = True
return key_map
def path_join(path, sep='.'):
return sep.join(str(k) for k in path)
def key_map_to_list(key_map):
# We convert to strings *after* sorting so that array indices come out
# in the correct order.
return [(path_join(k, '_'), path_join(k)) for k in sorted(key_map.keys())]
def make_outline(json_file, each_line, collection_key):
if each_line:
iterator = line_iter(json_file)
else:
iterator = coll_iter(json_file, collection_key)
key_map = gather_key_map(iterator)
outline = {'map': key_map_to_list(key_map)}
if collection_key:
outline['collection'] = collection_key
return outline
def init_parser():
import argparse
parser = argparse.ArgumentParser(description="Generate an outline file for json2csv.py")
parser.add_argument('json_file', type=argparse.FileType('r'),
help= | "Path to JSON data file to analyze")
parser.add_argument('-o', '--output-file', type=str, default=None,
help="Path to outline file to output")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-e', '--each-line', action="store_true", default=False,
help="Process each line of JSON file separately")
group.add_argument('-c', '--collection', type=str, default=None,
help="Key in JSON of array to pr | ocess", metavar="KEY")
return parser
def main():
parser = init_parser()
args = parser.parse_args()
outline = make_outline(args.json_file, args.each_line, args.collection)
outfile = args.output_file
if outfile is None:
fileName, fileExtension = os.path.splitext(args.json_file.name)
outfile = fileName + '.outline.json'
with open(outfile, 'w') as f:
json.dump(outline, f, indent=2, sort_keys=True)
if __name__ == '__main__':
main()
|
tomkralidis/geonode | geonode/services/migrations/0027_auto_20190429_0831.py | Python | gpl-3.0 | 401 | 0.002494 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-29 08:31
from django.db import mig | rations
class Migration(migrations.Migration):
dependencies = [
('services', '0026_auto_20171130_0600_squashed_0041_auto_20190404_0820'),
]
operations = [
migrations.AlterModelManagers(
name='service',
managers=[
| ],
),
]
|
fnp/librarian | src/librarian/elements/headers/naglowek_scena.py | Python | agpl-3.0 | 297 | 0.003367 | from ..ba | se import WLElement
class NaglowekScena(WLElement):
SE | CTION_PRECEDENCE = 2
TXT_TOP_MARGIN = 4
TXT_BOTTOM_MARGIN = 2
TXT_LEGACY_TOP_MARGIN = 4
TXT_LEGACY_BOTTOM_MARGIN = 0
HTML_TAG = 'h3'
EPUB_TAG = 'h2'
EPUB_CLASS = 'h3'
EPUB_START_CHUNK = False
|
brendanwhitfield/python-OBD | obd/protocols/protocol_legacy.py | Python | gpl-2.0 | 7,623 | 0.002624 | # -*- coding: utf-8 -*-
########################################################################
# #
# python-OBD: A python OBD-II serial module derived from pyobd #
# #
# Copyright 2004 Donour Sizemore (donour@uchicago.edu) #
# Copyright 2009 Secons Ltd. (www.obdtester.com) #
# Copyright 2009 Peter J. Creath #
# Copyright 2015 Brendan Whitfield (bcw7044@rit.edu) #
# #
########################################################################
# #
# protocols/protocol_legacy.py #
# #
# This file is part of python-OBD (a derivative of pyOBD) #
# #
# python-OBD is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# python-OBD is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with python-OBD. If not, see <http://www.gnu.org/licenses/>. #
# #
########################################################################
from binascii import unhexlify
from obd.utils import contiguous
from .protocol import Protocol, Message, Frame, ECU
import logging
logger = logging.getLogger(__name__)
class LegacyProtocol(Protocol):
TX_ID_ENGINE = 0x10
def __init__(self, lines_0100):
Protocol.__init__(self, lines_0100)
def parse_frame(self, frame):
raw = frame.raw
# Handle odd size frames and drop
if len(raw) & 1:
logger.debug("Dropping frame for being odd")
return False
raw_bytes = bytearray(unhexlify(raw))
if len(raw_bytes) < 6:
logger.debug("Dropped frame for being too short")
return False
if len(raw_bytes) > 11:
logger.debug("Dropped frame for being too long")
return False
# Ex.
# [Header] [ Frame ]
# 48 6B 10 41 00 BE 7F B8 13 ck
# ck = checksum byte
# exclude header and trailing checksum (handled by ELM adapter)
frame.data = raw_bytes[3:-1]
# read header information
frame.priority = raw_bytes[0]
frame.rx_id = raw_bytes[1]
frame.tx_id = raw_bytes[2]
return True
def parse_message(self, message):
frames = message.frames
# len(frames) will always be >= 1 (see the caller, protocol.py)
mode = frames[0].data[0]
# test that all frames are responses to the same Mode (SID)
if len(frames) > 1:
if not all([mode == f.data[0] for f in frames[1:]]):
logger.debug("Recieved frames from multiple commands")
return False
# legacy protocols have different re-assembly
# procedures for different Modes
# ~~~~
# NOTE: THERE ARE HACKS IN HERE to make some output compatible with CAN
# since CAN is the standard, and this is considered legacy, I'm
# fixing ugly inconsistencies between the two protocols here.
# ~~~~
if mode == 0x43:
# GET_DTC requests return frames with no PID or order bytes
# accumulate all of the data, minus the Mode bytes of each frame
# Ex.
# insert faux-byte to mimic the CAN style DTC requests
# |
# [ | Frame ]
# 48 6B 10 43 03 00 03 02 03 03 ck
# 48 6B 10 43 03 04 00 00 00 00 ck
# [ Data ]
message.data = bytearray([0x43, 0x00]) # forge the mode byte and CAN's DTC_count byte
for f in frames:
message.data += f.data[1:]
else:
if len(frames) == 1:
# return data, excluding the mode/pid bytes
# Ex.
# | [ Frame/Data ]
# 48 6B 10 41 00 BE 7F B8 13 ck
message.data = frames[0].data
else: # len(frames) > 1:
# generic multiline requests carry an order byte
# Ex.
# [ Frame ]
# 48 6B 10 49 02 01 00 00 00 31 ck
| # 48 6B 10 49 02 02 44 34 47 50 ck
# 48 6B 10 49 02 03 30 30 52 35 ck
# etc... [] [ Data ]
# becomes:
# 49 02 [] 00 00 00 31 44 34 47 50 30 30 52 35
# | [ ] [ ] [ ]
# order byte is removed
# sort the frames by the order byte
frames = sorted(frames, key=lambda f: f.data[2])
# check contiguity
indices = [f.data[2] for f in frames]
if not contiguous(indices, 1, len(frames)):
logger.debug("Recieved multiline response with missing frames")
return False
# now that they're in order, accumulate the data from each frame
# preserve the first frame's mode and PID bytes (for consistency with CAN)
frames[0].data.pop(2) # remove the sequence byte
message.data = frames[0].data
# add the data from the remaining frames
for f in frames[1:]:
message.data += f.data[3:] # loose the mode/pid/seq bytes
return True
##############################################
# #
# Here lie the class stubs for each protocol #
# #
##############################################
class SAE_J1850_PWM(LegacyProtocol):
ELM_NAME = "SAE J1850 PWM"
ELM_ID = "1"
def __init__(self, lines_0100):
LegacyProtocol.__init__(self, lines_0100)
class SAE_J1850_VPW(LegacyProtocol):
ELM_NAME = "SAE J1850 VPW"
ELM_ID = "2"
def __init__(self, lines_0100):
LegacyProtocol.__init__(self, lines_0100)
class ISO_9141_2(LegacyProtocol):
ELM_NAME = "ISO 9141-2"
ELM_ID = "3"
def __init__(self, lines_0100):
LegacyProtocol.__init__(self, lines_0100)
class ISO_14230_4_5baud(LegacyProtocol):
ELM_NAME = "ISO 14230-4 (KWP 5BAUD)"
ELM_ID = "4"
def __init__(self, lines_0100):
LegacyProtocol.__init__(self, lines_0100)
class ISO_14230_4_fast(LegacyProtocol):
ELM_NAME = "ISO 14230-4 (KWP FAST)"
ELM_ID = "5"
def __init__(self, lines_0100):
LegacyProtocol.__init__(self, lines_0100)
|
shaftoe/home-assistant | tests/components/device_tracker/test_unifi.py | Python | apache-2.0 | 4,732 | 0 | """The tests for the Unifi WAP device tracker platform."""
from unittest import mock
import urllib
import pytest
import voluptuous as vol
from homeassistant.components.device_tracker import DOMAIN, unifi as unifi
from homeassistant.const import (CONF_HOST, CONF_USERNAME, CONF_PASSWORD,
CONF_PLATFORM, CONF_VERIFY_SSL)
@pytest.fixture
def mock_ctrl():
"""Mock pyunifi."""
module = mock.MagicMock()
with mock.patch.dict('sys.modules', {
'pyunifi.controller': module.controller,
}):
yield module.controller.Controller
@pytest.fixture
def mock_scanner():
"""Mock UnifyScanner."""
with mock.patch('homeassistant.components.device_tracker'
'.unifi.UnifiScanner') as scanner:
yield scanner
def test_config_minimal(hass, mock_scanner, mock_ctrl):
"""Test the setup with minimal configuration."""
config = {
DOMAIN: unifi.PLATFORM_SCHEMA({
CONF_PLATFORM: unifi.DOMAIN,
CONF_USERNAME: 'foo',
CONF_PASSWORD: 'password',
})
}
result = unifi.get_scanner(hass, config)
assert mock_scanner.return_value == result
assert mock_ctrl.call_count == 1
assert mock_ctrl.mock_calls[0] == \
mock.call('localhost', 'foo', 'password', 8443,
version='v4', site_id='default', ssl_verify=True)
assert mock_scanner.call_count == 1
assert mock_scanner.call_args == mock.call(mock_ctrl.return_value)
def test_config_full(hass, mock_scanner, mock_ctrl):
"""Test the setup with full configuration."""
config = {
DOMAIN: unifi.PLATFORM_SCHEMA({
CONF_PLATFORM: unifi.DOMAIN,
CONF_USERNAME: 'foo',
CONF_PASSWORD: 'password',
CONF_HOST: 'myhost',
CONF_VERIFY_SSL: False,
'port': 123,
'site_id': 'abcdef01',
})
}
result = unifi.get_scanner(hass, config)
assert mock_scanner.return_value == result
assert mock_ctrl.call_count == 1
assert mock_ctrl.call_args == \
mock.call('myhost', 'foo', 'password', 123,
version='v4', site_id='abcdef01', ssl_verify=False)
assert mock_scanner.call_count == 1
assert mock_scanner.call_args == mock.call(mock_ctrl.return_value)
def test_config_error():
"""Test for configuration errors."""
with pytest.raises(vol.Invalid):
unifi.PLATFORM_SCHEMA({
# no username
CONF_PLATFORM: unifi.DOMAIN,
CONF_HOST: 'myhost',
'port': 123,
})
with pytest.raises(vol.Invalid):
unifi.PLATFOR | M_SCHEMA({
CONF_PLATFORM: unifi.DOMAIN,
CONF_USERNAME: 'foo',
CONF_PASSWORD: 'password',
CONF_HOST: 'myhost',
| 'port': 'foo', # bad port!
})
def test_config_controller_failed(hass, mock_ctrl, mock_scanner):
"""Test for controller failure."""
config = {
'device_tracker': {
CONF_PLATFORM: unifi.DOMAIN,
CONF_USERNAME: 'foo',
CONF_PASSWORD: 'password',
}
}
mock_ctrl.side_effect = urllib.error.HTTPError(
'/', 500, 'foo', {}, None)
result = unifi.get_scanner(hass, config)
assert result is False
def test_scanner_update():
"""Test the scanner update."""
ctrl = mock.MagicMock()
fake_clients = [
{'mac': '123'},
{'mac': '234'},
]
ctrl.get_clients.return_value = fake_clients
unifi.UnifiScanner(ctrl)
assert ctrl.get_clients.call_count == 1
assert ctrl.get_clients.call_args == mock.call()
def test_scanner_update_error():
"""Test the scanner update for error."""
ctrl = mock.MagicMock()
ctrl.get_clients.side_effect = urllib.error.HTTPError(
'/', 500, 'foo', {}, None)
unifi.UnifiScanner(ctrl)
def test_scan_devices():
"""Test the scanning for devices."""
ctrl = mock.MagicMock()
fake_clients = [
{'mac': '123'},
{'mac': '234'},
]
ctrl.get_clients.return_value = fake_clients
scanner = unifi.UnifiScanner(ctrl)
assert set(scanner.scan_devices()) == set(['123', '234'])
def test_get_device_name():
"""Test the getting of device names."""
ctrl = mock.MagicMock()
fake_clients = [
{'mac': '123', 'hostname': 'foobar'},
{'mac': '234', 'name': 'Nice Name'},
{'mac': '456'},
]
ctrl.get_clients.return_value = fake_clients
scanner = unifi.UnifiScanner(ctrl)
assert scanner.get_device_name('123') == 'foobar'
assert scanner.get_device_name('234') == 'Nice Name'
assert scanner.get_device_name('456') is None
assert scanner.get_device_name('unknown') is None
|
jnns/wagtail | wagtail/contrib/frontend_cache/tests.py | Python | bsd-3-clause | 22,580 | 0.002834 | from unittest import mock
from urllib.error import HTTPError, URLError
import requests
from azure.mgmt.cdn import CdnManagementClient
from azure.mgmt.frontdoor import FrontDoorManagementClient
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.utils import override_settings
from wagtail.contrib.frontend_cache.backends import (
AzureCdnBackend, AzureFrontDoorBackend, BaseBackend, CloudflareBackend, CloudfrontBackend,
HTTPBackend)
from wagtail.contrib.frontend_cache.utils import get_backends
from wagtail.core.models import Page
from wagtail.tests.testapp.models import EventIndex
from .utils import (
PurgeBatch, purge_page_from_cache, purge_pages_from_cache, purge_url_from_cache,
purge_urls_from_cache)
class TestBackendConfiguration(TestCase):
def test_default(self):
backends = get_backends()
self.assertEqual(len(backends), 0)
def test_varnish(self):
backends = get_backends(backend_settings={
'varnish': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.HTTPBackend',
'LOCATION': 'http://localhost:8000',
},
})
self.assertEqual(set(backends.keys()), set(['varnish']))
self.assertIsInstance(backends['varnish'], HTTPBackend)
self.assertEqual(backends['varnish'].cache_scheme, 'http')
self.assertEqual(backends['varnish'].cache_netloc, 'localhost:8000')
def test_cloudflare(self):
backends = get_backends(backend_settings={
'cloudfla | re': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudflareBackend',
'EMAIL': 'test@ | test.com',
'API_KEY': 'this is the api key',
'ZONEID': 'this is a zone id',
'BEARER_TOKEN': 'this is a bearer token'
},
})
self.assertEqual(set(backends.keys()), set(['cloudflare']))
self.assertIsInstance(backends['cloudflare'], CloudflareBackend)
self.assertEqual(backends['cloudflare'].cloudflare_email, 'test@test.com')
self.assertEqual(backends['cloudflare'].cloudflare_api_key, 'this is the api key')
self.assertEqual(backends['cloudflare'].cloudflare_token, 'this is a bearer token')
def test_cloudfront(self):
backends = get_backends(backend_settings={
'cloudfront': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.CloudfrontBackend',
'DISTRIBUTION_ID': 'frontend',
},
})
self.assertEqual(set(backends.keys()), set(['cloudfront']))
self.assertIsInstance(backends['cloudfront'], CloudfrontBackend)
self.assertEqual(backends['cloudfront'].cloudfront_distribution_id, 'frontend')
def test_azure_cdn(self):
backends = get_backends(backend_settings={
'azure_cdn': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureCdnBackend',
'RESOURCE_GROUP_NAME': 'test-resource-group',
'CDN_PROFILE_NAME': 'wagtail-io-profile',
'CDN_ENDPOINT_NAME': 'wagtail-io-endpoint',
},
})
self.assertEqual(set(backends.keys()), set(['azure_cdn']))
self.assertIsInstance(backends['azure_cdn'], AzureCdnBackend)
self.assertEqual(backends['azure_cdn']._resource_group_name, 'test-resource-group')
self.assertEqual(backends['azure_cdn']._cdn_profile_name, 'wagtail-io-profile')
self.assertEqual(backends['azure_cdn']._cdn_endpoint_name, 'wagtail-io-endpoint')
def test_azure_front_door(self):
backends = get_backends(backend_settings={
'azure_front_door': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend',
'RESOURCE_GROUP_NAME': 'test-resource-group',
'FRONT_DOOR_NAME': 'wagtail-io-front-door',
},
})
self.assertEqual(set(backends.keys()), set(['azure_front_door']))
self.assertIsInstance(backends['azure_front_door'], AzureFrontDoorBackend)
self.assertEqual(backends['azure_front_door']._resource_group_name, 'test-resource-group')
self.assertEqual(backends['azure_front_door']._front_door_name, 'wagtail-io-front-door')
def test_azure_cdn_get_client(self):
mock_credentials = mock.MagicMock()
backends = get_backends(backend_settings={
'azure_cdn': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureCdnBackend',
'RESOURCE_GROUP_NAME': 'test-resource-group',
'CDN_PROFILE_NAME': 'wagtail-io-profile',
'CDN_ENDPOINT_NAME': 'wagtail-io-endpoint',
'SUBSCRIPTION_ID': 'fake-subscription-id',
'CREDENTIALS': mock_credentials,
},
})
self.assertEqual(set(backends.keys()), set(['azure_cdn']))
client = backends['azure_cdn']._get_client()
self.assertIsInstance(client, CdnManagementClient)
self.assertEqual(client.config.subscription_id, 'fake-subscription-id')
self.assertIs(client.config.credentials, mock_credentials)
def test_azure_front_door_get_client(self):
mock_credentials = mock.MagicMock()
backends = get_backends(backend_settings={
'azure_front_door': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend',
'RESOURCE_GROUP_NAME': 'test-resource-group',
'FRONT_DOOR_NAME': 'wagtail-io-fake-front-door-name',
'SUBSCRIPTION_ID': 'fake-subscription-id',
'CREDENTIALS': mock_credentials,
},
})
client = backends['azure_front_door']._get_client()
self.assertEqual(set(backends.keys()), set(['azure_front_door']))
self.assertIsInstance(client, FrontDoorManagementClient)
self.assertEqual(client.config.subscription_id, 'fake-subscription-id')
self.assertIs(client.config.credentials, mock_credentials)
@mock.patch('wagtail.contrib.frontend_cache.backends.AzureCdnBackend._make_purge_call')
def test_azure_cdn_purge(self, make_purge_call_mock):
backends = get_backends(backend_settings={
'azure_cdn': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureCdnBackend',
'RESOURCE_GROUP_NAME': 'test-resource-group',
'CDN_PROFILE_NAME': 'wagtail-io-profile',
'CDN_ENDPOINT_NAME': 'wagtail-io-endpoint',
'CREDENTIALS': 'Fake credentials',
},
})
self.assertEqual(set(backends.keys()), set(['azure_cdn']))
self.assertIsInstance(backends['azure_cdn'], AzureCdnBackend)
# purge()
backends['azure_cdn'].purge('http://www.wagtail.org/home/events/christmas/?test=1')
make_purge_call_mock.assert_called_once()
call_args = tuple(make_purge_call_mock.call_args)[0]
self.assertEqual(len(call_args), 2)
self.assertIsInstance(call_args[0], CdnManagementClient)
self.assertEqual(call_args[1], ["/home/events/christmas/?test=1"])
make_purge_call_mock.reset_mock()
# purge_batch()
backends['azure_cdn'].purge_batch([
'http://www.wagtail.org/home/events/christmas/?test=1', 'http://torchbox.com/blog/'
])
make_purge_call_mock.assert_called_once()
call_args = tuple(make_purge_call_mock.call_args)[0]
self.assertIsInstance(call_args[0], CdnManagementClient)
self.assertEqual(call_args[1], ["/home/events/christmas/?test=1", "/blog/"])
@mock.patch('wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend._make_purge_call')
def test_azure_front_door_purge(self, make_purge_call_mock):
backends = get_backends(backend_settings={
'azure_front_door': {
'BACKEND': 'wagtail.contrib.frontend_cache.backends.AzureFrontDoorBackend',
'RESOURCE_GROUP_NAME': 'test-resource-group',
'FRONT_DOOR_NAME': 'wagtail-io-front-door',
|
mirumee/wagtail-saleor | wagtailsaleor/wagtailsaleor/wsgi.py | Python | bsd-3-clause | 401 | 0.002494 | """
WSGI conf | ig for wagtailsaleor project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/depl | oyment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wagtailsaleor.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
jonesyboynz/DuplicateFileFinder | unittests/HashTaskUnitTests.py | Python | gpl-3.0 | 4,641 | 0.023917 | """
Unit tests for the HashTask object
By Simon Jones
26/8/2017
"""
import unittest
from test.TestFunctions import *
from source.HashTask import *
from source.Channel import Channel
class HashTaskUnitTests(unittest.TestCase):
"""
Unit tests for the HashTask object
"""
def setUp(self):
self.task1_t_channel = Channel(True)
self.task1_c_channel = Channel(True)
self.task2_t_channel = Channel(True)
self.task2_c_channel = Channel(True)
self.task1 = spawn_thread(self.task1_c_channel, self.task1_t_channel)
self.task2 = spawn_thread(self.task2_c_channel, self.task2_t_channel)
self.test_filename = "hash_task_unittest_file.log"
file = open(self.test_filename, "w")
file.write("Test Data 123\nHello, world!")
file.close()
print("\nNew Test case:")
def test_spawning_and_joining_tasks(self):
"""
Tests that tasks can be created and joined
:return:
"""
self.task1.start()
self.task2.start()
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 1, TaskMessage.REQUEST, "Hello World Task1")))
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.REQUEST, "Hello World Task2")))
# Should be ignored
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.RESPONSE, "Hello World Task2")))
delay_do_nothing(1)
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 3)
empty_channel(self, self.task2_t_channel.get_in_queue(), 3)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
def test_task_errors(self):
"""
Tests that HashTasks can handle errors properly
:return:
"""
self.task1.start()
self.task2.start()
self.ta | sk1_t_channel.put(str(TaskMessage("NOT_A_FLAG", 1, TaskMessage.REQUEST, "ERROR_TASK")))
delay_do_nothing()
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 3)
empty_channel(self, self. | task2_t_channel.get_in_queue(), 1)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
def test_another_hash_task_case(self):
"""
Another test for robustness
:return:
"""
self.task1.start()
self.task2.start()
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_HASH, 1, TaskMessage.REQUEST, self.test_filename, 0, 10)))
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_HASH, 1, TaskMessage.REQUEST, self.test_filename, 10, 10)))
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_HASH, 1, TaskMessage.REQUEST, self.test_filename, 20, 10)))
delay_do_nothing(1)
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 5)
empty_channel(self, self.task2_t_channel.get_in_queue(), 1)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
def test_actions_after_join_are_executes(self):
"""
Tests that the messages received after a join task are executed.
:return:
"""
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 1, TaskMessage.REQUEST, "Echo 1")))
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.REQUEST, "Echo 1")))
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 1, TaskMessage.REQUEST, "Echo 2")))
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.REQUEST, "Echo 2")))
self.task1.start()
self.task2.start()
delay_do_nothing(1)
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 2)
empty_channel(self, self.task2_t_channel.get_in_queue(), 2)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
if __name__ == "__main__":
unittest.main()
|
efulet/pca | pca/main.py | Python | mit | 3,043 | 0.008216 | """
@created_at 2014-07-15
@author Exequiel Fuentes <efulet@gmail.com>
@author Brian Keith <briankeithn@gmail.com>
"""
# Se recomienda seguir los siguientes estandares:
# 1. Para codificacion: PEP 8 - Style Guide for Python Code (http://legacy.python.org/dev/peps/pep-0008/)
# 2. Para documentacion: PEP 257 - Docstring Conventions (http://legacy.python.org/dev/peps/pep-0257/)
import os
import traceback
import sys
from lib import *
def check_version():
"""Python v2.7 es requerida por el curso, entonces verificamos la version"""
if sys.version_info[:2] != (2, 7):
raise Exception("Parece que python v2.7 no esta instalado en el sistema")
def db_path():
"""Retorna el path de las base de datos"""
pathfile = os.path.dirname(os.path.abspath(__file__))
return os.path.join(pathfile, "db")
if __name__ == "__main__":
try:
# Verificar version de python
check_version()
# Cargar los datos
my_pca_lda = FKSkLearn(os.path.join(db_path(), "datos_diabetes.npz"))
# Preparar los datos para validacion
my_pca_lda.fk_train_test_split()
# Se entrena el clasificador PCA + LDA con la dimension optima.
my_pca_lda.fk_pca_lda()
# Contruye el clasificar Bayes usando la libreria sklearn
my_pca_lda.fk_bayes_classifier()
print("**************")
print("sklearn_Bayes:")
print("Number of mislabeled points : %d" % (my_pca_lda.fk_get_y_test() != my_pca_lda.fk_get_y_pred()).sum())
print("Accuracy: ", my_pca_lda.fk_score())
print("**************")
# Implementacion propia del clasificador.
fknb = FKNaiveBayesClassifier()
fknb.fit(my_pca_lda.fk_get_lda_train(), my_pca_lda.fk_get_y_train())
y_pred_FK = fknb.predict(my_pca_lda.fk_get_lda_test())
print("FK_Bayes")
print("Number of mislabeled points : %d" % (my_pca_lda.fk_get_y_test() != y_pred_FK).sum())
print("Accuracy: ", fknb.score(my_pca_lda.fk_get_lda_test(), my_pca_lda.fk_get_y_test()))
print("**************")
# Esto es para verificar que las predicciones son iguales, deberia entregar una lista vacia.
print("...probando igualdad...")
y_pred_SK = [int(i) for i in my_pca_lda.fk_get_y_pred()]
| #print y_pred_SK
#print y_pred_FK
# Se verifica si la lista esta vacia.
if y_pred_SK == y_pred_FK:
print "Son iguales los dos metodos!"
else:
print "No son iguales. :("
# Se grafica la informacion.
graph = Graph(my_pca_lda.fk_get_lda_train(), my_pca_lda.fk_ | get_y_train())
graph.frequencies_histogram()
graph.probability_density_functions()
graph.conditional_probability(my_pca_lda.fk_get_lda_train(), my_pca_lda.fk_get_y_prob())
graph.show_graphs()
except Exception, err:
print traceback.format_exc()
finally:
sys.exit()
|
uclouvain/OSIS-Louvain | base/migrations/0527_auto_20200730_1502.py | Python | agpl-3.0 | 560 | 0.001786 | # Generated by Django 2.2.13 on 2020-07-30 15:02
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0526_unique_together_groupelementyear'),
]
operations = [
migrations.AlterField(
model_name='groupelementyear',
name='parent',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT,
| to='base.EducationGroupYear'),
),
]
| |
bizarrechaos/newsapi-client | tests/test_newsconfig.py | Python | apache-2.0 | 630 | 0.001587 | import pytest
from news import newsconfig
class TestNewsConfig(object):
def test_news_config_with_path(self):
n = newsconfig.NewsConfig('./news.cfg.exa | mple')
assert n.configpath == './news.cfg.example'
def test_news_config_without_path(self, capsys):
try:
with pytest.raises(SystemExit):
n = newsconfig.NewsConfig()
except:
n = newsconfig.NewsConfig()
assert n.apikey == n.set_api_key(None)
def test_news_config_with_tmp_path(self):
with pytest.raises(SystemExit):
n = | newsconfig.NewsConfig('/tmp/notafile')
|
google-research/google-research | dedal/alignment.py | Python | apache-2.0 | 20,335 | 0.006934 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions used to manipulate alignments and smith-waterman parameters."""
from typing import Sequence, Tuple, Union
import tensorflow as tf
# Type aliases
PackedSWParams = tf.Tensor
UnpackedSWParams = Tuple[tf.Tensor, tf.Tensor, tf.Tensor]
SWParams = Union[PackedSWParams, UnpackedSWParams]
# SW dynamic program edge types, grouped by associated edge weight kind. See
MATCH_STATES = [0, 1, 2, 3]
GAP_OPEN_STATES = [4, 6, 7]
GAP_EXTEND_STATES = [5, 8]
STATES = {
'match': MATCH_STATES,
'gap_open': GAP_OPEN_STATES,
'gap_extend': GAP_EXTEND_STATES,
}
def large_compatible_positive(tensor_type):
"""Large positive number as Tensor.
This function is necessary because the standard value for "inf" in this module
(1e9) cannot be represented using tf.float16.
NOTE(fllinares): Borrowed from
tensorflow/python/keras/layers/advanced_activations.py
which is used already in this codebase indirectly (e.g. in self-attention
layers).
Args:
tensor_type: a dtype to determine the type.
Returns:
A large positive number.
"""
if tensor_type == tf.dtypes.float16:
return tf.dtypes.float16.max
return tf.convert_to_tensor(1e9, dtype=tensor_type)
def top_pad(t, v):
"""Pads tf.Tensor `t` by prepending `v` along the leading dimension."""
return tf.pad(t, [[1, 0], [0, 0], [0, 0]], constant_values=v)
def left_pad(t, v):
"""Pads tf.Tensor `t` by prepending `v` along the second leading dimension."""
return tf.pad(t, [[0, 0], [1, 0], [0, 0]], constant_values=v)
def right_pad(t, v):
"""Pads tf.Tensor `t` by appending `v` along the second leading dimension."""
return tf.pad(t, [[0, 0], | [0, 1], [0, 0]], constant_values=v)
def alignments_to_paths(
alignments, len_x, len_y):
"""Converts sparse representation of align | ments into dense paths tensor.
Args:
alignments: A tf.Tensor<int>[batch, 3, align_len] = tf.stack([pos_x, pos_y,
enc_trans], 1) such that
(pos_x[b][i], pos_y[b][i], enc_trans[b][i]) represents the i-th
transition in the alignment for the b-th sequence pair in the minibatch.
Both pos_x and pos_y are assumed to use one-based indexing and enc_trans
follows the (categorical) 9-state encoding of edge types used throughout
alignment/smith_waterman.py.
len_x: The (padded) length of "X"/"query" sequences in the minibatch.
len_y: The (padded) length of "Y"/"subject" sequences in the minibatch.
Returns:
A tf.Tensor of type tf.float32 and shape (batch_size, len_x, len_y, 9) with
binary entries, representing the trajectory of the indices along the
alignment path, by having a one along the taken edges, with nine possible
edges for each i,j.
"""
batch_size = tf.shape(alignments)[0]
align_len = tf.shape(alignments)[-1]
# Tensor with the same shape as pos_x, pos_y and enc_trans such that
# seq_indicators[b][l] = b for all l in [0, align_len).
seq_indicators = tf.multiply(tf.expand_dims(tf.range(batch_size), -1),
tf.ones((1, align_len), dtype=tf.int32))
# Prepares inputs to scatter_nd.
indices = tf.concat([seq_indicators[Ellipsis, None],
tf.transpose(alignments, (0, 2, 1))], -1)
indices = tf.reshape(indices, (-1, 4))
updates = tf.ones(tf.shape(indices)[0], dtype=tf.float32)
shape = (batch_size, len_x + 1, len_y + 1, 9)
# Note(fllinares): this is a (fairly ugly) hack to deal with padding.
# - pos_x, pos_y must use one-based indexing instead of zero-based indexing.
# - we use the (b, 0, 0, 0) entries of paths as "padding dumps".
# - the resulting tensor will be sliced to remove these starting row/col.
paths = tf.scatter_nd(indices, updates, shape)
return paths[:, 1:, 1:, :]
def alignments_to_state_indices(
alignments,
states,
zero_based_idx = True,
):
"""Retrieves indices of MATCH/GAP OPEN/GAP EXTEND states in alignments.
Args:
alignments: A tf.Tensor<int>[batch, 3, align_len] = tf.stack([pos_x, pos_y,
enc_trans], 1) such that
(pos_x[b][i], pos_y[b][i], enc_trans[b][i]) represents the i-th
transition in the alignment for the b-th sequence pair in the minibatch.
Both pos_x and pos_y are assumed to use one-based indexing and enc_trans
follows the (categorical) 9-state encoding of edge types used throughout
alignment/smith_waterman.py.
states: A Python list of integers in [0, 9), representing an arbitrary
subset of (encoded) edge types. Can also be set to 'match', 'gap_open' or
'gap_extend' to query the set of edge types associated with each of those
conditions.
zero_based_idx: Whether to use zero-based (True) or one-based (False)
indexing for the function's output. Note that, however, alignment must use
one-based indexing regardless of the value of this argument.
Returns:
A tf.Tensor `state_indices` of type tf.int32 and shape (n_entries, 3) such
that, for a tf.Tensor `sim_mat` of shape (batch_size, len_x, len_y),
tf.gather_nd(sim_mat, state_indices)
returns the set of entries in `sim_mat` along the alignments described by
`alignment` that correspond to one of the states in `states`.
Note(fllinares): this function aims to provide a way to avoid materializing
weights in the crf_loss function in alignment/smith_waterman.py, as
suggested by @mblondel. Some extra care might be needed to keep per-example
losses, as tf.gather_nd will flatten the output by default. For
position-independent gap penalties, only the total number of entries per
example in state_indices would be needed. See `score_from_alignment` below
for extra details.
"""
pos_x, pos_y, enc_trans = alignments[:, 0], alignments[:, 1], alignments[:, 2]
states = STATES.get(states, states)
# Note(fllinares): another ugly "hack", here we assume one-based idx to encode
# the padding mask implicitly.
padding_mask = tf.logical_and(pos_x > 0, pos_y > 0)
hits = enc_trans == states[0]
for state in states[1:]:
hits = tf.logical_or(hits, enc_trans == state)
hits = tf.logical_and(hits, padding_mask)
indices = tf.cast(tf.where(hits), tf.int32)
batch_indices = indices[:, 0]
x_indices = tf.gather_nd(pos_x, indices) - int(zero_based_idx)
y_indices = tf.gather_nd(pos_y, indices) - int(zero_based_idx)
state_indices = tf.stack([batch_indices, x_indices, y_indices], axis=0)
return tf.transpose(state_indices, (1, 0))
def paths_to_state_indicators(
paths,
states,
):
"""Computes (batch_size, len_x, len_y) tensor of binary state indicators.
Args:
paths: A tf.Tensor of type tf.float32 and shape (batch_size, len_x, len_y,
9) with binary entries, representing the trajectory of the indices along
the alignment path, by having a one along the taken edges, with nine
possible edges for each i,j.
states: A Python list of integers in [0, 9), representing an arbitrary
subset of (encoded) edge types. Can also be set to 'match', 'gap_open' or
'gap_extend' to query the set of edge types associated with each of those
conditions.
Returns:
A tf.Tensor `state_indicators` of type tf.float32 and shape (batch_size,
len_x, len_y) with binary entries such that
state_indicators[b][i][j] = 1.0
iff the trajectory of the alignment for the b-th sequence pair passes by
character pair (i, j) under one of the states in `states`.
"""
states = STATES.get(states, states)
return tf.reduce_max(tf.gather(paths, indices=states, axis=-1), axis=-1)
def sw_score_from_a |
llhe/tensorflow | tensorflow/python/ops/array_ops.py | Python | apache-2.0 | 79,903 | 0.004543 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Support for manipulating tensors.
See the @{$python/array_ops} guide.
@@string_to_number
@@to_double
@@to_float
@@to_bfloat16
@@to_int32
@@to_int64
@@cast
@@bitcast
@@saturate_cast
@@broadcast_dynamic_shape
@@broadcast_static_shape
@@shape
@@shape_n
@@size
@@rank
@@reshape
@@squeeze
@@expand_dims
@@meshgrid
@@slice
@@strided_slice
@@split
@@tile
@@pad
@@concat
@@stack
@@parallel_stack
@@unstack
@@reverse_sequence
@@reverse
@@reverse_v2
@@transpose
@@extract_image_patches
@@space_to_batch_nd
@@space_to_batch
@@required_space_to_batch_paddings
@@batch_to_space_nd
@@batch_to_space
@@space_to_depth
@@depth_to_space
@@gather
@@gather_nd
@@unique_with_counts
@@scatter_nd
@@dynamic_partition
@@dynamic_stitch
@@boolean_mask
@@one_hot
@@sequence_mask
@@dequantize
@@quantize_v2
@@quantized_concat
@@setdiff1d
@@fake_quant_with_min_max_args
@@fake_quant_with_min_max_args_gradient
@@fake_quant_with_min_max_vars
@@fake_quant_with_min_max_vars_gradient
@@fake_quant_with_min_max_vars_per_channel
@@fake_quant_with_min_max_vars_per_channel_gradient
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
# 'Constant' gets imported in the module 'array_ops'.
from tensorflow.python.framework.constant_op import constant
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_array_ops import *
from tensorflow.python.util import deprecation
from tensorflow.python.util.deprecation import deprecated
# pylint: enable=wildcard-import
# Used for slicing to specify a new 1 size dimension
newaxis = None
# We override the 'slice' for the "slice" op, so we keep python's
# existing 'slice' for later use in this module.
_baseslice = slice
# pylint: disable=redefined-builtin,protected-access
def expand_dims(input, axis=None, name=None, dim=None):
"""Inserts a dimension of 1 into a tensor's shape.
Given a tensor `input`, this operation inserts a dimension of 1 at the
dimension index `axis` of `input`'s shape. The dimension index `axis` starts
at zero; if you specify a negative number for `axis` it is counted backward
from the end.
This operation is useful if you want to add a batch dimension to a single
element. For example, if you have a single image of shape `[height, width,
channels]`, you can make it a batch of 1 image with `expand_dims(image, 0)`,
which will make the shape `[1, height, width, channels]`.
Other examples:
```python
# 't' is a tensor of shape [2]
shape(expand_dims(t, 0)) ==> [1, 2]
shape(expand_dims(t, 1)) ==> [2, 1]
shape(expand_dims(t, -1)) ==> [2, 1]
# 't2' is a tensor of shape [2, 3, 5]
shape(expand_dims(t2, 0)) ==> [1, 2, 3, 5]
shape(expand_dims(t2, 2)) ==> [2, 3, 1, 5]
shape(expand_dims(t2, 3)) ==> [2, 3, 5, 1]
```
This operation requires that:
`-1-input.dims() <= dim <= input.dims()`
This operation is related to `squeeze()`, which removes dimensions of
size 1.
Args:
input: A `Tensor`.
axis: 0-D (scalar). Specifies the dimension index at which to
expand the shape of `input`.
name: The name of the output `Tensor`.
dim: 0-D (scalar). Equivalent to `axis`, to be deprecated.
Returns:
A `Tensor` with the same data as `input`, but its shape has an additional
dimension of size 1 added.
Raises:
ValueError: if both `dim` and `axis` are specified.
"""
# TODO(aselle): Remove argument dim
if dim is not None:
if axis is not None:
raise ValueError("can't specify both 'dim' and 'axis'")
axis = dim
return gen_array_ops._expand_dims(input, axis, name)
# pylint: enable=redefined-builtin,protected-access
# Aliases for some automatically-generated names.
# pylint: disable=protected-access
@deprecated(
"2016-11-30",
"This op will be removed after the deprecation date. "
"Please switch to tf.setdiff1d().")
def listdiff(x, y, out_idx=None, name=None):
return gen_array_ops._list_diff(x, y, out_idx, name)
listdiff.__doc__ = gen_array_ops._list_diff.__doc__ + "\n" + listdiff.__doc__
# pylint: enable=protected-access
# pylint: disable=undefined-variable,protected-access
def setdiff1d(x, y, index_dtype=dtypes.int32, name=None):
return gen_array_ops._list_diff(x, y, index_dtype, name)
setdiff1d.__doc__ = gen_array_ops._list_diff.__doc__
# pylint: enable=protected-access
def broadcast_dynamic_shape(shape_x, shape_y):
# pylint: disable=protected-access
"""Returns the broadcasted dynamic shape between `shape_x` and `shape_y`.
Args:
shape_x: A rank 1 integer `Tensor`, representing the shape of x.
shape_y: A rank 1 integer `Tensor`, representing the shape of y.
Returns:
A rank 1 integer `Tensor` representing the broadcasted shape.
"""
return gen_array_ops._broadcast_args(shape_x, shape_y)
# pylint: enable=protected-access
def broadcast_static_shape(shape_x, shape_y):
"""Returns the broadcasted static shape between `shape_x` and `shape_y`.
Args:
shape_x: A `TensorShape`
shape_y: A `TensorShape`
Returns:
A `TensorShape` representing the broadcasted shape.
Raises:
ValueError: If the two shapes can not be broadcasted.
"""
return common_shapes.broadcast_shape(shape_x, shape_y)
def shape(input, name=None, out_type=dtypes.int32):
# pylint: disable=redefined-builtin
"""Returns the shape of a tensor.
This operation returns a 1-D integer tensor representing the shape of `input`.
For example:
```python
# 't' is [[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, | 4, 4]]]
shape(t) ==> [2, 2, 3]
```
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to `tf.int32`.
Returns:
A `Tensor` of type `out_type`.
"""
return shape_internal(input, name, optimize=True, out_type=out_type)
def shape_internal(input, name=None, optimize=True, out_type=dtypes.int32):
# pylint: disab | le=redefined-builtin
"""Returns the shape of a tensor.
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
optimize: if true, encode the shape as a constant when possible.
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to tf.int32.
Returns:
A `Tensor` of type `out_type`.
"""
with ops.name_scope(name, "Shape", [input]) as name:
if isinstance(
input, (sparse_tensor.SparseTensor, sparse_tensor.SparseTensorValue)):
return gen_math_ops.cast(input.dense_shape, out_type)
else:
input_tensor = ops.convert_to_tensor(input)
input_shape = input_tensor.get_shape()
if optimize and input_shape.is_fully_defined():
return constant(input_shape.as_list(), out_type, name=name)
return gen_array_ops.shape(input, name=name, out_type=out_type)
def size(input, name=None, out_type=dtypes.int32):
# pylint: disable=redefined-builtin
"""Returns the siz |
jelly/calibre | src/calibre/gui2/tweak_book/check_links.py | Python | gpl-3.0 | 6,253 | 0.002239 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2015, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import (unicode_literals, division, absolute_import,
print_function)
from collections import defaultdict
from threading import Thread
from PyQt5.Qt import (
QVBoxLayout, QTextBrowser, QProgressBar, Qt, QWidget, QStackedWidget,
QLabel, QSizePolicy, pyqtSignal, QIcon, QInputDialog
)
from calibre.gui2 import error_dialog
from calibre.gui2.tweak_book import current_container, set_current_container, editors
from calibre.gui2.tweak_book.boss import get_boss
from calibre.gui2.tweak_book.widgets import Dialog
def get_data(name):
'Get the data for name. Returns a unicode string if name is a text document/stylesheet'
if name in editors:
return editors[name].get_raw_data()
return current_container().raw_data(name)
def set_data(name, val):
if name in editors:
editors[name].replace_data(val, only_if_different=False)
else:
with current_container().open(name, 'wb') as f:
f.write(val)
get_boss().set_modified()
class CheckExternalLinks(Dialog):
progress_made = pyqtSignal(object, object)
def __init__(self, parent=None):
Dialog.__init__(self, _('Check external links'), 'check-external-links-dialog', parent)
self.progress_made.connect(self.on_progress_made, type=Qt.QueuedConnection)
def show(self):
if self.rb.isEnabled():
self.refresh()
return Dialog.show(self)
def refresh(self):
self.stack.setCurrentIndex(0)
self.rb.setEnabled(False)
t = Thread(name='CheckLinksMaster', target=self.run)
t.daemon = True
t.start()
def setup_ui(self):
self.pb = pb = QProgressBar(self)
pb.setTextVisible(True)
pb.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
pb.setRange(0, 0)
self.w = w = QWidget(self)
self.w.l = l = QVBoxLayout(w)
l.addStretch(), l.addWidget(pb)
self.w.la = la = QLabel(_('Checking external links, please wait...'))
la.setStyleSheet('QLabel { font-size: 20px; font-weight: bold }')
l.addWidget(la, 0, Qt.AlignCenter), l.addStretch()
self.l = l = QVBoxLayout(self)
self.results = QTextBrowser(self)
self.results.setOpenLinks(False)
self.results.anchorClicked.connect(self.anchor_clicked)
self.stack = s = QStackedWidget(self)
s.addWidget(w), s.addWidget(self.results)
l.addWidget(s)
l.addWidget(self.bb)
self.bb.setStandardButtons(self.bb.Close)
self.rb = b = self.bb.addButton(_('&Refresh'), self.bb.ActionRole)
b.setIcon(QIcon(I('view-refresh.png')))
b.clicked.connect(self.refresh)
def sizeHint(self):
ans = Dialog.sizeHint(self)
ans.setHeight(600)
ans.setWidth(max(ans.width(), 800))
return ans
def run(self):
from calibre.ebooks.oeb.polish.check.links import check_external_links
self.tb = None
self.errors = []
try:
self.errors = check_external_links(current_container(), self.progress_made.emit)
except Exception:
import traceback
self.tb = traceback.format_exc()
self.progress_made.emit(None, None)
def on_progress_made(self, curr, total):
if curr is None:
self.results.setText('')
self.stack.setCurrentIndex(1)
self.fixed_errors = set()
self.rb.setEnabled(True)
if self.tb is not None:
return error_dialog(self, _('Checking failed'), _(
'There was an error while checking links, click "Show details" for more information'),
det_msg=self.tb, show=True)
if not self.errors:
self.results.setText(_('No broken links found'))
else:
self.populate_results()
else:
self.pb.setMaximum(total), self.pb.setValue(curr)
def populate_results(self, preserve_pos=False):
num = len(self.errors) - len(self.fixed_errors)
text = '<h3>%s</h3><ol>' % (ngettext(
'Found a broken link', 'Found {} broken links', num).format(num))
for i, (locations, err, url) in enumerate(self.errors):
if i in self.fixed_errors:
continue
text += '<li><b>%s</b> \xa0<a href="err:%d">[%s]</a><br>%s<br><ul>' % (url, i, _('Fix this link'), err)
for name, href, | lnum, col in locations:
text += '<li>{name} \xa0<a href="loc:{lnum},{name}">[{line}: {lnum}]</a></li>'.format(
name=name, lnum=lnum, line=_('line number'))
text += '</ul></li><hr>'
self.results.setHtml(text)
def anchor_clicked(self, qurl):
url = qurl.toString()
if url.startswith('err:'):
errnum = int(url | [4:])
err = self.errors[errnum]
newurl, ok = QInputDialog.getText(self, _('Fix URL'), _('Enter the corrected URL:') + '\xa0'*40, text=err[2])
if not ok:
return
nmap = defaultdict(set)
for name, href in {(l[0], l[1]) for l in err[0]}:
nmap[name].add(href)
for name, hrefs in nmap.iteritems():
raw = oraw = get_data(name)
for href in hrefs:
raw = raw.replace(href, newurl)
if raw != oraw:
set_data(name, raw)
self.fixed_errors.add(errnum)
self.populate_results()
elif url.startswith('loc:'):
lnum, name = url[4:].partition(',')[::2]
lnum = int(lnum or 1)
editor = get_boss().edit_file(name)
if lnum and editor is not None and editor.has_line_numbers:
editor.current_line = lnum
if __name__ == '__main__':
import sys
from calibre.gui2 import Application
from calibre.gui2.tweak_book.boss import get_container
app = Application([])
set_current_container(get_container(sys.argv[-1]))
d = CheckExternalLinks()
d.refresh()
d.exec_()
del app
|
kerimlcr/ab2017-dpyo | ornek/lollypop/lollypop-0.9.229/src/widgets_indicator.py | Python | gpl-3.0 | 7,051 | 0 | # Copyright (c) 2014-2016 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk, GLib
from gettext import gettext as _
from lollypop.define import Lp
from lollypop.utils import is_loved
class IndicatorWidget(Gtk.EventBox):
"""
Show play/loved indicator
"""
def __init__(self, track_id):
"""
Init indicator widget, ui will be set when needed
@param track id as int
"""
Gtk.EventBox.__init__(self)
self.__id = track_id
self.__pass = 1
self.__timeout_id = None
self.__button = None
self.__stack = None
self.__signal_id = Lp().player.connect('queue-changed',
self.__on_queue_changed)
self.connect('destroy', self.__on_destroy)
# Here a hack to make old Gtk version support min-height css attribute
# min-height = 24px, borders = 2px, we set directly on stack
# min-width = 24px, borders = 2px, padding = 8px
self.set_size_request(34, 26)
def show_spinner(self):
"""
Show spinner
"""
self.__init()
spinner = self.__stack.get_child_by_name('spinner')
if spinner is None:
spinner = Gtk.Spinner()
spinner.set_property('halign', Gtk.Align.CENTER)
spinner.set_property('valign', Gtk.Align.CENTER)
spinner.show()
self.__stack.add_named(spinner, 'spinner')
spinner.start()
self.__stack.set_visible_child_name('spinner')
def empty(self):
"""
Show no indicator
"""
if self.__button is not None:
self.__stack.set_visible_child_name('button')
def play(self):
"""
Show play indicator
"""
self.__init()
self.__clear_spinner()
self.__stack.set_visible_child_name('play')
def loved(self):
"""
Show loved indicator
"""
self.__init()
self.__clear_spinner()
self.__stack.set_visible_child_name('loved')
def play_loved(self):
"""
Show play/loved indicator
"""
self.__init()
self.__clear_spinner()
self.__pass = 1
self.play()
self.__timeout_id = GLib.timeout_add(500, self.__play_loved)
def clear(self):
"""
Clear timeout
"""
if self.__timeout_id is not None:
GLib.source_remove(self.__timeout_id)
self.__timeout_id = None
def update_button(self):
"""
Update button based on queue status
"""
self.__init()
if self.__id in Lp().player.get_queue():
self.__button.set_tooltip_text(_("Remove from queue"))
self.__image.set_from_icon_name('list-remove-symbolic',
Gtk.IconSize.MENU)
else:
self.__button.set_tooltip_text(_("Add to queue"))
self.__image.set_from_icon_name('list-add-symbolic',
Gtk.IconSize.MENU)
#######################
# PRIVATE #
#######################
def __clear_spinner(self):
"""
Clear spinner
"""
spinner = self.__stack.get_child_by_name('spinner')
if spinner is not None:
spinner.stop()
def __init(self):
"""
Init widget content if needed
"""
if self.__button is not None:
return
self.__stack = Gtk.Stack()
self.__stack.set_transition_duration(500)
self.__stack.set_transition_type(Gtk.StackTransitionType.CROSSFADE)
self.__button = Gtk.Button.new()
self.__image = Gtk.Image.new()
self.__button.set_image(self.__image)
self.__button.set_relief(Gtk.ReliefStyle.NONE)
self.__button.get_style_context().add_class('menu-button')
self.__button.get_style_context().add_class('track-menu-button')
self.__image.set_opacity(0.2)
self.__button.show()
self.__button.connect('clicked', self.__on_button_clicked)
play = Gtk.Image.new_from_icon_name('media-playback-start-symbolic',
Gtk.IconSize.MENU)
loved = Gtk.Image.new_from_icon_name('emblem-favorite-symbolic',
Gtk.IconSize.MENU)
self.__stack.add_named(self.__button, 'button')
self.__stack.add_named(play, 'play')
self.__stack.add_named(loved, 'loved')
self.add(self.__stack)
self.connect('enter-notify-event', self.__on_enter_notify)
self.connect('leave-notify-event', self.__on_leave_notify)
self.show_all()
def __on_enter_notify(self, widget, event):
    """
    Show queue button
    @param widget as Gtk.Widget
    @param event as Gdk.Event
    """
    # empty() switches the stack to the (queue) button child.
    self.empty()
def __on_leave_notify(self, widget, event):
    """
    Show love button or play button again
    @param widget as Gtk.Widget
    @param event as Gdk.Event
    """
    # Restore the passive indicator: play icon when this is the current
    # track, loved icon when the track is marked loved.
    if self.__id == Lp().player.current_track.id:
        self.play()
    elif is_loved(self.__id):
        self.loved()
def __on_queue_changed(self, unused):
    """
    Update button widget

    Player 'queue-changed' signal handler; refreshes tooltip/icon.
    """
    self.update_button()
def __on_button_clicked(self, widget):
    """
    Popup menu for track relative to button
    @param widget as Gtk.Button
    """
    # Toggle the track's queue membership.
    if self.__id in Lp().player.get_queue():
        Lp().player.del_from_queue(self.__id)
    else:
        Lp().player.append_to_queue(self.__id)
def __on_destroy(self, widget):
    """Disconnect the player signal and cancel any pending timeout."""
    signal = self.__signal_id
    if signal is not None:
        Lp().player.disconnect(signal)
    self.clear()
def __play_loved(self):
    """
    Show play/loved indicator

    GLib timeout callback: alternates the visible stack child between
    'play' and 'loved'. Returning True keeps the timeout alive.
    """
    # Timeout was cancelled via clear(); stop the GLib source.
    if self.__timeout_id is None:
        return False
    # NOTE(review): the source's indentation was lost; this nesting follows
    # the upstream widget (toggle back to 'loved' every 10th tick) — confirm
    # against the original file.
    if self.__stack.get_visible_child_name() == 'play':
        if self.__pass == 10:
            self.__pass = 0
            self.loved()
    else:
        self.play()
    self.__pass += 1
    return True
|
phe-bioinformatics/PHEnix | phe/metadata/__init__.py | Python | gpl-3.0 | 419 | 0.002387 | """Metadata related information.
:Date: 22 Sep, 2015
:Author: Alex Jironkin
"""
import abc


class PHEMetaData(object):
    """Abstract class to provide interface for meta-data creation."""

    # Python 2 style abstract base class (this file targets Python 2).
    __metaclass__ = abc.ABCMeta

    def __init__(self):
        pass

    @abc.abstractmethod
    def get_meta(self):
        """Get the metadata."""
        raise NotImplementedError("get meta has not been implemented yet.")
|
loulich/Couchpotato | couchpotato/core/downloaders/synology.py | Python | gpl-3.0 | 10,004 | 0.009296 | import json
import traceback
from couchpotato.core._base.downloader.main import DownloaderBase
from couchpotato.core.helpers.encoding import isInt
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
import requests
log = CPLog(__name__)
autoload = 'Synology'
class Synology(DownloaderBase):
    """CouchPotato downloader plugin for Synology DownloadStation."""

    # Release protocols this downloader can accept.
    protocol = ['nzb', 'torrent', 'torrent_magnet']
    # DownloadStation status polling is not implemented here.
    status_support = False

    def download(self, data = None, media = None, filedata = None):
        """
        Send a torrent/nzb file to the downloader

        :param data: dict returned from provider
            Contains the release information
        :param media: media dict with information
            Used for creating the filename when possible
        :param filedata: downloaded torrent/nzb filedata
            The file gets downloaded in the searcher and send to this function
            This is done to have fail checking before using the downloader, so the downloader
            doesn't need to worry about that
        :return: boolean
            One fail returns false, but the downloader should log his own errors
        """

        if not media: media = {}
        if not data: data = {}

        response = False
        log.info('Sending "%s" (%s) to Synology.', (data['name'], data['protocol']))

        # Load host from config and split out port.
        host = cleanHost(self.conf('host'), protocol = False).split(':')
        if not isInt(host[1]):
            log.error('Config properties are not filled in correctly, port is missing.')
            return False

        try:
            # Send request to Synology
            srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'), self.conf('destination'))
            if data['protocol'] == 'torrent_magnet':
                log.info('Adding torrent URL %s', data['url'])
                response = srpc.create_task(url = data['url'])
            elif data['protocol'] in ['nzb', 'torrent']:
                log.info('Adding %s' % data['protocol'])
                if not filedata:
                    log.error('No %s data found', data['protocol'])
                else:
                    filename = data['name'] + '.' + data['protocol']
                    response = srpc.create_task(filename = filename, filedata = filedata)
        except:
            # NOTE(review): bare except + return-in-finally below makes this a
            # deliberate best-effort call that never raises to the caller.
            log.error('Exception while adding torrent: %s', traceback.format_exc())
        finally:
            return self.downloadReturnId('') if response else False

    def test(self):
        """ Check if connection works
        :return: bool
        """

        host = cleanHost(self.conf('host'), protocol = False).split(':')
        try:
            srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
            test_result = srpc.test()
        except:
            return False

        return test_result

    def getEnabledProtocol(self):
        # Narrow the advertised protocols according to the 'use_for' setting.
        if self.conf('use_for') == 'both':
            return super(Synology, self).getEnabledProtocol()
        elif self.conf('use_for') == 'torrent':
            return ['torrent', 'torrent_magnet']
        else:
            return ['nzb']

    def isEnabled(self, manual = False, data = None):
        # Enabled only when the release's protocol matches the 'use_for' setting.
        if not data: data = {}

        for_protocol = ['both']
        if data and 'torrent' in data.get('protocol'):
            for_protocol.append('torrent')
        elif data:
            for_protocol.append(data.get('protocol'))

        return super(Synology, self).isEnabled(manual, data) and\
            ((self.conf('use_for') in for_protocol))
class SynologyRPC(object):
    """SynologyRPC lite library

    Minimal client for the Synology DownloadStation web API:
    login/logout plus task creation from a URL or an uploaded file.
    """

    def __init__(self, host = 'localhost', port = 5000, username = None, password = None, destination = None):
        super(SynologyRPC, self).__init__()
        self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port)
        self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port)
        self.sid = None
        self.username = username
        self.password = password
        self.destination = destination
        self.session_name = 'DownloadStation'

    def _login(self):
        """Authenticate and store the session id. Returns True on success."""
        if self.username and self.password:
            args = {'api': 'SYNO.API.Auth', 'account': self.username, 'passwd': self.password, 'version': 2,
                    'method': 'login', 'session': self.session_name, 'format': 'sid'}
            response = self._req(self.auth_url, args)
            if response['success']:
                self.sid = response['data']['sid']
                log.debug('sid=%s', self.sid)
            else:
                log.error('Couldn\'t log into Synology, %s', response)
            return response['success']
        else:
            log.error('User or password missing, not using authentication.')
            return False

    def _logout(self):
        """Invalidate the current session."""
        args = {'api':'SYNO.API.Auth', 'version':1, 'method':'logout', 'session':self.session_name, '_sid':self.sid}
        return self._req(self.auth_url, args)

    def _req(self, url, args, files = None):
        """POST to the API; always returns a response dict (never raises)."""
        response = {'success': False}
        try:
            req = requests.post(url, data = args, files = files, verify = False)
            req.raise_for_status()
            response = json.loads(req.text)
            if response['success']:
                log.info('Synology action successfull')
            return response
        except requests.ConnectionError as err:
            log.error('Synology connection error, check your config %s', err)
        except requests.HTTPError as err:
            log.error('SynologyRPC HTTPError: %s', err)
        except Exception as err:
            log.error('Exception: %s', err)
        finally:
            # Deliberate best-effort: the default failure dict is returned
            # when any of the except branches ran.
            return response

    def create_task(self, url = None, filename = None, filedata = None):
        """ Creates new download task in Synology DownloadStation. Either specify
        url or pair (filename, filedata).

        Returns True if task was created, False otherwise
        """
        result = False
        # login
        if self._login():
            args = {'api': 'SYNO.DownloadStation.Task',
                    'version': '1',
                    'method': 'create',
                    '_sid': self.sid}

            if self.destination and len(self.destination) > 0:
                args['destination'] = self.destination

            if url:
                log.info('Login success, adding torrent URI')
                args['uri'] = url
                response = self._req(self.download_url, args = args)
                if response['success']:
                    log.info('Response: %s', response)
                else:
                    log.error('Response: %s', response)
                    synoerrortype = {
                        400 : 'File upload failed',
                        401 : 'Max number of tasks reached',
                        402 : 'Destination denied',
                        403 : 'Destination does not exist',
                        404 : 'Invalid task id',
                        405 : 'Invalid task action',
                        406 : 'No default destination',
                        407 : 'Set destination failed',
                        408 : 'File does not exist'
                    }
                    # Use .get() so an unexpected/missing error code cannot
                    # raise KeyError while logging.
                    errorcode = response.get('error', {}).get('code')
                    log.error('DownloadStation returned the following error : %s',
                              synoerrortype.get(errorcode, 'Unknown error (code %s)' % errorcode))
                result = response['success']
            elif filename and filedata:
                log.info('Login success, adding torrent')
                files = {'file': (filename, filedata)}
                response = self._req(self.download_url, args = args, files = files)
                log.info('Response: %s', response)
                result = response['success']
            else:
                log.error('Invalid use of SynologyRPC.create_task: either url or filename+filedata must be specified')

            self._logout()

        return result

    def test(self):
        """Return True when login with the configured credentials works."""
        return bool(self._login())
config = [{
'name': 'synology',
'groups': [
{
'tab': 'downloaders',
'list': 'download_providers',
|
Nixonite/Reddit-Bot-PipeTobacco | PipeTobaccoBot.py | Python | gpl-2.0 | 2,001 | 0.02099 | #!/usr/bin/python
# Python 2 script: replies to reddit comments containing tobaccoreviews.com
# blend links with the blend's name scraped from the page.
from HTMLParser import HTMLParser
from ReddiWrap import ReddiWrap
import re
import time
import urllib2

# Module-global filled in by MyHTMLParser while parsing a review page.
tobaccoName = ""


class MyHTMLParser(HTMLParser):
    """Extract the reviewed blend name from a tobaccoreviews.com page."""

    def handle_starttag(self, tag, attrs):
        global tobaccoName
        if tag == "meta":
            attr_map = dict(attrs)
            # <meta itemprop="itemreviewed" content="..."> carries the name.
            if attr_map.get('itemprop') == "itemreviewed":
                tobaccoName = attr_map['content']


reddit = ReddiWrap(user_agent='ReddiWrap')

USERNAME = 'PipeTobaccoBot'
PASSWORD = 'pipetobaccobot.py'
MOD_SUB = 'PipeTobacco'  # A subreddit moderated by USERNAME

# Load cookies from local file and verify cookies are valid
reddit.load_cookies('cookies.txt')

# If we had no cookies, or cookies were invalid,
# or the user we are logging into wasn't in the cookie file:
if not reddit.logged_in or reddit.user.lower() != USERNAME.lower():
    print('logging into %s' % USERNAME)
    login = reddit.login(user=USERNAME, password=PASSWORD)
    if login != 0:
        # 1 means invalid password, 2 means rate limited, -1 means unexpected error
        print('unable to log in: %d' % login)
        print('remember to change USERNAME and PASSWORD')
        exit(1)
    # Save cookies so we won't have to log in again later
    reddit.save_cookies('cookies.txt')

print('logged in as %s' % reddit.user)

uinfo = reddit.user_info()
print('\nlink karma: %d' % uinfo.link_karma)
print('comment karma: %d' % uinfo.comment_karma)

# Retrieve posts in a subreddit
posts = reddit.get('/r/%s' % MOD_SUB)
print('posts in subreddit /r/%s:' % MOD_SUB)

for post in posts:
    if post.clicked is False:
        reddit.fetch_comments(post)
        for comment in post.comments:
            words = re.split("\?|\ |,|!|\n", comment.body)
            for word in words:
                if "http://www.tobaccoreviews.com" in word:
                    linkURL = word
                    # Only blend pages carry the itemreviewed meta tag.
                    if "/blend/" in word:
                        webtry = urllib2.urlopen(word)
                        html = webtry.read()
                        parser = MyHTMLParser()
                        parser.feed(html)
                        replyMessage = "[" + tobaccoName + "](" + word + ")" + "\n\n"
                        reddit.reply(comment, replyMessage)
        post.clicked = True
|
meduz/NeuroTools | src/parameters/__init__.py | Python | gpl-2.0 | 35,171 | 0.002985 | """
NeuroTools.parameters
=====================
A module for dealing with model parameters.
Classes
-------
Parameter
ParameterRange - for specifying a list of possible values for a given parameter.
ParameterReference - specify a parameter in terms of the value of another parameter.
ParameterSet - for representing/managing hierarchical parameter sets.
ParameterTable - a sub-class of ParameterSet that can represent a table of parameters.
ParameterSpace - a collection of ParameterSets, representing multiple points in
parameter space.
**Imported from NeuroTools.parameters.validators**
ParameterSchema - A sub-class of ParameterSet against which other ParameterSets can be validated
against using a Validator as found in the sub-package
NeuroTools.parameters.validators
CongruencyValidator - A CongruencyValidator validates a ParameterSet against a ParameterSchema
via member "validate(parameter_set,parameter_schema)".
ValidationError - The Exception raised when validation fails
SchemaBase - The base class of all "active" Schema objects to be placed in a ParameterSchema.
-> Sublass - Validates the same-path ParameterSet value if it is of the specified type.
-> Eval - Validates the same-path ParameterSet value if the provided expression
evaluates ("eval") to True.
Functions
---------
nesteddictwalk - Walk a nested dict structure, using a generator.
nesteddictflatten - Return a flattened version of a nested dict structure.
string_table - Convert a table written as a multi-line string into a dict of dicts.
Sub-Packages
------------
validators - A module implementing validation of ParameterSets against ParameterSchema.
"""
# import copy
import warnings
import math
import numpy
import operator
from functools import wraps
try:
from urllib2 import build_opener, install_opener, urlopen, ProxyHandler # Python 2
from urlparse import urlparse
except ImportError:
from urllib.request import build_opener, install_opener, urlopen, ProxyHandler # Python 3
from urllib.parse import urlparse
from NeuroTools.random import ParameterDist, GammaDist, UniformDist, NormalDist
from os import environ, path
import random
from copy import copy
try:
basestring
except NameError:
basestring = str
try:
next # Python 3
except NameError:
def next(obj): # Python 2
return obj.next()
__version__ = '0.2.1'
if 'HTTP_PROXY' in environ:
HTTP_PROXY = environ['HTTP_PROXY'] # user has to define it
''' next lines are for communication to urllib of proxy information '''
proxy_support = ProxyHandler({"https": HTTP_PROXY})
opener = build_opener(proxy_support, HTTPHandler)
install_opener(opener)
def isiterable(x):
    """True for iterables that are not strings."""
    return hasattr(x, '__iter__') and not isinstance(x, basestring)
def contains_instance(collection, cls):
    """Return True if any element of *collection* is an instance of *cls*."""
    for item in collection:
        if isinstance(item, cls):
            return True
    return False
def nesteddictwalk(d, separator='.'):
    """
    Walk a nested dict structure, using a generator.

    Composite keys are created by joining each key to the key of the parent
    dict using `separator`.
    """
    for outer_key, outer_value in d.items():
        if not isinstance(outer_value, dict):
            yield outer_key, outer_value
        else:
            # Recurse into the sub-dict and prefix its keys.
            for inner_key, inner_value in nesteddictwalk(outer_value, separator):
                yield "%s%s%s" % (outer_key, separator, inner_key), inner_value
def nesteddictflatten(d, separator='.'):
    """
    Return a flattened version of a nested dict structure.

    Composite keys are created by joining each key to the key of the parent
    dict using `separator`.
    """
    # The walker already yields (composite_key, value) pairs.
    return dict(nesteddictwalk(d, separator))
# --- Parameters, and ranges and distributions of them -------------------
class Parameter(object):
    """A single named parameter value with an optional physical unit."""

    def __init__(self, value, units=None, name=""):
        self.name = name
        self.value = value
        self.units = units
        # Remember the concrete type for validation by callers.
        self.type = type(value)

    def __repr__(self):
        text = "%s = %s" % (self.name, self.value)
        if self.units is not None:
            text = "%s %s" % (text, self.units)
        return text
class ParameterRange(Parameter):
    """
    A class for specifying a list of possible values for a given parameter.

    The value must be an iterable. It acts like a Parameter, but .next() can be
    called to iterate through the values
    """

    def __init__(self, value, units=None, name="", shuffle=False):
        if not isiterable(value):
            raise TypeError("A ParameterRange value must be iterable")
        # Seed the Parameter base (value/type) with the first element.
        Parameter.__init__(self, next(value.__iter__()), units, name)
        self._values = copy(value)
        self._iter_values = self._values.__iter__()
        # NOTE: shuffling after creating the iterator relies on list
        # iterators being index-based; order matters here.
        if shuffle:
            random.shuffle(self._values)

    def __repr__(self):
        units_str = ''
        if self.units:
            units_str = ', units="%s"' % self.units
        return 'ParameterRange(%s%s)' % (self._values.__repr__(), units_str)

    def __iter__(self):
        # Restart iteration from the beginning each time.
        self._iter_values = self._values.__iter__()
        return self._iter_values

    def __next__(self):
        self._value = next(self._iter_values)
        return self._value

    def next(self):
        # Python 2 iterator protocol alias.
        return self.__next__()

    def __len__(self):
        return len(self._values)

    def __eq__(self, o):
        if (type(self) == type(o) and
            self.name == o.name and
            self._values == o._values and
            self.units == o.units):
            return True
        else:
            return False
# --- ReferenceParameter
def reverse(func):
    """Given a function f(a, b), returns f(b, a)"""
    @wraps(func)
    def flipped(a, b):
        return func(b, a)
    # Override what @wraps copied so the wrapper self-describes.
    flipped.__doc__ = "Reversed argument form of %s" % func.__doc__
    flipped.__name__ = "reversed %s" % func.__name__
    return flipped
def lazy_operation(name, reversed=False):
    """Build a method that records operator *name* (optionally with swapped
    operands) onto self.operations for later lazy evaluation, returning self."""
    def op(self, val):
        # Look the operator up at call time so patching `operator` works.
        fn = getattr(operator, name)
        if reversed:
            fn = reverse(fn)
        self.operations.append((fn, val))
        return self
    return op
class ParameterReference(object):
"""
This class provides a place-holder for a reference parameter that will
later be replaced with the value of the parameter pointed to by the
reference. This class also allows for lazy application of operations,
meaning that one can use the reference in simple formulas that will get
evaluated at the moment the reference is replaced.
Check below which operations are supported.
"""
def __init__(self,reference):
object.__init__(self)
self.reference_path = reference
self.operations = []
def _apply_operations(self, x):
for f, arg in self.operations:
try:
if arg is None:
x = f(x)
else:
x = f(x, arg)
except TypeError:
raise TypeError("ParameterReference: error applying operation " + str(f) + " with argument " + str(arg) + " to " + str(x))
return x
def evaluate(self,parameter_set):
"""
This function evaluetes the reference, using the ParameterSet in parameter_set as the source.
"""
ref_value = parameter_set[self.reference_path]
if isinstance(ref_value,ParameterSet):
if self.operations == []:
return ref_value.tree_copy()
else:
raise ValueError("ParameterReference: lazy operations cannot be applied to argument of type ParameterSet> %s" % self.reference_path)
elif isinstance(ref_value,ParameterReference):
#lets wait until the refe
return self
else:
return self._apply_operations(ref_value)
def copy(self):
pr = ParameterReference(self.reference_path)
for f, arg in self.operations:
if isinstance(arg,ParameterReference):
pr.operations.append((f,arg.copy()))
else:
|
daxadal/Computational-Geometry | Practica_2/classify_plane_points_template1.py | Python | apache-2.0 | 2,030 | 0.010837 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 4 13:13:13 2017
@author: avaldes
"""
from __future__ import division, print_function
import numpy as np
import matplotlib.pyplot as plt
from MLP import MLP
#%% Create data
#np.random.seed(5)
nb_black = 50
nb_red = 50
nb_data = nb_black + nb_red
s = np.linspace(0, 2*np.pi, nb_black)
x_black = np.vstack([np.cos(s), np.sin(s)]).T +\
np.random.randn(nb_black, 2) * 0
x_red = np.vstack([2*np.cos(s), 2*np.sin(s)]).T +\
np.random.randn(nb_red, 2) * 0
x_data = np.vstack((x_black, x_red))
t_data = np.asarray([0]*nb_black + [1]*nb_red).reshape(nb_data, 1)
#%% Net structure
D = x_data.shape[1] #initial dimension
K = 1 #final dimension
i_min = 0
j_min = 0
data_min = 1000000
middle_min = data_min
for | j in range(10, 51):
for i in range(10, 51):
K_list = [D, i, j, K] #list of dimensions of layers
activation_functions = [np.tanh,
np.tanh,
MLP.sigmoid]
diff_activation_functions = [MLP.dtanh,
MLP.dtanh,
| MLP.dsigmoid]
#%%
x_middle = (x_black + x_red) / 2
nb_middle = x_middle.shape[0]
t_middle = np.asarray([0.5] * nb_middle).reshape(nb_middle, 1)
mlp = MLP(K_list, activation_functions, diff_activation_functions)
for k in range(20):
mlp.train(x_data, t_data,
epochs=10, batch_size=10,
epsilon=0.1,
print_cost=True)
mlp.get_activations_and_units(x_data)
data_cost = mlp.binary_cross_entropy(mlp.y, t_data)
if data_cost < data_min:
data_min = data_cost
i_min = i
j_min = j
print(i, " ", j, " ", data_min, "\n")
#mlp.get_activations_and_units(x_middle)
#print(mlp.binary_cross_entropy(mlp.y, t_middle))
|
kobejohn/PQHelper | pqhelper/easy.py | Python | mit | 2,931 | 0.000682 | from pqhelper import base, capture, versus
# these parts are heavy so keep one common object for the module
_state_investigator = base.StateInvestigator()
def versus_summaries(turns=2, sims_to_average=2, async_results_q=None):
    """Return summaries of the likely results of each available action.

    Arguments:
    - turns: how many turns to simulate.
        - in 2013, 1 is fast (seconds), 2 is slow (seconds), 3 who knows
    - sims_to_average: how many times to run the simulation
        to get more representative average results of each action.
    - async_results_q: provide a multiprocessing Queue on which
        the summaries of each turn will be placed. this is an asynchronous
        alternative to waiting for the final return value
    """
    board, player, opponent, extra_actions = _state_investigator.get_versus()
    if extra_actions:
        extra_actions = 1  # limit value for realistic time
    if board is None:
        return tuple()
    averaged_summaries = list()  # default return value is empty
    # keep a separate advisor for each simulation to average
    advisors = list()
    for _ in range(sims_to_average):
        advisor = versus.Advisor()
        advisor.reset(board, player, opponent, extra_actions)
        advisors.append(advisor)
    # provide async sim results per turn; final results as return value
    for turn in range(turns):
        # store {action: list of results from each simulation}
        summaries_by_action = dict()
        for advisor in advisors:
            advisor.simulate_next_turn()
            for s in advisor.sorted_current_summaries():
                summaries_by_action.setdefault(s.action, list()).append(s)
        # now all sims and analysis for this turn have been completed
        averaged_summaries = list()
        for action, summaries in summaries_by_action.items():
            board = summaries[0].board  # any board. they are all the same
            action = summaries[0].action  # any action. they are all the same
            score_sum = sum(s.score for s in summaries)
            score_avg = score_sum / len(summaries)
            manadrain_sum = sum(s.mana_drain_leaves for s in summaries)
            leaves_sum = sum(s.total_leaves for s in summaries)
            avg_summary = base.Summary(board, action, score_avg,
                                       manadrain_sum, leaves_sum)
            averaged_summaries.append(avg_summary)
        averaged_summaries.sort(key=lambda s: s.score, reverse=True)
        # option to provide the results asynchronously
        if async_results_q is not None:
            async_results_q.put(averaged_summaries)
    return averaged_summaries
def capture_solution():
    """Return the sequence of steps that solves the current capture board,
    or an empty tuple when no board could be read from the screen."""
    board = _state_investigator.get_capture()
    if board is None:
        return tuple()
    steps = capture.capture(board)
    return steps
if __name__ == '__main__':
pass
|
mike-lawrence/fileForker | fileForker/__init__.py | Python | gpl-3.0 | 916 | 0.050218 | import billiard
billiard.forking_enable(0)
########
# Define a class that spawns a new process
########
class childClass:
def __init__(self,childFile):
self.childFile = ch | ildFile
self.initDict = {}
self.qTo = billiard.Queue()
self.qFrom = billiard.Queue()
self.started | = False
def f(self,childFile,qTo,qFrom,initDict):
execfile(childFile)
import sys
sys.exit()
def start(self):
if self.started:
print 'Oops! Already started this child.'
else:
self.process = billiard.Process( target=self.f , args=(self.childFile,self.qTo,self.qFrom,self.initDict,) )
self.process.start()
self.started = True
def isAlive(self):
return self.process.is_alive()
def stop(self,killAfter=None):
if not self.started:
print 'Oops! Not started yet!'
else:
self.qTo.put('quit')
self.process.join(timeout=killAfter)
if self.process.is_alive():
self.process.terminate()
return None
|
dockerian/pyapi | demo/setup.py | Python | apache-2.0 | 1,256 | 0 | """
# setup module
"""
import os
from se | tuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
with o | pen(os.path.join(HERE, 'README.md')) as f:
README = f.read()
# with open(os.path.join(HERE, 'CHANGES.txt')) as f:
# CHANGES = f.read()
CHANGES = "Changes"
PREQ = [
'pyramid',
'python-keystoneclient',
'python-swiftclient',
'pyyaml',
'responses',
'sniffer',
'waitress',
]
PREQ_DEV = [
'coverage',
'flake8',
'mock',
'nose',
'pylint',
'pyramid',
'tissue',
'webtest',
'tox',
]
setup(
name='codebase',
version='0.0.1',
description='Coding demo for Python',
long_description=README + '\n\n' + CHANGES,
classifiers=["Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application"],
author='Jason Zhu',
author_email='yuxin.zhu@hp.com',
url='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=PREQ,
tests_require=PREQ_DEV,
test_suite="codebase",
entry_points="""\
[paste.app_factory]
main = codebase:main
""",
)
|
cpaulik/xray | xray/test/test_backends.py | Python | apache-2.0 | 38,439 | 0.000364 | from io import BytesIO
from threading import Lock
import contextlib
import itertools
import os.path
import pickle
import shutil
import tempfile
import unittest
import sys
import numpy as np
import pandas as pd
import xray
from xray import Dataset, open_dataset, open_mfdataset, backends, save_mfdataset
from xray.backends.common import robust_getitem
from xray.core.pycompat import iteritems, PY3
from . import (TestCase, requires_scipy, requires_netCDF4, requires_pydap,
requires_scipy_or_netCDF4, requires_dask, requires_h5netcdf,
has_netCDF4, has_scipy)
from .test_dataset import create_test_data
try:
import netCDF4 as nc4
except ImportError:
pass
try:
import dask
import dask.array as da
except ImportError:
pass
def open_example_dataset(name, *args, **kwargs):
    """Open a dataset from the 'data' directory next to this test module."""
    path = os.path.join(os.path.dirname(__file__), 'data', name)
    return open_dataset(path, *args, **kwargs)
def create_masked_and_scaled_data():
    """Dataset whose 'x' variable carries mask-and-scale CF encoding."""
    values = np.array([np.nan, np.nan, 10, 10.1, 10.2])
    encoding = {'_FillValue': -1, 'add_offset': 10,
                'scale_factor': np.float32(0.1), 'dtype': 'i2'}
    return Dataset({'x': ('t', values, {}, encoding)})
def create_encoded_masked_and_scaled_data():
    """Encoded (on-disk) form of create_masked_and_scaled_data."""
    attrs = {'_FillValue': -1, 'add_offset': 10,
             'scale_factor': np.float32(0.1)}
    raw = [-1, -1, 0, 1, 2]
    return Dataset({'x': ('t', raw, attrs)})
class TestCommon(TestCase):
    def test_robust_getitem(self):
        """robust_getitem retries a flaky __getitem__ until it succeeds."""

        class UnreliableArrayFailure(Exception):
            pass

        class UnreliableArray(object):
            # Fails the first `failures` accesses, then behaves normally.
            def __init__(self, array, failures=1):
                self.array = array
                self.failures = failures

            def __getitem__(self, key):
                if self.failures > 0:
                    self.failures -= 1
                    raise UnreliableArrayFailure
                return self.array[key]

        array = UnreliableArray([0])
        with self.assertRaises(UnreliableArrayFailure):
            array[0]
        self.assertEqual(array[0], 0)

        actual = robust_getitem(array, 0, catch=UnreliableArrayFailure,
                                initial_delay=0)
        self.assertEqual(actual, 0)
class Only32BitTypes(object):
    """Marker mixin: the backend under test only supports 32-bit types
    (e.g. netCDF3 stores strings as characters)."""
    pass
class DatasetIOTestCases(object):
def create_store(self):
raise NotImplementedError
def roundtrip(self, data, **kwargs):
raise NotImplementedError
def test_zero_dimensional_variable(self):
expected = create_test_data()
expected['float_var'] = ([], 1.0e9, {'units': 'units of awesome'})
expected['string_var'] = ([], np.array('foobar', dtype='S'))
with self.roundtrip(expected) as actual:
self.assertDatasetAllClose(expected, actual)
def test_write_store(self):
expected = create_test_data()
with self.create_store() as store:
expected.dump_to_store(store)
# we need to cf decode the store because it has time and
# non-dimension coordinates
actual = xray.decode_cf(store)
self.assertDatasetAllClose(expected, actual)
def test_roundtrip_test_data(self):
expected = create_test_data()
with self.roundtrip(expected) as actual:
self.assertDatasetAllClose(expected, actual)
def test_load(self):
expected = create_test_data()
@contextlib.contextmanager
def assert_loads(vars=None):
if vars is None:
vars = expected
with self.roundtrip(expected) as actual:
for v in actual.values():
self.assertFalse(v._in_memory)
yield actual
for k, v in actual.items():
if k in vars:
self.assertTrue(v._in_memory)
self.assertDatasetAllClose(expected, actual)
with self.assertRaises(AssertionError):
# make sure the contextmanager works!
with assert_loads() as ds:
pass
with assert_loads() as ds:
ds.load()
with assert_loads(['var1', 'dim1', 'dim2']) as ds:
ds['var1'].load()
# verify we can read data even after closing the file
with self.roundtrip(expected) as ds:
actual = ds.load()
self.assertDatasetAllClose(expected, actual)
def test_roundtrip_None_variable(self):
expected = Dataset({None: (('x', 'y'), [[0, 1], [2, 3]])})
with self.roundtrip(expected) as actual:
self.assertDatasetAllClose(expected, actual)
def test_roundtrip_object_dtype(self):
floats = np.array([0.0, 0.0, 1.0, 2.0, 3.0], dtype=object)
floats_nans = np.array([np.nan, np.nan, 1.0, 2.0, 3.0], dtype=object)
letters = np.array(['ab', 'cdef', 'g'], dtype=object)
letters_nans = np.array(['ab', 'cdef', np.nan], dtype=object)
all_nans = np.array([np.nan, np.nan], dtype=object)
original = Dataset({'floats': ('a', floats),
'floats_nans': ('a', floats_nans),
'letters': ('b', letters),
'letters_nans': ('b', letters_nans),
'all_nans': ('c', all_nans),
'nan': ([], np.nan)})
expected = original.copy(deep=True)
if isinstance(self, Only32BitTypes):
# for netCDF3 tests, expect the results to come back as characters
expected['letters_nans'] = expected['letters_nans'].astype('S')
expected['letters'] = expected['letters'].astype('S')
with self.roundtrip(original) as actual:
try:
self.assertDatasetIdentical(expected, actual)
except AssertionError:
# Most stores use '' for nans in strings, but some don't
# first try the ideal case (where the store returns exactly)
# the original Dataset), then try a more realistic case.
# ScipyDataTest, NetCDF3ViaNetCDF4DataTest and NetCDF4DataTest
# all end up using this case.
expected['letters_nans'][-1] = ''
self.assertDatasetIdentical(expected, actual)
def test_roundtrip_string_data(self):
expected = Dataset({'x': ('t', ['ab', 'cdef'])})
with self.roundtrip(expected) as actual:
if isinstance(self, Only32BitTypes):
expected['x'] = expected['x'].astype('S')
self.assertDatasetIdentical(expected, actual)
def test_roundtrip_datetime_data(self):
times = pd.to_datetime(['2000-01-01', '2000-01-02', 'NaT'])
expected = Dataset({'t': ('t', times), 't0': times[0]})
with self.roundtrip(expected) as actual:
self.assertDatasetIdentical(expected, actual)
def test_roundtrip_timedelta_data(self):
time_deltas = pd.to_timedelta(['1h', '2h', 'NaT'])
expected = Dataset({'td': ('td', time_deltas), 'td0': time_deltas[0]})
with self.roundtrip(expected) as actual:
self.assertDatasetIdentical(expected, actual)
def test_roundtrip_float64_data(self):
expected = Dataset({'x': ('y', np.array([1.0, 2.0, np.pi], dtype='float64'))})
with self.roundtrip(expected) as actual:
self.assertDatasetIdentical(expected, actual)
def test_roundtrip_example_1_netcdf(self):
expected = open_example_dataset('example_1.nc')
with self.roundtrip(expected) as actual:
# we allow the attributes to differ since that
# will depend on the encoding used. For example,
# without CF encoding 'actual' will end up with
# a dtype attribute.
self.assertDatasetEqual(expected, actual)
def test_roundtrip_coordinates(self):
original = Dataset({'foo': ('x', [0, 1])},
{'x': [2, 3], 'y': ('a', [42]), 'z': ('x', [4, 5])})
with self.roundtrip(original) as actual:
self.assertDatasetIdentical(original, actual)
expected = original.drop('foo')
with self.roundtrip(expected) |
googleads/google-ads-python | google/ads/googleads/v9/common/types/metrics.py | Python | apache-2.0 | 52,950 | 0.001001 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v9.enums.types import interaction_event_type
from google.ads.googleads.v9.enums.types import quality_score_bucket
__protobuf__ = proto.module(
package="google.ads.googleads.v9.common",
marshal="google.ads.googleads.v9",
manifest={"Metrics",},
)
class Metrics(proto.Message):
r"""Metrics data.
Attributes:
absolute_top_impression_percentage (float):
The percent of your ad impressions that are
shown as the very first ad above the organic
search results.
This field is a member of `oneof`_ ``_absolute_top_impression_percentage``.
active_view_cpm (float):
| Average cost of viewable impressions
(``active_view_impressions``).
This field is a member of `oneof`_ ``_active_view_cpm``.
active_view_ctr (float):
Active view measurable clicks divided by
active view viewable impressions. This metric is
reported only for display network.
This field is a member of `oneof`_ ``_active_view_ctr``.
| active_view_impressions (int):
A measurement of how often your ad has become
viewable on a Display Network site.
This field is a member of `oneof`_ ``_active_view_impressions``.
active_view_measurability (float):
The ratio of impressions that could be
measured by Active View over the number of
served impressions.
This field is a member of `oneof`_ ``_active_view_measurability``.
active_view_measurable_cost_micros (int):
The cost of the impressions you received that
were measurable by Active View.
This field is a member of `oneof`_ ``_active_view_measurable_cost_micros``.
active_view_measurable_impressions (int):
The number of times your ads are appearing on
placements in positions where they can be seen.
This field is a member of `oneof`_ ``_active_view_measurable_impressions``.
active_view_viewability (float):
The percentage of time when your ad appeared
on an Active View enabled site (measurable
impressions) and was viewable (viewable
impressions).
This field is a member of `oneof`_ ``_active_view_viewability``.
all_conversions_from_interactions_rate (float):
All conversions from interactions (as oppose
to view through conversions) divided by the
number of ad interactions.
This field is a member of `oneof`_ ``_all_conversions_from_interactions_rate``.
all_conversions_value (float):
The value of all conversions.
This field is a member of `oneof`_ ``_all_conversions_value``.
all_conversions_value_by_conversion_date (float):
The value of all conversions. When this column is selected
with date, the values in date column means the conversion
date. Details for the by_conversion_date columns are
available at
https://support.google.com/google-ads/answer/9549009.
all_conversions (float):
The total number of conversions. This includes all
conversions regardless of the value of
include_in_conversions_metric.
This field is a member of `oneof`_ ``_all_conversions``.
all_conversions_by_conversion_date (float):
The total number of conversions. This includes all
conversions regardless of the value of
include_in_conversions_metric. When this column is selected
with date, the values in date column means the conversion
date. Details for the by_conversion_date columns are
available at
https://support.google.com/google-ads/answer/9549009.
all_conversions_value_per_cost (float):
The value of all conversions divided by the
total cost of ad interactions (such as clicks
for text ads or views for video ads).
This field is a member of `oneof`_ ``_all_conversions_value_per_cost``.
all_conversions_from_click_to_call (float):
The number of times people clicked the "Call"
button to call a store during or after clicking
an ad. This number doesn't include whether or
not calls were connected, or the duration of any
calls. This metric applies to feed items only.
This field is a member of `oneof`_ ``_all_conversions_from_click_to_call``.
all_conversions_from_directions (float):
The number of times people clicked a "Get
directions" button to navigate to a store after
clicking an ad. This metric applies to feed
items only.
This field is a member of `oneof`_ ``_all_conversions_from_directions``.
all_conversions_from_interactions_value_per_interaction (float):
The value of all conversions from
interactions divided by the total number of
interactions.
This field is a member of `oneof`_ ``_all_conversions_from_interactions_value_per_interaction``.
all_conversions_from_menu (float):
The number of times people clicked a link to
view a store's menu after clicking an ad.
This metric applies to feed items only.
This field is a member of `oneof`_ ``_all_conversions_from_menu``.
all_conversions_from_order (float):
The number of times people placed an order at
a store after clicking an ad. This metric
applies to feed items only.
This field is a member of `oneof`_ ``_all_conversions_from_order``.
all_conversions_from_other_engagement (float):
The number of other conversions (for example,
posting a review or saving a location for a
store) that occurred after people clicked an ad.
This metric applies to feed items only.
This field is a member of `oneof`_ ``_all_conversions_from_other_engagement``.
all_conversions_from_store_visit (float):
Estimated number of times people visited a
store after clicking an ad. This metric applies
to feed items only.
This field is a member of `oneof`_ ``_all_conversions_from_store_visit``.
all_conversions_from_store_website (float):
The number of times that people were taken to
a store's URL after clicking an ad.
This metric applies to feed items only.
This field is a member of `oneof`_ ``_all_conversions_from_store_website``.
average_cost (float):
The average amount you pay per interaction.
This amount is the total cost of your ads
divided by the total number of interactions.
This field is a member of `oneof`_ ``_average_cost``.
average_cpc (float):
The total cost of all clicks divided by the
total number of clicks received.
This field is a member of `oneof`_ ``_average_cpc``.
average_cpe (float):
The average amount that you've been charged
for an ad engagement. This amount is the total
cost of all ad engagements divided by the total
number of ad engagements.
This field is a |
google/grr | grr/core/grr_response_core/lib/parsers/linux_release_parser_test.py | Python | apache-2.0 | 6,093 | 0.004103 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""Unit test for the linux distribution parser."""
import io
import os
from absl import app
from grr_response_core.lib.parsers import linux_release_parser
from grr_response_core.lib.rdfvalues import anomaly as rdf_anomaly
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
from grr.test_lib import test_lib
class LinuxReleaseParserTest(test_lib.GRRBaseTest):
  """Test parsing of linux distribution collection."""

  def setUp(self):
    super().setUp()
    # All fixture files used below live in <base_path>/parser_test.
    self.parser_test_dir = os.path.join(self.base_path, "parser_test")

  def testMalformedLsbReleaseFile(self):
    """A malformed lsb-release yields an incomplete (None, 0, 0) result."""
    path = os.path.join(self.parser_test_dir, "lsb-release-bad")
    with io.open(path, "r") as f:
      data = f.read()
    parser = linux_release_parser.LsbReleaseParseHandler(data)
    complete, result = parser.Parse()
    self.assertFalse(complete)
    self.assertTupleEqual((None, 0, 0), result)

  def testGoodLsbReleaseFile(self):
    """A well-formed Ubuntu lsb-release parses completely."""
    path = os.path.join(self.parser_test_dir, "lsb-release")
    with io.open(path, "r") as f:
      data = f.read()
    parser = linux_release_parser.LsbReleaseParseHandler(data)
    complete, result = parser.Parse()
    self.assertTrue(complete)
    self.assertTupleEqual(("Ubuntu", 14, 4), result)

  def testFallbackLsbReleaseFile(self):
    """A non-Ubuntu lsb-release returns the distro name but no version."""
    path = os.path.join(self.parser_test_dir, "lsb-release-notubuntu")
    with io.open(path, "r") as f:
      data = f.read()
    parser = linux_release_parser.LsbReleaseParseHandler(data)
    complete, result = parser.Parse()
    self.assertFalse(complete)
    self.assertTupleEqual(("NotUbuntu", 0, 0), result)

  def testReleaseFileRedHatish(self):
    """A RedHat-style release file yields the name plus major/minor version."""
    path = os.path.join(self.parser_test_dir, "oracle-release")
    with io.open(path, "r") as f:
      data = f.read()
    parser = linux_release_parser.ReleaseFileParseHandler("OracleLinux")
    # The handler is primed with the distro name, then called with the data.
    parser(data)
    complete, result = parser.Parse()
    self.assertTrue(complete)
    self.assertTupleEqual(("OracleLinux", 6, 5), result)

  def testMalformedReleaseFileRedHatish(self):
    """A malformed RedHat-style release file parses incompletely."""
    path = os.path.join(self.parser_test_dir, "oracle-release-bad")
    with io.open(path, "r") as f:
      data = f.read()
    parser = linux_release_parser.ReleaseFileParseHandler("OracleLinux")
    parser(data)
    complete, result = parser.Parse()
    self.assertFalse(complete)
    self.assertTupleEqual(("OracleLinux", 0, 0), result)

  def _CreateTestData(self, testdata):
    """Create 'stats' and 'file_objects' lists for passing to ParseMultiple."""
    pathspecs = []
    files = []
    for filepath, localfile in testdata:
      # The caller (ParseFiles) consumes and is responsible for these handles.
      files.append(open(localfile, "rb"))
      p = rdf_paths.PathSpec(path=filepath)
      pathspecs.append(p)
    return pathspecs, files

  def testEndToEndUbuntu(self):
    """End-to-end: Ubuntu is detected from /etc/lsb-release alone."""
    parser = linux_release_parser.LinuxReleaseParser()
    testdata = [
        ("/etc/lsb-release", os.path.join(self.parser_test_dir, "lsb-release")),
    ]
    pathspecs, files = self._CreateTestData(testdata)
    result = list(parser.ParseFiles(None, pathspecs, files)).pop()
    self.assertIsInstance(result, rdf_protodict.Dict)
    self.assertEqual("Ubuntu", result["os_release"])
    self.assertEqual(14, result["os_major_version"])
    self.assertEqual(4, result["os_minor_version"])

  def testEndToEndOracleLinux(self):
    """End-to-end: falls back to /etc/oracle-release for a foreign lsb-release."""
    parser = linux_release_parser.LinuxReleaseParser()
    testdata = [
        ("/etc/lsb-release",
         os.path.join(self.parser_test_dir, "lsb-release-notubuntu")),
        ("/etc/oracle-release",
         os.path.join(self.parser_test_dir, "oracle-release")),
    ]
    pathspecs, files = self._CreateTestData(testdata)
    result = list(parser.ParseFiles(None, pathspecs, files)).pop()
    self.assertIsInstance(result, rdf_protodict.Dict)
    self.assertEqual("OracleLinux", result["os_release"])
    self.assertEqual(6, result["os_major_version"])
    self.assertEqual(5, result["os_minor_version"])

  def testEndToEndAmazon(self):
    """End-to-end: Amazon Linux is detected from /etc/system-release."""
    parser = linux_release_parser.LinuxReleaseParser()
    test_data = [
        ("/etc/system-release",
         os.path.join(self.parser_test_dir, "amazon-system-release")),
    ]
    pathspecs, file_objects = self._CreateTestData(test_data)
    actual_result = list(parser.ParseFiles(None, pathspecs, file_objects))
    expected_result = [
        rdf_protodict.Dict({
            "os_release": "AmazonLinuxAMI",
            "os_major_version": 2018,
            "os_minor_version": 3,
        })
    ]
    self.assertCountEqual(actual_result, expected_result)

  def testEndToEndCoreOS(self):
    """End-to-end: CoreOS is detected from /etc/os-release."""
    parser = linux_release_parser.LinuxReleaseParser()
    test_data = [
        ("/etc/os-release",
         os.path.join(self.parser_test_dir, "coreos-os-release")),
    ]
    pathspecs, file_objects = self._CreateTestData(test_data)
    actual_result = list(parser.ParseFiles(None, pathspecs, file_objects))
    expected_result = [
        rdf_protodict.Dict({
            "os_release": "Container Linux by CoreOS",
            "os_major_version": 2023,
            "os_minor_version": 4,
        })
    ]
    self.assertCountEqual(actual_result, expected_result)

  def testEndToEndGoogleCOS(self):
    """End-to-end: Google Container-Optimized OS via /etc/os-release."""
    parser = linux_release_parser.LinuxReleaseParser()
    test_data = [
        ("/etc/os-release",
         os.path.join(self.parser_test_dir, "google-cos-os-release")),
    ]
    pathspecs, file_objects = self._CreateTestData(test_data)
    actual_result = list(parser.ParseFiles(None, pathspecs, file_objects))
    expected_result = [
        rdf_protodict.Dict({
            "os_release": "Container-Optimized OS",
            "os_major_version": 69,
            "os_minor_version": 0,
        })
    ]
    self.assertCountEqual(actual_result, expected_result)

  def testAnomaly(self):
    """With no input files the parser emits a single Anomaly."""
    parser = linux_release_parser.LinuxReleaseParser()
    result = list(parser.ParseFiles(None, [], []))
    self.assertLen(result, 1)
    self.assertIsInstance(result[0], rdf_anomaly.Anomaly)
def main(args):
  # Delegate to the GRR test runner.
  test_lib.main(args)


if __name__ == "__main__":
  app.run(main)
|
PawarPawan/h2o-v3 | h2o-py/tests/testdir_algos/glm/pyunit_wide_dataset_largeGLM.py | Python | apache-2.0 | 1,540 | 0.01039 | import sys
sys.path.insert(1, "../../../")
import h2o
import numpy as np
def wide_dataset_large(ip, port):
  """Train a binomial GLM on the wide Arcene dataset and sanity-check AUC.

  ``ip``/``port`` are required by the h2o pyunit runner's test signature but
  are unused here (the h2o connection is established by the runner).
  """
  print("Reading in Arcene training data for binomial modeling.")
  trainDataResponse = np.genfromtxt(h2o.locate("smalldata/arcene/arcene_train_labels.labels"), delimiter=' ')
  # Labels come in as -1/+1; remap to 0/1 for binomial modeling.
  trainDataResponse = np.where(trainDataResponse == -1, 0, 1)
  trainDataFeatures = np.genfromtxt(h2o.locate("smalldata/arcene/arcene_train.data"), delimiter=' ')
  trainData = h2o.H2OFrame(np.column_stack((trainDataResponse, trainDataFeatures)).tolist())

  print("Run model on 3250 columns of Arcene with strong rules off.")
  model = h2o.glm(x=trainData[1:3250], y=trainData[0].asfactor(), family="binomial", lambda_search=False, alpha=[1])

  print("Test model on validation set.")
  validDataResponse = np.genfromtxt(h2o.locate("smalldata/arcene/arcene_valid_labels.labels"), delimiter=' ')
  validDataResponse = np.where(validDataResponse == -1, 0, 1)
  validDataFeatures = np.genfromtxt(h2o.locate("smalldata/arcene/arcene_valid.data"), delimiter=' ')
  validData = h2o.H2OFrame(np.column_stack((validDataResponse, validDataFeatures)).tolist())
  prediction = model.predict(validData)

  print("Check performance of predictions.")
  performance = model.model_performance(validData)

  print("Check that prediction AUC better than guessing (0.5).")
  assert performance.auc() > 0.5, "predictions should be better than pure chance"
if __name__ == "__main__":
  # Allow running this test standalone through the h2o pyunit runner.
  h2o.run_test(sys.argv, wide_dataset_large)
|
inspirehep/sqlalchemy | test/orm/test_mapper.py | Python | mit | 101,238 | 0.005897 | """General mapper operations with an emphasis on selecting/loading."""
from sqlalchemy.testing import assert_raises, assert_raises_message
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy import MetaData, Integer, String, ForeignKey, func, util
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.engine import default
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, class_mapper, configure_mappers, reconstructor, \
validates, aliased, defer, deferred, synonym, attributes, \
column_property, composite, dynamic_loader, \
comparable_property, Session
from sqlalchemy.orm.persistence import _sort_states
from sqlalchemy.testing import eq_, AssertsCompiledSQL, is_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing.assertsql import CompiledSQL
import logging
import logging.handlers
class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
def test_pr | op_shadow(self):
"""A backref name may not shadow an existing property name."""
Address, addresses, users, User = (self.classe | s.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(Address, addresses)
mapper(User, users,
properties={
'addresses':relationship(Address, backref='email_address')
})
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
    def test_update_attr_keys(self):
        """test that update()/insert() use the correct key when given InstrumentedAttributes."""
        User, users = self.classes.User, self.tables.users

        # 'foobar' maps the 'name' column under a different attribute key.
        mapper(User, users, properties={
            'foobar':users.c.name
        })

        users.insert().values({User.foobar:'name1'}).execute()
        eq_(sa.select([User.foobar]).where(User.foobar=='name1').execute().fetchall(), [('name1',)])

        users.update().values({User.foobar:User.foobar + 'foo'}).execute()
        eq_(sa.select([User.foobar]).where(User.foobar=='name1foo').execute().fetchall(), [('name1foo',)])
def test_utils(self):
users = self.tables.users
addresses = self.tables.addresses
Address = self.classes.Address
from sqlalchemy.orm.base import _is_mapped_class, _is_aliased_class
class Foo(object):
x = "something"
@property
def y(self):
return "something else"
m = mapper(Foo, users, properties={"addresses":relationship(Address)})
mapper(Address, addresses)
a1 = aliased(Foo)
f = Foo()
for fn, arg, ret in [
(_is_mapped_class, Foo.x, False),
(_is_mapped_class, Foo.y, False),
(_is_mapped_class, Foo.name, False),
(_is_mapped_class, Foo.addresses, False),
(_is_mapped_class, Foo, True),
(_is_mapped_class, f, False),
(_is_mapped_class, a1, True),
(_is_mapped_class, m, True),
(_is_aliased_class, a1, True),
(_is_aliased_class, Foo.x, False),
(_is_aliased_class, Foo.y, False),
(_is_aliased_class, Foo, False),
(_is_aliased_class, f, False),
(_is_aliased_class, a1, True),
(_is_aliased_class, m, False),
]:
assert fn(arg) == ret
def test_entity_descriptor(self):
users = self.tables.users
from sqlalchemy.orm.base import _entity_descriptor
class Foo(object):
x = "something"
@property
def y(self):
return "something else"
m = mapper(Foo, users)
a1 = aliased(Foo)
f = Foo()
for arg, key, ret in [
(m, "x", Foo.x),
(Foo, "x", Foo.x),
(a1, "x", a1.x),
(users, "name", users.c.name)
]:
assert _entity_descriptor(arg, key) is ret
def test_friendly_attribute_str_on_uncompiled_boom(self):
User, users = self.classes.User, self.tables.users
def boom():
raise Exception("it broke")
mapper(User, users, properties={
'addresses':relationship(boom)
})
# test that QueryableAttribute.__str__() doesn't
# cause a compile.
eq_(str(User.addresses), "User.addresses")
def test_exceptions_sticky(self):
"""test preservation of mapper compile errors raised during hasattr(),
as well as for redundant mapper compile calls. Test that
repeated calls don't stack up error messages.
"""
Address, addresses, User = (self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(Address, addresses, properties={
'user':relationship(User)
})
try:
hasattr(Address.user, 'property')
except sa.orm.exc.UnmappedClassError:
assert util.compat.py32
for i in range(3):
assert_raises_message(sa.exc.InvalidRequestError,
"^One or more mappers failed to "
"initialize - can't proceed with "
"initialization of other mappers. "
"Original exception was: Class "
"'test.orm._fixtures.User' is not mapped$"
, configure_mappers)
    def test_column_prefix(self):
        """column_prefix='_' maps columns as _name/_id; a synonym restores access."""
        users, User = self.tables.users, self.classes.User

        mapper(User, users, column_prefix='_', properties={
            'user_name': synonym('_name')
        })

        s = create_session()
        u = s.query(User).get(7)
        eq_(u._name, 'jack')
        eq_(u._id,7)
        # The synonym is usable in queries under its public name.
        u2 = s.query(User).filter_by(user_name='jack').one()
        assert u is u2
def test_no_pks_1(self):
User, users = self.classes.User, self.tables.users
s = sa.select([users.c.name]).alias('foo')
assert_raises(sa.exc.ArgumentError, mapper, User, s)
def test_no_pks_2(self):
User, users = self.classes.User, self.tables.users
s = sa.select([users.c.name]).alias()
assert_raises(sa.exc.ArgumentError, mapper, User, s)
def test_reconfigure_on_other_mapper(self):
"""A configure trigger on an already-configured mapper
still triggers a check against all mappers."""
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users)
sa.orm.configure_mappers()
assert sa.orm.mapperlib.Mapper._new_mappers is False
m = mapper(Address, addresses, properties={
'user': relationship(User, backref="addresses")})
assert m.configured is False
assert sa.orm.mapperlib.Mapper._new_mappers is True
u = User()
assert User.addresses
assert sa.orm.mapperlib.Mapper._new_mappers is False
    def test_configure_on_session(self):
        """Smoke test: Session.connection() accepts a mapper argument."""
        User, users = self.classes.User, self.tables.users

        m = mapper(User, users)
        session = create_session()
        session.connection(m)
def test_incomplete_columns(self):
"""Loading from a select which does not contain all columns"""
addresses, Address = self.tables.addresses, self.classes.Address
mapper(Address, addresses)
s = create_session()
a = s.query(Address).from_statement(
sa.select([addresses.c.id, addresses.c.user_id]).
order_by(addresses.c.id)).first()
eq_(a.user_id, 7)
eq_(a.id, 1)
# email address auto-defers
assert 'email_addres' not in a.__dict__
eq_(a.email_address, 'jack@bean.com')
def test_column_not_present(self):
users, addresses, User = (self.tables.users,
self.tab |
QISKit/qiskit-sdk-py | qiskit/extensions/standard/u3.py | Python | apache-2.0 | 1,759 | 0 | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=invalid-name
"""
Two-pulse single-qubit gate.
"""
import numpy
from qiskit.circuit import Gate
from qiskit.circuit import QuantumCircuit
class U3Gate(Gate):
    """Two-pulse single-qubit gate (generic single-qubit rotation).

    Matrix::

        U3(theta, phi, lam) =
            [[cos(theta/2),              -exp(i*lam)*sin(theta/2)],
             [exp(i*phi)*sin(theta/2),    exp(i*(phi+lam))*cos(theta/2)]]
    """

    def __init__(self, theta, phi, lam, label=None):
        """Create new two-pulse single qubit gate."""
        super().__init__("u3", 1, [theta, phi, lam], label=label)

    def inverse(self):
        """Invert this gate.

        u3(theta, phi, lamb)^dagger = u3(-theta, -lam, -phi)
        """
        return U3Gate(-self.params[0], -self.params[2], -self.params[1])

    def to_matrix(self):
        """Return a Numpy.array for the U3 gate."""
        theta, phi, lam = self.params
        theta, phi, lam = float(theta), float(phi), float(lam)
        return numpy.array(
            [[
                numpy.cos(theta / 2),
                -numpy.exp(1j * lam) * numpy.sin(theta / 2)
            ],
             [
                 numpy.exp(1j * phi) * numpy.sin(theta / 2),
                 numpy.exp(1j * (phi + lam)) * numpy.cos(theta / 2)
             ]],
            dtype=complex)
def u3(self, theta, phi, lam, q):
    """Apply u3 to q."""
    return self.append(U3Gate(theta, phi, lam), [q], [])


# Monkey-patch QuantumCircuit so circuits gain a .u3(...) convenience method.
QuantumCircuit.u3 = u3
|
h4ck3rm1k3/FEC-Field-Documentation | fec/version/v5_0/F2.py | Python | unlicense | 1,867 | 0.001071 | import fechbase
class Records(fechbase.RecordsBase):
    """Field layout for FEC form F2 (Statement of Candidacy)."""

    def __init__(self):
        fechbase.RecordsBase.__init__(self)
        # Ordered column definitions: display name + position ("number") in
        # the delimited record.
        self.fields = [
            {'name': 'FORM TYPE', 'number': '1'},
            {'name': 'FILER FEC CAND ID', 'number': '2'},
            {'name': 'CANDIDATE NAME', 'number': '3'},
            {'name': 'STREET 1', 'number': '4'},
            {'name': 'STREET 2', 'number': '5'},
            {'name': 'CITY', 'number': '6'},
            {'name': 'STATE', 'number': '7'},
            {'name': 'ZIP', 'number': '8'},
            {'name': 'PTY/CODE', 'number': '9'},
            {'name': 'CAN/OFFICE', 'number': '10'},
            {'name': 'CAN/STATE', 'number': '11'},
            {'name': 'CAN/DIST', 'number': '12'},
            {'name': 'YEAR OF ELECTION 1900-2999', 'number': '13'},
            {'name': 'FEC COMMITTEE ID NUMBER (PCC)', 'number': '14'},
            {'name': 'COMMITTEE NAME (PCC)', 'number': '15'},
            {'name': 'STREET 1', 'number': '16'},
            {'name': 'STREET 2', 'number': '17'},
            {'name': 'CITY', 'number': '18'},
            {'name': 'STATE', 'number': '19'},
            {'name': 'ZIP', 'number': '20'},
            {'name': 'FEC COMMITTEE ID NUMBER (Auth)', 'number': '21'},
            {'name': 'COMMITTEE NAME (Auth)', 'number': '22'},
            {'name': 'STREET 1', 'number': '23'},
            {'name': 'STREET 2', 'number': '24'},
            {'name': 'CITY', 'number': '25'},
            {'name': 'STATE', 'number': '26'},
            {'name': 'ZIP', 'number': '27'},
            {'name': 'NAME/CAN (as signed)', 'number': '28'},
            {'name': 'Signed', 'number': '29-'},
            {'name': 'PRI PERSONAL FUNDS DECLARED', 'number': '30'},
            {'name': 'GEN PERSONAL FUNDS DECLARED', 'number': '31'},
        ]
        self.fields_names = self.hash_names(self.fields)
|
CheckMateIO/checkmate_python | checkmate/test/test_properties.py | Python | mit | 1,134 | 0 | import unittest
import json
import responses |
import checkmate
class TestProperties(unittest.TestCase):
    """Unit tests for the CheckMate properties client."""

    def setUp(self):
        # Point the client at the dev partner API; HTTP is stubbed per-test
        # with the `responses` library, so no real requests are made.
        cm = checkmate.CheckMate(api_key='12345',
                                 api_base='http://partners.checkmate.dev')
        self.properties_client = cm.properties
        self.property_response = {
            'id': 123,
            'name': 'New Hotel'
        }
        self.search_params = {
            'name': 'New Hotel',
            'phone': '15555555555',
            'address': {
                'street': '1625 Main St',
                'city': 'San Francisco',
                'region': 'CA',
                'postal_code': '94115',
                'country_code': 'US'
            }
        }

    @responses.activate
    def test_search(self):
        """search() GETs /properties and returns the matching property."""
        url = 'http://partners.checkmate.dev/properties'
        responses.add(responses.GET, url,
                      body=json.dumps(self.property_response), status=200,
                      content_type='application/json')
        property = self.properties_client.search(self.search_params)
        self.assertTrue(property.id == 123)
|
gppezzi/easybuild-framework | easybuild/toolchains/gqacml.py | Python | gpl-2.0 | 1,671 | 0.001795 | ##
# Copyright 2012-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for gqacml compiler toolchain (includes GCC, QLogicMPI, ACML, BLACS, ScaLAPACK and FFTW).
:author: Kenneth Hoste (Ghent University)
"""
from easybuild.toolchains.gcc import GccToolchain
from easybuild.toolchains.fft.fftw import Fftw
from easybuild.toolchains.linalg.acml import Acml
from easybuild.toolchains.linalg.scalapack import ScaLAPACK
from easybuild.toolchains.mpi.qlogicmpi import QLogicMPI
class Gqacml(GccToolchain, QLogicMPI, Acml, ScaLAPACK, Fftw):
    """Compiler toolchain with GCC, QLogic MPI, ACML, ScaLAPACK and FFTW."""
    # Toolchain name as referenced from easyconfig files.
    NAME = 'gqacml'
    # The plain GCC toolchain is the direct parent in the toolchain hierarchy.
    SUBTOOLCHAIN = GccToolchain.NAME
|
hgl888/TeamTalk | win-client/3rdParty/src/json/scons-tools/targz.py | Python | apache-2.0 | 3,137 | 0.01562 | """tarball
Tool-specific initialization for tarball.
"""
## Commands to tackle a command based implementation:
##to unpack on the fly...
##gunzip < FILE.tar.gz | tar xvf -
##to pack on the fly...
##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz
i | mport os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
# Probe for gzip/tarfile support; the tool degrades to a no-op without it.
try:
    import gzip
    import tarfile
    internal_targz = 1
except ImportError:
    internal_targz = 0
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
if internal_targz:
    def targz(target, source, env):
        """SCons action: pack all sources into target[0] as a .tar.gz archive."""
        def archive_name( path ):
            # Store entries relative to TARGZ_BASEDIR rather than absolute.
            path = os.path.normpath( os.path.abspath( path ) )
            common_path = os.path.commonprefix( (base_dir, path) )
            archive_name = path[len(common_path):]
            return archive_name

        def visit(tar, dirname, names):
            # os.path.walk callback: add plain files only, directories recurse.
            for name in names:
                path = os.path.join(dirname, name)
                if os.path.isfile(path):
                    tar.add(path, archive_name(path) )
        compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
        base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath )
        target_path = str(target[0])
        fileobj = gzip.GzipFile( target_path, 'wb', compression )
        tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
        # NOTE(review): os.path.walk exists only on Python 2 — confirm the
        # supported SCons/Python version before reusing this tool on Python 3.
        # NOTE(review): the loop variable shadows the 'source' list.
        for source in source:
            source_path = str(source)
            if source.isdir():
                os.path.walk(source_path, visit, tar)
            else:
                tar.add(source_path, archive_name(source_path) ) # filename, arcname
        tar.close()

    targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])
    # Builder factory; 'emitter' is accepted but currently unused —
    # presumably kept for API symmetry with other tool modules (TODO confirm).
    def makeBuilder( emitter = None ):
        return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
                                     source_factory = SCons.Node.FS.Entry,
                                     source_scanner = SCons.Defaults.DirScanner,
                                     suffix = '$TARGZ_SUFFIX',
                                     multi = 1)

    TarGzBuilder = makeBuilder()
    def generate(env):
        """Add Builders and construction variables for tar.gz to an Environment.

        The following environment variables may be set:
        TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level).
        TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative
                       to something other than top-dir).
        """
        env['BUILDERS']['TarGz'] = TarGzBuilder
        env['TARGZ_COM'] = targzAction
        env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9
        env['TARGZ_SUFFIX']  = '.tar.gz'
        env['TARGZ_BASEDIR'] = env.Dir('.')     # Sources archive name are made relative to that directory.
else:
    def generate(env):
        # gzip/tarfile unavailable: install a no-op so the tool still loads.
        pass
def exists(env):
    # The tool is only usable when gzip and tarfile imported successfully.
    return internal_targz
|
facoy/facoy | FrontEnd/wsgi.py | Python | apache-2.0 | 176 | 0 | from Server_GitHub import application
if __name__ == "__main__":
while True:
try:
| application.run()
except Exception as e:
| print e
|
kkaczkowski/Hoover | robot/motors/motor_i2c.py | Python | apache-2.0 | 6,150 | 0.003415 | #!/usr/bin/python3
import re
import smbus
# ===========================================================================
# I2C Class
# ===========================================================================
class MotorI2C(object):
@staticmethod
def getPiRevision():
"Gets the version number of the Raspberry Pi board"
# Revision list available at: http://elinux.org/RPi_HardwareHistory#Board_Revision_History
try:
with open('/proc/cpuinfo', 'r') as infile:
for line in infile:
# Match a line of the form "Revision : 0002" while ignoring extra
# info in front of the revsion (like 1000 when the Pi was over-volted).
match = re.match('Revision\s+:\s+.*(\w{4})$', line)
if match and match.group(1) in ['0000', '0002', '0003']:
# Return revision 1 if revision ends with 0000, 0002 or 0003.
return 1
elif match:
# Assume revision 2 if revision ends with any other 4 chars.
return 2
# Couldn't find the revision, assume revision 0 like older code for compatibility.
return 0
except:
return 0
@staticmethod
def getPiI2CBusNumber():
# Gets the I2C bus number /dev/i2c#
return 1 if MotorI2C.getPiRevision() > 1 else 0
def __init__(self, address, busnum=-1, debug=False):
self.address = address
# By default, the correct I2C bus is auto-detected using /proc/cpuinfo
# Alternatively, you can hard-code the bus version below:
# self.bus = smbus.SMBus(0); # Force I2C0 (early 256MB Pi's)
# self.bus = smbus.SMBus(1); # Force I2C1 (512MB Pi's)
self.bus = smbus.SMBus(busnum if busnum >= 0 else MotorI2C.getPiI2CBusNumber())
self.debug = debug
def reverseByteOrder(self, data):
"Reverses the byte order of an int (16-bit) or long (32-bit) value"
# Courtesy Vishal Sapre
byteCount = len(hex(data)[2:].replace('L', '')[::2])
val = 0
for i in range(byteCount):
val = (val << 8) | (data & 0xff)
data >>= 8
return val
def errMsg(self):
print("Error accessing 0x%02X: Check your I2C address" % self.address)
return -1
def write8(self, reg, value):
"Writes an 8-bit value to the specified register/address"
try:
self.bus.write_byte_data(self.address, reg, value)
if self.debug:
print("I2C: Wrote 0x%02X to register 0x%02X" % (value, reg))
except IOError as err:
return self.errMsg()
def write16(self, reg, value): |
"Writes a 16-bit value to the specified register/address pair"
try:
self.bus.write_word_data(self.address, reg, value)
if self.debug:
print("I2C: Wrote 0x%02X to register pair 0x%02X,0x%02X" % (value, reg, reg + 1))
except IOErro as err:
return self.errMsg()
def writeRaw8(self, value):
"Writes an 8-bit value on the bus"
try:
self.bus.write_byte(s | elf.address, value)
if self.debug:
print("I2C: Wrote 0x%02X" % value)
except IOError as err:
return self.errMsg()
def writeList(self, reg, list):
"Writes an array of bytes using I2C format"
try:
if self.debug:
print("I2C: Writing list to register 0x%02X:" % reg)
print(list)
self.bus.write_i2c_block_data(self.address, reg, list)
except IOError as err:
return self.errMsg()
def readList(self, reg, length):
"Read a list of bytes from the I2C device"
try:
results = self.bus.read_i2c_block_data(self.address, reg, length)
if self.debug:
print("I2C: Device 0x%02X returned the following from reg 0x%02X" % (self.address, reg))
print(results)
return results
except IOError as err:
return self.errMsg()
def readU8(self, reg):
"Read an unsigned byte from the I2C device"
try:
result = self.bus.read_byte_data(self.address, reg)
if self.debug:
print("I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" % (self.address, result & 0xFF, reg))
return result
except IOError as err:
return self.errMsg()
def readS8(self, reg):
"Reads a signed byte from the I2C device"
try:
result = self.bus.read_byte_data(self.address, reg)
if result > 127: result -= 256
if self.debug:
print("I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" % (self.address, result & 0xFF, reg))
return result
except IOError as err:
return self.errMsg()
def readU16(self, reg, little_endian=True):
    """Read an unsigned 16-bit value from register *reg*.

    Set ``little_endian=False`` to byte-swap the result.
    """
    try:
        result = self.bus.read_word_data(self.address, reg)
        # Swap bytes if using big endian because read_word_data assumes little
        # endian on ARM (little endian) systems.
        if not little_endian:
            result = ((result << 8) & 0xFF00) + (result >> 8)
        if (self.debug):
            print("I2C: Device 0x%02X returned 0x%04X from reg 0x%02X" % (self.address, result & 0xFFFF, reg))
        return result
    except IOError as err:
        return self.errMsg()
def readS16(self, reg, little_endian=True):
    """Read a signed 16-bit value from register *reg*."""
    try:
        result = self.readU16(reg, little_endian)
        # Convert the unsigned word to its two's-complement signed value.
        if result > 32767: result -= 65536
        return result
    except IOError as err:
        # NOTE(review): readU16 already catches IOError internally and returns
        # errMsg()'s value, so this handler appears unreachable -- confirm.
        return self.errMsg()
if __name__ == '__main__':
    # Smoke test: constructing the wrapper probes the default I2C bus.
    try:
        bus = MotorI2C(address=0)
        print("Default I2C bus is accessible")
    except:
        # Bare except kept deliberately: any failure means the bus is unusable.
        print("Error accessing default I2C bus")
|
cbrunet/ouf | src/ouf/filemodel/filesystemitem.py | Python | gpl-3.0 | 455 | 0 | from PyQt5.QtCore import Qt
from ouf.filemodel.filemodelitem import FileModelI | tem, FileItemType
class FileSystemItem(FileModelItem):
    """File-model item backed by the local filesystem."""

    def __init__(self, path):
        super().__init__(FileItemType.filesystem, path)

    def data(self, column, role=Qt.DisplayRole):
        # The root entry shows a translated label in the name column; every
        # other request is answered by the base implementation.
        if column == 0 and role == Qt.DisplayRole and self.is_root:
            return _("File System")
        return super().data(column, role)
|
ytsarev/rally | tests/deploy/engines/test_devstack.py | Python | apache-2.0 | 4,266 | 0 | # Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jsonschema
import mock
import uuid
from rally.deploy.engines import devstack
from rally.openstack.common import test
SAMPLE_CONFIG = {
'name': 'DevstackEngine',
'provider': {
'name': 'ExistingServers',
'credentials': [{'user': 'root', 'host': 'example.com'}],
},
'localrc': {
'ADMIN_PASSWORD': 'secret',
},
}
DEVSTACK_REPO = 'https://github.com/openstack-dev/devstack.git'
class DevstackEngineTestCase(test.BaseTestCase):
    """Unit tests for the DevStack deployment engine (all I/O mocked)."""

    def setUp(self):
        super(DevstackEngineTestCase, self).setUp()
        self.deployment = {
            'uuid': str(uuid.uuid4()),
            'config': SAMPLE_CONFIG,
        }
        self.engine = devstack.DevstackEngine(self.deployment)

    def test_invalid_config(self):
        # A non-string 'name' must be rejected by the engine's JSON schema.
        self.deployment = SAMPLE_CONFIG.copy()
        self.deployment['config'] = {'name': 42}
        self.assertRaises(jsonschema.ValidationError,
                          devstack.DevstackEngine, self.deployment)

    def test_construct(self):
        # localrc from SAMPLE_CONFIG is copied onto the engine.
        self.assertEqual(self.engine.localrc['ADMIN_PASSWORD'], 'secret')

    @mock.patch('rally.deploy.engines.devstack.open', create=True)
    def test_prepare_server(self, m_open):
        m_open.return_value = 'fake_file'
        server = mock.Mock()
        server.password = 'secret'
        self.engine.prepare_server(server)
        # prepare_server must run the install script, then set the rally
        # user's password over ssh.
        calls = [
            mock.call('/bin/sh -e', stdin='fake_file'),
            mock.call('chpasswd', stdin='rally:secret'),
        ]
        self.assertEqual(calls, server.ssh.run.mock_calls)
        filename = m_open.mock_calls[0][1][0]
        self.assertTrue(filename.endswith('rally/deploy/engines/'
                                          'devstack/install.sh'))
        self.assertEqual([mock.call(filename, 'rb')], m_open.mock_calls)

    @mock.patch('rally.deploy.engine.EngineFactory.get_provider')
    @mock.patch('rally.deploy.engines.devstack.get_updated_server')
    @mock.patch('rally.deploy.engines.devstack.get_script')
    @mock.patch('rally.deploy.serverprovider.provider.Server')
    @mock.patch('rally.deploy.engines.devstack.objects.Endpoint')
    def test_deploy(self, m_endpoint, m_server, m_gs, m_gus, m_gp):
        m_gp.return_value = fake_provider = mock.Mock()
        server = mock.Mock()
        server.host = 'host'
        m_endpoint.return_value = 'fake_endpoint'
        m_gus.return_value = ds_server = mock.Mock()
        m_gs.return_value = 'fake_script'
        server.get_credentials.return_value = 'fake_credentials'
        fake_provider.create_servers.return_value = [server]
        with mock.patch.object(self.engine, 'deployment') as m_d:
            endpoints = self.engine.deploy()
        self.assertEqual(['fake_endpoint'], endpoints)
        m_endpoint.assert_called_once_with('http://host:5000/v2.0/', 'admin',
                                           'secret', 'admin', 'admin')
        # Credentials of the provisioned server are recorded on the deployment.
        m_d.add_resource.assert_called_once_with(
            info='fake_credentials',
            provider_name='DevstackEngine',
            type='credentials')
        repo = 'https://github.com/openstack-dev/devstack.git'
        cmd = '/bin/sh -e -s %s master' % repo
        server.ssh.run.assert_called_once_with(cmd, stdin='fake_script')
        # The devstack server then receives the localrc and runs stack.sh.
        ds_calls = [
            mock.call.ssh.run('cat > ~/devstack/localrc', stdin=mock.ANY),
            mock.call.ssh.run('~/devstack/stack.sh')
        ]
        self.assertEqual(ds_calls, ds_server.mock_calls)
        localrc = ds_server.mock_calls[0][2]['stdin']
        self.assertIn('ADMIN_PASSWORD=secret', localrc)
|
devopshq/youtrack | youtrack/import_helper.py | Python | mit | 7,567 | 0.003568 | # -*- coding: utf-8 -*-
from youtrack import YouTrackException
def utf8encode(source):
    """Return *source* encoded as UTF-8 when it is a text string.

    Non-string values (bytes, None, numbers, ...) pass through unchanged.
    """
    if isinstance(source, str):
        return source.encode('utf-8')
    return source
def _create_custom_field_prototype(connection, cf_type, cf_name, auto_attached=False, additional_params=None):
    """Create a custom-field prototype unless one named *cf_name* exists.

    Raises LogicException when an existing field has a different type.
    """
    if additional_params is None:
        additional_params = dict([])
    field = _get_custom_field(connection, cf_name)
    if field is not None:
        # An existing field is fine only when its type matches.
        if field.type != cf_type:
            msg = "Custom field with name [ %s ] already exists. It has type [ %s ] instead of [ %s ]" % \
                  (utf8encode(cf_name), field.type, cf_type)
            raise LogicException(msg)
    else:
        connection.create_custom_field_detailed(cf_name, cf_type, False, True, auto_attached, additional_params)
def _get_custom_field(connection, cf_name):
    """Return the custom field named *cf_name* (case-insensitive), or None."""
    existing_fields = [item for item in connection.get_custom_fields() if utf8encode(item.name).lower() ==
                       utf8encode(cf_name).lower()]
    if len(existing_fields):
        return existing_fields[0]
    return None
def create_custom_field(connection, cf_type, cf_name, auto_attached, value_names=None, bundle_policy="0"):
    """
    Creates custom field prototype(if not exist) and sets default values bundle if needed

    Args:
        connection: An opened Connection instance.
        cf_type: Type of custom field to be created
        cf_name: Name of custom field that should be created (if not exists)
        auto_attached: If this field should be auto attached or not.
        value_names: Values, that should be attached with this cf by default.
            If None, no bundle is created to this field, if empty, empty bundle is created.
        bundle_policy: ???

    Raises:
        LogicException: If custom field already exists, but has wrong type.
        YouTrackException: If something is wrong with queries.
    """
    # Bundle-less case: just make sure the prototype exists.
    if (value_names is None) and (not auto_attached or "[" not in cf_type):
        _create_custom_field_prototype(connection, cf_type, cf_name, auto_attached)
        return
    if value_names is None:
        value_names = set([])
    else:
        value_names = set(value_names)
    field = _get_custom_field(connection, cf_name)
    if field is not None:
        if hasattr(field, "defaultBundle"):
            # Reuse the field's existing default bundle.
            bundle = connection.get_bundle(field.type, field.defaultBundle)
        elif field.autoAttached:
            return
        else:
            bundle = create_bundle_safe(connection, cf_name + "_bundle", cf_type)
    else:
        bundle = create_bundle_safe(connection, cf_name + "_bundle", cf_type)
    _create_custom_field_prototype(connection, cf_type, cf_name, auto_attached,
                                   {"defaultBundle": bundle.name,
                                    "attachBundlePolicy": bundle_policy})
    for value_name in value_names:
        try:
            connection.add_value_to_bundle(bundle, value_name)
        except YouTrackException:
            # Value already present in the bundle; ignore the conflict.
            pass
#
# values_to_add = calculate_missing_value_names(bundle, value_names)
# [connection.addValueToBundle(bundle, name) for name in values_to_add]
# if field is None:
# bundle_name = cf_name + "_bundle"
# _create_bundle_safe(connection, bundle_name, cf_type)
# bundle = connection.getBundle(cf_type, bundle_name)
# values_to_add = calculate_missing_value_names(bundle, value_names)
#
#
# for value in values_to_add:
# connection.addValueToBundle(bundle, value)
#
#
def process_custom_field(connection, project_id, cf_type, cf_name, value_names=None):
    """
    Creates custom field and attaches it to the project. If custom field already exists and has type
    cf_type it is attached to the project. If it has another type, LogicException is raised. If project field already
    exists, uses it and bundle from it. If not, creates project field and bundle with name
    <cf_name>_bundle_<project_id> for it.
    Adds value_names to bundle.

    Args:
        connection: An opened Connection instance.
        project_id: Id of the project to attach CF to.
        cf_type: Type of cf to be created.
        cf_name: Name of cf that should be created (if not exists) and attached to the project (if not yet attached)
        value_names: Values, that cf must have. If None, does not create any bundle for the field. If empty list,
            creates bundle, but does not create any value_names in it. If bundle already contains
            some value_names, only value_names that do not already exist are added.

    Raises:
        LogicException: If custom field already exists, but has wrong type.
        YouTrackException: If something is wrong with queries.
    """
    _create_custom_field_prototype(connection, cf_type, cf_name)
    # Only bundle-backed field types can carry values.
    if cf_type[0:-3] not in connection.bundle_types:
        value_names = None
    elif value_names is None:
        value_names = []
    existing_project_fields = [item for item in connection.getProjectCustomFields(project_id) if
                               utf8encode(item.name) == cf_name]
    if len(existing_project_fields):
        if value_names is None:
            return
        # Reuse the bundle already attached to the project field.
        bundle = connection.getBundle(cf_type, existing_project_fields[0].bundle)
        values_to_add = calculate_missing_value_names(bundle, value_names)
    else:
        if value_names is None:
            connection.createProjectCustomFieldDetailed(project_id, cf_name, "No " + cf_name)
            return
        bundle = create_bundle_safe(connection, cf_name + "_bundle_" + project_id, cf_type)
        values_to_add = calculate_missing_value_names(bundle, value_names)
        connection.createProjectCustomFieldDetailed(project_id, cf_name, "No " + cf_name,
                                                    params={"bundle": bundle.name})
    for name in values_to_add:
        connection.addValueToBundle(bundle, bundle.createElement(name))
def add_values_to_bundle_safe(connection, bundle, values):
    """
    Adds values to specified bundle. Checks, whether each value already contains in bundle. If yes, it is not added.

    Args:
        connection: An opened Connection instance.
        bundle: Bundle instance to add values in.
        values: Values, that should be added in bundle.

    Raises:
        YouTrackException: if something is wrong with queries.
    """
    for value in values:
        try:
            connection.addValueToBundle(bundle, value)
        except YouTrackException as e:
            # HTTP 409 means the value already exists -- report and carry on;
            # anything else is a real failure.
            if e.response.status == 409:
                print("Value with name [ %s ] already exists in bundle [ %s ]" %
                      (utf8encode(value.name), utf8encode(bundle.name)))
            else:
                raise e
def create_bundle_safe(connection, bundle_name, bundle_type):
    """Create (or fetch, when it already exists) the named bundle of *bundle_type*.

    The trailing 3 characters of *bundle_type* are stripped to look up the
    bundle class in ``connection.bundle_types``.
    """
    bundle = connection.bundle_types[bundle_type[0:-3]](None, None)
    bundle.name = bundle_name
    try:
        connection.createBundle(bundle)
    except YouTrackException as e:
        # 409: bundle exists already -- fine, we fetch it below.
        if e.response.status == 409:
            print("Bundle with name [ %s ] already exists" % bundle_name)
        else:
            raise e
    return connection.getBundle(bundle_type, bundle_name)
def calculate_missing_value_names(bundle, value_names):
    """Return the entries of *value_names* not yet in *bundle* (case-insensitive),
    preserving their original order and spelling."""
    existing = {element.name.lower() for element in bundle.values}
    return [name for name in value_names if name.lower() not in existing]
class LogicException(Exception):
    """Raised when an import operation conflicts with existing YouTrack state
    (e.g. a custom field already exists with a different type)."""

    def __init__(self, msg):
        super(LogicException, self).__init__(msg)
|
MakeHer/edx-platform | lms/djangoapps/support/views/__init__.py | Python | agpl-3.0 | 180 | 0 | """
Aggregate all views for the support app.
"""
# pylint: disable=wildcard-import
f | rom .index import *
from .certificate import *
from .enrollm | ents import *
from .refund import *
|
labhackercd/colab-edemocracia-plugin | src/colab_edemocracia/views.py | Python | gpl-3.0 | 12,087 | 0 | # -*- coding: utf-8 -*-
from django.views.generic import View
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.core.mail import EmailMultiAlternatives
from django.shortcuts import redirect, resolve_url
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect, csrf_exempt
from django.utils.http import is_safe_url
from django.conf import settings
from django.http import (
HttpResponseRedirect, HttpResponseBadRequest, JsonResponse)
from django.contrib.auth import (
REDIRECT_FIELD_NAME, login as auth_login, update_session_auth_hash
)
from django.contrib.auth.forms import AuthenticationForm, PasswordChangeForm
from django.contrib.auth.decorators import login_required
from django.contrib.sites.shortcuts import get_current_site
from django.template.response import TemplateResponse
from django.template.loader import render_to_string
from django.views.generic import UpdateView, FormView
from django.views.decorators.clickjacking import xframe_options_exempt
from django.template.defaultfilters import slugify
from .forms.accounts import (
SignUpForm, UserProfileForm, SignUpAjaxForm)
from .models import UserProfile
from colab.accounts.models import EmailAddressValidation, EmailAddress
from colab_edemocracia import captcha
import string
import random
User = get_user_model()
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login(request, template_name='registration/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm,
          current_app=None, extra_context=None):
    """
    Displays the login form and handles the login action.
    """
    # Honour the redirect field from either POST or GET.
    redirect_to = request.POST.get(redirect_field_name,
                                   request.GET.get(redirect_field_name, ''))

    if request.method == "POST":
        form = authentication_form(request, data=request.POST)
        if form.is_valid():

            # Ensure the user-originating redirection url is safe.
            if not is_safe_url(url=redirect_to, host=request.get_host()):
                redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)

            # Okay, security check complete. Log the user in.
            auth_login(request, form.get_user())

            return HttpResponseRedirect(redirect_to)
        else:
            # Invalid credentials: flash an error and fall through to
            # re-render the page.
            messages.add_message(request, messages.ERROR,
                                 u"Usuário ou senhas incorretos.")
    else:
        form = authentication_form(request)

    current_site = get_current_site(request)

    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
@sensitive_post_parameters()
@csrf_protect
@login_required
def password_change(request,
                    template_name='registration/password_change_form.html',
                    post_change_redirect=None,
                    password_change_form=PasswordChangeForm,
                    current_app=None, extra_context=None):
    """Render and process the password-change form for the logged-in user."""
    if post_change_redirect is None:
        # Default landing page after a successful change.
        post_change_redirect = reverse('colab_edemocracia:profile')
    else:
        post_change_redirect = resolve_url(post_change_redirect)
    if request.method == "POST":
        form = password_change_form(user=request.user, data=request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Senha alterada com sucesso!')
            # Updating the password logs out all other sessions for the user
            # except the current one if
            # django.contrib.auth.middleware.SessionAuthenticationMiddleware
            # is enabled.
            update_session_auth_hash(request, form.user)
            return HttpResponseRedirect(post_change_redirect)
    else:
        form = password_change_form(user=request.user)
    context = {
        'form': form,
        'title': 'Alterar senha',
    }
    if extra_context is not None:
        context.update(extra_context)

    if current_app is not None:
        request.current_app = current_app

    return TemplateResponse(request, template_name, context)
def send_verification_email(email, verification_url):
    """Send the signup-confirmation e-mail pointing at *verification_url*."""
    html = render_to_string('emails/edemocracia_new_user.html',
                            {'verification_url': verification_url})
    subject = "Confirmação de cadastro"
    mail = EmailMultiAlternatives(subject=subject, to=[email])
    # Body is HTML-only, attached as an alternative part.
    mail.attach_alternative(html, 'text/html')
    mail.send()
def generate_username(name):
    """Derive a unique username from an e-mail address or display name.

    Slugifies the part before '@' (capped at 29 chars) and, on collision,
    recurses with one random character appended until the name is free.
    """
    # NOTE(review): ``string.letters`` exists only on Python 2
    # (``string.ascii_letters`` on Python 3) -- confirm target runtime.
    name = slugify(name.split('@')[0])[:29]
    if User.objects.filter(username=name).exists():
        return generate_username(
            name + random.choice(string.letters + string.digits))
    else:
        return name
class SignUpView(View):
    """Handle user registration submitted from the home page (POST only)."""

    http_method_names = [u'post']

    def post(self, request):
        if request.user.is_authenticated():
            # NOTE(review): this returns the URL string, not an HttpResponse;
            # probably should be redirect(...) -- confirm.
            return reverse('colab_edemocracia:home')
        user_form = SignUpForm(request.POST)
        if not user_form.is_valid():
            # Surface every form error as a flash message and bounce home.
            for error in user_form.errors.values():
                messages.add_message(request, messages.ERROR, error[0])
            return redirect(reverse('colab_edemocracia:home'))
        # Create the account inactive until the e-mail address is verified.
        user = user_form.save(commit=False)
        user.needs_update = False
        user.is_active = False
        user.set_password(user_form.cleaned_data['password'])
        user.save()
        profile = UserProfile.objects.get(user=user)
        profile.uf = user_form.cleaned_data['uf']
        profile.save()
        email = EmailAddressValidation.create(user.email, user)
        location = reverse('email_view',
                           kwargs={'key': email.validation_key})
        verification_url = request.build_absolute_uri(location)
        send_verification_email(user.email, verification_url)
        # Check if the user's email have been used previously in the mailing
        # lists to link the user to old messages
        email_addr, created = EmailAddress.objects.get_or_create(
            address=user.email)
        if created:
            messages.add_message(
                request, messages.SUCCESS,
                u"Usuário criado com sucesso! Por favor, verifique seu email"
                " para concluir seu cadastro."
            )
        email_addr.real_name = user.get_full_name()
        email_addr.user = user
        email_addr.save()
        return redirect(reverse('colab_edemocracia:home'))
class ProfileView(UpdateView):
    """Let the logged-in user edit their own profile."""

    model = UserProfile
    form_class = UserProfileForm
    template_name = 'profile.html'

    def get_object(self, queryset=None):
        # Always edit the requesting user's profile, ignoring any pk in the URL.
        return self.request.user.profile

    def get_success_url(self):
        messages.success(self.request, 'Perfil modificado com sucesso!')
        return reverse('colab_edemocracia:profile')
class WidgetLoginView(FormView):
    """Login form rendered inside an embeddable (iframe) widget."""

    form_class = AuthenticationForm
    template_name = 'widget/login.html'

    @xframe_options_exempt
    def dispatch(self, *args, **kwargs):
        # Exempt from X-Frame-Options so the widget can be iframed elsewhere.
        return super(WidgetLoginView, self).dispatch(*args, **kwargs)

    def form_valid(self, form):
        login(self.request, form.get_user())
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        next_url = self.request.POST.get('next', None)
        if next_url:
            return next_url
        else:
            # NOTE(review): HttpResponseBadRequest is a response object, not an
            # exception; raising it will produce a TypeError -- confirm intent.
            raise HttpResponseBadRequest()
class WidgetSignUpView(View): |
http_method_names = [u'post']
@xframe_options_exempt
def dispatch(self, *args, **kwargs):
return super(WidgetS | ignUpView, self).dispatch(*args, **kwargs)
def post(self, request):
if request.user.is_authenticated():
if request.kwargs['next']:
return reverse(request.kwargs['next'])
else:
return HttpResponseBadRequest()
user_form = SignUpForm(request.POST |
scheib/chromium | tools/perf/core/services/isolate_service_test.py | Python | bsd-3-clause | 2,741 | 0.004743 | # Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import json
import os
import shutil
import tempfile
import unittest
import zlib
import mock
from core.services import isolate_service
def ContentResponse(content):
    """Build a fake isolate reply carrying *content* inline.

    Mirrors the service's wire format: zlib-compressed, base64-encoded bytes
    under the 'content' key, wrapped in a single-element list.
    """
    payload = zlib.compress(content.encode('utf-8'))
    return [{'content': base64.b64encode(payload)}]
def UrlResponse(url, content):
    """Build a fake isolate reply that redirects to *url* for *content*.

    Returns two side-effect items: the JSON pointer ({'url': ...}) followed by
    the zlib-compressed body fetched from that URL.
    """
    compressed_body = zlib.compress(content.encode('utf-8'))
    return [{'url': url}, compressed_body]
class TestIsolateApi(unittest.TestCase):
    """Tests for the isolate service client, with all HTTP traffic mocked."""

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        # Redirect the module's on-disk cache into this test's temp dir.
        mock.patch('core.services.isolate_service.CACHE_DIR', os.path.join(
            self.temp_dir, 'isolate_cache')).start()
        self.mock_request = mock.patch('core.services.request.Request').start()

    def tearDown(self):
        shutil.rmtree(self.temp_dir)
        mock.patch.stopall()

    def testRetrieve_content(self):
        self.mock_request.side_effect = ContentResponse('OK!')
        self.assertEqual(isolate_service.Retrieve('hash'), b'OK!')

    def testRetrieve_fromUrl(self):
        self.mock_request.side_effect = UrlResponse('http://get/response', 'OK!')
        self.assertEqual(isolate_service.Retrieve('hash'), b'OK!')

    def testRetrieveCompressed_content(self):
        self.mock_request.side_effect = ContentResponse('OK!')
        self.assertEqual(isolate_service.RetrieveCompressed('hash'),
                         zlib.compress(b'OK!'))

    def testRetrieveCompressed_fromUrl(self):
        self.mock_request.side_effect = UrlResponse('http://get/response', 'OK!')
        self.assertEqual(isolate_service.RetrieveCompressed('hash'),
                         zlib.compress(b'OK!'))

    def testRetrieveCompressed_usesCache(self):
        self.mock_request.side_effect = ContentResponse('OK!')
        self.assertEqual(isolate_service.RetrieveCompressed('hash'),
                         zlib.compress(b'OK!'))
        self.assertEqual(isolate_service.RetrieveCompressed('hash'),
                         zlib.compress(b'OK!'))
        # We retrieve the same hash twice, but the request is only made once.
        self.assertEqual(self.mock_request.call_count, 1)

    def testRetrieveFile_succeeds(self):
        # First response is the file map; second is the file's own contents.
        self.mock_request.side_effect = (
            ContentResponse(json.dumps({'files': {'foo': {'h': 'hash2'}}})) +
            UrlResponse('http://get/file/contents', 'nice!'))
        self.assertEqual(isolate_service.RetrieveFile('hash1', 'foo'), b'nice!')

    def testRetrieveFile_fails(self):
        self.mock_request.side_effect = (
            ContentResponse(json.dumps({'files': {'foo': {'h': 'hash2'}}})) +
            UrlResponse('http://get/file/contents', 'nice!'))
        with self.assertRaises(KeyError):
            isolate_service.RetrieveFile('hash1', 'bar')  # File not in isolate.
|
tjcsl/ion | intranet/apps/users/templatetags/grades.py | Python | gpl-2.0 | 329 | 0 | from django import template
from ..models import Grade
register = template.Library()
@register.filter
def to_grade_number(year):
    """Return the numeric grade for the given graduation *year*."""
    return Grade(year).number
@register.filter
def to_grade_name(year):
    """Return the grade name for the given graduation *year*."""
    return Grade(year).name
|
StevenMPhillips/arrow | python/benchmarks/array.py | Python | apache-2.0 | 2,575 | 0.000777 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import pandas as pd
import pyarrow as A
class PyListConversions(object):
    """asv benchmarks: converting plain Python lists to Arrow."""

    param_names = ('size',)
    params = (1, 10 ** 5, 10 ** 6, 10 ** 7)

    def setup(self, n):
        # Fixture shared by the time/peakmem benchmarks below.
        self.data = list(range(n))

    def time_from_pylist(self, n):
        A.from_pylist(self.data)

    def peakmem_from_pylist(self, n):
        A.from_pylist(self.data)
class PandasConversionsBase(object):
    """Shared fixture: builds a one-column DataFrame of the requested dtype."""

    def setup(self, n, dtype):
        # 'float64_nans' is a pseudo-dtype: float64 values with every 10th
        # entry (multiples of ten) replaced by NaN.
        if dtype == 'float64_nans':
            values = np.arange(n).astype('float64')
            values[values % 10 == 0] = np.nan
        else:
            values = np.arange(n).astype(dtype)
        self.data = pd.DataFrame({'column': values})
class PandasConversionsToArrow(PandasConversionsBase):
    """asv benchmarks: pandas DataFrame -> Arrow Table conversion."""

    param_names = ('size', 'dtype')
    params = ((1, 10 ** 5, 10 ** 6, 10 ** 7), ('int64', 'float64', 'float64_nans', 'str'))

    def time_from_series(self, n, dtype):
        A.Table.from_pandas(self.data)

    def peakmem_from_series(self, n, dtype):
        A.Table.from_pandas(self.data)
class PandasConversionsFromArrow(PandasConversionsBase):
    """asv benchmarks: Arrow Table -> pandas DataFrame conversion."""

    param_names = ('size', 'dtype')
    params = ((1, 10 ** 5, 10 ** 6, 10 ** 7), ('int64', 'float64', 'float64_nans', 'str'))

    def setup(self, n, dtype):
        # Build the pandas fixture, then pre-convert it to Arrow so the
        # benchmarks time only the Arrow -> pandas direction.
        super(PandasConversionsFromArrow, self).setup(n, dtype)
        self.arrow_data = A.Table.from_pandas(self.data)

    def time_to_series(self, n, dtype):
        self.arrow_data.to_pandas()

    def peakmem_to_series(self, n, dtype):
        self.arrow_data.to_pandas()
class ScalarAccess(object):
    """asv benchmarks: element-wise ``.as_py()`` access on an Arrow array."""

    param_names = ('size',)
    params = (1, 10 ** 5, 10 ** 6, 10 ** 7)

    def setup(self, n):
        # Bug fix: asv invokes ``setup`` (lowercase, as the sibling benchmark
        # classes in this file use); the previous unittest-style ``setUp``
        # was never called, so ``_array`` did not exist when ``time_as_py``
        # ran.
        self._array = A.from_pylist(list(range(n)))

    def time_as_py(self, n):
        for i in range(n):
            self._array[i].as_py()
|
zozo123/buildbot | master/buildbot/test/fake/fakebuild.py | Python | gpl-3.0 | 2,371 | 0.000422 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; w | ithout even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street | , Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import mock
import posixpath
from buildbot import config
from buildbot import interfaces
from buildbot.process import factory
from buildbot.process import properties
from buildbot.test.fake import fakemaster
from twisted.python import components
class FakeBuildStatus(properties.PropertiesMixin, mock.Mock):
    """Mock build status that carries a real Properties object."""

    properties = properties.Properties()

    # work around http://code.google.com/p/mock/issues/detail?id=105
    def _get_child_mock(self, **kw):
        return mock.Mock(**kw)

    def getInterestedUsers(self):
        return []
components.registerAdapter(
lambda build_status: build_status.properties,
FakeBuildStatus, interfaces.IProperties)
class FakeBuild(properties.PropertiesMixin):
    """Minimal stand-in for a real Build object, for unit tests."""

    def __init__(self, props=None, master=None):
        self.build_status = FakeBuildStatus()
        self.builder = fakemaster.FakeBuilderStatus(master)
        self.builder.config = config.BuilderConfig(
            name='bldr',
            slavenames=['a'],
            factory=factory.BuildFactory())
        self.path_module = posixpath
        self.buildid = 92
        self.number = 13
        self.workdir = 'build'
        self.sources = {}
        # Fall back to an empty Properties object, then wire it to this build.
        if props is None:
            props = properties.Properties()
        props.build = self
        self.build_status.properties = props

    def getSourceStamp(self, codebase):
        # Unknown codebases yield None, matching the original lookup.
        return self.sources.get(codebase)

    def allFiles(self):
        return []

    def getBuilder(self):
        return self.builder
components.registerAdapter(
lambda build: build.build_status.properties,
FakeBuild, interfaces.IProperties)
|
nuodb/nuodbTools | nuodbTools/cluster/domain.py | Python | bsd-3-clause | 4,530 | 0.018102 | # requests module available at http://docs.python-requests.org/en/latest/
import nuodbTools
import collections, inspect, json, re, requests, socket
class Domain():
    """Client for a NuoDB domain reached through one or more REST endpoints."""

    def __init__(self, rest_url=None, rest_username=None, rest_password=None):
        # Copy every constructor argument onto the instance verbatim.
        args, _, _, values = inspect.getargvalues(inspect.currentframe())
        for i in args:
            setattr(self, i, values[i])
        self.rest_urls = []

        # Set up for REST
        #################
        # handle urls that have a slash at the end by truncating it
        def process(url):
            # Normalize one endpoint URL into a component dict and append it
            # to self.rest_urls.
            c = {}
            if url[-1] == "/":
                url = url[0:len(url) - 1]
            parts = url.split("://")
            if len(parts) > 1:
                c['protocol'] = parts[0]
                c['path'] = parts[1]
                c['url'] = url
            else:
                # No scheme given: assume plain http.
                c['protocol'] = "http"
                c['rest_port'] = 80
                c['path'] = url
                c['url'] = "://".join([c['protocol'], c['path']])
            path_components = c['path'].split("/")
            c['query_string'] = "/".join(path_components[1:len(path_components)])
            self.rest_urls.append(c)

        # Accept either a single URL or a list/tuple of URLs.
        if isinstance(rest_url, tuple) or isinstance(rest_url, list):
            for url in rest_url:
                process(url)
        else:
            process(rest_url)

    def get_databases(self):
        """Return the sorted names of all databases in the domain."""
        data = self.rest_req("GET", "databases")
        databases = []
        for db in data:
            databases.append(db['name'])
        return sorted(databases)

    def get_host_id(self, hostname):
        """Return the id of the host named *hostname*, or None if unknown."""
        for host in self.get_hosts():
            if hostname == host['hostname']:
                return host['id']
        return None

    def get_hosts(self):
        """Return the raw host records for the domain."""
        return self.rest_req(action = "GET", path = "hosts")

    def get_processes(self, database=None):
        """Return all processes, optionally filtered to those of *database*."""
        data = self.rest_req("GET", "processes")
        processes = []
        for process in data:
            if database == None:
                processes.append(process)
            elif process['dbname'] == database:
                processes.append(process)
        return processes

    def rest_req(self, action="GET", path="", data=None, timeout=10):
        """Issue *action* against *path* on the first reachable endpoint.

        Returns the decoded JSON body ({} when the body is empty). Raises
        nuodbTools.RESTError on a non-200 response and
        nuodbTools.RESTNotAvailableError when no endpoint can be reached.
        NOTE(review): *timeout* is accepted but never passed to requests.
        """
        if path[0] == "/":
            path = path[1:len(path)]
        # Try each url we have, return if we get a hit, good or bad.
        urls_tried = []
        for rest_url in self.rest_urls:
            url = "/".join([rest_url['url'], path])
            headers = {"Accept": "application/json", "Content-type": "application/json"}
            # Only dict-like payloads are serialized; anything else sends no body.
            if isinstance(data, dict) or isinstance(data, collections.OrderedDict):
                data_json = json.dumps(data)
            else:
                data_json = None
            urls_tried.append(url)
            try:
                if action == "POST":
                    req = requests.post(url, data=data_json, auth=(self.rest_username, self.rest_password), headers=headers)
                elif action == "PUT":
                    req = requests.put(url, data=data_json, auth=(self.rest_username, self.rest_password), headers=headers)
                elif action == "DELETE":
                    req = requests.delete(url, data=data_json, auth=(self.rest_username, self.rest_password), headers=headers)
                elif action == "HEAD":
                    req = requests.head(url, data=data_json, auth=(self.rest_username, self.rest_password), headers=headers)
                elif action == "OPTIONS":
                    req = requests.options(url, auth=(self.rest_username, self.rest_password), headers=headers)
                else:  # Assume GET
                    req = requests.get(url, auth=(self.rest_username, self.rest_password), headers=headers)
            except requests.ConnectionError, e:
                # Can't connect to this guy, try the next
                pass
            else:
                if req.status_code == 200:
                    if len(req.text) > 0:
                        return req.json()
                    else:
                        return {}
                else:
                    d = {"content": req.content, "method": action, "url": url, "data": data, "headers": headers, "code": req.status_code}
                    s = "Failed REST Request. DEBUG: %s" % json.dumps(d)
                    raise nuodbTools.RESTError(s)
        # If we are here then we couldn't connect to anyone. Raise the flag.
        raise nuodbTools.RESTNotAvailableError("Can't get a connection to any endpoint. Tried: %s" % ",".join(urls_tried))

    def pp_rest_req(self, action="GET", path="", data=None, timeout=10):
        """Pretty-printed (indented, key-sorted) JSON form of rest_req()."""
        return json.dumps(self.rest_req(action, path, data, timeout), indent=4, sort_keys=True)
class Error(Exception):
    """Base class for errors raised by this module."""
class TemporaryAddPolicy:
    """Host-key policy that silently accepts any unknown host key."""

    def missing_host_key(self, client, hostname, key):
        # Accept the key without recording or rejecting it.
        return None
|
ella/esus | tests/unit_project/tests/fixtures.py | Python | bsd-3-clause | 1,899 | 0.035808 | from django.contrib.auth.models import User
from esus.phorum.models import Category, Table
__all__ = ("user_super", "users_usual", "table_simple")
def user_super(case):
    """Attach a superuser account to *case* as ``case.user_super``."""
    case.user_super = User.objects.create(
        username = "superuser",
        # Pre-hashed sha1 password shared by all fixtures.
        password = "sha1$aaa$b27189d65f3a148a8186753f3f30774182d923d5",
        first_name = "Esus",
        last_name = "master",
        is_staff = True,
        is_superuser = True,
    )
def users_usual(case):
    """Attach three regular accounts to *case*: ``user_tester``,
    ``user_john_doe`` (plain users) and ``user_staff`` (staff)."""
    case.user_tester = User.objects.create(
        username = "Tester",
        password = "sha1$aaa$b27189d65f3a148a8186753f3f30774182d923d5",
        first_name = "I",
        last_name = "Robot",
        is_staff = False,
        is_superuser = False,
    )
    case.user_john_doe = User.objects.create(
        username = "JohnDoe",
        password = "sha1$aaa$b27189d65f3a148a8186753f3f30774182d923d5",
        first_name = "John",
        last_name = "Doe",
        is_staff = False,
        is_superuser = False,
    )
    case.user_staff = User.objects.create(
        username = "Gnome",
        password = "sha1$aaa$b27189d65f3a148a8186753f3f30774182d923d5",
        first_name = "Wiki",
        last_name = "Gnome",
        is_staff = True,
        is_superuser = False,
    )
def table_simple(case, table_owner=None):
    """Create ``case.category`` containing one ``case.table``.

    The table is owned by *table_owner*, defaulting to ``case.user_tester``.
    """
    case.category = Category.objects.create(
        name = u"Category",
        slug = u"category",
    )
    case.table = case.category.add_table(
        name = u"Table",
        owner = table_owner or case.user_tester,
    )
def comment_simple(case, table=None, author=None):
    """Attach two comments to *table*: one by *author* and one by the owner.

    Defaults to ``case.table`` and ``case.user_john_doe``; results are stored
    as ``case.comment_doe`` and ``case.comment_owner``.
    """
    table = table or case.table
    author = author or case.user_john_doe
    # Bug fix: the first comment was previously added via ``case.table``,
    # silently ignoring an explicitly passed *table* argument.
    case.comment_doe = table.add_comment(
        author = author,
        text = u"Humble user's comment"
    )
    case.comment_owner = table.add_comment(
        author = table.owner,
        text = u"Table 0wn3rz comment"
    )
|
varunagrawal/nuke | nuke/dirtree.py | Python | mit | 3,107 | 0.001293 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Utilities for getting directory tree."""
import os
from pathlib import Path
import crayons
from nuke.utils import parse_ignore_file
def fg(text, color):
    """Wrap *text* in the ANSI escape for 256-color foreground *color*."""
    return "".join(("\33[38;5;", str(color), "m", text, "\33[0m"))
def bg(text, color):
    """Wrap *text* in the ANSI escape for 256-color background *color*."""
    return "\33[48;5;%sm%s\33[0m" % (color, text)
def get_colorized(path: Path):
    """Colorize *path*'s name based on the filesystem object type.

    Fix: checks the more specific types (symlink, mount point) before the
    generic ones. ``Path.is_dir()``/``is_file()`` follow symlinks and a
    mount point also satisfies ``is_dir()``, so with the old order the
    symlink branch could only ever match broken links and the mount branch
    was unreachable.
    """
    name = path.name
    if path.is_symlink():
        # Checked first: is_dir/is_file would follow the link and match.
        return crayons.cyan(name)
    elif path.is_mount():
        # Checked before is_dir: a mount point is also a directory.
        return crayons.red(name)
    elif path.is_dir():
        return crayons.blue(name)
    elif path.is_file():
        return crayons.green(name)
    elif path.is_socket():
        return crayons.magenta(name)
    else:
        return crayons.white(name)
def get_dirtree(directory):
    """
    Get the directory tree of the `directory`.

    :param directory: The root directory from where to generate the directory tree.
    :return: Tuple ``(element_list, ignore_patterns)``: the tree elements
        (dicts with the indented, colorized name under ``"repr"`` and the
        ``Path`` under ``"path"``), plus every pattern collected from
        ``.nukeignore`` files encountered during the walk.
    """
    element_list = []
    ignore_patterns = []

    file_link = fg("├── ", 241)  # u'\u251c\u2500\u2500 '
    last_file_link = fg("└── ", 241)  # u'\u2514\u2500\u2500 '
    tree_branch = fg("│ ", 241)  # u'\u2502 '

    # Get the list of all the files/dirs in the directory to nuke.
    # We traverse in a bottom up manner so that directory removal is trivial.
    for (dirpath_str, dirnames, filenames) in os.walk(directory, topdown=False):
        level = dirpath_str.replace(str(directory), "").count(os.sep)
        if level > 0:
            indent = tree_branch * (level - 1) + file_link
        else:
            indent = ""

        dirpath = Path(dirpath_str)

        # We record every element in the tree as a dict of the indented name (repr)
        # and the path so we can use the ignore methods on the paths and still
        # have the indented names for our tree.
        # Only add current directory as element to be nuked if no .nukeignore
        # file is present.
        if ".nukeignore" not in filenames:
            # Add the current directory
            element = {
                "repr": "{}{}/".format(indent, get_colorized(dirpath)),
                "path": dirpath,
            }
            element_list.append(element)

        subindent = tree_branch * (level)
        # Add the files in the directory
        for idx, fn in enumerate(filenames):
            if fn == ".nukeignore":
                ignore_patterns.extend(
                    parse_ignore_file((dirpath / fn), dirpath))
                # The ignore file itself is never listed for nuking; its
                # patterns are applied to the rest of the tree instead.
                continue
            # Check if it is the last element
            if idx == len(filenames) - 1:
                branch = subindent + last_file_link
            else:
                branch = subindent + file_link
            element = {
                "repr": "{}{}".format(branch, get_colorized(dirpath / fn)),
                "path": (dirpath / fn),
            }
            element_list.append(element)

    return element_list, ignore_patterns
|
pmclanahan/pytest-progressive | setup.py | Python | mit | 1,649 | 0.001213 | import sys
import codecs

# Prevent spurious errors during `python setup.py test`, a la
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html:
try:
    import multiprocessing  # noqa: F401 - imported for its side effect only
except ImportError:
    pass

from setuptools import setup, find_packages

extra_setup = {}
if sys.version_info >= (3,):
    # NOTE(review): use_2to3 was removed in setuptools 58; this only works
    # with older setuptools releases.
    extra_setup['use_2to3'] = True

setup(
    name='nose-progressive',
    version='1.5.1',
    # Fix: keyword was corrupted ("desc | ripti | on"); restored.
    description='A testrunner with a progress bar and smarter tracebacks',
    long_description=codecs.open('README.rst', encoding='utf-8').read(),
    author='Erik Rose',
    author_email='erikrose@grinchcentral.com',
    license='MIT',
    packages=find_packages(exclude=['ez_setup']),
    install_requires=['nose>=1.2.1', 'blessings>=1.3,<2.0'],
    test_suite='nose.collector',
    url='https://github.com/erikrose/nose-progressive',
    include_package_data=True,
    entry_points="""
        [nose.plugins.0.10]
        noseprogressive = noseprogressive:ProgressivePlugin
        """,
    classifiers = [
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Topic :: Software Development :: Testing'
    ],
    **extra_setup
)
|
Sokrates80/air-py | aplink/messages/ap_save_tx_calibration.py | Python | mit | 2,515 | 0.00159 | """
airPy is a flight controller based on pyboard and written in micropython.
The MIT License (MIT)
Copyright (c) 2016 Fabrizio Scimia, fabrizio.scimia@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import struct
class SaveTxCalibration:
    """Decoder for the "save TX calibration" aplink message (type 110)."""

    MESSAGE_TYPE_ID = 110

    def __init__(self):
        pass

    @staticmethod
    def decode_payload(payload):
        """
        Decode message payload
        :param payload: byte stream representing the message payload; it
            holds three equally sized, contiguous groups of big-endian
            4-byte floats (min, max, center PWM thresholds).
        :return: a list of 3 list of floats representing the PWM threshold values for each of the N active channels
        [[min threshold values],[max threshold values], [center threshold values]]
        """
        # 4 byte per float * 3 set of thresholds
        set_size = len(payload) // 3
        count = set_size // 4

        def unpack_set(offset):
            # Floats within one set are contiguous, so a single multi-value
            # unpack replaces the per-element loop of the original code.
            return list(struct.unpack('>%df' % count,
                                      payload[offset:offset + 4 * count]))

        return [unpack_set(0), unpack_set(set_size), unpack_set(2 * set_size)]
|
glabilloy/fabrydb | fabrydb/conf/urls.py | Python | bsd-2-clause | 1,723 | 0.005804 | import re
from django.conf.urls import url, patterns, include
from django.conf import settings
from django.contrib import admin
from django.views.generic import TemplateView
from django.template import add_to_builtins
from fabrydb.admin import fadmin
add_to_builtins('avocado.templatetags.avocado_tags')

admin.autodiscover()

urlpatterns = patterns('',
    # Landing Page
    # NOTE(review): both patterns below match r'^$'; the first match wins at
    # resolve time, so the 'landing' view is only reachable via
    # reverse('landing'). Confirm which root view is actually intended.
    url(r'^$', 'fabry.views.home', name='home'),
    url(r'^$', 'fabrydb.views.landing', name='landing'),

    url(r'^accounts/login/$', 'django.contrib.auth.views.login', name='login'),
    url(r'^accounts/logout/$', 'django.contrib.auth.views.logout', name='logout'),

    # Cilantro Pages (all served by the same single-page app template)
    url(r'^workspace/', TemplateView.as_view(template_name='index.html'), name='workspace'),
    url(r'^query/', TemplateView.as_view(template_name='index.html'), name='query'),
    url(r'^results/', TemplateView.as_view(template_name='index.html'), name='results'),

    # Serrano-compatible Endpoint
    url(r'^api/', include('serrano.urls')),

    # Administrative components
    url(r'^admin/', include(admin.site.urls)),
    url(r'^fadmin/', include(fadmin.urls), name='fadmin'),
)

# if not settings.DEBUG:
urlpatterns += patterns(
    '',
    (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)

# In production, these two locations must be served up statically
urlpatterns += patterns('django.views.static',
    url(r'^{0}(?P<path>.*)$'.format(re.escape(settings.MEDIA_URL.lstrip('/'))), 'serve', {
        'document_root': settings.MEDIA_ROOT
    }),
    url(r'^{0}(?P<path>.*)$'.format(re.escape(settings.STATIC_URL.lstrip('/'))), 'serve', {
        'document_root': settings.STATIC_ROOT
    }),
)
|
lmazuel/azure-sdk-for-python | azure-mgmt-media/azure/mgmt/media/models/media_service_paged.py | Python | mit | 938 | 0.001066 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class MediaServicePaged(Paged):
    """
    A paging container for iterating over a list of :class:`MediaService <azure.mgmt.media.models.MediaService>` object
    """

    # Fix: attribute name was corrupted ("_attribute | _map"); restored to
    # the msrest serialization map every Paged subclass defines.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[MediaService]'}
    }

    def __init__(self, *args, **kwargs):

        super(MediaServicePaged, self).__init__(*args, **kwargs)
|
andrewklau/openshift-tools | openshift/installer/vendored/openshift-ansible-3.5.13/roles/lib_openshift/src/lib/volume.py | Python | apache-2.0 | 1,669 | 0.004194 | # pylint: skip-file
# flake8: noqa
class Volume(object):
    ''' Class to model an openshift volume object'''
    # Dotted YAML paths to the volumeMounts list, per resource kind.
    volume_mounts_path = {"pod": "spec.containers[0].volumeMounts",
                          "dc": "spec.template.spec.containers[0].volumeMounts",
                          "rc": "spec.template.spec.containers[0].volumeMounts",
                         }
    # Dotted YAML paths to the volumes list, per resource kind.
    volumes_path = {"pod": "spec.volumes",
                    "dc": "spec.template.spec.volumes",
                    "rc": "spec.template.spec.volumes",
                   }

    @staticmethod
    def create_volume_structure(volume_info):
        ''' return a properly structured volume

            volume_info keys used per type:
              secret:   name, secret_name, path
              emptydir: name, path
              pvc:      name, claimName, claimSize
              hostpath: name, path

            Returns (volume, volume_mount); volume_mount is None for the
            pvc and hostpath types.
        '''
        volume_mount = None
        volume = {'name': volume_info['name']}

        if volume_info['type'] == 'secret':
            # (A redundant `volume['secret'] = {}` that was immediately
            # overwritten has been removed; same final structure.)
            volume[volume_info['type']] = {'secretName': volume_info['secret_name']}
            volume_mount = {'mountPath': volume_info['path'],
                            'name': volume_info['name']}
        elif volume_info['type'] == 'emptydir':
            volume['emptyDir'] = {}
            volume_mount = {'mountPath': volume_info['path'],
                            'name': volume_info['name']}
        elif volume_info['type'] == 'pvc':
            volume['persistentVolumeClaim'] = {'claimName': volume_info['claimName'],
                                               'claimSize': volume_info['claimSize']}
        elif volume_info['type'] == 'hostpath':
            volume['hostPath'] = {'path': volume_info['path']}

        return (volume, volume_mount)
|
great-expectations/great_expectations | great_expectations/rule_based_profiler/types/attributes.py | Python | apache-2.0 | 604 | 0.003311 | from great_expectations.core import IDDict
from great_expectations.core.util import convert_to_json_serializable
from great_expectations.types import SerializableDotDict
# TODO: <Alex>If/when usage of this class gains traction, it can be moved to common general utilities location.</Alex>
class Attributes(SerializableDotDict, IDDict):
    """
    This class generalizes dictionary in order to hold generic attributes with unique ID.
    """

    def to_dict(self) -> dict:
        # Plain-dict view of the dot-dict contents.
        return dict(self)

    def to_json_dict(self) -> dict:
        # JSON-safe copy: delegates value conversion to the shared utility.
        return convert_to_json_serializable(data=self.to_dict())
|
cchristelis/feti | django_project/core/settings/test_travis.py | Python | bsd-2-clause | 592 | 0.001689 | # -*- coding: utf-8 -*-
from .test import * # noqa
# Database settings for the Travis CI environment (PostGIS on localhost).
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'test_db',
        # Fix: the value was detached from its key by a stray " | " artifact.
        'USER': 'postgres',
        'PASSWORD': '',
        'HOST': 'localhost',
        # Set to empty string for default.
        'PORT': '',
    }
}

# Test artifacts go under /tmp so runs don't pollute the project tree.
MEDIA_ROOT = '/tmp/media'
STATIC_ROOT = '/tmp/static'

HAYSTACK_CONNECTIONS = {
    'default': {
        # Fix: engine dotted path was corrupted mid-identifier; restored.
        'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
        'URL': 'http://127.0.0.1:9200/',
        'INDEX_NAME': 'haystack',
    },
}
|
pas256/troposphere | tests/test_ecs.py | Python | bsd-2-clause | 7,887 | 0 | import unittest
from troposphere import Ref
import troposphere.ecs as ecs
from troposphere import iam
class TestECS(unittest.TestCase):
    """Smoke tests for troposphere's ECS resources.

    Each case builds the objects and calls ``to_dict()``, which triggers
    troposphere's property validation; an invalid combination raises.
    """

    def test_allow_placement_strategy_constraint(self):
        """A Service validates with both placement strategies and constraints."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            Volumes=[
                ecs.Volume(Name="my-vol"),
            ],
        )
        ecs_service = ecs.Service(
            'Service',
            Cluster='cluster',
            DesiredCount=2,
            PlacementStrategies=[
                ecs.PlacementStrategy(
                    Type="random",
                )
            ],
            PlacementConstraints=[
                ecs.PlacementConstraint(
                    Type="distinctInstance",
                )
            ],
            TaskDefinition=Ref(task_definition),
        )
        ecs_service.to_dict()

    def test_allow_scheduling_strategy(self):
        """A Service accepts the DAEMON scheduling strategy constant."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            Volumes=[
                ecs.Volume(Name="my-vol"),
            ],
        )
        ecs_service = ecs.Service(
            'Service',
            Cluster='cluster',
            DesiredCount=2,
            TaskDefinition=Ref(task_definition),
            SchedulingStrategy=ecs.SCHEDULING_STRATEGY_DAEMON
        )
        ecs_service.to_dict()

    def test_fargate_launch_type(self):
        """A FARGATE Service validates with an awsvpc network configuration."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            Volumes=[
                ecs.Volume(Name="my-vol"),
            ],
        )
        ecs_service = ecs.Service(
            'Service',
            Cluster='cluster',
            DesiredCount=2,
            PlacementStrategies=[
                ecs.PlacementStrategy(
                    Type="random",
                )
            ],
            LaunchType='FARGATE',
            NetworkConfiguration=ecs.NetworkConfiguration(
                AwsvpcConfiguration=ecs.AwsvpcConfiguration(
                    AssignPublicIp='DISABLED',
                    SecurityGroups=['sg-1234'],
                    Subnets=['subnet-1234']
                )
            ),
            PlacementConstraints=[
                ecs.PlacementConstraint(
                    Type="distinctInstance",
                )
            ],
            TaskDefinition=Ref(task_definition),
        )
        ecs_service.to_dict()

    def test_allow_string_cluster(self):
        """Cluster may be given as a plain string."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            Volumes=[
                ecs.Volume(Name="my-vol"),
            ],
        )
        ecs_service = ecs.Service(
            'Service',
            Cluster='cluster',
            DesiredCount=2,
            TaskDefinition=Ref(task_definition),
        )
        ecs_service.to_dict()

    def test_allow_ref_cluster(self):
        """Cluster may be given as a Ref to a Cluster resource."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            Volumes=[
                ecs.Volume(Name="my-vol"),
            ],
        )
        cluster = ecs.Cluster("mycluster")
        ecs_service = ecs.Service(
            'Service',
            Cluster=Ref(cluster),
            DesiredCount=2,
            TaskDefinition=Ref(task_definition),
        )
        ecs_service.to_dict()

    def test_task_role_arn_is_optional(self):
        """TaskDefinition validates without a TaskRoleArn."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
        )
        task_definition.to_dict()

    def test_allow_string_task_role_arn(self):
        """TaskRoleArn may be given as a plain string."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            TaskRoleArn="myiamrole"
        )
        task_definition.to_dict()

    def test_allow_ref_task_role_arn(self):
        """TaskRoleArn may be given as a Ref to an IAM role."""
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            TaskRoleArn=Ref(iam.Role("myRole"))
        )
        task_definition.to_dict()

    def test_allow_port_mapping_protocol(self):
        """PortMapping accepts an explicit protocol."""
        container_definition = ecs.ContainerDefinition(
            Image="myimage",
            Memory="300",
            Name="mycontainer",
            PortMappings=[
                ecs.PortMapping(
                    ContainerPort=8125, HostPort=8125, Protocol="udp"
                )
            ]
        )
        # Fix: identifier was corrupted ("container_definit | ion"); restored.
        container_definition.to_dict()

    def test_port_mapping_does_not_require_protocol(self):
        """PortMapping validates without a protocol (defaults apply)."""
        container_definition = ecs.ContainerDefinition(
            Image="myimage",
            Memory="300",
            Name="mycontainer",
            PortMappings=[
                # Fix: HostPort literal was corrupted ("81 | 25"); restored.
                ecs.PortMapping(
                    ContainerPort=8125, HostPort=8125,
                )
            ]
        )
        container_definition.to_dict()

    def test_allow_container_healthcheck(self):
        """ContainerDefinition accepts a HealthCheck block."""
        health_check_def = ecs.HealthCheck(
            Command=[
                "CMD-SHELL",
                "curl -f http://localhost/ || exit 1"
            ],
            Interval=5,
            Timeout=30,
            Retries=5,
        )
        container_definition = ecs.ContainerDefinition(
            Image="myimage",
            Memory="300",
            Name="mycontainer",
            HealthCheck=health_check_def,
        )
        container_definition.to_dict()

    def test_docker_volume_configuration(self):
        """Volume accepts a DockerVolumeConfiguration."""
        docker_volume_configuration = ecs.DockerVolumeConfiguration(
            Autoprovision=True,
            Scope="task"
        )
        task_definition = ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            Volumes=[
                ecs.Volume(
                    Name="my-vol",
                    DockerVolumeConfiguration=docker_volume_configuration
                ),
            ],
        )
        task_definition.to_dict()
class TestECSValidators(unittest.TestCase):
    """Unit tests for the standalone validator helpers in troposphere.ecs."""

    def test_scope_validator(self):
        """scope_validator accepts 'shared'/'task' and rejects other values."""
        for scope in ('shared', 'task'):
            ecs.scope_validator(scope)
        with self.assertRaises(ValueError):
            ecs.scope_validator("bad_scope")
|
vdloo/raptiformica | tests/unit/raptiformica/settings/load/test_load_module_configs.py | Python | mit | 849 | 0.001178 | from mock import call
from raptiformica.settings import conf
from raptiformica.settings.load import load_module_configs
from tests.testcase import TestCase
class TestLoadModuleConfigs(TestCase):
    """Tests for raptiformica.settings.load.load_module_configs."""

    def setUp(self):
        # Patch the per-directory loader so no real module files are read.
        self.load_module_config = self.set_up_patch(
            'raptiformica.settings.load.load_module_config'
        )
        self.load_module_config.return_value = [{}]

    def test_load_module_configs_loads_module_configs(self):
        """Both the system and the user module directories are loaded."""
        list(load_module_configs())
        expected_calls = map(
            call, (conf().MODULES_DIR, conf().USER_MODULES_DIR)
        )
        # Fix: a stray " | " artifact broke this call's first argument.
        self.assertCountEqual(
            self.load_module_config.mock_calls, expected_calls
        )

    def test_load_module_configs_flattens_module_configs(self):
        """The per-directory config lists are flattened into one sequence."""
        ret = load_module_configs()
        self.assertCountEqual(ret, [{}] * 2)
|
TwilioDevEd/webhooks-example-django | webhooks/views.py | Python | mit | 2,249 | 0.000889 | from django.conf import settings
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from functools import wraps
from twilio.twiml.voice_response import VoiceResponse
from twilio.twiml.messaging_response import MessagingResponse
from twilio.util import RequestValidator
import os
def validate_twilio_request(f):
    """Validates that incoming requests genuinely originated from Twilio.

    Decorator for Django views: recomputes the expected signature from the
    request's absolute URL and POST parameters using TWILIO_AUTH_TOKEN and
    compares it to the X-Twilio-Signature header. Invalid requests get a
    403 response instead of reaching the wrapped view.
    """
    @wraps(f)
    def decorated_function(request, *args, **kwargs):
        # Create an instance of the RequestValidator class
        validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN'))

        # Validate the request using its URL, POST data,
        # and X-TWILIO-SIGNATURE header
        request_valid = validator.validate(
            request.build_absolute_uri(),
            request.POST,
            request.META.get('HTTP_X_TWILIO_SIGNATURE', ''))

        # Continue processing the request if it's valid, return a 403 error if
        # it's not. NOTE: DEBUG mode bypasses signature validation entirely.
        if request_valid or settings.DEBUG:
            return f(request, *args, **kwargs)
        else:
            return HttpResponseForbidden()
    return decorated_function
@require_POST
@csrf_exempt
@validate_twilio_request
def incoming_call(request):
    """Twilio Voice webhook: answer an incoming call with a spoken message."""
    # Space out the caller's digits so the TTS engine reads them one by one.
    caller = request.POST['From']
    spaced_digits = ' '.join(caller)
    spoken = """
    Thanks for calling!
    Your phone number is {0}. I got your call because of Twilio's webhook.
    Goodbye!""".format(spaced_digits)

    twiml = VoiceResponse()
    twiml.say(spoken)
    return HttpResponse(twiml)
@require_POST
@csrf_exempt
@validate_twilio_request
def incoming_message(request):
    """Twilio Messaging webhook: reply with the incoming body's length."""
    char_count = len(request.POST['Body'])
    reply = "Your text to me was {0} characters long. Webhooks are neat :)" \
        .format(char_count)

    twiml = MessagingResponse()
    twiml.message(reply)
    return HttpResponse(twiml)
|
killpanda/Ailurus | ailurus/computer_doctor_pane.py | Python | gpl-2.0 | 6,390 | 0.003756 | #coding: utf-8
#
# Ailurus - a simple application installer and GNOME tweaker
#
# Copyright (C) 2009-2010, Ailurus developers and Ailurus contributors
# Copyright (C) 2007-2010, Trusted Digital Technology Laboratory, Shanghai Jiao Tong University, China.
#
# Ailurus is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Ailurus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ailurus; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
from __future__ import with_statement
import gtk, gobject, sys, os
import pango
from lib import *
from libu import *
class ComputerDoctorPane(gtk.VBox):
    """GTK pane listing detected system problems ("cure" objects).

    Each row shows a checkbox, a must-fix/suggestion icon and a markup
    description; the user ticks suggestions and clicks Apply to run the
    corresponding ``cure()`` methods.
    """
    icon = D+'sora_icons/m_computer_doctor.png'
    text = _('Computer\nDoctor')

    def render_type_func(self, column, cell, model, iter):
        """Cell data func: pick the must-fix or suggestion icon per row."""
        cure_obj = model.get_value(iter, 1)
        pixbuf = [self.icon_must_fix, self.icon_suggestion][cure_obj.type]
        cell.set_property('pixbuf', pixbuf)

    def render_text_func(self, column, cell, model, iter):
        """Cell data func: bold docstring title plus optional detail line."""
        cure_obj = model.get_value(iter, 1)
        markup = '<b>%s</b>' % cure_obj.__doc__
        if cure_obj.detail: markup += '\n' + cure_obj.detail
        cell.set_property('markup', markup)

    def toggled(self, render_toggle, path, sortedstore):
        """Flip a row's checkbox; Apply is enabled iff any row is checked."""
        path = sortedstore.convert_path_to_child_path(path)
        self.liststore[path][0] = not self.liststore[path][0]
        sensitive = False
        for row in self.liststore:
            to_apply = row[0]
            sensitive = sensitive or to_apply
        self.button_apply.set_sensitive(sensitive)

    def sort_by_type(self, model, iter1, iter2):
        """Sort rows by cure type first, then by title (docstring)."""
        obj1 = model.get_value(iter1, 1)
        obj2 = model.get_value(iter2, 1)
        if obj1 and obj2:
            return cmp(obj1.type, obj2.type) or cmp(obj1.__doc__, obj2.__doc__)
        else:
            return 0

    def sort_by_text(self, model, iter1, iter2):
        """Sort rows by title (docstring) only."""
        obj1 = model.get_value(iter1, 1)
        obj2 = model.get_value(iter2, 1)
        if obj1 and obj2:
            return cmp(obj1.__doc__, obj2.__doc__)
        else:
            return 0

    def refresh(self):
        """Re-scan the cure objects and rebuild the list and summary text."""
        self.liststore.clear()
        for obj in self.cure_objs:
            if obj.exists():
                self.liststore.append([False, obj])
        self.sortedstore.set_sort_column_id(1000, gtk.SORT_ASCENDING)
        self.button_apply.set_sensitive(False)
        self.show_text('')
        must_fix = 0
        for row in self.liststore:
            obj = row[1]
            if obj.type == C.MUST_FIX: must_fix += 1
        text = ''
        if len(self.liststore):
            if must_fix:
                text += _('Found %s errors in your system.') % must_fix
                text += ' '
            text += _('There is a total of %s suggestions.') % len(self.liststore)
        else:
            text = _('Found no error :)')
        self.show_text(text)

    def apply(self):
        """Run cure() for every checked row, then refresh and notify."""
        success = 0
        for row in self.liststore:
            apply = row[0]
            if apply:
                obj = row[1]
                try:
                    obj.cure()
                    success += 1
                except: print_traceback()
        self.refresh()
        if success:
            notify(_('Computer doctor'), _('Successfully applied %s suggestions.') % success)

    def show_text(self, text):
        """Show *text* in the header of the description column."""
        self.column_text.set_title(text)

    def __init__(self, main_view, cure_objs):
        self.cure_objs = cure_objs
        self.icon_must_fix = get_pixbuf(D+'sora_icons/c_must_fix.png', 24, 24)
        self.icon_suggestion = get_pixbuf(D+'sora_icons/c_suggestion.png', 24, 24)
        self.liststore = liststore = gtk.ListStore(bool, gobject.TYPE_PYOBJECT) # apply?, cure_object
        self.sortedstore = sortedstore = gtk.TreeModelSort(liststore)
        sortedstore.set_sort_func(1000, self.sort_by_type)
        sortedstore.set_sort_func(1001, self.sort_by_text)
        render_toggle = gtk.CellRendererToggle()
        render_toggle.connect('toggled', self.toggled, sortedstore)
        render_type = gtk.CellRendererPixbuf()
        render_text = gtk.CellRendererText()
        render_text.set_property('ellipsize', pango.ELLIPSIZE_END)
        column_toggle = gtk.TreeViewColumn()
        column_toggle.pack_start(render_toggle, False)
        column_toggle.add_attribute(render_toggle, 'active', 0)
        column_toggle.set_sort_column_id(0)
        column_type = gtk.TreeViewColumn()
        column_type.pack_start(render_type, False)
        # Fix: this statement was corrupted by a stray " | " artifact.
        column_type.set_cell_data_func(render_type, self.render_type_func)
        column_type.set_sort_column_id(1000)
        self.column_text = column_text = gtk.TreeViewColumn()
        # Fix: this statement was corrupted by a stray " | " artifact.
        column_text.pack_start(render_text)
        column_text.set_cell_data_func(render_text, self.render_text_func)
        column_text.set_sort_column_id(1001)
        self.view = view = gtk.TreeView(sortedstore)
        view.set_rules_hint(True)
        view.append_column(column_toggle)
        view.append_column(column_type)
        view.append_column(column_text)
        scroll = gtk.ScrolledWindow()
        scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
        scroll.set_shadow_type(gtk.SHADOW_IN)
        scroll.add(view)
        button_refresh = image_stock_button(gtk.STOCK_REFRESH, _('Refresh'))
        button_refresh.connect('clicked', lambda *w: self.refresh())
        self.button_apply = button_apply = image_stock_button(gtk.STOCK_APPLY, _('Apply'))
        button_apply.connect('clicked', lambda *w: self.apply())
        button_apply.set_sensitive(False)
        button_box = gtk.HBox(False, 10)
        button_box.pack_start(button_refresh, False)
        button_box.pack_start(button_apply, False)
        gtk.VBox.__init__(self, False, 10)
        self.set_border_width(5)
        self.pack_start(button_box, False)
        self.pack_start(scroll)
        self.show_text(_('Please click "refresh" button.'))
        self.refresh()
bthyreau/hippodeep | applyseg_unique.py | Python | mit | 11,344 | 0.024066 | from __future__ import print_function
import time
ct = time.time()
from lasagne.layers import get_output, InputLayer, DenseLayer, ReshapeLayer, NonlinearityLayer
from lasagne.nonlinearities import rectify, leaky_rectify
from lasagne.updates import nesterov_momentum, rmsprop, adamax
import sys, os, time
scriptpath = os.path.dirname(__file__)
import nibabel
import numpy as np
import theano
import theano.tensor as T
import lasagne
# Note that Conv3DLayer and dnn.Conv3DDNNLayer have opposite filter-fliping defaults
from lasagne.layers import Conv3DLayer, MaxPool3DLayer
from lasagne.layers import Upscale3DLayer
from lasagne.layers import *
from lasagne.layers import Layer
from lasagne.utils import as_tuple
import pickle
import theano.misc.pkl_utils
cachefile = os.path.dirname(os.path.realpath(__file__)) + "/model_hippo.pkl"
if not os.path.exists(cachefile):
# This broadcast-enabled layer is required to apply a 3d-mask along the feature-dimension
# From GH PR #633
    class ElemwiseMergeLayerBroadcast(MergeLayer):
        """
        This layer performs an elementwise merge of its input layers.
        It requires all input layers to have the same output shape.

        Parameters
        ----------
        incomings : Unless `cropping` is given, all shapes must be equal, except
            for dimensions that are undefined (``None``) or broadcastable (``1``).
        merge_function : callable
            the merge function to use. Should take two arguments and return the
            updated value. Some possible merge functions are ``theano.tensor``:
            ``mul``, ``add``, ``maximum`` and ``minimum``.
        cropping : None or [crop]
            Cropping for each input axis. Cropping is described in the docstring
            for :func:`autocrop`

        See Also
        --------
        ElemwiseSumLayer : Shortcut for sum layer.
        """
        def __init__(self, incomings, merge_function, cropping=None, **kwargs):
            super(ElemwiseMergeLayerBroadcast, self).__init__(incomings, **kwargs)
            self.merge_function = merge_function
            self.cropping = cropping
            # Filled in by get_output_shape_for(): per-input list of flags
            # marking the axes that must be broadcast during the merge.
            self.broadcastable = None

        def get_output_shape_for(self, input_shapes):
            input_shapes = autocrop_array_shapes(input_shapes, self.cropping)
            input_dims = [len(shp) for shp in input_shapes]
            # All inputs must have the same rank.
            if not all(input_dim == input_dims[0] for input_dim in input_dims):
                raise ValueError('Input dimensions must be the same but were %s' %
                                 ", ".join(map(str, input_shapes)))
            def broadcasting(input_dim):
                # Identify dimensions that will be broadcasted.
                # input_dim is one axis viewed across all inputs; broadcasting
                # applies when exactly one size > 1 exists and the rest are 1.
                sorted_dim = sorted(input_dim,
                                    key=lambda x: x if x is not None else -1)
                if isinstance(sorted_dim[-1], int) and sorted_dim[-1] != 1 \
                        and all([d == 1 for d in sorted_dim[:-1]]):
                    size_after_broadcast = sorted_dim[-1]
                    broadcast = [True if d == 1 else None for d in input_dim]
                    return ((size_after_broadcast,)*len(input_dim), broadcast)
                else:
                    return (input_dim, [None]*len(input_dim))
            # if the dimension is broadcastable we replace 1's with the size
            # after broadcasting.
            input_dims, broadcastable = list(zip(
                *[broadcasting(input_dim)for input_dim in zip(*input_shapes)]))
            self.broadcastable = list(zip(*broadcastable))
            input_shapes = list(zip(*input_dims))
            # Infer the output shape by grabbing, for each axis, the first
            # input size that is not `None` (if there is any)
            output_shape = tuple(next((s for s in sizes if s is not None), None)
                                 for sizes in zip(*input_shapes))
            def match(shape1, shape2):
                # Shapes agree when every known (non-None) axis size matches.
                return (len(shape1) == len(shape2) and
                        all(s1 is None or s2 is None or s1 == s2
                            for s1, s2 in zip(shape1, shape2)))
            # Check for compatibility with inferred output shape
            if not all(match(shape, output_shape) for shape in input_shapes):
                raise ValueError("Mismatch: not all input shapes are the same")
            return output_shape

        def get_output_for(self, inputs, **kwargs):
            inputs = autocrop(inputs, self.cropping)
            # modify broadcasting pattern.
            if self.broadcastable is not None:
                for n, broadcasting_dim in enumerate(self.broadcastable):
                    for dim, broadcasting in enumerate(broadcasting_dim):
                        if broadcasting:
                            inputs[n] = T.addbroadcast(inputs[n], dim)
            # Left-fold the inputs with merge_function.
            output = None
            for input in inputs:
                if output is not None:
                    output = self.merge_function(output, input)
                else:
                    output = input
            return output
# Definition of the network
conv_num_filters = 48
l = InputLayer(shape = (None, 1, 48, 72, 64), name="input")
l_input = l
# # # #
# encoding
# # # #
l = Conv3DLayer(l, flip_filters=False, num_filters = 16, filter_size = (1,1,3), pad = 'valid', name="conv")
l = Conv3DLayer(l, flip_filters=False, num_filters = 16, filter_size = (1,3,1), pad = 'valid', name="conv")
l_conv_0 = l = Conv3DLayer(l, flip_filters=False, num_filters = 16, filter_size = (3,1,1), pad = 'valid', name="conv")
l = l_conv_f1 = Conv3DLayer(l, flip_filters=False, num_filters = conv_num_filters, filter_size = 3, pad = 'valid', name="conv_f1")
l = l_maxpool1 = MaxPool3DLayer(l, pool_size = 2, name ='maxpool1')
l = BatchNormLayer(l, name="batchnorm")
l = Conv3DLayer(l, flip_filters=False, num_filters = conv_num_filters, filter_size = (3,3,3), pad = "same", name="conv")
l = l_convout1 = Conv3DLayer(l, flip_filters=False, num_filters = conv_num_filters, filter_size = (3, 3, 3), pad = 'same', name ='convout1', nonlinearity = None)
l = ElemwiseSumLayer(incomings = [l_maxpool1, l_convout1], name="sum_1s")
l = NonlinearityLayer(l, nonlinearity = rectify, name="relu")
conv_num_filters2 = 48
l = l_maxpool2 = MaxPool3DLayer(l, pool_size = 2, name = 'maxpool2')
l_maxpool2_conv = l
l = BatchNormLayer(l, name="batchnorm")
l = Conv3DLayer(l, flip_filters=False, num_filters = conv_num_filters2, filter_size = (3,3,3), pad = "same", name="conv")
l = l_convout2 = Conv3DLayer(l, flip_filters=False, num_filters = conv_num_filters2, filter_size = (3, 3, 3), pad = 'same', name ='convout2', nonlinearity = None)
l = ElemwiseSumLayer(incomings = [l_maxpool2_conv, l_convout2], name="sum_2s")
l = NonlinearityLayer(l, nonlinearity = rectify, name="relu")
# # # #
# segmentation
# # # #
l_middle = l
l = Upscale3DLayer(l, scale_factor = 2, name="upscale")
l = Conv3DLayer(l_middle, flip_filters=False, num_filters = conv_num_filters, filter_size | = 3, pad = "same", name="conv")
l = Upscale3DLayer(l, scale_factor = 2, name="upscale")
l = l_convout1 = Conv3DLayer(l, flip_filters=False, num_filters = conv_num_filters, filter_size = 3, pad = 1, name="conv")
l = Upscale3DLayer(l, scale_factor = 2, name="upscale")
l_upscale = l
| l_convout2 = Conv3DLayer(l_upscale, flip_filters=False, num_filters = 16, filter_size = 3, pad = 1, name="conv")
# Original (before refinement) output
l_output1 = Conv3DLayer(l_convout2, flip_filters=False, num_filters = 1, filter_size = 1, pad = 'same', name="conv_1x", nonlinearity =lasagne.nonlinearities.sigmoid )
# # #
# refinement
# # #
## The next output is reusing masked original filters to temptatively improve the network
l_blur = Conv3DLayer(l_output1, flip_filters=False, num_filters=1, filter_size=7, stride=1, pad='same', W=lasagne.init.Constant(1.), b=lasagne.init.Constant(-7*7*7.*.10), nonlinearity=lasagne.nonlinearities.sigmoid)
# in the above, *10 is : threshold at 10% of the sm |
frodrigo/osrm-backend | scripts/gdb_printers.py | Python | bsd-2-clause | 17,883 | 0.011188 | import gdb.printing
# https://sourceware.org/gdb/onlinedocs/gdb/Pretty-Printing.html
# https://sourceware.org/gdb/onlinedocs/gdb/Writing-a-Pretty_002dPrinter.html
# OSRM stores coordinates as fixed-point integers with 6 decimal digits.
COORDINATE_PRECISION = 1e6


def coord2float(x):
    """Convert a fixed-point coordinate (gdb value or int-like) to degrees."""
    # PEP 8 (E731): named lambdas replaced with proper defs; same callables.
    return int(x) / COORDINATE_PRECISION


def lonlat(x):
    """Return (lon, lat) in degrees from a value holding Coordinate fields."""
    return (coord2float(x['lon']['__value']), coord2float(x['lat']['__value']))
def call(this, method, *args):
    """Invoke ``this.method(args)`` in the inferior via gdb.parse_and_eval.

    *this* is a gdb.Value; the expression casts its address back to the
    proper pointer type so the method can be evaluated on the live object.
    """
    argument_list = ','.join(str(a) for a in args)
    pointer_type = this.type.target().pointer()
    expression = '(*({})({})).{}({})'.format(pointer_type, this.address, method, argument_list)
    return gdb.parse_and_eval(expression)
def iterate(v):
    """Yield each element of a libstdc++ std::vector given as a gdb.Value."""
    impl = v['_M_impl']
    cursor = impl['_M_start']
    end = impl['_M_finish']
    while cursor != end:
        yield cursor.dereference()
        cursor += 1
class CoordinatePrinter:
    """Pretty-print an osrm Coordinate as '{lon, lat}' in degrees."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        lon, lat = lonlat(self.val)
        return '{{{}, {}}}'.format(lon, lat)
class TurnInstructionPrinter:
    """Pretty-print a TurnInstruction, naming known type/modifier codes."""

    modifiers = {0: 'UTurn', 1: 'SharpRight', 2: 'Right', 3: 'SlightRight',
                 4: 'Straight', 5: 'SlightLeft', 6: 'Left', 7: 'SharpLeft'}
    types = {0: 'Invalid', 1: 'NewName', 2: 'Continue', 3: 'Turn', 4: 'Merge', 5: 'OnRamp',
             6: 'OffRamp', 7: 'Fork', 8: 'EndOfRoad', 9: 'Notification', 10: 'EnterRoundabout',
             11: 'EnterAndExitRoundabout', 12: 'EnterRotary', 13: 'EnterAndExitRotary',
             14: 'EnterRoundaboutIntersection', 15: 'EnterAndExitRoundaboutIntersection',
             16: 'UseLane', 17: 'NoTurn', 18: 'Suppressed', 19: 'EnterRoundaboutAtExit',
             20: 'ExitRoundabout', 21: 'EnterRotaryAtExit', 22: 'ExitRotary',
             23: 'EnterRoundaboutIntersectionAtExit', 24: 'ExitRoundaboutIntersection',
             25: 'StayOnRoundabout', 26: 'Sliproad'}

    def __init__(self, val):
        self.val = val

    @staticmethod
    def _describe(code, names):
        # Known codes render as "Name (code)"; unknown ones as the bare number.
        if code in names:
            return '%s (%d)' % (names[code], code)
        return str(code)

    def to_string(self):
        type_code = int(self.val['type'])
        modifier_code = int(self.val['direction_modifier'])
        return '{{type = {}, direction_modifier = {}}}'.format(
            self._describe(type_code, self.types),
            self._describe(modifier_code, self.modifiers))
class TurnLaneDataPrinter:
    """Pretty-print a TurnLaneData, naming the lane-mask tag when known."""

    mask = {0: 'Empty', 1: 'None', 2: 'Straight', 4: 'SharpLeft', 8: 'Left', 16: 'SlightLeft',
            32: 'SlightRight', 64: 'Right', 128: 'SharpRight', 256: 'UTurn', 512: 'MergeToLeft',
            1024: 'MergeToRight'}

    def __init__(self, val):
        self.val = val

    def to_string(self):
        tag_value = int(self.val['tag'])
        # Unknown tags fall back to their numeric value.
        tag_repr = self.mask.get(tag_value, tag_value)
        return '{{tag = {}, from = {}, to = {}}}'.format(
            tag_repr, int(self.val['from']), int(self.val['to']))
def build_pretty_printer():
    """Assemble the 'OSRM' pretty-printer collection for gdb registration."""
    collection = gdb.printing.RegexpCollectionPrettyPrinter('OSRM')
    specs = (
        ('TurnInstruction', '::TurnInstruction$', TurnInstructionPrinter),
        ('Coordinate', '::Coordinate$', CoordinatePrinter),
        ('TurnLaneData', '::TurnLaneData$', TurnLaneDataPrinter),
    )
    for name, pattern, printer_class in specs:
        collection.add_printer(name, pattern, printer_class)
    return collection
gdb.pretty_printers = [x for x in gdb.pretty_printers if x.name != 'OSRM'] # unregister OSRM pretty printer before (re)loading
gdb.printing.register_pretty_printer(gdb.current_objfile(), build_pretty_printer())
import geojson
import os
import time
import tempfile
import urllib.parse
import webbrowser
import re
class GeojsonPrinter (gdb.Command):
    """gdb command 'geojson': display supported values on geojson.io.

    Evaluates the argument expression, converts it to GeoJSON features via a
    per-type dispatch table, and opens the result in the default browser.
    """
    def __init__ (self):
        super (GeojsonPrinter, self).__init__ ('geojson', gdb.COMMAND_USER)
        # Dispatch table: C++ type name -> converter producing GeoJSON features.
        self.to_geojson = {
            'osrm::engine::guidance::RouteSteps': self.RouteSteps,
            'std::vector<osrm::engine::guidance::RouteStep, std::allocator<osrm::engine::guidance::RouteStep> >': self.RouteSteps}

    @staticmethod
    def encodeURIComponent(s):
        """Percent-encode *s* like JavaScript's encodeURIComponent."""
        return urllib.parse.quote(s.encode('utf-8'), safe='~()*!.\'')

    @staticmethod
    def RouteSteps(steps):
        """Convert a vector of RouteStep values into GeoJSON features.

        Returns the full road polyline first, followed by one degenerate
        LineString per step carrying the step's fields as properties.
        """
        road, result = [], []
        for step in iterate(steps):
            maneuver, location = step['maneuver'], step['maneuver']['location']
            ll = lonlat(location)
            road.append(ll)
            # Only keep fields with non-empty string representations.
            properties = {field.name: str(step[field.name]) for field in step.type.fields() if str(step[field.name]) != '""'}
            properties.update({'maneuver.' + field.name: str(maneuver[field.name]) for field in maneuver.type.fields()})
            properties.update({'stroke': '#0000ff', 'stroke-opacity': 0.8, 'stroke-width': 15})
            result.append(geojson.Feature(geometry=geojson.LineString([ll, ll]), properties=properties))
        road = geojson.Feature(geometry=geojson.LineString(road), properties={'stroke': '#0000ff', 'stroke-opacity': 0.5, 'stroke-width': 5})
        return [road, *result]

    def invoke (self, arg, from_tty):
        """Entry point for the 'geojson <expression>' gdb command."""
        try:
            val = gdb.parse_and_eval(arg)
            features = self.to_geojson[str(val.type)](val)
            request = self.encodeURIComponent(str(geojson.FeatureCollection(features)))
            webbrowser.open('http://geojson.io/#data=data:application/json,' + request)
        except KeyError as e:
            print ('no GeoJSON printer for: ' + str(e))
        except gdb.error as e:
            # BUG FIX: the original "print('error: ' % (...))" applied % to a
            # string with no conversion specifier, raising TypeError instead
            # of printing the gdb error message.
            print('error: %s' % (e.args[0] if len(e.args) > 0 else 'unspecified'))
            return
GeojsonPrinter()
class SVGPrinter (gdb.Command):
"""
Generate SVG representation within HTML of edge-based graph in facade.
SVG image contains:
- thick lines with arrow heads are edge-based graph nodes with forward (green) and reverse (red) node IDs (large font)
- segments weights are numbers (small font) in the middle of segments in forward (green) or reverse (red) direction
- thin lines are edge-based graph edges in forward (green), backward (red) or both (yellow) directions with
weights, edge-based graph node IDs (source, targte) and some algorithm-specific information
- coordinates of segments end points (node-based graph nodes)
"""
def __init__ (self):
super (SVGPrinter, self).__init__ ('svg', gdb.COMMAND_USER)
self.re_bbox = None
self.to_svg = {
'const osrm::engine::datafacade::ContiguousInternalMemoryDataFacade<osrm::engine::routing_algorithms::ch::Algorithm> &': self.Facade,
'const osrm::engine::datafacade::ContiguousInternalMemoryDataFacade<osrm::engine::routing_algorithms::corech::Algorithm> &': self.Facade,
'const osrm::engine::datafacade::ContiguousInternalMemoryDataFacade<osrm::engine::routing_algorithms::mld::Algorithm> &': self.Facade,
'osrm::engine::routing_algorithms::Facade': self.Facade,
'osrm::engine::DataFacade': self.Facade}
@staticmethod
def show_svg(svg, width, height):
svg = """<!DOCTYPE HTML>
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head><meta http-equiv="content-type" content="application/xhtml+xm | l; charset=utf-8"/>
<style>
svg { background-color: beige; }
.node { stroke: #000; stroke-width: 4; fill: none; marker-end: url(#forward) }
.node.forward { stroke-width: 2; stroke: #0c0; font-family: | sans; font-size: 42px }
.node.reverse { stroke-width: 2; stroke: #f00; font-family: sans; font-size: 42px }
.segment { marker-start: url(#osm-node); marker-end: url(#osm-node); }
.segment.weight { font-family: sans; font-size:24px; text-anchor:middle; stroke-width: 1; }
.segment.weight.forward { stroke: #0c0; fill: #0c0; }
.segment.weight.reverse { stroke: #f00; fill: #f00; }
.edge { stroke: #00f; stroke-width: 2; fill: none; }
.edge.forward { stroke: #0c0; stroke-width: 1; marker-end: url(#forward) }
.edge.backward { stroke: #f00; stroke-width: 1; marker-start: url(#reverse) }
.edge.both { stroke: #fc0; stroke-width: 1; marker-end: url(#forward); marker-start: url(#reverse) }
.coordinates { font-size: 12px; fill: #333 }
</style>
</head>
<svg viewBox="0 0 """ + str(width) + ' ' + str(height) + """"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<marker id="forward" markerWidth="10" markerHeight="10" refX="9" refY="3" orient="au |
thica/ORCA-Remote | src/ORCA/widgets/core/ScrollableLabelLarge.py | Python | gpl-3.0 | 15,619 | 0.013573 | # -*- coding: utf-8 -*-
"""
ORCA Open Remote Control Application
Copyright (C) 2013-2020 Carsten Thielepape
Please contact me by : http://www.orca-remote.org/
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from kivy.lang import Builder
from kivy.uix.recycleview import RecycleView
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.widget import Widget
from kivy.metrics import dp
from kivy.uix.label import Label
from kivy.properties import StringProperty
from kivy.properties import Property
from kivy.properties import BoundedNumericProperty
from kivy.properties import NumericProperty
from kivy.properties import AliasProperty
# noinspection PyProtectedMember
from kivy.properties import dpi2px
from kivy.graphics.opengl import GL_MAX_TEXTURE_SIZE
from ORCA.widgets.core.Label import cLabel
from ORCA.widgets.core.TouchRectangle import cTouchRectangle
from ORCA.utils.TypeConvert import ToUnicode
from ORCA.utils.TypeConvert import ToHex
from ORCA.utils.RemoveNoClassArgs import RemoveNoClassArgs
__all__ = ['cScrollableLabelLarge']
Builder.load_string('''
<cScrollableLabelLargeInner>:
RecycleBoxLayout:
default_size_hint: 1, None
size_hint: None,None
height: self.minimum_height
''')
# noinspection PyUnusedLocal
class cScrollableLabelLarge(Widget):
    """ Main widget to display a large text.

    By default, x and y scrolling is enabled; horizontal scrolling can be
    disabled by passing noxscroll = False. A background color for the label
    is supported via the ``background_color`` keyword argument.

    Implementation: a plain Widget containing an optional background
    rectangle (only if a color is given) and a customized RecycleView
    (cScrollableLabelLargeInner) that does the actual text rendering.
    """
    # Mirrors the inner RecycleView's text; on_text forwards changes down.
    text = StringProperty('')
    #font_size = Property('20sp')
    def __init__(self, **kwargs):
        # Forward everything except geometry kwargs to the inner RecycleView;
        # geometry is managed by this outer widget and pushed down via bind().
        kwargsInner={}
        for k in kwargs:
            if k not in ["size_hint","size","pos","pos_hint"]:
                kwargsInner[k]=kwargs[k]
        self.oScrollableLabelLargeInner=cScrollableLabelLargeInner(**kwargsInner)
        super(self.__class__, self).__init__(**RemoveNoClassArgs(dInArgs=kwargs,oObject=Widget))
        self.oBackGround = None
        if "background_color" in kwargs:
            # Background rectangle sits behind the RecycleView (added first).
            self.oBackGround=cTouchRectangle(size=self.size,pos=self.pos, background_color=kwargs["background_color"])
            self.add_widget(self.oBackGround)
            del kwargs["background_color"]
        self.oScrollableLabelLargeInner.size = self.size
        self.oScrollableLabelLargeInner.pos = self.pos
        self.add_widget(self.oScrollableLabelLargeInner)
        # Keep children in sync whenever this widget moves or resizes.
        self.bind(pos=self.update_graphics_pos,size=self.update_graphics_size)
    def update_graphics_pos(self, instance, value):
        """ Updates the child widget position (background and RecycleView) """
        if self.oBackGround is not None:
            self.oBackGround.pos = value
        self.oScrollableLabelLargeInner.pos = value
    def update_graphics_size(self, instance, value):
        """ Updates the child widget size (background and RecycleView) """
        if self.oBackGround is not None:
            self.oBackGround.size = value
        self.oScrollableLabelLargeInner.size = value
    def IncreaseFontSize(self,*args):
        """ Pass-through to the RecycleView: grow the font. """
        self.oScrollableLabelLargeInner.IncreaseFontSize(args)
    def DecreaseFontSize(self,*args):
        """ Pass-through to the RecycleView: shrink the font. """
        self.oScrollableLabelLargeInner.DecreaseFontSize(args)
    def on_text(self, instance, value):
        """ Pass-through: forward new text to the RecycleView. """
        self.oScrollableLabelLargeInner.text=value
    def on_oOrcaWidget(self, instance, value):
        """ Passes the OrcaWidget down to both child widgets. """
        if self.oBackGround is not None:
            self.oBackGround.oOrcaWidget=value
        self.oScrollableLabelLargeInner.oOrcaWidget=value
    def _get_font_size(self):
        """ Getter for the font_size alias property (reads the inner view). """
        return self.oScrollableLabelLargeInner.fFontSize
    def _set_font_size(self, value):
        """ Setter for the font_size alias property (writes the inner view). """
        self.oScrollableLabelLargeInner.font_size = value
    def EnableWidget(self, *, bEnable:bool) -> None:
        # NOTE(review): annotation corrected from '-> bool'; no code path
        # returns a value. Shows/hides the widget by restoring or zeroing
        # the opacity of both children.
        if bEnable:
            if self.oBackGround:
                self.oBackGround.opacity = self.oScrollableLabelLargeInner.oOrcaWidget.fOrgOpacity
            self.oScrollableLabelLargeInner.opacity = self.oScrollableLabelLargeInner.oOrcaWidget.fOrgOpacity
        else:
            if self.oBackGround:
                self.oBackGround.opacity = 0.0
            self.oScrollableLabelLargeInner.opacity = 0.0
    # Exposes the inner view's font size as a Label-like property.
    font_size = AliasProperty(_get_font_size, _set_font_size)
# noinspection PyUnusedLocal
class cLineLayoutBase(BoxLayout):
    """ Embedded viewclass presenting a single line of text in the RecycleView.

    The RecycleView instantiates this class without arguments, so the outer
    cScrollableLabelLargeInner is injected as a class attribute
    (oScrollableLabelLargeInner) before use; its kwFontArgs configure the
    contained cLabel.
    """
    # text/font_size are set by the RecycleView's data dicts; the on_* handlers
    # forward them to the embedded label.
    text = StringProperty("")
    font_size = NumericProperty(0)
    def __init__(self, **kwargs):
        super(self.__class__,self).__init__(**RemoveNoClassArgs(dInArgs=kwargs,oObject=BoxLayout))
        self.oLabel = cLabel(**self.oScrollableLabelLargeInner.kwFontArgs)
        if self.oScrollableLabelLargeInner.oOrcaWidget is not None:
            self.oLabel.oOrcaWidget = self.oScrollableLabelLargeInner.oOrcaWidget
        self.add_widget(self.oLabel)
    def on_size(self,*largs):
        """ Updates the child label's size (text_size controls wrapping). """
        self.oLabel.height = self.height
        self.oLabel.text_size = self.size
    def on_text(self,instance,value):
        """ Forwards new text to the embedded label. """
        self.oLabel.text=value
    def on_font_size(self,instance,value):
        """ Forwards a new font size to the embedded label. """
        self.oLabel.font_size=value
# noinspection PyProtectedMember,PyUnusedLocal
class cScrollableLabelLargeInner(RecycleView):
""" The "real' scrollable label (without background) """
# to have similar properties as a Label
font_size = Property('20sp')
text = StringProperty('')
oOrcaWidget = Property(None)
# Internal Property which handles fonmt resizing (not working as RecycleView can't manage change of cached widget)
fFontSize = BoundedNumericProperty(dpi2px(20,'sp'), min=4.0, max=96.0,errorhandler=lambda x: 96.0 if x > 96.0 else 4.0)
def __init__(self, **kwargs):
#we create a new class on the fly top ass the font args to the creation process, as the view adapter creates without arguments
self.cLineLayout=type('cLineLayout', cLineLayoutBase.__bases__, dict(cLineLayoutBase.__dict__))
# passes myself to the embedded class. Not good style but Recycleview limits passing customized parameters
self.cLineLayout.oScrollableLabelLargeInner=self
self.oOrcaWidget = kwargs.get('ORCAWIDGET',None)
# maximal len (in chars) of a single ine of the given text
self.iMaxLen = 0
# Setting the scrolltypes / bars for the Recycleview
self.scroll_type = ['bars', 'content']
self.scroll_wheel_distance = dp(114)
self.bar_width = dp(10)
# The original passed Data array
self.aData |
walterbender/speak | eye.py | Python | gpl-3.0 | 4,073 | 0 | # Speak.activity
# A simple front end to the espeak text-to-speech engine on the XO laptop
# http://wiki.laptop.org/go/Speak
#
# Copyright (C) 2008 Joshua Minor
# This file is part of Speak.activity
#
# Parts of Speak.activity are based on code from Measure.activity
# Copyright (C) 2007 Arjun Sarwal - arjun@laptop.org
#
# Speak.activity is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Speak.activity is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Speak.activity. If not, see <http://www.gnu.org/licenses/>.
import math
from gi.repository import Gtk
class Eye(Gtk.DrawingArea):
    """A cartoon eye whose pupil follows a point on screen (xeyes-style)."""

    def __init__(self, fill_color):
        Gtk.DrawingArea.__init__(self)
        self.connect("draw", self.draw)
        # Screen coordinates the eye looks at; None/None means "look ahead".
        self.x, self.y = 0, 0
        self.fill_color = fill_color

    def has_padding(self):
        return True

    # FIX: the original line was corrupted ("def | has_left_center_right"),
    # which made the module unparsable.
    def has_left_center_right(self):
        return False

    def look_at(self, x, y):
        """Point the pupil at screen coordinates (x, y) and redraw."""
        self.x = x
        self.y = y
        self.queue_draw()

    def look_ahead(self):
        """Reset the gaze so the eye looks straight ahead."""
        self.x = None
        self.y = None
        self.queue_draw()

    # Thanks to xeyes :)
    def computePupil(self):
        """Return the (x, y) pupil position within this widget's allocation."""
        a = self.get_allocation()
        if self.x is None or self.y is None:
            # look ahead, but not *directly* in the middle: eyes in the left
            # half of the parent glance right, and vice versa.
            pw = self.get_parent().get_allocation().width
            if a.x + a.width // 2 < pw // 2:
                cx = a.width * 0.6
            else:
                cx = a.width * 0.4
            return cx, a.height * 0.6
        # Centre of the eye in toplevel coordinates.
        EYE_X, EYE_Y = self.translate_coordinates(
            self.get_toplevel(), a.width // 2, a.height // 2)
        EYE_HWIDTH = a.width
        EYE_HHEIGHT = a.height
        BALL_DIST = EYE_HWIDTH / 4
        dx = self.x - EYE_X
        dy = self.y - EYE_Y
        if dx or dy:
            # Clamp the pupil to an ellipse inside the eyeball.
            angle = math.atan2(dy, dx)
            cosa = math.cos(angle)
            sina = math.sin(angle)
            h = math.hypot(EYE_HHEIGHT * cosa, EYE_HWIDTH * sina)
            x = (EYE_HWIDTH * EYE_HHEIGHT) * cosa / h
            y = (EYE_HWIDTH * EYE_HHEIGHT) * sina / h
            dist = BALL_DIST * math.hypot(x, y)
            if dist < math.hypot(dx, dy):
                dx = dist * cosa
                dy = dist * sina
        return a.width // 2 + dx, a.height // 2 + dy

    def draw(self, widget, cr):
        """'draw' handler: paint background, eyeball, outline and pupil."""
        bounds = self.get_allocation()
        eyeSize = min(bounds.width, bounds.height)
        outlineWidth = eyeSize / 20.0
        pupilSize = eyeSize / 10.0
        pupilX, pupilY = self.computePupil()
        dX = pupilX - bounds.width / 2.
        dY = pupilY - bounds.height / 2.
        distance = math.sqrt(dX * dX + dY * dY)
        # Keep the pupil fully inside the outline.
        limit = eyeSize // 2 - outlineWidth * 2 - pupilSize
        if distance > limit:
            pupilX = bounds.width // 2 + dX * limit // distance
            pupilY = bounds.height // 2 + dY * limit // distance
        # background
        cr.set_source_rgba(*self.fill_color.get_rgba())
        cr.rectangle(0, 0, bounds.width, bounds.height)
        cr.fill()
        # eye ball
        cr.arc(bounds.width // 2, bounds.height // 2,
               eyeSize // 2 - outlineWidth // 2, 0, 2 * math.pi)
        cr.set_source_rgb(1, 1, 1)
        cr.fill()
        # outline
        cr.set_line_width(outlineWidth)
        cr.arc(bounds.width // 2, bounds.height // 2,
               eyeSize // 2 - outlineWidth // 2, 0, 2 * math.pi)
        cr.set_source_rgb(0, 0, 0)
        cr.stroke()
        # pupil
        cr.arc(pupilX, pupilY, pupilSize, 0, 2 * math.pi)
        cr.set_source_rgb(0, 0, 0)
        cr.fill()
        return True
|
ckan/ckanext-issues | ckanext/issues/logic/schema/__init__.py | Python | mit | 21 | 0 | from schema | impo | rt *
|
zhewang/lcvis | pca.py | Python | gpl-2.0 | 431 | 0.016241 | from matplotlib.mlab import PCA
i | mport gen_icp as icp
def calculate(ids, matrix, target=None):
results = PCA(matrix)
data = []
for obj_id, row in zip(ids, matrix):
data.append([round(results.project(row)[0],6),
round(results.project(row)[1],6)])
#target = []
data = icp.align(data, target)
#for obj_id, row in zip(ids, data):
| #row.append(obj_id)
return data.tolist()
|
xiaoyongaa/ALL | 函数和常用模块/cash3.py | Python | apache-2.0 | 193 | 0.036269 | import re
import re

# Tokenise the expression into "<digits><operator>" chunks and echo them.
# FIX: several lines were corrupted by stray "|" characters, and the file
# handles shadowed the ``re`` module; handles are now named distinctly.
expression = "2+1+3+3123123"
tokens = re.findall(r"\d{0,}[\+\-]{0,}", expression)
print(tokens)

# Persist the tokens, then read them back and print the file contents.
with open("re.txt", "w") as out_file:
    for token in tokens:
        out_file.write(token)
with open("re.txt", "r") as in_file:
    contents = in_file.read()
print(contents)
tensorflow/tpu | models/official/detection/projects/fashionpedia/configs/factory.py | Python | apache-2.0 | 1,161 | 0.003445 | # Copyright 2019 The TensorFlow A | uthors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide Attribute-Mask R-CNN model configs."""
from projects.fashionpedia.configs import model_config
from hyperparameters import params_dict
def config_generator(model):
  """Return the default ParamsDict for the requested model.

  Args:
    model: Model name; only 'attribute_mask_rcnn' is currently supported.

  Returns:
    A params_dict.ParamsDict built from the model's default config and
    restrictions.

  Raises:
    ValueError: If `model` is not a supported model name.
  """
  if model != 'attribute_mask_rcnn':
    raise ValueError('Model %s is not supported.' % model)
  return params_dict.ParamsDict(model_config.CFG, model_config.RESTRICTIONS)
|
gopalkoduri/vichakshana-public | vichakshana/SASD.py | Python | agpl-3.0 | 7,505 | 0.005463 | from __future__ import division
from os.path import expanduser, exists
from os import chdir, mkdir
import pickle
import numpy as np
from scipy import sparse
import networkx as nx
home = expanduser('~')
chdir(home+'/workspace/vichakshana/vichakshana/')
#CELERY
from mycelery import app
class SASD():
def __init__(self, keyword):
self.keyword = keyword
self.sasd_data = pickle.load(file(home+'/data/text-analysis/vichakshana/SASD/'+keyword+'.pickle'))
self.normalize_sasd_data()
self.cocitation_g = nx.read_graphml(home+'/data/text-analysis/vichakshana/page_graphs/'
+ keyword+'_entitylinks_core_cocitation.graphml', node_type=unicode)
self.fileindex_reverse = pickle.load(file(home+'/data/text-analysis/fileindex/'+keyword+'_fileindex.pickle'))
self.fileindex = {v: k for k, v in self.fileindex_reverse.items()}
self.fileindex_sorted = sorted(self.fileindex.items(), key=lambda x: x[1])
def normalize_sasd_data(self):
max_score = max([i['relevance_score'] for i in self.sasd_data])
for i in self.sasd_data:
i['relevance_score'] /= max_score
def compute_shortest_paths(self):
import graph_tool.all as gt
graph_file = home+'/data/text-analysis/vichakshana/page_graphs/' + self.keyword + '_entitylinks_core.graphml'
g = gt.load_graph(graph_file, fmt='xml')
distance_data = gt.shortest_distance(g)
vertices = list(g.vertices())
rows = []
cols = []
distances = []
for src_v in vertices:
for i in xrange(len(vertices)):
if distance_data[src_v][i] > 100:
continue
rows.append(self.fileindex[unicode(g.vertex_properties['_graphml_vertex_id'][src_v],
encoding='utf-8')])
cols.append(self.fileindex[unicode(g.vertex_properties['_graphml_vertex_id'][vertices[i]],
encoding='utf-8')])
distances.append(distance_data[src_v][i])
n = max(self.fileindex.values())+1 # since the indexing starts with 0
shortest_paths = sparse.coo_matrix((distances, (rows, cols)), shape=(n, n))
shortest_paths = sparse.csr_matrix(shortest_paths).todense()
if not exists(home+'/data/text-analysis/vichakshana/page_graphs/'+self.keyword+'_shortest_paths/'):
mkdir(home+'/data/text-analysis/vichakshana/page_graphs/'+self.keyword+'_shortest_paths/')
for i in xrange(shortest_paths.shape[0]):
pickle.dump(shortest_paths[i], file(home+'/data/text-analysis/vichakshana/page_graphs/'
+ self.keyword+'_shortest_paths/'+str(i)+'.pickle', 'w'))
def get_sasd(self, page_a, page_b, shortest_paths):
"""
There are three parts in this distance:
1. Corelevance based
2. Shortest paths: extended version of direct links
3. Indirect links: cocitation and bibcouling graphs' direct edges
Each distance varies between 0-1.
The returned distance is a weighted average of all these distances.
"""
if page_a != page_b:
#1. Corelevance
co_relevance = []
for group in self.sasd_data:
if page_a in group['pages'] and page_b in group['pages']:
co_relevance.append(group['relevance_score'])
if len(co_relevance) > 0:
similarity_corel = len(co_relevance)+np.average(co_relevance)
else:
similarity_corel = 0
distance_corel = 1/(1+similarity_corel)
#2. Shortest paths
shortest_path_length = shortest_paths[0, self.fileindex[page_b]]-1
if shortest_path_length == -1:
shortest_path_length = np.inf
similarity_shortest_path = 1/(1+shortest_path_length*shortest_path_length)
distance_shortest_path = 1-similarity_shortest_path
#3. Indirect links
try:
cocitation_weight = self.cocitation_g[page_a][page_b]['weight']
except KeyError:
cocitation_weight = 0
distance_cocitation = 1-cocitation_weight
#Finally, the weighted version
distance = 0.25*distance_shortest_path + 0.25*distance_cocitation + 0.50*distance_corel
#print co_relevance, distance_corel, distance_shortest_path, distance_cocitation
else:
distance = 1
return distance
#CELERY
@app.task
def get_sasd_celery(self, page_a):
pages = self.fileindex.keys()
distances = []
shortest_paths = pickle.load(file(home+'/data/text-analysis/vichakshana/page_graphs/'
+ self.keyword+'_shortest_paths/'+str(self.fileindex[page_a])+'.pickle'))
for page_b in pages:
distances.append((self.fileindex[page_b], self.get_sasd(page_a, page_b, shortest_paths)))
if not exists(home+'/data/text-analysis/vichakshana/SASD/'+self.keyword+'/'):
mkdir(home+'/data/text-analysis/vichakshana/SASD/'+self.keyword)
pickle.dump(distances, file(home+'/data/text-analysis/vichakshana/SASD/' +
self.keyword+'/'+str(self.fileindex[page_a])+'.pickle', 'w'))
def get_sasd_cluster(self, index_a):
if exists(home+'/data/text-analysis/vichakshana/SASD/' + self.keyword+'/'+str(index_a)+'.pickle'):
return
page_a = self.fileindex_reverse[index_a]
pages = self.fileindex.keys()
distances = []
shortest_paths = pickle.load(file(home+'/data/text-analysis/vichakshana/page_graphs/'
+ self.keyword+'_shortest_paths/'+str(index_a)+'.pickle'))
for page_b in pages:
distances.append((self.fileindex[page_b], self.get_sasd(page_a, page_b, shortest_paths)))
if not exists(home+'/data/text-analysis/vichakshana/SASD/'+self.keyword+'/'):
mkdir(home+'/data/text-analysis/vichakshana/SASD/'+self.keyword)
pickle.dump(distances, file(home+'/data/text-analysis/vichakshana/SASD/' +
self.keyword+'/'+str(self.fileindex[page_a])+'.pickle', 'w'))
def compute_sasds(self, n_jobs=0):
submitted_jobs = 0
for page_a, index_a in self.fileindex_sorted:
#CELERY
if exists(home+'/data/text-analysis/vichakshana/SASD/' + self.keyword+'/'+str(index_a)+'.pickle'):
continue
self.get_sasd_celery.apply_async((self, page_a,))
if n_jobs > 0:
submitted_jobs += 1
if submitted_jobs >= n_jobs:
print index_a
break
def get_related(self, page, n=10, distance_threshold=0.5):
distance_data = pickle.load(file(home+'/data/text-analysis/vichakshana/SASD/' +
self.keyword+'/'+str(self.fileindex[page])+'.p | ickle'))
distance_data = sorted(dis | tance_data, key=lambda x: x[1])
related_entities = []
for i in distance_data:
if i[1] >= distance_threshold:
break
if len(related_entities) < n:
related_entities.append((self.fileindex_reverse[i[0]], i[1]))
return related_entities
|
timj/scons | test/Builder/multi/different-actions.py | Python | mit | 2,193 | 0.000456 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that environments with actions that have different signatures
generate an error.
"""
import TestSCons

test = TestSCons.TestSCons(match=TestSCons.match_re)

# SConstruct: two environments with different XXX values drive the same
# multi builder at the same target, which must be rejected.
# FIX: stray "|" corruption removed from the env2/file6b lines.
test.write('SConstruct', """\
def build(env, target, source):
    file = open(str(target[0]), 'wb')
    for s in source:
        file.write(open(str(s), 'rb').read())
B = Builder(action=Action(build, varlist=['XXX']), multi=1)
env = Environment(BUILDERS = { 'B' : B }, XXX = 'foo')
env2 = env.Clone(XXX = 'var')
env.B(target = 'file6.out', source = 'file6a.in')
env2.B(target = 'file6.out', source = 'file6b.in')
""")

test.write('file6a.in', 'file6a.in\n')
test.write('file6b.in', 'file6b.in\n')

expect = TestSCons.re_escape("""
scons: *** Two environments with different actions were specified for the same target: file6.out
""") + TestSCons.file_expr

# FIX: test.pass_test() was called before test.run(); pass_test() exits the
# process, so the actual scons invocation was never executed.
test.run(arguments='file6.out', status=2, stderr=expect)

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
quantifiedcode-bot/invenio-base | tests/test_apps/flash_msg/views.py | Python | gpl-2.0 | 1,351 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test Application f | or displaying flash messages on base page."""
from flask import Blueprint, flash, render_template
from invenio_base.decorators import wash_arguments
blueprint = Blueprint('flash_msg', __name__, url_prefix='/flash_msg',
template_folder='templates', static_folder='static')
@blueprint.route('/')
@wash_arguments({"context": (unicode, ""), "message": (unicode, None)})
def index(context, message=None):
    """Flash ``message`` with category ``context`` and render the base page.

    Both values come from the query string via wash_arguments: ``context``
    defaults to the empty string and ``message`` to None.
    """
    flash(message, context)
    return render_template("page.html")
|
1egoman/quail-flask | src/people.py | Python | mit | 1,454 | 0.020633 | from datetime import datetime
from json import loads, dumps
import os
DEFAULTCONFIG = """{"people": []}"""
PERSONTEMPLATE = {"name": None, "tags": None, "birthday": None, "pic": None, "frequency": 0}
class PeopleContainer(object):
def __init__(self, app):
self.data = []
self.cfgpath = os.path.join(app.get_root(), "config", "people.json")
# see if config exists
if not os.path.exists(self.cfgpath):
with open( self.cfgpath, 'w' ) as f:
f.write(DEFAULTCONFIG)
# read config
with open( self.cfgpath, 'r' ) as f:
r = f.read()
if len(r):
self.data = loads( r )["people"]
else:
self.data = []
# convert to datetime objects
for p in self.data:
for k,v in p.items():
# try and convert to dateime object, if possible
try:
p[k] = datetime.strptime(v, '%c')
except TypeError: pass
except ValueError: pass
def __iter__(self): return iter(self.data)
def sync(self):
""" Sync the file and the program's list """
with open( self.cfgpath, 'w' ) as f:
f.write( dumps({"people": self.data}, indent=2) )
def add_person(self, **person):
""" Add a new person to the list of known people """
person = PERSONTEMPLATE.copy().update(person)
self.data.append(person)
class Person(dict):
    """Dict whose entries are also readable/writable as attributes.

    FIX: the original line was corrupted by stray '|' characters, and
    ``super(self, Person)`` passed super()'s arguments in the wrong order
    (it must be ``super(Person, self)``), raising TypeError on construction.
    """
    def __init__(self, *args, **kwargs):
        super(Person, self).__init__(*args, **kwargs)
        # Aliasing __dict__ to the dict itself makes attribute access and
        # item access share the same storage.
        self.__dict__ = self
linuxmidhun/0install | tests/basetest.py | Python | lgpl-2.1 | 8,927 | 0.031814 | #!/usr/bin/env python
import locale
locale.setlocale(locale.LC_ALL, 'C')
import sys, tempfile, os, shutil, imp, time
import unittest, subprocess
import logging
import warnings
from xml.dom import minidom
if sys.version_info[0] > 2:
from io import StringIO, BytesIO
else:
from StringIO import StringIO
BytesIO = StringIO
warnings.filterwarnings("ignore", message = 'The CObject type')
# Catch silly mistakes...
os.environ['HOME'] = '/home/idontexist'
os.environ['LANGUAGE'] = 'C'
os.environ['LANG'] = 'C'
sys.path.insert(0, '..')
from zeroinstall.injector import qdom, background, namespaces
from zeroinstall.injector import iface_cache, download, distro, model, handler, reader, trust
from zeroinstall.zerostore import NotStored, Store, Stores; Store._add_with_helper = lambda *unused, **kwargs: False
from zeroinstall import support, cmd
from zeroinstall.support import basedir, tasks
def skipIf(condition, reason):
	"""Decorator factory: replace the test with a no-op when *condition* holds.

	A "Skipped <name>: <reason>" notice is printed at decoration time;
	otherwise the underlying test function is returned unchanged.
	"""
	def wrapped(underlying):
		if not condition:
			return underlying
		# func_name exists on Python 2 functions, __name__ everywhere.
		name = getattr(underlying, 'func_name', underlying.__name__)
		print("Skipped %s: %s" % (name, reason))
		def run(self):
			pass
		return run
	return wrapped
class BackgroundException(Exception):
	"""Raised in place of spawning a real background update process."""
	pass


def throw_background():
	"""Abort any attempt to fork a background process during tests."""
	raise BackgroundException("Tried to spawn background process")
dpkgdir = os.path.join(os.path.dirname(__file__), 'dpkg')
empty_feed = qdom.parse(BytesIO(b"""<interface xmlns='http://zero-install.sourceforge.net/2004/injector/interface'>
<name>Empty</name>
<summary>just for testing</summary>
</interface>"""))
import my_dbus
sys.modules['dbus'] = my_dbus
sys.modules['dbus.glib'] = my_dbus
my_dbus.types = my_dbus
sys.modules['dbus.types'] = my_dbus
sys.modules['dbus.mainloop'] = my_dbus
sys.modules['dbus.mainloop.glib'] = my_dbus
mydir = os.path.dirname(__file__)
ocaml_0install = os.path.join(mydir, '..', 'build', 'ocaml', '0install')
class ExecMan(Exception):
    """Raised instead of exec'ing man(1); records the argv it was given."""

    def __init__(self, args):
        super(ExecMan, self).__init__('ExecMan')
        self.man_args = args
# Catch us trying to run the GUI and return a dummy string instead
old_execvp = os.execvp
def test_execvp(prog, args):
    """Replacement for os.execvp used during tests.

    Redirects the 0launch GUI helper to a dummy script and turns any
    attempt to run man(1) into an ExecMan exception; everything else is
    passed through to the real execvp.
    """
    if prog == sys.executable and args[1].endswith('/0launch-gui'):
        # Run the stub GUI script instead of the real one.
        prog = os.path.join(mydir, 'test-gui')
    if prog == 'man':
        raise ExecMan(args)
    return old_execvp(prog, args)
os.execvp = test_execvp
test_locale = (None, None)
assert model.locale
class TestLocale:
    """Fake locale module returning the test-controlled ``test_locale``."""
    LC_ALL = 'LC_ALL'  # Note: LC_MESSAGES not present on Windows

    def getlocale(self, x=None):
        # Tests must never query LC_ALL directly.
        assert x is not self.LC_ALL
        return test_locale
model.locale = TestLocale()
class DummyPackageKit:
    """PackageKit stub that reports itself unavailable."""
    available = False

    def get_candidates(self, package, factory, prefix):
        """No-op: a disabled PackageKit never yields any candidates."""
        return None
class DummyHandler(handler.Handler):
    # Handler that, instead of displaying errors, stores the first one
    # reported so wait_for_blocker() can re-raise it in the test thread.
    __slots__ = ['ex', 'tb', 'allow_downloads']

    def __init__(self):
        handler.Handler.__init__(self)
        self.ex = None  # first error reported during the current wait, if any
        self.allow_downloads = False

    def wait_for_blocker(self, blocker):
        # Reset before waiting so stale errors from a previous wait
        # cannot leak into this one.
        self.ex = None
        handler.Handler.wait_for_blocker(self, blocker)
        if self.ex:
            # Re-raise with the original traceback for a useful failure.
            support.raise_with_traceback(self.ex, self.tb)

    def report_error(self, ex, tb = None):
        # Only one error is expected per wait; a second report is a test bug.
        assert self.ex is None, self.ex
        self.ex = ex
        self.tb = tb
class DummyKeyInfo:
    """Fake key-information lookup result that always votes "bad"."""

    def __init__(self, fpr):
        self.fpr = fpr
        self.blocker = None
        self.info = [minidom.parseString('<item vote="bad"/>')]
class TestFetcher:
    # Fetcher stub: every download must be whitelisted in advance by the
    # test via allow_download()/allow_feed_download(); anything else asserts.
    def __init__(self, config):
        self.allowed_downloads = set()      # implementation digests we may "fetch"
        self.allowed_feed_downloads = {}    # feed URL -> canned feed XML
        self.config = config

    def allow_download(self, digest):
        # Only meaningful with the fake store, which records fake impls.
        assert isinstance(self.config.stores, TestStores)
        self.allowed_downloads.add(digest)

    def allow_feed_download(self, url, feed_xml):
        assert isinstance(feed_xml, support.basestring), feed_xml
        self.allowed_feed_downloads[url] = feed_xml

    def download_impls(self, impls, stores):
        # NOTE(review): 'async' became a keyword in Python 3.7; this code
        # predates that and only parses on older interpreters.
        @tasks.async
        def fake_download():
            yield
            # Each impl must have been whitelisted; consume the allowance
            # and record the impl in the fake store.
            for impl in impls:
                assert impl.id in self.allowed_downloads, impl
                self.allowed_downloads.remove(impl.id)
                self.config.stores.add_fake(impl.id)
        return fake_download()

    def download_and_import_feed(self, feed_url, iface_cache, force = False):
        @tasks.async
        def fake_download():
            yield
            feed_xml = self.allowed_feed_downloads.get(feed_url, None)
            assert feed_xml, feed_url
            if not isinstance(feed_xml, bytes):
                feed_xml = feed_xml.encode('utf-8')
            # Feed import timestamped "now", mimicking a network fetch.
            self.config.iface_cache.update_feed_from_network(feed_url, feed_xml, int(time.time()))
            del self.allowed_feed_downloads[feed_url]
        return fake_download()

    def fetch_key_info(self, fingerprint):
        # Key info is always the canned "bad" vote; see DummyKeyInfo.
        return DummyKeyInfo(fingerprint)
class TestStores:
    """In-memory stand-in for an implementation store.

    Digests registered via add_fake() resolve to paths under /fake_store/.
    """

    def __init__(self):
        self.fake_impls = set()

    def add_fake(self, digest):
        """Pretend the implementation with this digest has been stored."""
        self.fake_impls.add(digest)

    def lookup_maybe(self, digests):
        """Return a fake path for the first known digest, or None."""
        return next(
            ('/fake_store/' + d for d in digests if d in self.fake_impls),
            None)

    def lookup_any(self, digests):
        """Like lookup_maybe(), but raise NotStored when nothing matches."""
        path = self.lookup_maybe(digests)
        if path is None:
            raise NotStored()
        return path
class TestConfig:
    """Minimal stand-in for zeroinstall's Config with test doubles wired in."""
    freshness = 0               # never treat cached feeds as stale
    help_with_testing = False
    network_use = model.network_full
    key_info_server = None      # no network key-info lookups during tests
    auto_approve_keys = False
    mirror = None

    def __init__(self):
        self.iface_cache = iface_cache.IfaceCache()
        self.handler = DummyHandler()
        self.stores = Stores()          # real store; tests may swap in TestStores
        self.fetcher = TestFetcher(self)
        self.trust_db = trust.trust_db  # shared module-level trust database
        self.trust_mgr = trust.TrustMgr(self)
class BaseTest(unittest.TestCase):
def setUp(self):
    # Fail loudly if any test tries to spawn a background process.
    background._detach = throw_background
    warnings.resetwarnings()
    # Point every XDG base directory at a fresh temporary location so
    # tests cannot read from, or write to, the real user configuration.
    self.config_home = tempfile.mktemp()
    self.cache_home = tempfile.mktemp()
    self.cache_system = tempfile.mktemp()
    self.data_home = tempfile.mktemp()
    self.gnupg_home = tempfile.mktemp()
    os.environ['GNUPGHOME'] = self.gnupg_home
    os.environ['XDG_CONFIG_HOME'] = self.config_home
    os.environ['XDG_CONFIG_DIRS'] = ''
    os.environ['XDG_CACHE_HOME'] = self.cache_home
    os.environ['XDG_CACHE_DIRS'] = self.cache_system
    os.environ['XDG_DATA_HOME'] = self.data_home
    os.environ['XDG_DATA_DIRS'] = ''
    if 'ZEROINSTALL_PORTABLE_BASE' in os.environ:
        del os.environ['ZEROINSTALL_PORTABLE_BASE']
    # basedir caches the XDG paths at import time; reload so it picks up
    # the environment variables set above.
    imp.reload(basedir)
    assert basedir.xdg_config_home == self.config_home
    os.mkdir(self.config_home, 0o700)
    os.mkdir(self.cache_home, 0o700)
    os.mkdir(self.cache_system, 0o500)  # system cache is read-only
    os.mkdir(self.gnupg_home, 0o700)
    if 'DISPLAY' in os.environ:
        del os.environ['DISPLAY']       # never let a test open a GUI
    self.config = TestConfig()
    iface_cache.iface_cache = self.config.iface_cache
    logging.getLogger().setLevel(logging.WARN)
    download._downloads = {}
    # Put the stub dpkg tools ahead of the host's packaging tools.
    self.old_path = os.environ['PATH']
    os.environ['PATH'] = self.config_home + ':' + dpkgdir + ':' + self.old_path
    distro._host_distribution = distro.DebianDistribution(dpkgdir + '/status')
    distro._host_distribution._packagekit = DummyPackageKit()
    my_dbus.system_services = {}
    # Reset the shared trust database between tests.
    trust.trust_db.watchers = []
    trust.trust_db.keys = None
    trust.trust_db._dry_run = False
def tearDown(self):
    # Surface any error a handler recorded but nothing re-raised.
    if self.config.handler.ex:
        support.raise_with_traceback(self.config.handler.ex, self.config.handler.tb)
    shutil.rmtree(self.config_home)
    support.ro_rmtree(self.cache_home)  # cache may contain read-only entries
    shutil.rmtree(self.cache_system)
    shutil.rmtree(self.gnupg_home)
    os.environ['PATH'] = self.old_path
def run_ocaml(self, args, stdin = None, stderr = subprocess.PIPE, binary = False):
    """Run the OCaml 0install binary and return (stdout, stderr).

    On a non-zero exit the status is appended to the stderr text so
    callers can match on it.
    """
    proc = subprocess.Popen([ocaml_0install] + args,
            stdin = subprocess.PIPE if stdin is not None else None,
            stdout = subprocess.PIPE, stderr = stderr,
            universal_newlines = not binary)
    out, err = proc.communicate(stdin)
    status = proc.wait()
    if status != 0:
        msg = "Exit status: %d\n" % status
        if binary:
            msg = msg.encode('utf-8')
        err += msg
    return out, err
def import_feed(self, url, contents):
"""contents can be a path or an Element."""
iface_cache = self.config.iface_cache
iface_cache.get_interface(url)
if isinstance(contents, qdom.Element):
feed = model.ZeroInstallFeed(contents)
else:
feed = reader.load_feed(contents)
iface_cache._feeds[url | ] = feed
xml = qdom.to_UTF8(feed.feed_element)
upstream_dir = basedir.save_cache_path(namespaces.config_site, 'interfaces')
cached = os.path.join(upstrea | m_dir, model.esca |
jorvis/biocode | taxonomy/create_taxonomic_profile_from_blast.py | Python | mit | 11,261 | 0.014031 | #!/usr/bin/env python3.2
import argparse
import os
import re
import sqlite3
from collections import OrderedDict
from biocode.utils import read_list_file
def main():
"""This is the second script I've written in Python. I'm sure it shows."""
parser = argparse.ArgumentParser( description='Reads a BLAST m8 file and taxonomy DB to produce a taxonomic profile at any user-specified ranking level.')
## input formats: btab, blast_m8
parser.add_argument('-f', '--input_format', type=str, required=True, help='Blast format: current options are btab or blast_m8' )
## The SQLite3 | file that will be read for taxonomy information
parser.add_argument( | '-t', '--taxonomy_db', type=str, required=True, help='Path to a taxonomy.db file created by "create_taxonomy_db.py"' )
## BLAST list file
parser.add_argument('-b', '--blast_list_file', type=str, required=True, help='List of BLAST files (m8 format)' )
## output file to be written
parser.add_argument('-o', '--output_file', type=str, required=True, help='Path where the result file should written' )
## E-value cutoff to use
parser.add_argument('-e', '--eval_cutoff', type=float, required=False, help='Optional E-value cutoff to use.' )
## Top N hits per query to score. Only counts those where the taxon could be looked up in the indexes
parser.add_argument('-n', '--top_n', type=int, required=False, default=1, help=' Top N hits per query to score. Only counts unique taxon matches which could be looked up in the indexes' )
## rank on which matches will be grouped and reported. values like: species, genus, order, family, etc.
parser.add_argument('-r', '--rank', type=str, required=True, help='Taxonomy rank on which to group all matches, such as: species, genus, order, family, etc.' )
args = parser.parse_args()
conn = sqlite3.connect( args.taxonomy_db )
c = conn.cursor()
blast_files = read_list_file( args.blast_list_file )
taxon_counts = {}
processed_file_count = 0
stats = {}
stats['gi_lookup_success_count'] = 0
stats['gi_lookup_fail_count'] = 0
stats['taxon_lookup_success_count'] = 0
stats['taxon_lookup_failure_count'] = 0
for file in blast_files:
print("Processing file: ", file)
if args.input_format == 'blast_m8' or args.input_format == 'btab':
parse_blast_file( file, c, taxon_counts, args.eval_cutoff, args.input_format, stats, args.top_n )
else:
raise Exception("Unsupported input format passed: {0}".format(args.input_format) )
processed_file_count += 1
#if processed_file_count == 50:
#break
## process the taxon counts, conforming them to the user-specified rank
result_table = group_taxa_by_rank( args.rank, taxon_counts, c )
node_names = get_selected_node_names( result_table, c )
c.close()
fout = open(args.output_file, mode='w')
## write the results to the output file in order of most-found clade first
for tax_id in OrderedDict(sorted(result_table.items(), reverse=True, key=lambda t: t[1])):
sci_name = ''
if tax_id in node_names:
sci_name = node_names[tax_id]
fout.write( "{0}\t{1}\t{2}\n".format(tax_id, int(result_table[tax_id]), sci_name ) )
fout.close()
print("INFO: successful GI lookups: {0}/{1}".format(stats['gi_lookup_success_count'], \
(stats['gi_lookup_fail_count'] + stats['gi_lookup_success_count'])) )
print("INFO: successful taxon lookups: {0}/{1}".format( stats['taxon_lookup_success_count'], \
(stats['taxon_lookup_success_count'] + stats['taxon_lookup_failure_count']) ) )
def get_selected_node_names(res_table, cursor):
    """Map each tax_id in res_table to its scientific name.

    Looks each ID up in the ``orgs`` table; IDs without a match are
    reported on stdout and omitted from the result.
    """
    names = {}
    for tax_id in res_table:
        cursor.execute('''SELECT scientific_name FROM orgs WHERE tax_id=?''', (tax_id,))
        hit = cursor.fetchone()
        if hit is None:
            print("WARN: failed to get scientific name for tax_id:", tax_id)
        else:
            names[tax_id] = hit[0]
    return names
def get_ranked_taxon(tax_id, c, rank, rec_depth):
    """Walk up the taxonomy tree until a node of the requested rank is found.

    Returns the tax_id of the ranked ancestor (or the node itself), or
    None when the node is missing or the walk recurses too deeply.
    """
    c.execute("""SELECT parent_tax_id, rank FROM nodes WHERE tax_id = ?""", (tax_id,))
    row = c.fetchone()
    # Guard against cycles / malformed trees.
    if rec_depth > 20:
        print("WARN: deep recursion detected for tax ID:", tax_id)
        return None
    if row is None:
        print("WARN: unable to find ranked taxon for tax_id:", tax_id)
        return None
    if row[1] == rank:
        return tax_id
    return get_ranked_taxon(row[0], c, rank, rec_depth + 1)
def group_taxa_by_rank(rank, counts, cursor):
    """Regroup raw taxon counts at the requested taxonomic rank.

    Each taxon is walked up the tree until a node of the passed rank is
    reached; counts are summed per ranked node.  Taxa that cannot be
    resolved to the rank are dropped.
    """
    ranked_counts = {}
    unranked_total = 0
    for taxon_id in counts:
        ranked_id = get_ranked_taxon(taxon_id, cursor, rank, 0)
        if not ranked_id:
            unranked_total += 1
            continue
        ranked_counts[ranked_id] = ranked_counts.get(ranked_id, 0) + counts[taxon_id]['n']
    return ranked_counts
def parse_blast_file( file, cursor, tax, eval_cutoff, format, stats, hits_per_query ):
""" For each query sequence find the top match above the E-val cutoff (if any)
which has an NCBI taxonomy assignment.
"""
if ( not os.path.isfile(file) ):
raise Exception("Couldn't find file: " + file)
## presets are for ncbi_m8, for which lines should have 12 columns. they are
# 1: Query - The query sequence id
# 2: Subject - The matching subject sequence id
# 3: Percent identity
# 4: alignment length
# 5: mismatches
# 6: gap openings
# 7: q.start
# 8: q.end
# 9: s.start
# 10: s.end
# 11: e-value
# 12: bit score
ID_COLUMN_NUM = 0
SUBJECT_LABEL_COLUMN_NUM = 1
EVAL_COLUMN_NUM = 10
ALIGN_LEN_COLUMN_NUM = 3
BIT_SCORE_COLUMN_NUM = 11
if format == 'btab':
ID_COLUMN_NUM = 0
SUBJECT_LABEL_COLUMN_NUM = 5
EVAL_COLUMN_NUM = 19
current_id = ""
current_id_classified = False
current_id_match_count = 0
current_match_ids = dict()
for line in open(file, "r"):
cols = line.split("\t")
if len(cols) >= 10:
this_id = cols[ID_COLUMN_NUM]
## this controls that we only look at the top hit for query
if this_id != current_id:
current_id_classified = False
current_id_match_ids = dict()
current_id_match_count = 0
current_id = this_id
if current_id_match_count < hits_per_query:
if eval_cutoff is None or eval_cutoff >= float(cols[EVAL_COLUMN_NUM]):
#print("DEBUG: attempting to parse a GI for header: ({0})".format(cols[SUBJECT_LABEL_COLUMN_NUM]) )
gi = parse_gi(cols[SUBJECT_LABEL_COLUMN_NUM], cursor)
if gi:
#print("DEBUG: Got a GI ({0}) for hit with id this_id".format(gi))
stats['gi_lookup_success_count'] += 1
taxon_id = get_taxon_id_by_gi(gi, cursor)
if taxon_id:
stats['taxon_lookup_success_count'] += 1
if taxon_id not in current_match_ids:
#print("DEBUG: adding match to taxon_id: {0}".format(taxon_id) )
match_score = int(cols[BIT_SCORE_COLUMN_NUM])/int(cols[ALIGN_LEN_COLUMN_NUM])
|
samueladam/worldgame | src/worldgame/forms.py | Python | bsd-3-clause | 737 | 0 | # -*- coding: utf-8 -*-
from django.contrib.gis import admin
from django import forms
from .models import Country
# create a geoadmin instance
geoadmin = admin.GeoModelAdmin(Country, admin.site)
geoadmin.num_zoom = 4
geoadmin.modifiable = False
geoadmin.layerswitcher = False
geoadmin.mouse_position = False
geoadmin.scale_text = False
# get the Open Layers widget for t | he geom field
field = Country._meta.get_field('geom')
widget = geoadmin.get_map_widget(f | ield)
class CountryForm(forms.ModelForm):
    "This form is a hack to display an OpenLayers map."
    # Geometry is edited as text but rendered with the admin's OL widget
    # built at module level from the Country.geom field.
    geom = forms.CharField(widget=widget)

    class Meta:
        model = Country
        fields = ('name', 'geom',)

    class Media:
        # Load the OpenLayers JavaScript that the map widget requires.
        js = (geoadmin.openlayers_url,)
|
minhphung171093/GreenERP_V9 | openerp/addons/l10n_no/__openerp__.py | Python | gpl-3.0 | 630 | 0.015898 | # -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
"name" : "Norway - Accounting",
"version" : "1.1",
" | author" : "Rolv Råen",
"category" : "Localization/Account Charts",
"description": """This is the module to manage the accounting chart for Norway in Odoo.
Updated for Odoo 9 by Bringsvor Consulting AS <www.bringsvor.com>
""",
"depends" : ["account", "base_iban", "base_vat"],
"demo_xml" : [],
"data" : ["account_chart.xml",
'account_tax.xml','account_chart_template.yml'],
"active": False,
"in | stallable": True
}
|
Jorge-Rodriguez/ansible | lib/ansible/modules/cloud/vmware/vmware_host_vmnic_facts.py | Python | gpl-3.0 | 14,150 | 0.003322 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# Copyright: (c) 2018, Christian Kotte <christian.kotte@gmx.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_vmnic_facts
short_description: Gathers facts about vmnics available on the given ESXi host
description:
- This module can be used to gather facts about vmnics available on the given ESXi host.
- If C(cluster_name) is provided, then vmnic facts about all hosts from given cluster will be returned.
- If C(esxi_hostname) is provided, then vmnic facts about given host system will be returned.
- Additional details about vswitch and dvswitch with respective vmnic is also provided which is added in 2.7 version.
version_added: '2.5'
author:
- Abhijeet Kasurde (@Akasurde)
- Christian Kotte (@ckotte)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
capabilities:
description:
- Gather facts about general capabilities (Auto negotioation, Wake On LAN, and Network I/O Control).
type: bool
default: false
version_added: 2.8
directpath_io:
description:
- Gather facts about DirectPath I/O capabilites and configuration.
type: bool
default: false
version_added: 2.8
sriov:
description:
- Gather facts about SR-IOV capabilites and configuration.
type: bool
default: false
version_added: 2.8
esxi_hostname:
description:
- Name of the host system to work with.
- Vmnic facts about this ESXi server will be returned.
- This parameter is required if C(cluster_name) is not specified.
type: str
cluster_name:
description:
- Name of the cluster from which all host systems will be used.
- Vmnic facts about each ESXi server will be returned for the given cluster.
- This parameter is required if C(esxi_hostname) is not specified.
type: str
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather facts about vmnics of all ESXi Host in the given Cluster
vmware_host_vmnic_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: '{{ cluster_name }}'
delegate_to: localhost
register: cluster_host_vmnics
- name: Gather facts about vmnics of an ESXi Host
vmware_host_vmnic_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
delegate_to: localhost
register: host_vmnics
'''
RETURN = r'''
hosts_vmnics_facts:
description:
- dict with hostname as key and dict with vmnics facts as value.
- for C(num_vmnics), only NICs starting with vmnic are counted. NICs like vusb* are not counted.
- details about vswitch and dvswitch was added in version 2.7.
- details about vmnics was added in version 2.8.
returned: hosts_vmnics_facts
type: dict
sample:
{
"10.76.33.204": {
"all": [
"vmnic0",
"vmnic1"
],
"available" | : [],
"dvswitch": {
"dvs_0002": [
"vmnic1"
]
},
"num_vmnics": 2,
"used": [
"vmnic1",
"vmnic0"
],
"vmnic_details": [
{
"actual_duplex": "Full Duplex",
"actua | l_speed": 10000,
"adapter": "Intel(R) 82599 10 Gigabit Dual Port Network Connection",
"configured_duplex": "Auto negotiate",
"configured_speed": "Auto negotiate",
"device": "vmnic0",
"driver": "ixgbe",
"location": "0000:01:00.0",
"mac": "aa:bb:cc:dd:ee:ff",
"status": "Connected",
},
{
"actual_duplex": "Full Duplex",
"actual_speed": 10000,
"adapter": "Intel(R) 82599 10 Gigabit Dual Port Network Connection",
"configured_duplex": "Auto negotiate",
"configured_speed": "Auto negotiate",
"device": "vmnic1",
"driver": "ixgbe",
"location": "0000:01:00.1",
"mac": "ab:ba:cc:dd:ee:ff",
"status": "Connected",
},
],
"vswitch": {
"vSwitch0": [
"vmnic0"
]
}
}
}
'''
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi, get_all_objs
class HostVmnicMgr(PyVmomi):
"""Class to manage vmnic facts"""
def __init__(self, module):
    """Resolve the target hosts and remember which optional fact groups to collect."""
    super(HostVmnicMgr, self).__init__(module)
    # Optional fact groups requested by the user.
    self.capabilities = self.params.get('capabilities')
    self.directpath_io = self.params.get('directpath_io')
    self.sriov = self.params.get('sriov')
    cluster_name = self.params.get('cluster_name', None)
    esxi_host_name = self.params.get('esxi_hostname', None)
    # Either a whole cluster or a single ESXi host may be targeted.
    self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
    if not self.hosts:
        self.module.fail_json(msg="Failed to find host system.")
def find_dvs_by_uuid(self, uuid=None):
    """Return the DistributedVirtualSwitch with the given UUID, or None."""
    if uuid is None:
        return None
    for dvs in get_all_objs(self.content, [vim.DistributedVirtualSwitch]):
        if dvs.uuid == uuid:
            return dvs
    return None
def gather_host_vmnic_facts(self):
"""Gather vmnic facts"""
hosts_vmnic_facts = {}
for host in self.hosts:
host_vmnic_facts = dict(all=[], available=[], used=[], vswitch=dict(), dvswitch=dict())
host_nw_system = host.configManager.networkSystem
if host_nw_system:
nw_config = host_nw_system.networkConfig
host_vmnic_facts['all'] = [pnic.device for pnic in nw_config.pnic]
host_vmnic_facts['num_vmnics'] = (
len(filter(lambda s: s.startswith('vmnic'), [pnic.device for pnic in nw_config.pnic]))
)
host_vmnic_facts['vmnic_details'] = []
for pnic in host.config.network.pnic:
pnic_facts = dict()
if pnic.device.startswith('vmnic'):
if pnic.pci:
pnic_facts['location'] = pnic.pci
for pci_device in host.hardware.pciDevice:
if pci_device.id == pnic.pci:
pnic_facts['adapter'] = pci_device.vendorName + ' ' + pci_device.deviceName
break
else:
pnic_facts['location'] = 'PCI'
pnic_facts['device'] = pnic.device
pnic_facts['driver'] = pnic.driver
if pnic.linkSpeed:
pnic_facts['status'] = 'Connected'
pnic_facts['actual_speed'] = pnic.linkSpeed.speedMb
pnic_facts['actual_duplex'] = 'Full Duplex' if pnic.linkSpeed.duplex else 'Half Duplex'
else:
pnic_facts['status'] = 'D |
PandaWei/tp-libvirt | libvirt/tests/src/timer_management.py | Python | gpl-2.0 | 19,341 | 0.00031 | """
Test module for timer management.
"""
import os
import logging
import time
from autotest.client import utils
from autotest.client.shared import error
from virttest.libvirt_xml import vm_xml
from virttest import utils_test
from virttest import virsh
from virttest import data_dir
from virttest import virt_vm
CLOCK_SOURCE_PATH = '/sys/devices/system/clocksource/clocksource0/'
def set_clock_xml(vm, params):
    """
    Config VM clock XML.

    :param vm: VM instance
    :param params: Test parameters
    :return: List of timer attribute dicts applied, for later verification
    """
    timer_elems = []
    if "yes" == params.get("specific_timer", "no"):
        timers = params.get("timer_name", "").split(',')
        timer_present = params.get("timer_present", "no")
        # NOTE(review): "".split(',') yields [''], so len(timers) is always
        # truthy and the "No timer provided" branch looks unreachable —
        # confirm whether an empty timer_name should raise here.
        if len(timers):
            new_present = timer_present
            for timer in timers:
                # With 'mix', alternate present yes/no across the timer list
                # (first timer gets "yes").
                if timer_present == 'mix':
                    if new_present == "yes":
                        new_present = "no"
                    else:
                        new_present = "yes"
                timer_attrs = {'name': timer, 'present': new_present}
                timer_elems.append(timer_attrs)
        else:
            raise error.TestError("No timer provided")
    offset = params.get("clock_offset", "utc")
    adjustment = params.get("clock_adjustment")
    timezone = params.get("clock_timezone")
    vmclockxml = vm_xml.VMClockXML()
    vmclockxml.from_dumpxml(vm.name)
    vmclockxml.offset = offset
    # Drop stale values first; only set them when the test supplies new ones.
    del vmclockxml.adjustment
    del vmclockxml.timezone
    if adjustment is not None:
        vmclockxml.adjustment = adjustment
    if timezone is not None:
        vmclockxml.timezone = timezone
    # Clear timers for re-creating
    vmclockxml.timers = []
    newtimers = []
    for element in timer_elems:
        newtimer = vm_xml.VMClockXML.TimerXML()
        newtimer.update(element)
        newtimers.append(newtimer)
    vmclockxml.timers = newtimers
    logging.debug("New vm XML:\n%s", vmclockxml)
    vmclockxml.sync()
    # Return timer elements for test verify
    return timer_elems
def vm_clock_source(vm, target, value=''):
    """
    Get or set the clocksource inside a VM.

    :param vm: VM instance
    :param target: 'available' or 'current' clocksource file
    :param value: new clocksource; empty string means "read only"
    :return: clocksource string when reading ('' on command failure),
             True/False (command success) when setting
    :raise error.TestError: if target is not a supported value
    """
    if target == 'available':
        clock_file = 'available_clocksource'
    elif target == 'current':
        clock_file = 'current_clocksource'
    else:
        # Bug fix: the TestError was previously constructed but never
        # raised, which later caused an UnboundLocalError on clock_file.
        raise error.TestError("Clock source target must be 'available' or 'current'")
    session = vm.wait_for_login()
    session.cmd("cd %s" % CLOCK_SOURCE_PATH)
    set_clock = bool(value)
    if set_clock:
        cmd = "echo %s > %s" % (value, clock_file)
    else:
        cmd = "cat %s" % clock_file
    cmd_s, cmd_o = session.cmd_status_output(cmd)
    session.close()
    result = ''
    if cmd_s:
        logging.error("Run command %s in VM fail: %s", cmd, cmd_o)
    else:
        result = cmd_o.strip()
    if set_clock:
        # When setting, report success/failure instead of the file content.
        result = cmd_s == 0
    return result
def get_vm_time(vm, time_type=None, windows=False):
    """
    Return the guest's time as epoch seconds, or -1 on failure.

    :param time_type: "utc" for UTC epoch time; anything else reads the
                      guest's local (timezone) time
    :param windows: If the vm is a windows guest (different date command;
                    Windows always reports timezone time)
    :return: Epoch time as int, or -1 if the in-guest command failed
    """
    if time_type == "utc":
        cmd = "date -u +%s"
    elif windows is True:
        # Bug fix: this line was 'time_type == "tz"', a no-op comparison
        # instead of the intended assignment.
        time_type = "tz"
        cmd = (r"echo %date:~0,4%/%date:~5,2%/%date:~8,2%/"
               "%time:~0,2%/%time:~3,2%/%time:~6,2%")
    else:
        cmd = "date +%Y/%m/%d/%H/%M/%S"
    session = vm.wait_for_login()
    ts, timestr = session.cmd_status_output(cmd)
    session.close()
    if ts:
        logging.error("Get time in vm failed: %s", timestr)
        return -1
    if time_type == "utc":
        # 'date -u +%s' already prints epoch seconds.
        return int(timestr)
    # Strip unexpected padding around each field (Windows 'echo' output
    # may contain spaces), then parse as a local timestamp.
    elems = timestr.split('/')
    timestr = "%s/%s/%s/%s/%s/%s" % (elems[0].strip(), elems[1].strip(),
                                     elems[2].strip(), elems[3].strip(),
                                     elems[4].strip(), elems[5].strip())
    return int(time.mktime(time.strptime(timestr.strip(),
                                         '%Y/%m/%d/%H/%M/%S')))
def set_host_timezone(timezone="America/New_York"):
    """
    Set the host timezone by relinking /etc/localtime.

    :param timezone: zoneinfo name, e.g. "America/New_York"
    :raise error.TestError: if the zoneinfo file is missing or linking fails
    """
    timezone_file = "/usr/share/zoneinfo/%s" % timezone
    if utils.run("ls %s" % timezone_file, ignore_status=True).exit_status:
        # Bug fix: the message was passed as two arguments, so the %s
        # placeholder was never substituted into the exception text.
        raise error.TestError("Invalid timezone file: %s" % timezone_file)
    utils.run("unlink /etc/localtime", ignore_status=True)
    result = utils.run("ln -s %s /etc/localtime" % timezone_file,
                       ignore_status=True)
    if result.exit_status:
        raise error.TestError("Set timezone failed: %s" % result)
    logging.debug("Set host timezone to %s", timezone)
def set_vm_timezone(vm, timezone="America/New_York", windows=False):
    """
    Set the vm timezone.

    :param vm: VM instance
    :param timezone: zoneinfo name, e.g. "America/New_York"
    :param windows: If the vm is a windows guest (uses tzutil instead of
                    relinking /etc/localtime)
    :raise error.TestError: on an unknown timezone or a failed command
    """
    if not windows:
        timezone_file = "/usr/share/zoneinfo/%s" % timezone
        session = vm.wait_for_login()
        if session.cmd_status("ls %s" % timezone_file):
            session.close()
            # Bug fix: format the message; it was passed as two args before,
            # leaving the %s placeholder unsubstituted.
            raise error.TestError("Not correct timezone:%s" % timezone_file)
        session.cmd("unlink /etc/localtime")
        cmd_s, cmd_o = session.cmd_status_output("ln -s %s /etc/localtime"
                                                 % timezone_file)
        session.close()
    else:
        # Map zoneinfo names onto the Windows timezone identifiers that
        # tzutil understands.
        timezone_codes = {"America/New_York": "Eastern Standard Time",
                          "Europe/London": "UTC",
                          "Asia/Shanghai": "China Standard Time",
                          "Asia/Tokyo": "Tokyo Standard Time"}
        if timezone not in timezone_codes:
            raise error.TestError("Not supported timezone, please add it.")
        cmd = "tzutil /s \"%s\"" % timezone_codes[timezone]
        session = vm.wait_for_login()
        cmd_s, cmd_o = session.cmd_status_output(cmd)
        session.close()
    if cmd_s:
        raise error.TestError("Set vm timezone failed: %s" % cmd_o)
    logging.debug("Set vm timezone to %s", timezone)
def convert_tz_to_vector(tz_name="Europe/London"):
    """
    Convert a timezone city name to its UTC offset in hours.

    :param tz_name: Timezone name
    :return: Integer UTC offset, or None for unknown timezones
    """
    # TODO: inspect timezone automatically
    utc_offsets = {"Europe/London": 0,
                   "Asia/HongKong": 8,
                   "Asia/Shanghai": 8,
                   "Asia/Tokyo": 9,
                   "America/New_York": -4}
    if tz_name in utc_offsets:
        return utc_offsets[tz_name]
    logging.error("Not supported timezone:%s", tz_name)
    return None
def manipulate_vm(vm, operation, params=None):
"""
Manipulate the VM.
:param vm: VM instance
:param operation: stress_in_vms, inject_nmi, dump, suspend_resume
or save_restore
:param params: Test parameters
"""
err_msg = ''
# Special operations for test
if operation == "stress":
logging.debug("Load stress in VM")
err_msg = utils_test.load_stress(operation, [vm], params)[0]
elif operation == "inject_nmi":
inject_times | = int(params.get("inject_times", 10))
logging.info("Trying to inject nmi %s times", inject_times)
while inject_times > 0:
try:
inject_times -= 1
virsh.inject_nmi(vm.name, debug=True, ignore_status=False)
except error.CmdError, detail:
err_msg = "Inject nmi failed: %s" % detail
elif operation == "dump":
dump_times = | int(params.get("dump_times", 10))
logging.info("Trying to dump vm %s times", dump_times)
while dump_times > 0:
|
certik/pyquante | PyQuante/MINDO3_Parameters.py | Python | bsd-3-clause | 4,890 | 0.038855 | """\
MINDO3.py: Dewar's MINDO/3 Semiempirical Method
This program is part of the PyQuante quantum chemistry program suite.
Copyright (c) 2004, Richard P. Muller. All Rights Reserved.
PyQuante version 1.2 and later is covered by the modified BSD
license. Please see the file LICENSE that is part of this
distribution.
"""
#MINDO/3 Parameters: Thru Ar
# in eV
Uss = [ None, -12.505, None,
None, None, -33.61, -51.79, -66.06,
-91.73, -129. | 86, None,
None, None, None, -39.82, -56.23, -73.39, -98.99, None]
Upp = [ None, None, None,
None, None, -25.11, -39.18, -56.40, -78.80, -105.93, None,
None, None, None, -29.15, -42.31, -57.25, -76.43, None]
gss = [ None, 12.848, None,
None, None, 10.59, 12.23, 13.59, 15.42, 16.92, None,
None, None, None, 9.82, 11.56, 12.88, 15.03, None]
gpp = [ None, None, None,
None, None, 8.86, 11.08, 12.98, 14.52, 16.71, None,
No | ne, None, None, 7.31, 8.64, 9.90, 11.30, None]
gsp = [ None, None, None,
None, None, 9.56, 11.47, 12.66, 14.48, 17.25, None,
None, None, None, 8.36, 10.08, 11.26, 13.16, None]
gppp = [ None, None, None,
None, None, 7.86, 9.84, 11.59, 12.98, 14.91, None,
None, None, None, 6.54, 7.68, 8.83, 9.97, None]
hsp = [ None, None, None,
None, None, 1.81, 2.43, 3.14, 3.94, 4.83, None,
None, None, None, 1.32, 1.92, 2.26, 2.42, None]
hppp = [ None, None, None,
None, None, 0.50, 0.62, 0.70, 0.77, 0.90, None,
None, None, None, 0.38, 0.48, 0.54, 0.67, None]
f03 = [ None, 12.848, 10.0, #averaged repulsion integral for use in gamma
10.0, 0.0, 8.958, 10.833, 12.377, 13.985, 16.250,
10.000, 10.000, 0.000, 0.000,7.57 , 9.00 ,10.20 , 11.73]
IPs = [ None, -13.605, None,
None, None, -15.160, -21.340, -27.510, -35.300, -43.700, -17.820,
None, None, None, None, -21.100, -23.840, -25.260, None]
IPp = [ None, None, None,
None, None, -8.520, -11.540, -14.340, -17.910, -20.890, -8.510,
None, None, None, None, -10.290, -12.410, -15.090, None]
# slater exponents
zetas = [ None, 1.30, None,
None, None, 1.211156, 1.739391, 2.704546, 3.640575, 3.111270, None,
None, None, None, 1.629173, 1.926108, 1.719480, 3.430887, None]
zetap = [ None, None, None,
None, None, 0.972826, 1.709645, 1.870839, 2.168448, 1.419860, None,
None, None, None, 1.381721, 1.590665, 1.403205, 1.627017, None]
# Bxy resonance coefficients
Bxy = {
(1,1) : 0.244770, (1,5) : 0.185347, (1,6) : 0.315011, (1,7) : 0.360776,
(1,8) : 0.417759, (1,9) : 0.195242, (1,14) : 0.289647, (1,15) : 0.320118,
(1,16) : 0.220654, (1,17) : 0.231653,
(5,5) : 0.151324, (5,6) : 0.250031, (5,7) : 0.310959, (5,8) : 0.349745,
(5,9) : 0.219591,
(6,6) : 0.419907, (6,7) : 0.410886, (6,8) : 0.464514, (6,9) : 0.247494,
(6,14) : 0.411377, (6,15) : 0.457816, (6,16) : 0.284620, (6,17) : 0.315480,
(7,7) : 0.377342, (7,8) : 0.458110, (7,9) : 0.205347,
(8,8) : 0.659407, (8,9) : 0.334044, (9,9) : 0.197464,
(14,14) : 0.291703, (15,15) : 0.311790, (16,16) : 0.202489,
(17,17) : 0.258969,
(7,15) : 0.457816, # Rick hacked this to be the same as 6,15
(8,15) : 0.457816, # Rick hacked this to be the same as 6,15
}
# axy Core repulsion function terms
axy = {
(1,1) : 1.489450, (1,5) : 2.090352, (1,6) : 1.475836, (1,7) : 0.589380,
(1,8) : 0.478901, (1,9) : 3.771362, (1,14) : 0.940789, (1,15) : 0.923170,
(1,16) : 1.700689, (1,17) : 2.089404,
(5,5) : 2.280544, (5,6) : 2.138291, (5,7) : 1.909763, (5,8) : 2.484827,
(5,9) : 2.862183,
(6,6) : 1.371208, (6,7) : 1.635259, (6,8) : 1.820975, (6,9) : 2.725913,
(6,14) : 1.101382, (6,15) : 1.029693, (6,16) : 1.761370, (6,17) : 1.676222,
(7,7) : 2.209618, (7,8) : 1.873859, (7,9) : 2.861667,
(8,8) : 1.537190, (8,9) : 2.266949, (9,9) : 3.864997,
(14,14) : 0.918432, (15,15) : 1.186652, (16,16) : 1.751617,
(17,17) : 1.792125,
(7,15) : 1.029693, # Rick hacked this to be the same as 6,15
(8,15) : 1.029693, # Rick hacked this to be the same as 6,15
}
# Atomic heat of formations: Mopac got from CRC
Hfat = [ None, 52.102, None,
None, None, 135.7, 170.89, 113.0, 59.559, 18.86, None,
None, None, None, 106.0, 79.8, 65.65, 28.95, None]
# Default isolated atomic energy values from Mopac:EISOL3
Eat = [None, -12.505, None,
None ,None,-61.70,-119.47,-187.51,-307.07,-475.00,None,
None,None,None,-90.98,-150.81,-229.15,-345.93,None]
nbfat = [ None, 1, None,
None, None, 4, 4, 4, 4, 4, None,
None, None, None, 4, 4, 4, 4, None]
CoreQ = [ None, 1, None,
None, None, 3, 4, 5, 6, 7, None,
None, None, None, 4, 5, 6, 7, None]
NQN = [ None, 1, 1, # principle quantum number N
2, 2, 2, 2, 2, 2, 2, 2,
3, 3, 3, 3, 3, 3, 3, 3]
|
Copenbacon/code-katas | katas/convert_array.py | Python | mit | 335 | 0 | """Convert a number (n) in a reverse list of the individual numbers."""
def digitize(n):
    """Convert a non-negative integer into a reversed list of its digits.

    Example: digitize(123) -> [3, 2, 1].
    """
    # Iterating the decimal string in reverse avoids the manual
    # range(len(...)) index loop and the intermediate list/reverse dance.
    return [int(digit) for digit in reversed(str(n))]
|
admetricks/printer-webservice | tests.py | Python | mit | 1,374 | 0.000728 | """
html-pdf-webservice
Copyright 2014 Nathan Jones
See LICENSE for more details
"""
from unittest.case import TestCase
from werkzeug.test import Client
from werkzeug.wrappers import BaseResponse
from app import application
class AppTest(TestCase):
    """End-to-end tests for the HTML-to-PDF WSGI application."""

    def setUp(self):
        # The werkzeug test client drives the WSGI app without a real server.
        self.client = Client(application, BaseResponse)

    def test_post_html_file_should_produce_pdf_response(self):
        # Use a context manager so the fixture file handle is always closed
        # (the original leaked it).
        with open('sample.html') as html_file:
            response = self.client.post('/', data={'html': html_file})
        self.assertEqual(200, response.status_code)
        self.assertEqual('application/pdf', response.headers['Content-Type'])

    def test_post_html_file_as_form_param_should_produce_pdf_response(self):
        response = self.client.post('/', data={'html': '<html><body><p>Hello</p></body></html>'})
        self.assertEqual(200, response.status_code)
        self.assertEqual('application/pdf', response.headers['Content-Type'])

    def test_get_request_should_produce_method_not_allowed_response(self):
        response = self.client.get('/')
        self.assertEqual(405, response.status_code)
        self.assertEqual('POST', response.headers['Allow'])

    def test_request_without_file_should_produce_bad_request(self):
        response = self.client.post('/')
        self.assertEqual(400, response.status_code)
        self.assertIn('html is required', response.data)
|
bartTC/like | manage.py | Python | mit | 245 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "like.settings")
from django.core.management i | mport execute_from_co | mmand_line
execute_from_command_line(sys.argv)
|
Jheguy2/Mercury | qa/rpc-tests/smartfees.py | Python | mit | 4,241 | 0.008489 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test fee estimation code
#
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
class EstimateFeeTest(BitcoinTestFramework):
    """Exercise the fee-estimation code (estimatefee RPC) against miners
    with different block-size policies."""

    def setup_network(self):
        """Start three connected nodes whose miners differ in stinginess."""
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir,
                                     ["-debug=mempool", "-debug=estimatefee"]))
        # Node1 mines small-but-not-tiny blocks, and allows free transactions.
        # NOTE: the CreateNewBlock code starts counting block size at 1,000 bytes,
        # so blockmaxsize of 2,000 is really just 1,000 bytes (room enough for
        # 6 or 7 transactions)
        self.nodes.append(start_node(1, self.options.tmpdir,
                                     ["-blockprioritysize=1500", "-blockmaxsize=2000",
                                      "-debug=mempool", "-debug=estimatefee"]))
        connect_nodes(self.nodes[1], 0)

        # Node2 is a stingy miner, that
        # produces very small blocks (room for only 3 or so transactions)
        node2args = ["-blockprioritysize=0", "-blockmaxsize=1500",
                     "-debug=mempool", "-debug=estimatefee"]
        self.nodes.append(start_node(2, self.options.tmpdir, node2args))
        connect_nodes(self.nodes[2], 0)

        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        """Fill the mempool, mine with each miner, and sanity-check estimates."""
        # Prime the memory pool with pairs of transactions
        # (high-priority, random fee and zero-priority, random fee)
        min_fee = Decimal("0.001")
        fees_per_kb = []
        for i in range(12):
            (txid, txhex, fee) = random_zeropri_transaction(
                self.nodes, Decimal("1.1"), min_fee, min_fee, 20)
            # txhex is hex-encoded, so 2 characters per byte.
            tx_kbytes = (len(txhex) / 2) / 1000.0
            fees_per_kb.append(float(fee) / tx_kbytes)

        # Mine blocks with node2 until the memory pool clears:
        while len(self.nodes[2].getrawmempool()) > 0:
            self.nodes[2].setgenerate(True, 1)

        self.sync_all()
        all_estimates = [self.nodes[0].estimatefee(i) for i in range(1, 20)]
        print("Fee estimates, super-stingy miner: " + str([str(e) for e in all_estimates]))

        # Estimates should be within the bounds of what transactions fees
        # actually were (a negative estimate means "no data", hence the filter).
        delta = 1.0e-6  # account for rounding error
        for e in filter(lambda x: x >= 0, all_estimates):
            if float(e) + delta < min(fees_per_kb) or float(e) - delta > max(fees_per_kb):
                # BUG FIX: the original message referenced the undefined names
                # min_fee_kb/max_fee_kb, so a failing estimate raised NameError
                # instead of the intended AssertionError.
                raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
                                     % (float(e), min(fees_per_kb), max(fees_per_kb)))

        # Generate transactions while mining 30 more blocks, this time with node1:
        for i in range(30):
            for j in range(random.randrange(6 - 4, 6 + 4)):
                (txid, txhex, fee) = random_transaction(
                    self.nodes, Decimal("1.1"), Decimal("0.0"), min_fee, 20)
                tx_kbytes = (len(txhex) / 2) / 1000.0
                fees_per_kb.append(float(fee) / tx_kbytes)
            self.nodes[1].setgenerate(True, 1)

        self.sync_all()
        all_estimates = [self.nodes[0].estimatefee(i) for i in range(1, 20)]
        print("Fee estimates, more generous miner: " + str([str(e) for e in all_estimates]))
        for e in filter(lambda x: x >= 0, all_estimates):
            if float(e) + delta < min(fees_per_kb) or float(e) - delta > max(fees_per_kb):
                # Same NameError fix as above.
                raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
                                     % (float(e), min(fees_per_kb), max(fees_per_kb)))

        # Finish by mining a normal-sized block:
        while len(self.nodes[0].getrawmempool()) > 0:
            self.nodes[0].setgenerate(True, 1)

        self.sync_all()
        final_estimates = [self.nodes[0].estimatefee(i) for i in range(1, 20)]
        print("Final fee estimates: " + str([str(e) for e in final_estimates]))
# Run the functional test directly when invoked as a script.
if __name__ == '__main__':
    EstimateFeeTest().main()
|
jiahaoliang/group-based-policy | gbpservice/neutron/services/servicechain/plugins/ncp/config.py | Python | apache-2.0 | 1,338 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
# Configuration options for the Node Composition Plugin (NCP).
# NOTE(review): ``_`` is not imported here -- presumably installed as a
# gettext builtin by the project's i18n bootstrap; confirm before reuse.
service_chain_opts = [
    cfg.ListOpt('node_drivers',
                default=['node_dummy'],
                help=_("An ordered list of service chain node drivers "
                       "entrypoints to be loaded from the "
                       "gbpservice.neutron.servicechain.ncp_drivers "
                       "namespace.")),
    cfg.StrOpt('node_plumber',
               default='dummy_plumber',
               help=_("The plumber used by the Node Composition Plugin "
                      "for service plumbing. Entrypoint loaded from the "
                      "gbpservice.neutron.servicechain.ncp_plumbers "
                      "namespace."))
]

# Register the options under their own config section.
cfg.CONF.register_opts(service_chain_opts, "node_composition_plugin")
|
emsrc/algraeph | lib/graeph/graphml/graphview.py | Python | gpl-3.0 | 1,755 | 0.006268 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2013 by
# Erwin Marsi and TST-Centrale
#
#
# This file is part of the Algraeph program.
#
# The Algraeph program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# The Algraeph program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
subclasses of GraphView and ViewMenu specific to Graphml format
"""
# Reconstructed: the address had been garbled by stray separator tokens.
__author__ = "Erwin Marsi <e.marsi@gmail.com>"
import wx
from graeph.graphview import BasicGraphView, BasicViewMenu
from graeph.pubsub import subscribe, unsubscribe, send, receive
from graeph.graphml.dotgraph import GraphmlDotGraphPair
class GraphmlGraphView(BasicGraphView):
    """
    Graph viewer for graph pairs in Graphml format
    """

    def initDotGraphPair(self, msg=None):
        """
        Initialize the dot visualization for Graphml graphs
        """
        # NOTE(review): ``msg`` is unused here; presumably kept to match a
        # pubsub callback signature used by the base class -- confirm.
        self.dotGraphPair = GraphmlDotGraphPair()

    def initViewMenu(self, msg=None):
        """
        Initialise the pop-up menu for Graphml graph viewing options
        """
        # ``self.aligner`` and ``self.algraephFrame`` are presumably set up
        # by BasicGraphView before this hook runs -- confirm in the base class.
        self.viewMenu = GraphmlViewMenu(self, self.aligner, self.algraephFrame)
class GraphmlViewMenu(BasicViewMenu):
    """
    Pop-up menu with viewing options for Graphml graphs
    """
    # All behavior is inherited unchanged from BasicViewMenu; this subclass
    # exists only to give the Graphml format its own menu type.
    pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.