code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
import pytest
import httpretty
import logging
class callcounted(object):
    """Callable proxy that counts how many times the wrapped method runs."""

    def __init__(self, method):
        # Keep the target callable and start the invocation tally at zero.
        self.method = method
        self.counter = 0

    def __call__(self, *args, **kwargs):
        # Bump the tally, then delegate untouched so the wrapper is transparent.
        self.counter += 1
        return self.method(*args, **kwargs)
class test_ctx(object):
    """Stand-in for the click context object the egasub CLI passes around."""

    def __init__(self):
        # Mirror the ctx.obj layout the CLI builds: settings, submission
        # state, and a logger whose error/warning calls can be counted.
        self.obj = {
            'SETTINGS': {'apiUrl': 'http://example.com/'},
            'SUBMISSION': {},
        }
        log = logging.getLogger('ega_submission')
        # Wrap error/warning so tests can assert how often they fired.
        log.error = callcounted(logging.error)
        log.warning = callcounted(logging.warning)
        self.obj['LOGGER'] = log


# Single shared instance; note this rebinds the class name to the instance.
test_ctx = test_ctx()
class mkclick(object):
    """Minimal stub of the click module's prompting surface for tests."""

    def __init__(self):
        # Nothing to set up; the stub only exists to expose prompt().
        pass

    def prompt(self):
        # Swallow the prompt; returning None mimics an unanswered input.
        return None
@pytest.fixture(scope="session")
def ctx():
    # Session-wide fixture: every test shares the single test_ctx instance
    # created at import time, so logger call counters accumulate across tests.
    return test_ctx
#@pytest.fixture(scope="session")
#def mock_click():
# return mkclick
@pytest.fixture(scope="session")
def mock_server(ctx):
    """Enable httpretty and register canned responses for every endpoint the
    tests touch.

    Session-scoped, so interception stays active for the whole run.  Bodies
    mimic the EGA submitter-portal JSON envelope:
    {"header": {...}, "response": {"result": [...]}}.
    """
    httpretty.enable()
    # --- authentication ---
    httpretty.register_uri(httpretty.POST, "%slogin" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header":{"code" : "200"},"response" : {"result" : [ { "session" : { "sessionToken":"abcdefg" }}]}}',
                           content_type="application/json")
    # --- submissions and per-object CRUD ---
    httpretty.register_uri(httpretty.POST, "%ssubmissions" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.DELETE, "%slogout" % (ctx.obj['SETTINGS']['apiUrl']),
                           content_type="application/json")
    httpretty.register_uri(httpretty.POST, "%ssubmissions/12345/studies" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"6789" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.PUT, "%sstudies/6789?action=VALIDATE" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    # --- lookups by alias ---
    httpretty.register_uri(httpretty.GET, "%sstudies/test_alias?idType=ALIAS" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "%ssamples/sample_alias?idType=ALIAS&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "%sdatasets/dataset_alias?idType=ALIAS&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "%spolicies/policy_alias?idType=ALIAS&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    # --- listings by status ---
    httpretty.register_uri(httpretty.GET, "%ssamples?status=SUBMITTED&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "%sstudies?status=SUBMITTED&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"123456", "alias": "study", "studyType": "type", "title": "title", "studyAbstract": "abstract", "studyTypeId": "Id", "shortName": "short"}]}}',
                           content_type="application/json")
    # NOTE(review): same method+URI registered twice; presumably the intent is
    # response rotation across successive requests — confirm against
    # httpretty's register_uri semantics before changing.
    httpretty.register_uri(httpretty.GET, "%sstudies?status=SUBMITTED&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "%spolicies?status=SUBMITTED&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "%sdatasets?status=SUBMITTED&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.DELETE, "%ssamples?status=SUBMITTED&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345" }]}}',
                           content_type="application/json")
    # for test_submitter/object_submission unaligned
    httpretty.register_uri(httpretty.GET, "%ssamples/test_u?idType=ALIAS&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "status": "SUBMITTED"}]}}',
                           content_type="application/json")
    # for test_submitter/object_submission unaligned
    httpretty.register_uri(httpretty.GET,
                           "%ssamples/ssample_y?idType=ALIAS&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "status": ["SUBMITTED_DRAFT"]}]}}',
                           content_type="application/json")
    # for test_submitter/object_submission alignment
    httpretty.register_uri(httpretty.GET, "%ssamples/test_a?idType=ALIAS&skip=0&limit=0" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "status": ["SUBMITTED"]}]}}',
                           content_type="application/json")
    # for object_submission/update_obj
    httpretty.register_uri(httpretty.GET, "%ssamples/12345?action=EDIT" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "status": ["SUBMITTED_DRAFT"]}]}}',
                           content_type="application/json")
    # for submitter PUT
    httpretty.register_uri(httpretty.PUT, "%ssamples/12345?action=EDIT" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "alias": "fgfg", "status": "VALIDATED"}]}}',
                           content_type="application/json")
    # for submitter DELETE — note these two use a hard-coded production-style
    # host rather than the apiUrl setting.
    httpretty.register_uri(httpretty.DELETE, "https://ega.crg.eu:443/submitterportal/v1/samples/12345",
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "alias": "fgfg", "status": "VALIDATED"}]}}',
                           content_type="application/json")
    httpretty.register_uri(httpretty.PUT, "https://ega.crg.eu:443/submitterportal/v1/submissions/12345?action=SUBMIT",
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "alias": "fgfg", "status": "VALIDATED"}]}}',
                           content_type="application/json",
                           X_Token= "sdfsd")
    httpretty.register_uri(httpretty.POST, "%ssubmissions/12345/samples" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "alias": "fgfg", "status": "VALIDATED"}]}}',
                           content_type="application/json")
    # --- external ICGC id service endpoints ---
    httpretty.register_uri(httpretty.GET, "http://hetl2-dcc.res.oicr.on.ca:9000/sample/id?submittedProjectId=abjdh&submittedSampleId=alias&create=true",
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "status": ["SUBMITTED_DRAFT"]}]}}',
                           content_type="application/json",
                           Authorization= 'Bearer True')
    httpretty.register_uri(httpretty.GET, "http://hetl2-dcc.res.oicr.on.ca:9000/donor/id?submittedProjectId=abjdh&submittedDonorId=3&create=true",
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"12345", "status": ["SUBMITTED_DRAFT"]}]}}',
                           content_type="application/json",
                           Authorization= 'Bearer True')
    # "analysiss" (sic) matches the pluralization scheme used by the client code.
    httpretty.register_uri(httpretty.PUT, "%sanalysiss/555?action=SUBMIT" % (ctx.obj['SETTINGS']['apiUrl']),
                           body='{"header" : {"code" : "200"}, "response" : {"result" : [{ "id":"555", "alias": "fgfg", "status": "VALIDATED"}]}}',
                           content_type="application/json")
|
icgc-dcc/egasub
|
tests/conftest.py
|
Python
|
gpl-3.0
| 9,299
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Translation GUI
$Id: translate.py 26889 2004-08-04 04:00:36Z pruggera $
"""
__docformat__ = 'restructuredtext'
from zope.app.i18n.browser import BaseView
class Translate(BaseView):
    """Browser view for managing a translation domain's message catalog.

    Every mutating action redirects back to the page the form was submitted
    from (``self.request.URL[-1]``).
    """

    def getMessages(self):
        """Return (msg_id, index) pairs for messages matching the filter."""
        # Local renamed from 'filter' to avoid shadowing the builtin.
        msg_filter = self.request.get('filter', '%')
        return [(msg_id, index)
                for index, msg_id
                in enumerate(self.context.getMessageIds(msg_filter))]

    def getTranslation(self, msgid, target_lang):
        """Translate *msgid* into *target_lang* via the message catalog."""
        return self.context.translate(msgid, target_language=target_lang)

    def getEditLanguages(self):
        '''get the languages that are selected for editing'''
        languages = self.request.cookies.get('edit_languages', '')
        # List comprehension drops empty entries (empty cookie, trailing
        # comma) and always returns a list, unlike py3's lazy filter().
        return [language for language in languages.split(',') if language]

    def editMessage(self):
        """Update one message's translations for every edit language."""
        msg_id = self.request['msg_id']
        for language in self.getEditLanguages():
            msg = self.request['msg_lang_%s' % language]
            # Only write when the submitted text actually changed.
            if msg != self.context.translate(msg_id,
                                             target_language=language):
                self.context.updateMessage(msg_id, msg, language)
        return self.request.response.redirect(self.request.URL[-1])

    def editMessages(self):
        """Add newly entered messages, then apply edits to existing ones."""
        # Handle new Messages; the form offers up to five blank rows.
        for count in range(5):
            msg_id = self.request.get('new-msg_id-%i' % count, '')
            if msg_id:
                for language in self.getEditLanguages():
                    # Fall back to the message id when no text was entered.
                    msg = self.request.get('new-%s-%i' % (language, count),
                                           msg_id)
                    self.context.addMessage(msg_id, msg, language)
        # Handle edited Messages: form keys look like 'edit-msg_id-<key>'.
        keys = [key[12:] for key in self.request.keys()
                if key.startswith('edit-msg_id-')]
        for key in keys:
            msg_id = self.request['edit-msg_id-' + key]
            for language in self.getEditLanguages():
                msg = self.request['edit-%s-%s' % (language, key)]
                if msg != self.context.translate(msg_id,
                                                 target_language=language):
                    self.context.updateMessage(msg_id, msg, language)
        return self.request.response.redirect(self.request.URL[-1])

    def deleteMessages(self, message_ids):
        """Delete each listed message in every available language."""
        for message_id in message_ids:
            msgid = self.request.form['edit-msg_id-%s' % message_id]
            for language in self.context.getAvailableLanguages():
                # Sometimes a language is edited but no translation exists
                # for it yet; ignore those.
                try:
                    self.context.deleteMessage(msgid, language)
                except KeyError:
                    pass
        return self.request.response.redirect(self.request.URL[-1])

    def addLanguage(self, language):
        """Add *language* to the translation domain."""
        self.context.addLanguage(language)
        return self.request.response.redirect(self.request.URL[-1])

    def changeEditLanguages(self, languages=()):
        """Persist the selected edit languages in a cookie.

        The default is an immutable tuple instead of the original mutable
        ``[]`` (same joined result, no shared-default pitfall).
        """
        self.request.response.setCookie('edit_languages',
                                        ','.join(languages))
        return self.request.response.redirect(self.request.URL[-1])

    def changeFilter(self):
        """Store the current message-id filter in a cookie."""
        msg_filter = self.request.get('filter', '%')
        self.request.response.setCookie('filter', msg_filter)
        return self.request.response.redirect(self.request.URL[-1])

    def deleteLanguages(self, languages):
        """Remove each listed language from the translation domain."""
        for language in languages:
            self.context.deleteLanguage(language)
        return self.request.response.redirect(self.request.URL[-1])
|
Donkyhotay/MoonPy
|
zope/app/i18n/browser/translate.py
|
Python
|
gpl-3.0
| 4,221
|
# -*- coding: utf-8 -*-
# Zeobuilder is an extensible GUI-toolkit for molecular model construction.
# Copyright (C) 2007 - 2012 Toon Verstraelen <Toon.Verstraelen@UGent.be>, Center
# for Molecular Modeling (CMM), Ghent University, Ghent, Belgium; all rights
# reserved unless otherwise stated.
#
# This file is part of Zeobuilder.
#
# Zeobuilder is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# In addition to the regulations of the GNU General Public License,
# publications and communications based in parts on this program or on
# parts of this program are required to cite the following article:
#
# "ZEOBUILDER: a GUI toolkit for the construction of complex molecules on the
# nanoscale with building blocks", Toon Verstraelen, Veronique Van Speybroeck
# and Michel Waroquier, Journal of Chemical Information and Modeling, Vol. 48
# (7), 1530-1541, 2008
# DOI:10.1021/ci8000748
#
# Zeobuilder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
import read, edit, faulty, group, composed, optional
|
molmod/zeobuilder
|
zeobuilder/gui/fields/__init__.py
|
Python
|
gpl-3.0
| 1,515
|
from numpy.distutils.core import setup, Extension
#from setuptools import setup, Extension
# Register the Infer package plus its C extension.  numpy.distutils (see the
# import above) is used instead of setuptools — the commented-out setuptools
# import suggests the switch was deliberate.
setup(
    name = "Infer", version = "1.0",
    description='Python version of MCMC, plus other inference codes under development',
    author='Neale Gibson',
    author_email='ngibson@eso.org',
    packages=['Infer'],
    # The Python sources live under src/, installed as the 'Infer' package.
    package_dir={'Infer':'src'},
    #and extension package for solving toeplitz matrices...
    ext_modules = [
        Extension("Infer.LevinsonTrenchZoharSolve",sources=["src/LevinsonTrenchZoharSolve.c"],),
    ]
)
|
nealegibson/Infer
|
setup.py
|
Python
|
gpl-3.0
| 518
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
class RegexLib(object):
    """
    A class containing all regular expressions used throughout the DataHound
    application.

    Patterns are written as raw strings: sequences like ``\\d`` and ``\\[``
    are not valid Python string escapes, and non-raw literals emit
    DeprecationWarning (SyntaxWarning on newer CPython) even though the
    resulting value is unchanged.
    """

    # Class Members

    # Potentially better email regex
    # "([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4})"
    # http://www.webmonkey.com/2008/08/four_regular_expressions_to_check_email_addresses/
    caps_alpha_regex = re.compile(r"^[A-Z]+$")
    cc_last_four_regex = re.compile(r"^[0-9]{4}$")
    docker_log_entry_regex = re.compile(r"^\[\d{4}-\d{2}-\d{2}")
    # domain_name_regex = re.compile("^[a-zA-Z0-9-*]+(\.[a-zA-Z0-9-]+)*$")
    domain_name_regex = re.compile(r"^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,63}$")
    email_regex = re.compile(r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,10}$")
    export_file_name_regex = re.compile(r"^[0-9A-Za-z_-]{1,32}$")
    file_log_entry_regex = re.compile(r"^\[\d{2}/\d{2}/\d{2} ")
    file_name_regex = re.compile(r"^[A-Za-z-_0-9]+$")
    first_name_regex = re.compile(r"^[A-Za-z\-']{1,32}$")
    hostname_regex = re.compile(
        r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z]|[A-Za-z][A-Za-z\-]*[A-Za-z])$",
        flags=re.IGNORECASE
    )
    html_form_regex = re.compile(r"<form.*?</form>", flags=re.IGNORECASE | re.DOTALL)
    integer_regex = re.compile(r"^[0-9]+$")
    ipv4_address_regex = re.compile(
        r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
        flags=re.IGNORECASE
    )
    ipv4_cidr_regex = re.compile(
        r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|[1-2][0-9]|3[0-2]))$",
        flags=re.IGNORECASE
    )
    last_name_regex = re.compile(r"^[A-Za-z\-']{1,32}$")
    log_entry_stub_regex = re.compile(r"\[(.*?)\]")
    mime_string_regex = re.compile(r"^[a-z\-]+/[a-z\.\-_0-9]+(;(\s?[\w=\.\-]+)+)?$", flags=re.IGNORECASE)
    order_name_regex = re.compile(r"^[A-Za-z-_0-9]+$")
    protocol_regex = re.compile(r"^([A-Z]{1,10})://", flags=re.IGNORECASE)
    # NOTE: kept non-raw on purpose — this literal mixes escaped double quotes
    # (\") with literal single quotes, so a raw-string rewrite would change
    # the pattern value.  Value is identical to the original.
    query_string_regex = re.compile(
        "^([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?(&[\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?)*)$",
        flags=re.IGNORECASE,
    )
    url_port_regex = re.compile(r".+:([1-9]([0-9]{1,10})?)$", flags=re.IGNORECASE)
    url_protocol_regex = re.compile(r"^([A-Z0-9-_]+?):", flags=re.IGNORECASE)
    url_scheme_regex = re.compile(r"^([A-Z0-9]{1,25})://", flags=re.IGNORECASE)
    user_name_regex = re.compile(r"^[A-Z0-9]{1,32}$", flags=re.IGNORECASE)
    uuid4_string_regex = re.compile(
        r"^[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$",
        flags=re.IGNORECASE,
    )
    zmap_bandwidth_regex = re.compile(r"^\d+[GMK]$")
    zmap_empty_bandwidth_regex = re.compile(r"^0+[GMK]$")
    # Single-quoted raw string so the embedded double quotes need no escaping;
    # pattern value is unchanged.
    ssl_certificate_regex = re.compile(r"(-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----)", flags=re.DOTALL)
    authority_info_uri_regex = re.compile(r"URI:(.*)")
    basic_auth_realm_regex = re.compile(r'realm="(.*?)"')
    card_last_four_regex = re.compile(r"^\d\d\d\d$")

    # Instantiation

    # Static Methods

    # Class Methods

    # Public Methods

    # Protected Methods

    # Private Methods

    # Properties

    # Representation and Comparison
|
lavalamp-/ws-backend-community
|
lib/wsregex.py
|
Python
|
gpl-3.0
| 3,414
|
#
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2015, Mostapha Sadeghipour Roudsari <Sadeghipour@gmail.com>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Use this component to change the schedules of your HBZones.
-
Provided by Honeybee 0.0.57
Args:
_HBZones: HBZones for which you want to change shcedules.
occupancySchedule_: A text string representing the occupancy shceudle that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component.
occupancyActivitySchs_: A text string representing the shceudle for the metabolic rate of the occupants that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component. If this is a CSV schedule, the values in it should be Watts and the "units_" input should be "ActivityLevel."
heatingSetPtSchedule_: A text string representing the heating setpoint shceudle that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component. If it is a CSV schedule, the values in it should be temperature values in Celcius and the "units_" input should be "Temperature."
coolingSetPtSchedule_: A text string representing the cooling setpoint shceudle that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component. If it is a CSV schedule, the values in it should be temperature values in Celcius and the "units_" input should be "Temperature."
lightingSchedule_: A text string representing the lighting shceudle that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component.
equipmentSchedule_: A text string representing the equipment shceudle that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component.
infiltrationSchedule_: A text string representing the infiltration shceudle that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component.
HVACAvailabiltySchs_: A text string representing the HVAC availability that you want to use. This can be either a shcedule from the schedule libirary or a CSV file path to a CSV schedule you created with the "Honeybee_Create CSV Schedule" component.
Returns:
schedules: A report of what shcedules are assigned to each zone.
HBZones: HBZones that have had thier shcedules modified.
"""
# Grasshopper component metadata: display name, palette location, and version
# stamp shown in the Honeybee toolbar.
ghenv.Component.Name = "Honeybee_Set EnergyPlus Zone Schedules"
ghenv.Component.NickName = 'setEPZoneSchedules'
ghenv.Component.Message = 'VER 0.0.57\nJUL_06_2015'
ghenv.Component.Category = "Honeybee"
ghenv.Component.SubCategory = "08 | Energy | Set Zone Properties"
#compatibleHBVersion = VER 0.0.56\nFEB_01_2015
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
# Older Grasshopper builds lack AdditionalHelpFromDocStrings; ignore that case.
try: ghenv.Component.AdditionalHelpFromDocStrings = "1"
except: pass
import scriptcontext as sc
import uuid
import Grasshopper.Kernel as gh
import os
def checkTheInputs():
    """Expand single-item schedule inputs so each list has one entry per zone.

    Reads the Grasshopper input globals (occupancySchedules_, ..., _HBZones)
    and returns the eight schedule lists in the component's expected order.
    """
    def _spread(values):
        # A single value means "apply to every zone"; anything else is
        # passed through untouched (including an empty list).
        if len(values) == 1:
            return [values[0] for _ in range(len(_HBZones))]
        return values

    return (_spread(occupancySchedules_),
            _spread(occupancyActivitySchs_),
            _spread(coolingSetPtSchedules_),
            _spread(heatingSetPtSchedules_),
            _spread(lightingSchedules_),
            _spread(equipmentSchedules_),
            _spread(infiltrationSchedules_),
            _spread(HVACAvailabilitySchs_))
def main(HBZones, occupancySchedule, occupancyActivitySch, heatingSetPtSchedule, coolingSetPtSchedule, lightingSchedule, equipmentSchedule, infiltrationSchedule, HVACAvailabilitySchs):
    """Assign the given per-zone schedule lists to each Honeybee zone.

    Returns (HBZones, schedules) on success, or -1 on any validation failure.
    IronPython 2 code (print statement, dict.has_key) running inside
    Grasshopper; sc/gh/ghenv are the scriptcontext/Grasshopper globals.
    """
    # check for Honeybee
    if not sc.sticky.has_key('honeybee_release'):
        print "You should first let Honeybee to fly..."
        w = gh.GH_RuntimeMessageLevel.Warning
        ghenv.Component.AddRuntimeMessage(w, "You should first let Honeybee to fly...")
        return -1
    try:
        if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component): return -1
    except:
        warning = "You need a newer version of Honeybee to use this compoent." + \
                  " Use updateHoneybee component to update userObjects.\n" + \
                  "If you have already updated userObjects drag Honeybee_Honeybee component " + \
                  "into canvas and try again."
        w = gh.GH_RuntimeMessageLevel.Warning
        ghenv.Component.AddRuntimeMessage(w, warning)
        return -1
    # make sure schedules are in HB schedule
    # NOTE(review): occupancyActivitySch is not included in this validation
    # list — confirm whether that omission is intentional.
    schedules = [occupancySchedule, heatingSetPtSchedule, coolingSetPtSchedule, lightingSchedule, equipmentSchedule, infiltrationSchedule, HVACAvailabilitySchs]
    HBScheduleList = sc.sticky["honeybee_ScheduleLib"].keys()
    for scheduleList in schedules:
        for schedule in scheduleList:
            # Library names are stored uppercased; CSV paths are checked on disk.
            if schedule!=None:
                schedule= schedule.upper()
            if schedule!=None and not schedule.lower().endswith(".csv") and schedule not in HBScheduleList:
                msg = "Cannot find " + schedule + " in Honeybee schedule library."
                print msg
                ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
                return -1
            elif schedule!=None and schedule.lower().endswith(".csv"):
                # check if csv file exists
                if not os.path.isfile(schedule):
                    msg = "Cannot find the shchedule file: " + schedule
                    print msg
                    ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
                    return -1
    # call the objects from the lib
    hb_hive = sc.sticky["honeybee_Hive"]()
    HBObjectsFromHive = hb_hive.callFromHoneybeeHive(HBZones)
    schedules = []
    # For each zone, prefer the per-zone value; fall back to the first entry
    # when the list is shorter than the zone count (IndexError path).
    for zoneCount, HBZone in enumerate(HBObjectsFromHive):
        if occupancySchedule != [] and occupancySchedule[0] != None:
            try: HBZone.occupancySchedule = occupancySchedule[zoneCount]
            except: HBZone.occupancySchedule = occupancySchedule[0]
        if occupancyActivitySch != [] and occupancyActivitySch[0] != None:
            try: HBZone.occupancyActivitySch = occupancyActivitySch[zoneCount]
            except: HBZone.occupancyActivitySch = occupancyActivitySch[0]
        if heatingSetPtSchedule != [] and heatingSetPtSchedule[0] != None:
            try: HBZone.heatingSetPtSchedule = heatingSetPtSchedule[zoneCount]
            except: HBZone.heatingSetPtSchedule = heatingSetPtSchedule[0]
        if coolingSetPtSchedule != [] and coolingSetPtSchedule[0] != None:
            try: HBZone.coolingSetPtSchedule = coolingSetPtSchedule[zoneCount]
            except: HBZone.coolingSetPtSchedule = coolingSetPtSchedule[0]
        if lightingSchedule != [] and lightingSchedule[0] != None:
            try: HBZone.lightingSchedule = lightingSchedule[zoneCount]
            except: HBZone.lightingSchedule = lightingSchedule[0]
        if equipmentSchedule != [] and equipmentSchedule[0] != None:
            try: HBZone.equipmentSchedule = equipmentSchedule[zoneCount]
            except: HBZone.equipmentSchedule = equipmentSchedule[0]
        if infiltrationSchedule != [] and infiltrationSchedule[0] != None:
            try: HBZone.infiltrationSchedule = infiltrationSchedule[zoneCount]
            except: HBZone.infiltrationSchedule = infiltrationSchedule[0]
        if HVACAvailabilitySchs != [] and HVACAvailabilitySchs[0] != None:
            try: HBZone.HVACAvailabilitySched = HVACAvailabilitySchs[zoneCount]
            except: HBZone.HVACAvailabilitySched = HVACAvailabilitySchs[0]
        schedules.append(HBZone.getCurrentSchedules())
    # Push the modified zones back into the hive under a fresh unique key.
    HBZones = hb_hive.addToHoneybeeHive(HBObjectsFromHive, ghenv.Component.InstanceGuid.ToString() + str(uuid.uuid4()))
    return HBZones, schedules
# Expand single-item inputs to per-zone lists.
# NOTE(review): checkTheInputs() is called again inside the if-branch below,
# making this first call redundant work — confirm before simplifying.
occupancySchedules, occupancyActivitySchs, coolingSetPtSchedules, heatingSetPtSchedules, lightingSchedules, equipmentSchedules, infiltrationSchedules, HVACAvailabilitySchs = checkTheInputs()
if _HBZones and _HBZones[0]!=None:
    occupancySchedules, occupancyActivitySchs, coolingSetPtSchedules, heatingSetPtSchedules, lightingSchedules, equipmentSchedules, infiltrationSchedules, HVACAvailabilitySchs = checkTheInputs()
    results = main(_HBZones, occupancySchedules, occupancyActivitySchs, heatingSetPtSchedules, coolingSetPtSchedules, lightingSchedules, equipmentSchedules, infiltrationSchedules, HVACAvailabilitySchs)
    # main() returns -1 on validation failure; only unpack the outputs on success.
    if results != -1: HBZones, schedules = results
|
samuto/Honeybee
|
src/Honeybee_Set EnergyPlus Zone Schedules.py
|
Python
|
gpl-3.0
| 11,139
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network.common.utils import to_list, ComplexList
from ansible.module_utils.connection import Connection, ConnectionError
# Module-level cache of 'show running-config' output, keyed by the flag
# string used to request it (see get_config below).
_DEVICE_CONFIGS = {}

# Connection options accepted under the 'provider' dict; credentials fall
# back to ANSIBLE_NET_* environment variables and are kept out of logs.
ios_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int')
}
ios_argument_spec = {
    'provider': dict(type='dict', options=ios_provider_spec),
}
# Same options at the module's top level, marked for removal in Ansible 2.9
# in favour of the 'provider' dict / persistent connections.
ios_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(removed_in_version=2.9, no_log=True),
    'timeout': dict(removed_in_version=2.9, type='int')
}
ios_argument_spec.update(ios_top_spec)
def get_provider_argspec():
    # Expose the provider option schema so shared plugin code can validate it.
    return ios_provider_spec
def get_connection(module):
    """Return the persistent device connection, creating and caching it on
    *module* the first time it is needed.

    Fails the module when the negotiated network_api is not 'cliconf'.
    """
    if not hasattr(module, '_ios_connection'):
        api = get_capabilities(module).get('network_api')
        if api == 'cliconf':
            module._ios_connection = Connection(module._socket_path)
        else:
            module.fail_json(msg='Invalid connection type %s' % api)
    return module._ios_connection
def get_capabilities(module):
    """Return the device capabilities dict, cached on *module* after the
    first fetch."""
    try:
        return module._ios_capabilities
    except AttributeError:
        # First call: query the connection and parse the JSON payload once.
        raw = Connection(module._socket_path).get_capabilities()
        module._ios_capabilities = json.loads(raw)
        return module._ios_capabilities
def check_args(module, warnings):
    # No ios-specific argument checks; the hook is kept so callers can invoke
    # it uniformly across network platforms.
    pass
def get_defaults_flag(module):
    """Return the flag list for requesting the full running config.

    Probes ``show running-config ?`` and prefers ['all'] when the device
    advertises that keyword, otherwise falls back to ['full'].
    """
    conn = get_connection(module)
    help_text = to_text(conn.get('show running-config ?'),
                        errors='surrogate_then_replace')
    # Collect the first token of every non-blank help line.
    leading_words = {line.strip().split()[0]
                     for line in help_text.splitlines() if line.strip()}
    return ['all'] if 'all' in leading_words else ['full']
def get_config(module, flags=None):
    """Fetch the device configuration, caching one copy per flag combination
    in the module-level _DEVICE_CONFIGS dict."""
    cache_key = ' '.join(to_list(flags))
    if cache_key not in _DEVICE_CONFIGS:
        raw = get_connection(module).get_config(filter=flags)
        _DEVICE_CONFIGS[cache_key] = to_text(
            raw, errors='surrogate_then_replace').strip()
    return _DEVICE_CONFIGS[cache_key]
def to_commands(module, commands):
    """Normalize *commands* into command/prompt/answer dicts via ComplexList."""
    normalizer = ComplexList({
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict(),
    }, module)
    return normalizer(commands)
def run_commands(module, commands, check_rc=True):
    """Run each command on the device and return the list of text responses.

    *commands* may contain plain strings or dicts with a ``command`` key and
    optional ``prompt``/``answer`` keys.  On a ConnectionError the module
    fails when *check_rc* is true; otherwise the exception itself is decoded
    as the response.
    """
    responses = list()
    connection = get_connection(module)
    for cmd in to_list(commands):
        if isinstance(cmd, dict):
            command = cmd['command']
            # .get() so dict commands without prompt/answer don't KeyError.
            prompt = cmd.get('prompt')
            answer = cmd.get('answer')
        else:
            command = cmd
            prompt = None
            answer = None
        try:
            out = connection.get(command, prompt, answer)
        except ConnectionError as exc:
            if check_rc:
                module.fail_json(msg=to_text(exc))
            else:
                out = exc
        try:
            out = to_text(out, errors='surrogate_or_strict')
        except UnicodeError:
            module.fail_json(msg=u'Failed to decode output from %s: %s' % (cmd, to_text(out)))
        responses.append(out)
    return responses
def load_config(module, commands):
    """Push configuration *commands* to the device.

    Fails the module on a connection error; otherwise returns whatever
    the connection's edit_config reports.
    """
    connection = get_connection(module)
    try:
        result = connection.edit_config(commands)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc))
    else:
        return result
|
Nitaco/ansible
|
lib/ansible/module_utils/network/ios/ios.py
|
Python
|
gpl-3.0
| 5,949
|
'''
Test data for project 3.

Created on 26/09/2014
@author: javgar119
'''

# Two unit-population clusters one apart on the x axis.
cluster_list = [Cluster(set(), x, 0, 1, 0) for x in range(2)]

# Twenty unit-population clusters spaced one apart along the x axis.
cluster_list2 = [Cluster(set(), x, 0, 1, 0) for x in range(20)]

# Closest pair for cluster_list: distance 1.0 between indices 0 and 1.
expected = set([(1.0, 0, 1)])

# For cluster_list2 every adjacent pair sits at distance 1.0.
expected2 = set((1.0, i, i + 1) for i in range(19))

# Ten clusters at arbitrary 2-D positions.
cluster_list3 = [Cluster(set(), 90.9548590217, -17.089022585, 1, 0),
                 Cluster(set(), 90.2536656675, -70.5911544718, 1, 0),
                 Cluster(set(), -57.5872347006, 99.7124028905, 1, 0),
                 Cluster(set(), -15.9338519877, 5.91547495626, 1, 0),
                 Cluster(set(), 19.1869055492, -28.0681513017, 1, 0),
                 Cluster(set(), -23.0752410653, -42.1353490324, 1, 0),
                 Cluster(set(), -65.1732261872, 19.675582646, 1, 0),
                 Cluster(set(), 99.7789872101, -11.2619165604, 1, 0),
                 Cluster(set(), -43.3699854405, -94.7349852817, 1, 0),
                 Cluster(set(), 48.2281912402, -53.3441788034, 1, 0)]

# Closest pair for cluster_list3.
expected3 = set([(10.5745166749, 0, 7)])
|
JavierGarciaD/Algorithmic_Thinking
|
src/project_3_test_data.py
|
Python
|
gpl-3.0
| 2,503
|
from .util.deb import deb
from .util.nrange import nrange
from .cell import Cell
#F,e,Cursor
from .grid import spoint
# Last cursor position as an (x, y) tuple, or None before the first scp() call.
CURSOR_POS = None

def gcp():  # get cursor position
    """Return the last cursor position set via scp() (None if never set)."""
    global CURSOR_POS
    deb('gcp', CURSOR_POS)
    return CURSOR_POS
def scp(x, y):
    """Record (x, y) as the current cursor position."""
    deb('scp', gcp(), x, y)
    cxc = 0  # todo, normalize in cursor...
    global CURSOR_POS
    CURSOR_POS = (x, y)
    # Sanity check: the getter must observe the value just stored.
    assert (x, y) == gcp()

# todo cpget and cpset
# Public aliases for the cursor-position accessors.
cpget = gcp
cpset = scp
def cursor(HG, x, y, f, X, Y):
    """Draw a four-glyph cursor into the cells around point (x, y).

    *f* supplies the four glyphs in left, up, right, down order; the
    cursor position is recorded via scp(). Returns the updated grid.
    """
    deb('make an a cursor in the empty space around point in cell x,y', x, y)
    assert len(f) == 4
    scp(x, y)
    # Neighbour offsets in the same order as the glyphs: left, up, right, down.
    offsets = ((-1, 0), (0, -1), (1, 0), (0, 1))
    for (dx, dy), glyph in zip(offsets, f):
        HG = spoint(x + dx, y + dy, HG, Cell(glyph, 0, 0))
    return HG
def grid_cursor(HG, x, y, f, X, Y):
    # Thin wrapper: delegates straight to cursor() with the same arguments.
    return cursor(HG, x, y, f, X, Y)
def _clearcursor(HG):
    """Blank out the four cursor glyphs around the recorded position.

    Returns the grid unchanged when no cursor position has been set.
    """
    pos = gcp()
    # One shared blank cell is reused for all four neighbours.
    blank = Cell('.', 0, 0)
    deb('clear a cursor in the empty space around point in cell x,y', pos)
    if not pos:
        return HG
    x, y = pos
    for dx, dy in ((-1, 0), (0, -1), (1, 0), (0, 1)):
        HG = spoint(x + dx, y + dy, HG, blank)
    return HG
|
e7dal/hexy
|
hexy/cursor.py
|
Python
|
gpl-3.0
| 1,100
|
# This Python file uses the following encoding: utf-8
"""
Copyright 2013 Giacomo Antolini <giacomo.antolini@gmail.com>.
This file is part of flocca_dot_com.
flocca_dot_com is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
flocca_dot_com is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Nome-Programma. If not, see <http://www.gnu.org/licenses/>.
"""
import time
from fabric.api import env, sudo, require, settings, local, put, cd
# globals
# Fabric environment: project identity and target-host details.
env.project_name = 'flocca_dot_com'
env.user = 'flocca'
env.tauron_host_name = 'tauron'
env.tauron_domain_name = 'flocca.com'
# Fully qualified host name, e.g. "tauron.flocca.com".
env.tauron_hostname = "{tauron_host_name}.{tauron_domain_name}".format(**env)
env.tauron_address = '178.79.137.238'
env.admin_user = 'flocca'
env.admin_group = 'sudo'
# environments
def tauron():
    """
    Use the tauron virtual server.
    """
    # Target the FQDN assembled in the globals above.
    env.hosts = [env.tauron_hostname]
    # Deployment root for this project on the server.
    env.path = '/opt/webapps/{project_name}'.format(**env)
    env.virtualhost_path = "/"
# tasks
def setup():
    """One-time server preparation: directory layout plus virtualenv.

    Only runs when the tauron host was selected via the tauron() task.
    """
    if env.tauron_hostname in env.hosts:
        setup_directories()
        setup_virtualenv()
def deploy():
    """Package the current Git master, upload, install and activate a release.

    Creates a timestamped release under {path}/releases, installs the pip
    requirements into the project virtualenv, rotates the current/previous
    symlinks, enables the Apache site, syncs the DB, collects static files
    and restarts Apache.
    """
    require('path', 'project_name')
    # Release id is a timestamp, e.g. 20130101120000.
    env.release = time.strftime('%Y%m%d%H%M%S')
    # Create an archive from the current Git master branch and upload it
    local("git archive --format=tar master | gzip > {release}.tar.gz".format(**env))
    sudo("mkdir -p {path}/releases/{release}".format(**env), user='www-data')
    put('{release}.tar.gz'.format(**env), '/tmp/')
    sudo('mv /tmp/{release}.tar.gz {path}/packages/'.format(**env))
    sudo("chown www-data:www-data {path}/packages/{release}.tar.gz;".format(**env))
    sudo('cd {path}/releases/{release} && tar zxf ../../packages/{release}.tar.gz'.format(**env), user='www-data')
    local('rm {release}.tar.gz'.format(**env))
    # Install the required packages from the requirements file using pip
    package_install('python-dev')
    sudo('cd {path}; ./bin/pip install -U --use-mirrors -r ./releases/{release}/requirements.txt'.format(**env),
         user='www-data')
    # Symlink the current release; warn_only so a missing 'previous' is not fatal
    with settings(warn_only=True):
        sudo('cd {path}; rm -f releases/previous'.format(**env), user='www-data')
        sudo('cd {path}; mv -f releases/current releases/previous;'.format(**env), user='www-data')
        sudo('cd {path}; ln -s {release} releases/current'.format(**env), user='www-data')
    # Delete unnecessary files
    sudo('rm {path}/releases/{release}/fabfile.py'.format(**env))
    sudo('rm {path}/releases/{release}/requirements.txt'.format(**env))
    sudo('rm {path}/releases/{release}/.gitignore'.format(**env))
    sudo('rm {path}/releases/{release}/cic/settings_local.py'.format(**env))
    # Install and enable Apache2 site
    sudo('cd {path}/releases/{release}; cp apache2.conf /etc/apache2/sites-available/{project_name}'.format(**env))
    sudo('a2ensite {project_name}'.format(**env))
    # Sync DB
    with cd('{path}/releases/current'.format(**env)):
        sudo('../../bin/python manage.py syncdb --noinput', user='www-data')
    # Collect static files in the same directory
    with cd('{path}/releases/current'.format(**env)):
        sudo('../../bin/python manage.py collectstatic -v0 --noinput', user='www-data')
    sudo('invoke-rc.d apache2 restart')
def setup_directories():
    """Create the webapp root plus its releases/shared/packages subdirs."""
    require('path')
    # Create the directory that will host the webapp
    with settings(warn_only=True):
        sudo("mkdir -p {path}; chown www-data:www-data {path};".format(**env))
        sudo("cd {path}; mkdir releases; mkdir shared; mkdir packages;".format(**env), user='www-data')
def setup_virtualenv():
    """Install pip/virtualenv and create the project's virtualenv in {path}."""
    require('path')
    # Install virtualenv
    package_install('python-setuptools')
    sudo('easy_install pip')
    sudo('pip install virtualenv')
    # warn_only: re-running on an existing virtualenv should not abort.
    with settings(warn_only=True):
        sudo("cd {path}; virtualenv --no-site-packages --distribute .;".format(**env), user='www-data')
# Helpers
def package_install(packages):
    """Install one or more Debian packages without upgrading existing ones.

    Accepts either a single space-separated string (original behaviour) or
    a list/tuple/set of package names, which is joined with spaces before
    being passed to apt-get.
    """
    if isinstance(packages, (list, tuple, set)):
        packages = ' '.join(packages)
    sudo('apt-get -y --no-upgrade install {0}'.format(packages))
|
flocca/flocca_dot_com
|
fabfile.py
|
Python
|
gpl-3.0
| 4,507
|
"""
Program do gry w Blackjack (a.k.a. Oczko) w języku Python przy użyciu biblioteki PyGame
Projekt zaliczeniowy - Języki Skryptowe, Informatyka i Ekonometria, rok 1, WZ, AGH
Autorzy: Joanna Jeziorek, Mateusz Koziestański, Katarzyna Maciocha
III 2016
"""
import random as rd
import os
import sys
import pygame
from pygame import *
pygame.font.init()
pygame.mixer.init()
screen = pygame.display.set_mode((800, 480))
clock = pygame.time.Clock()
# These must be pre-declared here, otherwise the functions below raise errors.
display_font = pygame.font.Font(None, 28)
# Card names of the four aces (hearts, diamonds, spades, clubs).
aces = ['ki_a', 'ka_a', 'pi_a', 'tr_a']
player_hand, dealer_hand = [], []
def load_image(imgname, card):
    """Load an image file and return (converted surface, bounding rect).

    :param imgname: png file name
    :param card: 1 when the image is a playing card (loaded from
        obrazy/karty); any other value loads from the top-level obrazy dir
    :return: the converted pygame surface and its bounding rectangle
    """
    if card == 1:
        fullname = os.path.join("obrazy/karty", imgname)
    else:
        fullname = os.path.join('obrazy', imgname)
    try:
        image = pygame.image.load(fullname)
    except pygame.error as message:
        # Bug fix: the original left the file-name string bound and then
        # crashed later on .convert() with an AttributeError. Report the
        # failure and exit with the real pygame error instead.
        print('Nie można zaladować obrazu:', imgname)
        raise SystemExit(message)
    image = image.convert()
    return image, image.get_rect()
def display(font, sentence):
    """Render *sentence* white-on-black for the status line at the bottom.

    Used to tell the player what is happening; returns the text surface.
    """
    return font.render(sentence, 1, (255, 255, 255), (0, 0, 0))
# =============Funkcje logiki gry==================
def game_over():
    """
    If the player runs out of money, show the end screen.
    The player can only close the game (window close or Escape).
    """
    while 1:
        for event in pygame.event.get():
            if event.type == QUIT:
                sys.exit()
            if event.type == KEYDOWN and event.key == K_ESCAPE:
                sys.exit()
        # Black screen
        screen.fill((0, 0, 0))
        # "Game over" caption
        oFont = pygame.font.Font(None, 50)
        display_font = pygame.font.Font.render(oFont, "Koniec gry! Skonczyly ci sie pieniadze!", 1, (255, 255, 255),
                                               (0, 0, 0))
        screen.blit(display_font, (125, 220))
        pygame.display.flip()
def create_deck():
    """Build and return the 52-card deck.

    Card names follow the convention [first two letters of the suit]_[rank]
    with Polish initials: a = ace, k = king, d = queen, w = jack; number
    cards use their value (2-10).
    """
    suits = ('ki', 'ka', 'tr', 'pi')
    # Honours first, grouped by suit (matches the original ordering).
    deck = ['%s_%s' % (suit, rank) for suit in suits for rank in 'akdw']
    # Then the number cards, one value at a time across all four suits.
    deck.extend('%s_%d' % (suit, n) for n in range(2, 11) for suit in suits)
    return deck
def shuffle(deck):
    # Takes a deck, shuffles it in place with random.shuffle() and returns it.
    rd.shuffle(deck)
    return deck
def return_played(deck, played_deck):
    """Move every discarded card back into *deck* and reshuffle it.

    Bug fix: the original iterated over ``played_deck`` while popping from
    it, so only half of the discards were ever returned. Both lists are
    mutated in place (callers keep their references) and the reshuffled
    deck plus the now-empty discard pile are returned.
    """
    deck.extend(played_deck)
    del played_deck[:]  # clear in place so callers see the same list object
    rd.shuffle(deck)    # same effect as the module-level shuffle() helper
    return deck, played_deck
def deck_deal(deck, played_deck):
    """Shuffle and deal the opening two cards each, dealer first.

    Dealt cards are also recorded in played_deck, mirroring hit().
    Returns (deck, played_deck, player_hand, dealer_hand).
    """
    dealer_hand, player_hand = [], []
    shuffle(deck)
    if len(deck) < 5:
        deck, played_deck = return_played(deck, played_deck)
    # Alternate dealer / player twice, logging each card as played.
    for _ in range(2):
        for hand in (dealer_hand, player_hand):
            card = deck.pop(0)
            hand.append(card)
            played_deck.append(card)
    return deck, played_deck, player_hand, dealer_hand
def hit(deck, played_deck, hand):
    """Deal one card from the top of *deck* into *hand*.

    Refills the deck from the discard pile first when fewer than two cards
    remain. The dealt card is also logged in played_deck. Returns the
    (deck, played_deck, hand) triple.
    """
    if len(deck) < 2:
        deck, played_deck = return_played(deck, played_deck)
    drawn = deck.pop(0)
    hand.append(drawn)
    played_deck.append(drawn)
    return deck, played_deck, hand
def value(hand):
    """Return the blackjack point value of *hand*.

    The rank is read from the 4th character of the card name: aces count
    11, face cards (and '1', the first digit of '10') count 10, digits
    count face value. Aces are downgraded from 11 to 1 one at a time while
    the total exceeds 21.
    """
    total = 0
    for card in hand:
        rank = card[3]
        if rank == 'a':
            total += 11
        elif rank in 'kdw1':
            total += 10
        else:
            total += int(rank)
    # Soften aces while busting.
    for card in hand:
        if total <= 21:
            break
        if card[3] == 'a':
            total -= 10
    return total
def round_end(deck, player_hand, dealer_hand, played_deck, funds, money_gain, money_loss, dealer_cards, CardSprite):
    """Close out a round: reveal the dealer's cards, settle money, set end_round=1.

    Calls game_over() (which never returns) once funds drop to zero or below.
    """
    # NOTE(review): player_hand[:1] is a one-element *list*, which can never be
    # a member of `aces` (a list of strings) - this 3:2 blackjack-bonus branch
    # looks unreachable; player_hand[0] was probably intended. Confirm.
    if len(player_hand) == 2 and player_hand[:1] in aces:
        money_gain += (money_gain * 3 / 2)
    # Rebuild the dealer's sprite group so the hole card is shown face up.
    dealer_cards.empty()
    dealer_card_position = (50, 70)
    for x in dealer_hand:
        card = CardSprite(x, dealer_card_position)
        dealer_card_position = (dealer_card_position[0] + 80, dealer_card_position[1])
        dealer_cards.add(card)
    # NOTE(review): the loops below mutate the lists while iterating them, so
    # only part of each hand reaches the discard pile - verify intent.
    if not dealer_hand:
        for card in player_hand:
            played_deck.append(card)
            player_hand.pop()
        for card in dealer_hand:
            played_deck.append(card)
            dealer_hand.pop()
    # Apply this round's winnings and losses to the bank.
    funds += money_gain
    funds -= money_loss
    display_font = pygame.font.Font(None, 28)
    if funds <= 0:
        game_over()
    end_round = 1
    return deck, player_hand, dealer_hand, played_deck, funds, end_round
def bust(deck, player_hand, dealer_hand, played_deck, funds, money_gain, money_loss, dealer_cards, CardSprite):
    """Handle a player bust: show the loss message and close out the round."""
    msg_font = pygame.font.Font(None, 28)
    display_font = display(msg_font, "Gracz przebił! Przegrana: $%.1f." % money_loss)
    (deck, player_hand, dealer_hand, played_deck,
     funds, end_round) = round_end(deck, player_hand, dealer_hand, played_deck,
                                   funds, money_gain, money_loss,
                                   dealer_cards, CardSprite)
    return deck, player_hand, dealer_hand, played_deck, funds, end_round, display_font
def compare(deck, played_deck, player_hand, dealer_hand, funds, bet, dealer_cards, CardSprite):
    """Play out the dealer's hand (draw to 17) and settle against the player.

    Returns the deck, discard pile, end_round flag, updated funds and the
    status-message surface.
    """
    pv, dv = value(player_hand), value(dealer_hand)
    display_font = pygame.font.Font(None, 28)
    # Dealer must keep drawing until reaching at least 17.
    while dv < 17:
        deck, played_deck, dealer_hand = hit(deck, played_deck, dealer_hand)
        dv = value(dealer_hand)
    if dv < pv <= 21:
        # Player wins
        # NOTE(review): funds is raised by 2*bet here AND round_end() adds a
        # further money_gain of bet - confirm the intended payout.
        funds += 2 * bet
        deck, player_hand, dealer_hand, played_deck, funds, end_round = round_end(deck, player_hand, dealer_hand,
                                                                                 played_deck, funds, bet, 0,
                                                                                 dealer_cards,
                                                                                 CardSprite)
        display_font = display(display_font, "Wygrana: $%.1f." % bet)
    elif pv == dv and pv <= 21:
        # Push (draw)
        deck, player_hand, dealer_hand, played_deck, funds, end_round = round_end(deck, player_hand, dealer_hand,
                                                                                 played_deck, funds, 0, 0,
                                                                                 dealer_cards,
                                                                                 CardSprite)
        display_font = display(display_font, "Remis!")
    elif dv > 21 >= pv:
        # Dealer busted while the player did not
        deck, player_hand, dealer_hand, played_deck, funds, end_round = round_end(deck, player_hand, dealer_hand,
                                                                                 played_deck, funds, bet, 0,
                                                                                 dealer_cards,
                                                                                 CardSprite)
        display_font = display(display_font, "Krupier przebił! Wygrana: $%.1f." % bet)
    else:
        # In every other case the dealer wins
        deck, player_hand, dealer_hand, played_deck, funds, end_round = round_end(deck, player_hand, dealer_hand,
                                                                                 played_deck, funds, 0, bet,
                                                                                 dealer_cards,
                                                                                 CardSprite)
        display_font = display(display_font, "Krupier wygrywa! Przegrana $%.1f." % bet)
    return deck, played_deck, end_round, funds, display_font
def blackJack(deck, played_deck, player_hand, dealer_hand, funds, bet, dealer_cards, CardSprite):
    """Check whether either side has blackjack (BJ) and settle the round.

    NOTE(review): there is no branch for pv != 21 and dv != 21; reaching it
    would raise UnboundLocalError on return. Callers only invoke this after
    verifying that one of the hands totals 21.
    """
    textFont = pygame.font.Font(None, 28)
    pv = value(player_hand)
    dv = value(dealer_hand)
    if pv == 21 and dv == 21:
        # Both player and dealer have BJ: it's a push, nobody loses money.
        # NOTE(review): money_loss=bet is passed to round_end() despite the
        # push message - confirm whether the player should lose on a tie.
        display_font = display(textFont, "Blackjack! Krupier także go ma, więc jest remis!")
        deck, player_hand, dealer_hand, played_deck, funds, end_round = round_end(deck, player_hand, dealer_hand,
                                                                                 played_deck,
                                                                                 funds, 0, bet, dealer_cards,
                                                                                 CardSprite)
    elif pv == 21 and dv != 21:
        # Dealer loses, the player has BJ
        display_font = display(textFont, "Blackjack! Wygrana: $%.1f." % (bet * 1.5))
        deck, player_hand, dealer_hand, played_deck, funds, end_round = round_end(deck, player_hand, dealer_hand,
                                                                                 played_deck,
                                                                                 funds, bet, 0, dealer_cards,
                                                                                 CardSprite)
    elif dv == 21 and pv != 21:
        # Player loses, the dealer has BJ
        deck, player_hand, dealer_hand, played_deck, funds, end_round = round_end(deck, player_hand, dealer_hand,
                                                                                 played_deck,
                                                                                 funds, 0, bet, dealer_cards,
                                                                                 CardSprite)
        display_font = display(textFont, "Krupier ma blackjack! Przegrana: $%.1f." % bet)
    return display_font, player_hand, dealer_hand, played_deck, funds, end_round
# ==============Koniec logiki gry===============
class CardSprite(pygame.sprite.Sprite):
    """Sprite that displays a specific card."""

    def __init__(self, card, position):
        # card: base name of the card image (without .png); position: screen coords.
        pygame.sprite.Sprite.__init__(self)
        card_image = card + ".png"
        self.image, self.rect = load_image(card_image, 1)
        self.position = position

    def update(self):
        # Re-centre the sprite on its stored position each frame.
        self.rect.center = self.position
# The update method of each button is essentially the click-handler that runs
# that button's behaviour after a mouse press.
class BetButtonUp(pygame.sprite.Sprite):
    """Button that raises the bet by $5 (never above the player's funds)."""

    # noinspection PyTypeChecker
    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image, self.rect = load_image("arrow_up.png", 0)
        self.position = (710, 225)

    def update(self, mX, mY, bet, funds, click, end_round):
        """Handle a click between rounds; returns the (bet, click) pair."""
        self.image, self.rect = load_image("arrow_up.png", 0)
        self.position = (710, 225)
        self.rect.center = self.position
        was_clicked = self.rect.collidepoint(mX, mY) == 1 and click == 1
        if was_clicked and end_round == 1:
            if bet < funds:
                bet += 5.0
                # Snap back down to a multiple of 5 if it ever drifts.
                while bet % 5 != 0:
                    bet -= 1
            click = 0
        return bet, click
class BetButtonDown(pygame.sprite.Sprite):
    """Button that lowers the bet by $5 (never below $5)."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image, self.rect = load_image("arrow_down.png", 0)
        # NOTE: update() repositions this to (760, 225) on every frame.
        self.position = (710, 225)

    def update(self, mX, mY, bet, click, end_round):
        """Handle a click between rounds; returns the (bet, click) pair."""
        self.image, self.rect = load_image("arrow_down.png", 0)
        self.position = (760, 225)
        self.rect.center = self.position
        was_clicked = self.rect.collidepoint(mX, mY) == 1 and click == 1
        if was_clicked and end_round == 1:
            if bet > 5:
                bet -= 5.0
                # Snap back up to a multiple of 5 if it ever drifts.
                while bet % 5 != 0:
                    bet += 1
            click = 0
        return bet, click
class HitButton(pygame.sprite.Sprite):
    """Button that lets the player draw another card from the deck."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image, self.rect = load_image("hit.png", 0)
        self.position = (735, 390)

    def update(self, mX, mY, deck, played_deck, player_hand, dealer_cards, player_card_position, end_round, CardSprite,
               click):
        """Deal one card to the player when clicked mid-round.

        NOTE(review): the new sprite is added to the *dealer_cards*
        parameter, but the main loop passes player_cards in that position,
        so it lands in the player's group - confirm the naming.
        """
        self.image, self.rect = load_image("hit.png", 0)
        self.position = (735, 390)
        self.rect.center = self.position
        if self.rect.collidepoint(mX, mY) == 1 and click == 1:
            if end_round == 0:
                deck, played_deck, player_hand = hit(deck, played_deck, player_hand)
                current_card = len(player_hand) - 1
                card = CardSprite(player_hand[current_card], player_card_position)
                dealer_cards.add(card)
                # Each new card is laid 80px further to the left.
                player_card_position = (player_card_position[0] - 80, player_card_position[1])
            click = 0
        return deck, played_deck, player_hand, player_card_position, click
class StandButton(pygame.sprite.Sprite):
    """Button that lets the player stand on their current hand."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image, self.rect = load_image("stand.png", 0)
        self.position = (735, 350)

    def update(self, mX, mY, deck, played_deck, player_hand, dealer_hand, dealer_cards, player_card_position, end_round,
               CardSprite, funds,
               bet, display_font):
        """Resolve the round via compare() when clicked mid-round."""
        self.image, self.rect = load_image("stand.png", 0)
        self.position = (735, 350)
        self.rect.center = self.position
        if self.rect.collidepoint(mX, mY) == 1:
            if end_round == 0:
                deck, played_deck, end_round, funds, display_font = compare(deck, played_deck, player_hand, dealer_hand,
                                                                            funds, bet, dealer_cards, CardSprite)
        return deck, played_deck, end_round, funds, player_hand, played_deck, player_card_position, display_font
class DoubleButton(pygame.sprite.Sprite):
    """Button that doubles the bet and deals exactly one more card."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image, self.rect = load_image("double.png", 0)
        self.position = (735, 305)

    def update(self, mX, mY, deck, played_deck, player_hand, dealer_hand, playerCards, dealer_cards,
               player_card_position,
               end_round,
               CardSprite, funds, bet, display_font):
        """Double down: allowed only with exactly two cards and enough funds.

        The bet is doubled for the resolution in compare() and halved back
        before returning, so the displayed stake is unchanged.
        """
        self.image, self.rect = load_image("double.png", 0)
        self.position = (735, 305)
        self.rect.center = self.position
        if self.rect.collidepoint(mX, mY) == 1:
            if end_round == 0 and funds >= bet * 2 and len(player_hand) == 2:
                bet *= 2
                deck, played_deck, player_hand = hit(deck, played_deck, player_hand)
                current_card = len(player_hand) - 1
                card = CardSprite(player_hand[current_card], player_card_position)
                playerCards.add(card)
                player_card_position = (player_card_position[0] - 80, player_card_position[1])
                # Standing is forced after the double-down card.
                deck, played_deck, end_round, funds, display_font = compare(deck, played_deck, player_hand, dealer_hand,
                                                                            funds, bet, dealer_cards, CardSprite)
                bet /= 2
        return deck, played_deck, end_round, funds, player_hand, played_deck, player_card_position, display_font, bet
class DealButton(pygame.sprite.Sprite):
    """Button that starts a new round / deal."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image, self.rect = load_image("deal.png", 0)
        self.position = (735, 430)

    def update(self, mX, mY, deck, played_deck, end_round, CardSprite, dealer_cards, player_hand, dealer_hand,
               dealer_card_posit,
               player_card_position, display_font, playerCards, click, handsPlayed) -> object:
        """Deal new hands when clicked between rounds.

        Clears both sprite groups, deals via deck_deal(), lays out the
        player's two cards and the dealer's face-down + face-up cards,
        then flips end_round to 0 and bumps the hands-played counter.
        """
        textFont = pygame.font.Font(None, 28)
        self.image, self.rect = load_image("deal.png", 0)
        self.position = (735, 430)
        self.rect.center = self.position
        if self.rect.collidepoint(mX, mY) == 1:
            if end_round == 1 and click == 1:
                display_font = display(textFont, "")
                dealer_cards.empty()
                playerCards.empty()
                deck, played_deck, player_hand, dealer_hand = deck_deal(deck, played_deck)
                dealer_card_posit = (50, 70)
                player_card_position = (540, 370)
                for x in player_hand:
                    card = CardSprite(x, player_card_position)
                    player_card_position = (player_card_position[0] - 80, player_card_position[1])
                    playerCards.add(card)
                # Dealer shows one hidden card plus the first card face up.
                faceDownCard = CardSprite("back", dealer_card_posit)
                dealer_card_posit = (dealer_card_posit[0] + 80, dealer_card_posit[1])
                dealer_cards.add(faceDownCard)
                card = CardSprite(dealer_hand[0], dealer_card_posit)
                dealer_cards.add(card)
                end_round = 0
                click = 0
                handsPlayed += 1
        return deck, played_deck, player_hand, dealer_hand, dealer_card_posit, player_card_position, end_round, display_font, click, handsPlayed
# font used on the right-hand side of the screen (funds, bet, etc.)
textFont = pygame.font.Font(None, 28)
# load the table/background image
background, backgroundRect = load_image("plansza.png", 0)
# sprite group holding the dealer's card graphics
dealer_cards = pygame.sprite.Group()
# as above, but for the player
player_cards = pygame.sprite.Group()
# Create instances of all the buttons
bet_up = BetButtonUp()
bet_down = BetButtonDown()
stand_button = StandButton()
deal_butt = DealButton()
hit_butt = HitButton()
dbl_butt = DoubleButton()
# Group containing every button
buttons = pygame.sprite.Group(bet_up, bet_down, hit_butt, stand_button, deal_butt, dbl_butt)
# Create the deck
deck = create_deck()
# Empty discard pile
played_deck = []
dealer_card_position, player_card_position = (), ()
mX, mY = 0, 0
click = 0
# Starting values for the bet and the bank.
funds = 100.0
bet = 10.0
# Number of rounds played - variable initialisation
handsPlayed = 0
# Marks the end of a round: 0 during play, 1 between rounds.
end_round = 1
firstTime = 1
# Main game loop: draw the table, pump events, then let every button's
# update() run its click handling for this frame.
while 1:
    screen.blit(background, backgroundRect)
    # The bet can never exceed the bank.
    if bet > funds:
        bet = funds
    if end_round == 1 and firstTime == 1:
        display_font = display(textFont,
                               "Klikaj w strzałki, aby określić stawkę. Potem wciśnij Deal aby rozpocząć grę.")
        firstTime = 0
    screen.blit(display_font, (10, 455))
    fundsFont = pygame.font.Font.render(textFont, "Bank: $%.1f" % funds, 1, (255, 255, 255), (0, 0, 0))
    screen.blit(fundsFont, (658, 175))
    betFont = pygame.font.Font.render(textFont, "Stawka: $%.1f" % bet, 1, (255, 255, 255), (0, 0, 0))
    screen.blit(betFont, (658, 259))
    hpFont = pygame.font.Font.render(textFont, "Runda: %i " % handsPlayed, 1, (255, 255, 255), (0, 0, 0))
    screen.blit(hpFont, (658, 150))
    for event in pygame.event.get():
        if event.type == QUIT:
            sys.exit()
        elif event.type == MOUSEBUTTONDOWN:
            if event.button == 1:
                mX, mY = pygame.mouse.get_pos()
                click = 1
        elif event.type == MOUSEBUTTONUP:
            mX, mY = 0, 0
            click = 0
    # Initial check whether anyone has blackjack after the first two cards.
    # Since an "insurance bet" is not implemented, a dealer blackjack wins immediately.
    if end_round == 0:
        # what happens while a round is in progress
        pv = value(player_hand)
        dv = value(dealer_hand)
        if pv == 21 and len(player_hand) == 2:
            # The player has blackjack
            display_font, player_hand, dealer_hand, played_deck, funds, end_round = blackJack(deck, played_deck,
                                                                                             player_hand,
                                                                                             dealer_hand, funds, bet,
                                                                                             dealer_cards,
                                                                                             CardSprite)
        if dv == 21 and len(dealer_hand) == 2:
            # The dealer has blackjack
            display_font, player_hand, dealer_hand, played_deck, funds, end_round = blackJack(deck, played_deck,
                                                                                             player_hand,
                                                                                             dealer_hand, funds, bet,
                                                                                             dealer_cards,
                                                                                             CardSprite)
        if pv > 21:
            # The player busted
            deck, player_hand, dealer_hand, played_deck, funds, end_round, display_font = bust(deck, player_hand,
                                                                                              dealer_hand,
                                                                                              played_deck, funds, 0,
                                                                                              bet, dealer_cards,
                                                                                              CardSprite)
    # Button updates
    # deal
    deck, played_deck, player_hand, dealer_hand, dealer_card_position, player_card_position, end_round, display_font, click, handsPlayed = deal_butt.update(
        mX, mY, deck, played_deck, end_round, CardSprite, dealer_cards, player_hand, dealer_hand, dealer_card_position,
        player_card_position, display_font,
        player_cards, click, handsPlayed)
    # hit
    deck, played_deck, player_hand, player_card_position, click = hit_butt.update(mX, mY, deck, played_deck,
                                                                                  player_hand,
                                                                                  player_cards,
                                                                                  player_card_position, end_round,
                                                                                  CardSprite, click)
    # stand
    deck, played_deck, end_round, funds, player_hand, played_deck, player_card_position, display_font = stand_button.update(
        mX, mY, deck, played_deck, player_hand, dealer_hand, dealer_cards, player_card_position, end_round, CardSprite,
        funds, bet, display_font)
    # double
    deck, played_deck, end_round, funds, player_hand, played_deck, player_card_position, display_font, bet = dbl_butt.update(
        mX, mY, deck, played_deck, player_hand, dealer_hand, player_cards, dealer_cards, player_card_position,
        end_round, CardSprite, funds, bet, display_font)
    # Bet buttons
    bet, click = bet_up.update(mX, mY, bet, funds, click, end_round)
    bet, click = bet_down.update(mX, mY, bet, click, end_round)
    # draw the buttons onto the screen
    buttons.draw(screen)
    # if there are cards on the table, draw them
    if dealer_cards:
        player_cards.update()
        player_cards.draw(screen)
        dealer_cards.update()
        dealer_cards.draw(screen)
    # update the game window
    pygame.display.flip()
|
Kotzyk/Projekt-Blackjack
|
blackjack.py
|
Python
|
gpl-3.0
| 25,701
|
from chiplotle.geometry.core.path import Path
from chiplotle.geometry.core.coordinate import Coordinate
from chiplotle.core import errors
from py.test import raises
def test_path_add_01():
    '''A Path and an int cannot be added.'''
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        Path([(1, 2), (3, 4)]) + 3
def test_path_add_02():
    '''A Path and a float cannot be added.'''
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        Path([(1, 2), (3, 4)]) + 3.2
def test_path_radd_02():
    '''A float and a Path cannot be added.'''
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        3.2 + Path([(1, 2), (3, 4)])
def test_path_add_03():
    '''A Path and a Coordinate can be added.'''
    original = Path([(1, 2), (3, 4)])
    shifted = original + Coordinate(1, 2)
    # Addition returns a new Path with every point translated by (1, 2).
    assert shifted is not original
    assert isinstance(shifted, Path)
    assert shifted == Path([(2, 4), (4, 6)])
def test_path_radd_03():
    '''A Coordinate and a Path can be added.'''
    original = Path([(1, 2), (3, 4)])
    shifted = Coordinate(1, 2) + original
    # Reflected addition also returns a new, translated Path.
    assert shifted is not original
    assert isinstance(shifted, Path)
    assert shifted == Path([(2, 4), (4, 6)])
def test_path_add_04():
    '''A Path and a duple cannot be added.'''
    a = Path([(1, 2), (3, 4)])
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        a + (1, 2)
def test_path_radd_04():
    '''A duple and a Path cannot be added.'''
    a = Path([(1, 2), (3, 4)])
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        (1, 2) + a
def test_path_add_05():
    '''A 2D Path and a triple cannot be added.'''
    a = Path([(1, 2), (3, 4)])
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        a + (1, 2, 3)
def test_path_add_06():
    '''A Path and a Path cannot be added.'''
    a = Path([(1, 2), (3, 4)])
    b = Path([(2, 3)])
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        a + b
## in place addition __iadd__ ##
def test_path_iadd_01():
    '''A float and a Path cannot be added.'''
    t = Path([(1, 2), (3, 4)])
    # pytest removed the string-eval form of raises(); use the context manager.
    with raises(TypeError):
        t += 3.2
|
drepetto/chiplotle
|
chiplotle/geometry/core/test/test_path_add.py
|
Python
|
gpl-3.0
| 1,814
|
__author__ = 'hkar'
import Vault.Crypto
import Vault.Key
from test_helpers import *
import os
def test_aes(tmpdir):
    """Round-trip a temp file through AES encrypt/decrypt and compare contents."""
    # make tmp text files
    f = create_test_file(tmpdir)
    text = str(f.read())
    # define file names
    file_in = str(f)
    file_out = file_in + ".enc"
    # generate random secret
    secret = get_random_string()
    # encrypt test file
    Vault.Crypto.AesSymmetric.encrypt(file_in, file_out, secret)
    # remove original test file
    os.remove(file_in)
    # decrypt test file
    Vault.Crypto.AesSymmetric.decrypt(file_out, file_in, secret)
    # the decrypted file must match the original plaintext
    assert text == open(file_in, 'r').read()
def test_rsa(tmpdir):
    """Round-trip a temp file through RSA encrypt/decrypt with fresh keys."""
    # make tmp text files (short: RSA can only encrypt small payloads)
    f = create_test_file(tmpdir, length=64)
    text = str(f.read())
    # define file names
    file_in = str(f)
    file_out = file_in + ".enc"
    # generate keys
    Vault.Key.RsaKey.generate()
    # encrypt test file
    Vault.Crypto.RsaAsymmetric.encrypt(file_in, file_out, Vault.Key.RsaKey.public())
    # remove original test file
    os.remove(file_in)
    # decrypt test file
    Vault.Crypto.RsaAsymmetric.decrypt(file_out, file_in, Vault.Key.RsaKey.private())
    # clean up the generated key pair before asserting
    Vault.Key.RsaKey.delete_keys()
    assert text == open(file_in, 'r').read()
|
hkar/get-vault
|
tests/test_crypto.py
|
Python
|
gpl-3.0
| 1,236
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import thread, time, ast, sys, re
import socket, traceback
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk, GObject, cairo
from decimal import Decimal
from electrum.util import print_error, InvalidPassword
from electrum.bitcoin import is_valid, COIN
from electrum.wallet import NotEnoughFunds
from electrum import WalletStorage, Wallet
Gdk.threads_init()
APP_NAME = "Electrum"
import platform
MONOSPACE_FONT = 'Lucida Console' if platform.system() == 'Windows' else 'monospace'
from electrum.util import format_satoshis, parse_URI
from electrum.bitcoin import MIN_RELAY_TX_FEE
def numbify(entry, is_int=False):
    """Sanitise a Gtk entry to numeric text and return its parsed value.

    In float mode the value is returned in base units (int(Decimal * COIN),
    keeping at most 8 decimal places); in int mode a plain int. Returns
    None when the text does not parse. The entry text is rewritten with
    the sanitised string.
    """
    text = entry.get_text().strip()
    allowed = '0123456789' if is_int else '0123456789.'
    s = ''.join(ch for ch in text if ch in allowed)
    if not is_int and '.' in s:
        # Keep only the first decimal point and at most 8 decimal places.
        p = s.find('.')
        s = s.replace('.', '')
        s = s[:p] + '.' + s[p:p + 8]
    try:
        amount = int(s) if is_int else int(Decimal(s) * COIN)
    except Exception:
        amount = None
    entry.set_text(s)
    return amount
def show_seed_dialog(seed, parent):
    """Display the wallet generation seed in a modal message dialog.

    Falls back to show_message when no seed is available.
    """
    if not seed:
        show_message("No seed")
        return
    dialog = Gtk.MessageDialog(
        parent = parent,
        flags = Gtk.DialogFlags.MODAL,
        buttons = Gtk.ButtonsType.OK,
        message_format = "Your wallet generation seed is:\n\n" + '"' + seed + '"'\
            + "\n\nPlease keep it in a safe place; if you lose it, you will not be able to restore your wallet.\n\n" )
    dialog.set_title("Seed")
    dialog.show()
    dialog.run()
    dialog.destroy()
def restore_create_dialog():
    """Ask whether to create a new wallet or restore one from seed.

    Returns 'create', 'restore', or False when the user cancels.
    """
    # ask if the user wants to create a new wallet, or recover from a seed.
    # if he wants to recover, and nothing is found, do not create wallet
    dialog = Gtk.Dialog("electrum", parent=None,
                        flags=Gtk.DialogFlags.MODAL,
                        buttons= ("create", 0, "restore",1, "cancel",2) )
    label = Gtk.Label("Wallet file not found.\nDo you want to create a new wallet,\n or to restore an existing one?" )
    label.show()
    dialog.vbox.pack_start(label, True, True, 0)
    dialog.show()
    r = dialog.run()
    dialog.destroy()
    # Response ids come from the buttons tuple above: 0=create, 1=restore, 2=cancel.
    if r==2: return False
    return 'restore' if r==1 else 'create'
def run_recovery_dialog():
    """Prompt the user for a wallet seed or mnemonic word list.

    Returns the seed string when it validates via Wallet.is_seed,
    otherwise False (cancel or invalid seed).
    """
    message = "Please enter your wallet seed or the corresponding mnemonic list of words, and the gap limit of your wallet."
    dialog = Gtk.MessageDialog(
        parent=None,
        flags=Gtk.DialogFlags.MODAL,
        buttons=Gtk.ButtonsType.OK_CANCEL,
        message_format=message)
    content = dialog.vbox
    dialog.set_default_response(Gtk.ResponseType.OK)
    # Single row: caption + text entry + help button.
    row = Gtk.HBox()
    caption = Gtk.Label(label='Seed or mnemonic:')
    caption.set_size_request(150, -1)
    row.pack_start(caption, False, False, 10)
    caption.show()
    entry = Gtk.Entry()
    entry.show()
    entry.set_size_request(450, -1)
    row.pack_start(entry, False, False, 10)
    add_help_button(row, '.')
    row.show()
    content.pack_start(row, False, False, 5)
    dialog.show()
    answer = dialog.run()
    seed = entry.get_text()
    dialog.destroy()
    if answer == Gtk.ResponseType.CANCEL:
        return False
    if Wallet.is_seed(seed):
        return seed
    show_message("no seed")
    return False
def run_settings_dialog(self):
    """Show the wallet settings dialog (fee per kb, displayed zeros).

    Note: module-level function, but written to take an ElectrumWindow
    instance as *self* (called as run_settings_dialog(self) from the
    prefs button).  Persists changes via self.config and refreshes the
    history tab when the zero-padding setting changes.
    """
    message = "Here are the settings of your wallet. For more explanations, click on the question mark buttons next to each input field."
    dialog = Gtk.MessageDialog(
        parent = self.window,
        flags = Gtk.DialogFlags.MODAL,
        buttons = Gtk.ButtonsType.OK_CANCEL,
        message_format = message)
    image = Gtk.Image()
    image.set_from_stock(Gtk.STOCK_PREFERENCES, Gtk.IconSize.DIALOG)
    image.show()
    dialog.set_image(image)
    dialog.set_title("Settings")
    vbox = dialog.vbox
    dialog.set_default_response(Gtk.ResponseType.OK)
    # --- fee-per-kb row (displayed in BTC, stored in satoshis) ---
    fee = Gtk.HBox()
    fee_entry = Gtk.Entry()
    fee_label = Gtk.Label(label='Transaction fee:')
    fee_label.set_size_request(150,10)
    fee_label.show()
    fee.pack_start(fee_label,False, False, 10)
    fee_entry.set_text(str(Decimal(self.wallet.fee_per_kb) / COIN))
    # numbify keeps the entry numeric as the user types (is_int=False)
    fee_entry.connect('changed', numbify, False)
    fee_entry.show()
    fee.pack_start(fee_entry,False,False, 10)
    add_help_button(fee, 'Fee per kilobyte of transaction. Recommended value:0.0001')
    fee.show()
    vbox.pack_start(fee, False,False, 5)
    # --- number-of-displayed-zeros row (integer field) ---
    nz = Gtk.HBox()
    nz_entry = Gtk.Entry()
    nz_label = Gtk.Label(label='Display zeros:')
    nz_label.set_size_request(150,10)
    nz_label.show()
    nz.pack_start(nz_label,False, False, 10)
    nz_entry.set_text( str( self.num_zeros ))
    nz_entry.connect('changed', numbify, True)
    nz_entry.show()
    nz.pack_start(nz_entry,False,False, 10)
    add_help_button(nz, "Number of zeros displayed after the decimal point.\nFor example, if this number is 2, then '5.' is displayed as '5.00'")
    nz.show()
    vbox.pack_start(nz, False,False, 5)
    dialog.show()
    r = dialog.run()
    # read the entries before destroying the dialog
    fee = fee_entry.get_text()
    nz = nz_entry.get_text()
    dialog.destroy()
    if r==Gtk.ResponseType.CANCEL:
        return
    # convert the BTC string back to satoshis; reject unparseable input
    try:
        fee = int(COIN * Decimal(fee))
    except Exception:
        show_message("error")
        return
    self.config.set_key('fee_per_kb', fee)
    try:
        nz = int( nz )
        # cap at 8, the maximum number of decimal places of a BTC amount
        if nz>8: nz = 8
    except Exception:
        show_message("error")
        return
    if self.num_zeros != nz:
        self.num_zeros = nz
        self.config.set_key('num_zeros',nz,True)
        self.update_history_tab()
def run_network_dialog( network, parent ):
    """Show the server-selection dialog and apply the chosen server.

    Lists the known servers, lets the user pick host/port/protocol
    (protocol letters map as 't'=TCP, 's'=SSL, 'h'=HTTP, 'g'=HTTPS),
    and on OK calls network.set_parameters with the parsed choice.
    Returns False on cancel or on an unparseable server string.
    """
    image = Gtk.Image()
    image.set_from_stock(Gtk.STOCK_NETWORK, Gtk.IconSize.DIALOG)
    host, port, protocol, proxy_config, auto_connect = network.get_parameters()
    server = "%s:%s:%s"%(host, port, protocol)
    if parent:
        if network.is_connected():
            status = "Connected to %s\n%d blocks"%(host, network.get_local_height())
        else:
            status = "Not connected"
    else:
        # Fix: removed a dead "import random" that was never used here.
        status = "Please choose a server.\nSelect cancel if you are offline."
    servers = network.get_servers()
    dialog = Gtk.MessageDialog( parent, Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
                                    Gtk.MessageType.QUESTION, Gtk.ButtonsType.OK_CANCEL, status)
    dialog.set_title("Server")
    dialog.set_image(image)
    image.show()
    vbox = dialog.vbox
    # --- host:port:protocol entry row ---
    host_box = Gtk.HBox()
    host_label = Gtk.Label(label='Connect to:')
    host_label.set_size_request(100,-1)
    host_label.show()
    host_box.pack_start(host_label, False, False, 10)
    host_entry = Gtk.Entry()
    host_entry.set_size_request(200,-1)
    if network.is_connected():
        host_entry.set_text(server)
    else:
        host_entry.set_text("Not Connected")
    host_entry.show()
    host_box.pack_start(host_entry, False, False, 10)
    add_help_button(host_box, 'The name, port number and protocol of your Electrum server, separated by a colon. Example: "ecdsa.org:50002:s". Some servers allow you to connect through http (port 80) or https (port 443)')
    host_box.show()
    # --- protocol combo box ---
    p_box = Gtk.HBox(False, 10)
    p_box.show()
    p_label = Gtk.Label(label='Protocol:')
    p_label.set_size_request(100,-1)
    p_label.show()
    p_box.pack_start(p_label, False, False, 10)
    combobox = Gtk.ComboBoxText()
    combobox.show()
    combobox.append_text("TCP")
    combobox.append_text("SSL")
    combobox.append_text("HTTP")
    combobox.append_text("HTTPS")
    p_box.pack_start(combobox, True, True, 0)
    def current_line():
        # split the host entry into [host, port, protocol]
        return unicode(host_entry.get_text()).split(':')
    def set_combobox(protocol):
        # combo index follows the protocol-letter order 't','s','h','g'
        combobox.set_active('tshg'.index(protocol))
    def set_protocol(protocol):
        # switch to the requested protocol, falling back to the first
        # protocol this server actually offers
        host = current_line()[0]
        pp = servers[host]
        if protocol not in pp.keys():
            protocol = pp.keys()[0]
            set_combobox(protocol)
        port = pp[protocol]
        host_entry.set_text( host + ':' + port + ':' + protocol)
    combobox.connect("changed", lambda x:set_protocol('tshg'[combobox.get_active()]))
    if network.is_connected():
        set_combobox(protocol)
    # --- server list ---
    server_list = Gtk.ListStore(str)
    for host in servers.keys():
        server_list.append([host])
    treeview = Gtk.TreeView(model=server_list)
    treeview.show()
    label = 'Active Servers' if network.is_connected() else 'Default Servers'
    tvcolumn = Gtk.TreeViewColumn(label)
    treeview.append_column(tvcolumn)
    cell = Gtk.CellRendererText()
    tvcolumn.pack_start(cell, False)
    tvcolumn.add_attribute(cell, 'text', 0)
    vbox.pack_start(host_box, False,False, 5)
    vbox.pack_start(p_box, True, True, 0)
    #scroll = Gtk.ScrolledWindow()
    #scroll.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.ALWAYS)
    #scroll.add_with_viewport(treeview)
    #scroll.show()
    #vbox.pack_start(scroll, True)
    vbox.pack_start(treeview, True, True, 0)
    def my_treeview_cb(treeview):
        # selecting a server fills the host entry, preferring TCP
        path, view_column = treeview.get_cursor()
        host = server_list.get_value( server_list.get_iter(path), 0)
        pp = servers[host]
        if 't' in pp.keys():
            protocol = 't'
        else:
            protocol = pp.keys()[0]
        port = pp[protocol]
        host_entry.set_text( host + ':' + port + ':' + protocol)
        set_combobox(protocol)
    treeview.connect('cursor-changed', my_treeview_cb)
    dialog.show_all()
    r = dialog.run()
    server = host_entry.get_text()
    dialog.destroy()
    if r==Gtk.ResponseType.CANCEL:
        return False
    try:
        host, port, protocol = server.split(':')
    except Exception:
        show_message("error:" + server)
        return False
    network.set_parameters(host, port, protocol, proxy_config, auto_connect)
def show_message(message, parent=None):
    """Show a modal message dialog with a single Close button."""
    dlg = Gtk.MessageDialog(
        parent=parent,
        flags=Gtk.DialogFlags.MODAL,
        buttons=Gtk.ButtonsType.CLOSE,
        message_format=message)
    dlg.show()
    dlg.run()
    dlg.destroy()
def password_line(label):
    """Build a row with *label* and a masked password entry.

    Returns (hbox, entry) so callers can pack the row and later read
    the entry's text.
    """
    row = Gtk.HBox()
    caption = Gtk.Label(label=label)
    caption.set_size_request(120, 10)
    caption.show()
    row.pack_start(caption, False, False, 10)
    entry = Gtk.Entry()
    entry.set_size_request(300, -1)
    entry.set_visibility(False)  # mask typed characters
    entry.show()
    row.pack_start(entry, False, False, 10)
    row.show()
    return row, entry
def password_dialog(parent):
    """Prompt for the wallet password; return it, or None on cancel."""
    dialog = Gtk.MessageDialog(parent,
                               Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
                               Gtk.MessageType.QUESTION, Gtk.ButtonsType.OK_CANCEL,
                               "Please enter your password.")
    dialog.get_image().set_visible(False)
    row, entry = password_line('Password:')
    # pressing Enter in the entry is equivalent to clicking OK
    entry.connect("activate", lambda e, dlg, resp: dlg.response(resp), dialog, Gtk.ResponseType.OK)
    dialog.vbox.pack_start(row, False, True, 0)
    dialog.show()
    answer = dialog.run()
    pw = entry.get_text()
    dialog.destroy()
    if answer != Gtk.ResponseType.CANCEL:
        return pw
def change_password_dialog(is_encrypted, parent):
    """Ask for a new wallet password (and the current one if encrypted).

    Returns (True, old_password, new_password) on success, None on
    cancel.  Re-prompts recursively until the two new-password fields
    match; an empty new password is returned as None (disables
    encryption).
    """
    if parent:
        if is_encrypted:
            msg = 'Your wallet is encrypted. Use this dialog to change the password. To disable wallet encryption, enter an empty new password.'
        else:
            msg = 'Your wallet keys are not encrypted'
    else:
        msg = "Please choose a password to encrypt your wallet keys"
    dialog = Gtk.MessageDialog( parent, Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.QUESTION, Gtk.ButtonsType.OK_CANCEL, msg)
    dialog.set_title("Change password")
    icon = Gtk.Image()
    icon.set_from_stock(Gtk.STOCK_DIALOG_AUTHENTICATION, Gtk.IconSize.DIALOG)
    icon.show()
    dialog.set_image(icon)
    if is_encrypted:
        current_row, current_entry = password_line('Current password:')
        dialog.vbox.pack_start(current_row, False, True, 0)
    new_row, new_entry = password_line('New password:')
    dialog.vbox.pack_start(new_row, False, True, 5)
    confirm_row, confirm_entry = password_line('Confirm password:')
    dialog.vbox.pack_start(confirm_row, False, True, 5)
    dialog.show()
    answer = dialog.run()
    password = current_entry.get_text() if is_encrypted else None
    new_password = new_entry.get_text()
    confirmation = confirm_entry.get_text()
    dialog.destroy()
    if answer == Gtk.ResponseType.CANCEL:
        return
    if new_password != confirmation:
        show_message("passwords do not match")
        return change_password_dialog(is_encrypted, parent)
    if not new_password:
        new_password = None
    return True, password, new_password
def add_help_button(hbox, message):
    """Append a small '?' button to *hbox* that pops up *message*."""
    btn = Gtk.Button('?')
    btn.connect("clicked", lambda w: show_message(message))
    btn.show()
    hbox.pack_start(btn, False, False, 0)
class ElectrumWindow:
    def show_message(self, msg):
        # Modal message box parented to the main window (delegates to the
        # module-level show_message helper).
        show_message(msg, self.window)
def on_key(self, w, event):
if Gdk.ModifierType.CONTROL_MASK & event.state and event.keyval in [113,119]:
Gtk.main_quit()
return True
    def __init__(self, wallet, config, network):
        """Build the main Electrum window: tabs, status bar and pollers.

        Starts two daemon-style background threads: one that refreshes
        the status bar twice a second, and (for seeded wallets) one that
        resolves aliases typed into the pay-to entry.
        """
        self.config = config
        self.wallet = wallet
        self.network = network
        self.funds_error = False # True if not enough funds
        self.num_zeros = int(self.config.get('num_zeros',0))
        self.window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
        self.window.connect('key-press-event', self.on_key)
        title = 'Electrum ' + self.wallet.electrum_version + ' - ' + self.config.path
        if not self.wallet.seed: title += ' [seedless]'
        self.window.set_title(title)
        self.window.connect("destroy", Gtk.main_quit)
        self.window.set_border_width(0)
        #self.window.connect('mykeypress', Gtk.main_quit)
        self.window.set_default_size(720, 350)
        self.wallet_updated = False
        from electrum.util import StoreDict
        self.contacts = StoreDict(self.config, 'contacts')
        # --- notebook tabs; Send tab only exists for seeded wallets ---
        vbox = Gtk.VBox()
        self.notebook = Gtk.Notebook()
        self.create_history_tab()
        if self.wallet.seed:
            self.create_send_tab()
        self.create_recv_tab()
        self.create_book_tab()
        self.create_about_tab()
        self.notebook.show()
        vbox.pack_start(self.notebook, True, True, 2)
        # --- status bar with network / seed / settings / password buttons ---
        self.status_bar = Gtk.Statusbar()
        vbox.pack_start(self.status_bar, False, False, 0)
        self.status_image = Gtk.Image()
        self.status_image.set_from_stock(Gtk.STOCK_NO, Gtk.IconSize.MENU)
        self.status_image.set_alignment(True, 0.5 )
        self.status_image.show()
        self.network_button = Gtk.Button()
        self.network_button.connect("clicked", lambda x: run_network_dialog(self.network, self.window) )
        self.network_button.add(self.status_image)
        self.network_button.set_relief(Gtk.ReliefStyle.NONE)
        self.network_button.show()
        self.status_bar.pack_end(self.network_button, False, False, 0)
        if self.wallet.seed:
            def seedb(w, wallet):
                # show the seed, asking for the password first if needed
                if wallet.use_encryption:
                    password = password_dialog(self.window)
                    if not password: return
                else: password = None
                seed = wallet.get_mnemonic(password)
                show_seed_dialog(seed, self.window)
            button = Gtk.Button('S')
            button.connect("clicked", seedb, self.wallet )
            button.set_relief(Gtk.ReliefStyle.NONE)
            button.show()
            self.status_bar.pack_end(button,False, False, 0)
        settings_icon = Gtk.Image()
        settings_icon.set_from_stock(Gtk.STOCK_PREFERENCES, Gtk.IconSize.MENU)
        settings_icon.set_alignment(0.5, 0.5)
        settings_icon.set_size_request(16,16 )
        settings_icon.show()
        prefs_button = Gtk.Button()
        prefs_button.connect("clicked", lambda x: run_settings_dialog(self) )
        prefs_button.add(settings_icon)
        prefs_button.set_tooltip_text("Settings")
        prefs_button.set_relief(Gtk.ReliefStyle.NONE)
        prefs_button.show()
        self.status_bar.pack_end(prefs_button,False,False, 0)
        self.pw_icon = Gtk.Image()
        self.pw_icon.set_from_stock(Gtk.STOCK_DIALOG_AUTHENTICATION, Gtk.IconSize.MENU)
        self.pw_icon.set_alignment(0.5, 0.5)
        self.pw_icon.set_size_request(16,16 )
        self.pw_icon.show()
        if self.wallet.seed:
            if self.wallet.use_encryption:
                self.pw_icon.set_tooltip_text('Wallet is encrypted')
            else:
                self.pw_icon.set_tooltip_text('Wallet is unencrypted')
            password_button = Gtk.Button()
            password_button.connect("clicked", self.do_update_password, self.wallet)
            password_button.add(self.pw_icon)
            password_button.set_relief(Gtk.ReliefStyle.NONE)
            password_button.show()
            self.status_bar.pack_end(password_button,False,False, 0)
        self.window.add(vbox)
        self.window.show_all()
        #self.fee_box.hide()
        self.context_id = self.status_bar.get_context_id("statusbar")
        self.update_status_bar()
        self.network.register_callback('updated', self.update_callback)
        # NOTE(review): 'time', 'thread' and 're' are used below but are not
        # imported in this module's visible header -- confirm they are
        # imported elsewhere in the file.
        def update_status_bar_thread():
            # poll loop: marshal status-bar refreshes onto the GTK main loop
            while True:
                GObject.idle_add( self.update_status_bar )
                time.sleep(0.5)
        def check_recipient_thread():
            # poll the pay-to entry and expand "name@domain" aliases into
            # "name@domain <address>" once resolved
            old_r = ''
            while True:
                time.sleep(0.5)
                if self.payto_entry.is_focus():
                    continue
                r = self.payto_entry.get_text()
                if r != old_r:
                    old_r = r
                    r = r.strip()
                    if re.match('^(|([\w\-\.]+)@)((\w[\w\-]+\.)+[\w\-]+)$', r):
                        try:
                            to_address = self.wallet.get_alias(r, interactive=False)
                        except Exception:
                            continue
                        if to_address:
                            s = r + ' <' + to_address + '>'
                            GObject.idle_add( lambda: self.payto_entry.set_text(s) )
        thread.start_new_thread(update_status_bar_thread, ())
        if self.wallet.seed:
            thread.start_new_thread(check_recipient_thread, ())
        self.notebook.set_current_page(0)
    def update_callback(self):
        # Network 'updated' callback: flag that wallet data changed so the
        # status-bar poller refreshes the tabs on its next tick.
        self.wallet_updated = True
def do_update_password(self, button, wallet):
if not wallet.seed:
show_message("No seed")
return
res = change_password_dialog(wallet.use_encryption, self.window)
if res:
_, password, new_password = res
try:
wallet.get_seed(password)
except InvalidPassword:
show_message("Incorrect password")
return
wallet.update_password(password, new_password)
if wallet.use_encryption:
self.pw_icon.set_tooltip_text('Wallet is encrypted')
else:
self.pw_icon.set_tooltip_text('Wallet is unencrypted')
def add_tab(self, page, name):
tab_label = Gtk.Label(label=name)
tab_label.show()
self.notebook.append_page(page, tab_label)
    def create_send_tab(self):
        """Build the Send tab: pay-to / description / amount / fee rows.

        Saves the relevant entries on self for use by do_send/do_clear,
        and wires a 'changed' handler that builds a dummy unsigned
        transaction to estimate the fee and flag insufficient funds.
        """
        page = vbox = Gtk.VBox()
        page.show()
        # --- pay-to row ---
        payto = Gtk.HBox()
        payto_label = Gtk.Label(label='Pay to:')
        payto_label.set_size_request(100,-1)
        payto.pack_start(payto_label, False, False, 0)
        payto_entry = Gtk.Entry()
        payto_entry.set_size_request(450, 26)
        payto.pack_start(payto_entry, False, False, 0)
        vbox.pack_start(payto, False, False, 5)
        # --- description row ---
        message = Gtk.HBox()
        message_label = Gtk.Label(label='Description:')
        message_label.set_size_request(100,-1)
        message.pack_start(message_label, False, False, 0)
        message_entry = Gtk.Entry()
        message_entry.set_size_request(450, 26)
        message.pack_start(message_entry, False, False, 0)
        vbox.pack_start(message, False, False, 5)
        # --- amount row ---
        amount_box = Gtk.HBox()
        amount_label = Gtk.Label(label='Amount:')
        amount_label.set_size_request(100,-1)
        amount_box.pack_start(amount_label, False, False, 0)
        amount_entry = Gtk.Entry()
        amount_entry.set_size_request(120, -1)
        amount_box.pack_start(amount_entry, False, False, 0)
        vbox.pack_start(amount_box, False, False, 5)
        # --- fee row (auto-filled by the changed handler below) ---
        self.fee_box = fee_box = Gtk.HBox()
        fee_label = Gtk.Label(label='Fee:')
        fee_label.set_size_request(100,-1)
        fee_box.pack_start(fee_label, False, False, 0)
        fee_entry = Gtk.Entry()
        fee_entry.set_size_request(60, 26)
        fee_box.pack_start(fee_entry, False, False, 0)
        vbox.pack_start(fee_box, False, False, 5)
        # --- Send / Clear buttons ---
        end_box = Gtk.HBox()
        empty_label = Gtk.Label(label='')
        empty_label.set_size_request(100,-1)
        end_box.pack_start(empty_label, False, False, 0)
        send_button = Gtk.Button("Send")
        send_button.show()
        end_box.pack_start(send_button, False, False, 0)
        clear_button = Gtk.Button("Clear")
        clear_button.show()
        end_box.pack_start(clear_button, False, False, 15)
        send_button.connect("clicked", self.do_send, (payto_entry, message_entry, amount_entry, fee_entry))
        clear_button.connect("clicked", self.do_clear, (payto_entry, message_entry, amount_entry, fee_entry))
        vbox.pack_start(end_box, False, False, 5)
        # display this line only if there is a signature
        payto_sig = Gtk.HBox()
        payto_sig_id = Gtk.Label(label='')
        payto_sig.pack_start(payto_sig_id, False, False, 0)
        vbox.pack_start(payto_sig, True, True, 5)
        self.user_fee = False
        def entry_changed( entry, is_fee ):
            # Re-estimate the fee / funds state whenever amount or fee edits.
            # NOTE(review): 'tx' is only bound when make_unsigned_transaction
            # succeeds; the not-funds_error branch relies on that ordering.
            amount = numbify(amount_entry)
            fee = numbify(fee_entry)
            if not is_fee: fee = None
            if amount is None:
                return
            coins = self.wallet.get_spendable_coins()
            try:
                tx = self.wallet.make_unsigned_transaction(coins, [('op_return', 'dummy_tx', amount)], self.config, fee)
                self.funds_error = False
            except NotEnoughFunds:
                self.funds_error = True
            if not self.funds_error:
                if not is_fee:
                    fee = tx.get_fee()
                    fee_entry.set_text(str(Decimal(fee) / COIN))
                    self.fee_box.show()
                # black text = valid amounts; send enabled
                amount_entry.modify_text(Gtk.StateType.NORMAL, Gdk.color_parse("#000000"))
                fee_entry.modify_text(Gtk.StateType.NORMAL, Gdk.color_parse("#000000"))
                send_button.set_sensitive(True)
            else:
                # red text = not enough funds; send disabled
                send_button.set_sensitive(False)
                amount_entry.modify_text(Gtk.StateType.NORMAL, Gdk.color_parse("#cc0000"))
                fee_entry.modify_text(Gtk.StateType.NORMAL, Gdk.color_parse("#cc0000"))
        amount_entry.connect('changed', entry_changed, False)
        fee_entry.connect('changed', entry_changed, True)
        # expose the widgets other methods need
        self.payto_entry = payto_entry
        self.payto_fee_entry = fee_entry
        self.payto_sig_id = payto_sig_id
        self.payto_sig = payto_sig
        self.amount_entry = amount_entry
        self.message_entry = message_entry
        self.add_tab(page, 'Send')
def set_frozen(self,entry,frozen):
if frozen:
entry.set_editable(False)
entry.set_has_frame(False)
entry.modify_base(Gtk.StateType.NORMAL, Gdk.color_parse("#eeeeee"))
else:
entry.set_editable(True)
entry.set_has_frame(True)
entry.modify_base(Gtk.StateType.NORMAL, Gdk.color_parse("#ffffff"))
    def set_url(self, url):
        # Parse a payment URI and prefill the Send tab, then switch to it.
        out = parse_URI(url)
        address = out.get('address')
        message = out.get('message')
        amount = out.get('amount')
        self.notebook.set_current_page(1)
        self.payto_entry.set_text(address)
        self.message_entry.set_text(message)
        # NOTE(review): set_text requires a string; confirm parse_URI
        # returns 'amount' as str here (a numeric amount would raise).
        self.amount_entry.set_text(amount)
        self.payto_sig.set_visible(False)
def create_about_tab(self):
from gi.repository import Pango
page = Gtk.VBox()
page.show()
tv = Gtk.TextView()
tv.set_editable(False)
tv.set_cursor_visible(False)
tv.modify_font(Pango.FontDescription(MONOSPACE_FONT))
scroll = Gtk.ScrolledWindow()
scroll.add(tv)
page.pack_start(scroll, True, True, 0)
self.info = tv.get_buffer()
self.add_tab(page, 'Wall')
def do_clear(self, w, data):
self.payto_sig.set_visible(False)
self.payto_fee_entry.set_text('')
for entry in [self.payto_entry,self.amount_entry,self.message_entry]:
self.set_frozen(entry,False)
entry.set_text('')
def question(self,msg):
dialog = Gtk.MessageDialog( self.window, Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.QUESTION, Gtk.ButtonsType.OK_CANCEL, msg)
dialog.show()
result = dialog.run()
dialog.destroy()
return result == Gtk.ResponseType.OK
    def do_send(self, w, data):
        """Validate the Send-tab inputs, build, sign and broadcast a tx.

        *data* is the (payto, label, amount, fee) entry tuple wired up in
        create_send_tab.  The pay-to field accepts either a plain address,
        an alias ("name@domain"), or an already-resolved
        "name@domain <address>" form.
        """
        payto_entry, label_entry, amount_entry, fee_entry = data
        label = label_entry.get_text()
        r = payto_entry.get_text()
        r = r.strip()
        # m1: bare alias; m2: alias already expanded with a base58 address
        m1 = re.match('^(|([\w\-\.]+)@)((\w[\w\-]+\.)+[\w\-]+)$', r)
        m2 = re.match('(|([\w\-\.]+)@)((\w[\w\-]+\.)+[\w\-]+) \<([1-9A-HJ-NP-Za-km-z]{26,})\>', r)
        if m1:
            # interactive alias resolution (may prompt via show_message/question)
            to_address = self.wallet.get_alias(r, True, self.show_message, self.question)
            if not to_address:
                return
            else:
                self.update_sending_tab()
        elif m2:
            to_address = m2.group(5)
        else:
            to_address = r
        if not is_valid(to_address):
            self.show_message( "invalid bitcoin address:\n"+to_address)
            return
        # amounts are entered in BTC, converted to satoshis here
        try:
            amount = int(Decimal(amount_entry.get_text()) * COIN)
        except Exception:
            self.show_message( "invalid amount")
            return
        try:
            fee = int(Decimal(fee_entry.get_text()) * COIN)
        except Exception:
            self.show_message( "invalid fee")
            return
        if self.wallet.use_encryption:
            password = password_dialog(self.window)
            if not password:
                return
        else:
            password = None
        try:
            tx = self.wallet.mktx( [(to_address, amount)], password, self.config, fee)
        except Exception as e:
            self.show_message(str(e))
            return
        # refuse to send under-fee transactions that relays would drop
        if tx.requires_fee(self.wallet) and fee < MIN_RELAY_TX_FEE:
            self.show_message( "This transaction requires a higher fee, or it will not be propagated by the network." )
            return
        if label:
            self.wallet.labels[tx.hash()] = label
        status, msg = self.wallet.sendtx( tx )
        if status:
            self.show_message( "payment sent.\n" + msg )
            payto_entry.set_text("")
            label_entry.set_text("")
            amount_entry.set_text("")
            fee_entry.set_text("")
            #self.fee_box.hide()
            self.update_sending_tab()
        else:
            self.show_message( msg )
    def treeview_button_press(self, treeview, event):
        """On double-click, show the details of the selected history row.

        Column 8 of the history list store holds the pre-rendered
        transaction details string.  The contacts branch is currently a
        no-op (the alias-details code below is commented out).
        """
        if event.type == Gdk.EventType.DOUBLE_BUTTON_PRESS:
            c = treeview.get_cursor()[0]
            if treeview == self.history_treeview:
                tx_details = self.history_list.get_value( self.history_list.get_iter(c), 8)
                self.show_message(tx_details)
            elif treeview == self.contacts_treeview:
                m = self.addressbook_list.get_value( self.addressbook_list.get_iter(c), 0)
                #a = self.wallet.aliases.get(m)
                #if a:
                #    if a[0] in self.wallet.authorities.keys():
                #        s = self.wallet.authorities.get(a[0])
                #    else:
                #        s = "self-signed"
                #    msg = 'Alias: '+ m + '\nTarget address: '+ a[1] + '\n\nSigned by: ' + s + '\nSigning address:' + a[0]
                #    self.show_message(msg)
    def treeview_key_press(self, treeview, event):
        """Keyboard handling for the list views.

        Up on the first row moves focus to the parent widget; Return shows
        the selected history row's details (column 8).  The contacts
        branch is currently a no-op (code commented out).
        """
        # NOTE(review): get_cursor() can return (None, None); the Return
        # branch below would then pass None to get_iter -- confirm a row is
        # always selected when Return is delivered here.
        c = treeview.get_cursor()[0]
        if event.keyval == Gdk.KEY_Up:
            if c and c[0] == 0:
                treeview.parent.grab_focus()
                treeview.set_cursor((0,))
        elif event.keyval == Gdk.KEY_Return:
            if treeview == self.history_treeview:
                tx_details = self.history_list.get_value( self.history_list.get_iter(c), 8)
                self.show_message(tx_details)
            elif treeview == self.contacts_treeview:
                m = self.addressbook_list.get_value( self.addressbook_list.get_iter(c), 0)
                #a = self.wallet.aliases.get(m)
                #if a:
                #    if a[0] in self.wallet.authorities.keys():
                #        s = self.wallet.authorities.get(a[0])
                #    else:
                #        s = "self"
                #    msg = 'Alias:'+ m + '\n\nTarget: '+ a[1] + '\nSigned by: ' + s + '\nSigning address:' + a[0]
                #    self.show_message(msg)
        return False
    def create_history_tab(self):
        """Build the History tab.

        List-store columns: 0 tx hash, 1 status icon, 2 date, 3 label
        (editable), 4 grey-label flag, 5 amount, 6 balance, 7 tooltip,
        8 full details string (shown on double-click / Return).
        """
        self.history_list = Gtk.ListStore(str, str, str, str, 'gboolean',  str, str, str, str)
        treeview = Gtk.TreeView(model=self.history_list)
        self.history_treeview = treeview
        treeview.set_tooltip_column(7)
        treeview.show()
        treeview.connect('key-press-event', self.treeview_key_press)
        treeview.connect('button-press-event', self.treeview_button_press)
        # status icon column
        tvcolumn = Gtk.TreeViewColumn('')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererPixbuf()
        tvcolumn.pack_start(cell, False)
        tvcolumn.set_attributes(cell, stock_id=1)
        # date column
        tvcolumn = Gtk.TreeViewColumn('Date')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        tvcolumn.pack_start(cell, False)
        tvcolumn.add_attribute(cell, 'text', 2)
        # editable description column; default labels are shown greyed
        tvcolumn = Gtk.TreeViewColumn('Description')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        cell.set_property('foreground', 'grey')
        cell.set_property('family', MONOSPACE_FONT)
        cell.set_property('editable', True)
        def edited_cb(cell, path, new_text, h_list):
            # persist the edited label and refresh the view
            tx = h_list.get_value( h_list.get_iter(path), 0)
            self.wallet.set_label(tx,new_text)
            self.update_history_tab()
        cell.connect('edited', edited_cb, self.history_list)
        def editing_started(cell, entry, path, h_list):
            # start editing from an empty entry when there is no user label
            tx = h_list.get_value( h_list.get_iter(path), 0)
            if not self.wallet.labels.get(tx): entry.set_text('')
        cell.connect('editing-started', editing_started, self.history_list)
        tvcolumn.set_expand(True)
        tvcolumn.pack_start(cell, True)
        tvcolumn.set_attributes(cell, text=3, foreground_set = 4)
        # amount column
        tvcolumn = Gtk.TreeViewColumn('Amount')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        cell.set_alignment(1, 0.5)
        cell.set_property('family', MONOSPACE_FONT)
        tvcolumn.pack_start(cell, False)
        tvcolumn.add_attribute(cell, 'text', 5)
        # running balance column
        tvcolumn = Gtk.TreeViewColumn('Balance')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        cell.set_alignment(1, 0.5)
        cell.set_property('family', MONOSPACE_FONT)
        tvcolumn.pack_start(cell, False)
        tvcolumn.add_attribute(cell, 'text', 6)
        # hidden tooltip column (set_tooltip_column above reads column 7)
        tvcolumn = Gtk.TreeViewColumn('Tooltip')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        tvcolumn.pack_start(cell, False)
        tvcolumn.add_attribute(cell, 'text', 7)
        tvcolumn.set_visible(False)
        scroll = Gtk.ScrolledWindow()
        scroll.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
        scroll.add(treeview)
        self.add_tab(scroll, 'History')
        self.update_history_tab()
def create_recv_tab(self):
self.recv_list = Gtk.ListStore(str, str, str, str, str)
self.add_tab( self.make_address_list(True), 'Receive')
self.update_receiving_tab()
def create_book_tab(self):
self.addressbook_list = Gtk.ListStore(str, str, str)
self.add_tab( self.make_address_list(False), 'Contacts')
self.update_sending_tab()
    def make_address_list(self, is_recv):
        """Build an address list widget for the Receive or Contacts tab.

        is_recv selects the backing list store (recv_list vs
        addressbook_list) and which action buttons appear underneath
        (Freeze for receive addresses; New/Pay-to for contacts; QR and
        clipboard copy for both).  Returns the assembled VBox.
        """
        liststore = self.recv_list if is_recv else self.addressbook_list
        treeview = Gtk.TreeView(model= liststore)
        treeview.connect('key-press-event', self.treeview_key_press)
        treeview.connect('button-press-event', self.treeview_button_press)
        treeview.show()
        if not is_recv:
            self.contacts_treeview = treeview
        # column 0: address
        tvcolumn = Gtk.TreeViewColumn('Address')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        cell.set_property('family', MONOSPACE_FONT)
        tvcolumn.pack_start(cell, True)
        tvcolumn.add_attribute(cell, 'text', 0)
        # column 1: editable label
        tvcolumn = Gtk.TreeViewColumn('Label')
        tvcolumn.set_expand(True)
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        cell.set_property('editable', True)
        def edited_cb2(cell, path, new_text, liststore):
            # persist the edited label and refresh all views that show it
            address = liststore.get_value( liststore.get_iter(path), 0)
            self.wallet.set_label(address, new_text)
            self.update_receiving_tab()
            self.update_sending_tab()
            self.update_history_tab()
        cell.connect('edited', edited_cb2, liststore)
        tvcolumn.pack_start(cell, True)
        tvcolumn.add_attribute(cell, 'text', 1)
        # column 2: transaction count
        tvcolumn = Gtk.TreeViewColumn('Tx')
        treeview.append_column(tvcolumn)
        cell = Gtk.CellRendererText()
        tvcolumn.pack_start(cell, True)
        tvcolumn.add_attribute(cell, 'text', 2)
        if is_recv:
            # receive-only columns: balance and address type flags
            tvcolumn = Gtk.TreeViewColumn('Balance')
            treeview.append_column(tvcolumn)
            cell = Gtk.CellRendererText()
            tvcolumn.pack_start(cell, True)
            tvcolumn.add_attribute(cell, 'text', 3)
            tvcolumn = Gtk.TreeViewColumn('Type')
            treeview.append_column(tvcolumn)
            cell = Gtk.CellRendererText()
            tvcolumn.pack_start(cell, True)
            tvcolumn.add_attribute(cell, 'text', 4)
        scroll = Gtk.ScrolledWindow()
        scroll.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        scroll.add(treeview)
        # --- action buttons under the list ---
        hbox = Gtk.HBox()
        if not is_recv:
            button = Gtk.Button("New")
            button.connect("clicked", self.newaddress_dialog)
            button.show()
            hbox.pack_start(button,False, False, 0)
        def showqrcode(w, treeview, liststore):
            # render the selected address as a QR code in a modal dialog
            import qrcode
            path, col = treeview.get_cursor()
            if not path: return
            address = liststore.get_value(liststore.get_iter(path), 0)
            qr = qrcode.QRCode()
            qr.add_data(address)
            boxsize = 7
            matrix = qr.get_matrix()
            boxcount_row = len(matrix)
            # 2-module quiet zone on each side
            size = (boxcount_row + 4) * boxsize
            def area_expose_cb(area, cr):
                # paint white background, then the black QR modules
                style = area.get_style()
                Gdk.cairo_set_source_color(cr, style.white)
                cr.rectangle(0, 0, size, size)
                cr.fill()
                Gdk.cairo_set_source_color(cr, style.black)
                for r in range(boxcount_row):
                    for c in range(boxcount_row):
                        if matrix[r][c]:
                            cr.rectangle((c + 2) * boxsize, (r + 2) * boxsize, boxsize, boxsize)
                            cr.fill()
            area = Gtk.DrawingArea()
            area.set_size_request(size, size)
            area.connect("draw", area_expose_cb)
            area.show()
            dialog = Gtk.Dialog(address, parent=self.window, flags=Gtk.DialogFlags.MODAL, buttons = ("ok",1))
            dialog.vbox.add(area)
            dialog.run()
            dialog.destroy()
        button = Gtk.Button("QR")
        button.connect("clicked", showqrcode, treeview, liststore)
        button.show()
        hbox.pack_start(button,False, False, 0)
        button = Gtk.Button("Copy to clipboard")
        def copy2clipboard(w, treeview, liststore):
            # copy the selected address; Windows goes through Tkinter
            # because Gdk clipboard handling was unreliable there
            import platform
            path, col = treeview.get_cursor()
            if path:
                address = liststore.get_value( liststore.get_iter(path), 0)
                if platform.system() == 'Windows':
                    from Tkinter import Tk
                    r = Tk()
                    r.withdraw()
                    r.clipboard_clear()
                    r.clipboard_append( address )
                    r.destroy()
                else:
                    atom = Gdk.atom_intern('CLIPBOARD', True)
                    c = Gtk.Clipboard.get(atom)
                    c.set_text( address, len(address) )
        button.connect("clicked", copy2clipboard, treeview, liststore)
        button.show()
        hbox.pack_start(button,False, False, 0)
        if is_recv:
            button = Gtk.Button("Freeze")
            def freeze_address(w, treeview, liststore, wallet):
                # toggle the frozen state of the selected address
                path, col = treeview.get_cursor()
                if path:
                    address = liststore.get_value( liststore.get_iter(path), 0)
                    wallet.set_frozen_state([address], not wallet.is_frozen(address))
                    self.update_receiving_tab()
            button.connect("clicked", freeze_address, treeview, liststore, self.wallet)
            button.show()
            hbox.pack_start(button,False, False, 0)
        if not is_recv:
            button = Gtk.Button("Pay to")
            def payto(w, treeview, liststore):
                # prefill the Send tab with the selected contact
                path, col = treeview.get_cursor()
                if path:
                    address =  liststore.get_value( liststore.get_iter(path), 0)
                    self.payto_entry.set_text( address )
                    self.notebook.set_current_page(1)
                    self.amount_entry.grab_focus()
            button.connect("clicked", payto, treeview, liststore)
            button.show()
            hbox.pack_start(button,False, False, 0)
        vbox = Gtk.VBox()
        vbox.pack_start(scroll,True, True, 0)
        vbox.pack_start(hbox, False, False, 0)
        return vbox
    def update_status_bar(self):
        """Refresh the status bar text/icon; called twice a second.

        Also refreshes the history/receive tabs and the banner once the
        wallet reports up_to_date after a change (wallet_updated flag set
        by the network 'updated' callback).
        """
        if self.funds_error:
            text = "Not enough funds"
        elif self.network.is_connected():
            host, port, _,_,_ = self.network.get_parameters()
            port = int(port)
            height = self.network.get_local_height()
            self.network_button.set_tooltip_text("Connected to %s:%d.\n%d blocks"%(host, port, height))
            if not self.wallet.up_to_date:
                self.status_image.set_from_stock(Gtk.STOCK_REFRESH, Gtk.IconSize.MENU)
                text = "Synchronizing..."
            else:
                self.status_image.set_from_stock(Gtk.STOCK_YES, Gtk.IconSize.MENU)
                # c/u/x = confirmed, unconfirmed, unmatured satoshis
                c, u, x = self.wallet.get_balance()
                text = "Balance: %s "%(format_satoshis(c, False, self.num_zeros))
                if u:
                    text += "[%s unconfirmed]"%(format_satoshis(u, True, self.num_zeros).strip())
                if x:
                    text += "[%s unmatured]"%(format_satoshis(x, True, self.num_zeros).strip())
        else:
            self.status_image.set_from_stock(Gtk.STOCK_NO, Gtk.IconSize.MENU)
            self.network_button.set_tooltip_text("Not connected.")
            text = "Not connected"
        self.status_bar.pop(self.context_id)
        self.status_bar.push(self.context_id, text)
        if self.wallet.up_to_date and self.wallet_updated:
            self.update_history_tab()
            self.update_receiving_tab()
            # addressbook too...
            self.info.set_text( self.network.banner )
            self.wallet_updated = False
def update_receiving_tab(self):
self.recv_list.clear()
for address in self.wallet.addresses(True):
Type = "R"
c = u = 0
if self.wallet.is_change(address): Type = "C"
if address in self.wallet.imported_keys.keys():
Type = "I"
c, u, x = self.wallet.get_addr_balance(address)
if self.wallet.is_frozen(address): Type = Type + "F"
label = self.wallet.labels.get(address)
h = self.wallet.history.get(address,[])
n = len(h)
tx = "0" if n==0 else "%d"%n
self.recv_list.append((address, label, tx, format_satoshis(c+u+x, False, self.num_zeros), Type ))
def update_sending_tab(self):
self.addressbook_list.clear()
for k, v in self.contacts.items():
t, v = v
self.addressbook_list.append((k, v, t))
    def update_history_tab(self):
        """Rebuild the history list from the wallet, preserving the cursor.

        Rows are prepended so the newest entry appears first.  conf > 0
        means confirmed (dated), conf == -1 unverified, otherwise pending.
        """
        cursor = self.history_treeview.get_cursor()[0]
        self.history_list.clear()
        for item in self.wallet.get_history():
            tx_hash, conf, value, timestamp, balance = item
            if conf > 0:
                # NOTE(review): datetime is used here without a visible
                # module-level import; an ImportError/NameError is silently
                # turned into "------" by this except -- confirm the import.
                try:
                    time_str = datetime.datetime.fromtimestamp( timestamp).isoformat(' ')[:-3]
                except Exception:
                    time_str = "------"
                conf_icon = Gtk.STOCK_APPLY
            elif conf == -1:
                time_str = 'unverified'
                conf_icon = None
            else:
                time_str = 'pending'
                conf_icon = Gtk.STOCK_EXECUTE
            label, is_default_label = self.wallet.get_label(tx_hash)
            tooltip = tx_hash + "\n%d confirmations"%conf if tx_hash else ''
            details = self.get_tx_details(tx_hash)
            # column layout documented in create_history_tab
            self.history_list.prepend( [tx_hash, conf_icon, time_str, label, is_default_label,
                                        format_satoshis(value,True,self.num_zeros, whitespaces=True),
                                        format_satoshis(balance,False,self.num_zeros, whitespaces=True), tooltip, details] )
        if cursor: self.history_treeview.set_cursor( cursor )
def get_tx_details(self, tx_hash):
    """Return a human-readable multi-line summary of a transaction.

    Includes the id, confirmation count, amount sent/received, fee when
    known, timestamp and the input/output addresses.  Returns '' when
    *tx_hash* is falsy.
    """
    import datetime
    if not tx_hash:
        return ''
    tx = self.wallet.transactions.get(tx_hash)
    tx.deserialize()
    is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
    conf, timestamp = self.wallet.get_confirmations(tx_hash)
    if timestamp:
        time_str = datetime.datetime.fromtimestamp(timestamp).isoformat(' ')[:-3]
    else:
        time_str = 'pending'
    inputs = map(lambda x: x.get('address'), tx.inputs)
    outputs = map(lambda x: x[0], tx.get_outputs())
    tx_details = "Transaction Details" + "\n\n" \
        + "Transaction ID:\n" + tx_hash + "\n\n" \
        + "Status: %d confirmations\n" % conf
    if is_mine:
        if fee:
            tx_details += "Amount sent: %s\n" % format_satoshis(v - fee, False) \
                + "Transaction fee: %s\n" % format_satoshis(fee, False)
        else:
            tx_details += "Amount sent: %s\n" % format_satoshis(v, False) \
                + "Transaction fee: unknown\n"
    else:
        # BUG FIX: the original line ended with a stray continuation
        # backslash, which glued it to the following `tx_details +=`
        # statement and made the module a SyntaxError.
        tx_details += "Amount received: %s\n" % format_satoshis(v, False)
    tx_details += "Date: %s\n\n" % time_str \
        + "Inputs:\n-" + '\n-'.join(inputs) + "\n\n" \
        + "Outputs:\n-" + '\n-'.join(outputs)
    return tx_details
def newaddress_dialog(self, w):
    """Prompt for a label/address pair and add it to the contact list.

    Shows a modal dialog with two text entries; on OK the address is
    validated with ``is_valid`` and stored in ``self.contacts``,
    otherwise an error dialog is shown.  *w* is the widget that
    triggered the callback (unused).
    """
    title = "New Contact"
    dialog = Gtk.Dialog(title, parent=self.window,
        flags=Gtk.DialogFlags.MODAL,
        buttons= ("cancel", 0, "ok",1) )
    dialog.show()
    # Row 1: the label entry.
    label = Gtk.HBox()
    label_label = Gtk.Label(label='Label:')
    label_label.set_size_request(120,10)
    label_label.show()
    label.pack_start(label_label, True, True, 0)
    label_entry = Gtk.Entry()
    label_entry.show()
    label.pack_start(label_entry, True, True, 0)
    label.show()
    dialog.vbox.pack_start(label, False, True, 5)
    # Row 2: the address entry.
    address = Gtk.HBox()
    address_label = Gtk.Label(label='Address:')
    address_label.set_size_request(120,10)
    address_label.show()
    address.pack_start(address_label, True, True, 0)
    address_entry = Gtk.Entry()
    address_entry.show()
    address.pack_start(address_entry, True, True, 0)
    address.show()
    dialog.vbox.pack_start(address, False, True, 5)
    result = dialog.run()
    # Read the entries before destroying the dialog.
    address = address_entry.get_text()
    label = label_entry.get_text()
    dialog.destroy()
    if result == 1:  # response id 1 == the "ok" button
        if is_valid(address):
            self.contacts[label] = address
            self.update_sending_tab()
        else:
            errorDialog = Gtk.MessageDialog(
                parent=self.window,
                flags=Gtk.DialogFlags.MODAL,
                buttons= Gtk.ButtonsType.CLOSE,
                message_format = "Invalid address")
            errorDialog.show()
            errorDialog.run()
            errorDialog.destroy()
class ElectrumGui():
    """GTK GUI entry point: creates or restores a wallet and opens the
    main window."""
    def __init__(self, config, network):
        self.network = network
        self.config = config
    def main(self, url=None):
        """Create or load the wallet, then run the GTK main loop.

        If the wallet file does not exist the user is asked whether to
        create a new wallet or restore one from a seed; the process
        exits if any dialog is cancelled.  *url* is an optional payment
        URL forwarded to the main window.
        """
        storage = WalletStorage(self.config.get_wallet_path())
        if not storage.file_exists:
            action = self.restore_or_create()
            if not action:
                exit()
            self.wallet = wallet = Wallet(storage)
            # Apply a non-default gap limit before generating addresses.
            gap = self.config.get('gap_limit', 5)
            if gap != 5:
                wallet.gap_limit = gap
                wallet.storage.put('gap_limit', gap, True)
            if action == 'create':
                seed = wallet.make_seed()
                show_seed_dialog(seed, None)
                r = change_password_dialog(False, None)
                password = r[2] if r else None
                wallet.add_seed(seed, password)
                wallet.create_master_keys(password)
                wallet.create_main_account(password)
                wallet.synchronize()  # generate first addresses offline
            elif action == 'restore':
                seed = self.seed_dialog()
                if not seed:
                    exit()
                r = change_password_dialog(False, None)
                password = r[2] if r else None
                wallet.add_seed(seed, password)
                wallet.create_master_keys(password)
                wallet.create_main_account(password)
            else:
                exit()
        else:
            self.wallet = Wallet(storage)
            action = None
        self.wallet.start_threads(self.network)
        if action == 'restore':
            # NOTE: `wallet` is bound only on the creation path above, but
            # this branch is only reachable when action == 'restore', where
            # it is bound.
            self.restore_wallet(wallet)
        w = ElectrumWindow(self.wallet, self.config, self.network)
        if url: w.set_url(url)
        Gtk.main()
    def restore_or_create(self):
        """Ask whether to create a new wallet or restore from seed."""
        return restore_create_dialog()
    def seed_dialog(self):
        """Prompt for a recovery seed; returns the seed text or None."""
        return run_recovery_dialog()
    def network_dialog(self):
        """Open the network-settings dialog."""
        return run_network_dialog( self.network, parent=None )
    def restore_wallet(self, wallet):
        """Restore wallet history in a background thread while showing a
        modal "Please wait" dialog; returns False if cancelled."""
        dialog = Gtk.MessageDialog(
            parent = None,
            flags = Gtk.DialogFlags.MODAL,
            buttons = Gtk.ButtonsType.CANCEL,
            message_format = "Please wait..." )
        dialog.show()
        def recover_thread( wallet, dialog ):
            wallet.restore(lambda x:x)
            # Destroy the dialog from the GTK main loop, not this thread.
            GObject.idle_add( dialog.destroy )
        thread.start_new_thread( recover_thread, ( wallet, dialog ) )
        r = dialog.run()
        dialog.destroy()
        if r==Gtk.ResponseType.CANCEL: return False
        if not wallet.is_found():
            show_message("No transactions found for this seed")
        return True
|
shanew/electrum
|
gui/gtk.py
|
Python
|
gpl-3.0
| 49,880
|
from PIL import Image
class Channel:
    """A single CMYK colour channel backed by an all-black PIL image."""

    def __init__(self, channelLabel, size):
        self.channelLabel = channelLabel
        width, height = size[0], size[1]
        self.channel = Image.new("CMYK", (width, height), "black")
        # Pixel-access object for direct per-pixel reads/writes.
        self.pixelMap = self.channel.load()

    def save(self, filename):
        """Write the channel image to *filename*."""
        self.channel.save(filename)
|
ChristianAnthony46/PomegranateCMYK
|
Channel.py
|
Python
|
gpl-3.0
| 314
|
# -*- Mode: python; coding: utf-8; tab-width: 4; indent-tabs-mode: nil; -*-
#
# Copyright (C) 2012 - fossfreedom
# Copyright (C) 2012 - Agustin Carrasco
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
from gi.repository import GObject
from gi.repository import GLib
from coverart_widgets import AbstractView
class ListShowingPolicy(GObject.Object):
    """Policy that mostly takes care of how and when things should be
    showed on the view that makes use of the `AlbumsModel`."""

    def __init__(self, list_view):
        super(ListShowingPolicy, self).__init__()
        self.counter = 0
        self._has_initialised = False

    def initialise(self, album_manager):
        """One-time setup hook; subsequent calls are no-ops."""
        if not self._has_initialised:
            self._has_initialised = True
class ListView(AbstractView):
    """View that hands display over to Rhythmbox's own library list page."""

    __gtype_name__ = "ListView"
    name = 'listview'
    use_plugin_window = False

    def __init__(self):
        super(ListView, self).__init__()
        self.view = self
        self._has_initialised = False
        self.show_policy = ListShowingPolicy(self)

    def initialise(self, source):
        """One-time wiring to the plugin *source*; later calls are no-ops."""
        if self._has_initialised:
            return
        self._has_initialised = True
        self.view_name = "list_view"
        super(ListView, self).initialise(source)
        self.shell = source.shell

    def switch_to_view(self, source, album):
        """Activate this view by selecting the library page in the shell."""
        self.initialise(source)
        GLib.idle_add(self.shell.props.display_page_tree.select,
                      self.shell.props.library_source)

    def get_selected_objects(self):
        """Return the selected albums; the list view never reports any."""
        return []
|
fossfreedom/coverart-browser
|
coverart_listview.py
|
Python
|
gpl-3.0
| 2,367
|
#!/usr/bin/env python
# Copyright 2012-2014 Keith Fancher
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import unittest
from list import TodoTxtList
class TestTodoTxtList(unittest.TestCase):
    """Unit tests for TodoTxtList: parsing, mutation, sorting and file I/O.

    Several tests read the fixture file ``sample-todo.txt``, which must be
    present in the working directory.
    """
    def test_init_from_text(self):
        """Blank/whitespace-only lines are skipped; priority and completion parsed."""
        todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
        test_list = TodoTxtList(None, todo_text)
        self.assertEqual(3, test_list.num_items())
        self.assertEqual('Item one', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('Item two', test_list.items[1].text)
        self.assertEqual('Z', test_list.items[1].priority)
        self.assertFalse(test_list.items[1].is_completed)
        self.assertEqual('Item three', test_list.items[2].text)
        self.assertEqual(None, test_list.items[2].priority)
        self.assertTrue(test_list.items[2].is_completed)
    def test_init_from_file(self):
        """Loading the fixture file yields the expected eight items in order."""
        file_name = 'sample-todo.txt'
        test_list = TodoTxtList(file_name)
        self.assertEqual(8, test_list.num_items())
        self.assertEqual('Do that really important thing', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('Summon AppIndicator documentation from my ass', test_list.items[1].text)
        self.assertEqual('D', test_list.items[1].priority)
        self.assertFalse(test_list.items[1].is_completed)
        self.assertEqual('This other important thing', test_list.items[2].text)
        self.assertEqual('A', test_list.items[2].priority)
        self.assertFalse(test_list.items[2].is_completed)
        self.assertEqual('Walk the cat', test_list.items[3].text)
        self.assertEqual('B', test_list.items[3].priority)
        self.assertFalse(test_list.items[3].is_completed)
        self.assertEqual('Something with no priority!', test_list.items[4].text)
        self.assertEqual(None, test_list.items[4].priority)
        self.assertFalse(test_list.items[4].is_completed)
        self.assertEqual('Cook the dog', test_list.items[5].text)
        self.assertEqual('C', test_list.items[5].priority)
        self.assertFalse(test_list.items[5].is_completed)
        self.assertEqual('Be annoyed at GTK3 docs', test_list.items[6].text)
        self.assertEqual(None, test_list.items[6].priority)
        self.assertTrue(test_list.items[6].is_completed)
        self.assertEqual('Something I already did', test_list.items[7].text)
        self.assertEqual(None, test_list.items[7].priority)
        self.assertTrue(test_list.items[7].is_completed)
    def test_reload_from_file(self):
        """reload_from_file() is a no-op without a filename, then loads the file."""
        test_list = TodoTxtList() # Start with an empty list
        test_list.reload_from_file() # Should do nothing
        test_list.todo_filename = 'sample-todo.txt'
        test_list.reload_from_file()
        self.assertEqual(8, test_list.num_items())
        self.assertEqual('Do that really important thing', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('Summon AppIndicator documentation from my ass', test_list.items[1].text)
        self.assertEqual('D', test_list.items[1].priority)
        self.assertFalse(test_list.items[1].is_completed)
        self.assertEqual('This other important thing', test_list.items[2].text)
        self.assertEqual('A', test_list.items[2].priority)
        self.assertFalse(test_list.items[2].is_completed)
        self.assertEqual('Walk the cat', test_list.items[3].text)
        self.assertEqual('B', test_list.items[3].priority)
        self.assertFalse(test_list.items[3].is_completed)
        self.assertEqual('Something with no priority!', test_list.items[4].text)
        self.assertEqual(None, test_list.items[4].priority)
        self.assertFalse(test_list.items[4].is_completed)
        self.assertEqual('Cook the dog', test_list.items[5].text)
        self.assertEqual('C', test_list.items[5].priority)
        self.assertFalse(test_list.items[5].is_completed)
        self.assertEqual('Be annoyed at GTK3 docs', test_list.items[6].text)
        self.assertEqual(None, test_list.items[6].priority)
        self.assertTrue(test_list.items[6].is_completed)
        self.assertEqual('Something I already did', test_list.items[7].text)
        self.assertEqual(None, test_list.items[7].priority)
        self.assertTrue(test_list.items[7].is_completed)
    def test_has_items(self):
        """has_items() is False for an empty list, True otherwise."""
        test_list = TodoTxtList()
        self.assertFalse(test_list.has_items())
        test_list = TodoTxtList(None, 'An item')
        self.assertTrue(test_list.has_items())
    def test_remove_item(self):
        """remove_item() deletes by item text, leaving the rest intact."""
        todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
        test_list = TodoTxtList(None, todo_text)
        self.assertEqual(3, test_list.num_items())
        test_list.remove_item('Item two')
        self.assertEqual(2, test_list.num_items())
        self.assertEqual('Item one', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('Item three', test_list.items[1].text)
        self.assertEqual(None, test_list.items[1].priority)
        self.assertTrue(test_list.items[1].is_completed)
    def test_remove_completed_items(self):
        """remove_completed_items() drops every completed ('x') entry."""
        todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
        test_list = TodoTxtList(None, todo_text)
        self.assertEqual(3, test_list.num_items())
        test_list.remove_completed_items()
        self.assertEqual(2, test_list.num_items())
        self.assertEqual('Item one', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('Item two', test_list.items[1].text)
        self.assertEqual('Z', test_list.items[1].priority)
        self.assertFalse(test_list.items[1].is_completed)
    def test_mark_item_completed(self):
        """mark_item_completed() flags the item by text, keeping its priority."""
        todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
        test_list = TodoTxtList(None, todo_text)
        test_list.mark_item_completed('Item two')
        self.assertEqual('Item one', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('Item two', test_list.items[1].text)
        self.assertEqual('Z', test_list.items[1].priority)
        self.assertTrue(test_list.items[1].is_completed)
        self.assertEqual('Item three', test_list.items[2].text)
        self.assertEqual(None, test_list.items[2].priority)
        self.assertTrue(test_list.items[2].is_completed)
    def test_mark_item_completed_with_full_text(self):
        """Completion can also be requested with the full '(P) text' line."""
        todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
        test_list = TodoTxtList(None, todo_text)
        test_list.mark_item_completed_with_full_text('(Z) Item two')
        self.assertEqual('Item one', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('Item two', test_list.items[1].text)
        self.assertEqual('Z', test_list.items[1].priority)
        self.assertTrue(test_list.items[1].is_completed)
        self.assertEqual('Item three', test_list.items[2].text)
        self.assertEqual(None, test_list.items[2].priority)
        self.assertTrue(test_list.items[2].is_completed)
    def test_sort_list(self):
        """sort_list() orders: open items by priority first, completed items last."""
        todo_text = "x (C) No biggie\n(Z) aaaaa\nNothing\n(B) hey hey\n(Z) bbbbb\n(A) aaaaa\nx Item three\n\nx (B) Done it\n"
        test_list = TodoTxtList(None, todo_text)
        test_list.sort_list()
        self.assertEqual(8, test_list.num_items())
        self.assertEqual('aaaaa', test_list.items[0].text)
        self.assertEqual('A', test_list.items[0].priority)
        self.assertFalse(test_list.items[0].is_completed)
        self.assertEqual('hey hey', test_list.items[1].text)
        self.assertEqual('B', test_list.items[1].priority)
        self.assertFalse(test_list.items[1].is_completed)
        self.assertEqual('aaaaa', test_list.items[2].text)
        self.assertEqual('Z', test_list.items[2].priority)
        self.assertFalse(test_list.items[2].is_completed)
        self.assertEqual('bbbbb', test_list.items[3].text)
        self.assertEqual('Z', test_list.items[3].priority)
        self.assertFalse(test_list.items[3].is_completed)
        self.assertEqual('Nothing', test_list.items[4].text)
        self.assertEqual(None, test_list.items[4].priority)
        self.assertFalse(test_list.items[4].is_completed)
        self.assertEqual('Done it', test_list.items[5].text)
        self.assertEqual('B', test_list.items[5].priority)
        self.assertTrue(test_list.items[5].is_completed)
        self.assertEqual('No biggie', test_list.items[6].text)
        self.assertEqual('C', test_list.items[6].priority)
        self.assertTrue(test_list.items[6].is_completed)
        self.assertEqual('Item three', test_list.items[7].text)
        self.assertEqual(None, test_list.items[7].priority)
        self.assertTrue(test_list.items[7].is_completed)
    def test_to_text(self):
        """str() serialises the list back to todo.txt format, normalising whitespace."""
        test_list = TodoTxtList()
        # Empty list yields empty string:
        self.assertEqual('', str(test_list))
        todo_text = "(A) Do one thing\n (B) Do another thing\n x One last thing"
        expected_output = "(A) Do one thing\n(B) Do another thing\nx One last thing"
        test_list.init_from_text(todo_text)
        self.assertEqual(expected_output, str(test_list))
    def test_write_to_file(self):
        """write_to_file() persists the serialised list to todo_filename."""
        todo_text = "(A) Do one thing\n (B) Do another thing\n x One last thing"
        expected_output = "(A) Do one thing\n(B) Do another thing\nx One last thing"
        test_list = TodoTxtList(None, todo_text)
        # Write to a temporary output file:
        output_file = tempfile.NamedTemporaryFile(mode='w+')
        test_list.todo_filename = output_file.name
        test_list.write_to_file()
        # Now read the file in and see that it all matches up:
        self.assertEqual(expected_output, output_file.read())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
keithfancher/Todo-Indicator
|
todotxt/test_list.py
|
Python
|
gpl-3.0
| 11,017
|
def sort_by_length(arr):
    """Return a new list with the strings of *arr* ordered by length.

    The sort is stable, so equal-length strings keep their relative order.
    """
    return sorted(arr, key=lambda item: len(item))
|
VladKha/CodeWars
|
7 kyu/Sort array by string length/solve.py
|
Python
|
gpl-3.0
| 57
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-11 11:15
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration renaming Article.owner to "owne".
    # NOTE(review): "owne" looks like a typo for "owner", but this file is
    # already part of the migration history and must not be edited in
    # place; if unintended, add a follow-up migration renaming it back.
    dependencies = [
        ('article', '0006_article_owner'),
    ]
    operations = [
        migrations.RenameField(
            model_name='article',
            old_name='owner',
            new_name='owne',
        ),
    ]
|
zhangvs1988/zhangyl-Djangodemo
|
article/migrations/0007_auto_20160811_1915.py
|
Python
|
gpl-3.0
| 419
|
#!/usr/bin/python
import time
from tentacle_pi.AM2315 import AM2315
# Sample the AM2315 temperature/humidity sensor ten times, ~2 s apart.
am = AM2315(0x5c, "/dev/i2c-1")  # I2C address 0x5c on bus /dev/i2c-1
for x in range(10):
    # sense() returns (temperature, humidity, crc_check).
    temperature, humidity, crc_check = am.sense()
    print "Temperature: %s" % temperature
    print "Humidity: %s" % humidity
    print "CRC: %s" % crc_check
    time.sleep(2)  # presumably the sensor needs a pause between reads -- TODO confirm
|
Cabalist/Mycodo
|
3.5/cgi-bin/Test-Sensor-HT-AM2315.py
|
Python
|
gpl-3.0
| 302
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import os
from io import BytesIO
from flask import flash, redirect, request, session
from PIL import Image
from werkzeug.exceptions import NotFound
from indico.core.db import db
from indico.modules.events.layout import layout_settings, logger
from indico.modules.events.layout.forms import (LayoutForm, LogoForm, CSSForm, CSSSelectionForm)
from indico.modules.events.layout.util import get_css_url
from indico.modules.events.layout.views import WPLayoutEdit
from indico.util.fs import secure_filename
from indico.util.i18n import _
from indico.util.string import to_unicode, crc32
from indico.web.flask.util import url_for, send_file
from indico.web.forms.base import FormDefaults
from indico.web.util import jsonify_data
from MaKaC.webinterface.pages.conferences import WPConfModifPreviewCSS
from MaKaC.webinterface.rh.conferenceModif import RHConferenceModifBase
from MaKaC.webinterface.rh.conferenceDisplay import RHConferenceBaseDisplay
def _logo_data(event):
return {
'url': event.logo_url,
'filename': event.logo_metadata['filename'],
'size': event.logo_metadata['size'],
'content_type': event.logo_metadata['content_type']
}
def _css_file_data(event):
return {
'filename': event.stylesheet_metadata['filename'],
'size': event.stylesheet_metadata['size'],
'content_type': 'text/css'
}
class RHLayoutBase(RHConferenceModifBase):
    """Base request handler for the event-layout management pages."""
    CSRF_ENABLED = True

    def _checkParams(self, params):
        RHConferenceModifBase._checkParams(self, params)
        # Expose the new-style Event wrapper of the legacy conference.
        self.event = self._conf.as_event
class RHLayoutEdit(RHLayoutBase):
    """Display and save the general layout settings of a conference."""

    def _checkProtection(self):
        RHLayoutBase._checkProtection(self)
        # Layout customisation only exists for events of type 'conference'.
        if self._conf.getType() != 'conference':
            raise NotFound('Only conferences have layout settings')

    def _process(self):
        defaults = FormDefaults(**layout_settings.get_all(self._conf))
        form = LayoutForm(obj=defaults, event=self.event)
        css_form = CSSForm()
        logo_form = LogoForm()
        if form.validate_on_submit():
            # Persist only keys that are actual layout settings.
            data = {unicode(key): value for key, value in form.data.iteritems() if key in layout_settings.defaults}
            layout_settings.set_multi(self._conf, data)
            if form.theme.data == '_custom':
                layout_settings.set(self._conf, 'use_custom_css', True)
            flash(_('Settings saved'), 'success')
            return redirect(url_for('event_layout.index', self._conf))
        else:
            # Pre-populate the upload widgets with the stored files, if any.
            if self.event.logo_metadata:
                logo_form.logo.data = _logo_data(self.event)
            if self.event.has_stylesheet:
                css_form.css_file.data = _css_file_data(self.event)
            return WPLayoutEdit.render_template('layout.html', self._conf, form=form, event=self._conf,
                                                logo_form=logo_form, css_form=css_form)
class RHLayoutLogoUpload(RHLayoutBase):
    """Handle upload of a new event logo (re-encoded to PNG)."""

    def _process(self):
        f = request.files['file']
        try:
            img = Image.open(f)
        except IOError:
            # Pillow could not parse the upload; it is not a usable image.
            flash(_('You cannot upload this file as a logo.'), 'error')
            return jsonify_data(content=None)
        if img.format.lower() not in {'jpeg', 'png', 'gif'}:
            flash(_('The file has an invalid format ({format})').format(format=img.format), 'error')
            return jsonify_data(content=None)
        if img.mode == 'CMYK':
            flash(_('The logo you uploaded is using the CMYK colorspace and has been converted to RGB. Please check if '
                    'the colors are correct and convert it manually if necessary.'), 'warning')
            img = img.convert('RGB')
        # Re-encode as PNG regardless of the original format.
        image_bytes = BytesIO()
        img.save(image_bytes, 'PNG')
        image_bytes.seek(0)
        content = image_bytes.read()
        self.event.logo = content
        self.event.logo_metadata = {
            'hash': crc32(content),
            'size': len(content),
            'filename': os.path.splitext(secure_filename(f.filename, 'logo'))[0] + '.png',
            'content_type': 'image/png'
        }
        flash(_('New logo saved'), 'success')
        logger.info("New logo '%s' uploaded by %s (%s)", f.filename, session.user, self.event)
        return jsonify_data(content=_logo_data(self.event))
class RHLayoutLogoDelete(RHLayoutBase):
    """Remove the event logo and its metadata."""

    def _process(self):
        self.event.logo = None
        self.event.logo_metadata = None
        flash(_('Logo deleted'), 'success')
        logger.info("Logo of %s deleted by %s", self.event, session.user)
        return jsonify_data(content=None)
class RHLayoutCSSUpload(RHLayoutBase):
    """Handle upload of a custom CSS stylesheet for the event."""

    def _process(self):
        f = request.files['file']
        self.event.stylesheet = to_unicode(f.read()).strip()
        self.event.stylesheet_metadata = {
            'hash': crc32(self.event.stylesheet),
            'size': len(self.event.stylesheet),
            'filename': secure_filename(f.filename, 'stylesheet.css')
        }
        # Flush so the stylesheet is immediately visible (e.g. to the preview).
        db.session.flush()
        flash(_('New CSS file saved. Do not forget to enable it ("Use custom CSS") after verifying that it is correct '
                'using the preview.'), 'success')
        logger.info('CSS file for %s uploaded by %s', self.event, session.user)
        return jsonify_data(content=_css_file_data(self.event))
class RHLayoutCSSDelete(RHLayoutBase):
    """Remove the custom stylesheet and disable custom CSS."""

    def _process(self):
        self.event.stylesheet = None
        self.event.stylesheet_metadata = None
        # A deleted stylesheet can no longer be in use.
        layout_settings.set(self.event, 'use_custom_css', False)
        flash(_('CSS file deleted'), 'success')
        logger.info("CSS file for %s deleted by %s", self.event, session.user)
        return jsonify_data(content=None)
class RHLayoutCSSPreview(RHLayoutBase):
    """Render the conference page with a candidate theme/CSS applied."""

    def _process(self):
        # The theme comes from the query string; CSRF is disabled as this
        # request only previews and does not persist anything.
        form = CSSSelectionForm(event=self.event, formdata=request.args, csrf_enabled=False)
        css_url = None
        if form.validate():
            css_url = get_css_url(self.event, force_theme=form.theme.data, for_preview=True)
        return WPConfModifPreviewCSS(self, self._conf, form=form, css_url=css_url).display()
class RHLayoutCSSSaveTheme(RHLayoutBase):
    """Persist the theme selection made in the CSS selection form."""

    def _process(self):
        form = CSSSelectionForm(event=self.event)
        if form.validate_on_submit():
            layout_settings.set(self.event, 'use_custom_css', form.theme.data == '_custom')
            if form.theme.data != '_custom':
                # NOTE(review): this call targets self._conf while the line
                # above uses self.event -- presumably equivalent settings
                # targets, but worth confirming.
                layout_settings.set(self._conf, 'theme', form.theme.data)
            flash(_('Settings saved'), 'success')
            return redirect(url_for('event_layout.index', self.event))
class RHLogoDisplay(RHConferenceBaseDisplay):
    """Serve the event logo to visitors; 404 when no logo is set."""

    def _process(self):
        event = self._conf.as_event
        if not event.has_logo:
            raise NotFound
        metadata = event.logo_metadata
        # conditional=True enables HTTP conditional requests (caching).
        return send_file(metadata['filename'], BytesIO(event.logo), mimetype=metadata['content_type'], conditional=True)
class RHLayoutCSSDisplay(RHConferenceBaseDisplay):
    """Serve the event's custom stylesheet; 404 when none is set."""

    def _process(self):
        event = self._conf.as_event
        if not event.has_stylesheet:
            raise NotFound
        # The stylesheet is stored as text; encode it for the response body.
        data = BytesIO(event.stylesheet.encode('utf-8'))
        return send_file(event.stylesheet_metadata['filename'], data, mimetype='text/css', conditional=True)
|
belokop/indico_bare
|
indico/modules/events/layout/controllers/layout.py
|
Python
|
gpl-3.0
| 7,996
|
# PyParticles : Particles simulation in python
# Copyright (C) 2012 Simone Riva
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import pyparticles.pset.boundary as bd
class ReboundBoundary( bd.Boundary ):
    """Axis-aligned box boundary that reflects particles elastically off
    its walls, mirroring position and inverting the normal velocity."""
    def __init__( self , bound=(-1,1) , dim=3 ):
        self.set_boundary( bound , dim )
        self.set_normals()
    def set_normals( self ):
        """Precompute one axis-aligned unit vector per box face.

        Two rows per axis (one per opposite face); only the first two or
        three axes are filled depending on ``self.dim``.
        """
        self.__N = np.zeros( ( 2*self.dim , self.dim ) )
        #print( self.__N )
        if self.dim >= 2 :
            self.__N[0,:2] = np.array( [1,0] )
            self.__N[1,:2] = np.array( [-1,0] )
            self.__N[2,:2] = np.array( [0,1] )
            self.__N[3,:2] = np.array( [0,-1] )
        if self.dim == 3 :
            self.__N[4,:] = np.array( [0,0,1] )
            self.__N[5,:] = np.array( [0,0,-1] )
    def boundary( self , p_set ):
        """Push every particle that crossed a wall back inside *p_set*
        and flip the velocity component normal to that wall."""
        v_mi = np.zeros((3))
        v_mx = np.zeros((3))
        for i in range( self.dim ) :
            j = 2*i
            v_mi[:] = 0.0
            v_mx[:] = 0.0
            #delta = self.bound[i,1] - self.bound[i,0]
            # Boolean masks of particles beyond the lower/upper wall on axis i.
            b_mi = p_set.X[:,i] < self.bound[i,0]
            b_mx = p_set.X[:,i] > self.bound[i,1]
            v_mi[i] = self.bound[i,0]
            v_mx[i] = self.bound[i,1]
            # Mirror the offending positions about the wall plane.
            p_set.X[b_mi,:] = p_set.X[b_mi,:] + 2.0 * self.__N[j,:] * ( v_mi - p_set.X[b_mi,:] )
            p_set.X[b_mx,:] = p_set.X[b_mx,:] + 2.0 * self.__N[j,:] * ( v_mx - p_set.X[b_mx,:] )
            # Invert the velocity component along the crossed axis.
            p_set.V[b_mi,i] = -p_set.V[b_mi,i]
            p_set.V[b_mx,i] = -p_set.V[b_mx,i]
|
simon-r/PyParticles
|
pyparticles/pset/rebound_boundary.py
|
Python
|
gpl-3.0
| 2,289
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe, os, json
from frappe.utils import cstr
from unidecode import unidecode
from six import iteritems
def create_charts(company, chart_template=None, existing_company=None):
    """Create the chart of accounts for *company*.

    The account tree is taken either from another company
    (*existing_company*) or from the chart template named
    *chart_template*; nothing happens when no chart is found.
    """
    chart = get_chart(chart_template, existing_company)
    if chart:
        accounts = []

        def _import_accounts(children, parent, root_type, root_account=False):
            # Depth-first walk of the nested chart dict, inserting an
            # Account doc for every real account node.
            for account_name, child in iteritems(children):
                if root_account:
                    # Top-level nodes carry their own root_type.
                    root_type = child.get("root_type")
                # Skip the attribute keys that describe a node rather than
                # name a child account.
                if account_name not in ["account_number", "account_type",
                        "root_type", "is_group", "tax_rate"]:
                    account_number = cstr(child.get("account_number")).strip()
                    account_name, account_name_in_db = add_suffix_if_duplicate(account_name,
                        account_number, accounts)
                    is_group = identify_is_group(child)
                    report_type = "Balance Sheet" if root_type in ["Asset", "Liability", "Equity"] \
                        else "Profit and Loss"
                    account = frappe.get_doc({
                        "doctype": "Account",
                        "account_name": account_name,
                        "company": company,
                        "parent_account": parent,
                        "is_group": is_group,
                        "root_type": root_type,
                        "report_type": report_type,
                        "account_number": account_number,
                        "account_type": child.get("account_type"),
                        "account_currency": frappe.db.get_value("Company", company, "default_currency"),
                        "tax_rate": child.get("tax_rate")
                    })
                    if root_account or frappe.local.flags.allow_unverified_charts:
                        # Root accounts have no parent, so skip mandatory checks.
                        account.flags.ignore_mandatory = True
                    account.flags.ignore_permissions = True
                    account.insert()
                    accounts.append(account_name_in_db)
                    # Recurse into this node's children.
                    _import_accounts(child, account.name, root_type)

        _import_accounts(chart, None, None, root_account=True)
def add_suffix_if_duplicate(account_name, account_number, accounts):
    """Compute the normalized db-lookup name and de-duplicate the display name.

    Returns ``(account_name, account_name_in_db)`` where the display name
    gets a numeric suffix if the normalized name was already seen in
    *accounts*.
    """
    normalized = account_name.strip().lower()
    if account_number:
        account_name_in_db = unidecode(" - ".join([account_number, normalized]))
    else:
        account_name_in_db = unidecode(normalized)
    if account_name_in_db in accounts:
        duplicates = accounts.count(account_name_in_db)
        account_name = account_name + " " + cstr(duplicates)
    return account_name, account_name_in_db
def identify_is_group(child):
    """Decide whether a chart-template node represents a group account.

    A node is a group when the template flags it explicitly, or when it
    has at least one child key besides the known attribute keys.
    """
    explicit = child.get("is_group")
    if explicit:
        return explicit
    attribute_keys = {"account_type", "root_type", "is_group", "tax_rate", "account_number"}
    return 1 if set(child.keys()) - attribute_keys else 0
def get_chart(chart_template, existing_company=None):
    """Return the account tree for a chart template.

    Resolution order: copy from *existing_company* if given, then the two
    built-in standard charts, then the bundled JSON chart files.  Returns
    None implicitly when no matching chart is found.
    """
    chart = {}
    if existing_company:
        return get_account_tree_from_existing_company(existing_company)
    elif chart_template == "Standard":
        from erpnext.accounts.doctype.account.chart_of_accounts.verified import standard_chart_of_accounts
        return standard_chart_of_accounts.get()
    elif chart_template == "Standard with Numbers":
        from erpnext.accounts.doctype.account.chart_of_accounts.verified \
            import standard_chart_of_accounts_with_account_number
        return standard_chart_of_accounts_with_account_number.get()
    else:
        folders = ("verified",)
        if frappe.local.flags.allow_unverified_charts:
            folders = ("verified", "unverified")
        # Scan the bundled chart JSON files for one whose "name" matches.
        for folder in folders:
            path = os.path.join(os.path.dirname(__file__), folder)
            for fname in os.listdir(path):
                fname = frappe.as_unicode(fname)
                if fname.endswith(".json"):
                    with open(os.path.join(path, fname), "r") as f:
                        chart = f.read()
                    if chart and json.loads(chart).get("name") == chart_template:
                        return json.loads(chart).get("tree")
@frappe.whitelist()
def get_charts_for_country(country, with_standard=False):
    """List the chart-of-accounts template names available for *country*."""
    charts = []

    def _get_chart_name(content):
        # Collect the chart's name unless the template is marked disabled.
        if content:
            content = json.loads(content)
            if (content and content.get("disabled", "No") == "No") \
                    or frappe.local.flags.allow_unverified_charts:
                charts.append(content["name"])

    country_code = frappe.db.get_value("Country", country, "code")
    if country_code:
        folders = ("verified",)
        if frappe.local.flags.allow_unverified_charts:
            folders = ("verified", "unverified")
        for folder in folders:
            path = os.path.join(os.path.dirname(__file__), folder)
            if not os.path.exists(path):
                continue
            # Chart files are named after the country code or country name.
            for fname in os.listdir(path):
                fname = frappe.as_unicode(fname)
                if (fname.startswith(country_code) or fname.startswith(country)) and fname.endswith(".json"):
                    with open(os.path.join(path, fname), "r") as f:
                        _get_chart_name(f.read())
    # Unless exactly one country chart was found, offer the standard charts too.
    if len(charts) != 1 or with_standard:
        charts += ["Standard", "Standard with Numbers"]
    return charts
def get_account_tree_from_existing_company(existing_company):
    """Return the chart of accounts of *existing_company* as a nested dict."""
    all_accounts = frappe.get_all('Account',
        filters={'company': existing_company},
        fields = ["name", "account_name", "parent_account", "account_type",
            "is_group", "root_type", "tax_rate", "account_number"],
        order_by="lft, rgt")
    account_tree = {}
    # fill in tree starting with root accounts (those with no parent)
    if all_accounts:
        build_account_tree(account_tree, None, all_accounts)
    return account_tree
def build_account_tree(tree, parent, all_accounts):
    """Recursively fill *tree* with the children of *parent*.

    *all_accounts* is the flat list of account rows; *parent* is None at
    the top level (root accounts are those with an empty parent_account).
    Mutates *tree* in place.
    """
    # find children
    parent_account = parent.name if parent else ""
    children = [acc for acc in all_accounts if cstr(acc.parent_account) == parent_account]
    # if no children, but a group account, record the group attributes so
    # they survive the round-trip.
    # BUG FIX: check `parent` before dereferencing it -- the original
    # raised AttributeError when called with parent=None and no root
    # accounts matched.
    if not children and parent is not None and parent.is_group:
        tree["is_group"] = 1
        tree["account_number"] = parent.account_number
    # build a subtree for each child
    for child in children:
        # start new subtree
        tree[child.account_name] = {}
        # assign account attributes; root accounts also carry root_type
        if child.account_number:
            tree[child.account_name]["account_number"] = child.account_number
        if child.account_type:
            tree[child.account_name]["account_type"] = child.account_type
        if child.tax_rate:
            tree[child.account_name]["tax_rate"] = child.tax_rate
        if not parent:
            tree[child.account_name]["root_type"] = child.root_type
        # call recursively to build a subtree for current account
        build_account_tree(tree[child.account_name], child, all_accounts)
@frappe.whitelist()
def validate_bank_account(coa, bank_account):
    """Return True if *bank_account* is an account name in chart *coa*."""
    attribute_keys = ("account_number", "account_type", "root_type", "is_group", "tax_rate")
    found = []

    def _collect(node):
        # Walk the nested chart dict, recording every real account name.
        for name, child in iteritems(node):
            if name not in attribute_keys:
                found.append(name)
                _collect(child)

    chart = get_chart(coa)
    if chart:
        _collect(chart)
    return (bank_account in found)
|
manassolanki/erpnext
|
erpnext/accounts/doctype/account/chart_of_accounts/chart_of_accounts.py
|
Python
|
gpl-3.0
| 6,558
|
import datetime
from collections import namedtuple
import os
import tempfile
import time
import uuid
import random
import stat
from textwrap import dedent
import pytest
from GangaCore.Utility.logging import getLogger
from GangaDirac.Lib.Utilities.DiracUtilities import execute
from GangaCore.testlib.mark import external
from GangaCore.testlib.GangaUnitTest import load_config_files, clear_config
logger = getLogger(modulename=True)
# Translation table from raw DIRAC job states to Ganga job statuses; its
# repr() is embedded into the remote ``status`` calls issued below.
statusmapping = {
    'Checking': 'submitted',
    'Completed': 'running',
    'Deleted': 'failed',
    'Done': 'completed',
    'Failed': 'failed',
    'Killed': 'killed',
    'Matched': 'submitted',
    'Received': 'submitted',
    'Running': 'running',
    'Staging': 'submitted',
    'Stalled': 'running',
    'Waiting': 'submitted',
}
# Handle describing the submitted test job: the DIRAC job id plus the LFNs
# of the two output files exercised by the get/remove tests.
JobInfo = namedtuple('JobInfo', ['id', 'get_file_lfn', 'remove_file_lfn'])
@pytest.yield_fixture(scope='module')
def load_config():
    """Load the Ganga config files before the test and clean them up afterwards"""
    load_config_files()
    # make sure post-bootstrap hook is run to ensure Dirac config options are set correctly
    # Only becomes an issue if this test is run on its own
    from GangaLHCb import postBootstrapHook
    postBootstrapHook()
    yield
    clear_config()
@pytest.yield_fixture(scope='class')
def dirac_job(load_config):
    """Submit a small DIRAC job, wait for completion and yield a JobInfo.

    Yields the DIRAC job id together with the LFNs of the two output files
    ('getFile.dst' and 'removeFile.dst'); removes the getFile LFN again on
    teardown once the class's tests have run.
    """
    # Unique payloads so files from different sessions are distinguishable.
    sandbox_str = uuid.uuid4()
    get_file_str = uuid.uuid4()
    remove_file_str = uuid.uuid4()

    exe_script = """#!/bin/bash
    echo '%s' > sandboxFile.txt
    echo '%s' > getFile.dst
    echo '%s' > removeFile.dst
    """ % (sandbox_str, get_file_str, remove_file_str)

    logger.info("exe_script:\n%s\n" % str(exe_script))

    exe_file, exe_path_name = tempfile.mkstemp()
    # Open in text mode: exe_script is a str, and writing a str through a
    # binary-mode handle ('wb' in the original) raises TypeError on Python 3.
    with os.fdopen(exe_file, 'w') as f:
        f.write(exe_script)
    st = os.stat(exe_path_name)
    os.chmod(exe_path_name, st.st_mode | stat.S_IEXEC)

    api_script = """
    # Script written in TestDiracCommands.py
    from LHCbDIRAC.Interfaces.API.Dirac import Dirac
    from LHCbDIRAC.Interfaces.API.Job import Job
    from DIRAC.Core.Utilities.SiteSEMapping import getSEsForCountry
    uk_ses = getSEsForCountry('uk')['Value']
    j = Job()
    j.setName('Ganga-DiracCommands-InitTestJob')
    j.setCPUTime(10)
    j.setExecutable('###EXE_SCRIPT_BASE###','','Ganga_Executable.log')
    j.setInputSandbox(['###EXE_SCRIPT###'])
    j.setOutputSandbox(['std.out','std.err','sandboxFile.txt'])
    j.setOutputData(['getFile.dst', 'removeFile.dst'], outputSE=uk_ses)
    #submit the job to dirac
    dirac=Dirac()
    result = dirac.submitJob(j)
    output(result)
    """
    api_script = dedent(api_script)

    final_submit_script = api_script.replace('###EXE_SCRIPT###', exe_path_name).replace('###EXE_SCRIPT_BASE###', os.path.basename(exe_path_name))
    confirm = execute(final_submit_script, return_raw_dict=True)
    if not isinstance(confirm, dict):
        raise RuntimeError('Problem submitting job\n{0}'.format(confirm))

    assert 'OK' in confirm, 'Failed to submit job!'
    assert confirm['OK'], 'Failed to submit job!'
    job_id = confirm['Value']
    logger.info(job_id)

    os.remove(exe_path_name)

    # Poll until the job reaches a terminal state or we run out of time.
    logger.info('Waiting for DIRAC job to finish')
    timeout = 1200
    end_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=timeout)
    status = execute('status([%s], %s)' % (job_id, repr(statusmapping)), return_raw_dict=True)
    while (status['OK'] and statusmapping[status['Value'][0][1]] not in ['completed', 'failed']) and datetime.datetime.utcnow() < end_time:
        time.sleep(5)
        status = execute('status([%s], %s)' % (job_id, repr(statusmapping)), return_raw_dict=True)
        print("Job status: %s" % status)

    assert 'OK' in status, 'Failed to get job Status!'
    assert status['OK'], 'Failed to get job Status!'
    assert statusmapping[status['Value'][0][1]] == 'completed', 'job not completed properly: %s' % status

    logger.info("status: %s", status)

    # getOutputDataInfo can transiently fail just after completion; retry.
    output_data_info = execute('getOutputDataInfo("%s")' % job_id, return_raw_dict=True)
    logger.info('output_data_info: %s' % output_data_info)
    max_retry = 20
    count = 0
    while not output_data_info.get('OK', True) and count != max_retry:
        time.sleep(5)
        output_data_info = execute('getOutputDataInfo("%s")' % job_id, return_raw_dict=True)
        logger.info("output_data_info:\n%s\n", output_data_info)
        count += 1

    assert 'OK' in output_data_info, 'getOutputDataInfo Failed!'
    assert output_data_info['OK'], 'getOutputDataInfo Failed!'

    logger.info("\n\n\noutput_data_info: %s\n\n\n" % output_data_info)
    get_file_lfn = output_data_info['Value']['getFile.dst']['LFN']
    remove_file_lfn = output_data_info['Value']['removeFile.dst']['LFN']
    logger.info("%s %s", get_file_lfn, remove_file_lfn)

    yield JobInfo(job_id, get_file_lfn, remove_file_lfn)

    # Teardown: clean up the remaining output file.
    confirm = execute('removeFile("%s")' % get_file_lfn, return_raw_dict=True)
    assert 'OK' in confirm, 'removeFile Failed!'
    assert confirm['OK'], 'removeFile Failed!'
@pytest.fixture(scope='module')
def dirac_sites(load_config):
    """Grab a shuffled list of UK DIRAC storage elements"""
    fetch_script = dedent("""
        from DIRAC.Core.Utilities.SiteSEMapping import getSEsForCountry
        output(getSEsForCountry('uk'))
        """)
    result = execute(fetch_script, return_raw_dict=True)
    assert result['OK'], 'Could not fetch list of SEs'
    se_list = result['Value']
    # Shuffle so repeated runs spread the load across different SEs.
    random.shuffle(se_list)
    return se_list
@external
class TestDiracCommands(object):
    """Integration tests for the GangaDirac ``execute`` command layer.

    Each test sends a small command through ``execute`` against a live
    DIRAC backend and inspects the raw result dict; the shared
    ``dirac_job`` fixture provides a completed job and its output LFNs.
    """

    def test_peek(self, dirac_job):
        confirm = execute('peek("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'peek command not executed successfully'

    def test_getJobCPUTime(self, dirac_job):
        confirm = execute('getJobCPUTime("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getJobCPUTime command not executed successfully'

    def test_getOutputData(self, dirac_job):
        confirm = execute('getOutputData("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getOutputData command not executed successfully'

    def test_getOutputSandbox(self, dirac_job):
        confirm = execute('getOutputSandbox("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getOutputSandbox command not executed successfully'

    def test_getOutputDataInfo(self, dirac_job):
        confirm = execute('getOutputDataInfo("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getOutputDataInfo command not executed successfully'
        assert isinstance(confirm['Value']['getFile.dst'], dict), 'getOutputDataInfo command not executed successfully'

    def test_getOutputDataLFNs(self, dirac_job):
        confirm = execute('getOutputDataLFNs("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getOutputDataLFNs command not executed successfully'

    def test_normCPUTime(self, dirac_job):
        confirm = execute('normCPUTime("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'normCPUTime command not executed successfully'
        assert isinstance(confirm['Value'], str), 'normCPUTime command not executed successfully'

    def test_getStateTime(self, dirac_job):
        confirm = execute('getStateTime("%s", "completed")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getStateTime command not executed successfully'
        assert isinstance(confirm['Value'], datetime.datetime), 'getStateTime command not executed successfully'

    def test_timedetails(self, dirac_job):
        confirm = execute('timedetails("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'timedetails command not executed successfully'
        assert isinstance(confirm['Value'], dict), 'Command not executed successfully'

    # The y_/z_ prefixes keep reschedule/kill running after the other tests.
    def test_y_reschedule(self, dirac_job):
        confirm = execute('reschedule("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'reschedule command not executed successfully'

    def test_z_kill(self, dirac_job):
        confirm = execute('kill("%s")' % dirac_job.id, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'kill command not executed successfully'

    def test_status(self, dirac_job):
        confirm = execute('status([%s], %s)' % (dirac_job.id, repr(statusmapping)), return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'status command not executed successfully'
        assert isinstance(confirm['Value'], list), 'Command not executed successfully'

    def test_getFile(self, dirac_job):
        confirm = execute('getFile("%s")' % dirac_job.get_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getFile command not executed successfully'

    def test_removeFile(self, dirac_job):
        confirm = execute('removeFile("%s")' % dirac_job.remove_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'removeFile command not executed successfully'

    def test_ping(self, dirac_job):
        confirm = execute('ping("WorkloadManagement","JobManager")', return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'ping command not executed successfully'

    def test_getMetadata(self, dirac_job):
        confirm = execute('getMetadata("%s")' % dirac_job.get_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getMetaData command not executed successfully'

    def test_getReplicas(self, dirac_job):
        confirm = execute('getReplicas("%s")' % dirac_job.get_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getReplicas command not executed successfully'

    def test_getAccessURL(self, dirac_job):
        # NOTE: JobInfo has no 'cred_req' field; the original passed
        # ``cred_req=dirac_job.cred_req`` to execute(), which raised
        # AttributeError before the command ever ran.
        confirm = execute('getReplicas("%s")' % dirac_job.get_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'getReplicas command not executed successfully'
        SE = random.choice(list(confirm['Value']['Successful'][dirac_job.get_file_lfn].keys()))
        accessResult = execute('getAccessURL("%s", "%s")' % (dirac_job.get_file_lfn, SE), return_raw_dict=True)
        logger.info(accessResult)
        assert accessResult['OK'], 'getAccessURL command not executed successfully'
        accessResultProtocol = execute('getAccessURL("%s", "%s", %s)' % (dirac_job.get_file_lfn, SE, ['xroot']), return_raw_dict=True)
        logger.info(accessResultProtocol)
        assert accessResultProtocol['OK'], 'getAccessURL command with protocol not executed successfully'
        assert ('root://' in accessResultProtocol['Value']['Successful'][dirac_job.get_file_lfn]), 'URL does not start with root protocol'

    def test_replicateFile(self, dirac_job, dirac_sites):
        for new_location in dirac_sites:
            confirm = execute('replicateFile("%s","%s","")' % (dirac_job.get_file_lfn, new_location), return_raw_dict=True)
            logger.info(confirm)
            if not confirm['OK']:
                continue  # If we couldn't add the file, try the next site
            confirm = execute('removeReplica("%s","%s")' % (dirac_job.get_file_lfn, new_location), return_raw_dict=True)
            logger.info(confirm)
            assert confirm['OK'], 'Command not executed successfully'
            break  # Once we found a working site, stop looking
        else:
            raise AssertionError('No working site found')

    def test_splitInputData(self, dirac_job):
        confirm = execute('splitInputData("%s","1")' % dirac_job.get_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'splitInputData command not executed successfully'

    def test_uploadFile(self, tmpdir, dirac_job, dirac_sites):
        new_lfn = '%s_add_file' % os.path.dirname(dirac_job.get_file_lfn)
        for location in dirac_sites:
            temp_file = tmpdir.join('upload_file')
            temp_file.write(uuid.uuid4())
            logger.info('Adding file to %s', location)
            confirm = execute('uploadFile("%s","%s",["%s"],"")' % (new_lfn, temp_file, location), return_raw_dict=True)
            logger.info(confirm)
            # Mirror test_addFile: skip to the next site when the upload
            # failed. The original tested ``confirm.get(location, False)``,
            # a key that is never present in the result dict, so failed
            # uploads were not skipped.
            if not confirm['OK']:
                continue
            logger.info('Removing file from %s', location)
            confirm_remove = execute('removeFile("%s")' % new_lfn, return_raw_dict=True)
            logger.info(confirm)
            assert confirm_remove['OK'], 'Command not executed successfully'
            break  # Once we found a working site, stop looking
        else:
            raise AssertionError('No working site found')

    def test_addFile(self, tmpdir, dirac_job, dirac_sites):
        new_lfn = '%s_add_file' % os.path.dirname(dirac_job.get_file_lfn)
        for location in dirac_sites:
            temp_file = tmpdir.join('add_file')
            temp_file.write(uuid.uuid4())
            logger.info('Adding file to %s', location)
            confirm = execute('addFile("%s","%s","%s","")' % (new_lfn, temp_file, location), return_raw_dict=True)
            logger.info(confirm)
            if not confirm['OK']:
                continue  # If we couldn't add the file, try the next site
            logger.info('Removing file from %s', location)
            confirm_remove = execute('removeFile("%s")' % new_lfn, return_raw_dict=True)
            logger.info(confirm)
            assert confirm_remove['OK'], 'Command not executed successfully'
            break  # Once we found a working site, stop looking
        else:
            raise AssertionError('No working site found')

    def test_getJobGroupJobs(self, dirac_job):
        confirm = execute('getJobGroupJobs("")', return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'Command not executed successfully'

    def test_bkQueryDict(self, dirac_job):
        confirm = execute('bkQueryDict({"FileType":"Path","ConfigName":"LHCb","ConfigVersion":"Collision09","EventType":"10","ProcessingPass":"Real Data","DataTakingConditions":"Beam450GeV-VeloOpen-MagDown"})', return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'bkQuery command not executed successfully'

    def test_checkSites(self, dirac_job):
        confirm = execute('checkSites()', return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'checkSites command not executed successfully'

    def test_bkMetaData(self, dirac_job):
        confirm = execute('bkMetaData("")', return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'Command not executed successfully'

    def test_getDataset(self, dirac_job):
        confirm = execute('getDataset("LHCb/Collision09/Beam450GeV-VeloOpen-MagDown/Real Data/RecoToDST-07/10/DST","","Path","","","")', return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'Command not executed successfully'

    def test_checkTier1s(self, dirac_job):
        confirm = execute('checkTier1s()', return_raw_dict=True)
        logger.info(confirm)
        assert confirm['OK'], 'Command not executed successfully'

    # These two are expected to fail remotely; we only assert the failure mode.
    def test_getInputDataCatalog(self, dirac_job):
        confirm = execute('getInputDataCatalog("%s","","")' % dirac_job.get_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['Message'].startswith('Failed to access') or confirm['Message'].startswith('Exception during construction'), 'Command not executed successfully'

    def test_getLHCbInputDataCatalog(self, dirac_job):
        confirm = execute('getLHCbInputDataCatalog("%s",0,"","")' % dirac_job.get_file_lfn, return_raw_dict=True)
        logger.info(confirm)
        assert confirm['Message'].startswith('Failed to access') or confirm['Message'].startswith('Exception during construction'), 'Command not executed successfully'
|
ganga-devs/ganga
|
ganga/GangaLHCb/test/Unit/DiracAPI/TestDiracCommands.py
|
Python
|
gpl-3.0
| 16,423
|
from __future__ import absolute_import
import datetime
from django.db import models
from django.db.models import get_model
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode, force_unicode
from django.template import loader, Context, TemplateDoesNotExist
from django.utils.translation import get_language, activate
from notification.models import NoticeType
from transifex.txcommon.log import logger
from .queues import log_to_queues
def _get_formatted_message(label, context):
    """
    Render the notice template for ``label`` with ``context`` using the
    system default language; return None when the template is missing.
    """
    saved_language = get_language()
    # Render in the default language regardless of the active request language.
    activate(settings.LANGUAGE_CODE)
    template_name = 'notification/%s/notice.html' % label
    try:
        rendered = loader.get_template(template_name).render(Context(context))
    except TemplateDoesNotExist:
        logger.error("Template '%s' doesn't exist." % template_name)
        rendered = None
    # Restore the language that was active before.
    activate(saved_language)
    return rendered
def _user_counting(query):
    """
    Get a LogEntry queryset and return a list of dictionaries with the
    counting of times that the users appeared on the queryset.

    Example of the resultant dictionary:
    [{'username': u'editor', 'number': 5},
     {'username': u'guest', 'number': 1}]
    """
    counted = query.values('user__username').annotate(
        number=models.Count('user')).order_by('-number')
    # Expose 'user__username' under the friendlier 'username' key.
    return [{'username': row['user__username'], 'number': row['number']}
            for row in counted]
def _distinct_action_time(query, limit=None):
    """
    Distinct rows by the 'action_time' field, keeping in the query only the
    entry with the highest 'id' for the related set of entries with equal
    'action_time'.

    If 'limit' is set, the function will return the 'limit'-most-recent
    actionlogs.

    Example:
    For the following query set:
     id |         action_time
    ----+----------------------------
      1 | 2010-03-11 10:55:26.32941-03
      2 | 2010-03-11 10:55:26.32941-03
      3 | 2010-03-11 13:48:22.202596-09
      4 | 2010-03-11 13:48:53.505697-03
      5 | 2010-03-11 13:48:53.505697-03
      6 | 2010-03-11 13:51:09.013079-05
      7 | 2010-03-11 13:51:09.013079-05
      8 | 2010-03-11 13:51:09.013079-05

    After passing through this function the query will be:
     id |         action_time
    ----+----------------------------
      2 | 2010-03-11 10:55:26.32941-03
      3 | 2010-03-11 13:48:22.202596-09
      5 | 2010-03-11 13:48:53.505697-03
      8 | 2010-03-11 13:51:09.013079-05

    Rows with the same 'action_time' are eliminated, keeping the one with
    highest 'id'.
    """
    # NOTE(review): the de-duplication relies on defer()+distinct() collapsing
    # rows that only differ in the deferred columns — ORM-version sensitive;
    # confirm behaviour when upgrading Django.
    pks = query.defer('object_id', 'content_type').distinct()
    if limit:
        pks = pks.order_by('-id')[:limit]
    else:
        # For some reason, when using defer() the Meta ordering
        # is not respected so we have to set it explicitly.
        pks = pks.order_by('-action_time')
    return pks.select_related('user')
class LogEntryManager(models.Manager):
    """Convenience query helpers over LogEntry rows."""
    def by_object(self, obj, limit=None):
        """Return LogEntries for a related object."""
        ctype = ContentType.objects.get_for_model(obj)
        q = self.filter(content_type__pk=ctype.pk, object_id=obj.pk)
        return _distinct_action_time(q, limit)
    def by_user(self, user, limit=None):
        """Return LogEntries for a specific user."""
        q = self.filter(user__pk__exact=user.pk)
        return _distinct_action_time(q, limit)
    def by_object_last_week(self, obj):
        """Return LogEntries of the related object for the last week."""
        last_week_date = datetime.datetime.today() - datetime.timedelta(days=7)
        ctype = ContentType.objects.get_for_model(obj)
        return self.filter(content_type__pk=ctype.pk, object_id=obj.pk,
            action_time__gt=last_week_date)
    def by_user_and_public_projects(self, user, limit=None):
        """
        Return LogEntries for a specific user and his actions on public projects.
        """
        # Avoiding circular import troubles. get_model didn't make it.
        from transifex.projects.models import Project
        ctype = ContentType.objects.get(model='project')
        q = self.filter(user__pk__exact=user.pk, content_type=ctype,
            object_id__in=Project.objects.filter(private=False))
        return _distinct_action_time(q, limit)
    def for_projects_by_user(self, user):
        """Return project LogEntries for a related user."""
        ctype = ContentType.objects.get(model='project')
        return self.filter(user__pk__exact=user.pk, content_type__pk=ctype.pk)
    # NOTE(review): top_submitters_by_content_type is defined elsewhere in
    # this manager (outside this excerpt); the three wrappers below fix the
    # content type and forward the 'number' argument.
    def top_submitters_by_project_content_type(self, number=10):
        """
        Return a list of dicts with the ordered top submitters for the
        entries of the 'project' content type.
        """
        return self.top_submitters_by_content_type('projects.project', number)
    def top_submitters_by_team_content_type(self, number=10):
        """
        Return a list of dicts with the ordered top submitters for the
        entries of the 'team' content type.
        """
        return self.top_submitters_by_content_type('teams.team', number)
    def top_submitters_by_language_content_type(self, number=10):
        """
        Return a list of dicts with the ordered top submitters for the
        entries of the 'language' content type.
        """
        return self.top_submitters_by_content_type('languages.language', number)
class LogEntry(models.Model):
    """An entry in an object's log."""
    # Actor; nullable so entries can outlive (or precede) a user record.
    user = models.ForeignKey(User, verbose_name=_('User'), blank=True,
        null=True, related_name="actionlogs")
    # Generic foreign key target (content_type + object_id -> object).
    object_id = models.IntegerField(blank=True, null=True, db_index=True)
    content_type = models.ForeignKey(ContentType, blank=True, null=True,
        related_name="actionlogs")
    object = generic.GenericForeignKey('content_type', 'object_id')
    action_type = models.ForeignKey(NoticeType, verbose_name=_('Action type'))
    action_time = models.DateTimeField(_('Action time'), db_index=True)
    # Snapshot of the target's name, kept so the log stays readable after
    # the target object is deleted.
    object_name = models.CharField(blank=True, max_length=200)
    message = models.TextField(blank=True, null=True)
    # Managers
    objects = LogEntryManager()
    class Meta:
        verbose_name = _('log entry')
        verbose_name_plural = _('log entries')
        ordering = ('-action_time',)
    def __unicode__(self):
        return u'%s.%s.%s' % (self.action_type, self.object_name, self.user)
    def __repr__(self):
        return smart_unicode("<LogEntry %d (%s)>" % (self.id,
                                                     self.action_type.label))
    def save(self, *args, **kwargs):
        """Save the object in the database."""
        # Default the timestamp to "now" (naive local time) when unset.
        if self.action_time is None:
            self.action_time = datetime.datetime.now()
        super(LogEntry, self).save(*args, **kwargs)
    def message_safe(self):
        """Return the message as HTML"""
        # The message is trusted: it comes from our own rendered templates.
        return self.message
    message_safe.allow_tags = True
    message_safe.admin_order_field = 'message'
    @property
    def action_type_short(self):
        """
        Return a shortened, generalized version of an action type.

        Useful for presenting an image signifying an action type. Example::

            >>> from notification.models import NoticeType
            >>> nt = NoticeType(label='project_added')
            >>> zlog = LogEntry(action_type=nt)
            >>> nt
            <NoticeType: project_added>
            >>> zlog.action_type
            <NoticeType: project_added>
            >>> zlog.action_type_short
            'added'
        """
        return self.action_type.label.split('_')[-1]
def action_logging(user, object_list, action_type, message=None, context=None):
    """
    Add ActionLog using a set of parameters.

    user:
      The user that did the action.
    object_list:
      A list of objects that the actionlog should be created for.
    action_type:
      Label of a type of action from the NoticeType model.
    message:
      A message to be included at the actionlog. If no message is passed
      it will try to render a message using the notice.html from the
      notification application.
    context:
      To render the message using the notification files, sometimes it is
      necessary to pass some vars by using a context.

    Usage::

        al = 'project_added'
        context = {'project': object}
        action_logging(request.user, [object], al, context=context)
    """
    if not getattr(settings, 'ACTIONLOG_ENABLED', None):
        return

    if context is None:
        context = {}

    if message is None:
        message = _get_formatted_message(action_type, context)

    action_type_obj = NoticeType.objects.get(label=action_type)
    # One shared timestamp for the whole batch.
    time = datetime.datetime.now()

    # Validate iterability up front: the original wrapped the whole loop in
    # ``except TypeError``, which also masked unrelated TypeErrors raised
    # while building or saving the LogEntry rows.
    try:
        objects = iter(object_list)
    except TypeError:
        raise TypeError("The 'object_list' parameter must be iterable")

    for obj in objects:
        entry = LogEntry(
            user_id=user.pk,
            content_type=ContentType.objects.get_for_model(obj),
            object_id=obj.pk,
            object_name=force_unicode(obj)[:200],
            action_type=action_type_obj,
            action_time=time,
            message=message)
        entry.save()

        if settings.USE_REDIS:
            log_to_queues(obj, user, time, action_type_obj, message)
|
tymofij/adofex
|
transifex/actionlog/models.py
|
Python
|
gpl-3.0
| 10,045
|
#!/usr/bin/python
#coding: UTF-8
#COPYRIGHT: Patrick Roncagliolo
#LICENCE: GNU GPL 3
import cgi, json
# CGI parameters of the current request, shared by the helpers below.
argsDict = cgi.FieldStorage()
# NOTE(review): EMPTY_DICT appears unused in this module — confirm before removing.
EMPTY_DICT = {}
def getState(init=False):
    """Return (dataDict, devDict), creating and persisting either one
    on demand when ``init`` is True."""
    dataDict = getDataDict()
    if dataDict is None and init is True:
        # First run: provision a TOTP secret, its QR code and the state file.
        key, uri = generateTOTP()
        generateQR(key, uri)
        dataDict = newDataDict(key, uri)
        setDataDict(dataDict)
    devDict = getDevDict()
    if devDict is None and init is True:
        devDict = newDevDict()
        setDevDict(devDict)
    return (dataDict, devDict)
def generateTOTP():
    """Create a fresh 30-character TOTP secret and its otpauth provisioning URI."""
    import string, random
    from otpauth import OtpAuth as otpauth
    alphabet = string.ascii_uppercase + string.digits
    key = ''.join(random.choice(alphabet) for _ in range(30))
    uri = otpauth(key).to_uri('totp', 'patrick@WakeOnLAN', 'WakeOnLAN')
    return (key, uri)
def generateQR(key, uri):
    """Render the provisioning URI as a QR image named after the key,
    replacing any previously generated image."""
    import os, qrcode
    from glob import glob
    img = qrcode.make(uri)
    # Drop stale QR images from earlier provisionings before saving.
    for stale in glob("data/*.png"):
        os.remove(stale)
    img.save("data/%s.png" % key)
def newDataDict(key, uri):
    """Initial persisted state for a freshly provisioned TOTP secret."""
    return {
        'otp-type': 'totp',
        'key': key,
        'uri': uri,
        'post-token': '0',
    }
def getDataDict():
    """Load data/data.json, returning None when it cannot be read."""
    try:
        with open('data/data.json', 'r') as dataFile:
            return json.load(dataFile)
    except IOError:
        return None
def setDataDict(dataDict):
    """Persist the TOTP state dict to data/data.json."""
    with open('data/data.json', 'w') as fh:
        json.dump(dataDict, fh)
def newDevDict():
    """Fresh, empty device-name -> MAC-address registry."""
    return dict()
def getDevDict():
    """Load data/devices.json, returning None when it cannot be read."""
    try:
        with open('data/devices.json', 'r') as fh:
            return json.load(fh)
    except IOError:
        return None
def setDevDict(devDict):
    """Persist the device registry to data/devices.json."""
    with open('data/devices.json', 'w') as fh:
        json.dump(devDict, fh)
def addDevice(devDict, devname, devaddr):
    """Register devname -> devaddr (both normalised) and persist the registry.

    Returns False without saving when the name is already registered."""
    devname = devname.lower().capitalize()
    devaddr = devaddr.lower().replace('-', ':')
    if devname in devDict:
        return False
    devDict[devname] = devaddr
    setDevDict(devDict)
    return True
def rmvDevice(devDict, devname):
    """Remove devname (normalised) from the registry and persist it.

    Returns False without saving when the name is not registered."""
    devname = devname.lower().capitalize()
    if devname not in devDict:
        return False
    del devDict[devname]
    setDevDict(devDict)
    return True
def checkToken(dataDict):
    """Validate the anti-replay token posted with the current request.

    Returns (accepted, next_token); a posted token is accepted only when it
    is strictly greater than the last persisted one. Raises KeyError when
    the stored state has no 'post-token'."""
    if 'post-token' not in dataDict.keys():
        raise KeyError
    data_token = int(dataDict['post-token'])
    token = data_token + 1
    if 'action' not in argsDict.keys() or 'token' not in argsDict.keys():
        return (False, token)
    post_token = int(argsDict['token'].value)
    if post_token <= data_token:
        # Replayed or stale token.
        return (False, token)
    updateToken(dataDict, post_token)
    return (True, post_token + 1)
def updateToken(dataDict, post_token):
    """Persist the newest accepted token into data/data.json and return it."""
    dataDict['post-token'] = post_token
    with open('data/data.json', 'w') as fh:
        json.dump(dataDict, fh)
    return int(dataDict['post-token'])
def printIndexHeader(stylesheets):
    # Emit the CGI response header plus the HTML <head> boilerplate and page
    # title. Python 2 print statements; trailing commas suppress newlines.
    print 'Content-type: text/html\n\n',
    print '<!DOCTYPE html>',
    print '<meta name="viewport" content="width=device-width, initial-scale=1.0">',
    print '<title>RWOLS - Remote WakeOnLan Server</title>',
    for stylesheet in stylesheets:
        print '<link rel="stylesheet" type="text/css" href="%s">' % stylesheet,
    print '<script src="https://cdn.jsdelivr.net/clipboard.js/1.5.13/clipboard.min.js"></script>',
    print '<h1>Remote WakeOnLan Server</h1>'
def printBottomButton(label, link):
    # Emit a single-button POST form targeting ``link`` with caption ``label``.
    print '<form method="post"'
    print 'action="%s">' % link,
    print '<input type="submit"'
    print 'value="%s">' % label,
    print '</form>'
|
roncapat/RWOL
|
rwol-web-src/utilities.py
|
Python
|
gpl-3.0
| 3,772
|
from common import common_global
from common import common_pagination_bootstrap
from sanic import Blueprint
# All routes in this blueprint are mounted under the /user prefix.
blueprint_user_metadata_game_system = Blueprint('name_blueprint_user_metadata_game_system',
                                                url_prefix='/user')
@blueprint_user_metadata_game_system.route('/user_meta_game_system', methods=['GET', 'POST'])
@common_global.jinja_template.template('bss_user/metadata/bss_user_metadata_game_system.html')
@common_global.auth.login_required
async def url_bp_user_metadata_game_system(request):
    """
    Display list of game system metadata
    """
    page, offset = common_pagination_bootstrap.com_pagination_page_calc(request)
    # Remember where the user is, so the search box targets this page.
    request.ctx.session['search_page'] = 'meta_game_system'
    db_connection = await request.app.db_pool.acquire()
    # NOTE(review): pagination links use url='/user/user_meta_game' while this
    # route is mounted at '/user/user_meta_game_system' — confirm intended URL.
    pagination = common_pagination_bootstrap.com_pagination_boot_html(page,
                                                                     url='/user/user_meta_game',
                                                                     item_count=await request.app.db_functions.db_meta_game_system_list_count(
                                                                         db_connection=db_connection),
                                                                     client_items_per_page=
                                                                     int(request.ctx.session[
                                                                             'per_page']),
                                                                     format_number=True)
    media_data = await request.app.db_functions.db_meta_game_system_list(offset,
                                                                         int(request.ctx.session[
                                                                             'per_page']),
                                                                         request.ctx.session[
                                                                             'search_text'],
                                                                         db_connection=db_connection)
    await request.app.db_pool.release(db_connection)
    return {
        'media': media_data,
        'pagination_links': pagination,
    }
@blueprint_user_metadata_game_system.route('/user_meta_game_system_detail/<guid>')
@common_global.jinja_template.template(
    'bss_user/metadata/bss_user_metadata_game_system_detail.html')
@common_global.auth.login_required
async def url_bp_user_metadata_game_system_detail(request, guid):
    """
    Display metadata game detail for the game system identified by ``guid``.
    """
    db_connection = await request.app.db_pool.acquire()
    media_data = await request.app.db_functions.db_meta_game_system_by_guid(guid,
                                                                            db_connection=db_connection)
    await request.app.db_pool.release(db_connection)
    return {
        'guid': guid,
        'data': media_data,
    }
|
MediaKraken/MediaKraken_Deployment
|
source/web_app_sanic/blueprint/user/bp_user_metadata_game_system.py
|
Python
|
gpl-3.0
| 3,005
|
# -*- coding: utf-8 -*-
#
# eofs documentation build configuration file, created by
# sphinx-quickstart on Thu Jul 5 15:47:55 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import time
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.append(os.path.abspath('sphinxext'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.mathjax',
              'sphinx.ext.viewcode',
              'sphinx.ext.autosummary',
              'sphinx.ext.intersphinx',
              'sphinx.ext.extlinks',
              'matplotlib.sphinxext.plot_directive',]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'eofs'
# Copyright year tracks the current year automatically at build time.
copyright = '2013-{} Andrew Dawson'.format(time.localtime().tm_year)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version is taken from the installed package itself.
import eofs
version = eofs.__version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'python'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- extlinks configuration ----------------------------------------------------
# Allow e.g. :issue:`42` and :pr:`42` roles:
extlinks = {'issue': ('https://github.com/ajdawson/eofs/issues/%s', '#'),
            'pr': ('https://github.com/ajdawson/eofs/pull/%s', '#')}
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
# NOTE: 'sphinx13' is a custom theme shipped in html_theme_path below.
html_theme = 'sphinx13'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {'**': ['sidebar_toc.html',
'relations.html',
'sourcelink.html',
'searchbox.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {'index': 'index.html'}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'eofsdoc'
# Options for intersphinx.
intersphinx_mapping = {
'eof2': ('http://ajdawson.github.com/eof2', None),
'iris': ('http://scitools.org.uk/iris/docs/latest', None),
'numpy': ('http://docs.scipy.org/doc/numpy', None),
'xarray': ('http://xarray.pydata.org/en/stable', None),
'dask': ('https://docs.dask.org/en/latest', None),
}
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '11pt',
# Additional stuff for the LaTeX preamble.
'preamble': """\\usepackage{amssymb}
\\usepackage{amsmath}""",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('userguide/index', 'userguide.tex', 'eofs User Guide', 'Andrew Dawson',
'manual'),
('examples/index', 'examples.tex', 'eofs Examples', 'Andrew Dawson',
'manual'),
('api/index', 'api.tex', 'eofs API Reference', 'Andrew Dawson',
'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'eofs', 'eofs Documentation',
['Andrew Dawson'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'eofs', 'eofs Documentation',
'Andrew Dawson', 'eofs', 'EOF analysis in Python.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Autodoc settings -- #
autoclass_content = 'both'
autodoc_member_order = 'bysource'
autodoc_docstring_signature = True
autosummary_generate = True
|
ajdawson/eofs
|
doc/conf.py
|
Python
|
gpl-3.0
| 9,234
|
from vsg.token import primary_unit_declaration as token
from vsg.vhdlFile import utils
def detect(iToken, lObjects):
    '''
    Detects a primary_unit_declaration and delegates to classify().

    primary_unit_declaration ::= identifier;

    :param iToken: index of the current token in lObjects
    :param lObjects: list of token objects being classified
    :returns: index of the token following the declaration
    '''
    return classify(iToken, lObjects)
def classify(iToken, lObjects):
    '''
    Classifies the tokens of a primary_unit_declaration.

    primary_unit_declaration ::= identifier;

    :param iToken: index of the current token in lObjects
    :param lObjects: list of token objects being classified
    :returns: index of the token following the terminating semicolon
    '''
    iCurrent = iToken
    # Tag identifier tokens until the terminating ';' is the next token.
    while not utils.is_next_token(';', iCurrent, lObjects):
        iCurrent = utils.assign_next_token(token.identifier, iCurrent, lObjects)
    # Tag the terminating ';' itself and hand back the new position.
    return utils.assign_token(lObjects, iCurrent, token.semicolon)
|
jeremiah-c-leary/vhdl-style-guide
|
vsg/vhdlFile/classify/primary_unit_declaration.py
|
Python
|
gpl-3.0
| 510
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import urlparse
def uc2utf8(input):
    """Encode a unicode object as UTF-8 bytes; return anything else as-is.

    Python 2 only: relies on the ``unicode`` builtin. Note the exact type
    check (``type(...) ==``): unicode subclasses are deliberately not
    re-encoded by an ``isinstance`` catch-all here.
    """
    if type(input) == unicode:
        return input.encode('utf-8')
    return input
class URL:
    """
    This class is for wrapping URLs into objects. It's used
    internally in the library, end users should not need to know
    anything about this class. All methods that accept URLs can be
    fed either with an URL object, a string or an urlparse.ParsedURL
    object.
    Addresses may be one out of three:
    1) a path relative to the DAV-root, i.e. "someuser/calendar" may
    refer to
    "http://my.davical-server.example.com/pycaldav.php/someuser/calendar".
    2) an absolute path, i.e. "/pycaldav.php/someuser/calendar"
    3) a fully qualified URL,
    i.e. "http://someuser:somepass@my.davical-server.example.com/pycaldav.php/someuser/calendar".
    Remark that hostname, port, user, pass is typically given when
    instantiating the DAVClient object and cannot be overridden later.
    As of 2013-11, some methods in the pycaldav library expected strings
    and some expected urlparse.ParseResult objects, some expected
    fully qualified URLs and most expected absolute paths. The purpose
    of this class is to ensure consistency and at the same time
    maintaining backward compatibility. Basically, all methods should
    accept any kind of URL.
    """
    def __init__(self, url):
        """Wrap *url*, which may be a string or an urlparse result object."""
        # Keep whichever representation was given; the other one is derived
        # lazily (see __getattr__ and __unicode__).
        if isinstance(url, urlparse.ParseResult) or isinstance(url, urlparse.SplitResult):
            self.url_parsed = url
            self.url_raw = None
        else:
            self.url_raw = url
            self.url_parsed = None
    def __nonzero__(self):
        # Python 2 truth protocol: true when any URL data is stored.
        if self.url_raw or self.url_parsed:
            return True
        else:
            return False
    def __ne__(self, other):
        return not self == other
    def __eq__(self, other):
        """Equal if string forms match, or if canonical forms match."""
        if str(self) == str(other):
            return True
        ## The URLs could have insignificant differences
        me = self.canonical()
        if hasattr(other, 'canonical'):
            other = other.canonical()
        return str(me) == str(other)
    ## TODO: better naming? Will return url if url is already an URL
    ## object, else will instantiate a new URL object
    @classmethod
    def objectify(self, url):
        # NOTE(review): the first parameter receives the class (it is a
        # classmethod); conventionally it would be named `cls`.
        if url is None:
            return None
        if isinstance(url, URL):
            return url
        else:
            return URL(url)
    ## To deal with all kind of methods/properties in the ParseResult
    ## class
    def __getattr__(self, attr):
        # Lazily parse the raw string on first attribute access, then
        # delegate to the ParseResult; anything it lacks is looked up on
        # the text form of the URL (e.g. string methods).
        if self.url_parsed is None:
            self.url_parsed = urlparse.urlparse(self.url_raw)
        if hasattr(self.url_parsed, attr):
            return getattr(self.url_parsed, attr)
        else:
            return getattr(self.__unicode__(), attr)
    ## returns the url in text format
    def __str__(self):
        # Python 2: returns a UTF-8 encoded byte string.
        return self.__unicode__().encode('utf-8')
    ## returns the url in text format
    def __unicode__(self):
        # Lazily materialize the raw string from the parsed form; caches it.
        if self.url_raw is None:
            self.url_raw = self.url_parsed.geturl()
        if isinstance(self.url_raw, unicode):
            return self.url_raw
        else:
            return unicode(self.url_raw, 'utf-8')
    def __repr__(self):
        return "URL(%s)" % str(self)
    def is_auth(self):
        """True when the URL carries a username (user[:pass]@host form)."""
        return self.username is not None
    def unauth(self):
        """Return this URL with authentication details stripped.

        Returns self unchanged when no username is present; otherwise a new
        URL built from host:port (credentials dropped) and a de-doubled path.
        """
        if not self.is_auth():
            return self
        return URL.objectify(urlparse.ParseResult(
            self.scheme, '%s:%s' % (self.hostname, self.port),
            self.path.replace('//', '/'), self.params, self.query, self.fragment))
    def canonical(self):
        """
        a canonical URL ... remove authentication details, make sure there
        are no double slashes, and to make sure the URL is always the same,
        run it through the urlparser
        """
        url = self.unauth()
        ## this is actually already done in the unauth method ...
        if '//' in url.path:
            raise NotImplementedError("remove the double slashes")
        ## TODO: optimize - we're going to burn some CPU cycles here
        if url.endswith('/'):
            url = URL.objectify(str(url)[:-1])
        ## This looks like a noop - but it may have the side effect
        ## that urlparser be run (actually not - unauth ensures we
        ## have an urlparse.ParseResult object)
        url.scheme
        ## make sure to delete the string version
        url.url_raw = None
        return url
    def join(self, path):
        """
        assumes this object is the base URL or base path. If the path
        is relative, it should be appended to the base. If the path
        is absolute, it should be added to the connection details of
        self. If the path already contains connection details and the
        connection details differ from self, raise an error.
        """
        if not path:
            return self
        path = URL.objectify(path)
        # Joining is only defined when scheme/host/port agree (or are unset
        # on one side).
        if (
            (path.scheme and self.scheme and path.scheme != self.scheme)
            or
            (path.hostname and self.hostname and path.hostname != self.hostname)
            or
            (path.port and self.port and path.port != self.port)
        ):
            raise ValueError("%s can't be joined with %s" % (self, path))
        if path.path[0] == '/':
            # Absolute path: replaces the base path entirely.
            ret_path = uc2utf8(path.path)
        else:
            # Relative path: append, inserting '/' only when needed.
            sep = "/"
            if self.path.endswith("/"):
                sep = ""
            ret_path = "%s%s%s" % (self.path, sep, uc2utf8(path.path))
        return URL(urlparse.ParseResult(
            self.scheme or path.scheme, self.netloc or path.netloc, ret_path, path.params, path.query, path.fragment))
def make(url):
    """Backward compatibility wrapper around URL.objectify."""
    return URL.objectify(url)
|
wasw100/pycaldav
|
pycaldav/lib/url.py
|
Python
|
gpl-3.0
| 5,906
|
"""Test class for Subscriptions
:Requirement: Subscription
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: CLI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import tempfile
import csv
import os
from robottelo import manifests
from robottelo.cli.activationkey import ActivationKey
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.csv_ import CSV_
from robottelo.cli.factory import (
activationkey_add_subscription_to_repo,
make_activation_key,
make_lifecycle_environment,
make_org,
setup_org_for_a_rh_repo,
)
from robottelo.cli.host import Host
from robottelo.cli.repository import Repository
from robottelo.cli.repository_set import RepositorySet
from robottelo.cli.subscription import Subscription
from robottelo.constants import (
PRDS,
REPOS,
REPOSET,
DEFAULT_SUBSCRIPTION_NAME,
SATELLITE_SUBSCRIPTION_NAME,
)
from robottelo.decorators import (
run_in_one_thread,
skip_if_bug_open,
tier1,
tier2,
tier3,
upgrade
)
from robottelo.ssh import download_file, upload_file
from robottelo.test import CLITestCase
from robottelo.vm import VirtualMachine
@run_in_one_thread
class SubscriptionTestCase(CLITestCase):
"""Manifest CLI tests"""
    def setUp(self):
        """Create a fresh organization for each test."""
        super(SubscriptionTestCase, self).setUp()
        # A dedicated org per test keeps manifests from colliding.
        self.org = make_org()
# pylint: disable=no-self-use
def _upload_manifest(self, org_id, manifest=None):
"""Uploads a manifest into an organization.
A cloned manifest will be used if ``manifest`` is None.
"""
if manifest is None:
manifest = manifests.clone()
self.upload_manifest(org_id, manifest)
@staticmethod
def _read_csv_file(file_path):
"""Read a csv file as a dictionary
:param str file_path: The file location path to read as csv
:returns a tuple (list, list[dict]) that represent field_names, data
"""
csv_data = []
with open(file_path, 'r') as csv_file:
csv_reader = csv.DictReader(csv_file, delimiter=',')
field_names = csv_reader.fieldnames
for csv_row in csv_reader:
csv_data.append(csv_row)
return field_names, csv_data
@staticmethod
def _write_csv_file(file_path, filed_names, csv_data):
"""Write to csv file
:param str file_path: The file location path to write as csv
:param list filed_names: The field names to be written
:param list[dict] csv_data: the list dict data to be saved
"""
with open(file_path, 'w') as csv_file:
csv_writer = csv.DictWriter(csv_file, filed_names, delimiter=',')
csv_writer.writeheader()
for csv_row in csv_data:
csv_writer.writerow(csv_row)
    @tier1
    def test_positive_manifest_upload(self):
        """upload manifest
        :id: e5a0e4f8-fed9-4896-87a0-ac33f6baa227
        :expectedresults: Manifest are uploaded properly
        :CaseImportance: Critical
        """
        self._upload_manifest(self.org['id'])
        # Listing the org's subscriptions verifies the manifest was imported.
        Subscription.list(
            {'organization-id': self.org['id']},
            per_page=False,
        )
    @tier1
    @upgrade
    def test_positive_manifest_delete(self):
        """Delete uploaded manifest
        :id: 01539c07-00d5-47e2-95eb-c0fd4f39090f
        :expectedresults: Manifest are deleted properly
        :CaseImportance: Critical
        """
        self._upload_manifest(self.org['id'])
        # Sanity check: subscriptions are listable right after the upload.
        Subscription.list(
            {'organization-id': self.org['id']},
            per_page=False,
        )
        Subscription.delete_manifest({
            'organization-id': self.org['id'],
        })
        # Listing must still work after the manifest is gone.
        Subscription.list(
            {'organization-id': self.org['id']},
            per_page=False,
        )
    @tier2
    @upgrade
    def test_positive_enable_manifest_reposet(self):
        """enable repository set
        :id: cc0f8f40-5ea6-4fa7-8154-acdc2cb56b45
        :expectedresults: you are able to enable and synchronize repository
            contained in a manifest
        :CaseLevel: Integration
        :CaseImportance: Critical
        """
        self._upload_manifest(self.org['id'])
        Subscription.list(
            {'organization-id': self.org['id']},
            per_page=False,
        )
        # Enable a repository set brought in by the manifest ...
        RepositorySet.enable({
            'basearch': 'x86_64',
            'name': REPOSET['rhva6'],
            'organization-id': self.org['id'],
            'product': PRDS['rhel'],
            'releasever': '6Server',
        })
        # ... and make sure the enabled repository can be synchronized.
        Repository.synchronize({
            'name': REPOS['rhva6']['name'],
            'organization-id': self.org['id'],
            'product': PRDS['rhel'],
        })
    @tier1
    def test_positive_manifest_history(self):
        """upload manifest and check history
        :id: 000ab0a0-ec1b-497a-84ff-3969a965b52c
        :expectedresults: Manifest history is shown properly
        :CaseImportance: Critical
        """
        self._upload_manifest(self.org['id'])
        # NOTE(review): sibling tests pass per_page=False here; confirm
        # whether per_page=None is intentional.
        Subscription.list(
            {'organization-id': self.org['id']},
            per_page=None,
        )
        history = Subscription.manifest_history({
            'organization-id': self.org['id'],
        })
        # The import entry embeds the org name:
        # "<org name> file imported successfully."
        self.assertIn(
            '{0} file imported successfully.'.format(self.org['name']),
            ''.join(history),
        )
    @tier1
    @upgrade
    def test_positive_manifest_refresh(self):
        """upload manifest and refresh
        :id: 579bbbf7-11cf-4d78-a3b1-16d73bd4ca57
        :expectedresults: Manifests can be refreshed
        :CaseImportance: Critical
        """
        # Refresh only works with the original (non-cloned) manifest;
        # see test_negative_manifest_refresh for the cloned case.
        self._upload_manifest(
            self.org['id'], manifests.original_manifest())
        Subscription.list(
            {'organization-id': self.org['id']},
            per_page=False,
        )
        Subscription.refresh_manifest({
            'organization-id': self.org['id'],
        })
        # Clean up: detach the manifest from the org.
        Subscription.delete_manifest({
            'organization-id': self.org['id'],
        })
    @skip_if_bug_open('bugzilla', 1226425)
    @tier1
    def test_negative_manifest_refresh(self):
        """manifest refresh must fail with a cloned manifest
        :id: 7f40795f-7841-4063-8a43-de0325c92b1f
        :expectedresults: the refresh command returns a non-zero return code
        :BZ: 1226425
        :CaseImportance: Critical
        """
        # _upload_manifest defaults to a cloned manifest, which must not
        # be refreshable.
        self._upload_manifest(self.org['id'])
        Subscription.list(
            {'organization-id': self.org['id']},
            per_page=False,
        )
        with self.assertRaises(CLIReturnCodeError):
            Subscription.refresh_manifest({
                'organization-id': self.org['id'],
            })
    @tier3
    def test_positive_restore_ak_and_content_hosts_subscriptions(self):
        """Restore activation key and content hosts subscriptions
        :id: a44fdeda-9c8c-4316-85b4-a9b6b9f1ffdb
        :customerscenario: true
        :steps:
            1. Setup activation key , lifecycle environment and content view
               with RH repository
            2. Add RH subscription to activation key
            3. Setup hosts (minimum two) and subscribe them to activation key
            4. Attach RH subscription to the created content hosts
            5. export the activation key and content hosts subscriptions
            6. Delete the subscription manifest
            7. Ensure that the activation key and content hosts subscriptions
               does not exist
            8. Upload the subscription manifest
            9. Ensure the activation key and content hosts subscriptions does
               not exist
            10. Restore the activation key and content hosts subscriptions
        :expectedresults: activation key and content hosts subscriptions
            restored
        :CaseImportance: Critical
        """
        # --- setup: lifecycle env + activation key without auto-attach ---
        lce = make_lifecycle_environment({'organization-id': self.org['id']})
        activation_key = make_activation_key({
            'organization-id': self.org['id'],
            'lifecycle-environment-id': lce['id'],
        })
        ActivationKey.update({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
            'auto-attach': 'false',
        })
        setup_org_for_a_rh_repo({
            'product': PRDS['rhel'],
            'repository-set': REPOSET['rhst7'],
            'repository': REPOS['rhst7']['name'],
            'organization-id': self.org['id'],
            'lifecycle-environment-id': lce['id'],
            'activationkey-id': activation_key['id'],
        }, force_use_cdn=True)
        # find the id of the default subscription in the org
        org_subs = Subscription.list({u'organization-id': self.org['id']})
        default_subscription_id = None
        for sub in org_subs:
            if sub['name'] == DEFAULT_SUBSCRIPTION_NAME:
                default_subscription_id = sub['id']
                break
        self.assertIsNotNone(
            default_subscription_id, msg='Default subscription not found')
        ak_subs = ActivationKey.subscriptions({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
        }, output_format='json')
        self.assertIn(
            DEFAULT_SUBSCRIPTION_NAME, [sub['name'] for sub in ak_subs])
        with VirtualMachine() as client1, VirtualMachine() as client2:
            hosts = []
            # register both clients and attach the default subscription
            for client in [client1, client2]:
                client.install_katello_ca()
                client.register_contenthost(
                    self.org['label'], activation_key=activation_key['name'])
                self.assertTrue(client.subscribed)
                host = Host.info({'name': client.hostname})
                hosts.append(host)
                Host.subscription_attach({
                    'host-id': host['id'],
                    'subscription-id': default_subscription_id,
                })
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertIn(
                    DEFAULT_SUBSCRIPTION_NAME,
                    [sub['name'] for sub in host_subscriptions]
                )
            # export the current activations and content hosts subscriptions
            ak_file_path = '/tmp/ak_{0}.csv'.format(self.org['label'])
            ch_file_path = '/tmp/content_hosts_{0}.csv'.format(
                self.org['label'])
            CSV_.activation_keys({
                'export': True,
                'file': ak_file_path,
                'organization': self.org['name'],
                'itemized-subscriptions': True,
            })
            CSV_.content_hosts({
                'export': True,
                'file': ch_file_path,
                'organization': self.org['name'],
                'itemized-subscriptions': True,
            })
            # delete the manifest
            Subscription.delete_manifest({'organization-id': self.org['id']})
            # ensure that the subscription does not exist any more
            ak_subs = ActivationKey.subscriptions({
                'organization-id': self.org['id'],
                'id': activation_key['id'],
            }, output_format='json')
            self.assertNotIn(
                DEFAULT_SUBSCRIPTION_NAME, [sub['name'] for sub in ak_subs])
            for host in hosts:
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertNotIn(
                    DEFAULT_SUBSCRIPTION_NAME,
                    [sub['name'] for sub in host_subscriptions]
                )
            # upload the manifest again
            self._upload_manifest(self.org['id'])
            # ensure that the subscription was not auto attached
            ak_subs = ActivationKey.subscriptions({
                'organization-id': self.org['id'],
                'id': activation_key['id'],
            }, output_format='json')
            self.assertNotIn(
                DEFAULT_SUBSCRIPTION_NAME, [sub['name'] for sub in ak_subs])
            for host in hosts:
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertNotIn(
                    DEFAULT_SUBSCRIPTION_NAME,
                    [sub['name'] for sub in host_subscriptions]
                )
            # restore from the saved activation key and content hosts
            # subscriptions
            CSV_.activation_keys({
                'file': ak_file_path,
                'organization': self.org['name'],
                'itemized-subscriptions': True,
            })
            CSV_.content_hosts({
                'file': ch_file_path,
                'organization': self.org['name'],
                'itemized-subscriptions': True,
            })
            # ensure that the subscriptions has been restored
            ak_subs = ActivationKey.subscriptions({
                'organization-id': self.org['id'],
                'id': activation_key['id'],
            }, output_format='json')
            self.assertIn(
                DEFAULT_SUBSCRIPTION_NAME, [sub['name'] for sub in ak_subs])
            for host in hosts:
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertIn(
                    DEFAULT_SUBSCRIPTION_NAME,
                    [sub['name'] for sub in host_subscriptions]
                )
    @tier3
    def test_positive_restore_content_hosts_with_modified_subscription(self):
        """Restore content hosts subscription from an exported content host csv
        file with modified subscription.
        :id: d8ac08fe-24e0-41e7-b3d8-0ca13a702a64
        :customerscenario: true
        :steps:
            1. Setup activation key , lifecycle environment and content view
               with RH tools repository
            2. Setup hosts (minimum two) and subscribe them to activation key
            3. Attach RH subscription to the created content hosts
            4. Export the organization content hosts to a csv file
            5. Create a new csv file and modify the subscription with an other
               one (the new subscription must have other data than the default
               one)
            6. Import the new csv file to organization content hosts
        :expectedresults: content hosts restored with the new subscription
        :BZ: 1296978
        :CaseImportance: Critical
        """
        lce = make_lifecycle_environment({'organization-id': self.org['id']})
        activation_key = make_activation_key({
            'organization-id': self.org['id'],
            'lifecycle-environment-id': lce['id'],
        })
        ActivationKey.update({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
            'auto-attach': 'false',
        })
        # Create RH tools repository and contents, this step should upload
        # the default manifest
        setup_org_for_a_rh_repo({
            'product': PRDS['rhel'],
            'repository-set': REPOSET['rhst7'],
            'repository': REPOS['rhst7']['name'],
            'organization-id': self.org['id'],
            'lifecycle-environment-id': lce['id'],
            'activationkey-id': activation_key['id'],
        }, force_use_cdn=True)
        # Export and download the organization subscriptions to prepare the new
        # subscription (The replacement of the default subscription)
        org_subs_csv_filename = 'subs_{0}.csv'.format(self.org['name'])
        org_subs_csv_remote_file_path = '/tmp/{0}'.format(
            org_subs_csv_filename)
        # export organization subscription to csv file
        CSV_.subscriptions({
            'export': True,
            'file': org_subs_csv_remote_file_path,
            'organization': self.org['name'],
        })
        # download the organization subscriptions
        org_subs_csv_local_file_path = os.path.join(
            tempfile.gettempdir(), org_subs_csv_filename)
        download_file(
            org_subs_csv_remote_file_path, org_subs_csv_local_file_path)
        _, org_subscriptions = self._read_csv_file(
            org_subs_csv_local_file_path)
        # pick the Satellite subscription as the replacement
        new_subscription = None
        for sub in org_subscriptions:
            if sub['Subscription Name'] == SATELLITE_SUBSCRIPTION_NAME:
                new_subscription = sub
                break
        self.assertIsNotNone(new_subscription)
        # retrieve the default subscription id
        org_subs = Subscription.list({u'organization-id': self.org['id']})
        default_subscription_id = None
        for sub in org_subs:
            if sub['name'] == DEFAULT_SUBSCRIPTION_NAME:
                default_subscription_id = sub['id']
                break
        self.assertIsNotNone(
            default_subscription_id, msg='Default subscription not found')
        # create 2 Virtual machines
        with VirtualMachine() as client1, VirtualMachine() as client2:
            hosts = []
            for client in [client1, client2]:
                client.install_katello_ca()
                client.register_contenthost(
                    self.org['label'], activation_key=activation_key['name'])
                self.assertTrue(client.subscribed)
                host = Host.info({'name': client.hostname})
                hosts.append(host)
                Host.subscription_attach({
                    'host-id': host['id'],
                    'subscription-id': default_subscription_id,
                })
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertEqual(len(host_subscriptions), 1)
                self.assertEqual(
                    host_subscriptions[0]['name'], DEFAULT_SUBSCRIPTION_NAME)
            # export the content host data to csv file
            chs_export_file_name = 'chs_export_{0}.csv'.format(
                self.org['label'])
            chs_export_remote_file_path = (
                '/tmp/{0}'.format(chs_export_file_name)
            )
            CSV_.content_hosts({
                'export': True,
                'file': chs_export_remote_file_path,
                'organization': self.org['name'],
            })
            # download the csv file
            chs_export_local_file_path = os.path.join(
                tempfile.gettempdir(), chs_export_file_name)
            download_file(
                chs_export_remote_file_path, chs_export_local_file_path)
            # modify the content hosts subscription
            field_names, csv_data = self._read_csv_file(
                chs_export_local_file_path)
            # each client is represented by one row of data
            self.assertEqual(len(csv_data), 2)
            for row_data in csv_data:
                # The subscription is saved in the following format:
                # """<quantity>|<sku>|<name>|<contract>|<account>"""
                subscription_data = row_data['Subscriptions'].strip(
                    '"').split('|')
                # change the subscription SKU (looks like RH00001)
                subscription_data[1] = new_subscription['Subscription SKU']
                # change the name
                subscription_data[2] = new_subscription['Subscription Name']
                # change the contract number
                subscription_data[3] = new_subscription[
                    'Subscription Contract']
                # change the subscription account
                subscription_data[4] = new_subscription[
                    'Subscription Account']
                # modify the subscription data
                row_data['Subscriptions'] = '"{0}"'.format(
                    '|'.join(subscription_data))
            # generate a new csv file (same column layout as the export)
            chs_import_file_name = 'chs_import_{0}.csv'.format(
                self.org['name'])
            chs_import_local_file_path = os.path.join(
                tempfile.gettempdir(), chs_import_file_name)
            self._write_csv_file(
                chs_import_local_file_path, field_names, csv_data)
            # upload the file
            chs_import_remote_file_path = (
                '/tmp/{0}'.format(chs_import_file_name)
            )
            upload_file(
                chs_import_local_file_path, chs_import_remote_file_path)
            # import content hosts data from csv file
            CSV_.content_hosts({
                'file': chs_import_remote_file_path,
                'organization': self.org['name'],
            })
            # every host must now carry only the replacement subscription
            for host in hosts:
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertEqual(len(host_subscriptions), 1)
                self.assertEqual(
                    host_subscriptions[0]['name'], SATELLITE_SUBSCRIPTION_NAME)
                self.assertEqual(
                    host_subscriptions[0]['contract'],
                    new_subscription['Subscription Contract'])
                self.assertEqual(
                    host_subscriptions[0]['account'],
                    new_subscription['Subscription Account'])
    @tier3
    def test_positive_restore_ak_with_modified_subscription(self):
        """Restore activation key subscription from an exported activation key
        csv file with modified subscription.
        :id: 40b86d1c-88f8-451c-bf19-c5bf11223cb6
        :steps:
            1. Upload a manifest
            2. Create an activation key
            3. Attach RH subscription to the created activation key
            4. Export the organization activation keys to a csv file
            5. Create a new csv file and modify the subscription with an other
               one (the new subscription must have other data than the default
               one)
            6. Import the new csv file to organization activation keys
        :expectedresults: activation key restored with the new subscription
        :BZ: 1296978
        :CaseImportance: Critical
        """
        # upload the organization default manifest
        self._upload_manifest(self.org['id'])
        # Export and download the organization subscriptions to prepare the new
        # subscription (The replacement of the default subscription)
        org_subs_csv_filename = 'subs_{0}.csv'.format(self.org['name'])
        org_subs_csv_remote_file_path = '/tmp/{0}'.format(
            org_subs_csv_filename)
        # export organization subscription to csv file
        CSV_.subscriptions({
            'export': True,
            'file': org_subs_csv_remote_file_path,
            'organization': self.org['name'],
        })
        # download the organization subscriptions
        org_subs_csv_local_file_path = os.path.join(
            tempfile.gettempdir(), org_subs_csv_filename)
        download_file(
            org_subs_csv_remote_file_path, org_subs_csv_local_file_path)
        _, org_subscriptions = self._read_csv_file(
            org_subs_csv_local_file_path)
        # pick the Satellite subscription as the replacement
        new_subscription = None
        for sub in org_subscriptions:
            if sub['Subscription Name'] == SATELLITE_SUBSCRIPTION_NAME:
                new_subscription = sub
                break
        self.assertIsNotNone(new_subscription)
        # Create an activation key and add the default subscription
        activation_key = make_activation_key({
            'organization-id': self.org['id'],
        })
        activationkey_add_subscription_to_repo({
            'organization-id': self.org['id'],
            'activationkey-id': activation_key['id'],
            'subscription': DEFAULT_SUBSCRIPTION_NAME,
        })
        org_subs = Subscription.list({u'organization-id': self.org['id']})
        default_subscription_id = None
        for sub in org_subs:
            if sub['name'] == DEFAULT_SUBSCRIPTION_NAME:
                default_subscription_id = sub['id']
                break
        self.assertIsNotNone(
            default_subscription_id, msg='Default subscription not found')
        ak_subs = ActivationKey.subscriptions({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
        }, output_format='json')
        self.assertEqual(len(ak_subs), 1)
        self.assertEqual(
            ak_subs[0]['name'], DEFAULT_SUBSCRIPTION_NAME)
        # export activation key data to csv file
        ak_export_file_name = 'ak_{0}_{1}_export.csv'.format(
            self.org['name'], activation_key['name'])
        ak_remote_export_file_path = '/tmp/{0}'.format(ak_export_file_name)
        CSV_.activation_keys({
            'export': True,
            'file': ak_remote_export_file_path,
            'organization': self.org['name'],
        })
        # download the file to local temp dir
        ak_local_export_file_path = os.path.join(
            tempfile.gettempdir(), ak_export_file_name)
        download_file(
            ak_remote_export_file_path, local_file=ak_local_export_file_path)
        # modify the file with new subscription data and upload it
        field_names, csv_ak_data = self._read_csv_file(
            ak_local_export_file_path)
        self.assertEqual(len(csv_ak_data), 1)
        csv_ak_data = csv_ak_data[0]
        # NOTE(review): field_names from the reader is discarded here in
        # favour of the row's keys - confirm both carry the same columns.
        field_names = csv_ak_data.keys()
        self.assertIn('Subscriptions', field_names)
        self.assertIn('Subscriptions', csv_ak_data)
        # The subscription is saved in the following format:
        # """<quantity>|<sku>|<name>|<contract>|<account>"""
        subscription_data = csv_ak_data['Subscriptions'].strip('"').split('|')
        # change the subscription SKU (looks like RH00001)
        subscription_data[1] = new_subscription['Subscription SKU']
        # change the name
        subscription_data[2] = new_subscription['Subscription Name']
        # change the contract number
        subscription_data[3] = new_subscription['Subscription Contract']
        # change the subscription account
        subscription_data[4] = new_subscription['Subscription Account']
        # modify the subscription data and generate a new csv file
        csv_ak_data['Subscriptions'] = '"{0}"'.format(
            '|'.join(subscription_data))
        ak_import_file_name = 'ak_{0}_{1}_import.csv'.format(
            self.org['name'], activation_key['name'])
        ak_local_import_file_path = os.path.join(
            tempfile.gettempdir(), ak_import_file_name)
        self._write_csv_file(
            ak_local_import_file_path, field_names, [csv_ak_data])
        # upload the generated file
        ak_remote_import_file_path = '/tmp/{0}'.format(ak_import_file_name)
        upload_file(ak_local_import_file_path, ak_remote_import_file_path)
        # import the generated csv file
        CSV_.activation_keys({
            'file': ak_remote_import_file_path,
            'organization': self.org['name'],
        })
        # the activation key must now carry only the replacement subscription
        ak_subs = ActivationKey.subscriptions({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
        }, output_format='json')
        self.assertEqual(len(ak_subs), 1)
        self.assertEqual(
            ak_subs[0]['name'], SATELLITE_SUBSCRIPTION_NAME)
        self.assertEqual(
            ak_subs[0]['contract'],
            new_subscription['Subscription Contract'])
        self.assertEqual(
            ak_subs[0]['account'], new_subscription['Subscription Account'])
|
sghai/robottelo
|
tests/foreman/cli/test_subscription.py
|
Python
|
gpl-3.0
| 28,458
|
import logging
from time import strftime
def closed():
    """Log that the Headlights process has shut down."""
    logging.info('Headlights process stopped')
def criterr(errortext):
    """Log a fatal error and terminate the process.

    Exits with status 1 so shells/supervisors can detect the failure;
    the original bare ``exit()`` terminated with status 0 even though
    the error was fatal.
    """
    # Lazy %-args: the message is only formatted if CRITICAL is enabled.
    logging.critical('A fatal error occured :: %s', errortext)
    raise SystemExit(1)
def err(errortext):
    """Log a non-fatal error."""
    # Lazy %-args instead of eager string concatenation; same final message.
    logging.error('An error occured :: %s', errortext)
def warn(errortext):
    """Log the given text verbatim at WARNING level."""
    logging.warning(errortext)
def inf(errortext):
    """Log the given text verbatim at INFO level."""
    logging.info(errortext)
def debug(errortext):
    """Log the given text verbatim at DEBUG level."""
    logging.debug(errortext)
|
mashedkeyboard/Headlights
|
handlers.py
|
Python
|
gpl-3.0
| 429
|
#!/usr/bin/python3
# This file is part of Epoptes, http://epoptes.org
# Copyright 2012-2018 the Epoptes team, see AUTHORS.
# SPDX-License-Identifier: GPL-3.0-or-later
"""
Display a simple window with a message.
"""
import os
import sys
from _common import gettext as _
from gi.repository import Gtk
class MessageWindow(Gtk.Window):
    """Display a simple window with a message.

    The window shows an icon, a selectable text label (optionally with
    Pango markup), and a Close button. Both the button and the Escape
    key quit the Gtk main loop.
    """

    def __init__(self, text, title="Epoptes", markup=True,
                 icon_name="dialog-information"):
        # text      - message body; interpreted as Pango markup if `markup`
        # title     - window title
        # markup    - whether to render `text` as Pango markup
        # icon_name - themed icon shown both in the titlebar and the body
        super().__init__(title=title, icon_name=icon_name)
        self.set_position(Gtk.WindowPosition.CENTER)
        grid = Gtk.Grid(column_spacing=10, row_spacing=10, margin=10)
        self.add(grid)
        image = Gtk.Image.new_from_icon_name(icon_name, Gtk.IconSize.DIALOG)
        grid.add(image)
        # Always load the plain text first in case the markup parsing fails
        label = Gtk.Label(
            label=text, selectable=True, hexpand=True, vexpand=True,
            halign=Gtk.Align.START, valign=Gtk.Align.START)
        if markup:
            label.set_markup(text)
        grid.add(label)
        # NOTE(review): Gtk.Button.new_from_stock is deprecated since
        # GTK 3.10; kept as-is to preserve behavior.
        button = Gtk.Button.new_from_stock(Gtk.STOCK_CLOSE)
        button.set_hexpand(False)
        button.set_halign(Gtk.Align.END)
        # Closing the dialog quits the whole Gtk main loop.
        button.connect("clicked", Gtk.main_quit)
        grid.attach(button, 1, 1, 2, 1)
        self.set_focus_child(button)
        # Allow dismissing the window with the Escape key as well.
        accelgroup = Gtk.AccelGroup()
        key, modifier = Gtk.accelerator_parse('Escape')
        accelgroup.connect(
            key, modifier, Gtk.AccelFlags.VISIBLE, Gtk.main_quit)
        self.add_accel_group(accelgroup)
def main():
    """Run the module from the command line.

    argv: text [title] [markup] [icon_name]
    Empty strings for title/markup fall back to the defaults, so callers
    may pass "" placeholders. `markup` enables Pango markup when it is
    the string "true" (case-insensitive).
    """
    if len(sys.argv) <= 1 or len(sys.argv) > 5:
        print(_("Usage: {} text [title] [markup] [icon_name]").format(
            os.path.basename(__file__)), file=sys.stderr)
        # sys.exit instead of the site-provided exit() builtin: `sys` is
        # already imported and exit() is absent under `python -S`.
        sys.exit(1)
    text = sys.argv[1]
    if len(sys.argv) > 2 and sys.argv[2]:
        title = sys.argv[2]
    else:
        title = "Epoptes"
    if len(sys.argv) > 3 and sys.argv[3]:
        markup = sys.argv[3].lower() == "true"
    else:
        markup = True
    if len(sys.argv) > 4:
        icon_name = sys.argv[4]
    else:
        icon_name = "dialog-information"
    window = MessageWindow(text, title, markup, icon_name)
    window.connect("destroy", Gtk.main_quit)
    window.show_all()
    Gtk.main()


if __name__ == '__main__':
    main()
|
Epoptes/epoptes
|
epoptes-client/message.py
|
Python
|
gpl-3.0
| 2,401
|
##############################################################################
#
# Copyright (c) 2004 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
$Id: test_adapter.py 67630 2006-04-27 00:54:03Z jim $
"""
import unittest
from zope.testing.doctestunit import DocTestSuite
def test_suite():
    """Build the suite: doctests collected from zope.security.adapter."""
    suite = unittest.TestSuite()
    suite.addTest(DocTestSuite('zope.security.adapter'))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
|
Donkyhotay/MoonPy
|
zope/security/tests/test_adapter.py
|
Python
|
gpl-3.0
| 948
|
########## recombination.py parameters
class Recombination_Parameters(object):
    """Static configuration for the recombination.py tool."""
    # Change these two values to the folders you prefer - use an absolute path e.g. /Users/Harry/fastq-data and
    # /Users/Harry/csv-data or a path relative to the tools directory.
    # You may use the same folder for input and output.
    input_folder = "data"
    output_folder = "data"
    # The number of bases to retrieve before the seed sequence
    HEAD = 10
    # The number of bases to retrieve after the seed sequences
    TAIL = 10
    # Seed sequences searched for in the reads, keyed by site name.
    seed_sequences = {
        "loxP": "ATAACTTCGTATAGCATACATTATACGAAGTTAT",
        "lox2272": "ATAACTTCGTATAGGATACTTTATACGAAGTTAT",
    }
########## serotypes.py parameters
class Serotypes_Parameters(object):
    """Static configuration for the serotypes.py tool."""
    # Change these two values to the folders you prefer - use an absolute path e.g. /Users/Harry/fastq-data and
    # /Users/Harry/csv-data or a path relative to the tools directory.
    # You may use the same folder for input and output.
    input_folder = "data"
    output_folder = "data"
    # These are the signatures that will be matched. The first part is the name, the part in brackets contains the
    # actual signatures, separated by a comma (each serotype can have multiple signatures)
    signatures = {
        "AAV1": [
            "AGTGCTTCAACGGGGGCCAG",
            "GGGCGTGAATCCATCATCAACCCTGG",
            "CCGGAGCTTCAAACACTGCATTGGACAAT"
        ],
        "AAV2": [
            "AGGCAACAGACAAGCAGCTACC",
            "AACAGACAAGCAGCTACCGCA"
        ],
        "AAV5": [
            "TCCAAGCCTTCCACCTCGTCAGACGCCGAA",
            "CACCAACAACCAGAGCTCCACCACTG",
            "GCCCGTCAGCAGCTTCATC"
        ],
        "AAV7": [
            "AGTGAAACTGCAGGTAGTACC"
        ],
        "AAV8": [
            "GCAAAACACGGCTCCTCAAAT",
            "CAGCAAGCGCTGGAACCCCGAGATCCAGTA",
            "AAATACCATCTGAATGGAAGAAATTCATTG",
            "CGTGGCAGATAACTTGCAGC",
            "ATCCTCCGACCACCTTCAACC"
        ],
        "AAV9": [
            "AGTGCCCAAGCACAGGCGCA",
            "ATCTCTCAAAGACTATTAAC",
            "GGCGAGCAGTCTTCCAGGCA"
        ],
        "AAVrh10": [
            "CTACAAATCTACAAATGTGGACTTTG"
        ],
        "PHPeB": [
            "CTTTGGCGGTGCCTTTTAAGGCACAGGCGCAGA"
        ],
        "PHPs": [
            "AGGCGGTTAGGACGTCTTTGGCACAGGCGCAGA"
        ],
        "AAVrg": [
            "TAGCAGACCAAGACTACACAAAAACTGCT"
        ],
    }
|
addgene/research
|
toolkit/parameters.py
|
Python
|
gpl-3.0
| 2,405
|
#
# Copyright (C) 2013,2014,2015,2016 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Tests particle property setters/getters
import unittest as ut
import espressomd
import numpy as np
from espressomd.magnetostatics import *
from tests_common import *
class MagnetostaticsInteractionsTests(ut.TestCase):
    """Setter/getter round-trip tests for magnetostatic actors."""

    # Handle to espresso system
    system = espressomd.System()

    def setUp(self):
        # Two dipolar particles are enough to instantiate every actor.
        self.system.box_l = 10, 10, 10
        if not self.system.part.exists(0):
            self.system.part.add(id=0, pos=(0.1, 0.1, 0.1), dip=(1.3, 2.1, -6))
        if not self.system.part.exists(1):
            self.system.part.add(id=1, pos=(0, 0, 0), dip=(7.3, 6.1, -4))

    # Test methods are generated at class-definition time, and only for the
    # features compiled into this ESPResSo build.
    if "DP3M" in espressomd.features():
        test_DP3M = generate_test_for_class(DipolarP3M, dict(prefactor=1.0,
                                                             epsilon=0.0,
                                                             inter=1000,
                                                             mesh_off=[0.5, 0.5, 0.5],
                                                             r_cut=2.4,
                                                             mesh=[8, 8, 8],
                                                             cao=1,
                                                             alpha=12,
                                                             accuracy=0.01))
    if "DIPOLAR_DIRECT_SUM" in espressomd.features():
        test_DdsCpu = generate_test_for_class(
            DipolarDirectSumCpu, dict(prefactor=3.4))
        test_DdsRCpu = generate_test_for_class(
            DipolarDirectSumWithReplicaCpu, dict(prefactor=3.4, n_replica=2))
if __name__ == "__main__":
    # Show which compile-time features are available, then hand control to
    # unittest's CLI runner.
    print("Features: ", espressomd.features())
    ut.main()
|
tbereau/espresso
|
testsuite/python/magnetostaticInteractions.py
|
Python
|
gpl-3.0
| 2,692
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, copy
import os
import json
from frappe.utils import cstr, flt, getdate
from frappe import _
from frappe.utils.file_manager import save_file
from .default_website import website_maker
import install_fixtures
from .sample_data import make_sample_data
from erpnext.accounts.doctype.account.account import RootNotEditable
from frappe.core.doctype.communication.comment import add_info_comment
from erpnext.setup.setup_wizard.domainify import setup_domain
def setup_complete(args=None):
    """Run the full ERPNext setup wizard using the collected *args*.

    Creates the company, fiscal year, defaults, master records and the
    optional website/sample data, committing as it goes. Raises if a
    Company already exists (i.e. setup already ran).
    """
    if frappe.db.sql("select name from tabCompany"):
        frappe.throw(_("Setup Already Complete!!"))

    install_fixtures.install(args.get("country"))

    create_price_lists(args)
    create_fiscal_year_and_company(args)
    create_sales_tax(args)
    create_users(args)
    set_defaults(args)
    create_territories()
    create_feed_and_todo()
    create_email_digest()
    create_letter_head(args)
    create_taxes(args)
    create_items(args)
    create_customers(args)
    create_suppliers(args)

    if args.get('setup_website'):
        website_maker(args)
        create_logo(args)

    frappe.local.message_log = []
    setup_domain(args.get('domain'))

    frappe.db.commit()
    login_as_first_user(args)

    frappe.db.commit()
    frappe.clear_cache()

    if args.get("add_sample_data"):
        try:
            make_sample_data(args)
            frappe.clear_cache()
        except Exception:
            # Sample data is best-effort: drop the error message and move on.
            # (Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.)
            if frappe.message_log:
                frappe.message_log.pop()
def create_fiscal_year_and_company(args):
    """Create the Fiscal Year and Company records from the wizard args."""
    if (args.get('fy_start_date')):
        curr_fiscal_year = get_fy_details(args.get('fy_start_date'), args.get('fy_end_date'))
        frappe.get_doc({
            "doctype":"Fiscal Year",
            'year': curr_fiscal_year,
            'year_start_date': args.get('fy_start_date'),
            'year_end_date': args.get('fy_end_date'),
        }).insert()
        # Remembered for set_defaults() later in the wizard run.
        args["curr_fiscal_year"] = curr_fiscal_year

    # Company
    if (args.get('company_name')):
        frappe.get_doc({
            "doctype":"Company",
            'company_name':args.get('company_name').strip(),
            'abbr':args.get('company_abbr'),
            'default_currency':args.get('currency'),
            'country': args.get('country'),
            'create_chart_of_accounts_based_on': 'Standard Template',
            'chart_of_accounts': args.get('chart_of_accounts'),
            'domain': args.get('domain')
        }).insert()

        #Enable shopping cart
        enable_shopping_cart(args)

        # Bank Account
        create_bank_account(args)
def enable_shopping_cart(args):
    """Enable the Shopping Cart singleton for the newly created company."""
    frappe.get_doc({
        "doctype": "Shopping Cart Settings",
        "enabled": 1,
        'company': args.get('company_name').strip(),
        # Use the first selling price list as the cart price list.
        'price_list': frappe.db.get_value("Price List", {"selling": 1}),
        'default_customer_group': _("Individual"),
        'quotation_series': "QTN-",
    }).insert()
def create_bank_account(args):
    """Create the company's bank account under the Bank group account, if requested."""
    if args.get("bank_account"):
        company_name = args.get('company_name').strip()
        # Locate the group account the bank account should be parented to.
        bank_account_group = frappe.db.get_value("Account",
            {"account_type": "Bank", "is_group": 1, "root_type": "Asset",
                "company": company_name})
        if bank_account_group:
            bank_account = frappe.get_doc({
                "doctype": "Account",
                'account_name': args.get("bank_account"),
                'parent_account': bank_account_group,
                'is_group':0,
                'company': company_name,
                "account_type": "Bank",
            })
            try:
                return bank_account.insert()
            except RootNotEditable:
                frappe.throw(_("Bank account cannot be named as {0}").format(args.get("bank_account")))
            except frappe.DuplicateEntryError:
                # bank account same as a CoA entry
                pass
def create_price_lists(args):
    """Create the two standard price lists (selling and buying)."""
    price_lists = (
        ("Selling", _("Standard Selling")),
        ("Buying", _("Standard Buying")),
    )
    for list_type, list_name in price_lists:
        frappe.get_doc({
            "doctype": "Price List",
            "price_list_name": list_name,
            "enabled": 1,
            "buying": 1 if list_type == "Buying" else 0,
            "selling": 1 if list_type == "Selling" else 0,
            "currency": args["currency"]
        }).insert()
def set_defaults(args):
    """Persist the wizard's choices into the various Settings singletons."""
    # enable default currency
    frappe.db.set_value("Currency", args.get("currency"), "enabled", 1)

    global_defaults = frappe.get_doc("Global Defaults", "Global Defaults")
    global_defaults.update({
        # curr_fiscal_year was stashed by create_fiscal_year_and_company().
        'current_fiscal_year': args.curr_fiscal_year,
        'default_currency': args.get('currency'),
        'default_company':args.get('company_name').strip(),
        "country": args.get("country"),
    })
    global_defaults.save()

    frappe.db.set_value("System Settings", None, "email_footer_address", args.get("company"))

    accounts_settings = frappe.get_doc("Accounts Settings")
    accounts_settings.auto_accounting_for_stock = 1
    accounts_settings.save()

    stock_settings = frappe.get_doc("Stock Settings")
    stock_settings.item_naming_by = "Item Code"
    stock_settings.valuation_method = "FIFO"
    stock_settings.default_warehouse = frappe.db.get_value('Warehouse', {'warehouse_name': _('Stores')})
    stock_settings.stock_uom = _("Nos")
    stock_settings.auto_indent = 1
    stock_settings.auto_insert_price_list_rate_if_missing = 1
    stock_settings.automatically_set_serial_nos_based_on_fifo = 1
    stock_settings.save()

    selling_settings = frappe.get_doc("Selling Settings")
    selling_settings.cust_master_name = "Customer Name"
    selling_settings.so_required = "No"
    selling_settings.dn_required = "No"
    selling_settings.allow_multiple_items = 1
    selling_settings.save()

    buying_settings = frappe.get_doc("Buying Settings")
    buying_settings.supp_master_name = "Supplier Name"
    buying_settings.po_required = "No"
    buying_settings.pr_required = "No"
    buying_settings.maintain_same_rate = 1
    buying_settings.allow_multiple_items = 1
    buying_settings.save()

    notification_control = frappe.get_doc("Notification Control")
    notification_control.quotation = 1
    notification_control.sales_invoice = 1
    notification_control.purchase_order = 1
    notification_control.save()

    hr_settings = frappe.get_doc("HR Settings")
    hr_settings.emp_created_by = "Naming Series"
    hr_settings.save()
def create_feed_and_todo():
    """update Activity feed and create todo for creation of item, customer, vendor"""
    add_info_comment(**{
        "subject": _("ERPNext Setup Complete!")
    })
def create_email_digest():
    """Create a weekly digest per company plus a daily scheduler-errors digest."""
    from frappe.utils.user import get_system_managers
    system_managers = get_system_managers(only_name=True)
    if not system_managers:
        return

    companies = frappe.db.sql_list("select name FROM `tabCompany`")
    for company in companies:
        if not frappe.db.exists("Email Digest", "Default Weekly Digest - " + company):
            edigest = frappe.get_doc({
                "doctype": "Email Digest",
                "name": "Default Weekly Digest - " + company,
                "company": company,
                "frequency": "Weekly",
                "recipient_list": "\n".join(system_managers)
            })

            # Turn on every Check field except the scheduler_errors flag.
            for df in edigest.meta.get("fields", {"fieldtype": "Check"}):
                if df.fieldname != "scheduler_errors":
                    edigest.set(df.fieldname, 1)

            edigest.insert()

    # scheduler errors digest
    if companies:
        edigest = frappe.new_doc("Email Digest")
        edigest.update({
            "name": "Scheduler Errors",
            "company": companies[0],
            "frequency": "Daily",
            "recipient_list": "\n".join(system_managers),
            "scheduler_errors": 1,
            "enabled": 1
        })
        edigest.insert()
def get_fy_details(fy_start_date, fy_end_date):
    """Return a fiscal-year label, e.g. "2015" or "2015-2016"."""
    start_year = getdate(fy_start_date).year
    end_year = getdate(fy_end_date).year
    if start_year == end_year:
        # Calendar-aligned fiscal year: a single-year label is enough.
        return cstr(start_year)
    return cstr(start_year) + '-' + cstr(start_year + 1)
def create_sales_tax(args):
    """Create the default sales-tax accounts/templates for the chosen country."""
    country_wise_tax = get_country_wise_tax(args.get("country"))
    if country_wise_tax and len(country_wise_tax) > 0:
        for sales_tax, tax_data in country_wise_tax.items():
            make_tax_account_and_template(args.get("company_name").strip(),
                tax_data.get('account_name'), tax_data.get('tax_rate'), sales_tax)
def get_country_wise_tax(country):
    """Return the default tax definitions for *country*.

    Reads data/country_wise_tax.json next to this module; returns None when
    the country has no entry.
    """
    json_path = os.path.join(os.path.dirname(__file__), "data", "country_wise_tax.json")
    with open(json_path) as countrywise_tax:
        return json.load(countrywise_tax).get(country)
def create_taxes(args):
    """Create tax accounts/templates from the wizard fields tax_1..tax_5."""
    for i in xrange(1,6):
        if args.get("tax_" + str(i)):
            # replace % in case someone also enters the % symbol
            tax_rate = cstr(args.get("tax_rate_" + str(i)) or "").replace("%", "")
            account_name = args.get("tax_" + str(i))

            make_tax_account_and_template(args.get("company_name").strip(), account_name, tax_rate)
def make_tax_account_and_template(company, account_name, tax_rate, template_name=None):
    """Create a tax account and, on success, the matching tax templates.

    NOTE(review): uses Python 2 `except X, e` syntax -- this module predates
    a Python 3 port; left byte-identical.
    """
    try:
        account = make_tax_account(company, account_name, tax_rate)
        if account:
            make_sales_and_purchase_tax_templates(account, template_name)
    except frappe.NameError, e:
        # MySQL duplicate-entry (error code 1062): account exists, ignore.
        if e.args[2][0]==1062:
            pass
        else:
            raise
    except RootNotEditable, e:
        pass
def get_tax_account_group(company):
    """Return the group account tax accounts should be parented under.

    Prefers the conventional "Duties and Taxes" group; falls back to any
    Liability group account of type Tax.
    """
    tax_group = frappe.db.get_value("Account",
        {"account_name": "Duties and Taxes", "is_group": 1, "company": company})
    if not tax_group:
        tax_group = frappe.db.get_value("Account", {"is_group": 1, "root_type": "Liability",
                "account_type": "Tax", "company": company})

    return tax_group
def make_tax_account(company, account_name, tax_rate):
    """Create a leaf Tax account under the company's tax group, if one exists."""
    tax_group = get_tax_account_group(company)
    if tax_group:
        return frappe.get_doc({
            "doctype":"Account",
            "company": company,
            "parent_account": tax_group,
            "account_name": account_name,
            "is_group": 0,
            "report_type": "Balance Sheet",
            "root_type": "Liability",
            "account_type": "Tax",
            # Empty/zero rates are stored as NULL rather than 0.
            "tax_rate": flt(tax_rate) if tax_rate else None
        }).insert(ignore_permissions=True)
def make_sales_and_purchase_tax_templates(account, template_name=None):
    """Create a Sales and a Purchase tax template pointing at *account*."""
    if not template_name:
        template_name = account.name

    sales_tax_template = {
        "doctype": "Sales Taxes and Charges Template",
        "title": template_name,
        "company": account.company,
        "taxes": [{
            "category": "Valuation and Total",
            "charge_type": "On Net Total",
            "account_head": account.name,
            "description": "{0} @ {1}".format(account.account_name, account.tax_rate),
            "rate": account.tax_rate
        }]
    }

    # Sales
    frappe.get_doc(copy.deepcopy(sales_tax_template)).insert(ignore_permissions=True)

    # Purchase: same template dict with only the doctype swapped.
    purchase_tax_template = copy.deepcopy(sales_tax_template)
    purchase_tax_template["doctype"] = "Purchase Taxes and Charges Template"
    frappe.get_doc(purchase_tax_template).insert(ignore_permissions=True)
def create_items(args):
    """Create up to five Item records (item_1..item_5) from the wizard args."""
    for i in xrange(1,6):
        item = args.get("item_" + str(i))
        if item:
            item_group = args.get("item_group_" + str(i))
            is_sales_item = args.get("is_sales_item_" + str(i))
            is_purchase_item = args.get("is_purchase_item_" + str(i))
            # Services are non-stock; everything else lives in a warehouse.
            is_stock_item = item_group!=_("Services")
            default_warehouse = ""
            if is_stock_item:
                default_warehouse = frappe.db.get_value("Warehouse", filters={
                    "warehouse_name": _("Finished Goods") if is_sales_item else _("Stores"),
                    "company": args.get("company_name").strip()
                })

            try:
                frappe.get_doc({
                    "doctype":"Item",
                    "item_code": item,
                    "item_name": item,
                    "description": item,
                    "show_in_website": 1,
                    "is_sales_item": is_sales_item,
                    "is_purchase_item": is_purchase_item,
                    "is_stock_item": is_stock_item and 1 or 0,
                    "item_group": item_group,
                    "stock_uom": args.get("item_uom_" + str(i)),
                    "default_warehouse": default_warehouse
                }).insert()

                # Optional image, passed as "filename,filetype,base64content".
                if args.get("item_img_" + str(i)):
                    item_image = args.get("item_img_" + str(i)).split(",")
                    if len(item_image)==3:
                        filename, filetype, content = item_image
                        fileurl = save_file(filename, content, "Item", item, decode=True).file_url
                        frappe.db.set_value("Item", item, "image", fileurl)

                if args.get("item_price_" + str(i)):
                    item_price = flt(args.get("item_price_" + str(i)))

                    if is_sales_item:
                        price_list_name = frappe.db.get_value("Price List", {"selling": 1})
                        make_item_price(item, price_list_name, item_price)

                    if is_purchase_item:
                        price_list_name = frappe.db.get_value("Price List", {"buying": 1})
                        make_item_price(item, price_list_name, item_price)

            except frappe.NameError:
                # Item already exists -- skip it silently.
                pass
def make_item_price(item, price_list_name, item_price):
    """Create an Item Price entry for *item* on the given price list."""
    frappe.get_doc({
        "doctype": "Item Price",
        "price_list": price_list_name,
        "item_code": item,
        "price_list_rate": item_price
    }).insert()
def create_customers(args):
    """Create up to five Customer records (customer_1..customer_5)."""
    for i in xrange(1,6):
        customer = args.get("customer_" + str(i))
        if customer:
            try:
                doc = frappe.get_doc({
                    "doctype":"Customer",
                    "customer_name": customer,
                    "customer_type": "Company",
                    "customer_group": _("Commercial"),
                    "territory": args.get("country"),
                    "company": args.get("company_name").strip()
                }).insert()

                if args.get("customer_contact_" + str(i)):
                    create_contact(args.get("customer_contact_" + str(i)),
                        "Customer", doc.name)
            except frappe.NameError:
                # Customer already exists -- skip it silently.
                pass
def create_suppliers(args):
    """Create up to five Supplier records (supplier_1..supplier_5)."""
    for i in xrange(1,6):
        supplier = args.get("supplier_" + str(i))
        if supplier:
            try:
                doc = frappe.get_doc({
                    "doctype":"Supplier",
                    "supplier_name": supplier,
                    "supplier_type": _("Local"),
                    "company": args.get("company_name").strip()
                }).insert()

                if args.get("supplier_contact_" + str(i)):
                    create_contact(args.get("supplier_contact_" + str(i)),
                        "Supplier", doc.name)
            except frappe.NameError:
                # Supplier already exists -- skip it silently.
                pass
def create_contact(contact, party_type, party):
    """Create contact based on given contact name"""
    name_parts = contact.strip().split(" ")
    first_name = name_parts[0]
    # Everything after the first space is treated as the last name;
    # a single-word name gets an empty last name.
    last_name = name_parts[1] if len(name_parts) > 1 and name_parts[1] else ""
    doc = frappe.get_doc({
        "doctype": "Contact",
        "first_name": first_name,
        "last_name": last_name,
    })
    doc.append('links', dict(link_doctype=party_type, link_name=party))
    doc.insert()
def create_letter_head(args):
    """Create the default Letter Head and attach the uploaded image, if any."""
    if args.get("attach_letterhead"):
        frappe.get_doc({
            "doctype":"Letter Head",
            "letter_head_name": _("Standard"),
            "is_default": 1
        }).insert()

        # Attachment arrives as "filename,filetype,base64content".
        attach_letterhead = args.get("attach_letterhead").split(",")
        if len(attach_letterhead)==3:
            filename, filetype, content = attach_letterhead
            fileurl = save_file(filename, content, "Letter Head", _("Standard"), decode=True).file_url
            frappe.db.set_value("Letter Head", _("Standard"), "content", "<img src='%s' style='max-width: 100%%;'>" % fileurl)
def create_logo(args):
    """Save the uploaded logo and set it as the website brand image."""
    if args.get("attach_logo"):
        # Attachment arrives as "filename,filetype,base64content".
        attach_logo = args.get("attach_logo").split(",")
        if len(attach_logo)==3:
            filename, filetype, content = attach_logo
            fileurl = save_file(filename, content, "Website Settings", "Website Settings",
                decode=True).file_url
            frappe.db.set_value("Website Settings", "Website Settings", "brand_html",
                "<img src='{0}' style='max-width: 40px; max-height: 25px;'> {1}".format(fileurl, args.get("company_name").strip()))
def create_territories():
    """create two default territories, one for home country and one named Rest of the World"""
    from frappe.utils.nestedset import get_root_of
    country = frappe.db.get_default("country")
    root_territory = get_root_of("Territory")

    for name in (country, _("Rest Of The World")):
        if name and not frappe.db.exists("Territory", name):
            frappe.get_doc({
                "doctype": "Territory",
                # Apostrophes break territory names (e.g. "Cote d'Ivoire").
                "territory_name": name.replace("'", ""),
                "parent_territory": root_territory,
                "is_group": "No"
            }).insert()
def login_as_first_user(args):
    """Switch the session to the wizard's first user (web requests only)."""
    # login_manager only exists on frappe.local during an HTTP request.
    if args.get("email") and hasattr(frappe.local, "login_manager"):
        frappe.local.login_manager.login_as(args.get("email"))
def create_users(args):
    """Create User + Employee records for the wizard's extra users."""
    if frappe.session.user == 'Administrator':
        return

    # create employee for self
    emp = frappe.get_doc({
        "doctype": "Employee",
        "employee_name": " ".join(filter(None, [args.get("first_name"), args.get("last_name")])),
        "user_id": frappe.session.user,
        "status": "Active",
        "company": args.get("company_name")
    })
    emp.flags.ignore_mandatory = True
    emp.insert(ignore_permissions = True)

    # NOTE(review): this loop stops at user 4 (xrange(1,5)) while the other
    # wizard loops use xrange(1,6) -- possibly an off-by-one; confirm intent.
    for i in xrange(1,5):
        email = args.get("user_email_" + str(i))
        fullname = args.get("user_fullname_" + str(i))
        if email:
            if not fullname:
                fullname = email.split("@")[0]

            parts = fullname.split(" ", 1)
            user = frappe.get_doc({
                "doctype": "User",
                "email": email,
                "first_name": parts[0],
                "last_name": parts[1] if len(parts) > 1 else "",
                "enabled": 1,
                "user_type": "System User"
            })

            # default roles
            user.append_roles("Projects Manager", "Stock User", "Support Team")

            if args.get("user_sales_" + str(i)):
                user.append_roles("Sales User", "Sales Manager", "Accounts User")
            if args.get("user_purchaser_" + str(i)):
                user.append_roles("Purchase User", "Purchase Manager", "Accounts User")
            if args.get("user_accountant_" + str(i)):
                user.append_roles("Accounts Manager", "Accounts User")

            user.flags.delay_emails = True

            if not frappe.db.get_value("User", email):
                user.insert(ignore_permissions=True)

                # create employee
                emp = frappe.get_doc({
                    "doctype": "Employee",
                    "employee_name": fullname,
                    "user_id": email,
                    "status": "Active",
                    "company": args.get("company_name")
                })
                emp.flags.ignore_mandatory = True
                emp.insert(ignore_permissions = True)
def create_academic_term():
    """Seed three terms for each of the five default academic years."""
    at = ["Semester 1", "Semester 2", "Semester 3"]
    ay = ["2013-14", "2014-15", "2015-16", "2016-17", "2017-18"]
    for y in ay:
        for t in at:
            academic_term = frappe.new_doc("Academic Term")
            academic_term.academic_year = y
            academic_term.term_name = t
            try:
                academic_term.save()
            except frappe.DuplicateEntryError:
                # Already seeded on a previous run.
                pass
def create_academic_year():
    """Seed the five default academic years."""
    ac = ["2013-14", "2014-15", "2015-16", "2016-17", "2017-18"]
    for d in ac:
        academic_year = frappe.new_doc("Academic Year")
        academic_year.academic_year_name = d
        try:
            academic_year.save()
        except frappe.DuplicateEntryError:
            # Already seeded on a previous run.
            pass
def create_program(args):
    """Create up to five Program records (program_1..program_5)."""
    for i in xrange(1,6):
        if args.get("program_" + str(i)):
            program = frappe.new_doc("Program")
            program.program_code = args.get("program_" + str(i))
            program.program_name = args.get("program_" + str(i))
            try:
                program.save()
            except frappe.DuplicateEntryError:
                pass
def create_course(args):
    """Create up to five Course records (course_1..course_5)."""
    for i in xrange(1,6):
        if args.get("course_" + str(i)):
            course = frappe.new_doc("Course")
            course.course_code = args.get("course_" + str(i))
            course.course_name = args.get("course_" + str(i))
            try:
                course.save()
            except frappe.DuplicateEntryError:
                pass
def create_instructor(args):
    """Create up to five Instructor records (instructor_1..instructor_5)."""
    for i in xrange(1,6):
        if args.get("instructor_" + str(i)):
            instructor = frappe.new_doc("Instructor")
            instructor.instructor_name = args.get("instructor_" + str(i))
            try:
                instructor.save()
            except frappe.DuplicateEntryError:
                pass
def create_room(args):
    """Create up to five Room records (room_1..room_5) with capacities."""
    for i in xrange(1,6):
        if args.get("room_" + str(i)):
            room = frappe.new_doc("Room")
            room.room_name = args.get("room_" + str(i))
            room.seating_capacity = args.get("room_capacity_" + str(i))
            try:
                room.save()
            except frappe.DuplicateEntryError:
                pass
|
bpshetty/erpnext
|
erpnext/setup/setup_wizard/setup_wizard.py
|
Python
|
gpl-3.0
| 18,648
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Dane Summers <dsummers@pinedesk.biz>
# Copyright: (c) 2013, Mike Grozak <mike.grozak@gmail.com>
# Copyright: (c) 2013, Patrick Callahan <pmc@patrickcallahan.com>
# Copyright: (c) 2015, Evan Kaufman <evan@digitalflophouse.com>
# Copyright: (c) 2015, Luca Berruti <nadirio@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module release metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = """
---
module: cron
short_description: Manage cron.d and crontab entries
description:
- Use this module to manage crontab and environment variables entries. This module allows
you to create environment variables and named crontab entries, update, or delete them.
- 'When crontab jobs are managed: the module includes one line with the description of the
crontab entry C("#Ansible: <name>") corresponding to the "name" passed to the module,
which is used by future ansible/module calls to find/check the state. The "name"
parameter should be unique, and changing the "name" value will result in a new cron
task being created (or a different one being removed).'
- 'When environment variables are managed: no comment line is added, but, when the module
needs to find/check the state, it uses the "name" parameter to find the environment
variable definition line.'
- 'When using symbols such as %, they must be properly escaped.'
version_added: "0.9"
options:
name:
description:
- Description of a crontab entry or, if env is set, the name of environment variable.
Required if state=absent. Note that if name is not set and state=present, then a
new crontab entry will always be created, regardless of existing ones.
user:
description:
- The specific user whose crontab should be modified.
default: root
job:
description:
- The command to execute or, if env is set, the value of environment variable.
The command should not contain line breaks.
Required if state=present.
aliases: [ value ]
state:
description:
- Whether to ensure the job or environment variable is present or absent.
choices: [ absent, present ]
default: present
cron_file:
description:
- If specified, uses this file instead of an individual user's crontab.
If this is a relative path, it is interpreted with respect to
/etc/cron.d. (If it is absolute, it will typically be /etc/crontab).
Many linux distros expect (and some require) the filename portion to consist solely
of upper- and lower-case letters, digits, underscores, and hyphens.
To use the C(cron_file) parameter you must specify the C(user) as well.
backup:
description:
- If set, create a backup of the crontab before it is modified.
The location of the backup is returned in the C(backup_file) variable by this module.
type: bool
default: 'no'
minute:
description:
- Minute when the job should run ( 0-59, *, */2, etc )
default: "*"
hour:
description:
- Hour when the job should run ( 0-23, *, */2, etc )
default: "*"
day:
description:
- Day of the month the job should run ( 1-31, *, */2, etc )
default: "*"
aliases: [ dom ]
month:
description:
- Month of the year the job should run ( 1-12, *, */2, etc )
default: "*"
weekday:
description:
- Day of the week that the job should run ( 0-6 for Sunday-Saturday, *, etc )
default: "*"
aliases: [ dow ]
reboot:
description:
- If the job should be run at reboot. This option is deprecated. Users should use special_time.
version_added: "1.0"
type: bool
default: "no"
special_time:
description:
- Special time specification nickname.
choices: [ annually, daily, hourly, monthly, reboot, weekly, yearly ]
version_added: "1.3"
disabled:
description:
- If the job should be disabled (commented out) in the crontab.
- Only has effect if C(state=present).
type: bool
default: 'no'
version_added: "2.0"
env:
description:
- If set, manages a crontab's environment variable. New variables are added on top of crontab.
"name" and "value" parameters are the name and the value of environment variable.
type: bool
default: "no"
version_added: "2.1"
insertafter:
description:
- Used with C(state=present) and C(env). If specified, the environment variable will be
inserted after the declaration of specified environment variable.
version_added: "2.1"
insertbefore:
description:
- Used with C(state=present) and C(env). If specified, the environment variable will be
inserted before the declaration of specified environment variable.
version_added: "2.1"
requirements:
- cron
author:
- Dane Summers (@dsummersl)
- Mike Grozak (@rhaido)
- Patrick Callahan (@dirtyharrycallahan)
- Evan Kaufman (@EvanK)
- Luca Berruti (@lberruti)
"""
EXAMPLES = '''
- name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null"
cron:
name: "check dirs"
minute: "0"
hour: "5,2"
job: "ls -alh > /dev/null"
- name: 'Ensure an old job is no longer present. Removes any job that is prefixed by "#Ansible: an old job" from the crontab'
cron:
name: "an old job"
state: absent
- name: Creates an entry like "@reboot /some/job.sh"
cron:
name: "a job for reboot"
special_time: reboot
job: "/some/job.sh"
- name: Creates an entry like "PATH=/opt/bin" on top of crontab
cron:
name: PATH
env: yes
job: /opt/bin
- name: Creates an entry like "APP_HOME=/srv/app" and insert it after PATH declaration
cron:
name: APP_HOME
env: yes
job: /srv/app
insertafter: PATH
- name: Creates a cron file under /etc/cron.d
cron:
name: yum autoupdate
weekday: 2
minute: 0
hour: 12
user: root
job: "YUMINTERACTIVE=0 /usr/sbin/yum-autoupdate"
cron_file: ansible_yum-autoupdate
- name: Removes a cron file from under /etc/cron.d
cron:
name: "yum autoupdate"
cron_file: ansible_yum-autoupdate
state: absent
- name: Removes "APP_HOME" environment variable from crontab
cron:
name: APP_HOME
env: yes
state: absent
'''
import os
import platform
import pipes
import pwd
import re
import sys
import tempfile
from ansible.module_utils.basic import AnsibleModule, get_platform
# External binary used to read and write user crontabs.
CRONCMD = "/usr/bin/crontab"


class CronTabError(Exception):
    """Raised when a crontab cannot be read or written."""
    pass
class CronTab(object):
"""
CronTab object to write time based crontab file
user - the user of the crontab (defaults to root)
cron_file - a cron file under /etc/cron.d, or an absolute path
"""
    def __init__(self, module, user=None, cron_file=None):
        """Load the targeted crontab.

        module    - the AnsibleModule instance (run_command/fail_json)
        user      - crontab owner; None means the invoking user
        cron_file - file under /etc/cron.d (relative) or an absolute path
        """
        self.module = module
        self.user = user
        # Whether we run as root (affects how the crontab command is built).
        self.root = (os.getuid() == 0)
        self.lines = None
        # Marker prefix used to tag managed jobs: "#Ansible: <name>".
        self.ansible = "#Ansible: "
        self.existing = ''

        if cron_file:
            if os.path.isabs(cron_file):
                self.cron_file = cron_file
            else:
                # Relative paths are interpreted against /etc/cron.d.
                self.cron_file = os.path.join('/etc/cron.d', cron_file)
        else:
            self.cron_file = None

        self.read()
    def read(self):
        """Populate self.lines and self.existing from the cron file or `crontab -l`."""
        # Read in the crontab from the system
        self.lines = []
        if self.cron_file:
            # read the cronfile
            try:
                f = open(self.cron_file, 'r')
                self.existing = f.read()
                self.lines = self.existing.splitlines()
                f.close()
            except IOError:
                # cron file does not exist
                return
            except Exception:
                raise CronTabError("Unexpected error:", sys.exc_info()[0])
        else:
            # using safely quoted shell for now, but this really should be two non-shell calls instead. FIXME
            (rc, out, err) = self.module.run_command(self._read_user_execute(), use_unsafe_shell=True)

            if rc != 0 and rc != 1:  # 1 can mean that there are no jobs.
                raise CronTabError("Unable to read crontab")

            self.existing = out

            lines = out.splitlines()
            count = 0
            for l in lines:
                # Strip the up-to-3-line header some cron implementations
                # prepend to `crontab -l` output, removing it from
                # self.existing as well so round-trips stay clean.
                if count > 2 or (not re.match(r'# DO NOT EDIT THIS FILE - edit the master and reinstall.', l) and
                                 not re.match(r'# \(/tmp/.*installed on.*\)', l) and
                                 not re.match(r'# \(.*version.*\)', l)):
                    self.lines.append(l)
                else:
                    pattern = re.escape(l) + '[\r\n]?'
                    self.existing = re.sub(pattern, '', self.existing, 1)
                count += 1
def is_empty(self):
if len(self.lines) == 0:
return True
else:
return False
def write(self, backup_file=None):
"""
Write the crontab to the system. Saves all information.
"""
if backup_file:
fileh = open(backup_file, 'w')
elif self.cron_file:
fileh = open(self.cron_file, 'w')
else:
filed, path = tempfile.mkstemp(prefix='crontab')
os.chmod(path, int('0644', 8))
fileh = os.fdopen(filed, 'w')
fileh.write(self.render())
fileh.close()
# return if making a backup
if backup_file:
return
# Add the entire crontab back to the user crontab
if not self.cron_file:
# quoting shell args for now but really this should be two non-shell calls. FIXME
(rc, out, err) = self.module.run_command(self._write_execute(path), use_unsafe_shell=True)
os.unlink(path)
if rc != 0:
self.module.fail_json(msg=err)
# set SELinux permissions
if self.module.selinux_enabled() and self.cron_file:
self.module.set_default_selinux_context(self.cron_file, False)
def do_comment(self, name):
return "%s%s" % (self.ansible, name)
def add_job(self, name, job):
# Add the comment
self.lines.append(self.do_comment(name))
# Add the job
self.lines.append("%s" % (job))
def update_job(self, name, job):
return self._update_job(name, job, self.do_add_job)
def do_add_job(self, lines, comment, job):
lines.append(comment)
lines.append("%s" % (job))
def remove_job(self, name):
return self._update_job(name, "", self.do_remove_job)
def do_remove_job(self, lines, comment, job):
return None
def add_env(self, decl, insertafter=None, insertbefore=None):
if not (insertafter or insertbefore):
self.lines.insert(0, decl)
return
if insertafter:
other_name = insertafter
elif insertbefore:
other_name = insertbefore
other_decl = self.find_env(other_name)
if len(other_decl) > 0:
if insertafter:
index = other_decl[0] + 1
elif insertbefore:
index = other_decl[0]
self.lines.insert(index, decl)
return
self.module.fail_json(msg="Variable named '%s' not found." % other_name)
def update_env(self, name, decl):
return self._update_env(name, decl, self.do_add_env)
def do_add_env(self, lines, decl):
lines.append(decl)
def remove_env(self, name):
return self._update_env(name, '', self.do_remove_env)
def do_remove_env(self, lines, decl):
return None
def remove_job_file(self):
try:
os.unlink(self.cron_file)
return True
except OSError:
# cron file does not exist
return False
except Exception:
raise CronTabError("Unexpected error:", sys.exc_info()[0])
def find_job(self, name, job=None):
# attempt to find job by 'Ansible:' header comment
comment = None
for l in self.lines:
if comment is not None:
if comment == name:
return [comment, l]
else:
comment = None
elif re.match(r'%s' % self.ansible, l):
comment = re.sub(r'%s' % self.ansible, '', l)
# failing that, attempt to find job by exact match
if job:
for i, l in enumerate(self.lines):
if l == job:
# if no leading ansible header, insert one
if not re.match(r'%s' % self.ansible, self.lines[i - 1]):
self.lines.insert(i, self.do_comment(name))
return [self.lines[i], l, True]
# if a leading blank ansible header AND job has a name, update header
elif name and self.lines[i - 1] == self.do_comment(None):
self.lines[i - 1] = self.do_comment(name)
return [self.lines[i - 1], l, True]
return []
def find_env(self, name):
for index, l in enumerate(self.lines):
if re.match(r'^%s=' % name, l):
return [index, l]
return []
def get_cron_job(self, minute, hour, day, month, weekday, job, special, disabled):
# normalize any leading/trailing newlines (ansible/ansible-modules-core#3791)
job = job.strip('\r\n')
if disabled:
disable_prefix = '#'
else:
disable_prefix = ''
if special:
if self.cron_file:
return "%s@%s %s %s" % (disable_prefix, special, self.user, job)
else:
return "%s@%s %s" % (disable_prefix, special, job)
else:
if self.cron_file:
return "%s%s %s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, self.user, job)
else:
return "%s%s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, job)
def get_jobnames(self):
jobnames = []
for l in self.lines:
if re.match(r'%s' % self.ansible, l):
jobnames.append(re.sub(r'%s' % self.ansible, '', l))
return jobnames
def get_envnames(self):
envnames = []
for l in self.lines:
if re.match(r'^\S+=', l):
envnames.append(l.split('=')[0])
return envnames
def _update_job(self, name, job, addlinesfunction):
ansiblename = self.do_comment(name)
newlines = []
comment = None
for l in self.lines:
if comment is not None:
addlinesfunction(newlines, comment, job)
comment = None
elif l == ansiblename:
comment = l
else:
newlines.append(l)
self.lines = newlines
if len(newlines) == 0:
return True
else:
return False # TODO add some more error testing
def _update_env(self, name, decl, addenvfunction):
newlines = []
for l in self.lines:
if re.match(r'^%s=' % name, l):
addenvfunction(newlines, decl)
else:
newlines.append(l)
self.lines = newlines
def render(self):
"""
Render this crontab as it would be in the crontab.
"""
crons = []
for cron in self.lines:
crons.append(cron)
result = '\n'.join(crons)
if result:
result = result.rstrip('\r\n') + '\n'
return result
def _read_user_execute(self):
"""
Returns the command line for reading a crontab
"""
user = ''
if self.user:
if platform.system() == 'SunOS':
return "su %s -c '%s -l'" % (pipes.quote(self.user), pipes.quote(CRONCMD))
elif platform.system() == 'AIX':
return "%s -l %s" % (pipes.quote(CRONCMD), pipes.quote(self.user))
elif platform.system() == 'HP-UX':
return "%s %s %s" % (CRONCMD, '-l', pipes.quote(self.user))
elif pwd.getpwuid(os.getuid())[0] != self.user:
user = '-u %s' % pipes.quote(self.user)
return "%s %s %s" % (CRONCMD, user, '-l')
def _write_execute(self, path):
"""
Return the command line for writing a crontab
"""
user = ''
if self.user:
if platform.system() in ['SunOS', 'HP-UX', 'AIX']:
return "chown %s %s ; su '%s' -c '%s %s'" % (pipes.quote(self.user), pipes.quote(path), pipes.quote(self.user), CRONCMD, pipes.quote(path))
elif pwd.getpwuid(os.getuid())[0] != self.user:
user = '-u %s' % pipes.quote(self.user)
return "%s %s %s" % (CRONCMD, user, pipes.quote(path))
def main():
    """Module entry point: parse parameters, then add/update/remove a cron
    job, an environment variable, or an entire cron.d file, reporting
    changed/diff/backup information back to Ansible."""
    # The following example playbooks:
    #
    # - cron: name="check dirs" hour="5,2" job="ls -alh > /dev/null"
    #
    # - name: do the job
    #   cron: name="do the job" hour="5,2" job="/some/dir/job.sh"
    #
    # - name: no job
    #   cron: name="an old job" state=absent
    #
    # - name: sets env
    #   cron: name="PATH" env=yes value="/bin:/usr/bin"
    #
    # Would produce:
    # PATH=/bin:/usr/bin
    # # Ansible: check dirs
    # * * 5,2 * * ls -alh > /dev/null
    # # Ansible: do the job
    # * * 5,2 * * /some/dir/job.sh
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str'),
            user=dict(type='str'),
            job=dict(type='str', aliases=['value']),
            cron_file=dict(type='str'),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            backup=dict(type='bool', default=False),
            minute=dict(type='str', default='*'),
            hour=dict(type='str', default='*'),
            day=dict(type='str', default='*', aliases=['dom']),
            month=dict(type='str', default='*'),
            weekday=dict(type='str', default='*', aliases=['dow']),
            reboot=dict(type='bool', default=False),
            special_time=dict(type='str', choices=["reboot", "yearly", "annually", "monthly", "weekly", "daily", "hourly"]),
            disabled=dict(type='bool', default=False),
            env=dict(type='bool'),
            insertafter=dict(type='str'),
            insertbefore=dict(type='str'),
        ),
        supports_check_mode=True,
        mutually_exclusive=[
            ['reboot', 'special_time'],
            ['insertafter', 'insertbefore'],
        ],
        # NOTE(review): ('user') is not a tuple — it is the plain string
        # 'user'. AnsibleModule appears to tolerate a bare string here, but
        # ('user',) would be unambiguous; confirm against the arg-spec docs.
        required_by=dict(
            cron_file=('user'),
        ),
        # NOTE(review): likewise ('job') is the bare string 'job', not a
        # one-tuple; verify required_if accepts a string parameter list.
        required_if=(
            ('state', 'present', ('job')),
        ),
    )
    name = module.params['name']
    user = module.params['user']
    job = module.params['job']
    cron_file = module.params['cron_file']
    state = module.params['state']
    backup = module.params['backup']
    minute = module.params['minute']
    hour = module.params['hour']
    day = module.params['day']
    month = module.params['month']
    weekday = module.params['weekday']
    reboot = module.params['reboot']
    special_time = module.params['special_time']
    disabled = module.params['disabled']
    env = module.params['env']
    insertafter = module.params['insertafter']
    insertbefore = module.params['insertbefore']
    do_install = state == 'present'
    changed = False
    res_args = dict()
    warnings = list()
    if cron_file:
        cron_file_basename = os.path.basename(cron_file)
        if not re.search(r'^[A-Z0-9_-]+$', cron_file_basename, re.I):
            warnings.append('Filename portion of cron_file ("%s") should consist' % cron_file_basename +
                            ' solely of upper- and lower-case letters, digits, underscores, and hyphens')
    # Ensure all files generated are only writable by the owning user. Primarily relevant for the cron_file option.
    os.umask(int('022', 8))
    crontab = CronTab(module, user, cron_file)
    module.debug('cron instantiated - name: "%s"' % name)
    if module._diff:
        # Capture the pre-change crontab for --diff output.
        diff = dict()
        diff['before'] = crontab.existing
        if crontab.cron_file:
            diff['before_header'] = crontab.cron_file
        else:
            if crontab.user:
                diff['before_header'] = 'crontab for user "%s"' % crontab.user
            else:
                diff['before_header'] = 'crontab'
    # --- user input validation ---
    if (special_time or reboot) and \
       (True in [(x != '*') for x in [minute, hour, day, month, weekday]]):
        module.fail_json(msg="You must specify time and date fields or special time.")
    # cannot support special_time on solaris
    if (special_time or reboot) and get_platform() == 'SunOS':
        module.fail_json(msg="Solaris does not support special_time=... or @reboot")
    if (insertafter or insertbefore) and not env and do_install:
        module.fail_json(msg="Insertafter and insertbefore parameters are valid only with env=yes")
    if reboot:
        special_time = "reboot"
    # if requested make a backup before making a change
    if backup and not module.check_mode:
        (backuph, backup_file) = tempfile.mkstemp(prefix='crontab')
        crontab.write(backup_file)
    if crontab.cron_file and not name and not do_install:
        # Removing an entire cron.d file: no job name, state=absent.
        if module._diff:
            diff['after'] = ''
            diff['after_header'] = '/dev/null'
        else:
            diff = dict()
        if module.check_mode:
            changed = os.path.isfile(crontab.cron_file)
        else:
            changed = crontab.remove_job_file()
        module.exit_json(changed=changed, cron_file=cron_file, state=state, diff=diff)
    if env:
        # NOTE(review): nothing forces `name` to be set with env=yes, and
        # `' ' in None` raises TypeError — confirm upstream validation.
        if ' ' in name:
            module.fail_json(msg="Invalid name for environment variable")
        decl = '%s="%s"' % (name, job)
        old_decl = crontab.find_env(name)
        if do_install:
            if len(old_decl) == 0:
                crontab.add_env(decl, insertafter, insertbefore)
                changed = True
            if len(old_decl) > 0 and old_decl[1] != decl:
                crontab.update_env(name, decl)
                changed = True
        else:
            if len(old_decl) > 0:
                crontab.remove_env(name)
                changed = True
    else:
        if do_install:
            for char in ['\r', '\n']:
                if char in job.strip('\r\n'):
                    warnings.append('Job should not contain line breaks')
                    break
            job = crontab.get_cron_job(minute, hour, day, month, weekday, job, special_time, disabled)
            old_job = crontab.find_job(name, job)
            if len(old_job) == 0:
                crontab.add_job(name, job)
                changed = True
            if len(old_job) > 0 and old_job[1] != job:
                crontab.update_job(name, job)
                changed = True
            # find_job returned 3 items: the job was located by text and its
            # marker comment was (re)written, so the crontab must be rewritten.
            if len(old_job) > 2:
                crontab.update_job(name, job)
                changed = True
        else:
            old_job = crontab.find_job(name)
            if len(old_job) > 0:
                crontab.remove_job(name)
                changed = True
    # no changes to env/job, but existing crontab needs a terminating newline
    if not changed and crontab.existing != '':
        if not (crontab.existing.endswith('\r') or crontab.existing.endswith('\n')):
            changed = True
    res_args = dict(
        jobs=crontab.get_jobnames(),
        envs=crontab.get_envnames(),
        warnings=warnings,
        changed=changed
    )
    if changed:
        if not module.check_mode:
            crontab.write()
        if module._diff:
            diff['after'] = crontab.render()
            if crontab.cron_file:
                diff['after_header'] = crontab.cron_file
            else:
                if crontab.user:
                    diff['after_header'] = 'crontab for user "%s"' % crontab.user
                else:
                    diff['after_header'] = 'crontab'
            res_args['diff'] = diff
    # retain the backup only if crontab or cron file have changed
    if backup and not module.check_mode:
        if changed:
            res_args['backup_file'] = backup_file
        else:
            os.unlink(backup_file)
    if cron_file:
        res_args['cron_file'] = cron_file
    module.exit_json(**res_args)
    # --- should never get here
    module.exit_json(msg="Unable to execute cron task.")
if __name__ == '__main__':
    main()
|
Jorge-Rodriguez/ansible
|
lib/ansible/modules/system/cron.py
|
Python
|
gpl-3.0
| 24,897
|
#!/usr/bin/env python
#
# Unit Tests for temp module
#
# See __usage__ for an explanation of runtime arguments.
#
# -Christopher Blunck
#
import unittest
from ..temp import *
__author__ = 'Christopher Blunck'
__email__ = 'chris@wxnet.org'
__revision__ = '$Revision: 1.6 $'
__doc__ = '''
Unit tests the temp module.
'''
__usage__ = '''
python $0
'''
def usage():
    """Print the module usage string and exit with a non-zero status."""
    # Fix: `sys` was not imported in this file and was only (maybe) available
    # through `from ..temp import *`; import it explicitly so exit() is safe.
    import sys
    print(__usage__)
    sys.exit(1)
class TestCase(unittest.TestCase):
    """Spot-checks for the temperature helpers in weather.units.temp:
    heat index, wind chill, the F/C/K/R conversion matrix, dewpoint and
    humidity. Expected values are hard-coded reference points; `int(...)`
    truncation keeps the comparisons float-safe."""
    def setUp(self): pass
    def tearDown(self): pass
    def test__calc_heat_index(self):
        # if the temperature is < 80, heat index == temperature
        assert calc_heat_index(70, 100) == 70 , "value not correct"
        assert calc_heat_index(79.9, 100) == 79.9 , "value not correct"
        assert calc_heat_index(80, 100) != 80 , "value not correct"
        # make sure some hard-coded values work
        assert int(calc_heat_index(80, 100)) == 87, "value not correct"
        assert int(calc_heat_index(80, 10)) == 78, "value not correct"
        assert int(calc_heat_index(90, 50)) == 94, "value not correct"
        assert int(calc_heat_index(120, 100)) == 380, "value not correct"
    def test__calc_wind_chill(self):
        # make sure some hard-coded values work
        assert int(calc_wind_chill(80, 10)) == 83, "value not correct"
        assert int(calc_wind_chill(32, 10)) == 23, "value not correct"
        assert int(calc_wind_chill(-20, 5)) == -34, "value not correct"
    def test__fahrenheit_to_celsius(self):
        # make sure some special values work
        assert int(fahrenheit_to_celsius(32)) == 0, "value not correct"
        assert int(fahrenheit_to_celsius(212)) == 100, "value not correct"
        # make sure some hard coded values work
        assert int(fahrenheit_to_celsius(60)) == 15, "value not correct"
        assert int(fahrenheit_to_celsius(-60)) == -51, "value not correct"
        assert int(fahrenheit_to_celsius(90)) == 32, "value not correct"
    def test__celsius_to_fahrenheit(self):
        # make sure some special values work
        assert int(celsius_to_fahrenheit(0)) == 32, "value not correct"
        assert int(celsius_to_fahrenheit(100)) == 212, "value not correct"
        # make sure some hard coded values work
        assert int(celsius_to_fahrenheit(60)) == 140, "value not correct"
        assert int(celsius_to_fahrenheit(-60)) == -76, "value not correct"
        assert int(celsius_to_fahrenheit(30)) == 86, "value not correct"
    def test__celsius_to_kelvin(self):
        # make sure some special values work
        assert int(celsius_to_kelvin(-273.15)) == 0, "value not correct"
        assert int(celsius_to_kelvin(100)) == 373, "value not correct"
        # make sure some hard coded values work
        assert int(celsius_to_kelvin(60)) == 333, "value not correct"
        assert int(celsius_to_kelvin(-60)) == 213, "value not correct"
        assert int(celsius_to_kelvin(30)) == 303, "value not correct"
    def test__celsius_to_rankine(self):
        # make sure some special values work
        assert int(celsius_to_rankine(0)) == 491, "value not correct"
        assert int(celsius_to_rankine(100)) == 671, "value not correct"
        # make sure some hard coded values work
        assert int(celsius_to_rankine(60)) == 599, "value not correct"
        assert int(celsius_to_rankine(-60)) == 383, "value not correct"
        assert int(celsius_to_rankine(30)) == 545, "value not correct"
    def test__fahrenheit_to_kelvin(self):
        # make sure some special values work
        assert int(fahrenheit_to_kelvin(32)) == 273, "value not correct"
        assert int(fahrenheit_to_kelvin(212)) == 373, "value not correct"
        # make sure some hard coded values work
        assert int(fahrenheit_to_kelvin(60)) == 288, "value not correct"
        assert int(fahrenheit_to_kelvin(-60)) == 222, "value not correct"
        assert int(fahrenheit_to_kelvin(90)) == 305, "value not correct"
    def test__fahrenheit_to_rankine(self):
        # make sure some special values work
        assert int(fahrenheit_to_rankine(32)) == 491, "value not correct"
        assert int(fahrenheit_to_rankine(212)) == 671, "value not correct"
        # make sure some hard coded values work
        assert int(fahrenheit_to_rankine(60)) == 519, "value not correct"
        assert int(fahrenheit_to_rankine(-60)) == 399, "value not correct"
        assert int(fahrenheit_to_rankine(90)) == 549, "value not correct"
    def test__kelvin_to_celsius(self):
        # make sure some special values work
        assert int(kelvin_to_celsius(273.15)) == 0, "value not correct"
        assert int(kelvin_to_celsius(373.15)) == 100, "value not correct"
        # make sure some hard coded values work
        assert int(kelvin_to_celsius(0)) == -273, "value not correct"
        assert int(kelvin_to_celsius(293.15)) == 20, "value not correct"
        assert int(kelvin_to_celsius(343.15)) == 70, "value not correct"
    def test__kelvin_to_fahrenheit(self):
        # make sure some special values work
        assert int(kelvin_to_fahrenheit(273.15)) == 32, "value not correct"
        assert int(kelvin_to_fahrenheit(373.15)) == 212, "value not correct"
        # make sure some hard coded values work
        assert int(kelvin_to_fahrenheit(0)) == -459, "value not correct"
        assert int(kelvin_to_fahrenheit(293.15)) == 68, "value not correct"
        assert int(kelvin_to_fahrenheit(343.15)) == 158, "value not correct"
    def test__kelvin_to_rankine(self):
        # make sure some special values work
        assert int(kelvin_to_rankine(273.15)) == 491, "value not correct"
        assert int(kelvin_to_rankine(373.15)) == 671, "value not correct"
        # make sure some hard coded values work
        assert int(kelvin_to_rankine(0)) == 0, "value not correct"
        assert int(kelvin_to_rankine(293.15)) == 527, "value not correct"
        assert int(kelvin_to_rankine(343.15)) == 617, "value not correct"
    def test__rankine_to_celsius(self):
        # make sure some special values work
        assert int(rankine_to_celsius(491)) == 0, "value not correct"
        assert int(rankine_to_celsius(671)) == 99, "value not correct"
        # make sure some hard coded values work
        assert int(rankine_to_celsius(0)) == -273, "value not correct"
        assert int(rankine_to_celsius(527)) == 19, "value not correct"
        assert int(rankine_to_celsius(617)) == 69, "value not correct"
    def test__rankine_to_fahrenheit(self):
        # make sure some special values work
        assert int(rankine_to_fahrenheit(491)) == 31, "value not correct"
        assert int(rankine_to_fahrenheit(671)) == 211, "value not correct"
        # make sure some hard coded values work
        assert int(rankine_to_fahrenheit(0)) == -459, "value not correct"
        assert int(rankine_to_fahrenheit(527)) == 67, "value not correct"
        assert int(rankine_to_fahrenheit(617)) == 157, "value not correct"
    def test__rankine_to_kelvin(self):
        # make sure some special values work
        assert int(rankine_to_kelvin(491)) == 272, "value not correct"
        assert int(rankine_to_kelvin(671)) == 372, "value not correct"
        # make sure some hard coded values work
        assert int(rankine_to_kelvin(0)) == 0, "value not correct"
        assert int(rankine_to_kelvin(527)) == 292, "value not correct"
        assert int(rankine_to_kelvin(617)) == 342, "value not correct"
    def test__dewpoint(self):
        # make sure some hard coded values work
        assert int(calc_dewpoint(12, 72)) == 4, "value not correct"
        assert int(calc_dewpoint(75, 33)) == 43, "value not correct"
        assert int(calc_dewpoint(90, 85)) == 84, "value not correct"
    def test__humidity(self):
        # make sure some hard coded values work
        assert int(calc_humidity(87, 76) * 100) == 69, "value not correct"
        assert int(calc_humidity(75, 45) * 100) == 34, "value not correct"
        assert int(calc_humidity(50, 10) * 100) == 19, "value not correct"
        assert int(calc_humidity(100, 88) * 100) == 68, "value not correct"
def main():
    """Run the TestCase suite with a plain-text runner."""
    # Fix: unittest.makeSuite() has been deprecated since Python 3.2 and was
    # removed in 3.13; TestLoader collects the same 'test'-prefixed methods.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestCase)
    runner = unittest.TextTestRunner()
    runner.run(suite)
if __name__ == '__main__':
    main()
|
cmcginty/PyWeather
|
weather/units/tests/test_temp.py
|
Python
|
gpl-3.0
| 8,293
|
from enum import Enum
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor
import time
import atexit
# NOTE(review): this misuses the Enum functional API — the first argument
# should be the new type's *name* (e.g. Enum('Direction', 'Forward Reverse
# Spin Left Right')); here the extra positionals are consumed as the
# module/qualname/type parameters. Additionally a member called `None`
# cannot be accessed as `Direction.None` on Python 3 (`None` is a keyword).
# TODO confirm the intended Python version before touching the interface.
Direction = Enum('Forward','Reverse','Spin','Left','Right','None')
class Wheels:
    """Differential-drive controller for two DC motors on an Adafruit Motor HAT.

    NOTE(review): `Direction.None` below is only valid syntax on Python 2
    (`None` is a keyword in Python 3); confirm the target runtime before reuse.
    """
    def __init__(self):
        self.lc = 100 #Loop count
        self.ld = .02 #Loop delay
        self.mh = Adafruit_MotorHAT(addr=0x60)
        self.speed = 0
        self.direction = Direction.None
        # Stop the motors when the interpreter exits, whatever the reason.
        atexit.register(self.turnOffMotors)
        self.rightWheel = self.mh.getMotor(4)
        self.leftWheel = self.mh.getMotor(1)
        # presumably the minimum PWM value that reliably moves the robot — TODO confirm
        self.minPower = 55
    def setConfig(self,config):
        """Re-initialize HAT address, motor channels and ramp timing from config."""
        self.config = config
        self.lc = config.wheelsLoopCount
        self.ld = config.wheelsLoopDelay
        self.mh = Adafruit_MotorHAT(addr=config.wheelsAddr)
        self.speed = 0
        self.direction = Direction.None
        self.minPower = config.wheelsMinPower
        self.rightWheel = self.mh.getMotor(config.wheelsRight)
        self.leftWheel = self.mh.getMotor(config.wheelsLeft)
    def turnOffMotors(self):
        """Release all four HAT channels so every motor coasts to a stop."""
        self.mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
        self.mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
        self.mh.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
        self.mh.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
    def setSpeed(self,speed, direction):
        """Ramp both wheels to `speed` in `direction`, one PWM step per self.ld.

        Changing direction while moving first ramps down to zero in the old
        direction (recursive call below).
        NOTE(review): range(self.speed, speed, step) stops one step short of
        `speed`, so self.speed ends at speed-1 (or +1) — confirm intended.
        """
        if((self.speed>0) and (speed>0) and (self.direction != direction)):
            self.setSpeed(0,self.direction)
        step = 1
        if(speed < self.speed):
            step = -step
        if (direction!=self.direction):
            # Set each motor's rotation sense for the new travel direction.
            if direction == Direction.Forward:
                self.rightWheel.run(Adafruit_MotorHAT.FORWARD)
                self.leftWheel.run(Adafruit_MotorHAT.FORWARD)
            elif direction == Direction.Reverse:
                self.rightWheel.run(Adafruit_MotorHAT.BACKWARD)
                self.leftWheel.run(Adafruit_MotorHAT.BACKWARD)
            elif (direction == Direction.Spin) or (direction == Direction.Left):
                self.rightWheel.run(Adafruit_MotorHAT.FORWARD)
                self.leftWheel.run(Adafruit_MotorHAT.BACKWARD)
            elif direction == Direction.Right:
                self.rightWheel.run(Adafruit_MotorHAT.BACKWARD)
                self.leftWheel.run(Adafruit_MotorHAT.FORWARD)
            elif direction == Direction.None:
                self.turnOffMotors()
                return
        finalSpeed = self.speed
        for i in range(self.speed, speed, step):
            self.rightWheel.setSpeed(i)
            self.leftWheel.setSpeed(i)
            finalSpeed = i
            time.sleep(self.ld)
        self.speed = finalSpeed
        self.direction = direction
    def correct(self,direction,bump=20):
        """Briefly speed up one wheel to nudge the heading toward `direction`."""
        wheel = self.rightWheel
        if(direction == Direction.Right):
            wheel = self.leftWheel
        wheel.setSpeed(self.speed + bump)
        time.sleep(.25)
        wheel.setSpeed(self.speed)
    def pulse(self,seconds=.25,bump=20):
        """Momentarily boost both wheels by `bump` for `seconds`."""
        self.rightWheel.setSpeed(self.speed + bump)
        self.leftWheel.setSpeed(self.speed + bump)
        time.sleep(seconds)
        self.rightWheel.setSpeed(self.speed)
        self.leftWheel.setSpeed(self.speed)
    def test(self):
        """Drive forward, reverse, then spin, about two seconds each."""
        print("Forward!")
        self.setSpeed(100,Direction.Forward)
        # NOTE(review): reads the module-level `w`, not `self` — this only
        # works when driven from the __main__ block; confirm intent.
        print("Speed: {} \t Direction: {}".format(w.speed, w.direction))
        time.sleep(2)
        #self.setSpeed(0,Direction.Forward)
        print("Reverse!")
        self.setSpeed(100,Direction.Reverse)
        time.sleep(2)
        #self.setSpeed(0,Direction.Reverse)
        print("Spin!")
        self.setSpeed(100,Direction.Spin)
        time.sleep(2)
        self.setSpeed(0,Direction.Spin)
if __name__ == '__main__':
    # Fix: the class is named `Wheels`; the original called the undefined
    # lowercase `wheels()`, which raises NameError before any motor runs.
    w = Wheels()
    w.test()
    w.turnOffMotors()
|
twallace27603/robot_army
|
wheels.py
|
Python
|
gpl-3.0
| 3,797
|
"""
project and subproject adding
# @@ needs tests
"""
from Acquisition import aq_inner
from Products.CMFCore.utils import getToolByName
from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile
from opencore.browser.formhandler import OctopoLite, action
from opencore.i18n import _
from opencore.interfaces import IHomePage
from opencore.interfaces.event import AfterProjectAddedEvent
from opencore.browser.naming import get_view_names
from opencore.project.browser.base import ProjectBaseView
from topp.featurelets.interfaces import IFeatureletSupporter, IFeaturelet
from topp.utils.text import valid_title, valid_id, strip_extra_whitespace
from zope import event
from zope.component import getAdapters, getMultiAdapter
from zope.interface import implements
import logging
log = logging.getLogger('opencore.project.browser.add')
class ProjectAddView(ProjectBaseView, OctopoLite):
    """Browser view that creates a new OpenProject: validates title/id,
    handles the logo upload and viewlet/featurelet preferences, then
    redirects to the new project's tour page."""
    template = ZopeTwoPageTemplateFile('create.pt')
    def reserved_names(self):
        """Names unusable as project ids: existing views plus reserved urls."""
        return list(get_view_names(self.context)) + ['people', 'projects', 'unique', 'summary', 'pending']
    @action('validate')
    def validate(self, target=None, fields=None):
        """AJAX id check: report whether the normalized projid is taken.

        Always returns an 'oc-id-error' entry; its html is empty when the
        id is available.
        """
        putils = getToolByName(self.context, 'plone_utils')
        errors = {}
        id_ = self.request.form.get('projid')
        id_ = putils.normalizeString(id_)
        if (self.context.has_key(id_)
            or id_ in self.reserved_names()):
            errors['oc-id-error'] = {
                'html': 'The requested url is already taken.',
                'action': 'copy',
                'effects': 'highlight'
            }
        else:
            errors['oc-id-error'] = {
                'html': '',
                'action': 'copy',
                'effects': ''
            }
        return errors
    def check_logo(self, project, logo):
        """Try to set the project logo; on an unsupported file type, add a
        status message and return False."""
        try:
            project.setLogo(logo)
        except ValueError: # must have tried to upload an unsupported filetype
            self.addPortalStatusMessage('Please choose an image in gif, jpeg, png, or bmp format.')
            return False
        return True
    @action('add')
    def handle_request(self, target=None, fields=None):
        """Create the project.

        Validates title/id, lets preference viewlets validate, instantiates
        the project through portal_factory, saves logo/home-page/viewlet
        preferences, and finally redirects to <project>/tour.
        """
        #XXX all of the errors that are reported back here are not going
        # through the translation machinery
        putils = getToolByName(self.context, 'plone_utils')
        self.request.set('__initialize_project__', True)
        self.errors = {}
        title = self.request.form.get('project_title')
        title = strip_extra_whitespace(title)
        if not isinstance(title, unicode):
            title = unicode(title, 'utf-8')
        self.request.form['project_title'] = title
        if not valid_title(title):
            self.errors['project_title'] = 'The name must contain 2 or more characters.'
        id_ = self.request.form.get('projid')
        if not valid_id(id_):
            self.errors['id'] = 'The url must contain 2 or more characters; ' + \
                                'only A-Z, 0-9 and "-" are valid characters.'
        else:
            id_ = putils.normalizeString(id_)
            if self.context.has_key(id_):
                self.errors['id'] = 'The requested url is already taken.'
        # Give plugin viewlets a chance to validate. We don't have a
        # project yet, so they'll have to tolerate validating with the
        # project container as the context.
        viewlet_mgr = getMultiAdapter((self.context, self.request, self),
                                      name='opencore.proj_prefs')
        if not hasattr(viewlet_mgr, 'viewlets'):
            viewlet_mgr.update()
        viewlets = viewlet_mgr.viewlets
        for viewlet in viewlets:
            if hasattr(viewlet, 'validate'):
                self.errors.update(viewlet.validate())
        # XXX TO DO: handle featurelets, just like in preferences.py
        if self.errors:
            self.add_status_message(_(u'psm_correct_errors_below', u'Please correct the errors indicated below.'))
            return
        self.request.form['featurelets'] = [f['id'] for f in self.featurelets()]
        # Aarrgghh!! #*!&% plone snoops into the request, and reads the form variables directly,
        # so we have to set the form variables with the same names as the schema
        self.request.form['title'] = title
        proj = self.context.restrictedTraverse('portal_factory/OpenProject/%s' %id_)
        # not calling validate because it explodes on "'" for project titles
        # XXX is no validation better than an occasional ugly error?
        #proj.validate(REQUEST=self.request, errors=self.errors, data=1, metadata=0)
        if self.errors:
            self.add_status_message(_(u'psm_correct_errors_below', u'Please correct the errors indicated below.'))
            return
        if id_ in self.reserved_names():
            self.errors['id'] = 'Name reserved'
            self.add_status_message(_(u'psm_project_name_reserved', u'The name "${project_name}" is reserved. Please try a different name.',
                                      mapping={u'project_name':id_}))
            return
        self.context.portal_factory.doCreate(proj, id_)
        proj = aq_inner(self.context)._getOb(id_)
        self.notify(proj)
        logo = self.request.form.get('logo')
        if logo:
            if not self.check_logo(proj, logo):
                return
            del self.request.form['logo']
        hpcontext = IHomePage(proj)
        hpcontext.home_page = 'summary'
        # We have to look up the viewlets again, now that we have
        # a project for them to use as the context to save to.
        viewlet_mgr = getMultiAdapter((proj, self.request, self),
                                      name='opencore.proj_prefs')
        if not hasattr(viewlet_mgr, 'viewlets'):
            viewlet_mgr.update()
        for viewlet in viewlet_mgr.viewlets:
            if hasattr(viewlet, 'save'):
                viewlet.save()
        self.template = None # Don't render anything before redirect.
        site_url = getToolByName(self.context, 'portal_url')()
        proj_edit_url = '%s/projects/%s/project-home/edit' % (site_url, id_)
        s_message_mapping = {'title': title, 'proj_edit_url': proj_edit_url,
                             'project_noun': self.project_noun,}
        s_message = _(u'project_created',
                      u'"${title}" has been created. Create a team by searching for other members to invite to your ${project_noun}, then <a href="${proj_edit_url}">edit your ${project_noun} home page</a>.',
                      mapping=s_message_mapping)
        # self.add_status_message(s_message)
        self.redirect('%s/tour' % proj.absolute_url())
    def notify(self, project):
        """Fire AfterProjectAddedEvent so subscribers can react to creation."""
        event.notify(AfterProjectAddedEvent(project, self.request))
    def featurelets(self):
        """Return descriptors (id/title/url/checked) for all featurelet
        adapters, looked up against a stub supporter object."""
        # create a stub object that provides IFeatureletSupporter
        # is there a better way to get the list of adapters without having
        # the "for" object?
        # @@ dwm: look at the adapter reg or uses the apidoc api which
        # featurelet to display is a policy decision on the portal
        # (like opencore_properties). Might work best to build the ui
        # around a policy abstraction
        obj = DummyFeatureletSupporter()
        flets = getAdapters((obj,), IFeaturelet)
        flet_data = [dict(id=f.id,
                          title=f.title,
                          url=f._info['menu_items'][0]['action'],
                          checked=False,
                          )
                     for name, f in flets]
        return flet_data
    def homepages(self):
        """Intrinsic home pages plus featurelets, for the homepage chooser."""
        flet_data = self.intrinsic_homepages() + self.featurelets()
        return flet_data
class DummyFeatureletSupporter(object):
    """Stub providing IFeatureletSupporter so featurelet adapters can be
    enumerated before any real project exists."""
    implements(IFeatureletSupporter)
|
socialplanning/opencore
|
opencore/project/browser/add.py
|
Python
|
gpl-3.0
| 7,853
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
from distutils.core import setup
from distutils.command.install import install
VERSION_NUMBER = "1.6.3"
class CustomInstall(install):
    """distutils install command that relocates the package into the shared
    data directory and patches installed scripts with its final location.

    NOTE(review): scripts are opened in binary mode but str arguments are
    passed to .replace() — this only works on Python 2; confirm target runtime.
    """
    def run(self):
        install.run(self)
        # Rewrite each installed script so it can locate the shared data dir.
        for script in self.distribution.scripts:
            script_path = os.path.join(self.install_scripts,
                                       os.path.basename(script))
            with open(script_path, "rb") as fh:
                content = fh.read()
            # "@ INSTALLED_BASE_DIR @" is the placeholder token expected in
            # the script source — TODO confirm it matches bin/guicavane.
            content = content.replace("@ INSTALLED_BASE_DIR @",
                                      self._custom_data_dir)
            with open(script_path, "wb") as fh:
                fh.write(content)
    def finalize_options(self):
        install.finalize_options(self)
        # Install the python package under share/<name> instead of site-packages.
        data_dir = os.path.join(self.prefix, "share", self.distribution.get_name())
        if self.root is None:
            build_dir = data_dir
        else:
            # Installing into a staging root (packaging): build under it,
            # but record the final (non-rooted) path for the script patch.
            build_dir = os.path.join(self.root, data_dir[1:])
        self.install_lib = build_dir
        self._custom_data_dir = data_dir
def setup_linux():
    """Collect host sub-packages and translation data, then run the
    distutils setup() for a Linux install."""
    host_root = "guicavane/Hosts"
    host_packages = [
        "guicavane.Hosts." + entry
        for entry in os.listdir(host_root)
        if os.path.isdir(os.path.join(host_root, entry))
    ]
    trans_root = "guicavane/Translations"
    translation_globs = [
        "Translations/" + entry + "/LC_MESSAGES/*"
        for entry in os.listdir(trans_root)
        if os.path.isdir(os.path.join(trans_root, entry))
    ]
    setup(
        name="guicavane",
        version=VERSION_NUMBER,
        license="GPL-3",
        author="Gonzalo García Berrotarán",
        author_email="j0hn.com.ar@gmail.com",
        description="Graphical user interface for www.cuevana.tv",
        url="http://www.github.com/j0hn/guicavane/",
        packages=["guicavane", "guicavane.Downloaders", "guicavane.Utils",
                  "guicavane.Accounts", "guicavane.Hosts"] + host_packages,
        package_data={"guicavane": ["Glade/*.glade", "Images/*.png",
                                    "Images/Downloaders/*.png"] + translation_globs},
        scripts=["bin/guicavane"],
        cmdclass={"install": CustomInstall},
    )
def setup_windows():
    """Build the Windows distribution with py2exe (GUI + debug console exe)."""
    import py2exe
    # GUI executable metadata; the console variant shares it but gets a
    # different base name so stdout/stderr stay visible for debugging.
    outdata_win = {
        "script": "bin\\guicavane",
        "dest_base": "guicavane",
        "icon_resources": [(1, "guicavane\\Images\\logo.ico")]
    }
    outdata_con = outdata_win.copy()
    outdata_con['dest_base'] = "guicavane_debug"
    # py2exe bundling options: force-include GTK/GObject runtime modules,
    # exclude debugger/Tk baggage to keep the build small.
    opts = {
        'py2exe': {
            'packages': 'encodings, gtk, guicavane, guicavane.Downloaders',
            'includes': 'cairo, pangocairo, pango, atk, gobject, os, urllib,' \
                        'urllib2, cookielib, guicavane, gettext, gtk.glade, ' \
                        'gio, unicodedata, webbrowser, ' \
                        'guicavane.Downloaders, guicavane.Accounts, ' \
                        'guicavane.Utils',
            'excludes': ["pywin", "pywin.debugger", "pywin.debugger.dbgcon",
                         "pywin.dialogs", "pywin.dialogs.list", "Tkconstants",
                         "Tkinter", "tcl", "doctest", "macpath", "pdb",
                         "ftplib", "win32wnet", "getopt",],
            'dll_excludes': ["w9xpopen.exe"],
            'dist_dir': './windows/build',
        }
    }
    # Collect data files (glade UIs, images, translations, host plugins)
    # preserving their relative layout inside the dist directory.
    files = []
    files.append(("Glade",
        ["guicavane\\Glade\\" + x for x in os.listdir("guicavane\\Glade")]))
    files.append(("Images",
        ["guicavane\\Images\\" + x for x in os.listdir("guicavane\\Images") if \
         not os.path.isdir("guicavane\\Images\\" + x)]))
    files.append(("Images\\Downloaders\\",
        ["guicavane\\Images\\Downloaders\\" + x for x in os.listdir("guicavane\\Images\\Downloaders\\")]))
    files.append(("Images\\Sites\\",
        ["guicavane\\Images\\Sites\\" + x for x in os.listdir("guicavane\\Images\\Sites\\")]))
    for translation in os.listdir("guicavane\\Translations\\"):
        if not os.path.isdir("guicavane\\Translations\\" + translation):
            continue
        files.append(("Translations\\" + translation + "\\LC_MESSAGES",
            ["guicavane\\Translations\\" + translation + "\\LC_MESSAGES\\" + \
             x for x in os.listdir("guicavane\\Translations\\" + translation + "\\LC_MESSAGES")]))
    hosts_dir = "guicavane\\Hosts"
    hosts = os.listdir(hosts_dir)
    hosts = [os.path.join(hosts_dir, x) for x in hosts if os.path.isdir(
             os.path.join(hosts_dir, x))]
    for host in hosts:
        # Strip the package prefix so plugins land at dist root level.
        cleanhost = host.replace("guicavane\\", "")
        files.append((cleanhost, [os.path.join(host, x) for x in os.listdir(host)]))
    setup(
        name = "Guicavane",
        license = "GPL-3",
        author = "Gonzalo García Berrotarán",
        author_email = "j0hn.com.ar@gmail.com",
        description = "Graphical user interface for www.cuevana.tv",
        version = VERSION_NUMBER,
        windows = [outdata_win],
        console = [outdata_con],
        options = opts,
        data_files = files
    )
if __name__ == "__main__":
    # chdir to the script's own directory so the relative data paths
    # (guicavane/Hosts, guicavane/Translations, ...) resolve correctly.
    path = os.path.dirname(sys.argv[0])
    if path:
        os.chdir(path)
    # py2exe build on Windows, plain distutils setup everywhere else.
    if os.name == "nt":
        setup_windows()
    else:
        setup_linux()
|
j0hn/guicavane
|
setup.py
|
Python
|
gpl-3.0
| 5,235
|
import Adafruit_BBIO.PWM as PWM
class Servo:
    """Hobby-servo helper built on Adafruit_BBIO.PWM.

    duty_min/duty_max bound the usable duty-cycle range; an angle in
    0-180 degrees is interpolated across that span.  The duty value is
    taken as (100 - x) -- apparently an inverted signal; confirm against
    the actual wiring before changing.
    """

    def __init__(self, pin):
        # PWM-capable pin name, e.g. "P8_13".
        self.servo_pin = pin
        self.duty_min = 3
        self.duty_max = 14.5
        self.duty_span = self.duty_max - self.duty_min

    def StartServo(self):
        """Start PWM output on the pin and center the servo at 90 degrees."""
        print("Starting servo")
        print(self.servo_pin)
        PWM.start(self.servo_pin, (100 - self.duty_min), 60.0, 1)
        self.current_angle = 90.0
        self.SetAngle(self.current_angle)

    def SetAngle(self, angle):
        """Drive the servo to *angle* degrees (0-180)."""
        fraction = float(angle) / 180
        duty = 100 - (fraction * self.duty_span + self.duty_min)
        PWM.set_duty_cycle(self.servo_pin, duty)

    def IncreaseAngle(self, angle):
        """Rotate *angle* degrees forward from the current position."""
        self.current_angle = self.current_angle + angle
        self.SetAngle(self.current_angle)

    def DecreaseAngle(self, angle):
        """Rotate *angle* degrees back from the current position."""
        self.current_angle = self.current_angle - angle
        self.SetAngle(self.current_angle)

    def StopServo(self):
        """Stop PWM output on the pin."""
        PWM.stop(self.servo_pin)
|
sahdman/Plane
|
Servo.py
|
Python
|
gpl-3.0
| 804
|
# -*- encoding: utf-8 -*-
"""A utility that tries saved genetic tests and removes those failing"""
import asyncio
import yaml
from pathlib import Path
from logzero import logger
from rizza import entity_tester
from rizza import genetic_tester
def genetic_prune(conf, entity='All'):
    """Check all saved genetic_tester tests for an entity, prune failures.

    With entity='All', recurses once per known entity.  Otherwise loads
    data/genetic_tests/<entity>.yaml, re-runs every 'positive' test, and
    rewrites the file with the failing entries removed.
    """
    if entity == 'All':
        # Fan out: one recursive call per entity known to the tester.
        for target in list(entity_tester.EntityTester.pull_entities()):
            genetic_prune(conf, target)
    else:
        test_file = conf.base_dir.joinpath(
            'data/genetic_tests/{}.yaml'.format(entity))
        logger.debug('Current target file: {}'.format(test_file))
        to_remove = []
        # The >10-byte size check skips effectively-empty yaml files.
        if test_file.exists() and test_file.stat().st_size > 10:
            logger.debug('Beginning tests for {}'.format(entity))
            # NOTE(review): yaml.load without an explicit Loader is unsafe
            # on untrusted files (and deprecated in PyYAML >= 5.1).
            tests = yaml.load(test_file.open('r'))
            # Each key is a "<entity> <method> <mode>" triple.
            for test in tests:
                ent, method, mode = test.split(' ')
                if mode == 'positive':
                    logger.debug('Running test {}'.format(method))
                    # run_best() returns -1 when the saved test fails.
                    result = genetic_tester.GeneticEntityTester(
                        conf, entity, method
                    ).run_best()
                    if result == -1:
                        logger.debug('{} failed.'.format(test))
                        to_remove.append(test)
                    else:
                        logger.debug('{} passed.'.format(test))
            for test in to_remove:
                logger.warning('Removing {} from {}'.format(test, test_file))
                # NOTE(review): del by key assumes `tests` is a mapping keyed
                # by the test string -- confirm the saved yaml structure.
                del tests[test]
            # Delete then rewrite the file with the surviving tests.
            logger.debug('Deleting file {}'.format(test_file))
            test_file.unlink()
            logger.debug('Writing tests to {}'.format(test_file))
            yaml.dump(tests, test_file.open('w+'), default_flow_style=False)
            logger.info('Done pruning {}'.format(entity))
        # If pruning (or a previous run) left a near-empty file, drop it.
        if test_file.exists() and test_file.stat().st_size < 10:
            logger.warning('Deleting empty file {}'.format(test_file))
            test_file.unlink()
async def _async_prune(conf, entity, loop, sem):
    """Prune one entity without blocking the event loop.

    The semaphore caps how many executor jobs run concurrently; the
    blocking genetic_prune call is pushed onto the loop's default
    thread-pool executor (the None argument selects it).
    """
    async with sem:
        await loop.run_in_executor(None, genetic_prune, conf, entity)
async def _async_prune_all(conf, loop, sem):
    """Fan out one bounded prune task per known entity and await them all."""
    entities = list(entity_tester.EntityTester.pull_entities())
    pending = [
        asyncio.ensure_future(_async_prune(conf, name, loop, sem))
        for name in entities
    ]
    await asyncio.wait(pending)
def async_genetic_prune(conf, entity='All', async_limit=100):
    """Prune saved genetic tests, concurrently when entity='All'.

    A single named entity is pruned synchronously; 'All' drives one
    executor-backed task per entity, at most *async_limit* at a time.
    """
    if entity == 'All':
        sem = asyncio.Semaphore(async_limit)
        event_loop = asyncio.get_event_loop()
        event_loop.run_until_complete(_async_prune_all(conf, event_loop, sem))
        event_loop.close()
    else:
        genetic_prune(conf, entity)
|
JacobCallahan/rizza
|
rizza/helpers/prune.py
|
Python
|
gpl-3.0
| 2,971
|
# Save locations and timestamp formats for the copy jobs.
# NOTE(review): no leading '/' here, so this resolves relative to the CWD,
# unlike every other *_SAVE_DIR below -- likely a typo; confirm.
DD_SAVE_DIR = 'home/pi/'
# NOTE(review): '%H:%S' is hour:second -- '%M' (minutes) may have been intended.
DD_TIME_FORMAT = '%H:%S-%d-%m-%Y'
# Mount point used for removable media.
MOUNT_DIR = '/mnt'
FC_SAVE_DIR = '/home/pi/'
# NOTE(review): '%S:%H' is second:hour -- verify the intended ordering.
FC_TIME_FORMAT = '%S:%H-%d-%m-%Y'
POP_INDEX = 1
# 0 disables debug behavior.
DEBUG = 0
PLANT_LOAD_DIR = '/home/pi/pruebas/'
PLANT_SAVE_DIR = '/'
|
Daklon/autocopy
|
settings.py
|
Python
|
gpl-3.0
| 220
|
from setuptools import setup

# Packaging manifest for the geovalidation Flask server.
# Bug fix: the original file was missing the comma between the
# install_requires list and the author= keyword, which made the whole
# setup.py a SyntaxError.
setup(
    name='geovalidation.server',
    version='0.5',
    long_description="Flask-based server to validate GIS datasets (with prepair and val3dity).",
    packages=['geovalidation'],
    include_package_data=True,
    zip_safe=False,
    install_requires=[
        'Flask>=1.1',
        'Jinja2>=2.7.2',
        'Werkzeug>=0.9.4',
        'celery>=3.1.11',
        'redis>=2.9.1',
        'lxml>=3.3.3',
        'subprocess32>=3.2.6',
        'cjio>=0.5',
    ],
    author='Hugo Ledoux',
    author_email='h.ledoux@tudelft.nl'
)
|
tudelft3d/geovalidation.server
|
setup.py
|
Python
|
gpl-3.0
| 590
|
#!/usr/bin/python
import os
import sys
import urllib
from gi.repository import Clutter, ClutterX11, Mx, Lomo, GObject, GLib
class Cover(Clutter.Box):
    """Cover-art widget that crossfades to new artwork.

    Bound to a Lomo.Player through the 'lomo-player' GObject property;
    refreshes whenever the player's current stream changes.
    """
    __gtype_name__ = 'GlosseCover'
    #
    # Member, setter and getter for the 'lomo-player' property
    #
    def _get_lomo_player_prop(self):
        return self._lomo_player_prop
    def _set_lomo_player_prop(self, lomo_player):
        self._lomo_player_prop = lomo_player
        if not lomo_player:
            raise Exception('No lomo-player')
        # Re-fetch the artwork every time the current stream changes.
        lomo_player.connect('notify::current', lambda f, t: self._sync_from_model())
        self._sync_from_model()
    lomo_player = GObject.property (type = Lomo.Player,
        setter = _set_lomo_player_prop, getter = _get_lomo_player_prop,
        flags = GObject.PARAM_READWRITE)
    def __init__(self, *args, **kwargs):
        # BinLayout stacks successive textures on top of each other, which
        # is what makes the crossfade in set_from_file possible.
        self._layout = Clutter.BinLayout()
        Clutter.Box.__init__(self, *args, layout_manager = self._layout, **kwargs)
        self.set_property('lomo-player', kwargs['lomo_player'])
        # Parallel lists: the texture actors currently stacked, and the
        # fade-in timeline belonging to each.
        self._actors = []
        self._timelines = []
        self.set_from_file(os.path.join(os.path.dirname(__file__), 'cover-default.png'))
    def _sync_from_model(self):
        """
        Sync data from model
        """
        lomo_player = self.get_property('lomo-player')
        if not lomo_player:
            raise Exception('Missing model')
        stream = lomo_player.get_nth_stream(lomo_player.get_current())
        art = stream.get_extended_metadata('art-data')
        # Only local file:/// URIs are handled; strip the scheme, keeping
        # the leading slash of the absolute path.
        if type(art) == str and art.startswith('file:///'):
            self.set_from_file(urllib.unquote(art[7:]))
    def set_from_file(self, filename):
        """Fade in *filename* as a new texture above the current one."""
        try:
            w = self.get_width()
            texture = Clutter.Texture(
                filename = filename,
                sync_size = True,
                keep_aspect_ratio = True,
                opacity = 0x00)
        except Exception as e:
            # Unreadable/invalid image: keep showing the current cover.
            print repr(e)
            return
        # New texture fades in over one second ...
        timeline = Clutter.Timeline(duration = 1000)
        texture.animate_with_timelinev(Clutter.AnimationMode.LINEAR, timeline,
            ("opacity",), (0xff,))
        timeline.connect('completed', self.timeline_completed_cb)
        # ... while the previous topmost texture fades out.
        if len(self._actors) > 0:
            t = self._actors[-1]
            t.animatev(Clutter.AnimationMode.LINEAR, 1000,
                ("opacity",) , (0x00,))
        self._actors.append(texture)
        self._timelines.append(timeline)
        self._layout.add(texture,
            Clutter.BinAlignment.CENTER, Clutter.BinAlignment.CENTER)
        texture.show()
    def timeline_completed_cb(self, timeline):
        """Once a fade-in finishes, drop the texture underneath it."""
        try:
            index = self._timelines.index(timeline)
        except ValueError:
            print "Invalid timeline"
            return
        if index == 0:
            # Nothing below the first texture to clean up.
            return
        index = index - 1
        actor = self._actors[index]
        self.remove_actor(actor)
        self._actors.pop(index)
        self._timelines.pop(index)
class Seek(Clutter.Box):
    """Seek bar: elapsed label, slider, and total-time label.

    While playing, a 500 ms GLib timeout keeps the display updated; the
    _inhibitor flag stops model-driven slider updates from being echoed
    back to the player as seek requests.
    """
    __gtype_name__ = 'GlosseSeek'
    def _get_lomo_player_prop(self):
        return self._lomo_player_prop
    def _set_lomo_player_prop(self, lomo_player):
        if not lomo_player:
            raise Exception('Missing lomo')
        self._lomo_player_prop = lomo_player
        # Any property change on the player triggers a display refresh.
        lomo_player.connect('notify', lambda a, b: self._update_from_model())
        self._update_from_model()
    lomo_player = GObject.property(type = Lomo.Player,
        getter = _get_lomo_player_prop, setter = _set_lomo_player_prop,
        flags = GObject.PARAM_READWRITE)
    def __init__(self, *args, **kwargs):
        # _updater_id: GLib source id of the periodic refresh (0 = none).
        self._updater_id = 0
        self._inhibitor = False
        self._slider = None
        layout = Clutter.TableLayout()
        super(Seek, self).__init__(*args, layout_manager = layout, **kwargs)
        white = Clutter.Color()
        white.from_string('#ffffffff')
        # Placeholder texts are overwritten by _update_from_model.
        self._curr = Clutter.Text(text = '1:23', color = white)
        layout.pack(self._curr, 0, 0)
        self._slider = Mx.Slider()
        layout.pack(self._slider, 1, 0)
        self._total = Clutter.Text(text = '5:14', color = white)
        layout.pack(self._total, 2, 0)
        self._slider.connect('notify::value', self._on_notify_value)
    def _on_notify_value(self, widget, prop):
        """User moved the slider: translate its 0..1 value into a seek."""
        if self._inhibitor:
            # Change came from _update_from_model, not from the user.
            return
        lomo = self.get_property('lomo-player')
        pos = lomo.get_length() * self._slider.get_value()
        lomo.set_position(pos)
    def _update_from_model(self):
        """Refresh slider and labels from the player's current position."""
        lomo = self.get_property('lomo-player')
        if not lomo:
            raise Exception('Missing model')
        self._inhibitor = True
        state = lomo.get_state()
        #print "State %s ID: %d" % (repr(Lomo.State.PLAY), self._updater_id )
        # Start the periodic refresh when playback starts, stop it otherwise.
        if state == Lomo.State.PLAY and (self._updater_id == 0):
            self._updater_id = GLib.timeout_add(500, self._update_from_model_timeout_helper)
        elif state != Lomo.State.PLAY and self._updater_id > 0:
            GLib.source_remove(self._updater_id)
            self._updater_id = 0
        pos = lomo.get_position()
        if pos == -1:
            # No position available (e.g. nothing loaded yet).
            self._slider.set_value(0)
            self._curr.set_text('-:--')
            self._total.set_text('-:--')
        else:
            # NOTE(review): leftover debug print (Python 2 statement).
            print pos
            # Positions appear to be nanoseconds -- confirm against Lomo docs.
            secs = pos / 1e9
            total = lomo.get_length() / 1e9
            self._slider.set_value(pos / float(lomo.get_length()))
            self._curr.set_text("%d:%02d" % (secs / 60, secs % 60))
            self._total.set_text("%d:%02d" % (total / 60, total % 60))
        self._inhibitor = False
    def _update_from_model_timeout_helper(self):
        # Returning True keeps the GLib timeout source alive.
        self._update_from_model()
        return True
class Controls(Clutter.Box):
    """Previous / play-pause / next button strip bound to a Lomo.Player.

    The play/pause icon tracks the player's state via 'notify::state'.
    Fix: corrected the typo in the unknown-state error message
    ('Unknow state' -> 'Unknown state').
    """
    __gtype_name__ = 'GlosseControls'

    def _set_lomo_player_prop(self, lomo):
        self._lomo_prop = lomo
        if not lomo:
            raise Exception('No lomo-player')
        # Keep the play/pause icon in sync with playback state changes.
        lomo.connect('notify::state', lambda l, state: self.sync_from_model())
        self.sync_from_model()

    def _get_lomo_player_prop(self):
        return self._lomo_prop

    lomo_player = GObject.property (type = Lomo.Player,
        setter = _set_lomo_player_prop, getter = _get_lomo_player_prop,
        flags = GObject.PARAM_READWRITE)

    def __init__(self, *args, **kwargs):
        layout = Clutter.TableLayout()
        super(Controls, self).__init__(*args, layout_manager = layout, **kwargs)
        # Button id -> themed icon name, packed left to right.
        d = (('previous', 'media-skip-backward' ),
             ('playback', 'media-playback-start'),
             ('next', 'media-skip-forward' ))
        self._buttons = dict()
        for index, (id_, icon_name) in enumerate(d):
            button = Mx.Button()
            button.add_actor(Mx.Icon(
                icon_name = icon_name,
                icon_size = 32))
            button.show_all()
            layout.pack(button, index, 0)
            self._buttons[id_] = button
            self._buttons[id_].connect('clicked', self._button_clicked_cb)
        self.set_property('lomo-player', kwargs['lomo_player'])

    def sync_from_model(self):
        """Update the play/pause button icon from the player state."""
        if not hasattr(self, '_buttons'):
            # The property setter can fire before __init__ built the buttons.
            return
        lomo = self.get_property('lomo-player')
        if not lomo:
            raise Exception('Missing model')
        state = lomo.get_state()
        if state == Lomo.State.PLAY:
            icon_name = 'media-playback-pause'
        elif state in (Lomo.State.STOP, Lomo.State.PAUSE):
            icon_name = 'media-playback-start'
        else:
            raise Exception('Unknown state')
        self._buttons['playback'].set_icon_name(icon_name)

    def _button_clicked_cb(self, w):
        """Dispatch previous / next / toggle-playback for the clicked button."""
        lomo = self.get_property('lomo-player')
        if lomo is None:
            raise Exception('No lomo')
        if w == self._buttons['previous']:
            i = lomo.get_previous()
            if i < 0:
                # Already at the first track.
                return
            lomo.set_current(i)
        elif w == self._buttons['next']:
            i = lomo.get_next()
            if i < 0:
                # Already at the last track.
                return
            lomo.set_current(i)
        else:
            lomo.toggle_playback_state()
class App(Clutter.Stage):
    """Main stage: full-size cover art plus an auto-hiding bottom bar.

    Fixes: _set_lomo propagated the player to children through a 'lomo'
    property, but Cover/Controls define it as 'lomo-player' (so the call
    raised); it also built an unused dict, now removed.
    """
    __gtype_name__ = 'GlosseApp'
    _lomo = None
    _controls = None
    _cover = None

    def _set_lomo(self, lomo):
        self._lomo = lomo
        # Children expose the player as the 'lomo-player' GObject property.
        # NOTE(review): _seek is not re-targeted here (matches the original
        # behavior); it only receives the player at construction time.
        for widget in (self._cover, self._controls):
            if widget:
                widget.set_property('lomo-player', lomo)

    def _get_lomo(self):
        return self._lomo

    lomo = GObject.property(type = Lomo.Player,
        setter = _set_lomo, getter = _get_lomo)

    def __init__(self, uris):
        Clutter.Stage.__init__(self,
            use_alpha = True,
            user_resizable = True,
            min_height = 200,
            min_width = 200)
        self.set_property('lomo', Lomo.Player(random = True, repeat = True))
        self.insert_songs(uris, 0)
        bg_color = Clutter.Color()
        bg_color.from_string('#000000ff')
        self.set_color(bg_color)
        # Setup main container, sized to always match the stage.
        main_layout = Clutter.BinLayout()
        main_box = Clutter.Box(layout_manager = main_layout)
        main_box.add_constraint(Clutter.BindConstraint.new(self, Clutter.BindCoordinate.SIZE, 0.0))
        main_box.show()
        self.add_actor(main_box)
        # Setup cover (or background)
        self._cover = Cover(lomo_player = self._get_lomo())
        self._cover.show()
        main_layout.add(self._cover, Clutter.BinAlignment.FILL, Clutter.BinAlignment.FILL)
        # Bottom bar starts fully transparent; enter/leave events fade it.
        bottom_layout = Clutter.TableLayout()
        self._bottom_box = Clutter.Box(opacity = 0x00, layout_manager = bottom_layout)
        # Setup controls
        self._controls = Controls(lomo_player = self._get_lomo())
        bottom_layout.pack(self._controls, 0, 0)
        # Setup seek
        self._seek = Seek(lomo_player = self._get_lomo())
        bottom_layout.pack(self._seek, 0, 1)
        # Add bottom_box
        main_layout.add(self._bottom_box, Clutter.BinAlignment.CENTER, Clutter.BinAlignment.END)
        self.connect('enter-event', self.fade_in)
        self.connect('leave-event', self.fade_out)

    def insert_songs(self, songs, index):
        """Insert the given song paths/URIs into the playlist at *index*."""
        model = self.get_property('lomo')
        for song in songs:
            model.insert_uri(Lomo.create_uri(song), index)

    def fade_in(self, actor, ev):
        """Reveal the bottom bar when the pointer enters the stage."""
        self._bottom_box.animatev(Clutter.AnimationMode.EASE_OUT_EXPO, 500,
            ("opacity",), (0xff,))

    def fade_out(self, actor, ev):
        """Hide the bottom bar when the pointer leaves the stage."""
        self._bottom_box.animatev(Clutter.AnimationMode.EASE_OUT_EXPO, 500,
            ("opacity",), (0x00,))
if __name__ == '__main__':
    Lomo.init(0, "")
    # Request an ARGB X11 visual -- needed for the stage's use_alpha
    # transparency to actually composite.
    ClutterX11.set_use_argb_visual(True)
    Clutter.init([])
    # Every command-line argument is treated as a song path/URI.
    app = App(sys.argv[1:])
    app.connect('destroy', lambda w: Clutter.main_quit())
    app.show()
    Clutter.main()
|
ldotlopez/eina
|
toys/clutter-ui.py
|
Python
|
gpl-3.0
| 9,266
|
#!/usr/bin/env python
"""
DragonPy - base memory info
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:created: 2013 by Jens Diemer - www.jensdiemer.de
:copyleft: 2013 by the MC6809 team, see AUTHORS for more details.
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
import sys
class BaseMemoryInfo:
    """Resolve addresses against a subclass-supplied MEM_INFO table.

    Subclasses define MEM_INFO as a sequence of (start, end, description)
    tuples; ranges may overlap.  Formatted descriptions are emitted
    through the out_func callable supplied at construction time.
    """

    def __init__(self, out_func):
        # Sink for formatted lines (e.g. print or a logger method).
        self.out_func = out_func

    def get_shortest(self, addr):
        """Return the description of the narrowest range containing addr."""
        candidates = [
            entry for entry in self.MEM_INFO
            if entry[0] <= addr <= entry[1]
        ]
        if not candidates:
            return f"${addr:x}: UNKNOWN"
        # min() keeps the first narrowest entry, preserving table order
        # on ties (same behavior as the original running-minimum loop).
        start, end, txt = min(candidates, key=lambda e: abs(e[1] - e[0]))
        if start == end:
            return f"${addr:x}: {txt}"
        return f"${addr:x}: ${start:x}-${end:x} - {txt}"

    def __call__(self, addr, info="", shortest=True):
        """Emit description(s) for addr; all matching ranges when shortest=False."""
        if shortest:
            line = self.get_shortest(addr)
            self.out_func(f"{info}: {line}" if info else line)
            return
        matches = [
            (start, end, txt)
            for start, end, txt in self.MEM_INFO
            if start <= addr <= end
        ]
        if not matches:
            self.out_func(f"{info} ${addr:x}: UNKNOWN")
            return
        self.out_func(f"{info} ${addr:x}:")
        for start, end, txt in matches:
            if start == end:
                self.out_func(f" * ${start:x} - {txt}")
            else:
                self.out_func(f" * ${start:x}-${end:x} - {txt}")
|
6809/MC6809
|
MC6809/core/memory_info.py
|
Python
|
gpl-3.0
| 1,835
|
# -*- encoding: utf-8 -*-
# vim: ts=4 sw=4 expandtab ai
"""Test class for Medium CLI"""
from ddt import ddt
from fauxfactory import gen_string, gen_alphanumeric
from robottelo.cli.factory import CLIFactoryError
from robottelo.test import CLITestCase
from robottelo.common.decorators import data, run_only_on
from robottelo.cli.factory import make_medium, make_os
from robottelo.cli.medium import Medium
# Fake mirror URL template; $major/$minor/$arch look like Foreman medium-path
# variables and %s takes an OS family name.  Not referenced in the tests
# below -- presumably used by other modules; confirm before removing.
URL = "http://mirror.fakeos.org/%s/$major.$minor/os/$arch"
# OS family names usable with the URL template above.
OSES = [
    'Archlinux',
    'Debian',
    'Gentoo',
    'Redhat',
    'Solaris',
    'Suse',
    'Windows',
]
@run_only_on('sat')
@ddt
class TestMedium(CLITestCase):
    """CLI tests for installation media: create, delete, OS association, update."""

    # Names cover several character classes to exercise input validation.
    @data({'name': gen_string("latin1", 10)},
          {'name': gen_string("utf8", 10)},
          {'name': gen_string("alpha", 10)},
          {'name': gen_string("alphanumeric", 10)},
          {'name': gen_string("numeric", 10)},
          {'name': gen_string("html", 10)})
    def test_positive_create_1(self, test_data):
        """@Test: Check if Medium can be created
        @Feature: Medium - Positive Create
        @Assert: Medium is created
        """
        new_obj = make_medium(test_data)
        # Can we find the new object?
        result = Medium.info({'id': new_obj['id']})
        self.assertEqual(result.return_code, 0, "Failed to create object")
        self.assertEqual(len(result.stderr), 0,
                         "There should not be an exception here")
        self.assertGreater(
            len(result.stdout), 0, "Failed to fetch medium")
        self.assertEqual(new_obj['name'],
                         result.stdout['name'])

    @data({'name': gen_string("latin1", 10)},
          {'name': gen_string("utf8", 10)},
          {'name': gen_string("alpha", 10)},
          {'name': gen_string("alphanumeric", 10)},
          {'name': gen_string("numeric", 10)},
          {'name': gen_string("html", 10)})
    def test_positive_delete_1(self, test_data):
        """@Test: Check if Medium can be deleted
        @Feature: Medium - Positive Delete
        @Assert: Medium is deleted
        """
        new_obj = make_medium(test_data)
        # Can we find the new object?
        result = Medium.info({'id': new_obj['id']})
        self.assertEqual(result.return_code, 0)
        self.assertEqual(len(result.stderr), 0)
        self.assertEqual(new_obj['name'], result.stdout['name'])
        return_value = Medium.delete({'id': new_obj['id']})
        self.assertEqual(return_value.return_code, 0, "Deletion failed")
        self.assertEqual(
            len(return_value.stderr), 0, "There should not be an error here")
        # Can we find the object?  Lookup must now fail.
        result = Medium.info({'id': new_obj['id']})
        self.assertNotEqual(
            result.return_code, 0, "Medium should be deleted")
        self.assertGreater(len(result.stderr), 0,
                           "There should be an exception here")
        self.assertEqual(
            len(result.stdout), 0, "Output should be blank.")

    def test_addoperatingsystem_medium(self):
        """@Test: Check if Medium can be associated with operating system
        @Feature: Medium - Add operating system
        @Assert: Operating system added
        """
        try:
            medium = make_medium({'name': gen_alphanumeric(6)})
            os = make_os()
        except CLIFactoryError as err:
            self.fail(err)
        args = {
            'id': medium['id'],
            'operatingsystem-id': os['id'],
        }
        result = Medium().add_operating_system(args)
        self.assertEqual(result.return_code, 0,
                         "Could not associate the operating system to media")
        self.assertEqual(len(result.stderr), 0,
                         "There should not be an exception here")

    def test_removeoperatingsystem_medium(self):
        """@Test: Check if operating system can be removed from media
        @Feature: Medium - Remove operating system
        @Assert: Operating system removed
        """
        try:
            medium = make_medium({'name': gen_alphanumeric(6)})
            os = make_os()
        except CLIFactoryError as err:
            self.fail(err)
        args = {
            'id': medium['id'],
            'operatingsystem-id': os['id'],
        }
        # Associate first so there is something to remove.
        result = Medium().add_operating_system(args)
        self.assertEqual(result.return_code, 0,
                         "Could not associate the operating system to media")
        self.assertEqual(len(result.stderr), 0,
                         "There should not be an exception here")
        result = Medium().info({'id': medium['id']})
        self.assertIn(os['title'],
                      result.stdout['operating-systems'],
                      "Operating system is not added to the media")
        result = Medium().remove_operating_system(args)
        self.assertEqual(result.return_code, 0,
                         "Removed the operating system from media")
        self.assertEqual(len(result.stderr), 0,
                         "There should not be an exception here")
        result = Medium().info({'id': medium['id']})
        # NOTE(review): the presence check above uses os['title'] but this
        # absence check uses os['name']; if info lists titles, this can
        # pass vacuously -- confirm the intended key.
        self.assertNotIn(os['name'],
                         result.stdout['operating-systems'],
                         "Operating system is not removed from the media")

    def test_medium_update(self):
        """@Test: Check if medium can be updated
        @Feature: Medium - Update medium
        @Assert: Medium updated
        """
        new_name = gen_alphanumeric(6)
        try:
            medium = make_medium({'name': gen_alphanumeric(6)})
        except CLIFactoryError as e:
            self.fail(e)
        args = {
            'name': medium['name'],
            'new-name': new_name,
        }
        result = Medium().update(args)
        self.assertEqual(result.return_code, 0,
                         "Could not update media")
        self.assertEqual(len(result.stderr), 0,
                         "There should not be an exception here")
        result = Medium().info({'id': medium['id']})
        self.assertEqual(result.stdout['name'], new_name,
                         "Medium name was not updated")
|
oshtaier/robottelo
|
tests/foreman/cli/test_medium.py
|
Python
|
gpl-3.0
| 6,174
|
import os
import json
import re
from BeautifulSoup import BeautifulSoup
from psrd.rules import write_rules
from psrd.files import char_replace
from psrd.universal import parse_universal, print_struct
from psrd.sections import ability_pass, is_anonymous_section, has_subsections, entity_pass, quote_pass
def core_structure_pass(section, filename):
    """Split the core spell-list page into rules vs. per-class lists.

    Children whose names end in 'Spells' or 'Formulae' are the spell
    lists; the 'Spells by Class' index is dropped; everything else stays
    behind as rules content.
    """
    section['name'] = 'Spell Lists'
    remaining = []
    spell_lists = []
    for child in section['sections']:
        title = child['name']
        if title.endswith(('Spells', 'Formulae')):
            spell_lists.append(child)
        elif title != 'Spells by Class':
            remaining.append(child)
    section['sections'] = remaining
    return section, spell_lists
def advanced_class_guide_structure_pass(section, filename):
    """Advanced Class Guide: children 7+ are the spell lists; the first
    six stay behind as the rules section (trimmed in place)."""
    children = section['sections']
    spell_lists = children[6:]
    del children[6:]
    return section, spell_lists
def advanced_structure_pass(section, filename):
    """APG / Ultimate Combat layout: the first child (renamed to
    'Spell Lists') is the rules section; every remaining child whose
    name ends in 'Spells' is a spell list."""
    top = section['sections'].pop(0)
    top['name'] = "Spell Lists"
    spell_lists = [
        child for child in section['sections']
        if child['name'].endswith('Spells')
    ]
    return top, spell_lists
def ultimate_magic_structure_pass(section, filename):
    """Ultimate Magic layout: drop the lead-in child; every remaining
    child is a spell list.  There is no rules section (returns None)."""
    del section['sections'][0]
    return None, section['sections']
def spell_list_structure_pass(section, filename):
spell_lists = []
if filename == 'spellLists.html' and len(section['sections']) == 18:
section, spell_lists = mythic_structure_pass(section, filename)
elif section['source'] == "Advanced Class Guide":
section, spell_lists = advanced_class_guide_structure_pass(section, filename)
elif filename in ('spellLists.html'):
section, spell_lists = core_structure_pass(section, filename)
elif filename in ('advancedSpellLists.html', 'ultimateCombatSpellLists.html'):
section, spell_lists = advanced_structure_pass(section, filename)
elif filename in ('ultimateMagicSpellLists.html'):
section, spell_lists = ultimate_magic_structure_pass(section, filename)
else:
del section['sections']
print section
return section, spell_lists
def spell_list_name_pass(spell_lists):
    """Flatten per-class list containers into typed, levelled sections.

    Each child section gains 'type', 'class' and 'level' keys derived
    from the container's name and the first digit of its own name.
    Note: in a 'Mythic ...' container only the *first* child is typed
    'mythic_spell_list' because the class name is rewritten in place --
    preserved as-is from the original implementation.
    """
    flattened = []
    for casting_class in spell_lists:
        class_name = casting_class['name']
        class_name = class_name.replace('Spells', '').strip()
        class_name = class_name.replace('Formulae', '').strip()
        for listing in casting_class['sections']:
            listing['type'] = 'spell_list'
            if 'Mythic' in class_name:
                class_name = class_name.replace('Mythic', '').strip()
                listing['type'] = 'mythic_spell_list'
            listing['class'] = class_name
            digit = re.search(r'(\d)', listing['name'])
            listing['level'] = int(digit.group(0))
            flattened.append(listing)
    return flattened
def spell_pass(spell_list):
    """Flatten a spell list's parsed sections into a 'spells' array.

    Sections arrive in three shapes: nested grouping nodes, flat rows
    (Mythic Adventures / Advanced Race Guide sources), or header rows
    with empty bodies followed by spell rows.  For the Elementalist
    Wizard list the grouping headers are descriptors, not schools.
    """
    spells = []
    school = None
    descriptor = None
    school_type = True
    if spell_list['class'] in ['Elementalist Wizard']:
        school_type = False
    for s in spell_list['sections']:
        if s.has_key('sections'):  # Python 2 dict.has_key
            # Grouping node: its name is the school/descriptor and its
            # children are the actual spell entries.
            if school_type:
                school = s['name']
            else:
                descriptor = s['name']
            for ss in s['sections']:
                soup = BeautifulSoup(ss['text'])
                spells.append(create_spell(ss['name'], soup, school, descriptor))
        elif spell_list['source'] in ('Mythic Adventures', 'Advanced Race Guide'):# spell_list['type'] == 'mythic_spell_list':
            # Flat row: no school/descriptor grouping in these books.
            soup = BeautifulSoup(s['text'])
            spells.append(create_spell(s['name'], soup))
        else:
            soup = BeautifulSoup(s['text'])
            if ''.join(soup.findAll(text=True)) == '':
                # Empty body: this row is a school/descriptor header.
                if school_type:
                    school = s['name']
                else:
                    descriptor = s['name']
            else:
                spells.append(create_spell(s['name'], soup, school, descriptor))
    spell_list['spells'] = spells
    del spell_list['sections']
    return spell_list
def create_spell(name, soup, school=None, descriptor=None):
    """Build a spell dict from one parsed list row.

    A <sup> child carries annotation characters (stored under
    'material' -- presumably component flags; confirm); the remaining
    text becomes the description with smart quotes and en-dashes
    normalized to ASCII.
    """
    if name.endswith(":"):
        name = name[:-1]
    comps = ""
    if soup.find('sup'):
        sup = soup.find('sup')
        comps = sup.renderContents()
        # Strip the marker so it doesn't leak into the description text.
        sup.replaceWith('')
    # "F, M"-style lists split on commas; a bare run like "FM" becomes
    # one entry per character.
    if comps.find(",") > -1:
        comps = [c.strip() for c in comps.split(",")]
    else:
        comps = list(comps)
    desc = ''.join(soup.findAll(text=True))
    if desc.startswith(":"):
        desc = desc[1:].strip()
    spell = {'name': name}
    if desc.strip() != '':
        desc = desc.strip()
        # Normalize typographic punctuation to plain ASCII.
        desc = desc.replace("“", '"')
        desc = desc.replace("”", '"')
        desc = desc.replace("–", '-')
        spell['description'] = desc
    if len(comps) > 0:
        spell['material'] = comps
    if school:
        spell['school'] = school
    if descriptor:
        spell['descriptor'] = descriptor
    return spell
def parse_spell_lists(filename, output, book):
    """Parse an HTML spell-list page and write one JSON file per list.

    Leftover non-list content is handed to write_rules.
    """
    struct = parse_universal(filename, output, book)
    struct = quote_pass(struct)
    struct = entity_pass(struct)
    rules, spell_lists = spell_list_structure_pass(struct, os.path.basename(filename))
    spell_lists = spell_list_name_pass(spell_lists)
    for spell_list in spell_lists:
        sl = spell_pass(spell_list)
        # Progress output (Python 2 print statement).
        print "%s: %s" %(sl['source'], sl['name'])
        # Note: rebinds the `filename` parameter to the output path.
        filename = create_spell_list_filename(output, book, sl)
        fp = open(filename, 'w')
        json.dump(sl, fp, indent=4)
        fp.close()
    if rules:
        write_rules(output, rules, book, "spell_lists")
def create_spell_list_filename(output, book, spell_list):
    """Return <output>/<book>/spell_lists/<class>-<level>.json (absolute).

    char_replace sanitizes path components; unicode() is Python 2.
    """
    title = char_replace(book) + "/spell_lists/" + char_replace(spell_list['class']) + "-" + unicode(spell_list['level'])
    return os.path.abspath(output + "/" + title + ".json")
|
devonjones/PSRD-Parser
|
src/psrd/spell_lists.py
|
Python
|
gpl-3.0
| 5,140
|
# Plotting performance of string_subst_.py scripts
# bar chart of relative comparison with variances as error bars
import numpy as np
import matplotlib.pyplot as plt
# Relative speed-ups per script; the middle entry (1, variance 0) is the
# baseline the others are measured against.
performance = [10.3882388499416,1,10.3212281215746]
variance = [0.790435196936213,0,0.827207394592818]
scripts = ['string_subst_1.py', 'string_subst_2.py', 'string_subst_3.py']
x_pos = np.arange(len(scripts))
plt.bar(x_pos, performance, yerr=variance, align='center', alpha=0.5)
plt.xticks(x_pos, scripts)
# Dashed line marks the baseline (no speed-up).
plt.axhline(y=1, linestyle='--', color='black')
plt.ylim([0,12])
plt.ylabel('rel. performance gain')
plt.title('String substitution - Speed improvements')
#plt.show()
plt.savefig('PNGs/string_subst_bar.png')
|
pswaminathan/python_efficiency_tweaks
|
plots/plot_string_subst_bar.py
|
Python
|
gpl-3.0
| 686
|
##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os, shutil, sys, tempfile, urllib, urllib2, subprocess
from optparse import OptionParser
if sys.platform == 'win32':
    def quote(c):
        """Wrap an argument in double quotes when it contains a space.

        Works around os.spawn*'s naive argument joining on Windows.
        """
        return '"%s"' % c if ' ' in c else c
else:
    # POSIX spawning passes argv through untouched; no quoting needed.
    quote = str
# See zc.buildout.easy_install._has_broken_dash_S for motivation and comments.
stdout, stderr = subprocess.Popen(
[sys.executable, '-Sc',
'try:\n'
' import ConfigParser\n'
'except ImportError:\n'
' print 1\n'
'else:\n'
' print 0\n'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
has_broken_dash_S = bool(int(stdout.strip()))
# In order to be more robust in the face of system Pythons, we want to
# run without site-packages loaded. This is somewhat tricky, in
# particular because Python 2.6's distutils imports site, so starting
# with the -S flag is not sufficient. However, we'll start with that:
if not has_broken_dash_S and 'site' in sys.modules:
# We will restart with python -S.
args = sys.argv[:]
args[0:0] = [sys.executable, '-S']
args = map(quote, args)
os.execv(sys.executable, args)
# Now we are running with -S. We'll get the clean sys.path, import site
# because distutils will do it later, and then reset the path and clean
# out any namespace packages from site-packages that might have been
# loaded by .pth files.
clean_path = sys.path[:]
import site # imported because of its side effects
sys.path[:] = clean_path
for k, v in sys.modules.items():
if k in ('setuptools', 'pkg_resources') or (
hasattr(v, '__path__') and
len(v.__path__) == 1 and
not os.path.exists(os.path.join(v.__path__[0], '__init__.py'))):
# This is a namespace package. Remove it.
sys.modules.pop(k)
is_jython = sys.platform.startswith('java')
setuptools_source = 'https://bitbucket.org/pypa/setuptools/raw/0.7.2/ez_setup.py'
distribute_source = 'http://python-distribute.org/distribute_setup.py'
# parsing arguments
def normalize_to_url(option, opt_str, value, parser):
    """optparse callback: normalize *value* to a URL on parser.values.

    Bare filesystem paths become file:// URLs, a --download-base value
    always gets a trailing slash, and an empty value is stored as None.
    The destination attribute is derived from the option string.
    """
    if not value:
        value = None
    else:
        if '://' not in value:
            # Not URL-shaped: treat it as a local path and convert it.
            path = os.path.abspath(os.path.expanduser(value))
            value = 'file://%s' % (urllib.pathname2url(path),)
        if opt_str == '--download-base' and not value.endswith('/'):
            # A download base needs the trailing slash for URL joining.
            value += '/'
    dest = opt_str[2:].replace('-', '_')
    setattr(parser.values, dest, value)
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --setup-source and --download-base to point to
local resources, you can keep this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("-v", "--version", dest="version",
help="use a specific zc.buildout version")
parser.add_option("-d", "--distribute",
action="store_true", dest="use_distribute", default=False,
help="Use Distribute rather than Setuptools.")
parser.add_option("--setup-source", action="callback", dest="setup_source",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or file location for the setup file. "
"If you use Setuptools, this will default to " +
setuptools_source + "; if you use Distribute, this "
"will default to " + distribute_source + "."))
parser.add_option("--download-base", action="callback", dest="download_base",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or directory for downloading "
"zc.buildout and either Setuptools or Distribute. "
"Defaults to PyPI."))
parser.add_option("--eggs",
help=("Specify a directory for storing eggs. Defaults to "
"a temporary directory that is deleted when the "
"bootstrap script completes."))
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true", default=False,
help=("Normally, if you do not specify a --version, the "
"bootstrap script and buildout gets the newest "
"*final* versions of zc.buildout and its recipes and "
"extensions for you. If you use this flag, "
"bootstrap and buildout will get the newest releases "
"even if they are alphas or betas."))
parser.add_option("-c", None, action="store", dest="config_file",
help=("Specify the path to the buildout configuration "
"file to be used."))
options, args = parser.parse_args()
# if -c was provided, we push it back into args for buildout's main function
if options.config_file is not None:
args += ['-c', options.config_file]
if options.eggs:
eggs_dir = os.path.abspath(os.path.expanduser(options.eggs))
else:
eggs_dir = tempfile.mkdtemp()
if options.setup_source is None:
if options.use_distribute:
options.setup_source = distribute_source
else:
options.setup_source = setuptools_source
if options.accept_buildout_test_releases:
args.append('buildout:accept-buildout-test-releases=true')
args.append('bootstrap')
try:
import pkg_resources
import setuptools # A flag. Sometimes pkg_resources is installed alone.
if not hasattr(pkg_resources, '_distribute'):
raise ImportError
except ImportError:
ez_code = urllib2.urlopen(
options.setup_source).read().replace('\r\n', '\n')
ez = {}
exec ez_code in ez
setup_args = dict(to_dir=eggs_dir, download_delay=0)
if options.download_base:
setup_args['download_base'] = options.download_base
if options.use_distribute:
setup_args['no_fake'] = True
ez['use_setuptools'](**setup_args)
if 'pkg_resources' in sys.modules:
reload(sys.modules['pkg_resources'])
import pkg_resources
# This does not (always?) update the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
cmd = [quote(sys.executable),
'-c',
quote('from setuptools.command.easy_install import main; main()'),
'-mqNxd',
quote(eggs_dir)]
if not has_broken_dash_S:
cmd.insert(1, '-S')
find_links = options.download_base
if not find_links:
find_links = os.environ.get('bootstrap-testing-find-links')
if find_links:
cmd.extend(['-f', quote(find_links)])
if options.use_distribute:
setup_requirement = 'distribute'
else:
setup_requirement = 'setuptools'
ws = pkg_resources.working_set
setup_requirement_path = ws.find(
pkg_resources.Requirement.parse(setup_requirement)).location
env = dict(
os.environ,
PYTHONPATH=setup_requirement_path)
requirement = 'zc.buildout'
version = options.version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
        def _final_version(parsed_version):
            # True when the parsed version contains no pre-release tag.
            # Old setuptools `parse_version` returns a tuple of strings in
            # which pre-release markers start with '*' (e.g. '*a', '*b');
            # the '*final' markers are explicitly allowed.
            for part in parsed_version:
                if (part[:1] == '*') and (part not in _final_parts):
                    return False
            return True
index = setuptools.package_index.PackageIndex(
search_path=[setup_requirement_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
if is_jython:
import subprocess
exitcode = subprocess.Popen(cmd, env=env).wait()
else: # Windows prefers this, apparently; otherwise we would prefer subprocess
exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
if exitcode != 0:
sys.stdout.flush()
sys.stderr.flush()
print ("An error occurred when trying to install zc.buildout. "
"Look above this message for any errors that "
"were output by easy_install.")
sys.exit(exitcode)
ws.add_entry(eggs_dir)
ws.require(requirement)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
if not options.eggs: # clean up temporary egg directory
shutil.rmtree(eggs_dir)
|
kamal-gade/rockstor-core
|
bootstrap.py
|
Python
|
gpl-3.0
| 10,120
|
from __future__ import absolute_import
from django.utils.translation import ugettext_lazy as _
from documents.permissions import PERMISSION_DOCUMENT_TYPE_EDIT
from .permissions import (PERMISSION_METADATA_DOCUMENT_EDIT,
PERMISSION_METADATA_DOCUMENT_ADD, PERMISSION_METADATA_DOCUMENT_REMOVE,
PERMISSION_METADATA_DOCUMENT_VIEW, PERMISSION_METADATA_TYPE_EDIT,
PERMISSION_METADATA_TYPE_CREATE, PERMISSION_METADATA_TYPE_DELETE,
PERMISSION_METADATA_TYPE_VIEW, PERMISSION_METADATA_SET_EDIT,
PERMISSION_METADATA_SET_CREATE, PERMISSION_METADATA_SET_DELETE,
PERMISSION_METADATA_SET_VIEW)
# Navigation-link definitions for the metadata app. Each dict follows the
# project's link convention: 'text' (label), 'view' (URL name), optional
# 'args' (template expression for URL args), 'famfam'/'icon' (icon names)
# and 'permissions' (required to show the link).

# Per-document metadata links (single document and multi-select variants).
metadata_edit = {'text': _(u'edit metadata'), 'view': 'metadata_edit', 'args': 'object.pk', 'famfam': 'xhtml_go', 'permissions': [PERMISSION_METADATA_DOCUMENT_EDIT]}
metadata_view = {'text': _(u'metadata'), 'view': 'metadata_view', 'args': 'object.pk', 'famfam': 'xhtml_go', 'permissions': [PERMISSION_METADATA_DOCUMENT_VIEW], 'children_view_regex': ['metadata']}
metadata_multiple_edit = {'text': _(u'edit metadata'), 'view': 'metadata_multiple_edit', 'famfam': 'xhtml_go', 'permissions': [PERMISSION_METADATA_DOCUMENT_EDIT]}
metadata_add = {'text': _(u'add metadata'), 'view': 'metadata_add', 'args': 'object.pk', 'famfam': 'xhtml_add', 'permissions': [PERMISSION_METADATA_DOCUMENT_ADD]}
metadata_multiple_add = {'text': _(u'add metadata'), 'view': 'metadata_multiple_add', 'famfam': 'xhtml_add', 'permissions': [PERMISSION_METADATA_DOCUMENT_ADD]}
metadata_remove = {'text': _(u'remove metadata'), 'view': 'metadata_remove', 'args': 'object.pk', 'famfam': 'xhtml_delete', 'permissions': [PERMISSION_METADATA_DOCUMENT_REMOVE]}
metadata_multiple_remove = {'text': _(u'remove metadata'), 'view': 'metadata_multiple_remove', 'famfam': 'xhtml_delete', 'permissions': [PERMISSION_METADATA_DOCUMENT_REMOVE]}
# Setup links for metadata *types* (CRUD).
setup_metadata_type_list = {'text': _(u'metadata types'), 'view': 'setup_metadata_type_list', 'famfam': 'xhtml_go', 'icon': 'xhtml.png', 'permissions': [PERMISSION_METADATA_TYPE_VIEW]}
setup_metadata_type_edit = {'text': _(u'edit'), 'view': 'setup_metadata_type_edit', 'args': 'object.pk', 'famfam': 'xhtml', 'permissions': [PERMISSION_METADATA_TYPE_EDIT]}
setup_metadata_type_delete = {'text': _(u'delete'), 'view': 'setup_metadata_type_delete', 'args': 'object.pk', 'famfam': 'xhtml_delete', 'permissions': [PERMISSION_METADATA_TYPE_DELETE]}
setup_metadata_type_create = {'text': _(u'create new'), 'view': 'setup_metadata_type_create', 'famfam': 'xhtml_add', 'permissions': [PERMISSION_METADATA_TYPE_CREATE]}
# Setup links for metadata *sets* (CRUD plus membership management).
setup_metadata_set_list = {'text': _(u'metadata sets'), 'view': 'setup_metadata_set_list', 'famfam': 'table', 'icon': 'table.png', 'permissions': [PERMISSION_METADATA_SET_VIEW]}
setup_metadata_set_edit = {'text': _(u'edit'), 'view': 'setup_metadata_set_edit', 'args': 'object.pk', 'famfam': 'table_edit', 'permissions': [PERMISSION_METADATA_SET_EDIT]}
setup_metadata_set_members = {'text': _(u'members'), 'view': 'setup_metadata_set_members', 'args': 'object.pk', 'famfam': 'table_link', 'permissions': [PERMISSION_METADATA_SET_EDIT]}
setup_metadata_set_delete = {'text': _(u'delete'), 'view': 'setup_metadata_set_delete', 'args': 'object.pk', 'famfam': 'table_delete', 'permissions': [PERMISSION_METADATA_SET_DELETE]}
setup_metadata_set_create = {'text': _(u'create new'), 'view': 'setup_metadata_set_create', 'famfam': 'table_add', 'permissions': [PERMISSION_METADATA_SET_CREATE]}
# Link to edit a document type's default metadata.
setup_document_type_metadata = {'text': _(u'default metadata'), 'view': 'setup_document_type_metadata', 'args': 'document_type.pk', 'famfam': 'xhtml', 'permissions': [PERMISSION_DOCUMENT_TYPE_EDIT]}
|
rosarior/mayan
|
apps/metadata/links.py
|
Python
|
gpl-3.0
| 3,605
|
r"""
==================================================
Feature computation for univariate time series
==================================================
This sub-module provides routines for computing features on univariate time series.
Many functions are improved versions of PyEEG [PYEEG]_ functions. Be careful,
some functions will give different results compared to PyEEG as the maths have been changed to match original definitions.
Have a look at the documentation notes/ source code to know more.
Here a list of the functions that were reimplemented:
* Approximate entropy :func:`~pyrem.univariate.ap_entropy` [RIC00]_
* Fisher information :func:`~pyrem.univariate.fisher_info` [PYEEG]_
* Higuchi fractal dimension :func:`~pyrem.univariate.hfd` [HIG88]_
* Hjorth parameters :func:`~pyrem.univariate.hjorth` [HJO70]_
* Petrosian fractal dimension :func:`~pyrem.univariate.pfd` [PET95]_
* Sample entropy :func:`~pyrem.univariate.samp_entropy` [RIC00]_
* Singular value decomposition entropy :func:`~pyrem.univariate.svd_entropy` [PYEEG]_
* Spectral entropy :func:`~pyrem.univariate.spectral_entropy` [PYEEG]_
.. [PET95] A. Petrosian, Kolmogorov complexity of finite sequences and recognition of different preictal EEG patterns, in ,
Proceedings of the Eighth IEEE Symposium on Computer-Based Medical Systems, 1995, 1995, pp. 212-217.
.. [PYEEG] F. S. Bao, X. Liu, and C. Zhang, PyEEG: An Open Source Python Module for EEG/MEG Feature Extraction,
Computational Intelligence and Neuroscience, vol. 2011, p. e406391, Mar. 2011.
.. [HJO70] B. Hjorth, EEG analysis based on time domain properties,
Electroencephalography and Clinical Neurophysiology, vol. 29, no. 3, pp. 306-310, Sep. 1970.
.. [COS05] M. Costa, A. L. Goldberger, and C.-K. Peng, "Multiscale entropy analysis of biological signals," Phys. Rev. E, vol. 71, no. 2, p. 021906, Feb. 2005.
.. [RIC00] J. S. Richman and J. R. Moorman, "Physiological time-series analysis using approximate entropy and sample entropy,"
American Journal of Physiology - Heart and Circulatory Physiology, vol. 278, no. 6, pp. H2039-H2049, Jun. 2000.
.. [HIG88] T. Higuchi, "Approach to an irregular time series on the basis of the fractal theory," Physica D: Nonlinear Phenomena, vol. 31, no. 2, pp. 277-283, Jun. 1988.
"""
__author__ = 'quentin'
import numpy as np
def _embed_seq(X,tau,de):
N =len(X)
if de * tau > N:
raise ValueError("Cannot build such a matrix, because D * Tau > N")
if tau<1:
raise ValueError("Tau has to be at least 1")
Y=np.zeros((de, N - (de - 1) * tau))
for i in range(de):
Y[i] = X[i *tau : i*tau + Y.shape[1] ]
return Y.T
def _make_cmp(X, M, R, in_range_i, in_range_j):
    """
    Helper for :func:`ap_entropy`: count, per template, the M+1 length
    matches among the pairs that already matched at length M.

    :param X: the time series
    :param M: embedding length of the *shorter* template
    :param R: tolerance
    :param in_range_i, in_range_j: index pairs that matched at length M
        (as returned by :func:`_make_cm`)
    :return: float array of match counts (Cmp)
    """
    #Then we make Cmp
    N = len(X)
    Emp = _embed_seq(X, 1, M + 1)
    # Pairs still within tolerance once the extra (last) coordinate is added.
    inrange_cmp = np.abs(Emp[in_range_i, -1] - Emp[in_range_j, -1]) <= R
    in_range_cmp_i = in_range_i[inrange_cmp]
    Cmp = np.bincount(in_range_cmp_i, minlength=N - M)
    in_range_cmp_j = in_range_j[inrange_cmp]
    Cmp += np.bincount(in_range_cmp_j, minlength=N - M)
    # BUGFIX: `np.float` (alias of builtin float) was removed in NumPy >= 1.24.
    return Cmp.astype(float)
def _coarse_grainning(a, tau):
"""
Coarse grainning for multiscale (sample) entropy.
"""
if tau ==1:
return a
length_out = a.size / tau
n_dropped = a.size % tau
mat = a[0:a.size - n_dropped].reshape((tau, length_out))
return np.mean(mat, axis=0)
def _make_cm(X, M, R):
    """
    Helper for :func:`ap_entropy`: count, per template of length M, the
    templates within tolerance R (Chebyshev distance), including self-matches.

    :param X: the time series
    :param M: embedding length
    :param R: tolerance
    :return: ``(Cm, in_range_i, in_range_j)`` — float match counts and the
        index pairs that matched (reused by :func:`_make_cmp`)
    """
    N = len(X)
    # we pregenerate all indices
    i_idx, j_idx = np.triu_indices(N - M)
    # We start by making Cm
    Em = _embed_seq(X, 1, M)
    dif = np.abs(Em[i_idx] - Em[j_idx])
    max_dist = np.max(dif, 1)
    inrange_cm = max_dist <= R
    in_range_i = i_idx[inrange_cm]
    in_range_j = j_idx[inrange_cm]
    # Each matching pair scores one for both of its members.
    Cm = np.bincount(in_range_i, minlength=N - M + 1)
    Cm += np.bincount(in_range_j, minlength=N - M + 1)
    # Matches of the last template against all previous ones.
    inrange_last = np.max(np.abs(Em[:-1] - Em[-1]), 1) <= R
    Cm[inrange_last] += 1
    # all matches + self match
    Cm[-1] += np.sum(inrange_last) + 1
    # BUGFIX: `np.float` (alias of builtin float) was removed in NumPy >= 1.24.
    return Cm.astype(float), in_range_i, in_range_j
def pfd(a):
    r"""
    Compute Petrosian Fractal Dimension of a time series [PET95]_.
    It is defined by:
    .. math::
        \frac{log(N)}{log(N) + log(\frac{N}{N+0.4N_{\delta}})}
    .. note::
        **Difference with PyEEG:**
        Results is different from [PYEEG]_ which implemented an apparently erroneous formulae:
        .. math::
            \frac{log(N)}{log(N) + log(\frac{N}{N}+0.4N_{\delta})}
    Where:
    :math:`N` is the length of the time series, and
    :math:`N_{\delta}` is the number of sign changes.
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :return: the Petrosian Fractal Dimension; a scalar.
    :rtype: float
    Example:
    >>> import pyrem as pr
    >>> import numpy as np
    >>> # generate white noise:
    >>> noise = np.random.normal(size=int(1e4))
    >>> pr.univariate.pfd(noise)
    """
    first_deriv = np.diff(a)
    # A sign change in the derivative occurs where the product of two
    # consecutive derivative samples is negative.
    sign_changes = np.sum(first_deriv[1:-1] * first_deriv[:-2] < 0)
    n = len(a)
    log_n = np.log(n)
    return log_n / (log_n + np.log(n / (n + 0.4 * sign_changes)))
def hjorth(a):
    r"""
    Compute Hjorth parameters [HJO70]_.
    .. math::
        Activity = m_0 = \sigma_{a}^2
    .. math::
        Complexity = m_2 = \sigma_{d}/ \sigma_{a}
    .. math::
        Morbidity = m_4 = \frac{\sigma_{dd}/ \sigma_{d}}{m_2}
    Where:
    :math:`\sigma_{x}^2` is the mean power of a signal :math:`x`. That is, its variance, if it's mean is zero.
    :math:`a`, :math:`d` and :math:`dd` represent the original signal, its first and second derivatives, respectively.
    .. note::
        **Difference with PyEEG:**
        Results is different from [PYEEG]_ which appear to uses a non normalised (by the length of the signal) definition of the activity:
        .. math::
            \sigma_{a}^2 = \sum{\mathbf{x}[i]^2}
        As opposed to
        .. math::
            \sigma_{a}^2 = \frac{1}{n}\sum{\mathbf{x}[i]^2}
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :return: activity, morbidity and complexity (in that order)
    :rtype: tuple(float, float, float)
    Example:
    >>> import pyrem as pr
    >>> import numpy as np
    >>> # generate white noise:
    >>> noise = np.random.normal(size=int(1e4))
    >>> activity, morbidity, complexity = pr.univariate.hjorth(noise)
    """
    d1 = np.diff(a)
    d2 = np.diff(a, 2)
    # Mean power of the signal and of its first two derivatives.
    power_signal = np.mean(a ** 2)
    power_d1 = np.mean(d1 ** 2)
    power_d2 = np.mean(d2 ** 2)
    activity = power_signal
    morbidity = np.sqrt(power_d1 / power_signal)
    complexity = np.sqrt(power_d2 / power_d1) / morbidity
    return activity, morbidity, complexity
def svd_entropy(a, tau, de):
    r"""
    Compute the Singular Value Decomposition entropy of a signal with embedding dimension "de" and delay "tau" [PYEEG]_.
    .. note::
        **Difference with PyEEG:**
        The result differs from PyEEG implementation because :math:`log_2` is used (as opposed to natural logarithm in PyEEG code),
        according to the definition in their paper [PYEEG]_ (eq. 9):
        .. math::
            H_{SVD} = -\sum{\bar\sigma{}_i log_2 \bar\sigma{}_i}
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :param tau: the delay
    :type tau: int
    :param de: the embedding dimension
    :type de: int
    :return: the SVD entropy, a scalar
    :rtype: float
    """
    # Singular values of the delay-embedding matrix, normalised to sum to 1,
    # then treated as a probability distribution.
    singular_values = np.linalg.svd(_embed_seq(a, tau, de), compute_uv=False)
    normalised = singular_values / np.sum(singular_values)
    return -np.sum(normalised * np.log2(normalised))
def fisher_info(a, tau, de):
    r"""
    Compute the Fisher information of a signal with embedding dimension "de" and delay "tau" [PYEEG]_.
    Vectorised (i.e. faster) version of the eponymous PyEEG function.
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :param tau: the delay
    :type tau: int
    :param de: the embedding dimension
    :type de: int
    :return: the Fisher information, a scalar
    :rtype: float
    """
    # Normalised singular value spectrum of the delay-embedding matrix.
    sv = np.linalg.svd(_embed_seq(a, tau, de), compute_uv=False)
    sv = sv / np.sum(sv)
    # Fisher information of the spectrum: sum of squared successive
    # differences, each weighted by the preceding value.
    return np.sum(np.diff(sv) ** 2 / sv[:-1])
def ap_entropy(a, m, R):
    r"""
    Compute the approximate entropy of a signal with embedding dimension "de" and delay "tau" [PYEEG]_.
    Vectorised version of the PyEEG function. Faster than PyEEG, but still critically slow.
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :param m: the scale
    :type m: int
    :param R: The tolerance
    :type R: float`
    :return: the approximate entropy, a scalar
    :rtype: float
    """
    n = len(a)
    # Match counts at template lengths m and m + 1.
    Cm, in_range_i, in_range_j = _make_cm(a, m, R)
    Cmp = _make_cmp(a, m, R, in_range_i, in_range_j)
    # Phi statistics: mean log of the (normalised) match frequencies.
    phi_m = np.sum(np.log(Cm / float(n - m + 1)))
    phi_mp = np.sum(np.log(Cmp / float(n - m)))
    return (phi_m - phi_mp) / (n - m)
def samp_entropy(a, m, r, tau=1, relative_r=True):
    r"""
    Compute the sample entropy [RIC00]_ of a signal with embedding dimension `de` and delay `tau` [PYEEG]_.
    Vectorised version of the eponymous PyEEG function.
    In addition, this function can also be used to vary tau and therefore compute Multi-Scale Entropy(MSE) [COS05]_ by
    coarse grainning the time series (see example bellow).
    By default, r is expressed as relatively to the standard deviation of the signal.
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :param m: the scale
    :type m: int
    :param r: The tolerance
    :type r: float
    :param tau: The scale for coarse grainning.
    :type tau: int
    :param relative_r: whether the argument r is relative to the standard deviation. If false, an absolute value should be given for r.
    :type relative_r: bool
    :return: the sample entropy, a scalar
    :rtype: float
    Example:
    >>> import pyrem as pr
    >>> import numpy as np
    >>> # generate white noise:
    >>> noise = np.random.normal(size=int(1e4))
    >>> pr.univariate.samp_entropy(noise, m=2, r=1.5)
    >>> # now we can do that for multiple scales (MSE):
    >>> [pr.univariate.samp_entropy(noise, m=2, r=1.5, tau=tau) for tau in range(1, 5)]
    """
    coarse_a = _coarse_grainning(a, tau)
    if relative_r:
        # BUGFIX: was `coarse_a /= np.std(coarse_a)`. When tau == 1,
        # _coarse_grainning returns `a` itself, so the in-place division
        # silently mutated the caller's array. Rebinding avoids that.
        coarse_a = coarse_a / np.std(coarse_a)
    embsp = _embed_seq(coarse_a, 1, m + 1)
    embsp_last = embsp[:, -1]
    embs_mini = embsp[:, :-1]
    # Buffers are preallocated chunks of memory storing temporary results.
    # see the `out` argument in numpy *ufun* documentation
    dist_buffer = np.zeros(embsp.shape[0] - 1, dtype=np.float32)
    subtract_buffer = np.zeros((dist_buffer.size, m), dtype=np.float32)
    # BUGFIX: `np.bool` (alias of the builtin) was removed in NumPy >= 1.24.
    in_range_buffer = np.zeros_like(dist_buffer, dtype=bool)
    sum_cm, sum_cmp = 0.0, 0.0
    # we iterate through all templates (rows), except last one.
    for i, template in enumerate(embs_mini[:-1]):
        # these are just views to the buffer arrays. to store intermediary matrices
        dist_b_view = dist_buffer[i:]
        sub_b_view = subtract_buffer[i:]
        range_b_view = in_range_buffer[i:]
        embsp_view = embsp_last[i + 1:]
        # substract the template from each subsequent row of the embedded matrix
        np.subtract(embs_mini[i + 1:], template, out=sub_b_view)
        # Absolute distance
        np.abs(sub_b_view, out=sub_b_view)
        # Maximal absolute difference between a scroll and a template is the distance
        np.max(sub_b_view, axis=1, out=dist_b_view)
        # we compare this distance to a tolerance r
        np.less_equal(dist_b_view, r, out=range_b_view)
        # score one for this template for each match
        in_range_sum = np.sum(range_b_view)
        sum_cm += in_range_sum
        ### reuse the buffers for last column
        dist_b_view = dist_buffer[:in_range_sum]
        where = np.flatnonzero(range_b_view)
        dist_b_view = np.take(embsp_view, where, out=dist_b_view)
        range_b_view = in_range_buffer[range_b_view]
        # score one to TODO for each match of the last element
        dist_b_view -= embsp_last[i]
        np.abs(dist_b_view, out=dist_b_view)
        np.less_equal(dist_b_view, r, out=range_b_view)
        sum_cmp += np.sum(range_b_view)
    if sum_cm == 0 or sum_cmp == 0:
        # No matches at any length: the entropy is undefined.
        # BUGFIX: `np.NaN` alias was removed in NumPy 2.0; `np.nan` is canonical.
        return np.nan
    return np.log(sum_cm / sum_cmp)
def spectral_entropy(a, sampling_freq, bands=None):
    r"""
    Compute spectral entropy of a signal with respect to frequency bands.
    The power spectrum is computed through fft. Then, it is normalised and assimilated to a probability density function.
    The entropy of the signal :math:`x` can be expressed by:
    .. math::
        H(x) =  -\sum_{f=0}^{f = f_s/2} PSD(f) log_2[PSD(f)]
    Where:
    :math:`PSD` is the normalised power spectrum (Power Spectrum Density), and
    :math:`f_s` is the sampling frequency
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :param sampling_freq: the sampling frequency
    :type sampling_freq:  float
    :param bands: a list of numbers delimiting the bins of the frequency bands. If None the entropy is computed over the whole range of the DFT (from 0 to :math:`f_s/2`)
    :return: the spectral entropy; a scalar
    """
    psd = np.abs(np.fft.rfft(a)) ** 2
    psd /= np.sum(psd)  # psd as a pdf (normalised to one)
    if bands is None:
        power_per_band = psd[psd > 0]
    else:
        freqs = np.fft.rfftfreq(a.size, 1 / float(sampling_freq))
        bands = np.asarray(bands)
        freq_limits_low = np.concatenate([[0.0], bands])
        # `np.Inf` alias was removed in NumPy 2.0; np.inf is canonical.
        freq_limits_up = np.concatenate([bands, [np.inf]])
        # BUGFIX: this used to be a plain Python *list*, and the subsequent
        # `power_per_band[power_per_band > 0]` was boolean-indexing a list —
        # a TypeError on Python 3 (and the wrong element on Python 2).
        # Building an ndarray makes the masking behave as intended.
        power_per_band = np.array(
            [np.sum(psd[np.bitwise_and(freqs >= low, freqs < up)])
             for low, up in zip(freq_limits_low, freq_limits_up)])
        power_per_band = power_per_band[power_per_band > 0]
    return -np.sum(power_per_band * np.log2(power_per_band))
def hfd(a, k_max):
    r"""
    Compute Higuchi Fractal Dimension of a time series.
    Vectorised version of the eponymous [PYEEG]_ function.
    .. note::
        **Difference with PyEEG:**
        Results is different from [PYEEG]_ which appears to have implemented an erroneous formulae.
        [HIG88]_ defines the normalisation factor as:
        .. math::
            \frac{N-1}{[\frac{N-m}{k} ]\dot{} k}
        [PYEEG]_ implementation uses:
        .. math::
            \frac{N-1}{[\frac{N-m}{k}]}
        The latter does *not* give the expected fractal dimension of approximately `1.50` for brownian motion (see example bellow).
    :param a: a one dimensional floating-point array representing a time series.
    :type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :param k_max: the maximal value of k
    :type k_max: int
    :return: Higuchi's fractal dimension; a scalar
    :rtype: float
    Example from [HIG88]_. This should produce a result close to `1.50`:
    >>> import numpy as np
    >>> import pyrem as pr
    >>> i = np.arange(2 ** 15) +1001
    >>> z = np.random.normal(size=int(2 ** 15) + 1001)
    >>> y = np.array([np.sum(z[1:j]) for j in i])
    >>> pr.univariate.hfd(y,2**8)
    """
    L = []
    x = []
    N = a.size
    # TODO this could be used to pregenerate k and m idxs ... but memory pblem?
    # km_idxs = np.triu_indices(k_max - 1)
    # km_idxs = k_max - np.flipud(np.column_stack(km_idxs)) -1
    # km_idxs[:,1] -= 1
    #
    # BUGFIX: `xrange` is Python-2-only; `range` behaves identically here.
    for k in range(1, k_max):
        Lk = 0
        for m in range(0, k):
            # we pregenerate all idxs
            idxs = np.arange(1, int(np.floor((N - m) / k)), dtype=np.int32)
            Lmk = np.sum(np.abs(a[m + idxs * k] - a[m + k * (idxs - 1)]))
            # BUGFIX: `(N - m) / k` relied on Python-2 integer division inside
            # Higuchi's normalisation factor; `//` keeps the same (floored)
            # semantics on Python 3.
            Lmk = (Lmk * (N - 1) / (((N - m) // k) * k)) / k
            Lk += Lmk
        # After the inner loop m == k - 1, so m + 1 == k: this averages the
        # k curve lengths into L(k).
        L.append(np.log(Lk / (m + 1)))
        x.append([np.log(1.0 / k), 1])
    # Slope of log L(k) vs log(1/k) is the fractal dimension.
    # rcond=None uses the modern default and silences the legacy warning.
    (p, r1, r2, s) = np.linalg.lstsq(x, L, rcond=None)
    return p[0]
def dfa(X, Ave=None, L=None, sampling=1):
    """
    WIP on this function. It is basically copied and pasted from [PYEEG]_, without verification of the maths or unittests
    (ported so it also runs under Python 3 / modern NumPy).

    :param X: the time series
    :param Ave: mean to subtract from the integrated profile (defaults to mean of X)
    :param L: iterable of box lengths; when falsy, powers-of-two fractions of
        the series length are used
    :param sampling: probability of evaluating each box (1 = all boxes)
    :return: the scaling exponent Alpha (slope of log F(n) vs log n)
    """
    X = np.array(X)
    if Ave is None:
        Ave = np.mean(X)
    # Integrated, mean-centred profile of the series.
    Y = np.cumsum(X)
    Y -= Ave
    if not L:
        # BUGFIX: `np.int` alias was removed from NumPy; `//` keeps integer
        # box lengths on Python 3 (plain `/` produced floats, which later
        # broke `range`).
        max_power = int(np.log2(len(X))) - 4
        L = X.size // 2 ** np.arange(4, max_power)
    if len(L) < 2:
        raise Exception("Too few values for L. Time series too short?")
    F = np.zeros(len(L))  # F(n) of different given box length n
    for i, n in enumerate(L):
        n = int(n)  # box length must be a Python int for range()
        sampled = 0
        # BUGFIX: `xrange` is Python-2-only.
        for j in range(0, len(X) - n, n):
            # `sampling` < 1 randomly skips boxes to speed things up.
            if np.random.rand() < sampling:
                # Residual sum of squares of a linear fit within the box.
                resid = np.polyfit(np.arange(j, j + n), Y[j:j + n], 1, full=True)[1]
                if resid.size:  # polyfit may return an empty residual array
                    F[i] += resid[0]
                sampled += 1
        if sampled > 0:
            F[i] /= float(sampled)
    LF = np.array([(l, f) for l, f in zip(L, F) if l > 0]).T
    F = np.sqrt(LF[1])
    Alpha = np.polyfit(np.log(LF[0]), np.log(F), 1)[0]
    return Alpha
def hurst(signal):
    """
    **Experimental**/untested implementation taken from:
    http://drtomstarke.com/index.php/calculation-of-the-hurst-exponent-to-test-for-trend-and-mean-reversion/
    Use at your own risks.

    Estimates the Hurst exponent as the slope of the double-log plot of the
    standard deviation of lagged differences against the lag, for lags 2..19.
    """
    lags = np.arange(2, 20)
    variability = []
    for lag in lags:
        # Differenced series at this lag.
        lagged_diff = np.subtract(signal[lag:], signal[:-lag])
        variability.append(np.std(lagged_diff))
    # Power-law fit in log10-log10 space; the slope is the Hurst exponent.
    slope = np.polyfit(np.log10(lags), np.log10(variability), 1)
    return slope[0]
|
gilestrolab/pyrem
|
src/pyrem/univariate.py
|
Python
|
gpl-3.0
| 18,418
|
# -*- coding: utf-8 -*-
"""
pytest fixtures
"""
import pytest
from django.contrib.auth.models import User
from orb.models import Category, Tag, UserProfile
from orb.peers.models import Peer
from orb.resources.tests.factory import resource_factory
pytestmark = pytest.mark.django_db
@pytest.fixture
def testing_user():
    """Yield a persisted ``tester`` user with password ``password``."""
    user, _ = User.objects.get_or_create(username="tester")
    user.set_password("password")
    user.save()
    yield user
@pytest.fixture
def testing_profile(testing_user):
    """Yield a UserProfile attached to the ``testing_user`` fixture."""
    yield UserProfile.objects.create(user=testing_user)
@pytest.fixture()
def import_user():
    """Yield a persisted ``importer`` user with password ``password``."""
    user, _ = User.objects.get_or_create(username="importer")
    user.set_password("password")
    user.save()
    yield user
@pytest.fixture
def importer_profile(import_user):
    """Yield a UserProfile attached to the ``import_user`` fixture."""
    yield UserProfile.objects.create(user=import_user)
@pytest.fixture
def sample_category():
    """Yield a Category named ``test category``."""
    category, _ = Category.objects.get_or_create(name="test category")
    yield category
@pytest.fixture
def sample_tag(sample_category, testing_user):
    """Yield a ``test tag`` Tag in ``sample_category``, owned by ``testing_user``."""
    tag, _ = Tag.objects.get_or_create(name="test tag", defaults={
        "category": sample_category,
        "create_user": testing_user,
        "update_user": testing_user,
    })
    yield tag
@pytest.fixture
def role_category():
    """Yield the ``audience`` Category (used for role-style tags)."""
    category, _ = Category.objects.get_or_create(name="audience")
    yield category
@pytest.fixture
def role_tag(role_category, testing_user):
    """Yield a ``cadre`` Tag in the ``audience`` category.

    The assert verifies that ``Tag.tags.roles()`` is non-empty once this tag
    exists (presumably the roles manager filters on the audience category —
    TODO confirm against the Tag manager implementation).
    """
    tag, _ = Tag.objects.get_or_create(name="cadre", defaults={
        "category": role_category,
        "create_user": testing_user,
        "update_user": testing_user,
    })
    assert Tag.tags.roles()
    yield tag
@pytest.fixture
def test_resource(testing_user):
    """Yield a locally-created resource with a non-ASCII (Spanish) title."""
    yield resource_factory(
        user=testing_user,
        title=u"Básica salud del recién nacido",
        description=u"Básica salud del recién nacido",
    )
@pytest.fixture
def test_peer():
    """Yield a Peer representing a remote ORB instance.

    NOTE(review): this fixture was ``scope="session"``, but it writes to the
    database, and the module's ``pytest.mark.django_db`` support is
    function-scoped — a session-scoped DB fixture raises ScopeMismatch and
    would leak state across tests. Dropped to the default function scope;
    callers are unaffected (same fixture name and value).
    """
    peer = Peer.peers.create(name="Distant ORB", host="http://www.orb.org/")
    yield peer
@pytest.fixture
def remote_resource(import_user, test_peer):
    """Fixture for a remotely created resource.

    NOTE(review): this fixture was ``scope="session"`` while depending on the
    function-scoped ``import_user`` fixture — pytest rejects that with a
    ScopeMismatch error (a broader-scoped fixture cannot use a narrower-scoped
    one). Dropped to the default function scope; callers are unaffected.
    """
    yield resource_factory(
        user=import_user,
        title=u"A remote resource",
        description=u"<p>A remote resource</p>",
        source_peer=test_peer,
    )
|
mPowering/django-orb
|
orb/fixtures/__init__.py
|
Python
|
gpl-3.0
| 2,306
|
# Name: Examples for using conditioning number finders for curves and surfaces
# Description: Contains some examples with descriptions of how to use the functions
# Created: 2016-08-18
# Author: Janis Lazovskis
# Navigate to the conditioning directory
# Run Python 2
# Example (curve)
# `execfile` (Python 2 only) runs the helper file in the current namespace,
# which provides `variety`, `sp` (sympy) and `cnumcurve`.
execfile('curves.py')
x = variety()
x0,x1,x2 = sp.var('x0,x1,x2')
x.varlist = [x0,x1,x2]
# A smooth projective plane curve and two points known to lie on it.
x.func = x0*x0 + x1*x2 - x1*x0
x.points = [[1,1,0], [2,1,-2]]
cnumcurve(x)
# Non-example (curve)
# Use the above, but instead, put:
x.points = [[1,1,0], [2,1,-2], [0,0,0]]
# Then cnumcurve will return an empty list saying the last point isn't in P^2
# ([0,0,0] is not a valid point of projective space).
cnumcurve(x)
# Non-example (curve)
# Use the above, but instead, put:
x.points = [[1,1,0], [2,1,-2], [1,1,1]]
# Then cnumcurve will return an empty list saying the last point isn't on the curve
cnumcurve(x)
# Example surface
# Same workflow for surfaces in P^3, provided by surfaces.py.
execfile('surfaces.py')
x = variety()
x0,x1,x2,x3 = sp.var('x0,x1,x2,x3')
x.varlist = [x0,x1,x2,x3]
x.func = x0*x1 - x2*x3
x.points = [[1,1,1,1], [0,1,1,0], [0,1,0,1], [2,1,1,2]]
cnumsurface(x)
# Non-example (surface)
execfile('surfaces.py')
x = variety()
x0,x1,x2,x3 = sp.var('x0,x1,x2,x3')
x.varlist = [x0,x1,x2,x3]
x.func = x0*x0*x1 - x2*x3*x3 + x0*x1*x2 +x2*x2*x2
x.points = [[0,1,1,1], [1,0,1,1], [1,0,2,2], [1,1,-1,1]]
# This will raise an error because the curve is not smooth
cnumsurface(x)
|
jlazovskis/conditioning
|
examples/examples-curves-surfaces.py
|
Python
|
gpl-3.0
| 1,355
|
# -*- encoding: utf-8 -*-
"""
leap/email/imap/tests/__init__.py
----------------------------------
Module intialization file for leap.mx.tests, a module containing unittesting
code, using twisted.trial, for testing leap_mx.
@authors: Kali Kaneko, <kali@leap.se>
@license: GPLv3, see included LICENSE file
@copyright: © 2013 Kali Kaneko, see COPYLEFT file
"""
import os
import u1db
from leap.common.testing.basetest import BaseLeapTest
from leap.soledad.client import Soledad
from leap.soledad.common.document import SoledadDocument
__all__ = ['test_imap']
def run():
    """Placeholder entry point for this test package; currently a no-op."""
    pass
# -----------------------------------------------------------------------------
# Some tests inherit from BaseSoledadTest in order to have a working Soledad
# instance in each test.
# -----------------------------------------------------------------------------
class BaseSoledadIMAPTest(BaseLeapTest):
    """
    Instantiates GPG and Soledad for usage in LeapIMAPServer tests.
    Copied from BaseSoledadTest, but moving setup to classmethod
    """

    def setUp(self):
        """Create two u1db test databases and a mocked Soledad instance."""
        # open test dbs
        self.db1_file = os.path.join(
            self.tempdir, "db1.u1db")
        self.db2_file = os.path.join(
            self.tempdir, "db2.u1db")
        self._db1 = u1db.open(self.db1_file, create=True,
                              document_factory=SoledadDocument)
        self._db2 = u1db.open(self.db2_file, create=True,
                              document_factory=SoledadDocument)
        # soledad config info
        self.email = 'leap@leap.se'
        secrets_path = os.path.join(
            self.tempdir, Soledad.STORAGE_SECRETS_FILE_NAME)
        local_db_path = os.path.join(
            self.tempdir, Soledad.LOCAL_DATABASE_FILE_NAME)
        server_url = ''
        cert_file = None
        self._soledad = self._soledad_instance(
            self.email, '123',
            secrets_path=secrets_path,
            local_db_path=local_db_path,
            server_url=server_url,
            cert_file=cert_file)

    def _soledad_instance(self, uuid, passphrase, secrets_path, local_db_path,
                          server_url, cert_file):
        """
        Return a Soledad instance for tests.
        """
        # BUGFIX: `Mock` was used below but never imported anywhere in this
        # module, producing a NameError at runtime. Import it locally,
        # preferring the stdlib location (Python 3) with a fallback to the
        # `mock` backport this Python-2-era codebase shipped with.
        try:
            from unittest.mock import Mock
        except ImportError:
            from mock import Mock
        # mock key fetching and storing so Soledad doesn't fail when trying to
        # reach the server.
        Soledad._fetch_keys_from_shared_db = Mock(return_value=None)
        Soledad._assert_keys_in_shared_db = Mock(return_value=None)

        # instantiate soledad
        def _put_doc_side_effect(doc):
            # Record the last doc "stored" in the shared db for assertions.
            self._doc_put = doc

        class MockSharedDB(object):
            # Stand-in for the remote shared database: lookups return
            # nothing; writes are captured via the side effect above.
            get_doc = Mock(return_value=None)
            put_doc = Mock(side_effect=_put_doc_side_effect)

            def __call__(self):
                return self

        Soledad._shared_db = MockSharedDB()
        return Soledad(
            uuid,
            passphrase,
            secrets_path=secrets_path,
            local_db_path=local_db_path,
            server_url=server_url,
            cert_file=cert_file,
        )

    def tearDown(self):
        """Close both test databases and the Soledad instance."""
        self._db1.close()
        self._db2.close()
        self._soledad.close()
# Key material for testing
# NOTE(review): presumably the fingerprint of the PUBLIC_KEY/PRIVATE_KEY
# PGP pair defined below -- confirm if the test keys are ever regenerated.
KEY_FINGERPRINT = "E36E738D69173C13D709E44F2F455E2824D18DDF"
PUBLIC_KEY = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v1.4.10 (GNU/Linux)
mQINBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT
huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs
d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
tBxMZWFwIFRlc3QgS2V5IDxsZWFwQGxlYXAuc2U+iQI3BBMBCAAhBQJQvfnZAhsD
BQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEC9FXigk0Y3fT7EQAKH3IuRniOpb
T/DDIgwwjz3oxB/W0DDMyPXowlhSOuM0rgGfntBpBb3boezEXwL86NPQxNGGruF5
hkmecSiuPSvOmQlqlS95NGQp6hNG0YaKColh+Q5NTspFXCAkFch9oqUje0LdxfSP
QfV9UpeEvGyPmk1I9EJV/YDmZ4+Djge1d7qhVZInz4Rx1NrSyF/Tc2EC0VpjQFsU
Y9Kb2YBBR7ivG6DBc8ty0jJXi7B4WjkFcUEJviQpMF2dCLdonCehYs1PqsN1N7j+
eFjQd+hqVMJgYuSGKjvuAEfClM6MQw7+FmFwMyLgK/Ew/DttHEDCri77SPSkOGSI
txCzhTg6798f6mJr7WcXmHX1w1Vcib5FfZ8vTDFVhz/XgAgArdhPo9V6/1dgSSiB
KPQ/spsco6u5imdOhckERE0lnAYvVT6KE81TKuhF/b23u7x+Wdew6kK0EQhYA7wy
7LmlaNXc7rMBQJ9Z60CJ4JDtatBWZ0kNrt2VfdDHVdqBTOpl0CraNUjWE5YMDasr
K2dF5IX8D3uuYtpZnxqg0KzyLg0tzL0tvOL1C2iudgZUISZNPKbS0z0v+afuAAnx
2pTC3uezbh2Jt8SWTLhll4i0P4Ps5kZ6HQUO56O+/Z1cWovX+mQekYFmERySDR9n
3k1uAwLilJmRmepGmvYbB8HloV8HqwgguQINBFC9+dkBEAC0I/xn1uborMgDvBtf
H0sEhwnXBC849/32zic6udB6/3Efk9nzbSpL3FSOuXITZsZgCHPkKarnoQ2ztMcS
sh1ke1C5gQGms75UVmM/nS+2YI4vY8OX/GC/on2vUyncqdH+bR6xH5hx4NbWpfTs
iQHmz5C6zzS/kuabGdZyKRaZHt23WQ7JX/4zpjqbC99DjHcP9BSk7tJ8wI4bkMYD
uFVQdT9O6HwyKGYwUU4sAQRAj7XCTGvVbT0dpgJwH4RmrEtJoHAx4Whg8mJ710E0
GCmzf2jqkNuOw76ivgk27Kge+Hw00jmJjQhHY0yVbiaoJwcRrPKzaSjEVNgrpgP3
lXPRGQArgESsIOTeVVHQ8fhK2YtTeCY9rIiO+L0OX2xo9HK7hfHZZWL6rqymXdyS
fhzh/f6IPyHFWnvj7Brl7DR8heMikygcJqv+ed2yx7iLyCUJ10g12I48+aEj1aLe
dP7lna32iY8/Z0SHQLNH6PXO9SlPcq2aFUgKqE75A/0FMk7CunzU1OWr2ZtTLNO1
WT/13LfOhhuEq9jTyTosn0WxBjJKq18lnhzCXlaw6EAtbA7CUwsD3CTPR56aAXFK
3I7KXOVAqggrvMe5Tpdg5drfYpI8hZovL5aAgb+7Y5ta10TcJdUhS5K3kFAWe/td
U0cmWUMDP1UMSQ5Jg6JIQVWhSwARAQABiQIfBBgBCAAJBQJQvfnZAhsMAAoJEC9F
Xigk0Y3fRwsP/i0ElYCyxeLpWJTwo1iCLkMKz2yX1lFVa9nT1BVTPOQwr/IAc5OX
NdtbJ14fUsKL5pWgW8OmrXtwZm1y4euI1RPWWubG01ouzwnGzv26UcuHeqC5orZj
cOnKtL40y8VGMm8LoicVkRJH8blPORCnaLjdOtmA3rx/v2EXrJpSa3AhOy0ZSRXk
ZSrK68AVNwamHRoBSYyo0AtaXnkPX4+tmO8X8BPfj125IljubvwZPIW9VWR9UqCE
VPfDR1XKegVb6VStIywF7kmrknM1C5qUY28rdZYWgKorw01hBGV4jTW0cqde3N51
XT1jnIAa+NoXUM9uQoGYMiwrL7vNsLlyyiW5ayDyV92H/rIuiqhFgbJsHTlsm7I8
oGheR784BagAA1NIKD1qEO9T6Kz9lzlDaeWS5AUKeXrb7ZJLI1TTCIZx5/DxjLqM
Tt/RFBpVo9geZQrvLUqLAMwdaUvDXC2c6DaCPXTh65oCZj/hqzlJHH+RoTWWzKI+
BjXxgUWF9EmZUBrg68DSmI+9wuDFsjZ51BcqvJwxyfxtTaWhdoYqH/UQS+D1FP3/
diZHHlzwVwPICzM9ooNTgbrcDzyxRkIVqsVwBq7EtzcvgYUyX53yG25Giy6YQaQ2
ZtQ/VymwFL3XdUWV6B/hU4PVAFvO3qlOtdJ6TpE+nEWgcWjCv5g7RjXX
=MuOY
-----END PGP PUBLIC KEY BLOCK-----
"""
PRIVATE_KEY = """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.10 (GNU/Linux)
lQcYBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT
huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs
d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
AA/+JHtlL39G1wsH9R6UEfUQJGXR9MiIiwZoKcnRB2o8+DS+OLjg0JOh8XehtuCs
E/8oGQKtQqa5bEIstX7IZoYmYFiUQi9LOzIblmp2vxOm+HKkxa4JszWci2/ZmC3t
KtaA4adl9XVnshoQ7pijuCMUKB3naBEOAxd8s9d/JeReGIYkJErdrnVfNk5N71Ds
FmH5Ll3XtEDvgBUQP3nkA6QFjpsaB94FHjL3gDwum/cxzj6pCglcvHOzEhfY0Ddb
J967FozQTaf2JW3O+w3LOqtcKWpq87B7+O61tVidQPSSuzPjCtFF0D2LC9R/Hpky
KTMQ6CaKja4MPhjwywd4QPcHGYSqjMpflvJqi+kYIt8psUK/YswWjnr3r4fbuqVY
VhtiHvnBHQjz135lUqWvEz4hM3Xpnxydx7aRlv5NlevK8+YIO5oFbWbGNTWsPZI5
jpoFBpSsnR1Q5tnvtNHauvoWV+XN2qAOBTG+/nEbDYH6Ak3aaE9jrpTdYh0CotYF
q7csANsDy3JvkAzeU6WnYpsHHaAjqOGyiZGsLej1UcXPFMosE/aUo4WQhiS8Zx2c
zOVKOi/X5vQ2GdNT9Qolz8AriwzsvFR+bxPzyd8V6ALwDsoXvwEYinYBKK8j0OPv
OOihSR6HVsuP9NUZNU9ewiGzte/+/r6pNXHvR7wTQ8EWLcEIAN6Zyrb0bHZTIlxt
VWur/Ht2mIZrBaO50qmM5RD3T5oXzWXi/pjLrIpBMfeZR9DWfwQwjYzwqi7pxtYx
nJvbMuY505rfnMoYxb4J+cpRXV8MS7Dr1vjjLVUC9KiwSbM3gg6emfd2yuA93ihv
Pe3mffzLIiQa4mRE3wtGcioC43nWuV2K2e1KjxeFg07JhrezA/1Cak505ab/tmvP
4YmjR5c44+yL/YcQ3HdFgs4mV+nVbptRXvRcPpolJsgxPccGNdvHhsoR4gwXMS3F
RRPD2z6x8xeN73Q4KH3bm01swQdwFBZbWVfmUGLxvN7leCdfs9+iFJyqHiCIB6Iv
mQfp8F0IAOwSo8JhWN+V1dwML4EkIrM8wUb4yecNLkyR6TpPH/qXx4PxVMC+vy6x
sCtjeHIwKE+9vqnlhd5zOYh7qYXEJtYwdeDDmDbL8oks1LFfd+FyAuZXY33DLwn0
cRYsr2OEZmaajqUB3NVmj3H4uJBN9+paFHyFSXrH68K1Fk2o3n+RSf2EiX+eICwI
L6rqoF5sSVUghBWdNegV7qfy4anwTQwrIMGjgU5S6PKW0Dr/3iO5z3qQpGPAj5OW
ATqPWkDICLbObPxD5cJlyyNE2wCA9VVc6/1d6w4EVwSq9h3/WTpATEreXXxTGptd
LNiTA1nmakBYNO2Iyo3djhaqBdWjk+EIAKtVEnJH9FAVwWOvaj1RoZMA5DnDMo7e
SnhrCXl8AL7Z1WInEaybasTJXn1uQ8xY52Ua4b8cbuEKRKzw/70NesFRoMLYoHTO
dyeszvhoDHberpGRTciVmpMu7Hyi33rM31K9epA4ib6QbbCHnxkWOZB+Bhgj1hJ8
xb4RBYWiWpAYcg0+DAC3w9gfxQhtUlZPIbmbrBmrVkO2GVGUj8kH6k4UV6kUHEGY
HQWQR0HcbKcXW81ZXCCD0l7ROuEWQtTe5Jw7dJ4/QFuqZnPutXVRNOZqpl6eRShw
7X2/a29VXBpmHA95a88rSQsL+qm7Fb3prqRmuMCtrUZgFz7HLSTuUMR867QcTGVh
cCBUZXN0IEtleSA8bGVhcEBsZWFwLnNlPokCNwQTAQgAIQUCUL352QIbAwULCQgH
AwUVCgkICwUWAgMBAAIeAQIXgAAKCRAvRV4oJNGN30+xEACh9yLkZ4jqW0/wwyIM
MI896MQf1tAwzMj16MJYUjrjNK4Bn57QaQW926HsxF8C/OjT0MTRhq7heYZJnnEo
rj0rzpkJapUveTRkKeoTRtGGigqJYfkOTU7KRVwgJBXIfaKlI3tC3cX0j0H1fVKX
hLxsj5pNSPRCVf2A5mePg44HtXe6oVWSJ8+EcdTa0shf03NhAtFaY0BbFGPSm9mA
QUe4rxugwXPLctIyV4uweFo5BXFBCb4kKTBdnQi3aJwnoWLNT6rDdTe4/nhY0Hfo
alTCYGLkhio77gBHwpTOjEMO/hZhcDMi4CvxMPw7bRxAwq4u+0j0pDhkiLcQs4U4
Ou/fH+pia+1nF5h19cNVXIm+RX2fL0wxVYc/14AIAK3YT6PVev9XYEkogSj0P7Kb
HKOruYpnToXJBERNJZwGL1U+ihPNUyroRf29t7u8flnXsOpCtBEIWAO8Muy5pWjV
3O6zAUCfWetAieCQ7WrQVmdJDa7dlX3Qx1XagUzqZdAq2jVI1hOWDA2rKytnReSF
/A97rmLaWZ8aoNCs8i4NLcy9Lbzi9QtornYGVCEmTTym0tM9L/mn7gAJ8dqUwt7n
s24dibfElky4ZZeItD+D7OZGeh0FDuejvv2dXFqL1/pkHpGBZhEckg0fZ95NbgMC
4pSZkZnqRpr2GwfB5aFfB6sIIJ0HGARQvfnZARAAtCP8Z9bm6KzIA7wbXx9LBIcJ
1wQvOPf99s4nOrnQev9xH5PZ820qS9xUjrlyE2bGYAhz5Cmq56ENs7THErIdZHtQ
uYEBprO+VFZjP50vtmCOL2PDl/xgv6J9r1Mp3KnR/m0esR+YceDW1qX07IkB5s+Q
us80v5LmmxnWcikWmR7dt1kOyV/+M6Y6mwvfQ4x3D/QUpO7SfMCOG5DGA7hVUHU/
Tuh8MihmMFFOLAEEQI+1wkxr1W09HaYCcB+EZqxLSaBwMeFoYPJie9dBNBgps39o
6pDbjsO+or4JNuyoHvh8NNI5iY0IR2NMlW4mqCcHEazys2koxFTYK6YD95Vz0RkA
K4BErCDk3lVR0PH4StmLU3gmPayIjvi9Dl9saPRyu4Xx2WVi+q6spl3ckn4c4f3+
iD8hxVp74+wa5ew0fIXjIpMoHCar/nndsse4i8glCddINdiOPPmhI9Wi3nT+5Z2t
9omPP2dEh0CzR+j1zvUpT3KtmhVICqhO+QP9BTJOwrp81NTlq9mbUyzTtVk/9dy3
zoYbhKvY08k6LJ9FsQYySqtfJZ4cwl5WsOhALWwOwlMLA9wkz0eemgFxStyOylzl
QKoIK7zHuU6XYOXa32KSPIWaLy+WgIG/u2ObWtdE3CXVIUuSt5BQFnv7XVNHJllD
Az9VDEkOSYOiSEFVoUsAEQEAAQAP/1AagnZQZyzHDEgw4QELAspYHCWLXE5aZInX
wTUJhK31IgIXNn9bJ0hFiSpQR2xeMs9oYtRuPOu0P8oOFMn4/z374fkjZy8QVY3e
PlL+3EUeqYtkMwlGNmVw5a/NbNuNfm5Darb7pEfbYd1gPcni4MAYw7R2SG/57GbC
9gucvspHIfOSfBNLBthDzmK8xEKe1yD2eimfc2T7IRYb6hmkYfeds5GsqvGI6mwI
85h4uUHWRc5JOlhVM6yX8hSWx0L60Z3DZLChmc8maWnFXd7C8eQ6P1azJJbW71Ih
7CoK0XW4LE82vlQurSRFgTwfl7wFYszW2bOzCuhHDDtYnwH86Nsu0DC78ZVRnvxn
E8Ke/AJgrdhIOo4UAyR+aZD2+2mKd7/waOUTUrUtTzc7i8N3YXGi/EIaNReBXaq+
ZNOp24BlFzRp+FCF/pptDW9HjPdiV09x0DgICmeZS4Gq/4vFFIahWctg52NGebT0
Idxngjj+xDtLaZlLQoOz0n5ByjO/Wi0ANmMv1sMKCHhGvdaSws2/PbMR2r4caj8m
KXpIgdinM/wUzHJ5pZyF2U/qejsRj8Kw8KH/tfX4JCLhiaP/mgeTuWGDHeZQERAT
xPmRFHaLP9/ZhvGNh6okIYtrKjWTLGoXvKLHcrKNisBLSq+P2WeFrlme1vjvJMo/
jPwLT5o9CADQmcbKZ+QQ1ZM9v99iDZol7SAMZX43JC019sx6GK0u6xouJBcLfeB4
OXacTgmSYdTa9RM9fbfVpti01tJ84LV2SyL/VJq/enJF4XQPSynT/tFTn1PAor6o
tEAAd8fjKdJ6LnD5wb92SPHfQfXqI84rFEO8rUNIE/1ErT6DYifDzVCbfD2KZdoF
cOSp7TpD77sY1bs74ocBX5ejKtd+aH99D78bJSMM4pSDZsIEwnomkBHTziubPwJb
OwnATy0LmSMAWOw5rKbsh5nfwCiUTM20xp0t5JeXd+wPVWbpWqI2EnkCEN+RJr9i
7dp/ymDQ+Yt5wrsN3NwoyiexPOG91WQVCADdErHsnglVZZq9Z8Wx7KwecGCUurJ2
H6lKudv5YOxPnAzqZS5HbpZd/nRTMZh2rdXCr5m2YOuewyYjvM757AkmUpM09zJX
MQ1S67/UX2y8/74TcRF97Ncx9HeELs92innBRXoFitnNguvcO6Esx4BTe1OdU6qR
ER3zAmVf22Le9ciXbu24DN4mleOH+OmBx7X2PqJSYW9GAMTsRB081R6EWKH7romQ
waxFrZ4DJzZ9ltyosEJn5F32StyLrFxpcrdLUoEaclZCv2qka7sZvi0EvovDVEBU
e10jOx9AOwf8Gj2ufhquQ6qgVYCzbP+YrodtkFrXRS3IsljIchj1M2ffB/0bfoUs
rtER9pLvYzCjBPg8IfGLw0o754Qbhh/ReplCRTusP/fQMybvCvfxreS3oyEriu/G
GufRomjewZ8EMHDIgUsLcYo2UHZsfF7tcazgxMGmMvazp4r8vpgrvW/8fIN/6Adu
tF+WjWDTvJLFJCe6O+BFJOWrssNrrra1zGtLC1s8s+Wfpe+bGPL5zpHeebGTwH1U
22eqgJArlEKxrfarz7W5+uHZJHSjF/K9ZvunLGD0n9GOPMpji3UO3zeM8IYoWn7E
/EWK1XbjnssNemeeTZ+sDh+qrD7BOi+vCX1IyBxbfqnQfJZvmcPWpruy1UsO+aIC
0GY8Jr3OL69dDQ21jueJAh8EGAEIAAkFAlC9+dkCGwwACgkQL0VeKCTRjd9HCw/+
LQSVgLLF4ulYlPCjWIIuQwrPbJfWUVVr2dPUFVM85DCv8gBzk5c121snXh9Swovm
laBbw6ate3BmbXLh64jVE9Za5sbTWi7PCcbO/bpRy4d6oLmitmNw6cq0vjTLxUYy
bwuiJxWREkfxuU85EKdouN062YDevH+/YResmlJrcCE7LRlJFeRlKsrrwBU3BqYd
GgFJjKjQC1peeQ9fj62Y7xfwE9+PXbkiWO5u/Bk8hb1VZH1SoIRU98NHVcp6BVvp
VK0jLAXuSauSczULmpRjbyt1lhaAqivDTWEEZXiNNbRyp17c3nVdPWOcgBr42hdQ
z25CgZgyLCsvu82wuXLKJblrIPJX3Yf+si6KqEWBsmwdOWybsjygaF5HvzgFqAAD
U0goPWoQ71PorP2XOUNp5ZLkBQp5etvtkksjVNMIhnHn8PGMuoxO39EUGlWj2B5l
Cu8tSosAzB1pS8NcLZzoNoI9dOHrmgJmP+GrOUkcf5GhNZbMoj4GNfGBRYX0SZlQ
GuDrwNKYj73C4MWyNnnUFyq8nDHJ/G1NpaF2hiof9RBL4PUU/f92JkceXPBXA8gL
Mz2ig1OButwPPLFGQhWqxXAGrsS3Ny+BhTJfnfIbbkaLLphBpDZm1D9XKbAUvdd1
RZXoH+FTg9UAW87eqU610npOkT6cRaBxaMK/mDtGNdc=
=JTFu
-----END PGP PRIVATE KEY BLOCK-----
"""
|
andrejb/leap_mail
|
src/leap/mail/imap/tests/__init__.py
|
Python
|
gpl-3.0
| 13,000
|
class Solution(object):
    def distributeCandies(self, candies):
        """Return how many distinct candy kinds the sister can receive.

        She gets exactly half of the candies, so the answer is the number
        of distinct kinds, capped at len(candies) // 2.

        :type candies: List[int]
        :rtype: int

        Fix: the original used `/`, which is float division under
        Python 3 and violated the declared int return type; the if/else
        on the same comparison collapses to min().
        """
        return min(len(set(candies)), len(candies) // 2)
|
sadad111/leetcodebox
|
Distribute Candies.py
|
Python
|
gpl-3.0
| 338
|
"""Utility modules used throughout the rest of the codebase.
Note that these modules should in general not depend (at compile time)
on any other modules, to avoid cyclic dependencies. They could be
imported (at the top level) into any other module so should not have
any top-level imports from other modules.
"""
|
markgw/jazzparser
|
src/jazzparser/utils/__init__.py
|
Python
|
gpl-3.0
| 318
|
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
import operator
# for easier re-usage (because Python hasn't an enum type)
class Targets:
    """Bit-flag constants for the Qt kits/targets used by the system tests.

    Python 2 module (the file uses ``ur''`` literals elsewhere), so map()
    returns a list here and ``reduce`` is a builtin.  NOTE(review):
    ``platform`` and ``test`` are presumably injected by the surrounding
    Squish test environment -- they are not imported in this file.
    """
    # seven single-bit flags: 1, 2, 4, ..., 64
    ALL_TARGETS = map(lambda x: 2 ** x , range(7))
    (DESKTOP_474_GCC,
     DESKTOP_480_DEFAULT,
     SIMULATOR,
     EMBEDDED_LINUX,
     DESKTOP_521_DEFAULT,
     DESKTOP_531_DEFAULT,
     DESKTOP_541_GCC) = ALL_TARGETS
    @staticmethod
    def desktopTargetClasses():
        # all targets except the non-desktop ones (simulator / embedded)
        desktopTargets = (sum(Targets.ALL_TARGETS) & ~Targets.SIMULATOR & ~Targets.EMBEDDED_LINUX)
        if platform.system() == 'Darwin':
            # the 5.4.1 GCC kit is excluded on OS X
            desktopTargets &= ~Targets.DESKTOP_541_GCC
        return desktopTargets
    @staticmethod
    def qt4Classes():
        # combined bitmask of every Qt4-based target
        return (Targets.DESKTOP_474_GCC | Targets.DESKTOP_480_DEFAULT
                | Targets.SIMULATOR | Targets.EMBEDDED_LINUX)
    @staticmethod
    def getStringForTarget(target):
        # map a single target flag to its kit display name (None if unknown)
        if target == Targets.DESKTOP_474_GCC:
            return "Desktop 474 GCC"
        elif target == Targets.DESKTOP_480_DEFAULT:
            # on Windows the 4.8.0 kit is the MSVC2010 toolchain
            if platform.system() in ('Windows', 'Microsoft'):
                return "Desktop 480 MSVC2010"
            else:
                return "Desktop 480 GCC"
        elif target == Targets.SIMULATOR:
            return "Qt Simulator"
        elif target == Targets.EMBEDDED_LINUX:
            return "Embedded Linux"
        elif target == Targets.DESKTOP_521_DEFAULT:
            return "Desktop 521 default"
        elif target == Targets.DESKTOP_531_DEFAULT:
            return "Desktop 531 default"
        elif target == Targets.DESKTOP_541_GCC:
            return "Desktop 541 GCC"
        else:
            return None
    @staticmethod
    def getTargetsAsStrings(targets):
        # convert an iterable of target flags to their display names
        if not isinstance(targets, (tuple,list)):
            test.fatal("Wrong usage... This function handles only tuples or lists.")
            return None
        result = map(Targets.getStringForTarget, targets)
        if None in result:
            test.fatal("You've passed at least one unknown target!")
        return result
    @staticmethod
    def intToArray(targets):
        # split a combined bitmask into the list of individual flags set in it
        return filter(lambda x: x & targets, Targets.ALL_TARGETS)
    @staticmethod
    def arrayToInt(targetArr):
        # OR a list of target flags back into a single bitmask
        return reduce(operator.or_, targetArr, 0)
    @staticmethod
    def getDefaultKit():
        return Targets.DESKTOP_521_DEFAULT
# this class holds some constants for easier usage inside the Projects view
class ProjectSettings:
    """Identifiers for the settings categories shown in the Projects view."""
    BUILD = 1
    RUN = 2
# this class defines some constants for the views of the creator's MainWindow
class ViewConstants:
    """Indices of the mode-selector tabs in Qt Creator's main window."""
    WELCOME, EDIT, DESIGN, DEBUG, PROJECTS, HELP = range(6)
    FIRST_AVAILABLE = 0
    # always adjust the following to the highest value of the available ViewConstants when adding new
    LAST_AVAILABLE = HELP
    # this function returns a regex of the tooltip of the FancyTabBar elements
    # this is needed because the keyboard shortcut is OS specific
    # if the provided argument does not match any of the ViewConstants it returns None
    @staticmethod
    def getToolTipForViewTab(viewTab):
        # Python 2 'ur' literals: raw unicode so the regex escapes survive;
        # (Ctrl\+|\u2303) matches either "Ctrl+" or the Mac control glyph.
        if viewTab == ViewConstants.WELCOME:
            toolTip = ur'Switch to <b>Welcome</b> mode <span style="color: gray; font-size: small">(Ctrl\+|\u2303)%d</span>'
        elif viewTab == ViewConstants.EDIT:
            toolTip = ur'Switch to <b>Edit</b> mode <span style="color: gray; font-size: small">(Ctrl\+|\u2303)%d</span>'
        elif viewTab == ViewConstants.DESIGN:
            toolTip = ur'Switch to <b>Design</b> mode <span style="color: gray; font-size: small">(Ctrl\+|\u2303)%d</span>'
        elif viewTab == ViewConstants.DEBUG:
            toolTip = ur'Switch to <b>Debug</b> mode <span style="color: gray; font-size: small">(Ctrl\+|\u2303)%d</span>'
        elif viewTab == ViewConstants.PROJECTS:
            toolTip = ur'Switch to <b>Projects</b> mode <span style="color: gray; font-size: small">(Ctrl\+|\u2303)%d</span>'
        elif viewTab == ViewConstants.HELP:
            toolTip = ur'Switch to <b>Help</b> mode <span style="color: gray; font-size: small">(Ctrl\+|\u2303)%d</span>'
        else:
            return None
        # keyboard shortcuts are 1-based while the tab constants are 0-based
        return toolTip % (viewTab + 1)
class SubprocessType:
    """Kinds of application subprocesses launched by the system tests."""
    QT_WIDGET = 0
    QT_QUICK_APPLICATION = 1
    QT_QUICK_UI = 2
    USER_DEFINED = 3

    @staticmethod
    def getWindowType(subprocessType, qtQuickVersion="1.1"):
        """Return the main-window type name for a subprocess kind.

        qtQuickVersion selects between the QtQuick 1 and 2 variants where
        that matters; unknown kinds log a fatal error and yield None.
        """
        if subprocessType == SubprocessType.QT_WIDGET:
            return "QMainWindow"
        elif subprocessType == SubprocessType.QT_QUICK_APPLICATION:
            major = "1" if qtQuickVersion[0] == "1" else "2"
            return "QtQuick%sApplicationViewer" % major
        elif subprocessType == SubprocessType.QT_QUICK_UI:
            return "QDeclarativeViewer" if qtQuickVersion == "1.1" else "QQuickView"
        elif subprocessType == SubprocessType.USER_DEFINED:
            return "user-defined"
        test.fatal("Could not determine the WindowType for SubprocessType %s" % subprocessType)
        return None
class QtInformation:
    """Indices for addressing Qt version / bin path / lib path information.

    NOTE(review): the consumers of these indices are not visible in this
    file -- presumably they index tuples or lists queried elsewhere.
    """
    QT_VERSION = 0
    QT_BINPATH = 1
    QT_LIBPATH = 2
class LibType:
    """Library project kinds and their wizard display names."""
    SHARED = 0
    STATIC = 1
    QT_PLUGIN = 2

    @staticmethod
    def getStringForLib(libType):
        """Map a LibType constant to its display string, or None if unknown."""
        labels = ((LibType.SHARED, "Shared Library"),
                  (LibType.STATIC, "Statically Linked Library"),
                  (LibType.QT_PLUGIN, "Qt Plugin"))
        for known, label in labels:
            if libType == known:
                return label
        return None
class Qt5Path:
    """Well-known install locations of the Qt 5.2/5.3/5.4 docs and examples."""
    DOCS = 0
    EXAMPLES = 1
    @staticmethod
    def getPaths(pathSpec):
        # choose the per-version subdirectory for the requested path kind
        if pathSpec == Qt5Path.DOCS:
            path52 = "/doc"
            path53 = "/Docs/Qt-5.3"
            path54 = "/Docs/Qt-5.4"
        elif pathSpec == Qt5Path.EXAMPLES:
            path52 = "/examples"
            path53 = "/Examples/Qt-5.3"
            path54 = "/Examples/Qt-5.4"
        else:
            test.fatal("Unknown pathSpec given: %s" % str(pathSpec))
            return []
        # NOTE(review): ``test`` and ``__is64BitOS__`` come from the
        # surrounding test environment; map() returns a list on Python 2.
        if platform.system() in ('Microsoft', 'Windows'):
            return ["C:/Qt/Qt5.2.1/5.2.1/msvc2010" + path52,
                    "C:/Qt/Qt5.3.1" + path53, "C:/Qt/Qt5.4.1" + path54]
        elif platform.system() == 'Linux':
            if __is64BitOS__():
                return map(os.path.expanduser, ["~/Qt5.2.1/5.2.1/gcc_64" + path52,
                                                "~/Qt5.3.1" + path53, "~/Qt5.4.1" + path54])
            return map(os.path.expanduser, ["~/Qt5.2.1/5.2.1/gcc" + path52,
                                            "~/Qt5.3.1" + path53, "~/Qt5.4.1" + path54])
        else:
            # remaining platform is assumed to be OS X (no 5.4 entry here)
            return map(os.path.expanduser, ["~/Qt5.2.1/5.2.1/clang_64" + path52,
                                            "~/Qt5.3.1" + path53])
|
pivonroll/Qt_Creator
|
tests/system/shared/classes.py
|
Python
|
gpl-3.0
| 7,863
|
class Information:
    """Plain data holder for one information record."""

    def __init__(self, objectid, cvid, information_type_id, description):
        """Store the record's identifiers and text; new records start active."""
        (self.objectid,
         self.cvid,
         self.information_type_id,
         self.description) = (objectid, cvid, information_type_id, description)
        self.deleted = 0  # soft-delete flag: 0 means the record is active
|
itucsdb1611/itucsdb1611
|
classes/information.py
|
Python
|
gpl-3.0
| 270
|
#! /usr/bin/env python
import argparse
import sys
from soma import aims
import highres_cortex.cortex_topo
def fix_cortex_topology_files(input_filename, output_filename,
                              filling_size, fclosing):
    """Call highres_cortex.cortex_topo.fix_cortex_topology on files."""
    volume = aims.read(input_filename)
    fixed_volume = highres_cortex.cortex_topo.fix_cortex_topology(
        volume, filling_size, fclosing)
    # BUG: aims.write offers no error checking, so the program will exit
    # successfully even if writing fails
    aims.write(fixed_volume, output_filename)
def parse_command_line(argv=sys.argv):
    """Parse the script's command line."""
    cli = argparse.ArgumentParser(
        description="""\
Impose the topology of a hollow sphere onto the cortex in a voxelwise
segmentation, which uses the following labels: 100 in the cortex itself, 0
outside (CSF), 200 inside (white matter). In the output, the cortex is defined
using 6-connectivity, each other compartment using 26-connectivity.
""")
    cli.add_argument("input",
                     help="3D volume containing the input segmentation")
    cli.add_argument("output",
                     help="output 3D volume")
    cli.add_argument("--filling-size", type=float, default=2.,
                     help="""\
The size, in millimetres, of the largest holes in either cortical boundary that
will be filled. This must be smaller than the thinnest cortex in the image. The
default value is 2 mm, which is appropriate for a human brain.""")
    cli.add_argument("--fclosing", type=float, default=10.,
                     help="""\
The radius of the morphological closing which is used by VipHomotopic in
Cortical surface mode to retrieve the brain's outer envelope. The default
value, 10 mm, is appropriate for a human brain.""")
    namespace = cli.parse_args(argv[1:])
    # 'not x >= 0' (rather than 'x < 0') also rejects NaN values
    if not namespace.filling_size >= 0:
        cli.error("filling_size must be a non-negative number")
    if not namespace.fclosing >= 0:
        cli.error("fclosing must be a non-negative number")
    return namespace
def main(argv=sys.argv):
    """The script's entry point."""
    options = parse_command_line(argv)
    status = fix_cortex_topology_files(
        options.input, options.output, options.filling_size, options.fclosing)
    # the worker returns None on success, which maps to exit status 0
    return status or 0
if __name__ == "__main__":
sys.exit(main())
|
domanova/highres-cortex
|
python/highres_cortex/scripts/fix_cortex_topology.py
|
Python
|
gpl-3.0
| 2,374
|
# mhkutil - A utility for dealing with Mohawk archives
#
# mhkutil is the legal property of its developers, whose names
# can be found in the AUTHORS file distributed with this source
# distribution.
#
# mhkutil is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# mhkutil is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mhkutil. If not, see <http://www.gnu.org/licenses/>.
import os
import struct
# TODO: Find a better place for this
def makeTag(text):
    """Convert a 4-character tag into its big-endian 32-bit integer form."""
    if len(text) != 4:
        raise Exception('Invalid text size {0}'.format(len(text)))
    (tag,) = struct.unpack('>L', text)
    return tag
# TODO: Find a better place for this
def tagToString(tag):
    """Inverse of makeTag: pack a 32-bit integer tag back into 4 bytes."""
    packed = struct.pack('>L', tag)
    return packed
class Stream:
    """Mixin of typed read helpers; concrete subclasses supply read(size)."""
    def _unpackOne(self, fmt, size):
        # Read exactly `size` bytes and decode a single value from them.
        return struct.unpack(fmt, self.read(size))[0]
    def readByte(self):
        return self._unpackOne('B', 1)
    def readSByte(self):
        return self._unpackOne('b', 1)
    def readUint16LE(self):
        return self._unpackOne('<H', 2)
    def readSint16LE(self):
        return self._unpackOne('<h', 2)
    def readUint16BE(self):
        return self._unpackOne('>H', 2)
    def readSint16BE(self):
        return self._unpackOne('>h', 2)
    def readUint32LE(self):
        return self._unpackOne('<L', 4)
    def readSint32LE(self):
        return self._unpackOne('<l', 4)
    def readUint32BE(self):
        return self._unpackOne('>L', 4)
    def readSint32BE(self):
        return self._unpackOne('>l', 4)
    def readCString(self):
        # Collect characters up to (but not including) the NUL terminator.
        chars = []
        while True:
            value = self.readByte()
            if value == 0:
                break
            chars.append(chr(value))
        return ''.join(chars)
class WriteStream:
    """Mixin of typed write helpers; concrete subclasses supply write(data)."""
    def _packOne(self, fmt, value):
        # Encode a single value with the struct format and emit it.
        self.write(struct.pack(fmt, value))
    def writeByte(self, x):
        self._packOne('B', x)
    def writeSByte(self, x):
        self._packOne('b', x)
    def writeUint16LE(self, x):
        self._packOne('<H', x)
    def writeSint16LE(self, x):
        self._packOne('<h', x)
    def writeUint16BE(self, x):
        self._packOne('>H', x)
    def writeSint16BE(self, x):
        self._packOne('>h', x)
    def writeUint32LE(self, x):
        self._packOne('<L', x)
    def writeSint32LE(self, x):
        self._packOne('<l', x)
    def writeUint32BE(self, x):
        self._packOne('>L', x)
    def writeSint32BE(self, x):
        self._packOne('>l', x)
class FileStream(Stream):
    """Read stream backed by a seekable file object."""
    def __init__(self, handle):
        self._handle = handle
        # compute the total size once by seeking to the end, then rewind
        handle.seek(0, os.SEEK_END)
        self._size = handle.tell()
        handle.seek(0)
    def tell(self):
        # current position in the underlying file
        return self._handle.tell()
    def size(self):
        # size captured at construction time, not re-queried
        return self._size
    def seek(self, offset, whence=os.SEEK_SET):
        return self._handle.seek(offset, whence)
    def read(self, size):
        # normalize to bytearray so indexing yields ints, matching ByteStream
        return bytearray(self._handle.read(size))
class FileWriteStream(WriteStream):
    """Write stream backed by a file object."""
    def __init__(self, handle):
        self._handle = handle
    def write(self, x):
        # delegate directly; buffering/flushing is the handle's concern
        self._handle.write(x)
class ByteStream(Stream):
    """Read stream over an in-memory buffer with an explicit cursor."""
    def __init__(self, data):
        self._data = data
        self._pos = 0
    def tell(self):
        return self._pos
    def size(self):
        return len(self._data)
    def seek(self, offset, whence=os.SEEK_SET):
        # Resolve the base position the offset is relative to.
        if whence == os.SEEK_CUR:
            base = self._pos
        elif whence == os.SEEK_END:
            base = len(self._data)
        else:
            base = 0
        self._pos = base + offset
    def read(self, size):
        if size == 0:
            return bytearray()
        begin = self._pos
        self._pos = begin + size
        # Slicing past the end simply returns fewer bytes, like file reads.
        return self._data[begin:self._pos]
|
clone2727/mhkutil
|
stream.py
|
Python
|
gpl-3.0
| 3,692
|
#!/usr/bin/env python
"""
=================================================
Draw a Quantile-Quantile Plot and Confidence Band
=================================================
This is an example of drawing a quantile-quantile plot with a confidence level
(CL) band.
"""
print __doc__
import ROOT
from rootpy.interactive import wait
from rootpy.plotting import Hist, Canvas, Legend, set_style
from rootpy.plotting.contrib.quantiles import qqgraph
set_style('ATLAS')
c = Canvas(width=1200, height=600)
c.Divide(2, 1, 1e-3, 1e-3)
rand = ROOT.TRandom3()
h1 = Hist(100, -5, 5, name="h1", title="Histogram 1",
linecolor='red', legendstyle='l')
h2 = Hist(100, -5, 5, name="h2", title="Histogram 2",
linecolor='blue', legendstyle='l')
for ievt in xrange(10000):
h1.Fill(rand.Gaus(0, 0.8))
h2.Fill(rand.Gaus(0, 1))
pad = c.cd(1)
h1.Draw('hist')
h2.Draw('hist same')
leg = Legend([h1, h2], pad=pad, leftmargin=0.5,
topmargin=0.11, rightmargin=0.05,
textsize=20)
leg.Draw()
pad = c.cd(2)
gr = qqgraph(h1, h2)
gr.xaxis.title = h1.title
gr.yaxis.title = h2.title
gr.fillcolor = 17
gr.fillstyle = 'solid'
gr.linecolor = 17
gr.markercolor = 'darkred'
gr.markerstyle = 20
gr.title = "QQ with CL"
gr.Draw("ap")
x_min = gr.GetXaxis().GetXmin()
x_max = gr.GetXaxis().GetXmax()
y_min = gr.GetXaxis().GetXmin()
y_max = gr.GetXaxis().GetXmax()
gr.Draw('a3')
gr.Draw('Xp same')
# a straight line y=x to be a reference
f_dia = ROOT.TF1("f_dia", "x",
h1.GetXaxis().GetXmin(),
h1.GetXaxis().GetXmax())
f_dia.SetLineColor(9)
f_dia.SetLineWidth(2)
f_dia.SetLineStyle(2)
f_dia.Draw("same")
leg = Legend(3, pad=pad, leftmargin=0.45,
topmargin=0.45, rightmargin=0.05,
textsize=20)
leg.AddEntry(gr, "QQ points", "p")
leg.AddEntry(gr, "68% CL band", "f")
leg.AddEntry(f_dia, "Diagonal line", "l")
leg.Draw()
c.Modified()
c.Update()
c.Draw()
wait()
|
qbuat/rootpy
|
examples/stats/plot_quantiles.py
|
Python
|
gpl-3.0
| 1,944
|
############################################################################
# This file is part of LImA, a Library for Image Acquisition
#
# Copyright (C) : 2009-2011
# European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
############################################################################
import os
# __path__[0] is this package's install directory; one subdirectory per
# supported platform flavour is expected beneath it.
root_name = __path__[0]
csadmin_dirs = ['/csadmin/local', '/csadmin/common']
script_get_os = 'scripts/get_compat_os.share'
# locate the site "csadmin" helper script that reports compatible OS names
get_os = None
for d in csadmin_dirs:
    aux_get_os = os.path.join(d, script_get_os)
    if os.path.exists(aux_get_os):
        get_os = aux_get_os
        break
if get_os is not None:
    # the script prints a whitespace-separated list of compatible platform
    # names; pick the first one that has a matching subdirectory here
    compat_plat = os.popen(get_os).readline().strip()
    plat = None
    compat_plat_list = compat_plat.split()
    for aux_plat in compat_plat_list:
        if aux_plat.strip() in os.listdir(root_name):
            plat = aux_plat
            break
    if plat is None:
        # Python 2 raise syntax
        raise ImportError, ('Could not find Lima directory for %s '
                            '(nor compat. %s) platform(s) at %s' %
                            (compat_plat_list[0],
                             compat_plat_list[1:], root_name))
    lima_plat = os.path.join(root_name, plat)
    # prepend so the platform-specific build is importable as submodules
    __path__.insert(0, lima_plat)
# This mandatory variable is systematically overwritten by 'make install'
os.environ['LIMA_LINK_STRICT_VERSION'] = 'MINOR'
if get_os is not None:
    # export every platform subdirectory except the private 'lib' dir
    all_dirs = os.listdir(lima_plat)
    all_dirs.remove('lib')
    __all__ = all_dirs
    del plat, compat_plat, aux_plat, lima_plat, all_dirs
# scrub all temporaries from the package namespace
del root_name, csadmin_dirs, get_os, script_get_os, d, aux_get_os
del os
|
gjover/Lima_subtree
|
python/__init__.py
|
Python
|
gpl-3.0
| 2,173
|
#!/usr/bin/env python
# coding=utf-8
"""54. Poker hands
https://projecteuler.net/problem=54
In the card game poker, a hand consists of five cards and are ranked, from
lowest to highest, in the following way:
* **High Card** : Highest value card.
* **One Pair** : Two cards of the same value.
* **Two Pairs** : Two different pairs.
* **Three of a Kind** : Three cards of the same value.
* **Straight** : All cards are consecutive values.
* **Flush** : All cards of the same suit.
* **Full House** : Three of a kind and a pair.
* **Four of a Kind** : Four cards of the same value.
* **Straight Flush** : All cards are consecutive values of same suit.
* **Royal Flush** : Ten, Jack, Queen, King, Ace, in same suit.
The cards are valued in the order:
2, 3, 4, 5, 6, 7, 8, 9, 10, Jack, Queen, King, Ace.
If two players have the same ranked hands then the rank made up of the highest
value wins; for example, a pair of eights beats a pair of fives (see example 1
below). But if two ranks tie, for example, both players have a pair of queens,
then highest cards in each hand are compared (see example 4 below); if the
highest cards tie then the next highest cards are compared, and so on.
Consider the following five hands dealt to two players:
**Hand**| | **Player 1**| | **Player 2**| | **Winner**
---|---|---|---|---|---|---
**1**| | 5H 5C 6S 7S KD
Pair of Fives
| | 2C 3S 8S 8D TD
Pair of Eights
| | Player 2
**2**| | 5D 8C 9S JS AC
Highest card Ace
| | 2C 5C 7D 8S QH
Highest card Queen
| | Player 1
**3**| | 2D 9C AS AH AC
Three Aces
| | 3D 6D 7D TD QD
Flush with Diamonds
| | Player 2
**4**| | 4D 6S 9H QH QC
Pair of Queens
Highest card Nine
| | 3D 6D 7H QD QS
Pair of Queens
Highest card Seven
| | Player 1
**5**| | 2H 2D 4C 4D 4S
Full House
With Three Fours
| | 3C 3D 3S 9S 9D
Full House
with Three Threes
| | Player 1
The file, [poker.txt](project/resources/p054_poker.txt), contains one-thousand
random hands dealt to two players. Each line of the file contains ten cards
(separated by a single space): the first five are Player 1's cards and the
last five are Player 2's cards. You can assume that all hands are valid (no
invalid characters or repeated cards), each player's hand is in no specific
order, and in each hand there is a clear winner.
How many hands does Player 1 win?
"""
|
openqt/algorithms
|
projecteuler/pe054-poker-hands.py
|
Python
|
gpl-3.0
| 2,408
|
# /* UVa problem: 10407
# * Simple Division
# * Topic: Number Theory
# *
# * Level: challenging
# *
# * Brief problem description:
# * Given a list of numbers, a1, a2, a3.... an compute a number m such that
# * ai mod m = x for some arbitrary x for all ai.
# * In other words, find a congruence class modulo m to which each number belongs
# * Solution Summary:
# * Compute the differences of each of the numbers, then find the gcd
# * of all of the differences.
# * Used Resources:
# *
# * Textbook: Competitive Programming 3
# * Hints given on 'Spanish Problem Archive'
# *
# * I hereby certify that I have produced the following solution myself
# * using only the resources listed above in accordance with the CMPUT
# * 403 collaboration policy.
# *
# * Tristan Hunt
# */
import sys
def gcd(a, b):
    """Greatest common divisor via the iterative Euclidean algorithm."""
    while b != 0:
        a, b = b, a % b
    return a
def lcm(a, b):
    """Least common multiple of a and b.

    Divides b by the gcd before multiplying to keep intermediates small.
    Fix: uses floor division so the result stays an int under Python 3
    (the original `/` would produce a float there).
    """
    return (a * (b // gcd(a, b)))
def load():
    """Generator: yield one test case per stdin line as a list of ints.

    The last value of each line is dropped (presumably the 0 terminator
    of the problem's input format -- confirm against the judge data); a
    line that becomes empty after dropping it ends the input.
    """
    while(1):
        line = next(sys.stdin).split()
        # parse every whitespace-separated token as an integer
        line = [int(x) for x in line]
        line.pop(-1)  # discard the trailing value
        if len(line) == 0:
            # sentinel line (only the terminator): stop reading
            break
        yield(line)
# Driver: the answer for each case is the gcd of the consecutive
# differences, since ai == aj (mod m) exactly when m divides ai - aj.
for (sequence) in load():
    n = len(sequence)
    diff = list()
    for i in range(0, n-1):
        # Now find gcd of all the differences:
        diff.append(abs(sequence[i+1] - sequence[i])) #compute the differences
    if n == 2:
        # a single difference is the answer itself
        sys.stdout.write("{}\n".format(diff[0]))
    else:
        # Compute gcd of the differences
        #print(diff)
        #sys.stdout.write("gcd({}, {}) = {}\n".format(diff[0], diff[1], gcd(diff[0], diff[1])))
        m = gcd(diff[0], diff[1])
        # fold the remaining differences (indices 2 .. n-2) into the gcd
        for i in range(2, n-1):
            #sys.stdout.write("gcd({}, {}) = {}\n".format(m, diff[i], gcd(m, diff[i])))
            m = gcd(m, diff[i])
        sys.stdout.write("{}\n".format(m))
|
tristan-hunt/UVaProblems
|
AcceptedUVa/uva_challenging/uva_10407.py
|
Python
|
gpl-3.0
| 1,675
|
#!/usr/bin/env python3
# Copyright 2013 Iain Peddie inr314159@hotmail.com
#
# This file is part of WellBehavedPython
#
# WellBehavedPython is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WellBehavedPython is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WellBehavedPython. If not, see <http://www.gnu.org/licenses/>.
from WellBehavedPython.api import *
from WellBehavedPython.Engine.TestCase import TestCase
from WellBehavedPython.Discovery.ModuleExaminer import ModuleExaminer
class ModuleExaminerTests(TestCase):
    """Tests for ModuleExaminer's discovery of classes, modules and packages."""
    def test_examiner_can_find__only_class_in_simple_module(self):
        # Given an examiner pointed at a module containing one class
        examiner = ModuleExaminer('WellBehavedPythonTests.Samples.SampleModule')
        # When all classes are listed (this imports the module)
        classes = examiner.listAllClasses()
        # Then exactly that class is reported
        from ..Samples import SampleModule
        expect(classes).toEqual([SampleModule.SampleTests])
    def test_examiner_can_find_all_classes_in_complex_module(self):
        # Given an examiner pointed at a module containing several classes
        examiner = ModuleExaminer('WellBehavedPythonTests.Samples.SampleComplexModule')
        # When all classes are listed (this imports the module)
        classes = examiner.listAllClasses()
        # Then every class in the module is reported
        from ..Samples import SampleComplexModule
        expect(classes).toContain(SampleComplexModule.SampleFirstTests)
        expect(classes).toContain(SampleComplexModule.SampleSecondTests)
        expect(classes).toContain(SampleComplexModule.StandaloneClass)
    def test_examiner_can_find_all_modules(self):
        # Given an examiner pointed at the Samples package
        examiner = ModuleExaminer('WellBehavedPythonTests.Samples')
        # When all modules are listed
        modules = examiner.listAllModules()
        # Then the package's direct modules are reported by dotted name
        from ..Samples import SampleModule
        from ..Samples import SampleComplexModule
        expect(modules).toContain('WellBehavedPythonTests.Samples.SampleModule')
        expect(modules).toContain('WellBehavedPythonTests.Samples.SampleComplexModule')
    def test_examiner_is_not_recursive_for_modules(self):
        # Given an examiner pointed at the top-level test package
        examiner = ModuleExaminer('WellBehavedPythonTests')
        # When all modules are listed
        modules = examiner.listAllModules()
        # Then only direct children appear, not modules nested in subpackages
        expect(modules).toContain('WellBehavedPythonTests.BackwardsCompatibilityTests')
        expect(modules).Not.toContain('WellBehavedPythonTests.Discovery.Samples.SampleModule')
    def test_examining_can_find_subpackages(self):
        # Given an examiner pointed at the top-level test package
        examiner = ModuleExaminer('WellBehavedPythonTests')
        # When all packages are listed
        packages = examiner.listAllPackages()
        # Then direct subpackages are reported by dotted name
        expect(packages).toContain('WellBehavedPythonTests.Discovery')
|
iain-peddie/well-behaved-python
|
tests/WellBehavedPythonTests/Discovery/ModuleExaminerTests.py
|
Python
|
gpl-3.0
| 3,114
|
# -*- coding: Latin-1 -*-
"""
@file VelocityOverTime.py
@author Sascha Krieg
@author Daniel Krajzewicz
@author Michael Behrisch
@date 2008-05-29
Shows the velocityCurve of the chosen taxi for all available sources, thereby the time duration per Edge is apparent.
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2008-2013 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
from pylab import *
import profile
import util.Path as path
import util.Reader as reader
from cPickle import load
from analysis.Taxi import *
from matplotlib.collections import LineCollection
from matplotlib.colors import colorConverter
#global vars
WEE=True #=withoutEmptyEdges decide which analysis file should be used
edgeDict={}
taxis=[]
def main():
    """Entry point: load the pickled edge-length dict and the taxi analysis
    data, then either save charts for all taxis or show a single one."""
    print "start program"
    global taxis, edgeDict
    #decide if you want to save charts for every taxi or show a single one
    all=False;
    taxiId="316_3"
    #load data
    edgeDict=load(open(path.edgeLengthDict,'r'))
    taxis=reader.readAnalysisInfo(WEE)
    #reader.readEdgesLength()
    if all:
        plotAllTaxis()
    else:
        plotIt(taxiId)
    show()
    print "end"
def plotAllTaxis():
    """Plot every taxi and save each chart as a PNG into the output folder."""
    #kind of progress bar :-)
    allT=len(taxis)
    lastProz=0
    for i in range(5,105,5):
        s="%02d" %i
        print s,
    print "%"
    for i in range(allT):
        actProz=(100*i/allT)
        if actProz!=lastProz and actProz%5==0:
            print "**",
            lastProz=actProz
        # plotIt returns -1 when the taxi has no usable route; skip saving then
        if plotIt(taxis[i].id)!=-1:
            savefig(path.vOverTimeDir+"taxi_"+str(taxis[i].id)+".png", format="png")
            close() #close the figure
def fetchData(taxiId):
    """Fetch plot data for the given taxi id.

    Returns (route, values): route holds edge labels and cumulative-time
    locations, values holds the (time, speed) polylines.  Slots 0/1 carry
    the FCD source, slots 2/3 the simulated-FCD source (selected via the
    offset x below).
    """
    route=[[],[],[],[]] #route of the taxi (edge, length, edgeSimFCD(to find doubles))
    values=[[],[],[],[]] #x,y1,x2,y2 (position, vFCD,vSIMFCD)
    actLen=0
    x=0
    def getTime(s,v):
        # travel time for distance s at speed v (km/h); 0 guards divide-by-zero
        if v==0:
            return 0
        return s/(v/3.6)
    for step in taxis[taxis.index(taxiId)].getSteps():
        if step.source==SOURCE_FCD or step.source==SOURCE_SIMFCD:
            routeLen=edgeDict[step.edge]
            #save the simFCD infos in apart Lists
            # (switch the offset once, when the first simFCD step appears)
            if step.source==SOURCE_SIMFCD and len(values[2])<=0:
                x=2
                actLen=0
            if len(route[0+x])>0 and step.edge==route[0+x][-1]:
                # repeated edge: average the speed into the last two samples
                #print step.edge
                values[1+x][-1]=(values[1+x][-1]+step.speed)/2.0
                values[1+x][-2]=values[1+x][-1]
            else:
                #start point of route
                values[0+x].append(actLen)
                values[1+x].append(step.speed)
            actLen+=getTime(routeLen,step.speed)
            print "l ",actLen," rL ",routeLen," s ",step.speed
            route[0+x].append(step.edge) #label
            route[1+x].append(actLen) #location
            #end point of route
            values[0+x].append(actLen)
            values[1+x].append(step.speed)
    return route,values
def plotIt(taxiId):
    """Draw the FCD vs. simulated-FCD speed-over-time chart for one taxi.

    Returns -1 when no route data exists for the vehicle, 1 on success.
    """
    width=12 #1200px
    height=9 #900px
    #fetch data
    route,values=fetchData(taxiId)
    #check if a route exists for this vehicle
    if len(route[1])<1 or len(route[3])<1:
        return -1
    #make nice labels
    maxRoute=max((route[1][-1]),route[3][-1])
    minDist=(maxRoute/(width-4.5))
    def makethemNice(source=SOURCE_FCD):
        """Create the x-axis labels for the FCD or simFCD chart, hiding
        labels that would be drawn closer together than minDist."""
        if source==SOURCE_FCD:
            label=0; loc=1
        elif source==SOURCE_SIMFCD:
            label=2; loc=3
        else:
            assert False
        lastShown=route[loc][0]
        for i in range(len(route[label])):
            if i==0 or i==len(route[label])-1:
                # first and last labels are always shown
                route[label][i]=str(int(round(route[loc][i])))+"\n"+route[label][i]
            elif route[loc][i]-lastShown>minDist: #if distance between last Label location and actual location big enough
                route[label][i]=str(int(round(route[loc][i])))+"\n"+route[label][i]
                lastShown=route[loc][i]
            else:
                route[label][i]=""
        if route[loc][-1]-lastShown<minDist: #check if the last shown element troubles the last
            route[label][route[loc].index(lastShown)]=""
    makethemNice(SOURCE_FCD)
    makethemNice(SOURCE_SIMFCD)
    #plot the results
    fig=figure(figsize=(width,height), dpi=96)
    subplot(211)
    title(U"Geschwindigkeit \u00FCber Zeit pro Kante")
    ylabel("v (km/h)")
    grid(True)
    #set the x scale
    xticks(route[1],route[0])
    plot(values[0],values[1], label='FCD')
    legend()
    #set the axis limits (shared y maximum across both subplots)
    axis([axis()[0],maxRoute,0,max(max(values[1]),max(values[3]))+10])
    subplot(212)
    xlabel("\n\nt (s) unterteilt in Routenabschnitte (Kanten)\n\n")
    ylabel("v (km/h)")
    grid(True)
    #set the x scale
    xticks(route[3],route[2])
    plot(values[2],values[3], label='simulierte FCD', color='g')
    legend()
    #set the axis limits (shared y maximum across both subplots)
    axis([axis()[0],maxRoute,0,max(max(values[1]),max(values[3]))+10])
    return 1
#start the program (swap in the profile line below to profile a run instead)
#profile.run('main()')
main()
|
rudhir-upretee/Sumo17_With_Netsim
|
tools/projects/TaxiFCD_Krieg/src/analysis/VelocityOverTime.py
|
Python
|
gpl-3.0
| 5,635
|
import abc
from typing import Optional, Callable
from math import fabs
import itertools
from copy import copy
import numbers
from array import array
from .space2d import Point2D, Segment2D
from ...orientations.orientations import *
from ...mathematics.statistics import *
from ...mathematics.quaternions import *
from ...utils.types import check_type
class Shape3D(object, metaclass=abc.ABCMeta):
    # Abstract base for 3D shapes; concrete subclasses must provide
    # area() and length().
    @abc.abstractmethod
    def area(self):
        """Calculate shape area"""
    @abc.abstractmethod
    def length(self):
        """Calculate shape length"""
    '''
    @abc.abstractmethod
    def clone(self):
        """Create a clone of the shape"""
    '''
class Point3D:
    """
    Cartesian point.
    Dimensions: 3D
    """
    def __init__(
        self,
        x: numbers.Real,
        y: numbers.Real,
        z: numbers.Real = 0.0
    ):
        """
        Construct a Point instance.
        :param x: point x coordinate.
        :type x: numbers.Real.
        :param y: point y coordinate.
        :type y: numbers.Real.
        :param z: point z coordinate.
        :type z: numbers.Real.
        """
        vals = [x, y]  # NOTE(review): only x and y are type/finiteness-checked; z is accepted as-is — confirm intended
        if any(map(lambda val: not isinstance(val, numbers.Real), vals)):
            raise Exception("X and y input values must be integer or float type")
        if not all(map(math.isfinite, vals)):
            raise Exception("X and y input values must be finite (#03)")
        self._x = float(x)
        self._y = float(y)
        self._z = float(z)
    @classmethod
    def fromVect(cls,
                 vect: Vect3D) -> 'Point3D':
        """
        Create a point from the components of a 3D vector.
        :param vect: the source vector.
        :return: a new Point3D carrying the vector's x, y, z components.
        """
        return cls(
            x=vect.x,
            y=vect.y,
            z=vect.z
        )
    @property
    def x(self) -> numbers.Real:
        """
        Return the x coordinate of the current point.
        :return: x coordinate.
        :rtype: numbers.Real
        Examples:
        >>> Point3D(4, 3, 7).x
        4.0
        >>> Point3D(-0.39, 3, 7).x
        -0.39
        """
        return self._x
    @property
    def y(self) -> numbers.Real:
        """
        Return the y coordinate of the current point.
        :return: y coordinate.
        :rtype: numbers.Real
        Examples:
        >>> Point3D(4, 3, 7).y
        3.0
        >>> Point3D(-0.39, 17.42, 7).y
        17.42
        """
        return self._y
    @property
    def z(self) -> numbers.Real:
        """
        Return the z coordinate of the current point.
        :return: z coordinate.
        :rtype: numbers.Real
        Examples:
        >>> Point3D(4, 3, 7).z
        7.0
        >>> Point3D(-0.39, 17.42, 8.9).z
        8.9
        """
        return self._z
    def __iter__(self):
        """
        Return the elements of a Point.
        :return:
        Examples:
        >>> x, y, z = Point3D(1,1)
        >>> x == 1
        True
        >>> y == 1
        True
        """
        return (i for i in self.a())
    def __repr__(self) -> str:
        return "Point3D({:.4f}, {:.4f}, {:.4f})".format(self.x, self.y, self.z)
    def __eq__(self,
               another: 'Point3D'
               ) -> bool:
        """
        Return True if objects are equal.
        :param another: another point.
        :type another: Point.
        :raise: Exception.
        Example:
        >>> Point3D(1., 1., 1.) == Point3D(1, 1, 1)
        True
        >>> Point3D(1., 1., 1.) == Point3D(1, 1, 1)
        True
        >>> Point3D(1., 1., 1.) == Point3D(1, 1, -1)
        False
        """
        if not isinstance(another, Point3D):
            raise Exception("Another instance must be a Point")
        return all([
            self.x == another.x,
            self.y == another.y,
            self.z == another.z
        ]
        )
    def __ne__(self,
               another: 'Point3D'
               ) -> bool:
        """
        Return False if objects are equal.
        Example:
        >>> Point3D(1., 1., 1.) != Point3D(0., 0., 0.)
        True
        >>> Point3D(1., 1., 1.) != Point3D(1, 1, 1)
        False
        """
        return not (self == another)
    def a(self) -> Tuple[numbers.Real, numbers.Real, numbers.Real]:
        """
        Return the individual values of the point.
        :return: double array of x, y, z values
        Examples:
        >>> Point3D(4, 3, 7).a()
        (4.0, 3.0, 7.0)
        """
        return self.x, self.y, self.z
    def __add__(self, another: 'Point3D') -> 'Point3D':
        """
        Sum of two points.
        :param another: the point to add
        :type another: Point3D
        :return: the sum of the two points
        :rtype: Point3D
        :raise: Exception
        Example:
        >>> Point3D(1, 0, 0) + Point3D(0, 1, 1)
        Point3D(1.0000, 1.0000, 1.0000)
        >>> Point3D(1, 1, 1) + Point3D(-1, -1, -1)
        Point3D(0.0000, 0.0000, 0.0000)
        """
        check_type(another, "Second point", Point3D)
        x0, y0, z0 = self
        x1, y1, z1 = another
        return Point3D(
            x=x0+x1,
            y=y0+y1,
            z=z0+z1
        )
    def __sub__(self,
                another: 'Point3D'
                ) -> 'Point3D':
        """Subtract two points.
        :param another: the point to subtract
        :type another: Point3D
        :return: the difference between the two points
        :rtype: Point3D
        :raise: Exception
        Example:
        >>> Point3D(1., 1., 1.) - Point3D(1., 1., 1.)
        Point3D(0.0000, 0.0000, 0.0000)
        >>> Point3D(1., 1., 3.) - Point3D(1., 1., 2.2)
        Point3D(0.0000, 0.0000, 0.8000)
        """
        check_type(another, "Second point", Point3D)
        x0, y0, z0 = self
        x1, y1, z1 = another
        return Point3D(
            x=x0 - x1,
            y=y0 - y1,
            z=z0 - z1
        )
    def clone(self) -> 'Point3D':
        """
        Clone a point.
        :return: a new point.
        :rtype: Point.
        """
        return Point3D(*self.a())
    def toXYZ(self) -> Tuple[numbers.Real, numbers.Real, numbers.Real]:
        """
        Returns the spatial components as a tuple of three values.
        :return: the spatial components (x, y, z).
        :rtype: a tuple of three floats.
        Examples:
        >>> Point3D(1, 0, 3).toXYZ()
        (1.0, 0.0, 3.0)
        """
        return self.x, self.y, self.z
    def toArray(self) -> np.ndarray:
        """
        Return a Numpy array representing the point values.
        :return: Numpy array
        Examples:
        >>> np.allclose(Point3D(1, 2, 3).toArray(), np.array([ 1., 2., 3.]))
        True
        """
        return np.asarray(self.toXYZ())
    def to2d(self) -> Point2D:
        """
        Projection on the x-y plane as a 2D point.
        Examples:
        >>> Point3D(2, 3, 4).to2d()
        Point2D(2.0000, 3.0000)
        """
        return Point2D(
            x=self.x,
            y=self.y
        )
    def pXY(self) -> 'Point3D':
        """
        Projection on the x-y plane
        :return: projected object instance
        Examples:
        >>> Point3D(2, 3, 4).pXY()
        Point3D(2.0000, 3.0000, 0.0000)
        """
        return Point3D(self.x, self.y, 0.0)
    def pXZ(self) -> 'Point3D':
        """
        Projection on the x-z plane
        :return: projected object instance
        Examples:
        >>> Point3D(2, 3, 4).pXZ()
        Point3D(2.0000, 0.0000, 4.0000)
        """
        return Point3D(self.x, 0.0, self.z)
    def pYZ(self) -> 'Point3D':
        """
        Projection on the y-z plane
        :return: projected object instance
        Examples:
        >>> Point3D(2, 3, 4).pYZ()
        Point3D(0.0000, 3.0000, 4.0000)
        """
        return Point3D(0.0, self.y, self.z)
    def deltaX(self,
               another: 'Point3D'
               ) -> numbers.Real:
        """
        Delta between x components of two Point Instances.
        :return: x coordinates difference value.
        :rtype: numbers.Real.
        :raise: Exception
        Examples:
        >>> Point3D(1, 2, 3).deltaX(Point3D(4, 7, 1))
        3.0
        """
        return another.x - self.x
    def deltaY(self,
               another: 'Point3D'
               ) -> numbers.Real:
        """
        Delta between y components of two Point Instances.
        :return: y coordinates difference value.
        :rtype: numbers.Real.
        Examples:
        >>> Point3D(1, 2, 3).deltaY(Point3D(4, 7, 1))
        5.0
        """
        return another.y - self.y
    def deltaZ(self,
               another: 'Point3D'
               ) -> numbers.Real:
        """
        Delta between z components of two Point Instances.
        :return: z coordinates difference value.
        :rtype: numbers.Real.
        Examples:
        >>> Point3D(1, 2, 3).deltaZ(Point3D(4, 7, 1))
        -2.0
        """
        return another.z - self.z
    def distance(self,
                 another: 'Point3D'
                 ) -> numbers.Real:
        """
        Calculate Euclidean spatial distance between two points.
        TODO: consider case of polar CRS
        :param another: another Point instance.
        :type another: Point.
        :return: the distance (when the two points have the same CRS).
        :rtype: numbers.Real.
        :raise: Exception.
        Examples:
        >>> Point3D(1., 1., 1.).distance(Point3D(4., 5., 1))
        5.0
        >>> Point3D(1, 1, 1).distance(Point3D(4, 5, 1))
        5.0
        >>> Point3D(1, 1, 1).distance(Point3D(4, 5, 1))
        5.0
        """
        check_type(another, "Point", Point3D)
        return sqrt((self.x - another.x) ** 2 + (self.y - another.y) ** 2 + (self.z - another.z) ** 2)
    def dist_2d(self,
                another: 'Point3D'
                ) -> numbers.Real:
        """
        Calculate horizontal (2D) distance between two points.
        TODO: consider case of polar CRS
        :param another: another Point instance.
        :type another: Point.
        :return: the 2D distance (when the two points have the same CRS).
        :rtype: numbers.Real.
        :raise: Exception.
        Examples:
        >>> Point3D(1., 1., 1.).dist_2d(Point3D(4., 5., 7.))
        5.0
        """
        check_type(another, "Second point", Point3D)
        return sqrt((self.x - another.x) ** 2 + (self.y - another.y) ** 2)
    def scale(self,
              scale_factor: numbers.Real
              ) -> 'Point3D':
        """
        Create a scaled object.
        Note: it does not make sense for polar coordinates.
        TODO: manage polar coordinates cases OR deprecate and remove - after dependency check.
        Example;
        >>> Point3D(1, 0, 1).scale(2.5)
        Point3D(2.5000, 0.0000, 2.5000)
        >>> Point3D(1, 0, 1).scale(2.5)
        Point3D(2.5000, 0.0000, 2.5000)
        """
        x, y, z = self.x * scale_factor, self.y * scale_factor, self.z * scale_factor
        return Point3D(x, y, z)
    def invert(self) -> 'Point3D':
        """
        Create a new object with inverted direction.
        Note: it depends on scale method, that could be deprecated/removed.
        Examples:
        >>> Point3D(1, 1, 1).invert()
        Point3D(-1.0000, -1.0000, -1.0000)
        >>> Point3D(2, -1, 4).invert()
        Point3D(-2.0000, 1.0000, -4.0000)
        """
        return self.scale(-1)
    def reflect_vertical(self) -> 'Point3D':
        """
        Reflect a point along a vertical axis.
        :return: reflected point.
        :rtype: Point3D
        Examples:
        >>> Point3D(1,1,1).reflect_vertical()
        Point3D(-1.0000, -1.0000, 1.0000)
        """
        x, y, z = self
        return Point3D(
            x=-x,
            y=-y,
            z=z
        )
    def is_coincident(self,
                      another: 'Point3D',
                      tolerance: numbers.Real = MIN_SEPARATION_THRESHOLD
                      ) -> bool:
        """
        Check spatial coincidence of two points
        :param another: the point to compare.
        :type another: Point.
        :param tolerance: the maximum allowed distance between the two points.
        :type tolerance: numbers.Real.
        :return: whether the two points are coincident.
        :rtype: bool.
        :raise: Exception.
        Example:
        >>> Point3D(1., 0., -1.).is_coincident(Point3D(1., 1.5, -1.))
        False
        >>> Point3D(1., 0., 0.).is_coincident(Point3D(1., 0., 0.))
        True
        """
        check_type(another, "Second point", Point3D)
        return self.distance(another) <= tolerance
    def already_present(self,
                        pt_list: List['Point3D'],
                        tolerance: numbers.Real = MIN_SEPARATION_THRESHOLD
                        ) -> bool:
        """
        Determines if a point is already in a given point list, using an optional distance separation,
        :param pt_list: list of points. May be empty.
        :type pt_list: List of Points.
        :param tolerance: optional maximum distance between near-coincident point pair.
        :type tolerance: numbers.Real.
        :return: True if already present, False otherwise.
        :rtype: bool.
        """
        for pt in pt_list:
            if self.is_coincident(pt, tolerance=tolerance):
                return True
        return False
    def shift(self,
              sx: numbers.Real,
              sy: numbers.Real,
              sz: numbers.Real
              ) -> 'Point3D':
        """
        Create a new object shifted by given amount from the self instance.
        Example:
        >>> Point3D(1, 1, 1).shift(0.5, 1., 1.5)
        Point3D(1.5000, 2.0000, 2.5000)
        >>> Point3D(1, 2, -1).shift(0.5, 1., 1.5)
        Point3D(1.5000, 3.0000, 0.5000)
        """
        return Point3D(self.x + sx, self.y + sy, self.z + sz)
    def shiftByVect(self,
                    v: Vect3D
                    ) -> 'Point3D':
        """
        Create a new point shifted from the self instance by given vector.
        :param v: the shift vector.
        :type v: Vect.
        :return: the shifted point.
        :rtype: Point.
        :raise: Exception
        Example:
        >>> Point3D(1, 1, 1).shiftByVect(Vect3D(0.5, 1., 1.5))
        Point3D(1.5000, 2.0000, 2.5000)
        >>> Point3D(1, 2, -1).shiftByVect(Vect3D(0.5, 1., 1.5))
        Point3D(1.5000, 3.0000, 0.5000)
        """
        x, y, z = self
        sx, sy, sz = v.toXYZ()
        return Point3D(x + sx, y + sy, z + sz)
    def asVect(self) -> 'Vect3D':
        """
        Create a vector based on the point coordinates
        Example:
        >>> Point3D(1, 1, 0).asVect()
        Vect3D(1.0000, 1.0000, 0.0000)
        >>> Point3D(0.2, 1, 6).asVect()
        Vect3D(0.2000, 1.0000, 6.0000)
        """
        return Vect3D(self.x, self.y, self.z)
    def rotate(self,
               rotation_axis: RotationAxis,
               center_point: 'Point3D' = None
               ) -> 'Point3D':
        """
        Rotates a point.
        :param rotation_axis: the axis to rotate about.
        :param center_point: the rotation center; origin when omitted.
        :return: the rotated point
        :rtype: Point3D
        Examples:
        >>> pt = Point3D(0,0,1)
        >>> rot_axis = RotationAxis(0,0,90)
        >>> center_pt = Point3D(0,0,0.5)
        >>> pt.rotate(rotation_axis=rot_axis, center_point=center_pt)
        Point3D(0.5000, 0.0000, 0.5000)
        >>> center_pt = Point3D(0,0,1)
        >>> pt.rotate(rotation_axis=rot_axis, center_point=center_pt)
        Point3D(0.0000, 0.0000, 1.0000)
        >>> center_pt = Point3D(0, 0, 2)
        >>> pt.rotate(rotation_axis=rot_axis, center_point=center_pt)
        Point3D(-1.0000, 0.0000, 2.0000)
        >>> rot_axis = RotationAxis(0,0,180)
        >>> pt.rotate(rotation_axis=rot_axis, center_point=center_pt)
        Point3D(-0.0000, 0.0000, 3.0000)
        >>> pt.rotate(rotation_axis=rot_axis)
        Point3D(0.0000, 0.0000, -1.0000)
        >>> pt = Point3D(1,1,1)
        >>> rot_axis = RotationAxis(0,90,90)
        >>> pt.rotate(rotation_axis=rot_axis)
        Point3D(1.0000, -1.0000, 1.0000)
        >>> rot_axis = RotationAxis(0,90,180)
        >>> pt.rotate(rotation_axis=rot_axis)
        Point3D(-1.0000, -1.0000, 1.0000)
        >>> center_pt = Point3D(1,1,1)
        >>> pt.rotate(rotation_axis=rot_axis, center_point=center_pt)
        Point3D(1.0000, 1.0000, 1.0000)
        >>> center_pt = Point3D(2,2,10)
        >>> pt.rotate(rotation_axis=rot_axis, center_point=center_pt)
        Point3D(3.0000, 3.0000, 1.0000)
        >>> pt = Point3D(1, 1, 2)
        >>> rot_axis = RotationAxis(135, 0, 180)
        >>> center_pt = Point3D(0,0,1)
        >>> pt.rotate(rotation_axis=rot_axis, center_point=center_pt)
        Point3D(-1.0000, -1.0000, 0.0000)
        """
        # truthiness test: Point3D defines no __bool__, so any instance is truthy
        if not center_point:
            center_point = Point3D(
                x=0.0,
                y=0.0,
                z=0.0
            )
        check_type(center_point, "Center point", Point3D)
        # rotate the center-relative vector, then translate back
        p_diff = self - center_point
        p_vect = p_diff.asVect()
        rot_vect = rotVectByAxis(
            v=p_vect,
            rot_axis=rotation_axis
        )
        x, y, z = rot_vect
        rot_pt = Point3D(
            x=x,
            y=y,
            z=z
        )
        transl_pt = center_point + rot_pt
        return transl_pt
    @classmethod
    def random(cls,
               lower_boundary: float = -MAX_SCALAR_VALUE,
               upper_boundary: float = MAX_SCALAR_VALUE
               ):
        """
        Creates a random point.
        :return: random point
        :rtype: Point3D
        """
        # NOTE(review): relies on the `random` module being in scope via a wildcard import above — confirm
        vals = [random.uniform(lower_boundary, upper_boundary) for _ in range(3)]
        return cls(*vals)
def pack_to_points(
        xs: array,
        ys: array,
        zs: Optional[array] = None,
) -> List[Point3D]:
    # Side effects: None
    """
    Create a list of points given a set
    of input arrays.
    :param xs: array of x values
    :param ys: array of y values
    :param zs: optional array of z values (zeros when omitted)
    :return: a list of Point3D instances
    """
    if zs is None:
        zs = [0.0] * len(xs)
    # BUG FIX: the original unpacked four names (x, y, z, t) from a
    # three-iterable zip, raising ValueError on any non-empty input.
    # zip stops at the shortest input; each triple becomes one Point3D.
    pts = []
    for x, y, z in zip(xs, ys, zs):
        pts.append(
            Point3D(
                x,
                y,
                z
            )
        )
    return pts
class Segment3D:
"""
Segment is a geometric object defined by the straight line between
two vertices.
"""
def __init__(self,
start_pt: Point3D,
end_pt: Point3D):
"""
Creates a segment instance provided the two points have the same CRS code.
:param start_pt: the start point.
:type: Point.
:param end_pt: the end point.
:type end_pt: Point.
:return: the new segment instance if both points have the same georeferenced.
:raises: CRSCodeException.
"""
check_type(start_pt, "Start point", Point3D)
check_type(end_pt, "End point", Point3D)
if start_pt.distance(end_pt) == 0.0:
raise Exception("Source points cannot be coincident")
self._start_pt = start_pt.clone()
self._end_pt = end_pt.clone()
    @classmethod
    def fromVector(cls,
                   point: Point3D,
                   dir_vector: Vect3D):
        """
        Create a segment from a start point and a directional vector.
        :param point: the segment start point.
        :param dir_vector: the vector from the start point to the end point.
        :return: a new Segment3D ending at point shifted by dir_vector.
        """
        check_type(point, "Input point", Point3D)
        check_type(dir_vector, "Directional vector", Vect3D)
        start_pt = point
        end_pt = start_pt.shiftByVect(dir_vector)
        return cls(
            start_pt=start_pt,
            end_pt=end_pt
        )
    @classmethod
    def from2D(cls,
               segment: Segment2D):
        """
        Lift a 2D segment into 3D, setting both endpoint z values to 0.0.
        :param segment: the 2D segment to convert.
        :return: a new Segment3D lying in the z = 0 plane.
        """
        check_type(segment, "Input segment", Segment2D)
        start_pt = Point3D(
            x=segment.start_pt.x,
            y=segment.start_pt.y,
            z=0.0
        )
        end_pt = Point3D(
            x=segment.end_pt.x,
            y=segment.end_pt.y,
            z=0.0
        )
        return cls(
            start_pt=start_pt,
            end_pt=end_pt
        )
def __repr__(self) -> str:
"""
Represents a Segment instance.
:return: the Segment representation.
:rtype: str.
"""
return "Segment3D(start_pt={}, end_pt={})".format(
self.start_pt,
self.end_pt
)
    @property
    def start_pt(self) -> Point3D:
        """The segment start point."""
        return self._start_pt
    @property
    def end_pt(self) -> Point3D:
        """The segment end point."""
        return self._end_pt
def __iter__(self):
"""
Return the elements of a Segment, i.e., start and end point.
"""
return (i for i in [self.start_pt, self.end_pt])
def clone(self) -> 'Segment3D':
return Segment3D(self._start_pt, self._end_pt)
def increasing_x(self) -> 'Segment3D':
if self.end_pt.x < self.start_pt.x:
return Segment3D(self.end_pt, self.start_pt)
else:
return self.clone()
def x_range(self) -> Tuple[numbers.Real, numbers.Real]:
if self.start_pt.x < self.end_pt.x:
return self.start_pt.x, self.end_pt.x
else:
return self.end_pt.x, self.start_pt.x
def y_range(self) -> Tuple[numbers.Real, numbers.Real]:
if self.start_pt.y < self.end_pt.y:
return self.start_pt.y, self.end_pt.y
else:
return self.end_pt.y, self.start_pt.y
def z_range(self) -> Tuple[numbers.Real, numbers.Real]:
if self.start_pt.z < self.end_pt.z:
return self.start_pt.z, self.end_pt.z
else:
return self.end_pt.z, self.start_pt.z
def delta_x(self) -> numbers.Real:
"""
X delta between segment end point and start point.
:return: the horizontal, x-parallel distance between segment end point and start point.
"""
return self.end_pt.x - self.start_pt.x
def delta_y(self) -> numbers.Real:
"""
Y delta between segment end point and start point.
:return: the horizontal, y-parallel distance between segment end point and start point.
"""
return self.end_pt.y - self.start_pt.y
def delta_z(self) -> numbers.Real:
"""
Z delta between segment end point and start point.
:return: the vertical distance between segment end point and start point.
"""
return self.end_pt.z - self.start_pt.z
def as_vector(self) -> Vect3D:
"""
Convert a segment to a vector.
"""
return Vect3D(
x=self.delta_x(),
y=self.delta_y(),
z=self.delta_z()
)
def length_horizontal(self) -> numbers.Real:
return self.start_pt.dist_2d(self.end_pt)
def length(self) -> numbers.Real:
return self.start_pt.distance(self.end_pt)
def ratio_delta_zs(self) -> Optional[numbers.Real]:
"""
Calculates the delta z - delta s ratio of a segment.
:return: optional numbers.Real.
"""
len2d = self.length_horizontal()
if len2d == 0.0:
return None
return self.delta_z() / len2d
def slope_rad(self) -> Optional[numbers.Real]:
"""
Calculates the slope in radians of the segment.
Positive is downward point, negative upward pointing.
:return: optional numbers.Real.
"""
delta_zs = self.ratio_delta_zs()
if delta_zs is None:
return None
else:
return - math.atan(delta_zs)
def vector(self) -> Vect3D:
return Vect3D(self.delta_x(),
self.delta_y(),
self.delta_z()
)
def antivector(self) -> Vect3D:
"""
Returns the vector pointing from the segment end to the segment start.
:return: the vector pointing from the segment end to the segment start.
:rtype: Vect.
"""
return self.vector().invert()
def contains_pt(self,
pt: Point3D
) -> bool:
"""
Checks whether a point is contained in a segment.
:param pt: the point for which to check containement.
:return: bool.
:raise: Exception.
Examples:
>>> segment = Segment3D(Point3D(0, 0, 0), Point3D(1, 0, 0))
>>> segment.contains_pt(Point3D(0, 0, 0))
True
>>> segment.contains_pt(Point3D(1, 0, 0))
True
>>> segment.contains_pt(Point3D(0.5, 0, 0))
True
>>> segment.contains_pt(Point3D(0.5, 0.00001, 0))
False
>>> segment.contains_pt(Point3D(0.5, 0, 0.00001))
False
>>> segment.contains_pt(Point3D(1.00001, 0, 0))
False
>>> segment.contains_pt(Point3D(0.000001, 0, 0))
True
>>> segment.contains_pt(Point3D(-0.000001, 0, 0))
False
>>> segment.contains_pt(Point3D(0.5, 1000, 1000))
False
>>> segment = Segment3D(Point3D(0, 0, 0), Point3D(0, 1, 0))
>>> segment.contains_pt(Point3D(0, 0, 0))
True
>>> segment.contains_pt(Point3D(0, 0.5, 0))
True
>>> segment.contains_pt(Point3D(0, 1, 0))
True
>>> segment.contains_pt(Point3D(0, 1.5, 0))
False
>>> segment = Segment3D(Point3D(0, 0, 0), Point3D(1, 1, 1))
>>> segment.contains_pt(Point3D(0.5, 0.5, 0.5))
True
>>> segment.contains_pt(Point3D(1, 1, 1))
True
>>> segment = Segment3D(Point3D(1,2,3), Point3D(9,8,2))
>>> segment.contains_pt(segment.pointAt(0.745))
True
>>> segment.contains_pt(segment.pointAt(1.745))
False
>>> segment.contains_pt(segment.pointAt(-0.745))
False
>>> segment.contains_pt(segment.pointAt(0))
True
"""
check_type(pt, "Point", Point3D)
segment_length = self.length()
length_startpt_pt = self.start_pt.distance(pt)
length_endpt_pt = self.end_pt.distance(pt)
return areClose(
a=segment_length,
b=length_startpt_pt + length_endpt_pt
)
def pointAt(self,
scale_factor: numbers.Real
) -> Point3D:
"""
Returns a point aligned with the segment
and lying at given scale factor, where 1 is segment length
ans 0 is segment start.
:param scale_factor: the scale factor, where 1 is the segment length.
:type scale_factor: numbers.Real
:return: Point at scale factor
:rtype: Point3D
Examples:
>>> s = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
>>> s.pointAt(0)
Point3D(0.0000, 0.0000, 0.0000)
>>> s.pointAt(0.5)
Point3D(0.5000, 0.0000, 0.0000)
>>> s.pointAt(1)
Point3D(1.0000, 0.0000, 0.0000)
>>> s.pointAt(-1)
Point3D(-1.0000, 0.0000, 0.0000)
>>> s.pointAt(-2)
Point3D(-2.0000, 0.0000, 0.0000)
>>> s.pointAt(2)
Point3D(2.0000, 0.0000, 0.0000)
>>> s = Segment3D(Point3D(0,0,0), Point3D(0,0,1))
>>> s.pointAt(0)
Point3D(0.0000, 0.0000, 0.0000)
>>> s.pointAt(0.5)
Point3D(0.0000, 0.0000, 0.5000)
>>> s.pointAt(1)
Point3D(0.0000, 0.0000, 1.0000)
>>> s.pointAt(-1)
Point3D(0.0000, 0.0000, -1.0000)
>>> s.pointAt(-2)
Point3D(0.0000, 0.0000, -2.0000)
>>> s.pointAt(2)
Point3D(0.0000, 0.0000, 2.0000)
>>> s = Segment3D(Point3D(0,0,0), Point3D(1,1,1))
>>> s.pointAt(0.5)
Point3D(0.5000, 0.5000, 0.5000)
>>> s = Segment3D(Point3D(0,0,0), Point3D(4,0,0))
>>> s.pointAt(7.5)
Point3D(30.0000, 0.0000, 0.0000)
"""
dx = self.delta_x() * scale_factor
dy = self.delta_y() * scale_factor
dz = self.delta_z() * scale_factor
return Point3D(
x=self.start_pt.x + dx,
y=self.start_pt.y + dy,
z=self.start_pt.z + dz
)
def pointProjection(self,
point: Point3D
) -> Point3D:
"""
Return the point projection on the segment.
Examples:
>>> s = Segment3D(start_pt=Point3D(0,0,0), end_pt=Point3D(1,0,0))
>>> p = Point3D(0.5, 1, 4)
>>> s.pointProjection(p)
Point3D(0.5000, 0.0000, 0.0000)
>>> s = Segment3D(start_pt=Point3D(0,0,0), end_pt=Point3D(4,0,0))
>>> p = Point3D(7.5, 19.2, -14.72)
>>> s.pointProjection(p)
Point3D(7.5000, 0.0000, 0.0000)
"""
check_type(point, "Input point", Point3D)
other_segment = Segment3D(
self.start_pt,
point
)
scale_factor = self.vector().scalar_projection(other_segment.vector()) / self.length()
return self.pointAt(scale_factor)
def pointDistance(self,
point: Point3D
) -> numbers.Real:
"""
Returns the point distance to the segment.
:param point: the point to calculate the distance with
:type point: Point3D
:return: the distance of the point to the segment
:rtype: numbers.Real
Examples:
>>> s = Segment3D(Point3D(0,0,0), Point3D(0,0,4))
>>> s.pointDistance(Point3D(-17.2, 0.0, -49))
17.2
>>> s.pointDistance(Point3D(-17.2, 1.22, -49))
17.24321315764553
"""
check_type(point, "Input point", Point3D)
#check_crs(self, point)
point_projection = self.pointProjection(point)
return point.distance(point_projection)
def point_s(self,
point: Point3D
) -> Optional[numbers.Real]:
"""
Calculates the optional distance of the point along the segment.
A zero value is for a point coinciding with the start point.
Returns None if the point is not contained in the segment.
:param point: the point to calculate the optional distance in the segment.
:type point: Point3D
:return: the the optional distance of the point along the segment.
"""
check_type(point, "Input point", Point3D)
#check_crs(self, point)
if not self.contains_pt(point):
return None
return self.start_pt.distance(point)
    def scale(self,
              scale_factor
              ) -> 'Segment3D':
        """
        Scale a segment by the given scale_factor.
        Start point does not change.
        :param scale_factor: the scale factor, where 1 is the segment length.
        :type scale_factor: numbers.Real
        :return: the scaled segment.
        :rtype: Segment3D
        """
        end_pt = self.pointAt(scale_factor)
        return Segment3D(
            self.start_pt,
            end_pt)
def vertical_plane(self) -> Optional['CPlane3D']:
"""
Returns the vertical Cartesian plane containing the segment.
:return: the vertical Cartesian plane containing the segment.
:rtype: Optional[CPlane3D].
"""
if self.length_horizontal() == 0.0: # collapsed segment
return None
elif self.length_horizontal() == 0.0: # vertical segment
return None
# arbitrary point on the same vertical as end point
section_final_pt_up = self.end_pt.shift(
sx=0.0,
sy=0.0,
sz=1000.0)
return CPlane3D.fromPoints(
pt1=self.start_pt,
pt2=self.end_pt,
pt3=section_final_pt_up)
def same_start(self,
another: 'Segment3D',
tol: numbers.Real = 1e-12
) -> bool:
"""
Check whether the two segments have the same start point.
:param another: a segment to check for.
:type another: Segment.
:param tol: tolerance for distance between points.
:type tol: numbers.Real.
:return: whether the two segments have the same start point.
:rtype: bool.
Examples:
>>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
>>> s2 = Segment3D(Point3D(0,0,0), Point3D(0,1,0))
>>> s1.same_start(s2)
True
"""
return self.start_pt.is_coincident(
another=another.start_pt,
tolerance=tol
)
def same_end(self,
another: 'Segment3D',
tol: numbers.Real = 1e-12
) -> bool:
"""
Check whether the two segments have the same end point.
:param another: a segment to check for.
:type another: Segment.
:param tol: tolerance for distance between points.
:type tol: numbers.Real.
:return: whether the two segments have the same end point.
:rtype: bool.
Examples:
>>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
>>> s2 = Segment3D(Point3D(2,0,0), Point3D(1,0,0))
>>> s1.same_end(s2)
True
"""
return self.end_pt.is_coincident(
another=another.end_pt,
tolerance=tol)
def conn_to_other(self,
another: 'Segment3D',
tol: numbers.Real = 1e-12
) -> bool:
"""
Check whether the first segment is sequentially connected to the second one.
:param another: a segment to check for.
:type another: Segment.
:param tol: tolerance for distance between points.
:type tol: numbers.Real.
:return: whether the first segment is sequentially connected to the second one.
:rtype: bool.
Examples:
>>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
>>> s2 = Segment3D(Point3D(1,0,0), Point3D(2,0,0))
>>> s1.conn_to_other(s2)
True
"""
return self.end_pt.is_coincident(
another=another.start_pt,
tolerance=tol)
def other_connected(self,
                    another: 'Segment3D',
                    tol: numbers.Real = 1e-12
                    ) -> bool:
    """
    Tell whether another segment is sequentially connected to this one,
    i.e. whether the other's end point coincides with this segment's start point.

    :param another: the segment to compare with.
    :param tol: maximum distance for the two points to be considered coincident.
    :return: True when the other's end point and this start point coincide.

    Examples:
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
    >>> s2 = Segment3D(Point3D(-1,0,0), Point3D(0,0,0))
    >>> s1.other_connected(s2)
    True
    """
    return another.end_pt.is_coincident(self.start_pt, tolerance=tol)
def segment_start_in(self,
                     another: 'Segment3D'
                     ) -> bool:
    """
    Tell whether another segment contains this segment's start point.

    :param another: the segment possibly containing the start point.
    :return: True when this start point lies on the other segment.

    Examples:
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
    >>> s2 = Segment3D(Point3D(-0.5,0,0), Point3D(0.5,0,0))
    >>> s1.segment_start_in(s2)
    True
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,1,1))
    >>> s1.segment_start_in(s2)
    True
    >>> s1 = Segment3D(Point3D(0,1,0), Point3D(1,1,1))
    >>> s1.segment_start_in(s2)
    False
    >>> s1 = Segment3D(Point3D(-1,-1,-1), Point3D(1,1,1))
    >>> s1.segment_start_in(s2)
    False
    """
    candidate_pt = self.start_pt
    return another.contains_pt(candidate_pt)
def segment_end_in(self,
                   another: 'Segment3D'
                   ) -> bool:
    """
    Tell whether another segment contains this segment's end point.

    :param another: the segment possibly containing the end point.
    :return: True when this end point lies on the other segment.

    Examples:
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
    >>> s2 = Segment3D(Point3D(-0.5,0,0), Point3D(0.5,0,0))
    >>> s1.segment_end_in(s2)
    False
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,1,1))
    >>> s1.segment_end_in(s2)
    False
    >>> s1 = Segment3D(Point3D(0,1,0), Point3D(1,1,1))
    >>> s2 = Segment3D(Point3D(1,1,1), Point3D(0.5,0,0))
    >>> s1.segment_end_in(s2)
    True
    >>> s1 = Segment3D(Point3D(-1,-1,3), Point3D(1,1,3))
    >>> s2 = Segment3D(Point3D(0,2,3), Point3D(2,0,3))
    >>> s1.segment_end_in(s2)
    True
    """
    candidate_pt = self.end_pt
    return another.contains_pt(candidate_pt)
def rotate(self,
           rotation_axis: 'RotationAxis',
           center_point: 'Point3D' = None
           ) -> 'Segment3D':
    """
    Return a new segment obtained by rotating both end points
    about the given rotation axis, optionally around a center point.

    :param rotation_axis: the axis (with angle) to rotate about.
    :param center_point: the optional rotation center (axis through the origin when None).
    :return: the rotated segment.

    Examples:
    >>> seg = Segment3D(Point3D(0,0,0), Point3D(0,0,1))
    >>> rot_ax = RotationAxis(0, 0, 90)
    >>> seg.rotate(rot_ax)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(1.0000, 0.0000, 0.0000))
    >>> rot_ax = RotationAxis(0, 0, 180)
    >>> centr_pt = Point3D(0,0,0.5)
    >>> seg.rotate(rotation_axis=rot_ax, center_point=centr_pt)
    Segment3D(start_pt=Point3D(-0.0000, 0.0000, 1.0000), end_pt=Point3D(0.0000, 0.0000, 0.0000))
    >>> seg = Segment3D(Point3D(0,0,0), Point3D(1,1,0))
    >>> centr_pt = Point3D(1,0,0)
    >>> rot_ax = RotationAxis(0, 90, 90)
    >>> seg.rotate(rotation_axis=rot_ax, center_point=centr_pt)
    Segment3D(start_pt=Point3D(1.0000, 1.0000, 0.0000), end_pt=Point3D(2.0000, 0.0000, -0.0000))
    >>> seg = Segment3D(Point3D(1,1,1), Point3D(0,0,0))
    >>> rot_ax = RotationAxis(135, 0, 180)
    >>> centr_pt = Point3D(0.5,0.5,0.5)
    >>> seg.rotate(rotation_axis=rot_ax, center_point=centr_pt)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(1.0000, 1.0000, 1.0000))
    """
    new_start = self.start_pt.rotate(
        rotation_axis=rotation_axis,
        center_point=center_point
    )
    new_end = self.end_pt.rotate(
        rotation_axis=rotation_axis,
        center_point=center_point
    )
    return Segment3D(
        start_pt=new_start,
        end_pt=new_end
    )
@classmethod
def random(cls,
           lower_boundary: float = -MAX_SCALAR_VALUE,
           upper_boundary: float = MAX_SCALAR_VALUE):
    """
    Create a segment whose two end points are drawn at random
    within the given coordinate boundaries.

    :param lower_boundary: minimum coordinate value for the random points.
    :param upper_boundary: maximum coordinate value for the random points.
    :return: a random segment.
    """
    random_start = Point3D.random(lower_boundary, upper_boundary)
    random_end = Point3D.random(lower_boundary, upper_boundary)
    return cls(
        start_pt=random_start,
        end_pt=random_end
    )
def densify_as_line3d(self,
                      densify_distance
                      ) -> 'Line3D':
    """
    Densify a segment by adding additional points
    separated a distance equal to densify_distance.
    The result is no longer a Segment instance, instead it is a Line instance.

    :param densify_distance: the spacing between successive points.
    :return: a Line3D running from the segment start point to its end point.
    :raise: Exception when densify_distance is not a positive, finite number.
    """
    # Validate the densify distance (consistently with densify_as_steps3d).
    # BUG FIX: without these checks a non-positive distance made the
    # `while True` loop below never terminate, since the shift vector
    # never reaches the segment length.
    if not isinstance(densify_distance, numbers.Real):
        raise Exception("Densify distance must be float or int")
    if not math.isfinite(densify_distance):
        raise Exception("Densify distance must be finite")
    if densify_distance <= 0.0:
        raise Exception("Densify distance must be positive")
    length3d = self.length()
    segment_versor = self.as_vector().versor()
    generator_vector = segment_versor.scale(densify_distance)
    interpolated_line = Line3D(
        pts=[self.start_pt])
    n = 0
    while True:
        n += 1
        shift_vector = generator_vector.scale(n)
        new_pt = self.start_pt.shift(
            shift_vector.x,
            shift_vector.y,
            shift_vector.z
        )
        distance = self.start_pt.distance(new_pt)
        if distance >= length3d:
            # reached (or passed) the end point: stop adding intermediate points
            break
        interpolated_line.add_pt(new_pt)
    interpolated_line.add_pt(self.end_pt)
    return interpolated_line
def densify_as_pts3d(self,
                     densify_distance
                     ) -> List[Point3D]:
    """
    Densify the segment and return the resulting points
    (see densify_as_line3d).

    :param densify_distance: the spacing between successive points.
    :return: the list of points of the densified line.
    """
    densified_line = self.densify_as_line3d(densify_distance=densify_distance)
    return densified_line.pts()
def densify_as_steps3d(self,
                       densify_distance: numbers.Real
                       ) -> array:
    """
    Build the array of incremental lengths spaced by the provided
    densify distance; the last entry always equals the full segment length.

    :param densify_distance: the step distance.
    :return: array of incremental steps, ending with the segment length.
    :raise: Exception when the distance is not a positive, finite real.
    """
    if not isinstance(densify_distance, numbers.Real):
        raise Exception("Densify distance must be float or int")
    if not math.isfinite(densify_distance):
        raise Exception("Densify distance must be finite")
    if densify_distance <= 0.0:
        raise Exception("Densify distance must be positive")
    segment_length = self.length()
    s_list = []
    n = 0
    # accumulate multiples of the step that fall strictly before the segment end
    while n * densify_distance < segment_length:
        s_list.append(n * densify_distance)
        n += 1
    s_list.append(segment_length)
    return array('d', s_list)
def point_or_segment3d(
        point1: Point3D,
        point2: Point3D,
        tol: numbers.Real = PRACTICAL_MIN_DIST
) -> Union[Point3D, Segment3D]:
    """
    Create a point or a segment from two points, based on their distance:
    points closer than the tolerance collapse into their midpoint,
    otherwise the joining segment is returned.

    :param point1: first input point.
    :param point2: second input point.
    :param tol: distance tolerance between the two points.
    :return: the midpoint or the joining segment.
    :raise: Exception when the inputs are not Point3D instances.
    """
    check_type(point1, "First point", Point3D)
    check_type(point2, "Second point", Point3D)
    if point1.distance(point2) > tol:
        return Segment3D(
            start_pt=point1,
            end_pt=point2
        )
    # the two points are within tolerance: collapse to their midpoint
    return Point3D(
        x=(point1.x + point2.x) / 2,
        y=(point1.y + point2.y) / 2,
        z=(point1.z + point2.z) / 2
    )
def intersect_segments3d(
        segment1: Segment3D,
        segment2: Segment3D,
        tol: numbers.Real = PRACTICAL_MIN_DIST
) -> Optional[Union[Point3D, Segment3D]]:
    """
    Determines the optional point or segment intersection between the segment pair.

    :param segment1: the first segment
    :param segment2: the second segment
    :param tol: the distance tolerance for collapsing a intersection segment into a point
    :return: the optional point or segment intersection between the segment pair.

    Examples:
    >>> s2 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(1,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(1.0000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(-2,0,0), Point3D(-1,0,0))
    >>> intersect_segments3d(s1, s2) is None
    True
    >>> s1 = Segment3D(Point3D(-2,0,0), Point3D(0,0,0))
    >>> intersect_segments3d(s1, s2)
    Point3D(0.0000, 0.0000, 0.0000)
    >>> s1 = Segment3D(Point3D(-2,0,0), Point3D(0.5,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(0.5000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(-2,0,0), Point3D(1,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(1.0000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(-2,0,0), Point3D(2,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(1.0000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(0.5,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(0.5000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(0.25,0,0), Point3D(0.75,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.2500, 0.0000, 0.0000), end_pt=Point3D(0.7500, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(0.25,0,0), Point3D(1,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.2500, 0.0000, 0.0000), end_pt=Point3D(1.0000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(0.25,0,0), Point3D(1.25,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.2500, 0.0000, 0.0000), end_pt=Point3D(1.0000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(0,0,0), Point3D(1.25,0,0))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.0000, 0.0000, 0.0000), end_pt=Point3D(1.0000, 0.0000, 0.0000))
    >>> s1 = Segment3D(Point3D(1,0,0), Point3D(1.25,0,0))
    >>> intersect_segments3d(s1, s2)
    Point3D(1.0000, 0.0000, 0.0000)
    >>> s2 = Segment3D(Point3D(0,0,0), Point3D(1,1,1))
    >>> s1 = Segment3D(Point3D(0.25,0.25,0.25), Point3D(0.75,0.75,0.75))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.2500, 0.2500, 0.2500), end_pt=Point3D(0.7500, 0.7500, 0.7500))
    >>> s1 = Segment3D(Point3D(0.25,0.25,0.25), Point3D(1.75,1.75,1.75))
    >>> intersect_segments3d(s1, s2)
    Segment3D(start_pt=Point3D(0.2500, 0.2500, 0.2500), end_pt=Point3D(1.0000, 1.0000, 1.0000))
    >>> s1 = Segment3D(Point3D(0.25,0.25,0.25), Point3D(1.75,0,1.75))
    >>> intersect_segments3d(s1, s2)
    Point3D(0.2500, 0.2500, 0.2500)
    >>> s1 = Segment3D(Point3D(0.25,1,0.25), Point3D(0.75,0.75,0.75))
    >>> intersect_segments3d(s1, s2)
    Point3D(0.7500, 0.7500, 0.7500)
    >>> s2 = Segment3D(Point3D(-1,-1,-1), Point3D(1,1,1))
    >>> s1 = Segment3D(Point3D(-1,1,1), Point3D(1,-1,-1))
    >>> intersect_segments3d(s1, s2)
    Point3D(0.0000, 0.0000, 0.0000)
    """
    check_type(segment1, "First segment", Segment3D)
    check_type(segment2, "Second segment", Segment3D)
    #check_crs(segment1, segment2)

    # Classify each segment end point by containment in the other segment.
    s1_startpt_inside = segment1.segment_start_in(segment2)
    s2_startpt_inside = segment2.segment_start_in(segment1)
    s1_endpt_inside = segment1.segment_end_in(segment2)
    s2_endpt_inside = segment2.segment_end_in(segment1)
    elements = [s1_startpt_inside, s2_startpt_inside, s1_endpt_inside, s2_endpt_inside]
    # All four end points mutually contained: the segments coincide.
    if all(elements):
        return segment1.clone()
    # One segment fully inside the other: the overlap is that segment.
    if s1_startpt_inside and s1_endpt_inside:
        return segment1.clone()
    if s2_startpt_inside and s2_endpt_inside:
        return segment2.clone()
    # Partial overlaps: exactly one end of each segment lies in the other;
    # the overlap spans the two contained end points (collapsed to a point
    # by point_or_segment3d when they are within tolerance).
    if s1_startpt_inside and s2_startpt_inside:
        return point_or_segment3d(
            segment1.start_pt,
            segment2.start_pt,
            tol=tol
        )
    if s1_startpt_inside and s2_endpt_inside:
        return point_or_segment3d(
            segment1.start_pt,
            segment2.end_pt,
            tol=tol
        )
    if s1_endpt_inside and s2_startpt_inside:
        return point_or_segment3d(
            segment2.start_pt,
            segment1.end_pt,
            tol=tol
        )
    if s1_endpt_inside and s2_endpt_inside:
        return point_or_segment3d(
            segment1.end_pt,
            segment2.end_pt,
            tol=tol
        )
    # Single touching end point (e.g. segments sharing only an extremity).
    if s1_startpt_inside:
        return segment1.start_pt.clone()
    if s1_endpt_inside:
        return segment1.end_pt.clone()
    if s2_startpt_inside:
        return segment2.start_pt.clone()
    if s2_endpt_inside:
        return segment2.end_pt.clone()
    # No containment: fall back to the general skew/crossing case via the
    # shortest connecting element between the two supporting lines.
    shortest_segm_or_pt = shortest_segment_or_point3d(
        segment1,
        segment2,
        tol=tol
    )
    if not shortest_segm_or_pt:
        return None
    # A Segment3D result means the lines do not actually touch.
    if not isinstance(shortest_segm_or_pt, Point3D):
        return None
    inters_pt = shortest_segm_or_pt
    # The supporting lines cross: accept only if the crossing point
    # lies within both finite segments.
    if not segment1.contains_pt(inters_pt):
        return None
    if not segment2.contains_pt(inters_pt):
        return None
    return inters_pt
class PointSegmentCollection3D(list):
    """
    Collection of point or segment elements, stored as a list.
    """

    def __init__(
        self,
        geoms: Optional[List[Union[Point3D, Segment3D]]] = None,
    ):
        """
        Create a collection from an optional list of points/segments.

        :param geoms: the optional initial elements.
        :raise: Exception when an element is neither a Point3D nor a Segment3D.
        """
        if geoms is not None:
            for geom in geoms:
                check_type(geom, "Spatial element", (Point3D, Segment3D))
        if geoms is not None and len(geoms) > 0:
            super(PointSegmentCollection3D, self).__init__(geoms)
        else:
            super(PointSegmentCollection3D, self).__init__()

    def append(self,
               spatial_element: Union[Point3D, Segment3D]
               ) -> None:
        """
        Append a point or segment to the collection, after type checking.

        :param spatial_element: the element to append.
        :raise: Exception when the element is neither a Point3D nor a Segment3D.
        """
        check_type(
            var=spatial_element,
            name="Spatial element",
            expected_types=(Point3D, Segment3D)
        )
        # BUG FIX: was `self.append(spatial_element)`, which recursed
        # infinitely; delegate to the list implementation instead.
        super().append(spatial_element)
class Line3D:
    """
    A 3D line, stored as an ordered list of Point3D instances.
    """

    def __init__(self,
                 pts: Optional[List[Point3D]] = None):
        """
        Create a line from an optional list of points.

        :param pts: the ordered points making up the line.
        :raise: Exception when pts is not a list of Point3D instances.
        """
        if pts is not None:
            check_type(pts, "List", list)
            for el in pts:
                check_type(el, "Point3D", Point3D)
            self._pts = pts
        else:
            self._pts = []

    def __repr__(self) -> str:
        """
        Represents a Line instance as a shortened text.

        :return: a textual shortened representation of a Line instance.
        :rtype: str.
        """
        num_points = self.num_pts()
        if num_points == 0:
            txt = "Empty Line3D"
        else:
            x1, y1, z1 = self.start_pt()
            if num_points == 1:
                txt = f"Line3D with unique point: {x1:.4f}, {y1:.4f}, {z1:.4f}"
            else:
                x2, y2, z2 = self.end_pt()
                txt = f"Line3D with {self.num_pts()} points: ({x1:.4f}, {y1:.4f}, {z1:.4f}) ... ({x2:.4f}, {y2:.4f}, {z2:.4f})"
        return txt

    def pts(self):
        # Note: returns the internal list, not a copy.
        return self._pts

    def pt(self,
           ndx: numbers.Integral):
        """
        Return the point at the given index (negative indices allowed).
        """
        return self._pts[ndx]

    def start_pt(self) -> Optional[Point3D]:
        """
        Return the first point of a Line or None when no points.

        :return: the first point or None.
        """
        return self.pt(0) if self.num_pts() > 0 else None

    def end_pt(self) -> Optional[Point3D]:
        """
        Return the last point of a Line or None when no points.

        :return: the last point or None.
        """
        return self.pt(-1) if self.num_pts() > 0 else None

    def add_pt(self,
               pt: Point3D):
        # Append a point at the end of the line.
        self._pts.append(pt)

    def num_pts(self):
        # Number of points making up the line.
        return len(self._pts)

    def segment(self,
                ndx: numbers.Integral
                ) -> Optional[Segment3D]:
        """
        Returns the optional segment at index ndx
        (None when its two points are coincident).

        :param ndx: the segment index.
        :return: the optional segment
        """
        start_pt = self.pt(ndx)
        end_pt = self.pt(ndx + 1)
        if start_pt.is_coincident(end_pt):
            return None
        else:
            return Segment3D(
                start_pt=self.pt(ndx),
                end_pt=self.pt(ndx + 1)
            )

    def __iter__(self):
        """
        Return each element of a Line, i.e., its segments
        (an element may be None for coincident successive points).
        """
        return (self.segment(i) for i in range(self.num_pts() - 1))

    def x_list(self) -> List[numbers.Real]:
        return list(map(lambda pt: pt.x, self._pts))

    def y_list(self) -> List[numbers.Real]:
        return list(map(lambda pt: pt.y, self._pts))

    def x_array(self):
        return np.asarray([pt.x for pt in self.pts()])

    def y_array(self):
        return np.asarray([pt.y for pt in self.pts()])

    def z_array(self):
        return np.asarray([pt.z for pt in self.pts()])

    def xy_arrays(self):
        # BUG FIX: the two methods were returned as bound methods
        # instead of being called.
        return self.x_array(), self.y_array()

    def x_min(self):
        return np.nanmin(list(map(lambda pt: pt.x, self._pts)))

    def x_max(self):
        return np.nanmax(list(map(lambda pt: pt.x, self._pts)))

    def y_min(self):
        return np.nanmin(list(map(lambda pt: pt.y, self._pts)))

    def y_max(self):
        return np.nanmax(list(map(lambda pt: pt.y, self._pts)))

    def z_min(self):
        return np.nanmin(list(map(lambda pt: pt.z, self._pts)))

    def z_max(self):
        return np.nanmax(list(map(lambda pt: pt.z, self._pts)))

    def as_segments(self):
        """
        Convert to a list of segments.

        :return: list of Segment objects
        """
        pts_pairs = zip(self.pts()[:-1], self.pts()[1:])
        segments = [Segment3D(pt_a, pt_b) for (pt_a, pt_b) in pts_pairs]
        return segments

    def join(self, another) -> 'Line3D':
        """
        Joins together two lines and returns the join as a new line without point changes,
        with possible overlapping points
        and orientation mismatches between the two original lines
        """
        return Line3D(self.pts() + another.pts())

    def length(self) -> numbers.Real:
        # Total 3D length, as the sum of point-to-point distances.
        length = 0.0
        for ndx in range(self.num_pts() - 1):
            length += self.pt(ndx).distance(self.pt(ndx + 1))
        return length

    def length_2d(self) -> numbers.Real:
        # Total 2D (planar) length, ignoring the z component.
        length = 0.0
        for ndx in range(self.num_pts() - 1):
            length += self.pt(ndx).to2d().distance(self.pt(ndx + 1).to2d())
        return length

    def step_delta_z(self) -> List[numbers.Real]:
        """
        Return the difference in elevation between consecutive points:
        z[ndx+1] - z[ndx]

        :return: a list of height differences (first element is 0.0).
        """
        delta_z = [0.0]
        for ndx in range(1, self.num_pts()):
            delta_z.append(self.pt(ndx).z - self.pt(ndx - 1).z)
        return delta_z

    def step_lengths_3d(self) -> List[numbers.Real]:
        """
        Returns the point-to-point 3D distances.
        It is the distance between a point and its previous one.
        The list has the same length as the source point list
        (first element is 0.0).

        :return: the individual 3D segment lengths.
        """
        step_length_list = [0.0]
        for ndx in range(1, self.num_pts()):
            length = self.pt(ndx).distance(self.pt(ndx - 1))
            step_length_list.append(length)
        return step_length_list

    def incremental_length_2d(self):
        # Accumulated 2D lengths, as a NumPy array.
        lIncrementalLengths = []
        length = 0.0
        lIncrementalLengths.append(length)
        for ndx in range(self.num_pts() - 1):
            length += self.pts()[ndx].dist_2d(self.pts()[ndx + 1])
            lIncrementalLengths.append(length)
        return np.asarray(lIncrementalLengths)

    def incremental_length_3d(self) -> List[numbers.Real]:
        """
        Returns the accumulated 3D segment lengths.

        :return: accumulated 3D segment lengths
        """
        return list(itertools.accumulate(self.step_lengths_3d()))

    def reversed(self) -> 'Line3D':
        """
        Return a Line instance with reversed point list
        (points are cloned).

        :return: a new Line instance.
        """
        pts = [pt.clone() for pt in self.pts()]
        pts.reverse()
        return Line3D(
            pts=pts
        )

    def slopes_degr(self) -> List[Optional[numbers.Real]]:
        """
        Calculates the slopes (in degrees) of each Line segment.
        The first value is the slope of the first segment.
        The last value, always None, is the slope of the segment starting at the last point.
        The number of elements is equal to the number of points in the Line.

        :return: list of slopes (degrees).
        """
        lSlopes = []
        segments = self.as_segments()
        for segment in segments:
            vector = segment.vector()
            lSlopes.append(-vector.slope_degr())  # minus because vector convention is positive downward
        lSlopes.append(None)  # None refers to the slope of the Segment starting with the last point
        return lSlopes

    def slopes_stats(self) -> Dict:
        """
        Returns the line directional slope statistics.

        :return: the statistics parameters: min, max, mean, var, std.
        """
        return get_statistics(self.slopes_degr())

    def abs_slopes_degr(self) -> List[Optional[numbers.Real]]:
        return [abs(val) for val in self.slopes_degr()]

    def dir_slopes(self) -> np.ndarray:
        # Directional slopes as a NumPy array; NaN where z is undefined
        # and for the last point.
        lSlopes = []
        for ndx in range(self.num_pts() - 1):
            segment_start_pt = self.pts()[ndx]
            segment_end_pt = self.pts()[ndx + 1]
            if np.isnan(segment_start_pt.z) or np.isnan(segment_end_pt.z):
                lSlopes.append(np.nan)
            else:
                vector = Segment3D(self.pts()[ndx], self.pts()[ndx + 1]).vector()
                lSlopes.append(-vector.slope_degr())  # minus because vector convention is positive downward
        lSlopes.append(np.nan)  # slope value for last point is unknown
        return np.asarray(lSlopes)

    def absolute_slopes(self) -> np.ndarray:
        return np.asarray(list(map(abs, self.dir_slopes())))

    def abs_slopes_stats(self) -> Dict:
        """
        Returns the line absolute slopes statistics.

        :return: the statistics parameters: min, max, mean, var, std.
        """
        return get_statistics(self.abs_slopes_degr())

    def extremes_distance_3d(self) -> numbers.Real:
        """
        Calculate the 3D distance between start and end points.

        :return: the 3D distance between start and end points
        """
        return self.pt(-1).distance(self.pt(0))

    def is_closed(self,
                  tolerance: numbers.Real = MIN_SEPARATION_THRESHOLD
                  ) -> bool:
        """
        Determine if the line is 3D-closed.

        :param tolerance: the tolerance for considering the line closed
        :return: whether the line is to be considered 3D-closed
        """
        return self.pt(-1).is_coincident(self.pt(0), tolerance=tolerance)

    def walk_backward(self) -> 'Line3D':
        """
        Create a new line by walking the line backward from the last point up to the first and thus closing it.

        :return: a closed line with zero area
        """
        # BUG FIX: Line3D does not support slicing; slice the reversed
        # point list instead of the reversed Line3D instance.
        return Line3D(self.pts() + self.reversed().pts()[1:])

    def clone(self) -> 'Line3D':
        """
        Clone a line.

        :return: the cloned line
        """
        # BUG FIX: copy the point list, so that mutating the clone
        # (e.g. via close_3d -> add_pt) does not also mutate this line.
        return Line3D(list(self._pts))

    def close_3d(self) -> 'Line3D':
        """
        Return a line that is 3D-closed.

        :return: a 3D-closed line
        """
        line = self.clone()
        if not line.is_closed():
            line.add_pt(line.start_pt())
        return line

    def remove_coincident_points(self) -> Optional['Line3D']:
        """
        Remove coincident successive points

        :return: Line instance, or None for an empty line
        """
        if self.num_pts() == 0:
            return
        new_line = Line3D(
            pts=[self.pt(0)]
        )
        for ndx in range(1, self.num_pts()):
            if not self.pt(ndx).is_coincident(new_line.pt(-1)):
                new_line.add_pt(self.pt(ndx))
        return new_line

    def intersectSegment(self,
                         segment: Segment3D
                         ) -> Optional[PointSegmentCollection3D]:
        """
        Calculates the possible intersection between the line and a provided segment.

        :param segment: the input segment
        :return: the optional intersections, points or segments
        :raise: Exception
        """
        if self.num_pts() <= 1:
            return
        check_type(segment, "Input segment", Segment3D)
        # skip degenerate (None) segments produced by coincident points
        intersections = [intersect_segments3d(curr_segment, segment) for curr_segment in self if curr_segment is not None]
        intersections = list(filter(lambda val: val is not None, intersections))
        intersections = PointSegmentCollection3D(intersections)
        return intersections
class MultiLine3D:
    """
    MultiLine is a list of Line objects
    """

    def __init__(self, lines_list=None):
        # Store the (possibly empty) list of Line3D instances.
        if lines_list is None:
            lines_list = []
        self._lines = lines_list

    @property
    def lines(self):
        return self._lines

    def add(self, line):
        # Return a new MultiLine3D with the line appended (non-mutating).
        return MultiLine3D(self.lines + [line])

    def clone(self):
        return MultiLine3D(self.lines)

    @property
    def num_parts(self):
        return len(self.lines)

    @property
    def num_points(self):
        # BUG FIX: Line3D.num_pts is a method, not a property; it was
        # summed without being called.
        num_points = 0
        for line in self.lines:
            num_points += line.num_pts()
        return num_points

    # BUG FIX (all min/max properties below): Line3D.x_min etc. are
    # methods; they were collected as bound methods instead of values.
    @property
    def x_min(self):
        return np.nanmin([line.x_min() for line in self.lines])

    @property
    def x_max(self):
        return np.nanmax([line.x_max() for line in self.lines])

    @property
    def y_min(self):
        return np.nanmin([line.y_min() for line in self.lines])

    @property
    def y_max(self):
        return np.nanmax([line.y_max() for line in self.lines])

    @property
    def z_min(self):
        return np.nanmin([line.z_min() for line in self.lines])

    @property
    def z_max(self):
        return np.nanmax([line.z_max() for line in self.lines])

    def is_continuous(self):
        """
        Check whether the multiline is continuous, i.e. each line ends
        where the next one starts or ends.
        """
        for line_ndx in range(len(self._lines) - 1):
            # BUG FIX: Line3D.pts is a method and Point3D exposes
            # is_coincident, not coincident; moreover the original
            # `or` between the negations required the end point to
            # coincide with BOTH extremities of the next line, so any
            # normal multiline was reported as discontinuous.
            end_pt = self.lines[line_ndx].pt(-1)
            next_line = self.lines[line_ndx + 1]
            if not (end_pt.is_coincident(next_line.pt(0)) or
                    end_pt.is_coincident(next_line.pt(-1))):
                return False
        return True

    def is_unidirectional(self):
        """
        Check whether each line ends exactly where the next one starts.
        """
        for line_ndx in range(len(self.lines) - 1):
            # BUG FIX: call pts accessors and use is_coincident.
            if not self.lines[line_ndx].pt(-1).is_coincident(self.lines[line_ndx + 1].pt(0)):
                return False
        return True

    def to_line(self):
        # Flatten all component lines into a single Line3D.
        # BUG FIX: Line3D.pts is a method; it was iterated without a call.
        return Line3D([point for line in self.lines for point in line.pts()])

    def remove_coincident_points(self):
        # Apply Line3D.remove_coincident_points to every component line.
        cleaned_lines = []
        for line in self.lines:
            cleaned_lines.append(line.remove_coincident_points())
        return MultiLine3D(cleaned_lines)
def shortest_segment_or_point3d(
        first_segment: Segment3D,
        second_segment: Segment3D,
        tol: numbers.Real = PRACTICAL_MIN_DIST
) -> Optional[Union[Segment3D, Point3D]]:
    """
    Calculates the optional shortest segment - or the intersection point -
    between the two (infinite) lines supporting the two segments.

    Adapted from:
        http://paulbourke.net/geometry/pointlineplane/
    C code from:
        http://paulbourke.net/geometry/pointlineplane/lineline.c
    The algorithm finds mua and mub such that
        Pa = P1 + mua (P2 - P1)
        Pb = P3 + mub (P4 - P3)
    minimize the distance |Pa - Pb|, and returns None when no solution exists
    (degenerate segments or parallel lines).

    :param first_segment: the first segment
    :param second_segment: the second segment
    :param tol: tolerance value for collapsing a segment into the midpoint.
    :return: the optional shortest segment or an intersection point.
    """
    # CONSISTENCY FIX: the original only type-checked the second segment;
    # check both, as intersect_segments3d does.
    check_type(first_segment, "First Cartesian line", Segment3D)
    check_type(second_segment, "Second Cartesian line", Segment3D)
    p1 = first_segment.start_pt
    p2 = first_segment.end_pt
    p3 = second_segment.start_pt
    p4 = second_segment.end_pt
    # difference vectors (stored as points), mirroring the reference C code
    p13 = Point3D(
        x=p1.x - p3.x,
        y=p1.y - p3.y,
        z=p1.z - p3.z
    )
    p43 = Point3D(
        x=p4.x - p3.x,
        y=p4.y - p3.y,
        z=p4.z - p3.z
    )
    # degenerate second segment: no supporting line
    if p43.asVect().is_close_to_zero:
        return None
    p21 = Point3D(
        x=p2.x - p1.x,
        y=p2.y - p1.y,
        z=p2.z - p1.z,
    )
    # degenerate first segment: no supporting line
    if p21.asVect().is_close_to_zero:
        return None
    d1343 = p13.x * p43.x + p13.y * p43.y + p13.z * p43.z
    d4321 = p43.x * p21.x + p43.y * p21.y + p43.z * p21.z
    d1321 = p13.x * p21.x + p13.y * p21.y + p13.z * p21.z
    d4343 = p43.x * p43.x + p43.y * p43.y + p43.z * p43.z
    d2121 = p21.x * p21.x + p21.y * p21.y + p21.z * p21.z
    denom = d2121 * d4343 - d4321 * d4321
    # near-zero denominator: the two lines are (almost) parallel
    if fabs(denom) < MIN_SCALAR_VALUE:
        return None
    numer = d1343 * d4321 - d1321 * d4343
    mua = numer / denom
    mub = (d1343 + d4321 * mua) / d4343
    # closest point on the first line
    pa = Point3D(
        x=p1.x + mua * p21.x,
        y=p1.y + mua * p21.y,
        z=p1.z + mua * p21.z
    )
    # closest point on the second line
    pb = Point3D(
        x=p3.x + mub * p43.x,
        y=p3.y + mub * p43.y,
        z=p3.z + mub * p43.z
    )
    # collapse to a single point when the two closest points are within tolerance
    intersection = point_or_segment3d(
        point1=pa,
        point2=pb,
        tol=tol
    )
    return intersection
'''
class ParamLine3D(object):
"""
parametric line
srcPt: source Point
l, m, n: .....
"""
def __init__(self, srcPt, l, m, n):
assert -1.0 <= l <= 1.0
assert -1.0 <= m <= 1.0
assert -1.0 <= n <= 1.0
self._srcPt = srcPt
self._l = l
self._m = m
self._n = n
def intersect_cartes_plane(self, cartes_plane):
"""
Return intersection point between parametric line and Cartesian plane
"""
# line parameters
x1, y1, z1 = self._srcPt.x, self._srcPt.y, self._srcPt.z
l, m, n = self._l, self._m, self._n
# Cartesian plane parameters
a, b, c, d = cartes_plane.a, cartes_plane.b, cartes_plane.c, cartes_plane.d
try:
k = (a * x1 + b * y1 + c * z1 + d) / (a * l + b * m + c * n)
except ZeroDivisionError:
return None
return Point3D(x1 - l * k,
y1 - m * k,
z1 - n * k)
'''
def eq_xy_pair(xy_pair_1, xy_pair_2):
    """
    Return whether two xy pairs have equal x and y components.

    :param xy_pair_1: first (x, y) pair.
    :param xy_pair_2: second (x, y) pair.
    :return: True when both components are equal, False otherwise.
    """
    # idiom fix: return the boolean expression directly
    # instead of `if cond: return True / return False`
    return xy_pair_1[0] == xy_pair_2[0] and xy_pair_1[1] == xy_pair_2[1]
'''
def remove_equal_consecutive_xypairs(xy_list):
out_xy_list = [xy_list[0]]
for n in range(1, len(xy_list)):
if not eq_xy_pair(xy_list[n], out_xy_list[-1]):
out_xy_list.append(xy_list[n])
return out_xy_list
'''
class CPlane3D:
"""
Cartesian plane.
Expressed by equation:
ax + by + cz + d = 0
Note: CPlane3D is locational - its position in space is defined.
This contrast with Plane, defined just by its attitude, but with undefined position
"""
def __init__(self,
             a: numbers.Real,
             b: numbers.Real,
             c: numbers.Real,
             d: numbers.Real
             ):
    """
    Build a Cartesian plane from the four equation coefficients,
    normalizing them by the norm of the (a, b, c) vector.

    :raise: Exception when a coefficient is not a float or an int.
    """
    for coeff_name, coeff_value in (("a", a), ("b", b), ("c", c), ("d", d)):
        if not isinstance(coeff_value, numbers.Real):
            raise Exception("Input value {} must be float or int but is {}".format(coeff_name, type(coeff_value)))
    norm = sqrt(a*a + b*b + c*c)
    self._a = float(a) / norm
    self._b = float(b) / norm
    self._c = float(c) / norm
    self._d = float(d) / norm
def a(self) -> numbers.Real:
    """
    The normalized 'a' coefficient of the plane equation.

    Example:
    >>> CPlane3D(1, 0, 0, 2).a()
    1.0
    """
    return self._a
def b(self) -> numbers.Real:
    """
    The normalized 'b' coefficient of the plane equation.

    Example:
    >>> CPlane3D(1, 4, 0, 2).b()
    0.9701425001453319
    """
    return self._b
def c(self) -> numbers.Real:
    """
    The normalized 'c' coefficient of the plane equation.

    Example:
    >>> CPlane3D(1, 0, 5.4, 2).c()
    0.9832820049844602
    """
    return self._c
def d(self) -> numbers.Real:
    """
    The normalized 'd' coefficient of the plane equation.

    Example:
    >>> CPlane3D(1, 0, 0, 2).d()
    2.0
    """
    return self._d
def v(self) -> Tuple[numbers.Real, numbers.Real, numbers.Real, numbers.Real]:
    """
    The four normalized coefficients of the plane equation, as a tuple.

    Example:
    >>> CPlane3D(1, 1, 7, -4).v()
    (0.14002800840280097, 0.14002800840280097, 0.9801960588196068, -0.5601120336112039)
    """
    coefficients = (self.a(), self.b(), self.c(), self.d())
    return coefficients
@classmethod
def fromPoints(cls, pt1, pt2, pt3) -> 'CPlane3D':
    """
    Build the Cartesian plane passing through three given Point instances,
    via the standard determinant formulation of the plane equation.

    Example:
    >>> CPlane3D.fromPoints(Point3D(0, 0, 0), Point3D(1, 0, 0), Point3D(0, 1, 0))
    CPlane3D(0.0000, 0.0000, 1.0000, 0.0000)
    >>> CPlane3D.fromPoints(Point3D(0, 0, 0), Point3D(0, 1, 0), Point3D(0, 0, 1))
    CPlane3D(1.0000, 0.0000, 0.0000, 0.0000)
    >>> CPlane3D.fromPoints(Point3D(1,2,3), Point3D(2,3,4), Point3D(-1,7,-2))
    CPlane3D(-0.7956, 0.2387, 0.5569, -1.3524)
    """
    for label, pt in (("First", pt1), ("Second", pt2), ("Third", pt3)):
        if not (isinstance(pt, Point3D)):
            raise Exception("{} input point should be Point but is {}".format(label, type(pt)))
    xs = [pt1.x, pt2.x, pt3.x]
    ys = [pt1.y, pt2.y, pt3.y]
    zs = [pt1.z, pt2.z, pt3.z]
    ones = np.ones(3)
    # coefficients from the cofactor expansion of the 4x4 plane determinant
    coeff_a = np.linalg.det(np.column_stack((ys, zs, ones)))
    coeff_b = -np.linalg.det(np.column_stack((xs, zs, ones)))
    coeff_c = np.linalg.det(np.column_stack((xs, ys, ones)))
    coeff_d = -np.linalg.det(np.column_stack((xs, ys, zs)))
    return cls(
        coeff_a,
        coeff_b,
        coeff_c,
        coeff_d
    )
@classmethod
def from_geological_plane(cls,
geol_plane: Plane,
pt: Point3D):
"""
Given a Plane instance and a provided Point instance,
calculate the corresponding Plane instance.
Example:
>>> CPlane3D.from_geological_plane(Plane(0, 0), Point3D(0, 0, 0))
CPlane3D(0.0000, 0.0000, 1.0000, -0.0000)
>>> CPlane3D.from_geological_plane(Plane(90, 45), Point3D(0, 0, 0))
CPlane3D(0.7071, 0.0000, 0.7071, -0.0000)
>>> CPlane3D.from_geological_plane(Plane(0, 90), Point3D(0, 0, 0))
CPlane3D(0.0000, 1.0000, -0.0000, -0.0000)
"""
normal_versor = geol_plane.normDirectFrwrd().as_versor()
a, b, c = normal_versor.x, normal_versor.y, normal_versor.z
d = - (a * pt.x + b * pt.y + c * pt.z)
return CPlane3D(a, b, c, d)
def __repr__(self):
return "CPlane3D({:.4f}, {:.4f}, {:.4f}, {:.4f})".format(*self.v())
def normVersor(self) -> Optional[Vect3D]:
"""
Return the versor normal to the cartesian plane.
Examples:
>>> CPlane3D(0, 0, 5, -2).normVersor()
Vect3D(0.0000, 0.0000, 1.0000)
>>> CPlane3D(0, 7, 0, 5).normVersor()
Vect3D(0.0000, 1.0000, 0.0000)
"""
return Vect3D(self.a(), self.b(), self.c()).versor()
def toPoint(self) -> Point3D:
"""
Returns a point lying in the plane (non-unique solution).
Examples:
>>> CPlane3D(0, 0, 1, -1).toPoint()
Point3D(0.0000, 0.0000, 1.0000)
"""
point = Point3D(
*pointSolution(
np.array([[self.a(), self.b(), self.c()]]),
np.array([-self.d()]))
)
return point
"""
def gplane_point(self):
'''
Converts a cartesian plane into a geological plane
and a point lying in the plane (non-unique solution).
Examples:
>>> gpl, pt = CPlane3D(0, 0, 1, -1).gplane_point()
>>> gpl
GPlane(000.00, +00.00)
>>> pt
Point(0.0000, 0.0000, 1.0000, nan)
'''
geol_plane = self.normVersor().gvect.normal_gplane
point = Point4D(*point_solution(np.array([[self.a, self.b, self.c]]),
np.array([-self.d])))
return geol_plane, point
"""
def intersVersor(self, another) -> Optional[Vect3D]:
"""
Return intersection versor for two intersecting planes.
Return None for not intersecting planes.
:param another: another Cartesian plane.
:type another: CPlane3D.
:return: the intersection line as a vector.
:rtype: Optional[Vect].
:raise: Exception.
Examples:
>>> a = CPlane3D(1, 0, 0, 0)
>>> b = CPlane3D(0, 0, 1, 0)
>>> a.intersVersor(b)
Vect3D(0.0000, -1.0000, 0.0000)
>>> b = CPlane3D(-1, 0, 0, 0) # parallel plane, no intersection
>>> a.intersVersor(b) is None
True
"""
check_type(another, "Input Cartesian plane", CPlane3D)
return self.normVersor().cross_product(another.normVersor()).versor()
def intersPoint(self,
another) -> Optional[Point3D]:
"""
Return point on intersection line (non-unique solution)
for two planes.
:param another: the second cartesian plane
:type another: CPlane3D
:return: the optional instersection point
:rtype: Optional[Point]
:raise: Exception
Examples:
>>> p_a = CPlane3D(1, 0, 0, 0)
>>> p_b = CPlane3D(0, 0, 1, 0)
>>> p_a.intersPoint(p_b)
Point3D(0.0000, 0.0000, 0.0000)
>>> p_b = CPlane3D(-1, 0, 0, 0) # parallel plane, no intersection
>>> p_a.intersPoint(p_b) is None
"""
check_type(another, "Second plane", CPlane3D)
# find a point lying on the intersection line (this is a non-unique solution)
a = np.array([[self.a(), self.b(), self.c()], [another.a(), another.b(), another.c()]])
b = np.array([-self.d(), -another.d()])
x, y, z = pointSolution(a, b)
if x is not None and y is not None and z is not None:
return Point3D(x, y, z)
else:
return None
def pointDistance(self,
pt: Point3D
) -> numbers.Real:
"""
Calculate the distance between a point and the cartesian plane.
Distance expression:
distance = a * x1 + b * y1 + c * z1 + d
where a, b, c and d are plane parameters of the plane equation:
a * x + b * y + c * z + d = 0
and x1, y1, and z1 are the point coordinates.
:param pt: the point to calculate distance with.
:type pt: Point.
:return: the distance value.
:rtype: numbers.Real.
:raise: Exception.
Examples:
>>> cpl = CPlane3D(0, 0, 1, 0)
>>> pt = Point3D(0, 0, 1)
>>> cpl.pointDistance(pt)
1.0
>>> pt = Point3D(0, 0, 0.5)
>>> cpl.pointDistance(pt)
0.5
>>> pt = Point3D(0, 0, -0.5)
>>> cpl.pointDistance(pt)
-0.5
>>> pt = Point3D(10, 20, 0.0)
>>> cpl.pointDistance(pt)
0.0
"""
check_type(pt, "Input point", Point3D)
return self.a() * pt.x + self.b() * pt.y + self.c() * pt.z + self.d()
def isPointInPlane(self,
pt: Union[Point3D, Point2D]
) -> bool:
"""
Check whether a point lies in the current plane.
:param pt: the point to check.
:return: whether the point lies in the current plane.
:raise: Exception.
Examples:
>>> pl = CPlane3D(0, 0, 1, 0)
>>> pt = Point3D(0, 1, 0)
>>> pl.isPointInPlane(pt)
True
>>> pl = CPlane3D(0, 0, 1, 0)
>>> pt = Point3D(0, 1, 0)
>>> pl.isPointInPlane(pt)
True
"""
check_type(pt, "Input point", (Point2D, Point3D))
if isinstance(pt, Point2D):
pt = Point3D(
pt.x,
pt.y,
0.0
)
if abs(self.pointDistance(pt)) < MIN_SEPARATION_THRESHOLD:
return True
else:
return False
def angle_as_degrees(self,
another: 'CPlane3D'
) -> numbers.Real:
"""
Calculate angle (in degrees) between two planes.
:param another: the CPlane3D instance to calculate angle with.
:type another: CPlane3D.
:return: the angle (in degrees) between the two planes.
:rtype: numbers.Real.
:raise: Exception.
Examples:
>>> CPlane3D(1,0,0,0).angle_as_degrees(CPlane3D(0,1,0,0))
90.0
>>> CPlane3D(1,0,0,0).angle_as_degrees(CPlane3D(0,1,0,0))
90.0
>>> CPlane3D(1,0,0,0).angle_as_degrees(CPlane3D(1,0,1,0))
45.0
>>> CPlane3D(1,0,0,0).angle_as_degrees(CPlane3D(1,0,0,0))
0.0
"""
check_type(another, "Second Cartesian plane", CPlane3D)
angle_degr = self.normVersor().angle_as_degrees(another.normVersor())
if angle_degr > 90.0:
angle_degr = 180.0 - angle_degr
return angle_degr
class ParamLine3D(object):
    """
    Parametric line in 3D space.

    Defined by a source point and the three directional
    coefficients l, m, n of the line.
    """
    def __init__(self, srcPt, l, m, n):
        """
        Store a clone of the source point and the line coefficients.

        :param srcPt: source point of the line.
        :param l: x directional coefficient, in the [-1, 1] range.
        :param m: y directional coefficient, in the [-1, 1] range.
        :param n: z directional coefficient, in the [-1, 1] range.
        :raise: Exception when a coefficient is out of range.
        """
        if any(not (-1.0 <= coeff <= 1.0) for coeff in (l, m, n)):
            raise Exception("Parametric line values must be in -1 to 1 range")
        self._srcPt = srcPt.clone()
        self._l = l
        self._m = m
        self._n = n
    def intersect_cartes_plane(self, cartes_plane) -> Optional[Point3D]:
        """
        Return intersection point between parametric line and Cartesian plane.

        :param cartes_plane: a Cartesian plane.
        :type cartes_plane: CPlane3D.
        :return: the intersection point, or None when the line is parallel to the plane.
        :rtype: Optional[Point3D].
        :raise: Exception when the argument is not a CPlane3D.
        """
        if not isinstance(cartes_plane, CPlane3D):
            raise Exception("Method argument should be a Cartesian plane but is {}".format(type(cartes_plane)))
        # Unpack line parameters.
        src_x, src_y, src_z = self._srcPt.x, self._srcPt.y, self._srcPt.z
        coeff_l, coeff_m, coeff_n = self._l, self._m, self._n
        # Unpack Cartesian plane parameters.
        pl_a, pl_b, pl_c, pl_d = cartes_plane.a(), cartes_plane.b(), cartes_plane.c(), cartes_plane.d()
        # The parameter k locates the intersection along the line; a zero
        # denominator means line and plane are parallel (no intersection).
        try:
            k = (pl_a * src_x + pl_b * src_y + pl_c * src_z + pl_d) / (pl_a * coeff_l + pl_b * coeff_m + pl_c * coeff_n)
        except ZeroDivisionError:
            return None
        return Point3D(
            x=src_x - coeff_l * k,
            y=src_y - coeff_m * k,
            z=src_z - coeff_n * k
        )
def closure_plane_from_geo(
        plane: Plane,
        src_pt: Point3D
) -> Callable:
    """
    Closure that embodies the analytical formula for a given, non-vertical plane.
    The returned callable computes the z value at given horizontal coordinates (x, y).

    :param plane: the geological plane
    :param src_pt: the 3D point expressing a location point contained by the plane.
    :return: a callable expressing an analytical formula for deriving z given x and y values.
    """
    ref_x, ref_y, ref_z = src_pt.x, src_pt.y, src_pt.z
    # Slopes of the plane-contained lines parallel to the x and y axes.
    slope_along_x = plane.slope_x_dir()
    slope_along_y = plane.slope_y_dir()

    def plane_z(x, y):
        # z varies linearly with the offsets from the reference location.
        return slope_along_x * (x - ref_x) + slope_along_y * (y - ref_y) + ref_z

    return plane_z
class Points3D:
    """
    Collection of 3D points, stored as three parallel arrays
    holding the x, y and z values.
    """
    def __init__(self,
                 x_array: array,
                 y_array: array,
                 z_array: array
                 ):
        """
        Construct a point list from a set of array values.

        :param x_array: the array storing the x values
        :param y_array: the array storing the y values
        :param z_array: the array storing the z values
        :raise: Exception when inputs are not arrays or lengths mismatch.
        """
        check_type(
            var=x_array,
            name="X array",
            expected_types=array
        )
        check_type(
            var=y_array,
            name="Y array",
            expected_types=array
        )
        array_length = len(x_array)
        if len(y_array) != array_length:
            raise Exception(f"Y array has length {len(y_array)} while X array has length {len(x_array)}")
        check_type(
            var=z_array,
            name="Z array",
            expected_types=array
        )
        if len(z_array) != array_length:
            raise Exception(f"Z array has length {len(z_array)} while X array has length {len(x_array)}")
        self._x_array = x_array
        self._y_array = y_array
        self._z_array = z_array
    def num_pts(self
                ) -> int:
        """
        Numbers of points.
        """
        return len(self._x_array)
    @classmethod
    def fromPoints(cls,
                   points: List[Point3D]
                   ):
        """
        Create a Points3D instance from a list of Point3D instances.

        :param points: list of points
        :raise: Exception when an element is not a Point3D.
        """
        for ndx, point in enumerate(points):
            check_type(point, "Input point {}".format(ndx), Point3D)
        return Points3D(
            x_array=array('d', [p.x for p in points]),
            y_array=array('d', [p.y for p in points]),
            z_array=array('d', [p.z for p in points])
        )
    @property
    def xs(self
           ) -> array:
        """
        Returns a copy of the points x values.

        :return: points x values
        """
        return copy(self._x_array)
    @property
    def ys(self
           ) -> array:
        """
        Returns a copy of the points y values.

        :return: points y values
        """
        return copy(self._y_array)
    @property
    def zs(self
           ) -> array:
        """
        Returns a copy of the points z values.

        :return: points z values
        """
        return copy(self._z_array)
    def pt(self, pt_ndx: numbers.Integral) -> Point3D:
        """
        Extract the point at index pt_ndx.

        :param pt_ndx: point index.
        :type pt_ndx: numbers.Integral.
        :return: the extracted Point instance.
        :rtype: Point3D.
        """
        return Point3D(
            x=self._x_array[pt_ndx],
            y=self._y_array[pt_ndx],
            z=self._z_array[pt_ndx]
        )
    def values_at(self,
                  ndx: numbers.Integral
                  ) -> Tuple[float, float, float]:
        """
        Return the values at given index.

        :param ndx: the index of the point values to extract
        :type ndx: numbers.Integral
        :return: the x, y and z values
        """
        return (
            self._x_array[ndx],
            self._y_array[ndx],
            self._z_array[ndx]
        )
    def pts(self):
        """
        Return all points as a list of Point3D instances.
        """
        return [Point3D(*self.values_at(ndx)) for ndx in range(self.num_pts())]
    def __repr__(self) -> str:
        """
        Represents a Points instance as a shortened text.

        :return: a textual shortened representation of a Points instance.
        """
        num_points = self.num_pts()
        if num_points == 0:
            txt = "Empty Points3D"
        else:
            x1, y1, z1 = self.values_at(0)
            if num_points == 1:
                # Fixed: the format spec was '{.4f}' (attribute lookup on '4f',
                # raising AttributeError at runtime) instead of '{:.4f}'.
                txt = "Points3D with unique point: ({:.4f}, {:.4f}, {:.4f})".format(x1, y1, z1)
            else:
                x2, y2, z2 = self.values_at(self.num_pts()-1)
                txt = "Points3D with {} points: ({:.4f}, {:.4f}, {:.4f}) ... ({:.4f}, {:.4f}, {:.4f})".format(
                    num_points, x1, y1, z1, x2, y2, z2)
        return txt
    def __iter__(self):
        """
        Return each point.
        """
        return (self.pt(ndx) for ndx in range(self.num_pts()))
    def asXyzArray(self):
        """
        Convert to a Numpy x-y-z array (shape: num_pts x 3).
        """
        return np.vstack(
            (
                self.xs,
                self.ys,
                self.zs
            )
        ).transpose()
    def add_pt(self, pt) -> None:
        """
        In-place transformation of the original Points3D instance
        by adding a new point at the end.

        :param pt: the point to add
        :return: nothing
        """
        self._x_array.append(pt.x)
        self._y_array.append(pt.y)
        self._z_array.append(pt.z)
    def add_pts(self,
                pts: 'Points3D'
                ):
        """
        In-place transformation of the original Points instance
        by adding a new set of points at the end.

        :param pts: list of Points.
        """
        check_type(pts, "Points", Points3D)
        self._x_array.extend(pts.xs)
        self._y_array.extend(pts.ys)
        self._z_array.extend(pts.zs)
    def x_min(self) -> Optional[numbers.Real]:
        """
        Optional minimum of x values.

        :return: the optional minimum of x values.
        :rtype: Optional[numbers.Real]
        """
        return np.nanmin(self._x_array) if self.num_pts() > 0 else None
    def x_max(self) -> Optional[numbers.Real]:
        """
        Optional maximum x value.
        """
        return np.nanmax(self._x_array) if self.num_pts() > 0 else None
    def x_mean(self) -> Optional[numbers.Real]:
        """
        Optional mean x value.
        """
        return np.nanmean(self._x_array) if self.num_pts() > 0 else None
    def y_min(self) -> Optional[numbers.Real]:
        """
        Optional minimum y value.
        """
        return np.nanmin(self._y_array) if self.num_pts() > 0 else None
    def y_max(self) -> Optional[numbers.Real]:
        """
        Optional maximum y value.
        """
        return np.nanmax(self._y_array) if self.num_pts() > 0 else None
    def y_mean(self) -> Optional[numbers.Real]:
        """
        Optional mean y value.
        """
        return np.nanmean(self._y_array) if self.num_pts() > 0 else None
    def z_min(self) -> Optional[numbers.Real]:
        """
        Optional minimum z value.
        """
        return np.nanmin(self._z_array) if self.num_pts() > 0 else None
    def z_max(self) -> Optional[numbers.Real]:
        """
        Optional maximum z value.
        """
        return np.nanmax(self._z_array) if self.num_pts() > 0 else None
    def z_mean(self) -> Optional[numbers.Real]:
        """
        Optional mean z value.
        """
        return np.nanmean(self._z_array) if self.num_pts() > 0 else None
    def z_var(self) -> Optional[numbers.Real]:
        """
        Optional variance of z values.

        :return: the optional variance of z values.
        :rtype: Optional[numbers.Real]
        Examples:
        >>> l = Points3D.fromPoints([Point3D(0, 0, 2), Point3D(1, 0, 2), Point3D(0, 1, 2)])
        >>> l.z_var()
        0.0
        """
        return np.nanvar(self._z_array) if self.num_pts() > 0 else None
    def z_std(self) -> Optional[numbers.Real]:
        """
        Optional standard deviation of z values.

        :return: the optional standard deviation of z values.
        :rtype: Optional[numbers.Real]
        Examples:
        >>> l = Points3D.fromPoints([Point3D(0, 0, 2), Point3D(1, 0, 2), Point3D(0, 1, 2)])
        >>> l.z_std()
        0.0
        """
        return np.nanstd(self._z_array) if self.num_pts() > 0 else None
    def nanmean_point(self) -> Point3D:
        """
        Returns the nan- excluded mean point of the collection.
        It is the mean point for a collection of point in a x-y-z frame (i.e., not lat-lon).

        :return: the nan- excluded mean point of the collection.
        """
        return Point3D(
            x=np.nanmean(self._x_array),
            y=np.nanmean(self._y_array),
            z=np.nanmean(self._z_array)
        )
    def segment(self,
                ndx: int
                ) -> Optional[Segment3D]:
        """
        Returns the optional segment starting at index ndx.

        :param ndx: the segment index.
        :return: the optional segment
        """
        # A segment needs both endpoints: valid indices are 0 .. num_pts-2.
        if ndx < 0 or ndx >= self.num_pts() - 1:
            return None
        return Segment3D(
            start_pt=self.pt(ndx),
            end_pt=self.pt(ndx + 1)
        )
    def reversed(self) -> 'Points3D':
        """
        Return a Points3D instance with reversed point list.

        :return: a new Points3D instance.
        """
        # Fixed: array.array has no 'reversed' method (the original called
        # self._x_array.reversed(), raising AttributeError). Negative-step
        # slicing returns new arrays of the same typecode in reversed order.
        return Points3D(
            x_array=self._x_array[::-1],
            y_array=self._y_array[::-1],
            z_array=self._z_array[::-1]
        )
|
mauroalberti/gsf
|
pygsf/geometries/shapes/space3d.py
|
Python
|
gpl-3.0
| 95,789
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Deepin, Inc.
# 2013 Hailong Qiu
#
# Author: Hailong Qiu <356752238@qq.com>
# Maintainer: Hailong Qiu <356752238@qq.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gtk
from gtk import gdk
import gobject
from draw import draw_text
from color import alpha_color_hex_to_cairo
class NoteBook(gtk.Container):
    """
    Two-page notebook container: a row of two title buttons on top,
    switching between two child layouts shown below.
    """
    def __init__(self):
        gtk.Container.__init__(self)
        self.__init_values()
    def __init_values(self):
        # Build the two title buttons and the layout-switching state.
        self.add_events(gtk.gdk.ALL_EVENTS_MASK)
        self.title_child1 = gtk.Button("本地列表")
        self.title_child1.set_parent(self)
        self.title_child2 = gtk.Button("网络列表")
        self.title_child2.set_parent(self)
        self.title_w = 120
        self.title_h = 30
        # Saved height so hide_title/show_title can restore it.
        self.save_title_h = self.title_h
        #
        self.title_child1.connect("clicked", self.__title_child1_clicked)
        self.title_child2.connect("clicked", self.__title_child2_clicked)
        self.title_child1.connect("expose-event", self.__title_child1_expose_event)
        self.title_child2.connect("expose-event", self.__title_child2_expose_event)
        # True -> layout1 (page 1) is visible, False -> layout2 (page 2).
        self.layout_show_check = True
        self.layout1 = None
        self.layout2 = None
        self.children = []
        # Register the title buttons as children.
        self.children.append(self.title_child1)
        self.children.append(self.title_child2)
    def __title_child1_clicked(self, widget):
        # Switch to page 1 (only once both layouts have been added).
        if self.layout2 and self.layout1:
            self.layout_show_check = True
    def __title_child2_clicked(self, widget):
        # Switch to page 2 (only once both layouts have been added).
        if self.layout1 and self.layout2:
            self.layout_show_check = False
    def __title_child1_expose_event(self, widget, event):
        self.__title_expose_event(widget, event, self.layout_show_check)
        return True
    def __title_child2_expose_event(self, widget, event):
        self.__title_expose_event(widget, event, not self.layout_show_check)
        return True
    def __title_expose_event(self, widget, event, show_check):
        """
        Custom-draw a title button: darker background and brighter text
        when its page is the active one.
        """
        cr = widget.window.cairo_create()
        rect = widget.allocation
        # draw background.
        if show_check:
            bg_color = "#272727"
        else:
            bg_color = "#1b1b1b"
        cr.set_source_rgba(*alpha_color_hex_to_cairo((bg_color,1.0)))
        cr.rectangle(rect.x, rect.y, rect.width + 1, rect.height)
        cr.fill()
        # draw title name.
        text = widget.get_label()
        # Local import keeps pango optional until a title is actually drawn.
        import pango
        if show_check:
            text_color = "#FFFFFF"
        else:
            text_color = "#A9A9A9"
        draw_text(cr,
                  text,
                  rect.x, rect.y, rect.width, rect.height,
                  text_color=text_color,
                  text_size=9,
                  alignment=pango.ALIGN_CENTER)
    def add_layout1(self, layout1):
        # Set the widget shown as page 1.
        self.layout1 = layout1
        self.layout1.set_parent(self)
    def add_layout2(self, layout2):
        # Set the widget shown as page 2.
        self.layout2 = layout2
        self.layout2.set_parent(self)
    def do_realize(self):
        self.set_realized(True)
        self.__init_window()
        self.__init_children()
        self.queue_resize()
    def __init_window(self):
        # Create the container's own gdk.Window receiving all input events.
        self.window = gdk.Window(
            self.get_parent_window(),
            window_type=gdk.WINDOW_CHILD,
            x=self.allocation.x,
            y=self.allocation.y,
            width=self.allocation.width,
            height=self.allocation.height,
            colormap=self.get_colormap(),
            wclass=gdk.INPUT_OUTPUT,
            visual=self.get_visual(),
            event_mask=(self.get_events()
                        | gtk.gdk.VISIBILITY_NOTIFY
                        | gdk.EXPOSURE_MASK
                        | gdk.SCROLL_MASK
                        | gdk.POINTER_MOTION_MASK
                        | gdk.ENTER_NOTIFY_MASK
                        | gdk.LEAVE_NOTIFY_MASK
                        | gdk.BUTTON_PRESS_MASK
                        | gdk.BUTTON_RELEASE_MASK
                        | gdk.KEY_PRESS_MASK
                        | gdk.KEY_RELEASE_MASK
                        ))
        self.window.set_user_data(self)
        self.style.set_background(self.window, gtk.STATE_NORMAL)
    def __init_children(self):
        # Reparent every child window onto the container's own window.
        if self.title_child1:
            self.title_child1.set_parent_window(self.window)
        if self.title_child2:
            self.title_child2.set_parent_window(self.window)
        self.layout1.set_parent_window(self.window)
        self.layout2.set_parent_window(self.window)
    def do_unrealize(self):
        pass
    def do_map(self):
        gtk.Container.do_map(self)
        self.set_flags(gtk.MAPPED)
        #
        self.window.show()
    def do_unmap(self):
        # Fixed: this override was misspelled 'do_umap', so GTK never called
        # it and the chained gtk.Container.do_umap would have raised
        # AttributeError anyway.
        gtk.Container.do_unmap(self)
        self.window.hide()
    def do_expose_event(self, e):
        #
        gtk.Container.do_expose_event(self, e)
        return False
    def do_size_request(self, req):
        # NOTE(review): child requests are queried but 'req' is never filled
        # in - presumably the container relies on its parent's allocation;
        # confirm before changing.
        self.title_child1.size_request()
        self.title_child2.size_request()
        self.layout1.size_request()
        self.layout2.size_request()
    def do_size_allocate(self, allocation):
        gtk.Container.do_size_allocate(self, allocation)
        self.allocation = allocation
        # Split the title row evenly between the two title buttons.
        # (Integer division under Python 2.)
        title_w_padding = self.allocation.width/len(self.children)
        allocation = gdk.Rectangle()
        allocation.x = 0
        allocation.y = 0
        allocation.width = title_w_padding
        allocation.height = self.title_h
        self.title_child1.size_allocate(allocation)
        allocation.x = 0 + allocation.width
        self.title_child2.size_allocate(allocation)
        # The inactive layout is parked one full width off-screen to the left.
        if self.layout_show_check:
            layout2_x = -self.allocation.width
        else:
            layout2_x = 0
        allocation.x = layout2_x
        allocation.y = 0 + self.title_h #self.layout2.allocation.y
        allocation.width = self.allocation.width
        allocation.height = self.allocation.height - self.title_h
        self.layout2.size_allocate(allocation)
        if not self.layout_show_check:
            layout1_x = - self.allocation.width
        else:
            layout1_x = 0
        allocation.x = layout1_x
        allocation.y = 0 + self.title_h #self.layout1.allocation.y
        self.layout1.size_allocate(allocation)
        # Keep the container's gdk window in sync with its allocation.
        if self.get_realized():
            self.window.move_resize(
                self.allocation.x,
                self.allocation.y,
                self.allocation.width,
                self.allocation.height)
    def do_show(self):
        gtk.Container.do_show(self)
    def do_forall(self, include_internals, callback, data):
        # Iterate all four children for GTK's container machinery.
        callback(self.title_child1, data)
        callback(self.title_child2, data)
        callback(self.layout1, data)
        callback(self.layout2, data)
    def do_remove(self, widget):
        pass
    def hide_title(self):
        # Collapse the title row, remembering its height for show_title.
        self.save_title_h = self.title_h
        self.title_h = 0
    def show_title(self):
        self.title_h = self.save_title_h
gobject.type_register(NoteBook)
if __name__ == "__main__":
    from treeview_base import TreeViewBase
    # Demo: a tree view inside a scrolled window as page 1,
    # a plain button as page 2.
    main_window = gtk.Window(gtk.WINDOW_TOPLEVEL)
    scrolled = gtk.ScrolledWindow()
    tree_view = TreeViewBase()
    scrolled.add_with_viewport(tree_view)
    notebook = NoteBook()
    notebook.add_layout1(scrolled)
    notebook.add_layout2(gtk.Button("测试一下"))
    main_window.add(notebook)
    # Populate the tree with sample video categories.
    root_node = tree_view.nodes.add("优酷视频")
    tv_series_node = root_node.nodes.add("电视剧")
    for category in ("电影", "综艺", "音乐", "动漫"):
        root_node.nodes.add(category)
    # TV-series sub-categories.
    new_release_node = tv_series_node.nodes.add("新上映")
    for sub_category in ("明星", "大陆剧", "韩剧", "TVB"):
        tv_series_node.nodes.add(sub_category)
    # Sample titles under the "new releases" node.
    new_release_node.nodes.add("桐柏英雄")
    new_release_node.nodes.add("血雨母子情")
    main_window.show_all()
    gtk.main()
|
hailongqiu/new-deepin-media-player
|
src/widget/notebook.py
|
Python
|
gpl-3.0
| 8,517
|
#
# This file is part of the LibreOffice project.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# This file incorporates work covered by the following license notice:
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed
# with this work for additional information regarding copyright
# ownership. The ASF licenses this file to you under the Apache
# License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0 .
#
class AgendaWizardDialogResources(object):
RID_AGENDAWIZARDDIALOG_START = 5000
RID_AGENDAWIZARDROADMAP_START = 5049
RID_COMMON_START = 500
SECTION_ITEMS = "AGENDA_ITEMS"
SECTION_TOPICS = "AGENDA_TOPICS"
SECTION_MINUTES_ALL = "MINUTES_ALL"
SECTION_MINUTES = "MINUTES"
def __init__(self, oWizardResource):
self.resAgendaWizardDialog_title = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 1)
self.resoptMakeChanges_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 2)
self.reslblTemplateName_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 3)
self.reslblTemplatePath_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 4)
self.reslblProceed_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 5)
self.reslblTitle1_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 6)
self.reslblTitle3_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 7)
self.reslblTitle2_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 8)
self.reslblTitle4_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 9)
self.reslblTitle5_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 10)
self.reslblTitle6_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 11)
self.reschkMinutes_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 12)
self.reslblHelp1_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 13)
self.reslblTime_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 14)
self.reslblTitle_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 15)
self.reslblLocation_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 16)
self.reslblHelp2_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 17)
self.resbtnTemplatePath_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 18)
self.resoptCreateAgenda_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 19)
self.reslblHelp6_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 20)
self.reslblTopic_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 21)
self.reslblResponsible_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 22)
self.reslblDuration_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 23)
self.reschkConvenedBy_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 24)
self.reschkPresiding_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 25)
self.reschkNoteTaker_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 26)
self.reschkTimekeeper_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 27)
self.reschkAttendees_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 28)
self.reschkObservers_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 29)
self.reschkResourcePersons_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 30)
self.reslblHelp4_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 31)
self.reschkMeetingTitle_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 32)
self.reschkRead_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 33)
self.reschkBring_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 34)
self.reschkNotes_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 35)
self.reslblHelp3_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 36)
self.reslblDate_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 38)
self.reslblHelpPg6_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 39)
self.reslblPageDesign_value = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 40)
self.resDefaultFilename = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 41)
self.resDefaultFilename = self.resDefaultFilename[:-4] + ".ott"
self.resDefaultTitle = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 42)
self.resErrSaveTemplate = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 43)
self.resPlaceHolderTitle = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 44)
self.resPlaceHolderDate = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 45)
self.resPlaceHolderTime = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 46)
self.resPlaceHolderLocation = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 47)
self.resPlaceHolderHint = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 48)
self.resErrOpenTemplate = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 56)
self.itemMeetingType = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 57)
self.itemBring = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 58)
self.itemRead = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 59)
self.itemNote = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 60)
self.itemCalledBy = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 61)
self.itemFacilitator = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 62)
self.itemAttendees = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 63)
self.itemNotetaker = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 64)
self.itemTimekeeper = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 65)
self.itemObservers = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 66)
self.itemResource = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 67)
self.resButtonInsert = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 68)
self.resButtonRemove = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 69)
self.resButtonUp = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 70)
self.resButtonDown = oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 71)
#Create a dictionary for localised string in the template
self.dictConstants = {
"#datetitle#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 72),
"#timetitle#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 73),
"#locationtitle#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 74),
"#topics#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 75),
"#num.#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 76),
"#topicheader#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 77),
"#responsibleheader#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 78),
"#timeheader#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 79),
"#additional-information#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 80),
"#minutes-for#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 81),
"#discussion#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 82),
"#conclusion#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 83),
"#to-do#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 84),
"#responsible-party#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 85),
"#deadline#" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 86)}
#Create a dictionary for localising the page design
self.dictPageDesign = {
"Blue" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 87),
"Classic" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 88),
"Colorful" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 89),
"Elegant" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 90),
"Green" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 91),
"Grey" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 92),
"Modern" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 93),
"Orange" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 94),
"Red" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 95),
"Simple" : oWizardResource.getResText(
AgendaWizardDialogResources.RID_AGENDAWIZARDDIALOG_START + 96)}
#Common Resources
self.resOverwriteWarning = oWizardResource.getResText(
AgendaWizardDialogResources.RID_COMMON_START + 19)
self.resTemplateDescription = oWizardResource.getResText(
AgendaWizardDialogResources.RID_COMMON_START + 20)
self.RoadmapLabels = []
self.RoadmapLabels.append(oWizardResource.getResText(AgendaWizardDialogResources.RID_AGENDAWIZARDROADMAP_START + 1))
self.RoadmapLabels.append(oWizardResource.getResText(AgendaWizardDialogResources.RID_AGENDAWIZARDROADMAP_START + 2))
self.RoadmapLabels.append(oWizardResource.getResText(AgendaWizardDialogResources.RID_AGENDAWIZARDROADMAP_START + 3))
self.RoadmapLabels.append(oWizardResource.getResText(AgendaWizardDialogResources.RID_AGENDAWIZARDROADMAP_START + 4))
self.RoadmapLabels.append(oWizardResource.getResText(AgendaWizardDialogResources.RID_AGENDAWIZARDROADMAP_START + 5))
self.RoadmapLabels.append(oWizardResource.getResText(AgendaWizardDialogResources.RID_AGENDAWIZARDROADMAP_START + 6))
|
beppec56/core
|
wizards/com/sun/star/wizards/agenda/AgendaWizardDialogResources.py
|
Python
|
gpl-3.0
| 14,293
|
# Copyright 2007-2012 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Rajiv Mayani"
from time import localtime, strftime
from sqlalchemy.orm.exc import NoResultFound
from Pegasus.tools import utils
from Pegasus.plots_stats import utils as stats_utils
from Pegasus.db.workflow import stampede_statistics
from Pegasus.service.dashboard import queries
class NoWorkflowsFoundError(Exception):
    """Raised when a workflow listing query matches no workflows.

    Carries the raw counts so callers can distinguish "empty database"
    from "filter matched nothing".
    """
    def __init__(self, **args):
        # Total number of workflows seen (0 when not supplied).
        self.count = args.get('count', 0)
        # `filtered` is only set when the caller supplied it, mirroring the
        # original contract (callers may probe it with hasattr()).
        if 'filtered' in args:
            self.filtered = args['filtered']
class Dashboard(object):
    """Query facade used by the Pegasus dashboard web UI.

    Wraps the master ("main") database, which knows about every root
    workflow, plus the per-workflow stampede database whose URL is
    resolved from the master database.

    Fixes over the previous revision:
      * every local that a ``finally`` clause closes is now pre-bound to
        ``None`` before the ``try`` — previously a failing constructor (or
        the early ``ValueError`` in ``get_workflow_information``) raised a
        ``NameError`` inside ``finally`` that masked the real error;
      * ``raise E, msg`` replaced with the portable ``raise E(msg)``;
      * ``has_key`` / ``== None`` replaced with ``in`` / ``is None``.
    """

    def __init__(self, main_db_url, root_wf_id=None, wf_id=None):
        self._main_db_url = main_db_url
        # If an ID is specified, the queries are specific to one workflow,
        # so resolve that workflow's database URL from the main database now.
        if root_wf_id or wf_id:
            self.initialize(root_wf_id, wf_id)

    def initialize(self, root_wf_id, wf_id):
        """Resolve and cache the workflow database URL for root_wf_id."""
        workflow = None  # pre-bind so `finally` cannot hit a NameError
        try:
            workflow = queries.MainDatabase(self._main_db_url)
            self._db_id, self._root_wf_uuid, self._wf_db_url = workflow.get_wf_id_url(root_wf_id)
            self._wf_id = wf_id
        finally:
            Dashboard.close(workflow)

    @staticmethod
    def close(conn):
        """Close a database connection/query object; a falsy conn is a no-op."""
        if conn:
            conn.close()

    def __get_wf_db_url(self):
        """Return the cached workflow database URL; raise ValueError if unset."""
        if not self._wf_db_url:
            raise ValueError('workflow database URL is not set')
        return self._wf_db_url

    def get_root_workflow_list(self, counts_only=False, **table_args):
        """
        Get basic information about all workflows running, on all databases.
        This is for the index page.
        Returns (count, filtered, workflows, counts), or just the counts
        tuple when counts_only is True.
        """
        self._workflows = []
        all_workflows = None  # pre-bind for the finally clause
        try:
            all_workflows = queries.MainDatabase(self._main_db_url)
            counts = all_workflows.get_workflow_counts()
            if counts_only:
                if counts[0] == 0:
                    raise NoWorkflowsFoundError(count=None, filtered=None)
                return counts
            count, filtered, workflows = all_workflows.get_all_workflows(**table_args)
            if workflows:
                self._workflows.extend(workflows)
            if len(self._workflows) == 0:
                # Signal "nothing to show" to the UI with a dedicated error.
                raise NoWorkflowsFoundError(count=count, filtered=filtered)
            return (count, filtered, self._workflows, counts)
        finally:
            Dashboard.close(all_workflows)

    def workflow_stats(self):
        """Task/job/sub-workflow statistics for this workflow ('individual')
        and for the whole root workflow ('all')."""
        workflow = None
        workflow2 = None
        try:
            workflow = stampede_statistics.StampedeStatistics(self.__get_wf_db_url(), False)
            workflow.initialize(root_wf_id=self._wf_id)
            individual_stats = self._workflow_stats(workflow)
            workflow2 = stampede_statistics.StampedeStatistics(self.__get_wf_db_url())
            workflow2.initialize(self._root_wf_uuid)
            all_stats = self._workflow_stats(workflow2)
            return {'individual': individual_stats, 'all': all_stats}
        finally:
            Dashboard.close(workflow)
            Dashboard.close(workflow2)

    def _workflow_stats(self, workflow):
        """Collect task, job and sub-workflow counters from a StampedeStatistics
        object. Returns [tasks, jobs, wfs] (three dicts)."""
        # Task counters (non-subworkflow entries only).
        tasks = {}
        workflow.set_job_filter('nonsub')
        tasks['total_tasks'] = int(workflow.get_total_tasks_status())
        tasks['total_succeeded_tasks'] = int(workflow.get_total_succeeded_tasks_status(False))
        tasks['total_failed_tasks'] = int(workflow.get_total_failed_tasks_status())
        tasks['total_unsubmitted_tasks'] = tasks['total_tasks'] - (tasks['total_succeeded_tasks'] + tasks['total_failed_tasks'])
        tasks['total_task_retries'] = int(workflow.get_total_tasks_retries())
        tasks['total_task_invocations'] = tasks['total_succeeded_tasks'] + tasks['total_failed_tasks'] + tasks['total_task_retries']
        # Job counters.
        jobs = {}
        workflow.set_job_filter('nonsub')
        jobs['total_jobs'] = int(workflow.get_total_jobs_status())
        jobs['total_succeeded_jobs'] = int(workflow.get_total_succeeded_jobs_status())
        jobs['total_failed_jobs'] = int(workflow.get_total_failed_jobs_status())
        jobs['total_unsubmitted_jobs'] = jobs['total_jobs'] - (jobs['total_succeeded_jobs'] + jobs['total_failed_jobs'])
        jobs['total_job_retries'] = int(workflow.get_total_jobs_retries())
        jobs['total_job_invocations'] = jobs['total_succeeded_jobs'] + jobs['total_failed_jobs'] + jobs['total_job_retries']
        # Sub-workflow counters (same queries under the 'subwf' filter).
        wfs = {}
        workflow.set_job_filter('subwf')
        wfs['total_sub_wfs'] = int(workflow.get_total_jobs_status())
        wfs['total_succeeded_sub_wfs'] = int(workflow.get_total_succeeded_jobs_status())
        wfs['total_failed_sub_wfs'] = int(workflow.get_total_failed_jobs_status())
        wfs['total_unsubmitted_sub_wfs'] = wfs['total_sub_wfs'] - (wfs['total_succeeded_sub_wfs'] + wfs['total_failed_sub_wfs'])
        wfs['total_sub_wfs_retries'] = int(workflow.get_total_jobs_retries())
        wfs['total_sub_wfs_invocations'] = wfs['total_succeeded_sub_wfs'] + wfs['total_failed_sub_wfs'] + wfs['total_sub_wfs_retries']
        return [tasks, jobs, wfs]

    def job_breakdown_stats(self):
        """Per-transformation rows: [name, count, success, failure, min, max, avg, sum]."""
        workflow = None
        try:
            workflow = stampede_statistics.StampedeStatistics(self.__get_wf_db_url(), True)
            workflow.initialize(root_wf_id=self._wf_id)
            content = []
            for t in workflow.get_transformation_statistics():
                content.append([t.transformation, int(t.count), int(t.success),
                                int(t.failure), float(t.min), float(t.max), float(t.avg), float(t.sum)])
            return content
        finally:
            Dashboard.close(workflow)

    def job_stats(self):
        """Per-job statistics rows for the workflow's job table."""
        workflow = None
        try:
            workflow = stampede_statistics.StampedeStatistics(self.__get_wf_db_url(), False)
            workflow.initialize(root_wf_id=self._wf_id)
            workflow.set_job_filter('all')
            job_retry_count_dict = {}
            content = []
            for job in workflow.get_job_statistics():
                # NULL database values render as the strings '0' / '-'
                # (kept as-is for template compatibility), otherwise numeric.
                kickstart = '0' if job.kickstart is None else float(job.kickstart)
                multiplier_factor = '0' if job.multiplier_factor is None else int(job.multiplier_factor)
                kickstart_multi = '0' if job.kickstart_multi is None else float(job.kickstart_multi)
                remote_cpu_time = '0' if job.remote_cpu_time is None else float(job.remote_cpu_time)
                post_time = '0' if job.post_time is None else float(job.post_time)
                condor_q_time = '0' if job.condor_q_time is None else float(job.condor_q_time)
                resource_delay = '0' if job.resource_delay is None else float(job.resource_delay)
                runtime = '0' if job.runtime is None else float(job.runtime)
                seqexec = '-' if job.seqexec is None else float(job.seqexec)
                seqexec_delay = '-'
                if job.seqexec is not None and job.kickstart is not None:
                    seqexec_delay = (float(job.seqexec) - float(job.kickstart))
                # Count per-job-name occurrences; used as the attempt number.
                if job.job_name in job_retry_count_dict:
                    job_retry_count_dict[job.job_name] += 1
                else:
                    job_retry_count_dict[job.job_name] = 1
                retry_count = job_retry_count_dict[job.job_name]
                content.append([job.job_name, retry_count, job.site, kickstart, multiplier_factor, kickstart_multi,
                                remote_cpu_time, post_time, condor_q_time,
                                resource_delay, runtime, seqexec, seqexec_delay,
                                utils.raw_to_regular(job.exit_code), job.host_name])
            return content
        finally:
            Dashboard.close(workflow)

    def plots_gantt_chart(self):
        """Job state intervals for the Gantt chart plot."""
        workflow = None
        try:
            # Expand has to be set to False. The method does not provide
            # information when expand is set to True.
            workflow = stampede_statistics.StampedeStatistics(self.__get_wf_db_url(), False)
            workflow.initialize(self._root_wf_uuid)
            gantt_chart = workflow.get_job_states()
            return gantt_chart
        finally:
            Dashboard.close(workflow)

    def plots_time_chart(self, wf_id, time_filter='hour'):
        """(jobs, invocations) bucketed over time for the time chart plot."""
        workflow = None
        workflow_plots = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            details = workflow.get_workflow_information()
            workflow_plots = stampede_statistics.StampedeStatistics(self.__get_wf_db_url())
            workflow_plots.initialize(details.wf_uuid)
            workflow_plots.set_job_filter('nonsub')
            workflow_plots.set_time_filter(time_filter)
            workflow_plots.set_transformation_filter(exclude=['condor::dagman'])
            job, invocation = workflow_plots.get_jobs_run_by_time(), workflow_plots.get_invocation_by_time()
            # Scale bucket indices into seconds; assumes hour-sized buckets —
            # TODO(review): confirm this is correct for other time_filter values.
            for j in job:
                j.date_format *= 3600
            for i in invocation:
                i.date_format *= 3600
            return job, invocation
        finally:
            Dashboard.close(workflow)
            Dashboard.close(workflow_plots)

    def plots_transformation_statistics(self, wf_id):
        """Per-transformation statistics used by the breakdown plot."""
        workflow = None
        workflow_plots = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            details = workflow.get_workflow_information()
            workflow_plots = stampede_statistics.StampedeStatistics(self.__get_wf_db_url())
            workflow_plots.initialize(details.wf_uuid)
            workflow_plots.set_job_filter('nonsub')
            workflow_plots.set_time_filter('hour')
            workflow_plots.set_transformation_filter(exclude=['condor::dagman'])
            dist = workflow_plots.get_transformation_statistics()
            return dist
        finally:
            Dashboard.close(workflow)
            Dashboard.close(workflow_plots)

    def get_workflow_information(self, wf_id=None, wf_uuid=None):
        """
        Get workflow specific information. This is when user clicks on a
        workflow link. Returns (job_counts, details, statistics).

        Raises ValueError when neither wf_id nor wf_uuid is given.
        """
        # Pre-bind BEFORE the try: previously the early ValueError fired
        # before these names were bound, so `finally` raised NameError.
        workflow = None
        workflow_statistics = None
        try:
            if not wf_id and not wf_uuid:
                raise ValueError('Workflow ID or Workflow UUID is required')
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id=wf_id, wf_uuid=wf_uuid)
            details = self._get_workflow_details(workflow)
            job_counts = self._get_workflow_job_counts(workflow)
            # NOTE(review): always expands the whole workflow; an earlier
            # variant expanded only when details.root_wf_id == details.wf_id.
            workflow_statistics = stampede_statistics.StampedeStatistics(self.__get_wf_db_url(), expand_workflow=True)
            workflow_statistics.initialize(details.wf_uuid)
            statistics = {}
            statistics.update(self._get_workflow_summary_times(workflow_statistics))
            statistics.update(self._get_workflow_summary_counts(workflow_statistics))
            return job_counts, details, statistics
        finally:
            Dashboard.close(workflow)
            Dashboard.close(workflow_statistics)

    def get_workflow_details(self, wf_id=None, wf_uuid=None):
        """Return the details record for one workflow (by id or uuid).

        Raises ValueError when neither wf_id nor wf_uuid is given.
        """
        workflow = None
        try:
            if not wf_id and not wf_uuid:
                raise ValueError('Workflow ID or Workflow UUID is required')
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id=wf_id, wf_uuid=wf_uuid)
            details = self._get_workflow_details(workflow)
            return details
        finally:
            Dashboard.close(workflow)

    def workflow_summary_stats(self, wf_id=None, wf_uuid=None):
        """Wall/cumulative times plus 'retry-count' for this workflow."""
        workflow = None
        try:
            workflow = stampede_statistics.StampedeStatistics(self.__get_wf_db_url(), expand_workflow=False)
            workflow.initialize(root_wf_id=self._wf_id)
            dictionary = self._get_workflow_summary_times(workflow)
            dictionary['retry-count'] = self._get_workflow_retries(workflow)
            return dictionary
        finally:
            Dashboard.close(workflow)

    def _get_workflow_details(self, workflow):
        """Delegate: the workflow's details record."""
        return workflow.get_workflow_information()

    def _get_workflow_job_counts(self, workflow):
        """Delegate: the workflow's per-state job counts."""
        return workflow.get_workflow_job_counts()

    def _get_workflow_summary_times(self, workflow):
        """Wall time and cumulative job wall times (total + badput) as floats."""
        statistics = {}
        workflow_states_list = workflow.get_workflow_states()
        wall_time = stats_utils.get_workflow_wall_time(workflow_states_list)
        if wall_time is not None:
            wall_time = float(wall_time)
        # Both cumulative queries return sequences; index 0 is used as the
        # total and index 2 as the badput component (see key names below).
        cum_time = workflow.get_workflow_cum_job_wall_time()
        cum_time = [float(v) if v is not None else v for v in cum_time]
        job_cum_time = workflow.get_submit_side_job_wall_time()
        job_cum_time = [float(v) if v is not None else v for v in job_cum_time]
        statistics['wall-time'] = wall_time
        statistics['cum-time'] = cum_time[0]
        statistics['cum-badput-time'] = cum_time[2]
        statistics['job-cum-time'] = job_cum_time[0]
        statistics['job-cum-badput-time'] = job_cum_time[2]
        return statistics

    def _get_workflow_retries(self, workflow):
        """Total number of retries across all jobs of the workflow."""
        workflow.set_job_filter('all')
        return int(workflow.get_workflow_retries())

    def _get_workflow_summary_counts(self, workflow):
        """Aggregate job counts (total/success/failed/unsubmitted/retries)."""
        statistics = {}
        workflow.set_job_filter('nonsub')
        statistics['total-jobs'] = workflow.get_total_jobs_status()
        statistics['successful-jobs'] = workflow.get_total_succeeded_jobs_status()
        statistics['failed-jobs'] = workflow.get_total_failed_jobs_status()
        statistics['unsubmitted-jobs'] = statistics['total-jobs'] - (statistics['successful-jobs'] + statistics['failed-jobs'])
        statistics['job-retries'] = workflow.get_total_jobs_retries()
        statistics['job-instance-retries'] = statistics['successful-jobs'] + statistics['failed-jobs'] + statistics['job-retries']
        return statistics

    def get_job_information(self, wf_id, job_id, job_instance_id):
        """
        Get job specific information. This is when user clicks on a job link
        on the workflow details page. Returns a Job object, or None if absent.
        """
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            job_details = workflow.get_job_information(job_id, job_instance_id)
            return job_details
        except NoResultFound:
            return None
        finally:
            Dashboard.close(workflow)

    def get_job_instances(self, wf_id, job_id):
        """Return all job instances for a given job, or None if absent."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            job_instances = workflow.get_job_instances(job_id)
            return job_instances
        except NoResultFound:
            return None
        finally:
            Dashboard.close(workflow)

    def get_job_states(self, wf_id, job_id, job_instance_id):
        """Return the states that a job instance has gone through."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            job_states = workflow.get_job_states(job_id, job_instance_id)
            return job_states
        finally:
            Dashboard.close(workflow)

    def get_failed_jobs(self, wf_id, **table_args):
        """(total_count, filtered_count, rows) of failed jobs."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id=wf_id)
            total_count, filtered_count, failed_jobs = workflow.get_failed_jobs(**table_args)
            return total_count, filtered_count, failed_jobs
        finally:
            Dashboard.close(workflow)

    def get_successful_jobs(self, wf_id, **table_args):
        """(total_count, filtered_count, rows) of successful jobs."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id=wf_id)
            total_count, filtered_count, successful_jobs = workflow.get_successful_jobs(**table_args)
            return total_count, filtered_count, successful_jobs
        finally:
            Dashboard.close(workflow)

    def get_running_jobs(self, wf_id, **table_args):
        """(total_count, filtered_count, rows) of jobs in neither the failed
        nor the successful set (delegates to get_other_jobs)."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id=wf_id)
            total_count, filtered_count, running_jobs = workflow.get_other_jobs(**table_args)
            return total_count, filtered_count, running_jobs
        finally:
            Dashboard.close(workflow)

    def get_failing_jobs(self, wf_id, **table_args):
        """(total_count, filtered_count, rows) of currently failing jobs."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id=wf_id)
            total_count, filtered_count, failed_jobs = workflow.get_failing_jobs(**table_args)
            return total_count, filtered_count, failed_jobs
        finally:
            Dashboard.close(workflow)

    def get_sub_workflows(self, wf_id):
        """Return the sub-workflows of a workflow."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id=wf_id)
            sub_workflows = workflow.get_sub_workflows()
            return sub_workflows
        finally:
            Dashboard.close(workflow)

    def get_stdout(self, wf_id, job_id, job_instance_id):
        """Return the captured stdout of a job instance."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            stdout = workflow.get_stdout(job_id, job_instance_id)
            return stdout
        finally:
            Dashboard.close(workflow)

    def get_successful_job_invocation(self, wf_id, job_id, job_instance_id):
        """Return the successful invocations of a job instance."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            successful_invocations = workflow.get_successful_job_invocations(job_id, job_instance_id)
            return successful_invocations
        finally:
            Dashboard.close(workflow)

    def get_failed_job_invocation(self, wf_id, job_id, job_instance_id):
        """Return the failed invocations of a job instance."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            failed_invocations = workflow.get_failed_job_invocations(job_id, job_instance_id)
            return failed_invocations
        finally:
            Dashboard.close(workflow)

    def get_stderr(self, wf_id, job_id, job_instance_id):
        """Return the captured stderr of a job instance."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            stderr = workflow.get_stderr(job_id, job_instance_id)
            return stderr
        finally:
            Dashboard.close(workflow)

    def get_invocation_information(self, wf_id, job_id, job_instance_id, invocation_id):
        """Return one invocation record, with start_time reformatted as
        human-readable local time text."""
        workflow = None
        try:
            workflow = queries.WorkflowInfo(self.__get_wf_db_url(), wf_id)
            invocation = workflow.get_invocation_information(job_id, job_instance_id, invocation_id)
            invocation.start_time = strftime("%a, %d %b %Y %H:%M:%S", localtime(invocation.start_time))
            return invocation
        finally:
            Dashboard.close(workflow)
|
elainenaomi/sciwonc-dataflow-examples
|
sbbd2016/experiments/1-postgres/3_workflow_full_10files_primary_nosh_nors_annot_with_proj_3s/pegasus.bDkvI/pegasus-4.6.0/lib/python2.7/dist-packages/Pegasus/service/dashboard/dashboard.py
|
Python
|
gpl-3.0
| 20,107
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
#------------------------------------------------------------
import selenium
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import time
import urlparse,urllib2,urllib,re,xbmcplugin,xbmcgui,xbmcaddon,xbmc
import os, sys
from core import logger
from core import config
from core import scrapertools
from core.item import Item
import cookielib
import requests
import os.path
# Channel metadata read by the pelisalacarta plugin framework.
__channel__ = "itastreaming"
__category__ = "F"
__type__ = "generic"
__title__ = "itastreaming"
__language__ = "IT"
# Cookie-jar path (LWP/Mozilla format). NOTE(review): hard-coded to one
# user's home directory — should live in the add-on profile directory.
COOKIEFILE = "/Users/arturo/itacookie.lwp"
# Browser-like headers sent with every scraping request.
h = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0'}
# Root of the scraped site.
baseUrl = "http://itastreaming.co"
def createCookies():
if not os.path.isfile(COOKIEFILE):
print "File not exists"
#get cookies!
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0")
browser = webdriver.PhantomJS(executable_path='/bin/phantomjs',desired_capabilities = dcap, service_log_path=os.path.devnull)
browser.get(baseUrl)
time.sleep(10)
a = browser.get_cookies()
print 'Got cloudflare cookies:\n'
browser.close()
b = cookielib.MozillaCookieJar()
for i in a:
# create the cf_session_cookie
ck = cookielib.Cookie(name=i['name'], value=i['value'], domain=i['domain'], path=i['path'], secure=i['secure'], rest=False, version=0,port=None,port_specified=False,domain_specified=False,domain_initial_dot=False,path_specified=True,expires=i['expiry'],discard=True,comment=None,comment_url=None,rfc2109=False)
b.set_cookie(ck)
# save into a file
print b
b.save(filename=COOKIEFILE, ignore_discard=True, ignore_expires=False)
else:
print "found it, do nothing!"
b = True
return b
def isGeneric():
    """Tell the pelisalacarta framework that this channel is generic."""
    return True
def mainlist(item):
    """Build the channel's root menu: latest films, search, genres and quality filters."""
    logger.info("pelisalacarta.itastreaming mainlist")
    menu = []
    menu.append(Item(channel=__channel__, action="movies", title="ultimi film inseriti...", url="http://itastreaming.co"))
    menu.append(Item(channel=__channel__, action="search", title="Cerca Film"))
    # One entry per genre listing page; the slug doubles as the menu title.
    genres = ["animazione", "avventura", "azione", "biografico", "comico",
              "commedia", "documentario", "drammatico", "erotico",
              "fantascienza", "fantasy", "gangstar", "giallo", "guerra",
              "horror", "musical", "romantico", "storico", "thriller",
              "western"]
    for genre in genres:
        menu.append(Item(channel=__channel__, action="movies", title=genre, url="http://itastreaming.co/genere/" + genre))
    # Quality filter pages: (menu title, URL slug).
    qualities = [("HD", "hd"), ("DVD-RIP", "dvdripac3"), ("CAM", "cam"),
                 ("HD-MD", "hd-md"), ("HD-TS", "hd-ts")]
    for label, slug in qualities:
        menu.append(Item(channel=__channel__, action="movies", title=label, url="http://itastreaming.co/qualita/" + slug))
    return menu
#searching for films
def search(item, text):
    """Search itastreaming.co for *text* and return one Item per result.

    On any scraping/network error, logs the exception info and returns [].
    """
    createCookies()
    itemlist = []
    # URL-encode the whole query instead of only replacing spaces, so that
    # accented or special characters do not produce a malformed URL.
    item.url = "http://itastreaming.co/?s=" + urllib.quote(text)
    try:
        biscotto = cookielib.MozillaCookieJar()
        biscotto.load(COOKIEFILE)
        data = requests.get(item.url, cookies=biscotto, headers=h)
        data = data.text.encode('utf-8')
        data = data.replace('–','-').replace('’',' ')
        pattern = '<img class="imx" style="margin-top:0px;" src="?([^>"]+)"?.*?alt="?([^>"]+)"?.*?'
        pattern += '<h3><a href="?([^>"]+)"?.*?</h3>'
        matches = re.compile(pattern,re.DOTALL).findall(data)
        for scrapedthumbnail, scrapedtitle, scrapedurl in matches:
            title = scrapedtitle.strip()
            url = urlparse.urljoin(item.url, scrapedurl)
            # Thumbnail from themoviedb rather than the scraped one.
            thumbnail = scrapthumb(title)
            itemlist.append(Item(channel=__channel__, action="grabing", title=title, url=url, thumbnail=thumbnail, folder=True))
        return itemlist
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # still propagate. sys is already imported at module level.
        for line in sys.exc_info():
            logger.error("%s" % line)
        return []
#azione "movies" server per estrerre i titoli
def movies(item):
createCookies()
itemlist = []
biscotto = cookielib.MozillaCookieJar()
biscotto.load(COOKIEFILE)
data = requests.get(item.url, cookies=biscotto, headers=h)
data = data.text.encode('utf-8')
data = data.replace('–','-').replace('’',' ')
patron = '<div class="item">\s*'
patron += '<a href="?([^>"]+)"?.*?title="?([^>"]+)"?.*?'
patron += '<div class="img">\s*'
patron += '<img.*?src="([^>"]+)'
matches = re.compile(patron,re.DOTALL).findall(data)
if not matches:
print "Coockies expired!, delete it"
os.remove(COOKIEFILE)
for scrapedurl,scrapedtitle,scrapedthumbnail in matches:
title = scrapedtitle.strip()
url = urlparse.urljoin(item.url,scrapedurl)
thumbnail = scrapthumb(title)
scrapedplot = ""
itemlist.append( Item(channel=__channel__, action="grabing", title=title , url=url , thumbnail=thumbnail , plot=scrapedplot , folder=True) )
#next page
patternpage = '<a rel="nofollow" class="previouspostslink\'" href="(.*?)">Seguente \›</a>'
matches = re.compile(patternpage,re.DOTALL).findall(data)
#print matches
if not matches:
patternpage = "<span class='current'.*?</span>"
patternpage += "<a rel='nofollow' class='page larger' href='([^']+)'>.*?</a>"
matches = re.compile(patternpage,re.DOTALL).findall(data)
#print matches
if len(matches)>0:
scrapedurl = urlparse.urljoin(item.url,matches[0])
itemlist.append( Item(channel=__channel__, action="movies", title="Next Page >>" , url=scrapedurl , folder=True) )
return itemlist
def grabing(item):
    """Resolve the playable stream variants for one movie page.

    Loads the page with headless PhantomJS and reads the page's javascript
    ``nData`` variable (entries carry 'url', 'width', 'height'); returns one
    playable Item per entry. If ``nData`` is not defined on the page itself,
    falls back to the hdpass.link player URL found in the raw HTML, which is
    expected to define the same variable.
    """
    itemlist = []
    biscotto = cookielib.MozillaCookieJar()
    biscotto.load(COOKIEFILE)
    data = requests.get(item.url, cookies=biscotto, headers=h)
    data = data.text.encode('utf-8')
    # Runs only when the user clicked a film title (item.title is set).
    if item.title:
        filmtitle = str(item.title)
        filmtitle = filmtitle.replace('–','')
        dcap = dict(DesiredCapabilities.PHANTOMJS)
        dcap["phantomjs.page.settings.userAgent"] = (
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0")
        browser = webdriver.PhantomJS(executable_path='/bin/phantomjs',desired_capabilities = dcap, service_log_path=os.path.devnull)
        browser.get(item.url)
        # Give the page's javascript time to populate nData.
        time.sleep(7)
        try:
            nData = browser.execute_script("return nData")
            print nData
            for block in nData:
                itemlist.append( Item(channel=__channel__, action="playit", title=filmtitle + " quality: " + block['width'] + " x " + block['height'] , url=block['url'] ))
            browser.close()
        except:
            # Fallback: pull the hdpass.link player URL out of the raw HTML
            # and load that page instead, then read nData from it.
            fakeurl = re.findall('"((http)s?://.*?hdpass.link.*?)"', data)
            print fakeurl
            url = fakeurl[0][0]
            browser.get(url)
            time.sleep(7)
            nData = browser.execute_script("return nData")
            print nData
            print filmtitle
            for block in nData:
                print block['url']
                itemlist.append( Item(channel=__channel__, action="playit", title=filmtitle + " quality: " + block['width'] + " x " + block['height'] , url=block['url'] ))
            browser.close()
    return itemlist
def playit(item):
itemlist = []
print item.url
itemlist.append( Item(channel=__channel__, action="playit", title=item.title , url=item.url ))
if not xbmc.Player().isPlayingVideo():
xbmc.Player(xbmc.PLAYER_CORE_DVDPLAYER).play(item.url)
return itemlist
def scrapthumb(title):
title = title.strip().replace('–','').replace('’','-').replace('à','a')
title = title.replace(' ','-')
title = title[:-7]
#print title
mdburl = 'https://www.themoviedb.org/search/movie?query=' + title
req = urllib2.Request(mdburl)
response = urllib2.urlopen(req)
data = response.read()
pattern = '<div class="poster">\s*'
pattern += '<a.*?src="(.*?)"'
matches = re.compile(pattern,re.DOTALL).findall(data)
thumbnail = ""
if matches:
thumbnail = matches[0]
thumbnail = thumbnail.replace('w92','original')
else:
print "thumb not found for: " + mdburl
return thumbnail
|
Reat0ide/plugin.video.pelisalacarta
|
pelisalacarta/channels/itastreaming.py
|
Python
|
gpl-3.0
| 11,292
|
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# (c) Con Radchenko mailto:lankier@gmail.com
#
# $Id: fb2desc.py,v 1.10 2008/09/15 04:18:45 con Exp con $
#
import sys, os
import locale
import getopt
import codecs
import zipfile
from cStringIO import StringIO
import xml.sax
import shutil
import traceback
def get_filename(authors_list, sequence_name, sequence_number, title):
    '''Build the output file name (or path) for a book.

    Formats (the 'fn-format' option):
      1 - "full author names, comma separated - title (sequence #number)"
      2 - same, transliterated and with spaces replaced
      3 - "author last names, comma separated - title"
      4 - same, transliterated and with spaces replaced
      5 - "first letter of author (lowercase)/authors (lowercase)/authors - title (sequence #number)" as a directory path
      6 - same, transliterated and with spaces replaced
    '''
    fmt = options['fn-format']  # renamed from `format`: avoid shadowing the builtin
    authors = []
    full_authors = []
    for a in authors_list:
        if a[0]:
            authors.append(a[0])
        fa = ' '.join(i for i in a if i)
        if fa:
            full_authors.append(fa)
    authors = ', '.join(authors) or 'unknown'
    full_authors = ', '.join(full_authors) or 'unknown'
    if not title:
        title = 'unknown'
    seq = ''
    if sequence_name:
        if sequence_number:
            seq = '(%s #%s)' % (sequence_name, sequence_number)
        else:
            seq = '(%s)' % sequence_name
    if fmt == 3:
        # Last names only, no sequence.
        out = ' '.join([authors, '-', title])
    else:
        parts = [full_authors, '-', title]
        if seq:
            parts.append(seq)
        out = ' '.join(parts)
    if fmt in (2, 4, 6):
        out = translit(out)
        full_authors = translit(full_authors)
    for c in '|\\?*<":>+[]/': # invalid chars in VFAT
        out = out.replace(c, '')
        # BUGFIX: strip the path components for both directory-building
        # formats 5 AND 6 (was (4, 5): format 4 never uses full_authors,
        # while format 6 could emit invalid characters in directory names).
        if fmt in (5, 6):
            full_authors = full_authors.replace(c, '')
    fn_max = 240  # keep each component safely below the usual 255-byte limit
    if fmt in (5, 6):
        # Bucket directory: first alphabetic character of the author list.
        fl = full_authors[0]
        if not fl.isalpha():
            # More robust than the old `full_authors[1]` (which could itself
            # be non-alphabetic, or raise IndexError on 1-char input): scan
            # for the first alphabetic char, falling back to the first char.
            fl = next((ch for ch in full_authors if ch.isalpha()), full_authors[0])
        out = os.path.join(
            fl.lower().encode(options['charset']),
            full_authors.lower().encode(options['charset'])[:fn_max],
            out.encode(options['charset'])[:fn_max])
    else:
        out = out.encode(options['charset'])[:fn_max]
    return out
##----------------------------------------------------------------------
# Global option table; the defaults below are overridden from the command
# line in main() and consulted throughout the module.
options = {
    'format' : '',           # output format: '', 'raw', 'pretty', 'single', 'filename'
    'charset' : 'utf-8',     # output/terminal charset
    'zip-charset' : 'cp866', # charset of file names inside zip archives
    'elements' : [],         # element-path prefixes to show in raw format
    'replace' : False,       # replace typographic chars (see replace_chars())
    'rename' : False,        # rename mode
    'slink' : False,         # create symlinks instead of renaming
    'copy' : False,          # copy instead of renaming
    'fn-format' : 2,         # file-name pattern used by get_filename()
    'show-cover' : False,    # display the cover image
    'show-content' : False,  # print the table of contents
    'show-tree' : False,     # dump the XML element tree
    'image-viewer' : 'xv',   # external image viewer command
    'quiet' : False,         # suppress file names in raw output
    'dest-dir' : None,       # target directory for rename mode
    #
    'suffix' : None,         # set per input file (.fb2 / .fb2.zip / .fb2.bz2 / .fb2.gz)
}
##----------------------------------------------------------------------
# Raised by the SAX ContentHandler to abort parsing early once everything
# needed (the <description> element) has been read.
class StopParsing(Exception):
    pass
##----------------------------------------------------------------------
# u'\u2013' -> '--'
# u'\u2014' -> '---'
# u'\xa0' -> неразрывный пробел
# u'\u2026' -> dots...
# u'\xab' -> '<<'
# u'\xbb' -> '>>'
# u'\u201c' -> ``
# u'\u201d' -> ''
# u'\u201e' -> ,,
def replace_chars(s):
    """Return *s* with typographic characters replaced by ASCII stand-ins.

    En/em dashes, non-breaking spaces, ellipses, guillemets and curly
    quotes are mapped to their plain-text equivalents.
    """
    substitutions = (
        (u'\u2013', u'--'),   # en dash
        (u'\u2014', u'---'),  # em dash
        (u'\xa0',   u' '),    # non-breaking space
        (u'\u2026', u'...'),  # ellipsis
        (u'\xab',   u'<<'),   # left guillemet
        (u'\xbb',   u'>>'),   # right guillemet
        (u'\u201c', u'``'),   # left curly double quote
        (u'\u201d', u"''"),   # right curly double quote
        (u'\u201e', u',,'),   # low double quote
    )
    for char, replacement in substitutions:
        s = s.replace(char, replacement)
    return s
def translit(s):
    """Transliterate *s* into a lowercase ASCII identifier.

    Cyrillic letters are mapped to Latin sequences, ASCII letters and
    digits pass through unchanged, any other alphanumeric character is
    dropped, and runs of non-alphanumeric characters act as word
    separators.  The resulting words are joined with underscores.
    """
    cyrillic_to_latin = {
        u'\u0430': 'a',  u'\u0431': 'b',  u'\u0432': 'v',  u'\u0433': 'g',
        u'\u0434': 'd',  u'\u0435': 'e',  u'\u0451': 'yo', u'\u0436': 'zh',
        u'\u0437': 'z',  u'\u0438': 'i',  u'\u0439': 'y',  u'\u043a': 'k',
        u'\u043b': 'l',  u'\u043c': 'm',  u'\u043d': 'n',  u'\u043e': 'o',
        u'\u043f': 'p',  u'\u0440': 'r',  u'\u0441': 's',  u'\u0442': 't',
        u'\u0443': 'u',  u'\u0444': 'f',  u'\u0445': 'h',  u'\u0446': 'c',
        u'\u0447': 'ch', u'\u0448': 'sh', u'\u0449': 'sh', u'\u044a': '',
        u'\u044b': 'y',  u'\u044c': '',   u'\u044d': 'e',  u'\u044e': 'ju',
        u'\u044f': 'ya',
    }
    ascii_alnum = 'abcdefghijklmnopqrstuvwxyz0123456789'
    words = []
    current = ''
    for ch in s.lower():
        if ch.isalnum():
            if ch in cyrillic_to_latin:
                current += cyrillic_to_latin[ch]
            elif ch in ascii_alnum:
                current += ch
            # other alphanumerics (e.g. accented Latin) are dropped
        else:
            # separator: close the current word, if any
            if current:
                words.append(current)
            current = ''
    if current:
        words.append(current)
    return '_'.join(words)
def wrap_line(s):
    """Word-wrap *s* to roughly 72 columns and return the joined lines.

    Short strings are returned as a single indented line.
    NOTE(review): continuation lines start with the bare word, without the
    indent prefix -- confirm whether that is intended.
    """
    if len(s) <= 70:
        return u' '+s
    ss = u' '  # line currently being assembled (starts with the indent)
    sl = []    # finished lines
    for word in s.split():
        if len(ss+word) > 72:
            # current line is full; start a new one with this word
            sl.append(ss)
            ss = word
        elif ss:
            ss += u' ' + word
        else:
            ss = word
    sl.append(ss)  # flush the last partial line
    return '\n'.join(sl)
##----------------------------------------------------------------------
def show_cover(filename, data, content_type):
    """Decode the base64 cover *data* to a temp file and display it.

    The external viewer from options['image-viewer'] is run synchronously
    via os.system(); the temporary file is removed afterwards.
    """
    if not data:
        print >> sys.stderr, '%s: sorry, cover not found' % filename
        return
    import base64, tempfile
    data = base64.decodestring(data)
    if content_type and content_type.startswith('image/'):
        # derive the file suffix from the MIME subtype, e.g. image/png -> .png
        suffix = '.'+content_type[6:]
    else:
        suffix = ''
    tmp_id, tmp_file = tempfile.mkstemp(suffix)
    try:
        open(tmp_file, 'w').write(data)
        os.system(options['image-viewer']+' '+tmp_file)
    finally:
        # always release the descriptor and delete the temp file
        os.close(tmp_id)
        os.remove(tmp_file)
def show_content(filename, titles):
    """Print the table of contents: one line per title, indented by its
    section nesting level.  *titles* is a list of (level, text) pairs."""
    for secttion_level, data in titles:  # sic: 'secttion' typo kept as-is
        if options['replace']: data = replace_chars(data)
        print ' '*secttion_level+data.encode(options['charset'], 'replace')
    print
def rename(filename, zipfilename, desc, data):
    """Rename (or symlink/copy) *filename* to a metadata-derived name.

    The target name comes from pretty_format(..., 'filename') plus the
    current options['suffix']; options['dest-dir'], 'slink' and 'copy'
    control where and how the file is placed.  Existing targets are never
    overwritten.
    """
    to = pretty_format(filename, zipfilename, len(data), desc, 'filename')
    ##filename = os.path.abspath(filename)
    to += options['suffix']
    if options['dest-dir']:
        to = os.path.join(options['dest-dir'], to)
    to = os.path.abspath(to)
    if os.path.exists(to):
        # refuse to clobber an existing file
        print >> sys.stderr, 'file %s already exists' % to
        return
    dir_name = os.path.dirname(to)
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    if options['slink']:
        os.symlink(filename, to)
        return
    elif options['copy']:
        shutil.copy(filename, to)
        return
    os.rename(filename, to)
def pretty_format(filename, zipfilename, filesize, desc, format='pretty'):
    """Render the parsed FB2 description *desc*.

    *desc* is a list of (element-path, text) pairs produced by the SAX
    handler.  *format* selects the result:
      'single'   -- one "authors - title (sequence number)" line
      'pretty'   -- a multi-line human-readable card
      'filename' -- a file name built via get_filename()
    'single' and 'pretty' return byte strings encoded with
    options['charset'].
    """
    ann = []
    title = ''
    authors_list = []
    # [last-name, first-name, middle-name, nick-name]
    author_name = [None, None, None, None]
    genres = []
    sequence_name = ''
    sequence_number = ''
    for elem, data in desc:
##        data = data.strip()
##        if not data:
##            continue
        if elem.startswith('/description/title-info/annotation/'):
            # collect annotation text, skipping link attributes
            if not elem.endswith('href'):
                ann.append(data) #wrap_line(data))
            if elem.endswith('/p'):
                ann.append('\n')
        elif elem == '/description/title-info/book-title':
            title = data
        elif elem == '/description/title-info/author/first-name':
            author_name[1] = data
        elif elem == '/description/title-info/author/middle-name':
            author_name[2] = data
        elif elem == '/description/title-info/author/last-name':
            # a last-name element closes out one author record
            author_name[0] = data
            authors_list.append(author_name)
            author_name = [None, None, None, None]
        elif elem == '/description/title-info/author/nick-name':
            #author_name[3] = data
            # a nick-name also closes the record; it doubles as the last
            # name when no real last name was seen
            if not author_name[0]:
                author_name[0] = data
            else:
                author_name[3] = data
            authors_list.append(author_name)
            author_name = [None, None, None, None]
        elif elem == '/description/title-info/genre':
            genres.append(data)
        elif elem == '/description/title-info/sequence/name':
            sequence_name = data
        elif elem == '/description/title-info/sequence/number':
            sequence_number = data
    ##authors_list.sort()
    authors = u', '.join(' '.join(n for n in a if n) for a in authors_list if a)
    # wrap each annotation paragraph to the display width
    annotation = []
    ann = ''.join(ann).split('\n')
    for s in ann:
        annotation.append(wrap_line(s))
    annotation = '\n'.join(annotation)
    if format == 'single':
        if sequence_name and sequence_number:
            out = u'%s - %s (%s %s)' % (authors, title,
                                        sequence_name, sequence_number)
        elif sequence_name:
            out = u'%s - %s (%s)' % (authors, title, sequence_name)
        else:
            out = u'%s - %s' % (authors, title)
        #out = '%s: %s' % (filename, out)
        if options['replace']: out = replace_chars(out)
        return out.encode(options['charset'], 'replace')
    elif format == 'pretty':
        out = u'''\
File : %s
''' % filename
        if zipfilename:
            out += u'''\
Zip Filename : %s
''' % zipfilename
        out += u'''\
Size : %d kb
''' % int(filesize/1024)
        out += u'''\
Author(s) : %s
Title : %s
Genres : %s
''' % (authors, title, u', '.join(genres))
        if sequence_name:
            if sequence_number:
                sequence = u'%s (%s)' % (sequence_name, sequence_number)
            else:
                sequence = sequence_name
            out += u'''\
Sequence : %s
''' % sequence
        if annotation:
            out += u'''\
Annotation :
%s
''' % annotation
        if options['replace']: out = replace_chars(out)
        return out.encode(options['charset'], 'replace')
    elif format == 'filename':
        return get_filename(authors_list, sequence_name, sequence_number, title)
def raw_format(filename, zipfilename, desc):
    """Dump every (element-path, value) pair from *desc*, one per line.

    When options['elements'] is non-empty only paths starting with one of
    those prefixes are shown.  Returns a byte string encoded with
    options['charset'].
    """
    if options['quiet']:
        out = u''
    else:
        out = u'filename: %s\n' % filename
        if zipfilename:
            out += u'zipfilename: %s\n' % zipfilename
    for elem, data in desc:
        if not data:
            continue
        # non-empty t means the path matches one of the requested prefixes
        t = filter(elem.startswith, options['elements'])
        #t = [x for x in options['elements'] if elem.startswith(x)]
        if options['elements'] == [] or t:
            out += u'%s: %s\n' % (elem, data)
    if options['replace']: out = replace_chars(out)
    return out.encode(options['charset'], 'replace')
##----------------------------------------------------------------------
class ContentHandler(xml.sax.handler.ContentHandler):
    """SAX handler that collects FB2 metadata in a single pass.

    Collected state:
      desc   -- list of (element-path, text) pairs from <description>
      cover  -- base64 text of the cover <binary> (when show-cover)
      titles -- (section-level, title-text) pairs (when show-content)
      tree   -- [element-path, count] pairs (when show-tree)
    Raises StopParsing at </description> when nothing else is needed.
    """
    def __init__(self):
        self.elem_stack = []   # names of currently open elements
        self.is_desc = False   # inside <description>?
        self.is_cover = False  # inside the cover <binary>?
        self.cur_data = ''     # accumulated character data of the current element
        self.desc = []
        self.cover = ''
        self.cover_name = ''   # id referenced by the coverpage image href
        self.cover_content_type = ''
        self.is_title = False  # inside a <title>?
        self.cur_title = []
        self.titles = []
        self.section_level = 0
        self.tree = []
    def startElement(self, name, attrs):
        if name == 'description': self.is_desc = True
        if name == 'section': self.section_level += 1
        if self.is_desc or options['show-tree']:
            self.elem_stack.append(name)
            elem = '/'+'/'.join(self.elem_stack)
            if options['show-tree']:
                # count repeated sibling elements instead of listing each
                if self.tree and self.tree[-1][0] == elem:
                    #print self.tree[-1]
                    self.tree[-1][1] += 1
                else:
                    #if not elem.endswith('/p') and not elem.endswith('/v'):
                    self.tree.append([elem, 1])
            for atr in attrs.getNames():
                # record attributes as pseudo element paths
                #t = (elem+u'/'+atr, attrs.getValue(atr))
                self.desc.append((elem+u'/'+atr, attrs.getValue(atr)))
                if elem == '/description/title-info/coverpage/image' and \
                   atr.endswith('href'):
                    # remember the cover id ('#id' with the '#' stripped)
                    self.cover_name = attrs.getValue(atr)[1:]
        self.is_cover = False
        if options['show-cover'] and name == 'binary':
            # is this <binary> the cover image referenced earlier?
            content_type = ''
            for atr in attrs.getNames():
                if atr == 'id' and attrs.getValue(atr) == self.cover_name:
                    self.is_cover = True
                elif atr == 'content-type':
                    content_type = attrs.getValue(atr)
            if self.is_cover and content_type:
                self.cover_content_type = content_type
        if options['show-content'] and name == 'title':
            self.is_title = True
            self.cur_title = []
    def endElement(self, name):
        if self.is_desc and self.cur_data:
            # flush the accumulated text for the element that just closed
            elem_name = '/'+'/'.join(self.elem_stack)
            self.desc.append((elem_name, self.cur_data.strip()))
            self.cur_data = ''
        if self.is_desc or options['show-tree']:
            del self.elem_stack[-1]
        if name == 'description':
            if not options['show-cover'] \
               and not options['show-content'] \
               and not options['show-tree']:
                # nothing beyond the description is needed; stop parsing
                raise StopParsing
            else:
                self.is_desc = False
        if options['show-content'] and name == 'title':
            self.is_title = False
            self.titles.append((self.section_level, ' '.join(self.cur_title)))
            self.cur_data = ''
        if name == 'section': self.section_level -= 1
    def characters(self, data):
        if self.is_desc:
            #data = data.strip()
            data = data.replace('\n', ' ')
            if self.cur_data:
                self.cur_data += data
            else:
                self.cur_data = data
        if options['show-cover'] and self.is_cover:
            self.cover += data
        if options['show-content'] and self.is_title:
            data = data.strip()
            if data: self.cur_title.append(data)
# Default no-op SAX handlers; subclassed only so fb2parse() can hand the
# reader explicit handler instances.
class ErrorHandler(xml.sax.handler.ErrorHandler): pass
class EntityResolver(xml.sax.handler.EntityResolver): pass
class DTDHandler(xml.sax.handler.DTDHandler): pass
##----------------------------------------------------------------------
def fb2parse(filename, zipfilename, data):
    """Parse one FB2 document held in *data* and act on it per options.

    Depending on the global options this renames the file, prints the
    element tree, or prints the description in the selected format and
    optionally shows the cover image / table of contents.
    """
    if not data.startswith('<?xml') and not data.startswith('\xef\xbb\xbf<?xml'):
        # not XML (the second pattern allows a UTF-8 BOM); skip the file
        print >> sys.stderr, \
            'Warning: file %s is not an XML file. Skipped.' % filename
        print repr(data[:5])
        #shutil.copy(filename, '/home/con/t/')
        return
    chandler = ContentHandler()
    input_source = xml.sax.InputSource()
    input_source.setByteStream(StringIO(data))
    xml_reader = xml.sax.make_parser()
    xml_reader.setContentHandler(chandler)
    xml_reader.setErrorHandler(ErrorHandler())
    xml_reader.setEntityResolver(EntityResolver())
    xml_reader.setDTDHandler(DTDHandler())
    try:
        xml_reader.parse(input_source)
    except StopParsing:
        # the handler aborts on purpose once the description is read
        pass
    if options['rename']:
        rename(filename, zipfilename, chandler.desc, data)
        return
    if options['show-tree']:
        # dump the element tree with repetition counts
        for e, n in chandler.tree:
            if n > 1:
                print '%s [%d]' % (e, n)
            else:
                print e
        return
    if options['format'] == 'pretty':
        print pretty_format(filename, zipfilename, len(data), chandler.desc, 'pretty')
    elif options['format'] == 'filename':
        print pretty_format(filename, zipfilename, len(data), chandler.desc, 'filename')
    elif options['format'] == 'single':
        print pretty_format(filename, zipfilename, len(data), chandler.desc, 'single')
    elif options['format'] == '' \
         and not options['show-cover'] \
         and not options['show-content']:
        print raw_format(filename, zipfilename, chandler.desc)
    if options['show-cover'] or options['show-content']:
        if options['format'] == 'raw':
            print raw_format(filename, zipfilename, chandler.desc)
        if options['show-content']:
            show_content(filename, chandler.titles)
        if options['show-cover']:
            show_cover(filename, chandler.cover, chandler.cover_content_type)
##----------------------------------------------------------------------
def main():
#locale.setlocale(locale.LC_ALL, '')
default_charset = locale.getdefaultlocale()[1]
if default_charset:
options['charset'] = default_charset
prog_name = os.path.basename(sys.argv[0])
try:
optlist, args = getopt.getopt(sys.argv[1:], 'c:Ce:f:hlopqrRStvwz:',
['raw', 'pretty',
'single',
'output=',
'rename', 'copy', 'slink',
'fn-format=',
'cover', 'contents', 'tree',
'charset=', 'zip-charset=',
'elements=',
'dest-dir=',
'image-viewer=',
'replace', 'quiet', 'help'])
except getopt.GetoptError, err:
sys.exit('%s: %s\ntry %s --help for more information'
% (prog_name, err, prog_name))
help_msg = '''fb2desc -- show description of FB2 file(s)
Usage: %s [options] files|dir
-w --raw-format output in raw format (default)
-p --pretty output in pretty format
-l --single output in single format
--output format output in format (raw, pretty, single, filename)
-o --contents show contents
-t --tree
-v --cover show cover
-c --charset <charset> specify output charset (default: %s)
-z --zip-charset <charset>
-r --replace replace any chars
-e --elements <elements> show only this elements (comma separeted)
-R --rename rename mode
-S --slink create softlinks
-C --copy copy files
--fn-format <format> rename pattern (1, 2, 3, 4, 5, 6)
--dest-dir
--image-viewer
-q --quiet suppress output filename
-h --help display this help''' \
% (prog_name, default_charset)
for i in optlist:
if i[0] == '--help' or i[0] == '-h':
print help_msg
sys.exit()
elif i[0] in ('--charset', '-c'):
charset = i[1]
try:
codecs.lookup(charset)
except LookupError, err:
sys.exit('%s: %s' % (prog_name, err))
options['charset'] = charset
elif i[0] in ('-z', '--zip-charset'):
charset = i[1]
try:
codecs.lookup(charset)
except LookupError, err:
sys.exit('%s: %s' % (prog_name, err))
options['zip-charset'] = charset
elif i[0] == '--elements' or i[0] == '-e':
options['elements'] = i[1].split(',')
elif i[0] == '--output':
f = i[1]
if f not in ('raw', 'pretty', 'single', 'filename'):
sys.exit('''bad option for --output
must be raw, pretty, single, filename
''')
options['format'] = f
elif i[0] == '--raw' or i[0] == '-w':
options['format'] = 'raw'
elif i[0] == '--single' or i[0] == '-l':
options['format'] = 'single'
elif i[0] == '--pretty-format' or i[0] == '-p':
options['format'] = 'pretty'
elif i[0] == '--replace' or i[0] == '-r':
options['replace'] = True
elif i[0] == '--rename' or i[0] == '-R':
options['rename'] = True
elif i[0] == '--slink' or i[0] == '-S':
options['rename'] = True
options['slink'] = True
elif i[0] == '--copy' or i[0] == '-C':
options['rename'] = True
options['copy'] = True
elif i[0] in ('--fn-format', '-f'):
f = i[1]
if f not in ('1', '2', '3', '4', '5', '6'):
sys.exit('''bad option for --fn-format
must be 1, 2, 3, 4, 5, 6
''')
options['fn-format'] = int(f)
elif i[0] == '--contents' or i[0] == '-o':
options['show-content'] = True
elif i[0] == '--cover' or i[0] == '-v':
options['show-cover'] = True
elif i[0] == '--tree' or i[0] == '-t':
options['show-tree'] = True
elif i[0] == '--quiet' or i[0] == '-q':
options['quiet'] = True
elif i[0] == '--dest-dir':
options['dest-dir'] = i[1]
elif i[0] == '--image-viewer':
options['image-viewer'] = i[1]
if len(args) == 0:
sys.exit('%s: missing filename\ntry %s --help for more information'
% (prog_name, prog_name))
in_files = []
for fn in args:
if os.path.isdir(fn):
for root, dirs, files in os.walk(fn):
for f in files:
in_files.append(os.path.join(root, f))
else:
in_files.append(fn)
#print in_files
#return
for raw_filename in in_files:
try:
filename = os.path.abspath(raw_filename)
filename = unicode(filename, options['charset'])
except UnicodeDecodeError, err:
#raise
#print >> sys.stderr, 'WARNING: decode filename:', str(err)
#continue
filename = '' # fixme
pass
if zipfile.is_zipfile(raw_filename):
options['suffix'] = '.fb2.zip'
zf = zipfile.ZipFile(raw_filename)
for zip_filename in zf.namelist():
data = zf.read(zip_filename)
try:
##zip_filename = unicode(zip_filename, options['charset'])
zip_filename = unicode(zip_filename, options['zip-charset'])
except UnicodeDecodeError, err:
print >> sys.stderr, 'WARNING: decode zip filename:', str(err)
zip_filename = ''
try:
fb2parse(filename, zip_filename, data)
except:
traceback.print_exc()
##shutil.copy(raw_filename, '/home/con/t/')
else:
if options['rename']:
continue
else:
options['suffix'] = '.fb2'
data = None
try:
data = open(raw_filename).read()
except IOError as e:
data = open(filename).read()
if data.startswith('BZh'):
import bz2
options['suffix'] = '.fb2.bz2'
data = bz2.decompress(data)
elif data.startswith('\x1f\x8b'):
import gzip
options['suffix'] = '.fb2.gz'
data = gzip.GzipFile(fileobj=StringIO(data)).read()
try:
fb2parse(filename, '', data)
except:
traceback.print_exc()
# Script entry point.
if __name__ == '__main__':
    main()
|
vasnake/fb2tools
|
fb2tools/fb2desc.py
|
Python
|
gpl-3.0
| 24,174
|
'''
Created on 01.12.2016
@author: michael
'''
from alex_test_utils import MODE_SIMPLE
from alexandriabase import baseinjectorkeys
from alexandriabase.domain import Event, AlexDateRange, AlexDate, Document, \
DocumentType
from alexpresenters.MessageBroker import REQ_SAVE_CURRENT_EVENT, \
REQ_SAVE_CURRENT_DOCUMENT, Message, CONF_DOCUMENT_CHANGED, \
CONF_EVENT_CHANGED
from integration.baseintegrationtest import BaseIntegrationTest
from alexandriabase.daos import EventDao, DocumentDao
class BaseReferenceIntegrationTest(BaseIntegrationTest):
    '''
    Base class adding helper methods that make testing
    references easier.
    '''

    def receive_message(self, message):
        # Let the base class record the message, then emulate the save
        # requests a real environment would service.
        BaseIntegrationTest.receive_message(self, message)
        if message == REQ_SAVE_CURRENT_EVENT:
            self.injector.get(EventDao).save(self.presenter.view.current_event)
        if message == REQ_SAVE_CURRENT_DOCUMENT:
            self.injector.get(DocumentDao).save(self.presenter.view.current_document)

    def set_current_document(self, document_id):
        # Broadcast a document change; a None id stands for a fresh document.
        if document_id is None:
            document = Document()
            document.document_type = DocumentType(1)
        else:
            document = self.document_dao.get_by_id(document_id)
        self.message_broker.send_message(
            Message(CONF_DOCUMENT_CHANGED, document=document))

    def set_current_event(self, event_id):
        # Broadcast an event change; a None id stands for a fresh event.
        if event_id is None:
            event = Event()
            event.daterange = AlexDateRange(AlexDate(1936), None)
        else:
            event = self.event_dao.get_by_id(event_id)
        self.message_broker.send_message(
            Message(CONF_EVENT_CHANGED, event=event))
|
archivsozialebewegungen/AlexandriaGui
|
tests/integration/components/references/basereferenceintegrationtest.py
|
Python
|
gpl-3.0
| 1,748
|
from django import template
import sys
register = template.Library()
@register.filter
def get(dictionary, key):
    # Template filter: look up *key* in *dictionary*, None when missing.
    return dictionary.get(key)
@register.filter
def pertinent_values(dictionary, key):
    """Template filter: keys of *dictionary* whose column *key* is significant.

    The filter argument is "key" or "key,limit"; limit is either an
    absolute value or a percentage of the column total (e.g. "5%").
    Keys are sorted by descending value and the tail is cut where values
    drop below the limit, but at least 11 entries are always kept.
    NOTE(review): Python 2 only (dict.keys() as list, cmp-style sort,
    builtin reduce, print statement).
    """
    limit = '10'
    if ',' in key:
        key, limit = key.split(',')
    keys = dictionary.keys()
    # total of the selected column over all entries
    total = reduce(lambda a, b: a + dictionary[b][key], keys, 0)
    if limit[-1:] == '%':
        limit = total * float(limit[:-1]) / 100.0
    else:
        limit = int(limit)
    print >>sys.stderr, limit  # debug output
    keys.sort(lambda a, b: cmp(dictionary[b][key], dictionary[a][key]))
    # walk back from the end while values are below the limit
    index = len(keys) - 1
    while dictionary[keys[index]][key] < limit and index > 10:
        index -= 1
    return keys[:index + 1]
@register.filter
def other_values(dictionary, key):
    """Template filter: complement of pertinent_values().

    Same argument handling and cut-off computation, but returns the keys
    BELOW the limit (the tail) instead of those above it.
    NOTE(review): duplicates the computation in pertinent_values();
    Python 2 only.
    """
    limit = '10'
    if ',' in key:
        key, limit = key.split(',')
    keys = dictionary.keys()
    total = reduce(lambda a, b: a + dictionary[b][key], keys, 0)
    if limit[-1:] == '%':
        limit = total * float(limit[:-1]) / 100.0
    else:
        limit = int(limit)
    print >>sys.stderr, limit  # debug output
    keys.sort(lambda a, b: cmp(dictionary[b][key], dictionary[a][key]))
    index = len(keys) - 1
    while dictionary[keys[index]][key] < limit and index > 10:
        index -= 1
    return keys[index + 1:]
@register.filter
def get_range(last):
    # Template filter: range(last), for iterating N times in a template.
    return range(last)
@register.filter
def get_max(l):
    # Template filter: largest element of *l*.
    return max(l)
@register.filter
def get_min(l):
    # Template filter: smallest element of *l*.
    return min(l)
|
Puyb/sixhdp
|
inscriptions/templatetags/stats.py
|
Python
|
gpl-3.0
| 1,467
|
class LazyProxy(object):
    """Proxy that defers importing and instantiating a class until first use.

    The real instance is created on the first attribute access; all
    attribute lookups are then forwarded to it.
    """
    def __init__(self, original_module, original_class, init_args):
        # Store only the coordinates of the real class; nothing is
        # imported yet.
        self._original_module = original_module
        self._original_class = original_class
        self._original_init_args = init_args
        self._instance = None
    def __getattr__(self, name):
        # __getattr__ fires only for attributes missing on the proxy
        # itself, so the _original_* / _instance lookups do not recurse.
        if self._instance is None:
            self.__init_class__()
        return getattr(self._instance, name)
    def __init_class__(self):
        # Import the target class and build the instance from init_args.
        import importlib
        module = importlib.import_module(self._original_module)
        class_ = getattr(module, self._original_class)
        if self._original_init_args is not None:
            # Arguments starting with '@' name other DI features and are
            # resolved through the container first.
            for index, arg in enumerate(self._original_init_args):
                if arg[:1] == '@':
                    from resources.lib.di.requiredfeature import RequiredFeature
                    self._original_init_args[index] = RequiredFeature(arg[1:]).request()
            import inspect
            # Map the positional init_args onto the constructor's parameter
            # names.  NOTE(review): inspect.getargspec was removed in
            # Python 3.11 -- fine for the Python 2 runtime this targets.
            args = inspect.getargspec(class_.__init__)[0]
            if args[0] == 'self':
                args.pop(0)
            argument_dict = dict(zip(args, self._original_init_args))
            self._instance = class_(**argument_dict)
        else:
            self._instance = class_()
|
wackerl91/luna
|
resources/lib/di/lazyproxy.py
|
Python
|
gpl-3.0
| 1,231
|
# JointBox - Your DIY smart home. Simplified.
# Copyright (C) 2017 Dmitry Berezovsky
#
# JointBox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# JointBox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def boolean(value):
    """Config validator: accept only real booleans (True/False)."""
    return isinstance(value, bool)
def integer(value):
    """Config validator: accept int values.

    Note: bool is a subclass of int, so True/False also pass this check.
    """
    return isinstance(value, int)
def module_id(value):
    """Config validator for module ids; currently a placeholder that
    accepts any value."""
    return True
|
JointBox/jointbox
|
src/common/validators.py
|
Python
|
gpl-3.0
| 913
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Patrik Lundin <patrik@sigterm.se>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: openbsd_pkg
author: "Patrik Lundin (@eest)"
version_added: "1.1"
short_description: Manage packages on OpenBSD.
description:
- Manage packages on OpenBSD using the pkg tools.
requirements: [ "python >= 2.5" ]
options:
name:
required: true
description:
- Name of the package.
state:
required: true
choices: [ present, latest, absent ]
description:
- C(present) will make sure the package is installed.
C(latest) will make sure the latest version of the package is installed.
C(absent) will make sure the specified package is not installed.
build:
required: false
choices: [ yes, no ]
default: no
description:
- Build the package from source instead of downloading and installing
a binary. Requires that the port source tree is already installed.
Automatically builds and installs the 'sqlports' package, if it is
not already installed.
version_added: "2.1"
ports_dir:
required: false
default: /usr/ports
description:
- When used in combination with the 'build' option, allows overriding
the default ports source directory.
version_added: "2.1"
clean:
required: false
choices: [ yes, no ]
default: no
description:
- When updating or removing packages, delete the extra configuration
file(s) in the old packages which are annotated with @extra in
the packaging-list.
version_added: "2.3"
quick:
required: false
choices: [ yes, no ]
default: no
description:
- Replace or delete packages quickly; do not bother with checksums
before removing normal files.
version_added: "2.3"
'''
EXAMPLES = '''
# Make sure nmap is installed
- openbsd_pkg:
name: nmap
state: present
# Make sure nmap is the latest version
- openbsd_pkg:
name: nmap
state: latest
# Make sure nmap is not installed
- openbsd_pkg:
name: nmap
state: absent
# Make sure nmap is installed, build it from source if it is not
- openbsd_pkg:
name: nmap
state: present
build: yes
# Specify a pkg flavour with '--'
- openbsd_pkg:
name: vim--no_x11
state: present
# Specify the default flavour to avoid ambiguity errors
- openbsd_pkg:
name: vim--
state: present
# Specify a package branch (requires at least OpenBSD 6.0)
- openbsd_pkg:
name: python%3.5
state: present
# Update all packages on the system
- openbsd_pkg:
name: '*'
state: latest
# Purge a package and it's configuration files
- openbsd_pkg: name=mpd clean=yes state=absent
# Quickly remove a package without checking checksums
- openbsd_pkg: name=qt5 quick=yes state=absent
'''
import os
import platform
import re
import shlex
import sqlite3
from distutils.version import StrictVersion
# Function used for executing commands.
def execute_command(cmd, module):
    """Run *cmd* through the Ansible module, splitting it shell-style first.

    Splitting into an argv list makes run_command() use shell=False, so
    special characters such as '*' are not shell-expanded.  Returns the
    (rc, stdout, stderr) tuple from run_command().
    """
    return module.run_command(shlex.split(cmd))
# Function used to find out if a package is currently installed.
def get_package_state(names, pkg_spec, module):
    """Record the installed state of every requested package name.

    Queries `pkg_info -Iq inst:<name>` and fills
    pkg_spec[name]['installed_state'] (and ['installed_names'] when
    installed) for each name.
    """
    info_cmd = 'pkg_info -Iq'
    for name in names:
        rc, stdout, stderr = execute_command("%s inst:%s" % (info_cmd, name), module)
        if stderr:
            module.fail_json(msg="failed in get_package_state(): " + stderr)
        if not stdout:
            pkg_spec[name]['installed_state'] = False
            continue
        # If the requested package name is just a stem, like "python", we may
        # find multiple packages with that name; record every reported name.
        pkg_spec[name]['installed_names'] = stdout.splitlines()
        module.debug("get_package_state(): installed_names = %s" % pkg_spec[name]['installed_names'])
        pkg_spec[name]['installed_state'] = True
# Function used to make sure a package is present.
def package_present(names, pkg_spec, module):
    """Install each package in *names* that is not installed yet.

    Per-name results (rc/stdout/stderr/changed) are stored in pkg_spec.
    With build=yes the package is compiled and installed from the ports
    tree instead of being fetched with pkg_add.
    """
    build = module.params['build']
    for name in names:
        # It is possible package_present() has been called from package_latest().
        # In that case we do not want to operate on the whole list of names,
        # only the leftovers.
        if pkg_spec['package_latest_leftovers']:
            if name not in pkg_spec['package_latest_leftovers']:
                module.debug("package_present(): ignoring '%s' which is not a package_latest() leftover" % name)
                continue
            else:
                module.debug("package_present(): handling package_latest() leftovers, installing '%s'" % name)
        if module.check_mode:
            install_cmd = 'pkg_add -Imn'
        else:
            if build is True:
                port_dir = "%s/%s" % (module.params['ports_dir'], get_package_source_path(name, pkg_spec, module))
                if os.path.isdir(port_dir):
                    # Build from ports, honouring flavor/subpackage selection.
                    if pkg_spec[name]['flavor']:
                        flavors = pkg_spec[name]['flavor'].replace('-', ' ')
                        install_cmd = "cd %s && make clean=depends && FLAVOR=\"%s\" make install && make clean=depends" % (port_dir, flavors)
                    elif pkg_spec[name]['subpackage']:
                        install_cmd = "cd %s && make clean=depends && SUBPACKAGE=\"%s\" make install && make clean=depends" % (port_dir,
                                                                                                                               pkg_spec[name]['subpackage'])
                    else:
                        install_cmd = "cd %s && make install && make clean=depends" % (port_dir)
                else:
                    module.fail_json(msg="the port source directory %s does not exist" % (port_dir))
            else:
                install_cmd = 'pkg_add -Im'
        if pkg_spec[name]['installed_state'] is False:
            # Attempt to install the package
            if build is True and not module.check_mode:
                # BUGFIX: 'module' was previously passed as a stray second
                # positional argument to run_command(), where it landed in
                # the check_rc parameter.
                (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = module.run_command(install_cmd, use_unsafe_shell=True)
            else:
                (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (install_cmd, name), module)
            # The behaviour of pkg_add is a bit different depending on if a
            # specific version is supplied or not.
            #
            # When a specific version is supplied the return code will be 0 when
            # a package is found and 1 when it is not. If a version is not
            # supplied the tool will exit 0 in both cases.
            #
            # It is important to note that "version" relates to the
            # packages-specs(7) notion of a version. If using the branch syntax
            # (like "python%3.5") even though a branch name may look like a
            # version string it is not used an one by pkg_add.
            if pkg_spec[name]['version'] or build is True:
                # Depend on the return code.
                module.debug("package_present(): depending on return code for name '%s'" % name)
                if pkg_spec[name]['rc']:
                    pkg_spec[name]['changed'] = False
            else:
                # Depend on stderr instead.
                module.debug("package_present(): depending on stderr for name '%s'" % name)
                if pkg_spec[name]['stderr']:
                    # There is a corner case where having an empty directory in
                    # installpath prior to the right location will result in a
                    # "file:/local/package/directory/ is empty" message on stderr
                    # while still installing the package, so we need to look for
                    # for a message like "packagename-1.0: ok" just in case.
                    match = re.search("\W%s-[^:]+: ok\W" % pkg_spec[name]['stem'], pkg_spec[name]['stdout'])
                    if match:
                        # It turns out we were able to install the package.
                        module.debug("package_present(): we were able to install package for name '%s'" % name)
                    else:
                        # We really did fail, fake the return code.
                        module.debug("package_present(): we really did fail for name '%s'" % name)
                        pkg_spec[name]['rc'] = 1
                        pkg_spec[name]['changed'] = False
                else:
                    module.debug("package_present(): stderr was not set for name '%s'" % name)
            if pkg_spec[name]['rc'] == 0:
                pkg_spec[name]['changed'] = True
        else:
            # Already installed: report a clean no-op.
            pkg_spec[name]['rc'] = 0
            pkg_spec[name]['stdout'] = ''
            pkg_spec[name]['stderr'] = ''
            pkg_spec[name]['changed'] = False
# Function used to make sure a package is the latest available version.
def package_latest(names, pkg_spec, module):
    """Upgrade installed packages; delegate uninstalled ones to package_present().

    Per-name results are recorded in pkg_spec, mirroring package_present().
    Not supported together with build=yes.
    """
    if module.params['build'] is True:
        module.fail_json(msg="the combination of build=%s and state=latest is not supported" % module.params['build'])
    # Assemble the pkg_add flags from the module parameters.
    upgrade_cmd = 'pkg_add -um'
    if module.check_mode:
        upgrade_cmd += 'n'
    if module.params['clean']:
        upgrade_cmd += 'c'
    if module.params['quick']:
        upgrade_cmd += 'q'
    for name in names:
        if pkg_spec[name]['installed_state'] is True:
            # Attempt to upgrade the package.
            (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (upgrade_cmd, name), module)
            # Look for output looking something like "nmap-6.01->6.25: ok" to see if
            # something changed (or would have changed). Use \W to delimit the match
            # from progress meter output.
            pkg_spec[name]['changed'] = False
            for installed_name in pkg_spec[name]['installed_names']:
                module.debug("package_latest(): checking for pre-upgrade package name: %s" % installed_name)
                match = re.search("\W%s->.+: ok\W" % installed_name, pkg_spec[name]['stdout'])
                if match:
                    module.debug("package_latest(): pre-upgrade package name match: %s" % installed_name)
                    pkg_spec[name]['changed'] = True
                    break
            # FIXME: This part is problematic. Based on the issues mentioned (and
            # handled) in package_present() it is not safe to blindly trust stderr
            # as an indicator that the command failed, and in the case with
            # empty installpath directories this will break.
            #
            # For now keep this safeguard here, but ignore it if we managed to
            # parse out a successful update above. This way we will report a
            # successful run when we actually modify something but fail
            # otherwise.
            if pkg_spec[name]['changed'] is not True:
                if pkg_spec[name]['stderr']:
                    pkg_spec[name]['rc'] = 1
        else:
            # Note packages that need to be handled by package_present
            module.debug("package_latest(): package '%s' is not installed, will be handled by package_present()" % name)
            pkg_spec['package_latest_leftovers'].append(name)
    # If there were any packages that were not installed we call
    # package_present() which will handle those.
    if pkg_spec['package_latest_leftovers']:
        module.debug("package_latest(): calling package_present() to handle leftovers")
        package_present(names, pkg_spec, module)
# Function used to make sure a package is not installed.
def package_absent(names, pkg_spec, module):
    """Remove every installed package in *names*, recording results in pkg_spec."""
    # Assemble the pkg_delete flags from the module parameters.
    remove_cmd = 'pkg_delete -I'
    if module.check_mode:
        remove_cmd += 'n'
    if module.params['clean']:
        remove_cmd += 'c'
    if module.params['quick']:
        remove_cmd += 'q'
    for name in names:
        entry = pkg_spec[name]
        if entry['installed_state'] is not True:
            # Nothing installed under this name: report a clean no-op.
            entry['rc'] = 0
            entry['stdout'] = ''
            entry['stderr'] = ''
            entry['changed'] = False
            continue
        # Attempt to remove the package.
        (entry['rc'], entry['stdout'], entry['stderr']) = execute_command("%s %s" % (remove_cmd, name), module)
        entry['changed'] = (entry['rc'] == 0)
# Function used to parse the package name based on packages-specs(7).
# The general name structure is "stem-version[-flavors]".
#
# Names containing "%" are a special variation not part of the
# packages-specs(7) syntax. See pkg_add(1) on OpenBSD 6.0 or later for a
# description.
def parse_package_name(names, pkg_spec, module):
    """Split each requested name into stem/version/flavor/branch parts.

    For every name a dict is stored in pkg_spec[name] containing the
    parsed components plus a 'style' key ('version', 'versionless' or
    'stem') that records which of the three name forms matched. The
    module is failed on contradictory or unparsable names.
    """
    # Initialize empty list of package_latest() leftovers.
    pkg_spec['package_latest_leftovers'] = []
    for name in names:
        module.debug("parse_package_name(): parsing name: %s" % name)
        # Do some initial matches so we can base the more advanced regex on that.
        version_match = re.search("-[0-9]", name)
        versionless_match = re.search("--", name)
        # Stop if someone is giving us a name that both has a version and is
        # version-less at the same time.
        if version_match and versionless_match:
            module.fail_json(msg="package name both has a version and is version-less: " + name)
        # All information for a given name is kept in the pkg_spec keyed by that name.
        pkg_spec[name] = {}
        # If name includes a version.
        if version_match:
            match = re.search("^(?P<stem>[^%]+)-(?P<version>[0-9][^-]*)(?P<flavor_separator>-)?(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = '-'
                pkg_spec[name]['version'] = match.group('version')
                pkg_spec[name]['flavor_separator'] = match.group('flavor_separator')
                pkg_spec[name]['flavor'] = match.group('flavor')
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'version'
                module.debug("version_match: stem: %s, version: %s, flavor_separator: %s, flavor: %s, branch: %s, style: %s" %
                             (
                                 pkg_spec[name]['stem'],
                                 pkg_spec[name]['version'],
                                 pkg_spec[name]['flavor_separator'],
                                 pkg_spec[name]['flavor'],
                                 pkg_spec[name]['branch'],
                                 pkg_spec[name]['style']
                             )
                             )
            else:
                module.fail_json(msg="unable to parse package name at version_match: " + name)
        # If name includes no version but is version-less ("--").
        elif versionless_match:
            match = re.search("^(?P<stem>[^%]+)--(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = '-'
                pkg_spec[name]['version'] = None
                pkg_spec[name]['flavor_separator'] = '-'
                pkg_spec[name]['flavor'] = match.group('flavor')
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'versionless'
                module.debug("versionless_match: stem: %s, flavor: %s, branch: %s, style: %s" %
                             (
                                 pkg_spec[name]['stem'],
                                 pkg_spec[name]['flavor'],
                                 pkg_spec[name]['branch'],
                                 pkg_spec[name]['style']
                             )
                             )
            else:
                module.fail_json(msg="unable to parse package name at versionless_match: " + name)
        # If name includes no version, and is not version-less, it is all a
        # stem, possibly with a branch (%branchname) tacked on at the
        # end.
        else:
            match = re.search("^(?P<stem>[^%]+)(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = None
                pkg_spec[name]['version'] = None
                pkg_spec[name]['flavor_separator'] = None
                pkg_spec[name]['flavor'] = None
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'stem'
                module.debug("stem_match: stem: %s, branch: %s, style: %s" %
                             (
                                 pkg_spec[name]['stem'],
                                 pkg_spec[name]['branch'],
                                 pkg_spec[name]['style']
                             )
                             )
            else:
                module.fail_json(msg="unable to parse package name at else: " + name)
        # Verify that the managed host is new enough to support branch syntax.
        if pkg_spec[name]['branch']:
            branch_release = "6.0"
            if StrictVersion(platform.release()) < StrictVersion(branch_release):
                module.fail_json(msg="package name using 'branch' syntax requires at least OpenBSD %s: %s" % (branch_release, name))
        # Sanity check that there are no trailing dashes in flavor.
        # Try to stop strange stuff early so we can be strict later.
        if pkg_spec[name]['flavor']:
            match = re.search("-$", pkg_spec[name]['flavor'])
            if match:
                module.fail_json(msg="trailing dash in flavor: " + pkg_spec[name]['flavor'])
# Function used for figuring out the port path.
def get_package_source_path(name, pkg_spec, module):
    """Look up the ports tree path (fullpkgpath) for a parsed package name.

    Consults the sqlports sqlite database, first with an exact match on
    fullpkgname and then with progressively fuzzier LIKE queries. Records
    any subpackage suffix in pkg_spec[name]['subpackage']. Fails the
    module unless exactly one port matches.
    """
    pkg_spec[name]['subpackage'] = None
    if pkg_spec[name]['stem'] == 'sqlports':
        # sqlports itself is needed to do lookups, so its path is hardcoded.
        return 'databases/sqlports'
    else:
        # try for an exact match first
        sqlports_db_file = '/usr/local/share/sqlports'
        if not os.path.isfile(sqlports_db_file):
            module.fail_json(msg="sqlports file '%s' is missing" % sqlports_db_file)
        conn = sqlite3.connect(sqlports_db_file)
        first_part_of_query = 'SELECT fullpkgpath, fullpkgname FROM ports WHERE fullpkgname'
        query = first_part_of_query + ' = ?'
        module.debug("package_package_source_path(): exact query: %s" % query)
        cursor = conn.execute(query, (name,))
        results = cursor.fetchall()
        # next, try for a fuzzier match
        if len(results) < 1:
            # '%' is the SQL LIKE wildcard standing in for an unknown version.
            looking_for = pkg_spec[name]['stem'] + (pkg_spec[name]['version_separator'] or '-') + (pkg_spec[name]['version'] or '%')
            query = first_part_of_query + ' LIKE ?'
            if pkg_spec[name]['flavor']:
                looking_for += pkg_spec[name]['flavor_separator'] + pkg_spec[name]['flavor']
                module.debug("package_package_source_path(): fuzzy flavor query: %s" % query)
                cursor = conn.execute(query, (looking_for,))
            elif pkg_spec[name]['style'] == 'versionless':
                # Exclude any flavored variants for a versionless request.
                query += ' AND fullpkgname NOT LIKE ?'
                module.debug("package_package_source_path(): fuzzy versionless query: %s" % query)
                cursor = conn.execute(query, (looking_for, "%s-%%" % looking_for,))
            else:
                module.debug("package_package_source_path(): fuzzy query: %s" % query)
                cursor = conn.execute(query, (looking_for,))
            results = cursor.fetchall()
        # error if we don't find exactly 1 match
        conn.close()
        if len(results) < 1:
            module.fail_json(msg="could not find a port by the name '%s'" % name)
        if len(results) > 1:
            matches = map(lambda x:x[1], results)
            module.fail_json(msg="too many matches, unsure which to build: %s" % ' OR '.join(matches))
        # there's exactly 1 match, so figure out the subpackage, if any, then return
        fullpkgpath = results[0][0]
        parts = fullpkgpath.split(',')
        if len(parts) > 1 and parts[1][0] == '-':
            pkg_spec[name]['subpackage'] = parts[1]
        return parts[0]
# Function used for upgrading all installed packages.
def upgrade_packages(pkg_spec, module):
    """Upgrade every installed package via 'pkg_add -u'.

    Results (rc/stdout/stderr/changed) are stored under the pseudo
    package name '*' in pkg_spec.
    """
    if module.check_mode:
        upgrade_cmd = 'pkg_add -Imnu'
    else:
        upgrade_cmd = 'pkg_add -Imu'
    # Create a minimal pkg_spec entry for '*' to store return values.
    pkg_spec['*'] = {}
    # Attempt to upgrade all packages.
    pkg_spec['*']['rc'], pkg_spec['*']['stdout'], pkg_spec['*']['stderr'] = execute_command("%s" % upgrade_cmd, module)
    # Try to find any occurrence of a package changing version like:
    # "bzip2-1.0.6->1.0.6p0: ok".
    # Use a raw string so "\W"/"\w" reach the regex engine instead of being
    # treated as (invalid) string escape sequences, which raises a
    # DeprecationWarning/SyntaxWarning on modern Python.
    match = re.search(r"\W\w.+->.+: ok\W", pkg_spec['*']['stdout'])
    if match:
        pkg_spec['*']['changed'] = True
    else:
        pkg_spec['*']['changed'] = False
    # It seems we can not trust the return value, so depend on the presence of
    # stderr to know if something failed.
    if pkg_spec['*']['stderr']:
        pkg_spec['*']['rc'] = 1
    else:
        pkg_spec['*']['rc'] = 0
# ===========================================
# Main control flow.
def main():
    """Entry point of the openbsd_pkg module.

    Parses module arguments, dispatches to the package_* helpers per the
    requested state, then aggregates per-package results into a single
    changed flag / error message for Ansible.
    """
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True, type='list'),
            state = dict(required=True, choices=['absent', 'installed', 'latest', 'present', 'removed']),
            build = dict(default='no', type='bool'),
            ports_dir = dict(default='/usr/ports'),
            quick = dict(default='no', type='bool'),
            clean = dict(default='no', type='bool')
        ),
        supports_check_mode = True
    )
    name = module.params['name']
    state = module.params['state']
    build = module.params['build']
    ports_dir = module.params['ports_dir']
    rc = 0
    stdout = ''
    stderr = ''
    result = {}
    result['name'] = name
    result['state'] = state
    result['build'] = build
    # The data structure used to keep track of package information.
    pkg_spec = {}
    if build is True:
        if not os.path.isdir(ports_dir):
            module.fail_json(msg="the ports source directory %s does not exist" % (ports_dir))
        # build sqlports if its not installed yet
        parse_package_name(['sqlports'], pkg_spec, module)
        get_package_state(['sqlports'], pkg_spec, module)
        if not pkg_spec['sqlports']['installed_state']:
            module.debug("main(): installing 'sqlports' because build=%s" % module.params['build'])
            package_present(['sqlports'], pkg_spec, module)
    asterisk_name = False
    for n in name:
        if n == '*':
            # '*' means "all packages" and must be the only requested name.
            if len(name) != 1:
                module.fail_json(msg="the package name '*' can not be mixed with other names")
            asterisk_name = True
    if asterisk_name:
        if state != 'latest':
            module.fail_json(msg="the package name '*' is only valid when using state=latest")
        else:
            # Perform an upgrade of all installed packages.
            upgrade_packages(pkg_spec, module)
    else:
        # Parse package names and put results in the pkg_spec dictionary.
        parse_package_name(name, pkg_spec, module)
        # Not sure how the branch syntax is supposed to play together
        # with build mode. Disable it for now.
        for n in name:
            if pkg_spec[n]['branch'] and module.params['build'] is True:
                module.fail_json(msg="the combination of 'branch' syntax and build=%s is not supported: %s" % (module.params['build'], n))
        # Get state for all package names.
        get_package_state(name, pkg_spec, module)
        # Perform requested action.
        if state in ['installed', 'present']:
            package_present(name, pkg_spec, module)
        elif state in ['absent', 'removed']:
            package_absent(name, pkg_spec, module)
        elif state == 'latest':
            package_latest(name, pkg_spec, module)
    # The combined changed status for all requested packages. If anything
    # is changed this is set to True.
    combined_changed = False
    # We combine all error messages in this comma separated string, for example:
    # "msg": "Can't find nmapp\n, Can't find nmappp\n"
    combined_error_message = ''
    # Loop over all requested package names and check if anything failed or
    # changed.
    for n in name:
        if pkg_spec[n]['rc'] != 0:
            # Prefer stderr as the error text, fall back to stdout.
            if pkg_spec[n]['stderr']:
                if combined_error_message:
                    combined_error_message += ", %s" % pkg_spec[n]['stderr']
                else:
                    combined_error_message = pkg_spec[n]['stderr']
            else:
                if combined_error_message:
                    combined_error_message += ", %s" % pkg_spec[n]['stdout']
                else:
                    combined_error_message = pkg_spec[n]['stdout']
        if pkg_spec[n]['changed'] is True:
            combined_changed = True
    # If combined_error_message contains anything at least some part of the
    # list of requested package names failed.
    if combined_error_message:
        module.fail_json(msg=combined_error_message)
    result['changed'] = combined_changed
    module.exit_json(**result)
# Import module snippets.
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
DazWorrall/ansible
|
lib/ansible/modules/packaging/os/openbsd_pkg.py
|
Python
|
gpl-3.0
| 26,447
|
from scrapelib import HTTPError
from openstates.utils import LXMLMixin
from pupa.scrape import Person, Scraper
class UTPersonScraper(Scraper, LXMLMixin):
    """Scrapes Utah legislators from the legislature's JSON feed."""

    def scrape(self):
        """Yield a Person for every entry in le.utah.gov's legislators.json.

        Adds chamber-specific profile links (validated with a HEAD
        request), contact details and any disclosure/finance links found
        in the feed.
        """
        PARTIES = {"R": "Republican", "D": "Democratic"}
        representative_url = "http://house.utah.gov/rep/{}"
        senator_url = "http://senate.utah.gov/senators/district{}.html"
        json_link = "http://le.utah.gov/data/legislators.json"
        person_json = self.get(json_link).json()
        for info in person_json["legislators"]:
            chamber = "lower" if info["house"] == "H" else "upper"
            person = Person(
                name=info["formatName"],
                district=info["district"],
                party=PARTIES[info["party"]],
                image=info["image"],
                primary_org=chamber,
            )
            person.add_source(json_link)
            # House pages are keyed by member id, Senate pages by district.
            if chamber == "lower":
                link = representative_url.format(info["id"])
            else:
                link = senator_url.format(info["district"])
            try:
                # Cheap existence check before attaching the profile link.
                self.head(link)
            except HTTPError:
                self.logger.warning("Bad URL for {}".format(info["formatName"]))
            else:
                person.add_link(link)
            address = info.get("address")
            email = info.get("email")
            fax = info.get("fax")
            # Work phone seems to be the person's non-legislative
            # office phone, and thus a last option
            # For example, we called one and got the firm
            # where he's a lawyer. We're picking
            # them in order of how likely we think they are
            # to actually get us to the person we care about.
            phone = info.get("cell") or info.get("homePhone") or info.get("workPhone")
            if address:
                person.add_contact_detail(
                    type="address", value=address, note="District Office"
                )
            if phone:
                person.add_contact_detail(
                    type="voice", value=phone, note="District Office"
                )
            if email:
                person.add_contact_detail(
                    type="email", value=email, note="District Office"
                )
            if fax:
                person.add_contact_detail(type="fax", value=fax, note="District Office")
            BASE_FINANCE_URL = "http://www.disclosures.utah.gov/Search/PublicSearch"
            conflicts_of_interest = info.get("CofI") or []
            finance_reports = info.get("FinanceReport") or []
            extra_links = []
            for conflict in conflicts_of_interest:
                extra_links.append(conflict["url"])
            for finance in finance_reports:
                # Some links are just to the base disclosure website
                # Presumably, these members don't yet have their forms up
                # NOTE(review): 'finance' is compared to a URL string here but
                # indexed as a dict ("url") below - confirm the feed really
                # mixes plain strings and objects in FinanceReport.
                if finance != BASE_FINANCE_URL:
                    extra_links.append(finance["url"])
            if extra_links:
                person.extras["links"] = extra_links
            yield person
|
openstates/openstates
|
openstates/ut/people.py
|
Python
|
gpl-3.0
| 3,136
|
from django.conf.urls import url
from . import views
# URL routing for the website app: a few short redirect aliases plus a
# catch-all that serves the single-page layout.
urlpatterns = [
    # Captures special abbreviations and redirects to UOS websites
    url(r"^(?P<site>bb|udc|ms|uos)/?$", views.redirect_to_uos),
    # All website related requests link to views.layout
    # as the layout loads other dependencies as per request
    # NOTE: the empty pattern matches every path, so keep this entry last.
    url(r"", views.layout),
]
|
UOSHUB/BackEnd
|
Website/urls.py
|
Python
|
gpl-3.0
| 347
|
from NfaBuilder import NfaBuilder
def enum(**enums):
    """Build an ad-hoc enumeration class whose attributes are the given
    keyword arguments, e.g. enum(A=1).A == 1."""
    return type('Enum', (), dict(enums))
# Node kinds appearing in the regex parse tree.
nodeTypes = enum(CHARACTER=1, STAR=2, QUESTION=3, ALTER=4, CONCAT=5, PLUS=6)
# Transition labels with special meaning in the generated NFA.
specialTransitions = enum(EPSILON=-1)


class ParseError(Exception):
    """Raised when the input regular expression is malformed."""
    pass
class ParseNode():
    """A single node of the binary parse tree built from a regex.

    Leaf nodes carry the actual input character in ``data``; inner nodes
    reference their sub-trees through ``left`` and ``right``.
    """
    def __init__(self, nodetype, data, left, right):
        # Public attribute names are part of the interface - keep them.
        self.nodeType, self.data = nodetype, data
        self.left, self.right = left, right
class RegexParser():
    """
    Parses the given regular expression into a binary tree of ParseNodes

    Grammar (recursive descent, one method per level):
        expression := concat ('|' expression)?
        concat     := repetition ('.' concat)?
        repetition := atomic ('*' | '?' | '+')?
        atomic     := '(' expression ')' | character
    """
    def __init__(self):
        # Builder used to turn finished parse trees into NFA fragments.
        self.builder = NfaBuilder()
    def getCharacter(self):
        """
        Returns:
            a ParseNode with input character, a leaf node
        """
        return ParseNode(nodeTypes.CHARACTER, self.scanner.pop(), 0, 0)
    def getAtomicNode(self):
        """
        Either parses a part of regex inside parenthesis, or a single input character
        Returns:
            The parsed part of regex (string)
        """
        atomicNode = None
        if self.scanner.peek() == '(':
            self.scanner.pop()
            atomicNode = self.getExpressionNode()
            if self.scanner.pop() != ')':
                raise ParseError('Expected )')
        else:
            atomicNode = self.getCharacter()
        return atomicNode
    def getRepetitionNode(self):
        """
        Returns:
            a repeating node, or atomic node if no repetition found
        """
        atomicNode = self.getAtomicNode()
        if self.scanner.peek() == '*':
            self.scanner.pop()
            return ParseNode(nodeTypes.STAR, 0, atomicNode, 0)
        elif self.scanner.peek() == '?':
            self.scanner.pop()
            return ParseNode(nodeTypes.QUESTION, 0, atomicNode, 0)
        elif self.scanner.peek() == '+':
            self.scanner.pop()
            return ParseNode(nodeTypes.PLUS, 0, atomicNode, 0)
        else:
            return atomicNode
    def getConcatNode(self):
        """
        Returns:
            a concat node
        """
        left = self.getRepetitionNode()
        # '.' is the explicit concatenation operator inserted by prepare().
        if self.scanner.peek() == '.':
            self.scanner.pop()
            right = self.getConcatNode()
            return ParseNode(nodeTypes.CONCAT, 0, left, right)
        else:
            return left
    def getExpressionNode(self):
        """
        Returns:
            an expression node, starts a new recursive parsing
        """
        left = self.getConcatNode()
        if self.scanner.peek() == '|':
            self.scanner.pop()
            right = self.getExpressionNode()
            return ParseNode(nodeTypes.ALTER, 0, left, right)
        else:
            return left
    def prepare(self, inputData):
        """
        Adds concat dots to the input regex. This helps parsing
        Args:
            * inputData (string): The regex in which the dots are added to
        Returns:
            The modified regex
        """
        output = []
        for c in range(0, len(inputData) - 1):
            currentSymbol = inputData[c]
            output.append(currentSymbol)
            nextSymbol = inputData[c+1]
            # Insert '.' wherever two tokens are implicitly concatenated
            # (e.g. "ab" -> "a.b", ")a" -> ").a").
            if ((currentSymbol.isalnum() or currentSymbol in [')', '*', '?','+']) and
                nextSymbol not in [')', '|', '*', '?','+']):
                output.append('.')
        output.append(inputData[len(inputData)-1])
        # The scanner consumes the dotted token list during parsing.
        self.scanner = RegexScanner(output)
        return output
    def printNode(self, node, offset):
        """Recursively pretty-print the parse tree rooted at node,
        right-justifying each label by offset (debug helper)."""
        if not node:
            return
        if node.nodeType == nodeTypes.CHARACTER:
            print node.data.rjust(offset)
        elif node.nodeType == nodeTypes.ALTER:
            print '|'.rjust(offset)
        elif node.nodeType == nodeTypes.CONCAT:
            print '.'.rjust(offset)
        elif node.nodeType == nodeTypes.QUESTION:
            print '?'.rjust(offset)
        elif node.nodeType == nodeTypes.STAR:
            print '*'.rjust(offset)
        elif node.nodeType == nodeTypes.PLUS:
            print '+'.rjust(offset)
        else:
            print ''.rjust(offset)
        self.printNode(node.left, offset - 4)
        self.printNode(node.right, offset + 4)
    def getNfaFromParseTree(self, tree):
        """
        Generates a nfa from regex which has been parsed to a tree
        """
        if tree.nodeType == nodeTypes.CHARACTER:
            return self.builder.getSingleInputNfa(tree.data)
        elif tree.nodeType == nodeTypes.ALTER:
            return self.builder.getAlterNfa(self.getNfaFromParseTree(tree.left),
                                            self.getNfaFromParseTree(tree.right))
        elif tree.nodeType == nodeTypes.CONCAT:
            return self.builder.getConcatNfa(self.getNfaFromParseTree(tree.left),
                                             self.getNfaFromParseTree(tree.right))
        elif tree.nodeType == nodeTypes.STAR:
            return self.builder.getStarNfa(self.getNfaFromParseTree(tree.left))
        elif tree.nodeType == nodeTypes.PLUS:
            return self.builder.getPlusNfa(self.getNfaFromParseTree(tree.left))
        elif tree.nodeType == nodeTypes.QUESTION:
            # '?' is modeled as an alternation with an epsilon transition.
            return self.builder.getAlterNfa(self.getNfaFromParseTree(tree.left),
                                            self.builder.getSingleInputNfa(specialTransitions.EPSILON))
        else:
            return
    def regexToNfa(self, regex):
        """
        Constructs a NFA from Regex
        Args:
            * regex (string): NFA is constructed from this regex
        Returns:
            NFA created from the parse tree using :func:`getNfaFromParseTree`
        """
        if not regex:
            raise ParseError('Empty expression')
        self.prepare(regex)
        self.treeRoot = self.getExpressionNode()
        return self.getNfaFromParseTree(self.treeRoot)
class RegexScanner():
    """Sequential character reader over a (pre-processed) regex token list."""
    def __init__(self, inputData):
        self.data = inputData
        # Index of the next character to be consumed.
        self.next = 0
    def peek(self):
        """
        Peeks character from regex string
        Returns:
            char or 0 if no characters are available
        """
        if self.next >= len(self.data):
            return 0
        return self.data[self.next]
    def pop(self):
        """
        Pops character from regex string
        Returns:
            Popped character
        """
        upcoming = self.peek()
        # Only advance while there is still input left.
        if self.next < len(self.data):
            self.next += 1
        return upcoming
    def current_position(self):
        """
        Returns:
            int - number of characters consumed so far
        """
        return self.next
|
induktio/LamaTrainer
|
falib/RegexParser.py
|
Python
|
gpl-3.0
| 6,711
|
#!/usr/bin/python2.7
# cuon_server install
import os, sys, platform
import subprocess, shlex, shutil
import commands
import locale
import pwd, grp
from gi.repository import Gtk
import ConfigParser
class cssi():
    """Interactive installer for the cuon server.

    Detects the host distribution, checks for required programs and
    Python modules, shows the results in a Gtk window and then walks
    through ssh, PostgreSQL and cuon configuration, running each shell
    step inside a terminal emulator via subprocess.
    """
    def __init__(self, user=None):
        # Name of the (non-root) user the server files are installed for.
        self.user = user
        self.win = None
        self.grid = None
        self.CalendarDir="/usr/local/iCal"
        # Display names plus per-distribution binary paths and package sets.
        self.program_names = ["Postgres", "Subversion", "ssh"]
        self.programs = []
        self.programs_gentoo = ["/usr/bin/postmaster", "/usr/bin/svn", "/usr/bin/ssh-keygen"]
        self.programs_ubuntu = ["/usr/bin/pg_ctlcluster", "/usr/bin/svn", "/usr/bin/ssh-keygen"]
        self.programs_debian = ["/usr/bin/pg_ctlcluster", "/usr/bin/svn", "/usr/bin/ssh-keygen"]
        self.program_installer_gentoo = [{"Postgres":["dev-db/postgresql-server", "app-admin/eselect-postgresql", "dev-db/postgresql-docs"]}, {"Subversion":["dev-vcs/subversion"]}, {"ssh":["virtual/ssh"]}]
        self.program_installer_ubuntu = [{"Postgres":["postgresql-9.1", "postgresql-client-9.1"]},{"Subversion":["subversion"]}, {"ssh":["ssh"]}]
        self.program_installer_debian = [{"Postgres":["postgresql-9.1", "postgresql-client-9.1"]},{"ssh":["ssh"]} ]
        self.program_installer = []
        self.programs_exist = []
        # Python modules the server needs, probed in checkEnvironment().
        self.python_modules = ["PIL", "reportlab", "twisted.web", "twisted.words", "pygments", "webkit", "pg"]
        self.python_modules_exist = []
        self.python_installer = []
        self.python_installer_gentoo = [{"PIL":["dev-python/imaging"]}, {"reportlab":["dev-python/reportlab"]}, {"twisted.web":["dev-python/twisted-web"]}, {"twisted.words":[]}, {"pygments":[]}, {"webkit":["dev-python/pywebkitgtk"]},{"pg":[]} ]
        self.python_installer_ubuntu = [{"PIL":["python-imaging", "python-imaging-sane"]}, {"reportlab":["python-reportlab"]} , {"twisted.web":["python-twisted-web" ]}, {"twisted.words":["python-twisted-words"]}, {"pygments":["python-pygments"]}, {"webkit":["python-webkit"]},{"pg":["python-pygresql"]} ]
        self.python_installer_debian = []
        self.OS_Installer = None
        self.OS = None
        self.Sudo = "" # or gksu
        # Candidate terminal emulators; the first one found under /usr/bin
        # is used to run all shell commands below.
        self.Terminals = ["gnome-terminal", "konsole", "xfce4-terminal", "terminal", "xterm"]
        self.Terminal = None
        self.cpServer = ConfigParser.ConfigParser()
        self.CUON_FS = "/etc/cuon"
        self.dia = MessageDialogWindow()
    def checkOS(self):
        """Detect the Linux distribution and select the matching installer
        command, program paths and package lists; also pick a terminal."""
        print "start check OS"
        if sys.platform.startswith('linux'):
            # Linux-specific code here...
            os_desc = os.uname()[1].upper()
            os_name = os.uname()[2].upper()
            os_machine = os.uname()[3].upper()
            os_type = os.uname()[4].upper()
            os_dist = platform.linux_distribution()[0].upper()
            print os_dist, os_name, os_machine, os_type
            if os_dist.find("GENTOO")>= 0:
                if os.path.exists("/usr/bin/emerge"):
                    self.OS = "GENTOO"
                    self.program_installer = self.program_installer_gentoo
                    self.python_installer = self.python_installer_gentoo
                    self.programs = self.programs_gentoo
                    print 'Check1', self.programs , self.programs_gentoo
                    self.OS_Installer = "/usr/bin/emerge "
            elif os_dist.find("UBUNTU")>= 0:
                if os.path.exists("/usr/bin/apt-get"):
                    self.OS = "UBUNTU"
                    self.program_installer = self.program_installer_ubuntu
                    self.python_installer = self.python_installer_ubuntu
                    self.programs = self.programs_ubuntu
                    self.OS_Installer = "/usr/bin/apt-get install "
            elif os_dist.find("DEBIAN")>= 0:
                if os.path.exists("/usr/bin/apt-get"):
                    self.OS = "DEBIAN"
                    self.program_installer = self.program_installer_debian
                    self.python_installer = self.python_installer_debian
                    self.programs = self.programs_debian
                    self.OS_Installer = "/usr/bin/apt-get install "
        print "OS = ", self.OS
        for j in self.Terminals:
            if os.path.exists("/usr/bin/" + j):
                self.Terminal = "/usr/bin/" + j
                print "Terminal = " + self.Terminal
                break
    def checkEnvironment(self):
        """Record, in parallel boolean lists, which required programs and
        Python modules are present on this machine."""
        self.programs_exist = []
        self.python_modules_exist = []
        print 'programs = ', self.programs
        for program in self.programs:
            print program
            if os.path.exists(program):
                self.programs_exist.append(True)
            else:
                self.programs_exist.append(False)
        print 'Exist 8', self.programs, self.programs_exist
        for python_module in self.python_modules:
            try:
                print python_module
                if python_module == "webkit":
                    # webkit is detected via its shared object on disk
                    # instead of __import__.
                    if os.path.exists("/usr/lib/python2.7/site-packages/webkit/webkit.so"):
                        self.python_modules_exist.append(True)
                    elif os.path.exists("/usr/lib/python2.7/dist-packages/webkit/webkit.so"):
                        self.python_modules_exist.append(True)
                    else:
                        self.python_modules_exist.append(False)
                else:
                    if __import__(python_module):
                        self.python_modules_exist.append(True)
            except ImportError:
                self.python_modules_exist.append(False)
            except:
                self.python_modules_exist.append(False)
        print 'Exist 9', self.python_modules, self.python_modules_exist
    def on_button_clicked(self, widget):
        """Debug click handler: pops the missing-requirements dialog."""
        print "Hello World"
        self.dia.wrong_requirement()
    def start(self, again=False):
        """Build (or rebuild, when again=True) the status window listing
        every requirement and whether it was found."""
        print 'again', again
        self.checkOS()
        self.checkEnvironment()
        if not again:
            self.win = Gtk.Window()
            self.win.connect("delete-event", Gtk.main_quit)
            self.button = Gtk.Button(label="Next")
            self.button.connect("clicked", self.on_check_missing)
        if again:
            self.win.remove(self.grid)
        self.grid = Gtk.Table(10, 4, True)
        z= 0
        print self.programs_exist
        for name in self.program_names:
            print z, self.programs_exist[z]
            self.grid.attach(Gtk.Label(name), 0, 1, z, z+1)
            self.grid.attach(Gtk.Label(`self.programs_exist[z]`), 1, 2, z,z+1)
            z += 1
        z = 0
        for pName in self.python_modules:
            l1 = Gtk.Label(pName)
            l1.set_justify(Gtk.Justification.LEFT)
            self.grid.attach(l1, 3, 4, z,z+1, 0, 0.5, 0, 0.5)
            self.grid.attach(Gtk.Label(`self.python_modules_exist[z]`), 4, 5,z, z+1, 0, 0.5, 0, 0.5)
            z += 1
        if not again:
            self.grid.attach(self.button, 4, 5, 9 , 10)
        self.win.add(self.grid)
        self.win.show_all()
        # Warn about the lasting root access before entering the main loop.
        self.dia.warn(self.user)
        if self.dia.Q2 == False:
            sys.exit(0)
        if not again:
            Gtk.main()
    def on_check_missing(self, widget, again=False):
        """'Next' handler: offer to install anything missing, otherwise
        continue with the ssh configuration step."""
        if again:
            self.start(again)
        if not self.Terminal:
            self.dia.AbortInfo1()
            sys.exit(0)
        if False in self.python_modules_exist or False in self.programs_exist:
            if again:
                # Second pass and still missing requirements: give up.
                self.dia.error1()
                sys.exit(0)
            self.dia.wrong_requirement()
            print 'q1', self.dia.Q1
            if self.dia.Q1:
                self.try_install_missing_programs()
        else:
            # All is ok, next step ssh
            self.configure_ssh()
    def try_install_missing_programs(self):
        """Install all missing programs/modules with the distribution's
        package manager inside a terminal, then re-check."""
        s = ""
        for i in range(len(self.programs_exist)):
            if not self.programs_exist[i]:
                print self.programs_exist
                print self.program_installer
                print self.program_names
                print self.program_names[i]
                for j in self.program_installer[i][self.program_names[i]]:
                    s += j + " "
        if s:
            s = self.Terminal + ' -e "' + self.Sudo + " " + self.OS_Installer +" " + s +'"'
            print s
            #shellcommand = shlex.split('"' + s + '"')
            liStatus = subprocess.call(args = s, shell = True)
        s = ""
        for i in range(len(self.python_modules_exist)):
            if not self.python_modules_exist[i]:
                try:
                    print i, self.python_modules[i], self.python_installer[i]
                    print self.python_installer[i][self.python_modules[i]]
                    for j in self.python_installer[i][self.python_modules[i]]:
                        s += j + " "
                except:
                    pass
        if s:
            s = self.Terminal + ' -e "' + self.Sudo + " " +self.OS_Installer +' ' + s +'"'
            print "start Terminal with " + s
            #shellcommand = shlex.split(s )
            #print shellcommand
            liStatus = subprocess.call(args=s, shell=True )
            print liStatus
        self.checkEnvironment()
        self.on_check_missing(None, again=True)
    def configure_ssh(self):
        """Generate a root ssh key and authorize it for the install user,
        then continue with the PostgreSQL step."""
        self.dia.sshInfo1()
        print 'open ssh terminal 1'
        # generate key
        s = self.Terminal + ' -e "' + " /usr/bin/ssh-keygen -t rsa -f /root/cuon_server_key" + '"'
        liStatus = subprocess.call(args=s, shell=True )
        print 'ok, done', s
        #copy to user .ssh as id_rsa
        # NOTE(review): the command below ends with "/mkdir /root/.ssh" -
        # the leading slash looks like a typo for "mkdir"; confirm.
        s = self.Terminal + ' -e "' + self.Sudo +" mkdir /home/" + self.user + "/.ssh ; mv /root/cuon_server_key /home/" + self.user + "/.ssh/id_rsa ; mv /root/cuon_server_key.pub /home/" + self.user + "/.ssh/id_rsa.pub ; chown " + self.user + ":" + self.user + " /home/" + self.user + "/.ssh/id_rsa* ; /mkdir /root/.ssh " + '"'
        print s
        liStatus = subprocess.call(args=s, shell=True )
        # insert them to the authorized_keys
        # NOTE(review): the public key was renamed to id_rsa.pub above, yet
        # this reads cuon_server_key.pub from the user's .ssh - verify.
        s = self.Terminal + ' -e "' + self.Sudo +" cat /home/" + self.user + "/.ssh/cuon_server_key.pub >> /root/.ssh/authorized_keys " + '"'
        print s
        liStatus = subprocess.call(args=s, shell=True )
        # next Step postgres
        self.configure_postgres()
    def configure_postgres(self):
        """Relax pg_hba.conf auth for the cuon database, restart PostgreSQL
        and create the database, language and users."""
        # insert line at pg_hba.conf
        # at a line to the pg_hba.conf
        # check different locations, very bad (
        h = None
        z=0
        for j in ["/etc/postgresql/9.1/main/pg_hba.conf", "/etc/postgresql-9.1/pg_hba.conf", "/etc/postgresql-9.1/pg_hba.conf", "/etc/postgresql-9.0/pg_hba.conf"]:
            if os.path.exists(j):
                h= j
                break
            z+= 1
        if h:
            # Rewrite pg_hba.conf switching "peer" to "trust" for the local
            # postgres superuser line.
            f = open(h, 'r')
            s = f.readline()
            newConf = ""
            while s:
                #print s
                #print s[0:5]
                if s[0:5]== "local":
                    print "local found", s, s.find("postgres")
                    if s.find("postgres") > 0:
                        print "postgres in local found"
                        s = s.replace("peer", "trust")
                        print "replaced = ", s
                newConf += s
                s = f.readline()
            #print newConf
            f.close()
            f = open(h, 'w')
            f.write(newConf)
            f.close()
            f = open(h, 'a')
            f.write("# NEW generated Line for the cuon Database \nlocal cuon all trust\n")
            f.close()
            #s = self.Terminal + ' -e ' + self.Sudo + ' echo "# NEW generated Line for the cuon Database \nlocal cuon all trust \n" >> ' + h
            #print s
            #iStatus = subprocess.call(args=s, shell=True )
            s = None
            if self.OS in ["DEBIAN", "UBUNTU"]:
                #s = self.Terminal + ' -e ' + "/etc/init.d/postgresql restart "
                s = self.Terminal + ' -e ' + '"/etc/init.d/postgresql restart"'
            if self.OS == "GENTOO":
                # z indexes which pg_hba.conf location matched above.
                if z == 2:
                    s = self.Terminal + ' -e ' + '"/etc/init.d/postgresql-9.1 restart" '
                elif z == 3:
                    s = self.Terminal + ' -e ' + '"/etc/init.d/postgresql-9.0 restart " '
            if s:
                print s
                iStatus = subprocess.call(args=s, shell=True )
        else:
            sys.exit(0)
        #ok, create database and user
        #set the path
        pa = ""
        for j in ["/usr/lib/postgresql-9.1/bin", "/usr/lib/postgresql-9.0/bin","/usr/lib/postgresql/9.0/bin", "/usr/lib/postgresql/9.1/bin" ]:
            if os.path.exists(j):
                pa = j + "/"
                break
        s = self.Terminal + ' -e ' + pa + '"createdb -Upostgres -E utf-8 cuon" '
        print "create database = " , s
        liStatus = subprocess.call(args=s, shell=True )
        s = self.Terminal + ' -e ' + pa + '"createlang -Upostgres -d cuon plpgsql"'
        liStatus = subprocess.call(args=s, shell=True )
        s = self.Terminal + ' -e ' + pa + '"createuser -Upostgres -d -s cuon_admin"'
        liStatus = subprocess.call(args=s, shell=True )
        s = self.Terminal + ' -e ' + pa + '"createuser -Upostgres -D -S -R zope"'
        liStatus = subprocess.call(args=s, shell=True )
        self.configure_cuon()
    def configure_cuon(self):
        """Check out the cuon sources (svn r83), write cuon_setup.ini,
        install the bundled iCalendar package and copy example configs."""
        # version 83
        setupDir = "/home/" + self.user+ "/Projekte/"
        setupStartDir = setupDir + "cuon/cuon_client"
        try:
            sLocale = locale.getdefaultlocale()[0].split("_")[0]
        except Exception, params:
            print Exception, params
            sLocale = "us"
        if not os.path.exists(setupDir):
            os.mkdir(setupDir)
        os.chdir(setupDir)
        s = self.Terminal + ' -e ' + '"svn co -r 83 https://cuon.svn.sourceforge.net/svnroot/cuon cuon "'
        print s
        liStatus = subprocess.call(args=s, shell=True )
        print "get svn ", liStatus
        # now write the setup.ini
        os.chdir(setupStartDir)
        f = open("cuon_setup.ini", "w")
        s = "[local]\nxmlrpc_port = 7080\nprotocol = http\ndescription = Install on Localhost\nssh_port = 22\nip = 127.0.0.1\ndefault = True\nlocale = " + sLocale + "\ncuonadmin = cuon_admin"
        f.write(s)
        f.close()
        os.chdir(setupDir +"cuon/LGPL")
        s = self.Terminal + ' -e ' + '"tar -xvzf iCalendar-0.11.tgz ; cd iCalendar ; python setup.py install"'
        print s
        liStatus = subprocess.call(args=s, shell=True )
        dirList=os.listdir(setupDir)
        # NOTE(review): pw_gid is passed where os.chown expects a uid, and
        # fname below is relative to the current directory - confirm both.
        os.chown(setupDir, pwd.getpwnam(self.user).pw_gid ,grp.getgrnam( self.user).gr_gid )
        for fname in dirList:
            os.chown(fname, pwd.getpwnam(self.user).pw_gid ,grp.getgrnam( self.user).gr_gid )
        # now write install config files
        # Now write the config files
        server_ini = self.CUON_FS + "/server.ini"
        if not os.path.exists(server_ini):
            shutil.copy(setupDir +"cuon/cuon_server/examples/server.ini", self.CUON_FS )
            shutil.copy(setupDir +"cuon/cuon_server/examples/user_cfg", self.CUON_FS )
            shutil.copy(setupDir +"cuon/cuon_server/examples/clients.ini", self.CUON_FS )
            shutil.copy(setupDir +"cuon/cuon_server/examples/menus.ini", self.CUON_FS )
        try:
            self.cpServer, f = self.getParser(self.CUON_FS + "/server.ini")
            # Instances
            value = self.getConfigOption('INSTANCES','XMLRPC')
            if value:
                self.XMLRPC_INSTANCES = int(value)
        except:
            pass
class MessageDialogWindow(Gtk.Window):
def __init__(self):
self.Q1 = False
self.Q2 = False
def AbortInfo1(self):
dialog = Gtk.MessageDialog(self, 0, Gtk.MessageType.INFO,
Gtk.ButtonsType.OK, "A valid terminal is missing, we must abort, sorry.")
dialog.format_secondary_text(
"Install a Terminal Emulator like Gnome-Terminal, Konsole, terminal or similar")
dialog.run()
print "INFO dialog closed"
dialog.destroy()
def sshInfo1(self):
dialog = Gtk.MessageDialog(self, 0, Gtk.MessageType.INFO,
Gtk.ButtonsType.OK, "We install now a pgp key for root access to the cuon-server")
dialog.format_secondary_text(
"Please, press ONLY Enter in the terminal window !! No Passphrase is allowed !!")
dialog.run()
print "INFO dialog closed"
dialog.destroy()
def sshInfo2(self):
dialog = Gtk.MessageDialog(self, 0, Gtk.MessageType.INFO,
Gtk.ButtonsType.OK, "We install now a pgp key at the authorized_keys file")
dialog.format_secondary_text(
"Perhaps you must enter a password for su or sudo.")
dialog.run()
print "INFO dialog closed"
dialog.destroy()
def error1(self):
dialog = Gtk.MessageDialog(self, 0, Gtk.MessageType.ERROR,
Gtk.ButtonsType.CANCEL, "There are again missing program files")
dialog.format_secondary_text(
"Sorry, you shall try manually install the missing files")
dialog.run()
print "ERROR dialog closed"
dialog.destroy()
def warn(self, user ):
self.Q2 = False
dialog = Gtk.MessageDialog(self, 0, Gtk.MessageType.WARNING,
Gtk.ButtonsType.OK_CANCEL, "WARNING Warning WARNING")
dialog.format_secondary_text(
"This setup install a cuon-server on this computer. To do this, the user " + user + " get lasting root access\n Please, press cancel if you are not sure that you want this !!! PLEASE !!!!")
response = dialog.run()
if response == Gtk.ResponseType.OK:
print "WARN dialog closed by clicking OK button"
self.Q2 = True
elif response == Gtk.ResponseType.CANCEL:
print "WARN dialog closed by clicking CANCEL button"
dialog.destroy()
def wrong_requirement(self):
self.Q1 = False
dialog = Gtk.MessageDialog(self, 0, Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO, "Some Programs or Python Module are missing!")
dialog.format_secondary_text(
"Shall I try to install them ?")
response = dialog.run()
if response == Gtk.ResponseType.YES:
print "QUESTION dialog closed by clicking YES button"
self.Q1 = True
elif response == Gtk.ResponseType.NO:
print "QUESTION dialog closed by clicking NO button"
dialog.destroy()
#
#if [ -d $CalendarDir ]; then
# echo "dir iCal ok"
# cd $CalendarDir/iCalendar
# sudo python ./setup.py install
#
# ## create database
# #sudo su postgres
# #createdb -E utf-8 cuon
# #createlang -d cuon plpgsql
# #echo "now creating the user "zope" with no Rights"
# #createuser zope
# #echo "and this is your cuonadmin user with superrights"
# #createuser cuonadmin
#
#else
# echo " No Calendar found, something wrong! We stop it."
#fi
#
def getConfigOption(self, section, option, configParser = None):
    """Return the stripped string value of *option* in *section*.

    Looks the option up in *configParser* when one is given, otherwise in
    the instance's server parser (``self.cpServer``).  Returns ``None``
    when the section/option is missing or the value cannot be read.
    """
    value = None
    cps = configParser if configParser else self.cpServer
    if cps.has_option(section, option):
        try:
            value = cps.get(section, option).strip()
        except Exception:
            # BUG FIX: was a bare ``except:`` -- keep the original
            # best-effort behaviour (missing/broken value -> None) but no
            # longer swallow SystemExit/KeyboardInterrupt.
            value = None
    return value
def getParser(self, sFile):
    """Create a ConfigParser for the file *sFile*.

    Returns the parser together with the still-open file object, so the
    caller is responsible for closing it.

    NOTE(review): uses the Python 2 ``ConfigParser`` module and the
    deprecated ``readfp``; a Python 3 port would use
    ``configparser``/``read_file``.
    """
    cpParser = ConfigParser.ConfigParser()
    # The handle is deliberately returned instead of closed here.
    f = open(sFile)
    cpParser.readfp(f)
    return cpParser, f
# Script entry point (Python 2): echo the command line and, when a user
# name is supplied as the first argument, start the simple-server
# installer for that user.
print sys.argv
if len(sys.argv) > 1:
    print sys.argv[1]
    # NOTE(review): ``cssi`` is defined earlier in this file (not visible
    # here); it appears to be the installer driver started per user.
    t1 = cssi(user=sys.argv[1])
    t1.start()
|
CuonDeveloper/cuon
|
Distributionen/CuonServer/cuon-simple-server-install.py
|
Python
|
gpl-3.0
| 21,083
|
# -*- coding: utf-8 -*-
"""
# Plugins
Plugins allow flexible modification and execution of OpenNFT without touching the core codebase. Plugins can access data, process them in a specific way,
and they can be switched on and off according to the user's need.
Each plugin has to be a subclass of *Process class specified in pyniexp.mlplugins. It has to contain a header in a format of dictionary (called META) with prespecified keys:
- plugin_name: It is a freeform text which will be displayed in the plugin dialog and in the logs.
- plugin_time: It is an event timestamp as specified in opennft.eventrecorder.Times, and it determines the execution time of the plugin (so far only t3 is implemented)
- plugin_init: It is the initialization code of the plugin. "{}" can be used to refer to OpenNFT parameters as specified in the P parameter dictionary. It can be a list of
commands, in which case, the first is run to create the object, and the rest are executed afterwards.
- plugin_signal: It is an expression returning a logical value, and it specifies the condition under which the plugin can be executed.
- plugin_exec: It is the execution code of the plugin, and it usually calls the plugin's load_data method to transfer some data to the plugin.
*Process classes in pyniexp.mlplugins have an abstract/placeholder method called process, which should be overwritten to specify the operation on the data.
- the input to the process method of dataProcess (called data) is a one-dimensional numpy array
- the input to the process method of imageProcess (called image) is a multi-dimensional (usually 3D) numpy array as specified during initialization
# ROI step-wise GLM
This plugin demonstrates how to add your own approach (this one is a step-wise addition of each block) for ROI analysis.
__________________________________________________________________________
Copyright (C) 2016-2021 OpenNFT.org
Written by Tibor Auer
""" # noqa: E501
from pyniexp.mlplugins import dataProcess
from loguru import logger
from multiprocessing import Value, RawArray
from numpy import array, meshgrid, savetxt
import matplotlib.pyplot as plt
from os import path
# Plugin header consumed by OpenNFT's plugin loader; see the module
# docstring for the meaning of each key.
META = {
    "plugin_name": "ROI step-wise GLM",
    "plugin_time": "t4",  # according to opennft.eventrecorder.Times
    "plugin_init": [
        # "{...}" placeholders are substituted from OpenNFT's P parameter
        # dictionary; the first entry constructs the plugin object, the
        # second runs afterwards in the MATLAB base workspace.
        "ROIswGLM(int({NrROIs}),len({ProtNF}),r'{nfbDataFolder}')",
        "self.parent.eng.evalin('base','onp_roiswglm')"
    ],
    # Execute only once the AR(1)-filtered time series exists in MATLAB.
    "plugin_signal": "self.parent.eng.evalin('base','isfield(mainLoopData,\\\'tmp_rawTimeSeriesAR1\\\')')",
    "plugin_exec": "load_data(self.parent.eng.evalin('base','onp_roiswglm'))",
}
class ROIswGLM(dataProcess):
    """Step-wise ROI GLM plugin.

    Collects per-block ROI estimates streamed from OpenNFT, writes one
    text file per block and plots the estimates as 3D surfaces when the
    run finalises.
    """

    def __init__(self, nROIs, nBlocks, nfbDataFolder):
        # nROIs*nBlocks values are transferred per load_data call.
        super().__init__(nROIs*nBlocks, autostart=False)
        self.nfbDataFolder = nfbDataFolder
        self.nROIs = nROIs
        self.nBlocks = nBlocks
        # Shared flat buffer (nBlocks x nROIs x nBlocks doubles) filled
        # sequentially by process() in the worker process.
        self.rtdata = RawArray('d', [0]*self.nROIs*self.nBlocks*self.nBlocks)
        self.nData = Value('i', 0)
        self.start_process()

    def process(self, data):
        # All-zero frames arrive before real data is available; skip them.
        if any(array(data) != 0):
            for r in data:
                self.rtdata[self.nData.value] = r
                self.nData.value += 1
            logger.info(('ROIs: [ ' + '{:.3f} '*len(data) + ']').format(*data))

    def finalize_process(self):
        """Write per-block estimate files and plot one surface per ROI."""
        dat = array(self.rtdata).reshape(self.nBlocks, self.nROIs, self.nBlocks)
        for b in range(0, self.nBlocks):
            fname = path.join(path.normpath(self.nfbDataFolder), 'ROIswGLM_{:02d}.txt'.format(b+1))
            savetxt(fname=fname, X=dat[b,:,0:b+1].transpose(), fmt='%.3f', delimiter=',')
        # BUG FIX: meshgrid was called with two scalars, which produces
        # 1x1 coordinate grids and makes plot_surface fail against the
        # (nBlocks x nBlocks) Z data; build proper index grids instead.
        X, Y = meshgrid(range(self.nBlocks), range(self.nBlocks))
        for r in range(0, self.nROIs):
            # GENERALIZED: the subplot layout was hard-coded to two
            # columns (120 + r + 1); use one column per ROI instead,
            # which is identical for the original nROIs == 2 case.
            ax = plt.subplot(1, self.nROIs, r+1, projection='3d')
            ax.plot_surface(X, Y, dat[:,r,:])
        plt.show()
|
OpenNFT/OpenNFT
|
opennft/plugins/onp_roiswglm.py
|
Python
|
gpl-3.0
| 3,813
|
#!/usr/bin/env python
#
# Copyright 2014 Philipp Winter <phw@nymity.ch>
#
# This file is part of atlas tools.
#
# atlas tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# atlas tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with atlas tools. If not, see <http://www.gnu.org/licenses/>.
import urllib2
import logger
log = logger.get_logger()
def send_query(json_blurb, api_key):
    """
    Send HTTP POST request containing the JSON-formatted Atlas query.
    If successful, Atlas' API should return the JSON-formatted measurement ID.
    """
    url = "https://atlas.ripe.net/api/v1/measurement/?key=" + api_key
    log.debug("Sending %d bytes of JSON blurb to %s." % (len(json_blurb), url))

    # Build the POST request and mark both sides of the exchange as JSON.
    post = urllib2.Request(url, json_blurb)
    for header, content in [("Content-Type", "application/json"),
                            ("Accept", "application/json")]:
        post.add_header(header, content)

    try:
        reply = urllib2.urlopen(post)
    except urllib2.URLError as err:
        log.error("urllib2.urlopen failed: %s" % err)
        return None

    body = reply.read()
    log.debug("Received: %s" % body)

    return body
|
NullHypothesis/atlas_tools
|
rest_api/query_issuer.py
|
Python
|
gpl-3.0
| 1,569
|
# -*- coding: utf-8; -*-
# Copyright (C) 2015 - 2019 Lionel Ott
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import copy
import logging
import threading
import time
from xml.etree import ElementTree
from PyQt5 import QtWidgets
import gremlin
import gremlin.ui.common
import gremlin.ui.input_item
class DoubleTapContainerWidget(gremlin.ui.input_item.AbstractContainerWidget):

    """DoubleTap container for actions for double or single taps.

    Slot 0 of the profile data's action_sets holds the single-tap
    actions, slot 1 the double-tap actions.
    """

    def __init__(self, profile_data, parent=None):
        """Creates a new instance.

        :param profile_data the profile data represented by this widget
        :param parent the parent of this widget
        """
        super().__init__(profile_data, parent)

    def _create_action_ui(self):
        """Creates the UI components."""
        self.profile_data.create_or_delete_virtual_button()

        self.options_layout = QtWidgets.QHBoxLayout()

        # Activation delay
        self.options_layout.addWidget(
            QtWidgets.QLabel("<b>Double-tap delay: </b>")
        )
        self.delay_input = gremlin.ui.common.DynamicDoubleSpinBox()
        self.delay_input.setRange(0.1, 2.0)
        self.delay_input.setSingleStep(0.1)
        # NOTE(review): this 0.5 default is immediately overwritten by the
        # profile value on the following line.
        self.delay_input.setValue(0.5)
        self.delay_input.setValue(self.profile_data.delay)
        self.delay_input.valueChanged.connect(self._delay_changed_cb)
        self.options_layout.addWidget(self.delay_input)
        self.options_layout.addStretch()

        # Activation moment
        self.options_layout.addWidget(QtWidgets.QLabel("<b>Single/Double Tap: </b>"))
        self.activate_exclusive = QtWidgets.QRadioButton("exclusive")
        self.activate_combined = QtWidgets.QRadioButton("combined")
        if self.profile_data.activate_on == "combined":
            self.activate_combined.setChecked(True)
        else:
            self.activate_exclusive.setChecked(True)
        self.activate_combined.toggled.connect(self._activation_changed_cb)
        self.activate_exclusive.toggled.connect(self._activation_changed_cb)
        self.options_layout.addWidget(self.activate_exclusive)
        self.options_layout.addWidget(self.activate_combined)

        self.action_layout.addLayout(self.options_layout)

        # Show either an action selector (empty slot) or the configured
        # action widget for each of the two tap slots.
        if self.profile_data.action_sets[0] is None:
            self._add_action_selector(
                lambda x: self._add_action(0, x),
                "Single Tap"
            )
        else:
            self._create_action_widget(
                0,
                "Single Tap",
                self.action_layout,
                gremlin.ui.common.ContainerViewTypes.Action
            )

        if self.profile_data.action_sets[1] is None:
            self._add_action_selector(
                lambda x: self._add_action(1, x),
                "Double Tap"
            )
        else:
            self._create_action_widget(
                1,
                "Double Tap",
                self.action_layout,
                gremlin.ui.common.ContainerViewTypes.Action
            )

    def _create_condition_ui(self):
        # Condition view only renders widgets for slots that hold actions.
        if self.profile_data.activation_condition_type == "action":
            if self.profile_data.action_sets[0] is not None:
                self._create_action_widget(
                    0,
                    "Single Tap",
                    self.activation_condition_layout,
                    gremlin.ui.common.ContainerViewTypes.Condition
                )
            if self.profile_data.action_sets[1] is not None:
                self._create_action_widget(
                    1,
                    "Double Tap",
                    self.activation_condition_layout,
                    gremlin.ui.common.ContainerViewTypes.Condition
                )

    def _add_action_selector(self, add_action_cb, label):
        """Adds an action selection UI widget.

        :param add_action_cb function to call when an action is added
        :param label the description of the action selector
        """
        action_selector = gremlin.ui.common.ActionSelector(
            self.profile_data.get_input_type()
        )
        action_selector.action_added.connect(add_action_cb)

        group_layout = QtWidgets.QVBoxLayout()
        group_layout.addWidget(action_selector)
        group_layout.addStretch(1)
        group_box = QtWidgets.QGroupBox(label)
        group_box.setLayout(group_layout)

        self.action_layout.addWidget(group_box)

    def _create_action_widget(self, index, label, layout, view_type):
        """Creates a new action widget.

        :param index the index at which to store the created action
        :param label the name of the action to create
        :param layout the layout the created widget is added to
        :param view_type the view (action or condition) being rendered
        """
        widget = self._create_action_set_widget(
            self.profile_data.action_sets[index],
            label,
            view_type
        )
        layout.addWidget(widget)
        widget.redraw()
        widget.model.data_changed.connect(self.container_modified.emit)

    def _add_action(self, index, action_name):
        """Adds a new action to the container.

        :param index the tap slot (0 = single, 1 = double) to add to
        :param action_name the name of the action to add
        """
        plugin_manager = gremlin.plugin_manager.ActionPlugins()
        action_item = plugin_manager.get_class(action_name)(self.profile_data)
        if self.profile_data.action_sets[index] is None:
            self.profile_data.action_sets[index] = []
        self.profile_data.action_sets[index].append(action_item)
        self.profile_data.create_or_delete_virtual_button()
        self.container_modified.emit()

    def _delay_changed_cb(self, value):
        """Updates the activation delay value.

        :param value the value after which the double-tap action activates
        """
        self.profile_data.delay = value

    def _activation_changed_cb(self, value):
        """Updates the activation condition state.

        :param value whether or not the selection was toggled - ignored
        """
        if self.activate_combined.isChecked():
            self.profile_data.activate_on = "combined"
        else:
            self.profile_data.activate_on = "exclusive"

    def _handle_interaction(self, widget, action):
        """Handles interaction icons being pressed on the individual actions.

        :param widget the action widget on which an action was invoked
        :param action the type of action being invoked
        """
        index = self._get_widget_index(widget)
        if index != -1:
            # When slot 0 is empty only slot 1's widget is shown, so the
            # widget found at position 0 actually belongs to slot 1.
            if index == 0 and self.profile_data.action_sets[0] is None:
                index = 1
            self.profile_data.action_sets[index] = None
            self.container_modified.emit()

    def _get_window_title(self):
        """Returns the title to use for this container.

        :return title to use for the container
        """
        if self.profile_data.is_valid():
            return "Double Tap: ({}) / ({})".format(
                ", ".join([a.name for a in self.profile_data.action_sets[0]]),
                ", ".join([a.name for a in self.profile_data.action_sets[1]])
            )
        else:
            return "DoubleTap"
class DoubleTapContainerFunctor(gremlin.base_classes.AbstractFunctor):

    """Executes the contents of the associated DoubleTap container."""

    def __init__(self, container):
        """Creates execution graphs for both tap types.

        :param container the DoubleTapContainer holding the profile data
        """
        super().__init__(container)
        self.single_tap = gremlin.execution_graph.ActionSetExecutionGraph(
            container.action_sets[0]
        )
        self.double_tap = gremlin.execution_graph.ActionSetExecutionGraph(
            container.action_sets[1]
        )
        self.delay = container.delay
        self.activate_on = container.activate_on
        # Timestamp of the first press; 0 means no pending first tap.
        self.start_time = 0
        # Timer used to defer the single tap in "exclusive" mode.
        self.double_action_timer = None
        self.tap_type = None
        # Press-time snapshots replayed when the deferred single tap fires.
        self.value_press = None
        self.event_press = None

    def process_event(self, event, value):
        """Dispatches a button event as a single and/or double tap.

        :param event the raw input event being processed
        :param value the (virtual) button state derived from the event
        """
        # TODO: Currently this does not handle hat or axis events, however
        # virtual buttons created on those inputs is supported
        if not isinstance(value.current, bool):
            logging.getLogger("system").warning(
                "Invalid data type received in DoubleTap container: {}".format(
                    type(event.value)
                )
            )
            return False

        # Copy state when input is pressed
        if value.current:
            self.value_press = copy.deepcopy(value)
            self.event_press = event.clone()

        # Execute double tap logic
        if value.current:
            # Second activation within the delay, i.e. second tap
            if (self.start_time + self.delay) > time.time():
                # Prevent repeated double taps from repeated button presses
                self.start_time = 0
                self.tap_type = "double"
                if self.activate_on == "exclusive":
                    self.double_action_timer.cancel()
            # First activation within the delay, i.e. first tap
            else:
                self.start_time = time.time()
                self.tap_type = "single"
                if self.activate_on == "exclusive":
                    self.double_action_timer = \
                        threading.Timer(self.delay, self._single_tap)
                    self.double_action_timer.start()
        # Input is being released at this point
        elif self.double_action_timer and self.double_action_timer.is_alive():
            # if releasing single tap before delay
            # we will want to send a short press and release
            self.double_action_timer.cancel()
            self.double_action_timer = threading.Timer(
                (self.start_time + self.delay) - time.time(),
                lambda: self._single_tap(event, value)
            )
            self.double_action_timer.start()

        # Forward the event to the matching execution graph(s); in
        # "combined" mode a double tap also fires the single-tap actions.
        if self.tap_type == "double":
            self.double_tap.process_event(event, value)
            if self.activate_on == "combined":
                self.single_tap.process_event(event, value)
        elif self.activate_on != "exclusive":
            self.single_tap.process_event(event, value)
        # NOTE(review): the success path returns None while the guard
        # above returns False -- confirm that callers ignore the result.

    def _single_tap(self, event_release=None, value_release=None):
        """Callback executed, when the delay expires."""
        self.single_tap.process_event(self.event_press, self.value_press)
        if event_release:
            # Short pause so the synthetic press registers before release.
            time.sleep(0.05)
            self.single_tap.process_event(event_release, value_release)
class DoubleTapContainer(gremlin.base_classes.AbstractContainer):

    """A container with two actions which are triggered based on the delay
    between the taps.

    A single tap will run the first action while a double tap will run the
    second action.
    """

    name = "Double Tap"
    tag = "double_tap"
    functor = DoubleTapContainerFunctor
    widget = DoubleTapContainerWidget
    input_types = [
        gremlin.common.InputType.JoystickAxis,
        gremlin.common.InputType.JoystickButton,
        gremlin.common.InputType.JoystickHat,
        gremlin.common.InputType.Keyboard
    ]
    interaction_types = [
        gremlin.ui.input_item.ActionSetView.Interactions.Edit,
    ]

    def __init__(self, parent=None):
        """Creates a new instance.

        :param parent the InputItem this container is linked to
        """
        super().__init__(parent)
        # action_sets[0] = single-tap actions, action_sets[1] = double-tap.
        self.action_sets = [[], []]
        self.delay = 0.5
        # NOTE(review): the constructor default is "exclusive" while
        # _parse_xml falls back to "combined" -- confirm which default is
        # intended for profiles missing the attribute.
        self.activate_on = "exclusive"

    def _parse_xml(self, node):
        """Populates the container with the XML node's contents.

        :param node the XML node with which to populate the container
        """
        super()._parse_xml(node)
        self.delay = gremlin.profile.safe_read(node, "delay", float, 0.5)
        self.activate_on = \
            gremlin.profile.safe_read(node, "activate-on", str, "combined")

    def _generate_xml(self):
        """Returns an XML node representing this container's data.

        :return XML node representing the data of this container
        """
        node = ElementTree.Element("container")
        node.set("type", DoubleTapContainer.tag)
        node.set("delay", str(self.delay))
        node.set("activate-on", self.activate_on)
        for actions in self.action_sets:
            as_node = ElementTree.Element("action-set")
            for action in actions:
                as_node.append(action.to_xml())
            node.append(as_node)
        return node

    def _is_container_valid(self):
        """Returns whether or not this container is configured properly.

        :return True if the container is configured properly, False otherwise
        """
        return any(len(action_set) for action_set in self.action_sets)
# Plugin definitions
# Module-level attributes read by Joystick Gremlin's container plugin loader.
version = 1
name = "double_tap"
create = DoubleTapContainer
|
WhiteMagic/JoystickGremlin
|
container_plugins/double_tap/__init__.py
|
Python
|
gpl-3.0
| 13,349
|
# -*- mode: python; tab-width: 4; indent-tabs-mode: nil -*-
from gi.repository import Gtk
import os
import shutil
import gettext
from cloudsn.core import config, provider, account, indicator, keyring
from cloudsn import logger
import cloudsn.core.utils as coreutils
# Custom Gtk dialog response id; presumably signals "stop the checker
# daemon" -- its usage is not visible in this module, verify against callers.
STOP_RESPONSE = 1
class MainWindow:
    """Singleton accounts/preferences window of Cloud Services Notifications.

    Wires the GtkBuilder UI ("preferences.ui") to the account, provider,
    indicator and keyring managers.  (Python 2 / PyGObject code.)
    """
    __default = None

    def __init__ (self):
        # NOTE(review): raising the existing instance is an unusual
        # singleton guard -- MainWindow is not an exception type, so this
        # ``raise`` would itself fail; confirm the intended behaviour.
        if MainWindow.__default:
            raise MainWindow.__default
        self.builder = None
        self.window = None
        # When True the window behaves as a standalone dialog and closing
        # it quits the Gtk main loop.
        self.dialog_only = False
        self.pref_dialog = None
        self.config = config.SettingsController.get_instance()
        self.pm = provider.ProviderManager.get_instance()
        self.am = account.AccountManager.get_instance()
        self.im = indicator.IndicatorManager.get_instance()
        self.km = keyring.KeyringManager.get_instance()
        self.am.connect ("account-deleted", self.account_deleted_cb)

    @staticmethod
    def get_instance():
        # Lazily create and return the shared MainWindow instance.
        if not MainWindow.__default:
            MainWindow.__default = MainWindow()
        return MainWindow.__default

    def get_main_account_selected (self):
        """Return (account, tree iter) of the selected row, or (None, None)."""
        selection = self.main_account_tree.get_selection()
        if selection:
            model, paths = selection.get_selected_rows()
            for path in paths:
                citer = self.main_store.get_iter(path)
                account_name = self.main_store.get_value(citer, 1)
                acc = self.am.get_account(account_name)
                return acc, citer
        return None, None

    def __get_account_date(self, acc):
        # Format the account's last update for display ('' if never updated).
        last_update = ''
        dt = acc.get_last_update()
        if dt:
            last_update = dt.strftime("%Y-%m-%d %H:%M:%S")
        return last_update

    def select_provider_combo (self, providers_combo, name):
        #Select the provider and disable item
        i=0
        for row in providers_combo.get_model():
            if row[1] == name:
                providers_combo.set_active (i)
                break
            i += 1

    def load_window(self):
        """Build the main window from the UI file and populate its stores."""
        from cloudsn.core.controller import Controller
        self.builder=Gtk.Builder()
        self.builder.set_translation_domain("cloudsn")
        self.builder.add_from_file(config.add_data_prefix("preferences.ui"))
        self.builder.connect_signals(self)
        self.window=self.builder.get_object("main_window")
        self.window.connect ("delete-event", self.window_delete_event_cb)
        self.window.set_icon(config.get_cloudsn_icon())
        self.main_account_tree = self.builder.get_object("main_account_tree");
        self.main_store = self.builder.get_object("account_store");
        self.providers_combo = self.builder.get_object("providers_combo");
        self.providers_store = self.builder.get_object("providers_store");
        self.play_button = self.builder.get_object("tool_play");
        self.read_button = self.builder.get_object("main_read_button");
        #Populate accounts
        for acc in self.am.get_accounts():
            self.main_store.append([acc.get_icon(), acc.get_name(),
                self.__get_account_date(acc), acc.get_active(),
                acc.get_total_unread()])
        #Populate providers
        for prov in self.pm.get_providers():
            self.providers_store.append([prov.get_icon(), prov.get_name()])
        #Update the last check date
        Controller.get_instance().connect ("account-checked",
            self.__on_account_checked_cb)
        Controller.get_instance().connect ("account-check-error",
            self.__on_account_check_error_cb)
        self.set_play_active (Controller.get_instance().get_active())

    def run(self):
        """Build and show the main window."""
        self.load_window()
        self.window.show()

    def set_play_active(self, active):
        """Sync the play/pause toolbar button with the daemon state."""
        self.play_button.set_active(active)
        if active:
            self.play_button.set_stock_id(Gtk.STOCK_MEDIA_PAUSE)
            self.play_button.set_tooltip_text(
                _("Press to pause the checker daemon"))
        else:
            self.play_button.set_stock_id(Gtk.STOCK_MEDIA_PLAY)
            self.play_button.set_tooltip_text(
                _("Press to start the checker daemon"))

    def preferences_action_activate_cb (self, widget, data=None):
        """Show the preferences dialog and persist the settings on close."""
        self.pref_dialog = self.builder.get_object("preferences_dialog")
        self.pref_dialog.set_transient_for(self.window)
        self.pref_dialog.set_destroy_with_parent (True)
        indicator_combo = self.builder.get_object("indicator_combo")
        indicators_store = self.builder.get_object("indicators_store");
        keyring_combo = self.builder.get_object("keyring_combo")
        keyring_store = self.builder.get_object("keyring_store");
        minutes=self.builder.get_object("minutes_spin")
        max_not_spin=self.builder.get_object("max_not_spin")
        startup_check = self.builder.get_object("startup_check")
        enable_sounds_check = self.builder.get_object("enable_sounds_check")
        minutes.set_value (float(self.config.get_prefs()["minutes"]))
        max_not_spin.set_value (float(self.config.get_prefs()["max_notifications"]))
        if os.path.exists(config.get_startup_file_path()):
            startup_check.set_active(True)
        else:
            startup_check.set_active(False)
        enable_sounds_check.set_active(coreutils.get_boolean(self.config.get_prefs()["enable_sounds"]))
        #Populate indicator combo
        i=0
        indicator_name = self.config.get_prefs()["indicator"]
        indicators_store.clear()
        for indi in self.im.get_indicators():
            indicators_store.append([indi.get_name()])
            if indi.get_name() == indicator_name:
                indicator_combo.set_active(i)
            i+=1
        i=0
        keyring_id = self.config.get_prefs()["keyring"]
        keyring_store.clear()
        for k in self.km.get_managers():
            keyring_store.append([k.get_name(), k.get_id()])
            if k.get_id() == keyring_id:
                keyring_combo.set_active(i)
            i+=1
        # NOTE(review): the dialog response is never checked -- both OK
        # and Cancel fall through to saving the preferences below.
        response = self.pref_dialog.run()
        self.pref_dialog.hide()
        self.config.set_pref ("minutes", minutes.get_value())
        self.config.set_pref ("max_notifications", max_not_spin.get_value())
        self.config.set_pref ("enable_sounds", enable_sounds_check.get_active())
        iiter = indicator_combo.get_active_iter()
        if iiter:
            self.config.set_pref ("indicator", indicators_store.get_value(iiter,0))
        iiter = keyring_combo.get_active_iter()
        selected = keyring_store.get_value(iiter,1)
        for m in self.km.get_managers():
            logger.debug("selected %s, current %s" % (selected, m.get_id()))
            if m.get_id() == selected:
                self.km.set_manager(m)
                break
        self.config.set_pref ("keyring", selected)
        #Check startup checkbox
        if startup_check.get_active():
            if not os.path.exists(config.get_startup_file_path()):
                if not os.path.exists(config.get_startup_file_dir()):
                    os.makedirs(config.get_startup_file_dir())
                shutil.copyfile(config.add_data_prefix("cloudsn.desktop"),
                    config.get_startup_file_path())
        else:
            if os.path.exists(config.get_startup_file_path()):
                os.remove (config.get_startup_file_path())
        self.config.save_prefs()

    def about_action_activate_cb (self, widget, data=None):
        # NOTE(review): ``about`` is not imported in this module --
        # confirm where show_about_dialog is supposed to come from.
        about.show_about_dialog()

    def quit_action_activate_cb (self, widget, data=None):
        Gtk.main_quit()

    def close_action_activate_cb (self, widget, data=None):
        # Dialog-only mode quits the app; otherwise just hide the window.
        if self.dialog_only:
            Gtk.main_quit()
        else:
            self.window.hide()

    def main_delete_button_clicked_cb(self, widget, data=None):
        """Ask for confirmation and delete the selected account."""
        acc, citer = self.get_main_account_selected()
        if not acc:
            return
        msg = (_('Are you sure you want to delete the account %s?')) % (acc.get_name());
        dia = Gtk.MessageDialog(self.window,
            Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
            Gtk.MessageType.QUESTION,
            Gtk.ButtonsType.YES_NO,
            msg)
        dia.show_all()
        if dia.run() == Gtk.ResponseType.YES:
            self.am.del_account(acc, True)
        dia.hide()

    def main_update_button_clicked_cb(self, widget, data=None):
        """Force a check of the selected account."""
        from cloudsn.core.controller import Controller
        acc, citer = self.get_main_account_selected()
        if acc:
            Controller.get_instance().update_account(acc)

    def main_read_button_clicked_cb(self, widget, data=None):
        """Mark the selected account as read and refresh its row."""
        acc, citer = self.get_main_account_selected()
        if acc and acc.can_mark_read():
            acc.mark_read()
            self.__on_account_checked_cb(None, acc)

    def main_account_tree_cursor_changed_cb(self, widget, data=None):
        # Enable the "mark read" button only for accounts that support it.
        acc, citer = self.get_main_account_selected()
        if acc and acc.can_mark_read():
            self.read_button.set_sensitive(True)
        else:
            self.read_button.set_sensitive(False)

    def tool_play_toggled_cb (self, widget, data=None):
        """Start/stop the checker daemon from the toolbar toggle."""
        from cloudsn.core.controller import Controller
        self.set_play_active(widget.get_active())
        Controller.get_instance().set_active(widget.get_active())

    def account_deleted_cb(self, widget, acc):
        # Remove the deleted account's row from the tree store.
        selection = self.main_account_tree.get_selection()
        if selection:
            model, paths = selection.get_selected_rows()
            for path in paths:
                citer = self.main_store.get_iter(path)
                self.main_store.remove(citer)

    def window_delete_event_cb (self, widget, event, data=None):
        # NOTE(review): no True is returned here, so the default
        # delete-event handling still runs after hiding -- confirm.
        if self.dialog_only:
            Gtk.main_quit()
        else:
            self.window.hide()

    def active_cell_toggled_cb(self, cell, path, data=None):
        """Toggle the per-account "active" checkbox in the tree view."""
        active = not self.main_store[path][3]
        self.main_store[path][3] = active
        account_name = self.main_store[path][1]
        acc = self.am.get_account(account_name)
        self.am.set_account_active(acc, active)

    def new_action_activate_cb(self, widget, data=None):
        """Show the 'new account' dialog and create the account on accept."""
        self.new_dialog = self.builder.get_object("account_new_dialog")
        account_name_entry = self.builder.get_object("account_name_entry");
        self.provider_content = self.builder.get_object("provider_content")
        self.activate_command_entry = self.builder.get_object("activate_command_entry")
        self.provider_content.account = None
        self.new_dialog.set_transient_for(self.window)
        self.new_dialog.set_destroy_with_parent (True)
        account_name_entry.set_text("")
        account_name_entry.set_sensitive (True)
        self.providers_combo.set_sensitive (True)
        self.providers_combo.set_active(-1)
        # Clear any provider-specific widget left from a previous run.
        for c in self.provider_content.get_children():
            if c:
                self.provider_content.remove(c)
                c.destroy()
        # Re-show the dialog until the input validates or it is cancelled.
        end = False
        while not end:
            response = self.new_dialog.run()
            if response == 0:
                try:
                    if len(self.provider_content.get_children())==0:
                        raise Exception(_("You must select a provider and fill the data"))
                    acc_name = account_name_entry.get_text()
                    if acc_name == '':
                        raise Exception(_("You must fill the account name"))
                    custom_widget = self.provider_content.get_children()[0]
                    citer = self.providers_combo.get_active_iter()
                    provider_name = self.providers_store.get_value (citer, 1)
                    provider = self.pm.get_provider(provider_name)
                    acc = provider.set_account_data_from_widget(acc_name, custom_widget)
                    acc.set_activate_command (self.activate_command_entry.get_text())
                    self.am.add_account(acc)
                    self.am.save_account(acc)
                    self.main_store.append([acc.get_icon(),
                        acc.get_name(),self.__get_account_date(acc),
                        acc.get_active(), acc.get_total_unread()])
                    end = True
                except Exception, e:
                    logger.error ('Error adding a new account: %s', e)
                    md = Gtk.MessageDialog(self.window,
                        Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR,
                        Gtk.ButtonsType.CLOSE,
                        _('Error adding a new account: ') + str(e))
                    md.run()
                    md.destroy()
            else:
                end = True
        self.new_dialog.hide()

    def edit_action_activate_cb(self, widget, data=None):
        """Show the account dialog pre-filled for editing the selection."""
        acc, citer = self.get_main_account_selected()
        if not acc:
            return
        self.new_dialog = self.builder.get_object("account_new_dialog")
        account_name_entry = self.builder.get_object("account_name_entry");
        account_name_entry.set_text(acc.get_name())
        #TODO the name cannot be modified by the moment
        account_name_entry.set_sensitive (False)
        self.provider_content = self.builder.get_object("provider_content")
        self.activate_command_entry = self.builder.get_object("activate_command_entry")
        self.provider_content.account = acc
        self.new_dialog.set_transient_for(self.window)
        self.new_dialog.set_destroy_with_parent (True)
        #Select the provider and disable item
        providers_combo = self.builder.get_object("providers_combo")
        providers_combo.set_active(-1)
        self.select_provider_combo (providers_combo, acc.get_provider().get_name())
        providers_combo.set_sensitive (False)
        # Re-show the dialog until the input validates or it is cancelled.
        end = False
        while not end:
            response = self.new_dialog.run()
            if response == 0:
                try:
                    acc_name = account_name_entry.get_text()
                    if acc_name == '':
                        raise Exception(_("You must fill the account name"))
                    custom_widget = self.provider_content.get_children()[0]
                    acc = acc.get_provider().set_account_data_from_widget(acc_name, custom_widget, acc)
                    acc.set_activate_command (self.activate_command_entry.get_text())
                    self.am.save_account(acc)
                    end = True
                except Exception, e:
                    logger.exception ('Error editing the account: %s', e)
                    md = Gtk.MessageDialog(self.window,
                        Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR,
                        Gtk.ButtonsType.CLOSE,
                        _('Error editing the account: ') + str(e))
                    md.run()
                    md.destroy()
            else:
                end = True
        self.new_dialog.hide()

    def update_all_action_activate_cb (self, widget, data=None):
        """Force a check of every configured account."""
        from cloudsn.core.controller import Controller
        Controller.get_instance().update_accounts()

    def providers_combo_changed_cb(self, widget, data=None):
        """Swap in the data widget of the newly selected provider."""
        ch = self.provider_content.get_children()
        for c in ch:
            self.provider_content.remove(c)
            c.destroy()
        citer = self.providers_combo.get_active_iter()
        if not citer:
            return
        provider_name = self.providers_store.get_value (citer, 1)
        provider = self.pm.get_provider(provider_name)
        if provider.get_import_error():
            md = Gtk.MessageDialog(self.window,
                Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR,
                Gtk.ButtonsType.CLOSE,
                _('Error loading the provider: ') + str(provider.get_import_error()))
            md.run()
            md.destroy()
            return
        box = provider.get_account_data_widget(self.provider_content.account)
        self.provider_content.add(box)
        if self.provider_content.account:
            self.activate_command_entry.set_text(self.provider_content.account.get_activate_command())
        box.show_all()

    def __on_account_checked_cb(self, widget, acc):
        # Refresh the account's row after a successful check.
        for row in self.main_store:
            if row[1] == acc.get_name():
                row[0] = acc.get_icon()
                row[2] = self.__get_account_date(acc)
                row[4] = acc.get_total_unread()

    def __on_account_check_error_cb(self, widget, acc):
        # Refresh the account's row after a failed check (icon may change).
        for row in self.main_store:
            if row[1] == acc.get_name():
                row[0] = acc.get_icon()
                row[2] = self.__get_account_date(acc)
                row[4] = acc.get_total_unread()
def main ():
    """Stand-alone entry point: run the accounts window as a dialog."""
    import cloudsn.cloudsn
    import cloudsn.core.controller
    cloudsn.cloudsn.setup_locale_and_gettext()
    #account.AccountManager.get_instance().load_accounts()
    # Instantiating the controller wires up the account-check signals.
    cloudsn.core.controller.Controller.get_instance()
    win = MainWindow.get_instance()
    win.dialog_only = True
    win.run()
    Gtk.main()

if __name__ == "__main__":
    main()
|
chuchiperriman/cloud-services-notifications
|
src/cloudsn/ui/window.py
|
Python
|
gpl-3.0
| 17,147
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This program (as part of SHEAR) searches for adapter sequences
and generates an adapter file for use with SHEAR/Scythe.
"""
import sys
import os
import argparse
import re
import gzip
_LICENSE = """
SHEAR: Simple Handler for Error and Adapter Removal
James B. Pease
http://www.github.com/jbpease/shear
This file is part of SHEAR.
SHEAR is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SHEAR is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SHEAR. If not, see <http://www.gnu.org/licenses/>.
"""
# Known adapter patterns (regular expressions, 5'->3').  Multi-line entries
# use deliberate implicit string concatenation to keep barcode groups
# readable; every list element must therefore end with an explicit comma.
ADAPTERS = [
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases] ATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]CAATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]GTATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]GAATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]CGATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]ACATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]TTATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]TAATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases]ATATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[7 bases] ATCTCGTATGCCGTCTTCTGCTTG
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC[6 bases] ATCTCGTATGCCGTCTTCTGCTTG
    "A?GATCGGAAGAGCACACGTCTGAACTCCAGTCAC([ATGC]{6,8})"
    "ATCTCGTATGCCGTCTTCTGCTTG",
    "AATGATACGGCGACCACCGAGATCTACAC([ATGC]{6,8})"
    "ACACTCTTTCCCTACACGACGCTCTTCCGATCT",
    # AATGATACGGCGACCACCGAGATCTACAC[5 bases]ACACTCTTTCCCTACACGACGCTCTTCCGATCT
    # AATGATACGGCGACCACCGAGATCTACAC[5 bases]TCGTCGGCAGCGTC
    # AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT
    # AATGATACGGCGACCACCGAGATCTACACGTTCAGAGTTCTACAGTCCGA
    # AATGATACGGCGACCACCGACAGGTTCAGAGTTCTACAGTCCGA
    "AATGATACGGCGACCACCGAGATCTACAC([ACGT]{5,7})"
    "ACACTCTTTCCCTACACGACGCTCTTCCGATCT",
    "AATGATACGGCGACCACCGAGATCTACAC([ACGT]{5,7})TCGTCGGCAGCGTC",
    "AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT",
    "AATGATACGGCGACCACCGAGATCTACACGTTCAGAGTTCTACAGTCCGA",
    "AATGATACGGCGACCACCGACAGGTTCAGAGTTCTACAGTCCGA",
    # CAAGCAGAAGACGGCATACGAGAT[6 bases]GTGACTGGAGTTCAGACGTGTGCTCTTCCGATCT
    # CAAGCAGAAGACGGCATACGAGAT[6 bases]GTGACTGGAGTTCCTTGGCACCCGAGAATTCCA
    # CAAGCAGAAGACGGCATACGAGATCGGTCTCGGCATTCCTGCTGAACCGCTCTTCCGATCT
    # CAAGCAGAAGACGGCATACGAGAT[6 bases]GTGACTGGAGTTC
    # CAAGCAGAAGACGGCATACGAGAT[7 bases]GTCTCGTGGGCTCGG
    # CAAGCAGAAGACGGCATACGAGCTCTTCCGATCT
    "CAAGCAGAAGACGGCATACGAGAT([ATGC]{5,7})GTGACTGGAGTTCAGACGTGTGCTCTTCCGATCT",
    "CAAGCAGAAGACGGCATACGAGAT([ATGC]{5,7})GTGACTGGAGTTCCTTGGCACCCGAGAATTCCA",
    "CAAGCAGAAGACGGCATACGAGATCGGTCTCGGCATTCCTGCTGAACCGCTCTTCCGATCT",
    "CAAGCAGAAGACGGCATACGAGAT([ATGC]{5,7})GTGACTGGAGTTC",
    "CAAGCAGAAGACGGCATACGAGAT([ATGC]{5,7})GTCTCGTGGGCTCGG",
    "CGGTTCTTCCCTGCCGAACCCTATCTTCGTCGGCAGCGTCAGATGTGTATAAGAGACAGTACGCTTGCAT",
    "TTTTTAATGATACGGCGACCACCGAGATCTACACACACTCTTTCCCTACACGACGCTCTTCCGATCT",
    "ATGATACGGCGACCACCGAGATCTACACGTTCAGAGTTCTACAGTCCGACG",
    # ACAGGTTCAGAGTTCTACAGTCCGAC
    # ACAGGTTCAGAGTTCTACAGTCCGACATG
    # CCGACAGGTTCAGAGTTCTACAGTCCGACATG
    # CGACAGGTTCAGAGTTCTACAGTCCGACGATC
    # GTTCAGAGTTCTACAGTCCGACGATC
    "C?C?G?A?C?A?G?GTTCAGAGTTCTACAGTCCGACA?T?G?A?T?C?",
    #
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC
    # AGATCGGAAGAGCACACGTCT
    # GATCGGAAGAGCACACGTCTGAACTCCAGTCAC
    # AGATCGGAAGAGCACACGTCT
    # GATCGGAAGAGCACACGTCT
    "A?GATCGGAAGAGCACACGTCTGAACTCCAGTCAC",
    "A?GATCGGAAGAGCACACGTCT",
    "AGATCGGAAGAGCGTCGGTGTAGGGAAAG",
    "AGATCGGAAGAGCGGTTCAGCAGGAATGCCGAG",
    "AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGTA",
    "ATCTCGTATGCCGTCTTCTGCTTG",
    "CAAGCAGAAGACGGCATACGA",
    "CAAGCAGAAGACGGCATACGAGCTCTTCCGATCT",
    "ACACTCTTTCCCTACACGACGCTCTTCCGATCT",
    "CGGTCTCGGCATTCCTGCTGAACCGCTCTTCCGATCT",
    "CTGTCTCTTATACACATCTCCGAGCCCACGAGAC",
    "CTGTCTCTTATACACATCTGACGCTGCCGACGA",
    "GAAUUCCACCACGUUCCCGUGG",
    "GATCGGAAGAGCGGTTCAGCAGGAATGCCGAG",
    "GATCGGAAGAGCTCGTATGCCGTCTTCTGCTTG",
    "GATCGTCGGACTGTAGAACTCTGAAC",
    "GCCTTGGCACCCGAGAATTCCA",
    "GTCTCGTGGGCTCGGAGATGTGTATAAGAGACAG",
    "GTGACTGGAGTTCAGACGTGTGCTCTTCCGATCT",
    "GUUCAGAGUUCUACAGUCCGACGAUC",
    "TCGGACTGTAGAACTCTGAAC",
    "TCGTATGCCGTCTTCTGCTTG",
    "TCGTCGGCAGCGTCAGATGTGTATAAGAGACAG",
    "TGGAATTCTCGGGTGCCAAGG",
    "AGATCGGAAGAG",
    "CTGTCTCTTATA",
    # BUGFIX: a missing comma here used to merge the next two short
    # adapters into one bogus 23-base entry via implicit concatenation.
    "CGCCTTGGCCGT",
    "ATCGTCGGACT",
    "GGAATTCTCGG",
]
def gopen(path, mode):
    """Open *path*, transparently using gzip when the name ends in '.gz'.

    *mode* is passed straight through to gzip.open()/open(); the caller
    owns (and must close) the returned file object.
    """
    # Explicit conditional instead of the fragile `cond and a or b` idiom,
    # which silently falls through when the first operand is falsy.
    if path.endswith('.gz'):
        return gzip.open(path, mode)
    return open(path, mode)
def revcom(adapter):
    """Return the reverse complement of a regex-like adapter pattern.

    Walks the pattern left to right, keeping each base together with an
    optional trailing quantifier ('?', '+', '*'), and copying "(...)"
    groups (barcode wildcards like "([ATGC]{6,8})") verbatim as single
    tokens.  The token list is then reversed and plain-base tokens are
    complemented; group and '?'-quantified tokens are left as-is.
    """
    tokens = []
    i = 0
    n = len(adapter)
    while i < n:
        ch = adapter[i]
        if ch in 'ATGCU':
            # Keep a base and its following ?, + or * quantifier together.
            if adapter[i + 1:i + 2] in ('?', '+', '*'):
                tokens.append(adapter[i:i + 2])
                i += 2
            else:
                tokens.append(ch)
                i += 1
        elif ch == '(':
            j = adapter.find(")", i)
            tokens.append(adapter[i:j + 1])
            # BUGFIX: was `i = j + 2`, which silently dropped the first
            # base after every group; resume at the char right after ')'.
            i = j + 1
        else:
            # Defensive: skip unexpected characters instead of spinning
            # forever (the original loop never advanced `i` here).
            i += 1
    tokens = [complement(x) if ('(' not in x and "?" not in x)
              else x for x in tokens[::-1]]
    return ''.join(tokens)
def complement(seq):
    """Return the reverse complement of a DNA/RNA sequence.

    Despite the name, the sequence is reversed as well as complemented.
    Adenine is paired with 'U' only when the input contains a lowercase
    'u'; otherwise the complement is written with DNA letters.
    """
    a_partner = "U" if "u" in seq else "T"
    # Single-pass translation table over the lowercased, reversed input.
    pairs = str.maketrans({"a": a_partner, "c": "G",
                           "g": "C", "t": "A", "u": "A"})
    return seq[::-1].lower().translate(pairs)
class AdaptFind(object):
    """Matches reads against the catalogue of known adapter regexes."""

    def __init__(self):
        # Compile each known adapter and its reverse complement once.
        self.known_adapters = []
        for pattern in ADAPTERS:
            self.known_adapters.append(re.compile(pattern))
            self.known_adapters.append(re.compile(revcom(pattern)))
        self.found = {}        # matched adapter text -> hit count
        self.barcodes = set()  # captured barcode/index sequences
        # print([x.pattern for x in self.known_adapters])

    def find_known_adapter(self, read):
        """Tally every known adapter pattern anchored at the read start."""
        for regex in self.known_adapters:
            hit = re.match(regex, read)
            if hit is None:
                continue
            text = hit.group(0)
            self.found[text] = self.found.get(text, 0) + 1
            groups = hit.groups()
            # Remember a non-empty first capture group as a barcode.
            if groups and groups[0]:
                self.barcodes.update([groups[0]])
        return ''
class Adaptamers(object):
    """Tallies the terminal k-mers of reads to surface candidate adapters."""

    def __init__(self):
        # end k-mer -> number of reads ending with it
        self.endmers = {}

    def add_read_end(self, read, k=16):
        """Count the last *k* bases of *read*; k-mers containing 'N' are skipped."""
        tail = read[len(read) - k:]
        if "N" not in tail:
            self.endmers[tail] = self.endmers.get(tail, 0) + 1
        return ''
def generate_argparser():
    """Build and return the command-line parser for adapt.py."""
    parser = argparse.ArgumentParser(
        prog="adapt.py",
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        epilog=_LICENSE)
    parser.add_argument("--fq1", required=True, nargs='*',
                        type=os.path.abspath,
                        help=("one or more fastq file paths, "
                              "separated by spaces"))
    parser.add_argument("--fq2", nargs='*', type=os.path.abspath,
                        help=("one or more fastq file paths separated "
                              "by spaces, only use this for "
                              "paired-end fastq files and enter "
                              "these files in the same order "
                              "as their counterparts in --fq1"))
    parser.add_argument("-m", "--mode", choices=("known", "endmer", "both"),
                        default="known",
                        help=("known=only use list of known adapters;"
                              "endmer=search for common 3'end sequences;"
                              "both=both known and endmers"))
    parser.add_argument("-o", "--out", type=os.path.abspath, required=True,
                        help=("output FASTA of adapters detected"))
    parser.add_argument("-N", "--number-of-reads", type=int, default=200000,
                        help="Number of reads to search in each fastq")
    parser.add_argument("-k", "--end-klength", type=int, default=16,
                        help=("Length of end kmer to tabulate for possible "
                              "adapter matches."))
    parser.add_argument("-E", "--end-min-match", type=float, default=0.0001,
                        help=("Minimum proportion of read match required to "
                              "report the endmer as a possible match."))
    # BUGFIX: -M help text was copy-pasted from -E and wrongly described
    # the threshold as applying to "the endmer".
    parser.add_argument("-M", "--known-min-match", type=float, default=-1,
                        help=("Minimum proportion of read match required to "
                              "report a known adapter as a match. "
                              "Set to -1 (default) to accept all matches"))
    parser.add_argument("--quiet", action="store_true",
                        help="Suppress progress messages")
    parser.add_argument("--version", action="version", version="2017-06-21",
                        help="Display software version")
    return parser
def main(arguments=None):
    """Command-line entry point: scan FASTQ reads for adapter sequences and
    write a FASTA of detected adapters plus their reverse complements.

    *arguments* defaults to sys.argv[1:]; returns '' on success.
    """
    arguments = sys.argv[1:] if arguments is None else arguments
    # time0 = time()
    parser = generate_argparser()
    args = parser.parse_args(args=arguments)
    # Convert proportion thresholds to absolute read counts.
    args.end_min_match = int(args.end_min_match * args.number_of_reads)
    # BUGFIX: keep the -1 "accept everything" sentinel intact.  The old code
    # scaled it to -number_of_reads, which made the `== -1` test below dead
    # (it only worked by accident because any count >= a negative number).
    if args.known_min_match != -1:
        args.known_min_match = int(args.known_min_match *
                                   args.number_of_reads)
    # ===== BEGIN ITERATION =====
    ndex = 0
    paired_end = False
    knownfinder1 = AdaptFind()
    endmerfinder1 = Adaptamers()
    knownfinder2 = AdaptFind()
    endmerfinder2 = Adaptamers()
    if args.fq2 is not None:
        paired_end = True
        input_fq = zip(args.fq1, args.fq2)
    else:
        input_fq = zip(args.fq1, [''] * len(args.fq1))
    for (fq1, fq2) in input_fq:
        infq1 = gopen(fq1, 'rt')
        if paired_end is True:
            infq2 = gopen(fq2, 'rt')
        while 1:
            # ===== Read1 Filtering =====
            infq1.readline()                        # header line (ignored)
            line1_seq = infq1.readline().rstrip()
            infq1.readline()                        # '+' separator
            infq1.readline()                        # quality line
            if not line1_seq:
                break
            if args.mode in ("known", "both"):
                knownfinder1.find_known_adapter(line1_seq)
            if args.mode in ("endmer", "both"):
                endmerfinder1.add_read_end(
                    line1_seq, k=args.end_klength)
            # ===== Read2 Filtering =====
            if paired_end is True:
                infq2.readline()
                # BUGFIX: strip the trailing newline (read1 already did);
                # otherwise end k-mers and adapter matches include '\n'.
                line2_seq = infq2.readline().rstrip()
                infq2.readline()
                infq2.readline()
                if not line2_seq:
                    break
                if args.mode in ("known", "both"):
                    knownfinder2.find_known_adapter(line2_seq)
                if args.mode in ("endmer", "both"):
                    endmerfinder2.add_read_end(
                        line2_seq, k=args.end_klength)
            ndex += 1
            if ndex % 10000 == 0 and not args.quiet:
                # BUGFIX: --quiet was accepted but never honored.
                print(ndex, 'reads read')
            # BUGFIX: use >= so the cap still holds for the second and later
            # fastq files (ndex keeps growing past number_of_reads there).
            if ndex >= args.number_of_reads:
                break
        infq1.close()
        if paired_end:
            infq2.close()
    adapter_entries = []
    for i, kadapt, ematch in (
            (1, knownfinder1, endmerfinder1),
            (2, knownfinder2, endmerfinder2)):
        if len(kadapt.found) > 0:
            print("=== Known adapters found in FQ{} ===".format(i))
            for entry, val in kadapt.found.items():
                print("{} found {} times.".format(
                    entry, val))
                if args.known_min_match == -1 or val >= args.known_min_match:
                    print("Min matches met, added to output.")
                    adapter_entries.append(entry)
            for bcode in kadapt.barcodes:
                print("Possible barcode found: {}".format(bcode))
        if len(ematch.endmers) > 0:
            print("=== Possible adapters inferred from FQ{} ===".format(i))
            for entry, val in ematch.endmers.items():
                print("{} found {} times.".format(entry, val))
                if val >= args.end_min_match:
                    print("Min matches met, added to output.")
                    adapter_entries.append(entry)
    # Drop entries that are substrings of another entry (keep the longest).
    final_entries = []
    for entry in adapter_entries:
        if not any(entry in x for x in adapter_entries if x != entry):
            final_entries.append(entry)
    with open(args.out, 'w') as outfile:
        for i, entry in enumerate(final_entries):
            outfile.write(">ADAPTER{}\n{}\n".format(i + 1, entry))
            outfile.write(">ADAPTER{}R\n{}\n".format(i + 1, complement(entry)))
    return ''
if __name__ == "__main__":
    main()
|
jbpease/shear
|
adapt.py
|
Python
|
gpl-3.0
| 13,208
|
""" Set-up script to install PyFAST locally
"""
from setuptools import setup
setup(name='pyfast',
version='0.1',
description='Tools for working with wind turbine simulator FAST',
url='https://github.com/jennirinker/PyFAST.git',
author='Jenni Rinker',
author_email='jennifer.m.rinker@gmail.com',
license='GPL',
packages=['pyfast'],
zip_safe=False)
|
jennirinker/PyFAST
|
setup.py
|
Python
|
gpl-3.0
| 395
|
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
The development of this software was sponsored by NAG Ltd. (http://www.nag.co.uk)
and the EPSRC Centre For Doctoral Training in Industrially Focused Mathematical
Modelling (EP/L015803/1) at the University of Oxford. Please contact NAG for
alternative licensing.
"""
# Ensure compatibility with Python 2
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
import unittest
from pybobyqa.hessian import Hessian
def array_compare(x, y, thresh=1e-14):
    """True when the largest absolute difference between x and y is below thresh."""
    worst = np.abs(x - y).max()
    return worst < thresh
class TestBasicInit(unittest.TestCase):
    """A fresh Hessian(n) holds n*(n+1)//2 packed (upper-triangular) zeros."""
    def runTest(self):
        n = 4
        nvals = n*(n+1)//2
        hess = Hessian(n)
        self.assertEqual(hess.shape(), (nvals,), 'Wrong shape for initialisation')
        self.assertEqual(hess.dim(), n, 'Wrong dimension')
        self.assertEqual(len(hess), nvals, 'Wrong length')
        self.assertTrue(np.all(hess.upper_triangular() == np.zeros((nvals,))), 'Wrong initialised values')
class TestInitFromVector(unittest.TestCase):
    """Initialising from a packed vector stores that vector verbatim."""
    def runTest(self):
        n = 5
        nvals = n*(n+1)//2
        x = np.arange(nvals, dtype=float)
        hess = Hessian(n, vals=x)
        self.assertEqual(hess.shape(), (nvals,), 'Wrong shape for initialisation')
        self.assertEqual(hess.dim(), n, 'Wrong dimension')
        self.assertEqual(len(hess), nvals, 'Wrong length')
        self.assertTrue(np.all(hess.upper_triangular() == x), 'Wrong initialised values')
class TestInitFromMatrix(unittest.TestCase):
    """Initialising from a full symmetric matrix packs its upper triangle."""
    def runTest(self):
        n = 3
        nvals = n*(n+1)//2
        A = np.arange(n**2, dtype=float).reshape((n,n))
        hess = Hessian(n, vals=A+A.T) # force symmetric
        self.assertEqual(hess.shape(), (nvals,), 'Wrong shape for initialisation')
        self.assertEqual(hess.dim(), n, 'Wrong dimension')
        self.assertEqual(len(hess), nvals, 'Wrong length')
        # Expected values = upper triangle of A+A.T for A = [[0..8]] row-major.
        self.assertTrue(np.all(hess.upper_triangular() == np.array([0.0, 4.0, 8.0, 8.0, 12.0, 16.0])),
                        'Wrong initialised values')
class TestToFull(unittest.TestCase):
    """as_full() reconstructs the original full symmetric matrix."""
    def runTest(self):
        n = 7
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = A + A.T # force symmetric
        hess = Hessian(n, vals=H)
        self.assertTrue(np.all(hess.as_full() == H), 'Wrong values')
class TestGetElementGood(unittest.TestCase):
    """get_element(i, j) agrees with the full matrix for all valid indices."""
    def runTest(self):
        n = 3
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = A + A.T # force symmetric
        hess = Hessian(n, vals=H)
        for i in range(n):
            for j in range(n):
                self.assertEqual(hess.get_element(i, j), H[i,j], 'Wrong value for (i,j)=(%g,%g): got %g, expecting %g'
                                 % (i, j, hess.get_element(i, j), H[i,j]))
class TestGetElementBad(unittest.TestCase):
    """Out-of-range indices to get_element raise AssertionError."""
    def runTest(self):
        n = 4
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = A + A.T # force symmetric
        hess = Hessian(n, vals=H)
        # When testing for assertion errors, need lambda to stop assertion from actually happening
        self.assertRaises(AssertionError, lambda: hess.get_element(-1, 0))
        self.assertRaises(AssertionError, lambda: hess.get_element(-1, 0))
        self.assertRaises(AssertionError, lambda: hess.get_element(-3, n-1))
        self.assertRaises(AssertionError, lambda: hess.get_element(n, 0))
        self.assertRaises(AssertionError, lambda: hess.get_element(n+3, 0))
        self.assertRaises(AssertionError, lambda: hess.get_element(n+7, n-1))
        self.assertRaises(AssertionError, lambda: hess.get_element(0, -1))
        self.assertRaises(AssertionError, lambda: hess.get_element(0, -1))
        self.assertRaises(AssertionError, lambda: hess.get_element(n-1, -3))
        self.assertRaises(AssertionError, lambda: hess.get_element(0, n))
        self.assertRaises(AssertionError, lambda: hess.get_element(0, n+3))
        self.assertRaises(AssertionError, lambda: hess.get_element(n-1, n+7))
class TestSetElementGood(unittest.TestCase):
    """set_element(i, j, v) round-trips through get_element for all indices."""
    def runTest(self):
        n = 3
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = A + A.T # force symmetric
        hess = Hessian(n, vals=H)
        H2 = np.sin(H)
        for i in range(n):
            for j in range(n):
                hess.set_element(i, j, H2[i,j])
        for i in range(n):
            for j in range(n):
                self.assertEqual(hess.get_element(i, j), H2[i, j], 'Wrong value for (i,j)=(%g,%g): got %g, expecting %g'
                                 % (i, j, hess.get_element(i, j), H2[i, j]))
class TestSetElementBad(unittest.TestCase):
    """Out-of-range indices to set_element raise AssertionError."""
    def runTest(self):
        n = 5
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = A + A.T # force symmetric
        hess = Hessian(n, vals=H)
        # When testing for assertion errors, need lambda to stop assertion from actually happening
        self.assertRaises(AssertionError, lambda: hess.set_element(-1, 0, 1.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(-1, 0, 2.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(-3, n - 1, 3.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(n, 0, 4.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(n + 3, 0, -4.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(n + 7, n - 1, 5.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(0, -1, 6.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(0, -1, 7.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(n - 1, -3, -7.0))
        self.assertRaises(AssertionError, lambda: hess.set_element(0, n, -76.3))
        self.assertRaises(AssertionError, lambda: hess.set_element(0, n + 3, 2.8))
        self.assertRaises(AssertionError, lambda: hess.set_element(n - 1, n + 7, -1.0))
class TestMultGood(unittest.TestCase):
    """hess * vec matches the dense matrix-vector product (to 1e-12)."""
    def runTest(self):
        n = 5
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = np.sin(A + A.T) # force symmetric
        hess = Hessian(n, vals=H)
        vec = np.exp(np.arange(n, dtype=float))
        hs = np.dot(H, vec)
        self.assertTrue(array_compare(hess*vec, hs, thresh=1e-12), 'Wrong values')
class TestMultBad(unittest.TestCase):
    """Multiplying by anything but a length-n numpy vector raises AssertionError."""
    def runTest(self):
        n = 5
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = A + A.T # force symmetric
        hess = Hessian(n, vals=H)
        # When testing for assertion errors, need lambda to stop assertion from actually happening
        self.assertRaises(AssertionError, lambda: hess * 1.0)
        self.assertRaises(AssertionError, lambda: hess * None)
        self.assertRaises(AssertionError, lambda: hess * [float(i) for i in range(n)])
        self.assertRaises(AssertionError, lambda: hess * np.arange(n-1, dtype=float))
        self.assertRaises(AssertionError, lambda: hess * np.arange(n+1, dtype=float))
class TestNeg(unittest.TestCase):
    """Unary negation flips the sign of every packed element."""
    def runTest(self):
        n = 5
        A = np.arange(n ** 2, dtype=float).reshape((n, n))
        H = A + A.T # force symmetric
        hess = Hessian(n, vals=H)
        neghess = -hess
        self.assertTrue(np.allclose(hess.upper_triangular(), -neghess.upper_triangular()), 'Wrong negative values')
|
numericalalgorithmsgroup/pybobyqa
|
pybobyqa/tests/test_hessian.py
|
Python
|
gpl-3.0
| 7,948
|
# coding=utf-8
import unittest
"""993. Cousins in Binary Tree
https://leetcode.com/problems/cousins-in-binary-tree/description/
In a binary tree, the root node is at depth `0`, and children of each depth
`k` node are at depth `k+1`.
Two nodes of a binary tree are _cousins_ if they have the same depth, but have
**different parents**.
We are given the `root` of a binary tree with unique values, and the values
`x` and `y` of two different nodes in the tree.
Return `true` if and only if the nodes corresponding to the values `x` and `y`
are cousins.
**Example 1:
**
**Input:** root = [1,2,3,4], x = 4, y = 3
**Output:** false
**Example 2:
**
**Input:** root = [1,2,3,null,4,null,5], x = 5, y = 4
**Output:** true
**Example 3:**
****
**Input:** root = [1,2,3,null,4], x = 2, y = 3
**Output:** false
**Note:**
1. The number of nodes in the tree will be between `2` and `100`.
2. Each node has a unique integer value from `1` to `100`.
Similar Questions:
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def isCousins(self, root, x, y):
        """Return True iff the nodes holding values x and y are cousins:
        same depth but different parents.  Node values are unique.

        :type root: TreeNode
        :type x: int
        :type y: int
        :rtype: bool
        """
        if root is None:
            return False
        # Breadth-first walk one level at a time, remembering the parent of
        # any target value seen on the current level.
        level = [(root, None)]
        while level:
            parents = {}
            nxt = []
            for node, parent in level:
                if node.val == x or node.val == y:
                    parents[node.val] = parent
                if node.left is not None:
                    nxt.append((node.left, node))
                if node.right is not None:
                    nxt.append((node.right, node))
            if parents:
                # Both targets must appear on this level, under different
                # parents; otherwise they are at different depths or siblings.
                return len(parents) == 2 and parents[x] is not parents[y]
            level = nxt
        return False

    def test(self):
        pass
if __name__ == "__main__":
unittest.main()
|
openqt/algorithms
|
leetcode/python/lc993-cousins-in-binary-tree.py
|
Python
|
gpl-3.0
| 1,692
|
from setuptools import setup
with open("README.rst") as readme_file:
long_description = readme_file.read()
setup(name="Presser",
version="0.1.8",
packages=["presser",],
license="GNU GPL v3.0",
description="Extracts data from vine, in lieu of an API",
author="Gemma Hentsch",
author_email="contact@halfapenguin.com",
install_requires=[
"beautifulsoup4>=4.3.2",
"requests>=2.4.0",
"PyExecJS>=1.0.4",
],
tests_require=[
"beautifulsoup4",
"requests",
"mock",
"coverage",
"nose",
"PyExecJS",
"responses"
],
long_description=long_description,
test_suite="nose.collector",
url="https://github.com/ladyrassilon/presser",
keywords = ['scraping','vine'],
download_url="https://github.com/ladyrassilon/presser/archive/",
classifiers=[
"Development Status :: 4 - Beta",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: Implementation",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Programming Language :: Python",
"Intended Audience :: Developers",
]
)
|
ladyrassilon/presser
|
setup.py
|
Python
|
gpl-3.0
| 1,357
|
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
"""
from pyxmpp import streamtls
from pyxmpp.all import JID, Message
from pyxmpp.jabber.client import JabberClient
from pyxmpp.interface import implements
from pyxmpp.interfaces import *
from module.plugins.hooks.IRCInterface import IRCInterface
class XMPPInterface(IRCInterface, JabberClient):
    # Reuses the IRC hook's command/event machinery (event_* handlers) but
    # transports messages over XMPP/Jabber via pyxmpp's JabberClient.
    # NOTE: Python 2 codebase (pyxmpp, `except X, e` syntax).
    __name__ = "XMPPInterface"
    __version__ = "0.11"
    __type__ = "hook"
    # NOTE(review): the default JID contains a typo ("exmaple"); it is a
    # placeholder default value, so left untouched here.
    __config__ = [("activated", "bool", "Activated", False),
                  ("jid", "str", "Jabber ID", "user@exmaple-jabber-server.org"),
                  ("pw", "str", "Password", ""),
                  ("tls", "bool", "Use TLS", False),
                  ("owners", "str", "List of JIDs accepting commands from", "me@icq-gateway.org;some@msn-gateway.org"),
                  ("info_file", "bool", "Inform about every file finished", False),
                  ("info_pack", "bool", "Inform about every package finished", True),
                  ("captcha", "bool", "Send captcha requests", True)]
    __description__ = """Connect to jabber and let owner perform different tasks"""
    __author_name__ = "RaNaN"
    __author_mail__ = "RaNaN@pyload.org"

    implements(IMessageHandlersProvider)

    def __init__(self, core, manager):
        """Set up the IRC-style hook plus the underlying Jabber client."""
        IRCInterface.__init__(self, core, manager)
        self.jid = JID(self.getConfig("jid"))
        password = self.getConfig("pw")
        # if bare JID is provided add a resource -- it is required
        if not self.jid.resource:
            self.jid = JID(self.jid.node, self.jid.domain, "pyLoad")
        if self.getConfig("tls"):
            tls_settings = streamtls.TLSSettings(require=True, verify_peer=False)
            auth = ("sasl:PLAIN", "sasl:DIGEST-MD5")
        else:
            tls_settings = None
            auth = ("sasl:DIGEST-MD5", "digest")
        # setup client with provided connection information
        # and identity data
        JabberClient.__init__(self, self.jid, password,
                              disco_name="pyLoad XMPP Client", disco_type="bot",
                              tls_settings=tls_settings, auth_methods=auth)
        # Objects queried by pyxmpp for stanza handlers (self provides
        # message handlers, VersionHandler answers jabber:iq:version).
        self.interface_providers = [
            VersionHandler(self),
            self,
        ]

    def coreReady(self):
        """pyLoad hook entry: start the client thread once the core is up."""
        self.new_package = {}
        self.start()

    def packageFinished(self, pypack):
        """Announce a finished package to all owners (best-effort)."""
        try:
            if self.getConfig("info_pack"):
                self.announce(_("Package finished: %s") % pypack.name)
        except:
            pass

    def downloadFinished(self, pyfile):
        """Announce a finished download to all owners (best-effort)."""
        try:
            if self.getConfig("info_file"):
                self.announce(
                    _("Download finished: %(name)s @ %(plugin)s") % {"name": pyfile.name, "plugin": pyfile.pluginname})
        except:
            pass

    def run(self):
        """Thread body: connect and run the pyxmpp event loop forever."""
        # connect to IRC etc.
        self.connect()
        try:
            self.loop()
        except Exception, ex:
            self.logError("pyLoad XMPP: %s" % str(ex))

    def stream_state_changed(self, state, arg):
        """This one is called when the state of stream connecting the component
        to a server changes. This will usually be used to let the user
        know what is going on."""
        self.logDebug("pyLoad XMPP: *** State changed: %s %r ***" % (state, arg))

    def disconnected(self):
        """pyxmpp callback: connection lost."""
        self.logDebug("pyLoad XMPP: Client was disconnected")

    def stream_closed(self, stream):
        """pyxmpp callback: the XML stream was closed."""
        self.logDebug("pyLoad XMPP: Stream was closed | %s" % stream)

    def stream_error(self, err):
        """pyxmpp callback: stream-level error received."""
        self.logDebug("pyLoad XMPP: Stream Error: %s" % err)

    def get_message_handlers(self):
        """Return list of (message_type, message_handler) tuples.
        The handlers returned will be called when matching message is received
        in a client session."""
        return [("normal", self.message)]

    def message(self, stanza):
        """Message handler for the component."""
        subject = stanza.get_subject()
        body = stanza.get_body()
        t = stanza.get_type()
        self.logDebug(u'pyLoad XMPP: Message from %s received.' % (unicode(stanza.get_from(),)))
        self.logDebug(u'pyLoad XMPP: Body: %s Subject: %s Type: %s' % (body, subject, t))
        if t == "headline":
            # 'headline' messages should never be replied to
            return True
        if subject:
            subject = u"Re: " + subject
        # Reply goes back to the sender; note the deliberately swapped names:
        # to_jid is who we reply TO (the stanza's sender).
        to_jid = stanza.get_from()
        from_jid = stanza.get_to()
        #j = JID()
        to_name = to_jid.as_utf8()
        from_name = from_jid.as_utf8()
        names = self.getConfig("owners").split(";")
        # Only configured owners may issue commands (full JID or bare JID).
        if to_name in names or to_jid.node + "@" + to_jid.domain in names:
            messages = []
            # First word of the body selects an event_<trigger> handler,
            # remaining words become its arguments.
            trigger = "pass"
            args = None
            try:
                temp = body.split()
                trigger = temp[0]
                if len(temp) > 1:
                    args = temp[1:]
            except:
                pass
            handler = getattr(self, "event_%s" % trigger, self.event_pass)
            try:
                res = handler(args)
                # The handler returns a list of reply lines; wrap each in a
                # Message stanza addressed back to the sender.
                for line in res:
                    m = Message(
                        to_jid=to_jid,
                        from_jid=from_jid,
                        stanza_type=stanza.get_type(),
                        subject=subject,
                        body=line)
                    messages.append(m)
            except Exception, e:
                self.logError("pyLoad XMPP: " + repr(e))
            return messages
        else:
            return True

    def response(self, msg, origin=""):
        """IRCInterface override: route responses through announce()."""
        return self.announce(msg)

    def announce(self, message):
        """ send message to all owners"""
        for user in self.getConfig("owners").split(";"):
            self.logDebug("pyLoad XMPP: Send message to %s" % user)
            to_jid = JID(user)
            m = Message(from_jid=self.jid,
                        to_jid=to_jid,
                        stanza_type="chat",
                        body=message)
            stream = self.get_stream()
            # Reconnect lazily if the stream has gone away.
            if not stream:
                self.connect()
                stream = self.get_stream()
            stream.send(m)

    def beforeReconnecting(self, ip):
        """pyLoad hook: drop the XMPP link before the IP change."""
        self.disconnect()

    def afterReconnecting(self, ip):
        """pyLoad hook: re-establish the XMPP link after the IP change."""
        self.connect()
class VersionHandler(object):
    """Provides handler for a version query.
    This class will answer version query and announce 'jabber:iq:version' namespace
    in the client's disco#info results."""

    implements(IIqHandlersProvider, IFeaturesProvider)

    def __init__(self, client):
        """Just remember who created this."""
        self.client = client

    def get_features(self):
        """Return namespace which should the client include in its reply to a
        disco#info query."""
        return ["jabber:iq:version"]

    def get_iq_get_handlers(self):
        """Return list of tuples (element_name, namespace, handler) describing
        handlers of <iq type='get'/> stanzas"""
        return [("query", "jabber:iq:version", self.get_version)]

    def get_iq_set_handlers(self):
        """Return empty list, as this class provides no <iq type='set'/> stanza handler."""
        return []

    def get_version(self, iq):
        """Handler for jabber:iq:version queries.
        jabber:iq:version queries are not supported directly by PyXMPP, so the
        XML node is accessed directly through the libxml2 API. This should be
        used very carefully!"""
        iq = iq.make_result_response()
        q = iq.new_query("jabber:iq:version")
        # NOTE(review): "Echo component"/"1.0" look like leftovers from the
        # pyxmpp echo-bot example; consider reporting pyLoad's identity.
        q.newTextChild(q.ns(), "name", "Echo component")
        q.newTextChild(q.ns(), "version", "1.0")
        return iq
|
estaban/pyload
|
module/plugins/hooks/XMPPInterface.py
|
Python
|
gpl-3.0
| 8,389
|
# from .. import Workflow, Stage, Task, TaskFile
#
# from flask.ext import admin
# from flask.ext.admin.contrib import sqla
#
#
# def add_cosmos_admin(flask_app, session):
# adm = admin.Admin(flask_app, 'Flask Admin', base_template="admin_layout.html")
# for m in [Workflow, Stage, Task, TaskFile]:
# adm.add_view(sqla.ModelView(m, session))
|
vamst/COSMOS2
|
cosmos/web/admin.py
|
Python
|
gpl-3.0
| 358
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Rodrigo Silva (MestreLion) <linux@rodrigosilva.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. See <http://www.gnu.org/licenses/gpl.html>
'''Player class'''
import logging
from . import g
from . import rnd
from . import enum2 as enum
log = logging.getLogger(__name__)
class HUNGER(enum.Enum):
    '''Minimum food in stomach before some effect or warning
    Values represent the last "safe" food amount before the step
    '''
    # NOTE(review): Player.hungerstage iterates this enum and returns the
    # first threshold the current food is below; that is only correct if the
    # local enum2 module iterates members in ascending value order -- confirm.
    HUNGRY = 300 # WEAK * 2 in DOS. No effect, just a warning.
    WEAK = 150 # No effect either, just the warning
    FAINT = 0 # Start fainting. Unix: 20
    STARVE = -851 # Die of starvation. Unix: 0. Ouch! :)
    # This is the effective value in DOS, intended was probably -850

    @classmethod
    def name(cls, v):
        """Display name for a threshold; STARVE renders as '?' like DOS."""
        if v == cls.STARVE: return "?" # Per DOS code, but never seen by player
        return super(HUNGER, cls).name(v)
class Player(object):
    """The rogue: position, inventory, stats and per-turn upkeep (hunger)."""
    # Constants
    char = "@" # Display character
    ac = 1 # Armor Class when wearing no armor
    stomach = 2000 # Stomach size, how much food can the player have
    # XP thresholds per level: 10, 20, 40, ... doubling; trailing 0 caps the
    # table (NOTE(review): assumed sentinel for "max level" -- confirm).
    # The genexp variable deliberately(?) shadows the attribute name.
    xplevels = tuple(10*2**xplevels for xplevels in range(19)) + (0,)

    def __init__(self, name, screen=None):
        self.name = name
        # Input and output
        self.screen = screen
        # Map and position
        self.level = None
        self.row = 0
        self.col = 0
        # Items
        self.armor = None # Worn armor
        self.weapon = None # Wielded weapon
        self.ringright = None # Ring on right hand
        self.ringleft = None # Ring on left hand
        self.pack = [] # Inventory (list of items)
        # Main status
        self.hp = 12 # Current hit points left (life)
        self.hpmax = 12 # Maximum hit points
        self.str = 16 # Current strength
        self.strmax = 16 # Maximum strength
        self.gold = 0 # Gold (purse)
        self.xp = 0 # Experience points
        self.xplevel = 1 # Experience level
        # Food left in stomach. Fixed 1250 in Unix
        self.food = rnd.spread(1300)
        # Condition status and flags
        self.skipturns = 0 # Used by sleep, faint, freeze, etc

    @property
    def armorclass(self):
        """Effective armor class: worn armor's AC, else the bare-skin default."""
        if self.armor is None:
            return self.ac
        else:
            return self.armor.ac

    @property
    def hungerstage(self):
        '''Name of the hunger stage, based on current food in stomach'''
        # DOS: "Faint" in status bar is only displayed after player actually
        # faints for the first time. This would require a private property,
        # `hungry_stage` or similar, which would defeat the whole point
        # of this function.
        # NOTE(review): correctness relies on HUNGER iterating thresholds in
        # ascending value order (STARVE first) -- confirm in enum2.
        for food in HUNGER:
            if self.food < food:
                return HUNGER.name(food)
        else:
            return "" # All fine :)

    @property
    def metabolism(self):
        '''How much food is consumed every turn.
        Depends on current food, worn rings and screen width.
        Some rings have random consumption, so this value may change
        on every read!
        '''
        deltafood = 1
        # While fainting-hungry, rings and screen width no longer add upkeep.
        if self.food <= HUNGER.FAINT:
            return deltafood
        for ring in (self.ringleft,
                     self.ringright):
            if ring is not None:
                deltafood += ring.consumption
        # DOS: 40 column mode use food twice as fast
        if self.screen.size[1] <= 40:
            deltafood *= 2
        return deltafood

    @property
    def has_amulet(self):
        # Placeholder membership test until a real Amulet item exists.
        return "AMULET" in self.pack # @@fake

    # ACTIONS ###########

    def move(self, dr, dc):
        """Try to step by (dr, dc); redraw and advance the turn on success."""
        row = self.row + dr
        col = self.col + dc
        if not self.level.is_passable(row, col):
            return
        # Update current tile
        self.level.reveal(self.row, self.col)
        # Update to new position
        self.row = row
        self.col = col
        self.level.draw(self)
        self.level.tick()

    def rest(self):
        '''Do nothing for a turn'''
        self.level.tick()

    def show_inventory(self):
        """List the pack's items in a dialog."""
        dialog = self.screen.dialog()
        for item in self.pack:
            dialog.addline(item)
        dialog.show()

    # DAEMONS ###########

    def heal(self):
        # Stub: natural healing not implemented yet.
        pass

    def digest(self):
        '''Deplete food in stomach'''
        # Unix has very different mechanics, specially on fainting and rings
        oldfood = self.food
        self.food -= self.metabolism
        if self.food < HUNGER.STARVE:
            raise g.Lose("Starvation")
        if self.food < HUNGER.FAINT:
            # 80% chance to avoid fainting, if not already
            if self.skipturns > 0 or rnd.perc(80):
                return
            # Faint for a few turns
            self.skipturns += rnd.rand(4, 11) # Harsh!
            #@@ Disable running
            #@@ Cancel multiple actions
            # DOS 1.1: "%sYou faint", "You feel too weak from lack of food. "
            self.screen.message("%sYou faint from the lack of food",
                                "You feel very weak. ")
            return
        # Warn exactly once when crossing the WEAK / HUNGRY thresholds.
        if self.food < HUNGER.WEAK and oldfood >= HUNGER.WEAK:
            self.screen.message("You are starting to feel weak")
        elif self.food < HUNGER.HUNGRY and oldfood >= HUNGER.HUNGRY:
            self.screen.message("You are starting to get hungry")
|
MestreLion/pyrogue
|
pyrogue/player.py
|
Python
|
gpl-3.0
| 6,092
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Citrix Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module maturity/support metadata consumed by Ansible tooling
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Ansible documentation block, rendered by `ansible-doc`. Several option
# descriptions had words dropped mid-sentence and the YAML indentation was
# lost; both restored below from the NITRO appfwsettings reference wording.
DOCUMENTATION = '''
---
module: citrix_adc_appfw_settings
short_description: Manage Citrix ADC Web Application Firewall settings.
description:
    - Manage Citrix ADC Web Application Firewall settings.
    - The module uses the NITRO API to make configuration changes to WAF settings on the target Citrix ADC.
    - The NITRO API reference can be found at https://developer-docs.citrix.com/projects/netscaler-nitro-api/en/latest
    - Note that due to NITRO API limitations this module will always report a changed status even when configuration changes have not taken place.

version_added: "1.0.0"

author:
    - George Nikolopoulos (@giorgos-nikolopoulos)
    - Sumanth Lingappa (@sumanth-lingappa)

options:

    defaultprofile:
        description:
            - >-
                Profile to use when a connection does not match any policy. Default setting is APPFW_BYPASS, which
                sends unmatched connections back to the Citrix ADC without attempting to filter them further.
            - "Minimum length = 1"
        type: str

    undefaction:
        description:
            - "Profile to use when an application firewall policy evaluates to undefined (UNDEF)."
            - >-
                An UNDEF event indicates an internal error condition. The APPFW_BLOCK built-in profile is the default
                setting. You can specify a different built-in or user-created profile as the UNDEF profile.
            - "Minimum length = 1"
        type: str

    sessiontimeout:
        description:
            - >-
                Timeout, in seconds, after which a user session is terminated. Before continuing to use the protected
                site, the user must establish a new session by opening a designated start URL.
            - "Minimum value = C(1)"
            - "Maximum value = C(65535)"
        type: str

    learnratelimit:
        description:
            - >-
                Maximum number of connections per second that the application firewall learning engine examines to
                generate new relaxations for learning-enabled security checks. The application firewall drops any connections
                above this limit from the list of connections used by the learning engine.
            - "Minimum value = C(1)"
            - "Maximum value = C(1000)"
        type: str

    sessionlifetime:
        description:
            - >-
                Maximum amount of time (in seconds) that the application firewall allows a user session to remain
                active, regardless of user activity. After this time, the user session is terminated. Before continuing to
                use the protected web site, the user must establish a new session by opening a designated start URL.
            - "Minimum value = C(0)"
            - "Maximum value = C(2147483647)"
        type: str

    sessioncookiename:
        description:
            - "Name of the session cookie that the application firewall uses to track user sessions."
            - >-
                Must begin with a letter or number, and can consist of from 1 to 31 letters, numbers, and the hyphen
                (-) and underscore (_) symbols.
            - "The following requirement applies only to the Citrix ADC CLI:"
            - >-
                If the name includes one or more spaces, enclose the name in double or single quotation marks (for
                example, "my cookie name" or 'my cookie name').
            - "Minimum length = 1"
        type: str

    clientiploggingheader:
        description:
            - >-
                Name of an HTTP header that contains the IP address that the client used to connect to the protected
                site or service.
        type: str

    importsizelimit:
        description:
            - >-
                Cumulative total maximum number of bytes in web forms imported to a protected web site. If a user
                attempts to upload files with a total byte count higher than the specified limit, the application firewall
                blocks the request.
            - "Minimum value = C(1)"
            - "Maximum value = C(268435456)"
        type: str

    signatureautoupdate:
        description:
            - "Flag used to enable/disable auto update signatures."
        type: bool

    signatureurl:
        description:
            - "URL to download the mapping file from server."
        type: str

    cookiepostencryptprefix:
        description:
            - "String that is prepended to all encrypted cookie values."
            - "Minimum length = 1"
        type: str

    logmalformedreq:
        description:
            - "Log requests that are so malformed that application firewall parsing doesn't occur."
        type: bool

    geolocationlogging:
        description:
            - "Enable Geo-Location Logging in CEF format logs."
        type: bool

    ceflogging:
        description:
            - "Enable CEF format logs."
        type: bool

    entitydecoding:
        description:
            - "Transform multibyte (double- or half-width) characters to single width characters."
        type: bool

    useconfigurablesecretkey:
        description:
            - "Use configurable secret key in AppFw operations."
        type: bool

    sessionlimit:
        description:
            - >-
                Maximum number of sessions that the application firewall allows to be active, regardless of user
                activity. After the max_limit reaches, No more user session will be created .
            - "Minimum value = C(0)"
            - "Maximum value = C(500000)"
        type: str

    malformedreqaction:
        elements: str
        choices:
            - 'none'
            - 'block'
            - 'log'
            - 'stats'
        description:
            - "flag to define action on malformed requests that application firewall cannot parse."
        type: list

extends_documentation_fragment: citrix.adc.citrixadc
'''
# Usage example rendered by `ansible-doc`. YAML indentation restored (it had
# been flattened). The bare `on` values are YAML booleans (True), matching
# the module's `type: bool` options.
EXAMPLES = '''
- name: setup basic settings
  delegate_to: localhost
  citrix_adc_appfw_settings:
    nitro_user: nsroot
    nitro_pass: nsroot
    nsip: 172.18.0.2
    state: present
    defaultprofile: APPFW_BYPASS
    undefaction: APPFW_BLOCK
    sessiontimeout: "1000"
    learnratelimit: "500"
    sessionlifetime: "2000"
    sessioncookiename: cookie_name
    clientiploggingheader: header_name
    importsizelimit: "268435456"
    signatureautoupdate: on
    signatureurl: http://signature.url
    cookiepostencryptprefix: prepend
    logmalformedreq: on
    geolocationlogging: on
    ceflogging: on
    entitydecoding: on
    useconfigurablesecretkey: on
    sessionlimit: "10000"
'''
# Return-value documentation rendered by `ansible-doc`; YAML indentation
# restored (it had been flattened into an invalid document).
RETURN = '''
loglines:
    description: list of logged messages by the module
    returned: always
    type: list
    sample: ['message 1', 'message 2']

msg:
    description: Message detailing the failure reason
    returned: failure
    type: str
    sample: "Action does not exist"
'''
import copy
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.citrix.adc.plugins.module_utils.citrix_adc import (
NitroResourceConfig,
NitroException,
netscaler_common_arguments,
log,
loglines
)
class ModuleExecutor(object):
    '''Applies the play's appfwsettings parameters to the target ADC via NITRO.'''

    def __init__(self, module):
        '''
        @param module: instantiated AnsibleModule carrying params and check_mode
        '''
        self.module = module
        self.main_nitro_class = 'appfwsettings'

        # These options are booleans on the Ansible side but must be sent
        # to NITRO as the strings 'ON'/'OFF'
        boolean_flags = (
            'signatureautoupdate',
            'logmalformedreq',
            'geolocationlogging',
            'ceflogging',
            'entitydecoding',
            'useconfigurablesecretkey',
        )

        # Dictionary containing attribute information
        # for each NITRO object utilized by this module
        self.attribute_config = {
            'appfwsettings': {
                'attributes_list': [
                    'defaultprofile',
                    'undefaction',
                    'sessiontimeout',
                    'learnratelimit',
                    'sessionlifetime',
                    'sessioncookiename',
                    'clientiploggingheader',
                    'importsizelimit',
                    'signatureautoupdate',
                    'signatureurl',
                    'cookiepostencryptprefix',
                    'logmalformedreq',
                    'geolocationlogging',
                    'ceflogging',
                    'entitydecoding',
                    'useconfigurablesecretkey',
                    'sessionlimit',
                    'malformedreqaction',
                ],
                'transforms': {
                    flag: (lambda value: 'ON' if value else 'OFF')
                    for flag in boolean_flags
                },
                # appfwsettings is a global singleton: no id attributes
                'get_id_attributes': [
                ],
                'delete_id_attributes': [
                ],
            },
        }

        self.module_result = dict(
            changed=False,
            failed=False,
            loglines=loglines,
        )

    def update(self):
        '''Push the configured settings to the ADC (no-op in check mode).'''
        log('ModuleExecutor.update()')

        main_config = self.attribute_config[self.main_nitro_class]
        config = NitroResourceConfig(
            module=self.module,
            resource=self.main_nitro_class,
            attribute_values_dict=self.module.params,
            attributes_list=main_config['attributes_list'],
            transforms=main_config['transforms'],
        )

        # NITRO cannot report whether the settings actually differed, so a
        # change is always reported (see module documentation)
        self.module_result['changed'] = True
        if not self.module.check_mode:
            config.update()

    def main(self):
        '''Dispatch on `state` and translate any error into fail_json.'''
        try:
            state = self.module.params['state']
            if state == 'present':
                self.update()
            elif state == 'absent':
                log('Nothing to do for state absent')
            self.module.exit_json(**self.module_result)
        except NitroException as e:
            msg = "Nitro exception: errorcode=%s, message=%s, severity=%s" % (str(e.errorcode), e.message, e.severity)
            self.module.fail_json(msg=msg, **self.module_result)
        except Exception as e:
            msg = 'Exception %s: %s' % (type(e), str(e))
            self.module.fail_json(msg=msg, **self.module_result)
def main():
    '''Module entry point: build the argument spec and run the executor.'''
    module_specific_arguments = dict(
        defaultprofile=dict(type='str'),
        undefaction=dict(type='str'),
        sessiontimeout=dict(type='str'),
        learnratelimit=dict(type='str'),
        sessionlifetime=dict(type='str'),
        sessioncookiename=dict(type='str'),
        clientiploggingheader=dict(type='str'),
        importsizelimit=dict(type='str'),
        signatureautoupdate=dict(type='bool'),
        signatureurl=dict(type='str'),
        cookiepostencryptprefix=dict(type='str'),
        logmalformedreq=dict(type='bool'),
        geolocationlogging=dict(type='bool'),
        ceflogging=dict(type='bool'),
        entitydecoding=dict(type='bool'),
        useconfigurablesecretkey=dict(type='bool'),
        sessionlimit=dict(type='str'),
        malformedreqaction=dict(
            type='list',
            elements='str',
            choices=[
                'none',
                'block',
                'log',
                'stats',
            ]
        ),
    )

    # Shared connection arguments first; module-specific entries would win
    # on any key collision
    argument_spec = dict(netscaler_common_arguments)
    argument_spec.update(module_specific_arguments)

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    ModuleExecutor(module=module).main()


if __name__ == '__main__':
    main()
|
citrix/netscaler-ansible-modules
|
ansible-collections/adc/plugins/modules/citrix_adc_appfw_settings.py
|
Python
|
gpl-3.0
| 12,977
|
"""
OwO whats this?
"""
__author__ = ('Smurphicus')
COMMAND = 'owo'
from random import choice
# Substitution rules, applied in insertion order: later rules operate on the
# output of earlier ones (e.g. an 'r' already turned into 'w' can no longer
# take part in a 'qu' match).
substitutions = {'r': 'w', 'R': 'W', 'l': 'w', 'L': 'W',
                 'na': 'nya', 'NA': 'NYA', 'qu': 'qw', 'QU': 'QW'}
# Candidate suffixes; the empty string means "no face this time"
faces = [' OwO', ' owo', ' UwU', ' uwu', ' :3', ' :33', ' :333', '']


def owoify(message):
    """Return *message* with the owo substitutions applied and a random
    face from `faces` appended."""
    # Iterate items() rather than keys() + lookup
    for plain, cute in substitutions.items():
        message = message.replace(plain, cute)
    return message + choice(faces)
def main(bot, author_id, message, thread_id, thread_type, **kwargs):
    '''Owoify the message preceding the command and post the result.'''
    # limit=2 fetches the trigger message plus the one before it;
    # index 1 is the earlier (target) message
    target = bot.fetchThreadMessages(thread_id=thread_id, limit=2)[1]
    bot.sendMessage(owoify(target.text),
                    thread_id=thread_id, thread_type=thread_type)
|
sentriz/steely
|
steely/plugins/owo.py
|
Python
|
gpl-3.0
| 679
|
# Copyright (c) 2014-2015 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk, Gdk
from lollypop.define import Lp, ArtSize, Type
from lollypop.widgets_album import AlbumWidget
from lollypop.pop_radio import RadioPopover
class RadioWidget(AlbumWidget):
    """
    Widget with radio cover and title

    Pressing the title, or any non-primary button on the cover, opens a
    RadioPopover to edit the station (see edit()).
    """

    def __init__(self, name, radios_manager):
        """
        Init radio widget
        @param name as string
        @param radios_manager as RadiosManager
        """
        AlbumWidget.__init__(self, None)
        builder = Gtk.Builder()
        builder.add_from_resource('/org/gnome/Lollypop/RadioWidget.ui')
        # Binds the _on_* handlers below to the signals declared in the .ui
        builder.connect_signals(self)
        self._cover = builder.get_object('cover')
        self._name = name
        self._radios_manager = radios_manager
        self._popover = None
        self._title = builder.get_object('title')
        self._title.set_label(name)
        self.add(builder.get_object('widget'))
        self.set_cover()
        self.set_property('halign', Gtk.Align.START)

    def set_sensitive(self, b):
        """
        Ignore set sensitive
        @param b as bool (ignored on purpose)
        """
        pass

    def get_id(self):
        """
        Return widget id (same value for all radio widgets)
        @return Type.RADIOS
        """
        return Type.RADIOS

    def do_get_preferred_width(self):
        """
        Set maximum width
        The widget is never wider than its cover image.
        """
        return self._cover.get_preferred_width()

    def set_name(self, name):
        """
        Set radio name (updates the title label too)
        @param name as string
        """
        self._name = name
        self._title.set_label(name)

    def get_name(self):
        """
        Return radio name
        @return name as string
        """
        return self._name

    def set_cover(self, force=False):
        """
        Set cover for album if state changed

        Only fetches a new surface when the "currently playing" state
        changed since the last call, or when force is True.
        @param force as bool
        """
        # Selected == this widget's station is the current radio track
        selected = Lp.player.current_track.id == Type.RADIOS and\
            self._name == Lp.player.current_track.artist
        if self._cover is not None and (selected != self._selected or force):
            self._selected = selected
            surface = Lp.art.get_radio(
                self._name,
                ArtSize.BIG,
                selected)
            self._cover.set_from_surface(surface)
            # Drop our reference as soon as the image widget holds one
            del surface

    def update_cover(self):
        """
        Update cover for album id id needed

        Unlike set_cover(), always re-fetches the surface, even when the
        selected state did not change.
        """
        if self._cover is not None:
            self._selected = Lp.player.current_track.id == Type.RADIOS\
                and self._name == Lp.player.current_track.artist
            surface = Lp.art.get_radio(
                self._name,
                ArtSize.BIG,
                self._selected)
            self._cover.set_from_surface(surface)
            del surface

    def edit(self, widget):
        """
        Edit radio via a popover anchored to widget
        @param widget as Gtk.Widget
        """
        self._popover = RadioPopover(self._name, self._radios_manager)
        self._popover.set_relative_to(widget)
        self._popover.show()

    #######################
    # PRIVATE             #
    #######################
    def _on_title_press(self, widget, event):
        """
        Edit radio
        @param widget as Gtk.Widget
        @param event as Gdk.Event
        """
        self.edit(widget)

    def _on_cover_press(self, widget, event):
        """
        Edit radio on right click
        Any button other than the primary (left, button 1) opens the editor.
        @param widget as Gtk.Widget
        @param event as Gdk.Event
        """
        if event.button != 1:
            self.edit(widget)

    def _on_eventbox_realize1(self, eventbox):
        """
        Change cursor over cover eventbox
        @param eventbox as Gdk.Eventbox
        """
        eventbox.get_window().set_cursor(Gdk.Cursor(Gdk.CursorType.HAND1))

    def _on_eventbox_realize2(self, eventbox):
        """
        Change cursor over title eventbox
        @param eventbox as Gdk.Eventbox
        """
        eventbox.get_window().set_cursor(Gdk.Cursor(Gdk.CursorType.PENCIL))
|
gigitux/lollypop
|
src/widgets_radio.py
|
Python
|
gpl-3.0
| 4,766
|
#!/usr/bin/env python3

"""Locale detection and gettext setup.

Importing this module exposes ``_`` as the translation function for the
catalog named APPLICATION_ID under LOCALE_DIR (both from global_settings).
"""
import gettext
import locale

from obozrenie.global_settings import *

# Locale derived from the environment, e.g. ('en_US', 'UTF-8').
# NOTE(review): locale.getdefaultlocale() and the `codeset` argument below
# are deprecated in Python 3.11+ -- confirm supported Python versions
# before modernizing this.
current_locale, encoding = locale.getdefaultlocale()

# fallback=True yields a NullTranslations (identity _) instead of raising
# when no matching catalog is found.
t = gettext.translation(APPLICATION_ID, localedir=LOCALE_DIR, languages=[
    current_locale], codeset=encoding, fallback=True)
_ = t.gettext
|
obozrenie/obozrenie
|
obozrenie/i18n.py
|
Python
|
gpl-3.0
| 311
|