| code (stringlengths 2–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserMsg',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
('message', models.TextField()),
('receivedDate', models.DateTimeField(default=django.utils.timezone.now)),
('users', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
]
|
johngrantuk/myCommServer
|
myCommServer/migrations/0001_initial.py
|
Python
|
mit
| 767
|
import py
import os
import pytest
import numpy as np
import scipy as sp
import openpnm as op
class HDF5Test:
def setup_class(self):
ws = op.Workspace()
ws.settings['local_data'] = True
self.net = op.network.Cubic(shape=[2, 2, 2])
Ps = [0, 1, 2, 3]
Ts = self.net.find_neighbor_throats(pores=Ps)
self.geo_1 = op.geometry.GenericGeometry(network=self.net,
pores=Ps, throats=Ts)
self.geo_1['pore.boo'] = 1
self.geo_1['throat.boo'] = 1
Ps = [4, 5, 6, 7]
Ts = self.net.find_neighbor_throats(pores=Ps, mode='xnor')
self.geo_2 = op.geometry.GenericGeometry(network=self.net,
pores=Ps, throats=Ts)
self.geo_2['pore.boo'] = 1
self.geo_2['throat.boo'] = 1
self.phase_1 = op.phases.GenericPhase(network=self.net)
self.phase_1['pore.bar'] = 2
self.phase_1['throat.bar'] = 2
self.phase_2 = op.phases.GenericPhase(network=self.net)
self.phase_2['pore.bar'] = 2
self.phase_2['throat.bar'] = 2
self.phys_1 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_1,
geometry=self.geo_1)
self.phys_1['pore.baz'] = 11
self.phys_1['throat.baz'] = 11
self.phys_2 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_1,
geometry=self.geo_2)
self.phys_2['pore.baz'] = 12
self.phys_2['throat.baz'] = 12
self.phys_3 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_2,
geometry=self.geo_1)
self.phys_3['pore.baz'] = 21
self.phys_3['throat.baz'] = 21
self.phys_4 = op.physics.GenericPhysics(network=self.net,
phase=self.phase_2,
geometry=self.geo_2)
self.phys_4['pore.baz'] = 22
self.phys_4['throat.baz'] = 22
self.net['pore.object'] = np.ones(self.net.Np, dtype=object)
def teardown_class(self):
ws = op.Workspace()
ws.clear()
def test_to_hdf5(self, tmpdir):
fname = tmpdir.join(self.net.project.name)
f = op.io.HDF5.to_hdf5(network=[self.net],
phases=[self.phase_1, self.phase_2],
filename=fname)
assert list(f.keys()) == ['net_01', 'phase_01', 'phase_02']
filename = f.filename
f.close()
os.remove(filename)
def test_from_hdf5(self):
with pytest.raises(NotImplementedError):
op.io.HDF5.from_hdf5()
def test_print(self, tmpdir):
fname = tmpdir.join(self.net.project.name)
f = op.io.HDF5.to_hdf5(network=[self.net], filename=fname,
interleave=False)
op.io.HDF5.print_levels(f)
op.io.HDF5.print_flattened(f)
f.close()
os.remove(fname.dirpath().join(self.net.project.name + '.hdf'))
if __name__ == '__main__':
    # All the tests in this file can be run by executing ('playing') this file directly
t = HDF5Test()
self = t # For interacting with the tests at the command line
t.setup_class()
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
try:
t.__getattribute__(item)()
except TypeError:
t.__getattribute__(item)(tmpdir=py.path.local())
|
TomTranter/OpenPNM
|
tests/unit/io/HDF5Test.py
|
Python
|
mit
| 3,746
|
import json
import random
import traceback
import urllib
from twisted_gears import client
from time import time
from twisted.application.service import Service
from twisted.internet import defer, protocol, reactor, task
from twisted.python import log
from twisted.web.client import getPage, HTTPClientFactory
from twisted.web.error import Error
# we don't want to hear about each web request we make
HTTPClientFactory.noisy = False
# By default, verbose logging is disabled. This function is redefined
# when the service starts if verbose logging is enabled.
log.verbose = lambda x: None
class CurlerClient(client.GearmanProtocol):
def __init__(self, service, server, base_urls, job_queue, num_workers):
self.service = service
self.server = server
self.base_urls = base_urls
self.job_queue = job_queue
self.num_workers = num_workers
def connectionLost(self, reason):
log.msg('CurlerClient lost connection to %s: %s'
% (self.server, reason))
client.GearmanProtocol.connectionLost(self, reason)
def connectionMade(self):
log.msg('CurlerClient made connection to %s' % self.server)
self.start_work()
def start_work(self):
worker = client.GearmanWorker(self)
worker.registerFunction(self.job_queue, self.handle_job)
log.msg('Firing up %d workers...' % self.num_workers)
coop = task.Cooperator()
for i in range(self.num_workers):
reactor.callLater(0.1 * i, lambda: coop.coiterate(worker.doJobs()))
@defer.inlineCallbacks
def handle_job(self, job):
time_start = time()
try:
log.msg('Got job: %s' % job.handle)
log.verbose('data=%r' % job.data)
response = yield self._make_request(job.handle, job.data)
except Exception, e:
log.msg('ERROR: Unhandled exception: %r' % e)
# Log full traceback on multiple lines
for line in traceback.format_exc().split('\n'):
log.msg(line)
response = {"error": "Internal curler error. Check the logs."}
# always include handle in response
response['job_handle'] = job.handle
# log error if we're returning one
if 'error' in response:
log.msg('ERROR: %s' % response['error'])
response['job_data'] = job.data
# format response nicely
response_json = json.dumps(response, sort_keys=True, indent=2)
time_taken = int((time() - time_start) * 1000 + 0.5)
        log.msg('Completed job: %s, url=%s, time=%sms, status=%s'
                % (job.handle, response.get('url'), time_taken,
                   response.get('status')))
defer.returnValue(response_json)
@defer.inlineCallbacks
def _make_request(self, handle, data):
# make sure job arg is valid json
try:
job_data = json.loads(data, encoding='UTF-8')
except ValueError, e:
defer.returnValue({"error": "Job data is not valid JSON"})
# make sure it contains a method
if 'method' not in job_data:
defer.returnValue({"error":
"Missing \"method\" property in job data"})
# make sure it contains data
if 'data' not in job_data:
defer.returnValue({"error":
"Missing \"data\" property in job data"})
headers = self.build_headers(job_data)
# we'll post the data as JSON, so convert it back
data = json.dumps(job_data['data'])
# select random base URL to hit
path = random.choice(self.base_urls)
url = str("%s/%s" % (path, job_data['method']))
try:
log.verbose('POSTing to %s, data=%r' % (url, data))
postdata = urllib.urlencode({
"job_handle": handle,
"data": data})
try:
# despite our name, we're not actually using curl :)
response = yield getPage(url, method='POST', postdata=postdata,
headers=headers)
status = 200
except Error, e:
status = int(e.status)
response = e.response
log.verbose('POST complete: status=%d, response=%r'
% (status, response))
defer.returnValue({'url': url,
'status': status,
'response': response})
except Exception, e:
defer.returnValue({"error": "POST failed: %r - %s" % (e, e)})
@staticmethod
def build_headers(job_data):
# default headers - can be overridden by job_data['headers']
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
if 'headers' in job_data:
            # headers can't be unicode but json.loads makes all the strings unicode
for key, value in job_data['headers'].iteritems():
if isinstance(key, unicode):
key = key.encode('utf-8')
if isinstance(value, unicode):
value = value.encode('utf-8')
headers[key] = value
return headers
class CurlerClientFactory(protocol.ReconnectingClientFactory):
noisy = True
protocol = CurlerClient
# retry every 5 seconds for up to 10 minutes
initialDelay = 5
maxDelay = 5
maxRetries = 120
def __init__(self, service, server, base_urls, job_queue, num_workers):
self.service = service
self.server = server
self.base_urls = base_urls
self.job_queue = job_queue
self.num_workers = num_workers
def buildProtocol(self, addr):
p = self.protocol(self.service, self.server, self.base_urls,
self.job_queue, self.num_workers)
p.factory = self
return p
class CurlerService(Service):
def __init__(self, base_urls, gearmand_servers, job_queue, num_workers,
verbose=False):
self.base_urls = base_urls
self.gearmand_servers = gearmand_servers
self.job_queue = job_queue
self.num_workers = num_workers
# define verbose logging function
if verbose:
log.verbose = lambda x: log.msg('VERBOSE: %s' % x)
@defer.inlineCallbacks
def startService(self):
Service.startService(self)
log.msg('Service starting. servers=%r, job queue=%s, base urls=%r'
% (self.gearmand_servers, self.job_queue, self.base_urls))
log.verbose('Verbose logging is enabled')
for server in self.gearmand_servers:
host, port = server.split(':')
f = CurlerClientFactory(self, server, self.base_urls,
self.job_queue, self.num_workers)
proto = yield reactor.connectTCP(host, int(port), f)
def stopService(self):
Service.stopService(self)
log.msg('Service stopping')
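# --- Illustrative sketch, not part of the original module ---
# Shape of a job payload as consumed by _make_request() above: a JSON string
# carrying "method", "data" and an optional "headers" mapping. The field
# values below are hypothetical; curler POSTs the re-encoded "data" to
# "<random base_url>/<method>" as form-encoded fields.
EXAMPLE_JOB_DATA = json.dumps({
    "method": "echo",                        # appended to the base URL path
    "data": {"user_id": 42, "text": "hi"},   # re-serialized by _make_request
    "headers": {"X-Request-Id": "abc123"},   # merged over the default headers
})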
|
hipchat/curler
|
curler/service.py
|
Python
|
mit
| 7,039
|
"""Entry point for the Supernova Catalog
"""
def main(args, clargs, log):
from .supernovacatalog import SupernovaCatalog
from astrocats.catalog.argshandler import ArgsHandler
# Create an `ArgsHandler` instance with the appropriate argparse machinery
log.debug("Initializing `ArgsHandler`")
args_handler = ArgsHandler(log)
# Parse the arguments to get the configuration settings
args = args_handler.load_args(args=args, clargs=clargs)
# Returns 'None' if no subcommand is given
if args is None:
return
log.debug("Initializing `SupernovaCatalog`")
catalog = SupernovaCatalog(args, log)
# Run the subcommand given in `args`
log.debug("Running subcommand")
args_handler.run_subcommand(args, catalog)
return
|
astrocatalogs/supernovae
|
main.py
|
Python
|
mit
| 777
|
from django.db import models
from binder.models import BinderModel
class Country(BinderModel):
name = models.CharField(unique=True, max_length=100)
|
CodeYellowBV/django-binder
|
tests/testapp/models/country.py
|
Python
|
mit
| 154
|
"""Distance methods between two boolean vectors (representing word
occurrences).
References:
1. SciPy, https://www.scipy.org
"""
import numpy as np
from .utils import distance
def _nbool_correspond_ft_tf(u, v):
"""Function used by some distance methods (in Distance class).
Based on: https://github.com/scipy/scipy
Args:
u (numpy.ndarray) : boolean vector, shape: (N, 1)
v (numpy.ndarray) : as above
Returns:
tuple of two numbers
Examples:
>>> u = np.array([True, False, True])
>>> v = np.array([True, True, False])
>>> print(_nbool_correspond_ft_tf(u, v))
(1, 1)
"""
not_u = ~u
not_v = ~v
nft = (not_u & v).sum()
ntf = (u & not_v).sum()
return (nft, ntf)
def _nbool_correspond_all(u, v):
"""Function used by some distance methods (in Distance class).
Based on: https://github.com/scipy/scipy
Args:
u (numpy.ndarray) : bool, shape: (N, )
v (numpy.ndarray) : as above
Returns:
tuple of four numbers
Examples:
>>> u = np.array([True, False, True])
>>> v = np.array([True, True, False])
>>> print(_nbool_correspond_all(u, v))
(0, 1, 1, 1)
"""
not_u = ~u
not_v = ~v
nff = (not_u & not_v).sum()
nft = (not_u & v).sum()
ntf = (u & not_v).sum()
ntt = (u & v).sum()
return (nff, nft, ntf, ntt)
class Distance(distance.Distance):
"""Combine vector boolean data (numpy.ndarray) with distance method.
"""
def pwdist_dice(self, seq1idx, seq2idx):
"""Compute the Dice dissimilarity (Sorensen-Dice coefficient)
between two boolean 1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
ntt = (u & v).sum()
(nft, ntf) = _nbool_correspond_ft_tf(u, v)
return float(ntf + nft) / float(2.0 * ntt + ntf + nft)
def pwdist_yule(self, seq1idx, seq2idx):
"""Compute the Yule dissimilarity between two boolean 1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
(nff, nft, ntf, ntt) = _nbool_correspond_all(u, v)
return float(2.0 * ntf * nft) / float(ntt * nff + ntf * nft)
def pwdist_rogerstanimoto(self, seq1idx, seq2idx):
"""Compute the Rogers-Tanimoto dissimilarity between two boolean
1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
(nff, nft, ntf, ntt) = _nbool_correspond_all(u, v)
r = float(2.0 * (ntf + nft)) / float(ntt + nff + (2.0 * (ntf + nft)))
return r
def pwdist_russellrao(self, seq1idx, seq2idx):
"""Compute the Russell-Rao dissimilarity between two boolean 1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
ntt = (u & v).sum()
return float(len(u) - ntt) / float(len(u))
def pwdist_sokalmichener(self, seq1idx, seq2idx):
"""Compute the Sokal-Michener dissimilarity
between two boolean 1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
ntt = (u & v).sum()
nff = (~u & ~v).sum()
(nft, ntf) = _nbool_correspond_ft_tf(u, v)
return float(2.0 * (ntf + nft)) / float(ntt + nff + 2.0 * (ntf + nft))
def pwdist_sokalsneath(self, seq1idx, seq2idx):
"""Compute the Sokal-Sneath dissimilarity
between two boolean 1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
ntt = (u & v).sum()
(nft, ntf) = _nbool_correspond_ft_tf(u, v)
denom = ntt + 2.0 * (ntf + nft)
if denom == 0:
raise ValueError('Sokal-Sneath dissimilarity is not defined for '
'vectors that are entirely false.')
return float(2.0 * (ntf + nft)) / denom
def pwdist_jaccard(self, seq1idx, seq2idx):
"""Compute the Jaccard-Needham dissimilarity
between two boolean 1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
dist = (np.double(np.bitwise_and((u != v),
np.bitwise_or(u != 0, v != 0)).sum()) /
np.double(np.bitwise_or(u != 0, v != 0).sum()))
return dist
def pwdist_hamming(self, seq1idx, seq2idx):
"""Compute the Hamming distance between two 1-D arrays.
The Hamming distance between 1-D arrays `u` and `v`, is simply the
proportion of disagreeing components in `u` and `v`.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
return (u != v).mean()
def pwdist_kulsinski(self, seq1idx, seq2idx):
"""Compute the Kulsinski dissimilarity between two boolean 1-D arrays.
Returns:
distance value (double)
"""
u = self[seq1idx]
v = self[seq2idx]
n = float(len(u))
(_nff, nft, ntf, ntt) = _nbool_correspond_all(u, v)
return (ntf + nft - ntt + n) / (ntf + nft + n)
def main():
from .utils.seqrecords import SeqRecords
from . import word_vector
from . import word_pattern
from .utils import distmatrix
seq_records = SeqRecords()
seq_records.add('seq1', 'MKSTGWHF')
seq_records.add('seq2', 'MKSSSSTGWGWG')
seq_records.add('seq3', 'MKSTLKNGTEQ')
p = word_pattern.create(seq_records.seq_list, 2)
bools = word_vector.Bools(seq_records.length_list, p)
dist = Distance(bools, 'jaccard')
matrix = distmatrix.create(seq_records.id_list, dist)
matrix.display()
if __name__ == '__main__':
main()
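# --- Illustrative sketch, not part of the original module ---
# Any pwdist_* method above can be selected by its suffix when constructing
# Distance, as main() does with 'jaccard'. A minimal variation using the
# Dice dissimilarity instead (same records as in main()):
#
#   dist = Distance(bools, 'dice')
#   matrix = distmatrix.create(seq_records.id_list, dist)
#   matrix.display()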
|
aziele/alfpy
|
alfpy/word_bool_distance.py
|
Python
|
mit
| 5,945
|
import app.texto as texto
import app.twitter as twitter
import app.mongo_database as mongo
import app.redis_database as rd
from collections import Counter
from textblob import TextBlob as tb
import numpy as np
def buscarTermo(termo):
    lista = list()
    polaridades = list()
    bons = 0
    ruins = 0
    twitts = twitter.buscar(termo)
    for twitt in twitts:
        palavras = texto.limparTexto(twitt.text)
        lista = lista + palavras.split()
        analysis = tb(twitt.text)
        # track every tweet's polarity so the mean covers all tweets,
        # not only the last one analyzed
        polaridades.append(analysis.sentiment.polarity)
        if analysis.sentiment.polarity > 0:
            bons += 1
        else:
            ruins += 1
    count = Counter(lista)
    listagem = list()
    sentimento = list()
    media = np.mean(polaridades) if polaridades else 0.0
    sentimento.append({'bons': bons, 'ruins': ruins, 'media': media})
for item in count:
listagem.append({'text':item,"size":count[item],"repeticao":count[item]})
id = mongo.salvar(termo,listagem,sentimento)
rd.salvar(termo,id)
retorno = {'resultados':listagem,'sentimento':sentimento}
return retorno
def verificar(termo):
id = rd.checar(termo)
print(id)
if(id):
dados = mongo.recuperar(id)
return dados
else:
return buscarTermo(termo)
def recentes():
return rd.recentes()
|
robsonpiere/nuvemdepalavras
|
app/contador.py
|
Python
|
mit
| 1,262
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('issue', '0002_auto_20170518_2349'),
]
operations = [
migrations.AddField(
model_name='issue',
name='claimusers',
field=models.TextField(default=b'', blank=True),
),
]
|
genonfire/portality
|
issue/migrations/0003_issue_claimusers.py
|
Python
|
mit
| 411
|
"""Utilities for fast persistence of big data, with optional compression."""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import pickle
import os
import sys
import warnings
try:
from pathlib import Path
except ImportError:
Path = None
from .numpy_pickle_utils import _COMPRESSORS
from .numpy_pickle_utils import BinaryZlibFile
from .numpy_pickle_utils import Unpickler, Pickler
from .numpy_pickle_utils import _read_fileobject, _write_fileobject
from .numpy_pickle_utils import _read_bytes, BUFFER_SIZE
from .numpy_pickle_compat import load_compatibility
from .numpy_pickle_compat import NDArrayWrapper
# For compatibility with old versions of joblib, we need ZNDArrayWrapper
# to be visible in the current namespace.
# Explicitly skipping next line from flake8 as it triggers an F401 warning
# which we don't care about.
from .numpy_pickle_compat import ZNDArrayWrapper # noqa
from ._compat import _basestring, PY3_OR_LATER
from .backports import make_memmap
###############################################################################
# Utility objects for persistence.
class NumpyArrayWrapper(object):
"""An object to be persisted instead of numpy arrays.
This object is used to hack into the pickle machinery and read numpy
array data from our custom persistence format.
More precisely, this object is used for:
* carrying the information of the persisted array: subclass, shape, order,
dtype. Those ndarray metadata are used to correctly reconstruct the array
with low level numpy functions.
* determining if memmap is allowed on the array.
* reading the array bytes from a file.
* reading the array using memorymap from a file.
* writing the array bytes to a file.
Attributes
----------
subclass: numpy.ndarray subclass
Determine the subclass of the wrapped array.
shape: numpy.ndarray shape
Determine the shape of the wrapped array.
order: {'C', 'F'}
Determine the order of wrapped array data. 'C' is for C order, 'F' is
for fortran order.
dtype: numpy.ndarray dtype
Determine the data type of the wrapped array.
allow_mmap: bool
Determine if memory mapping is allowed on the wrapped array.
Default: False.
"""
def __init__(self, subclass, shape, order, dtype, allow_mmap=False):
"""Constructor. Store the useful information for later."""
self.subclass = subclass
self.shape = shape
self.order = order
self.dtype = dtype
self.allow_mmap = allow_mmap
def write_array(self, array, pickler):
"""Write array bytes to pickler file handle.
This function is an adaptation of the numpy write_array function
available in version 1.10.1 in numpy/lib/format.py.
"""
# Set buffer size to 16 MiB to hide the Python loop overhead.
buffersize = max(16 * 1024 ** 2 // array.itemsize, 1)
if array.dtype.hasobject:
# We contain Python objects so we cannot write out the data
# directly. Instead, we will pickle it out with version 2 of the
# pickle protocol.
pickle.dump(array, pickler.file_handle, protocol=2)
else:
for chunk in pickler.np.nditer(array,
flags=['external_loop',
'buffered',
'zerosize_ok'],
buffersize=buffersize,
order=self.order):
pickler.file_handle.write(chunk.tostring('C'))
def read_array(self, unpickler):
"""Read array from unpickler file handle.
This function is an adaptation of the numpy read_array function
available in version 1.10.1 in numpy/lib/format.py.
"""
if len(self.shape) == 0:
count = 1
else:
count = unpickler.np.multiply.reduce(self.shape)
# Now read the actual data.
if self.dtype.hasobject:
# The array contained Python objects. We need to unpickle the data.
array = pickle.load(unpickler.file_handle)
else:
if (not PY3_OR_LATER and
unpickler.np.compat.isfileobj(unpickler.file_handle)):
# In python 2, gzip.GzipFile is considered as a file so one
# can use numpy.fromfile().
# For file objects, use np.fromfile function.
# This function is faster than the memory-intensive
# method below.
array = unpickler.np.fromfile(unpickler.file_handle,
dtype=self.dtype, count=count)
else:
# This is not a real file. We have to read it the
# memory-intensive way.
# crc32 module fails on reads greater than 2 ** 32 bytes,
# breaking large reads from gzip streams. Chunk reads to
# BUFFER_SIZE bytes to avoid issue and reduce memory overhead
# of the read. In non-chunked case count < max_read_count, so
# only one read is performed.
max_read_count = BUFFER_SIZE // min(BUFFER_SIZE,
self.dtype.itemsize)
array = unpickler.np.empty(count, dtype=self.dtype)
for i in range(0, count, max_read_count):
read_count = min(max_read_count, count - i)
read_size = int(read_count * self.dtype.itemsize)
data = _read_bytes(unpickler.file_handle,
read_size, "array data")
array[i:i + read_count] = \
unpickler.np.frombuffer(data, dtype=self.dtype,
count=read_count)
del data
if self.order == 'F':
array.shape = self.shape[::-1]
array = array.transpose()
else:
array.shape = self.shape
return array
def read_mmap(self, unpickler):
"""Read an array using numpy memmap."""
offset = unpickler.file_handle.tell()
if unpickler.mmap_mode == 'w+':
unpickler.mmap_mode = 'r+'
marray = make_memmap(unpickler.filename,
dtype=self.dtype,
shape=self.shape,
order=self.order,
mode=unpickler.mmap_mode,
offset=offset)
# update the offset so that it corresponds to the end of the read array
unpickler.file_handle.seek(offset + marray.nbytes)
return marray
def read(self, unpickler):
"""Read the array corresponding to this wrapper.
Use the unpickler to get all information to correctly read the array.
Parameters
----------
unpickler: NumpyUnpickler
Returns
-------
array: numpy.ndarray
"""
# When requested, only use memmap mode if allowed.
if unpickler.mmap_mode is not None and self.allow_mmap:
array = self.read_mmap(unpickler)
else:
array = self.read_array(unpickler)
# Manage array subclass case
if (hasattr(array, '__array_prepare__') and
self.subclass not in (unpickler.np.ndarray,
unpickler.np.memmap)):
# We need to reconstruct another subclass
new_array = unpickler.np.core.multiarray._reconstruct(
self.subclass, (0,), 'b')
return new_array.__array_prepare__(array)
else:
return array
###############################################################################
# Pickler classes
class NumpyPickler(Pickler):
"""A pickler to persist big data efficiently.
The main features of this object are:
* persistence of numpy arrays in a single file.
* optional compression with a special care on avoiding memory copies.
Attributes
----------
fp: file
File object handle used for serializing the input object.
protocol: int
Pickle protocol used. Default is pickle.DEFAULT_PROTOCOL under
python 3, pickle.HIGHEST_PROTOCOL otherwise.
"""
dispatch = Pickler.dispatch.copy()
def __init__(self, fp, protocol=None):
self.file_handle = fp
self.buffered = isinstance(self.file_handle, BinaryZlibFile)
# By default we want a pickle protocol that only changes with
# the major python version and not the minor one
if protocol is None:
protocol = (pickle.DEFAULT_PROTOCOL if PY3_OR_LATER
else pickle.HIGHEST_PROTOCOL)
Pickler.__init__(self, self.file_handle, protocol=protocol)
# delayed import of numpy, to avoid tight coupling
try:
import numpy as np
except ImportError:
np = None
self.np = np
def _create_array_wrapper(self, array):
"""Create and returns a numpy array wrapper from a numpy array."""
order = 'F' if (array.flags.f_contiguous and
not array.flags.c_contiguous) else 'C'
allow_mmap = not self.buffered and not array.dtype.hasobject
wrapper = NumpyArrayWrapper(type(array),
array.shape, order, array.dtype,
allow_mmap=allow_mmap)
return wrapper
def save(self, obj):
"""Subclass the Pickler `save` method.
This is a total abuse of the Pickler class in order to use the numpy
persistence function `save` instead of the default pickle
implementation. The numpy array is replaced by a custom wrapper in the
pickle persistence stack and the serialized array is written right
after in the file. Warning: the file produced does not follow the
pickle format. As such it can not be read with `pickle.load`.
"""
if self.np is not None and type(obj) in (self.np.ndarray,
self.np.matrix,
self.np.memmap):
if type(obj) is self.np.memmap:
# Pickling doesn't work with memmapped arrays
obj = self.np.asanyarray(obj)
# The array wrapper is pickled instead of the real array.
wrapper = self._create_array_wrapper(obj)
Pickler.save(self, wrapper)
# A framer was introduced with pickle protocol 4 and we want to
# ensure the wrapper object is written before the numpy array
# buffer in the pickle file.
# See https://www.python.org/dev/peps/pep-3154/#framing to get
# more information on the framer behavior.
if self.proto >= 4:
self.framer.commit_frame(force=True)
# And then array bytes are written right after the wrapper.
wrapper.write_array(obj, self)
return
return Pickler.save(self, obj)
class NumpyUnpickler(Unpickler):
"""A subclass of the Unpickler to unpickle our numpy pickles.
Attributes
----------
mmap_mode: str
The memorymap mode to use for reading numpy arrays.
file_handle: file_like
File object to unpickle from.
filename: str
Name of the file to unpickle from. It should correspond to file_handle.
This parameter is required when using mmap_mode.
np: module
Reference to numpy module if numpy is installed else None.
"""
dispatch = Unpickler.dispatch.copy()
def __init__(self, filename, file_handle, mmap_mode=None):
# The next line is for backward compatibility with pickle generated
# with joblib versions less than 0.10.
self._dirname = os.path.dirname(filename)
self.mmap_mode = mmap_mode
self.file_handle = file_handle
# filename is required for numpy mmap mode.
self.filename = filename
self.compat_mode = False
Unpickler.__init__(self, self.file_handle)
try:
import numpy as np
except ImportError:
np = None
self.np = np
def load_build(self):
"""Called to set the state of a newly created object.
We capture it to replace our place-holder objects, NDArrayWrapper or
NumpyArrayWrapper, by the array we are interested in. We
replace them directly in the stack of pickler.
NDArrayWrapper is used for backward compatibility with joblib <= 0.9.
"""
Unpickler.load_build(self)
# For backward compatibility, we support NDArrayWrapper objects.
if isinstance(self.stack[-1], (NDArrayWrapper, NumpyArrayWrapper)):
if self.np is None:
raise ImportError("Trying to unpickle an ndarray, "
"but numpy didn't import correctly")
array_wrapper = self.stack.pop()
# If any NDArrayWrapper is found, we switch to compatibility mode,
# this will be used to raise a DeprecationWarning to the user at
# the end of the unpickling.
if isinstance(array_wrapper, NDArrayWrapper):
self.compat_mode = True
self.stack.append(array_wrapper.read(self))
# Be careful to register our new method.
if PY3_OR_LATER:
dispatch[pickle.BUILD[0]] = load_build
else:
dispatch[pickle.BUILD] = load_build
###############################################################################
# Utility functions
def dump(value, filename, compress=0, protocol=None, cache_size=None):
"""Persist an arbitrary Python object into one file.
Parameters
-----------
value: any Python object
The object to store to disk.
filename: str or pathlib.Path
The path of the file in which it is to be stored. The compression
method corresponding to one of the supported filename extensions ('.z',
'.gz', '.bz2', '.xz' or '.lzma') will be used automatically.
compress: int from 0 to 9 or bool or 2-tuple, optional
Optional compression level for the data. 0 or False is no compression.
Higher value means more compression, but also slower read and
write times. Using a value of 3 is often a good compromise.
See the notes for more details.
If compress is True, the compression level used is 3.
        If compress is a 2-tuple, the first element must be a string
        among the supported compressors (e.g. 'zlib', 'gzip', 'bz2', 'lzma',
        'xz'), and the second element must be an integer from 0 to 9,
        corresponding to the compression level.
protocol: positive int
Pickle protocol, see pickle.dump documentation for more details.
cache_size: positive int, optional
This option is deprecated in 0.10 and has no effect.
Returns
-------
filenames: list of strings
The list of file names in which the data is stored. If
compress is false, each array is stored in a different file.
See Also
--------
joblib.load : corresponding loader
Notes
-----
Memmapping on load cannot be used for compressed files. Thus
using compression can significantly slow down loading. In
    addition, compressed files take extra memory during
dump and load.
"""
if Path is not None and isinstance(filename, Path):
filename = str(filename)
is_filename = isinstance(filename, _basestring)
is_fileobj = hasattr(filename, "write")
compress_method = 'zlib' # zlib is the default compression method.
if compress is True:
# By default, if compress is enabled, we want to be using 3 by default
compress_level = 3
elif isinstance(compress, tuple):
# a 2-tuple was set in compress
if len(compress) != 2:
raise ValueError(
'Compress argument tuple should contain exactly 2 elements: '
'(compress method, compress level), you passed {}'
.format(compress))
compress_method, compress_level = compress
else:
compress_level = compress
if compress_level is not False and compress_level not in range(10):
# Raising an error if a non valid compress level is given.
raise ValueError(
'Non valid compress level given: "{}". Possible values are '
'{}.'.format(compress_level, list(range(10))))
if compress_method not in _COMPRESSORS:
# Raising an error if an unsupported compression method is given.
raise ValueError(
'Non valid compression method given: "{}". Possible values are '
'{}.'.format(compress_method, _COMPRESSORS))
if not is_filename and not is_fileobj:
# People keep inverting arguments, and the resulting error is
# incomprehensible
raise ValueError(
'Second argument should be a filename or a file-like object, '
'%s (type %s) was given.'
% (filename, type(filename))
)
if is_filename and not isinstance(compress, tuple):
# In case no explicit compression was requested using both compression
# method and level in a tuple and the filename has an explicit
# extension, we select the corresponding compressor.
if filename.endswith('.z'):
compress_method = 'zlib'
elif filename.endswith('.gz'):
compress_method = 'gzip'
elif filename.endswith('.bz2'):
compress_method = 'bz2'
elif filename.endswith('.lzma'):
compress_method = 'lzma'
elif filename.endswith('.xz'):
compress_method = 'xz'
else:
# no matching compression method found, we unset the variable to
# be sure no compression level is set afterwards.
compress_method = None
if compress_method in _COMPRESSORS and compress_level == 0:
# we choose a default compress_level of 3 in case it was not given
# as an argument (using compress).
compress_level = 3
if not PY3_OR_LATER and compress_method in ('lzma', 'xz'):
raise NotImplementedError("{} compression is only available for "
"python version >= 3.3. You are using "
"{}.{}".format(compress_method,
sys.version_info[0],
sys.version_info[1]))
if cache_size is not None:
# Cache size is deprecated starting from version 0.10
warnings.warn("Please do not set 'cache_size' in joblib.dump, "
"this parameter has no effect and will be removed. "
"You used 'cache_size={}'".format(cache_size),
DeprecationWarning, stacklevel=2)
if compress_level != 0:
with _write_fileobject(filename, compress=(compress_method,
compress_level)) as f:
NumpyPickler(f, protocol=protocol).dump(value)
elif is_filename:
with open(filename, 'wb') as f:
NumpyPickler(f, protocol=protocol).dump(value)
else:
NumpyPickler(filename, protocol=protocol).dump(value)
# If the target container is a file object, nothing is returned.
if is_fileobj:
return
# For compatibility, the list of created filenames (e.g with one element
# after 0.10.0) is returned by default.
return [filename]
def _unpickle(fobj, filename="", mmap_mode=None):
"""Internal unpickling function."""
# We are careful to open the file handle early and keep it open to
# avoid race-conditions on renames.
# That said, if data is stored in companion files, which can be
# the case with the old persistence format, moving the directory
# will create a race when joblib tries to access the companion
# files.
unpickler = NumpyUnpickler(filename, fobj, mmap_mode=mmap_mode)
obj = None
try:
obj = unpickler.load()
if unpickler.compat_mode:
warnings.warn("The file '%s' has been generated with a "
"joblib version less than 0.10. "
"Please regenerate this pickle file."
% filename,
DeprecationWarning, stacklevel=3)
except UnicodeDecodeError as exc:
# More user-friendly error message
if PY3_OR_LATER:
new_exc = ValueError(
'You may be trying to read with '
'python 3 a joblib pickle generated with python 2. '
'This feature is not supported by joblib.')
new_exc.__cause__ = exc
raise new_exc
# Reraise exception with Python 2
raise
return obj
def load(filename, mmap_mode=None):
"""Reconstruct a Python object from a file persisted with joblib.dump.
Parameters
-----------
filename: str or pathlib.Path
The path of the file from which to load the object
mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
If not None, the arrays are memory-mapped from the disk. This
mode has no effect for compressed files. Note that in this
case the reconstructed object might no longer match exactly
the originally pickled object.
Returns
-------
result: any Python object
The object stored in the file.
See Also
--------
joblib.dump : function to save an object
Notes
-----
This function can load numpy array files saved separately during the
dump. If the mmap_mode argument is given, it is passed to np.load and
arrays are loaded as memmaps. As a consequence, the reconstructed
object might not match the original pickled object. Note that if the
file was saved with compression, the arrays cannot be memmaped.
"""
if Path is not None and isinstance(filename, Path):
filename = str(filename)
if hasattr(filename, "read"):
fobj = filename
filename = getattr(fobj, 'name', '')
with _read_fileobject(fobj, filename, mmap_mode) as fobj:
obj = _unpickle(fobj)
else:
with open(filename, 'rb') as f:
with _read_fileobject(f, filename, mmap_mode) as fobj:
if isinstance(fobj, _basestring):
                    # if the returned file object is a string, this means we
                    # try to load a pickle file generated with an old version
                    # of joblib, so we load it with the compatibility function.
return load_compatibility(fobj)
obj = _unpickle(fobj, filename, mmap_mode)
return obj
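# --- Illustrative sketch, not part of the original module ---
# Typical round trip with the dump/load pair above (paths are examples and
# numpy is assumed to be installed):
#
#   import numpy as np
#   obj = {'weights': np.arange(10), 'epoch': 3}
#   dump(obj, '/tmp/model.pkl.gz')              # gzip chosen via extension
#   dump(obj, '/tmp/model.pkl')                 # uncompressed
#   restored = load('/tmp/model.pkl.gz')
#   mapped = load('/tmp/model.pkl', mmap_mode='r')  # arrays memory-mapped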
|
flennerhag/mlens
|
mlens/externals/joblib/numpy_pickle.py
|
Python
|
mit
| 23,236
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-16 17:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('frisor_urls', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='url',
name='publish_date',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='url',
name='url',
field=models.URLField(),
),
]
|
vevurka/frisor
|
frisor/frisor_urls/migrations/0002_auto_20170316_1704.py
|
Python
|
mit
| 584
|
import socket
import time
import sys
import traceback
import logging
from dispatch import receiver
from stoppable import StoppableLoopThread
import signals
logger = logging.getLogger(__name__)
class TcpServer(StoppableLoopThread):
def __init__(self):
super(TcpServer, self).__init__()
self.daemon = True
self.sock = None
self.ip = '0.0.0.0'
self.port = 31033
def configure(self, config):
self.listen_host = config.get('network', 'listen_host')
self.port = config.getint('network', 'listen_port')
def pre_loop(self):
# start server
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((self.listen_host, self.port))
sock.listen(5)
self.sock = sock
self.ip, self.port = sock.getsockname()
logger.info("TCP server started {0}:{1}".format(self.ip, self.port))
super(TcpServer, self).pre_loop()
def loop_body(self):
logger.debug('in run loop')
try:
connection, (ip, port) = self.sock.accept()
except IOError:
traceback.print_exc(file=sys.stdout)
time.sleep(0.1)
return
signals.peer_connection_accepted.send(sender=None,
connection=connection,
ip=ip,
port=port)
tcp_server = TcpServer()
@receiver(signals.config_ready)
def config_tcp_server(sender, config, **kwargs):
tcp_server.configure(config)
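# --- Illustrative sketch, not part of the original module ---
# configure() expects a ConfigParser-style object with a [network] section;
# values below are examples:
#
#   [network]
#   listen_host = 0.0.0.0
#   listen_port = 31033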
|
mrmayfield/pyethereum
|
pyethereum/tcpserver.py
|
Python
|
mit
| 1,643
|
# -*- coding: utf-8 -*-
from .exceptions import ClientException, SnapshotException
from . import debug, cache
from .region import Region
class Snapshot:
"""Manage operations related to Droplet snapshots"""
def __init__(self, render):
self.render = render
self.region = Region(self.render)
def __unique__(self, name):
entries = self.list()
if len(set(i.name for i in entries)) != len(entries):
return False
return True
@debug
def list(self):
"""List snapshots"""
return self.render('images', '/images', params={
'filter': 'my_images'
})
def show(self, name):
"""Show details for a single snapshot"""
id = self.id_from_name(name)
return self.render('image', '/images/%s' % id)
@debug
def transfer(self, name, region):
id = self.id_from_name(name)
region_id = self.region.id_from_name(region)
try:
return self.render('event_id', '/images/%s/transfer' % id, params={
'region_id': region_id
})
except ClientException as e:
            raise SnapshotException(str(e))
@debug
def id_from_name(self, name):
"""Translate snapshot name into ID"""
if not self.__unique__(name):
raise SnapshotException("More than one snapshot matches %s" % name)
res = next((
r.id for r in self.list() if r.name.lower() == name.lower()),
None)
if res:
return res
raise SnapshotException('No snapshot called %s' % name)
@debug
def destroy(self, name):
"""Destroy snapshot"""
try:
id = self.id_from_name(name)
return self.render('status', '/images/%s/destroy' % id)
except ClientException as e:
raise SnapshotException(str(e))
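# --- Illustrative sketch, not part of the original module ---
# Hypothetical usage, assuming a configured `render` callable as used
# elsewhere in pontoon:
#
#   snapshot = Snapshot(render)
#   for image in snapshot.list():
#       print(image.name)
#   snapshot.transfer('my-snapshot', 'ams2')   # names are examples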
|
bendtherules/pontoon
|
pontoon/snapshot.py
|
Python
|
mit
| 1,885
|
import pytest
import sys
from pwny import *
@pytest.mark.xfail(sys.version_info < (2, 7),
reason="inspect.getcallargs new in python 2.7")
def test_shellcode_translate():
@sc.LinuxX86Mutable.translate()
def shellcode():
buf = alloc_buffer(64)
reg_add(SYSCALL_RET_REG, 127)
reg_add(SYSCALL_RET_REG, 8192)
reg_sub(SYSCALL_RET_REG, 127)
reg_sub(SYSCALL_RET_REG, 8192)
sys_read(0, buf, buf.length)
sys_write(1, buf, buf.length)
sys_setresuid(sys_geteuid(), sys_geteuid(), sys_geteuid())
sys_execve(u'/bin/sh', [u'/bin/sh', u'-c', buf, None], None)
shellcode()
SHELLCODE_ENVS = [
(sc.LinuxX86Mutable, ()),
(sc.LinuxX86MutableNullSafe, ()),
(sc.LinuxX86Stack, ()),
(sc.LinuxX86StackNullSafe, ()),
(sc.LinuxX86_64Mutable, ()),
(sc.LinuxX86_64MutableNullSafe, ()),
(sc.LinuxX86_64Stack, ()),
(sc.LinuxX86_64StackNullSafe, ()),
(sc.LinuxARMMutable, ()),
(sc.LinuxARMMutable, (Target.Endian.little,)),
(sc.LinuxARMMutable, (Target.Endian.big,)),
(sc.LinuxARMStack, ()),
(sc.LinuxARMStack, (Target.Endian.little,)),
(sc.LinuxARMStack, (Target.Endian.big,)),
(sc.LinuxARMThumbMutable, ()),
(sc.LinuxARMThumbMutable, (Target.Endian.little,)),
(sc.LinuxARMThumbMutable, (Target.Endian.big,)),
(sc.LinuxARMThumbStack, ()),
(sc.LinuxARMThumbStack, (Target.Endian.little,)),
(sc.LinuxARMThumbStack, (Target.Endian.big,)),
(sc.LinuxARMThumbMixedMutable, ()),
(sc.LinuxARMThumbMixedMutable, (Target.Endian.little,)),
(sc.LinuxARMThumbMixedMutable, (Target.Endian.big,)),
(sc.LinuxARMThumbMixedStack, ()),
(sc.LinuxARMThumbMixedStack, (Target.Endian.little,)),
(sc.LinuxARMThumbMixedStack, (Target.Endian.big,)),
(sc.LinuxAArch64Mutable, ()),
(sc.LinuxAArch64Mutable, (Target.Endian.little,)),
(sc.LinuxAArch64Mutable, (Target.Endian.big,)),
]
@pytest.mark.parametrize(('env_type', 'env_args'), SHELLCODE_ENVS)
def test_shellcode_env_compile_simple(env_type, env_args):
env = env_type(*env_args)
env.compile([
env.sys_exit(0),
])
@pytest.mark.parametrize(('env_type', 'env_args'), SHELLCODE_ENVS)
@pytest.mark.xfail(raises=RuntimeError,
reason='proper binutils missing on CI system')
def test_shellcode_env_assemble_simple(env_type, env_args):
env = env_type(*env_args)
env.assemble([
env.sys_exit(0),
])
@pytest.mark.parametrize(('env_type', 'env_args'), SHELLCODE_ENVS)
def test_shellcode_env_compile_complex(env_type, env_args):
env = env_type(*env_args)
buf = env.alloc_buffer(64)
env.compile([
sc.LoadRegister(env.SYSCALL_REG, 0xdeadcafe),
env.sys_read(0, buf, buf.length - 1),
env.sys_write(1, buf, buf.length - 1),
env.sys_setresuid(env.sys_geteuid(), env.sys_geteuid(), env.sys_geteuid()),
env.sys_execve(u'/bin/sh', [u'/bin/sh', u'-c', buf, None], None),
])
@pytest.mark.parametrize(('env_type', 'env_args'), SHELLCODE_ENVS)
@pytest.mark.xfail(raises=RuntimeError,
reason='proper binutils missing on CI system')
def test_shellcode_env_assemble_complex(env_type, env_args):
env = env_type(*env_args)
buf = env.alloc_buffer(64)
env.assemble([
sc.LoadRegister(env.SYSCALL_REG, 0xdeadcafe),
env.sys_read(0, buf, buf.length - 1),
env.sys_write(1, buf, buf.length - 1),
env.sys_setresuid(env.sys_geteuid(), env.sys_geteuid(), env.sys_geteuid()),
env.sys_execve(u'/bin/sh', [u'/bin/sh', u'-c', buf, None], None),
])
|
edibledinos/pwnypack
|
tests/test_shellcode.py
|
Python
|
mit
| 3,629
|
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:41879")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:41879")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Einsteinium address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Einsteinium address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
            print access.getwork(data)
        except:
            print access.getwork()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
        print "\n---Passphrase changed---\n"
    except:
        print "\n---An error occurred---\n"
else:
print "Command not found or not supported"
|
einsteinium/einsteinium
|
contrib/bitrpc/bitrpc.py
|
Python
|
mit
| 7,846
|
import errno
import os
import signal
import subprocess
import sys
import tempfile
import time
import mock
import unittest2
import testlib
import mitogen.parent
def wait_for_child(pid, timeout=1.0):
deadline = time.time() + timeout
    while time.time() < deadline:
try:
target_pid, status = os.waitpid(pid, os.WNOHANG)
if target_pid == pid:
return
except OSError:
e = sys.exc_info()[1]
if e.args[0] == errno.ECHILD:
return
time.sleep(0.05)
assert False, "wait_for_child() timed out"
class GetDefaultRemoteNameTest(testlib.TestCase):
func = staticmethod(mitogen.parent.get_default_remote_name)
@mock.patch('os.getpid')
@mock.patch('getpass.getuser')
@mock.patch('socket.gethostname')
def test_slashes(self, mock_gethostname, mock_getuser, mock_getpid):
# Ensure slashes appearing in the remote name are replaced with
# underscores.
mock_gethostname.return_value = 'box'
mock_getuser.return_value = 'ECORP\\Administrator'
mock_getpid.return_value = 123
self.assertEquals("ECORP_Administrator@box:123", self.func())
class WstatusToStrTest(testlib.TestCase):
func = staticmethod(mitogen.parent.wstatus_to_str)
def test_return_zero(self):
pid = os.fork()
if not pid:
os._exit(0)
(pid, status), _ = mitogen.core.io_op(os.waitpid, pid, 0)
self.assertEquals(self.func(status),
'exited with return code 0')
def test_return_one(self):
pid = os.fork()
if not pid:
os._exit(1)
(pid, status), _ = mitogen.core.io_op(os.waitpid, pid, 0)
self.assertEquals(
self.func(status),
'exited with return code 1'
)
def test_sigkill(self):
pid = os.fork()
if not pid:
time.sleep(600)
os.kill(pid, signal.SIGKILL)
(pid, status), _ = mitogen.core.io_op(os.waitpid, pid, 0)
self.assertEquals(
self.func(status),
'exited due to signal %s (SIGKILL)' % (signal.SIGKILL,)
)
# can't test SIGSTOP without POSIX sessions rabbithole
class ReapChildTest(testlib.RouterMixin, testlib.TestCase):
def test_connect_timeout(self):
# Ensure the child process is reaped if the connection times out.
stream = mitogen.parent.Stream(
router=self.router,
remote_id=1234,
old_router=self.router,
max_message_size=self.router.max_message_size,
python_path=testlib.data_path('python_never_responds.sh'),
connect_timeout=0.5,
)
self.assertRaises(mitogen.core.TimeoutError,
lambda: stream.connect()
)
wait_for_child(stream.pid)
e = self.assertRaises(OSError,
lambda: os.kill(stream.pid, 0)
)
self.assertEquals(e.args[0], errno.ESRCH)
class StreamErrorTest(testlib.RouterMixin, testlib.TestCase):
def test_direct_eof(self):
e = self.assertRaises(mitogen.core.StreamError,
lambda: self.router.local(
python_path='true',
connect_timeout=3,
)
)
prefix = "EOF on stream; last 300 bytes received: "
self.assertTrue(e.args[0].startswith(prefix))
def test_via_eof(self):
# Verify FD leakage does not keep failed process open.
local = self.router.fork()
e = self.assertRaises(mitogen.core.StreamError,
lambda: self.router.local(
via=local,
python_path='true',
connect_timeout=3,
)
)
s = "EOF on stream; last 300 bytes received: "
self.assertTrue(s in e.args[0])
def test_direct_enoent(self):
e = self.assertRaises(mitogen.core.StreamError,
lambda: self.router.local(
python_path='derp',
connect_timeout=3,
)
)
prefix = 'Child start failed: [Errno 2] No such file or directory'
self.assertTrue(e.args[0].startswith(prefix))
def test_via_enoent(self):
local = self.router.fork()
e = self.assertRaises(mitogen.core.StreamError,
lambda: self.router.local(
via=local,
python_path='derp',
connect_timeout=3,
)
)
s = 'Child start failed: [Errno 2] No such file or directory'
self.assertTrue(s in e.args[0])
class ContextTest(testlib.RouterMixin, unittest2.TestCase):
def test_context_shutdown(self):
local = self.router.local()
pid = local.call(os.getpid)
local.shutdown(wait=True)
wait_for_child(pid)
self.assertRaises(OSError, lambda: os.kill(pid, 0))
class OpenPtyTest(testlib.TestCase):
func = staticmethod(mitogen.parent.openpty)
def test_pty_returned(self):
master_fd, slave_fd = self.func()
self.assertTrue(isinstance(master_fd, int))
self.assertTrue(isinstance(slave_fd, int))
os.close(master_fd)
os.close(slave_fd)
@mock.patch('os.openpty')
def test_max_reached(self, openpty):
openpty.side_effect = OSError(errno.ENXIO)
e = self.assertRaises(mitogen.core.StreamError,
lambda: self.func())
msg = mitogen.parent.OPENPTY_MSG % (openpty.side_effect,)
self.assertEquals(e.args[0], msg)
class TtyCreateChildTest(unittest2.TestCase):
func = staticmethod(mitogen.parent.tty_create_child)
def test_dev_tty_open_succeeds(self):
# In the early days of UNIX, a process that lacked a controlling TTY
# would acquire one simply by opening an existing TTY. Linux and OS X
# continue to follow this behaviour, however at least FreeBSD moved to
# requiring an explicit ioctl(). Linux supports it, but we don't yet
# use it there and anyway the behaviour will never change, so no point
# in fixing things that aren't broken. Below we test that
# getpass-loving apps like sudo and ssh get our slave PTY when they
# attempt to open /dev/tty, which is what they both do on attempting to
# read a password.
tf = tempfile.NamedTemporaryFile()
try:
pid, fd, _ = self.func([
'bash', '-c', 'exec 2>%s; echo hi > /dev/tty' % (tf.name,)
])
deadline = time.time() + 5.0
for line in mitogen.parent.iter_read([fd], deadline):
self.assertEquals(mitogen.core.b('hi\n'), line)
break
waited_pid, status = os.waitpid(pid, 0)
self.assertEquals(pid, waited_pid)
self.assertEquals(0, status)
self.assertEquals(mitogen.core.b(''), tf.read())
finally:
tf.close()
class IterReadTest(unittest2.TestCase):
func = staticmethod(mitogen.parent.iter_read)
def make_proc(self):
args = [testlib.data_path('iter_read_generator.sh')]
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
mitogen.core.set_nonblock(proc.stdout.fileno())
return proc
def test_no_deadline(self):
proc = self.make_proc()
try:
reader = self.func([proc.stdout.fileno()])
for i, chunk in enumerate(reader, 1):
self.assertEqual(i, int(chunk))
if i > 3:
break
finally:
proc.terminate()
def test_deadline_exceeded_before_call(self):
proc = self.make_proc()
reader = self.func([proc.stdout.fileno()], 0)
try:
got = []
try:
for chunk in reader:
got.append(chunk)
assert 0, 'TimeoutError not raised'
except mitogen.core.TimeoutError:
self.assertEqual(len(got), 0)
finally:
proc.terminate()
def test_deadline_exceeded_during_call(self):
proc = self.make_proc()
reader = self.func([proc.stdout.fileno()], time.time() + 0.4)
try:
got = []
try:
for chunk in reader:
got.append(chunk)
assert 0, 'TimeoutError not raised'
except mitogen.core.TimeoutError:
# Give a little wiggle room in case of imperfect scheduling.
                # Ideal number should be 4.
self.assertLess(3, len(got))
self.assertLess(len(got), 5)
finally:
proc.terminate()
class WriteAllTest(unittest2.TestCase):
func = staticmethod(mitogen.parent.write_all)
def make_proc(self):
args = [testlib.data_path('write_all_consumer.sh')]
proc = subprocess.Popen(args, stdin=subprocess.PIPE)
mitogen.core.set_nonblock(proc.stdin.fileno())
return proc
ten_ms_chunk = (mitogen.core.b('x') * 65535)
def test_no_deadline(self):
proc = self.make_proc()
try:
self.func(proc.stdin.fileno(), self.ten_ms_chunk)
finally:
proc.terminate()
def test_deadline_exceeded_before_call(self):
proc = self.make_proc()
try:
self.assertRaises(mitogen.core.TimeoutError, (
lambda: self.func(proc.stdin.fileno(), self.ten_ms_chunk, 0)
))
finally:
proc.terminate()
def test_deadline_exceeded_during_call(self):
proc = self.make_proc()
try:
deadline = time.time() + 0.1 # 100ms deadline
self.assertRaises(mitogen.core.TimeoutError, (
lambda: self.func(proc.stdin.fileno(),
self.ten_ms_chunk * 100, # 1s of data
deadline)
))
finally:
proc.terminate()
if __name__ == '__main__':
unittest2.main()
|
ConnectBox/wifi-test-framework
|
ansible/plugins/mitogen-0.2.3/tests/parent_test.py
|
Python
|
mit
| 10,015
|
from __future__ import absolute_import, unicode_literals
import logging
from .lowlevel import batches
from .queue import delete_queues
from .conf import settings
logger = logging.getLogger(__name__)
class ChinupMiddleware(object):
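    # Old-style Django middleware: clears any queued chinup requests before a
    # request is handled and, in DEBUG mode, logs how many requests were
    # batched while handling it.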
def process_request(self, request):
delete_queues()
def process_response(self, request, response):
if settings.DEBUG and batches:
logger.info("%d requests in %d batches",
sum(len(b) for b in batches),
len(batches))
batches[:] = []
return response
|
pagepart/chinup
|
chinup/middleware.py
|
Python
|
mit
| 590
|
from __future__ import unicode_literals
import base64
import logging
import six
import sys
from requestlogger import ApacheFormatter
from sys import stderr
from werkzeug import urls
# The joy of version splintering.
if sys.version_info[0] < 3:
from urllib import urlencode
else:
from urllib.parse import urlencode
BINARY_METHODS = [
"POST",
"PUT",
"PATCH",
"DELETE",
"CONNECT",
"OPTIONS"
]
def create_wsgi_request(event_info,
server_name='zappa',
script_name=None,
trailing_slash=True,
binary_support=False,
context_header_mappings={}
):
"""
Given some event_info via API Gateway,
create and return a valid WSGI request environ.
"""
method = event_info['httpMethod']
params = event_info['pathParameters']
    query = event_info['queryStringParameters'] # APIGW won't allow multiple entries, e.g. ?id=a&id=b
headers = event_info['headers'] or {} # Allow for the AGW console 'Test' button to work (Pull #735)
if context_header_mappings:
for key, value in context_header_mappings.items():
parts = value.split('.')
header_val = event_info['requestContext']
for part in parts:
if part not in header_val:
header_val = None
break
else:
header_val = header_val[part]
if header_val is not None:
headers[key] = header_val
# Extract remote user from context if Authorizer is enabled
remote_user = None
if event_info['requestContext'].get('authorizer'):
remote_user = event_info['requestContext']['authorizer'].get('principalId')
elif event_info['requestContext'].get('identity'):
remote_user = event_info['requestContext']['identity'].get('userArn')
# Related: https://github.com/Miserlou/Zappa/issues/677
# https://github.com/Miserlou/Zappa/issues/683
# https://github.com/Miserlou/Zappa/issues/696
# https://github.com/Miserlou/Zappa/issues/836
# https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Summary_table
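    # Only methods that may carry a request body are candidates for binary
    # handling; API Gateway marks binary payloads with 'isBase64Encoded'.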
if binary_support and (method in BINARY_METHODS):
if event_info.get('isBase64Encoded', False):
encoded_body = event_info['body']
body = base64.b64decode(encoded_body)
else:
body = event_info['body']
if isinstance(body, six.string_types):
body = body.encode("utf-8")
else:
body = event_info['body']
if isinstance(body, six.string_types):
body = body.encode("utf-8")
# Make header names canonical, e.g. content-type => Content-Type
for header in headers.keys():
canonical = header.title()
if canonical != header:
headers[canonical] = headers.pop(header)
path = urls.url_unquote(event_info['path'])
if query:
query_string = urlencode(query)
else:
query_string = ""
x_forwarded_for = headers.get('X-Forwarded-For', '')
if ',' in x_forwarded_for:
# The last one is the cloudfront proxy ip. The second to last is the real client ip.
# Everything else is user supplied and untrustworthy.
remote_addr = x_forwarded_for.split(', ')[-2]
else:
remote_addr = '127.0.0.1'
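    # Assemble the PEP 3333 WSGI environ from the pieces extracted above.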
environ = {
'PATH_INFO': get_wsgi_string(path),
'QUERY_STRING': get_wsgi_string(query_string),
'REMOTE_ADDR': remote_addr,
'REQUEST_METHOD': method,
'SCRIPT_NAME': get_wsgi_string(str(script_name)) if script_name else '',
'SERVER_NAME': str(server_name),
'SERVER_PORT': headers.get('X-Forwarded-Port', '80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': headers.get('X-Forwarded-Proto', 'http'),
'wsgi.input': body,
'wsgi.errors': stderr,
'wsgi.multiprocess': False,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
# Input processing
if method in ["POST", "PUT", "PATCH", "DELETE"]:
if 'Content-Type' in headers:
environ['CONTENT_TYPE'] = headers['Content-Type']
# This must be Bytes or None
environ['wsgi.input'] = six.BytesIO(body)
if body:
environ['CONTENT_LENGTH'] = str(len(body))
else:
environ['CONTENT_LENGTH'] = '0'
for header in headers:
wsgi_name = "HTTP_" + header.upper().replace('-', '_')
environ[wsgi_name] = str(headers[header])
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if script_name in path_info:
            environ['PATH_INFO'] = path_info.replace(script_name, '')
if remote_user:
environ['REMOTE_USER'] = remote_user
if event_info['requestContext'].get('authorizer'):
environ['API_GATEWAY_AUTHORIZER'] = event_info['requestContext']['authorizer']
return environ
def common_log(environ, response, response_time=None):
"""
Given the WSGI environ and the response,
log this event in Common Log Format.
"""
logger = logging.getLogger()
if response_time:
formatter = ApacheFormatter(with_response_time=True)
try:
log_entry = formatter(response.status_code, environ,
len(response.content), rt_us=response_time)
except TypeError:
# Upstream introduced a very annoying breaking change on the rt_ms/rt_us kwarg.
log_entry = formatter(response.status_code, environ,
len(response.content), rt_ms=response_time)
else:
formatter = ApacheFormatter(with_response_time=False)
log_entry = formatter(response.status_code, environ,
len(response.content))
logger.info(log_entry)
return log_entry
# Related: https://github.com/Miserlou/Zappa/issues/1199
def get_wsgi_string(string, encoding='utf-8'):
"""
Returns wsgi-compatible string
"""
return string.encode(encoding).decode('iso-8859-1')
|
anush0247/Zappa
|
zappa/wsgi.py
|
Python
|
mit
| 6,777
|
from __future__ import division
import numpy as np
import pandas as pd
from multiprocessing import Pool
from matplotlib import pyplot as plt
def load_panel(a):
a = pd.read_pickle(a)
return a
def time_index(a):
a = a.reindex(index=a.index.to_datetime())
return a
def resamp(a):
a = a.resample('10T')
return a
def magnitude(a):
mag = np.sqrt(a['u']**2+a['v']**2+a['w']**2)
return mag
def std(a):
a = a.resample('10T',how='std')
return a
if __name__ == "__main__":
f0 = '/home/aidan/thesis/probe_data/panels/2013/june_july/GP-130621-BPb-hr'
f1 = '/home/aidan/thesis/probe_data/panels/2013/june_july/GP-130621-24-BPb_sigmas'
el = '/home/aidan/thesis/probe_data/panels/2013/june_july/GP-130621-24-BPb_els'
el = pd.read_pickle(el)
el = el.reindex(index=el.index.to_datetime())
el = el.resample('10T')
f = [f0,f1]
f = Pool().map(load_panel,f)
bins = f[0].minor_axis[:].values
sigma = np.array([0.98999,0.94999,0.86999,0.74999,0.58999,0.41000,0.25000,0.13000,0.05000,0.01000])
f = Pool().map(magnitude,f)
f = Pool().map(time_index,f)
f1 = f[1].shift(-20,freq='T')
f = [f[0],f1]
d = Pool().map(resamp,f)
s = Pool().map(std,f)
print d
time = '2013-06-21 14:50:00'
d = [d[0].loc[time],d[1].loc[time]]
s = [s[0].loc[time],s[1].loc[time]]
el = el.loc[time]
d[0][20.61:] = np.nan
sigma = sigma*el.values
rms = 0.26
width = '4'
fig,ax = plt.subplots()
plt.rc('font',size='22')
ax.errorbar(d[1].values, el[::-1],linewidth=width, xerr=s[1],label='FVCOM')
ax.set_ylim([0,np.ceil(np.max(el))])#.values)])
ax.grid(True)
ax.set_ylabel(r'FVCOM Elevation $(m)$')
ax.set_xlabel(r'Velocity $(m/s)$')
ax.legend(loc=3,fontsize='18')
ax1 = ax.twinx()
ax1.errorbar(d[0], bins, xerr=s[0],linewidth=width,color='red',label='ADCP')
ax1.set_ylim([0,np.ceil(np.max(el))])#.values)])
ax1.set_ylabel(r'ADCP Elevation $(m)$')
ax1.legend(loc=2,fontsize='18')
plt.show()
|
Aidan-Bharath/code_and_stuffs
|
profiles.py
|
Python
|
mit
| 2,042
|
from setuptools import setup
setup(
name="deferred2",
version='0.0.1',
description='Successor of the deferred library shipped with Google AppEngine (GAE)',
long_description=open('README.rst').read(),
license='MIT',
author='herr kaste',
author_email='herr.kaste@gmail.com',
url='https://github.com/kaste/deferred2',
platforms=['linux', 'osx', 'win32'],
packages = ['deferred2'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Utilities',
'Programming Language :: Python',
],
)
|
kaste/deferred2
|
setup.py
|
Python
|
mit
| 777
|
import numpy as np
import matplotlib.pyplot as plt
fig, ((ax00, ax01, ax02), (ax10, ax11, ax12)) = plt.subplots(nrows=2, ncols=3, sharey=True)
x = np.arange(4)
ax00.plot(x, x, 'ro--')
ax01.plot(x, x**1.5, 'g^-.')
ax02.plot(x, x**2, 'bs:')
ax10.bar(x, x + 1, width=0.5, align='center', color='r')
ax11.bar(x, x**1.5 + 1, width=0.5, align='center', color='g')
ax12.bar(x, x**2 + 1, width=0.5, align='center', color='b')
plt.rcParams['font.size'] = 10
plt.tight_layout()
plt.savefig("data/dst/matplotlib_example_multi.png")
|
nkmk/python-snippets
|
notebook/matplotlib_example_multi.py
|
Python
|
mit
| 526
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# admin.py
#
# Copyright 2014 Gary Dalton <gary@ggis.biz>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
"""
Module admin.py documentation
Sets up the admin for Twenty47
"""
from flask import Blueprint, request, redirect, render_template, url_for, flash, abort
from flask.views import MethodView
from flask.ext.mongoengine.wtf import model_form
from flask.ext.security import login_required, roles_required, script
from twenty47.models import Role, User, user_datastore, Subscriber, \
IncidentType, UnitsImpacted, AssistanceRequested
from twenty47 import app, debug, subscription_updated, sns_error
from twenty47.utils import get_serializer, update_user_subscriptions
from itsdangerous import BadSignature
admin = Blueprint('admin', __name__, template_folder='templates')
class Pager(MethodView):
decorators = [login_required, roles_required('Admin')]
cls = User
def get(self, page=1):
users = self.cls.objects.paginate(page=page, per_page=20)
return render_template('admin/pager.html', users=users)
class List(MethodView):
decorators = [login_required, roles_required('Admin')]
clsRole = Role
clsIncidentTypes = IncidentType
clsUnitsImpacted = UnitsImpacted
clsAssistanceRequested = AssistanceRequested
def get(self):
roles = self.clsRole.objects.all()
incidenttypes = self.clsIncidentTypes.objects.all().order_by('order')
unitsimpacted = self.clsUnitsImpacted.objects.all().order_by('order')
assistancerequested = self.clsAssistanceRequested.objects.all().order_by('order')
context = {
"roles": roles,
"incidenttypes": incidenttypes,
"unitsimpacted": unitsimpacted,
"assistancerequested": assistancerequested,
}
return render_template('admin/list.html', **context)
class Detail(MethodView):
decorators = [login_required, roles_required('Admin')]
def get_context(self, uid=None, action=None):
form_cls = model_form(User, field_args = {
'firstName': {'label': 'First Name'},
'lastName': {'label': 'Last Name'},
})
del form_cls.created_at
del form_cls.modified_at
del form_cls.active
del form_cls.confirmed_at
del form_cls.last_login_at
del form_cls.current_login_at
del form_cls.last_login_ip
del form_cls.current_login_ip
del form_cls.login_count
del form_cls.subscription
if uid:
del form_cls.password
user = User.objects.get_or_404(id=uid)
if request.method == 'POST':
form = form_cls(request.form, inital=user._data)
else:
form = form_cls(obj=user)
else:
user = User()
form = form_cls(request.form)
        form.roles.label_attr = 'name'
context = {
"user": user,
"form": form,
"create": uid is None
}
return context
def get(self, uid):
context = self.get_context(uid)
return render_template('admin/detail.html', **context)
def post(self, uid):
context = self.get_context(uid)
form = context.get('form')
try:
if request.form['action'] == "none":
if set_subscription_status(context.get('user'), "NONE"):
return "True"
else:
return(app.config['DISPATCH_ERROR_GENERAL'])
elif request.form['action'] == "denied":
if set_subscription_status(context.get('user'), "DENIED"):
return "True"
else:
return(app.config['DISPATCH_ERROR_GENERAL'])
elif request.form['action'] == "pending":
if set_subscription_status(context.get('user'), "PENDING"):
return "True"
else:
return(app.config['DISPATCH_ERROR_GENERAL'])
elif request.form['action'] == "approved":
if set_subscription_status(context.get('user'), "APPROVED"):
return "True"
else:
return(app.config['DISPATCH_ERROR_GENERAL'])
elif request.form['action'] == "activate":
user_datastore.activate_user(user=context.get('user'))
return "True"
elif request.form['action'] == "deactivate":
user_datastore.deactivate_user(user=context.get('user'))
return "True"
else:
flash("Action failed, " + request.form['action'], 'danger')
return redirect(url_for('admin.user_list'))
except KeyError, e:
pass
if form.validate():
user = context.get('user')
form.populate_obj(user)
if uid:
user.save()
else:
user_datastore.create_user(firstName=user.firstName,
lastName=user.lastName, password=user.password,
email=user.email, comments=user.comments)
return redirect(url_for('admin.user_list'))
return render_template('admin/detail.html', **context)
class Remove(MethodView):
decorators = [login_required, roles_required('Admin')]
def get_context(self, id=None, action=None):
if id:
if action == "user":
target = User.objects.get_or_404(id=id)
form_cls = model_form(User, field_args = {
'firstName': {'label': 'First Name'},
'lastName': {'label': 'Last Name'},
})
del form_cls.roles
del form_cls.password
del form_cls.subscription
if request.method == 'POST':
form = form_cls(request.form, inital=target._data)
else:
form = form_cls(obj=target)
elif action == "role":
target = Role.objects.get_or_404(id=id)
form_cls = model_form(Role)
if request.method == 'POST':
form = form_cls(request.form, inital=target._data)
else:
form = form_cls(obj=target)
#form.roles.label_attr='name'
context = {
"action": action,
"target": target,
"form": form,
}
return context
else:
flash("Action failed, " + request.form['action'], 'danger')
return redirect(url_for('admin.user_list'))
def get(self, id, action):
context = self.get_context(id, action)
return render_template('admin/remove.html', **context)
def post(self, id, action):
context = self.get_context(id, action)
form = context.get('form')
if form.validate():
target = context.get('target')
if action == "user":
user_datastore.delete_user(target)
elif action == "role":
pass
return redirect(url_for('admin.user_list'))
class RoleDetail(MethodView):
decorators = [login_required, roles_required('Admin')]
def get_context(self, id=None):
form_cls = model_form(Role)
del form_cls.created_at
del form_cls.modified_at
if id:
role = Role.objects.get_or_404(id=id)
if request.method == 'POST':
form = form_cls(request.form, inital=role._data)
else:
form = form_cls(obj=role)
else:
role = Role()
form = form_cls(request.form)
context = {
"role": role,
"form": form,
"create": id is None
}
return context
def get(self, id):
context = self.get_context(id)
return render_template('admin/roledetail.html', **context)
def post(self, id):
context = self.get_context(id)
form = context.get('form')
if form.validate():
role = context.get('role')
form.populate_obj(role)
if id:
role.save()
else:
user_datastore.create_role(name=role.name,
description=role.description)
flash("Role saved", 'success')
return redirect(url_for('admin.index'))
return render_template('admin/roledetail.html', **context)
class RemoteUserAdmin(MethodView):
def get(self, payload):
s = get_serializer()
try:
paystr = s.loads(payload, max_age= app.config['DISPATCH_MAX_TOKEN_AGE'])
            listload = paystr.split(',')
user_id = listload[0]
action = listload[1]
except BadSignature, e:
abort(500)
return
print user_id
print action
user = User.objects.get_or_404(id=user_id)
if action == "deactivate":
# OK
user_datastore.deactivate_user(user)
completed_action = "Deactivated."
elif action == "makeuser":
# OK
user_datastore.add_role_to_user(user, user_datastore.find_or_create_role("User"))
completed_action = "has User role."
elif action == "makeregistered":
# OK
user_datastore.add_role_to_user(user, user_datastore.find_or_create_role("Registered"))
completed_action = "has Registered role."
elif action == "subscriptiondeny":
# OK
set_subscription_status(user, "DENIED")
completed_action = "subscriptions are denied."
elif action == "subscriptionapprove":
# OK
set_subscription_status(user, "APPROVED")
completed_action = "subscriptions are approved."
else:
flash(app.config['DISPATCH_ERROR_REMOTEADMIN'], 'danger')
return redirect(url_for('dispatch.index'))
flash(user.firstName + " " + user.lastName + " " + completed_action, 'success')
return redirect(url_for('dispatch.index'))
class DetailFactory(MethodView):
decorators = [login_required, roles_required('Admin')]
def get_context(self, id=None, action=None):
if action == "IncidentType":
form_cls = model_form(IncidentType)
if id:
target = IncidentType.objects.get_or_404(id=id)
if request.method == 'POST':
form = form_cls(request.form, inital=target._data)
else:
form = form_cls(obj=target)
else:
target = IncidentType()
form = form_cls(request.form)
elif action == "UnitsImpacted":
form_cls = model_form(UnitsImpacted)
if id:
target = UnitsImpacted.objects.get_or_404(id=id)
if request.method == 'POST':
form = form_cls(request.form, inital=target._data)
else:
form = form_cls(obj=target)
else:
target = UnitsImpacted()
form = form_cls(request.form)
elif action == "AssistanceRequested":
form_cls = model_form(AssistanceRequested)
if id:
target = AssistanceRequested.objects.get_or_404(id=id)
if request.method == 'POST':
form = form_cls(request.form, inital=target._data)
else:
form = form_cls(obj=target)
else:
target = AssistanceRequested()
form = form_cls(request.form)
context = {
"action": action,
"target": target,
"form": form,
"create": id is None
}
return context
def get(self, action, id):
context = self.get_context(id, action)
return render_template('admin/detail_factory.html', **context)
def post(self, id, action):
context = self.get_context(id, action)
form = context.get('form')
if form.validate():
target = context.get('target')
form.populate_obj(target)
target.save()
flash(target.name + " saved", 'success')
return redirect(url_for('admin.index'))
return render_template('admin/detail_factory.html', **context)
class RemoveFactory(MethodView):
decorators = [login_required, roles_required('Admin')]
def get_context(self, id=None, action=None):
if id:
if action == "IncidentType":
form_cls = model_form(IncidentType)
target = IncidentType.objects.get_or_404(id=id)
elif action == "UnitsImpacted":
form_cls = model_form(UnitsImpacted)
target = UnitsImpacted.objects.get_or_404(id=id)
elif action == "AssistanceRequested":
form_cls = model_form(AssistanceRequested)
target = AssistanceRequested.objects.get_or_404(id=id)
else:
flash("Action failed, " + request.form['action'], 'danger')
return redirect(url_for('admin.index'))
if request.method == 'POST':
form = form_cls(request.form, inital=target._data)
else:
form = form_cls(obj=target)
context = {
"action": action,
"target": target,
"form": form,
"create": id is None
}
return context
else:
flash("Action failed, " + request.form['action'], 'danger')
return redirect(url_for('admin.index'))
def get(self, id, action):
context = self.get_context(id, action)
return render_template('admin/remove_factory.html', **context)
def post(self, id, action):
context = self.get_context(id, action)
form = context.get('form')
if form.validate():
target = context.get('target')
target.delete()
flash("Target deleted", 'success')
return redirect(url_for('admin.index'))
def enable_subscription(user):
'''
Deprecated
'''
user.subscription.enabled = True
if user.save():
update_user_subscriptions(user)
return True
return False
def disable_subscription(user):
'''
Deprecated
'''
user.subscription.enabled = False
if user.save():
return True
return False
def set_subscription_status(user, status):
if not user.subscription:
target = Subscriber()
user.subscription = target
user.subscription.status = status
if user.save():
subscription_updated.send(app, user=user)
return True
return False
# Register the urls
admin.add_url_rule('/admin/', view_func=List.as_view('index'))
admin.add_url_rule('/admin/users/', defaults={'page': 1}, view_func=Pager.as_view('user_list'))
admin.add_url_rule('/admin/users/page/<int:page>', view_func=Pager.as_view('user_pager'))
admin.add_url_rule('/admin/userCreate/', defaults={'uid': None}, view_func=Detail.as_view('user_create'))
admin.add_url_rule('/admin/userEdit/<uid>/', view_func=Detail.as_view('user_edit'))
admin.add_url_rule('/admin/roleCreate/', defaults={'id': None}, view_func=RoleDetail.as_view('role_create'))
admin.add_url_rule('/admin/roleEdit/<id>/', view_func=RoleDetail.as_view('role_edit'))
admin.add_url_rule('/admin/remove/<id>/<action>', view_func=Remove.as_view('remove'))
admin.add_url_rule('/admin/remoteAdmin/<payload>', view_func=RemoteUserAdmin.as_view('remote_admin'))
admin.add_url_rule('/admin/create/<action>', defaults={'id': None}, view_func=DetailFactory.as_view('create'))
admin.add_url_rule('/admin/update/<action>/<id>', view_func=DetailFactory.as_view('update'))
admin.add_url_rule('/admin/delete/<action>/<id>', view_func=RemoveFactory.as_view('delete'))
|
gary-dalton/Twenty47
|
twenty47/admin.py
|
Python
|
mit
| 17,181
|
class ClientProcess(object):
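    # Simulated client: at tick 0 it sends 'set' to the first live server,
    # follows a successful set with a 'get', and on timeout marks the server
    # dead and retries the in-flight command against the next live one.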
def __init__(self, proc_id, server_ids, timeout, set_value=1):
self._id = proc_id
self._server_ids = server_ids
self._server_states = dict((svr_id, True) for svr_id in server_ids)
self._sent_requests = {}
self._timeout = timeout
self._set_value = set_value
def process(self, _input, time):
if time == 0:
for svr_id in self._server_ids:
if self._server_states[svr_id]:
self._sent_requests[svr_id] = (time, 'set')
return {svr_id: ('set', self._set_value)}
assert False
for svr_id, msgs in _input.iteritems():
assert svr_id in self._sent_requests
assert time - self._sent_requests[svr_id][0] < self._timeout
if self._sent_requests[svr_id][1] == 'set':
assert msgs == ['set success']
print 'set success'
self._sent_requests[svr_id] = (time, 'get')
return {svr_id: ('get',)}
else:
assert msgs == [self._set_value]
print 'get result', msgs
self._sent_requests = {}
# the flow is over here
return None
output = {}
timeout_svr = None
for svr_id, (sent_time, cmd) in self._sent_requests.iteritems():
if time - sent_time >= self._timeout:
self._server_states[svr_id] = False
timeout_svr = svr_id
print 'server_%s has timeout' % svr_id
for server_id in self._server_ids:
if self._server_states[server_id]:
if cmd == 'set':
self._sent_requests[server_id] = (time, 'set')
output = {server_id: ('set', self._set_value)}
break
else:
self._sent_requests[server_id] = (time, 'get')
output = {server_id: ('get',)}
break
if output:
break
if output:
del self._sent_requests[timeout_svr]
return output
def get_id(self):
return self._id
class ServerProcess(object):
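    # Simulated replica server: a client 'set' is forwarded to every other
    # replica as 'setreplica' (driven by the set_value coroutine) and only
    # acknowledged to the client once the outcome is known.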
def __init__(self, proc_id, replica_ids, timeout):
self._id = proc_id
self._replica_ids = replica_ids
self._replica_ids.remove(proc_id)
self._timeout = timeout
self._value = None
self._replica_routine = None
self._src_id = None
def process(self, _input, time):
print 'ServerProcess_%d[%d] recv: %s' % (self._id, time, _input)
output = {}
is_routine_called = False
for src_id, msgs in _input.iteritems():
if src_id in self._replica_ids: # request from other replica
if 'setreplica success' in msgs or 'setreplica failed' in msgs:
if self._replica_routine is not None:
is_routine_called = True
try:
self._replica_routine.send((src_id, msgs[0]))
except StopIteration:
self._replica_routine = None
if self._value is not None:
output[self._src_id] = 'set success'
else:
output[self._src_id] = 'set failed'
self._src_id = None
continue
else:
for m in msgs:
if m[0] == 'setreplica':
if self._value is None and self._replica_routine is None:
self._value = m[1]
output[src_id] = 'setreplica success'
else:
output[src_id] = 'setreplica failed'
else: # request from client
for m in msgs:
if m[0] == 'set':
if self._value is None:
if self._replica_routine is not None:
output[src_id] = 'set failed'
else:
assert self._replica_routine is None
assert self._src_id is None
self._src_id = src_id
self._replica_routine = self.set_value(m[1], time)
msg = self._replica_routine.next()
for replica_id in self._replica_ids:
output[replica_id] = msg
else:
output[src_id] = 'set failed'
elif m[0] == 'get':
if self._value is not None:
output[src_id] = self._value
else:
output[src_id] = 'get failed'
if not is_routine_called and self._replica_routine:
try:
self._replica_routine.send(None)
except StopIteration:
self._replica_routine = None
if self._value is not None:
output[self._src_id] = 'set success'
else:
output[self._src_id] = 'set failed'
self._src_id = None
return output
# this is a generator
def set_value(self, value, start_time):
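        # Coroutine driven by process(): first yields the 'setreplica'
        # message to broadcast, then is resumed once per tick with either
        # (replica_id, reply) or None; the value is committed only if every
        # replica acknowledges before the timeout expires.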
success_count = 0
replica_set_states = dict((replica_id, None) for replica_id in self._replica_ids)
result = yield ('setreplica', value)
cur_time = start_time + 1
while True:
if result is None:
if cur_time - start_time >= self._timeout:
return # failed
else:
replica_id, msg = result
assert replica_set_states[replica_id] is None
if msg == 'setreplica success':
replica_set_states[replica_id] = True
success_count += 1
else:
return # failed
if success_count == len(replica_set_states):
break
result = yield
cur_time += 1
self._value = value
return
def get_id(self):
return self._id
processes = [ClientProcess(0, [1, 2], 5),
ServerProcess(1, [1, 2], 3), ServerProcess(2, [1, 2], 3)]
links = {(0, 1): 1, (0, 2): 1, (1, 2): 1}
commands = ['next 5', 'status', 'next 3', 'status']
|
airekans/paxosim
|
simple/two.py
|
Python
|
mit
| 6,862
|
"""
A sample desktop application using the raumfeld library
"""
import time
import raumfeld
from PySide import QtCore, QtGui
from raumfeld_desktop import __version__
from .mainwindow_ui import Ui_MainWindow as Ui
class SearchThread(QtCore.QThread):
devices_found = QtCore.Signal(list)
def run(self):
devices = raumfeld.discover()
self.devices_found.emit(devices)
for device in devices:
print(device.location)
class DeviceControlThread(QtCore.QThread):
volume_infos = QtCore.Signal(bool, int)
    def __init__(self):
        super(DeviceControlThread, self).__init__()
        self.device = None
        self._volume = None
        self._mute = None
        self._needs_update_flag = False
        self.exit_flag = False
    def run(self):
        """
        Poll volume and mute settings, pushing any pending change to the device
        """
        while not self.exit_flag:
            if self.device is None:
                # no device attached yet; sleep instead of busy-spinning
                time.sleep(1.0)
                continue
            if self._needs_update_flag:
                # push pending changes (assumes the device's mute property is
                # settable, mirroring the volume property used below)
                if self._volume is not None:
                    self.device.volume = self._volume
                if self._mute is not None:
                    self.device.mute = self._mute
            else:
                self.volume_infos.emit(self.device.mute, self.device.volume)
            self._needs_update_flag = False
            time.sleep(1.0)
def set_volume(self, volume):
self._volume = volume
self._needs_update_flag = True
def set_mute(self, mute):
self._mute = mute
self._needs_update_flag = True
class MainWindow(QtGui.QMainWindow):
"""
Raumfeld Desktop Application
"""
search_for_devices = QtCore.Signal()
def __init__(self, parent=None):
"""
Initializes the application's UI
"""
super(MainWindow, self).__init__()
self.ui = Ui()
self.ui.setupUi(self)
self.setWindowTitle('Raumfeld v%s' % __version__)
self.setEnabled(False)
# the dial has factor 10 more ticks than the slider
self.ui.dialVolume.setMinimum(0)
self.ui.dialVolume.setMaximum(1000)
self.ui.dialVolume.setTicksPerRotation(100)
# open a thread to search for devices
self.search_thread = SearchThread()
self.search_thread.devices_found.connect(self.devices_found)
self.search_thread.start()
# create a worker thread to communicate with the device
self.device_thread = DeviceControlThread()
self.device_thread.volume_infos.connect(self.volume_infos)
# the device thread is started as soon as a device is found
def devices_found(self, devices):
"""Is called when the search thread finishes searching
"""
try:
device = devices[0]
self.ui.lblConnection.setText(device.friendly_name)
self.setEnabled(True)
self.device_thread.device = device
self.device_thread.start()
except IndexError:
# no devices found, search again
self.search_thread.start()
def volume_infos(self, mute, volume):
self.ui.dialVolume.setValue(volume * 10)
#
# View Callbacks
#
@QtCore.Slot(int)
def on_dialVolume_valueChanged(self, value):
self.ui.sliderVolume.setValue(value // 10)
self.device_thread.set_volume(value // 10)
@QtCore.Slot(int)
def on_sliderVolume_sliderMoved(self, value):
self.ui.dialVolume.setValue(value * 10)
@QtCore.Slot(bool)
def on_btnMute_toggled(self, checked):
self.device_thread.set_mute(checked)
self.ui.btnMute.setEnabled(False)
def closeEvent(self, event):
self.search_thread.wait()
self.device_thread.exit_flag = True
self.device_thread.wait()
|
tfeldmann/Raumfeld-Desktop
|
raumfeld_desktop/mainwindow.py
|
Python
|
mit
| 3,596
|
from pkg_resources import get_distribution
__version__ = get_distribution('cmddocs').version
|
noqqe/cmddocs
|
cmddocs/version.py
|
Python
|
mit
| 93
|
import os
from twilio.rest import Client
# put your own credentials here
# To set up environmental variables, see http://twil.io/secure
account_sid = os.environ['TWILIO_ACCOUNT_SID']
auth_token = os.environ['TWILIO_AUTH_TOKEN']
fax_sid = "FXaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
client = Client(account_sid, auth_token)
fax = client.fax.v1.faxes(sid=fax_sid).fetch()
print(fax.sid)
|
TwilioDevEd/api-snippets
|
fax/instance-get-example/instance-get-example.7.x.py
|
Python
|
mit
| 383
|
import logging
logging.basicConfig()
import bacon
bacon.window.resizable = True
font = bacon.Font('res/DejaVuSans.ttf', 64)
font2 = bacon.Font('res/DejaVuSans.ttf', 72)
runs = [
bacon.GlyphRun(bacon.Style(font), 'Hello, '),
bacon.GlyphRun(bacon.Style(font2, color=(1, 0.5, 0.5, 1)), 'Bacon'),
bacon.GlyphRun(bacon.Style(font), '!'),
]
glyph_layout = bacon.GlyphLayout(runs,
0,
bacon.window.height / 2,
width=bacon.window.width,
align=bacon.Alignment.center,
vertical_align=bacon.VerticalAlignment.center,
overflow=bacon.Overflow.wrap)
class Game(bacon.Game):
def on_tick(self):
bacon.clear(0, 0, 0, 1)
glyph_layout.y = bacon.window.height / 2
glyph_layout.width = bacon.window.width
bacon.draw_glyph_layout(glyph_layout)
bacon.run(Game())
|
aholkner/bacon
|
examples/style.py
|
Python
|
mit
| 818
|
import subprocess
def test_hypot():
subprocess.check_call(["python", "RunHypot.py"])
def test_matmul():
subprocess.check_call(["python", "Matmul.py", "-N", "10"])
def test_pisum():
subprocess.check_call(["python", "Pisum.py", "-N", "10000"])
|
scienceopen/python-performance
|
python_performance/tests/test_all.py
|
Python
|
mit
| 260
|
class Solution:
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
num1 = {}
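        # Single pass with a value -> index map: for each number, check
        # whether its complement (target - value) was already seen. O(n).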
for count, value in enumerate(nums):
if target - value in num1:
return [num1[target - value], count]
num1[value] = count
return []
# Test
if __name__=="__main__":
nums = [2,7,11,15]
target = 9
print(Solution().twoSum(nums, target))
|
rukashi10/LeetCode_Practice
|
Easy/#1 TwoSum.py
|
Python
|
mit
| 476
|
__author__ = 'mouton'
import os
import stateMachine
import shutil
exportMainFileName = 'exportMain.py'
def copyFiles(exportFolder, rootDir):
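    # Mirror rootDir into exportFolder, skipping the export folder itself and
    # replacing any directory that already exists at the destination.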
if not os.path.exists(exportFolder):
os.makedirs(exportFolder)
for fileName in os.listdir(rootDir):
if rootDir + '/' + fileName == exportFolder:
continue
if os.path.isdir(rootDir + '/' + fileName):
try:
shutil.copytree(rootDir + '/' + fileName, exportFolder + '/' + fileName)
except OSError:
shutil.rmtree(exportFolder + '/' + fileName)
shutil.copytree(rootDir + '/' + fileName, exportFolder + '/' + fileName)
else:
shutil.copy2(rootDir + '/' + fileName, exportFolder + '/' + fileName)
def writeStaticInfos(exportFolder):
exportFilesNames = ['stateMachine.py',
'database.py',
'utils/__init__.py',
'utils/dictSet.py',
'utils/mathutils.py',
'grammar/__init__.py',
'grammar/arithmeticExpressions.py',
'grammar/consequenceExpressions.py',
'grammar/keywords.py',
'grammar/triggerExpressions.py',
'game/__init__.py',
'game/gameWindow.py',
'game/Registeries/__init__.py',
'game/Registeries/LineRegistery.py',
'game/Registeries/OvalRegistery.py',
'game/Registeries/PolygonRegistery.py',
'game/Registeries/RectRegistery.py',
'game/Registeries/SoundRegistery.py',
'game/Registeries/SpriteRegistery.py',
'game/Registeries/TextRegistery.py']
try:
for exportFileName in exportFilesNames:
path = os.path.dirname(exportFolder + '/' + exportFileName)
if not os.path.exists(path):
os.makedirs(path)
importFileName = './' + exportFileName
with open(exportFolder + '/' + exportFileName, 'w') as exportFile:
with open(importFileName, 'r') as importFile:
export = True
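                    # Lines between a '# NO EXPORT' marker and the next
                    # '# EXPORT' marker are omitted from the exported copy.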
for line in importFile:
if '# NO EXPORT' in line:
export = False
continue
if '# EXPORT' in line:
export = True
continue
if export:
exportFile.write(line)
exportFile.write('\n\n')
except IOError as e:
print e
def writeImportInfos(exportFolderName):
try:
with open(exportFolderName + '/' + exportMainFileName, 'w') as exportMainFile:
exportMainFile.write('import stateMachine\n')
exportMainFile.write('from stateMachine import Transition\n')
exportMainFile.write('import game.gameWindow as gameWindow\n')
exportMainFile.write('from grammar.triggerExpressions import *\n')
exportMainFile.write('from grammar.consequenceExpressions import *\n')
exportMainFile.write('from grammar.arithmeticExpressions import *\n')
exportMainFile.write('from grammar.keywords import *\n')
exportMainFile.write('from utils.mathutils import sign\n')
exportMainFile.write('from random import random, randint\n')
exportMainFile.write('from math import cos, sin, tan, exp, log, floor, '
'ceil, acos, asin, atan, cosh, sinh, tanh, acosh, atanh, asinh\n')
exportMainFile.write('\n\n')
except IOError as e:
print e
pass
def writeVariableInfos(exportFolderName, initConsequences):
nodes = stateMachine.getNodes()
try:
with open(exportFolderName + '/' + exportMainFileName, 'a') as exportMainFile:
            exportMainFile.write('def _random(x):\n')
            exportMainFile.write('    return random() * x\n\n')
            exportMainFile.write('def _randint(x):\n')
            exportMainFile.write('    return randint(0, x - 1)\n\n')
def printNode(n):
exportMainFile.write('stateMachine.addNode(' + str(n.num) + ',\'' + n._name + '\')\n')
def printArc(a):
formula = arc.exportFormula()
consequence = arc.exportConsequences()
exportMainFile.write('Transition(stateMachine.getNodeByNum(' + str(a.n1.num) +
'), stateMachine.getNodeByNum(' + str(a.n2.num) + '), '
+ formula + ', ' + consequence + ', False)\n')
for node in nodes:
printNode(node)
exportMainFile.write('\n\n')
for node in nodes:
for arc in node.outputArcs:
printArc(arc)
exportMainFile.write('\n\n')
exportMainFile.write('initConsequences = ' + stateMachine.exportInitConsequences(initConsequences) + '\n')
exportMainFile.write('stateMachine.setInitConsequences(initConsequences, False)\n')
exportMainFile.write('\n\n')
except IOError as e:
print e
pass
def writeLaunchInfos(exportFolderName, fps, maxTick, width, height, rootDir):
try:
with open(exportFolderName + '/' + exportMainFileName, 'a') as exportMainFile:
exportMainFile.write('fps = ' + str(fps) + '\n')
exportMainFile.write('maxTick = ' + str(maxTick) + '\n')
exportMainFile.write('width = ' + str(width) + '\n')
exportMainFile.write('height = ' + str(height) + '\n')
exportMainFile.write('rootDir = \'' + rootDir + '/export\'' + '\n')
exportMainFile.write('\n\n')
            with open('./exporter/run', 'r') as runFile:
                exportMainFile.write(runFile.read())
exportMainFile.write('\n\n')
except IOError as e:
print e
pass
|
mouton5000/DiscreteEventApplicationEditor
|
exporter/exporter.py
|
Python
|
mit
| 6,274
|
"""
You have a list of points in the plane. Return the area of the largest triangle that can be formed by any 3 of the points.
Example:
Input: points = [[0,0],[0,1],[1,0],[0,2],[2,0]]
Output: 2
Explanation:
The five points are shown in the figure below. The red triangle is the largest.

Notes:
3 <= points.length <= 50.
No points will be duplicated.
-50 <= points[i][j] <= 50.
Answers within 10^-6 of the true value will be accepted as correct.
"""
class Solution(object):
def largestTriangleArea(self, points):
"""
:type points: List[List[int]]
:rtype: float
"""
def calcArea(p1, p2, p3):
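            # Triangle area at vertex p1: |p2-p1| * |p3-p1| * sin(theta) / 2,
            # with cos(theta) recovered from the dot product u / (l1 * l2).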
u = ((p2[0] - p1[0]) * (p3[0] - p1[0]) + (p2[1] - p1[1]) * (p3[1] - p1[1]))
l1 = (((p2[0] - p1[0]) ** 2 + (p2[1] - p1[1]) ** 2) ** 0.5)
l2 = (((p3[0] - p1[0]) ** 2 + (p3[1] - p1[1]) ** 2) ** 0.5)
try:
cosine = u / (l1 * l2)
except ZeroDivisionError:
return 0
if abs(cosine) > 1:
cosine = 1 if cosine > 0 else -1
sine = (1 - cosine ** 2) ** 0.5
return l1 * l2 * sine / 2
m = 0
for i, p1 in enumerate(points):
for j, p2 in enumerate(points):
if j <= i:
continue
for p3 in points[j + 1:]:
a = calcArea(p1, p2, p3)
m = max(m, a)
return m
|
franklingu/leetcode-solutions
|
questions/largest-triangle-area/Solution.py
|
Python
|
mit
| 1,520
|
import logging
from dateutil.relativedelta import relativedelta
from dataactbroker.helpers.generation_helper import a_file_query, d_file_query, copy_file_generation_to_job
from dataactcore.config import CONFIG_BROKER
from dataactcore.interfaces.function_bag import (mark_job_status, filename_fyp_sub_format, filename_fyp_format,
get_timestamp)
from dataactcore.models.jobModels import Job
from dataactcore.models.lookups import DETACHED_FILENAMES, SUBMISSION_FILENAMES
from dataactcore.utils import fileA, fileD1, fileD2, fileE_F
from dataactcore.utils.responseException import ResponseException
from dataactvalidator.filestreaming.csv_selection import write_stream_query
logger = logging.getLogger(__name__)
class FileGenerationManager:
""" Responsible for managing the generation of all files.
Attributes:
sess: Current database session
is_local: A boolean flag indicating whether the application is being run locally or not
file_generation: FileGeneration object representing a D file generation task
job: Job object for an E or F file generation task
file_type: File type letter name
"""
def __init__(self, sess, is_local, file_generation=None, job=None):
""" Initialize the FileGeneration Manager.
Args:
sess: Current database session
is_local: A boolean flag indicating whether the application is being run locally or not
file_generation: FileGeneration object representing a D file generation task
job: Job object for an E or F file generation task
"""
self.sess = sess
self.is_local = is_local
self.file_generation = file_generation
self.job = job
self.file_type = job.file_type.letter_name if job else file_generation.file_type
self.element_numbers = file_generation.element_numbers if file_generation else False
def generate_file(self, agency_code=None):
""" Generates a file based on the FileGeneration object and updates any Jobs referencing it """
fillin_vals = {'timestamp': get_timestamp()}
if self.file_generation:
fillin_vals.update({
'start': self.file_generation.start_date.strftime('%Y%m%d'),
'end': self.file_generation.end_date.strftime('%Y%m%d'),
'agency_type': self.file_generation.agency_type,
'ext': '.{}'.format(self.file_generation.file_format),
})
if self.job and self.job.submission:
# Submission Files
fillin_vals.update({
'submission_id': self.job.submission_id,
'FYP': filename_fyp_sub_format(self.job.submission),
})
file_name = SUBMISSION_FILENAMES[self.file_type].format(**fillin_vals)
else:
# Detached Files
if self.job and self.job.file_type.letter_name == 'A':
period_date = self.job.end_date + relativedelta(months=3)
fillin_vals['FYP'] = filename_fyp_format(period_date.year, period_date.month, False)
file_name = DETACHED_FILENAMES[self.file_type].format(**fillin_vals)
if self.is_local:
file_path = "".join([CONFIG_BROKER['broker_files'], file_name])
else:
file_path = "".join(["None/", file_name])
# Generate the file and upload to S3
log_data = {'message': 'Finished file {} generation'.format(self.file_type), 'message_type': 'ValidatorInfo',
'file_type': self.file_type, 'file_path': file_path}
if self.file_generation:
self.generate_d_file(file_path)
log_data.update({
'agency_code': self.file_generation.agency_code, 'agency_type': self.file_generation.agency_type,
'start_date': self.file_generation.start_date, 'end_date': self.file_generation.end_date,
'file_generation_id': self.file_generation.file_generation_id
})
elif self.job.file_type.letter_name in ['A', 'E', 'F']:
log_data['job_id'] = self.job.job_id
mark_job_status(self.job.job_id, 'running')
if self.job.file_type.letter_name == 'A':
if not agency_code:
raise ResponseException('Agency code not provided for an A file generation')
self.generate_a_file(agency_code, file_path)
else:
# Call self.generate_%s_file() where %s is e or f based on the Job's file_type
file_type_lower = self.job.file_type.letter_name.lower()
getattr(self, 'generate_%s_file' % file_type_lower)()
mark_job_status(self.job.job_id, 'finished')
else:
e = 'No FileGeneration object for D file generation.' if self.file_type in ['D1', 'D2'] else \
'Cannot generate file for {} file type.'.format(self.file_type if self.file_type else 'empty')
raise ResponseException(e)
logger.info(log_data)
def generate_d_file(self, file_path):
""" Write file D1 or D2 to an appropriate CSV. """
log_data = {
'message': 'Starting file {} generation'.format(self.file_type), 'message_type': 'ValidatorInfo',
'agency_code': self.file_generation.agency_code, 'agency_type': self.file_generation.agency_type,
'start_date': self.file_generation.start_date, 'end_date': self.file_generation.end_date,
'file_generation_id': self.file_generation.file_generation_id, 'file_type': self.file_type,
'file_format': self.file_generation.file_format, 'file_path': file_path,
'element_numbers': self.element_numbers
}
logger.info(log_data)
original_filename = file_path.split('/')[-1]
local_file = "".join([CONFIG_BROKER['d_file_storage_path'], original_filename])
header_index = 0
# Prepare file data
if self.file_type == 'D1':
file_utils = fileD1
if self.file_generation.element_numbers:
header_index = 1
elif self.file_type == 'D2':
file_utils = fileD2
else:
raise ResponseException('Failed to generate_d_file with file_type:{} (must be D1 or D2).'.format(
self.file_type))
headers = [val[header_index] for key, val in file_utils.mapping.items()]
log_data['message'] = 'Writing {} file {}: {}'.format(self.file_type, self.file_generation.file_format.upper(),
original_filename)
logger.info(log_data)
query_utils = {
"sess": self.sess, "file_utils": file_utils, "agency_code": self.file_generation.agency_code,
"agency_type": self.file_generation.agency_type, "start": self.file_generation.start_date,
"end": self.file_generation.end_date}
logger.debug({'query_utils': query_utils})
# Generate the file locally, then place in S3
write_stream_query(self.sess, d_file_query(query_utils), local_file, file_path, self.is_local, header=headers,
file_format=self.file_generation.file_format)
log_data['message'] = 'Finished writing {} file {}: {}'.format(self.file_type,
self.file_generation.file_format.upper(),
original_filename)
logger.info(log_data)
self.file_generation.file_path = file_path
self.sess.commit()
for job in self.sess.query(Job).filter_by(file_generation_id=self.file_generation.file_generation_id).all():
copy_file_generation_to_job(job, self.file_generation, self.is_local)
def generate_e_file(self):
""" Write file E to an appropriate CSV. """
log_data = {'message': 'Starting file E generation', 'message_type': 'ValidatorInfo', 'job_id': self.job.job_id,
'submission_id': self.job.submission_id, 'file_type': 'executive_compensation'}
logger.info(log_data)
file_e_sql = fileE_F.generate_file_e_sql(self.job.submission_id)
log_data['message'] = 'Writing E file CSV: {}'.format(self.job.original_filename)
logger.info(log_data)
# Generate the file and put in S3
write_stream_query(self.sess, file_e_sql, self.job.original_filename, self.job.filename, self.is_local,
generate_headers=True, generate_string=False)
log_data['message'] = 'Finished writing E file CSV: {}'.format(self.job.original_filename)
logger.info(log_data)
def generate_f_file(self):
""" Write rows from fileF.generate_f_rows to an appropriate CSV. """
log_data = {'message': 'Starting file F generation', 'message_type': 'ValidatorInfo', 'job_id': self.job.job_id,
'submission_id': self.job.submission_id, 'file_type': 'sub_award'}
logger.info(log_data)
file_f_sql = fileE_F.generate_file_f_sql(self.job.submission_id)
# writing locally first without uploading
log_data['message'] = 'Writing F file CSV: {}'.format(self.job.original_filename)
logger.info(log_data)
# Generate the file and put in S3
write_stream_query(self.sess, file_f_sql, self.job.original_filename, self.job.filename, self.is_local,
generate_headers=True, generate_string=False)
log_data['message'] = 'Finished writing F file CSV: {}'.format(self.job.original_filename)
logger.info(log_data)
def generate_a_file(self, agency_code, file_path):
""" Write file A to an appropriate CSV. """
self.job.filename = file_path
self.job.original_filename = file_path.split('/')[-1]
self.sess.commit()
log_data = {'message': 'Starting file A generation', 'message_type': 'ValidatorInfo', 'job_id': self.job.job_id,
'agency_code': agency_code, 'file_type': self.job.file_type.letter_name,
'start_date': self.job.start_date, 'end_date': self.job.end_date,
'filename': self.job.original_filename}
logger.info(log_data)
local_file = "".join([CONFIG_BROKER['d_file_storage_path'], self.job.original_filename])
headers = [val[0] for key, val in fileA.mapping.items()]
# add 3 months to account for fiscal year
period_date = self.job.end_date + relativedelta(months=3)
log_data['message'] = 'Writing A file CSV: {}'.format(self.job.original_filename)
logger.info(log_data)
query_utils = {"agency_code": agency_code, "period": period_date.month, "year": period_date.year,
"sess": self.sess}
logger.debug({'query_utils': query_utils})
# Generate the file and put in S3
write_stream_query(self.sess, a_file_query(query_utils), local_file, self.job.filename, self.is_local,
header=headers)
log_data['message'] = 'Finished writing A file CSV: {}'.format(self.job.original_filename)
logger.info(log_data)
|
fedspendingtransparency/data-act-broker-backend
|
dataactvalidator/validation_handlers/file_generation_manager.py
|
Python
|
cc0-1.0
| 11,318
|
# Program accepts a quiz score and prints out a grade
def main():
#Create list
letter = ["F", "F", "D", "C", "B", "A"]
#prompt for quiz score
    score = int(input("Input score: "))
#set grade
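    # list indexing maps the 0-5 score straight to a letter (0 and 1 are both F)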
grade = letter[score]
#print grade
print("Congrats you got a: ", grade)
main()
|
src053/PythonComputerScience
|
chap5/grades.py
|
Python
|
cc0-1.0
| 283
|
def euler1():
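    # Project Euler #1: sum the natural numbers below 1000 divisible by 3 or 5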
counter = 0
num = 0
while counter < 1000:
        if counter % 3 == 0 or counter % 5 == 0:
num += counter
counter += 1
return num
def euler2():
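    # Project Euler #2: sum the even-valued Fibonacci terms below four million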
term1 = 1
term2 = 2
termone = 1
num = 0
while term1 + term2 < 4000000:
        if term2 % 2 == 0:
num += term2
term1 = term2
term2 = termone + term2
termone = term1
return num
def main():
print euler1()
print euler2()
main()
|
steven1695-cmis/steven1695-cmis-cs2
|
projecteuler1.py
|
Python
|
cc0-1.0
| 397
|
import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1310110049.xml')
with open(sbmlFilePath,'r') as f:
sbmlString = f.read()
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString)
|
biomodels/MODEL1310110049
|
MODEL1310110049/model.py
|
Python
|
cc0-1.0
| 427
|
# Eating Functions
# Author: Lmctruck30
#
from server.util import ScriptManager
# heal, delay, itemId, itemSlot
# cake
def itemClick_1891(player, itemId, itemSlot):
player.getPA().eatFood(4, 1600, itemId, itemSlot)
# cake 2/3
def itemClick_1893(player, itemId, itemSlot):
player.getPA().eatFood(4, 1600, itemId, itemSlot)
# cake 1/3
def itemClick_1895(player, itemId, itemSlot):
player.getPA().eatFood(4, 1600, itemId, itemSlot)
# Bread
def itemClick_2309(player, itemId, itemSlot):
player.getPA().eatFood(2, 1600, itemId, itemSlot)
# C cake 1/3
def itemClick_1901(player, itemId, itemSlot):
player.getPA().eatFood(5, 1600, itemId, itemSlot)
# Shrimp
def itemClick_315(player, itemId, itemSlot):
player.getPA().eatFood(3, 1600, itemId, itemSlot)
# Mackerel
def itemClick_355(player, itemId, itemSlot):
player.getPA().eatFood(6, 1600, itemId, itemSlot)
# Cod
def itemClick_339(player, itemId, itemSlot):
player.getPA().eatFood(7, 1600, itemId, itemSlot)
# Trout
def itemClick_333(player, itemId, itemSlot):
player.getPA().eatFood(7, 1600, itemId, itemSlot)
# Pike
def itemClick_351(player, itemId, itemSlot):
player.getPA().eatFood(8, 1600, itemId, itemSlot)
# Salmon
def itemClick_329(player, itemId, itemSlot):
player.getPA().eatFood(9, 1600, itemId, itemSlot)
# Tuna
def itemClick_361(player, itemId, itemSlot):
player.getPA().eatFood(10, 1600, itemId, itemSlot)
# Lobster
def itemClick_379(player, itemId, itemSlot):
player.getPA().eatFood(12, 1600, itemId, itemSlot)
# Bass
def itemClick_365(player, itemId, itemSlot):
player.getPA().eatFood(13, 1600, itemId, itemSlot)
# Swordfish
def itemClick_373(player, itemId, itemSlot):
player.getPA().eatFood(14, 1600, itemId, itemSlot)
# Shark
def itemClick_385(player, itemId, itemSlot):
player.getPA().eatFood(20, 1600, itemId, itemSlot)
# Manta
def itemClick_391(player, itemId, itemSlot):
player.getPA().eatFood(22, 1600, itemId, itemSlot)
|
RodriguesJ/Atem
|
data/scripts/player/eat/eat.py
|
Python
|
epl-1.0
| 1,909
|
# Copyright (c) 2013-2015 by Ron Frederick <ronf@timeheart.net>.
# All rights reserved.
#
# This program and the accompanying materials are made available under
# the terms of the Eclipse Public License v1.0 which accompanies this
# distribution and is available at:
#
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Ron Frederick - initial implementation, API, and documentation
"""DSA public key encryption handler"""
from .asn1 import *
from .crypto import *
from .logging import *
from .misc import *
from .packet import *
from .public_key import *
class _DSAKey(SSHKey):
"""Handler for DSA public key encryption"""
algorithm = b'ssh-dss'
pem_name = b'DSA'
pkcs8_oid = ObjectIdentifier('1.2.840.10040.4.1')
def __init__(self, key, private):
self._key = key
self._private = private
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self._key.p == other._key.p and
self._key.q == other._key.q and
self._key.g == other._key.g and
self._key.y == other._key.y and
((self._private and self._key.x) ==
(other._private and other._key.x)))
def __hash__(self):
        return hash((self._key.p, self._key.q, self._key.g, self._key.y,
                     self._key.x if self._private else None))
@classmethod
def make_private(cls, *args):
return cls(DSAPrivateKey(*args), True)
@classmethod
def make_public(cls, *args):
return cls(DSAPublicKey(*args), False)
@classmethod
def decode_pkcs1_private(cls, key_data):
if (isinstance(key_data, tuple) and len(key_data) == 6 and
all_ints(key_data) and key_data[0] == 0):
return key_data[1:]
else:
return None
@classmethod
def decode_pkcs1_public(cls, key_data):
if (isinstance(key_data, tuple) and len(key_data) == 4 and
all_ints(key_data)):
y, p, q, g = key_data
return p, q, g, y
else:
return None
@classmethod
def decode_pkcs8_private(cls, alg_params, data):
x = der_decode(data)
if len(alg_params) == 3 and all_ints(alg_params) and isinstance(x, int):
p, q, g = alg_params
y = pow(g, x, p)
return p, q, g, y, x
else:
return None
@classmethod
def decode_pkcs8_public(cls, alg_params, data):
y = der_decode(data)
if len(alg_params) == 3 and all_ints(alg_params) and isinstance(y, int):
p, q, g = alg_params
return p, q, g, y
else:
return None
@classmethod
def decode_ssh_private(cls, packet):
p = packet.get_mpint()
q = packet.get_mpint()
g = packet.get_mpint()
y = packet.get_mpint()
x = packet.get_mpint()
return p, q, g, y, x
@classmethod
def decode_ssh_public(cls, packet):
p = packet.get_mpint()
q = packet.get_mpint()
g = packet.get_mpint()
y = packet.get_mpint()
return p, q, g, y
def encode_pkcs1_private(self):
if not self._private:
raise KeyExportError('Key is not private')
return (0, self._key.p, self._key.q, self._key.g,
self._key.y, self._key.x)
def encode_pkcs1_public(self):
return (self._key.y, self._key.p, self._key.q, self._key.g)
def encode_pkcs8_private(self):
if not self._private:
raise KeyExportError('Key is not private')
return (self._key.p, self._key.q, self._key.g), der_encode(self._key.x)
def encode_pkcs8_public(self):
return (self._key.p, self._key.q, self._key.g), der_encode(self._key.y)
def encode_ssh_private(self):
if not self._private:
raise KeyExportError('Key is not private')
return b''.join((String(self.algorithm), MPInt(self._key.p),
MPInt(self._key.q), MPInt(self._key.g),
MPInt(self._key.y), MPInt(self._key.x)))
def encode_ssh_public(self):
return b''.join((String(self.algorithm), MPInt(self._key.p),
MPInt(self._key.q), MPInt(self._key.g),
MPInt(self._key.y)))
def sign(self, data):
if not self._private:
raise ValueError('Private key needed for signing')
r, s = self._key.sign(data)
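        # An ssh-dss signature blob is a fixed 40 bytes: r then s, each
        # encoded as 20 big-endian bytes (RFC 4253, section 6.6).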
return b''.join((String(self.algorithm), String(r.to_bytes(20, 'big') +
s.to_bytes(20, 'big'))))
def verify(self, data, sig):
sig = SSHPacket(sig)
if sig.get_string() != self.algorithm:
return False
sig = sig.get_string()
return self._key.verify(data, (int.from_bytes(sig[:20], 'big'),
int.from_bytes(sig[20:], 'big')))
register_public_key_alg(b'ssh-dss', _DSAKey)
register_certificate_alg(b'ssh-dss-cert-v01@openssh.com',
_DSAKey, SSHCertificateV01)
register_certificate_alg(b'ssh-dss-cert-v00@openssh.com',
_DSAKey, SSHCertificateV00)
|
nchammas/asyncssh
|
asyncssh/dsa.py
|
Python
|
epl-1.0
| 5,244
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Author: Corentin Arnaud
Module: cluster
Description: script to run the project on cluster
'''
import sys
from Utils.ReadXmlFile import ReadXmlFile
from Main.MainCMAES import launchCMAESForAllPoint
if __name__ == '__main__':
rs = ReadXmlFile(sys.argv[1])
    launchCMAESForAllPoint(rs, float(sys.argv[2]), True)
|
osigaud/ArmModelPython
|
Control/clusterOneTargetNController.py
|
Python
|
gpl-2.0
| 367
|
#!/usr/bin/python
import SaX
config = SaX.SaXConfig
keyboard = SaX.SaXImport(SaX.SAX_KEYBOARD)
keyboard.doImport()
manip = SaX.SaXManipulateKeyboard(keyboard)
models = manip.getModels()
for (key, item) in models.items():
    print "Key: [%s] Value: [%s]" % (key, item)
|
schaefi/sax2
|
libsax/bindings/python/example.py
|
Python
|
gpl-2.0
| 284
|
versionNumberString = '0.90' # a string that can be turned into a number
versionNumber = float(versionNumberString)
versionNumberModifier = ' [2012.10.17]' # a string
versionString = '%s%s' % (versionNumberString, versionNumberModifier)
dateString = "17 October, 2012"
|
Anaphory/p4-phylogeny
|
p4/version.py
|
Python
|
gpl-2.0
| 275
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Package providing filtering framework for GRAMPS.
"""
#-------------------------------------------------------------------------
#
# GTK
#
#-------------------------------------------------------------------------
from gi.repository import Gtk, GObject
from gramps.gen.ggettext import gettext as _
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from gramps.gen.filters import CustomFilters
#-------------------------------------------------------------------------
#
# FilterStore
#
#-------------------------------------------------------------------------
class FilterStore(Gtk.ListStore):
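    # A single-column Gtk.ListStore of filter names; list_map keeps the
    # matching filter objects in the same order so get_filter(index) can
    # return the filter selected in the UI.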
def __init__(self, local_filters=[], namespace="generic", default=""):
GObject.GObject.__init__(self, str)
self.list_map = []
self.def_index = 0
cnt = 0
for filt in local_filters:
name = filt.get_name()
self.append(row=[name])
self.list_map.append(filt)
if default != "" and default == name:
self.def_index = cnt
cnt += 1
for filt in CustomFilters.get_filters(namespace):
name = _(filt.get_name())
self.append(row=[name])
self.list_map.append(filt)
if default != "" and default == name:
self.def_index = cnt
cnt += 1
def default_index(self):
return self.def_index
def get_filter(self, index):
return self.list_map[index]
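
# --- illustrative sketch (not part of Gramps): since FilterStore is a
# Gtk.ListStore of filter names, it can back a Gtk.ComboBox directly.
# The namespace value below is hypothetical.
def _demo_filter_combo(local_filters):
    combo = Gtk.ComboBox(model=FilterStore(local_filters, namespace="Person"))
    cell = Gtk.CellRendererText()
    combo.pack_start(cell, True)
    combo.add_attribute(cell, 'text', 0)  # column 0 holds the filter name
    combo.set_active(combo.get_model().default_index())
    return combo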
| arunkgupta/gramps | gramps/gui/filters/_filterstore.py | Python | gpl-2.0 | 2,380 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # Django settings for OMERO.web project. # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
#
# Copyright (c) 2008-2016 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import os.path
import warnings
import sys
import logging
import omero
import omero.config
import omero.clients
import tempfile
import re
import json
import random
import string
from omero_ext import portalocker
from omero.install.python_warning import py27_only, PYTHON_WARNING
logger = logging.getLogger(__name__)
if not py27_only():
warnings.warn("WARNING: %s" % PYTHON_WARNING, RuntimeWarning)
# LOGS
# NEVER DEPLOY a site into production with DEBUG turned on.
# Debugging mode: a boolean that turns debug mode on/off.
# handler404 and handler500 only take effect when DEBUG is False.
if 'OMERO_HOME' in os.environ:
OMERO_HOME = os.environ.get('OMERO_HOME')
else:
OMERO_HOME = os.path.join(os.path.dirname(__file__), '..', '..', '..')
OMERO_HOME = os.path.normpath(OMERO_HOME)
# Logging
LOGDIR = os.path.join(OMERO_HOME, 'var', 'log').replace('\\', '/')
if not os.path.isdir(LOGDIR):
try:
os.makedirs(LOGDIR)
except Exception, x:
exctype, value = sys.exc_info()[:2]
raise exctype(value)
# DEBUG: Never deploy a site into production with DEBUG turned on.
# Logging levels: logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR
# logging.CRITICAL
# FORMAT: 2010-01-01 00:00:00,000 INFO [omeroweb.webadmin.webadmin_utils]
# (proc.1308 ) getGuestConnection:20 Open connection is not available
STANDARD_LOGFORMAT = (
'%(asctime)s %(levelname)5.5s [%(name)40.40s]'
' (proc.%(process)5.5d) %(funcName)s():%(lineno)d %(message)s')
FULL_REQUEST_LOGFORMAT = (
'%(asctime)s %(levelname)5.5s [%(name)40.40s]'
' (proc.%(process)5.5d) %(funcName)s():%(lineno)d'
' HTTP %(status_code)d %(request)s')
LOGGING_CLASS = 'omero_ext.cloghandler.ConcurrentRotatingFileHandler'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': STANDARD_LOGFORMAT
},
'full_request': {
'format': FULL_REQUEST_LOGFORMAT
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'default': {
'level': 'DEBUG',
'class': LOGGING_CLASS,
'filename': os.path.join(
LOGDIR, 'OMEROweb.log').replace('\\', '/'),
'maxBytes': 1024*1024*5, # 5 MB
'backupCount': 10,
'formatter': 'standard',
},
'request_handler': {
'level': 'DEBUG',
'class': LOGGING_CLASS,
'filename': os.path.join(
LOGDIR, 'OMEROweb.log').replace('\\', '/'),
'maxBytes': 1024*1024*5, # 5 MB
'backupCount': 10,
'filters': ['require_debug_false'],
'formatter': 'full_request',
},
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'standard'
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': { # Stop SQL debug from logging to main logger
'handlers': ['default', 'request_handler', 'mail_admins'],
'level': 'DEBUG',
'propagate': False
},
'django': {
'handlers': ['null'],
'level': 'DEBUG',
'propagate': True
},
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
}
}
}
# Load custom settings from etc/grid/config.xml
# Tue 2 Nov 2010 11:03:18 GMT -- ticket:3228
from omero.util.concurrency import get_event
CONFIG_XML = os.path.join(OMERO_HOME, 'etc', 'grid', 'config.xml')
count = 10
event = get_event("websettings")
while True:
try:
CUSTOM_SETTINGS = dict()
if os.path.exists(CONFIG_XML):
CONFIG_XML = omero.config.ConfigXml(CONFIG_XML, read_only=True)
CUSTOM_SETTINGS = CONFIG_XML.as_map()
CONFIG_XML.close()
break
except portalocker.LockException:
# logger.error("Exception while loading configuration retrying...",
# exc_info=True)
exctype, value = sys.exc_info()[:2]
count -= 1
if not count:
raise exctype(value)
else:
event.wait(1) # Wait a total of 10 seconds
except:
# logger.error("Exception while loading configuration...",
# exc_info=True)
exctype, value = sys.exc_info()[:2]
raise exctype(value)
del event
del count
del get_event
WSGI = "wsgi"
WSGITCP = "wsgi-tcp"
WSGI_TYPES = (WSGI, WSGITCP)
DEVELOPMENT = "development"
DEFAULT_SERVER_TYPE = WSGITCP
ALL_SERVER_TYPES = (WSGI, WSGITCP, DEVELOPMENT)
DEFAULT_SESSION_ENGINE = 'omeroweb.filesessionstore'
SESSION_ENGINE_VALUES = ('omeroweb.filesessionstore',
'django.contrib.sessions.backends.db',
'django.contrib.sessions.backends.file',
'django.contrib.sessions.backends.cache',
'django.contrib.sessions.backends.cached_db')
def parse_boolean(s):
s = s.strip().lower()
if s in ('true', '1', 't'):
return True
return False
def parse_paths(s):
return [os.path.normpath(path) for path in json.loads(s)]
def check_server_type(s):
if s not in ALL_SERVER_TYPES:
raise ValueError(
"Unknown server type: %s. Valid values are: %s"
% (s, ALL_SERVER_TYPES))
return s
def check_session_engine(s):
if s not in SESSION_ENGINE_VALUES:
raise ValueError(
"Unknown session engine: %s. Valid values are: %s"
% (s, SESSION_ENGINE_VALUES))
return s
def identity(x):
return x
def str_slash(s):
if s is not None:
s = str(s)
if s and not s.endswith("/"):
s += "/"
return s
class LeaveUnset(Exception):
pass
def leave_none_unset(s):
if s is None:
raise LeaveUnset()
return s
def leave_none_unset_int(s):
s = leave_none_unset(s)
if s is not None:
return int(s)
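# --- illustrative sketch (defined but never called): how the coercion
# helpers above behave.
def _demo_coercion_helpers():
    assert parse_boolean(' True ') and parse_boolean('1') and parse_boolean('t')
    assert not parse_boolean('false')
    assert str_slash('http://static.example.com') == 'http://static.example.com/'
    assert check_server_type(DEFAULT_SERVER_TYPE) == DEFAULT_SERVER_TYPE
    try:
        leave_none_unset(None)   # unset values raise LeaveUnset so the
    except LeaveUnset:           # corresponding Django setting is skipped
        pass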
CUSTOM_HOST = CUSTOM_SETTINGS.get("Ice.Default.Host", "localhost")
CUSTOM_HOST = CUSTOM_SETTINGS.get("omero.master.host", CUSTOM_HOST)
# DO NOT EDIT!
INTERNAL_SETTINGS_MAPPING = {
"omero.qa.feedback":
["FEEDBACK_URL", "http://qa.openmicroscopy.org.uk", str, None],
"omero.web.upgrades.url":
["UPGRADES_URL", None, leave_none_unset, None],
"omero.web.check_version":
["CHECK_VERSION", "true", parse_boolean, None],
# Allowed hosts:
# https://docs.djangoproject.com/en/1.8/ref/settings/#allowed-hosts
"omero.web.allowed_hosts":
["ALLOWED_HOSTS", '["*"]', json.loads, None],
# Do not show WARNING (1_8.W001): The standalone TEMPLATE_* settings
# were deprecated in Django 1.8 and the TEMPLATES dictionary takes
# precedence. You must put the values of the following settings
# into your default TEMPLATES dict:
# TEMPLATE_DIRS, TEMPLATE_CONTEXT_PROCESSORS.
"omero.web.system_checks":
["SILENCED_SYSTEM_CHECKS", '["1_8.W001"]', json.loads, None],
# Internal email notification for omero.web.admins,
# loaded from config.xml directly
"omero.mail.from":
["SERVER_EMAIL",
None,
identity,
("The email address that error messages come from, such as those"
" sent to :property:`omero.web.admins`. Requires EMAIL properties"
" below.")],
"omero.mail.host":
["EMAIL_HOST",
None,
identity,
"The SMTP server host to use for sending email."],
"omero.mail.password":
["EMAIL_HOST_PASSWORD",
None,
identity,
"Password to use for the SMTP server."],
"omero.mail.username":
["EMAIL_HOST_USER",
None,
identity,
"Username to use for the SMTP server."],
"omero.mail.port":
["EMAIL_PORT",
25,
identity,
"Port to use for the SMTP server."],
"omero.web.admins.email_subject_prefix":
["EMAIL_SUBJECT_PREFIX",
"[OMERO.web - admin notification]",
str,
"Subject-line prefix for email messages"],
"omero.mail.smtp.starttls.enable":
["EMAIL_USE_TLS",
"false",
parse_boolean,
("Whether to use a TLS (secure) connection when talking to the SMTP"
" server.")],
}
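# Each mapping entry in these dictionaries is [GLOBAL_NAME, default value,
# coercion function, description]; process_custom_settings() (defined further
# below) appends a fifth "using default" flag once an entry has been resolved
# against etc/grid/config.xml.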
CUSTOM_SETTINGS_MAPPINGS = {
# Deployment configuration
"omero.web.debug":
["DEBUG",
"false",
parse_boolean,
"A boolean that turns on/off debug mode."],
"omero.web.secret_key":
["SECRET_KEY",
None,
leave_none_unset,
("A boolean that sets SECRET_KEY for a particular Django "
"installation.")],
"omero.web.admins":
["ADMINS",
'[]',
json.loads,
("A list of people who get code error notifications whenever the "
"application identifies a broken link or raises an unhandled "
"exception that results in an internal server error. This gives "
"the administrators immediate notification of any errors, "
"see :doc:`/sysadmins/mail`. "
"Example:``'[[\"Full Name\", \"email address\"]]'``.")],
"omero.web.application_server":
["APPLICATION_SERVER",
DEFAULT_SERVER_TYPE,
check_server_type,
("OMERO.web is configured to run in Gunicorn as a generic WSGI "
"application by default. If you are using Apache change this "
"to \"wsgi\" before generating your web server configuration. "
"Available options: \"wsgi-tcp\" (Gunicorn), \"wsgi\" (Apache)")],
"omero.web.application_server.host":
["APPLICATION_SERVER_HOST",
"127.0.0.1",
str,
"Upstream application host"],
"omero.web.application_server.port":
["APPLICATION_SERVER_PORT", 4080, int, "Upstream application port"],
"omero.web.application_server.max_requests":
["APPLICATION_SERVER_MAX_REQUESTS", 0, int,
("The maximum number of requests a worker will process before "
"restarting.")],
"omero.web.middleware":
["MIDDLEWARE_CLASSES_LIST",
('['
'{"index": 1, '
'"class": "django.middleware.common.BrokenLinkEmailsMiddleware"},'
'{"index": 2, '
'"class": "django.middleware.common.CommonMiddleware"},'
'{"index": 3, '
'"class": "django.contrib.sessions.middleware.SessionMiddleware"},'
'{"index": 4, '
'"class": "django.middleware.csrf.CsrfViewMiddleware"},'
'{"index": 5, '
'"class": "django.contrib.messages.middleware.MessageMiddleware"}'
']'),
json.loads,
('Warning: Only system administrators should use this feature. '
'List of Django middleware classes in the form '
'[{"class": "class.name", "index": FLOAT}]. '
'See https://docs.djangoproject.com/en/1.8/topics/http/middleware/. '
'Classes will be ordered by increasing index')],
"omero.web.prefix":
["FORCE_SCRIPT_NAME",
None,
leave_none_unset,
("Used as the value of the SCRIPT_NAME environment variable in any"
" HTTP request.")],
"omero.web.use_x_forwarded_host":
["USE_X_FORWARDED_HOST",
"false",
parse_boolean,
("Specifies whether to use the X-Forwarded-Host header in preference "
"to the Host header. This should only be enabled if a proxy which "
"sets this header is in use.")],
"omero.web.static_url":
["STATIC_URL",
"/static/",
str_slash,
("URL to use when referring to static files. Example: ``'/static/'``"
" or ``'http://static.example.com/'``. Used as the base path for"
" asset definitions (the Media class) and the staticfiles app. It"
" must end in a slash if set to a non-empty value.")],
"omero.web.static_root":
["STATIC_ROOT",
os.path.join(os.path.dirname(__file__), 'static').replace('\\', '/'),
os.path.normpath,
("The absolute path to the directory where collectstatic will"
" collect static files for deployment. If the staticfiles contrib"
" app is enabled (default) the collectstatic management command"
" will collect static files into this directory.")],
"omero.web.session_engine":
["SESSION_ENGINE",
DEFAULT_SESSION_ENGINE,
check_session_engine,
("Controls where Django stores session data. See :djangodoc:"
"`Configuring the session engine for more details <ref/settings"
"/#session-engine>`.")],
"omero.web.session_expire_at_browser_close":
["SESSION_EXPIRE_AT_BROWSER_CLOSE",
"true",
parse_boolean,
("A boolean that determines whether to expire the session when the "
"user closes their browser. See :djangodoc:`Django Browser-length "
"sessions vs. persistent sessions documentation <topics/http/"
"sessions/#browser-length-vs-persistent-sessions>` for more "
"details.")],
"omero.web.caches":
["CACHES",
('{"default": {"BACKEND":'
' "django.core.cache.backends.dummy.DummyCache"}}'),
json.loads,
("OMERO.web offers alternative session backends to automatically"
" delete stale data using the cache session store backend, see "
":djangodoc:`Django cached session documentation <topics/http/"
"sessions/#using-cached-sessions>` for more details.")],
"omero.web.session_cookie_age":
["SESSION_COOKIE_AGE",
86400,
int,
"The age of session cookies, in seconds."],
"omero.web.session_cookie_domain":
["SESSION_COOKIE_DOMAIN",
None,
leave_none_unset,
"The domain to use for session cookies"],
"omero.web.session_cookie_name":
["SESSION_COOKIE_NAME",
None,
leave_none_unset,
"The name to use for session cookies"],
"omero.web.logdir":
["LOGDIR", LOGDIR, str, "A path to the custom log directory."],
"omero.web.secure_proxy_ssl_header":
["SECURE_PROXY_SSL_HEADER",
'[]',
json.loads,
("A tuple representing a HTTP header/value combination that "
"signifies a request is secure. Example "
"``'[\"HTTP_X_FORWARDED_PROTO_OMERO_WEB\", \"https\"]'``. "
"For more details see :djangodoc:`secure proxy ssl header <ref/"
"settings/#secure-proxy-ssl-header>`.")],
"omero.web.wsgi_args":
["WSGI_ARGS",
None,
leave_none_unset,
("A string representing Gunicorn additional arguments. "
"Check Gunicorn Documentation "
"http://docs.gunicorn.org/en/latest/settings.html")],
"omero.web.wsgi_workers":
["WSGI_WORKERS",
5,
int,
("The number of worker processes for handling requests. "
"Check Gunicorn Documentation "
"http://docs.gunicorn.org/en/stable/settings.html#workers")],
"omero.web.wsgi_timeout":
["WSGI_TIMEOUT",
60,
int,
("Workers silent for more than this many seconds are killed "
"and restarted. Check Gunicorn Documentation "
"http://docs.gunicorn.org/en/stable/settings.html#timeout")],
# Public user
"omero.web.public.enabled":
["PUBLIC_ENABLED",
"false",
parse_boolean,
"Enable and disable the OMERO.web public user functionality."],
"omero.web.public.url_filter":
["PUBLIC_URL_FILTER",
r'^/(?!webadmin)',
re.compile,
("Set a URL filter for which the OMERO.web public user is allowed to"
" navigate. The idea is that you can create the public pages"
" yourself (see OMERO.web framework since we do not provide public"
" pages.")],
"omero.web.public.server_id":
["PUBLIC_SERVER_ID", 1, int, "Server to authenticate against."],
"omero.web.public.user":
["PUBLIC_USER",
None,
leave_none_unset,
"Username to use during authentication."],
"omero.web.public.password":
["PUBLIC_PASSWORD",
None,
leave_none_unset,
"Password to use during authentication."],
"omero.web.public.cache.enabled":
["PUBLIC_CACHE_ENABLED", "false", parse_boolean, None],
"omero.web.public.cache.key":
["PUBLIC_CACHE_KEY", "omero.web.public.cache.key", str, None],
"omero.web.public.cache.timeout":
["PUBLIC_CACHE_TIMEOUT", 60 * 60 * 24, int, None],
# Application configuration
"omero.web.server_list":
["SERVER_LIST",
'[["%s", 4064, "omero"]]' % CUSTOM_HOST,
json.loads,
"A list of servers the Web client can connect to."],
"omero.web.ping_interval":
["PING_INTERVAL",
60000,
int,
"Timeout interval between ping invocations in seconds"],
"omero.web.chunk_size":
["CHUNK_SIZE",
1048576,
int,
"Size, in bytes, of the “chunk”"],
"omero.web.webgateway_cache":
["WEBGATEWAY_CACHE", None, leave_none_unset, None],
# VIEWER
"omero.web.viewer.view":
["VIEWER_VIEW",
'omeroweb.webclient.views.image_viewer',
str,
("Django view which handles display of, or redirection to, the "
"desired full image viewer.")],
# OPEN WITH
"omero.web.open_with":
["OPEN_WITH",
('[["Image viewer", "webgateway", {"supported_objects": ["image"],'
'"script_url": "webclient/javascript/ome.openwith_viewer.js"}]]'),
json.loads,
("A list of viewers that can be used to display selected Images "
"or other objects. Each viewer is defined as "
"``[\"Name\", \"url\", options]``. Url is reverse(url). "
"Selected objects are added to the url as ?image=:1&image=2"
"Objects supported must be specified in options with "
"e.g. ``{\"supported_objects\":[\"images\"]}`` "
"to enable viewer for one or more images.")],
# PIPELINE 1.3.20
# Pipeline is an asset packaging library for Django, providing both CSS
# and JavaScript concatenation and compression, built-in JavaScript
# template support, and optional data-URI image and font embedding.
"omero.web.pipeline_js_compressor":
["PIPELINE_JS_COMPRESSOR",
None,
identity,
("Compressor class to be applied to JavaScript files. If empty or "
"None, JavaScript files won't be compressed.")],
"omero.web.pipeline_css_compressor":
["PIPELINE_CSS_COMPRESSOR",
None,
identity,
("Compressor class to be applied to CSS files. If empty or None,"
" CSS files won't be compressed.")],
"omero.web.pipeline_staticfile_storage":
["STATICFILES_STORAGE",
"pipeline.storage.PipelineStorage",
str,
("The file storage engine to use when collecting static files with"
" the collectstatic management command. See `the documentation "
"<http://django-pipeline.readthedocs.org/en/latest/storages.html>`_"
" for more details.")],
# Customisation
"omero.web.login_logo":
["LOGIN_LOGO",
None,
leave_none_unset,
("Customize webclient login page with your own logo. Logo images "
"should ideally be 150 pixels high or less and will appear above "
"the OMERO logo. You will need to host the image somewhere else "
"and link to it with"
" ``\"http://www.openmicroscopy.org/site/logo.jpg\"``.")],
"omero.web.login_view":
["LOGIN_VIEW", "weblogin", str, None],
"omero.web.staticfile_dirs":
["STATICFILES_DIRS",
'[]',
json.loads,
("Defines the additional locations the staticfiles app will traverse"
" if the FileSystemFinder finder is enabled, e.g. if you use the"
" collectstatic or findstatic management command or use the static"
" file serving view.")],
"omero.web.template_dirs":
["TEMPLATE_DIRS",
'[]',
json.loads,
("List of locations of the template source files, in search order. "
"Note that these paths should use Unix-style forward slashes.")],
"omero.web.index_template":
["INDEX_TEMPLATE",
None,
identity,
("Define template used as an index page ``http://your_host/omero/``."
"If None user is automatically redirected to the login page."
"For example use 'webclient/index.html'. ")],
"omero.web.login_redirect":
["LOGIN_REDIRECT",
'{}',
json.loads,
("Redirect to the given location after logging in. It only supports "
"arguments for :djangodoc:`Django reverse function"
" <ref/urlresolvers/#django.core.urlresolvers.reverse>`. "
"For example: ``'{\"redirect\": [\"webindex\"], \"viewname\":"
" \"load_template\", \"args\":[\"userdata\"], \"query_string\":"
" {\"experimenter\": -1}}'``")],
"omero.web.apps":
["ADDITIONAL_APPS",
'[]',
json.loads,
("Add additional Django applications. For example, see"
" :doc:`/developers/Web/CreateApp`")],
"omero.web.databases":
["DATABASES", '{}', json.loads, None],
"omero.web.page_size":
["PAGE",
200,
int,
("Number of images displayed within a dataset or 'orphaned'"
" container to prevent from loading them all at once.")],
"omero.web.thumbnails_batch":
["THUMBNAILS_BATCH",
50,
int,
("Number of thumbnails retrieved to prevent from loading them"
" all at once. Make sure the size is not too big, otherwise"
" you may exceed limit request line, see"
" http://docs.gunicorn.org/en/latest/settings.html"
"?highlight=limit_request_line")],
"omero.web.ui.top_links":
["TOP_LINKS",
('['
'["Data", "webindex", {"title": "Browse Data via Projects, Tags'
' etc"}],'
'["History", "history", {"title": "History"}],'
'["Help", "http://help.openmicroscopy.org/",'
'{"title":"Open OMERO user guide in a new tab", "target":"new"}]'
']'),
json.loads,
("Add links to the top header: links are ``['Link Text', "
"'link|lookup_view', options]``, where the url is reverse('link'), "
"simply 'link' (for external urls) or lookup_view is a detailed "
"dictionary {\"viewname\": \"str\", \"args\": [], \"query_string\": "
"{\"param\": \"value\" }], "
"E.g. ``'[\"Webtest\", \"webtest_index\"] or [\"Homepage\","
" \"http://...\", {\"title\": \"Homepage\", \"target\": \"new\"}"
" ] or [\"Repository\", {\"viewname\": \"webindex\", "
"\"query_string\": {\"experimenter\": -1}}, "
"{\"title\": \"Repo\"}]'``")],
"omero.web.ui.metadata_panes":
["METADATA_PANES",
('['
'{"name": "tag", "label": "Tags", "index": 1},'
'{"name": "map", "label": "Key-Value Pairs", "index": 2},'
'{"name": "table", "label": "Tables", "index": 3},'
'{"name": "file", "label": "Attachments", "index": 4},'
'{"name": "comment", "label": "Comments", "index": 5},'
'{"name": "rating", "label": "Ratings", "index": 6},'
'{"name": "other", "label": "Others", "index": 7}'
']'),
json.loads,
("Manage Metadata pane accordion. This functionality is limited to"
" the exiting sections.")],
"omero.web.ui.right_plugins":
["RIGHT_PLUGINS",
('[["Acquisition",'
' "webclient/data/includes/right_plugin.acquisition.js.html",'
' "metadata_tab"],'
# '["ROIs", "webtest/webclient_plugins/right_plugin.rois.js.html",
# "image_roi_tab"],'
'["Preview", "webclient/data/includes/right_plugin.preview.js.html"'
', "preview_tab"]]'),
json.loads,
("Add plugins to the right-hand panel. "
"Plugins are ``['Label', 'include.js', 'div_id']``. "
"The javascript loads data into ``$('#div_id')``.")],
"omero.web.ui.center_plugins":
["CENTER_PLUGINS",
('['
# '["Split View",
# "webtest/webclient_plugins/center_plugin.splitview.js.html",
# "split_view_panel"],'
']'),
json.loads,
("Add plugins to the center panels. Plugins are "
"``['Channel overlay',"
" 'webtest/webclient_plugins/center_plugin.overlay.js.html',"
" 'channel_overlay_panel']``. "
"The javascript loads data into ``$('#div_id')``.")],
# CORS
"omero.web.cors_origin_whitelist":
["CORS_ORIGIN_WHITELIST",
'[]',
json.loads,
("A list of origin hostnames that are authorized to make cross-site "
"HTTP requests. "
"Used by the django-cors-headers app as described at "
"https://github.com/ottoyiu/django-cors-headers")],
"omero.web.cors_origin_allow_all":
["CORS_ORIGIN_ALLOW_ALL",
"false",
parse_boolean,
("If True, cors_origin_whitelist will not be used and all origins "
"will be authorized to make cross-site HTTP requests.")],
}
DEPRECATED_SETTINGS_MAPPINGS = {
# Deprecated settings, description should indicate the replacement.
"omero.web.force_script_name":
["FORCE_SCRIPT_NAME",
None,
leave_none_unset,
("Use omero.web.prefix instead.")],
"omero.web.server_email":
["SERVER_EMAIL",
None,
identity,
("Use omero.mail.from instead.")],
"omero.web.email_host":
["EMAIL_HOST",
None,
identity,
("Use omero.mail.host instead.")],
"omero.web.email_host_password":
["EMAIL_HOST_PASSWORD",
None,
identity,
("Use omero.mail.password instead.")],
"omero.web.email_host_user":
["EMAIL_HOST_USER",
None,
identity,
("Use omero.mail.username instead.")],
"omero.web.email_port":
["EMAIL_PORT",
None,
identity,
("Use omero.mail.port instead.")],
"omero.web.email_subject_prefix":
["EMAIL_SUBJECT_PREFIX",
"[OMERO.web]",
str,
("Default email subject is no longer configurable.")],
"omero.web.email_use_tls":
["EMAIL_USE_TLS",
"false",
parse_boolean,
("Use omero.mail.smtp.* instead to set up"
" javax.mail.Session properties.")],
"omero.web.plate_download.enabled":
["PLATE_DOWNLOAD_ENABLED",
"false",
parse_boolean,
("Use omero.policy.binary_access instead to restrict download.")],
"omero.web.viewer.initial_zoom_level":
["VIEWER_INITIAL_ZOOM_LEVEL",
None,
leave_none_unset_int,
("Use omero.client.viewer.initial_zoom_level instead.")],
"omero.web.send_broken_link_emails":
["SEND_BROKEN_LINK_EMAILS",
"false",
parse_boolean,
("Replaced by django.middleware.common.BrokenLinkEmailsMiddleware."
"To get notification set :property:`omero.web.admins` property.")
],
}
del CUSTOM_HOST
def check_worker_class(c):
if c == "gevent":
try:
import gevent # NOQA
except ImportError:
raise ImportError("You are using async workers based "
"on Greenlets via Gevent. Install gevent")
return str(c)
def check_threading(t):
if t > 1:
try:
import concurrent.futures # NOQA
except ImportError:
raise ImportError("You are using sync workers with "
"multiple threads. Install futures")
return int(t)
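# Illustration: check_worker_class("gevent") verifies gevent is importable
# (raising a descriptive ImportError otherwise); check_threading(4) likewise
# requires the concurrent.futures backport before allowing multi-threaded
# sync workers.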
# DEVELOPMENT_SETTINGS_MAPPINGS - WARNING: for each setting the developer MUST
# open a ticket that needs to be resolved before a release, either by moving
# the setting to CUSTOM_SETTINGS_MAPPINGS or by removing the setting
# altogether.
DEVELOPMENT_SETTINGS_MAPPINGS = {
"omero.web.wsgi_worker_class":
["WSGI_WORKER_CLASS",
"sync",
check_worker_class,
("The default OMERO.web uses sync workers to handle most “normal” "
"types of workloads. Check Gunicorn Design Documentation "
"http://docs.gunicorn.org/en/stable/design.html")],
"omero.web.wsgi_worker_connections":
["WSGI_WORKER_CONNECTIONS",
1000,
int,
("(ASYNC WORKERS only) The maximum number of simultaneous clients. "
"Check Gunicorn Documentation http://docs.gunicorn.org"
"/en/stable/settings.html#worker-connections")],
"omero.web.wsgi_threads":
["WSGI_THREADS",
1,
check_threading,
("(SYNC WORKERS only) The number of worker threads for handling "
"requests. Check Gunicorn Documentation "
"http://docs.gunicorn.org/en/stable/settings.html#threads")],
}
def map_deprecated_settings(settings):
m = {}
for key, values in settings.items():
try:
global_name = values[0]
m[global_name] = (CUSTOM_SETTINGS[key], key)
if len(values) < 5:
# Not using default (see process_custom_settings)
values.append(False)
except KeyError:
if len(values) < 5:
values.append(True)
return m
def process_custom_settings(
module, settings='CUSTOM_SETTINGS_MAPPINGS', deprecated=None):
logging.info('Processing custom settings for module %s' % module.__name__)
if deprecated:
deprecated_map = map_deprecated_settings(
getattr(module, deprecated, {}))
else:
deprecated_map = {}
for key, values in getattr(module, settings, {}).items():
# Django may import settings.py more than once, see:
# http://blog.dscpl.com.au/2010/03/improved-wsgi-script-for-use-with.html
# In that case, the custom settings have already been processed.
if len(values) == 5:
continue
global_name, default_value, mapping, description = values
try:
global_value = CUSTOM_SETTINGS[key]
values.append(False)
except KeyError:
global_value = default_value
values.append(True)
try:
using_default = values[-1]
if global_name in deprecated_map:
dep_value, dep_key = deprecated_map[global_name]
if using_default:
logging.warning(
'Setting %s is deprecated, use %s', dep_key, key)
global_value = dep_value
else:
logging.error(
'%s and its deprecated key %s are both set, using %s',
key, dep_key, key)
setattr(module, global_name, mapping(global_value))
except ValueError, e:
raise ValueError(
"Invalid %s (%s = %r). %s. %s" %
(global_name, key, global_value, e.message, description))
except ImportError, e:
raise ImportError(
"ImportError: %s. %s (%s = %r).\n%s" %
(e.message, global_name, key, global_value, description))
except LeaveUnset:
pass
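# --- illustrative sketch (hypothetical key and names): a module defining
#
#     CUSTOM_SETTINGS_MAPPINGS = {
#         "omero.web.example.retries":
#             ["EXAMPLE_RETRIES", 3, int, "Hypothetical retry count."],
#     }
#
# and passed to process_custom_settings(module) ends up with
# module.EXAMPLE_RETRIES == int(CUSTOM_SETTINGS["omero.web.example.retries"])
# when the key exists in config.xml, or == 3 otherwise.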
process_custom_settings(sys.modules[__name__], 'INTERNAL_SETTINGS_MAPPING')
process_custom_settings(sys.modules[__name__], 'CUSTOM_SETTINGS_MAPPINGS',
'DEPRECATED_SETTINGS_MAPPINGS')
process_custom_settings(sys.modules[__name__], 'DEVELOPMENT_SETTINGS_MAPPINGS')
if not DEBUG: # from CUSTOM_SETTINGS_MAPPINGS # noqa
LOGGING['loggers']['django.request']['level'] = 'INFO'
LOGGING['loggers']['django']['level'] = 'INFO'
LOGGING['loggers']['']['level'] = 'INFO'
def report_settings(module):
from django.views.debug import cleanse_setting
custom_settings_mappings = getattr(module, 'CUSTOM_SETTINGS_MAPPINGS', {})
for key in sorted(custom_settings_mappings):
values = custom_settings_mappings[key]
global_name, default_value, mapping, description, using_default = \
values
source = using_default and "default" or key
global_value = getattr(module, global_name, None)
if global_name.isupper():
logger.debug(
"%s = %r (source:%s)", global_name,
cleanse_setting(global_name, global_value), source)
deprecated_settings = getattr(module, 'DEPRECATED_SETTINGS_MAPPINGS', {})
for key in sorted(deprecated_settings):
values = deprecated_settings[key]
global_name, default_value, mapping, description, using_default = \
values
global_value = getattr(module, global_name, None)
if global_name.isupper() and not using_default:
logger.debug(
"%s = %r (deprecated:%s, %s)", global_name,
cleanse_setting(global_name, global_value), key, description)
report_settings(sys.modules[__name__])
SITE_ID = 1
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
TIME_ZONE = 'Europe/London'
FIRST_DAY_OF_WEEK = 0 # 0-Monday, ... 6-Sunday
# LANGUAGE_CODE: A string representing the language code for this
# installation. This should be in standard language format. For example, U.S.
# English is "en-us".
LANGUAGE_CODE = 'en-gb'
# SECRET_KEY: A secret key for this particular Django installation. Used to
# provide a seed in secret-key hashing algorithms. Set this to a random string,
# the longer, the better. Make this unique, and don't share it with anybody.
try:
SECRET_KEY
except NameError:
secret_path = os.path.join(OMERO_HOME, 'var',
'django_secret_key').replace('\\', '/')
if not os.path.isfile(secret_path):
try:
secret_key = ''.join(
[random.SystemRandom()
.choice("{0}{1}{2}"
.format(string.ascii_letters,
string.digits,
string.punctuation)) for i in range(50)]
)
with os.fdopen(os.open(secret_path,
os.O_WRONLY | os.O_CREAT,
0600), 'w') as secret_file:
secret_file.write(secret_key)
except IOError, e:
raise IOError("Please create a %s file with random characters"
" to generate your secret key!" % secret_path)
try:
with open(secret_path, 'r') as secret_file:
SECRET_KEY = secret_file.read().strip()
except IOError, e:
raise IOError("Could not find secret key in %s!" % secret_path)
# USE_I18N: A boolean that specifies whether Django's internationalization
# system should be enabled.
# This provides an easy way to turn it off, for performance. If this is set to
# False, Django will make some optimizations so as not to load the
# internationalization machinery.
USE_I18N = True
# ROOT_URLCONF: A string representing the full Python import path to your root
# URLconf.
# For example: "mydjangoapps.urls". Can be overridden on a per-request basis
# by setting the attribute urlconf on the incoming HttpRequest object.
ROOT_URLCONF = 'omeroweb.urls'
# STATICFILES_FINDERS: The list of finder backends that know how to find
# static files in various locations. The default will find files stored in the
# STATICFILES_DIRS setting (using
# django.contrib.staticfiles.finders.FileSystemFinder) and in a static
# subdirectory of each app (using
# django.contrib.staticfiles.finders.AppDirectoriesFinder)
STATICFILES_FINDERS = (
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
)
# STATICFILES_DIRS: This setting defines the additional locations the
# staticfiles app will traverse if the FileSystemFinder finder is enabled,
# e.g. if you use the collectstatic or findstatic management command or use
# the static file serving view.
# from CUSTOM_SETTINGS_MAPPINGS
# STATICFILES_DIRS += (("webapp/custom_static", path/to/statics),) # noqa
# TEMPLATES: A list containing the settings for all template engines
# to be used with Django. Each item of the list is a dictionary containing
# the options for an individual engine.
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': TEMPLATE_DIRS, # noqa
'APP_DIRS': True,
'OPTIONS': {
'debug': DEBUG, # noqa
'context_processors': [
# Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
# list if you haven't customized them:
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'omeroweb.custom_context_processor.url_suffix',
],
},
},
]
# INSTALLED_APPS: A tuple of strings designating all applications that are
# enabled in this Django installation. Each string should be a full Python
# path to a Python package that contains a Django application, as created by
# django-admin.py startapp.
INSTALLED_APPS = (
'django.contrib.staticfiles',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
)
# ADDITIONAL_APPS: we import any settings.py from apps, which allows them to
# modify settings.
# We also process any CUSTOM_SETTINGS_MAPPINGS defined there.
for app in ADDITIONAL_APPS: # from CUSTOM_SETTINGS_MAPPINGS # noqa
# Previously the app was added to INSTALLED_APPS as 'omeroweb.app', which
# then required the app to reside within or be symlinked from within
# omeroweb, instead of just having to be somewhere on the python path.
# To allow apps to just be on the path, but keep it backwards compatible,
# try to import as omeroweb.app, if it works, keep that in INSTALLED_APPS,
# otherwise add it to INSTALLED_APPS just with its own name.
try:
__import__('omeroweb.%s' % app)
INSTALLED_APPS += ('omeroweb.%s' % app,)
except ImportError:
INSTALLED_APPS += (app,)
try:
logger.debug(
'Attempting to import additional app settings for app: %s' % app)
module = __import__('%s.settings' % app)
process_custom_settings(module.settings)
report_settings(module.settings)
except ImportError:
logger.debug("Couldn't import settings from app: %s" % app)
INSTALLED_APPS += (
'omeroweb.feedback',
'omeroweb.webadmin',
'omeroweb.webclient',
'omeroweb.webgateway',
'omeroweb.webredirect',
'omeroweb.api',
'pipeline',
)
logger.debug('INSTALLED_APPS=%s' % list(INSTALLED_APPS))
PIPELINE_CSS = {
'webgateway_viewer': {
'source_filenames': (
'webgateway/css/reset.css',
'webgateway/css/ome.body.css',
'webclient/css/dusty.css',
'webgateway/css/ome.viewport.css',
'webgateway/css/ome.toolbar.css',
'webgateway/css/ome.gs_slider.css',
'webgateway/css/base.css',
'webgateway/css/ome.snippet_header_logo.css',
'webgateway/css/ome.postit.css',
'3rdparty/farbtastic-1.2/farbtastic.css',
'webgateway/css/ome.colorbtn.css',
'3rdparty/JQuerySpinBtn-1.3a/JQuerySpinBtn.css',
'3rdparty/jquery-ui-1.10.4/themes/base/jquery-ui.all.css',
'webgateway/css/omero_image.css',
'3rdparty/panojs-2.0.0/panojs.css',
),
'output_filename': 'omeroweb.viewer.min.css',
},
}
PIPELINE_JS = {
'webgateway_viewer': {
'source_filenames': (
'3rdparty/jquery-1.11.1.js',
'3rdparty/jquery-migrate-1.2.1.js',
'3rdparty/jquery-ui-1.10.4/js/jquery-ui.1.10.4.js',
'webgateway/js/ome.popup.js',
'3rdparty/aop-1.3.js',
'3rdparty/raphael-2.1.0/raphael.js',
'3rdparty/raphael-2.1.0/scale.raphael.js',
'3rdparty/panojs-2.0.0/utils.js',
'3rdparty/panojs-2.0.0/PanoJS.js',
'3rdparty/panojs-2.0.0/controls.js',
'3rdparty/panojs-2.0.0/pyramid_Bisque.js',
'3rdparty/panojs-2.0.0/pyramid_imgcnv.js',
'3rdparty/panojs-2.0.0/pyramid_Zoomify.js',
'3rdparty/panojs-2.0.0/control_thumbnail.js',
'3rdparty/panojs-2.0.0/control_info.js',
'3rdparty/panojs-2.0.0/control_svg.js',
'3rdparty/panojs-2.0.0/control_roi.js',
'3rdparty/panojs-2.0.0/control_scalebar.js',
'3rdparty/hammer-2.0.2/hammer.min.js',
'webgateway/js/ome.gs_utils.js',
'webgateway/js/ome.viewportImage.js',
'webgateway/js/ome.gs_slider.js',
'webgateway/js/ome.viewport.js',
'webgateway/js/omero_image.js',
'webgateway/js/ome.roidisplay.js',
'webgateway/js/ome.scalebardisplay.js',
'webgateway/js/ome.smartdialog.js',
'webgateway/js/ome.roiutils.js',
'3rdparty/JQuerySpinBtn-1.3a/JQuerySpinBtn.js',
'webgateway/js/ome.colorbtn.js',
'webgateway/js/ome.postit.js',
'3rdparty/jquery.selectboxes-2.2.6.js',
'3rdparty/farbtastic-1.2/farbtastic.js',
'3rdparty/jquery.mousewheel-3.0.6.js',
),
'output_filename': 'omeroweb.viewer.min.js',
}
}
CSRF_FAILURE_VIEW = "omeroweb.feedback.views.csrf_failure"
# Configuration for django-cors-headers app
# See https://github.com/ottoyiu/django-cors-headers
# Configuration of allowed origins is handled by the custom settings above
CORS_ALLOW_CREDENTIALS = True
# Needed for Django <1.9 since CSRF_TRUSTED_ORIGINS not supported
CORS_REPLACE_HTTPS_REFERER = True
# FEEDBACK - DO NOT MODIFY!
# FEEDBACK_URL: is now configurable for testing purposes only. Used in the
# feedback.sendfeedback.SendFeedback class in order to submit errors or
# comment messages to http://qa.openmicroscopy.org.uk.
# FEEDBACK_APP: 6 = OMERO.web
FEEDBACK_APP = 6
# IGNORABLE_404_STARTS:
# Default: ('/cgi-bin/', '/_vti_bin', '/_vti_inf')
# IGNORABLE_404_ENDS:
# Default: ('mail.pl', 'mailform.pl', 'mail.cgi', 'mailform.cgi',
# 'favicon.ico', '.php')
# SESSION_FILE_PATH: If you're using file-based session storage, this sets the
# directory in which Django will store session data. When the default value
# (None) is used, Django will use the standard temporary directory for the
# system.
SESSION_FILE_PATH = tempfile.gettempdir()
# FILE_UPLOAD_TEMP_DIR: The directory to store data temporarily while
# uploading files.
FILE_UPLOAD_TEMP_DIR = tempfile.gettempdir()
# FILE_UPLOAD_MAX_MEMORY_SIZE: The maximum size (in bytes) that an upload
# will be before it gets streamed to the file system.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # default 2621440 (i.e. 2.5 MB).
# DEFAULT_IMG: Used in
# webclient.webclient_gateway.OmeroWebGateway.defaultThumbnail in order to
# load default image while thumbnail can't be retrieved from the server.
DEFAULT_IMG = os.path.join(
os.path.dirname(__file__), 'webgateway', 'static', 'webgateway', 'img',
'image128.png').replace('\\', '/')
# DEFAULT_USER: Used in
# webclient.webclient_gateway.OmeroWebGateway.getExperimenterDefaultPhoto in
# order to load default avatar while experimenter photo can't be retrieved
# from the server.
DEFAULT_USER = os.path.join(
os.path.dirname(__file__), 'webgateway', 'static', 'webgateway', 'img',
'personal32.png').replace('\\', '/')
# MANAGERS: A tuple in the same format as ADMINS that specifies who should get
# broken-link notifications when
# SEND_BROKEN_LINK_EMAILS=True.
MANAGERS = ADMINS # from CUSTOM_SETTINGS_MAPPINGS # noqa
# https://docs.djangoproject.com/en/1.6/releases/1.6/#default-session-serialization-switched-to-json
# JSON serializer, which is now the default, cannot handle
# omeroweb.connector.Connector object
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
# Load server list and freeze
from utils import sort_properties_to_tuple
# MIDDLEWARE_CLASSES: A tuple of middleware classes to use.
MIDDLEWARE_CLASSES = sort_properties_to_tuple(MIDDLEWARE_CLASSES_LIST) # noqa
# Load server list and freeze
from connector import Server
def load_server_list():
for s in SERVER_LIST: # from CUSTOM_SETTINGS_MAPPINGS # noqa
server = (len(s) > 2) and unicode(s[2]) or None
Server(host=unicode(s[0]), port=int(s[1]), server=server)
Server.freeze()
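# Illustration: with the default omero.web.server_list of
# '[["localhost", 4064, "omero"]]', load_server_list() registers a single
# Server(host=u'localhost', port=4064, server=u'omero'); freeze() then
# prevents further changes at runtime.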
load_server_list()
| simleo/openmicroscopy | components/tools/OmeroWeb/omeroweb/settings.py | Python | gpl-2.0 | 47,080 |
from django.db import models
class Language(models.Model):
code = models.CharField(max_length=5)
description = models.CharField(max_length=50)
def __unicode__(self):
return u'%s' % (self.code)
| cjaniake/ionicweb | webapp/common/models.py | Python | gpl-2.0 | 214 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, sys, time
import csv
def diffCSVofNoHeaderRow(file1, file2, resultFile, fieldnames, keyColumn):
    # Index every row of file1 by its key column. Note: the key is hard-coded
    # as column index 2 here; the keyColumn argument is informational only.
    with open(file1, 'rb') as master:
        reader1 = csv.reader(master, delimiter='\t')
        master_indices = dict((r[2], i) for i, r in enumerate(reader1))
    with open(file2, 'rb') as hosts:
        with open(resultFile, 'wb') as results:
            file2Reader = csv.reader(hosts, delimiter='\t')
            writer = csv.writer(results)
            # Add a header row
            writer.writerow(fieldnames + ['RESULTS'])
            for row in file2Reader:
                # Compare the key column against the file1 index
                index = master_indices.get(row[2])
                if index is not None:
                    message = 'FOUND in file1 list (row {})'.format(index)
                else:
                    message = 'NOT FOUND in file1 list'
                writer.writerow(row + [message])
            # Trailing blank row as a separator (the original passed '\n',
            # which csv.writer would have written as a one-character field)
            writer.writerow([])
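# Illustration (hypothetical data): if b.csv has a row whose column 2 value
# "t01" also appears in column 2 of a.csv at row index 5, the output row gains
# 'FOUND in file1 list (row 5)'; otherwise it gains 'NOT FOUND in file1 list'.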
if __name__ == '__main__':
#path1 = format_path(raw_input('Please input PATH 1: '))
#path2 = format_path(raw_input('Please input PATH 2: '))
file1 = "a.csv"
file2 = "b.csv"
resultFile = "results.csv"
# table updatedTranscription
fieldnames = ['id', 'batch_date', 'transcriber_id', 'voice_num', 'voice_filename', 'transcription', 'tag_1', 'tag_2', 'NonN', 'reviewer1_id', 'reviewer2_id', 'comments', 'AMI_transcription', 'AMI_tag_1', 'AMI_tag_2', 'AMI_NonN', 'check_result', 'pending_level', 'to_so_comment', 'in_AMI_comment', 'result_basis', 'AMI_checker', 'final_checker', 'result_filename', 'create_date', 'update_date']
keyColumn = 'transcriber_id'
diffCSVofNoHeaderRow(file1, file2, resultFile, fieldnames, keyColumn)
| AaronZhangL/az-pyFilesLib | case2/outDiffRecordByIndexColumn.py | Python | gpl-2.0 | 2,326 |
'''
Created on Feb 2, 2014
@author: Chris
TODO:
- test no argparse module
- test argparse in main
- test argparse in try/catch
-
'''
import os
import ast
import unittest
import source_parser
basic_pyfile = '''
import os
def say_jello():
print "Jello!"
def main():
print "hello!"
parser = ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("filename", help="filename")
parser.add_argument("-r", "--recursive", dest="recurse", action="store_true",
help="recurse into subfolders [default: %(default)s]")
parser.add_argument("-v", "--verbose", dest="verbose", action="count",
help="set verbosity level [default: %(default)s]")
parser.add_argument("-i", "--include", action="append",
help="only include paths matching this regex pattern. Note: exclude is given preference over include. [default: %(default)s]",
metavar="RE")
parser.add_argument("-m", "--mycoolargument", help="mycoolargument")
parser.add_argument("-e", "--exclude", dest="exclude",
help="exclude paths matching this regex pattern. [default: %(default)s]", metavar="RE")
parser.add_argument('-V', '--version', action='version')
parser.add_argument('-T', '--tester', choices=['yes', 'no'])
parser.add_argument(dest="paths", help="paths to folder(s) with source file(s) [default: %(default)s]",
metavar="path", nargs='+')
if __name__ == '__main__':
main()
'''
class TestSourceParser(unittest.TestCase):
PATH = os.path.join(os.path.dirname(__file__), 'mockapplications')
def module_path(self, name):
return os.path.join(self.PATH, name)
def setUp(self):
self._mockapp = self.module_path('mockapplications.py')
self._module_with_noargparse = self.module_path('module_with_no_argparse.py')
        self._module_with_argparse_in_try = self.module_path('example_argparse_souce_in_try.py')
self._module_with_argparse_in_main = self.module_path('example_argparse_souce_in_main.py')
def test_should_throw_parser_exception_if_no_argparse_found_in_module(self):
with self.assertRaises(source_parser.ParserError):
source_parser.parse_source_file(self._module_with_noargparse)
def test_find_main(self):
example_source = '''
def main(): pass
'''
nodes = ast.parse(example_source)
main_node = source_parser.find_main(nodes)
self.assertEqual('main', main_node.name)
def test_find_main_throws_exception_if_not_found(self):
example_source = '''
def some_cool_function_that_is_not_main(): pass
'''
with self.assertRaises(source_parser.ParserError):
nodes = ast.parse(example_source)
main_node = source_parser.find_main(nodes)
self.assertEqual('main', main_node.name)
def test_find_try_blocks_finds_all_tryblock_styles(self):
example_source = '''
try: a = 1
except: pass
try: pass
finally: pass
try: pass
except: pass
else: pass
'''
nodes = ast.parse(example_source)
try_blocks = source_parser.find_try_blocks(nodes)
self.assertEqual(3, len(try_blocks))
def test_find_try_blocks_returns_empty_if_no_blocks_present(self):
example_source = 'def main(): pass'
nodes = ast.parse(example_source)
result = source_parser.find_try_blocks(nodes)
self.assertEqual(list(), result)
def test_find_argparse_located_object_when_imported_by_direct_name(self):
example_source = '''
def main():
parser = ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
'''
nodes = ast.parse(example_source)
main_node = source_parser.find_main(nodes)
self.assertEqual('main', main_node.name)
containing_block = source_parser.find_block_containing_argparse([main_node])
self.assertTrue(containing_block is not None)
def test_find_argparse_located_object_when_access_through_module_dot_notation(self):
example_source = '''
def main():
parser = argparse.ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
'''
nodes = ast.parse(example_source)
main_node = source_parser.find_main(nodes)
self.assertEqual('main', main_node.name)
containing_block = source_parser.find_block_containing_argparse([main_node])
self.assertTrue(containing_block is not None)
def test_find_argparse_locates_assignment_stmnt_in_main(self):
nodes = ast.parse(source_parser._openfile(self._module_with_argparse_in_main))
main_node = source_parser.find_main(nodes)
self.assertEqual('main', main_node.name)
containing_block = source_parser.find_block_containing_argparse([main_node])
self.assertTrue(containing_block is not None)
self.assertEqual('main', containing_block.name)
def test_find_argparse_locates_assignment_stmnt_in_try_block(self):
        nodes = ast.parse(source_parser._openfile(self._module_with_argparse_in_try))
main_node = source_parser.find_main(nodes)
self.assertEqual('main', main_node.name)
try_nodes = source_parser.find_try_blocks(main_node)
self.assertTrue(len(try_nodes) > 0)
containing_block = source_parser.find_block_containing_argparse([main_node] + try_nodes)
self.assertEqual(ast.TryExcept, type(containing_block))
def test_find_argparse_throws_exception_if_not_found(self):
with self.assertRaises(source_parser.ParserError):
nodes = ast.parse(source_parser._openfile(self._module_with_noargparse))
main_node = source_parser.find_main(nodes)
self.assertEqual('main', main_node.name)
try_nodes = source_parser.find_try_blocks(main_node)
containing_block = source_parser.find_block_containing_argparse([main_node] + try_nodes)
def test_has_instantiator_returns_true_if_object_found(self):
source = '''
parser = ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("filename", help="filename")
'''
nodes = ast.parse(source)
self.assertTrue(source_parser.has_instantiator(nodes.body[0], 'ArgumentParser'))
def test_has_instantiator_returns_false_if_object_not_found(self):
source = '''
parser = NopeParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("filename", help="filename")
'''
nodes = ast.parse(source)
self.assertFalse(source_parser.has_instantiator(nodes.body[0], 'ArgumentParser'))
def test_has_assignment_returns_true_if_object_found(self):
source = '''
parser = ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("filename", help="filename")
'''
nodes = ast.parse(source)
self.assertTrue(source_parser.has_assignment(nodes.body[1], 'add_argument'))
def test_has_assignment_returns_false_if_object_not_found(self):
source = '''
parser = ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("filename", help="filename")
'''
nodes = ast.parse(source)
self.assertFalse(source_parser.has_instantiator(nodes.body[1], 'add_argument'))
def test_parser_identifies_import_module(self):
source = '''
import os
import itertools
from os import path
'''
import _ast
nodes = ast.parse(source)
module_imports = source_parser.get_nodes_by_instance_type(nodes, _ast.Import)
self.assertEqual(2, len(module_imports))
def test_parser_identifies_import_from(self):
source = '''
import os
import itertools
from os import path
from gooey.gooey_decorator import Gooey
'''
import _ast
nodes = ast.parse(source)
from_imports = source_parser.get_nodes_by_instance_type(nodes, _ast.ImportFrom)
self.assertEqual(2, len(from_imports))
def test_get_indent_return_indent_amount_for_tabs_and_spaces(self):
spaced_lines = ["def main"," def main"," def main"," def main"]
expected_indent = ["", " ", " ", " "]
for line, expected in zip(spaced_lines, expected_indent):
self.assertEqual(expected, source_parser.get_indent(line))
    # def test_parse_source_file__file_with_argparse_in_main__successfully_finds_and_returns_ast_objects(self):
    #     ast_objects = source_parser.parse_source_file(self._module_with_argparse_in_main)
    #     for obj in ast_objects:
    #         self.assertTrue(type(obj) in (ast.Assign, ast.Expr))
    #
    # def test_parse_source_file__file_with_argparse_in_try_block__successfully_finds_and_returns_ast_objects(self):
    #     ast_objects = source_parser.parse_source_file(self._module_with_argparse_in_try)
    #     for obj in ast_objects:
    #         self.assertTrue(type(obj) in (ast.Assign, ast.Expr))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| garrettcap/Bulletproof-Backup | gooey/source_parser_unittest.py | Python | gpl-2.0 | 9,087 |
# Django settings for PayForward project.
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
DEBUG = True
TEMPLATE_DEBUG = DEBUG
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'loginza.authentication.LoginzaBackend',
)
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
)
LOGINZA_AMNESIA_PATHS = ('/users/complete_registration/',)
ADMINS = (
# ('Sergii Danyuk', 'danikmil@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'payforward', # Or path to database file if using sqlite3.
'USER': 'openerp', # Not used with sqlite3.
'PASSWORD': 'postgres', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
#'PORT': '5050', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/Kiev'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'ru-ru'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = '/home/danikmil/Dropbox/payforward/uploads/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/uploads/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '^r_#^_zr14ch4+ipe_6*cfbw1s(ka^dh=a(f7wt&x8n90@sih)'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'PayForward.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'PayForward.wsgi.application'
import os
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), '../', 'templates').replace('\\','/'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
'django.contrib.flatpages',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'tasks',
'south',
'loginza',
'users'
)
| danikmil/payforward | PayForward/settings.py | Python | gpl-2.0 | 4,845 |
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from plugins.models import Plugin, PluginVersion
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.decorators import login_required, user_passes_test
from plugins.views import *
from rpc4django.views import serve_rpc_request
# Plugins filtered views (need user parameter from request)
urlpatterns = [
# XML
url(r'^plugins_new.xml$', xml_plugins_new, {}, name='xml_plugins_new'),
url(r'^plugins.xml$', xml_plugins, {}, name='xml_plugins'),
url(r'^plugins_(?P<qg_version>\d+\.\d+).xml$', xml_plugins, {}, name='xml_plugins_version_filtered_cached'),
url(r'^version_filtered/(?P<qg_version>\d+\.\d+).xml$', xml_plugins, {}, name='xml_plugins_version_filtered_uncached'),
url(r'^tags/(?P<tags>[^\/]+)/$', TagsPluginsList.as_view(), name='tags_plugins'),
url(r'^add/$', plugin_upload, {}, name='plugin_upload'),
url(r'^user/(?P<username>\w+)/block/$', user_block, {}, name='user_block'),
url(r'^user/(?P<username>\w+)/unblock/$', user_unblock, {}, name='user_unblock'),
url(r'^user/(?P<username>\w+)/trust/$', user_trust, {}, name='user_trust'),
url(r'^user/(?P<username>\w+)/untrust/$', user_untrust, {}, name='user_untrust'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/manage/$', plugin_manage, {}, name='plugin_manage'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/delete/$', plugin_delete, {}, name='plugin_delete'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/update/$', plugin_update, {}, name='plugin_update'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/set_featured/$', plugin_set_featured, {}, name='plugin_set_featured'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/unset_featured/$', plugin_unset_featured, {}, name='plugin_unset_featured'),
url(r'^user/(?P<username>\w+)/admin$', UserDetailsPluginsList.as_view(), name='user_details'),
url(r'^$', PluginsList.as_view(), name='approved_plugins'),
url(r'^my$', login_required(MyPluginsList.as_view(additional_context={'title':_('My Plugins')})), name='my_plugins'),
url(r'^featured/$', PluginsList.as_view(queryset=Plugin.featured_objects.all(), additional_context={'title' : _('Featured plugins')}), name='featured_plugins'),
url(r'^user/(?P<username>\w+)/$', UserPluginsList.as_view(), name='user_plugins'),
url(r'^server/$', PluginsList.as_view(queryset=Plugin.server_objects.all(), additional_context={'title' : _('QGIS Server plugins')}), name='server_plugins'),
url(r'^unapproved/$', PluginsList.as_view(queryset=Plugin.unapproved_objects.all(), additional_context={'title' : _('Unapproved plugins')}), name='unapproved_plugins'),
url(r'^deprecated/$', PluginsList.as_view(queryset=Plugin.deprecated_objects.all(), additional_context={'title' : _('Deprecated plugins')}), name='deprecated_plugins'),
url(r'^fresh/$', PluginsList.as_view(queryset=Plugin.fresh_objects.all(), additional_context={'title' : _('New plugins')}), name='fresh_plugins'),
url(r'^latest/$', PluginsList.as_view(queryset=Plugin.latest_objects.all(), additional_context={'title' : _('Updated plugins')}), name='latest_plugins'),
url(r'^stable/$', PluginsList.as_view(queryset=Plugin.stable_objects.all(), additional_context={'title' : _('Stable plugins')}), name='stable_plugins'),
url(r'^experimental/$', PluginsList.as_view(queryset=Plugin.experimental_objects.all(), additional_context={'title' : _('Experimental plugins')}), name='experimental_plugins'),
url(r'^popular/$', PluginsList.as_view(queryset=Plugin.popular_objects.all(), additional_context={'title' : _('Popular plugins')}), name='popular_plugins'),
url(r'^most_voted/$', PluginsList.as_view(queryset=Plugin.most_voted_objects.all(), additional_context={'title' : _('Most voted plugins')}), name='most_voted_plugins'),
url(r'^most_downloaded/$', PluginsList.as_view(queryset=Plugin.most_downloaded_objects.all(), additional_context={'title' : _('Most downloaded plugins')}), name='most_downloaded_plugins'),
url(r'^most_rated/$', PluginsList.as_view(queryset=Plugin.most_rated_objects.all(), additional_context={'title' : _('Most rated plugins')}), name='most_rated_plugins'),
url(r'^author/(?P<author>[^/]+)/$', AuthorPluginsList.as_view(), name='author_plugins'),
]
# User management
urlpatterns += [
url(r'^user/(?P<username>\w+)/manage/$', user_permissions_manage, {}, name='user_permissions_manage'),
]
# Version Management
urlpatterns += [
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/(?P<version>[^\/]+)/manage/$', version_manage, {}, name='version_manage'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/add/$', version_create, {}, name='version_create'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/(?P<version>[^\/]+)/$', version_detail, {}, name='version_detail'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/(?P<version>[^\/]+)/delete/$', version_delete, {}, name='version_delete'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/(?P<version>[^\/]+)/update/$', version_update, {}, name='version_update'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/(?P<version>[^\/]+)/download/$', version_download, {}, name='version_download'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/(?P<version>[^\/]+)/approve/$', version_approve, {}, name='version_approve'),
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/version/(?P<version>[^\/]+)/unapprove/$', version_unapprove, {}, name='version_unapprove'),
]
# RPC
urlpatterns += [
# rpc4django will need to be in your Python path
url(r'^RPC2/$', serve_rpc_request),
]
# plugin rating
from djangoratings.views import AddRatingFromModel
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie
urlpatterns += [
url(r'rate/(?P<object_id>\d+)/(?P<score>\d+)/', require_POST(csrf_protect(AddRatingFromModel())), {
'app_label': 'plugins',
'model': 'plugin',
'field_name': 'rating',
}, name='plugin_rate'),
]
# Plugin detail (keep last)
urlpatterns += [
url(r'^(?P<package_name>[A-Za-z][A-Za-z0-9-_]+)/$', PluginDetailView.as_view(slug_url_kwarg='package_name', slug_field='package_name'), name='plugin_detail'),
]
|
qgis/QGIS-Django
|
qgis-app/plugins/urls.py
|
Python
|
gpl-2.0
| 6,575
|
"""Stockplanconnect release/trade transaction source.
Data format
===========
To use, first download PDF release and trade confirmations into a directory on
the filesystem either manually or using the `finance_dl.stockplanconnect`
module.
You might have a directory structure like:
financial/
documents/
stockplanconnect/
%Y-%m-%d.Restricted_Units.Trade_Confirmations.Confirmation.pdf
%Y-%m-%d.Restricted_Units.Trade_Confirmations.Confirmation.<N>.pdf
%Y-%m-%d.Restricted_Units.Trade_Confirmations.Release_Confirmation.pdf
%Y-%m-%d.Restricted_Units.Trade_Confirmations.Release_Confirmation.<N>.pdf
where `<N>` is a base-10 number. Only filenames with these patterns are
recognized.
Specifying the source to beancount_import
=========================================
Within your Python script for invoking beancount_import, you might use an
expression like the following to specify the Stockplanconnect source:
dict(
module='beancount_import.source.stockplanconnect',
payee='My Company',
directory=os.path.join(journal_dir,
'documents', 'stockplanconnect'),
income_account='Income:MyCompany:Equity',
capital_gains_account='Income:Morgan-Stanley:Capital-Gains',
fees_account='Income:Expenses:Financial:Investment-Fees:Morgan-Stanley',
asset_account='Assets:Investment:Morgan-Stanley:MyCompany',
)
Optionally, you may also specify a `tax_accounts` key with value like:
tax_accounts=collections.OrderedDict([
('Federal Tax', 'Income:Expenses:Taxes:TY{year:04d}:Federal:Income'),
('State Tax', 'Income:Expenses:Taxes:TY{year:04d}:California:Income'),
('Medicare Tax', 'Income:Expenses:Taxes:TY{year:04d}:Federal:Medicare'),
]),
However, if you are also importing payroll statements that include the tax
breakdown as well, it works better to leave `tax_accounts` unspecified.
"""
from typing import Union, Optional, List, Set, Dict, Tuple, Any
import datetime
import os
import re
import collections
from beancount.core.data import Open, Transaction, Posting, Amount, Pad, Balance, Directive, EMPTY_SET, Entries
from beancount.core.amount import sub as amount_sub
from beancount.core.position import CostSpec
from beancount.core.number import D, ZERO
from beancount.core.number import MISSING
from beancount_import.posting_date import POSTING_DATE_KEY
from beancount_import.amount_parsing import parse_amount
from beancount_import.matching import FIXME_ACCOUNT
from beancount_import.source import ImportResult, Source, InvalidSourceReference, SourceResults, AssociatedData, LogFunction
from .stockplanconnect_statement import Release, TradeConfirmation, get_document_type
AWARD_NOTE_KEY = 'stock_award_note'
AWARD_ID_KEY = 'stock_award_id'
TRADE_REFERENCE_NUMBER_KEY = 'trade_ref_number'
def load_documents(directory: str, log_status: LogFunction):
releases = []
trades = []
for name in sorted(os.listdir(directory)):
path = os.path.join(directory, name)
class_type = get_document_type(path)
if class_type is None: continue
log_status('stockplanconnect_source: loading %s' % name)
doc = class_type(path)
if class_type is Release:
releases.append(doc)
else:
trades.append(doc)
return releases, trades
class StockplanconnectSource(Source):
def __init__(self,
directory: str,
income_account: str,
asset_account: str,
capital_gains_account: str,
fees_account: str,
payee: str,
tax_accounts: Optional[Dict[str, str]] = None,
**kwargs):
super().__init__(**kwargs)
self.directory = directory
self.releases, self.trades = load_documents(directory, self.log_status)
self.income_account = income_account
self.asset_account = asset_account
self.asset_cash_account = asset_account + ':Cash'
self.fees_account = fees_account
self.capital_gains_account = capital_gains_account
self.tax_accounts = tax_accounts
self.payee = payee
def check_for_duplicates(documents, get_key):
result = dict()
documents_without_duplicates = []
for x in documents:
key = get_key(x)
if key in result:
# raise RuntimeError('Duplicate document found: existing=%r, new=%r' %
# (result[key].path, x.path))
continue
documents_without_duplicates.append(x)
result[key] = x
return result, documents_without_duplicates
# Maps (income_account, release_date) pair to release object
self.release_dates, self.releases = check_for_duplicates(
self.releases, self.get_release_key)
self.release_stock_posting_keys = set(
self.get_stock_posting_key(r) for r in self.releases
if r.net_release_shares is not None)
self.trade_keys, self.trades = check_for_duplicates(
self.trades, self.get_trade_key)
self.expected_trade_transfers = {(t.settlement_date,
t.reference_number): t
for t in self.trades}
self.expected_release_transfers = {(r.settlement_date or r.release_date,
r.award_id): r
for r in self.releases}
self.income_accounts = set(
self.get_income_account(r) for r in self.releases)
self.managed_accounts = set(self.income_accounts)
self.managed_accounts.add(self.asset_cash_account)
self.stock_accounts = set(
self.asset_account + ':' + r.symbol for r in self.releases)
self.managed_accounts.update(self.stock_accounts)
def get_income_account(self, r: Release):
return '%s:%s:%s' % (self.income_account, r.award_id, r.symbol)
def get_stock_account(self, x: Union[Release, TradeConfirmation]):
return self.asset_account + ':' + x.symbol
def get_stock_posting_key(self, r: Release):
stock_account = '%s:%s' % (self.asset_account, r.symbol)
return (stock_account, r.release_date, r.award_id)
def get_release_key(self, r: Release):
return (self.get_income_account(r), r.release_date)
def get_trade_key(self, r: TradeConfirmation):
return (self.get_stock_account(r), r.trade_date, r.reference_number)
def _preprocess_entries(self, entries: Entries):
seen_releases = dict(
) # type: Dict[Tuple[str, Optional[datetime.date]], List[Tuple[Transaction, Posting]]]
seen_release_stock_postings = dict(
) # type: Dict[Tuple[str, datetime.date, Optional[str]], List[Tuple[Transaction, Posting]]]
seen_trades = dict(
) # type: Dict[Tuple[str, datetime.date, Any], List[Tuple[Transaction, Posting]]]
seen_release_transfers = dict(
) # type: Dict[Tuple[datetime.date, str], List[Tuple[Transaction, Posting]]]
seen_trade_transfers = dict(
) # type: Dict[Tuple[datetime.date, str], List[Tuple[Transaction, Posting]]]
income_account_prefix = self.income_account + ':'
for entry in entries:
if not isinstance(entry, Transaction): continue
for posting in entry.postings:
if posting.account.startswith(income_account_prefix):
date = (posting.meta.get(POSTING_DATE_KEY)
if posting.meta is not None else None)
seen_releases.setdefault((posting.account, date),
[]).append((entry, posting))
elif posting.account in self.stock_accounts:
date = (posting.meta.get(POSTING_DATE_KEY)
if posting.meta is not None else None)
if posting.units.number > ZERO:
seen_release_stock_postings.setdefault(
(posting.account, date,
(posting.cost or posting.cost_spec).label),
[]).append((entry, posting))
else:
ref = posting.meta.get(TRADE_REFERENCE_NUMBER_KEY)
seen_trades.setdefault((posting.account, date, ref),
[]).append((entry, posting))
elif posting.account == self.asset_cash_account:
date = (posting.meta.get(POSTING_DATE_KEY)
if posting.meta is not None else None)
ref = (posting.meta.get(TRADE_REFERENCE_NUMBER_KEY)
if posting.meta is not None else None)
if ref is not None and ref.startswith('>'):
seen_trade_transfers.setdefault(
(date, ref[1:]), []).append((entry, posting))
award_id = posting.meta.get(AWARD_ID_KEY)
if award_id is not None and award_id.startswith('>'):
seen_release_transfers.setdefault(
(date, award_id[1:]), []).append((entry, posting))
return seen_releases, seen_release_stock_postings, seen_trades, seen_release_transfers, seen_trade_transfers
def _make_journal_entry(self, r: Release):
txn = Transaction(
meta=collections.OrderedDict(),
date=r.release_date,
flag='*',
payee=self.payee,
narration='Stock Vesting',
tags=EMPTY_SET,
links=EMPTY_SET,
postings=[])
txn.postings.append(
Posting(
account=self.get_income_account(r),
units=-r.amount_released,
cost=None,
meta={POSTING_DATE_KEY: r.release_date},
price=r.vest_price,
flag=None,
))
vest_cost_spec = CostSpec(
number_per=r.vest_price.number,
currency=r.vest_price.currency,
number_total=None,
date=r.vest_date,
label=r.award_id,
merge=False)
txn.postings.append(
Posting(
account=self.asset_account + ':Cash',
units=r.released_market_value_minus_taxes,
cost=None,
meta={
POSTING_DATE_KEY: r.release_date,
AWARD_NOTE_KEY:
'Market value of vested shares minus taxes.',
AWARD_ID_KEY: r.award_id,
},
price=None,
flag=None))
if r.net_release_shares is not None:
# Shares were retained
txn.postings.append(
Posting(
account=self.asset_account + ':' + r.symbol,
units=r.net_release_shares,
cost=vest_cost_spec,
meta={
POSTING_DATE_KEY: r.release_date,
AWARD_ID_KEY: r.award_id,
},
price=None,
flag=None))
txn.postings.append(
Posting(
account=self.asset_account + ':Cash',
units=-Amount(
number=round(
r.vest_price.number * r.net_release_shares.number,
2),
currency=r.vest_price.currency),
cost=None,
meta={
POSTING_DATE_KEY: r.release_date,
AWARD_NOTE_KEY: 'Cost of shares retained',
AWARD_ID_KEY: r.award_id,
},
price=None,
flag=None,
))
else:
# Shares were sold
# Add capital gains posting.
txn.postings.append(
Posting(
meta=None,
account=self.capital_gains_account + ':' + r.symbol,
units=-r.capital_gains,
cost=None,
price=None,
flag=None,
))
capital_gains_amount = r.capital_gains
if r.fee_amount is not None:
capital_gains_amount = amount_sub(capital_gains_amount,
r.fee_amount)
# Add cash posting for capital gains.
txn.postings.append(
Posting(
account=self.asset_account + ':Cash',
units=capital_gains_amount,
cost=None,
meta={
POSTING_DATE_KEY: r.release_date,
AWARD_NOTE_KEY: 'Capital gains less transaction fees',
AWARD_ID_KEY: r.award_id,
},
price=None,
flag=None,
))
if r.fee_amount is not None:
txn.postings.append(
Posting(
account=self.fees_account,
units=r.fee_amount,
cost=None,
meta={
POSTING_DATE_KEY: r.release_date,
AWARD_NOTE_KEY: 'Supplemental transaction fee',
AWARD_ID_KEY: r.award_id,
},
price=None,
flag=None,
))
if self.tax_accounts is None:
# Just use a single unknown account to catch all of the tax costs.
# This allows the resultant entry to match a payroll entry that includes the tax costs.
txn.postings.append(
Posting(
account=FIXME_ACCOUNT,
units=r.total_tax_amount,
cost=None,
meta=dict(),
price=None,
flag=None,
))
else:
for tax_key, tax_account_pattern in self.tax_accounts.items():
if tax_key not in r.fields:
continue
amount = parse_amount(r.fields[tax_key])
account = tax_account_pattern.format(year=r.release_date.year)
txn.postings.append(
Posting(
account=account,
units=amount,
cost=None,
meta={POSTING_DATE_KEY: r.release_date},
price=None,
flag=None,
))
return txn
def _make_transfer_journal_entry(self, r: Release):
date = r.settlement_date or r.release_date
return Transaction(
meta=collections.OrderedDict(),
date=date,
flag='*',
payee=self.payee,
narration='Stock Vesting - %s' % r.transfer_description,
tags=EMPTY_SET,
links=EMPTY_SET,
postings=[
Posting(
account=self.asset_cash_account,
units=-r.transfer_amount,
cost=None,
meta=collections.OrderedDict([
(POSTING_DATE_KEY, date),
(AWARD_ID_KEY, '>' + r.award_id),
(AWARD_NOTE_KEY, r.transfer_description),
]),
price=None,
flag=None,
),
Posting(
account=FIXME_ACCOUNT,
units=r.transfer_amount,
cost=None,
meta=None,
price=None,
flag=None,
),
])
def _make_transfer_trade_journal_entry(self, t: TradeConfirmation):
return Transaction(
meta=collections.OrderedDict(),
date=t.settlement_date,
flag='*',
payee=self.payee,
narration='Transfer due to stock sale',
tags=EMPTY_SET,
links=EMPTY_SET,
postings=[
Posting(
account=self.asset_cash_account,
units=-t.net_amount,
cost=None,
meta=collections.OrderedDict([
(POSTING_DATE_KEY, t.settlement_date),
(TRADE_REFERENCE_NUMBER_KEY, '>' + t.reference_number),
]),
price=None,
flag=None,
),
Posting(
account=FIXME_ACCOUNT,
units=t.net_amount,
cost=None,
meta=None,
price=None,
flag=None,
),
])
def _make_trade_journal_entry(self, t: TradeConfirmation):
txn = Transaction(
meta=collections.OrderedDict(),
date=t.settlement_date,
flag='*',
payee=self.payee,
narration='Sell',
tags=EMPTY_SET,
links=EMPTY_SET,
postings=[])
txn.postings.append(
Posting(
account=self.get_stock_account(t),
units=-t.quantity,
cost=CostSpec(
number_per=MISSING,
number_total=None,
currency=t.gross_amount.currency,
date=None,
label=None,
merge=False),
price=t.share_price,
meta={
POSTING_DATE_KEY: t.trade_date,
TRADE_REFERENCE_NUMBER_KEY: t.reference_number
},
flag=None,
))
txn.postings.append(
Posting(
account=self.capital_gains_account + ':' + t.symbol,
units=MISSING,
meta=None,
cost=None,
price=None,
flag=None,
))
txn.postings.append(
Posting(
account=self.fees_account,
units=t.fees,
cost=None,
meta={
POSTING_DATE_KEY: t.trade_date,
TRADE_REFERENCE_NUMBER_KEY: t.reference_number,
},
price=None,
flag=None,
))
txn.postings.append(
Posting(
account=self.asset_cash_account,
units=t.net_amount,
cost=None,
meta=None,
price=None,
flag=None,
))
return txn
def is_posting_cleared(self, posting: Posting):
if posting.meta is None:
return False
if posting.account.startswith('Income:'):
return True
return ((AWARD_ID_KEY in posting.meta or
TRADE_REFERENCE_NUMBER_KEY in posting.meta) and
POSTING_DATE_KEY in posting.meta)
def prepare(self, journal, results: SourceResults):
seen_releases, seen_release_stock_postings, seen_trades, seen_release_transfers, seen_trade_transfers = self._preprocess_entries(
journal.all_entries)
for seen_dict, valid_set in (
(seen_releases, self.release_dates),
(seen_release_stock_postings, self.release_stock_posting_keys),
(seen_trades, self.trade_keys),
(seen_release_transfers, self.expected_release_transfers),
(seen_trade_transfers, self.expected_trade_transfers),
):
for seen_key, pairs in seen_dict.items():
expected = 1 if seen_key in valid_set else 0
num_extra = len(pairs) - expected
if num_extra != 0:
results.add_invalid_reference(
InvalidSourceReference(num_extra, pairs))
for r in self.releases:
key = self.get_release_key(r)
if key not in seen_releases:
results.add_pending_entry(
self._make_import_result(self._make_journal_entry(r), r))
if (r.transfer_amount is not None and (
r.settlement_date or r.release_date,
r.award_id) not in seen_release_transfers):
results.add_pending_entry(
self._make_import_result(
self._make_transfer_journal_entry(r), r))
for r in self.trades:
key = self.get_trade_key(r)
if key not in seen_trades:
results.add_pending_entry(
self._make_import_result(
self._make_trade_journal_entry(r), r))
# FIXME: seems to be separate transactions now
# if (r.settlement_date,
# r.reference_number) not in seen_trade_transfers:
# results.add_pending_entry(
# self._make_import_result(
# self._make_transfer_trade_journal_entry(r), r))
results.add_accounts(self.managed_accounts)
def _make_import_result(self, txn: Transaction,
x: Union[Release, TradeConfirmation]):
return ImportResult(
date=txn.date,
entries=[txn],
info=dict(type='application/pdf', filename=os.path.abspath(x.path)))
@property
def name(self):
return 'stockplanconnect'
def get_associated_data(self,
entry: Directive) -> Optional[List[AssociatedData]]:
if not isinstance(entry, Transaction): return None
result = [] # type: List[AssociatedData]
already_seen = set() # type: Set[int]
for posting in entry.postings:
if posting.account in self.income_accounts and posting.meta is not None:
date = posting.meta.get(POSTING_DATE_KEY)
if date is not None:
release = self.release_dates.get((posting.account, date))
if release is not None and id(release) not in already_seen:
already_seen.add(id(release))
result.append(
AssociatedData(
meta=(POSTING_DATE_KEY, date),
posting=posting,
description='Release confirmation',
type='application/pdf',
path=os.path.realpath(release.path),
))
elif posting.account in self.stock_accounts and posting.meta is not None:
date = posting.meta.get(POSTING_DATE_KEY)
if posting.units.number > ZERO:
pass
else:
ref = posting.meta.get(TRADE_REFERENCE_NUMBER_KEY)
trade = self.trade_keys.get((posting.account, date, ref))
if trade is not None and id(trade) not in already_seen:
already_seen.add(id(trade))
result.append(
AssociatedData(
meta=(TRADE_REFERENCE_NUMBER_KEY, ref),
posting=posting,
description='Trade confirmation',
type='application/pdf',
path=os.path.realpath(trade.path),
))
elif posting.account == self.asset_cash_account:
date = posting.meta.get(POSTING_DATE_KEY)
ref = posting.meta.get(TRADE_REFERENCE_NUMBER_KEY)
if ref is not None and ref.startswith('>'):
trade = self.trade_keys.get((date, ref[1:]))
if trade is not None and id(trade) not in already_seen:
already_seen.add(id(trade))
result.append(
AssociatedData(
meta=(TRADE_REFERENCE_NUMBER_KEY, ref),
posting=posting,
description='Trade confirmation',
type='application/pdf',
path=os.path.realpath(trade.path),
))
award_id = posting.meta.get(AWARD_ID_KEY)
if award_id is not None and award_id.startswith('>'):
release = self.expected_release_transfers.get((date, award_id[1:]))
if release is not None and id(release) not in already_seen:
already_seen.add(id(release))
result.append(
AssociatedData(
meta=(AWARD_ID_KEY, award_id),
posting=posting,
description='Release confirmation',
type='application/pdf',
path=os.path.realpath(release.path),
))
return result
def load(spec, log_status):
return StockplanconnectSource(log_status=log_status, **spec)
|
jbms/beancount-import
|
beancount_import/source/stockplanconnect.py
|
Python
|
gpl-2.0
| 25,823
|
# -*- coding: utf-8 -*-
# Micha Wildermuth, micha.wildermuth@kit.edu 2020
from qkit.core.instrument_base import Instrument
import numpy as np
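# Synthetic IV curve of a Josephson junction: zero voltage below the critical
# current Ic, an ohmic branch with normal-state resistance Rn above it, and
# additive uniform noise with amplitude Ic * Rn / SNR.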
def get_IVC_JJ(x, Ic, Rn, SNR):
sign = np.sign(x[-1] - x[0])
return Rn * x * np.heaviside(np.abs(x) - Ic, int(sign > 0)) \
+ (np.heaviside(x, int(sign > 0)) - np.heaviside(x + sign * Ic, 0)) * Ic * Rn \
+ Ic * Rn / SNR * np.random.rand(x.size)
class IVD_dummy(Instrument):
'''
This is a driver for a dummy IV-Device as used for transport measurements with qkit.measure.transport.transport.transport.
Usage:
Initialize with
<name> = qkit.instruments.create('<name>', 'IVD_dummy')
'''
def __init__(self, name):
self.__name__ = __name__
Instrument.__init__(self, name, tags=['virtual'])
self.func = get_IVC_JJ
self.args = ()
self.kwargs = {'Ic': 1e-06, 'Rn': 0.5, 'SNR': 1e2}
def set_status(self, status, channel=1):
return
def get_sweep_mode(self):
return 0
def get_sweep_channels(self):
return (1, 2)
def get_sweep_bias(self):
return 0
def set_func(self, func, *args, **kwargs):
self.func, self.args, self.kwargs = func, args, kwargs
    def take_IV(self, sweep):
        start, stop, step, _ = sweep
        num = int(round(np.abs(start - stop) / step + 1))
        decimals = -int(np.floor(np.log10(np.abs(step)))) + 1
        # round each value to overcome the limited precision of np.linspace
        x = np.array([np.sign(val) * round(np.abs(val), decimals)
                      for val in np.linspace(start, stop, num)])
        y = self.func(x, *self.args, **self.kwargs)
        return x, y
def get_parameters(self):
return {}
def get(self, param, **kwargs):
return
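if __name__ == '__main__':
    # Minimal usage sketch: evaluate the dummy junction model directly for one
    # current sweep, bypassing the qkit instrument layer (the qkit import at
    # the top of this module must still succeed for the script to run).
    bias = np.linspace(-2e-6, 2e-6, 401)  # bias current values in A
    print(get_IVC_JJ(bias, Ic=1e-06, Rn=0.5, SNR=1e2)[:5])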
|
qkitgroup/qkit
|
qkit/drivers/IVD_dummy.py
|
Python
|
gpl-2.0
| 1,735
|
import re
class TimeConverter(object):
def __init__(self, data):
self.seconds = self.convert_to_second(data)
self.string = self.convert_to_string(data)
    def convert_to_second(self, stringtime):
        if isinstance(stringtime, str):
            try:
                data = re.match(r"(\d+)(\w*)", stringtime)
                num = int(data.group(1))
                unit = data.group(2)
                units = {"s": 1,
                         "m": 60,
                         "h": 3600,
                         "d": 86400,
                         "w": 604800
                         }
                seconds = units[unit] * num
                return seconds
            except (AttributeError, KeyError):
                print("Some error occurred transforming %s into seconds" % (stringtime))
    def week(self, seconds):
        return str(seconds // 604800) + 'w'
    def day(self, seconds):
        return str(seconds // 86400) + 'd'
    def hour(self, seconds):
        return str(seconds // 3600) + 'h'
    def minute(self, seconds):
        return str(seconds // 60) + 'm'
    def second(self, seconds):
        return str(seconds) + 's'
def convert_to_string(self, seconds):
if isinstance(seconds, int):
if self.week(seconds) != '0w':
return self.week(seconds)
if self.day(seconds) != '0d':
return self.day(seconds)
if self.hour(seconds) != '0h':
return self.hour(seconds)
if self.minute(seconds) != '0m':
return self.minute(seconds)
if self.second(seconds) != '0s':
return self.second(seconds)
else:
print("Some error occurred tranforming %s in string" % (seconds))
return '666'
if __name__ == '__main__':
    # Quick manual checks; each constructor computes both representations, so
    # the direction that does not apply prints its error message first.
    print(TimeConverter('14m').seconds)  # 840
    print(TimeConverter('0').seconds)    # None: '0' carries no unit suffix
    print(TimeConverter(567).string)     # '9m'
|
stregatto/fabric_lib
|
converter.py
|
Python
|
gpl-2.0
| 1,975
|
# User creation spoke
#
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Martin Sivak <msivak@redhat.com>
# Chris Lumens <clumens@redhat.com>
#
from gi.repository import Gtk
from pyanaconda.flags import flags
from pyanaconda.i18n import _, N_
from pyanaconda.users import cryptPassword, validatePassword, guess_username, USERNAME_VALID
from pwquality import PWQError
from pyanaconda.ui.gui.spokes import NormalSpoke
from pyanaconda.ui.gui import GUIObject
from pyanaconda.ui.gui.categories.user_settings import UserSettingsCategory
from pyanaconda.ui.common import FirstbootSpokeMixIn
from pyanaconda.ui.gui.utils import enlightbox
from pykickstart.constants import FIRSTBOOT_RECONFIG
from pyanaconda.constants import ANACONDA_ENVIRON, FIRSTBOOT_ENVIRON
import pwquality
__all__ = ["UserSpoke", "AdvancedUserDialog"]
class AdvancedUserDialog(GUIObject):
builderObjects = ["advancedUserDialog", "uid", "gid"]
mainWidgetName = "advancedUserDialog"
uiFile = "spokes/advanced_user.glade"
def __init__(self, user, groupDict, data):
GUIObject.__init__(self, data)
self._user = user
self._groupDict = groupDict
def initialize(self):
GUIObject.initialize(self)
def _apply_checkboxes(self, _editable = None, data = None):
"""Update the state of this screen according to the
checkbox states on the screen. It is called from
the toggled Gtk event.
"""
c_home = self.builder.get_object("c_home").get_active()
c_uid = self.builder.get_object("c_uid").get_active()
c_gid = self.builder.get_object("c_gid").get_active()
self.builder.get_object("t_home").set_sensitive(c_home)
self.builder.get_object("l_home").set_sensitive(c_home)
self.builder.get_object("spin_uid").set_sensitive(c_uid)
self.builder.get_object("spin_gid").set_sensitive(c_gid)
def refresh(self):
t_home = self.builder.get_object("t_home")
if self._user.homedir:
t_home.set_text(self._user.homedir)
elif self._user.name:
homedir = "/home/" + self._user.name
t_home.set_text(homedir)
self._user.homedir = homedir
c_home = self.builder.get_object("c_home")
c_home.set_active(bool(self._user.homedir))
c_uid = self.builder.get_object("c_uid")
c_uid.set_active(bool(self._user.uid))
c_gid = self.builder.get_object("c_gid")
c_gid.set_active(bool(self._user.gid))
self._apply_checkboxes()
self.builder.get_object("spin_uid").update()
self.builder.get_object("spin_gid").update()
groups = []
for group_name in self._user.groups:
group = self._groupDict[group_name]
if group.name and group.gid is not None:
groups.append("%s (%d)" % (group.name, group.gid))
elif group.name:
groups.append(group.name)
elif group.gid is not None:
groups.append("(%d)" % (group.gid,))
self.builder.get_object("t_groups").set_text(", ".join(groups))
def run(self):
self.window.show()
rc = self.window.run()
self.window.hide()
#OK clicked
if rc == 1:
if self.builder.get_object("c_home").get_active():
self._user.homedir = self.builder.get_object("t_home").get_text()
else:
self._user.homedir = None
if self.builder.get_object("c_uid").get_active():
self._user.uid = int(self.builder.get_object("uid").get_value())
else:
self._user.uid = None
if self.builder.get_object("c_gid").get_active():
self._user.gid = int(self.builder.get_object("gid").get_value())
else:
self._user.gid = None
groups = self.builder.get_object("t_groups").get_text().split(",")
self._user.groups = []
for group in groups:
group = group.strip()
if group not in self._groupDict:
self._groupDict[group] = self.data.GroupData(name = group)
self._user.groups.append(group)
#Cancel clicked, window destroyed...
else:
pass
return rc
class UserSpoke(FirstbootSpokeMixIn, NormalSpoke):
builderObjects = ["userCreationWindow"]
mainWidgetName = "userCreationWindow"
uiFile = "spokes/user.glade"
category = UserSettingsCategory
icon = "avatar-default-symbolic"
title = N_("_USER CREATION")
@classmethod
def should_run(cls, environment, data):
# the user spoke should run always in the anaconda and in firstboot only
# when doing reconfig or if no user has been created in the installation
if environment == ANACONDA_ENVIRON:
return True
elif environment == FIRSTBOOT_ENVIRON and data is None:
# cannot decide, stay in the game and let another call with data
# available (will come) decide
return True
elif environment == FIRSTBOOT_ENVIRON and data and \
(data.firstboot.firstboot == FIRSTBOOT_RECONFIG or \
len(data.user.userList) == 0):
return True
else:
return False
def __init__(self, *args):
NormalSpoke.__init__(self, *args)
self._oldweak = None
self._error = False
def initialize(self):
NormalSpoke.initialize(self)
if self.data.user.userList:
self._user = self.data.user.userList[0]
else:
self._user = self.data.UserData()
self._wheel = self.data.GroupData(name = "wheel")
self._groupDict = {"wheel": self._wheel}
# placeholders for the text boxes
self.fullname = self.builder.get_object("t_fullname")
self.username = self.builder.get_object("t_username")
self.pw = self.builder.get_object("t_password")
self.confirm = self.builder.get_object("t_verifypassword")
self.admin = self.builder.get_object("c_admin")
self.usepassword = self.builder.get_object("c_usepassword")
self.b_advanced = self.builder.get_object("b_advanced")
self.guesser = {
self.username: True
}
# set up passphrase quality checker
self._pwq = pwquality.PWQSettings()
self._pwq.read_config()
self.pw_bar = self.builder.get_object("password_bar")
self.pw_label = self.builder.get_object("password_label")
# indicate when the password was set by kickstart
self._user.password_kickstarted = self.data.user.seen
if self._user.password_kickstarted:
self.usepassword.set_active(self._user.password != "")
if not self._user.isCrypted:
self.pw.set_text(self._user.password)
self.confirm.set_text(self._user.password)
else:
self.usepassword.set_active(True)
self.pw.set_placeholder_text(_("The password was set by kickstart."))
self.confirm.set_placeholder_text(_("The password was set by kickstart."))
self._advanced = AdvancedUserDialog(self._user, self._groupDict,
self.data)
self._advanced.initialize()
def refresh(self):
self.username.set_text(self._user.name)
self.fullname.set_text(self._user.gecos)
self.admin.set_active(self._wheel.name in self._user.groups)
if self.usepassword.get_active():
self._checkPassword()
if self.username.get_text() and self.usepassword.get_active() and \
self._user.password == "":
self.pw.grab_focus()
elif self.fullname.get_text():
self.username.grab_focus()
else:
self.fullname.grab_focus()
self.b_advanced.set_sensitive(bool(self._user.name))
@property
def status(self):
if self._error:
return _("Error creating user account: %s") % self._error
elif len(self.data.user.userList) == 0:
return _("No user will be created")
elif self._wheel.name in self.data.user.userList[0].groups:
return _("Administrator %s will be created") % self.data.user.userList[0].name
else:
return _("User %s will be created") % self.data.user.userList[0].name
@property
def mandatory(self):
# mandatory only if root account is disabled
return (not self.data.rootpw.password) or self.data.rootpw.lock
def apply(self):
# set the password only if the user enters anything to the text entry
# this should preserve the kickstart based password
if self.usepassword.get_active():
if self.pw.get_text():
self._user.password_kickstarted = False
self._user.password = cryptPassword(self.pw.get_text())
self._user.isCrypted = True
self.pw.set_placeholder_text("")
self.confirm.set_placeholder_text("")
# reset the password when the user unselects it
else:
self.pw.set_placeholder_text("")
self.confirm.set_placeholder_text("")
self._user.password = ""
self._user.isCrypted = False
self._user.password_kickstarted = False
self._user.name = self.username.get_text()
self._user.gecos = self.fullname.get_text()
# the user will be created only if the username is set
if self._user.name:
if self.admin.get_active() and \
self._wheel.name not in self._user.groups:
self._user.groups.append(self._wheel.name)
elif not self.admin.get_active() and \
self._wheel.name in self._user.groups:
self._user.groups.remove(self._wheel.name)
self.data.group.groupList += (self._groupDict[g] for g in self._user.groups
if g != self._wheel.name)
if self._user not in self.data.user.userList:
self.data.user.userList.append(self._user)
elif self._user in self.data.user.userList:
self.data.user.userList.remove(self._user)
@property
def sensitive(self):
return not (self.completed and flags.automatedInstall)
@property
def completed(self):
return len(self.data.user.userList) > 0
def _passwordDisabler(self, editable = None, data = None):
"""Called by Gtk callback when the "Use password" check
button is toggled. It will make password entries in/sensitive."""
self.pw.set_sensitive(self.usepassword.get_active())
self.confirm.set_sensitive(self.usepassword.get_active())
if not self.usepassword.get_active():
self.clear_info()
else:
self._checkPassword()
def _guessNameDisabler(self, editable = None, data = None):
"""Called by Gtk callback when the username or hostname
entry changes. It disables the guess algorithm if the
        user added his own text there and re-enables it when the
        user deletes the whole text."""
if editable.get_text() == "":
self.guesser[editable] = True
self.b_advanced.set_sensitive(False)
else:
self.guesser[editable] = False
self.b_advanced.set_sensitive(True)
def _guessNames(self, editable = None, data = None):
"""Called by Gtk callback when the full name field changes.
It guesses the username and hostname, strips diacritics
        and makes those lowercase.
"""
fullname = self.fullname.get_text()
username = guess_username(fullname)
# after the text is updated in guesser, the guess has to be reenabled
if self.guesser[self.username]:
self.username.set_text(username)
self.guesser[self.username] = True
def _checkPassword(self, editable = None, data = None):
"""This method updates the password indicators according
to the passwords entered by the user. It is called by
the changed Gtk event handler.
"""
# If the password was set by kickstart, skip the strength check
if self._user.password_kickstarted:
return True
try:
strength = self._pwq.check(self.pw.get_text(), None, None)
_pwq_error = None
except pwquality.PWQError as (e, msg):
_pwq_error = msg
strength = 0
if strength < 50:
val = 1
text = _("Weak")
self._error = _("The password you have provided is weak")
if _pwq_error:
self._error += ": %s. " % _pwq_error
else:
self._error += ". "
self._error += _("You will have to press Done twice to confirm it.")
elif strength < 75:
val = 2
text = _("Fair")
self._error = False
elif strength < 90:
val = 3
text = _("Good")
self._error = False
else:
val = 4
text = _("Strong")
self._error = False
if not self.pw.get_text():
val = 0
text = _("Empty")
self._error = _("The password is empty.")
elif self.confirm.get_text() and self.pw.get_text() != self.confirm.get_text():
self._error = _("The passwords do not match.")
self.pw_bar.set_value(val)
self.pw_label.set_text(text)
self.clear_info()
if self._error:
self.set_warning(self._error)
self.window.show_all()
return False
return True
def _validatePassword(self):
"""This method checks the password weakness and
implements the Press Done twice logic. It is used from
the on_back_clicked handler.
        It also sets self._error if the password is not
        sufficient or does not pass the pwquality checks.
:return: True if the password should be accepted, False otherwise
:rtype: bool
"""
# Do various steps to validate the password
# sets self._error to an error string
# Return True if valid, False otherwise
self._error = False
pw = self.pw.get_text()
confirm = self.confirm.get_text()
if not pw and not confirm:
self._error = _("You must provide and confirm a password.")
return False
try:
self._error = validatePassword(pw, confirm)
except PWQError as (_e, msg):
if pw == self._oldweak:
# We got a second attempt with the same weak password
pass
else:
self._error = _("You have provided a weak password: %s. "
" Press Done again to use anyway.") % msg
self._oldweak = pw
return False
if self._error:
return False
# the self._checkPassword function is used to indicate the password
# strength and need of hitting the Done button twice so use it here as
# well
if not self._checkPassword() and pw != self._oldweak:
# check failed and the Done button was clicked for the first time
self._oldweak = pw
return False
# if no errors, clear the info for next time we go into the spoke
self._password = pw
self.clear_info()
self._error = False
return True
def on_advanced_clicked(self, _button):
"""Handler for the Advanced.. button. It starts the Advanced dialog
        for setting home directory, uid, gid and groups.
"""
self._user.name = self.username.get_text()
if self.admin.get_active() and \
self._wheel.name not in self._user.groups:
self._user.groups.append(self._wheel.name)
elif not self.admin.get_active() and \
self._wheel.name in self._user.groups:
self._user.groups.remove(self._wheel.name)
self._advanced.refresh()
with enlightbox(self.window, self._advanced.window):
response = self._advanced.run()
self.admin.set_active(self._wheel.name in self._user.groups)
def on_back_clicked(self, button):
username = self.username.get_text()
# if an invalid username was given, that's the biggest issue
if username and not USERNAME_VALID.match(username):
self.clear_info()
self.set_warning(_("Invalid username"))
self.username.grab_focus()
self.window.show_all()
# Return if:
# - no user is requested (empty username)
# - no password is required
# - password is set by kickstart and password text entry is empty
# - password is set by dialog and _validatePassword returns True
elif not username or \
not self.usepassword.get_active() or \
(self.pw.get_text() == "" and \
self.pw.get_text() == self.confirm.get_text() and \
self._user.password_kickstarted) or \
self._validatePassword():
self._error = False
self.clear_info()
NormalSpoke.on_back_clicked(self, button)
# Show the confirmation message if the password is not acceptable
else:
self.clear_info()
self.set_warning(self._error)
self.pw.grab_focus()
self.window.show_all()
|
projectatomic/anaconda
|
pyanaconda/ui/gui/spokes/user.py
|
Python
|
gpl-2.0
| 18,693
|
from gi.repository import Gtk
from gi.repository import Gdk
import constants
from gettext import gettext as _
from sugar3.graphics.icon import Icon
from ReadTab import evinceadapter, epubadapter
class _ModesComboHeaderNavigator:
def __init__(self, app):
self._app = app
self._modes = {}
self._icon_size = Gtk.IconSize.LARGE_TOOLBAR
def get_new_header(self, icon_header):
event_box = Gtk.EventBox()
event_box.modify_bg(Gtk.StateType.NORMAL, Gdk.color_parse('#000000'))
container = Gtk.HBox()
event_box.add(container)
self._highest_mode = -1
self._current_mode = constants.MODE_ZOOM
container.pack_start(Gtk.VBox(), True, True, 0)
self._middle_container = Gtk.HBox()
self._middle_container.pack_start(Gtk.VBox(), True, True, 0)
icons_container = Gtk.HBox()
self._middle_container.pack_start(icons_container, True, True, 0)
self._middle_container.pack_start(Gtk.VBox(), True, True, 0)
container.pack_start(self._middle_container, True, True, 0)
container.pack_start(Gtk.VBox(), True, True, 0)
# Add the "left arrow" icon, irrespective.
icons_container.pack_start(Icon(icon_name='bookreader-left',
icon_size=self._icon_size),
True, True, 0)
for icon_name in ['bookreader-dummy-zoom', 'bookreader-libraries']:
if icon_name.startswith(icon_header):
icon_name = icon_name + '-click'
else:
icon_name = icon_name + '-select'
icon = Icon(icon_name=icon_name, icon_size=self._icon_size)
icons_container.pack_start(icon, True, True, 0)
if icon_name.startswith('bookreader-dummy-zoom'):
self._zoom_icon = icon
# Add the "right arrow" icon, irrespective.
icons_container.pack_start(Icon(icon_name='bookreader-right',
icon_size=self._icon_size),
True, True, 0)
event_box.show_all()
return event_box
def get_new_footer(self, show_select_option):
bottom_note = Gtk.HBox()
bottom_note.set_spacing(10)
right_container = \
self.__get_right_aligned_note_and_label_icon('bookreader-gamekeys-menu',
_('Exit'), bottom_note)
if show_select_option:
bottom_note.pack_end(Icon(icon_name='bookreader-divider',
icon_size=Gtk.IconSize.LARGE_TOOLBAR),
False, False, 0)
self.__get_right_aligned_note_and_label_icon('bookreader-gamekeys-enter',
_('Select'), bottom_note)
event_box = Gtk.EventBox()
event_box.modify_bg(Gtk.StateType.NORMAL, Gdk.color_parse('#000000'))
event_box.add(bottom_note)
event_box.show_all()
return event_box
def __get_right_aligned_note_and_label_icon(self, icon_name, text,
container):
label = Gtk.Label()
        label.set_markup('<span color="white">' + text + '</span>')
container.pack_end(Icon(icon_name=icon_name,
icon_size=Gtk.IconSize.LARGE_TOOLBAR),
False, False, 0)
container.pack_end(label, False, False, 0)
def add_mode(self, mode, mode_widget, icon_header, app,
show_select_option):
self._app = app
if mode in self._modes.keys():
return
self._modes[mode] = mode_widget
mode_widget._header = self.get_new_header(icon_header)
mode_widget.get_overlaid_widget().pack_start(mode_widget._header, False, False, 0)
mode_widget.get_overlaid_widget().reorder_child(mode_widget._header, 0)
mode_widget._footer = self.get_new_footer(show_select_option)
mode_widget.get_overlaid_widget().pack_start(mode_widget._footer, False, False, 0)
if mode > self._highest_mode:
self._highest_mode = mode
def switch_mode(self, mode):
self._current_mode = mode
if len(self._modes.keys()) > 1:
self._app.set_mode(self._app._modes[mode])
mode_widget = self._modes[mode]
mode_widget.show_overlay_widget()
    def go_to_previous_mode(self):
        modes = sorted(self._modes.keys())
        index = None
        for key in range(1, len(modes)):
            if self._current_mode == modes[key]:
                index = key
                break
        if index is not None:
            self.switch_mode(modes[index - 1])
    def is_previous_mode_possible(self):
        return self._current_mode != sorted(self._modes.keys())[0]
    def go_to_next_mode(self):
        modes = sorted(self._modes.keys())
        index = None
        for key in range(0, len(modes)):
            if self._current_mode == modes[key]:
                index = key
                break
        if index is not None:
            self.switch_mode(modes[index + 1])
    def is_next_mode_possible(self):
        modes = sorted(self._modes.keys())
        return (self._current_mode != modes[-1]) and (len(modes) != 1)
def update_zoom_icons(self):
for key in self._modes.keys():
self.__update_icons_for_mode_widget(self._modes[key])
def __update_icons_for_mode_widget(self, mode_widget):
view = self._app.get_reader_tab()._view
if isinstance(view, Gtk.Label):
return
zoom_icon = mode_widget._header.get_children()[0].get_children()[1].get_children()[1].get_children()[1]
if isinstance(view, evinceadapter.EvinceViewer):
if self._current_mode == constants.MODE_ZOOM:
zoom_icon.props.icon_name = 'bookreader-popup-zoom-click'
else:
zoom_icon.props.icon_name = 'bookreader-popup-zoom-select'
elif isinstance(view, epubadapter.EpubViewer):
if self._current_mode == constants.MODE_ZOOM:
zoom_icon.props.icon_name = 'bookreader-popup-textsize-click'
else:
zoom_icon.props.icon_name = 'bookreader-popup-textsize-select'
_navigator = None
def get_modes_navigator(app):
global _navigator
if _navigator is None:
_navigator = _ModesComboHeaderNavigator(app)
return _navigator
class ModesComboHeader:
def __init__(self, mode, mode_widget, icon_header, app,
show_select_option):
self._navigator = get_modes_navigator(app)
self._mode_widget = mode_widget
self._app = app
self._navigator.add_mode(mode, mode_widget, icon_header, app,
show_select_option)
def handle_left_navigation_key(self):
if self._navigator.is_previous_mode_possible():
self._mode_widget.hide()
self._navigator.go_to_previous_mode()
self._navigator.update_zoom_icons()
def handle_right_navigation_key(self):
if self._navigator.is_next_mode_possible():
self._mode_widget.hide()
self._navigator.go_to_next_mode()
self._navigator.update_zoom_icons()
def switch_to_normal_mode(self):
self._navigator._current_mode = constants.MODE_ZOOM
self._app.set_mode(self._app._modes[constants.MODE_NORMAL])
self.hide()
|
activitycentral/ebookreader
|
src/widgets/modescomboheader.py
|
Python
|
gpl-2.0
| 7,573
|
# Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import socket
from rekall import kb
from rekall import obj
from rekall import utils
from rekall.plugins.overlays.windows import pe_vtypes
AF_INET = 2
AF_INET6 = 0x17
# String representations of INADDR_ANY and INADDR6_ANY
inaddr_any = utils.inet_ntop(socket.AF_INET, '\0' * 4)
inaddr6_any = utils.inet_ntop(socket.AF_INET6, '\0' * 16)
protos = {
0:"HOPOPT",
1:"ICMP",
2:"IGMP",
3:"GGP",
4:"IPv4",
5:"ST",
6:"TCP",
7:"CBT",
8:"EGP",
9:"IGP",
10:"BBN-RCC-MON",
11:"NVP-II",
12:"PUP",
13:"ARGUS",
14:"EMCON",
15:"XNET",
16:"CHAOS",
17:"UDP",
18:"MUX",
19:"DCN-MEAS",
20:"HMP",
21:"PRM",
22:"XNS-IDP",
23:"TRUNK-1",
24:"TRUNK-2",
25:"LEAF-1",
26:"LEAF-2",
27:"RDP",
28:"IRTP",
29:"ISO-TP4",
30:"NETBLT",
31:"MFE-NSP",
32:"MERIT-INP",
33:"DCCP",
34:"3PC",
35:"IDPR",
36:"XTP",
37:"DDP",
38:"IDPR-CMTP",
39:"TP++",
40:"IL",
41:"IPv6",
42:"SDRP",
43:"IPv6-Route",
44:"IPv6-Frag",
45:"IDRP",
46:"RSVP",
47:"GRE",
48:"DSR",
49:"BNA",
50:"ESP",
51:"AH",
52:"I-NLSP",
53:"SWIPE",
54:"NARP",
55:"MOBILE",
56:"TLSP",
57:"SKIP",
58:"IPv6-ICMP",
59:"IPv6-NoNxt",
60:"IPv6-Opts",
61:"Host-interal",
62:"CFTP",
63:"Local Network",
64:"SAT-EXPAK",
65:"KRYPTOLAN",
66:"RVD",
67:"IPPC",
68:"Dist-FS",
69:"SAT-MON",
70:"VISA",
71:"IPCV",
72:"CPNX",
73:"CPHB",
74:"WSN",
75:"PVP",
76:"BR-SAT-MON",
77:"SUN-ND",
78:"WB-MON",
79:"WB-EXPAK",
80:"ISO-IP",
81:"VMTP",
82:"SECURE-VMTP",
83:"VINES",
84:"TTP",
# 84:"IPTM",
85:"NSFNET-IGP",
86:"DGP",
87:"TCF",
88:"EIGRP",
89:"OSPFIGP",
90:"Sprite-RPC",
91:"LARP",
92:"MTP",
93:"AX.25",
94:"IPIP",
95:"MICP",
96:"SCC-SP",
97:"ETHERIP",
98:"ENCAP",
99:"Encryption",
100:"GMTP",
101:"IFMP",
102:"PNNI",
103:"PIM",
104:"ARIS",
105:"SCPS",
106:"QNX",
107:"A/N",
108:"IPComp",
109:"SNP",
110:"Compaq-Peer",
111:"IPX-in-IP",
112:"VRRP",
113:"PGM",
114:"0-hop",
115:"L2TP",
116:"DDX",
117:"IATP",
118:"STP",
119:"SRP",
120:"UTI",
121:"SMP",
122:"SM",
123:"PTP",
124:"ISIS over IPv4",
125:"FIRE",
126:"CRTP",
127:"CRUDP",
128:"SSCOPMCE",
129:"IPLT",
130:"SPS",
131:"PIPE",
132:"SCTP",
133:"FC",
134:"RSVP-E2E-IGNORE",
135:"Mobility Header",
136:"UDPLite",
137:"MPLS-in-IP",
138:"manet",
139:"HIP",
140:"Shim6",
141:"WESP",
142:"ROHC",
253:"Experimental",
254:"Experimental",
255:"Reserved",
}
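# Each vtype below maps a struct name to [size, {field: [offset, [type, ...]]}];
# e.g. 'LocalPort' : [0x30, ['unsigned be short']] declares a big-endian
# unsigned short at byte offset 0x30 within the structure.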
# Structures used by connections, connscan, sockets, sockscan.
# Used by x86 XP (all service packs) and x86 2003 SP0.
tcpip_vtypes = {
'_ADDRESS_OBJECT' : [0x68, {
'Next' : [0x0, ['pointer', ['_ADDRESS_OBJECT']]],
'LocalIpAddress' : [0x2c, ['Ipv4Address']],
'LocalPort' : [0x30, ['unsigned be short']],
'Protocol' : [0x32, ['unsigned short']],
'Pid' : [0x148, ['unsigned long']],
'CreateTime' : [0x158, ['WinFileTime', {}]],
}],
'_TCPT_OBJECT' : [0x20, {
'Next' : [0x0, ['pointer', ['_TCPT_OBJECT']]],
'RemoteIpAddress' : [0xc, ['Ipv4Address']],
'LocalIpAddress' : [0x10, ['Ipv4Address']],
'RemotePort' : [0x14, ['unsigned be short']],
'LocalPort' : [0x16, ['unsigned be short']],
'Pid' : [0x18, ['unsigned long']],
}],
}
# Structures used by connections, connscan, sockets, sockscan.
# Used by x64 XP and x64 2003 (all service packs).
tcpip_vtypes_2003_x64 = {
'_ADDRESS_OBJECT' : [0x250, {
'Next' : [0x0, ['pointer', ['_ADDRESS_OBJECT']]],
'LocalIpAddress' : [0x58, ['Ipv4Address']],
'LocalPort' : [0x5c, ['unsigned be short']],
'Protocol' : [0x5e, ['unsigned short']],
'Pid' : [0x238, ['unsigned long']],
'CreateTime' : [0x248, ['WinFileTime', {}]],
}],
'_TCPT_OBJECT' : [0x28, {
'Next' : [0x0, ['pointer', ['_TCPT_OBJECT']]],
'RemoteIpAddress' : [0x14, ['Ipv4Address']],
'LocalIpAddress' : [0x18, ['Ipv4Address']],
'RemotePort' : [0x1c, ['unsigned be short']],
'LocalPort' : [0x1e, ['unsigned be short']],
'Pid' : [0x20, ['unsigned long']],
}],
}
# Structures used by sockets and sockscan.
# Used by x86 2003 SP1 and SP2 only.
tcpip_vtypes_2003_sp1_sp2 = {
'_ADDRESS_OBJECT' : [0x68, {
'Next' : [0x0, ['pointer', ['_ADDRESS_OBJECT']]],
'LocalIpAddress' : [0x30, ['Ipv4Address']],
'LocalPort' : [0x34, ['unsigned be short']],
'Protocol' : [0x36, ['unsigned short']],
'Pid' : [0x14C, ['unsigned long']],
'CreateTime' : [0x158, ['WinFileTime', {}]],
}],
}
TCP_STATE_ENUM = {
0: 'CLOSED', 1: 'LISTENING', 2: 'SYN_SENT',
3: 'SYN_RCVD', 4: 'ESTABLISHED', 5: 'FIN_WAIT1',
6: 'FIN_WAIT2', 7: 'CLOSE_WAIT', 8: 'CLOSING',
9: 'LAST_ACK', 12: 'TIME_WAIT', 13: 'DELETE_TCB'
}
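# TCP_STATE_ENUM is wired into the vtypes below as the `choices` map of an
# Enumeration field, e.g.:
#   'State' : [0x28, ['Enumeration', dict(target='long',
#                                         choices=TCP_STATE_ENUM)]]
# so the raw long read from memory renders as a symbolic state name.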
# Structures used by netscan for x86 Vista and 2008 (all service packs).
tcpip_vtypes_vista = {
'_IN_ADDR' : [None, {
'addr4' : [0x0, ['Ipv4Address']],
'addr6' : [0x0, ['Ipv6Address']],
}],
'_LOCAL_ADDRESS' : [None, {
'pData' : [0xC, ['pointer', ['pointer', ['_IN_ADDR']]]],
}],
'_TCP_LISTENER': [0xa8, { # TcpL
'Owner' : [0x18, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0x20, ['WinFileTime', {}]],
'LocalAddr' : [0x34, ['pointer', ['_LOCAL_ADDRESS']]],
'InetAF' : [0x38, ['pointer', ['_INETAF']]],
"Endpoint": [0x50, ['Pointer', dict(
target="_TCP_ENDPOINT"
)]],
'Port' : [0x3E, ['unsigned be short']],
}],
'_TCP_ENDPOINT': [0x1f0, { # TcpE
'InetAF' : [0xC, ['pointer', ['_INETAF']]],
'AddrInfo' : [0x10, ['pointer', ['_ADDRINFO']]],
'ListEntry': [0x14, ['_LIST_ENTRY']],
'State' : [0x28, ['Enumeration', dict(
target='long',
choices=TCP_STATE_ENUM)]],
'LocalPort' : [0x2C, ['unsigned be short']],
'RemotePort' : [0x2E, ['unsigned be short']],
'Owner' : [0x160, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_TCP_SYN_ENDPOINT': [None, {
'ListEntry': [8, ['_LIST_ENTRY']],
'InetAF' : [0x18, ['pointer', ['_INETAF']]],
'LocalPort' : [0x3c, ['unsigned be short']],
'RemotePort' : [0x3e, ['unsigned be short']],
'LocalAddr' : [0x1c, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x28, ['pointer', ['_IN_ADDR']]],
'Owner' : [0x20, ['pointer', ['_SYN_OWNER']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_SYN_OWNER': [None, {
'Process': [0x18, ['pointer', ['_EPROCESS']]],
}],
'_TCP_TIMEWAIT_ENDPOINT': [None, {
'ListEntry': [0x14, ['_LIST_ENTRY']],
'InetAF' : [0xc, ['pointer', ['_INETAF']]],
'LocalPort' : [0x1c, ['unsigned be short']],
'RemotePort' : [0x1e, ['unsigned be short']],
'LocalAddr' : [0x20, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x24, ['pointer', ['_IN_ADDR']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_INETAF' : [None, {
'AddressFamily' : [0xC, ['unsigned short']],
}],
'_ADDRINFO' : [None, {
'Local' : [0x0, ['pointer', ['_LOCAL_ADDRESS']]],
'Remote' : [0x8, ['pointer', ['_IN_ADDR']]],
}],
'_UDP_ENDPOINT': [0xa8, { # UdpA
'Owner' : [0x18, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0x30, ['WinFileTime', {}]],
'LocalAddr' : [0x38, ['pointer', ['_LOCAL_ADDRESS']]],
'InetAF' : [0x14, ['pointer', ['_INETAF']]],
'Port' : [0x48, ['unsigned be short']],
}],
}
# Structures for netscan on x86 Windows 7 (all service packs).
tcpip_vtypes_7 = {
'_TCP_ENDPOINT': [0x210, { # TcpE
'InetAF' : [0xC, ['pointer', ['_INETAF']]],
'AddrInfo' : [0x10, ['pointer', ['_ADDRINFO']]],
'ListEntry': [0x14, ['_LIST_ENTRY']],
'State' : [0x34, ['Enumeration', dict(
target='long', choices=TCP_STATE_ENUM)]],
'LocalPort' : [0x38, ['unsigned be short']],
'RemotePort' : [0x3A, ['unsigned be short']],
'Owner' : [0x174, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_TCP_SYN_ENDPOINT': [None, {
'ListEntry': [8, ['_LIST_ENTRY']],
'InetAF' : [0x24, ['pointer', ['_INETAF']]],
'LocalPort' : [0x48, ['unsigned be short']],
'RemotePort' : [0x4a, ['unsigned be short']],
'LocalAddr' : [0x28, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x34, ['pointer', ['_IN_ADDR']]],
'Owner' : [0x2c, ['pointer', ['_SYN_OWNER']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_TCP_TIMEWAIT_ENDPOINT': [None, {
'ListEntry': [0, ['_LIST_ENTRY']],
'InetAF' : [0x18, ['pointer', ['_INETAF']]],
'LocalPort' : [0x28, ['unsigned be short']],
'RemotePort' : [0x2a, ['unsigned be short']],
'LocalAddr' : [0x2c, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x30, ['pointer', ['_IN_ADDR']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
}
# Structures for netscan on x64 Vista SP0 and 2008 SP0
tcpip_vtypes_vista_64 = {
'_IN_ADDR' : [None, {
'addr4' : [0x0, ['Ipv4Address']],
'addr6' : [0x0, ['Ipv6Address']],
}],
'_TCP_LISTENER': [0x120, { # TcpL
'Owner' : [0x28, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0x20, ['WinFileTime', {}]],
'LocalAddr' : [0x58, ['pointer', ['_LOCAL_ADDRESS']]],
'InetAF' : [0x60, ['pointer', ['_INETAF']]],
'Port' : [0x6a, ['unsigned be short']],
}],
'_INETAF' : [None, {
'AddressFamily' : [0x14, ['unsigned short']],
}],
'_LOCAL_ADDRESS' : [None, {
'pData' : [0x10, ['pointer', ['pointer', ['_IN_ADDR']]]],
}],
'_ADDRINFO' : [None, {
'Local' : [0x0, ['pointer', ['_LOCAL_ADDRESS']]],
'Remote' : [0x10, ['pointer', ['_IN_ADDR']]],
}],
'_TCP_ENDPOINT': [0x210, { # TcpE
'InetAF' : [0x18, ['pointer', ['_INETAF']]],
'AddrInfo' : [0x20, ['pointer', ['_ADDRINFO']]],
'ListEntry': [0x28, ['_LIST_ENTRY']],
'State' : [0x50, ['Enumeration', dict(
target='long',
choices=TCP_STATE_ENUM)]],
'LocalPort' : [0x54, ['unsigned be short']],
'RemotePort' : [0x56, ['unsigned be short']],
'Owner' : [0x208, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_TCP_SYN_ENDPOINT': [None, {
'ListEntry': [0x10, ['_LIST_ENTRY']],
'InetAF' : [0x30, ['pointer', ['_INETAF']]],
'LocalPort' : [0x64, ['unsigned be short']],
'RemotePort' : [0x66, ['unsigned be short']],
'LocalAddr' : [0x38, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x50, ['pointer', ['_IN_ADDR']]],
'Owner' : [0x40, ['pointer', ['_SYN_OWNER']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_SYN_OWNER': [None, {
'Process': [0x28, ['pointer', ['_EPROCESS']]],
}],
'_TCP_TIMEWAIT_ENDPOINT': [None, {
'ListEntry': [0, ['_LIST_ENTRY']],
'InetAF' : [0x18, ['pointer', ['_INETAF']]],
'LocalPort' : [0x30, ['unsigned be short']],
'RemotePort' : [0x32, ['unsigned be short']],
'LocalAddr' : [0x38, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x40, ['pointer', ['_IN_ADDR']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_UDP_ENDPOINT': [0x82, { # UdpA
'Owner' : [0x28, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0x58, ['WinFileTime', {}]],
'LocalAddr' : [0x60, ['pointer', ['_LOCAL_ADDRESS']]],
'InetAF' : [0x20, ['pointer', ['_INETAF']]],
'Port' : [0x80, ['unsigned be short']],
}],
}
tcpip_vtypes_win7_64 = {
'_TCP_ENDPOINT': [0x320, {
'State' : [0x68, ['Enumeration', dict(target='long',
choices=TCP_STATE_ENUM)]],
'LocalPort' : [0x6c, ['unsigned be short']],
'RemotePort' : [0x6e, ['unsigned be short']],
'Owner' : [0x238, ['pointer', ['_EPROCESS']]],
}],
'_TCP_SYN_ENDPOINT': [None, {
'InetAF' : [0x48, ['pointer', ['_INETAF']]],
'LocalPort' : [0x7c, ['unsigned be short']],
'RemotePort' : [0x7e, ['unsigned be short']],
'LocalAddr' : [0x50, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x68, ['pointer', ['_IN_ADDR']]],
'Owner' : [0x58, ['pointer', ['_SYN_OWNER']]],
}],
'_TCP_TIMEWAIT_ENDPOINT': [None, {
'InetAF' : [0x30, ['pointer', ['_INETAF']]],
'LocalPort' : [0x48, ['unsigned be short']],
'RemotePort' : [0x4a, ['unsigned be short']],
'LocalAddr' : [0x50, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x58, ['pointer', ['_IN_ADDR']]],
}],
}
overlays = {
"_ADDRESS_OBJECT": [None, {
"Protocol": [None, ["Enumeration", dict(
choices=protos,
target="unsigned int")]],
}]
}
# This is not used just yet but soon!
win7_x86_dynamic_overlays = {
"_TCP_LISTENER": dict(
# The Owner process.
Owner=[
# Attempt 1
["Disassembler", dict(
start="tcpip.sys!_TcpCreateListener@8",
length=300,
rules=[
"CALL *InetGetClientProcess",
"MOV [EBX+$out], EAX",
],
target="Pointer",
target_args=dict(
target="_EPROCESS"
),
)],
# Attempt 2
["Disassembler", dict(
start="tcpip.sys!_TcpCovetNetBufferList@20",
rules=[
"MOV EAX, [ESI+$out]",
"TEST EAX, EAX",
"PUSH EAX",
"CALL DWORD *PsGetProcessId",
],
target="Pointer",
target_args=dict(
target="_EPROCESS"
),
)]
],
# Socket creation time.
CreateTime=[
["Disassembler", dict(
start="tcpip.sys!_TcpCreateListener@8",
length=300,
rules=[
"LEA EAX, [EBX+$out]",
"PUSH EAX",
"CALL DWORD *KeQuerySystemTime",
],
target="WinFileTime",
)],
],
),
}
class _TCP_LISTENER(obj.Struct):
"""Class for objects found in TcpL pools"""
def dual_stack_sockets(self, vm=None):
"""Handle Windows dual-stack sockets"""
# If this pointer is valid, the socket is bound to
# a specific IP address. Otherwise, the socket is
# listening on all IP addresses of the address family.
local_addr = self.LocalAddr.dereference(vm=vm)
# Switch to the correct address space.
af_inet = self.InetAF.dereference(vm=vm)
# Note the remote address is always INADDR_ANY or
# INADDR6_ANY for sockets. The moment a client
# connects to the listener, a TCP_ENDPOINT is created
# and that structure contains the remote address.
if local_addr:
inaddr = local_addr.pData.dereference().dereference()
if af_inet.AddressFamily == AF_INET:
yield "v4", inaddr.addr4, inaddr_any
else:
yield "v6", inaddr.addr6, inaddr6_any
else:
yield "v4", inaddr_any, inaddr_any
if af_inet.AddressFamily.v() == AF_INET6:
yield "v6", inaddr6_any, inaddr6_any
class _TCP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in TcpE pools"""
def _ipv4_or_ipv6(self, in_addr, vm=None):
af_inet = self.InetAF.dereference(vm=vm)
if af_inet.AddressFamily == AF_INET:
return in_addr.addr4
else:
return in_addr.addr6
def LocalAddress(self, vm=None):
inaddr = self.AddrInfo.dereference(vm=vm).Local.\
pData.dereference().dereference()
return self._ipv4_or_ipv6(inaddr, vm=vm)
def RemoteAddress(self, vm=None):
inaddr = self.AddrInfo.dereference(vm=vm).\
Remote.dereference()
return self._ipv4_or_ipv6(inaddr, vm=vm)
class _UDP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in UdpA pools"""
class TcpipPluginMixin(object):
"""A mixin for plugins that want to use tcpip.sys profiles."""
@classmethod
def args(cls, parser):
super(TcpipPluginMixin, cls).args(parser)
parser.add_argument("--tcpip_guid", default=None,
help="Force this profile to be used for tcpip.")
def __init__(self, tcpip_guid=None, **kwargs):
super(TcpipPluginMixin, self).__init__(**kwargs)
# For the address resolver to load this GUID.
if tcpip_guid:
self.session.SetCache("tcpip_guid", tcpip_guid)
tcpip_module = self.session.address_resolver.GetModuleByName("tcpip")
self.tcpip_profile = tcpip_module.profile
if not self.tcpip_profile:
raise RuntimeError("Unable to load the profile for tcpip.sys")
class Tcpip(pe_vtypes.BasicPEProfile):
"""A profile for the TCPIP driver."""
@classmethod
def Initialize(cls, profile):
super(Tcpip, cls).Initialize(profile)
        # Network Object Classes for Vista, 2008, and 7 x86 and x64
if profile.get_constant("TCP_LISTENER_ACTIVATED"):
profile.add_classes(dict(_TCP_LISTENER=_TCP_LISTENER,
_TCP_ENDPOINT=_TCP_ENDPOINT,
_UDP_ENDPOINT=_UDP_ENDPOINT))
# Switch on the kernel version. FIXME: This should be done using the
# generate_types module.
version = profile.session.profile.metadata("version")
if profile.metadata("arch") == "AMD64":
# Vista SP1.
if version == 6.0:
profile.add_overlay(tcpip_vtypes_vista_64)
profile.add_overlay({
'_TCP_ENDPOINT': [None, {
'Owner' : [0x210, ['pointer', ['_EPROCESS']]],
}],
})
# Windows 7
elif version >= 6.1:
profile.add_overlay(tcpip_vtypes_vista_64)
profile.add_overlay(tcpip_vtypes_win7_64)
# Win2k3
elif version == 5.2:
profile.add_overlay(tcpip_vtypes_2003_x64)
elif profile.metadata("arch") == "I386":
profile.add_overlay(tcpip_vtypes)
# Win2k3
if version == 5.2:
profile.add_overlay(tcpip_vtypes_2003_sp1_sp2)
# Vista
elif version == 6.0:
profile.add_overlay(tcpip_vtypes_vista)
# Windows 7
elif version >= 6.1:
profile.add_overlay(tcpip_vtypes_vista)
profile.add_overlay(tcpip_vtypes_7)
# Pool tags
profile.add_constants(UDP_END_POINT_POOLTAG="UdpA",
TCP_LISTENER_POOLTAG="TcpL",
TCP_END_POINT_POOLTAG="TcpE")
profile.add_overlay(overlays)
return profile
class TcpipHook(kb.ParameterHook):
name = "tcpip_profile"
def calculate(self):
index = self.session.LoadProfile("tcpip/index")
image_base = self.session.address_resolver.get_address_by_name("tcpip")
for guess, _ in index.LookupIndex(image_base):
return guess
|
palaniyappanBala/rekall
|
rekall-core/rekall/plugins/overlays/windows/tcpip_vtypes.py
|
Python
|
gpl-2.0
| 20,786
|
#!/usr/bin/env python3
# How did Kepler measure planets' oppositions without an accurate clock?
# One theory: a planet's opposition is right in the middle of its
# retrograde loop. Compare those two positions.
import ephem
from ephem import cities
import sys
import os
import math
import argparse
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
gi.require_version('PangoCairo', '1.0')
from gi.repository import Gdk
import cairo
from gi.repository import Pango
from gi.repository import PangoCairo
# Codes for events
START_RETROGRADE = 's'
END_RETROGRADE = 'e'
OPPOSITION = 'o'
CLOSEST_APPROACH = 'c'
STATIONARY = 'S'
MIDPOINT_RETRO = 'm'
# Codes for indices into planettrack items:
IDATE = 0
IRA = 1
IDEC = 2
IDIST = 3
IFLAGS = 4
class OppRetro(object):
def __init__(self, observer):
if type(observer) is ephem.Observer:
self.observer = observer
elif observer == "Los Alamos":
# Avoid problems from repeated city lookup
self.observer = ephem.Observer()
self.observer.name = "Los Alamos"
self.observer.lon = '-106:18.36'
self.observer.lat = '35:53.09'
self.observer.elevation = 2100
else:
self.observer = self.lookup_observer(observer)
print("observer is", self.observer)
self.planet = ephem.Mars()
self.save_all_points = False
# planettrack will be a list of tuples:
# (date, RA (radians), dec (radians), dist (AU), flags)
# Depending on save_all_points it may have all the points,
# or only points where something happens.
self.planettrack = None
def set_time(self, d):
if type(d) is ephem.Date:
self.observer.date = d
else:
self.observer.date = ephem.date(d)
def find_opp_and_retro(self, start_date):
self.observer.date = start_date
# Find retrograde
last_RA = 0.
last_dist = 999 # earth_distance is in AU and will be less than this
pre_closest = True
self.retrograding = False
retro_start = None
retro_end = None
# A place to store the data points
self.planettrack = []
# When a planet is approaching opposition, its elongation is negative.
# When it flips to positive (-180 to 180), it just passed opposition.
# Can't check for exactly 180 if we're only checking time coarsely.
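        # Example: elong moves from about -179.9 deg on one step to about
        # +179.8 deg on the next; the sign flip brackets the opposition.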
last_elong = -1
end_date = ephem.Date(self.observer.date + 170)
while self.observer.date < end_date:
# Add time, in days or fractions thereof.
self.observer.date = ephem.Date(self.observer.date + .01)
# self.observer.date = ephem.Date(self.observer.date + 1)
# It's important to use compute(date), not compute(observer).
# The latter will include parallax, and will show shifts
# from retrograde to direct motion on multiple days
# because (apparently) the parallax is greater than the
# motion of the planet on those days, at least for Mars.
# self.planet.compute(self.observer)
self.planet.compute(self.observer.date)
flags = ''
# Are we starting or stopping retrograde?
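            # In direct (prograde) motion RA increases from step to step;
            # a drop marks the start of retrograde motion, and a rise
            # while retrograding marks its end.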
if ( (self.planet.ra == last_RA) or
(self.planet.ra < last_RA and not self.retrograding) or
(self.planet.ra > last_RA and self.retrograding) ):
if self.retrograding:
flags += END_RETROGRADE
retro_end = self.observer.date
else:
flags += START_RETROGRADE
retro_start = self.observer.date
if self.planet.ra == last_RA:
flags += STATIONARY
self.retrograding = not self.retrograding
# print(self.observer.date, "Elongation:", self.planet.elong)
if last_elong < 0 and self.planet.elong > 0:
flags += OPPOSITION
if self.planet.earth_distance >= last_dist and pre_closest:
flags += CLOSEST_APPROACH
self.find_parallax(self.observer.date)
pre_closest = False
elif self.planet.earth_distance < last_dist: # receding
pre_closest = True
if flags or self.save_all_points:
self.planettrack.append([self.observer.date.datetime(),
float(self.planet.ra),
float(self.planet.dec),
self.planet.earth_distance,
flags])
last_RA = self.planet.ra
last_dist = self.planet.earth_distance
last_elong = self.planet.elong
# We're done calculating all the points.
# Now calculate the retrograde midpoint, if we have both start and end.
if retro_start and retro_end:
mid_retro_date = ephem.Date((retro_start + retro_end) / 2)
self.observer.date = mid_retro_date
self.planet.compute(self.observer.date)
# Insert that into our planettrack list:
for i, point in enumerate(self.planettrack):
                if point[IDATE] == mid_retro_date.datetime():
point[IFLAGS] += MIDPOINT_RETRO
break
elif point[IDATE] > mid_retro_date.datetime():
self.planettrack.insert(i, [mid_retro_date.datetime(),
float(self.planet.ra),
float(self.planet.dec),
self.planet.earth_distance,
MIDPOINT_RETRO])
# We've just changed a list in the middle of looping
# over that list, but it's okay because we're breaking out.
break
else:
print("don't have both retro start and end")
# Now print what we found.
print("%20s %-19s %-11s %-10s %s" % ('', 'Date',
'RA', 'Dec', 'Dist (mi)'))
        opp_RA = opp_dec = None
        mpr_RA = mpr_dec = None
        for point in self.planettrack:
if point[IFLAGS]:
# print("%20s %-19s %-11s %-10s %d" % \
# (self.flags_to_string(point[4]), str(point[0]),
# point[1], point[2],
# point[3] * 9.2956e7))
print("%20s %-19s %-11.7f %-10.7f %d" % \
(self.flags_to_string(point[IFLAGS]),
point[IDATE].strftime("%Y-%m-%d %H:%M"),
self.rads_to_hours(point[IRA]),
self.rads_to_degrees(point[IDEC]),
point[IDIST] * 9.2956e7))
if OPPOSITION in point[IFLAGS]:
opp_RA = point[IRA]
opp_dec = point[IDEC]
if MIDPOINT_RETRO in point[IFLAGS]:
mpr_RA = point[IRA]
mpr_dec = point[IDEC]
if opp_RA and mpr_RA:
degdiff = self.rads_to_degrees(math.sqrt((opp_RA - mpr_RA)**2
+ (opp_dec - mpr_dec)**2))
print("Difference between opposition and midpoint of retrograde:")
            if degdiff > 2:
                print(degdiff, "degrees")
            elif degdiff * 60. > 2:
                print(degdiff * 60., "arcmin")
            else:
                # Anything over 2 degrees was caught above, so the
                # remainder is small enough to report in arcseconds.
                print(degdiff * 3600., "arcsec")
def find_parallax(self, date):
'''Find the maximum parallax of self.planet on the given date
from self.observer's location -- in other words, the difference
in Mars' position between the observer's position and an
observer at the same latitude but opposite longitude:
this tells you you how much difference you would see from
your position if Mars didn't move between your sunrise and sunset.
'''
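        # Worked scale check (a rough sketch, assuming Mars near 0.39 AU
        # and an observer latitude of ~36 deg): half_dist ~ 3959 *
        # cos(36 deg) ~ 3200 miles, so the expected parallax is roughly
        # 2 * atan(3200 / 3.6e7) ~ 1.8e-4 rad ~ 37 arcsec.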
save_date = self.observer.date
# https://www.quora.com/Is-it-possible-to-measure-the-distance-to-Mars-using-a-telescope-and-the-parallax-method
# says it should vary between 361.9 arc sec > a > 51.6 arc sec,
# but I think he's smoking something.
# So let's calculate it.
observer = ephem.Observer()
observer.name = "Observer"
# To calculate from a point on the equator, set observer.lat to 0.
observer.lat = self.observer.lat
observer.lon = self.observer.lon
observer.elevation = 0
antipode = ephem.Observer()
antipode.name = "Anti-point"
antipode.lat = observer.lat
        # Same latitude, opposite side of the Earth: shift the longitude
        # by 180 degrees (ephem stores angles in radians).
        antipode.lon = self.observer.lon + math.pi
antipode.elevation = 0
observer.date = observer.next_rising(self.planet, start=date)
self.planet.compute(observer)
our_ra = self.planet.ra
our_dec = self.planet.dec
antipode.date = observer.date
self.planet.compute(antipode)
antipode_ra = self.planet.ra
antipode_dec = self.planet.dec
# Calculate it the straightforward way using trig:
print()
mars_dist_miles = self.planet.earth_distance * 9.2956e7
print("Miles to Mars:", mars_dist_miles)
earth_mean_radius = 3958.8 # in miles
half_dist = earth_mean_radius * math.cos(observer.lat)
print("Distance between observers:", 2. * half_dist)
par = 2. * math.atan(half_dist / mars_dist_miles) * 180 / math.pi * 3600
print("Calculated parallax (arcsec):", par)
# See what pyephem calculates as the difference between observations:
print()
print(" Us:", our_ra, our_dec)
print("Anti-pt:", antipode_ra, antipode_dec)
print("parallax on %s: RA %f, dec %f" % (antipode.date,
our_ra - antipode_ra,
our_dec - antipode_dec))
total_par = (math.sqrt((our_ra - antipode_ra)**2 +
(our_dec - antipode_dec)**2)
* 180. / math.pi * 3600.)
print("Total parallax (sum of squares): %f arcseconds" % total_par)
print()
# Set planet back to its previous position,
# since we're in the middle of ongoing computations:
self.planet.compute(self.observer.date)
@staticmethod
def rads_to_degrees(a):
return float(a) * 180. / math.pi
@staticmethod
def rads_to_hours(a):
return float(a) * 12. / math.pi
@staticmethod
def flags_to_string(flags):
l = []
if OPPOSITION in flags:
l.append("Opposition")
if CLOSEST_APPROACH in flags:
l.append("Closest approach")
if STATIONARY in flags:
l.append("Stationary")
if START_RETROGRADE in flags:
l.append("Start retrograde")
if END_RETROGRADE in flags:
l.append("End retrograde")
if MIDPOINT_RETRO in flags:
l.append("Retrograde midpoint")
return ','.join(l)
@staticmethod
def lookup_observer(city):
try:
return ephem.city(city)
except KeyError:
try:
return cities.lookup(city)
except ValueError:
raise RuntimeError("I don't know where %s is, sorry" % city)
if __name__ == '__main__':
# parser = argparse.ArgumentParser()
# parser.add_argument('-w', "--window", dest="window", default=False,
# action="store_true", help="Show a graphical window")
# args = parser.parse_args(sys.argv[1:])
start_date = ephem.Date('2018/6/25 0:00')
cityname = "Los Alamos, NM"
# cityname = "Prague"
oppy = OppRetro(cityname)
oppy.find_opp_and_retro(start_date)
|
akkana/scripts
|
astro/oppretro/oppretro_ephem.py
|
Python
|
gpl-2.0
| 12,026
|
#!/usr/bin/python3
from socket import socket, AF_UNIX, SOCK_DGRAM
from select import select
from os import unlink, getcwd, stat
from os.path import exists
from os.path import relpath
from sys import argv, exit
def main():
if len(argv) < 2:
        print('Usage: %s <socket name>' % argv[0])
exit(1)
sn = relpath(argv[1] + 'slocket', getcwd())
s = socket(AF_UNIX, SOCK_DGRAM)
s.setblocking(False)
try_unlink(sn)
s.bind(sn)
    print('listening on %r' % sn)
while not exists(relpath(argv[1] + '/slocket_listen_kill_flag', getcwd())):
x,_,_ = select([s], [], [], 1.0)
if len(x) > 0:
x = x[0]
y = x.recv(1024)
            print(y)
    print('found slocket_listen_kill_flag... closing')
s.close()
try_unlink(sn)
def try_unlink(sn):
try:
unlink(sn)
    except OSError:
        # The socket file may not exist yet; that's fine.
        pass
if __name__ == '__main__':
main()
|
facebook/mysql-5.6
|
mysql-test/t/slocket_listen.py
|
Python
|
gpl-2.0
| 839
|
#!/usr/bin/env python2
# # # # # # # # # # # # # # # # # #
# JodelExtract Configuration File #
# # # # # # # # # # # # # # # # # #
# app version to use when not specified otherwise
APP_VERSION = '4.47.0'
# General and debugging settings
VERBOSE = True # Print post handling to command line
CONNECTION_VERBOSE = False # Connection actions printing
DEBUG = False # print posts to command line & activate Flask debugger
DBG_NO_IMAGES = False # Disable image download
STORE_POSTS = False
PRINT_API = False
# App name and author for temp directory
APP_NAME = "JodelExtract"
APP_AUTHOR = "MR"
MY_USER_ID = "158813957148f5c32faf16faab738946ba0a16ef"
# Flask database config
DATABASE_PATH = 'tmp/'
USERNAME = ''
PASSWORD = ''
# App key
ANDROID_CLIENT_ID="81e8a76e-1e02-4d17-9ba0-8a7020261b26"
WODEL_CLIENT_ID="6a62f24e-7784-0226-3fffb-5e0e895aaaf"
PORT = 443
SPLASH_TEXT="""
###########################################
#### Welcome to JodelExtract! ####
###########################################
...opening web browser automatically...
"""
def set_config(debug, verbose, store_images, store_posts, print_api):
global CONNECTION_VERBOSE
global DEBUG
global DBG_NO_IMAGES
global STORE_POSTS
global PRINT_API
DEBUG = debug
CONNECTION_VERBOSE = verbose
DBG_NO_IMAGES = not store_images
STORE_POSTS = store_posts
PRINT_API = print_api
class ConfigType():
""" Just a type to hold the configuration paramters """
    def __init__(self, hmac_secret, version_string=None, user_agent_string=None,
                 x_client_type=None, x_api_version='0.1', client_id=ANDROID_CLIENT_ID):
# HMAC secret
#
        # The HMAC secret is generated by the Android app by feeding the
# signature SHA hash into a native (C++) library.
# This library changes presumably with every release of the app.
# The following dictionary contains the HMAC secrets for every version:
#
# To calculate this value for yourself, you would need to extract the
# library from the APK, write a proper Java interface around it, and
# pass the certificate hash to it.
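        #
        # A minimal sketch of how such a secret is typically consumed to
        # sign a request (the field list below is an assumption for
        # illustration, not the app's actual wire format):
        #
        #   import hmac, hashlib
        #   msg = '%'.join([method, host, path, auth_token, timestamp, body])
        #   sig = hmac.new(hmac_secret, msg, hashlib.sha1).hexdigest().upper()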
if version_string is not None:
if user_agent_string is None:
user_agent_string = 'Jodel/'+version_string+' Dalvik/2.1.0 (Linux; U; Android 6.0.1; Nexus 5 Build/MMB29V)'
if x_client_type is None:
x_client_type = 'android_'+version_string
if hmac_secret is None or len(hmac_secret) != 40:
            raise ValueError('The HMAC secret must be exactly 40 characters long')
self.hmac_secret = hmac_secret
self.user_agent = user_agent_string
self.x_client_type = x_client_type
self.x_api_version = x_api_version
self.client_id = client_id
APP_CONFIG={
'4.27.0': ConfigType('VwJHzYUbPjGiXWauoVNaHoCWsaacTmnkGwNtHhjy', version_string='4.27.0', x_api_version='0.2'),
'4.28.1': ConfigType('aPLFAjyUusVPHgcgvlAxihthmRaiuqCjBsRCPLan', version_string='4.28.1', x_api_version='0.2'),
'4.29.0': ConfigType('dIHNtHWOxFmoFouufSflpTKYjPmCIhWUCQHgbNzR', version_string='4.29.0', x_api_version='0.2'),
'4.29.1': ConfigType('dIHNtHWOxFmoFouufSflpTKYjPmCIhWUCQHgbNzR', version_string='4.29.1', x_api_version='0.2'),
'4.30.2': ConfigType('zpwKnTvubiKritHEnjOTcTeHxLJJNTEVumuNZqcE', version_string='4.30.2', x_api_version='0.2'),
'4.31.1': ConfigType('plerFToqEdWlzShdZlTywaCHRuzlKIMsNmOJVDGE', version_string='4.31.1', x_api_version='0.2'),
'4.32.2': ConfigType('OFIqFvBgkccPNTVbIzkYaSmrwMlbVzRoOBBjXUIG', version_string='4.32.2', x_api_version='0.2'),
'4.33.2': ConfigType('LDWWpuUigOnKCbCLpoNMDHCqHCWbLKPzHbnIUKIf', version_string='4.33.2', x_api_version='0.2'),
'4.34.2': ConfigType('SDydTnTdqqaiAMfneLkqXYxamvNuUYOmkqpdiZTu', version_string='4.34.2', x_api_version='0.2'),
'4.35.6': ConfigType('cYjTAwjdJyiuXAyrMhkCDiVZhshhLhotNotLiPVu', version_string='4.35.6', x_api_version='0.2'),
'4.37.2': ConfigType('OjZvbmHjcGoPhz6OfjIeDRzLXOFjMdJmAIplM7Gq', version_string='4.37.2', x_api_version='0.2'),
'4.37.5': ConfigType('NtMEkmHjcGldPDrOfjIeDRzLXOFjMdJmAIpwyFae', version_string='4.37.5', x_api_version='0.2'),
'4.38.3': ConfigType('KZmLMUggDeMzQfqMNYFLWNyttEmQgClvlPyACVlH', version_string='4.38.3', x_api_version='0.2'),
'4.40.1': ConfigType('XcpPpQcnfqEweoHRuOQbeGrRryHfxCoSkwpwKoxE', version_string='4.40.1', x_api_version='0.2'),
'4.41.0': ConfigType('hFvMqLauMtnodakokftuKETbIsVLxpqfjAXiRoih', version_string='4.41.0', x_api_version='0.2'),
'4.42.4': ConfigType('bJYlFZkrnLpLMVisrbstPDNsaYHPbzcKRSpqtjtt', version_string='4.42.4', x_api_version='0.2'),
'4.47.0': ConfigType('hyTBJcvtpDLSgGUWjybbYUNKSSoVvMcfdjtjiQvf', version_string='4.47.0', x_api_version='0.2'),
'wodel': ConfigType('bgulhzgo9876GFKgguzTZITFGMn879087vbgGFuz', x_client_type='wodel_1.1', user_agent_string ='Jodel/1.1 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0)', x_api_version='0.2', client_id=WODEL_CLIENT_ID)
}
CAPTCHA_DICT={
'18FTBXVIJC' : {'md5': '4d97884c3806a531ddb7288bf0eab418', 'solution': [1, 3, 5]},
'1CEAFRH69O' : {'md5': '08116dcafc684462ea1948819475a81c', 'solution': [7, 8] },
'2QT6JRL06T' : {'md5': '389aa660266f0a8f76b5ef21c60cf6fd', 'solution': [1, 2] },
'4GEIEE5P8P' : {'md5': '42c904d3cd20f55405a64fcf8032b92a', 'solution': [2, 6, 8]},
'5VI2JTJYWY' : {'md5': '2a819973e9e6e22eeb445f548201ab40', 'solution': [0, 5] },
'6UHC4L53DG' : {'md5': '4d9a9b459f0d3c67581c4990bda3257a', 'solution': [0, 2, 3]},
'AKWROEYSD3' : {'md5': '2be5ec6995af4925299ed2fa635e4782', 'solution': [1, 5, 7]},
'BL5901E1JS' : {'md5': '61e0c2f52d510cc89b7432da01494a68', 'solution': [0, 4] },
'BNB1P58AJ6' : {'md5': '2ea52cb78ba770b72149daa428331e98', 'solution': [4] },
'CORKCXU0TA' : {'md5': '55bd1a0cc31c4d57654d927ca05b81a4', 'solution': [2, 4, 5]},
'D3SKGYMB0C' : {'md5': '681f0615747ba54f97040ef36dd2e6a0', 'solution': [1] },
'DB96PZYUM7' : {'md5': '4fed27abf3b4fa6dad5cf1d852114a1e', 'solution': [2, 7] },
'EJSHC2LTY1' : {'md5': '549f069a0189e73f43640a10f7be0de2', 'solution': [5, 6, 8]},
'G6X12MP9DW' : {'md5': 'd09f368da26b9ed9d583d61f0dd4b1dd', 'solution': [3] },
'IGDPXAFRE8' : {'md5': '2224eef78d48f63536bc7e0730ebfd54', 'solution': [1, 6, 7]},
'IH92Z2ETIE' : {'md5': '5055db4cab5e09eeeac0293ca44ebf65', 'solution': [1, 2, 7]},
'JGA66GP5TG' : {'md5': '76a3a9ced6474f3db148568d2f396dd6', 'solution': [1, 5, 8]},
'KUD8PU6UAB' : {'md5': '50abf6c375ea3115168da3be0acc5485', 'solution': [5] },
'MF7ZX46TQQ' : {'md5': '9329c0fecaece67da26a740d3519970b', 'solution': [0, 1, 8]},
'MFDV8CMHHG' : {'md5': 'b04955d8598980df71c7b69ea3a8e7a2', 'solution': [2, 7, 8]},
'MI9R8R1YIZ' : {'md5': '2ba5296ea4cb4bcd302f5a3b624ecf82', 'solution': [1, 7, 8]},
'NI1A0RU1VJ' : {'md5': '93af8a552ecf9729493b5c9fea98c748', 'solution': [3, 4, 6]},
'OFJP966MXD' : {'md5': '5b9a9ae117ebe53e71d236ea3952b974', 'solution': [1, 4, 6]},
'OQZBADCV8I' : {'md5': 'b435d7145639469b151a6b01a0bfe1c6', 'solution': [2, 5, 8]},
'QNLPAJ8XGM' : {'md5': '0635a32edc11e674f48dbbfbae98c969', 'solution': [3, 7, 8]},
'RXNR1VZPUC' : {'md5': '18eaa52fcf87e47edd684c8696aa1798', 'solution': [0, 4, 6]},
'YLJB76EJDY' : {'md5': '49a857ed6a90225b7de5b9ed22ee2c8a', 'solution': [3, 4] },
'YO9E3X95IG' : {'md5': '3f86e8960a64f884aa45ecb696890f5c', 'solution': [0, 1, 8]},
'ZJP7PW2LRG' : {'md5': 'e785f87dec2b23818dbb8892ea48f91d', 'solution': [4, 5] },
}
|
knorkinator/PythonProject
|
TOOLS/Config.py
|
Python
|
gpl-2.0
| 7,570
|
"""
.. module:: l_release_group_url
The **L Release Group Url** Model.
PostgreSQL Definition
---------------------
The :code:`l_release_group_url` table is defined in the MusicBrainz Server as:
.. code-block:: sql
CREATE TABLE l_release_group_url ( -- replicate
id SERIAL,
link INTEGER NOT NULL, -- references link.id
entity0 INTEGER NOT NULL, -- references release_group.id
entity1 INTEGER NOT NULL, -- references url.id
edits_pending INTEGER NOT NULL DEFAULT 0 CHECK (edits_pending >= 0),
last_updated TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
link_order INTEGER NOT NULL DEFAULT 0 CHECK (link_order >= 0),
entity0_credit TEXT NOT NULL DEFAULT '',
entity1_credit TEXT NOT NULL DEFAULT ''
);
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class l_release_group_url(models.Model):
"""
Not all parameters are listed here, only those that present some interest
in their Django implementation.
:param int edits_pending: the MusicBrainz Server uses a PostgreSQL `check`
to validate that the value is a positive integer. In Django, this is
done with `models.PositiveIntegerField()`.
:param int link_order: the MusicBrainz Server uses a PostgreSQL `check`
to validate that the value is a positive integer. In Django, this is
done with `models.PositiveIntegerField()`.
"""
id = models.AutoField(primary_key=True)
link = models.ForeignKey('link')
entity0 = models.ForeignKey('release_group', related_name='links_to_url')
entity1 = models.ForeignKey('url')
edits_pending = models.PositiveIntegerField(default=0)
last_updated = models.DateTimeField(auto_now=True)
link_order = models.PositiveIntegerField(default=0)
    entity0_credit = models.TextField(default='')
    entity1_credit = models.TextField(default='')
def __str__(self):
return 'L Release Group Url'
class Meta:
db_table = 'l_release_group_url'
|
marios-zindilis/musicbrainz-django-models
|
musicbrainz_django_models/models/l_release_group_url.py
|
Python
|
gpl-2.0
| 2,149
|
r"""
Summary
----------
Test output of docker tag command
Operational Summary
----------------------
#. Make new image name.
#. tag changes.
#. check if tagged image exists.
#. remove tagged image from local repo.
"""
from autotest.client.shared import error
from autotest.client import utils
from dockertest.subtest import SubSubtest
from dockertest.images import DockerImages
from dockertest.images import DockerImage
from dockertest.output import OutputGood
from dockertest.output import mustpass
from dockertest.dockercmd import DockerCmd
from dockertest import subtest
from dockertest import config
from dockertest import xceptions
class tag(subtest.SubSubtestCaller):
""" SubSubtest caller """
class tag_base(SubSubtest):
""" tag base class """
def __init__(self, *args, **kwargs):
super(tag_base, self).__init__(*args, **kwargs)
self.dkrimg = DockerImages(self)
self.sub_stuff['tmp_image_list'] = set()
def get_images_by_name(self, full_name):
""" :return: List of images with given name """
return self.dkrimg.list_imgs_with_full_name(full_name)
def prep_image(self, base_image):
""" Tag the dockertest image to this test name """
mustpass(DockerCmd(self, "pull", [base_image],
verbose=False).execute())
subargs = [base_image, self.sub_stuff["image"]]
tag_results = DockerCmd(self, "tag", subargs, verbose=False).execute()
if tag_results.exit_status:
            raise xceptions.DockerTestNAError("Problems during "
                                              "initialization of "
                                              "test: %s" % tag_results)
img = self.get_images_by_name(self.sub_stuff["image"])
self.failif(not img, "Image %s was not created."
% self.sub_stuff["image"])
self.sub_stuff['image_list'] = img
def initialize(self):
super(tag_base, self).initialize()
config.none_if_empty(self.config)
self.dkrimg.gen_lower_only = self.config['gen_lower_only']
new_img_name = self.dkrimg.get_unique_name()
self.sub_stuff["image"] = new_img_name
base_image = DockerImage.full_name_from_defaults(self.config)
self.prep_image(base_image)
def complete_docker_command_line(self):
""" :return: tag subargs using new_image_name """
force = self.config["tag_force"]
cmd = []
if force:
cmd.append("-f")
cmd.append(self.sub_stuff["image"])
cmd.append(self.sub_stuff["new_image_name"])
self.sub_stuff["tag_cmd"] = cmd
return cmd
def run_once(self):
super(tag_base, self).run_once()
subargs = self.complete_docker_command_line()
self.sub_stuff["cmdresult"] = DockerCmd(self, 'tag', subargs).execute()
def postprocess(self):
super(tag_base, self).postprocess()
if self.config["docker_expected_result"] == "PASS":
# Raise exception if problems found
OutputGood(self.sub_stuff['cmdresult'])
self.failif(self.sub_stuff['cmdresult'].exit_status != 0,
"Non-zero tag exit status: %s"
% self.sub_stuff['cmdresult'])
img = self.get_images_by_name(self.sub_stuff["new_image_name"])
# Needed for cleanup
self.sub_stuff['image_list'] += img
self.failif(len(img) < 1,
"Failed to look up tagted image ")
elif self.config["docker_expected_result"] == "FAIL":
chck = OutputGood(self.sub_stuff['cmdresult'], ignore_error=True)
exit_code = self.sub_stuff['cmdresult'].exit_status
self.failif(not chck or not exit_code,
"Zero tag exit status: Command should fail due to"
" wrong command arguments.")
else:
self.failif(True, "Improper 'docker_expected_result' value %s"
% self.config["docker_expected_result"])
def cleanup(self):
super(tag_base, self).cleanup()
# Auto-converts "yes/no" to a boolean
if self.config['remove_after_test'] and 'image_list' in self.sub_stuff:
for image in self.sub_stuff["image_list"]:
self.logdebug("Removing image %s", image.full_name)
try:
self.dkrimg.remove_image_by_full_name(image.full_name)
except error.CmdError, exc:
err = exc.result_obj.stderr
if "tagged in multiple repositories" not in err:
raise
self.loginfo("Successfully removed test image: %s",
image.full_name)
for image in self.sub_stuff['tmp_image_list']:
image = self.get_images_by_name(image)
if image:
self.logdebug("Removing image %s", image[0].full_name)
self.dkrimg.remove_image_by_full_name(image[0].full_name)
self.loginfo("Successfully removed test image: %s",
image[0].full_name)
class change_tag(tag_base):
"""
1. tag testing image with different tag (keep the name, change only tag)
2. verify it worked well
"""
def generate_special_name(self):
""" keep the name, only get unique tag """
img = self.sub_stuff['image_list'][0]
_tag = "%s_%s" % (img.tag, utils.generate_random_string(8))
if self.config['gen_lower_only']:
_tag = _tag.lower()
else:
_tag += '_UP' # guarantee some upper-case
repo = img.repo
registry = img.repo_addr
registry_user = img.user
new_img_name = DockerImage.full_name_from_component(repo,
_tag,
registry,
registry_user)
return new_img_name
def initialize(self):
super(change_tag, self).initialize()
new_img_name = self.generate_special_name()
while self.get_images_by_name(new_img_name):
new_img_name = self.generate_special_name()
self.sub_stuff["new_image_name"] = new_img_name
class double_tag(change_tag):
"""
1. tag testing image with different tag (keep the name, change only tag)
2. do the same and expect failure
"""
def initialize(self):
super(double_tag, self).initialize()
# Tag it for the first time
self.sub_stuff['tmp_image_list'].add(self.sub_stuff["new_image_name"])
mustpass(DockerCmd(self, 'tag', self.complete_docker_command_line(),
verbose=False).execute())
class double_tag_force(double_tag):
""" Same as ``double_tag`` only this time use `--force` and expect pass """
|
luwensu/autotest-docker
|
subtests/docker_cli/tag/tag.py
|
Python
|
gpl-2.0
| 6,986
|
"""
KVM test utility functions.
@copyright: 2008-2009 Red Hat Inc.
"""
import time, string, random, socket, os, signal, re, logging, commands, cPickle
import fcntl, shelve, ConfigParser, threading, sys, UserDict, inspect, tarfile
import struct, shutil, glob
from autotest_lib.client.bin import utils, os_dep
from autotest_lib.client.common_lib import error, logging_config
from autotest_lib.client.common_lib import logging_manager, git
import rss_client, aexpect
import platform
try:
import koji
KOJI_INSTALLED = True
except ImportError:
KOJI_INSTALLED = False
ARCH = platform.machine()
if ARCH == "ppc64":
# From include/linux/sockios.h
SIOCSIFHWADDR = 0x8924
SIOCGIFHWADDR = 0x8927
SIOCSIFFLAGS = 0x8914
SIOCGIFINDEX = 0x8933
SIOCBRADDIF = 0x89a2
# From linux/include/linux/if_tun.h
TUNSETIFF = 0x800454ca
TUNGETIFF = 0x400454d2
TUNGETFEATURES = 0x400454cf
IFF_TAP = 0x2
IFF_NO_PI = 0x1000
IFF_VNET_HDR = 0x4000
# From linux/include/linux/if.h
IFF_UP = 0x1
else:
# From include/linux/sockios.h
SIOCSIFHWADDR = 0x8924
SIOCGIFHWADDR = 0x8927
SIOCSIFFLAGS = 0x8914
SIOCGIFINDEX = 0x8933
SIOCBRADDIF = 0x89a2
# From linux/include/linux/if_tun.h
TUNSETIFF = 0x400454ca
TUNGETIFF = 0x800454d2
TUNGETFEATURES = 0x800454cf
IFF_TAP = 0x0002
IFF_NO_PI = 0x1000
IFF_VNET_HDR = 0x4000
# From linux/include/linux/if.h
IFF_UP = 0x1
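# Usage sketch for the TUN/TAP constants above (an illustration, not code
# called at import time): open the clone device and create a tap interface.
#   tapfd = os.open("/dev/net/tun", os.O_RDWR)
#   ifr = struct.pack("16sH", "tap0", IFF_TAP | IFF_NO_PI)
#   fcntl.ioctl(tapfd, TUNSETIFF, ifr)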
def _lock_file(filename):
f = open(filename, "w")
fcntl.lockf(f, fcntl.LOCK_EX)
return f
def _unlock_file(f):
fcntl.lockf(f, fcntl.LOCK_UN)
f.close()
def is_vm(obj):
"""
Tests whether a given object is a VM object.
@param obj: Python object.
"""
return obj.__class__.__name__ == "VM"
class NetError(Exception):
pass
class TAPModuleError(NetError):
    def __init__(self, devname, action="open", details=None):
        NetError.__init__(self, devname)
        self.devname = devname
        self.action = action
        self.details = details
def __str__(self):
e_msg = "Can't %s %s" % (self.action, self.devname)
if self.details is not None:
e_msg += " : %s" % self.details
return e_msg
class TAPNotExistError(NetError):
def __init__(self, ifname):
NetError.__init__(self, ifname)
self.ifname = ifname
def __str__(self):
return "Interface %s does not exist" % self.ifname
class TAPCreationError(NetError):
def __init__(self, ifname, details=None):
NetError.__init__(self, ifname, details)
self.ifname = ifname
self.details = details
def __str__(self):
e_msg = "Cannot create TAP device %s" % self.ifname
if self.details is not None:
e_msg += ": %s" % self.details
return e_msg
class TAPBringUpError(NetError):
def __init__(self, ifname):
NetError.__init__(self, ifname)
self.ifname = ifname
def __str__(self):
return "Cannot bring up TAP %s" % self.ifname
class BRAddIfError(NetError):
def __init__(self, ifname, brname, details):
NetError.__init__(self, ifname, brname, details)
self.ifname = ifname
self.brname = brname
self.details = details
def __str__(self):
return ("Can not add if %s to bridge %s: %s" %
(self.ifname, self.brname, self.details))
class HwAddrSetError(NetError):
def __init__(self, ifname, mac):
NetError.__init__(self, ifname, mac)
self.ifname = ifname
self.mac = mac
def __str__(self):
return "Can not set mac %s to interface %s" % (self.mac, self.ifname)
class HwAddrGetError(NetError):
def __init__(self, ifname):
NetError.__init__(self, ifname)
self.ifname = ifname
def __str__(self):
return "Can not get mac of interface %s" % self.ifname
class Env(UserDict.IterableUserDict):
"""
A dict-like object containing global objects used by tests.
"""
def __init__(self, filename=None, version=0):
"""
Create an empty Env object or load an existing one from a file.
If the version recorded in the file is lower than version, or if some
error occurs during unpickling, or if filename is not supplied,
create an empty Env object.
@param filename: Path to an env file.
@param version: Required env version (int).
"""
UserDict.IterableUserDict.__init__(self)
empty = {"version": version}
if filename:
self._filename = filename
try:
if os.path.isfile(filename):
f = open(filename, "r")
env = cPickle.load(f)
f.close()
if env.get("version", 0) >= version:
self.data = env
else:
logging.warn("Incompatible env file found. Not using it.")
self.data = empty
else:
# No previous env file found, proceed...
self.data = empty
# Almost any exception can be raised during unpickling, so let's
# catch them all
except Exception, e:
logging.warn(e)
self.data = empty
else:
self.data = empty
def save(self, filename=None):
"""
Pickle the contents of the Env object into a file.
@param filename: Filename to pickle the dict into. If not supplied,
use the filename from which the dict was loaded.
"""
filename = filename or self._filename
f = open(filename, "w")
cPickle.dump(self.data, f)
f.close()
def get_all_vms(self):
"""
Return a list of all VM objects in this Env object.
"""
return [o for o in self.values() if is_vm(o)]
def get_vm(self, name):
"""
Return a VM object by its name.
@param name: VM name.
"""
return self.get("vm__%s" % name)
def register_vm(self, name, vm):
"""
Register a VM in this Env object.
@param name: VM name.
@param vm: VM object.
"""
self["vm__%s" % name] = vm
def unregister_vm(self, name):
"""
Remove a given VM.
@param name: VM name.
"""
del self["vm__%s" % name]
def register_installer(self, installer):
"""
Register a installer that was just run
The installer will be available for other tests, so that
information about the installed KVM modules and qemu-kvm can be used by
them.
"""
self['last_installer'] = installer
def previous_installer(self):
"""
Return the last installer that was registered
"""
return self.get('last_installer')
class Params(UserDict.IterableUserDict):
"""
A dict-like object passed to every test.
"""
def objects(self, key):
"""
Return the names of objects defined using a given key.
@param key: The name of the key whose value lists the objects
(e.g. 'nics').
"""
return self.get(key, "").split()
def object_params(self, obj_name):
"""
Return a dict-like object containing the parameters of an individual
object.
This method behaves as follows: the suffix '_' + obj_name is removed
from all key names that have it. Other key names are left unchanged.
The values of keys with the suffix overwrite the values of their
suffixless versions.
@param obj_name: The name of the object (objects are listed by the
objects() method).
"""
suffix = "_" + obj_name
new_dict = self.copy()
for key in self:
if key.endswith(suffix):
new_key = key.split(suffix)[0]
new_dict[new_key] = self[key]
return new_dict
# Functions related to MAC/IP addresses
def _open_mac_pool(lock_mode):
lock_file = open("/tmp/mac_lock", "w+")
fcntl.lockf(lock_file, lock_mode)
pool = shelve.open("/tmp/address_pool")
return pool, lock_file
def _close_mac_pool(pool, lock_file):
pool.close()
fcntl.lockf(lock_file, fcntl.LOCK_UN)
lock_file.close()
def _generate_mac_address_prefix(mac_pool):
"""
Generate a random MAC address prefix and add it to the MAC pool dictionary.
If there's a MAC prefix there already, do not update the MAC pool and just
return what's in there. By convention we will set KVM autotest MAC
addresses to start with 0x9a.
@param mac_pool: The MAC address pool object.
@return: The MAC address prefix.
"""
if "prefix" in mac_pool:
prefix = mac_pool["prefix"]
else:
r = random.SystemRandom()
prefix = "9a:%02x:%02x:%02x:" % (r.randint(0x00, 0xff),
r.randint(0x00, 0xff),
r.randint(0x00, 0xff))
mac_pool["prefix"] = prefix
return prefix
def generate_mac_address(vm_instance, nic_index):
"""
Randomly generate a MAC address and add it to the MAC address pool.
Try to generate a MAC address based on a randomly generated MAC address
prefix and add it to a persistent dictionary.
key = VM instance + NIC index, value = MAC address
e.g. {'20100310-165222-Wt7l:0': '9a:5d:94:6a:9b:f9'}
@param vm_instance: The instance attribute of a VM.
@param nic_index: The index of the NIC.
@return: MAC address string.
"""
mac_pool, lock_file = _open_mac_pool(fcntl.LOCK_EX)
key = "%s:%s" % (vm_instance, nic_index)
if key in mac_pool:
mac = mac_pool[key]
else:
prefix = _generate_mac_address_prefix(mac_pool)
r = random.SystemRandom()
while key not in mac_pool:
mac = prefix + "%02x:%02x" % (r.randint(0x00, 0xff),
r.randint(0x00, 0xff))
if mac in mac_pool.values():
continue
mac_pool[key] = mac
_close_mac_pool(mac_pool, lock_file)
return mac
def free_mac_address(vm_instance, nic_index):
"""
Remove a MAC address from the address pool.
@param vm_instance: The instance attribute of a VM.
@param nic_index: The index of the NIC.
"""
mac_pool, lock_file = _open_mac_pool(fcntl.LOCK_EX)
key = "%s:%s" % (vm_instance, nic_index)
if key in mac_pool:
del mac_pool[key]
_close_mac_pool(mac_pool, lock_file)
def set_mac_address(vm_instance, nic_index, mac):
"""
Set a MAC address in the pool.
@param vm_instance: The instance attribute of a VM.
@param nic_index: The index of the NIC.
"""
mac_pool, lock_file = _open_mac_pool(fcntl.LOCK_EX)
mac_pool["%s:%s" % (vm_instance, nic_index)] = mac
_close_mac_pool(mac_pool, lock_file)
def get_mac_address(vm_instance, nic_index):
"""
Return a MAC address from the pool.
@param vm_instance: The instance attribute of a VM.
@param nic_index: The index of the NIC.
@return: MAC address string.
"""
mac_pool, lock_file = _open_mac_pool(fcntl.LOCK_SH)
mac = mac_pool.get("%s:%s" % (vm_instance, nic_index))
_close_mac_pool(mac_pool, lock_file)
return mac
def verify_ip_address_ownership(ip, macs, timeout=10.0):
"""
Use arping and the ARP cache to make sure a given IP address belongs to one
of the given MAC addresses.
@param ip: An IP address.
@param macs: A list or tuple of MAC addresses.
@return: True iff ip is assigned to a MAC address in macs.
"""
# Compile a regex that matches the given IP address and any of the given
# MAC addresses
mac_regex = "|".join("(%s)" % mac for mac in macs)
regex = re.compile(r"\b%s\b.*\b(%s)\b" % (ip, mac_regex), re.IGNORECASE)
# Check the ARP cache
o = commands.getoutput("%s -n" % find_command("arp"))
if regex.search(o):
return True
# Get the name of the bridge device for arping
o = commands.getoutput("%s route get %s" % (find_command("ip"), ip))
dev = re.findall("dev\s+\S+", o, re.IGNORECASE)
if not dev:
return False
dev = dev[0].split()[-1]
# Send an ARP request
o = commands.getoutput("%s -f -c 3 -I %s %s" %
(find_command("arping"), dev, ip))
return bool(regex.search(o))
# Utility functions for dealing with external processes
def find_command(cmd):
for dir in ["/usr/local/sbin", "/usr/local/bin",
"/usr/sbin", "/usr/bin", "/sbin", "/bin"]:
file = os.path.join(dir, cmd)
if os.path.exists(file):
return file
raise ValueError('Missing command: %s' % cmd)
def pid_exists(pid):
"""
Return True if a given PID exists.
@param pid: Process ID number.
"""
try:
os.kill(pid, 0)
return True
except Exception:
return False
def safe_kill(pid, signal):
"""
Attempt to send a signal to a given process that may or may not exist.
    @param pid: Process ID number.
    @param signal: Signal number.
"""
try:
os.kill(pid, signal)
return True
except Exception:
return False
def kill_process_tree(pid, sig=signal.SIGKILL):
"""Signal a process and all of its children.
If the process does not exist -- return.
@param pid: The pid of the process to signal.
@param sig: The signal to send to the processes.
"""
if not safe_kill(pid, signal.SIGSTOP):
return
children = commands.getoutput("ps --ppid=%d -o pid=" % pid).split()
for child in children:
kill_process_tree(int(child), sig)
safe_kill(pid, sig)
safe_kill(pid, signal.SIGCONT)
def check_kvm_source_dir(source_dir):
"""
    Inspects the kvm source directory and verifies its layout. On some
    occasions the build may be dependent on the source directory layout.
The reason why the return codes are numbers is that we might have more
changes on the source directory layout, so it's not scalable to just use
strings like 'old_repo', 'new_repo' and such.
@param source_dir: Source code path that will be inspected.
"""
os.chdir(source_dir)
has_qemu_dir = os.path.isdir('qemu')
has_kvm_dir = os.path.isdir('kvm')
if has_qemu_dir:
logging.debug("qemu directory detected, source dir layout 1")
return 1
if has_kvm_dir and not has_qemu_dir:
logging.debug("kvm directory detected, source dir layout 2")
return 2
else:
raise error.TestError("Unknown source dir layout, cannot proceed.")
# Functions and classes used for logging into guests and transferring files
class LoginError(Exception):
def __init__(self, msg, output):
Exception.__init__(self, msg, output)
self.msg = msg
self.output = output
def __str__(self):
return "%s (output: %r)" % (self.msg, self.output)
class LoginAuthenticationError(LoginError):
pass
class LoginTimeoutError(LoginError):
def __init__(self, output):
LoginError.__init__(self, "Login timeout expired", output)
class LoginProcessTerminatedError(LoginError):
def __init__(self, status, output):
LoginError.__init__(self, None, output)
self.status = status
def __str__(self):
return ("Client process terminated (status: %s, output: %r)" %
(self.status, self.output))
class LoginBadClientError(LoginError):
def __init__(self, client):
LoginError.__init__(self, None, None)
self.client = client
def __str__(self):
return "Unknown remote shell client: %r" % self.client
class SCPError(Exception):
def __init__(self, msg, output):
Exception.__init__(self, msg, output)
self.msg = msg
self.output = output
def __str__(self):
return "%s (output: %r)" % (self.msg, self.output)
class SCPAuthenticationError(SCPError):
pass
class SCPAuthenticationTimeoutError(SCPAuthenticationError):
def __init__(self, output):
SCPAuthenticationError.__init__(self, "Authentication timeout expired",
output)
class SCPTransferTimeoutError(SCPError):
def __init__(self, output):
SCPError.__init__(self, "Transfer timeout expired", output)
class SCPTransferFailedError(SCPError):
def __init__(self, status, output):
SCPError.__init__(self, None, output)
self.status = status
def __str__(self):
return ("SCP transfer failed (status: %s, output: %r)" %
(self.status, self.output))
def _remote_login(session, username, password, prompt, timeout=10, debug=False):
"""
Log into a remote host (guest) using SSH or Telnet. Wait for questions
and provide answers. If timeout expires while waiting for output from the
child (e.g. a password prompt or a shell prompt) -- fail.
@brief: Log into a remote host (guest) using SSH or Telnet.
@param session: An Expect or ShellSession instance to operate on
@param username: The username to send in reply to a login prompt
@param password: The password to send in reply to a password prompt
@param prompt: The shell prompt that indicates a successful login
@param timeout: The maximal time duration (in seconds) to wait for each
step of the login procedure (i.e. the "Are you sure" prompt, the
password prompt, the shell prompt, etc)
@raise LoginTimeoutError: If timeout expires
@raise LoginAuthenticationError: If authentication fails
@raise LoginProcessTerminatedError: If the client terminates during login
@raise LoginError: If some other error occurs
"""
password_prompt_count = 0
login_prompt_count = 0
while True:
try:
match, text = session.read_until_last_line_matches(
[r"[Aa]re you sure", r"[Pp]assword:\s*$", r"[Ll]ogin:\s*$",
r"[Cc]onnection.*closed", r"[Cc]onnection.*refused",
r"[Pp]lease wait", r"[Ww]arning", prompt],
timeout=timeout, internal_timeout=0.5)
if match == 0: # "Are you sure you want to continue connecting"
if debug:
logging.debug("Got 'Are you sure...', sending 'yes'")
session.sendline("yes")
continue
elif match == 1: # "password:"
if password_prompt_count == 0:
if debug:
logging.debug("Got password prompt, sending '%s'", password)
session.sendline(password)
password_prompt_count += 1
continue
else:
raise LoginAuthenticationError("Got password prompt twice",
text)
elif match == 2: # "login:"
if login_prompt_count == 0 and password_prompt_count == 0:
if debug:
logging.debug("Got username prompt; sending '%s'", username)
session.sendline(username)
login_prompt_count += 1
continue
else:
if login_prompt_count > 0:
msg = "Got username prompt twice"
else:
msg = "Got username prompt after password prompt"
raise LoginAuthenticationError(msg, text)
elif match == 3: # "Connection closed"
raise LoginError("Client said 'connection closed'", text)
elif match == 4: # "Connection refused"
raise LoginError("Client said 'connection refused'", text)
elif match == 5: # "Please wait"
if debug:
logging.debug("Got 'Please wait'")
timeout = 30
continue
elif match == 6: # "Warning added RSA"
if debug:
logging.debug("Got 'Warning added RSA to known host list")
continue
elif match == 7: # prompt
if debug:
logging.debug("Got shell prompt -- logged in")
break
except aexpect.ExpectTimeoutError, e:
raise LoginTimeoutError(e.output)
except aexpect.ExpectProcessTerminatedError, e:
raise LoginProcessTerminatedError(e.status, e.output)
def remote_login(client, host, port, username, password, prompt, linesep="\n",
log_filename=None, timeout=10):
"""
Log into a remote host (guest) using SSH/Telnet/Netcat.
@param client: The client to use ('ssh', 'telnet' or 'nc')
@param host: Hostname or IP address
@param port: Port to connect to
@param username: Username (if required)
@param password: Password (if required)
@param prompt: Shell prompt (regular expression)
@param linesep: The line separator to use when sending lines
(e.g. '\\n' or '\\r\\n')
@param log_filename: If specified, log all output to this file
@param timeout: The maximal time duration (in seconds) to wait for
each step of the login procedure (i.e. the "Are you sure" prompt
or the password prompt)
@raise LoginBadClientError: If an unknown client is requested
@raise: Whatever _remote_login() raises
@return: A ShellSession object.
"""
if client == "ssh":
cmd = ("ssh -o UserKnownHostsFile=/dev/null "
"-o PreferredAuthentications=password -p %s %s@%s" %
(port, username, host))
elif client == "telnet":
cmd = "telnet -l %s %s %s" % (username, host, port)
elif client == "nc":
cmd = "nc %s %s" % (host, port)
else:
raise LoginBadClientError(client)
logging.debug("Login command: '%s'", cmd)
session = aexpect.ShellSession(cmd, linesep=linesep, prompt=prompt)
try:
_remote_login(session, username, password, prompt, timeout)
except Exception:
session.close()
raise
if log_filename:
session.set_output_func(log_line)
session.set_output_params((log_filename,))
return session
def wait_for_login(client, host, port, username, password, prompt, linesep="\n",
log_filename=None, timeout=240, internal_timeout=10):
"""
Make multiple attempts to log into a remote host (guest) until one succeeds
or timeout expires.
@param timeout: Total time duration to wait for a successful login
@param internal_timeout: The maximal time duration (in seconds) to wait for
each step of the login procedure (e.g. the "Are you sure" prompt
or the password prompt)
@see: remote_login()
@raise: Whatever remote_login() raises
@return: A ShellSession object.
"""
logging.debug("Attempting to log into %s:%s using %s (timeout %ds)",
host, port, client, timeout)
end_time = time.time() + timeout
while time.time() < end_time:
try:
return remote_login(client, host, port, username, password, prompt,
linesep, log_filename, internal_timeout)
except LoginError, e:
logging.debug(e)
time.sleep(2)
# Timeout expired; try one more time but don't catch exceptions
return remote_login(client, host, port, username, password, prompt,
linesep, log_filename, internal_timeout)
def _remote_scp(session, password_list, transfer_timeout=600, login_timeout=20):
"""
Transfer file(s) to a remote host (guest) using SCP. Wait for questions
and provide answers. If login_timeout expires while waiting for output
from the child (e.g. a password prompt), fail. If transfer_timeout expires
while waiting for the transfer to complete, fail.
@brief: Transfer files using SCP, given a command line.
@param session: An Expect or ShellSession instance to operate on
@param password_list: Password list to send in reply to the password prompt
@param transfer_timeout: The time duration (in seconds) to wait for the
transfer to complete.
@param login_timeout: The maximal time duration (in seconds) to wait for
each step of the login procedure (i.e. the "Are you sure" prompt or
the password prompt)
@raise SCPAuthenticationError: If authentication fails
@raise SCPTransferTimeoutError: If the transfer fails to complete in time
@raise SCPTransferFailedError: If the process terminates with a nonzero
exit code
@raise SCPError: If some other error occurs
"""
password_prompt_count = 0
timeout = login_timeout
authentication_done = False
scp_type = len(password_list)
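    # One password means a plain local<->remote copy; two passwords (as
    # built by scp_between_remotes()) mean a remote-to-remote copy, where
    # scp prompts for each end's password in turn.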
while True:
try:
match, text = session.read_until_last_line_matches(
[r"[Aa]re you sure", r"[Pp]assword:\s*$", r"lost connection"],
timeout=timeout, internal_timeout=0.5)
if match == 0: # "Are you sure you want to continue connecting"
logging.debug("Got 'Are you sure...', sending 'yes'")
session.sendline("yes")
continue
elif match == 1: # "password:"
if password_prompt_count == 0:
logging.debug("Got password prompt, sending '%s'" %
password_list[password_prompt_count])
session.sendline(password_list[password_prompt_count])
password_prompt_count += 1
timeout = transfer_timeout
if scp_type == 1:
authentication_done = True
continue
elif password_prompt_count == 1 and scp_type == 2:
logging.debug("Got password prompt, sending '%s'" %
password_list[password_prompt_count])
session.sendline(password_list[password_prompt_count])
password_prompt_count += 1
timeout = transfer_timeout
authentication_done = True
continue
else:
raise SCPAuthenticationError("Got password prompt twice",
text)
elif match == 2: # "lost connection"
raise SCPError("SCP client said 'lost connection'", text)
except aexpect.ExpectTimeoutError, e:
if authentication_done:
raise SCPTransferTimeoutError(e.output)
else:
raise SCPAuthenticationTimeoutError(e.output)
except aexpect.ExpectProcessTerminatedError, e:
if e.status == 0:
logging.debug("SCP process terminated with status 0")
break
else:
raise SCPTransferFailedError(e.status, e.output)
def remote_scp(command, password_list, log_filename=None, transfer_timeout=600,
login_timeout=20):
"""
Transfer file(s) to a remote host (guest) using SCP.
@brief: Transfer files using SCP, given a command line.
@param command: The command to execute
(e.g. "scp -r foobar root@localhost:/tmp/").
@param password_list: Password list to send in reply to a password prompt.
@param log_filename: If specified, log all output to this file
@param transfer_timeout: The time duration (in seconds) to wait for the
transfer to complete.
@param login_timeout: The maximal time duration (in seconds) to wait for
each step of the login procedure (i.e. the "Are you sure" prompt
or the password prompt)
@raise: Whatever _remote_scp() raises
"""
logging.debug("Trying to SCP with command '%s', timeout %ss",
command, transfer_timeout)
if log_filename:
output_func = log_line
output_params = (log_filename,)
else:
output_func = None
output_params = ()
session = aexpect.Expect(command,
output_func=output_func,
output_params=output_params)
try:
_remote_scp(session, password_list, transfer_timeout, login_timeout)
finally:
session.close()
def scp_to_remote(host, port, username, password, local_path, remote_path,
log_filename=None, timeout=600):
"""
Copy files to a remote host (guest) through scp.
@param host: Hostname or IP address
@param username: Username (if required)
@param password: Password (if required)
@param local_path: Path on the local machine where we are copying from
@param remote_path: Path on the remote machine where we are copying to
@param log_filename: If specified, log all output to this file
@param timeout: The time duration (in seconds) to wait for the transfer
to complete.
@raise: Whatever remote_scp() raises
"""
command = ("scp -v -o UserKnownHostsFile=/dev/null "
"-o PreferredAuthentications=password -r -P %s %s %s@%s:%s" %
(port, local_path, username, host, remote_path))
password_list = []
password_list.append(password)
return remote_scp(command, password_list, log_filename, timeout)
def scp_from_remote(host, port, username, password, remote_path, local_path,
log_filename=None, timeout=600):
"""
Copy files from a remote host (guest).
@param host: Hostname or IP address
@param username: Username (if required)
@param password: Password (if required)
@param local_path: Path on the local machine where we are copying from
@param remote_path: Path on the remote machine where we are copying to
@param log_filename: If specified, log all output to this file
@param timeout: The time duration (in seconds) to wait for the transfer
to complete.
@raise: Whatever remote_scp() raises
"""
command = ("scp -v -o UserKnownHostsFile=/dev/null "
"-o PreferredAuthentications=password -r -P %s %s@%s:%s %s" %
(port, username, host, remote_path, local_path))
password_list = []
password_list.append(password)
remote_scp(command, password_list, log_filename, timeout)
def scp_between_remotes(src, dst, port, s_passwd, d_passwd, s_name, d_name,
s_path, d_path, log_filename=None, timeout=600):
"""
Copy files from a remote host (guest) to another remote host (guest).
@param src/dst: Hostname or IP address of src and dst
@param s_name/d_name: Username (if required)
@param s_passwd/d_passwd: Password (if required)
@param s_path/d_path: Path on the remote machine where we are copying
from/to
@param log_filename: If specified, log all output to this file
@param timeout: The time duration (in seconds) to wait for the transfer
to complete.
@return: True on success and False on failure.
"""
command = ("scp -v -o UserKnownHostsFile=/dev/null -o "
"PreferredAuthentications=password -r -P %s %s@%s:%s %s@%s:%s" %
(port, s_name, src, s_path, d_name, dst, d_path))
password_list = []
password_list.append(s_passwd)
password_list.append(d_passwd)
return remote_scp(command, password_list, log_filename, timeout)
def copy_files_to(address, client, username, password, port, local_path,
remote_path, log_filename=None, verbose=False, timeout=600):
"""
Copy files to a remote host (guest) using the selected client.
@param client: Type of transfer client
@param username: Username (if required)
    @param password: Password (if required)
@param local_path: Path on the local machine where we are copying from
@param remote_path: Path on the remote machine where we are copying to
@param address: Address of remote host(guest)
@param log_filename: If specified, log all output to this file (SCP only)
@param verbose: If True, log some stats using logging.debug (RSS only)
@param timeout: The time duration (in seconds) to wait for the transfer to
complete.
@raise: Whatever remote_scp() raises
"""
if client == "scp":
scp_to_remote(address, port, username, password, local_path,
remote_path, log_filename, timeout)
elif client == "rss":
log_func = None
if verbose:
log_func = logging.debug
c = rss_client.FileUploadClient(address, port, log_func)
c.upload(local_path, remote_path, timeout)
c.close()
def copy_files_from(address, client, username, password, port, remote_path,
local_path, log_filename=None, verbose=False, timeout=600):
"""
Copy files from a remote host (guest) using the selected client.
@param client: Type of transfer client
@param username: Username (if required)
    @param password: Password (if required)
@param remote_path: Path on the remote machine where we are copying from
@param local_path: Path on the local machine where we are copying to
@param address: Address of remote host(guest)
@param log_filename: If specified, log all output to this file (SCP only)
@param verbose: If True, log some stats using logging.debug (RSS only)
@param timeout: The time duration (in seconds) to wait for the transfer to
complete.
@raise: Whatever remote_scp() raises
"""
if client == "scp":
scp_from_remote(address, port, username, password, remote_path,
local_path, log_filename, timeout)
elif client == "rss":
log_func = None
if verbose:
log_func = logging.debug
c = rss_client.FileDownloadClient(address, port, log_func)
c.download(remote_path, local_path, timeout)
c.close()
# The following are utility functions related to ports.
def is_port_free(port, address):
"""
Return True if the given port is available for use.
    @param port: Port number
    @param address: Address of the host to check; for "localhost" the port is
            probed by binding to it, for any other address by attempting a
            connection to it
"""
try:
s = socket.socket()
#s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if address == "localhost":
s.bind(("localhost", port))
free = True
else:
s.connect((address, port))
free = False
except socket.error:
if address == "localhost":
free = False
else:
free = True
s.close()
return free
def find_free_port(start_port, end_port, address="localhost"):
"""
    Return a free host port in the range [start_port, end_port).
@param start_port: First port that will be checked.
@param end_port: Port immediately after the last one that will be checked.
"""
for i in range(start_port, end_port):
if is_port_free(i, address):
return i
return None
def find_free_ports(start_port, end_port, count, address="localhost"):
"""
    Return up to count free host ports in the range [start_port, end_port).
    @param count: Maximum number of free ports to find.
@param start_port: First port that will be checked.
@param end_port: Port immediately after the last one that will be checked.
"""
ports = []
i = start_port
while i < end_port and count > 0:
if is_port_free(i, address):
ports.append(i)
count -= 1
i += 1
return ports
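# Example usage of the port helpers (a minimal sketch; the port ranges are
# illustrative):
#
#   vnc_port = find_free_port(5900, 6000)
#   if vnc_port is None:
#       logging.error("No free port found in range [5900, 6000)")
#   migration_ports = find_free_ports(5200, 5300, count=4)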
# An easy way to log lines to files when the logging system can't be used
_open_log_files = {}
_log_file_dir = "/tmp"
def log_line(filename, line):
"""
Write a line to a file. '\n' is appended to the line.
@param filename: Path of file to write to, either absolute or relative to
the dir set by set_log_file_dir().
@param line: Line to write.
"""
global _open_log_files, _log_file_dir
if filename not in _open_log_files:
path = get_path(_log_file_dir, filename)
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
_open_log_files[filename] = open(path, "w")
timestr = time.strftime("%Y-%m-%d %H:%M:%S")
_open_log_files[filename].write("%s: %s\n" % (timestr, line))
_open_log_files[filename].flush()
def set_log_file_dir(dir):
"""
Set the base directory for log files created by log_line().
@param dir: Directory for log files.
"""
global _log_file_dir
_log_file_dir = dir
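# Example usage (a minimal sketch; the directory and file names are
# illustrative):
#
#   set_log_file_dir("/tmp/my_test_logs")
#   log_line("serial.log", "guest booted")
#   # -> writes a timestamped line to /tmp/my_test_logs/serial.log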
# The following are miscellaneous utility functions.
def get_path(base_path, user_path):
"""
Translate a user specified path to a real path.
If user_path is relative, append it to base_path.
If user_path is absolute, return it as is.
@param base_path: The base path of relative user specified paths.
@param user_path: The user specified path.
"""
if os.path.isabs(user_path):
return user_path
else:
return os.path.join(base_path, user_path)
def generate_random_string(length):
"""
Return a random string using alphanumeric characters.
    @param length: length of the string that will be generated.
"""
r = random.SystemRandom()
str = ""
chars = string.letters + string.digits
while length > 0:
str += r.choice(chars)
length -= 1
return str
def generate_random_id():
"""
Return a random string suitable for use as a qemu id.
"""
return "id" + generate_random_string(6)
def generate_tmp_file_name(file, ext=None, dir='/tmp/'):
"""
Returns a temporary file name. The file is not created.
"""
while True:
file_name = (file + '-' + time.strftime("%Y%m%d-%H%M%S-") +
generate_random_string(4))
if ext:
file_name += '.' + ext
file_name = os.path.join(dir, file_name)
if not os.path.exists(file_name):
break
return file_name
def format_str_for_message(str):
"""
Format str so that it can be appended to a message.
If str consists of one line, prefix it with a space.
If str consists of multiple lines, prefix it with a newline.
@param str: string that will be formatted.
"""
lines = str.splitlines()
num_lines = len(lines)
str = "\n".join(lines)
if num_lines == 0:
return ""
elif num_lines == 1:
return " " + str
else:
return "\n" + str
def wait_for(func, timeout, first=0.0, step=1.0, text=None):
"""
If func() evaluates to True before timeout expires, return the
value of func(). Otherwise return None.
    @brief: Wait until func() evaluates to True.
    @param func: Function that will be evaluated.
    @param timeout: Timeout in seconds
    @param first: Time to sleep before first attempt
    @param step: Time to sleep between attempts in seconds
    @param text: Text to print while waiting, for debug purposes
"""
start_time = time.time()
end_time = time.time() + timeout
time.sleep(first)
while time.time() < end_time:
if text:
logging.debug("%s (%f secs)", text, (time.time() - start_time))
output = func()
if output:
return output
time.sleep(step)
return None
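# Example usage (a minimal sketch; 'session' stands for a hypothetical object
# with an is_responsive() method, and error is autotest's error module):
#
#   if not wait_for(lambda: session.is_responsive(), timeout=60, first=2.0,
#                   step=3.0, text="Waiting for guest to respond"):
#       raise error.TestFail("Guest did not respond within 60 s")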
def get_hash_from_file(hash_path, dvd_basename):
"""
    Get the hash of a given DVD image from a hash file
    (Hash files are usually named MD5SUM or SHA1SUM and are located inside the
    download directories of the DVDs)
    @param hash_path: Local path to a hash file.
    @param dvd_basename: Basename of a DVD image
"""
hash_file = open(hash_path, 'r')
for line in hash_file.readlines():
if dvd_basename in line:
return line.split()[0]
def run_tests(parser, job):
"""
Runs the sequence of KVM tests based on the list of dictionaries
generated by the configuration system, handling dependencies.
@param parser: Config parser object.
@param job: Autotest job object.
    @return: True if all tests passed, False if any of them failed.
"""
for i, d in enumerate(parser.get_dicts()):
logging.info("Test %4d: %s" % (i + 1, d["shortname"]))
status_dict = {}
failed = False
for dict in parser.get_dicts():
if dict.get("skip") == "yes":
continue
dependencies_satisfied = True
for dep in dict.get("dep"):
for test_name in status_dict.keys():
                if dep not in test_name:
                    continue
                # The only really non-fatal status is WARN; all the others
                # make it unsafe to proceed with the execution of dependent
                # tests
if status_dict[test_name] not in ['GOOD', 'WARN']:
dependencies_satisfied = False
break
test_iterations = int(dict.get("iterations", 1))
test_tag = dict.get("shortname")
if dependencies_satisfied:
# Setting up profilers during test execution.
profilers = dict.get("profilers", "").split()
for profiler in profilers:
job.profilers.add(profiler)
# We need only one execution, profiled, hence we're passing
            # the profile_only parameter to job.run_test_detail().
profile_only = bool(profilers) or None
current_status = job.run_test_detail(dict.get("vm_type"),
params=dict,
tag=test_tag,
iterations=test_iterations,
profile_only=profile_only)
for profiler in profilers:
job.profilers.delete(profiler)
else:
# We will force the test to fail as TestNA during preprocessing
dict['dependency_failed'] = 'yes'
current_status = job.run_test_detail(dict.get("vm_type"),
params=dict,
tag=test_tag,
iterations=test_iterations)
if not current_status:
failed = True
status_dict[dict.get("name")] = current_status
return not failed
def display_attributes(instance):
"""
    Inspects the attributes of a given class instance and displays them;
    convenient for debugging.
"""
logging.debug("Attributes set:")
for member in inspect.getmembers(instance):
name, value = member
attribute = getattr(instance, name)
if not (name.startswith("__") or callable(attribute) or not value):
logging.debug(" %s: %s", name, value)
def get_full_pci_id(pci_id):
"""
Get full PCI ID of pci_id.
@param pci_id: PCI ID of a device.
"""
cmd = "lspci -D | awk '/%s/ {print $1}'" % pci_id
status, full_id = commands.getstatusoutput(cmd)
if status != 0:
return None
return full_id
def get_vendor_from_pci_id(pci_id):
"""
Check out the device vendor ID according to pci_id.
@param pci_id: PCI ID of a device.
"""
cmd = "lspci -n | awk '/%s/ {print $3}'" % pci_id
return re.sub(":", " ", commands.getoutput(cmd))
class Flag(str):
"""
    Class to ease merging and comparing CPU flags.
"""
aliases = {}
def __new__(cls, flag):
if flag in Flag.aliases:
flag = Flag.aliases[flag]
return str.__new__(cls, flag)
def __eq__(self, other):
s = set(self.split("|"))
o = set(other.split("|"))
if s & o:
return True
else:
return False
    def __hash__(self, *args, **kwargs):
        # All Flag instances hash alike, so container lookups fall back to
        # __eq__(), which understands the "a|b" alias syntax
        return 0
kvm_map_flags_to_test = {
Flag('avx') :set(['avx']),
Flag('sse3') :set(['sse3']),
Flag('ssse3') :set(['ssse3']),
Flag('sse4.1|sse4_1|sse4.2|sse4_2'):set(['sse4']),
Flag('aes') :set(['aes','pclmul']),
Flag('pclmuldq') :set(['pclmul']),
Flag('pclmulqdq') :set(['pclmul']),
Flag('rdrand') :set(['rdrand']),
Flag('sse4a') :set(['sse4a']),
Flag('fma4') :set(['fma4']),
Flag('xop') :set(['xop']),
}
kvm_map_flags_aliases = {
'sse4.1' :'sse4_1',
'sse4.2' :'sse4_2',
'pclmulqdq' :'pclmuldq',
}
def kvm_flags_to_stresstests(flags):
"""
    Convert a list of CPU flags to a list of stress tests
    @param flags: list of CPU flags
    @return: comma-prefixed string with the test names
"""
tests = set([])
for f in flags:
tests |= kvm_map_flags_to_test[f]
param = ""
for f in tests:
param += ","+f
return param
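# Example (a minimal sketch; note that flags must be wrapped in Flag so that
# dictionary lookups go through Flag.__eq__):
#
#   flags = map(Flag, ['aes', 'sse4.1'])
#   kvm_flags_to_stresstests(flags)   # -> e.g. ",aes,pclmul,sse4"
#
# The leading comma is intentional: the result is meant to be appended to an
# existing comma-separated list of tests.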
def get_cpu_flags():
"""
Returns a list of the CPU flags
"""
flags_re = re.compile(r'^flags\s*:(.*)')
for line in open('/proc/cpuinfo').readlines():
match = flags_re.match(line)
if match:
return match.groups()[0].split()
return []
def get_cpu_vendor(cpu_flags=[], verbose=True):
"""
Returns the name of the CPU vendor, either intel, amd or unknown
"""
if not cpu_flags:
cpu_flags = get_cpu_flags()
if 'vmx' in cpu_flags:
vendor = 'intel'
elif 'svm' in cpu_flags:
vendor = 'amd'
else:
vendor = 'unknown'
if verbose:
logging.debug("Detected CPU vendor as '%s'", vendor)
return vendor
def get_archive_tarball_name(source_dir, tarball_name, compression):
'''
Get the name for a tarball file, based on source, name and compression
'''
if tarball_name is None:
tarball_name = os.path.basename(source_dir)
if not tarball_name.endswith('.tar'):
tarball_name = '%s.tar' % tarball_name
if compression and not tarball_name.endswith('.%s' % compression):
tarball_name = '%s.%s' % (tarball_name, compression)
return tarball_name
def archive_as_tarball(source_dir, dest_dir, tarball_name=None,
compression='bz2', verbose=True):
'''
Saves the given source directory to the given destination as a tarball
If the name of the archive is omitted, it will be taken from the
source_dir. If it is an absolute path, dest_dir will be ignored. But,
    if both the destination directory and tarball name are given, and the
latter is not an absolute path, they will be combined.
For archiving directory '/tmp' in '/net/server/backup' as file
'tmp.tar.bz2', simply use:
>>> virt_utils.archive_as_tarball('/tmp', '/net/server/backup')
    To save the file with a different name, say 'host1-tmp.tar.bz2'
and save it under '/net/server/backup', use:
>>> virt_utils.archive_as_tarball('/tmp', '/net/server/backup',
'host1-tmp')
To save with gzip compression instead (resulting in the file
'/net/server/backup/host1-tmp.tar.gz'), use:
>>> virt_utils.archive_as_tarball('/tmp', '/net/server/backup',
'host1-tmp', 'gz')
'''
tarball_name = get_archive_tarball_name(source_dir,
tarball_name,
compression)
if not os.path.isabs(tarball_name):
tarball_path = os.path.join(dest_dir, tarball_name)
else:
tarball_path = tarball_name
if verbose:
logging.debug('Archiving %s as %s' % (source_dir,
tarball_path))
os.chdir(os.path.dirname(source_dir))
    tarball = tarfile.open(name=tarball_path, mode='w:%s' % compression)
tarball.add(os.path.basename(source_dir))
tarball.close()
class Thread(threading.Thread):
"""
Run a function in a background thread.
"""
def __init__(self, target, args=(), kwargs={}):
"""
Initialize the instance.
@param target: Function to run in the thread.
@param args: Arguments to pass to target.
@param kwargs: Keyword arguments to pass to target.
"""
threading.Thread.__init__(self)
self._target = target
self._args = args
self._kwargs = kwargs
def run(self):
"""
Run target (passed to the constructor). No point in calling this
function directly. Call start() to make this function run in a new
thread.
"""
self._e = None
self._retval = None
try:
try:
self._retval = self._target(*self._args, **self._kwargs)
except Exception:
self._e = sys.exc_info()
raise
finally:
# Avoid circular references (start() may be called only once so
# it's OK to delete these)
del self._target, self._args, self._kwargs
def join(self, timeout=None, suppress_exception=False):
"""
Join the thread. If target raised an exception, re-raise it.
Otherwise, return the value returned by target.
@param timeout: Timeout value to pass to threading.Thread.join().
@param suppress_exception: If True, don't re-raise the exception.
"""
threading.Thread.join(self, timeout)
try:
if self._e:
if not suppress_exception:
# Because the exception was raised in another thread, we
# need to explicitly insert the current context into it
s = error.exception_context(self._e[1])
s = error.join_contexts(error.get_context(), s)
error.set_exception_context(self._e[1], s)
raise self._e[0], self._e[1], self._e[2]
else:
return self._retval
finally:
# Avoid circular references (join() may be called multiple times
# so we can't delete these)
self._e = None
self._retval = None
def parallel(targets):
"""
Run multiple functions in parallel.
@param targets: A sequence of tuples or functions. If it's a sequence of
tuples, each tuple will be interpreted as (target, args, kwargs) or
(target, args) or (target,) depending on its length. If it's a
sequence of functions, the functions will be called without
arguments.
@return: A list of the values returned by the functions called.
"""
threads = []
for target in targets:
        if isinstance(target, (tuple, list)):
t = Thread(*target)
else:
t = Thread(target)
threads.append(t)
t.start()
return [t.join() for t in threads]
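# Example usage (a minimal sketch; f, g and h stand for hypothetical
# functions):
#
#   results = parallel([(f, (1, 2)), (g, ("foo",), {"bar": True}), h])
#
# Each tuple is unpacked as (target, args, kwargs); bare callables are
# invoked without arguments. An exception raised in any thread is re-raised
# by the corresponding join().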
class VirtLoggingConfig(logging_config.LoggingConfig):
"""
Used with the sole purpose of providing convenient logging setup
for the KVM test auxiliary programs.
"""
def configure_logging(self, results_dir=None, verbose=False):
super(VirtLoggingConfig, self).configure_logging(use_console=True,
verbose=verbose)
class PciAssignable(object):
"""
    Request PCI assignable devices on the host. It will check whether to
    request PFs (Physical Functions) or VFs (Virtual Functions).
"""
def __init__(self, type="vf", driver=None, driver_option=None,
names=None, devices_requested=None):
"""
Initialize parameter 'type' which could be:
vf: Virtual Functions
pf: Physical Function (actual hardware)
        mixed: Both VFs and PFs
        When passing through physical NIC cards, we need to specify which
        devices are to be assigned, e.g. 'eth1 eth2'.
        When passing through Virtual Functions, we need to specify how many
        VFs are going to be assigned, e.g. passthrough_count = 8 and max_vfs
        in the config file.
@param type: PCI device type.
@param driver: Kernel module for the PCI assignable device.
@param driver_option: Module option to specify the maximum number of
VFs (eg 'max_vfs=7')
@param names: Physical NIC cards correspondent network interfaces,
e.g.'eth1 eth2 ...'
@param devices_requested: Number of devices being requested.
"""
self.type = type
self.driver = driver
self.driver_option = driver_option
if names:
self.name_list = names.split()
if devices_requested:
self.devices_requested = int(devices_requested)
else:
self.devices_requested = None
def _get_pf_pci_id(self, name, search_str):
"""
Get the PF PCI ID according to name.
@param name: Name of the PCI device.
@param search_str: Search string to be used on lspci.
"""
cmd = "ethtool -i %s | awk '/bus-info/ {print $2}'" % name
s, pci_id = commands.getstatusoutput(cmd)
if not (s or "Cannot get driver information" in pci_id):
return pci_id[5:]
cmd = "lspci | awk '/%s/ {print $1}'" % search_str
        pci_ids = commands.getoutput(cmd).splitlines()
nic_id = int(re.search('[0-9]+', name).group(0))
if (len(pci_ids) - 1) < nic_id:
return None
return pci_ids[nic_id]
def _release_dev(self, pci_id):
"""
Release a single PCI device.
@param pci_id: PCI ID of a given PCI device.
"""
base_dir = "/sys/bus/pci"
full_id = get_full_pci_id(pci_id)
vendor_id = get_vendor_from_pci_id(pci_id)
drv_path = os.path.join(base_dir, "devices/%s/driver" % full_id)
if 'pci-stub' in os.readlink(drv_path):
cmd = "echo '%s' > %s/new_id" % (vendor_id, drv_path)
if os.system(cmd):
return False
stub_path = os.path.join(base_dir, "drivers/pci-stub")
cmd = "echo '%s' > %s/unbind" % (full_id, stub_path)
if os.system(cmd):
return False
driver = self.dev_drivers[pci_id]
cmd = "echo '%s' > %s/bind" % (full_id, driver)
if os.system(cmd):
return False
return True
def get_vf_devs(self):
"""
Catch all VFs PCI IDs.
        @return: List with all PCI IDs for the Virtual Functions available
"""
if not self.sr_iov_setup():
return []
cmd = "lspci | awk '/Virtual Function/ {print $1}'"
return commands.getoutput(cmd).split()
def get_pf_devs(self):
"""
Catch all PFs PCI IDs.
@return: List with all PCI IDs for the physical hardware requested
"""
pf_ids = []
for name in self.name_list:
pf_id = self._get_pf_pci_id(name, "Ethernet")
if not pf_id:
continue
pf_ids.append(pf_id)
return pf_ids
def get_devs(self, count):
"""
Check out all devices' PCI IDs according to their name.
        @param count: number of PCI devices needed for passthrough
@return: a list of all devices' PCI IDs
"""
if self.type == "vf":
vf_ids = self.get_vf_devs()
elif self.type == "pf":
vf_ids = self.get_pf_devs()
elif self.type == "mixed":
vf_ids = self.get_vf_devs()
vf_ids.extend(self.get_pf_devs())
return vf_ids[0:count]
def get_vfs_count(self):
"""
Get VFs count number according to lspci.
"""
        # FIXME: Need to think out a method to identify which
        # 'virtual function' belongs to which physical card, considering
        # that the host may have more than one 82576 card. PCI_ID?
cmd = "lspci | grep 'Virtual Function' | wc -l"
return int(commands.getoutput(cmd))
def check_vfs_count(self):
"""
        Check VFs count number according to the parameter driver_option.
"""
# Network card 82576 has two network interfaces and each can be
# virtualized up to 7 virtual functions, therefore we multiply
# two for the value of driver_option 'max_vfs'.
        expected_count = int(re.findall("(\d+)", self.driver_option)[0]) * 2
        return (self.get_vfs_count() == expected_count)
def is_binded_to_stub(self, full_id):
"""
        Verify whether the device with full_id is already bound to pci-stub.
@param full_id: Full ID for the given PCI device
"""
base_dir = "/sys/bus/pci"
stub_path = os.path.join(base_dir, "drivers/pci-stub")
        return os.path.exists(os.path.join(stub_path, full_id))
def sr_iov_setup(self):
"""
Ensure the PCI device is working in sr_iov mode.
        Check if the PCI hardware device driver is loaded with the appropriate
        parameters (number of VFs), and if it's not, perform setup.
        @return: True if the setup was completed successfully, False otherwise.
"""
re_probe = False
s, o = commands.getstatusoutput('lsmod | grep %s' % self.driver)
if s:
re_probe = True
elif not self.check_vfs_count():
os.system("modprobe -r %s" % self.driver)
re_probe = True
else:
return True
# Re-probe driver with proper number of VFs
if re_probe:
cmd = "modprobe %s %s" % (self.driver, self.driver_option)
logging.info("Loading the driver '%s' with option '%s'",
self.driver, self.driver_option)
s, o = commands.getstatusoutput(cmd)
if s:
return False
return True
def request_devs(self):
"""
Implement setup process: unbind the PCI device and then bind it
to the pci-stub driver.
@return: a list of successfully requested devices' PCI IDs.
"""
base_dir = "/sys/bus/pci"
stub_path = os.path.join(base_dir, "drivers/pci-stub")
self.pci_ids = self.get_devs(self.devices_requested)
logging.debug("The following pci_ids were found: %s", self.pci_ids)
requested_pci_ids = []
self.dev_drivers = {}
# Setup all devices specified for assignment to guest
for pci_id in self.pci_ids:
full_id = get_full_pci_id(pci_id)
if not full_id:
continue
drv_path = os.path.join(base_dir, "devices/%s/driver" % full_id)
dev_prev_driver = os.path.realpath(os.path.join(drv_path,
os.readlink(drv_path)))
self.dev_drivers[pci_id] = dev_prev_driver
            # Check whether the device driver has already been bound to the stub
if not self.is_binded_to_stub(full_id):
logging.debug("Binding device %s to stub", full_id)
vendor_id = get_vendor_from_pci_id(pci_id)
stub_new_id = os.path.join(stub_path, 'new_id')
unbind_dev = os.path.join(drv_path, 'unbind')
stub_bind = os.path.join(stub_path, 'bind')
info_write_to_files = [(vendor_id, stub_new_id),
(full_id, unbind_dev),
(full_id, stub_bind)]
for content, file in info_write_to_files:
try:
utils.open_write_close(file, content)
except IOError:
logging.debug("Failed to write %s to file %s", content,
file)
continue
if not self.is_binded_to_stub(full_id):
logging.error("Binding device %s to stub failed", pci_id)
continue
else:
logging.debug("Device %s already binded to stub", pci_id)
requested_pci_ids.append(pci_id)
self.pci_ids = requested_pci_ids
return self.pci_ids
def release_devs(self):
"""
Release all PCI devices currently assigned to VMs back to the
virtualization host.
"""
try:
for pci_id in self.dev_drivers:
if not self._release_dev(pci_id):
logging.error("Failed to release device %s to host", pci_id)
else:
logging.info("Released device %s successfully", pci_id)
except Exception:
return
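# Example usage (a minimal sketch; the driver name and module option are
# illustrative):
#
#   pa = PciAssignable(type="vf", driver="igb", driver_option="max_vfs=7",
#                      devices_requested=2)
#   pci_ids = pa.request_devs()   # bind the devices to pci-stub
#   ...                           # start the VM(s) with the assigned devices
#   pa.release_devs()             # hand the devices back to the host drivers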
class KojiClient(object):
"""
    Establishes a connection with the build system, either koji or brew.
    This class provides convenience methods to retrieve information on packages
    and the packages themselves hosted on the build system. Packages should be
    specified in the KojiPkgSpec syntax.
"""
    CMD_LOOKUP_ORDER = ['/usr/bin/brew', '/usr/bin/koji']
CONFIG_MAP = {'/usr/bin/brew': '/etc/brewkoji.conf',
'/usr/bin/koji': '/etc/koji.conf'}
def __init__(self, cmd=None):
"""
Verifies whether the system has koji or brew installed, then loads
the configuration file that will be used to download the files.
@type cmd: string
        @param cmd: Optional command name, either 'brew' or 'koji'. If not
                    set, get_default_command() is used to look for
                    one of them.
@raise: ValueError
"""
if not KOJI_INSTALLED:
raise ValueError('No koji/brew installed on the machine')
# Instance variables used by many methods
self.command = None
self.config = None
self.config_options = {}
self.session = None
# Set koji command or get default
if cmd is None:
self.command = self.get_default_command()
else:
self.command = cmd
# Check koji command
if not self.is_command_valid():
raise ValueError('Koji command "%s" is not valid' % self.command)
# Assuming command is valid, set configuration file and read it
self.config = self.CONFIG_MAP[self.command]
self.read_config()
# Setup koji session
server_url = self.config_options['server']
session_options = self.get_session_options()
self.session = koji.ClientSession(server_url,
session_options)
def read_config(self, check_is_valid=True):
'''
Reads options from the Koji configuration file
By default it checks if the koji configuration is valid
        @type check_is_valid: boolean
        @param check_is_valid: whether to include a check on the configuration
@raises: ValueError
@returns: None
'''
if check_is_valid:
if not self.is_config_valid():
raise ValueError('Koji config "%s" is not valid' % self.config)
config = ConfigParser.ConfigParser()
config.read(self.config)
basename = os.path.basename(self.command)
for name, value in config.items(basename):
self.config_options[name] = value
def get_session_options(self):
'''
        Filter only options necessary for setting up a koji client session
@returns: only the options used for session setup
'''
session_options = {}
for name, value in self.config_options.items():
if name in ('user', 'password', 'debug_xmlrpc', 'debug'):
session_options[name] = value
return session_options
def is_command_valid(self):
'''
Checks if the currently set koji command is valid
@returns: True or False
'''
koji_command_ok = True
if not os.path.isfile(self.command):
logging.error('Koji command "%s" is not a regular file',
self.command)
koji_command_ok = False
if not os.access(self.command, os.X_OK):
logging.warn('Koji command "%s" is not executable: this is '
'not fatal but indicates an unexpected situation',
self.command)
if not self.command in self.CONFIG_MAP.keys():
logging.error('Koji command "%s" does not have a configuration '
'file associated to it', self.command)
koji_command_ok = False
return koji_command_ok
def is_config_valid(self):
'''
Checks if the currently set koji configuration is valid
@returns: True or False
'''
koji_config_ok = True
if not os.path.isfile(self.config):
logging.error('Koji config "%s" is not a regular file', self.config)
koji_config_ok = False
if not os.access(self.config, os.R_OK):
logging.error('Koji config "%s" is not readable', self.config)
koji_config_ok = False
config = ConfigParser.ConfigParser()
config.read(self.config)
basename = os.path.basename(self.command)
if not config.has_section(basename):
logging.error('Koji configuration file "%s" does not have a '
'section "%s", named after the base name of the '
'currently set koji command "%s"', self.config,
basename, self.command)
koji_config_ok = False
return koji_config_ok
def get_default_command(self):
'''
        Looks for the koji or brew binaries on the system
        Systems with plain koji usually don't have a brew cmd, while systems
        with brew have *both* koji and brew utilities. So we look for brew
        first, and if found, we consider that the system is configured for
        brew. If not, we consider this a system with plain koji.
@returns: either koji or brew command line executable path, or None
'''
koji_command = None
for command in self.CMD_LOOKUP_ORDER:
if os.path.isfile(command):
koji_command = command
break
else:
koji_command_basename = os.path.basename(command)
try:
koji_command = os_dep.command(koji_command_basename)
break
except ValueError:
pass
return koji_command
def get_pkg_info(self, pkg):
'''
Returns information from Koji on the package
@type pkg: KojiPkgSpec
@param pkg: information about the package, as a KojiPkgSpec instance
@returns: information from Koji about the specified package
'''
info = {}
if pkg.build is not None:
info = self.session.getBuild(int(pkg.build))
elif pkg.tag is not None and pkg.package is not None:
builds = self.session.listTagged(pkg.tag,
latest=True,
inherit=True,
package=pkg.package)
if builds:
info = builds[0]
return info
def is_pkg_valid(self, pkg):
'''
Checks if this package is altogether valid on Koji
This verifies if the build or tag specified in the package
specification actually exist on the Koji server
@returns: True or False
'''
valid = True
if pkg.build:
if not self.is_pkg_spec_build_valid(pkg):
valid = False
elif pkg.tag:
if not self.is_pkg_spec_tag_valid(pkg):
valid = False
else:
valid = False
return valid
def is_pkg_spec_build_valid(self, pkg):
'''
Checks if build is valid on Koji
@param pkg: a Pkg instance
'''
if pkg.build is not None:
info = self.session.getBuild(int(pkg.build))
if info:
return True
return False
def is_pkg_spec_tag_valid(self, pkg):
'''
Checks if tag is valid on Koji
@type pkg: KojiPkgSpec
@param pkg: a package specification
'''
if pkg.tag is not None:
tag = self.session.getTag(pkg.tag)
if tag:
return True
return False
def get_pkg_rpm_info(self, pkg, arch=None):
'''
        Returns a list of information on the RPM packages found on koji
@type pkg: KojiPkgSpec
@param pkg: a package specification
@type arch: string
@param arch: packages built for this architecture, but also including
architecture independent (noarch) packages
'''
if arch is None:
arch = utils.get_arch()
rpms = []
info = self.get_pkg_info(pkg)
if info:
rpms = self.session.listRPMs(buildID=info['id'],
arches=[arch, 'noarch'])
if pkg.subpackages:
rpms = [d for d in rpms if d['name'] in pkg.subpackages]
return rpms
def get_pkg_rpm_names(self, pkg, arch=None):
'''
Gets the names for the RPM packages specified in pkg
@type pkg: KojiPkgSpec
@param pkg: a package specification
@type arch: string
@param arch: packages built for this architecture, but also including
architecture independent (noarch) packages
'''
if arch is None:
arch = utils.get_arch()
rpms = self.get_pkg_rpm_info(pkg, arch)
return [rpm['name'] for rpm in rpms]
def get_pkg_rpm_file_names(self, pkg, arch=None):
'''
Gets the file names for the RPM packages specified in pkg
@type pkg: KojiPkgSpec
@param pkg: a package specification
@type arch: string
@param arch: packages built for this architecture, but also including
architecture independent (noarch) packages
'''
if arch is None:
arch = utils.get_arch()
rpm_names = []
rpms = self.get_pkg_rpm_info(pkg, arch)
for rpm in rpms:
arch_rpm_name = koji.pathinfo.rpm(rpm)
rpm_name = os.path.basename(arch_rpm_name)
rpm_names.append(rpm_name)
return rpm_names
def get_pkg_urls(self, pkg, arch=None):
'''
Gets the urls for the packages specified in pkg
@type pkg: KojiPkgSpec
@param pkg: a package specification
@type arch: string
@param arch: packages built for this architecture, but also including
architecture independent (noarch) packages
'''
info = self.get_pkg_info(pkg)
rpms = self.get_pkg_rpm_info(pkg, arch)
rpm_urls = []
if self.config_options.has_key('pkgurl'):
base_url = self.config_options['pkgurl']
else:
base_url = "%s/%s" % (self.config_options['topurl'],
'packages')
for rpm in rpms:
rpm_name = koji.pathinfo.rpm(rpm)
url = ("%s/%s/%s/%s/%s" % (base_url,
info['package_name'],
info['version'], info['release'],
rpm_name))
rpm_urls.append(url)
return rpm_urls
def get_pkgs(self, pkg, dst_dir, arch=None):
'''
Download the packages
@type pkg: KojiPkgSpec
@param pkg: a package specification
@type dst_dir: string
@param dst_dir: the destination directory, where the downloaded
packages will be saved on
@type arch: string
@param arch: packages built for this architecture, but also including
architecture independent (noarch) packages
'''
rpm_urls = self.get_pkg_urls(pkg, arch)
for url in rpm_urls:
utils.get_file(url,
os.path.join(dst_dir, os.path.basename(url)))
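# Example usage (a minimal sketch; the tag and package names are
# illustrative):
#
#   client = KojiClient()
#   pkg = KojiPkgSpec(tag='dist-f14', package='qemu',
#                     subpackages=['qemu-kvm'])
#   if client.is_pkg_valid(pkg):
#       client.get_pkgs(pkg, dst_dir='/tmp')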
DEFAULT_KOJI_TAG = None
def set_default_koji_tag(tag):
'''
Sets the default tag that will be used
'''
global DEFAULT_KOJI_TAG
DEFAULT_KOJI_TAG = tag
def get_default_koji_tag():
return DEFAULT_KOJI_TAG
class KojiPkgSpec(object):
'''
A package specification syntax parser for Koji
This holds information on either tag or build, and packages to be fetched
    from koji and possibly installed (features external to this class).
New objects can be created either by providing information in the textual
format or by using the actual parameters for tag, build, package and sub-
packages. The textual format is useful for command line interfaces and
configuration files, while using parameters is better for using this in
    a programmatic fashion.
The following sets of examples are interchangeable. Specifying all packages
part of build number 1000:
>>> from kvm_utils import KojiPkgSpec
>>> pkg = KojiPkgSpec('1000')
>>> pkg = KojiPkgSpec(build=1000)
Specifying only a subset of packages of build number 1000:
>>> pkg = KojiPkgSpec('1000:kernel,kernel-devel')
>>> pkg = KojiPkgSpec(build=1000,
subpackages=['kernel', 'kernel-devel'])
Specifying the latest build for the 'kernel' package tagged with 'dist-f14':
>>> pkg = KojiPkgSpec('dist-f14:kernel')
>>> pkg = KojiPkgSpec(tag='dist-f14', package='kernel')
Specifying the 'kernel' package using the default tag:
>>> kvm_utils.set_default_koji_tag('dist-f14')
>>> pkg = KojiPkgSpec('kernel')
>>> pkg = KojiPkgSpec(package='kernel')
If you do not specify a default tag, and give a package name without an
explicit tag, your package specification is considered invalid:
>>> print kvm_utils.get_default_koji_tag()
None
>>> print kvm_utils.KojiPkgSpec('kernel').is_valid()
False
>>> print kvm_utils.KojiPkgSpec(package='kernel').is_valid()
False
'''
SEP = ':'
def __init__(self, text='', tag=None, build=None,
package=None, subpackages=[]):
'''
Instantiates a new KojiPkgSpec object
@type text: string
@param text: a textual representation of a package on Koji that
will be parsed
@type tag: string
@param tag: a koji tag, example: Fedora-14-RELEASE
(see U{http://fedoraproject.org/wiki/Koji#Tags_and_Targets})
@type build: number
@param build: a koji build, example: 1001
(see U{http://fedoraproject.org/wiki/Koji#Koji_Architecture})
@type package: string
@param package: a koji package, example: python
(see U{http://fedoraproject.org/wiki/Koji#Koji_Architecture})
@type subpackages: list of strings
@param subpackages: a list of package names, usually a subset of
the RPM packages generated by a given build
'''
# Set to None to indicate 'not set' (and be able to use 'is')
self.tag = None
self.build = None
self.package = None
self.subpackages = []
self.default_tag = None
# Textual representation takes precedence (most common use case)
if text:
self.parse(text)
else:
self.tag = tag
self.build = build
self.package = package
self.subpackages = subpackages
# Set the default tag, if set, as a fallback
if not self.build and not self.tag:
default_tag = get_default_koji_tag()
if default_tag is not None:
self.tag = default_tag
def parse(self, text):
'''
Parses a textual representation of a package specification
@type text: string
@param text: textual representation of a package in koji
'''
parts = text.count(self.SEP) + 1
if parts == 1:
if text.isdigit():
self.build = text
else:
self.package = text
elif parts == 2:
part1, part2 = text.split(self.SEP)
if part1.isdigit():
self.build = part1
self.subpackages = part2.split(',')
else:
self.tag = part1
self.package = part2
elif parts >= 3:
# Instead of erroring on more arguments, we simply ignore them
# This makes the parser suitable for future syntax additions, such
# as specifying the package architecture
part1, part2, part3 = text.split(self.SEP)[0:3]
self.tag = part1
self.package = part2
self.subpackages = part3.split(',')
def _is_invalid_neither_tag_or_build(self):
'''
Checks if this package is invalid due to not having either a valid
tag or build set, that is, both are empty.
@returns: True if this is invalid and False if it's valid
'''
return (self.tag is None and self.build is None)
def _is_invalid_package_but_no_tag(self):
'''
        Checks if this package is invalid due to having a package name set
        but no tag set.
@returns: True if this is invalid and False if it's valid
'''
return (self.package and not self.tag)
def _is_invalid_subpackages_but_no_main_package(self):
'''
Checks if this package is invalid due to having a tag set (this is Ok)
but specifying subpackage names without specifying the main package
name.
Specifying subpackages without a main package name is only valid when
a build is used instead of a tag.
@returns: True if this is invalid and False if it's valid
'''
return (self.tag and self.subpackages and not self.package)
def is_valid(self):
'''
Checks if this package specification is valid.
Being valid means that it has enough and not conflicting information.
        It does not validate that the packages specified actually exist on
the Koji server.
@returns: True or False
'''
if self._is_invalid_neither_tag_or_build():
return False
elif self._is_invalid_package_but_no_tag():
return False
elif self._is_invalid_subpackages_but_no_main_package():
return False
return True
def describe_invalid(self):
'''
Describes why this is not valid, in a human friendly way
'''
if self._is_invalid_neither_tag_or_build():
            return 'neither a tag nor a build is set, and one of them should be set'
elif self._is_invalid_package_but_no_tag():
return 'package name specified but no tag is set'
elif self._is_invalid_subpackages_but_no_main_package():
return 'subpackages specified but no main package is set'
        return 'unknown reason, seems to be valid'
def describe(self):
'''
Describe this package specification, in a human friendly way
@returns: package specification description
'''
if self.is_valid():
description = ''
if not self.subpackages:
description += 'all subpackages from %s ' % self.package
else:
description += ('only subpackage(s) %s from package %s ' %
(', '.join(self.subpackages), self.package))
if self.build:
description += 'from build %s' % self.build
elif self.tag:
description += 'tagged with %s' % self.tag
else:
                raise ValueError, 'neither build nor tag is set'
return description
else:
return ('Invalid package specification: %s' %
self.describe_invalid())
def to_text(self):
'''
Return the textual representation of this package spec
The output should be consumable by parse() and produce the same
package specification.
We find that it's acceptable to put the currently set default tag
as the package explicit tag in the textual definition for completeness.
@returns: package specification in a textual representation
'''
default_tag = get_default_koji_tag()
if self.build:
if self.subpackages:
return "%s:%s" % (self.build, ",".join(self.subpackages))
else:
return "%s" % self.build
elif self.tag:
if self.subpackages:
return "%s:%s:%s" % (self.tag, self.package,
",".join(self.subpackages))
else:
return "%s:%s" % (self.tag, self.package)
elif default_tag is not None:
# neither build or tag is set, try default_tag as a fallback
if self.subpackages:
return "%s:%s:%s" % (default_tag, self.package,
",".join(self.subpackages))
else:
return "%s:%s" % (default_tag, self.package)
else:
            raise ValueError, 'neither build nor tag is set'
def __repr__(self):
return ("<KojiPkgSpec tag=%s build=%s pkg=%s subpkgs=%s>" %
(self.tag, self.build, self.package,
", ".join(self.subpackages)))
def umount(src, mount_point, type):
"""
    Unmount src from mount_point.
    @param src: mount source
    @param mount_point: mount point
    @param type: file system type
    @return: True if src is not mounted or was unmounted successfully,
            False otherwise.
"""
mount_string = "%s %s %s" % (src, mount_point, type)
if mount_string in file("/etc/mtab").read():
umount_cmd = "umount %s" % mount_point
try:
utils.system(umount_cmd)
return True
except error.CmdError:
return False
else:
logging.debug("%s is not mounted under %s", src, mount_point)
return True
def mount(src, mount_point, type, perm="rw"):
"""
    Mount the src onto mount_point on the host.
    @param src: mount source
    @param mount_point: mount point
    @param type: file system type
    @param perm: mount permission (e.g. "rw" or "ro")
    @return: True on success, False on failure.
"""
umount(src, mount_point, type)
mount_string = "%s %s %s %s" % (src, mount_point, type, perm)
if mount_string in file("/etc/mtab").read():
logging.debug("%s is already mounted in %s with %s",
src, mount_point, perm)
return True
mount_cmd = "mount -t %s %s %s -o %s" % (type, src, mount_point, perm)
try:
utils.system(mount_cmd)
except error.CmdError:
return False
logging.debug("Verify the mount through /etc/mtab")
if mount_string in file("/etc/mtab").read():
logging.debug("%s is successfully mounted", src)
return True
else:
logging.error("Can't find mounted NFS share - /etc/mtab contents \n%s",
file("/etc/mtab").read())
return False
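# Example usage (a minimal sketch; the NFS share and mount point are
# illustrative):
#
#   if mount("nfs-server:/export/isos", "/mnt/isos", "nfs", perm="ro"):
#       ...                                            # use the share
#       umount("nfs-server:/export/isos", "/mnt/isos", "nfs")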
class GitRepoParamHelper(git.GitRepoHelper):
'''
    Helps to deal with git repos specified in cartesian config files
This class attempts to make it simple to manage a git repo, by using a
naming standard that follows this basic syntax:
<prefix>_name_<suffix>
<prefix> is always 'git_repo' and <suffix> sets options for this git repo.
Example for repo named foo:
git_repo_foo_uri = git://git.foo.org/foo.git
git_repo_foo_base_uri = /home/user/code/foo
git_repo_foo_branch = master
git_repo_foo_lbranch = master
git_repo_foo_commit = bb5fb8e678aabe286e74c4f2993dc2a9e550b627
'''
def __init__(self, params, name, destination_dir):
'''
Instantiates a new GitRepoParamHelper
'''
self.params = params
self.name = name
self.destination_dir = destination_dir
self._parse_params()
def _parse_params(self):
'''
Parses the params items for entries related to this repo
This method currently does everything that the parent class __init__()
method does, that is, sets all instance variables needed by other
methods. That means it's not strictly necessary to call parent's
__init__().
'''
config_prefix = 'git_repo_%s' % self.name
logging.debug('Parsing parameters for git repo %s, configuration '
'prefix is %s' % (self.name, config_prefix))
self.base_uri = self.params.get('%s_base_uri' % config_prefix)
if self.base_uri is None:
logging.debug('Git repo %s base uri is not set' % self.name)
else:
logging.debug('Git repo %s base uri: %s' % (self.name,
self.base_uri))
self.uri = self.params.get('%s_uri' % config_prefix)
logging.debug('Git repo %s uri: %s' % (self.name, self.uri))
self.branch = self.params.get('%s_branch' % config_prefix, 'master')
logging.debug('Git repo %s branch: %s' % (self.name, self.branch))
self.lbranch = self.params.get('%s_lbranch' % config_prefix)
if self.lbranch is None:
self.lbranch = self.branch
logging.debug('Git repo %s lbranch: %s' % (self.name, self.lbranch))
self.commit = self.params.get('%s_commit' % config_prefix)
if self.commit is None:
logging.debug('Git repo %s commit is not set' % self.name)
else:
logging.debug('Git repo %s commit: %s' % (self.name, self.commit))
self.cmd = os_dep.command('git')
class LocalSourceDirHelper(object):
'''
Helper class to deal with source code sitting somewhere in the filesystem
'''
def __init__(self, source_dir, destination_dir):
'''
@param source_dir:
@param destination_dir:
@return: new LocalSourceDirHelper instance
'''
self.source = source_dir
self.destination = destination_dir
def execute(self):
'''
Copies the source directory to the destination directory
'''
if os.path.isdir(self.destination):
shutil.rmtree(self.destination)
if os.path.isdir(self.source):
shutil.copytree(self.source, self.destination)
class LocalSourceDirParamHelper(LocalSourceDirHelper):
'''
    Helps to deal with source dirs specified in cartesian config files
This class attempts to make it simple to manage a source dir, by using a
naming standard that follows this basic syntax:
<prefix>_name_<suffix>
<prefix> is always 'local_src' and <suffix> sets options for this source
dir. Example for source dir named foo:
local_src_foo_path = /home/user/foo
'''
def __init__(self, params, name, destination_dir):
'''
Instantiate a new LocalSourceDirParamHelper
'''
self.params = params
self.name = name
self.destination_dir = destination_dir
self._parse_params()
def _parse_params(self):
'''
Parses the params items for entries related to source dir
'''
config_prefix = 'local_src_%s' % self.name
logging.debug('Parsing parameters for local source %s, configuration '
'prefix is %s' % (self.name, config_prefix))
self.path = self.params.get('%s_path' % config_prefix)
logging.debug('Local source directory %s path: %s' % (self.name,
self.path))
self.source = self.path
self.destination = self.destination_dir
class LocalTarHelper(object):
'''
Helper class to deal with source code in a local tarball
'''
def __init__(self, source, destination_dir):
self.source = source
self.destination = destination_dir
def extract(self):
'''
Extracts the tarball into the destination directory
'''
if os.path.isdir(self.destination):
shutil.rmtree(self.destination)
if os.path.isfile(self.source) and tarfile.is_tarfile(self.source):
name = os.path.basename(self.destination)
temp_dir = os.path.join(os.path.dirname(self.destination),
'%s.tmp' % name)
logging.debug('Temporary directory for extracting tarball is %s' %
temp_dir)
if not os.path.isdir(temp_dir):
os.makedirs(temp_dir)
tarball = tarfile.open(self.source)
tarball.extractall(temp_dir)
#
# If there's a directory at the toplevel of the tarfile, assume
# it's the root for the contents, usually source code
#
tarball_info = tarball.members[0]
if tarball_info.isdir():
content_path = os.path.join(temp_dir,
tarball_info.name)
else:
content_path = temp_dir
#
# Now move the content directory to the final destination
#
shutil.move(content_path, self.destination)
else:
raise OSError("%s is not a file or tar file" % self.source)
def execute(self):
'''
        Executes all actions this helper is supposed to perform
This is the main entry point method for this class, and all other
helper classes.
'''
self.extract()
class LocalTarParamHelper(LocalTarHelper):
'''
    Helps to deal with source tarballs specified in cartesian config files
This class attempts to make it simple to manage a tarball with source code,
by using a naming standard that follows this basic syntax:
<prefix>_name_<suffix>
<prefix> is always 'local_tar' and <suffix> sets options for this source
tarball. Example for source tarball named foo:
local_tar_foo_path = /tmp/foo-1.0.tar.gz
'''
def __init__(self, params, name, destination_dir):
'''
Instantiates a new LocalTarParamHelper
'''
self.params = params
self.name = name
self.destination_dir = destination_dir
self._parse_params()
def _parse_params(self):
'''
Parses the params items for entries related to this local tar helper
'''
config_prefix = 'local_tar_%s' % self.name
logging.debug('Parsing parameters for local tar %s, configuration '
'prefix is %s' % (self.name, config_prefix))
self.path = self.params.get('%s_path' % config_prefix)
logging.debug('Local source tar %s path: %s' % (self.name,
self.path))
self.source = self.path
self.destination = self.destination_dir
class RemoteTarHelper(LocalTarHelper):
'''
Helper that fetches a tarball and extracts it locally
'''
def __init__(self, source_uri, destination_dir):
self.source = source_uri
self.destination = destination_dir
def execute(self):
'''
        Executes all actions this helper class is supposed to perform
This is the main entry point method for this class, and all other
helper classes.
This implementation fetches the remote tar file and then extracts
it using the functionality present in the parent class.
'''
name = os.path.basename(self.source)
base_dest = os.path.dirname(self.destination_dir)
dest = os.path.join(base_dest, name)
utils.get_file(self.source, dest)
self.source = dest
self.extract()
class RemoteTarParamHelper(RemoteTarHelper):
'''
    Helps to deal with remote source tarballs specified in cartesian config
    This class attempts to make it simple to manage a tarball with source code,
    by using a naming standard that follows this basic syntax:
    <prefix>_name_<suffix>
    <prefix> is always 'remote_tar' and <suffix> sets options for this source
tarball. Example for source tarball named foo:
remote_tar_foo_uri = http://foo.org/foo-1.0.tar.gz
'''
def __init__(self, params, name, destination_dir):
'''
Instantiates a new RemoteTarParamHelper instance
'''
self.params = params
self.name = name
self.destination_dir = destination_dir
self._parse_params()
def _parse_params(self):
'''
Parses the params items for entries related to this remote tar helper
'''
config_prefix = 'remote_tar_%s' % self.name
logging.debug('Parsing parameters for remote tar %s, configuration '
'prefix is %s' % (self.name, config_prefix))
self.uri = self.params.get('%s_uri' % config_prefix)
logging.debug('Remote source tar %s uri: %s' % (self.name,
self.uri))
self.source = self.uri
self.destination = self.destination_dir
class PatchHelper(object):
'''
Helper that encapsulates the patching of source code with patch files
'''
def __init__(self, source_dir, patches):
'''
Initializes a new PatchHelper
'''
self.source_dir = source_dir
self.patches = patches
def download(self):
'''
Copies patch files from remote locations to the source directory
'''
for patch in self.patches:
utils.get_file(patch, os.path.join(self.source_dir,
os.path.basename(patch)))
def patch(self):
'''
Patches the source dir with all patch files
'''
os.chdir(self.source_dir)
for patch in self.patches:
patch_file = os.path.join(self.source_dir,
os.path.basename(patch))
            utils.system('patch -p1 < %s' % patch_file)
def execute(self):
'''
Performs all steps necessary to download patches and apply them
'''
self.download()
self.patch()
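# Example usage (a minimal sketch; the source dir and patch URL are
# illustrative):
#
#   helper = PatchHelper('/tmp/qemu-src',
#                        ['http://example.com/fix-build.patch'])
#   helper.execute()   # downloads the patch and applies it with 'patch -p1'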
class PatchParamHelper(PatchHelper):
'''
    Helps to deal with patches specified in cartesian config files
    This class attempts to make it simple to patch source code, by using a
naming standard that follows this basic syntax:
[<git_repo>|<local_src>|<local_tar>|<remote_tar>]_<name>_patches
<prefix> is either a 'local_src' or 'git_repo', that, together with <name>
specify a directory containing source code to receive the patches. That is,
for source code coming from git repo foo, patches would be specified as:
git_repo_foo_patches = ['http://foo/bar.patch', 'http://foo/baz.patch']
    And for patches to be applied on local source code also named foo:
local_src_foo_patches = ['http://foo/bar.patch', 'http://foo/baz.patch']
'''
def __init__(self, params, prefix, source_dir):
'''
Initializes a new PatchParamHelper instance
'''
self.params = params
self.prefix = prefix
self.source_dir = source_dir
self._parse_params()
def _parse_params(self):
'''
Parses the params items for entries related to this set of patches
This method currently does everything that the parent class __init__()
method does, that is, sets all instance variables needed by other
methods. That means it's not strictly necessary to call parent's
__init__().
'''
logging.debug('Parsing patch parameters for prefix %s' % self.prefix)
patches_param_key = '%s_patches' % self.prefix
self.patches_str = self.params.get(patches_param_key, '[]')
logging.debug('Patches config for prefix %s: %s' % (self.prefix,
self.patches_str))
self.patches = eval(self.patches_str)
logging.debug('Patches for prefix %s: %s' % (self.prefix,
", ".join(self.patches)))
class GnuSourceBuildInvalidSource(Exception):
'''
Exception raised when build source dir/file is not valid
'''
pass
class SourceBuildFailed(Exception):
'''
    Exception raised when building fails even with a single job
This serves as feedback for code using *BuildHelper
'''
pass
class SourceBuildParallelFailed(Exception):
'''
Exception raised when building with parallel jobs fails
This serves as feedback for code using *BuildHelper
'''
pass
class GnuSourceBuildHelper(object):
'''
Handles software installation of GNU-like source code
    This basically means that the build will go through the classic GNU
autotools steps: ./configure, make, make install
'''
def __init__(self, source, build_dir, prefix,
configure_options=[]):
'''
@type source: string
@param source: source directory or tarball
@type prefix: string
@param prefix: installation prefix
@type build_dir: string
@param build_dir: temporary directory used for building the source code
@type configure_options: list
@param configure_options: options to pass to configure
        @raise: GnuSourceBuildInvalidSource
'''
self.source = source
self.build_dir = build_dir
self.prefix = prefix
self.configure_options = configure_options
self.install_debug_info = True
self.include_pkg_config_path()
def include_pkg_config_path(self):
'''
Adds the current prefix to the list of paths that pkg-config searches
        This is currently not optional as there are no observed adverse side
        effects of enabling this. As the "prefix" is usually only valid during
a test run, we believe that having other pkg-config files (*.pc) in
either '<prefix>/share/pkgconfig' or '<prefix>/lib/pkgconfig' is
exactly for the purpose of using them.
@returns: None
'''
env_var = 'PKG_CONFIG_PATH'
include_paths = [os.path.join(self.prefix, 'share', 'pkgconfig'),
os.path.join(self.prefix, 'lib', 'pkgconfig')]
if os.environ.has_key(env_var):
paths = os.environ[env_var].split(':')
for include_path in include_paths:
if include_path not in paths:
paths.append(include_path)
os.environ[env_var] = ':'.join(paths)
else:
os.environ[env_var] = ':'.join(include_paths)
logging.debug('PKG_CONFIG_PATH is: %s' % os.environ['PKG_CONFIG_PATH'])
def get_configure_path(self):
'''
        Checks if 'configure' exists; if not, returns 'autogen.sh' as a fallback
'''
configure_path = os.path.abspath(os.path.join(self.source,
"configure"))
autogen_path = os.path.abspath(os.path.join(self.source,
"autogen.sh"))
if os.path.exists(configure_path):
return configure_path
elif os.path.exists(autogen_path):
return autogen_path
else:
raise GnuSourceBuildInvalidSource('configure script does not exist')
def get_available_configure_options(self):
'''
Return the list of available options of a GNU like configure script
This will run the "configure" script at the source directory
@returns: list of options accepted by configure script
'''
help_raw = utils.system_output('%s --help' % self.get_configure_path(),
ignore_status=True)
help_output = help_raw.split("\n")
option_list = []
for line in help_output:
cleaned_line = line.lstrip()
if cleaned_line.startswith("--"):
option = cleaned_line.split()[0]
option = option.split("=")[0]
option_list.append(option)
return option_list
def enable_debug_symbols(self):
'''
Enables option that leaves debug symbols on compiled software
This makes debugging a lot easier.
'''
enable_debug_option = "--disable-strip"
if enable_debug_option in self.get_available_configure_options():
self.configure_options.append(enable_debug_option)
logging.debug('Enabling debug symbols with option: %s' %
enable_debug_option)
def get_configure_command(self):
'''
Formats configure script with all options set
@returns: string with all configure options, including prefix
'''
prefix_option = "--prefix=%s" % self.prefix
options = self.configure_options
options.append(prefix_option)
return "%s %s" % (self.get_configure_path(),
" ".join(options))
def configure(self):
'''
Runs the "configure" script passing apropriate command line options
'''
configure_command = self.get_configure_command()
logging.info('Running configure on build dir')
os.chdir(self.build_dir)
utils.system(configure_command)
def make_parallel(self):
'''
Runs "make" using the correct number of parallel jobs
'''
parallel_make_jobs = utils.count_cpus()
make_command = "make -j %s" % parallel_make_jobs
logging.info("Running parallel make on build dir")
os.chdir(self.build_dir)
utils.system(make_command)
def make_non_parallel(self):
'''
Runs "make", using a single job
'''
os.chdir(self.build_dir)
utils.system("make")
def make_clean(self):
'''
Runs "make clean"
'''
os.chdir(self.build_dir)
utils.system("make clean")
def make(self, failure_feedback=True):
'''
        Runs a parallel make, falling back to a single job on failure
@param failure_feedback: return information on build failure by raising
the appropriate exceptions
@raise: SourceBuildParallelFailed if parallel build fails, or
SourceBuildFailed if single job build fails
'''
try:
self.make_parallel()
except error.CmdError:
try:
self.make_clean()
self.make_non_parallel()
except error.CmdError:
if failure_feedback:
raise SourceBuildFailed
if failure_feedback:
raise SourceBuildParallelFailed
def make_install(self):
'''
Runs "make install"
'''
os.chdir(self.build_dir)
utils.system("make install")
install = make_install
def execute(self):
'''
Runs appropriate steps for *building* this source code tree
'''
if self.install_debug_info:
self.enable_debug_symbols()
self.configure()
self.make()
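# Example usage (a minimal sketch; the paths and configure option are
# illustrative):
#
#   builder = GnuSourceBuildHelper(source='/tmp/qemu-src',
#                                  build_dir='/tmp/qemu-src',
#                                  prefix='/usr/local/qemu',
#                                  configure_options=['--disable-docs'])
#   builder.execute()   # configure + parallel make (single job on failure)
#   builder.install()   # make install into the prefix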
class LinuxKernelBuildHelper(object):
'''
    Handles building the Linux kernel.
'''
def __init__(self, params, prefix, source):
'''
@type params: dict
@param params: dictionary containing the test parameters
@type source: string
@param source: source directory or tarball
@type prefix: string
@param prefix: installation prefix
'''
self.params = params
self.prefix = prefix
self.source = source
self._parse_params()
def _parse_params(self):
'''
Parses the params items for entries related to guest kernel
'''
configure_opt_key = '%s_config' % self.prefix
self.config = self.params.get(configure_opt_key, '')
build_image_key = '%s_build_image' % self.prefix
self.build_image = self.params.get(build_image_key,
'arch/x86/boot/bzImage')
build_target_key = '%s_build_target' % self.prefix
self.build_target = self.params.get(build_target_key, 'bzImage')
kernel_path_key = '%s_kernel_path' % self.prefix
default_kernel_path = os.path.join('/tmp/kvm_autotest_root/images',
self.build_target)
self.kernel_path = self.params.get(kernel_path_key,
default_kernel_path)
logging.info('Parsing Linux kernel build parameters for %s',
self.prefix)
def make_guest_kernel(self):
'''
Runs "make", using a single job
'''
os.chdir(self.source)
logging.info("Building guest kernel")
logging.debug("Kernel config is %s" % self.config)
utils.get_file(self.config, '.config')
# FIXME currently no support for builddir
# run old config
utils.system('yes "" | make oldconfig > /dev/null')
parallel_make_jobs = utils.count_cpus()
make_command = "make -j %s %s" % (parallel_make_jobs, self.build_target)
logging.info("Running parallel make on src dir")
utils.system(make_command)
def make_clean(self):
'''
Runs "make clean"
'''
os.chdir(self.source)
utils.system("make clean")
def make(self, failure_feedback=True):
'''
Runs a parallel make
@param failure_feedback: return information on build failure by raising
the appropriate exceptions
        @raise: SourceBuildParallelFailed if the parallel build fails
'''
try:
self.make_clean()
self.make_guest_kernel()
except error.CmdError:
if failure_feedback:
raise SourceBuildParallelFailed
def cp_linux_kernel(self):
'''
        Copies the built Linux kernel image to the target path
'''
os.chdir(self.source)
utils.force_copy(self.build_image, self.kernel_path)
install = cp_linux_kernel
def execute(self):
'''
Runs appropriate steps for *building* this source code tree
'''
self.make()
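# Example usage (a minimal sketch; the params keys follow the
# '<prefix>_<option>' convention parsed by _parse_params, and the values are
# illustrative):
#
#   params = {'guest_kernel_config': 'http://example.com/kernel-config',
#             'guest_kernel_build_target': 'bzImage'}
#   builder = LinuxKernelBuildHelper(params, 'guest_kernel',
#                                    '/tmp/linux-src')
#   builder.execute()   # make clean, oldconfig, then a parallel make
#   builder.install()   # copy the built image to the configured kernel_path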
class GnuSourceBuildParamHelper(GnuSourceBuildHelper):
'''
    Helps to deal with the gnu_autotools build helper in cartesian config files.
    This class attempts to make it simple to build source code, by using a
naming standard that follows this basic syntax:
[<git_repo>|<local_src>]_<name>_<option> = value
To pass extra options to the configure script, while building foo from a
git repo, set the following variable:
git_repo_foo_configure_options = --enable-feature
'''
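    # Hypothetical illustration of the naming standard above (the option
    # values and the 'local_src_bar' prefix are made-up examples):
    #
    #   git_repo_foo_configure_options = --enable-feature --prefix=/usr
    #   local_src_bar_configure_options = --disable-docs
    #
    # With name 'git_repo_foo', _parse_params() looks up the key
    # 'git_repo_foo_configure_options' and splits its value into a list.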
def __init__(self, params, name, destination_dir, install_prefix):
'''
Instantiates a new GnuSourceBuildParamHelper
'''
self.params = params
self.name = name
self.destination_dir = destination_dir
self.install_prefix = install_prefix
self._parse_params()
def _parse_params(self):
'''
Parses the params items for entries related to source directory
This method currently does everything that the parent class __init__()
method does, that is, sets all instance variables needed by other
methods. That means it's not strictly necessary to call parent's
__init__().
'''
logging.debug('Parsing gnu_autotools build parameters for %s' %
self.name)
configure_opt_key = '%s_configure_options' % self.name
configure_options = self.params.get(configure_opt_key, '').split()
logging.debug('Configure options for %s: %s' % (self.name,
configure_options))
self.source = self.destination_dir
self.build_dir = self.destination_dir
self.prefix = self.install_prefix
self.configure_options = configure_options
self.include_pkg_config_path()
# Support the install_debug_info feature, that automatically
# adds/keeps debug information on generated libraries/binaries
install_debug_info_cfg = self.params.get("install_debug_info", "yes")
self.install_debug_info = install_debug_info_cfg != "no"
def install_host_kernel(job, params):
"""
Install a host kernel, given the appropriate params.
@param job: Job object.
@param params: Dict with host kernel install params.
"""
install_type = params.get('host_kernel_install_type')
if install_type == 'rpm':
logging.info('Installing host kernel through rpm')
rpm_url = params.get('host_kernel_rpm_url')
dst = os.path.join("/tmp", os.path.basename(rpm_url))
k = utils.get_file(rpm_url, dst)
host_kernel = job.kernel(k)
host_kernel.install(install_vmlinux=False)
host_kernel.boot()
elif install_type in ['koji', 'brew']:
logging.info('Installing host kernel through koji/brew')
koji_cmd = params.get('host_kernel_koji_cmd')
koji_build = params.get('host_kernel_koji_build')
koji_tag = params.get('host_kernel_koji_tag')
k_deps = KojiPkgSpec(tag=koji_tag, package='kernel',
subpackages=['kernel-devel', 'kernel-firmware'])
k = KojiPkgSpec(tag=koji_tag, package='kernel',
subpackages=['kernel'])
c = KojiClient(koji_cmd)
logging.info('Fetching kernel dependencies (-devel, -firmware)')
c.get_pkgs(k_deps, job.tmpdir)
logging.info('Installing kernel dependencies (-devel, -firmware) '
'through %s', install_type)
k_deps_rpm_file_names = [os.path.join(job.tmpdir, rpm_file_name) for
rpm_file_name in c.get_pkg_rpm_file_names(k_deps)]
utils.run('rpm -U --force %s' % " ".join(k_deps_rpm_file_names))
c.get_pkgs(k, job.tmpdir)
k_rpm = os.path.join(job.tmpdir,
c.get_pkg_rpm_file_names(k)[0])
host_kernel = job.kernel(k_rpm)
host_kernel.install(install_vmlinux=False)
host_kernel.boot()
elif install_type == 'git':
logging.info('Chose to install host kernel through git, proceeding')
repo = params.get('host_kernel_git_repo')
repo_base = params.get('host_kernel_git_repo_base', None)
branch = params.get('host_kernel_git_branch')
commit = params.get('host_kernel_git_commit')
patch_list = params.get('host_kernel_patch_list')
if patch_list:
patch_list = patch_list.split()
kernel_config = params.get('host_kernel_config')
repodir = os.path.join("/tmp", 'kernel_src')
r = git.get_repo(uri=repo, branch=branch, destination_dir=repodir,
commit=commit, base_uri=repo_base)
host_kernel = job.kernel(r)
if patch_list:
host_kernel.patch(patch_list)
host_kernel.config(kernel_config)
host_kernel.build()
host_kernel.install()
host_kernel.boot()
else:
logging.info('Chose %s, using the current kernel for the host',
install_type)
def if_nametoindex(ifname):
"""
Map an interface name into its corresponding index.
Returns 0 on error, as 0 is not a valid index
@param ifname: interface name
"""
index = 0
ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
ifr = struct.pack("16si", ifname, 0)
r = fcntl.ioctl(ctrl_sock, SIOCGIFINDEX, ifr)
index = struct.unpack("16si", r)[1]
ctrl_sock.close()
return index
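# A minimal usage sketch (assumes a real interface name on the host;
# "eth0" is a made-up example):
#
#   idx = if_nametoindex("eth0")    # e.g. 2; 0 means the lookup failed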
def vnet_hdr_probe(tapfd):
"""
    Check if IFF_VNET_HDR is supported by tun.
@param tapfd: the file descriptor of /dev/net/tun
"""
u = struct.pack("I", 0)
try:
r = fcntl.ioctl(tapfd, TUNGETFEATURES, u)
except OverflowError:
return False
flags = struct.unpack("I", r)[0]
if flags & IFF_VNET_HDR:
return True
else:
return False
def open_tap(devname, ifname, vnet_hdr=True):
"""
    Open a tap device and return its file descriptor, which is used by the
    fd=<fd> parameter of qemu-kvm.
    @param ifname: TAP interface name
    @param vnet_hdr: Whether to enable the vnet header
"""
try:
tapfd = os.open(devname, os.O_RDWR)
except OSError, e:
raise TAPModuleError(devname, "open", e)
flags = IFF_TAP | IFF_NO_PI
if vnet_hdr and vnet_hdr_probe(tapfd):
flags |= IFF_VNET_HDR
ifr = struct.pack("16sh", ifname, flags)
try:
r = fcntl.ioctl(tapfd, TUNSETIFF, ifr)
except IOError, details:
raise TAPCreationError(ifname, details)
ifname = struct.unpack("16sh", r)[0].strip("\x00")
return tapfd
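# Hedged sketch tying the tap helpers below together ("tap0" and "br0" are
# made-up names; requires root and the tun module loaded):
#
#   fd = open_tap("/dev/net/tun", "tap0", vnet_hdr=True)
#   add_to_bridge("tap0", "br0")
#   bring_up_ifname("tap0")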
def add_to_bridge(ifname, brname):
"""
    Add a TAP device to a bridge
@param ifname: Name of TAP device
@param brname: Name of the bridge
"""
ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
index = if_nametoindex(ifname)
if index == 0:
raise TAPNotExistError(ifname)
ifr = struct.pack("16si", brname, index)
try:
r = fcntl.ioctl(ctrl_sock, SIOCBRADDIF, ifr)
except IOError, details:
raise BRAddIfError(ifname, brname, details)
ctrl_sock.close()
def bring_up_ifname(ifname):
"""
Bring up an interface
@param ifname: Name of the interface
"""
ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
ifr = struct.pack("16si", ifname, IFF_UP)
try:
fcntl.ioctl(ctrl_sock, SIOCSIFFLAGS, ifr)
except IOError:
raise TAPBringUpError(ifname)
ctrl_sock.close()
def if_set_macaddress(ifname, mac):
"""
    Set the MAC address for an interface
    @param ifname: Name of the interface
    @param mac: MAC address
"""
ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
ifr = struct.pack("256s", ifname)
try:
mac_dev = fcntl.ioctl(ctrl_sock, SIOCGIFHWADDR, ifr)[18:24]
mac_dev = ":".join(["%02x" % ord(m) for m in mac_dev])
except IOError, e:
raise HwAddrGetError(ifname)
if mac_dev.lower() == mac.lower():
return
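    # sockaddr layout packed below: 16 bytes of interface name, an unsigned
    # short address family (1 == ARPHRD_ETHER), then the 6 raw MAC bytes
    # padded out to 14.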
ifr = struct.pack("16sH14s", ifname, 1,
"".join([chr(int(m, 16)) for m in mac.split(":")]))
try:
fcntl.ioctl(ctrl_sock, SIOCSIFHWADDR, ifr)
except IOError, e:
logging.info(e)
raise HwAddrSetError(ifname, mac)
ctrl_sock.close()
def check_iso(url, destination, iso_sha1):
"""
    Verifies if the ISO available at url is present at destination with the
    right hash.
This function will verify the SHA1 hash of the ISO image. If the file
turns out to be missing or corrupted, let the user know we can download it.
@param url: URL where the ISO file can be found.
    @param destination: Directory on the local disk where we'd like the ISO to be.
@param iso_sha1: SHA1 hash for the ISO image.
"""
file_ok = False
if not os.path.isdir(destination):
os.makedirs(destination)
iso_path = os.path.join(destination, os.path.basename(url))
if not os.path.isfile(iso_path):
logging.warning("File %s not found", iso_path)
logging.warning("Expected SHA1 sum: %s", iso_sha1)
answer = utils.ask("Would you like to download it from %s?" % url)
if answer == 'y':
utils.interactive_download(url, iso_path, 'ISO Download')
else:
logging.warning("Missing file %s", iso_path)
logging.warning("Please download it or put an existing copy on the "
"appropriate location")
return
else:
logging.info("Found %s", iso_path)
logging.info("Expected SHA1 sum: %s", iso_sha1)
answer = utils.ask("Would you like to check %s? It might take a while" %
iso_path)
if answer == 'y':
actual_iso_sha1 = utils.hash_file(iso_path, method='sha1')
if actual_iso_sha1 != iso_sha1:
logging.error("Actual SHA1 sum: %s", actual_iso_sha1)
            else:
                logging.info("SHA1 sum check OK")
                file_ok = True
else:
logging.info("File %s present, but chose to not verify it",
iso_path)
return
if file_ok:
logging.info("%s present, with proper checksum", iso_path)
def virt_test_assistant(test_name, test_dir, base_dir, default_userspace_paths,
check_modules, online_docs_url):
"""
Common virt test assistant module.
@param test_name: Test name, such as "kvm".
@param test_dir: Path with the test directory.
@param base_dir: Base directory used to hold images and isos.
@param default_userspace_paths: Important programs for a successful test
execution.
@param check_modules: Whether we want to verify if a given list of modules
is loaded in the system.
@param online_docs_url: URL to an online documentation system, such as an
wiki page.
"""
logging_manager.configure_logging(VirtLoggingConfig(), verbose=True)
logging.info("%s test config helper", test_name)
step = 0
common_dir = os.path.dirname(sys.modules[__name__].__file__)
logging.info("")
step += 1
logging.info("%d - Verifying directories (check if the directory structure "
"expected by the default test config is there)", step)
sub_dir_list = ["images", "isos", "steps_data"]
for sub_dir in sub_dir_list:
sub_dir_path = os.path.join(base_dir, sub_dir)
if not os.path.isdir(sub_dir_path):
logging.debug("Creating %s", sub_dir_path)
os.makedirs(sub_dir_path)
else:
logging.debug("Dir %s exists, not creating" %
sub_dir_path)
logging.info("")
step += 1
logging.info("%d - Creating config files from samples (copy the default "
"config samples to actual config files)", step)
config_file_list = glob.glob(os.path.join(test_dir, "*.cfg.sample"))
config_file_list += glob.glob(os.path.join(common_dir, "*.cfg.sample"))
for config_file in config_file_list:
src_file = config_file
dst_file = os.path.join(test_dir, os.path.basename(config_file))
dst_file = dst_file.rstrip(".sample")
if not os.path.isfile(dst_file):
logging.debug("Creating config file %s from sample", dst_file)
shutil.copyfile(src_file, dst_file)
else:
logging.debug("Config file %s exists, not touching" % dst_file)
logging.info("")
step += 1
logging.info("%s - Verifying iso (make sure we have the OS ISO needed for "
"the default test set)", step)
iso_name = "Fedora-16-x86_64-DVD.iso"
fedora_dir = "pub/fedora/linux/releases/16/Fedora/x86_64/iso"
url = os.path.join("http://download.fedoraproject.org/", fedora_dir,
iso_name)
iso_sha1 = "76dd59c37e9a0ec2af56263fa892ff571c92c89a"
destination = os.path.join(base_dir, 'isos', 'linux')
check_iso(url, destination, iso_sha1)
logging.info("")
step += 1
logging.info("%d - Verifying winutils.iso (make sure we have the utility "
"ISO needed for Windows testing)", step)
logging.info("In order to run the KVM autotests in Windows guests, we "
"provide you an ISO that this script can download")
url = "http://people.redhat.com/mrodrigu/kvm/winutils.iso"
iso_sha1 = "02930224756510e383c44c49bffb760e35d6f892"
destination = os.path.join(base_dir, 'isos', 'windows')
    path = os.path.join(destination, os.path.basename(url))
check_iso(url, destination, iso_sha1)
logging.info("")
step += 1
logging.info("%d - Checking if the appropriate userspace programs are "
"installed", step)
for path in default_userspace_paths:
if not os.path.isfile(path):
logging.warning("No %s found. You might need to install %s.",
path, os.path.basename(path))
else:
logging.debug("%s present", path)
logging.info("If you wish to change any userspace program path, "
"you will have to modify tests.cfg")
if check_modules:
logging.info("")
step += 1
logging.info("%d - Checking for modules %s", step,
",".join(check_modules))
for module in check_modules:
if not utils.module_is_loaded(module):
logging.warning("Module %s is not loaded. You might want to "
"load it", module)
else:
logging.debug("Module %s loaded", module)
if online_docs_url:
logging.info("")
step += 1
logging.info("%d - Verify needed packages to get started", step)
logging.info("Please take a look at the online documentation: %s",
online_docs_url)
client_dir = os.path.abspath(os.path.join(test_dir, "..", ".."))
autotest_bin = os.path.join(client_dir, 'bin', 'autotest')
control_file = os.path.join(test_dir, 'control')
logging.info("")
logging.info("When you are done fixing eventual warnings found, "
"you can run the test using this command line AS ROOT:")
logging.info("%s %s", autotest_bin, control_file)
logging.info("Autotest prints the results dir, so you can look at DEBUG "
"logs if something went wrong")
logging.info("You can also edit the test config files")
class NumaNode(object):
"""
Numa node to control processes and shared memory.
"""
def __init__(self, i=-1):
self.num = self.get_node_num()
if i < 0:
self.cpus = self.get_node_cpus(int(self.num) + i).split()
else:
self.cpus = self.get_node_cpus(i - 1).split()
self.dict = {}
for i in self.cpus:
self.dict[i] = "free"
def get_node_num(self):
"""
Get the number of nodes of current host.
"""
cmd = utils.run("numactl --hardware")
return re.findall("available: (\d+) nodes", cmd.stdout)[0]
def get_node_cpus(self, i):
"""
Get cpus of a specific node
        @param i: Index of the node.
"""
cmd = utils.run("numactl --hardware")
return re.findall("node %s cpus: (.*)" % i, cmd.stdout)[0]
def free_cpu(self, i):
"""
        Release the pin of one CPU.
        @param i: Index of the CPU.
"""
self.dict[i] = "free"
def _flush_pin(self):
"""
Flush pin dict, remove the record of exited process.
"""
cmd = utils.run("ps -eLf | awk '{print $4}'")
all_pids = cmd.stdout
for i in self.cpus:
if self.dict[i] != "free" and self.dict[i] not in all_pids:
self.free_cpu(i)
@error.context_aware
def pin_cpu(self, process):
"""
Pin one process to a single cpu.
@param process: Process ID.
"""
self._flush_pin()
error.context("Pinning process %s to the CPU" % process)
for i in self.cpus:
if self.dict[i] == "free":
self.dict[i] = str(process)
cmd = "taskset -p %s %s" % (hex(2 ** int(i)), process)
logging.debug("NumaNode (%s): " % i + cmd)
utils.run(cmd)
return i
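    # Worked example of the affinity mask used in pin_cpu(): cpu "3" gives
    # hex(2 ** 3) == '0x8', so the command run is "taskset -p 0x8 <pid>".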
def show(self):
"""
Display the record dict in a convenient way.
"""
logging.info("Numa Node record dict:")
for i in self.cpus:
logging.info(" %s: %s" % (i, self.dict[i]))
|
libvirt/autotest
|
client/virt/virt_utils.py
|
Python
|
gpl-2.0
| 125,243
|
__author__ = 'bison'
from PIL import Image, ImageDraw
class core:
def __init__(self, imageX, imageY, pixelSize, fileType, dpi):
self.imageX = imageX
self.imageY = imageY
self.pixelSize = pixelSize
self.img = Image.new('RGB', (imageX, imageY), "black") # create a new black image
self.pixels = self.img.load() # create the pixel map
self.fileType = fileType
self.dpi = dpi
def getColorTupleFromBytes(self, bytes3):
color = (bytes3[0], bytes3[1], bytes3[2])
return color
def rainbow(self):
for i in range(self.img.size[0]): # for every pixel:
for j in range(self.img.size[1]):
self.pixels[i, j] = (i, j, 100) # set the colour accordingly
def fromFile(self, filePath):
drawer = ImageDraw.Draw(self.img)
f = open(filePath, 'rb')
canDraw = True
currentPositionX = 0
currentPositionY = 0
while canDraw:
print('Current position:', currentPositionX, currentPositionY, '/', self.imageX, self.imageY)
bytes3 = f.read(3)
if len(bytes3) == 3:
drawer.rectangle(
(
currentPositionX, currentPositionY,
currentPositionX + self.pixelSize, currentPositionY + self.pixelSize
),
self.getColorTupleFromBytes(bytes3)
)
currentPositionX += self.pixelSize
if currentPositionX >= self.imageX:
currentPositionX = 0
currentPositionY += self.pixelSize
if currentPositionY >= self.imageY:
canDraw = False
else:
canDraw = False
f.close()
def saveImage(self, savePath=''):
if not savePath.lower().endswith('.' + self.fileType):
savePath += '.' + self.fileType
# fix sub-path
if '/' not in savePath and '\\' not in savePath:
savePath = 'createdImages/' + savePath
# 72 is probably the default value?!
if self.dpi > 0:
self.img.save(savePath, self.fileType, dpi=(self.dpi, self.dpi))
else:
self.img.save(savePath, self.fileType)
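# Hedged usage sketch ('data.bin' and the sizes below are made-up values):
#
#   c = core(imageX=300, imageY=300, pixelSize=3, fileType='png', dpi=0)
#   c.fromFile('data.bin')   # every 3 bytes paint one RGB square of pixelSize
#   c.saveImage('out')       # written to createdImages/out.png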
|
bison--/draw-o-matic
|
core.py
|
Python
|
gpl-2.0
| 1,859
|
from django.contrib.syndication.views import Feed
from packages.models import Package
class PackageFeed(Feed):
title = "Last updated packages"
link = "/packages/"
description = ""
def items(self):
return Package.objects.order_by('-last_update')[:5]
def item_title(self, item):
return item.name
def item_description(self, item):
return item.pkgdesc
|
osa1/noan
|
src/feeds.py
|
Python
|
gpl-2.0
| 400
|
#!/usr/bin/env python
import sys
import os
import re
import json
from collections import OrderedDict
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from elasticsearch1 import Elasticsearch
es_host = 'localhost:9200'
es_type = "donor"
es = Elasticsearch([es_host], timeout=600)
es_queries = [
# order of the queries is important
# query 0: both aligned
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{
"terms": {
"flags.is_normal_specimen_aligned": [
"T"
]
}
},
{
"exists": {
"field": "normal_specimen.is_aligned"
}
},
{
"terms": {
"flags.are_all_tumor_specimens_aligned": [
"T"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
# query 1: normal aligned, tumor not
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{
"terms": {
"flags.is_normal_specimen_aligned": [
"T"
]
}
},
{
"exists": {
"field": "normal_specimen.is_aligned"
}
},
{ "not":
{ "filter":
{
"terms": {
"flags.all_tumor_specimen_aliquot_counts": [
"0"
]
}
}
}
},
{
"terms": {
"flags.are_all_tumor_specimens_aligned": [
"F"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
# query 2: tumor aligned, normal not
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{
"terms": {
"flags.is_normal_specimen_aligned": [
"F"
]
}
},
{
"exists": {
"field": "normal_specimen.is_aligned"
}
},
{
"terms": {
"flags.are_all_tumor_specimens_aligned": [
"T"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
# query 3: both unaligned
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{
"terms": {
"flags.is_normal_specimen_aligned": [
"F"
]
}
},
{
"exists": {
"field": "normal_specimen.is_aligned"
}
},
{ "not":
{ "filter":
{
"terms": {
"flags.all_tumor_specimen_aliquot_counts": [
"0"
]
}
}
}
},
{
"terms": {
"flags.are_all_tumor_specimens_aligned": [
"F"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
# query 4: normal aligned, tumor missing
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{
"terms": {
"flags.is_normal_specimen_aligned": [
"T"
]
}
},
{
"exists": {
"field": "normal_specimen.is_aligned"
}
},
{
"terms": {
"flags.all_tumor_specimen_aliquot_counts": [
"0"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
# query 5: normal unaligned, tumor missing
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{
"terms": {
"flags.is_normal_specimen_aligned": [
"F"
]
}
},
{
"exists": {
"field": "normal_specimen.is_aligned"
}
},
{
"terms": {
"flags.all_tumor_specimen_aliquot_counts": [
"0"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
# query 6: tumor aligned, normal missing
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{ "not":
{ "filter":
{
"exists": {
"field": "normal_specimen.is_aligned"
}
}
}
},
{
"terms": {
"flags.are_all_tumor_specimens_aligned": [
"T"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
# query 7: tumor unaligned, normal missing
{
"aggs": {
"project_f": {
"aggs": {
"project": {
"terms": {
"field": "dcc_project_code",
"size": 1000
},
"aggs": {
"donor_id": {
"terms": {
"field": "donor_unique_id",
"size": 10000
}
}
}
}
},
"filter": {
"fquery": {
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"type": {
"value": es_type
}
},
{ "not":
{ "filter":
{
"exists": {
"field": "normal_specimen.is_aligned"
}
}
}
},
{
"terms": {
"flags.are_all_tumor_specimens_aligned": [
"F"
]
}
}
],
"must_not": [
{
"terms": {
"flags.is_manual_qc_failed": [
"T"
]
}
}
]
}
}
}
}
}
}
}
},
"size": 0
},
]
def init_report_dir(metadata_dir, report_name, repo):
report_dir = metadata_dir + '/reports/' + report_name if not repo else metadata_dir + '/reports/' + report_name + '/' + repo
if not os.path.exists(report_dir):
os.makedirs(report_dir)
return report_dir
def generate_report(es_index, es_queries, metadata_dir, report_name, timestamp, repo):
    # we need to run several queries to get facet counts for different types of donors
report = OrderedDict()
count_types = [
"both_aligned",
"normal_aligned_tumor_not",
"tumor_aligned_normal_not",
"both_not",
"normal_aligned_tumor_missing",
"normal_unaligned_tumor_missing",
"tumor_aligned_normal_missing",
"tumor_unaligned_normal_missing"
]
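    # count_types[i] pairs with es_queries[i] defined at module level; the
    # two lists must stay in the same order.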
for q_index in range(len(count_types)):
response = es.search(index=es_index, body=es_queries[q_index])
#print(json.dumps(response['aggregations']['project_f'])) # for debugging
for p in response['aggregations']['project_f']['project'].get('buckets'):
count = p.get('doc_count')
donors = get_donors(p.get('donor_id').get('buckets'))
project = p.get('key')
if not report.get(project):
report[project] = {}
if not report[project].get(count_types[q_index]):
report[project][count_types[q_index]] = {}
report[project][count_types[q_index]]['count'] = count
report[project][count_types[q_index]]['donors'] = donors
report_dir = init_report_dir(metadata_dir, report_name, repo)
summary_table = []
for p in report.keys():
summary = {"project": p}
summary['timestamp'] = timestamp
for ctype in count_types:
summary[ctype] = report.get(p).get(ctype).get('count') if report.get(p).get(ctype) else 0
donors = report.get(p).get(ctype).get('donors') if report.get(p).get(ctype) else []
if donors:
with open(report_dir + '/' + p + '.' + ctype + '.donors.txt', 'w') as o:
o.write('# ' + ctype + '\n')
o.write('# dcc_project_code' + '\t' + 'submitter_donor_id' + '\n')
for d in donors:
# TODO: query ES to get JSON then retrieve BAM info: aligned/unaligned, gnos, bam file name etc
o.write(d.replace('::', '\t') + '\n')
summary_table.append(summary)
with open(report_dir + '/donor.json', 'w') as o:
o.write(json.dumps(summary_table))
def get_donors(donor_buckets):
donors = []
for d in donor_buckets:
donors.append(d.get('key'))
return donors
def main(argv=None):
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
parser = ArgumentParser(description="PCAWG Report Generator Using ES Backend",
formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-m", "--metadata_dir", dest="metadata_dir",
help="Directory containing metadata manifest files", required=True)
parser.add_argument("-r", "--gnos_repo", dest="repo",
help="Specify which GNOS repo to process, process all repos if none specified", required=False)
args = parser.parse_args()
metadata_dir = args.metadata_dir # this dir contains gnos manifest files, will also host all reports
repo = args.repo
if not os.path.isdir(metadata_dir): # TODO: should add more directory name check to make sure it's right
sys.exit('Error: specified metadata directory does not exist!')
timestamp = str.split(metadata_dir, '/')[-1]
es_index = 'p_' + ('' if not repo else repo+'_') + re.sub(r'\D', '', timestamp).replace('20','',1)
report_name = re.sub(r'^pc_report-', '', os.path.basename(__file__))
report_name = re.sub(r'\.py$', '', report_name)
generate_report(es_index, es_queries, metadata_dir, report_name, timestamp, repo)
return 0
if __name__ == "__main__":
sys.exit(main())
|
ICGC-TCGA-PanCancer/pcawg-central-index
|
pcawg_metadata_parser/pc_report-donors_alignment_summary.py
|
Python
|
gpl-2.0
| 22,302
|
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
setup(
name='Reddit Comic',
version='0.0.1',
description='Searches for Comics',
long_description=readme,
author='Ben Osment',
author_email='benjamin.j.osment@gmail.com',
url='https://github.com/benosment/reddit-pycomic',
license=license,
packages=find_packages(exclude=('tests', 'docs'))
)
|
benosment/reddit-comic
|
setup.py
|
Python
|
gpl-2.0
| 475
|
from datetime import datetime
import time
import random
import sys
import math
import string
import os
import base64
def generate_row(c_pk,c_1,c_10000):
#when = time.time() + (transactionid / 100000.0)
#datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(when))
#big_varchar = ''.join(random.choice(string.letters) for i in xrange(128))
#big_varchar = base64.urlsafe_b64encode(os.urandom(bigVarcharSize))
res = '"%d","%d","%d"' % (c_pk,c_1,c_10000)
return res
def main():
rowsToInsert = 10000000
rowsToReport = 100000
csvName = 'stats_data_simple.csv'
random.seed(3221223452)
start_time = time.time()
rowsInserted = 0
fo = open(csvName, "w")
c_1 = 0
c_10000 = 1
for r in range(rowsToInsert):
rowsInserted = rowsInserted + 1
if (rowsInserted % 1) == 0:
c_1 = c_1 + 1
if (rowsInserted % 10000) == 0:
c_10000 = c_10000 + 1
row = generate_row(rowsInserted,c_1,c_10000)
fo.write(row + "\n")
if (rowsInserted % rowsToReport) == 0:
now = time.time()
pct = float(rowsInserted) / float(rowsToInsert) * 100.0
seconds_to_go = 999999999
minutes_to_go = 999999999
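            # ETA below: (elapsed / fraction_done) estimates the total
            # runtime; subtracting elapsed leaves the seconds still to go.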
if ((pct / 100) != 0):
seconds_to_go = ((now - start_time) / (pct / 100)) - (now - start_time)
minutes_to_go = seconds_to_go / 60.0
print '%d %.1f %.1f %.4f %.1f %.1f' % (
rowsInserted,
now - start_time,
rowsInserted / (now - start_time),
pct,
seconds_to_go,
minutes_to_go)
sys.stdout.flush()
fo.close()
return 0
sys.exit(main())
|
Percona-QA/toku-qa
|
tokudb/software/generator/generator_increments_simple.py
|
Python
|
gpl-2.0
| 1,601
|
from RepositoryInfo import RepositoryInfo
from ProjectInfo import ProjectInfo
import os
class PackageInfo:
"""
Get basic information about project:
imported packages
provided packages
tests
"""
def __init__(self, import_path, commit = "", noGodeps = [], skip_errors=False):
self.import_path = import_path
self.commit = commit
self.noGodeps = noGodeps
self.err = ""
self.repository_info = None
self.project_info = None
self.archive_dir = ""
self.repository_decoded = False
self.name = ""
self.skip_errors = skip_errors
def getError(self):
return self.err
def getName(self):
return self.name
def getRepositoryInfo(self):
return self.repository_info
def getProjectInfo(self):
return self.project_info
def decodeRepository(self):
# get repository info
self.repository_info = RepositoryInfo(self.import_path, self.commit)
if not self.repository_info.retrieve():
self.err = self.repository_info.getError()
return False
# package name
ip_info = self.repository_info.getImportPathInfo()
r_info = self.repository_info.getArchiveInfo()
self.repository_decoded = True
self.archive_dir = r_info.archive_dir
self.name = ip_info.getPackageName()
return True
def decodeProject(self, working_directory = "."):
if not self.repository_decoded:
self.err = "RepositoryInfo not decoded"
return False
# get package info
self.project_info = ProjectInfo(self.noGodeps)
source_code_directory = "%s/%s" % (working_directory, self.archive_dir)
if not os.path.exists(source_code_directory):
self.err = "Source code directory %s does not exist." % source_code_directory
self.err += "CWD: %s" % os.getcwd()
return False
if not self.project_info.retrieve(source_code_directory, self.skip_errors):
self.err = self.project_info.getError()
return False
return True
|
piotr1212/gofed
|
modules/PackageInfo.py
|
Python
|
gpl-2.0
| 1,853
|
#!/usr/bin/env python
#-*- coding: latin-1 -*-
import sys, re, time
def isvalidEntry(entry):
# Standard keys in VCF Version 3
#FN|N|NICKNAME|PHOTO|BDAY|ADR|LABEL|TEL|EMAIL|MAILER|TZ|GEO|TITLE|ROLE|LOGO|AGENT|
#ORG|CATEGORIES|NOTE|PRODID|REV|SORT\-STRING|SOUND|URL|UID|CLASS|KEY
if (not(re.match('^(?:FN|N|BDAY|ADR|TEL|EMAIL|ORG)',entry))): return False
return True
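# Note: the regex above anchors only the start of the key, so keys such as
# NICKNAME or NOTE also pass (they begin with "N"); the parser simply stores
# whatever keys come through.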
def debugprint(value):
if debug: print value
### USAGE #####################
try:
vcfFile = sys.argv[1]
ldifFile = sys.argv[2]
templateFile = sys.argv[3]
except:
print "USAGE: vcard2ldif.py vcfFile ldifFile templateFile"
sys.exit()
##############################
debug = False
### LOAD TEMPLATE ############
template = []
tpl=open(templateFile,'r+')
for line in tpl:
item = ['','']
match = re.search('{{([^}]+)}}',line)
if match: item[1] = match.groups()[0]
line = re.sub('{{[^}]+}}','$VAR',line)
line = re.sub('[\n\r]','',line)
item[0] = line
template.append(item)
tpl.close()
##############################
print "------------------------------------------------------------"
print "PARSING VCF \"%s\"" % vcfFile
vcf=open(vcfFile, 'r+')
entries = []
onvcard = False # lines between begin and end
hasmail = False
hasphone = False
iscompany = False
countervcf = 0
for line in vcf:
line = line.strip()
if line == 'BEGIN:VCARD':
entry = {}
onvcard = True
hasmail = False
hasphone = False
iscompany = False
debugprint('------------------------------------------------------------')
if line == 'END:VCARD':
entry[':ISCOMPANY'] = iscompany
entry[':HASPHONE'] = hasphone
entry[':HASMAIL'] = hasmail
for key in sorted(entry.iterkeys()):
debugprint("%s = %s" % (key, entry[key]))
print " > %s" % entry['FN']
countervcf +=1
entries.append(entry)
entry = None
onvcard = False
if onvcard:
line = re.sub('^item\d+\.','',line)
s = re.search('^([^:;]+)((?:\;type=[^;]+)*)\:(.+)$',line)
key,types,values = (None,None,None)
if s and isvalidEntry(line):
key = s.groups()[0]
types = s.groups()[1].replace('pref','').replace(';type=',' ').strip().replace(' ',':')
values = s.groups()[2].replace('\\n','').replace('\\','')
# FIRST CLEAN
if key == 'N':
if not values.replace(';','').strip():
iscompany = True
values = ''
else:
entry['N:SURNAME'] = values.split(';')[0]
entry['N:NAME'] = values.split(';')[1]
values = "%s, %s" % (entry['N:SURNAME'],entry['N:NAME'])
elif key == 'ORG':
values = re.sub('(^;+|;+$)','',values)
values = re.sub('[;]+',', ',values)
elif key == 'ADR':
address = values.split(';')
try: entry['ADR:STREET'] = address[2]
except: pass
try: entry['ADR:CITY'] = address[3]
except: pass
try: entry['ADR:STATE'] = address[4]
except: pass
try: entry['ADR:ZIP'] = address[5]
except: pass
try: entry['ADR:COUNTRY'] = address[6]
except: pass
values = re.sub('(^;+|;+$)','',values)
values = re.sub('[;]+',', ',values)
else:
values = values.split(';')
if len(values) == 1: values = values[0]
# SECOND CLEAN
if key == 'TEL':
values = values.replace(" ","").replace("-","").replace(".","")
hasphone = True
            if key == 'EMAIL': hasmail = True
if key == 'FN':
values = values.replace(',','')
uid = re.sub('[^a-z]','',values.lower())
entry['UID'] = "%s%s" % (uid,int(time.time()%int(time.time())*100000))
if types:
key = "%s:%s" % (key,types)
entry[key] = values
vcf.close()
print "Done %s VCF entries" % countervcf
print "------------------------------------------------------------"
print "WRITING LDIF \"%s\"" % ldifFile
ldif=open(ldifFile, 'w+')
counterldif=0
### OPEN LDIF
for entry in entries:
for tline in template:
skipline = False
line = tline[0]
replaceArray = tline[1]
if replaceArray != "":
replaceArray = replaceArray.split('|')
torepl = ""
for replaceEl in replaceArray:
if not replaceEl:
skipline = True
break #for
torepl = entry.get(replaceEl)
if torepl: break #for
if not skipline:
if not torepl: torepl = ""
line = line.replace("$VAR",torepl)
debugprint(line)
ldif.write("%s\n" % line)
else:
debugprint(line)
ldif.write("%s\n" % line)
if line.startswith('dn:'): print " > %s" % line
ldif.write("\n")
debugprint("-----------\n")
counterldif+=1
ldif.close()
print "Done %s LDIF entries" % counterldif
print "------------------------------------------------------------"
print "VCF #%s / LDIF #%s" % (countervcf,counterldif)
|
akelge/utils
|
ldap/vcard2ldif.py
|
Python
|
gpl-2.0
| 5,417
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import xbmcaddon
import xbmc
import xbmcvfs
import gzip
import os
import base64
import time
id = 'service.rytecepgdownloader'
addon = xbmcaddon.Addon(id=id)
def get_descriptions():
descriptions = []
    settings = [addon.getSetting('xmltv_1'), addon.getSetting('xmltv_2'), addon.getSetting('xmltv_3')]
    for s in settings:
if not s == 'None':
descriptions.append(s)
return descriptions
def load_local_xml(epg_url):
ret = False
name = epg_url.split('/')[-1]
xml_file = get_xml_file(name)
if os.path.exists(xml_file):
ret = check_date(xml_file)
return ret
def get_description_url(description):
epg_url = None
try:
epg_url = addon.getSetting(description)
except:
print '[Rytec EPG Downloader]: no epg url found in settings', description
return bdecode(epg_url)
def save_epg_url(epg_url, description):
addon.setSetting(description, bencode(epg_url))
def get_xml_path():
xml_path = addon.getSetting('path').decode('utf-8')
if not xml_path:
addon.openSettings()
xml_path = addon.getSetting('path').decode('utf-8')
return xml_path
def get_xml_file(name):
xml_path = get_xml_path()
xml_file = os.path.join(xml_path, name)
return xml_file
def bencode(original_string):
encoded_string = base64.b64encode(original_string)
return encoded_string
def bdecode(encoded_string):
decoded_string = base64.b64decode(encoded_string)
return decoded_string
def check_date(file):
cache_days = 3
modified_time = round(os.stat(file).st_mtime)
current_time = round(time.time())
t = current_time - modified_time
if (t / 3600) < 24*cache_days:
return True
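# Note: check_date() implicitly returns None (falsy) when the file is older
# than cache_days, so callers read "no return value" as "cache is stale".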
def download_allowed(a):
gmt = time.gmtime()
if gmt.tm_hour > 2 and gmt.tm_hour < 7:
if not a:
print '[Rytec EPG Downloader]: epg download not allowed between 3 and 7 GMT'
return False
else:
return True
def get_counter():
counter = addon.getSetting('counter')
if not counter:
counter = '0'
return counter
def blocked(a):
counter = int(get_counter())
ct = round(time.time())
if counter == 0:
counter += 1
addon.setSetting('counter', str(counter))
addon.setSetting('bt', str(ct).split('.')[0])
return False
elif counter == 1:
counter += 1
addon.setSetting('counter', str(counter))
return False
elif counter > 1:
bt = int(addon.getSetting('bt'))
t = ct - bt
if (t / 3600) > 23:
addon.setSetting('counter', '0')
return False
else:
if not a:
print '[Rytec EPG Downloader]: %sh blocked' % (24 - (t / 3600))
return True
else:
return True
def get_activation_code():
ac = addon.getSetting('ac')
if bencode(ac) == 'MzAyNQ==':
return True
else:
addon.openSettings()
ac = addon.getSetting('ac')
if bencode(ac) == 'MzAyNQ==':
return True
else:
return False
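# Note: 'MzAyNQ==' above is simply base64.b64encode('3025'), so the expected
# activation code is the string "3025".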
def merge_epg():
# code from enen92. thank you
print '[Rytec EPG Downloader]: merge epg'
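    # Merge strategy: the first XMLTV file is written minus its closing </tv>
    # tag, middle files drop their two header lines and their final </tv>
    # line, and the last file drops only the two header lines, leaving one
    # well-formed <tv> document.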
xml_path = get_xml_path()
temp = os.path.join(xml_path,'temp')
if not os.path.isdir(temp):
os.mkdir(temp)
out = os.path.join(xml_path,'merged_epg.xml')
if xbmcvfs.exists(out): os.remove(out)
print '[Rytec EPG Downloader]: start extracting files'
dirs, files = xbmcvfs.listdir(os.path.join(xml_path))
for f in files:
if f.endswith('.gz'):
inF = gzip.GzipFile(os.path.join(xml_path,f), 'rb')
s = inF.read()
inF.close()
outF = file(os.path.join(temp,f.replace('.gz','.xml')),'wb')
outF.write(s)
outF.close()
print '[Rytec EPG Downloader]: extracting files done'
print '[Rytec EPG Downloader]: start merging files'
dirs, xmltv_list = xbmcvfs.listdir(os.path.join(temp))
i=1
total = len(xmltv_list)
for xmltv in xmltv_list:
if xmltv.endswith('.xml'):
if i==1:
f = open(os.path.join(temp,xmltv), "r")
text = f.read()
f.close()
with open(out, "a") as myfile:
myfile.write(text.replace('</tv>',''))
elif i==total:
o = open(out,"a")
f = open(os.path.join(temp,xmltv),"r")
lines = f.readlines()
f.close()
li = 0
for line in lines:
if li == 0 or li == 1: pass
else: o.write(line)
li += 1
o.close()
else:
o = open(out,"a")
f = open(os.path.join(temp,xmltv),"r")
lines = f.readlines()
total_lines = len(lines)
f.close()
li = 0
for line in lines:
if li == 0 or li == 1: pass
elif li == (total_lines -1): pass
else: o.write(line)
li += 1
o.close()
os.remove(os.path.join(temp,xmltv))
i += 1
print '[Rytec EPG Downloader]: merging files done'
|
noba3/KoTos
|
addons/service.rytecepgdownloader/resources/lib/common.py
|
Python
|
gpl-2.0
| 5,341
|
#!/usr/bin/python3
########################################################################
# File Name: csvExample.py
# Author: chadd williams
# Date: Oct 30, 2014
# Class: CS 360
# Assignment: Example CSV reader
# Purpose: Show examples of using csv reader
########################################################################
import csv
# https://docs.python.org/3/library/csv.html
# with/as statement
#
# http://effbot.org/zone/python-with-statement.htm
# https://docs.python.org/3/reference/compound_stmts.html#the-with-statement
# see also:
# https://docs.python.org/3/reference/datamodel.html#context-managers
with open('csvExample.csv') as dataFile :
# open the csv reader with the file
reader = csv.reader(dataFile)
# read each line out of the file
for dataRow in reader :
# data row is a list of strings
print(dataRow)
# check out help(str) and look for the join method.
print(', '.join(dataRow))
# reset the reader to the top of the file
# note you are operating on the file not the reader
dataFile.seek(0)
with open('csvExampleWritten.csv', mode='w') as dataWriteFile:
# create the csv writer
writer = csv.writer(dataWriteFile)
# write a run of text with the header info
# writerow takes a list of strings
writer.writerow( ['office','lastname','firstname'])
# read each line out of the file
for dataRow in reader :
# we are writing each row in reverse!
writer.writerow( [x for x in reversed(dataRow) ] )
# with/as automatically calls close() on the file!
|
cs360f14/PythonExamples_Lectures-Public
|
LectureExamples/csvExample.py
|
Python
|
gpl-2.0
| 1,546
|
# encoding: utf-8
# module PyKDE4.kio
# from /usr/lib/python3/dist-packages/PyKDE4/kio.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdeui as __PyKDE4_kdeui
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
class KOCRDialog(__PyKDE4_kdeui.KPageDialog):
# no doc
def getOCRDialog(self, *args, **kwargs): # real signature unknown
pass
def id(self, *args, **kwargs): # real signature unknown
pass
def nextId(self, *args, **kwargs): # real signature unknown
pass
def textRecognized(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kio/KOCRDialog.py
|
Python
|
gpl-2.0
| 736
|
"""passlib.handlers.phpass - PHPass Portable Crypt
phpass located - http://www.openwall.com/phpass/
algorithm described - http://www.openwall.com/articles/PHP-Users-Passwords
phpass context - blowfish, bsdi_crypt, phpass
"""
#=============================================================================
# imports
#=============================================================================
# core
from hashlib import md5
import re
import logging; log = logging.getLogger(__name__)
from warnings import warn
# site
# pkg
from lib.passlib.utils import h64
from lib.passlib.utils.compat import b, bytes, u, uascii_to_str, unicode
import lib.passlib.utils.handlers as uh
# local
__all__ = [
"phpass",
]
#=============================================================================
# phpass
#=============================================================================
class phpass(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler):
"""This class implements the PHPass Portable Hash, and follows the :ref:`password-hash-api`.
It supports a fixed-length salt, and a variable number of rounds.
The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords:
:type salt: str
:param salt:
Optional salt string.
If not specified, one will be autogenerated (this is recommended).
If specified, it must be 8 characters, drawn from the regexp range ``[./0-9A-Za-z]``.
:type rounds: int
:param rounds:
Optional number of rounds to use.
Defaults to 19, must be between 7 and 30, inclusive.
This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}`.
:type ident: str
:param ident:
phpBB3 uses ``H`` instead of ``P`` for its identifier,
this may be set to ``H`` in order to generate phpBB3 compatible hashes.
it defaults to ``P``.
:type relaxed: bool
:param relaxed:
By default, providing an invalid value for one of the other
keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
will be issued instead. Correctable errors include ``rounds``
that are too small or too large, and ``salt`` strings that are too long.
.. versionadded:: 1.6
"""
#===================================================================
# class attrs
#===================================================================
#--GenericHandler--
name = "phpass"
setting_kwds = ("salt", "rounds", "ident")
checksum_chars = uh.HASH64_CHARS
#--HasSalt--
min_salt_size = max_salt_size = 8
salt_chars = uh.HASH64_CHARS
#--HasRounds--
default_rounds = 19
min_rounds = 7
max_rounds = 30
rounds_cost = "log2"
#--HasManyIdents--
default_ident = u("$P$")
ident_values = [u("$P$"), u("$H$")]
ident_aliases = {u("P"):u("$P$"), u("H"):u("$H$")}
#===================================================================
# formatting
#===================================================================
#$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0
# $P$
# 9
# IQRaTwmf
# eRo7ud9Fh4E2PdI0S3r.L0
@classmethod
def from_string(cls, hash):
ident, data = cls._parse_ident(hash)
rounds, salt, chk = data[0], data[1:9], data[9:]
return cls(
ident=ident,
rounds=h64.decode_int6(rounds.encode("ascii")),
salt=salt,
checksum=chk or None,
)
def to_string(self):
hash = u("%s%s%s%s") % (self.ident,
h64.encode_int6(self.rounds).decode("ascii"),
self.salt,
self.checksum or u(''))
return uascii_to_str(hash)
#===================================================================
# backend
#===================================================================
def _calc_checksum(self, secret):
# FIXME: can't find definitive policy on how phpass handles non-ascii.
if isinstance(secret, unicode):
secret = secret.encode("utf-8")
real_rounds = 1<<self.rounds
result = md5(self.salt.encode("ascii") + secret).digest()
r = 0
while r < real_rounds:
result = md5(result + secret).digest()
r += 1
return h64.encode_bytes(result).decode("ascii")
#===================================================================
# eoc
#===================================================================
#=============================================================================
# eof
#=============================================================================
|
theguardian/JIRA-APPy
|
lib/passlib/handlers/phpass.py
|
Python
|
gpl-2.0
| 4,886
|
#!/usr/bin/env python
# -*- coding: UTF8 -*-
import sys
import os
import subprocess
import webbrowser
from fechas import CDateLocal
(EXISTENCIA_ANTERIOR, ENTRADAS, SALIDAS, RETIROS, AUTOCONSUMOS, EXISTENCIA_ACTUAL, VALOR_ANTERIOR, ENTRADAS_BS, SALIDAS_BS, RETIROS_BS, AUTOCONSUMOS_BS, EXISTENCIA_BS) = range(12)
class TreeToHTML:
def __init__(self, tree=None, title="", cols=[]):
self.treeview = tree
self.html = ""
self.title = title
self.cols = cols
def tohtml(self):
self.html = '<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">'
self.html += "<h1><center>%s</center></h1>" % self.title
self.html += "<tt>"
self.html += '<table WIDTH=180% border=1 rules="groups, cols"><tr>'
for i in self.treeview.get_columns():
self.html += "<th>%s</th>" % i.get_title()
#self.html += "</table><table WIDTH=150%>"
for i in self.treeview.get_model():
self.html += "<tr>"
for j in self.cols:
if j < 2:
align = "LEFT"
else:
align = "RIGHT"
if type(j) is int:
col = j
tipo = ""
else:
col = j[0]
tipo = j[1]
text = i[col]
if text is None:
text = ""
if tipo == "dte":
text = CDateLocal(text)
align = "RIGHT"
if tipo == "time":
text = text[:5]
align = "RIGHT"
self.html += "<td align=%s>%s</td>" % (align, text)
self.html += "</utr>"
def show1(self):
self.tohtml()
f = open("inventario_valorizado.html", "w")
f.write(self.html)
f.close()
if sys.platform == "win32":
#os.system("explorer reporte.html")
webbrowser.open_new_tab('inventario_valorizado.html')
else:
#os.system("firefox %s reporte.html")
webbrowser.open_new_tab('inventario_valorizado.html')
class TotalesToHTML:
def crear_columnas():
columnas = []
columnas.append([EXISTENCIA_ANTERIOR, "Existencia Anterior", str])
columnas.append([ENTRADAS, "Entradas", str])
columnas.append([SALIDAS, "Salidas", str])
columnas.append([RETIROS, "Retiros", str])
columnas.append([AUTOCONSUMOS, "Autoconsumos", str])
columnas.append([EXISTENCIA_ACTUAL,"Existencia Actual", str])
columnas.append([VALOR_ANTERIOR, "Valor Anterior", str])
columnas.append([ENTRADAS_BS, "Entradas Bs.", str])
columnas.append([SALIDAS_BS, "Salidas Bs.", str])
columnas.append([RETIROS_BS, "Retiros Bs.", str])
columnas.append([AUTOCONSUMOS_BS, "Autoconsumos Bs.", str])
columnas.append([EXISTENCIA_BS, "Existencia Bs.", str])
col_data = [z[0] for z in columnas]
return col_data
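    # Note: crear_columnas takes no self and is evaluated once, at class
    # definition time, to build the default value of the cols argument below.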
def __init__(self, tree=None, title="", cols= crear_columnas()):
self.treeview = tree
self.html = ""
self.title = title
self.cols = cols
def tohtml(self):
self.html = '<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">'
self.html += "<h1><center>%s</center></h1>" % self.title
self.html += "<tt>"
self.html += '<table WIDTH=180% border=1 rules="groups, cols"><tr>'
for i in self.treeview.get_columns():
self.html += "<th>%s</th>" % i.get_title()
#self.html += "</table><table WIDTH=150%>"
for i in self.treeview.get_model():
self.html += "<tr>"
for j in self.cols:
if type(j) is int:
col = j
tipo = ""
else:
col = j[0]
tipo = j[1]
text = i[col]
if text is None:
text = ""
align = "RIGHT"
if tipo == "dte":
text = CDateLocal(text)
align = "LEFT"
if tipo == "time":
text = text[:5]
align = "LEFT"
self.html += "<td align=%s>%s</td>" % (align, text)
self.html += "</utr>"
def show2(self):
self.tohtml()
f = open("totales_inventario_valorizado.html", "w")
f.write(self.html)
f.close()
if sys.platform == "win32":
webbrowser.open_new_tab('totales_inventario_valorizado.html')
else:
webbrowser.open_new_tab('totales_inventario_valorizado.html')
|
jehomez/pymeadmin
|
inventariotreetohtml.py
|
Python
|
gpl-2.0
| 4,752
|
#Elaine Mao
#ekm2133
#Computer Networks
#Programming Assignment 1 - Client
import sys, os
import socket
from threading import *
#Main code for program
def main (address, port):
HOST = address
PORT = int(port)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT)) #While loop goes here?
def recv(): #Listens for incoming messages from server
while True:
data = s.recv(1024)
if not data:
sys.exit(0)
print data
if data == 'You have been logged out due to inactivity.':
break
while True:
command = raw_input('')
s.sendall(command)
reply = s.recv(1024)
print reply
break
s.close()
os._exit(1)
Thread(target=recv).start() #Creates new thread to run recv
username = '' #Sets variables for authentication loop
while True:
while True: #Authentication loop
while not username:
username, password = authenticate()
s.sendall('login ' + username + ' ' + password)
reply = s.recv(1024)
if reply == 'Your username/password combination is incorrect.':
print reply + '\n'
username = ''
elif reply == 'Sorry, that user is already logged in.':
print reply + '\n'
username = ''
elif reply == 'You have been blocked for too many incorrect tries.':
print reply + '\n'
username = ''
else:
print reply
break
break
while True:
command = raw_input('')
s.sendall(command)
reply = s.recv(1024)
print reply
if reply == 'You have been logged out.':
break
break
s.close()
sys.exit(0)
#Code to authenticate user
def authenticate ():
username = ''
password = ''
while not username:
username = raw_input('Username: ')
while not password:
password = raw_input('Password: ')
return username, password
if __name__ == "__main__": main(sys.argv[1], sys.argv[2])
|
elainekmao/chat-program
|
Client.py
|
Python
|
gpl-2.0
| 2,485
|
#! /usr/bin/env python
# (C) Copyright 2006 Nuxeo SAS <http://nuxeo.com>
# Author: bdelbosc@nuxeo.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
"""Bundle Manager setup
$Id: setup.py 49829 2006-10-24 16:34:17Z bdelbosc $
"""
from distutils.core import setup
from bundleman.version import __version__
from tests.distutilstestcommand import TestCommand
setup(
name="bundleman",
version=__version__,
description="Manage svn bundle releasing.",
long_description="""\
BundleMan tries to manage the releasing of applications built on versioned
products under subversion.
An application is seen as a product suite defined using the subversion
svn:externals property. An application is a bundle of products. Products are
versioned pieces of software.
Releasing an application is about taking care of tagging the source
repository, managing the version of each product, managing CHANGELOGs,
creating a source package archive, and giving ways to maintain a release
without blocking trunk development.
Main features:
* BundleMan is free software distributed under the GNU GPL.
  * It uses a recommended trunk/branches/tags repository layout for
    products and bundles.
* It uses standard versioning MAJOR.MINOR.BUGFIX-RELEASE for products.
  * Versioning of products is done automatically by analysing a CHANGES file.
  * Enforces CHANGELOG quality by requiring a product CHANGES file.
* It generates an application CHANGELOG.
  * There is no locking of the trunk or version conflicts when patching a
    released application.
* Can manage public, private or remote products.
* BundleMan is written in python and can be easily customized.
""",
author="Benoit Delbosc",
author_email="bdelbosc@nuxeo.com",
url="http://public.dev.nuxeo.com/~ben/bundleman/",
download_url="http://public.dev.nuxeo.com/~ben/bundleman/bundleman-%s.tar.gz"%__version__,
license='GPL',
packages=['bundleman'],
package_dir={'bundleman': 'bundleman'},
scripts=['scripts/bm-bundle',
'scripts/bm-product',
],
keywords='packaging releasing bundle subversion versioning',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Natural Language :: English',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: System :: Software Distribution',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Version Control',
'Topic :: System :: Archiving :: Packaging',
],
cmdclass = { 'test': TestCommand,}
)
|
bdelbosc/bundleman
|
setup.py
|
Python
|
gpl-2.0
| 3,472
|
import os
import pandas as pd
import glob
fname = '000'
path = os.getcwd()
files = glob.glob("*" + fname + "*")
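# Note: the three read_excel calls below assume the glob matched at least
# three workbooks; fewer files would raise an IndexError.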
df1,df2,df3 = pd.DataFrame(),pd.DataFrame(),pd.DataFrame()
df1 = pd.read_excel(files[0])
df2 = pd.read_excel(files[1])
df3 = pd.read_excel(files[2])
ez = pd.concat([df1,df2,df3], axis=1)
ez.to_excel("Pandas"+fname+"FINAL.xlsx")
print "Done!"
|
kalfasyan/DA224x
|
code/old code/fano/merger.py
|
Python
|
gpl-2.0
| 381
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#############################################################################
##
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the test suite of PySide2.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
import unittest
from sample import *
class TestNonZeroOperator(unittest.TestCase):
def testIt(self):
c = Color()
self.assertFalse(c)
c = Color(2)
self.assertTrue(c)
if __name__ == "__main__":
unittest.main()
|
qtproject/pyside-shiboken
|
tests/samplebinding/nonzero_test.py
|
Python
|
gpl-2.0
| 1,563
|
# encoding: utf-8
# module pango
# from /usr/lib/python2.7/dist-packages/gtk-2.0/pango.so
# by generator 1.135
# no doc
# imports
import gobject as __gobject
import gobject._gobject as __gobject__gobject
class FontDescription(__gobject.GBoxed):
# no doc
def better_match(self, *args, **kwargs): # real signature unknown
pass
def copy(self, *args, **kwargs): # real signature unknown
pass
def copy_static(self, *args, **kwargs): # real signature unknown
pass
def get_family(self, *args, **kwargs): # real signature unknown
pass
def get_gravity(self, *args, **kwargs): # real signature unknown
pass
def get_set_fields(self, *args, **kwargs): # real signature unknown
pass
def get_size(self, *args, **kwargs): # real signature unknown
pass
def get_size_is_absolute(self, *args, **kwargs): # real signature unknown
pass
def get_stretch(self, *args, **kwargs): # real signature unknown
pass
def get_style(self, *args, **kwargs): # real signature unknown
pass
def get_variant(self, *args, **kwargs): # real signature unknown
pass
def get_weight(self, *args, **kwargs): # real signature unknown
pass
def hash(self, *args, **kwargs): # real signature unknown
pass
def merge(self, *args, **kwargs): # real signature unknown
pass
def merge_static(self, *args, **kwargs): # real signature unknown
pass
def set_absolute_size(self, *args, **kwargs): # real signature unknown
pass
def set_family(self, *args, **kwargs): # real signature unknown
pass
def set_family_static(self, *args, **kwargs): # real signature unknown
pass
def set_gravity(self, *args, **kwargs): # real signature unknown
pass
def set_size(self, *args, **kwargs): # real signature unknown
pass
def set_stretch(self, *args, **kwargs): # real signature unknown
pass
def set_style(self, *args, **kwargs): # real signature unknown
pass
def set_variant(self, *args, **kwargs): # real signature unknown
pass
def set_weight(self, *args, **kwargs): # real signature unknown
pass
def to_filename(self, *args, **kwargs): # real signature unknown
pass
def to_string(self, *args, **kwargs): # real signature unknown
pass
def unset_fields(self, *args, **kwargs): # real signature unknown
pass
def __cmp__(self, y): # real signature unknown; restored from __doc__
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __hash__(self): # real signature unknown; restored from __doc__
""" x.__hash__() <==> hash(x) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __str__(self): # real signature unknown; restored from __doc__
""" x.__str__() <==> str(x) """
pass
__gtype__ = None # (!) real value is ''
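# Illustrative usage (added note, not part of the generated stub). With the
# PyGTK pango module, a FontDescription is typically built from a string and
# then queried through the accessors stubbed above:
#
#   import pango
#   desc = pango.FontDescription("Sans Bold 12")
#   desc.get_family()   # -> "Sans"
#   desc.get_size()     # -> size in pango units (points * pango.SCALE)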
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/pango/FontDescription.py
|
Python
|
gpl-2.0
| 3,003
|
# coding=utf-8
import os, sys, datetime, unicodedata, re, types
import xbmc, xbmcaddon, xbmcgui, xbmcvfs, urllib
import xml.etree.ElementTree as xmltree
import hashlib, hashlist
import ast
from xml.dom.minidom import parse
from traceback import print_exc
from htmlentitydefs import name2codepoint
from unidecode import unidecode
from unicodeutils import try_decode
import nodefunctions
NODE = nodefunctions.NodeFunctions()
__addon__ = xbmcaddon.Addon()
__addonid__ = __addon__.getAddonInfo('id').decode( 'utf-8' )
__addonversion__ = __addon__.getAddonInfo('version')
__xbmcversion__ = xbmc.getInfoLabel( "System.BuildVersion" ).split(".")[0]
__language__ = __addon__.getLocalizedString
__cwd__ = __addon__.getAddonInfo('path').decode("utf-8")
__addonname__ = __addon__.getAddonInfo('name').decode("utf-8")
__resource__ = xbmc.translatePath( os.path.join( __cwd__, 'resources', 'lib' ) ).decode("utf-8")
__datapath__ = os.path.join( xbmc.translatePath( "special://profile/addon_data/" ).decode('utf-8'), __addonid__ )
__profilepath__ = xbmc.translatePath( "special://profile/" ).decode('utf-8')
__skinpath__ = xbmc.translatePath( "special://skin/shortcuts/" ).decode('utf-8')
__defaultpath__ = xbmc.translatePath( os.path.join( __cwd__, 'resources', 'shortcuts').encode("utf-8") ).decode("utf-8")
# character entity reference
CHAR_ENTITY_REXP = re.compile('&(%s);' % '|'.join(name2codepoint))
# decimal character reference
DECIMAL_REXP = re.compile('&#(\d+);')
# hexadecimal character reference
HEX_REXP = re.compile('&#x([\da-fA-F]+);')
REPLACE1_REXP = re.compile(r'[\']+')
REPLACE2_REXP = re.compile(r'[^-a-z0-9]+')
REMOVE_REXP = re.compile('-{2,}')
def log(txt):
if __addon__.getSetting( "enable_logging" ) == "true":
try:
if isinstance (txt,str):
txt = txt.decode('utf-8')
message = u'%s: %s' % (__addonid__, txt)
xbmc.log(msg=message.encode('utf-8'), level=xbmc.LOGDEBUG)
except:
pass
class DataFunctions():
def __init__(self):
self.overrides = {}
self.widgetNameAndType = {}
self.backgroundName = {}
self.fallbackProperties = {}
self.fallbackRequires = {}
self.propertyRequires = None
self.templateOnlyProperties = None
self.currentProperties = None
self.defaultProperties = None
self.propertyInformation = { "fallbackProperties": {}, "fallbacks": {},
"otherProperties": [], "requires": None, "templateOnly": None }
def _get_labelID( self, labelID, action, getDefaultID = False, includeAddOnID = True ):
# This gets the unique labelID for the item we've been passed. We'll also store it, to make sure
# we don't give it to any other item.
labelID = self.createNiceName( self.slugify( labelID.replace( " ", "" ).lower() ) )
if includeAddOnID:
addon_labelID = self._get_addon_labelID( action )
if addon_labelID is not None:
labelID = addon_labelID
# If we're getting the defaultID, just return this
if getDefaultID == True:
return labelID
# Check if the labelID exists in the list
if labelID in self.labelIDList:
# We're going to add an --[int] to the end of this
count = 0
while labelID + "--" + str( count ) in self.labelIDList:
count += 1
# We can now use this one
self.labelIDList.append( labelID + "--" + str( count ) )
return labelID + "--" + str( count )
else:
# We can use this one
self.labelIDList.append( labelID )
return labelID
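    # Worked example (illustrative): if three shortcuts all resolve to the
    # labelID "movies", the calls above hand back "movies", then "movies--0",
    # then "movies--1", keeping every labelID unique.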
def _get_addon_labelID( self, action ):
# This will check the action to see if this is a program or the root of a plugin and, if so, return that as the labelID
if action is None:
return None
try:
if action.startswith( "RunAddOn(" ) and "," not in action:
return action[9:-1]
if action.startswith( "RunScript(" ) and "," not in action:
return action[10:-1]
if "plugin://" in action and "?" not in action:
# Return the action
# - less ActivateWindow(
# - The second group after being split by comma
# - Less plugin://
return action[15:-1].split( "," )[1].replace( '"', '' )[9:]
except:
return None
return None
def _clear_labelID( self ):
# This clears our stored list of labelID's
self.labelIDList = []
def _pop_labelID( self ):
self.labelIDList.pop()
def _get_shortcuts( self, group, defaultGroup = None, isXML = False, profileDir = None, defaultsOnly = False, processShortcuts = True ):
# This will load the shortcut file
# Additionally, if the override files haven't been loaded, we'll load them too
log( "Loading shortcuts for group " + group )
if profileDir is None:
profileDir = xbmc.translatePath( "special://profile/" ).decode( "utf-8" )
userShortcuts = os.path.join( profileDir, "addon_data", __addonid__, self.slugify( group, True ) + ".DATA.xml" )
skinShortcuts = os.path.join( __skinpath__ , self.slugify( group ) + ".DATA.xml")
defaultShortcuts = os.path.join( __defaultpath__ , self.slugify( group ) + ".DATA.xml" )
if defaultGroup is not None:
skinShortcuts = os.path.join( __skinpath__ , self.slugify( defaultGroup ) + ".DATA.xml")
defaultShortcuts = os.path.join( __defaultpath__ , self.slugify( defaultGroup ) + ".DATA.xml" )
if defaultsOnly:
paths = [skinShortcuts, defaultShortcuts ]
else:
paths = [userShortcuts, skinShortcuts, defaultShortcuts ]
for path in paths:
path = try_decode( path )
tree = None
if xbmcvfs.exists( path ):
file = xbmcvfs.File( path ).read()
self._save_hash( path, file )
tree = xmltree.parse( path )
if tree is not None and processShortcuts:
# If this is a user-selected list of shortcuts...
if path == userShortcuts:
if group == "mainmenu":
self._get_skin_required( tree, group, profileDir )
# Process shortcuts, marked as user-selected
self._process_shortcuts( tree, group, profileDir, True )
else:
if group == "mainmenu":
self._get_skin_required( tree, group, profileDir )
self._process_shortcuts( tree, group, profileDir )
log( " - Loaded file " + path )
return tree
elif tree is not None:
log( " - Loaded file " + path )
log( " - Returning unprocessed shortcuts" )
return tree
else:
self._save_hash( path, None )
# No file loaded
log( " - No shortcuts" )
return xmltree.ElementTree( xmltree.Element( "shortcuts" ) )
def _process_shortcuts( self, tree, group, profileDir = "special:\\profile", isUserShortcuts = False, allowAdditionalRequired = True ):
# This function will process any overrides and add them to the tree ready to be displayed
# - We will process graphics overrides, action overrides, visibility conditions
skinoverrides = self._get_overrides_skin()
useroverrides = self._get_overrides_user( profileDir )
self._clear_labelID()
# Iterate through all <shortcut/> nodes
for node in tree.getroot().findall( "shortcut" ):
# If not user shortcuts, remove locked and defaultid nodes (in case of naughty skinners!)
if isUserShortcuts == False:
searchNode = node.find( "locked" )
if searchNode is not None:
node.remove( searchNode )
# Remove any labelID node (because it confuses us!)
searchNode = node.find( "labelID" )
if searchNode is not None:
node.remove( searchNode )
# Get the action
action = node.find( "action" )
if not action.text:
action.text = "noop"
# group overrides: add an additional onclick action for a particular menu
# this will allow you to close a modal dialog before calling any other window
# http://forum.kodi.tv/showthread.php?tid=224683
allGroupOverrides = skinoverrides.findall( "groupoverride" )
for override in allGroupOverrides:
if override.attrib.get( "group" ) == group:
newaction = xmltree.SubElement( node, "additional-action" )
newaction.text = override.text
newaction.set( "condition", override.attrib.get( "condition" ) )
# Generate the labelID
labelID = self._get_labelID( self.local( node.find( "label" ).text )[3].replace( " ", "" ).lower(), action.text )
xmltree.SubElement( node, "labelID" ).text = labelID
# If there's no defaultID, set it to the labelID
defaultID = labelID
if node.find( "defaultID" ) is not None:
defaultID = node.find( "defaultID" ).text
xmltree.SubElement( node, "defaultID" ).text = defaultID
# Check that any version node matches current XBMC version
version = node.find( "version" )
if version is not None:
if __xbmcversion__ != version.text and self.checkVersionEquivalency( version.text, node.find( "action" ) ) == False:
tree.getroot().remove( node )
self._pop_labelID()
continue
# Load additional properties
additionalProperties = self.checkAdditionalProperties( group, labelID, defaultID, isUserShortcuts, profileDir )
# If icon and thumbnail are in the additional properties, overwrite anything in the .DATA.xml file
# and remove them from the additional properties
for additionalProperty in additionalProperties:
if additionalProperty[ 0 ] == "icon":
node.find( "icon" ).text = additionalProperty[ 1 ]
additionalProperties.remove( additionalProperty )
break
if node.find( "thumb" ) is None:
xmltree.SubElement( node, "thumb" ).text = ""
for additionalProperty in additionalProperties:
if additionalProperty[ 0 ] == "thumb":
node.find( "thumb" ).text = additionalProperty[ 1 ]
additionalProperties.remove( additionalProperty )
break
xmltree.SubElement( node, "additional-properties" ).text = repr( additionalProperties )
iconNode = node.find( "icon" )
if iconNode.text is None or iconNode.text == "":
iconNode.text = "DefaultShortcut.png"
# Get a skin-overriden icon
overridenIcon = self._get_icon_overrides( skinoverrides, node.find( "icon" ).text, group, labelID )
if overridenIcon is not None:
# Add a new node with the overriden icon
xmltree.SubElement( node, "override-icon" ).text = overridenIcon
# If the action uses the special://skin protocol, translate it
if "special://skin/" in action.text:
action.text = xbmc.translatePath( action.text )
# Get visibility condition
visibilityCondition = self.checkVisibility( action.text )
visibilityNode = None
if visibilityCondition != "":
visibilityNode = xmltree.SubElement( node, "visibility" )
visibilityNode.text = visibilityCondition
# Get action and visibility overrides
overrideTrees = [useroverrides, skinoverrides]
hasOverriden = False
for overrideTree in overrideTrees:
if hasOverriden == True:
continue
if overrideTree is not None:
for elem in overrideTree.findall( "override" ):
# Retrieve group property
checkGroup = None
if "group" in elem.attrib:
checkGroup = elem.attrib.get( "group" )
# If the action and (if provided) the group match...
# OR if we have a global override specified
if ( elem.attrib.get( "action" ) == action.text and ( checkGroup is None or checkGroup == group ) ) or ( elem.attrib.get( "action" ) == "globaloverride" and ( checkGroup is None or checkGroup == group ) ):
# Check the XBMC version matches
if "version" in elem.attrib:
if elem.attrib.get( "version" ) != __xbmcversion__:
continue
hasOverriden = True
# Get the visibility condition
condition = elem.find( "condition" )
overrideVisibility = None
if condition is not None:
overrideVisibility = condition.text
# Get the new action
for actions in elem.findall( "action" ):
newaction = xmltree.SubElement( node, "override-action" )
if "::ACTION::" in actions.text:
newaction.text = actions.text.replace("::ACTION::",action.text)
else:
newaction.text = actions.text
if overrideVisibility is not None:
newaction.set( "condition", overrideVisibility )
# Add visibility if no action specified
if len( elem.findall( "action" ) ) == 0:
newaction = xmltree.SubElement( node, "override-action" )
newaction.text = action.text
if overrideVisibility is not None:
newaction.set( "condition", overrideVisibility )
# Get visibility condition of any skin-provided shortcuts
for elem in skinoverrides.findall( "shortcut" ):
if elem.text == action.text and "condition" in elem.attrib:
                    # ElementTree elements are falsy when they have no children,
                    # so test explicitly against None
                    if visibilityNode is None:
                        xmltree.SubElement( node, "visibility" ).text = elem.attrib.get( "condition" )
else:
visibilityNode.text = "[" + visibilityNode.text + "] + [" + elem.attrib.get( "condition" ) + "]"
# Get any visibility conditions in the .DATA.xml file
additionalVisibility = node.find( "visible" )
if additionalVisibility is not None:
                if visibilityNode is None:
xmltree.SubElement( node, "visibility" ).text = additionalVisibility.text
else:
visibilityNode.text = "[" + visibilityNode.text + "] + [" + additionalVisibility.text + "]"
return tree
def _get_skin_required( self, listitems, group, profileDir ):
# This function builds a tree of any skin-required shortcuts not currently in the menu
# Once the tree is built, it sends them to _process_shortcuts for any overrides, etc, then adds them to the menu tree
tree = self._get_overrides_skin()
# Get an array of all actions currently in the menu
actions = []
for node in listitems.getroot().findall( "shortcut" ):
for action in node.findall( "action" ):
actions.append( action.text )
# Get a list of all skin-required shortcuts
requiredShortcuts = []
for elem in tree.findall( "requiredshortcut" ):
            if elem.text not in actions:
# We need to add this shortcut - add it to the listitems
requiredShortcut = xmltree.SubElement( listitems.getroot(), "shortcut" )
# Label and label2
xmltree.SubElement( requiredShortcut, "label" ).text = elem.attrib.get( "label" )
xmltree.SubElement( requiredShortcut, "label2" ).text = xbmc.getSkinDir()
# Icon and thumbnail
if "icon" in elem.attrib:
xmltree.SubElement( requiredShortcut, "icon" ).text = elem.attrib.get( "icon" )
else:
xmltree.SubElement( requiredShortcut, "icon" ).text = "DefaultShortcut.png"
if "thumb" in elem.attrib:
xmltree.SubElement( requiredShortcut, "thumb" ).text = elem.attrib.get( "thumbnail" )
# Action
xmltree.SubElement( requiredShortcut, "action" ).text = elem.text
# Locked
# - This is set to the skin directory, so it will only be locked in the management directory when using this skin
xmltree.SubElement( requiredShortcut, "lock" ).text = xbmc.getSkinDir()
def _get_icon_overrides( self, tree, icon, group, labelID, setToDefault = True ):
# This function will get any icon overrides based on labelID or group
if icon is None:
return
# If the icon is a VAR or an INFO, we aren't going to override
if icon.startswith( "$" ):
return icon
oldicon = None
newicon = icon
# Check for overrides
if tree is not None:
for elem in tree.findall( "icon" ):
if oldicon is None:
if ("labelID" in elem.attrib and elem.attrib.get( "labelID" ) == labelID) or ("image" in elem.attrib and elem.attrib.get( "image" ) == icon):
# LabelID matched
if "group" in elem.attrib:
if elem.attrib.get( "group" ) == group:
# Group also matches - change icon
oldicon = icon
newicon = elem.text
elif "grouping" not in elem.attrib:
# No group - change icon
oldicon = icon
newicon = elem.text
if not xbmc.skinHasImage( newicon ) and setToDefault == True:
newicon = self._get_icon_overrides( tree, "DefaultShortcut.png", group, labelID, False )
return newicon
def _get_overrides_script( self ):
# Get overrides.xml provided by script
if "script" in self.overrides:
return self.overrides[ "script" ]
overridePath = os.path.join( __defaultpath__, "overrides.xml" )
try:
tree = xmltree.parse( overridePath )
self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
self.overrides[ "script" ] = tree
return tree
except:
if xbmcvfs.exists( overridePath ):
log( "Unable to parse script overrides.xml. Invalid xml?" )
self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
else:
self._save_hash( overridePath, None )
tree = xmltree.ElementTree( xmltree.Element( "overrides" ) )
self.overrides[ "script" ] = tree
return tree
def _get_overrides_skin( self ):
# Get overrides.xml provided by skin
if "skin" in self.overrides:
return self.overrides[ "skin" ]
overridePath = os.path.join( __skinpath__, "overrides.xml" )
try:
tree = xmltree.parse( overridePath )
self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
self.overrides[ "skin" ] = tree
return tree
except:
if xbmcvfs.exists( overridePath ):
log( "Unable to parse skin overrides.xml. Invalid xml?" )
self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
else:
self._save_hash( overridePath, None )
tree = xmltree.ElementTree( xmltree.Element( "overrides" ) )
self.overrides[ "skin" ] = tree
return tree
def _get_overrides_user( self, profileDir = "special://profile" ):
# Get overrides.xml provided by user
if "user" in self.overrides:
return self.overrides[ "user" ]
overridePath = os.path.join( profileDir, "overrides.xml" )
try:
tree = xmltree.parse( xbmc.translatePath( overridePath ) )
self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
self.overrides[ "user" ] = tree
return tree
except:
if xbmcvfs.exists( overridePath ):
log( "Unable to parse user overrides.xml. Invalid xml?" )
self._save_hash( overridePath, xbmcvfs.File( overridePath ).read() )
else:
self._save_hash( overridePath, None )
tree = xmltree.ElementTree( xmltree.Element( "overrides" ) )
self.overrides[ "user" ] = tree
return tree
def _get_additionalproperties( self, profileDir ):
# Load all saved properties (widgets, backgrounds, custom properties)
if self.currentProperties is not None:
return[ self.currentProperties, self.defaultProperties ]
self.currentProperties = []
self.defaultProperties = []
path = os.path.join( profileDir, "addon_data", __addonid__, xbmc.getSkinDir().decode('utf-8') + ".properties" ).encode( "utf-8" )
#path = os.path.join( __datapath__ , xbmc.getSkinDir().decode('utf-8') + ".properties" )
if xbmcvfs.exists( path ):
# The properties file exists, load from it
try:
file = xbmcvfs.File( path ).read()
listProperties = ast.literal_eval( file )
self._save_hash( path, file )
for listProperty in listProperties:
# listProperty[0] = groupname
# listProperty[1] = labelID
# listProperty[2] = property name
# listProperty[3] = property value
# If listProperty[3] starts with $SKIN, it's from an older version of the script
                    # so quickly run it through the local function to remove the unnecessary localisation
if listProperty[3].startswith( "$SKIN["):
listProperty[3] = self.local( listProperty[3] )[3]
self.currentProperties.append( [listProperty[0], listProperty[1], listProperty[2], listProperty[3]] )
except:
self.currentProperties = [ None ]
else:
self.currentProperties = [ None ]
# Load skin defaults (in case we need them...)
tree = self._get_overrides_skin()
for elemSearch in [["widget", tree.findall( "widgetdefault" )], ["widget:node", tree.findall( "widgetdefaultnode" )], ["background", tree.findall( "backgrounddefault" )], ["custom", tree.findall( "propertydefault" )] ]:
for elem in elemSearch[1]:
# Get labelID and defaultID
labelID = elem.attrib.get( "labelID" )
defaultID = labelID
if "defaultID" in elem.attrib:
defaultID = elem.attrib.get( "defaultID" )
if elemSearch[0] == "custom":
# Custom property
if "group" not in elem.attrib:
self.defaultProperties.append( ["mainmenu", labelID, elem.attrib.get( 'property' ), elem.text, defaultID ] )
else:
self.defaultProperties.append( [elem.attrib.get( "group" ), labelID, elem.attrib.get( 'property' ), elem.text, defaultID ] )
else:
# Widget or background
if "group" not in elem.attrib:
self.defaultProperties.append( [ "mainmenu", labelID, elemSearch[ 0 ].split( ":" )[ 0 ], elem.text, defaultID ] )
if elemSearch[ 0 ] == "background":
# Get and set the background name
backgroundName = self._getBackgroundName( elem.text )
if backgroundName is not None:
self.defaultProperties.append( [ "mainmenu", labelID, "backgroundName", backgroundName, defaultID ] )
if elemSearch[0] == "widget":
# Get and set widget type and name
widgetDetails = self._getWidgetNameAndType( elem.text )
if widgetDetails is not None:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetName", widgetDetails[ "name" ], defaultID ] )
if "type" in widgetDetails:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetType", widgetDetails[ "type" ], defaultID ] )
if "path" in widgetDetails:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetPath", widgetDetails[ "path" ], defaultID ] )
if "target" in widgetDetails:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetTarget", widgetDetails[ "target" ], defaultID ] )
if elemSearch[0] == "widget:node":
# Set all widget properties from the default
if elem.text:
self.defaultProperties.append( [ "mainmenu", labelID, "widget", elem.attrib.get( "label" ), defaultID ] )
if "label" in elem.attrib:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetName", elem.attrib.get( "label" ), defaultID ] )
if "type" in elem.attrib:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetType", elem.attrib.get( "type" ), defaultID ] )
if "path" in elem.attrib:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetPath", elem.attrib.get( "path" ), defaultID ] )
if "target" in elem.attrib:
self.defaultProperties.append( [ "mainmenu", labelID, "widgetTarget", elem.attrib.get( "target" ), defaultID ] )
else:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, elemSearch[ 0 ].split( ":" )[ 0 ], elem.text, defaultID ] )
if elemSearch[ 0 ] == "background":
# Get and set the background name
backgroundName = self._getBackgroundName( elem.text )
if backgroundName is not None:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "backgroundName", backgroundName, defaultID ] )
if elemSearch[0] == "widget":
# Get and set widget type and name
widgetDetails = self._getWidgetNameAndType( elem.text )
if widgetDetails is not None:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetName", widgetDetails[ "name" ], defaultID ] )
if "type" in widgetDetails:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetType", widgetDetails[ "type" ], defaultID ] )
if "path" in widgetDetails:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetPath", widgetDetails[ "path" ], defaultID ] )
if "target" in widgetDetails:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetTarget", widgetDetails[ "target" ], defaultID ] )
if elemSearch[ 0 ] == "widget:node":
# Set all widget properties from the default
if "label" in elem.attrib:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetName", elem.attrib.get( "label" ), defaultID ] )
if "type" in elem.attrib:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetType", elem.attrib.get( "type" ), defaultID ] )
if "path" in elem.attrib:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetPath", elem.attrib.get( "path" ), defaultID ] )
if "target" in elem.attrib:
self.defaultProperties.append( [ elem.attrib.get( "group" ), labelID, "widgetTarget", elem.attrib.get( "target" ), defaultID ] )
# Load icons out of mainmenu.DATA.xml
path = os.path.join( __skinpath__ , "mainmenu.DATA.xml")
if xbmcvfs.exists( path ):
file = xbmcvfs.File( path ).read()
self._save_hash( path, file )
tree = xmltree.parse( path )
for node in tree.getroot().findall( "shortcut" ):
label = self.local( node.find( "label" ).text )[3].replace( " ", "" ).lower()
                action = node.find( "action" ).text
labelID = self._get_labelID( label, action, getDefaultID = True )
self.defaultProperties.append( [ "mainmenu", labelID, "icon", node.find( "icon" ).text ] )
returnVal = [ self.currentProperties, self.defaultProperties ]
return returnVal
def _getCustomPropertyFallbacks( self, group ):
if group in self.propertyInformation[ "fallbacks" ]:
# We've already loaded everything, return it all
return( self.propertyInformation[ "fallbackProperties" ][ group ], self.propertyInformation[ "fallbacks" ][ group ] )
# Get skin overrides
tree = self._get_overrides_skin()
# Find all fallbacks
fallbackProperties = []
fallbacks = {}
for elem in tree.findall( "propertyfallback" ):
if ("group" not in elem.attrib and group == "mainmenu") or elem.attrib.get("group") == group:
# This is a fallback for the group we've been asked for
propertyName = elem.attrib.get( "property" )
if propertyName not in fallbackProperties:
# Save the property name in the order in which we processed it
fallbackProperties.append( propertyName )
if propertyName not in fallbacks.keys():
# Create an empty list to hold fallbacks for this property
fallbacks[ propertyName ] = []
# Check whether any attribute/value pair has to match for this fallback
attribName = None
attribValue = None
if "attribute" in elem.attrib and "value" in elem.attrib:
# This particular property is a matched property
attribName = elem.attrib.get( "attribute" )
attribValue = elem.attrib.get( "value" )
# Save details
fallbacks[ propertyName ].append( ( elem.text, attribName, attribValue ) )
# Save all the results for this group
self.propertyInformation[ "fallbackProperties" ][ group ] = fallbackProperties
self.propertyInformation[ "fallbacks" ][ group ] = fallbacks
return( self.propertyInformation[ "fallbackProperties" ][ group ], self.propertyInformation[ "fallbacks" ][ group ] )
def _getPropertyRequires( self ):
if self.propertyInformation[ "requires" ] is not None:
            # We've already loaded requires and templateOnly properties, return everything
return( self.propertyInformation[ "otherProperties" ], self.propertyInformation[ "requires" ], self.propertyInformation[ "templateOnly" ] )
# Get skin overrides
tree = self._get_overrides_skin()
# Find all property requirements
requires = {}
templateOnly = []
for elem in tree.findall( "propertySettings" ):
propertyName = elem.attrib.get( "property" )
if propertyName not in self.propertyInformation[ "otherProperties" ]:
# Save the property name in the order in which we processed it
self.propertyInformation[ "otherProperties" ].append( propertyName )
if "requires" in elem.attrib:
# This property requires another to be present
requires[ propertyName ] = elem.attrib.get( "requires" )
if "templateonly" in elem.attrib and elem.attrib.get( "templateonly" ).lower() == "true":
# This property is only used by the template, and should not be written to the main menu
templateOnly.append( propertyName )
# Save all the results
self.propertyInformation[ "requires" ] = requires
self.propertyInformation[ "templateOnly" ] = templateOnly
return( self.propertyInformation[ "otherProperties" ], self.propertyInformation[ "requires" ], self.propertyInformation[ "templateOnly" ] )
def _getWidgetNameAndType( self, widgetID ):
if widgetID in self.widgetNameAndType:
return self.widgetNameAndType[ widgetID ]
tree = self._get_overrides_skin()
for elem in tree.findall( "widget" ):
if elem.text == widgetID:
widgetInfo = { "name": elem.attrib.get( "label" ) }
if "type" in elem.attrib:
widgetInfo[ "type" ] = elem.attrib.get( "type" )
if "path" in elem.attrib:
widgetInfo[ "path" ] = elem.attrib.get( "path" )
if "target" in elem.attrib:
widgetInfo[ "target" ] = elem.attrib.get( "target" )
self.widgetNameAndType[ widgetID ] = widgetInfo
return widgetInfo
self.widgetNameAndType[ widgetID ] = None
return None
def _getBackgroundName( self, backgroundID ):
if backgroundID in self.backgroundName:
return self.backgroundName[ backgroundID ]
tree = self._get_overrides_skin()
for elem in tree.findall( "background" ):
if elem.text == backgroundID:
returnString = elem.attrib.get( "label" )
self.backgroundName[ backgroundID ] = returnString
return returnString
self.backgroundName[ backgroundID ] = None
return None
def _reset_backgroundandwidgets( self ):
# This function resets all skin properties used to identify if specific backgrounds or widgets are active
tree = self._get_overrides_skin()
for elem in tree.findall( "widget" ):
xbmc.executebuiltin( "Skin.Reset(skinshortcuts-widget-" + elem.text + ")" )
for elem in tree.findall( "background" ):
xbmc.executebuiltin( "Skin.Reset(skinshortcuts-background-" + elem.text + ")" )
def createNiceName ( self, item ):
# Translate certain localized strings into non-localized form for labelID
if item == "10006":
return "videos"
if item == "342":
return "movies"
if item == "20343":
return "tvshows"
if item == "32022":
return "livetv"
if item == "10005":
return "music"
if item == "20389":
return "musicvideos"
if item == "10002":
return "pictures"
if item == "12600":
return "weather"
if item == "10001":
return "programs"
if item == "32032":
return "dvd"
if item == "10004":
return "settings"
if item == "32087":
return "radio"
else:
return item.lower( ).replace( " ", "" )
def checkVisibility ( self, action ):
# Return whether mainmenu items should be displayed
action = action.lower().replace( " ", "" )
# Catch-all for shortcuts to plugins
if "plugin://" in action:
return ""
# Video node visibility
if action.startswith( "activatewindow(videos,videodb://" ) or action.startswith( "activatewindow(10025,videodb://" ) or action.startswith( "activatewindow(videos,library://video/" ) or action.startswith( "activatewindow(10025,library://video/" ):
path = action.split( "," )
if path[ 1 ].endswith( ")" ):
path[ 1 ] = path[ 1 ][:-1]
return NODE.get_visibility( path[ 1 ] )
# Audio node visibility - Isengard and earlier
elif action.startswith( "activatewindow(musiclibrary,musicdb://" ) or action.startswith( "activatewindow(10502,musicdb://" ) or action.startswith( "activatewindow(musiclibrary,library://music/" ) or action.startswith( "activatewindow(10502,library://music/" ):
path = action.split( "," )
if path[ 1 ].endswith( ")" ):
path[ 1 ] = path[ 1 ][:-1]
return NODE.get_visibility( path[ 1 ] )
# Audio node visibility - Additional checks for Jarvis and later
# (Note when cleaning up in the future, some of the Isengard checks - those with window 10502 - are still valid...)
elif action.startswith( "activatewindow(music,musicdb://" ) or action.startswith( "activatewindow(music,library://music/" ):
path = action.split( "," )
if path[ 1 ].endswith( ")" ):
path[ 1 ] = path[ 1 ][:-1]
return NODE.get_visibility( path[ 1 ] )
# Power menu visibilities
elif action == "quit()" or action == "quit":
return "System.ShowExitButton"
elif action == "powerdown()" or action == "powerdown":
return "System.CanPowerDown"
elif action == "alarmclock(shutdowntimer,shutdown())":
return "!System.HasAlarm(shutdowntimer) + [System.CanPowerDown | System.CanSuspend | System.CanHibernate]"
elif action == "cancelalarm(shutdowntimer)":
return "System.HasAlarm(shutdowntimer)"
elif action == "suspend()" or action == "suspend":
return "System.CanSuspend"
elif action == "hibernate()" or action == "hibernate":
return "System.CanHibernate"
elif action == "reset()" or action == "reset":
return "System.CanReboot"
elif action == "system.logoff":
return "[System.HasLoginScreen | IntegerGreaterThan(System.ProfileCount,1)] + System.Loggedon"
elif action == "mastermode":
return "System.HasLocks"
elif action == "inhibitidleshutdown(true)":
return "System.HasShutdown +!System.IsInhibit"
elif action == "inhibitidleshutdown(false)":
return "System.HasShutdown + System.IsInhibit"
elif action == "restartapp":
return "[System.Platform.Windows | System.Platform.Linux] +! System.Platform.Linux.RaspberryPi"
# General visibilities
elif action == "activatewindow(weather)":
return "!IsEmpty(Weather.Plugin)"
elif action.startswith( "activatewindowandfocus(mypvr" ) or action.startswith( "playpvr" ) and __addon__.getSetting( "donthidepvr" ) == "false":
return "system.getbool(pvrmanager.enabled)"
elif action.startswith( "activatewindow(tv" ) and __addon__.getSetting( "donthidepvr" ) == "false":
return "PVR.HasTVChannels"
elif action.startswith( "activatewindow(radio" ) and __addon__.getSetting( "donthidepvr" ) == "false":
return "PVR.HasRadioChannels"
elif action.startswith( "activatewindow(videos,movie" ):
return "Library.HasContent(Movies)"
elif action.startswith( "activatewindow(videos,recentlyaddedmovies" ):
return "Library.HasContent(Movies)"
elif action.startswith( "activatewindow(videos,tvshow" ) or action.startswith( "activatewindow(videos,tvshow" ):
return "Library.HasContent(TVShows)"
elif action.startswith( "activatewindow(videos,recentlyaddedepisodes" ):
return "Library.HasContent(TVShows)"
elif action.startswith( "activatewindow(videos,musicvideo" ):
return "Library.HasContent(MusicVideos)"
elif action.startswith( "activatewindow(videos,recentlyaddedmusicvideos" ):
return "Library.HasContent(MusicVideos)"
elif action == "xbmc.playdvd()":
return "System.HasMediaDVD"
elif action.startswith( "activatewindow(eventlog" ):
return "system.getbool(eventlog.enabled)"
return ""
def checkVersionEquivalency( self, version, action, type = "shortcuts" ):
# Check whether the version specified for a shortcut has an equivalency
# to the version of Kodi we're running
trees = [ self._get_overrides_skin(), self._get_overrides_script() ]
# Set up so we can handle both groupings and shortcuts in one
if type == "shortcuts":
if action is None:
action = ""
else:
action = action.text
findElem = "shortcutEquivalent"
findAttrib = "action"
if type == "groupings":
if action is None:
action = ""
findElem = "groupEquivalent"
findAttrib = "condition"
for tree in trees:
if tree.find( "versionEquivalency" ) is None:
continue
for elem in tree.find( "versionEquivalency" ).findall( findElem ):
if elem.attrib.get( findAttrib ) is not None and elem.attrib.get( findAttrib ).lower() != action.lower():
                    # Actions don't match
continue
if int( elem.attrib.get( "version" ) ) > int( __xbmcversion__ ):
# This version of Kodi is older than the shortcut is intended for
continue
# The actions match, and the version isn't too old, so
# now check it's not too new
if elem.text == "All":
# This shortcut matches all newer versions
return True
elif int( elem.text ) >= int( __xbmcversion__ ):
return True
# The version didn't match
break
return False
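    # Example overrides.xml fragment matching the parsing above (hypothetical
    # values): this declares the shortcut valid from Kodi 14 onward with no
    # upper bound.
    #   <versionEquivalency>
    #       <shortcutEquivalent action="ActivateWindow(Weather)" version="14">All</shortcutEquivalent>
    #   </versionEquivalency>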
def checkAdditionalProperties( self, group, labelID, defaultID, isUserShortcuts, profileDir ):
# Return any additional properties, including widgets, backgrounds, icons and thumbnails
allProperties = self._get_additionalproperties( profileDir )
currentProperties = allProperties[1]
returnProperties = []
# This returns two lists...
# allProperties[0] = Saved properties
# allProperties[1] = Default properties
if isUserShortcuts and ( len( allProperties[ 0 ] ) == 0 or allProperties[ 0 ][ 0 ] is not None ):
currentProperties = allProperties[0]
# Loop through the current properties, looking for the current item
for currentProperty in currentProperties:
# currentProperty[0] = Group name
# currentProperty[1] = labelID
# currentProperty[2] = Property name
# currentProperty[3] = Property value
# currentProperty[4] = defaultID
if labelID is not None and currentProperty[0] == group and currentProperty[1] == labelID:
returnProperties.append( [ currentProperty[2], currentProperty[3] ] )
            elif len( currentProperty ) != 4:
if defaultID is not None and currentProperty[0] == group and currentProperty[4] == defaultID:
returnProperties.append( [ currentProperty[2], currentProperty[3] ] )
return returnProperties
def checkShortcutLabelOverride( self, action ):
tree = self._get_overrides_skin()
if tree is not None:
elemSearch = tree.findall( "availableshortcutlabel" )
for elem in elemSearch:
if elem.attrib.get( "action" ).lower() == action.lower():
# This matches :) Check if we're also overriding the type
if "type" in elem.attrib:
return [ elem.text, elem.attrib.get( "type" ) ]
else:
return [ elem.text ]
return None
def checkIfMenusShared( self ):
# Check if the user has asked for their menus not to be shared
if __addon__.getSetting( "shared_menu" ).lower() == "false":
return False
return True
def getSharedSkinList( self ):
# This will return a list of skins the user can import the menu from
skinNames = []
skinFiles = []
for files in xbmcvfs.listdir( __datapath__ ):
# Try deleting all shortcuts
if files:
for file in files:
if file.endswith( ".hash" ) and not file.startswith( "%s-" %( xbmc.getSkinDir() ) ):
canImport, skinName = self.parseHashFile( os.path.join( __datapath__, file.decode( 'utf-8' ) ).encode( 'utf-8' ) )
if canImport == True:
skinNames.append( skinName )
elif file.endswith( ".DATA.xml" ) and not file.startswith( "%s-" %( xbmc.getSkinDir() ) ):
skinFiles.append( file )
# Remove any files which start with one of the skin names
removeSkins = []
removeFiles = []
for skinName in skinNames:
matched = False
for skinFile in skinFiles:
if skinFile.startswith( "%s-" %( skinName ) ):
if matched == False:
matched = True
removeFiles.append( skinFile )
if matched == False:
# This skin doesn't have a custom menu
removeSkins.append( skinName )
skinNames = [x for x in skinNames if x not in removeSkins]
skinFiles = [x for x in skinFiles if x not in removeFiles]
# If there are any files left in skinFiles, we have a shared menu
if len( skinFiles ) != 0:
skinNames.insert( 0, __language__(32111) )
return (skinNames, skinFiles)
def getFilesForSkin( self, skinName ):
# This will return a list of all menu files for a particular skin
skinFiles = []
for files in xbmcvfs.listdir( __datapath__ ):
# Try deleting all shortcuts
if files:
for file in files:
if file.endswith( ".DATA.xml" ) and file.startswith( "%s-" % ( skinName ) ):
skinFiles.append( file )
return skinFiles
def parseHashFile( self, file ):
try:
hashes = ast.literal_eval( xbmcvfs.File( file ).read() )
except:
# There is no hash list, return False
return( False, "" )
canImport = False
skinName = None
for hash in hashes:
if hash[0] == "::FULLMENU::":
canImport = True
if skinName:
return( True, skinName )
if hash[0] == "::SKINDIR::":
skinName = hash[1]
if canImport == True:
return( True, skinName )
return( canImport, skinName )
def importSkinMenu( self, files, skinName = None ):
# This function copies one skins menus to another
for oldFile in files:
if skinName:
newFile = oldFile.replace( skinName, xbmc.getSkinDir() )
else:
newFile = "%s-%s" %( xbmc.getSkinDir(), oldFile )
oldPath = os.path.join( __datapath__, oldFile.decode( 'utf-8' ) ).encode( 'utf-8' )
newPath = os.path.join( __datapath__, newFile.decode( 'utf-8' ) ).encode( 'utf-8' )
# Copy file
xbmcvfs.copy( oldPath, newPath )
# Delete any .properties file
propFile = os.path.join( __datapath__, "%s.properties" %( xbmc.getSkinDir() ) ).encode( 'utf-8' )
if xbmcvfs.exists( propFile ):
xbmcvfs.delete( propFile )
def _save_hash( self, filename, file ):
if file is not None:
hasher = hashlib.md5()
hasher.update( file )
hashlist.list.append( [filename, hasher.hexdigest()] )
else:
hashlist.list.append( [filename, None] )
# in-place prettyprint formatter
def indent( self, elem, level=0 ):
i = "\n" + level*"\t"
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + "\t"
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
self.indent(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def local( self, data ):
# This is our function to manage localisation
# It accepts strings in one of the following formats:
# #####, ::LOCAL::#####, ::SCRIPT::#####
        # $LOCALIZE[#####], $SKIN[#####|skin.id|last translation]
        # $ADDON[script.skinshortcuts #####]
        # It returns a list of four elements:
        # [Number/$SKIN string, $LOCALIZE/$ADDON/local string, local string, string ID]
        # [Used for saving, used for building xml, used for displaying in dialog, raw ID]
if data is None:
return ["","","",""]
data = try_decode( data )
skinid = None
lasttranslation = None
# Get just the integer of the string, for the input forms where this is valid
if not data.find( "::SCRIPT::" ) == -1:
data = data[10:]
elif not data.find( "::LOCAL::" ) == -1:
data = data[9:]
elif not data.find( "$LOCALIZE[" ) == -1:
data = data.replace( "$LOCALIZE[", "" ).replace( "]", "" ).replace( " ", "" )
elif not data.find( "$ADDON[script.skinshortcuts" ) == -1:
data = data.replace( "$ADDON[script.skinshortcuts", "" ).replace( "]", "" ).replace( " ", "" )
# Get the integer and skin id, from $SKIN input forms
elif not data.find( "$SKIN[" ) == -1:
splitdata = data[6:-1].split( "|" )
data = splitdata[0]
skinid = splitdata[1]
lasttranslation = splitdata[2]
if data.isdigit():
if int( data ) >= 31000 and int( data ) < 32000:
# A number from a skin - we're going to return a $SKIN[#####|skin.id|last translation] unit
if skinid is None:
# Set the skinid to the current skin id
skinid = xbmc.getSkinDir()
# If we're on the same skin as the skinid, get the latest translation
if skinid == xbmc.getSkinDir():
lasttranslation = xbmc.getLocalizedString( int( data ) )
returnString = "$SKIN[" + data + "|" + skinid + "|" + lasttranslation + "]"
return [ returnString, "$LOCALIZE[" + data + "]", lasttranslation, data ]
returnString = "$SKIN[" + data + "|" + skinid + "|" + lasttranslation + "]"
return [ returnString, lasttranslation, lasttranslation, data ]
elif int( data ) >= 32000 and int( data ) < 33000:
# A number from the script
return [ data, "$ADDON[script.skinshortcuts " + data + "]", __language__( int( data ) ), data ]
else:
# A number from XBMC itself (probably)
return [ data, "$LOCALIZE[" + data + "]", xbmc.getLocalizedString( int( data ) ), data ]
# This isn't anything we can localize, just return it (in triplicate ;))
return[ data, data, data, data ]
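    # Worked example (illustrative): for a script string ID,
    #   local( "32000" ) -> [ "32000",
    #                         "$ADDON[script.skinshortcuts 32000]",
    #                         <translated string>, "32000" ]
    # i.e. the saved form, the xml-building form, the display form and the raw ID.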
    # plain helper (takes no self); declared static so slugify below can reach it
    @staticmethod
    def smart_truncate(string, max_length=0, word_boundaries=False, separator=' '):
string = string.strip(separator)
if not max_length:
return string
if len(string) < max_length:
return string
if not word_boundaries:
return string[:max_length].strip(separator)
if separator not in string:
return string[:max_length]
truncated = ''
for word in string.split(separator):
if word:
next_len = len(truncated) + len(word) + len(separator)
if next_len <= max_length:
truncated += '{0}{1}'.format(word, separator)
if not truncated:
truncated = string[:max_length]
return truncated.strip(separator)
def slugify(self, text, userShortcuts=False, entities=True, decimal=True, hexadecimal=True, max_length=0, word_boundary=False, separator='-', convertInteger=False):
# Handle integers
if convertInteger and text.isdigit():
text = "NUM-" + text
# text to unicode
if type(text) != types.UnicodeType:
text = unicode(text, 'utf-8', 'ignore')
# decode unicode ( ??? = Ying Shi Ma)
text = unidecode(text)
# text back to unicode
if type(text) != types.UnicodeType:
text = unicode(text, 'utf-8', 'ignore')
# character entity reference
if entities:
text = CHAR_ENTITY_REXP.sub(lambda m: unichr(name2codepoint[m.group(1)]), text)
# decimal character reference
if decimal:
try:
text = DECIMAL_REXP.sub(lambda m: unichr(int(m.group(1))), text)
except:
pass
# hexadecimal character reference
if hexadecimal:
try:
text = HEX_REXP.sub(lambda m: unichr(int(m.group(1), 16)), text)
except:
pass
# translate
text = unicodedata.normalize('NFKD', text)
if sys.version_info < (3,):
text = text.encode('ascii', 'ignore')
# replace unwanted characters
        text = REPLACE1_REXP.sub('', text.lower()) # drop apostrophes outright rather than turning them into '-'
text = REPLACE2_REXP.sub('-', text.lower())
# remove redundant -
text = REMOVE_REXP.sub('-', text).strip('-')
# smart truncate if requested
if max_length > 0:
            text = self.smart_truncate(text, max_length, word_boundary, '-')
if separator != '-':
text = text.replace('-', separator)
# If this is a shortcut file (.DATA.xml) and user shortcuts aren't shared, add the skin dir
if userShortcuts == True and self.checkIfMenusShared() == False:
text = "%s-%s" %( xbmc.getSkinDir(), text )
return text
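    # Worked example (illustrative):
    #   slugify( "My Add-on Menu!" ) -> "my-add-on-menu"
    # (apostrophes are dropped, every other run of non [a-z0-9] characters
    # collapses to a single separator, and leading/trailing separators are
    # stripped)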
# ----------------------------------------------------------------
# --- Functions that should get their own module in the future ---
# --- (when xml building functions are revamped/simplified) ------
# ----------------------------------------------------------------
def getListProperty( self, onclick ):
# For ActivateWindow elements, extract the path property
if onclick.startswith( "ActivateWindow" ):
# An ActivateWindow - Let's start by removing the 'ActivateWindow(' and the ')'
listProperty = onclick
# Handle (the not uncommon) situation where the trailing ')' has been forgotten
if onclick.endswith( ")" ):
listProperty = onclick[ :-1 ]
listProperty = listProperty.split( "(", 1 )[ 1 ]
# Split what we've got left on commas
listProperty = listProperty.split( "," )
# Get the part of the onclick that we're actually interested in
if len( listProperty ) == 1:
# 'elementWeWant'
return listProperty[ 0 ]
elif len( listProperty ) == 2 and listProperty[ 1 ].lower().replace( " ", "" ) == "return":
# 'elementWeWant' 'return'
return listProperty[ 0 ]
elif len( listProperty ) == 2:
# 'windowToActivate' 'elementWeWant'
return listProperty[ 1 ]
elif len( listProperty ) == 3:
# 'windowToActivate' 'elementWeWant' 'return'
return listProperty[ 1 ]
else:
# Situation we haven't anticipated - log the issue and return original onclick
log( "Unable to get 'list' property for shortcut %s" %( onclick ) )
return onclick
else:
# Not an 'ActivateWindow' - return the onclick
return onclick
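    # Worked example (illustrative):
    #   getListProperty( "ActivateWindow(Videos,MovieTitles,return)" ) -> "MovieTitles"
    #   getListProperty( "ActivateWindow(Weather)" )                   -> "Weather"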
def upgradeAction( self, action ):
# This function looks for actions used in a previous version of Kodi, and upgrades them to the current action
# Isengard + earlier music addons
if int( __xbmcversion__ ) <= 15:
# Shortcut to addon section
if action.lower().startswith( "activatewindow(musiclibrary,addons" ) and xbmc.getCondVisibility( "!Library.HasContent(Music)" ):
return( "ActivateWindow(MusicFiles,Addons,return)" )
elif action.lower().startswith( "activatewindow(10502,addons" ) and xbmc.getCondVisibility( "!Library.HasContent(Music)" ):
return( "ActivateWindow(10501,Addons,return)" )
elif action.lower().startswith( "activatewindow(musicfiles,addons" ) and xbmc.getCondVisibility( "Library.HasContent(Music)" ):
return( "ActivateWindow(MusicLibrary,Addons,return)" )
elif action.lower().startswith( "activatewindow(10501,addons" ) and xbmc.getCondVisibility( "Library.HasContent(Music)" ):
return( "ActivateWindow(10502,Addons,return)" )
# Shortcut to a specific addon
if "plugin://" in action.lower():
if action.lower().startswith( "activatewindow(musiclibrary" ) and xbmc.getCondVisibility( "!Library.HasContent(Music)" ):
return self.buildReplacementMusicAddonAction( action, "MusicFiles" )
elif action.lower().startswith( "activatewindow(10502" ) and xbmc.getCondVisibility( "!Library.HasContent(Music)" ):
return self.buildReplacementMusicAddonAction( action, "10501" )
elif action.lower().startswith( "activatewindow(musicfiles" ) and xbmc.getCondVisibility( "Library.HasContent(Music)" ):
return self.buildReplacementMusicAddonAction( action, "MusicLibrary" )
elif action.lower().startswith( "activatewindow(10501" ) and xbmc.getCondVisibility( "Library.HasContent(Music)" ):
return self.buildReplacementMusicAddonAction( action, "10502" )
# Jarvis + later music windows
if action.lower() == "activatewindow(musicfiles)" and int( __xbmcversion__ ) >= 16:
return "ActivateWindow(Music,Files,Return)"
if "," not in action: return action
if action.lower().startswith("activatewindow(musiclibrary") and int( __xbmcversion__ ) >= 16:
return "ActivateWindow(Music," + action.split( ",", 1 )[ 1 ]
# No matching upgrade
return action
def buildReplacementMusicAddonAction( self, action, window ):
# Builds a replacement action for an Isengard or earlier shortcut to a specific music addon
splitAction = action.split( "," )
# [0] = ActivateWindow([window]
# [1] = "plugin://plugin.name/path?params"
# [2] = return)
if len(splitAction) == 2:
return "ActivateWindow(%s,%s)" %( window, splitAction[ 1 ] )
else:
return "ActivateWindow(%s,%s,return)" %( window, splitAction[ 1 ] )
|
AMOboxTV/AMOBox.LegoBuild
|
script.skinshortcuts/resources/lib/datafunctions.py
|
Python
|
gpl-2.0
| 62,551
|
"""
A wrapper for the VLBA Continuum Pipeline. This module can be invoked from the
command line or by calling the function pipeWrap directly.
External Dependencies:
* Requires python 2.7 (for logging)
* *diff* -- tested with 'diff (GNU diffutils) 2.8.1'
"""
from __future__ import absolute_import
from __future__ import print_function
import os, shutil, sys, logging, logging.config, pprint, subprocess, errno
import exceptions, re, types
from optparse import OptionParser
from six.moves.urllib.error import URLError, HTTPError
from six.moves.configparser import NoSectionError
import VLBAContPipe, VLBACal, PipeUtil
from six.moves import range
from six.moves import input
try:
logging.config.fileConfig("logging.conf")
logger = logging.getLogger("obitLog.VLBAContPipeWrap")
except NoSectionError as e:
logging.basicConfig(filename="VLBAContPipeWrap.log", level=logging.DEBUG)
logger = logging
errmsg = "CANNOT FIND logging.conf. USING BASIC LOGGING CONFIG INSTEAD."
logger.error( errmsg )
print( errmsg )
def pipeWrap( startDate, endDate, options ):
"""
A wrapper for the VLBA continuum pipeline, :mod:`VLBAContPipe`. Query archive
and begin processing all datafiles in response. Process each file in a
unique working directory.
:param string startDate: start of date range for archive query
:param string endDate: end of date range for archive query; if NONE then use
startDate
:param optparse.Option options: command line options (returned by
:func:`optparse.parse_args`)
"""
logger.info("Pipeline Wrapper Begins")
##### pipeWrap PARAMETERS #####
aipsSetup = 'PipeAIPSSetup.py' # pipeline AIPS setup file
# validation area
pipeProducts = '/lustre/aoc/users/jcrossle/VLBAPipeProducts'
checkDir = pipeProducts + '/check'
fitsDir = '/lustre/aoc/users/jcrossle/fits' # fits download directory
outfilesPickle = 'outfiles.pickle' # output files pickle
logConfig = 'logging.conf' # logging module configuration
wrapLog = 'VLBAContPipeWrap.log' # wrapper log, from logConfig file
    pipeNowLog = 'pipe.now.log' # link to log of currently running pipeline
pipeRecord = '/users/jcrossle/vlbaPipeline/record/pipelineRecord.pickle'
###############################
# Send query and print summary
logger.info("Submitting query to archive")
responseLines = PipeUtil.QueryArchive( startDate, endDate, options.project )
fileDictList = PipeUtil.ParseArchiveResponse( responseLines )
filtered = PipeUtil.FilterFileList( fileDictList )
# logger.debug( "Removed these files from archive response:\n" +
# PipeUtil.SummarizeArchiveResponse( filtered ) )
pipeRecordList = PipeUtil.FetchObject( pipeRecord )
logger.info( "\n" + PipeUtil.SummarizeArchiveResponse( fileDictList, pipeRecordList ) )
if options.metadata:
str1 = pprint.pformat( fileDictList )
logger.info( "\n" + str1 )
if options.query:
return
if not options.all:
if options.fitsidi:
print("Select file number(s) for IDI download and processing (ex: 1, 3, 4): ", end=' ')
else:
print("Select file number(s) for download and processing (ex: 1, 3, 4): ", end=' ')
        selection = eval(input())  # note: eval of raw console input; accepts e.g. "1" or "1, 3, 4"
# convert selection to list
if type(selection) == int:
selection = [ selection ]
elif type(selection) == tuple:
selection = list( selection )
else:
raise TypeError("Selection must be an integer or comma-separated" +
" sequence of integers")
# Test that input numbers are valid
for num in selection:
test = fileDictList[ num ]
# Throw out file dicts that are not in selection.
# Work thru indices in reverse, so pop(index) doesn't go out of range.
indices = list(range( len( fileDictList )))
indices.reverse()
for index in indices:
if not ( index in selection ):
fileDictList.pop( index ) # remove index item
else:
logger.info("Processing all files in archive response.")
# Loop over all files in response, setup working directory and process
cwd = os.getcwd()
for i, fileDict in enumerate( fileDictList ):
logger.info("Preparing to process file (" + str(i+1) +
" / " + str( len(fileDictList) ) + "):\n" +
pprint.pformat(fileDict))
try:
IDIList = []
if options.fitsidi:
IDIList = PipeUtil.DownloadIDIFiles( fileDict, fitsDir )
else:
PipeUtil.DownloadArchiveFile( fileDict, fitsDir ) # start download
# create working subdirectory
dirName = fileDict['project_code'] + '_' + fileDict['DATE'] + '_' + \
fileDict['arch_file_id']
logger.debug("Creating directory " + dirName)
if not os.path.exists( dirName):
os.mkdir( dirName )
else:
logger.warning("Project working directory exists: " + dirName)
shutil.copy( aipsSetup, dirName ) # copy AIPSSetup into dir
# Copy logging config file; set log file name for processing
newLogConfig = dirName + '/' + logConfig
newLogName = fileDict['project_code'] + '_' + \
VLBACal.VLBAGetSessionCode( fileDict ) + '_' + \
VLBACal.VLBAGetBandLetter( fileDict )[0] + '.log'
substitute( logConfig, newLogConfig, wrapLog, newLogName )
# Create symlink to current pipeline log
if os.path.lexists( pipeNowLog ):
os.remove( pipeNowLog )
os.symlink( dirName + '/' + newLogName, pipeNowLog )
os.chdir( dirName )
# create pipeline input parm file
parmList = VLBACal.VLBAGetParms( fileDict, checkDir, IDIList=IDIList )
parmFile = "VLBAContParm_" + fileDict['project_code'] + '.py'
VLBACal.VLBAMakeParmFile( parmList, parmFile )
if not options.fitsidi:
# check that d/l is complete
PipeUtil.PollDownloadStatus( fileDict, fitsDir )
# Start the pipeline separate, synchronous process
logger.info("Starting pipeline processing (file " + str(i+1) +
" / " + str( len(fileDictList) ) + ")" )
cmd = ("python", os.environ['OBIT'] + "/python/VLBAContPipe.py",
aipsSetup, parmFile)
cmdStr = ''
for s in cmd: cmdStr += s + ' '
logger.info("Command: " + cmdStr)
subprocess.check_call( cmd )
# Validate outfiles: check that outfiles is consistent with CWD
if not VLBACal.VLBAValidOutfiles():
                logger.warning('Expected data products (outfiles.pickle) and current ' +
                               'working directory are inconsistent.')
# Copy files to check dir
projCheckDir = checkDir + '/' + dirName
copyFiles( outfilesPickle, projCheckDir )
os.chdir( cwd )
os.remove( pipeNowLog )
# if all contents copied, remove working directory; otherwise keep
if checkDirEquality( dirName, projCheckDir ):
logger.info("Removing " + dirName)
shutil.rmtree( dirName )
else:
logger.error("Not removing " + dirName)
# Make record of processed data set
logger.info("Adding archive file metadata to pipeline processing record:\n" +
pipeRecord )
logger.debug("Setting pipe.STATUS to 'check'")
fileDict['pipe.STATUS']='check' # Set data set pipeline status to check
makeFileRecord( fileDict, pipeRecord )
# Handle exception and continue
        except subprocess.CalledProcessError as e:
            logger.error("Pipeline process failed! Error:\n" + str(e))
os.chdir( cwd )
os.remove( pipeNowLog )
# Handle exception and re-raise
except HTTPError as e:
logger.error("Server could not fulfill request. Error code: " + \
str(e.code))
raise
except URLError as e:
logger.error("Failed to reach the server. Reason: " + str(e.reason))
raise
except IOError as e:
logger.error("File " + e.filename + " not found\n" + \
" Cannot copy files to validation directory" )
os.chdir( cwd )
os.remove( pipeNowLog )
raise
logger.info("Pipeline Wrapper Ends")
def copyFiles( outfilesPickle, destDir='./output' ):
"""
Copy output files to destination directory. This is done using rsync.
:param string outfilesPickle: name of outfiles pickle file
:param string destDir: directory to which files should be copied
:raises subprocess.CalledProcessError: if rsync returns an error value
:raises exceptions.IOError: if outfilesPickle does not exist
"""
# Get a list of all output files
if not os.path.exists( outfilesPickle ):
raise exceptions.IOError( errno.ENOENT, "File not found",
outfilesPickle )
outfiles = PipeUtil.FetchObject( outfilesPickle )
outfilesList = VLBACal.VLBAMakeOutfilesList( outfiles )
logger.info( "Copying (rsync-ing) output files to " + destDir )
# Prepare destination directory
if not os.path.isdir( destDir ): # not dir
if os.path.exists( destDir ): # not dir & exists
logger.error(
"Copy failed: destination exists and is not a directory." )
            # OSError takes positional arguments, not keywords
            raise OSError( errno.ENOTDIR,
                           "File exists and is not a directory",
                           destDir )
else: # not dir & not exists
os.makedirs( destDir )
# Copy files using rsync
cmd = [ "rsync", "--verbose", "--times" ]
cmd.extend( outfilesList )
cmd.append( destDir )
try:
subprocess.check_call( cmd )
except subprocess.CalledProcessError as e:
logger.error(
"Error occurred while rsyncing to destination directory.\n" +
"rsync return value: " + str(e.returncode) )
raise
logger.info( "All files copied to " + destDir )
def checkDirEquality( dir1, dir2 ):
"""
Compare the directories given by paths *dir1* and *dir2*. If the files
contained in the two directories are the same, return *True*. If any files
differ, return *False*.
Use system utility *diff* for directory comparison. *diff* compares directory
*and* file content in one call. This is not easy to do with Python's filecmp
module.
:param string dir1: first directory to compare
:param string dir2: second directory to compare
:rtype: boolean
"""
logger.info("Comparing contents of work and validation directories.")
logger.debug("diff will ignore logging.conf")
cmd = ( "diff", dir1, dir2, "--exclude=logging.conf" )
returncode = 0
try:
subprocess.check_call( cmd )
except subprocess.CalledProcessError as e:
returncode = e.returncode
logger.error(
"Pipeline working directory and check directory differ.\n"
"System call to diff returns code: " + str(returncode) )
if returncode == 0:
logger.info("Directories equal.")
return True
else:
return False
def substitute( inFile, outFile, str1, str2 ):
"""
Write the contents of inFile to outFile with each instance of str1
replaced with str2.
:param string inFile: input file
:param string outFile: output file
:param string str1: string to be replaced
:param string str2: replacement string
"""
logger.debug("Writing " + inFile + " to " + outFile +
" while replacing " + str1 + " with " + str2 )
o = open( outFile, "w" )
data = open( inFile ).read()
o.write( re.sub( str1, str2, data ) )
o.close()
def makeFileRecord( fileDict, pickle ):
"""
Make a record of the archive file processed.
The record is a Python pickle file that holds a list of archive file
dictionaries.
:param dict fileDict: archive file dictionary
:param string pickle: name of pickle file to use
"""
recordList = PipeUtil.FetchObject( pickle )
recordList.append( fileDict )
PipeUtil.SaveObject( recordList, pickle, True )
def makeFileRecord_archive( fileDictList, pickle, archFileID ):
"""
Add a file to the pipeline processing record with 'pipe.STATUS':'archive'.
"""
fileDict = {}
for fdict in fileDictList:
if fdict['arch_file_id'] == archFileID:
fileDict = fdict
break
fileDict['pipe.STATUS'] = 'archive'
print("Adding this dictionary to the pipeline record:")
pprint.pprint( fileDict )
makeFileRecord( fileDict, pickle )
if __name__ == "__main__":
# Get inputs from command line
usage = "usage: %prog [options] StartDate [StopDate]"
parser = OptionParser( usage=usage )
parser.add_option( '-P', '--project', help="project code" )
parser.add_option( '-q', '--query', action="store_true", default=False,
help="query and summary only" )
parser.add_option( '-a', '--all', action="store_true", default=False,
help="Automatically process all files in archive response" )
parser.add_option( '-m', '--metadata', action="store_true", default=False,
help="Display all metadata in archive response" )
parser.add_option( '-i', '--fitsidi', action="store_true", default=False,
help="Download and fill FITS IDI files (default is FITS AIPS)" )
(options, args) = parser.parse_args()
try:
if len(args) < 1:
logger.critical("Too few arguments given")
parser.print_help()
sys.exit()
elif len(args) == 1:
pipeWrap( args[0], args[0], options)
else:
pipeWrap( args[0], args[1], options)
finally:
# If using python version < 2.5, prevent logging module error at exit
if sys.version_info < (2,5):
try:
logging.shutdown()
except KeyError as e:
print(("Catching known logging module error for " +
"python version < 2.5. "))
|
kernsuite-debian/obit
|
python/VLBAContPipeWrap.py
|
Python
|
gpl-2.0
| 14,499
|
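For reference, checkDirEquality in the wrapper above boils down to "run the system diff utility and test its exit status". A minimal standalone sketch of that idea; the paths and the excluded file name are placeholders, not values from the pipeline.

import subprocess

def dirs_equal(dir1, dir2, exclude=None):
    """Return True if diff finds no differences between the two trees."""
    cmd = ["diff", "-r", dir1, dir2]
    if exclude:
        cmd.append("--exclude=" + exclude)
    # diff exits with 0 when the trees match and 1 when they differ
    return subprocess.call(cmd) == 0

if __name__ == "__main__":
    print(dirs_equal("/tmp/workdir", "/tmp/checkdir", exclude="logging.conf"))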
# -*- coding: utf-8 -*-
###############################################################################
#
# GetLegislator
# Allows you to search for information on an individual legislator.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetLegislator(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetLegislator Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetLegislator, self).__init__(temboo_session, '/Library/SunlightLabs/Congress/Legislator/GetLegislator')
def new_input_set(self):
return GetLegislatorInputSet()
def _make_result_set(self, result, path):
return GetLegislatorResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetLegislatorChoreographyExecution(session, exec_id, path)
class GetLegislatorInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetLegislator
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Sunlight Labs.)
"""
super(GetLegislatorInputSet, self)._set_input('APIKey', value)
def set_AllLegislators(self, value):
"""
Set the value of the AllLegislators input for this Choreo. ((optional, boolean) A boolean flag indicating to search for all legislators even when they are no longer in office.)
"""
super(GetLegislatorInputSet, self)._set_input('AllLegislators', value)
def set_BioguideID(self, value):
"""
Set the value of the BioguideID input for this Choreo. ((optional, string) The bioguide_id of the legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('BioguideID', value)
def set_CRPID(self, value):
"""
Set the value of the CRPID input for this Choreo. ((optional, string) The crp_id associated with a legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('CRPID', value)
def set_District(self, value):
"""
Set the value of the District input for this Choreo. ((optional, integer) Narrows the search result by district number.)
"""
super(GetLegislatorInputSet, self)._set_input('District', value)
def set_FECID(self, value):
"""
Set the value of the FECID input for this Choreo. ((optional, string) The fec_id associated with the legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('FECID', value)
def set_FacebookID(self, value):
"""
Set the value of the FacebookID input for this Choreo. ((optional, string) The facebook id of a legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('FacebookID', value)
def set_FirstName(self, value):
"""
Set the value of the FirstName input for this Choreo. ((optional, string) The first name of a legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('FirstName', value)
def set_Gender(self, value):
"""
Set the value of the Gender input for this Choreo. ((optional, string) Narrows the search result by gender.)
"""
super(GetLegislatorInputSet, self)._set_input('Gender', value)
def set_GovTrackID(self, value):
"""
Set the value of the GovTrackID input for this Choreo. ((optional, string) The govtrack_id associated with a legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('GovTrackID', value)
def set_InOffice(self, value):
"""
Set the value of the InOffice input for this Choreo. ((optional, boolean) Whether or not the individual is in office currently. Valid values are true or false.)
"""
super(GetLegislatorInputSet, self)._set_input('InOffice', value)
def set_LastName(self, value):
"""
Set the value of the LastName input for this Choreo. ((conditional, string) The last name of the legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('LastName', value)
def set_Party(self, value):
"""
Set the value of the Party input for this Choreo. ((optional, string) Narrows the search result by party (e.g. "D" or "R").)
"""
super(GetLegislatorInputSet, self)._set_input('Party', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
"""
super(GetLegislatorInputSet, self)._set_input('ResponseFormat', value)
def set_State(self, value):
"""
Set the value of the State input for this Choreo. ((optional, string) A state abbreviation to narrow the search results.)
"""
super(GetLegislatorInputSet, self)._set_input('State', value)
def set_Title(self, value):
"""
Set the value of the Title input for this Choreo. ((optional, string) The title associated with the individual to return.)
"""
super(GetLegislatorInputSet, self)._set_input('Title', value)
def set_TwitterID(self, value):
"""
Set the value of the TwitterID input for this Choreo. ((optional, string) The twitter id of the legislator to return (note, this can be a twitter screen name).)
"""
super(GetLegislatorInputSet, self)._set_input('TwitterID', value)
def set_VoteSmartID(self, value):
"""
Set the value of the VoteSmartID input for this Choreo. ((optional, integer) The votesmart_id of a legislator to return.)
"""
super(GetLegislatorInputSet, self)._set_input('VoteSmartID', value)
class GetLegislatorResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetLegislator Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from the Sunlight Congress API.)
"""
return self._output.get('Response', None)
class GetLegislatorChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetLegislatorResultSet(response, path)
|
willprice/arduino-sphere-project
|
scripts/example_direction_finder/temboo/Library/SunlightLabs/Congress/Legislator/GetLegislator.py
|
Python
|
gpl-2.0
| 7,597
|
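A hedged usage sketch for a generated Temboo Choreo like GetLegislator above: open a session, fill an input set, execute, and read the result. The account name, app key, and Sunlight API key below are placeholders, not real credentials.

from temboo.core.session import TembooSession
# assumes GetLegislator from the module above is importable

session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')  # placeholders
choreo = GetLegislator(session)
inputs = choreo.new_input_set()
inputs.set_APIKey('SUNLIGHT_API_KEY')  # placeholder
inputs.set_LastName('Smith')
results = choreo.execute_with_results(inputs)
print(results.get_Response())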
#!/usr/bin/python
# Rishabh Das <rishabh5290@gmail.com>
#
# This program is a free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the license, or (at your option) any
# later version. See http://www.gnu.org/copyleft/gpl.html for the full text of
# the license.
import argparse
import ConfigParser
import os
import json
import pydoc
import inspect
import sys
from lib import pyjdox
def get_doc(pyjdoxobj, filepath):
"""
Get code documentation
"""
mod = pydoc.importfile(filepath)
jdata = pyjdoxobj.describe(mod, filepath)
return jdata
def main():
"""
Take user input and generate Python documentation as JSON
"""
# Get user Input
parser = argparse.ArgumentParser(description='pyjdox parser')
parser.add_argument('-f', '--file', help='Pass python file')
parser.add_argument('-d', '--dir', help='Path to library source tree')
args = parser.parse_args()
# Create object for pyjdox
pyjdoxobj = pyjdox.pyjdox()
# Read configuration
config = ConfigParser.ConfigParser()
if os.path.isfile('config'):
config.read('config')
output_dir = config.get('pyjdox', 'output-dir')
else:
# No config file present: fall back to a default so output_dir is defined below
output_dir = 'output'
# Create output directory
if not os.path.exists(output_dir) or not os.path.isdir(output_dir):
os.makedirs(output_dir)
# Run pyjdox on user input
if args.file:
if args.file.startswith('~'):
args.file = os.path.expanduser(args.file)
if os.path.isfile(args.file):
jdata = get_doc(pyjdoxobj, args.file)
fname = '%s.json' % os.path.basename(args.file)
fname = os.path.join(output_dir, fname)
with open(fname, 'w') as jfile:
json.dump(jdata, jfile)
print("Output has been created successfully")
print("Path: %s" % (fname))
# Run pyjdox on source tree
if args.dir:
filelist = []
output_dir = os.path.join(output_dir, os.path.basename(args.dir))
if args.dir.startswith('~'):
args.dir = os.path.expanduser(args.dir)
try:
if not os.path.exists(output_dir) or not os.path.isdir(output_dir):
os.makedirs(output_dir)
for folder, subs, files in os.walk(args.dir):
for filename in files:
if filename.endswith('.py'):
filelist.append(os.path.join(folder, filename))
for fname in filelist:
print(fname)
jdata = get_doc(pyjdoxobj, fname)
ofloc = os.path.dirname((os.path.relpath(fname, args.dir)))
ofloc = os.path.join(output_dir, ofloc)
if not os.path.exists(ofloc) or not os.path.isdir(ofloc):
os.makedirs(ofloc)
ofname = '%s.json' % os.path.basename(fname)
ofname = os.path.join(ofloc, ofname)
with open(ofname, 'w') as jfile:
json.dump(jdata, jfile)
print("Output has been created successfully")
print("Path: %s" % (output_dir))
except Exception as e:
print(str(e))
sys.exit(1)
if __name__ == '__main__':
main()
|
rishabhdas/pyjdox
|
pyjdox.py
|
Python
|
gpl-2.0
| 3,316
|
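The core trick in get_doc() above is pydoc.importfile, which loads an arbitrary .py file as a module object without it being on the import path. A minimal sketch of that mechanism; the path is a placeholder:

import pydoc
import inspect

mod = pydoc.importfile('/path/to/some_module.py')  # placeholder path
# List the functions defined in that file together with their docstrings
for name, func in inspect.getmembers(mod, inspect.isfunction):
    print(name, inspect.getdoc(func))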
#!/usr/bin/python
#coding=utf-8
#FILENAME : __models.py__
#DESCRIBE:
import google_models as gdb
from google.appengine.ext import db
import logging
import tools
class Anime(gdb.Model):
u"""
用来记录动漫更新信息
"""
#TODO把real_db的验证改成装饰器算了
def __init__(self, name = None, index = None, update_time = None, thisdb = None):
self.real_db = thisdb
self.name = name
self.index = index
self.update_time = update_time
@staticmethod
def check_used(name):
query = gdb.Anime.all()
query.filter('name =', name)
result = query.fetch(5)
return len(result) > 0
def store(self):
if self.real_db:
self.name = self.real_db.name
self.index = self.real_db.index
self.update_time = self.real_db.update_time
query = gdb.Anime.all()
if not self.name:
raise NotImplementedError("do not have a name")
query.filter('name =', self.name)
result = query.fetch(5)
if len(result) > 0:
self.real_db = result[0]
self.name = self.real_db.name
self.index = self.real_db.index
self.update_time = self.real_db.update_time
def update(self):
if self.real_db is None:
self.store()
if self.real_db is None:
ani = gdb.Anime(name = self.name, index = int(self.index), \
update_time = self.update_time)
ani.put()
return
self.real_db.name = self.name
self.real_db.index = self.index
self.real_db.update_time = self.update_time
self.real_db.put()
def get_update(self):
if self.real_db is None:
self.store()
return self.update_time
|
ariwaranosai/twitter_bot
|
twitter_bot/models.py
|
Python
|
gpl-2.0
| 1,903
|
import logging, os, sys, subprocess, tempfile, traceback
import time, threading
from autotest_lib.client.common_lib import utils
from autotest_lib.server import utils as server_utils
from autotest_lib.server.hosts import abstract_ssh, monitors
MONITORDIR = monitors.__path__[0]
SUPPORTED_PYTHON_VERS = ('2.4', '2.5', '2.6', '2.7')
DEFAULT_PYTHON = '/usr/bin/python'
class Error(Exception):
pass
class InvalidPatternsPathError(Error):
"""An invalid patterns_path was specified."""
class InvalidConfigurationError(Error):
"""An invalid configuration was specified."""
class FollowFilesLaunchError(Error):
"""Error occurred launching followfiles remotely."""
def list_remote_pythons(host):
"""List out installed pythons on host."""
result = host.run('ls /usr/bin/python[0-9]*')
return result.stdout.splitlines()
def select_supported_python(installed_pythons):
"""Select a supported python from a list"""
for python in installed_pythons:
if python[-3:] in SUPPORTED_PYTHON_VERS:
return python
def copy_monitordir(host):
"""Copy over monitordir to a tmpdir on the remote host."""
tmp_dir = host.get_tmp_dir()
host.send_file(MONITORDIR, tmp_dir)
return os.path.join(tmp_dir, 'monitors')
def launch_remote_followfiles(host, lastlines_dirpath, follow_paths):
"""Launch followfiles.py remotely on follow_paths."""
logging.info('Launching followfiles on target: %s, %s, %s',
host.hostname, lastlines_dirpath, str(follow_paths))
# First make sure a supported Python is on host
installed_pythons = list_remote_pythons(host)
supported_python = select_supported_python(installed_pythons)
if not supported_python:
if DEFAULT_PYTHON in installed_pythons:
logging.info('No versioned Python binary found, '
'defaulting to: %s', DEFAULT_PYTHON)
supported_python = DEFAULT_PYTHON
else:
raise FollowFilesLaunchError('No supported Python on host.')
remote_monitordir = copy_monitordir(host)
remote_script_path = os.path.join(remote_monitordir, 'followfiles.py')
followfiles_cmd = '%s %s --lastlines_dirpath=%s %s' % (
supported_python, remote_script_path,
lastlines_dirpath, ' '.join(follow_paths))
remote_ff_proc = subprocess.Popen(host._make_ssh_cmd(followfiles_cmd),
stdin=open(os.devnull, 'r'),
stdout=subprocess.PIPE, shell=True)
def wait_for_crash():
"""
Warning: this is not threadsafe due to the call to
host.job.record()
"""
# Give it enough time to crash if it's going to (it shouldn't).
time.sleep(5)
doa = remote_ff_proc.poll()
if doa:
# We're hosed, there is no point in proceeding.
logging.fatal('Failed to launch followfiles on target,'
' aborting logfile monitoring: %s', host.hostname)
if host.job:
# Put a warning in the status.log
host.job.record(
'WARN', None, 'logfile.monitor',
'followfiles launch failed')
crash_waiter = threading.Thread(target=wait_for_crash)
crash_waiter.start()
return remote_ff_proc
def resolve_patterns_path(patterns_path):
"""Resolve patterns_path to existing absolute local path or raise.
As a convenience we allow users to specify a non-absolute patterns_path.
However these need to be resolved before allowing them to be passed down
to console.py.
For now we expect non-absolute ones to be in self.monitordir.
"""
if os.path.isabs(patterns_path):
if os.path.exists(patterns_path):
return patterns_path
else:
raise InvalidPatternsPathError('Absolute path does not exist.')
else:
patterns_path = os.path.join(MONITORDIR, patterns_path)
if os.path.exists(patterns_path):
return patterns_path
else:
raise InvalidPatternsPathError('Relative path does not exist.')
def launch_local_console(
input_stream, console_log_path, pattern_paths=None):
"""Launch console.py locally.
This will process the output from followfiles and
fire warning messages per configuration in pattern_paths.
"""
r, w = os.pipe()
local_script_path = os.path.join(MONITORDIR, 'console.py')
console_cmd = [sys.executable, local_script_path]
if pattern_paths:
console_cmd.append('--pattern_paths=%s' % ','.join(pattern_paths))
console_cmd += [console_log_path, str(w)]
# Setup warning stream before we actually launch
warning_stream = os.fdopen(r, 'r', 0)
devnull_w = open(os.devnull, 'w')
# Launch console.py locally
console_proc = subprocess.Popen(
console_cmd, stdin=input_stream,
stdout=devnull_w, stderr=devnull_w)
os.close(w)
return console_proc, warning_stream
def _log_and_ignore_exceptions(f):
"""Decorator: automatically log exception during a method call.
"""
def wrapped(self, *args, **dargs):
try:
return f(self, *args, **dargs)
except Exception, e:
print "LogfileMonitor.%s failed with exception %s" % (f.__name__, e)
print "Exception ignored:"
traceback.print_exc(file=sys.stdout)
wrapped.__name__ = f.__name__
wrapped.__doc__ = f.__doc__
wrapped.__dict__.update(f.__dict__)
return wrapped
class LogfileMonitorMixin(abstract_ssh.AbstractSSHHost):
"""This can monitor one or more remote files using tail.
This class and its counterpart script, monitors/followfiles.py,
add most functionality one would need to launch and monitor
remote tail processes on self.hostname.
This can be used by subclassing normally or by calling
NewLogfileMonitorMixin (below)
It is configured via two class attributes:
follow_paths: Remote paths to monitor
pattern_paths: Local paths to alert pattern definition files.
"""
follow_paths = ()
pattern_paths = ()
def _initialize(self, console_log=None, *args, **dargs):
super(LogfileMonitorMixin, self)._initialize(*args, **dargs)
self._lastlines_dirpath = None
self._console_proc = None
self._console_log = console_log or 'logfile_monitor.log'
def reboot_followup(self, *args, **dargs):
super(LogfileMonitorMixin, self).reboot_followup(*args, **dargs)
self.__stop_loggers()
self.__start_loggers()
def start_loggers(self):
super(LogfileMonitorMixin, self).start_loggers()
self.__start_loggers()
def remote_path_exists(self, remote_path):
"""Return True if remote_path exists, False otherwise."""
return not self.run(
'ls %s' % remote_path, ignore_status=True).exit_status
def check_remote_paths(self, remote_paths):
"""Return list of remote_paths that currently exist."""
return [
path for path in remote_paths if self.remote_path_exists(path)]
@_log_and_ignore_exceptions
def __start_loggers(self):
"""Start multifile monitoring logger.
Launch monitors/followfiles.py on the target and hook its output
to monitors/console.py locally.
"""
# Check if follow_paths exist, in the case that one doesn't
# emit a warning and proceed.
follow_paths_set = set(self.follow_paths)
existing = self.check_remote_paths(follow_paths_set)
missing = follow_paths_set.difference(existing)
if missing:
# Log warning that we are missing expected remote paths.
logging.warn('Target %s is missing expected remote paths: %s',
self.hostname, ', '.join(missing))
# If none of them exist just return (for now).
if not existing:
return
# Create a new lastlines_dirpath on the remote host if not already set.
if not self._lastlines_dirpath:
self._lastlines_dirpath = self.get_tmp_dir(parent='/var/tmp')
# Launch followfiles on target
self._followfiles_proc = launch_remote_followfiles(
self, self._lastlines_dirpath, existing)
# Ensure we have sane pattern_paths before launching console.py
sane_pattern_paths = []
for patterns_path in set(self.pattern_paths):
try:
patterns_path = resolve_patterns_path(patterns_path)
except InvalidPatternsPathError, e:
logging.warn('Specified patterns_path is invalid: %s, %s',
patterns_path, str(e))
else:
sane_pattern_paths.append(patterns_path)
# Launch console.py locally, pass in output stream from followfiles.
self._console_proc, self._logfile_warning_stream = \
launch_local_console(
self._followfiles_proc.stdout, self._console_log,
sane_pattern_paths)
if self.job:
self.job.warning_loggers.add(self._logfile_warning_stream)
def stop_loggers(self):
super(LogfileMonitorMixin, self).stop_loggers()
self.__stop_loggers()
@_log_and_ignore_exceptions
def __stop_loggers(self):
if self._console_proc:
utils.nuke_subprocess(self._console_proc)
utils.nuke_subprocess(self._followfiles_proc)
self._console_proc = self._followfiles_proc = None
if self.job:
self.job.warning_loggers.discard(self._logfile_warning_stream)
self._logfile_warning_stream.close()
def NewLogfileMonitorMixin(follow_paths, pattern_paths=None):
"""Create a custom in-memory subclass of LogfileMonitorMixin.
Args:
follow_paths: list; Remote paths to tail.
pattern_paths: list; Local alert pattern definition files.
"""
# follow_paths is required; pattern_paths alone is not a valid configuration
if not follow_paths:
raise InvalidConfigurationError
return type(
'LogfileMonitorMixin%d' % id(follow_paths),
(LogfileMonitorMixin,),
{'follow_paths': follow_paths,
'pattern_paths': pattern_paths or ()})
|
libvirt/autotest
|
server/hosts/logfile_monitor.py
|
Python
|
gpl-2.0
| 10,290
|
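NewLogfileMonitorMixin above uses the three-argument form of type() to mint a configured subclass at runtime. A standalone toy version of the same pattern, with a plain base class instead of the SSH host mixin:

class Monitor(object):
    follow_paths = ()

    def describe(self):
        return 'following: %s' % ', '.join(self.follow_paths)

def make_monitor(follow_paths):
    # type(name, bases, attrs) creates a new class object on the fly
    return type('Monitor%d' % id(follow_paths), (Monitor,),
                {'follow_paths': tuple(follow_paths)})

Custom = make_monitor(['/var/log/messages', '/var/log/kern.log'])
print(Custom().describe())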
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.item import Item, Field
class StackItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
title = Field()
url = Field()
|
drupalmav/learningpython
|
scrapy/soexample/stack/stack/items.py
|
Python
|
gpl-2.0
| 359
|
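Scrapy Items behave like dictionaries with a fixed set of declared keys. A short, self-contained usage sketch for an item like StackItem above (Scrapy must be installed; the values are placeholders):

import scrapy

class StackItem(scrapy.Item):
    title = scrapy.Field()
    url = scrapy.Field()

item = StackItem(title='How do I parse JSON?',
                 url='https://stackoverflow.com/q/123')  # placeholder URL
print(dict(item))
# Assigning to an undeclared field raises KeyError:
# item['votes'] = 10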
#!/usr/bin/python
from pisi.actionsapi import perlmodules, pisitools
def setup():
perlmodules.configure()
def build():
perlmodules.make()
def install():
perlmodules.install()
pisitools.remove("/usr/bin/instmodsh")
pisitools.removeDir("/usr/bin")
pisitools.remove("/usr/share/man/man1/instmodsh.1")
|
richard-fisher/repository
|
programming/perl/perl-extutils-makemaker/actions.py
|
Python
|
gpl-2.0
| 335
|
# DFF -- An Open Source Digital Forensics Framework
# Copyright (C) 2009-2011 ArxSys
# This program is free software, distributed under the terms of
# the GNU General Public License Version 2. See the LICENSE file
# at the top of the source tree.
#
# See http://www.digital-forensic.org for more information about this
# project. Please do not directly contact any of the maintainers of
# DFF for assistance; the project provides a web site, mailing lists
# and IRC channels for your use.
#
# Author(s):
# Solal Jacob <sja@digital-forensic.org>
# Jeremy Mounier <jmo@digital-forensic.org>
#
import os
# sys and traceback are used in the RuntimeError handlers below
import sys
import traceback
from PyQt4.QtGui import QFileDialog, QMessageBox, QInputDialog, QDialog, QDialogButtonBox, QComboBox, QPushButton, QFormLayout, QHBoxLayout, QPixmap, QLabel, QApplication
from PyQt4.QtCore import QObject, QString, SIGNAL, SLOT, Qt, QEvent, QDir
from api.taskmanager import *
from api.taskmanager.taskmanager import *
from api.loader import *
from api.vfs import vfs
from api.devices.devices import Devices
from api.gui.widget.devicesdialog import DevicesDialog
from api.gui.widget.layoutmanager import *
from api.types.libtypes import typeId
from ui.gui.dialog.preferences import Preferences
from ui.gui.resources.ui_about import Ui_About
from ui.gui.resources.ui_evidencedialog import Ui_evidenceDialog
from ui.gui.widget.modulesmanager import *
class Dialog(QObject):
def __init__(self, parent):
QObject.__init__(self)
self.parent = parent
self.vfs = vfs.vfs()
self.taskmanager = TaskManager()
self.loader = loader.loader()
def preferences(self):
"""Open a preferences dialog"""
pref = Preferences(self.parent)
ret = pref.exec_()
if ret:
pass
# pref.conf.root_index = pref.root_index_line.text()
# pref.conf.index_name = pref.index_name_line.text()
# pref.conf.index_path = pref.conf.root_index + "/" + pref.conf.index_name
# root_index_dir = QDir(pref.conf.root_index)
# if not root_index_dir.exists():
# root_index_dir.mkpath(pref.conf.root_index)
#@ default_index_dir = QDir(pref.conf.index_path)
# if not default_index_dir.exists():
# default_index_dir.mkpath(pref.conf.index_path)
def addDevices(self):
"""Open a device list dialog"""
dev = DevicesDialog(self.parent)
if dev.exec_():
if dev.selectedDevice:
args = {}
args["path"] = str(dev.selectedDevice.blockDevice())
args["parent"] = self.vfs.getnode("/Local devices")
args["size"] = long(dev.selectedDevice.size())
exec_type = ["thread", "gui"]
try:
if os.name == "nt":
args["name"] = str(dev.selectedDevice.model())
conf = self.loader.get_conf(str("devices"))
genargs = conf.generate(args)
self.taskmanager.add("devices", genargs, exec_type)
except RuntimeError:
err_type, err_value, err_traceback = sys.exc_info()
err_trace = traceback.format_tb(err_traceback)
err_typeval = traceback.format_exception_only(err_type, err_value)
terr = QString()
detailerr = QString()
for err in err_trace:
detailerr.append(err)
for errw in err_typeval:
terr.append(errw)
detailerr.append(errw)
self.messageBox(terr, detailerr)
del dev
def addFiles(self):
""" Open a Dialog for select a file and add in VFS """
edialog = evidenceDialog(self.parent)
ir = edialog.exec_()
if ir > 0:
args = {}
paths = edialog.manager.get("local")
if edialog.rawcheck.isChecked():
module = "local"
args["path"] = paths
args["parent"] = self.vfs.getnode('/Logical files')
elif edialog.ewfcheck.isChecked():
module = "ewf"
args["files"] = paths
args["parent"] = self.vfs.getnode('/Logical files')
else:
module = "aff"
args["path"] = paths
args["parent"] = self.vfs.getnode('/Logical files')
self.conf = self.loader.get_conf(str(module))
try:
genargs = self.conf.generate(args)
self.taskmanager.add(str(module), genargs, ["thread", "gui"])
except RuntimeError:
err_type, err_value, err_traceback = sys.exc_info()
err_trace = traceback.format_tb(err_traceback)
err_typeval = traceback.format_exception_only(err_type, err_value)
terr = QString()
detailerr = QString()
for err in err_trace:
detailerr.append(err)
for errw in err_typeval:
terr.append(errw)
detailerr.append(errw)
self.messageBox(terr, detailerr)
def messageBox(self, coretxt, detail):
msg = QMessageBox()
msg.setWindowTitle("Error in configuration")
msg.setText("An error was detected in the configuration")
msg.setInformativeText(coretxt)
msg.setIcon(QMessageBox.Critical)
msg.setDetailedText(detail)
msg.setStandardButtons(QMessageBox.Ok)
ret = msg.exec_()
def loadDriver(self):
sFileName = QFileDialog.getOpenFileName(self.parent, self.parent.actionLoadModule.toolTip(), os.path.expanduser('~'), "Modules(*.py)")
if (sFileName) :
self.loader.do_load(str(sFileName.toUtf8()))
def about(self):
""" Open a About Dialog """
about = About()
about.exec_()
def manager(self):
""" Open module browser dialog """
module = browserDialog(self.parent)
ir = module.exec_()
if ir > 0:
module.browser.execute()
#class managerDialog(QDialog):
# def __init__(self, mainWindow):
# QDialog.__init__(self, mainWindow)
# l = QVBoxLayout()
# l.addWidget(modulesManager(self))
# self.setLayout(l)
class About(QDialog, Ui_About):
def __init__(self):
super(QDialog, self).__init__()
self.setupUi(self)
self.label.setText(self.label.text().arg(QApplication.instance().applicationVersion()))
def changeEvent(self, event):
""" Search for a language change event
This event has to call retranslateUi to change the interface language on
the fly.
"""
if event.type() == QEvent.LanguageChange:
self.retranslateUi(self)
self.label.setText(self.label.text().arg(QApplication.instance().applicationVersion()))
else:
QDialog.changeEvent(self, event)
class evidenceDialog(QDialog, Ui_evidenceDialog):
def __init__(self, parent):
super(QDialog, self).__init__()
self.setupUi(self)
self.loader = loader.loader()
self.createShape()
def createShape(self):
""" Removes EWF if not in modules
Set itemData for easy access without taking care of text (can be
translated).
TODO Futur : Get all DFF connectors
"""
if "ewf" not in self.loader.modules:
self.ewfcheck.setEnabled(False)
if "aff" not in self.loader.modules:
self.affcheck.setEnabled(False)
self.rawcheck.setChecked(True)
self.manager = layoutManager()
self.manager.addPathList("local", typeId.Path, [], [])
self.pathlayout.addWidget(self.manager)
def changeEvent(self, event):
""" Search for a language change event
This event has to call retranslateUi to change the interface language on
the fly.
"""
if event.type() == QEvent.LanguageChange:
self.retranslateUi(self)
self.label.setText(self.label.text().arg(QApplication.instance().applicationVersion()))
else:
QDialog.changeEvent(self, event)
|
halbbob/dff
|
ui/gui/dialog/dialog.py
|
Python
|
gpl-2.0
| 7,654
|
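messageBox() above is a standard QMessageBox recipe. A stripped-down standalone version (PyQt4 required; the strings are placeholders):

import sys
from PyQt4.QtGui import QApplication, QMessageBox

app = QApplication(sys.argv)
msg = QMessageBox()
msg.setWindowTitle('Error in configuration')
msg.setText('An error was detected in the configuration')
msg.setInformativeText('short error summary')     # placeholder
msg.setDetailedText('full traceback goes here')   # placeholder
msg.setIcon(QMessageBox.Critical)
msg.setStandardButtons(QMessageBox.Ok)
msg.exec_()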
from mininet.topo import Topo
from mininet.link import TCLink
class UFRGSTopo(Topo):
def __init__(self):
Topo.__init__(self)
self.host = {}
for h in range(1,231):
self.host[h] = self.addHost('h%s' %(h))
self.switch = {}
for s in range(1,12):
self.switch[s] = self.addSwitch('s%s' %(s))
# S1-links to hosts
for i in range(1,11):
self.addLink(self.switch[1], self.host[i], cls=TCLink, bw=100)
# S1-links to switches
self.addLink(self.switch[1], self.switch[2], cls=TCLink, bw=100)
self.addLink(self.switch[1], self.switch[3], cls=TCLink, bw=100)
self.addLink(self.switch[1], self.switch[7], cls=TCLink, bw=100)
self.addLink(self.switch[1], self.switch[11], cls=TCLink, bw=100)
# S2-links to hosts
for i in range(11,51):
self.addLink(self.switch[2], self.host[i], cls=TCLink, bw=100)
# S3-links to hosts
for i in range(51,71):
self.addLink(self.switch[3], self.host[i], cls=TCLink, bw=100)
# S3-links to switches
self.addLink(self.switch[3], self.switch[4], cls=TCLink, bw=100)
self.addLink(self.switch[3], self.switch[5], cls=TCLink, bw=100)
# S4-links to hosts
for i in range(71,81):
self.addLink(self.switch[4], self.host[i], cls=TCLink, bw=100)
# S5-links to hosts
for i in range(81,111):
self.addLink(self.switch[5], self.host[i], cls=TCLink, bw=100)
# S6-links to hosts
for i in range(111,121):
self.addLink(self.switch[6], self.host[i], cls=TCLink, bw=100)
# S6-links to switches
self.addLink(self.switch[6], self.switch[7], cls=TCLink, bw=100)
# S7-links to hosts
for i in range(121,141):
self.addLink(self.switch[7], self.host[i], cls=TCLink, bw=100)
# S7-links to switches
self.addLink(self.switch[7], self.switch[8], cls=TCLink, bw=100)
self.addLink(self.switch[7], self.switch[9], cls=TCLink, bw=100)
# S8-links to hosts
for i in range(141,151):
self.addLink(self.switch[8], self.host[i], cls=TCLink, bw=100)
# S9-links to hosts
for i in range(151,171):
self.addLink(self.switch[9], self.host[i], cls=TCLink, bw=100)
# S10-links to hosts
for i in range(171,211):
self.addLink(self.switch[10], self.host[i], cls=TCLink, bw=100)
# S11-links to hosts
for i in range(211,231):
self.addLink(self.switch[11], self.host[i], cls=TCLink, bw=100)
# S11-links to switches
self.addLink(self.switch[11], self.switch[10], cls=TCLink, bw=100)
topos = {'ufrgstopo':(lambda:UFRGSTopo())}
|
ComputerNetworks-UFRGS/AuroraSDN
|
extras/mininet/custom_topologies/ufrgstopo.py
|
Python
|
gpl-2.0
| 2,581
|
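A topology class like UFRGSTopo above can be exercised from the mn CLI (sudo mn --custom ufrgstopo.py --topo ufrgstopo --link tc) or programmatically. A hedged sketch of the programmatic route, assuming Mininet is installed, the script runs as root, and the module above is on the path:

from mininet.net import Mininet
from mininet.link import TCLink
from ufrgstopo import UFRGSTopo  # assumes the module above is importable

net = Mininet(topo=UFRGSTopo(), link=TCLink)
net.start()
net.pingAll()   # connectivity check across all 230 hosts; slow on this topology
net.stop()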
#Tests, in a file of interactions, whether two sets of user-given coordinates
#have interactions where one interval is the target of the other, and vice versa.
#Prints the number of reciprocal interactions.
from sys import argv
f=open(argv[1])
interval1=[int(argv[2]), int(argv[3])]
interval2=[int(argv[4]), int(argv[5])]
rr=0 # initialize the counter once, outside the loop, so it accumulates over all lines
for i in f:
ff=i.split(" ")
if int(ff[1])>= interval1[0] and int(ff[2])<= interval1[1] and int(ff[4])>= interval2[0] and int(ff[5])<= interval2[1]:
rr+=1
elif int(ff[4])>= interval1[0] and int(ff[5])<= interval1[1] and int(ff[1])>= interval2[0] and int(ff[2])<= interval2[1]:
rr+=1
f.close()
print(rr)
|
Nymeria8/hi-c_helper_scripts
|
teste.py
|
Python
|
gpl-2.0
| 642
|
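An invocation sketch for the script above; the file name and interval bounds are placeholders. Each input line is expected to carry two intervals, in fields 2-3 and 5-6 of a space-separated record:

# python teste.py interactions.txt 1000 2000 5000 6000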
from routersploit.modules.creds.generic.snmp_bruteforce import Exploit
def test_check_success(generic_target):
""" Test scenerio - testing against SNMP server """
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 161
assert exploit.version == 1
assert exploit.threads == 8
assert type(exploit.defaults) is list
assert exploit.stop_on_success is True
assert exploit.verbosity is True
|
dasseclab/dasseclab
|
clones/routersploit/tests/creds/generic/test_snmp_bruteforce.py
|
Python
|
gpl-2.0
| 444
|
# Chula imports
from chula.www import controller
class Rest(controller.Controller):
def blog(self):
return 'blog: %s' % self.env.form_rest
def user(self):
return 'user preferences'
|
jmcfarlane/chula
|
apps/example/webapp/controller/rest.py
|
Python
|
gpl-2.0
| 207
|
import sys
from time import sleep
from PyQt4 import QtGui
from gui.main_window3 import Ui_MainWindow
import zmq
from threading import Thread
from math import cos, sin, pi
sys.path.append("../lib")
import cflib.crtp
from CF_class_sterowanie2 import Crazy as cf
import usb.core
import logging
logging.basicConfig(level=logging.ERROR)
# from sterowanie.fuzzy import fuzzy_ctrl as control
from sterowanie.sprzezenie import control
from sterowanie.fuzzy import fuzzyZ as fz, fuzzyX as fx, fuzzyY as fy,fuzzyKat as ftheta
class window(QtGui.QMainWindow):
def __init__(self,app):
super(window, self).__init__()
self.ui=Ui_MainWindow()
self.ui.setupUi(self)
self.app=app
self.set_signals()
self.update_target()
zmq=Thread(target=self.run_zmq)
zmq.daemon=True
zmq.start()
self.rpy=[0,0,0]
self.cf=cf(self.ui,self.rpy)
self.init_var()
self.scan()
def init_var(self):
self.z=0
self.x=0
self.y=0
self.theta=0
self.Vz=0
self.Vx=0
self.Vy=0
self.Vtheta=0
self.x_d = 0
self.y_d = 0
self.Vx_d = 0
self.Vy_d = 0
def set_signals(self):
self.ui.pb_scan.clicked.connect(self.scan)
self.ui.pb_conn.clicked.connect(self.pb_conn_clicked)
self.ui.pb_disc.clicked.connect(self.pb_disc_clicked)
self.ui.pb_start.clicked.connect(self.pb_start_clicked)
self.ui.pb_stop.clicked.connect(self.pb_stop_clicked)
self.ui.pb_update.clicked.connect(self.update_target)
self.app.aboutToQuit.connect(self.exit)
def exit(self):
self.ctrl_isTrue=False
self.cf.close()
# set up the connection
def pb_conn_clicked(self):
uri=self.ui.cb_device.currentText()
self.ui.l_conn.setText("connecting to {}".format(uri))
self.cf.connect(uri)
self.ui.pb_scan.setEnabled(False)
self.ui.pb_conn.setEnabled(False)
self.ui.cb_device.setEnabled(False)
self.ui.pb_disc.setEnabled(True)
self.ui.gb_target.setEnabled(True)
self.ui.gb_control.setEnabled(True)
# disconnect
def pb_disc_clicked(self):
# self.pb_stop_clicked()
self.ui.l_conn.setText("Disconnecting ...")
self.cf.close()
self.ui.pb_scan.setEnabled(True)
self.ui.cb_device.setEnabled(True)
self.ui.pb_disc.setEnabled(False)
self.ui.gb_target.setEnabled(False)
self.ui.gb_control.setEnabled(False)
# control
def pb_start_clicked(self):
self.ui.pb_start.setEnabled(False)
self.ui.pb_stop.setEnabled(True)
self.ui.pb_update.setEnabled(True)
self.ui.pb_disc.setEnabled(False)
self.update_target()
self.ctrl_isTrue=True
self.ctrl_thread=Thread(target=self.ctrl)
self.ctrl_thread.daemon=True
self.ctrl_thread.start()
def pb_stop_clicked(self):
self.ui.pb_start.setEnabled(True)
self.ui.pb_stop.setEnabled(False)
# self.ui.pb_update.setEnabled(False)
self.ui.pb_disc.setEnabled(True)
self.ctrl_isTrue=False
# sleep(0.1)
self.ctrl_thread.join()
def update_target(self):
self.x_t=float(self.ui.en_x.text())
self.y_t=float(self.ui.en_y.text())
self.z_t=float(self.ui.en_z.text())
self.theta_t=float(self.ui.en_theta.text())
self.x_d_t=self.x_t*cos(self.theta_t*pi/180)-self.y_t*sin(self.theta_t*pi/180)
self.y_d_t=self.x_t*sin(self.theta_t*pi/180)+self.y_t*cos(self.theta_t*pi/180)
def ctrl(self):
timeout=0
thrust_p=0
roll_p=0
pitch_p=0
yaw_p=0
while self.ctrl_isTrue:
if (self.x==5 or self.y==5 or self.z==5) :
thrust=thrust_p
roll=roll_p
pitch=pitch_p
yaw=yaw_p
timeout+=1
print timeout
if timeout>=3:
self.cf.update_ctrl(0,0,0,0)
self.ctrl_isTrue=False
break
elif (self.x==0 and self.y==0 and self.z==0 and self.theta==0):
thrust=0
roll=0
pitch=0
yaw=0
else:
thrust=fz(self.z_t-self.z,self.Vz)
roll=fx(self.x_t-self.x,self.Vx)
pitch=fy(self.y_t-self.y,self.Vy)
yaw=ftheta(self.theta)
timeout=0
thrust_p=thrust
roll_p=roll
pitch_p=pitch
yaw_p=yaw
self.ui.l_thrust.setText("Thrust: {:.3f}".format(thrust))
self.ui.l_roll.setText("roll: {:.3f}".format(roll))
self.ui.l_pitch.setText("pitch: {:.3f}".format(pitch))
self.ui.l_yaw.setText("yaw: {:.3f}".format(yaw))
self.cf.update_ctrl(thrust*600,pitch,roll,yaw)
sleep(0.05)
self.cf.update_ctrl(0,0,0,0)
self.ui.l_thrust.setText("Thrust: {:f}".format(0))
self.ui.l_roll.setText("roll: {:f}".format(0))
self.ui.l_pitch.setText("pitch: {:f}".format(0))
self.ui.l_yaw.setText("yaw: {:f}".format(0))
# scan for devices and populate the combo box
def scan(self):
# find USB devices
dev = usb.core.find(find_all=True)
try:
a=cflib.crtp.scan_interfaces()
self.ui.cb_device.clear()
k=0
kk=0
for cfg in dev:
if cfg.idVendor==6421:
for i in a:
self.ui.cb_device.addItem(i[0].replace(str(kk),str(k),1))
kk=k
k+=1
if len(a)>0:
self.ui.pb_conn.setEnabled(True)
else:
self.ui.pb_conn.setEnabled(False)
except Exception as e:
self.ui.l_conn.setText(str(e))
def run_zmq(self):
port=2500
context = zmq.Context()
print "Connecting to server..."
socket = context.socket(zmq.SUB)
socket.setsockopt_string(zmq.SUBSCRIBE, u"")
socket.connect ("tcp://192.168.2.103:%s" % port)
# socket.connect ("tcp://localhost:%s" % port)
while True:
rcv=socket.recv_unicode()
# rcv=str(rcv)
if rcv!=None and len(rcv)>0:
rcv=rcv.replace(",",".")
splited=rcv.split(";")
if self.x!=5 and self.y!=5 and self.z!=5 and self.theta!=200:
self.x_p=self.x
self.y_p=self.y
self.z_p=self.z
self.theta_p=self.theta
self.x_d_p=self.x_d
self.y_d_p=self.y_d
self.x=float(splited[0])
self.y=float(splited[1])
self.z=float(splited[2])
self.theta=float(splited[3])
self.Vx=self.x-self.x_p
self.Vy=self.y-self.y_p
self.Vz=self.z-self.z_p
self.Vtheta=self.theta-self.theta_p
self.x_d=self.x*cos(self.theta*pi/180)-self.y*sin(self.theta*pi/180)
self.y_d=self.x*sin(self.theta*pi/180)+self.y*cos(self.theta*pi/180)
self.Vx_d=self.x_d-self.x_d_p
self.Vy_d=self.y_d-self.y_d_p
# self.x_e=float(splited[0])-self.x_t
# self.y_e=float(splited[1])-self.y_t
# self.z_e=float(splited[2])-self.z_t
# self.theta_e=float(splited[3])-self.theta_t
# display on the GUI
self.ui.l_x.setText("x: {}".format(splited[0]))
self.ui.l_y.setText("y: {}".format(splited[1]))
self.ui.l_z.setText("z: {}".format(splited[2]))
self.ui.l_theta.setText("kat: {}".format(splited[3]))
self.ui.l_vx.setText("x: {}".format(self.Vx))
self.ui.l_vy.setText("y: {}".format(self.Vy))
self.ui.l_vz.setText("z: {}".format(self.Vz))
self.ui.l_vtheta.setText("kat: {}".format(self.Vtheta))
# sleep(0.01)
def main():
app = QtGui.QApplication(sys.argv)
ex = window(app)
ex.show()
sys.exit(app.exec_())
if __name__ == '__main__':
cflib.crtp.init_drivers(enable_debug_driver=False)
main()
|
Venris/crazyflie-multilink
|
KM/main_sterowanie3_fuzzy.py
|
Python
|
gpl-2.0
| 8,451
|
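run_zmq() above is a plain ZeroMQ SUB loop. A minimal standalone sketch of the same pattern (pyzmq required; the endpoint is a placeholder):

import zmq

context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.setsockopt_string(zmq.SUBSCRIBE, u'')  # subscribe to every topic
socket.connect('tcp://127.0.0.1:2500')        # placeholder endpoint
while True:
    print(socket.recv_string())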
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ,---------, ____ _ __
# | ,-^-, | / __ )(_) /_______________ _____ ___
# | ( O ) | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# | / ,--' | / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# +------` /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2020 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Enables access to the Crazyflie memory subsystem.
"""
import errno
import logging
import struct
from threading import Lock
from .deck_memory import DeckMemoryManager
from .i2c_element import I2CElement
from .led_driver_memory import LEDDriverMemory
from .led_timings_driver_memory import LEDTimingsDriverMemory
from .lighthouse_memory import LighthouseBsCalibration
from .lighthouse_memory import LighthouseBsGeometry
from .lighthouse_memory import LighthouseMemHelper
from .lighthouse_memory import LighthouseMemory
from .loco_memory import LocoMemory
from .loco_memory_2 import LocoMemory2
from .memory_element import MemoryElement
from .memory_tester import MemoryTester
from .ow_element import OWElement
from .trajectory_memory import CompressedSegment
from .trajectory_memory import CompressedStart
from .trajectory_memory import Poly4D
from .trajectory_memory import TrajectoryMemory
from cflib.crtp.crtpstack import CRTPPacket
from cflib.crtp.crtpstack import CRTPPort
from cflib.utils.callbacks import Caller
__author__ = 'Bitcraze AB'
__all__ = ['Memory', 'Poly4D', 'CompressedStart', 'CompressedSegment', 'MemoryElement',
'LighthouseBsGeometry', 'LighthouseBsCalibration', 'LighthouseMemHelper',
'DeckMemoryManager']
# Channels used for the logging port
CHAN_INFO = 0
CHAN_READ = 1
CHAN_WRITE = 2
# Commands used when accessing the Settings port
CMD_INFO_VER = 0
CMD_INFO_NBR = 1
CMD_INFO_DETAILS = 2
logger = logging.getLogger(__name__)
class _ReadRequest:
"""
Class used to handle memory reads that will split up the read into multiple
packets if necessary
"""
MAX_DATA_LENGTH = 20
def __init__(self, mem, addr, length, cf):
"""Initialize the object with good defaults"""
self.mem = mem
self.addr = addr
self._bytes_left = length
self.data = bytearray()
self.cf = cf
self._current_addr = addr
def start(self):
"""Start the fetching of the data"""
self._request_new_chunk()
def resend(self):
logger.debug('Resending read request...')
self._request_new_chunk()
def _request_new_chunk(self):
"""
Called to request a new chunk of data to be read from the Crazyflie
"""
# Figure out the length of the next request
new_len = self._bytes_left
if new_len > _ReadRequest.MAX_DATA_LENGTH:
new_len = _ReadRequest.MAX_DATA_LENGTH
logger.debug('Requesting new chunk of {}bytes at 0x{:X}'.format(
new_len, self._current_addr))
# Request the data for the next address
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_READ)
pk.data = struct.pack('<BIB', self.mem.id, self._current_addr, new_len)
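# The expected reply is matched on the memory id and address bytes; the trailing length byte is dropped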
reply = struct.unpack('<BBBBB', pk.data[:-1])
self.cf.send_packet(pk, expected_reply=reply, timeout=1)
def add_data(self, addr, data):
"""Callback when data is received from the Crazyflie"""
data_len = len(data)
if not addr == self._current_addr:
logger.warning(
'Address did not match when adding data to read request!')
return
# Add the data and calculate the next address to fetch
self.data += data
self._bytes_left -= data_len
self._current_addr += data_len
if self._bytes_left > 0:
self._request_new_chunk()
return False
else:
return True
class _WriteRequest:
"""
Class used to handle memory writes that will split up the write into multiple
packets if necessary
"""
MAX_DATA_LENGTH = 25
def __init__(self, mem, addr, data, cf, progress_cb=None):
"""Initialize the object with good defaults"""
self.mem = mem
self.addr = addr
self._bytes_left = len(data)
self._write_len = self._bytes_left
self._data = data
self.data = bytearray()
self.cf = cf
self._progress_cb = progress_cb
self._progress = -1
self._current_addr = addr
self._sent_packet = None
self._sent_reply = None
self._addr_add = 0
def start(self):
"""Start the fetching of the data"""
self._write_new_chunk()
def resend(self):
logger.debug('Sending write again...')
self.cf.send_packet(
self._sent_packet, expected_reply=self._sent_reply, timeout=1)
def _write_new_chunk(self):
"""
Called to request a new chunk of data to be read from the Crazyflie
"""
# Figure out the length of the next request
new_len = len(self._data)
if new_len > _WriteRequest.MAX_DATA_LENGTH:
new_len = _WriteRequest.MAX_DATA_LENGTH
logger.debug('Writing new chunk of {}bytes at 0x{:X}'.format(
new_len, self._current_addr))
data = self._data[:new_len]
self._data = self._data[new_len:]
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_WRITE)
pk.data = struct.pack('<BI', self.mem.id, self._current_addr)
# Create a tuple used for matching the reply using id and address
reply = struct.unpack('<BBBBB', pk.data)
self._sent_reply = reply
# Add the data
pk.data += struct.pack('B' * len(data), *data)
self._sent_packet = pk
self.cf.send_packet(pk, expected_reply=reply, timeout=1)
self._addr_add = len(data)
self._bytes_left -= self._addr_add
def _get_progress_message(self):
if isinstance(self.mem, DeckMemoryManager):
for deck_memory in self.mem.deck_memories.values():
if deck_memory.contains(self._current_addr):
return f'Writing to {deck_memory.name} deck memory'
return 'Writing to memory'
def write_done(self, addr):
"""Callback when data is received from the Crazyflie"""
if not addr == self._current_addr:
logger.warning(
'Address did not match when adding data to read request!')
return
if self._progress_cb is not None:
new_progress = int(100 * (self._write_len - self._bytes_left) / self._write_len)
if new_progress > self._progress:
self._progress = new_progress
self._progress_cb(self._get_progress_message(), self._progress)
if len(self._data) > 0:
self._current_addr += self._addr_add
self._write_new_chunk()
return False
else:
logger.debug('This write request is done')
return True
class Memory():
"""Access memories on the Crazyflie"""
# These codes can be decoded using os.stderror, but
# some of the text messages will look very strange
# in the UI, so they are redefined here
_err_codes = {
errno.ENOMEM: 'No more memory available',
errno.ENOEXEC: 'Command not found',
errno.ENOENT: 'No such block id',
errno.E2BIG: 'Block too large',
errno.EEXIST: 'Block already exists'
}
def __init__(self, crazyflie=None):
"""Instantiate class and connect callbacks"""
self.cf = crazyflie
self.cf.add_port_callback(CRTPPort.MEM, self._new_packet_cb)
self.cf.disconnected.add_callback(self._disconnected)
self._write_requests_lock = Lock()
self._clear_state()
def _clear_state(self):
self.mems = []
# Called when new memories have been added
self.mem_added_cb = Caller()
# Called to signal completion of read or write
self.mem_read_cb = Caller()
self.mem_read_failed_cb = Caller()
self.mem_write_cb = Caller()
self.mem_write_failed_cb = Caller()
self._refresh_callback = None
self._fetch_id = 0
self.nbr_of_mems = 0
self._ow_mem_fetch_index = 0
self._elem_data = ()
self._read_requests = {}
self._write_requests = {}
self._ow_mems_left_to_update = []
self._getting_count = False
def _mem_update_done(self, mem):
"""
Callback from each individual memory (only 1-wire) when reading of
header/elements are done
"""
if mem.id in self._ow_mems_left_to_update:
self._ow_mems_left_to_update.remove(mem.id)
logger.debug(mem)
if len(self._ow_mems_left_to_update) == 0:
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
def get_mem(self, id):
"""Fetch the memory with the supplied id"""
for m in self.mems:
if m.id == id:
return m
return None
def get_mems(self, type):
"""Fetch all the memories of the supplied type"""
ret = ()
for m in self.mems:
if m.type == type:
ret += (m,)
return ret
def ow_search(self, vid=0xBC, pid=None, name=None):
"""Search for specific memory id/name and return it"""
for m in self.get_mems(MemoryElement.TYPE_1W):
if (pid and m.pid == pid) or (name and m.name == name):
return m
return None
def write(self, memory, addr, data, flush_queue=False, progress_cb=None):
"""Write the specified data to the given memory at the given address"""
wreq = _WriteRequest(memory, addr, data, self.cf, progress_cb)
if memory.id not in self._write_requests:
self._write_requests[memory.id] = []
# Workaround until we secure the uplink and change messages for
# mems to non-blocking
self._write_requests_lock.acquire()
if flush_queue:
self._write_requests[memory.id] = self._write_requests[
memory.id][:1]
self._write_requests[memory.id].append(wreq)
if len(self._write_requests[memory.id]) == 1:
wreq.start()
self._write_requests_lock.release()
return True
def read(self, memory, addr, length):
"""
Read the specified amount of bytes from the given memory at the given
address
"""
if memory.id in self._read_requests:
logger.warning('There is already a read operation ongoing for '
'memory id {}'.format(memory.id))
return False
rreq = _ReadRequest(memory, addr, length, self.cf)
self._read_requests[memory.id] = rreq
rreq.start()
return True
def refresh(self, refresh_done_callback):
"""Start fetching all the detected memories"""
self._refresh_callback = refresh_done_callback
self._fetch_id = 0
for m in self.mems:
try:
self.mem_read_cb.remove_callback(m.new_data)
m.disconnect()
except Exception as e:
logger.info(
'Error when removing memory after update: {}'.format(e))
self.mems = []
self.nbr_of_mems = 0
self._getting_count = False
logger.debug('Requesting number of memories')
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_NBR,)
self.cf.send_packet(pk, expected_reply=(CMD_INFO_NBR,))
def _disconnected(self, uri):
"""The link to the Crazyflie has been broken. Reset state"""
self._clear_state()
def _new_packet_cb(self, packet):
"""Callback for newly arrived packets for the memory port"""
chan = packet.channel
cmd = packet.data[0]
payload = packet.data[1:]
if chan == CHAN_INFO:
if cmd == CMD_INFO_NBR:
self.nbr_of_mems = payload[0]
logger.info('{} memories found'.format(self.nbr_of_mems))
# Start requesting information about the memories,
# if there are any...
if self.nbr_of_mems > 0:
if not self._getting_count:
self._getting_count = True
logger.debug('Requesting first id')
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_DETAILS, 0)
self.cf.send_packet(pk, expected_reply=(
CMD_INFO_DETAILS, 0))
else:
self._refresh_callback()
if cmd == CMD_INFO_DETAILS:
# Did we get a complete reply? If not, apply the workaround below:
if len(payload) < 5:
# Workaround for 1-wire bug when memory is detected
# but updating the info crashes the communication with
# the 1-wire. Fail by saying we only found 1 memory
# (the I2C).
logger.error(
'-------->Got good count, but no info on mem!')
self.nbr_of_mems = 1
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
return
# Create information about a new memory
# Id - 1 byte
mem_id = payload[0]
# Type - 1 byte
mem_type = payload[1]
# Size 4 bytes (as addr)
mem_size = struct.unpack('I', payload[2:6])[0]
# Addr (only valid for 1-wire?)
mem_addr_raw = struct.unpack('B' * 8, payload[6:14])
mem_addr = ''
for m in mem_addr_raw:
mem_addr += '{:02X}'.format(m)
if (not self.get_mem(mem_id)):
if mem_type == MemoryElement.TYPE_1W:
mem = OWElement(id=mem_id, type=mem_type,
size=mem_size,
addr=mem_addr, mem_handler=self)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
self._ow_mems_left_to_update.append(mem.id)
elif mem_type == MemoryElement.TYPE_I2C:
mem = I2CElement(id=mem_id, type=mem_type,
size=mem_size,
mem_handler=self)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
elif mem_type == MemoryElement.TYPE_DRIVER_LED:
mem = LEDDriverMemory(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.debug(mem)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
elif mem_type == MemoryElement.TYPE_LOCO:
mem = LocoMemory(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.debug(mem)
self.mem_read_cb.add_callback(mem.new_data)
elif mem_type == MemoryElement.TYPE_TRAJ:
mem = TrajectoryMemory(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.debug(mem)
self.mem_write_cb.add_callback(mem.write_done)
self.mem_write_failed_cb.add_callback(mem.write_failed)
elif mem_type == MemoryElement.TYPE_LOCO2:
mem = LocoMemory2(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.debug(mem)
self.mem_read_cb.add_callback(mem.new_data)
elif mem_type == MemoryElement.TYPE_LH:
mem = LighthouseMemory(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.debug(mem)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_read_failed_cb.add_callback(
mem.new_data_failed)
self.mem_write_cb.add_callback(mem.write_done)
self.mem_write_failed_cb.add_callback(mem.write_failed)
elif mem_type == MemoryElement.TYPE_MEMORY_TESTER:
mem = MemoryTester(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.debug(mem)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
elif mem_type == MemoryElement.TYPE_DRIVER_LEDTIMING:
mem = LEDTimingsDriverMemory(id=mem_id, type=mem_type,
size=mem_size,
mem_handler=self)
logger.debug(mem)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
elif mem_type == MemoryElement.TYPE_DECK_MEMORY:
mem = DeckMemoryManager(id=mem_id, type=mem_type, size=mem_size, mem_handler=self)
logger.debug(mem)
self.mem_read_cb.add_callback(mem._new_data)
self.mem_read_failed_cb.add_callback(mem._new_data_failed)
self.mem_write_cb.add_callback(mem._write_done)
self.mem_write_failed_cb.add_callback(mem._write_failed)
else:
mem = MemoryElement(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.debug(mem)
self.mems.append(mem)
self.mem_added_cb.call(mem)
self._fetch_id = mem_id + 1
if self.nbr_of_mems - 1 >= self._fetch_id:
logger.debug(
'Requesting information about memory {}'.format(
self._fetch_id))
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_DETAILS, self._fetch_id)
self.cf.send_packet(pk, expected_reply=(
CMD_INFO_DETAILS, self._fetch_id))
else:
logger.debug(
'Done getting all the memories, start reading the OWs')
ows = self.get_mems(MemoryElement.TYPE_1W)
# If there are any OW mems start reading them, otherwise
# we are done
for ow_mem in ows:
ow_mem.update(self._mem_update_done)
if len(ows) == 0:
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
if chan == CHAN_WRITE:
id = cmd
(addr, status) = struct.unpack('<IB', payload[0:5])
logger.debug(
'WRITE: Mem={}, addr=0x{:X}, status=0x{}'.format(
id, addr, status))
# Find the read request
if id in self._write_requests:
self._write_requests_lock.acquire()
do_call_success_cb = False
do_call_fail_cb = False
wreq = self._write_requests[id][0]
if status == 0:
if wreq.write_done(addr):
# self._write_requests.pop(id, None)
# Remove the first item
self._write_requests[id].pop(0)
do_call_success_cb = True
# Get a new one to start (if there are any)
if len(self._write_requests[id]) > 0:
self._write_requests[id][0].start()
else:
logger.debug(
'Status {}: write failed.'.format(status))
# Remove from queue
self._write_requests[id].pop(0)
do_call_fail_cb = True
# Get a new one to start (if there are any)
if len(self._write_requests[id]) > 0:
self._write_requests[id][0].start()
self._write_requests_lock.release()
            # Call callbacks after the lock has been released to allow new
            # writes to be initiated from the callback.
            if do_call_success_cb:
self.mem_write_cb.call(wreq.mem, wreq.addr)
if do_call_fail_cb:
self.mem_write_failed_cb.call(wreq.mem, wreq.addr)
if chan == CHAN_READ:
id = cmd
(addr, status) = struct.unpack('<IB', payload[0:5])
data = struct.unpack('B' * len(payload[5:]), payload[5:])
            logger.debug('READ: Mem={}, addr=0x{:X}, status=0x{:02X}, '
'data={}'.format(id, addr, status, data))
# Find the read request
if id in self._read_requests:
logger.debug(
'READING: We are still interested in request for '
'mem {}'.format(id))
rreq = self._read_requests[id]
if status == 0:
if rreq.add_data(addr, payload[5:]):
self._read_requests.pop(id, None)
self.mem_read_cb.call(rreq.mem, rreq.addr, rreq.data)
else:
logger.debug('Status {}: read failed.'.format(status))
self._read_requests.pop(id, None)
self.mem_read_failed_cb.call(
rreq.mem, rreq.addr, rreq.data)
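# A minimal usage sketch (hedged: 'cf' stands for a connected Crazyflie whose
# .mem attribute is this handler; only get_mems() and update() are taken from
# the code above, the callback name is hypothetical):
#
#   def _ow_updated(mem):
#       print('1-wire memory {} updated'.format(mem.id))
#
#   for ow_mem in cf.mem.get_mems(MemoryElement.TYPE_1W):
#       ow_mem.update(_ow_updated)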
|
bitcraze/crazyflie-lib-python
|
cflib/crazyflie/mem/__init__.py
|
Python
|
gpl-2.0
| 23,474
|
from libs.fountain import Fountain
import pandas as pd
import os
class ScreenPlay(Fountain):
characters = None # Character Dictionary { Character : Contentlength Integer }
topcharacters = [] # Sorted List of characters [ Char_mostContent, Char_2nd-mostContent , ... ]
rawscript = None # Script broken up in array of Character and Action [ Type , Content ][ Type , Content ] ...
scriptlength = None # Total string length of the loaded script
    scenedictionary = None # List of per-chunk { Character : Contentlength } dicts built by createSceneDictionary()
def __init__(self, path=None):
try:
Fountain.__init__(self, string=None, path=path)
self.createObjectElements()
self.createSceneMeta(self.rawscript)
#self.createSceneDictionary(cutoff)
except:
self.throwError('init')
def throwError(self, errtype):
        if errtype == 'init':
            print "Could not Initialize :("
        elif errtype == 'range':
            print "Requested index is out of range :("
def createSceneMeta(self, scenelist):
'''
Process the script to create meta information
'''
scriptlength = 0
for scene in scenelist:
scriptlength += len( scene[1] )
self.scriptlength = scriptlength
def createObjectElements(self, maxcharacters=6):
'''
Processing of Fountain elements to create characters{}, topcharacters[]
and rawscript[[_][_]]
'''
simple_script = []
for element in self.elements:
if element.element_type in ['Dialogue', 'Action', 'Character']:
simple_script.append([element.element_type, element.element_text])
# Offsetting by one - assumption is that character is followed by dialogue
offset_list = simple_script[1:]
output = []
characters = {}
for i in range(len(offset_list)):
            if simple_script[i][0] == 'Action':
                output.append([ 'ScriptAction' , simple_script[i][1] ])
            elif simple_script[i][0] == 'Character' and offset_list[i][0] == 'Dialogue':
character = str(simple_script[i][1])
# Getting rid of everything that happens after bracket start e.g. (V.O.)
character = character.split(' (')[0] # sometimes double space
character = character.split(' (')[0] # sometimes single space
character = character.strip()
output.append([ character , offset_list[i][1] ])
if character in characters:
characters[character] = characters[character] + len( offset_list[i][1] )
else:
characters[character] = len( offset_list[i][1] )
# Get characters with most text
dfCharacters = pd.DataFrame(list(characters.iteritems()), columns=['character','stringlength'])
charlist = dfCharacters.sort('stringlength', ascending=False).head(maxcharacters).character.tolist()
# Set object variables
self.characters = characters
self.topcharacters = charlist
self.rawscript = output
def createSceneDictionary(self, cutoff):
scenelist = self.rawscript
currentscene = 0 # What scene index are we processing?
scenes = []
while currentscene < len(scenelist):
scenedict = {}
count = 0 # How far are we in the progress to the cutoff
# Loop through until the cutoff has been reached
while count < cutoff:
# Stop loop if the currentscene is going out of range
                if currentscene >= len(scenelist): break
key = scenelist[currentscene][0]
entry = len( scenelist[currentscene][1] )
# Does the whole entry fit into what is left before the cutoff?
if entry < cutoff - count:
addcount = entry
currentscene += 1 # We can increase the scene - all done here
else:
addcount = cutoff - count
scenelist[currentscene][1] = 'x' * (entry - addcount) # Reduce what remains in the current scene
if key in scenedict:
scenedict[key] = scenedict[key] + addcount
else:
scenedict[key] = addcount
count += addcount
scenes.append(scenedict)
self.scenedictionary = scenes
def sceneComposition(self, sceneindex, toponly=True):
'''
Returns a dictionary of components of the requested scene index
'''
try:
# Get the list of top characters and add ScriptAction for Action events
charlist = self.topcharacters
#charlist.append('ScriptAction')
dfSceneComp = pd.DataFrame(list(self.scenedictionary[sceneindex].iteritems()), columns=['character','stringlength'])
if toponly:
_charMask = dfSceneComp['character'].map(lambda x: x in charlist )
dfSceneComp_filtered = dfSceneComp[_charMask]
out_dictionary = pd.Series(dfSceneComp_filtered.stringlength.values,index=dfSceneComp_filtered.character).to_dict()
else:
out_dictionary = pd.Series(dfSceneComp.stringlength.values,index=dfSceneComp.character).to_dict()
return out_dictionary
except:
self.throwError('range')
return None
def setMaxCharacters(self, setting):
dfCharacters = pd.DataFrame(list(self.characters.iteritems()), columns=['character','stringlength'])
charlist = dfCharacters.sort('stringlength', ascending=False).head(setting).character.tolist()
self.topcharacters = charlist
def getTopCharacterName(self, topchar):
try:
return str(self.topcharacters[topchar])
except:
return None
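# A minimal usage sketch; 'script.fountain' is a hypothetical path to a
# Fountain-formatted screenplay, the method names are the ones defined above.
if __name__ == '__main__':
    sp = ScreenPlay(path='script.fountain')
    sp.createSceneDictionary(cutoff=1000) # chunk the script into 1000-char pieces
    print sp.topcharacters # up to 6 characters with the most dialogue
    print sp.sceneComposition(0) # {character: stringlength} for the first chunk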
|
MaroGM/gendersonification
|
screenplay.py
|
Python
|
gpl-2.0
| 5,001
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
('documents', '0001_initial'),
('annotationsets', '0002_auto_20150818_0905'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(unique=True, max_length=150, verbose_name=b'username')),
('email', models.EmailField(unique=True, max_length=75, verbose_name=b'email address')),
('name', models.CharField(max_length=256, verbose_name=b'persons name', blank=True)),
('surname', models.CharField(max_length=256, verbose_name=b'persons surname', blank=True)),
('joined', models.DateTimeField(auto_now_add=True)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups')),
('hidden_documents', models.ManyToManyField(related_name=b'hidden_documents', null=True, to='documents.Document', blank=True)),
('owned_documents', models.ManyToManyField(to='documents.Document', null=True, blank=True)),
('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Membership',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date_joined', models.DateField(auto_now_add=True)),
('invite_reason', models.CharField(max_length=64, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='WorkingGroup',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128, verbose_name=b'group name')),
('comment', models.CharField(max_length=500, verbose_name=b'group description', blank=True)),
('concept_set', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='annotationsets.ConceptSet', null=True)),
('documents', models.ManyToManyField(to='documents.Document', blank=True)),
('members', models.ManyToManyField(related_name=b'group_members', through='accounts.Membership', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(related_name=b'group_owner', to=settings.AUTH_USER_MODEL, null=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='membership',
name='group',
field=models.ForeignKey(to='accounts.WorkingGroup'),
preserve_default=True,
),
migrations.AddField(
model_name='membership',
name='permissions',
field=models.ManyToManyField(to='auth.Permission', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='membership',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
]
|
FUB-HCC/neonion
|
accounts/migrations/0001_initial.py
|
Python
|
gpl-2.0
| 4,596
|
#! /usr/local/bin/python3
from wsgiref.simple_server import make_server
from urllib.parse import parse_qs # parse_qs(environ['QUERY_STRING'])
from html import escape # cgi.parse_qs/cgi.escape are gone in modern Python 3
import os,sys,importlib,urllib
import Controller,Model,View #import default abstract classes
#import Loader
DEFAULT_APP_PATH = "/home/francesco/webapp"
WORKING_APP_PATH = ""
CONTROLLERS_DIRECTORY = "controllers/"
MODELS_DIRECTORY = "models/"
VIEWS_DIRECTORY = "views/"
def webapp(environ,start_response):
#params = processEnviron(environ)
body = setInsertPaths()
body = byteEncode(body)
status = '200 OK'
headers = [('Content-Type','text/html;')]
start_response(status,headers)
return body
def splitRequest(request):
return request
def setInsertPaths():
global DEFAULT_APP_PATH, WORKING_APP_PATH,CONTROLLERS_DIRECTORY,MODELS_DIRECTORY,VIEWS_DIRECTORY
os.chdir(DEFAULT_APP_PATH)
sys.path.insert(0,DEFAULT_APP_PATH+"/"+CONTROLLERS_DIRECTORY)
sys.path.insert(0,DEFAULT_APP_PATH+"/"+MODELS_DIRECTORY)
sys.path.insert(0,DEFAULT_APP_PATH+"/"+VIEWS_DIRECTORY)
Controller = importlib.import_module("IndexController")
Controller = Controller.Index()
Controller = Controller.Init()
#print(Controller)
return [Controller]
def byteEncode(array):
newList = []
for value in array:
newList.append(value.encode("utf-8"))
return newList
def processEnviron(environ):
return environ
if __name__ == "__main__":
httpd = make_server('localhost',8080,webapp)
print('Serving on 8080..')
httpd.serve_forever()
#os.path.join(appDirecotry,"controllers")
#fileRead = os.popen("index.py",'r',1)
#write (fileRead)
#self.returnResponse(fileRead)
#def returnResponse(self,fileread):
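# A minimal sketch of the controller module that setInsertPaths() imports;
# only the IndexController module name and the Index().Init() call chain come
# from the code above, the body is hypothetical:
#
# # controllers/IndexController.py
# class Index:
#     def Init(self):
#         return "<h1>Hello from IndexController</h1>"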
|
frank2411/python-web-mvc-server
|
wsgipy.py
|
Python
|
gpl-2.0
| 1,747
|
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007-2009 Douglas S. Blank <doug.blank@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# $Id$
#------------------------------------------------------------------------
#
# GRAMPS modules
#
#------------------------------------------------------------------------
from gramps.gen.plug import Gramplet
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().sgettext
from gramps.gui.plug.quick import run_quick_report_by_name
from gramps.gen.lib import Date
#------------------------------------------------------------------------
#
# CalendarGramplet class
#
#------------------------------------------------------------------------
class CalendarGramplet(Gramplet):
def init(self):
from gi.repository import Gtk
self.set_tooltip(_("Double-click a day for details"))
self.gui.calendar = Gtk.Calendar()
self.gui.calendar.connect('day-selected-double-click', self.double_click)
self.gui.calendar.set_display_options(
Gtk.CalendarDisplayOptions.SHOW_HEADING)
self.gui.get_container_widget().remove(self.gui.textview)
vbox = Gtk.VBox(False, 0)
vbox.pack_start(self.gui.calendar, False, False, 0)
self.gui.get_container_widget().add_with_viewport(vbox)
vbox.show_all()
#self.gui.calendar.show()
def post_init(self):
self.disconnect("active-changed")
def double_click(self, obj):
# bring up events on this day
        year, month, day = self.gui.calendar.get_date()
        date = Date()
        # Gtk.Calendar months are 0-based; Gramps dates are 1-based
        date.set_yr_mon_day(year, month + 1, day)
run_quick_report_by_name(self.gui.dbstate,
self.gui.uistate,
'onthisday',
date)
|
Forage/Gramps
|
gramps/plugins/gramplet/calendargramplet.py
|
Python
|
gpl-2.0
| 2,539
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# Copyright Manba Team
from __future__ import absolute_import
from __future__ import print_function
import calendar
from twisted.internet.defer import inlineCallbacks
from twisted.trial import unittest
import buildbot.www.change_hook as change_hook
from buildbot.test.fake.web import FakeRequest
from buildbot.test.fake.web import fakeMasterForHooks
from buildbot.www.hooks.bitbucket import _HEADER_EVENT
gitJsonPayload = b"""{
"canon_url": "https://bitbucket.org",
"commits": [
{
"author": "marcus",
"branch": "master",
"files": [
{
"file": "somefile.py",
"type": "modified"
}
],
"message": "Added some more things to somefile.py",
"node": "620ade18607a",
"parents": [
"702c70160afc"
],
"raw_author": "Marcus Bertrand <marcus@somedomain.com>",
"raw_node": "620ade18607ac42d872b568bb92acaa9a28620e9",
"revision": null,
"size": -1,
"timestamp": "2012-05-30 05:58:56",
"utctimestamp": "2012-05-30 03:58:56+00:00"
}
],
"repository": {
"absolute_url": "/marcus/project-x/",
"fork": false,
"is_private": true,
"name": "Project X",
"owner": "marcus",
"scm": "git",
"slug": "project-x",
"website": "https://atlassian.com/"
},
"user": "marcus"
}"""
mercurialJsonPayload = b"""{
"canon_url": "https://bitbucket.org",
"commits": [
{
"author": "marcus",
"branch": "master",
"files": [
{
"file": "somefile.py",
"type": "modified"
}
],
"message": "Added some more things to somefile.py",
"node": "620ade18607a",
"parents": [
"702c70160afc"
],
"raw_author": "Marcus Bertrand <marcus@somedomain.com>",
"raw_node": "620ade18607ac42d872b568bb92acaa9a28620e9",
"revision": null,
"size": -1,
"timestamp": "2012-05-30 05:58:56",
"utctimestamp": "2012-05-30 03:58:56+00:00"
}
],
"repository": {
"absolute_url": "/marcus/project-x/",
"fork": false,
"is_private": true,
"name": "Project X",
"owner": "marcus",
"scm": "hg",
"slug": "project-x",
"website": "https://atlassian.com/"
},
"user": "marcus"
}"""
gitJsonNoCommitsPayload = b"""{
"canon_url": "https://bitbucket.org",
"commits": [
],
"repository": {
"absolute_url": "/marcus/project-x/",
"fork": false,
"is_private": true,
"name": "Project X",
"owner": "marcus",
"scm": "git",
"slug": "project-x",
"website": "https://atlassian.com/"
},
"user": "marcus"
}"""
mercurialJsonNoCommitsPayload = b"""{
"canon_url": "https://bitbucket.org",
"commits": [
],
"repository": {
"absolute_url": "/marcus/project-x/",
"fork": false,
"is_private": true,
"name": "Project X",
"owner": "marcus",
"scm": "hg",
"slug": "project-x",
"website": "https://atlassian.com/"
},
"user": "marcus"
}"""
class TestChangeHookConfiguredWithBitbucketChange(unittest.TestCase):
"""Unit tests for BitBucket Change Hook
"""
def setUp(self):
self.change_hook = change_hook.ChangeHookResource(
dialects={'bitbucket': True}, master=fakeMasterForHooks())
@inlineCallbacks
def testGitWithChange(self):
change_dict = {b'payload': [gitJsonPayload]}
request = FakeRequest(change_dict)
request.received_headers[_HEADER_EVENT] = b"repo:push"
request.uri = b'/change_hook/bitbucket'
request.method = b'POST'
yield request.test_render(self.change_hook)
self.assertEqual(len(self.change_hook.master.addedChanges), 1)
commit = self.change_hook.master.addedChanges[0]
self.assertEqual(commit['files'], ['somefile.py'])
self.assertEqual(
commit['repository'], 'https://bitbucket.org/marcus/project-x/')
self.assertEqual(
calendar.timegm(commit['when_timestamp'].utctimetuple()),
1338350336
)
self.assertEqual(
commit['author'], 'Marcus Bertrand <marcus@somedomain.com>')
self.assertEqual(
commit['revision'], '620ade18607ac42d872b568bb92acaa9a28620e9')
self.assertEqual(
commit['comments'], 'Added some more things to somefile.py')
self.assertEqual(commit['branch'], 'master')
self.assertEqual(
commit['revlink'],
'https://bitbucket.org/marcus/project-x/commits/'
'620ade18607ac42d872b568bb92acaa9a28620e9'
)
self.assertEqual(
commit['properties']['event'],
'repo:push')
@inlineCallbacks
def testGitWithNoCommitsPayload(self):
change_dict = {b'payload': [gitJsonNoCommitsPayload]}
request = FakeRequest(change_dict)
request.uri = b'/change_hook/bitbucket'
request.method = b'POST'
yield request.test_render(self.change_hook)
self.assertEqual(len(self.change_hook.master.addedChanges), 0)
self.assertEqual(request.written, b'no change found')
@inlineCallbacks
def testMercurialWithChange(self):
change_dict = {b'payload': [mercurialJsonPayload]}
request = FakeRequest(change_dict)
request.received_headers[_HEADER_EVENT] = b"repo:push"
request.uri = b'/change_hook/bitbucket'
request.method = b'POST'
yield request.test_render(self.change_hook)
self.assertEqual(len(self.change_hook.master.addedChanges), 1)
commit = self.change_hook.master.addedChanges[0]
self.assertEqual(commit['files'], ['somefile.py'])
self.assertEqual(
commit['repository'], 'https://bitbucket.org/marcus/project-x/')
self.assertEqual(
calendar.timegm(commit['when_timestamp'].utctimetuple()),
1338350336
)
self.assertEqual(
commit['author'], 'Marcus Bertrand <marcus@somedomain.com>')
self.assertEqual(
commit['revision'], '620ade18607ac42d872b568bb92acaa9a28620e9')
self.assertEqual(
commit['comments'], 'Added some more things to somefile.py')
self.assertEqual(commit['branch'], 'master')
self.assertEqual(
commit['revlink'],
'https://bitbucket.org/marcus/project-x/commits/'
'620ade18607ac42d872b568bb92acaa9a28620e9'
)
self.assertEqual(
commit['properties']['event'],
'repo:push')
@inlineCallbacks
def testMercurialWithNoCommitsPayload(self):
change_dict = {b'payload': [mercurialJsonNoCommitsPayload]}
request = FakeRequest(change_dict)
request.uri = b'/change_hook/bitbucket'
request.method = b'POST'
yield request.test_render(self.change_hook)
self.assertEqual(len(self.change_hook.master.addedChanges), 0)
self.assertEqual(request.written, b'no change found')
@inlineCallbacks
def testWithNoJson(self):
request = FakeRequest()
request.uri = b'/change_hook/bitbucket'
request.method = b'POST'
yield request.test_render(self.change_hook)
self.assertEqual(len(self.change_hook.master.addedChanges), 0)
self.assertEqual(request.written, b'Error processing changes.')
request.setResponseCode.assert_called_with(
500, b'Error processing changes.')
self.assertEqual(len(self.flushLoggedErrors()), 1)
@inlineCallbacks
def testGitWithChangeAndProject(self):
change_dict = {
b'payload': [gitJsonPayload],
b'project': [b'project-name']}
request = FakeRequest(change_dict)
request.uri = b'/change_hook/bitbucket'
request.method = b'POST'
yield request.test_render(self.change_hook)
self.assertEqual(len(self.change_hook.master.addedChanges), 1)
commit = self.change_hook.master.addedChanges[0]
self.assertEqual(commit['project'], 'project-name')
|
seankelly/buildbot
|
master/buildbot/test/unit/test_www_hooks_bitbucket.py
|
Python
|
gpl-2.0
| 9,160
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Batchuploader templates """
__revision__ = "$Id$"
import cgi
from invenio.dbquery import run_sql
from invenio.config import CFG_SITE_URL, CFG_SITE_LANG
from invenio.messages import gettext_set_language
from invenio.bibrankadminlib import addadminbox, tupletotable
class Template:
"""Invenio Template class for creating Web Upload interface"""
def tmpl_styles(self):
"""Defines the local CSS styles and javascript used in the plugin"""
styles = """
<style type="text/css">
.mandatory_field{
color:#ff0000
}
.italics{
font-style:italic
}
.italics_small{
font-style:italic;
font-size: 0.9em;
}
#content {width:750px; font:90.1% arial, sans-serif;}
#uploadform {margin:0 0 1em 0}
#uploadform div {margin:0.5em 0}
#uploadform fieldset {border:1px solid #657; padding:0.8em 1em; margin:2em 10px}
#docuploadform {margin:0 0 1em 0}
#docuploadform div {margin:0.5em 0}
#docuploadform fieldset {border:1px solid #657; padding:0.8em 1em; margin:2em 10px}
div.ui-datepicker{
font-size:12px;
}
span.red{
color:#df0000;
}
span.green{
color:#060;
background: transparent;
}
span.yellow{
color:#9f9b00;
}
#info_box{
border: 3px black solid;
border-width: thin;
width: 750px;
}
img.img_link {
border-style: none;
}
</style>
"""
styles += """
<link type="text/css" href="%(site_url)s/img/jquery/jquery-ui.css" rel="stylesheet" />
<script type="text/javascript">
function clearText(field){
if (field.value == field.defaultValue){
field.value = '';
}
}
function defText(field){
if (field.value == ''){
field.value = field.defaultValue;
}
}
</script>
<script type="text/javascript" src="%(site_url)s/js/jquery/jquery.min.js"></script>
<script type="text/javascript" src="%(site_url)s/js/jquery/jquery.ui.datepicker.min.js"></script>
""" % {'site_url':CFG_SITE_URL}
return styles
def tmpl_display_web_metaupload_form(self, ln=CFG_SITE_LANG, error=0, filetype="marcxml", mode="--insert",
submit_date="yyyy-mm-dd", submit_time="hh:mm:ss"):
""" Displays Metadata upload form
@param error: defines the type of error to be displayed
@param mode: upload mode
@param submit_date: file upload date
@param submit_time: file upload time
@return: the form in HTML format
"""
_ = gettext_set_language(ln)
body_content = ""
body_content += """
<script type="text/javascript">
$(function() {
$("#datepicker").datepicker({dateFormat: 'yy-mm-dd'});
});
</script>
"""
body_content += """<form id="uploadform" method="post" action="%(site_url)s/batchuploader/metasubmit" enctype="multipart/form-data">""" \
% {'site_url': CFG_SITE_URL}
body_content += """
<div id="content">
<fieldset>
"""
if error != 0:
if error == 1:
body_content += """
<div><b>%(msg)s</b></div>
""" % {'msg':_("Warning: Please, select a valid time")}
elif error == 2:
body_content += """
<div><b>%(msg)s</b></div>
""" % {'msg':_("Warning: Please, select a valid file")}
elif error == 3:
body_content += """
<div><b>%(msg)s</b></div>
""" % {'msg': _("Warning: The date format is not correct")}
elif error == 4:
body_content += """
<div><b>%(msg)s</b></div>
""" % {'msg': _("Warning: Please, select a valid date")}
body_content += """
<div><span class="mandatory_field""> * </span> %(txt_file)s:<input type="file" name="metafile" size="30" onChange="filename.value=(this.value)"></div>
<input type="hidden" name="filename" id="filename" value="">
<div><span class="mandatory_field""> * </span> %(txt_file_type)s:
<select name="filetype">
<option %(type_sel1)s value="marcxml">MarcXML</option>
<option %(type_sel2)s value="textmarc">TextMARC</option>
</select>
</div>
<div><span class="mandatory_field""> * </span> %(txt_upload_mode)s:
<select name="mode">
<option %(mode_sel1)s>--insert</option>
<option %(mode_sel2)s>--replace</option>
<option %(mode_sel3)s>--correct</option>
<option %(mode_sel4)s>--append</option>
<option %(mode_sel5)s>-ir insert-or-replace</option>
</select>
<a href="%(site_url)s/help/admin/bibupload-admin-guide#3.3" target="_blank"><img class="img_link" src="/img/help.png" title="Upload mode help"></a>
</div>
<div> %(txt_priority)s:
<select name="priority">
<option value="1">normal</option>
<option value="5">high</option>
</select>
<br/>
<div>%(txt_upload_later)s <span class="italics">%(txt_date)s:</span>
<input type="text" id="datepicker" name="submit_date" value=%(submit_date)s onBlur="defText(this)" onFocus="clearText(this)" style="width:100px" >
<span class="italics">%(txt_time)s:</span>
<input type="text" name="submit_time" value=%(submit_time)s onBlur="defText(this)" onFocus="clearText(this)" style="width:100px" >
<span class="italics">%(txt_example)s: 2009-12-20 19:22:18</span>
<div><i>%(txt_mandatory)s</i></div>
<div> <input type="submit" value="Upload" class="adminbutton"> </div>
</fieldset>
""" % {'txt_file': _("Select file to upload"),
'txt_file_type': _("File type"),
'txt_upload_mode': _("Upload mode"),
'txt_upload_later': _("Upload later? then select:"),
'txt_date': _("Date"),
'txt_time': _("Time"),
'txt_example': _("Example"),
'txt_mandatory': _("All fields with %(x_fmt_open)s*%(x_fmt_close)s are mandatory") % \
{'x_fmt_open': '<span class="mandatory_field">', 'x_fmt_close': '</span>'},
'txt_priority': _("Upload priority"),
'type_sel1': filetype == 'marcxml' and "selected" or "",
'type_sel2': filetype == 'textmarc' and "selected" or "",
'mode_sel1': mode == '--insert' and "selected" or "",
'mode_sel2': mode == '--replace' and "selected" or "",
'mode_sel3': mode == '--correct' and "selected" or "",
'mode_sel4': mode == '--append' and "selected" or "",
'mode_sel5': mode == '-ir insert-or-replace' and "selected" or "",
'site_url': CFG_SITE_URL,
'submit_date': cgi.escape(submit_date),
'submit_time': cgi.escape(submit_time)}
body_content += """</form></div>"""
return body_content
def tmpl_upload_successful(self, ln=CFG_SITE_LANG):
""" Displays message when the upload is successful """
_ = gettext_set_language(ln)
body_content = """<br/>"""
body_content += _("Your file has been successfully queued. You can check your %(x_url1_open)supload history%(x_url1_close)s or %(x_url2_open)ssubmit another file%(x_url2_close)s") %\
{'x_url1_open': "<a href=\"%s/batchuploader/history\">" % CFG_SITE_URL,
'x_url1_close': "</a>",
'x_url2_open': "<a href=\"%s/batchuploader/metadata\">" % CFG_SITE_URL,
'x_url2_close': "</a>"}
return body_content
def tmpl_invalid_marcxml(self, ln=CFG_SITE_LANG):
""" Displays message when the MARCXML is not valid """
_ = gettext_set_language(ln)
body_content = """<br/>"""
body_content += _("The MARCXML submitted is not valid. Please, review the file and %(x_url2_open)sresubmit it%(x_url2_close)s") %\
{'x_url2_open': "<a href=\"%s/batchuploader/metadata\">" % CFG_SITE_URL,
'x_url2_close': "</a>"}
return body_content
def tmpl_upload_history(self, ln=CFG_SITE_LANG, upload_meta_list="", upload_doc_list=""):
"""Displays upload history of a given user"""
_ = gettext_set_language(ln)
body_content = ""
body_content += "<h3> Metadata uploads </h3>"
if not upload_meta_list:
body_content += _("No metadata files have been uploaded yet.")
body_content += "<br/>"
else:
body_content += """
<table border=0>
<tr>
                <th>%(txt1)s</th>
                <th>%(txt2)s</th>
                <th>%(txt3)s</th>
                <th>%(txt4)s</th>
</tr>
""" % {'txt1': _("Submit time"),
'txt2': _("File name"),
'txt3': _("Execution time"),
'txt4': _("Status")}
for upload in upload_meta_list:
color = ""
if "ERROR" in upload[3]:
color = "red"
elif upload[3] == "WAITING":
color = "yellow"
elif upload[3] == "DONE":
color = "green"
body_content += """
<tr>
<td style="text-align: center; vertical-align: middle; width: 220px;">%(submit_time)s</td>
<td style="text-align: center; vertical-align: middle; width: 220px;">%(file_name)s</td>
<td style="text-align: center; vertical-align: middle; width: 220px;">%(exec_time)s</td>
<td style="text-align: center; vertical-align: middle; width: 220px;"><span class="%(color)s">%(status)s</span></td>
</tr>
""" % {'submit_time': upload[0],
'file_name': upload[1],
'exec_time': upload[2],
'color': color,
'status': upload[3]
}
body_content += "</table><br/>"
body_content += "<h3> Document uploads </h3>"
if not upload_doc_list:
body_content += _("No document files have been uploaded yet.")
body_content += "<br/>"
else:
body_content += """
<table border=0>
<tr>
                <th>%(txt1)s</th>
                <th>%(txt2)s</th>
                <th>%(txt3)s</th>
                <th>%(txt4)s</th>
</tr>
""" % {'txt1': _("Submit time"),
'txt2': _("File name"),
'txt3': _("Execution time"),
'txt4': _("Status")}
for upload in upload_doc_list:
color = ""
if "ERROR" in upload[3]:
color = "red"
elif upload[3] == "WAITING":
color = "yellow"
elif upload[3] == "DONE":
color = "green"
body_content += """
<tr>
<td style="text-align: center; vertical-align: middle; width: 220px;">%(submit_time)s</td>
<td style="text-align: center; vertical-align: middle; width: 220px;">%(file_name)s</td>
<td style="text-align: center; vertical-align: middle; width: 220px;">%(exec_time)s</td>
<td style="text-align: center; vertical-align: middle; width: 220px;"><span class="%(color)s">%(status)s</span></td>
</tr>
""" % {'submit_time': upload[0],
'file_name': upload[1],
'exec_time': upload[2],
'color': color,
'status': upload[3]
}
body_content += "</table>"
return body_content
def tmpl_display_menu(self, ln=CFG_SITE_LANG, ref=""):
""" Displays menu with all upload options """
_ = gettext_set_language(ln)
body_content = """
        <table>
            <tr>
            <td>0. <small>%(upload_open_link)s%(text1)s%(upload_close_link)s</small></td>
<td>1. <small>%(docupload_open_link)s%(text2)s%(docupload_close_link)s</small></td>
<td>2. <small>%(history_open_link)s%(text3)s%(history_close_link)s</small></td>
<td>3. <small>%(daemon_open_link)s%(text4)s%(daemon_close_link)s</small></td>
</tr>
</table>
""" % { 'upload_open_link': not ref == "metadata" and "<a href=\"%s/batchuploader/metadata?ln=%s\">" % (CFG_SITE_URL, ln) or "",
'upload_close_link': not ref == "metadata" and "</a>" or "",
'text1': _("Metadata batch upload"),
'docupload_open_link': not ref == "documents" and "<a href=\"%s/batchuploader/documents?ln=%s\">" % (CFG_SITE_URL, ln) or "",
'docupload_close_link': not ref == "documents" and "</a>" or "",
'text2': _("Document batch upload"),
'history_open_link': not ref == "history" and "<a href=\"%s/batchuploader/history?ln=%s\">" % (CFG_SITE_URL, ln) or "",
'history_close_link': not ref == "history" and "</a>" or "",
'text3': _("Upload history"),
'daemon_open_link': not ref == "daemon" and "<a href=\"%s/batchuploader/daemon?ln=%s\">" % (CFG_SITE_URL, ln) or "",
'daemon_close_link': not ref == "daemon" and "</a>" or "",
'text4': _("Daemon monitor")
}
return addadminbox("<b>Menu</b>", [body_content])
def tmpl_display_web_docupload_form(self, ln=CFG_SITE_LANG, submit_date="yyyy-mm-dd", submit_time="hh:mm:ss"):
""" Display form used for batch document upload """
_ = gettext_set_language(ln)
body_content = """
<script type="text/javascript">
$(function() {
$("#datepicker").datepicker({dateFormat: 'yy-mm-dd'});
});
</script>
"""
body_content += """<form id="docuploadform" method="post" action="%(site_url)s/batchuploader/docsubmit" enctype="multipart/form-data">""" \
% {'site_url': CFG_SITE_URL}
body_content += """
<div id="content">
<fieldset>
<div><span class="mandatory_field""> * </span> %(txt1)s: <input type="text" name="docfolder" size="30" />
<span class="italics">%(txt2)s: /afs/cern.ch/user/j/user/public/foo/</span></div>
<div><span class="mandatory_field""> * </span> %(txt3)s:
<select name="matching">
<option>reportnumber</option>
<option>recid</option>
</select>
</div>
<div><span class="mandatory_field""> * </span> %(txt4)s: <input type="radio" name="mode" value="append" "checked" id="appendcheckbox"/><label for="appendcheckbox">append</label>
<input type="radio" name="mode" value="correct" id="revisecheckbox"/><label for="revisecheckbox">revise</label>
</div>
<div> %(txt_priority)s:
<select name="priority">
<option value="1">normal</option>
<option value="5">high</option>
</select>
<br/>
<div>%(txt5)s <span class="italics">%(txt6)s:</span>
<input type="text" id="datepicker" name="submit_date" value=%(submit_date)s onBlur="defText(this)" onFocus="clearText(this)" style="width:100px" >
<span class="italics">%(txt7)s:</span>
<input type="text" name="submit_time" value=%(submit_time)s onBlur="defText(this)" onFocus="clearText(this)" style="width:100px" >
<span class="italics">%(txt8)s: 2009-12-20 19:22:18</span>
<br/>
<div><i>%(txt9)s</i></div>
<div> <input type="submit" value="Upload" class="adminbutton"> </div>
</fieldset>
</form></div>
""" % {'submit_date': submit_date,
'submit_time': submit_time,
'txt_priority': _("Upload priority"),
'txt1': _("Input directory"),
'txt2': _("Example"),
'txt3': _("Filename matching"),
'txt4': _("Upload mode"),
'txt5': _("Upload later? then select:"),
'txt6': _("Date"),
'txt7': _("Time"),
'txt8': _("Example"),
'txt9': _("All fields with %(x_fmt_open)s*%(x_fmt_close)s are mandatory") % \
{'x_fmt_open': '<span class="mandatory_field">', 'x_fmt_close': '</span>'}
}
return body_content
def tmpl_display_web_docupload_result(self, ln=CFG_SITE_LANG, errors=None, info=None):
""" Display results from the document upload """
_ = gettext_set_language(ln)
body_content = "<br/>"
body_content += _("<b>%s documents</b> have been found." % info[0])
body_content += "<br/><br/>"
body_content += _("The following files have been successfully queued:")
body_content += "<ul>"
for uploaded_file in info[1]:
body_content += "<li><b>%s</b></li>" % uploaded_file
body_content += "</ul>"
body_content += _("The following errors have occurred:")
body_content += "<ul>"
for error in errors:
if error != 'MoveError':
body_content += "<li><b>%s</b> : %s</li>" % (cgi.escape(str(error[0])), cgi.escape(str(error[1])))
body_content += "</ul>"
if 'MoveError' in errors:
body_content += "<div><i><b>" + _("Some files could not be moved to DONE folder. Please remove them manually.") + "</b></i></div><br/>"
else:
body_content += "<div><i><b>" + _("All uploaded files were moved to DONE folder.") + "</b></i></div><br/>"
body_content += "<a href=\"%(docupload_url)s\">Return to upload form</a>" % \
{'docupload_url': "%s/batchuploader/documents?ln=%s" % (CFG_SITE_URL, ln) }
return body_content
def tmpl_daemon_content(self, ln=CFG_SITE_LANG, docs=None, metadata=None):
""" Displays all information related with batch uploader daemon mode """
_ = gettext_set_language(ln)
body_content = "<br/><div id=\"info_box\">"
body_content += "<ul>"
body_content += "<li>" + _("Using %(x_fmt_open)sweb interface upload%(x_fmt_close)s, actions are executed a single time.") % \
{'x_fmt_open': '<b>', 'x_fmt_close':'</b>'} + "</li>"
body_content += "<li>" + _("Check the %(x_url_open)sBatch Uploader daemon help page%(x_url_close)s for executing these actions periodically.") % \
{'x_url_open': '<a href="%s/help/admin/bibupload-admin-guide#4.2">' % CFG_SITE_URL,
'x_url_close': "</a>"} + \
"</li>"
body_content += "</div><br/>"
body_content += "<h3>%s</h3>" % _("Metadata folders")
body_content += "<ul>"
for folder in metadata.keys():
body_content += "<li><b>" + folder + "</b></li>"
for filename, info in metadata[folder]:
body_content += " "
for stat in info:
body_content += "%s " % stat
body_content += filename + "<br />"
body_content += "</ul>"
body_content += "<h3> Document folders </h3>"
body_content += "<ul>"
for folder in docs.keys():
body_content += "<li><b>" + folder + "</b></li>"
body_content += " "
for filename, info in docs[folder]:
for stat in info:
body_content += "%s " % stat
body_content += filename + "<br />"
body_content += "</ul>"
header = [_("ID"), _("Name"), _("Time"), _("Status"), _("Progress")]
actions = []
body_content += """<br /><b>%s</b><br />""" % _("Last BibSched tasks:")
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='batchuploader' and runtime< now() ORDER by runtime")
if len(res) > 0:
(tsk_id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[len(res) - 1]
actions.append([tsk_id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
else:
actions.append(['', 'batchuploader', '', '', 'Not executed yet'])
body_content += tupletotable(header=header, tuple=actions)
body_content += """<br /><b>%s</b><br />""" % _("Next scheduled BibSched run:")
actions = []
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='batchuploader' and runtime > now() ORDER by runtime")
if len(res) > 0:
(tskid, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[0]
actions.append([tskid, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
else:
actions.append(['', 'batchuploader', '', '', 'Not scheduled'])
body_content += tupletotable(header=header, tuple=actions)
return body_content
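# A minimal usage sketch (run inside a configured Invenio instance; 'en' is
# just an example language code):
#
# tpl = Template()
# page = tpl.tmpl_styles() + tpl.tmpl_display_menu(ln='en', ref='metadata') \
#     + tpl.tmpl_display_web_metaupload_form(ln='en')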
|
pamoakoy/invenio
|
modules/bibupload/lib/batchuploader_templates.py
|
Python
|
gpl-2.0
| 23,046
|
# -*- coding: utf-8 -*-
#
# File: Student.py
#
# Copyright (c) 2008 by []
# Generator: ArchGenXML Version 2.0-beta10
# http://plone.org/products/archgenxml
#
# GNU General Public License (GPL)
#
__author__ = """unknown <unknown>"""
__docformat__ = 'plaintext'
from AccessControl import ClassSecurityInfo
from Products.Archetypes.atapi import *
from zope.interface import implements
import interfaces
from Products.CMFDynamicViewFTI.browserdefault import BrowserDefaultMixin
from Products.ATReferenceBrowserWidget.ATReferenceBrowserWidget import \
ReferenceBrowserWidget
from Products.ATBackRef.BackReferenceField import BackReferenceField, BackReferenceWidget
from Products.UWOshMusicRecruiting.config import *
##code-section module-header #fill in your manual code here
##/code-section module-header
copied_fields = {}
copied_fields['title'] = BaseSchema['title'].copy()
copied_fields['title'].required = 1
copied_fields['title'].searchable = 1
copied_fields['title'].widget.label = "Name"
schema = Schema((
copied_fields['title'],
StringField(
name='phone',
widget=StringField._properties['widget'](
label='Phone',
label_msgid='UWOshMusicRecruiting_label_phone',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
StringField(
name='email',
widget=StringField._properties['widget'](
label='Email',
label_msgid='UWOshMusicRecruiting_label_email',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
StringField(
name='instrument',
widget=StringField._properties['widget'](
label='Instrument',
label_msgid='UWOshMusicRecruiting_label_instrument',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
StringField(
name='address',
widget=StringField._properties['widget'](
label='Address',
label_msgid='UWOshMusicRecruiting_label_address',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
StringField(
name='city',
widget=StringField._properties['widget'](
label='City',
label_msgid='UWOshMusicRecruiting_label_city',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
StringField(
name='state',
widget=StringField._properties['widget'](
label='State',
label_msgid='UWOshMusicRecruiting_label_state',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
StringField(
name='zip',
widget=StringField._properties['widget'](
label='Zip',
label_msgid='UWOshMusicRecruiting_label_zip',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
BooleanField(
name='isCommittedToAttend',
widget=BooleanField._properties['widget'](
label='Iscommittedtoattend',
label_msgid='UWOshMusicRecruiting_label_isCommittedToAttend',
i18n_domain='UWOshMusicRecruiting',
),
searchable=1,
),
BackReferenceField(
name='visits',
widget=BackReferenceWidget(
label='Visits',
label_msgid='UWOshMusicRecruiting_label_visits',
i18n_domain='UWOshMusicRecruiting',
),
allowed_types=('Visit',),
multiValued=1,
relationship='student_visit',
),
),
)
##code-section after-local-schema #fill in your manual code here
##/code-section after-local-schema
Student_schema = BaseSchema.copy() + \
schema.copy()
##code-section after-schema #fill in your manual code here
##/code-section after-schema
class Student(BaseContent, BrowserDefaultMixin):
"""
"""
security = ClassSecurityInfo()
implements(interfaces.IStudent)
meta_type = 'Student'
_at_rename_after_creation = True
schema = Student_schema
##code-section class-header #fill in your manual code here
##/code-section class-header
# Methods
registerType(Student, PROJECTNAME)
# end of class Student
##code-section module-footer #fill in your manual code here
##/code-section module-footer
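# A minimal usage sketch (run in a Plone site; the 'folder' container and all
# field values are hypothetical):
#
# folder.invokeFactory('Student', id='jdoe', title='Jane Doe',
#                      instrument='Clarinet', email='jdoe@example.com')
# student = getattr(folder, 'jdoe')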
|
uwosh/UWOshMusicRecruiting
|
content/Student.py
|
Python
|
gpl-2.0
| 4,310
|
import numpy as np
def pretty_depth(depth):
"""Converts depth into a 'nicer' format for display
This is abstracted to allow for experimentation with normalization
Args:
depth: A numpy array with 2 bytes per pixel
Returns:
        A numpy array that has been processed whose datatype is unspecified
"""
    np.clip(depth, 0, 2**10 - 1, depth) # clip in place to the 10-bit range
    depth >>= 2 # scale 10-bit values down to 8 bits
    depth = depth.astype(np.uint8)
return depth
def pretty_depth_cv(depth):
"""Converts depth into a 'nicer' format for display
This is abstracted to allow for experimentation with normalization
Args:
depth: A numpy array with 2 bytes per pixel
Returns:
        An opencv image whose datatype is unspecified
"""
import cv2
depth = pretty_depth(depth)
# image = cv2.cv.CreateImageHeader((depth.shape[1], depth.shape[0]),
# cv.IPL_DEPTH_8U,
# 1)
# cv2.cv.SetData(image, depth.tostring(),
# depth.dtype.itemsize * depth.shape[1])
return depth
def video_cv(video):
"""Converts video into a BGR format for opencv
This is abstracted out to allow for experimentation
Args:
video: A numpy array with 1 byte per pixel, 3 channels RGB
Returns:
        An opencv image whose datatype is 1 byte, 3 channel BGR
"""
video = video[:, :, ::-1] # RGB -> BGR
# image = cv2.cv.CreateImageHeader((video.shape[1], video.shape[0]),
# cv.IPL_DEPTH_8U,
# 3)
# cv2.cv.SetData(image, video.tostring(),
# video.dtype.itemsize * 3 * video.shape[1])
return video
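# A minimal usage sketch; synthetic 11-bit data stands in for a real Kinect
# depth frame (normally obtained through the freenect bindings).
if __name__ == '__main__':
    depth = np.random.randint(0, 2 ** 11, (480, 640)).astype(np.uint16)
    img = pretty_depth(depth) # uint8 array, ready for display
    print(img.dtype) # uint8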
|
team4099/Stronghold_2016_Vision
|
frame_convert.py
|
Python
|
gpl-2.0
| 1,697
|
"""
MetPX Copyright (C) 2004-2006 Environment Canada
MetPX comes with ABSOLUTELY NO WARRANTY; For details type see the file
named COPYING in the root of the source directory tree.
"""
"""
#############################################################################################
# Name: PDSClient.py
#
# Author: Daniel Lemay
#
# Date: 2004-09-01
#
#############################################################################################
"""
import PDSPaths
import logging
class PDSClient:
"""
#############################################################################################
# Represent a PDS client.
#############################################################################################
"""
def __init__(self, machine, name, pid, status, date, config, logfile):
"""
#############################################################################################
# Constructor of a PDSCLient object
#############################################################################################
"""
self.machine = machine
self.name = name
self.pid = pid
self.status = status
self.date = date
self.config = config
self.logfile = logfile
self.queue = 0
self.logline = []
def setQueue(self, number):
self.queue = number
def getQueue(self):
return self.queue
def setLastLog(self, logline):
self.logline.extend(logline)
def getLastLog(self):
return self.logline
#def __repr__(self):
# return "[CLIENT OBJECT: %s %20s %6s %8s %18s %6d ]" % (self.machine, self.name, self.pid, self.status, self.date, self.queue)
def __repr__(self):
return "[%s %8s %6d %s]" % (self.machine, self.status, self.queue, self.logline)
|
khosrow/metpx
|
columbo/lib/PDSClient.py
|
Python
|
gpl-2.0
| 1,812
|
import os
import tuned.logs
from . import base
from tuned.utils.commands import commands
class kb2s(base.Function):
"""
Conversion function: kbytes to sectors
"""
def __init__(self):
# 1 argument
super(kb2s, self).__init__("kb2s", 1, 1)
def execute(self, args):
if not super(kb2s, self).execute(args):
return None
try:
return str(int(args[0]) * 2)
except ValueError:
return None
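# A minimal usage sketch (assumes the tuned plugin machinery above is
# importable): 4 kbytes -> 8 sectors of 512 bytes each.
#
# assert kb2s().execute(["4"]) == "8"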
|
redhat-performance/tuned
|
tuned/profiles/functions/function_kb2s.py
|
Python
|
gpl-2.0
| 405
|