| code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Base class with common routines between the Apache, Lighttpd, and websocket servers."""
import errno
import logging
import socket
import sys
import tempfile
import time
_log = logging.getLogger(__name__)
class ServerError(Exception):
pass
class HttpServerBase(object):
"""A skeleton class for starting and stopping servers used by the layout tests."""
def __init__(self, port_obj):
self._executive = port_obj._executive
self._filesystem = port_obj._filesystem
self._name = '<virtual>'
self._mappings = {}
self._pid = None
self._pid_file = None
self._port_obj = port_obj
# We need a non-checkout-dependent place to put lock files, etc. We
# don't use the Python default on the Mac because it defaults to a
# randomly-generated directory under /var/folders and no one would ever
# look there.
tmpdir = tempfile.gettempdir()
if port_obj.host.platform.is_mac():
tmpdir = '/tmp'
self._runtime_path = self._filesystem.join(tmpdir, "WebKit")
self._filesystem.maybe_make_directory(self._runtime_path)
def start(self):
"""Starts the server. It is an error to start an already started server.
This method also stops any stale servers started by a previous instance."""
assert not self._pid, '%s server is already running' % self._name
# Stop any stale servers left over from previous instances.
if self._filesystem.exists(self._pid_file):
self._pid = int(self._filesystem.read_text_file(self._pid_file))
self._stop_running_server()
self._pid = None
self._remove_stale_logs()
self._prepare_config()
self._check_that_all_ports_are_available()
self._pid = self._spawn_process()
if self._wait_for_action(self._is_server_running_on_all_ports):
_log.debug("%s successfully started (pid = %d)" % (self._name, self._pid))
else:
self._stop_running_server()
raise ServerError('Failed to start %s server' % self._name)
def stop(self):
"""Stops the server. Stopping a server that isn't started is harmless."""
actual_pid = None
if self._filesystem.exists(self._pid_file):
actual_pid = int(self._filesystem.read_text_file(self._pid_file))
if not self._pid:
self._pid = actual_pid
if not self._pid:
return
if not actual_pid:
_log.warning('Failed to stop %s: pid file is missing' % self._name)
return
if self._pid != actual_pid:
_log.warning('Failed to stop %s: pid file contains %d, not %d' %
(self._name, actual_pid, self._pid))
# Try to kill the existing pid, anyway, in case it got orphaned.
self._executive.kill_process(self._pid)
self._pid = None
return
_log.debug("Attempting to shut down %s server at pid %d" % (self._name, self._pid))
self._stop_running_server()
_log.debug("%s server at pid %d stopped" % (self._name, self._pid))
self._pid = None
def _prepare_config(self):
"""This routine can be overridden by subclasses to do any sort
of initialization required prior to starting the server that may fail."""
pass
def _remove_stale_logs(self):
"""This routine can be overridden by subclasses to try and remove logs
left over from a prior run. This routine should log warnings if the
files cannot be deleted, but should not fail unless failure to
delete the logs will actually cause start() to fail."""
pass
def _spawn_process(self):
"""This routine must be implemented by subclasses to actually start the server.
This routine returns the pid of the started process, and also ensures that that
pid has been written to self._pid_file."""
raise NotImplementedError()
def _stop_running_server(self):
"""This routine must be implemented by subclasses to actually stop the running server listed in self._pid_file."""
raise NotImplementedError()
# Utility routines.
def _remove_log_files(self, folder, starts_with):
files = self._filesystem.listdir(folder)
for file in files:
if file.startswith(starts_with):
full_path = self._filesystem.join(folder, file)
self._filesystem.remove(full_path)
def _wait_for_action(self, action, wait_secs=20.0, sleep_secs=1.0):
"""Repeat the action for wait_sec or until it succeeds, sleeping for sleep_secs
in between each attempt. Returns whether it succeeded."""
start_time = time.time()
while time.time() - start_time < wait_secs:
if action():
return True
_log.debug("Waiting for action: %s" % action)
time.sleep(sleep_secs)
return False
def _is_server_running_on_all_ports(self):
"""Returns whether the server is running on all the desired ports."""
if not self._executive.check_running_pid(self._pid):
_log.debug("Server isn't running at all")
raise ServerError("Server exited")
for mapping in self._mappings:
s = socket.socket()
port = mapping['port']
try:
s.connect(('localhost', port))
_log.debug("Server running on %d" % port)
except socket.error, e:
# this branch is needed on Mac 10.5 / python 2.5
if e.args[0] not in (errno.ECONNREFUSED, errno.ECONNRESET):
raise
_log.debug("Server NOT running on %d: %s" % (port, e))
return False
except IOError, e:
if e.errno not in (errno.ECONNREFUSED, errno.ECONNRESET):
raise
_log.debug("Server NOT running on %d: %s" % (port, e))
return False
finally:
s.close()
return True
def _check_that_all_ports_are_available(self):
for mapping in self._mappings:
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
port = mapping['port']
try:
s.bind(('localhost', port))
except IOError, e:
if e.errno in (errno.EALREADY, errno.EADDRINUSE):
raise ServerError('Port %d is already in use.' % port)
elif sys.platform == 'win32' and e.errno in (errno.WSAEACCES,):
raise ServerError('Port %d is already in use.' % port)
else:
raise
finally:
s.close()
| cs-au-dk/Artemis | WebKit/Tools/Scripts/webkitpy/layout_tests/servers/http_server_base.py | Python | gpl-3.0 | 8,410 |
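The subclass contract spelled out in the docstrings above (`_spawn_process` must return a pid and write it to `self._pid_file`; `_stop_running_server` must shut down the process recorded there) can be illustrated with a minimal sketch. The class name, command, and port below are hypothetical, and the snippet assumes the port object's executive and filesystem expose the popen/write_text_file helpers used here:

```python
class FakeHttpServer(HttpServerBase):
    """Minimal hypothetical subclass of HttpServerBase, for illustration only."""

    def __init__(self, port_obj):
        super(FakeHttpServer, self).__init__(port_obj)
        self._name = 'fake-http'
        self._mappings = [{'port': 8000}]
        self._pid_file = self._filesystem.join(self._runtime_path, 'fake-http.pid')

    def _spawn_process(self):
        # Launch the server (placeholder command), record its pid, return the pid.
        process = self._executive.popen(['python', '-m', 'SimpleHTTPServer', '8000'])
        self._filesystem.write_text_file(self._pid_file, str(process.pid))
        return process.pid

    def _stop_running_server(self):
        # Kill the pid noted by _spawn_process and remove the stale pid file.
        self._executive.kill_process(self._pid)
        if self._filesystem.exists(self._pid_file):
            self._filesystem.remove(self._pid_file)
```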
#!/usr/bin/env python2
'''
Simple monitoring script to collect per process cpu percentage
and mem usage in bytes (vms or virt and rss)
usage:
cron-send-cpu-mem-stats process_name openshift.whatever.zabbix.key
or
cron-send-cpu-mem-stats 'something parameter more params' openshift.something.parameter.more.params
The script will attach .cpu and .mem.{vms|rss} to the end of the zabbix key name for the values
A future enhancement could be to add multiple instances, which would add the pid to the key, but those
would have to be dynamic items in zabbix
'''
# vim: expandtab:tabstop=4:shiftwidth=4
# Disabling invalid-name because pylint doesn't like the naming convention we have.
# pylint: disable=invalid-name
import argparse
import psutil
# Reason: disable pylint import-error because our libs aren't loaded on jenkins.
# Status: temporary until we start testing in a container where our stuff is installed.
# pylint: disable=import-error
from openshift_tools.monitoring.metric_sender import MetricSender
def parse_args():
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='CPU and Memory per process stats collector')
parser.add_argument('--debug', action='store_true', default=None, help='Debug?')
parser.add_argument('process_str', help='The process command line string to match')
parser.add_argument('zabbix_key_prefix', help='Prefix for the key that will be sent \
to zabbix with this data, will get a .cpu and .mem suffix')
return parser.parse_args()
def main():
""" Main function to run the check """
argz = parse_args()
proc_parts = argz.process_str.split()
zagg_data = {}
for proc in psutil.process_iter():
try:
if proc_parts[0] == proc.name():
proc.dict = proc.as_dict(['cmdline', 'memory_info'])
cmdline = proc.dict['cmdline']
if len(proc_parts) > 1 and len(cmdline) > 1:
part_count = len(proc_parts[1:])
# This call might be confusing (I know I will be in 2 weeks), so here's a quick explanation:
# if the process name matches above, it will check the rest of the strings
# against the /proc/<pid>/cmdline contents, order shouldn't matter since all have to match
if len(set(proc_parts[1:]).intersection(set(cmdline[1:1+part_count]))) != part_count:
continue
if argz.debug:
print cmdline
cpu_percent = '{0:.2f}'.format(proc.cpu_percent(interval=0.5))
mem_vms = '{0}'.format(getattr(proc.dict['memory_info'], 'vms'))
mem_rss = '{0}'.format(getattr(proc.dict['memory_info'], 'rss'))
zagg_data = {'{0}.cpu'.format(argz.zabbix_key_prefix) : cpu_percent,
'{0}.mem.vms'.format(argz.zabbix_key_prefix) : mem_vms,
'{0}.mem.rss'.format(argz.zabbix_key_prefix) : mem_rss}
except psutil.NoSuchProcess:
pass
if argz.debug:
try:
print 'Process ({0}) is using {1} CPU and {2} {3} memory'.format(argz.process_str,
cpu_percent,
mem_vms,
mem_rss)
print 'Zagg will receive: {0}'.format(zagg_data)
except NameError as ex:
print 'No values: {0}'.format(ex)
if zagg_data:
ms = MetricSender(debug=argz.debug)
ms.add_metric(zagg_data)
ms.send_metrics()
if __name__ == '__main__':
main()
| ivanhorvath/openshift-tools | scripts/monitoring/cron-send-cpu-mem-stats.py | Python | apache-2.0 | 3,773 |
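The core pattern in the script above (iterate processes with psutil, match by name, then read cpu_percent and memory_info) can be exercised on its own. This is a standalone sketch, not part of the openshift-tools library; the process name is just an example:

```python
import psutil

def sample_process(name_to_match):
    """Return (cpu_percent, vms_bytes, rss_bytes) for the first matching process, or None."""
    for proc in psutil.process_iter():
        try:
            if proc.name() == name_to_match:
                info = proc.as_dict(['memory_info'])
                cpu = proc.cpu_percent(interval=0.5)
                return cpu, info['memory_info'].vms, info['memory_info'].rss
        except psutil.NoSuchProcess:
            continue  # process disappeared while iterating
    return None

print sample_process('python')  # Python 2 print, matching the script above
```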
# Outspline - A highly modular and extensible outliner.
# Copyright (C) 2011-2014 Dario Giovannetti <dev@dariogiovannetti.net>
#
# This file is part of Outspline.
#
# Outspline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Outspline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outspline. If not, see <http://www.gnu.org/licenses/>.
from collections import OrderedDict as OD
data = (
OD(),
OD((
("Log", (
OD((
("log_level_stdout", "1"),
("log_level_file", "0"),
("log_file", "~/.config/outspline/outspline.log"),
)),
OD()
)),
("Save", (
OD((
("default_extension", "osl"),
)),
OD()
)),
("History", (
OD((
("default_soft_limit", "60"),
("time_limit", "15"),
("hard_limit", "120"),
)),
OD()
)),
("Extensions", (OD(), OD())),
("Interfaces", (OD(), OD())),
("Plugins", (OD(), OD())),
))
)
| xguse/outspline | src/outspline/conf/core.py | Python | gpl-3.0 | 1,547 |
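For readers unfamiliar with the layout, `data[0]` is an empty top-level options dict and `data[1]` maps each section name to an `(options, subsections)` pair. A small hypothetical lookup, assuming the module is importable as shown:

```python
from outspline.conf.core import data  # hypothetical direct import, for illustration

log_options = data[1]["Log"][0]             # the "Log" section's option dict
print(log_options["log_file"])              # ~/.config/outspline/outspline.log
print(data[1]["History"][0]["time_limit"])  # '15' (values are stored as strings)
```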
from django.db import connection, models
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
PsycoPG2 implementation of database operations.
"""
backend_name = "postgres"
def rename_column(self, table_name, old, new):
if old == new:
return []
qn = connection.ops.quote_name
params = (qn(table_name), qn(old), qn(new))
self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % params)
def rename_table(self, old_table_name, table_name):
"will rename the table and an associated ID sequence and primary key index"
# First, rename the table
generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
# Then, try renaming the ID sequence
# (if you're using other AutoFields... your problem, unfortunately)
self.commit_transaction()
self.start_transaction()
try:
generic.DatabaseOperations.rename_table(self, old_table_name+"_id_seq", table_name+"_id_seq")
except:
if self.debug:
print " ~ No such sequence (ignoring error)"
self.rollback_transaction()
else:
self.commit_transaction()
self.start_transaction()
# Rename the primary key index. This will not rename other indices on
# the table that are used by Django (e.g. foreign keys). Until we
# figure out how, you need to do this yourself.
try:
generic.DatabaseOperations.rename_table(self, old_table_name+"_pkey", table_name+ "_pkey")
except:
if self.debug:
print " ~ No such primary key (ignoring error)"
self.rollback_transaction()
else:
self.commit_transaction()
self.start_transaction()
def rename_index(self, old_index_name, index_name):
"Rename an index individually"
generic.DatabaseOperations.rename_table(self, old_index_name, index_name)
def _db_type_for_alter_column(self, field):
"""
Returns a field's type suitable for ALTER COLUMN.
Strips CHECKs from PositiveSmallIntegerField and PositiveIntegerField.
@param field: The field to generate type for
"""
if isinstance(field, models.PositiveSmallIntegerField) or isinstance(field, models.PositiveIntegerField):
return field.db_type().split(" ")[0]
return super(DatabaseOperations, self)._db_type_for_alter_column(field)
| wahaha02/myblog | south/db/postgresql_psycopg2.py | Python | bsd-3-clause | 2,521 |
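For context, a South migration reaches these operations through the `south.db.db` facade; the sketch below (table and column names are invented) shows the two renames the backend above special-cases:

```python
from south.db import db

class Migration(object):  # sketch only; a real migration subclasses south.v2.SchemaMigration
    def forwards(self, orm):
        # Issues ALTER TABLE ... RENAME COLUMN via the PostgreSQL backend above.
        db.rename_column('blog_post', 'body_txt', 'body')
        # Renames the table; the backend above also tries to rename
        # blog_post_id_seq and blog_post_pkey in separate transactions.
        db.rename_table('blog_post', 'blog_article')
```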
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Midokura Japan K.K.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License
import webob
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova import log as logging
LOG = logging.getLogger('nova.api.openstack.compute.contrib.server_start_stop')
class ServerStartStopActionController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(ServerStartStopActionController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
@wsgi.action('os-start')
def _start_server(self, req, id, body):
"""Start an instance. """
context = req.environ['nova.context']
try:
LOG.debug(_("start instance %r"), id)
instance = self.compute_api.get(context, id)
self.compute_api.start(context, instance)
except exception.ApiError, e:
raise webob.exc.HTTPBadRequest(explanation=e.message)
except exception.NotAuthorized, e:
raise webob.exc.HTTPUnauthorized()
return webob.Response(status_int=202)
@wsgi.action('os-stop')
def _stop_server(self, req, id, body):
"""Stop an instance."""
context = req.environ['nova.context']
try:
LOG.debug(_("stop instance %r"), id)
instance = self.compute_api.get(context, id)
self.compute_api.stop(context, instance)
except exception.ApiError, e:
raise webob.exc.HTTPBadRequest(explanation=e.message)
except exception.NotAuthorized, e:
raise webob.exc.HTTPUnauthorized()
return webob.Response(status_int=202)
class Server_start_stop(extensions.ExtensionDescriptor):
"""Start/Stop instance compute API support"""
name = "ServerStartStop"
namespace = "http://docs.openstack.org/compute/ext/servers/api/v1.1"
updated = "2012-01-23:00:00+00:00"
def get_controller_extensions(self):
controller = ServerStartStopActionController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
| rcbops/nova-buildpackage | nova/api/openstack/compute/contrib/server_start_stop.py | Python | apache-2.0 | 2,721 |
"""
.. _ex-publication-figure:
===================================
Make figures more publication ready
===================================
In this example, we show several use cases to take MNE plots and
customize them for a more publication-ready look.
"""
# Authors: Eric Larson <larson.eric.d@gmail.com>
# Daniel McCloy <dan.mccloy@gmail.com>
# Stefan Appelhoff <stefan.appelhoff@mailbox.org>
#
# License: BSD (3-clause)
###############################################################################
# .. contents:: Contents
# :local:
# :depth: 1
#
# Imports
# -------
# We are importing everything we need for this example:
import os.path as op
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import (make_axes_locatable, ImageGrid,
inset_locator)
import mne
###############################################################################
# Evoked plot with brain activation
# ---------------------------------
#
# Suppose we want a figure with an evoked plot on top, and the brain activation
# below, with the brain subplot slightly bigger than the evoked plot. Let's
# start by loading some :ref:`example data <sample-dataset>`.
data_path = mne.datasets.sample.data_path()
subjects_dir = op.join(data_path, 'subjects')
fname_stc = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg-eeg-lh.stc')
fname_evoked = op.join(data_path, 'MEG', 'sample', 'sample_audvis-ave.fif')
evoked = mne.read_evokeds(fname_evoked, 'Left Auditory')
evoked.pick_types(meg='grad').apply_baseline((None, 0.))
max_t = evoked.get_peak()[1]
stc = mne.read_source_estimate(fname_stc)
###############################################################################
# During interactive plotting, we might see figures like this:
evoked.plot()
stc.plot(views='lat', hemi='split', size=(800, 400), subject='sample',
subjects_dir=subjects_dir, initial_time=max_t,
time_viewer=False, show_traces=False)
###############################################################################
# To make a publication-ready figure, first we'll re-plot the brain on a white
# background, take a screenshot of it, and then crop out the white margins.
# While we're at it, let's change the colormap, set custom colormap limits and
# remove the default colorbar (so we can add a smaller, vertical one later):
colormap = 'viridis'
clim = dict(kind='value', lims=[4, 8, 12])
# Plot the STC, get the brain image, crop it:
brain = stc.plot(views='lat', hemi='split', size=(800, 400), subject='sample',
subjects_dir=subjects_dir, initial_time=max_t, background='w',
colorbar=False, clim=clim, colormap=colormap,
time_viewer=False, show_traces=False)
screenshot = brain.screenshot()
brain.close()
###############################################################################
# Now let's crop out the white margins and the white gap between hemispheres.
# The screenshot has dimensions ``(h, w, 3)``, with the last axis being R, G, B
# values for each pixel, encoded as integers between ``0`` and ``255``. ``(255,
# 255, 255)`` encodes a white pixel, so we'll detect any pixels that differ
# from that:
nonwhite_pix = (screenshot != 255).any(-1)
nonwhite_row = nonwhite_pix.any(1)
nonwhite_col = nonwhite_pix.any(0)
cropped_screenshot = screenshot[nonwhite_row][:, nonwhite_col]
# before/after results
fig = plt.figure(figsize=(4, 4))
axes = ImageGrid(fig, 111, nrows_ncols=(2, 1), axes_pad=0.5)
for ax, image, title in zip(axes, [screenshot, cropped_screenshot],
['Before', 'After']):
ax.imshow(image)
ax.set_title('{} cropping'.format(title))
###############################################################################
# A lot of figure settings can be adjusted after the figure is created, but
# many can also be adjusted in advance by updating the
# :data:`~matplotlib.rcParams` dictionary. This is especially useful when your
# script generates several figures that you want to all have the same style:
# Tweak the figure style
plt.rcParams.update({
'ytick.labelsize': 'small',
'xtick.labelsize': 'small',
'axes.labelsize': 'small',
'axes.titlesize': 'medium',
'grid.color': '0.75',
'grid.linestyle': ':',
})
###############################################################################
# Now let's create our custom figure. There are lots of ways to do this step.
# Here we'll create the figure and the subplot axes in one step, specifying
# overall figure size, number and arrangement of subplots, and the ratio of
# subplot heights for each row using :mod:`GridSpec keywords
# <matplotlib.gridspec>`. Other approaches (using
# :func:`~matplotlib.pyplot.subplot2grid`, or adding each axes manually) are
# shown commented out, for reference.
# sphinx_gallery_thumbnail_number = 4
# figsize unit is inches
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(4.5, 3.),
gridspec_kw=dict(height_ratios=[3, 4]))
# alternate way #1: using subplot2grid
# fig = plt.figure(figsize=(4.5, 3.))
# axes = [plt.subplot2grid((7, 1), (0, 0), rowspan=3),
# plt.subplot2grid((7, 1), (3, 0), rowspan=4)]
# alternate way #2: using figure-relative coordinates
# fig = plt.figure(figsize=(4.5, 3.))
# axes = [fig.add_axes([0.125, 0.58, 0.775, 0.3]), # left, bot., width, height
# fig.add_axes([0.125, 0.11, 0.775, 0.4])]
# we'll put the evoked plot in the upper axes, and the brain below
evoked_idx = 0
brain_idx = 1
# plot the evoked in the desired subplot, and add a line at peak activation
evoked.plot(axes=axes[evoked_idx])
peak_line = axes[evoked_idx].axvline(max_t, color='#66CCEE', ls='--')
# custom legend
axes[evoked_idx].legend(
[axes[evoked_idx].lines[0], peak_line], ['MEG data', 'Peak time'],
frameon=True, columnspacing=0.1, labelspacing=0.1,
fontsize=8, fancybox=True, handlelength=1.8)
# remove the "N_ave" annotation
axes[evoked_idx].texts = []
# Remove spines and add grid
axes[evoked_idx].grid(True)
axes[evoked_idx].set_axisbelow(True)
for key in ('top', 'right'):
axes[evoked_idx].spines[key].set(visible=False)
# Tweak the ticks and limits
axes[evoked_idx].set(
yticks=np.arange(-200, 201, 100), xticks=np.arange(-0.2, 0.51, 0.1))
axes[evoked_idx].set(
ylim=[-225, 225], xlim=[-0.2, 0.5])
# now add the brain to the lower axes
axes[brain_idx].imshow(cropped_screenshot)
axes[brain_idx].axis('off')
# add a vertical colorbar with the same properties as the 3D one
divider = make_axes_locatable(axes[brain_idx])
cax = divider.append_axes('right', size='5%', pad=0.2)
cbar = mne.viz.plot_brain_colorbar(cax, clim, colormap, label='Activation (F)')
# tweak margins and spacing
fig.subplots_adjust(
left=0.15, right=0.9, bottom=0.01, top=0.9, wspace=0.1, hspace=0.5)
# add subplot labels
for ax, label in zip(axes, 'AB'):
ax.text(0.03, ax.get_position().ymax, label, transform=fig.transFigure,
fontsize=12, fontweight='bold', va='top', ha='left')
###############################################################################
# Custom timecourse with montage inset
# ------------------------------------
#
# Suppose we want a figure with some mean timecourse extracted from a number of
# sensors, and we want a smaller panel within the figure to show a head outline
# with the positions of those sensors clearly marked.
# If you are familiar with MNE, you know that this is something that
# :func:`mne.viz.plot_compare_evokeds` does, see an example output in
# :ref:`ex-hf-sef-data` at the bottom.
#
# In this part of the example, we will show you how to achieve this result on
# your own figure, without having to use :func:`mne.viz.plot_compare_evokeds`!
#
# Let's start by loading some :ref:`example data <sample-dataset>`.
data_path = mne.datasets.sample.data_path()
fname_raw = op.join(data_path, "MEG", "sample", "sample_audvis_raw.fif")
raw = mne.io.read_raw_fif(fname_raw)
# For the sake of the example, we focus on EEG data
raw.pick_types(meg=False, eeg=True)
###############################################################################
# Let's make a plot.
# channels to plot:
to_plot = [f"EEG {i:03}" for i in range(1, 5)]
# get the data for plotting in a short time interval from 10 to 20 seconds
start = int(raw.info['sfreq'] * 10)
stop = int(raw.info['sfreq'] * 20)
data, times = raw.get_data(picks=to_plot,
start=start, stop=stop, return_times=True)
# Scale the data from the MNE internal unit V to µV
data *= 1e6
# Take the mean of the channels
mean = np.mean(data, axis=0)
# make a figure
fig, ax = plt.subplots(figsize=(4.5, 3))
# plot some EEG data
ax.plot(times, mean)
###############################################################################
# So far so good. Now let's add the smaller figure within the figure to show
# exactly which sensors we used to make the timecourse.
# For that, we use an "inset_axes" that we plot into our existing axes.
# The head outline with the sensor positions can be plotted using the
# `~mne.io.Raw` object that is the source of our data.
# Specifically, that object already contains all the sensor positions,
# and we can plot them using the ``plot_sensors`` method.
# recreate the figure (only necessary for our documentation server)
fig, ax = plt.subplots(figsize=(4.5, 3))
ax.plot(times, mean)
axins = inset_locator.inset_axes(ax, width="30%", height="30%", loc=2)
# pick_channels() edits the raw object in place, so we'll make a copy here
# so that our raw object stays intact for potential later analysis
raw.copy().pick_channels(to_plot).plot_sensors(title="", axes=axins)
###############################################################################
# That looks nice. But the sensor dots are way too big for our taste. Luckily,
# all MNE-Python plots use Matplotlib under the hood and we can customize
# each and every facet of them.
# To make the sensor dots smaller, we need to first get a handle on them to
# then apply a ``*.set_*`` method on them.
# If we inspect our axes we find the objects contained in our plot:
print(axins.get_children())
###############################################################################
# That's quite a lot of objects, but we know that we want to change the
# sensor dots, and those are most certainly a "PathCollection" object.
# So let's have a look at how many "collections" we have in the axes.
print(axins.collections)
###############################################################################
# There is only one! Those must be the sensor dots we were looking for.
# We finally found exactly what we needed. Sometimes this can take a bit of
# experimentation.
sensor_dots = axins.collections[0]
# Recreate the figure once more; shrink the sensor dots; add axis labels
fig, ax = plt.subplots(figsize=(4.5, 3))
ax.plot(times, mean)
axins = inset_locator.inset_axes(ax, width="30%", height="30%", loc=2)
raw.copy().pick_channels(to_plot).plot_sensors(title="", axes=axins)
sensor_dots = axins.collections[0]
sensor_dots.set_sizes([1])
# add axis labels, and adjust bottom figure margin to make room for them
ax.set(xlabel="Time (s)", ylabel="Amplitude (µV)")
fig.subplots_adjust(bottom=0.2)
| olafhauk/mne-python | examples/visualization/plot_publication_figure.py | Python | bsd-3-clause | 11,270 |
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
from mcfw.consts import MISSING
STRING_TYPE = 'string'
BOOL_TYPE = 'bool'
LONG_TYPE = 'long'
FLOAT_TYPE = 'float'
TO_TYPE = 'to'
STRING_LIST_TYPE = 'list_of_string'
BOOL_LIST_TYPE = 'list_of_bool'
LONG_LIST_TYPE = 'list_of_long'
FLOAT_LIST_TYPE = 'list_of_float'
TO_LIST_TYPE = 'list_of_to'
# Mapping of single simple type to type enum
OBJC_SINGLE_TYPE_MAPPING = {
'unicode': STRING_TYPE,
'bool': BOOL_TYPE,
'float': FLOAT_TYPE,
'long': LONG_TYPE,
'int': LONG_TYPE,
}
# Mapping of list simple type to type enum
OBJC_LIST_TYPE_MAPPING = {
'unicode': STRING_LIST_TYPE,
'bool': BOOL_LIST_TYPE,
'float': FLOAT_LIST_TYPE,
'long': LONG_LIST_TYPE,
'int': LONG_LIST_TYPE,
}
# Mapping of type enum to properly aligned ivar/property code string
OBJC_ALIGNED_REPRESENTATION_MAPPING = {
STRING_TYPE: 'NSString *',
BOOL_TYPE: 'BOOL ',
FLOAT_TYPE: 'MCTFloat ',
LONG_TYPE: 'MCTlong ',
STRING_LIST_TYPE: 'NSArray<NSString *> *',
BOOL_LIST_TYPE: 'NSArray<NSNumber *> *',
FLOAT_LIST_TYPE: 'NSArray<NSNumber *> *',
LONG_LIST_TYPE: 'NSArray<NSNumber *> *',
}
# Mapping of type enum to non-aligned code string
# TO_TYPE is missing from this list
OBJC_REPRESENTATION_MAPPING = {
STRING_TYPE: 'NSString *',
BOOL_TYPE: 'BOOL ',
FLOAT_TYPE: 'MCTFloat ',
LONG_TYPE: 'MCTlong ',
STRING_LIST_TYPE: 'NSArray<NSString *> *',
BOOL_LIST_TYPE: 'NSArray<NSNumber *> *',
FLOAT_LIST_TYPE: 'NSArray<NSNumber *> *',
LONG_LIST_TYPE: 'NSArray<NSNumber *> *',
}
# Render attributes for objC @property declaration, based on AttrDefinition
def objc_attr_property_attribute(attrdef):
cleantype = objc_attr_internal_type(attrdef)
if cleantype == STRING_TYPE:
return '(nonatomic, copy) '
if cleantype in [BOOL_TYPE, LONG_TYPE, FLOAT_TYPE]:
return '(nonatomic) '
return '(nonatomic, strong)'
# Map an AttrDefinition to an enumerated type (see *_TYPE constants in this module)
def objc_attr_internal_type(attrdef):
if objc_attr_is_list_type(attrdef):
return OBJC_LIST_TYPE_MAPPING.get(attrdef.type, TO_LIST_TYPE)
return objc_internal_type(attrdef.type)
def objc_internal_type(type_str):
return OBJC_SINGLE_TYPE_MAPPING.get(type_str, TO_TYPE)
# Check whether an AttrDefinition refers to a collection
def objc_attr_is_list_type(attrdef):
return attrdef.collection_type == list.__name__
# Render objC classname for a transfer object
def objc_to_classname(attrdef):
return attrdef and 'MCT_' + attrdef.type.replace('.', '_')
# Render objC classname for an API package
def objc_package_classname(package):
return package and 'MCT_' + package.replace('.', '_')
# Render objC instance name for a CAPI package
def objc_package_instancename(package):
return package.replace('.', '_') + '_IClientRPC_instance'
OBJC_RESERVED_KEYWORDS = ['atomic', 'auto', 'break', 'bycopy', 'byref', 'case', 'char',
'const', 'continue', 'default', 'description', 'do', 'double', 'else', 'enum',
'extern', 'float', 'for', 'goto', 'hash', 'id', 'if', 'in', 'inline', 'inout',
'int', 'long', 'nil', 'nonatomic', 'oneway', 'out', 'register',
'restrict', 'retain', 'return', 'self', 'short', 'signed', 'sizeof',
'static', 'struct', 'super', 'switch', 'typedef', 'union', 'unsigned',
'void', 'volatile', 'while', 'strong', 'weak', 'friend']
# Cleanup a name to make it objC compliant (e.g. reserved keywords, dots in name)
def objc_cleanup_name(name):
name = name.rstrip('_')
if name in OBJC_RESERVED_KEYWORDS:
name += 'X'
return name
# Generate property name for a field AttrDefinition
def objc_property_field_name(attrdef):
return objc_cleanup_name(attrdef.name)
# Generate ivar name for a field AttrDefinition
def objc_ivar_field_name(attrdef):
return objc_property_field_name(attrdef) + '_'
# generate name for transfer object class definition
def objc_make_to_name(to):
return to and 'MCT' + '_' + to.package.replace('.', '_') + '_' + to.name
# check whether property must be deallocated in destructor
def objc_field_must_be_deallocated(field):
return objc_attr_internal_type(field) not in [BOOL_TYPE, LONG_TYPE, FLOAT_TYPE]
# used for tracking TODOs
def objc_error(param):
raise RuntimeError('ERROR - Not yet implemented ' + unicode(param))
# used for tracking TODOs
def objc_warning(param):
print 'WARNING - Not yet implemented ' + unicode(param)
return ''
##########################################################################################
# IVAR / PROPERTY / ARG / RETURN_TYPE TYPES
##########################################################################################
# Generate properly aligned code for field definition (ivar + property)
def objc_code_fieldtype_representation(attrdef):
return objc_code_type_representation(attrdef, allow_collection=True, align=True)
# Generate code for return type
def objc_code_rtype_representation(attrdef):
return objc_code_type_representation(attrdef, allow_collection=False, align=False)
# Generate code for argument type
def objc_code_argtype_representation(attrdef):
return objc_code_type_representation(attrdef, allow_collection=True, align=False)
def objc_code_type_representation(attrdef, allow_collection, align):
if not allow_collection and attrdef.collection_type:
raise RuntimeError('Collection not supported')
internal_type = objc_attr_internal_type(attrdef)
if internal_type == TO_TYPE:
return objc_to_classname(attrdef) + ' *'
if internal_type == TO_LIST_TYPE:
return 'NSArray<%s%s *> *' % ('__kindof ' if attrdef.subtype else '',
objc_to_classname(attrdef))
return OBJC_REPRESENTATION_MAPPING[internal_type]
def objc_representation_for_type(type_str):
return OBJC_REPRESENTATION_MAPPING[objc_internal_type(type_str)]
def objc_default_value(field):
if field.default == MISSING:
raise Exception("There is no default value (field: %s)" % field.name)
if field.default is None:
return 'nil'
if field.collection_type:
return '[NSMutableArray arrayWithCapacity:0]'
if field.type not in OBJC_SINGLE_TYPE_MAPPING:
raise Exception("field.type (%s) not in OBJC_SINGLE_TYPE_MAPPING" % field.type)
if field.type == 'unicode':
return '@"%s"' % field.default
if field.type == 'bool':
return 'YES' if field.default else 'NO'
return field.default
| our-city-app/oca-backend | src-generator/generator/custom/objc_filters.py | Python | apache-2.0 | 7,376 |
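A rough illustration of how these filters turn a field definition into Objective-C declarations. The `AttrDef` stand-in is hypothetical (the generator's real AttrDefinition class lives elsewhere); it only needs the attributes the filters read:

```python
class AttrDef(object):
    """Hypothetical stand-in for the generator's AttrDefinition."""
    def __init__(self, name, type_, collection_type=None, subtype=None):
        self.name = name
        self.type = type_
        self.collection_type = collection_type
        self.subtype = subtype

title = AttrDef('description', 'unicode')
tags = AttrDef('tags', 'unicode', collection_type='list')

print objc_attr_property_attribute(title)        # (nonatomic, copy)
print objc_code_fieldtype_representation(title)  # NSString *
print objc_code_fieldtype_representation(tags)   # NSArray<NSString *> *
print objc_property_field_name(title)            # descriptionX ('description' is reserved)
```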
from django.apps import AppConfig
from django.contrib.auth.signals import user_logged_out
from django.db.models.signals import post_save
class TrackingConfig(AppConfig):
name = 'tracking'
verbose_name = 'django-tracking2'
def ready(self):
from tracking import handlers
from tracking.models import Visitor
user_logged_out.connect(handlers.track_ended_session)
post_save.connect(handlers.post_save_cache, sender=Visitor)
| bruth/django-tracking2 | tracking/apps.py | Python | bsd-2-clause | 467 |
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# Copyright 2013 Canonical
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
"""libautopilot-qt autopilot tests - top level package."""
| uglide/autopilot-qt | tests/autopilot/libautopilot_qt/__init__.py | Python | gpl-3.0 | 349 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#******************************************************************************
# $Id$
#
# Project: GDAL
# Purpose: Application for Google web service authentication.
# Author: Frank Warmerdam, warmerdam@pobox.com
#
#******************************************************************************
# Copyright (c) 2013, Frank Warmerdam <warmerdam@pobox.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#******************************************************************************
#
from osgeo import gdal
import sys
import stat
import os
import time
import webbrowser
SCOPES = {
'ft' : 'https://www.googleapis.com/auth/fusiontables',
'gme' : 'https://www.googleapis.com/auth/mapsengine',
'gme.ro' : 'https://www.googleapis.com/auth/mapsengine.readonly',
}
# =============================================================================
# Usage()
# =============================================================================
def Usage():
print('')
print('Usage: gdal_auth.py [-s scope]')
print(' - interactive use.')
print('')
print('or:')
print('Usage: gdal_auth.py login [-s scope] ')
print('Usage: gdal_auth.py auth2refresh [-s scope] auth_token')
print('Usage: gdal_auth.py refresh2access [-s scope] refresh_token')
print('')
print('scopes: ft/gme/gme.ro/full_url')
print('')
sys.exit(1)
# =============================================================================
# Mainline
# =============================================================================
scope = SCOPES['ft']
token_in = None
command = None
argv = gdal.GeneralCmdLineProcessor( sys.argv )
if argv is None:
sys.exit( 0 )
# Parse command line arguments.
i = 1
while i < len(argv):
arg = argv[i]
if arg == '-s' and i < len(argv)-1:
if argv[i+1] in SCOPES:
scope = SCOPES[argv[i+1]]
elif argv[i+1].startswith('http'):
scope = argv[i+1]
else:
print('Scope %s not recognised.' % argv[i+1])
Usage()
sys.exit(1)
i = i + 1
elif arg[0] == '-':
Usage()
elif command is None:
command = arg
elif token_in is None:
token_in = arg
else:
Usage()
i = i + 1
if command is None:
command = 'interactive'
if command == 'login':
print(gdal.GOA2GetAuthorizationURL(scope))
elif command == 'auth2refresh':
print(gdal.GOA2GetRefreshToken(token_in, scope))
elif command == 'refresh2access':
print(gdal.GOA2GetAccessToken(token_in, scope))
elif command != 'interactive':
Usage()
else:
# Interactive case
print('Authorization requested for scope:')
print(scope)
print('')
print('Please login and authorize access in web browser...')
webbrowser.open(gdal.GOA2GetAuthorizationURL(scope))
time.sleep(2.0)
print('')
print('Enter authorization token:')
auth_token = sys.stdin.readline()
refresh_token = gdal.GOA2GetRefreshToken(auth_token, scope)
print('Refresh Token:'+refresh_token)
print('')
print('Consider setting a configuration option like:')
print('GFT_REFRESH_TOKEN='+refresh_token)
| tilemapjp/OSGeo.GDAL.Xamarin | gdal-1.11.0/swig/python/scripts/gdal_auth.py | Python | mit | 4,264 |
import os
import sys
import imp
import logging
from collections import namedtuple
"""
Objects used to configure Glue at runtime.
"""
__all__ = ['Registry', 'SettingRegistry', 'ExporterRegistry',
'ColormapRegistry', 'DataFactoryRegistry', 'QtClientRegistry',
'LinkFunctionRegistry', 'LinkHelperRegistry',
'ProfileFitterRegistry',
'qt_client', 'data_factory', 'link_function', 'link_helper',
'colormaps',
'exporters', 'settings', 'fit_plugin']
class Registry(object):
"""Container to hold groups of objects or settings.
Registry instances are used by Glue to track objects
used for various tasks like data linking, widget creation, etc.
They have the following properties:
- A `members` property, which lists each item in the registry
- A `default_members` function, which can be overridden to lazily
initialize the members list
- A call interface, allowing the instance to be used as a decorator
for users to add new items to the registry in their config files
"""
def __init__(self):
self._members = []
self._loaded = False
@property
def members(self):
""" A list of the members in the registry.
The return value is a list. The contents of the list
are specified in each subclass"""
if not self._loaded:
self._members = self.default_members() + self._members
self._loaded = True
return self._members
def default_members(self):
"""The member items provided by default. These are put in this
method so that code is only imported when needed"""
return []
def add(self, value):
""" Add a new item to the registry """
self._members.append(value)
def __iter__(self):
return iter(self.members)
def __len__(self):
return len(self.members)
def __contains__(self, value):
return value in self.members
def __call__(self, arg):
"""This is provided so that registry instances can be used
as decorators. The decorators should add the decorated
code object to the registry, and return the original function"""
self.add(arg)
return arg
class SettingRegistry(Registry):
"""Stores key/value settings that code can use to customize Glue
Each member is a tuple of 3 items:
- key: the setting name [str]
- value: the default setting [object]
- validator: A function which tests whether the input is a valid value,
and raises a ValueError if invalid. On valid input,
returns the (possibly sanitized) setting value.
"""
def add(self, key, value, validator=str):
self.members.append((key, value, validator))
class ExporterRegistry(Registry):
"""Stores functions which can export an applocation to an output file
The members property is a list of exporters, each represented
as a (label, save_function, can_save_function, outmode) tuple.
save_function takes an (application, path) as input, and saves
the session
can_save_function takes an application as input, and raises an
exception if saving this session is not possible
outmode is a string, with one of 3 values:
'file': indicates that exporter creates a file
'directory': exporter creates a directory
'label': exporter doesn't write to disk, but needs a label
"""
def default_members(self):
return []
def add(self, label, exporter, checker, outmode='file'):
"""
Add a new exporter
:param label: Short label for the exporter
:type label: str
:param exporter: exporter function
:type exporter: function(application, path)
:param checker: function that checks if save is possible
:type checker: function(application)
``checker`` should raise an exception if export isn't possible.
:param outmode: What kind of output is created?
:type outmode: str ('file' | 'directory' | 'label')
"""
self.members.append((label, exporter, checker, outmode))
class ColormapRegistry(Registry):
"""Stores colormaps for the Image Viewer. The members property is
a list of colormaps, each represented as a [name,cmap] pair.
"""
def default_members(self):
import matplotlib.cm as cm
members = []
members.append(['Gray', cm.gray])
members.append(['Purple-Blue', cm.PuBu])
members.append(['Yellow-Green-Blue', cm.YlGnBu])
members.append(['Yellow-Orange-Red', cm.YlOrRd])
members.append(['Red-Purple', cm.RdPu])
members.append(['Blue-Green', cm.BuGn])
members.append(['Hot', cm.hot])
members.append(['Red-Blue', cm.RdBu])
members.append(['Red-Yellow-Blue', cm.RdYlBu])
members.append(['Purple-Orange', cm.PuOr])
members.append(['Purple-Green', cm.PRGn])
return members
def add(self, label, cmap):
"""
Add colormap *cmap* with label *label*.
"""
self.members.append([label, cmap])
class DataFactoryRegistry(Registry):
"""Stores data factories. Data factories take filenames as input,
and return :class:`~glue.core.data.Data` instances
The members property returns a list of (function, label, identifier)
namedtuples:
- Function is the factory that creates the data object
- label is a short human-readable description of the factory
- identifier is a function that takes ``(filename, **kwargs)`` as input
and returns True if the factory can open the file
New data factories can be registered via::
@data_factory('label_name', identifier, default='txt')
def new_factory(file_name):
...
This has the additional side-effect of associating
this factory with filenames ending in ``txt`` by default
"""
item = namedtuple('DataFactory', 'function label identifier')
def default_members(self):
from .core.data_factories import __factories__
return [self.item(f, f.label, f.identifier) for f in __factories__]
def __call__(self, label, identifier, default=''):
from .core.data_factories import set_default_factory
def adder(func):
set_default_factory(default, func)
self.add(self.item(func, label, identifier))
return func
return adder
class QtClientRegistry(Registry):
"""Stores QT widgets to visualize data.
The members property is a list of Qt widget classes
New widgets can be registered via::
@qt_client
class CustomWidget(QMainWindow):
...
"""
def default_members(self):
try:
from .qt.widgets.scatter_widget import ScatterWidget
from .qt.widgets.image_widget import ImageWidget
from .qt.widgets.histogram_widget import HistogramWidget
return [ScatterWidget, ImageWidget, HistogramWidget]
except ImportError:
logging.getLogger(__name__).warning(
"could not import glue.qt in ConfigObject")
return []
class LinkFunctionRegistry(Registry):
"""Stores functions to convert between quantities
The members property is a list of (function, info_string,
output_labels) namedtuples. `info_string` describes what the
function does. `output_labels` is a list of names for each output.
New link functions can be registered via
@link_function(info="maps degrees to arcseconds",
output_labels=['arcsec'])
def degrees2arcsec(degrees):
return degrees * 3600
Link functions are expected to receive and return numpy arrays
"""
item = namedtuple('LinkFunction', 'function info output_labels')
def default_members(self):
from .core import link_helpers
return list(self.item(l, "", l.output_args)
for l in link_helpers.__LINK_FUNCTIONS__)
def __call__(self, info="", output_labels=None):
out = output_labels or []
def adder(func):
self.add(self.item(func, info, out))
return func
return adder
class LinkHelperRegistry(Registry):
"""Stores helper objects that compute many ComponentLinks at once
The members property is a list of (object, info_string,
input_labels) tuples. `Object` is the link helper. `info_string`
describes what `object` does. `input_labels` is a list labeling
the inputs.
Each link helper takes a list of ComponentIDs as inputs, and
returns an iterable object (e.g. list) of ComponentLinks.
New helpers can be registered via
@link_helper('Links degrees and arcseconds in both directions',
['degree', 'arcsecond'])
def new_helper(degree, arcsecond):
return [ComponentLink([degree], arcsecond, using=lambda d: d*3600),
ComponentLink([arcsecond], degree, using=lambda a: a/3600)]
"""
item = namedtuple('LinkHelper', 'helper info input_labels')
def default_members(self):
from .core.link_helpers import __LINK_HELPERS__ as helpers
return list(self.item(l, l.info_text, l.input_args)
for l in helpers)
def __call__(self, info, input_labels):
def adder(func):
self.add(self.item(func, info, input_labels))
return func
return adder
class ProfileFitterRegistry(Registry):
item = namedtuple('ProfileFitter', 'cls')
def add(self, cls):
"""
Add fitter class *cls* to the registry.
"""
self.members.append(cls)
def default_members(self):
from .core.fitters import __FITTERS__
return list(__FITTERS__)
qt_client = QtClientRegistry()
data_factory = DataFactoryRegistry()
link_function = LinkFunctionRegistry()
link_helper = LinkHelperRegistry()
colormaps = ColormapRegistry()
exporters = ExporterRegistry()
settings = SettingRegistry()
fit_plugin = ProfileFitterRegistry()
def load_configuration(search_path=None):
''' Find and import a config.py file
Returns:
The module object
Raises:
Exception, if no module was found
'''
search_order = search_path or _default_search_order()
result = imp.new_module('config')
for config_file in search_order:
dir = os.path.dirname(config_file)
try:
sys.path.append(dir)
config = imp.load_source('config', config_file)
result = config
except IOError:
pass
except Exception as e:
raise Exception("Error loading config file %s:\n%s" %
(config_file, e))
finally:
sys.path.remove(dir)
return result
def _default_search_order():
"""
The default configuration file search order:
* current working directory
* environ var GLUERC
* HOME/.glue/config.py
* Glue's own default config
"""
search_order = [os.path.join(os.getcwd(), 'config.py')]
if 'GLUERC' in os.environ:
search_order.append(os.environ['GLUERC'])
search_order.append(os.path.expanduser('~/.glue/config.py'))
return search_order[::-1]
| bsipocz/glue | glue/config.py | Python | bsd-3-clause | 11,375 |
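Tying the registries together, a user's `config.py` might register a link function and a colormap like this. The function and colormap label are invented examples in the spirit of the docstrings above:

```python
# Hypothetical ~/.glue/config.py picked up by load_configuration()
import matplotlib.cm as cm
from glue.config import link_function, colormaps

@link_function(info="maps kilometers to miles", output_labels=['miles'])
def km_to_miles(km):
    # Link functions receive and return numpy arrays; scalar math broadcasts fine.
    return km * 0.621371

colormaps.add('Cool', cm.cool)  # label + matplotlib colormap, per ColormapRegistry.add
```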
import unittest
from Assembly import Assembly
from Parser.ParserContext import ParseException
from Attribute import Attribute
class AssemblyParser(object):
def __init__(self):
pass
def parse(self, parserContext):
assembly = Assembly()
token = parserContext.get_next_token()
if token != '.assembly':
raise ParseException('Expected .assembly, found ' + token)
token = parserContext.get_next_token()
if token == 'extern':
assembly.extern = True
token = parserContext.get_next_token()
assembly.name = token
while True:
token = parserContext.get_next_token()
if token == '.ver':
assembly.version = parserContext.get_next_token()
elif token == '.hash':
if parserContext.get_next_token() != 'algorithm':
raise ParseException('Expected token "algorithm"')
assembly.hashAlgorithm = int(parserContext.get_next_token(), 16)
elif token == '.custom':
if parserContext.get_next_token() != 'instance':
raise ParseException('Expected token "instance"')
if parserContext.get_next_token() != 'void':
raise ParseException('Expected token "void"')
attribute = Attribute()
attribute.name = parserContext.get_next_token() + '(' + parserContext.get_next_token() + ')'
assembly.customAttributes.append(attribute)
elif token == '{':
pass
elif token == '}':
break
#fixme public key token
return assembly
class AssemblyParserTests(unittest.TestCase):
def test_parse_extern_assembly(self):
from ParserContext import ParserContext
s = ('// Metadata version: v2.0.50727\n'
'.assembly extern mscorlib\n'
'{\n'
'.publickeytoken = (B7 7A 5C 56 19 34 E0 89 ) // .z\V.4..\n'
'.hash algorithm 0x00008004\n'
'.ver 2:0:0:0\n'
'}\n')
ap = AssemblyParser()
p = ParserContext(s);
a = ap.parse(p)
self.assertEqual(a.name, 'mscorlib')
self.assertEqual(a.extern, True)
self.assertEqual(a.version, '2:0:0:0')
self.assertEqual(a.extern, True)
self.assertEqual(a.hashAlgorithm, 0x8004)
def test_parse_custom_attributes(self):
from ParserContext import ParserContext
s = ('.assembly ConsoleApplication1\n'
'{\n'
'.custom instance void [mscorlib]System.Reflection.AssemblyTitleAttribute::.ctor(string) = ( 01 00 13 43 6F 6E 73 6F 6C 65 41 70 70 6C 69 63 // ...ConsoleApplic\n'
' 61 74 69 6F 6E 31 00 00 )\n'
'.custom instance void [mscorlib]System.Reflection.AssemblyDescriptionAttribute::.ctor(string) = ( 01 00 00 00 00 ) \n'
'}')
ap = AssemblyParser()
p = ParserContext(s)
a = ap.parse(p)
self.assertEqual(a.name, "ConsoleApplication1")
self.assertEqual(len(a.customAttributes), 2)
self.assertEqual(a.customAttributes[0].name, '[mscorlib]System.Reflection.AssemblyTitleAttribute::.ctor(string)')
self.assertEqual(a.customAttributes[1].name, '[mscorlib]System.Reflection.AssemblyDescriptionAttribute::.ctor(string)')
|
martydill/PyCIL
|
src/Parser/AssemblyParser.py
|
Python
|
bsd-2-clause
| 3,623
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Module Writen to Odoo, Open Source Management Solution
#
# Copyright (C) 2013 Obertix Free Solutions (<http://obertix.net>).
# cubells <info@obertix.net>
#
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
import search_document
| cubells/odoo-addons | search_by_date/wizard/__init__.py | Python | agpl-3.0 | 1,120 |
import pysal.lib as lp
from pysal.lib import examples
import geopandas as gpd
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
from pysal.viz.splot.mapping import (value_by_alpha_cmap,
vba_choropleth,
mapclassify_bin,
vba_legend)
def test_value_by_alpha_cmap():
# data
link_to_data = examples.get_path('columbus.shp')
gdf = gpd.read_file(link_to_data)
x = gdf['HOVAL'].values
y = gdf['CRIME'].values
# create cmap
rgba, cmap = value_by_alpha_cmap(x, y)
# create divergent rgba
div_rgba, _ = value_by_alpha_cmap(x, y, cmap='seismic', divergent=True)
# create reverted rgba
rev_rgba, _ = value_by_alpha_cmap(x, y, cmap='RdBu', revert_alpha=True)
def test_vba_choropleth():
# data
link_to_data = examples.get_path('columbus.shp')
gdf = gpd.read_file(link_to_data)
x = gdf['HOVAL'].values
y = gdf['CRIME'].values
# plot
fig, _ = vba_choropleth(x, y, gdf)
plt.close(fig)
# plot with divergent and reverted alpha
fig, _ = vba_choropleth(x, y, gdf, cmap='RdBu',
divergent=True,
revert_alpha=True)
plt.close(fig)
# plot with classified alpha and rgb
fig, _ = vba_choropleth(x, y, gdf, cmap='RdBu',
alpha_mapclassify=dict(classifier='quantiles'),
rgb_mapclassify=dict(classifier='quantiles'))
plt.close(fig)
# plot classified with legend
fig, _ = vba_choropleth(x, y, gdf,
alpha_mapclassify=dict(classifier='std_mean'),
rgb_mapclassify=dict(classifier='std_mean'),
legend=True)
plt.close(fig)
def test_vba_legend():
# data
link_to_data = examples.get_path('columbus.shp')
gdf = gpd.read_file(link_to_data)
x = gdf['HOVAL'].values
y = gdf['CRIME'].values
# classify data
rgb_bins = mapclassify_bin(x, 'quantiles')
alpha_bins = mapclassify_bin(y, 'quantiles')
# plot legend
fig, _ = vba_legend(rgb_bins, alpha_bins, cmap='RdBu')
plt.close(fig)
def test_mapclassify_bin():
# data
link_to_data = examples.get_path('columbus.shp')
gdf = gpd.read_file(link_to_data)
x = gdf['HOVAL'].values
# quantiles
mapclassify_bin(x, 'quantiles')
mapclassify_bin(x, 'quantiles', k=3)
# box_plot
mapclassify_bin(x, 'box_plot')
mapclassify_bin(x, 'box_plot', hinge=2)
# headtail_breaks
mapclassify_bin(x, 'headtail_breaks')
# percentiles
mapclassify_bin(x, 'percentiles')
mapclassify_bin(x, 'percentiles', pct=[25,50,75,100])
# std_mean
mapclassify_bin(x, 'std_mean')
mapclassify_bin(x, 'std_mean', multiples=[-1,-0.5,0.5,1])
# maximum_breaks
mapclassify_bin(x, 'maximum_breaks')
mapclassify_bin(x, 'maximum_breaks', k=3, mindiff=0.1)
# natural_breaks, max_p_classifier
mapclassify_bin(x, 'natural_breaks')
mapclassify_bin(x, 'max_p_classifier', k=3, initial=50)
# user_defined
mapclassify_bin(x, 'user_defined', bins=[20, max(x)])
| lixun910/pysal | pysal/viz/splot/tests/test_viz_value_by_alpha_mapl.py | Python | bsd-3-clause | 3,191 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
:Author: Bastian Knippschild (B.Knippschild@gmx.de)
:Date: October 2014
Copyright (C) 2014 Bastian Knippschild
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with tmLQCD. If not, see <http://www.gnu.org/licenses/>.
Function
========
Computation of Lüscher's Zeta Function. This program is based on
arXiv:1107.5023v2__, e.g. equation (5). The parameter :math:`\Lambda` is set to
1 in this implementation.
For informations on input parameters see the description of the function.
__ http://arxiv.org/abs/1107.5023v2
Performed tests
===============
1. Against Mathematica code provided by Liuming Liu w. and w.o. tbc in cms and
:math:`l=0`, :math:`m=0`
.. note::
Up to now, there is still some numerical difference in the
two implementations on the third or fourth digit. This is
not understood yet!
2. Against data from arXiv:1107.5023v2 w. and w.o. moving frames and
:math:`l=0`, :math:`m=0`
3. Against data from arXiv:1011.5288 w. and w.o. moving frames and linear
combinations of :math:`l=2`, :math:`m=-2,0,2`. See the test function at the
very end of this file.
'''
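# A minimal usage sketch (mirrors the cms case in test() at the bottom of this
# file; importing the module as ``zeta`` is an assumption based on the file name):
#
#   import numpy as np
#   from zeta import Z
#   q = 0.1207 * 24 / (2. * np.pi)   # scattering momentum fraction
#   z00 = Z(q * q).real              # rest frame, l = m = 0, d = (0, 0, 0)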
import math
import cmath
import numpy as np
import scipy.special
import scipy.integrate
def Z(q, gamma=1.0, l=0, m=0, d=np.array([0., 0., 0.]),
theta=np.array([0., 0., 0.]), precision=10e-6, verbose=0):
'''
Lüscher's Zeta Function
This is the ONLY function which should and needs to be called from outside.
:param float q: scattering momentum fraction, ONLY MANDATORY INPUT PARAMETER
:param float gamma: Lorentz factor for moving frames, see e.g. arXiv:1011.5288
:param int l: orbital quantum number
:param int m: magnetic quantum number
:param np.array d: total three momentum of the system
:param np.array theta: twist angle. IMPORTANT: It is not intended to use tbc with
moving frames yet. The twist angle theta shall only be
different from zero if ``d=(0,0,0)``!
:param float precision: precision of the calculation
  :param int verbose: 0, no output on screen; 1, detailed output with convergence
                      information
:returns: The value of Lüscher's Zeta function as a COMPLEX number.
:rtype: complex float
minor details: The three terms A, B, and C correspond to the three terms of
equation (5) in arXiv:1107.5023v2__.
__ http://arxiv.org/abs/1107.5023v2
'''
# some small checks
if ((np.dot(d,d) != 0.) and (np.dot(theta,theta) != 0.)):
    print 'TBC and moving frames are not supported'
exit(0)
if gamma < 1.0:
    print 'Gamma must be greater than or equal to 1.0'
exit(0)
# the computation
theta = theta / (2.*math.pi)
res = A(q, gamma, l, m, d, theta, precision, verbose) + \
B(q, gamma, l, precision, verbose) + \
C(q, gamma, l, m, d, precision, verbose)
if verbose:
print 'Luescher Zeta function:', res
return res
################################################################################
#
# IMPLEMENTATION
#
################################################################################
def appendSpherical_np(xyz):
'''
Transforms an array of 3d vectors from cartesian to spherical coordinates
'''
ptsnew = np.zeros(xyz.shape)
xy = xyz[:,0]**2 + xyz[:,1]**2
ptsnew[:,0] = np.sqrt(xy + xyz[:,2]**2)
ptsnew[:,1] = np.arctan2(np.sqrt(xy), xyz[:,2])
ptsnew[:,2] = np.arctan2(xyz[:,1], xyz[:,0])
return ptsnew
def cartesian(arrays, out=None):
'''
Gives an array of 3d vectors for summation
'''
arrays = [np.asarray(x) for x in arrays]
dtype = arrays[0].dtype
n = np.prod([x.size for x in arrays])
if out is None:
out = np.zeros([n, len(arrays)], dtype=dtype)
m = n / arrays[0].size
out[:,0] = np.repeat(arrays[0], m)
if arrays[1:]:
cartesian(arrays[1:], out=out[0:m,1:])
for j in xrange(1, arrays[0].size):
out[j*m:(j+1)*m,1:] = out[0:m,1:]
return out
def compute_r_in_spherical_coordinates(a, d, gamma):
'''
Computes the vector r for the sum in term A and returns it in spherical
coordinates
'''
out = np.zeros(a.shape)
if (np.linalg.norm(d) == 0.0):
for r, i in zip(a, range(0,a.shape[0])):
out[i,:] = r/gamma
# splitting every vector in a in parallel and orthogonal part w.r.t. d
else:
for r, i in zip(a, range(0,a.shape[0])):
r_p = np.dot(r, d)/np.dot(d,d)*d
r_o = r-r_p
out[i,:] = (r_p-0.5*d)/gamma + r_o
return appendSpherical_np(out)
def compute_summands_A(a_sph, q, l, m):
'''
Computes a part of the sum in term A
'''
result = 0.0
for r in a_sph:
result += (np.exp(-(r[0]**2.-q)) * r[0]**l) / (r[0]**2-q) * \
scipy.special.sph_harm(m, l, r[2], r[1])
return result
def create_momentum_array(q):
'''
creates the momentum array used for the sums
'''
i = int(math.sqrt(q)+1)
n = [j for j in xrange(-i,i+1)]
r = cartesian((n, n, n))
out = []
for rr in r:
if (np.dot(rr, rr) == q):
out.append(np.ndarray.tolist(rr))
out = np.asarray(out, dtype=float)
q += 1
  # these momentum squares do not exist
exclude = [7, 15, 23, 28, 31, 39, 47, 55, 60, 63, 71]
if q in exclude:
q += 1
if q > 72:
print 'cannot converge, see zeta.py - create_momentum_array'
exit(0)
return out, q
def A(q, gamma, l, m, d, theta, precision, verbose):
'''
Computation of term A
'''
i = 0
r, i = create_momentum_array(i)
r_sph = compute_r_in_spherical_coordinates(r+theta, d, gamma)
result = compute_summands_A(r_sph, q, l, m)
if verbose:
print 'convergence in term A:'
print '\t', i-1, result
# computing new sums until precision is reached
eps = 1
while (eps > precision):
r, i = create_momentum_array(i)
r_sph = compute_r_in_spherical_coordinates(r+theta, d, gamma)
result_h = compute_summands_A(r_sph, q, l, m)
eps = abs(result_h/result)
result += result_h
if verbose:
print '\t', i-1, result, eps
if verbose:
print 'Term A:', result
return result
def B(q, gamma, l, precision, verbose):
'''
Computation of term B
'''
  if l != 0:
return 0.0
else:
a = 2.*scipy.special.sph_harm(0, 0, 0.0, 0.0)*gamma*math.pow(math.pi, 3./2.)
    # The integral gives 2*exp(q)*DawsonF(sqrt(q))/sqrt(q) for Lambda = 1
b = q * 2.*np.exp(q)*scipy.special.dawsn(cmath.sqrt(q))/cmath.sqrt(q)
c = math.exp(q)
if verbose:
print 'Term B:', a*(b-c)
return a*(b-c)
def compute_gamma_w_in_spherical_coordinates(a, d, gamma):
'''
Computes the term gamma*w and returns the result in spherical coordinates
'''
out = np.zeros(a.shape)
if (np.linalg.norm(d) == 0.0):
for r, i in zip(a, range(0,a.shape[0])):
out[i,:] = r*gamma
# splitting every vector in a in parallel and orthogonal part w.r.t. d
else:
for r, i in zip(a, range(0,a.shape[0])):
r_p = np.dot(r, d)/np.dot(d,d)*d
r_o = r-r_p
out[i,:] = r_p*gamma + r_o
return appendSpherical_np(out)
def integrand(t, q, l, w):
'''
Just the integrand of term C
'''
return ((math.pi/t)**(3./2.+l) ) * np.exp(q*t-(math.pi)**2.*w*w/t)
def compute_summands_C(w_sph, w, q, gamma, l, m, d, precision):
'''
Computes a part of the sum in term C
'''
part1 = gamma * (np.absolute(w_sph[:,0])**l) * \
np.exp((0.0-1.j)*math.pi*np.dot(w, d)) * \
scipy.special.sph_harm(m, l, w_sph[:,2], w_sph[:,1])
# Factor two: The integral
part2 = []
for ww in w_sph:
part2.append((scipy.integrate.quadrature(integrand, 0., 1., \
args=[q, l, ww[0]], tol = precision*0.1, maxiter=1000))[0])
part2 = np.asarray(part2, dtype=float)
# return the result
return np.dot(part1, part2)
def C(q, gamma, l, m, d, precision, verbose):
'''
Computation of term C
'''
i = 1
w, i = create_momentum_array(i)
w_sph = compute_gamma_w_in_spherical_coordinates(w, d, gamma)
result = compute_summands_C(w_sph, w, q, gamma, l, m, d, precision)
if verbose:
print 'convergence in term C:'
print '\t', i-1, result
# computing new sums until precision is reached
eps = 1
while (eps > precision):
w, i = create_momentum_array(i)
w_sph = compute_gamma_w_in_spherical_coordinates(w, d, gamma)
result_h = compute_summands_C(w_sph, w, q, gamma, l, m, d, precision)
eps = abs(result_h/result)
result += result_h
if verbose:
print '\t', i-1, result, eps
if verbose:
print 'Term C:', result
return result
def test():
# cms ##########################
print '\nTest in cms:'
Pcm = np.array([0., 0., 0.])
q = 0.1207*24/(2.*math.pi)
gamma = 1.0
zeta = Z(q*q, gamma, d = Pcm).real
print 'q, gamma:', q, gamma
delta = np.arctan(math.pi**(3./2.)*q/zeta)*180./math.pi
if delta < 0:
delta = 180+delta
print 'delta:', delta, 'delta should be: 137'
# mv1 ##########################
print '\nTest in mv1:'
Pcm = np.array([0., 0., 1.])
L = 32
q = 0.161*L/(2.*math.pi)
E = 0.440
Ecm = 0.396
gamma = E/Ecm
Z00 = Z(q*q, gamma, d = Pcm).real
Z20 = Z(q*q, gamma, d = Pcm, l = 2).real
print 'q, gamma:', q, gamma
delta = np.arctan(gamma*math.pi**(3./2.) * q / \
(Z00 + (2./(q*q*math.sqrt(5)))*Z20))*180./math.pi
if delta < 0:
delta = 180+delta
print 'delta:', delta, 'delta should be: 116'
# mv2 ##########################
print '\nTest in mv2:'
Pcm = np.array([1., 1., 0.])
L = 32
q = 0.167*L/(2.*math.pi)
E = 0.490
Ecm = 0.407
gamma = E/Ecm
Z00 = Z(q*q, gamma, d = Pcm).real
Z20 = Z(q*q, gamma, d = Pcm, l = 2).real
Z22 = Z(q*q, gamma, d = Pcm, l = 2, m = 2).imag
Z2_2 = Z(q*q, gamma, d = Pcm, l = 2, m = -2).imag
print 'q, gamma:', q, gamma
delta = np.arctan(gamma*math.pi**(3./2.) * q / \
(Z00 - (1./(q*q*math.sqrt(5)))*Z20 \
+ ((math.sqrt(3./10.)/(q*q))*(Z22-Z2_2))))*180./math.pi
if delta < 0:
delta = 180+delta
print 'delta:', delta, 'delta should be: 128'
if __name__ == '__main__':
test()
# vim: sw=2
|
martin-ueding/LueschersZetaFunction
|
zeta.py
|
Python
|
gpl-3.0
| 10,459
|
#!/usr/bin/env python
from json import loads
from decimal import Decimal
from struct import pack
def fix(n, m = 1):
return long(Decimal(n) * Decimal(m) * 10**18)
def unfix(n):
return n // 10**18
def stringToBytes(value):
return value.ljust(32, '\x00')
def longTo32Bytes(value):
return pack(">l", value).rjust(32, '\x00')
def longToHexString(value, leftPad=40):
# convert the value to a hex string, strip off the `0x`, strip off any trailing `L`, pad with zeros, prefix with `0x`
return '0x' + hex(value)[2:].rstrip('L').zfill(leftPad)
def bytesToLong(value):
return long(value.encode('hex'), 16)
def bytesToHexString(value):
return longToHexString(bytesToLong(value))
def captureFilteredLogs(state, contract, logs):
def captureLog(contract, logs, message):
translated = contract.translator.listen(message)
if not translated: return
logs.append(translated)
state.log_listeners.append(lambda x: captureLog(contract, logs, x))
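# A few illustrative checks that follow directly from the helpers above:
#   fix(1)                 # -> 10**18
#   unfix(fix(3))          # -> 3
#   longToHexString(255)   # -> '0x' followed by 38 zeros and 'ff'
#   stringToBytes('abc')   # -> 'abc' padded with NUL bytes to 32 characters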
|
redsquirrel/augur-core
|
tests/utils.py
|
Python
|
gpl-3.0
| 997
|
#! /usr/bin/env python
# -*- coding: UTF8 -*-
"""
############################################################
Slot creation for the general psychology course
############################################################
:Author: *Carlo E. T. Oliveira*
:Contact: carlo@nce.ufrj.br
:Date: $Date: 2017/06/23 $
:Status: This is a "work in progress"
:Revision: $Revision: 0.01 $
:Home: `Labase <http://labase.nce.ufrj.br/>`__
:Copyright: ©2017, `GPL <http://is.gd/3Udt>`__.
"""
import mechanize
# from BeautifulSoup import BeautifulSoup as soup
from colors import COLOR as K
import ssl
import json
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
# Legacy Python that doesn't verify HTTPS certificates by default
pass
else:
# Handle target environment that doesn't support HTTPS verification
ssl._create_default_https_context = _create_unverified_https_context
# ssl_version corrections are done
COLORS = [K.red, K.green, K.blue, K.fuchsia, K.teal, K.navy, K.maroon, K.yellow,
K.purple, K.darkgoldenrod, K.lime, K.aqua, K.tomato, K.olivedrab, K.dodgerblue,
K.lightpink, K.lightgreen, K.black, K.gray, K.silver]
__author__ = "Carlo E. T. Oliveira (cetoli@yahoo.com.br) $Author: cetoli $"
__version__ = "1.0 $Revision$"[10:-1]
__date__ = "2017/06/23 $Date$"
NL = []
class Main:
def __init__(self, page, owners, avals, nopeers, degrade, pub_dir,
grades, averages, evs=NL, peers=NL, plat=None, no_last=0):
self.owners, self.avals, self.nopeers = owners, avals, nopeers
self.degrade, self.pub_dir, self.peers = degrade, pub_dir, peers
self.grades_file, self.averages_file = grades, averages
self.page, self.evs, self.no_last = page, evs, no_last
self.plat = plat or 'https://activufrj.nce.ufrj.br'
self.events = self.norms = self.grades = self.titles = self.range = self.speers = None
def plot_results(self, outfile='test_grades.png'):
tcks = ['%s%s:%d' % (lab[0:3], lab[-2:], ind) for ind, lab in enumerate(self.events)]
# plotter = ['.', ',', ':','-','--' ,'-.',':','-','--' ]*6# ,':','.','<','>', '+', 'v', '^']
plotter = []
color = COLORS * 4
[plotter.extend([tk] * 16) for tk in ['-', '--', '-.', ':']] # ,':','.','<','>', '+', 'v', '^']
plt.xticks(range(len(tcks)), tcks)
plt.figure()
top = .9 - (len(set(self.peers) - set(self.nopeers)) / 3.0) * 0.05
plt.subplots_adjust(bottom=0.08, left=.05, right=.96, top=top, hspace=.35)
for peer, plot, color in zip(self.speers, plotter, color):
if peer in self.nopeers or "_" in peer:
continue
plt.plot(self.grades[peer], plot, color=color, label=peer[:15], linewidth=2.0)
plt.legend(bbox_to_anchor=(0, 1, 1, 3), loc=3, borderaxespad=1., ncol=3,
mode="expand", ) # loc='upper right')
# plt.margins(10,10)
plt.savefig(self.pub_dir + outfile, pad_inches=2)
# plt.label(peer)
plt.show()
def normalize_grade_grid(self):
grades = [self.grades[peer] for peer in self.speers]
self.norms = [max(ev) > 0 and max(ev) or 9 for ev in zip(*grades)]
# self.norms = [(max(ev)> 0 and max(ev) or 9,
# min(ev)>max(ev)/3.0 and min(ev) or max(ev)/3.0) for ev in zip(*grades)]
for peer in self.grades:
if peer in self.nopeers:
continue
self.grades[peer] = [g * 100 / (n + 2) + 1 for g, n in zip(self.grades[peer], self.norms)] # [:-1])]
def average_grade_grid(self):
def min_for_max(ingrades):
ingrades[ingrades.index(min(ingrades))] = max(ingrades)
if len(ingrades) > 6:
ingrades[ingrades.index(min(ingrades))] = max(ingrades)
return ingrades
for peer in self.grades:
if peer in self.nopeers:
continue
grades = [0] + self.grades[peer]
grades = min_for_max(min_for_max(grades))
self.grades[peer] = [sum(grades[0:i + 1]) / (i + 1) for i, grade in enumerate(grades) if 0 < i] # <9]
def create_grade_grid(self):
self.grades = dict((peer, [0] * len(self.events)) for peer in self.speers)
def remove_page(self, entry):
pg = self.page + '/'
if pg in entry:
return entry.replace(pg, '')
else:
return entry
def assess_a_vote_session(self, session_no, event):
session = self.avals[event]
for vote in session.itervalues():
self.assess_a_user_vote(vote, session_no)
def assess_a_user_vote(self, vote, session):
vote = isinstance(vote, dict) and vote.values()[0] or vote
print(vote)
degrade = self.range[session] + 2
votes = [(voted, rank) for voted, rank
in zip(vote, self.degrade[:-degrade])]
for voted in votes:
peer_name, rank = voted
peer_name = self.remove_page(peer_name) if peer_name else 0
if peer_name in self.nopeers:
continue
for vote in self.owners[peer_name]:
assert self.grades[vote][session] >= -100, 'v:%s,p:%s,s:%s' % (vote, peer_name, session)
self.grades[vote][session] += rank
def calculate(self):
# global self.owners, self.speers, AVALS
self.avals.update(self.scrap_from_page())
# return
if self.owners is None:
first_event = self.events[0]
self.owners = {name: [name] for name in self.avals[first_event]}
for i, v in self.owners.iteritems():
print(' %s = %s,' % (i, str(v)))
self.events = self.evs + self.events
[self.events.pop() for _ in range(self.no_last)]
avals = self.avals
# print("avals", avals)
print("ownwer", self.owners)
self.peers = {peer for votes in avals.itervalues()
for vote in votes.itervalues() for peer in vote
if not ({0, '/'} & set(list(peer == 0 and [0] or peer)))}
self.speers = sorted(self.peers)
# self.owners = {peer:[peer] for peer in self.peers}
self.owners.setdefault(0, [])
self.create_grade_grid()
for session, event in enumerate(self.events):
self.assess_a_vote_session(session, event)
print('grades before normalize:\n', self.grades)
self.normalize_grade_grid()
self.plot_results(self.grades_file)
print('grades normalizers:\n', self.norms)
self.average_grade_grid()
print('grades average after normalize:\n', self.grades)
self.plot_results(self.averages_file)
def scrap_from_page(self):
# self.page='https://activufrj.nce.ufrj.br/evaluation/result/Neuro_UM_XII'
mech = mechanize.Browser()
mech.set_handle_robots(False)
mech.open(self.plat)
mech.select_form(nr=0)
mech["user"] = "carlo"
mech["passwd"] = "labase4ct1v"
mech.submit().read()
# soup(results)
mech.open(self.plat + '/evaluation/' + self.page).read()
self.events = [link.url.split('/')[-1] for link in mech.links()
if '/evaluation/edit/' in link.url]
print(self.events)
events = self.events
avs = mech.open(self.plat + '/rest/evaluation/result/' + self.page).read()
avs = json.loads(avs)["result"]
# doc = soup(avs)
# lns = doc.findAll('a')
print(avs)
def get_range(event):
rngpg = mech.open(event).read()
return len(rngpg.split('para os votados')[1].split('Data')[0].split()[4:-4])
self.range = [get_range(self.plat+"/evaluation/edit/%s/%s" % (self.page, link)) for link in events]
print('self.range %s' % self.range)
return {l: avs[self.page+"/"+l] for l in self.events}
def main(self):
self.calculate()
if __name__ == "__main__":
Main().main()
|
cetoli/draft
|
src/activmech/fonoslots.py
|
Python
|
gpl-2.0
| 8,148
|
"""
Server startstop hooks
This module contains functions called by Evennia at various
points during its startup, reload and shutdown sequence. It
allows for customizing the server operation as desired.
This module must contain at least these global functions:
at_server_start()
at_server_stop()
at_server_reload_start()
at_server_reload_stop()
at_server_cold_start()
at_server_cold_stop()
"""
from evennia import create_script
def at_server_start():
"""
This is called every time the server starts up, regardless of
how it was shut down.
"""
pass
def at_server_stop():
"""
    This is called just before the server is shut down, regardless
    of whether it is for a reload, reset or shutdown.
"""
pass
def at_server_reload_start():
"""
    This is called only when the server starts back up after a reload.
"""
pass
def at_server_reload_stop():
"""
    This is called only when the server stops before a reload.
"""
pass
def at_server_cold_start():
"""
This is called only when the server starts "cold", i.e. after a
shutdown or a reset.
"""
create_script("typeclasses.scripts.Assets", obj=None, persistent=True)
def at_server_cold_stop():
"""
This is called only when the server goes down due to a shutdown or
reset.
"""
pass
|
ccubed/EvenniaGames
|
FSuns/server/conf/at_server_startstop.py
|
Python
|
mit
| 1,329
|
from django.contrib import admin
from django.utils.safestring import mark_safe
from .models import OrganizerType
class ParticipantAdminMixin(object):
def linked_target(self, organizer):
if not organizer.content_object:
return None
return mark_safe(
'<a href="%s">%s</a>' % (
organizer.content_object.get_absolute_url(),
organizer.content_object,
)
)
linked_target.short_description = 'target'
class BaseOrganizerAdmin(ParticipantAdminMixin, admin.ModelAdmin):
exclude = ('content_type', 'object_id',)
list_display = ('name', 'email', 'type', 'post_publicly', 'added',
'linked_target',)
list_filter = ('added', 'post_publicly',)
readonly_fields = ('added', 'linked_target',)
search_fields = ('name', 'email', 'phone', 'notes', 'url',)
class BaseWatcherAdmin(ParticipantAdminMixin, admin.ModelAdmin):
exclude = ('content_type', 'object_id',)
list_display = ('name', 'email', 'added', 'linked_target',)
list_filter = ('added',)
readonly_fields = ('added', 'linked_target',)
search_fields = ('name', 'email', 'phone',)
class OrganizerTypeAdmin(admin.ModelAdmin):
list_display = ('name', 'is_group',)
search_fields = ('name',)
admin.site.register(OrganizerType, OrganizerTypeAdmin)
|
596acres/django-livinglots-organize
|
livinglots_organize/admin.py
|
Python
|
agpl-3.0
| 1,358
|
from sklearn.externals import joblib
import os
import time
from . import folder_name
class ClassifierLoader(object):
"""Handles saving and loading of trained classifiers transparently."""
def __init__(self):
super(ClassifierLoader, self).__init__()
    def dump_object(self, obj, classifier, category="", fname="classifier.joblib", **kwargs):
        # fname is the target file name inside the classifier folder (the default here is illustrative)
self.logger.info("Writing object to disk")
t2 = time.time()
try:
folder = folder_name(self.datamanager.PATHS["CLASSIFIER"], category, classifier)
if not os.path.isdir(folder):
os.makedirs(folder)
joblib.dump(obj, os.path.join(folder, fname), compress=3)
except Exception as e:
self.logger.error("Joblib failed: %s" % e)
self.logger.info("%f seconds\n" % (time.time() - t2))
def load_object(self, fname, category="", classifier=None):
self.logger.info("Reading object from disk")
t2 = time.time()
        if classifier is None:
classifier = self.classifier
try:
folder = folder_name(self.datamanager.PATHS["CLASSIFIER"], category, classifier)
if not os.path.isdir(folder):
self.logger.info("Object's path doesn't exist")
return None
obj = joblib.load(os.path.join(folder, fname))
self.logger.info("%f seconds\n" % (time.time() - t2))
return obj
except Exception as e:
self.logger.error("Joblib failed: %s" % e)
return None
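# A hedged usage sketch: ClassifierLoader reads like a mixin, so the host class is
# assumed to provide self.logger, self.datamanager (with PATHS["CLASSIFIER"]) and
# self.classifier; the class and file names below are hypothetical:
#
#   class Trainer(ClassifierLoader, DataManagerMixin):
#       ...
#
#   trainer = Trainer(...)
#   trainer.dump_object(model, classifier='svm', category='cars', fname='svm.joblib')
#   model = trainer.load_object('svm.joblib', category='cars', classifier='svm')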
|
peret/visualize-bovw
|
util/classifierloader.py
|
Python
|
gpl-2.0
| 1,518
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
# Copyright 2005-2009 Google, Inc. All rights reserved.
# @author arb@google.com (Anthony Baxter)
# Based on original C++ version by
# @author estlin@google.com (Brian Estlin)
# Groc (Googley runner of commands) is a microlanguage that provides an
# alternative to traditional cron syntax/semantics for specifying
# recurrent events. Syntactically, it is designed to be more readable
# (more easily 'grokked') than crontab language. Groc forfeits certain
# semantics found in crontab, in favor of readability; however,
# certain timespecs which are awkward in crontab are much easier
# to express in Groc (for example, the 3rd tuesday of the month).
# It is these constructs to which Groc is best suited.
#
# Examples of valid Groc include:
# '1st,3rd monday of month 15:30'
# 'every wed,fri of jan,jun 13:15'
# 'first sunday of quarter 00:00'
# 'every 2 hours'
#
# FEATURES NOT YET IMPLEMENTED (in approx. order of priority):
# - some way to specify multiple values for minutes/hours (definitely)
# - 'am/pm' (probably)
# - other range/interval functionality (maybe)
__author__ = 'arb@google.com (Anthony Baxter)'
# WARNING: This file is externally viewable by our users. All comments from
# this file will be stripped. The docstrings will NOT. Do not put sensitive
# information in docstrings. If you must communicate internal information in
# this source file, please place them in comments only.
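# A hedged usage sketch (GrocLexer is assumed to be the lexer generated from the
# same grammar; see main() at the bottom of this file):
#
#   stream = ANTLRStringStream('every 2 hours')
#   parser = GrocParser(CommonTokenStream(GrocLexer(stream)))
#   parser.timespec()
#   parser.interval_mins    # -> 2
#   parser.period_string    # -> 'hours'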
allOrdinals = set([1, 2, 3, 4, 5])
numOrdinals = len(allOrdinals)
# for convenience in actions
HIDDEN = BaseRecognizer.HIDDEN
# token types
MONTH=27
THURSDAY=23
FOURTH_OR_FIFTH=16
THIRD=13
DECEMBER=39
FROM=41
EVERY=6
WEDNESDAY=22
QUARTER=40
SATURDAY=25
SYNCHRONIZED=9
JANUARY=28
SUNDAY=26
TUESDAY=21
SEPTEMBER=36
UNKNOWN_TOKEN=45
AUGUST=35
JULY=34
MAY=32
FRIDAY=24
DIGITS=8
FEBRUARY=29
TWO_DIGIT_HOUR_TIME=43
OF=4
WS=44
EOF=-1
APRIL=31
COMMA=10
JUNE=33
OCTOBER=37
TIME=5
FIFTH=15
NOVEMBER=38
FIRST=11
DIGIT=7
FOURTH=14
MONDAY=20
HOURS=17
MARCH=30
SECOND=12
MINUTES=18
TO=42
DAY=19
# token names
tokenNames = [
"<invalid>", "<EOR>", "<DOWN>", "<UP>",
"OF", "TIME", "EVERY", "DIGIT", "DIGITS", "SYNCHRONIZED", "COMMA", "FIRST",
"SECOND", "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES",
"DAY", "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY",
"SUNDAY", "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE",
"JULY", "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
"FROM", "TO", "TWO_DIGIT_HOUR_TIME", "WS", "UNKNOWN_TOKEN"
]
class GrocParser(Parser):
grammarFileName = "borg/borgcron/py/Groc.g"
antlr_version = version_str_to_tuple("3.1.1")
antlr_version_str = "3.1.1"
tokenNames = tokenNames
def __init__(self, input, state=None):
if state is None:
state = RecognizerSharedState()
Parser.__init__(self, input, state)
self.dfa4 = self.DFA4(
self, 4,
eot = self.DFA4_eot,
eof = self.DFA4_eof,
min = self.DFA4_min,
max = self.DFA4_max,
accept = self.DFA4_accept,
special = self.DFA4_special,
transition = self.DFA4_transition
)
self.ordinal_set = set()
self.weekday_set = set()
self.month_set = set()
self.monthday_set = set()
self.time_string = ''
self.interval_mins = 0
self.period_string = ''
self.synchronized = False
self.start_time_string = ''
self.end_time_string = ''
valuesDict = {
SUNDAY: 0,
FIRST: 1,
MONDAY: 1,
JANUARY: 1,
TUESDAY: 2,
SECOND: 2,
FEBRUARY: 2,
WEDNESDAY: 3,
THIRD: 3,
MARCH: 3,
THURSDAY: 4,
FOURTH: 4,
APRIL: 4,
FRIDAY: 5,
FIFTH: 5,
MAY: 5,
SATURDAY: 6,
JUNE: 6,
JULY: 7,
AUGUST: 8,
SEPTEMBER: 9,
OCTOBER: 10,
NOVEMBER: 11,
DECEMBER: 12,
}
# Convert date tokens to int representations of properties.
def ValueOf(self, token_type):
return self.valuesDict.get(token_type, -1)
# $ANTLR start "timespec"
# borg/borgcron/py/Groc.g:92:1: timespec : ( specifictime | interval ) EOF ;
def timespec(self, ):
try:
try:
# borg/borgcron/py/Groc.g:93:3: ( ( specifictime | interval ) EOF )
# borg/borgcron/py/Groc.g:93:5: ( specifictime | interval ) EOF
pass
# borg/borgcron/py/Groc.g:93:5: ( specifictime | interval )
alt1 = 2
LA1_0 = self.input.LA(1)
if (LA1_0 == EVERY) :
LA1_1 = self.input.LA(2)
if ((DIGIT <= LA1_1 <= DIGITS)) :
alt1 = 2
elif ((DAY <= LA1_1 <= SUNDAY)) :
alt1 = 1
else:
nvae = NoViableAltException("", 1, 1, self.input)
raise nvae
elif ((DIGIT <= LA1_0 <= DIGITS) or (FIRST <= LA1_0 <= FOURTH_OR_FIFTH)) :
alt1 = 1
else:
nvae = NoViableAltException("", 1, 0, self.input)
raise nvae
if alt1 == 1:
# borg/borgcron/py/Groc.g:93:7: specifictime
pass
self._state.following.append(self.FOLLOW_specifictime_in_timespec44)
self.specifictime()
self._state.following.pop()
elif alt1 == 2:
# borg/borgcron/py/Groc.g:93:22: interval
pass
self._state.following.append(self.FOLLOW_interval_in_timespec48)
self.interval()
self._state.following.pop()
self.match(self.input, EOF, self.FOLLOW_EOF_in_timespec52)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "timespec"
# $ANTLR start "specifictime"
# borg/borgcron/py/Groc.g:96:1: specifictime : ( ( ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) ) | ( ordinals weekdays ) ) TIME ) ;
def specifictime(self, ):
TIME1 = None
try:
try:
# borg/borgcron/py/Groc.g:97:3: ( ( ( ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) ) | ( ordinals weekdays ) ) TIME ) )
# borg/borgcron/py/Groc.g:97:5: ( ( ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) ) | ( ordinals weekdays ) ) TIME )
pass
# borg/borgcron/py/Groc.g:97:5: ( ( ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) ) | ( ordinals weekdays ) ) TIME )
# borg/borgcron/py/Groc.g:97:7: ( ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) ) | ( ordinals weekdays ) ) TIME
pass
# borg/borgcron/py/Groc.g:97:7: ( ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) ) | ( ordinals weekdays ) )
alt4 = 2
alt4 = self.dfa4.predict(self.input)
if alt4 == 1:
# borg/borgcron/py/Groc.g:97:8: ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) )
pass
# borg/borgcron/py/Groc.g:97:8: ( ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec ) )
# borg/borgcron/py/Groc.g:97:10: ( ( ordinals weekdays ) | monthdays ) OF ( monthspec | quarterspec )
pass
# borg/borgcron/py/Groc.g:97:10: ( ( ordinals weekdays ) | monthdays )
alt2 = 2
LA2_0 = self.input.LA(1)
if (LA2_0 == EVERY or (FIRST <= LA2_0 <= FOURTH_OR_FIFTH)) :
alt2 = 1
elif ((DIGIT <= LA2_0 <= DIGITS)) :
alt2 = 2
else:
nvae = NoViableAltException("", 2, 0, self.input)
raise nvae
if alt2 == 1:
# borg/borgcron/py/Groc.g:97:11: ( ordinals weekdays )
pass
# borg/borgcron/py/Groc.g:97:11: ( ordinals weekdays )
# borg/borgcron/py/Groc.g:97:12: ordinals weekdays
pass
self._state.following.append(self.FOLLOW_ordinals_in_specifictime72)
self.ordinals()
self._state.following.pop()
self._state.following.append(self.FOLLOW_weekdays_in_specifictime74)
self.weekdays()
self._state.following.pop()
elif alt2 == 2:
# borg/borgcron/py/Groc.g:97:31: monthdays
pass
self._state.following.append(self.FOLLOW_monthdays_in_specifictime77)
self.monthdays()
self._state.following.pop()
self.match(self.input, OF, self.FOLLOW_OF_in_specifictime80)
# borg/borgcron/py/Groc.g:97:45: ( monthspec | quarterspec )
alt3 = 2
LA3_0 = self.input.LA(1)
if ((MONTH <= LA3_0 <= DECEMBER)) :
alt3 = 1
elif ((FIRST <= LA3_0 <= THIRD) or LA3_0 == QUARTER) :
alt3 = 2
else:
nvae = NoViableAltException("", 3, 0, self.input)
raise nvae
if alt3 == 1:
# borg/borgcron/py/Groc.g:97:46: monthspec
pass
self._state.following.append(self.FOLLOW_monthspec_in_specifictime83)
self.monthspec()
self._state.following.pop()
elif alt3 == 2:
# borg/borgcron/py/Groc.g:97:56: quarterspec
pass
self._state.following.append(self.FOLLOW_quarterspec_in_specifictime85)
self.quarterspec()
self._state.following.pop()
elif alt4 == 2:
# borg/borgcron/py/Groc.g:98:11: ( ordinals weekdays )
pass
# borg/borgcron/py/Groc.g:98:11: ( ordinals weekdays )
# borg/borgcron/py/Groc.g:98:12: ordinals weekdays
pass
self._state.following.append(self.FOLLOW_ordinals_in_specifictime101)
self.ordinals()
self._state.following.pop()
self._state.following.append(self.FOLLOW_weekdays_in_specifictime103)
self.weekdays()
self._state.following.pop()
#action start
self.month_set = set(range(1,13))
#action end
TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime117)
#action start
self.time_string = TIME1.text
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "specifictime"
# $ANTLR start "interval"
# borg/borgcron/py/Groc.g:102:1: interval : ( EVERY intervalnum= ( DIGIT | DIGITS ) period ( time_range | ( SYNCHRONIZED ) )? ) ;
def interval(self, ):
intervalnum = None
period2 = None
try:
try:
# borg/borgcron/py/Groc.g:103:3: ( ( EVERY intervalnum= ( DIGIT | DIGITS ) period ( time_range | ( SYNCHRONIZED ) )? ) )
# borg/borgcron/py/Groc.g:103:5: ( EVERY intervalnum= ( DIGIT | DIGITS ) period ( time_range | ( SYNCHRONIZED ) )? )
pass
# borg/borgcron/py/Groc.g:103:5: ( EVERY intervalnum= ( DIGIT | DIGITS ) period ( time_range | ( SYNCHRONIZED ) )? )
# borg/borgcron/py/Groc.g:103:7: EVERY intervalnum= ( DIGIT | DIGITS ) period ( time_range | ( SYNCHRONIZED ) )?
pass
self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval136)
intervalnum = self.input.LT(1)
if (DIGIT <= self.input.LA(1) <= DIGITS):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
#action start
self.interval_mins = int(intervalnum.text)
#action end
self._state.following.append(self.FOLLOW_period_in_interval164)
period2 = self.period()
self._state.following.pop()
#action start
if ((period2 is not None) and [self.input.toString(period2.start,period2.stop)] or [None])[0] == "hours":
self.period_string = "hours"
else:
self.period_string = "minutes"
#action end
# borg/borgcron/py/Groc.g:113:7: ( time_range | ( SYNCHRONIZED ) )?
alt5 = 3
LA5_0 = self.input.LA(1)
if (LA5_0 == FROM) :
alt5 = 1
elif (LA5_0 == SYNCHRONIZED) :
alt5 = 2
if alt5 == 1:
# borg/borgcron/py/Groc.g:113:9: time_range
pass
self._state.following.append(self.FOLLOW_time_range_in_interval176)
self.time_range()
self._state.following.pop()
elif alt5 == 2:
# borg/borgcron/py/Groc.g:114:9: ( SYNCHRONIZED )
pass
# borg/borgcron/py/Groc.g:114:9: ( SYNCHRONIZED )
# borg/borgcron/py/Groc.g:114:10: SYNCHRONIZED
pass
self.match(self.input, SYNCHRONIZED, self.FOLLOW_SYNCHRONIZED_in_interval189)
#action start
self.synchronized = True
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "interval"
# $ANTLR start "ordinals"
# borg/borgcron/py/Groc.g:118:1: ordinals : ( EVERY | ( ordinal ( COMMA ordinal )* ) ) ;
def ordinals(self, ):
try:
try:
# borg/borgcron/py/Groc.g:119:3: ( ( EVERY | ( ordinal ( COMMA ordinal )* ) ) )
# borg/borgcron/py/Groc.g:119:5: ( EVERY | ( ordinal ( COMMA ordinal )* ) )
pass
# borg/borgcron/py/Groc.g:119:5: ( EVERY | ( ordinal ( COMMA ordinal )* ) )
alt7 = 2
LA7_0 = self.input.LA(1)
if (LA7_0 == EVERY) :
alt7 = 1
elif ((FIRST <= LA7_0 <= FOURTH_OR_FIFTH)) :
alt7 = 2
else:
nvae = NoViableAltException("", 7, 0, self.input)
raise nvae
if alt7 == 1:
# borg/borgcron/py/Groc.g:119:7: EVERY
pass
self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals218)
elif alt7 == 2:
# borg/borgcron/py/Groc.g:120:5: ( ordinal ( COMMA ordinal )* )
pass
# borg/borgcron/py/Groc.g:120:5: ( ordinal ( COMMA ordinal )* )
# borg/borgcron/py/Groc.g:120:7: ordinal ( COMMA ordinal )*
pass
self._state.following.append(self.FOLLOW_ordinal_in_ordinals226)
self.ordinal()
self._state.following.pop()
# borg/borgcron/py/Groc.g:120:15: ( COMMA ordinal )*
while True: #loop6
alt6 = 2
LA6_0 = self.input.LA(1)
if (LA6_0 == COMMA) :
alt6 = 1
if alt6 == 1:
# borg/borgcron/py/Groc.g:120:16: COMMA ordinal
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals229)
self._state.following.append(self.FOLLOW_ordinal_in_ordinals231)
self.ordinal()
self._state.following.pop()
else:
break #loop6
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "ordinals"
# $ANTLR start "ordinal"
# borg/borgcron/py/Groc.g:123:1: ordinal : ord= ( FIRST | SECOND | THIRD | FOURTH | FIFTH | FOURTH_OR_FIFTH ) ;
def ordinal(self, ):
ord = None
try:
try:
# borg/borgcron/py/Groc.g:124:3: (ord= ( FIRST | SECOND | THIRD | FOURTH | FIFTH | FOURTH_OR_FIFTH ) )
# borg/borgcron/py/Groc.g:124:5: ord= ( FIRST | SECOND | THIRD | FOURTH | FIFTH | FOURTH_OR_FIFTH )
pass
ord = self.input.LT(1)
if (FIRST <= self.input.LA(1) <= FOURTH_OR_FIFTH):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
#action start
self.ordinal_set.add(self.ValueOf(ord.type));
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "ordinal"
class period_return(ParserRuleReturnScope):
def __init__(self):
ParserRuleReturnScope.__init__(self)
# $ANTLR start "period"
# borg/borgcron/py/Groc.g:129:1: period : ( HOURS | MINUTES ) ;
def period(self, ):
retval = self.period_return()
retval.start = self.input.LT(1)
try:
try:
# borg/borgcron/py/Groc.g:130:3: ( ( HOURS | MINUTES ) )
# borg/borgcron/py/Groc.g:130:5: ( HOURS | MINUTES )
pass
if (HOURS <= self.input.LA(1) <= MINUTES):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
retval.stop = self.input.LT(-1)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return retval
# $ANTLR end "period"
# $ANTLR start "monthdays"
# borg/borgcron/py/Groc.g:133:1: monthdays : ( monthday ( COMMA monthday )* ) ;
def monthdays(self, ):
try:
try:
# borg/borgcron/py/Groc.g:134:3: ( ( monthday ( COMMA monthday )* ) )
# borg/borgcron/py/Groc.g:134:5: ( monthday ( COMMA monthday )* )
pass
# borg/borgcron/py/Groc.g:134:5: ( monthday ( COMMA monthday )* )
# borg/borgcron/py/Groc.g:134:7: monthday ( COMMA monthday )*
pass
self._state.following.append(self.FOLLOW_monthday_in_monthdays314)
self.monthday()
self._state.following.pop()
# borg/borgcron/py/Groc.g:134:16: ( COMMA monthday )*
while True: #loop8
alt8 = 2
LA8_0 = self.input.LA(1)
if (LA8_0 == COMMA) :
alt8 = 1
if alt8 == 1:
# borg/borgcron/py/Groc.g:134:18: COMMA monthday
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_monthdays318)
self._state.following.append(self.FOLLOW_monthday_in_monthdays320)
self.monthday()
self._state.following.pop()
else:
break #loop8
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "monthdays"
# $ANTLR start "monthday"
# borg/borgcron/py/Groc.g:137:1: monthday : day= ( DIGIT | DIGITS ) ;
def monthday(self, ):
day = None
try:
try:
# borg/borgcron/py/Groc.g:138:3: (day= ( DIGIT | DIGITS ) )
# borg/borgcron/py/Groc.g:138:5: day= ( DIGIT | DIGITS )
pass
day = self.input.LT(1)
if (DIGIT <= self.input.LA(1) <= DIGITS):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
#action start
self.monthday_set.add(int(day.text));
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "monthday"
# $ANTLR start "weekdays"
# borg/borgcron/py/Groc.g:142:1: weekdays : ( DAY | ( weekday ( COMMA weekday )* ) ) ;
def weekdays(self, ):
try:
try:
# borg/borgcron/py/Groc.g:143:3: ( ( DAY | ( weekday ( COMMA weekday )* ) ) )
# borg/borgcron/py/Groc.g:143:5: ( DAY | ( weekday ( COMMA weekday )* ) )
pass
# borg/borgcron/py/Groc.g:143:5: ( DAY | ( weekday ( COMMA weekday )* ) )
alt10 = 2
LA10_0 = self.input.LA(1)
if (LA10_0 == DAY) :
alt10 = 1
elif ((MONDAY <= LA10_0 <= SUNDAY)) :
alt10 = 2
else:
nvae = NoViableAltException("", 10, 0, self.input)
raise nvae
if alt10 == 1:
# borg/borgcron/py/Groc.g:143:7: DAY
pass
self.match(self.input, DAY, self.FOLLOW_DAY_in_weekdays365)
#action start
if self.ordinal_set:
# <ordinal> day means <ordinal> day of the month,
# not every day of the <ordinal> week.
self.monthday_set = self.ordinal_set
self.ordinal_set = set()
else:
self.ordinal_set = self.ordinal_set.union(allOrdinals)
self.weekday_set = set([self.ValueOf(SUNDAY), self.ValueOf(MONDAY),
self.ValueOf(TUESDAY), self.ValueOf(WEDNESDAY),
self.ValueOf(THURSDAY), self.ValueOf(FRIDAY),
self.ValueOf(SATURDAY), self.ValueOf(SUNDAY)])
#action end
elif alt10 == 2:
# borg/borgcron/py/Groc.g:155:11: ( weekday ( COMMA weekday )* )
pass
# borg/borgcron/py/Groc.g:155:11: ( weekday ( COMMA weekday )* )
# borg/borgcron/py/Groc.g:155:13: weekday ( COMMA weekday )*
pass
self._state.following.append(self.FOLLOW_weekday_in_weekdays373)
self.weekday()
self._state.following.pop()
# borg/borgcron/py/Groc.g:155:21: ( COMMA weekday )*
while True: #loop9
alt9 = 2
LA9_0 = self.input.LA(1)
if (LA9_0 == COMMA) :
alt9 = 1
if alt9 == 1:
# borg/borgcron/py/Groc.g:155:22: COMMA weekday
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays376)
self._state.following.append(self.FOLLOW_weekday_in_weekdays378)
self.weekday()
self._state.following.pop()
else:
break #loop9
#action start
if not self.ordinal_set:
self.ordinal_set = self.ordinal_set.union(allOrdinals)
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "weekdays"
# $ANTLR start "weekday"
# borg/borgcron/py/Groc.g:161:1: weekday : dayname= ( MONDAY | TUESDAY | WEDNESDAY | THURSDAY | FRIDAY | SATURDAY | SUNDAY ) ;
def weekday(self, ):
dayname = None
try:
try:
# borg/borgcron/py/Groc.g:162:3: (dayname= ( MONDAY | TUESDAY | WEDNESDAY | THURSDAY | FRIDAY | SATURDAY | SUNDAY ) )
# borg/borgcron/py/Groc.g:162:5: dayname= ( MONDAY | TUESDAY | WEDNESDAY | THURSDAY | FRIDAY | SATURDAY | SUNDAY )
pass
dayname = self.input.LT(1)
if (MONDAY <= self.input.LA(1) <= SUNDAY):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
#action start
self.weekday_set.add(self.ValueOf(dayname.type))
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "weekday"
# $ANTLR start "monthspec"
# borg/borgcron/py/Groc.g:168:1: monthspec : ( MONTH | months ) ;
def monthspec(self, ):
try:
try:
# borg/borgcron/py/Groc.g:169:3: ( ( MONTH | months ) )
# borg/borgcron/py/Groc.g:169:5: ( MONTH | months )
pass
# borg/borgcron/py/Groc.g:169:5: ( MONTH | months )
alt11 = 2
LA11_0 = self.input.LA(1)
if (LA11_0 == MONTH) :
alt11 = 1
elif ((JANUARY <= LA11_0 <= DECEMBER)) :
alt11 = 2
else:
nvae = NoViableAltException("", 11, 0, self.input)
raise nvae
if alt11 == 1:
# borg/borgcron/py/Groc.g:169:7: MONTH
pass
self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec459)
#action start
self.month_set = self.month_set.union(set([
self.ValueOf(JANUARY), self.ValueOf(FEBRUARY), self.ValueOf(MARCH),
self.ValueOf(APRIL), self.ValueOf(MAY), self.ValueOf(JUNE),
self.ValueOf(JULY), self.ValueOf(AUGUST), self.ValueOf(SEPTEMBER),
self.ValueOf(OCTOBER), self.ValueOf(NOVEMBER),
self.ValueOf(DECEMBER)]))
#action end
elif alt11 == 2:
# borg/borgcron/py/Groc.g:177:7: months
pass
self._state.following.append(self.FOLLOW_months_in_monthspec469)
self.months()
self._state.following.pop()
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "monthspec"
# $ANTLR start "months"
# borg/borgcron/py/Groc.g:180:1: months : ( month ( COMMA month )* ) ;
def months(self, ):
try:
try:
# borg/borgcron/py/Groc.g:181:3: ( ( month ( COMMA month )* ) )
# borg/borgcron/py/Groc.g:181:5: ( month ( COMMA month )* )
pass
# borg/borgcron/py/Groc.g:181:5: ( month ( COMMA month )* )
# borg/borgcron/py/Groc.g:181:7: month ( COMMA month )*
pass
self._state.following.append(self.FOLLOW_month_in_months486)
self.month()
self._state.following.pop()
# borg/borgcron/py/Groc.g:181:13: ( COMMA month )*
while True: #loop12
alt12 = 2
LA12_0 = self.input.LA(1)
if (LA12_0 == COMMA) :
alt12 = 1
if alt12 == 1:
# borg/borgcron/py/Groc.g:181:14: COMMA month
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months489)
self._state.following.append(self.FOLLOW_month_in_months491)
self.month()
self._state.following.pop()
else:
break #loop12
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "months"
# $ANTLR start "month"
# borg/borgcron/py/Groc.g:184:1: month : monthname= ( JANUARY | FEBRUARY | MARCH | APRIL | MAY | JUNE | JULY | AUGUST | SEPTEMBER | OCTOBER | NOVEMBER | DECEMBER ) ;
def month(self, ):
monthname = None
try:
try:
# borg/borgcron/py/Groc.g:185:3: (monthname= ( JANUARY | FEBRUARY | MARCH | APRIL | MAY | JUNE | JULY | AUGUST | SEPTEMBER | OCTOBER | NOVEMBER | DECEMBER ) )
# borg/borgcron/py/Groc.g:185:5: monthname= ( JANUARY | FEBRUARY | MARCH | APRIL | MAY | JUNE | JULY | AUGUST | SEPTEMBER | OCTOBER | NOVEMBER | DECEMBER )
pass
monthname = self.input.LT(1)
if (JANUARY <= self.input.LA(1) <= DECEMBER):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
#action start
self.month_set.add(self.ValueOf(monthname.type));
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "month"
# $ANTLR start "quarterspec"
# borg/borgcron/py/Groc.g:190:1: quarterspec : ( QUARTER | ( quarter_ordinals MONTH OF QUARTER ) ) ;
def quarterspec(self, ):
try:
try:
# borg/borgcron/py/Groc.g:191:3: ( ( QUARTER | ( quarter_ordinals MONTH OF QUARTER ) ) )
# borg/borgcron/py/Groc.g:191:5: ( QUARTER | ( quarter_ordinals MONTH OF QUARTER ) )
pass
# borg/borgcron/py/Groc.g:191:5: ( QUARTER | ( quarter_ordinals MONTH OF QUARTER ) )
alt13 = 2
LA13_0 = self.input.LA(1)
if (LA13_0 == QUARTER) :
alt13 = 1
elif ((FIRST <= LA13_0 <= THIRD)) :
alt13 = 2
else:
nvae = NoViableAltException("", 13, 0, self.input)
raise nvae
if alt13 == 1:
# borg/borgcron/py/Groc.g:191:7: QUARTER
pass
self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec583)
#action start
self.month_set = self.month_set.union(set([
self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY),
self.ValueOf(OCTOBER)]))
#action end
elif alt13 == 2:
# borg/borgcron/py/Groc.g:195:7: ( quarter_ordinals MONTH OF QUARTER )
pass
# borg/borgcron/py/Groc.g:195:7: ( quarter_ordinals MONTH OF QUARTER )
# borg/borgcron/py/Groc.g:195:9: quarter_ordinals MONTH OF QUARTER
pass
self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec595)
self.quarter_ordinals()
self._state.following.pop()
self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec597)
self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec599)
self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec601)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "quarterspec"
# $ANTLR start "quarter_ordinals"
# borg/borgcron/py/Groc.g:198:1: quarter_ordinals : ( month_of_quarter_ordinal ( COMMA month_of_quarter_ordinal )* ) ;
def quarter_ordinals(self, ):
try:
try:
# borg/borgcron/py/Groc.g:199:3: ( ( month_of_quarter_ordinal ( COMMA month_of_quarter_ordinal )* ) )
# borg/borgcron/py/Groc.g:199:5: ( month_of_quarter_ordinal ( COMMA month_of_quarter_ordinal )* )
pass
# borg/borgcron/py/Groc.g:199:5: ( month_of_quarter_ordinal ( COMMA month_of_quarter_ordinal )* )
# borg/borgcron/py/Groc.g:199:7: month_of_quarter_ordinal ( COMMA month_of_quarter_ordinal )*
pass
self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals620)
self.month_of_quarter_ordinal()
self._state.following.pop()
# borg/borgcron/py/Groc.g:199:32: ( COMMA month_of_quarter_ordinal )*
while True: #loop14
alt14 = 2
LA14_0 = self.input.LA(1)
if (LA14_0 == COMMA) :
alt14 = 1
if alt14 == 1:
# borg/borgcron/py/Groc.g:199:33: COMMA month_of_quarter_ordinal
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals623)
self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals625)
self.month_of_quarter_ordinal()
self._state.following.pop()
else:
break #loop14
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "quarter_ordinals"
# $ANTLR start "month_of_quarter_ordinal"
# borg/borgcron/py/Groc.g:202:1: month_of_quarter_ordinal : offset= ( FIRST | SECOND | THIRD ) ;
def month_of_quarter_ordinal(self, ):
offset = None
try:
try:
# borg/borgcron/py/Groc.g:203:3: (offset= ( FIRST | SECOND | THIRD ) )
# borg/borgcron/py/Groc.g:203:5: offset= ( FIRST | SECOND | THIRD )
pass
offset = self.input.LT(1)
if (FIRST <= self.input.LA(1) <= THIRD):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
#action start
jOffset = self.ValueOf(offset.type) - 1
self.month_set = self.month_set.union(set([
jOffset + self.ValueOf(JANUARY), jOffset + self.ValueOf(APRIL),
jOffset + self.ValueOf(JULY), jOffset + self.ValueOf(OCTOBER)]))
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "month_of_quarter_ordinal"
# $ANTLR start "time_range"
# borg/borgcron/py/Groc.g:210:1: time_range : ( FROM (start_time= TIME ) TO (end_time= TIME ) ) ;
def time_range(self, ):
start_time = None
end_time = None
try:
try:
# borg/borgcron/py/Groc.g:211:3: ( ( FROM (start_time= TIME ) TO (end_time= TIME ) ) )
# borg/borgcron/py/Groc.g:211:5: ( FROM (start_time= TIME ) TO (end_time= TIME ) )
pass
# borg/borgcron/py/Groc.g:211:5: ( FROM (start_time= TIME ) TO (end_time= TIME ) )
# borg/borgcron/py/Groc.g:211:7: FROM (start_time= TIME ) TO (end_time= TIME )
pass
self.match(self.input, FROM, self.FOLLOW_FROM_in_time_range673)
# borg/borgcron/py/Groc.g:211:12: (start_time= TIME )
# borg/borgcron/py/Groc.g:211:13: start_time= TIME
pass
start_time=self.match(self.input, TIME, self.FOLLOW_TIME_in_time_range680)
#action start
self.start_time_string = start_time.text
#action end
self.match(self.input, TO, self.FOLLOW_TO_in_time_range691)
# borg/borgcron/py/Groc.g:212:10: (end_time= TIME )
# borg/borgcron/py/Groc.g:212:11: end_time= TIME
pass
end_time=self.match(self.input, TIME, self.FOLLOW_TIME_in_time_range698)
#action start
self.end_time_string = end_time.text
#action end
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
# $ANTLR end "time_range"
# Delegated rules
# lookup tables for DFA #4
DFA4_eot = DFA.unpack(
u"\13\uffff"
)
DFA4_eof = DFA.unpack(
u"\13\uffff"
)
DFA4_min = DFA.unpack(
u"\1\6\1\23\1\12\1\uffff\2\4\1\13\1\uffff\1\24\1\12\1\4"
)
DFA4_max = DFA.unpack(
u"\1\20\2\32\1\uffff\1\5\1\12\1\20\1\uffff\2\32\1\12"
)
DFA4_accept = DFA.unpack(
u"\3\uffff\1\1\3\uffff\1\2\3\uffff"
)
DFA4_special = DFA.unpack(
u"\13\uffff"
)
DFA4_transition = [
DFA.unpack(u"\1\1\2\3\2\uffff\6\2"),
DFA.unpack(u"\1\4\7\5"),
DFA.unpack(u"\1\6\10\uffff\1\4\7\5"),
DFA.unpack(u""),
DFA.unpack(u"\1\3\1\7"),
DFA.unpack(u"\1\3\1\7\4\uffff\1\10"),
DFA.unpack(u"\6\11"),
DFA.unpack(u""),
DFA.unpack(u"\7\12"),
DFA.unpack(u"\1\6\10\uffff\1\4\7\5"),
DFA.unpack(u"\1\3\1\7\4\uffff\1\10")
]
# class definition for DFA #4
DFA4 = DFA
FOLLOW_specifictime_in_timespec44 = frozenset([])
FOLLOW_interval_in_timespec48 = frozenset([])
FOLLOW_EOF_in_timespec52 = frozenset([1])
FOLLOW_ordinals_in_specifictime72 = frozenset([19, 20, 21, 22, 23, 24, 25, 26])
FOLLOW_weekdays_in_specifictime74 = frozenset([4])
FOLLOW_monthdays_in_specifictime77 = frozenset([4])
FOLLOW_OF_in_specifictime80 = frozenset([11, 12, 13, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40])
FOLLOW_monthspec_in_specifictime83 = frozenset([5])
FOLLOW_quarterspec_in_specifictime85 = frozenset([5])
FOLLOW_ordinals_in_specifictime101 = frozenset([19, 20, 21, 22, 23, 24, 25, 26])
FOLLOW_weekdays_in_specifictime103 = frozenset([5])
FOLLOW_TIME_in_specifictime117 = frozenset([1])
FOLLOW_EVERY_in_interval136 = frozenset([7, 8])
FOLLOW_set_in_interval146 = frozenset([17, 18])
FOLLOW_period_in_interval164 = frozenset([1, 9, 41])
FOLLOW_time_range_in_interval176 = frozenset([1])
FOLLOW_SYNCHRONIZED_in_interval189 = frozenset([1])
FOLLOW_EVERY_in_ordinals218 = frozenset([1])
FOLLOW_ordinal_in_ordinals226 = frozenset([1, 10])
FOLLOW_COMMA_in_ordinals229 = frozenset([11, 12, 13, 14, 15, 16])
FOLLOW_ordinal_in_ordinals231 = frozenset([1, 10])
FOLLOW_set_in_ordinal252 = frozenset([1])
FOLLOW_set_in_period291 = frozenset([1])
FOLLOW_monthday_in_monthdays314 = frozenset([1, 10])
FOLLOW_COMMA_in_monthdays318 = frozenset([7, 8])
FOLLOW_monthday_in_monthdays320 = frozenset([1, 10])
FOLLOW_set_in_monthday340 = frozenset([1])
FOLLOW_DAY_in_weekdays365 = frozenset([1])
FOLLOW_weekday_in_weekdays373 = frozenset([1, 10])
FOLLOW_COMMA_in_weekdays376 = frozenset([19, 20, 21, 22, 23, 24, 25, 26])
FOLLOW_weekday_in_weekdays378 = frozenset([1, 10])
FOLLOW_set_in_weekday400 = frozenset([1])
FOLLOW_MONTH_in_monthspec459 = frozenset([1])
FOLLOW_months_in_monthspec469 = frozenset([1])
FOLLOW_month_in_months486 = frozenset([1, 10])
FOLLOW_COMMA_in_months489 = frozenset([27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
FOLLOW_month_in_months491 = frozenset([1, 10])
FOLLOW_set_in_month510 = frozenset([1])
FOLLOW_QUARTER_in_quarterspec583 = frozenset([1])
FOLLOW_quarter_ordinals_in_quarterspec595 = frozenset([27])
FOLLOW_MONTH_in_quarterspec597 = frozenset([4])
FOLLOW_OF_in_quarterspec599 = frozenset([40])
FOLLOW_QUARTER_in_quarterspec601 = frozenset([1])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals620 = frozenset([1, 10])
FOLLOW_COMMA_in_quarter_ordinals623 = frozenset([11, 12, 13, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals625 = frozenset([1, 10])
FOLLOW_set_in_month_of_quarter_ordinal644 = frozenset([1])
FOLLOW_FROM_in_time_range673 = frozenset([5])
FOLLOW_TIME_in_time_range680 = frozenset([42])
FOLLOW_TO_in_time_range691 = frozenset([5])
FOLLOW_TIME_in_time_range698 = frozenset([1])
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
from antlr3.main import ParserMain
main = ParserMain("GrocLexer", GrocParser)
main.stdin = stdin
main.stdout = stdout
main.stderr = stderr
main.execute(argv)
if __name__ == '__main__':
main(sys.argv)
|
KaranToor/MA450
|
google-cloud-sdk/lib/googlecloudsdk/third_party/appengine/googlecron/GrocParser.py
|
Python
|
apache-2.0
| 45,082
|
from __future__ import unicode_literals
from django.db import models
class Artist(models.Model):
first_name = models.CharField(max_length=200)
last_name = models.CharField(max_length=200)
bio = models.TextField(null=True, blank=True)
email_address = models.EmailField()
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __repr__(self):
"""
        Returns a string representation of the artist for debugging/interpreter
>>> a = Artist(first_name="Vincent", last_name="Van Gough", bio="", \
email_address="vincent@vangough.com")
>>> repr(a)
'<Artist Vincent Van Gough>'
"""
return '<Artist {} {}>'.format(self.first_name, self.last_name)
def __str__(self):
"""
        Returns the string representation of the artist: first_name last_name
>>> a = Artist(first_name="Vincent", last_name="Van Gough", bio="", \
email_address="vincent@vangough.com")
>>> str(a)
'Vincent Van Gough'
"""
return '{} {}'.format(self.first_name, self.last_name)
|
highsineburgh/gallery_api
|
gallery_api/gallery/models.py
|
Python
|
gpl-3.0
| 1,130
|
#!/usr/bin/env python
# encoding: utf-8
"""
setup.py
Created by Cody Brocious on 2006-12-21.
Copyright (c) 2006 Falling Leaf Systems. All rights reserved.
"""
from distutils.core import setup
import py2app
setup(
app = ['Convert.py'],
options = dict(
py2app=dict(
argv_emulation=True
)
)
)
|
callen/Alky-Reborn
|
Convertor/setup.py
|
Python
|
lgpl-3.0
| 303
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.9.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@w$i9&1blz%(h_kx4qsoq_2e11l#z9%=7+aseo1xdb-8^b-(b5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'redactor',
'ckeditor',
'ckeditor_uploader',
'suit_redactor',
'tinymce',
'blog',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
# MEDIA_URL = '/http://127.0.0.1:8000/media/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'public/media')
REDACTOR_OPTIONS = {'lang': 'en'}
REDACTOR_UPLOAD = MEDIA_ROOT
CKEDITOR_JQUERY_URL = 'https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js'
CKEDITOR_UPLOAD_PATH = "uploads/"
CKEDITOR_UPLOAD_SLUGIFY_FILENAME = False
CKEDITOR_RESTRICT_BY_USER = True
CKEDITOR_BROWSE_SHOW_DIRS = True
CKEDITOR_IMAGE_BACKEND = "pillow"
CKEDITOR_CONFIGS = {
'default': {
'skin': 'moono',
# 'skin': 'office2013',
'toolbar_Basic': [
['Source', '-', 'Bold', 'Italic']
],
'toolbar_YouCustomToolbarConfig': [
{'name': 'document', 'items': ['Source', '-', 'Save', 'NewPage', 'Preview', 'Print', '-', 'Templates']},
{'name': 'clipboard', 'items': ['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-', 'Undo', 'Redo']},
{'name': 'editing', 'items': ['Find', 'Replace', '-', 'SelectAll']},
{'name': 'forms',
'items': ['Form', 'Checkbox', 'Radio', 'TextField', 'Textarea', 'Select', 'Button', 'ImageButton',
'HiddenField']},
'/',
{'name': 'basicstyles',
'items': ['Bold', 'Italic', 'Underline', 'Strike', 'Subscript', 'Superscript', '-', 'RemoveFormat']},
{'name': 'paragraph',
'items': ['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'Blockquote', 'CreateDiv', '-',
'JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl',
'Language']},
{'name': 'links', 'items': ['Link', 'Unlink', 'Anchor']},
{'name': 'insert',
'items': ['Image', 'Flash', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar', 'PageBreak', 'Iframe']},
'/',
{'name': 'styles', 'items': ['Styles', 'Format', 'Font', 'FontSize']},
{'name': 'colors', 'items': ['TextColor', 'BGColor']},
{'name': 'tools', 'items': ['Maximize', 'ShowBlocks']},
{'name': 'about', 'items': ['About']},
'/', # put this to force next toolbar on new line
{'name': 'youcustomtools', 'items': [
# put the name of your editor.ui.addButton here
'Preview',
'Maximize',
]},
],
        'toolbar': 'YourCustomToolbarConfig',  # put selected toolbar config here
# 'toolbarGroups': [{ 'name': 'document', 'groups': [ 'mode', 'document', 'doctools' ] }],
# 'height': 291,
# 'width': '100%',
# 'filebrowserWindowHeight': 725,
# 'filebrowserWindowWidth': 940,
# 'toolbarCanCollapse': True,
# 'mathJaxLib': '//cdn.mathjax.org/mathjax/2.2-latest/MathJax.js?config=TeX-AMS_HTML',
'tabSpaces': 4,
'extraPlugins': ','.join(
[
                # your extra plugins here
'div',
'autolink',
'autoembed',
'embedsemantic',
'autogrow',
# 'devtools',
'widget',
'lineutils',
'clipboard',
'dialog',
'dialogui',
'elementspath'
]),
}
}
# tinymce
TINYMCE_DEFAULT_CONFIG = {
'theme': "advanced",
'plugins': "wordcount,preview,emotions,preview,spellchecker,",
'height': "400px",
'width': "700px",
'theme_advanced_buttons3' : "fontselect,fontsizeselect,emotions,preview,",
}
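# Illustrative sketch (app, model and field names are assumed, not part of
# this settings module): with CKEDITOR_UPLOAD_PATH and CKEDITOR_CONFIGS set
# as above, a model in the "blog" app would typically expose a rich-text
# field along these lines (in blog/models.py):
#
#   from django.db import models
#   from ckeditor_uploader.fields import RichTextUploadingField
#
#   class Post(models.Model):
#       title = models.CharField(max_length=200)
#       body = RichTextUploadingField(config_name='default')
#
# Uploads made from the widget then land under MEDIA_ROOT/CKEDITOR_UPLOAD_PATH
# and the editor picks up the 'default' entry of CKEDITOR_CONFIGS.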
|
janusnic/dj-21v
|
unit_07/mysite/mysite/settings.py
|
Python
|
mit
| 6,967
|
import nltk
import urllib
import json
import urllib2
from urllib import urlopen
from simplenlp import get_nl
from nltk.corpus import wordnet
import random
import sys
import string
import pickle
import readline
#this method replaces adjectives with a random synonym
def replace_adjectives_strip_pos(token):
if(token[1] in ("JJ", "JJR", "JJS")):
syn = ["<strong>" + lemma.name.replace("_"," ") + "</strong>" for lemma in sum([ss.lemmas for ss in wordnet.synsets(token[0], wordnet.ADJ)],[])]
if(len(syn)>0):
return random.choice(syn)
else:
return token[0]
else:
return token[0]
def replace_nouns_strip_pos(token):
if(token[1] in ("NN", "NNS")):
syn = ["<strong>" + lemma.name.replace("_"," ") + "</strong>" for lemma in sum([ss.lemmas for ss in wordnet.synsets(token[0], wordnet.NOUN)],[])]
if(len(syn)>0):
return random.choice(syn)
else:
return token[0]
else:
return token[0]
# if there's punctuation, split off the punctuation along with the final word
# otherwise just split off the final word
def split_final_word_from_line(pos_tokens):
length = len(pos_tokens)
if(length == 0):
return [],[]
if pos_tokens[-1][0] in string.punctuation:
if(length>1):
            return pos_tokens[0:-2], pos_tokens[-2:]
else:
return [], [pos_tokens[-1]]
else:
return pos_tokens[0:-1],[pos_tokens[-1]]
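# Illustrative example (sample tokens assumed, not from the original script):
#   split_final_word_from_line([('the', 'DT'), ('whale', 'NN'), ('.', '.')])
#   -> ([('the', 'DT')], [('whale', 'NN'), ('.', '.')])
#   split_final_word_from_line([('the', 'DT'), ('whale', 'NN')])
#   -> ([('the', 'DT')], [('whale', 'NN')])
# i.e. the rhyme-carrying final word (plus trailing punctuation) is kept
# separate so it is never replaced by a synonym.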
# download Moby Dick by Herman Melville from Project Gutenberg
url = "http://www.gutenberg.org/cache/epub/2701/pg2701.txt"
# download Heart of Darkness by Joseph Conrad from Project Gutenberg
url = "http://www.gutenberg.org/cache/epub/526/pg526.txt"
# download the collected works of Emily Dickinson
url = "http://www.gutenberg.org/cache/epub/12242/pg12242.txt"
# download the Tractatus by Wittgenstein (known transcription errors)
#url = "http://natematias.com/medialab/tractatus.txt"
# download Don Quixote by Cervantes
url = "http://www.gutenberg.org/cache/epub/996/pg996.txt"
raw_text = urlopen(url).read()
print "downloaded text..."
sys.stdout.flush()
f = open(sys.argv[1], 'w')
for line in raw_text.split('\n'):
# tokenize the story
tokens = nltk.word_tokenize(line)
# load a brill tagger trained by nltk-trainer
# https://github.com/japerk/nltk-trainer
#tagger = pickle.load("/Users/nathan/nltk_data/taggers/treebank_brill_aubt.pickle")
# apply part of speech tags to the tokens
pos_tokens = nltk.pos_tag(tokens)
# for rhyming poetry, split final word from line:
front_tokens, end_tokens = split_final_word_from_line(pos_tokens)
#print "labeled tokens..."
#replace all adjectives with a synonym
#adj_replaced_tokens = [replace_adjectives_strip_pos(x) for x in pos_tokens]
noun_replaced_tokens = [replace_nouns_strip_pos(x) for x in front_tokens]
#print "replaced nouns..."
sys.stdout.flush()
#untokenize the text to create a single string. Clean up some of the dashes, which confuse reporting script
en_nl = get_nl('en')
#replaced_text = en_nl.untokenize(" ".join(adj_replaced_tokens))#.replace(".",".\n")
replaced_text = en_nl.untokenize(" ".join(noun_replaced_tokens + [x[0] for x in end_tokens]))
#print sys.stdout.write(".")
sys.stdout.flush()
# write modified literature to file
f.write("<pre>\n")
f.write(replaced_text + "\n")
f.write("</pre>\n")
f.close()
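# Illustrative usage (output file name assumed):
#   python poetry_synonym_machine.py quixote_synonyms.html
# downloads the selected Project Gutenberg text, replaces nouns with random
# WordNet synonyms, and writes the rewritten lines, wrapped in <pre> blocks,
# to the file named by the first command-line argument.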
|
natematias/the-synonym-machine
|
poetry_synonym_machine.py
|
Python
|
mit
| 3,300
|
import sys
sys.path.insert(1, "../../../")
import h2o, tests
def binop_amp(ip,port):
iris = h2o.import_file(path=h2o.locate("smalldata/iris/iris_wheader_65_rows.csv"))
rows, cols = iris.dim
###################################################################
    # LHS: scalar, RHS: H2OFrame
amp_res = 5 & iris
amp_rows, amp_cols = amp_res.dim
assert amp_rows == rows and amp_cols == cols, "dimension mismatch"
    # LHS: scalar, RHS: H2OVec
amp_res = 1 & iris[1]
amp_rows = amp_res.nrow
assert amp_rows == rows, "dimension mismatch"
new_rows = iris[amp_res].nrow
assert new_rows == rows, "wrong number of rows returned"
###################################################################
    # LHS: scalar, RHS: H2OFrame
res = 1.2 + iris[2]
res2 = res[11,:] & iris
res2.show()
    # LHS: scalar, RHS: H2OVec
res = 1.2 + iris[2]
res2 = res[43,:] & iris[1]
res2.show()
###################################################################
# LHS: H2OVec, RHS: H2OFrame
#try:
# res = iris[2] & iris
# res.show()
# assert False, "expected error. objects with different dimensions not supported."
#except EnvironmentError:
# pass
# LHS: H2OVec, RHS: H2OVec
res = iris[0] & iris[1]
assert res.sum() == 65.0, "expected all True"
res = iris[2] & iris[1]
assert res.sum() == 65.0, "expected all True"
# LHS: H2OVec, RHS: H2OVec
res = 1.2 + iris[2]
res2 = iris[1,:] & res[7,:]
res2.show()
    # LHS: H2OVec, RHS: scalar
res = iris[0] & 0
assert res.sum() == 0.0, "expected all False"
###################################################################
# LHS: H2OFrame, RHS: H2OFrame
res = iris & iris
res_rows, res_cols = res.dim
assert res_rows == rows and res_cols == cols, "dimension mismatch"
res = iris[0:2] & iris[1:3]
res_rows, res_cols = res.dim
assert res_rows == rows and res_cols == 2, "dimension mismatch"
#try:
# res = iris & iris[0:3]
# res.show()
# assert False, "expected error. frames are different dimensions."
#except EnvironmentError:
# pass
# LHS: H2OFrame, RHS: H2OVec
#try:
# res = iris & iris[0]
# res.show()
# assert False, "expected error. objects of different dimensions not supported."
#except EnvironmentError:
# pass
    # LHS: H2OFrame, RHS: scalar
res = 1.2 + iris[2]
res2 = iris & res[55,:]
res2.show()
    # LHS: H2OFrame, RHS: scalar
res = iris & 0
res_rows, res_cols = res.dim
assert res_rows == rows and res_cols == cols, "dimension mismatch"
for c in range(cols-1):
for r in range(rows):
assert res[r,c] == 0.0, "expected False"
###################################################################
if __name__ == "__main__":
tests.run_test(sys.argv, binop_amp)
|
bospetersen/h2o-3
|
h2o-py/tests/testdir_munging/binop/pyunit_binop2_amp.py
|
Python
|
apache-2.0
| 2,942
|
# Copyright 2013, Nachi Ueno, NTT I3, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from neutron.common import rpc as n_rpc
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class VpnDriver(object):
def __init__(self, service_plugin):
self.service_plugin = service_plugin
@property
def service_type(self):
pass
@abc.abstractmethod
def create_vpnservice(self, context, vpnservice):
pass
@abc.abstractmethod
def update_vpnservice(
self, context, old_vpnservice, vpnservice):
pass
@abc.abstractmethod
def delete_vpnservice(self, context, vpnservice):
pass
class BaseIPsecVpnAgentApi(n_rpc.RpcProxy):
"""Base class for IPSec API to agent."""
def __init__(self, to_agent_topic, topic, default_version):
self.to_agent_topic = to_agent_topic
super(BaseIPsecVpnAgentApi, self).__init__(topic, default_version)
def _agent_notification(self, context, method, router_id,
version=None, **kwargs):
"""Notify update for the agent.
        This method finds which L3 agents are hosting the router and
        dispatches the notification to each of them.
"""
admin_context = context.is_admin and context or context.elevated()
plugin = manager.NeutronManager.get_service_plugins().get(
constants.L3_ROUTER_NAT)
if not version:
version = self.RPC_API_VERSION
l3_agents = plugin.get_l3_agents_hosting_routers(
admin_context, [router_id],
admin_state_up=True,
active=True)
for l3_agent in l3_agents:
LOG.debug(_('Notify agent at %(topic)s.%(host)s the message '
'%(method)s %(args)s'),
{'topic': self.to_agent_topic,
'host': l3_agent.host,
'method': method,
'args': kwargs})
self.cast(
context, self.make_msg(method, **kwargs),
version=version,
topic='%s.%s' % (self.to_agent_topic, l3_agent.host))
def vpnservice_updated(self, context, router_id, **kwargs):
"""Send update event of vpnservices."""
self._agent_notification(context, 'vpnservice_updated', router_id,
**kwargs)
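# Illustrative sketch (topic names and version are assumed, not part of the
# original module): a concrete IPsec service driver would typically pair a
# VpnDriver subclass with an agent API subclass along these lines:
#
#   class MyIPsecVpnAgentApi(BaseIPsecVpnAgentApi):
#       RPC_API_VERSION = '1.0'
#
#       def __init__(self):
#           super(MyIPsecVpnAgentApi, self).__init__(
#               'my_ipsec_agent_topic', 'my_ipsec_driver_topic',
#               self.RPC_API_VERSION)
#
# so that vpnservice_updated() notifications are cast to the L3 agents that
# host the affected router.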
|
onecloud/neutron
|
neutron/services/vpn/service_drivers/__init__.py
|
Python
|
apache-2.0
| 3,066
|
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid -
**Zonal Stats.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'tim@linfiniti.com'
__date__ = '17/10/2013'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import struct
import logging
import numpy
from osgeo import gdal, ogr, osr
from PyQt4.QtCore import QCoreApplication
from qgis.core import (
QgsRectangle,
QgsFeatureRequest,
QgsGeometry,
QgsPoint)
from safe_qgis.utilities.utilities import (
is_raster_layer,
is_polygon_layer)
from safe_qgis.exceptions import InvalidParameterError, InvalidGeometryError
LOGGER = logging.getLogger('InaSAFE')
def tr(theText):
"""We define a tr() alias here since the utilities implementation.
The code below is not a class and does not inherit from QObject.
.. note:: see http://tinyurl.com/pyqt-differences
:param theText: String to be translated
:type theText: str
:returns: Translated version of the given string if available,
otherwise the original string.
"""
# noinspection PyCallByClass,PyTypeChecker,PyArgumentList
return QCoreApplication.translate('zonal_stats', theText)
def calculate_zonal_stats(raster_layer, polygon_layer):
"""Calculate zonal statics given two layers.
:param raster_layer: A QGIS raster layer.
:type raster_layer: QgsRasterLayer
:param polygon_layer: A QGIS vector layer containing polygons.
:type polygon_layer: QgsVectorLayer
:returns: A data structure containing sum, mean, min, max,
count of raster values for each polygonal area.
:rtype: dict
:raises: InvalidParameterError, InvalidGeometryError
Note:
* InvalidParameterError if incorrect inputs are received.
    * InvalidGeometryError if a None geometry is found during calculations.
* Any other exceptions are propagated.
Example of output data structure:
{ 1: {'sum': 10, 'count': 20, 'min': 1, 'max': 4, 'mean': 2},
2: {'sum': 10, 'count': 20, 'min': 1, 'max': 4, 'mean': 2},
      3: {'sum': 10, 'count': 20, 'min': 1, 'max': 4, 'mean': 2}}
The key in the outer dict is the feature id
    .. note:: This is a python port of the zonal stats implementation in
        QGIS. See https://github.com/qgis/Quantum-GIS/blob/master/src/analysis/
        vector/qgszonalstatistics.cpp
    .. note:: Currently no projection checks are made to ensure that both
layers are in the same CRS - we assume they are.
"""
if not is_polygon_layer(polygon_layer):
raise InvalidParameterError(tr(
'Zonal stats needs a polygon layer in order to compute '
'statistics.'))
if not is_raster_layer(raster_layer):
raise InvalidParameterError(tr(
'Zonal stats needs a raster layer in order to compute statistics.'
))
LOGGER.debug('Calculating zonal stats for:')
LOGGER.debug('Raster: %s' % raster_layer.source())
LOGGER.debug('Vector: %s' % polygon_layer.source())
myResults = {}
myRasterSource = raster_layer.source()
myFid = gdal.Open(str(myRasterSource), gdal.GA_ReadOnly)
myGeoTransform = myFid.GetGeoTransform()
myColumns = myFid.RasterXSize
myRows = myFid.RasterYSize
# Get first band.
myBand = myFid.GetRasterBand(1)
myNoData = myBand.GetNoDataValue()
#print 'No data %s' % myNoData
myCellSizeX = myGeoTransform[1]
if myCellSizeX < 0:
myCellSizeX = -myCellSizeX
myCellSizeY = myGeoTransform[5]
if myCellSizeY < 0:
myCellSizeY = -myCellSizeY
myRasterBox = QgsRectangle(
myGeoTransform[0],
myGeoTransform[3] - (myCellSizeY * myRows),
myGeoTransform[0] + (myCellSizeX * myColumns),
myGeoTransform[3])
rasterGeom = QgsGeometry.fromRect(myRasterBox)
# Get vector layer
myProvider = polygon_layer.dataProvider()
if myProvider is None:
myMessage = tr(
'Could not obtain data provider from layer "%s"') % (
polygon_layer.source())
raise Exception(myMessage)
myRequest = QgsFeatureRequest()
crs = osr.SpatialReference()
crs.ImportFromProj4(str(polygon_layer.crs().toProj4()))
myCount = 0
for myFeature in myProvider.getFeatures(myRequest):
myGeometry = myFeature.geometry()
if myGeometry is None:
myMessage = tr(
'Feature %d has no geometry or geometry is invalid') % (
myFeature.id())
raise InvalidGeometryError(myMessage)
myCount += 1
myFeatureBox = myGeometry.boundingBox().intersect(myRasterBox)
        LOGGER.debug('NEW AGGR: %s', myFeature.id())
#print 'Raster Box: %s' % myRasterBox.asWktCoordinates()
#print 'Feature Box: %s' % myFeatureBox.asWktCoordinates()
myOffsetX, myOffsetY, myCellsX, myCellsY = intersection_box(
myRasterBox, myFeatureBox, myCellSizeX, myCellSizeY)
# If the poly does not intersect the raster just continue
if None in [myOffsetX, myOffsetY, myCellsX, myCellsY]:
continue
# avoid access to cells outside of the raster (may occur because of
# rounding)
if (myOffsetX + myCellsX) > myColumns:
            myCellsX = myColumns - myOffsetX
if (myOffsetY + myCellsY) > myRows:
myCellsY = myRows - myOffsetY
myIntersectedGeom = rasterGeom.intersection(myGeometry)
mySum, myCount = numpy_stats(
myBand,
myIntersectedGeom,
myGeoTransform,
myNoData,
crs)
if myCount <= 1:
# The cell resolution is probably larger than the polygon area.
# We switch to precise pixel - polygon intersection in this case
mySum, myCount = precise_stats(
myBand,
myGeometry,
myOffsetX,
myOffsetY,
myCellsX,
myCellsY,
myCellSizeX,
myCellSizeY,
myRasterBox,
myNoData)
#print mySum, myCount
if myCount == 0:
myMean = 0
else:
myMean = mySum / myCount
myResults[myFeature.id()] = {
'sum': mySum,
'count': myCount,
'mean': myMean}
# noinspection PyUnusedLocal
myFid = None # Close
return myResults
def intersection_box(
raster_box,
feature_box,
cell_size_x,
cell_size_y):
"""Calculate cell offset and distances for the intersecting bbox.
:param raster_box: Box defining the extents of the raster.
:type raster_box: QgsRectangle
:param feature_box: Bounding box for the feature.
:type feature_box: QgsRectangle
:param cell_size_x: Size in the x direction of a single cell.
:type cell_size_x: float
:param cell_size_y: Size in the y direction of a single cell.
:type cell_size_y: float
:returns: Offsets in the x and y directions, and number of cells in the x
and y directions.
:rtype: (int, int, int, int)
"""
#get intersecting bbox
myIntersectedBox = feature_box.intersect(raster_box)
#print 'Intersected Box: %s' % myIntersectedBox.asWktCoordinates()
if myIntersectedBox.isEmpty():
return None, None, None, None
#get offset in pixels in x- and y- direction
myOffsetX = myIntersectedBox.xMinimum() - raster_box.xMinimum()
myOffsetX /= cell_size_x
myOffsetX = int(myOffsetX)
myOffsetY = raster_box.yMaximum() - myIntersectedBox.yMaximum()
myOffsetY /= cell_size_y
myOffsetY = int(myOffsetY)
##### Checked to here....offsets calculate correctly ##########
myMaxColumn = myIntersectedBox.xMaximum() - raster_box.xMinimum()
myMaxColumn /= cell_size_x
# Round up to the next cell if the bbox is not on an exact pixel boundary
if myMaxColumn > int(myMaxColumn):
myMaxColumn = int(myMaxColumn) + 1
else:
myMaxColumn = int(myMaxColumn)
myMaxRow = raster_box.yMaximum() - myIntersectedBox.yMinimum()
myMaxRow /= cell_size_y
# Round up to the next cell if the bbox is not on an exact pixel boundary
if myMaxRow > int(myMaxRow):
myMaxRow = int(myMaxRow) + 1
else:
myMaxRow = int(myMaxRow)
myCellsX = myMaxColumn - myOffsetX
myCellsY = myMaxRow - myOffsetY
LOGGER.debug(
'Pixel box: W: %s H: %s Offset Left: %s Offset Bottom: %s' % (
myCellsX, myCellsY, myOffsetX, myOffsetY
))
return myOffsetX, myOffsetY, myCellsX, myCellsY
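# Illustrative example (numbers assumed, not from the original module): for a
# raster whose extent is QgsRectangle(100, 100, 300, 200) with 10 m cells and
# a feature whose bounding box is QgsRectangle(125, 145, 165, 185),
# intersection_box() returns offsets (2, 1) and window size (5, 5), i.e. a
# 5 x 5 pixel window starting 2 columns in from the left edge and 1 row down
# from the top of the raster.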
def centroid_intersection_stats(
band,
geometry,
pixel_offset_x,
pixel_offset_y,
cells_x,
cells_y,
cell_size_x,
cell_size_y,
raster_box,
no_data):
"""Stats where centroid of each cell must intersect the polygon.
:param band: A valid band from a raster layer.
:type band: GDALRasterBand
:param geometry: A valid polygon geometry.
:type geometry: QgsGeometry
:param pixel_offset_x: Left offset for raster window.
:type pixel_offset_x: int
:param pixel_offset_y: Offset from bottom for raster window.
:type pixel_offset_y: int
:param cells_x: Width of the raster window.
:type cells_x: int
:param cells_y: Height of the raster window.
:type cells_y: int
:param cell_size_x: Size in the x direction of a single cell.
:type cell_size_x: float
:param cell_size_y: Size in the y direction of a single cell.
:type cell_size_y: float
:param raster_box: Box defining the extents of the raster.
:type raster_box: QgsRectangle
:param no_data: Value for no data in the raster.
:type no_data: int, float
:returns: Sum, Count - sum of the values of all pixels and the count of
pixels that intersect with the geometry.
:rtype: (float, int)
"""
myCellCenterX = (
raster_box.yMaximum() - pixel_offset_y * cell_size_y -
cell_size_y / 2)
myCount = 0
mySum = 0
myBufferXSize = cells_x
myBufferYSize = 1 # read in a single row at a time
myCellsToReadX = cells_x
myCellsToReadY = 1 # read in a single row at a time
for i in range(0, cells_y):
myScanline = band.ReadRaster(
pixel_offset_x,
pixel_offset_y + i,
myCellsToReadX,
myCellsToReadY,
myBufferXSize,
myBufferYSize,
gdal.GDT_Float32)
# Note that the returned scanline is of type string, and contains
# xsize*4 bytes of raw binary floating point data. This can be
# converted to Python values using the struct module from the standard
# library:
myValues = struct.unpack('f' * myCellsToReadX, myScanline)
#print myValues
if myValues is None:
continue
myCellCenterY = (
raster_box.xMinimum() +
pixel_offset_x * cell_size_x +
cell_size_x / 2)
for j in range(0, cells_x):
myPoint = QgsPoint(myCellCenterY, myCellCenterX)
if geometry.contains(myPoint):
if myValues[j] != no_data:
mySum += myValues[j]
myCount += 1
myCellCenterY += cell_size_x
# Move down one row
myCellCenterX -= cell_size_y
return mySum, myCount
# noinspection PyArgumentList
def precise_stats(
band,
geometry,
pixel_offset_x,
pixel_offset_y,
cells_x,
cells_y,
cell_size_x,
cell_size_y,
raster_box,
no_data):
"""Weighted pixel sum for polygon based on only intersecting parts.
:param band: A valid band from a raster layer.
:type band: GDALRasterBand
:param geometry: A valid polygon geometry.
:type geometry: QgsGeometry
:param pixel_offset_x: Left offset for raster window.
:type pixel_offset_x: int
:param pixel_offset_y: Offset from bottom for raster window.
:type pixel_offset_y: int
:param cells_x: Width of the raster window.
:type cells_x: int
:param cells_y: Height of the raster window.
:type cells_y: int
:param cell_size_x: Size in the x direction of a single cell.
:type cell_size_x: float
    :param cell_size_y: Size in the y direction of a single cell.
:type cell_size_y: float
:param raster_box: Box defining the extents of the raster.
:type raster_box: QgsRectangle
:param no_data: Value for nodata in the raster.
:type no_data: int, float
:returns: Sum, Count - sum of the values of all pixels and the count of
pixels that intersect with the geometry.
:rtype: (float, int)
"""
myCurrentY = (
raster_box.yMaximum() - pixel_offset_y * cell_size_y - cell_size_y / 2)
myHalfCellSizeX = cell_size_x / 2.0
myHalfCellsSizeY = cell_size_y / 2.0
myPixelArea = cell_size_x * cell_size_y
myCellsToReadX = cells_x
myCellsToReadY = 1 # read in a single row at a time
myBufferXSize = 1
myBufferYSize = 1
myCount = 0
mySum = 0.0
for row in range(0, cells_y):
myCurrentX = (
raster_box.xMinimum() + cell_size_x / 2.0 +
pixel_offset_x * cell_size_x)
# noinspection PyArgumentList
for col in range(0, cells_x):
# Read a single pixel
myScanline = band.ReadRaster(
pixel_offset_x + col,
pixel_offset_y + row,
myCellsToReadX,
myCellsToReadY,
myBufferXSize,
myBufferYSize,
gdal.GDT_Float32)
# Note that the returned scanline is of type string, and contains
# xsize*4 bytes of raw binary floating point data. This can be
# converted to Python values using the struct module from the
# standard library:
if myScanline != '':
myValues = struct.unpack('f', myScanline) # tuple returned
myValue = myValues[0]
else:
continue
if myValue == no_data:
continue
# noinspection PyCallByClass,PyTypeChecker
myPixelGeometry = QgsGeometry.fromRect(
QgsRectangle(
myCurrentX - myHalfCellSizeX,
myCurrentY - myHalfCellsSizeY,
myCurrentX + myHalfCellSizeX,
myCurrentY + myHalfCellsSizeY))
if myPixelGeometry:
myIntersectionGeometry = myPixelGeometry.intersection(
geometry)
if myIntersectionGeometry:
myIntersectionArea = myIntersectionGeometry.area()
if myIntersectionArea >= 0.0:
myWeight = myIntersectionArea / myPixelArea
myCount += myWeight
mySum += myValue * myWeight
            myCurrentX += cell_size_x
        myCurrentY -= cell_size_y
return mySum, myCount
def map_to_pixel(x_coordinate, y_coordinate, geo_transform):
"""Convert map coordinates to pixel coordinates.
:param x_coordinate: Input map X coordinate.
:type x_coordinate: float
:param y_coordinate: Input map Y coordinate.
:type y_coordinate: float
:param geo_transform: Geo-referencing transform from raster metadata.
:type geo_transform: list (six floats)
    :returns: pX, pY - Output pixel coordinates
:rtype: (int, int)
"""
if geo_transform[2] + geo_transform[4] == 0:
pX = (x_coordinate - geo_transform[0]) / geo_transform[1]
pY = (y_coordinate - geo_transform[3]) / geo_transform[5]
else:
pX, pY = transform(
x_coordinate, y_coordinate, inverse_transform(geo_transform))
return int(pX + 0.5), int(pY + 0.5)
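# Illustrative example (values assumed, not from the original module): for a
# north-up transform with a 10 m pixel and origin (100, 200),
#   geo_transform = (100.0, 10.0, 0.0, 200.0, 0.0, -10.0)
#   map_to_pixel(125.0, 175.0, geo_transform)  ->  (3, 3)
# since (125 - 100) / 10 = 2.5 and (175 - 200) / -10 = 2.5, and each value is
# rounded to the nearest pixel by the int(p + 0.5) step.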
def pixel_to_map(pixel_x, pixel_y, geo_transform):
"""Convert pixel coordinates to map coordinates.
:param pixel_x: Input pixel X coordinate
:type pixel_x: float
:param pixel_y: Input pixel Y coordinate
:type pixel_y: float
:param geo_transform: Geo-referencing transform from raster metadata.
:type geo_transform: list (six floats)
    :returns: mX, mY - Output map coordinates
:rtype: (float, float)
"""
mX, mY = transform(pixel_x, pixel_y, geo_transform)
return mX, mY
def transform(x, y, geo_transform):
"""Apply a geo transform to coordinates.
:param x: Input X coordinate.
:type x: float
:param y: Input Y coordinate
:type y: float
:param geo_transform: Geo-referencing transform from raster metadata.
:type geo_transform: list (six floats)
    :returns: outX, outY - Transformed X and Y coordinates
:rtype: (float, float)
"""
outX = geo_transform[0] + x * geo_transform[1] + y * geo_transform[2]
outY = geo_transform[3] + x * geo_transform[4] + y * geo_transform[5]
return outX, outY
def inverse_transform(geo_transform):
"""Invert standard 3x2 set of geo-transform coefficients.
:param geo_transform: Geo-referencing transform from raster metadata (
which is unaltered).
:type geo_transform: list (six floats)
    :returns: Inverted geo-referencing transform on success,
        empty list on failure.
    :rtype: list (six floats or empty)
"""
# we assume a 3rd row that is [1 0 0]
    # compute determinant
det = (geo_transform[1] * geo_transform[5] -
geo_transform[2] * geo_transform[4])
if abs(det) < 0.000000000000001:
return []
invDet = 1.0 / det
    # compute adjoint and divide by determinant
outGeoTransform = [0, 0, 0, 0, 0, 0]
outGeoTransform[1] = geo_transform[5] * invDet
outGeoTransform[4] = -geo_transform[4] * invDet
outGeoTransform[2] = -geo_transform[2] * invDet
outGeoTransform[5] = geo_transform[1] * invDet
outGeoTransform[0] = (geo_transform[2] * geo_transform[3] -
geo_transform[0] * geo_transform[5]) * invDet
outGeoTransform[3] = (-geo_transform[1] * geo_transform[3] +
geo_transform[0] * geo_transform[4]) * invDet
return outGeoTransform
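# Illustrative round-trip check (values assumed): for the axis-aligned
# transform geo_transform = (100.0, 10.0, 0.0, 200.0, 0.0, -10.0),
# inverse_transform() returns approximately
#   [-10.0, 0.1, 0.0, 20.0, 0.0, -0.1]
# and transform(125.0, 175.0, inverse_transform(geo_transform)) recovers the
# pixel coordinates (2.5, 2.5), matching the values map_to_pixel() rounds.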
def numpy_stats(band, geometry, geo_transform, no_data, crs):
"""
:param band: A valid band from a raster layer.
:type band: GDALRasterBand
:param geometry: A polygon geometry used to calculate statistics.
:type geometry: QgsGeometry
:param geo_transform: Geo-referencing transform from raster metadata.
:type geo_transform: list (six floats)
:param no_data: Value for nodata in the raster.
:type no_data: int, float
:param crs: Coordinate reference system of the vector layer.
:type crs: OGRSpatialReference
:returns: Sum, Count - sum of the values of all pixels and the count of
pixels that intersect with the geometry.
:rtype: (float, int)
"""
mem_drv = ogr.GetDriverByName('Memory')
driver = gdal.GetDriverByName('MEM')
geom = ogr.CreateGeometryFromWkt(str(geometry.exportToWkt()))
bbox = geometry.boundingBox()
x_min = bbox.xMinimum()
x_max = bbox.xMaximum()
y_min = bbox.yMinimum()
y_max = bbox.yMaximum()
start_column, start_row = map_to_pixel(x_min, y_max, geo_transform)
end_column, end_row = map_to_pixel(x_max, y_min, geo_transform)
width = end_column - start_column
height = end_row - start_row
if width == 0 or height == 0:
return 0, 0
src_offset = (start_column, start_row, width, height)
src_array = band.ReadAsArray(*src_offset)
new_geo_transform = (
(geo_transform[0] + (src_offset[0] * geo_transform[1])),
geo_transform[1],
0.0,
(geo_transform[3] + (src_offset[1] * geo_transform[5])),
0.0,
geo_transform[5]
)
# Create a temporary vector layer in memory
mem_ds = mem_drv.CreateDataSource('out')
mem_layer = mem_ds.CreateLayer('poly', crs, ogr.wkbPolygon)
feat = ogr.Feature(mem_layer.GetLayerDefn())
feat.SetGeometry(geom)
mem_layer.CreateFeature(feat)
feat.Destroy()
# Rasterize it
rasterized_ds = driver.Create('', src_offset[2], src_offset[3], 1,
gdal.GDT_Byte)
rasterized_ds.SetGeoTransform(new_geo_transform)
gdal.RasterizeLayer(rasterized_ds, [1], mem_layer, burn_values=[1])
rv_array = rasterized_ds.ReadAsArray()
# Mask the source data array with our current feature
# we take the logical_not to flip 0<->1 to get the correct mask effect
# we also mask out nodata values explicitly
src_array = numpy.nan_to_num(src_array)
masked = numpy.ma.MaskedArray(
src_array,
mask=numpy.logical_or(
src_array == no_data,
numpy.logical_not(rv_array)
)
)
my_sum = float(masked.sum())
my_count = int(masked.count())
return my_sum, my_count
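# Illustrative usage sketch (layer paths are assumed, not part of the
# original module): inside a QGIS/InaSAFE session one would typically call
#   raster = QgsRasterLayer('/path/to/population.tif', 'population')
#   polygons = QgsVectorLayer('/path/to/districts.shp', 'districts', 'ogr')
#   stats = calculate_zonal_stats(raster, polygons)
#   # stats -> {feature_id: {'sum': ..., 'count': ..., 'mean': ...}, ...}
# with QgsRasterLayer and QgsVectorLayer imported from qgis.core.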
|
danylaksono/inasafe
|
safe_qgis/impact_statistics/zonal_stats.py
|
Python
|
gpl-3.0
| 21,354
|
#!/usr/bin/env python
__all__ = ['color', 'grayscale', 'sstv', 'tests', 'examples']
|
dnet/pySSTV
|
pysstv/__init__.py
|
Python
|
mit
| 85
|
#!/usr/bin/env python3
"""
Plot form factor.
"""
import bornagain as ba
from bornagain import nm, deg
import bornplot as bp
det = ba.SphericalDetector(200, 5*deg, 2.5*deg, 2.5*deg)
n = 4
results = []
for i in range(n):
theta = 30*i/(n - 1)
title = r'$\vartheta=%d^\circ$' % theta
ff = ba.FormFactorCone(4*nm, 11*nm, 75*deg)
trafo = ba.RotationY(theta*deg)
data = bp.run_simulation(det, ff, trafo)
results.append(bp.Result(i, data, title))
bp.make_plot(results, det, "ff_Cone")
|
gpospelov/BornAgain
|
Doc/FFCatalog/fig/ff2/sim_Cone.py
|
Python
|
gpl-3.0
| 503
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import datetime as dt
import json
from django.test import TestCase, Client
from bs4 import BeautifulSoup as bs
from common.templatetags import common_tags as tags
from bulletins.models import BulletinEmail
class FilterTests(TestCase):
"""Tests for common/common_tags."""
fixtures = ['common/test_fixtures/new_data.json']
def test_zulu_time_filter(self):
"""Test conversion to zulu time."""
date_obj = dt.date(2017, 1, 1)
zulu_string = tags.zulu_time(date_obj)
assert zulu_string == "20170101T050000Z", zulu_string
date_obj = dt.datetime(2017, 1, 1, 0, 0)
zulu_string = tags.zulu_time(date_obj)
assert zulu_string == "20170101T050000Z", zulu_string
def test_generate_page_title(self):
"""Test deriving a <title> tag from a Page."""
with open('common/test_data/bulletins.bulletinemail.json') as f:
page = BulletinEmail.from_json(f.read(), check_fks=False)
title = tags.generate_page_title(page)
assert title == 'Test Site - Show up to Resist Trump, Resist Corporate Greed, and Support Workers!', title
# Check to make sure that we don't throw an error for page not found
assert tags.generate_page_title(None) == ''
def test_generate_page_description(self):
"""Test deriving a <meta description> tag from a page."""
with open('common/test_data/bulletins.bulletinemail.json') as f:
page = BulletinEmail.from_json(f.read(), check_fks=False)
desc = tags.generate_page_description(page)
assert desc == 'SEIU 32BJ Philadelphia Airport workers need your support on July 13. PHL Airport workers-who are mostly low-wage immigrants and people ...', desc
    def test_organization_jsonld(self):
"""Test rendering of organization JSON+LD."""
client = Client()
req = client.get('/')
soup = bs(req.content, 'html.parser')
org_json = json.loads(soup.find('script', {'id': 'orgjson'}).get_text())
assert org_json['name'] == 'Test Site'
assert org_json['url'] == 'http://localhost'
assert org_json['sameAs'] == ['https://www.facebook.com/example', 'https://www.twitter.com/example', 'https://instagram.com/example', 'https://youtube.com/example']
assert org_json['address']['addressStreet'] == '123 Main Street', org_json['address']
|
PhillyDSA/phillydsa-com
|
common/tests.py
|
Python
|
agpl-3.0
| 2,443
|
# This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
import unittest as ut
from h5py import h5p, h5f, version
from .common import TestCase
class TestLibver(TestCase):
"""
Feature: Setting/getting lib ver bounds
"""
def test_libver(self):
""" Test libver bounds set/get """
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST)
self.assertEqual((h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST),
plist.get_libver_bounds())
@ut.skipIf(version.hdf5_version_tuple < (1, 10, 2),
'Requires HDF5 1.10.2 or later')
def test_libver_v18(self):
""" Test libver bounds set/get for H5F_LIBVER_V18"""
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_EARLIEST, h5f.LIBVER_V18)
self.assertEqual((h5f.LIBVER_EARLIEST, h5f.LIBVER_V18),
plist.get_libver_bounds())
@ut.skipIf(version.hdf5_version_tuple < (1, 10, 2),
'Requires HDF5 1.10.2 or later')
def test_libver_v110(self):
""" Test libver bounds set/get for H5F_LIBVER_V110"""
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_V18, h5f.LIBVER_V110)
self.assertEqual((h5f.LIBVER_V18, h5f.LIBVER_V110),
plist.get_libver_bounds())
@ut.skipIf(version.hdf5_version_tuple < (1, 11, 4),
'Requires HDF5 1.11.4 or later')
def test_libver_v112(self):
""" Test libver bounds set/get for H5F_LIBVER_V112"""
plist = h5p.create(h5p.FILE_ACCESS)
plist.set_libver_bounds(h5f.LIBVER_V18, h5f.LIBVER_V112)
self.assertEqual((h5f.LIBVER_V18, h5f.LIBVER_V112),
plist.get_libver_bounds())
class TestDA(TestCase):
'''
Feature: setting/getting chunk cache size on a dataset access property list
'''
def test_chunk_cache(self):
'''test get/set chunk cache '''
dalist = h5p.create(h5p.DATASET_ACCESS)
nslots = 10000 # 40kb hash table
nbytes = 1000000 # 1MB cache size
w0 = .5 # even blend of eviction strategy
dalist.set_chunk_cache(nslots, nbytes, w0)
self.assertEqual((nslots, nbytes, w0),
dalist.get_chunk_cache())
class TestFA(TestCase):
'''
Feature: setting/getting mdc config on a file access property list
'''
def test_mdc_config(self):
'''test get/set mdc config '''
falist = h5p.create(h5p.FILE_ACCESS)
config = falist.get_mdc_config()
falist.set_mdc_config(config)
def test_set_alignment(self):
'''test get/set chunk cache '''
falist = h5p.create(h5p.FILE_ACCESS)
threshold = 10 * 1024 # threshold of 10kiB
        alignment = 1024 * 1024 # alignment of 1 MiB
falist.set_alignment(threshold, alignment)
self.assertEqual((threshold, alignment),
falist.get_alignment())
@ut.skipUnless(
version.hdf5_version_tuple >= (1, 12, 1) or
(version.hdf5_version_tuple[:2] == (1, 10) and version.hdf5_version_tuple[2] >= 7),
'Requires HDF5 1.12.1 or later or 1.10.x >= 1.10.7')
def test_set_file_locking(self):
'''test get/set file locking'''
falist = h5p.create(h5p.FILE_ACCESS)
use_file_locking = False
ignore_when_disabled = False
falist.set_file_locking(use_file_locking, ignore_when_disabled)
self.assertEqual((use_file_locking, ignore_when_disabled),
falist.get_file_locking())
class TestPL(TestCase):
def test_obj_track_times(self):
"""
tests if the object track times set/get
"""
# test for groups
gcid = h5p.create(h5p.GROUP_CREATE)
gcid.set_obj_track_times(False)
self.assertEqual(False, gcid.get_obj_track_times())
gcid.set_obj_track_times(True)
self.assertEqual(True, gcid.get_obj_track_times())
# test for datasets
dcid = h5p.create(h5p.DATASET_CREATE)
dcid.set_obj_track_times(False)
self.assertEqual(False, dcid.get_obj_track_times())
dcid.set_obj_track_times(True)
self.assertEqual(True, dcid.get_obj_track_times())
# test for generic objects
ocid = h5p.create(h5p.OBJECT_CREATE)
ocid.set_obj_track_times(False)
self.assertEqual(False, ocid.get_obj_track_times())
ocid.set_obj_track_times(True)
self.assertEqual(True, ocid.get_obj_track_times())
def test_link_creation_tracking(self):
"""
tests the link creation order set/get
"""
gcid = h5p.create(h5p.GROUP_CREATE)
gcid.set_link_creation_order(0)
self.assertEqual(0, gcid.get_link_creation_order())
flags = h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED
gcid.set_link_creation_order(flags)
self.assertEqual(flags, gcid.get_link_creation_order())
# test for file creation
fcpl = h5p.create(h5p.FILE_CREATE)
fcpl.set_link_creation_order(flags)
self.assertEqual(flags, fcpl.get_link_creation_order())
def test_attr_phase_change(self):
"""
test the attribute phase change
"""
cid = h5p.create(h5p.OBJECT_CREATE)
# test default value
ret = cid.get_attr_phase_change()
self.assertEqual((8,6), ret)
# max_compact must < 65536 (64kb)
with self.assertRaises(ValueError):
cid.set_attr_phase_change(65536, 6)
# Using dense attributes storage to avoid 64kb size limitation
# for a single attribute in compact attribute storage.
cid.set_attr_phase_change(0, 0)
self.assertEqual((0,0), cid.get_attr_phase_change())
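# Illustrative usage sketch (file name assumed): the low-level property lists
# exercised above back the high-level h5py.File options, e.g.
#   import h5py
#   with h5py.File('example.h5', 'w', libver=('earliest', 'latest')) as f:
#       f.create_dataset('data', data=range(10))
# sets the same libver bounds that TestLibver.test_libver checks directly on
# an h5p.FILE_ACCESS property list.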
|
h5py/h5py
|
h5py/tests/test_h5p.py
|
Python
|
bsd-3-clause
| 6,042
|
# Copyright 2005 Duke University
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Written by Seth Vidal
"""
Command line interface yum class and related.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from . import output
from dnf.cli import CliError
from dnf.i18n import ucd, _
import collections
import dnf
import dnf.cli.commands
import dnf.cli.commands.autoremove
import dnf.cli.commands.check
import dnf.cli.commands.clean
import dnf.cli.commands.distrosync
import dnf.cli.commands.downgrade
import dnf.cli.commands.remove
import dnf.cli.commands.group
import dnf.cli.commands.install
import dnf.cli.commands.makecache
import dnf.cli.commands.mark
import dnf.cli.commands.reinstall
import dnf.cli.commands.repolist
import dnf.cli.commands.repoquery
import dnf.cli.commands.search
import dnf.cli.commands.shell
import dnf.cli.commands.swap
import dnf.cli.commands.updateinfo
import dnf.cli.commands.upgrade
import dnf.cli.commands.upgrademinimal
import dnf.cli.demand
import dnf.cli.option_parser
import dnf.conf
import dnf.conf.parser
import dnf.conf.substitutions
import dnf.const
import dnf.exceptions
import dnf.cli.format
import dnf.logging
import dnf.plugin
import dnf.persistor
import dnf.rpm
import dnf.sack
import dnf.util
import dnf.yum.misc
import hawkey
import logging
import operator
import os
import re
import sys
import time
logger = logging.getLogger('dnf')
def _add_pkg_simple_list_lens(data, pkg, indent=''):
""" Get the length of each pkg's column. Add that to data.
This "knows" about simpleList and printVer. """
na = len(pkg.name) + 1 + len(pkg.arch) + len(indent)
ver = len(pkg.evr)
rid = len(pkg._from_repo)
for (d, v) in (('na', na), ('ver', ver), ('rid', rid)):
data[d].setdefault(v, 0)
data[d][v] += 1
def _list_cmd_calc_columns(output, ypl):
""" Work out the dynamic size of the columns to pass to fmtColumns. """
data = {'na' : {}, 'ver' : {}, 'rid' : {}}
for lst in (ypl.installed, ypl.available, ypl.extras, ypl.autoremove,
ypl.updates, ypl.recent):
for pkg in lst:
_add_pkg_simple_list_lens(data, pkg)
if len(ypl.obsoletes) > 0:
for (npkg, opkg) in ypl.obsoletesTuples:
_add_pkg_simple_list_lens(data, npkg)
_add_pkg_simple_list_lens(data, opkg, indent=" " * 4)
data = [data['na'], data['ver'], data['rid']]
columns = output.calcColumns(data, remainder_column=1)
return (-columns[0], -columns[1], -columns[2])
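# Illustrative example (package names and widths assumed, not from the
# original source): after two packages whose "name.arch" strings are 20 and
# 24 characters wide have been added, the intermediate histogram looks
# roughly like
#   data = {'na': {20: 1, 24: 1}, 'ver': {9: 2}, 'rid': {7: 2}}
# and output.calcColumns() turns those width histograms into the three fixed
# column widths that are passed to fmtColumns() for "dnf list" style output.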
def print_versions(pkgs, base, output):
def sm_ui_time(x):
return time.strftime("%Y-%m-%d %H:%M", time.gmtime(x))
def sm_ui_date(x): # For changelogs, there is no time
return time.strftime("%Y-%m-%d", time.gmtime(x))
rpmdb_sack = dnf.sack._rpmdb_sack(base)
done = False
for pkg in rpmdb_sack.query().installed().filter(name=pkgs):
if done:
print("")
done = True
if pkg.epoch == '0':
ver = '%s-%s.%s' % (pkg.version, pkg.release, pkg.arch)
else:
ver = '%s:%s-%s.%s' % (pkg.epoch,
pkg.version, pkg.release, pkg.arch)
name = output.term.bold(pkg.name)
print(_(" Installed: %s-%s at %s") %(name, ver,
sm_ui_time(pkg.installtime)))
print(_(" Built : %s at %s") % (pkg.packager if pkg.packager else "",
sm_ui_time(pkg.buildtime)))
# :hawkey, no changelist information yet
# print(_(" Committed: %s at %s") % (pkg.committer,
# sm_ui_date(pkg.committime)))
class BaseCli(dnf.Base):
"""This is the base class for yum cli."""
def __init__(self, conf=None):
conf = conf or dnf.conf.Conf()
super(BaseCli, self).__init__(conf=conf)
self.output = output.Output(self, self.conf)
def _groups_diff(self):
if not self._group_persistor:
return None
return self._group_persistor.diff()
def do_transaction(self, display=()):
"""Take care of package downloading, checking, user
confirmation and actually running the transaction.
:param display: `rpm.callback.TransactionProgress` object(s)
:return: a numeric return code, and optionally a list of
errors. A negative return code indicates that errors
occurred in the pre-transaction checks
"""
grp_diff = self._groups_diff()
grp_str = self.output.list_group_transaction(self.comps, self._group_persistor, grp_diff)
if grp_str:
logger.info(grp_str)
trans = self.transaction
pkg_str = self.output.list_transaction(trans)
if pkg_str:
logger.info(pkg_str)
if trans:
# Check which packages have to be downloaded
install_pkgs = []
rmpkgs = []
install_only = True
for tsi in trans:
installed = tsi.installed
if installed is not None:
install_pkgs.append(installed)
erased = tsi.erased
if erased is not None:
install_only = False
rmpkgs.append(erased)
# Close the connection to the rpmdb so that rpm doesn't hold the
# SIGINT handler during the downloads.
del self._ts
# report the total download size to the user
if not install_pkgs:
self.output.reportRemoveSize(rmpkgs)
else:
self.output.reportDownloadSize(install_pkgs, install_only)
if trans or (grp_diff and not grp_diff.empty()):
# confirm with user
if self._promptWanted():
if self.conf.assumeno or not self.output.userconfirm():
raise CliError(_("Operation aborted."))
else:
logger.info(_('Nothing to do.'))
return
if trans:
remote_pkgs = [pkg for pkg in install_pkgs if not pkg._is_local_pkg()]
if remote_pkgs:
logger.info(_('Downloading Packages:'))
try:
total_cb = self.output.download_callback_total_cb
self.download_packages(remote_pkgs, self.output.progress,
total_cb)
except dnf.exceptions.DownloadError as e:
specific = dnf.cli.format.indent_block(ucd(e))
errstr = _('Error downloading packages:') + '\n%s' % specific
# setting the new line to prevent next chars being eaten up
# by carriage returns
print()
raise dnf.exceptions.Error(errstr)
# Check GPG signatures
self.gpgsigcheck(install_pkgs)
if self.conf.downloadonly:
return
if not isinstance(display, collections.Sequence):
display = [display]
display = [output.CliTransactionDisplay()] + list(display)
super(BaseCli, self).do_transaction(display)
if trans:
msg = self.output.post_transaction_output(trans)
logger.info(msg)
for tsi in trans:
if tsi.op_type == dnf.transaction.FAIL:
raise dnf.exceptions.Error(_('Transaction failed'))
def gpgsigcheck(self, pkgs):
"""Perform GPG signature verification on the given packages,
installing keys if possible.
:param pkgs: a list of package objects to verify the GPG
signatures of
:return: non-zero if execution should stop due to an error
:raises: Will raise :class:`Error` if there's a problem
"""
for po in pkgs:
result, errmsg = self._sig_check_pkg(po)
if result == 0:
# Verified ok, or verify not req'd
continue
elif result == 1:
ay = self.conf.assumeyes and not self.conf.assumeno
if (not sys.stdin or not sys.stdin.isatty()) and not ay:
raise dnf.exceptions.Error(_('Refusing to automatically import keys when running ' \
'unattended.\nUse "-y" to override.'))
# the callback here expects to be able to take options which
# userconfirm really doesn't... so fake it
fn = lambda x, y, z: self.output.userconfirm()
self._get_key_for_package(po, fn)
else:
# Fatal error
raise dnf.exceptions.Error(errmsg)
return 0
def check_updates(self, patterns=(), reponame=None, print_=True):
"""Check updates matching given *patterns* in selected repository."""
ypl = self.returnPkgLists('upgrades', patterns, reponame=reponame)
if self.conf.obsoletes or self.conf.verbose:
typl = self.returnPkgLists('obsoletes', patterns, reponame=reponame)
ypl.obsoletes = typl.obsoletes
ypl.obsoletesTuples = typl.obsoletesTuples
if print_:
columns = _list_cmd_calc_columns(self.output, ypl)
if len(ypl.updates) > 0:
local_pkgs = {}
highlight = self.output.term.MODE['bold']
if highlight:
# Do the local/remote split we get in "yum updates"
for po in sorted(ypl.updates):
local = po.localPkg()
if os.path.exists(local) and po.verifyLocalPkg():
local_pkgs[(po.name, po.arch)] = po
cul = self.conf.color_update_local
cur = self.conf.color_update_remote
self.output.listPkgs(ypl.updates, '', outputType='list',
highlight_na=local_pkgs, columns=columns,
highlight_modes={'=' : cul, 'not in' : cur})
if len(ypl.obsoletes) > 0:
print(_('Obsoleting Packages'))
# The tuple is (newPkg, oldPkg) ... so sort by new
for obtup in sorted(ypl.obsoletesTuples,
key=operator.itemgetter(0)):
self.output.updatesObsoletesList(obtup, 'obsoletes',
columns=columns)
return ypl.updates or ypl.obsoletes
def distro_sync_userlist(self, userlist):
""" Upgrade or downgrade packages to match the latest versions available
in the enabled repositories.
:return: (exit_code, [ errors ])
exit_code is::
0 = we're done, exit
1 = we've errored, exit with error string
2 = we've got work yet to do, onto the next stage
"""
oldcount = self._goal.req_length()
if len(userlist) == 0:
self.distro_sync()
else:
for pkg_spec in userlist:
self.distro_sync(pkg_spec)
cnt = self._goal.req_length() - oldcount
if cnt <= 0 and not self._goal.req_has_distupgrade_all():
msg = _('No packages marked for distribution synchronization.')
raise dnf.exceptions.Error(msg)
def downgradePkgs(self, specs=[], file_pkgs=[], strict=False):
"""Attempt to take the user specified list of packages or
wildcards and downgrade them. If a complete version number is
specified, attempt to downgrade them to the specified version
:param specs: a list of names or wildcards specifying packages to downgrade
:param file_pkgs: a list of pkg objects from local files
"""
oldcount = self._goal.req_length()
for pkg in file_pkgs:
try:
self.package_downgrade(pkg, strict=strict)
continue # it was something on disk and it ended in rpm
# no matter what we don't go looking at repos
except dnf.exceptions.MarkingError as e:
logger.info(_('No match for argument: %s'),
self.output.term.bold(pkg.location))
# it was something on disk and it ended in rpm
# no matter what we don't go looking at repos
for arg in specs:
try:
self.downgrade_to(arg, strict=strict)
except dnf.exceptions.PackageNotFoundError as err:
msg = _('No package %s available.')
logger.info(msg, self.output.term.bold(arg))
except dnf.exceptions.PackagesNotInstalledError as err:
logger.info(_('Packages for argument %s available, but not installed.'),
self.output.term.bold(err.pkg_spec))
except dnf.exceptions.MarkingError:
assert False
cnt = self._goal.req_length() - oldcount
if cnt <= 0:
raise dnf.exceptions.Error(_('Nothing to do.'))
def output_packages(self, basecmd, pkgnarrow='all', patterns=(), reponame=None):
"""Output selection *pkgnarrow* of packages matching *patterns* and *repoid*."""
try:
highlight = self.output.term.MODE['bold']
ypl = self.returnPkgLists(
pkgnarrow, patterns, installed_available=highlight, reponame=reponame)
except dnf.exceptions.Error as e:
return 1, [str(e)]
else:
update_pkgs = {}
inst_pkgs = {}
local_pkgs = {}
columns = None
if basecmd == 'list':
# Dynamically size the columns
columns = _list_cmd_calc_columns(self.output, ypl)
if highlight and ypl.installed:
# If we have installed and available lists, then do the
# highlighting for the installed packages so you can see what's
# available to update, an extra, or newer than what we have.
for pkg in (ypl.hidden_available +
ypl.reinstall_available +
ypl.old_available):
key = (pkg.name, pkg.arch)
if key not in update_pkgs or pkg > update_pkgs[key]:
update_pkgs[key] = pkg
if highlight and ypl.available:
# If we have installed and available lists, then do the
# highlighting for the available packages so you can see what's
# available to install vs. update vs. old.
for pkg in ypl.hidden_installed:
key = (pkg.name, pkg.arch)
if key not in inst_pkgs or pkg > inst_pkgs[key]:
inst_pkgs[key] = pkg
if highlight and ypl.updates:
# Do the local/remote split we get in "yum updates"
for po in sorted(ypl.updates):
if po.reponame != hawkey.SYSTEM_REPO_NAME:
local_pkgs[(po.name, po.arch)] = po
# Output the packages:
clio = self.conf.color_list_installed_older
clin = self.conf.color_list_installed_newer
clir = self.conf.color_list_installed_reinstall
clie = self.conf.color_list_installed_extra
rip = self.output.listPkgs(ypl.installed, _('Installed Packages'), basecmd,
highlight_na=update_pkgs, columns=columns,
highlight_modes={'>' : clio, '<' : clin,
'=' : clir, 'not in' : clie})
clau = self.conf.color_list_available_upgrade
clad = self.conf.color_list_available_downgrade
clar = self.conf.color_list_available_reinstall
clai = self.conf.color_list_available_install
rap = self.output.listPkgs(ypl.available, _('Available Packages'), basecmd,
highlight_na=inst_pkgs, columns=columns,
highlight_modes={'<' : clau, '>' : clad,
'=' : clar, 'not in' : clai})
raep = self.output.listPkgs(ypl.autoremove, _('Autoremove Packages'),
basecmd, columns=columns)
rep = self.output.listPkgs(ypl.extras, _('Extra Packages'), basecmd,
columns=columns)
cul = self.conf.color_update_local
cur = self.conf.color_update_remote
rup = self.output.listPkgs(ypl.updates, _('Upgraded Packages'), basecmd,
highlight_na=local_pkgs, columns=columns,
highlight_modes={'=' : cul, 'not in' : cur})
# XXX put this into the ListCommand at some point
if len(ypl.obsoletes) > 0 and basecmd == 'list':
# if we've looked up obsolete lists and it's a list request
rop = [0, '']
print(_('Obsoleting Packages'))
for obtup in sorted(ypl.obsoletesTuples,
key=operator.itemgetter(0)):
self.output.updatesObsoletesList(obtup, 'obsoletes',
columns=columns)
else:
rop = self.output.listPkgs(ypl.obsoletes, _('Obsoleting Packages'),
basecmd, columns=columns)
rrap = self.output.listPkgs(ypl.recent, _('Recently Added Packages'),
basecmd, columns=columns)
if len(patterns) and \
rrap[0] and rop[0] and rup[0] and rep[0] and rap[0] and \
raep[0] and rip[0]:
raise dnf.exceptions.Error(_('No matching Packages to list'))
def returnPkgLists(self, pkgnarrow='all', patterns=None,
installed_available=False, reponame=None):
"""Return a :class:`dnf.yum.misc.GenericHolder` object containing
lists of package objects that match the given names or wildcards.
:param pkgnarrow: a string specifying which types of packages
lists to produce, such as updates, installed, available, etc.
:param patterns: a list of names or wildcards specifying
packages to list
:param installed_available: whether the available package list
is present as .hidden_available when doing all, available,
or installed
:param reponame: limit packages list to the given repository
:return: a :class:`dnf.yum.misc.GenericHolder` instance with the
following lists defined::
available = list of packageObjects
installed = list of packageObjects
upgrades = tuples of packageObjects (updating, installed)
extras = list of packageObjects
obsoletes = tuples of packageObjects (obsoleting, installed)
recent = list of packageObjects
"""
done_hidden_available = False
done_hidden_installed = False
if installed_available and pkgnarrow == 'installed':
done_hidden_available = True
pkgnarrow = 'all'
elif installed_available and pkgnarrow == 'available':
done_hidden_installed = True
pkgnarrow = 'all'
ypl = self._do_package_lists(
pkgnarrow, patterns, ignore_case=True, reponame=reponame)
if self.conf.showdupesfromrepos:
ypl.available += ypl.reinstall_available
if installed_available:
ypl.hidden_available = ypl.available
ypl.hidden_installed = ypl.installed
if done_hidden_available:
ypl.available = []
if done_hidden_installed:
ypl.installed = []
return ypl
def provides(self, args):
"""Print out a list of packages that provide the given file or
feature. This a cli wrapper to the provides methods in the
rpmdb and pkgsack.
:param args: the name of a file or feature to search for
:return: (exit_code, [ errors ])
exit_code is::
0 = we're done, exit
1 = we've errored, exit with error string
2 = we've got work yet to do, onto the next stage
"""
# always in showdups mode
old_sdup = self.conf.showdupesfromrepos
self.conf.showdupesfromrepos = True
matches = []
for spec in args:
            matches.extend(super(BaseCli, self).provides(spec))
for pkg in matches:
self.output.matchcallback_verbose(pkg, [], args)
self.conf.showdupesfromrepos = old_sdup
if not matches:
raise dnf.exceptions.Error(_('No Matches found'))
def _promptWanted(self):
# shortcut for the always-off/always-on options
if self.conf.assumeyes and not self.conf.assumeno:
return False
if self.conf.alwaysprompt:
return True
# prompt if:
# package was added to fill a dependency
# package is being removed
# package wasn't explicitly given on the command line
for txmbr in self.tsInfo.getMembers():
if txmbr.isDep or \
txmbr.name not in self.extcmds:
return True
# otherwise, don't prompt
return False
def _history_get_transactions(self, extcmds):
if not extcmds:
logger.critical(_('No transaction ID given'))
return None
old = self.history.old(extcmds)
if not old:
            logger.critical(_('Given transaction ID not found'))
return None
return old
def history_get_transaction(self, extcmds):
old = self._history_get_transactions(extcmds)
if old is None:
return None
if len(old) > 1:
logger.critical(_('Found more than one transaction ID!'))
return old[0]
def history_rollback_transaction(self, extcmd):
"""Rollback given transaction."""
old = self.history_get_transaction((extcmd,))
if old is None:
return 1, ['Failed history rollback, no transaction']
last = self.history.last()
if last is None:
return 1, ['Failed history rollback, no last?']
if old.tid == last.tid:
return 0, ['Rollback to current, nothing to do']
mobj = None
for tid in self.history.old(list(range(old.tid + 1, last.tid + 1))):
if tid.altered_lt_rpmdb:
logger.warning(_('Transaction history is incomplete, before %u.'), tid.tid)
elif tid.altered_gt_rpmdb:
logger.warning(_('Transaction history is incomplete, after %u.'), tid.tid)
if mobj is None:
mobj = dnf.yum.history.YumMergedHistoryTransaction(tid)
else:
mobj.merge(tid)
tm = dnf.util.normalize_time(old.beg_timestamp)
print("Rollback to transaction %u, from %s" % (old.tid, tm))
print(self.output.fmtKeyValFill(" Undoing the following transactions: ",
", ".join((str(x) for x in mobj.tid))))
self.output.historyInfoCmdPkgsAltered(mobj) # :todo
history = dnf.history.open_history(self.history) # :todo
operations = dnf.history.NEVRAOperations()
for id_ in range(old.tid + 1, last.tid + 1):
operations += history.transaction_nevra_ops(id_)
try:
self._history_undo_operations(operations)
except dnf.exceptions.PackagesNotInstalledError as err:
logger.info(_('No package %s installed.'),
self.output.term.bold(ucd(err.pkg_spec)))
return 1, ['A transaction cannot be undone']
except dnf.exceptions.PackagesNotAvailableError as err:
logger.info(_('No package %s available.'),
self.output.term.bold(ucd(err.pkg_spec)))
return 1, ['A transaction cannot be undone']
except dnf.exceptions.MarkingError:
assert False
else:
return 2, ["Rollback to transaction %u" % (old.tid,)]
def history_undo_transaction(self, extcmd):
"""Undo given transaction."""
old = self.history_get_transaction((extcmd,))
if old is None:
return 1, ['Failed history undo']
tm = dnf.util.normalize_time(old.beg_timestamp)
print("Undoing transaction %u, from %s" % (old.tid, tm))
self.output.historyInfoCmdPkgsAltered(old) # :todo
history = dnf.history.open_history(self.history) # :todo
try:
self._history_undo_operations(history.transaction_nevra_ops(old.tid))
except dnf.exceptions.PackagesNotInstalledError as err:
logger.info(_('No package %s installed.'),
self.output.term.bold(ucd(err.pkg_spec)))
return 1, ['An operation cannot be undone']
except dnf.exceptions.PackagesNotAvailableError as err:
logger.info(_('No package %s available.'),
self.output.term.bold(ucd(err.pkg_spec)))
return 1, ['An operation cannot be undone']
except dnf.exceptions.MarkingError:
assert False
else:
return 2, ["Undoing transaction %u" % (old.tid,)]
class Cli(object):
def __init__(self, base):
self.base = base
self.cli_commands = {}
self.command = None
self.demands = dnf.cli.demand.DemandSheet() #:cli
self.register_command(dnf.cli.commands.autoremove.AutoremoveCommand)
self.register_command(dnf.cli.commands.check.CheckCommand)
self.register_command(dnf.cli.commands.clean.CleanCommand)
self.register_command(dnf.cli.commands.distrosync.DistroSyncCommand)
self.register_command(dnf.cli.commands.downgrade.DowngradeCommand)
self.register_command(dnf.cli.commands.group.GroupCommand)
self.register_command(dnf.cli.commands.install.InstallCommand)
self.register_command(dnf.cli.commands.makecache.MakeCacheCommand)
self.register_command(dnf.cli.commands.mark.MarkCommand)
self.register_command(dnf.cli.commands.reinstall.ReinstallCommand)
self.register_command(dnf.cli.commands.remove.RemoveCommand)
self.register_command(dnf.cli.commands.repolist.RepoListCommand)
self.register_command(dnf.cli.commands.repoquery.RepoQueryCommand)
self.register_command(dnf.cli.commands.search.SearchCommand)
self.register_command(dnf.cli.commands.shell.ShellCommand)
self.register_command(dnf.cli.commands.swap.SwapCommand)
self.register_command(dnf.cli.commands.updateinfo.UpdateInfoCommand)
self.register_command(dnf.cli.commands.upgrade.UpgradeCommand)
self.register_command(dnf.cli.commands.upgrademinimal.UpgradeMinimalCommand)
self.register_command(dnf.cli.commands.InfoCommand)
self.register_command(dnf.cli.commands.ListCommand)
self.register_command(dnf.cli.commands.ProvidesCommand)
self.register_command(dnf.cli.commands.CheckUpdateCommand)
self.register_command(dnf.cli.commands.RepoPkgsCommand)
self.register_command(dnf.cli.commands.HelpCommand)
self.register_command(dnf.cli.commands.HistoryCommand)
def _configure_repos(self, opts):
self.base.read_all_repos(opts)
if opts.repofrompath:
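            # --repofrompath: build an ad-hoc repo for each label/path pair,
            # turning bare filesystem paths into file:// URLs, and force-enable it below.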
for label, path in opts.repofrompath.items():
if '://' not in path:
path = 'file://{}'.format(os.path.abspath(path))
repofp = dnf.repo.Repo(label, self.base.conf)
try:
repofp.baseurl = path
except ValueError as e:
raise dnf.exceptions.RepoError(e)
self.base.repos.add(repofp)
logger.info(_("Added %s repo from %s"), label, path)
                # do not let this repo be disabled
opts.repos_ed.append((label, "enable"))
if opts.repo:
opts.repos_ed.insert(0, ("*", "disable"))
opts.repos_ed.extend([(r, "enable") for r in opts.repo])
notmatch = set()
# Process repo enables and disables in order
try:
for (repo, operation) in opts.repos_ed:
repolist = self.base.repos.get_matching(repo)
if not repolist:
if self.base.conf.strict and operation == "enable":
msg = _("Unknown repo: '%s'")
raise dnf.exceptions.RepoError(msg % repo)
notmatch.add(repo)
if operation == "enable":
repolist.enable()
else:
repolist.disable()
except dnf.exceptions.ConfigError as e:
logger.critical(e)
self.optparser.print_help()
sys.exit(1)
for repo in notmatch:
logger.warning(_("No repository match: %s"), repo)
for rid in self.base._repo_persistor.get_expired_repos():
repo = self.base.repos.get(rid)
if repo:
repo._md_expire_cache()
# setup the progress bars/callbacks
(bar, self.base._ds_callback) = self.base.output.setup_progress_callbacks()
self.base.repos.all().set_progress_bar(bar)
key_import = output.CliKeyImport(self.base, self.base.output)
self.base.repos.all()._set_key_import(key_import)
def _log_essentials(self):
logger.debug('DNF version: %s', dnf.const.VERSION)
logger.log(dnf.logging.DDEBUG,
'Command: %s', self.cmdstring)
logger.log(dnf.logging.DDEBUG,
'Installroot: %s', self.base.conf.installroot)
logger.log(dnf.logging.DDEBUG, 'Releasever: %s',
self.base.conf.releasever)
logger.debug("cachedir: %s", self.base.conf.cachedir)
def _process_demands(self):
demands = self.demands
repos = self.base.repos
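        # Enforce the requirements the active command declared on the demand
        # sheet: root privileges, metadata freshness, and sack activation.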
if demands.root_user:
            if not os.geteuid() == 0:
raise dnf.exceptions.Error(_('This command has to be run under the root user.'))
if not demands.cacheonly:
if demands.freshest_metadata:
for repo in repos.iter_enabled():
repo._md_expire_cache()
elif not demands.fresh_metadata:
for repo in repos.values():
repo._md_lazy = True
if demands.sack_activation:
self.base.fill_sack(load_system_repo='auto',
load_available_repos=self.demands.available_repos)
def _parse_commands(self, opts, args):
"""Check that the requested CLI command exists."""
basecmd = opts.command
command_cls = self.cli_commands.get(basecmd)
if command_cls is None:
logger.critical(_('No such command: %s. Please use %s --help'),
basecmd, sys.argv[0])
if self.base.conf.plugins:
logger.critical(_("It could be a DNF plugin command, "
"try: \"dnf install 'dnf-command(%s)'\""), basecmd)
else:
logger.critical(_("It could be a DNF plugin command, "
"but loading of plugins is currently disabled."))
raise CliError
self.command = command_cls(self)
logger.log(dnf.logging.DDEBUG, 'Base command: %s', basecmd)
logger.log(dnf.logging.DDEBUG, 'Extra commands: %s', args)
def configure(self, args, option_parser=None):
"""Parse command line arguments, and set up :attr:`self.base.conf` and
:attr:`self.cmds`, as well as logger objects in base instance.
:param args: a list of command line arguments
:param option_parser: a class for parsing cli options
"""
self.optparser = dnf.cli.option_parser.OptionParser() \
if option_parser is None else option_parser
opts = self.optparser.parse_main_args(args)
# Just print out the version if that's what the user wanted
if opts.version:
print(dnf.const.VERSION)
print_versions(self.base.conf.history_record_packages, self.base,
self.base.output)
sys.exit(0)
if opts.quiet:
opts.debuglevel = 0
opts.errorlevel = 0
if opts.verbose:
opts.debuglevel = opts.errorlevel = dnf.const.VERBOSE_LEVEL
# Read up configuration options and initialize plugins
try:
self.base.conf._configure_from_options(opts)
self._read_conf_file(opts.releasever)
self.base.conf._adjust_conf_options()
except (dnf.exceptions.ConfigError, ValueError) as e:
logger.critical(_('Config error: %s'), e)
sys.exit(1)
except IOError as e:
e = '%s: %s' % (ucd(e.args[1]), repr(e.filename))
logger.critical(_('Config error: %s'), e)
sys.exit(1)
# store the main commands & summaries, before plugins are loaded
self.optparser.add_commands(self.cli_commands, 'main')
# store the plugin commands & summaries
self.base.init_plugins(opts.disableplugin, self)
self.optparser.add_commands(self.cli_commands,'plugin')
# show help if no command specified
# this is done here, because we first have the full
# usage info after the plugins are loaded.
if not opts.command:
self.optparser.print_help()
sys.exit(0)
# save our original args out
self.base.args = args
# save out as a nice command string
self.cmdstring = dnf.const.PROGRAM_NAME + ' '
for arg in self.base.args:
self.cmdstring += '%s ' % arg
self._log_essentials()
try:
self._parse_commands(opts, args)
except CliError:
sys.exit(1)
# show help for dnf <command> --help / --help-cmd
if opts.help:
self.optparser.print_help(self.command)
sys.exit(0)
opts = self.optparser.parse_command_args(self.command, args)
if opts.allowerasing:
self.demands.allow_erasing = opts.allowerasing
if opts.freshest_metadata:
self.demands.freshest_metadata = opts.freshest_metadata
if opts.debugsolver:
self.base.conf.debug_solver = True
if opts.cacheonly:
self.demands.cacheonly = True
if opts.obsoletes:
self.base.conf.obsoletes = True
# with cachedir in place we can configure stuff depending on it:
self.base._activate_persistor()
self._configure_repos(opts)
self.base.configure_plugins()
self.base.conf._configure_from_options(opts)
self.command.configure()
if self.base.conf.color != 'auto':
self.base.output.term.reinit(color=self.base.conf.color)
def _read_conf_file(self, releasever=None):
timer = dnf.logging.Timer('config')
conf = self.base.conf
# search config file inside the installroot first
conf._search_inside_installroot('config_file_path')
# read config
conf.read(priority=dnf.conf.PRIO_MAINCONFIG)
# search reposdir file inside the installroot first
conf._search_inside_installroot('reposdir')
# cachedir, logs, releasever, and gpgkey are taken from or stored in installroot
if releasever is None:
releasever = dnf.rpm.detect_releasever(conf.installroot)
conf.releasever = releasever
subst = conf.substitutions
subst.update_from_etc(conf.installroot)
for opt in ('cachedir', 'logdir', 'persistdir'):
conf.prepend_installroot(opt)
self.base._logging._setup_from_dnf_conf(conf)
timer()
return conf
def _populate_update_security_filter(self, opts, minimal=None, all=None):
if (opts is None) and (all is None):
return
q = self.base.sack.query()
filters = []
if opts.bugfix or all:
filters.append(q.filter(advisory_type='bugfix'))
if opts.enhancement or all:
filters.append(q.filter(advisory_type='enhancement'))
if opts.newpackage or all:
filters.append(q.filter(advisory_type='newpackage'))
if opts.security or all:
filters.append(q.filter(advisory_type='security'))
if opts.advisory:
filters.append(q.filter(advisory=opts.advisory))
if opts.bugzilla:
filters.append(q.filter(advisory_bug=opts.bugzilla))
if opts.cves:
filters.append(q.filter(advisory_cve=opts.cves))
if opts.severity:
filters.append(q.filter(advisory_severity=opts.severity))
if len(filters):
key = 'upgrade' if minimal is None else 'minimal'
self.base._update_security_filters[key] = filters
def redirect_logger(self, stdout=None, stderr=None):
"""
        Change the minimal logger level for terminal output to stdout and stderr according to
        the specific command's requirements.
        @param stdout: logging.INFO, logging.WARNING, ...
        @param stderr: logging.INFO, logging.WARNING, ...
"""
if stdout is not None:
self.base._logging.stdout_handler.setLevel(stdout)
if stderr is not None:
self.base._logging.stderr_handler.setLevel(stderr)
def _option_conflict(self, option_string_1, option_string_2):
print(self.optparser.print_usage())
        raise dnf.exceptions.Error(_("argument {}: not allowed with argument {}").format(
            option_string_1, option_string_2))
def register_command(self, command_cls):
"""Register a Command. :api"""
for name in command_cls.aliases:
if name in self.cli_commands:
raise dnf.exceptions.ConfigError(_('Command "%s" already defined') % name)
self.cli_commands[name] = command_cls
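        # Hypothetical usage sketch (not from this file): a plugin typically calls
        # cli.register_command(MyCommand) during initialization so that
        # MyCommand.aliases become available as subcommands.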
def run(self):
"""Call the base command, and pass it the extended commands or
arguments.
:return: (exit_code, [ errors ])
exit_code is::
0 = we're done, exit
1 = we've errored, exit with error string
2 = we've got work yet to do, onto the next stage
"""
self._process_demands()
return self.command.run()
|
jsilhan/dnf
|
dnf/cli/cli.py
|
Python
|
gpl-2.0
| 39,626
|
# -*- coding: utf-8 -*-
import json
from django.contrib.auth import authenticate
from django.test import TestCase
from django.test.client import RequestFactory
from protoExt.views.protoSaveProtoObj import protoSaveProtoObj
from protoLib.tests.dataSetup import createAuthExt, MySession
from protoExt.views.protoGetPci import protoGetPCI
from protoExt.models import CustomDefinition
class ProtoSaveProtoObjTest(TestCase):
def setUp(self):
createAuthExt()
userdata = {'login': 'A', 'password': '1' }
self.user = authenticate(username=userdata['login'], password=userdata['password'])
# Every test needs access to the request factory.
self.factory = RequestFactory()
self.request = self.factory.post('/protoSaveProtoObj')
self.request.session = MySession()
self.request.user = self.user
self.request.method = 'POST'
self.userdata = { 'viewCode' : 'protoLib.UserProfile' }
self.request.POST = self.userdata
def tearDown(self):
pass
def test_protosaveprotoobj_save_pci(self):
"""
Agrega un campo a la definicion de la pcl,
Guarda la definicion
Lee y verifica el campo
"""
from protoExt.tests.data_protolib_userprofile_pci import DATA_PCI_protoLib_UserProfile
oMeta = DATA_PCI_protoLib_UserProfile['protoMeta']
oMeta['gridConfig']['listDisplay'].append('userTeam')
sMeta = json.dumps( oMeta )
self.userdata = {
'viewCode' : 'protoLib.UserProfile',
'protoMeta' : sMeta
}
self.request.POST = self.userdata
        response = protoSaveProtoObj( self.request )
        returnMessage = json.loads( response.content.decode('utf-8'))
self.assertTrue(returnMessage['success'])
self.userdata = {
'viewCode' : 'protoLib.UserProfile',
'protoMeta' : sMeta
}
self.request.POST = self.userdata
        response = protoGetPCI( self.request )
        returnMessage = json.loads( response.content.decode('utf-8'))
self.assertTrue(returnMessage['success'])
oMeta = returnMessage['protoMeta']
        self.assertEqual( len(oMeta['gridConfig']['listDisplay'] ), 2, 'listDisplay != 2')
def test_protosaveprotoobj_custom_test(self):
"""
Elimina datos de CustomDefinition
Guarda la definicion
Lee CustomDefinition y verifica el campo
"""
self.userdata = {
'viewCode' : '_custom_test',
'protoMeta' : '[0]'
}
self.request.POST = self.userdata
CustomDefinition.objects.filter(code = self.userdata['viewCode'] ).delete()
        response = protoSaveProtoObj( self.request )
        returnMessage = json.loads( response.content.decode('utf-8'))
self.assertTrue(returnMessage['success'])
cData = CustomDefinition.objects.get(
code = self.userdata['viewCode'],
smOwningUser = self.request.user
)
self.assertEqual( cData.metaDefinition[0], 0)
|
DarioGT/docker-carra
|
src/protoExt/tests/test_ProtoSaveProtoObj.py
|
Python
|
mit
| 3,156
|
"""
Django settings for scheduler project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # ../../scheduler
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__)) #../scheduler
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'i0e=^n_5f5($$wa17s&mif^vlfya1%$57gx)*2m--a16u#rh5y'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'chronassist',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.i18n',
'django.core.context_processors.debug',
'django.core.context_processors.request',
'django.core.context_processors.media',
'django.core.context_processors.csrf',
'django.core.context_processors.tz',
'django.core.context_processors.static',
)
TEMPLATE_DIRS = (
os.path.join(PROJECT_PATH, "templates"),
)
ROOT_URLCONF = 'scheduler.urls'
WSGI_APPLICATION = 'scheduler.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
#STATIC_ROOT = PROJECT_PATH+'/static/'
MEDIA_ROOT = PROJECT_PATH+'/media/' #../../scheduler/scheduler/media
MEDIA_URL = '/media/'
STATICFILES_DIRS = (
os.path.join(PROJECT_PATH, "static"),
)
|
anshulthakur/scheduler
|
scheduler/scheduler/settings.py
|
Python
|
gpl-2.0
| 2,820
|
"""Models for room meta information"""
import os
from os.path import exists, join
from django.db import models
from nav.models.profiles import Account
from nav.models.manage import Room
from nav.models.fields import VarcharField
from nav.path import localstatedir
ROOMIMAGEPATH = join(localstatedir, 'uploads', 'images', 'rooms')
class Image(models.Model):
"""Model representing an uploaded image"""
id = models.AutoField(db_column='imageid', primary_key=True)
room = models.ForeignKey(Room, db_column='roomid')
title = VarcharField()
path = VarcharField()
name = VarcharField()
created = models.DateTimeField(auto_now_add=True)
uploader = models.ForeignKey(Account, db_column='uploader')
priority = models.IntegerField()
class Meta:
db_table = 'image'
ordering = ['priority']
def _check_image_existance(self):
return exists(join(ROOMIMAGEPATH, self.path, self.name))
def _check_thumb_existance(self):
"""Relies on static thumb directory"""
return exists(join(ROOMIMAGEPATH, self.path, 'thumbs', self.name))
def _check_readable(self):
return os.access(join(ROOMIMAGEPATH, self.path, self.name), os.R_OK)
image_exists = property(_check_image_existance)
thumb_exists = property(_check_thumb_existance)
is_readable = property(_check_readable)
|
alexanderfefelov/nav
|
python/nav/models/roommeta.py
|
Python
|
gpl-2.0
| 1,361
|
# -*- coding: utf-8 -*-
"""Reload command."""
# Part of Clockwork MUD Server (https://github.com/whutch/cwmud)
# :copyright: (c) 2008 - 2017 Will Hutcheson
# :license: MIT (https://github.com/whutch/cwmud/blob/master/LICENSE.txt)
from .. import Command, COMMANDS
from ...channels import CHANNELS
from ...characters import CharacterShell
from ...server import SERVER
@COMMANDS.register
class ReloadCommand(Command):
"""A command to reload the game server, hopefully without interruption.
This is similar to the old ROM-style copyover, except that we try and
preserve a complete game state rather than just the open connections.
"""
def _action(self):
CHANNELS["announce"].send("Server is reloading, please remain calm!")
SERVER.reload()
CharacterShell.add_verbs(ReloadCommand, "reload", truncate=False)
|
whutch/cwmud
|
cwmud/core/commands/admin/reload.py
|
Python
|
mit
| 847
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualNetworkProfile(Model):
"""Specification for using a Virtual Network.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource id of the Virtual Network.
:type id: str
:ivar name: Name of the Virtual Network (read-only).
:vartype name: str
:ivar type: Resource type of the Virtual Network (read-only).
:vartype type: str
:param subnet: Subnet within the Virtual Network.
:type subnet: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'subnet': {'key': 'subnet', 'type': 'str'},
}
def __init__(self, id=None, subnet=None):
self.id = id
self.name = None
self.type = None
self.subnet = subnet
|
SUSE/azure-sdk-for-python
|
azure-mgmt-web/azure/mgmt/web/models/virtual_network_profile.py
|
Python
|
mit
| 1,489
|
# -*- coding: utf-8 -*-
import numpy as np
import cantera as ct
import pandas as pd
import re
import warnings
import copy
###################################
# 3b. output data analysis
###################################
def branching_ratios(df, solution, compound, production = False):
"""
This method looks at the consumption pathways of `compound` over
all time points in the data set.
    It outputs a pandas.DataFrame which contains columns of pertinent reactions
    and values of the branching ratio of each reaction, which is defined as
    $BR_{i} = \frac{ROC_i}{\sum_{j=0}^{N} ROC_j}$
    where $i$ is the reaction in question, $ROC$ is the rate of consumption of
    the desired species, $N$ is the number of reactions, and $BR$ is the branching ratio.
df = dataframe of run data
solution = cantera solution object
compound = species string which you want to identify
production = if True, shows the reactions forming species X
This method only works on forward reactions
"""
reaction_dataframe = weight_reaction_dataframe_by_stoich_coefficients(df,solution,compound)
if not production:
#only keep consumption
consumption_terms = reaction_dataframe[reaction_dataframe < 0]
df = consumption_terms.dropna('columns','all')
else:
production_terms = reaction_dataframe[reaction_dataframe > 0]
df = production_terms.dropna('columns','all')
total = df.sum('columns')
branching_ratios = df.div(total,'index')
branching_ratios = branching_ratios.fillna(0)
#sort from most important
importance_index = branching_ratios.sum('index').sort_values(ascending=False)
branching_ratios = branching_ratios.reindex(importance_index.index,axis='columns')
return branching_ratios
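# Hypothetical usage sketch (the objects below are assumptions, not defined in this module):
#   gas = ct.Solution('gri30.cti')
#   ratios = branching_ratios(net_reaction_rates_df, gas, 'CH4')
#   ratios.iloc[-1].head()  # dominant CH4 consumption pathways at the final time point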
def consumption_pathways(solution,df,species, time = 'all'):
"""
returns the total rate of production for a particular species at the specified
    time(s). Positive values indicate production, negative values indicate consumption.
If multiple times are given or the keyword 'all' is used, the output is a DataFrame
with indexes the various times. If only one time is supplied, the output is a
Series.
solution = cantera solution object
df = pandas dataframe of reactions
species = string of species
time = number describing the time points to determine consumption (or list of numbers)
"""
if time=='all':
time = list(df.index)
if isinstance(time,list):
# recursively run consumption_pathways
consumption_values = []
for t in time:
consumption_values.append(consumption_pathways(solution=solution,
df=df,
species=species,
time= t))
consumption_values = pd.DataFrame(consumption_values, index=time)
# sort by total sum of flux
sorted_index = consumption_values.sum('index').sort_values().keys()
return consumption_values[sorted_index]
# the time is not a list, return a pd.Series
try:
reactions_weighted = find_reactions(solution, df,species).loc[time,:]
except KeyError:
reactions_weighted = find_reactions(solution, df,species).loc[return_nearest_time_index(time,df.index, index=False),:]
# weight by stoichiometric_coefficients
stoich_coeffs = [obtain_stoichiometry_of_species(solution, species, reaction) for reaction in reactions_weighted.index]
stoich_coeff_dict = pd.Series(dict(zip(reactions_weighted.index,stoich_coeffs)))
# pandas was having some bug, so manually rewrote the line below
#reactions_weighted *= stoich_coeff_dict
for index in stoich_coeff_dict.index:
reactions_weighted[index] *= stoich_coeff_dict[index]
return reactions_weighted.sort_values()
def quasi_steady_state(df, species):
"""
This method outputs the key parameter, $\frac{|ROP-ROC|}{ROP}$, in quasi steady state
approximation.
df = pd.DataFrame containing get_rop_and_roc_series
species = string of species to use
    returns a pd.Series of the QSS approximation: $\frac{|ROP-ROC|}{ROP}$
"""
return (df['production',species] - df['consumption',species]).abs() / df['production',species]
def compare_species_profile_at_one_time(desired_time, df1,df2,
minimum_return_value=1e-13,
time_string = 'time (s)'):
"""
compares the species profile between two models closest to the desired time
returns a pandas.Series object with the relative species concentrations
given by `compare_2_data_sets`
"""
time_index_1 = return_nearest_time_index(desired_time,df1[time_string])
time_index_2 = return_nearest_time_index(desired_time,df2[time_string])
time_slice_1 = find_species(df1).loc[time_index_1]
time_slice_2 = find_species(df2).loc[time_index_2]
return _compare_2_data_sets(time_slice_1,time_slice_2,minimum_return_value)
def _compare_2_data_sets(model1, model2, minimum_return_value = 1000,diff_returned=0.0):
"""given two pd.Series of data, returns a pd.Series with the relative
    differences between the two sets. This requires one of the values to be
    above `minimum_return_value` and the difference to be above `diff_returned`.
The difference is returned as $\frac{model1 - model2}{\min(model1,model2)}$.
Where the minimum merges the two datasets using the minimum value at each index.
"""
#ensure all values are the same
model1 = copy.deepcopy(model1)[model2.index].dropna()
model2 = copy.deepcopy(model2)[model1.index].dropna()
minimum_value = pd.DataFrame({'model1':model1,'model2':model2}).min(1)
compared_values = ((model1-model2)/minimum_value).dropna()
for label in compared_values.index:
not_enough_value = (model1[label] < minimum_return_value and model2[label] < minimum_return_value)
not_enough_difference = abs(compared_values[label]) < diff_returned
if not_enough_value or not_enough_difference:
compared_values[label] = np.nan
compared_values = compared_values.dropna()
return compared_values.sort_values()
def return_nearest_time_index(desired_time,time_series,index=True):
"""
input the desired time, double, and time_series, pd.Series,
returns the index of the time_series.
If you want the actual time value, change index=False
"""
    # commented out due to an error in np.argmin
#nearest_value = lambda value, array: np.argmin(abs(value-array))
#if index:
# return nearest_value(desired_time,time_series)
#return time_series[nearest_value(desired_time,time_series)]
deviation_list = abs(desired_time-time_series)
min_deviation = min(deviation_list)
index_value = list(deviation_list).index(min_deviation)
if index:
return index_value
return time_series[index_value]
def obtain_stoichiometry_of_species(solution, species, reaction):
"""
this method finds a reaction string in the cantera solution file, and
returns its stoichiometric coefficient of the specified species.
Returns a negative value if the species is a reactant.
solution = cantera solution object
species = string of species name
reaction = reaction string or list of reaction strings.
Stoichiometry is calculated by: product_stoich_coeff - reactant_stoich_coeff
"""
# recursively deal with lists of reactions
if not isinstance(reaction,str):
coefficients = np.empty(len(reaction))
for index, reaction_string in enumerate(reaction):
coefficients[index] = obtain_stoichiometry_of_species(solution,species,reaction_string)
return coefficients
# deal with individual reactions
assert isinstance(reaction,str)
reaction_index = solution.reaction_equations().index(reaction)
reactant_stoich_coeff = solution.reactant_stoich_coeff(species, reaction_index)
product_stoich_coeff = solution.product_stoich_coeff(species, reaction_index)
if product_stoich_coeff > 0 or reactant_stoich_coeff > 0:
return product_stoich_coeff - reactant_stoich_coeff
raise Exception('Species {} is not in reaction {}'.format(species,reaction))
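# Sketch of the sign convention above: for a reaction 'CH4 + OH <=> CH3 + H2O',
# obtain_stoichiometry_of_species returns -1.0 for 'CH4' (consumed) and +1.0 for 'CH3' (produced).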
def weight_reaction_dataframe_by_stoich_coefficients(df, solution, species):
"""
returns a dataframe of reactions over time weighted by the stoichiometric
coefficient of the species string `species`.
"""
reactions = find_reactions( solution, df, species)
reaction_strings = list(reactions.columns)
stoichiometries = obtain_stoichiometry_of_species(solution,
species,
reaction_strings)
return reactions * stoichiometries
def find_reactions(solution, df,species):
"""
finds the reaction columns in the net_reaction dataframe which contain
the species specified and returns them.
"""
included_columns = []
rxn_string_to_rxn_index = dict(zip(solution.reaction_equations(),range(solution.n_reactions)))
for rxn_name in df.columns:
sln_index = rxn_string_to_rxn_index[rxn_name]
try:
if solution.product_stoich_coeff(species,sln_index) !=0 or \
solution.reactant_stoich_coeff(species,sln_index) !=0:
included_columns.append(rxn_name)
except KeyError:
print("Error obtained in find_reactions,\ncheck to ensure the columns in `df`\ncorrespond to the reactions in `solution`")
raise
df_my_reactions = df[included_columns]
if df_my_reactions.empty:
raise Exception('No reactions found for species {}'.format(species))
return df_my_reactions
|
goldmanm/tools
|
analysis.py
|
Python
|
mit
| 9,967
|
from ImportDependence import *
from CustomClass import *
class CIA(AppForm):
useddf=pd.DataFrame()
Lines = []
Tags = []
description = 'Chemical Index of Alteration'
unuseful = ['Name',
'Mineral',
'Author',
'DataType',
'Label',
'Marker',
'Color',
'Size',
'Alpha',
'Style',
'Width',
'Tag']
reference = '''
CIA = [Al2O3/(Al2O3+CaO*+Na2O+K2O]×100
ICV = (Fe2O3+K2O+Na2O+CaO*+MgO+MnO+TiO2)/Al2O3 (Cox,1995)
PIA = {(Al2O3-K2O)/[(Al2O3-K2O)+CaO*+Na2O]}×100
CIW = [Al2O3/(Al2O3+CaO*+Na2O)]×100
CIW' = [Al2O3/(Al2O3+Na2O)]×100
where CaO* is the amount of CaO incorporated in the silicate fraction of the rock.
CaO* = CaO - (10/3 * P2O5)
if CaO* < Na2O:
CaO* = CaO*
else:
CaO* = Na2O
References:
Nesbitt-CIA-1982
Harnois-CIW-1988
Mclennan-CIA-1993
Cox R-ICV-1995
Fedo-PIA-1995
Cullers-CIW'-2000
Song B W-2013
Cox R, Lowe D R, Cullers R L. The influence of sediment recycling and basement composition on evolution of mudrock chemistry in the southwestern United States[J]. Geochimica Et Cosmochimica Acta, 1995, 59(14):2919-2940.
Harnois, L., 1988, The CIW index: A new chemical index of weathering: Sedimentary Geology, v. 55, p. 319–322. doi:10.1016/0037-0738(88)90137-6
Nesbitt, H.W., and Young, G.M., 1982, Early Proterozoic climates and plate motions inferred from major element chemistry of lutites: Nature, v. 299, p. 715–717. doi:10.1038/299715a0
'''
BaseMass = {'SiO2': 60.083,
'TiO2': 79.865,
'Al2O3': 101.960077,
'TFe2O3': 159.687,
'Fe2O3': 159.687,
'TFeO': 71.844,
'FeO': 71.844,
'MnO': 70.937044,
'MgO': 40.304,
'CaO': 56.077000000000005,
'Na2O': 61.978538560000004,
'K2O': 94.1956,
'P2O5': 141.942523996,
'CO2': 44.009,
'SO3': 80.057,
'FeO': 71.844,
'Fe3O4': 231.531,
'BaO': 153.326,
'SrO': 103.619,
'Cr2O3': 151.98919999999998,
}
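    # Molar masses (g/mol) of the oxides above; CIA() divides each oxide wt% by
    # these values and renormalizes to 100 to obtain mole percentages before
    # computing the alteration indices.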
def __init__(self, parent=None, df=pd.DataFrame()):
QMainWindow.__init__(self, parent)
self.setWindowTitle('Chemical Index of Alteration & Index of Compositional Variability')
self.items = []
self._df = df
self._df.reindex()
if (len(df) > 0):
self._changed = True
            # print('DataFrame received by CIA')
self.raw = df
self.raw = self.CleanDataFile(df)
self.rawitems = self.raw.columns.values.tolist()
for i in self.rawitems:
if i not in self.unuseful:
self.items.append(i)
else:
pass
self.create_main_frame()
self.create_status_bar()
def create_main_frame(self):
self.resize(800,600)
self.main_frame = QWidget()
self.dpi = 128
self.setWindowTitle('Chemical Index of Alteration & Index of Compositional Variability')
self.tableView = CustomQTableView(self.main_frame)
self.tableView.setObjectName('tableView')
self.tableView.setSortingEnabled(True)
self.textbox = GrowingTextEdit(self)
self.textbox.setText(self.reference)
# Other GUI controls
self.save_button = QPushButton('&Save')
self.save_button.clicked.connect(self.saveDataFile)
#
# Layout with box sizers
#
self.hbox = QHBoxLayout()
for w in [self.save_button]:
self.hbox.addWidget(w)
self.hbox.setAlignment(w, Qt.AlignVCenter)
self.vbox = QVBoxLayout()
self.vbox.addWidget(self.tableView)
#self.vbox.addWidget(self.tableView)
self.vbox.addLayout(self.hbox)
self.vbox.addWidget(self.textbox)
self.main_frame.setLayout(self.vbox)
self.setCentralWidget(self.main_frame)
def Read(self, inpoints):
points = []
for i in inpoints:
points.append(i.split())
result = []
for i in points:
for l in range(len(i)):
a = float((i[l].split(','))[0])
a = a * self.x_scale
b = float((i[l].split(','))[1])
b = (self.height_load - b) * self.y_scale
result.append((a, b))
return (result)
def CIA(self):
self.WholeData = []
dataframe=pd.DataFrame()
dataframe = self._df
#dataframe.set_index('Label')
ItemsAvalibale = dataframe.columns.values.tolist()
Indexes = dataframe.index.values.tolist()
#ItemsToCheck = ['Label','SiO2','Al2O3','Fe2O3','MgO','CaO','Na2O','K2O','P2O5','MnO','TiO2']
ItemsToTest = ['Number', 'Tag', 'Name', 'Author', 'DataType', 'Marker', 'Color', 'Size', 'Alpha',
'Style', 'Width']
for i in ItemsAvalibale:
if 'O' not in i and i !='Label':
dataframe = dataframe.drop(i, 1)
WholeItemsAvalibale = dataframe.columns.values.tolist()
ItemsAvalibale = dataframe.columns.values.tolist()
Indexes = dataframe.index.values.tolist()
if 'Whole' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('Whole')
if 'CIA' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('CIA')
if 'ICV' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('ICV')
if 'PIA' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('PIA')
if 'CIW' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('CIW')
if 'CIW\'' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('CIW\'')
        print('index', Indexes, '\ncolumns', WholeItemsAvalibale)
WholeMole=[]
WholeList=[]
dataframe = dataframe.dropna(axis=1,how='all')
print(dataframe)
for j in Indexes:
tmpList=[]
tmpMoleSum=0
tmpcia=0
tmpAl2O3=0
tmpCaO=0
tmpNa2O=0
tmpK2O=0
tmpP2O5=0
tmpFe2O3=0
tmpMgO=0
tmpMnO=0
tmpTiO2=0
#ICV =(Fe2O3+K2O+Na2O+CaO*+MgO+MnO+TiO2)/Al2O3 (Cox,1995)
for i in ItemsAvalibale:
if i in self.BaseMass:
m=dataframe.at[j,i]
n=self.BaseMass[i]
#print('\nm & n is \t',m,n)
tmpmole= m/n
#print(tmpmole)
tmpMoleSum = tmpMoleSum + tmpmole
#tmpList.append(dataframe.at[i,j])
#print('\n total mole is',tmpMoleSum)
for i in ItemsAvalibale:
if i in self.BaseMass:
tmpdata= 100*(dataframe.at[j,i]/self.BaseMass[i])/tmpMoleSum
tmpList.append(tmpdata)
#print(i, tmpdata)
if i =='Al2O3':
tmpAl2O3=tmpdata
elif i =='CaO':
tmpCaO=tmpdata
elif i =='Na2O':
tmpNa2O = tmpdata
elif i =='K2O':
tmpK2O=tmpdata
elif i =='P2O5':
tmpP2O5=tmpdata
elif i =='Fe2O3':
tmpFe2O3=tmpdata
elif i == 'MgO':
tmpMgO = tmpdata
elif i == 'MnO':
tmpMnO = tmpdata
elif i == 'TiO2':
tmpTiO2 = tmpdata
elif i == 'Label' :
tmpdata = dataframe.at[j,i]
tmpList.append(tmpdata)
elif i in WholeItemsAvalibale:
del WholeItemsAvalibale[WholeItemsAvalibale.index(i)]
tmpList.append(tmpMoleSum)
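            # CaO* correction: subtract the CaO bound in apatite (10/3 * P2O5)
            # and cap the result at the Na2O mole percentage, as in the reference above.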
usedCaO=0
middleCaO= tmpCaO-(10/3.0*tmpP2O5)
if middleCaO< tmpNa2O:
usedCaO=middleCaO
else:
usedCaO=tmpNa2O
#print(tmpAl2O3, usedCaO, tmpK2O, tmpNa2O)
CIA=tmpAl2O3/(tmpAl2O3+usedCaO+tmpNa2O+tmpK2O)*100
tmpList.append(CIA)
ICV =(tmpFe2O3+tmpK2O+tmpNa2O+usedCaO+tmpMgO+tmpMnO+tmpTiO2)/tmpAl2O3 #(Cox,1995)
tmpList.append(ICV)
PIA = ((tmpAl2O3-tmpK2O)/(tmpAl2O3-tmpK2O+usedCaO+tmpNa2O))*100
tmpList.append(PIA)
CIW = (tmpAl2O3/(tmpAl2O3+usedCaO+tmpNa2O))*100
tmpList.append(CIW)
CIW2 = (tmpAl2O3/(tmpAl2O3+tmpNa2O))*100
tmpList.append(CIW2)
'''
CIA = [Al2O3/(Al2O3+CaO*+Na2O+K2O]×100
ICV = (Fe2O3+K2O+Na2O+CaO*+MgO+MnO+TiO2)/Al2O3 (Cox,1995)
PIA = {(Al2O3-K2O)/[(Al2O3-K2O)+CaO*+Na2O]}×100
CIW = [Al2O3/(Al2O3+CaO*+Na2O)]×100
CIW' = [Al2O3/(Al2O3+Na2O)]×100
'''
#print(len(tmpList))
WholeList.append(tmpList)
pass
print(len(WholeList))
print(len(WholeItemsAvalibale))
df = pd.DataFrame(WholeList,columns=WholeItemsAvalibale)
self.useddf = df
self.tableView.setModel(PandasModel(self.useddf))
self.show()
def saveDataFile(self):
# if self.model._changed == True:
# print('changed')
# print(self.model._df)
        DataFileOutput, ok2 = QFileDialog.getSaveFileName(
            self, 'Save File', 'C:/',
            'Excel Files (*.xlsx);;CSV Files (*.csv)')  # dialog for saving the output data file
if (DataFileOutput != ''):
if ('csv' in DataFileOutput):
self.useddf.to_csv(DataFileOutput, sep=',', encoding='utf-8')
elif ('xls' in DataFileOutput):
self.useddf.to_excel(DataFileOutput, encoding='utf-8')
|
chinageology/GeoPython
|
geopytool/CIA.py
|
Python
|
gpl-3.0
| 10,636
|
# This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
from h5py import h5
from .common import TestCase
def fixnames():
cfg = h5.get_config()
cfg.complex_names = ('r','i')
class TestH5(TestCase):
def test_config(self):
cfg = h5.get_config()
self.assertIsInstance(cfg, h5.H5PYConfig)
cfg2 = h5.get_config()
self.assertIs(cfg, cfg2)
def test_cnames_get(self):
cfg = h5.get_config()
self.assertEqual(cfg.complex_names, ('r','i'))
def test_cnames_set(self):
self.addCleanup(fixnames)
cfg = h5.get_config()
cfg.complex_names = ('q','x')
self.assertEqual(cfg.complex_names, ('q','x'))
def test_cnames_set_exc(self):
self.addCleanup(fixnames)
cfg = h5.get_config()
with self.assertRaises(TypeError):
cfg.complex_names = ('q','i','v')
self.assertEqual(cfg.complex_names, ('r','i'))
def test_repr(self):
cfg = h5.get_config()
repr(cfg)
|
h5py/h5py
|
h5py/tests/test_h5.py
|
Python
|
bsd-3-clause
| 1,216
|
#!/usr/bin/env python3
import sys
# import osgeo.utils.gdalchksum as a convenience to use as a script
from osgeo.utils.gdalchksum import * # noqa
from osgeo.utils.gdalchksum import main
from osgeo.gdal import deprecation_warn
deprecation_warn('gdalchksum', 'utils')
sys.exit(main(sys.argv))
|
grueni75/GeoDiscoverer
|
Source/Platform/Target/Android/core/src/main/jni/gdal-3.2.1/swig/python/scripts/gdalchksum.py
|
Python
|
gpl-3.0
| 295
|
# minke.normalize
# Utilities and helpers for cleaning and normalizing text data.
#
# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created: Tue May 03 14:19:14 2016 -0400
#
# Copyright (C) 2016 District Data Labs
# For license information, see LICENSE.txt
#
# ID: normalize.py [978bbb0] benjamin@bengfort.com $
"""
Utilities and helpers for cleaning and normalizing text data.
"""
##########################################################################
## Imports
##########################################################################
import nltk
import string
from nltk.corpus import wordnet as wn
##########################################################################
## Module Constants
##########################################################################
PUNCT = set(string.punctuation)
STOPWORDS = set(nltk.corpus.stopwords.words('english'))
##########################################################################
## Lemmatizer
##########################################################################
class Lemmatizer(object):
"""
Wraps the nltk.WordNetLemmatizer to provide added functionality like the
discovery of the part of speech of the word to lemmatize.
"""
def __init__(self):
self._wordnet = nltk.WordNetLemmatizer()
self._cache = {}
def tagwn(self, tag):
"""
Returns the WordNet tag from the Penn Treebank tag.
"""
return {
'N': wn.NOUN,
'V': wn.VERB,
'R': wn.ADV,
'J': wn.ADJ
}.get(tag[0], wn.NOUN)
def poswn(self, word):
"""
Computes the part of speech for the given word.
"""
return self.tagwn(nltk.pos_tag([word])[0][1])
def lemmatize(self, word, tag=None):
"""
Lemmatizes the word; if no tag is given, then computes the tag.
"""
if (word, tag) in self._cache:
return self._cache[(word, tag)]
tag = self.tagwn(tag) if tag else self.poswn(word)
lemma = self._wordnet.lemmatize(word, tag)
self._cache[(word, tag)] = lemma
return lemma
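# Hypothetical usage sketch:
#   lem = Lemmatizer()
#   lem.lemmatize('running')        # POS inferred via pos_tag -> 'run'
#   lem.lemmatize('better', 'JJR')  # Penn Treebank tag mapped to wn.ADJ -> 'good'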
##########################################################################
## Normalizer
##########################################################################
class Normalizer(object):
"""
Performs normalization of text by applying string operations (lowercase),
excluding stopwords and punctuation, and by lemmatizing words.
"""
def __init__(self, stopwords=STOPWORDS, punctuation=PUNCT,
lemmatize=True, lower=True, strip=True):
self.stopwords = stopwords
self.punct = punctuation
self.lemmatize = lemmatize
self.lower = lower
self.strip = strip
# Initialize lemmatizer
self.lemmatizer = Lemmatizer() if self.lemmatize else None
def normalize(self, words):
"""
Normalizes a list of words.
"""
# Add part of speech tags to the words
words = nltk.pos_tag(words)
for word, tag in words:
if self.lower: word = word.lower()
if self.strip: word = word.strip()
if word not in self.stopwords:
if not all(c in self.punct for c in word):
if self.lemmatize:
word = self.lemmatizer.lemmatize(word, tag)
yield word
def tokenize(self, text):
"""
Performs tokenization in addition to normalization.
"""
return self.normalize(nltk.wordpunct_tokenize(text))
if __name__ == '__main__':
norm = Normalizer()
    print(list(norm.tokenize((
        'Sometimes, technically minded people feel they are not'
        ' good candidates for leadership positions.'
    ))))
|
bbengfort/minke
|
minke/normalize.py
|
Python
|
mit
| 3,826
|
# Copyright 2014 Violin Memory, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for Violin Memory 6000 Series All-Flash Array Fibrechannel Driver
"""
import mock
from oslo_utils import units
from cinder import context
from cinder.db.sqlalchemy import models
from cinder import exception
from cinder import test
from cinder.tests.unit import fake_vmem_client as vmemclient
from cinder.volume import configuration as conf
from cinder.volume.drivers.violin import v6000_common
from cinder.volume.drivers.violin import v6000_fcp
VOLUME_ID = "abcdabcd-1234-abcd-1234-abcdeffedcba"
VOLUME = {
"name": "volume-" + VOLUME_ID,
"id": VOLUME_ID,
"display_name": "fake_volume",
"size": 2,
"host": "irrelevant",
"volume_type": None,
"volume_type_id": None,
}
SNAPSHOT_ID = "abcdabcd-1234-abcd-1234-abcdeffedcbb"
SNAPSHOT = {
"name": "snapshot-" + SNAPSHOT_ID,
"id": SNAPSHOT_ID,
"volume_id": VOLUME_ID,
"volume_name": "volume-" + VOLUME_ID,
"volume_size": 2,
"display_name": "fake_snapshot",
"volume": VOLUME,
}
SRC_VOL_ID = "abcdabcd-1234-abcd-1234-abcdeffedcbc"
SRC_VOL = {
"name": "volume-" + SRC_VOL_ID,
"id": SRC_VOL_ID,
"display_name": "fake_src_vol",
"size": 2,
"host": "irrelevant",
"volume_type": None,
"volume_type_id": None,
}
INITIATOR_IQN = "iqn.1111-22.org.debian:11:222"
CONNECTOR = {
"initiator": INITIATOR_IQN,
"host": "irrelevant",
'wwpns': [u'50014380186b3f65', u'50014380186b3f67'],
}
FC_TARGET_WWPNS = [
'31000024ff45fb22', '21000024ff45fb23',
'51000024ff45f1be', '41000024ff45f1bf'
]
FC_INITIATOR_WWPNS = [
'50014380186b3f65', '50014380186b3f67'
]
FC_FABRIC_MAP = {
'fabricA':
{'target_port_wwn_list': [FC_TARGET_WWPNS[0], FC_TARGET_WWPNS[1]],
'initiator_port_wwn_list': [FC_INITIATOR_WWPNS[0]]},
'fabricB':
{'target_port_wwn_list': [FC_TARGET_WWPNS[2], FC_TARGET_WWPNS[3]],
'initiator_port_wwn_list': [FC_INITIATOR_WWPNS[1]]}
}
FC_INITIATOR_TARGET_MAP = {
FC_INITIATOR_WWPNS[0]: [FC_TARGET_WWPNS[0], FC_TARGET_WWPNS[1]],
FC_INITIATOR_WWPNS[1]: [FC_TARGET_WWPNS[2], FC_TARGET_WWPNS[3]]
}
class V6000FCPDriverTestCase(test.TestCase):
"""Test cases for VMEM FCP driver."""
def setUp(self):
super(V6000FCPDriverTestCase, self).setUp()
self.conf = self.setup_configuration()
self.driver = v6000_fcp.V6000FCDriver(configuration=self.conf)
self.driver.common.container = 'myContainer'
self.driver.device_id = 'ata-VIOLIN_MEMORY_ARRAY_23109R00000022'
self.driver.gateway_fc_wwns = FC_TARGET_WWPNS
self.stats = {}
self.driver.set_initialized()
def tearDown(self):
super(V6000FCPDriverTestCase, self).tearDown()
def setup_configuration(self):
config = mock.Mock(spec=conf.Configuration)
config.volume_backend_name = 'v6000_fcp'
config.san_ip = '1.1.1.1'
config.san_login = 'admin'
config.san_password = ''
config.san_thin_provision = False
config.san_is_local = False
config.gateway_mga = '2.2.2.2'
config.gateway_mgb = '3.3.3.3'
config.use_igroups = False
config.request_timeout = 300
config.container = 'myContainer'
return config
def setup_mock_vshare(self, m_conf=None):
"""Create a fake VShare communication object."""
_m_vshare = mock.Mock(name='VShare',
version='1.1.1',
spec=vmemclient.mock_client_conf)
if m_conf:
_m_vshare.configure_mock(**m_conf)
return _m_vshare
@mock.patch.object(v6000_common.V6000Common, 'check_for_setup_error')
def test_check_for_setup_error(self, m_setup_func):
"""No setup errors are found."""
result = self.driver.check_for_setup_error()
m_setup_func.assert_called_with()
self.assertTrue(result is None)
@mock.patch.object(v6000_common.V6000Common, 'check_for_setup_error')
def test_check_for_setup_error_no_wwn_config(self, m_setup_func):
"""No wwns were found during setup."""
self.driver.gateway_fc_wwns = []
self.assertRaises(exception.ViolinInvalidBackendConfig,
self.driver.check_for_setup_error)
def test_create_volume(self):
"""Volume created successfully."""
self.driver.common._create_lun = mock.Mock()
result = self.driver.create_volume(VOLUME)
self.driver.common._create_lun.assert_called_with(VOLUME)
self.assertTrue(result is None)
def test_delete_volume(self):
"""Volume deleted successfully."""
self.driver.common._delete_lun = mock.Mock()
result = self.driver.delete_volume(VOLUME)
self.driver.common._delete_lun.assert_called_with(VOLUME)
self.assertTrue(result is None)
def test_create_snapshot(self):
"""Snapshot created successfully."""
self.driver.common._create_lun_snapshot = mock.Mock()
result = self.driver.create_snapshot(SNAPSHOT)
self.driver.common._create_lun_snapshot.assert_called_with(SNAPSHOT)
self.assertTrue(result is None)
def test_delete_snapshot(self):
"""Snapshot deleted successfully."""
self.driver.common._delete_lun_snapshot = mock.Mock()
result = self.driver.delete_snapshot(SNAPSHOT)
self.driver.common._delete_lun_snapshot.assert_called_with(SNAPSHOT)
self.assertTrue(result is None)
@mock.patch.object(context, 'get_admin_context')
def test_create_volume_from_snapshot(self, m_context_func):
"""Volume created from a snapshot successfully."""
m_context_func.return_value = None
self.driver.common._create_lun = mock.Mock()
self.driver.copy_volume_data = mock.Mock()
result = self.driver.create_volume_from_snapshot(VOLUME, SNAPSHOT)
m_context_func.assert_called_with()
self.driver.common._create_lun.assert_called_with(VOLUME)
self.driver.copy_volume_data.assert_called_with(None, SNAPSHOT, VOLUME)
self.assertTrue(result is None)
@mock.patch.object(context, 'get_admin_context')
def test_create_cloned_volume(self, m_context_func):
"""Volume clone created successfully."""
m_context_func.return_value = None
self.driver.common._create_lun = mock.Mock()
self.driver.copy_volume_data = mock.Mock()
result = self.driver.create_cloned_volume(VOLUME, SRC_VOL)
m_context_func.assert_called_with()
self.driver.common._create_lun.assert_called_with(VOLUME)
self.driver.copy_volume_data.assert_called_with(None, SRC_VOL, VOLUME)
self.assertTrue(result is None)
def test_initialize_connection(self):
lun_id = 1
igroup = None
target_wwns = self.driver.gateway_fc_wwns
init_targ_map = {}
volume = mock.Mock(spec=models.Volume)
self.driver.common.vip = self.setup_mock_vshare()
self.driver._export_lun = mock.Mock(return_value=lun_id)
self.driver._build_initiator_target_map = mock.Mock(
return_value=(target_wwns, init_targ_map))
props = self.driver.initialize_connection(volume, CONNECTOR)
self.driver._export_lun.assert_called_with(volume, CONNECTOR, igroup)
self.driver.common.vip.basic.save_config.assert_called_with()
self.driver._build_initiator_target_map.assert_called_with(
CONNECTOR)
self.assertEqual("fibre_channel", props['driver_volume_type'])
self.assertTrue(props['data']['target_discovered'])
self.assertEqual(target_wwns, props['data']['target_wwn'])
self.assertEqual(lun_id, props['data']['target_lun'])
self.assertEqual(init_targ_map, props['data']['initiator_target_map'])
def test_initialize_connection_with_snapshot_object(self):
lun_id = 1
igroup = None
target_wwns = self.driver.gateway_fc_wwns
init_targ_map = {}
snapshot = mock.Mock(spec=models.Snapshot)
self.driver.common.vip = self.setup_mock_vshare()
self.driver._export_snapshot = mock.Mock(return_value=lun_id)
self.driver._build_initiator_target_map = mock.Mock(
return_value=(target_wwns, init_targ_map))
props = self.driver.initialize_connection(snapshot, CONNECTOR)
self.driver._export_snapshot.assert_called_with(
snapshot, CONNECTOR, igroup)
self.driver.common.vip.basic.save_config.assert_called_with()
self.driver._build_initiator_target_map.assert_called_with(
CONNECTOR)
self.assertEqual("fibre_channel", props['driver_volume_type'])
self.assertTrue(props['data']['target_discovered'])
self.assertEqual(target_wwns, props['data']['target_wwn'])
self.assertEqual(lun_id, props['data']['target_lun'])
self.assertEqual(init_targ_map, props['data']['initiator_target_map'])
def test_terminate_connection(self):
target_wwns = self.driver.gateway_fc_wwns
init_targ_map = {}
volume = mock.Mock(spec=models.Volume)
self.driver.common.vip = self.setup_mock_vshare()
self.driver._unexport_lun = mock.Mock()
self.driver._is_initiator_connected_to_array = mock.Mock(
return_value=False)
self.driver._build_initiator_target_map = mock.Mock(
return_value=(target_wwns, init_targ_map))
props = self.driver.terminate_connection(volume, CONNECTOR)
self.driver._unexport_lun.assert_called_with(volume)
self.driver.common.vip.basic.save_config.assert_called_with()
self.driver._is_initiator_connected_to_array.assert_called_with(
CONNECTOR)
self.driver._build_initiator_target_map.assert_called_with(
CONNECTOR)
self.assertEqual("fibre_channel", props['driver_volume_type'])
self.assertEqual(target_wwns, props['data']['target_wwn'])
self.assertEqual(init_targ_map, props['data']['initiator_target_map'])
def test_terminate_connection_snapshot_object(self):
target_wwns = self.driver.gateway_fc_wwns
init_targ_map = {}
snapshot = mock.Mock(spec=models.Snapshot)
self.driver.common.vip = self.setup_mock_vshare()
self.driver._unexport_snapshot = mock.Mock()
self.driver._is_initiator_connected_to_array = mock.Mock(
return_value=False)
self.driver._build_initiator_target_map = mock.Mock(
return_value=(target_wwns, init_targ_map))
props = self.driver.terminate_connection(snapshot, CONNECTOR)
self.assertEqual("fibre_channel", props['driver_volume_type'])
self.assertEqual(target_wwns, props['data']['target_wwn'])
self.assertEqual(init_targ_map, props['data']['initiator_target_map'])
def test_get_volume_stats(self):
self.driver._update_stats = mock.Mock()
self.driver._update_stats()
result = self.driver.get_volume_stats(True)
self.driver._update_stats.assert_called_with()
self.assertEqual(self.driver.stats, result)
def test_export_lun(self):
lun_id = '1'
igroup = 'test-igroup-1'
response = {'code': 0, 'message': ''}
self.driver.common.vip = self.setup_mock_vshare()
self.driver.common._send_cmd_and_verify = mock.Mock(
return_value=response)
self.driver.common._get_lun_id = mock.Mock(return_value=lun_id)
result = self.driver._export_lun(VOLUME, CONNECTOR, igroup)
self.driver.common._send_cmd_and_verify.assert_called_with(
self.driver.common.vip.lun.export_lun,
self.driver.common._wait_for_export_state, '',
[self.driver.common.container, VOLUME['id'], 'all',
igroup, 'auto'], [VOLUME['id'], None, True])
self.driver.common._get_lun_id.assert_called_with(VOLUME['id'])
self.assertEqual(lun_id, result)
def test_export_lun_fails_with_exception(self):
lun_id = '1'
igroup = 'test-igroup-1'
response = {'code': 14000, 'message': 'Generic error'}
failure = exception.ViolinBackendErr
self.driver.common.vip = self.setup_mock_vshare()
self.driver.common._send_cmd_and_verify = mock.Mock(
side_effect=failure(response['message']))
self.driver.common._get_lun_id = mock.Mock(return_value=lun_id)
self.assertRaises(failure, self.driver._export_lun,
VOLUME, CONNECTOR, igroup)
def test_unexport_lun(self):
response = {'code': 0, 'message': ''}
self.driver.common.vip = self.setup_mock_vshare()
self.driver.common._send_cmd_and_verify = mock.Mock(
return_value=response)
result = self.driver._unexport_lun(VOLUME)
self.driver.common._send_cmd_and_verify.assert_called_with(
self.driver.common.vip.lun.unexport_lun,
self.driver.common._wait_for_export_state, '',
[self.driver.common.container, VOLUME['id'], 'all', 'all', 'auto'],
[VOLUME['id'], None, False])
self.assertTrue(result is None)
def test_unexport_lun_fails_with_exception(self):
response = {'code': 14000, 'message': 'Generic error'}
failure = exception.ViolinBackendErr
self.driver.common.vip = self.setup_mock_vshare()
self.driver.common._send_cmd_and_verify = mock.Mock(
side_effect=failure(response['message']))
self.assertRaises(failure, self.driver._unexport_lun, VOLUME)
def test_export_snapshot(self):
lun_id = '1'
igroup = 'test-igroup-1'
response = {'code': 0, 'message': ''}
self.driver.common.vip = self.setup_mock_vshare()
self.driver.common._send_cmd = mock.Mock(return_value=response)
self.driver.common._wait_for_export_state = mock.Mock()
self.driver.common._get_snapshot_id = mock.Mock(return_value=lun_id)
result = self.driver._export_snapshot(SNAPSHOT, CONNECTOR, igroup)
self.driver.common._send_cmd.assert_called_with(
self.driver.common.vip.snapshot.export_lun_snapshot, '',
self.driver.common.container, SNAPSHOT['volume_id'],
SNAPSHOT['id'], igroup, 'all', 'auto')
self.driver.common._wait_for_export_state.assert_called_with(
SNAPSHOT['volume_id'], SNAPSHOT['id'], state=True)
self.driver.common._get_snapshot_id.assert_called_once_with(
SNAPSHOT['volume_id'], SNAPSHOT['id'])
self.assertEqual(lun_id, result)
def test_unexport_snapshot(self):
response = {'code': 0, 'message': ''}
self.driver.common.vip = self.setup_mock_vshare()
self.driver.common._send_cmd = mock.Mock(return_value=response)
self.driver.common._wait_for_export_state = mock.Mock()
result = self.driver._unexport_snapshot(SNAPSHOT)
self.driver.common._send_cmd.assert_called_with(
self.driver.common.vip.snapshot.unexport_lun_snapshot, '',
self.driver.common.container, SNAPSHOT['volume_id'],
SNAPSHOT['id'], 'all', 'all', 'auto', False)
self.driver.common._wait_for_export_state.assert_called_with(
SNAPSHOT['volume_id'], SNAPSHOT['id'], state=False)
self.assertTrue(result is None)
def test_add_igroup_member(self):
igroup = 'test-group-1'
response = {'code': 0, 'message': 'success'}
wwpns = ['wwn.50:01:43:80:18:6b:3f:65', 'wwn.50:01:43:80:18:6b:3f:67']
conf = {
'igroup.add_initiators.return_value': response,
}
self.driver.common.vip = self.setup_mock_vshare(m_conf=conf)
self.driver._convert_wwns_openstack_to_vmem = mock.Mock(
return_value=wwpns)
result = self.driver._add_igroup_member(CONNECTOR, igroup)
self.driver._convert_wwns_openstack_to_vmem.assert_called_with(
CONNECTOR['wwpns'])
self.driver.common.vip.igroup.add_initiators.assert_called_with(
igroup, wwpns)
self.assertTrue(result is None)
def test_build_initiator_target_map(self):
"""Successfully build a map when zoning is enabled."""
expected_targ_wwns = FC_TARGET_WWPNS
self.driver.lookup_service = mock.Mock()
self.driver.lookup_service.get_device_mapping_from_network.\
return_value = FC_FABRIC_MAP
(targ_wwns, init_targ_map) = \
self.driver._build_initiator_target_map(CONNECTOR)
self.driver.lookup_service.get_device_mapping_from_network.\
assert_called_with(CONNECTOR['wwpns'], self.driver.gateway_fc_wwns)
self.assertEqual(set(expected_targ_wwns), set(targ_wwns))
i = FC_INITIATOR_WWPNS[0]
self.assertIn(FC_TARGET_WWPNS[0], init_targ_map[i])
self.assertIn(FC_TARGET_WWPNS[1], init_targ_map[i])
self.assertEqual(2, len(init_targ_map[i]))
i = FC_INITIATOR_WWPNS[1]
self.assertIn(FC_TARGET_WWPNS[2], init_targ_map[i])
self.assertIn(FC_TARGET_WWPNS[3], init_targ_map[i])
self.assertEqual(2, len(init_targ_map[i]))
self.assertEqual(2, len(init_targ_map))
def test_build_initiator_target_map_no_lookup_service(self):
"""Successfully build a map when zoning is disabled."""
expected_targ_wwns = FC_TARGET_WWPNS
expected_init_targ_map = {
CONNECTOR['wwpns'][0]: FC_TARGET_WWPNS,
CONNECTOR['wwpns'][1]: FC_TARGET_WWPNS
}
self.driver.lookup_service = None
targ_wwns, init_targ_map = self.driver._build_initiator_target_map(
CONNECTOR)
self.assertEqual(expected_targ_wwns, targ_wwns)
self.assertEqual(expected_init_targ_map, init_targ_map)
def test_is_initiator_connected_to_array(self):
"""Successfully finds an initiator with remaining active session."""
converted_wwpns = ['50:01:43:80:18:6b:3f:65',
'50:01:43:80:18:6b:3f:67']
prefix = "/vshare/config/export/container"
bn = "%s/%s/lun/**" % (prefix, self.driver.common.container)
resp_binding0 = "%s/%s/lun/%s/target/hba-a1/initiator/%s" \
% (prefix, self.driver.common.container, VOLUME['id'],
converted_wwpns[0])
resp_binding1 = "%s/%s/lun/%s/target/hba-a1/initiator/%s" \
% (prefix, self.driver.common.container, VOLUME['id'],
converted_wwpns[1])
response = {
resp_binding0: converted_wwpns[0],
resp_binding1: converted_wwpns[1]
}
conf = {
'basic.get_node_values.return_value': response,
}
self.driver.common.vip = self.setup_mock_vshare(m_conf=conf)
self.driver._convert_wwns_openstack_to_vmem = mock.Mock(
return_value=converted_wwpns)
self.assertTrue(self.driver._is_initiator_connected_to_array(
CONNECTOR))
self.driver.common.vip.basic.get_node_values.assert_called_with(bn)
def test_is_initiator_connected_to_array_empty_response(self):
"""Successfully finds no initiators with remaining active sessions."""
converted_wwpns = ['50:01:43:80:18:6b:3f:65',
'50:01:43:80:18:6b:3f:67']
response = {}
conf = {
'basic.get_node_values.return_value': response,
}
self.driver.common.vip = self.setup_mock_vshare(m_conf=conf)
self.driver._convert_wwns_openstack_to_vmem = mock.Mock(
return_value=converted_wwpns)
self.assertFalse(self.driver._is_initiator_connected_to_array(
CONNECTOR))
def test_update_stats(self):
backend_name = self.conf.volume_backend_name
vendor_name = "Violin Memory, Inc."
tot_bytes = 100 * units.Gi
free_bytes = 50 * units.Gi
bn0 = '/cluster/state/master_id'
bn1 = "/vshare/state/global/1/container/myContainer/total_bytes"
bn2 = "/vshare/state/global/1/container/myContainer/free_bytes"
response1 = {bn0: '1'}
response2 = {bn1: tot_bytes, bn2: free_bytes}
conf = {
'basic.get_node_values.side_effect': [response1, response2],
}
self.driver.common.vip = self.setup_mock_vshare(m_conf=conf)
result = self.driver._update_stats()
calls = [mock.call(bn0), mock.call([bn1, bn2])]
self.driver.common.vip.basic.get_node_values.assert_has_calls(calls)
self.assertEqual(100, self.driver.stats['total_capacity_gb'])
self.assertEqual(50, self.driver.stats['free_capacity_gb'])
self.assertEqual(backend_name,
self.driver.stats['volume_backend_name'])
self.assertEqual(vendor_name, self.driver.stats['vendor_name'])
        self.assertIsNone(result)
def test_update_stats_fails_data_query(self):
backend_name = self.conf.volume_backend_name
vendor_name = "Violin Memory, Inc."
bn0 = '/cluster/state/master_id'
response1 = {bn0: '1'}
response2 = {}
conf = {
'basic.get_node_values.side_effect': [response1, response2],
}
self.driver.common.vip = self.setup_mock_vshare(m_conf=conf)
        self.assertIsNone(self.driver._update_stats())
self.assertEqual(0, self.driver.stats['total_capacity_gb'])
self.assertEqual(0, self.driver.stats['free_capacity_gb'])
self.assertEqual(backend_name,
self.driver.stats['volume_backend_name'])
self.assertEqual(vendor_name, self.driver.stats['vendor_name'])
def test_update_stats_fails_data_query_but_has_cached_stats(self):
"""Stats query to backend fails, but cached stats are available. """
backend_name = self.conf.volume_backend_name
vendor_name = "Violin Memory, Inc."
bn0 = '/cluster/state/master_id'
response1 = {bn0: '1'}
response2 = {}
# fake cached stats, from a previous stats query
self.driver.stats = {'free_capacity_gb': 50, 'total_capacity_gb': 100}
conf = {
'basic.get_node_values.side_effect': [response1, response2],
}
self.driver.common.vip = self.setup_mock_vshare(m_conf=conf)
self.assertIsNone(self.driver._update_stats())
self.assertEqual(100, self.driver.stats['total_capacity_gb'])
self.assertEqual(50, self.driver.stats['free_capacity_gb'])
self.assertEqual(backend_name,
self.driver.stats['volume_backend_name'])
self.assertEqual(vendor_name, self.driver.stats['vendor_name'])
def test_get_active_fc_targets(self):
bn0 = '/vshare/state/global/*'
response0 = {'/vshare/state/global/1': 1,
'/vshare/state/global/2': 2}
bn1 = '/vshare/state/global/1/target/fc/**'
response1 = {'/vshare/state/global/1/target/fc/hba-a1/wwn':
'wwn.21:00:00:24:ff:45:fb:22'}
bn2 = '/vshare/state/global/2/target/fc/**'
response2 = {'/vshare/state/global/2/target/fc/hba-a1/wwn':
'wwn.21:00:00:24:ff:45:e2:30'}
wwpns = ['21000024ff45fb22', '21000024ff45e230']
conf = {
'basic.get_node_values.side_effect':
[response0, response1, response2],
}
self.driver.common.vip = self.setup_mock_vshare(m_conf=conf)
result = self.driver._get_active_fc_targets()
calls = [mock.call(bn0), mock.call(bn1), mock.call(bn2)]
self.driver.common.vip.basic.get_node_values.assert_has_calls(
calls, any_order=True)
self.assertEqual(wwpns, result)
def test_convert_wwns_openstack_to_vmem(self):
vmem_wwns = ['wwn.50:01:43:80:18:6b:3f:65']
openstack_wwns = ['50014380186b3f65']
result = self.driver._convert_wwns_openstack_to_vmem(openstack_wwns)
self.assertEqual(vmem_wwns, result)
def test_convert_wwns_vmem_to_openstack(self):
vmem_wwns = ['wwn.50:01:43:80:18:6b:3f:65']
openstack_wwns = ['50014380186b3f65']
result = self.driver._convert_wwns_vmem_to_openstack(vmem_wwns)
self.assertEqual(openstack_wwns, result)
|
takeshineshiro/cinder
|
cinder/tests/unit/test_v6000_fcp.py
|
Python
|
apache-2.0
| 24,905
|
# -*- coding: utf-8 -*-
import time
from ..internal.MultiAccount import MultiAccount
class SimplydebridCom(MultiAccount):
__name__ = "SimplydebridCom"
__type__ = "account"
__version__ = "0.20"
__status__ = "testing"
__config__ = [("mh_mode", "all;listed;unlisted", "Filter hosters to use", "all"),
("mh_list", "str", "Hoster list (comma separated)", ""),
("mh_interval", "int", "Reload interval in hours", 12)]
__description__ = """Simply-Debrid.com account plugin"""
__license__ = "GPLv3"
__authors__ = [("Kagenoshin", "kagenoshin@gmx.ch"),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
def grab_hosters(self, user, password, data):
html = self.load("http://simply-debrid.com/api.php", get={'list': 1})
return [x for x in html.split(';') if x]
def grab_info(self, user, password, data):
res = self.load("http://simply-debrid.com/api.php",
get={'login': 2,
'u': user,
'p': password})
data = [x.strip() for x in res.split(";")]
if str(data[0]) != "1":
return {'premium': False}
else:
return {'premium': True,
'trafficleft': -1,
'validuntil': time.mktime(time.strptime(str(data[2]), "%d/%m/%Y"))}
def signin(self, user, password, data):
res = self.load("https://simply-debrid.com/api.php",
get={'login': 1,
'u': user,
'p': password})
if res != "02: loggin success":
self.fail_login()
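# ---------------------------------------------------------------------------
# Hedged illustration (not part of the original plugin): grab_info() above
# implies the API answers with a semicolon-separated string whose first field
# is a status flag and whose third field is an expiry date in "%d/%m/%Y"
# format.  The sample value below is made up; only the field positions used
# by grab_info() are taken from the code itself.
#
#   import time
#   sample = "1;someuser;25/12/2017"          # hypothetical API response
#   fields = [x.strip() for x in sample.split(";")]
#   if fields[0] == "1":
#       info = {'premium': True,
#               'trafficleft': -1,
#               'validuntil': time.mktime(time.strptime(fields[2], "%d/%m/%Y"))}
#   else:
#       info = {'premium': False}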
|
Arno-Nymous/pyload
|
module/plugins/accounts/SimplydebridCom.py
|
Python
|
gpl-3.0
| 1,693
|
"""
WSGI config for djangobook project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangobook.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
fenexomega/DjangoBookExercises
|
djangobook/wsgi.py
|
Python
|
gpl-3.0
| 395
|
#!/usr/bin/python
###############################################################################
# This code is developed by HighEnergyDataScientests Team.
# Do not copy or modify without written approval from one of the team members.
###############################################################################
import pandas as pd
import numpy as np
import xgboost as xgb
import operator
from sklearn import preprocessing
from sklearn.cross_validation import train_test_split
import matplotlib
matplotlib.use("Agg") # Needed to save figures
import matplotlib.pyplot as plt
# seed = 260681
def create_feature_map(features):
outfile = open('xgb.fmap', 'w')
for i, feat in enumerate(features):
outfile.write('{0}\t{1}\tq\n'.format(i, feat))
outfile.close()
print("## Loading Data")
train = pd.read_csv('../inputs/train.csv')
test = pd.read_csv('../inputs/test.csv')
print("## Data Processing")
train = train.drop('QuoteNumber', axis=1)
# Lets play with some dates
train['Date'] = pd.to_datetime(pd.Series(train['Original_Quote_Date']))
# NOTE: fillna(..., inplace=True) on a column slice operates on a copy and
# never modifies `train`; assign the result back so the fill takes effect.
train[['PersonalField7', 'PropertyField3', 'PropertyField4']] = \
    train[['PersonalField7', 'PropertyField3', 'PropertyField4']].fillna('N')
train[['PersonalField7', 'PropertyField3', 'PropertyField32']] = \
    train[['PersonalField7', 'PropertyField3', 'PropertyField32']].fillna('Y')
# Create list of features
features = [s for s in train.columns.ravel().tolist() if s != 'QuoteConversion_Flag']
print("Features: ", features)
# Split data into 2 dataframes, based on whether the quote was bought or not
#df_pos = train.loc[train['QuoteConversion_Flag'] == 1]
#df_neg = train.loc[train['QuoteConversion_Flag'] == 0]
# Plot each column against Date
for f in features:
if f != 'Date':
if len(pd.unique(train[f])) == 2:
print("Unique value for ", f, " : " , pd.unique(train[f]))
plt.clf() # Clear figure
colors = np.random.rand(2)
lbl = preprocessing.LabelEncoder()
lbl.fit(list(train[f].values))
train[f] = lbl.transform(list(train[f].values))
corr = train[f].corr(train['QuoteConversion_Flag'])
print("Correlation betweeen ", f, " and the output is ", corr)
#train[[f,'QuoteConversion_Flag']].plot(style=['o','rx'])
#x = train[f].values
#y = train['QuoteConversion_Flag'].values
#plt.xlabel(f)
#plt.ylabel('QuoteConversion_Flag')
#plt.scatter(x, y, c=colors, alpha=0.5)
#lbl = preprocessing.LabelEncoder()
#lbl.fit(list(x))
#x = lbl.transform(list(x))
#print("X = ", x)
#print("Y = ", y)
#plt.plot(x, y,'ro')
#plt.savefig('plot_scatter/' + f + '.png')
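# ---------------------------------------------------------------------------
# Hedged illustration (not part of the original script): the encode-then-
# correlate step used in the loop above, shown on a tiny made-up frame so the
# logic can be checked without the Kaggle CSV files.
#
#   import pandas as pd
#   from sklearn import preprocessing
#   toy = pd.DataFrame({'PersonalField7': ['Y', 'N', 'Y', 'N'],
#                       'QuoteConversion_Flag': [1, 0, 1, 0]})
#   lbl = preprocessing.LabelEncoder().fit(list(toy['PersonalField7'].values))
#   toy['PersonalField7'] = lbl.transform(list(toy['PersonalField7'].values))
#   print(toy['PersonalField7'].corr(toy['QuoteConversion_Flag']))  # -> 1.0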
|
HighEnergyDataScientests/homesitekaggle
|
feature_analysis/analyze_unique_values.py
|
Python
|
apache-2.0
| 2,549
|
import unittest
from unittest.mock import Mock
from unittest.mock import MagicMock
from unittest.mock import patch
from unittest.mock import call
from unittest.mock import ANY
from tornado.concurrent import Future
from tornado.httpclient import AsyncHTTPClient
from tornado.gen import coroutine
from tornado.testing import AsyncTestCase
from tornado.testing import gen_test
from rdflib import Graph
#add the root proxy directory to the sys.path
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
from summarum import RankingService
class RankingServiceTest(AsyncTestCase):
def setUp(self):
AsyncTestCase.setUp(self)
self.uri = 'http://dbpedia.com/resource/Sample'
self.endpoint = Mock()
self.ranking_service = RankingService(self.uri, endpoint=self.endpoint)
@gen_test
def test_rank_calls_fetch_and_parse_on_the_summarum_endpoint(self):
future = Future()
future.set_result([])
self.endpoint.fetch_and_parse = Mock(return_value=future)
facts = {
'predicate': {
'value': 'http://dbpedia.com/resource/occupation'
},
'objects': []
}
yield self.ranking_service.rank(facts)
self.endpoint.fetch_and_parse.assert_called_once_with(self.uri)
@gen_test
def test_rank_stores_the_summarum_rankings(self):
expected_result = [
('a', 'b', 10.0),
('c', 'd', 1.0)
]
future = Future()
future.set_result(expected_result)
self.endpoint.fetch_and_parse = Mock(return_value=future)
facts = {
'predicate': {
'value': 'http://dbpedia.com/resource/occupation'
},
'objects': []
}
yield self.ranking_service.rank(facts)
self.assertEquals(self.ranking_service.rankings, expected_result)
@gen_test
def test_rank_calls_sort_and_returns_output(self):
#setup the response from the summarum endpoint
expected_result = [
('a', 'b', 10.0),
('c', 'd', 1.0)
]
future = Future()
future.set_result(expected_result)
self.endpoint.fetch_and_parse = Mock(return_value=future)
#setup the response return value from the sort call
expected_ranked_facts = {
'predicate': {},
'objects': []
}
self.ranking_service.sort = Mock(return_value = expected_ranked_facts)
#call the function under test
facts = {}
ranked_facts = yield self.ranking_service.rank(facts)
#check that sort was called
self.ranking_service.sort.assert_called_once_with(facts)
#check that rank returns the output from sort
self.assertEquals(ranked_facts, expected_ranked_facts)
def test_sort_sorts_given_facts_based_on_rankings(self):
self.ranking_service.rankings = [
('http://dbpedia.org/ontology/occupation', 'http://dbpedia.org/resource/Rapping', 12.01),
('http://dbpedia.org/ontology/occupation', 'http://dbpedia.org/resource/Actor', 5.01),
]
facts = {
'objects': [
{
'label': "Actor",
'value': "http://dbpedia.org/resource/Actor",
'type': "uri"
},
{
'label': "Rapping",
'value': "http://dbpedia.org/resource/Rapping",
'type': "uri"
}
],
'predicate': {
'label': "Occupation",
'value': "http://dbpedia.org/ontology/occupation"
}
}
result = self.ranking_service.sort(facts)
expected_result = {
'objects': [
{
'label': "Rapping",
'value': "http://dbpedia.org/resource/Rapping",
'type': "uri"
},
{
'label': "Actor",
'value': "http://dbpedia.org/resource/Actor",
'type': "uri"
}
],
'predicate': {
'label': "Occupation",
'value': "http://dbpedia.org/ontology/occupation"
}
}
self.assertEqual(result, expected_result)
def test_sort_leaves_facts_with_no_rankings_at_the_end_of_the_list(self):
self.ranking_service.rankings = [
('http://dbpedia.org/ontology/occupation', 'http://dbpedia.org/resource/Rapping', 12.01),
('http://dbpedia.org/ontology/occupation', 'http://dbpedia.org/resource/Actor', 5.01),
]
facts = {
'objects': [
{
'label': "Actor",
'value': "http://dbpedia.org/resource/Actor",
'type': "uri"
},
{
'label': "Rapping",
'value': "http://dbpedia.org/resource/Rapping",
'type': "uri"
},
{
'label': "Politician",
'value': "http://dbpedia.org/resource/Politician",
'type': "uri"
},
{
'label': "Doctor",
'value': "http://dbpedia.org/resource/Doctor",
'type': "uri"
}
],
'predicate': {
'label': "Occupation",
'value': "http://dbpedia.org/ontology/occupation"
}
}
result = self.ranking_service.sort(facts)
expected_result = {
'objects': [
{
'label': "Rapping",
'value': "http://dbpedia.org/resource/Rapping",
'type': "uri"
},
{
'label': "Actor",
'value': "http://dbpedia.org/resource/Actor",
'type': "uri"
},
#the below facts should be left unsorted
{
'label': "Politician",
'value': "http://dbpedia.org/resource/Politician",
'type': "uri"
},
{
'label': "Doctor",
'value': "http://dbpedia.org/resource/Doctor",
'type': "uri"
}
],
'predicate': {
'label': "Occupation",
'value': "http://dbpedia.org/ontology/occupation"
}
}
self.assertEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
|
dbpedia/dbpedia-widgets
|
proxy/tests/summarum/ranking_service_test.py
|
Python
|
gpl-2.0
| 6,841
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import yaml
class DictWithSourceType(dict):
"""An environment dict which keeps track of its source.
Environment files may be loaded from simple key/value files, or from
structured YAML files, and we need to render them using a different
strategy based on their source. This class adds a source_type property
to a dict which keeps track of whether the source for the dict is
yaml or simple.
"""
def __init__(self, source_type, *args):
dict.__init__(self, args)
if source_type not in ['yaml', 'simple']:
raise ValueError('source_type must be yaml or simple')
self.source_type = source_type
def parse_environment(raw_environment):
environment = DictWithSourceType('simple')
for line in raw_environment.split('\n'):
line = line.strip()
if not line:
continue
if line.startswith('#'):
continue
try:
key, value = line.split(':', 1)
except ValueError:
raise ValueError('Environment must be in key: value format')
environment[key] = value.strip()
return environment
def parse_yaml_environment(raw_environment):
environment = DictWithSourceType('yaml')
parsed_env = yaml.safe_load(raw_environment)
if type(parsed_env) != dict:
raise ValueError('Environment must be valid YAML')
environment.update(parsed_env)
return environment
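# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): runs both parsers on
# inline strings.  The sample keys ("namespace", "region") are illustrative
# only and carry no special meaning here.
if __name__ == '__main__':
    simple_src = "# a comment\nnamespace: example\nregion: us-east-1\n"
    simple_env = parse_environment(simple_src)
    print(simple_env.source_type)    # simple
    print(simple_env['region'])      # us-east-1

    yaml_src = "namespace: example\nregion: us-east-1\n"
    yaml_env = parse_yaml_environment(yaml_src)
    print(yaml_env.source_type)      # yaml
    print(yaml_env['namespace'])     # example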
|
remind101/stacker
|
stacker/environment.py
|
Python
|
bsd-2-clause
| 1,543
|
from sklearn.cluster import DBSCAN
from common import constants
import numpy as np
def cluster_points(coordinates, eps, min_samples, n_jobs=1):
"""Given coordinates, function returns the number of clusters in the
set of coordinates and a list of integer labels corresponding to
the input coordinate list
Arguments:
coordinates: a sequence of (lat, lon) tuples
eps: the cluster size in radial degrees
min_samples: the size of the smallest cluster
n_jobs: number of CPUs to use to compute the clusters
    Returns:
        db: the fitted sklearn DBSCAN object; pass it to count_clusters to
            obtain the number of clusters and the per-point labels
"""
db = DBSCAN(eps=eps,
min_samples=min_samples,
n_jobs=n_jobs).fit(coordinates)
return db
def count_clusters(db):
labels = db.labels_
n_clusters = len(set(labels)) - (1 if -1 in labels else 0)
return (n_clusters, labels)
def compute_centers(clusters, locations, suppress_negative=True):
"""Compute centroids of clusters.
Arguments:
clusters: sklearn cluster object with labels_ attribute
locations: the x,y coordinates of the items
suppress_negative: if True, will suppress any cluster label which is -1. -1 means "not assigned to a cluster".
Returns:
centers: dictionary of label -> centroids
sizes: dictionary of label -> the sizes of the centroid (number of members)
"""
points = {}
    print(clusters)  # debug output: the fitted DBSCAN object
for i, label in enumerate(clusters.labels_):
if suppress_negative and label == -1:
continue
if label not in points:
points[label] = []
points[label].append( (locations[i][0], locations[i][1]))
centers = {}
sizes = {}
for label in points:
centers[label] = np.mean(points[label], axis=0)
sizes[label] = len(points[label])
return centers, sizes
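# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): clusters a handful
# of synthetic (lat, lon) points.  The eps / min_samples values below are
# illustrative only, not tuned values from the project, and the demo assumes
# the module's own imports (common.constants) resolve as in the repository.
if __name__ == '__main__':
    coords = [(37.000, -122.000), (37.001, -122.001), (37.002, -122.000),
              (40.000, -100.000), (40.001, -100.002), (40.000, -100.001)]
    db = cluster_points(coords, eps=0.01, min_samples=2)
    n_clusters, labels = count_clusters(db)
    centers, sizes = compute_centers(db, coords)
    print(n_clusters)   # expected: 2
    print(sizes)        # e.g. {0: 3, 1: 3}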
|
google/eclipse2017
|
common/cluster_points.py
|
Python
|
apache-2.0
| 1,883
|
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility methods common to multiple commands."""
import json
import subprocess
import sys
import tempfile
try:
# If we are running in Python 2, builtins is available in 'future'.
from builtins import input as read_input
except Exception:
# We don't want to require the installation of future, so fallback
# to using raw_input from Py2.
read_input = raw_input # noqa: F821
def prompt_for_confirmation(
args,
message,
question='Do you want to continue',
accept_by_default=False):
"""Prompt the user for confirmation.
Args:
args: The Namespace returned by argparse
message: A preliminary message explaining the question to the user.
question: The prompt for the user to either accept or decline.
accept_by_default: If True, then an empty response is treated as
acceptance. Otherwise, an empty response is treated as declining.
Returns:
True iff the user accepted.
"""
print(message)
if args.quiet:
return accept_by_default
question_suffix = ' (Y/n)?: ' if accept_by_default else ' (y/N)?: '
full_question = question + question_suffix
resp = read_input(full_question)
while resp and resp[0] not in ['y', 'Y', 'n', 'N']:
print('Unexpected response {}, please enter "y" or "n"'.format(resp))
resp = read_input(full_question)
if len(resp) < 1:
return accept_by_default
return len(resp) < 1 or resp[0] in ['y', 'Y']
class InvalidInstanceException(Exception):
_MESSAGE = (
'The specified instance, {}, does not appear '
'to have been created by the `datalab` tool, or '
'from any GCE Deeplearning images. Therefore it '
'cannot be managed by `datalab` tool.')
def __init__(self, instance_name):
super(InvalidInstanceException, self).__init__(
InvalidInstanceException._MESSAGE.format(instance_name))
class NoSuchInstanceException(Exception):
_MESSAGE = (
'The specified instance, {}, does not exist in any zone.')
def __init__(self, instance_name):
super(NoSuchInstanceException, self).__init__(
NoSuchInstanceException._MESSAGE.format(instance_name))
class MissingZoneFlagException(Exception):
_DEFAULT_MESSAGE = (
'You must specify a zone using the --zone flag.')
_INSTANCE_MESSAGE = (
'You must specify a zone for the instance {} using the --zone flag.')
    @staticmethod
    def get_message(instance_name=None):
if not instance_name:
return MissingZoneFlagException._DEFAULT_MESSAGE
else:
return MissingZoneFlagException._INSTANCE_MESSAGE.format(
instance_name)
def __init__(self, instance_name=None):
super(MissingZoneFlagException, self).__init__(
MissingZoneFlagException.get_message(instance_name))
def call_gcloud_quietly(args, gcloud_surface, cmd, report_errors=True):
"""Call `gcloud` and silence any output unless it fails.
Normally, the `gcloud` command line tool can output a lot of
messages that are relevant to users in general, but may not
be relevant to the way a Datalab instance is created.
For example, creating a persistent disk will result in a
message that the disk needs to be formatted before it can
be used. However, the instance we create formats the disk
if necessary, so that message is erroneous in our case.
These messages are output regardless of the `--quiet` flag.
This method allows us to avoid any confusion from those
messages by redirecting them to a temporary file.
In the case of an error in the `gcloud` invocation, we
still print the messages by reading from the temporary
file and printing its contents.
Args:
args: The Namespace returned by argparse
gcloud_surface: Function that can be used for invoking `gcloud <surface>`
cmd: The subcommand to run
report_errors: Whether or not to report errors to the user
Raises:
subprocess.CalledProcessError: If the `gcloud` command fails
"""
with tempfile.TemporaryFile() as stdout, \
tempfile.TemporaryFile() as stderr:
try:
cmd = ['--quiet'] + cmd
gcloud_surface(args, cmd, stdout=stdout, stderr=stderr)
except subprocess.CalledProcessError:
if report_errors:
stdout.seek(0)
stderr.seek(0)
print(stdout.read().decode('utf-8'))
sys.stderr.write(stderr.read())
raise
stderr.seek(0)
gcloud_stderr = stderr.read().decode('utf-8')
if 'WARNING' in gcloud_stderr:
sys.stderr.write(gcloud_stderr)
return
def prompt_for_zone(args, gcloud_compute, instance=None):
"""Prompt the user to select a zone.
Args:
args: The Namespace instance returned by argparse
gcloud_compute: Function that can be used to invoke `gcloud compute`
Raises:
subprocess.CalledProcessError: If a nested `gcloud` calls fails
NoSuchInstanceException: If the user specified an instance that
does not exist in any zone.
"""
matching_zones = []
list_cmd = ['zones', '--quiet', 'list', '--format=value(name)']
if instance:
        # List the zones of matching instances instead of all zones.
list_cmd = [
'instances', 'list', '--quiet', '--filter',
'name={}'.format(instance), '--format', 'value(zone)']
with tempfile.TemporaryFile() as stdout, \
tempfile.TemporaryFile() as stderr:
try:
gcloud_compute(args, list_cmd,
stdout=stdout, stderr=stderr)
stdout.seek(0)
matching_zones = stdout.read().decode('utf-8').strip().splitlines()
except subprocess.CalledProcessError:
stderr.seek(0)
sys.stderr.write(stderr.read())
raise
if len(matching_zones) == 1:
# There is only one possible zone, so just return it.
return matching_zones[0]
elif (instance and len(matching_zones) == 0):
raise NoSuchInstanceException(instance)
if args.quiet:
raise MissingZoneFlagException(instance)
zone_number = 1
zone_map = {}
print('Please specify a zone from one of:')
for zone in matching_zones:
zone_map[zone_number] = zone
print(' [{}] {}'.format(zone_number, zone))
zone_number += 1
selected = read_input('Your selected zone: ')
try:
zone_number = int(selected)
return zone_map[zone_number]
except Exception:
if selected not in matching_zones:
print('Zone {} not recognized'.format(selected))
return prompt_for_zone(args, gcloud_compute, instance=instance)
return selected
def flatten_metadata(metadata):
"""Flatten the given API-style dictionary into a Python dictionary.
This takes a mapping of key-value pairs as returned by the Google
Compute Engine API, and converts it to a Python dictionary.
The `metadata` argument is an object that has an `items` field
containing a list of key->value mappings. Each key->value mapping
is an object with a `key` field and a `value` field.
Example:
Given the following input:
{ "items": [
{ "key": "a",
"value": 1
},
{ "key": "b",
"value": 2
},
],
"fingerprint": "<something>"
}
... this will return {"a": 1, "b": 2}
"""
items = metadata.get('items', [])
result = {}
for mapping in items:
result[mapping.get('key', '')] = mapping.get('value', '')
return result
def _check_instance_allowed(instance, status_tags_and_metadata):
"""Check that the given "tags" object contains `datalab`.
This is used to verify that a VM was created by the `datalab create`
command or was from GCE Deeplearning images, by checking if the VM
description contains a tag of 'datalab' or includes a c2d-tensorflow
licence string.
Args:
instance: The name of the instance to check
status_tags_and_metadata: An object containing the result of GCE
VM instance description.
Raises:
InvalidInstanceException: If the check fails.
"""
tags = status_tags_and_metadata.get('tags', {})
items = tags.get('items', [])
if 'datalab' in items:
return
else:
_license = ('https://www.googleapis.com/compute/v1/projects/'
'click-to-deploy-images/global/licenses/c2d-tensorflow')
disks = status_tags_and_metadata.get('disks', [])
for disk in disks:
if _license in disk.get('licenses', []):
return
raise InvalidInstanceException(instance)
def describe_instance(args, gcloud_compute, instance):
"""Get the status and metadata of the given Google Compute Engine VM.
This will prompt the user to select a zone if necessary.
Args:
args: The Namespace instance returned by argparse
gcloud_compute: Function that can be used to invoke `gcloud compute`
instance: The name of the instance to check
Returns:
A tuple of the string describing the status of the instance
(e.g. 'RUNNING' or 'TERMINATED'), and the list of metadata items.
Raises:
subprocess.CalledProcessError: If the `gcloud` call fails
ValueError: If the result returned by gcloud is not valid JSON
InvalidInstanceException: If the instance was not created by
running `datalab create`.
NoSuchInstanceException: If the user specified an instance that
does not exist in any zone.
"""
get_cmd = ['instances', 'describe', '--quiet']
if args.zone:
get_cmd.extend(['--zone', args.zone])
get_cmd.extend(
['--format', 'json(status,tags.items,metadata.items,disks[].licenses)',
instance])
with tempfile.TemporaryFile() as stdout, \
tempfile.TemporaryFile() as stderr:
try:
gcloud_compute(args, get_cmd, stdout=stdout, stderr=stderr)
stdout.seek(0)
json_result = stdout.read().decode('utf-8').strip()
status_tags_and_metadata = json.loads(json_result)
_check_instance_allowed(instance, status_tags_and_metadata)
status = status_tags_and_metadata.get('status', 'UNKNOWN')
metadata = status_tags_and_metadata.get('metadata', {})
return (status, flatten_metadata(metadata))
except subprocess.CalledProcessError:
if args.zone:
stderr.seek(0)
sys.stderr.write(stderr.read())
raise
else:
args.zone = prompt_for_zone(
args, gcloud_compute, instance=instance)
return describe_instance(
args, gcloud_compute, instance)
return ('UNKNOWN', [])
def instance_notebook_disk(args, gcloud_compute, instance):
"""Get the config for the notebooks disk attached to the instance.
This returns None if there is no notebooks disk attached.
Args:
args: The Namespace instance returned by argparse
gcloud_compute: Function that can be used to invoke `gcloud compute`
instance: The name of the instance to check
Returns:
An object containing the configuration for attaching the disk to
the instance.
Raises:
subprocess.CalledProcessError: If the `gcloud` call fails
"""
get_cmd = ['instances', 'describe', '--quiet']
if args.zone:
get_cmd.extend(['--zone', args.zone])
get_cmd.extend(['--format', 'json', instance])
with tempfile.TemporaryFile() as stdout, \
tempfile.TemporaryFile() as stderr:
try:
gcloud_compute(args, get_cmd, stdout=stdout, stderr=stderr)
stdout.seek(0)
instance_json = json.loads(stdout.read().decode('utf-8').strip())
disk_configs = instance_json.get('disks', [])
for cfg in disk_configs:
if cfg['deviceName'] == 'datalab-pd':
return cfg
# There is no notebooks disk attached. This can happen
# if the user manually detached it.
return None
except subprocess.CalledProcessError:
stderr.seek(0)
sys.stderr.write(stderr.read())
raise
def maybe_prompt_for_zone(args, gcloud_compute, instance):
"""Prompt for the zone of the given VM if it is ambiguous.
This will update the args.zone flag to point to the selected zone.
Args:
args: The Namespace instance returned by argparse
gcloud_compute: Function that can be used to invoke `gcloud compute`
instance: The name of the instance to check
Raises:
subprocess.CalledProcessError: If the `gcloud` call fails
InvalidInstanceException: If the instance was not created by
running `datalab create`.
NoSuchInstanceException: If the user specified an instance that
does not exist in any zone.
"""
describe_instance(args, gcloud_compute, instance)
return
def print_warning_messages(args):
"""Return whether or not warning messages should be printed.
Args:
args: The Namespace instance returned by argparse
Returns:
True iff the verbosity has been set to a level that includes
warning messages.
"""
return args.verbosity in ['debug', 'info', 'default', 'warning']
def print_info_messages(args):
"""Return whether or not info messages should be printed.
Args:
args: The Namespace instance returned by argparse
Returns:
True iff the verbosity has been set to a level that includes
info messages.
"""
return args.verbosity in ['debug', 'info', 'default']
def print_debug_messages(args):
"""Return whether or not debug messages should be printed.
Args:
args: The Namespace instance returned by argparse
Returns:
True iff the verbosity has been set to a level that includes
debug messages.
"""
return args.verbosity == 'debug'
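# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): exercises the pure
# helpers above that need neither `gcloud` nor user input.  The Namespace
# fields below (quiet, verbosity) are assumptions based only on how this
# file reads them.
if __name__ == '__main__':
    import argparse

    metadata = {'items': [{'key': 'a', 'value': 1},
                          {'key': 'b', 'value': 2}],
                'fingerprint': '<something>'}
    print(flatten_metadata(metadata))        # {'a': 1, 'b': 2}

    args = argparse.Namespace(quiet=True, verbosity='info')
    # With quiet set, the prompt is skipped and accept_by_default is returned.
    print(prompt_for_confirmation(args, 'Demo message', accept_by_default=True))
    print(print_info_messages(args))         # True
    print(print_debug_messages(args))        # False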
|
googledatalab/datalab
|
tools/cli/commands/utils.py
|
Python
|
apache-2.0
| 14,859
|
"""This is a demonstration script for Gittip's test suite.
"""
# Layout
# ======
# The Gittip test suite lives at tests/test_*.py. The tests/ directory is not a
# Python package (there is no __init__.py). Think of it as holding test scripts
# to be run via nosetest (maybe py.test would work too?). Helpers are defined
# in the gittip.testing module.
from gittip import testing
# Basic Pattern
# =============
# First, import something from the gittip library. Here I'm defining a function
# inline for demonstration purposes.
def greet():
return "Greetings, program!"
# Then, write a test case. Here's what a canonical test case in the Gittip test
# suite looks like:
def test_greet_greets_programs():
expected = "Greetings, program!"
actual = greet()
assert actual == expected, actual
# The name of the test case should be a sentence, with a subject and a
# predicate. The subject should be the thing under test, and the predicate
# should state the expected behavior we're testing for.
def test_greet_still_greets_programs():
# More complex tests will start with some setup. Here in this test we don't
# have any. The last three lines of each test case look like the following.
    # Ask questions first, shoot later: our expectation always precedes the
# performance of the test.
expected = "Greetings, program!"
# Perform the test, storing the result in actual.
actual = greet()
# Compare reality with our expectation, and, if they don't match, inform
# the viewer of reality.
assert actual == expected, actual
# Context Managers
# ================
# Gittip's test suite uses context managers to manage testing state instead of
# test classes with setup/teardown methods. The reason is to make the test
# suite flatter and easier to follow, and to keep a tight coupling between test
# fixture and test cases. We want to avoid bloated super-fixtures.
def test_inserting_inserts():
# Gittip's fundamental context manager for testing is gittip.testing.load.
# It's called that because its primary function is to load data into the
# database. When the context manager exits, the database is wiped.
with testing.load() as context:
# The context object gives you access to the database. The db attribute
# here is the usual PostgresManager that is used throughout Gittip.
context.db.execute("INSERT INTO participants VALUES ('foo')")
# There's a dump method on context that gives you all the data in the
# database, as a mapping of table names to mappings of id to row dict.
actual = context.dump()
# The context.diff method gives you a diff of the state of the database
# since you entered the context. With compact=True it returns a mapping
# of the names of tables that have changed, to a list of ints showing
# the number of rows that have been inserted, updated, and deleted,
# respectively.
actual = context.diff(compact=True)
# If the expectation can be stated succinctly, it's acceptable to
# inline it in the assertion, rather than defining it separately.
assert actual == {"participants": [1,0,0]}, actual
def test_something_changes_something():
# The testing.load callable takes a data definition as positional
# arguments. {str,unicode} is interpreted as a table name, and {dict,list,
# tuple} is interpreted as a row of data to be inserted into the most
# recently named table. Generally you'll end up defining "data" and then
# calling testing.load(*data), as it won't fit on one line.
with testing.load("participants", ("foo",)) as context:
context.db.execute("UPDATE participants SET statement='BLAM!!!' "
"WHERE id='foo'")
# Calling context.diff without compact=True gives you a mapping of the
# names of tables that have changed to a mapping with keys 'inserts',
# 'updates', and 'deletes'. The values for inserts and deletes are
# lists of row dicts containing the new and old data, respectively. The
# value for updates is a list of dicts containing only the data that
# has changed (and the primary key).
expected = {"id": "foo", "statement": "BLAM!!!"}
actual = context.diff()['participants']['updates'][0]
assert actual == expected, actual
# Wrappers
# --------
# As a rule of thumb, test cases should have one assertion each. Write wrappers
# for test cases that want slightly varying but similar state. Start by writing
# them in the same file as the test cases, and if they turn out to be useful in
# multiple test scripts, we'll move them into gittip.testing.
def let_them_eat_cake(): # For demonstration; this would be imported.
"""Simulate the gittip application doing something.
"""
import gittip
rec = gittip.db.fetchone('SELECT id FROM participants')
return "{id} eats cake.".format(**rec)
def participant(participant_id): # This is a context wrapper.
"""Wrap testing.load to install a participant.
"""
context = testing.load("participants", (participant_id,))
return context
def test_foo_eats_cake():
with participant("foo"):
actual = let_them_eat_cake()
assert actual == "foo eats cake.", actual
def test_bar_eats_cake():
with participant("bar"):
actual = let_them_eat_cake()
assert actual == "bar eats cake.", actual
# NB: There is one line between related test cases instead of two, as a way to
# group them together.
|
MikeFair/www.gittip.com
|
tests/test_suite_intro.py
|
Python
|
cc0-1.0
| 5,623
|
# Tour through the asteroids
from py4j.clientserver import ClientServer, JavaParameters
gateway = ClientServer(java_parameters=JavaParameters(auto_convert=True))
gs = gateway.entry_point
# Camera params
gs.setCameraSpeed(1.0)
gs.setRotationCameraSpeed(1.0)
gs.setTurningCameraSpeed(1.0)
gs.setCinematicCamera(True)
gs.setFov(65.0)
# Visibility
gs.setVisibility("element.planets", True)
gs.setVisibility("element.atmospheres", True)
gs.setVisibility("element.stars", True)
gs.setVisibility("element.moons", True)
gs.setVisibility("element.satellites", True)
gs.setVisibility("element.galaxies", True)
gs.setVisibility("element.milkyway", True)
gs.setVisibility("element.asteroids", False)
gs.setVisibility("element.orbits", False)
gs.setVisibility("element.labels", False)
gs.setVisibility("element.constellations", False)
gs.setVisibility("element.boundaries", False)
gs.setVisibility("element.equatorial", False)
gs.setVisibility("element.ecliptic", False)
gs.setVisibility("element.galactic", False)
gs.setVisibility("element.clusters", False)
gs.setVisibility("element.meshes", False)
gs.setVisibility("element.titles", False)
gs.setCrosshairVisibility(False)
# Parallel view
gs.setStereoscopicProfile(3)
gs.setStereoscopicMode(True)
gs.configureFrameOutput(1920, 1080, 30, "/home/tsagrista/.gaiasky/frames/3d-asteroids_tour/", "gs")
stdwait = 5.0
stdwaitlong = 10.0
# Time
gs.stopSimulationTime()
gs.setSimulationTime(2018, 4, 25, 10, 0, 0, 0)
# Camera state
gs.setCameraPosition([-1294.3864339045447 * 1e6,156.30069319755347 * 1e6,-1150.2743059128413 * 1e6])
gs.setCameraDirection([0.739144930622408,-0.09348275378626529,0.6670275453680645])
gs.setCameraUp([-0.1374839626900124,0.9485312542098752,0.2852834025843425])
gs.setCameraFocus("Sun")
gs.sleep(stdwait)
# Uncomment next line to save still frames
#gs.setFrameOutput(True)
gs.sleep(stdwait)
# Enable orbits
gs.setVisibility("element.orbits", True)
gs.sleep(stdwait)
gs.setVisibility("element.asteroids", True)
gs.sleep(stdwait)
gs.goToObject("Sun", 0.05, 0.0)
gs.cameraRotate(0.5, 0.0)
gs.sleep(stdwaitlong)
gs.cameraStop()
gs.setVisibility("element.orbits", False)
gs.sleep(stdwait)
gs.startSimulationTime()
initime = 4000.0
endtime = 4000000.0
def frange(x, y, jump):
while x < y:
yield x
x += jump
# Ramp the pace over ~10 seconds: 200 steps of 0.05 s each
step = (endtime - initime) / 200.0
gs.setSimulationPace(initime)
for t in frange(initime, endtime, step):
gs.setSimulationPace(t)
gs.sleep(0.05)
gs.cameraStop()
gs.sleep(stdwait)
gs.cameraRotate(0.0, 0.1)
gs.sleep(stdwaitlong)
gs.cameraRotate(0.0, -0.9)
gs.goToObject("Sun", 0.01, 0.0)
gs.stopSimulationTime()
gs.cameraStop()
gs.setFrameOutput(False)
gs.setStereoscopicMode(False)
gateway.shutdown()
|
langurmonkey/gaiasky
|
assets/scripts/showcases/3d-asteroids-tour.py
|
Python
|
mpl-2.0
| 2,766
|
""" Controller for TSTools that handles slots/signals communication
"""
import copy
from datetime import datetime as dt
from functools import partial
import itertools
import logging
import matplotlib as mpl
import numpy as np
try:
import palettable
HAS_PALETTABLE = True
except:
HAS_PALETTABLE = False
from PyQt4 import QtCore, QtGui
import qgis
from . import config
from . import plots
from . import settings
from .utils import actions
from .logger import qgis_log
from .ts_driver.ts_manager import tsm
logger = logging.getLogger('tstools')
ITERABLE = (list, tuple, np.ndarray)
# PyQt -- moveToThread and functools.partial -- why doesn't it work?
# See:
# http://stackoverflow.com/questions/23317195/pyqt-movetothread-does-not-work-when-using-partial-for-slot
class Worker(QtCore.QObject):
update = QtCore.pyqtSignal(float)
finished = QtCore.pyqtSignal()
errored = QtCore.pyqtSignal(str)
def __init__(self, parent, pct_increment=10.0):
super(Worker, self).__init__()
parent.fetch_data.connect(self.fetch)
self.pct_increment = pct_increment
@QtCore.pyqtSlot(object, object, str)
def fetch(self, ts, pos, crs_wkt):
""" Fetch a point from a time series driver, emitting progress
Progress emitted incrementally to not overwhelm network communication
Arg:
ts (time series driver): Time series drivers (e.g., specified
under "TSTools.drivers" entry point)
pos (tuple): Point
crs_wkt (str): Coordinate reference system as WKT
"""
logger.info('Fetching from QThread (id: %s)' %
hex(self.thread().currentThreadId()))
# Fetch data
pct = 0
try:
for percent in ts.fetch_data(pos[0], pos[1], crs_wkt):
if percent > pct + self.pct_increment:
self.update.emit(percent)
pct = percent
except Exception as e:
self.errored.emit(e.message)
else:
self.update.emit(100.0)
self.finished.emit()
class PlotHandler(QtCore.QObject):
""" Workaround for connecting `pick_event` signals to `twinx()` axes
Forwards `pick_event` signal to an axis onward.
Args:
canvas (matplotlib.backend_bases.FigureCanvasBase): figure canvas to
connect
        tolerance (float or int): tolerance for picking a plot point, in
            transformed (display) coordinates (default: 2)
"""
picked = QtCore.pyqtSignal(set)
def __init__(self, canvas, tolerance=2):
super(PlotHandler, self).__init__()
self.canvas = canvas
self.tolerance = tolerance
self.cid = self.canvas.mpl_connect('button_release_event', self)
def __call__(self, event):
# Plot X/Y clicked
x, y = event.x, event.y
# Bands plotted on each axis
plotted = (settings.plot['y_axis_1_band'],
settings.plot['y_axis_2_band'])
# Store output as a set
images = set()
for ax, _plotted in zip(event.canvas.axes, plotted):
# If nothing plotted on this axis, continue
if not np.any(_plotted):
continue
# Setup transform for going from data to plot coordinates
trans = ax.transData
# Check bands that are plotted on current axis
on = np.where(_plotted)[0]
on_series = settings.plot_series[on]
on_band = settings.plot_band_indices[on]
for i, j in zip(on_series, on_band):
# Switch based on plot type
if isinstance(event.canvas, plots.TSPlot):
_X, _y = tsm.ts.get_data(i, j, mask=False)
_x = _X['ordinal']
elif isinstance(event.canvas, plots.ResidualPlot):
residuals = tsm.ts.get_residuals(i, j)
if residuals is None:
return
_x = np.array([dt.toordinal(_d) for _d in
np.concatenate(residuals[0])])
_y = np.concatenate(residuals[1])
elif isinstance(event.canvas, plots.DOYPlot):
_X, _y = tsm.ts.get_data(i, j, mask=False)
_x = _X['doy']
# Transform data into plot coordinates
trans_coords = trans.transform(np.vstack((_x, _y)).T)
_x, _y = trans_coords[:, 0], trans_coords[:, 1]
delta_x = np.abs(_x - x)
delta_y = np.abs(_y - y)
delta = np.linalg.norm(np.vstack((delta_x, delta_y)), axis=0)
clicked = np.where(delta < self.tolerance)[0]
for _clicked in clicked:
# Add index of series and index of image
images.add((i, _clicked))
self.picked.emit(images)
def disconnect(self):
self.canvas.mpl_disconnect(self.cid)
class Controller(QtCore.QObject):
""" Controller class for handling signals/slots
Attributes:
controls (ControlPanel): control panel instance
plots (list): list of Plot* instances
"""
controls = None
plots = []
working = False
worker = None
work_thread = None
fetch_data = QtCore.pyqtSignal(object, object, str)
initialized = False
def __init__(self, iface, controls, plots, parent=None):
super(Controller, self).__init__()
self.iface = iface
self.controls = controls
self.plots = plots
self.plot_events = [] # Matplotlib event handlers
# TIMESERIES
def get_timeseries(self, driver, location, custom_config=None):
""" Initialize timeseries selected by user
"""
try:
tsm.ts = driver(location, config=custom_config)
except Exception as e:
msg = 'Failed to open timeseries: {msg}'.format(msg=e.message)
qgis_log(msg, level=logging.ERROR, duration=5)
raise # TODO: REMOVE EXCEPTION
else:
qgis_log('Loaded timeseries: {d}'.format(d=tsm.ts.description))
self.disconnect()
self.config_closed()
self._ts_init()
self.initialized = True
def _ts_init(self):
""" Initialize control and plot views with data from timeseries driver
"""
# Connect QgsMapLayerRegistry signals
qgis.core.QgsMapLayerRegistry.instance().layersAdded.connect(
self._map_layers_added)
qgis.core.QgsMapLayerRegistry.instance().layersWillBeRemoved.connect(
self._map_layers_removed)
# Prepare TS driver data for controls
self._init_plot_options()
self._init_plot_symbology()
self._init_raster_symbology()
# Setup controls
self.controls.init_ts()
self.controls.plot_options_changed.connect(self.update_plot)
self.controls.image_table_row_clicked.connect(self._add_remove_image)
self.controls.symbology_applied.connect(
lambda: actions.apply_symbology())
# Setup plots
self._init_plots()
self.update_plot()
# PLOT TOOL
@QtCore.pyqtSlot(object)
def plot_request(self, pos):
if self.working:
qgis_log('Unable to initiate plot request: already working',
logging.INFO)
else:
qgis_log('Clicked a point: {p} ({t})'.format(p=pos, t=type(pos)),
level=logging.INFO)
crs = self.iface.mapCanvas().mapSettings().destinationCrs()
crs_wkt = crs.toWkt()
# Setup QProgressBar
self.progress_bar = self.iface.messageBar().createMessage(
'Retrieving data')
self.progress = QtGui.QProgressBar()
self.progress.setValue(0)
self.progress.setMaximum(100)
self.progress.setAlignment(QtCore.Qt.AlignLeft |
QtCore.Qt.AlignVCenter)
self.but_cancel = QtGui.QPushButton('Cancel')
self.but_cancel.pressed.connect(self.plot_request_cancel)
self.progress_bar.layout().addWidget(self.progress)
self.progress_bar.layout().addWidget(self.but_cancel)
self.iface.messageBar().pushWidget(
self.progress_bar, self.iface.messageBar().INFO)
# Setup worker and thread
self.working = True
self.work_thread = QtCore.QThread()
# self.worker = Worker()
self.worker = Worker(self)
self.worker.moveToThread(self.work_thread)
self.worker.update.connect(self.plot_request_update)
self.worker.finished.connect(self.plot_request_finish)
self.worker.errored.connect(self.plot_request_error)
self.work_thread.started.connect(partial(self.plot_request_start,
tsm.ts,
(pos[0], pos[1]),
crs_wkt))
if (getattr(self.controls, 'custom_form', None) is not None and
hasattr(tsm.ts, 'set_custom_controls')):
try:
options = self.controls.custom_form.get()
tsm.ts.set_custom_controls(options)
except BaseException as e:
logger.warning(
'Could not use custom controls for timeseries')
qgis_log(str(e), level=logging.WARNING)
self.controls.custom_form.reset()
return
# Run thread
logger.info('Timeseries (id: {i})'.format(i=hex(id(tsm.ts))))
logger.info('Current thread: ({i})'.format(
i=hex(self.thread().currentThreadId())))
self.work_thread.start()
logger.info('Started QThread (id: {i})'.format(
i=hex(self.work_thread.currentThreadId())))
@QtCore.pyqtSlot(object, tuple, str)
def plot_request_start(self, ts, pos, crs_wkt):
logger.info('Fetch data signal sent for point: '
'{p} ({t})'.format(p=pos, t=type(pos)))
self.fetch_data.emit(ts, pos, crs_wkt)
@QtCore.pyqtSlot(float)
def plot_request_update(self, progress):
if self.working is True:
self.progress.setValue(progress)
@QtCore.pyqtSlot()
def plot_request_finish(self):
# Get results in this thread since it's so prone to error
try:
tsm.ts.fetch_results()
except Exception as e:
logger.error('Could not fetch results: %s' % e.message)
raise
finally:
# Stop 'working'
self.working = False
self.work_thread.quit()
# Clear GUI messages
logger.info('Plot request finished')
self.iface.messageBar().clearWidgets()
# Update plots
self.update_plot()
# Add geometry from clicked point
self.plot_request_geometry()
@QtCore.pyqtSlot(str)
def plot_request_error(self, txt):
self.iface.messageBar().clearWidgets()
qgis_log(txt, logging.ERROR, duration=5)
self.working = False
self.work_thread.quit()
@QtCore.pyqtSlot()
def plot_request_cancel(self):
self.plot_request_finish()
def plot_request_geometry(self):
""" Add polygon of geometry from clicked X/Y coordinate """
# Record currently selected feature so we can restore it
last_selected = self.iface.activeLayer()
geom_wkt, proj_wkt = tsm.ts.get_geometry()
geom_qgis = qgis.core.QgsGeometry.fromWkt(geom_wkt)
proj_qgis = qgis.core.QgsCoordinateReferenceSystem()
proj_qgis.createFromWkt(proj_wkt)
# Update existing layer
if settings.canvas['click_layer_id'] is not None:
# Update to new row/column
vlayer = qgis.core.QgsMapLayerRegistry.instance().mapLayers()[
settings.canvas['click_layer_id']]
vlayer.startEditing()
pr = vlayer.dataProvider()
# attrs = pr.attributeIndexes()
for feat in vlayer.getFeatures():
vlayer.changeAttributeValue(feat.id(), 0, tsm.ts.pixel_pos)
vlayer.changeGeometry(feat.id(), geom_qgis)
vlayer.setCrs(proj_qgis)
vlayer.commitChanges()
vlayer.updateExtents()
vlayer.triggerRepaint()
# Create new layer
else:
uri = 'polygon?crs=%s' % proj_wkt
vlayer = qgis.core.QgsVectorLayer(uri, 'Query', 'memory')
pr = vlayer.dataProvider()
vlayer.startEditing()
pr.addAttributes([
qgis.core.QgsField('position', QtCore.QVariant.String)
])
feat = qgis.core.QgsFeature()
feat.setGeometry(geom_qgis)
feat.setAttributes([tsm.ts.pixel_pos])
pr.addFeatures([feat])
# See: http://lists.osgeo.org/pipermail/qgis-developer/2011-April/013772.html
props = {
'color_border': '255, 0, 0, 255',
'style': 'no',
'style_border': 'solid',
'width': '0.40'
}
s = qgis.core.QgsFillSymbolV2.createSimple(props)
vlayer.setRendererV2(qgis.core.QgsSingleSymbolRendererV2(s))
vlayer.commitChanges()
vlayer.updateExtents()
vlayer_id = qgis.core.QgsMapLayerRegistry.instance().addMapLayer(
vlayer).id()
if vlayer_id:
settings.canvas['click_layer_id'] = vlayer_id
else:
logger.warning('Could not get ID of "query" layer')
# Restore active layer
self.iface.setActiveLayer(last_selected)
# LAYER MANIPULATION
@QtCore.pyqtSlot(set)
def _plot_add_layer(self, idx):
""" Add or remove image described by idx
Args:
idx (list): list of tuples (index of series, index of image) to add
or remove
"""
for i_series, i_img in idx:
self._add_remove_image(i_series, i_img)
@QtCore.pyqtSlot(list)
def _map_layers_added(self, layers):
""" Performs necessary functions if added layers in timeseries
Check if all newly added layers are part of timeseries. If so, then:
- Set timeseries image checkbox in images table to checked state
Args:
layers (QList<QgsMapLayer *>): list of QgsMapLayers
"""
for layer in layers:
for i, series in enumerate(tsm.ts.series):
rows_added = [row for row, path in
enumerate(series.images['path'])
if layer.source() == path]
for row in rows_added:
logger.debug('Added image: {img}'.format(
img=series.images['id'][row]))
item = self.controls.image_tables[i].item(row, 0)
if item:
if item.checkState() == QtCore.Qt.Unchecked:
item.setCheckState(QtCore.Qt.Checked)
@QtCore.pyqtSlot(list)
def _map_layers_removed(self, layer_ids):
""" Perform necessary functions if removed layers in timeseries
Args:
layer_ids (QStringList theLayerIds): list of layer IDs
"""
for layer_id in layer_ids:
# Get QgsMapLayer instance for ID
layer = qgis.core.QgsMapLayerRegistry.instance().mapLayers()[
layer_id]
# Remove from settings
if layer in settings.image_layers:
settings.image_layers.remove(layer)
# Remove from table
for i, series in enumerate(tsm.ts.series):
rows_removed = [
row for row, (_id, path) in
enumerate(zip(series.images['id'], series.images['path']))
if _id in layer_id or path in layer_id
]
for row in rows_removed:
item = self.controls.image_tables[i].item(row, 0)
if item and item.checkState() == QtCore.Qt.Checked:
item.setCheckState(QtCore.Qt.Unchecked)
# Check for click layer
if settings.canvas['click_layer_id'] == layer_id:
logger.debug('Removed Query layer')
settings.canvas['click_layer_id'] = None
@QtCore.pyqtSlot(int, int)
def _add_remove_image(self, i_series, i_image):
""" Add or remove image at index `i_image`
"""
layers = qgis.core.QgsMapLayerRegistry.instance().mapLayers().values()
filename = tsm.ts.series[i_series].images['path'][i_image]
# Add image
if filename not in [layer.source() for layer in layers]:
rlayer = qgis.core.QgsRasterLayer(
tsm.ts.series[i_series].images['path'][i_image],
tsm.ts.series[i_series].images['id'][i_image])
if rlayer.isValid():
qgis.core.QgsMapLayerRegistry.instance().addMapLayer(rlayer)
settings.image_layers.append(rlayer)
actions.apply_symbology(rlayer)
# Remove image
else:
layer_id = [l.id() for l in layers if l.source() == filename][0]
qgis.core.QgsMapLayerRegistry.instance().removeMapLayer(layer_id)
# CONFIG
@QtCore.pyqtSlot()
def open_config(self, parent=None):
self.config = config.Config()
self.config.accepted.connect(self.config_accepted)
self.config.canceled.connect(self.config_closed)
self.config.exec_()
@QtCore.pyqtSlot()
def config_accepted(self):
# Temporary values
location = str(self.config.location)
ts_index = int(self.config.model_index)
custom_config = self.config.custom_options
driver = tsm.ts_drivers[ts_index]
logger.info('ACCEPTED CONFIG')
logger.info(location)
logger.info(ts_index)
logger.info(custom_config)
self.get_timeseries(driver, location, custom_config)
@QtCore.pyqtSlot()
def config_closed(self):
self.config.accepted.disconnect()
self.config.canceled.disconnect()
self.config.close()
self.config = None
# PLOT SYMBOLOGY / SETTINGS
def _init_plot_symbology(self):
logger.debug('Initialize plot symbology')
# Setup colors to cycle
if HAS_PALETTABLE:
if hasattr(palettable, 'wesanderson'):
# Zissou and Darjeeling combined for 9 colors
colors = (palettable.wesanderson.get_map('Zissou').colors +
palettable.wesanderson.get_map('Darjeeling1').colors)
else:
colors = palettable.colorbrewer.get_map(
'Set1', 'Qualitative', 9).colors
else:
colors = mpl.cm.Set1(np.linspace(0, 1, 9), bytes=True)[:, :-1]
# Initialize plot symbology for each series in timeseries
settings.plot_symbol = []
color_cycle = itertools.cycle(colors)
for s, b in zip(settings.plot_series, settings.plot_band_indices):
symbol = copy.deepcopy(settings.default_plot_symbol)
n_image = tsm.ts.series[s].images.shape[0]
symbol.update({
'indices': [np.arange(n_image)],
'markers': ['o'],
'colors': [color_cycle.next()]
})
settings.plot_symbol.append(symbol)
# CONTROLS
def _init_plot_options(self):
""" Initialize plot control data
"""
logger.debug('Initialize plot options')
settings.plot_series = []
settings.plot_band_indices = []
settings.plot_bands = []
for i, series in enumerate(tsm.ts.series):
settings.plot_series.extend([i] * len(series.band_names))
settings.plot_band_indices.extend(range(len(series.band_names)))
settings.plot_bands.extend(['%s - %s' %
(series.description, name) for
name in series.band_names])
settings.plot_series = np.asarray(settings.plot_series)
settings.plot_band_indices = np.asarray(settings.plot_band_indices)
settings.plot_bands = np.asarray(settings.plot_bands)
n_bands = len(settings.plot_bands)
# No bands plotted on axes initially
settings.plot['y_axis_1_band'] = np.zeros(n_bands, dtype=np.bool)
settings.plot['y_axis_2_band'] = np.zeros(n_bands, dtype=np.bool)
# Default min/max on plot
settings.plot['y_min'] = [0, 0] # TODO:HARDCODE
settings.plot['y_max'] = [10000, 10000] # TODO:HARDCODE
settings.plot['x_min'] = min([series.images['date'].min()
for series in tsm.ts.series]).year
settings.plot['x_max'] = max([series.images['date'].max()
for series in tsm.ts.series]).year
# Default mask values and fit/break on/off
settings.plot['mask_val'] = tsm.ts.mask_values.copy()
settings.plot['fit'] = True if tsm.ts.has_results else False
settings.plot['break'] = True if tsm.ts.has_results else False
def _init_raster_symbology(self):
""" Initialize image symbology
"""
logger.debug('Initialize raster symbology')
settings.symbol = []
for i, series in enumerate(tsm.ts.series):
# Setup symbology settings for series
symbol = copy.deepcopy(settings.default_symbol)
n_bands = len(series.band_names)
# Default min/max
symbol['min'] = np.zeros(n_bands, dtype=np.float)
symbol['max'] = np.ones(n_bands, dtype=np.float) * 10000
# Custom symbology, if exists
if hasattr(series, 'symbology_hint_indices'):
i = [min(n_bands - 1, _i) for _i in
series.symbology_hint_indices]
if isinstance(i, (tuple, list)):
if len(i) == 3:
logger.debug('Applying RGB symbology hint')
symbol.update({
'type': 'RGB',
'band_red': i[0],
'band_green': i[1],
'band_blue': i[2]
})
elif len(i) == 1:
logger.debug('Applying GREY symbology hint')
symbol.update({
'type': 'GREY',
'band_red': i[0],
'band_green': i[0],
'band_blue': i[0]
})
else:
logger.warning(
'Symbology RGB band hint improperly described')
if hasattr(series, 'symbology_hint_minmax'):
i = series.symbology_hint_minmax
if isinstance(i, ITERABLE):
# One min/max or a set of them
if isinstance(i[1], (int, float)) and \
isinstance(i[0], (int, float)):
logger.debug(
'Applying min/max symbology hint for all bands')
symbol.update({
'min': np.ones(n_bands, dtype=np.float) * i[0],
'max': np.ones(n_bands, dtype=np.float) * i[1],
})
# Min/max for each band
elif (isinstance(i[0], ITERABLE) and
isinstance(i[1], ITERABLE) and
len(i[0]) == n_bands and len(i[1]) == n_bands):
logger.debug(
'Applying specified min/max symbology hint')
symbol.update({
'min': np.asarray(i[0]),
'max': np.asarray(i[1])
})
else:
logger.warning('Could not parse symbology min/max '
'hint')
else:
logger.warning('Symbology min/max hint improperly '
'described')
# Add to settings
settings.symbol.append(symbol)
# PLOTS
def _init_plots(self):
""" Initialize plot data """
# Disconnect any existing signals
for pe in self.plot_events:
pe.disconnect()
pe.deleteLater()
pe = None
for plt in self.plots:
plt.reset()
# Connect plot signals for adding images
self.plot_events = []
for plot in self.plots:
handler = PlotHandler(plot.fig.canvas,
tolerance=settings.plot['picker_tol'])
handler.picked.connect(self._plot_add_layer)
self.plot_events.append(handler)
def update_plot(self):
# Update mask if needed
if not np.array_equal(tsm.ts.mask_values, settings.plot['mask_val']):
tsm.ts.update_mask(settings.plot['mask_val'])
# Re-calculate scale
if settings.plot['y_axis_scale_auto'][0]:
actions.calculate_scale(0)
if settings.plot['y_axis_scale_auto'][1]:
actions.calculate_scale(1)
# Update controls
if any(settings.plot['y_axis_scale_auto']):
self.controls.autoscale_applied()
# Update plots -- only visible
for i, plot in enumerate(self.plots):
if i == settings.plot_current:
settings.plot_dirty[i] = False
plot.plot()
else:
settings.plot_dirty[i] = True
# DISCONNECT
def disconnect(self):
logger.info('Disconnecting controller')
if not self.initialized:
return
# Swallow error:
# layer registry can be deleted before this runs when closing QGIS
try:
qgis.core.QgsMapLayerRegistry.instance()\
.layersAdded.disconnect(self._map_layers_added)
qgis.core.QgsMapLayerRegistry.instance()\
.layersWillBeRemoved.disconnect(self._map_layers_removed)
except:
pass
# Disconnect plot mouse event signals
for pe in self.plot_events:
pe.disconnect()
pe.deleteLater()
pe = None
# Controls
try:
self.controls.disconnect()
self.controls.plot_options_changed.disconnect(self.update_plot)
self.controls.image_table_row_clicked.disconnect(
self._add_remove_image)
self.controls.symbology_applied.disconnect()
except Exception as e:
logger.error('Error disconnecting signals from controls: %s' %
e.message)
        self.initialized = False
|
ceholden/TSTools
|
tstools/src/controller.py
|
Python
|
gpl-2.0
| 27,300
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Xmessage(AutotoolsPackage, XorgPackage):
"""xmessage displays a message or query in a window. The user can click
on an "okay" button to dismiss it or can select one of several buttons
to answer a question. xmessage can also exit after a specified time."""
homepage = "http://cgit.freedesktop.org/xorg/app/xmessage"
xorg_mirror_path = "app/xmessage-1.0.4.tar.gz"
version('1.0.4', sha256='883099c3952c8cace5bd11d3df2e9ca143fc07375997435d5ff4f2d50353acca')
depends_on('libxaw')
depends_on('libxt')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
|
iulian787/spack
|
var/spack/repos/builtin/packages/xmessage/package.py
|
Python
|
lgpl-2.1
| 845
|
''' -- imports from python libraries -- '''
import os
import csv
import json
import ast
import time
import datetime
''' imports from installed packages '''
from django.core.management.base import BaseCommand, CommandError
from mongokit import IS
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
''' imports from application folders/files '''
from gnowsys_ndf.ndf.models import DATA_TYPE_CHOICES
from gnowsys_ndf.ndf.models import node_collection, triple_collection
from gnowsys_ndf.ndf.models import Node
from gnowsys_ndf.ndf.models import GSystemType, AttributeType, RelationType
from gnowsys_ndf.ndf.models import GSystem, GAttribute, GRelation
from gnowsys_ndf.ndf.views.methods import create_gattribute, create_grelation, create_college_group_and_setup_data
from gnowsys_ndf.ndf.views.methods import get_student_enrollment_code
####################################################################################################################
# TODO:
# 1) Name of attributes/relation in property_order field needs to be replaced with their respective ObjectIds
# 2) regex query needs to be modified because in current situation it's not considering names with space
# - searching for terms till it finds first space
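# Illustrative sketch (not part of the original command): one common way to address the
# regex limitation noted above is to escape the full search term and anchor it, so that
# names containing spaces still match exactly. The helper below is hypothetical and is
# not called anywhere in this script.
import re

def _exact_name_regex(term):
    """Return a case-insensitive, exact-match $regex clause for the given term."""
    return {'$regex': "^" + re.escape(term) + "$", '$options': 'i'}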
SCHEMA_ROOT = os.path.join( os.path.dirname(__file__), "schema_files" )
log_list = [] # To hold intermediate errors
log_list.append("\n######### Script run on : " + time.strftime("%c") + " #########\n############################################################\n")
is_json_file_exists = False
gsystem_type_node = None
gsystem_type_id = None
gsystem_type_name = ""
home_grp = node_collection.one({'_type': "Group", 'name': "home"})
group_id = home_grp._id
user_id = 1
mis_group = node_collection.one({
'_type': "Group",
'$or': [{
'name': {'$regex': u"MIS_admin", '$options': 'i'}
}, {
'altnames': {'$regex': u"MIS_admin", '$options': 'i'}
}],
'group_type': "PRIVATE"
}, {
'created_by': 1
})
if mis_group is not None:
group_id = mis_group._id
user_id = mis_group.created_by # User who created the above private group
college_gst = node_collection.one({
"_type": "GSystemType", "name": "College"
})
college_dict = {}
college_name_dict = {}
attr_type_dict = {}
rel_type_dict = {}
create_student_enrollment_code = False
create_private_college_group = False
node_repeated = False
class Command(BaseCommand):
help = "Based on "
def handle(self, *args, **options):
try:
for file_name in args:
file_path = os.path.join(SCHEMA_ROOT, file_name)
global gsystem_type_node
global gsystem_type_id
global gsystem_type_name
gsystem_type_node = None
gsystem_type_id = None
gsystem_type_name = ""
if os.path.exists(file_path):
gsystem_type_name = os.path.basename(file_path)
gsystem_type_name = os.path.splitext(gsystem_type_name)[0]
gsystem_type_name = gsystem_type_name.replace("_", " ")
if gsystem_type_name == u"Student":
global create_student_enrollment_code
create_student_enrollment_code = True
elif gsystem_type_name == u"College":
global create_private_college_group
create_private_college_group = True
gsystem_type_node = node_collection.one({
"_type": "GSystemType",
"$or": [{
"name": {"$regex": "^"+gsystem_type_name+"$", '$options': 'i'}
}, {
"altnames": {"$regex": "^"+gsystem_type_name+"$", '$options': 'i'}
}]
})
if gsystem_type_node:
gsystem_type_id = gsystem_type_node._id
else:
error_message = "\n GSystemTypeError: This GSystemType ("+gsystem_type_name+") doesn't exists for creating it's own GSystem !!!"
log_list.append(error_message)
raise Exception(error_message)
file_extension = os.path.splitext(file_name)[1]
if "csv" in file_extension:
# Process csv file and convert it to json format at first
total_rows = 0
info_message = "\n CSVType: Following file (" + file_path + ") found!!!"
log_list.append(info_message)
try:
csv_file_path = file_path
json_file_name = file_name.rstrip("csv") + "json"
json_file_path = os.path.join(SCHEMA_ROOT, json_file_name)
json_file_content = ""
with open(csv_file_path, 'rb') as csv_file:
csv_file_content = csv.DictReader(csv_file, delimiter=",")
json_file_content = []
for row in csv_file_content:
total_rows += 1
json_file_content.append(row)
info_message = "\n- File '" + file_name + "' contains : " + str(total_rows) + " entries/rows (excluding top-header/column-names)."
print info_message
log_list.append(str(info_message))
with open(json_file_path, 'w') as json_file:
json.dump(json_file_content,
json_file,
indent=4,
sort_keys=False)
if os.path.exists(json_file_path):
file_path = json_file_path
is_json_file_exists = True
info_message = "\n JSONType: Following file (" + json_file_path + ") created successfully.\n"
log_list.append(info_message)
except Exception as e:
error_message = "\n CSV-JSONError: " + str(e)
log_list.append(error_message)
                        # End of csv-json conversion
elif "json" in file_extension:
is_json_file_exists = True
else:
error_message = "\n FileTypeError: Please choose either 'csv' or 'json' format supported files!!!\n"
log_list.append(error_message)
                        raise Exception(error_message)
if is_json_file_exists:
# Process json file and create required GSystems, GRelations, and GAttributes
info_message = "\n Task initiated: Processing json-file...\n"
log_list.append(info_message)
t0 = time.time()
parse_data_create_gsystem(file_path, file_name)
t1 = time.time()
time_diff = t1 - t0
# print time_diff
total_time_minute = round( (time_diff/60), 2) if time_diff else 0
total_time_hour = round( (time_diff/(60*60)), 2) if time_diff else 0
# End of processing json file
info_message = "\n------- Task finised: Successfully processed json-file -------\n"
info_message += "- Total time taken for the processing: \n\n\t" + str(total_time_minute) + " MINUTES\n\t=== OR ===\n\t" + str(total_time_hour) + " HOURS\n"
print info_message
log_list.append(str(info_message))
# End of processing json file
else:
error_message = "\n FileNotFound: Following path (" + file_path + ") doesn't exists!!!\n"
log_list.append(error_message)
raise Exception(error_message)
except Exception as e:
error_message = str(e)
print "\n >>> >>>> >>>>>" + error_message
finally:
if log_list:
log_list.append("\n ============================================================ End of Iteration ============================================================\n")
log_file_name = gsystem_type_name + ".log"
log_file_path = os.path.join(SCHEMA_ROOT, log_file_name)
with open(log_file_path, 'a') as log_file:
log_file.writelines(log_list)
# --- End of handle() ---
# -----------------------------------------------------------------------------------------------------------------
# Function that process json data according to the structure field
# -----------------------------------------------------------------------------------------------------------------
def parse_data_create_gsystem(json_file_path, file_name):
json_file_content = ""
try:
print "\n file_name == ",file_name
with open(json_file_path) as json_file:
json_file_content = json_file.read()
json_documents_list = json.loads(json_file_content)
# Process data in proper format
node = node_collection.collection.GSystem()
node_keys = node.keys()
node_structure = node.structure
json_documents_list_spaces = json_documents_list
json_documents_list = []
# Removes leading and trailing spaces from keys as well as values
for json_document_spaces in json_documents_list_spaces:
json_document = {}
for key_spaces, value_spaces in json_document_spaces.iteritems():
json_document[key_spaces.strip().lower()] = value_spaces.strip()
json_documents_list.append(json_document)
except Exception as e:
error_message = "\n While parsing the file ("+json_file_path+") got following error...\n " + str(e)
log_list.append(error_message)
print error_message
        raise Exception(error_message)
for i, json_document in enumerate(json_documents_list):
try:
if file_name == "QuizItem.csv":
print "\n\n *******************"
question_content = json_document['content']
question_content = question_content.split(' ')
question_content = question_content[:4]
question_content = ' '.join(question_content)
json_document['name'] = question_content
json_document['altnames'] = json_document['content']
group_id = ObjectId(json_document['group_id'])
group_obj = node_collection.one({'_id': group_id})
if group_obj:
group_id = group_obj._id
else:
group_id = home_grp._id
user_id = int(json_document['user_id'])
print "\n\n NAME ======= ", json_document['name'], group_id, user_id
global node_repeated
node_repeated = False
n_name = ""
if "first name" in json_document:
n_name = json_document["first name"] + " "
if json_document["middle name"]:
n_name += json_document["middle name"]
if json_document["last name"]:
n_name += " "
n_name += json_document["last name"]
json_document["name"] = n_name.title()
info_message = "\n ============ #"+ str(i+1) +" : Start of "+gsystem_type_name+"'s GSystem ("+json_document['name']+") creation/updation ============\n"
log_list.append(info_message)
parsed_json_document = {}
attribute_relation_list = []
for key in json_document.iterkeys():
# print "\n key ",key
parsed_key = key.lower()
parsed_key = parsed_key.replace(" ", "_")
if parsed_key in node_keys:
if node_structure[parsed_key] == unicode:
parsed_json_document[parsed_key] = unicode(json_document[key])
elif node_structure[parsed_key] == datetime.datetime:
parsed_json_document[parsed_key] = datetime.datetime.strptime(json_document[key], "%d/%m/%Y")
else:
parsed_json_document[parsed_key] = json_document[key]
else:
parsed_json_document[key] = json_document[key]
attribute_relation_list.append(key)
info_message = "\n Creating "+gsystem_type_name+" ("+parsed_json_document["name"]+")..."
log_list.append(info_message)
print "\n HERE == "
node = create_edit_gsystem(gsystem_type_id, gsystem_type_name, parsed_json_document, user_id)
print "\n node created === ", node._id, " === ", node.name, node.altnames
# print "attribute_relation_list == ",attribute_relation_list
if node:
if not attribute_relation_list:
# Neither possible attribute fields, nor possible relations defined for this node
info_message = "\n "+gsystem_type_name+" ("+node.name+"): Neither possible attribute fields, nor possible relations defined for this node !\n"
log_list.append(info_message)
continue
gst_possible_attributes_dict = node.get_possible_attributes(gsystem_type_id)
print "\n gsystem_type_id ===",gst_possible_attributes_dict
relation_list = []
json_document['name'] = node.name
# Write code for setting atrributes
for key in attribute_relation_list:
is_relation = True
for attr_key, attr_value in gst_possible_attributes_dict.iteritems():
# print "\n\n attr_key === ", attr_key
# print "\n\n altnames -- === ", attr_value['altnames']
if attr_value['altnames'] and key == attr_value['altnames'].lower() or key == attr_key.lower():
is_relation = False
if json_document[key]:
try:
if attr_value['data_type'] == basestring:
if u"\u2013" in json_document[key]:
json_document[key] = json_document[key].replace(u"\u2013", "-")
info_message = "\n For GAttribute parsing content | key: " + attr_key + " -- " + json_document[key]
log_list.append(info_message)
if attr_value['data_type'] == unicode:
json_document[key] = unicode(json_document[key])
elif attr_value['data_type'] == bool:
if json_document[key].lower() == "yes":
json_document[key] = True
elif json_document[key].lower() == "no":
json_document[key] = False
else:
json_document[key] = None
elif attr_value['data_type'] == datetime.datetime:
# Use small-case altnames
if key in ["dob", "date of birth", "date of registration"]:
if json_document[key]:
json_document[key] = datetime.datetime.strptime(json_document[key], "%d/%m/%Y")
else:
if json_document[key]:
json_document[key] = datetime.datetime.strptime(json_document[key], "%Y")
elif attr_value['data_type'] in [int, float, long]:
if not json_document[key]:
json_document[key] = 0
else:
if attr_value['data_type'] == int:
json_document[key] = int(json_document[key])
elif attr_value['data_type'] == float:
json_document[key] = float(json_document[key])
else:
json_document[key] = long(json_document[key])
elif type(attr_value['data_type']) == IS:
for op in attr_value['data_type']._operands:
if op.lower() == json_document[key].lower():
json_document[key] = op
elif (attr_value['data_type'] in [list, dict]) or (type(attr_value['data_type']) in [list, dict]):
if "," not in json_document[key]:
                                            # Necessary to inform perform_eval_type() to handle this value as a list
json_document[key] = "\"" + json_document[key] + "\", "
else:
formatted_value = ""
for v in json_document[key].split(","):
formatted_value += "\""+v.strip(" ")+"\", "
json_document[key] = formatted_value
perform_eval_type(key, json_document, "GSystem")
subject_id = node._id
attribute_type_node = None
if attr_key in attr_type_dict:
attribute_type_node = attr_type_dict[attr_key]
else:
attribute_type_node = node_collection.one({
'_type': "AttributeType",
'$or': [{
'name': {'$regex': "^" + attr_key + "$", '$options': 'i'}
}, {
'altnames': {'$regex': "^" + attr_key + "$", '$options': 'i'}
}]
})
attr_type_dict[attr_key] = attribute_type_node
object_value = json_document[key]
ga_node = None
info_message = "\n Creating GAttribute (" + node.name + " -- " + attribute_type_node.name + " -- " + str(json_document[key]) + ") ...\n"
log_list.append(info_message)
ga_node = create_gattribute(subject_id, attribute_type_node, object_value)
except Exception as e:
error_message = "\n While creating GAttribute (" + attr_key + ") for "+gsystem_type_name+"'s GSystem ("+json_document['name']+") got following error...\n " + str(e) + "\n"
log_list.append(error_message)
print error_message # Keep it!
# To break outer for loop as key found
break
else:
error_message = "\n DataNotFound: No data found for field ("+attr_key+") while creating GSystem (" + gsystem_type_name + " -- " + node.name + ") !!!\n"
log_list.append(error_message)
if is_relation:
relation_list.append(key)
if not relation_list:
# No possible relations defined for this node
info_message = "\n "+gsystem_type_name+" ("+node.name+"): No possible relations defined for this node !!!\n"
log_list.append(info_message)
else:
gst_possible_relations_dict = node.get_possible_relations(gsystem_type_id)
# Write code for setting relations
for key in relation_list:
is_relation = True
for rel_key, rel_value in gst_possible_relations_dict.iteritems():
if key == rel_value['altnames'].lower() or key == rel_key.lower():
is_relation = False
if json_document[key]:
                                    # Here a semi-colon (';') is used instead of a comma (',')
                                    # because one of the values may contain a comma (','), which causes problems when finding the required value in the database
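                                    # (Illustrative example: a cell such as 'Course A; Course B, Part 2' splits on ';'
                                    #  into the two values 'Course A' and 'Course B, Part 2'.)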
try:
if ";" not in json_document[key]:
                                            # Necessary to inform perform_eval_type() to handle this value as a list
json_document[key] = "\""+json_document[key]+"\", "
else:
formatted_value = ""
for v in json_document[key].split(";"):
formatted_value += "\""+v.strip(" ")+"\", "
json_document[key] = formatted_value
info_message = "\n For GRelation parsing content | key: " + rel_key + " -- " + json_document[key]
log_list.append(info_message)
perform_eval_type(key, json_document, "GSystem", "GSystem")
# for right_subject_id in json_document[key]:
subject_id = node._id
                                        # Here we append the ObjectIds from the GSystemType's type_of field,
                                        # along with the ObjectId of the GSystemType itself (whose GSystem is being created).
                                        # This is because some RelationTypes hold the Base class's ObjectId
                                        # rather than the Derived one's.
                                        # Deliberately keep the GSystemType's ObjectId first in the list;
                                        # hence the $in operator used in the query!
rel_subject_type = []
rel_subject_type.append(gsystem_type_id)
if gsystem_type_node.type_of:
rel_subject_type.extend(gsystem_type_node.type_of)
relation_type_node = None
if rel_key in rel_type_dict:
relation_type_node = rel_type_dict[rel_key]
else:
relation_type_node = node_collection.one({
'_type': "RelationType",
'$or': [{
'name': {'$regex': "^" + rel_key + "$", '$options': 'i'}
}, {
'altnames': {'$regex': "^" + rel_key + "$", '$options': 'i'}
}],
'subject_type': {'$in': rel_subject_type}
})
rel_type_dict[rel_key] = relation_type_node
info_message = "\n Creating GRelation ("+node.name+" -- "+rel_key+" -- "+str(json_document[key])+") ...\n"
log_list.append(info_message)
gr_node = create_grelation(subject_id, relation_type_node, json_document[key])
except Exception as e:
error_message = "\n While creating GRelation (" + rel_key + ") for "+gsystem_type_name+"'s GSystem ("+json_document['name']+") got following error...\n" + str(e) + "\n"
log_list.append(error_message)
pass
if college_gst._id in relation_type_node.object_type:
# Fetch college node's group id
# Append it to node's group_set
node_group_set = node.group_set
is_group_set_changed = False
# Iterate through each college
# Find it's corresponding group's ObjectId
# Append it to node's group_set
for each in json_document[key]:
each = ObjectId(each)
each_str = str(each)
if each_str in college_dict:
college_group_id = college_dict[each_str]
if college_group_id not in node_group_set:
node_group_set.append(college_group_id)
is_group_set_changed = True
else:
# If not found in college_dict
# Then find and update college_dict
college_node = node_collection.collection.aggregate([{
"$match": {"_id": each}
}, {
"$project": {"group_id": "$relation_set.has_group"}
}])
college_node = college_node["result"]
if college_node:
college_node = college_node[0]
college_group_id = college_node["group_id"]
if college_group_id:
college_group_id = college_group_id[0][0]
college_dict[each_str] = college_group_id
node_group_set.append(college_group_id)
is_group_set_changed = True
# Update node's group_set with updated list
# if changed
if is_group_set_changed:
node_collection.collection.update({
"_id": subject_id
}, {
"$set": {"group_set": node_group_set}
},
upsert=False, multi=False
)
# To break outer for loop if key found
break
else:
error_message = "\n DataNotFound: No data found for relation ("+rel_key+") while creating GSystem ("+gsystem_type_name+" -- "+node.name+") !!!\n"
log_list.append(error_message)
# print error_message
break
# Create enrollment code (Only for Student)
if create_student_enrollment_code and not node_repeated:
enrollment_code_at = node_collection.one({
"_type": "AttributeType", "name": "enrollment_code"
})
node_exist = node_collection.one({"_id": node._id, "attribute_set.enrollment_code": {"$exists": True}})
if not node_exist:
# It means enrollment_code is not set for given student node
# Then set it
try:
college_id = None
group_id = None
for k, v in college_dict.items():
college_id = ObjectId(k)
group_id = ObjectId(v)
student_enrollment_code = get_student_enrollment_code(college_id, node._id, json_document["date of registration"], group_id)
info_message = "\n Creating GAttribute (" + node.name + " -- " + enrollment_code_at.name + " -- " + str(student_enrollment_code) + ") ...\n"
log_list.append(info_message)
ga_node = create_gattribute(node._id, enrollment_code_at, student_enrollment_code)
except Exception as e:
error_message = "\n StudentEnrollmentCreateError: " + str(e) + "!!!"
log_list.append(error_message)
elif create_private_college_group:
# Create a private group for respective college node
node_exist = node_collection.one({"_id": node._id, "relation_set.has_group": {"$exists": True}})
if not node_exist:
try:
info_message = "\n Creating private group for given college (" + node.name + ") via RelationType (has_group)...\n"
log_list.append(info_message)
college_group, college_group_gr = create_college_group_and_setup_data(node)
except Exception as e:
error_message = "\n CollegeGroupCreateError: " + str(e) + "!!!"
log_list.append(error_message)
except Exception as e:
error_message = "\n While creating "+gsystem_type_name+"'s GSystem ("+json_document['name']+") got following error...\n " + str(e)
log_list.append(error_message)
print error_message # Keep it!
import sys
print "\n ****\n"
print 'Error on line {}'.format(sys.exc_info()[-1].tb_lineno)
def create_edit_gsystem(gsystem_type_id, gsystem_type_name, json_document, user_id):
"""Creates/Updates respective GSystem and it's related GAttribute(s)
and GRelation(s)
"""
node = None
if "(" in json_document['name'] or ")" in json_document['name']:
query = {
"_type": "GSystem",
'name': json_document['name'],
'member_of': gsystem_type_id
}
else:
query = {
"_type": "GSystem",
'$or': [{
'name': {'$regex': "^"+json_document['name']+"$", '$options': 'i'}
}, {
'altnames': {'$regex': "^"+json_document['name']+"$", '$options': 'i'}
}],
'member_of': gsystem_type_id
}
if "date of birth" in json_document:
dob = json_document["date of birth"]
if dob:
query.update({"attribute_set.dob": datetime.datetime.strptime(dob, "%d/%m/%Y")})
if "contact number (mobile)" in json_document:
mobile_number = json_document["contact number (mobile)"]
if mobile_number:
query.update({"attribute_set.mobile_number": long(mobile_number)})
if "degree name / highest degree" in json_document:
degree_name = json_document["degree name / highest degree"]
if degree_name:
query.update({"attribute_set.degree_name": degree_name})
if "year of study" in json_document:
degree_year = json_document["year of study"]
if degree_year:
query.update({"attribute_set.degree_year": degree_year})
if "college ( graduation )" in json_document:
college_name = json_document["college ( graduation )"]
if college_name not in college_name_dict:
college_node = node_collection.one({
"member_of": college_gst._id, "name": college_name
}, {
"name": 1
})
college_name_dict[college_name] = college_node
query.update({"relation_set.student_belongs_to_college": college_name_dict[college_name]._id})
info_message = "\n query for " + json_document['name'] + " : " + str(query) + "\n"
log_list.append(info_message)
if gsystem_type_name != "QuizItem":
node = node_collection.one(query)
if node is None:
try:
node = node_collection.collection.GSystem()
personal_details = []
address_details = []
details_12 = []
graduation_details = []
work_experience = []
education_details = []
tot_details = []
property_order = []
# TODO: Name of attributes/relation to be replaced with their respective ObjectIds
if gsystem_type_name in ["Student", "Voluntary Teacher"]:
personal_details = [
("first_name", "First Name"),
("middle_name", "Middle Name"),
("last_name", "Last Name"),
("gender", "Gender"),
("dob", "Date of Birth"),
("religion", "Religion"),
("languages_known", "Languages Known"),
("mobile_number", "Contact Number (Mobile)"),
("alternate_number", "Alternate Number / Landline"),
("email_id", "Email ID")
]
if gsystem_type_name in ["College", "University", "Student", "Voluntary Teacher"]:
address_details = [
("house_street", "House / Street"),
("village", "Village"),
("taluka", "Taluka"),
("town_city", "Town / City"),
("pin_code", "Pin Code")
]
if gsystem_type_name in ["Voluntary Teacher"]:
work_experience = [
("key_skills", "Key Skills"),
("profession", "Profession"),
("designation", "Profession"),
("work_exp", "Year of Experience (if Any)")
]
education_details = [
("degree_name", "Degree Name / Highest Degree"),
("degree_specialization", "Degree Specialization"),
("degree_passing_year", "Year of Passing Degree"),
("other_qualifications", "Any other Qualification")
]
tot_details = [
("trainer_of_college", "Volunteer to teach College(s) [At max. 2]"),
("trainer_of_course", "Volunteer to teach Course(s) [At max. 2]"),
("is_tot_attended", "Did you attend TOT?"),
("tot_when", "When did you attend TOT?"),
]
if gsystem_type_name in ["Student"]:
details_12 = [
("student_has_domicile", "State/Union Territory of Domicile"),
("12_passing_year", "Year of Passing XII")
]
graduation_details = [
("student_belongs_to_college", "College (Graduation)"),
("degree_name", "Degree Name / Highest Degree"),
("degree_year", "Year of Study"),
("college_enroll_num", "College Enrolment Number / Roll No"),
("student_belongs_to_university", "University"),
("is_nss_registered", "Are you registered for NSS?"),
("is_dropout_student", "Are you a dropout student?")
]
if gsystem_type_name in ["College", "University"]:
address_details.insert(4, ("organization_belongs_to_country", "Country"))
address_details.insert(4, ("organization_belongs_to_state", "State"))
address_details.insert(4, ("organization_belongs_to_district", "District"))
property_order = [
["Address", address_details]
]
if gsystem_type_name in ["University"]:
affiliated_college_details = [
("affiliated_college", "Affiliated Colleges")
]
property_order.append(["Affiliated Colleges", affiliated_college_details])
if gsystem_type_name in ["Voluntary Teacher"]:
address_details.insert(4, ("person_belongs_to_country", "Country"))
address_details.insert(4, ("person_belongs_to_state", "State"))
address_details.insert(4, ("person_belongs_to_district", "District"))
property_order = [
["Personal", personal_details],
["Address", address_details],
["Education", education_details],
["Work Experience", work_experience],
["TOT Details", tot_details],
]
if gsystem_type_name in ["Student"]:
personal_details.insert(6, ("student_of_caste_category", "Caste Category"))
address_details.insert(4, ("person_belongs_to_country", "Country"))
address_details.insert(4, ("person_belongs_to_state", "State"))
address_details.insert(4, ("person_belongs_to_district", "District"))
property_order = [
["Personal", personal_details],
["Address", address_details],
["XII", details_12],
["Graduation", graduation_details]
]
node.property_order = property_order
            # Save Node first with its basic attribute fields
for key in json_document.keys():
if node.has_key(key):
node[key] = json_document[key]
node.created_by = user_id
node.modified_by = user_id
if user_id not in node.contributors:
node.contributors.append(user_id)
node.member_of.append(gsystem_type_id)
node.group_set.append(group_id)
node.status = u"PUBLISHED"
node.save()
info_message = "\n "+gsystem_type_name+" ("+node.name+") created successfully.\n"
log_list.append(info_message)
except Exception as e:
error_message = "\n "+gsystem_type_name+"Error: Failed to create ("+json_document['name']+") as " + str(e) + "\n"
log_list.append(error_message)
raise Exception(error_message)
else:
# Code for updation
is_node_changed = False
global node_repeated
node_repeated = True
try:
for key in json_document.iterkeys():
if key in node:
if type(node[key]) == list:
if set(node[key]) != set(json_document[key]):
node[key] = json_document[key]
is_node_changed = True
elif type(node[key]) == dict:
if cmp(node[key], json_document[key]) != 0:
node[key] = json_document[key]
is_node_changed = True
else:
if node[key] != json_document[key]:
node[key] = json_document[key]
is_node_changed = True
if is_node_changed:
node.modified_by = user_id
if user_id not in node.contributors:
node.contributors.append(user_id)
node.status = u"PUBLISHED"
node.save()
info_message = "\n "+gsystem_type_name+" ("+node.name+") updated successfully.\n"
log_list.append(info_message)
else:
info_message = "\n "+gsystem_type_name+" ("+node.name+") already exists (Nothing updated) !\n"
log_list.append(info_message)
except Exception as e:
error_message = "\n "+gsystem_type_name+"Error: Failed to update ("+node.name+") as " + str(e) + "\n"
log_list.append(error_message)
raise Exception(error_message)
return node
def perform_eval_type(eval_field, json_document, type_to_create, type_convert_objectid=None):
"""Converts eval_field's data in json-type to it's corresponding python-type, and
resets eval_field with that converted data
"""
try:
json_document[eval_field] = ast.literal_eval(json_document[eval_field])
except Exception as e:
if u"\u201c" in json_document[eval_field]:
json_document[eval_field] = json_document[eval_field].replace(u"\u201c", "\"")
if u"\u201d" in json_document[eval_field]:
json_document[eval_field] = json_document[eval_field].replace(u"\u201d", "\"")
if u"\u2013" in json_document[eval_field]:
json_document[eval_field] = json_document[eval_field].replace(u"\u2013", "-")
try:
json_document[eval_field] = ast.literal_eval(json_document[eval_field])
except Exception as e:
error_message = "\n InvalidDataError: For " + type_to_create + " ("+json_document['name']+") invalid data found -- " + str(e) + "!!!\n"
log_list.append(error_message)
raise Exception(error_message)
type_list = []
for data in json_document[eval_field]:
if type_convert_objectid is None:
if eval_field == "when did you attend tot?":
type_list.append(datetime.datetime.strptime(data, "%d/%m/%Y"))
else:
type_list.append(data)
else:
if "(" in data or ")" in data:
node = node_collection.one({'_type': type_convert_objectid,
'name': data,
'group_set': group_id
},
{'_id': 1}
)
else:
node = node_collection.one({'_type': type_convert_objectid,
'$or': [{'name': {'$regex': "^"+data+"$", '$options': 'i'}},
{'altnames': {'$regex': "^"+data+"$", '$options': 'i'}}],
'group_set': group_id
},
{'_id': 1}
)
if node:
type_list.append(node._id)
else:
error_message = "\n "+type_convert_objectid+"Error ("+eval_field+"): This "+type_convert_objectid+" (" + data + ") doesn't exists for creating "+type_to_create+" (" + json_document['name'] + ") !!!\n"
log_list.append(error_message)
raise Exception(error_message)
# Sets python-type converted list
json_document[eval_field] = type_list
|
AvadootNachankar/gstudio
|
gnowsys-ndf/gnowsys_ndf/ndf/management/commands/data_entry.py
|
Python
|
agpl-3.0
| 45,442
|
# PyQuantFi - statisticsMC.py
# (c) 2012 Nick Collins
class StatisticMC(object):
"""
Abstract statistics gathering class
"""
def add_one_result(self,value):
self._store_one_result(value)
def get_results(self):
return self._get_results()
class StatisticMean(StatisticMC):
def __init__(self):
self._runningSum = 0
self._pathsDone = 0
def _store_one_result(self,value):
self._runningSum += value
self._pathsDone += 1
def _get_results(self):
self._results = [[self._runningSum / self._pathsDone ]]
return self._results
class ConvergenceTable(StatisticMC):
def __init__(self,gatherer):
self._stoppingPoint = 2
self._pathsDone = 0
self._statisticMC = gatherer
self._results = []
def _store_one_result(self,value):
self._statisticMC.add_one_result(value)
self._pathsDone += 1
if (self._pathsDone == self._stoppingPoint):
self._stoppingPoint *= 2
thisResult = self._statisticMC.get_results()
for item in thisResult:
item.append(self._pathsDone)
self._results.append(item)
def _get_results(self):
tmp = self._results
if (self._pathsDone * 2 != self._stoppingPoint):
thisResult = self._statisticMC.get_results()
for item in thisResult:
item.append(self._pathsDone)
tmp.append(item)
return tmp
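# --- Usage sketch (illustrative only, not part of the original module) ---
# ConvergenceTable decorates another gatherer and records its result each time the
# number of paths doubles, so wiring the two classes together looks like this:
if __name__ == '__main__':
    gatherer = ConvergenceTable(StatisticMean())
    for value in [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]:
        gatherer.add_one_result(value)
    # Running mean recorded at 2, 4 and 8 paths: [[1.5, 2], [2.5, 4], [4.5, 8]]
    print(gatherer.get_results())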
|
ncollins/pyquantfi
|
statistics_mc.py
|
Python
|
bsd-3-clause
| 1,509
|
# Prompts the user for a year or a year and a month,
# the year being no earlier than 1753, and displays
# the calendar for the period requested in the style
# of the Unix cal command, but starting the week on Monday (not Sunday)
# Written by Eric Martin for COMP9021
field_width = 22
def calendar():
month_names = ['January', 'February', 'March', 'April',
'May', 'June', 'July', 'August',
'September', 'October', 'November', 'December']
month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
print("I will display a calendar, either for a year "
"or for a month in a year.\n"
"The earliest year should be 1753.\n"
"For the month, input at least the first three letters "
"of the month's name.")
correct_input = False
month = 0
while not correct_input:
date = input('Input year, or year and month, or month and year: ')
date = date.split()
if len(date) == 0 or len(date) > 2:
continue
if len(date) == 1:
try:
year = int(date[0])
correct_input = True
except ValueError:
print('Incorrect input. ', end = '')
else:
if year < 1753 or year > 9999999999999999:
print('Incorrect input. ', end = '')
correct_input = False
else:
date_0 = date[0].title()
date_1 = date[1].title()
try:
month = date_0
year = int(date_1)
except ValueError:
try:
month = date_1
year = int(date_0)
except ValueError:
print('Incorrect input. ', end = '')
continue
if len(month) < 3:
print('Incorrect input. ', end = '')
continue
for i in range(12):
if month_names[i].startswith(month):
month = i
correct_input = True
break
# Number of days between 1 January 2000 and the requested date,
# being 1 January of the requested year if no month has been input,
# positive for a date after 1 January 2000, negative for a date
# before 1 January 2000.
    # If a month of March or later has been input and the input year
    # is a leap year, then the assignment is incorrect by one day,
    # which is fixed in the following if statement.
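    # Worked example (added for illustration): for April 2004 (month == 3, a leap
    # year) the expression below gives 4*365 + 1 - 1 + 1 + 90 = 1551, the leap-day
    # fix adds 1, and (1552 + 5) % 7 == 3, i.e. Thursday -- 1 April 2004 was indeed
    # a Thursday.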
offset = ((year - 2000) * 365 +
(year - 1997) // 4 - (year - 1901) // 100 + (year - 1601) // 400 +
sum(month_lengths[0 : month]))
    if month >= 2 and (year % 4 == 0 and year % 100 or year % 400 == 0):
offset += 1
# 1 January 2000 is a Saturday
starting_day = (offset + 5) % 7
if len(date) == 2:
date = month_names[month] + ' ' + str(year)
print(date.center(field_width))
if month == 1 and (year % 4 == 0 and year % 100 or year % 400 == 0):
nb_of_days = 29
else:
nb_of_days = month_lengths[month]
for line in month_representation_lines(starting_day, nb_of_days):
print(line)
else:
print(str(year).center(field_width * 3) + '\n')
if year % 4 == 0 and year % 100 or year % 400 == 0:
month_lengths[1] += 1
months = [[month.center(field_width)] for month in month_names]
for i in range(12):
months[i].extend(month_representation_lines(starting_day, month_lengths[i]))
starting_day = (starting_day + month_lengths[i]) % 7
groups_of_three_months = [months[3 * i : 3 * (i + 1)] for i in range(4)]
for group_of_three_months in groups_of_three_months:
for month in group_of_three_months:
month.extend([' ' * field_width] *
(max(map(len, group_of_three_months)) - len(month)))
lines = map(''.join, zip(*group_of_three_months))
for line in lines:
print(line)
def month_representation_lines(starting_day, nb_of_days):
lines = [' Mo Tu We Th Fr Sa Su ']
line = ' ' * 3 * starting_day
for i in range(1, nb_of_days + 1):
line += "{0:3d}".format(i)
starting_day = (starting_day + 1) % 7
if starting_day == 0:
lines.append(line + ' ')
line = ''
if line != '':
line += ' ' * 3 * (7 - starting_day)
lines.append(line + ' ')
return lines
if __name__ == '__main__':
calendar()
|
YufeiZhang/Principles-of-Programming-Python-3
|
Labs/Lab5/calendar.py
|
Python
|
gpl-3.0
| 4,645
|
from __future__ import absolute_import, division, print_function
import tensorflow as tf
from odin.bay.vi.autoencoder.beta_vae import BetaVAE
from odin.bay.vi.autoencoder.variational_autoencoder import TrainStep
from odin.utils import as_tuple
__all__ = ['ImputeVAE', 'StochasticVAE']
class PosteriorStep(TrainStep):
def __call__(self, training=True):
analytic = self.elbo_kw.pop('analytic', False)
reverse = self.elbo_kw.pop('reverse', True)
qZ_X = self.vae.encode(self.inputs,
training=training,
sample_shape=self.sample_shape)
if len(self.vae.latent_layers) == 1:
qZ_X = [qZ_X]
#
metrics = {}
kl_loss = tf.convert_to_tensor(0., dtype=self.vae.dtype)
for name, qZ in zip(self.vae.latent_names, qZ_X):
kl = tf.reduce_mean(qZ.KL_divergence(analytic=analytic, reverse=reverse))
metrics["kl_%s" % name] = kl
kl_loss += self.vae.beta * kl
return kl_loss, metrics
class LikelihoodStep(TrainStep):
def __call__(self, training=True):
prior = self.vae.sample_prior(self.sample_shape)
pX_Z = self.vae.decode(prior,
training=training,
sample_shape=self.sample_shape)
if len(self.vae.observation) == 1:
pX_Z = [pX_Z]
inputs = tf.nest.flatten(self.inputs)
#
metrics = {}
llk_loss = tf.convert_to_tensor(0., dtype=self.vae.dtype)
for name, X, pX in zip(self.vae.variable_names, inputs, pX_Z):
llk = tf.reduce_mean(pX.log_prob(X))
metrics["llk_%s" % name] = llk
llk_loss += -llk
return llk_loss, metrics
class StochasticVAE(BetaVAE):
def __init__(self, kl_steps=1, llk_steps=1, beta=1.0, **kwargs):
super().__init__(beta=beta, **kwargs)
self.kl_steps = max(int(kl_steps), 1)
self.llk_steps = max(int(llk_steps), 1)
## parameters for each step
kl_params = self.encoder.trainable_variables
for layer in self.latent_layers:
kl_params += layer.trainable_variables
#
llk_params = self.decoder.trainable_variables
for layer in self.observation:
llk_params += layer.trainable_variables
#
self.kl_params = kl_params
self.llk_params = llk_params
def train_steps(self,
inputs,
training=None,
mask=None,
sample_shape=(),
iw=False,
elbo_kw=dict()) -> TrainStep:
r""" Facilitate multiple steps training for each iteration (smilar to GAN)
Example:
```
model = factorVAE()
x = model.sample_data()
vae_step, discriminator_step = list(model.train_steps(x))
# optimizer VAE with total correlation loss
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(vae_step.parameters)
loss, metrics = vae_step()
tape.gradient(loss, vae_step.parameters)
# optimizer the discriminator
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(discriminator_step.parameters)
loss, metrics = discriminator_step()
tape.gradient(loss, discriminator_step.parameters)
```
"""
self.step.assign_add(1)
for _ in range(self.kl_steps):
yield PosteriorStep(vae=self,
inputs=inputs,
mask=mask,
training=training,
sample_shape=sample_shape,
iw=iw,
elbo_kw=elbo_kw,
parameters=self.kl_params)
for _ in range(self.llk_steps):
yield LikelihoodStep(vae=self,
inputs=inputs,
mask=mask,
training=training,
sample_shape=sample_shape,
iw=iw,
elbo_kw=elbo_kw,
parameters=self.llk_params)
class ImputeVAE(BetaVAE):
r""" Iteratively imputing VAE outputs for a fixed number of steps
Arguments:
sequential : a Boolean. If True, using the outputs from previous step
as inputs for the next step when calculating ELBO.
This could be interpreted as a scheme for data augmentation.
Example:
```
ds = MNIST()
train = ds.create_dataset(partition='train')
model = ImputeVAE(
encoder='mnist',
outputs=RV((28, 28, 1), 'bern', name="Image"),
impute_steps=3,
sequential=True)
model.fit(train, epochs=-1, max_iter=8000, compile_graph=True)
```
"""
def __init__(self,
beta=1.,
impute_steps=3,
impute_llk_weights=[1.0, 0.8, 0.4],
impute_kl_weights=[1.0, 0.8, 0.4],
sequential=True,
**kwargs):
super().__init__(beta=beta, **kwargs)
assert impute_steps >= 1
self.impute_steps = int(impute_steps)
self.impute_kl_weights = as_tuple(impute_kl_weights,
t=float,
N=self.impute_steps)
self.impute_llk_weights = as_tuple(impute_llk_weights,
t=float,
N=self.impute_steps)
self.sequential = bool(sequential)
def _elbo(self,
X,
pX_Z,
qZ_X,
analytic,
reverse,
sample_shape=None,
mask=None,
training=None,
**kwargs):
if sample_shape is None:
sample_shape = []
X = [X] * self.impute_steps
all_llk = {}
all_div = {}
prev_px = None
for step, (inputs, px, qz, w_llk, w_div) in enumerate(
zip(X, pX_Z, qZ_X, self.impute_llk_weights, self.impute_kl_weights)):
if self.sequential and prev_px is not None:
inputs = [p.mean() for p in prev_px]
px = tf.nest.flatten(px)
qz = tf.nest.flatten(qz)
llk, div = super()._elbo(X=inputs,
pX_Z=px,
qZ_X=qz,
analytic=analytic,
reverse=reverse,
sample_shape=sample_shape,
mask=mask,
training=training,
**kwargs)
all_llk.update({'%s_%d' % (k, step): w_llk * v for k, v in llk.items()})
all_div.update({'%s_%d' % (k, step): w_div * v for k, v in div.items()})
prev_px = px
return all_llk, all_div
def call(self, inputs, training=None, mask=None, sample_shape=()):
sample_shape = tf.nest.flatten(sample_shape)
pX_Z, qZ_X = super().call(inputs,
training=training,
mask=mask,
sample_shape=sample_shape)
results = [[pX_Z], [qZ_X]]
for _ in range(1, self.impute_steps):
pX_Z = tf.nest.flatten(pX_Z)
inputs = [p.mean() for p in pX_Z]
if len(sample_shape) > 0:
inputs = [
tf.reduce_mean(i, axis=list(range(len(sample_shape))))
for i in inputs
]
pX_Z, qZ_X = super().call(inputs[0] if len(inputs) == 1 else inputs,
training=training,
mask=mask,
sample_shape=sample_shape)
results[0].append(pX_Z)
results[1].append(qZ_X)
return results[0], results[1]
|
imito/odin
|
odin/bay/vi/autoencoder/stochastic_vae.py
|
Python
|
mit
| 7,473
|
import os.path
from os import system
print "Generating your public/private key pair. You'll use this for github & ssh connections"
email = raw_input("First, what is your @degordian.com email? I won't use it for anything shady, I promise ;)\n")
print("Okay, generating your key....")
ssh_location = "~/.ssh/github_rsa"
system("ssh-keygen -t rsa -b 4096 -f {0} -C {1}".format(ssh_location, email))
# TODO: automatically e-mail the generated public key to Toni
system("eval \"$(ssh-agent -s)\"")
with open("~/.ssh/config", "w+") as ssh_config:
ssh_config.write("Host *")
ssh_config.write(" AddKeysToAgent yes")
ssh_config.write(" UseKeychain yes")
ssh_config.write(" IdentityFile ~/.ssh/id_rsa")
ssh_config.close()
system("ssh-add -K {0}".format(ssh_location))
system("chmod 600 {0}; chmod 600 {0}".format(ssh_location))
print("Finished generating your key!")
|
krukru/shell-me-up-scotty
|
scripts/ssh.py
|
Python
|
apache-2.0
| 863
|
'''Operating System Scheduler
'''
from queue import Queue
from task import Task
from systemCall import *
import time
import select
class Scheduler(object):
def __init__(self):
self.ready = Queue()
self.taskMap = {}
self.waitMap = {}
self.waitrl = {}
self.waitwl = {}
def new(self, target):
task = Task(target)
self.taskMap[task.tid] = task
self.schedule(task)
return task.tid
def schedule(self, task):
self.ready.put(task)
def terminate(self, task):
task.terminate()
if task.tid in self.taskMap:
del self.taskMap[task.tid]
print('[SCHEDULER STATE] tid = %s terminate.' % ( task.tid ))
for wTask in self.waitMap.pop(task.tid, []):
self.schedule(wTask)
def waitForRead(self, fd, task):
self.waitrl[fd] = task
def waitForWrite(self, fd, task):
self.waitwl[fd] = task
def IOPoll(self, timeout = None):
if self.waitrl or self.waitwl:
rl, wl, el = select.select(self.waitrl, self.waitwl, [], timeout)
for fd in rl:
self.schedule(self.waitrl.pop(fd))
for fd in wl:
self.schedule(self.waitwl.pop(fd))
def IOPollTask(self):
print('[TASK IOPOLL] IO Polling Initialized')
while True:
if self.ready.empty():
self.IOPoll(None)
else:
self.IOPoll(0)
yield
def wait(self, task, waitTaskId):
if waitTaskId in self.taskMap:
self.waitMap.setdefault(waitTaskId, []).append(task)
else:
return False
return True
def mainLoop(self):
self.new(self.IOPollTask())
while self.taskMap:
task = self.ready.get()
try:
result = task.run()
if isinstance(result, SystemCall):
result.task = task
result.sched = self
result.handler()
continue
except StopIteration:
self.terminate(task)
continue
self.schedule(task)
if __name__ == '__main__':
def t1():
print('[TASK T1] Initialized')
tid = yield GetTid()
print('[TASK T1] step1, SystemCall(GetTid) = %s' % ( tid ))
yield
print('[TASK T1] step2, SystemCall(GetTid) = %s' % ( tid ))
yield
print('[TASK T1] step3, SystemCall(GetTid) = %s' % ( tid ))
def t1c():
print('[TASK T1C] Initialized')
tid = yield GetTid()
print('[TASK T1C] step1, SystemCall(GetTid) = %s' % ( tid ))
yield
print('[TASK T1C] step2, SystemCall(GetTid) = %s' % ( tid ))
yield
print('[TASK T1C] step3, SystemCall(GetTid) = %s' % ( tid ))
def t2():
print('[TASK T2] Initialized')
tid = yield GetTid()
print('[TASK T2] step1, SystemCall(GetTid) = %s' % ( tid ))
tid = yield CreateTask(t1c())
print('[TASK T2] step2, SystemCall(GetTid) = %s' % ( tid ))
def t3():
print('[TASK T3] Initialized')
tid = yield GetTid()
print('[TASK T3] step1, SystemCall(GetTid) = %s' % ( tid ))
tid = yield GetTid()
print('[TASK T3] step2, SystemCall(GetTid) = %s' % ( tid ))
tid = yield KillTask(4)
print('[TASK T3] step3, SystemCall(GetTid) = %s' % ( tid ))
def t4():
print('[TASK T4] Initialized')
tid = yield GetTid()
print('[TASK T4] step1, SystemCall(GetTid) = %s' % ( tid ))
tid = yield WaitTask(2)
print('[TASK T4] step2, SystemCall(GetTid) = %s' % ( tid ))
tid = yield GetTid()
print('[TASK T4] step3, SystemCall(GetTid) = %s' % ( tid ))
tid = yield
print('[TASK T4] step4, SystemCall(GetTid) = %s' % ( tid ))
tid = yield
print('[TASK T4] step5, SystemCall(GetTid) = %s' % ( tid ))
scheduler = Scheduler()
scheduler.new(t1())
scheduler.new(t2())
scheduler.new(t3())
scheduler.new(t4())
scheduler.mainLoop()
|
JShadowMan/package
|
python/coroutine/operatingSystem/scheduler.py
|
Python
|
mit
| 4,290
|
# Copyright (C) 2014 Rémi Bèges
# For conditions of distribution and use, see copyright notice in the LICENSE file
# Test of the protocol algorithm with dummy frames
from API.Protocol import Protocol
from API.SerialPort import SerialPort
class SerialMgr():
def __init__(self):
# Create serial port manager
self.serial = SerialPort(self.on_rx_data,self.on_connect_try_callback)
self.serial.connect("COM11",115200)
def on_rx_data(self,c):
print(c.decode('ascii'),end='')
def on_connect_try_callback(self,data):
print(data)
if __name__ == '__main__':
mgr = SerialMgr()
while(mgr.serial.running):
pass
print("Done.")
|
Overdrivr/DistantIO
|
03_Serial_listener.py
|
Python
|
mit
| 721
|
#!/usr/bin/env python
import os, random, sys, time, urllib
#
# Options
#
dry_run = len(sys.argv) > 1 and "--dry-run" in set(sys.argv[1:])
quiet = len(sys.argv) > 1 and "--quiet" in set(sys.argv[1:])
#
# Functions and constants
#
def download_progress_hook(block_count, block_size, total_blocks):
if quiet or random.random() > 0.5:
return
sys.stdout.write(".")
sys.stdout.flush()
def download_url_to_file(url, file, message):
if not quiet:
print message + " ",
if not dry_run:
dir = os.path.dirname(file)
if len(dir) and not os.path.exists(dir):
os.makedirs(dir)
urllib.urlretrieve(url, file, download_progress_hook)
if not quiet:
print
# This is mostly just the list of North America http mirrors from http://cygwin.com/mirrors.html,
# but a few have been removed that seemed unresponsive from Cupertino.
mirror_servers = ["http://cygwin.elite-systems.org/",
"http://mirror.mcs.anl.gov/cygwin/",
"http://cygwin.osuosl.org/",
"http://mirrors.kernel.org/sourceware/cygwin/",
"http://mirrors.xmission.com/cygwin/",
"http://sourceware.mirrors.tds.net/pub/sourceware.org/cygwin/"]
package_mirror_url = mirror_servers[random.choice(range(len(mirror_servers)))]
def download_package(package, message):
download_url_to_file(package_mirror_url + package["path"], package["path"], message)
required_packages = frozenset(["apache",
"bc",
"bison",
"curl",
"diffutils",
"e2fsprogs",
"emacs",
"flex",
"gcc",
"gperf",
"keychain",
"make",
"nano",
"openssh",
"patch",
"perl",
"perl-libwin32",
"python",
"rebase",
"rsync",
"ruby",
"subversion",
"unzip",
"vim",
"zip"])
#
# Main
#
print "Using Cygwin mirror server " + package_mirror_url + " to download setup.ini..."
urllib.urlretrieve(package_mirror_url + "setup.ini", "setup.ini.orig")
downloaded_packages_file_path = "setup.ini.orig"
downloaded_packages_file = file(downloaded_packages_file_path, "r")
if not dry_run:
modified_packages_file = file("setup.ini", "w")
packages = {}
current_package = ''
for line in downloaded_packages_file.readlines():
if line[0] == "@":
current_package = line[2:-1]
packages[current_package] = {"name": current_package, "needs_download": False, "requires": [], "path": ""}
elif line[:10] == "category: ":
if current_package in required_packages:
line = "category: Base\n"
if "Base" in set(line[10:-1].split()):
packages[current_package]["needs_download"] = True
elif line[:10] == "requires: ":
packages[current_package]["requires"] = line[10:].split()
packages[current_package]["requires"].sort()
elif line[:9] == "install: " and not len(packages[current_package]["path"]):
end_of_path = line.find(" ", 9)
if end_of_path != -1:
packages[current_package]["path"] = line[9:end_of_path]
if not dry_run:
modified_packages_file.write(line)
downloaded_packages_file.close()
os.remove(downloaded_packages_file_path)
if not dry_run:
modified_packages_file.close()
names_to_download = set()
package_names = packages.keys()
package_names.sort()
def add_package_and_dependencies(name):
if name in names_to_download:
return
if not name in packages:
return
packages[name]["needs_download"] = True
names_to_download.add(name)
for dep in packages[name]["requires"]:
add_package_and_dependencies(dep)
for name in package_names:
if packages[name]["needs_download"]:
add_package_and_dependencies(name)
downloaded_so_far = 0
for name in package_names:
if packages[name]["needs_download"]:
downloaded_so_far += 1
download_package(packages[name], "Downloading package %3d of %3d (%s)" % (downloaded_so_far, len(names_to_download), name))
download_url_to_file("http://cygwin.com/setup.exe", "setup.exe", "Downloading setup.exe")
seconds_to_sleep = 10
print """
Finished downloading Cygwin. In %d seconds,
I will run setup.exe. Select the "Install
from Local Directory" option and browse to
"%s"
when asked for the "Local Package Directory".
""" % (seconds_to_sleep, os.getcwd())
while seconds_to_sleep > 0:
print "%d..." % seconds_to_sleep,
sys.stdout.flush()
time.sleep(1)
seconds_to_sleep -= 1
print
if not dry_run:
os.execl("setup.exe")
|
danialbehzadi/Nokia-RM-1013-2.0.0.11
|
webkit/Tools/CygwinDownloader/cygwin-downloader.py
|
Python
|
gpl-3.0
| 5,471
|
from intelligine.core.exceptions import MoleculeException
from intelligine.synergy.object.Bug import Bug
from intelligine.cst import CARRYING, TRANSPORTER, ATTACKER, COL_TRANSPORTER, COL_TRANSPORTER_NOT_CARRYING, \
COL_FIGHTER, MODE_EXPLO, MODE_GOHOME, BODY_PART_PHEROMONE_GLAND, TYPE, TYPE_ANT, \
COL_TRANSPORTER_CARRYING, MODE_NURSE, MODE_HOME, CARRY, PUT_FAIL_COUNT
from intelligine.synergy.object.Food import Food
from intelligine.simulation.object.molecule.MovementMoleculeGland import MovementMoleculeGland
from intelligine.simulation.object.brain.AntBrain import AntBrain
import random
class Ant(Bug):
_body_parts = {
BODY_PART_PHEROMONE_GLAND: MovementMoleculeGland
}
_brain_class = AntBrain
def __init__(self, collection, context):
super().__init__(collection, context)
context.metas.states.add_list(self.get_id(), [TRANSPORTER, ATTACKER])
context.metas.collections.add_list(self.get_id(), [COL_TRANSPORTER,
COL_TRANSPORTER_NOT_CARRYING,
COL_FIGHTER])
self._carried = None
        # TODO: As when a put action is performed, run an algorithm to choose the mission to follow.
if random.choice([1, 0]):
self._brain.switch_to_mode(MODE_EXPLO)
else:
self._brain.switch_to_mode(MODE_NURSE)
context.metas.list.add(TYPE, self.get_id(), TYPE_ANT)
self._put_fail_count = 0
def die(self):
super().die()
self._remove_state(TRANSPORTER)
self._remove_state(ATTACKER)
self._remove_col(COL_TRANSPORTER)
self._remove_col(COL_TRANSPORTER_NOT_CARRYING)
self._remove_col(COL_TRANSPORTER_CARRYING, allow_not_in=True)
self._remove_col(COL_FIGHTER)
def get_movement_molecule_gland(self):
return self.get_body_part(BODY_PART_PHEROMONE_GLAND)
def put_carry(self, obj, position=None):
if position is None:
position = self._get_position()
self._carried = None
obj.set_position(position)
obj.set_is_carried(False, self)
self._context.metas.states.remove(self.get_id(), CARRYING)
self._context.metas.value.unset(CARRY, self.get_id())
self._add_col(COL_TRANSPORTER_NOT_CARRYING)
self._remove_col(COL_TRANSPORTER_CARRYING)
def get_carried(self):
return self._carried
def carry(self, obj):
self._carried = obj
self._context.metas.states.add(self.get_id(), CARRYING)
self._add_col(COL_TRANSPORTER_CARRYING)
self._remove_col(COL_TRANSPORTER_NOT_CARRYING)
obj.set_is_carried(True, self)
self._context.metas.value.set(CARRY, self.get_id(), obj.get_id())
        # TODO: hard-coded for now; handle it in AntTakeBrainPart (callback depending on what is put down)
if isinstance(obj, Food):
self.get_brain().switch_to_mode(MODE_GOHOME)
self.get_movement_molecule_gland().appose()
def is_carrying(self):
if self._carried:
return True
return False
def set_position(self, position):
if self._position is not None and position != self._position:
self._brain.host_moved()
super().set_position(position)
if self.is_carrying():
self._carried.set_position(position)
def initialize(self):
super().initialize()
if self.get_movement_molecule_gland().is_enabled():
try:
self.get_movement_molecule_gland().appose()
except MoleculeException:
pass
def get_colony(self):
return self.get_collection()
def get_put_fail_count(self):
return self._put_fail_count
def increment_put_fail_count(self):
self._put_fail_count += 1
self._context.metas.value.set(PUT_FAIL_COUNT, self.get_id(), self._put_fail_count)
def reinit_put_fail_count(self):
self._put_fail_count = 0
self._context.metas.value.set(PUT_FAIL_COUNT, self.get_id(), self._put_fail_count)
|
buxx/intelligine
|
intelligine/synergy/object/ant/Ant.py
|
Python
|
apache-2.0
| 4,135
|
all = """
Add
Delete
Insert
List
SetDefault
""".split()
|
onelab-eu/sfa
|
sfatables/commands/__init__.py
|
Python
|
mit
| 56
|
# CRPropa test script
# Simulates the integrated relative abundance from a source,
# accelerationg particles up to a maximum rigidity.
# Minimum energy = 10 EeV
# Maximum rigidity = 100 EeV / Z -> max. energy = Z * 100 EeV
# Composition = p, He, C, O, Si, Fe with relative abundances
# from Allard 2006, DOI: 10.1088/1475-7516/2006/09/005
#
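# (e.g. iron, with Z = 26, can therefore reach 26 * 100 EeV = 2600 EeV)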
import matplotlib
matplotlib.use('Agg')
from crpropa import *
from pylab import *
nS = 5 # number of spectral indices between 2 and 3
nP = 5000 # number of particles per spectral index
d = {1:zeros(nS), 2:zeros(nS), 6:zeros(nS), 8:zeros(nS), 14:zeros(nS), 26:zeros(nS)}
# 'simulating for spectral index'
for i in range(nS):
beta = 2 + i/float(nS - 1)
composition = SourceComposition(10, 100, -beta)
composition.add(1, 1, 92000)
composition.add(4, 2, 13000)
composition.add(12, 6, 447.4)
composition.add(16, 8, 526.3)
composition.add(28, 14, 100)
composition.add(56, 26, 97)
ps = ParticleState()
for j in range(nP):
composition.prepareParticle(ps)
z = chargeNumber(ps.getId())
d[z][i] += 1
norm = float(d[1][i])
for z in d.keys():
d[z][i] /= norm
figure()
beta = linspace(2, 3, nS)
elements = {1:'H', 2:'He', 6:'C', 8:'O', 14:'Si', 26:'Fe'}
for z in d.keys():
plt.plot(beta, d[z], label=elements[z])
legend(loc = 'lower right')
xlabel(r'Source Spectral Index $\beta$')
ylabel('Relative Integrated Abundance')
xlim(2, 3)
semilogy()
grid()
savefig('SourceCompostion.png')
show()
|
cheiter/CRPropa3
|
test/python/testSourceComposition.py
|
Python
|
gpl-3.0
| 1,515
|
# -*- coding: utf-8 -*-
from .base import Entidade
from OpenSSL import crypto
import tempfile
import os
class Certificado(Entidade):
"""Classe abstrata responsavel por definir o modelo padrao para as demais
classes de certificados digitais.
Caso va implementar um novo formato de certificado, crie uma classe que
herde desta."""
def __new__(cls, *args, **kwargs):
if cls == Certificado:
raise Exception('Esta classe nao pode ser instanciada diretamente!')
else:
return super(Certificado, cls).__new__(cls)
class CertificadoA1(Certificado):
"""Implementa a entidade do certificado eCNPJ A1, suportado pelo OpenSSL,
e amplamente utilizado."""
caminho_arquivo = None
def __init__(self, caminho_arquivo=None):
self.caminho_arquivo = caminho_arquivo
self.arquivos_temp = []
def separar_arquivo(self, senha, caminho=False):
"""Separa o arquivo de certificado em dois: de chave e de certificado,
e retorna a string. Se caminho for True grava na pasta temporaria e retorna
o caminho dos arquivos, senao retorna o objeto. Apos o uso devem ser excluidos com o metodo excluir."""
# Carrega o arquivo .pfx, erro pode ocorrer se a senha estiver errada ou formato invalido.
try:
pkcs12 = crypto.load_pkcs12(open(self.caminho_arquivo, "rb").read(), senha)
except Exception as e:
raise Exception('Falha ao carregar certificado digital A1. Verifique local e senha.')
if caminho:
cert = crypto.dump_certificate(crypto.FILETYPE_PEM, pkcs12.get_certificate())
chave = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkcs12.get_privatekey())
            # create temporary files
with tempfile.NamedTemporaryFile(delete=False) as arqcert:
arqcert.write(cert)
with tempfile.NamedTemporaryFile(delete=False) as arqchave:
arqchave.write(chave)
self.arquivos_temp.append(arqchave.name)
self.arquivos_temp.append(arqcert.name)
return arqchave.name, arqcert.name
else:
            # Certificate
cert = crypto.dump_certificate(crypto.FILETYPE_PEM, pkcs12.get_certificate()).decode('utf-8')
cert = cert.replace('\n', '')
cert = cert.replace('-----BEGIN CERTIFICATE-----', '')
cert = cert.replace('-----END CERTIFICATE-----', '')
            # Key: decoded string of the private key
chave = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkcs12.get_privatekey())
return chave, cert
def excluir(self):
"""Exclui os arquivos temporarios utilizados para o request."""
try:
for i in self.arquivos_temp:
os.remove(i)
self.arquivos_temp.clear()
except:
pass
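# Usage sketch (illustrative only; the .pfx path and password below are placeholders):
#
#     cert = CertificadoA1('/caminho/para/certificado.pfx')
#     chave_path, cert_path = cert.separar_arquivo('senha', caminho=True)
#     ...  # hand the two temporary PEM files to the HTTPS/SOAP client
#     cert.excluir()  # remove the temporary files when done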
|
leotada/PyNFe
|
pynfe/entidades/certificado.py
|
Python
|
lgpl-3.0
| 2,889
|
#!/usr/bin/python
from scrapy.cmdline import execute
execute("scrapy crawl index".split())
|
VisitBoy/Tumblr_Feed_Video_Crawler
|
start.py
|
Python
|
apache-2.0
| 93
|
#!/usr/bin/env python
## Testing for correct patchset estimation
# Structure of the test cvs repository
#
# Message File:Content Commit Time
# Rev 1 a: 1.1 2009-02-21 19:11:43 +0100
# Rev 2 a: 1.2 b: 1.1 2009-02-21 19:11:14 +0100
# Rev 3 b: 1.2 2009-02-21 19:11:43 +0100
#
# As you can see, the commit of Rev 3 has the same timestamp as
# Rev 1; this leads to a broken import because of a cvsps
# bug.
import os, cvspstest
cc = cvspstest.ConvertComparison("t9603", "module")
cc.cmp_branch_tree("test of branch", "master", True)
cc.cleanup()
|
jleverenz/cvsps-esr
|
test/t9603.py
|
Python
|
gpl-2.0
| 594
|
from __future__ import unicode_literals
from .base import * # noqa @UnusedWildImport
INSTALLED_APPS += ['sendfile']
WIKI_ATTACHMENTS_USE_SENDFILE = True
SENDFILE_BACKEND = 'sendfile.backends.development'
# SENDFILE_URL = None #Not needed
# SENDFILE_ROOT = None #Not needed
|
cXhristian/django-wiki
|
testproject/testproject/settings/sendfile.py
|
Python
|
gpl-3.0
| 280
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path as path
from io import StringIO
import astwro.starlist as sl
import astwro.sampledata as data
from astwro.utils import tmpdir
def get_catalog(object):
from astroquery.vizier import Vizier
    viz = Vizier(columns=['Star', '*'])
star = object.replace('_', ' ') + '*'
try:
return viz.query_constraints(catalog='II/183A/table2',Star=star)[0]
except IndexError:
return None
def test_read_write_ds9():
s1 = sl.read_dao_file(data.ap_file())
d = tmpdir()
f2 = path.join(d.path, 'i.reg')
sl.write_ds9_regions(s1, f2)
s2 = sl.read_ds9_regions(f2)
assert s2[['id', 'x', 'y']].equals(s1[['id', 'x', 'y']])
assert not s2.auto_id.any()
def test_read_noid_reg():
reg = u"""
# Region file format: DS9 version 4.1
global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1
image
circle(869.40377,745.33678,13.888889)
circle(1225.6722,742.09608,13.888889)
circle(753.77706,465.33857,13.888889)
circle(1034.8725,499.95079,13.888889)
circle(1194.5182,211.78505,13.888889)
"""
regstrm = StringIO(reg)
s = sl.read_ds9_regions(regstrm)
assert s.id[1] == 1
assert s.x[3] == 753.77706
assert s.auto_id.all()
def test_read_mixedid_reg():
reg = u"""
# Region file format: DS9 version 4.1
global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1
image
circle(869.40377,745.33678,13.888889)
circle(1225.6722,742.09608,13.888889)
circle(753.77706,465.33857,13.888889) # id=160
circle(1034.8725,499.95079,13.888889)
circle(1194.5182,211.78505,13.888889)
"""
regstrm = StringIO(reg)
s = sl.read_ds9_regions(regstrm)
assert s.x[161] == 869.40377
assert s.y[160] == 465.33857
assert s.auto_id.any()
assert not s.auto_id.all()
def test_read_wcs_reg():
reg = u"""
# Region file format: DS9 version 4.1
global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1
fk5
circle(21:45:00.7278,+65:45:56.751,2.275") # color=#38ACFB text={1} id=1
circle(21:45:33.1351,+65:50:07.545,2.275") # color=#38ACFB text={7}
circle(21:45:41.7344,+65:45:45.371,2.275") # color=#38ACFB text={8} id=8
circle(21:45:46.9471,+65:45:43.794,2.275") # color=#38ACFB text={9} id=9
"""
regstrm = StringIO(reg)
s = sl.read_ds9_regions(regstrm)
assert s.ra[8] == '21:45:41.7344'
assert s.dec[1] == '+65:45:56.751'
assert s.auto_id.any()
assert not s.auto_id.all()
def test_write_wizir_catalog():
d = tmpdir()
fpath = path.join(d.path, 'mark_a.reg')
std_catalog = get_catalog('MARK_A')
sl.write_ds9_regions(std_catalog, fpath, WCS=True)
cat = sl.read_ds9_regions(fpath)
assert len(cat) == len(std_catalog)
|
majkelx/astwro
|
astwro/starlist/tests/ds9_io_test.py
|
Python
|
mit
| 3,020
|
# aprslib - Python library for working with APRS
# Copyright (C) 2013-2014 Rossen Georgiev
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
APRS library in Python
Currently the library provides facilities to:
- parse APRS packets
- connect and listen to an APRS-IS packet feed
"""
# Py2 & Py3 compatibility
import sys
if sys.version_info[0] >= 3:
is_py3 = True
string_type = (str, )
string_type_parse = string_type + (bytes, )
int_type = int
else:
is_py3 = False
string_type = (str, unicode)
string_type_parse = string_type
int_type = (int, long)
from datetime import date as _date
__date__ = str(_date.today())
del _date
__version__ = "0.7.0"
version_info = (0, 7, 0)
__author__ = "Rossen Georgiev"
__all__ = ['IS', 'parse', 'passcode']
from aprslib.exceptions import *
from aprslib.parsing import parse
from aprslib.passcode import passcode
from aprslib.inet import IS
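# --- Illustrative usage sketch (not part of the original package) ---
# A minimal, hedged example of the parsing facility listed in the docstring
# above; the callsign and position report below are made up.
if __name__ == "__main__":
    example_packet = "N0CALL>APRS,TCPIP*:=4903.50N/07201.75W-Test packet"
    # parse() returns a dict of decoded fields (source, latitude, longitude, ...)
    print(parse(example_packet))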
|
rossengeorgiev/aprs-python
|
aprslib/__init__.py
|
Python
|
gpl-2.0
| 1,577
|
# -*- coding: utf-8 -*-
"""
F test for null hypothesis that coefficients in several regressions are the same
* implemented by creating groupdummies*exog and testing appropriate contrast
matrices
* similar to test for structural change in all variables at predefined break points
* allows only one group variable
* currently tests for change in all exog variables
* allows for heteroscedasticity, error variance varies across groups
TODO
----
* generalize anova structure,
- structural break in only some variables
- compare structural breaks in several exog versus constant only
- fast way to construct comparisons
* print anova style results
* add all pairwise comparison tests (DONE) with and without Bonferroni correction
* add additional test, likelihood-ratio, lagrange-multiplier, wald ?
* test for heteroscedasticity, equality of variances
- how?
- like lagrange-multiplier in stattools heteroscedasticity tests
* permutation or bootstrap test statistic or pvalues
References
----------
Greene: section 7.4 Modeling and Testing for a Structural Break
is not the same because I use a different normalization, which looks easier
for more than 2 groups/subperiods
after looking at Greene:
* my version assumes that all groups are large enough to estimate the coefficients
* in sections 7.4.2 and 7.5.3, predictive tests can also be used when there are
insufficient (nobs<nvars) observations in one group/subperiods
question: can this be used to test structural change for last period?
cusum test but only for current period,
in general cusum is better done with recursive ols
check other references again for this, there was one for non-recursive
calculation of cusum (if I remember correctly)
* Greene 7.4.4: with unequal variances Greene mentions Wald test, but where
size of test might not be very good
no mention of F-test based on GLS, is there a reference for what I did?
alternative: use Wald test with bootstrap pvalues?
Created on Sat Mar 27 01:48:01 2010
Author: josef-pktd
"""
import numpy as np
from scipy import stats
from scikits.statsmodels.regression import GLS, OLS, WLS
class OneWayLS(object):
'''Class to test equality of regression coefficients across groups
This class performs tests whether the linear regression coefficients are
the same across pre-specified groups. This can be used to test for
structural breaks at given change points, or for ANOVA style analysis of
differences in the effect of explanatory variables across groups.
Notes
-----
The test is implemented by regression on the original pooled exogenous
variables and on group dummies times the exogenous regressors.
y_i = X_i beta_i + u_i for all groups i
The test is for the null hypothesis: beta_i = beta for all i
against the alternative that at least one beta_i is different.
By default it is assumed that all u_i have the same variance. If the
keyword option het is True, then it is assumed that the variance is
group specific. This uses WLS with weights given by the standard errors
from separate regressions for each group.
Note: het=True is not sufficiently tested
The F-test assumes that the errors are normally distributed.
original question from mailing list for equality of coefficients
across regressions, and example in Stata FAQ
*testing*:
* if constant is the only regressor then the result for the F-test is
the same as scipy.stats.f_oneway
(which in turn is verified against NIST for not badly scaled problems)
* f-test for simple structural break is the same as in original script
* power and size of test look ok in examples
* not checked/verified for heteroscedastic case
- for constant only: ftest result is the same with WLS as with OLS - check?
check: I might be mixing up group names (unique)
    and group id (integers in arange(ngroups))
not tested for groups that are not arange(ngroups)
make sure groupnames are always consistently sorted/ordered
'''
def __init__(self, y, x, groups=None, het=False, data=None, meta=None):
if groups is None:
raise ValueError('use OLS if there are no groups')
#maybe replace by dispatch to OLS
if data:
y = data[y]
x = [data[v] for v in x]
try:
groups = data[groups]
            except (KeyError, ValueError):
pass
self.endog = np.asarray(y)
self.exog = np.asarray(x)
if self.exog.ndim == 1:
self.exog = self.exog[:,None]
self.groups = np.asarray(groups)
self.het = het
self.groupsint = None
if np.issubdtype(self.groups.dtype, int):
self.unique = np.unique(self.groups)
if (self.unique == np.arange(len(self.unique))).all():
self.groupsint = self.groups
if self.groupsint is None: # groups are not consecutive integers
            self.unique, self.groupsint = np.unique(self.groups, return_inverse=True)
def fitbygroups(self):
olsbygroup = {}
sigmabygroup = []
for gi, group in enumerate(self.unique):
groupmask = self.groupsint == group
res = OLS(self.endog[groupmask], self.exog[groupmask]).fit()
olsbygroup[group] = res
sigmabygroup.append(res.mse_resid)
self.olsbygroup = olsbygroup
self.sigmabygroup = np.array(sigmabygroup)
self.weights = np.sqrt(self.sigmabygroup[self.groupsint]) #TODO:chk sqrt
def fitjoint(self):
if not hasattr(self, 'weights'):
self.fitbygroups()
groupdummy = (self.groupsint[:,None] == self.unique).astype(int)
#order of dummy variables by variable - not used
#dummyexog = self.exog[:,:,None]*groupdummy[:,None,1:]
        #order of dummy variables by groups - used
dummyexog = self.exog[:,None,:]*groupdummy[:,1:,None]
exog = np.c_[self.exog, dummyexog.reshape(self.exog.shape[0],-1)] #self.nobs ??
#Notes: I changed to drop first group from dummy
#instead I want one full set dummies
if self.het:
weights = self.weights
res = WLS(self.endog, exog, weights=weights).fit()
else:
res = OLS(self.endog, exog).fit()
self.lsjoint = res
contrasts = {}
nvars = self.exog.shape[1]
nparams = exog.shape[1]
ndummies = nparams - nvars
contrasts['all'] = np.c_[np.zeros((ndummies, nvars)), np.eye(ndummies)]
for groupind,group in enumerate(self.unique[1:]): #need enumerate if groups != groupsint
groupind = groupind + 1
contr = np.zeros((nvars, nparams))
contr[:,nvars*groupind:nvars*(groupind+1)] = np.eye(nvars)
contrasts[group] = contr
#save also for pairs, see next
contrasts[(self.unique[0], group)] = contr
#Note: I'm keeping some duplication for testing
pairs = np.triu_indices(len(self.unique),1)
for ind1,ind2 in zip(*pairs): #replace with group1, group2 in sorted(keys)
if ind1 == 0:
continue # need comparison with benchmark/normalization group separate
g1 = self.unique[ind1]
g2 = self.unique[ind2]
group = (g1, g2)
contr = np.zeros((nvars, nparams))
contr[:,nvars*ind1:nvars*(ind1+1)] = np.eye(nvars)
contr[:,nvars*ind2:nvars*(ind2+1)] = -np.eye(nvars)
contrasts[group] = contr
self.contrasts = contrasts
def fitpooled(self):
if self.het:
if not hasattr(self, 'weights'):
self.fitbygroups()
weights = self.weights
res = WLS(self.endog, self.exog, weights=weights).fit()
else:
res = OLS(self.endog, self.exog).fit()
self.lspooled = res
def ftest_summary(self):
if not hasattr(self, 'lsjoint'):
self.fitjoint()
txt = []
summarytable = []
txt.append('F-test for equality of coefficients across groups')
fres = self.lsjoint.f_test(self.contrasts['all'])
txt.append(fres.__str__())
summarytable.append(('all',(fres.fvalue, fres.pvalue, fres.df_denom, fres.df_num)))
# for group in self.unique[1:]: #replace with group1, group2 in sorted(keys)
# txt.append('F-test for equality of coefficients between group'
# ' %s and group %s' % (group, '0'))
# fres = self.lsjoint.f_test(self.contrasts[group])
# txt.append(fres.__str__())
# summarytable.append((group,(fres.fvalue, fres.pvalue, fres.df_denom, fres.df_num)))
pairs = np.triu_indices(len(self.unique),1)
for ind1,ind2 in zip(*pairs): #replace with group1, group2 in sorted(keys)
g1 = self.unique[ind1]
g2 = self.unique[ind2]
txt.append('F-test for equality of coefficients between group'
' %s and group %s' % (g1, g2))
group = (g1, g2)
fres = self.lsjoint.f_test(self.contrasts[group])
txt.append(fres.__str__())
summarytable.append((group,(fres.fvalue, fres.pvalue, fres.df_denom, fres.df_num)))
return '\n'.join(txt), summarytable
def lr_test(self):
        '''generic likelihood ratio test between nested models

        \begin{align}
        D & = -2(\ln(\text{likelihood for null model})
              - \ln(\text{likelihood for alternative model})) \\
          & = -2\ln\left(\frac{\text{likelihood for null model}}
                              {\text{likelihood for alternative model}}\right).
        \end{align}

        D is distributed as chisquare with df equal to the difference in the number
        of parameters, or equivalently the difference in residual degrees of freedom (sign?)
TODO: put into separate function
'''
if not hasattr(self, 'lsjoint'):
self.fitjoint()
if not hasattr(self, 'lspooled'):
self.fitpooled()
loglikejoint = self.lsjoint.llf
loglikepooled = self.lspooled.llf
lrstat = -2*(loglikepooled - loglikejoint) #??? check sign
lrdf = self.lspooled.df_resid - self.lsjoint.df_resid
lrpval = stats.chi2.sf(lrstat, lrdf)
return lrstat, lrpval, lrdf
def linmod(y, x, **kwds):
    if 'weights' in kwds:
        return WLS(y, x, **kwds)
    elif 'sigma' in kwds:
        return GLS(y, x, **kwds)
    else:
        return OLS(y, x, **kwds)
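# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of the workflow described in the OneWayLS
# docstring: simulate two groups whose slopes differ and run the F-test for
# equality of coefficients. All data below are made up.
if __name__ == '__main__':
    np.random.seed(0)
    nobs = 100
    exog = np.column_stack((np.ones(nobs), np.random.randn(nobs)))
    groups = np.repeat([0, 1], nobs // 2)
    # group 0 has slope 1, group 1 has slope 2 (same intercept)
    beta_true = np.where(groups[:, None] == 0, [1.0, 1.0], [1.0, 2.0])
    endog = (exog * beta_true).sum(1) + 0.5 * np.random.randn(nobs)
    model = OneWayLS(endog, exog, groups=groups)
    summary_text, summary_table = model.ftest_summary()
    print(summary_text)
    print(model.lr_test())  # (statistic, pvalue, df)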
#this has been moved in sandbox/tools/stattools, next to the het and break tests
#def recursive_olsresiduals(olsresults, skip):
# '''this is my original version based on Greene and references'''
# y = olsresults.model.endog
# x = olsresults.model.exog
# nobs, nvars = x.shape
# rparams = np.nan * np.zeros((nobs,nvars))
# rresid = np.nan * np.zeros((nobs))
# rypred = np.nan * np.zeros((nobs))
# rvarraw = np.nan * np.zeros((nobs))
#
# #XTX = np.zeros((nvars,nvars))
# #XTY = np.zeros((nvars))
#
# x0 = x[:skip]
# y0 = y[:skip]
# XTX = np.dot(x0.T, x0)
# XTY = np.dot(x0.T, y0) #xi * y #np.dot(xi, y)
# beta = np.linalg.solve(XTX, XTY)
# rparams[skip-1] = beta
# yipred = np.dot(x[skip-1], beta)
# rypred[skip-1] = yipred
# rresid[skip-1] = y[skip-1] - yipred
# rvarraw[skip-1] = 1+np.dot(x[skip-1],np.dot(np.linalg.inv(XTX),x[skip-1]))
# for i in range(skip,nobs):
# xi = x[i:i+1,:]
# yi = y[i]
# xxT = np.dot(xi.T, xi) #xi is 2d 1 row
# xy = np.squeeze(xi*yi) #.ravel() # XTY is 1d #np.dot(xi, yi) #np.dot(xi, y)
# #print xy.shape, XTY.shape
# #print XTX
# #print XTY
# beta = np.linalg.solve(XTX, XTY)
# rparams[i-1] = beta #this is beta based on info up to t-1
# yipred = np.dot(xi, beta)
# rypred[i] = yipred
# rresid[i] = yi - yipred
# rvarraw[i] = 1 + np.dot(xi,np.dot(np.linalg.inv(XTX),xi.T))
# XTX += xxT
# XTY += xy
#
# i = nobs
# beta = np.linalg.solve(XTX, XTY)
# rparams[i-1] = beta
#
# rresid_scaled = rresid/np.sqrt(rvarraw) #this is N(0,sigma2) distributed
# nrr = nobs-skip
# sigma2 = rresid_scaled[skip-1:].var(ddof=1)
# rresid_standardized = rresid_scaled/np.sqrt(sigma2) #N(0,1) distributed
# rcusum = rresid_standardized[skip-1:].cumsum()
# #confidence interval points in Greene p136 looks strange?
# #this assumes sum of independent standard normal
# #rcusumci = np.sqrt(np.arange(skip,nobs+1))*np.array([[-1.],[+1.]])*stats.norm.sf(0.025)
# a = 1.143 #for alpha=0.99 =0.948 for alpha=0.95
# #following taken from Ploberger,
# crit = a*np.sqrt(nrr)
# rcusumci = (a*np.sqrt(nrr) + a*np.arange(0,nobs-skip)/np.sqrt(nrr)) * np.array([[-1.],[+1.]])
# return rresid, rparams, rypred, rresid_standardized, rresid_scaled, rcusum, rcusumci
#
#
#def recursive_olsresiduals2(olsresults, skip=None):
# '''
#
# note: change to other version beta is now moved by 1 position
# produces same recursive residuals as other version
#
# References
# ----------
# jplv to check formulas, follows Harvey
# BigJudge 5.5.2b for formula for inverse(X'X) updating
# '''
# lamda = 0.0
# y = olsresults.model.endog
# x = olsresults.model.exog
# nobs, nvars = x.shape
# if skip is None:
# skip = nvars
# rparams = np.nan * np.zeros((nobs,nvars))
# rresid = np.nan * np.zeros((nobs))
# rypred = np.nan * np.zeros((nobs))
# rvarraw = np.nan * np.zeros((nobs))
#
#
# #intialize with skip observations
# x0 = x[:skip]
# y0 = y[:skip]
# #add Ridge to start (not in jplv
# XTXi = np.linalg.inv(np.dot(x0.T, x0)+lamda*np.eye(nvars))
# XTY = np.dot(x0.T, y0) #xi * y #np.dot(xi, y)
# #beta = np.linalg.solve(XTX, XTY)
# beta = np.dot(XTXi, XTY)
# #print 'beta', beta
# rparams[skip-1] = beta
# yipred = np.dot(x[skip-1], beta)
# rypred[skip-1] = yipred
# rresid[skip-1] = y[skip-1] - yipred
# rvarraw[skip-1] = 1 + np.dot(x[skip-1],np.dot(XTXi, x[skip-1]))
# for i in range(skip,nobs):
# xi = x[i:i+1,:]
# yi = y[i]
# #xxT = np.dot(xi.T, xi) #xi is 2d 1 row
# xy = (xi*yi).ravel() # XTY is 1d #np.dot(xi, yi) #np.dot(xi, y)
# #print xy.shape, XTY.shape
# #print XTX
# #print XTY
#
# # get prediction error with previous beta
# yipred = np.dot(xi, beta)
# rypred[i] = yipred
# residi = yi - yipred
# rresid[i] = residi
#
# #update beta and inverse(X'X)
# tmp = np.dot(XTXi, xi.T)
# ft = 1 + np.dot(xi, tmp)
#
# XTXi = XTXi - np.dot(tmp,tmp.T) / ft #BigJudge equ 5.5.15
#
# #print 'beta', beta
# beta = beta + (tmp*residi / ft).ravel() #BigJudge equ 5.5.14
## #version for testing
## XTY += xy
## beta = np.dot(XTXi, XTY)
## print (tmp*yipred / ft).shape
## print 'tmp.shape, ft.shape, beta.shape', tmp.shape, ft.shape, beta.shape
# rparams[i] = beta
# rvarraw[i] = ft
#
#
#
# i = nobs
# #beta = np.linalg.solve(XTX, XTY)
# #rparams[i] = beta
#
# rresid_scaled = rresid/np.sqrt(rvarraw) #this is N(0,sigma2) distributed
# nrr = nobs-skip
# sigma2 = rresid_scaled[skip-1:].var(ddof=1)
# rresid_standardized = rresid_scaled/np.sqrt(sigma2) #N(0,1) distributed
# rcusum = rresid_standardized[skip-1:].cumsum()
# #confidence interval points in Greene p136 looks strange?
# #this assumes sum of independent standard normal
# #rcusumci = np.sqrt(np.arange(skip,nobs+1))*np.array([[-1.],[+1.]])*stats.norm.sf(0.025)
# a = 1.143 #for alpha=0.99 =0.948 for alpha=0.95
# #following taken from Ploberger,
# crit = a*np.sqrt(nrr)
# rcusumci = (a*np.sqrt(nrr) + a*np.arange(0,nobs-skip)/np.sqrt(nrr)) * np.array([[-1.],[+1.]])
# return rresid, rparams, rypred, rresid_standardized, rresid_scaled, rcusum, rcusumci
#
|
matthew-brett/draft-statsmodels
|
scikits/statsmodels/sandbox/regression/onewaygls.py
|
Python
|
bsd-3-clause
| 16,054
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
from airflow.providers.amazon.aws.hooks.cloud_formation import AWSCloudFormationHook
try:
from moto import mock_cloudformation
from moto.ec2.models import NetworkInterface as some_model
except ImportError:
mock_cloudformation = None
@unittest.skipIf(mock_cloudformation is None, 'moto package not present')
class TestAWSCloudFormationHook(unittest.TestCase):
def setUp(self):
self.hook = AWSCloudFormationHook(aws_conn_id='aws_default')
def create_stack(self, stack_name):
timeout = 15
template_body = json.dumps(
{
'Resources': {
"myResource": {
"Type": some_model.cloudformation_type(),
"Properties": {"myProperty": "myPropertyValue"},
}
}
}
)
self.hook.create_stack(
stack_name=stack_name,
params={
'TimeoutInMinutes': timeout,
'TemplateBody': template_body,
'Parameters': [{'ParameterKey': 'myParam', 'ParameterValue': 'myParamValue'}],
},
)
@mock_cloudformation
def test_get_conn_returns_a_boto3_connection(self):
assert self.hook.get_conn().describe_stacks() is not None
@mock_cloudformation
def test_get_stack_status(self):
stack_name = 'my_test_get_stack_status_stack'
stack_status = self.hook.get_stack_status(stack_name=stack_name)
assert stack_status is None
self.create_stack(stack_name)
stack_status = self.hook.get_stack_status(stack_name=stack_name)
assert stack_status == 'CREATE_COMPLETE', 'Incorrect stack status returned.'
@mock_cloudformation
def test_create_stack(self):
stack_name = 'my_test_create_stack_stack'
self.create_stack(stack_name)
stacks = self.hook.get_conn().describe_stacks()['Stacks']
assert len(stacks) > 0, 'CloudFormation should have stacks'
matching_stacks = [x for x in stacks if x['StackName'] == stack_name]
assert len(matching_stacks) == 1, f'stack with name {stack_name} should exist'
stack = matching_stacks[0]
assert stack['StackStatus'] == 'CREATE_COMPLETE', 'Stack should be in status CREATE_COMPLETE'
@mock_cloudformation
def test_delete_stack(self):
stack_name = 'my_test_delete_stack_stack'
self.create_stack(stack_name)
self.hook.delete_stack(stack_name=stack_name)
stacks = self.hook.get_conn().describe_stacks()['Stacks']
matching_stacks = [x for x in stacks if x['StackName'] == stack_name]
assert len(matching_stacks) == 0, f'stack with name {stack_name} should not exist'
|
apache/incubator-airflow
|
tests/providers/amazon/aws/hooks/test_cloud_formation.py
|
Python
|
apache-2.0
| 3,561
|
class List_plays:
'''
This class stores and sorts objects of the Play class so that they can be efficiently accessed for processing.
    The overall structure consists of three separate lists (one per down), each of which holds ten buckets
    of Play instances keyed by distance to the first down.
'''
def __init__(self):
self.first_down = [[],[],[],[],[],[],[],[],[],[]]
self.second_down = [[],[],[],[],[],[],[],[],[],[]]
self.third_down = [[],[],[],[],[],[],[],[],[],[]]
self.plays_array = [self.first_down,self.second_down,self.third_down]
self.number_plays = 0
def __getnewargs__(self):
return ()
    def add_play(self,play):
        down = play.start_down
        to_go = play.distance_to_first
        self.plays_array[down-1][min(to_go-1,9)].append(play)
        self.number_plays += 1
def retrieve_plays(self,down,yards):
return self.plays_array[down-1][min(yards-1,9)]
class Play:
'''
This class stores data for one NFL regulation play. Data scraped from Sport Data API. Variable Descriptions:
Offense (str) offensive team's name
Defense (str) defensive team's name
start_down (int) Down when play starts
distance_to_first (int) Distance to first down
end_down (int) Down when play ends
end_distance_to_first (int) Distance to first down at end of play
play_type (int) Run (0) or Pass (1)
play_type_detail (int) If run: Left (0), Middle (1), Right (2). If throw: Short (<5) (0), Medium (5-15) (1), Long (>15) (2)
turnover (int) No turnover (0), Turnover (1)
    position (int) Position on field at beginning of play (0-100). 0 corresponds to own end zone, 100 to the opponent's end zone
score_offense (int) Current number of points that the offense has
score_defense (int) Current number of points that the defense has
time (int) number of minutes left in quarter
    quarter (int) Quarter that the play occurred in (1-4)
player (str) Player name identified by play-by-play
'''
def __init__(self,offense,defense,start_down,distance_to_first,end_down,end_distance_to_first,play_type,play_type_detail,turnover,position,score_offense,score_defense,time,quarter,player):
self.offense = offense
self.defense = defense
self.start_down = start_down
self.distance_to_first = distance_to_first
self.end_down = end_down
self.end_distance_to_first = end_distance_to_first
self.play_type = play_type
self.play_type_detail = play_type_detail
self.turnover = turnover
self.position = position
self.score_offense = score_offense
self.score_defense = score_defense
self.time = time
self.quarter = quarter
self.player = player
def __lt__(self,other):
return self.distance_to_first <= other.distance_to_first
def __str__(self):
string = 'Offense:'
string += self.offense
string += ' Defense:'
string += self.defense
string += ' Down:'
string += str(self.start_down)
string += ' To go:'
string += str(self.distance_to_first)
string += ' Play:'
if(self.play_type == 0):
string += 'Run '
if(self.play_type_detail == 0):
string += 'Left'
elif(self.play_type_detail == 1):
string += 'Middle'
else:
string += 'Right'
else:
string += 'Pass '
if(self.play_type_detail == 0):
string += 'Short'
elif(self.play_type_detail == 1):
string += 'Medium'
else:
string += 'Long'
return string
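# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of the two classes documented above; all field
# values are made up. A play is bucketed by starting down and distance to go.
if __name__ == '__main__':
    plays = List_plays()
    example_play = Play('NE', 'NYJ',      # offense, defense
                        2, 7,             # start_down, distance_to_first
                        3, 4,             # end_down, end_distance_to_first
                        1, 0,             # play_type (pass), play_type_detail (short)
                        0, 35,            # turnover, field position
                        14, 10,           # score_offense, score_defense
                        8, 2,             # minutes left in quarter, quarter
                        'T. Brady')       # player
    plays.add_play(example_play)
    for play in plays.retrieve_plays(down=2, yards=7):
        print(play)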
|
A-Malone/NFL-Analytics
|
AnalysisSite/DataStructure.py
|
Python
|
mit
| 3,299
|
import numpy as np
from numpy import linalg
from scipy.sparse import dok_matrix, csr_matrix, issparse
from scipy.spatial.distance import cosine, cityblock, minkowski, wminkowski
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regexp
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import ignore_warnings
from sklearn.externals.six import iteritems
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.metrics.pairwise import manhattan_distances
from sklearn.metrics.pairwise import linear_kernel
from sklearn.metrics.pairwise import chi2_kernel, additive_chi2_kernel
from sklearn.metrics.pairwise import polynomial_kernel
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.metrics.pairwise import laplacian_kernel
from sklearn.metrics.pairwise import sigmoid_kernel
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics.pairwise import cosine_distances
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.metrics.pairwise import pairwise_distances_argmin_min
from sklearn.metrics.pairwise import pairwise_distances_argmin
from sklearn.metrics.pairwise import pairwise_kernels
from sklearn.metrics.pairwise import PAIRWISE_KERNEL_FUNCTIONS
from sklearn.metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS
from sklearn.metrics.pairwise import PAIRWISE_BOOLEAN_FUNCTIONS
from sklearn.metrics.pairwise import PAIRED_DISTANCES
from sklearn.metrics.pairwise import check_pairwise_arrays
from sklearn.metrics.pairwise import check_paired_arrays
from sklearn.metrics.pairwise import paired_distances
from sklearn.metrics.pairwise import paired_euclidean_distances
from sklearn.metrics.pairwise import paired_manhattan_distances
from sklearn.preprocessing import normalize
from sklearn.exceptions import DataConversionWarning
def test_pairwise_distances():
    # Test the pairwise_distances helper function.
rng = np.random.RandomState(0)
# Euclidean distance should be equivalent to calling the function.
X = rng.random_sample((5, 4))
S = pairwise_distances(X, metric="euclidean")
S2 = euclidean_distances(X)
assert_array_almost_equal(S, S2)
# Euclidean distance, with Y != X.
Y = rng.random_sample((2, 4))
S = pairwise_distances(X, Y, metric="euclidean")
S2 = euclidean_distances(X, Y)
assert_array_almost_equal(S, S2)
# Test with tuples as X and Y
X_tuples = tuple([tuple([v for v in row]) for row in X])
Y_tuples = tuple([tuple([v for v in row]) for row in Y])
S2 = pairwise_distances(X_tuples, Y_tuples, metric="euclidean")
assert_array_almost_equal(S, S2)
# "cityblock" uses scikit-learn metric, cityblock (function) is
# scipy.spatial.
S = pairwise_distances(X, metric="cityblock")
S2 = pairwise_distances(X, metric=cityblock)
assert_equal(S.shape[0], S.shape[1])
assert_equal(S.shape[0], X.shape[0])
assert_array_almost_equal(S, S2)
# The manhattan metric should be equivalent to cityblock.
S = pairwise_distances(X, Y, metric="manhattan")
S2 = pairwise_distances(X, Y, metric=cityblock)
assert_equal(S.shape[0], X.shape[0])
assert_equal(S.shape[1], Y.shape[0])
assert_array_almost_equal(S, S2)
# Using size_threshold argument should raise
# a deprecation warning
assert_warns(DeprecationWarning,
manhattan_distances, X, Y, size_threshold=10)
# Test cosine as a string metric versus cosine callable
# The string "cosine" uses sklearn.metric,
# while the function cosine is scipy.spatial
S = pairwise_distances(X, Y, metric="cosine")
S2 = pairwise_distances(X, Y, metric=cosine)
assert_equal(S.shape[0], X.shape[0])
assert_equal(S.shape[1], Y.shape[0])
assert_array_almost_equal(S, S2)
# Test with sparse X and Y,
# currently only supported for Euclidean, L1 and cosine.
X_sparse = csr_matrix(X)
Y_sparse = csr_matrix(Y)
S = pairwise_distances(X_sparse, Y_sparse, metric="euclidean")
S2 = euclidean_distances(X_sparse, Y_sparse)
assert_array_almost_equal(S, S2)
S = pairwise_distances(X_sparse, Y_sparse, metric="cosine")
S2 = cosine_distances(X_sparse, Y_sparse)
assert_array_almost_equal(S, S2)
S = pairwise_distances(X_sparse, Y_sparse.tocsc(), metric="manhattan")
S2 = manhattan_distances(X_sparse.tobsr(), Y_sparse.tocoo())
assert_array_almost_equal(S, S2)
S2 = manhattan_distances(X, Y)
assert_array_almost_equal(S, S2)
# Test with scipy.spatial.distance metric, with a kwd
kwds = {"p": 2.0}
S = pairwise_distances(X, Y, metric="minkowski", **kwds)
S2 = pairwise_distances(X, Y, metric=minkowski, **kwds)
assert_array_almost_equal(S, S2)
# same with Y = None
kwds = {"p": 2.0}
S = pairwise_distances(X, metric="minkowski", **kwds)
S2 = pairwise_distances(X, metric=minkowski, **kwds)
assert_array_almost_equal(S, S2)
# Test that scipy distance metrics throw an error if sparse matrix given
assert_raises(TypeError, pairwise_distances, X_sparse, metric="minkowski")
assert_raises(TypeError, pairwise_distances, X, Y_sparse,
metric="minkowski")
# Test that a value error is raised if the metric is unknown
assert_raises(ValueError, pairwise_distances, X, Y, metric="blah")
# ignore conversion to boolean in pairwise_distances
@ignore_warnings(category=DataConversionWarning)
def test_pairwise_boolean_distance():
# test that we convert to boolean arrays for boolean distances
rng = np.random.RandomState(0)
X = rng.randn(5, 4)
Y = X.copy()
Y[0, 0] = 1 - Y[0, 0]
for metric in PAIRWISE_BOOLEAN_FUNCTIONS:
for Z in [Y, None]:
res = pairwise_distances(X, Z, metric=metric)
res[np.isnan(res)] = 0
assert_true(np.sum(res != 0) == 0)
def test_pairwise_precomputed():
for func in [pairwise_distances, pairwise_kernels]:
# Test correct shape
assert_raises_regexp(ValueError, '.* shape .*',
func, np.zeros((5, 3)), metric='precomputed')
# with two args
assert_raises_regexp(ValueError, '.* shape .*',
func, np.zeros((5, 3)), np.zeros((4, 4)),
metric='precomputed')
        # even if shape[1] agrees (although this second arg is spurious)
assert_raises_regexp(ValueError, '.* shape .*',
func, np.zeros((5, 3)), np.zeros((4, 3)),
metric='precomputed')
# Test not copied (if appropriate dtype)
S = np.zeros((5, 5))
S2 = func(S, metric="precomputed")
assert_true(S is S2)
# with two args
S = np.zeros((5, 3))
S2 = func(S, np.zeros((3, 3)), metric="precomputed")
assert_true(S is S2)
# Test always returns float dtype
S = func(np.array([[1]], dtype='int'), metric='precomputed')
assert_equal('f', S.dtype.kind)
# Test converts list to array-like
S = func([[1.]], metric='precomputed')
assert_true(isinstance(S, np.ndarray))
def check_pairwise_parallel(func, metric, kwds):
rng = np.random.RandomState(0)
for make_data in (np.array, csr_matrix):
X = make_data(rng.random_sample((5, 4)))
Y = make_data(rng.random_sample((3, 4)))
try:
S = func(X, metric=metric, n_jobs=1, **kwds)
except (TypeError, ValueError) as exc:
# Not all metrics support sparse input
# ValueError may be triggered by bad callable
if make_data is csr_matrix:
assert_raises(type(exc), func, X, metric=metric,
n_jobs=2, **kwds)
continue
else:
raise
S2 = func(X, metric=metric, n_jobs=2, **kwds)
assert_array_almost_equal(S, S2)
S = func(X, Y, metric=metric, n_jobs=1, **kwds)
S2 = func(X, Y, metric=metric, n_jobs=2, **kwds)
assert_array_almost_equal(S, S2)
def test_pairwise_parallel():
wminkowski_kwds = {'w': np.arange(1, 5).astype('double'), 'p': 1}
metrics = [(pairwise_distances, 'euclidean', {}),
(pairwise_distances, wminkowski, wminkowski_kwds),
(pairwise_distances, 'wminkowski', wminkowski_kwds),
(pairwise_kernels, 'polynomial', {'degree': 1}),
(pairwise_kernels, callable_rbf_kernel, {'gamma': .1}),
]
for func, metric, kwds in metrics:
yield check_pairwise_parallel, func, metric, kwds
def test_pairwise_callable_nonstrict_metric():
# paired_distances should allow callable metric where metric(x, x) != 0
# Knowing that the callable is a strict metric would allow the diagonal to
# be left uncalculated and set to 0.
assert_equal(pairwise_distances([[1.]], metric=lambda x, y: 5)[0, 0], 5)
def callable_rbf_kernel(x, y, **kwds):
# Callable version of pairwise.rbf_kernel.
K = rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds)
return K
def test_pairwise_kernels(): # Test the pairwise_kernels helper function.
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((2, 4))
# Test with all metrics that should be in PAIRWISE_KERNEL_FUNCTIONS.
test_metrics = ["rbf", "laplacian", "sigmoid", "polynomial", "linear",
"chi2", "additive_chi2"]
for metric in test_metrics:
function = PAIRWISE_KERNEL_FUNCTIONS[metric]
# Test with Y=None
K1 = pairwise_kernels(X, metric=metric)
K2 = function(X)
assert_array_almost_equal(K1, K2)
# Test with Y=Y
K1 = pairwise_kernels(X, Y=Y, metric=metric)
K2 = function(X, Y=Y)
assert_array_almost_equal(K1, K2)
# Test with tuples as X and Y
X_tuples = tuple([tuple([v for v in row]) for row in X])
Y_tuples = tuple([tuple([v for v in row]) for row in Y])
K2 = pairwise_kernels(X_tuples, Y_tuples, metric=metric)
assert_array_almost_equal(K1, K2)
# Test with sparse X and Y
X_sparse = csr_matrix(X)
Y_sparse = csr_matrix(Y)
if metric in ["chi2", "additive_chi2"]:
# these don't support sparse matrices yet
assert_raises(ValueError, pairwise_kernels,
X_sparse, Y=Y_sparse, metric=metric)
continue
K1 = pairwise_kernels(X_sparse, Y=Y_sparse, metric=metric)
assert_array_almost_equal(K1, K2)
# Test with a callable function, with given keywords.
metric = callable_rbf_kernel
kwds = {'gamma': 0.1}
K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds)
K2 = rbf_kernel(X, Y=Y, **kwds)
assert_array_almost_equal(K1, K2)
# callable function, X=Y
K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds)
K2 = rbf_kernel(X, Y=X, **kwds)
assert_array_almost_equal(K1, K2)
def test_pairwise_kernels_filter_param():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((2, 4))
K = rbf_kernel(X, Y, gamma=0.1)
params = {"gamma": 0.1, "blabla": ":)"}
K2 = pairwise_kernels(X, Y, metric="rbf", filter_params=True, **params)
assert_array_almost_equal(K, K2)
assert_raises(TypeError, pairwise_kernels, X, Y, "rbf", **params)
def test_paired_distances():
    # Test the paired_distances helper function.
rng = np.random.RandomState(0)
# Euclidean distance should be equivalent to calling the function.
X = rng.random_sample((5, 4))
# Euclidean distance, with Y != X.
Y = rng.random_sample((5, 4))
for metric, func in iteritems(PAIRED_DISTANCES):
S = paired_distances(X, Y, metric=metric)
S2 = func(X, Y)
assert_array_almost_equal(S, S2)
S3 = func(csr_matrix(X), csr_matrix(Y))
assert_array_almost_equal(S, S3)
if metric in PAIRWISE_DISTANCE_FUNCTIONS:
# Check the pairwise_distances implementation
# gives the same value
distances = PAIRWISE_DISTANCE_FUNCTIONS[metric](X, Y)
distances = np.diag(distances)
assert_array_almost_equal(distances, S)
# Check the callable implementation
S = paired_distances(X, Y, metric='manhattan')
S2 = paired_distances(X, Y, metric=lambda x, y: np.abs(x - y).sum(axis=0))
assert_array_almost_equal(S, S2)
    # Test that a ValueError is raised when the lengths of X and Y differ
Y = rng.random_sample((3, 4))
assert_raises(ValueError, paired_distances, X, Y)
def test_pairwise_distances_argmin_min():
# Check pairwise minimum distances computation for any metric
X = [[0], [1]]
Y = [[-1], [2]]
Xsp = dok_matrix(X)
Ysp = csr_matrix(Y, dtype=np.float32)
# euclidean metric
D, E = pairwise_distances_argmin_min(X, Y, metric="euclidean")
D2 = pairwise_distances_argmin(X, Y, metric="euclidean")
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(D2, [0, 1])
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# sparse matrix case
Dsp, Esp = pairwise_distances_argmin_min(Xsp, Ysp, metric="euclidean")
assert_array_equal(Dsp, D)
assert_array_equal(Esp, E)
# We don't want np.matrix here
assert_equal(type(Dsp), np.ndarray)
assert_equal(type(Esp), np.ndarray)
# Non-euclidean scikit-learn metric
D, E = pairwise_distances_argmin_min(X, Y, metric="manhattan")
D2 = pairwise_distances_argmin(X, Y, metric="manhattan")
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(D2, [0, 1])
assert_array_almost_equal(E, [1., 1.])
D, E = pairwise_distances_argmin_min(Xsp, Ysp, metric="manhattan")
D2 = pairwise_distances_argmin(Xsp, Ysp, metric="manhattan")
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# Non-euclidean Scipy distance (callable)
D, E = pairwise_distances_argmin_min(X, Y, metric=minkowski,
metric_kwargs={"p": 2})
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# Non-euclidean Scipy distance (string)
D, E = pairwise_distances_argmin_min(X, Y, metric="minkowski",
metric_kwargs={"p": 2})
assert_array_almost_equal(D, [0, 1])
assert_array_almost_equal(E, [1., 1.])
# Compare with naive implementation
rng = np.random.RandomState(0)
X = rng.randn(97, 149)
Y = rng.randn(111, 149)
dist = pairwise_distances(X, Y, metric="manhattan")
dist_orig_ind = dist.argmin(axis=0)
dist_orig_val = dist[dist_orig_ind, range(len(dist_orig_ind))]
dist_chunked_ind, dist_chunked_val = pairwise_distances_argmin_min(
X, Y, axis=0, metric="manhattan", batch_size=50)
np.testing.assert_almost_equal(dist_orig_ind, dist_chunked_ind, decimal=7)
np.testing.assert_almost_equal(dist_orig_val, dist_chunked_val, decimal=7)
def test_euclidean_distances():
# Check the pairwise Euclidean distances computation
X = [[0]]
Y = [[1], [2]]
D = euclidean_distances(X, Y)
assert_array_almost_equal(D, [[1., 2.]])
X = csr_matrix(X)
Y = csr_matrix(Y)
D = euclidean_distances(X, Y)
assert_array_almost_equal(D, [[1., 2.]])
rng = np.random.RandomState(0)
X = rng.random_sample((10, 4))
Y = rng.random_sample((20, 4))
X_norm_sq = (X ** 2).sum(axis=1).reshape(1, -1)
Y_norm_sq = (Y ** 2).sum(axis=1).reshape(1, -1)
# check that we still get the right answers with {X,Y}_norm_squared
D1 = euclidean_distances(X, Y)
D2 = euclidean_distances(X, Y, X_norm_squared=X_norm_sq)
D3 = euclidean_distances(X, Y, Y_norm_squared=Y_norm_sq)
D4 = euclidean_distances(X, Y, X_norm_squared=X_norm_sq,
Y_norm_squared=Y_norm_sq)
assert_array_almost_equal(D2, D1)
assert_array_almost_equal(D3, D1)
assert_array_almost_equal(D4, D1)
# check we get the wrong answer with wrong {X,Y}_norm_squared
X_norm_sq *= 0.5
Y_norm_sq *= 0.5
wrong_D = euclidean_distances(X, Y,
X_norm_squared=np.zeros_like(X_norm_sq),
Y_norm_squared=np.zeros_like(Y_norm_sq))
assert_greater(np.max(np.abs(wrong_D - D1)), .01)
def test_cosine_distances():
# Check the pairwise Cosine distances computation
rng = np.random.RandomState(1337)
x = np.abs(rng.rand(910))
XA = np.vstack([x, x])
D = cosine_distances(XA)
assert_array_almost_equal(D, [[0., 0.], [0., 0.]])
# check that all elements are in [0, 2]
assert_true(np.all(D >= 0.))
assert_true(np.all(D <= 2.))
# check that diagonal elements are equal to 0
assert_array_almost_equal(D[np.diag_indices_from(D)], [0., 0.])
XB = np.vstack([x, -x])
D2 = cosine_distances(XB)
# check that all elements are in [0, 2]
assert_true(np.all(D2 >= 0.))
assert_true(np.all(D2 <= 2.))
# check that diagonal elements are equal to 0 and non diagonal to 2
assert_array_almost_equal(D2, [[0., 2.], [2., 0.]])
# check large random matrix
X = np.abs(rng.rand(1000, 5000))
D = cosine_distances(X)
# check that diagonal elements are equal to 0
assert_array_almost_equal(D[np.diag_indices_from(D)], [0.] * D.shape[0])
assert_true(np.all(D >= 0.))
assert_true(np.all(D <= 2.))
# Paired distances
def test_paired_euclidean_distances():
# Check the paired Euclidean distances computation
X = [[0], [0]]
Y = [[1], [2]]
D = paired_euclidean_distances(X, Y)
assert_array_almost_equal(D, [1., 2.])
def test_paired_manhattan_distances():
# Check the paired manhattan distances computation
X = [[0], [0]]
Y = [[1], [2]]
D = paired_manhattan_distances(X, Y)
assert_array_almost_equal(D, [1., 2.])
def test_chi_square_kernel():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((10, 4))
K_add = additive_chi2_kernel(X, Y)
gamma = 0.1
K = chi2_kernel(X, Y, gamma=gamma)
assert_equal(K.dtype, np.float)
for i, x in enumerate(X):
for j, y in enumerate(Y):
chi2 = -np.sum((x - y) ** 2 / (x + y))
chi2_exp = np.exp(gamma * chi2)
assert_almost_equal(K_add[i, j], chi2)
assert_almost_equal(K[i, j], chi2_exp)
# check diagonal is ones for data with itself
K = chi2_kernel(Y)
assert_array_equal(np.diag(K), 1)
# check off-diagonal is < 1 but > 0:
assert_true(np.all(K > 0))
assert_true(np.all(K - np.diag(np.diag(K)) < 1))
# check that float32 is preserved
X = rng.random_sample((5, 4)).astype(np.float32)
Y = rng.random_sample((10, 4)).astype(np.float32)
K = chi2_kernel(X, Y)
assert_equal(K.dtype, np.float32)
# check integer type gets converted,
# check that zeros are handled
X = rng.random_sample((10, 4)).astype(np.int32)
K = chi2_kernel(X, X)
assert_true(np.isfinite(K).all())
assert_equal(K.dtype, np.float)
# check that kernel of similar things is greater than dissimilar ones
X = [[.3, .7], [1., 0]]
Y = [[0, 1], [.9, .1]]
K = chi2_kernel(X, Y)
assert_greater(K[0, 0], K[0, 1])
assert_greater(K[1, 1], K[1, 0])
# test negative input
assert_raises(ValueError, chi2_kernel, [[0, -1]])
assert_raises(ValueError, chi2_kernel, [[0, -1]], [[-1, -1]])
assert_raises(ValueError, chi2_kernel, [[0, 1]], [[-1, -1]])
# different n_features in X and Y
assert_raises(ValueError, chi2_kernel, [[0, 1]], [[.2, .2, .6]])
# sparse matrices
assert_raises(ValueError, chi2_kernel, csr_matrix(X), csr_matrix(Y))
assert_raises(ValueError, additive_chi2_kernel,
csr_matrix(X), csr_matrix(Y))
def test_kernel_symmetry():
# Valid kernels should be symmetric
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
laplacian_kernel, sigmoid_kernel, cosine_similarity):
K = kernel(X, X)
assert_array_almost_equal(K, K.T, 15)
def test_kernel_sparse():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
X_sparse = csr_matrix(X)
for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
laplacian_kernel, sigmoid_kernel, cosine_similarity):
K = kernel(X, X)
K2 = kernel(X_sparse, X_sparse)
assert_array_almost_equal(K, K2)
def test_linear_kernel():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
K = linear_kernel(X, X)
# the diagonal elements of a linear kernel are their squared norm
assert_array_almost_equal(K.flat[::6], [linalg.norm(x) ** 2 for x in X])
def test_rbf_kernel():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
K = rbf_kernel(X, X)
# the diagonal elements of a rbf kernel are 1
assert_array_almost_equal(K.flat[::6], np.ones(5))
def test_laplacian_kernel():
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
K = laplacian_kernel(X, X)
# the diagonal elements of a laplacian kernel are 1
assert_array_almost_equal(np.diag(K), np.ones(5))
# off-diagonal elements are < 1 but > 0:
assert_true(np.all(K > 0))
assert_true(np.all(K - np.diag(np.diag(K)) < 1))
def test_cosine_similarity_sparse_output():
# Test if cosine_similarity correctly produces sparse output.
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((3, 4))
Xcsr = csr_matrix(X)
Ycsr = csr_matrix(Y)
K1 = cosine_similarity(Xcsr, Ycsr, dense_output=False)
assert_true(issparse(K1))
K2 = pairwise_kernels(Xcsr, Y=Ycsr, metric="cosine")
assert_array_almost_equal(K1.todense(), K2)
def test_cosine_similarity():
# Test the cosine_similarity.
rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))
Y = rng.random_sample((3, 4))
Xcsr = csr_matrix(X)
Ycsr = csr_matrix(Y)
for X_, Y_ in ((X, None), (X, Y),
(Xcsr, None), (Xcsr, Ycsr)):
        # Test that the cosine kernel is equal to a linear kernel when data
# has been previously normalized by L2-norm.
K1 = pairwise_kernels(X_, Y=Y_, metric="cosine")
X_ = normalize(X_)
if Y_ is not None:
Y_ = normalize(Y_)
K2 = pairwise_kernels(X_, Y=Y_, metric="linear")
assert_array_almost_equal(K1, K2)
def test_check_dense_matrices():
# Ensure that pairwise array check works for dense matrices.
# Check that if XB is None, XB is returned as reference to XA
XA = np.resize(np.arange(40), (5, 8))
XA_checked, XB_checked = check_pairwise_arrays(XA, None)
assert_true(XA_checked is XB_checked)
assert_array_equal(XA, XA_checked)
def test_check_XB_returned():
# Ensure that if XA and XB are given correctly, they return as equal.
# Check that if XB is not None, it is returned equal.
# Note that the second dimension of XB is the same as XA.
XA = np.resize(np.arange(40), (5, 8))
XB = np.resize(np.arange(32), (4, 8))
XA_checked, XB_checked = check_pairwise_arrays(XA, XB)
assert_array_equal(XA, XA_checked)
assert_array_equal(XB, XB_checked)
XB = np.resize(np.arange(40), (5, 8))
XA_checked, XB_checked = check_paired_arrays(XA, XB)
assert_array_equal(XA, XA_checked)
assert_array_equal(XB, XB_checked)
def test_check_different_dimensions():
# Ensure an error is raised if the dimensions are different.
XA = np.resize(np.arange(45), (5, 9))
XB = np.resize(np.arange(32), (4, 8))
assert_raises(ValueError, check_pairwise_arrays, XA, XB)
XB = np.resize(np.arange(4 * 9), (4, 9))
assert_raises(ValueError, check_paired_arrays, XA, XB)
def test_check_invalid_dimensions():
# Ensure an error is raised on 1D input arrays.
# The modified tests are not 1D. In the old test, the array was internally
# converted to 2D anyways
XA = np.arange(45).reshape(9, 5)
XB = np.arange(32).reshape(4, 8)
assert_raises(ValueError, check_pairwise_arrays, XA, XB)
XA = np.arange(45).reshape(9, 5)
XB = np.arange(32).reshape(4, 8)
assert_raises(ValueError, check_pairwise_arrays, XA, XB)
def test_check_sparse_arrays():
# Ensures that checks return valid sparse matrices.
rng = np.random.RandomState(0)
XA = rng.random_sample((5, 4))
XA_sparse = csr_matrix(XA)
XB = rng.random_sample((5, 4))
XB_sparse = csr_matrix(XB)
XA_checked, XB_checked = check_pairwise_arrays(XA_sparse, XB_sparse)
# compare their difference because testing csr matrices for
# equality with '==' does not work as expected.
assert_true(issparse(XA_checked))
assert_equal(abs(XA_sparse - XA_checked).sum(), 0)
assert_true(issparse(XB_checked))
assert_equal(abs(XB_sparse - XB_checked).sum(), 0)
XA_checked, XA_2_checked = check_pairwise_arrays(XA_sparse, XA_sparse)
assert_true(issparse(XA_checked))
assert_equal(abs(XA_sparse - XA_checked).sum(), 0)
assert_true(issparse(XA_2_checked))
assert_equal(abs(XA_2_checked - XA_checked).sum(), 0)
def tuplify(X):
# Turns a numpy matrix (any n-dimensional array) into tuples.
s = X.shape
if len(s) > 1:
# Tuplify each sub-array in the input.
return tuple(tuplify(row) for row in X)
else:
# Single dimension input, just return tuple of contents.
return tuple(r for r in X)
def test_check_tuple_input():
# Ensures that checks return valid tuples.
rng = np.random.RandomState(0)
XA = rng.random_sample((5, 4))
XA_tuples = tuplify(XA)
XB = rng.random_sample((5, 4))
XB_tuples = tuplify(XB)
XA_checked, XB_checked = check_pairwise_arrays(XA_tuples, XB_tuples)
assert_array_equal(XA_tuples, XA_checked)
assert_array_equal(XB_tuples, XB_checked)
def test_check_preserve_type():
# Ensures that type float32 is preserved.
XA = np.resize(np.arange(40), (5, 8)).astype(np.float32)
XB = np.resize(np.arange(40), (5, 8)).astype(np.float32)
XA_checked, XB_checked = check_pairwise_arrays(XA, None)
assert_equal(XA_checked.dtype, np.float32)
# both float32
XA_checked, XB_checked = check_pairwise_arrays(XA, XB)
assert_equal(XA_checked.dtype, np.float32)
assert_equal(XB_checked.dtype, np.float32)
# mismatched A
XA_checked, XB_checked = check_pairwise_arrays(XA.astype(np.float),
XB)
assert_equal(XA_checked.dtype, np.float)
assert_equal(XB_checked.dtype, np.float)
# mismatched B
XA_checked, XB_checked = check_pairwise_arrays(XA,
XB.astype(np.float))
assert_equal(XA_checked.dtype, np.float)
assert_equal(XB_checked.dtype, np.float)
|
mbayon/TFG-MachineLearning
|
venv/lib/python3.6/site-packages/sklearn/metrics/tests/test_pairwise.py
|
Python
|
mit
| 27,335
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
# Import the GPIO library
import RPi.GPIO as GPIO
# Use BCM pin numbering for the board
GPIO.setmode(GPIO.BCM)
# Disable warnings
GPIO.setwarnings(False)
# Import the time library
import time
# Import the library for console/shell commands
import os
# Configure the GPIO pin as an output
GPIO.setup(6, GPIO.OUT)
# Drive the output low so the 3.3V GPIO output stops supplying current
GPIO.output(6, GPIO.LOW)
# Clean up the GPIO state
GPIO.cleanup()
|
fryntiz/Raspberry-PI
|
Obsoleto/Web Administración Raspberri PI/GPIO/6/apagar.py
|
Python
|
gpl-3.0
| 494
|
#!/usr/bin/env python
# Copyright (c) 2021 by
# Donatas Abraitis <donatas.abraitis@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NETDEF DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NETDEF BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""
Test if BGP community alias is visible in CLI outputs
"""
import os
import sys
import json
import pytest
import functools
pytestmark = pytest.mark.bgpd
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
# pylint: disable=C0413
from lib import topotest
from lib.topogen import Topogen, TopoRouter, get_topogen
pytestmark = [pytest.mark.bgpd]
def build_topo(tgen):
for routern in range(1, 3):
tgen.add_router("r{}".format(routern))
switch = tgen.add_switch("s1")
switch.add_link(tgen.gears["r1"])
switch.add_link(tgen.gears["r2"])
def setup_module(mod):
tgen = Topogen(build_topo, mod.__name__)
tgen.start_topology()
router_list = tgen.routers()
for i, (rname, router) in enumerate(router_list.items(), 1):
router.load_config(
TopoRouter.RD_ZEBRA, os.path.join(CWD, "{}/zebra.conf".format(rname))
)
router.load_config(
TopoRouter.RD_BGP, os.path.join(CWD, "{}/bgpd.conf".format(rname))
)
tgen.start_router()
def teardown_module(mod):
tgen = get_topogen()
tgen.stop_topology()
def test_bgp_community_alias():
tgen = get_topogen()
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
router = tgen.gears["r1"]
def _bgp_converge(router):
output = json.loads(router.vtysh_cmd("show ip route json"))
expected = {
"172.16.16.1/32": [
{
"tag": 10,
"communities": "community-r2-1 65001:2",
"largeCommunities": "large-community-r2-1 65001:1:2",
}
],
"172.16.16.2/32": [
{
"tag": 20,
"communities": "65002:1 community-r2-2",
"largeCommunities": "",
}
],
"172.16.16.3/32": [
{
"tag": 100,
"communities": "",
"largeCommunities": "",
}
],
}
return topotest.json_cmp(output, expected)
test_func = functools.partial(_bgp_converge, router)
success, result = topotest.run_and_expect(test_func, None, count=60, wait=0.5)
assert result is None, "Cannot see BGP community aliases at r1"
def _bgp_show_prefixes_by_alias(router):
output = json.loads(
router.vtysh_cmd(
"show bgp ipv4 unicast alias large-community-r2-1 json detail"
)
)
expected = {
"routes": {
"172.16.16.1/32": [
{
"community": {"string": "community-r2-1 65001:2"},
"largeCommunity": {"string": "large-community-r2-1 65001:1:2"},
}
]
}
}
return topotest.json_cmp(output, expected)
test_func = functools.partial(_bgp_show_prefixes_by_alias, router)
success, result = topotest.run_and_expect(test_func, None, count=60, wait=0.5)
assert result is None, "Cannot see BGP prefixes by community alias at r1"
def _bgp_show_prefixes_by_large_community_list(router):
output = json.loads(
router.vtysh_cmd("show bgp ipv4 unicast large-community-list r2 json")
)
expected = {"routes": {"172.16.16.1/32": [{"valid": True}]}}
return topotest.json_cmp(output, expected)
test_func = functools.partial(_bgp_show_prefixes_by_large_community_list, router)
success, result = topotest.run_and_expect(test_func, None, count=60, wait=0.5)
assert result is None, "Cannot see BGP prefixes by large community list at r1"
if __name__ == "__main__":
args = ["-s"] + sys.argv[1:]
sys.exit(pytest.main(args))
|
freerangerouting/frr
|
tests/topotests/bgp_community_alias/test_bgp-community-alias.py
|
Python
|
gpl-2.0
| 4,645
|
import os
import re
import shutil
from urllib import parse
from jirafs import exceptions, utils
from jirafs.plugin import CommandPlugin
from jirafs.ticketfolder import TicketFolder
class Command(CommandPlugin):
"""Clone a new ticketfolder for the specified ticket URL"""
MIN_VERSION = "2.0.0"
MAX_VERSION = "3.0.0"
AUTOMATICALLY_INSTANTIATE_FOLDER = False
TICKET_RE = re.compile(r".*\/browse\/(\w+-\d+)\/?")
def handle(self, args, jira, path, **kwargs):
ticket_url = args.ticket_url[0]
ticket_url_parts = parse.urlparse(ticket_url)
if not ticket_url_parts.netloc:
default_server = utils.get_default_jira_server()
ticket_url = parse.urljoin(default_server, "browse/" + ticket_url + "/")
path = args.path[0] if args.path else None
return self.cmd(path, ticket_url, jira)
def clone_from_issue(self, match, ticket_url, path, jira):
if not path:
path = match.group(1)
path = os.path.realpath(path)
os.mkdir(path)
try:
folder = TicketFolder.initialize_ticket_folder(ticket_url, path, jira)
utils.run_command_method_with_kwargs("pull", folder=folder)
except BaseException:
shutil.rmtree(path)
raise
folder.log(
"Issue %s cloned successfully to %s",
(
folder.issue_url,
folder.path,
),
)
return folder
def main(self, path, url, jira):
match = self.TICKET_RE.match(url)
if not match:
raise exceptions.JirafsError("'%s' is not a valid Jira ticket URL." % url)
return self.clone_from_issue(
match,
url,
path,
jira,
)
def add_arguments(self, parser):
parser.add_argument("ticket_url", nargs=1, type=str)
parser.add_argument(
"path",
nargs="*",
type=str,
)
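# --- Illustrative sketch (not part of the original module) ---
# TICKET_RE drives both URL validation and the default clone directory name;
# the URL below is made up to show what group(1) captures.
if __name__ == "__main__":
    example_url = "https://jira.example.com/browse/PROJ-123/"
    match = Command.TICKET_RE.match(example_url)
    print(match.group(1))  # -> "PROJ-123", used as the default folder name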
|
coddingtonbear/jirafs
|
jirafs/commands/clone.py
|
Python
|
mit
| 2,005
|
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from testrunner.local import testsuite
from testrunner.objects import testcase
class FuzzerVariantGenerator(testsuite.VariantGenerator):
# Only run the fuzzer with standard variant.
def FilterVariantsByTest(self, testcase):
return self.standard_variant
def GetFlagSets(self, testcase, variant):
return testsuite.FAST_VARIANT_FLAGS[variant]
class FuzzerTestSuite(testsuite.TestSuite):
SUB_TESTS = ( 'json', 'parser', 'regexp', 'wasm', 'wasm_asmjs', 'wasm_call',
'wasm_code', 'wasm_compile', 'wasm_data_section',
'wasm_function_sigs_section', 'wasm_globals_section',
'wasm_imports_section', 'wasm_memory_section', 'wasm_names_section',
'wasm_types_section' )
def __init__(self, name, root):
super(FuzzerTestSuite, self).__init__(name, root)
def ListTests(self, context):
tests = []
for subtest in FuzzerTestSuite.SUB_TESTS:
shell = 'v8_simple_%s_fuzzer' % subtest
for fname in os.listdir(os.path.join(self.root, subtest)):
if not os.path.isfile(os.path.join(self.root, subtest, fname)):
continue
test = testcase.TestCase(self, '%s/%s' % (subtest, fname),
override_shell=shell)
tests.append(test)
tests.sort()
return tests
def GetFlagsForTestCase(self, testcase, context):
suite, name = testcase.path.split('/')
return [os.path.join(self.root, suite, name)]
def _VariantGeneratorFactory(self):
return FuzzerVariantGenerator
def GetSuite(name, root):
return FuzzerTestSuite(name, root)
|
RPGOne/Skynet
|
node-master/deps/v8/test/fuzzer/testcfg.py
|
Python
|
bsd-3-clause
| 1,746
|
import twitter
import urllib
from datetime import datetime
from django.db.utils import IntegrityError
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from rest_framework import generics, viewsets
from socialtool.loading import get_classes, get_model
PostSerializer, PaginatedPostSerializer, MessageSerializer, \
MarketAccountSerializer = get_classes('social.serializers', ('PostSerializer', 'PaginatedPostSerializer', 'MessageSerializer', 'MarketAccountSerializer'))
HasImageFilterBackend, OldSchoolRetweet = get_classes('social.filters', ('HasImageFilterBackend', 'OldSchoolRetweet'))
# TODO - tweet and artworker assignments should be returning a JSON
# response - although having said that we are just swapping out HTML
# for returned HTML - so maybe not! ~jaymz
class TweetUserView(TemplateView):
template_name = 'tweet_user.html'
def send_tweet(self):
tweet_pk = self.request.GET['tweet_pk']
msg = self.request.GET['msg']
tweet = get_model('social', 'socialpost').objects.get(pk=tweet_pk)
# Reverse the quoting and get the unicode back
msg = urllib.unquote(msg)
try:
api = twitter.Api(
consumer_key=tweet.account.consumer_key,
consumer_secret=tweet.account.consumer_secret,
access_token_key=tweet.account.access_token_key,
access_token_secret=tweet.account.access_token_secret,
)
# If we have an included media file then attach and send that
# otherwise we post a regular Update instead - that is we're
# not going by the message content!
if tweet.photoshop:
status = api.PostMedia(u'{!s}'.format(msg), tweet.photoshop.file.name,
in_reply_to_status_id=tweet.uid)
else:
status = api.PostUpdate(u'{!s}'.format(msg), in_reply_to_status_id=tweet.uid)
# Update the tweet itself now
tweet.tweeted = True
tweet.tweet_id = status.id
tweet.sent_tweet = msg
tweet.tweeted_by = self.request.user
tweet.tweeted_at = datetime.now()
tweet.save()
except twitter.TwitterError:
status = None
return status
def get_context_data(self, **kwargs):
context = super(TweetUserView, self).get_context_data(**kwargs)
context['tweet'] = self.send_tweet()
return context
def get(self, *args, **kwargs):
return super(TweetUserView, self).get(*args, **kwargs)
class BanUserView(View):
template_name = 'assign_artworker.html'
def ban_user(self):
post_pk = self.request.GET['post_pk']
tweet = get_model('social', 'socialpost').everything.get(pk=post_pk)
hellban = get_model('social', 'banneduser')(handle=tweet.handle)
try:
hellban.save()
except IntegrityError:
return "Already banned"
return "OK"
def get(self, request, *args, **kwargs):
return HttpResponse(self.ban_user())
class PaginatedImagePostFeedView(generics.ListAPIView):
queryset = get_model('social', 'socialpost').objects.all()
serializer_class = PostSerializer
pagination_serializer_class = PaginatedPostSerializer
filter_backends = (HasImageFilterBackend, OldSchoolRetweet)
def get_queryset(self):
queryset = get_model('social', 'socialpost').objects.all()
user = self.request.QUERY_PARAMS.get('user', None)
if user is not None:
try:
# If we have a user then we need to look up what accounts they are associated
# with and then filter on all those (it's M2M)
tracked_term = get_model('social', 'trackedterms').objects.get(user__username=user)
queryset = queryset.filter(search_term__in=tracked_term.terms.values_list('pk', flat=True))
except get_model('social', 'trackedterms').DoesNotExist:
# If we can't find the user just carry on
pass
return queryset
class MessageViewSet(viewsets.ModelViewSet):
queryset = get_model('social', 'message').objects.all()
serializer_class = MessageSerializer
filter_fields = ('type', 'account',)
class MarketAccountViewSet(viewsets.ModelViewSet):
queryset = get_model('social', 'marketaccount').objects.all()
serializer_class = MarketAccountSerializer
|
udox/django-social-tools
|
socialtool/social/views.py
|
Python
|
bsd-3-clause
| 4,489
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2017-03-21 16:28:18
# @Author : Zhou Bo (zhoub@suooter.com)
# @Link : http://onlyus.online
# @Version : $Id$
from io import open
def filter(oldfile, newfile):
'''\
Copy the contents of oldfile into newfile line by line,
reading the input as UTF-8 encoded text.
'''
with open(newfile, 'w') as outfile, open(oldfile, 'r', encoding='utf-8') as infile:
for line in infile:
outfile.write(line)
if __name__ == "__main__":
filter('/etc/hosts', '/tmp/hosts')
|
congminghaoxue/learn_python
|
read_write_file.py
|
Python
|
apache-2.0
| 662
|
"""The tests for the manual_mqtt Alarm Control Panel component."""
from datetime import timedelta
import unittest
from unittest.mock import Mock, patch
from homeassistant.components import alarm_control_panel
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
)
from homeassistant.setup import setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
assert_setup_component,
fire_mqtt_message,
fire_time_changed,
get_test_home_assistant,
mock_mqtt_component,
)
from tests.components.alarm_control_panel import common
CODE = "HELLO_CODE"
class TestAlarmControlPanelManualMqtt(unittest.TestCase):
"""Test the manual_mqtt alarm module."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
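# Stub out config entry persistence so the test instance never writes to disk.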
self.hass.config_entries._async_schedule_save = Mock()
self.mock_publish = mock_mqtt_component(self.hass)
def tearDown(self): # pylint: disable=invalid-name
"""Stop down everything that was started."""
self.hass.stop()
def test_fail_setup_without_state_topic(self):
"""Test for failing with no state topic."""
with assert_setup_component(0) as config:
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt_alarm",
"command_topic": "alarm/command",
}
},
)
assert not config[alarm_control_panel.DOMAIN]
def test_fail_setup_without_command_topic(self):
"""Test failing with no command topic."""
with assert_setup_component(0):
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt_alarm",
"state_topic": "alarm/state",
}
},
)
def test_arm_home_no_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_home(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state
def test_arm_home_no_pending_when_code_not_req(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"code_arm_required": False,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_home(self.hass, 0)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state
def test_arm_home_with_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 1,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_home(self.hass, CODE, entity_id)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
state = self.hass.states.get(entity_id)
assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_HOME
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state
def test_arm_home_with_invalid_code(self):
"""Attempt to arm home without a valid code."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 1,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_home(self.hass, CODE + "2")
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_arm_away_no_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE, entity_id)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
def test_arm_away_no_pending_when_code_not_req(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code_arm_required": False,
"code": CODE,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, 0, entity_id)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
def test_arm_home_with_template_code(self):
"""Attempt to arm with a template-based code."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code_template": '{{ "abc" }}',
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_home(self.hass, "abc")
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_ARMED_HOME == state.state
def test_arm_away_with_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 1,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
state = self.hass.states.get(entity_id)
assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
def test_arm_away_with_invalid_code(self):
"""Attempt to arm away without a valid code."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 1,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE + "2")
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_arm_night_no_pending(self):
"""Test arm night method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_night(self.hass, CODE, entity_id)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state
def test_arm_night_no_pending_when_code_not_req(self):
"""Test arm night method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code_arm_required": False,
"code": CODE,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_night(self.hass, 0, entity_id)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state
def test_arm_night_with_pending(self):
"""Test arm night method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 1,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_night(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
state = self.hass.states.get(entity_id)
assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_NIGHT
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state
# Do not go to the pending state when updating to the same state
common.alarm_arm_night(self.hass, CODE, entity_id)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state
def test_arm_night_with_invalid_code(self):
"""Attempt to arm night without a valid code."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 1,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_night(self.hass, CODE + "2")
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_trigger_no_pending(self):
"""Test triggering when no pending submitted method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"trigger_time": 1,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=60)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
def test_trigger_with_delay(self):
"""Test trigger method and switch from pending to triggered."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"delay_time": 1,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_PENDING == state.state
assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"]
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_TRIGGERED == state.state
def test_trigger_zero_trigger_time(self):
"""Test disabled trigger."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 0,
"trigger_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_trigger_zero_trigger_time_with_pending(self):
"""Test disabled trigger."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 2,
"trigger_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_trigger_with_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 2,
"trigger_time": 3,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
state = self.hass.states.get(entity_id)
assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED
future = dt_util.utcnow() + timedelta(seconds=2)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_trigger_with_disarm_after_trigger(self):
"""Test disarm after trigger."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"trigger_time": 5,
"pending_time": 0,
"disarm_after_trigger": True,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_trigger_with_zero_specific_trigger_time(self):
"""Test trigger method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"trigger_time": 5,
"disarmed": {"trigger_time": 0},
"pending_time": 0,
"disarm_after_trigger": True,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_trigger_with_unused_zero_specific_trigger_time(self):
"""Test disarm after trigger."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"trigger_time": 5,
"armed_home": {"trigger_time": 0},
"pending_time": 0,
"disarm_after_trigger": True,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_trigger_with_specific_trigger_time(self):
"""Test disarm after trigger."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"disarmed": {"trigger_time": 5},
"pending_time": 0,
"disarm_after_trigger": True,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_back_to_back_trigger_with_no_disarm_after_trigger(self):
"""Test no disarm after back to back trigger."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"trigger_time": 5,
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE, entity_id)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
def test_disarm_while_pending_trigger(self):
"""Test disarming while pending state."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"trigger_time": 5,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
common.alarm_disarm(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_disarm_during_trigger_with_invalid_code(self):
"""Test disarming while code is invalid."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 5,
"code": CODE + "2",
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
common.alarm_disarm(self.hass, entity_id=entity_id)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
def test_trigger_with_unused_specific_delay(self):
"""Test trigger method and switch from pending to triggered."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"delay_time": 5,
"pending_time": 0,
"armed_home": {"delay_time": 10},
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_PENDING == state.state
assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"]
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_TRIGGERED
def test_trigger_with_specific_delay(self):
"""Test trigger method and switch from pending to triggered."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"delay_time": 10,
"pending_time": 0,
"armed_away": {"delay_time": 1},
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_PENDING == state.state
assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"]
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_TRIGGERED
def test_trigger_with_pending_and_delay(self):
"""Test trigger method and switch from pending to triggered."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"delay_time": 1,
"pending_time": 0,
"triggered": {"pending_time": 1},
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_PENDING
assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_PENDING
assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED
future += timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_TRIGGERED
def test_trigger_with_pending_and_specific_delay(self):
"""Test trigger method and switch from pending to triggered."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"delay_time": 10,
"pending_time": 0,
"armed_away": {"delay_time": 1},
"triggered": {"pending_time": 1},
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_PENDING
assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_PENDING
assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED
future += timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert state.state == STATE_ALARM_TRIGGERED
def test_armed_home_with_specific_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 10,
"armed_home": {"pending_time": 2},
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
common.alarm_arm_home(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=2)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state
def test_armed_away_with_specific_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 10,
"armed_away": {"pending_time": 2},
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
common.alarm_arm_away(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=2)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
def test_armed_night_with_specific_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 10,
"armed_night": {"pending_time": 2},
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
common.alarm_arm_night(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=2)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state
def test_trigger_with_specific_pending(self):
"""Test arm home method."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 10,
"triggered": {"pending_time": 2},
"trigger_time": 3,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
common.alarm_trigger(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=2)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state
future = dt_util.utcnow() + timedelta(seconds=5)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_arm_away_after_disabled_disarmed(self):
"""Test pending state with and without zero trigger time."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code": CODE,
"pending_time": 0,
"delay_time": 1,
"armed_away": {"pending_time": 1},
"disarmed": {"trigger_time": 0},
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_away(self.hass, CODE)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_PENDING == state.state
assert STATE_ALARM_DISARMED == state.attributes["pre_pending_state"]
assert STATE_ALARM_ARMED_AWAY == state.attributes["post_pending_state"]
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_PENDING == state.state
assert STATE_ALARM_DISARMED == state.attributes["pre_pending_state"]
assert STATE_ALARM_ARMED_AWAY == state.attributes["post_pending_state"]
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_ARMED_AWAY == state.state
common.alarm_trigger(self.hass, entity_id=entity_id)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_PENDING == state.state
assert STATE_ALARM_ARMED_AWAY == state.attributes["pre_pending_state"]
assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"]
future += timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_TRIGGERED == state.state
def test_disarm_with_template_code(self):
"""Attempt to disarm with a valid or invalid template-based code."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual_mqtt",
"name": "test",
"code_template": '{{ "" if from_state == "disarmed" else "abc" }}',
"pending_time": 0,
"disarm_after_trigger": False,
"command_topic": "alarm/command",
"state_topic": "alarm/state",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_arm_home(self.hass, "def")
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_ARMED_HOME == state.state
common.alarm_disarm(self.hass, "def")
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_ARMED_HOME == state.state
common.alarm_disarm(self.hass, "abc")
self.hass.block_till_done()
state = self.hass.states.get(entity_id)
assert STATE_ALARM_DISARMED == state.state
def test_arm_home_via_command_topic(self):
"""Test arming home via command topic."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 1,
"state_topic": "alarm/state",
"command_topic": "alarm/command",
"payload_arm_home": "ARM_HOME",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
# Fire the arm command via MQTT; ensure state changes to pending
fire_mqtt_message(self.hass, "alarm/command", "ARM_HOME")
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
# Fast-forward a little bit
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state
def test_arm_away_via_command_topic(self):
"""Test arming away via command topic."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 1,
"state_topic": "alarm/state",
"command_topic": "alarm/command",
"payload_arm_away": "ARM_AWAY",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
# Fire the arm command via MQTT; ensure state changes to pending
fire_mqtt_message(self.hass, "alarm/command", "ARM_AWAY")
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
# Fast-forward a little bit
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state
def test_arm_night_via_command_topic(self):
"""Test arming night via command topic."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 1,
"state_topic": "alarm/state",
"command_topic": "alarm/command",
"payload_arm_night": "ARM_NIGHT",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
# Fire the arm command via MQTT; ensure state changes to pending
fire_mqtt_message(self.hass, "alarm/command", "ARM_NIGHT")
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
# Fast-forward a little bit
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state
def test_disarm_pending_via_command_topic(self):
"""Test disarming pending alarm via command topic."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 1,
"state_topic": "alarm/state",
"command_topic": "alarm/command",
"payload_disarm": "DISARM",
}
},
)
entity_id = "alarm_control_panel.test"
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
common.alarm_trigger(self.hass)
self.hass.block_till_done()
assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state
# Now that we're pending, receive a command to disarm
fire_mqtt_message(self.hass, "alarm/command", "DISARM")
self.hass.block_till_done()
assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state
def test_state_changes_are_published_to_mqtt(self):
"""Test publishing of MQTT messages when state changes."""
assert setup_component(
self.hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "manual_mqtt",
"name": "test",
"pending_time": 1,
"trigger_time": 1,
"state_topic": "alarm/state",
"command_topic": "alarm/command",
}
},
)
# Component should send disarmed alarm state on startup
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_DISARMED, 0, True
)
self.mock_publish.async_publish.reset_mock()
# Arm in home mode
common.alarm_arm_home(self.hass)
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_PENDING, 0, True
)
self.mock_publish.async_publish.reset_mock()
# Fast-forward a little bit
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_ARMED_HOME, 0, True
)
self.mock_publish.async_publish.reset_mock()
# Arm in away mode
common.alarm_arm_away(self.hass)
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_PENDING, 0, True
)
self.mock_publish.async_publish.reset_mock()
# Fast-forward a little bit
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_ARMED_AWAY, 0, True
)
self.mock_publish.async_publish.reset_mock()
# Arm in night mode
common.alarm_arm_night(self.hass)
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_PENDING, 0, True
)
self.mock_publish.async_publish.reset_mock()
# Fast-forward a little bit
future = dt_util.utcnow() + timedelta(seconds=1)
with patch(
(
"homeassistant.components.manual_mqtt.alarm_control_panel."
"dt_util.utcnow"
),
return_value=future,
):
fire_time_changed(self.hass, future)
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_ARMED_NIGHT, 0, True
)
self.mock_publish.async_publish.reset_mock()
# Disarm
common.alarm_disarm(self.hass)
self.hass.block_till_done()
self.mock_publish.async_publish.assert_called_once_with(
"alarm/state", STATE_ALARM_DISARMED, 0, True
)
|
Teagan42/home-assistant
|
tests/components/manual_mqtt/test_alarm_control_panel.py
|
Python
|
apache-2.0
| 59,283
|
"""
sentry.utils.settings
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import inspect
import sys
from sentry.utils.imports import import_string
PACKAGES = {
'django.db.backends.postgresql_psycopg2': 'psycopg2.extensions',
'django.db.backends.mysql': 'MySQLdb',
'django.db.backends.oracle': 'cx_Oracle',
'django.core.cache.backends.memcached.MemcachedCache': 'memcache',
'django.core.cache.backends.memcached.PyLibMCCache': 'pylibmc'
}
def reraise_as(new_exception_or_type):
"""
Obtained from https://github.com/dcramer/reraise/blob/master/src/reraise.py
>>> try:
>>> do_something_crazy()
>>> except Exception:
>>> reraise_as(UnhandledException)
"""
__traceback_hide__ = True # NOQA
e_type, e_value, e_traceback = sys.exc_info()
if inspect.isclass(new_exception_or_type):
new_type = new_exception_or_type
new_exception = new_exception_or_type()
else:
new_type = type(new_exception_or_type)
new_exception = new_exception_or_type
new_exception.__cause__ = e_value
try:
raise new_type, new_exception, e_traceback
finally:
del e_traceback
def validate_settings(settings):
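# For each configured database engine and cache backend, ensure its third-party driver can be imported.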
for key, engine_key, engine_type in \
[('DATABASES', 'ENGINE', 'database engine'), ('CACHES', 'BACKEND', 'caching backend')]:
value = getattr(settings, key, {})
for alias in value:
engine = value[alias][engine_key]
if engine not in PACKAGES:
continue
validate_dependency(settings, engine_type, engine, PACKAGES[engine])
def validate_dependency(settings, dependency_type, dependency, package):
try:
import_string(package)
except ImportError:
msg = ConfigurationError.get_error_message("%s %s" % (dependency_type, dependency), package)
reraise_as(ConfigurationError(msg))
class ConfigurationError(ValueError):
"""
This error is thrown whenever a sentry configuration is wrong, or requires a third-party library
that's not installed properly or can't be found.
"""
@classmethod
def get_error_message(cls, dependency, package):
return """Python could not find %(package)s in your current environment (required by %(dependency)s). If you have it installed, maybe you are using the wrong python binary to run sentry?""" % {
"dependency": dependency,
"package": package
}
|
beni55/sentry
|
src/sentry/utils/settings.py
|
Python
|
bsd-3-clause
| 2,562
|
import urllib2
import lxml.html
from gauth.settings import TOKEN_FIELD_NAME
# gets the entry id for the auth token field
def get_entry_id(form_url):
try:
page = urllib2.urlopen(form_url)
except urllib2.HTTPError:
return None
p = lxml.html.parse(page)
root = p.getroot()
headings = root.cssselect('div[role=heading]:contains("'+TOKEN_FIELD_NAME+'")')
if len(headings) < 1:
return None
heading = headings[0]
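# Walk up from the heading to the enclosing listitem wrapper, then read the name of its hidden input.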
parent_wrappers = [e for e in heading.iterancestors() if e.attrib.get('role') == 'listitem']
if len(parent_wrappers) < 1:
return None
parent_wrapper = parent_wrappers[0]
inputs = parent_wrapper.cssselect('input[type=hidden]')
if len(inputs) < 1:
return None
return inputs[0].get('name')
|
benweissmann/gauth
|
redirector/form_parser.py
|
Python
|
mit
| 801
|
#!/usr/bin/env python
import urllib
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
@stringfilter
def qrcode(value, alt=None):
"""
Generate QR Code image from a string with the Google charts API
http://code.google.com/intl/fr-FR/apis/chart/types.html#qrcodes
Example usage --
{{ my_string|qrcode:"my alt" }}
<img src="http://chart.apis.google.com/chart?chs=150x150&cht=qr&chl=my_string&choe=UTF-8" alt="my alt" />
"""
url = conditional_escape("http://chart.apis.google.com/chart?%s" % \
urllib.urlencode({'chs':'200x200', 'cht':'qr', 'chl':value, 'choe':'UTF-8'}))
alt = conditional_escape(alt or value)
return mark_safe(u"""<img class="qrcode" src="%s" width="250" height="250" alt="%s" />""" % (url, alt))
@register.filter("documento_id")
def documento_id(value):
return str(value['_id'])
|
hugobarzano/DispositivosMovilesBackEnd
|
ControlUsuarios/templatetags/qr.py
|
Python
|
gpl-3.0
| 1,049
|
#!/usr/bin/env python
# encoding: utf-8
import argparse
import os
import pickle
import sys
sys.path.insert(0, 'utilities')
from analyze import *
from output import *
from stats import *
from store import *
from filesystem import *
from lists import *
from numbers import *
from strings import *
#from google.protobuf import text_format
def searchTitle(apps, title):
""" Search for an app given its title. """
for app in apps:
if apps[app]['title'] == title:
return app
return None
def parseArgs():
""" Parse command line arguments.
Returns:
Command line argument object
"""
# example for help text
prog = os.path.basename(__file__)
epilog = "android ID:\n\n"
epilog+= " in order to download apps from the Google Play Store you need\n"
epilog+= " to provide your android id number.\n\n"
epilog+= " type *#*#8255#*#* on your phone to start GTalk Monitor.\n"
epilog+= " your android id is shown as 'aid'.\n\n"
epilog+= "examples:\n\n"
epilog+= " use mail and password:\n"
epilog+= " $ " + prog + " -u EMAIL -p PASS\n\n"
epilog+= " use token:\n"
epilog+= " $ " + prog + " -t TOKEN\n\n"
epilog+= " generate statistic files:\n"
epilog+= " $ " + prog + " -D -P\n\n"
epilog+= " print statistics:\n"
epilog+= " $ " + prog + " -D -G\n\n"
parser = argparse.ArgumentParser(
description='download android apps and analyze the security of their communications.',
usage='%(prog)s [options]',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=epilog)
parser.add_argument('--cache', help="file for storing cache.", dest="f_cache", type=str, metavar=('FILE'), default=".hermes-cache.p")
parser.add_argument('--cache-pos', help="file for storing position used for resuming analyzer.", dest="f_pos", type=str, metavar=('FILE'), default=".hermes-cache-pos.p")
parser.add_argument('--category', help="category to fetch apps from (default: all)", dest="category", type=str, metavar=('NAME'))
parser.add_argument('--subcategory', help="subcategory to fetch apps from (default: all)", dest="subcategory", type=str, metavar=('NAME'))
parser.add_argument('--limit', help="the total number of apps to fetch from each category/subcategory.", dest="limit", type=int, metavar=('NUM'), default=500)
parser.add_argument('--offset', help="the offset from where to fetch apps in each category/subcategory.", dest="offset", type=int, metavar=('NUM'), default=0)
parser.add_argument('--restore-freq', help="how often to create restore point when analyzing apps, use 0 to skip.", dest="restore_freq", type=int, metavar=('NUM'), default=10)
parser.add_argument('--app-dir', help="directory where apps will be stored during download and analytics.", dest="app_dir", type=str, metavar=('FOLDER'), default='apps/')
parser.add_argument('--tex-dir', help="directory where LaTeX reports will be saved.", dest="tex_dir", type=str, metavar=('FOLDER'), default='tex/')
parser.add_argument('-i', help="your android ID number, see -h for more info.", dest="id", type=str, metavar=('ID'))
parser.add_argument('-u', help="username for logging into Google Play Store.", dest="user", type=str, metavar=('GMAIL'))
parser.add_argument('-p', help="password for logging into Google Play Store.", dest="passw", type=str, metavar=('PASS'))
parser.add_argument('-t', help="access token for accessing Google Play Store.", dest="token", type=str, metavar=('TOKEN'))
parser.add_argument('-D', '--no-download', help="skip downloading and analysing apps.", dest="skip_download", action='store_true')
parser.add_argument('-G', '--no-generating', help="skip generating statistic files.", dest="skip_generating", action='store_true')
parser.add_argument('-P', '--no-printing', help="skip printing statistic output.", dest="skip_printing", action='store_true')
args = parser.parse_args()
# validate login credentials
if not args.skip_download:
if (not args.token) and not (args.user and args.passw):
print("error: you need to specify user/pass or token.")
exit(1)
if not args.id:
print("error: you need to specify your android id. see -h for more info.")
exit(1)
# validate modes
if args.skip_download and args.skip_printing and args.skip_generating:
print("what's the point if you skip everything?")
exit(1)
return args
def shouldProcess(app):
""" Check if an app should be downloaded and analyzed. """
if not 'internet' in app:
return False
if not 'unchecked' in app:
return False
return app['internet'] and app['unchecked'] and app['price'] == u'Free'
def processApps(args, gpapi, apps):
""" Download and analyze apps on the Google Play Store.
Arguments:
args -- the command line arguments object
gpapi -- the Google Play API object
apps -- dictionary of apps and their meta data
"""
createAppFolder(args)
i = 0
j = 0
for app in apps:
if shouldProcess(apps[app]):
j += 1
print "found {:,} apps to process".format(j)
pos = getRestorePoint(args)
for app,meta in apps.iteritems():
# we only care about apps which require INTERNET permission, we haven't checked yet, and are free
if not shouldProcess(meta):
continue
# skip until at the position where we should resume
i += 1
if i < pos:
continue
# create restore point
if i % args.restore_freq == 0 and i > 0 and args.restore_freq > 0:
createRestorePoint(args, apps, i)
# print progress
sys.stdout.write("\rprocessing apps... %6.2f%% %10s: %s\033[K " % (100.0 * i / j, "app", app))
sys.stdout.flush()
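# Download the APK, analyze its network security, then delete the file to save disk space.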
try:
fname = args.app_dir + app + ".apk"
if download(gpapi, fname, app, meta['version'], meta['offer']):
analyze(apps, fname, app)
os.remove(fname)
except:
pass  # ignore failures for this app and continue with the next one
sys.stdout.write("\rdone processing apps\033[K\n")
sys.stdout.flush()
# clean up
print "saving to cache"
clearRestorePoint(args, apps)
deleteAppFolder(args)
def main():
args = parseArgs()
print "hermes 0.1"
print "by ephracis"
print ""
# load cache
apps = {}
try:
print "looking for cache"
apps = pickle.load(open(args.f_cache, 'rb'))
print "loaded {:,} apps from cache".format(len(apps))
except:
print "no cache found"
# download + analyze
if not args.skip_download:
print "logging in to play store"
api = login(args.id, args.user, args.passw, args.token)
print "constructing list of apps"
browse(args, api, apps)
print "starting app analyzer"
processApps(args, api, apps)
if len(apps) == 0:
print("error: no apps to analyze.")
exit(1)
# statistics
if not (args.skip_generating and args.skip_printing):
print "generating output"
outputResults(args, apps)
if args.skip_printing:
print "done"
if __name__ == "__main__":
main()
|
ephracis/hermes
|
hermes.py
|
Python
|
mit
| 6,704
|
# -*- coding: utf-8 -*-
# Copyright 2016 Eficent Business and IT Consulting Services S.L.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl-3.0).
from openerp.tests import common
from openerp.tools import SUPERUSER_ID
class TestPurchaseRequest(common.TransactionCase):
def setUp(self):
super(TestPurchaseRequest, self).setUp()
self.purchase_request = self.env['purchase.request']
self.purchase_request_line = self.env['purchase.request.line']
def test_purchase_request_status(self):
vals = {
'picking_type_id': self.env.ref('stock.picking_type_in').id,
'requested_by': SUPERUSER_ID,
}
purchase_request = self.purchase_request.create(vals)
vals = {
'request_id': purchase_request.id,
'product_id': self.env.ref('product.product_product_13').id,
'product_uom_id': self.env.ref('product.product_uom_unit').id,
'product_qty': 5.0,
}
self.purchase_request_line.create(vals)
self.assertEqual(
purchase_request.is_editable, True,
'Should be editable')
purchase_request.button_to_approve()
self.assertEqual(
purchase_request.state, 'to_approve',
'Should be in state to_approve')
self.assertEqual(
purchase_request.is_editable, False,
'Should not be editable')
purchase_request.button_draft()
self.assertEqual(
purchase_request.is_editable, True,
'Should be editable')
self.assertEqual(
purchase_request.state, 'draft',
'Should be in state draft')
self.purchase_request_line.unlink()
|
SerpentCS/purchase-workflow
|
purchase_request/tests/test_purchase_request.py
|
Python
|
agpl-3.0
| 1,728
|
import sys
if sys.version_info < (3, 7):
from ._text import TextValidator
from ._offset import OffsetValidator
from ._font import FontValidator
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__,
[],
["._text.TextValidator", "._offset.OffsetValidator", "._font.FontValidator"],
)
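# Editor's note (illustrative): on Python >= 3.7 the names listed above are
# resolved lazily through the module-level __getattr__ returned by
# relative_import (PEP 562), so the submodule is only imported when a
# validator is first accessed, e.g.:
#
#   from plotly.validators.carpet.baxis.title import TextValidator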
|
plotly/python-api
|
packages/python/plotly/plotly/validators/carpet/baxis/title/__init__.py
|
Python
|
mit
| 395
|
from django.conf import settings
from django.utils.six.moves.urllib.parse import urlparse
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.utils import resolve_model_string
class BadRequestError(Exception):
pass
def get_base_url(request=None):
base_url = getattr(settings, 'WAGTAILAPI_BASE_URL', request.site.root_url if request else None)
if base_url:
# We only want the scheme and netloc
base_url_parsed = urlparse(base_url)
return base_url_parsed.scheme + '://' + base_url_parsed.netloc
def get_full_url(request, path):
base_url = get_base_url(request) or ''
return base_url + path
def pages_for_site(site):
pages = Page.objects.public().live()
pages = pages.descendant_of(site.root_page, inclusive=True)
return pages
def page_models_from_string(string):
page_models = []
for sub_string in string.split(','):
page_model = resolve_model_string(sub_string)
if not issubclass(page_model, Page):
raise ValueError("Model is not a page")
page_models.append(page_model)
return tuple(page_models)
def filter_page_type(queryset, page_models):
qs = queryset.none()
for model in page_models:
qs |= queryset.type(model)
return qs
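# Illustrative usage (editor's sketch; the model labels below are assumed, not
# part of wagtail):
#
#   models = page_models_from_string('blog.BlogPage,news.NewsPage')
#   pages = filter_page_type(pages_for_site(request.site), models)
#   url = get_full_url(request, pages.first().url_path)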
|
gogobook/wagtail
|
wagtail/api/v2/utils.py
|
Python
|
bsd-3-clause
| 1,286
|
from ..testing.test_loader import MANUAL, AUTOMATIC
PAUSED = "paused"
RUNNING = "running"
COMPLETED = "completed"
ABORTED = "aborted"
PENDING = "pending"
UNKNOWN = "unknown"
class Session(object):
def __init__(
self,
token=None,
test_types=None,
user_agent=None,
labels=None,
tests=None,
pending_tests=None,
running_tests=None,
timeouts=None,
status=None,
test_state=None,
last_completed_test=None,
recent_completed_count=None,
date_created=None,
date_started=None,
date_finished=None,
is_public=None,
reference_tokens=None,
browser=None,
expiration_date=None,
type=None,
malfunctioning_tests=None
):
if token is None:
token = ""
self.token = token
if test_types is None:
test_types = [AUTOMATIC, MANUAL]
self.test_types = test_types
if user_agent is None:
user_agent = ""
self.user_agent = user_agent
if labels is None:
labels = []
self.labels = labels
self.tests = tests
self.pending_tests = pending_tests
self.running_tests = running_tests
if timeouts is None:
timeouts = {}
self.timeouts = timeouts
if status is None:
status = UNKNOWN
self.status = status
self.test_state = test_state
self.last_completed_test = last_completed_test
if recent_completed_count is None:
recent_completed_count = 0
self.recent_completed_count = recent_completed_count
self.date_created = date_created
self.date_started = date_started
self.date_finished = date_finished
if is_public is None:
is_public = False
self.is_public = is_public
if reference_tokens is None:
reference_tokens = []
self.reference_tokens = reference_tokens
self.browser = browser
self.expiration_date = expiration_date
self.type = type
if malfunctioning_tests is None:
malfunctioning_tests = []
self.malfunctioning_tests = malfunctioning_tests
|
servo/servo
|
tests/wpt/web-platform-tests/tools/wave/data/session.py
|
Python
|
mpl-2.0
| 2,341
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from app_data import AppDataField
from six import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(
_('Type'),
max_length=100,
)
namespace = models.CharField(
_('Instance namespace'),
default=None,
max_length=100,
unique=True,
)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _('Apphook config')
verbose_name_plural = _('Apphook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return '%s / %s' % (self.cmsapp.name, self.namespace)
else:
return '%s / %s' % (self.type, self.namespace)
def __getattr__(self, item):
"""
        This allows accessing config form attributes as normal model fields
:param item:
:return:
"""
try:
return getattr(self.app_data.config, item)
except Exception:
raise AttributeError('attribute %s not found' % item)
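# Illustrative usage (editor's sketch; the concrete config model and the
# 'paginate_by' app_data key are assumed, not part of this module):
#
#   config = NewsBlogConfig.objects.get(namespace='aldryn_newsblog_default')
#   config.paginate_by  # resolved through __getattr__ via self.app_data.config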
|
aldryn/aldryn-apphooks-config
|
aldryn_apphooks_config/models.py
|
Python
|
bsd-3-clause
| 1,617
|
# Copyright 2017, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
These classes implement execution for each style of Ansible module. They are
instantiated in the target context by way of target.py::run_module().
Each class in here has a corresponding Planner class in planners.py that knows
how to build arguments for it, preseed related data, etc.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import atexit
import ctypes
import errno
import imp
import json
import logging
import os
import shlex
import sys
import tempfile
import types
import mitogen.core
import ansible_mitogen.target # TODO: circular import
try:
# Cannot use cStringIO as it does not support Unicode.
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
from shlex import quote as shlex_quote
except ImportError:
from pipes import quote as shlex_quote
# Prevent accidental import of an Ansible module from hanging on stdin read.
import ansible.module_utils.basic
ansible.module_utils.basic._ANSIBLE_ARGS = '{}'
# For tasks that modify /etc/resolv.conf, non-Debian derivative glibcs cache
# resolv.conf at startup and never implicitly reload it. Cope with that via an
# explicit call to res_init() on each task invocation. BSD-alikes export it
# directly, Linux #defines it as "__res_init".
libc = ctypes.CDLL(None)
libc__res_init = None
for symbol in 'res_init', '__res_init':
try:
libc__res_init = getattr(libc, symbol)
except AttributeError:
pass
iteritems = getattr(dict, 'iteritems', dict.items)
LOG = logging.getLogger(__name__)
class EnvironmentFileWatcher(object):
"""
Usually Ansible edits to /etc/environment and ~/.pam_environment are
reflected in subsequent tasks if become:true or SSH multiplexing is
disabled, due to sudo and/or SSH reinvoking pam_env. Rather than emulate
existing semantics, do our best to ensure edits are always reflected.
This can't perfectly replicate the existing behaviour, but it can safely
update and remove keys that appear to originate in `path`, and that do not
conflict with any existing environment key inherited from elsewhere.
A more robust future approach may simply be to arrange for the persistent
interpreter to restart when a change is detected.
"""
def __init__(self, path):
self.path = os.path.expanduser(path)
#: Inode data at time of last check.
self._st = self._stat()
        #: List of inherited keys appearing to originate from this file.
self._keys = [key for key, value in self._load()
if value == os.environ.get(key)]
LOG.debug('%r installed; existing keys: %r', self, self._keys)
def __repr__(self):
return 'EnvironmentFileWatcher(%r)' % (self.path,)
def _stat(self):
try:
return os.stat(self.path)
except OSError:
return None
def _load(self):
try:
with open(self.path, 'r') as fp:
return list(self._parse(fp))
except IOError:
return []
def _parse(self, fp):
"""
linux-pam-1.3.1/modules/pam_env/pam_env.c#L207
"""
for line in fp:
# ' #export foo=some var ' -> ['#export', 'foo=some var ']
bits = shlex.split(line, comments=True)
if (not bits) or bits[0].startswith('#'):
continue
if bits[0] == 'export':
bits.pop(0)
key, sep, value = (' '.join(bits)).partition('=')
if key and sep:
yield key, value
def _on_file_changed(self):
LOG.debug('%r: file changed, reloading', self)
for key, value in self._load():
if key in os.environ:
LOG.debug('%r: existing key %r=%r exists, not setting %r',
self, key, os.environ[key], value)
else:
LOG.debug('%r: setting key %r to %r', self, key, value)
self._keys.append(key)
os.environ[key] = value
def _remove_existing(self):
"""
When a change is detected, remove keys that existed in the old file.
"""
for key in self._keys:
if key in os.environ:
LOG.debug('%r: removing old key %r', self, key)
del os.environ[key]
self._keys = []
def check(self):
"""
        Compare the :func:`os.stat` for the pam_env style environment file
`path` with the previous result `old_st`, which may be :data:`None` if
the previous stat attempt failed. Reload its contents if the file has
changed or appeared since last attempt.
:returns:
New :func:`os.stat` result. The new call to :func:`reload_env` should
pass it as the value of `old_st`.
"""
st = self._stat()
if self._st == st:
return
self._st = st
self._remove_existing()
if st is None:
LOG.debug('%r: file has disappeared', self)
else:
self._on_file_changed()
_pam_env_watcher = EnvironmentFileWatcher('~/.pam_environment')
_etc_env_watcher = EnvironmentFileWatcher('/etc/environment')
def utf8(s):
"""
Coerce an object to bytes if it is Unicode.
"""
if isinstance(s, mitogen.core.UnicodeType):
s = s.encode('utf-8')
return s
def reopen_readonly(fp):
"""
Replace the file descriptor belonging to the file object `fp` with one
open on the same file (`fp.name`), but opened with :py:data:`os.O_RDONLY`.
This enables temporary files to be executed on Linux, which usually throws
``ETXTBUSY`` if any writeable handle exists pointing to a file passed to
`execve()`.
"""
fd = os.open(fp.name, os.O_RDONLY)
os.dup2(fd, fp.fileno())
os.close(fd)
class Runner(object):
"""
Ansible module runner. After instantiation (with kwargs supplied by the
corresponding Planner), `.run()` is invoked, upon which `setup()`,
`_run()`, and `revert()` are invoked, with the return value of `_run()`
returned by `run()`.
    Subclasses may override `_run()` and extend `setup()` and `revert()`.
:param str module:
Name of the module to execute, e.g. "shell"
:param mitogen.core.Context service_context:
Context to which we should direct FileService calls. For now, always
the connection multiplexer process on the controller.
:param str json_args:
Ansible module arguments. A mixture of user and internal keys created
by :meth:`ansible.plugins.action.ActionBase._execute_module`.
This is passed as a string rather than a dict in order to mimic the
implicit bytes/str conversion behaviour of a 2.x controller running
against a 3.x target.
:param str good_temp_dir:
The writeable temporary directory for this user account reported by
:func:`ansible_mitogen.target.init_child` passed via the controller.
This is specified explicitly to remain compatible with Ansible<2.5, and
for forked tasks where init_child never runs.
:param dict env:
Additional environment variables to set during the run. Keys with
:data:`None` are unset if present.
:param str cwd:
If not :data:`None`, change to this directory before executing.
:param mitogen.core.ExternalContext econtext:
When `detach` is :data:`True`, a reference to the ExternalContext the
runner is executing in.
:param bool detach:
When :data:`True`, indicate the runner should detach the context from
its parent after setup has completed successfully.
"""
def __init__(self, module, service_context, json_args, good_temp_dir,
extra_env=None, cwd=None, env=None, econtext=None,
detach=False):
self.module = module
self.service_context = service_context
self.econtext = econtext
self.detach = detach
self.args = json.loads(json_args)
self.good_temp_dir = good_temp_dir
self.extra_env = extra_env
self.env = env
self.cwd = cwd
#: If not :data:`None`, :meth:`get_temp_dir` had to create a temporary
#: directory for this run, because we're in an asynchronous task, or
#: because the originating action did not create a directory.
self._temp_dir = None
def get_temp_dir(self):
path = self.args.get('_ansible_tmpdir')
if path is not None:
return path
if self._temp_dir is None:
self._temp_dir = tempfile.mkdtemp(
prefix='ansible_mitogen_runner_',
dir=self.good_temp_dir,
)
return self._temp_dir
def revert_temp_dir(self):
if self._temp_dir is not None:
ansible_mitogen.target.prune_tree(self._temp_dir)
self._temp_dir = None
def setup(self):
"""
Prepare for running a module, including fetching necessary dependencies
from the parent, as :meth:`run` may detach prior to beginning
execution. The base implementation simply prepares the environment.
"""
self._setup_cwd()
self._setup_environ()
def _setup_cwd(self):
"""
For situations like sudo to a non-privileged account, CWD could be
$HOME of the old account, which could have mode go=, which means it is
impossible to restore the old directory, so don't even try.
"""
if self.cwd:
os.chdir(self.cwd)
def _setup_environ(self):
"""
Apply changes from /etc/environment files before creating a
TemporaryEnvironment to snapshot environment state prior to module run.
"""
_pam_env_watcher.check()
_etc_env_watcher.check()
env = dict(self.extra_env or {})
if self.env:
env.update(self.env)
self._env = TemporaryEnvironment(env)
def revert(self):
"""
Revert any changes made to the process after running a module. The base
implementation simply restores the original environment.
"""
self._env.revert()
self.revert_temp_dir()
def _run(self):
"""
The _run() method is expected to return a dictionary in the form of
ActionBase._low_level_execute_command() output, i.e. having::
{
"rc": int,
"stdout": "stdout data",
"stderr": "stderr data"
}
"""
raise NotImplementedError()
def run(self):
"""
Set up the process environment in preparation for running an Ansible
module. This monkey-patches the Ansible libraries in various places to
prevent it from trying to kill the process on completion, and to
prevent it from reading sys.stdin.
:returns:
Module result dictionary.
"""
self.setup()
if self.detach:
self.econtext.detach()
try:
return self._run()
finally:
self.revert()
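# Illustrative sketch (editor's note, not part of mitogen): a minimal Runner
# subclass showing the setup()/_run()/revert() lifecycle described in the
# class docstring above.
#
#   class EchoRunner(Runner):
#       def _run(self):
#           # run() has already called setup(); return the dict shape expected
#           # by ActionBase._low_level_execute_command().
#           return {'rc': 0, 'stdout': repr(self.args), 'stderr': ''}
#
#   EchoRunner(module='echo', service_context=None,
#              json_args='{"msg": "hi"}', good_temp_dir='/tmp').run()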
class ModuleUtilsImporter(object):
"""
:param list module_utils:
List of `(fullname, path, is_pkg)` tuples.
"""
def __init__(self, context, module_utils):
self._context = context
self._by_fullname = dict(
(fullname, (path, is_pkg))
for fullname, path, is_pkg in module_utils
)
self._loaded = set()
sys.meta_path.insert(0, self)
def revert(self):
sys.meta_path.remove(self)
for fullname in self._loaded:
sys.modules.pop(fullname, None)
def find_module(self, fullname, path=None):
if fullname in self._by_fullname:
return self
def load_module(self, fullname):
path, is_pkg = self._by_fullname[fullname]
source = ansible_mitogen.target.get_small_file(self._context, path)
code = compile(source, path, 'exec', 0, 1)
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
mod.__file__ = "master:%s" % (path,)
mod.__loader__ = self
if is_pkg:
mod.__path__ = []
mod.__package__ = str(fullname)
else:
mod.__package__ = str(fullname.rpartition('.')[0])
exec(code, mod.__dict__)
self._loaded.add(fullname)
return mod
class TemporaryEnvironment(object):
"""
Apply environment changes from `env` until :meth:`revert` is called. Values
in the dict may be :data:`None` to indicate the relevant key should be
deleted.
"""
def __init__(self, env=None):
self.original = dict(os.environ)
self.env = env or {}
for key, value in iteritems(self.env):
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = str(value)
def revert(self):
"""
Revert changes made by the module to the process environment. This must
always run, as some modules (e.g. git.py) set variables like GIT_SSH
that must be cleared out between runs.
"""
os.environ.clear()
os.environ.update(self.original)
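# Illustrative usage (editor's sketch): apply per-task overrides, then restore
# the original environment; a value of None removes the key for the duration.
#
#   env = TemporaryEnvironment({'GIT_SSH': '/usr/local/bin/ssh-wrap', 'LC_ALL': None})
#   try:
#       ...  # run the module
#   finally:
#       env.revert()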
class TemporaryArgv(object):
def __init__(self, argv):
self.original = sys.argv[:]
sys.argv[:] = map(str, argv)
def revert(self):
sys.argv[:] = self.original
class NewStyleStdio(object):
"""
Patch ansible.module_utils.basic argument globals.
"""
def __init__(self, args, temp_dir):
self.temp_dir = temp_dir
self.original_stdout = sys.stdout
self.original_stderr = sys.stderr
self.original_stdin = sys.stdin
sys.stdout = StringIO()
sys.stderr = StringIO()
encoded = json.dumps({'ANSIBLE_MODULE_ARGS': args})
ansible.module_utils.basic._ANSIBLE_ARGS = utf8(encoded)
sys.stdin = StringIO(mitogen.core.to_text(encoded))
self.original_get_path = getattr(ansible.module_utils.basic,
'get_module_path', None)
ansible.module_utils.basic.get_module_path = self._get_path
def _get_path(self):
return self.temp_dir
def revert(self):
ansible.module_utils.basic.get_module_path = self.original_get_path
sys.stdout = self.original_stdout
sys.stderr = self.original_stderr
sys.stdin = self.original_stdin
ansible.module_utils.basic._ANSIBLE_ARGS = '{}'
class ProgramRunner(Runner):
"""
Base class for runners that run external programs.
:param str path:
Absolute path to the program file on the master, as it can be retrieved
via :class:`mitogen.service.FileService`.
:param bool emulate_tty:
If :data:`True`, execute the program with `stdout` and `stderr` merged
into a single pipe, emulating Ansible behaviour when an SSH TTY is in
use.
"""
def __init__(self, path, emulate_tty=None, **kwargs):
super(ProgramRunner, self).__init__(**kwargs)
self.emulate_tty = emulate_tty
self.path = path
def setup(self):
super(ProgramRunner, self).setup()
self._setup_program()
def _get_program_filename(self):
"""
Return the filename used for program on disk. Ansible uses the original
        filename for non-Ansiballz runs, and "ansible_module_" + filename for
Ansiballz runs.
"""
return os.path.basename(self.path)
program_fp = None
def _setup_program(self):
"""
Create a temporary file containing the program code. The code is
fetched via :meth:`_get_program`.
"""
filename = self._get_program_filename()
path = os.path.join(self.get_temp_dir(), filename)
self.program_fp = open(path, 'wb')
self.program_fp.write(self._get_program())
self.program_fp.flush()
os.chmod(self.program_fp.name, int('0700', 8))
reopen_readonly(self.program_fp)
def _get_program(self):
"""
Fetch the module binary from the master if necessary.
"""
return ansible_mitogen.target.get_small_file(
context=self.service_context,
path=self.path,
)
def _get_program_args(self):
"""
Return any arguments to pass to the program.
"""
return []
def revert(self):
"""
Delete the temporary program file.
"""
if self.program_fp:
self.program_fp.close()
super(ProgramRunner, self).revert()
def _get_argv(self):
"""
Return the final argument vector used to execute the program.
"""
return [
self.args['_ansible_shell_executable'],
'-c',
self._get_shell_fragment(),
]
def _get_shell_fragment(self):
return "%s %s" % (
shlex_quote(self.program_fp.name),
' '.join(map(shlex_quote, self._get_program_args())),
)
def _run(self):
try:
rc, stdout, stderr = ansible_mitogen.target.exec_args(
args=self._get_argv(),
emulate_tty=self.emulate_tty,
)
except Exception as e:
LOG.exception('While running %s', self._get_argv())
return {
'rc': 1,
'stdout': '',
'stderr': '%s: %s' % (type(e), e),
}
return {
'rc': rc,
'stdout': mitogen.core.to_text(stdout),
'stderr': mitogen.core.to_text(stderr),
}
class ArgsFileRunner(Runner):
def setup(self):
super(ArgsFileRunner, self).setup()
self._setup_args()
def _setup_args(self):
"""
Create a temporary file containing the module's arguments. The
arguments are formatted via :meth:`_get_args`.
"""
self.args_fp = tempfile.NamedTemporaryFile(
prefix='ansible_mitogen',
suffix='-args',
dir=self.get_temp_dir(),
)
self.args_fp.write(utf8(self._get_args_contents()))
self.args_fp.flush()
reopen_readonly(self.program_fp)
def _get_args_contents(self):
"""
Return the module arguments formatted as JSON.
"""
return json.dumps(self.args)
def _get_program_args(self):
return [self.args_fp.name]
def revert(self):
"""
Delete the temporary argument file.
"""
self.args_fp.close()
super(ArgsFileRunner, self).revert()
class BinaryRunner(ArgsFileRunner, ProgramRunner):
pass
class ScriptRunner(ProgramRunner):
def __init__(self, interpreter_fragment, is_python, **kwargs):
super(ScriptRunner, self).__init__(**kwargs)
self.interpreter_fragment = interpreter_fragment
self.is_python = is_python
b_ENCODING_STRING = b'# -*- coding: utf-8 -*-'
def _get_program(self):
return self._rewrite_source(
super(ScriptRunner, self)._get_program()
)
def _get_argv(self):
return [
self.args['_ansible_shell_executable'],
'-c',
self._get_shell_fragment(),
]
def _get_shell_fragment(self):
"""
Scripts are eligible for having their hashbang line rewritten, and to
be executed via /bin/sh using the ansible_*_interpreter value used as a
        shell fragment prefixed to the invocation.
"""
return "%s %s %s" % (
self.interpreter_fragment,
shlex_quote(self.program_fp.name),
' '.join(map(shlex_quote, self._get_program_args())),
)
def _rewrite_source(self, s):
"""
Mutate the source according to the per-task parameters.
"""
# While Ansible rewrites the #! using ansible_*_interpreter, it is
# never actually used to execute the script, instead it is a shell
# fragment consumed by shell/__init__.py::build_module_command().
new = [b'#!' + utf8(self.interpreter_fragment)]
if self.is_python:
new.append(self.b_ENCODING_STRING)
_, _, rest = s.partition(b'\n')
new.append(rest)
return b'\n'.join(new)
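# Illustrative before/after (editor's note, interpreter fragment assumed):
# with interpreter_fragment='/usr/bin/python2', is_python=True and a source
# beginning '#!/usr/bin/python\nimport sys\n...', _rewrite_source() produces:
#
#   #!/usr/bin/python2
#   # -*- coding: utf-8 -*-
#   import sys
#   ...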
class NewStyleRunner(ScriptRunner):
"""
Execute a new-style Ansible module, where Module Replacer-related tricks
aren't required.
"""
#: path => new-style module bytecode.
_code_by_path = {}
def __init__(self, module_map, **kwargs):
super(NewStyleRunner, self).__init__(**kwargs)
self.module_map = module_map
def _setup_imports(self):
"""
        Ensure the local importer and PushFileService have everything for the
Ansible module before setup() completes, but before detach() is called
in an asynchronous task.
The master automatically streams modules towards us concurrent to the
runner invocation, however there is no public API to synchronize on the
completion of those preloads. Instead simply reuse the importer's
synchronization mechanism by importing everything the module will need
prior to detaching.
"""
for fullname, _, _ in self.module_map['custom']:
mitogen.core.import_module(fullname)
for fullname in self.module_map['builtin']:
mitogen.core.import_module(fullname)
def _setup_excepthook(self):
"""
Starting with Ansible 2.6, some modules (file.py) install a
sys.excepthook and never clean it up. So we must preserve the original
excepthook and restore it after the run completes.
"""
self.original_excepthook = sys.excepthook
def setup(self):
super(NewStyleRunner, self).setup()
self._stdio = NewStyleStdio(self.args, self.get_temp_dir())
        # It is possible that not supplying the script filename will break
        # some modules, but no such bug has ever been reported. Instead act
        # like an interpreter that had its script piped on stdin.
self._argv = TemporaryArgv([''])
self._importer = ModuleUtilsImporter(
context=self.service_context,
module_utils=self.module_map['custom'],
)
self._setup_imports()
self._setup_excepthook()
if libc__res_init:
libc__res_init()
def _revert_excepthook(self):
sys.excepthook = self.original_excepthook
def revert(self):
self._argv.revert()
self._stdio.revert()
self._revert_excepthook()
super(NewStyleRunner, self).revert()
def _get_program_filename(self):
"""
See ProgramRunner._get_program_filename().
"""
return 'ansible_module_' + os.path.basename(self.path)
def _setup_args(self):
pass
def _setup_program(self):
self.source = ansible_mitogen.target.get_small_file(
context=self.service_context,
path=self.path,
)
def _get_code(self):
try:
return self._code_by_path[self.path]
except KeyError:
return self._code_by_path.setdefault(self.path, compile(
source=self.source,
filename="master:" + self.path,
mode='exec',
dont_inherit=True,
))
if mitogen.core.PY3:
main_module_name = '__main__'
else:
main_module_name = b'__main__'
def _handle_magic_exception(self, mod, exc):
"""
Beginning with Ansible >2.6, some modules (file.py) install a
sys.excepthook which is a closure over AnsibleModule, redirecting the
magical exception to AnsibleModule.fail_json().
For extra special needs bonus points, the class is not defined in
module_utils, but is defined in the module itself, meaning there is no
type for isinstance() that outlasts the invocation.
"""
klass = getattr(mod, 'AnsibleModuleError', None)
if klass and isinstance(exc, klass):
mod.module.fail_json(**exc.results)
def _run_code(self, code, mod):
try:
if mitogen.core.PY3:
exec(code, vars(mod))
else:
exec('exec code in vars(mod)')
except Exception as e:
self._handle_magic_exception(mod, e)
raise
def _run_atexit_funcs(self):
"""
Newer Ansibles use atexit.register() to trigger tmpdir cleanup, when
AnsibleModule.tmpdir is responsible for creating its own temporary
directory.
"""
atexit._run_exitfuncs()
def _run(self):
mod = types.ModuleType(self.main_module_name)
mod.__package__ = None
# Some Ansible modules use __file__ to find the Ansiballz temporary
# directory. We must provide some temporary path in __file__, but we
# don't want to pointlessly write the module to disk when it never
# actually needs to exist. So just pass the filename as it would exist.
mod.__file__ = os.path.join(
self.get_temp_dir(),
'ansible_module_' + os.path.basename(self.path),
)
code = self._get_code()
exc = None
try:
try:
self._run_code(code, mod)
finally:
self._run_atexit_funcs()
except SystemExit as e:
exc = e
return {
'rc': exc.args[0] if exc else 2,
'stdout': mitogen.core.to_text(sys.stdout.getvalue()),
'stderr': mitogen.core.to_text(sys.stderr.getvalue()),
}
class JsonArgsRunner(ScriptRunner):
JSON_ARGS = b'<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>'
def _get_args_contents(self):
return json.dumps(self.args).encode()
def _rewrite_source(self, s):
return (
super(JsonArgsRunner, self)._rewrite_source(s)
.replace(self.JSON_ARGS, self._get_args_contents())
)
class WantJsonRunner(ArgsFileRunner, ScriptRunner):
pass
class OldStyleRunner(ArgsFileRunner, ScriptRunner):
def _get_args_contents(self):
"""
Mimic the argument formatting behaviour of
ActionBase._execute_module().
"""
return ' '.join(
'%s=%s' % (key, shlex_quote(str(self.args[key])))
for key in self.args
) + ' ' # Bug-for-bug :(
|
ConnectBox/wifi-test-framework
|
ansible/plugins/mitogen-0.2.3/ansible_mitogen/runner.py
|
Python
|
mit
| 28,071
|
from django import template
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
return getattr(value, arg)
register.filter('getattr', getattribute)
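# Illustrative template usage (editor's note; 'vessel' and 'field_name' are
# assumed context variables):
#
#   {% load getattr %}
#   {{ vessel|getattr:field_name }}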
|
osu-cass/working-waterfronts-api
|
working_waterfronts/working_waterfronts_api/templatetags/getattr.py
|
Python
|
apache-2.0
| 274
|
# -*- coding: utf-8 -*-
#
# SPDX-FileCopyrightText: 2013-2021 Agora Voting SL <contact@nvotes.com>
#
# SPDX-License-Identifier: AGPL-3.0-only
#
import pickle
import base64
import json
import re
from datetime import datetime
from flask import Blueprint, request, make_response, abort
from frestq.utils import loads, dumps
from frestq.tasks import SimpleTask, TaskError
from frestq.app import app, db
from models import Election, Authority, QueryQueue
from create_election.performer_jobs import check_election_data
from taskqueue import queue_task, apply_task, dequeue_task
public_api = Blueprint('public_api', __name__)
def error(status, message=""):
if message:
data = json.dumps(dict(message=message))
else:
data=""
return make_response(data, status)
@public_api.route('/dequeue', methods=['GET'])
def dequeue():
try:
dequeue_task()
except Exception as e:
return make_response(dumps(dict(status=e.message)), 202)
return make_response(dumps(dict(status="ok")), 202)
@public_api.route('/election', methods=['POST'])
def post_election():
'''
POST /election
Creates an election, with the given input data. This involves communicating
with the different election authorities to generate the joint public key.
Example request:
POST /election
{
"id": 1110,
"title": "Votación de candidatos",
"description": "Selecciona los documentos político, ético y organizativo con los que Podemos",
"director": "wadobo-auth1",
"authorities": "openkratio-authority",
"layout": "pcandidates-election",
"presentation": {
"share_text": "lo que sea",
"theme": "foo",
"urls": [
{
"title": "",
"url": ""
}
],
"theme_css": "whatever"
},
"end_date": "2013-12-09T18:17:14.457000",
"start_date": "2013-12-06T18:17:14.457000",
"questions": [
{
"description": "",
"layout": "pcandidates-election",
"max": 1,
"min": 0,
"num_winners": 1,
"title": "Secretaría General",
"randomize_answer_order": true,
"tally_type": "plurality-at-large",
"answer_total_votes_percentage": "over-total-valid-votes",
"answers": [
{
"id": 0,
"category": "Equipo de Enfermeras",
"details": "",
"sort_order": 1,
"urls": [
{
"title": "",
"url": ""
}
],
"text": "Fulanita de tal",
}
]
}
],
"authorities": [
{
"name": "Asociación Sugus GNU/Linux",
"orchestra_url": "https://sugus.eii.us.es/orchestra",
"ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
},
{
"name": "Agora Ciudadana",
"orchestra_url": "https://agoravoting.com:6874/orchestra",
"ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
},
{
"name": "Wadobo Labs",
"orchestra_url": "https://wadobo.com:6874/orchestra",
"ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
}
]
}
On success, response is empty with status 202 Accepted and returns something
like:
{
"task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558",
}
When the election finally gets processed, the callback_url is called with a
POST containing the protInfo.xml file generated jointly by each
authority, following this example response:
{
"status": "finished",
"reference": {
"election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /election"
},
"session_data": [{
"session_id": "deadbeef-03fa-4890-aa83-2fc558e645b5",
"publickey": ["<pubkey codified in hexadecimal>"]
}]
}
Note that this protInfo.xml will contain the election public key, but
also some other information. In particular, it's worth noting that
the http and hint servers' urls for each authority could change later,
if election-orchestra needs it.
If there was an error, then the callback will be called following this
example format:
{
"status": "error",
"reference": {
"session_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /election"
},
"data": {
"message": "error message"
}
}
'''
data = request.get_json(force=True, silent=True)
d = base64.b64encode(pickle.dumps(data)).decode('utf-8')
queueid = queue_task(task='election', data=d)
return make_response(dumps(dict(queue_id=queueid)), 202)
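# Editor's note (illustrative): the queued payload is a base64-encoded pickle
# of the request JSON, so a worker dequeuing it can recover the dict with:
#
#   data = pickle.loads(base64.b64decode(d))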
@public_api.route('/tally', methods=['POST'])
def post_tally():
'''
POST /tally
Tallies an election, with the given input data. This involves communicating
with the different election authorities to do the tally.
Example request:
POST /tally
{
"election_id": 111,
"callback_url": "https://127.0.0.1:5000/public_api/receive_tally",
"votes_url": "https://127.0.0.1:5000/public_data/vota4/encrypted_ciphertexts",
"votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk"
}
On success, response is empty with status 202 Accepted and returns something
like:
{
"task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558",
}
When the election finally gets processed, the callback_url is called with POST
similar to the following example:
{
"status": "finished",
"reference": {
"election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /tally"
},
"data": {
"votes_url": "https://127.0.0.1:5000/public_data/vota4/tally.tar.bz2",
"votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk"
}
}
If there was an error, then the callback will be called following this
example format:
{
"status": "error",
"reference": {
"election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
"action": "POST /tally"
},
"data": {
"message": "error message"
}
}
'''
# first of all, parse input data
data = request.get_json(force=True, silent=True)
d = base64.b64encode(pickle.dumps(data)).decode('utf-8')
queueid = queue_task(task='tally', data=d)
return make_response(dumps(dict(queue_id=queueid)), 202)
@public_api.route('/receive_election', methods=['POST'])
def receive_election():
'''
This is a test route to be able to test that callbacks are correctly sent
'''
print("ATTENTION received election callback: ")
print(request.get_json(force=True, silent=True))
return make_response("", 202)
@public_api.route('/receive_tally', methods=['POST'])
def receive_tally():
'''
This is a test route to be able to test that callbacks are correctly sent
'''
print("ATTENTION received tally callback: ")
print(request.get_json(force=True, silent=True))
return make_response("", 202)
|
agoravoting/election-orchestra
|
public_api.py
|
Python
|
agpl-3.0
| 8,209
|
# Python - 3.4.3
def bmi(weight, height):
    # compute the BMI value
v = weight / height ** 2
    # return the category based on the BMI value
if v <= 18.5:
return 'Underweight'
elif v <= 25:
return 'Normal'
elif v <= 30:
return 'Overweight'
else:
return 'Obese'
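# Illustrative examples (editor's note): weight in kilograms, height in metres.
#
#   bmi(50, 1.80)   # 15.43 -> 'Underweight'
#   bmi(80, 1.80)   # 24.69 -> 'Normal'
#   bmi(90, 1.80)   # 27.78 -> 'Overweight'
#   bmi(100, 1.80)  # 30.86 -> 'Obese'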
|
RevansChen/online-judge
|
Codewars/8kyu/calculate-bmi/Python/solution1.py
|
Python
|
mit
| 293
|
# bgscan tests
# Copyright (c) 2014, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import time
import logging
logger = logging.getLogger()
import os
import hostapd
def test_bgscan_simple(dev, apdev):
"""bgscan_simple"""
hostapd.add_ap(apdev[0]['ifname'], { "ssid": "bgscan" })
hostapd.add_ap(apdev[1]['ifname'], { "ssid": "bgscan" })
dev[0].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="simple:1:-20:2")
dev[1].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="simple:1:-45:2")
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="simple:1:-45")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="simple:0:0")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="simple")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="simple:1")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
ev = dev[0].wait_event(["CTRL-EVENT-SIGNAL-CHANGE"], timeout=10)
if ev is None:
raise Exception("dev0 did not indicate signal change event")
if "above=0" not in ev:
raise Exception("Unexpected signal change event contents from dev0: " + ev)
ev = dev[1].wait_event(["CTRL-EVENT-SIGNAL-CHANGE"], timeout=10)
if ev is None:
raise Exception("dev1 did not indicate signal change event")
if "above=1" not in ev:
raise Exception("Unexpected signal change event contents from dev1: " + ev)
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-STARTED"], timeout=3)
if ev is None:
raise Exception("dev0 did not start a scan")
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-STARTED"], timeout=3)
if ev is None:
raise Exception("dev1 did not start a scan")
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
if ev is None:
raise Exception("dev0 did not complete a scan")
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
if ev is None:
raise Exception("dev1 did not complete a scan")
def test_bgscan_learn(dev, apdev):
"""bgscan_learn"""
hostapd.add_ap(apdev[0]['ifname'], { "ssid": "bgscan" })
hostapd.add_ap(apdev[1]['ifname'], { "ssid": "bgscan" })
try:
os.remove("/tmp/test_bgscan_learn.bgscan")
except:
pass
try:
dev[0].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="learn:1:-20:2")
id = dev[1].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="learn:1:-45:2:/tmp/test_bgscan_learn.bgscan")
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="learn:1:-45")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="learn:0:0")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="learn")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
dev[2].connect("bgscan", key_mgmt="NONE", scan_freq="2412",
bgscan="learn:1")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
ev = dev[0].wait_event(["CTRL-EVENT-SIGNAL-CHANGE"], timeout=10)
if ev is None:
raise Exception("dev0 did not indicate signal change event")
if "above=0" not in ev:
raise Exception("Unexpected signal change event contents from dev0: " + ev)
ev = dev[1].wait_event(["CTRL-EVENT-SIGNAL-CHANGE"], timeout=10)
if ev is None:
raise Exception("dev1 did not indicate signal change event")
if "above=1" not in ev:
raise Exception("Unexpected signal change event contents from dev1: " + ev)
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-STARTED"], timeout=3)
if ev is None:
raise Exception("dev0 did not start a scan")
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-STARTED"], timeout=3)
if ev is None:
raise Exception("dev1 did not start a scan")
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
if ev is None:
raise Exception("dev0 did not complete a scan")
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
if ev is None:
raise Exception("dev1 did not complete a scan")
dev[0].request("DISCONNECT")
dev[1].request("DISCONNECT")
dev[0].request("REMOVE_NETWORK all")
with open("/tmp/test_bgscan_learn.bgscan", "r") as f:
lines = f.read().splitlines()
if lines[0] != "wpa_supplicant-bgscan-learn":
raise Exception("Unexpected bgscan header line")
if 'BSS 02:00:00:00:03:00 2412' not in lines:
raise Exception("Missing BSS1")
if 'BSS 02:00:00:00:04:00 2412' not in lines:
raise Exception("Missing BSS2")
if 'NEIGHBOR 02:00:00:00:03:00 02:00:00:00:04:00' not in lines:
raise Exception("Missing BSS1->BSS2 neighbor entry")
if 'NEIGHBOR 02:00:00:00:04:00 02:00:00:00:03:00' not in lines:
raise Exception("Missing BSS2->BSS1 neighbor entry")
dev[1].set_network(id, "scan_freq", "")
dev[1].connect_network(id)
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-STARTED"], timeout=10)
if ev is None:
raise Exception("dev1 did not start a scan")
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 10)
if ev is None:
raise Exception("dev1 did not complete a scan")
dev[1].request("REMOVE_NETWORK all")
finally:
try:
os.remove("/tmp/test_bgscan_learn.bgscan")
except:
pass
|
wangybgit/Chameleon
|
hostapd-OpenWrt/tests/hwsim/test_bgscan.py
|
Python
|
apache-2.0
| 6,284
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, André Paramés <git@andreparames.com>
# Based on the Git module by Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = u'''
---
module: bzr
author: "André Paramés (@andreparames)"
version_added: "1.1"
short_description: Deploy software (or files) from bzr branches
description:
- Manage I(bzr) branches to deploy files or software.
options:
name:
required: true
aliases: [ 'parent' ]
description:
- SSH or HTTP protocol address of the parent branch.
dest:
required: true
description:
- Absolute path of where the branch should be cloned to.
version:
required: false
default: "head"
description:
- What version of the branch to clone. This can be the
bzr revno or revid.
force:
required: false
default: "no"
choices: [ 'yes', 'no' ]
description:
- If C(yes), any modified files in the working
tree will be discarded. Before 1.9 the default
value was "yes".
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to bzr executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
'''
EXAMPLES = '''
# Example bzr checkout from Ansible Playbooks
- bzr:
name: 'bzr+ssh://foosball.example.org/path/to/branch'
dest: /srv/checkout
version: 22
'''
import re
class Bzr(object):
def __init__(self, module, parent, dest, version, bzr_path):
self.module = module
self.parent = parent
self.dest = dest
self.version = version
self.bzr_path = bzr_path
def _command(self, args_list, cwd=None, **kwargs):
(rc, out, err) = self.module.run_command([self.bzr_path] + args_list, cwd=cwd, **kwargs)
return (rc, out, err)
def get_version(self):
'''samples the version of the bzr branch'''
cmd = "%s revno" % self.bzr_path
rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest)
revno = stdout.strip()
return revno
def clone(self):
'''makes a new bzr branch if it does not already exist'''
dest_dirname = os.path.dirname(self.dest)
try:
os.makedirs(dest_dirname)
except:
pass
if self.version.lower() != 'head':
args_list = ["branch", "-r", self.version, self.parent, self.dest]
else:
args_list = ["branch", self.parent, self.dest]
return self._command(args_list, check_rc=True, cwd=dest_dirname)
def has_local_mods(self):
cmd = "%s status -S" % self.bzr_path
rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest)
lines = stdout.splitlines()
lines = filter(lambda c: not re.search('^\\?\\?.*$', c), lines)
return len(lines) > 0
def reset(self, force):
'''
Resets the index and working tree to head.
Discards any changes to tracked files in the working
tree since that commit.
'''
if not force and self.has_local_mods():
self.module.fail_json(msg="Local modifications exist in branch (force=no).")
return self._command(["revert"], check_rc=True, cwd=self.dest)
def fetch(self):
'''updates branch from remote sources'''
if self.version.lower() != 'head':
(rc, out, err) = self._command(["pull", "-r", self.version], cwd=self.dest)
else:
(rc, out, err) = self._command(["pull"], cwd=self.dest)
if rc != 0:
self.module.fail_json(msg="Failed to pull")
return (rc, out, err)
def switch_version(self):
'''once pulled, switch to a particular revno or revid'''
if self.version.lower() != 'head':
args_list = ["revert", "-r", self.version]
else:
args_list = ["revert"]
return self._command(args_list, check_rc=True, cwd=self.dest)
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
dest=dict(required=True, type='path'),
name=dict(required=True, aliases=['parent']),
version=dict(default='head'),
force=dict(default='no', type='bool'),
executable=dict(default=None),
)
)
dest = module.params['dest']
parent = module.params['name']
version = module.params['version']
force = module.params['force']
bzr_path = module.params['executable'] or module.get_bin_path('bzr', True)
bzrconfig = os.path.join(dest, '.bzr', 'branch', 'branch.conf')
rc, out, err, status = (0, None, None, None)
bzr = Bzr(module, parent, dest, version, bzr_path)
# if there is no bzr configuration, do a branch operation
# else pull and switch the version
before = None
local_mods = False
if not os.path.exists(bzrconfig):
(rc, out, err) = bzr.clone()
else:
# else do a pull
local_mods = bzr.has_local_mods()
before = bzr.get_version()
(rc, out, err) = bzr.reset(force)
if rc != 0:
module.fail_json(msg=err)
(rc, out, err) = bzr.fetch()
if rc != 0:
module.fail_json(msg=err)
# switch to version specified regardless of whether
# we cloned or pulled
(rc, out, err) = bzr.switch_version()
# determine if we changed anything
after = bzr.get_version()
changed = False
if before != after or local_mods:
changed = True
module.exit_json(changed=changed, before=before, after=after)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
Rajeshkumar90/ansible-modules-extras
|
source_control/bzr.py
|
Python
|
gpl-3.0
| 6,658
|
# ===============================================================================
# Copyright 2019 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from traits.api import HasTraits, List, Str
from pychron.core.helpers.formatting import floatfmt
class BaseMatrixResult(HasTraits):
values = List
name = Str
def __init__(self, ag, ags):
self.values = self._calculate_values(ag, ags)
self._set_name(ag)
def _set_name(self, ag):
self.name = "{}({})".format(ag.identifier, ag.group_id)
def _calculate_values(self, ag, others):
raise NotImplementedError
def get_value(self, row, column):
if column == 0:
return self.name
elif column < row:
return ""
else:
ret = self.values[column + 1]
if ret:
ret = self._format_value(ret)
return ret
def _format_value(self, v):
return floatfmt(v, 3)
def get_color(self, row, column):
if column == 0:
return "white"
elif column < row:
return "white"
else:
v = self.values[column + 1]
return "white" if not v or v < 0.05 else "lightgreen"
# ============= EOF =============================================
|
USGSDenverPychron/pychron
|
pychron/pipeline/results/base_matrix_result.py
|
Python
|
apache-2.0
| 1,863
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Module for connecting to kodi """
import cherrypy
import htpc
import base64
import socket
import struct
from urllib2 import quote
from jsonrpclib import Server
from sqlobject import SQLObject, SQLObjectNotFound
from sqlobject.col import StringCol, IntCol
from htpc.helpers import get_image, cachedprime
import logging
from cherrypy.lib.auth2 import require, member_of
import os
import hashlib
class KodiServers(SQLObject):
""" SQLObject class for kodi_servers table """
name = StringCol()
host = StringCol()
port = IntCol()
username = StringCol(default=None)
password = StringCol(default=None)
mac = StringCol(default=None)
class sqlmeta:
fromDatabase = True
class Kodi(object):
def __init__(self):
""" Add module to list of modules on load and set required settings """
self.logger = logging.getLogger('modules.kodi')
KodiServers.createTable(ifNotExists=True)
try:
KodiServers.sqlmeta.addColumn(IntCol('starterport'), changeSchema=True)
except:
# Will always raise if column exist
pass
htpc.MODULES.append({
'name': 'Kodi',
'id': 'kodi',
'fields': [
{'type': 'bool',
'label': 'Enable',
'name': 'kodi_enable'},
{'type': 'text',
'label': 'Menu name',
'name': 'kodi_name'},
{'type': 'bool',
'label': 'Enable PVR',
'name': 'kodi_enable_pvr'},
{'type': 'bool',
'label': 'Hide watched',
'name': 'kodi_hide_watched'}
]
})
htpc.MODULES.append({
'name': 'Kodi Servers',
'id': 'kodi_update_server',
'action': htpc.WEBDIR + 'kodi/setserver',
'test': htpc.WEBDIR + 'kodi/ping',
'fields': [
{'type': 'select',
'label': 'Server',
'name': 'kodi_server_id',
'options': [
{'name': 'New', 'value': 0}
]
},
{'type': 'text',
'label': 'Name',
'name': 'kodi_server_name'},
{'type': 'text',
'label': 'IP / Host',
'placeholder': 'localhost',
'name': 'kodi_server_host'},
{'type': 'text',
'label': 'Port',
'placeholder': '8080',
'name': 'kodi_server_port'},
{'type': 'text',
'label': 'Username',
'name': 'kodi_server_username'},
{'type': 'password',
'label': 'Password',
'name': 'kodi_server_password'},
{'type': 'text',
'label': 'Mac addr.',
'name': 'kodi_server_mac'},
{'type': 'text',
'label': 'XBMC Starter port',
'placeholder': '9',
'name': 'kodi_server_starterport'}
]
})
server = htpc.settings.get('kodi_current_server', 0)
self.changeserver(server)
@cherrypy.expose()
@require()
def index(self):
""" Generate page from template """
return htpc.LOOKUP.get_template('kodi.html').render(scriptname='kodi')
@cherrypy.expose()
@require()
def webinterface(self):
""" Generate page from template """
raise cherrypy.HTTPRedirect(self.url('', True))
@cherrypy.expose()
@cherrypy.tools.json_out()
@require(member_of(htpc.role_admin))
def primecache(self, t='all', wanted_art='all', async=True, resize=True):
''' find all images and cache them, might take a while...'''
kodi = Server(self.url('/jsonrpc', True))
url = self.url('/image/')
# fix headers
_head = 'Basic %s' % self.auth()
headers = {'Authorization': _head}
musicprop = ['fanart', 'thumbnail']
itemprop = ['art', 'fanart', 'thumbnail']
addonprop = ['thumbnail']
stuff = []
if t == 'all':
movie = kodi.VideoLibrary.GetMovies(properties=itemprop)
episode = kodi.VideoLibrary.GetEpisodes(properties=itemprop)
artist = kodi.AudioLibrary.GetArtists(properties=musicprop)
song = kodi.AudioLibrary.GetSongs(properties=musicprop)
tvshow = kodi.VideoLibrary.GetTVShows(properties=itemprop)
stuff = [movie, episode, artist, song, tvshow]
elif t == 'movie':
movie = kodi.VideoLibrary.GetMovies(properties=itemprop)
stuff.append(movie)
elif t == 'episode':
episode = kodi.VideoLibrary.GetEpisodes(properties=itemprop)
stuff.append(episode)
elif t == 'song':
song = kodi.AudioLibrary.GetSongs(properties=musicprop)
stuff.append(song)
elif t == 'tvshow':
tvshow = kodi.VideoLibrary.GetTVShows(properties=itemprop)
stuff.append(tvshow)
elif t == 'addon':
addon = kodi.Addons.GetAddons(content='unknown', enabled='all', properties=addonprop)
stuff.append(addon)
imgdir = os.path.join(htpc.DATADIR, 'images/')
imglist = []
self.logger.debug('Fetching every image we can find from kodi') # todo add addon images
resize_sizes = [[225, 338], [200, 300], [675, 400], [100, 150], [375, 210], [150, 150]]
for item in stuff:
for k, v in item.items():
if k in ['episodes', 'movies', 'tvshows', 'songs', 'artists', 'addons']:
                    self.logger.debug('There were %s %s' % (len(item[k]), k))
for kk in item[k]:
for kkk, vvv, in kk.items():
d = {}
if kkk == wanted_art or wanted_art == 'all':
if kkk == 'art':
for z, a in kk['art'].items():
if z == wanted_art or wanted_art == 'all':
_url = url + quote(a)
h = hashlib.md5(_url).hexdigest()
d['fp'] = os.path.join(imgdir, h)
d['hash'] = h
d['url'] = _url
d['resize'] = resize_sizes
imglist.append(d)
if kkk in ['fanart', 'thumbnail']:
_url = url + quote(vvv)
h = hashlib.md5(_url).hexdigest()
d['fp'] = os.path.join(imgdir, h)
d['hash'] = h
d['url'] = _url
d['resize'] = resize_sizes
imglist.append(d)
self.logger.debug('Found %s images in total' % len(imglist))
try:
if async:
t = cachedprime(imglist, headers, resize=bool(resize))
return t
except Exception as e:
self.logger.debug('%s' % e)
@cherrypy.expose()
@require(member_of(htpc.role_admin))
@cherrypy.tools.json_out()
def ping(self, kodi_server_host='', kodi_server_port='',
kodi_server_username='', kodi_server_password='', **kwargs):
""" Tests settings, returns MAC address on success and null on fail """
self.logger.debug("Testing kodi connectivity")
try:
url = kodi_server_host + ':' + kodi_server_port
if kodi_server_username and kodi_server_password:
url = kodi_server_username + ':' + kodi_server_password + '@' + url
kodi = Server('http://' + url + '/jsonrpc')
self.logger.debug("Trying to contact kodi via %s" % url)
return kodi.XBMC.GetInfoLabels(labels=["Network.MacAddress"])
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to contact kodi via %s", url)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def getserver(self, id=None):
if id:
""" Get kodi server info """
try:
server = KodiServers.selectBy(id=id).getOne()
return dict((c, getattr(server, c)) for c in server.sqlmeta.columns)
except SQLObjectNotFound:
return
""" Get a list of all servers and the current server """
servers = []
for s in KodiServers.select():
servers.append({'id': s.id, 'name': s.name})
if len(servers) < 1:
return
try:
current = self.current.name
except AttributeError:
current = None
return {'current': current, 'servers': servers}
@cherrypy.expose()
@require(member_of(htpc.role_admin))
@cherrypy.tools.json_out()
def setserver(self, kodi_server_id, kodi_server_name, kodi_server_host, kodi_server_port,
kodi_server_username=None, kodi_server_password=None, kodi_server_mac=None, kodi_server_starterport=''):
""" Create a server if id=0, else update a server """
if kodi_server_starterport == '':
kodi_server_starterport = None
else:
kodi_server_starterport = int(kodi_server_starterport)
if kodi_server_id == "0":
self.logger.debug("Creating kodi-Server in database")
try:
server = KodiServers(name=kodi_server_name,
host=kodi_server_host,
port=int(kodi_server_port),
username=kodi_server_username,
password=kodi_server_password,
mac=kodi_server_mac,
starterport=kodi_server_starterport)
self.changeserver(server.id)
htpc.BLACKLISTWORDS.append(kodi_server_password)
return 1
except Exception, e:
self.logger.debug("Exception: " + str(e))
self.logger.error("Unable to create kodi-Server in database")
return 0
else:
self.logger.debug("Updating kodi-Server " + kodi_server_name + " in database")
try:
server = KodiServers.selectBy(id=kodi_server_id).getOne()
server.name = kodi_server_name
server.host = kodi_server_host
server.port = int(kodi_server_port)
server.username = kodi_server_username
server.password = kodi_server_password
server.mac = kodi_server_mac
server.starterport = kodi_server_starterport
return 1
except SQLObjectNotFound, e:
self.logger.error("Unable to update kodi-Server " + server.name + " in database")
return 0
@cherrypy.expose()
@require(member_of(htpc.role_admin))
def delserver(self, id):
""" Delete a server """
self.logger.debug("Deleting server " + str(id))
KodiServers.delete(id)
self.changeserver()
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def changeserver(self, id=0):
try:
self.current = KodiServers.selectBy(id=id).getOne()
htpc.settings.set('kodi_current_server', str(id))
self.logger.info("Selecting kodi server: %s", id)
return "success"
except SQLObjectNotFound:
try:
self.current = KodiServers.select(limit=1).getOne()
self.logger.error("Invalid server. Selecting first Available.")
return "success"
except SQLObjectNotFound:
self.current = None
self.logger.warning("No configured kodi-Servers.")
return "No valid servers"
@cherrypy.expose()
@require()
def GetThumb(self, thumb=None, h=None, w=None, o=100, mode=None):
""" Parse thumb to get the url and send to htpc.proxy.get_image """
url = self.url('/images/DefaultVideo.png')
if thumb:
url = self.url('/image/' + quote(thumb))
self.logger.debug("Trying to fetch image via %s" % url)
return get_image(url, h, w, o, mode, self.auth())
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetMovies(self, start=0, end=0, sortmethod='title', sortorder='ascending', hidewatched=0, filter=''):
""" Get a list of all movies """
self.logger.debug("Fetching Movies")
try:
kodi = Server(self.url('/jsonrpc', True))
sort = {'order': sortorder, 'method': sortmethod, 'ignorearticle': True}
properties = ['title', 'year', 'plot', 'thumbnail', 'file', 'fanart', 'studio', 'trailer',
'imdbnumber', 'genre', 'rating', 'playcount']
limits = {'start': int(start), 'end': int(end)}
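# Start from a free-text title filter; when watched items should be hidden,
# combine it with a playcount == 0 condition using Kodi's 'and' filter syntax.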
filter = {'field': 'title', 'operator': 'contains', 'value': filter}
if hidewatched == "1":
filter = {"and": [filter, {'field': 'playcount', 'operator': 'is', 'value': '0'}]}
return kodi.VideoLibrary.GetMovies(sort=sort, properties=properties, limits=limits, filter=filter)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch movies!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetShows(self, start=0, end=0, sortmethod='title', sortorder='ascending', hidewatched=0, filter=''):
""" Get a list of all the TV Shows """
self.logger.debug("Fetching TV Shows")
try:
kodi = Server(self.url('/jsonrpc', True))
sort = {'order': sortorder, 'method': sortmethod, 'ignorearticle': True}
properties = ['title', 'year', 'plot', 'thumbnail', 'playcount']
limits = {'start': int(start), 'end': int(end)}
filter = {'field': 'title', 'operator': 'contains', 'value': filter}
if hidewatched == "1":
filter = {"and": [filter, {'field': 'playcount', 'operator': 'is', 'value': '0'}]}
shows = kodi.VideoLibrary.GetTVShows(sort=sort, properties=properties, limits=limits, filter=filter)
return shows
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch TV Shows")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetEpisodes(self, start=0, end=0, sortmethod='episode', sortorder='ascending', tvshowid=None, hidewatched=False, filter=''):
""" Get information about a single TV Show """
self.logger.debug("Loading information for TVID %s", str(tvshowid))
try:
kodi = Server(self.url('/jsonrpc', True))
sort = {'order': sortorder, 'method': sortmethod, 'ignorearticle': True}
properties = ['episode', 'season', 'thumbnail', 'plot', 'file', 'playcount']
limits = {'start': int(start), 'end': int(end)}
filter = {'field': 'title', 'operator': 'contains', 'value': filter}
if hidewatched == "1":
filter = {"and": [filter, {'field': 'playcount', 'operator': 'is', 'value': '0'}]}
episodes = kodi.VideoLibrary.GetEpisodes(sort=sort, tvshowid=int(tvshowid), properties=properties, limits=limits, filter=filter)
return episodes
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch episodes!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetArtists(self, start=0, end=0, sortmethod='artist', sortorder='ascending', filter=''):
""" Get a list of all artists """
self.logger.debug("Fetching all artists in the music database")
try:
kodi = Server(self.url('/jsonrpc', True))
sort = {'order': sortorder, 'method': sortmethod, 'ignorearticle': True}
properties = ['thumbnail', 'fanart']
limits = {'start': int(start), 'end': int(end)}
filter = {'field': 'artist', 'operator': 'contains', 'value': filter}
return kodi.AudioLibrary.GetArtists(properties=properties, limits=limits, sort=sort, filter=filter)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch artists!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetAlbums(self, start=0, end=0, sortmethod='label', sortorder='ascending', artistid=None, filter=''):
""" Get a list of all albums for artist """
self.logger.debug("Loading all albums for ARTISTID %s", str(artistid))
try:
kodi = Server(self.url('/jsonrpc', True))
sort = {'order': sortorder, 'method': sortmethod, 'ignorearticle': True}
properties = ['title', 'artist', 'year', 'thumbnail']
limits = {'start': int(start), 'end': int(end)}
if artistid:
filter = {'artistid': int(artistid)}
else:
filter = {'or': [{'field': 'album', 'operator': 'contains', 'value': filter},
{'field': 'artist', 'operator': 'contains', 'value': filter}]}
return kodi.AudioLibrary.GetAlbums(properties=properties, limits=limits, sort=sort, filter=filter)
except Exception, e:
self.logger.debug("Exception: %s", str(e))
self.logger.error("Unable to fetch albums!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetSongs(self, start=0, end=0, sortmethod='title', sortorder='ascending', albumid=None, artistid=None, filter='', *args, **kwargs):
""" Get a list of all songs """
self.logger.debug("Fetching all artists in the music database")
try:
kodi = Server(self.url('/jsonrpc', True))
sort = {'order': sortorder, 'method': sortmethod, 'ignorearticle': True}
properties = ['artist', 'artistid', 'album', 'albumid', 'duration', 'year', 'thumbnail']
limits = {'start': int(start), 'end': int(end)}
if albumid and filter == '':
filter = {'albumid': int(albumid)}
elif artistid and filter == '':
filter = {'artistid': int(artistid)}
else:
filter = {'or': [{'field': 'album', 'operator': 'contains', 'value': filter},
{'field': 'artist', 'operator': 'contains', 'value': filter},
{'field': 'title', 'operator': 'contains', 'value': filter}]}
return kodi.AudioLibrary.GetSongs(properties=properties, limits=limits, sort=sort, filter=filter)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch artists!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetChannelGroups(self, type='tv'):
""" Get PVR channel list from kodi """
self.logger.debug("Loading kodi PVC channel list.")
try:
kodi = Server(self.url('/jsonrpc', True))
return kodi.PVR.GetChannelGroups(channeltype=type)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch channelgroups!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetChannels(self, type='tv', group=2):
""" Get PVR channel list from kodi """
self.logger.debug("Loading kodi PVC channel list.")
try:
kodi = Server(self.url('/jsonrpc', True))
return kodi.PVR.GetChannels(channelgroupid=int(group), properties=['thumbnail'])
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch channels!")
return
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def ExecuteAddon(self, addon, cmd0='', cmd1=''):
""" Execute a kodi addon """
# cmd0 and cmd1 are parameters passed on to the addon, usually some kind of user input
if cmd0 == 'undefined':
cmd0 = ''
if cmd1 == 'undefined':
cmd1 = ''
self.logger.debug('Execute %s with commands cmd0 %s and cmd1 %s' % (addon, cmd0, cmd1))
kodi = Server(self.url('/jsonrpc', True))
if addon == 'script.artwork.downloader':
return kodi.Addons.ExecuteAddon(addonid=addon, params=['tvshow', 'movie', 'musicvideos'])
elif addon == 'script.cinema.experience':
cmd = 'movieid=' + str(int(cmd0))
return kodi.Addons.ExecuteAddon(addon, cmd)
elif addon == 'plugin.video.youtube':
cmd = 'action=play_video&videoid=' + cmd0
return kodi.Addons.ExecuteAddon(addon, cmd)
elif addon == 'script.cdartmanager':
return kodi.Addons.ExecuteAddon(addon, cmd0)
elif addon == 'plugin.video.twitch':
if cmd0: # If search
return kodi.Addons.ExecuteAddon(addon, '/searchresults/'+ cmd0 + '/0' )
else: # Open plugin
return kodi.Addons.ExecuteAddon(addon, '/')
elif addon == 'plugin.video.nrk':
if cmd0:
# Does not work in kodi or via this one, think its a addon problem
cmd = '/search/%s/1' % cmd0
return kodi.Addons.ExecuteAddon(addon, cmd)
else:
return kodi.Addons.ExecuteAddon(addonid=addon)
elif addon == 'script.globalsearch':
kodi.Addons.ExecuteAddon(addon, '/searchstring/'+ cmd0)
return kodi.Input.SendText(text=cmd0)
else:
return kodi.Addons.ExecuteAddon(addonid=addon)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def Enable_DisableAddon(self, addonid=None, enabled=None):
kodi = Server(self.url('/jsonrpc', True))
return kodi.Addons.SetAddonEnabled(addonid=addonid, enabled=bool(int(enabled)))
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetAddons(self):
kodi = Server(self.url('/jsonrpc', True))
prop = ['name', 'thumbnail', 'description', 'author', 'version', 'enabled', 'rating', 'summary']
addons = kodi.Addons.GetAddons(content='unknown', enabled='all', properties=prop)['addons']
return addons
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def PlayItem(self, item=None, type=None):
""" Play a file in kodi """
self.logger.debug("Playing '%s' of the type %s", item, type)
kodi = Server(self.url('/jsonrpc', True))
if type == 'movie':
return kodi.Player.Open(item={'movieid': int(item)}, options={'resume': True})
elif type == 'episode':
return kodi.Player.Open(item={'episodeid': int(item)}, options={'resume': True})
elif type == 'channel':
return kodi.Player.Open(item={'channelid': int(item)})
elif type == 'artist':
return kodi.Player.Open(item={'artistid': int(item)})
elif type == 'album':
return kodi.Player.Open(item={'albumid': int(item)})
elif type == 'song':
return kodi.Player.Open(item={'songid': int(item)})
else:
return kodi.Player.Open(item={'file': item})
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def QueueItem(self, item, type):
""" Queue a file in kodi """
self.logger.debug("Enqueueing '%s' of the type %s", item, type)
kodi = Server(self.url('/jsonrpc', True))
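# Kodi playlist ids: 0 is the music playlist, 1 is the video playlist.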
if type == 'movie':
return kodi.Playlist.Add(playlistid=1, item={'movieid': int(item)})
elif type == 'episode':
return kodi.Playlist.Add(playlistid=1, item={'episodeid': int(item)})
elif type == 'channel':
return kodi.Playlist.Add(playlistid=1, item={'channelid': int(item)})
elif type == 'artist':
return kodi.Playlist.Add(playlistid=0, item={'artistid': int(item)})
elif type == 'album':
return kodi.Playlist.Add(playlistid=0, item={'albumid': int(item)})
elif type == 'song':
return kodi.Playlist.Add(playlistid=0, item={'songid': int(item)})
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def RemoveItem(self, item, playlistid=0):
""" Remove a file from the playlist """
self.logger.debug("Removing '%s' from the playlist", item)
kodi = Server(self.url('/jsonrpc', True))
return kodi.Playlist.Remove(playlistid=playlistid, position=int(item))
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def PlaylistMove(self, position1, position2, playlistid=0):
""" Swap files in playlist """
playlistid = int(playlistid)
position1 = int(position1)
position2 = int(position2)
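# Kodi's JSON-RPC Playlist.Swap only exchanges two positions, so walk the item
# one step at a time towards its destination.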
i = 1 if position1 < position2 else -1
start = position1
kodi = Server(self.url('/jsonrpc', True))
while position1 != position2:
kodi.Playlist.Swap(playlistid=playlistid, position1=position1, position2=position1 + i)
position1 += i
return "Moved from " + str(start) + " to " + str(position2)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def Playlist(self, type='audio'):
""" Get a playlist from kodi """
self.logger.debug("Loading Playlist of type %s", type)
kodi = Server(self.url('/jsonrpc', True))
if type == 'video':
return kodi.Playlist.GetItems(playlistid=1, properties=['year', 'showtitle', 'season', 'episode', 'runtime'])
return kodi.Playlist.GetItems(playlistid=0, properties=['artist', 'title', 'album', 'duration'])
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def NowPlaying(self):
""" Get information about current playing item """
self.logger.debug("Fetching currently playing information")
try:
kodi = Server(self.url('/jsonrpc', True))
player = kodi.Player.GetActivePlayers()[0]
playerid = player['playerid']
if player['type'] == 'video':
playerprop = ['speed', 'position', 'time', 'totaltime',
'percentage', 'subtitleenabled', 'currentsubtitle',
'subtitles', 'currentaudiostream', 'audiostreams']
itemprop = ['title', 'season', 'episode', 'duration', 'showtitle',
'fanart', 'tvshowid', 'plot', 'thumbnail', 'year']
elif player['type'] == 'audio':
playerprop = ['speed', 'position', 'time', 'totaltime', 'percentage']
itemprop = ['title', 'duration', 'fanart', 'artist', 'albumartist', 'album', 'track', 'artistid', 'albumid', 'thumbnail', 'year']
app = kodi.Application.GetProperties(properties=['muted', 'volume'])
player = kodi.Player.GetProperties(playerid=playerid, properties=playerprop)
item = kodi.Player.GetItem(playerid=playerid, properties=itemprop)
return {'playerInfo': player, 'itemInfo': item, 'app': app}
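# Player.GetActivePlayers() returns an empty list when nothing is playing, so
# the [0] above raises IndexError; treat that as "nothing playing".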
except IndexError:
return
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch currently playing information!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def ControlPlayer(self, action, value=''):
""" Various commands to control kodi Player """
self.logger.debug("Sending control to kodi %s", action)
try:
kodi = Server(self.url('/jsonrpc', True))
if action == 'seek':
player = kodi.Player.GetActivePlayers()[0]
return kodi.Player.Seek(playerid=player[u'playerid'], value=float(value))
elif action == 'jump':
player = kodi.Player.GetActivePlayers()[0]
return kodi.Player.GoTo(playerid=player[u'playerid'], to=int(value))
elif action == 'party':
return kodi.Player.Open(item={'partymode': 'audio'})
elif action == 'getsub':
try:
# Frodo
return kodi.Addons.ExecuteAddon(addonid='script.kodi.subtitles')
except:
pass
try:
# Gotham
return kodi.GUI.ActivateWindow(window='subtitlesearch')
except:
pass
elif action == 'volume':
return kodi.Application.SetVolume(volume=int(value))
else:
return kodi.Input.ExecuteAction(action=action)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to control kodi with action: %s", action)
return 'error'
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def SendText(self, text):
""" Send text to kodi """
self.logger.debug("Sending text to kodi: %s", text)
kodi = Server(self.url('/jsonrpc', True))
return kodi.Input.SendText(text=text)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def Subtitles(self, subtitle='off'):
""" Change the subtitles """
self.logger.debug("Changing subtitles to %s", subtitle)
try:
kodi = Server(self.url('/jsonrpc', True))
playerid = kodi.Player.GetActivePlayers()[0][u'playerid']
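# 'subtitle' is either a stream index (enable that subtitle stream) or the
# literal string 'off', which falls through to the ValueError branch below.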
try:
subtitle = int(subtitle)
kodi.Player.SetSubtitle(playerid=playerid, subtitle=subtitle, enable=True)
return "success"
except ValueError:
kodi.Player.SetSubtitle(playerid=playerid, subtitle='off')
return "Disabling subtitles."
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to set subtitle to specified value %s", subtitle)
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def Audio(self, audio):
""" Change the audio stream """
self.logger.debug("Chaning audio stream to %s", audio)
try:
kodi = Server(self.url('/jsonrpc', True))
playerid = kodi.Player.GetActivePlayers()[0][u'playerid']
return kodi.Player.SetAudioStream(playerid=playerid, stream=int(audio))
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to change audio stream to specified value %s", audio)
return
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def System(self, action=''):
""" Various system commands """
kodi = Server(self.url('/jsonrpc', True))
if action == 'Quit':
self.logger.info("Exiting kodi")
kodi.Application.Quit()
return 'Exiting kodi.'
elif action == 'Shutdown':
self.logger.info("Shutting down kodi")
kodi.System.Shutdown()
return 'Shutting down kodi.'
elif action == 'Suspend':
self.logger.info("Suspending kodi")
kodi.System.Suspend()
return 'Suspending kodi.'
elif action == 'Reboot':
self.logger.info("Rebooting kodi")
kodi.System.Reboot()
return 'Rebooting kodi.'
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def Wake(self):
""" Send WakeOnLan package """
self.logger.info("Waking up kodi-System")
try:
addr_byte = self.current.mac.split(':')
hw_addr = struct.pack('BBBBBB',
int(addr_byte[0], 16),
int(addr_byte[1], 16),
int(addr_byte[2], 16),
int(addr_byte[3], 16),
int(addr_byte[4], 16),
int(addr_byte[5], 16))
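# A Wake-on-LAN magic packet is six 0xff bytes followed by the target MAC
# repeated 16 times, sent here as a UDP broadcast to port 9.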
msg = '\xff' * 6 + hw_addr * 16
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.sendto(msg, ("255.255.255.255", 9))
self.logger.info("WOL package sent to %s", self.current.mac)
return "WOL package sent"
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to send WOL packet")
return "Unable to send WOL packet"
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def Run(self):
""" Send XBMC Starter packet """
self.logger.info("Sending XBMC Starter packet")
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.sendto("YatseStart-Xbmc", (self.current.host, self.current.starterport))
self.logger.info("XBMC Starter package sent to %s:%s", self.current.host, self.current.starterport)
return "XBMC Starter packet sent"
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to send XBMC Starter packet")
self.logger.debug('Have you installed http://yatse.leetzone.org/redmine/projects/androidwidget/wiki/XbmcStarter?')
return "Unable to send XBMC Starter packet"
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def Notify(self, text):
""" Create popup in kodi """
self.logger.debug("Sending notification to kodi: %s" % text)
kodi = Server(self.url('/jsonrpc', True))
image = '../interfaces/default/img/kodi-logo.png'
return kodi.GUI.ShowNotification(title='HTPC manager', message=text, image=image)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetRecentMovies(self, limit=5):
""" Get a list of recently added movies """
self.logger.debug("Fetching recently added movies")
try:
kodi = Server(self.url('/jsonrpc', True))
properties = ['title', 'year', 'runtime', 'plot', 'thumbnail', 'file',
'fanart', 'trailer', 'imdbnumber', 'studio', 'genre', 'rating']
limits = {'start': 0, 'end': int(limit)}
return kodi.VideoLibrary.GetRecentlyAddedMovies(properties=properties, limits=limits)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch recently added movies!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetRecentShows(self, limit=5):
""" Get a list of recently added TV Shows """
self.logger.debug("Fetching recently added TV Shows")
try:
kodi = Server(self.url('/jsonrpc', True))
properties = ['showtitle', 'season', 'episode', 'title', 'runtime',
'thumbnail', 'plot', 'fanart', 'file']
limits = {'start': 0, 'end': int(limit)}
return kodi.VideoLibrary.GetRecentlyAddedEpisodes(properties=properties, limits=limits)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch recently added TV Shows")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetRecentAlbums(self, limit=5):
""" Get a list of recently added music """
self.logger.debug("Fetching recently added Music")
try:
kodi = Server(self.url('/jsonrpc', True))
properties = ['artist', 'albumlabel', 'year', 'description', 'thumbnail']
limits = {'start': 0, 'end': int(limit)}
return kodi.AudioLibrary.GetRecentlyAddedAlbums(properties=properties, limits=limits)
except Exception, e:
self.logger.exception(e)
self.logger.error("Unable to fetch recently added Music!")
return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def Library(self, do='scan', lib='video'):
kodi = Server(self.url('/jsonrpc', True))
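# 'scan' asks Kodi to look for new items; 'clean' removes library entries whose
# source files are no longer available.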
if lib == 'video':
if do == 'clean':
return kodi.VideoLibrary.Clean()
else:
return kodi.VideoLibrary.Scan()
else:
if do == 'clean':
return kodi.AudioLibrary.Clean()
else:
return kodi.AudioLibrary.Scan()
def url(self, path='', auth=False):
""" Generate a URL for the RPC based on kodi settings """
url = self.current.host + ':' + str(self.current.port) + path
if auth and self.current.username and self.current.password:
url = self.current.username + ':' + self.current.password + '@' + url
self.logger.debug("URL: http://%s" % url)
return 'http://' + url
def auth(self):
""" Generate a base64 HTTP auth string based on settings """
if self.current.username and self.current.password:
return base64.encodestring('%s:%s' % (self.current.username, self.current.password)).strip('\n')
| scith/htpc-manager_ynh | sources/modules/kodi.py | Python | gpl-3.0 | 37,969 |