| code (string, 3–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (1 class) | license (15 classes) | size (int64, 3–1.05M) |
|---|---|---|---|---|---|
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from streamalert.shared import rule_table
from streamalert.shared.logger import get_logger
from streamalert_cli.terraform.generate import terraform_generate_handler
from streamalert_cli.terraform.helpers import terraform_runner
from streamalert_cli.utils import (
add_default_lambda_args,
CLICommand,
MutuallyExclusiveStagingAction,
set_parser_epilog,
)
LOGGER = get_logger(__name__)
class DeployCommand(CLICommand):
description = 'Deploy the specified AWS Lambda function(s)'
@classmethod
def setup_subparser(cls, subparser):
"""Add the deploy subparser: manage.py deploy [options]"""
set_parser_epilog(
subparser,
epilog=(
'''\
Example:
manage.py deploy --function rule alert
'''
)
)
# Flag to manually bypass rule staging for new rules upon deploy
# This only has an effect if rule staging is enabled
subparser.add_argument(
'--skip-rule-staging',
action='store_true',
help='Skip staging of new rules so they go directly into production'
)
# flag to manually demote specific rules to staging during deploy
subparser.add_argument(
'--stage-rules',
action=MutuallyExclusiveStagingAction,
default=set(),
help='Stage the rules provided in a space-separated list',
nargs='+'
)
# flag to manually bypass rule staging for specific rules during deploy
subparser.add_argument(
'--unstage-rules',
action=MutuallyExclusiveStagingAction,
default=set(),
help='Unstage the rules provided in a space-separated list',
nargs='+'
)
add_default_lambda_args(subparser)
@classmethod
def handler(cls, options, config):
"""CLI handler for deploying new versions of Lambda functions
Args:
options (argparse.Namespace): Parsed argparse namespace from the CLI
config (CLIConfig): Loaded StreamAlert config
Returns:
bool: False if errors occurred, True otherwise
"""
# Make sure the Terraform code is up to date
if not terraform_generate_handler(config=config):
return False
if not deploy(config, options.functions, options.clusters):
return False
# Update the rule table now if the rules engine is being deployed
if 'rule' in set(options.functions):
_update_rule_table(options, config)
return True
def deploy(config, functions, clusters=None):
"""Deploy the functions
Args:
functions (set): Set of functions being deployed
config (CLIConfig): Loaded StreamAlert config
clusters (set=None): Optional clusters to target for this deploy
Returns:
bool: False if errors occurred, True otherwise
"""
LOGGER.info('Deploying: %s', ', '.join(sorted(functions)))
# Terraform apply only to the module which contains our lambda functions
clusters = clusters or config.clusters()
deploy_targets = _lambda_terraform_targets(config, functions, clusters)
LOGGER.debug('Applying terraform targets: %s', ', '.join(sorted(deploy_targets)))
# Terraform applies the new package and publishes a new version
return terraform_runner(config, targets=deploy_targets)
def _update_rule_table(options, config):
"""Update the rule table with any staging information
Args:
options (argparse.Namespace): Various options from the CLI needed for actions
config (CLIConfig): The loaded StreamAlert config
"""
# If rule staging is disabled, do not update the rules table
if not config['global']['infrastructure']['rule_staging'].get('enabled', False):
return
# Get the rule import paths to load
rule_import_paths = config['global']['general']['rule_locations']
table_name = '{}_streamalert_rules'.format(config['global']['account']['prefix'])
table = rule_table.RuleTable(table_name, *rule_import_paths)
table.update(options.skip_rule_staging)
if options.stage_rules or options.unstage_rules:
# Create a dictionary of rule_name: stage=True|False
rules = {rule_name: False for rule_name in options.unstage_rules}
rules.update({rule_name: True for rule_name in options.stage_rules})
for rule, stage in rules.items():
table.toggle_staged_state(rule, stage)
def _lambda_terraform_targets(config, functions, clusters):
"""Return any terraform targets for the function(s) being deployed
NOTE: This is very hacky and should go away. A complete refactor of how we perform
terraform generation would help with this, but this hack will do for now.
Args:
config (CLIConfig): The loaded StreamAlert config
functions (list): Functions to target during deploy
clusters (list): Clusters to target during deploy
Returns:
set: Terraform module paths to target during this deployment
"""
target_mapping = {
'alert': {
'targets': {
'module.alert_processor_iam',
'module.alert_processor_lambda',
},
'enabled': True # required function
},
'alert_merger': {
'targets': {
'module.alert_merger_iam',
'module.alert_merger_lambda',
},
'enabled': True # required function
},
'athena': {
'targets': {
'module.athena_partitioner_iam',
'module.athena_partitioner_lambda',
},
'enabled': True # required function
},
'rule': {
'targets': {
'module.rules_engine_iam',
'module.rules_engine_lambda',
},
'enabled': True # required function
},
'classifier': {
'targets': {
'module.classifier_{}_{}'.format(cluster, suffix)
for suffix in {'lambda', 'iam'}
for cluster in clusters
},
'enabled': bool(clusters) # one cluster at least is required
},
'apps': {
'targets': {
'module.app_{}_{}_{}'.format(app_info['app_name'], cluster, suffix)
for suffix in {'lambda', 'iam'}
for cluster in clusters
for app_info in config['clusters'][cluster]['modules'].get(
'streamalert_apps', {}
).values()
if 'app_name' in app_info
},
'enabled': any(
info['modules'].get('streamalert_apps')
for info in config['clusters'].values()
)
},
'rule_promo': {
'targets': {
'module.rule_promotion_iam',
'module.rule_promotion_lambda',
},
'enabled': config['lambda'].get('rule_promotion_config', {}).get('enabled', False)
},
'scheduled_queries': {
'targets': {
'module.scheduled_queries',
},
'enabled': config['scheduled_queries'].get('enabled', False),
},
'threat_intel_downloader': {
'targets': {
'module.threat_intel_downloader',
'module.threat_intel_downloader_iam',
},
'enabled': config['lambda'].get('threat_intel_downloader_config', False),
}
}
targets = set()
for function in functions:
if not target_mapping[function]['enabled']:
LOGGER.warning('Function is not enabled and will be ignored: %s', function)
continue
targets.update(target_mapping[function]['targets'])
return targets
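# Illustrative sketch (added for clarity; not part of the original module): a
# deploy invoked as `manage.py deploy --function rule alert` resolves, through
# the mapping above, to terraform targets such as
# {'module.rules_engine_iam', 'module.rules_engine_lambda',
#  'module.alert_processor_iam', 'module.alert_processor_lambda'},
# which terraform_runner(config, targets=...) then applies.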
| airbnb/streamalert | streamalert_cli/manage_lambda/deploy.py | Python | apache-2.0 | 8,527 |
from setuptools import setup, find_packages # Always prefer setuptools over distutils
from codecs import open # To use a consistent encoding
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='''ckanext-pdfview''',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# http://packaging.python.org/en/latest/tutorial.html#version
version='0.0.4',
description='''View plugin for rendering PDFs on the browser''',
long_description=long_description,
# The project's main homepage.
url='https://github.com/ckan/ckanext-pdfview',
# Author details
author='''Open Knowledge''',
author_email='''info@ckan.org''',
# Choose your license
license='AGPL',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
# What does your project relate to?
keywords='''CKAN PDF''',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
# List run-time dependencies here. These will be installed by pip when your
# project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files
install_requires=[],
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
include_package_data=True,
package_data={
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages.
# see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points='''
[ckan.plugins]
pdf_view=ckanext.pdfview.plugin:PdfView
''',
)
| ESRC-CDRC/ckan-ckanext-pdfview | setup.py | Python | agpl-3.0 | 3,132 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 Vimig Socrates <vimig.socrates@gmail.com>
# Copyright (C) 2016 Loreto Parisi <loretoparisi@gmail.com>
# Copyright (C) 2016 Silvio Olivastri <silvio.olivastri@gmail.com>
# Copyright (C) 2016 Radim Rehurek <radim@rare-technologies.com>
"""This script allows converting word-vectors from word2vec format into Tensorflow 2D tensor and metadata format.
This script used for for word-vector visualization on `Embedding Visualization <http://projector.tensorflow.org/>`_.
How to use
----------
#. Convert your word-vector with this script (for example, we'll use model from
`gensim-data <https://rare-technologies.com/new-download-api-for-pretrained-nlp-models-and-datasets-in-gensim/>`_) ::
python -m gensim.downloader -d glove-wiki-gigaword-50 # download model in word2vec format
python -m gensim.scripts.word2vec2tensor -i ~/gensim-data/glove-wiki-gigaword-50/glove-wiki-gigaword-50.gz \
-o /tmp/my_model_prefix
#. Open http://projector.tensorflow.org/
#. Click "Load Data" button from the left menu.
#. Select "Choose file" in "Load a TSV file of vectors." and choose "/tmp/my_model_prefix_tensor.tsv" file.
#. Select "Choose file" in "Load a TSV file of metadata." and choose "/tmp/my_model_prefix_metadata.tsv" file.
#. ???
#. PROFIT!
For more information about TensorBoard TSV format please visit:
https://www.tensorflow.org/versions/master/how_tos/embedding_viz/
Command line arguments
----------------------
.. program-output:: python -m gensim.scripts.word2vec2tensor --help
:ellipsis: 0, -7
"""
import os
import sys
import logging
import argparse
import gensim
from gensim import utils
logger = logging.getLogger(__name__)
def word2vec2tensor(word2vec_model_path, tensor_filename, binary=False):
"""Convert file in Word2Vec format and writes two files 2D tensor TSV file.
File "tensor_filename"_tensor.tsv contains word-vectors, "tensor_filename"_metadata.tsv contains words.
Parameters
----------
word2vec_model_path : str
Path to file in Word2Vec format.
tensor_filename : str
Prefix for output files.
binary : bool, optional
True if input file in binary format.
"""
model = gensim.models.KeyedVectors.load_word2vec_format(word2vec_model_path, binary=binary)
outfiletsv = tensor_filename + '_tensor.tsv'
outfiletsvmeta = tensor_filename + '_metadata.tsv'
with utils.open(outfiletsv, 'wb') as file_vector, utils.open(outfiletsvmeta, 'wb') as file_metadata:
for word in model.index2word:
file_metadata.write(gensim.utils.to_utf8(word) + gensim.utils.to_utf8('\n'))
vector_row = '\t'.join(str(x) for x in model[word])
file_vector.write(gensim.utils.to_utf8(vector_row) + gensim.utils.to_utf8('\n'))
logger.info("2D tensor file saved to %s", outfiletsv)
logger.info("Tensor metadata file saved to %s", outfiletsvmeta)
if __name__ == "__main__":
logging.basicConfig(format='%(asctime)s - %(module)s - %(levelname)s - %(message)s', level=logging.INFO)
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=__doc__[:-138])
parser.add_argument("-i", "--input", required=True, help="Path to input file in word2vec format")
parser.add_argument("-o", "--output", required=True, help="Prefix path for output files")
parser.add_argument(
"-b", "--binary", action='store_const', const=True, default=False,
help="Set this flag if word2vec model in binary format (default: %(default)s)"
)
args = parser.parse_args()
logger.info("running %s", ' '.join(sys.argv))
word2vec2tensor(args.input, args.output, args.binary)
logger.info("finished running %s", os.path.basename(sys.argv[0]))
| napsternxg/gensim | gensim/scripts/word2vec2tensor.py | Python | gpl-3.0 | 3,850 |
"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
def get_scanner(hass, config):
"""Setup Unifi device_tracker."""
from unifi.controller import Controller
if not validate_config(config, {DOMAIN: [CONF_USERNAME,
CONF_PASSWORD]},
_LOGGER):
_LOGGER.error('Invalid configuration')
return False
this_config = config[DOMAIN]
host = this_config.get(CONF_HOST, 'localhost')
username = this_config.get(CONF_USERNAME)
password = this_config.get(CONF_PASSWORD)
try:
port = int(this_config.get(CONF_PORT, 8443))
except ValueError:
_LOGGER.error('Invalid port (must be numeric like 8443)')
return False
try:
ctrl = Controller(host, username, password, port, 'v4')
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to connect to unifi: %s', ex)
return False
return UnifiScanner(ctrl)
class UnifiScanner(object):
"""Provide device_tracker support from Unifi WAP client data."""
def __init__(self, controller):
"""Initialize the scanner."""
self._controller = controller
self._update()
def _update(self):
"""Get the clients from the device."""
try:
clients = self._controller.get_clients()
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to scan clients: %s', ex)
clients = []
self._clients = {client['mac']: client for client in clients}
def scan_devices(self):
"""Scan for devices."""
self._update()
return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
return the hostname if it has been detected.
"""
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname')
_LOGGER.debug('Device %s name %s', mac, name)
return name
| mikaelboman/home-assistant | homeassistant/components/device_tracker/unifi.py | Python | mit | 2,556 |
#
# File Header
#
# Define vowels
vowels = "aeiouAEIOU" #I solved the case sensitivity by adding the vowels in both lowercase and uppercase.
# Ask for word
word = input("Please enter a word: ")
# Check only the first letter: a word that starts with a vowel gets "yay" appended,
# otherwise the first letter moves to the end and "ay" is appended.
# For example, "apple" -> "Appleyay" and "banana" -> "Ananabay".
if word[0] in vowels:
    pig = (word + "yay").capitalize()
else:
    pig = (word[1:] + word[0] + "ay").capitalize()
print(pig)
| josiahhardacre/Week-Four-Assignment | pigify.py | Python | mit | 562 |
from PyQt5.QtWidgets import QWidget, QDialogButtonBox
from PyQt5.QtCore import QEvent, Qt
from .password_input_uic import Ui_PasswordInputWidget
class PasswordInputView(QWidget, Ui_PasswordInputWidget):
"""
The view of the password input component
"""
def __init__(self, parent):
# construct from qtDesigner
super().__init__(parent)
self.setupUi(self)
self.button_box = QDialogButtonBox(self)
self.button_box.setOrientation(Qt.Horizontal)
self.button_box.setStandardButtons(QDialogButtonBox.Cancel | QDialogButtonBox.Ok)
self.button_box.button(QDialogButtonBox.Ok).setEnabled(False)
self.layout().addWidget(self.button_box)
self.button_box.hide()
def error(self, text):
self.label_info.setText(text)
self.button_box.button(QDialogButtonBox.Ok).setEnabled(False)
def clear(self):
self.edit_password.clear()
self.edit_secret_key.clear()
def valid(self):
self.label_info.setText(self.tr("Password is valid"))
self.button_box.button(QDialogButtonBox.Ok).setEnabled(True)
def changeEvent(self, event):
"""
Intercept the LanguageChange event to retranslate the UI
:param QEvent event: Event
:return:
"""
if event.type() == QEvent.LanguageChange:
self.retranslateUi(self)
return super(PasswordInputView, self).changeEvent(event)
| ucoin-io/cutecoin | src/sakia/gui/sub/password_input/view.py | Python | mit | 1,433 |
# -*- coding: utf-8 -*-
import re
from module.plugins.internal.XFSHoster import XFSHoster, create_getInfo
class NosuploadCom(XFSHoster):
__name__ = "NosuploadCom"
__type__ = "hoster"
__version__ = "0.31"
__pattern__ = r'http://(?:www\.)?nosupload\.com/\?d=\w{12}'
__description__ = """Nosupload.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("igel", "igelkun@myopera.com")]
SIZE_PATTERN = r'<p><strong>Size:</strong> (?P<S>[\d.,]+) (?P<U>[\w^_]+)</p>'
LINK_PATTERN = r'<a class="select" href="(http://.+?)">Download</a>'
WAIT_PATTERN = r'Please wait.*?>(\d+)</span>'
def getDownloadLink(self):
# stage1: press the "Free Download" button
data = self.getPostParameters()
self.html = self.load(self.pyfile.url, post=data, decode=True)
# stage2: wait some time and press the "Download File" button
data = self.getPostParameters()
wait_time = re.search(self.WAIT_PATTERN, self.html, re.M | re.S).group(1)
self.logDebug("Hoster told us to wait %s seconds" % wait_time)
self.wait(wait_time)
self.html = self.load(self.pyfile.url, post=data, decode=True)
# stage3: get the download link
return re.search(self.LINK_PATTERN, self.html, re.S).group(1)
getInfo = create_getInfo(NosuploadCom)
| Zerknechterer/pyload | module/plugins/hoster/NosuploadCom.py | Python | gpl-3.0 | 1,353 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-09-30 06:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('blog', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('email', models.EmailField(max_length=255)),
('url', models.URLField(blank=True)),
('text', models.TextField()),
('created_time', models.DateTimeField(auto_now_add=True)),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Post')),
],
),
]
| RebelBIrd/Blog | comments/migrations/0001_initial.py | Python | mit | 962 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Neural Clustering."""
from neural_clustering import data_generators
from neural_clustering import models
from neural_clustering import utils
__all__ = ["data_generators", "models", "utils"]
| tensorflow/neural-structured-learning | research/neural_clustering/__init__.py | Python | apache-2.0 | 770 |
import PyStatGen as pst
import yaml
import argparse
import time
from watcher import Watcher
def main():
prs = argparse.ArgumentParser(description='Generate the website anew.')
prs.add_argument('-c', '--conf', type=str,
default="example_configuration.yml", dest='conf_path',
help='Path to the configuration file.')
args = prs.parse_args()
conf = yaml.safe_load(open(args.conf_path, 'r').read())
task_manager = pst.Tasker(conf)
watcher = Watcher(task_manager)
watcher.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
watcher.stop()
watcher.join()
if __name__ == "__main__":
main()
| olivierdeserres/dropbox-generator | __watch.py | Python | mit | 708 |
"""Creates beautiful visualizations of the publication database."""
import datetime
import sqlite3 as sql
import numpy as np
from astropy import log
from matplotlib import pyplot as plt
import matplotlib.patheffects as path_effects
import matplotlib as mpl
from matplotlib import style
import seaborn as sns
from kpub import PublicationDB
MISSIONS = ['k2']
SCIENCES = ['exoplanets', 'astrophysics']
output_fn = 'publications-per-year-k2.png'
db = PublicationDB()
first_year = 2014
barwidth = 0.75
extrapolate = True
current_year = datetime.datetime.now().year
palette = sns.color_palette(['#f1c40f', '#2980b9'])
style.use('../styles/black.mplstyle')
plt.rc('xtick.major', size=0)
plt.rc('ytick.major', size=0)
# Initialize a dictionary to contain the data to plot
counts = {}
for mission in MISSIONS:
counts[mission] = {}
for year in range(first_year, current_year + 1):
counts[mission][year] = 0
cur = db.con.execute("SELECT year, COUNT(*) FROM pubs "
"WHERE mission = ? "
"AND year >= '2014' "
"GROUP BY year;",
[mission])
rows = list(cur.fetchall())
for row in rows:
counts[mission][int(row[0])] = row[1]
# Now make the actual plot
fig = plt.figure(figsize=(8, 4.5))
ax = fig.add_subplot(111)
plt.bar(np.array(list(counts['k2'].keys())) - 0.5*barwidth,
counts['k2'].values(),
label='K2',
facecolor=palette[0],
edgecolor='black',
width=barwidth)
# Also plot the extrapolated prediction for the current year
if extrapolate:
now = datetime.datetime.now()
fraction_of_year_passed = float(now.strftime("%-j")) / 365.2425
current_total = (counts['k2'][current_year])
expected = (1/fraction_of_year_passed - 1) * current_total
plt.bar(current_year - 0.5*barwidth,
expected,
bottom=current_total,
label='Extrapolation',
facecolor='#34495e',
edgecolor='black',
width=barwidth)
# Aesthetics
plt.ylabel("Publications per year", fontsize=18)
ax.get_xaxis().get_major_formatter().set_useOffset(False)
plt.xticks(range(first_year - 1, current_year + 1), fontsize=18)
plt.yticks(range(0, 151, 50), fontsize=18)
plt.xlim([first_year - 0.75*barwidth, current_year + 0.75*barwidth])
"""
plt.legend(bbox_to_anchor=(0.1, 1),
loc='upper left',
ncol=3,
borderaxespad=0.,
handlelength=0.8,
frameon=False,
fontsize=18)
"""
# Disable spines
ax.spines["left"].set_visible(False)
ax.spines["right"].set_visible(False)
ax.spines["top"].set_visible(False)
ax.spines["bottom"].set_visible(False)
# Only show bottom and left ticks
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
# Only show horizontal grid lines
ax.grid(axis='y')
n_pub = sum(counts['k2'].values())
plt.suptitle("K2 Contributed to "
"{} Publications So Far".format(n_pub),
fontsize=22)
plt.tight_layout(rect=(0, 0, 1, 0.92), h_pad=1.5)
log.info("Writing {}".format(output_fn))
plt.savefig(output_fn)
plt.close()
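# Worked example of the extrapolation above (illustrative; not part of the
# original script): with half of the year elapsed (fraction_of_year_passed = 0.5)
# and current_total = 30 publications, expected = (1/0.5 - 1) * 30 = 30, so the
# extrapolated bar projects roughly 60 publications for the full year.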
| barentsen/exoplanet-charts | publication-stats/publications-per-year-k2.py | Python | mit | 3,121 |
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Routines for configuring Murano-Agent
"""
from oslo_config import cfg
from oslo_log import log as logging
from muranoagent import version
CONF = cfg.CONF
opts = [
cfg.StrOpt('storage',
default='/var/murano/plans',
help='Directory to store execution plans'),
cfg.StrOpt('engine_key',
help='Public key of murano-engine')
]
message_routing_opt = cfg.BoolOpt(
'enable_dynamic_result_queue',
help='Enable taking dynamic result queue from task field reply_to',
default=False)
rabbit_opts = [
cfg.HostAddressOpt('host',
help='The RabbitMQ broker address which used for '
'communication with Murano guest agents.',
default='localhost'),
cfg.IntOpt('port', help='The RabbitMQ broker port.', default=5672),
cfg.StrOpt('login',
help='The RabbitMQ login.',
default='guest'),
cfg.StrOpt('password',
help='The RabbitMQ password.',
secret=True,
default='guest'),
cfg.StrOpt('virtual_host',
help='The RabbitMQ virtual host.',
default='/'),
cfg.BoolOpt('ssl',
help='Boolean flag to enable SSL communication through the '
'RabbitMQ broker between murano-engine and guest agents.',
default=False),
cfg.StrOpt('ssl_version',
default='',
help='SSL version to use (valid only if SSL enabled). '
'Valid values are TLSv1 and SSLv23. SSLv2, SSLv3, '
'TLSv1_1, and TLSv1_2 may be available on some '
'distributions.'),
cfg.StrOpt('ca_certs',
help='SSL cert file (valid only if SSL enabled).',
default=''),
cfg.BoolOpt('insecure', default=False,
help='This option explicitly allows Murano to perform '
'"insecure" SSL connections to RabbitMQ'),
cfg.StrOpt('result_routing_key',
help='This value should be obtained from API'),
cfg.StrOpt('result_exchange',
help='This value must be obtained from API',
default=''),
cfg.StrOpt('input_queue',
help='This value must be obtained from API',
default='')
]
CONF.register_opts(opts)
CONF.register_cli_opt(message_routing_opt)
CONF.register_opts(rabbit_opts, group='rabbitmq')
logging.register_options(CONF)
def parse_args(args=None, usage=None, default_config_files=None):
version_string = version.version_info.version_string()
CONF(args=args,
project='muranoagent',
version=version_string,
usage=usage,
default_config_files=default_config_files)
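# Illustrative usage sketch (added for clarity; not part of the original module):
# once parse_args() has run, the options registered above are available on CONF,
# e.g. CONF.storage ('/var/murano/plans' by default) and, from the 'rabbitmq'
# group, CONF.rabbitmq.host and CONF.rabbitmq.port.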
| openstack/murano-agent | muranoagent/common/config.py | Python | apache-2.0 | 3,423 |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsImageCache.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2018 by Nyall Dawson'
__date__ = '02/10/2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
import qgis # NOQA
import os
import socketserver
import threading
import http.server
import time
from qgis.PyQt.QtCore import QDir, QCoreApplication, QSize
from qgis.PyQt.QtGui import QColor, QImage, QPainter
from qgis.core import (QgsImageCache, QgsRenderChecker, QgsApplication, QgsMultiRenderChecker)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
start_app()
TEST_DATA_DIR = unitTestDataPath()
class SlowHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
def do_GET(self):
time.sleep(1)
return http.server.SimpleHTTPRequestHandler.do_GET(self)
class TestQgsImageCache(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Bring up a simple HTTP server, for remote SVG tests
os.chdir(unitTestDataPath() + '')
handler = SlowHTTPRequestHandler
cls.httpd = socketserver.TCPServer(('localhost', 0), handler)
cls.port = cls.httpd.server_address[1]
cls.httpd_thread = threading.Thread(target=cls.httpd.serve_forever)
cls.httpd_thread.setDaemon(True)
cls.httpd_thread.start()
def setUp(self):
self.report = "<h1>Python QgsImageCache Tests</h1>\n"
self.fetched = False
QgsApplication.imageCache().remoteImageFetched.connect(self.imageFetched)
def tearDown(self):
report_file_path = "%s/qgistest.html" % QDir.tempPath()
with open(report_file_path, 'a') as report_file:
report_file.write(self.report)
def imageFetched(self):
self.fetched = True
def waitForFetch(self):
self.fetched = False
while not self.fetched:
QCoreApplication.processEvents()
def testRemoteImage(self):
"""Test fetching remote image."""
url = 'http://localhost:{}/qgis_local_server/sample_image.png'.format(str(TestQgsImageCache.port))
image, in_cache = QgsApplication.imageCache().pathAsImage(url, QSize(100, 100), True, 1.0)
# first should be waiting image
self.assertTrue(self.imageCheck('Remote Image', 'waiting_image', image))
self.assertFalse(QgsApplication.imageCache().originalSize(url).isValid())
self.waitForFetch()
# second should be correct image
image, in_cache = QgsApplication.imageCache().pathAsImage(url, QSize(100, 100), True, 1.0)
self.assertTrue(self.imageCheck('Remote Image', 'remote_image', image))
self.assertEqual(QgsApplication.imageCache().originalSize(url), QSize(511, 800), 1.0)
def testRemoteImageMissing(self):
"""Test fetching remote image with bad url"""
url = 'http://localhost:{}/qgis_local_server/xxx.png'.format(str(TestQgsImageCache.port)) # oooo naughty
image, in_cache = QgsApplication.imageCache().pathAsImage(url, QSize(100, 100), 1.0, True)
self.assertTrue(self.imageCheck('Remote image missing', 'waiting_image', image))
def testRemoteImageBlocking(self):
"""Test fetching remote image."""
# remote not yet requested so not in cache
url = 'http://localhost:{}/qgis_local_server/logo_2017.png'.format(str(TestQgsImageCache.port))
image, in_cache = QgsApplication.imageCache().pathAsImage(url, QSize(100, 100), True, 1.0, blocking=1)
# first should be correct image
self.assertTrue(self.imageCheck('Remote image sync', 'remote_image_blocking', image))
# remote probably in cache
url = 'http://localhost:{}/qgis_local_server/sample_image.png'.format(str(TestQgsImageCache.port))
image, in_cache = QgsApplication.imageCache().pathAsImage(url, QSize(100, 100), True, 1.0, blocking=1)
self.assertTrue(self.imageCheck('Remote Image', 'remote_image', image))
# remote probably in cache
url = 'http://localhost:{}/qgis_local_server/xxx.png'.format(str(TestQgsImageCache.port)) # oooo naughty
image, in_cache = QgsApplication.imageCache().pathAsImage(url, QSize(100, 100), True, 1.0, blocking=1)
self.assertTrue(self.imageCheck('Remote image missing', 'waiting_image', image))
def imageCheck(self, name, reference_image, image):
self.report += "<h2>Render {}</h2>\n".format(name)
temp_dir = QDir.tempPath() + '/'
file_name = temp_dir + 'image_' + name + ".png"
output_image = QImage(image.size(), QImage.Format_RGB32)
QgsMultiRenderChecker.drawBackground(output_image)
painter = QPainter(output_image)
painter.drawImage(0, 0, image)
painter.end()
output_image.save(file_name, "PNG")
checker = QgsRenderChecker()
checker.setControlPathPrefix("image_cache")
checker.setControlName("expected_" + reference_image)
checker.setRenderedImage(file_name)
checker.setColorTolerance(2)
result = checker.compareImages(name, 20)
self.report += checker.report()
print((self.report))
return result
if __name__ == '__main__':
unittest.main()
| tomtor/QGIS | tests/src/python/test_qgsimagecache.py | Python | gpl-2.0 | 5,431 |
# $Id$
class main(wt.TemplateCode):
class request(wt.TemplateCode):
def main(self, template):
for (self.key, self.val) in self.req.params.items():
self.process(template)
class cookies(wt.TemplateCode):
def main(self, template):
for (self.key, self.val) in self.req.cookies.items():
self.process(template)
class environ(wt.TemplateCode):
def main(self, template):
for (self.key, self.val) in self.req.environ.items():
self.process(template)
| jribbens/jonpy | example/wt/printenv.html.py | Python | mit | 504 |
""" Strategies to Traverse a Tree """
from __future__ import print_function, division
from .util import basic_fns
from sympy.strategies.core import chain, do_one
def top_down(rule, fns=basic_fns):
""" Apply a rule down a tree running it on the top nodes first """
return chain(rule, lambda expr: sall(top_down(rule, fns), fns)(expr))
def bottom_up(rule, fns=basic_fns):
""" Apply a rule down a tree running it on the bottom nodes first """
return chain(lambda expr: sall(bottom_up(rule, fns), fns)(expr), rule)
def top_down_once(rule, fns=basic_fns):
""" Apply a rule down a tree - stop on success """
return do_one(rule, lambda expr: sall(top_down(rule, fns), fns)(expr))
def bottom_up_once(rule, fns=basic_fns):
""" Apply a rule up a tree - stop on success """
return do_one(lambda expr: sall(bottom_up(rule, fns), fns)(expr), rule)
def sall(rule, fns=basic_fns):
""" Strategic all - apply rule to args """
op, new, children, leaf = map(fns.get, ('op', 'new', 'children', 'leaf'))
def all_rl(expr):
if leaf(expr):
return expr
else:
args = map(rule, children(expr))
return new(op(expr), *args)
return all_rl
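# Illustrative sketch (added for clarity; not part of the original module): a
# rule applied with top_down visits the root first and then every child, so a
# rule that doubles Integer leaves turns x + 3 into x + 6. The helper below is
# defined but never called here.
def _example_top_down():
    from sympy import Integer, Symbol
    x = Symbol('x')
    double_ints = lambda e: Integer(2) * e if isinstance(e, Integer) else e
    return top_down(double_ints)(x + 3)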
| drufat/sympy | sympy/strategies/traverse.py | Python | bsd-3-clause | 1,215 |
"""
*******************************************************
*
* plot_marginals - PLOT MARGINALS
*
* License: Apache 2.0
* Written by: Michael Slugocki
* Created on: September 10, 2018
* Last updated: September 18, 2018
*
*******************************************************
"""
#################################################################
# IMPORT MODULES
#################################################################
import numpy as np
import matplotlib.pyplot as plt
#################################################################
# PLOT MARGINAL DISTRIBUTION
#################################################################
def plot_marginals(metrics):
"""Plots marginal distributions for each parameter of
the fitted model.
Keyword arguments:
metrics -- contain important metrics about fitted model (dictionary)
"""
# Generate basic plot of marginal distributions
fig, axes = plt.subplots(2, 2,
subplot_kw=dict(polar=False),
figsize = (7,6))
# Scale parameter
axes[0,0].set_xlabel('Scale')
axes[0,1].set_xlabel('Slope')
axes[1,0].set_xlabel('Gamma')
axes[1,1].set_xlabel('Lambda')
# Loop through and plot marginals that exist
counter = 0
idx = np.array([[0,0], [0,1], [1,0], [1,1]])
for keys in ['scale', 'slope', 'gamma', 'lambda']:
axes[idx[counter,0],idx[counter,1]].set_ylabel('Probability')
if metrics['Marginals'][keys] is not np.nan and metrics['Marginals'][keys].size > 1:
axes[idx[counter,0],idx[counter,1]].plot(metrics['Marginals_X'][keys],
metrics['Marginals'][keys],
lw=3,
color='#5998ff')
axes[idx[counter,0],idx[counter,1]].fill_between(metrics['Marginals_X'][keys],
metrics['Marginals'][keys], color='#5998ff', alpha = .4)
elif metrics['Marginals'][keys].size == 1:
axes[idx[counter,0],idx[counter,1]].text(0.5,0.5, "None",
horizontalalignment='center',
verticalalignment='center',
transform=axes[idx[counter,0],idx[counter,1]].transAxes)
# Update counter
counter += 1
plt.tight_layout()
plt.show()
| SlugocM/bayesfit | bayesfit/plot_Marginals.py | Python | apache-2.0 | 2,285 |
#!/usr/bin/env python3
import sys
import os
import hashlib
def sha256_file(filepath):
sha256 = hashlib.sha256()
with open(filepath, "rb") as fh:
while True:
chunk = fh.read(4096)
if not chunk:
break
sha256.update(chunk)
return sha256.hexdigest()
def main():
# read index.html
index_html = sys.argv[1]
with open(index_html, "r") as fh:
html = fh.read()
assets = sys.argv[2:]
new_names = {}
for asset in assets:
assert asset in html, "asset not found in html: " + asset
# asset paths are relative to index.html
# determine actual path of each asset
asset_path = os.path.join(os.path.dirname(index_html), asset)
asset_dir = os.path.dirname(asset_path)
asset_name = os.path.basename(asset_path)
# hash asset file
sha256 = sha256_file(asset_path)
# build new asset filename based on hash
name, ext = os.path.splitext(asset_name)
name = name + "." + sha256[:10] + ext
# rename asset on disk
os.rename(asset_path, os.path.join(asset_dir, name))
# save mapping from old asset path to new asset path (relative to index.html)
asset_rel_dir = os.path.dirname(asset)
new_names[asset] = os.path.join(asset_rel_dir, name)
# replace references in index.html with new asset filenames
for old, new in new_names.items():
html = html.replace(old, new)
with open(index_html, "w") as fh:
fh.write(html)
if __name__ == "__main__":
main()
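# Example of the renaming scheme above (illustrative; not part of the original
# script): an asset referenced from index.html as "static/app.js" whose contents
# hash to 3f79bb7b43... is renamed on disk to "static/app.3f79bb7b43.js", and the
# reference inside index.html is rewritten to match.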
| ghenga/ghenga-ui | utils/cachebust.py | Python | mit | 1,600 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('form_processor', '0083_migrate_delta_4_switch_columns'),
]
operations = [
migrations.SeparateDatabaseAndState(
state_operations=[
migrations.AlterField(
model_name='ledgertransaction',
name='delta',
field=models.BigIntegerField(default=0),
),
]
),
]
| dimagi/commcare-hq | corehq/form_processor/migrations/0084_migrate_delta_5_alter_field.py | Python | bsd-3-clause | 509 |
__author__ = 'vesna'
| marinkaz/orange3 | Orange/canvas/report/tests/__init__.py | Python | bsd-2-clause | 21 |
#!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import fcntl
import json
import time
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/objectvalidator -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_objectvalidator
short_description: Validate OpenShift objects
description:
- Validate OpenShift objects
options:
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
author:
- "Mo Khan <monis@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
oc_objectvalidator:
- name: run oc_objectvalidator
oc_objectvalidator:
register: oc_objectvalidator
'''
# -*- -*- -*- End included fragment: doc/objectvalidator -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception): # pragma: no cover
''' Exception class for Yedit '''
pass
# pylint: disable=too-many-public-methods
class Yedit(object): # pragma: no cover
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
filename=None,
content=None,
content_type='yaml',
separator='.',
backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict is None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for separator '''
return self._separator
@separator.setter
def separator(self, inc_sep):
''' setter method for separator '''
self._separator = inc_sep
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
return False
return True
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def remove_entry(data, key, index=None, value=None, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
if value is not None:
data.pop(value)
elif index is not None:
raise YeditException("remove_entry for a dictionary does not have an index {}".format(index))
else:
data.clear()
return True
elif key == '' and isinstance(data, list):
ind = None
if value is not None:
try:
ind = data.index(value)
except ValueError:
return False
elif index is not None:
ind = index
else:
del data[:]
if ind is not None:
data.pop(ind)
return True
if not (key and Yedit.valid_key(key, sep)) and \
isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}
key = a#b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
fcntl.flock(yfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
yfd.write(contents)
fcntl.flock(yfd, fcntl.LOCK_UN)
os.rename(tmp_filename, filename)
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, '{}.{}'.format(self.filename, time.strftime("%Y%m%dT%H%M%S")))
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripDumper if supported.
if self.content_type == 'yaml':
try:
Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except AttributeError:
Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
elif self.content_type == 'json':
Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
else:
raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
'Please specify a content_type of yaml or json.')
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. {}'.format(err))
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item from a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if key_or_item in entry:
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path, index=None, value=None):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
'value=[{}] type=[{}]'.format(value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index is not None:
ind = index
if ind is not None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
# already exists, return
if ind is not None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry == value:
return (False, self.yaml_dict)
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is None:
return (False, self.yaml_dict)
# When path equals "" it is a special case.
# "" refers to the root of the document
# Only update the root path (entire document) when its a list or dict
if path == '':
if isinstance(result, list) or isinstance(result, dict):
self.yaml_dict = result
return (True, self.yaml_dict)
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is not None:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
try:
# AUDIT:maybe-no-member makes sense due to different yaml libraries
# pylint: disable=maybe-no-member
curr_value = yaml.safe_load(invalue, Loader=yaml.RoundTripLoader)
except AttributeError:
curr_value = yaml.safe_load(invalue)
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
# It came in as a string but you didn't specify value_type as string
# we will convert to bool if it matches any of the above cases
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
# There is a special case where '' will turn into None after yaml loading it so skip
if isinstance(inc_value, str) and inc_value == '':
pass
# If vtype is not str then go ahead and attempt to yaml load it.
elif isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.safe_load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming value. ' +
'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
return inc_value
@staticmethod
def process_edits(edits, yamlfile):
'''run through a list of edits and process them one-by-one'''
results = []
for edit in edits:
value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
if edit.get('action') == 'update':
# pylint: disable=line-too-long
curr_value = Yedit.get_curr_value(
Yedit.parse_value(edit.get('curr_value')),
edit.get('curr_value_format'))
rval = yamlfile.update(edit['key'],
value,
edit.get('index'),
curr_value)
elif edit.get('action') == 'append':
rval = yamlfile.append(edit['key'], value)
else:
rval = yamlfile.put(edit['key'], value)
if rval[0]:
results.append({'key': edit['key'], 'edit': rval[1]})
return {'changed': len(results) > 0, 'results': results}
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params):
'''perform the idempotent crud operations'''
yamlfile = Yedit(filename=params['src'],
backup=params['backup'],
content_type=params['content_type'],
separator=params['separator'])
state = params['state']
if params['src']:
rval = yamlfile.load()
if yamlfile.yaml_dict is None and state != 'present':
return {'failed': True,
'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
'file exists, that it has correct permissions, and is valid yaml.'}
if state == 'list':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['key']:
rval = yamlfile.get(params['key'])
return {'changed': False, 'result': rval, 'state': state}
elif state == 'absent':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['update']:
rval = yamlfile.pop(params['key'], params['value'])
else:
rval = yamlfile.delete(params['key'], params['index'], params['value'])
if rval[0] and params['src']:
yamlfile.write()
return {'changed': rval[0], 'result': rval[1], 'state': state}
elif state == 'present':
# check if content is different than what is in the file
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
# We had no edits to make and the contents are the same
if yamlfile.yaml_dict == content and \
params['value'] is None:
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
yamlfile.yaml_dict = content
# If we were passed a key, value then
# we encapsulate it in a list and process it
# Key, Value passed to the module : Converted to Edits list #
edits = []
_edit = {}
if params['value'] is not None:
_edit['value'] = params['value']
_edit['value_type'] = params['value_type']
_edit['key'] = params['key']
if params['update']:
_edit['action'] = 'update'
_edit['curr_value'] = params['curr_value']
_edit['curr_value_format'] = params['curr_value_format']
_edit['index'] = params['index']
elif params['append']:
_edit['action'] = 'append'
edits.append(_edit)
elif params['edits'] is not None:
edits = params['edits']
if edits:
results = Yedit.process_edits(edits, yamlfile)
# If there were changes and a src was provided, write them out
if results['changed'] and params['src']:
yamlfile.write()
return {'changed': results['changed'], 'result': results['results'], 'state': state}
# no edits to make
if params['src']:
# pylint: disable=redefined-variable-type
rval = yamlfile.write()
return {'changed': rval[0],
'result': rval[1],
'state': state}
# We were passed content but no src, key or value, or edits. Return contents in memory
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
return {'failed': True, 'msg': 'Unknown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
updated = False
if content is not None:
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
updated = True
elif edits is not None:
results = Yedit.process_edits(edits, yed)
if results['changed']:
updated = True
if updated:
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-p')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None, field_selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
if field_selector is not None:
cmd.append('--field-selector={}'.format(field_selector))
# Name cannot be used with selector or field_selector.
if selector is None and field_selector is None and name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
# Ensure results are returned as an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']: # E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
class Utils(object): # pragma: no cover
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(str(contents))
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
Utils._write(tmpfile, open(inc_file).read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
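# Illustrative sketch (not part of the vendored module): content is a dict or
# list of dicts carrying 'path' and 'data'; the return value pairs each
# basename with the temporary file it was written to. For example:
#
#   files = Utils.create_tmp_files_from_contents(
#       {'path': 'registry.yaml', 'data': {'kind': 'Service'}},
#       content_type='yaml')
#   # files -> [{'name': 'registry.yaml', 'path': '/tmp/registry.yaml-...'}]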
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
# Hack to get the openshift version on OpenShift 3.2:
# by default "oc version" in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
version = version[1:] # Remove the 'v' prefix
versions_dict[tech + '_numeric'] = version.split('+')[0]
# "3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = "{}.{}".format(*version.split('.'))
return versions_dict
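# Worked example (illustrative only):
#
#   Utils.add_custom_versions({'oc': 'v3.3.0.33'})
#   # -> {'oc_numeric': '3.3.0.33', 'oc_short': '3.3'}
#
# The leading 'v' and any '-' suffix are stripped before the numeric and
# shortened forms are built.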
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import rpm
transaction_set = rpm.TransactionSet()
rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
return rpmquery.count() > 0
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
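# Illustrative sketch (not part of the vendored module): 'metadata' and
# 'status' are skipped by default, so the following comparison succeeds
# even though the query result carries extra autogenerated fields:
#
#   user = {'spec': {'replicas': 2}}
#   result = {'spec': {'replicas': 2}, 'metadata': {'name': 'x'}, 'status': {}}
#   Utils.check_def_equal(user, result)   # -> True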
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self, ascommalist=''):
'''return all options as a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs'''
return self.stringify(ascommalist)
def stringify(self, ascommalist=''):
''' return the options hash as cli params in a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs '''
rval = []
for key in sorted(self.config_options.keys()):
data = self.config_options[key]
if data['include'] \
and (data['value'] is not None or isinstance(data['value'], int)):
if key == ascommalist:
val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())])
else:
val = data['value']
rval.append('--{}={}'.format(key.replace('_', '-'), val))
return rval
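# Illustrative sketch (not part of the vendored module): with options
#
#   {'node_selector': {'value': {'region': 'infra'}, 'include': True},
#    'replicas': {'value': 2, 'include': True}}
#
# stringify(ascommalist='node_selector') would yield
# ['--node-selector=region=infra', '--replicas=2'].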
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_objectvalidator.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class OCObjectValidator(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
def __init__(self, kubeconfig):
''' Constructor for OCObjectValidator '''
# namespace has no meaning for object validation, hardcode to 'default'
super(OCObjectValidator, self).__init__('default', kubeconfig)
def get_invalid(self, kind, invalid_filter):
''' return invalid object information '''
rval = self._get(kind)
if rval['returncode'] != 0:
return False, rval, []
return True, rval, list(filter(invalid_filter, rval['results'][0]['items'])) # wrap filter with list for py3
# pylint: disable=too-many-return-statements
@staticmethod
def run_ansible(params):
''' run the idempotent ansible code
params comes from the ansible portion of this module
'''
objectvalidator = OCObjectValidator(params['kubeconfig'])
all_invalid = {}
failed = False
def _is_invalid_namespace(namespace):
# check if it uses a reserved name
name = namespace['metadata']['name']
if not any((name == 'kube',
name == 'kubernetes',
name == 'openshift',
name.startswith('kube-'),
name.startswith('kubernetes-'),
name.startswith('openshift-'),)):
return False
# determine if the namespace was created by a user
if 'annotations' not in namespace['metadata']:
return False
return 'openshift.io/requester' in namespace['metadata']['annotations']
checks = (
(
'hostsubnet',
lambda x: x['metadata']['name'] != x['host'],
u'hostsubnets where metadata.name != host',
),
(
'netnamespace',
lambda x: x['metadata']['name'] != x['netname'],
u'netnamespaces where metadata.name != netname',
),
(
'namespace',
_is_invalid_namespace,
u'namespaces that use reserved names and were not created by infrastructure components',
),
)
for resource, invalid_filter, invalid_msg in checks:
success, rval, invalid = objectvalidator.get_invalid(resource, invalid_filter)
if not success:
return {'failed': True, 'msg': 'Failed to GET {}.'.format(resource), 'state': 'list', 'results': rval}
if invalid:
failed = True
all_invalid[invalid_msg] = invalid
if failed:
return {
'failed': True,
'msg': (
"All objects are not valid. If you are a supported customer please contact "
"Red Hat Support providing the complete output above. If you are not a customer "
"please contact users@lists.openshift.redhat.com for assistance."
),
'state': 'list',
'results': all_invalid
}
return {'msg': 'All objects are valid.'}
# -*- -*- -*- End included fragment: class/oc_objectvalidator.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_objectvalidator.py -*- -*- -*-
def main():
'''
ansible oc module for validating OpenShift objects
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
),
supports_check_mode=False,
)
rval = OCObjectValidator.run_ansible(module.params)
if 'failed' in rval:
module.fail_json(**rval)
module.exit_json(**rval)
if __name__ == '__main__':
main()
# -*- -*- -*- End included fragment: ansible/oc_objectvalidator.py -*- -*- -*-
|
blrm/openshift-tools
|
openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/library/oc_objectvalidator.py
|
Python
|
apache-2.0
| 53,161
|
from . import formats
from .core.format_manager import convert, format_manager
from .core.contents_manager import IPymdContentsManager
from .core.scripts import convert_files
__version__ = '0.1.2.dev0'
|
bollwyvl/ipymd
|
ipymd/__init__.py
|
Python
|
bsd-3-clause
| 204
|
# -*- coding: utf-8 -*-
from collections import namedtuple
from pprint import pprint as pp
OpInfo = namedtuple('OpInfo', 'prec assoc')
L, R = 'Left Right'.split()
ops = {
'^': OpInfo(prec=4, assoc=R),
'*': OpInfo(prec=3, assoc=L),
'/': OpInfo(prec=3, assoc=L),
'+': OpInfo(prec=2, assoc=L),
'-': OpInfo(prec=2, assoc=L),
'(': OpInfo(prec=9, assoc=L),
')': OpInfo(prec=0, assoc=L),
}
NUM, LPAREN, RPAREN = 'NUMBER ( )'.split()
def get_input(inp = None):
'Inputs an expression and returns list of (TOKENTYPE, tokenvalue)'
if inp is None:
inp = input('expression: ')
tokens = inp.strip().split()
tokenvals = []
for token in tokens:
if token in ops:
tokenvals.append((token, ops[token]))
#elif token in (LPAREN, RPAREN):
# tokenvals.append((token, token))
else:
tokenvals.append((NUM, token))
return tokenvals
def shunting(tokenvals):
outq, stack = [], []
table = ['TOKEN,ACTION,RPN OUTPUT,OP STACK,NOTES'.split(',')]
for token, val in tokenvals:
note = action = ''
if token is NUM:
action = 'Add number to output'
outq.append(val)
table.append( (val, action, ' '.join(outq), ' '.join(s[0] for s in stack), note) )
elif token in ops:
t1, (p1, a1) = token, val
v = t1
note = 'Pop ops from stack to output'
while stack:
t2, (p2, a2) = stack[-1]
if (a1 == L and p1 <= p2) or (a1 == R and p1 < p2):
if t1 != RPAREN:
if t2 != LPAREN:
stack.pop()
action = '(Pop op)'
outq.append(t2)
else:
break
else:
if t2 != LPAREN:
stack.pop()
action = '(Pop op)'
outq.append(t2)
else:
stack.pop()
action = '(Pop & discard "(")'
table.append( (v, action, ' '.join(outq), ' '.join(s[0] for s in stack), note) )
break
table.append( (v, action, ' '.join(outq), ' '.join(s[0] for s in stack), note) )
v = note = ''
else:
note = ''
break
note = ''
note = ''
if t1 != RPAREN:
stack.append((token, val))
action = 'Push op token to stack'
else:
action = 'Discard ")"'
table.append( (v, action, ' '.join(outq), ' '.join(s[0] for s in stack), note) )
note = 'Drain stack to output'
while stack:
v = ''
t2, (p2, a2) = stack[-1]
action = '(Pop op)'
stack.pop()
outq.append(t2)
table.append( (v, action, ' '.join(outq), ' '.join(s[0] for s in stack), note) )
v = note = ''
return table
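# Illustrative example (not part of the original script): for the
# space-separated infix expression '3 + 4 * 2',
#
#   rp = shunting(get_input('3 + 4 * 2'))
#   rp[-1][2]   # -> '3 4 2 * +'
#
# and Resultado('3 4 2 * +') pops operands from a stack and prints [11.0].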
def IngresarNumero(dato):
try:
float(dato)
retorno = True
except ValueError:
retorno = False
return retorno
def Resultado(entrada):
Aux1 = 0
Aux2 = 0
Lista = []
Aux = entrada.strip().split()
for i in Aux:
if IngresarNumero(i):
Aux2 = float(i)
Lista.append(Aux2)
else:
if i == "+":
Aux1 = Lista[-2] + Lista[-1]
Lista.pop()
Lista.pop()
Lista.append(Aux1)
elif i == "-":
Aux1 = Lista[-2] - Lista[-1]
Lista.pop()
Lista.pop()
Lista.append(Aux1)
elif i == "*":
Aux1 = Lista[-2] * Lista[-1]
Lista.pop()
Lista.pop()
Lista.append(Aux1)
elif i == "/":
Aux1 = Lista[-2] / Lista[-1]
Lista.pop()
Lista.pop()
Lista.append(Aux1)
elif i == "^":
Aux1 = Lista[-2] ** Lista[-1]
Lista.pop()
Lista.pop()
Lista.append(Aux1)
print(Lista)
if __name__ == '__main__':
infix = raw_input("Enter the expression to evaluate: ")
print('Expression entered: %r\n' % infix)
rp = shunting(get_input(infix))
print('Expression in reverse Polish notation: %r\n' % rp[-1][2])
rpn = rp[-1][2]
print("The result is: ")
Resultado(rpn)
raw_input("Press any key followed by Enter to finish: ")
|
hugoallan9/programacionMatematica
|
Billy/tarea4.py
|
Python
|
gpl-3.0
| 4,536
|
"""A custom backend for testing."""
from django.core.mail.backends.base import BaseEmailBackend
class EmailBackend(BaseEmailBackend):
def __init__(self, *args, **kwargs):
super(EmailBackend, self).__init__(*args, **kwargs)
self.test_outbox = []
def send_messages(self, email_messages):
# Messages are stored in an instance variable for testing.
self.test_outbox.extend(email_messages)
return len(email_messages)
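# Hypothetical usage sketch (not part of the vendored file): the backend is
# selected by dotted path in the test settings or an override, e.g.
#
#   EMAIL_BACKEND = 'regressiontests.mail.custombackend.EmailBackend'
#
# so that each connection keeps the messages it handled in its test_outbox
# list instead of sending them.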
|
openhatch/new-mini-tasks
|
vendor/packages/Django/tests/regressiontests/mail/custombackend.py
|
Python
|
apache-2.0
| 464
|
import subprocess
def isValidInkscape(executable):
try:
out = subprocess.check_output([executable, "--version"]).decode("utf-8")
parts = out.split(" ")
if parts[0] != "Inkscape":
return False
version = parts[1].split(".")
return int(version[0]) == 1
except FileNotFoundError as e:
return False
except subprocess.CalledProcessError as e:
return False
def chooseInkscapeCandidate(candidates):
for candidate in candidates:
if isValidInkscape(candidate):
return candidate
raise RuntimeError("No Inkscape executable found. Please check:\n" +
"- if Inkscape is installed\n" +
"- if it is version at least 1.0\n" +
"If the conditions above are true, please ensure Inkscape is in PATH or\n" +
"ensure there is environmental variable 'PCBDRAW_INKSCAPE' pointing to the Inkscape executable\n\n" +
"Checked paths: \n" +
"\n".join([f"- {x}" for x in candidates]))
|
yaqwsx/PcbDraw
|
pcbdraw/convert_common.py
|
Python
|
mit
| 1,100
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'SQLAlchemy',
'transaction',
'pyramid_tm',
'pyramid_debugtoolbar',
'zope.sqlalchemy',
'waitress',
'pyramid_jinja2',
'WTForms',
'psycopg2',
'cryptography'
]
setup(name = 'spartan',
version = '1.1',
description = 'A simple CMS',
long_description = README + '\n\n' + CHANGES,
classifiers = [
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author = 'Josue Montano',
author_email = 'josuemontanoa@gmail.com',
url = '',
keywords = 'web wsgi bfg pylons pyramid',
packages = find_packages(),
include_package_data = True,
zip_safe = False,
test_suite = 'spartan',
install_requires = requires,
entry_points = """\
[paste.app_factory]
main = spartan:main
[console_scripts]
initialize_spartan_db = spartan.scripts.initializedb:main
""",
)
|
josuemontano/cms
|
spartan/setup.py
|
Python
|
gpl-3.0
| 1,286
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Copyright (C) 2004-2015: Paul Vint pjvint@gmail.com
"""
This script exports Milkshape3d text files from Blender (http://www.blender.org). It supports face and vertex normals,
colours, and texture coordinates per face or per vertex.
Only one mesh can be exported at a time.
"""
import bpy
import os
DEBUG = True
def getPrimaryVertexGroup(_vgroups, _v):
g = -1
w = 0
## Scan through any vertex groups and return the index of the one with the highest weight (or -1 if none)
for vertgroup in _v.groups:
if (vertgroup.weight > w):
w = vertgroup.weight
g = vertgroup.group
#fw("xx%fxx" % vertgroup.group)
return g
def face_iter_func(mesh):
uv_layer = mesh.uv_textures.active.data
uv_layer_len = len(uv_layer)
faces = mesh.faces
for i in range(uv_layer_len):
uv_elem = uv_layer[i]
yield (i, uv_layer[i].uv)
def save(operator,
context,
filepath="",
use_modifiers=True,
use_normals=True,
use_uv_coords=True,
use_colors=True,
):
def rvec3d(v):
return round(v[0], 6), round(v[1], 6), round(v[2], 6)
def rvec2d(v):
return round(v[0], 6), round(v[1], 6)
scene = context.scene
obj = context.active_object
if not obj:
raise Exception("Error, Select 1 active object")
# Multiple meshes
objects = context.selected_objects
file = open(filepath, "w", encoding="utf8", newline="\n")
fw = file.write
fw("// Milkshape 3D ASCII\n\n")
fw("Frames: 30\n")
fw("Frame: 1\n\n")
if scene.objects.active:
bpy.ops.object.mode_set(mode='OBJECT')
o = 0
numArmatures = 0
numMeshes = 0
# count the meshes
for obj in objects:
if obj.type == "MESH":
numMeshes = numMeshes + 1
fw("Meshes: %d\n" % numMeshes)
for obj in objects:
## Check if it's an armature
if obj.type == "ARMATURE":
numArmatures = numArmatures + 1
else:
if use_modifiers:
mesh = obj.to_mesh(scene, True, 'PREVIEW')
else:
mesh = obj.data
if not mesh:
raise Exception("Error, could not get mesh data from active object")
# mesh.transform(obj.matrix_world) # XXX
has_uv = (len(mesh.uv_textures) > 0)
has_uv_vertex = (len(mesh.sticky) > 0)
# FIXME
#has_uv = True
has_vcol = len(mesh.vertex_colors) > 0
#if (not has_uv) and (not has_uv_vertex):
# use_uv_coords = False
if not has_vcol:
use_colors = False
if not use_uv_coords:
has_uv = has_uv_vertex = False
if not use_colors:
has_vcol = False
if has_uv:
active_uv_layer = mesh.uv_textures.active
if not active_uv_layer:
use_uv_coords = False
has_uv = False
else:
active_uv_layer = active_uv_layer.data
if False: # Testing
for i, uv in face_iter_func(mesh):
fw("%d %f \n" % (i, uv[0][0]))
return True
## Get UV list
if has_uv:
faceUVs = []
for i, uv in face_iter_func(mesh):
faceUVs.append(uv)
if has_vcol:
active_col_layer = mesh.vertex_colors.active
if not active_col_layer:
use_colors = False
has_vcol = False
else:
active_col_layer = active_col_layer.data
# in case
color = uvcoord = uvcoord_key = normal = normal_key = None
mesh_verts = mesh.vertices # save a lookup
ply_verts = [] # list of dictionaries
# vdict = {} # (index, normal, uv) -> new index
vdict = [{} for i in range(len(mesh_verts))]
ply_faces = [[] for f in range(len(mesh.faces))]
vert_count = 0
## Vertex Group Testing
vGroups = []
vGroupsIndices = []
if (obj.vertex_groups):
for x in obj.vertex_groups:
#fw("=%d %s\n" % (x.index, x.name))
vGroups.append({x.index, x.name})
vGroupsIndices.append(x.index)
## Yielded:
#0 Bone
#1 Bone.002
#2 Bone.001
for i, f in enumerate(mesh.faces):
# GOOD: fw("Verts: %d %d %d\n" % (f.vertices[0], f.vertices[1], f.vertices[2]))
smooth = f.use_smooth
if not smooth:
normal = tuple(f.normal)
normal_key = rvec3d(normal)
if has_uv:
uv = active_uv_layer[i]
uv = uv.uv1, uv.uv2, uv.uv3, uv.uv4 # XXX - crufty :/
if has_vcol:
col = active_col_layer[i]
col = col.color1[:], col.color2[:], col.color3[:], col.color4[:]
f_verts = f.vertices
pf = ply_faces[i]
## FIXME Deprecated
for j, vidx in enumerate(f_verts):
v = mesh_verts[vidx]
if smooth:
normal = tuple(v.normal)
normal_key = rvec3d(normal)
if has_uv:
uvcoord = uv[j][0], 1.0 - uv[j][1]
uvcoord_key = rvec2d(uvcoord)
elif has_uv_vertex:
uvcoord = v.uvco[0], 1.0 - v.uvco[1]
uvcoord_key = rvec2d(uvcoord)
if has_vcol:
color = col[j]
color = (int(color[0] * 255.0),
int(color[1] * 255.0),
int(color[2] * 255.0),
)
key = normal_key, uvcoord_key, color
vdict_local = vdict[vidx]
pf_vidx = vdict_local.get(key) # Will be None initially
if pf_vidx is None: # same as vdict_local.has_key(key)
pf_vidx = vdict_local[key] = vert_count
ply_verts.append((vidx, normal, uvcoord, color))
vert_count += 1
pf.append(pf_vidx)
# Mesh name, flags, material index
fw("\"%s\" 0 %d\n" % (obj.name, o))
#fw("%d\n" % (len(mesh.faces) * 3))
#if use_colors:
# fw("property uchar red\n"
# "property uchar green\n"
# "property uchar blue\n")
#fw("element face %d\n" % len(mesh.faces))
#fw("property list uchar uint vertex_indices\n")
#fw("end_header\n")
# mesh.vertices is array of vertex coords
# face.vertices is array of vertex indices
# to get unique vertices in the file create an array of all vertices and
# then find the highest index in the list of faces and use only up to
# that one to only have unique vertices
maxIndex = 0
numVerts = 0
for f in mesh.faces:
for v in f.vertices:
numVerts = numVerts + 1
if (v >= maxIndex):
maxIndex = v
maxIndex = maxIndex + 1
#fw("%d\n" % (maxIndex))
## create array of verts
vco = []
fverts = []
## make a properly ordered list of vertices
for f in mesh.faces:
for v in mesh.vertices:
fverts.append(v)
### The following method is crap - need to duplicate verts for when they have different
### UV coords for different faces!
#for i in range(0, maxIndex):
#fw("0 %.4f %.4f %.4f " % (-fverts[i].co[0], fverts[i].co[2], -fverts[i].co[1]))
#fw('0.0, 0.0') # uv
# Vertex Group
#vg = getPrimaryVertexGroup(vGroups, fverts[i])
#fw(" %d\n" % vg)
## Prep for UVs
activeUV = mesh.uv_textures[0].data
#if has_uv:
# actuveUV = mesh.uv_textures
### Dump each vert on each face
fw("%d\n" % numVerts)
fIdx = 0
for f in mesh.faces:
if (len(f.vertices) != 3):
raise Exception("Error! All faces must be triangles. (Convert in edit mode by pressing CTRL-t)")
## Loop through each vertex in the face
vIdx = 0
uv = activeUV[fIdx]
fuv = uv.uv1, uv.uv2, uv.uv3
for v in f.vertices:
fw("0 %.4f %.4f %.4f " % (-fverts[v].co[0], fverts[v].co[2], -fverts[v].co[1]))
## uv coords
#for i, uv in face_iter_func(mesh):
#fw("%d %f \n" % (i, uv[0][0]))
if has_uv:
fw("%.4f %.4f " % (faceUVs[fIdx][vIdx][0], 1.0 - faceUVs[fIdx][vIdx][1]))
#fw("%.4f %.4f " % (fverts[v].uv[0], 1 - fverts[v].uv[1]))
else:
fw("0.0000 0.0000 ");
## Vertex Group
if not obj.vertex_groups:
vg = -1
else:
vg = getPrimaryVertexGroup(vGroups, fverts[v])
fw("%d\n" % vg)
vIdx = vIdx + 1
fIdx = fIdx + 1
# Repeat the above loop to get vertex normals
fw("%d\n" % numVerts)
for f in mesh.faces:
## Test if using smoothing or not
if f.use_smooth:
## Loop through each vertex in the face
for v in f.vertices:
fw("%.4f %.4f %.4f\n" % (-fverts[v].normal[0], fverts[v].normal[2], -fverts[v].normal[1]))
else:
for v in f.vertices:
fw("%.4f %.4f %.4f\n" % (-f.normal[0], f.normal[2], -f.normal[1]))
# Get Face info
# TODO: Smoothing groups
# A bit BFI, but vertices are in order
fw("%d\n" % len(ply_faces))
v = 0
for f in mesh.faces:
fw("1 %d %d %d" % (v + 2, v + 1, v))
fw(" %d %d %d 1\n" % (v + 2, v + 1, v))
v = v + 3
o = o + 1
## Materials
# Note: Limiting to one mat per mesh, and assuming every mesh has one
world = scene.world
if world:
world_amb = world.ambient_color
else:
world_amb = Color((0.0, 0.0, 0.0))
fw("\nMaterials: %d\n" % o)
o = 0
for obj in objects:
if obj.type != "ARMATURE":
materials = obj.data.materials[:]
mat = materials[0]
fw("\"Mat%d\"\n" % o)
## ambient
fw('%.6f %.6f %.6f 1.000000\n' % (mat.diffuse_color * mat.ambient)[:])
## Diffuse
fw("%.6f %.6f %.6f 1.000000\n" % (mat.diffuse_intensity * mat.diffuse_color)[:])
fw("%.6f %.6f %.6f 1.000000\n" % (mat.specular_intensity * mat.specular_color)[:]) # Specular
fw('%.6f %.6f %.6f 1.000000\n' % (mat.diffuse_color * mat.emit)[:])
fw("%.6f\n" % mat.specular_hardness)
fw("%.6f\n" % mat.alpha)
if (len(obj.data.uv_textures) > 0):
uv_layer = obj.data.uv_textures.active.data[:]
uv_image = uv_layer[0].image
if (uv_image):
fw("\"%s\"\n" % uv_image.filepath)
else:
fw("\"\"\n")
else:
fw("\"\"\n")
# TODO: Alpha texture
fw("\"\"\n")
o = o + 1
fw("\n")
#fw("Bones: %d\n" % numArmatures)
numBones = 0
# count the bones
for obj in objects:
if obj.type == "ARMATURE":
for b in obj.pose.bones:
numBones = numBones + 1
fw("Bones: %d\n" % numBones)
# export the bones
for obj in objects:
if obj.type == "ARMATURE":
for b in obj.pose.bones:
## Give the file the bone!
## Bone Name
fw("\"%s\"\n" % b.name)
## Parent Name
if (len(b.parent_recursive) > 0 ):
fw("\"%s\"\n" % b.parent.name)
else:
fw("\"\"\n")
## // joint: flags, posx, posy, posz, rotx, roty, rotz
## Looking at examples the flag appears to always be 24 (?)
## Not sure how to get rot - skip it for now
fw("24 %.6f %.6f %.6f 0 0 0\n" % ( -b.head[0], b.head[2], -b.head[1]))
## Number of position keys - using the number of frames in the anim sequence
fw("%d\n" % (scene.frame_end - scene.frame_start))
## FIXME Not sure how to handle time, just doing 1 sec per frame for now
secs = 1
## // position key: time, posx, posy, posz
for frame in range(scene.frame_start, scene.frame_end):
## Go to the first frame
scene.frame_set(frame)
fw("%.6f %.6f %.6f %.6f\n" % ( secs, -b.tail[0], b.tail[2], -b.tail[1]))
secs = secs + 1
### Rotation Keys
# Just using number of frames for now with rots all 0.0
fw("%d\n" % (scene.frame_end - scene.frame_start))
for frame in range(scene.frame_start, scene.frame_end):
fw("%d 0.000000 0.000000 0.000000\n" % secs)
## End of this bone
fw("\n")
fw("GroupComments: 0\n")
fw("MaterialComments: 0\n")
fw("BoneComments: 0\n")
fw("ModelComment: 0\n")
file.close()
print("writing %r done" % filepath)
if use_modifiers:
bpy.data.meshes.remove(mesh)
# XXX
"""
if is_editmode:
Blender.Window.EditMode(1, "", 0)
"""
return {'FINISHED'}
|
pvint/Blender2MS3d
|
io_mesh_ms3d/export_ms3d.py
|
Python
|
gpl-2.0
| 11,954
|
from setuptools import setup, find_packages
setup(
name='viewdns',
version='0.1.0',
packages=find_packages(),
url='https://github.com/MiquelT/viewdns-api.git',
# install_requires=["PyCrypto", "asyncssh-unofficial"],
license='BSD',
author='MiquelTur',
author_email='miquel.tur.m@gmail.com',
description='API for http://viewdns.info',
classifiers=[
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
]
)
|
MiquelT/viewdns-api
|
setup.py
|
Python
|
bsd-3-clause
| 490
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# nova documentation build configuration file, created by
# sphinx-quickstart on Sat May 1 15:17:47 2010.
#
# This file is execfile()d with the current directory set to
# its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
html_theme = 'openstackdocs'
html_theme_options = {
"sidebar_mode": "toc",
}
extensions = [
'os_api_ref',
'openstackdocstheme',
]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Messaging Service API Reference'
copyright = u'2010-present, OpenStack Foundation'
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/zaqar'
openstackdocs_bug_project = 'zaqar'
openstackdocs_bug_tag = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# The reST default role (used for this markup: `text`) to use
# for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for man page output ----------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'zaqardoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'Zaqar.tex', u'OpenStack Messaging Service API Documentation',
u'OpenStack Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
|
openstack/zaqar
|
api-ref/source/conf.py
|
Python
|
apache-2.0
| 6,668
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Jeremiah Marks
# @Date: 2015-03-08 21:40:17
# @Last Modified 2015-03-09
# @Last Modified time: 2015-03-09 21:40:22
class NumChain2(object):
def __init__(self,targetValue, chainLength):
self.valuegoal=targetValue
self.linksgoal=chainLength
self.links=[]
self.potentialLinks=[]
self.links.append(1)
self.potentialLinks.append(set([1]))
self.fulfilled=False
self.chainsThatWork=[]
def createNextPotentialLinks(self):
self.potentialNextValues=set()
self.currentValue=sum(self.links)
self.currentLinks=len(self.links)
for eachFirstLocation in range(len(self.links)):
for eachSecondValue in range(eachFirstLocation,len(self.links)):
self.potentialNextValues.add(self.links[eachFirstLocation]+self.links[eachSecondValue])
def iterate(self):
self.createNextPotentialLinks()
self.potentialLinks.append(self.potentialNextValues)
if (self.currentLinks>=self.linksgoal-1):
if (len(self.chainsThatWork)>0):
self.fulfilled=True
elif (self.valuegoal in self.potentialNextValues):
self.setNextValue(self.valuegoal)
# elif (max(self.potentialNextValues)==self.links[-1]):
# """That means we have did this last time"""
# if (max(self.potentialNextValues)==self.valuegoal):
# self.fulfilled=True
for eachPotential in self.potentialNextValues:
self.setNextValue(eachPotential)
self.links.pop()
def setNextValue(self,value):
self.links.append(value)
if (value==self.valuegoal):
temp=[]
for eachV in self.links:
temp.append(eachV)
self.chainsThatWork.append(temp)
def theLogicController(self):
while not self.fulfilled:
self.iterate()
print self.links
|
jeremiahmarks/dangerzone
|
scripts/python/numchain.py
|
Python
|
mit
| 1,986
|
from django.contrib import admin, messages
from django.forms import ValidationError
from feincms.module.medialibrary.admin import MediaFileAdmin as MediaFileAdminOld
from feincms.module.medialibrary.forms import MediaFileAdminForm as MediaFileAdminFormOld
from feincms.module.medialibrary.models import MediaFile
from feincms.module.page.admin import PageAdmin as PageAdminOld
from feincms.module.page.models import Page
from .utils import copy_tree
class PageAdmin(PageAdminOld):
save_on_top = True
actions = ['copy_tree_admin_action']
list_editable = ['in_navigation']
list_display = ['short_title', 'slug', 'is_visible_admin', 'in_navigation',
'template']
def copy_tree_admin_action(self, request, queryset):
if len(queryset) != 1:
self.message_user(request, 'Select only one page to copy', level=messages.ERROR)
return
copy_tree(queryset[0])
copy_tree_admin_action.short_description = 'Copy tree'
admin.site.unregister(Page)
admin.site.register(Page, PageAdmin)
class MediaFileAdminForm(MediaFileAdminFormOld):
def clean(self, *args, **kwargs):
cleaned_data = super(MediaFileAdminForm, self).clean(*args, **kwargs)
if not cleaned_data.get('categories'):
raise ValidationError('You must select at least one category.')
return cleaned_data
class MediaFileAdmin(MediaFileAdminOld):
form = MediaFileAdminForm
inlines = []
list_display = ['admin_thumbnail', '__str__', 'list_categories', 'formatted_created']
fieldsets = (
(None, {'fields': ('file', 'categories')}),
)
def list_categories(self, obj):
return ', '.join([category.title for category in obj.categories.all()])
admin.site.unregister(MediaFile)
admin.site.register(MediaFile, MediaFileAdmin)
|
glogiotatidis/masterfirefoxos
|
masterfirefoxos/base/admin.py
|
Python
|
mpl-2.0
| 1,834
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Only (de)serialization utils hasn't been removed to decrease requirements
# number.
"""Utility methods for working with WSGI servers."""
import datetime
import errno
import os
import signal
from xml.dom import minidom
from xml.parsers import expat
from xml import sax
from xml.sax import expatreader
import eventlet
from eventlet import wsgi
from oslo_config import cfg
from oslo_log import log as logging
from oslo_log import loggers
from oslo_serialization import jsonutils
import six
from sahara import exceptions
from sahara.i18n import _
from sahara.i18n import _LE
from sahara.i18n import _LI
from sahara.openstack.common import sslutils
LOG = logging.getLogger(__name__)
wsgi_opts = [
cfg.IntOpt('max_header_line',
default=16384,
help="Maximum line size of message headers to be accepted. "
"max_header_line may need to be increased when using "
"large tokens (typically those generated by the "
"Keystone v3 API with big service catalogs)."),
]
CONF = cfg.CONF
CONF.register_opts(wsgi_opts)
class ProtectedExpatParser(expatreader.ExpatParser):
"""An expat parser which disables DTD's and entities by default."""
def __init__(self, forbid_dtd=True, forbid_entities=True,
*args, **kwargs):
# Python 2.x old style class
expatreader.ExpatParser.__init__(self, *args, **kwargs)
self.forbid_dtd = forbid_dtd
self.forbid_entities = forbid_entities
def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
raise ValueError("Inline DTD forbidden")
def entity_decl(self, entityName, is_parameter_entity, value, base,
systemId, publicId, notationName):
raise ValueError("<!ENTITY> entity declaration forbidden")
def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
# expat 1.2
raise ValueError("<!ENTITY> unparsed entity forbidden")
def external_entity_ref(self, context, base, systemId, publicId):
raise ValueError("<!ENTITY> external entity forbidden")
def notation_decl(self, name, base, sysid, pubid):
raise ValueError("<!ENTITY> notation forbidden")
def reset(self):
expatreader.ExpatParser.reset(self)
if self.forbid_dtd:
self._parser.StartDoctypeDeclHandler = self.start_doctype_decl
self._parser.EndDoctypeDeclHandler = None
if self.forbid_entities:
self._parser.EntityDeclHandler = self.entity_decl
self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
self._parser.ExternalEntityRefHandler = self.external_entity_ref
self._parser.NotationDeclHandler = self.notation_decl
try:
self._parser.SkippedEntityHandler = None
except AttributeError:
# some pyexpat versions do not support SkippedEntity
pass
def safe_minidom_parse_string(xml_string):
"""Parse an XML string using minidom safely.
"""
try:
return minidom.parseString(xml_string, parser=ProtectedExpatParser())
except sax.SAXParseException:
raise expat.ExpatError()
class ActionDispatcher(object):
"""Maps method name to local methods through action name."""
def dispatch(self, *args, **kwargs):
"""Find and call local method."""
action = kwargs.pop('action', 'default')
action_method = getattr(self, str(action), self.default)
return action_method(*args, **kwargs)
def default(self, data):
raise NotImplementedError()
class DictSerializer(ActionDispatcher):
"""Default request body serialization."""
def serialize(self, data, action='default'):
return self.dispatch(data, action=action)
def default(self, data):
return ""
class JSONDictSerializer(DictSerializer):
"""Default JSON request body serialization."""
def default(self, data):
def sanitizer(obj):
if isinstance(obj, datetime.datetime):
_dtime = obj - datetime.timedelta(microseconds=obj.microsecond)
return _dtime.isoformat()
return unicode(obj)
return jsonutils.dumps(data, default=sanitizer)
class XMLDictSerializer(DictSerializer):
def __init__(self, metadata=None, xmlns=None):
""":param metadata: information needed to deserialize xml
into a dictionary.
:param xmlns: XML namespace to include with serialized xml
"""
super(XMLDictSerializer, self).__init__()
self.metadata = metadata or {}
self.xmlns = xmlns
def default(self, data):
# We expect data to contain a single key which is the XML root.
root_key = data.keys()[0]
doc = minidom.Document()
node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
return self.to_xml_string(node)
def to_xml_string(self, node, has_atom=False):
self._add_xmlns(node, has_atom)
return node.toprettyxml(indent=' ', encoding='UTF-8')
# NOTE (ameade): the has_atom should be removed after all of the
# xml serializers and view builders have been updated to the current
# spec that required all responses include the xmlns:atom, the has_atom
# flag is to prevent current tests from breaking
def _add_xmlns(self, node, has_atom=False):
if self.xmlns is not None:
node.setAttribute('xmlns', self.xmlns)
if has_atom:
node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")
def _to_xml_node(self, doc, metadata, nodename, data):
"""Recursive method to convert data members to XML nodes."""
result = doc.createElement(nodename)
# Set the xml namespace if one is specified
# TODO(justinsb): We could also use prefixes on the keys
xmlns = metadata.get('xmlns', None)
if xmlns:
result.setAttribute('xmlns', xmlns)
# TODO(bcwaldon): accomplish this without a type-check
if type(data) is list:
collections = metadata.get('list_collections', {})
if nodename in collections:
metadata = collections[nodename]
for item in data:
node = doc.createElement(metadata['item_name'])
node.setAttribute(metadata['item_key'], str(item))
result.appendChild(node)
return result
singular = metadata.get('plurals', {}).get(nodename, None)
if singular is None:
if nodename.endswith('s'):
singular = nodename[:-1]
else:
singular = 'item'
for item in data:
node = self._to_xml_node(doc, metadata, singular, item)
result.appendChild(node)
# TODO(bcwaldon): accomplish this without a type-check
elif type(data) is dict:
collections = metadata.get('dict_collections', {})
if nodename in collections:
metadata = collections[nodename]
for k, v in data.items():
node = doc.createElement(metadata['item_name'])
node.setAttribute(metadata['item_key'], str(k))
text = doc.createTextNode(str(v))
node.appendChild(text)
result.appendChild(node)
return result
attrs = metadata.get('attributes', {}).get(nodename, {})
for k, v in data.items():
if k in attrs:
result.setAttribute(k, str(v))
else:
node = self._to_xml_node(doc, metadata, k, v)
result.appendChild(node)
else:
# Type is atom
node = doc.createTextNode(str(data))
result.appendChild(node)
return result
def _create_link_nodes(self, xml_doc, links):
link_nodes = []
for link in links:
link_node = xml_doc.createElement('atom:link')
link_node.setAttribute('rel', link['rel'])
link_node.setAttribute('href', link['href'])
if 'type' in link:
link_node.setAttribute('type', link['type'])
link_nodes.append(link_node)
return link_nodes
class TextDeserializer(ActionDispatcher):
"""Default request body deserialization."""
def deserialize(self, datastring, action='default'):
return self.dispatch(datastring, action=action)
def default(self, datastring):
return {}
class JSONDeserializer(TextDeserializer):
def _from_json(self, datastring):
try:
return jsonutils.loads(datastring)
except ValueError:
msg = _("cannot understand JSON")
raise exceptions.MalformedRequestBody(msg)
def default(self, datastring):
return {'body': self._from_json(datastring)}
class XMLDeserializer(TextDeserializer):
def __init__(self, metadata=None):
""":param metadata: information needed to
deserialize xml into a dictionary.
"""
super(XMLDeserializer, self).__init__()
self.metadata = metadata or {}
def _from_xml(self, datastring):
plurals = set(self.metadata.get('plurals', {}))
try:
node = safe_minidom_parse_string(datastring).childNodes[0]
return {node.nodeName: self._from_xml_node(node, plurals)}
except expat.ExpatError:
msg = _("cannot understand XML")
raise exceptions.MalformedRequestBody(msg)
def _from_xml_node(self, node, listnames):
"""Convert a minidom node to a simple Python type.
:param listnames: list of XML node names whose subnodes should
be considered list items.
"""
if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
return node.childNodes[0].nodeValue
elif node.nodeName in listnames:
return [self._from_xml_node(n, listnames) for n in node.childNodes]
else:
result = dict()
for attr, val in six.iteritems(node.attributes):
result[attr] = val.nodeValue
for child in node.childNodes:
if child.nodeType != node.TEXT_NODE:
result[child.nodeName] = self._from_xml_node(child,
listnames)
return result
def find_first_child_named(self, parent, name):
"""Search a nodes children for the first child with a given name."""
for node in parent.childNodes:
if node.nodeName == name:
return node
return None
def find_children_named(self, parent, name):
"""Return all of a nodes children who have the given name."""
for node in parent.childNodes:
if node.nodeName == name:
yield node
def extract_text(self, node):
"""Get the text field contained by the given node."""
if len(node.childNodes) == 1:
child = node.childNodes[0]
if child.nodeType == child.TEXT_NODE:
return child.nodeValue
return ""
def default(self, datastring):
return {'body': self._from_xml(datastring)}
class Server(object):
"""Server class to manage multiple WSGI sockets and applications."""
def __init__(self, threads=500):
eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line
self.threads = threads
self.children = []
self.running = True
def start(self, application):
"""Run a WSGI server with the given application.
:param application: The application to run in the WSGI server
"""
def kill_children(*args):
"""Kills the entire process group."""
LOG.error(_LE('SIGTERM received'))
signal.signal(signal.SIGTERM, signal.SIG_IGN)
self.running = False
os.killpg(0, signal.SIGTERM)
def hup(*args):
"""Shuts down the server(s).
Shuts down the server(s), but allows running requests to complete
"""
LOG.error(_LE('SIGHUP received'))
signal.signal(signal.SIGHUP, signal.SIG_IGN)
os.killpg(0, signal.SIGHUP)
signal.signal(signal.SIGHUP, hup)
self.application = application
self.sock = eventlet.listen((CONF.host, CONF.port), backlog=500)
if sslutils.is_enabled():
LOG.info(_LI("Using HTTPS for port %s"), CONF.port)
self.sock = sslutils.wrap(self.sock)
if CONF.api_workers == 0:
# Useful for profiling, test, debug etc.
self.pool = eventlet.GreenPool(size=self.threads)
self.pool.spawn_n(self._single_run, application, self.sock)
return
LOG.debug("Starting %d workers", CONF.api_workers)
signal.signal(signal.SIGTERM, kill_children)
signal.signal(signal.SIGHUP, hup)
while len(self.children) < CONF.api_workers:
self.run_child()
def wait_on_children(self):
while self.running:
try:
pid, status = os.wait()
if os.WIFEXITED(status) or os.WIFSIGNALED(status):
LOG.error(_LE('Removing dead child %s'), pid)
self.children.remove(pid)
self.run_child()
except OSError as err:
if err.errno not in (errno.EINTR, errno.ECHILD):
raise
except KeyboardInterrupt:
LOG.info(_LI('Caught keyboard interrupt. Exiting.'))
os.killpg(0, signal.SIGTERM)
break
eventlet.greenio.shutdown_safe(self.sock)
self.sock.close()
LOG.debug('Server exited')
def wait(self):
"""Wait until all servers have completed running."""
try:
if self.children:
self.wait_on_children()
else:
self.pool.waitall()
except KeyboardInterrupt:
pass
def run_child(self):
pid = os.fork()
if pid == 0:
signal.signal(signal.SIGHUP, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
self.run_server()
LOG.debug('Child %d exiting normally', os.getpid())
return
else:
LOG.info(_LI('Started child %s'), pid)
self.children.append(pid)
def run_server(self):
"""Run a WSGI server."""
self.pool = eventlet.GreenPool(size=self.threads)
wsgi.server(self.sock,
self.application,
custom_pool=self.pool,
log=loggers.WritableLogger(LOG),
debug=False)
self.pool.waitall()
def _single_run(self, application, sock):
"""Start a WSGI server in a new green thread."""
LOG.info(_LI("Starting single process server"))
eventlet.wsgi.server(sock, application,
custom_pool=self.pool,
log=loggers.WritableLogger(LOG),
debug=False)
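# A minimal standalone sketch (illustrative, independent of the module above) of the
# recursive minidom-node-to-Python conversion that XMLDeserializer._from_xml_node
# performs: a lone text child becomes a string, node names listed in `listnames`
# become lists, and anything else becomes a dict of attributes and child nodes.
from xml.dom import minidom
def node_to_python(node, listnames=()):
    if len(node.childNodes) == 1 and node.childNodes[0].nodeType == node.TEXT_NODE:
        return node.childNodes[0].nodeValue
    if node.nodeName in listnames:
        return [node_to_python(child, listnames) for child in node.childNodes]
    result = {name: value for name, value in node.attributes.items()}
    for child in node.childNodes:
        if child.nodeType != node.TEXT_NODE:
            result[child.nodeName] = node_to_python(child, listnames)
    return result
doc = minidom.parseString('<servers><server name="a"/><server name="b"/></servers>')
assert node_to_python(doc.childNodes[0], listnames={'servers'}) == [{'name': 'a'}, {'name': 'b'}]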
| bigfootproject/sahara | sahara/utils/wsgi.py | Python | apache-2.0 | 16,018 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pip.download
from pip.req import parse_requirements
from setuptools import setup
setup(name='ansible-toolkit',
version='1.3.2',
description='The missing Ansible tools',
url='http://github.com/dellis23/ansible-toolkit',
author='Daniel Ellis',
author_email='ellisd23@gmail.com',
license='GPLv3',
install_requires=[
str(pkg.req) for pkg in parse_requirements(
'requirements.txt',
session=pip.download.PipSession())],
tests_require=[
str(pkg.req) for pkg in parse_requirements(
'test-requirements.txt',
session=pip.download.PipSession())],
packages=['ansible_toolkit'],
scripts=[
'bin/atk-git-diff',
'bin/atk-show-vars',
'bin/atk-show-template',
'bin/atk-vault',
])
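# Note (illustrative sketch, not part of the original setup.py): the pip internals
# imported above (pip.req.parse_requirements and pip.download.PipSession) were moved
# under pip._internal in pip 10 and can no longer be imported this way. A minimal
# stand-in that reads requirements.txt directly, skipping blank lines, comments and
# pip options, might look like this:
def read_requirements(path='requirements.txt'):
    with open(path) as handle:
        lines = (line.strip() for line in handle)
        return [line for line in lines if line and not line.startswith(('#', '-'))]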
| dellis23/ansible-toolkit | setup.py | Python | gpl-3.0 | 919 |
from camkifu.stone.stonesfinder import StonesFinder
| ArnaudPel/CamKifu | src/camkifu/stone/__init__.py | Python | gpl-2.0 | 52 |
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.modules.storage.netapp.netapp_e_auditlog import AuditLog
from units.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from units.compat import mock
class AuditLogTests(ModuleTestCase):
REQUIRED_PARAMS = {'api_username': 'rw',
'api_password': 'password',
'api_url': 'http://localhost',
'ssid': '1'}
REQ_FUNC = 'ansible.modules.storage.netapp.netapp_e_auditlog.request'
MAX_RECORDS_MAXIMUM = 50000
MAX_RECORDS_MINIMUM = 100
def _set_args(self, **kwargs):
module_args = self.REQUIRED_PARAMS.copy()
if kwargs is not None:
module_args.update(kwargs)
set_module_args(module_args)
def test_max_records_argument_pass(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
max_records_set = (self.MAX_RECORDS_MINIMUM, 25000, self.MAX_RECORDS_MAXIMUM)
for max_records in max_records_set:
initial["max_records"] = max_records
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": False})):
audit_log = AuditLog()
self.assertTrue(audit_log.max_records == max_records)
def test_max_records_argument_fail(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
max_records_set = (self.MAX_RECORDS_MINIMUM - 1, self.MAX_RECORDS_MAXIMUM + 1)
for max_records in max_records_set:
with self.assertRaisesRegexp(AnsibleFailJson, r"Audit-log max_records count must be between 100 and 50000"):
initial["max_records"] = max_records
self._set_args(**initial)
AuditLog()
def test_threshold_argument_pass(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
threshold_set = (60, 75, 90)
for threshold in threshold_set:
initial["threshold"] = threshold
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": False})):
audit_log = AuditLog()
self.assertTrue(audit_log.threshold == threshold)
def test_threshold_argument_fail(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
threshold_set = (59, 91)
for threshold in threshold_set:
with self.assertRaisesRegexp(AnsibleFailJson, r"Audit-log percent threshold must be between 60 and 90"):
initial["threshold"] = threshold
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": False})):
AuditLog()
def test_is_proxy_pass(self):
"""Verify that True is returned when proxy is used to communicate with storage."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90,
"api_url": "https://10.1.1.10/devmgr/v2"}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
self.assertTrue(audit_log.is_proxy())
def test_is_proxy_fail(self):
"""Verify that AnsibleJsonFail exception is thrown when exception occurs."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the webservices about information"):
with mock.patch(self.REQ_FUNC, return_value=Exception()):
audit_log.is_proxy()
def test_get_configuration_pass(self):
"""Validate get configuration does not throw exception when normal request is returned."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
expected = {"auditLogMaxRecords": 1000,
"auditLogLevel": "writeOnly",
"auditLogFullPolicy": "overWrite",
"auditLogWarningThresholdPct": 90}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
body = audit_log.get_configuration()
self.assertTrue(body == expected)
def test_get_configuration_fail(self):
"""Verify AnsibleJsonFail exception is thrown."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the audit-log configuration!"):
with mock.patch(self.REQ_FUNC, return_value=Exception()):
audit_log.get_configuration()
def test_build_configuration_pass(self):
"""Validate configuration changes will force an update."""
response = {"auditLogMaxRecords": 1000,
"auditLogLevel": "writeOnly",
"auditLogFullPolicy": "overWrite",
"auditLogWarningThresholdPct": 90}
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
changes = [{"max_records": 50000},
{"log_level": "all"},
{"full_policy": "preventSystemAccess"},
{"threshold": 75}]
for change in changes:
initial_with_changes = initial.copy()
initial_with_changes.update(change)
self._set_args(**initial_with_changes)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with mock.patch(self.REQ_FUNC, return_value=(200, response)):
update = audit_log.build_configuration()
self.assertTrue(update)
def test_delete_log_messages_fail(self):
"""Verify AnsibleJsonFail exception is thrown."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to delete audit-log messages!"):
with mock.patch(self.REQ_FUNC, return_value=Exception()):
audit_log.delete_log_messages()
def test_update_configuration_delete_pass(self):
"""Verify 422 and force successfully returns True."""
body = {"auditLogMaxRecords": 1000,
"auditLogLevel": "writeOnly",
"auditLogFullPolicy": "overWrite",
"auditLogWarningThresholdPct": 90}
initial = {"max_records": 2000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90,
"force": True}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with mock.patch(self.REQ_FUNC, side_effect=[(200, body),
(422, {u"invalidFieldsIfKnown": None,
u"errorMessage": u"Configuration change...",
u"localizedMessage": u"Configuration change...",
u"retcode": u"auditLogImmediateFullCondition",
u"codeType": u"devicemgrerror"}),
(200, None),
(200, None)]):
self.assertTrue(audit_log.update_configuration())
def test_update_configuration_delete_skip_fail(self):
"""Verify 422 and no force results in AnsibleJsonFail exception."""
body = {"auditLogMaxRecords": 1000,
"auditLogLevel": "writeOnly",
"auditLogFullPolicy": "overWrite",
"auditLogWarningThresholdPct": 90}
initial = {"max_records": 2000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90,
"force": False}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to update audit-log configuration!"):
with mock.patch(self.REQ_FUNC, side_effect=[(200, body), Exception(422, {"errorMessage": "error"}),
(200, None), (200, None)]):
audit_log.update_configuration()
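# A minimal, self-contained sketch (illustrative names, separate from the test class
# above, using the standard-library unittest.mock rather than units.compat) of the
# mock.patch(..., side_effect=[...]) pattern used in
# test_update_configuration_delete_pass: each call to the patched target returns the
# next canned response in order.
from unittest import mock as _mock
def request_stub(url):
    raise AssertionError("patched out in the sketch below")
with _mock.patch(__name__ + ".request_stub",
                 side_effect=[(200, "config"), (422, "log full"), (200, None)]):
    assert request_stub("/audit-log/config") == (200, "config")
    assert request_stub("/audit-log/config") == (422, "log full")
    assert request_stub("/audit-log/records") == (200, None)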
| alxgu/ansible | test/units/modules/storage/netapp/test_netapp_e_auditlog.py | Python | gpl-3.0 | 10,758 |
def do_stuff():
a = 1
return a
| moreati/pytest-cov | tests/helper.py | Python | mit | 39 |
#!/usr/bin/env python
r"""
Produce a MagicPoint file from a text file with simple markup.
Markup:
directives (lines starting with .)
MagicPoint text (everything else)
You can use MagicPoint markup (lines starting with %), but that is discouraged.
You should know that \ is MagicPoint's escape character. If you want to
include a literal \ in the text, write \\. You should also know that lines
starting with a # are comments. If you want to include a line starting with
a # in the output, write \#
Directives:
.logo FILENAME (default: povlogo.png)
logo image used on the title page and on all other pages
should be specified before the first page directive
.title TITLE
.author AUTHOR
.email <EMAIL>
.conference CONFERENCE NAME YEAR
.company COMPANY NAME (default: Programmers of Vilnius)
.url COMPANY URL (default: http://pov.lt)
.logo FILENAME (default: povlogo.png)
title page elements
can be specified either before or inside the .titlepage directive
.titlepage
produce a title page
you can use a different logo image for the title page if you
specify .logo between the .titlepage directive and the next page
directive.
.heading TEXT
define the header line to show at the top of the current page
.footer TEXT
define a footer line to show at bottom of the current page
.subpage
start a new page that is a copy of the old page with extra text added
.page
start a new page (first line of text is centered, second line right
below it, rendered in italics; further lines horizontally centered,
in a smaller font)
.pageoftext
start a new page for text examples (12 lines of text, left-aligned)
.dictpage
start a new page (5 lines of vertically centered text)
.listpage
start a new page (title, blank line, then up to 8 list items)
Blank lines following a directive are skipped.
makeslide.py was written by Marius Gedminas <marius@pov.lt>
"""
import sys
import string
import fileinput
#
# Tokenizer
#
class Token(object):
def __init__(self, filename, lineno):
self.filename = filename
self.lineno = lineno
def report_error(self, message):
print >> sys.stderr, '%s:%d: %s' % (self.filename, self.lineno, message)
class Directive(Token):
def __init__(self, filename, lineno, name, args):
Token.__init__(self, filename, lineno)
self.name = name
self.args = args
class LineOfText(Token):
def __init__(self, filename, lineno, text):
Token.__init__(self, filename, lineno)
self.text = text
class BlankLine(LineOfText):
pass
def tokenize(inputfile):
filename = getattr(inputfile, 'name', '<input>')
for n, line in enumerate(inputfile, 1):
line = line.rstrip('\n')
if not line:
yield BlankLine(filename, n, line)
continue
if line.startswith('.') and line[1:2].isalpha():
args = line.split(None, 1)
name = args.pop(0)[1:]
yield Directive(filename, n, name, (args + [''])[0])
else:
yield LineOfText(filename, n, line)
#
# Parser
#
def parse(token_stream, variables, templates, preamble):
variables = variables.copy()
cur_page = preamble(variables)
pages = [cur_page]
skipping_empty_lines = True
for token in token_stream:
if isinstance(token, BlankLine) and skipping_empty_lines:
continue
elif isinstance(token, Directive):
keyword = token.name
if keyword in variables:
variables[keyword] = token.args
elif keyword in templates:
variables = variables.copy()
cur_page = templates[keyword](variables)
pages.append(cur_page)
elif keyword == 'subpage':
if cur_page is None:
token.report_error(".subpage before first page ignored")
else:
cur_page = cur_page.copy()
pages.append(cur_page)
variables = cur_page.variables
else:
token.report_error(".%s ignored" % keyword)
skipping_empty_lines = True
elif isinstance(token, LineOfText):
skipping_empty_lines = False
if cur_page is None:
token.report_error("text before first page ignored")
else:
try:
cur_page.add_line(token.text)
except NotImplementedError:
token.report_error("text ignored")
return pages
#
# Compiler
#
def process(inputfile, outputfile, variables, templates, preamble):
pages = parse(tokenize(inputfile), variables, templates, preamble)
for page in pages:
outputfile.write(page.render())
#
# Semantics
#
VARIABLES = dict(title='', author='', email='', conference='',
logo='povlogo.png',
company='Programmers of Vilnius',
url='http://pov.lt',
heading='', footer='')
TEMPLATES = {}
def template(name):
def decorator(cls):
TEMPLATES[name] = cls
return cls
return decorator
class PageTemplate(object):
template = string.Template('$text')
supports_text = True
defaults = dict(
pageoftextlogo='area 90 90, vgap 60,'
' size 8, font "standard", fore "#134d73", back "white", right,'
' newimage -zoom 50 "$logo", mark, again, center, size 4',
default1='area 90 90, vgap 260,'
' size 8, font "standard", fore "#134d73", back "white", right,'
' newimage -zoom 50 "$logo", mark, again, center, size 4, vgap 520',
default2='fore "#134d73"',
default3='center, size 5, vgap 260',
default4='size 8, vgap 80',
default5='font "em", size 7, vgap 10',
default6='font "standard", size 3',
footer_impl='%again, size 950, center, vgap 10\n\n%size 4\n$footer\n',
)
variables_to_reset_for_each_page = dict(
heading=' ',
footer='',
)
def __init__(self, variables):
self.variables = variables
self.text = []
self.variables.update(self.variables_to_reset_for_each_page)
def copy(self):
new = self.__class__({})
new.variables = self.variables.copy()
new.text = list(self.text)
return new
def add_line(self, text):
if not self.supports_text and not text.startswith('#'):
raise NotImplementedError
self.text.append(text)
def namespace(self):
namespace = dict((k, string.Template(v).substitute(**self.variables))
for k, v in self.defaults.items())
namespace.update(self.variables)
namespace['text'] = ''.join(line + '\n' for line in self.text)
for n, line in enumerate(self.text, 1):
namespace['line%d' % n] = line
namespace['rest%d' % n] = ''.join(line + '\n' for line in self.text[n:])
for n in range(len(self.text) + 1, 20):
namespace['line%d' % n] = ''
namespace['rest%d' % n] = ''
return namespace
def render(self):
return self.template.substitute(**self.namespace())
class Preamble(PageTemplate):
supports_text = False
template = string.Template('\n'.join([
'#!/usr/bin/env mgp',
'# Note: tabs and trailing spaces are *important* in this file',
'# - Preamble ----------------------------------------------------------------',
'%deffont "standard" xfont "verdana"',
'%deffont "thick" xfont "verdana-bold"',
'%deffont "em" xfont "verdana-medium-i"',
'%deffont "mono" xfont "andale mono"',
'%default 1 $default1',
'%default 2 $default2',
'%default 3 $default3',
'%default 4 $default4',
'%default 5 $default5',
'%default 6 $default6',
'# ---------------------------------------------------------------------------',
'$text',
]))
@template('titlepage')
class TitlePage(PageTemplate):
supports_text = False
template = string.Template('\n'.join([
'%page',
'%pcache 1 1 0 1',
'%ccolor "#134d73"',
'%nodefault',
'%size 7, font "standard", vgap 20, fore "black", back "white"',
'',
'',
'%center, font "thick", size 11',
'$title',
'%center, font "standard", size 7',
'',
'',
'%size 5, font "standard", fore "#134d73"',
'$author',
'%size 4',
'$email',
'%size 2',
'',
'%size 5',
'$company',
'%size 4',
'$url',
'%size 2',
'',
'%newimage "$logo"',
'',
'',
'',
'%fore "black"',
'$conference',
'$text',
]))
@template('page')
class Page(PageTemplate):
template = string.Template('\n'.join([
'%page',
'$heading',
'',
'$text',
'$footer_impl',
]))
def __init__(self, variables):
PageTemplate.__init__(self, variables)
@template('pageoftext')
class PageOfText(PageTemplate):
template = string.Template('\n'.join([
'%page',
'%nodefault',
'%$pageoftextlogo',
'$heading',
'%left, size 6, vgap 10',
'$text',
'$footer_impl',
]))
@template('dictpage')
class DictPage(PageTemplate):
template = string.Template('\n'.join([
'%page',
'%nodefault',
'%$default1',
'$heading',
'%center, size 8, vgap 20',
'$text',
]))
@template('listpage')
class ListPage(PageTemplate):
template = string.Template('\n'.join([
'%page',
'%nodefault',
'%$default1',
'$heading',
'%$default2',
'%$default3',
'%$default4',
'$line1',
'%size 1',
'$line2',
'%size 6, vgap 20',
'$rest2',
'$footer_impl',
]))
def main():
process(fileinput.input(), sys.stdout, VARIABLES, TEMPLATES, Preamble)
if __name__ == '__main__':
main()
| mgedmin/mgp2pdf | samples/pyconlt/source/makeslide.py | Python | gpl-2.0 | 10,287 |
"""
Distance computations (:mod:`scipy.spatial.distance`)
=====================================================
.. sectionauthor:: Damian Eads
Function reference
------------------
Distance matrix computation from a collection of raw observation vectors
stored in a rectangular array.
.. autosummary::
:toctree: generated/
pdist -- pairwise distances between observation vectors.
cdist -- distances between two collections of observation vectors
squareform -- convert distance matrix to a condensed one and vice versa
directed_hausdorff -- directed Hausdorff distance between arrays
Predicates for checking the validity of distance matrices, both
condensed and redundant. Also contained in this module are functions
for computing the number of observations in a distance matrix.
.. autosummary::
:toctree: generated/
is_valid_dm -- checks for a valid distance matrix
is_valid_y -- checks for a valid condensed distance matrix
num_obs_dm -- # of observations in a distance matrix
num_obs_y -- # of observations in a condensed distance matrix
Distance functions between two numeric vectors ``u`` and ``v``. Computing
distances over a large collection of vectors is inefficient for these
functions. Use ``pdist`` for this purpose.
.. autosummary::
:toctree: generated/
braycurtis -- the Bray-Curtis distance.
canberra -- the Canberra distance.
chebyshev -- the Chebyshev distance.
cityblock -- the Manhattan distance.
correlation -- the Correlation distance.
cosine -- the Cosine distance.
euclidean -- the Euclidean distance.
jensenshannon -- the Jensen-Shannon distance.
mahalanobis -- the Mahalanobis distance.
minkowski -- the Minkowski distance.
seuclidean -- the normalized Euclidean distance.
sqeuclidean -- the squared Euclidean distance.
wminkowski -- (deprecated) alias of `minkowski`.
Distance functions between two boolean vectors (representing sets) ``u`` and
``v``. As in the case of numerical vectors, ``pdist`` is more efficient for
computing the distances between all pairs.
.. autosummary::
:toctree: generated/
dice -- the Dice dissimilarity.
hamming -- the Hamming distance.
jaccard -- the Jaccard distance.
kulsinski -- the Kulsinski distance.
rogerstanimoto -- the Rogers-Tanimoto dissimilarity.
russellrao -- the Russell-Rao dissimilarity.
sokalmichener -- the Sokal-Michener dissimilarity.
sokalsneath -- the Sokal-Sneath dissimilarity.
yule -- the Yule dissimilarity.
:func:`hamming` also operates over discrete numerical vectors.
"""
# Copyright (C) Damian Eads, 2007-2008. New BSD License.
__all__ = [
'braycurtis',
'canberra',
'cdist',
'chebyshev',
'cityblock',
'correlation',
'cosine',
'dice',
'directed_hausdorff',
'euclidean',
'hamming',
'is_valid_dm',
'is_valid_y',
'jaccard',
'jensenshannon',
'kulsinski',
'mahalanobis',
'matching',
'minkowski',
'num_obs_dm',
'num_obs_y',
'pdist',
'rogerstanimoto',
'russellrao',
'seuclidean',
'sokalmichener',
'sokalsneath',
'sqeuclidean',
'squareform',
'wminkowski',
'yule'
]
import warnings
import numpy as np
from functools import partial
from collections import namedtuple
from scipy._lib._util import _asarray_validated
from scipy._lib.deprecation import _deprecated
from . import _distance_wrap
from . import _hausdorff
from ..linalg import norm
from ..special import rel_entr
def _args_to_kwargs_xdist(args, kwargs, metric, func_name):
"""
Convert legacy positional arguments to keyword arguments for pdist/cdist.
"""
if not args:
return kwargs
if (callable(metric) and metric not in [
braycurtis, canberra, chebyshev, cityblock, correlation, cosine,
dice, euclidean, hamming, jaccard, jensenshannon, kulsinski,
mahalanobis, matching, minkowski, rogerstanimoto, russellrao,
seuclidean, sokalmichener, sokalsneath, sqeuclidean, yule,
wminkowski]):
raise TypeError('When using a custom metric, arguments must be passed '
'as keywords (i.e., ARGNAME=ARGVALUE)')
if func_name == 'pdist':
old_arg_names = ['p', 'w', 'V', 'VI']
else:
old_arg_names = ['p', 'V', 'VI', 'w']
num_args = len(args)
warnings.warn('%d metric parameters have been passed as positional. '
'This will raise an error in a future version. '
'Please pass arguments as keywords (i.e., ARGNAME=ARGVALUE)'
% num_args, DeprecationWarning)
if num_args > 4:
raise ValueError('Deprecated %s signature accepts only 4 '
'positional arguments (%s), %d given.'
% (func_name, ', '.join(old_arg_names), num_args))
for old_arg, arg in zip(old_arg_names, args):
if old_arg in kwargs:
raise TypeError('%s() got multiple values for argument %s'
% (func_name, old_arg))
kwargs[old_arg] = arg
return kwargs
def _copy_array_if_base_present(a):
"""Copy the array if its base points to a parent array."""
if a.base is not None:
return a.copy()
return a
def _correlation_cdist_wrap(XA, XB, dm, **kwargs):
XA = XA - XA.mean(axis=1, keepdims=True)
XB = XB - XB.mean(axis=1, keepdims=True)
_distance_wrap.cdist_cosine_double_wrap(XA, XB, dm, **kwargs)
def _correlation_pdist_wrap(X, dm, **kwargs):
X2 = X - X.mean(axis=1, keepdims=True)
_distance_wrap.pdist_cosine_double_wrap(X2, dm, **kwargs)
def _convert_to_type(X, out_type):
return np.ascontiguousarray(X, dtype=out_type)
def _filter_deprecated_kwargs(kwargs, args_blocklist):
# Filtering out old default keywords
for k in args_blocklist:
if k in kwargs:
del kwargs[k]
warnings.warn('Got unexpected kwarg %s. This will raise an error'
' in a future version.' % k, DeprecationWarning)
def _nbool_correspond_all(u, v, w=None):
if u.dtype == v.dtype == bool and w is None:
not_u = ~u
not_v = ~v
nff = (not_u & not_v).sum()
nft = (not_u & v).sum()
ntf = (u & not_v).sum()
ntt = (u & v).sum()
else:
dtype = np.find_common_type([int], [u.dtype, v.dtype])
u = u.astype(dtype)
v = v.astype(dtype)
not_u = 1.0 - u
not_v = 1.0 - v
if w is not None:
not_u = w * not_u
u = w * u
nff = (not_u * not_v).sum()
nft = (not_u * v).sum()
ntf = (u * not_v).sum()
ntt = (u * v).sum()
return (nff, nft, ntf, ntt)
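# Worked example (illustrative, not part of the original module): for boolean arrays
# u = [True, True, False] and v = [True, False, False], _nbool_correspond_all(u, v)
# returns (nff, nft, ntf, ntt) = (1, 0, 1, 1); these are the c_FF, c_FT, c_TF and
# c_TT counts referenced by the boolean dissimilarity docstrings below.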
def _nbool_correspond_ft_tf(u, v, w=None):
if u.dtype == v.dtype == bool and w is None:
not_u = ~u
not_v = ~v
nft = (not_u & v).sum()
ntf = (u & not_v).sum()
else:
dtype = np.find_common_type([int], [u.dtype, v.dtype])
u = u.astype(dtype)
v = v.astype(dtype)
not_u = 1.0 - u
not_v = 1.0 - v
if w is not None:
not_u = w * not_u
u = w * u
nft = (not_u * v).sum()
ntf = (u * not_v).sum()
return (nft, ntf)
def _validate_cdist_input(XA, XB, mA, mB, n, metric_name, **kwargs):
if metric_name is not None:
# get supported types
types = _METRICS[metric_name].types
# choose best type
typ = types[types.index(XA.dtype)] if XA.dtype in types else types[0]
# validate data
XA = _convert_to_type(XA, out_type=typ)
XB = _convert_to_type(XB, out_type=typ)
# validate kwargs
_validate_kwargs = _METRICS[metric_name].validator
if _validate_kwargs:
kwargs = _validate_kwargs(np.vstack([XA, XB]), mA + mB, n, **kwargs)
else:
typ = None
return XA, XB, typ, kwargs
def _validate_hamming_kwargs(X, m, n, **kwargs):
w = kwargs.get('w', np.ones((n,), dtype='double'))
if w.ndim != 1 or w.shape[0] != n:
raise ValueError("Weights must have same size as input vector. %d vs. %d" % (w.shape[0], n))
kwargs['w'] = _validate_weights(w)
return kwargs
def _validate_mahalanobis_kwargs(X, m, n, **kwargs):
VI = kwargs.pop('VI', None)
if VI is None:
if m <= n:
# There are fewer observations than the dimension of
# the observations.
raise ValueError("The number of observations (%d) is too "
"small; the covariance matrix is "
"singular. For observations with %d "
"dimensions, at least %d observations "
"are required." % (m, n, n + 1))
CV = np.atleast_2d(np.cov(X.astype(np.double).T))
VI = np.linalg.inv(CV).T.copy()
kwargs["VI"] = _convert_to_double(VI)
return kwargs
def _validate_minkowski_kwargs(X, m, n, **kwargs):
w = kwargs.pop('w', None)
if w is not None:
kwargs['w'] = _validate_weights(w)
if 'p' not in kwargs:
kwargs['p'] = 2.
else:
if kwargs['p'] < 1:
raise ValueError("p must be at least 1")
return kwargs
def _validate_pdist_input(X, m, n, metric_name, **kwargs):
if metric_name is not None:
# get supported types
types = _METRICS[metric_name].types
# choose best type
typ = types[types.index(X.dtype)] if X.dtype in types else types[0]
# validate data
X = _convert_to_type(X, out_type=typ)
# validate kwargs
_validate_kwargs = _METRICS[metric_name].validator
if _validate_kwargs:
kwargs = _validate_kwargs(X, m, n, **kwargs)
else:
typ = None
return X, typ, kwargs
def _validate_seuclidean_kwargs(X, m, n, **kwargs):
V = kwargs.pop('V', None)
if V is None:
V = np.var(X.astype(np.double), axis=0, ddof=1)
else:
V = np.asarray(V, order='c')
if len(V.shape) != 1:
raise ValueError('Variance vector V must '
'be one-dimensional.')
if V.shape[0] != n:
raise ValueError('Variance vector V must be of the same '
'dimension as the vectors on which the distances '
'are computed.')
kwargs['V'] = _convert_to_double(V)
return kwargs
def _validate_vector(u, dtype=None):
# XXX Is order='c' really necessary?
u = np.asarray(u, dtype=dtype, order='c').squeeze()
# Ensure values such as u=1 and u=[1] still return 1-D arrays.
u = np.atleast_1d(u)
if u.ndim > 1:
raise ValueError("Input vector should be 1-D.")
return u
def _validate_weights(w, dtype=np.double):
w = _validate_vector(w, dtype=dtype)
if np.any(w < 0):
raise ValueError("Input weights should be all non-negative")
return w
@_deprecated(
msg="'wminkowski' metric is deprecated and will be removed in"
" SciPy 1.8.0, use 'minkowski' instead.")
def _validate_wminkowski_kwargs(X, m, n, **kwargs):
w = kwargs.pop('w', None)
if w is None:
raise ValueError('weighted minkowski requires a weight '
'vector `w` to be given.')
kwargs['w'] = _validate_weights(w)
if 'p' not in kwargs:
kwargs['p'] = 2.
return kwargs
def directed_hausdorff(u, v, seed=0):
"""
Compute the directed Hausdorff distance between two N-D arrays.
Distances between pairs are calculated using a Euclidean metric.
Parameters
----------
u : (M,N) ndarray
Input array.
v : (O,N) ndarray
Input array.
seed : int or None
Local `numpy.random.RandomState` seed. Default is 0, which seeds the
random shuffling of `u` and `v` so that the result is reproducible.
Returns
-------
d : double
The directed Hausdorff distance between arrays `u` and `v`,
index_1 : int
index of point contributing to Hausdorff pair in `u`
index_2 : int
index of point contributing to Hausdorff pair in `v`
Raises
------
ValueError
An exception is thrown if `u` and `v` do not have
the same number of columns.
Notes
-----
Uses the early break technique and the random sampling approach
described by [1]_. Although worst-case performance is ``O(m * o)``
(as with the brute force algorithm), this is unlikely in practice
as the input data would have to force the algorithm to explore
every single point interaction, even after the input points have
been shuffled. The best case performance is O(m), which
is satisfied by selecting an inner loop distance that is less than
cmax and leads to an early break as often as possible. The authors
have formally shown that the average runtime is closer to O(m).
.. versionadded:: 0.19.0
References
----------
.. [1] A. A. Taha and A. Hanbury, "An efficient algorithm for
calculating the exact Hausdorff distance." IEEE Transactions On
Pattern Analysis And Machine Intelligence, vol. 37 pp. 2153-63,
2015.
See Also
--------
scipy.spatial.procrustes : Another similarity test for two data sets
Examples
--------
Find the directed Hausdorff distance between two 2-D arrays of
coordinates:
>>> from scipy.spatial.distance import directed_hausdorff
>>> u = np.array([(1.0, 0.0),
... (0.0, 1.0),
... (-1.0, 0.0),
... (0.0, -1.0)])
>>> v = np.array([(2.0, 0.0),
... (0.0, 2.0),
... (-2.0, 0.0),
... (0.0, -4.0)])
>>> directed_hausdorff(u, v)[0]
2.23606797749979
>>> directed_hausdorff(v, u)[0]
3.0
Find the general (symmetric) Hausdorff distance between two 2-D
arrays of coordinates:
>>> max(directed_hausdorff(u, v)[0], directed_hausdorff(v, u)[0])
3.0
Find the indices of the points that generate the Hausdorff distance
(the Hausdorff pair):
>>> directed_hausdorff(v, u)[1:]
(3, 3)
"""
u = np.asarray(u, dtype=np.float64, order='c')
v = np.asarray(v, dtype=np.float64, order='c')
if u.shape[1] != v.shape[1]:
raise ValueError('u and v need to have the same '
'number of columns')
result = _hausdorff.directed_hausdorff(u, v, seed)
return result
def minkowski(u, v, p=2, w=None):
"""
Compute the Minkowski distance between two 1-D arrays.
The Minkowski distance between 1-D arrays `u` and `v`,
is defined as
.. math::
{||u-v||}_p = (\\sum{|u_i - v_i|^p})^{1/p}.
\\left(\\sum{w_i(|(u_i - v_i)|^p)}\\right)^{1/p}.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
p : int
The order of the norm of the difference :math:`{||u-v||}_p`.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
minkowski : double
The Minkowski distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.minkowski([1, 0, 0], [0, 1, 0], 1)
2.0
>>> distance.minkowski([1, 0, 0], [0, 1, 0], 2)
1.4142135623730951
>>> distance.minkowski([1, 0, 0], [0, 1, 0], 3)
1.2599210498948732
>>> distance.minkowski([1, 1, 0], [0, 1, 0], 1)
1.0
>>> distance.minkowski([1, 1, 0], [0, 1, 0], 2)
1.0
>>> distance.minkowski([1, 1, 0], [0, 1, 0], 3)
1.0
"""
u = _validate_vector(u)
v = _validate_vector(v)
if p < 1:
raise ValueError("p must be at least 1")
u_v = u - v
if w is not None:
w = _validate_weights(w)
if p == 1:
root_w = w
elif p == 2:
# better precision and speed
root_w = np.sqrt(w)
elif p == np.inf:
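# as p -> inf, the weighted p-norm tends to the maximum over entries
# with a nonzero weight, so the weights collapse to a boolean mask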
root_w = (w != 0)
else:
root_w = np.power(w, 1/p)
u_v = root_w * u_v
dist = norm(u_v, ord=p)
return dist
def wminkowski(u, v, p, w):
"""
Compute the weighted Minkowski distance between two 1-D arrays.
The weighted Minkowski distance between `u` and `v`, defined as
.. math::
\\left(\\sum{(|w_i (u_i - v_i)|^p)}\\right)^{1/p}.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
p : int
The order of the norm of the difference :math:`{||u-v||}_p`.
w : (N,) array_like
The weight vector.
Returns
-------
wminkowski : double
The weighted Minkowski distance between vectors `u` and `v`.
Notes
-----
`wminkowski` is deprecated and will be removed in SciPy 1.8.0.
Use `minkowski` with the ``w`` argument instead.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.wminkowski([1, 0, 0], [0, 1, 0], 1, np.ones(3))
2.0
>>> distance.wminkowski([1, 0, 0], [0, 1, 0], 2, np.ones(3))
1.4142135623730951
>>> distance.wminkowski([1, 0, 0], [0, 1, 0], 3, np.ones(3))
1.2599210498948732
>>> distance.wminkowski([1, 1, 0], [0, 1, 0], 1, np.ones(3))
1.0
>>> distance.wminkowski([1, 1, 0], [0, 1, 0], 2, np.ones(3))
1.0
>>> distance.wminkowski([1, 1, 0], [0, 1, 0], 3, np.ones(3))
1.0
"""
warnings.warn(
message="scipy.distance.wminkowski is deprecated and will be removed "
"in SciPy 1.8.0, use scipy.distance.minkowski instead.",
category=DeprecationWarning)
w = _validate_weights(w)
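# the deprecated weighted Minkowski applied weights before exponentiation,
# i.e. (sum |w_i (u_i - v_i)|^p)^(1/p), which equals minkowski() with weights w**p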
return minkowski(u, v, p=p, w=w**p)
def euclidean(u, v, w=None):
"""
Computes the Euclidean distance between two 1-D arrays.
The Euclidean distance between 1-D arrays `u` and `v`, is defined as
.. math::
{||u-v||}_2
\\left(\\sum{(w_i |(u_i - v_i)|^2)}\\right)^{1/2}
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
euclidean : double
The Euclidean distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.euclidean([1, 0, 0], [0, 1, 0])
1.4142135623730951
>>> distance.euclidean([1, 1, 0], [0, 1, 0])
1.0
"""
return minkowski(u, v, p=2, w=w)
def sqeuclidean(u, v, w=None):
"""
Compute the squared Euclidean distance between two 1-D arrays.
The squared Euclidean distance between `u` and `v` is defined as
.. math::
{||u-v||}_2^2
\\left(\\sum{(w_i |(u_i - v_i)|^2)}\\right)
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
sqeuclidean : double
The squared Euclidean distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.sqeuclidean([1, 0, 0], [0, 1, 0])
2.0
>>> distance.sqeuclidean([1, 1, 0], [0, 1, 0])
1.0
"""
# Preserve float dtypes, but convert everything else to np.float64
# for stability.
utype, vtype = None, None
if not (hasattr(u, "dtype") and np.issubdtype(u.dtype, np.inexact)):
utype = np.float64
if not (hasattr(v, "dtype") and np.issubdtype(v.dtype, np.inexact)):
vtype = np.float64
u = _validate_vector(u, dtype=utype)
v = _validate_vector(v, dtype=vtype)
u_v = u - v
u_v_w = u_v # only want weights applied once
if w is not None:
w = _validate_weights(w)
u_v_w = w * u_v
return np.dot(u_v, u_v_w)
def correlation(u, v, w=None, centered=True):
"""
Compute the correlation distance between two 1-D arrays.
The correlation distance between `u` and `v`, is
defined as
.. math::
1 - \\frac{(u - \\bar{u}) \\cdot (v - \\bar{v})}
{{||(u - \\bar{u})||}_2 {||(v - \\bar{v})||}_2}
where :math:`\\bar{u}` is the mean of the elements of `u`
and :math:`x \\cdot y` is the dot product of :math:`x` and :math:`y`.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
correlation : double
The correlation distance between 1-D array `u` and `v`.
"""
u = _validate_vector(u)
v = _validate_vector(v)
if w is not None:
w = _validate_weights(w)
if centered:
umu = np.average(u, weights=w)
vmu = np.average(v, weights=w)
u = u - umu
v = v - vmu
uv = np.average(u * v, weights=w)
uu = np.average(np.square(u), weights=w)
vv = np.average(np.square(v), weights=w)
dist = 1.0 - uv / np.sqrt(uu * vv)
# Return absolute value to avoid small negative value due to rounding
return np.abs(dist)
def cosine(u, v, w=None):
"""
Compute the Cosine distance between 1-D arrays.
The Cosine distance between `u` and `v`, is defined as
.. math::
1 - \\frac{u \\cdot v}
{||u||_2 ||v||_2}.
where :math:`u \\cdot v` is the dot product of :math:`u` and
:math:`v`.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
cosine : double
The Cosine distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.cosine([1, 0, 0], [0, 1, 0])
1.0
>>> distance.cosine([100, 0, 0], [0, 1, 0])
1.0
>>> distance.cosine([1, 1, 0], [0, 1, 0])
0.29289321881345254
"""
# cosine distance is also referred to as 'uncentered correlation',
# or 'reflective correlation'
return correlation(u, v, w=w, centered=False)
def hamming(u, v, w=None):
"""
Compute the Hamming distance between two 1-D arrays.
The Hamming distance between 1-D arrays `u` and `v`, is simply the
proportion of disagreeing components in `u` and `v`. If `u` and `v` are
boolean vectors, the Hamming distance is
.. math::
\\frac{c_{01} + c_{10}}{n}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n`.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
hamming : double
The Hamming distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.hamming([1, 0, 0], [0, 1, 0])
0.66666666666666663
>>> distance.hamming([1, 0, 0], [1, 1, 0])
0.33333333333333331
>>> distance.hamming([1, 0, 0], [2, 0, 0])
0.33333333333333331
>>> distance.hamming([1, 0, 0], [3, 0, 0])
0.33333333333333331
"""
u = _validate_vector(u)
v = _validate_vector(v)
if u.shape != v.shape:
raise ValueError('The 1d arrays must have equal lengths.')
u_ne_v = u != v
if w is not None:
w = _validate_weights(w)
return np.average(u_ne_v, weights=w)
def jaccard(u, v, w=None):
"""
Compute the Jaccard-Needham dissimilarity between two boolean 1-D arrays.
The Jaccard-Needham dissimilarity between 1-D boolean arrays `u` and `v`,
is defined as
.. math::
\\frac{c_{TF} + c_{FT}}
{c_{TT} + c_{FT} + c_{TF}}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n`.
Parameters
----------
u : (N,) array_like, bool
Input array.
v : (N,) array_like, bool
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
jaccard : double
The Jaccard distance between vectors `u` and `v`.
Notes
-----
When both `u` and `v` lead to a ``0/0`` division, i.e. there is no overlap
between the items in the vectors, the returned distance is 0. See the
Wikipedia page on the Jaccard index [1]_, and this paper [2]_.
.. versionchanged:: 1.2.0
Previously, when `u` and `v` lead to a `0/0` division, the function
would return NaN. This was changed to return 0 instead.
References
----------
.. [1] https://en.wikipedia.org/wiki/Jaccard_index
.. [2] S. Kosub, "A note on the triangle inequality for the Jaccard
distance", 2016, :arxiv:`1612.02696`
Examples
--------
>>> from scipy.spatial import distance
>>> distance.jaccard([1, 0, 0], [0, 1, 0])
1.0
>>> distance.jaccard([1, 0, 0], [1, 1, 0])
0.5
>>> distance.jaccard([1, 0, 0], [1, 2, 0])
0.5
>>> distance.jaccard([1, 0, 0], [1, 1, 1])
0.66666666666666663
"""
u = _validate_vector(u)
v = _validate_vector(v)
nonzero = np.bitwise_or(u != 0, v != 0)
unequal_nonzero = np.bitwise_and((u != v), nonzero)
if w is not None:
w = _validate_weights(w)
nonzero = w * nonzero
unequal_nonzero = w * unequal_nonzero
a = np.double(unequal_nonzero.sum())
b = np.double(nonzero.sum())
return (a / b) if b != 0 else 0
def kulsinski(u, v, w=None):
"""
Compute the Kulsinski dissimilarity between two boolean 1-D arrays.
The Kulsinski dissimilarity between two boolean 1-D arrays `u` and `v`,
is defined as
.. math::
\\frac{c_{TF} + c_{FT} - c_{TT} + n}
{c_{FT} + c_{TF} + n}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n`.
Parameters
----------
u : (N,) array_like, bool
Input array.
v : (N,) array_like, bool
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
kulsinski : double
The Kulsinski distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.kulsinski([1, 0, 0], [0, 1, 0])
1.0
>>> distance.kulsinski([1, 0, 0], [1, 1, 0])
0.75
>>> distance.kulsinski([1, 0, 0], [2, 1, 0])
0.33333333333333331
>>> distance.kulsinski([1, 0, 0], [3, 1, 0])
-0.5
"""
u = _validate_vector(u)
v = _validate_vector(v)
if w is None:
n = float(len(u))
else:
w = _validate_weights(w)
n = w.sum()
(nff, nft, ntf, ntt) = _nbool_correspond_all(u, v, w=w)
return (ntf + nft - ntt + n) / (ntf + nft + n)
def seuclidean(u, v, V):
"""
Return the standardized Euclidean distance between two 1-D arrays.
The standardized Euclidean distance between `u` and `v`.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
V : (N,) array_like
`V` is a 1-D array of component variances. It is usually computed
over a larger collection of vectors.
Returns
-------
seuclidean : double
The standardized Euclidean distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.seuclidean([1, 0, 0], [0, 1, 0], [0.1, 0.1, 0.1])
4.4721359549995796
>>> distance.seuclidean([1, 0, 0], [0, 1, 0], [1, 0.1, 0.1])
3.3166247903553998
>>> distance.seuclidean([1, 0, 0], [0, 1, 0], [10, 0.1, 0.1])
3.1780497164141406
"""
u = _validate_vector(u)
v = _validate_vector(v)
V = _validate_vector(V, dtype=np.float64)
if V.shape[0] != u.shape[0] or u.shape[0] != v.shape[0]:
raise TypeError('V must be a 1-D array of the same dimension '
'as u and v.')
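# the standardized Euclidean distance is a weighted Euclidean distance
# with weights equal to the reciprocals of the component variances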
return euclidean(u, v, w=1/V)
def cityblock(u, v, w=None):
"""
Compute the City Block (Manhattan) distance.
Computes the Manhattan distance between two 1-D arrays `u` and `v`,
which is defined as
.. math::
\\sum_i {\\left| u_i - v_i \\right|}.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
cityblock : double
The City Block (Manhattan) distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.cityblock([1, 0, 0], [0, 1, 0])
2
>>> distance.cityblock([1, 0, 0], [0, 2, 0])
3
>>> distance.cityblock([1, 0, 0], [1, 1, 0])
1
"""
u = _validate_vector(u)
v = _validate_vector(v)
l1_diff = abs(u - v)
if w is not None:
w = _validate_weights(w)
l1_diff = w * l1_diff
return l1_diff.sum()
def mahalanobis(u, v, VI):
"""
Compute the Mahalanobis distance between two 1-D arrays.
The Mahalanobis distance between 1-D arrays `u` and `v`, is defined as
.. math::
\\sqrt{ (u-v) V^{-1} (u-v)^T }
where ``V`` is the covariance matrix. Note that the argument `VI`
is the inverse of ``V``.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
VI : ndarray
The inverse of the covariance matrix.
Returns
-------
mahalanobis : double
The Mahalanobis distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> iv = [[1, 0.5, 0.5], [0.5, 1, 0.5], [0.5, 0.5, 1]]
>>> distance.mahalanobis([1, 0, 0], [0, 1, 0], iv)
1.0
>>> distance.mahalanobis([0, 2, 0], [0, 1, 0], iv)
1.0
>>> distance.mahalanobis([2, 0, 0], [0, 1, 0], iv)
1.7320508075688772
"""
u = _validate_vector(u)
v = _validate_vector(v)
VI = np.atleast_2d(VI)
delta = u - v
m = np.dot(np.dot(delta, VI), delta)
return np.sqrt(m)
def chebyshev(u, v, w=None):
"""
Compute the Chebyshev distance.
Computes the Chebyshev distance between two 1-D arrays `u` and `v`,
which is defined as
.. math::
\\max_i {|u_i-v_i|}.
Parameters
----------
u : (N,) array_like
Input vector.
v : (N,) array_like
Input vector.
w : (N,) array_like, optional
Only the distinction between zero and nonzero weights matters: entries
of `u` and `v` with zero weight are excluded before taking the maximum.
Included mainly for API consistency with the other metrics.
Returns
-------
chebyshev : double
The Chebyshev distance between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.chebyshev([1, 0, 0], [0, 1, 0])
1
>>> distance.chebyshev([1, 1, 0], [0, 1, 0])
1
"""
u = _validate_vector(u)
v = _validate_vector(v)
if w is not None:
w = _validate_weights(w)
has_weight = w > 0
if has_weight.sum() < w.size:
u = u[has_weight]
v = v[has_weight]
return max(abs(u - v))
def braycurtis(u, v, w=None):
"""
Compute the Bray-Curtis distance between two 1-D arrays.
Bray-Curtis distance is defined as
.. math::
\\sum{|u_i-v_i|} / \\sum{|u_i+v_i|}
The Bray-Curtis distance is in the range [0, 1] if all coordinates are
positive, and is undefined if the inputs are of length zero.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
braycurtis : double
The Bray-Curtis distance between 1-D arrays `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.braycurtis([1, 0, 0], [0, 1, 0])
1.0
>>> distance.braycurtis([1, 1, 0], [0, 1, 0])
0.33333333333333331
"""
u = _validate_vector(u)
v = _validate_vector(v, dtype=np.float64)
l1_diff = abs(u - v)
l1_sum = abs(u + v)
if w is not None:
w = _validate_weights(w)
l1_diff = w * l1_diff
l1_sum = w * l1_sum
return l1_diff.sum() / l1_sum.sum()
def canberra(u, v, w=None):
"""
Compute the Canberra distance between two 1-D arrays.
The Canberra distance is defined as
.. math::
d(u,v) = \\sum_i \\frac{|u_i-v_i|}
{|u_i|+|v_i|}.
Parameters
----------
u : (N,) array_like
Input array.
v : (N,) array_like
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
canberra : double
The Canberra distance between vectors `u` and `v`.
Notes
-----
When `u[i]` and `v[i]` are 0 for a given i, the fraction 0/0 = 0 is
used in the calculation.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.canberra([1, 0, 0], [0, 1, 0])
2.0
>>> distance.canberra([1, 1, 0], [0, 1, 0])
1.0
"""
u = _validate_vector(u)
v = _validate_vector(v, dtype=np.float64)
if w is not None:
w = _validate_weights(w)
with np.errstate(invalid='ignore'):
abs_uv = abs(u - v)
abs_u = abs(u)
abs_v = abs(v)
d = abs_uv / (abs_u + abs_v)
if w is not None:
d = w * d
d = np.nansum(d)
return d
def jensenshannon(p, q, base=None):
"""
Compute the Jensen-Shannon distance (metric) between
two 1-D probability arrays. This is the square root
of the Jensen-Shannon divergence.
The Jensen-Shannon distance between two probability
vectors `p` and `q` is defined as,
.. math::
\\sqrt{\\frac{D(p \\parallel m) + D(q \\parallel m)}{2}}
where :math:`m` is the pointwise mean of :math:`p` and :math:`q`
and :math:`D` is the Kullback-Leibler divergence.
This routine will normalize `p` and `q` if they don't sum to 1.0.
Parameters
----------
p : (N,) array_like
left probability vector
q : (N,) array_like
right probability vector
base : double, optional
the base of the logarithm used to compute the output
if not given, then the routine uses the default base of
scipy.stats.entropy.
Returns
-------
js : double
The Jensen-Shannon distance between `p` and `q`
.. versionadded:: 1.2.0
Examples
--------
>>> from scipy.spatial import distance
>>> distance.jensenshannon([1.0, 0.0, 0.0], [0.0, 1.0, 0.0], 2.0)
1.0
>>> distance.jensenshannon([1.0, 0.0], [0.5, 0.5])
0.46450140402245893
>>> distance.jensenshannon([1.0, 0.0, 0.0], [1.0, 0.0, 0.0])
0.0
"""
p = np.asarray(p)
q = np.asarray(q)
p = p / np.sum(p, axis=0)
q = q / np.sum(q, axis=0)
m = (p + q) / 2.0
left = rel_entr(p, m)
right = rel_entr(q, m)
js = np.sum(left, axis=0) + np.sum(right, axis=0)
if base is not None:
js /= np.log(base)
return np.sqrt(js / 2.0)
def yule(u, v, w=None):
"""
Compute the Yule dissimilarity between two boolean 1-D arrays.
The Yule dissimilarity is defined as
.. math::
\\frac{R}{c_{TT} * c_{FF} + \\frac{R}{2}}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n` and :math:`R = 2.0 * c_{TF} * c_{FT}`.
Parameters
----------
u : (N,) array_like, bool
Input array.
v : (N,) array_like, bool
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
yule : double
The Yule dissimilarity between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.yule([1, 0, 0], [0, 1, 0])
2.0
>>> distance.yule([1, 1, 0], [0, 1, 0])
0.0
"""
u = _validate_vector(u)
v = _validate_vector(v)
if w is not None:
w = _validate_weights(w)
(nff, nft, ntf, ntt) = _nbool_correspond_all(u, v, w=w)
return float(2.0 * ntf * nft / np.array(ntt * nff + ntf * nft))
@np.deprecate(message="spatial.distance.matching is deprecated in scipy 1.0.0; "
"use spatial.distance.hamming instead.")
def matching(u, v, w=None):
"""
Compute the Hamming distance between two boolean 1-D arrays.
This is a deprecated synonym for :func:`hamming`.
"""
return hamming(u, v, w=w)
def dice(u, v, w=None):
"""
Compute the Dice dissimilarity between two boolean 1-D arrays.
The Dice dissimilarity between `u` and `v`, is
.. math::
\\frac{c_{TF} + c_{FT}}
{2c_{TT} + c_{FT} + c_{TF}}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n`.
Parameters
----------
u : (N,) ndarray, bool
Input 1-D array.
v : (N,) ndarray, bool
Input 1-D array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
dice : double
The Dice dissimilarity between 1-D arrays `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.dice([1, 0, 0], [0, 1, 0])
1.0
>>> distance.dice([1, 0, 0], [1, 1, 0])
0.3333333333333333
>>> distance.dice([1, 0, 0], [2, 0, 0])
-0.3333333333333333
"""
u = _validate_vector(u)
v = _validate_vector(v)
if w is not None:
w = _validate_weights(w)
if u.dtype == v.dtype == bool and w is None:
ntt = (u & v).sum()
else:
dtype = np.find_common_type([int], [u.dtype, v.dtype])
u = u.astype(dtype)
v = v.astype(dtype)
if w is None:
ntt = (u * v).sum()
else:
ntt = (u * v * w).sum()
(nft, ntf) = _nbool_correspond_ft_tf(u, v, w=w)
return float((ntf + nft) / np.array(2.0 * ntt + ntf + nft))
def rogerstanimoto(u, v, w=None):
"""
Compute the Rogers-Tanimoto dissimilarity between two boolean 1-D arrays.
The Rogers-Tanimoto dissimilarity between two boolean 1-D arrays
`u` and `v`, is defined as
.. math::
\\frac{R}
{c_{TT} + c_{FF} + R}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n` and :math:`R = 2(c_{TF} + c_{FT})`.
Parameters
----------
u : (N,) array_like, bool
Input array.
v : (N,) array_like, bool
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
rogerstanimoto : double
The Rogers-Tanimoto dissimilarity between vectors
`u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.rogerstanimoto([1, 0, 0], [0, 1, 0])
0.8
>>> distance.rogerstanimoto([1, 0, 0], [1, 1, 0])
0.5
>>> distance.rogerstanimoto([1, 0, 0], [2, 0, 0])
-1.0
"""
u = _validate_vector(u)
v = _validate_vector(v)
if w is not None:
w = _validate_weights(w)
(nff, nft, ntf, ntt) = _nbool_correspond_all(u, v, w=w)
return float(2.0 * (ntf + nft)) / float(ntt + nff + (2.0 * (ntf + nft)))
def russellrao(u, v, w=None):
"""
Compute the Russell-Rao dissimilarity between two boolean 1-D arrays.
The Russell-Rao dissimilarity between two boolean 1-D arrays, `u` and
`v`, is defined as
.. math::
\\frac{n - c_{TT}}
{n}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n`.
Parameters
----------
u : (N,) array_like, bool
Input array.
v : (N,) array_like, bool
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
russellrao : double
The Russell-Rao dissimilarity between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.russellrao([1, 0, 0], [0, 1, 0])
1.0
>>> distance.russellrao([1, 0, 0], [1, 1, 0])
0.6666666666666666
>>> distance.russellrao([1, 0, 0], [2, 0, 0])
0.3333333333333333
"""
u = _validate_vector(u)
v = _validate_vector(v)
if u.dtype == v.dtype == bool and w is None:
ntt = (u & v).sum()
n = float(len(u))
elif w is None:
ntt = (u * v).sum()
n = float(len(u))
else:
w = _validate_weights(w)
ntt = (u * v * w).sum()
n = w.sum()
return float(n - ntt) / n
def sokalmichener(u, v, w=None):
"""
Compute the Sokal-Michener dissimilarity between two boolean 1-D arrays.
The Sokal-Michener dissimilarity between boolean 1-D arrays `u` and `v`,
is defined as
.. math::
\\frac{R}
{S + R}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n`, :math:`R = 2 * (c_{TF} + c_{FT})` and
:math:`S = c_{FF} + c_{TT}`.
Parameters
----------
u : (N,) array_like, bool
Input array.
v : (N,) array_like, bool
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
sokalmichener : double
The Sokal-Michener dissimilarity between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.sokalmichener([1, 0, 0], [0, 1, 0])
0.8
>>> distance.sokalmichener([1, 0, 0], [1, 1, 0])
0.5
>>> distance.sokalmichener([1, 0, 0], [2, 0, 0])
-1.0
"""
u = _validate_vector(u)
v = _validate_vector(v)
if u.dtype == v.dtype == bool and w is None:
ntt = (u & v).sum()
nff = (~u & ~v).sum()
elif w is None:
ntt = (u * v).sum()
nff = ((1.0 - u) * (1.0 - v)).sum()
else:
w = _validate_weights(w)
ntt = (u * v * w).sum()
nff = ((1.0 - u) * (1.0 - v) * w).sum()
(nft, ntf) = _nbool_correspond_ft_tf(u, v)
return float(2.0 * (ntf + nft)) / float(ntt + nff + 2.0 * (ntf + nft))
def sokalsneath(u, v, w=None):
"""
Compute the Sokal-Sneath dissimilarity between two boolean 1-D arrays.
The Sokal-Sneath dissimilarity between `u` and `v`,
.. math::
\\frac{R}
{c_{TT} + R}
where :math:`c_{ij}` is the number of occurrences of
:math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
:math:`k < n` and :math:`R = 2(c_{TF} + c_{FT})`.
Parameters
----------
u : (N,) array_like, bool
Input array.
v : (N,) array_like, bool
Input array.
w : (N,) array_like, optional
The weights for each value in `u` and `v`. Default is None,
which gives each value a weight of 1.0
Returns
-------
sokalsneath : double
The Sokal-Sneath dissimilarity between vectors `u` and `v`.
Examples
--------
>>> from scipy.spatial import distance
>>> distance.sokalsneath([1, 0, 0], [0, 1, 0])
1.0
>>> distance.sokalsneath([1, 0, 0], [1, 1, 0])
0.66666666666666663
>>> distance.sokalsneath([1, 0, 0], [2, 1, 0])
0.0
>>> distance.sokalsneath([1, 0, 0], [3, 1, 0])
-2.0
"""
u = _validate_vector(u)
v = _validate_vector(v)
if u.dtype == v.dtype == bool and w is None:
ntt = (u & v).sum()
elif w is None:
ntt = (u * v).sum()
else:
w = _validate_weights(w)
ntt = (u * v * w).sum()
(nft, ntf) = _nbool_correspond_ft_tf(u, v, w=w)
denom = np.array(ntt + 2.0 * (ntf + nft))
if not denom.any():
raise ValueError('Sokal-Sneath dissimilarity is not defined for '
'vectors that are entirely false.')
return float(2.0 * (ntf + nft)) / denom
_convert_to_double = partial(_convert_to_type, out_type=np.double)
_convert_to_bool = partial(_convert_to_type, out_type=bool)
# adding python-only wrappers to _distance_wrap module
_distance_wrap.pdist_correlation_double_wrap = _correlation_pdist_wrap
_distance_wrap.cdist_correlation_double_wrap = _correlation_cdist_wrap
# Registry of implemented metrics:
# Dictionary with the following structure:
# {
# metric_name : MetricInfo(aka, types=[double], validator=None)
# }
#
# Where:
# `metric_name` must be equal to python metric name
#
# MetricInfo is a named tuple with fields:
# 'aka' : [list of aliases],
#
# 'validator': f(X, m, n, **kwargs) # function that check kwargs and
# # computes default values.
#
# 'types': [list of supported types], # X (pdist) and XA (cdist) are used to
# # choose the type. if there is no match
# # the first type is used. Default double
# }
MetricInfo = namedtuple("MetricInfo", 'aka types validator ')
MetricInfo.__new__.__defaults__ = (['double'], None)
_METRICS = {
'braycurtis': MetricInfo(aka=['braycurtis']),
'canberra': MetricInfo(aka=['canberra']),
'chebyshev': MetricInfo(aka=['chebychev', 'chebyshev', 'cheby', 'cheb', 'ch']),
'cityblock': MetricInfo(aka=['cityblock', 'cblock', 'cb', 'c']),
'correlation': MetricInfo(aka=['correlation', 'co']),
'cosine': MetricInfo(aka=['cosine', 'cos']),
'dice': MetricInfo(aka=['dice'], types=['bool']),
'euclidean': MetricInfo(aka=['euclidean', 'euclid', 'eu', 'e']),
'hamming': MetricInfo(aka=['matching', 'hamming', 'hamm', 'ha', 'h'],
types=['double', 'bool'],
validator=_validate_hamming_kwargs),
'jaccard': MetricInfo(aka=['jaccard', 'jacc', 'ja', 'j'],
types=['double', 'bool']),
'jensenshannon': MetricInfo(aka=['jensenshannon', 'js'],
types=['double']),
'kulsinski': MetricInfo(aka=['kulsinski'], types=['bool']),
'mahalanobis': MetricInfo(aka=['mahalanobis', 'mahal', 'mah'],
validator=_validate_mahalanobis_kwargs),
'minkowski': MetricInfo(aka=['minkowski', 'mi', 'm', 'pnorm'],
validator=_validate_minkowski_kwargs),
'rogerstanimoto': MetricInfo(aka=['rogerstanimoto'], types=['bool']),
'russellrao': MetricInfo(aka=['russellrao'], types=['bool']),
'seuclidean': MetricInfo(aka=['seuclidean', 'se', 's'],
validator=_validate_seuclidean_kwargs),
'sokalmichener': MetricInfo(aka=['sokalmichener'], types=['bool']),
'sokalsneath': MetricInfo(aka=['sokalsneath'], types=['bool']),
'sqeuclidean': MetricInfo(aka=['sqeuclidean', 'sqe', 'sqeuclid']),
'wminkowski': MetricInfo(aka=['wminkowski', 'wmi', 'wm', 'wpnorm'],
validator=_validate_wminkowski_kwargs),
'yule': MetricInfo(aka=['yule'], types=['bool']),
}
_METRIC_ALIAS = dict((alias, name)
for name, info in _METRICS.items()
for alias in info.aka)
_METRICS_NAMES = list(_METRICS.keys())
_TEST_METRICS = {'test_' + name: globals()[name] for name in _METRICS.keys()}
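# Illustrative note (not part of the original module): the alias map built
# above lets callers pass shorthand metric names, e.g.
#   _METRIC_ALIAS['cos']       -> 'cosine'
#   _METRIC_ALIAS['chebychev'] -> 'chebyshev'
# while _METRICS['dice'].types == ['bool'] marks dice as a boolean-only metric.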
# C implementations with weighted versions
_C_WEIGHTED_METRICS = {
'chebyshev': 'weighted_chebyshev',
'minkowski': 'weighted_minkowski',
'wminkowski': 'old_weighted_minkowski',
}
def _select_weighted_metric(mstr, kwargs, out):
kwargs = dict(kwargs)
if "w" in kwargs and kwargs["w"] is None:
# w=None is the same as omitting it
kwargs.pop("w")
if mstr.startswith("test_") or mstr in (
_METRICS['hamming'].aka +
_METRICS['wminkowski'].aka +
_METRICS['minkowski'].aka):
# These support weights
pass
elif "w" in kwargs:
if (mstr in _METRICS['seuclidean'].aka or
mstr in _METRICS['mahalanobis'].aka):
raise ValueError("metric %s incompatible with weights" % mstr)
# XXX: C-versions do not support weights
# need to use python version for weighting
kwargs['out'] = out
mstr = "test_%s" % mstr
return mstr, kwargs
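# Illustrative sketch (not part of the original source): for a metric whose C
# kernel has no weighted variant, _select_weighted_metric reroutes the call to
# the pure-Python 'test_' fallback and forwards the output buffer, e.g.
#   _select_weighted_metric('cosine', {'w': np.ones(3)}, None)
#       -> ('test_cosine', {'w': array([1., 1., 1.]), 'out': None})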
def pdist(X, metric='euclidean', *args, **kwargs):
"""
Pairwise distances between observations in n-dimensional space.
See Notes for common calling conventions.
Parameters
----------
X : ndarray
An m by n array of m original observations in an
n-dimensional space.
metric : str or function, optional
The distance metric to use. The distance function can
be 'braycurtis', 'canberra', 'chebyshev', 'cityblock',
'correlation', 'cosine', 'dice', 'euclidean', 'hamming',
'jaccard', 'jensenshannon', 'kulsinski', 'mahalanobis', 'matching',
'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean',
'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule'.
*args : tuple. Deprecated.
Additional arguments should be passed as keyword arguments
**kwargs : dict, optional
Extra arguments to `metric`: refer to each metric documentation for a
list of all possible arguments.
Some possible arguments:
p : scalar
The p-norm to apply for Minkowski, weighted and unweighted.
Default: 2.
w : ndarray
The weight vector for metrics that support weights (e.g., Minkowski).
V : ndarray
The variance vector for standardized Euclidean.
Default: var(X, axis=0, ddof=1)
VI : ndarray
The inverse of the covariance matrix for Mahalanobis.
Default: inv(cov(X.T)).T
out : ndarray.
The output array
If not None, condensed distance matrix Y is stored in this array.
Note: metric independent, it will become a regular keyword arg in a
future scipy version
Returns
-------
Y : ndarray
Returns a condensed distance matrix Y. For each :math:`i` and :math:`j`
(where :math:`i<j<m`), where m is the number of original observations,
the metric ``dist(u=X[i], v=X[j])`` is computed and stored in entry
``m * i + j - ((i + 2) * (i + 1)) // 2``.
See Also
--------
squareform : converts between condensed distance matrices and
square distance matrices.
Notes
-----
See ``squareform`` for information on how to calculate the index of
this entry or to convert the condensed distance matrix to a
redundant square matrix.
The following are common calling conventions.
1. ``Y = pdist(X, 'euclidean')``
Computes the distance between m points using Euclidean distance
(2-norm) as the distance metric between the points. The points
are arranged as m n-dimensional row vectors in the matrix X.
2. ``Y = pdist(X, 'minkowski', p=2.)``
Computes the distances using the Minkowski distance
:math:`||u-v||_p` (p-norm) where :math:`p \\geq 1`.
3. ``Y = pdist(X, 'cityblock')``
Computes the city block or Manhattan distance between the
points.
4. ``Y = pdist(X, 'seuclidean', V=None)``
Computes the standardized Euclidean distance. The standardized
Euclidean distance between two n-vectors ``u`` and ``v`` is
.. math::
\\sqrt{\\sum {(u_i-v_i)^2 / V[x_i]}}
V is the variance vector; V[i] is the variance computed over all
the i'th components of the points. If not passed, it is
automatically computed.
5. ``Y = pdist(X, 'sqeuclidean')``
Computes the squared Euclidean distance :math:`||u-v||_2^2` between
the vectors.
6. ``Y = pdist(X, 'cosine')``
Computes the cosine distance between vectors u and v,
.. math::
1 - \\frac{u \\cdot v}
{{||u||}_2 {||v||}_2}
where :math:`||*||_2` is the 2-norm of its argument ``*``, and
:math:`u \\cdot v` is the dot product of ``u`` and ``v``.
7. ``Y = pdist(X, 'correlation')``
Computes the correlation distance between vectors u and v. This is
.. math::
1 - \\frac{(u - \\bar{u}) \\cdot (v - \\bar{v})}
{{||(u - \\bar{u})||}_2 {||(v - \\bar{v})||}_2}
where :math:`\\bar{v}` is the mean of the elements of vector v,
and :math:`x \\cdot y` is the dot product of :math:`x` and :math:`y`.
8. ``Y = pdist(X, 'hamming')``
Computes the normalized Hamming distance, or the proportion of
those vector elements between two n-vectors ``u`` and ``v``
which disagree. To save memory, the matrix ``X`` can be of type
boolean.
9. ``Y = pdist(X, 'jaccard')``
Computes the Jaccard distance between the points. Given two
vectors, ``u`` and ``v``, the Jaccard distance is the
proportion of those elements ``u[i]`` and ``v[i]`` that
disagree.
10. ``Y = pdist(X, 'chebyshev')``
Computes the Chebyshev distance between the points. The
Chebyshev distance between two n-vectors ``u`` and ``v`` is the
maximum norm-1 distance between their respective elements. More
precisely, the distance is given by
.. math::
d(u,v) = \\max_i {|u_i-v_i|}
11. ``Y = pdist(X, 'canberra')``
Computes the Canberra distance between the points. The
Canberra distance between two points ``u`` and ``v`` is
.. math::
d(u,v) = \\sum_i \\frac{|u_i-v_i|}
{|u_i|+|v_i|}
12. ``Y = pdist(X, 'braycurtis')``
Computes the Bray-Curtis distance between the points. The
Bray-Curtis distance between two points ``u`` and ``v`` is
.. math::
d(u,v) = \\frac{\\sum_i {|u_i-v_i|}}
{\\sum_i {|u_i+v_i|}}
13. ``Y = pdist(X, 'mahalanobis', VI=None)``
Computes the Mahalanobis distance between the points. The
Mahalanobis distance between two points ``u`` and ``v`` is
:math:`\\sqrt{(u-v)(1/V)(u-v)^T}` where :math:`(1/V)` (the ``VI``
variable) is the inverse covariance. If ``VI`` is not None,
``VI`` will be used as the inverse covariance matrix.
14. ``Y = pdist(X, 'yule')``
Computes the Yule distance between each pair of boolean
vectors. (see yule function documentation)
15. ``Y = pdist(X, 'matching')``
Synonym for 'hamming'.
16. ``Y = pdist(X, 'dice')``
Computes the Dice distance between each pair of boolean
vectors. (see dice function documentation)
17. ``Y = pdist(X, 'kulsinski')``
Computes the Kulsinski distance between each pair of
boolean vectors. (see kulsinski function documentation)
18. ``Y = pdist(X, 'rogerstanimoto')``
Computes the Rogers-Tanimoto distance between each pair of
boolean vectors. (see rogerstanimoto function documentation)
19. ``Y = pdist(X, 'russellrao')``
Computes the Russell-Rao distance between each pair of
boolean vectors. (see russellrao function documentation)
20. ``Y = pdist(X, 'sokalmichener')``
Computes the Sokal-Michener distance between each pair of
boolean vectors. (see sokalmichener function documentation)
21. ``Y = pdist(X, 'sokalsneath')``
Computes the Sokal-Sneath distance between each pair of
boolean vectors. (see sokalsneath function documentation)
22. ``Y = pdist(X, 'wminkowski', p=2, w=w)``
Computes the weighted Minkowski distance between each pair of
vectors. (see wminkowski function documentation)
'wminkowski' is deprecated and will be removed in SciPy 1.8.0.
Use 'minkowski' instead.
23. ``Y = pdist(X, f)``
Computes the distance between all pairs of vectors in X
using the user supplied 2-arity function f. For example,
Euclidean distance between the vectors could be computed
as follows::
dm = pdist(X, lambda u, v: np.sqrt(((u-v)**2).sum()))
Note that you should avoid passing a reference to one of
the distance functions defined in this library. For example,::
dm = pdist(X, sokalsneath)
would calculate the pair-wise distances between the vectors in
X using the Python function sokalsneath. This would result in
sokalsneath being called :math:`{n \\choose 2}` times, which
is inefficient. Instead, the optimized C version is more
efficient, and we call it using the following syntax::
dm = pdist(X, 'sokalsneath')
"""
# You can also call this as:
# Y = pdist(X, 'test_abc')
# where 'abc' is the metric being tested. This computes the distance
# between all pairs of vectors in X using the distance metric 'abc' but
# with a more succinct, verifiable, but less efficient implementation.
X = _asarray_validated(X, sparse_ok=False, objects_ok=True, mask_ok=True,
check_finite=False)
kwargs = _args_to_kwargs_xdist(args, kwargs, metric, "pdist")
X = np.asarray(X, order='c')
s = X.shape
if len(s) != 2:
raise ValueError('A 2-dimensional array must be passed.')
m, n = s
out = kwargs.pop("out", None)
if out is None:
dm = np.empty((m * (m - 1)) // 2, dtype=np.double)
else:
if out.shape != (m * (m - 1) // 2,):
raise ValueError("output array has incorrect shape.")
if not out.flags.c_contiguous:
raise ValueError("Output array must be C-contiguous.")
if out.dtype != np.double:
raise ValueError("Output array must be double type.")
dm = out
# compute blocklist for deprecated kwargs
if(metric in _METRICS['jensenshannon'].aka
or metric == 'test_jensenshannon' or metric == jensenshannon):
kwargs_blocklist = ["p", "w", "V", "VI"]
elif(metric in _METRICS['minkowski'].aka
or metric in _METRICS['wminkowski'].aka
or metric in ['test_minkowski', 'test_wminkowski']
or metric in [minkowski, wminkowski]):
kwargs_blocklist = ["V", "VI"]
elif(metric in _METRICS['seuclidean'].aka or
metric == 'test_seuclidean' or metric == seuclidean):
kwargs_blocklist = ["p", "w", "VI"]
elif(metric in _METRICS['mahalanobis'].aka
or metric == 'test_mahalanobis' or metric == mahalanobis):
kwargs_blocklist = ["p", "w", "V"]
else:
kwargs_blocklist = ["p", "V", "VI"]
_filter_deprecated_kwargs(kwargs, kwargs_blocklist)
if callable(metric):
mstr = getattr(metric, '__name__', 'UnknownCustomMetric')
metric_name = _METRIC_ALIAS.get(mstr, None)
if metric_name is not None:
X, typ, kwargs = _validate_pdist_input(X, m, n,
metric_name, **kwargs)
k = 0
for i in range(0, m - 1):
for j in range(i + 1, m):
dm[k] = metric(X[i], X[j], **kwargs)
k = k + 1
elif isinstance(metric, str):
mstr = metric.lower()
mstr, kwargs = _select_weighted_metric(mstr, kwargs, out)
metric_name = _METRIC_ALIAS.get(mstr, None)
if metric_name is not None:
X, typ, kwargs = _validate_pdist_input(X, m, n,
metric_name, **kwargs)
if 'w' in kwargs:
metric_name = _C_WEIGHTED_METRICS.get(metric_name, metric_name)
# get pdist wrapper
pdist_fn = getattr(_distance_wrap,
"pdist_%s_%s_wrap" % (metric_name, typ))
pdist_fn(X, dm, **kwargs)
return dm
elif mstr in ['old_cosine', 'old_cos']:
warnings.warn('"old_cosine" is deprecated and will be removed in '
'a future version. Use "cosine" instead.',
DeprecationWarning)
X = _convert_to_double(X)
norms = np.einsum('ij,ij->i', X, X, dtype=np.double)
np.sqrt(norms, out=norms)
nV = norms.reshape(m, 1)
# The numerator u * v
nm = np.dot(X, X.T)
# The denom. ||u||*||v||
de = np.dot(nV, nV.T)
dm = 1.0 - (nm / de)
dm[range(0, m), range(0, m)] = 0.0
dm = squareform(dm)
elif mstr.startswith("test_"):
if mstr in _TEST_METRICS:
dm = pdist(X, _TEST_METRICS[mstr], **kwargs)
else:
raise ValueError('Unknown "Test" Distance Metric: %s' % mstr[5:])
else:
raise ValueError('Unknown Distance Metric: %s' % mstr)
else:
raise TypeError('2nd argument metric must be a string identifier '
'or a function.')
return dm
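# Minimal sketch (not part of the original file) checking the condensed-index
# formula quoted in the pdist docstring against squareform (defined below):
def _condensed_index_example(m=5, i=1, j=3):
    X = np.random.rand(m, 3)
    Y = pdist(X)          # condensed vector of length m * (m - 1) / 2
    D = squareform(Y)     # redundant m-by-m distance matrix
    assert Y[m * i + j - ((i + 2) * (i + 1)) // 2] == D[i, j]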
def squareform(X, force="no", checks=True):
"""
Convert a vector-form distance vector to a square-form distance
matrix, and vice-versa.
Parameters
----------
X : ndarray
Either a condensed or redundant distance matrix.
force : str, optional
As with MATLAB(TM), if force is equal to ``'tovector'`` or
``'tomatrix'``, the input will be treated as a distance matrix or
distance vector respectively.
checks : bool, optional
If set to False, no checks will be made for matrix
symmetry nor zero diagonals. This is useful if it is known that
``X - X.T`` is small and ``diag(X)`` is close to zero.
These values are ignored anyway so they do not disrupt the
squareform transformation.
Returns
-------
Y : ndarray
If a condensed distance matrix is passed, a redundant one is
returned, or if a redundant one is passed, a condensed distance
matrix is returned.
Notes
-----
1. ``v = squareform(X)``
Given a square n-by-n symmetric distance matrix ``X``,
``v = squareform(X)`` returns a ``n * (n-1) / 2``
(i.e. binomial coefficient n choose 2) sized vector `v`
where :math:`v[{n \\choose 2} - {n-i \\choose 2} + (j-i-1)]`
is the distance between distinct points ``i`` and ``j``.
If ``X`` is non-square or asymmetric, an error is raised.
2. ``X = squareform(v)``
Given a ``n * (n-1) / 2`` sized vector ``v``
for some integer ``n >= 1`` encoding distances as described,
``X = squareform(v)`` returns a n-by-n distance matrix ``X``.
The ``X[i, j]`` and ``X[j, i]`` values are set to
:math:`v[{n \\choose 2} - {n-i \\choose 2} + (j-i-1)]`
and all diagonal elements are zero.
In SciPy 0.19.0, ``squareform`` stopped casting all input types to
float64, and started returning arrays of the same dtype as the input.
"""
X = np.ascontiguousarray(X)
s = X.shape
if force.lower() == 'tomatrix':
if len(s) != 1:
raise ValueError("Forcing 'tomatrix' but input X is not a "
"distance vector.")
elif force.lower() == 'tovector':
if len(s) != 2:
raise ValueError("Forcing 'tovector' but input X is not a "
"distance matrix.")
# X = squareform(v)
if len(s) == 1:
if s[0] == 0:
return np.zeros((1, 1), dtype=X.dtype)
# Grab the closest value to the square root of the number
# of elements times 2 to see if the number of elements
# is indeed a binomial coefficient.
d = int(np.ceil(np.sqrt(s[0] * 2)))
# Check that v is of valid dimensions.
if d * (d - 1) != s[0] * 2:
raise ValueError('Incompatible vector size. It must be a binomial '
'coefficient n choose 2 for some integer n >= 2.')
# Allocate memory for the distance matrix.
M = np.zeros((d, d), dtype=X.dtype)
# Since the C code does not support striding using strides,
# the dimensions are used instead.
X = _copy_array_if_base_present(X)
# Fill in the values of the distance matrix.
_distance_wrap.to_squareform_from_vector_wrap(M, X)
# Return the distance matrix.
return M
elif len(s) == 2:
if s[0] != s[1]:
raise ValueError('The matrix argument must be square.')
if checks:
is_valid_dm(X, throw=True, name='X')
# One-side of the dimensions is set here.
d = s[0]
if d <= 1:
return np.array([], dtype=X.dtype)
# Create a vector.
v = np.zeros((d * (d - 1)) // 2, dtype=X.dtype)
# Since the C code does not support striding using strides,
# the dimensions are used instead.
X = _copy_array_if_base_present(X)
# Convert the vector to squareform.
_distance_wrap.to_vector_from_squareform_wrap(X, v)
return v
else:
raise ValueError(('The first argument must be one or two dimensional '
'array. A %d-dimensional array is not '
'permitted') % len(s))
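# Minimal sketch (not part of the original file): squareform round-trips
# between the two representations; a condensed vector of length k is valid
# only when k == d * (d - 1) // 2 for some integer d (here k == 3, d == 3).
def _squareform_roundtrip_example():
    v = np.array([1.0, 2.0, 3.0])
    M = squareform(v)     # 3-by-3 symmetric matrix with zero diagonal
    assert np.array_equal(squareform(M), v)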
def is_valid_dm(D, tol=0.0, throw=False, name="D", warning=False):
"""
Return True if input array is a valid distance matrix.
Distance matrices must be 2-dimensional numpy arrays.
They must have a zero-diagonal, and they must be symmetric.
Parameters
----------
D : ndarray
The candidate object to test for validity.
tol : float, optional
The distance matrix should be symmetric. `tol` is the maximum
difference between entries ``ij`` and ``ji`` for the distance
metric to be considered symmetric.
throw : bool, optional
An exception is thrown if the distance matrix passed is not valid.
name : str, optional
The name of the variable to be checked. This is useful if
throw is set to True so the offending variable can be identified
in the exception message when an exception is thrown.
warning : bool, optional
Instead of throwing an exception, a warning message is
raised.
Returns
-------
valid : bool
True if the variable `D` passed is a valid distance matrix.
Notes
-----
Small numerical differences in `D` and `D.T` and non-zeroness of
the diagonal are ignored if they are within the tolerance specified
by `tol`.
"""
D = np.asarray(D, order='c')
valid = True
try:
s = D.shape
if len(D.shape) != 2:
if name:
raise ValueError(('Distance matrix \'%s\' must have shape=2 '
'(i.e. be two-dimensional).') % name)
else:
raise ValueError('Distance matrix must have shape=2 (i.e. '
'be two-dimensional).')
if tol == 0.0:
if not (D == D.T).all():
if name:
raise ValueError(('Distance matrix \'%s\' must be '
'symmetric.') % name)
else:
raise ValueError('Distance matrix must be symmetric.')
if not (D[range(0, s[0]), range(0, s[0])] == 0).all():
if name:
raise ValueError(('Distance matrix \'%s\' diagonal must '
'be zero.') % name)
else:
raise ValueError('Distance matrix diagonal must be zero.')
else:
if not (D - D.T <= tol).all():
if name:
raise ValueError(('Distance matrix \'%s\' must be '
'symmetric within tolerance %5.5f.')
% (name, tol))
else:
raise ValueError('Distance matrix must be symmetric within'
' tolerance %5.5f.' % tol)
if not (D[range(0, s[0]), range(0, s[0])] <= tol).all():
if name:
raise ValueError(('Distance matrix \'%s\' diagonal must be'
' close to zero within tolerance %5.5f.')
% (name, tol))
else:
raise ValueError(('Distance matrix diagonal must be '
'close to zero within tolerance %5.5f.') % tol)
except Exception as e:
if throw:
raise
if warning:
warnings.warn(str(e))
valid = False
return valid
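# Minimal sketch (not part of the original file): a matrix with a tiny
# asymmetry is rejected by the strict check but accepted once `tol` covers it.
def _is_valid_dm_tolerance_example():
    D = np.array([[0.0, 1.0], [1.0 + 1e-9, 0.0]])
    assert not is_valid_dm(D)           # strict symmetry check fails
    assert is_valid_dm(D, tol=1e-8)     # asymmetry within tolerance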
def is_valid_y(y, warning=False, throw=False, name=None):
"""
Return True if the input array is a valid condensed distance matrix.
Condensed distance matrices must be 1-dimensional numpy arrays.
Their length must be a binomial coefficient :math:`{n \\choose 2}`
for some positive integer n.
Parameters
----------
y : ndarray
The condensed distance matrix.
warning : bool, optional
Invokes a warning if the variable passed is not a valid
condensed distance matrix. The warning message explains why
the distance matrix is not valid. `name` is used when
referencing the offending variable.
throw : bool, optional
Throws an exception if the variable passed is not a valid
condensed distance matrix.
name : bool, optional
Used when referencing the offending variable in the
warning or exception message.
"""
y = np.asarray(y, order='c')
valid = True
try:
if len(y.shape) != 1:
if name:
raise ValueError(('Condensed distance matrix \'%s\' must '
'have shape=1 (i.e. be one-dimensional).')
% name)
else:
raise ValueError('Condensed distance matrix must have shape=1 '
'(i.e. be one-dimensional).')
n = y.shape[0]
d = int(np.ceil(np.sqrt(n * 2)))
if (d * (d - 1) / 2) != n:
if name:
raise ValueError(('Length n of condensed distance matrix '
'\'%s\' must be a binomial coefficient, i.e. '
'there must be a k such that '
'(k \\choose 2) == n.') % name)
else:
raise ValueError('Length n of condensed distance matrix must '
'be a binomial coefficient, i.e. there must '
'be a k such that (k \\choose 2) == n.')
except Exception as e:
if throw:
raise
if warning:
warnings.warn(str(e))
valid = False
return valid
def num_obs_dm(d):
"""
Return the number of original observations that correspond to a
square, redundant distance matrix.
Parameters
----------
d : ndarray
The target distance matrix.
Returns
-------
num_obs_dm : int
The number of observations in the redundant distance matrix.
"""
d = np.asarray(d, order='c')
is_valid_dm(d, tol=np.inf, throw=True, name='d')
return d.shape[0]
def num_obs_y(Y):
"""
Return the number of original observations that correspond to a
condensed distance matrix.
Parameters
----------
Y : ndarray
Condensed distance matrix.
Returns
-------
n : int
The number of observations in the condensed distance matrix `Y`.
"""
Y = np.asarray(Y, order='c')
is_valid_y(Y, throw=True, name='Y')
k = Y.shape[0]
if k == 0:
raise ValueError("The number of observations cannot be determined on "
"an empty distance matrix.")
d = int(np.ceil(np.sqrt(k * 2)))
if (d * (d - 1) / 2) != k:
raise ValueError("Invalid condensed distance matrix passed. Must be "
"some k where k=(n choose 2) for some n >= 2.")
return d
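# Illustrative note (not part of the original file): with k == 10 condensed
# entries, d = ceil(sqrt(2 * 10)) == 5 and 5 * 4 / 2 == 10, so num_obs_y
# recovers n == 5 original observations.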
def cdist(XA, XB, metric='euclidean', *args, **kwargs):
"""
Compute distance between each pair of the two collections of inputs.
See Notes for common calling conventions.
Parameters
----------
XA : ndarray
An :math:`m_A` by :math:`n` array of :math:`m_A`
original observations in an :math:`n`-dimensional space.
Inputs are converted to float type.
XB : ndarray
An :math:`m_B` by :math:`n` array of :math:`m_B`
original observations in an :math:`n`-dimensional space.
Inputs are converted to float type.
metric : str or callable, optional
The distance metric to use. If a string, the distance function can be
'braycurtis', 'canberra', 'chebyshev', 'cityblock', 'correlation',
'cosine', 'dice', 'euclidean', 'hamming', 'jaccard', 'jensenshannon',
'kulsinski', 'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto',
'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean',
'wminkowski', 'yule'.
*args : tuple. Deprecated.
Additional arguments should be passed as keyword arguments
**kwargs : dict, optional
Extra arguments to `metric`: refer to each metric documentation for a
list of all possible arguments.
Some possible arguments:
p : scalar
The p-norm to apply for Minkowski, weighted and unweighted.
Default: 2.
w : ndarray
The weight vector for metrics that support weights (e.g., Minkowski).
V : ndarray
The variance vector for standardized Euclidean.
Default: var(vstack([XA, XB]), axis=0, ddof=1)
VI : ndarray
The inverse of the covariance matrix for Mahalanobis.
Default: inv(cov(vstack([XA, XB].T))).T
out : ndarray
The output array
If not None, the distance matrix Y is stored in this array.
Note: metric independent, it will become a regular keyword arg in a
future scipy version
Returns
-------
Y : ndarray
A :math:`m_A` by :math:`m_B` distance matrix is returned.
For each :math:`i` and :math:`j`, the metric
``dist(u=XA[i], v=XB[j])`` is computed and stored in the
:math:`ij` th entry.
Raises
------
ValueError
An exception is thrown if `XA` and `XB` do not have
the same number of columns.
Notes
-----
The following are common calling conventions:
1. ``Y = cdist(XA, XB, 'euclidean')``
Computes the distance between :math:`m` points using
Euclidean distance (2-norm) as the distance metric between the
points. The points are arranged as :math:`m`
:math:`n`-dimensional row vectors in the matrix X.
2. ``Y = cdist(XA, XB, 'minkowski', p=2.)``
Computes the distances using the Minkowski distance
:math:`||u-v||_p` (:math:`p`-norm) where :math:`p \\geq 1`.
3. ``Y = cdist(XA, XB, 'cityblock')``
Computes the city block or Manhattan distance between the
points.
4. ``Y = cdist(XA, XB, 'seuclidean', V=None)``
Computes the standardized Euclidean distance. The standardized
Euclidean distance between two n-vectors ``u`` and ``v`` is
.. math::
\\sqrt{\\sum {(u_i-v_i)^2 / V[x_i]}}.
V is the variance vector; V[i] is the variance computed over all
the i'th components of the points. If not passed, it is
automatically computed.
5. ``Y = cdist(XA, XB, 'sqeuclidean')``
Computes the squared Euclidean distance :math:`||u-v||_2^2` between
the vectors.
6. ``Y = cdist(XA, XB, 'cosine')``
Computes the cosine distance between vectors u and v,
.. math::
1 - \\frac{u \\cdot v}
{{||u||}_2 {||v||}_2}
where :math:`||*||_2` is the 2-norm of its argument ``*``, and
:math:`u \\cdot v` is the dot product of :math:`u` and :math:`v`.
7. ``Y = cdist(XA, XB, 'correlation')``
Computes the correlation distance between vectors u and v. This is
.. math::
1 - \\frac{(u - \\bar{u}) \\cdot (v - \\bar{v})}
{{||(u - \\bar{u})||}_2 {||(v - \\bar{v})||}_2}
where :math:`\\bar{v}` is the mean of the elements of vector v,
and :math:`x \\cdot y` is the dot product of :math:`x` and :math:`y`.
8. ``Y = cdist(XA, XB, 'hamming')``
Computes the normalized Hamming distance, or the proportion of
those vector elements between two n-vectors ``u`` and ``v``
which disagree. To save memory, the matrix ``X`` can be of type
boolean.
9. ``Y = cdist(XA, XB, 'jaccard')``
Computes the Jaccard distance between the points. Given two
vectors, ``u`` and ``v``, the Jaccard distance is the
proportion of those elements ``u[i]`` and ``v[i]`` that
disagree where at least one of them is non-zero.
10. ``Y = cdist(XA, XB, 'chebyshev')``
Computes the Chebyshev distance between the points. The
Chebyshev distance between two n-vectors ``u`` and ``v`` is the
maximum norm-1 distance between their respective elements. More
precisely, the distance is given by
.. math::
d(u,v) = \\max_i {|u_i-v_i|}.
11. ``Y = cdist(XA, XB, 'canberra')``
Computes the Canberra distance between the points. The
Canberra distance between two points ``u`` and ``v`` is
.. math::
d(u,v) = \\sum_i \\frac{|u_i-v_i|}
{|u_i|+|v_i|}.
12. ``Y = cdist(XA, XB, 'braycurtis')``
Computes the Bray-Curtis distance between the points. The
Bray-Curtis distance between two points ``u`` and ``v`` is
.. math::
d(u,v) = \\frac{\\sum_i (|u_i-v_i|)}
{\\sum_i (|u_i+v_i|)}
13. ``Y = cdist(XA, XB, 'mahalanobis', VI=None)``
Computes the Mahalanobis distance between the points. The
Mahalanobis distance between two points ``u`` and ``v`` is
:math:`\\sqrt{(u-v)(1/V)(u-v)^T}` where :math:`(1/V)` (the ``VI``
variable) is the inverse covariance. If ``VI`` is not None,
``VI`` will be used as the inverse covariance matrix.
14. ``Y = cdist(XA, XB, 'yule')``
Computes the Yule distance between the boolean
vectors. (see `yule` function documentation)
15. ``Y = cdist(XA, XB, 'matching')``
Synonym for 'hamming'.
16. ``Y = cdist(XA, XB, 'dice')``
Computes the Dice distance between the boolean vectors. (see
`dice` function documentation)
17. ``Y = cdist(XA, XB, 'kulsinski')``
Computes the Kulsinski distance between the boolean
vectors. (see `kulsinski` function documentation)
18. ``Y = cdist(XA, XB, 'rogerstanimoto')``
Computes the Rogers-Tanimoto distance between the boolean
vectors. (see `rogerstanimoto` function documentation)
19. ``Y = cdist(XA, XB, 'russellrao')``
Computes the Russell-Rao distance between the boolean
vectors. (see `russellrao` function documentation)
20. ``Y = cdist(XA, XB, 'sokalmichener')``
Computes the Sokal-Michener distance between the boolean
vectors. (see `sokalmichener` function documentation)
21. ``Y = cdist(XA, XB, 'sokalsneath')``
Computes the Sokal-Sneath distance between the vectors. (see
`sokalsneath` function documentation)
22. ``Y = cdist(XA, XB, 'wminkowski', p=2., w=w)``
Computes the weighted Minkowski distance between the
vectors. (see `wminkowski` function documentation)
'wminkowski' is deprecated and will be removed in SciPy 1.8.0.
Use 'minkowski' instead.
23. ``Y = cdist(XA, XB, f)``
Computes the distance between all pairs of vectors in X
using the user supplied 2-arity function f. For example,
Euclidean distance between the vectors could be computed
as follows::
dm = cdist(XA, XB, lambda u, v: np.sqrt(((u-v)**2).sum()))
Note that you should avoid passing a reference to one of
the distance functions defined in this library. For example,::
dm = cdist(XA, XB, sokalsneath)
would calculate the pair-wise distances between the vectors in
X using the Python function `sokalsneath`. This would result in
sokalsneath being called :math:`{n \\choose 2}` times, which
is inefficient. Instead, the optimized C version is more
efficient, and we call it using the following syntax::
dm = cdist(XA, XB, 'sokalsneath')
Examples
--------
Find the Euclidean distances between four 2-D coordinates:
>>> from scipy.spatial import distance
>>> coords = [(35.0456, -85.2672),
... (35.1174, -89.9711),
... (35.9728, -83.9422),
... (36.1667, -86.7833)]
>>> distance.cdist(coords, coords, 'euclidean')
array([[ 0. , 4.7044, 1.6172, 1.8856],
[ 4.7044, 0. , 6.0893, 3.3561],
[ 1.6172, 6.0893, 0. , 2.8477],
[ 1.8856, 3.3561, 2.8477, 0. ]])
Find the Manhattan distance from a 3-D point to the corners of the unit
cube:
>>> a = np.array([[0, 0, 0],
... [0, 0, 1],
... [0, 1, 0],
... [0, 1, 1],
... [1, 0, 0],
... [1, 0, 1],
... [1, 1, 0],
... [1, 1, 1]])
>>> b = np.array([[ 0.1, 0.2, 0.4]])
>>> distance.cdist(a, b, 'cityblock')
array([[ 0.7],
[ 0.9],
[ 1.3],
[ 1.5],
[ 1.5],
[ 1.7],
[ 2.1],
[ 2.3]])
"""
# You can also call this as:
# Y = cdist(XA, XB, 'test_abc')
# where 'abc' is the metric being tested. This computes the distance
# between all pairs of vectors in XA and XB using the distance metric 'abc'
# but with a more succinct, verifiable, but less efficient implementation.
kwargs = _args_to_kwargs_xdist(args, kwargs, metric, "cdist")
XA = np.asarray(XA, order='c')
XB = np.asarray(XB, order='c')
s = XA.shape
sB = XB.shape
if len(s) != 2:
raise ValueError('XA must be a 2-dimensional array.')
if len(sB) != 2:
raise ValueError('XB must be a 2-dimensional array.')
if s[1] != sB[1]:
raise ValueError('XA and XB must have the same number of columns '
'(i.e. feature dimension.)')
mA = s[0]
mB = sB[0]
n = s[1]
out = kwargs.pop("out", None)
if out is None:
dm = np.empty((mA, mB), dtype=np.double)
else:
if out.shape != (mA, mB):
raise ValueError("Output array has incorrect shape.")
if not out.flags.c_contiguous:
raise ValueError("Output array must be C-contiguous.")
if out.dtype != np.double:
raise ValueError("Output array must be double type.")
dm = out
# compute blocklist for deprecated kwargs
if(metric in _METRICS['minkowski'].aka or
metric in _METRICS['wminkowski'].aka or
metric in ['test_minkowski', 'test_wminkowski'] or
metric in [minkowski, wminkowski]):
kwargs_blocklist = ["V", "VI"]
elif(metric in _METRICS['seuclidean'].aka or
metric == 'test_seuclidean' or metric == seuclidean):
kwargs_blocklist = ["p", "w", "VI"]
elif(metric in _METRICS['mahalanobis'].aka or
metric == 'test_mahalanobis' or metric == mahalanobis):
kwargs_blocklist = ["p", "w", "V"]
else:
kwargs_blocklist = ["p", "V", "VI"]
_filter_deprecated_kwargs(kwargs, kwargs_blocklist)
if callable(metric):
mstr = getattr(metric, '__name__', 'Unknown')
metric_name = _METRIC_ALIAS.get(mstr, None)
XA, XB, typ, kwargs = _validate_cdist_input(XA, XB, mA, mB, n,
metric_name, **kwargs)
for i in range(0, mA):
for j in range(0, mB):
dm[i, j] = metric(XA[i], XB[j], **kwargs)
elif isinstance(metric, str):
mstr = metric.lower()
mstr, kwargs = _select_weighted_metric(mstr, kwargs, out)
metric_name = _METRIC_ALIAS.get(mstr, None)
if metric_name is not None:
XA, XB, typ, kwargs = _validate_cdist_input(XA, XB, mA, mB, n,
metric_name, **kwargs)
if 'w' in kwargs:
metric_name = _C_WEIGHTED_METRICS.get(metric_name, metric_name)
# get cdist wrapper
cdist_fn = getattr(_distance_wrap,
"cdist_%s_%s_wrap" % (metric_name, typ))
cdist_fn(XA, XB, dm, **kwargs)
return dm
elif mstr.startswith("test_"):
if mstr in _TEST_METRICS:
dm = cdist(XA, XB, _TEST_METRICS[mstr], **kwargs)
else:
raise ValueError('Unknown "Test" Distance Metric: %s' % mstr[5:])
else:
raise ValueError('Unknown Distance Metric: %s' % mstr)
else:
raise TypeError('2nd argument metric must be a string identifier '
'or a function.')
return dm
|
nmayorov/scipy
|
scipy/spatial/distance.py
|
Python
|
bsd-3-clause
| 87,158
|
def test_filename(parser, data_file):
ledger = parser.parse_ledger(data_file("simple.dat"))
assert ledger.absolute_filename("test.dat") == data_file("test.dat")
|
pcapriotti/pledger
|
tests/test_ledger.py
|
Python
|
mit
| 169
|
__author__ = 'hd'
import socket
from socketcan import CanMessage
class CanSocket:
def __init__(self):
self.sock = None
def open(self, interface):
self.sock = socket.socket(socket.AF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
#self.sock.setsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_LOOPBACK, 0)
#self.sock.setsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_RECV_OWN_MSGS, 0)
self.sock.bind((interface,))
def close(self):
self.sock.close()
def read(self, timeout=None):
self.sock.settimeout(timeout)
try:
frame, addr = self.sock.recvfrom(16)
msg = CanMessage.from_raw(frame)
print("recv: ", msg)
return msg
except socket.timeout:
return None
def send(self, msg):
print("send: ", msg)
frame = msg.to_raw()
self.sock.send(frame)
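# Minimal usage sketch (not part of the original file), assuming a SocketCAN
# interface named 'can0' is configured and up:
#   sock = CanSocket()
#   sock.open('can0')
#   msg = sock.read(timeout=1.0)   # returns None if nothing arrives in time
#   if msg is not None:
#       sock.send(msg)             # echo the frame back
#   sock.close()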
|
Bytewerk/balloon_opencm3_ws2812_can
|
python/socketcan/cansocket.py
|
Python
|
gpl-2.0
| 899
|
from flask_factory import Factory
create_app = Factory(__name__)
create_app.step('.db:init_db')
|
Kroisse/flask-factory
|
tests/relative/web.py
|
Python
|
bsd-3-clause
| 97
|
#!/usr/bin/python3
import sys
import os
import random
import time
FLAG = os.environ["TASK1_FLAG"]
OFFSET = random.randint(38, 42)
def get_correct():
return int(time.time()) + OFFSET
print("Download path <game server>/0c16c4dd438b0042c4d725fab588e648.py\n")
print("Oh! Look what time it is: " + str(int(time.time())))
print("Yes! It's guessing o'clock!")
while True:
try:
s = input("Now, tell me the number I'm thinking about: ")
v = int(s.strip())
if v != get_correct():
print("Hahaha. No.")
continue
print(FLAG)
break
except ValueError:
print("That's not a number, go away.")
break
except EOFError:
print("Ohes Noes!")
break
|
google/google-ctf
|
2020/hackceler8/match-pre-package/game/static/0c16c4dd438b0042c4d725fab588e648.py
|
Python
|
apache-2.0
| 691
|
#coding:utf-8
from sys import argv
script, jisuan0, user_name = argv
prompt = '>'  # prompt string shown before each answer
print "Hi %s, I'm the %s script." % (user_name, script)  # print the program name (script)
print "I'd like to ask you a few questions."
print "Do you like me %s?" % user_name  # print the first argument (user_name)
likes = raw_input(prompt)  # show the '>' prompt and read the answer
print "Where do you live %s?" % user_name
lives = raw_input(prompt)
print "What kind of computer do you have?"
computer = raw_input(prompt)
# convert the entered numbers to integers
jisuan1 = int(raw_input("Enter the second number: "))
jisuan0 = int(jisuan0)
ss = jisuan0 + jisuan1
print "The result is: %s" % ss
# print back everything that was entered
print """
Alright, so you said %r about liking me.
You live in %r. Not sure where that is.
And you have a %r computer. Nice.
""" % (likes, lives, computer)
|
Mbdn/Python-Beginner
|
ex14.3.py
|
Python
|
mit
| 932
|
#!/usr/bin/python
#
# (c) 2013, RSD Services S.A
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: java_cert
version_added: '2.3'
short_description: Uses keytool to import/remove key from java keystore(cacerts)
description:
- This is a wrapper module around keytool, which can be used to import/remove
certificates from a given java keystore.
options:
cert_url:
description:
- Basic URL to fetch SSL certificate from. One of cert_url or cert_path is required to load certificate.
cert_port:
description:
- Port to connect to URL. This will be used to create server URL:PORT
default: 443
cert_path:
description:
- Local path to load certificate from. One of cert_url or cert_path is required to load certificate.
cert_alias:
description:
- Imported certificate alias.
keystore_path:
description:
- Path to keystore.
keystore_pass:
description:
- Keystore password.
required: true
keystore_create:
description:
- Create keystore if it doesn't exist
executable:
description:
- Path to the keytool binary; if not provided, it is searched for in PATH.
default: keytool
state:
description:
- Defines action which can be either certificate import or removal.
choices: [ 'present', 'absent' ]
default: present
author: Adam Hamsik @haad
'''
EXAMPLES = '''
# Import SSL certificate from google.com to a given cacerts keystore
java_cert:
cert_url: google.com
cert_port: 443
keystore_path: /usr/lib/jvm/jre7/lib/security/cacerts
keystore_pass: changeit
state: present
# Remove certificate with given alias from a keystore
java_cert:
cert_url: google.com
keystore_path: /usr/lib/jvm/jre7/lib/security/cacerts
keystore_pass: changeit
executable: /usr/lib/jvm/jre7/bin/keytool
state: absent
# Import SSL certificate from google.com to a keystore,
# create it if it doesn't exist
java_cert:
cert_url: google.com
keystore_path: /tmp/cacerts
keystore_pass: changeit
keystore_create: yes
state: present
'''
RETURN = '''
msg:
description: Output from stdout of keytool command after execution of given command.
returned: success
type: string
sample: "Module require existing keystore at keystore_path '/tmp/test/cacerts'"
rc:
description: Keytool command execution return value
returned: success
type: int
sample: "0"
cmd:
description: Executed command to get action done
returned: success
type: string
sample: "keytool -importcert -noprompt -keystore"
'''
import os
# import module snippets
from ansible.module_utils.basic import AnsibleModule
def check_cert_present(module, executable, keystore_path, keystore_pass, alias):
''' Check if certificate with alias is present in keystore
located at keystore_path '''
test_cmd = ("%s -noprompt -list -keystore '%s' -storepass '%s' "
"-alias '%s'")%(executable, keystore_path, keystore_pass, alias)
(check_rc, _, _) = module.run_command(test_cmd)
if check_rc == 0:
return True
return False
def import_cert_url(module, executable, url, port, keystore_path, keystore_pass, alias):
''' Import certificate from URL into keystore located at keystore_path '''
fetch_cmd = ("%s -printcert -rfc -sslserver %s:%d")%(executable, url, port)
import_cmd = ("%s -importcert -noprompt -keystore '%s' "
"-storepass '%s' -alias '%s'")%(executable, keystore_path,
keystore_pass, alias)
if module.check_mode:
module.exit_json(changed=True)
# Fetch SSL certificate from remote host.
(_, fetch_out, _) = module.run_command(fetch_cmd, check_rc=True)
# Use remote certificate from remote host and import it to a java keystore
(import_rc, import_out, import_err) = module.run_command(import_cmd,
data=fetch_out,
check_rc=False)
diff = {'before': '\n', 'after': '%s\n'%alias}
if import_rc == 0:
return module.exit_json(changed=True, msg=import_out,
rc=import_rc, cmd=import_cmd, stdout=import_out,
diff=diff)
else:
return module.fail_json(msg=import_out, rc=import_rc, cmd=import_cmd,
error=import_err)
def import_cert_path(module, executable, path, keystore_path, keystore_pass, alias):
''' Import certificate from path into keystore located on
keystore_path as alias '''
import_cmd = ("%s -importcert -noprompt -keystore '%s' "
"-storepass '%s' -file '%s' -alias '%s'")%(executable,
keystore_path,
keystore_pass,
path, alias)
if module.check_mode:
module.exit_json(changed=True)
# Use local certificate from local path and import it to a java keystore
(import_rc, import_out, import_err) = module.run_command(import_cmd,
check_rc=False)
diff = {'before': '\n', 'after': '%s\n'%alias}
if import_rc == 0:
return module.exit_json(changed=True, msg=import_out,
rc=import_rc, cmd=import_cmd, stdout=import_out,
error=import_err, diff=diff)
else:
return module.fail_json(msg=import_out, rc=import_rc, cmd=import_cmd)
def delete_cert(module, executable, keystore_path, keystore_pass, alias):
''' Delete certificate identified with alias from keystore on keystore_path '''
del_cmd = ("%s -delete -keystore '%s' -storepass '%s' "
"-alias '%s'")%(executable, keystore_path, keystore_pass, alias)
if module.check_mode:
module.exit_json(changed=True)
# Delete SSL certificate from keystore
(del_rc, del_out, del_err) = module.run_command(del_cmd, check_rc=True)
diff = {'before': '%s\n'%alias, 'after': None}
return module.exit_json(changed=True, msg=del_out,
rc=del_rc, cmd=del_cmd, stdout=del_out,
error=del_err, diff=diff)
def test_keytool(module, executable):
''' Test if keytool is actually executable or not '''
test_cmd = "%s"%(executable)
module.run_command(test_cmd, check_rc=True)
def test_keystore(module, keystore_path):
''' Check if we can access keystore as file or not '''
if keystore_path is None:
keystore_path = ''
if not os.path.exists(keystore_path) and not os.path.isfile(keystore_path):
# Keystore doesn't exist and we were not asked to create it, so fail.
return module.fail_json(changed=False,
msg="Module requires existing keystore at keystore_path '%s'"
% (keystore_path))
def main():
argument_spec = dict(
cert_url=dict(type='str'),
cert_path=dict(type='str'),
cert_alias=dict(type='str'),
cert_port=dict(default='443', type='int'),
keystore_path=dict(type='str'),
keystore_pass=dict(required=True, type='str', no_log=True),
keystore_create=dict(default=False, type='bool'),
executable=dict(default='keytool', type='str'),
state=dict(default='present',
choices=['present', 'absent'])
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[['cert_path', 'cert_url']],
required_together=[['keystore_path', 'keystore_pass']],
mutually_exclusive=[
['cert_url', 'cert_path']
],
supports_check_mode=True,
)
url = module.params.get('cert_url')
path = module.params.get('cert_path')
port = module.params.get('cert_port')
cert_alias = module.params.get('cert_alias') or url
keystore_path = module.params.get('keystore_path')
keystore_pass = module.params.get('keystore_pass')
keystore_create = module.params.get('keystore_create')
executable = module.params.get('executable')
state = module.params.get('state')
if path and not cert_alias:
module.fail_json(changed=False,
msg="Using local path import from %s requires alias argument."
% path)
test_keytool(module, executable)
if not keystore_create:
test_keystore(module, keystore_path)
cert_present = check_cert_present(module, executable, keystore_path,
keystore_pass, cert_alias)
if state == 'absent':
if cert_present:
delete_cert(module, executable, keystore_path, keystore_pass, cert_alias)
elif state == 'present':
if not cert_present:
if path:
import_cert_path(module, executable, path, keystore_path,
keystore_pass, cert_alias)
if url:
import_cert_url(module, executable, url, port, keystore_path,
keystore_pass, cert_alias)
module.exit_json(changed=False)
if __name__ == "__main__":
main()
|
t0mk/ansible
|
lib/ansible/modules/system/java_cert.py
|
Python
|
gpl-3.0
| 10,044
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally import consts
from rally.deploy import engine
from rally import objects
class ExistingCloud(engine.EngineFactory):
"""Just use an existing OpenStack deployment without deploying anything.
To use ExistingCloud, put the endpoint information into the config:
{
"type": "ExistingCloud",
"auth_url": "http://localhost:5000/v2.0/",
"region_name": "RegionOne",
"endpoint_type": "public",
"admin": {
"username": "admin",
"password": "password",
"tenant_name": "demo"
},
"https_insecure": False,
"https_cacert": "",
}
Or, using keystone v3 API endpoint:
{
"type": "ExistingCloud",
"auth_url": "http://localhost:5000/v3/",
"region_name": "RegionOne",
"endpoint_type": "public",
"admin": {
"username": "admin",
"password": "admin",
"user_domain_name": "admin",
"project_name": "admin",
"project_domain_name": "admin",
},
"https_insecure": False,
"https_cacert": "",
}
"""
CONFIG_SCHEMA = {
"type": "object",
"definitions": {
"user": {
"type": "object",
"properties": {
"username": {"type": "string"},
"password": {"type": "string"},
},
"oneOf": [
{
# v2.0 authentication
"properties": {
"tenant_name": {"type": "string"},
},
"required": ["username", "password", "tenant_name"],
},
{
# Authentication in project scope
"properties": {
"user_domain_name": {"type": "string"},
"project_name": {"type": "string"},
"project_domain_name": {"type": "string"},
},
"required": ["username", "password", "project_name"],
}
]
}
},
"properties": {
"type": {"type": "string"},
"auth_url": {"type": "string"},
"region_name": {"type": "string"},
"endpoint_type": {"type": "string",
"enum": [consts.EndpointType.ADMIN,
consts.EndpointType.INTERNAL,
consts.EndpointType.PUBLIC]},
"https_insecure": {"type": "boolean"},
"https_cacert": {"type": "string"},
},
"anyOf": [
{
"properties": {
"admin": {"$ref": "#/definitions/user"}
},
"required": ["type", "auth_url", "admin"]
},
{
"users": {
"type": "array",
"items": {"$ref": "#/definitions/user"}
},
"required": ["type", "auth_url", "users"]
}
]
}
def _create_endpoint(self, common, user, permission):
return objects.Endpoint(
common["auth_url"], user["username"], user["password"],
tenant_name=user.get("project_name", user.get("tenant_name")),
permission=permission,
region_name=common.get("region_name"),
endpoint_type=common.get("endpoint_type",
consts.EndpointType.PUBLIC),
endpoint=common.get("endpoint"),
domain_name=user.get("domain_name"),
user_domain_name=user.get("user_domain_name", "Default"),
project_domain_name=user.get("project_domain_name", "Default"),
https_insecure=common.get("https_insecure", False),
https_cacert=common.get("https_cacert")
)
def deploy(self):
permissions = consts.EndpointPermission
users = [self._create_endpoint(self.config, user, permissions.USER)
for user in self.config.get("users", [])]
admin = self._create_endpoint(self.config,
self.config.get("admin"),
permissions.ADMIN)
return {"admin": admin, "users": users}
def cleanup(self):
pass
|
varunarya10/rally
|
rally/deploy/engines/existing.py
|
Python
|
apache-2.0
| 5,238
|
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Denis Kobozev
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from __future__ import division
import numpy
import math
_identity_matrix = [
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0],
]
_rotation_matrix_cache = {}
def identity_matrix():
return numpy.require(_identity_matrix[:], 'f')
def rotation_matrix(angle, x, y, z):
angle_r = math.radians(angle)
c = math.cos(angle_r)
s = math.sin(angle_r)
C = 1 - c
matrix = numpy.require([
[x ** 2 * C + c, x * y * C - z * s, x * z * C + y * s],
[y * x * C + z * s, y ** 2 * C + c, y * z * C - x * s],
[x * z * C - y * s, y * z * C + x * s, z ** 2 * C + c],
], 'f')
return matrix
def translate(vertices, x, y, z):
translated = vertices + numpy.array([x, y, z], 'f')
return translated
def rotate(vertices, angle, x, y, z):
key = (angle, x, y, z)
if key not in _rotation_matrix_cache:
_rotation_matrix_cache[key] = rotation_matrix(angle, x, y, z)
matrix = _rotation_matrix_cache[key]
rotated = numpy.dot(vertices, matrix)
return rotated
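# Minimal sketch (not part of the original file): rotation matrices produced
# above are orthogonal with unit determinant, e.g. for a 90-degree turn about
# the z axis:
def _rotation_matrix_example():
    R = rotation_matrix(90.0, 0.0, 0.0, 1.0)
    assert numpy.allclose(numpy.dot(R, R.T), identity_matrix(), atol=1e-6)
    assert abs(numpy.linalg.det(R) - 1.0) < 1e-6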
|
dkobozev/tatlin
|
libtatlin/vector.py
|
Python
|
gpl-2.0
| 1,804
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard.api import cinder
# This set of states was pulled from cinder's snapshot_actions.py
STATUS_CHOICES = (
('available', _('Available')),
('creating', _('Creating')),
('deleting', _('Deleting')),
('error', _('Error')),
('error_deleting', _('Error Deleting')),
)
def populate_status_choices(initial, status_choices):
current_status = initial.get('status')
status_choices = [status for status in status_choices
if status[0] != current_status]
status_choices.insert(0, ("", _("Select a new status")))
return status_choices
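# Illustrative note (not part of the original file): given
# initial={'status': 'available'}, the helper above drops the current status
# from STATUS_CHOICES and prepends the blank "Select a new status" entry, so
# the snapshot's existing status cannot be re-selected.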
class UpdateStatus(forms.SelfHandlingForm):
status = forms.ThemableChoiceField(label=_("Status"))
def __init__(self, request, *args, **kwargs):
super(UpdateStatus, self).__init__(request, *args, **kwargs)
initial = kwargs.get('initial', {})
self.fields['status'].choices = populate_status_choices(
initial, STATUS_CHOICES)
def handle(self, request, data):
try:
cinder.volume_snapshot_reset_state(request,
self.initial['snapshot_id'],
data['status'])
choices = dict(STATUS_CHOICES)
choice = choices[data['status']]
messages.success(request, _('Successfully updated volume snapshot'
' status: "%s".') % choice)
return True
except Exception:
redirect = reverse("horizon:admin:snapshots:index")
exceptions.handle(request,
_('Unable to update volume snapshot status.'),
redirect=redirect)
|
kogotko/carburetor
|
openstack_dashboard/dashboards/admin/snapshots/forms.py
|
Python
|
apache-2.0
| 2,471
|
# coding: utf-8
import os
import shutil
import tempfile
from common_unit_test import UnitTestCase
from nxdrive.logging_config import get_logger
from tests.common_unit_test import FILE_CONTENT
log = get_logger(__name__)
TEST_TIMEOUT = 60
class TestLocalPaste(UnitTestCase):
NUMBER_OF_LOCAL_FILES = 25
TEMP_FOLDER = u'temp_folder'
FOLDER_A1 = u'a1'
FOLDER_A2 = u'a2'
FILENAME_PATTERN = u'file%03d.txt'
'''
1. create folder 'temp/a1' with more than 20 files in it
2. create folder 'temp/a2', empty
3. copy 'a1' and 'a2', in this order to the test sync root
4. repeat step 3, but copy 'a2' and 'a1', in this order (to the test sync root)
5. Verify that both folders and their content are synced to the DM, in both steps 3 and 4
'''
def setUp(self):
super(TestLocalPaste, self).setUp()
log.debug('*** enter TestLocalPaste.setUp()')
log.debug('*** engine1 starting')
self.engine_1.start()
self.wait_sync(wait_for_async=True)
log.debug('*** engine 1 synced')
self.assertTrue(self.local_client_1.exists('/'), "Test sync root should be synced")
self.workspace_abspath = self.local_client_1.abspath('/')
# create folder a1 and a2 under a temp folder
self.local_temp = tempfile.mkdtemp(self.TEMP_FOLDER)
self.folder1 = os.path.join(self.local_temp, self.FOLDER_A1)
os.makedirs(self.folder1)
self.folder2 = os.path.join(self.local_temp, self.FOLDER_A2)
os.makedirs(self.folder2)
# add files in folder 'temp/a1'
for file_num in range(1, self.NUMBER_OF_LOCAL_FILES + 1):
filename = self.FILENAME_PATTERN % file_num
with open(os.path.join(self.folder1, filename), 'w') as f:
f.write(FILE_CONTENT)
log.debug('*** exit TestLocalPaste.setUp()')
def tearDown(self):
log.debug('*** enter TestLocalPaste.tearDown()')
# delete temp folder
shutil.rmtree(self.local_temp)
super(TestLocalPaste, self).tearDown()
log.debug('*** exit TestLocalPaste.tearDown()')
"""
copy 'a2' to 'Nuxeo Drive Test Workspace', then 'a1' to 'Nuxeo Drive Test Workspace'
"""
def test_copy_paste_empty_folder_first(self):
log.debug('*** enter TestLocalPaste.test_copy_paste_empty_folder_first()')
# copy 'temp/a2' under 'Nuxeo Drive Test Workspace'
shutil.copytree(self.folder2, os.path.join(self.workspace_abspath, self.FOLDER_A2))
# copy 'temp/a1' under 'Nuxeo Drive Test Workspace'
shutil.copytree(self.folder1, os.path.join(self.workspace_abspath, self.FOLDER_A1))
self.wait_sync(timeout=TEST_TIMEOUT)
# check that '/Nuxeo Drive Test Workspace/a1' does exist
self.assertTrue(self.local_client_1.exists(os.path.join('/', self.FOLDER_A1)))
# check that '/Nuxeo Drive Test Workspace/a2' does exist
self.assertTrue(self.local_client_1.exists(os.path.join('/', self.FOLDER_A2)))
# check that '/Nuxeo Drive Test Workspace/a1' has all the files
children = os.listdir(os.path.join(self.workspace_abspath, self.FOLDER_A1))
self.assertEqual(len(children), self.NUMBER_OF_LOCAL_FILES,
'folder /Nuxeo Drive Test Workspace/%s has %d files (expected %d)' %
(self.FOLDER_A1, len(children), self.NUMBER_OF_LOCAL_FILES))
# check that remote (DM) 'Nuxeo Drive Test Workspace/a1' exists
remote_ref_1 = self.local_client_1.get_remote_id(os.path.join('/', self.FOLDER_A1))
self.assertTrue(self.remote_file_system_client_1.exists(remote_ref_1))
# check that remote (DM) 'Nuxeo Drive Test Workspace/a2' exists
remote_ref_2 = self.local_client_1.get_remote_id(os.path.join('/', self.FOLDER_A2))
self.assertTrue(self.remote_file_system_client_1.exists(remote_ref_2))
# check that remote (DM) 'Nuxeo Drive Test Workspace/a1' has all the files
remote_children = [remote_info.name
for remote_info in self.remote_file_system_client_1.get_children_info(remote_ref_1)]
self.assertEqual(len(remote_children), self.NUMBER_OF_LOCAL_FILES,
'remote folder /Nuxeo Drive Test Workspace/%s has %d files (expected %d)' %
(self.FOLDER_A1, len(remote_children), self.NUMBER_OF_LOCAL_FILES))
log.debug('*** exit TestLocalPaste.test_copy_paste_empty_folder_first()')
"""
copy 'a1' to 'Nuxeo Drive Test Workspace', then 'a2' to 'Nuxeo Drive Test Workspace'
"""
def test_copy_paste_empty_folder_last(self):
log.debug('*** enter TestLocalPaste.test_copy_paste_empty_folder_last()')
workspace_abspath = self.local_client_1.abspath('/')
# copy 'temp/a1' under 'Nuxeo Drive Test Workspace'
shutil.copytree(self.folder1, os.path.join(workspace_abspath, self.FOLDER_A1))
# copy 'temp/a2' under 'Nuxeo Drive Test Workspace'
shutil.copytree(self.folder2, os.path.join(workspace_abspath, self.FOLDER_A2))
self.wait_sync(timeout=TEST_TIMEOUT)
# check that '/Nuxeo Drive Test Workspace/a1' does exist
self.assertTrue(self.local_client_1.exists(os.path.join('/', self.FOLDER_A1)))
# check that '/Nuxeo Drive Test Workspace/a2' does exist
self.assertTrue(self.local_client_1.exists(os.path.join('/', self.FOLDER_A2)))
# check that '/Nuxeo Drive Test Workspace/a1' has all the files
children = os.listdir(os.path.join(self.workspace_abspath, self.FOLDER_A1))
self.assertEqual(len(children), self.NUMBER_OF_LOCAL_FILES,
'folder /Nuxeo Drive Test Workspace/%s has %d files (expected %d)' %
(self.FOLDER_A1, len(children), self.NUMBER_OF_LOCAL_FILES))
# check that remote (DM) 'Nuxeo Drive Test Workspace/a1' exists
remote_ref_1 = self.local_client_1.get_remote_id(os.path.join('/', self.FOLDER_A1))
self.assertTrue(self.remote_file_system_client_1.exists(remote_ref_1))
# check that remote (DM) 'Nuxeo Drive Test Workspace/a2' exists
remote_ref_2 = self.local_client_1.get_remote_id(os.path.join('/', self.FOLDER_A2))
self.assertTrue(self.remote_file_system_client_1.exists(remote_ref_2))
# check that remote (DM) 'Nuxeo Drive Test Workspace/a1' has all the files
remote_children = [remote_info.name
for remote_info in self.remote_file_system_client_1.get_children_info(remote_ref_1)]
self.assertEqual(len(remote_children), self.NUMBER_OF_LOCAL_FILES,
'remote folder /Nuxeo Drive Test Workspace/%s has %d files (expected %d)' %
(self.FOLDER_A1, len(remote_children), self.NUMBER_OF_LOCAL_FILES))
log.debug('*** exit TestLocalPaste.test_copy_paste_empty_folder_last()')
"""
copy one file from 'a1' into 'Nuxeo Drive Test Workspace/a1', then copy it again locally under a new name with the same remote id
"""
def test_copy_paste_same_file(self):
log.debug('*** enter TestLocalPaste.test_copy_paste_same_file()')
name = self.FILENAME_PATTERN % 1
workspace_abspath = self.local_client_1.abspath('/')
path = os.path.join('/', self.FOLDER_A1, name)
copypath = os.path.join('/', self.FOLDER_A1, name + 'copy')
# copy 'temp/a1' under 'Nuxeo Drive Test Workspace'
os.mkdir(os.path.join(workspace_abspath, self.FOLDER_A1))
shutil.copy2(os.path.join(self.folder1, name), os.path.join(workspace_abspath, self.FOLDER_A1, name))
self.wait_sync(timeout=TEST_TIMEOUT)
# check that '/Nuxeo Drive Test Workspace/a1' does exist
self.assertTrue(self.local_client_1.exists(os.path.join('/', self.FOLDER_A1)))
# check that '/Nuxeo Drive Test Workspace/a1' has all the files
children = os.listdir(os.path.join(self.workspace_abspath, self.FOLDER_A1))
self.assertEqual(len(children), 1)
# check that remote (DM) 'Nuxeo Drive Test Workspace/a1' exists
remote_ref_1 = self.local_client_1.get_remote_id(os.path.join('/', self.FOLDER_A1))
self.assertTrue(self.remote_file_system_client_1.exists(remote_ref_1))
remote_children = [remote_info.name
for remote_info in self.remote_file_system_client_1.get_children_info(remote_ref_1)]
self.assertEqual(len(remote_children), 1)
remote_id = self.local_client_1.get_remote_id(path)
log.debug('*** copy file TestLocalPaste.test_copy_paste_same_file()')
shutil.copy2(self.local_client_1.abspath(path), self.local_client_1.abspath(copypath))
self.local_client_1.set_remote_id(copypath, remote_id)
log.debug('*** wait for sync TestLocalPaste.test_copy_paste_same_file()')
self.wait_sync(timeout=TEST_TIMEOUT)
remote_children = [remote_info.name
for remote_info in self.remote_file_system_client_1.get_children_info(remote_ref_1)]
self.assertEqual(len(remote_children), 2)
children = os.listdir(os.path.join(self.workspace_abspath, self.FOLDER_A1))
self.assertEqual(len(children), 2)
log.debug('*** exit TestLocalPaste.test_copy_paste_same_file()')
|
ssdi-drive/nuxeo-drive
|
nuxeo-drive-client/tests/test_local_paste.py
|
Python
|
lgpl-2.1
| 9,284
|
# This file is part of the uPiot project, https://github.com/gepd/upiot/
#
# MIT License
#
# Copyright (c) 2017 GEPD
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sublime
from sublime_plugin import WindowCommand
from threading import Thread
from ..tools import sampy_manager
from ..tools.serial import selected_port
from ..tools.thread_progress import ThreadProgress
class upiotRetrieveAllFilesCommand(WindowCommand):
def run(self):
port = selected_port(request_port=True)
if not port:
return
self.window.show_input_panel(
'Destination:', '', self.callback, None, None)
def callback(self, path):
th = Thread(target=sampy_manager.get_files, args=(path,))
th.start()
ThreadProgress(th, '', '')
|
gepd/uPiotMicroPythonTool
|
commands/retrieve_all_files.py
|
Python
|
mit
| 1,799
|
# Copyright 2015 Hewlett-Packard Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
import webob.exc
from searchlight.api.v1 import search as search
from searchlight.common import exception
from searchlight.common import utils
import searchlight.elasticsearch
import searchlight.gateway
import searchlight.tests.unit.utils as unit_test_utils
import searchlight.tests.utils as test_utils
def _action_fixture(op_type, data, index=None, doc_type=None, _id=None,
**kwargs):
action = {
'action': op_type,
'id': _id,
'index': index,
'type': doc_type,
'data': data,
}
if kwargs:
action.update(kwargs)
return action
def _image_fixture(op_type, _id=None, index='glance', doc_type='image',
data=None, **kwargs):
image_data = {
'name': 'image-1',
'disk_format': 'raw',
}
if data is not None:
image_data.update(data)
return _action_fixture(op_type, image_data, index, doc_type, _id, **kwargs)
class TestSearchController(test_utils.BaseTestCase):
def setUp(self):
super(TestSearchController, self).setUp()
self.search_controller = search.SearchController()
def test_search_all(self):
request = unit_test_utils.get_fake_request()
self.search_controller.search = mock.Mock(return_value="{}")
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = None
offset = 0
limit = 10
self.search_controller.search(
request, query, index, doc_type, fields, offset, limit)
self.search_controller.search.assert_called_once_with(
request, query, index, doc_type, fields, offset, limit)
def test_search_all_repo(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.search = mock.Mock(return_value="{}")
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.search_controller.search(
request, query, index, doc_type, fields, offset, limit)
repo.search.assert_called_once_with(
index, doc_type, query, fields, offset, limit, True)
def test_search_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.Forbidden)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.search,
request, query, index, doc_type, fields, offset, limit)
def test_search_not_found(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.NotFound)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPNotFound, self.search_controller.search, request,
query, index, doc_type, fields, offset, limit)
def test_search_duplicate(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.search = mock.Mock(side_effect=exception.Duplicate)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPConflict, self.search_controller.search, request,
query, index, doc_type, fields, offset, limit)
def test_search_internal_server_error(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.search = mock.Mock(side_effect=Exception)
query = {"match_all": {}}
index = "glance"
doc_type = "metadef"
fields = []
offset = 0
limit = 10
self.assertRaises(
webob.exc.HTTPInternalServerError, self.search_controller.search,
request, query, index, doc_type, fields, offset, limit)
def test_index_complete(self):
request = unit_test_utils.get_fake_request(is_admin=True)
self.search_controller.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
default_index = 'glance'
default_type = 'image'
self.search_controller.index(
request, actions, default_index, default_type)
self.search_controller.index.assert_called_once_with(
request, actions, default_index, default_type)
def test_index_repo_complete(self):
request = unit_test_utils.get_fake_request(is_admin=True)
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
default_index = 'glance'
default_type = 'image'
self.search_controller.index(
request, actions, default_index, default_type)
repo.index.assert_called_once_with(
default_index, default_type, actions)
def test_index_repo_minimal(self):
request = unit_test_utils.get_fake_request(is_admin=True)
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.index = mock.Mock(return_value="{}")
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.search_controller.index(request, actions)
repo.index.assert_called_once_with(None, None, actions)
def test_index_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.Forbidden)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.index,
request, actions)
def test_index_not_found(self):
request = unit_test_utils.get_fake_request(is_admin=True)
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.NotFound)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPNotFound, self.search_controller.index,
request, actions)
def test_index_duplicate(self):
request = unit_test_utils.get_fake_request(is_admin=True)
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.index = mock.Mock(side_effect=exception.Duplicate)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPConflict, self.search_controller.index,
request, actions)
def test_index_exception(self):
request = unit_test_utils.get_fake_request(is_admin=True)
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.index = mock.Mock(side_effect=Exception)
actions = [{'action': 'create', 'index': 'myindex', 'id': 10,
'type': 'MyTest', 'data': '{"name": "MyName"}'}]
self.assertRaises(
webob.exc.HTTPInternalServerError, self.search_controller.index,
request, actions)
def test_plugins_info(self):
request = unit_test_utils.get_fake_request(is_admin=True)
self.search_controller.plugins_info = mock.Mock(return_value="{}")
self.search_controller.plugins_info(request)
self.search_controller.plugins_info.assert_called_once_with(request)
def test_plugins_info_repo(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.plugins_info = mock.Mock(return_value="{}")
self.search_controller.plugins_info(request)
repo.plugins_info.assert_called_once_with()
def test_plugins_info_forbidden(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.plugins_info = mock.Mock(side_effect=exception.Forbidden)
self.assertRaises(
webob.exc.HTTPForbidden, self.search_controller.plugins_info,
request)
def test_plugins_info_not_found(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.plugins_info = mock.Mock(side_effect=exception.NotFound)
self.assertRaises(webob.exc.HTTPNotFound,
self.search_controller.plugins_info, request)
def test_plugins_info_internal_server_error(self):
request = unit_test_utils.get_fake_request()
repo = searchlight.elasticsearch.CatalogSearchRepo
repo.plugins_info = mock.Mock(side_effect=Exception)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.search_controller.plugins_info, request)
class TestSearchDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestSearchDeserializer, self).setUp()
self.deserializer = search.RequestDeserializer(
utils.get_search_plugins()
)
def test_single_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': 'glance',
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
def test_single_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'type': 'image',
})
output = self.deserializer.search(request)
self.assertEqual(['image'], output['doc_type'])
def test_empty_request(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(sorted(['image', 'metadef']),
sorted(output['doc_type']))
def test_empty_request_admin(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
request.context.is_admin = True
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(sorted(['image', 'metadef']),
sorted(output['doc_type']))
def test_invalid_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'type': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_forbidden_schema(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'schema': {},
})
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_forbidden_self(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'self': {},
})
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_fields_restriction(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'fields': ['description'],
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual(['description'], output['fields'])
def test_highlight_fields(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'highlight': {'fields': {'name': {}}}
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual({'name': {}}, output['query']['highlight']['fields'])
def test_invalid_limit(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_limit(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': -1,
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_invalid_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'offset': 'invalid',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'offset': -1,
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_limit_and_offset(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['metadef'],
'query': {'match_all': {}},
'limit': 1,
'offset': 2,
})
output = self.deserializer.search(request)
self.assertEqual(['glance'], output['index'])
self.assertEqual(['metadef'], output['doc_type'])
self.assertEqual(1, output['limit'])
self.assertEqual(2, output['offset'])
class TestIndexDeserializer(test_utils.BaseTestCase):
def setUp(self):
super(TestIndexDeserializer, self).setUp()
self.deserializer = search.RequestDeserializer(
utils.get_search_plugins()
)
def test_empty_request(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_empty_actions(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
'actions': [],
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_missing_actions(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_operation_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('invalid', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_default_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'invalid',
'actions': [_image_fixture('create', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_invalid_default_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_type': 'invalid',
'actions': [_image_fixture('create', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_empty_operation_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('', '1')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_missing_operation_type(self):
action = _image_fixture('', '1')
action.pop('action')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'index',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_single(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_multiple(self):
actions = [
_image_fixture('create', '1'),
_image_fixture('create', '2', data={'name': 'image-2'}),
]
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': actions,
})
output = self.deserializer.index(request)
expected = {
'actions': [
{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
},
{
'_id': '2',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-2'},
'_type': 'image'
},
],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_missing_data(self):
action = _image_fixture('create', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_with_default_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'actions': [_image_fixture('create', '1', index=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': None,
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': 'glance',
'default_type': None
}
self.assertEqual(expected, output)
def test_create_with_default_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_type': 'image',
'actions': [_image_fixture('create', '1', doc_type=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': None
}],
'default_index': None,
'default_type': 'image'
}
self.assertEqual(expected, output)
def test_create_with_default_index_and_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'default_index': 'glance',
'default_type': 'image',
'actions': [_image_fixture('create', '1', index=None,
doc_type=None)]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': None,
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': None
}],
'default_index': 'glance',
'default_type': 'image'
}
self.assertEqual(expected, output)
def test_create_missing_id(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': None,
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_create_empty_id(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '',
'_index': 'glance',
'_op_type': 'create',
'_source': {'disk_format': 'raw', 'name': 'image-1'},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_create_invalid_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', index='invalid')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_invalid_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', doc_type='invalid')]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_missing_index(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1', index=None)]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_create_missing_doc_type(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('create', '1', doc_type=None)]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_missing_id(self):
action = _image_fixture('update')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_missing_data(self):
action = _image_fixture('update', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_update_using_data(self):
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [_image_fixture('update', '1')]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'doc': {'disk_format': 'raw', 'name': 'image-1'}
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_update_using_script(self):
action = _image_fixture('update', '1', script='<sample script>')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'params': {},
'script': '<sample script>'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_update_using_script_and_data(self):
action = _image_fixture('update', '1', script='<sample script>')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'update',
'_type': 'image',
'params': {'disk_format': 'raw', 'name': 'image-1'},
'script': '<sample script>'
}],
'default_index': None,
'default_type': None,
}
self.assertEqual(expected, output)
def test_delete_missing_id(self):
action = _image_fixture('delete')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.index,
request)
def test_delete_single(self):
action = _image_fixture('delete', '1')
action.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action]
})
output = self.deserializer.index(request)
expected = {
'actions': [{
'_id': '1',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
}],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
def test_delete_multiple(self):
action_1 = _image_fixture('delete', '1')
action_1.pop('data')
action_2 = _image_fixture('delete', '2')
action_2.pop('data')
request = unit_test_utils.get_fake_request()
request.body = jsonutils.dumps({
'actions': [action_1, action_2],
})
output = self.deserializer.index(request)
expected = {
'actions': [
{
'_id': '1',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
},
{
'_id': '2',
'_index': 'glance',
'_op_type': 'delete',
'_source': {},
'_type': 'image'
},
],
'default_index': None,
'default_type': None
}
self.assertEqual(expected, output)
class TestResponseSerializer(test_utils.BaseTestCase):
def setUp(self):
super(TestResponseSerializer, self).setUp()
self.serializer = search.ResponseSerializer()
def test_plugins_info(self):
expected = {
"plugins": [
{
"index": "glance",
"type": "image"
},
{
"index": "glance",
"type": "metadef"
}
]
}
request = webob.Request.blank('/v0.1/search')
response = webob.Response(request=request)
result = {
"plugins": [
{
"index": "glance",
"type": "image"
},
{
"index": "glance",
"type": "metadef"
}
]
}
self.serializer.search(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)
def test_search(self):
expected = [{
'id': '1',
'name': 'image-1',
'disk_format': 'raw',
}]
request = webob.Request.blank('/v0.1/search')
response = webob.Response(request=request)
result = [{
'id': '1',
'name': 'image-1',
'disk_format': 'raw',
}]
self.serializer.search(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)
def test_index(self):
expected = {
'success': '1',
'failed': '0',
'errors': [],
}
request = webob.Request.blank('/v0.1/index')
response = webob.Response(request=request)
result = {
'success': '1',
'failed': '0',
'errors': [],
}
self.serializer.index(response, result)
actual = jsonutils.loads(response.body)
self.assertEqual(expected, actual)
self.assertEqual('application/json', response.content_type)
|
lakshmisampath/searchlight
|
searchlight/tests/unit/v1/test_search.py
|
Python
|
apache-2.0
| 33,311
|
#!/usr/bin/env python3
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for grit.format.gzip_string'''
from __future__ import print_function
import gzip
import io
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import unittest
from grit.format import gzip_string
class FormatGzipStringUnittest(unittest.TestCase):
def testGzipStringRsyncable(self):
# Can only test the rsyncable version on platforms which support rsyncable,
# which at the moment is Linux.
if sys.platform.startswith('linux'):
header_begin = (b'\x1f\x8b') # gzip first two bytes
input = (b'TEST STRING STARTING NOW'
b'continuing'
b'<even more>'
b'<finished NOW>')
compressed = gzip_string.GzipStringRsyncable(input)
self.assertEqual(header_begin, compressed[:2])
compressed_file = io.BytesIO()
compressed_file.write(compressed)
compressed_file.seek(0)
with gzip.GzipFile(mode='rb', fileobj=compressed_file) as f:
output = f.read()
self.assertEqual(output, input)
def testGzipString(self):
header_begin = b'\x1f\x8b' # gzip first two bytes
input = (b'TEST STRING STARTING NOW'
b'continuing'
b'<even more>'
b'<finished NOW>')
compressed = gzip_string.GzipString(input)
self.assertEqual(header_begin, compressed[:2])
compressed_file = io.BytesIO()
compressed_file.write(compressed)
compressed_file.seek(0)
with gzip.GzipFile(mode='rb', fileobj=compressed_file) as f:
output = f.read()
self.assertEqual(output, input)
if __name__ == '__main__':
unittest.main()
|
nwjs/chromium.src
|
tools/grit/grit/format/gzip_string_unittest.py
|
Python
|
bsd-3-clause
| 1,844
|
#!/usr/bin/env python
'''
some useful wx widgets
Andrew Tridgell
June 2012
'''
import wx
class ImagePanel(wx.Panel):
'''a resizable panel containing an image'''
def __init__(self, parent, img):
wx.Panel.__init__(self, parent, -1, size=(1, 1))
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self.set_image(img)
self.Bind(wx.EVT_PAINT, self.on_paint)
def on_paint(self, event):
'''repaint the image'''
dc = wx.AutoBufferedPaintDC(self)
dc.DrawBitmap(self._bmp, 0, 0)
def set_image(self, img):
'''set the image to be displayed'''
self._bmp = wx.BitmapFromImage(img)
self.SetMinSize((self._bmp.GetWidth(), self._bmp.GetHeight()))
|
Inspirati/freeMixer
|
Tools/MAVLink/MAVProxy/modules/lib/mp_widgets.py
|
Python
|
gpl-3.0
| 726
|
# -*- coding: utf-8 -*-
"""
eventlogging.parse
~~~~~~~~~~~~~~~~~~
This module provides a scanf-like parser for raw log lines.
The format specifiers hew closely to those accepted by varnishncsa.
See the `varnishncsa documentation <https://www.varnish-cache.org
/docs/trunk/reference/varnishncsa.html>`_ for details.
Field specifiers
================
+--------+-----------------------------+
| Symbol | Field |
+========+=============================+
| %h | Client IP |
+--------+-----------------------------+
| %j | JSON event object |
+--------+-----------------------------+
| %q | Query-string-encoded JSON |
+--------+-----------------------------+
| %t | Timestamp in NCSA format |
+--------+-----------------------------+
| %{..}i | Tab-delimited string |
+--------+-----------------------------+
| %{..}s | Space-delimited string |
+--------+-----------------------------+
| %{..}d | Integer |
+--------+-----------------------------+
'..' is the desired property name for the capturing group.
"""
from __future__ import division, unicode_literals
import calendar
import datetime
import re
import time
import uuid
from .compat import json, unquote_plus, uuid5
from .crypto import keyhasher, rotating_key
__all__ = ('LogParser', 'ncsa_to_unix', 'ncsa_utcnow', 'capsule_uuid')
# Format string (as would be passed to `strftime`) for timestamps in
# NCSA Common Log Format.
NCSA_FORMAT = '%Y-%m-%dT%H:%M:%S'
# Formats event capsule objects into URLs using the combination of
# origin hostname, sequence ID, and timestamp. This combination is
# guaranteed to be unique. Example::
#
# event://vanadium.eqiad.wmnet/?seqId=438763&timestamp=1359702955
#
EVENTLOGGING_URL_FORMAT = (
'event://%(recvFrom)s/?seqId=%(seqId)s&timestamp=%(timestamp).10s')
# Specifies the length of time in seconds from the moment a key is
# generated until it is expired and replaced with a new key. The key is
# used to anonymize IP addresses.
KEY_LIFESPAN = datetime.timedelta(days=90)
def capsule_uuid(capsule):
"""Generate a UUID for a capsule object.
Gets a unique URI for the capsule using `EVENTLOGGING_URL_FORMAT`
and uses it to generate a UUID5 in the URL namespace.
..seealso:: `RFC 4122 <https://www.ietf.org/rfc/rfc4122.txt>`_.
:param capsule: A capsule object (or any dictionary that defines
`recvFrom`, `seqId`, and `timestamp`).
"""
id = uuid5(uuid.NAMESPACE_URL, EVENTLOGGING_URL_FORMAT % capsule)
return '%032x' % id.int
def ncsa_to_unix(ncsa_ts):
"""Converts an NCSA Common Log Format timestamp to an integer
timestamp representing the number of seconds since UNIX epoch UTC.
:param ncsa_ts: Timestamp in NCSA format.
"""
return calendar.timegm(time.strptime(ncsa_ts, NCSA_FORMAT))
def ncsa_utcnow():
"""Gets the current UTC date and time in NCSA Common Log Format"""
return time.strftime(NCSA_FORMAT, time.gmtime())
def decode_qson(qson):
"""Decodes a QSON (query-string-encoded JSON) object.
:param qson: Query-string-encoded JSON (QSON) payload.
"""
return json.loads(unquote_plus(qson.strip('?;')))
# A cryptographic hash function for hashing client IPs. Produces HMAC SHA1
# hashes by using the client IP as the message and a 64-byte byte string as
# the key. The key is generated at runtime and is refreshed every 90 days.
# It is not written anywhere. The hash value is useful for detecting spam
# (large volume of events sharing a common origin).
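# Conceptually, the keyed hashing amounts to the stdlib sketch below
# (illustrative only; the actual implementation and key rotation live in
# eventlogging.crypto, not in this module):
#
#     import hashlib, hmac, os
#     key = os.urandom(64)                      # refreshed every KEY_LIFESPAN
#     hashed_ip = hmac.new(key, b'192.0.2.1', hashlib.sha1).hexdigest()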
hash_ip = keyhasher(rotating_key(size=64, period=KEY_LIFESPAN.total_seconds()))
class LogParser(object):
"""Parses raw varnish/MediaWiki log lines into encapsulated events."""
def __init__(self, format, ip_hasher=hash_ip):
"""Constructor.
:param format: Format string.
:param ip_hasher: function ip_hasher(ip) -> hashed ip.
"""
self.format = format
# A mapping of format specifiers to a tuple of (regexp, caster).
self.format_specifiers = {
'd': (r'(?P<%s>\d+)', int),
'h': (r'(?P<clientIp>\S+)', ip_hasher),
'i': (r'(?P<%s>[^\t]+)', str),
'j': (r'(?P<capsule>\S+)', json.loads),
'q': (r'(?P<capsule>\?\S+)', decode_qson),
's': (r'(?P<%s>\S+)', str),
't': (r'(?P<timestamp>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})',
ncsa_to_unix),
}
# Field casters, ordered by the relevant field's position in
# format string.
self.casters = []
# Compiled regexp.
format = re.sub(' ', r'\s+', format)
raw = re.sub(r'(?<!%)%({(\w+)})?([dhijqst])', self._repl, format)
self.re = re.compile(raw)
def _repl(self, spec):
"""Replace a format specifier with its expanded regexp matcher
and append its caster to the list. Called by :func:`re.sub`.
"""
_, name, specifier = spec.groups()
matcher, caster = self.format_specifiers[specifier]
if name:
matcher = matcher % name
self.casters.append(caster)
return matcher
def parse(self, line):
"""Parse a log line into a map of field names / values."""
match = self.re.match(line)
if match is None:
raise ValueError(self.re, line)
keys = sorted(match.groupdict(), key=match.start)
event = {k: f(match.group(k)) for f, k in zip(self.casters, keys)}
event.update(event.pop('capsule'))
event['uuid'] = capsule_uuid(event)
return event
def __repr__(self):
return '<LogParser(\'%s\')>' % self.format
|
Facerafter/starcitizen-tools
|
extensions/EventLogging/server/eventlogging/parse.py
|
Python
|
gpl-3.0
| 5,715
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import json
import ast
from webob import Response
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller import dpset
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_v1_3
from ryu.lib import ofctl_v1_0
from ryu.lib import ofctl_v1_2
from ryu.lib import ofctl_v1_3
from ryu.app.wsgi import ControllerBase, WSGIApplication
LOG = logging.getLogger('ryu.app.ofctl_rest')
# REST API
#
# Retrieve the switch stats
#
# get the list of all switches
# GET /stats/switches
#
# get the desc stats of the switch
# GET /stats/desc/<dpid>
#
# get flows stats of the switch
# GET /stats/flow/<dpid>
#
# get flows stats of the switch filtered by the fields
# POST /stats/flow/<dpid>
#
# get ports stats of the switch
# GET /stats/port/<dpid>
#
# get meter features stats of the switch
# GET /stats/meterfeatures/<dpid>
#
# get meter config stats of the switch
# GET /stats/meterconfig/<dpid>
#
# get meters stats of the switch
# GET /stats/meter/<dpid>
#
# get group features stats of the switch
# GET /stats/groupfeatures/<dpid>
#
# get groups desc stats of the switch
# GET /stats/groupdesc/<dpid>
#
# get groups stats of the switch
# GET /stats/group/<dpid>
#
# get ports description of the switch
# GET /stats/portdesc/<dpid>
# Update the switch stats
#
# add a flow entry
# POST /stats/flowentry/add
#
# modify all matching flow entries
# POST /stats/flowentry/modify
#
# modify flow entry strictly matching wildcards and priority
# POST /stats/flowentry/modify_strict
#
# delete all matching flow entries
# POST /stats/flowentry/delete
#
# delete flow entry strictly matching wildcards and priority
# POST /stats/flowentry/delete_strict
#
# delete all flow entries of the switch
# DELETE /stats/flowentry/clear/<dpid>
#
# add a meter entry
# POST /stats/meterentry/add
#
# modify a meter entry
# POST /stats/meterentry/modify
#
# delete a meter entry
# POST /stats/meterentry/delete
#
# add a group entry
# POST /stats/groupentry/add
#
# modify a group entry
# POST /stats/groupentry/modify
#
# delete a group entry
# POST /stats/groupentry/delete
#
# modify behavior of the physical port
# POST /stats/portdesc/modify
#
#
# send an experimenter message
# POST /stats/experimenter/<dpid>
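# Example usage of the endpoints above (illustrative only: dpid 1, the flow
# body and the default wsgi port 8080 are assumptions for this sketch):
#
#     curl http://localhost:8080/stats/switches
#     curl http://localhost:8080/stats/flow/1
#     curl -X POST http://localhost:8080/stats/flowentry/add \
#          -d '{"dpid": 1, "match": {"in_port": 1}, "actions": [{"type": "OUTPUT", "port": 2}]}'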
class StatsController(ControllerBase):
def __init__(self, req, link, data, **config):
super(StatsController, self).__init__(req, link, data, **config)
self.dpset = data['dpset']
self.waiters = data['waiters']
def get_dpids(self, req, **_kwargs):
dps = self.dpset.dps.keys()
body = json.dumps(dps)
return (Response(content_type='application/json', body=body))
def get_desc_stats(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
desc = ofctl_v1_0.get_desc_stats(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
desc = ofctl_v1_2.get_desc_stats(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
desc = ofctl_v1_3.get_desc_stats(dp, self.waiters)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(desc)
return (Response(content_type='application/json', body=body))
def get_flow_stats(self, req, dpid, **_kwargs):
if req.body == '':
flow = {}
else:
try:
flow = ast.literal_eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
flows = ofctl_v1_0.get_flow_stats(dp, self.waiters, flow)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
flows = ofctl_v1_2.get_flow_stats(dp, self.waiters, flow)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
flows = ofctl_v1_3.get_flow_stats(dp, self.waiters, flow)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(flows)
return (Response(content_type='application/json', body=body))
def get_port_stats(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
ports = ofctl_v1_0.get_port_stats(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
ports = ofctl_v1_2.get_port_stats(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ports = ofctl_v1_3.get_port_stats(dp, self.waiters)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(ports)
return (Response(content_type='application/json', body=body))
def get_meter_features(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
meters = ofctl_v1_3.get_meter_features(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION or \
dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(meters)
return (Response(content_type='application/json', body=body))
def get_meter_config(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
meters = ofctl_v1_3.get_meter_config(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION or \
dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(meters)
return (Response(content_type='application/json', body=body))
def get_meter_stats(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
meters = ofctl_v1_3.get_meter_stats(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION or \
dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(meters)
return (Response(content_type='application/json', body=body))
def get_group_features(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
groups = ofctl_v1_2.get_group_features(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
groups = ofctl_v1_3.get_group_features(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(groups)
return Response(content_type='application/json', body=body)
def get_group_desc(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
groups = ofctl_v1_2.get_group_desc(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
groups = ofctl_v1_3.get_group_desc(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(groups)
return Response(content_type='application/json', body=body)
def get_group_stats(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
groups = ofctl_v1_2.get_group_stats(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
groups = ofctl_v1_3.get_group_stats(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(groups)
return Response(content_type='application/json', body=body)
def get_port_desc(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
groups = ofctl_v1_0.get_port_desc(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
groups = ofctl_v1_2.get_port_desc(dp, self.waiters)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
groups = ofctl_v1_3.get_port_desc(dp, self.waiters)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
body = json.dumps(groups)
return Response(content_type='application/json', body=body)
def mod_flow_entry(self, req, cmd, **_kwargs):
try:
flow = ast.literal_eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
dpid = flow.get('dpid')
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if cmd == 'add':
cmd = dp.ofproto.OFPFC_ADD
elif cmd == 'modify':
cmd = dp.ofproto.OFPFC_MODIFY
elif cmd == 'modify_strict':
cmd = dp.ofproto.OFPFC_MODIFY_STRICT
elif cmd == 'delete':
cmd = dp.ofproto.OFPFC_DELETE
elif cmd == 'delete_strict':
cmd = dp.ofproto.OFPFC_DELETE_STRICT
else:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
ofctl_v1_0.mod_flow_entry(dp, flow, cmd)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
ofctl_v1_2.mod_flow_entry(dp, flow, cmd)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ofctl_v1_3.mod_flow_entry(dp, flow, cmd)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
return Response(status=200)
def delete_flow_entry(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
ofctl_v1_0.delete_flow_entry(dp)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
ofctl_v1_2.mod_flow_entry(dp, {}, dp.ofproto.OFPFC_DELETE)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ofctl_v1_3.mod_flow_entry(dp, {}, dp.ofproto.OFPFC_DELETE)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
return Response(status=200)
def mod_meter_entry(self, req, cmd, **_kwargs):
try:
flow = ast.literal_eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
dpid = flow.get('dpid')
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if cmd == 'add':
cmd = dp.ofproto.OFPMC_ADD
elif cmd == 'modify':
cmd = dp.ofproto.OFPMC_MODIFY
elif cmd == 'delete':
cmd = dp.ofproto.OFPMC_DELETE
else:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ofctl_v1_3.mod_meter_entry(dp, flow, cmd)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION or \
dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
return Response(status=200)
def mod_group_entry(self, req, cmd, **_kwargs):
try:
group = ast.literal_eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
dpid = group.get('dpid')
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
if cmd == 'add':
cmd = dp.ofproto.OFPGC_ADD
elif cmd == 'modify':
cmd = dp.ofproto.OFPGC_MODIFY
elif cmd == 'delete':
cmd = dp.ofproto.OFPGC_DELETE
else:
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
ofctl_v1_2.mod_group_entry(dp, group, cmd)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ofctl_v1_3.mod_group_entry(dp, group, cmd)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
return Response(status=200)
def mod_port_behavior(self, req, cmd, **_kwargs):
try:
port_config = ast.literal_eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
dpid = port_config.get('dpid')
port_no = int(port_config.get('port_no', 0))
port_info = self.dpset.port_state[int(dpid)].get(port_no)
if 'hw_addr' not in port_config:
if port_info is not None:
port_config['hw_addr'] = port_info.hw_addr
else:
return Response(status=404)
if 'advertise' not in port_config:
if port_info is not None:
port_config['advertise'] = port_info.advertised
else:
return Response(status=404)
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
if cmd != 'modify':
return Response(status=404)
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
ofctl_v1_0.mod_port_behavior(dp, port_config)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
ofctl_v1_2.mod_port_behavior(dp, port_config)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ofctl_v1_3.mod_port_behavior(dp, port_config)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
def send_experimenter(self, req, dpid, **_kwargs):
dp = self.dpset.get(int(dpid))
if dp is None:
return Response(status=404)
try:
exp = ast.literal_eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
if dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
ofctl_v1_2.send_experimenter(dp, exp)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ofctl_v1_3.send_experimenter(dp, exp)
elif dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
LOG.debug('Request not supported in this OF protocol version')
return Response(status=501)
else:
LOG.debug('Unsupported OF protocol')
return Response(status=501)
return Response(status=200)
class RestStatsApi(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION,
ofproto_v1_2.OFP_VERSION,
ofproto_v1_3.OFP_VERSION]
_CONTEXTS = {
'dpset': dpset.DPSet,
'wsgi': WSGIApplication
}
def __init__(self, *args, **kwargs):
super(RestStatsApi, self).__init__(*args, **kwargs)
self.dpset = kwargs['dpset']
wsgi = kwargs['wsgi']
self.waiters = {}
self.data = {}
self.data['dpset'] = self.dpset
self.data['waiters'] = self.waiters
mapper = wsgi.mapper
wsgi.registory['StatsController'] = self.data
path = '/stats'
uri = path + '/switches'
mapper.connect('stats', uri,
controller=StatsController, action='get_dpids',
conditions=dict(method=['GET']))
uri = path + '/desc/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_desc_stats',
conditions=dict(method=['GET']))
uri = path + '/flow/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_flow_stats',
conditions=dict(method=['GET', 'POST']))
uri = path + '/port/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_port_stats',
conditions=dict(method=['GET']))
uri = path + '/meterfeatures/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_meter_features',
conditions=dict(method=['GET']))
uri = path + '/meterconfig/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_meter_config',
conditions=dict(method=['GET']))
uri = path + '/meter/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_meter_stats',
conditions=dict(method=['GET']))
uri = path + '/groupfeatures/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_group_features',
conditions=dict(method=['GET']))
uri = path + '/groupdesc/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_group_desc',
conditions=dict(method=['GET']))
uri = path + '/group/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_group_stats',
conditions=dict(method=['GET']))
uri = path + '/portdesc/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_port_desc',
conditions=dict(method=['GET']))
uri = path + '/flowentry/{cmd}'
mapper.connect('stats', uri,
controller=StatsController, action='mod_flow_entry',
conditions=dict(method=['POST']))
uri = path + '/flowentry/clear/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='delete_flow_entry',
conditions=dict(method=['DELETE']))
uri = path + '/meterentry/{cmd}'
mapper.connect('stats', uri,
controller=StatsController, action='mod_meter_entry',
conditions=dict(method=['POST']))
uri = path + '/groupentry/{cmd}'
mapper.connect('stats', uri,
controller=StatsController, action='mod_group_entry',
conditions=dict(method=['POST']))
uri = path + '/portdesc/{cmd}'
mapper.connect('stats', uri,
controller=StatsController, action='mod_port_behavior',
conditions=dict(method=['POST']))
uri = path + '/experimenter/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='send_experimenter',
conditions=dict(method=['POST']))
@set_ev_cls([ofp_event.EventOFPStatsReply,
ofp_event.EventOFPDescStatsReply,
ofp_event.EventOFPFlowStatsReply,
ofp_event.EventOFPPortStatsReply,
ofp_event.EventOFPMeterStatsReply,
ofp_event.EventOFPMeterFeaturesStatsReply,
ofp_event.EventOFPMeterConfigStatsReply,
ofp_event.EventOFPGroupStatsReply,
ofp_event.EventOFPGroupFeaturesStatsReply,
ofp_event.EventOFPGroupDescStatsReply,
ofp_event.EventOFPPortDescStatsReply
], MAIN_DISPATCHER)
def stats_reply_handler(self, ev):
msg = ev.msg
dp = msg.datapath
if dp.id not in self.waiters:
return
if msg.xid not in self.waiters[dp.id]:
return
lock, msgs = self.waiters[dp.id][msg.xid]
msgs.append(msg)
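        # Multipart/stats replies may span several messages; the version-specific
        # REPLY_MORE flag checked below tells us whether more parts are still in
        # flight, in which case we keep waiting instead of waking the requester.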
flags = 0
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
flags = dp.ofproto.OFPSF_REPLY_MORE
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
flags = dp.ofproto.OFPSF_REPLY_MORE
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
flags = dp.ofproto.OFPMPF_REPLY_MORE
if msg.flags & flags:
return
del self.waiters[dp.id][msg.xid]
lock.set()
@set_ev_cls([ofp_event.EventOFPSwitchFeatures], MAIN_DISPATCHER)
def features_reply_handler(self, ev):
msg = ev.msg
dp = msg.datapath
if dp.id not in self.waiters:
return
if msg.xid not in self.waiters[dp.id]:
return
lock, msgs = self.waiters[dp.id][msg.xid]
msgs.append(msg)
del self.waiters[dp.id][msg.xid]
lock.set()
|
Neil741/ryu-master
|
ryu/app/ofctl_rest.py
|
Python
|
apache-2.0
| 24,075
|
#!/usr/bin/env python
# Copyright (c) 2013-2014 ZUYD Research
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author Robert Jacobs/info@rjpjacobs.nl
"""
This will check the scan topic and looks if there are any -inf (minus infinite) values
If there are too much -infs there is probably an object nearby -> emergency stop
"""
import roslib; roslib.load_manifest('zuros_emergency_stop')
import rospy
from sensor_msgs.msg import LaserScan
from std_msgs.msg import Bool
import os
import math
## Class for checking the status of the scan topic depth points
class EmergencyChecker(object):
## Constructor
def __init__(self):
        self.inf_count = 0
self.emergency_stop = False
# Create a publisher for the emergency stop topic
self.pub = rospy.Publisher('emergency_stop', Bool)
## Callback method
# Gets called when there is new data in the scan topic
def callback_scan(self, data):
self.inf_count = 0
for r in range (0,640):
#the scan publisher (zuros_depth) publishes -inf values for each point < 0.8
if(math.isinf(data.ranges[r])):
self.inf_count = self.inf_count + 1
if(self.inf_count >= 5 and self.emergency_stop == False):
self.emergency_stop = True
rospy.loginfo("EMERGENCY STOP ISSUED")
else:
if(self.inf_count < 5 and self.emergency_stop == True):
self.emergency_stop = False
rospy.loginfo("EMERGENCY STOP RELEASED")
self.pub.publish(Bool(self.emergency_stop))
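# Note: per the in-code comment above, the zuros_depth publisher maps every range
# closer than 0.8 to -inf, so five or more -inf readings in one 640-point scan trip
# the emergency stop, and it is released again once the count drops below five.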
## Run the node when this file is executed directly (rather than imported as a module)
if __name__ == '__main__':
rospy.init_node('emergency_scanner', anonymous=False)
# Start
emc = EmergencyChecker()
rospy.Subscriber("scan", LaserScan, emc.callback_scan)
# Spin
rospy.spin()
|
robertjacobs/zuros
|
zuros_control/zuros_emergency_stop/src/emergency_stop.py
|
Python
|
mit
| 3,303
|
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.timeline.service import register_timeline_implementation
from . import service
@register_timeline_implementation("projects.project", "create")
@register_timeline_implementation("projects.project", "change")
@register_timeline_implementation("projects.project", "delete")
def project_timeline(instance, extra_data={}):
    result = {
"project": service.extract_project_info(instance),
}
result.update(extra_data)
return result
@register_timeline_implementation("milestones.milestone", "create")
@register_timeline_implementation("milestones.milestone", "change")
@register_timeline_implementation("milestones.milestone", "delete")
def milestone_timeline(instance, extra_data={}):
    result = {
"milestone": service.extract_milestone_info(instance),
"project": service.extract_project_info(instance.project),
}
result.update(extra_data)
return result
@register_timeline_implementation("userstories.userstory", "create")
@register_timeline_implementation("userstories.userstory", "change")
@register_timeline_implementation("userstories.userstory", "delete")
def userstory_timeline(instance, extra_data={}):
    result = {
"userstory": service.extract_userstory_info(instance),
"project": service.extract_project_info(instance.project),
}
if instance.milestone is not None:
result["milestone"] = service.extract_milestone_info(instance.milestone)
result.update(extra_data)
return result
@register_timeline_implementation("issues.issue", "create")
@register_timeline_implementation("issues.issue", "change")
@register_timeline_implementation("issues.issue", "delete")
def issue_timeline(instance, extra_data={}):
    result = {
"issue": service.extract_issue_info(instance),
"project": service.extract_project_info(instance.project),
}
result.update(extra_data)
return result
@register_timeline_implementation("tasks.task", "create")
@register_timeline_implementation("tasks.task", "change")
@register_timeline_implementation("tasks.task", "delete")
def task_timeline(instance, extra_data={}):
    result = {
"task": service.extract_task_info(instance),
"project": service.extract_project_info(instance.project),
}
result.update(extra_data)
return result
@register_timeline_implementation("wiki.wikipage", "create")
@register_timeline_implementation("wiki.wikipage", "change")
@register_timeline_implementation("wiki.wikipage", "delete")
def wiki_page_timeline(instance, extra_data={}):
    result = {
"wikipage": service.extract_wiki_page_info(instance),
"project": service.extract_project_info(instance.project),
}
result.update(extra_data)
return result
@register_timeline_implementation("projects.membership", "create")
@register_timeline_implementation("projects.membership", "delete")
def membership_timeline(instance, extra_data={}):
result = {
"user": service.extract_user_info(instance.user),
"project": service.extract_project_info(instance.project),
"role": service.extract_role_info(instance.role),
}
result.update(extra_data)
return result
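# Editor's note: a hedged sketch of how a further content type could be wired into the
# same registry. "epics.epic" and service.extract_epic_info are hypothetical names used
# purely for illustration, so the example is left commented out.
# @register_timeline_implementation("epics.epic", "create")
# def epic_timeline(instance, extra_data={}):
#     result = {
#         "epic": service.extract_epic_info(instance),
#         "project": service.extract_project_info(instance.project),
#     }
#     result.update(extra_data)
#     return result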
|
rajiteh/taiga-back
|
taiga/timeline/timeline_implementations.py
|
Python
|
agpl-3.0
| 4,003
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mytype.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='mytype.proto',
package='',
serialized_pb=_b('\n\x0cmytype.proto\"8\n\x06MyType\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0e\n\x06longId\x18\x02 \x02(\x03\x12\x12\n\nStrMessage\x18\x03 \x01(\x0c\"9\n\x07MyType1\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0e\n\x06longId\x18\x02 \x02(\x03\x12\x12\n\nStrMessage\x18\x03 \x01(\x0c\"9\n\x07MyType2\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0e\n\x06longId\x18\x02 \x02(\x03\x12\x12\n\nStrMessage\x18\x03 \x01(\x0c\"9\n\x07MyType3\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0e\n\x06longId\x18\x02 \x02(\x03\x12\x12\n\nStrMessage\x18\x03 \x01(\x0c\"`\n\x0bMyTypeArray\x12\x12\n\x01t\x18\x01 \x03(\x0b\x32\x07.MyType\x12\x13\n\x02t1\x18\x02 \x03(\x0b\x32\x07.MyType\x12\x13\n\x02t2\x18\x03 \x03(\x0b\x32\x07.MyType\x12\x13\n\x02t3\x18\x04 \x03(\x0b\x32\x07.MyType')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_MYTYPE = _descriptor.Descriptor(
name='MyType',
full_name='MyType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='MyType.id', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='longId', full_name='MyType.longId', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='StrMessage', full_name='MyType.StrMessage', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=16,
serialized_end=72,
)
_MYTYPE1 = _descriptor.Descriptor(
name='MyType1',
full_name='MyType1',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='MyType1.id', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='longId', full_name='MyType1.longId', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='StrMessage', full_name='MyType1.StrMessage', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=74,
serialized_end=131,
)
_MYTYPE2 = _descriptor.Descriptor(
name='MyType2',
full_name='MyType2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='MyType2.id', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='longId', full_name='MyType2.longId', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='StrMessage', full_name='MyType2.StrMessage', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=133,
serialized_end=190,
)
_MYTYPE3 = _descriptor.Descriptor(
name='MyType3',
full_name='MyType3',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='MyType3.id', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='longId', full_name='MyType3.longId', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='StrMessage', full_name='MyType3.StrMessage', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=192,
serialized_end=249,
)
_MYTYPEARRAY = _descriptor.Descriptor(
name='MyTypeArray',
full_name='MyTypeArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='t', full_name='MyTypeArray.t', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='t1', full_name='MyTypeArray.t1', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='t2', full_name='MyTypeArray.t2', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='t3', full_name='MyTypeArray.t3', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=251,
serialized_end=347,
)
_MYTYPEARRAY.fields_by_name['t'].message_type = _MYTYPE
_MYTYPEARRAY.fields_by_name['t1'].message_type = _MYTYPE
_MYTYPEARRAY.fields_by_name['t2'].message_type = _MYTYPE
_MYTYPEARRAY.fields_by_name['t3'].message_type = _MYTYPE
DESCRIPTOR.message_types_by_name['MyType'] = _MYTYPE
DESCRIPTOR.message_types_by_name['MyType1'] = _MYTYPE1
DESCRIPTOR.message_types_by_name['MyType2'] = _MYTYPE2
DESCRIPTOR.message_types_by_name['MyType3'] = _MYTYPE3
DESCRIPTOR.message_types_by_name['MyTypeArray'] = _MYTYPEARRAY
MyType = _reflection.GeneratedProtocolMessageType('MyType', (_message.Message,), dict(
DESCRIPTOR = _MYTYPE,
__module__ = 'mytype_pb2'
# @@protoc_insertion_point(class_scope:MyType)
))
_sym_db.RegisterMessage(MyType)
MyType1 = _reflection.GeneratedProtocolMessageType('MyType1', (_message.Message,), dict(
DESCRIPTOR = _MYTYPE1,
__module__ = 'mytype_pb2'
# @@protoc_insertion_point(class_scope:MyType1)
))
_sym_db.RegisterMessage(MyType1)
MyType2 = _reflection.GeneratedProtocolMessageType('MyType2', (_message.Message,), dict(
DESCRIPTOR = _MYTYPE2,
__module__ = 'mytype_pb2'
# @@protoc_insertion_point(class_scope:MyType2)
))
_sym_db.RegisterMessage(MyType2)
MyType3 = _reflection.GeneratedProtocolMessageType('MyType3', (_message.Message,), dict(
DESCRIPTOR = _MYTYPE3,
__module__ = 'mytype_pb2'
# @@protoc_insertion_point(class_scope:MyType3)
))
_sym_db.RegisterMessage(MyType3)
MyTypeArray = _reflection.GeneratedProtocolMessageType('MyTypeArray', (_message.Message,), dict(
DESCRIPTOR = _MYTYPEARRAY,
__module__ = 'mytype_pb2'
# @@protoc_insertion_point(class_scope:MyTypeArray)
))
_sym_db.RegisterMessage(MyTypeArray)
# @@protoc_insertion_point(module_scope)
|
pkhopper/whatever
|
src/pbbox/messages/mytype_pb2.py
|
Python
|
mit
| 9,888
|
# -*- coding: utf-8 -*-
#
# gapic-google-cloud-logging-v2 documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
__version__ = '0.14.0'
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.napoleon',
'sphinx.ext.viewcode',
]
# autodoc/autosummary flags
autoclass_content = 'both'
autodoc_default_flags = ['members']
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'gapic-google-cloud-logging-v2'
copyright = u'2016, Google'
author = u'Google APIs'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = '.'.join(release.split('.')[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'gapic-google-cloud-logging-v2-doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'gapic-google-cloud-logging-v2.tex', u'gapic-google-cloud-logging-v2 Documentation',
author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'gapic-google-cloud-logging-v2', u'gapic-google-cloud-logging-v2 Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'gapic-google-cloud-logging-v2', u'gapic-google-cloud-logging-v2 Documentation',
author, 'gapic-google-cloud-logging-v2', 'GAPIC library for the google-cloud-logging (api.version) service',
'APIs'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://python.readthedocs.org/en/latest/', None),
'gax': ('https://gax-python.readthedocs.org/en/latest/', None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
|
eoogbe/api-client-staging
|
generated/python/gapic-google-cloud-logging-v2-docs/docs/conf.py
|
Python
|
bsd-3-clause
| 10,297
|
import json
from array import array
class dsf_geom_load (object):
def __init__ (self):
pass
@classmethod
def intern_geometry (self, jdata):
v = array ('f')
f = list ()
m = array ('i')
g = array ('i')
for vertex in jdata['vertices']['values']:
v.extend (vertex)
group_list = jdata['polygon_groups']['values']
mat_list = jdata['polygon_material_groups']['values']
for polygon in jdata['polylist']['values']:
(gidx, midx, verts) = (polygon[0], polygon[1], polygon[2:])
f.append (verts)
m.append (midx)
g.append (gidx)
return {
'v': v, 'g': g, 'm': m, 'f': f,
'gm': group_list, 'mm': mat_list
}
@classmethod
def load_geometry (self, filename, feats = ['vt', 'g', 'm']):
"""create a model from the json-data in jdata.
g - include face-groups
m - include materials
"""
from . import dsf_io
jdata = dsf_io.read_json_data (filename, encoding = 'latin1')
geom = self.intern_geometry\
(jdata['geometry_library'][0])
geom['id_path'] =\
jdata['asset_info']['id'] + "#" + jdata['node_library'][0]['id']
return geom
@classmethod
def load_file (self, filename):
geom_data = self.load_geometry (filename)
return geom_data
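# Usage sketch (assuming a valid .dsf file path): dsf_geom_load.load_file(path) returns
# a dict with the flat vertex array 'v', face vertex-index lists 'f', per-face group and
# material indices 'g'/'m', the group/material name lists 'gm'/'mm', and an 'id_path'.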
genesis = '/images/winshare/dsdata4/data/DAZ 3D/Genesis/Base/Genesis.dsf'
|
337529542/dsf-utils
|
dsf_geom_load.py
|
Python
|
gpl-3.0
| 1,353
|
from .boolean_objects import L
from .shortcuts import and_, or_, is_boolean_formula
__all__ = ["L", "and_", "or_", "is_boolean_formula"]
|
aubreystarktoller/lite-boolean-formulae
|
lite_boolean_formulae/__init__.py
|
Python
|
bsd-3-clause
| 138
|
from django.contrib import admin
from book.models import Book, Author, Publisher
class BookAdmin(admin.ModelAdmin):
list_display = ('title',)
admin.site.register(Book, BookAdmin)
class AuthorAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(Author, AuthorAdmin)
class PublisherAdmin(admin.ModelAdmin):
list_display = ('name',)
admin.site.register(Publisher, PublisherAdmin)
|
fredwulei/fredsneverland
|
fredsneverland/book/admin.py
|
Python
|
mit
| 412
|
# Copyright 2012-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path, subprocess
import typing as T
from ..mesonlib import EnvironmentException, MachineChoice
from .compilers import Compiler
from .mixins.clike import CLikeCompiler
from .mixins.gnu import GnuCompiler
from .mixins.clang import ClangCompiler
if T.TYPE_CHECKING:
from ..envconfig import MachineInfo
class ObjCCompiler(CLikeCompiler, Compiler):
language = 'objc'
def __init__(self, exelist, version, for_machine: MachineChoice,
is_cross: bool, info: 'MachineInfo',
exe_wrap: T.Optional[str], **kwargs):
Compiler.__init__(self, exelist, version, for_machine, info, **kwargs)
CLikeCompiler.__init__(self, is_cross, exe_wrap)
@staticmethod
def get_display_language():
return 'Objective-C'
def sanity_check(self, work_dir, environment):
# TODO try to use sanity_check_impl instead of duplicated code
source_name = os.path.join(work_dir, 'sanitycheckobjc.m')
binary_name = os.path.join(work_dir, 'sanitycheckobjc')
extra_flags = []
extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
if self.is_cross:
extra_flags += self.get_compile_only_args()
else:
extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
with open(source_name, 'w') as ofile:
ofile.write('#import<stddef.h>\n'
'int main(void) { return 0; }\n')
pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
pc.wait()
if pc.returncode != 0:
raise EnvironmentException('ObjC compiler %s can not compile programs.' % self.name_string())
if self.is_cross:
# Can't check if the binaries run so we have to assume they do
return
pe = subprocess.Popen(binary_name)
pe.wait()
if pe.returncode != 0:
raise EnvironmentException('Executables created by ObjC compiler %s are not runnable.' % self.name_string())
class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
is_cross, info: 'MachineInfo', exe_wrapper=None,
defines=None, **kwargs):
ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
info, exe_wrapper, **kwargs)
GnuCompiler.__init__(self, defines)
default_warn_args = ['-Wall', '-Winvalid-pch']
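        # Warning levels '0'-'3' map onto increasingly strict flag sets: nothing,
        # -Wall -Winvalid-pch, plus -Wextra, plus -Wpedantic.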
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
is_cross, info: 'MachineInfo', exe_wrapper=None,
**kwargs):
ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
info, exe_wrapper, **kwargs)
ClangCompiler.__init__(self, [])
default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
|
becm/meson
|
mesonbuild/compilers/objc.py
|
Python
|
apache-2.0
| 4,047
|
import sys
import argparse
def josephus(n, k):
""" Function that calculates the last person location
of the josephus problem
Args:
n(int): number of people in circle
k(int): step rate
Returns:
int: index value of the winner
"""
# special case, k = 1
if k == 1:
return n - 1
# base case, if only one person left, they win
elif n <= 1:
return 0
if k <= n:
        num_dead = n // k
else:
num_dead = 1
# first and last people to die in current round
# the mod n is applied in case k > n
first_index = (k - 1) % n
last_index = first_index + k * (num_dead - 1)
# which person the next round starts counting from
next_round_start = last_index + 1
# recursion to find out who winner is in next round
winner_next_round = josephus(n - num_dead, k)
# translate that over to the current round's numbering
# the people in [next_round_start, n) are all alive
if next_round_start + winner_next_round < n:
return next_round_start + winner_next_round
# look at [0, next_round_start). Some may be dead, adjust.
else:
winner_next_round -= (n - next_round_start)
# every k-th person is dead
        block_num = winner_next_round // (k - 1)
index_in_block = winner_next_round % (k - 1)
return block_num * k + index_in_block
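# Editor's note: a minimal cross-check sketch, not part of the original module. The
# classical Josephus recurrence J(1) = 0, J(n) = (J(n-1) + k) % n yields the 0-based
# survivor index directly; the blocked recursion above should agree with it, e.g. both
# give index 3 for n = 7, k = 3.
def _josephus_simple(n, k):
    """Reference implementation using the standard O(n) recurrence."""
    winner = 0
    for people in range(2, n + 1):
        winner = (winner + k) % people
    return winner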
def main():
parser = argparse.ArgumentParser(
description='Finds location of survivor in josephus problem')
parser.add_argument(
'n', type=int, help='No of people in circle'
)
parser.add_argument('k', type=int, help='step rate', )
args = parser.parse_args()
print "The index position of the survivor is {0}".format(
josephus(args.n, args.k))
if __name__ == '__main__':
sys.exit(main())
|
ashm2/josephus_problem
|
josephus.py
|
Python
|
mit
| 1,843
|
from __future__ import division, absolute_import
import astropy.stats
import glob
import math
import matplotlib.pyplot as plt
from matplotlib import ticker
from matplotlib.ticker import FormatStrFormatter
import numpy as np
import os
import pandas as pd
from scipy import integrate,optimize,spatial
from matplotlib import rc,rcParams
rc('text', usetex=True)
rc('axes', linewidth=2)
rc('font', weight='bold')
# rcParams['text.latex.preamble'] = [r'\usepackage{sfmath} \boldmath']
class Vars(object):
size_xlabel = 24
size_ylabel = 24
size_text = 18
size_tick = 18
va = Vars()
three_dex_abd_matched = {'1': ([[ 0.21524025, 0.20638555, 0.18613791, 0.17004973, 0.17328601,
0.17824797, 0.22330475, 0.27547932, 0.35097406, 0.46594156,
0.6529005 , 0.6352904 , 0.73630952],
[ 0.01130106, 0.01165314, 0.00956257, 0.0100338 , 0.00851037,
0.00829444, 0.00939837, 0.0112802 , 0.01710905, 0.01586881,
0.01895867, 0.05304972, 0.06702147]]),
'10': ([[ 0.0434983 , 0.04448739, 0.03900445, 0.03950445, 0.03455255,
0.03613353, 0.03198477, 0.02779419, 0.04298508, 0.05409842,
0.08323442, 0.13483586, 0.1875 ],
[ 0.00907987, 0.01003662, 0.00746137, 0.00781005, 0.00876654,
0.00778842, 0.00710085, 0.00787262, 0.00931001, 0.01068675,
0.01922668, 0.03250244, 0.05831473]]),
'2': ([[ 0.12064069, 0.12118292, 0.11193204, 0.10343818, 0.09727599,
0.09318955, 0.09715361, 0.12339972, 0.16939451, 0.2670205 ,
0.41188724, 0.50939394, 0.52678571],
[ 0.01189233, 0.01312654, 0.01054764, 0.01001456, 0.00993245,
0.01071466, 0.00966967, 0.0091511 , 0.01455119, 0.01775133,
0.01885678, 0.05781321, 0.05745782]]),
'20': ([[ 0.02384301, 0.02535176, 0.02022905, 0.02055364, 0.01931314,
0.017112 , 0.01431874, 0.01258658, 0.0159481 , 0.01943668,
0.03090288, 0.07816919, 0.07916667],
[ 0.00626565, 0.00685997, 0.00526008, 0.00613409, 0.00587699,
0.00553958, 0.00485825, 0.00502655, 0.00554471, 0.0062414 ,
0.01229515, 0.01883964, 0.02942593]]),
'3': ([[ 0.09119876, 0.09777328, 0.08925934, 0.08259272, 0.07711375,
0.07068848, 0.065675 , 0.07988132, 0.11103945, 0.1773137 ,
0.28889296, 0.40550505, 0.37321429],
[ 0.01140526, 0.0122457 , 0.00999486, 0.00918654, 0.00988064,
0.00946682, 0.00857933, 0.00827363, 0.01327769, 0.01686665,
0.01616292, 0.0496007 , 0.06285859]]),
'5': ([[ 0.06871318, 0.06882041, 0.06418409, 0.05834665, 0.05640096,
0.05266543, 0.04813125, 0.05107188, 0.07282119, 0.09718295,
0.19193237, 0.26286616, 0.28333333],
[ 0.01079781, 0.01208153, 0.01074775, 0.00901779, 0.00918551,
0.0088578 , 0.00872636, 0.00841686, 0.01211816, 0.01476584,
0.01849396, 0.0521784 , 0.06123724]])}
two_dex_abd_matched = {'1': ([[ 0.2277589 , 0.20929902, 0.20440717, 0.19045579, 0.17335294,
0.17895473, 0.21237887, 0.2784413 , 0.40348453, 0.5576678 ,
0.72205984, 0.87900008, 0.92788462],
[ 0.01441421, 0.01248772, 0.01319243, 0.01204725, 0.01152511,
0.00900664, 0.0116242 , 0.01143036, 0.01337072, 0.01665733,
0.02369187, 0.01932513, 0.05786732]]),
'10': ([[ 0.04756858, 0.04980623, 0.05146618, 0.04804556, 0.04270754,
0.03740174, 0.03831069, 0.03824497, 0.04121288, 0.06389169,
0.12307228, 0.2865359 , 0.43269231],
[ 0.00942242, 0.01033441, 0.01064282, 0.00874843, 0.00855415,
0.00720339, 0.00656024, 0.00555056, 0.00648568, 0.01294673,
0.02002825, 0.04360923, 0.11830788]]),
'2': ([[ 0.12990243, 0.12738903, 0.12694609, 0.11571515, 0.10560929,
0.09257186, 0.09617228, 0.11780891, 0.1859156 , 0.33015289,
0.51666957, 0.7644459 , 0.87660256],
[ 0.01349072, 0.01281358, 0.01434831, 0.0103789 , 0.01165435,
0.00854937, 0.00869274, 0.00846095, 0.00808849, 0.02341006,
0.0219697 , 0.02357977, 0.06410006]]),
'20': ([[ 0.02597944, 0.02514224, 0.02744873, 0.02401747, 0.02247834,
0.02085579, 0.02042847, 0.02035271, 0.02037985, 0.02690943,
0.06062737, 0.10727761, 0.15224359],
[ 0.00689687, 0.00717086, 0.00797024, 0.00599567, 0.006135 ,
0.00550897, 0.00578622, 0.00404915, 0.00486626, 0.00822912,
0.01503631, 0.03598 , 0.05570783]]),
'3': ([[ 0.10125596, 0.10313103, 0.10249001, 0.09152116, 0.08442258,
0.07670431, 0.07217208, 0.08102919, 0.11020136, 0.23244375,
0.39091166, 0.63650154, 0.80528846],
[ 0.01285048, 0.01269348, 0.0142144 , 0.01074026, 0.01016215,
0.00845994, 0.00754706, 0.00660626, 0.00947444, 0.01941087,
0.02452525, 0.03119116, 0.0615887 ]]),
'5': ([[ 0.07707357, 0.07497129, 0.07521926, 0.06941231, 0.06047828,
0.05585792, 0.05540505, 0.05540863, 0.06323059, 0.12723545,
0.24226817, 0.48849221, 0.64983974],
[ 0.01257445, 0.01240699, 0.01306162, 0.0089147 , 0.01045907,
0.00876058, 0.00736706, 0.00684428, 0.00782831, 0.01641762,
0.02766064, 0.02216969, 0.11269876]])}
one_dex_abd_matched = {'1': ([[ 0.24090065, 0.21751226, 0.21489993,
0.1894229 , 0.18796087,
0.17726431, 0.20180639, 0.27350405, 0.42899167, 0.66968654,
0.87864981, 0.95474644, 1. ],
[ 0.01145703, 0.01261648, 0.01349425, 0.01143488, 0.0098266 ,
0.00890501, 0.00808996, 0.00900656, 0.01142553, 0.01202355,
0.01441284, 0.01614769, 0. ]]),
'10': ([[ 0.05282593, 0.05287924, 0.05178348, 0.04819594, 0.04430741,
0.03937763, 0.03436343, 0.03209625, 0.03886541, 0.06173756,
0.12849462, 0.43421778, 0.7390625 ],
[ 0.0107828 , 0.01097779, 0.01009919, 0.00905892, 0.00782053,
0.00749636, 0.00665324, 0.00744886, 0.00935906, 0.01220939,
0.02361977, 0.04500612, 0.0672377 ]]),
'2': ([[ 0.13947179, 0.13403766, 0.13205966, 0.11856716, 0.11513654,
0.09391647, 0.09153797, 0.09952045, 0.1774149 , 0.39959388,
0.72658597, 0.92456062, 0.9921875 ],
[ 0.01208771, 0.01360823, 0.01351236, 0.0117967 , 0.00948068,
0.00906088, 0.00822835, 0.00907392, 0.013738 , 0.01755909,
0.01341753, 0.02104195, 0.00730792]]),
'20': ([[ 0.02637069, 0.02678547, 0.02904849, 0.02689762, 0.0232155 ,
0.02239701, 0.01759682, 0.0208054 , 0.02202997, 0.03431871,
0.04782364, 0.18408795, 0.38229167],
[ 0.0077015 , 0.00753392, 0.00748456, 0.00673008, 0.00569602,
0.00594712, 0.0051561 , 0.0057339 , 0.00707003, 0.00808574,
0.0113854 , 0.03388804, 0.06644556]]),
'3': ([[ 0.10697682, 0.10539415, 0.10845122, 0.09131209, 0.08983389,
0.07622917, 0.06914106, 0.06562035, 0.08964582, 0.24107919,
0.53426499, 0.85987446, 0.9921875 ],
[ 0.01349656, 0.01286965, 0.01258807, 0.01156779, 0.01004648,
0.00974959, 0.00908036, 0.00846809, 0.01190788, 0.0186402 ,
0.02561799, 0.0220085 , 0.00730792]]),
'5': ([[ 0.0829296 , 0.0767095 , 0.08028205, 0.06873084, 0.0658115 ,
0.05865908, 0.05155796, 0.04898616, 0.05256302, 0.10856876,
0.31840299, 0.71632312, 0.9296875 ],
[ 0.01293653, 0.0132994 , 0.01202746, 0.01024547, 0.00966556,
0.01024098, 0.00791901, 0.00852191, 0.01026344, 0.01356835,
0.02565866, 0.03820398, 0.04016458]])}
one_dex_norm = {'1': ([[ 0.23379138, 0.21858028, 0.21544219, 0.19484084, 0.19169834,
0.17881306, 0.18925336, 0.2509041 , 0.42610801, 0.6986764 ,
0.91731741, 0.9875 , 1. ],
[ 0.01093244, 0.01359421, 0.01257943, 0.01314939, 0.00962991,
0.00968802, 0.00884824, 0.00916126, 0.00944932, 0.00868739,
0.01068788, 0.01169268, 0. ]]),
'10': ([[ 0.05097433, 0.05342309, 0.05144121, 0.04976471, 0.04664067,
0.03953891, 0.03558171, 0.03403173, 0.03652341, 0.07052831,
0.1808226 , 0.65861222, 0.93333333],
[ 0.01032246, 0.01117695, 0.00979262, 0.00938893, 0.00863373,
0.00705196, 0.00760729, 0.00725605, 0.00863966, 0.01411 ,
0.02977818, 0.06330736, 0.05270463]]),
'2': ([[ 0.13506372, 0.13455398, 0.13282997, 0.12184482, 0.11706111,
0.09973883, 0.0930339 , 0.09587241, 0.17229977, 0.42859355,
0.8063813 , 0.97329545, 1. ],
[ 0.01143441, 0.01415717, 0.01295881, 0.01255686, 0.01064479,
0.00920652, 0.00925522, 0.00774904, 0.01212893, 0.01616638,
0.01843806, 0.01429375, 0. ]]),
'20': ([[ 0.02543328, 0.0270065 , 0.02892489, 0.02787714, 0.02374327,
0.02240617, 0.01976756, 0.02039297, 0.02114825, 0.03430814,
0.07321814, 0.33127289, 0.71111111],
[ 0.00739287, 0.00762563, 0.00721275, 0.00680826, 0.00619374,
0.00590446, 0.00584163, 0.00568387, 0.00669582, 0.00874335,
0.01914046, 0.04155249, 0.12668616]]),
'3': ([[ 0.10356548, 0.10577668, 0.10848871, 0.09457448, 0.09156254,
0.08007504, 0.07148496, 0.06546403, 0.08749008, 0.27215413,
0.63875407, 0.92082293, 1. ],
[ 0.01284947, 0.01325919, 0.0120804 , 0.01213016, 0.01076404,
0.00973611, 0.0104359 , 0.00791612, 0.01108596, 0.01368561,
0.02065483, 0.01528594, 0. ]]),
'5': ([[ 0.08011167, 0.07727852, 0.08043464, 0.07125348, 0.06703718,
0.05997341, 0.05343608, 0.04997222, 0.05174344, 0.12648361,
0.43564471, 0.86068307, 1. ],
[ 0.01236788, 0.01359278, 0.01161002, 0.01091639, 0.01023869,
0.010021 , 0.00971142, 0.00798677, 0.0098478 , 0.01536732,
0.02288673, 0.0229637 , 0. ]])}
two_dex_norm = {'1': ([[ 0.21757995, 0.21159866, 0.20698244, 0.19267024, 0.17728934,
0.17941772, 0.19704583, 0.25948005, 0.35904919, 0.54708736,
0.71595682, 0.92759048, 0.96875 ],
[ 0.01413842, 0.01217976, 0.01275151, 0.01224816, 0.01500227,
0.00765373, 0.00840479, 0.00696645, 0.01410345, 0.01224488,
0.016043 , 0.02031895, 0.0292317 ]]),
'10': ([[ 0.04526536, 0.05056117, 0.05125827, 0.04947443, 0.04393309,
0.03918201, 0.03899048, 0.03712702, 0.03831807, 0.06205462,
0.11817761, 0.2671782 , 0.578125 ],
[ 0.00921302, 0.01045191, 0.01023102, 0.00867248, 0.0101528 ,
0.00769676, 0.00646166, 0.00581073, 0.00709351, 0.00922964,
0.02032159, 0.03135078, 0.10820242]]),
'2': ([[ 0.123864 , 0.12859161, 0.12776939, 0.11990065, 0.10803898,
0.09396226, 0.09306344, 0.10582313, 0.16855359, 0.31243535,
0.49743619, 0.81120753, 0.93125 ],
[ 0.01326967, 0.01305198, 0.01395815, 0.0101661 , 0.01439067,
0.0086511 , 0.00886102, 0.00516303, 0.01236694, 0.01541778,
0.03050539, 0.024729 , 0.03743484]]),
'20': ([[ 0.0247306 , 0.02549198, 0.02712836, 0.02517137, 0.02344174,
0.02226584, 0.01960278, 0.02063911, 0.01744428, 0.03040506,
0.04858382, 0.12446845, 0.23125 ],
[ 0.00672988, 0.0072824 , 0.00776997, 0.00592793, 0.00697547,
0.00588343, 0.00562093, 0.00458423, 0.00401182, 0.00831327,
0.0099017 , 0.03118885, 0.11204334]]),
'3': ([[ 0.09632808, 0.10458555, 0.10365274, 0.09462682, 0.08569358,
0.07928289, 0.07294874, 0.07407289, 0.10272467, 0.21203764,
0.369383 , 0.63161268, 0.909375 ],
[ 0.01259439, 0.01283883, 0.01378067, 0.01041436, 0.01234633,
0.00876314, 0.00798071, 0.00501344, 0.01139153, 0.01256465,
0.03044705, 0.03757055, 0.04799445]]),
'5': ([[ 0.07351418, 0.07606423, 0.07561372, 0.0727972 , 0.06071745,
0.05857334, 0.05507502, 0.05394583, 0.06087093, 0.11975757,
0.22611026, 0.43858088, 0.759375 ],
[ 0.01234074, 0.01247395, 0.01269944, 0.00886458, 0.01191216,
0.00933791, 0.00800756, 0.00547326, 0.00879668, 0.01124001,
0.03046099, 0.0325339 , 0.07499186]])}
three_dex_norm = {'1': ([[ 0.20829433, 0.20457921, 0.1903796 , 0.17148667, 0.17655332,
0.17448569, 0.1998673 , 0.24076067, 0.32940725, 0.40774115,
0.5455842 , 0.71231361, 0.69908494],
[ 0.01089732, 0.01200659, 0.01101784, 0.00935578, 0.00896163,
0.00846024, 0.00860821, 0.00934991, 0.00732583, 0.014189 ,
0.01834871, 0.01754357, 0.04036407]]),
'10': ([[ 0.04203558, 0.04419473, 0.03941548, 0.04026057, 0.03451402,
0.032655 , 0.03592204, 0.030436 , 0.03527852, 0.04144363,
0.07733356, 0.09325203, 0.10696836],
[ 0.00882999, 0.01021811, 0.00706577, 0.0075648 , 0.00862625,
0.00769305, 0.00803968, 0.0082279 , 0.00623344, 0.00972556,
0.01211075, 0.01743087, 0.0281395 ]]),
'2': ([[ 0.11636379, 0.12017516, 0.11518941, 0.10379404, 0.10274423,
0.09201899, 0.09275208, 0.10696531, 0.1555338 , 0.19832555,
0.33570517, 0.45564685, 0.53448623],
[ 0.01180342, 0.0129437 , 0.01054841, 0.01041868, 0.00938872,
0.01039477, 0.01126146, 0.00913982, 0.00836321, 0.01602994,
0.01959266, 0.02334625, 0.04419489]]),
'20': ([[ 0.02315809, 0.02489594, 0.02076118, 0.02113088, 0.01889123,
0.01773325, 0.01611588, 0.0146317 , 0.01397966, 0.01363161,
0.03079506, 0.04637692, 0.05099439],
[ 0.00602487, 0.00689501, 0.0054539 , 0.00608519, 0.00594935,
0.00571235, 0.00517594, 0.00510871, 0.00529972, 0.00529514,
0.00916591, 0.01038736, 0.0146574 ]]),
'3': ([[ 0.0879212 , 0.09649538, 0.09177762, 0.08309075, 0.08026366,
0.07006718, 0.06937523, 0.06802135, 0.09895054, 0.13284495,
0.22807118, 0.32004935, 0.39108318],
[ 0.01123558, 0.01218644, 0.00999191, 0.00972049, 0.00925183,
0.00936082, 0.01052106, 0.01015136, 0.00779289, 0.01288677,
0.0203126 , 0.036634 , 0.03673733]]),
'5': ([[ 0.06622738, 0.0680997 , 0.06545554, 0.05953069, 0.057135 ,
0.05071328, 0.05087791, 0.05021138, 0.06142949, 0.07497064,
0.1399526 , 0.20886604, 0.27510545],
[ 0.01063068, 0.01204707, 0.01087383, 0.0092756 , 0.00945984,
0.00807972, 0.01016534, 0.01027421, 0.00523548, 0.01343365,
0.01911088, 0.02543038, 0.04120676]])}
bin_centers = np.array([ 9.2, 9.4, 9.6, 9.8, 10. , 10.2, 10.4, 10.6, 10.8,
11. , 11.2, 11.4, 11.6])
def plot_mean_halo_frac(bin_centers,mean_vals,ax,std,plot_idx,color='grey',\
linestyle='solid',linewidth=2,label=None,text=False):
# if plot_idx == 4:
if plot_idx == 1:
ax.set_xlabel(r'$\log\ (M_{*}/M_{\odot})$',fontsize=va.size_xlabel)
ax.set_xlim(9.1,11.9)
ax.set_xticks(np.arange(9.5,12,0.5))
ax.tick_params(axis='both', labelsize=va.size_tick)
ax.errorbar(bin_centers,mean_vals,yerr=std,color=color,label=label,\
linestyle=linestyle,linewidth=linewidth)
if text == True:
# titles = [1,2,3,5,10,20]
titles_pres = [1,5,20]
title_here = r"\boldmath$N=%d$"%(titles_pres[plot_idx])
ax.text(0.05, 0.6, title_here,horizontalalignment='left', \
verticalalignment='top',transform=ax.transAxes,\
fontsize=va.size_text)
if plot_idx == 0:
ax.legend(loc='best')
ax.set_ylabel(r'Fraction in Same Halo',fontsize=va.size_ylabel)
neigh_vals = np.array([1,2,3,5,10,20])
# nrow = int(2)
# ncol = int(3)
pres_row = int(1)
pres_col = int(3)
neigh_vals_pres = (1,5,20)
fig,axes = plt.subplots(nrows=pres_row,ncols=pres_col, \
figsize=(14,4),sharex=True,sharey=True)
axes_flat = axes.flatten()
# figure_title = fig.suptitle(r'Fraction of Galaxies with Nth Nearest Neighbor in Same Halo',\
# fontsize=20)
# figure_title.set_y(1.0)
# fig.subplots_adjust(bottom=0.17, right=0.99, left=0.04,top=0.94, hspace=0, wspace=0)
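# Plot N = 1, 5, 20 (neigh_vals_pres) into the three panels: zz is advanced inside the
# inner for loop, so each neighbour rank lands on its own axis and the outer while
# exits after a single pass.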
zz = int(0)
while zz <=2:
for jj in neigh_vals_pres:
nn_str = '{0}'.format(jj)
# plot_mean_halo_frac(bin_centers,three_dex_norm[nn_str][0],\
# axes_flat[zz],three_dex_abd_matched[nn_str][1],zz,\
# color='indigo',label='0.3',linewidth=3)
# plot_mean_halo_frac(bin_centers,two_dex_norm[nn_str][0],\
# axes_flat[zz],two_dex_norm[nn_str][1],zz,\
# color='seagreen',label='0.2',linewidth=3)
# plot_mean_halo_frac(bin_centers,one_dex_norm[nn_str][0],\
# axes_flat[zz],one_dex_norm[nn_str][1],zz,\
# color='maroon',label='0.1',text=True,linewidth=3)
# plot_mean_halo_frac(bin_centers,one_dex_abd_matched[nn_str][0],\
# axes_flat[zz],one_dex_abd_matched[nn_str][1],zz,\
# color='darkviolet',label='0.1 dex')
plot_mean_halo_frac(bin_centers,two_dex_abd_matched[nn_str][0],\
axes_flat[zz],two_dex_abd_matched[nn_str][1],zz,\
color='royalblue',label='0.2 dex',text=True)
# plot_mean_halo_frac(bin_centers,three_dex_abd_matched[nn_str][0],\
# axes_flat[zz],three_dex_abd_matched[nn_str][1],zz,\
# color='hotpink',label='0.3 dex',text=True)
zz += 1
plt.subplots_adjust(top=0.93,bottom=0.21,left=0.06,right=0.99,hspace=0.00,\
wspace=0)
# plt.tight_layout()
plt.show()
|
hrichstein/Stellar_mass_env_Density
|
Codes/Scripts/mean_halo_frac_all.py
|
Python
|
mit
| 17,978
|
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""pytest integration with BeakerLib"""
import os
import re
import sys
import traceback
import subprocess
import pytest
def shell_quote(string):
quote = "'" + string.replace("'", "'\\''") + "'"
return quote.encode('utf-8')
def pytest_addoption(parser):
parser.addoption(
'--with-beakerlib', action="store_true",
dest="with_beakerlib", default=None,
help="Report test results via beakerlib")
parser.addoption(
'--short-phase-names', action="store_true",
dest="short_phase_names", default=None,
help="Use shorter phase names for beakerlib rlPhaseStart calls")
@pytest.mark.tryfirst
def pytest_load_initial_conftests(args, early_config, parser):
ns = early_config.known_args_namespace
if ns.with_beakerlib:
if 'BEAKERLIB' not in os.environ:
exit('$BEAKERLIB not set, cannot use --with-beakerlib')
if ns.short_phase_names:
os.environ['SHORT_PHASE_NAMES'] = '1'
plugin = BeakerLibPlugin()
pluginmanager = early_config.pluginmanager.register(
plugin, 'BeakerLibPlugin')
class BeakerLibProcess(object):
"""Manager of a Bash process that is being fed beakerlib commands
"""
def __init__(self, env=os.environ):
if 'BEAKERLIB' not in env:
raise RuntimeError('$BEAKERLIB not set, cannot use BeakerLib')
self.env = env
# Set up the Bash process
self.bash = subprocess.Popen(['bash'],
stdin=subprocess.PIPE,
stdout=open(os.devnull, 'w'),
stderr=open(os.devnull, 'w'))
source_path = os.path.join(self.env['BEAKERLIB'], 'beakerlib.sh')
self.run_beakerlib_command(['.', source_path])
def run_beakerlib_command(self, cmd):
"""Given a command as a Popen-style list, run it in the Bash process"""
if not self.bash:
return
for word in cmd:
self.bash.stdin.write(shell_quote(word))
self.bash.stdin.write(b' ')
self.bash.stdin.write(b'\n')
self.bash.stdin.flush()
assert self.bash.returncode is None, "BeakerLib Bash process exited"
def end(self):
"""End the Bash process"""
self.run_beakerlib_command(['exit'])
bash = self.bash
self.bash = None
bash.communicate()
def log_exception(self, err=None):
"""Log an exception
err is a 3-tuple as returned from sys.exc_info(); if not given,
sys.exc_info() is used.
"""
if err is None:
err = sys.exc_info()
message = ''.join(traceback.format_exception(*err)).rstrip()
self.run_beakerlib_command(['rlLogError', message])
class BeakerLibPlugin(object):
def __init__(self):
self.process = BeakerLibProcess(env=os.environ)
self._current_item = None
def run_beakerlib_command(self, cmd):
"""Given a command as a Popen-style list, run it in the Bash process"""
self.process.run_beakerlib_command(cmd)
def set_current_item(self, item):
"""Set the item that is currently being processed
No-op if the same item is already being processed.
Ends the phase for the previous item, if any.
"""
if item != self._current_item:
item_name = get_item_name(item)
if self._current_item:
self.run_beakerlib_command(['rlPhaseEnd'])
if item:
self.run_beakerlib_command(['rlPhaseStart', 'FAIL', item_name])
self._current_item = item
def pytest_collection_modifyitems(self, session, config, items):
"""Log all collected items at start of test"""
self.run_beakerlib_command(['rlLogInfo', 'Collected pytest tests:'])
for item in items:
msg = ' - ' + get_item_name(item)
self.run_beakerlib_command(['rlLogInfo', msg])
def pytest_runtest_setup(self, item):
"""Log item before running it"""
self.set_current_item(item)
def pytest_runtest_makereport(self, item, call):
"""Report pass/fail for setup/call/teardown of an item"""
self.set_current_item(item)
desc = '%s: %s' % (call.when, item)
if not call.excinfo:
self.run_beakerlib_command(['rlPass', 'PASS %s' % desc])
else:
self.run_beakerlib_command(['rlLogError', call.excinfo.exconly()])
short_repr = str(call.excinfo.getrepr(style='short'))
self.run_beakerlib_command(['rlLogInfo', short_repr])
# Give super-detailed traceback for DEBUG=1
long_repr = str(call.excinfo.getrepr(
showlocals=True, funcargs=True))
self.run_beakerlib_command(['rlLogDebug', long_repr])
if call.excinfo.errisinstance(pytest.skip.Exception):
self.run_beakerlib_command(['rlPass', 'SKIP %s' % desc])
else:
self.run_beakerlib_command(['rlFail', 'FAIL %s' % desc])
def pytest_unconfigure(self, config):
"""Clean up and exit"""
self.set_current_item(None)
self.process.end()
def get_item_name(item):
"""Return a "identifier-style" name for the given item
The name only contains the characters [^a-zA-Z0-9_].
"""
bad_char_re = re.compile('[^a-zA-Z0-9_]')
get_name_re = re.compile('^.*\[.*: .*: (.*)\].*$')
parts = []
current = item
while current:
if isinstance(current, pytest.Module):
if 'SHORT_PHASE_NAMES' in os.environ:
break
name = current.name
if name.endswith('.py'):
name = name[:-3]
name = bad_char_re.sub('-', name)
parts.append(name)
break
if isinstance(current, pytest.Instance):
pass
else:
name = current.name
if 'SHORT_PHASE_NAMES' in os.environ:
name = get_name_re.sub(r'\1', name)
if isinstance(current, pytest.Class):
current = current.parent
continue
name = bad_char_re.sub('-', name)
parts.append(name)
current = current.parent
return '-'.join(reversed(parts))
|
encukou/pytest-beakerlib
|
pytest_beakerlib.py
|
Python
|
gpl-3.0
| 7,047
|
import numpy as np
from flux_accumulator import AccumulatorBase
class FluxHistogramBins(AccumulatorBase):
def __init__(self, dims, ranges, filename=''):
self.ar_flux = np.zeros(dims)
self.dims = dims
self.filename = filename
self.ranges = ranges
self.bin_sizes = np.abs(ranges[1] - ranges[0]) / dims
self.pair_count = 0
def add_array_with_mask(self, ar_flux, ar_x, ar_y, ar_z, mask, ar_weights):
assert False, "Not implemented"
def merge(self, bins2):
assert self.ar_flux.shape == bins2.ar_flux.shape
assert self.ranges == bins2.ranges
assert self.dims == bins2.dims
self.ar_flux += bins2.ar_flux
self.pair_count += bins2.pair_count
return self
def save(self, filename):
self.filename = filename
self.flush()
def from_3d_array(self, array):
self.ar_flux = array
self.dims = self.ar_flux.shape
def load(self, filename):
# TODO: to static
stacked_array = np.load(filename)
self.from_3d_array(stacked_array)
def __radd__(self, other):
return self.merge(other)
def __add__(self, other):
return self.merge(other)
@classmethod
def init_as(cls, other):
"""
:type other: FluxHistogramBins
"""
new_obj = cls(other.dims, other.ranges)
new_obj.set_filename(other.filename)
return new_obj
@classmethod
def from_other(cls, other):
new_obj = cls.init_as(other)
new_obj.merge(other)
return new_obj
@classmethod
def from_np_array(cls, ar_flux, ranges):
"""
:type ar_flux: np.array
:type ranges: BinRange
"""
assert ar_flux.ndim == 3
new_bins = cls(ar_flux.shape, ranges)
new_bins.ar_flux = ar_flux
return new_bins
def set_filename(self, filename):
self.filename = filename
def get_data_as_array(self):
return self.ar_flux
def get_array_shape(self):
return self.ar_flux.shape
def flush(self):
np.save(self.filename, self.get_data_as_array())
def get_ranges(self):
return self.ranges
def get_dims(self):
return self.dims
def get_pair_count(self):
return self.pair_count
def get_metadata(self):
return [self.dims,
self.filename,
self.ranges,
self.bin_sizes,
self.pair_count]
@classmethod
def load_from(cls, ar, metadata):
new_bins = cls(dims=np.array((1, 1, 1)), ranges=np.array(((0, 0, 0), (1, 1, 1))))
(new_bins.dims, new_bins.filename,
new_bins.ranges, new_bins.bin_sizes, new_bins.pair_count) = metadata
new_bins.ar_flux = ar
return new_bins
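# Hedged usage sketch (not part of the original module): exercises the
# constructors above with made-up dims/ranges; values are illustrative only.
if __name__ == '__main__':
    _ranges = np.array(((0.0, 0.0, 0.0), (1.0, 1.0, 1.0)))
    _bins = FluxHistogramBins.from_np_array(np.ones((4, 4, 4)), _ranges)
    print(_bins.get_array_shape(), _bins.get_dims(), _bins.get_pair_count())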
|
yishayv/lyacorr
|
flux_histogram_bins.py
|
Python
|
mit
| 2,804
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# streamondemand.- XBMC Plugin
# Channel for guardarefilm
# http://blog.tvalacarta.info/plugin-xbmc/streamondemand.
# ------------------------------------------------------------
import re
import urlparse
from core import config
from core import logger
from core import scrapertools
from core.item import Item
from core.tmdb import infoSod
from servers import servertools
__channel__ = "guardarefilm"
__category__ = "F"
__type__ = "generic"
__title__ = "guardarefilm (IT)"
__language__ = "IT"
host = "http://www.guardarefilm.tv"
headers = [
['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0'],
['Accept-Encoding', 'gzip, deflate'],
['Referer', host]
]
DEBUG = config.get_setting("debug")
def isGeneric():
return True
def mainlist(item):
logger.info("streamondemand.guardarefilm mainlist")
itemlist = [Item(channel=__channel__,
title="[COLOR azure]Novita' Al Cinema[/COLOR]",
action="peliculas",
url="%s/streaming-al-cinema/" % host,
thumbnail="http://orig03.deviantart.net/6889/f/2014/079/7/b/movies_and_popcorn_folder_icon_by_matheusgrilo-d7ay4tw.png"),
Item(channel=__channel__,
title="[COLOR azure]Popolari[/COLOR]",
action="pelis_top100",
url="%s/top100.html" % host,
thumbnail="http://xbmc-repo-ackbarr.googlecode.com/svn/trunk/dev/skin.cirrus%20extended%20v2/extras/moviegenres/All%20Movies%20by%20Genre.png"),
Item(channel=__channel__,
title="[COLOR azure]Categorie[/COLOR]",
action="categorias",
url=host,
thumbnail="http://xbmc-repo-ackbarr.googlecode.com/svn/trunk/dev/skin.cirrus%20extended%20v2/extras/moviegenres/All%20Movies%20by%20Genre.png"),
Item(channel=__channel__,
title="[COLOR azure]Animazione[/COLOR]",
action="peliculas",
url="%s/streaming-cartoni-animati/" % host,
thumbnail="http://orig09.deviantart.net/df5a/f/2014/169/2/a/fist_of_the_north_star_folder_icon_by_minacsky_saya-d7mq8c8.png"),
Item(channel=__channel__,
title="[COLOR yellow]Cerca...[/COLOR]",
action="search",
thumbnail="http://dc467.4shared.com/img/fEbJqOum/s7/13feaf0c8c0/Search"),
Item(channel=__channel__,
title="[COLOR azure]Serie TV[/COLOR]",
action="peliculas_tv",
extra="serie",
url="%s/serie-tv-streaming/" % host,
thumbnail="http://xbmc-repo-ackbarr.googlecode.com/svn/trunk/dev/skin.cirrus%20extended%20v2/extras/moviegenres/New%20TV%20Shows.png"),
Item(channel=__channel__,
title="[COLOR yellow]Cerca Serie TV...[/COLOR]",
action="search",
extra="serie",
thumbnail="http://dc467.4shared.com/img/fEbJqOum/s7/13feaf0c8c0/Search")]
return itemlist
def categorias(item):
logger.info("streamondemand.guardarefilm categorias")
itemlist = []
data = scrapertools.cache_page(item.url, headers=headers)
# Narrow search by selecting only the combo
bloque = scrapertools.get_match(data, '<ul class="reset dropmenu">(.*?)</ul>')
# The categories are the options for the combo
patron = '<li><a href="([^"]+)">(.*?)</a></li>'
matches = re.compile(patron, re.DOTALL).findall(bloque)
for scrapedurl, scrapedtitle in matches:
scrapedurl = urlparse.urljoin(item.url, scrapedurl)
scrapedthumbnail = ""
scrapedplot = ""
if (DEBUG): logger.info(
"title=[" + scrapedtitle + "], url=[" + scrapedurl + "], thumbnail=[" + scrapedthumbnail + "]")
itemlist.append(
Item(channel=__channel__,
action="peliculas",
title="[COLOR azure]" + scrapedtitle + "[/COLOR]",
url=scrapedurl,
thumbnail=scrapedthumbnail,
plot=scrapedplot))
return itemlist
def search(item, texto):
logger.info("[guardarefilm.py] " + item.url + " search " + texto)
item.url = host + "/?do=search&subaction=search&story=" + texto
try:
if item.extra == "serie":
return peliculas_tv(item)
else:
return peliculas(item)
# Catch the exception so a failing channel does not break the global search
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def peliculas(item):
logger.info("streamondemand.guardarefilm peliculas")
itemlist = []
# Download the page
data = scrapertools.cache_page(item.url, headers=headers)
# Extract the entries (folders)
patron = '<div class="poster"><a href="([^"]+)".*?><img src="([^"]+)".*?><span.*?</div>\s*'
patron += '<div.*?><a.*?>(.*?)</a></div>'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedurl, scrapedthumbnail, scrapedtitle in matches:
scrapedplot = ""
scrapedtitle = scrapertools.decodeHtmlentities(scrapedtitle)
if (DEBUG): logger.info(
"title=[" + scrapedtitle + "], url=[" + scrapedurl + "], thumbnail=[" + scrapedthumbnail + "]")
itemlist.append(infoSod(
Item(channel=__channel__,
action="episodios" if item.extra == "serie" else "findvideos",
fulltitle=scrapedtitle,
show=scrapedtitle,
title="[COLOR azure]" + scrapedtitle + "[/COLOR]",
url=scrapedurl,
thumbnail=urlparse.urljoin(host, scrapedthumbnail),
plot=scrapedplot,
folder=True), tipo='movie'))
# Extract the pagination link
patronvideos = '<div class="pages".*?<span>.*?<a href="([^"]+)">'
matches = re.compile(patronvideos, re.DOTALL).findall(data)
if len(matches) > 0:
scrapedurl = urlparse.urljoin(item.url, matches[0])
itemlist.append(
Item(channel=__channel__,
action="HomePage",
title="[COLOR yellow]Torna Home[/COLOR]",
folder=True))
itemlist.append(
Item(channel=__channel__,
action="peliculas",
title="[COLOR orange]Successivo >>[/COLOR]",
url=scrapedurl,
thumbnail="http://2.bp.blogspot.com/-fE9tzwmjaeQ/UcM2apxDtjI/AAAAAAAAeeg/WKSGM2TADLM/s1600/pager+old.png",
folder=True))
return itemlist
def peliculas_tv(item):
logger.info("streamondemand.guardarefilm peliculas")
itemlist = []
# Download the page
data = scrapertools.cache_page(item.url, headers=headers)
# Extract the entries (folders)
patron = '<div class="poster"><a href="([^"]+)".*?><img src="([^"]+)".*?><span.*?</div>\s*'
patron += '<div.*?><a.*?>(.*?)</a></div>'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedurl, scrapedthumbnail, scrapedtitle in matches:
scrapedplot = ""
scrapedtitle = scrapertools.decodeHtmlentities(scrapedtitle)
if (DEBUG): logger.info(
"title=[" + scrapedtitle + "], url=[" + scrapedurl + "], thumbnail=[" + scrapedthumbnail + "]")
itemlist.append(infoSod(
Item(channel=__channel__,
action="episodios" if item.extra == "serie" else "findvideos",
fulltitle=scrapedtitle,
show=scrapedtitle,
title="[COLOR azure]" + scrapedtitle + "[/COLOR]",
url=scrapedurl,
thumbnail=urlparse.urljoin(host, scrapedthumbnail),
plot=scrapedplot,
folder=True), tipo='tv'))
# Extract the pagination link
patronvideos = '<div class="pages".*?<span>.*?<a href="([^"]+)">'
matches = re.compile(patronvideos, re.DOTALL).findall(data)
if len(matches) > 0:
scrapedurl = urlparse.urljoin(item.url, matches[0])
itemlist.append(
Item(channel=__channel__,
action="HomePage",
title="[COLOR yellow]Torna Home[/COLOR]",
folder=True))
itemlist.append(
Item(channel=__channel__,
action="peliculas_tv",
title="[COLOR orange]Successivo >>[/COLOR]",
url=scrapedurl,
thumbnail="http://2.bp.blogspot.com/-fE9tzwmjaeQ/UcM2apxDtjI/AAAAAAAAeeg/WKSGM2TADLM/s1600/pager+old.png",
folder=True))
return itemlist
def HomePage(item):
import xbmc
xbmc.executebuiltin("ReplaceWindow(10024,plugin://plugin.video.streamondemand)")
def pelis_top100(item):
logger.info("streamondemand.guardarefilm peliculas")
itemlist = []
# Download the page
data = scrapertools.cache_page(item.url, headers=headers)
# Extract the entries (folders)
patron = r'<span class="top100_title"><a href="([^"]+)">(.*?\(\d+\))</a>'
matches = re.compile(patron).findall(data)
for scrapedurl, scrapedtitle in matches:
html = scrapertools.cache_page(scrapedurl, headers=headers)
start = html.find("<div class=\"textwrap\" itemprop=\"description\">")
end = html.find("</div>", start)
scrapedplot = html[start:end]
scrapedplot = re.sub(r'<[^>]*>', '', scrapedplot)
scrapedplot = scrapertools.decodeHtmlentities(scrapedplot)
scrapedtitle = scrapertools.decodeHtmlentities(scrapedtitle)
scrapedthumbnail = scrapertools.find_single_match(html, r'class="poster-wrapp"><a href="([^"]+)"')
if (DEBUG): logger.info(
"title=[" + scrapedtitle + "], url=[" + scrapedurl + "], thumbnail=[" + scrapedthumbnail + "]")
itemlist.append(
Item(channel=__channel__,
action="episodios" if item.extra == "serie" else "findvideos",
fulltitle=scrapedtitle,
show=scrapedtitle,
title="[COLOR azure]" + scrapedtitle + "[/COLOR]",
url=scrapedurl,
thumbnail=urlparse.urljoin(host, scrapedthumbnail),
plot=scrapedplot,
folder=True,
fanart=host + scrapedthumbnail))
return itemlist
def episodios(item):
logger.info("streamondemand.guardarefilm episodios")
itemlist = []
# Download the page
data = scrapertools.cache_page(item.url)
patron = r'<li id="serie-[^"]+" data-title="Stai guardando: ([^"]+)">\s*' \
r'<span class="num">[^<]+</span>\s*' \
r'<span class="title">[^<]+</span>\s*' \
r'<span class="right">\s*' \
r'<a href="#" class="links-sd" id="sd-[^"]+" data-link="([^"]+)">[^<]+</a>'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedtitle, scrapedurl in matches:
scrapedtitle = scrapertools.decodeHtmlentities(scrapedtitle)
itemlist.append(
Item(channel=__channel__,
action="findvid_serie",
title=scrapedtitle,
url=item.url,
thumbnail=item.thumbnail,
extra=scrapedurl,
fulltitle=item.fulltitle,
show=item.show))
if config.get_library_support() and len(itemlist) != 0:
itemlist.append(
Item(channel=__channel__,
title=item.title,
url=item.url,
action="add_serie_to_library",
extra="episodios",
show=item.show))
itemlist.append(
Item(channel=item.channel,
title="Scarica tutti gli episodi della serie",
url=item.url,
action="download_all_episodes",
extra="episodios",
show=item.show))
return itemlist
def findvid_serie(item):
logger.info("streamondemand.guardarefilm findvideos")
# Download the page
data = item.extra
itemlist = servertools.find_video_items(data=data)
for videoitem in itemlist:
videoitem.title = item.title + videoitem.title
videoitem.fulltitle = item.fulltitle
videoitem.thumbnail = item.thumbnail
videoitem.show = item.show
videoitem.plot = item.plot
videoitem.channel = __channel__
return itemlist
|
dentaku65/plugin.video.sod
|
channels/guardarefilm.py
|
Python
|
gpl-3.0
| 12,690
|
try:
from functools import wraps
except ImportError:
# for python < 2.5... this is a limited set of what we need to do
# with a wrapped function :
def wraps(f):
def wrap_with_f(new_f):
new_f.__name__ = f.__name__
if hasattr(f, '__module__'):
new_f.__module__ = f.__module__
return new_f
return wrap_with_f
def fmt_val(val, shorten=True):
"""Format a value for inclusion in an
informative text string.
"""
val = repr(val)
max = 50
if shorten:
if len(val) > max:
close = val[-1]
val = val[0:max-4] + "..."
if close in (">", "'", '"', ']', '}', ')'):
val = val + close
return val
def fmt_dict_vals(dict_vals, shorten=True):
"""Returns list of key=val pairs formatted
for inclusion in an informative text string.
"""
items = dict_vals.items()
if not items:
return [fmt_val(None, shorten=shorten)]
return ["%s=%s" % (k, fmt_val(v, shorten=shorten)) for k,v in items]
|
mozilla/betafarm
|
vendor-local/lib/python/fudge/util.py
|
Python
|
bsd-3-clause
| 1,070
|
# Copyright 2020, The TensorFlow Privacy Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TensorFlow Privacy library."""
import sys
from tensorflow_privacy.version import __version__ # pylint: disable=g-bad-import-order
# pylint: disable=g-import-not-at-top
if hasattr(sys, 'skip_tf_privacy_import'): # Useful for standalone scripts.
pass
else:
# TensorFlow v1 imports
from tensorflow_privacy import v1
# DpEvents
from tensorflow_privacy.privacy.analysis.dp_event import DpEvent
from tensorflow_privacy.privacy.analysis.dp_event import NoOpDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import NonPrivateDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import UnsupportedDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import GaussianDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import SelfComposedDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import ComposedDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import PoissonSampledDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import SampledWithReplacementDpEvent
from tensorflow_privacy.privacy.analysis.dp_event import SampledWithoutReplacementDpEvent
# Analysis
from tensorflow_privacy.privacy.analysis.compute_dp_sgd_privacy_lib import compute_dp_sgd_privacy
from tensorflow_privacy.privacy.analysis.rdp_accountant import compute_heterogeneous_rdp
from tensorflow_privacy.privacy.analysis.rdp_accountant import compute_rdp
from tensorflow_privacy.privacy.analysis.rdp_accountant import get_privacy_spent
from tensorflow_privacy.privacy.analysis.tree_aggregation_accountant import compute_rdp_tree_restart
from tensorflow_privacy.privacy.analysis.tree_aggregation_accountant import compute_rdp_single_tree
from tensorflow_privacy.privacy.analysis.tree_aggregation_accountant import compute_zcdp_single_tree
# DPQuery classes
from tensorflow_privacy.privacy.dp_query.dp_query import DPQuery
from tensorflow_privacy.privacy.dp_query.dp_query import SumAggregationDPQuery
from tensorflow_privacy.privacy.dp_query.discrete_gaussian_query import DiscreteGaussianSumQuery
from tensorflow_privacy.privacy.dp_query.distributed_discrete_gaussian_query import DistributedDiscreteGaussianSumQuery
from tensorflow_privacy.privacy.dp_query.distributed_skellam_query import DistributedSkellamSumQuery
from tensorflow_privacy.privacy.dp_query.gaussian_query import GaussianSumQuery
from tensorflow_privacy.privacy.dp_query.nested_query import NestedQuery
from tensorflow_privacy.privacy.dp_query.no_privacy_query import NoPrivacyAverageQuery
from tensorflow_privacy.privacy.dp_query.no_privacy_query import NoPrivacySumQuery
from tensorflow_privacy.privacy.dp_query.normalized_query import NormalizedQuery
from tensorflow_privacy.privacy.dp_query.quantile_estimator_query import QuantileEstimatorQuery
from tensorflow_privacy.privacy.dp_query.quantile_estimator_query import NoPrivacyQuantileEstimatorQuery
from tensorflow_privacy.privacy.dp_query.quantile_adaptive_clip_sum_query import QuantileAdaptiveClipSumQuery
from tensorflow_privacy.privacy.dp_query import restart_query
from tensorflow_privacy.privacy.dp_query import tree_aggregation
from tensorflow_privacy.privacy.dp_query.restart_query import RestartQuery
from tensorflow_privacy.privacy.dp_query.tree_aggregation_query import TreeCumulativeSumQuery
from tensorflow_privacy.privacy.dp_query.tree_aggregation_query import TreeResidualSumQuery
from tensorflow_privacy.privacy.dp_query.tree_range_query import TreeRangeSumQuery
# Estimators
from tensorflow_privacy.privacy.estimators.dnn import DNNClassifier
# Keras Models
from tensorflow_privacy.privacy.keras_models.dp_keras_model import DPModel
from tensorflow_privacy.privacy.keras_models.dp_keras_model import DPSequential
from tensorflow_privacy.privacy.keras_models.dp_keras_model import make_dp_model_class
# Optimizers
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras import DPKerasAdagradOptimizer
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras import DPKerasAdamOptimizer
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras import DPKerasSGDOptimizer
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras import make_keras_optimizer_class
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras_vectorized import VectorizedDPKerasAdagradOptimizer
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras_vectorized import VectorizedDPKerasAdamOptimizer
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras_vectorized import VectorizedDPKerasSGDOptimizer
from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras_vectorized import make_vectorized_keras_optimizer_class
# module `bolt_on` not yet available in this version of TF Privacy
|
tensorflow/privacy
|
tensorflow_privacy/__init__.py
|
Python
|
apache-2.0
| 5,379
|
import numpy as np
from sklearn.ensemble import EnsembleSelectionClassifier
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn import cross_validation
from sklearn.metrics import accuracy_score
import sklearn.preprocessing as preprocessing
import pandas as pd
# load dataset
def number_encode_features(df):
result = df.copy()
encoders = {}
for column in result.columns:
if result.dtypes[column] == np.object:
encoders[column] = preprocessing.LabelEncoder()
result[column] = encoders[column].fit_transform(result[column])
return result, encoders
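# Hedged illustration (not part of the original script): number_encode_features
# label-encodes every object-dtype column, e.g. a "Sex" column holding
# ["Male", "Female"] becomes [1, 0] (codes follow LabelEncoder's sorted class order).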
original_data = pd.read_csv(
"/path/to/adult.data/",
names=[
"Age", "Workclass", "fnlwgt", "Education", "Education-Num", "Martial Status",
"Occupation", "Relationship", "Race", "Sex", "Capital Gain", "Capital Loss",
"Hours per week", "Country", "Target"],
sep=r'\s*,\s*',
engine='python',
na_values="?")
encoded_data, _ = number_encode_features(original_data)
X = encoded_data[encoded_data.columns.difference(["Target"])]
y = encoded_data["Target"]
# split dataset
X_train, X_test, y_train, y_test = cross_validation.train_test_split(
X, y, test_size=0.95, random_state=0)
X_train, X_val, y_train, y_val = cross_validation.train_test_split(
X_train, y_train, test_size=0.2, random_state=0)
scaler = preprocessing.StandardScaler()
X_train = pd.DataFrame(scaler.fit_transform(X_train.astype(np.float64)),
columns=X_train.columns)
X_val = scaler.transform(X_val.astype(np.float64))
X_test = scaler.transform(X_test.astype(np.float64))
print X_train.shape
n_features = X.shape[1]
C = 1.0
# Create different classifiers. The logistic regression cannot do
# multiclass out of the box.
classifiers = [('L1 logistic', LogisticRegression(C=C, penalty='l1')),
('L2 logistic (OvR)', LogisticRegression(C=C, penalty='l2')),
('L2 logistic (Multinomial)', LogisticRegression(
C=C, solver='lbfgs', multi_class='multinomial')),
('Linear SVC 10', SVC(kernel='linear', C=10, probability=True,
random_state=0)),
('Linear SVC 1', SVC(kernel='linear', C=C, probability=True,
random_state=0)),
('Linear SVC 0.1', SVC(kernel='linear', C=0.1,
probability=True, random_state=0)),
('Linear SVC 10^-2', SVC(kernel='linear', C=0.01,
probability=True, random_state=0)),
('Linear SVC 10^-3', SVC(kernel='linear', C=0.001,
probability=True, random_state=0)),
('Linear SVC 10^-4', SVC(kernel='linear', C=0.0001,
probability=True, random_state=0)),
('Boosted DT 16', AdaBoostClassifier(
DecisionTreeClassifier(max_depth=7),
n_estimators=16)),
('Boosted DT 32', AdaBoostClassifier(
DecisionTreeClassifier(max_depth=7),
n_estimators=32)),
('Boosted DT 64', AdaBoostClassifier(
DecisionTreeClassifier(max_depth=7),
n_estimators=64)),
('Boosted DT 128', AdaBoostClassifier(
DecisionTreeClassifier(max_depth=7),
n_estimators=128))]
n_classifiers = len(classifiers)
print "Validation Set:"
for name, classifier in classifiers:
classifier.fit(X_train, y_train)
y_pred_proba = classifier.predict_proba(X_val)
y_pred = y_pred_proba.argmax(axis=1)
classif_rate = accuracy_score(y_val, y_pred)
print("classif_rate for %s : %f " % (name, classif_rate))
print "Test Set:"
for name, classifier in classifiers:
classifier.fit(X_train, y_train)
#y_pred_proba = classifier.predict_proba(X_test)
#y_pred = y_pred_proba.argmax(axis=1)
y_pred = classifier.predict(X_test)
classif_rate = accuracy_score(y_test, y_pred)
print("classif_rate for %s : %f " % (name, classif_rate))
esc = EnsembleSelectionClassifier(estimators=classifiers, n_bags=20, n_best=1,
bag_fraction=0.5, verbose=True)
esc.fit(X_val, y_val)
y_pred = esc.predict(X_test)
classif_rate = accuracy_score(y_test, y_pred)
print("classif_rate for EnsembleSelectionClassifier : %f " % classif_rate)
|
RTHMaK/RPGOne
|
9fe63d5a5f481b6256eb-e754b90d9aea15fe2dabd2e60a35ae0c502c2707/ensemble_selection_demo.py
|
Python
|
apache-2.0
| 4,721
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains GPU utility functions."""
import collections
import re
# Matches the DeviceAttributes.physical_device_desc field.
_PHYSICAL_DEVICE_DESCRIPTION_REGEX = re.compile(
r'name: ([^,]*), (?:.*compute capability: (\d+)\.(\d+))?')
# compute_capability is a (major version, minor version) pair, or None if this
# is not an Nvidia GPU.
GpuInfo = collections.namedtuple('gpu_info', ['name', 'compute_capability'])
def compute_capability_from_device_desc(device_attrs):
"""Returns the GpuInfo given a DeviceAttributes proto.
Args:
device_attrs: A DeviceAttributes proto.
Returns:
A gpu_info tuple. Both fields are None if `device_attrs` does not have a
valid physical_device_desc field.
"""
# TODO(jingyue): The device description generator has to be in sync with
# this file. Another option is to put compute capability in
# DeviceAttributes, but I avoided that to keep DeviceAttributes
# target-independent. Reconsider this option when we have more things like
# this to keep in sync.
# LINT.IfChange
match = _PHYSICAL_DEVICE_DESCRIPTION_REGEX.search(
device_attrs.physical_device_desc)
# LINT.ThenChange(//tensorflow/core/common_runtime/gpu/gpu_device.cc)
if not match:
return GpuInfo(None, None)
cc = (int(match.group(2)), int(match.group(3))) if match.group(2) else None
return GpuInfo(match.group(1), cc)
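# Hedged usage sketch (not part of the original module): feeds the parser a
# hand-written physical_device_desc through a stand-in object; the description
# string is illustrative, not captured from a real device.
if __name__ == '__main__':
    _FakeDeviceAttrs = collections.namedtuple('_FakeDeviceAttrs',
                                              ['physical_device_desc'])
    _attrs = _FakeDeviceAttrs('device: 0, name: Tesla V100-SXM2-16GB, '
                              'pci bus id: 0000:00:1e.0, compute capability: 7.0')
    print(compute_capability_from_device_desc(_attrs))  # gpu_info(name='Tesla V100-SXM2-16GB', compute_capability=(7, 0))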
|
tensorflow/tensorflow
|
tensorflow/python/framework/gpu_util.py
|
Python
|
apache-2.0
| 2,063
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Topological Sorting
from collections import defaultdict
def topsort(graph):
if not graph:
return []
# 1. Count every node's dependencies
count = defaultdict(int)
for node in graph:
for dependency in graph[node]:
count[dependency] += 1
# 2. Find initial nodes - The ones with no incoming edges, so the ones that
# no dependency points at
initial_nodes = [node for node in graph if count[node] == 0]
if graph and not initial_nodes:
raise Exception("Circular depenency detected")
# 3. Process each node in the order found in initial_nodes. Add a processed
# node's dependencies to initial_nodes once no other unprocessed node still
# points at them.
result = []
while initial_nodes:
node = initial_nodes.pop()
result.append(node)
for dependency in graph[node]:
count[dependency] -= 1
if count[dependency] == 0:
initial_nodes.append(dependency)
if len(result) != len(graph):
raise Exception("Circular depenency detected")
return result[::-1]
def test():
from simpletest import _assert, _assert_raises
a, b, c, d, e, f = 'abcdef'
graph = {}
_assert(topsort(graph), [])
graph = {
a: set([]),
}
_assert(topsort(graph), [a])
graph = {
a: set([d, b]),
b: set([d, c]),
c: set([d]),
d: set([]),
e: set([d]),
f: set([d, e]),
}
_assert(topsort(graph), [d, c, b, a, e, f])
graph = {
a: set([b]),
b: set([a]),
}
_assert_raises(Exception, topsort, graph)
if __name__ == '__main__':
test()
|
pgularski/snippets
|
python/algorithms/topsort.py
|
Python
|
mit
| 1,733
|
# # # # #
# keys.py
#
# This file is used to serve up
# RESTful links that can be
# consumed by a frontend system
#
# University of Illinois/NCSA Open Source License
# Copyright (c) 2015 Information Trust Institute
# All rights reserved.
#
# Developed by:
#
# Information Trust Institute
# University of Illinois
# http://www.iti.illinois.edu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal with
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# Redistributions of source code must retain the above copyright notice, this list
# of conditions and the following disclaimers. Redistributions in binary form must
# reproduce the above copyright notice, this list of conditions and the following
# disclaimers in the documentation and/or other materials provided with the
# distribution.
#
# Neither the names of Information Trust Institute, University of Illinois, nor
# the names of its contributors may be used to endorse or promote products derived
# from this Software without specific prior written permission.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE.
#
# # # # #
import domains.support.lib.common as comLib
import os
import re
def getKeyFileNames(thePath):
return [z[0] for x, y in comLib.getFileList(thePath).items() for z in y.items() if re.match(r".*\.pub$", z[0])]
def getKeys(sort="name", order="asc"):
remotePath = comLib.getSupportFilePath("RemoteKeys")
localPath = comLib.getSupportFilePath("LocalKeys")
remoteKeys = getKeyFileNames(remotePath)
localKeys = getKeyFileNames(localPath)
keys = []
for r in remoteKeys:
keys.append({"name": r, 'type': "remote", "mtime": comLib.timestampToPrettyDate(os.stat("{}/{}".format(remotePath, r))[8])})
for l in localKeys:
keys.append({"name": l, 'type': "local", "mtime": comLib.timestampToPrettyDate(os.stat("{}/{}".format(localPath, l))[8])})
return comLib.sortOrder(keys, sort, order)
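# Hedged illustration (not from the original file): a typical getKeys() return
# value; key names and timestamp formatting are made up.
#   [{'name': 'server1.pub', 'type': 'remote', 'mtime': '2015-06-01 12:00:00'},
#    {'name': 'armore.pub',  'type': 'local',  'mtime': '2015-06-02 08:30:00'}]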
def getKeyContent(filename, keyType):
filePath = "{}/{}".format(comLib.getSupportFilePath("RemoteKeys"), filename)
if keyType.lower() == "local":
filePath = "{}/{}".format(comLib.getSupportFilePath("LocalKeys"), filename)
if os.path.exists(filePath):
f = open(filePath, 'r')
try:
return f.read()
except IOError as e:
return ''
except UnicodeDecodeError as e:
return ''
return ''
def verifyFileName(name, keyType):
fileOK = False
errs = []
if keyType is None or keyType not in ["local", "remote"]:
errs.append("Invalid key type: {}".format(keyType))
if not re.match(".*\.pub$", name):
errs.append("Invalid filename: {}. File must end in .pub".format(name))
msg = ""
if len(errs) > 0:
msg = "<br/>".join(errs)
else:
fileOK = True
return fileOK, errs
def verifySaveOk(keyType, filename):
thePath = comLib.getSupportFilePath("{}Keys".format(keyType.capitalize()))
theFiles = getKeyFileNames(thePath)
ret = []
print(keyType, theFiles, filename)
if keyType == "local" and len(theFiles) == 1:
ret.append("ERROR: Local key already exists. Please delete existing local key before adding a new one")
if filename in theFiles:
ret.append("ERROR: Key '{}' already exists in {} keys".format(filename, keyType))
return ret
def saveFile(keyType, theFile):
msg = verifySaveOk(keyType, theFile.filename)
if len(msg) == 0:
theFile.save("{}/{}".format(comLib.getSupportFilePath("{}Keys".format(keyType.capitalize())), theFile.filename))
return "<br/>".join(msg)
def verifyDeleteOk(keyType, filename):
thePath = comLib.getSupportFilePath("{}Keys".format(keyType.capitalize()))
theFiles = getKeyFileNames(thePath)
ret = ""
if len(theFiles) == 1:
ret = "Warning: Deleting the last {0} key prevents encrypted communication from working correctly. Please upload a {0} public key to enable encrypted communiation".format(keyType)
return ret
def deleteFile(keyType, filename):
ret = verifyDeleteOk(keyType, filename)
os.remove("{}/{}".format(comLib.getSupportFilePath("{}Keys".format(keyType.capitalize())), filename))
return ret
|
GridProtectionAlliance/ARMORE
|
source/webServer/domains/support/keys.py
|
Python
|
mit
| 4,993
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stub for pre-rename `tfx.dsl.components.base.executor_spec`."""
from tfx.dsl.components.base import executor_spec
from tfx.utils import deprecation_utils
ExecutorSpec = deprecation_utils.deprecated_alias( # pylint: disable=invalid-name
deprecated_name='tfx.components.base.executor_spec.ExecutorSpec',
name='tfx.dsl.components.base.executor_spec.ExecutorSpec',
func_or_class=executor_spec.ExecutorSpec)
ExecutorClassSpec = deprecation_utils.deprecated_alias( # pylint: disable=invalid-name
deprecated_name='tfx.components.base.executor_spec.ExecutorClassSpec',
name='tfx.dsl.components.base.executor_spec.ExecutorClassSpec',
func_or_class=executor_spec.ExecutorClassSpec)
ExecutorContainerSpec = deprecation_utils.deprecated_alias( # pylint: disable=invalid-name
deprecated_name='tfx.components.base.executor_spec.ExecutorContainerSpec',
name='tfx.dsl.components.base.executor_spec.ExecutorContainerSpec',
func_or_class=executor_spec.ExecutorContainerSpec)
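# Hedged illustration (not part of the original stub): the aliases above keep
# pre-rename imports working (with a deprecation warning), so
#   from tfx.components.base.executor_spec import ExecutorClassSpec      # old path
# resolves to the same class as
#   from tfx.dsl.components.base.executor_spec import ExecutorClassSpec  # new path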
|
tensorflow/tfx
|
tfx/components/base/executor_spec.py
|
Python
|
apache-2.0
| 1,599
|
from index import *
from admin import *
from news_mgr import *
from store import *
#from schedule import *
|
zknight/booster
|
booster/views/__init__.py
|
Python
|
mit
| 107
|
import datetime
import logging
import pytz
from openstates.scrape import Scraper, Event
from .apiclient import OregonLegislatorODataClient
from .utils import SESSION_KEYS
logger = logging.getLogger("openstates")
class OREventScraper(Scraper):
_TZ = pytz.timezone("US/Pacific")
_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
_SOURCE_BASE = "https://olis.oregonlegislature.gov/liz/{}/Committees/{}/{}/Agenda"
# this scraper supports a start_date argument of Y-m-d
# ex: pupa update or events --scrape start_date=2020-01-01
# if you choose a start date in a previous session, make sure to also pass the relevant session
# due to API limitations, each scrape will only scrape the events in that provided (or current) session
def scrape(self, session=None, start_date=None):
self.api_client = OregonLegislatorODataClient(self)
if not session:
session = self.latest_session()
yield from self.scrape_events(session, start_date)
def scrape_events(self, session, start_date):
session_key = SESSION_KEYS[session]
if start_date is None:
start_date = datetime.date.today()
else:
start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d")
committees_by_code = {}
committees_response = self.api_client.get("committees", session=session_key)
for committee in committees_response:
committees_by_code[committee["CommitteeCode"]] = committee["CommitteeName"]
meetings_response = self.api_client.get(
"committee_meetings",
start_date=start_date.strftime(self._DATE_FORMAT),
session=session_key,
)
for meeting in meetings_response:
event_date = self._TZ.localize(
datetime.datetime.strptime(meeting["MeetingDate"], self._DATE_FORMAT)
)
com_name = committees_by_code[meeting["CommitteeCode"]]
event = Event(
start_date=event_date, name=com_name, location_name=meeting["Location"]
)
event.add_source(meeting["AgendaUrl"])
event.extras["meeting_guid"] = meeting["MeetingGuid"]
event.extras["committee_code"] = committee["CommitteeCode"]
event.add_participant(com_name, type="committee", note="host")
for row in meeting["CommitteeAgendaItems"]:
if row["Comments"] is not None:
agenda = event.add_agenda_item(row["Comments"])
if row["MeasureNumber"] is not None:
bill_id = "{} {}".format(row["MeasurePrefix"], row["MeasureNumber"])
agenda.add_bill(bill_id)
for row in meeting["CommitteeMeetingDocuments"]:
event.add_document(
note=row["ExhibitTitle"],
url=row["DocumentUrl"],
on_duplicate="ignore",
)
yield event
|
sunlightlabs/openstates
|
scrapers/or/events.py
|
Python
|
gpl-3.0
| 2,975
|
from contextlib import contextmanager
from django.conf import settings
from django.db.models.sql.compiler import SQLCompiler
class WouldSelectMultipleTablesForUpdate(Exception):
pass
def replacement_as_sql(self, *args, **kwargs):
sql = self.query_lock_limiter_old_as_sql(*args, **kwargs)
# We're doing this after as_sql because at this point all the
# processing to gather information about used tables is guaranteed to be done.
table_names = list(self.query.table_map.keys())
if self.query.select_for_update and (len(table_names) > 1):
whitelisted = sorted(table_names) in self.query_lock_limiter_whitelist
if not whitelisted:
raise WouldSelectMultipleTablesForUpdate(
f"Query would select_for_update more than one table: {sql}. "
f"Add {table_names} to settings.TEST_SELECT_FOR_UPDATE_WHITELISTED_TABLE_SETS "
f"to allow it."
)
return sql
def patch_sqlcompiler(whitelisted_table_sets):
SQLCompiler.query_lock_limiter_old_as_sql = SQLCompiler.as_sql
SQLCompiler.as_sql = replacement_as_sql
SQLCompiler.query_lock_limiter_whitelist = [
sorted(tables) for tables in whitelisted_table_sets
]
def unpatch_sqlcompiler():
SQLCompiler.as_sql = SQLCompiler.query_lock_limiter_old_as_sql
delattr(SQLCompiler, "query_lock_limiter_old_as_sql")
@contextmanager
def query_lock_limiter(enable=False, whitelisted_table_sets=None):
enabled = enable or getattr(
settings, "TEST_SELECT_FOR_UPDATE_LIMITER_ENABLED", False
)
if not enabled:
yield
return
was_already_patched = hasattr(SQLCompiler, "query_lock_limiter_old_as_sql")
if not was_already_patched:
whitelist = whitelisted_table_sets or getattr(
settings, "TEST_SELECT_FOR_UPDATE_WHITELISTED_TABLE_SETS", []
)
patch_sqlcompiler(whitelist)
try:
yield
finally:
if not was_already_patched:
unpatch_sqlcompiler()
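# Hedged usage sketch (not from the original file): wrapping a Django test so a
# select_for_update that would lock more than one table raises; the table names
# and test body are made up.
#
#   with query_lock_limiter(enable=True,
#                           whitelisted_table_sets=[["app_order", "app_orderline"]]):
#       run_checkout_test()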
|
ArabellaTech/drf_tweaks
|
drf_tweaks/test_utils/lock_limiter.py
|
Python
|
mit
| 2,023
|
from ctypeslib.codegen import typedesc
class Func:
def __init__(self, func):
self.name = func.name
self.returns = self._parse_type_arg(func.returns)
self.args = [self._parse_type_arg(arg) for arg in func.iterArgTypes()]
def signature(self):
return "%s %s(%s)" % (self.returns, self.name, ", ".join(self.args))
def _parse_type_arg(self, tp):
if isinstance(tp, typedesc.FundamentalType):
return tp.name
elif isinstance(tp, typedesc.PointerType):
return parse_type(tp.typ) + '*'
elif isinstance(tp, typedesc.CvQualifiedType):
return parse_type(tp.typ)
elif isinstance(tp, typedesc.Typedef):
return tp.name
elif isinstance(tp, typedesc.Structure):
return tp.name
else:
raise ValueError("yoyo", type(tp))
def parse_type(tp):
if isinstance(tp, typedesc.FundamentalType):
return tp.name
elif isinstance(tp, typedesc.PointerType):
if isinstance(tp.typ, typedesc.FunctionType):
args = [parse_type(arg) for arg in tp.typ.iterArgTypes()]
return parse_type(tp.typ.returns) + '(*%s)' + '(%s)' % ", ".join(args)
else:
return parse_type(tp.typ) + '*'
elif isinstance(tp, typedesc.CvQualifiedType):
#return 'const ' + parse_type(tp.typ)
return parse_type(tp.typ)
elif isinstance(tp, typedesc.Typedef):
return tp.name
elif isinstance(tp, typedesc.Structure):
return tp.name
elif isinstance(tp, typedesc.FunctionType):
return ""
else:
raise ValueError("yoyo", type(tp))
|
cournape/cython-codegen
|
cycodegenlib/codegenlib.py
|
Python
|
mit
| 1,660
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Matt Makai <matthew.makai@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
version_added: "2.0"
module: sendgrid
short_description: Sends an email with the SendGrid API
description:
- Sends an email with a SendGrid account through their API, not through
the SMTP service.
notes:
- This module is non-idempotent because it sends an email through the
external API. It is idempotent only in the case that the module fails.
- Like the other notification modules, this one requires an external
dependency to work. In this case, you'll need an active SendGrid
account.
options:
username:
description:
username for logging into the SendGrid account
required: true
password:
description: password that corresponds to the username
required: true
from_address:
description:
the address in the "from" field for the email
required: true
to_addresses:
description:
a list with one or more recipient email addresses
required: true
subject:
description:
the desired subject for the email
required: true
author: '"Matt Makai (@makaimc)" <matthew.makai@gmail.com>'
'''
EXAMPLES = '''
# send an email to a single recipient that the deployment was successful
- sendgrid:
username: "{{ sendgrid_username }}"
password: "{{ sendgrid_password }}"
from_address: "ansible@mycompany.com"
to_addresses:
- "ops@mycompany.com"
subject: "Deployment success."
body: "The most recent Ansible deployment was successful."
delegate_to: localhost
# send an email to more than one recipient that the build failed
- sendgrid:
username: "{{ sendgrid_username }}"
password: "{{ sendgrid_password }}"
from_address: "build@mycompany.com"
to_addresses:
- "ops@mycompany.com"
- "devteam@mycompany.com"
subject: "Build failure!."
body: "Unable to pull source repository from Git server."
delegate_to: localhost
'''
# =======================================
# sendgrid module support methods
#
try:
    import urllib, urllib2
    HAS_URLLIB = True
except ImportError:
    HAS_URLLIB = False  # reported from main(), where fail_json is available
import base64
def post_sendgrid_api(module, username, password, from_address, to_addresses,
subject, body):
SENDGRID_URI = "https://api.sendgrid.com/api/mail.send.json"
AGENT = "Ansible"
data = {'api_user': username, 'api_key':password,
'from':from_address, 'subject': subject, 'text': body}
encoded_data = urllib.urlencode(data)
to_addresses_api = ''
for recipient in to_addresses:
if isinstance(recipient, unicode):
recipient = recipient.encode('utf-8')
to_addresses_api += '&to[]=%s' % recipient
encoded_data += to_addresses_api
request = urllib2.Request(SENDGRID_URI)
request.add_header('User-Agent', AGENT)
request.add_header('Content-type', 'application/x-www-form-urlencoded')
request.add_header('Accept', 'application/json')
return urllib2.urlopen(request, encoded_data)
# =======================================
# Main
#
def main():
module = AnsibleModule(
argument_spec=dict(
username=dict(required=True),
password=dict(required=True, no_log=True),
from_address=dict(required=True),
to_addresses=dict(required=True, type='list'),
subject=dict(required=True),
body=dict(required=True),
),
supports_check_mode=True
)
if not HAS_URLLIB:
    module.fail_json(msg="urllib and urllib2 are required")
username = module.params['username']
password = module.params['password']
from_address = module.params['from_address']
to_addresses = module.params['to_addresses']
subject = module.params['subject']
body = module.params['body']
try:
response = post_sendgrid_api(module, username, password,
from_address, to_addresses, subject, body)
except Exception:
module.fail_json(msg="unable to send email through SendGrid API")
module.exit_json(msg=subject, changed=False)
# import module snippets
from ansible.module_utils.basic import *
main()
|
mapennell/ansible
|
notification/sendgrid.py
|
Python
|
gpl-3.0
| 4,781
|
import math
class Solution(object):
def isPalindrome(self, head):
"""
:type head: ListNode
:rtype: bool
"""
if not head or not head.next:
return True
length = 0
forward = head
backward = head
while forward:
forward = forward.next
length += 1
mid = math.ceil(length * 1.0 / 2)
flag = length % 2  # 0 means length is even, 1 means odd
forward = head
forwardTmp = forward.next
while mid > 1: # stop when mid is 0
mid -= 1
backward = forward
forward = forwardTmp
forwardTmp = forwardTmp.next
forward.next = backward
if not flag:
    backward = forward
forward = forwardTmp
while(forward):
if forward.val != backward.val:
return False
forward = forward.next
backward = backward.next
return True
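# Hedged usage sketch (not part of the original solution): a minimal ListNode
# stand-in (normally supplied by the judge) plus a quick self-check.
if __name__ == "__main__":
    class ListNode(object):
        def __init__(self, val):
            self.val = val
            self.next = None

    def build(values):
        head = None
        for v in reversed(values):
            node = ListNode(v)
            node.next = head
            head = node
        return head

    s = Solution()
    print(s.isPalindrome(build([1, 2, 2, 1])))  # expected: True
    print(s.isPalindrome(build([1, 2, 3])))     # expected: False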
|
andy-sheng/leetcode
|
234-Palindrome-Linked-List.py
|
Python
|
mit
| 998
|
#!/usr/bin/env python
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys, os, subprocess
# Not really re-creating the images ever, cannot make sure they are binary
# identical, so made this optional.
if "logo" in sys.argv:
assert 0 == os.system("convert -background none misc/Logo/Nuitka-Logo-Vertical.svg images/Nuitka-Logo-Vertical.png")
assert 0 == os.system("convert -background none misc/Logo/Nuitka-Logo-Symbol.svg images/Nuitka-Logo-Symbol.png")
assert 0 == os.system("convert -background none misc/Logo/Nuitka-Logo-Horizontal.svg images/Nuitka-Logo-Horizontal.png")
assert 0 == os.system("optipng -o2 images/Nuitka-Logo-Vertical.png")
assert 0 == os.system("optipng -o2 images/Nuitka-Logo-Symbol.png")
assert 0 == os.system("optipng -o2 images/Nuitka-Logo-Horizontal.png")
if os.path.exists("web/nikola-site"):
assert 0 == os.system("convert -resize 32x32 misc/Logo/Nuitka-Logo-Symbol.svg ../nikola-site/files/favicon.ico")
assert 0 == os.system("convert -resize 32x32 misc/Logo/Nuitka-Logo-Symbol.svg ../nikola-site/files/favicon.png")
assert 0 == os.system("convert -resize 72x72 misc/Logo/Nuitka-Logo-Symbol.svg ../nikola-site/files/apple-touch-icon-ipad.png")
assert 0 == os.system("convert -resize 144x144 misc/Logo/Nuitka-Logo-Symbol.svg ../nikola-site/files/apple-touch-icon-ipad3.png")
assert 0 == os.system("convert -resize 57x57 misc/Logo/Nuitka-Logo-Symbol.svg ../nikola-site/files/apple-touch-icon-iphone.png")
assert 0 == os.system("convert -resize 114x114 misc/Logo/Nuitka-Logo-Symbol.svg ../nikola-site/files/apple-touch-icon-iphone4.png")
for document in ("README.rst", "Developer_Manual.rst", "Changelog.rst"):
args = []
if document != "Changelog.rst":
args.append("-s misc/page-styles.txt")
args.append('--header="###Title### - ###Section###"')
args.append('--footer="###Title### - page ###Page### - ###Section###"')
assert 0 == subprocess.call(
"%(rst2pdf)s %(args)s %(document)s" %
{
"rst2pdf" : (
"rst2pdf"
if os.name != "nt" else
r"C:\Python27_32\Scripts\rst2pdf.exe"
),
"args" : ' '.join(args),
"document" : document
},
shell = True
), document
if os.name != "nt":
if not os.path.exists("man"):
os.mkdir("man")
assert 0 == subprocess.call("help2man -n 'the Python compiler' --no-discard-stderr --no-info --include doc/nuitka-man-include.txt ./bin/nuitka >doc/nuitka.1", shell = True)
assert 0 == subprocess.call("help2man -n 'the Python compiler' --no-discard-stderr --no-info ./bin/nuitka-run >doc/nuitka-run.1", shell = True)
for manpage in ("doc/nuitka.1", "doc/nuitka-run.1"):
manpage_contents = open(manpage).readlines()
new_contents = []
mark = False
for count, line in enumerate(manpage_contents):
if mark:
line = ".SS " + line + ".BR\n"
mark = False
elif line == ".IP\n" and manpage_contents[ count + 1 ].endswith(":\n"):
mark = True
continue
if line == r"\fB\-\-g\fR++\-only" + '\n':
line = r"\fB\-\-g\++\-only\fR" + '\n'
new_contents.append(line)
open(manpage, 'w').writelines(new_contents)
|
wfxiang08/Nuitka
|
misc/make-doc.py
|
Python
|
apache-2.0
| 4,137
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
import sys
from Muon.GUI.Common.muon_load_data import MuonLoadData
from Muon.GUI.Common.utilities.load_utils import load_workspace_from_filename
from Muon.GUI.Common.muon_data_context import MuonDataContext
from Muon.GUI.FrequencyDomainAnalysis.frequency_context import FrequencyContext
from mantid.api import AnalysisDataService
import unittest
from Muon.GUI.Common.observer_pattern import Observer
from mantid.api import FileFinder
import copy
if sys.version_info.major > 2:
from unittest import mock
else:
import mock
class MuonDataContextTest(unittest.TestCase):
def setUp(self):
self.loaded_data = MuonLoadData()
self.context = MuonDataContext(self.loaded_data)
self.frequency_context = FrequencyContext(self.context)
self.gui_variable_observer = Observer()
self.gui_variable_observer.update = mock.MagicMock()
self.context.gui_variables_notifier.add_subscriber(self.gui_variable_observer)
self.context.instrument = 'CHRONUS'
self.gui_variable_observer = Observer()
self.gui_variable_observer.update = mock.MagicMock()
self.context.gui_variables_notifier.add_subscriber(self.gui_variable_observer)
filepath = FileFinder.findRuns('CHRONUS00003422.nxs')[0]
load_result, run, filename = load_workspace_from_filename(filepath)
self.loaded_data.add_data(workspace=load_result, run=[run], filename=filename, instrument='CHRONUS')
self.context.current_runs = [[run]]
self.context.update_current_data()
def tearDown(self):
AnalysisDataService.clear()
def test_get_detectors_excluded_from_default_grouping_tables_gets_correct_groups_for_CHRONUS(self):
result = self.frequency_context.get_detectors_excluded_from_default_grouping_tables()
self.assertEqual(result, [256, 425])
if __name__ == '__main__':
unittest.main(buffer=False, verbosity=2)
|
mganeva/mantid
|
scripts/test/Muon/frequency_domain_context_test.py
|
Python
|
gpl-3.0
| 2,194
|
import os
import random
import time
import pytest
from ufo2ft.fontInfoData import (
dateStringToTimeValue,
getAttrWithFallback,
normalizeStringForPostscript,
)
@pytest.fixture
def info(InfoClass):
self = InfoClass()
self.familyName = "Family Name"
self.styleName = "Style Name"
self.unitsPerEm = 1000
self.descender = -250
self.xHeight = 450
self.capHeight = 600
self.ascender = 650
self.italicAngle = 0
return self
class GetAttrWithFallbackTest:
@pytest.mark.parametrize(
"infoDict,expected",
[
# no styleMapFamilyName, no styleMapStyleName
(
{},
{
"familyName": "Family Name",
"styleName": "Style Name",
"styleMapFamilyName": "Family Name Style Name",
"styleMapStyleName": "regular",
"openTypeNamePreferredFamilyName": "Family Name",
"openTypeNamePreferredSubfamilyName": "Style Name",
},
),
# no styleMapStyleName
(
{"styleMapFamilyName": "Style Map Family Name"},
{
"styleMapFamilyName": "Style Map Family Name",
"styleMapStyleName": "regular",
"openTypeNamePreferredFamilyName": "Family Name",
"openTypeNamePreferredSubfamilyName": "Style Name",
},
),
# no styleMapFamilyName, no styleMapStyleName but styleName="Regular"
(
{"styleName": "Regular"},
{
"familyName": "Family Name",
"styleName": "Regular",
"styleMapFamilyName": "Family Name",
"styleMapStyleName": "regular",
"openTypeNamePreferredFamilyName": "Family Name",
"openTypeNamePreferredSubfamilyName": "Regular",
},
),
# no styleMapFamilyName but styleName="Regular"
(
{"styleName": "Regular", "styleMapStyleName": "regular"},
{
"styleMapFamilyName": "Family Name",
"styleMapStyleName": "regular",
"openTypeNamePreferredFamilyName": "Family Name",
"openTypeNamePreferredSubfamilyName": "Regular",
},
),
# no styleMapStyleName but styleName="Regular"
(
{"styleName": "Regular", "styleMapFamilyName": "Style Map Family Name"},
{
"styleMapFamilyName": "Style Map Family Name",
"styleMapStyleName": "regular",
"openTypeNamePreferredFamilyName": "Family Name",
"openTypeNamePreferredSubfamilyName": "Regular",
},
),
# no styleMapFamilyName, no styleMapStyleName but styleName="Bold"
(
{"styleName": "Bold"},
{
"familyName": "Family Name",
"styleName": "Bold",
"styleMapFamilyName": "Family Name",
"styleMapStyleName": "bold",
"openTypeNamePreferredFamilyName": "Family Name",
"openTypeNamePreferredSubfamilyName": "Bold",
},
),
],
)
def test_family_and_style_names(self, info, infoDict, expected):
for key, value in infoDict.items():
setattr(info, key, value)
for key, value in expected.items():
assert getAttrWithFallback(info, key) == value
def test_redundant_metadata(self, info):
assert getAttrWithFallback(info, "openTypeNameVersion") == "Version 0.000"
info.versionMinor = 1
info.versionMajor = 1
assert getAttrWithFallback(info, "openTypeNameVersion") == "Version 1.001"
assert (
getAttrWithFallback(info, "openTypeNameUniqueID")
== "1.001;NONE;FamilyName-StyleName"
)
assert getAttrWithFallback(info, "postscriptSlantAngle") == 0
def test_unecessary_metadata(self, info):
assert getAttrWithFallback(info, "postscriptWeightName") is None
info.postscriptWeightName = "Normal"
assert getAttrWithFallback(info, "postscriptWeightName") == "Normal"
def test_vertical_metrics(self, info):
assert getAttrWithFallback(info, "openTypeHheaAscender") == 950
assert getAttrWithFallback(info, "openTypeHheaDescender") == -250
assert getAttrWithFallback(info, "openTypeOS2TypoAscender") == 650
assert getAttrWithFallback(info, "openTypeOS2TypoDescender") == -250
assert getAttrWithFallback(info, "openTypeOS2WinAscent") == 950
assert getAttrWithFallback(info, "openTypeOS2WinDescent") == 250
def test_caret_slope(self, info):
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 1
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == 0
info.italicAngle = -12
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 1000
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == 213
info.italicAngle = 12
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 1000
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == -213
info.openTypeHheaCaretSlopeRise = 2048
assert info.openTypeHheaCaretSlopeRun is None
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == 2048
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == -435
info.openTypeHheaCaretSlopeRise = None
info.openTypeHheaCaretSlopeRun = 200
assert info.openTypeHheaCaretSlopeRise is None
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRise") == -941
assert getAttrWithFallback(info, "openTypeHheaCaretSlopeRun") == 200
def test_head_created(self, info):
os.environ["SOURCE_DATE_EPOCH"] = "1514485183"
try:
assert (
getAttrWithFallback(info, "openTypeHeadCreated")
== "2017/12/28 18:19:43"
)
finally:
del os.environ["SOURCE_DATE_EPOCH"]
assert getAttrWithFallback(info, "openTypeHeadCreated") != "2017/12/28 18:19:43"
def test_empty_info(self, InfoClass):
info = InfoClass()
assert getAttrWithFallback(info, "familyName") == "New Font"
assert getAttrWithFallback(info, "styleName") == "Regular"
assert getAttrWithFallback(info, "unitsPerEm") == 1000
assert getAttrWithFallback(info, "ascender") == 800
assert getAttrWithFallback(info, "capHeight") == 700
assert getAttrWithFallback(info, "xHeight") == 500
assert getAttrWithFallback(info, "descender") == -200
def test_empty_info_2048(self, InfoClass):
info = InfoClass()
info.unitsPerEm = 2048
assert getAttrWithFallback(info, "unitsPerEm") == 2048
assert getAttrWithFallback(info, "ascender") == 1638
assert getAttrWithFallback(info, "capHeight") == 1434
assert getAttrWithFallback(info, "xHeight") == 1024
assert getAttrWithFallback(info, "descender") == -410
class PostscriptBlueScaleFallbackTest:
def test_without_blue_zones(self, info):
postscriptBlueScale = getAttrWithFallback(info, "postscriptBlueScale")
assert postscriptBlueScale == 0.039625
def test_with_blue_zones(self, info):
info.postscriptBlueValues = [
-13,
0,
470,
483,
534,
547,
556,
569,
654,
667,
677,
690,
738,
758,
]
info.postscriptOtherBlues = [-255, -245]
postscriptBlueScale = getAttrWithFallback(info, "postscriptBlueScale")
assert postscriptBlueScale == 0.0375
class NormalizeStringForPostscriptTest:
def test_no_change(self):
assert (
normalizeStringForPostscript("Sample copyright notice.")
== "Sample copyright notice."
)
class DateStringToTimeValueTest:
def test_roundtrip_random_timestamp(self):
timestamp = random.randint(0, 10 ** 9)
ds = time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(timestamp))
assert dateStringToTimeValue(ds) == timestamp
if __name__ == "__main__":
import sys
sys.exit(pytest.main(sys.argv))
|
jamesgk/ufo2fdk
|
tests/fontInfoData_test.py
|
Python
|
mit
| 8,670
|
"""Database abstraction layer
webdb is a database abstraction layer inspired by web2py's DAL. The goal
of webdb is to be more succinct and offer better cross-table integration.
Use the connect method to open a database connection.
>>> mydb = DB.connect('sqlite','path/to/database.sqlite') # doctest: +ELLIPSIS
Traceback (most recent call last):
...
FileNotFoundError: ... No such file or directory: 'path/to/database.sqlite'
By default, an in-memory database is created.
>>> mydb = DB()
Currently only sqlite and mysql databases are supported. Other databases
may be supported with drivers. See webdb.drivers documentation for more
information.
===
Tables
===
Tables are created using the ``define_table`` method of any database
object. Two special methods help with table definitions: ``conform`` and
``migrate``.
``conform`` reads table definitions from the database, overriding any
tables that have already been defined.
>>> mydb.conform()
>>> list(mydb) #No tables defined yet
[]
>>> mydb.define_table('test_table', StrColumn('key'), StrColumn('value'))
>>> list(mydb)
[<Table 'test_table'>]
>>> mydb.test_table.drop()
>>> mydb.define_table('test_table', StrColumn('key'), StrColumn('value'),
... StrColumn('extra'))
>>> list(mydb)
[<Table 'test_table'>]
>>> mydb.conform()
>>> list(mydb)
[<Table 'test_table'>]
``migrate`` modifies tables in the database to match the newly-defined tables.
>>> mydb.test_table.drop()
>>> mydb.define_table('test_table', IntColumn('key'), StrColumn('value'),
... StrColumn('extra'))
>>> #mydb.migrate()
>>> mydb.test_table
<Table 'test_table'>
Conforming after a migration keeps the same columns, but other information
may be lost. For example, column data types might be lost (sqlite migrations
don't change data types, boolean columns might be interpreted as integers,
etc.).
>>> mydb.conform()
>>> mydb.test_table
<Table 'test_table'>
It is always recommended to conform your database *before* defining columns.
>>> mydb.test_table.drop()
>>> mydb.define_table('test_table', IntColumn('key'), StrColumn('value'),
... StrColumn('extra'))
>>> mydb.define_table('test_types',
... IntColumn('a'),
... BoolColumn('b'),
... StrColumn('c'),
... DateTimeColumn('e'),
... FloatColumn('f'),
... DataColumn('g'),
... RowidColumn('i'),
... )
>>> _ = mydb.test_types.insert(a=1, b=2, c=3, e=datetime.datetime(1969, 10, 5),
... f=6, g=b'7')
>>> for row in mydb.test_types.select(): # doctest: +NORMALIZE_WHITESPACE
... print(row)
Row(a=1, b=True, c='3', e=datetime.datetime(1969, 10, 5, 0, 0),
f=6.0, g=b'7', i=1)
Conforming and migrating are both optional. Attempting to manipulate the
database without these calls may fail if table definitions don't match tables
in the database. However, conform unconditionally reads all tables, so it may
not be appropriate for large databases. Be careful using migrate on databases
that are shared between applications, as it can break those applications if
a table is renamed or altered.
===
Data
===
Add data by calling ``insert`` on a table. Inserted rows can later be
retrieved by their primary key.
>>> mydb = DB()
>>> mydb.define_table('test_table', IntColumn('key'), StrColumn('value'))
>>> mydb.define_table('test_table_x', IntColumn('key'), primarykey=[])
Insert adds a row to the table.
>>> mydb.test_table.insert(key='100', value='a')
Rows can be fetched by primarykey. If no primarykey is specified, an auto-
increment ``rowid`` column is implicitly available; auto-increment values start from 1.
>>> row = mydb.test_table[1]
>>> row.key
100
>>> row.value
'a'
>>> row.rowid
1
>>> del mydb.test_table[1]
===
Consistency
===
The database acts as a context manager which controls data integrity. If
several operations need to be treated atomically, wrap them in a single
``with`` block.
If an error is raised, and the database driver supports transactions, all of
the operations in the current transaction are rolled back. Only the outer-most
context manager commits the transaction. Individual calls that modify
the database are wrapped in their own context managers, so they are committed
automatically.
>>> with mydb:
... if 'transactions' in mydb.__driver__.features:
... mydb.test_table.insert(key=3, value='c')
... mydb.test_table.insert(key=4, value='d')
... raise Exception
Traceback (most recent call last):
...
Exception
>>> list(mydb.test_table.select())
[]
>>> with mydb:
... _ = mydb.test_table.insert(key=3, value='c')
... _ = mydb.test_table.insert(key=7, value='g')
>>> for row in mydb.test_table.select():
... print(row)
Row(key=3, value='c')
Row(key=7, value='g')
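
The nesting behaviour described above can be sketched as follows (shown as
plain code rather than a doctest, and assuming the underlying driver supports
nested ``with`` blocks):
    with mydb:                  # outer block: commits on exit
        with mydb:              # inner block: does not commit by itself
            mydb.test_table.insert(key=8, value='h')
        mydb.test_table.insert(key=9, value='i')
    # both rows become visible together when the outer block exits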
===
Querying
===
Doing comparison, binary or arithmetic operations on columns produces 'Where'
objects.
>>> mydb.test_table.key <= 3
Where([LESSEQUAL, 'test_table'.'key', 3])
The resulting object can be queried. Standard SQL commands are provided. Using
parentheses, a query can be set up and then selected:
>>> for row in (mydb.test_table.key<=3).select():
... print(row)
Row(key=3, value='c')
Rows in a query can be counted...
>>> (mydb.test_table.key>1).count()
2
or updated...
>>> (mydb.test_table.value=='c').update(key=4)
>>> for row in mydb.test_table.select():
... print(row)
Row(key=4, value='c')
Row(key=7, value='g')
or deleted...
>>> (mydb.test_table.key > 5).delete()
>>> for row in mydb.test_table.select():
... print(row)
Row(key=4, value='c')
>>> _ = mydb.test_table.insert(key=4, value='d')
>>> _ = mydb.test_table.insert(key=5, value='d')
Multiple conditions can be combined using bitwise operators & and |
>>> (mydb.test_table.key == 4).count()
2
>>> (mydb.test_table.rowid < 0).count()
0
>>> ((mydb.test_table.rowid < 0) | (mydb.test_table.key == 4)).count()
2
>>> ((mydb.test_table.rowid < 0) & (mydb.test_table.key == 4)).count()
0
>>> for row in mydb.test_table.select(mydb.test_table.value,
... orderby=mydb.test_table.value,
... distinct=True):
... print(row.value)
c
d
Order by one column
>>> for row in mydb.test_table.select(orderby=mydb.test_table.rowid):
... print(row)
Row(key=4, value='c')
Row(key=4, value='d')
Row(key=5, value='d')
Or more
>>> for row in mydb.test_table.select(orderby=[reversed(mydb.test_table.key),
... mydb.test_table.value]):
... print(row)
Row(key=5, value='d')
Row(key=4, value='c')
Row(key=4, value='d')
===
Cleaning Up
===
Remove tables by calling 'drop' on them.
>>> mydb.test_table.drop()
"""
import copy
import datetime
import sys
from . import drivers
from .. import flatten, sequence, collection
from functools import reduce
class RecordError(Exception):
pass
AuthenticationError = drivers.base.AuthenticationError
SQLSyntaxError = drivers.base.SQLSyntaxError
__all__ = ['RecordError', 'AuthenticationError', 'SQLSyntaxError', 'datetime']
class __Row__(tuple):
"""Base class for Row objects - elements of Selection objects
"""
__slots__ = ()
@property
def primarykey(self):
return tuple(self[c.name] for c in self._selection.primarykey)
def _asdict(self):
return {k.name: self[k.name] for k in self._selection.columns}
__dict__ = property(_asdict)
def update(self, **kwargs):
'''Shortcut for updating a single row of the table
'''
if not self._selection.primarykey:
raise RecordError(
"Can only manipulate records from a single table")
table = self._selection.columns[0].table
query = (table._by_pk(self.primarykey))
query.update(**kwargs)
return query.select().one()
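    # Illustrative use (``t`` is a hypothetical table with the default
    # ``rowid`` primary key):
    #   row = t[1]
    #   row = row.update(value='new')   # writes the change and returns the
    #                                   # freshly selected Row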
def __iter__(self):
for i in range(len(self._selection.explicit)):
yield self[i]
def __getitem__(self, key):
try:
return tuple.__getitem__(self, key)
except TypeError:
return tuple.__getitem__(self, self._selection.index(key))
__getattr__ = __getitem__
def __eq__(self, x):
return list(self) == sequence(x)
def __len__(self):
return len(self._selection.explicit)
def __repr__(self):
return 'Row(%s)' % ', '.join(
'%s=%r' % (k.name, v) for k, v in
zip(self._selection.explicit, self))
class Selection(object):
def __init__(self, columns, explicit, primarykey, values):
refs = {'__slots__': (), '_selection': self}
if primarykey and primarykey[0].table._referers:
refs.update({
                col.table._name: property(
                    # bind ``col`` per iteration; a bare lambda would close
                    # over the loop variable and always use the last column
                    lambda row, col=col: (col == col.todb(row)))
for col in primarykey[0].table._referers
})
# self.columns == self.explicit + self.primarykey
self.columns = columns
self.explicit = explicit
self.primarykey = primarykey
self.names = {getattr(c, 'name', None): i
for i, c in enumerate(columns)}
self.values = values
self.Row = type('Row', (__Row__,), refs)
self.cache = None
def index(self, name):
return self.names[name]
def __iter__(self):
return self
def __next__(self):
if self.cache:
value = self.cache
self.cache = None
else:
value = self.values.fetchone()
if value is None:
raise StopIteration
def conv(c, v):
if v is None:
return v
if c.fromdb:
v = c.fromdb(v)
return v if isinstance(v, c.native_type) else c.native_type(v)
return self.Row(map(conv, self.columns, value))
def one(self):
try:
return next(self)
except StopIteration:
return None
first = one
def last(self):
result = None
for result in self:
pass
return result
def skip(self, count):
for x in range(count):
self.values.fetchone()
def __getitem__(self, x):
if not isinstance(x, slice):
raise TypeError("Only slices of selections are supported")
if (x.start is not None and x.start < 0) or \
(x.stop is not None and x.stop < 0):
raise ValueError("Negative slices are not supported")
if x.start is not None and x.start > 0:
self.skip(x.start)
if x.stop is None:
return list(self)
        else:
            # fetch the remaining rows up to ``stop`` (``start`` rows were
            # skipped above); stop early if the selection runs out
            result = []
            for _ in range(x.stop - (x.start or 0)):
                row = self.one()
                if row is None:
                    break
                result.append(row)
            return result
def __bool__(self):
if not self.cache:
self.cache = self.values.fetchone()
return self.cache is not None
class Selectable(object):
def _get_columns(self, columns):
if not columns:
columns = [table.ALL for table in self._tables]
return flatten(columns)
def select(self, *columns, **props):
columns = self._get_columns(columns)
all_columns = columns[:]
primarykey = []
if not self._tables:
raise Exception('No tables! Using %s' % flatten(columns))
elif len(self._tables) == 1 and not props.get('distinct'):
primarykey = self._tables.copy().pop().primarykey
all_columns.extend(primarykey)
values = self._db.__driver__._select(
all_columns,
self._tables,
self._where_tree,
props.get('distinct', False),
sequence(props.get('orderby', ()))
)
return Selection(all_columns, columns, primarykey, values)
def select1(self, *columns, **props):
return self.select(*columns, **props).one()
def get(self, expression, **props):
return self.select(expression, **props).one()[0]
def count(self, **props):
columns = flatten(table.primarykey for table in self._tables)
values = self._db.__driver__._select(
columns,
self._tables,
self._where_tree,
props.get('distinct', False),
sequence(props.get('orderby', ()))
)
return len(values.fetchall())
__len__ = count
def update(self, **values):
self._db.__driver__._update(self._tables.copy().pop()._name,
self._where_tree, values)
def delete(self):
self._db.__driver__._delete(self._tables.copy().pop()._name,
self._where_tree)
class Where(Selectable):
def __init__(self, old, *wrapped, **kwargs):
self._db = old._db
self._tables = old._tables
self._where_tree = old._op_args(*wrapped)
self.todb = kwargs.get('todb', old.todb)
self.fromdb = kwargs.get('fromdb', old.fromdb)
self.native_type = kwargs.get('native_type', old.native_type)
def __repr__(self):
return 'Where(%r)' % self._where_tree
def _op_args(self, op, *args):
return [op] + [getattr(x, '_where_tree', x) for x in args]
def __bool__(self):
return True
def __eq__(self, x):
return Where(self, drivers.base.EQUAL, self, x)
def __ne__(self, x):
return Where(self, drivers.base.NOTEQUAL, self, x)
def __le__(self, x):
return Where(self, drivers.base.LESSEQUAL, self, x)
def __ge__(self, x):
return Where(self, drivers.base.GREATEREQUAL, self, x)
def __lt__(self, x):
return Where(self, drivers.base.LESSTHAN, self, x)
def __gt__(self, x):
return Where(self, drivers.base.GREATERTHAN, self, x)
    def __add__(self, x):
        # plain values (e.g. int or str literals) have no ``native_type``
        x_native = getattr(x, 'native_type', type(x))
        if (isinstance(x, str) or
                self.native_type in {str, bytes} or
                x_native in {str, bytes}):
            self._text_affinity = True
            return Where(self, drivers.base.CONCATENATE, self, x)
        else:
            return Where(self, drivers.base.ADD, self, x)
def __sub__(self, x):
return Where(self, drivers.base.SUBTRACT, self, x)
def __mul__(self, x):
return Where(self, drivers.base.MULTIPLY, self, x)
def __div__(self, x):
return Where(self, drivers.base.DIVIDE, self, x)
def __floordiv__(self, x):
return Where(self, drivers.base.FLOORDIVIDE, self, x)
def __truediv__(self, x):
return Where(self, drivers.base.DIVIDE, self, x)
def __mod__(self, x):
return Where(self, drivers.base.MODULO, self, x)
def __and__(self, x):
return Where(self, drivers.base.AND, self, x)
def __or__(self, x):
return Where(self, drivers.base.OR, self, x)
def __invert__(self):
return Where(self, drivers.base.NOT, self)
def __abs__(self):
return Where(self, drivers.base.ABS, self)
def __neg__(self):
return Where(self, drivers.base.NEGATIVE, self)
def length(self):
return Where(self, drivers.base.LENGTH, self)
def __reversed__(self):
return Where(self, drivers.base.DESCEND, self)
def sum(self):
return Where(self, drivers.base.SUM, self)
def average(self):
return Where(self, drivers.base.AVERAGE, self, native_type=float)
def min(self):
return Where(self, drivers.base.MIN, self)
def max(self):
return Where(self, drivers.base.MAX, self)
def round(self, precision=None):
return (Where(self, drivers.base.ROUND, self)
if precision is None else
Where(self, drivers.base.ROUND, self, precision))
def like(self, pattern, escape=None):
return (Where(self, drivers.base.LIKE, self, pattern, escape)
if escape else
Where(self, drivers.base.LIKE, self, pattern))
def glob(self, pattern):
return Where(self, drivers.base.GLOB, self, pattern)
def strip(self):
return Where(self, drivers.base.STRIP, self)
def lstrip(self):
return Where(self, drivers.base.LSTRIP, self)
def rstrip(self):
return Where(self, drivers.base.RSTRIP, self)
def replace(self, old, new):
return Where(self, drivers.base.REPLACE, self, old, new)
def endswith(self, suffix):
return self[-len(suffix):] == suffix
def startswith(self, prefix):
return self[:len(prefix)] == prefix
def __getitem__(self, index):
if isinstance(index, slice):
if index.step not in (None, 1):
raise ValueError('Slices of db columns must have step==1')
start = (index.start or 0)
if start >= 0:
start += 1
if index.stop is None:
return Where(self, drivers.base.SUBSTRING, self, start)
elif index.stop >= 0:
return Where(self, drivers.base.SUBSTRING, self, start,
index.stop - start + 1)
else:
return Where(
self, drivers.base.SUBSTRING, self, start, Where(
self, drivers.base.ADD, Where(
self, drivers.base.LENGTH, self
), index.stop
)
)
return Where(self, drivers.base.SUBSTRING, self, index+1, 1)
def coalesce(self, *args):
return Where(self, drivers.base.COALESCE, self, *args)
def between(self, min, max):
return Where(self, drivers.base.BETWEEN, self, min, max)
def ident(x):
return x
class Column(Where):
"""Object representing a single column in a database table.
:``name``: Name of the column. Must consist only of alpha-numerics
and underscores.
:``native_type``: Python type which database should expect and
produce. Must be one of ``int``, ``bool``, ``float``, ``unicode``,
``bytes``, or ``datetime.datetime``. This value is used by the
database driver to determine the type affinity of the database
column.
:``todb=None``: Function which converts a value to ``native_type``
for passing to the database driver. If ``todb`` is ``None``, the
value is passed through unaltered.
:``fromdb=None``: Function which converts a ``native_type`` value
from the database to the desired type. If ``fromdb`` is ``None``,
the native_type is returned unaltered.
:``required=False``: Boolean value which determines whether a value
must be given on insert or update. ``None`` is not allowed as a
value of required columns.
:``default=None``: Value to insert when no value is specified for
this column. This value is ignored if ``required`` is true. If
``default`` is callable, it will be called with no arguments
every time an insert is performed and the return value will be
used instead.
:``unique=False``: Boolean value. If true, no value (except
``None``) can occur more than once in this column.
:``primarykey=False``: Boolean value. If true, the values of this
column uniquely identify a row (possibly in combination with other
columns). A true value for ``primarykey`` implies ``unique=True``
and ``required=True``.
:``references=None``: For any ``table``, values in this column (or
the result of this column's ``todb`` function) refer to
    corresponding rows in ``table``. It is recommended to use
    ``ReferenceColumn`` to properly set up such references.
:``length``: Integer specifying the expected maximum length of this
column's values. Please note that this limit is only enforced by
the database itself, so database engines that don't enforce size
limits (e.g. sqlite) might store longer values. In order to
enforce a strict length limit, use a ``todb`` function to truncate
values. For example, ``todb=lambda x:x[:24]``
:``autoincrement=False``: Boolean value. If true, an
incrementally-increasing integer value is inserted by default by
the database.
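
    A minimal, illustrative sketch (the column name and options here are
    made up for the example):

    >>> age = IntColumn('age', required=True)
    >>> age.name, age.required, age.primarykey
    ('age', True, False)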
"""
def __init__(self, name, native_type, todb=None, fromdb=None,
required=False, default=None, unique=False, primarykey=False,
references=None, length=None, autoincrement=False):
Selectable.__init__(self)
self.name = name
self.table = None
self.native_type = native_type
self.todb = todb
self.fromdb = fromdb
self.required = bool(required)
self.default = default
self.unique = bool(unique)
self.primarykey = bool(primarykey)
self.references = references
self.length = length
self.autoincrement = bool(autoincrement)
@property
def _tables(self):
return {self.table}
@property
def _db(self):
return self.table._db
def __repr__(self):
if self.table:
return '%r.%r' % (self.table._name, self.name)
else:
return repr(self.name)
def __hash__(self):
return hash(id(self))
__all__.append('Column')
def RowidColumn(name, *args, **kwargs):
kwargs['primarykey'] = True
kwargs['autoincrement'] = True
return Column(name, int, *args, **kwargs)
__all__.append('RowidColumn')
def IntColumn(name, *args, **kwargs):
return Column(name, int, *args, **kwargs)
__all__.append('IntColumn')
def BoolColumn(name, *args, **kwargs):
return Column(name, bool, *args, **kwargs)
__all__.append('BoolColumn')
def StrColumn(name, *args, **kwargs):
return Column(name, str, *args, **kwargs)
__all__.append('StrColumn')
def FloatColumn(name, *args, **kwargs):
return Column(name, float, *args, **kwargs)
__all__.append('FloatColumn')
def DataColumn(name, *args, **kwargs):
return Column(name, bytes, *args, **kwargs)
__all__.append('DataColumn')
def DateTimeColumn(name, *args, **kwargs):
kwargs['todb'] = drivers.base.timestamp
kwargs['fromdb'] = drivers.base.timestamp.parse
return Column(name, datetime.datetime, *args, **kwargs)
__all__.append('DateTimeColumn')
def ReferenceColumn(name, table, todb=None, *args, **kwargs):
kwargs['references'] = table
if not todb:
if len(table.primarykey) == 1:
def todb(row):
return row.primarykey[0]
elif len(table.primarykey) == 0:
raise TypeError("Cannot reference non-indexed table"
" %r" % table._name)
else:
raise ValueError("Default ReferenceColumn todb function supports"
" only 1 primary key.")
query = todb(table)
kwargs['todb'] = todb
kwargs['fromdb'] = query.fromdb
self = Column(name, query.native_type, *args, **kwargs)
table._referers.add(self)
return self
__all__.append('ReferenceColumn')
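# Illustrative sketch of how ReferenceColumn is meant to be used (table and
# column names are hypothetical): given a table ``person`` with a
# single-column primary key, a referring table could be defined as
#
#   db.define_table('pet',
#       StrColumn('name'),
#       ReferenceColumn('owner', db.person),
#   )
#
# The reference column stores the primary key of the referred-to row, and
# rows selected from ``person`` gain a ``pet`` property that queries the
# referring rows.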
class Table(Selectable):
"""
self._columns: collection of all column objects
self.ALL: list of columns the user defined (excludes implicit or
    primarykey-only columns)
self.primarykey: list of columns which together uniquely identify a row in
the table
self._db: reference to db which contains this table
self._name: my name
>>> Table(None, 'table', ())
<Table 'table'>
>>> t = Table(None, 'table', [Column('abc', str), Column('def', int)])
>>> t.ALL[0].table == t
True
>>> t.ALL[0].name
'abc'
>>> t.ALL[1].name
'def'
>>> t.primarykey[0].name
'rowid'
"""
def __init__(self, db, name, columns, primarykey=None):
Selectable.__init__(self)
self._db = db
self._name = name
self.ALL = columns
self._columns = collection('name', columns)
self._referers = set()
if primarykey is None:
pk = [c for c in columns if c.primarykey]
if pk:
self.primarykey = pk
else:
rowid = RowidColumn('rowid')
self._columns.add(rowid)
self.primarykey = [rowid]
else:
self.primarykey = []
if primarykey:
for col in primarykey:
if isinstance(col, str):
col = self._columns[col]
else:
self._columns.add(col)
self.primarykey.append(col)
for col in self._columns:
col.table = self
def __getattr__(self, key):
if key in self.__dict__:
return self.__dict__[key]
else:
return self._columns[key]
def __hash__(self):
return hash(self._name)
def _by_pk(self, key):
if self.primarykey:
key = sequence(key)
if len(self.primarykey) != len(key):
raise IndexError(
'Primarykey for %s requires %i values (got %i)' % (
self._name, len(self.primarykey), len(key)))
return reduce(
lambda x, y: x & y,
map(lambda x, y: x == y, self.primarykey, key)
)
raise TypeError('Table %r has no primarykey' % (self._name))
def __getitem__(self, key):
result = self._by_pk(key).select(self.ALL).one()
if result is None:
raise KeyError('No Row in database matching primary key %s' % (
repr(sequence(key))[1:-1]))
return result
def __delitem__(self, key):
self._by_pk(key).delete()
def insert(self, **values):
db_values = []
for k, v in values.items():
try:
todb = self._columns[k].todb
if todb:
v = todb(v)
db_values.append(v)
except TypeError:
print(k, self._columns[k].todb, repr(v), file=sys.stderr)
raise
except KeyError:
raise KeyError('No such column in table: %s' % k)
self._db.__driver__._insert(self._name, list(values.keys()), db_values)
def insert_many(self, *records):
for record in records:
self.insert(**record)
@property
def _tables(self):
return {self}
@property
def _where_tree(self):
return []
def drop(self):
self._db.__driver__.drop_table(self._name)
del self._db[self._name]
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, self._name)
def __bool__(self):
return True
def __eq__(self, x):
if isinstance(x, Table):
x = x._columns
for a, b in zip(self._columns, x):
a = dict(vars(a))
a.pop('table', None)
b = dict(vars(b))
b.pop('table', None)
if a != b:
return False
return True
__all__.append('Table')
class UnknownDriver(Exception):
pass
__all__.append('UnknownDriver')
class DB(collection):
"""
>>> mydb = DB.connect('sqlite')
>>> mydb.define_table('test', StrColumn('data'))
>>> list(mydb)
[<Table 'test'>]
"""
__driver__ = drivers.sqlite.sqlite()
execute = __driver__.execute
__enter__ = __driver__.__enter__
__exit__ = __driver__.__exit__
@property
def lastsql(self):
return self.__driver__.lastsql
def __key__(self, obj):
return obj._name
def define_table(self, name, *columns, **kwargs):
if hasattr(self, name):
raise AttributeError("%s already defined" % name)
columns = list(columns)
primarykey = ()
for i, c in enumerate(columns):
if isinstance(c, Table):
newcols = list(map(copy.copy, c.ALL))
for col in newcols:
col.table = None
columns[i] = newcols
primarykey = list(map(copy.copy, c.primarykey))
for col in primarykey:
col.table = None
if primarykey and kwargs.get('primarykey') is None:
kwargs['primarykey'] = primarykey
elif kwargs.get('primarykey'):
kwargs['primarykey'] = sequence(kwargs['primarykey'])
value = Table(self, name, flatten(columns), **kwargs)
self.__driver__._create_table_if_nexists(
name, value._columns, [pk.name for pk in value.primarykey])
collection.add(self, value)
def __getattr__(self, key):
try:
return self.__dict__[key] if key[0] == '_' else self[key]
except KeyError:
raise AttributeError(key)
def __delattr__(self, key):
try:
if key[0] == '_':
del self.__dict__[key]
else:
del self[key]
except KeyError:
raise AttributeError(key)
@classmethod
def connect(cls, name, *args, **kwargs):
module = getattr(drivers, name, None)
class_ = getattr(module, name, None)
if class_ is None:
raise UnknownDriver("Unable to find database driver %r" % name)
driver = class_(*args, **kwargs)
return type(cls.__name__, (cls,), {
'__driver__': driver,
'execute': driver.execute,
'__enter__': driver.__enter__,
'__exit__': driver.__exit__,
})()
def conform(self):
"""DB.conform()
Reads database for table definitions"""
for table in self.__driver__.list_tables():
columns = []
for name, v_type, notnull, default in (
self.__driver__._list_columns(table)):
columns.append(Column(
name, v_type, required=notnull, default=default))
t = Table(self, table, columns)
collection.add(self, t)
def migrate(self):
"""DB.migrate()
Alters database to match defined tables"""
names = set(self.keys())
db_tables = set(self.__driver__.list_tables())
        for name in names - db_tables:
            # Create missing tables (same driver call as define_table)
            self.__driver__._create_table_if_nexists(
                name, self[name]._columns,
                [pk.name for pk in self[name].primarykey])
        for name in names.intersection(db_tables):
            # Alter if not the same -- not implemented yet; the intended
            # call is self.__driver__.alter_table(name, self[name])
            raise NotImplementedError
__all__.append('DB')
connect = DB.connect
__all__.append('connect')
|
orbnauticus/silk
|
silk/webdb/__init__.py
|
Python
|
bsd-3-clause
| 30,145
|
# MJPEG Video Recording on Movement Example
#
# Note: You will need an SD card to run this example.
#
# You can use your OpenMV Cam to record mjpeg files. You can either feed the
# recorder object JPEG frames or RGB565/Grayscale frames. Once you've finished
# recording a Mjpeg file you can use VLC to play it. If you are on Ubuntu then
# the built-in video player will work too.
#
# This example demonstrates using frame differencing with your OpenMV Cam to do
# motion detection. After motion is detected your OpenMV Cam will take video.
import sensor, image, time, mjpeg, pyb, os
RED_LED_PIN = 1
BLUE_LED_PIN = 3
sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(time = 2000) # Let new settings take effect.
sensor.set_auto_whitebal(False) # Turn off white balance.
if not "temp" in os.listdir(): os.mkdir("temp") # Make a temp directory
while(True):
pyb.LED(RED_LED_PIN).on()
print("About to save background image...")
sensor.skip_frames(time = 2000) # Give the user time to get ready.
pyb.LED(RED_LED_PIN).off()
sensor.snapshot().save("temp/bg.bmp")
print("Saved background image - Now detecting motion!")
pyb.LED(BLUE_LED_PIN).on()
diff = 10 # We'll say we detected motion after 10 frames of motion.
while(diff):
img = sensor.snapshot()
img.difference("temp/bg.bmp")
stats = img.statistics()
# Stats 5 is the max of the lighting color channel. The below code
# triggers when the lighting max for the whole image goes above 20.
# The lighting difference maximum should be zero normally.
if (stats[5] > 20):
diff -= 1
m = mjpeg.Mjpeg("example-%d.mjpeg" % pyb.rng())
clock = time.clock() # Tracks FPS.
print("You're on camera!")
for i in range(200):
clock.tick()
m.add_frame(sensor.snapshot())
print(clock.fps())
m.close(clock.fps())
pyb.LED(BLUE_LED_PIN).off()
print("Restarting...")
|
openmv/openmv
|
scripts/examples/OpenMV/06-Video-Recording/mjpeg_on_movement.py
|
Python
|
mit
| 2,093
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('repo_manage.views',
url(r'^$', 'index', name='index'),
url(r'^create/$', 'repo_simple_new', name='repo_simple_new'),
url(r'^create/settings/$', 'repo_new', name='repo_new'),
url(r'^(?P<user_name>\w+)/$', 'repo_list', name='repo_list'),
url(r'^(?P<user_name>[-\w]+)/(?P<repo_name>[-\w]+)/$', 'repo_desc', name='repo_desc'),
url(r'^(?P<user_name>[-\w]+)/(?P<repo_name>[-\w]+)/access$', 'repo_access', name='repo_access'),
url(r'^(?P<user_name>[-\w]+)/(?P<repo_name>[-\w]+)/clone$', 'repo_clone', name='repo_clone'),
url(r'^(?P<user_name>[-\w]+)/(?P<repo_name>[-\w]+)/delete/$', 'repo_delete', name='repo_delete'),
url(r'^(?P<user_name>[-\w]+)/(?P<repo_name>[-\w]+)/settings/$', 'repo_edit', name='repo_edit'),
)
|
vault/bugit
|
repo_manage/urls.py
|
Python
|
mit
| 825
|
# -*- coding: utf-8 -*-
import os
import pytest
import sqlalchemy as sa
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy_defaults import make_lazy_configured
@pytest.fixture(scope='session')
def dsn():
return os.environ.get('DSN') or 'sqlite:///:memory:'
@pytest.fixture
def Base():
return declarative_base()
@pytest.yield_fixture
def engine(dsn):
engine = create_engine(dsn)
yield engine
engine.dispose()
@pytest.yield_fixture
def connection(engine):
conn = engine.connect()
yield conn
conn.close()
@pytest.fixture
def models():
return []
@pytest.fixture
def lazy_options():
return {}
@pytest.yield_fixture
def Session(Base, connection, models):
sa.orm.configure_mappers()
Base.metadata.create_all(connection)
yield sessionmaker(bind=connection)
Base.metadata.drop_all(connection)
@pytest.yield_fixture
def session(Session):
session = Session()
yield session
session.close_all()
@pytest.fixture
def lazy_configured(models):
for model in models:
make_lazy_configured(model.__mapper__)
|
kvesteri/sqlalchemy-defaults
|
tests/conftest.py
|
Python
|
bsd-3-clause
| 1,185
|
""" CommandLine - Get and parse command line options
    NOTE: This is still very much a work in progress !!!
    Different versions are likely to be incompatible.
TODO:
* Incorporate the changes made by (see Inbox)
* Add number range option using srange()
"""
__copyright__ = """\
Copyright (c), 1997-2006, Marc-Andre Lemburg (mal@lemburg.com)
Copyright (c), 2000-2006, eGenix.com Software GmbH (info@egenix.com)
See the documentation for further information on copyrights,
or contact the author. All Rights Reserved.
"""
__version__ = '1.2'
import sys, getopt, string, glob, os, re, exceptions, traceback
### Helpers
def _getopt_flags(options):
""" Convert the option list to a getopt flag string and long opt
list
"""
s = []
l = []
for o in options:
if o.prefix == '-':
# short option
s.append(o.name)
if o.takes_argument:
s.append(':')
else:
# long option
if o.takes_argument:
l.append(o.name+'=')
else:
l.append(o.name)
return string.join(s,''),l
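# Illustrative result (the options are hypothetical): for
# [SwitchOption('-v'), ArgumentOption('--output')] this returns
# ('v', ['output=']), the (shortopts, longopts) pair expected by
# getopt.getopt().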
def invisible_input(prompt='>>> '):
""" Get raw input from a terminal without echoing the characters to
the terminal, e.g. for password queries.
"""
import getpass
entry = getpass.getpass(prompt)
if entry is None:
raise KeyboardInterrupt
return entry
def fileopen(name, mode='wb', encoding=None):
""" Open a file using mode.
Default mode is 'wb' meaning to open the file for writing in
binary mode. If encoding is given, I/O to and from the file is
transparently encoded using the given encoding.
Files opened for writing are chmod()ed to 0600.
"""
if name == 'stdout':
return sys.stdout
elif name == 'stderr':
return sys.stderr
elif name == 'stdin':
return sys.stdin
else:
if encoding is not None:
import codecs
f = codecs.open(name, mode, encoding)
else:
f = open(name, mode)
if 'w' in mode:
os.chmod(name, 0600)
return f
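# Illustrative use of fileopen (the file name is hypothetical):
#   log = fileopen('run.log', 'w', encoding='utf-8')
#   log.write(u'started\n')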
def option_dict(options):
""" Return a dictionary mapping option names to Option instances.
"""
d = {}
for option in options:
d[option.name] = option
return d
# Alias
getpasswd = invisible_input
_integerRE = re.compile('\s*(-?\d+)\s*$')
_integerRangeRE = re.compile('\s*(-?\d+)\s*-\s*(-?\d+)\s*$')
def srange(s,
split=string.split,integer=_integerRE,
integerRange=_integerRangeRE):
""" Converts a textual representation of integer numbers and ranges
to a Python list.
Supported formats: 2,3,4,2-10,-1 - -3, 5 - -2
Values are appended to the created list in the order specified
in the string.
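        Example (illustrative):
        >>> srange('1,3-5')
        [1, 3, 4, 5]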
"""
l = []
append = l.append
for entry in split(s,','):
m = integer.match(entry)
if m:
append(int(m.groups()[0]))
continue
m = integerRange.match(entry)
if m:
start,end = map(int,m.groups())
l[len(l):] = range(start,end+1)
return l
def abspath(path,
expandvars=os.path.expandvars,expanduser=os.path.expanduser,
join=os.path.join,getcwd=os.getcwd):
""" Return the corresponding absolute path for path.
path is expanded in the usual shell ways before
joining it with the current working directory.
"""
try:
path = expandvars(path)
except AttributeError:
pass
try:
path = expanduser(path)
except AttributeError:
pass
return join(getcwd(), path)
### Option classes
class Option:
""" Option base class. Takes no argument.
"""
default = None
    helptext = ''
    help = ''                   # fallback used by __str__ when no help given
prefix = '-'
takes_argument = 0
has_default = 0
tab = 15
def __init__(self,name,help=None):
if not name[:1] == '-':
raise TypeError,'option names must start with "-"'
if name[1:2] == '-':
self.prefix = '--'
self.name = name[2:]
else:
self.name = name[1:]
if help:
self.help = help
def __str__(self):
o = self
name = o.prefix + o.name
if o.takes_argument:
name = name + ' arg'
if len(name) > self.tab:
name = name + '\n' + ' ' * (self.tab + 1 + len(o.prefix))
else:
name = '%-*s ' % (self.tab, name)
description = o.help
if o.has_default:
description = description + ' (%s)' % o.default
return '%s %s' % (name, description)
class ArgumentOption(Option):
""" Option that takes an argument.
An optional default argument can be given.
"""
def __init__(self,name,help=None,default=None):
# Basemethod
Option.__init__(self,name,help)
if default is not None:
self.default = default
self.has_default = 1
self.takes_argument = 1
class SwitchOption(Option):
""" Options that can be on or off. Has an optional default value.
"""
def __init__(self,name,help=None,default=None):
# Basemethod
Option.__init__(self,name,help)
if default is not None:
self.default = default
self.has_default = 1
### Application baseclass
class Application:
""" Command line application interface with builtin argument
parsing.
"""
# Options the program accepts (Option instances)
options = []
# Standard settings; these are appended to options in __init__
preset_options = [SwitchOption('-v',
'generate verbose output'),
SwitchOption('-h',
'show this help text'),
SwitchOption('--help',
'show this help text'),
SwitchOption('--debug',
'enable debugging'),
SwitchOption('--copyright',
'show copyright'),
SwitchOption('--examples',
'show examples of usage')]
# The help layout looks like this:
# [header] - defaults to ''
#
# [synopsis] - formatted as '<self.name> %s' % self.synopsis
#
# options:
# [options] - formatted from self.options
#
# [version] - formatted as 'Version:\n %s' % self.version, if given
#
# [about] - defaults to ''
#
# Note: all fields that do not behave as template are formatted
# using the instances dictionary as substitution namespace,
# e.g. %(name)s will be replaced by the applications name.
#
# Header (default to program name)
header = ''
# Name (defaults to program name)
name = ''
# Synopsis (%(name)s is replaced by the program name)
synopsis = '%(name)s [option] files...'
# Version (optional)
version = ''
# General information printed after the possible options (optional)
about = ''
# Examples of usage to show when the --examples option is given (optional)
examples = ''
# Copyright to show
copyright = __copyright__
# Apply file globbing ?
globbing = 1
# Generate debug output ?
debug = 0
# Generate verbose output ?
verbose = 0
# Internal errors to catch
InternalError = exceptions.Exception
# Instance variables:
values = None # Dictionary of passed options (or default values)
# indexed by the options name, e.g. '-h'
files = None # List of passed filenames
optionlist = None # List of passed options
def __init__(self,argv=None):
# Setup application specs
if argv is None:
argv = sys.argv
self.filename = os.path.split(argv[0])[1]
if not self.name:
self.name = os.path.split(self.filename)[1]
else:
self.name = self.name
if not self.header:
self.header = self.name
else:
self.header = self.header
# Init .arguments list
self.arguments = argv[1:]
# Setup Option mapping
self.option_map = option_dict(self.options)
# Append preset options
for option in self.preset_options:
if not self.option_map.has_key(option.name):
self.add_option(option)
# Init .files list
self.files = []
# Start Application
try:
# Process startup
rc = self.startup()
if rc is not None:
raise SystemExit,rc
# Parse command line
rc = self.parse()
if rc is not None:
raise SystemExit,rc
# Start application
rc = self.main()
if rc is None:
rc = 0
except SystemExit,rc:
pass
except KeyboardInterrupt:
print
print '* User Break'
print
rc = 1
except self.InternalError:
print
print '* Internal Error (use --debug to display the traceback)'
if self.debug:
print
traceback.print_exc(20, sys.stdout)
elif self.verbose:
print ' %s: %s' % sys.exc_info()[:2]
print
rc = 1
raise SystemExit,rc
def add_option(self, option):
""" Add a new Option instance to the Application dynamically.
Note that this has to be done *before* .parse() is being
executed.
"""
self.options.append(option)
self.option_map[option.name] = option
def startup(self):
""" Set user defined instance variables.
If this method returns anything other than None, the
process is terminated with the return value as exit code.
"""
return None
def exit(self, rc=0):
""" Exit the program.
rc is used as exit code and passed back to the calling
program. It defaults to 0 which usually means: OK.
"""
raise SystemExit, rc
def parse(self):
""" Parse the command line and fill in self.values and self.files.
After having parsed the options, the remaining command line
arguments are interpreted as files and passed to .handle_files()
for processing.
As final step the option handlers are called in the order
of the options given on the command line.
"""
# Parse arguments
self.values = values = {}
for o in self.options:
if o.has_default:
values[o.prefix+o.name] = o.default
else:
values[o.prefix+o.name] = 0
flags,lflags = _getopt_flags(self.options)
try:
optlist,files = getopt.getopt(self.arguments,flags,lflags)
if self.globbing:
l = []
for f in files:
gf = glob.glob(f)
if not gf:
l.append(f)
else:
l[len(l):] = gf
files = l
self.optionlist = optlist
self.files = files + self.files
except getopt.error,why:
self.help(why)
sys.exit(1)
# Call file handler
rc = self.handle_files(self.files)
if rc is not None:
sys.exit(rc)
# Call option handlers
for optionname, value in optlist:
# Try to convert value to integer
try:
value = string.atoi(value)
except ValueError:
pass
# Find handler and call it (or count the number of option
# instances on the command line)
handlername = 'handle' + string.replace(optionname, '-', '_')
try:
handler = getattr(self, handlername)
except AttributeError:
if value == '':
# count the number of occurrences
if values.has_key(optionname):
values[optionname] = values[optionname] + 1
else:
values[optionname] = 1
else:
values[optionname] = value
else:
rc = handler(value)
if rc is not None:
raise SystemExit, rc
# Apply final file check (for backward compatibility)
rc = self.check_files(self.files)
if rc is not None:
sys.exit(rc)
def check_files(self,filelist):
""" Apply some user defined checks on the files given in filelist.
This may modify filelist in place. A typical application
is checking that at least n files are given.
If this method returns anything other than None, the
process is terminated with the return value as exit code.
"""
return None
def help(self,note=''):
self.print_header()
if self.synopsis:
print 'Synopsis:'
# To remain backward compatible:
try:
synopsis = self.synopsis % self.name
except (NameError, KeyError, TypeError):
synopsis = self.synopsis % self.__dict__
print ' ' + synopsis
print
self.print_options()
if self.version:
print 'Version:'
print ' %s' % self.version
print
if self.about:
print string.strip(self.about % self.__dict__)
print
if note:
print '-'*72
print 'Note:',note
print
def notice(self,note):
print '-'*72
print 'Note:',note
print '-'*72
print
def print_header(self):
print '-'*72
print self.header % self.__dict__
print '-'*72
print
def print_options(self):
options = self.options
print 'Options and default settings:'
if not options:
print ' None'
return
long = filter(lambda x: x.prefix == '--', options)
short = filter(lambda x: x.prefix == '-', options)
items = short + long
        for o in items:
print ' ',o
print
#
# Example handlers:
#
# If a handler returns anything other than None, processing stops
# and the return value is passed to sys.exit() as argument.
#
# File handler
def handle_files(self,files):
""" This may process the files list in place.
"""
return None
# Short option handler
def handle_h(self,arg):
self.help()
return 0
def handle_v(self, value):
""" Turn on verbose output.
"""
self.verbose = 1
# Handlers for long options have two underscores in their name
def handle__help(self,arg):
self.help()
return 0
def handle__debug(self,arg):
self.debug = 1
# We don't want to catch internal errors:
self.InternalError = None
def handle__copyright(self,arg):
self.print_header()
print string.strip(self.copyright % self.__dict__)
print
return 0
def handle__examples(self,arg):
self.print_header()
if self.examples:
print 'Examples:'
print
print string.strip(self.examples % self.__dict__)
print
else:
print 'No examples available.'
print
return 0
def main(self):
""" Override this method as program entry point.
The return value is passed to sys.exit() as argument. If
it is None, 0 is assumed (meaning OK). Unhandled
exceptions are reported with exit status code 1 (see
__init__ for further details).
"""
return None
# Alias
CommandLine = Application
def _test():
class MyApplication(Application):
header = 'Test Application'
version = __version__
options = [Option('-v','verbose')]
def handle_v(self,arg):
print 'VERBOSE, Yeah !'
cmd = MyApplication()
if not cmd.values['-h']:
cmd.help()
print 'files:',cmd.files
print 'Bye...'
if __name__ == '__main__':
_test()
|
jlspyaozhongkai/Uter
|
third_party_backup/Python-2.7.9/Tools/pybench/CommandLine.py
|
Python
|
gpl-3.0
| 16,672
|
#!/usr/bin/env python
#
# Creates .wxs files to be used to generate multiple MSI targets
#
# by default the script will check for dist and enterprise-dist, and parse
# the version as needed; options are provided to give a build version that will
# download the zip, drop it into dist/enterprise-dist and do the same thing
#
# Expected paths and names
# /tmp/dist/grafana-6.0.0-ca0bc2c5pre3.windows-amd64.zip
# /tmp/enterprise-dist/grafana-enterprise-6.0.0-29b28127pre3.windows-amd64.zip
#
# Optionally (mainly for testing), pass arguments to pull a specific build
# -b,--build 5.4.3
# -e,--enterprise add this flag to specify enterprise
# -p,--premium, add this flag to include premium plugins
#
# When using the build option, the zip file is created in either dist or
# dist-enterprise according to the -e flag toggle.
#
# https://s3-us-west-2.amazonaws.com/grafana-releases/release/
# grafana-{}.windows-amd64.zip
#
# https://dl.grafana.com/enterprise/release/
# grafana-enterprise-{}.windows-amd64.zip
#
import os
import shutil
import argparse
from jinja2 import Environment, FileSystemLoader
from utils import *
#############################
# Constants - DO NOT CHANGE #
#############################
OSS_UPGRADE_VERSION = '35c7d2a9-6e23-4645-b975-e8693a1cef10'
OSS_PRODUCT_NAME = 'Grafana OSS'
ENTERPRISE_UPGRADE_VERSION = 'd534ec50-476b-4edc-a25e-fe854c949f4f'
ENTERPRISE_PRODUCT_NAME = 'Grafana Enterprise'
#############################
# CONSTANTS
#############################
MSI_GENERATOR_VERSION = '1.0.0'
#############################
# PATHS
#############################
WIX_HOME = '/home/xclient/wix'
WINE_CMD = '/usr/bin/wine64' # or just wine for 32bit
CANDLE = '{} {}/candle.exe'.format(WINE_CMD, WIX_HOME)
LIGHT = '{} {}/light.exe'.format(WINE_CMD, WIX_HOME)
HEAT = '{} {}/heat.exe'.format(WINE_CMD, WIX_HOME)
NSSM_VERSION = '2.24'
DIST_LOCATION = '/tmp/dist'
#############################
#
#############################
grafana_oss = {
'feature_component_group_refs': [
'GrafanaX64',
'GrafanaServiceX64',
'GrafanaFirewallExceptionsGroup'
],
'directory_refs': [
'GrafanaX64Dir'
],
'components': [
'grafana.wxs',
'grafana-service.wxs',
'grafana-firewall.wxs'
]
}
#
# Grafana 6 includes new datasources with long paths
#
def remove_long_paths():
print('Removing long pathed files - these are not needed to run grafana')
long_files = [
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_querystring_builder.test.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_querystring_builder.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_log_analytics/azure_log_analytics_datasource.test.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_log_analytics/azure_log_analytics_datasource.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.test.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_datasource.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.test.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/insights_analytics/insights_analytics_datasource.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_filter_builder.test.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/azure_monitor/azure_monitor_filter_builder.ts',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/AnalyticsConfig.test.tsx',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/AzureCredentialsForm.test.tsx',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/InsightsConfig.test.tsx',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/__snapshots__/AnalyticsConfig.test.tsx.snap',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/__snapshots__/AzureCredentialsForm.test.tsx.snap',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/__snapshots__/InsightsConfig.test.tsx.snap',
'/tmp/a/grafana/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/__snapshots__/ConfigEditor.test.tsx.snap'
]
for file in long_files:
if os.path.isfile(file):
print('Removing: {}'.format(file))
os.remove(file)
else:
print('Skipped: {}'.format(file))
def build_msi(zip_file, extracted_name, PRODUCT_VERSION, grafana_hash, config, features, is_enterprise):
# keep reference to source directory, will need to switch back and
# forth during the process
src_dir = os.getcwd()
# target_dir = tempfile.TemporaryDirectory()
if not os.path.isdir('/tmp/a'):
os.mkdir('/tmp/a')
target_dir_name = '/tmp/a'
extract_zip(zip_file, target_dir_name)
os.system('ls -al /tmp/a')
# the zip file contains a version, which will not work when upgrading,
# and ends up with paths longer
# than light.exe can parse (windows issue)
# Once extracted, rename it to grafana without the version included
zip_file_path = '{}/{}'.format(target_dir_name, extracted_name)
rename_to = '{}/grafana'.format(target_dir_name)
print('Renaming extracted path {} to {}'.format(zip_file_path, rename_to))
    print('Before:')
    os.system('ls -al /tmp/a')
os.rename(zip_file_path, rename_to)
print('After:')
os.system('ls -al /tmp/a')
# cleanup due to MSI API limitation
remove_long_paths()
#
# HEAT
#
# Collects the files from the path given and generates wxs file
#
print('Heat Harvesting')
cgname = 'GrafanaX64'
cgdir = 'GrafanaX64Dir'
if not os.path.isdir('/tmp/scratch'):
os.mkdir('/tmp/scratch')
os.chdir('/tmp/scratch')
outfile = 'grafana-oss.wxs'
# important flags
# -srd - prevents the parent directory name from being included in the
# harvest
# -cg - component group to be referenced in main wxs file
# -fr - directory ref to be used in main wxs file
try:
cmd = '''
{} dir {} \
-platform x64 \
-sw5150 \
-srd \
-cg {} \
-gg \
-sfrag \
-dr {} \
-template fragment \
-out {}'''.strip().format(HEAT, target_dir_name, cgname, cgdir, outfile)
print(cmd)
os.system(cmd)
except Exception as ex:
print(ex)
shutil.copy2(outfile, target_dir_name)
nssm_file = get_nssm('/tmp/cache', NSSM_VERSION)
if not os.path.isdir(target_dir_name + '/nssm'):
os.mkdir(target_dir_name + '/nssm')
extract_zip(nssm_file, target_dir_name + '/nssm')
print('HARVEST COMPLETE')
os.chdir(src_dir)
generate_firewall_wxs(env, PRODUCT_VERSION, '/tmp/scratch/grafana-firewall.wxs', target_dir_name)
generate_service_wxs(env, PRODUCT_VERSION, '/tmp/scratch/grafana-service.wxs', target_dir_name, NSSM_VERSION)
generate_product_wxs(env, config, features, '/tmp/scratch/product.wxs', target_dir_name)
print('GENERATE COMPLETE')
copy_static_files(target_dir_name)
print('COPY STATIC COMPLETE')
#
# CANDLE needs to run in the scratch dir
os.chdir('/tmp/scratch')
try:
filename = 'grafana-service.wxs'
cmd = '{} -ext WixFirewallExtension -ext WixUtilExtension -v -arch x64 {}'.format(CANDLE, filename)
print(cmd)
os.system(cmd)
shutil.copy2('grafana-service.wixobj', target_dir_name)
#
filename = 'grafana-firewall.wxs'
cmd = '{} -ext WixFirewallExtension -ext WixUtilExtension -v -arch x64 {}'.format(
CANDLE,
filename)
print(cmd)
os.system(cmd)
shutil.copy2('grafana-firewall.wixobj', target_dir_name)
#
filename = 'grafana-oss.wxs'
cmd = '{} -ext WixFirewallExtension -ext WixUtilExtension -v -arch x64 {}'.format(
CANDLE,
filename)
print(cmd)
os.system(cmd)
shutil.copy2('grafana-oss.wixobj', target_dir_name)
#
filename = 'product.wxs'
cmd = '{} -ext WixFirewallExtension -ext WixUtilExtension -v -arch x64 {}'.format(
CANDLE,
filename)
print(cmd)
os.system(cmd)
shutil.copy2('product.wixobj', target_dir_name)
except Exception as ex:
print(ex)
print('CANDLE COMPLETE')
############################
# LIGHT - Assemble the MSI
############################
os.chdir(target_dir_name)
os.system('cp -pr nssm/nssm-2.24 .')
try:
cmd = '''
{} \
-cultures:en-US \
-ext WixUIExtension.dll -ext WixFirewallExtension -ext WixUtilExtension \
-v -sval -spdb \
grafana-service.wixobj \
grafana-firewall.wixobj \
grafana-oss.wixobj \
product.wixobj \
-out grafana.msi'''.strip().format(LIGHT)
print(cmd)
os.system(cmd)
except Exception as ex:
print(ex)
hash = ''
if grafana_hash:
hash = '-{}'.format(grafana_hash)
# copy to scratch with version included
msi_filename = '/tmp/scratch/grafana-{}{}.windows-amd64.msi'.format(PRODUCT_VERSION, hash)
if is_enterprise:
msi_filename = '/tmp/scratch/grafana-enterprise-{}{}.windows-amd64.msi'.format(PRODUCT_VERSION, hash)
shutil.copy2('grafana.msi', msi_filename)
os.system('ls -al /tmp/scratch')
print('LIGHT COMPLETE')
# finally cleanup
# extract_dir.cleanup()
def main(file_loader, env, grafana_version, grafana_hash, zip_file, extracted_name, is_enterprise):
UPGRADE_VERSION = OSS_UPGRADE_VERSION
GRAFANA_VERSION = grafana_version
PRODUCT_TITLE = OSS_PRODUCT_NAME
PRODUCT_NAME = 'GrafanaOSS'
# PRODUCT_VERSION=GRAFANA_VERSION
# MSI version cannot have anything other
# than a x.x.x.x format, numbers only
PRODUCT_VERSION = GRAFANA_VERSION.split('-')[0]
LICENSE = 'LICENSE.rtf'
if is_enterprise:
UPGRADE_VERSION = ENTERPRISE_UPGRADE_VERSION
PRODUCT_TITLE = ENTERPRISE_PRODUCT_NAME
PRODUCT_NAME = 'GrafanaEnterprise'
LICENSE = 'EE_LICENSE.rtf'
config = {
'grafana_version': PRODUCT_VERSION,
'upgrade_code': UPGRADE_VERSION,
'product_name': PRODUCT_NAME,
'manufacturer': 'Grafana Labs',
'license': LICENSE
}
features = [
{
'name': PRODUCT_NAME,
'title': PRODUCT_TITLE,
'component_groups': [
{
'ref_id': 'GrafanaX64',
'directory': 'GrafanaX64Dir'
}
]
},
{
'name': 'GrafanaService',
'title': 'Run Grafana as a Service',
'component_groups': [
{
'ref_id': 'GrafanaServiceX64',
'directory': 'GrafanaServiceX64Dir'
}
]
}
]
build_msi(zip_file, extracted_name, PRODUCT_VERSION, grafana_hash, config, features, is_enterprise)
if __name__ == '__main__':
print('MSI Generator Version: {}'.format(MSI_GENERATOR_VERSION))
parser = argparse.ArgumentParser(
description='Grafana MSI Generator',
formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=90, width=110), add_help=True)
parser.add_argument(
'-p',
'--premium',
help='Include premium plugins',
dest='premium', action='store_true')
parser.add_argument('-b', '--build', help='build to download')
args = parser.parse_args()
file_loader = FileSystemLoader('templates')
env = Environment(loader=file_loader)
grafana_version = None
grafana_hash = None
is_enterprise = False
if not os.path.isdir(DIST_LOCATION):
os.mkdir(DIST_LOCATION)
# if a build version is specified, pull it
if args.build:
grafana_version = args.build
print('Version Specified: {}'.format(grafana_version))
else:
grafana_version, grafana_hash, is_enterprise = detect_version(DIST_LOCATION)
print('Detected Version: {}'.format(grafana_version))
if grafana_hash:
print('Detected Hash: {}'.format(grafana_hash))
print('Enterprise: {}'.format(is_enterprise))
if is_enterprise:
if grafana_hash:
zip_file = '{}/grafana-enterprise-{}-{}.windows-amd64.zip'.format(DIST_LOCATION, grafana_version, grafana_hash)
extracted_name = 'grafana-{}-{}'.format(grafana_version, grafana_hash)
else:
zip_file = '{}/grafana-enterprise-{}.windows-amd64.zip'.format(DIST_LOCATION, grafana_version)
extracted_name = 'grafana-{}'.format(grafana_version)
else:
# the file can have a build hash
if grafana_hash:
zip_file = '{}/grafana-{}-{}.windows-amd64.zip'.format(DIST_LOCATION, grafana_version, grafana_hash)
extracted_name = 'grafana-{}-{}'.format(grafana_version, grafana_hash)
else:
zip_file = '{}/grafana-{}.windows-amd64.zip'.format(DIST_LOCATION, grafana_version)
extracted_name = 'grafana-{}'.format(grafana_version)
print('ZipFile: {}'.format(zip_file))
# check if file downloaded
if not os.path.isfile(zip_file):
zip_file = get_zip(grafana_version, zip_file)
main(file_loader, env, grafana_version, grafana_hash, zip_file, extracted_name, is_enterprise)
|
grafana/grafana
|
scripts/build/ci-msi-build/msigenerator/generator/build.py
|
Python
|
agpl-3.0
| 14,202
|
import sublime
import unittest
import os
import sys
class TestImport(unittest.TestCase):
mpath = None
@classmethod
def setUpClass(cls):
basedir = os.path.dirname(__file__)
mpath = os.path.normpath(os.path.join(
basedir, "..", "st3_{}_{}".format(sublime.platform(), sublime.arch())))
if mpath not in sys.path:
cls.mpath = mpath
sys.path.append(mpath)
def test_import(self):
from winpty import PtyProcess
self.assertTrue("winpty" in sys.modules)
proc = PtyProcess.spawn('cmd.exe')
self.assertTrue(proc.isalive())
proc.terminate(True)
@classmethod
def tearDownClass(cls):
if not cls.mpath:
return
mpath = cls.mpath
if mpath in sys.path:
sys.path.remove(mpath)
if "winpty" in sys.modules:
del sys.modules["winpty"]
|
dmilith/SublimeText3-dmilith
|
Packages/Debugger/modules/libs/pywinpty/tests/test_import.py
|
Python
|
mit
| 909
|
# coding: utf-8
__author__ = 'AS126'
from urllib.request import *
import random
def reqs(url, encode='utf-8'):
request = Request(url)
ua_list = [
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2467.2 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko']
request.add_header('User-Agent', random.choice(ua_list))
return urlopen(request).read().decode(encode)
|
fourstring/free-vip-getter
|
sources/req.py
|
Python
|
gpl-2.0
| 729
|
#!env/bin/python
import os
os.system('babel extract -F babel.cfg -k lazy_gettext -o messages.pot app')
os.system('babel init -i messages.pot -d app/locale -l ru')
os.unlink('messages.pot')
|
dnaroid/laser-server
|
tr_init.py
|
Python
|
bsd-3-clause
| 190
|
import numpy as np
from math import sqrt
# numpy formatting trick: print floats with two decimal places
float_formatter = lambda x: "%.2f" % x
np.set_printoptions(formatter={'float_kind':float_formatter})
def euclidean_distance(a, b):
"""
Calculate the Euclidean distance of two vectors
"""
distance = 0
for x, y in zip(a, b):
distance += (x - y) ** 2
return sqrt(distance)
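# Quick illustrative check of euclidean_distance:
#   euclidean_distance([0, 0], [3, 4]) -> 5.0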
def get_distance_matrix(points):
"""
From a group of points calculate the distance matrix between them
"""
symbols = "ABCDEFGHIJKLMNOP"
w = h = len(points)
distance_matrix = np.ndarray(shape=(w, h))
print ' ' * 5,
for x in range(len(points)):
print ("A%i" % (x+1)).ljust(5, ' '),
print
for x, p in enumerate(points):
print ("A%i" % (x+1)).ljust(5, ' '),
for y, q in enumerate(points):
d = euclidean_distance(p, q)
print ("%.2f" % d).ljust(5, ' '),
distance_matrix[x, y] = d
print
return distance_matrix
dataset = np.genfromtxt(
"dummy.data", dtype=float,
delimiter=',', usecols = (0, 1)
)
def gdbscan(set_of_points, n_pred, min_card, w_card):
"""
Do the clustering with GDBSCAN algorithm proposed by
set_of_points,
n_pred,
min_card,
w_card,
"""
# print set_of_points
# create a group of indexes
is_classified = np.zeros((dataset.shape[0], 1), dtype=bool)
# print is_classified
noise = np.zeros((1, is_classified.shape[1]))
# print noise
cluster = {}
# cluster_id = next_id(noise), why this?
cluster_id = 0
for point, classified in zip(set_of_points, is_classified):
if not classified:
if expand_cluster(
set_of_points, point, cluster_id,
n_pred, min_card, w_card
):
cluster_id = cluster_id + 1
def expand_cluster(set_of_points, point, cluster_id, n_pred, min_card, w_card):
pass
def w_card(points):
return len(points)
def neighborhood(index, epsilon):
distances = get_distance_matrix(dataset)[index]
return np.where(distances < epsilon)[0]
min_card = 4
n_pred = 3
gdbscan(dataset, n_pred, min_card, w_card)
|
omartrinidad/schiffsdiebe
|
schiffsdiebe/clustering/dbscan/dbscan.py
|
Python
|
mit
| 2,216
|
# coding: utf8
# skwinsec.py
# 12/11/2012 jichi
# Windows only
SE_DEBUG_PRIVILEGE = 'SeDebugPrivilege'
SE_TIMEZONE_PRIVILEGE = 'SeTimeZonePrivilege'
import skos
if skos.WIN:
import os
import win32api, win32con, win32security
import skwin, skwinapi
from skdebug import dprint, dwarn
# See: http://code.msdn.microsoft.com/windowsdesktop/CppUACSelfElevation-981c0160/sourcecode?fileId=21590&pathId=2074248535
def elevate(path=None, params=None, cwd=None, hwnd=0):
"""
@param path str or None If none, elevate current process
@param cwd str or None If none, use the dirname of path
@param hwnd parent window or 0
@return bool
"""
op = 'runas'
path = path or win32api.GetModuleFileName(None) # python path
cwd = cwd or (os.path.dirname(path) if path else "")
try:
# See: http://timgolden.me.uk/pywin32-docs/win32api__ShellExecute_meth.html
win32api.ShellExecute(hwnd, op, path, params, cwd, win32con.SW_SHOW)
return True
except:
return False
# See: http://code.msdn.microsoft.com/windowsdesktop/CppUACSelfElevation-981c0160/sourcecode?fileId=21590&pathId=2074248535
def is_elevated():
"""
@return bool whether the current process is elevated, or None on XP
"""
try:
ph = win32api.GetCurrentProcess()
tok = win32security.OpenProcessToken(ph, win32con.TOKEN_QUERY)
# type(tic) == TokenInformationClass
b = win32security.GetTokenInformation(tok, win32security.TokenElevation)
return bool(b)
except: pass # could be windows XP
class _SkProcessElevator: pass
class SkProcessElevator:
def __init__(self, priv):
"""
@param priv unicode or [unicode] or None
"""
d = self.__d = _SkProcessElevator()
if type(priv) in (str, unicode):
priv = [priv]
d.token = None # process token
try:
d.privileges = [( # current or previous privileges
win32security.LookupPrivilegeValue('', p),
win32con.SE_PRIVILEGE_ENABLED,
) for p in priv] if priv else [] # [] not None
except Exception, e: # pywintypes.error
dwarn(e)
d.privileges = None
def __enter__(self):
d = self.__d
if not d.privileges:
dwarn("failed to elevate privilege. This is might be a Windows XP machine")
return
# See: http://msdn.microsoft.com/ja-jp/library/windows/desktop/ms724944%28v=vs.85%29.aspx
# See: http://nullege.com/codes/search/win32security.AdjustTokenPrivileges
# See: http://www.oschina.net/code/explore/chromium.r67069/third_party/python_24/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py
#pid = win32api.GetCurrentProcessId()
#ph = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, pid)
ph = win32api.GetCurrentProcess()
#d.token = win32security.OpenProcessToken(ph, win32con.TOKEN_ALL_ACCESS)
d.token = win32security.OpenProcessToken(ph,
win32con.TOKEN_ADJUST_PRIVILEGES|win32con.TOKEN_QUERY)
d.privileges = win32security.AdjustTokenPrivileges(d.token, 0, d.privileges)
if win32api.GetLastError():
dwarn("failed to elevate process privilege")
else:
dprint("process privileges elevated")
return self
def __exit__(self, *err):
d = self.__d
if d.token:
if d.privileges is not None:
win32security.AdjustTokenPrivileges(d.token, 0, d.privileges)
try: win32api.CloseHandle(d.token)
except Exception, e: dwarn("windows error:", e)
d.token = None
def isElevated(self):
"""
@return bool whether a process token is held
"""
return bool(self.__d.token)
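# Typical usage (as done in injectfunc1 below):
#   with SkProcessElevator(SE_DEBUG_PRIVILEGE) as priv:
#       if priv.isElevated():
#           ...  # privileged work here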
PROCESS_INJECT_ACCESS = (
win32con.PROCESS_CREATE_THREAD |
win32con.PROCESS_QUERY_INFORMATION |
win32con.PROCESS_VM_OPERATION |
win32con.PROCESS_VM_WRITE |
win32con.PROCESS_VM_READ)
INJECT_TIMEOUT = 3000 # 3 seconds
def getModuleFunctionAddress(func, module=None):
"""
@param func str
@param module str
@return int address
"""
return win32api.GetProcAddress(
win32api.GetModuleHandle(module),
func)
# See: inject.cpp from translation aggregator
def injectfunc1(addr, arg, argsize, pid=0, handle=None, timeout=INJECT_TIMEOUT):
"""Inject function with 1 argument
Either pid or the process handle should be specified
@param addr LONG function memory address
@param arg LPVOID
@param argsize int
@param pid LONG
@param handle HANDLE
@param timeout int msecs
@return bool
"""
dprint("enter: pid = %s" % pid)
isLocalHandle = False # bool
if not handle and pid:
isLocalHandle = True
try:
handle = win32api.OpenProcess(PROCESS_INJECT_ACCESS, 0, pid)
if not handle:
with SkProcessElevator(SE_DEBUG_PRIVILEGE) as priv:
if priv.isElevated():
handle = win32api.OpenProcess(PROCESS_INJECT_ACCESS, 0, pid)
except Exception, e:
dwarn("windows error:", e)
if not handle:
dprint("exit: error: failed to get process handle")
return False
ret = False
hProcess = handle
try:
data = arg
dataSize = argsize
# Reserved & commit
# http://msdn.microsoft.com/en-us/library/windows/desktop/aa366803%28v=vs.85%29.aspx
# http://msdn.microsoft.com/en-us/library/ms810627.aspx
remoteData = skwinapi.VirtualAllocEx(
hProcess, # process
None, # __in_opt address
dataSize, # data size
win32con.MEM_RESERVE|win32con.MEM_COMMIT,
win32con.PAGE_READWRITE)
if remoteData:
if skwinapi.WriteProcessMemory(hProcess, remoteData, data, dataSize, None):
hThread = skwinapi.CreateRemoteThread(
hProcess,
None, 0,
skwinapi.LPTHREAD_START_ROUTINE(addr),
remoteData,
0, None)
if hThread:
skwinapi.WaitForSingleObject(hThread, timeout)
win32api.CloseHandle(hThread)
ret = True
skwinapi.VirtualFreeEx(hProcess, remoteData, dataSize, win32con.MEM_RELEASE)
except Exception, e:
dwarn("windows error:", e)
if isLocalHandle: # only close the handle if I create it
try: win32api.CloseHandle(hProcess)
except Exception, e: dwarn("windows error:", e)
dprint("exit: ret = ok")
return ret
# See: inject.cpp from translation aggregator
def injectdll(dllpath, pid=0, handle=None, timeout=INJECT_TIMEOUT):
"""Either pid or the process handle should be specified
@param dllpath unicode ABSOLUTE path to dll
@param pid LONG
@param handle HANDLE
@param timeout int msecs
@return bool
"""
#if not dllpath or not os.path.exists(dllpath):
# dwarn("error: dll does not exist")
# return False
dprint("enter: pid = %s" % pid)
try: dllpath = dllpath.decode('utf8')
except UnicodeDecodeError:
dwarn("exit: error: failed to decode dll path to utf8")
return False
LOADLIBRARYW = getModuleFunctionAddress('LoadLibraryW', 'kernel32.dll')
if not LOADLIBRARYW:
dprint("exit error: cannot find LoadLibraryW from kernel32")
return False
data = dllpath
dataSize = len(dllpath) * 2 + 2 # L'\0'
ok = injectfunc1(LOADLIBRARYW, data, dataSize, pid=pid, handle=handle, timeout=timeout)
dprint("exit: ret = ok")
return ok
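# Illustrative usage (the dll path and pid are placeholders):
#   injectdll(u'C:\\hooks\\agent.dll', pid=1234)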
def unloaddll(dllhandle, pid=0, handle=None, timeout=INJECT_TIMEOUT):
"""Either pid or the process handle should be specified
@param dllhandle handle of the injected dll
@param pid LONG
@param handle HANDLE
@param timeout int msecs
@return bool
"""
dprint("enter: pid = %s" % pid)
# FreeLibrary, not LoadLibraryW, releases a module previously injected with injectdll
FREELIBRARY = getModuleFunctionAddress('FreeLibrary', 'kernel32.dll')
if not FREELIBRARY:
dprint("exit: error: cannot find FreeLibrary in kernel32")
return False
data = dllhandle
dataSize = 4 # size of DWORD
ok = injectfunc1(FREELIBRARY, data, dataSize, pid=pid, handle=handle, timeout=timeout)
dprint("exit: ret = %s" % ok)
return ok
class _SkProcessCreator: pass
class SkProcessCreator:
def __init__(self, path, *args, **kwargs):
"""
@param path unicode
@param args passed to skwin.create_process
@param kwargs passed to skwin.create_process
"""
d = self.__d = _SkProcessCreator()
d.path = path
d.args = args
d.kwargs = kwargs
def __enter__(self):
d = self.__d
d.hProcess, d.hThread, d.dwProcessId, d.dwThreadId = skwin.create_process(
d.path, suspended=True, complete=True, *d.args, **d.kwargs)
dprint("pid = %i" % d.dwProcessId)
return self
def __exit__(self, *err):
d = self.__d
if d.hThread:
ok = skwin.resume_thread(d.hThread)
dprint("resume thread: %s" % ok)
@property
def path(self): return self.__d.path
@property
def processId(self): return self.__d.dwProcessId
@property
def processHandle(self): return self.__d.hProcess
@property
def threadId(self): return self.__d.dwThreadId
@property
def threadHandle(self): return self.__d.hThread
# EOF
|
Dangetsu/vnr
|
Frameworks/Sakura/py/libs/sakurakit/skwinsec.py
|
Python
|
gpl-3.0
| 9,300
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio mail sending utilities.
send_email() is the main API function people should be using; just check out
its docstring.
"""
from __future__ import absolute_import
import os
import re
import sys
import urllib2
from email import Encoders
from email.Header import Header
from email.MIMEBase import MIMEBase
from email.MIMEImage import MIMEImage
from email.MIMEMultipart import MIMEMultipart
from email.Utils import formatdate
from formatter import AbstractFormatter, DumbWriter
from mimetypes import MimeTypes
from time import sleep
from flask_email.message import EmailMessage, EmailMultiAlternatives
from invenio_base.globals import cfg
from invenio_base.helpers import unicodifier
from invenio_base.i18n import _
from six import StringIO, iteritems
from .errors import EmailError
def default_ln(ln):
"""Default ln."""
return cfg.get('CFG_SITE_LANG') if ln is None else ln
def getContentType(pageUrl):
"""Get content type."""
page = urllib2.urlopen(pageUrl)
pageHeaders = page.headers
contentType = pageHeaders.getheader('content-type')
return contentType
def setup_app(app):
"""
Prepare application config from Invenio configuration.
@see: https://flask-email.readthedocs.org/en/latest/#configuration
"""
cfg = app.config
app.config.setdefault('EMAIL_BACKEND', cfg.get(
'CFG_EMAIL_BACKEND', 'flask_email.backends.smtp.Mail'))
app.config.setdefault('DEFAULT_FROM_EMAIL', cfg['CFG_SITE_SUPPORT_EMAIL'])
app.config.setdefault('SERVER_EMAIL', cfg['CFG_SITE_ADMIN_EMAIL'])
app.config.setdefault('ADMINS', (('', cfg['CFG_SITE_ADMIN_EMAIL']),))
app.config.setdefault('MANAGERS', (cfg['CFG_SITE_SUPPORT_EMAIL'], ))
CFG_MISCUTIL_SMTP_HOST = cfg.get('CFG_MISCUTIL_SMTP_HOST')
CFG_MISCUTIL_SMTP_PORT = cfg.get('CFG_MISCUTIL_SMTP_PORT')
CFG_MISCUTIL_SMTP_USER = cfg.get('CFG_MISCUTIL_SMTP_USER', '')
CFG_MISCUTIL_SMTP_PASS = cfg.get('CFG_MISCUTIL_SMTP_PASS', '')
CFG_MISCUTIL_SMTP_TLS = cfg.get('CFG_MISCUTIL_SMTP_TLS', False)
app.config.setdefault('EMAIL_HOST', CFG_MISCUTIL_SMTP_HOST)
app.config.setdefault('EMAIL_PORT', CFG_MISCUTIL_SMTP_PORT)
app.config.setdefault('EMAIL_HOST_USER', CFG_MISCUTIL_SMTP_USER)
app.config.setdefault('EMAIL_HOST_PASSWORD', CFG_MISCUTIL_SMTP_PASS)
app.config.setdefault('EMAIL_USE_TLS', CFG_MISCUTIL_SMTP_TLS)
# app.config['EMAIL_USE_SSL']: defaults to False
app.config.setdefault('EMAIL_FILE_PATH', cfg['CFG_LOGDIR'])
return app
def scheduled_send_email(fromaddr,
toaddr,
subject="",
content="",
header=None,
footer=None,
copy_to_admin=0,
attempt_times=1,
attempt_sleeptime=10,
user=None,
other_bibtasklet_arguments=None,
replytoaddr="",
bccaddr="",
):
"""
Like send_email, but send an email via the bibsched
infrastructure.
@param fromaddr: sender
@type fromaddr: string
@param toaddr: list of receivers
@type toaddr: string (comma separated) or list of strings
@param subject: the subject
@param content: the body of the message
@param header: optional header, otherwise default is used
@param footer: optional footer, otherwise default is used
@param copy_to_admin: set to 1 in order to send email to the admins
@param attempt_times: try at most n times before giving up sending
@param attempt_sleeptime: number of seconds to sleep between two attempts
@param user: the user name to use when scheduling the bibtasklet. If
None, the sender will be used
@param other_bibtasklet_arguments: other arguments to append to the list
of arguments to the call of task_low_level_submission
@param replytoaddr: [string or list-of-strings] to be used for the
reply-to header of the email (if string, then
receivers are separated by ',')
@param bccaddr: [string or list-of-strings] to be used for BCC header
of the email
(if string, then receivers are separated by ',')
@return: the scheduled bibtasklet
"""
raise RuntimeError('scheduled_send_email has been removed.')
def send_email(fromaddr,
toaddr,
subject="",
content="",
html_content='',
html_images=None,
header=None,
footer=None,
html_header=None,
html_footer=None,
copy_to_admin=0,
attempt_times=1,
attempt_sleeptime=10,
debug_level=0,
ln=None,
charset=None,
replytoaddr="",
attachments=None,
bccaddr="",
forward_failures_to_admin=True,
):
"""Send a forged email to TOADDR from FROMADDR with message created from subjet, content and possibly
header and footer.
@param fromaddr: [string] sender
@param toaddr: [string or list-of-strings] list of receivers (if string, then
receivers are separated by ','). BEWARE: If more than once receiptiant is given,
the receivers are put in BCC and To will be "Undisclosed.Recipients:".
@param subject: [string] subject of the email
@param content: [string] content of the email
@param html_content: [string] html version of the email
@param html_images: [dict] dictionary of image id, image path
@param header: [string] header to add, None for the Default
@param footer: [string] footer to add, None for the Default
@param html_header: [string] header to add to the html part, None for the Default
@param html_footer: [string] footer to add to the html part, None for the Default
@param copy_to_admin: [int] if 1 add CFG_SITE_ADMIN_EMAIL in receivers
@param attempt_times: [int] number of tries
@param attempt_sleeptime: [int] seconds in between tries
@param debug_level: [int] debug level
@param ln: [string] invenio language
@param charset: [string] the content charset. The default is None, which means
try to encode the email as ascii, then latin1, then utf-8.
@param replytoaddr: [string or list-of-strings] to be used for the
reply-to header of the email (if string, then
receivers are separated by ',')
@param attachments: list of paths of files to be attached. Alternatively,
every element of the list could be a tuple: (filename, mimetype)
@param bccaddr: [string or list-of-strings] to be used for BCC header of the email
(if string, then receivers are separated by ',')
@param forward_failures_to_admin: [bool] prevents infinite recursion
in case of admin reporting,
when the problem is not in
the e-mail address format,
but rather in the network
If sending fails, try to send it ATTEMPT_TIMES times, and wait for
ATTEMPT_SLEEPTIME seconds in between tries.
e.g.:
send_email('foo.bar@cern.ch', 'bar.foo@cern.ch', 'Let\'s try!', 'check 1234', '<strong>check</strong> <em>1234</em><img src="cid:image1">', {'image1': '/tmp/quantum.jpg'})
@return: [bool]: True if email was sent okay, False if it was not.
"""
from invenio_ext.logging import register_exception
ln = default_ln(ln)
if html_images is None:
html_images = {}
if not isinstance(toaddr, list):
toaddr = toaddr.strip().split(',')
toaddr = remove_temporary_emails(toaddr)
# More than one address, let's use Bcc in place of To
usebcc = len(toaddr) > 1
if copy_to_admin:
if cfg['CFG_SITE_ADMIN_EMAIL'] not in toaddr:
toaddr.append(cfg['CFG_SITE_ADMIN_EMAIL'])
if not isinstance(bccaddr, list):
bccaddr = bccaddr.strip().split(',')
msg = forge_email(fromaddr, toaddr, subject, content, html_content,
html_images, usebcc, header, footer, html_header,
html_footer, ln, charset, replytoaddr, attachments,
bccaddr)
if attempt_times < 1 or not toaddr:
try:
raise EmailError(_(
'The system is not attempting to send an email from %(x_from)s'
', to %(x_to)s, with body %(x_body)s.', x_from=fromaddr,
x_to=toaddr, x_body=content))
except EmailError:
register_exception()
return False
sent = False
failure_reason = ''
while not sent and attempt_times > 0:
try:
sent = msg.send()
except Exception as e:
failure_reason = str(e)
register_exception()
if debug_level > 1:
try:
raise EmailError(_('Error in sending message. \
Waiting %(sec)s seconds. Exception is %(exc)s, \
while sending email from %(sender)s to %(recipient)s \
with body %(email_body)s.',
sec=attempt_sleeptime,
exc=sys.exc_info()[0],
sender=fromaddr,
recipient=toaddr,
email_body=content))
except EmailError:
register_exception()
if not sent:
attempt_times -= 1
if attempt_times > 0: # sleep only if we shall retry again
sleep(attempt_sleeptime)
if not sent:
# report failure to the admin with the intended message, its
# sender and recipients
if forward_failures_to_admin:
# prepend '> ' to every line of the original message
quoted_body = '> ' + '> '.join(content.splitlines(True))
# define and fill in the report template
admin_report_subject = _(
'Error while sending an email: %(x_subject)s',
x_subject=subject)
admin_report_body = _(
"\nError while sending an email.\n"
"Reason: %(x_reason)s\n"
"Sender: \"%(x_sender)s\"\n"
"Recipient(s): \"%(x_recipient)s\"\n\n"
"The content of the mail was as follows:\n"
"%(x_body)s",
x_reason=failure_reason,
x_sender=fromaddr,
x_recipient=', '.join(toaddr),
x_body=quoted_body)
send_email(
cfg['CFG_SITE_ADMIN_EMAIL'],
cfg['CFG_SITE_ADMIN_EMAIL'],
admin_report_subject,
admin_report_body,
forward_failures_to_admin=False)
try:
raise EmailError(_(
'Error in sending email from %(x_from)s to %(x_to)s with body'
'%(x_body)s.', x_from=fromaddr, x_to=toaddr, x_body=content))
except EmailError:
register_exception()
return sent
def attach_embed_image(email, image_id, image_path):
"""
Attach an image to the email.
"""
with open(image_path, 'rb') as image_data:
img = MIMEImage(image_data.read())
img.add_header('Content-ID', '<%s>' % image_id)
img.add_header(
'Content-Disposition',
'attachment',
filename=os.path.split(image_path)[1])
email.attach(img)
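# e.g. attach_embed_image(msg, 'image1', '/tmp/quantum.jpg') pairs with an
# <img src="cid:image1"> reference in the HTML body, as in the send_email()
# docstring example above.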
def forge_email(fromaddr, toaddr, subject, content, html_content='',
html_images=None, usebcc=False, header=None, footer=None,
html_header=None, html_footer=None, ln=None,
charset=None, replytoaddr="", attachments=None, bccaddr=""):
"""Prepare email. Add header and footer if needed.
@param fromaddr: [string] sender
@param toaddr: [string or list-of-strings] list of receivers (if string, then
receivers are separated by ',')
@param usebcc: [bool] True for using Bcc in place of To
@param subject: [string] subject of the email
@param content: [string] content of the email
@param html_content: [string] html version of the email
@param html_images: [dict] dictionary of image id, image path
@param header: [string] None for the default header
@param footer: [string] None for the default footer
@param ln: language
@param charset: [string] the content charset. The default is None, which means
try to encode the email as ascii, then latin1, then utf-8.
@param replytoaddr: [string or list-of-strings] to be used for the
reply-to header of the email (if string, then
receivers are separated by ',')
@param attachments: list of paths of files to be attached. Alternatively,
every element of the list could be a tuple: (filename, mimetype)
@param bccaddr: [string or list-of-strings] to be used for BCC header of the email
(if string, then receivers are separated by ',')
@return: forged email as an EmailMessage object"""
from invenio_ext.template import render_template_to_string
ln = default_ln(ln)
if html_images is None:
html_images = {}
content = render_template_to_string('mail_text.tpl',
content=unicodifier(content),
header=unicodifier(header),
footer=unicodifier(footer)
).encode('utf8')
if isinstance(toaddr, list):
toaddr = ','.join(toaddr)
if isinstance(bccaddr, list):
bccaddr = ','.join(bccaddr)
if isinstance(replytoaddr, list):
replytoaddr = ','.join(replytoaddr)
toaddr = remove_temporary_emails(toaddr)
headers = {}
kwargs = {'to': [], 'cc': [], 'bcc': []}
if replytoaddr:
headers['Reply-To'] = replytoaddr
if usebcc:
headers['Bcc'] = bccaddr
kwargs['bcc'] = toaddr.split(',') + bccaddr.split(',')
kwargs['to'] = ['Undisclosed.Recipients:']
else:
kwargs['to'] = toaddr.split(',')
headers['From'] = fromaddr
headers['Date'] = formatdate(localtime=True)
headers['User-Agent'] = 'Invenio %s at %s' % (cfg['CFG_VERSION'],
cfg['CFG_SITE_URL'])
if html_content:
html_content = render_template_to_string(
'mail_html.tpl',
content=unicodifier(html_content),
header=unicodifier(html_header),
footer=unicodifier(html_footer)
).encode('utf8')
msg_root = EmailMultiAlternatives(subject=subject, body=content,
from_email=fromaddr,
headers=headers, **kwargs)
msg_root.attach_alternative(html_content, "text/html")
# if not html_images:
# # No image? Attach the HTML to the root
# msg_root.attach(msg_text)
# else:
if html_images:
# Image(s)? Attach the HTML and image(s) as children of a
# "related" block
msg_related = MIMEMultipart('related')
# msg_related.attach(msg_text)
for image_id, image_path in iteritems(html_images):
attach_embed_image(msg_related, image_id, image_path)
msg_root.attach(msg_related)
else:
msg_root = EmailMessage(subject=subject, body=content,
from_email=fromaddr, headers=headers, **kwargs)
if attachments:
_mimes = MimeTypes(strict=False)
for attachment in attachments:
try:
mime = None
if type(attachment) in (list, tuple):
attachment, mime = attachment
if mime is None:
# Automatic guessing of mimetype
mime = _mimes.guess_type(attachment)[0]
if mime is None:
ext = _mimes.guess_extension(getContentType(attachment))
mime = _mimes.guess_type("foo" + ext)[0]
if not mime:
mime = 'application/octet-stream'
part = MIMEBase(*mime.split('/', 1))
part.set_payload(open(attachment, 'rb').read())
Encoders.encode_base64(part)
part.add_header(
'Content-Disposition',
'attachment; filename="%s"' %
os.path.basename(attachment))
msg_root.attach(part)
except:
from invenio_ext.logging import register_exception
register_exception(
alert_admin=True,
prefix="Can't attach %s" %
attachment)
return msg_root
RE_NEWLINES = re.compile(r'<br\s*/?>|</p>', re.I)
RE_SPACES = re.compile(r'\s+')
RE_HTML_TAGS = re.compile(r'<.+?>')
def email_strip_html(html_content):
"""Strip html tags from html_content, trying to respect formatting."""
html_content = RE_SPACES.sub(' ', html_content)
html_content = RE_NEWLINES.sub('\n', html_content)
html_content = RE_HTML_TAGS.sub('', html_content)
html_content = html_content.split('\n')
out = StringIO()
out_format = AbstractFormatter(DumbWriter(out))
for row in html_content:
out_format.add_flowing_data(row)
out_format.end_paragraph(1)
return out.getvalue()
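# e.g. email_strip_html('<p>Hello <b>world</b></p>') yields roughly
# 'Hello world' followed by a blank line (exact spacing depends on the
# AbstractFormatter/DumbWriter pair used above).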
def remove_temporary_emails(emails):
"""
Removes the temporary emails (which are constructed randomly when a user logs in
with an external authentication provider that doesn't supply an email
address) from an email list.
@param emails: email list (if string, then receivers are separated by ',')
@type emails: [str]|str
@rtype: list|str
"""
from invenio_access.local_config import CFG_TEMP_EMAIL_ADDRESS
_RE_TEMPORARY_EMAIL = re.compile(CFG_TEMP_EMAIL_ADDRESS % r'.+?', re.I)
if type(emails) in (str, unicode):
emails = [email.strip()
for email in emails.split(',') if email.strip()]
emails = [
email for email in emails if not _RE_TEMPORARY_EMAIL.match(email)]
return ','.join(emails)
else:
return [
email for email in emails if not _RE_TEMPORARY_EMAIL.match(email)]
def get_mail_header(value):
"""
Return a MIME-compliant header-string. Will join lists of strings
into one string with comma (,) as separator.
"""
if not isinstance(value, basestring):
value = ','.join(value)
try:
value = value.encode('ascii')
except (UnicodeEncodeError, UnicodeDecodeError):
value = Header(value, 'utf-8')
return value
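# e.g. get_mail_header(['a@example.org', 'b@example.org']) -> 'a@example.org,b@example.org',
# while a non-ascii value comes back wrapped as Header(value, 'utf-8')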
|
egabancho/invenio-ext
|
invenio_ext/email/__init__.py
|
Python
|
gpl-2.0
| 20,022
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from tiles.urls import load_namespaced_urls
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'devdash.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'core.views.home', name='home'),
url(r'^login$', 'core.views.login', name='login_start'),
url(r'^login_continue$', 'core.views.login_continue', name='login_continue'),
url(r'^logout', 'core.views.logout_view', name='logout'),
url(r'^refresh', 'core.views.sync', name='sync'),
url('', include('social.apps.django_app.urls', namespace='social')),
url(r'^admin/', include(admin.site.urls)),
)
load_namespaced_urls(urlpatterns, 'github', 'osw', 'jenkins', 'kratos')
|
dgreisen-cfpb/pantheon
|
devdash/devdash/urls.py
|
Python
|
cc0-1.0
| 795
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render as django_render
from django.template import TemplateDoesNotExist
from funfactory.urlresolvers import split_path
from .dotlang import get_lang_path, get_translations
from .gettext import template_is_active
def render(request, template, context=None, **kwargs):
"""
Same as django's render() shortcut, but with l10n template support.
If used like this::
return l10n_utils.render(request, 'myapp/mytemplate.html')
... this helper will render the following template::
l10n/LANG/myapp/mytemplate.html
if present, otherwise, it'll render the specified (en-US) template.
"""
context = {} if context is None else context
# Make sure we have a single template
if isinstance(template, list):
template = template[0]
# Every template gets its own .lang file, so figure out what it is
# and pass it in the context
context['langfile'] = get_lang_path(template)
# Get the available translation list of the current page
context['translations'] = get_translations(context['langfile'])
# Look for localized template if not default lang.
if hasattr(request, 'locale') and request.locale != settings.LANGUAGE_CODE:
# redirect to default lang if locale not active
if not template_is_active(template, get_locale(request)):
return HttpResponseRedirect('/' + '/'.join([
settings.LANGUAGE_CODE,
split_path(request.get_full_path())[1]
]))
localized_tmpl = '%s/templates/%s' % (request.locale, template)
try:
return django_render(request, localized_tmpl, context, **kwargs)
except TemplateDoesNotExist:
# If not found, just go on and try rendering the parent template.
pass
return django_render(request, template, context, **kwargs)
def get_locale(request):
return getattr(request, 'locale', settings.LANGUAGE_CODE)
|
mmmavis/lightbeam-bedrock-website
|
lib/l10n_utils/__init__.py
|
Python
|
mpl-2.0
| 2,249
|
#!/usr/bin/python2.7
# encoding: utf-8
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Main handler."""
import unittest
from google.appengine.ext import webapp
import webob
import config
import django.utils
import main
import test_handler
def setup_request(path):
"""Constructs a webapp.Request object for a given request path."""
return webapp.Request(webob.Request.blank(path).environ)
class MainTests(unittest.TestCase):
def test_get_repo_and_action(self):
def check(path, repo, action):
request = setup_request(path)
assert main.get_repo_and_action(request) == (repo, action)
check('/personfinder/foo', 'foo', '')
check('/personfinder/foo/query', 'foo', 'query')
check('/personfinder', None, '')
check('/personfinder/global', None, '')
check('/personfinder/global/faq', None, 'faq')
check('/foo', 'foo', '')
check('/foo/view', 'foo', 'view')
def test_lang_vulnerability(self):
"""Regression test for bad characters in the lang parameter."""
request = setup_request('/haiti/start&lang=abc%0adef:ghi')
env = main.setup_env(request)
assert '\n' not in env.lang, env.lang
assert ':' not in env.lang, env.lang
def test_shiftjis_get(self):
"""Tests Shift-JIS encoding of GET query parameters."""
request = setup_request(
'/japan/results?charsets=shift_jis&query=%8D%B2%93%A1&role=seek&')
handler = main.Main(request, webapp.Response())
assert handler.env.charset == 'shift_jis'
assert request.charset == 'shift_jis'
assert request.get('query') == u'\u4F50\u85E4'
def test_shiftjis_post(self):
"""Tests Shift-JIS encoding of POST query parameters."""
request = setup_request('/japan/post?')
request.body = 'charsets=shift_jis&given_name=%8D%B2%93%A1'
request.method = 'POST'
handler = main.Main(request, webapp.Response())
assert handler.env.charset == 'shift_jis'
assert request.charset == 'shift_jis'
assert request.get('given_name') == u'\u4F50\u85E4'
def test_default_language(self):
"""Verify that language_menu_options[0] is used as the default."""
request = setup_request('/haiti/start')
handler = main.Main(request, webapp.Response())
assert handler.env.lang == 'en' # first language in the options list
assert django.utils.translation.get_language() == 'en'
config.set_for_repo('haiti', language_menu_options=['fr', 'ht', 'es'])
request = setup_request('/haiti/start')
handler = main.Main(request, webapp.Response())
assert handler.env.lang == 'fr' # first language in the options list
assert django.utils.translation.get_language() == 'fr'
if __name__ == '__main__':
unittest.main()
|
groschovskiy/personfinder
|
tests/test_main.py
|
Python
|
apache-2.0
| 3,414
|
from __future__ import absolute_import, unicode_literals
from ...util.path import Path
from ..via_template import ViaTemplateActivator
class CShellActivator(ViaTemplateActivator):
@classmethod
def supports(cls, interpreter):
return interpreter.os != "nt"
def templates(self):
yield Path("activate.csh")
|
pybuilder/pybuilder
|
src/main/python/pybuilder/_vendor/virtualenv/activation/cshell/__init__.py
|
Python
|
apache-2.0
| 336
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class SubnetsOperations(object):
"""SubnetsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified subnet.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.Subnet"
"""Gets the specified subnet by virtual network and resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Subnet, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.Subnet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
subnet_parameters, # type: "_models.Subnet"
**kwargs # type: Any
):
# type: (...) -> "_models.Subnet"
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(subnet_parameters, 'Subnet')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Subnet', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
subnet_parameters, # type: "_models.Subnet"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.Subnet"]
"""Creates or updates a subnet in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param subnet_parameters: Parameters supplied to the create or update subnet operation.
:type subnet_parameters: ~azure.mgmt.network.v2020_04_01.models.Subnet
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either Subnet or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_04_01.models.Subnet]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
subnet_parameters=subnet_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def _prepare_network_policies_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
prepare_network_policies_request_parameters, # type: "_models.PrepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._prepare_network_policies_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(prepare_network_policies_request_parameters, 'PrepareNetworkPoliciesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_prepare_network_policies_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/PrepareNetworkPolicies'} # type: ignore
def begin_prepare_network_policies(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
prepare_network_policies_request_parameters, # type: "_models.PrepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Prepares a subnet by applying network intent policies.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param prepare_network_policies_request_parameters: Parameters supplied to prepare subnet by
applying network intent policies.
:type prepare_network_policies_request_parameters: ~azure.mgmt.network.v2020_04_01.models.PrepareNetworkPoliciesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._prepare_network_policies_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
prepare_network_policies_request_parameters=prepare_network_policies_request_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_prepare_network_policies.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/PrepareNetworkPolicies'} # type: ignore
def _unprepare_network_policies_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
unprepare_network_policies_request_parameters, # type: "_models.UnprepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._unprepare_network_policies_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(unprepare_network_policies_request_parameters, 'UnprepareNetworkPoliciesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_unprepare_network_policies_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/UnprepareNetworkPolicies'} # type: ignore
def begin_unprepare_network_policies(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
unprepare_network_policies_request_parameters, # type: "_models.UnprepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Unprepares a subnet by removing network intent policies.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param unprepare_network_policies_request_parameters: Parameters supplied to unprepare subnet
to remove network intent policies.
:type unprepare_network_policies_request_parameters: ~azure.mgmt.network.v2020_04_01.models.UnprepareNetworkPoliciesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._unprepare_network_policies_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
unprepare_network_policies_request_parameters=unprepare_network_policies_request_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_unprepare_network_policies.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/UnprepareNetworkPolicies'} # type: ignore
def list(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.SubnetListResult"]
"""Gets all subnets in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SubnetListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_04_01.models.SubnetListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SubnetListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SubnetListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets'} # type: ignore
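# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated SDK file above). It is
# a minimal, hedged example of driving the two operations defined by this
# operations class: paging through the subnets of a virtual network and
# polling the unprepare-network-policies long-running operation. The resource
# group, virtual network, subnet, subscription id and service name values are
# placeholders, and the multi-API NetworkManagementClient is assumed to accept
# api_version="2020-04-01" so that it binds to this operations module.
if __name__ == "__main__":
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.network import NetworkManagementClient
    from azure.mgmt.network.v2020_04_01.models import UnprepareNetworkPoliciesRequest

    client = NetworkManagementClient(
        DefaultAzureCredential(), "<subscription-id>", api_version="2020-04-01"
    )

    # list() returns an ItemPaged iterator; next_link pages are fetched lazily.
    for subnet in client.subnets.list("my-rg", "my-vnet"):
        print("{} {}".format(subnet.name, subnet.address_prefix))

    # begin_unprepare_network_policies() returns an LROPoller; result() blocks
    # until the location-based ARM polling reports completion.
    poller = client.subnets.begin_unprepare_network_policies(
        "my-rg",
        "my-vnet",
        "my-subnet",
        UnprepareNetworkPoliciesRequest(service_name="Microsoft.Sql/managedInstances"),
    )
    poller.result()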
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_04_01/operations/_subnets_operations.py | Python | mit | 36,140 |
##############################################################################
#
# Copyright (c) 2009 Albert Cervera i Areny <albert@nan-tic.com>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import os
import csv
import re
from xml.dom.minidom import getDOMImplementation
import xml.dom.minidom
import codecs
from JasperReport import *
from AbstractDataGenerator import *
class CsvRecordDataGenerator(AbstractDataGenerator):
def __init__(self, report, records):
self.report = report
self.records = records
self.temporaryFiles = []
# CSV file generation using a list of dictionaries provided by the parser function.
def generate(self, fileName):
f = open( fileName, 'wb+' )
try:
            fieldNames = self.report.fieldNames()
            # JasperReports' CSV reader expects an extra empty column at the end of each
            # line, hence the additional '' field. Quote every value so embedded commas are safe.
            writer = csv.DictWriter( f, fieldNames + [''], delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL )
header = {}
for field in fieldNames + ['']:
header[ field ] = field
writer.writerow( header )
error_reported_fields = []
for record in self.records:
row = {}
for field in record:
if field not in self.report.fields():
                        if field not in error_reported_fields:
                            print "FIELD '%s' NOT FOUND IN REPORT." % field
error_reported_fields.append( field )
continue
value = record.get(field, False)
                    # 0.0 must be handled first: in Python 0.0 == False, so it would
                    # otherwise be turned into an empty string by the next branch.
                    if value == 0.0:
                        value = 0.0
elif value == False:
value = ''
elif isinstance(value, unicode):
value = value.encode('utf-8')
elif isinstance(value, float):
value = '%.10f' % value
elif not isinstance(value, str):
value = str(value)
if isinstance(value,str) or isinstance(value,unicode):
                        # Plain dates ('YYYY-MM-DD') are expanded to full timestamps for JasperReports.
                        if re.match(r"^\d{4}-\d{2}-\d{2}$", value) != None:
                            value = "%s 00:00:00" % str(value)
row[self.report.fields()[field]['name']] = value
writer.writerow( row )
finally:
f.close()
class XmlRecordDataGenerator(AbstractDataGenerator):
# XML file generation using a list of dictionaries provided by the parser function.
def generate(self, fileName):
# Once all records have been calculated, create the XML structure itself
self.document = getDOMImplementation().createDocument(None, 'data', None)
topNode = self.document.documentElement
for record in self.data['records']:
recordNode = self.document.createElement('record')
topNode.appendChild( recordNode )
for field, value in record.iteritems():
fieldNode = self.document.createElement( field )
recordNode.appendChild( fieldNode )
                # Any remaining field types must be converted into unicode before serialization.
if value == False:
value = ''
elif isinstance(value, str):
value = unicode(value, 'utf-8')
elif isinstance(value, float):
value = '%.10f' % value
elif not isinstance(value, unicode):
value = unicode(value)
valueNode = self.document.createTextNode( value )
fieldNode.appendChild( valueNode )
# Once created, the only missing step is to store the XML into a file
f = codecs.open( fileName, 'wb+', 'utf-8' )
try:
topNode.writexml( f )
finally:
f.close()
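# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). A minimal,
# hedged example of how CsvRecordDataGenerator appears to be driven, inferred
# from how generate() uses the report object above: fieldNames() supplies the
# CSV header names and fields() maps parser field keys to {'name': <column>}.
# The stub report below is hypothetical; in the real add-on the object comes
# from JasperReport and the records come from the parser function.
if __name__ == '__main__':
    class _StubReport(object):
        def fieldNames(self):
            return ['partner', 'amount', 'date']
        def fields(self):
            return {
                'partner': {'name': 'partner'},
                'amount': {'name': 'amount'},
                'date': {'name': 'date'},
            }

    sample_records = [
        {'partner': 'Acme', 'amount': 12.5, 'date': '2009-05-01'},
        {'partner': 'Globex', 'amount': 0.0, 'date': '2009-05-02'},
    ]
    CsvRecordDataGenerator(_StubReport(), sample_records).generate('/tmp/records.csv')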
| jeffery9/mixprint_addons | jasper_reports/JasperReports/RecordDataGenerator.py | Python | agpl-3.0 | 5,047 |