code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import netaddr
from netaddr.strategy import ipv4
import neutronclient.v2_0.client as nclient
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
import retrying
from murano.common import auth_utils
from murano.common import exceptions as exc
from murano.common.i18n import _LI
from murano.dsl import dsl
from murano.dsl import helpers
from murano.dsl import session_local_storage
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
@dsl.name('io.murano.system.NetworkExplorer')
class NetworkExplorer(object):
    """MuranoPL system class exposing Neutron network discovery helpers.

    Used by the engine to find (or create) the environment's default
    router and to allocate non-overlapping CIDRs for new networks.
    """

    def __init__(self, this):
        session = helpers.get_execution_session()
        self._project_id = session.project_id
        self._settings = CONF.networking
        # Pre-compute the pool of candidate CIDRs once per instance.
        self._available_cidrs = self._generate_possible_cidrs()
        self._owner = this.find_owner('io.murano.Environment')

    @staticmethod
    @session_local_storage.execution_session_memoize
    def _get_client(region_name):
        # Memoized per execution session: one client per region.
        neutron_settings = CONF.neutron
        return nclient.Client(**auth_utils.get_session_client_parameters(
            service_type='network', region=region_name, conf=neutron_settings
        ))

    @property
    def _client(self):
        region = None if self._owner is None else self._owner['region']
        return self._get_client(region)

    # NOTE(starodubcevna): to avoid simultaneous router requests we use retry
    # decorator with random delay 1-10 seconds between attempts and maximum
    # delay time 30 seconds.
    @retrying.retry(retry_on_exception=lambda e: isinstance(e,
                    exc.RouterInfoException),
                    wait_random_min=1000, wait_random_max=10000,
                    stop_max_delay=30000)
    def get_default_router(self):
        """Return the id of the configured router, creating it if allowed.

        :raises KeyError: if the router does not exist and cannot be created
        :raises exc.RouterInfoException: if the router exists but has no
            external gateway set (retried by the decorator above)
        """
        router_name = self._settings.router_name
        routers = self._client.list_routers(
            tenant_id=self._project_id, name=router_name).get('routers')
        if len(routers) == 0:
            LOG.debug('Router {name} not found'.format(name=router_name))
            if self._settings.create_router:
                LOG.debug('Attempting to create Router {router}'.
                          format(router=router_name))
                external_network = self._settings.external_network
                kwargs = {'id': external_network} \
                    if uuidutils.is_uuid_like(external_network) \
                    else {'name': external_network}
                networks = self._client.list_networks(**kwargs).get('networks')
                ext_nets = list(filter(lambda n: n['router:external'],
                                       networks))
                if len(ext_nets) == 0:
                    raise KeyError('Router %s could not be created, '
                                   'no external network found' % router_name)
                nid = ext_nets[0]['id']
                body_data = {
                    'router': {
                        'name': router_name,
                        'external_gateway_info': {
                            'network_id': nid
                        },
                        'admin_state_up': True,
                    }
                }
                router = self._client.create_router(
                    body=body_data).get('router')
                LOG.info(_LI('Created router: {id}').format(id=router['id']))
                return router['id']
            else:
                raise KeyError('Router %s was not found' % router_name)
        else:
            if routers[0]['external_gateway_info'] is None:
                raise exc.RouterInfoException('Please set external gateway for'
                                              ' the router %s ' % router_name)
            router_id = routers[0]['id']
            return router_id

    def get_available_cidr(self, router_id, net_id):
        """Uses hash of network IDs to minimize the collisions:
        different nets will attempt to pick different cidrs out of available
        range.
        If the cidr is taken will pick another one
        """
        taken_cidrs = self._get_cidrs_taken_by_router(router_id)
        id_hash = hash(net_id)
        num_fails = 0
        while num_fails < len(self._available_cidrs):
            cidr = self._available_cidrs[
                (id_hash + num_fails) % len(self._available_cidrs)]
            if any(self._cidrs_overlap(cidr, taken_cidr) for taken_cidr in
                   taken_cidrs):
                num_fails += 1
            else:
                return str(cidr)
        return None

    def get_default_dns(self):
        return self._settings.default_dns

    def get_external_network_id_for_router(self, router_id):
        router = self._client.show_router(router_id).get('router')
        if not router or 'external_gateway_info' not in router:
            return None
        return router['external_gateway_info'].get('network_id')

    def get_external_network_id_for_network(self, network_id):
        network = self._client.show_network(network_id).get('network')
        if network.get('router:external', False):
            return network_id
        # Get router interfaces of the network
        router_ports = self._client.list_ports(
            **{'device_owner': 'network:router_interface',
               'network_id': network_id}).get('ports')
        # For each router this network is connected to
        # check if the router has external_gateway set
        for router_port in router_ports:
            # Bug fix: the original called the non-existent camelCase method
            # self.getExternalNetworkIdForRouter(), which raised
            # AttributeError at runtime.
            ext_net_id = self.get_external_network_id_for_router(
                router_port.get('device_id'))
            if ext_net_id:
                return ext_net_id
        return None

    def _get_cidrs_taken_by_router(self, router_id):
        if not router_id:
            return []
        ports = self._client.list_ports(device_id=router_id)['ports']
        subnet_ids = []
        for port in ports:
            for fixed_ip in port['fixed_ips']:
                subnet_ids.append(fixed_ip['subnet_id'])
        all_subnets = self._client.list_subnets()['subnets']
        filtered_cidrs = [netaddr.IPNetwork(subnet['cidr']) for subnet in
                          all_subnets if subnet['id'] in subnet_ids]
        return filtered_cidrs

    @staticmethod
    def _cidrs_overlap(cidr1, cidr2):
        # netaddr treats "cidr1 in cidr2" as subnet containment.
        return (cidr1 in cidr2) or (cidr2 in cidr1)

    def _generate_possible_cidrs(self):
        # Carve env_ip_template into equal subnets: enough host bits for
        # max_hosts and enough subnet bits for max_environments.
        bits_for_envs = int(
            math.ceil(math.log(self._settings.max_environments, 2)))
        bits_for_hosts = int(math.ceil(math.log(self._settings.max_hosts, 2)))
        width = ipv4.width
        mask_width = width - bits_for_hosts - bits_for_envs
        net = netaddr.IPNetwork(
            '{0}/{1}'.format(self._settings.env_ip_template, mask_width))
        return list(net.subnet(width - bits_for_hosts))

    def list_networks(self):
        return self._client.list_networks()['networks']

    def list_subnetworks(self):
        return self._client.list_subnets()['subnets']

    def list_ports(self):
        return self._client.list_ports()['ports']
| satish-avninetworks/murano | murano/engine/system/net_explorer.py | Python | apache-2.0 | 7,642 |
from __future__ import print_function
class MetaOne(type):
    """Metaclass that announces every class it constructs, then delegates
    the actual creation to ``type.__new__``."""
    def __new__(mcs, name, bases, namespace):
        print('In MetaOne.new:', mcs, name, bases, namespace, sep='\n...')
        return type.__new__(mcs, name, bases, namespace)
class Eggs(object):
    pass
print('making class')
class Spam(Eggs, object):  # Inherits from Eggs, instance of MetaOne
    # NOTE: the __metaclass__ attribute is honored by Python 2 only; under
    # Python 3 this class is a plain instance of `type` and MetaOne.__new__
    # never runs (Python 3 syntax would be `class Spam(Eggs, metaclass=MetaOne)`).
    __metaclass__ = MetaOne
    data = 1  # Class data attribute
    def meth(self, arg):  # Class method attribute
        return self.data + arg
print('making instance')
X = Spam()
# Prints the class attribute and the method result (1 + 2 == 3).
print('data:', X.data, X.meth(2))
| simontakite/sysadmin | pythonscripts/learningPython/metaclass1-2x.py | Python | gpl-2.0 | 678 |
# pylint: disable=missing-docstring
import unittest
from django.conf import settings
from django.test import TestCase
from oauth2_provider.models import AccessToken, Application, RefreshToken
from openedx.core.djangoapps.oauth_dispatch.tests import factories
from common.djangoapps.student.tests.factories import UserFactory
@unittest.skipUnless(settings.FEATURES.get("ENABLE_OAUTH2_PROVIDER"), "OAuth2 not enabled")
class TestClientFactory(TestCase):
    """Verify ApplicationFactory persists a real oauth2 Application row."""

    def setUp(self):
        # Python 3 zero-argument super(); removes the need for the
        # previously suppressed `super-with-arguments` lint warning.
        super().setUp()
        self.user = UserFactory.create()

    def test_client_factory(self):
        actual_application = factories.ApplicationFactory(user=self.user)
        expected_application = Application.objects.get(user=self.user)
        assert actual_application == expected_application
@unittest.skipUnless(settings.FEATURES.get("ENABLE_OAUTH2_PROVIDER"), "OAuth2 not enabled")
class TestAccessTokenFactory(TestCase):
    """Verify AccessTokenFactory persists a real oauth2 AccessToken row."""

    def setUp(self):
        # Python 3 zero-argument super(); removes the need for the
        # previously suppressed `super-with-arguments` lint warning.
        super().setUp()
        self.user = UserFactory.create()

    def test_access_token_client_factory(self):
        application = factories.ApplicationFactory(user=self.user)
        actual_access_token = factories.AccessTokenFactory(user=self.user, application=application)
        expected_access_token = AccessToken.objects.get(user=self.user)
        assert actual_access_token == expected_access_token
@unittest.skipUnless(settings.FEATURES.get("ENABLE_OAUTH2_PROVIDER"), "OAuth2 not enabled")
class TestRefreshTokenFactory(TestCase):
    """Verify RefreshTokenFactory persists a RefreshToken tied to its access token."""

    def setUp(self):
        # Python 3 zero-argument super(); removes the need for the
        # previously suppressed `super-with-arguments` lint warning.
        super().setUp()
        self.user = UserFactory.create()

    def test_refresh_token_factory(self):
        application = factories.ApplicationFactory(user=self.user)
        access_token = factories.AccessTokenFactory(user=self.user, application=application)
        actual_refresh_token = factories.RefreshTokenFactory(
            user=self.user, application=application, access_token=access_token
        )
        expected_refresh_token = RefreshToken.objects.get(user=self.user, access_token=access_token)
        assert actual_refresh_token == expected_refresh_token
| stvstnfrd/edx-platform | openedx/core/djangoapps/oauth_dispatch/tests/test_factories.py | Python | agpl-3.0 | 2,331 |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, include, url
from .views import *
# URL routes for the questions app (legacy Django `patterns()` API).
urlpatterns = patterns('',
    # List of all questions.
    url(r'^$', QuestionsListView.as_view(), name='questions'),
    # Create a new question.
    url(r'^create/$', QuestionsCreateView.as_view(), name='questions-create'),
    # Detail / edit / delete for a single question addressed by its slug.
    url(r'^(?P<qslug>[\w\d\-]+)/view/$', QuestionsView.as_view(), name='questions-view'),
    url(r'^(?P<qslug>[\w\d\-]+)/edit/$', QuestionsEditView.as_view(), name='questions-edit'),
    url(r'^(?P<qslug>[\w\d\-]+)/delete/$', QuestionsDeleteView.as_view(), name='questions-delete'),
)
| niwinz/Green-Mine | src/greenmine/questions/urls.py | Python | bsd-3-clause | 564 |
from quantopian.research import run_pipeline
from quantopian.pipeline import Pipeline
from quantopian.pipeline.factors import Latest
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline.data import morningstar
from quantopian.pipeline.factors import CustomFactor, SimpleMovingAverage, AverageDollarVolume, Returns, RSI
from quantopian.pipeline.classifiers.morningstar import Sector
from quantopian.pipeline.filters import Q500US, Q1500US
from quantopian.pipeline.data.quandl import fred_usdontd156n as libor
from quantopian.pipeline.data.zacks import EarningsSurprises
from quantopian.pipeline.data.sentdex import sentiment
from sklearn.ensemble.partial_dependence import plot_partial_dependence
from sklearn.ensemble.partial_dependence import partial_dependence
import talib
import pandas as pd
import numpy as np
from time import time
import alphalens as al
import pyfolio as pf
from scipy import stats
import matplotlib.pyplot as plt
from sklearn import linear_model, decomposition, ensemble, preprocessing, isotonic, metrics
# Short aliases for the morningstar dataset namespaces used below.
# Trailing underscores avoid shadowing the `is`/`or` keywords.
bs = morningstar.balance_sheet
cfs = morningstar.cash_flow_statement
is_ = morningstar.income_statement
or_ = morningstar.operation_ratios
er = morningstar.earnings_report
v = morningstar.valuation
vr = morningstar.valuation_ratios
def make_factors():
    """Build the dictionary of alpha factors used as model features.

    Returns a mapping of human-readable factor name -> factor class or
    factory callable; each, when instantiated, yields a pipeline term.
    """
    class Momentum(CustomFactor):
        # 12-month momentum excluding the most recent month.
        inputs = [USEquityPricing.close]
        window_length = 252
        def compute(self, today, assets, out, prices):
            out[:] = ((prices[-21] - prices[-252])/prices[-252] -
                      (prices[-1] - prices[-21])/prices[-21])
    def Growth():
        return or_.revenue_growth.latest
    def PE_ratio():
        return vr.pe_ratio.latest
    def Sentiment():
        return sentiment.sentiment_signal.latest
    def Asset_Growth_3M():
        return Returns(inputs=[bs.total_assets], window_length=63)
    def Asset_To_Equity_Ratio():
        return bs.total_assets.latest / bs.common_stock_equity.latest
    def Capex_To_Cashflows():
        # Quarterly figures annualized (x4) in both numerator and denominator.
        return (cfs.capital_expenditure.latest * 4.) / \
            (cfs.free_cash_flow.latest * 4.)
    def EBITDA_Yield():
        return (is_.ebitda.latest * 4.) / \
            USEquityPricing.close.latest
    def EBIT_To_Assets():
        return (is_.ebit.latest * 4.) / \
            bs.total_assets.latest
    def Earnings_Quality():
        return morningstar.cash_flow_statement.operating_cash_flow.latest / \
            EarningsSurprises.eps_act.latest
    def Return_On_Total_Invest_Capital():
        return or_.roic.latest
    class Mean_Reversion_1M(CustomFactor):
        # Z-score of last month's return vs. a trailing year of monthly returns.
        inputs = [Returns(window_length=21)]
        window_length = 252
        def compute(self, today, assets, out, monthly_rets):
            out[:] = (monthly_rets[-1] - np.nanmean(monthly_rets, axis=0)) / \
                np.nanstd(monthly_rets, axis=0)
    class MACD_Signal_10d(CustomFactor):
        inputs = [USEquityPricing.close]
        window_length = 60
        def compute(self, today, assets, out, close):
            sig_lines = []
            for col in close.T:
                # get signal line only
                try:
                    _, signal_line, _ = talib.MACD(col, fastperiod=12,
                                                   slowperiod=26, signalperiod=10)
                    sig_lines.append(signal_line[-1])
                # if error calculating, return NaN
                # (bug fix: was a bare `except:`, which would also swallow
                # KeyboardInterrupt/SystemExit)
                except Exception:
                    sig_lines.append(np.nan)
            out[:] = sig_lines
    class Moneyflow_Volume_5d(CustomFactor):
        inputs = [USEquityPricing.close, USEquityPricing.volume]
        window_length = 5
        def compute(self, today, assets, out, close, volume):
            mfvs = []
            for col_c, col_v in zip(close.T, volume.T):
                # denominator
                denominator = np.dot(col_c, col_v)
                # numerator: signed dollar volume, sign from price direction
                numerator = 0.
                for n, price in enumerate(col_c.tolist()):
                    if price > col_c[n - 1]:
                        numerator += price * col_v[n]
                    else:
                        numerator -= price * col_v[n]
                mfvs.append(numerator / denominator)
            out[:] = mfvs
    def Net_Income_Margin():
        return or_.net_margin.latest
    def Operating_Cashflows_To_Assets():
        return (cfs.operating_cash_flow.latest * 4.) / \
            bs.total_assets.latest
    def Price_Momentum_3M():
        return Returns(window_length=63)
    class Price_Oscillator(CustomFactor):
        inputs = [USEquityPricing.close]
        window_length = 252
        def compute(self, today, assets, out, close):
            four_week_period = close[-20:]
            out[:] = (np.nanmean(four_week_period, axis=0) /
                      np.nanmean(close, axis=0)) - 1.
    def Returns_39W():
        return Returns(window_length=215)
    class Trendline(CustomFactor):
        inputs = [USEquityPricing.close]
        window_length = 252
        # using MLE for speed
        def compute(self, today, assets, out, close):
            # prepare X matrix (x_is - x_bar)
            X = range(self.window_length)
            X_bar = np.nanmean(X)
            X_vector = X - X_bar
            X_matrix = np.tile(X_vector, (len(close.T), 1)).T
            # prepare Y matrix (y_is - y_bar)
            Y_bar = np.nanmean(close, axis=0)
            Y_bars = np.tile(Y_bar, (self.window_length, 1))
            Y_matrix = close - Y_bars
            # prepare variance of X
            X_var = np.nanvar(X)
            # multiply X matrix an Y matrix and sum (dot product)
            # then divide by variance of X
            # this gives the MLE of Beta
            out[:] = (np.sum((X_matrix * Y_matrix), axis=0) / X_var) / \
                (self.window_length)
    class Vol_3M(CustomFactor):
        inputs = [Returns(window_length=2)]
        window_length = 63
        def compute(self, today, assets, out, rets):
            out[:] = np.nanstd(rets, axis=0)
    def Working_Capital_To_Assets():
        return bs.working_capital.latest / bs.total_assets.latest
    all_factors = {
        'Momentum' : Momentum,
        'Growth' : Growth,
        'PE ratio' : PE_ratio,
        'Sentiment' : Sentiment,
        'Asset Growth 3M': Asset_Growth_3M,
        'Asset to Equity Ratio': Asset_To_Equity_Ratio,
        'Capex to Cashflows': Capex_To_Cashflows,
        'EBIT to Assets': EBIT_To_Assets,
        'EBITDA Yield': EBITDA_Yield,
        'Earnings Quality': Earnings_Quality,
        'MACD Signal Line': MACD_Signal_10d,
        'Mean Reversion 1M': Mean_Reversion_1M,
        'Moneyflow Volume 5D': Moneyflow_Volume_5d,
        'Net Income Margin': Net_Income_Margin,
        'Operating Cashflows to Assets': Operating_Cashflows_To_Assets,
        'Price Momentum 3M': Price_Momentum_3M,
        'Price Oscillator': Price_Oscillator,
        'Return on Invest Capital': Return_On_Total_Invest_Capital,
        '39 Week Returns': Returns_39W,
        'Trendline': Trendline,
        'Vol 3M': Vol_3M,
        'Working Capital to Assets': Working_Capital_To_Assets,
    }
    return all_factors
# Tradable universe: Quantopian's 1500 most liquid US equities.
universe = Q1500US()
factors = make_factors()
n_fwd_days = 5  # number of days to compute returns over
def make_history_pipeline(factors, universe, n_fwd_days=5):
    """Build a pipeline of ranked factor columns plus trailing returns.

    Each factor is ranked within the tradable universe; a 'Returns'
    column over the last n_fwd_days is added so it can later be shifted
    to serve as the prediction target.
    """
    columns = dict((factor_name, factor().rank(mask=universe))
                   for factor_name, factor in factors.iteritems())
    # Trailing cumulative returns; shifted later to become forward returns.
    columns['Returns'] = Returns(inputs=[USEquityPricing.open],
                                 mask=universe, window_length=n_fwd_days)
    return Pipeline(screen=universe, columns=columns)
history_pipe = make_history_pipeline(factors, universe, n_fwd_days=n_fwd_days)
start_timer = time()
start = pd.Timestamp("2016-01-01")
end = pd.Timestamp("2017-06-01")
# Evaluate all factor ranks plus trailing returns over the sample window.
results = run_pipeline(history_pipe, start_date=start, end_date=end)
results.index.names = ['date', 'security']
end_timer = time()
# NOTE: Python 2 print statement -- this notebook targets the py2 runtime.
print "Time to run pipeline %.2f secs" % (end_timer - start_timer)
results.head()
def shift_mask_data(X, Y, upper_percentile=70, lower_percentile=30, n_fwd_days=1):
    """Align factors with forward returns and label the big movers.

    Factors at time t are matched with returns realized n_fwd_days later,
    the daily cross-section is split at the given percentiles, and only
    stocks in the tails are kept.  Returns the flattened factor matrix
    and a vector of +1 / -1 labels.
    """
    shift = n_fwd_days + 1
    # Roll factors forward so row t lines up with later returns, then
    # drop the rows that wrapped around.
    X = np.roll(X, shift, axis=0)[shift:]
    Y = Y[shift:]
    n_time, n_stocks, n_factors = X.shape
    # Per-day cross-sectional cutoffs for the biggest up/down movers.
    hi_cut = np.nanpercentile(Y, upper_percentile, axis=1)[:, np.newaxis]
    lo_cut = np.nanpercentile(Y, lower_percentile, axis=1)[:, np.newaxis]
    hi_mask = (Y >= hi_cut)
    lo_mask = (Y <= lo_cut)
    keep = (hi_mask | lo_mask).flatten()  # NaNs fall out of both tails
    # +1 for winners, -1 for losers, 0 (dropped below) for the middle band.
    labels = np.zeros(n_time * n_stocks)
    labels[hi_mask.flatten()] = 1
    labels[lo_mask.flatten()] = -1
    flat_X = X.reshape((n_time * n_stocks, n_factors))
    return flat_X[keep], labels[keep]
# Massage data to be in the form expected by shift_mask_data()
results_wo_returns = results.copy()
returns = results_wo_returns.pop('Returns')
Y = returns.unstack().values
X = results_wo_returns.to_panel()
X = X.swapaxes(2, 0).swapaxes(0, 1).values # (factors, time, stocks) -> (time, stocks, factors)
# Re-label the MultiIndex with readable symbols/dates for display.
results_wo_returns.index = results_wo_returns.index.set_levels(results_wo_returns.index.get_level_values(1).map(lambda x: x.symbol), 1, )
results_wo_returns.index = results_wo_returns.index.set_levels(results_wo_returns.index.get_level_values(0).map(lambda x: x.date), 0, )
# NOTE(review): Series/DataFrame .sort() is the long-deprecated in-place
# sort -- confirm the pinned pandas version still supports it.
results_wo_returns.sample(10).sort()
tmp = (returns > 0.).to_frame()
tmp.index = tmp.index.set_levels(tmp.index.get_level_values(1).map(lambda x: x.symbol), 1)
tmp.columns = ['5-day forward returns > 0']
tmp.sample(10).sort()
results_wo_returns.isnull().sum()
# Train-test split
train_size_perc = 0.8
n_time, n_stocks, n_factors = X.shape
train_size = np.int16(np.round(train_size_perc * n_time))
X_train, Y_train = X[:train_size, ...], Y[:train_size]
# Leave an n_fwd_days gap so test labels never overlap training data.
X_test, Y_test = X[(train_size+n_fwd_days):, ...], Y[(train_size+n_fwd_days):]
X_train_shift, Y_train_shift = shift_mask_data(X_train, Y_train, n_fwd_days=n_fwd_days)
# 50/50 percentiles on the test set: classify every stock, not just tails.
X_test_shift, Y_test_shift = shift_mask_data(X_test, Y_test, n_fwd_days=n_fwd_days,
                                             lower_percentile=50,
                                             upper_percentile=50)
X_train_shift.shape, X_test_shift.shape
start_timer = time()
# Train classifier
imputer = preprocessing.Imputer()
scaler = preprocessing.MinMaxScaler()
clf = ensemble.AdaBoostClassifier(n_estimators=150) # n_estimators controls how many weak classifiers are fi
X_train_trans = imputer.fit_transform(X_train_shift)
X_train_trans = scaler.fit_transform(X_train_trans)
clf.fit(X_train_trans, Y_train_shift)
end_timer = time()
print "Time to train full ML pipline: %0.2f secs" % (end_timer - start_timer)
Y_pred = clf.predict(X_train_trans)
print('Accuracy on train set = {:.2f}%'.format(metrics.accuracy_score(Y_train_shift, Y_pred) * 100))
# Transform test data with the imputer/scaler fitted on the training set.
X_test_trans = imputer.transform(X_test_shift)
X_test_trans = scaler.transform(X_test_trans)
# Predict
Y_pred = clf.predict(X_test_trans)
Y_pred_prob = clf.predict_proba(X_test_trans)
print 'Predictions:', Y_pred
print 'Probabilities of class == 1:', Y_pred_prob[:, 1] * 100
print('Accuracy on test set = {:.2f}%'.format(metrics.accuracy_score(Y_test_shift, Y_pred) * 100))
print('Log-loss = {:.5f}'.format(metrics.log_loss(Y_test_shift, Y_pred_prob)))
# Plot which factors the boosted ensemble leaned on most.
feature_importances = pd.Series(clf.feature_importances_, index=results_wo_returns.columns)
feature_importances.sort(ascending=False)
ax = feature_importances.plot(kind='bar')
ax.set(ylabel='Importance (Gini Coefficient)', title='Feature importances')
| vsmolyakov/fin | alpha_selection.py | Python | mit | 12,422 |
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility methods for working with WSGI servers redux
"""
import sys
import netaddr
import oslo_i18n
from oslo_log import log as logging
from oslo_policy import policy as oslo_policy
import six
import webob.dec
import webob.exc
from neutron.common import exceptions
from neutron.i18n import _LE, _LI
from neutron import wsgi
LOG = logging.getLogger(__name__)
class Request(wsgi.Request):
    # Thin alias so API modules depend on this module rather than on
    # neutron.wsgi directly; no behavior is added.
    pass
def Resource(controller, faults=None, deserializers=None, serializers=None):
    """Represents an API entity resource and the associated serialization and
    deserialization logic

    :param controller: object providing one method per routed action
    :param faults: mapping of exception class -> webob HTTP exception class
    :param deserializers: optional content-type -> deserializer overrides
    :param serializers: optional content-type -> serializer overrides
    :returns: a WSGI application wrapping the controller
    """
    default_deserializers = {'application/json': wsgi.JSONDeserializer()}
    default_serializers = {'application/json': wsgi.JSONDictSerializer()}
    format_types = {'json': 'application/json'}
    # Actions whose success status differs from the default 200.
    action_status = dict(create=201, delete=204)
    default_deserializers.update(deserializers or {})
    default_serializers.update(serializers or {})
    deserializers = default_deserializers
    serializers = default_serializers
    faults = faults or {}
    @webob.dec.wsgify(RequestClass=Request)
    def resource(request):
        """Dispatch one WSGI request to the controller and serialize the result."""
        route_args = request.environ.get('wsgiorg.routing_args')
        if route_args:
            args = route_args[1].copy()
        else:
            args = {}
        # NOTE(jkoelker) by now the controller is already found, remove
        # it from the args if it is in the matchdict
        args.pop('controller', None)
        fmt = args.pop('format', None)
        action = args.pop('action', None)
        content_type = format_types.get(fmt,
                                        request.best_match_content_type())
        language = request.best_match_language()
        deserializer = deserializers.get(content_type)
        serializer = serializers.get(content_type)
        try:
            if request.body:
                args['body'] = deserializer.deserialize(request.body)['body']
            method = getattr(controller, action)
            result = method(request=request, **args)
        except (exceptions.NeutronException,
                netaddr.AddrFormatError,
                oslo_policy.PolicyNotAuthorized) as e:
            # Map known exception types to their configured HTTP faults;
            # anything unmapped becomes a 500.
            for fault in faults:
                if isinstance(e, fault):
                    mapped_exc = faults[fault]
                    break
            else:
                mapped_exc = webob.exc.HTTPInternalServerError
            if 400 <= mapped_exc.code < 500:
                LOG.info(_LI('%(action)s failed (client error): %(exc)s'),
                         {'action': action, 'exc': e})
            else:
                LOG.exception(_LE('%s failed'), action)
            e = translate(e, language)
            body = serializer.serialize(
                {'NeutronError': get_exception_data(e)})
            kwargs = {'body': body, 'content_type': content_type}
            raise mapped_exc(**kwargs)
        except webob.exc.HTTPException as e:
            type_, value, tb = sys.exc_info()
            if hasattr(e, 'code') and 400 <= e.code < 500:
                LOG.info(_LI('%(action)s failed (client error): %(exc)s'),
                         {'action': action, 'exc': e})
            else:
                LOG.exception(_LE('%s failed'), action)
            translate(e, language)
            value.body = serializer.serialize(
                {'NeutronError': get_exception_data(e)})
            value.content_type = content_type
            # Re-raise the original HTTPException with the translated body.
            six.reraise(type_, value, tb)
        except NotImplementedError as e:
            e = translate(e, language)
            # NOTE(armando-migliaccio): from a client standpoint
            # it makes sense to receive these errors, because
            # extensions may or may not be implemented by
            # the underlying plugin. So if something goes south,
            # because a plugin does not implement a feature,
            # returning 500 is definitely confusing.
            body = serializer.serialize(
                {'NotImplementedError': get_exception_data(e)})
            kwargs = {'body': body, 'content_type': content_type}
            raise webob.exc.HTTPNotImplemented(**kwargs)
        except Exception:
            # NOTE(jkoelker) Everything else is 500
            LOG.exception(_LE('%s failed'), action)
            # Do not expose details of 500 error to clients.
            # NOTE(review): `_` is not among this module's visible imports;
            # presumably installed into builtins by the i18n setup -- confirm.
            msg = _('Request Failed: internal server error while '
                    'processing your request.')
            msg = translate(msg, language)
            body = serializer.serialize(
                {'NeutronError': get_exception_data(
                    webob.exc.HTTPInternalServerError(msg))})
            kwargs = {'body': body, 'content_type': content_type}
            raise webob.exc.HTTPInternalServerError(**kwargs)
        status = action_status.get(action, 200)
        body = serializer.serialize(result)
        # NOTE(jkoelker) Comply with RFC2616 section 9.7
        if status == 204:
            content_type = ''
            body = None
        return webob.Response(request=request, status=status,
                              content_type=content_type,
                              body=body)
    return resource
def get_exception_data(e):
    """Extract the information about an exception.

    Neutron client for the v2 API expects exceptions to have 'type',
    'message' and 'detail' attributes.  This information is extracted
    and converted into a dictionary.

    :param e: the exception to be reraised
    :returns: a structured dict with the exception data
    """
    return {'type': e.__class__.__name__,
            'message': e,
            'detail': ''}
def translate(translatable, locale):
    """Translate *translatable* into *locale*.

    Known exception types get their translatable field localized in
    place and the exception is returned; a plain translatable string is
    translated and the translation returned; anything else is returned
    unchanged.

    :param translatable: the object to be translated
    :param locale: the locale to translate to
    :returns: the translated object, or the object as-is if it
              was not translated
    """
    _localize = oslo_i18n.translate
    # Order matters: NeutronException and HTTPError are both Exceptions.
    if isinstance(translatable, exceptions.NeutronException):
        translatable.msg = _localize(translatable.msg, locale)
        return translatable
    if isinstance(translatable, webob.exc.HTTPError):
        translatable.detail = _localize(translatable.detail, locale)
        return translatable
    if isinstance(translatable, Exception):
        translatable.message = _localize(translatable, locale)
        return translatable
    return _localize(translatable, locale)
| barnsnake351/neutron | neutron/api/v2/resource.py | Python | apache-2.0 | 7,285 |
# encoding: utf-8
class MyBaseException(Exception):
    """
    Base class for Exceptions

    Create subclasses with parameters in their msg e.g. {message} or {name}
    and call as in: raise NewException(name="Foo")

    msgargs     Arguments that slot into msg
    __str__     Returns msg expanded with msgargs
    """
    errno = 0
    httperror = 500  # See BaseHTTPRequestHandler for list of errors
    msg = "Generic Model Exception"  # Parameterised template for the message

    def __init__(self, **kwargs):
        super(MyBaseException, self).__init__()
        # Store arbitrary dict of message args used to expand the msg template.
        self.msgargs = kwargs

    def __str__(self):
        try:
            return self.msg.format(**self.msgargs)
        # Bug fix: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; formatting failures (e.g. a
        # missing key) still fall back to the raw template.
        except Exception:
            return self.msg + " UNFORMATABLE ARGS:" + repr(self.msgargs)
class ToBeImplementedException(MyBaseException):
    """
    Raised when some code has not been implemented yet
    """
    httperror = 501
    msg = "{name} needs implementing"
# Note TransportError is in Transport.py
class IPFSException(MyBaseException):
    # Failure reported by the IPFS transport layer.
    httperror = 500
    msg = "IPFS Error: {message}"
class CodingException(MyBaseException):
    # Internal invariant violated -- indicates a programming error.
    httperror = 501
    msg = "Coding Error: {message}"
class SignatureException(MyBaseException):
    # Cryptographic signature did not verify.
    httperror = 501
    msg = "Signature Verification Error: {message}"
class EncryptionException(MyBaseException):
    httperror = 500 # Failure in the encryption code other than lack of authentication
    msg = "Encryption error: {message}"
class ForbiddenException(MyBaseException):
    httperror = 403 # Forbidden (WWW Authentication won't help (note there is no real HTTP error for authentication (other than HTTP authentication) failed )
    msg = "Not allowed: {what}"
class AuthenticationException(MyBaseException):
    """
    Raised when authentication of the caller fails
    """
    httperror = 403 # Forbidden - this should be 401 except that requires extra headers (see RFC2616)
    msg = "Authentication Exception: {message}"
class IntentionallyUnimplementedException(MyBaseException):
    """
    Raised when some code has not been implemented yet
    """
    httperror = 501
    msg = "Intentionally not implemented: {message}"
class DecryptionFailException(MyBaseException):
    """
    Raised if decrypytion failed - this could be cos its the wrong (e.g. old) key
    """
    httperror = 500
    msg = "Decryption fail"
class SecurityWarning(MyBaseException):
    # Non-fatal security issue worth surfacing to the caller.
    msg = "Security warning: {message}"
class AssertionFail(MyBaseException): #TODO-BACKPORT - console.assert on JS should throw this
    """
    Raised when something that should be True isn't - usually a coding failure or some change not propogated fully
    """
    httperror = 500
    msg = "{message}"
class TransportURLNotFound(MyBaseException):
    httperror = 404
    msg = "{url} not found"
class NoContentException(MyBaseException):
    httperror = 404
    msg = "No content found"
class MultihashError(MyBaseException):
    httperror = 500
    msg = "Multihash error {message}"
class SearchException(MyBaseException):
    httperror = 404
    msg = "{search} not found"
class TransportFileNotFound(MyBaseException):
    httperror = 404
    msg = "file {file} not found"
"""
# Following are currently obsolete - not being used in Python or JS
class PrivateKeyException(MyBaseException):
    #Raised when some code has not been implemented yet
    httperror = 500
    msg = "Operation requires Private Key, but only Public available."
"""
| ArchiveLabs/dweb_gateway | python/Errors.py | Python | agpl-3.0 | 3,507 |
#!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition of targets to build artifacts."""
import os.path
import random
import string
import sys
sys.path.insert(0, os.path.abspath('..'))
import python_utils.jobset as jobset
def create_docker_jobspec(name,
                          dockerfile_dir,
                          shell_command,
                          environ=None,
                          flake_retries=0,
                          timeout_retries=0,
                          timeout_seconds=30 * 60,
                          extra_docker_args=None,
                          verbose_success=False):
    """Creates jobspec for a task running under docker.

    :param name: artifact name; also used for the output directory
    :param dockerfile_dir: directory containing the Dockerfile to build
    :param shell_command: command to run inside the container
    :param environ: optional extra environment variables passed into docker
    :returns: a jobset.JobSpec running the dockerized build
    """
    # Bug fix: default was a shared mutable dict (`environ={}`); treat
    # None as "no extra env" and never mutate the caller's mapping.
    environ = {} if environ is None else environ.copy()
    environ['ARTIFACTS_OUT'] = 'artifacts/%s' % name
    docker_args = []
    for k, v in list(environ.items()):
        docker_args += ['-e', '%s=%s' % (k, v)]
    docker_env = {
        'DOCKERFILE_DIR': dockerfile_dir,
        'DOCKER_RUN_SCRIPT': 'tools/run_tests/dockerize/docker_run.sh',
        'DOCKER_RUN_SCRIPT_COMMAND': shell_command,
        'OUTPUT_DIR': 'artifacts'
    }
    if extra_docker_args is not None:
        docker_env['EXTRA_DOCKER_ARGS'] = extra_docker_args
    jobspec = jobset.JobSpec(
        cmdline=['tools/run_tests/dockerize/build_and_run_docker.sh'] +
        docker_args,
        environ=docker_env,
        shortname='build_artifact.%s' % (name),
        timeout_seconds=timeout_seconds,
        flake_retries=flake_retries,
        timeout_retries=timeout_retries,
        verbose_success=verbose_success)
    return jobspec
def create_jobspec(name,
                   cmdline,
                   environ=None,
                   shell=False,
                   flake_retries=0,
                   timeout_retries=0,
                   timeout_seconds=30 * 60,
                   use_workspace=False,
                   cpu_cost=1.0,
                   verbose_success=False):
    """Creates jobspec.

    :param name: artifact name; also used for the output directory
    :param cmdline: command (as a list) to run
    :param use_workspace: when True, run inside a fresh git workspace clone
    :returns: a jobset.JobSpec running the build directly on the host
    """
    # Bug fix: default was a shared mutable dict (`environ={}`); treat
    # None as "no extra env" and never mutate the caller's mapping.
    environ = {} if environ is None else environ.copy()
    if use_workspace:
        environ['WORKSPACE_NAME'] = 'workspace_%s' % name
        environ['ARTIFACTS_OUT'] = os.path.join('..', 'artifacts', name)
        cmdline = ['bash', 'tools/run_tests/artifacts/run_in_workspace.sh'
                  ] + cmdline
    else:
        environ['ARTIFACTS_OUT'] = os.path.join('artifacts', name)
    jobspec = jobset.JobSpec(cmdline=cmdline,
                             environ=environ,
                             shortname='build_artifact.%s' % (name),
                             timeout_seconds=timeout_seconds,
                             flake_retries=flake_retries,
                             timeout_retries=timeout_retries,
                             shell=shell,
                             cpu_cost=cpu_cost,
                             verbose_success=verbose_success)
    return jobspec
# Oldest macOS release targeted when building native artifacts.
_MACOS_COMPAT_FLAG = '-mmacosx-version-min=10.10'
# Compiler flags selecting 32- vs 64-bit output per architecture label.
_ARCH_FLAG_MAP = {'x86': '-m32', 'x64': '-m64'}
class PythonArtifact:
    """Builds a Python artifact (wheel) for one platform/arch/interpreter."""

    def __init__(self, platform, arch, py_version, presubmit=False):
        """
        platform: e.g. 'manylinux2014', 'musllinux_1_1', 'linux_extra',
                  'macos' or 'windows'
        arch: target architecture, e.g. 'x64', 'x86', 'aarch64', 'armv7'
        py_version: interpreter selector (manylinux tag, macos python
                    binary name, or windows installation name)
        presubmit: if True, the artifact is also built on presubmit runs
        """
        self.name = 'python_%s_%s_%s' % (platform, arch, py_version)
        self.platform = platform
        self.arch = arch
        self.labels = ['artifact', 'python', platform, arch, py_version]
        if presubmit:
            self.labels.append('presubmit')
        self.py_version = py_version
        # manylinux, linux_extra and musllinux are all linux builds.
        # The three key substrings are mutually exclusive, so 'linux' is
        # appended at most once — same behavior as the original three
        # separate 'in platform' checks.
        if any(key in platform
               for key in ('manylinux', 'linux_extra', 'musllinux')):
            self.labels.append('linux')

    def pre_build_jobspecs(self):
        """Python artifacts need no pre-build jobs."""
        return []

    def build_jobspec(self, inner_jobs=None):
        """Return the jobspec that builds this artifact.

        inner_jobs: optional parallelism for building the native extension,
        the most time-consuming part of the build.
        """
        environ = {}
        if inner_jobs is not None:
            # set number of parallel jobs when building native extension
            environ['GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS'] = str(inner_jobs)
        if self.platform == 'linux_extra':
            # Crosscompilation build for armv7 (e.g. Raspberry Pi)
            environ['PYTHON'] = '/opt/python/{}/bin/python3'.format(
                self.py_version)
            environ['PIP'] = '/opt/python/{}/bin/pip3'.format(self.py_version)
            environ['GRPC_SKIP_PIP_CYTHON_UPGRADE'] = 'TRUE'
            # twine check cannot run on crosscompiled wheels
            environ['GRPC_SKIP_TWINE_CHECK'] = 'TRUE'
            return create_docker_jobspec(
                self.name,
                'tools/dockerfile/grpc_artifact_python_linux_{}'.format(
                    self.arch),
                'tools/run_tests/artifacts/build_artifact_python.sh',
                environ=environ,
                timeout_seconds=60 * 60)
        elif 'manylinux' in self.platform:
            if self.arch == 'x86':
                environ['SETARCH_CMD'] = 'linux32'
            # Inside the manylinux container, the python installations are
            # located in special places under /opt/python...
            environ['PYTHON'] = '/opt/python/{}/bin/python'.format(
                self.py_version)
            environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.py_version)
            environ['GRPC_SKIP_PIP_CYTHON_UPGRADE'] = 'TRUE'
            if self.arch == 'aarch64':
                environ['GRPC_SKIP_TWINE_CHECK'] = 'TRUE'
            else:
                # only run auditwheel if we're not crosscompiling
                environ['GRPC_RUN_AUDITWHEEL_REPAIR'] = 'TRUE'
                # only build the packages that depend on grpcio-tools
                # if we're not crosscompiling:
                # - they require protoc to run on the current architecture
                # - they only have sdist packages anyway, so it's useless to
                #   build them again
                environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
            return create_docker_jobspec(
                self.name,
                'tools/dockerfile/grpc_artifact_python_%s_%s' %
                (self.platform, self.arch),
                'tools/run_tests/artifacts/build_artifact_python.sh',
                environ=environ,
                timeout_seconds=60 * 60 * 2)
        elif 'musllinux' in self.platform:
            environ['PYTHON'] = '/opt/python/{}/bin/python'.format(
                self.py_version)
            environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.py_version)
            environ['GRPC_SKIP_PIP_CYTHON_UPGRADE'] = 'TRUE'
            environ['GRPC_RUN_AUDITWHEEL_REPAIR'] = 'TRUE'
            environ['GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX'] = 'TRUE'
            return create_docker_jobspec(
                self.name,
                'tools/dockerfile/grpc_artifact_python_%s_%s' %
                (self.platform, self.arch),
                'tools/run_tests/artifacts/build_artifact_python.sh',
                environ=environ,
                timeout_seconds=60 * 60 * 2)
        elif self.platform == 'windows':
            if 'Python27' in self.py_version:
                environ['EXT_COMPILER'] = 'mingw32'
            else:
                environ['EXT_COMPILER'] = 'msvc'
            # NOTE(review): a random temp-dir name used to be computed here
            # (working around batch %random% always using the same seed) but
            # it was never passed to the build, so the dead code (which also
            # shadowed the builtin `dir`) was removed.
            return create_jobspec(self.name, [
                'tools\\run_tests\\artifacts\\build_artifact_python.bat',
                self.py_version, '32' if self.arch == 'x86' else '64'
            ],
                                  environ=environ,
                                  timeout_seconds=45 * 60,
                                  use_workspace=True)
        else:
            # macos: build directly on the host using the named interpreter.
            environ['PYTHON'] = self.py_version
            environ['SKIP_PIP_INSTALL'] = 'TRUE'
            return create_jobspec(
                self.name,
                ['tools/run_tests/artifacts/build_artifact_python.sh'],
                environ=environ,
                timeout_seconds=60 * 60 * 2,
                use_workspace=True)

    def __str__(self):
        return self.name
class RubyArtifact:
    """Builds the ruby native gem for one gem platform."""

    def __init__(self, platform, gem_platform, presubmit=False):
        self.name = 'ruby_native_gem_%s_%s' % (platform, gem_platform)
        self.platform = platform
        self.gem_platform = gem_platform
        labels = ['artifact', 'ruby', platform, gem_platform]
        if presubmit:
            labels.append('presubmit')
        self.labels = labels

    def pre_build_jobspecs(self):
        """The ruby gem needs no pre-build jobs."""
        return []

    def build_jobspec(self, inner_jobs=None):
        """Return the jobspec that builds the native gem."""
        environ = {}
        if inner_jobs is not None:
            # Parallelize compilation of the native extension.
            environ['GRPC_RUBY_BUILD_PROCS'] = str(inner_jobs)
        # The ruby build invokes docker itself and docker cannot be nested,
        # so this job runs in a custom workspace instead of a container.
        cmdline = [
            'tools/run_tests/artifacts/build_artifact_ruby.sh',
            self.gem_platform
        ]
        return create_jobspec(self.name,
                              cmdline,
                              use_workspace=True,
                              timeout_seconds=90 * 60,
                              environ=environ)
class CSharpExtArtifact:
    """Builds C# native extension library"""
    def __init__(self, platform, arch, arch_abi=None, presubmit=False):
        # arch_abi is only meaningful for android targets, where it selects
        # the ABI (e.g. arm64-v8a) and is appended to name and labels.
        self.name = 'csharp_ext_%s_%s' % (platform, arch)
        self.platform = platform
        self.arch = arch
        self.arch_abi = arch_abi
        self.labels = ['artifact', 'csharp', platform, arch]
        if arch_abi:
            self.name += '_%s' % arch_abi
            self.labels.append(arch_abi)
        if presubmit:
            self.labels.append('presubmit')
    def pre_build_jobspecs(self):
        # No pre-build jobs are needed.
        return []
    def build_jobspec(self, inner_jobs=None):
        """Return the jobspec building the native extension for this target.

        Dispatch: android and linux build under docker; ios, windows and
        other platforms (macos) build in a workspace on the host.
        """
        environ = {}
        if inner_jobs is not None:
            # set number of parallel jobs when building native extension
            environ['GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS'] = str(inner_jobs)
        if self.arch == 'android':
            environ['ANDROID_ABI'] = self.arch_abi
            return create_docker_jobspec(
                self.name,
                'tools/dockerfile/grpc_artifact_android_ndk',
                'tools/run_tests/artifacts/build_artifact_csharp_android.sh',
                environ=environ)
        elif self.arch == 'ios':
            return create_jobspec(
                self.name,
                ['tools/run_tests/artifacts/build_artifact_csharp_ios.sh'],
                timeout_seconds=60 * 60,
                use_workspace=True,
                environ=environ)
        elif self.platform == 'windows':
            return create_jobspec(self.name, [
                'tools\\run_tests\\artifacts\\build_artifact_csharp.bat',
                self.arch
            ],
                                  timeout_seconds=45 * 60,
                                  use_workspace=True,
                                  environ=environ)
        else:
            if self.platform == 'linux':
                dockerfile_dir = 'tools/dockerfile/grpc_artifact_centos6_{}'.format(
                    self.arch)
                if self.arch == 'aarch64':
                    # for aarch64, use a dockcross manylinux image that will
                    # give us both ready to use crosscompiler and sufficient backward compatibility
                    dockerfile_dir = 'tools/dockerfile/grpc_artifact_python_manylinux2014_aarch64'
                return create_docker_jobspec(
                    self.name,
                    dockerfile_dir,
                    'tools/run_tests/artifacts/build_artifact_csharp.sh',
                    environ=environ)
            else:
                # macos: run the build script directly in a workspace.
                return create_jobspec(
                    self.name,
                    ['tools/run_tests/artifacts/build_artifact_csharp.sh'],
                    timeout_seconds=45 * 60,
                    use_workspace=True,
                    environ=environ)
    def __str__(self):
        return self.name
class PHPArtifact:
    """Builds the PHP PECL source package."""

    def __init__(self, platform, arch, presubmit=False):
        self.name = 'php_pecl_package_{0}_{1}'.format(platform, arch)
        self.platform = platform
        self.arch = arch
        labels = ['artifact', 'php', platform, arch]
        if presubmit:
            labels.append('presubmit')
        self.labels = labels

    def pre_build_jobspecs(self):
        """The PECL package needs no pre-build jobs."""
        return []

    def build_jobspec(self, inner_jobs=None):
        """Return the jobspec packing the PECL archive."""
        # Building the PHP artifact is basically just packing an archive,
        # so the inner_jobs parallelism hint is irrelevant here.
        del inner_jobs
        if self.platform == 'linux':
            return create_docker_jobspec(
                self.name,
                'tools/dockerfile/test/php73_zts_debian11_{}'.format(self.arch),
                'tools/run_tests/artifacts/build_artifact_php.sh')
        return create_jobspec(
            self.name, ['tools/run_tests/artifacts/build_artifact_php.sh'],
            use_workspace=True)
class ProtocArtifact:
    """Builds protoc and protoc-plugin artifacts"""
    def __init__(self, platform, arch, presubmit=False):
        self.name = 'protoc_%s_%s' % (platform, arch)
        self.platform = platform
        self.arch = arch
        self.labels = ['artifact', 'protoc', platform, arch]
        if presubmit:
            self.labels.append('presubmit')
    def pre_build_jobspecs(self):
        # No pre-build jobs are needed.
        return []
    def build_jobspec(self, inner_jobs=None):
        """Return the jobspec building protoc and its plugins.

        Linux builds run under docker; macos and windows build in a
        workspace on the host.
        """
        environ = {}
        if inner_jobs is not None:
            # set number of parallel jobs when building protoc
            environ['GRPC_PROTOC_BUILD_COMPILER_JOBS'] = str(inner_jobs)
        if self.platform != 'windows':
            # Start from empty flags; the platform branches append to them.
            environ['CXXFLAGS'] = ''
            environ['LDFLAGS'] = ''
            if self.platform == 'linux':
                dockerfile_dir = 'tools/dockerfile/grpc_artifact_centos6_{}'.format(
                    self.arch)
                if self.arch == 'aarch64':
                    # for aarch64, use a dockcross manylinux image that will
                    # give us both ready to use crosscompiler and sufficient backward compatibility
                    dockerfile_dir = 'tools/dockerfile/grpc_artifact_protoc_aarch64'
                # statically link the GNU runtimes and strip symbols
                environ['LDFLAGS'] += ' -static-libgcc -static-libstdc++ -s'
                return create_docker_jobspec(
                    self.name,
                    dockerfile_dir,
                    'tools/run_tests/artifacts/build_artifact_protoc.sh',
                    environ=environ)
            else:
                # macos: C++11 against libc++ with the min-version flag
                environ[
                    'CXXFLAGS'] += ' -std=c++11 -stdlib=libc++ %s' % _MACOS_COMPAT_FLAG
                return create_jobspec(
                    self.name,
                    ['tools/run_tests/artifacts/build_artifact_protoc.sh'],
                    environ=environ,
                    timeout_seconds=60 * 60,
                    use_workspace=True)
        else:
            vs_tools_architecture = self.arch # architecture selector passed to vcvarsall.bat
            environ['ARCHITECTURE'] = vs_tools_architecture
            return create_jobspec(
                self.name,
                ['tools\\run_tests\\artifacts\\build_artifact_protoc.bat'],
                environ=environ,
                use_workspace=True)
    def __str__(self):
        return self.name
def _reorder_targets_for_build_speed(targets):
    """Reorder targets so the slowest group starts building first.

    The ruby artifact build produces multiple artifacts in a single job,
    so ruby targets are scheduled before everything else; otherwise they
    would become a long tail once all other builds finish.
    """
    def _build_priority(target):
        # Ruby targets sort first (0), everything else after (1).
        # sorted() is stable, so relative order within each group is kept.
        return 0 if target.name.startswith('ruby_') else 1
    return sorted(targets, key=_build_priority)
def targets():
    """Gets list of supported targets.

    This literal list is the source of truth for every artifact the build
    can produce; entries with presubmit=True are additionally built on
    presubmit runs. The list is reordered for build speed before returning.
    """
    return _reorder_targets_for_build_speed([
        # protoc + plugins
        ProtocArtifact('linux', 'x64', presubmit=True),
        ProtocArtifact('linux', 'x86', presubmit=True),
        ProtocArtifact('linux', 'aarch64', presubmit=True),
        ProtocArtifact('macos', 'x64', presubmit=True),
        ProtocArtifact('windows', 'x64', presubmit=True),
        ProtocArtifact('windows', 'x86', presubmit=True),
        # C# native extension libraries
        CSharpExtArtifact('linux', 'x64', presubmit=True),
        CSharpExtArtifact('linux', 'aarch64', presubmit=True),
        CSharpExtArtifact('macos', 'x64', presubmit=True),
        CSharpExtArtifact('windows', 'x64', presubmit=True),
        CSharpExtArtifact('windows', 'x86', presubmit=True),
        CSharpExtArtifact('linux',
                          'android',
                          arch_abi='arm64-v8a',
                          presubmit=True),
        CSharpExtArtifact('linux',
                          'android',
                          arch_abi='armeabi-v7a',
                          presubmit=True),
        CSharpExtArtifact('linux', 'android', arch_abi='x86', presubmit=True),
        CSharpExtArtifact('macos', 'ios', presubmit=True),
        # Python wheels, one per platform/arch/interpreter combination
        PythonArtifact('manylinux2014', 'x64', 'cp36-cp36m', presubmit=True),
        PythonArtifact('manylinux2014', 'x64', 'cp37-cp37m'),
        PythonArtifact('manylinux2014', 'x64', 'cp38-cp38'),
        PythonArtifact('manylinux2014', 'x64', 'cp39-cp39'),
        PythonArtifact('manylinux2014', 'x64', 'cp310-cp310', presubmit=True),
        PythonArtifact('manylinux2014', 'x86', 'cp36-cp36m', presubmit=True),
        PythonArtifact('manylinux2014', 'x86', 'cp37-cp37m'),
        PythonArtifact('manylinux2014', 'x86', 'cp38-cp38'),
        PythonArtifact('manylinux2014', 'x86', 'cp39-cp39'),
        PythonArtifact('manylinux2014', 'x86', 'cp310-cp310', presubmit=True),
        PythonArtifact('manylinux2010', 'x64', 'cp36-cp36m'),
        PythonArtifact('manylinux2010', 'x64', 'cp37-cp37m', presubmit=True),
        PythonArtifact('manylinux2010', 'x64', 'cp38-cp38'),
        PythonArtifact('manylinux2010', 'x64', 'cp39-cp39'),
        PythonArtifact('manylinux2010', 'x86', 'cp36-cp36m'),
        PythonArtifact('manylinux2010', 'x86', 'cp37-cp37m', presubmit=True),
        PythonArtifact('manylinux2010', 'x86', 'cp38-cp38'),
        PythonArtifact('manylinux2010', 'x86', 'cp39-cp39'),
        PythonArtifact('manylinux2014', 'aarch64', 'cp36-cp36m',
                       presubmit=True),
        PythonArtifact('manylinux2014', 'aarch64', 'cp37-cp37m'),
        PythonArtifact('manylinux2014', 'aarch64', 'cp38-cp38', presubmit=True),
        PythonArtifact('manylinux2014', 'aarch64', 'cp39-cp39'),
        PythonArtifact('manylinux2014', 'aarch64', 'cp310-cp310'),
        PythonArtifact('linux_extra', 'armv7', 'cp36-cp36m', presubmit=True),
        PythonArtifact('linux_extra', 'armv7', 'cp37-cp37m'),
        PythonArtifact('linux_extra', 'armv7', 'cp38-cp38'),
        PythonArtifact('linux_extra', 'armv7', 'cp39-cp39'),
        PythonArtifact('linux_extra', 'armv7', 'cp310-cp310', presubmit=True),
        PythonArtifact('musllinux_1_1', 'x64', 'cp310-cp310', presubmit=True),
        PythonArtifact('musllinux_1_1', 'x64', 'cp36-cp36m', presubmit=True),
        PythonArtifact('musllinux_1_1', 'x64', 'cp37-cp37m'),
        PythonArtifact('musllinux_1_1', 'x64', 'cp38-cp38'),
        PythonArtifact('musllinux_1_1', 'x64', 'cp39-cp39'),
        PythonArtifact('musllinux_1_1', 'x86', 'cp310-cp310', presubmit=True),
        PythonArtifact('musllinux_1_1', 'x86', 'cp36-cp36m', presubmit=True),
        PythonArtifact('musllinux_1_1', 'x86', 'cp37-cp37m'),
        PythonArtifact('musllinux_1_1', 'x86', 'cp38-cp38'),
        PythonArtifact('musllinux_1_1', 'x86', 'cp39-cp39'),
        PythonArtifact('macos', 'x64', 'python3.6', presubmit=True),
        PythonArtifact('macos', 'x64', 'python3.7'),
        PythonArtifact('macos', 'x64', 'python3.8'),
        PythonArtifact('macos', 'x64', 'python3.9'),
        PythonArtifact('macos', 'x64', 'python3.10', presubmit=True),
        PythonArtifact('windows', 'x86', 'Python36_32bit', presubmit=True),
        PythonArtifact('windows', 'x86', 'Python37_32bit'),
        PythonArtifact('windows', 'x86', 'Python38_32bit'),
        PythonArtifact('windows', 'x86', 'Python39_32bit'),
        PythonArtifact('windows', 'x86', 'Python310_32bit', presubmit=True),
        PythonArtifact('windows', 'x64', 'Python36', presubmit=True),
        PythonArtifact('windows', 'x64', 'Python37'),
        PythonArtifact('windows', 'x64', 'Python38'),
        PythonArtifact('windows', 'x64', 'Python39'),
        PythonArtifact('windows', 'x64', 'Python310', presubmit=True),
        # ruby native gems (several are crosscompiled from linux)
        RubyArtifact('linux', 'x86-mingw32', presubmit=True),
        RubyArtifact('linux', 'x64-mingw32', presubmit=True),
        RubyArtifact('linux', 'x86_64-linux', presubmit=True),
        RubyArtifact('linux', 'x86-linux', presubmit=True),
        RubyArtifact('linux', 'x86_64-darwin', presubmit=True),
        RubyArtifact('linux', 'arm64-darwin', presubmit=True),
        RubyArtifact('macos', 'darwin', presubmit=True),
        # PHP PECL packages
        PHPArtifact('linux', 'x64', presubmit=True),
        PHPArtifact('macos', 'x64', presubmit=True),
    ])
| stanley-cheung/grpc | tools/run_tests/artifacts/artifact_targets.py | Python | apache-2.0 | 21,883 |
__author__ = 'venkat'
from tkinter import *
root = Tk()
def send_mail():
    """Placeholder callback for the 'Send E-Mail' checkbutton (no-op)."""
    pass
def send_trap():
    """Placeholder callback for the 'Send Trap' checkbutton (no-op)."""
    pass
def mradioselect():
    """Placeholder callback for the interval radiobuttons (no-op)."""
    pass
# Tk variables backing the two checkbuttons and the radiobutton group.
check_gmail = IntVar()
check_snmp = IntVar()
var = IntVar()
#root.config(bg="grey")
# Header label spanning all four columns of the status table.
LB=Label(root, text="All Node Link status", borderwidth=0)
LB.grid(row=0, columnspan=4)
#LB.configure(bg="orange")
# Checkbuttons toggling notification options; both callbacks are stubs.
CB1=Checkbutton(root, text="Send E-Mail", variable=check_gmail, width=10, command=send_mail)
CB1.grid(row=6, column=0,sticky=E)
#CB1.configure(bg="orange")
CB2=Checkbutton(root, text="Send Trap", variable=check_snmp, width=10, command=send_trap)
CB2.grid(row=6, column=1,sticky=E)
#CB2.configure(bg="orange")
# Radiobuttons sharing `var`; its value is the selected interval in seconds.
# NOTE(review): `var` is never read elsewhere in this file — presumably a
# refresh interval selector; the mradioselect callback is a stub.
RB1 = Radiobutton(root, text="1 Sec ", variable=var, value=1, width=10, command=mradioselect)
RB1.grid(row=8, column=0, sticky=W)
#RB1.configure(bg="orange")
RB2 = Radiobutton(root, text="5 Sec ", variable=var, value=5, width=10, command=mradioselect)
RB2.grid(row=8, column=1, sticky=W)
#RB2.configure(bg="orange")
RB3 = Radiobutton(root, text="10 Sec", variable=var, value=10, width=10, command=mradioselect)
RB3.grid(row=8, column=2, sticky=W)
#RB3.configure(bg="orange")
RB4 = Radiobutton(root, text="30 Sec", variable=var, value=30, width=10, command=mradioselect)
RB4.grid(row=8, column=3, sticky=W)
#RB4.configure(bg="orange")
def table():
    """Redraw the 4x4 status grid and reschedule itself every 5 seconds."""
    for row in range(5):
        current_row = []
        for column in range(4):
            # row 0 is the header created outside this function; skip it
            if row != 0 :
                label = Label(root, text="%s/%s" % (row, column), borderwidth=0, width=10)
                label.grid(row=row, column=column, sticky="nsew", padx=1, pady=1)
                label.configure(bg="white")
                # NOTE(review): row 2 is hard-coded red — presumably marks a
                # down link in this demo; confirm intended behavior.
                if row==2:
                    label.config(bg="red")
                else:
                    label.config(fg="black")
                # NOTE(review): current_row is filled but never read.
                current_row.append(label)
        for column in range(4):
            root.grid_columnconfigure(column, weight=1)
    lb=Label(root, text="")
    lb.grid(row=5, sticky=W)
    #lb.configure(bg="orange")
    print("5 sec")
    # Re-run after 5000 ms so the table refreshes periodically.
    root.after(5000,table)
# Draw the first table, then enter the Tk event loop.
table()
root.mainloop()
| venkatant/msproject | sample.py | Python | gpl-2.0 | 2,075 |
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import context
from nova import db
from nova import log as logging
LOG = logging.getLogger(__name__)
def notify(message):
    """Look for specific compute manager events and interpret them
    so as to keep the Capacity table up to date.

    @param message: notification dict with 'event_type', 'publisher_id'
                    and (for delete events) a 'payload' dict.
    @returns: True if the capacity table was updated, False if the
              message was ignored.
    NOTE: the True/False return codes are only for testing.
    """
    # The event_type must start with 'compute.instance.'
    event_type = message.get('event_type', None)
    preamble = 'compute.instance.'
    if not event_type or not event_type.startswith(preamble):
        return False
    # Events we're interested in end with .start and .end
    event = event_type[len(preamble):]
    parts = event.split('.')
    suffix = parts[-1].lower()
    # Strip the '.start'/'.end' suffix to get the bare operation name.
    event = event[:(-len(suffix) - 1)]
    if suffix not in ['start', 'end']:
        return False
    started = suffix == 'start'
    ended = suffix == 'end'
    if started and event == 'create':
        # We've already updated this stuff in the scheduler. Don't redo the
        # work here.
        return False
    # A '.start' adds one unit of in-flight work; '.end' removes it.
    work = 1 if started else -1
    # Extract the host name from the publisher id ...
    publisher_preamble = 'compute.'
    publisher = message.get('publisher_id', None)
    if not publisher or not publisher.startswith(publisher_preamble):
        return False
    host = publisher[len(publisher_preamble):]
    # If we deleted an instance, make sure we reclaim the resources.
    # We may need to do something explicit for rebuild/migrate.
    free_ram_mb = 0
    free_disk_gb = 0
    vms = 0
    #Eneabegin
    n_cpu_vms = 0
    n_io_vms = 0
    n_mem_vms = 0
    n_und_vms = 0
    #Eneaend
    if ended and event == 'delete':
        vms = -1
        #Eneabegin
        n_cpu_vms = -1
        n_io_vms = -1
        n_mem_vms = -1
        # FIX(review): was 'n_mem_und = -1' — a typo creating a stray,
        # unused variable instead of setting n_und_vms like its siblings.
        # NOTE(review): these -1 defaults are immediately overwritten by
        # the payload.get() calls below — presumably the payload defaults
        # were meant to be -1; confirm intended semantics.
        n_und_vms = -1
        #Eneaend
        payload = message.get('payload', {})
        free_ram_mb = payload.get('memory_mb', 0)
        free_disk_gb = payload.get('disk_gb', 0)
        #Eneabegin
        n_cpu_vms = payload.get('n_cpu_vms', 0)
        n_io_vms = payload.get('n_io_vms', 0)
        n_mem_vms = payload.get('n_mem_vms', 0)
        n_und_vms = payload.get('n_und_vms', 0)
        #Eneaend
    LOG.debug("EventType=%(event_type)s -> host %(host)s: "
              "ram %(free_ram_mb)d, disk %(free_disk_gb)d, "
              "work %(work)d, vms%(vms)d, n_cpu_vms %(n_cpu_vms)d, "
              "n_io_vms %(n_io_vms)d, n_mem_vms %(n_mem_vms)d, "
              "n_und_vms %(n_und_vms)d" % locals())
    #Eneabegin
    db.api.compute_node_utilization_update(context.get_admin_context(), host,
            free_ram_mb_delta=free_ram_mb, free_disk_gb_delta=free_disk_gb,
            work_delta=work, vm_delta=vms, n_cpu_delta=n_cpu_vms, n_io_delta=n_io_vms,
            n_mem_delta=n_mem_vms, n_und_delta=n_und_vms )
    #Eneaend
    return True
| eneabio/nova | nova/notifier/capacity_notifier.py | Python | apache-2.0 | 3,445 |
#!/usr/bin/env python
from __future__ import print_function
import sys
# Maps a 0-63 square index (rank-major, a1 = 0 ... h8 = 63) to its
# algebraic coordinate string.
sq_to_str = [
    'a1', 'b1', 'c1', 'd1', 'e1', 'f1', 'g1', 'h1',
    'a2', 'b2', 'c2', 'd2', 'e2', 'f2', 'g2', 'h2',
    'a3', 'b3', 'c3', 'd3', 'e3', 'f3', 'g3', 'h3',
    'a4', 'b4', 'c4', 'd4', 'e4', 'f4', 'g4', 'h4',
    'a5', 'b5', 'c5', 'd5', 'e5', 'f5', 'g5', 'h5',
    'a6', 'b6', 'c6', 'd6', 'e6', 'f6', 'g6', 'h6',
    'a7', 'b7', 'c7', 'd7', 'e7', 'f7', 'g7', 'h7',
    'a8', 'b8', 'c8', 'd8', 'e8', 'f8', 'g8', 'h8'
]
def interp_flags(flags):
    """Map the 2-bit move-flag field to a human-readable name.

    Returns 'unknown' for any value outside the encoded 0-3 range.
    """
    # Table-driven lookup replaces the original if/elif chain.
    names = {0: 'NONE', 1: 'enpassant', 2: 'promo', 3: 'castle'}
    return names.get(flags, 'unknown')
def interp_promo(flags, pc):
    """Return the promotion piece letter for a move.

    flags: the move's flag field; only 2 (promotion) carries a piece.
    pc: index into the visual piece table.
    Returns 'none' for non-promotions and 'unknown' for an out-of-range
    piece index.
    """
    if flags != 2:
        return 'none'
    visual_pcs = "NBRQPKnbrqpk "
    # Chained comparison is the idiomatic bounds check.
    if 0 <= pc < len(visual_pcs):
        return visual_pcs[pc]
    return 'unknown'
if __name__ == '__main__':
    if len(sys.argv) == 1:
        print("Usage: {} <move>".format(sys.argv[0]))
        sys.exit(0)
    # Decode a 16-bit packed move:
    #   bits 0-5   destination square
    #   bits 6-11  origin square
    #   bits 12-13 promotion piece index (meaningful only for promo flag)
    #   bits 14-15 special-move flags (see interp_flags)
    move = int(sys.argv[1])
    print(move)
    tosq = ((move >> 0) & 0x3f)
    fromsq = ((move >> 6) & 0x3f)
    promopc = ((move >> 12) & 0x03)
    flags = ((move >> 14))
    print("From : {}".format(sq_to_str[fromsq]))
    print("To : {}".format(sq_to_str[tosq]))
    print("Flags: {}".format(interp_flags(flags)))
    print("Promo: {}".format(interp_promo(flags, promopc)))
| selavy/chess | print_move.py | Python | mit | 1,472 |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# Demonstrates Python 2 import semantics: the star import pulls import2's
# public names (including bar) into this module's namespace, while the
# plain import additionally binds the module object itself.
from import2 import *
import import2
# NOTE: Python 2 print statement — this file is not Python 3 compatible.
print 'In import1'
import2.bar()  # call bar through the module object
bar()  # same function, reached via the star import
| licongyu95/learning_python | core_python_programming/cap14/import1.py | Python | unlicense | 123 |
# -*- Mode: Python; test-case-name: flumotion.test.test_registry -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
"""parsing of registry, which holds component and bundle information
"""
import os
import stat
import errno
import sys
import tempfile
from StringIO import StringIO
from xml.sax import saxutils
from twisted.spread import pb
from twisted.python import runtime
from flumotion.common import common, log, errors, fxml, python
from flumotion.common.python import makedirs
from flumotion.common.bundle import BundlerBasket, MergedBundler
from flumotion.configure import configure
__all__ = ['ComponentRegistry', 'registry']
__version__ = "$Rev$"
# Re-enable when reading the registry cache is lighter-weight, or we
# decide that it's a good idea, or something. See #799.
READ_CACHE = False
# Rank used when no rank is defined in the wizard entry
FLU_RANK_NONE = 0
# NOTE(review): the two lists below are presumably consulted when
# validating <wizard> entries; their use is outside this chunk — confirm.
_VALID_WIZARD_COMPONENT_TYPES = [
    'audio-producer',
    'video-producer',
    'muxer',
    'audio-encoder',
    'video-encoder',
    'consumer',
]
_VALID_WIZARD_PLUG_TYPES = [
    'http-consumer',
    'httpserver-plug',
]
def _getMTime(file):
    """Return the modification time of the given path, in integer seconds."""
    # Index the stat result with ST_MTIME (an int) rather than using the
    # float st_mtime attribute, preserving the original integer semantics.
    stat_result = os.stat(file)
    return stat_result[stat.ST_MTIME]
class RegistryEntryScenario(pb.Copyable, pb.RemoteCopy):
    """
    I represent a <scenario> entry in the registry
    """
    def __init__(self, type, description, base, entries):
        """
        @param type:        the type of this scenario
        @type  type:        str
        @param description: description of this scenario
        @type  description: str
        @param base:        base directory where this scenario is placed
        @type  base:        str
        @param entries:     dict of entry point type -> entry
        @type  entries:     dict of str -> L{RegistryEntryEntry}
        """
        self.type = type
        # we don't want to end up with the string "None"
        self.description = description or ""
        self.base = base
        self.entries = entries
    def getEntries(self):
        """
        Get the entries asociated with this scenario
        @rtype: list of L{RegistryEntryEntry}
        """
        return self.entries.values()
    def getEntryByType(self, type):
        """
        Get the entry point for the given type of entry.
        @param type: The type of the wanted entry.
        @type type: string
        @rtype: L{RegistryEntryEntry}
        """
        return self.entries[type]
    def getType(self):
        """Return the scenario's type string."""
        return self.type
    def getBase(self):
        """Return the base directory this scenario was parsed from."""
        return self.base
    def getDescription(self):
        """Return the (possibly empty) description string."""
        return self.description
# Register so instances can be copied over Perspective Broker connections.
pb.setUnjellyableForClass(RegistryEntryScenario, RegistryEntryScenario)
class RegistryEntryComponent(pb.Copyable, pb.RemoteCopy):
    """
    I represent a <component> entry in the registry
    """
    # RegistryEntryComponent has a constructor with a lot of arguments,
    # but that's ok here. Allow it through pychecker.
    __pychecker__ = 'maxargs=15'
    def __init__(self, filename, type,
                 source, description, base, properties, files,
                 entries, eaters, feeders, needs_sync, clock_priority,
                 sockets, wizards):
        """
        @param filename:   name of the XML file this component is parsed from
        @type  filename:   str
        @param properties: dict of name -> property
        @type  properties: dict of str -> L{RegistryEntryProperty}
        @param files:      list of files
        @type  files:      list of L{RegistryEntryFile}
        @param entries:    dict of entry point type -> entry
        @type  entries:    dict of str -> L{RegistryEntryEntry}
        @param sockets:    list of sockets supported by the component
        @type  sockets:    list of str
        @param wizards:    list of wizard entries
        @type  wizards:    list of L{RegistryEntryWizard}
        """
        self.filename = filename
        self.type = type
        self.source = source
        self.description = description
        # we don't want to end up with the string "None"
        if not self.description:
            self.description = ""
        self.base = base
        self.properties = properties
        self.files = files
        self.entries = entries
        self.eaters = eaters
        self.feeders = feeders
        self.needs_sync = needs_sync
        self.clock_priority = clock_priority
        self.sockets = sockets
        self.wizards = wizards
    def getProperties(self):
        """
        Get a list of all properties.
        @rtype: list of L{RegistryEntryProperty}
        """
        return self.properties.values()
    def hasProperty(self, name):
        """
        Check if the component has a property with the given name.
        """
        return name in self.properties.keys()
    def getFiles(self):
        """
        @rtype: list of L{RegistryEntryFile}
        """
        return self.files
    def getEntries(self):
        """Return all entry points declared by this component."""
        return self.entries.values()
    def getEntryByType(self, type):
        """
        Get the entry point for the given type of entry.
        @type type: string
        """
        return self.entries[type]
    def getGUIEntry(self):
        """Return the single GUI file's name, or None if absent/ambiguous."""
        if not self.files:
            return
        # FIXME: Handle multiple files
        if len(self.files) > 1:
            return
        return self.files[0].getFilename()
    def getType(self):
        """Return the component type string."""
        return self.type
    def getBase(self):
        """Return the base directory this component was parsed from."""
        return self.base
    def getDescription(self):
        """Return the (possibly empty) description string."""
        return self.description
    def getSource(self):
        """Return the source module name of the component."""
        return self.source
    def getEaters(self):
        """Return the eater entries of this component."""
        return self.eaters
    def getFeeders(self):
        """Return the feeder names of this component."""
        return self.feeders
    def getNeedsSynchronization(self):
        """Return whether the component requires clock synchronization."""
        return self.needs_sync
    def getClockPriority(self):
        """Return the component's priority for providing the clock."""
        return self.clock_priority
    def getSockets(self):
        """Return the socket names this component supports."""
        return self.sockets
# Register so instances can be copied over Perspective Broker connections.
pb.setUnjellyableForClass(RegistryEntryComponent, RegistryEntryComponent)
class RegistryEntryPlug:
    """I represent a <plug> entry in the registry."""

    def __init__(self, filename, type,
                 description, socket, entries, properties, wizards):
        """
        @param filename:    name of the XML file this plug was parsed from
        @type  filename:    str
        @param type:        the type of plug
        @type  type:        str
        @param description: the translatable description of the plug
        @type  description: str
        @param socket:      fully qualified class name of the socket this
                            plug can be plugged in to
        @type  socket:      str
        @param entries:     entry points for instantiating the plug
        @type  entries:     dict of str -> L{RegistryEntryEntry}
        @param properties:  properties of the plug
        @type  properties:  dict of str -> L{RegistryEntryProperty}
        @param wizards:     wizard entries
        @type  wizards:     list of L{RegistryEntryWizard}
        """
        self.filename = filename
        self.type = type
        self.description = description
        self.socket = socket
        self.entries = entries
        self.properties = properties
        self.wizards = wizards

    def getProperties(self):
        """Return every property declared by this plug.

        @rtype: list of L{RegistryEntryProperty}
        """
        return self.properties.values()

    def hasProperty(self, name):
        """Return True if the plug declares a property with that name."""
        return name in self.properties

    def getEntryByType(self, type):
        """Return the entry point registered for the given entry type."""
        return self.entries[type]

    def getEntry(self):
        """Return the default entry point."""
        return self.entries['default']

    def getEntries(self):
        """Return all entry points of this plug."""
        return self.entries.values()

    def getType(self):
        """Return the plug type string."""
        return self.type

    def getDescription(self):
        """Return the plug's description."""
        return self.description

    def getSocket(self):
        """Return the socket class name the plug fits into."""
        return self.socket
class RegistryEntryBundle:
    """I represent a <bundle> entry in the registry."""

    def __init__(self, name, project, under, dependencies, directories):
        self.name = name
        self.project = project
        self.under = under
        self.dependencies = dependencies
        self.directories = directories

    def __repr__(self):
        return '<Bundle name=%s>' % self.name

    def getName(self):
        """Return the bundle's name."""
        return self.name

    def getDependencies(self):
        """Return the names of the bundles this bundle depends on.

        @rtype: list of str
        """
        return self.dependencies

    def getDirectories(self):
        """Return the directories registered in this bundle.

        @rtype: list of L{RegistryEntryBundleDirectory}
        """
        return self.directories

    def getProject(self):
        """Return the project this bundle belongs to."""
        return self.project

    def getUnder(self):
        """Return the configure variable name the bundle installs under."""
        return self.under

    def getBaseDir(self):
        """Resolve the base directory the bundle's files live beneath."""
        # Core flumotion bundles resolve through the configure module;
        # any other project is looked up via the project registry.
        if self.project == configure.PACKAGE:
            return getattr(configure, self.under)
        from flumotion.project import project
        return project.get(self.project, self.under)
class RegistryEntryBundleDirectory:
    """I represent a <directory> entry in the registry."""

    def __init__(self, name, files):
        self.name = name
        self.files = files

    def getName(self):
        """Return the directory's name."""
        return self.name

    def getFiles(self):
        """Return the files registered below this directory."""
        return self.files
class RegistryEntryBundleFilename:
    """I represent a <filename> entry in the registry."""

    def __init__(self, location, relative):
        self.location = location
        self.relative = relative

    def getLocation(self):
        """Return the file's location."""
        return self.location

    def getRelative(self):
        """Return the file's relative path."""
        return self.relative
class RegistryEntryProperty:
    """I represent a <property> entry in the registry."""

    def __init__(self, name, type, description,
                 required=False, multiple=False):
        self.name = name
        self.type = type
        # Guard against None so consumers always get a string back.
        self.description = description or ""
        self.required = required
        self.multiple = multiple

    def __repr__(self):
        return '<Property name=%s>' % self.name

    def getName(self):
        """Return the property's name."""
        return self.name

    def getType(self):
        """Return the property's type string."""
        return self.type

    def getDescription(self):
        """Return the (possibly empty) description."""
        return self.description

    def isRequired(self):
        """Return whether the property must be specified."""
        return self.required

    def isMultiple(self):
        """Return whether the property may be given more than once."""
        return self.multiple
class RegistryEntryCompoundProperty(RegistryEntryProperty):
    """I represent a <compound-property> entry in the registry."""

    def __init__(self, name, description, properties, required=False,
                 multiple=False):
        # A compound property always has the fixed type 'compound'.
        RegistryEntryProperty.__init__(self, name, 'compound', description,
                                       required, multiple)
        self.properties = properties

    def __repr__(self):
        return '<Compound-property name=%s>' % self.name

    def getProperties(self):
        """Return all sub-properties of this compound property.

        @rtype: list of L{RegistryEntryProperty}
        """
        return self.properties.values()

    def hasProperty(self, name):
        """Return True if a sub-property with the given name exists."""
        return name in self.properties
class RegistryEntryFile:
    """I represent a <file> entry in the registry."""

    def __init__(self, filename, type):
        self.filename = filename
        self.type = type

    def getName(self):
        """Return just the basename of the file."""
        return os.path.basename(self.filename)

    def getType(self):
        """Return the file's type string."""
        return self.type

    def getFilename(self):
        """Return the full filename."""
        return self.filename

    def isType(self, type):
        """Return True if this file is of the given type."""
        return self.type == type
class RegistryEntryEntry:
    """I represent an <entry> entry in the registry."""

    def __init__(self, type, location, function):
        self.type = type
        self.location = location
        self.function = function

    def getType(self):
        """Return the entry type."""
        return self.type

    def getLocation(self):
        """Return the path of the file implementing the entry point."""
        return self.location

    def getModuleName(self, base=None):
        """Return the dotted module name of the entry's location,
        optionally resolved relative to ``base``."""
        location = self.getLocation()
        path = os.path.join(base, location) if base else location
        return common.pathToModuleName(path)

    def getFunction(self):
        """Return the entry-point function name."""
        return self.function
class RegistryEntryEater:
    """I represent an <eater> entry in the registry."""

    def __init__(self, name, required=True, multiple=False):
        self.name = name
        self.required = required
        self.multiple = multiple

    def getName(self):
        """Return the eater's name."""
        return self.name

    def getRequired(self):
        """Return whether the eater must be connected."""
        return self.required

    def getMultiple(self):
        """Return whether the eater accepts multiple feeds."""
        return self.multiple
class RegistryEntryWizard(pb.Copyable):
    "This class represents a <wizard> entry in the registry"
    def __init__(self, componentType, type, description, feeder,
                 eater, accepts, provides, rank=FLU_RANK_NONE):
        # componentType: the component/plug type this wizard entry is for
        # type: wizard entry type — presumably one of the
        #       _VALID_WIZARD_*_TYPES lists above; confirm at parse site
        # accepts/provides: presumably lists of L{RegistryEntryWizardFormat}
        # rank: ordering hint; defaults to FLU_RANK_NONE (0)
        self.componentType = componentType
        self.type = type
        self.description = description
        self.feeder = feeder
        self.eater = eater
        self.accepts = accepts
        self.provides = provides
        self.rank = rank
    def __repr__(self):
        return '<wizard %s type=%s, feeder=%s>' % (self.componentType,
                                                   self.type, self.feeder)
class RegistryEntryWizardFormat(pb.Copyable):
    """
    Representation of an <accept-format> or <provide-format> node in
    the registry.
    """

    def __init__(self, media_type):
        self.media_type = media_type
class RegistryParser(fxml.Parser):
    """
    Registry parser
    I have two modes, one to parse registries and another one to parse
    standalone component files.
    For parsing registries use the parseRegistry function and for components
    use parseRegistryFile.
    I also have a list of all components and directories which the
    registry uses (instead of saving its own copy)
    """

    def __init__(self):
        self.clean()

    def clean(self):
        """Reset all parsed registry state."""
        self._components = {}
        self._directories = {} # path -> RegistryDirectory
        self._bundles = {}
        self._plugs = {}
        self._scenarios = {}

    def getComponents(self):
        return self._components.values()

    def getComponent(self, name):
        # EAFP: unknown names surface as a domain-specific error.
        try:
            return self._components[name]
        except KeyError:
            raise errors.UnknownComponentError("unknown component type:"
                                               " %s" % (name, ))

    def getScenarios(self):
        return self._scenarios.values()

    def getScenarioByType(self, type):
        # Unlike getComponent/getPlug, an unknown scenario type returns
        # None instead of raising.
        if type in self._scenarios:
            return self._scenarios[type]
        return None

    def getPlugs(self):
        return self._plugs.values()

    def getPlug(self, name):
        try:
            return self._plugs[name]
        except KeyError:
            raise errors.UnknownPlugError("unknown plug type: %s"
                                          % (name, ))

    def _parseComponents(self, node):
        # <components>
        #   <component>
        # </components>
        # returns: dict of type -> RegistryEntryComponent
        components = {}

        def addComponent(comp):
            components[comp.getType()] = comp

        parsers = {'component': (self._parseComponent, addComponent)}
        self.parseFromTable(node, parsers)

        return components

    def _parseComponent(self, node):
        # <component type="..." base="..." _description="...">
        #   <source>
        #   <eater>
        #   <feeder>
        #   <properties>
        #   <entries>
        #   <synchronization>
        #   <sockets>
        #   <wizard>
        # </component>

        # F0.10: remove description, require _description
        componentType, baseDir, description, _description = \
            self.parseAttributes(node,
                                 required=('type', 'base'),
                                 optional=('description', '_description'))
        # intltool-extract only translates attributes starting with _
        if description:
            import warnings
            warnings.warn(
                "Please change '<component description=...'"
                " to '<component _description=...' for %s" % componentType,
                DeprecationWarning)
        if _description:
            description = _description

        files = []
        source = fxml.Box(None)
        entries = {}
        eaters = []
        feeders = []
        synchronization = fxml.Box((False, 100))
        sockets = []
        properties = {}
        wizards = []

        # Merge in options for inherit
        #if node.hasAttribute('inherit'):
        #    base_type = str(node.getAttribute('inherit'))
        #    base = self.getComponent(base_type)
        #    for prop in base.getProperties():
        #        properties[prop.getName()] = prop

        # Child elements are dispatched to sub-parsers; each result is
        # accumulated via the paired collector callable.
        parsers = {
            'source': (self._parseSource, source.set),
            'properties': (self._parseProperties, properties.update),
            'files': (self._parseFiles, files.extend),
            'entries': (self._parseEntries, entries.update),
            'eater': (self._parseEater, eaters.append),
            'feeder': (self._parseFeeder, feeders.append),
            'synchronization': (self._parseSynchronization,
                                synchronization.set),
            'sockets': (self._parseSockets, sockets.extend),
            'wizard': (self._parseComponentWizard, wizards.append),
        }
        self.parseFromTable(node, parsers)

        source = source.unbox()
        needs_sync, clock_priority = synchronization.unbox()

        return RegistryEntryComponent(self.filename,
                                      componentType, source, description,
                                      baseDir, properties, files,
                                      entries, eaters, feeders,
                                      needs_sync, clock_priority,
                                      sockets, wizards)

    def _parseScenarios(self, node):
        # <scenarios>
        #   <scenario>
        # </scenarios>
        scenarios = {}

        def addScenario(scenario):
            scenarios[scenario.getType()] = scenario

        parsers = {'scenario': (self._parseScenario, addScenario)}
        self.parseFromTable(node, parsers)

        return scenarios

    def _parseScenario(self, node):
        # <scenario type="..." base="..." _description="...">
        #   <entries>
        # </scenario>
        scenarioType, baseDir, description = \
            self.parseAttributes(node,
                                 required=('type', 'base'),
                                 optional=('_description', ))

        entries = {}
        parsers = {
            'entries': (self._parseEntries, entries.update),
        }
        self.parseFromTable(node, parsers)

        return RegistryEntryScenario(scenarioType, description,
                                     baseDir, entries)

    def _parseSource(self, node):
        # <source location="..."/>
        location, = self.parseAttributes(node, ('location', ))
        return location

    def _parseProperty(self, node):
        # <property name="..." type="" required="yes/no" multiple="yes/no"/>
        # returns: RegistryEntryProperty

        # F0.10: remove description, require _description
        attrs = self.parseAttributes(node, required=('name', 'type'),
            optional=('required', 'multiple', 'description', '_description'))
        name, propertyType, required, multiple, description, _d = attrs
        if description:
            import warnings
            warnings.warn("Please change '<property description=...'"
                          " to '<property _description=...' for %s" % name,
                          DeprecationWarning)
        if _d:
            description = _d
        # see flumotion.common.config.parsePropertyValue
        allowed = ('string', 'rawstring', 'int', 'long', 'bool',
                   'float', 'fraction')
        if propertyType not in allowed:
            raise fxml.ParserError(
                "<property> %s's type is not one of %s" % (
                    name, ", ".join(allowed)))
        required = common.strToBool(required)
        multiple = common.strToBool(multiple)
        return RegistryEntryProperty(name, propertyType, description,
                                     required=required, multiple=multiple)

    def _parseCompoundProperty(self, node):
        # <compound-property name="..." required="yes/no" multiple="yes/no">
        #   <property ... />*
        #   <compound-property ... >...</compound-property>*
        # </compound-property>
        # returns: RegistryEntryCompoundProperty

        # F0.10: remove description, require _description
        attrs = self.parseAttributes(node, required=('name', ),
            optional=('required', 'multiple', 'description', '_description'))
        name, required, multiple, description, _description = attrs
        if description:
            import warnings
            warnings.warn("Please change '<compound-property description=...'"
                          " to '<compound-property _description=...' for %s" % name,
                          DeprecationWarning)
        if _description:
            description = _description
        # see flumotion.common.config.parsePropertyValue
        required = common.strToBool(required)
        multiple = common.strToBool(multiple)

        properties = {}

        def addProperty(prop):
            properties[prop.getName()] = prop

        # compound properties nest recursively
        parsers = {'property': (self._parseProperty, addProperty),
                   'compound-property': (self._parseCompoundProperty,
                                         addProperty)}
        self.parseFromTable(node, parsers)

        return RegistryEntryCompoundProperty(name, description, properties,
                                             required=required, multiple=multiple)

    def _parseProperties(self, node):
        # <properties>
        #   <property>*
        #   <compound-property>*
        # </properties>
        properties = {}

        def addProperty(prop):
            properties[prop.getName()] = prop

        parsers = {'property': (self._parseProperty, addProperty),
                   'compound-property': (self._parseCompoundProperty,
                                         addProperty)}
        self.parseFromTable(node, parsers)

        return properties

    def _parseFile(self, node):
        # <file name="..." type=""/>
        # returns: RegistryEntryFile
        name, fileType = self.parseAttributes(node, ('name', 'type'))
        # file names are resolved relative to the registry file itself
        directory = os.path.split(self.filename)[0]
        filename = os.path.join(directory, name)
        return RegistryEntryFile(filename, fileType)

    def _parseFiles(self, node):
        # <files>
        #   <file>
        # </files>
        files = []
        parsers = {'file': (self._parseFile, files.append)}
        self.parseFromTable(node, parsers)
        return files

    def _parseSocket(self, node):
        # <socket type=""/>
        # returns: str of the type
        socketType, = self.parseAttributes(node, ('type', ))
        return socketType

    def _parseSockets(self, node):
        # <sockets>
        #   <socket>
        # </sockets>
        sockets = []
        parsers = {'socket': (self._parseSocket, sockets.append)}
        self.parseFromTable(node, parsers)
        return sockets

    def _parseEntry(self, node):
        attrs = self.parseAttributes(node, ('type', 'location', 'function'))
        entryType, location, function = attrs
        return RegistryEntryEntry(entryType, location, function)

    def _parseEntries(self, node):
        # <entries>
        #   <entry>
        # </entries>
        # returns: dict of type -> entry
        entries = {}

        def addEntry(entry):
            # duplicate entry types are a hard parse error
            if entry.getType() in entries:
                raise fxml.ParserError("entry %s already specified"
                                       % entry.getType())
            entries[entry.getType()] = entry

        parsers = {'entry': (self._parseEntry, addEntry)}
        self.parseFromTable(node, parsers)

        return entries

    def _parseEater(self, node):
        # <eater name="..." [required="yes/no"] [multiple="yes/no"]/>
        attrs = self.parseAttributes(node, ('name', ),
                                     ('required', 'multiple'))
        name, required, multiple = attrs
        # only required defaults to True
        required = common.strToBool(required or 'True')
        multiple = common.strToBool(multiple)
        return RegistryEntryEater(name, required, multiple)

    def _parseFeeder(self, node):
        # <feeder name="..."/>
        name, = self.parseAttributes(node, ('name', ))
        return name

    def _parseSynchronization(self, node):
        # <synchronization [required="yes/no"] [clock-priority="100"]/>
        attrs = self.parseAttributes(node, (), ('required', 'clock-priority'))
        required, clock_priority = attrs
        required = common.strToBool(required)
        clock_priority = int(clock_priority or '100')
        return required, clock_priority

    def _parsePlugEntry(self, node):
        # plug entries: type is optional and defaults to 'default'
        attrs = self.parseAttributes(node,
                                     ('location', 'function'), ('type', ))
        location, function, entryType = attrs
        if not entryType:
            entryType = 'default'
        return RegistryEntryEntry(entryType, location, function)

    def _parseDefaultPlugEntry(self, node):
        return {'default': self._parsePlugEntry(node)}

    def _parsePlugEntries(self, node):
        # <entries>
        #   <entry>
        # </entries>
        # returns: dict of type -> entry
        entries = {}

        def addEntry(entry):
            if entry.getType() in entries:
                raise fxml.ParserError("entry %s already specified"
                                       % entry.getType())
            entries[entry.getType()] = entry

        parsers = {'entry': (self._parsePlugEntry, addEntry)}
        self.parseFromTable(node, parsers)

        return entries

    def _parsePlug(self, node):
        # <plug socket="..." type="..." _description="...">
        #   <entries>
        #   <entry>
        #   <properties>
        #   <wizard>
        # </plug>

        # F0.10: make _description be required
        plugType, socket, description = \
            self.parseAttributes(node, required=('type', 'socket'),
                                 optional=('_description', ))

        if not description:
            import warnings
            warnings.warn(
                "Please add '_description=...' attribute to plug '%s'" %
                plugType,
                DeprecationWarning)
            description = 'TODO'

        entries = {}
        properties = {}
        wizards = []

        parsers = {
            'entries': (self._parsePlugEntries, entries.update),
            # backwards compatibility
            'entry': (self._parseDefaultPlugEntry, entries.update),
            'properties': (self._parseProperties, properties.update),
            'wizard': (self._parsePlugWizard, wizards.append),
        }
        self.parseFromTable(node, parsers)

        if not 'default' in entries:
            raise fxml.ParserError(
                "<plug> %s needs a default <entry>" % plugType)

        return RegistryEntryPlug(self.filename, plugType, description,
                                 socket, entries, properties,
                                 wizards)

    def _parsePlugs(self, node):
        # <plugs>
        #   <plug>
        # </plugs>
        self.checkAttributes(node)

        plugs = {}

        def addPlug(plug):
            plugs[plug.getType()] = plug

        parsers = {'plug': (self._parsePlug, addPlug)}
        self.parseFromTable(node, parsers)

        return plugs

    ## Component registry specific functions

    def parseRegistryFile(self, file):
        """
        @param file: The file to parse, either as an open file object,
        or as the name of a file to open.
        @type file: str or file.
        """
        if isinstance(file, basestring):
            self.filename = file
        else:
            self.filename = getattr(file, 'name', '<string>')
        root = self.getRoot(file)
        node = root.documentElement
        if node.nodeName != 'registry':
            # ignore silently, since this function is used to parse all
            # .xml files encountered
            self.debug('%s does not have registry as root tag', self.filename)
            return
        # shouldn't have <directories> elements in registry fragments
        self._parseRoot(node, disallowed=['directories'])
        root.unlink()

    def _parseBundles(self, node):
        # <bundles>
        #   <bundle>
        # </bundles>
        bundles = {}

        def addBundle(bundle):
            bundles[bundle.getName()] = bundle

        parsers = {'bundle': (self._parseBundle, addBundle)}
        self.parseFromTable(node, parsers)

        return bundles

    def _parseBundle(self, node):
        # <bundle name="...">
        #   <dependencies>
        #   <directories>
        # </bundle>
        attrs = self.parseAttributes(node, ('name', ), ('project', 'under'))
        name, project, under = attrs
        project = project or configure.PACKAGE
        under = under or 'pythondir'

        dependencies = []
        directories = []

        parsers = {'dependencies': (self._parseBundleDependencies,
                                    dependencies.extend),
                   'directories': (self._parseBundleDirectories,
                                   directories.extend)}
        self.parseFromTable(node, parsers)

        return RegistryEntryBundle(name, project, under,
                                   dependencies, directories)

    def _parseBundleDependency(self, node):
        name, = self.parseAttributes(node, ('name', ))
        return name

    def _parseBundleDependencies(self, node):
        # <dependencies>
        #   <dependency name="">
        # </dependencies>
        dependencies = []

        parsers = {'dependency': (self._parseBundleDependency,
                                  dependencies.append)}
        self.parseFromTable(node, parsers)

        return dependencies

    def _parseBundleDirectories(self, node):
        # <directories>
        #   <directory>
        # </directories>
        directories = []

        parsers = {'directory': (self._parseBundleDirectory,
                                 directories.append)}
        self.parseFromTable(node, parsers)

        return directories

    def _parseBundleDirectoryFilename(self, node, name):
        attrs = self.parseAttributes(node, ('location', ), ('relative', ))
        location, relative = attrs

        # the relative path defaults to the directory name + location
        if not relative:
            relative = os.path.join(name, location)

        return RegistryEntryBundleFilename(location, relative)

    def _parseBundleDirectory(self, node):
        # <directory name="">
        #   <filename location="" [ relative="" ] >
        # </directory>
        name, = self.parseAttributes(node, ('name', ))

        filenames = []

        def parseFilename(node):
            return self._parseBundleDirectoryFilename(node, name)

        parsers = {'filename': (parseFilename, filenames.append)}
        self.parseFromTable(node, parsers)

        return RegistryEntryBundleDirectory(name, filenames)

    ## Base registry specific functions

    def parseRegistry(self, file):
        """
        @param file: The file to parse, either as an open file object,
        or as the name of a file to open.
        @type file: str or file.
        """
        if isinstance(file, basestring):
            self.filename = file
        else:
            self.filename = getattr(file, 'name', '<string>')
        root = self.getRoot(file)
        self._parseRoot(root.documentElement)
        root.unlink()

    def getDirectories(self):
        return self._directories.values()

    def getDirectory(self, name):
        return self._directories[name]

    def addDirectory(self, directory):
        """
        Add a registry path object to the parser.

        @type directory: {RegistryDirectory}
        """
        self._directories[directory.getPath()] = directory

    def removeDirectoryByPath(self, path):
        """
        Remove a directory from the parser given the path.
        Used when the path does not actually contain any registry information.
        """
        if path in self._directories.keys():
            del self._directories[path]

    def _parseRoot(self, node, disallowed=None):
        # <components>...</components>*
        # <plugs>...</plugs>*
        # <directories>...</directories>*
        # <bundles>...</bundles>*
        # <scenarios>...</scenarios>*
        parsers = {'components': (self._parseComponents,
                                  self._components.update),
                   'directories': (self._parseDirectories,
                                   self._directories.update),
                   'bundles': (self._parseBundles, self._bundles.update),
                   'plugs': (self._parsePlugs, self._plugs.update),
                   'scenarios': (self._parseScenarios, self._scenarios.update)}

        # callers may forbid certain top-level elements (e.g. registry
        # fragments must not carry <directories>)
        if disallowed:
            for k in disallowed:
                del parsers[k]

        self.parseFromTable(node, parsers)

    def _parseDirectories(self, node):
        # <directories>
        #   <directory>
        # </directories>
        directories = {}

        def addDirectory(d):
            directories[d.getPath()] = d

        parsers = {'directory': (self._parseDirectory, addDirectory)}
        self.parseFromTable(node, parsers)

        return directories

    def _parseDirectory(self, node):
        # <directory filename="..."/>
        filename, = self.parseAttributes(node, ('filename', ))
        return RegistryDirectory(filename)

    def _parseComponentWizard(self, node):
        return self._parseWizard(node, _VALID_WIZARD_COMPONENT_TYPES)

    def _parsePlugWizard(self, node):
        return self._parseWizard(node, _VALID_WIZARD_PLUG_TYPES)

    def _parseWizard(self, node, validTypes):
        # <wizard type="..." _description=" " feeder="..." eater="..."]/>
        #
        # NOTE: We are using _description with the leading underscore for
        #       the case of intltool, it is not possible for it to pickup
        #       translated attributes otherwise. Ideally we would use another
        #       tool so we can avoid underscores in our xml schema.
        attrs = self.parseAttributes(node,
                                     ('type', '_description'),
                                     ('feeder', 'eater', 'rank'))
        wizardType, description, feeder, eater, rank = attrs

        accepts = []
        provides = []
        parsers = {
            'accept-format': (self._parseAcceptFormat,
                              lambda n: accepts.append(n)),
            'provide-format': (self._parseProvideFormat,
                               lambda n: provides.append(n)),
        }
        self.parseFromTable(node, parsers)

        parent_type = node.parentNode.getAttribute('type')

        if not wizardType in validTypes:
            raise fxml.ParserError(
                "<wizard>'s type attribute is %s must be one of %s" % (
                    parent_type,
                    ', '.join(validTypes)))

        rank = int(rank or FLU_RANK_NONE)

        # Cross-validate accepted/provided media-types against the wizard
        # type: producers have no accepts; encoders/muxers/consumers need
        # accepts; producers/consumers have no provides; encoders/muxers
        # provide exactly one.
        isProducer = wizardType.endswith('-producer')
        isEncoder = wizardType.endswith('-encoder')
        isMuxer = (wizardType == 'muxer')
        isConsumer = wizardType.endswith('-consumer')

        err = None
        # Producers and Encoders cannot have provided
        if accepts and (isProducer or isEncoder):
            err = ('<wizard type="%s"> does not allow an accepted '
                   'media-type.') % (parent_type, )
        # Encoders, Muxers and Consumers must have an accepted
        elif not accepts and (isMuxer or isConsumer):
            err = ('<wizard type="%s"> requires at least one accepted '
                   'media-type.') % (parent_type, )
        # Producers and Consumers cannot have provided
        elif provides and (isProducer or isConsumer):
            err = ('<wizard type="%s"> does not allow a provided '
                   'media-type.') % (parent_type, )
        # Producers, Encoders and Muxers must have exactly one provided
        if len(provides) != 1 and (isEncoder or isMuxer):
            err = ('<wizard type="%s"> requires exactly one provided '
                   'media-type.') % (parent_type, )

        if err:
            raise fxml.ParserError(err)

        return RegistryEntryWizard(parent_type, wizardType, description,
                                   feeder, eater, accepts, provides, rank)

    def _parseAcceptFormat(self, node):
        # <accept-format media-type="..."/>
        media_type, = self.parseAttributes(node, ('media-type', ))
        return RegistryEntryWizardFormat(media_type)

    def _parseProvideFormat(self, node):
        # <provide-format media-type="..."/>
        media_type, = self.parseAttributes(node, ('media-type', ))
        return RegistryEntryWizardFormat(media_type)
# FIXME: filename -> path
class RegistryDirectory(log.Loggable):
    """
    I represent a directory under a path managed by the registry.
    I can be queried for a list of partial registry .xml files underneath
    the given path, under the given prefix.
    """

    def __init__(self, path, prefix=configure.PACKAGE):
        self._path = path
        self._prefix = prefix
        # only scan inside path/prefix, not the whole path
        scanPath = os.path.join(path, prefix)
        self._files, self._dirs = self._getFileLists(scanPath)

    def __repr__(self):
        return "<RegistryDirectory %s>" % self._path

    def _getFileLists(self, root):
        """
        Get all files ending in .xml from all directories under the given root.

        @type root: string
        @param root: the root directory under which to search

        @returns: a list of .xml files, relative to the given root directory
        """
        files = []
        dirs = []

        if os.path.exists(root):
            try:
                directory_files = os.listdir(root)
            except OSError, e:
                # unreadable directories are skipped silently; any other
                # listing failure is a real error
                if e.errno == errno.EACCES:
                    return files, dirs
                else:
                    raise

            dirs.append(root)

            for entry in directory_files:
                path = os.path.join(root, entry)
                # if it's a .xml file, then add it to the list
                if not os.path.isdir(path):
                    if path.endswith('.xml'):
                        files.append(path)
                # if it's a directory and not an svn directory, then get
                # its files and add them
                elif entry != '.svn':
                    newFiles, newDirs = self._getFileLists(path)
                    files.extend(newFiles)
                    dirs.extend(newDirs)

        return files, dirs

    def rebuildNeeded(self, mtime):
        """Return True if any scanned file or directory changed since mtime."""

        def _rebuildNeeded(f):
            try:
                if _getMTime(f) > mtime:
                    self.debug("Path %s changed since registry last "
                               "scanned", f)
                    return True
                return False
            except OSError:
                # a path that can no longer be stat'ed forces a rescan
                self.debug("Failed to stat file %s, need to rescan", f)
                return True

        for f in self._files:
            if _rebuildNeeded(f):
                return True
        for f in self._dirs:
            if _rebuildNeeded(f):
                return True
        return False

    def getFiles(self):
        """
        Return a list of all .xml registry files underneath this registry
        path.
        """
        return self._files

    def getPath(self):
        return self._path
class RegistryWriter(log.Loggable):
    # Serializes parsed registry entries back to the registry XML format.

    def __init__(self, components, plugs, bundles, directories):
        """
        @param components: components to write
        @type components: list of L{RegistryEntryComponent}
        @param plugs: plugs to write
        @type plugs: list of L{RegistryEntryPlug}
        @param bundles: bundles to write
        @type bundles: list of L{RegistryEntryBundle}
        @param directories: directories to write
        @type directories: list of L{RegistryEntryBundleDirectory}
        """
        self.components = components
        self.plugs = plugs
        self.bundles = bundles
        self.directories = directories

    def dump(self, fd):
        """
        Dump the cache of components to the given opened file descriptor.

        @type fd: integer
        @param fd: open file descriptor to write to
        """

        def w(i, msg):
            # write msg indented by i spaces
            print >> fd, ' '*i + msg

        def e(attr):
            # XML-escape and quote an attribute value
            return saxutils.quoteattr(attr)

        def _dump_proplist(i, proplist, ioff=2):
            for prop in proplist:
                if isinstance(prop, RegistryEntryCompoundProperty):
                    _dump_compound(i, prop)
                else:
                    w(i, ('<property name="%s" type="%s"'
                          % (prop.getName(), prop.getType())))
                    w(i, (' _description=%s'
                          % (e(prop.getDescription()), )))
                    w(i, (' required="%s" multiple="%s"/>'
                          % (prop.isRequired(), prop.isMultiple())))

        def _dump_compound(i, cprop, ioff=2):
            # recursive: compound properties may nest
            w(i, ('<compound-property name="%s"' % (cprop.getName(), )))
            w(i, (' _description=%s'
                  % (e(cprop.getDescription()), )))
            w(i, (' required="%s" multiple="%s">'
                  % (cprop.isRequired(), cprop.isMultiple())))
            _dump_proplist(i + ioff, cprop.getProperties())
            w(i, ('</compound-property>'))

        def _dump_entries(i, entries):
            if not entries:
                return
            w(i, '<entries>')
            for entry in entries:
                w(i+2, '<entry type="%s" location="%s" function="%s"/>' % (
                    entry.getType(),
                    entry.getLocation(),
                    entry.getFunction()))
            w(i, '</entries>')

        w(0, '<registry>')
        w(0, '')

        # Write components
        w(2, '<components>')
        w(0, '')
        for component in self.components:
            w(4, '<component type="%s" base="%s"' % (
                component.getType(), component.getBase()))
            w(4, ' _description=%s>'
              % (e(component.getDescription()), ))

            w(6, '<source location="%s"/>' % component.getSource())
            for x in component.getEaters():
                w(6, '<eater name="%s" required="%s" multiple="%s"/>'
                  % (x.getName(), x.getRequired() and "yes" or "no",
                     x.getMultiple() and "yes" or "no"))
            for x in component.getFeeders():
                w(6, '<feeder name="%s"/>' % x)
            w(6, '<synchronization required="%s" clock-priority="%d"/>'
              % (component.getNeedsSynchronization() and "yes" or "no",
                 component.getClockPriority()))

            sockets = component.getSockets()
            if sockets:
                w(6, '<sockets>')
                for socket in sockets:
                    w(8, '<socket type="%s"/>' % socket)
                w(6, '</sockets>')

            w(6, '<properties>')
            _dump_proplist(8, component.getProperties())
            w(6, '</properties>')

            for wizard in component.wizards:
                rank = ''
                if wizard.rank:
                    rank = ' rank="%d"' % wizard.rank
                w(6, '<wizard type="%s" _description="%s" feeder="%s"%s>' % (
                    wizard.type,
                    e(wizard.description),
                    wizard.feeder,
                    rank))
                for accept in wizard.accepts:
                    w(8, '<accept-format media-type="%s"/>' % (
                        accept.media_type))
                for provide in wizard.provides:
                    w(8, '<provide-format media-type="%s"/>' % (
                        provide.media_type))
                w(6, '</wizard>')

            registryEntryFiles = component.getFiles()
            if registryEntryFiles:
                w(6, '<files>')
                for entryFile in registryEntryFiles:
                    w(8, '<file name="%s" type="%s"/>' % (
                        entryFile.getName(),
                        entryFile.getType()))
                w(6, '</files>')

            _dump_entries(6, component.getEntries())

            w(4, '</component>')
            w(0, '')

        w(2, '</components>')
        w(0, '')

        # Write plugs
        w(2, '<plugs>')
        w(0, '')
        for plug in self.plugs:
            w(4, '<plug type="%s" socket="%s" _description="%s">'
              % (plug.getType(), plug.getSocket(), plug.getDescription()))

            _dump_entries(6, plug.getEntries())

            w(6, '<properties>')
            _dump_proplist(8, plug.getProperties())
            w(6, '</properties>')

            w(4, '</plug>')
            w(0, '')
        w(2, '</plugs>')
        w(0, '')

        # bundles
        w(2, '<bundles>')
        for bundle in self.bundles:
            w(4, '<bundle name="%s" under="%s" project="%s">' % (
                bundle.getName(), bundle.getUnder(), bundle.getProject()))

            dependencies = bundle.getDependencies()
            if dependencies:
                w(6, '<dependencies>')
                for dependency in dependencies:
                    w(8, '<dependency name="%s"/>' % dependency)
                w(6, '</dependencies>')

            bundleDirectories = bundle.getDirectories()
            if bundleDirectories:
                w(6, '<directories>')
                for directory in bundleDirectories:
                    w(8, '<directory name="%s">' % directory.getName())
                    for filename in directory.getFiles():
                        w(10, '<filename location="%s" relative="%s"/>' % (
                            filename.getLocation(), filename.getRelative()))
                    w(8, '</directory>')
                w(6, '</directories>')

            w(4, '</bundle>')
            w(0, '')
        w(2, '</bundles>')

        # Directories
        directories = self.directories
        if directories:
            w(2, '<directories>')
            w(0, '')
            for d in directories:
                w(4, '<directory filename="%s"/>' % d.getPath())
            w(2, '</directories>')
            w(0, '')

        w(0, '</registry>')
class ComponentRegistry(log.Loggable):
"""Registry, this is normally not instantiated."""
logCategory = 'registry'
defaultCachePath = os.path.join(configure.registrydir, 'registry.xml')
def __init__(self, paths=None, prefix=configure.PACKAGE,
cachePath=defaultCachePath, seconds=runtime.seconds):
if paths is not None:
self._paths = paths
else:
self._paths = self._getRegistryPathsFromEnviron()
self.prefix = prefix
self.filename = cachePath
self.seconds = seconds
self.mtime = None
self._modmtime = _getMTime(__file__)
self._parser = RegistryParser()
if (READ_CACHE and
os.path.exists(self.filename) and
os.access(self.filename, os.R_OK)):
self.info('Parsing registry: %s', self.filename)
try:
self._parser.parseRegistry(self.filename)
except fxml.ParserError, e:
# this can happen for example if we upgraded to a new version,
# ran, then downgraded again; the registry can then contain
# XML keys that are not understood by this version.
# This is non-fatal, and gets fixed due to a re-scan
self.warning('Could not parse registry %s.', self.filename)
self.debug('fxml.ParserError: %s', log.getExceptionMessage(e))
self.verify(force=not READ_CACHE)
def addFile(self, file):
"""
@param file: The file to add, either as an open file object, or
as the name of a file to open.
@type file: str or file.
"""
if isinstance(file, str) and file.endswith('registry.xml'):
self.warning('%s seems to be an old registry in your tree, '
'please remove it', file)
self.debug('Adding file: %r', file)
self._parser.parseRegistryFile(file)
def addFromString(self, string):
f = StringIO(string)
self.addFile(f)
f.close()
def addRegistryPath(self, path, prefix=None):
"""
Add a registry path to this registry, scanning it for registry
snippets.
@param path: a full path containing a PREFIX directory, which will be
scanned for registry files.
@param prefix: directory name under path which will be scanned
(defaults to 'flumotion' and cannot be an empty string).
@rtype: bool
@returns: whether the path could be added
"""
prefix = prefix or self.prefix
self.debug('path %s, prefix %s', path, prefix)
if not os.path.exists(path):
self.warning(
"Cannot add non-existent path '%s' to registry", path)
return False
if not os.path.exists(os.path.join(path, prefix)):
self.warning("Cannot add path '%s' to registry "
"since it does not contain prefix '%s'", path, prefix)
return False
# registry path was either not watched or updated, or a force was
# asked, so reparse
self.info('Scanning registry path %s', path)
registryPath = RegistryDirectory(path, prefix=prefix)
files = registryPath.getFiles()
self.debug('Found %d possible registry files', len(files))
map(self.addFile, files)
self._parser.addDirectory(registryPath)
return True
# fixme: these methods inconsistenly molest and duplicate those of
# the parser.
def isEmpty(self):
return len(self._parser._components) == 0
def getComponent(self, name):
"""
@rtype: L{RegistryEntryComponent}
"""
return self._parser.getComponent(name)
def hasComponent(self, name):
return name in self._parser._components
def getComponents(self):
return self._parser.getComponents()
def getPlug(self, type):
"""
@rtype: L{RegistryEntryPlug}
"""
return self._parser.getPlug(type)
def hasPlug(self, name):
return name in self._parser._plugs
def getPlugs(self):
return self._parser.getPlugs()
def getScenarios(self):
return self._parser.getScenarios()
def getScenarioByType(self, type):
return self._parser.getScenarioByType(type)
def getBundles(self):
return self._parser._bundles.values()
def getDirectories(self):
return self._parser.getDirectories()
def makeBundlerBasket(self):
"""
@rtype: L{flumotion.common.bundle.BundlerBasket}
"""
def load():
ret = BundlerBasket(self.mtime)
for b in self.getBundles():
bundleName = b.getName()
self.debug('Adding bundle %s', bundleName)
for d in b.getDirectories():
directory = d.getName()
for bundleFilename in d.getFiles():
try:
basedir = b.getBaseDir()
except errors.NoProjectError, e:
self.warning("Could not load project %s", e.args)
raise
fullpath = os.path.join(basedir, directory,
bundleFilename.getLocation())
relative = bundleFilename.getRelative()
self.log('Adding path %s as %s to bundle %s',
fullpath, relative, bundleName)
try:
ret.add(bundleName, fullpath, relative)
except Exception, e:
self.debug("Reason: %r", e)
raise RuntimeError(
'Could not add %s to bundle %s (%s)'
% (fullpath, bundleName, e))
for d in b.getDependencies():
self.log('Adding dependency of %s on %s', bundleName, d)
ret.depend(bundleName, d)
return ret
try:
return load()
except Exception, e:
self.debug("Could not register bundles the first time: %s",
log.getExceptionMessage(e))
self.warning("Bundle problem, rebuilding registry")
self.verify(force=True)
try:
return load()
except Exception, e:
self.debug("Could not register bundles the second time: %s",
log.getExceptionMessage(e))
self.error("Could not not register bundles (%s)",
log.getExceptionMessage(e))
def dump(self, fd):
"""
Dump the cache of components to the given opened file descriptor.
@type fd: integer
@param fd: open file descriptor to write to
"""
writer = RegistryWriter(self.getComponents(), self.getPlugs(),
self.getBundles(), self.getDirectories())
writer.dump(fd)
def clean(self):
"""
Clean the cache of components.
"""
self._parser.clean()
def rebuildNeeded(self):
if self.mtime is None:
self.log("Rebuild needed: missing mtime")
return True
if not os.path.exists(self.filename):
self.log("Rebuild needed: registry file %s doesn't exists",
self.filename)
return True
# A bit complicated because we want to allow FLU_PROJECT_PATH to
# point to nonexistent directories
registryPaths = python.set(self._paths)
oldRegistryPaths = python.set([directory.getPath()
for directory in self.getDirectories()])
if registryPaths != oldRegistryPaths:
if oldRegistryPaths - registryPaths:
self.log("Rebuild needed: registry paths removed")
return True
f = filter(os.path.exists, registryPaths - oldRegistryPaths)
if f:
self.log("Rebuild needed: a newly added registry path doesn't "
"exists: %s", f)
return True
registry_modified = self.mtime
for d in self._parser.getDirectories():
if d.rebuildNeeded(registry_modified):
return True
return False
def save(self, force=False):
if not force and not self.rebuildNeeded():
return
self.info('Saving registry to %s', self.filename)
# create parent directory
directory = os.path.split(self.filename)[0]
if not os.path.exists(directory):
try:
makedirs(directory)
except OSError, e:
if e.errno == errno.EACCES:
self.error('Registry directory %s could not be created !' %
directory)
else:
raise
if not os.path.isdir(directory):
self.error('Registry directory %s is not a directory !')
try:
# According to doc http://docs.python.org/library/os.html#os.rename
# If successful, the renaming will be an atomic operation (this is
# a POSIX requirement).
tmp = tempfile.mktemp(dir=directory)
fd = open(tmp, 'w')
self.dump(fd)
os.rename(tmp, self.filename)
except IOError, e:
if e.errno == errno.EACCES:
self.error('Registry file %s could not be created !' %
self.filename)
else:
raise
def _getRegistryPathsFromEnviron(self):
registryPaths = [configure.pythondir, ]
if 'FLU_PROJECT_PATH' in os.environ:
paths = os.environ['FLU_PROJECT_PATH']
registryPaths += paths.split(':')
return registryPaths
def verify(self, force=False):
"""
Verify if the registry is uptodate and rebuild if it is not.
@param force: True if the registry needs rebuilding for sure.
"""
# construct a list of all paths to scan for registry .xml files
if force or self.rebuildNeeded():
self.info("Rebuilding registry")
if force:
self.info("Rebuild of registry is forced")
if self.rebuildNeeded():
self.info("Rebuild of registry is needed")
self.clean()
mtime = self.seconds()
for path in self._paths:
if not self.addRegistryPath(path):
self._parser.removeDirectoryByPath(path)
self.mtime = mtime
self.save(True)
    def isUptodate(self):
        # True while this module file on disk is not newer than the recorded
        # module mtime (self._modmtime -- presumably captured at import time,
        # TODO confirm); used to detect an in-place upgrade of the package.
        return self._modmtime >= _getMTime(__file__)
class RegistrySubsetWriter(RegistryWriter):
    # Writes out only the part of a registry that is provided by a given
    # set of bundles.
    def __init__(self, fromRegistry=None, onlyBundles=None):
        """
        @param fromRegistry: The registry to subset, or the default.
        @type fromRegistry: L{ComponentRegistry}
        @param onlyBundles: If given, only include the subset of the
        registry that is provided by bundles whose names are in this
        list.
        @type onlyBundles: list of str
        """
        self.fromRegistry = fromRegistry
        self.onlyBundles = onlyBundles
    def dump(self, fd):
        # Serialize the subset registry to the file-like object fd.
        reg = self.fromRegistry or getRegistry()
        pred = None
        bundles = reg.getBundles()
        if self.onlyBundles is not None:
            bundles = [b for b in bundles
                       if b.name in self.onlyBundles]
        # map of bundled file path -> providing bundle
        bundledfiles = {}
        for b in bundles:
            for d in b.getDirectories():
                for f in d.getFiles():
                    filename = os.path.join(d.getName(), f.getLocation())
                    bundledfiles[filename] = b
        def fileIsBundled(basedir, filename):
            return os.path.join(basedir, filename) in bundledfiles
        # keep a component if any of its files or entries is bundled
        # NOTE(review): relies on Python 2 filter() returning a list so
        # 'or' sees emptiness; a Python 3 filter object is always truthy --
        # confirm before porting.
        pred = lambda c: (filter(lambda f: fileIsBundled(c.getBase(),
                                                         f.getFilename()),
                                 c.getFiles())
                          or filter(lambda e: fileIsBundled(c.getBase(),
                                                            e.getLocation()),
                                    c.getEntries()))
        components = filter(pred, reg.getComponents())
        # keep a plug if the file providing its entry is bundled
        pred = lambda p: p.getEntry().getLocation() in bundledfiles
        plugs = filter(pred, reg.getPlugs())
        directories = [] # no need for this
        regwriter = RegistryWriter(components, plugs, bundles, directories)
        regwriter.dump(fd)
# Module-level singleton ComponentRegistry, lazily created by getRegistry().
__registry = None
def makeBundleFromLoadedModules(outfile, outreg, *prefixes):
    """
    Make a bundle from a subset of all loaded modules, also writing out
    a registry file that can apply to that subset of the global
    registry. Suitable for use as a FLU_ATEXIT handler.
    @param outfile: The path to which a zip file will be written.
    @type outfile: str
    @param outreg: The path to which a registry file will be written.
    @type outreg: str
    @param prefixes: A list of prefixes to which to limit the export. If
    not given, package up all modules. For example, "flumotion" would
    limit the output to modules that start with "flumotion".
    @type prefixes: list of str
    """
    from twisted.python import reflect
    # Map module name -> module object for every loaded module that has a
    # __file__ and matches one of the prefixes (all modules if no prefix).
    def getUsedModules(prefixes):
        ret = {}
        for modname in sys.modules:
            if prefixes and not filter(modname.startswith, prefixes):
                continue
            try:
                module = reflect.namedModule(modname)
                if hasattr(module, '__file__'):
                    ret[modname] = module
                else:
                    log.info('makebundle', 'Module %s has no file', module)
            except ImportError:
                log.info('makebundle', 'Could not import %s', modname)
        return ret
    # Map dotted module name -> bundle providing it, derived from the .py
    # files advertised by each registered bundle.
    def calculateModuleBundleMap():
        allbundles = getRegistry().getBundles()
        ret = {}
        for bundle in allbundles:
            for directory in bundle.getDirectories():
                for bundleFile in directory.getFiles():
                    path = os.path.join(directory.getName(),
                                        bundleFile.getLocation())
                    parts = path.split(os.path.sep)
                    if parts[-1].startswith('__init__.py'):
                        parts.pop()
                    elif parts[-1].endswith('.py'):
                        parts[-1] = parts[-1][:-3]
                    else:
                        # not a bundled module
                        continue
                    modname = '.'.join(parts)
                    ret[modname] = bundle
        return ret
    # Build one MergedBundler: modules already provided by a registered
    # bundle come in via that bundle (plus its dependencies); the rest are
    # added as plain source files.
    def makeMergedBundler(modules, modulebundlemap):
        ret = MergedBundler()
        basket = getRegistry().makeBundlerBasket()
        for modname in modules:
            modfilename = modules[modname].__file__
            if modname in modulebundlemap:
                bundleName = modulebundlemap[modname].getName()
                for depBundleName in basket.getDependencies(bundleName):
                    ret.addBundler(basket.getBundlerByName(depBundleName))
            else:
                # prefer the .py source over the compiled .pyc
                if modfilename.endswith('.pyc'):
                    modfilename = modfilename[:-1]
                if os.path.isdir(modfilename):
                    with_init = os.path.join(modfilename, '__init__.py')
                    if os.path.exists(with_init):
                        modfilename = with_init
                # keep only as many trailing path components as the dotted
                # module name has (plus one for a package __init__)
                nparts = len(modname.split('.'))
                if '__init__' in modfilename:
                    nparts += 1
                relpath = os.path.join(*modfilename.split(
                    os.path.sep)[-nparts:])
                ret.add(modfilename, relpath)
        return ret
    modules = getUsedModules(prefixes)
    modulebundlemap = calculateModuleBundleMap()
    bundler = makeMergedBundler(modules, modulebundlemap)
    # NOTE(review): Python 2 print statements below; the file handles are
    # never closed explicitly and rely on interpreter exit to flush.
    print 'Writing bundle to', outfile
    open(outfile, 'w').write(bundler.bundle().getZip())
    print 'Writing registry to', outreg
    bundlers_used = [b.name for b in bundler.getSubBundlers()]
    regwriter = RegistrySubsetWriter(onlyBundles=bundlers_used)
    regwriter.dump(open(outreg, 'w'))
def getRegistry():
    """
    Return the registry. Only one registry will ever be created.
    @rtype: L{ComponentRegistry}
    """
    global __registry
    if __registry and __registry.isUptodate():
        return __registry
    if not __registry:
        log.debug('registry', 'instantiating registry')
    else:
        # When a new version of flumotion gets installed, running managers
        # will reread the xml files. Reloading the registry module is
        # required to avoid inconsistencies.
        log.debug('registry', 'registry module updated, reloading')
        reload(sys.modules[__registry.__module__])
    __registry = ComponentRegistry()
    return __registry
| timvideos/flumotion | flumotion/common/registry.py | Python | lgpl-2.1 | 64,775 |
"""
PostgreSQL Session API
======================
The Session classes wrap the Queries :py:class:`Session <queries.Session>` and
:py:class:`TornadoSession <queries.tornado_session.TornadoSession>` classes
providing environment variable based configuration.
Environment variables should be set using the ``PGSQL[_DBNAME]`` format
where the value is a PostgreSQL URI.
For PostgreSQL URI format, see:
http://www.postgresql.org/docs/9.3/static/libpq-connect.html#LIBPQ-CONNSTRING
As example, given the environment variable:
.. code:: python
PGSQL_FOO = 'postgresql://bar:baz@foohost:6000/foo'
and code for creating a :py:class:`Session` instance for the database name
``foo``:
.. code:: python
session = sprockets.postgresql.Session('foo')
A :py:class:`queries.Session` object will be created that connects to Postgres
running on ``foohost``, port ``6000`` using the username ``bar`` and the
password ``baz``, connecting to the ``foo`` database.
"""
# Package version, exposed both as a tuple and as the dotted string form.
version_info = (2, 0, 1)
__version__ = '.'.join(str(v) for v in version_info)
import logging
import os
from queries import pool
import queries
from queries import tornado_session
# PostgreSQL connection parameter names.
# NOTE(review): not referenced anywhere in the code shown -- possibly dead.
_ARGUMENTS = ['host', 'port', 'dbname', 'user', 'password']
LOGGER = logging.getLogger(__name__)
# For ease of access to different cursor types
from queries import DictCursor
from queries import NamedTupleCursor
from queries import RealDictCursor
from queries import LoggingCursor
from queries import MinTimeLoggingCursor
# Expose exceptions so clients do not need to import queries as well
from queries import DataError
from queries import DatabaseError
from queries import IntegrityError
from queries import InterfaceError
from queries import InternalError
from queries import NotSupportedError
from queries import OperationalError
from queries import ProgrammingError
from queries import QueryCanceledError
from queries import TransactionRollbackError
def _get_uri(dbname):
"""Return the URI for the specified database name from an environment
variable. If dbname is blank, the ``PGSQL`` environment variable is used,
otherwise the database name is cast to upper case and concatenated to
``PGSQL_`` and the URI is retrieved from ``PGSQL_DBNAME``. For example,
if the value ``foo`` is passed in, the environment variable used would be
``PGSQL_FOO``.
:param str dbname: The database name to construct the URI for
:return: str
:raises: KeyError
"""
if not dbname:
return os.environ['PGSQL']
return os.environ['PGSQL_{0}'.format(dbname).upper()]
class Session(queries.Session):
    """Extends queries.Session using configuration data that is stored
    in environment variables.
    Utilizes connection pooling to ensure that multiple concurrent asynchronous
    queries do not block each other. Heavily trafficked services will require
    a higher ``max_pool_size`` to allow for greater connection concurrency.
    :param str dbname: PostgreSQL database name
    :param queries.cursor: The cursor type to use
    :param int pool_idle_ttl: How long idle pools keep connections open
    :param int pool_max_size: The maximum size of the pool to use
    :param str db_url: Optional database connection URL. Use this when
        you need to connect to a database that is only known at runtime.
    """
    def __init__(self, dbname,
                 cursor_factory=queries.RealDictCursor,
                 pool_idle_ttl=pool.DEFAULT_IDLE_TTL,
                 pool_max_size=pool.DEFAULT_MAX_SIZE,
                 db_url=None):
        # Resolve the URL from PGSQL[_DBNAME] unless the caller supplied
        # one explicitly; _get_uri raises KeyError when the variable is
        # not set.
        if db_url is None:
            db_url = _get_uri(dbname)
        super(Session, self).__init__(db_url,
                                      cursor_factory,
                                      pool_idle_ttl,
                                      pool_max_size)
class TornadoSession(tornado_session.TornadoSession):
    """Extends queries.TornadoSession using configuration data that is stored
    in environment variables.
    Utilizes connection pooling to ensure that multiple concurrent asynchronous
    queries do not block each other. Heavily trafficked services will require
    a higher ``max_pool_size`` to allow for greater connection concurrency.
    :py:meth:`query <queries.tornado_session.TornadoSession.query>` and
    :py:meth:`callproc <queries.tornado_session.TornadoSession.callproc>` must
    call :py:meth:`Results.free <queries.tornado_session.Results.free>`
    :param str dbname: PostgreSQL database name
    :param queries.cursor: The cursor type to use
    :param int pool_idle_ttl: How long idle pools keep connections open
    :param int pool_max_size: The maximum size of the pool to use
    :param tornado.ioloop.IOLoop ioloop: Pass in the instance of the tornado
        IOLoop you would like to use. Defaults to the global instance.
    :param str db_url: Optional database connection URL. Use this when
        you need to connect to a database that is only known at runtime.
    """
    def __init__(self, dbname,
                 cursor_factory=queries.RealDictCursor,
                 pool_idle_ttl=pool.DEFAULT_IDLE_TTL,
                 pool_max_size=tornado_session.DEFAULT_MAX_POOL_SIZE,
                 io_loop=None, db_url=None):
        # Same environment-variable resolution as the synchronous Session.
        if db_url is None:
            db_url = _get_uri(dbname)
        super(TornadoSession, self).__init__(db_url,
                                             cursor_factory,
                                             pool_idle_ttl,
                                             pool_max_size,
                                             io_loop)
| sprockets/sprockets.clients.postgresql | sprockets/clients/postgresql/__init__.py | Python | bsd-3-clause | 5,585 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import os
import sys
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from airflow import configuration as conf
class DummyStatsLogger(object):
    """No-op stand-in for a statsd client, used when statsd is disabled.

    Mirrors the ``incr``/``decr``/``gauge`` interface but discards every
    metric.
    """
    @classmethod
    def incr(cls, stat, count=1, rate=1):
        """Discard a counter increment."""
        return None
    @classmethod
    def decr(cls, stat, count=1, rate=1):
        """Discard a counter decrement."""
        return None
    @classmethod
    def gauge(cls, stat, value, rate=1, delta=False):
        """Discard a gauge reading."""
        return None
# Stats points at a real statsd client when [scheduler] statsd_on is set,
# otherwise at the no-op DummyStatsLogger defined above.
Stats = DummyStatsLogger
if conf.getboolean('scheduler', 'statsd_on'):
    from statsd import StatsClient
    statsd = StatsClient(
        host=conf.get('scheduler', 'statsd_host'),
        port=conf.getint('scheduler', 'statsd_port'),
        prefix=conf.get('scheduler', 'statsd_prefix'))
    Stats = statsd
else:
    # NOTE(review): redundant -- Stats already defaults to DummyStatsLogger.
    Stats = DummyStatsLogger
# ASCII-art banner (presumably displayed at startup -- confirm usage).
HEADER = """\
  ____________       _____________
 ____    |__( )_________  __/__  /________      __
____  /| |_  /__  ___/_  /_ __  /_  __ \_ | /| / /
___  ___ |  / _  /   _  __/ _  / / /_/ /_ |/ |/ /
 _/_/  |_/_/  /_/    /_/    /_/  \____/____/|__/
 """
BASE_LOG_URL = '/admin/airflow/log'
AIRFLOW_HOME = os.path.expanduser(conf.get('core', 'AIRFLOW_HOME'))
SQL_ALCHEMY_CONN = conf.get('core', 'SQL_ALCHEMY_CONN')
LOGGING_LEVEL = logging.INFO
DAGS_FOLDER = os.path.expanduser(conf.get('core', 'DAGS_FOLDER'))
# Pooling options only apply to real database servers, not sqlite.
engine_args = {}
if 'sqlite' not in SQL_ALCHEMY_CONN:
    # Engine args not supported by sqlite
    engine_args['pool_size'] = conf.getint('core', 'SQL_ALCHEMY_POOL_SIZE')
    engine_args['pool_recycle'] = conf.getint('core',
                                              'SQL_ALCHEMY_POOL_RECYCLE')
engine = create_engine(SQL_ALCHEMY_CONN, **engine_args)
# Thread-local session factory bound to the metadata database engine.
Session = scoped_session(
    sessionmaker(autocommit=False, autoflush=False, bind=engine))
# can't move this to conf due to ConfigParser interpolation
LOG_FORMAT = (
    '[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s')
SIMPLE_LOG_FORMAT = '%(asctime)s %(levelname)s - %(message)s'
def policy(task_instance):
    """
    This policy setting allows altering task instances right before they
    are executed. It allows administrator to rewire some task parameters.
    Note that the ``TaskInstance`` object has an attribute ``task`` pointing
    to its related task object, that in turns has a reference to the DAG
    object. So you can use the attributes of all of these to define your
    policy.
    To define policy, add a ``airflow_local_settings`` module
    to your PYTHONPATH that defines this ``policy`` function. It receives
    a ``TaskInstance`` object and can alter it where needed.
    Here are a few examples of how this can be useful:
    * You could enforce a specific queue (say the ``spark`` queue)
        for tasks using the ``SparkOperator`` to make sure that these
        task instances get wired to the right workers
    * You could force all task instances running on an
        ``execution_date`` older than a week old to run in a ``backfill``
        pool.
    * ...
    """
    # Default implementation: leave task instances unchanged.
    pass
def configure_logging():
    # Replace any handlers installed by libraries at import time with a
    # single stdout handler using the module-level format and level.
    logging.root.handlers = []
    logging.basicConfig(
        format=LOG_FORMAT, stream=sys.stdout, level=LOGGING_LEVEL)
try:
    # Optional user override hook: names defined in airflow_local_settings
    # (e.g. a custom policy()) replace the defaults above.
    from airflow_local_settings import *
    logging.info("Loaded airflow_local_settings.")
except:
    # NOTE(review): bare except hides *any* failure inside
    # airflow_local_settings, not just its absence -- consider catching
    # ImportError only.
    pass
configure_logging()
| wxiang7/airflow | airflow/settings.py | Python | apache-2.0 | 4,000 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Drops the obsolete Liberation.async_result field from the inboxen app.
    dependencies = [
        ('inboxen', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='liberation',
            name='async_result',
        ),
    ]
| Inboxen/Inboxen | inboxen/migrations/0002_remove_liberation_async_result.py | Python | agpl-3.0 | 351 |
# Demo: parse VASP output and visualise a Fermi surface in Inviwo.
# FIX: the original imported os/sys/inspect twice on consecutive lines;
# the imports are consolidated here (no names were removed).
import os
import sys
import inspect

import inviwopy

# Resolve the directory containing this script so the sibling envisionpy
# package can be imported regardless of the current working directory.
path_to_current_folder = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.append(path_to_current_folder + "/../")
import envisionpy
import envisionpy.hdf5parser
from envisionpy.network import VisualisationManager

# Input VASP data and the HDF5 file the parser will produce.
VASP_DIR = path_to_current_folder + "/../unit_testing/resources/FCC-Cu"
HDF5_FILE = path_to_current_folder + "/../demo_fermi.hdf5"

envisionpy.hdf5parser.fermi_parser(HDF5_FILE, VASP_DIR)

# Clear any old network
inviwopy.app.network.clear()

# Initialize inviwo network
visManager = VisualisationManager(HDF5_FILE, inviwopy.app)
visManager.start("fermi")
visManager.subnetworks['fermi'].toggle_iso(True)
visManager.subnetworks['fermi'].clear_tf()
| rartino/ENVISIoN | demo/fermi_surface.py | Python | bsd-2-clause | 754 |
from __future__ import print_function
import socket, threading, sys, re, types
from . import axonmc
from six.moves import map
def main():
    """Serve MultiClamp requests over TCP, one WorkThread per connection.

    The listening port is taken from argv[1] when given, otherwise 34567.
    Runs until the process is killed.
    """
    ## Use 34567 as default port
    port = int(sys.argv[1]) if len(sys.argv) >= 2 else 34567
    ## Mutex to serialize requests to multiclamp
    driver_lock = threading.Lock()
    mc = axonmc.MULTICLAMP
    ## Set up network socket and listen for connections
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind(('', port))
    server.listen(1)
    print("Listening on port", port)
    while True:
        conn, addr = server.accept()
        print("Connected to", addr[0])
        WorkThread(conn, addr, mc, driver_lock).start()
class WorkThread(threading.Thread):
    # One thread per client connection: reads newline-terminated commands,
    # executes them against the shared MultiClamp driver under a lock, and
    # replies '1,<results>' on success or '0,<exception args>' on failure.
    def __init__(self, conn, addr, mc, lock):
        # conn: connected socket; addr: (host, port) peer address;
        # mc: shared driver object; lock: mutex serializing driver access
        # across all worker threads.
        self.conn = conn
        self.addr = addr
        self.lock = lock
        self.mc = mc
        threading.Thread.__init__(self)
    def run(self):
        # Request/response loop; ends when the peer closes the connection.
        while True:
            cmd = self.readline()
            if cmd is None:
                break
            print(self.addr[0], "req:", cmd[:-1])
            try:
                # success marker 1, followed by the call's result values
                resp = [1] + self.processCmd(cmd)
            except:
                # failure marker 0, followed by the exception's args
                resp = [0] + list(sys.exc_info()[1].args)
            resp = ','.join(map(str, resp))
            print(self.addr[0], "response:", resp)
            self.conn.sendall(resp + '\n')
        self.conn.close()
        print("Closing connection to", self.addr[0])
    def processCmd(self, cmd):
        # Parse a "function(arg1, arg2, ...)" command string and invoke the
        # named method on the driver; always returns a list of result values.
        ## Parse out function name and arguments
        m = re.match(r'(\S+)\((.*)\)', cmd)
        if m is None:
            raise Exception("Command must be in the form 'function(arguments)'")
        fn = m.groups()[0]
        argstr = m.groups()[1]
        strargs = re.split(r'\s*,\s*', argstr)
        ## Guess types for arguments
        # 'true'/'false' -> bool, leading letter -> str, '' -> dropped,
        # contains '.' -> float, otherwise int.
        args = []
        for a in strargs:
            if a.lower() == 'true':
                args.append(True)
            elif a.lower() == 'false':
                args.append(False)
            elif re.match(r'[a-zA-Z]', a):
                args.append(a)
            elif a == '':
                pass
            else:
                if '.' in a:
                    args.append(float(a))
                else:
                    args.append(int(a))
        print("%s call: %s(%s)" % (self.addr[0], fn, str(args)))
        ## Run function
        # serialize driver access across all worker threads
        self.lock.acquire()
        try:
            ret = getattr(self.mc, fn)(*args)
        except:
            self.lock.release()
            raise
        self.lock.release()
        if not isinstance(ret, list):
            ret = [ret]
        return ret
    def readline(self):
        # Read one '\n'-terminated line from the socket, one byte at a time.
        # Returns None when the peer disconnects (cleanly or mid-command).
        l = ''
        while True:
            c = self.conn.recv(1)
            if c == '':
                if len(l) > 0:
                    print(self.addr[0], "Connection closing with incomplete command:", l)
                return None
            l += c
            if l[-1] == '\n':
                return l
main() | acq4/acq4 | acq4/drivers/MultiClamp/multiclamp_server/multiClampServer.py | Python | mit | 3,071 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from optparse import OptionParser
import os
import re
import unittest
import sys
import dmunit
import genfiles
def main(ip, port, scripts, directory, isTestDevice):
    """Build and run the SUT-agent device test suite.

    @param ip: IP address of the device running SUTAgent.
    @param port: SUTAgent port on the device.
    @param scripts: test module names to run; if empty, ``directory`` is
        scanned for files named ``test_*.py``.
    @param directory: directory scanned for tests when ``scripts`` is empty.
    @param isTestDevice: True when the device is a local test agent.
    """
    dmunit.ip = ip
    dmunit.port = port
    suite = unittest.TestSuite()

    genfiles.gen_test_files()

    if scripts:
        # Ensure the user didn't include the .py on the name of the test file
        # (and get rid of it if they did).
        # FIX: list comprehension instead of map() so the result is a list
        # on Python 3 as well.
        scripts = [s.split('.')[0] for s in scripts]
    else:
        # Go through the directory and pick up everything
        # named test_*.py and run it
        # FIX: raw string -- '\.' is a regex escape, not a string escape.
        testfile = re.compile(r'^test_.*\.py$')
        files = os.listdir(directory)
        for f in files:
            if testfile.match(f):
                scripts.append(f.split('.')[0])

    testLoader = dmunit.DeviceManagerTestLoader(isTestDevice)
    for s in scripts:
        suite.addTest(testLoader.loadTestsFromModuleName(s))
    unittest.TextTestRunner(verbosity=2).run(suite)

    genfiles.clean_test_files()
if __name__ == "__main__":
default_ip = '127.0.0.1'
default_port = 20701
env_ip, _, env_port = os.getenv('TEST_DEVICE', '').partition(':')
if env_port:
try:
env_port = int(env_port)
except ValueError:
print >> sys.stderr, "Port in TEST_DEVICE should be an integer."
sys.exit(1)
# Deal with the options
parser = OptionParser()
parser.add_option("--ip", action="store", type="string", dest="ip",
help="IP address for device running SUTAgent, defaults "
"to what's provided in $TEST_DEVICE or 127.0.0.1",
default=(env_ip or default_ip))
parser.add_option("--port", action="store", type="int", dest="port",
help="Port of SUTAgent on device, defaults to "
"what's provided in $TEST_DEVICE or 20701",
default=(env_port or default_port))
parser.add_option("--script", action="append", type="string",
dest="scripts", help="Name of test script to run, "
"can be specified multiple times", default=[])
parser.add_option("--directory", action="store", type="string", dest="dir",
help="Directory to look for tests in, defaults to "
"current directory", default=os.getcwd())
parser.add_option("--testDevice", action="store_true", dest="isTestDevice",
help="Specifies that the device is a local test agent",
default=False)
(options, args) = parser.parse_args()
main(options.ip, options.port, options.scripts,
options.dir, options.isTestDevice)
| wilebeast/FireFox-OS | B2G/gecko/testing/mozbase/mozdevice/sut_tests/runtests.py | Python | apache-2.0 | 2,917 |
#!/usr/bin/python
import os
import sys
import os.path as osp
import cv2
import numpy as np
import argparse as ap
conf_parser = ap.ArgumentParser(description='Traverse all .mp4 video files in the specified folder and pick out frames to export as images.')
conf_parser.add_argument("-f", "--folder", help="Folder to process",
                    required=False, default= ["./"])
if __name__ == "__main__":
    args = conf_parser.parse_args()
    # All .mp4 files directly inside the target folder, sorted by name.
    files = [f for f in os.listdir(args.folder) if osp.isfile(args.folder + f) and f.endswith(".mp4")]
    files.sort();
    # Stereo mode is used only when *every* file ends in left.mp4/right.mp4.
    left_right_prefix = True
    left_files = []
    right_files = []
    for file in files:
        if file.endswith("left.mp4"):
            left_files.append(file)
        elif file.endswith("right.mp4"):
            right_files.append(file)
        else:
            left_right_prefix = False
    if left_right_prefix:
        num_pairs = len(left_files)
        if len(left_files) != len(right_files):
            print "Warning: there are unequal amounts of files marked as right and files marked as left."
            num_pairs = min(len(left_files), len(right_files))
        for i_pair in range(num_pairs):
            # NOTE(review): 'f' is the leaked comprehension variable from the
            # files[] listing above (a Python 2 quirk), so this message shows
            # the last listed file, not the current pair -- confirm intent.
            print "Processing pair " + f
            left_file = left_files[i_pair]
            right_file = right_files[i_pair]
            left_cap = cv2.VideoCapture(args.folder + left_file)
            right_cap = cv2.VideoCapture(args.folder + right_file)
            cont_cap = True
            i_frame = 0
            while(cont_cap):
                lret, lframe = left_cap.read()
                rret, rframe = right_cap.read()
                cont_cap = lret and rret
                if cont_cap:
                    # Show the pair side by side. Keys: q = quit, n or
                    # keycode 28 = next pair, s = save both frames as PNG,
                    # anything else = advance one frame.
                    combo = np.hstack((lframe,rframe))
                    cv2.imshow("video",combo)
                    key = cv2.waitKey() & 0xFF
                    if (key == ord('q') or key == ord('n') or key == 28):
                        break
                    elif (key == ord('s')):
                        fnl = "frame_" + left_file[:-4] + "_" + str(i_frame) + ".png"
                        fnr = "frame_" + right_file[:-4] + "_" + str(i_frame) + ".png"
                        cv2.imwrite(args.folder + fnl,lframe)
                        cv2.imwrite(args.folder + fnr,rframe)
                        print "saving frame " + str(i_frame)
                    else:
                        print "next frame pair"
                i_frame += 1
            left_cap.release()
            right_cap.release()
            if(key == ord('q')):
                break
    else:
        # Mono mode: step through each video one frame at a time.
        for f in files:
            cap = cv2.VideoCapture(args.folder + f)
            ret = True;
            i_fr = 0;
            key = ''
            print "Processing file " + f
            while(ret):
                ret, frame = cap.read()
                if ret:
                    # Keys: q = quit, n = next file, s = save frame as PNG,
                    # anything else = advance one frame.
                    cv2.imshow("video",frame)
                    key = cv2.waitKey() & 0xFF
                    if (key == ord('q') or key == ord('n')):
                        break
                    elif (key == ord('s')):
                        fn = f[:-4] + "_" + str(i_fr) + ".png"
                        cv2.imwrite(args.folder + fn,frame)
                        print "saving frame as " + fn
                    else:
                        print "next frame"
                i_fr += 1
            cap.release()
            if(key == ord('q')):
                break
| Algomorph/reco | scripts/extract_frames_multivideo.py | Python | apache-2.0 | 2,649 |
"""
Integrate functions by rewriting them as Meijer G-functions.
There are three user-visible functions that can be used by other parts of the
sympy library to solve various integration problems:
- meijerint_indefinite
- meijerint_definite
- meijerint_inversion
They can be used to compute, respectively, indefinite integrals, definite
integrals over intervals of the real line, and inverse laplace-type integrals
(from c-I*oo to c+I*oo). See the respective docstrings for details.
The main references for this are:
[L] Luke, Y. L. (1969), The Special Functions and Their Approximations,
Volume 1
[R] Kelly B. Roach. Meijer G Function Representations.
In: Proceedings of the 1997 International Symposium on Symbolic and
Algebraic Computation, pages 205-211, New York, 1997. ACM.
[P] A. P. Prudnikov, Yu. A. Brychkov and O. I. Marichev (1990).
Integrals and Series: More Special Functions, Vol. 3,.
Gordon and Breach Science Publisher
"""
from __future__ import print_function, division
from sympy.core import oo, S, pi, Expr
from sympy.core.exprtools import factor_terms
from sympy.core.function import expand, expand_mul, expand_power_base
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core.compatibility import range
from sympy.core.cache import cacheit
from sympy.core.symbol import Dummy, Wild
from sympy.simplify import hyperexpand, powdenest, collect
from sympy.logic.boolalg import And, Or, BooleanAtom
from sympy.functions.special.delta_functions import Heaviside
from sympy.functions.elementary.exponential import exp
from sympy.functions.elementary.piecewise import Piecewise, piecewise_fold
from sympy.functions.elementary.hyperbolic import \
_rewrite_hyperbolics_as_exp, HyperbolicFunction
from sympy.functions.special.hyper import meijerg
from sympy.utilities.iterables import multiset_partitions, ordered
from sympy.utilities.misc import debug as _debug
from sympy.utilities import default_sort_key
# keep this at top for easy reference
# z is the dummy variable every lookup-table formula is expressed in.
z = Dummy('z')
def _has(res, *f):
    """Return True if ``res`` contains ``f``.

    Piecewise expressions are folded first; for a Piecewise result, *all*
    pieces must contain ``f``.
    """
    folded = piecewise_fold(res)
    if not getattr(folded, 'is_Piecewise', False):
        return folded.has(*f)
    return all(_has(piece, *f) for piece in folded.args)
def _create_lookup_table(table):
    """ Add formulae for the function -> meijerg lookup table.

    ``table`` maps a type signature (see _mytype) to a list of entries
    ``(formula, instantiation, cond, hint)``; ``add``/``addi`` below show
    the instantiation is a list of ``(fac, meijerg(...))`` terms
    (apparently representing the formula as their combination), or a
    callable producing such a list. Section numbers refer to [P]
    (Prudnikov et al., see module docstring).
    """
    def wild(n):
        return Wild(n, exclude=[z])
    p, q, a, b, c = list(map(wild, 'pqabc'))
    n = Wild('n', properties=[lambda x: x.is_Integer and x > 0])
    # generic argument: p*z**q
    t = p*z**q
    def add(formula, an, ap, bm, bq, arg=t, fac=S(1), cond=True, hint=True):
        table.setdefault(_mytype(formula, z), []).append((formula,
                                   [(fac, meijerg(an, ap, bm, bq, arg))], cond, hint))
    def addi(formula, inst, cond, hint=True):
        table.setdefault(
            _mytype(formula, z), []).append((formula, inst, cond, hint))
    def constant(a):
        return [(a, meijerg([1], [], [], [0], z)),
                (a, meijerg([], [1], [0], [], z))]
    table[()] = [(a, constant(a), True, True)]
    # [P], Section 8.
    from sympy import unpolarify, Function, Not
    class IsNonPositiveInteger(Function):
        @classmethod
        def eval(cls, arg):
            arg = unpolarify(arg)
            if arg.is_Integer is True:
                return arg <= 0
    # Section 8.4.2
    from sympy import (gamma, pi, cos, exp, re, sin, sinc, sqrt, sinh, cosh,
                       factorial, log, erf, erfc, erfi, polar_lift)
    # TODO this needs more polar_lift (c/f entry for exp)
    add(Heaviside(t - b)*(t - b)**(a - 1), [a], [], [], [0], t/b,
        gamma(a)*b**(a - 1), And(b > 0))
    add(Heaviside(b - t)*(b - t)**(a - 1), [], [a], [0], [], t/b,
        gamma(a)*b**(a - 1), And(b > 0))
    add(Heaviside(z - (b/p)**(1/q))*(t - b)**(a - 1), [a], [], [], [0], t/b,
        gamma(a)*b**(a - 1), And(b > 0))
    add(Heaviside((b/p)**(1/q) - z)*(b - t)**(a - 1), [], [a], [0], [], t/b,
        gamma(a)*b**(a - 1), And(b > 0))
    add((b + t)**(-a), [1 - a], [], [0], [], t/b, b**(-a)/gamma(a),
        hint=Not(IsNonPositiveInteger(a)))
    add(abs(b - t)**(-a), [1 - a], [(1 - a)/2], [0], [(1 - a)/2], t/b,
        pi/(gamma(a)*cos(pi*a/2))*abs(b)**(-a), re(a) < 1)
    add((t**a - b**a)/(t - b), [0, a], [], [0, a], [], t/b,
        b**(a - 1)*sin(a*pi)/pi)
    # 12
    def A1(r, sign, nu):
        return pi**(-S(1)/2)*(-sign*nu/2)**(1 - 2*r)
    def tmpadd(r, sgn):
        # XXX the a**2 is bad for matching
        add((sqrt(a**2 + t) + sgn*a)**b/(a**2 + t)**r,
            [(1 + b)/2, 1 - 2*r + b/2], [],
            [(b - sgn*b)/2], [(b + sgn*b)/2], t/a**2,
            a**(b - 2*r)*A1(r, sgn, b))
    tmpadd(0, 1)
    tmpadd(0, -1)
    tmpadd(S(1)/2, 1)
    tmpadd(S(1)/2, -1)
    # 13
    def tmpadd(r, sgn):
        add((sqrt(a + p*z**q) + sgn*sqrt(p)*z**(q/2))**b/(a + p*z**q)**r,
            [1 - r + sgn*b/2], [1 - r - sgn*b/2], [0, S(1)/2], [],
            p*z**q/a, a**(b/2 - r)*A1(r, sgn, b))
    tmpadd(0, 1)
    tmpadd(0, -1)
    tmpadd(S(1)/2, 1)
    tmpadd(S(1)/2, -1)
    # (those after look obscure)
    # Section 8.4.3
    add(exp(polar_lift(-1)*t), [], [], [0], [])
    # TODO can do sin^n, sinh^n by expansion ... where?
    # 8.4.4 (hyperbolic functions)
    add(sinh(t), [], [1], [S(1)/2], [1, 0], t**2/4, pi**(S(3)/2))
    add(cosh(t), [], [S(1)/2], [0], [S(1)/2, S(1)/2], t**2/4, pi**(S(3)/2))
    # Section 8.4.5
    # TODO can do t + a. but can also do by expansion... (XXX not really)
    add(sin(t), [], [], [S(1)/2], [0], t**2/4, sqrt(pi))
    add(cos(t), [], [], [0], [S(1)/2], t**2/4, sqrt(pi))
    # Section 8.4.6 (sinc function)
    add(sinc(t), [], [], [0], [S(-1)/2], t**2/4, sqrt(pi)/2)
    # Section 8.5.5
    def make_log1(subs):
        N = subs[n]
        return [((-1)**N*factorial(N),
                 meijerg([], [1]*(N + 1), [0]*(N + 1), [], t))]
    def make_log2(subs):
        N = subs[n]
        return [(factorial(N),
                 meijerg([1]*(N + 1), [], [], [0]*(N + 1), t))]
    # TODO these only hold for positive p, and can be made more general
    # but who uses log(x)*Heaviside(a-x) anyway ...
    # TODO also it would be nice to derive them recursively ...
    addi(log(t)**n*Heaviside(1 - t), make_log1, True)
    addi(log(t)**n*Heaviside(t - 1), make_log2, True)
    def make_log3(subs):
        return make_log1(subs) + make_log2(subs)
    addi(log(t)**n, make_log3, True)
    addi(log(t + a),
         constant(log(a)) + [(S(1), meijerg([1, 1], [], [1], [0], t/a))],
         True)
    addi(log(abs(t - a)), constant(log(abs(a))) +
         [(pi, meijerg([1, 1], [S(1)/2], [1], [0, S(1)/2], t/a))],
         True)
    # TODO log(x)/(x+a) and log(x)/(x-1) can also be done. should they
    # be derivable?
    # TODO further formulae in this section seem obscure
    # Sections 8.4.9-10
    # TODO
    # Section 8.4.11
    from sympy import Ei, I, expint, Si, Ci, Shi, Chi, fresnels, fresnelc
    addi(Ei(t),
         constant(-I*pi) + [(S(-1), meijerg([], [1], [0, 0], [],
                  t*polar_lift(-1)))],
         True)
    # Section 8.4.12
    add(Si(t), [1], [], [S(1)/2], [0, 0], t**2/4, sqrt(pi)/2)
    add(Ci(t), [], [1], [0, 0], [S(1)/2], t**2/4, -sqrt(pi)/2)
    # Section 8.4.13
    add(Shi(t), [S(1)/2], [], [0], [S(-1)/2, S(-1)/2], polar_lift(-1)*t**2/4,
        t*sqrt(pi)/4)
    add(Chi(t), [], [S(1)/2, 1], [0, 0], [S(1)/2, S(1)/2], t**2/4, -
        pi**S('3/2')/2)
    # generalized exponential integral
    add(expint(a, t), [], [a], [a - 1, 0], [], t)
    # Section 8.4.14
    add(erf(t), [1], [], [S(1)/2], [0], t**2, 1/sqrt(pi))
    # TODO exp(-x)*erf(I*x) does not work
    add(erfc(t), [], [1], [0, S(1)/2], [], t**2, 1/sqrt(pi))
    # This formula for erfi(z) yields a wrong(?) minus sign
    #add(erfi(t), [1], [], [S(1)/2], [0], -t**2, I/sqrt(pi))
    add(erfi(t), [S(1)/2], [], [0], [-S(1)/2], -t**2, t/sqrt(pi))
    # Fresnel Integrals
    add(fresnels(t), [1], [], [S(3)/4], [0, S(1)/4], pi**2*t**4/16, S(1)/2)
    add(fresnelc(t), [1], [], [S(1)/4], [0, S(3)/4], pi**2*t**4/16, S(1)/2)
    ##### bessel-type functions #####
    from sympy import besselj, bessely, besseli, besselk
    # Section 8.4.19
    add(besselj(a, t), [], [], [a/2], [-a/2], t**2/4)
    # all of the following are derivable
    #add(sin(t)*besselj(a, t), [S(1)/4, S(3)/4], [], [(1+a)/2],
    #    [-a/2, a/2, (1-a)/2], t**2, 1/sqrt(2))
    #add(cos(t)*besselj(a, t), [S(1)/4, S(3)/4], [], [a/2],
    #    [-a/2, (1+a)/2, (1-a)/2], t**2, 1/sqrt(2))
    #add(besselj(a, t)**2, [S(1)/2], [], [a], [-a, 0], t**2, 1/sqrt(pi))
    #add(besselj(a, t)*besselj(b, t), [0, S(1)/2], [], [(a + b)/2],
    #    [-(a+b)/2, (a - b)/2, (b - a)/2], t**2, 1/sqrt(pi))
    # Section 8.4.20
    add(bessely(a, t), [], [-(a + 1)/2], [a/2, -a/2], [-(a + 1)/2], t**2/4)
    # TODO all of the following should be derivable
    #add(sin(t)*bessely(a, t), [S(1)/4, S(3)/4], [(1 - a - 1)/2],
    #    [(1 + a)/2, (1 - a)/2], [(1 - a - 1)/2, (1 - 1 - a)/2, (1 - 1 + a)/2],
    #    t**2, 1/sqrt(2))
    #add(cos(t)*bessely(a, t), [S(1)/4, S(3)/4], [(0 - a - 1)/2],
    #    [(0 + a)/2, (0 - a)/2], [(0 - a - 1)/2, (1 - 0 - a)/2, (1 - 0 + a)/2],
    #    t**2, 1/sqrt(2))
    #add(besselj(a, t)*bessely(b, t), [0, S(1)/2], [(a - b - 1)/2],
    #    [(a + b)/2, (a - b)/2], [(a - b - 1)/2, -(a + b)/2, (b - a)/2],
    #    t**2, 1/sqrt(pi))
    #addi(bessely(a, t)**2,
    #     [(2/sqrt(pi), meijerg([], [S(1)/2, S(1)/2 - a], [0, a, -a],
    #                           [S(1)/2 - a], t**2)),
    #      (1/sqrt(pi), meijerg([S(1)/2], [], [a], [-a, 0], t**2))],
    #     True)
    #addi(bessely(a, t)*bessely(b, t),
    #     [(2/sqrt(pi), meijerg([], [0, S(1)/2, (1 - a - b)/2],
    #                           [(a + b)/2, (a - b)/2, (b - a)/2, -(a + b)/2],
    #                           [(1 - a - b)/2], t**2)),
    #      (1/sqrt(pi), meijerg([0, S(1)/2], [], [(a + b)/2],
    #                           [-(a + b)/2, (a - b)/2, (b - a)/2], t**2))],
    #     True)
    # Section 8.4.21 ?
    # Section 8.4.22
    add(besseli(a, t), [], [(1 + a)/2], [a/2], [-a/2, (1 + a)/2], t**2/4, pi)
    # TODO many more formulas. should all be derivable
    # Section 8.4.23
    add(besselk(a, t), [], [], [a/2, -a/2], [], t**2/4, S(1)/2)
    # TODO many more formulas. should all be derivable
    # Complete elliptic integrals K(z) and E(z)
    from sympy import elliptic_k, elliptic_e
    add(elliptic_k(t), [S.Half, S.Half], [], [0], [0], -t, S.Half)
    add(elliptic_e(t), [S.Half, 3*S.Half], [], [0], [0], -t, -S.Half/2)
####################################################################
# First some helper functions.
####################################################################
from sympy.utilities.timeutils import timethis
# Decorator that records timing for meijerg-related functions under the
# 'meijerg' label.
timeit = timethis('meijerg')
def _mytype(f, x):
    """ Create a hashable entity describing the type of f.

    Returns () if ``f`` does not depend on ``x``, the 1-tuple ``(type(f),)``
    for a function of ``x``, and otherwise a sorted tuple of the function
    classes (involving ``x``) collected recursively from ``f``'s arguments.
    The result is only used as a hashable cache key, so any deterministic
    ordering is acceptable.
    """
    if x not in f.free_symbols:
        return ()
    elif f.is_Function:
        return (type(f),)
    else:
        types = [_mytype(a, x) for a in f.args]
        res = []
        for t in types:
            res += list(t)
        # Sort by string representation: class objects themselves are not
        # orderable on Python 3 (bare res.sort() raises TypeError there),
        # and str() of a class is unique and deterministic.
        res.sort(key=str)
        return tuple(res)
class _CoeffExpValueError(ValueError):
    """Internal-use exception.

    Raised by _get_coeff_exp when an expression is not of the expected
    form c*x**b.
    """
    pass
def _get_coeff_exp(expr, x):
    """
    When expr is known to be of the form c*x**b, with c and/or b possibly 1,
    return c, b.

    >>> from sympy.abc import x, a, b
    >>> from sympy.integrals.meijerint import _get_coeff_exp
    >>> _get_coeff_exp(a*x**b, x)
    (a, b)
    >>> _get_coeff_exp(x, x)
    (1, 1)
    >>> _get_coeff_exp(2*x, x)
    (2, 1)
    >>> _get_coeff_exp(x**3, x)
    (1, 3)
    """
    from sympy import powsimp
    # Normalise first so the x-dependence collects into a single factor.
    coeff, rest = expand_power_base(powsimp(expr)).as_coeff_mul(x)
    if not rest:
        # No x-dependence at all: exponent is zero.
        return coeff, S(0)
    [factor] = rest
    if factor == x:
        return coeff, S(1)
    if factor.is_Pow:
        if factor.base != x:
            raise _CoeffExpValueError('expr not of form a*x**b')
        return coeff, factor.exp
    raise _CoeffExpValueError('expr not of form a*x**b: %s' % expr)
def _exponents(expr, x):
    """
    Find the exponents of ``x`` (not including zero) in ``expr``.

    >>> from sympy.integrals.meijerint import _exponents
    >>> from sympy.abc import x, y
    >>> from sympy import sin
    >>> _exponents(x, x)
    set([1])
    >>> _exponents(x**2, x)
    set([2])
    >>> _exponents(x**2 + x, x)
    set([1, 2])
    >>> _exponents(x**3*sin(x + x**y) + 1/x, x)
    set([-1, 1, 3, y])
    """
    def walk(node, found):
        # A bare occurrence of x counts as exponent 1.
        if node == x:
            found.add(1)
        # x**e contributes e; do not descend further into the Pow.
        elif node.is_Pow and node.base == x:
            found.add(node.exp)
        else:
            for sub in node.args:
                walk(sub, found)

    found = set()
    walk(expr, found)
    return found
def _functions(expr, x):
    """ Find the types of functions in expr, to estimate the complexity. """
    from sympy import Function
    # Only count functions that actually involve x.
    return {f.func for f in expr.atoms(Function) if x in f.free_symbols}
def _find_splitting_points(expr, x):
    """
    Find numbers a such that a linear substitution x -> x + a would
    (hopefully) simplify expr.

    >>> from sympy.integrals.meijerint import _find_splitting_points as fsp
    >>> from sympy import sin
    >>> from sympy.abc import a, x
    >>> fsp(x, x)
    set([0])
    >>> fsp((x-1)**3, x)
    set([1])
    >>> fsp(sin(x+3)*x, x)
    set([-3, 0])
    """
    p, q = [Wild(n, exclude=[x]) for n in 'pq']

    def walk(node, found):
        if not isinstance(node, Expr):
            return
        # A linear subexpression p*x + q has root -q/p; that root is a
        # candidate shift.
        match = node.match(p*x + q)
        if match and match[p] != 0:
            found.add(-match[q]/match[p])
            return
        if node.is_Atom:
            return
        for sub in node.args:
            walk(sub, found)

    found = set()
    walk(expr, found)
    return found
def _split_mul(f, x):
    """
    Split expression ``f`` into fac, po, g, where fac is a constant factor,
    po = x**s for some s independent of x, and g is "the rest".

    >>> from sympy.integrals.meijerint import _split_mul
    >>> from sympy import sin
    >>> from sympy.abc import s, x
    >>> _split_mul((3*x)**s*sin(x**2)*x, x)
    (3**s, x*x**s, sin(x**2))
    """
    from sympy import polarify, unpolarify
    fac, po, g = S(1), S(1), S(1)
    for factor in Mul.make_args(expand_power_base(f)):
        if factor == x:
            po *= x
        elif x not in factor.free_symbols:
            # x-independent factors collect into the constant part.
            fac *= factor
        else:
            if factor.is_Pow and x not in factor.exp.free_symbols:
                # (c*x)**e splits into c**e (constant) and x**e, taking
                # care of branch cuts via polarify/unpolarify.
                c, t = factor.base.as_coeff_mul(x)
                if t != (x,):
                    c, t = expand_mul(factor.base).as_coeff_mul(x)
                if t == (x,):
                    po *= x**factor.exp
                    fac *= unpolarify(polarify(c**factor.exp, subs=False))
                    continue
            # Anything else with x-dependence is "the rest".
            g *= factor
    return fac, po, g
def _mul_args(f):
    """
    Return a list ``L`` such that Mul(*L) == f.

    If f is not a Mul or Pow, L=[f].
    If f=g**n for an integer n, L=[g]*n.
    If f is a Mul, L comes from applying _mul_args to all factors of f.
    """
    result = []
    for factor in Mul.make_args(f):
        if factor.is_Pow and factor.exp.is_Integer:
            count, base = factor.exp, factor.base
            if count < 0:
                # g**(-n) -> n copies of 1/g.
                count, base = -count, 1/base
            result.extend([base]*count)
        else:
            result.append(factor)
    return result
def _mul_as_two_parts(f):
    """
    Find all the ways to split f into a product of two terms.
    Return None on failure.

    Although the order is canonical from multiset_partitions, this is
    not necessarily the best order to process the terms. For example,
    if the case of len(gs) == 2 is removed and multiset is allowed to
    sort the terms, some tests fail.

    >>> from sympy.integrals.meijerint import _mul_as_two_parts
    >>> from sympy import sin, exp, ordered
    >>> from sympy.abc import x
    >>> list(ordered(_mul_as_two_parts(x*sin(x)*exp(x))))
    [(x, exp(x)*sin(x)), (x*exp(x), sin(x)), (x*sin(x), exp(x))]
    """
    factors = _mul_args(f)
    count = len(factors)
    if count < 2:
        return None
    if count == 2:
        # Keep the original order for exactly two factors (see note above).
        return [tuple(factors)]
    return [(Mul(*part1), Mul(*part2))
            for part1, part2 in multiset_partitions(factors, 2)]
def _inflate_g(g, n):
    """ Return C, h such that h is a G function of argument z**n and
    g = C*h. """
    # TODO should this be a method of meijerg?
    # See: [L, page 150, equation (5)]
    def inflate(params, n):
        """ (a1, .., ak) -> (a1/n, (a1+1)/n, ..., (ak + n-1)/n) """
        return [(a + i)/n for a in params for i in range(n)]

    v = S(len(g.ap) - len(g.bq))
    # Compensating constant from the multiplication theorem.
    C = n**(1 + g.nu + v/2)
    C /= (2*pi)**((n - 1)*g.delta)
    return C, meijerg(inflate(g.an, n), inflate(g.aother, n),
                      inflate(g.bm, n), inflate(g.bother, n),
                      g.argument**n * n**(n*v))
def _flip_g(g):
    """ Turn the G function into one of inverse argument
    (i.e. G(1/x) -> G'(x)) """
    # See [L], section 5.2: reflect all parameters (a -> 1 - a) and swap
    # the roles of the numerator/denominator parameter groups.
    def reflect(params):
        return [1 - a for a in params]
    return meijerg(reflect(g.bm), reflect(g.bother),
                   reflect(g.an), reflect(g.aother), 1/g.argument)
def _inflate_fox_h(g, a):
    r"""
    Let d denote the integrand in the definition of the G function ``g``.
    Consider the function H which is defined in the same way, but with
    integrand d/Gamma(a*s) (contour conventions as usual).

    If a is rational, the function H can be written as C*G, for a constant C
    and a G-function G.

    This function returns C, G.
    """
    if a < 0:
        # Reduce to the positive case by inverting the argument.
        return _inflate_fox_h(_flip_g(g), -a)
    p, q = S(a.p), S(a.q)
    # We use the substitution s->qs, i.e. inflate g by q. We are left with an
    # extra factor of Gamma(p*s), for which we use Gauss' multiplication
    # theorem.
    D, g = _inflate_g(g, q)
    z = g.argument / p**p
    D /= (2*pi)**((1 - p)/2)*p**(-S(1)/2)
    extra_bs = [(n + 1)/p for n in range(p)]
    return D, meijerg(g.an, g.aother, g.bm, list(g.bother) + extra_bs, z)
# Cache of globally-unique Dummy symbols keyed by (name, token); shared by
# _dummy/_dummy_ below so repeated requests return the same symbol.
_dummies = {}
def _dummy(name, token, expr, **kwargs):
    """
    Return a dummy. This will return the same dummy if the same token+name is
    requested more than once, and it is not already in expr.
    This is for being cache-friendly.
    """
    candidate = _dummy_(name, token, **kwargs)
    if candidate in expr.free_symbols:
        # The cached dummy already occurs in expr; a fresh one is needed
        # to avoid a name clash.
        return Dummy(name, **kwargs)
    return candidate
def _dummy_(name, token, **kwargs):
    """
    Return a dummy associated to name and token. Same effect as declaring
    it globally.
    """
    global _dummies
    # Idiomatic membership test (was: ``not (name, token) in _dummies``).
    if (name, token) not in _dummies:
        _dummies[(name, token)] = Dummy(name, **kwargs)
    return _dummies[(name, token)]
def _is_analytic(f, x):
    """ Check if f(x), when expressed using G functions on the positive reals,
    will in fact agree with the G functions almost everywhere """
    from sympy import Heaviside, Abs
    # Heaviside and Abs of x break analyticity; f is fine only if no such
    # atom depends on x.
    return all(x not in expr.free_symbols
               for expr in f.atoms(Heaviside, Abs))
def _condsimp(cond):
    """
    Do naive simplifications on ``cond``.

    Note that this routine is completely ad-hoc, simplification rules being
    added as need arises rather than following any logical pattern.

    >>> from sympy.integrals.meijerint import _condsimp as simp
    >>> from sympy import Or, Eq, unbranched_argument as arg, And
    >>> from sympy.abc import x, y, z
    >>> simp(Or(x < y, z, Eq(x, y)))
    Or(x <= y, z)
    >>> simp(Or(x <= y, And(x < y, z)))
    x <= y
    """
    from sympy import (
        symbols, Wild, Eq, unbranched_argument, exp_polar, pi, I,
        periodic_argument, oo, polar_lift)
    from sympy.logic.boolalg import BooleanFunction
    if not isinstance(cond, BooleanFunction):
        return cond
    # Simplify the arguments bottom-up first.
    cond = cond.func(*list(map(_condsimp, cond.args)))
    change = True
    p, q, r = symbols('p q r', cls=Wild)
    # Each rule is a pair (pattern, replacement): when all of the pattern's
    # operands are found among cond's operands, they are replaced by the
    # single replacement operand.
    rules = [
        (Or(p < q, Eq(p, q)), p <= q),
        # The next two obviously are instances of a general pattern, but it is
        # easier to spell out the few cases we care about.
        (And(abs(unbranched_argument(p)) <= pi,
             abs(unbranched_argument(exp_polar(-2*pi*I)*p)) <= pi),
         Eq(unbranched_argument(exp_polar(-I*pi)*p), 0)),
        (And(abs(unbranched_argument(p)) <= pi/2,
             abs(unbranched_argument(exp_polar(-pi*I)*p)) <= pi/2),
         Eq(unbranched_argument(exp_polar(-I*pi/2)*p), 0)),
        (Or(p <= q, And(p < q, r)), p <= q)
    ]
    # Apply the rules repeatedly until a fixed point is reached.
    while change:
        change = False
        for fro, to in rules:
            if fro.func != cond.func:
                continue
            for n, arg in enumerate(cond.args):
                # If the pattern involves the wildcard r, anchor the match on
                # the pattern's *second* operand instead of the first.
                if r in fro.args[0].free_symbols:
                    m = arg.match(fro.args[1])
                    num = 1
                else:
                    num = 0
                    m = arg.match(fro.args[0])
                if not m:
                    continue
                # Check that the remaining operands of the pattern also occur
                # (possibly inside an And containing r) among cond's operands.
                otherargs = [x.subs(m) for x in fro.args[:num] + fro.args[num + 1:]]
                otherlist = [n]
                for arg2 in otherargs:
                    for k, arg3 in enumerate(cond.args):
                        if k in otherlist:
                            continue
                        if arg2 == arg3:
                            otherlist += [k]
                            break
                        if arg3.func is And and arg2.args[1] == r and \
                                arg2.func is And and arg2.args[0] in arg3.args:
                            otherlist += [k]
                            break
                        if arg3.func is And and arg2.args[0] == r and \
                                arg2.func is And and arg2.args[1] in arg3.args:
                            otherlist += [k]
                            break
                if len(otherlist) != len(otherargs) + 1:
                    continue
                # All pattern operands matched: replace them by the target.
                newargs = [arg for (k, arg) in enumerate(cond.args)
                           if k not in otherlist] + [to.subs(m)]
                cond = cond.func(*newargs)
                change = True
                break

    # final tweak
    # Rewrite equations of the form unbranched_argument(...) == 0 (or
    # periodic_argument(x, oo) == 0 for non-polar x) as positivity conditions.
    def repl_eq(orig):
        if orig.lhs == 0:
            expr = orig.rhs
        elif orig.rhs == 0:
            expr = orig.lhs
        else:
            return orig
        m = expr.match(unbranched_argument(polar_lift(p)**q))
        if not m:
            if expr.func is periodic_argument and not expr.args[0].is_polar \
                    and expr.args[1] == oo:
                return (expr.args[0] > 0)
            return orig
        return (m[p] > 0)
    return cond.replace(
        lambda expr: expr.is_Relational and expr.rel_op == '==',
        repl_eq)
def _eval_cond(cond):
    """ Re-evaluate the conditions. """
    # Plain Python bools pass straight through; symbolic conditions are
    # re-evaluated and then simplified.
    return cond if isinstance(cond, bool) else _condsimp(cond.doit())
####################################################################
# Now the "backbone" functions to do actual integration.
####################################################################
def _my_principal_branch(expr, period, full_pb=False):
    """ Bring expr nearer to its principal branch by removing superfluous
    factors.
    This function does *not* guarantee to yield the principal branch,
    to avoid introducing opaque principal_branch() objects,
    unless full_pb=True. """
    from sympy import principal_branch
    result = principal_branch(expr, period)
    if full_pb:
        return result
    # Strip any remaining opaque principal_branch() wrappers, keeping only
    # the simplifications they triggered.
    return result.replace(principal_branch, lambda x, y: x)
def _rewrite_saxena_1(fac, po, g, x):
    """
    Rewrite the integral fac*po*g dx, from zero to infinity, as
    integral fac*G, where G has argument a*x. Note po=x**s.
    Return fac, G.
    """
    _, s = _get_coeff_exp(po, x)
    a, b = _get_coeff_exp(g.argument, x)
    a = _my_principal_branch(a, g.get_period())

    # We substitute t = x**b.
    shift = (s + 1)/b - 1
    C = fac/(abs(b)*a**shift)

    # Absorb a factor of (at)**((1 + s)/b - 1) by shifting all parameters.
    def shifted(params):
        return [par + shift for par in params]

    return C, meijerg(shifted(g.an), shifted(g.aother),
                      shifted(g.bm), shifted(g.bother), a*x)
def _check_antecedents_1(g, x, helper=False):
    r"""
    Return a condition under which the mellin transform of g exists.
    Any power of x has already been absorbed into the G function,
    so this is just int_0^\infty g dx.

    See [L, section 5.6.1]. (Note that s=1.)

    If ``helper`` is True, only check if the MT exists at infinity, i.e. if
    int_1^\infty g dx exists.
    """
    # NOTE if you update these conditions, please update the documentation as well
    from sympy import Eq, Not, ceiling, Ne, re, unbranched_argument as arg
    delta = g.delta
    eta, _ = _get_coeff_exp(g.argument, x)
    m, n, p, q = S([len(g.bm), len(g.an), len(g.ap), len(g.bq)])
    xi = m + n - p

    # Reduce the p > q case to p <= q by flipping the G function.
    if p > q:
        def tr(l):
            return [1 - x for x in l]
        return _check_antecedents_1(meijerg(tr(g.bm), tr(g.bother),
                                            tr(g.an), tr(g.aother), x/eta),
                                    x)

    # cond_3 covers the "numerator" parameters only; cond_3_star extends the
    # same accumulator ``tmp`` with the remaining parameters, so it implies
    # cond_3 (the reuse of ``tmp`` below is intentional).
    tmp = []
    for b in g.bm:
        tmp += [-re(b) < 1]
    for a in g.an:
        tmp += [1 < 1 - re(a)]
    cond_3 = And(*tmp)

    for b in g.bother:
        tmp += [-re(b) < 1]
    for a in g.aother:
        tmp += [1 < 1 - re(a)]
    cond_3_star = And(*tmp)

    cond_4 = (-re(g.nu) + (q + 1 - p)/2 > q - p)

    def debug(*msg):
        _debug(*msg)

    debug('Checking antecedents for 1 function:')
    debug('  delta=%s, eta=%s, m=%s, n=%s, p=%s, q=%s'
          % (delta, eta, m, n, p, q))
    debug('  ap = %s, %s' % (list(g.an), list(g.aother)))
    debug('  bq = %s, %s' % (list(g.bm), list(g.bother)))
    debug('  cond_3=%s, cond_3*=%s, cond_4=%s' % (cond_3, cond_3_star, cond_4))

    # Assemble the disjunction of all cases under which the MT exists.
    conds = []

    # case 1
    case1 = []
    tmp1 = [1 <= n, p < q, 1 <= m]
    tmp2 = [1 <= p, 1 <= m, Eq(q, p + 1), Not(And(Eq(n, 0), Eq(m, p + 1)))]
    tmp3 = [1 <= p, Eq(q, p)]
    for k in range(ceiling(delta/2) + 1):
        tmp3 += [Ne(abs(arg(eta)), (delta - 2*k)*pi)]
    tmp = [delta > 0, abs(arg(eta)) < delta*pi]
    extra = [Ne(eta, 0), cond_3]
    if helper:
        extra = []
    for t in [tmp1, tmp2, tmp3]:
        case1 += [And(*(t + tmp + extra))]
    conds += case1
    debug('  case 1:', case1)

    # case 2
    extra = [cond_3]
    if helper:
        extra = []
    case2 = [And(Eq(n, 0), p + 1 <= m, m <= q,
                 abs(arg(eta)) < delta*pi, *extra)]
    conds += case2
    debug('  case 2:', case2)

    # case 3
    extra = [cond_3, cond_4]
    if helper:
        extra = []
    case3 = [And(p < q, 1 <= m, delta > 0, Eq(abs(arg(eta)), delta*pi),
                 *extra)]
    case3 += [And(p <= q - 2, Eq(delta, 0), Eq(abs(arg(eta)), 0), *extra)]
    conds += case3
    debug('  case 3:', case3)

    # TODO altered cases 4-7

    # extra case from wofram functions site:
    # (reproduced verbatim from Prudnikov, section 2.24.2)
    # http://functions.wolfram.com/HypergeometricFunctions/MeijerG/21/02/01/
    case_extra = []
    case_extra += [Eq(p, q), Eq(delta, 0), Eq(arg(eta), 0), Ne(eta, 0)]
    if not helper:
        case_extra += [cond_3]
    s = []
    for a, b in zip(g.ap, g.bq):
        s += [b - a]
    case_extra += [re(Add(*s)) < 0]
    case_extra = And(*case_extra)
    conds += [case_extra]
    debug('  extra case:', [case_extra])

    case_extra_2 = [And(delta > 0, abs(arg(eta)) < delta*pi)]
    if not helper:
        case_extra_2 += [cond_3]
    case_extra_2 = And(*case_extra_2)
    conds += [case_extra_2]
    debug('  second extra case:', [case_extra_2])

    # TODO This leaves only one case from the three listed by Prudnikov.
    #      Investigate if these indeed cover everything; if so, remove the rest.

    return Or(*conds)
def _int0oo_1(g, x):
    r"""
    Evaluate int_0^\infty g dx using G functions,
    assuming the necessary conditions are fulfilled.

    >>> from sympy.abc import a, b, c, d, x, y
    >>> from sympy import meijerg
    >>> from sympy.integrals.meijerint import _int0oo_1
    >>> _int0oo_1(meijerg([a], [b], [c], [d], x*y), x)
    gamma(-a)*gamma(c + 1)/(y*gamma(-d)*gamma(b + 1))
    """
    # See [L, section 5.6.1]. Note that s=1, so gamma(s - a - 1) = gamma(-a).
    from sympy import gamma, combsimp, unpolarify
    eta, _ = _get_coeff_exp(g.argument, x)
    # XXX TODO we should reduce order first
    numer = Mul(*([gamma(b + 1) for b in g.bm] +
                  [gamma(-a) for a in g.an]))
    denom = Mul(*([gamma(-b) for b in g.bother] +
                  [gamma(a + 1) for a in g.aother]))
    return combsimp(unpolarify(numer/(eta*denom)))
def _rewrite_saxena(fac, po, g1, g2, x, full_pb=False):
    """
    Rewrite the integral fac*po*g1*g2 from 0 to oo in terms of G functions
    with argument c*x.

    Return C, f1, f2 such that integral C f1 f2 from 0 to infinity equals
    integral fac po g1 g2 from 0 to infinity.
    Returns None if either argument exponent is irrational.

    >>> from sympy.integrals.meijerint import _rewrite_saxena
    >>> from sympy.abc import s, t, m
    >>> from sympy import meijerg
    >>> g1 = meijerg([], [], [0], [], s*t)
    >>> g2 = meijerg([], [], [m/2], [-m/2], t**2/4)
    >>> r = _rewrite_saxena(1, t**0, g1, g2, t)
    >>> r[0]
    s/(4*sqrt(pi))
    >>> r[1]
    meijerg(((), ()), ((-1/2, 0), ()), s**2*t/4)
    >>> r[2]
    meijerg(((), ()), ((m/2,), (-m/2,)), t/4)
    """
    from sympy.core.numbers import ilcm

    def pb(g):
        # Normalise the coefficient of the argument towards its principal
        # branch (fully, if full_pb is set).
        a, b = _get_coeff_exp(g.argument, x)
        per = g.get_period()
        return meijerg(g.an, g.aother, g.bm, g.bother,
                       _my_principal_branch(a, per, full_pb)*x**b)

    _, s = _get_coeff_exp(po, x)
    _, b1 = _get_coeff_exp(g1.argument, x)
    _, b2 = _get_coeff_exp(g2.argument, x)
    # Make both argument exponents positive by flipping if necessary.
    # (The ``== True`` guards against undecidable symbolic comparisons.)
    if (b1 < 0) == True:
        b1 = -b1
        g1 = _flip_g(g1)
    if (b2 < 0) == True:
        b2 = -b2
        g2 = _flip_g(g2)
    if not b1.is_Rational or not b2.is_Rational:
        return
    # Inflate both G functions to a common argument exponent tau.
    m1, n1 = b1.p, b1.q
    m2, n2 = b2.p, b2.q
    tau = ilcm(m1*n2, m2*n1)
    r1 = tau//(m1*n2)
    r2 = tau//(m2*n1)

    C1, g1 = _inflate_g(g1, r1)
    C2, g2 = _inflate_g(g2, r2)
    g1 = pb(g1)
    g2 = pb(g2)

    fac *= C1*C2
    a1, b = _get_coeff_exp(g1.argument, x)
    a2, _ = _get_coeff_exp(g2.argument, x)

    # arbitrarily tack on the x**s part to g1
    # TODO should we try both?
    exp = (s + 1)/b - 1
    fac = fac/(abs(b) * a1**exp)

    def tr(l):
        return [a + exp for a in l]
    g1 = meijerg(tr(g1.an), tr(g1.aother), tr(g1.bm), tr(g1.bother), a1*x)
    g2 = meijerg(g2.an, g2.aother, g2.bm, g2.bother, a2*x)

    return powdenest(fac, polar=True), g1, g2
def _check_antecedents(g1, g2, x):
    """ Return a condition under which the integral theorem applies.

    ``g1`` and ``g2`` are Meijer G functions of arguments sigma*x and
    omega*x; the returned condition guarantees convergence of
    int_0^oo g1*g2 dx by enumerating the cases of [P], Section 2.24.1.
    """
    from sympy import re, Eq, Ne, cos, I, exp, sin, sign, unpolarify
    from sympy import arg as arg_, unbranched_argument as arg
    # Yes, this is madness.
    # XXX TODO this is a testing *nightmare*
    # NOTE if you update these conditions, please update the documentation as well

    # The following conditions are found in
    # [P], Section 2.24.1
    #
    # They are also reproduced (verbatim!) at
    # http://functions.wolfram.com/HypergeometricFunctions/MeijerG/21/02/03/
    #
    # Note: k=l=r=alpha=1
    sigma, _ = _get_coeff_exp(g1.argument, x)
    omega, _ = _get_coeff_exp(g2.argument, x)
    s, t, u, v = S([len(g1.bm), len(g1.an), len(g1.ap), len(g1.bq)])
    m, n, p, q = S([len(g2.bm), len(g2.an), len(g2.ap), len(g2.bq)])
    # Derived quantities used throughout the conditions below
    # (notation follows [P]).
    bstar = s + t - (u + v)/2
    cstar = m + n - (p + q)/2
    rho = g1.nu + (u - v)/2 + 1
    mu = g2.nu + (p - q)/2 + 1
    phi = q - p - (v - u)
    eta = 1 - (v - u) - mu - rho
    psi = (pi*(q - m - n) + abs(arg(omega)))/(q - p)
    theta = (pi*(v - s - t) + abs(arg(sigma)))/(v - u)
    _debug('Checking antecedents:')
    _debug('  sigma=%s, s=%s, t=%s, u=%s, v=%s, b*=%s, rho=%s'
           % (sigma, s, t, u, v, bstar, rho))
    _debug('  omega=%s, m=%s, n=%s, p=%s, q=%s, c*=%s, mu=%s,'
           % (omega, m, n, p, q, cstar, mu))
    _debug('  phi=%s, eta=%s, psi=%s, theta=%s' % (phi, eta, psi, theta))

    # Conditions c1 .. c15 are the building blocks of the cases below.
    def _c1():
        # No pair (a in an, b in bm) may differ by a positive integer.
        for g in [g1, g2]:
            for i in g.an:
                for j in g.bm:
                    diff = i - j
                    if diff.is_integer and diff.is_positive:
                        return False
        return True
    c1 = _c1()
    c2 = And(*[re(1 + i + j) > 0 for i in g1.bm for j in g2.bm])
    c3 = And(*[re(1 + i + j) < 1 + 1 for i in g1.an for j in g2.an])
    c4 = And(*[(p - q)*re(1 + i - 1) - re(mu) > -S(3)/2 for i in g1.an])
    c5 = And(*[(p - q)*re(1 + i) - re(mu) > -S(3)/2 for i in g1.bm])
    c6 = And(*[(u - v)*re(1 + i - 1) - re(rho) > -S(3)/2 for i in g2.an])
    c7 = And(*[(u - v)*re(1 + i) - re(rho) > -S(3)/2 for i in g2.bm])
    c8 = (abs(phi) + 2*re((rho - 1)*(q - p) + (v - u)*(q - p) + (mu -
          1)*(v - u)) > 0)
    c9 = (abs(phi) - 2*re((rho - 1)*(q - p) + (v - u)*(q - p) + (mu -
          1)*(v - u)) > 0)
    c10 = (abs(arg(sigma)) < bstar*pi)
    c11 = Eq(abs(arg(sigma)), bstar*pi)
    c12 = (abs(arg(omega)) < cstar*pi)
    c13 = Eq(abs(arg(omega)), cstar*pi)

    # The following condition is *not* implemented as stated on the wolfram
    # function site. In the book of Prudnikov there is an additional part
    # (the And involving re()). However, I only have this book in russian, and
    # I don't read any russian. The following condition is what other people
    # have told me it means.
    # Worryingly, it is different from the condition implemented in REDUCE.
    # The REDUCE implementation:
    #   https://reduce-algebra.svn.sourceforge.net/svnroot/reduce-algebra/trunk/packages/defint/definta.red
    #   (search for tst14)
    # The Wolfram alpha version:
    #   http://functions.wolfram.com/HypergeometricFunctions/MeijerG/21/02/03/03/0014/
    z0 = exp(-(bstar + cstar)*pi*I)
    zos = unpolarify(z0*omega/sigma)
    zso = unpolarify(z0*sigma/omega)
    if zos == 1/zso:
        c14 = And(Eq(phi, 0), bstar + cstar <= 1,
                  Or(Ne(zos, 1), re(mu + rho + v - u) < 1,
                     re(mu + rho + q - p) < 1))
    else:
        c14 = And(Eq(phi, 0), bstar - 1 + cstar <= 0,
                  Or(And(Ne(zos, 1), abs(arg_(1 - zos)) < pi),
                     And(re(mu + rho + v - u) < 1, Eq(zos, 1))))

        def _cond():
            '''
            Note: if `zso` is 1 then tmp will be NaN.  This raises a
            TypeError on `NaN < pi`.  Previously this gave `False` so
            this behavior has been hardcoded here but someone should
            check if this NaN is more serious! This NaN is triggered by
            test_meijerint() in test_meijerint.py:
            `meijerint_definite(exp(x), x, 0, I)`
            '''
            tmp = abs(arg_(1 - zso))
            return False if tmp is S.NaN else tmp < pi
        c14_alt = And(Eq(phi, 0), cstar - 1 + bstar <= 0,
                      Or(And(Ne(zso, 1), _cond()),
                         And(re(mu + rho + q - p) < 1, Eq(zso, 1))))

        # Since r=k=l=1, in our case there is c14_alt which is the same as calling
        # us with (g1, g2) = (g2, g1). The conditions below enumerate all cases
        # (i.e. we don't have to try arguments reversed by hand), and indeed try
        # all symmetric cases. (i.e. whenever there is a condition involving c14,
        # there is also a dual condition which is exactly what we would get when g1,
        # g2 were interchanged, *but c14 was unaltered*).
        # Hence the following seems correct:
        c14 = Or(c14, c14_alt)

    '''
    When `c15` is NaN (e.g. from `psi` being NaN as happens during
    'test_issue_4992' and/or `theta` is NaN as in 'test_issue_6253',
    both in `test_integrals.py`) the comparison to 0 formerly gave False
    whereas now an error is raised. To keep the old behavior, the value
    of NaN is replaced with False but perhaps a closer look at this condition
    should be made: XXX how should conditions leading to c15=NaN be handled?
    '''
    try:
        lambda_c = (q - p)*abs(omega)**(1/(q - p))*cos(psi) \
            + (v - u)*abs(sigma)**(1/(v - u))*cos(theta)
        # the TypeError might be raised here, e.g. if lambda_c is NaN
        if _eval_cond(lambda_c > 0) != False:
            c15 = (lambda_c > 0)
        else:
            def lambda_s0(c1, c2):
                return c1*(q - p)*abs(omega)**(1/(q - p))*sin(psi) \
                    + c2*(v - u)*abs(sigma)**(1/(v - u))*sin(theta)
            lambda_s = Piecewise(
                ((lambda_s0(+1, +1)*lambda_s0(-1, -1)),
                 And(Eq(arg(sigma), 0), Eq(arg(omega), 0))),
                (lambda_s0(sign(arg(omega)), +1)*lambda_s0(sign(arg(omega)), -1),
                 And(Eq(arg(sigma), 0), Ne(arg(omega), 0))),
                (lambda_s0(+1, sign(arg(sigma)))*lambda_s0(-1, sign(arg(sigma))),
                 And(Ne(arg(sigma), 0), Eq(arg(omega), 0))),
                (lambda_s0(sign(arg(omega)), sign(arg(sigma))), True))
            tmp = [lambda_c > 0,
                   And(Eq(lambda_c, 0), Ne(lambda_s, 0), re(eta) > -1),
                   And(Eq(lambda_c, 0), Eq(lambda_s, 0), re(eta) > 0)]
            c15 = Or(*tmp)
    except TypeError:
        c15 = False
    for cond, i in [(c1, 1), (c2, 2), (c3, 3), (c4, 4), (c5, 5), (c6, 6),
                    (c7, 7), (c8, 8), (c9, 9), (c10, 10), (c11, 11),
                    (c12, 12), (c13, 13), (c14, 14), (c15, 15)]:
        _debug('  c%s:' % i, cond)

    # We will return Or(*conds)
    # Each entry of conds is one numbered case from [P], 2.24.1.
    conds = []

    def pr(count):
        _debug('  case %s:' % count, conds[-1])
    conds += [And(m*n*s*t != 0, bstar.is_positive is True, cstar.is_positive is True, c1, c2, c3, c10,
                  c12)]  # 1
    pr(1)
    conds += [And(Eq(u, v), Eq(bstar, 0), cstar.is_positive is True, sigma.is_positive is True, re(rho) < 1,
                  c1, c2, c3, c12)]  # 2
    pr(2)
    conds += [And(Eq(p, q), Eq(cstar, 0), bstar.is_positive is True, omega.is_positive is True, re(mu) < 1,
                  c1, c2, c3, c10)]  # 3
    pr(3)
    conds += [And(Eq(p, q), Eq(u, v), Eq(bstar, 0), Eq(cstar, 0),
                  sigma.is_positive is True, omega.is_positive is True, re(mu) < 1, re(rho) < 1,
                  Ne(sigma, omega), c1, c2, c3)]  # 4
    pr(4)
    conds += [And(Eq(p, q), Eq(u, v), Eq(bstar, 0), Eq(cstar, 0),
                  sigma.is_positive is True, omega.is_positive is True, re(mu + rho) < 1,
                  Ne(omega, sigma), c1, c2, c3)]  # 5
    pr(5)
    conds += [And(p > q, s.is_positive is True, bstar.is_positive is True, cstar >= 0,
                  c1, c2, c3, c5, c10, c13)]  # 6
    pr(6)
    conds += [And(p < q, t.is_positive is True, bstar.is_positive is True, cstar >= 0,
                  c1, c2, c3, c4, c10, c13)]  # 7
    pr(7)
    conds += [And(u > v, m.is_positive is True, cstar.is_positive is True, bstar >= 0,
                  c1, c2, c3, c7, c11, c12)]  # 8
    pr(8)
    conds += [And(u < v, n.is_positive is True, cstar.is_positive is True, bstar >= 0,
                  c1, c2, c3, c6, c11, c12)]  # 9
    pr(9)
    conds += [And(p > q, Eq(u, v), Eq(bstar, 0), cstar >= 0, sigma.is_positive is True,
                  re(rho) < 1, c1, c2, c3, c5, c13)]  # 10
    pr(10)
    conds += [And(p < q, Eq(u, v), Eq(bstar, 0), cstar >= 0, sigma.is_positive is True,
                  re(rho) < 1, c1, c2, c3, c4, c13)]  # 11
    pr(11)
    conds += [And(Eq(p, q), u > v, bstar >= 0, Eq(cstar, 0), omega.is_positive is True,
                  re(mu) < 1, c1, c2, c3, c7, c11)]  # 12
    pr(12)
    conds += [And(Eq(p, q), u < v, bstar >= 0, Eq(cstar, 0), omega.is_positive is True,
                  re(mu) < 1, c1, c2, c3, c6, c11)]  # 13
    pr(13)
    conds += [And(p < q, u > v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c4, c7, c11, c13)]  # 14
    pr(14)
    conds += [And(p > q, u < v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c5, c6, c11, c13)]  # 15
    pr(15)
    conds += [And(p > q, u > v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c5, c7, c8, c11, c13, c14)]  # 16
    pr(16)
    conds += [And(p < q, u < v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c4, c6, c9, c11, c13, c14)]  # 17
    pr(17)
    conds += [And(Eq(t, 0), s.is_positive is True, bstar.is_positive is True, phi.is_positive is True, c1, c2, c10)]  # 18
    pr(18)
    conds += [And(Eq(s, 0), t.is_positive is True, bstar.is_positive is True, phi.is_negative is True, c1, c3, c10)]  # 19
    pr(19)
    conds += [And(Eq(n, 0), m.is_positive is True, cstar.is_positive is True, phi.is_negative is True, c1, c2, c12)]  # 20
    pr(20)
    conds += [And(Eq(m, 0), n.is_positive is True, cstar.is_positive is True, phi.is_positive is True, c1, c3, c12)]  # 21
    pr(21)
    conds += [And(Eq(s*t, 0), bstar.is_positive is True, cstar.is_positive is True,
                  c1, c2, c3, c10, c12)]  # 22
    pr(22)
    conds += [And(Eq(m*n, 0), bstar.is_positive is True, cstar.is_positive is True,
                  c1, c2, c3, c10, c12)]  # 23
    pr(23)

    # The following case is from [Luke1969]. As far as I can tell, it is *not*
    # covered by Prudnikov's.
    # Let G1 and G2 be the two G-functions. Suppose the integral exists from
    # 0 to a > 0 (this is easy the easy part), that G1 is exponential decay at
    # infinity, and that the mellin transform of G2 exists.
    # Then the integral exists.
    mt1_exists = _check_antecedents_1(g1, x, helper=True)
    mt2_exists = _check_antecedents_1(g2, x, helper=True)
    conds += [And(mt2_exists, Eq(t, 0), u < s, bstar.is_positive is True, c10, c1, c2, c3)]
    pr('E1')
    conds += [And(mt2_exists, Eq(s, 0), v < t, bstar.is_positive is True, c10, c1, c2, c3)]
    pr('E2')
    conds += [And(mt1_exists, Eq(n, 0), p < m, cstar.is_positive is True, c12, c1, c2, c3)]
    pr('E3')
    conds += [And(mt1_exists, Eq(m, 0), q < n, cstar.is_positive is True, c12, c1, c2, c3)]
    pr('E4')

    # Let's short-circuit if this worked ...
    # the rest is corner-cases and terrible to read.
    r = Or(*conds)
    if _eval_cond(r) != False:
        return r

    conds += [And(m + n > p, Eq(t, 0), Eq(phi, 0), s.is_positive is True, bstar.is_positive is True, cstar.is_negative is True,
                  abs(arg(omega)) < (m + n - p + 1)*pi,
                  c1, c2, c10, c14, c15)]  # 24
    pr(24)
    conds += [And(m + n > q, Eq(s, 0), Eq(phi, 0), t.is_positive is True, bstar.is_positive is True, cstar.is_negative is True,
                  abs(arg(omega)) < (m + n - q + 1)*pi,
                  c1, c3, c10, c14, c15)]  # 25
    pr(25)
    conds += [And(Eq(p, q - 1), Eq(t, 0), Eq(phi, 0), s.is_positive is True, bstar.is_positive is True,
                  cstar >= 0, cstar*pi < abs(arg(omega)),
                  c1, c2, c10, c14, c15)]  # 26
    pr(26)
    conds += [And(Eq(p, q + 1), Eq(s, 0), Eq(phi, 0), t.is_positive is True, bstar.is_positive is True,
                  cstar >= 0, cstar*pi < abs(arg(omega)),
                  c1, c3, c10, c14, c15)]  # 27
    pr(27)
    conds += [And(p < q - 1, Eq(t, 0), Eq(phi, 0), s.is_positive is True, bstar.is_positive is True,
                  cstar >= 0, cstar*pi < abs(arg(omega)),
                  abs(arg(omega)) < (m + n - p + 1)*pi,
                  c1, c2, c10, c14, c15)]  # 28
    pr(28)
    conds += [And(
        p > q + 1, Eq(s, 0), Eq(phi, 0), t.is_positive is True, bstar.is_positive is True, cstar >= 0,
        cstar*pi < abs(arg(omega)),
        abs(arg(omega)) < (m + n - q + 1)*pi,
        c1, c3, c10, c14, c15)]  # 29
    pr(29)
    conds += [And(Eq(n, 0), Eq(phi, 0), s + t > 0, m.is_positive is True, cstar.is_positive is True, bstar.is_negative is True,
                  abs(arg(sigma)) < (s + t - u + 1)*pi,
                  c1, c2, c12, c14, c15)]  # 30
    pr(30)
    conds += [And(Eq(m, 0), Eq(phi, 0), s + t > v, n.is_positive is True, cstar.is_positive is True, bstar.is_negative is True,
                  abs(arg(sigma)) < (s + t - v + 1)*pi,
                  c1, c3, c12, c14, c15)]  # 31
    pr(31)
    conds += [And(Eq(n, 0), Eq(phi, 0), Eq(u, v - 1), m.is_positive is True, cstar.is_positive is True,
                  bstar >= 0, bstar*pi < abs(arg(sigma)),
                  abs(arg(sigma)) < (bstar + 1)*pi,
                  c1, c2, c12, c14, c15)]  # 32
    pr(32)
    conds += [And(Eq(m, 0), Eq(phi, 0), Eq(u, v + 1), n.is_positive is True, cstar.is_positive is True,
                  bstar >= 0, bstar*pi < abs(arg(sigma)),
                  abs(arg(sigma)) < (bstar + 1)*pi,
                  c1, c3, c12, c14, c15)]  # 33
    pr(33)
    conds += [And(
        Eq(n, 0), Eq(phi, 0), u < v - 1, m.is_positive is True, cstar.is_positive is True, bstar >= 0,
        bstar*pi < abs(arg(sigma)),
        abs(arg(sigma)) < (s + t - u + 1)*pi,
        c1, c2, c12, c14, c15)]  # 34
    pr(34)
    conds += [And(
        Eq(m, 0), Eq(phi, 0), u > v + 1, n.is_positive is True, cstar.is_positive is True, bstar >= 0,
        bstar*pi < abs(arg(sigma)),
        abs(arg(sigma)) < (s + t - v + 1)*pi,
        c1, c3, c12, c14, c15)]  # 35
    pr(35)

    return Or(*conds)

    # NOTE An alternative, but as far as I can tell weaker, set of conditions
    #      can be found in [L, section 5.6.2].
def _int0oo(g1, g2, x):
    """
    Express integral from zero to infinity g1*g2 using a G function,
    assuming the necessary conditions are fulfilled.

    >>> from sympy.integrals.meijerint import _int0oo
    >>> from sympy.abc import s, t, m
    >>> from sympy import meijerg, S
    >>> g1 = meijerg([], [], [-S(1)/2, 0], [], s**2*t/4)
    >>> g2 = meijerg([], [], [m/2], [-m/2], t/4)
    >>> _int0oo(g1, g2, t)
    4*meijerg(((1/2, 0), ()), ((m/2,), (-m/2,)), s**(-2))/s**2
    """
    # See: [L, section 5.6.2, equation (1)]
    eta, _ = _get_coeff_exp(g1.argument, x)
    omega, _ = _get_coeff_exp(g2.argument, x)

    def negated(params):
        return [-p for p in params]

    # The parameters of g1 enter negated and swapped between numerator and
    # denominator groups.
    return meijerg(negated(g1.bm) + list(g2.an),
                   list(g2.aother) + negated(g1.bother),
                   negated(g1.an) + list(g2.bm),
                   list(g2.bother) + negated(g1.aother),
                   omega/eta) / eta
def _rewrite_inversion(fac, po, g, x):
    """ Absorb ``po`` == x**s into g. """
    _, s = _get_coeff_exp(po, x)
    a, b = _get_coeff_exp(g.argument, x)

    def shifted(params):
        return [t + s/b for t in params]

    # Shifting every parameter by s/b absorbs x**s into the G function;
    # the coefficient is compensated by a**(-s/b).
    return (powdenest(fac/a**(s/b), polar=True),
            meijerg(shifted(g.an), shifted(g.aother),
                    shifted(g.bm), shifted(g.bother), g.argument))
def _check_antecedents_inversion(g, x):
    """ Check antecedents for the laplace inversion integral.

    Returns a condition (possibly the constant False) under which the
    Laplace inversion integral of the G function ``g`` in ``x`` exists,
    following [L], sections 5.7-5.10.
    """
    from sympy import re, im, Or, And, Eq, exp, I, Add, nan, Ne
    _debug('Checking antecedents for inversion:')
    z = g.argument
    _, e = _get_coeff_exp(z, x)
    if e < 0:
        _debug('  Flipping G.')
        # We want to assume that argument gets large as |x| -> oo
        return _check_antecedents_inversion(_flip_g(g), x)

    def statement_half(a, b, c, z, plus):
        # One-sided convergence statement for z**a * exp(b*z**c).
        coeff, exponent = _get_coeff_exp(z, x)
        a *= exponent
        b *= coeff**c
        c *= exponent
        conds = []
        wp = b*exp(I*re(c)*pi/2)
        wm = b*exp(-I*re(c)*pi/2)
        if plus:
            w = wp
        else:
            w = wm
        conds += [And(Or(Eq(b, 0), re(c) <= 0), re(a) <= -1)]
        conds += [And(Ne(b, 0), Eq(im(c), 0), re(c) > 0, re(w) < 0)]
        conds += [And(Ne(b, 0), Eq(im(c), 0), re(c) > 0, re(w) <= 0,
                      re(a) <= -1)]
        return Or(*conds)

    def statement(a, b, c, z):
        """ Provide a convergence statement for z**a * exp(b*z**c),
            c/f sphinx docs. """
        return And(statement_half(a, b, c, z, True),
                   statement_half(a, b, c, z, False))

    # Notations from [L], section 5.7-10
    m, n, p, q = S([len(g.bm), len(g.an), len(g.ap), len(g.bq)])
    tau = m + n - p
    nu = q - m - n
    rho = (tau - nu)/2
    sigma = q - p
    if sigma == 1:
        epsilon = S(1)/2
    elif sigma > 1:
        epsilon = 1
    else:
        epsilon = nan
    theta = ((1 - sigma)/2 + Add(*g.bq) - Add(*g.ap))/sigma
    delta = g.delta
    _debug('  m=%s, n=%s, p=%s, q=%s, tau=%s, nu=%s, rho=%s, sigma=%s' % (
        m, n, p, q, tau, nu, rho, sigma))
    _debug('  epsilon=%s, theta=%s, delta=%s' % (epsilon, theta, delta))

    # First check if the computation is valid.
    if not (g.delta >= e/2 or (p >= 1 and p >= q)):
        _debug('  Computation not valid for these parameters.')
        return False

    # Now check if the inversion integral exists.

    # Test "condition A"
    for a in g.an:
        for b in g.bm:
            if (a - b).is_integer and a > b:
                _debug('  Not a valid G function.')
                return False

    # There are two cases. If p >= q, we can directly use a slater expansion
    # like [L], 5.2 (11). Note in particular that the asymptotics of such an
    # expansion even hold when some of the parameters differ by integers, i.e.
    # the formula itself would not be valid! (b/c G functions are cts. in their
    # parameters)
    # When p < q, we need to use the theorems of [L], 5.10.

    if p >= q:
        _debug('  Using asymptotic Slater expansion.')
        return And(*[statement(a - 1, 0, 0, z) for a in g.an])

    def E(z):
        # FIX: ``statement`` takes four arguments (a, b, c, z); the exponent
        # argument ``c`` was previously omitted here, so this call raised
        # TypeError whenever the p < q branch below was taken.  Compare the
        # identical call in the p >= q branch above.
        return And(*[statement(a - 1, 0, 0, z) for a in g.an])

    def H(z):
        return statement(theta, -sigma, 1/sigma, z)

    def Hp(z):
        return statement_half(theta, -sigma, 1/sigma, z, True)

    def Hm(z):
        return statement_half(theta, -sigma, 1/sigma, z, False)

    # [L], section 5.10
    conds = []
    # Theorem 1
    conds += [And(1 <= n, p < q, 1 <= m, rho*pi - delta >= pi/2, delta > 0,
                  E(z*exp(I*pi*(nu + 1))))]
    # Theorem 2, statements (2) and (3)
    conds += [And(p + 1 <= m, m + 1 <= q, delta > 0, delta < pi/2, n == 0,
                  (m - p + 1)*pi - delta >= pi/2,
                  Hp(z*exp(I*pi*(q - m))), Hm(z*exp(-I*pi*(q - m))))]
    # Theorem 2, statement (5)
    conds += [And(p < q, m == q, n == 0, delta > 0,
                  (sigma + epsilon)*pi - delta >= pi/2, H(z))]
    # Theorem 3, statements (6) and (7)
    conds += [And(Or(And(p <= q - 2, 1 <= tau, tau <= sigma/2),
                     And(p + 1 <= m + n, m + n <= (p + q)/2)),
                  delta > 0, delta < pi/2, (tau + 1)*pi - delta >= pi/2,
                  Hp(z*exp(I*pi*nu)), Hm(z*exp(-I*pi*nu)))]
    # Theorem 4, statements (10) and (11)
    conds += [And(p < q, 1 <= m, rho > 0, delta > 0, delta + rho*pi < pi/2,
                  (tau + epsilon)*pi - delta >= pi/2,
                  Hp(z*exp(I*pi*nu)), Hm(z*exp(-I*pi*nu)))]
    # Trivial case
    conds += [m == 0]

    # TODO
    # Theorem 5 is quite general
    # Theorem 6 contains special cases for q=p+1

    return Or(*conds)
def _int_inversion(g, x, t):
    """Evaluate the Laplace inversion integral of the G function ``g``
    (whose argument is a coeff*x**exp expression in ``x``), assuming the
    inversion formula applies; the result is a function of ``t``."""
    coeff, expo = _get_coeff_exp(g.argument, x)
    shifted = meijerg(g.an, g.aother, g.bm, g.bother, coeff/t**expo)
    const, gnew = _inflate_fox_h(shifted, -expo)
    return const*gnew/t
####################################################################
# Finally, the real meat.
####################################################################
_lookup_table = None  # lazily populated by _create_lookup_table on first _rewrite_single call
@cacheit
@timeit
def _rewrite_single(f, x, recursive=True):
    """
    Try to rewrite f as a sum of single G functions of the form
    C*x**s*G(a*x**b), where b is a rational number and C is independent of x.
    We guarantee that result.argument.as_coeff_mul(x) returns (a, (x**b,))
    or (a, ()).
    Returns a list of tuples (C, s, G) and a condition cond.
    Returns None on failure.
    """
    from sympy import polarify, unpolarify, oo, zoo, Tuple
    global _lookup_table
    if not _lookup_table:
        # Build the formula table once per process; it is keyed by _mytype.
        _lookup_table = {}
        _create_lookup_table(_lookup_table)

    if isinstance(f, meijerg):
        # Already a G function: just normalise its argument to coeff*x**b
        # (reject anything more complicated than a rational power of x).
        from sympy import factor
        coeff, m = factor(f.argument, x).as_coeff_mul(x)
        if len(m) > 1:
            return None
        m = m[0]
        if m.is_Pow:
            if m.base != x or not m.exp.is_Rational:
                return None
        elif m != x:
            return None
        return [(1, 0, meijerg(f.an, f.aother, f.bm, f.bother, coeff*m))], True

    f_ = f
    f = f.subs(x, z)
    t = _mytype(f, z)
    if t in _lookup_table:
        l = _lookup_table[t]
        for formula, terms, cond, hint in l:
            subs = f.match(formula, old=True)
            if subs:
                subs_ = {}
                for fro, to in subs.items():
                    # Lift to the polar branch but keep exponents plain, so
                    # the tabulated conditions evaluate on the right branch.
                    subs_[fro] = unpolarify(polarify(to, lift=True),
                                            exponents_only=True)
                subs = subs_
                if not isinstance(hint, bool):
                    hint = hint.subs(subs)
                if hint == False:
                    continue
                if not isinstance(cond, (bool, BooleanAtom)):
                    cond = unpolarify(cond.subs(subs))
                if _eval_cond(cond) == False:
                    continue
                if not isinstance(terms, list):
                    terms = terms(subs)
                res = []
                for fac, g in terms:
                    r1 = _get_coeff_exp(unpolarify(fac.subs(subs).subs(z, x),
                                                   exponents_only=True), x)
                    g = g.subs(subs).subs(z, x)
                    # NOTE these substitutions can in principle introduce oo,
                    # zoo and other absurdities. It shouldn't matter,
                    # but better be safe.
                    if Tuple(*(r1 + (g,))).has(oo, zoo, -oo):
                        continue
                    g = meijerg(g.an, g.aother, g.bm, g.bother,
                                unpolarify(g.argument, exponents_only=True))
                    res.append(r1 + (g,))
                if res:
                    return res, cond

    # try recursive mellin transform
    if not recursive:
        return None
    _debug('Trying recursive Mellin transform method.')
    from sympy.integrals.transforms import (mellin_transform,
        inverse_mellin_transform, IntegralTransformError,
        MellinTransformStripError)
    from sympy import oo, nan, zoo, simplify, cancel

    def my_imt(F, s, x, strip):
        """ Calling simplify() all the time is slow and not helpful, since
        most of the time it only factors things in a way that has to be
        un-done anyway. But sometimes it can remove apparent poles. """
        # XXX should this be in inverse_mellin_transform?
        try:
            return inverse_mellin_transform(F, s, x, strip,
                                            as_meijerg=True, needeval=True)
        except MellinTransformStripError:
            # The fundamental strip was too narrow: simplification may
            # remove apparent poles and make the inversion succeed.
            return inverse_mellin_transform(
                simplify(cancel(expand(F))), s, x, strip,
                as_meijerg=True, needeval=True)
    f = f_
    s = _dummy('s', 'rewrite-single', f)
    # to avoid infinite recursion, we have to force the two g functions case

    def my_integrator(f, x):
        from sympy import Integral, hyperexpand
        r = _meijerint_definite_4(f, x, only_double=True)
        if r is not None:
            res, cond = r
            res = _my_unpolarify(hyperexpand(res, rewrite='nonrepsmall'))
            return Piecewise((res, cond),
                             (Integral(f, (x, 0, oo)), True))
        return Integral(f, (x, 0, oo))
    try:
        F, strip, _ = mellin_transform(f, x, s, integrator=my_integrator,
                                       simplify=False, needeval=True)
        g = my_imt(F, s, x, strip)
    except IntegralTransformError:
        g = None
    if g is None:
        # We try to find an expression by analytic continuation.
        # (also if the dummy is already in the expression, there is no point in
        # putting in another one)
        a = _dummy_('a', 'rewrite-single')
        if a not in f.free_symbols and _is_analytic(f, x):
            try:
                F, strip, _ = mellin_transform(f.subs(x, a*x), x, s,
                                               integrator=my_integrator,
                                               needeval=True, simplify=False)
                g = my_imt(F, s, x, strip).subs(a, 1)
            except IntegralTransformError:
                g = None
    if g is None or g.has(oo, nan, zoo):
        _debug('Recursive Mellin transform failed.')
        return None
    # Split the inversion result into C*G(a*x**b) summands.
    args = Add.make_args(g)
    res = []
    for f in args:
        c, m = f.as_coeff_mul(x)
        if len(m) > 1:
            raise NotImplementedError('Unexpected form...')
        g = m[0]
        a, b = _get_coeff_exp(g.argument, x)
        res += [(c, 0, meijerg(g.an, g.aother, g.bm, g.bother,
                               unpolarify(polarify(
                                   a, lift=True), exponents_only=True)
                               *x**b))]
    _debug('Recursive Mellin transform worked:', g)
    return res, True
def _rewrite1(f, x, recursive=True):
    """
    Try to rewrite ``f`` using a (sum of) single G functions with argument
    a*x**b.

    On success return ``(fac, po, terms, cond)`` such that
    ``f == fac*po*<sum>``, where ``fac`` is independent of ``x``,
    ``po == x**s``, and ``terms``/``cond`` are the list of (C, s, G)
    triples and the convergence condition from :func:`_rewrite_single`.
    Return None on failure.
    """
    fac, po, rest = _split_mul(f, x)
    rewritten = _rewrite_single(rest, x, recursive)
    if not rewritten:
        return None
    terms, cond = rewritten
    return fac, po, terms, cond
def _rewrite2(f, x):
    """
    Try to rewrite f as a product of two G functions of arguments a*x**b.
    Return fac, po, g1, g2 such that f = fac*po*g1*g2, where fac is
    independent of x and po is x**s.
    Here g1 and g2 are results of _rewrite_single.
    Returns None on failure.
    """
    fac, po, g = _split_mul(f, x)
    # Cheap early exit: every individual factor must itself be expressible
    # as a G function (checked non-recursively), or no two-part split can be.
    if any(_rewrite_single(expr, x, False) is None for expr in _mul_args(g)):
        return None
    l = _mul_as_two_parts(g)
    if not l:
        return None
    # Try the "simplest" splittings first: fewest distinct exponents, then
    # fewest special functions, then fewest splitting points per part.
    l = list(ordered(l, [
        lambda p: max(len(_exponents(p[0], x)), len(_exponents(p[1], x))),
        lambda p: max(len(_functions(p[0], x)), len(_functions(p[1], x))),
        lambda p: max(len(_find_splitting_points(p[0], x)),
                      len(_find_splitting_points(p[1], x)))]))
    # Non-recursive rewrites first; only fall back to the expensive
    # recursive Mellin-transform path if none of them work.
    for recursive in [False, True]:
        for fac1, fac2 in l:
            g1 = _rewrite_single(fac1, x, recursive)
            g2 = _rewrite_single(fac2, x, recursive)
            if g1 and g2:
                cond = And(g1[1], g2[1])
                # ``!=`` (not ``is not``): cond may be a sympy Boolean that
                # compares, but is not identical, to False.
                if cond != False:
                    return fac, po, g1[0], g2[0], cond
    # Implicit None: no splitting succeeded.
def meijerint_indefinite(f, x):
    """
    Compute an indefinite integral of ``f`` by rewriting it as a G function.

    Examples
    ========

    >>> from sympy.integrals.meijerint import meijerint_indefinite
    >>> from sympy import sin
    >>> from sympy.abc import x
    >>> meijerint_indefinite(sin(x), x)
    -cos(x)
    """
    from sympy import hyper, meijerg
    results = []
    # Try shifting the integration variable to each "interesting" point
    # (0 first, in sorted order) -- a shift can turn an intractable
    # integrand into one the G-function tables recognise.
    for a in sorted(_find_splitting_points(f, x) | {S(0)}, key=default_sort_key):
        res = _meijerint_indefinite_1(f.subs(x, x + a), x)
        if not res:
            continue
        res = res.subs(x, x - a)
        if _has(res, hyper, meijerg):
            # Unevaluated special functions remain: remember this candidate
            # but keep looking for a cleaner antiderivative.
            results.append(res)
        else:
            return res
    if f.has(HyperbolicFunction):
        _debug('Try rewriting hyperbolics in terms of exp.')
        rv = meijerint_indefinite(
            _rewrite_hyperbolics_as_exp(f), x)
        if rv:
            if not type(rv) is list:
                # Regroup the exp terms the hyperbolic rewrite introduced.
                return collect(factor_terms(rv), rv.atoms(exp))
            # NOTE(review): this function appears to return an expression or
            # None, never a list -- the extend branch looks defensive/dead;
            # confirm before relying on it.
            results.extend(rv)
    if results:
        return next(ordered(results))
    # Implicit None: no candidate rewrite succeeded.
def _meijerint_indefinite_1(f, x):
    """ Helper that does not attempt any substitution. """
    from sympy import Integral, piecewise_fold
    _debug('Trying to compute the indefinite integral of', f, 'wrt', x)
    gs = _rewrite1(f, x)
    if gs is None:
        # Note: the code that calls us will do expand() and try again
        return None
    fac, po, gl, cond = gs
    _debug(' could rewrite:', gs)
    res = S(0)
    for C, s, g in gl:
        a, b = _get_coeff_exp(g.argument, x)
        _, c = _get_coeff_exp(po, x)
        c += s
        # we do a substitution t=a*x**b, get integrand fac*t**rho*g
        fac_ = fac * C / (b*a**((1 + c)/b))
        rho = (c + 1)/b - 1
        # we now use t**rho*G(params, t) = G(params + rho, t)
        # [L, page 150, equation (4)]
        # and integral G(params, t) dt = G(1, params+1, 0, t)
        # (or a similar expression with 1 and 0 exchanged ... pick the one
        # which yields a well-defined function)
        # [R, section 5]
        # (Note that this dummy will immediately go away again, so we
        #  can safely pass S(1) for ``expr``.)
        t = _dummy('t', 'meijerint-indefinite', S(1))

        def tr(p):
            # Shift all G-function parameters by rho + 1.
            return [a + rho + 1 for a in p]
        # Pick whichever of the two equivalent antiderivative forms is
        # well-defined (avoid a nonpositive integer landing in bm).
        if any(b.is_integer and (b <= 0) == True for b in tr(g.bm)):
            r = -meijerg(
                tr(g.an), tr(g.aother) + [1], tr(g.bm) + [0], tr(g.bother), t)
        else:
            r = meijerg(
                tr(g.an) + [1], tr(g.aother), tr(g.bm), tr(g.bother) + [0], t)
        r = hyperexpand(r.subs(t, a*x**b))
        # now substitute back
        # Note: we really do want the powers of x to combine.
        res += powdenest(fac_*r, polar=True)

    def _clean(res):
        """This multiplies out superfluous powers of x we created, and chops off
        constants:

        >> _clean(x*(exp(x)/x - 1/x) + 3)
        exp(x)

        cancel is used before mul_expand since it is possible for an
        expression to have an additive constant that doesn't become isolated
        with simple expansion. Such a situation was identified in issue 6369:

        >>> from sympy import sqrt, cancel
        >>> from sympy.abc import x
        >>> a = sqrt(2*x + 1)
        >>> bad = (3*x*a**5 + 2*x - a**5 + 1)/a**2
        >>> bad.expand().as_independent(x)[0]
        0
        >>> cancel(bad).expand().as_independent(x)[0]
        1
        """
        from sympy import cancel
        res = expand_mul(cancel(res), deep=False)
        return Add._from_args(res.as_coeff_add(x)[1])

    res = piecewise_fold(res)
    if res.is_Piecewise:
        newargs = []
        # NOTE(review): this loop rebinds ``cond``; the final Piecewise
        # below then sees the *last* piece's condition instead of the
        # convergence condition from _rewrite1 -- looks unintended, confirm.
        for expr, cond in res.args:
            expr = _my_unpolarify(_clean(expr))
            newargs += [(expr, cond)]
        res = Piecewise(*newargs)
    else:
        res = _my_unpolarify(_clean(res))
    return Piecewise((res, _my_unpolarify(cond)), (Integral(f, x), True))
@timeit
def meijerint_definite(f, x, a, b):
    """
    Integrate ``f`` over the interval [``a``, ``b``], by rewriting it as a product
    of two G functions, or as a single G function.

    Return res, cond, where cond are convergence conditions.

    Examples
    ========

    >>> from sympy.integrals.meijerint import meijerint_definite
    >>> from sympy import exp, oo
    >>> from sympy.abc import x
    >>> meijerint_definite(exp(-x**2), x, -oo, oo)
    (sqrt(pi), True)

    This function is implemented as a succession of functions
    meijerint_definite, _meijerint_definite_2, _meijerint_definite_3,
    _meijerint_definite_4. Each function in the list calls the next one
    (presumably) several times. This means that calling meijerint_definite
    can be very costly.
    """
    # This consists of three steps:
    # 1) Change the integration limits to 0, oo
    # 2) Rewrite in terms of G functions
    # 3) Evaluate the integral
    #
    # There are usually several ways of doing this, and we want to try all.
    # This function does (1), calls _meijerint_definite_2 for step (2).
    from sympy import arg, exp, I, And, DiracDelta
    _debug('Integrating', f, 'wrt %s from %s to %s.' % (x, a, b))
    if f.has(DiracDelta):
        _debug('Integrand has DiracDelta terms - giving up.')
        return None
    f_, x_, a_, b_ = f, x, a, b

    # Let's use a dummy in case any of the boundaries has x.
    d = Dummy('x')
    f = f.subs(x, d)
    x = d

    if a == b:
        return (S.Zero, True)

    results = []
    if a == -oo and b != oo:
        # Mirror onto (-b, -a), so only the (-oo, oo), (oo, .) and finite
        # lower-limit cases remain below.
        return meijerint_definite(f.subs(x, -x), x, -b, -a)
    elif a == -oo:
        # Integrating -oo to oo. We need to find a place to split the integral.
        _debug(' Integrating -oo to +oo.')
        innermost = _find_splitting_points(f, x)
        _debug(' Sensible splitting points:', innermost)
        for c in sorted(innermost, key=default_sort_key, reverse=True) + [S(0)]:
            _debug(' Trying to split at', c)
            if not c.is_real:
                _debug(' Non-real splitting point.')
                continue
            res1 = _meijerint_definite_2(f.subs(x, x + c), x)
            if res1 is None:
                _debug(' But could not compute first integral.')
                continue
            res2 = _meijerint_definite_2(f.subs(x, c - x), x)
            if res2 is None:
                _debug(' But could not compute second integral.')
                continue
            res1, cond1 = res1
            res2, cond2 = res2
            cond = _condsimp(And(cond1, cond2))
            if cond == False:
                _debug(' But combined condition is always false.')
                continue
            res = res1 + res2
            return res, cond
    elif a == oo:
        # Swap the limits and negate.
        return -meijerint_definite(f, x, b, oo)
    elif (a, b) == (0, oo):
        # This is a common case - try it directly first.
        res = _meijerint_definite_2(f, x)
        if res:
            if _has(res[0], meijerg):
                results.append(res)
            else:
                return res
    else:
        if b == oo:
            # Instead of shifting, we can sometimes cut off the integrand
            # below ``a`` with a Heaviside factor and integrate from a
            # splitting point.
            for split in _find_splitting_points(f, x):
                if (a - split >= 0) == True:
                    _debug('Trying x -> x + %s' % split)
                    res = _meijerint_definite_2(f.subs(x, x + split)
                                                *Heaviside(x + split - a), x)
                    if res:
                        if _has(res[0], meijerg):
                            results.append(res)
                        else:
                            return res
        # Shift the lower limit to 0 ...
        f = f.subs(x, x + a)
        b = b - a
        a = 0
        if b != oo:
            # ... and rotate a finite (possibly complex) upper limit onto
            # the positive real axis, cutting it off with Heaviside so we
            # can integrate to oo.
            phi = exp(I*arg(b))
            b = abs(b)
            f = f.subs(x, phi*x)
            f *= Heaviside(b - x)*phi
            b = oo
        _debug('Changed limits to', a, b)
        _debug('Changed function to', f)
        res = _meijerint_definite_2(f, x)
        if res:
            if _has(res[0], meijerg):
                results.append(res)
            else:
                return res
    if f_.has(HyperbolicFunction):
        _debug('Try rewriting hyperbolics in terms of exp.')
        rv = meijerint_definite(
            _rewrite_hyperbolics_as_exp(f_), x_, a_, b_)
        if rv:
            if not type(rv) is list:
                rv = (collect(factor_terms(rv[0]), rv[0].atoms(exp)),) + rv[1:]
                return rv
            # NOTE(review): rv appears to always be a tuple or None, never a
            # list -- this extend looks defensive/dead; confirm.
            results.extend(rv)
    if results:
        return next(ordered(results))
    # Implicit None: every strategy failed.
def _guess_expansion(f, x):
    """Return a list of (expression, description) rewriting candidates for
    the integrand f(x), most conservative first."""
    from sympy import expand_trig
    from sympy.functions.elementary.trigonometric import TrigonometricFunction
    candidates = [(f, 'original integrand')]
    seen = {f}

    def consider(expr, label):
        # Record a rewriting only if it actually differs from earlier ones.
        if expr not in seen:
            candidates.append((expr, label))
            seen.add(expr)

    consider(expand_mul(f), 'expand_mul')
    consider(expand(f), 'expand')
    if f.has(TrigonometricFunction, HyperbolicFunction):
        consider(expand_mul(expand_trig(f)), 'expand_trig, expand_mul')
    return candidates
def _meijerint_definite_2(f, x):
    """
    Try to integrate f dx from zero to infinity.

    Several rewritings of ``f`` (computed by _guess_expansion, e.g. via
    expand_mul or trig expansion) are handed to _meijerint_definite_3 in
    turn; the first one that succeeds provides the result.
    """
    # This function does preparation for (2), calls
    # _meijerint_definite_3 for (2) and (3) combined.

    # use a positive dummy - we integrate from 0 to oo
    # XXX if a nonnegative symbol is used there will be test failures
    dummy = _dummy('x', 'meijerint-definite2', f, positive=True)
    integrand = f.subs(x, dummy)
    if integrand == 0:
        return S(0), True
    for candidate, explanation in _guess_expansion(integrand, dummy):
        _debug('Trying', explanation)
        res = _meijerint_definite_3(candidate, dummy)
        if res:
            return res
def _meijerint_definite_3(f, x):
    """
    Try to integrate f dx from zero to infinity.

    First delegate to _meijerint_definite_4 directly; if that yields no
    usable condition and ``f`` is a sum, fall back to integrating the
    terms separately (linearity) and conjoining their conditions.
    """
    direct = _meijerint_definite_4(f, x)
    if direct and direct[1] != False:
        return direct
    if not f.is_Add:
        return None
    _debug('Expanding and evaluating all terms.')
    pieces = [_meijerint_definite_4(term, x) for term in f.args]
    if any(piece is None for piece in pieces):
        return None
    total = S(0)
    conds = []
    for value, condition in pieces:
        total += value
        conds.append(condition)
    combined = And(*conds)
    # ``!=`` not ``is not``: sympy Booleans compare by value.
    if combined != False:
        return total, combined
def _my_unpolarify(expr):
    """Unpolarify ``expr`` and run the result through _eval_cond."""
    from sympy import unpolarify
    depolarized = unpolarify(expr)
    return _eval_cond(depolarized)
@timeit
def _meijerint_definite_4(f, x, only_double=False):
    """
    Try to integrate f dx from zero to infinity.

    This function tries to apply the integration theorems found in literature,
    i.e. it tries to rewrite f as either one or a product of two G-functions.

    The parameter ``only_double`` is used internally in the recursive algorithm
    to disable trying to rewrite f as a single G-function.
    """
    # This function does (2) and (3)
    _debug('Integrating', f)
    # Try single G function.
    if not only_double:
        gs = _rewrite1(f, x, recursive=False)
        if gs is not None:
            fac, po, g, cond = gs
            _debug('Could rewrite as single G function:', fac, po, g)
            res = S(0)
            for C, s, f in g:
                if C == 0:
                    continue
                C, f = _rewrite_saxena_1(fac*C, po*x**s, f, x)
                res += C*_int0oo_1(f, x)
                # Accumulate the convergence antecedents of every term.
                cond = And(cond, _check_antecedents_1(f, x))
                if cond == False:
                    break
            cond = _my_unpolarify(cond)
            if cond == False:
                _debug('But cond is always False.')
            else:
                _debug('Result before branch substitutions is:', res)
                return _my_unpolarify(hyperexpand(res)), cond

    # Try two G functions.
    gs = _rewrite2(f, x)
    if gs is not None:
        # Second pass retries with full principal-branch handling.
        for full_pb in [False, True]:
            fac, po, g1, g2, cond = gs
            _debug('Could rewrite as two G functions:', fac, po, g1, g2)
            res = S(0)
            for C1, s1, f1 in g1:
                for C2, s2, f2 in g2:
                    r = _rewrite_saxena(fac*C1*C2, po*x**(s1 + s2),
                                        f1, f2, x, full_pb)
                    if r is None:
                        _debug('Non-rational exponents.')
                        return
                    C, f1_, f2_ = r
                    _debug('Saxena subst for yielded:', C, f1_, f2_)
                    cond = And(cond, _check_antecedents(f1_, f2_, x))
                    if cond == False:
                        # Abort the inner loop; the for/else below then
                        # breaks out of the outer loop as well.
                        break
                    res += C*_int0oo(f1_, f2_, x)
                else:
                    continue
                break
            cond = _my_unpolarify(cond)
            if cond == False:
                _debug('But cond is always False (full_pb=%s).' % full_pb)
            else:
                _debug('Result before branch substitutions is:', res)
                if only_double:
                    # Recursive caller wants the raw (unexpanded) result.
                    return res, cond
                return _my_unpolarify(hyperexpand(res)), cond
def meijerint_inversion(f, x, t):
    r"""
    Compute the inverse laplace transform
    :math:`\int_{c+i\infty}^{c-i\infty} f(x) e^{tx} dx`,
    for real c larger than the real part of all singularities of f.

    Note that ``t`` is always assumed real and positive.

    Return None if the integral does not exist or could not be evaluated.

    Examples
    ========

    >>> from sympy.abc import x, t
    >>> from sympy.integrals.meijerint import meijerint_inversion
    >>> meijerint_inversion(1/x, x, t)
    Heaviside(t)
    """
    from sympy import I, Integral, exp, expand, log, Add, Mul, Heaviside
    f_ = f
    t_ = t
    t = Dummy('t', polar=True)  # We don't want sqrt(t**2) = abs(t) etc
    f = f.subs(t_, t)
    c = Dummy('c')
    _debug('Laplace-inverting', f)
    if not _is_analytic(f, x):
        _debug('But expression is not analytic.')
        return None
    # We filter out exponentials here. If we are given an Add this will not
    # work, but the calling code will take care of that.
    shift = 0
    if f.is_Mul:
        args = list(f.args)
        newargs = []
        exponentials = []
        while args:
            arg = args.pop()
            if isinstance(arg, exp):
                arg2 = expand(arg)
                if arg2.is_Mul:
                    args += arg2.args
                    continue
                try:
                    a, b = _get_coeff_exp(arg.args[0], x)
                except _CoeffExpValueError:
                    b = 0
                if b == 1:
                    # exp(a*x): handled by the time-shift t -> t + a.
                    exponentials.append(a)
                else:
                    newargs.append(arg)
            elif arg.is_Pow:
                arg2 = expand(arg)
                if arg2.is_Mul:
                    args += arg2.args
                    continue
                if x not in arg.base.free_symbols:
                    try:
                        a, b = _get_coeff_exp(arg.exp, x)
                    except _CoeffExpValueError:
                        b = 0
                    if b == 1:
                        # c**(a*x) == exp(a*log(c)*x): absorb into the shift.
                        exponentials.append(a*log(arg.base))
                    else:
                        # BUG FIX: previously the factor was appended to
                        # newargs unconditionally, so a c**(a*x) factor was
                        # both shifted AND kept, double-counting it.  Mirror
                        # the isinstance(arg, exp) branch above: each factor
                        # goes into exactly one bucket.
                        newargs.append(arg)
                else:
                    newargs.append(arg)
            else:
                newargs.append(arg)
        shift = Add(*exponentials)
        f = Mul(*newargs)
    gs = _rewrite1(f, x)
    if gs is not None:
        fac, po, g, cond = gs
        _debug('Could rewrite as single G function:', fac, po, g)
        res = S(0)
        for C, s, f in g:
            C, f = _rewrite_inversion(fac*C, po*x**s, f, x)
            res += C*_int_inversion(f, x, t)
            cond = And(cond, _check_antecedents_inversion(f, x))
            if cond == False:
                break
        cond = _my_unpolarify(cond)
        if cond == False:
            _debug('But cond is always False.')
        else:
            _debug('Result before branch substitution:', res)
            res = _my_unpolarify(hyperexpand(res))
            if not res.has(Heaviside):
                # The inversion is only valid for t > 0; make that explicit.
                res *= Heaviside(t)
            res = res.subs(t, t + shift)
            if not isinstance(cond, bool):
                cond = cond.subs(t, t + shift)
            return Piecewise((res.subs(t, t_), cond),
                             (Integral(f_*exp(x*t), (x, c - oo*I, c + oo*I)).subs(t, t_), True))
| ChristinaZografou/sympy | sympy/integrals/meijerint.py | Python | bsd-3-clause | 75,824 |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
# What's the real floor?
Kata URL: https://www.codewars.com/kata/574b3b1599d8f897470018f6/train/python
'''
import unittest
class TestCases(unittest.TestCase):
    """Unit tests mirroring the kata's sample cases."""

    def setUp(self):
        pass

    def test1(self):
        # Floor 1 is at ground level (floor 0).
        self.assertEqual(get_real_floor(1), 0)

    def test2(self):
        self.assertEqual(get_real_floor(5), 4)

    def test3(self):
        # Floor 13 does not exist, so button 15 is two floors lower.
        self.assertEqual(get_real_floor(15), 13)

    def test4(self):
        # Basement floors are unchanged.
        self.assertEqual(get_real_floor(-3), -3)

    def test5(self):
        self.assertEqual(get_real_floor(0), 0)
def get_real_floor(n):
    """Translate a lift-button number into the real (physical) floor.

    Basements and ground (n <= 0) map to themselves, buttons 1..13 lose
    one floor (there is no button for floor 0), and buttons above 13 lose
    two (floor 13 is skipped as well).
    """
    if n <= 0:
        return n
    if n <= 13:
        return n - 1
    return n - 2
if __name__ == '__main__':
    # Discover and run the TestCases suite when executed as a script.
    unittest.main()
'''
Reference solutions:
'''
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Apply the migration: add the boolean column backing
        # Episode.allow_multiple_niqati (existing rows get False).
        # keep_default=False drops the database-level default afterwards,
        # per South convention.
        # Adding field 'Episode.allow_multiple_niqati'
        db.add_column(u'activities_episode', 'allow_multiple_niqati',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)
    def backwards(self, orm):
        # Reverse the migration: drop the column added in forwards().
        # Any values stored in it are lost on rollback.
        # Deleting field 'Episode.allow_multiple_niqati'
        db.delete_column(u'activities_episode', 'allow_multiple_niqati')
models = {
u'activities.activity': {
'Meta': {'object_name': 'Activity'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Category']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'description': ('django.db.models.fields.TextField', [], {}),
'edit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inside_collaborators': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_editable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizers': ('django.db.models.fields.IntegerField', [], {}),
'outside_collaborators': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'participants': ('django.db.models.fields.IntegerField', [], {}),
'primary_club': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_activity'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['clubs.Club']"}),
'public_description': ('django.db.models.fields.TextField', [], {}),
'requirements': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'secondary_clubs': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'secondary_activity'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['clubs.Club']"}),
'submission_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'submitter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
u'activities.category': {
'Meta': {'object_name': 'Category'},
'ar_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'en_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Category']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'activities.episode': {
'Meta': {'object_name': 'Episode'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Activity']"}),
'allow_multiple_niqati': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_report_early': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'end_time': ('django.db.models.fields.TimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'requires_report': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'requires_story': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'start_time': ('django.db.models.fields.TimeField', [], {})
},
u'activities.evaluation': {
'Meta': {'object_name': 'Evaluation'},
'episode': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Episode']"}),
'evaluator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'quality': ('django.db.models.fields.PositiveIntegerField', [], {}),
'relevance': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'activities.review': {
'Meta': {'object_name': 'Review'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Activity']"}),
'clubs_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'datetime_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'description_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'edit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inside_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_approved': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'name_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'organizers_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'outside_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'participants_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'requirement_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'review_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'review_type': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '1'}),
'reviewer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'submission_date_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'clubs.club': {
'Meta': {'object_name': 'Club'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'college': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['clubs.College']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'coordination'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deputies': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'deputyships'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'edit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254'}),
'employee': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'employee'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['auth.User']", 'blank': 'True', 'null': 'True'}),
'english_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'memberships'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parenthood'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['clubs.Club']", 'blank': 'True', 'null': 'True'})
},
u'clubs.college': {
'Meta': {'object_name': 'College'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'section': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['activities'] | enjaz/enjaz | activities/south_migrations/0007_auto__add_field_episode_allow_multiple_niqati.py | Python | agpl-3.0 | 12,555 |
#! /usr/bin/env python3
"""
bitset.py
Written by Geremy Condra
Licensed under GPLv3
Released 3 May 2009
This module provides a simple bitset implementation
for Python.
"""
import math

# ``Sequence`` lives in ``collections.abc`` since Python 3.3 and the alias
# in ``collections`` was removed in Python 3.10; import from the canonical
# location with a fallback for very old interpreters.
try:
    from collections.abc import Sequence
except ImportError:
    from collections import Sequence
class Bitset(Sequence):
    """A very simple bitset implementation for Python.

    Note that, like with normal numbers, the leftmost
    index is the MSB, and like normal sequences, that
    is 0.

    Usage:
        >>> b = Bitset(5)
        >>> b
        Bitset(101)
        >>> b[:]
        [True, False, True]
        >>> b[0] = False
        >>> b
        Bitset(001)
        >>> b << 1
        Bitset(010)
        >>> b >> 1
        Bitset(000)
        >>> b & 1
        Bitset(001)
        >>> b | 2
        Bitset(011)
        >>> b ^ 6
        Bitset(111)
        >>> ~b
        Bitset(110)
    """

    # Underlying integer holding the bits, and the number of significant bits.
    value = 0
    length = 0

    @classmethod
    def from_sequence(cls, seq):
        """Iterates over the sequence to produce a new Bitset.

        As in integers, the 0 position represents the LSB.
        """
        n = 0
        for index, value in enumerate(reversed(seq)):
            n += 2**index * bool(int(value))
        b = Bitset(n)
        return b

    def __init__(self, value=0, length=0):
        """Creates a Bitset with the given integer value."""
        self.value = value
        # Bit length is floor(log2(value)) + 1; math.log raises for
        # value <= 0, in which case the length defaults to 0.
        try:
            self.length = length or math.floor(math.log(value, 2)) + 1
        except Exception:
            self.length = 0

    def __and__(self, other):
        b = Bitset(self.value & int(other))
        # Keep the wider of the two operand lengths.
        b.length = max((self.length, b.length))
        return b

    def __or__(self, other):
        b = Bitset(self.value | int(other))
        b.length = max((self.length, b.length))
        return b

    def __invert__(self):
        b = Bitset(~self.value)
        b.length = max((self.length, b.length))
        return b

    def __xor__(self, value):
        b = Bitset(self.value ^ int(value))
        b.length = max((self.length, b.length))
        return b

    def __lshift__(self, value):
        b = Bitset(self.value << int(value))
        b.length = max((self.length, b.length))
        return b

    def __rshift__(self, value):
        b = Bitset(self.value >> int(value))
        b.length = max((self.length, b.length))
        return b

    def __eq__(self, other):
        # Compare against another Bitset, or fall back to raw int equality.
        try:
            return self.value == other.value
        except Exception:
            return self.value == other

    def __int__(self):
        return self.value

    def __str__(self):
        # join() instead of repeated "+=" concatenation: linear, not
        # quadratic, in the number of bits.
        return "".join("1" if bit else "0" for bit in self[:])

    def __repr__(self):
        return "Bitset(%s)" % str(self)

    def __getitem__(self, s):
        """Gets the specified position or slice of positions.

        Like normal integers, 0 represents the MSB.
        """
        try:
            start, stop, step = s.indices(len(self))
            results = []
            for position in range(start, stop, step):
                pos = len(self) - position - 1
                results.append(bool(self.value & (1 << pos)))
            return results
        except AttributeError:
            # `s` has no .indices(), so it is a plain index, not a slice.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit.)
            pos = len(self) - s - 1
            return bool(self.value & (1 << pos))

    def __setitem__(self, s, value):
        """Sets the specified position/s to value.

        Like normal integers, 0 represents the MSB.
        """
        try:
            start, stop, step = s.indices(len(self))
            for position in range(start, stop, step):
                pos = len(self) - position - 1
                if value:
                    self.value |= (1 << pos)
                else:
                    self.value &= ~(1 << pos)
            maximum_position = max((start + 1, stop, len(self)))
            self.length = maximum_position
        except AttributeError:
            # Plain integer index (see __getitem__); was a bare `except:`.
            pos = len(self) - s - 1
            if value:
                self.value |= (1 << pos)
            else:
                self.value &= ~(1 << pos)
            # NOTE(review): this looks like it should be `pos + 1`; kept
            # as-is to preserve the original behaviour.
            if len(self) < pos:
                self.length = pos
        return self

    def __iter__(self):
        """Iterates over the values in the bitset."""
        for i in self[:]:
            yield i

    def __len__(self):
        """Returns the length of the bitset."""
        return self.length
| djdarcy/Prime-Square-Sum | test/recipe-576738-1.py | Python | gpl-2.0 | 3,415 |
from citrination_client.data import UploadResult
def test_indicates_failure():
    """A single recorded failure must make the whole result unsuccessful."""
    result = UploadResult()
    result.add_failure("test.jpg", "bad filename")
    assert result.successful() is False
def test_default_is_success():
    """A freshly constructed (empty) result reports success."""
    assert UploadResult().successful()
def test_add_success():
    """Recording one success leaves exactly one entry in .successes."""
    result = UploadResult()
    result.add_success("my/path.jpg", 2, "path.jpg")
    assert len(result.successes) == 1
def test_cant_write_lists():
    """The successes and failures properties must be read-only."""
    result = UploadResult()
    for attr in ("successes", "failures"):
        try:
            setattr(result, attr, "asdf")
            assert False
        except AttributeError:
            assert True
| CitrineInformatics/python-citrination-client | citrination_client/data/tests/test_upload_result.py | Python | apache-2.0 | 1,010 |
import sys
from time import sleep
sys.path.append('../lib')
import cflib.crtp
from cflib.crazyflie import Crazyflie
from cfclient.utils.logconfigreader import LogConfig
from threading import Thread, Timer
import logging
logging.basicConfig(level=logging.ERROR)
class MyCF:
    """Thin wrapper around a cflib Crazyflie link that can run a short
    pre-programmed thrust ramp on the drone's motors in a background thread.
    """

    def __init__(self, uri):
        # Register the connect/disconnect callbacks BEFORE opening the link
        # so no event can be missed.
        self.cf = Crazyflie()
        self.cf.connected.add_callback(self.connected)
        self.cf.disconnected.add_callback(self.disconnected)
        self.cf.open_link(uri)
        self.uri = uri
        # Polled by the main script to wait until the radio link is up.
        self.is_connected = False

    def connected(self, uri):
        # cflib callback: radio link established.
        self.is_connected = True
        print("Connected to {}".format(uri))

    def disconnected(self, uri):
        # cflib callback: radio link lost or closed.
        print("disconnected from {}".format(uri))

    def close(self):
        self.cf.close_link()

    def start_motors(self):
        # Run the thrust ramp in its own thread so several drones can be
        # driven concurrently from the main script.
        Thread(target=self.motor).start()

    def motor(self):
        """Ramp thrust from 20000 up to ~25000 and back below 15000,
        sending a setpoint every 0.1 s, then close the link."""
        thrust_mult = 1
        thrust_step = 200
        thrust = 20000
        pitch = 0
        roll = 0
        yawrate = 0
        # Unlock startup thrust protection
        self.cf.commander.send_setpoint(0, 0, 0, 0)
        while thrust >= 15000:
            self.cf.commander.send_setpoint(roll, pitch, yawrate, thrust)
            sleep(0.1)
            if thrust >= 25000:
                # Peak reached: start ramping down.
                thrust_mult = -1
            thrust += thrust_step * thrust_mult
        self.cf.commander.send_setpoint(0, 0, 0, 0)
        # Make sure that the last packet leaves before the link is closed
        # since the message queue is not flushed before closing
        sleep(0.1)
        self.close()
if __name__ == "__main__":
    # Scan for available Crazyflie radio interfaces and list them.
    cflib.crtp.init_drivers(enable_debug_driver=False)
    a = cflib.crtp.scan_interfaces()
    for i in a:
        print(i)
    liczba_dronow = int(input("podaj liczbe dronow: "))
    cf = []
    for i in range(liczba_dronow):
        # BUG FIX: input() returns a string, but it is used as a list index
        # below — convert to int like the drone-count prompt above.
        nr = int(input("wybierz uri drona nr {}: ".format(i)))
        cf.append(MyCF(a[nr][0]))
    # Block until every drone reports an established link.
    while any(not item.is_connected for item in cf):
        sleep(0.5)
    for i in cf:
        i.start_motors()
    # BUG FIX: raw_input() is Python 2 only; the rest of the script already
    # uses Python 3 built-ins (print(), input()).
    test = input("press any key...")
    for i in cf:
        i.close()
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import MySQLdb
import sys
import numpy as np
from scipy.optimize import leastsq
def priceVolumeDown(data_date):
    """Record, for each stock, how many consecutive recent days had both
    price and volume moving together (rising towards the next entry).

    Results are written back through StockDataMart.addPriceVolumeDown.
    """
    stockList = StockDataMart.getAllStocks()
    for stock in stockList:
        stockId = stock[0]
        market = stock[1]
        stockName = stock[2]
        dataInfo = StockDataMart.getStockPriceAndVolume(stockId, market, data_date)
        if len(dataInfo) <= 0:
            continue
        lastDays = 0
        for i in range(0, len(dataInfo) - 1):
            # BUG FIX: the original used the non-Python keyword "then"
            # (a SyntaxError) and indexed `rows` — the 3-element stock row —
            # instead of the price/volume series `dataInfo`.
            if dataInfo[i][0] < dataInfo[i + 1][0] and dataInfo[i][1] < dataInfo[i + 1][1]:
                lastDays = lastDays + 1
            else:
                break
        if lastDays > 0:
            StockDataMart.addPriceVolumeDown(stockId, market, stockName, data_date, lastDays)
def priceDown(data_date):
    """Record, for each stock, the streak length of consecutive recent
    entries whose price rises towards the next entry.

    Results are written back through StockDataMart.addPriceDown.
    """
    stockList = StockDataMart.getAllStocks()
    for stock in stockList:
        stockId = stock[0]
        market = stock[1]
        stockName = stock[2]
        dataInfo = StockDataMart.getStockPrice(stockId, market, data_date)
        if len(dataInfo) <= 0:
            continue
        lastDays = 0
        for i in range(0, len(dataInfo) - 1):
            # BUG FIX: "then" (SyntaxError) and wrong variable — compare
            # consecutive entries of the price series `dataInfo`, not the
            # stock row tuple.
            if dataInfo[i][0] < dataInfo[i + 1][0]:
                lastDays = lastDays + 1
            else:
                break
        if lastDays > 0:
            StockDataMart.addPriceDown(stockId, market, stockName, data_date, lastDays)
def volumeDown(data_date):
    """Record, for each stock, the streak length of consecutive recent
    entries whose volume rises towards the next entry.

    Results are written back through StockDataMart.addVolumeDown.
    """
    stockList = StockDataMart.getAllStocks()
    for stock in stockList:
        stockId = stock[0]
        market = stock[1]
        stockName = stock[2]
        dataInfo = StockDataMart.getStockVolume(stockId, market, data_date)
        if len(dataInfo) <= 0:
            continue
        lastDays = 0
        for i in range(0, len(dataInfo) - 1):
            # BUG FIX: "then" (SyntaxError) and wrong variable — compare
            # consecutive entries of the volume series `dataInfo`.
            if dataInfo[i][0] < dataInfo[i + 1][0]:
                lastDays = lastDays + 1
            else:
                break
        if lastDays > 0:
            StockDataMart.addVolumeDown(stockId, market, stockName, data_date, lastDays)
def getEvenPrice(data_date, count_days):
    """Compute and store each stock's count_days moving-average price.

    FIX: the inner loop no longer shadows the outer loop variable `rows`.
    """
    stockList = StockDataMart.getAllStocks()
    for stock in stockList:
        stockId = stock[0]
        market = stock[1]
        stockName = stock[2]
        dataInfo = StockDataMart.getStockPrice(stockId, market, data_date)
        num = 0
        priceSum = 0.0
        for info in dataInfo:
            curPrice = info[0]
            priceSum = priceSum + curPrice
            num = num + 1
            if (num >= count_days):
                break
        # NOTE(review): divides by count_days even when fewer than
        # count_days data points were available — confirm this is intended.
        evenPrice = priceSum / count_days
        StockDataMart.addEvenPrice(stockId, market, stockName, count_days, evenPrice, data_date)
def getEvenPriceDiff(data_date, count_days):
    """Compute and store each stock's mean squared deviation of the price
    from its count_days moving average.

    FIX: the inner loop no longer shadows the outer loop variable `rows`;
    the unused local `curData` was removed.
    """
    stockList = StockDataMart.getAllStocks()
    for stock in stockList:
        stockId = stock[0]
        market = stock[1]
        stockName = stock[2]
        dataInfo = StockDataMart.getStockPrice(stockId, market, data_date)
        sumSquareDiff = 0.0
        evenPrice = StockDataMart.getEvenPrice(stockId, market, data_date, count_days)
        num = 0
        for info in dataInfo:
            curPrice = info[0]
            sumSquareDiff = sumSquareDiff + pow(curPrice - evenPrice, 2)
            num = num + 1
            if num >= count_days:
                break
        # NOTE(review): divides by count_days even when fewer data points
        # were summed — confirm this is intended.
        evenSquareDiff = sumSquareDiff / count_days
        StockDataMart.addEvenPriceDiff(stockId, market, stockName, count_days, evenSquareDiff, data_date)
def evenPriceDiffByLSM(data_date, count_days):
    """Least-squares fit of a line a*x + b through each stock's recent
    moving-average-deviation series and print the fitted parameters.

    FIX: `print r[0]` was Python 2 syntax; the inner loop no longer
    shadows the outer loop variable `rows`.
    """
    stockList = StockDataMart.getAllStocks()
    for stock in stockList:
        stockId = stock[0]
        market = stock[1]
        stockName = stock[2]
        dataInfo = StockDataMart.getEvenPriceDiff(stockId, market, count_days, data_date)
        num = 0
        evenPriceDiffList = []
        xList = []
        for info in dataInfo:
            evenPriceDiff = info[0]
            evenPriceDiffList.append(evenPriceDiff)
            num = num + 1
            xList.append(num)
            if num >= count_days:
                break
        # Series is newest-first; reverse so x increases with time.
        evenPriceDiffList.reverse()
        xIndexList = np.array(xList, dtype=float)
        yEvenPriceDiffList = np.array(evenPriceDiffList, dtype=float)
        r = leastsq(fitResiduals, [1, 1], args=(xIndexList, yEvenPriceDiffList))
        print(r[0])
def fitFunction(x, p):
    """Linear model y = a*x + b with parameters p = (a, b)."""
    return p[0] * x + p[1]
def fitResiduals(p, x, y):
    """Residuals between the linear model and observations, for leastsq."""
    predicted = fitFunction(x, p)
    return predicted - y
| sunpy1106/stocknotes | StockDataAnalyze.py | Python | gpl-2.0 | 3,667 |
from PIL import Image
import glob
import numpy as np
import os
def chkMkDir(dirName):
    """Create directory dirName (including parents) if it does not exist."""
    # FIX: the exists()-then-makedirs() sequence was racy (TOCTOU); tolerate
    # another process creating the directory between the check and the call.
    if not os.path.exists(dirName):
        try:
            os.makedirs(dirName)
        except OSError:
            # Re-raise unless the directory now exists (someone beat us to it).
            if not os.path.isdir(dirName):
                raise
    return
def imgSizeFinder(img, newSize=1280):
    """Return (width, height) for img scaled so its longest side is at most
    newSize pixels, preserving the aspect ratio.  Images already within the
    limit are returned at their original size."""
    imgDims = img.size
    dimMax = max(imgDims)
    if (dimMax <= newSize):
        width = imgDims[0]
        height = imgDims[1]
    else:
        # BUG FIX: "(float)x" is a C-style cast and a SyntaxError in Python;
        # the intended call is float(x).
        width = int(np.around(float(imgDims[0]) / float(dimMax) * newSize))
        height = int(np.around(float(imgDims[1]) / float(dimMax) * newSize))
    return (width, height)
#Do we have a list of files
#Are we creating thumbs too
# BUG FIX: makeThumbs was referenced below but never defined (NameError).
makeThumbs = True

#Create output directory to store resized images + thumbs
workDir = os.getcwd()
outDir = os.path.join(workDir, "resized")
chkMkDir(outDir)
if makeThumbs:
    thumbDir = os.path.join(outDir, "thumbs")
    chkMkDir(thumbDir)

#For each file in w/d: import image; resize and save; make thumbnail & save
inputFiles = glob.glob("*.jpg")
for jpg in inputFiles:
    currentImg = None
    try:
        currentImg = Image.open(os.path.join(workDir, jpg))
    # BUG FIX: was a bare `except:` that printed str(currentImg) — i.e.
    # "None" — instead of the offending filename, then fell through and
    # crashed on imgSizeFinder(None).
    except IOError:
        print("Problems reading file " + str(jpg))
        continue
    outImgSize = imgSizeFinder(currentImg)
"""
A tool for identifying griefers.
.. note::
"blockinfo" must be AFTER "votekick" in the config script list
Commands
^^^^^^^^
* ``/griefcheck or /gc <player> <minutes>`` gives you when, how many and whos blocks a player destroyed *admin only*
Options
^^^^^^^
.. code-block:: guess
[blockinfo]
griefcheck_on_votekick = true
irc_only = false
.. codeauthor:: hompy
"""
from twisted.internet.reactor import seconds
from pyspades.collision import distance_3d_vector
from pyspades.common import prettify_timespan
from piqueserver.commands import command, admin, get_player
from piqueserver.config import config
blockinfo_config = config.section("blockinfo")
GRIEFCHECK_ON_VOTEKICK = blockinfo_config.option("griefcheck_on_votekick", True)
IRC_ONLY = blockinfo_config.option("irc_only", False)
@command('griefcheck', 'gc')
def grief_check(connection, player, minutes=2):
    """Build a human-readable griefing report for *player*.

    The report covers the last *minutes* minutes: how many blocks the
    player removed, who originally placed them (map blocks have no owner),
    when the last one was destroyed, recent team switches and teamkills,
    and — while a votekick against the player is running — the distance
    between the player and the votekick instigator.
    """
    player = get_player(connection.protocol, player)
    protocol = connection.protocol
    # Use IRC colour codes only for non-player connections that support them.
    color = connection not in protocol.players and connection.colors
    minutes = float(minutes)
    if minutes <= 0.0:
        raise ValueError('minutes must be number greater than 0')
    time = seconds() - minutes * 60.0
    blocks_removed = player.blocks_removed or []
    # Entries are (timestamp, placer) where placer is (name, team_id) or
    # None for original map blocks.
    blocks = [b[1] for b in blocks_removed if b[0] >= time]
    player_name = player.name
    if color:
        player_name = (('\x0303' if player.team.id else '\x0302') +
                       player_name + '\x0f')
    message = '%s removed %s block%s in the last ' % (
        player_name, len(blocks) or 'no', '' if len(blocks) == 1 else 's')
    if minutes == 1.0:
        minutes_s = 'minute'
    else:
        minutes_s = '{:.1f} minutes'.format(minutes)
    message += minutes_s + '.'
    if len(blocks):
        # Deduplicate the original placers; drop the map-block marker.
        infos = set(blocks)
        infos.discard(None)
        if color:
            names = [('\x0303' if team else '\x0302') + name for name, team in
                     infos]
        else:
            names = set([name for name, team in infos])
        if len(names) > 0:
            message += (' Some of them were placed by ' +
                        ('\x0f, ' if color else ', ').join(names))
            message += '\x0f.' if color else '.'
        else:
            message += ' All of them were map blocks.'
        # Describe the most recently destroyed block.
        last = blocks_removed[-1]
        time_s = prettify_timespan(seconds() - last[0], get_seconds=True)
        message += ' Last one was destroyed %s ago' % time_s
        whom = last[1]
        if whom is None and len(names) > 0:
            message += ', and was part of the map'
        elif whom is not None:
            name, team = whom
            if color:
                name = ('\x0303' if team else '\x0302') + name + '\x0f'
            message += ', and belonged to %s' % name
        message += '.'
    switch_sentence = False
    if player.last_switch is not None and player.last_switch >= time:
        time_s = prettify_timespan(seconds() - player.last_switch,
                                   get_seconds=True)
        message += ' %s joined %s team %s ago' % (player_name,
                                                  player.team.name, time_s)
        switch_sentence = True
    teamkills = len([t for t in player.teamkill_times or [] if t >= time])
    if teamkills > 0:
        s = ', and killed' if switch_sentence else ' %s killed' % player_name
        message += s + ' %s teammates in the last %s' % (teamkills, minutes_s)
    if switch_sentence or teamkills > 0:
        message += '.'
    # During a votekick against this player, report the distance to the
    # instigator (helps spot retaliatory votekicks).
    votekick = getattr(protocol, 'votekick', None)
    if (votekick and votekick.victim is player and
            votekick.victim.world_object and votekick.instigator.world_object):
        instigator = votekick.instigator
        tiles = int(distance_3d_vector(player.world_object.position,
                                       instigator.world_object.position))
        instigator_name = (('\x0303' if instigator.team.id else '\x0302') +
                           instigator.name + '\x0f')
        message += (' %s is %d tiles away from %s, who started the votekick.' %
                    (player_name, tiles, instigator_name))
    return message
def apply_script(protocol, connection, config):
    """Mix block-tracking behaviour into the server classes.

    Returns (protocol, connection) subclasses that remember who placed
    every block, when each player removed blocks or teamkilled, and that
    automatically post a grief check when a votekick starts (controlled by
    the griefcheck_on_votekick / irc_only options).
    """
    class BlockInfoConnection(connection):
        # Per-player histories, created lazily; cleared on round reset.
        blocks_removed = None
        teamkill_times = None

        def on_reset(self):
            self.blocks_removed = None
            self.teamkill_times = None
            connection.on_reset(self)

        def on_block_build(self, x, y, z):
            # Remember the placer of every built block: (name, team_id).
            if self.protocol.block_info is None:
                self.protocol.block_info = {}
            self.protocol.block_info[(x, y, z)] = (self.name, self.team.id)
            connection.on_block_build(self, x, y, z)

        def on_line_build(self, points):
            if self.protocol.block_info is None:
                self.protocol.block_info = {}
            name_team = (self.name, self.team.id)
            for point in points:
                self.protocol.block_info[point] = name_team
            connection.on_line_build(self, points)

        def on_block_removed(self, x, y, z):
            if self.protocol.block_info is None:
                self.protocol.block_info = {}
            if self.blocks_removed is None:
                self.blocks_removed = []
            pos = (x, y, z)
            # Record (timestamp, placer); placer is None for map blocks.
            info = (seconds(), self.protocol.block_info.pop(pos, None))
            self.blocks_removed.append(info)
            connection.on_block_removed(self, x, y, z)

        def on_kill(self, killer, type, grenade):
            # Track teamkill timestamps on the killer.
            if killer and killer.team is self.team:
                if killer.teamkill_times is None:
                    killer.teamkill_times = []
                killer.teamkill_times.append(seconds())
            return connection.on_kill(self, killer, type, grenade)

    class BlockInfoProtocol(protocol):
        # Map of (x, y, z) -> (placer_name, team_id); reset per map.
        block_info = None

        def on_map_change(self, map):
            self.block_info = None
            protocol.on_map_change(self, map)

        def on_votekick_start(self, instigator, victim, reason):
            result = protocol.on_votekick_start(
                self, instigator, victim, reason)
            # Only run the grief check if the votekick was not vetoed
            # (result is None) and the option is enabled.
            if result is None and GRIEFCHECK_ON_VOTEKICK.get():
                message = grief_check(instigator, victim.name)
                if IRC_ONLY.get():
                    self.irc_say('* ' + message)
                else:
                    self.send_chat(message, irc=True)
            return result

    return BlockInfoProtocol, BlockInfoConnection
| feikname/spades-server | piqueserver/scripts/blockinfo.py | Python | gpl-3.0 | 6,523 |
#coding: utf-8
from django import forms
from django.forms import CharField, IntegerField, DecimalField
from assist.conf import MERCHANT_ID, TEST_MODE, MODE1_URL, MODE2_URL
class HiddenForm(forms.Form):
    """Base form that renders every one of its fields as a hidden input."""

    def __init__(self, *args, **kwargs):
        super(HiddenForm, self).__init__(*args, **kwargs)
        # Replace each bound field's widget with a hidden input.
        for bound_field in self.fields.values():
            bound_field.widget = forms.HiddenInput()
class AssistMode1Form(HiddenForm):
    """Hidden form carrying the parameters of an ASSIST "mode 1" payment request.

    Field labels and help texts are kept in Russian as supplied by the
    payment provider's documentation.  ``target`` is the URL the form posts
    to.  In test mode an extra ``DemoResult`` field selects the simulated
    authorisation outcome; it is removed otherwise (see ``__init__``).
    """
    Merchant_ID = IntegerField(label=u'Идентификатор магазина в ASSIST', initial = MERCHANT_ID)
    OrderNumber = CharField(label=u'Номер заказа в системе расчетов Интернет-магазина', max_length=128,
                            help_text=u'Номер заказа должен быть уникален, иначе платеж будет неуспешным')
    OrderAmount = DecimalField(label=u'Сумма платежа в оригинальной валюте', max_digits=17, decimal_places = 2)
    OrderCurrency = CharField(label=u'Код валюты, в которой указана сумма платежа Subtotal_P', max_length=3, required=False)
    Language = IntegerField(label=u'Язык авторизационных страниц ASSIST', required=False, initial=0)
    Delay = IntegerField(label=u'Признак авторизации кредитной карты при двустадийном механизме работы', required=False, initial=0)
    URL_RETURN = CharField(label=u'URL страницы, на которую должен вернуться покупатель после осуществления платежа при нажатии кнопки «Вернуться в магазин»', max_length=128, required=False)
    URL_RETURN_OK = CharField(label=u'URL страницы, куда должен вернуться покупатель после успешного осуществления платежа', max_length=128, required=False)
    URL_RETURN_NO = CharField(label=u'URL страницы, куда должен вернуться покупатель после неуспешного осуществления платежа', max_length=128, required=False)
    OrderComment = CharField(label=u'Комментарий', max_length=255, required=False,
                             help_text=u'передается в ASSIST и отображается в выписках по операциям')
    ChoosenCardType = IntegerField(label=u'Идентификатор типа карты для оплаты.', required=False,
                                   help_text=u"1 – VISA 2 - EC/MC 3 – DCL 4 – JCB 5- AMEX. Покупатель сможет оплатить покупку только картой указанного типа (указанный тип карт должен быть активирован для магазина).")
    CardPayment = IntegerField(label=u'Может ли покупатель сделать платеж по кредитной карте', required=False, initial=1)
    WebMoneyPayment = IntegerField(label=u'Может ли покупатель сделать платеж', initial=1, required=False)
    PayCashPayment = IntegerField(label=u'Может ли покупатель сделать платеж с помощью платежной системы PayCash', initial=1, required=False)
    QiwiBeelinePayment = IntegerField(label=u'Может ли покупатель сделать платеж с помощью платежного средства «Мобильный платеж. Интернет (Билайн)» системы QIWI', initial=1, required=False)
    AssistIDCCPayment = IntegerField(label=u'Может ли покупатель сделать платеж по кредитной карте с использованием Assist®ID', initial=1, required=False)
    DemoResult = CharField(label=u'', max_length=5, required=True, initial='AS000')

    # URL of the ASSIST "mode 1" payment endpoint.
    target = MODE1_URL

    def __init__(self, *args, **kwargs):
        super(AssistMode1Form, self).__init__(*args, **kwargs)
        # DemoResult only makes sense against the provider's test endpoint.
        if not TEST_MODE:
            del self.fields['DemoResult']
class AssistMode2Form(AssistMode1Form):
    """ASSIST "mode 2" payment form: all mode 1 parameters plus the
    customer's personal and address details, posted to ``MODE2_URL``."""
    LastName = CharField(label=u'Фамилия', max_length=64)
    FirstName = CharField(label=u'Имя', max_length=64)
    MiddleName = CharField(label=u'Отчество', max_length=64, required=False)
    Email = forms.EmailField(label=u'Электронный адрес', max_length=64)
    MobilePhone = CharField(label=u'Телефон', max_length=64, required=False)
    Address = CharField(label=u'Адрес', max_length=128, required=False)
    Country = CharField(label=u'Код страны покупателя', max_length=3, required=False)
    State = CharField(label=u'Код штата/региона', max_length=3, required=False)
    City = CharField(label=u'Город', max_length=64, required=False)
    Zip = CharField(label=u'Почтовый индекс', max_length=64, required=False)

    # URL of the ASSIST "mode 2" payment endpoint.
    target = MODE2_URL
#!/usr/bin/env python
"""
Copyright 2015 Reverb Technologies, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class V1beta3_ReplicationControllerStatus(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually."""
    def __init__(self):
        """
        Attributes:
          swaggerTypes (dict): The key is attribute name and the value is attribute type.
          attributeMap (dict): The key is attribute name and the value is json key in definition.
        """
        self.swaggerTypes = {
            'replicas': 'int'
        }

        self.attributeMap = {
            'replicas': 'replicas'
        }

        # Most recently observed number of replicas.
        self.replicas = None  # int
| Alzon/SUR | magnum/common/pythonk8sclient/client/models/V1beta3_ReplicationControllerStatus.py | Python | apache-2.0 | 1,380 |
import os
# Throwaway smoke-test script: just prints a marker to prove execution.
# NOTE(review): `os` is imported but never used here.
print('hahaha')
| jonathanchu/gitpython-test | test.py | Python | mit | 27 |
# Sai Nudurupati and Erkan Condra - example to use radiation_field.py
"""
Article author: Sai S. Nudurupati (saisiddu@uw.edu) and Erkan Istanbulluoglu
Date: 22 May 2014
"""
# Demonstration of landlab's Radiation component: build a random elevation
# field on a 100 x 100 node raster grid (1.0 unit node spacing), compute the
# total incoming shortwave radiation and the radiation factor (ratio of
# radiation incident on the surface to that on a flat surface) at noon of a
# given Julian day, and plot both cellular fields.

from landlab import RasterModelGrid
from landlab.components.radiation.radiation_field import Radiation
from landlab.plot.imshow import imshow_field
import numpy as np
import matplotlib.pyplot as plt

# Raster grid of 100 x 100 nodes with 1.0 unit spacing.
grid = RasterModelGrid(100,100, 1.)

# Random elevations in [0, 1000) m stored as the 'Elevation' node field.
elevation = np.random.rand(grid.number_of_nodes) * 1000
grid.add_zeros('node','Elevation',units = 'm')
grid['node']['Elevation'] = elevation

# Instantiating Radiation binds the component to this grid.
rad = Radiation( grid )

# Time is in years; update() computes radiation at noon of the matching
# Julian day and stores two cellular fields on the grid:
# 'TotalShortWaveRadiation' and 'RadiationFactor'.
current_time = 0.56
rad.update( current_time )

# Plot and save the total shortwave radiation field.
plt.figure(0)
imshow_field(grid,'TotalShortWaveRadiation', values_at = 'cell',
             grid_units = ('m','m'))
plt.savefig('Radiation')

# Plot and save the radiation factor (1.0 on perfectly flat cells).
plt.figure(1)
imshow_field(grid,'RadiationFactor', values_at = 'cell', grid_units = ('m','m'))
plt.savefig('RadiationFactor')

# Pop up the figure windows.
plt.show()
# Setup the notebook and download the data
from __future__ import print_function
from __future__ import division
import functools
import hashlib
import os
import warnings
from subprocess import check_output
import numpy
from ipywidgets import interact
import pandas
from root_pandas import read_root
from scipy import stats as st
from matplotlib import pyplot as plt
# Silence all warnings for the notebook display.  NOTE(review): this also
# hides genuinely useful deprecation warnings.
warnings.filterwarnings("ignore")
# BUG FIX: bare `rcParams` was never imported (NameError at runtime); the
# intended object is matplotlib's rcParams, reachable through pyplot.
plt.rcParams['image.cmap'] = 'Blues'  # change default colormap
pandas.set_option('display.max_columns', None)
def check_hash(filename, fn_hash, block_size=65536):
    """Return True iff *filename* exists and its SHA-256 hex digest equals
    *fn_hash*.

    On a digest mismatch the (presumably corrupt) file is deleted so the
    caller's download loop will fetch it again.
    """
    if not os.path.isfile(filename):
        return False
    digest = hashlib.sha256()
    with open(filename, 'rb') as stream:
        chunk = stream.read(block_size)
        while chunk:
            digest.update(chunk)
            chunk = stream.read(block_size)
    if digest.hexdigest() != fn_hash:
        print(filename, 'did not match expected hash, retrying')
        os.remove(filename)
        return False
    return True
try:
    # Ensure this doesn't run twice
    new_hist
except NameError:
    # First run: wrap pandas.Series.hist so every histogram in the notebook
    # defaults to an unfilled "step" outline with the grid switched off.
    _hist = pandas.Series.hist

    @functools.wraps(_hist)
    def new_hist(self, *args, **kwargs):
        # Force the notebook-wide defaults, then delegate to the original.
        kwargs['histtype'] = 'step'
        kwargs['grid'] = False
        return _hist(self, *args, **kwargs)

    pandas.Series.hist = new_hist
def get_plot_func(data):
    """Return a histogram-plotting callback bound to *data* (for interact)."""
    def plot_hist(bins, x_min, x_max):
        """Histogram *data* over [x_min, x_max] using *bins* bins."""
        data.hist(range=(x_min, x_max), bins=bins)
        plt.ylabel('Number of Events')
        plt.xlabel('Mass of B+ (MeV/c^2)')
    return plot_hist
eos_server = 'root://eospublic.cern.ch/'
data_dir = '/eos/opendata/lhcb/AntimatterMatters2017/data/'
# Expected SHA-256 digests of the two open-data files.
filenames = {
    'B2HHH_MagnetDown.root': 'b98651b24f825979053544c37010cf7ef9ce5c56ee62357c7e4ae2c392068379',
    'B2HHH_MagnetUp.root': 'c42ad9e47931e1404bf94ad82ea22a0acd10bc9cfbb58e77a6b0fff08ead7859',
}
# Keep downloading each file until its checksum verifies; check_hash deletes
# a corrupt download, so the while-loop naturally retries it.
for fn, fn_hash in filenames.items():
    while not check_hash('Data/' + fn, fn_hash):
        fn_url = eos_server + data_dir + fn
        print('Downloading', fn_url)
        # Pass an argument list with the default shell=False so the URL is
        # never interpreted by a shell (was a concatenated shell=True string).
        check_output(['xrdcp', fn_url, './Data/'])
# Generated by Django 2.0.4 on 2018-04-26 07:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial schema for the waffle test app.

    Creates ``Company``, ``CompanyAwareFlag`` (a waffle Flag extended with a
    many-to-many relation to companies) and ``CompanyUser`` (a minimal user
    model tied to a company).
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Plain company record referenced by the flag and user models below.
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        # Waffle-style feature flag with an extra per-company activation axis.
        migrations.CreateModel(
            name='CompanyAwareFlag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='The human/computer readable name.', max_length=100, unique=True, verbose_name='Name')),
                ('everyone', models.NullBooleanField(help_text='Flip this flag on (Yes) or off (No) for everyone, overriding all other settings. Leave as Unknown to use normally.', verbose_name='Everyone')),
                ('percent', models.DecimalField(blank=True, decimal_places=1, help_text='A number between 0.0 and 99.9 to indicate a percentage of users for whom this flag will be active.', max_digits=3, null=True, verbose_name='Percent')),
                ('testing', models.BooleanField(default=False, help_text='Allow this flag to be set for a session for user testing', verbose_name='Testing')),
                ('superusers', models.BooleanField(default=True, help_text='Flag always active for superusers?', verbose_name='Superusers')),
                ('staff', models.BooleanField(default=False, help_text='Flag always active for staff?', verbose_name='Staff')),
                ('authenticated', models.BooleanField(default=False, help_text='Flag always active for authenticated users?', verbose_name='Authenticated')),
                ('languages', models.TextField(blank=True, default='', help_text='Activate this flag for users with one of these languages (comma-separated list)', verbose_name='Languages')),
                ('rollout', models.BooleanField(default=False, help_text='Activate roll-out mode?', verbose_name='Rollout')),
                ('note', models.TextField(blank=True, help_text='Note where this Flag is used.', verbose_name='Note')),
                ('created', models.DateTimeField(db_index=True, default=django.utils.timezone.now, help_text='Date when this Flag was created.', verbose_name='Created')),
                ('modified', models.DateTimeField(default=django.utils.timezone.now, help_text='Date when this Flag was last modified.', verbose_name='Modified')),
                ('companies', models.ManyToManyField(blank=True, help_text='Activate this flag for these companies.', to='test_app.Company')),
                ('groups', models.ManyToManyField(blank=True, help_text='Activate this flag for these user groups.', to='auth.Group', verbose_name='Groups')),
                ('users', models.ManyToManyField(blank=True, help_text='Activate this flag for these users.', to=settings.AUTH_USER_MODEL, verbose_name='Users')),
            ],
            options={
                'verbose_name': 'Flag',
                'verbose_name_plural': 'Flags',
                'abstract': False,
            },
        ),
        # Minimal custom user model belonging to a single company.
        migrations.CreateModel(
            name='CompanyUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('username', models.CharField(max_length=100)),
                ('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_app.Company')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| rodgomes/django-waffle | test_app/migrations/0001_initial.py | Python | bsd-3-clause | 4,126 |
from Bio import SeqIO
import tempfile
import sys
import argparse
import operator
def parse_xmfa(xmfa):
    """Simple XMFA parser until https://github.com/biopython/biopython/pull/544

    Yields one LCB (locally collinear block) at a time as a list of dicts,
    one dict per aligned sequence, with keys: rid, id, start, end, strand,
    file, seq, comment.
    """
    current_lcb = []
    current_seq = {}
    for line in xmfa.readlines():
        if line.startswith('#'):
            continue
        if line.strip() == '=':
            # '=' terminates an LCB: flush the in-progress sequence record
            # and hand the finished block to the caller.
            if 'id' in current_seq:
                current_lcb.append(current_seq)
                current_seq = {}
            yield current_lcb
            current_lcb = []
        else:
            line = line.strip()
            if line.startswith('>'):
                # A new '>' header also finishes the previous sequence record.
                if 'id' in current_seq:
                    current_lcb.append(current_seq)
                    current_seq = {}
                data = line.strip().split()
                # 0 1 2 3 4 5
                # > 1:5986-6406 + CbK.fa # CbK_gp011
                id, loc = data[1].split(':')
                start, end = loc.split('-')
                current_seq = {
                    'rid': '_'.join(data[1:]),
                    'id': id,
                    'start': int(start),
                    'end': int(end),
                    'strand': 1 if data[2] == '+' else -1,
                    'file': data[3],
                    'seq': '',
                    'comment': '',
                }
                # Anything after the '#' column is a free-text comment.
                if len(data) > 5:
                    current_seq['comment'] = ' '.join(data[5:])
            else:
                # Sequence data line: accumulate onto the current record.
                current_seq['seq'] += line.strip()
HEADER_TPL = '> {id}:{start}-{end} {strand} {file} # {comment}\n'
def split_by_n(seq, n):
    """Yield consecutive slices of *seq*, *n* items each.

    The final slice may be shorter when len(seq) is not a multiple of n.
    """
    for offset in range(0, len(seq), n):
        yield seq[offset:offset + n]
def to_xmfa(lcbs, handle=sys.stdout):
    """Serialise a list of LCBs back to XMFA format on *handle*."""
    handle.write("#FormatVersion Mauve1\n")
    for lcb in lcbs:
        for aln in lcb:
            handle.write(HEADER_TPL.format(
                id=aln['id'],
                start=aln['start'],
                end=aln['end'],
                strand='+' if aln['strand'] > 0 else '-',
                file=aln['file'],
                comment=aln['comment'],
            ))
            # Wrap the aligned sequence at 80 characters per line.
            for line in split_by_n(aln['seq'], 80):
                handle.write(line + '\n')
        # '=' marks the end of each LCB in XMFA.
        handle.write('=\n')
def percent_identity(a, b):
    """Calculate % identity between *a* and *b*, ignoring gaps in *a*.

    Positions where *a* has '-' are skipped entirely; a gap in *b* at a
    non-gap position of *a* counts as a mismatch.  Returns 0.0 when no
    comparable positions exist.
    """
    matches = 0
    compared = 0
    for char_a, char_b in zip(a, b):
        if char_a == '-':
            continue
        compared += 1
        if char_a == char_b:
            matches += 1
    if compared == 0:
        return 0.0
    return 100 * float(matches) / compared
def id_tn_dict(sequences, tmpfile=False):
    """Figure out sequence IDs

    Maps the 1-based index of each FASTA record (as a string key) to a dict
    holding its record_id and sequence length.  When *tmpfile* is True each
    value is instead an open NamedTemporaryFile (delete=False), and the
    caller becomes responsible for removing those files.
    """
    label_convert = {}
    correct_chrom = None
    # Accept a single file handle/path as well as a list of them.
    if not isinstance(sequences, list):
        sequences = [sequences]
    i = 0
    for sequence_file in sequences:
        for record in SeqIO.parse(sequence_file, 'fasta'):
            # Remember the first record id seen (not returned; kept as-is).
            if correct_chrom is None:
                correct_chrom = record.id
            i += 1
            key = str(i)
            label_convert[key] = {
                'record_id': record.id,
                'len': len(record.seq),
            }
            if tmpfile:
                # NOTE(review): this discards the metadata dict built just
                # above and replaces it with a temp file handle.
                label_convert[key] = tempfile.NamedTemporaryFile(delete=False)
    return label_convert
def filter_lcbs_for_seq(xmfa):
    """ clusters lcbs based on which sequences they involve

    Returns a dict mapping a signature string such as '1+2-' (sequence id
    plus strand, concatenated over the LCB members) to the list of LCBs
    carrying that signature.  An LCB whose mirror-image signature (all
    strands flipped) is already present joins the existing cluster.
    """
    strand_info = {'1': '+', '-1': '-'}
    clusters = {}
    for i in list(parse_xmfa(xmfa)):
        cluster_name = ''
        for g in i:
            cluster_name += g['id'] + strand_info[str(g['strand'])]
        # allow clusters with all opposite strands to be together (alt name is opposite strand of orig)
        alt_name = cluster_name.replace('+', '*').replace('-', '+').replace('*', '-')
        orig_not_in_clusters = cluster_name not in clusters
        alt_not_in_clusters = alt_name not in clusters
        if orig_not_in_clusters and alt_not_in_clusters:
            # if original or alternate names not already in clusters
            clusters[cluster_name] = [i]
        else:
            # NOTE(review): if BOTH the original and the mirrored signature
            # already exist, the LCB is appended to both clusters.
            if not orig_not_in_clusters:  # if original name is already in clusters
                clusters[cluster_name].append(i)
            if not alt_not_in_clusters:  # if alt name is already in clusters
                clusters[alt_name].append(i)
    return clusters
# to_xmfa(clusters['123456'])
def merge_lcbs(lcb1, lcb2):
    """Fold *lcb2* into *lcb1* in place, entry by entry, and return *lcb1*.

    Each entry keeps the widest start/end span and appends the partner's
    aligned sequence.
    """
    for idx, entry in enumerate(lcb1):
        partner = lcb2[idx]
        entry['start'] = min(entry['start'], partner['start'])
        entry['end'] = max(entry['end'], partner['end'])
        entry['seq'] += partner['seq']
    return lcb1
def resolve_clusters(clusters):
    """Collapse each group of LCBs into one merged LCB and return the list."""
    merged = []
    for group in clusters:
        combined = group[0]
        # Singleton groups fall through unchanged (the loop body is empty).
        for extra in group[1:]:
            combined = merge_lcbs(combined, extra)
        merged.append(combined)
    return merged
def new(clusters, lcb):
    """Return True when *lcb* does not already belong to any cluster.

    Fix: short-circuit on the first cluster containing *lcb* instead of
    scanning every remaining cluster after a match was already found.
    """
    for cluster in clusters:
        if lcb in cluster:
            return False
    return True
def cluster_lcbs(lcbs, threshold):
    """ clusters lcbs based on how far apart they are

    Greedily chains LCBs: starting from each unclustered LCB, any other
    unclustered LCB whose per-sequence gap from the last member added is
    at most *threshold* joins the cluster.  The chained clusters are then
    merged via resolve_clusters().
    """
    clusters = []
    for o, i in enumerate(lcbs):
        cluster = []
        # Skip LCBs that already ended up in an earlier cluster.
        if not new(clusters, i):
            continue
        cluster.append(i)
        compare_against = i
        for n, j in enumerate(lcbs):
            if not new(clusters, j) or i == j or compare_against == j:
                continue
            close = True
            # Every aligned sequence of j must start within *threshold*
            # nucleotides of where the last accepted LCB ends.
            for num, k in enumerate(compare_against):
                # for num, k in enumerate(i):
                if j[num]['start'] - k['end'] > threshold:
                    close = False
            if close:
                cluster.append(j)
                # Chain: subsequent distances are measured from j.
                compare_against = j
        clusters.append(cluster)
    return resolve_clusters(clusters)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='process XMFA')
    parser.add_argument('xmfa', type=argparse.FileType("r"), help='XMFA file')
    parser.add_argument('threshold', type=int, help='maximum number of nucleotides between lcbs in a cluster')
    args = parser.parse_args()
    # assuming lcbs are filtered
    final_lcbs = []
    # Group LCBs by sequence/strand signature, then distance-cluster each group.
    lcbs_filtered_for_seq = filter_lcbs_for_seq(args.xmfa)
    for i in lcbs_filtered_for_seq:
        final_lcbs += cluster_lcbs(lcbs_filtered_for_seq[i], args.threshold)
    # Emit the merged clusters as XMFA on stdout.
    to_xmfa(final_lcbs)
| TAMU-CPT/mauved3 | cluster_lcbs.py | Python | agpl-3.0 | 6,597 |
"""
``revscoring score -h``
::
Scores a set of revisions.
Usage:
score (-h | --help)
score <model-file> <rev_id>... --api=<uri> [--verbose]
Options:
-h --help Print this documentation
<model-file> Path to a model file
--api=<url> The url pointing to a MediaWiki API to use for extracting
features
--verbose Print debugging info
<rev_id> A revision identifier
"""
import json
import logging
import sys
import traceback
import docopt
from mw import api
from ..extractors import APIExtractor
from ..scorer_models import MLScorerModel
def main(argv=None):
    """CLI entry point: parse arguments, load the model, score the revisions."""
    args = docopt.docopt(__doc__, argv=argv)
    # NOTE(review): the model file handle is never closed explicitly.
    model = MLScorerModel.load(open(args['<model-file>'], 'rb'))
    extractor = APIExtractor(api.Session(args['--api']),
                             language=model.language)
    rev_ids = [int(rev_id) for rev_id in args['<rev_id>']]
    verbose = args['--verbose']
    run(model, extractor, rev_ids, verbose)
def run(model, extractor, rev_ids, verbose):
    """Score each revision and print one tab-separated line per rev_id.

    A line is "<rev_id>TAB<error>" when feature extraction failed for that
    revision, otherwise "<rev_id>TAB<json-encoded score>".  When *verbose*
    is truthy, DEBUG logging is configured first.
    """
    # Fix: the original glued a multi-line basicConfig() call onto a
    # single-line `if verbose:` suite, which is hard to read and violates
    # PEP 8; the behavior is unchanged.
    if verbose:
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
        )
    error_features = extractor.extract(rev_ids, model.features)
    for rev_id, (error, values) in zip(rev_ids, error_features):
        if error is not None:
            print("\t".join([str(rev_id), str(error)]))
        else:
            score = model.score(values)
            print("\t".join([str(rev_id), json.dumps(score)]))
| aetilley/revscoring | revscoring/utilities/score.py | Python | mit | 1,550 |
import ctypes
from . import x64dbg
GUI_MAX_LINE_SIZE = 65536
def DbgIsDebugging():
    """Return whether x64dbg currently has a debuggee (thin SDK wrapper)."""
    return x64dbg.DbgIsDebugging()
def GuiGetLineWindow(title=''):
    """Show an input-line dialog titled *title*.

    Returns the buffer contents when the SDK call reports success,
    otherwise None (implicitly) — presumably when the dialog is cancelled.
    """
    # Fixed-size output buffer the SDK writes the typed line into.
    line = ctypes.create_string_buffer(GUI_MAX_LINE_SIZE)
    return_value = x64dbg.GuiGetLineWindow("%s" % title, line)
    if return_value:
        return line.value
def GuiGetWindowHandle():
    """Return the main x64dbg window handle (thin SDK wrapper)."""
    return x64dbg.GuiGetWindowHandle()
def GuiLogClear():
    """Clear the x64dbg log view (thin SDK wrapper)."""
    x64dbg.GuiLogClear()
def GuiUpdateAllViews():
    """Ask the x64dbg GUI to refresh all of its views (thin SDK wrapper)."""
    x64dbg.GuiUpdateAllViews()
| x64dbg/x64dbgpy | swig/x64dbgpy/pluginsdk/bridgemain.py | Python | mit | 491 |
# -*- coding:utf-8 -*-
# Transform given Active Display Coordinate (ADC) to Media Coordinate (MC).
# raw/pororo*.tsv -> data/pororo*.tsv
#
# @date 2014-03-30
# @author Jin-Hwa Kim (jhkim@bi.snu.ac.kr)
import os, sys, getopt, glob, re
import numpy as np
from sets import Set
import common
# default options
verbose = False
DEBUG = True
# Get the transformation matrix for mapping Tobbi Snapshot coordinate system
# to a unit space coordinate system.
# @param sourceCoords np.array of x, y pairs
# @param targetCoords np.array of x, y pairs
def getTransformationMatrix(sourceCoords, targetCoords):
    """Least-squares linear map Tr with Tr * X ~= Y.

    Solves AX = Y via the normal-equation form A = Y X' pinv(X X'),
    where X is *sourceCoords* and Y is *targetCoords* (points as columns).
    """
    gram = np.dot(sourceCoords, np.transpose(sourceCoords))
    crossCov = np.dot(targetCoords, np.transpose(sourceCoords))
    return np.dot(crossCov, np.linalg.pinv(gram))
# Get a transformed coordinate using a given transformation matrix.
def getUnitCoord(Tr, x, y):
    """Map the point (x, y) through *Tr* using homogeneous coordinates."""
    # Homogeneous [x, y, 1] column, then a single matrix-vector product.
    homogeneous = np.array([x, y, 1])
    return np.dot(Tr, homogeneous)
# encapsulate to a coordinate
# It follows the linear algebra library convention.
def encapsulateCoord(x, y):
    """Wrap (x, y) as a homogeneous vector [x, y, 1] (linear-algebra layout)."""
    return np.array((x, y, 1))
# encapsulate to a coordinate matrix
# It follows the linear algebra library convention.
def encapsulateCoords(listOfXYs):
    """Stack coordinate rows into a matrix with one point per column."""
    return np.array(listOfXYs).T
# Add MCx MCy to a given data.
def preprocess(source_filename, output_filename, snapshotCoords, length, delay, skip = 0):
    """Rewrite one Tobii TSV: keep rows in [delay, delay+length], shift the
    timestamps by *delay*, and append fixation/gaze points transformed into
    media coordinates (MC).

    NOTE(review): rows with timestamp < skip AND rows with timestamp < delay
    are both dropped; confirm `skip` is meant to be independent of `delay`.
    """
    (path, filename, name, extension) = common.pfne(source_filename)
    # NOTE(review): idx and the parsed `event` value are never used.
    idx = 0
    Tr = getTobbiTransformationMatrix(snapshotCoords)
    with open(source_filename, 'rU') as f, open(output_filename, 'w') as w:
        # Print a header for the output file.
        _printHeader(w)
        # Read lines.
        header = f.readline().split('\n')[0]
        wholefile = f.readlines()
        for line in wholefile:
            # parse the line
            timestamp, event, fixX, fixY, gzX, gzY = parseTobbiLine(header, line.split('\n')[0])
            # skip
            if int(timestamp) < skip :
                continue
            # delay
            if int(timestamp) < delay :
                continue
            else :
                timestamp = int(timestamp) - delay
            # length
            if timestamp > length :
                break
            # Print
            # The number of Origin's columns is 9.
            origin = line.split('\n')[0].split('\t')[1:9]
            # try :
            w.write("{}\t{}".format(timestamp, '\t'.join(origin)))
            # Transformation
            # Empty coordinate fields raise ValueError on int(); emit blank
            # columns in that case so the row stays aligned.
            try :
                fixation = getUnitCoord(Tr, int(fixX), int(fixY))
                w.write("\t{0:.3f}\t{1:.3f}".format(fixation[0], fixation[1]))
            except ValueError :
                fixation = ['', '']
                w.write("\t\t")
            try :
                gaze = getUnitCoord(Tr, int(gzX), int(gzY))
                w.write("\t{0:.3f}\t{1:.3f}\n".format(gaze[0], gaze[1]))
            except ValueError :
                gaze = ['', '']
                w.write("\t\t\n")
# Parse Tobbi eye-tracking data to extract the required fields.
def parseTobbiLine(header, line, delimiter = "\t"):
    """Pick the timestamp, event type and the four coordinate columns out of
    one Tobii export row, located by the column names in *header*.

    A UTF-8 BOM on the header is stripped before the columns are resolved.
    All values are returned as raw strings.
    """
    columns = header.replace("\xef\xbb\xbf", "").split(delimiter)
    values = line.split(delimiter)

    def field(name):
        return values[columns.index(name)]

    return (field('RecordingTimestamp'),
            field('GazeEventType'),
            field('FixationPointX (MCSpx)'),
            field('FixationPointY (MCSpx)'),
            field('GazePointX (ADCSpx)'),
            field('GazePointY (ADCSpx)'))
# Print a header for a given file description.
def _printHeader(f):
    """Write the 13-column tab-separated header line to *f*."""
    columns = (
        "RecordingTimestamp", "FixationIndex", "SaccadeIndex",
        "GazeEventType", "GazeEventDuration",
        "FixationPointX (ADCSpx)", "FixationPointY (ADCSpx)",
        "GazePointX (ADCSpx)", "GazePointY (ADCSpx)",
        "FixationPointX (MCSpx)", "FixationPointY (MCSpx)",
        "GazePointX (MCSpx)", "GazePointY (MCSpx)",
    )
    f.write("\t".join(columns) + "\n")
# Print fixations for a given file description.
def _printFixations(f, t_ts, o_ts, fixations, originalFixations):
    """Write one tab-separated row per fixation: the shared timestamp, the
    per-fixation timestamp, the transformed x/y, then the original x/y."""
    for i in range(len(fixations)):
        fixX = fixations[i][0]
        fixY = fixations[i][1]
        oFixX = originalFixations[i][0]
        oFixY = originalFixations[i][1]
        f.write("{}\t{}\t{}\t{}\t{}\t{}\n" \
            .format(t_ts, o_ts[i], fixX, fixY, oFixX, oFixY))
# Get the transformation matrix for Tobbi data.
def getTobbiTransformationMatrix(snapshotCoords):
    """Build the transform from Tobii snapshot coordinates to a unit media
    coordinate system, correcting for the letterboxed video on screen.

    NOTE(review): mutates *snapshotCoords* in place by appending a
    homogeneous 1. to every row.
    """
    # Pororo video resolution : 720 * 544
    M_SIZE = [720., 544.]
    # TV Screen resolution : 1920 * 1080
    S_size = [1920., 1080.]
    # Scaling factor for Pororo video on the screen.
    SCALING_FACTOR = (S_size[0] / (M_SIZE[0] * S_size[1] / M_SIZE[1]) - 1) / 2
    for row in snapshotCoords:
        row.append(1.)
    # Fit: snapshot corners -> unit square widened horizontally by the
    # letterbox scaling factor.
    a = encapsulateCoords(snapshotCoords)
    b = encapsulateCoords([[-SCALING_FACTOR,0,1],[1+SCALING_FACTOR,0,1],\
        [1+SCALING_FACTOR,1,1],[-SCALING_FACTOR,1,1]])
    Tr = getTransformationMatrix(a,b)
    if verbose:
        print "[00] Tr = ",
        print(Tr)
    return Tr
def usage():
    """Print command-line help for the preprocess script."""
    # NOTE(review): the "-o, --output" line says "source directory" in the
    # user-facing string; it presumably should read "output directory".
    print "Usage: preprocess [OPTION]\n" +\
        "Transform given Active Display Coordinates (ADC) to Media Coordinates (MC).\n"+\
        "\n"+\
        " -s, --source Specifies source directory\n"+\
        " default: ./raw\n"+\
        "\n"+\
        " -o, --output Specifies source directory\n"+\
        " default: ./data\n"+\
        "\n"+\
        " -v, --verbose View more details\n"
def main():
    """Batch driver: read delay/snapshot metadata, then preprocess each raw
    Tobii TSV file found under the source glob into the output directory."""
    GAT = False
    # Define Filenames
    DELAY_FILENAME = "info/delay.csv" if not GAT else "info/gat.csv"
    SNAPSHOT_FILENAME = "info/snapshot.tsv"
    source = "raw/pororo_*.tsv"
    output = "data/"
    # NOTE(review): verbose is forced on here, which makes the -v flag moot.
    verbose = True
    try:
        # NOTE(review): getopt is given no long-options list, so "--source"
        # and "--output" raise GetoptError and never reach the tests below.
        opts, args = \
            getopt.getopt(sys.argv[1:], "vs:o:")
    except getopt.GetoptError:
        # print help information and exit:
        usage()
        sys.exit(2)
    for option, value in opts:
        if option == "-v":
            verbose = True
        if option in ("-s", "--source"):
            source = value
        if option in ("-o", "--output"):
            output = value
    # get file name list
    filenameList = glob.glob(source)
    snapshotCoordsList = common.readData(SNAPSHOT_FILENAME, '\t', False, verbose)
    delayList = common.readData(DELAY_FILENAME, ',', False, verbose)
    for fullname in filenameList:
        print "[01] Reading", fullname
        (path, filename, name, extension) = common.pfne(fullname)
        # snapshot coords
        snapshotCoords = common.findOne(name, snapshotCoordsList)
        # Four (x, y) corner pairs stored flat in the snapshot row.
        tuples = []
        for i in range(4):
            tuples.append([float(snapshotCoords[i*2+0]), float(snapshotCoords[i*2+1])])
        length, delay, skip = [int(i) for i in common.findOne(name, delayList)]
        if verbose:
            print "delay => ", delay, "skip => ", skip, "length =>", length
        if GAT:
            _splited = re.split('s03p0\d', filename)
            output_filename = _splited[0] + 'GAT' + _splited[1]
        else:
            output_filename = filename
        # Do prepocess and store to a given output filename.
        if verbose:
            print "preprocess({}, {}, snapshotCoords, {}, {})"\
                .format(path + os.sep + filename, output + output_filename, length, delay, skip)
        preprocess(path + os.sep + filename, output + output_filename, tuples, length, delay, skip)
| jnhwkim/Eye-Tracking | Transform/preprocess.py | Python | mit | 8,025 |
# -*- coding: utf-8 -*-
import sys,getopt,got,datetime,codecs
def main(argv):
    """Parse the CLI options into a TweetCriteria and stream the matching
    tweets to output_got.csv (semicolon-separated)."""
    if len(argv) == 0:
        print 'You must pass some parameters. Use \"-h\" to help.'
        return
    if len(argv) == 1 and argv[0] == '-h':
        print """\nTo use this jar, you can pass the folowing attributes:
    username: Username of a specific twitter account (without @)
    since: The lower bound date (yyyy-mm-aa)
    until: The upper bound date (yyyy-mm-aa)
    querysearch: A query text to be matched
    maxtweets: The maximum number of tweets to retrieve
    \nExamples:
    # Example 1 - Get tweets by username [barackobama]
    python Exporter.py --username "barackobama" --maxtweets 1\n
    # Example 2 - Get tweets by query search [europe refugees]
    python Exporter.py --querysearch "europe refugees" --maxtweets 1\n
    # Example 3 - Get tweets by username and bound dates [barackobama, '2015-09-10', '2015-09-12']
    python Exporter.py --username "barackobama" --since 2015-09-10 --until 2015-09-12 --maxtweets 1\n
    # Example 4 - Get the last 10 top tweets by username
    python Exporter.py --username "barackobama" --maxtweets 10 --toptweets\n"""
        return
    try:
        opts, args = getopt.getopt(argv, "", ("username=", "since=", "until=", "querysearch=", "toptweets", "maxtweets="))
        tweetCriteria = got.manager.TweetCriteria()
        for opt,arg in opts:
            if opt == '--username':
                tweetCriteria.username = arg
            elif opt == '--since':
                tweetCriteria.since = arg
            elif opt == '--until':
                tweetCriteria.until = arg
            elif opt == '--querysearch':
                tweetCriteria.querySearch = arg
            elif opt == '--toptweets':
                tweetCriteria.topTweets = True
            elif opt == '--maxtweets':
                tweetCriteria.maxTweets = int(arg)
        outputFile = codecs.open("output_got.csv", "w+", "utf-8")
        outputFile.write('username;date;retweets;favorites;text;geo;mentions;hashtags;id;permalink')
        print 'Searching...\n'
        # Callback invoked by TweetManager with each batch of tweets.
        def receiveBuffer(tweets):
            for t in tweets:
                outputFile.write(('\n%s;%s;%d;%d;"%s";%s;%s;%s;"%s";%s' % (t.username, t.date.strftime("%Y-%m-%d %H:%M"), t.retweets, t.favorites, t.text, t.geo, t.mentions, t.hashtags, t.id, t.permalink)))
                outputFile.flush();
            print 'More %d saved on file...\n' % len(tweets)
        got.manager.TweetManager.getTweets(tweetCriteria, receiveBuffer)
    # NOTE(review): `except arg:` uses the last option value as the
    # exception class — almost certainly a bug; a bare GetoptError (or
    # Exception) handler was presumably intended.
    except arg:
        print 'Arguments parser error, try -h' + arg
    finally:
        # NOTE(review): if an exception fires before codecs.open() above,
        # `outputFile` is undefined here and this raises NameError.
        outputFile.close()
        print 'Done. Output file generated "output_got.csv".'
if __name__ == '__main__':
main(sys.argv[1:]) | pizzaro13/FakeNewsDetector | Exporter.py | Python | mit | 2,515 |
import os
from IPython.lib import passwd
# 'c' is the traitlets config object injected by Jupyter when loading this file.
# Listen on all interfaces; PORT env var overrides the default 8888.
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.NotebookApp.allow_root = True
c.MultiKernelManager.default_kernel_name = 'python3'

# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
    password = os.environ['PASSWORD']
    if password:
        c.NotebookApp.password = passwd(password)
    else:
        # An explicitly empty PASSWORD disables both password and token auth.
        c.NotebookApp.password = ''
        c.NotebookApp.token = ''
    # Scrub the secret from the environment once it has been consumed.
    del os.environ['PASSWORD']
# This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.profile.models import *
class Migration:
    def forwards(self, orm):
        """Apply the migration: add Citation.old_summary and re-apply the
        (frozen) date_created column signatures."""
        # Adding field 'Citation.old_summary'
        db.add_column('profile_citation', 'old_summary', orm['profile.citation:old_summary'])
        # Changing field 'PortfolioEntry.date_created'
        # (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2009, 10, 28, 19, 57, 19, 69865)))
        db.alter_column('profile_portfolioentry', 'date_created', orm['profile.portfolioentry:date_created'])
        # Changing field 'Citation.date_created'
        # (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2009, 10, 28, 19, 57, 19, 126390)))
        db.alter_column('profile_citation', 'date_created', orm['profile.citation:date_created'])
    def backwards(self, orm):
        """Reverse the migration: drop Citation.old_summary and restore the
        previous date_created column signatures."""
        # Deleting field 'Citation.old_summary'
        db.delete_column('profile_citation', 'old_summary')
        # Changing field 'PortfolioEntry.date_created'
        # (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2009, 10, 23, 20, 37, 55, 805457)))
        db.alter_column('profile_portfolioentry', 'date_created', orm['profile.portfolioentry:date_created'])
        # Changing field 'Citation.date_created'
        # (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2009, 10, 23, 20, 37, 56, 351970)))
        db.alter_column('profile_citation', 'date_created', orm['profile.citation:date_created'])
models = {
'auth.group': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'profile.citation': {
'contributor_role': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'data_import_attempt': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.DataImportAttempt']", 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2009, 10, 28, 19, 57, 19, 524318)'}),
'distinct_months': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'first_commit_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'languages': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'old_summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'portfolio_entry': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.PortfolioEntry']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'profile.dataimportattempt': {
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}),
'query': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
'profile.link_person_tag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"})
},
'profile.link_project_tag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"})
},
'profile.link_projectexp_tag': {
'Meta': {'unique_together': "[('tag', 'project_exp', 'source')]"},
'favorite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project_exp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.ProjectExp']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"})
},
'profile.link_sf_proj_dude_fm': {
'Meta': {'unique_together': "[('person', 'project')]"},
'date_collected': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.SourceForgePerson']"}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.SourceForgeProject']"})
},
'profile.person': {
'gotten_name_from_ohloh': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interested_in_working_on': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024'}),
'last_polled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1970, 1, 1, 0, 0)'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100'}),
'show_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'profile.portfolioentry': {
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2009, 10, 28, 19, 57, 19, 603831)'}),
'experience_description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'project_description': ('django.db.models.fields.TextField', [], {})
},
'profile.projectexp': {
'data_import_attempt': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.DataImportAttempt']", 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'man_months': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'modified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']", 'null': 'True'}),
'person_role': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'should_show_this': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'profile.sourceforgeperson': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'profile.sourceforgeproject': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'unixname': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'profile.tag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.TagType']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'profile.tagtype': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'prefix': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'search.project': {
'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'icon': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['profile']
| jledbetter/openhatch | mysite/profile/migrations/0052_add_old_summary_field_to_citation.py | Python | agpl-3.0 | 14,266 |
from django import template
register = template.Library()
@register.filter
def smart_pages(all_pages, current_page):
    """Return a sorted, de-duplicated shortlist of page numbers to render.

    Always includes the first page, the last page and the current page,
    plus the halfway points toward each end and the two neighbours on
    either side of the current page (each clamped to [1, last]).

    Fix: an empty *all_pages* now yields [] instead of raising IndexError
    on all_pages[-1].
    """
    pages = list(all_pages)
    if not pages:
        return []
    last = pages[-1]

    def clamp(page):
        # Keep a candidate inside the valid [1, last] range.
        return max(min(page, last), 1)

    candidates = {
        1,
        last,
        current_page,
        clamp(current_page // 2),
        clamp(current_page + ((last - current_page) // 2)),
        clamp(current_page + 1),
        clamp(current_page + 2),
        clamp(current_page - 1),
        clamp(current_page - 2),
    }
    return sorted(candidates)
| drscream/teamvault | src/teamvault/apps/secrets/templatetags/smart_pagination.py | Python | gpl-3.0 | 631 |
import os
import sys
class Factory(object):
    """Minimal factory stub for the atve core service."""

    def __init__(self):
        """Stateless; nothing to initialise."""
        pass

    def get(self):
        """Always return True."""
        return True
# Dotted service name this module is registered under.
NAME = "atve.core"
# Module-level Factory instance exported alongside NAME.
FACTORY = Factory()
| TE-ToshiakiTanaka/atve | atve/library/core/service.py | Python | mit | 163 |
from django.contrib import admin
class ArchiveAdminMixin(admin.ModelAdmin):
    """ModelAdmin mixin that excludes archive bookkeeping fields
    (b64id, deleted_at) from the admin change form."""
    exclude = ('b64id', 'deleted_at',)
#!/usr/bin/env python
"""
@file Constants.py
@author Remi Domingues
@date 18/06/2013
Constants used by ASTra Python scripts
"""
import os, sys
import re
from math import ceil
from datetime import datetime
import logging
"""
============================================================================================================================================
=== REQUIRED VARIABLES ===
============================================================================================================================================
"""
def getNetworkFilesDict(networkId, configFile, networkFile):
    """
    Returns a network linked with its configuration files
    (a dict mapping file-type keys to file paths).
    """
    # networkId is accepted for interface compatibility but not used here.
    return {
        CONFIG_FILE_KEY: configFile,
        NETWORK_FILE_KEY: networkFile,
    }
""" Networks constants """
"""
SUMO_CONFIG dictionary structure:
Key = networkId
Value = Configuration Files dictionary:
Key = configFileType
Value = configFilePath
Note:
- configFileType in {CONFIG_FILE_KEY, NETWORK_FILE_KEY}
"""
SUMO_CONFIG_DICT = dict()
CONFIG_FILE_KEY = "Config"
NETWORK_FILE_KEY = "Network"
"""
============================================================================================================================================
=== USER CONSTANTS ===
============================================================================================================================================
"""
""" ===== COMPUTER CONFIGURATION ===== """
""" Operating system """
POSIX_OS = False
""" ===== ENABLE/DISABLE FUNCTIONALITIES ===== """
ROUTING_ENABLED = True
GRAPH_ENABLED = True
VEHICLE_ENABLED = True
SIMULATION_ENABLED = True
TLL_ENABLED = True
""" ===== MAIN PATHS ===== """
""" ASTRA main directories """
ASTRA_DIRECTORY = os.path.abspath('C:/Temp/astra')
SUMO_TOOLS_DIRECTORY = os.path.abspath('C:/Temp/sumo-0.17.1/tools')
SUMO_BINARY = os.path.abspath('C:/Temp/sumo-0.17.1/bin/sumo-gui')
DUAROUTER_BINARY = os.path.abspath('C:/Temp/sumo-0.17.1/bin/duarouter.exe')
""" ===== ASTRA MAIN DIRECTORIES (DO NOT MODIFY) ===== """
CONFIG_DIRECTORY = ASTRA_DIRECTORY + "/config"
DICT_DIRECTORY = ASTRA_DIRECTORY + "/dict"
LOG_DIRECTORY = ASTRA_DIRECTORY + "/log"
TMP_DIRECTORY = ASTRA_DIRECTORY + "/tmp"
SCREEN_DIRECTORY = ASTRA_DIRECTORY + "/screen"
""" ===== NETWORKS CONFIGURATION ===== """
""" Dublin """
DUBLIN_NETWORK_ID = "Dublin"
DUBLIN_CONFIG_FILE = CONFIG_DIRECTORY + "/Dublin.sumocfg"
DUBLIN_NET_FILE = CONFIG_DIRECTORY + "/Dublin.net.xml"
SUMO_CONFIG_DICT[DUBLIN_NETWORK_ID] = getNetworkFilesDict(DUBLIN_NETWORK_ID, DUBLIN_CONFIG_FILE, DUBLIN_NET_FILE)
""" Chosen network """
SUMO_CHOSEN_NETWORK = DUBLIN_NETWORK_ID
"""
Duration of a SUMO step :
This duration is the one which will be taken into account when calling the traci simulationStep function.
It is not linked to the processing time of a simulation step, but to the simulated time between a step and
an other.
"""
SUMO_SIMULATION_STEP_TIME = 1
"""
Time the simulation thread will sleep between each step iteration. The real time the thread will wait
will be this time minus the computation time of the previous simulation step
"""
SIMULATOR_SLEEP = 1
"""
Time before a car stopped teleports to the end of the edge
If negative, the car won't teleport
"""
SUMO_TIME_BEFORE_TELEPORT = -1
"""
The following constant must contain a regular expression which will be evaluated for
every vehicle when requested for all vehicles information (delete / coordinates / speed / arrived messages)
If you want access the information of a vehicle you ignore, you must specify the corresponding vehicle ID
in one of the previous messages
=> These vehicles are ignored ONLY with messages about ALL VEHICLES
=> These ones cannot be priority
"""
# NOTE(review): as written, "MOC*" matches "MO" followed by zero or more "C"
# characters (e.g. "MO", "MOC", "MOCC"). If the intent is "any vehicle ID
# starting with MOC" (the mock-vehicle prefix used by
# VEHICLE_ADD_RAND_REQUEST_HEADER = "MOC"), the pattern should probably be
# "^(MOC.*)$" -- confirm before changing.
IGNORED_VEHICLES = "^(MOC*)$"
""" ===== SIMULATION REGULAR MESSAGES ===== """
""" Send regular messages even if these ones are empty ? (except the header) """
SEND_MSG_EVEN_IF_EMPTY = False
""" Send regular messages containing vehicles coordinates ? """
SEND_VEHICLES_COORDS = True
""" Send regular messages containing arrived vehicles ID ? """
SEND_ARRIVED_VEHICLES = True
""" ===== TRAFFIC LIGHTS FOR PRIORITY VEHICLES ===== """
# A traffic light will be set to green for the priority vehicle when this one is close enough (distance <= GREEN_LENGTH_ANTICIPATION)
GREEN_LENGTH_ANTICIPATION = 50
# A yellow temporary phase will be set on the junction when this one is close enough (distance <= YELLOW_LENGTH_ANTICIPATION)
YELLOW_LENGTH_ANTICIPATION = 100
"""
============================================================================================================================================
=== SYSTEM CONSTANTS ===
============================================================================================================================================
"""
""" Socket configuration """
TRACI_PORT = 8813
HOST = "127.0.0.1"
GRAPH_INPUT_PORT = 18001
GRAPH_OUTPUT_PORT = 18002
ROUTER_INPUT_PORT = 18003
ROUTER_OUTPUT_PORT = 18004
VEHICLE_INPUT_PORT = 18005
VEHICLE_OUTPUT_PORT = 18006
TLL_INPUT_PORT = 18007
TLL_OUTPUT_PORT = 18008
SIMULATOR_OUTPUT_PORT = 18009
""" Config files """
sys.path.append(SUMO_TOOLS_DIRECTORY)
SUMO_NETWORK_FILE = SUMO_CONFIG_DICT[SUMO_CHOSEN_NETWORK][NETWORK_FILE_KEY]
SUMO_CONFIG_FILE = SUMO_CONFIG_DICT[SUMO_CHOSEN_NETWORK][CONFIG_FILE_KEY]
SUMO_GUI_SETTINGS_FILE = CONFIG_DIRECTORY + "/sumo-gui-settings.xml"
SUMO_JUNCTIONS_DICTIONARY_FILE = DICT_DIRECTORY + "/{}JunctionsDictionary".format(SUMO_CHOSEN_NETWORK)
SUMO_EDGES_DICTIONARY_FILE = DICT_DIRECTORY + "/{}EdgesDictionary".format(SUMO_CHOSEN_NETWORK)
SUMO_TLL_DICTIONARY_FILE = DICT_DIRECTORY + "/{}TrafficLightsDictionary".format(SUMO_CHOSEN_NETWORK)
SUMO_GRAPH_FILE = DICT_DIRECTORY + "/{}GraphDictionary".format(SUMO_CHOSEN_NETWORK)
""" Shared constants """
SLEEP_SYNCHRONISATION = 0.1
MESSAGES_SEPARATOR = "\n"
END_OF_LINE = '\n'
END_OF_MESSAGE = '\n'
EMPTY_STRING = ''
SEPARATOR = ' '
TRACI_EXCEPTION = "FatalTraCIError"
CLOSED_SOCKET_EXCEPTION = "ClosedSocketException"
""" Acknowledge messages """
ACKNOWLEDGE_HEADER = "ACK"
ACK_OK = 0
INVALID_MESSAGE = 40
GRAPH_UNKNOWN_EDGE = 30
GRAPH_INVALID_BLOCK_MSG = 31
ROUTE_ERROR_CONNECTION = 1
DUAROUTER_ERROR_LAUNCH = 2
ROUTE_TIMEOUT_ERROR = 6
ROUTE_INVALID_ALGORITHM = 7
ROUTE_INVALID_GEO = 8
ROUTE_ROUTING_REQUEST_FAILED = 10
VEHICLE_INVALID_ROUTE = 4
VEHICLE_EMPTY_ROUTE = 5
VEHICLE_MOCK_FAILED = 9
VEHICLE_DELETE_FAILED_UNKNOWN = 11
TLL_PHASE_INDEX_ERROR = 21
TLL_PHASE_STATE_ERROR = 22 # Error code returned when a phase state is invalid, or if the current phase index is invalid
TLL_PHASE_DURATION_ERROR = 23
""" Logger """
LOGGER_ID = "sumo"
LOG_LEVEL = logging.INFO
LOG_FORMAT = "%(asctime)s %(levelname)s %(message)s"
NOW = datetime.now()
LOG_FILE_PATH = LOG_DIRECTORY + "/sumo.log.{}.log".format(datetime.strftime(NOW, "%d-%m-%Y_%Hh%Mm%Ss"))
""" Manager """
PRINT_PREFIX_MANAGER = "Manager >>> "
TRACI_CONNECT_MAX_STEPS = 20
SUMO_GUI_QUIT_ON_END = "true"
SUMO_GUI_GAME_MODE = "false"
SUMO_GUI_START_AUTO = "true"
SUMO_GUI_START_COMMAND = "{} -c {} --gui-settings-file {} --step-length {} --time-to-teleport {} --quit-on-end {} --game {} --start {}" .format(SUMO_BINARY, SUMO_CONFIG_FILE, SUMO_GUI_SETTINGS_FILE, SUMO_SIMULATION_STEP_TIME, SUMO_TIME_BEFORE_TELEPORT, SUMO_GUI_QUIT_ON_END, SUMO_GUI_GAME_MODE, SUMO_GUI_START_AUTO)
READY_HEADER = "SOK"
""" Graph """
PRINT_PREFIX_GRAPH = "Graph >>> "
EDGES_COORDS = 1
EDGES_LENGTH = 2
EDGES_CONGESTION = 3
EDGES_SUCCESSORS = 4
GRAPH = 5
ALL_EDGES_COORDS_REQUEST_HEADER = "COO"
EDGES_COORDS_REQUEST_HEADER = "COO"
EDGES_COORDS_RESPONSE_HEADER = "COO"
EDGES_COORDS_END = "END"
EDGES_NUMBER_PER_COORDS_MESSAGE = 500 # If -1, the whole message will be sent
ALL_EDGES_LENGTH_REQUEST_HEADER = "LEN"
EDGES_LENGTH_REQUEST_HEADER = "LEN"
EDGES_LENGTH_RESPONSE_HEADER = "LEN"
EDGES_LENGTH_END = "END"
EDGES_NUMBER_PER_LENGTH_MESSAGE = 500 # If -1, the whole message will be sent
ALL_EDGES_CONGESTION_REQUEST_HEADER = "CON"
EDGES_CONGESTION_REQUEST_HEADER = "CON"
EDGES_CONGESTION_RESPONSE_HEADER = "CON"
EDGES_CONGESTION_END = "END"
EDGES_NUMBER_PER_CONGESTION_MESSAGE = 500 # If -1, the whole message will be sent
ALL_SUCCESSORS_REQUEST_HEADER = "SUC"
SUCCESSORS_REQUEST_HEADER = "SUC"
SUCCESSORS_RESPONSE_HEADER = "SUC"
SUCCESSORS_END = "END"
SUCCESSORS_LIST_SEPARATOR = ","
SUCCESSORS_NUMBER_PER_MESSAGE = 500 # If -1, the whole message will be sent
BLOCK_EDGE_REQUEST_HEADER = "BLO"
BLOCKED_ROUTE_ID_PREFIX = 'BLO'
BLOCKED_VEHICLE_ID_PREFIX = 'BLO'
UNBLOCK_EDGE_REQUEST_HEADER = "UNB"
EDGE_ID_REQUEST_HEADER = "EID"
EDGE_ID_RESPONSE_HEADER = "EID"
""" Route """
PRINT_PREFIX_ROUTER = "Route >>> "
ROUTING_REQUEST_HEADER = "GET"
ROUTING_RESPONSE_HEADER = "ROU"
DIJKSTRA_REQUEST = "DIJ"
DUAROUTER_REQUEST = "DUA"
EDGES_ID = 0
GEOGRAPHIC_COORDS = 1
ERROR_HEADER = "ERR"
""" DijkstraRoute """
PRINT_PREFIX_DIJKSTRA = "DijkstraRoute >>> "
XML_EDGE_ELEMENT = "edge"
XML_EDGE_ID = "id"
XML_EDGE_FROM_JUNCTION = "from"
XML_EDGE_TO_JUNCTION = "to"
XML_LANE_ELEMENT = "lane"
XML_LANE_ID = "id"
XML_LANE_LENGTH = "length"
""" DuarouterRoute """
PRINT_PREFIX_DUAROUTER = "DuarouterRoute >>> "
XML_TRIPS_ELEMENT = "trips"
XML_TRIP_ELEMENT = "trip"
XML_TRIP_ID_ATTRIBUTE = "id"
XML_TRIP_DEPART_ATTRIBUTE = "depart"
XML_TRIP_FROM_ATTRIBUTE = "from"
XML_TRIP_TO_ATTRIBUTE = "to"
XML_ROUTE_ELEMENT = "route"
XML_COST_ATTRIBUTE = "cost"
XML_EDGES_ATTRIBUTE = "edges"
TRIPS_PATH = TMP_DIRECTORY + "/trips.xml"
ROUTES_OUTPUT_PATH = TMP_DIRECTORY + "/result.rou.xml"
ROUTES_ALT_OUTPUT_PATH = TMP_DIRECTORY + "/result.rou.alt.xml"
DUAROUTER_START_COMMAND_TEMPLATE = "{} --ignore-errors --trip-files {} --net-file {} --output-file {}"
DUAROUTER_START_COMMAND = DUAROUTER_START_COMMAND_TEMPLATE.format(DUAROUTER_BINARY, TRIPS_PATH, SUMO_NETWORK_FILE, ROUTES_OUTPUT_PATH)
DUAROUTER_SLEEP_TIME = 0.1
DUAROUTER_MAX_SLEEP_TIME = 20
""" Vehicle """
PRINT_PREFIX_VEHICLE = "Vehicle >>> "
IGNORED_VEHICLES_REGEXP = re.compile(IGNORED_VEHICLES)
VEHICLE_ADD_REQUEST_HEADER = "ADD"
VEHICLE_ADD_RAND_REQUEST_HEADER = "MOC"
VEHICLE_DELETE_REQUEST_HEADER = "DEL"
VEHICLE_SPEED_REQUEST_HEADER = "SPE"
VEHICLE_SPEED_RESPONSE_HEADER = "SPE"
VEHICLE_COORDS_REQUEST_HEADER = "COO"
VEHICLE_COORDS_RESPONSE_HEADER = "COO"
VEHICLE_ARRIVED_REQUEST_HEADER = "ARR"
VEHICLE_ARRIVED_RESPONSE_HEADER = "ARR"
DEFAULT_VEHICLE_TYPE = "DEFAULT_VEHTYPE"
PRIORITY_VEHICLE = '1'
""" TrafficLights """
PRINT_PREFIX_TLL = "TrafficLights >>> "
ALL_TLL_COORDS_REQUEST_HEADER = "COO"
TLL_COORDS_REQUEST_HEADER = "COO"
TLL_COORDS_RESPONSE_HEADER = "COO"
TLL_NUMBER_PER_MESSAGE = 500 # If -1, the whole message will be sent
TLL_POS_END = "END"
TLL_GET_DETAILS_REQUEST_HEADER = "GET"
TLL_GET_DETAILS_RESPONSE_HEADER = "DET"
TLL_SET_DETAILS_REQUEST_HEADER = "SET"
TLL_MIN_PHASE_DURATION = 1000
SCREENSHOT_FILE_NAME = "{}.png"
RED = 'r'
YELLOW = 'y'
GREEN = 'g'
GREEN_PRIO = 'G'
IGNORE = 0
SET_YELLOW = 1
SET_GREEN = 2
# Assumed cruising speed of a priority vehicle, in km/h.
PRIORITY_VEHICLE_KM_PER_HOUR_SPEED = 50
# NOTE(review): despite the name, this value is in metres per second
# (50 km/h * 1000 / 3600 ~= 13.9 m/s), which is consistent with the
# metre-based *_LENGTH_ANTICIPATION distances used below -- consider renaming.
PRIORITY_VEHICLE_KM_PER_SEC_SPEED = PRIORITY_VEHICLE_KM_PER_HOUR_SPEED * 1000 / 3600.0
# Number of simulation steps of anticipation: distance / speed gives seconds,
# divided by the simulated duration of one step.
GREEN_STEPS_ANTICIPATION = int(ceil(GREEN_LENGTH_ANTICIPATION / PRIORITY_VEHICLE_KM_PER_SEC_SPEED / SUMO_SIMULATION_STEP_TIME))
YELLOW_STEPS_ANTICIPATION = int(ceil(YELLOW_LENGTH_ANTICIPATION / PRIORITY_VEHICLE_KM_PER_SEC_SPEED / SUMO_SIMULATION_STEP_TIME))
""" Simulation """
PRINT_PREFIX_SIMULATOR = "Simulation >>> "
class ClosedSocketException(Exception):
    """
    Exception raised when the socket the process is trying to listen on or
    write to has been closed
    """
    pass
| remidomingues/ASTra | astra/constants.py | Python | gpl-3.0 | 11,987 |
import functools
import django
from django.conf import settings
from django.contrib.admin.options import ModelAdmin, csrf_protect_m, InlineModelAdmin
if django.VERSION >= (1, 7):
from django.contrib.admin.utils import (flatten_fieldsets, unquote,
get_deleted_objects)
else:
from django.contrib.admin.util import (flatten_fieldsets, unquote,
get_deleted_objects)
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.db import router, transaction
from django.forms.models import model_to_dict
if django.VERSION >= (1, 7):
from django.forms.utils import ErrorList
else:
from django.forms.util import ErrorList
from django.http import Http404, HttpResponseRedirect, QueryDict
from django.shortcuts import render_to_response
from django.template import TemplateDoesNotExist
from django.template.context import RequestContext
from django.template.loader import select_template
from django.utils.encoding import iri_to_uri, force_text
from django.utils.functional import curry
from django.utils.translation import ugettext_lazy as _, get_language
from hvad.compat import urlencode, urlparse
from hvad.forms import TranslatableModelForm, translatable_inlineformset_factory, translatable_modelform_factory
from hvad.utils import load_translation
from hvad.manager import FALLBACK_LANGUAGES
atomic = (transaction.atomic if django.VERSION >= (1, 6) else
transaction.commit_on_success)
def get_language_name(language_code):
    """Return the display name configured for *language_code*.

    Falls back to the code itself when it is not listed in
    ``settings.LANGUAGES``.
    """
    names = dict(settings.LANGUAGES)
    return names.get(language_code, language_code)
class InlineModelForm(TranslatableModelForm):
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        """Build the form's initial data from the translated instance.

        Mirrors ModelForm.__init__, but derives the initial data from the
        translation of *instance* for ``self.language`` (or the active
        thread language) rather than from the shared (untranslated) model.
        """
        opts = self._meta
        object_data = {}
        language = getattr(self, 'language', get_language())
        if instance is not None:
            # enforce=True creates an (unsaved) translation when none exists.
            trans = load_translation(instance, language, enforce=True)
            if trans.pk:
                object_data = model_to_dict(trans, opts.fields, opts.exclude)
                # Dirty hack that swaps the id from the translation id, to the master id
                # This is necessary, because we in this case get the untranslated instance,
                # and thereafter get the correct translation on save.
                if "id" in object_data:
                    object_data["id"] = trans.master.id
        # Caller-supplied initial values take precedence over model data.
        object_data.update(initial or {})
        # NOTE(review): super() is deliberately anchored at TranslatableModelForm,
        # bypassing its own __init__ and running the plain ModelForm
        # initialisation with the data prepared above -- confirm this is still
        # the intended hvad behavior before refactoring.
        super(TranslatableModelForm, self).__init__(data, files, auto_id,
                                                    prefix, object_data,
                                                    error_class, label_suffix,
                                                    empty_permitted, instance)
class TranslatableModelAdminMixin(object):
    """Shared helpers for admin classes that manage translatable models."""

    # GET parameter carrying the language of the translation being edited.
    query_language_key = 'language'

    def all_translations(self, obj):
        """ Get an HTML-formatted list of all translations, with links to admin pages """
        if obj is None or not obj.pk:
            return ''
        active_language = get_language()
        links = []
        for code in obj.get_available_languages():
            markup = '<a href="%s">%s</a>' % (self.get_url(obj, lang=code), code)
            if code == active_language:
                # Highlight the translation currently being displayed.
                markup = u'<strong>%s</strong>' % markup
            links.append(markup)
        return u', '.join(links)
    all_translations.allow_tags = True
    all_translations.short_description = _('all translations')

    def get_available_languages(self, obj):
        """Return the language codes *obj* is translated into ([] for None)."""
        return [] if obj is None else obj.get_available_languages()

    def get_language_tabs(self, request, available_languages):
        """Build (url, name, code, status) tuples for the language tab bar."""
        selected = self._language(request)
        query = request.GET.copy()
        tabs = []
        for code, name in settings.LANGUAGES:
            query['language'] = code
            if code == selected:
                status = 'current'
            elif code in available_languages:
                status = 'available'
            else:
                status = 'empty'
            tabs.append(('%s?%s' % (request.path, query.urlencode()), name, code, status))
        return tabs

    def _language(self, request):
        # Fall back to the active thread language when no tab was selected.
        return request.GET.get(self.query_language_key, get_language())
class TranslatableAdmin(ModelAdmin, TranslatableModelAdminMixin):
    """Model admin for translatable models: adds language tabs, loads the
    translation matching the request language, and supports deleting a
    single translation without deleting the shared instance."""
    form = TranslatableModelForm
    change_form_template = 'admin/hvad/change_form.html'
    deletion_not_allowed_template = 'admin/hvad/deletion_not_allowed.html'
    def __init__(self, *args, **kwargs):
        super(TranslatableAdmin, self).__init__(*args, **kwargs)
        # Bind URL reversing to this admin site's namespace.
        self.reverse = functools.partial(reverse, current_app=self.admin_site.name)
    def get_url(self, obj, lang=None, get=None):
        """Return the admin change URL for *obj*.

        *lang* pins the language tab; *get* may carry extra query params.
        """
        # BUG FIX: 'get' previously defaulted to a shared mutable {} which
        # was mutated below, so the language key of one call leaked into all
        # later calls. Default to None and copy caller-supplied dicts.
        get = {} if get is None else dict(get)
        ct = ContentType.objects.get_for_model(self.model)
        info = ct.app_label, ct.model
        if lang:
            get.update({self.query_language_key: lang})
        url = '%s?%s' % (self.reverse('admin:%s_%s_change' % info, args=(obj.pk,)), urlencode(get))
        return url
    def get_urls(self):
        # Local import: keeps module import-time free of URLconf dependencies.
        from django.conf.urls import patterns, url
        urlpatterns = super(TranslatableAdmin, self).get_urls()
        if django.VERSION >= (1, 6):
            info = self.model._meta.app_label, self.model._meta.model_name
        else:
            info = self.model._meta.app_label, self.model._meta.module_name
        # Prepend the per-language delete view to the stock admin URLs.
        return patterns('',
            url(r'^(.+)/delete-translation/(.+)/$',
                self.admin_site.admin_view(self.delete_translation),
                name='%s_%s_delete_translation' % info),
        ) + urlpatterns
    def get_form(self, request, obj=None, **kwargs):
        """
        Returns a Form class for use in the admin add view. This is used by
        add_view and change_view.
        """
        if django.VERSION >= (1, 6):
            # From v1.6 on, using get_fieldsets is ok, as long as no 'fields'
            # argument was given. It allows dynamic fieldsets on admin form.
            if 'fields' in kwargs:
                fields = kwargs.pop('fields')
            else:
                fields = flatten_fieldsets(self.get_fieldsets(request, obj))
        else:
            # On previous versions, calling get_fieldsets triggers infinite recursion
            # and we should stick to statically declared fieldsets
            if self.declared_fieldsets:
                fields = flatten_fieldsets(self.declared_fieldsets)
            else:
                fields = None
        exclude = (
            tuple(self.exclude or ()) +
            tuple(kwargs.pop("exclude", ())) +
            self.get_readonly_fields(request, obj)
        )
        old_formfield_callback = curry(self.formfield_for_dbfield, request=request)
        defaults = {
            "form": self.form,
            "fields": fields,
            "exclude": exclude,
            "formfield_callback": old_formfield_callback,
        }
        defaults.update(kwargs)
        # The factory binds the form to the language of the selected tab.
        language = self._language(request)
        return translatable_modelform_factory(language, self.model, **defaults)
    def render_change_form(self, request, context, add=False, change=False,
                           form_url='', obj=None):
        lang_code = self._language(request)
        lang = get_language_name(lang_code)
        available_languages = self.get_available_languages(obj)
        context.update({
            'title': '%s (%s)' % (context['title'], lang),
            'current_is_translated': lang_code in available_languages,
            'allow_deletion': len(available_languages) > 1,
            'language_tabs': self.get_language_tabs(request, available_languages),
            'base_template': self.get_change_form_base_template(),
        })
        # Ensure form action url carries over tab language
        qs_language = request.GET.get('language')
        if qs_language:
            form_url = urlparse(form_url or request.get_full_path())
            query = QueryDict(form_url.query, mutable=True)
            if 'language' not in query:
                query['language'] = qs_language
            form_url = form_url._replace(query=query.urlencode()).geturl()
        return super(TranslatableAdmin, self).render_change_form(request,
                                                                 context,
                                                                 add, change,
                                                                 form_url, obj)
    def response_change(self, request, obj):
        response = super(TranslatableAdmin, self).response_change(request, obj)
        if 'Location' in response:
            uri = iri_to_uri(request.path)
            if django.VERSION >= (1, 6):
                app_label, model_name = self.model._meta.app_label, self.model._meta.model_name
            else:
                app_label, model_name = self.model._meta.app_label, self.model._meta.module_name
            if response['Location'] in (uri, "../add/", self.reverse('admin:%s_%s_add' % (app_label, model_name))):
                if self.query_language_key in request.GET:
                    # Keep the user on the same language tab after saving.
                    response['Location'] = '%s?%s=%s' % (response['Location'],
                        self.query_language_key, request.GET[self.query_language_key])
        return response
    @csrf_protect_m
    @atomic
    def delete_translation(self, request, object_id, language_code):
        "The 'delete translation' admin view for this model."
        opts = self.model._meta
        app_label = opts.app_label
        translations_model = opts.translations_model
        try:
            obj = translations_model.objects.select_related('master').get(
                master__pk=unquote(object_id),
                language_code=language_code)
        except translations_model.DoesNotExist:
            raise Http404
        if not self.has_delete_permission(request, obj):
            raise PermissionDenied
        if len(self.get_available_languages(obj.master)) <= 1:
            # Refuse to delete the only remaining translation.
            return self.deletion_not_allowed(request, obj, language_code)
        using = router.db_for_write(translations_model)
        # Populate deleted_objects, a data structure of all related objects that
        # will also be deleted.
        protected = False
        if django.VERSION >= (1, 8):
            # 1.8 added a model_count element to the returned tuple.
            deleted_objects, model_count, perms_needed, protected = get_deleted_objects(
                [obj], translations_model._meta, request.user, self.admin_site, using)
        else:
            deleted_objects, perms_needed, protected = get_deleted_objects(
                [obj], translations_model._meta, request.user, self.admin_site, using)
        lang = get_language_name(language_code)
        if request.POST: # The user has already confirmed the deletion.
            if perms_needed:
                raise PermissionDenied
            obj_display = '%s translation of %s' % (lang, force_text(obj.master))
            self.log_deletion(request, obj, obj_display)
            self.delete_model_translation(request, obj)
            self.message_user(request,
                _('The %(name)s "%(obj)s" was deleted successfully.') % {
                    'name': force_text(opts.verbose_name),
                    'obj': force_text(obj_display)
                }
            )
            if not self.has_change_permission(request, None):
                return HttpResponseRedirect(self.reverse('admin:index'))
            model_name = opts.model_name if django.VERSION >= (1, 6) else opts.module_name
            return HttpResponseRedirect(self.reverse('admin:%s_%s_changelist' % (opts.app_label, model_name)))
        # GET request: render the confirmation page.
        object_name = '%s Translation' % force_text(opts.verbose_name)
        if perms_needed or protected:
            title = _("Cannot delete %(name)s") % {"name": object_name}
        else:
            title = _("Are you sure?")
        context = {
            "title": title,
            "object_name": object_name,
            "object": obj,
            "deleted_objects": deleted_objects,
            "perms_lacking": perms_needed,
            "protected": protected,
            "opts": opts,
            "app_label": app_label,
        }
        return render_to_response(self.delete_confirmation_template or [
            "admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
            "admin/%s/delete_confirmation.html" % app_label,
            "admin/delete_confirmation.html"
        ], context, RequestContext(request))
    def deletion_not_allowed(self, request, obj, language_code):
        """Render the page explaining the last translation cannot be deleted."""
        opts = self.model._meta
        app_label = opts.app_label
        object_name = force_text(opts.verbose_name)
        context = RequestContext(request)
        context.update({
            'object': obj.master,
            'language_code': language_code,
            'opts': opts,
            'app_label': app_label,
            'language_name': get_language_name(language_code),
            'object_name': object_name,
        })
        return render_to_response(self.deletion_not_allowed_template, context)
    def delete_model_translation(self, request, obj):
        """Overridable hook: delete a single translation record."""
        obj.delete()
    def get_object(self, request, object_id, *args):
        """Return the object with the translation for the request language
        loaded, preparing a new (unsaved) translation when none exists."""
        obj = super(TranslatableAdmin, self).get_object(request, object_id, *args)
        if obj is None: # object was not in queryset, bail out
            return None
        # object was in queryset - need to make sure we got the right translation
        # we use getattr to trigger a load if instance exists but translation was
        # not cached yet. Should not happen with current code, but is correct,
        # future-proof behavior.
        language_code = getattr(obj, 'language_code', None)
        request_lang = self._language(request)
        if language_code is None or language_code != request_lang:
            # if language does not match that of request, we know request_lang
            # does not exist, because it was the first language in the use_fallbacks
            # list. We prepare it as a new translation.
            obj.translate(request_lang)
        return obj
    def get_queryset(self, request):
        """Queryset with the request language first in the fallback chain."""
        language = self._language(request)
        languages = [language,]
        for lang in FALLBACK_LANGUAGES:
            if not lang in languages:
                languages.append(lang)
        qs = self.model._default_manager.untranslated().use_fallbacks(*languages)
        # TODO: this should be handled by some parameter to the ChangeList.
        ordering = getattr(self, 'ordering', None) or () # otherwise we might try to *None, which is bad ;)
        if ordering:
            qs = qs.order_by(*ordering)
        return qs
    if django.VERSION < (1, 8):
        # Pre-1.8 admin calls 'queryset' instead of 'get_queryset'.
        queryset = get_queryset
    def get_change_form_base_template(self):
        """Pick the most specific change_form template available."""
        opts = self.model._meta
        app_label = opts.app_label
        search_templates = [
            "admin/%s/%s/change_form.html" % (app_label, opts.object_name.lower()),
            "admin/%s/change_form.html" % app_label,
            "admin/change_form.html"
        ]
        try:
            return select_template(search_templates)
        except TemplateDoesNotExist:
            return None
class TranslatableInlineModelAdmin(InlineModelAdmin, TranslatableModelAdminMixin):
    """Inline admin counterpart of TranslatableAdmin: binds formsets and
    forms to the language of the selected language tab."""
    form = InlineModelForm
    change_form_template = 'admin/hvad/change_form.html'
    deletion_not_allowed_template = 'admin/hvad/deletion_not_allowed.html'
    def get_formset(self, request, obj=None, **kwargs):
        """Returns a BaseInlineFormSet class for use in admin add/change views."""
        if django.VERSION >= (1, 6):
            # From v1.6 on, using get_fieldsets is ok, as long as no 'fields'
            # argument was given. It allows dynamic fieldsets on admin form.
            if 'fields' in kwargs:
                fields = kwargs.pop('fields')
            else:
                fields = flatten_fieldsets(self.get_fieldsets(request, obj))
        else:
            # On previous versions, calling get_fieldsets triggers infinite recursion
            # and we should stick to statically declared fieldsets
            if self.declared_fieldsets:
                fields = flatten_fieldsets(self.declared_fieldsets)
            else:
                fields = None
        exclude = (
            tuple(self.exclude or ()) +
            tuple(kwargs.pop("exclude", ())) +
            self.get_readonly_fields(request, obj)
        )
        defaults = {
            "form": self.get_form(request, obj, fields=fields),
            "formset": self.formset,
            "fk_name": self.fk_name,
            "fields": fields,
            "exclude": exclude or None,
            "formfield_callback": curry(self.formfield_for_dbfield, request=request),
            "extra": self.extra,
            "max_num": self.max_num,
            "can_delete": self.can_delete,
        }
        defaults.update(kwargs)
        language = self._language(request)
        return translatable_inlineformset_factory(language, self.parent_model, self.model, **defaults)
    def get_urls(self):
        from django.conf.urls import patterns, url
        # NOTE(review): InlineModelAdmin defines no get_urls; this super()
        # call starts the lookup above InlineModelAdmin -- confirm this
        # method is actually reachable before relying on it.
        urlpatterns = super(InlineModelAdmin, self).get_urls()
        # BUG FIX: guard the meta attribute by Django version, as done in
        # TranslatableAdmin.get_urls -- Options.module_name was removed in 1.8.
        if django.VERSION >= (1, 6):
            info = self.model._meta.app_label, self.model._meta.model_name
        else:
            info = self.model._meta.app_label, self.model._meta.module_name
        return patterns('',
            url(r'^(.+)/delete-translation/(.+)/$',
                self.admin_site.admin_view(self.delete_translation),
                name='%s_%s_delete_translation' % info),
        ) + urlpatterns
    def get_form(self, request, obj=None, **kwargs):
        """
        Returns a Form class for use in the admin add view. This is used by
        add_view and change_view.
        """
        if django.VERSION >= (1, 6):
            # From v1.6 on, using get_fieldsets is ok, as long as no 'fields'
            # argument was given. It allows dynamic fieldsets on admin form.
            if 'fields' in kwargs:
                fields = kwargs.pop('fields')
            else:
                fields = flatten_fieldsets(self.get_fieldsets(request, obj))
        else:
            # On previous versions, calling get_fieldsets triggers infinite recursion
            # and we should stick to statically declared fieldsets
            if self.declared_fieldsets:
                fields = flatten_fieldsets(self.declared_fieldsets)
            else:
                fields = None
        exclude = (
            tuple(self.exclude or ()) +
            tuple(kwargs.pop("exclude", ())) +
            self.get_readonly_fields(request, obj)
        )
        old_formfield_callback = curry(self.formfield_for_dbfield, request=request)
        defaults = {
            "form": self.form,
            "fields": fields,
            "exclude": exclude,
            "formfield_callback": old_formfield_callback,
        }
        defaults.update(kwargs)
        language = self._language(request)
        return translatable_modelform_factory(language, self.model, **defaults)
    def response_change(self, request, obj):
        # BUG FIX: this used to call super(TranslatableAdmin, self), which
        # raises TypeError because inline instances are not TranslatableAdmin
        # instances; anchor super() at the defining class instead.
        # NOTE(review): InlineModelAdmin itself defines no response_change,
        # so this only works when mixed into a ModelAdmin subclass -- confirm
        # whether this method is still needed.
        redirect = super(TranslatableInlineModelAdmin, self).response_change(request, obj)
        uri = iri_to_uri(request.path)
        if redirect['Location'] in (uri, "../add/"):
            if self.query_language_key in request.GET:
                # Keep the user on the same language tab after saving.
                redirect['Location'] = '%s?%s=%s' % (redirect['Location'],
                    self.query_language_key, request.GET[self.query_language_key])
        return redirect
    # NOTE: a large commented-out copy of TranslatableAdmin.delete_translation
    # (a stale triple-quoted block) used to live here; it was dead code and
    # has been removed.
    def get_queryset(self, request):
        qs = self.model._default_manager.all()#.language(language)
        # TODO: this should be handled by some parameter to the ChangeList.
        ordering = getattr(self, 'ordering', None) or () # otherwise we might try to *None, which is bad ;)
        if ordering:
            qs = qs.order_by(*ordering)
        return qs
    if django.VERSION < (1, 8):
        # Pre-1.8 admin calls 'queryset' instead of 'get_queryset'.
        queryset = get_queryset
class TranslatableStackedInline(TranslatableInlineModelAdmin):
    # Stacked-layout variant of the translatable inline admin.
    template = 'admin/hvad/edit_inline/stacked.html'
class TranslatableTabularInline(TranslatableInlineModelAdmin):
    # Tabular-layout variant of the translatable inline admin.
    template = 'admin/hvad/edit_inline/tabular.html'
| philippeowagner/django-hvad | hvad/admin.py | Python | bsd-3-clause | 24,414 |
# py27 vs py34 compatibility
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
import unittest
from django.http import QueryDict
from django.conf import settings
# We need to configure Django settings before importing SecureView
# because we are running this test set outside of Django
try:
settings.configure()
except RuntimeError:
pass
from django_rest_secureview.view_rules import *
from . mocks import MockRequest, MockModelManager
class ViewRulesTest(unittest.TestCase):
    """Tests for the bundled view rules (Params, Owner, OwnerParams) and the
    ViewRule base-class error contract. Uses mock requests/model managers
    from the local mocks module."""
    def test_it_can_compare_param_keys(self):
        """
        Confirms that the params in the POST request match
        the specified params for the endpoint
        """
        request = MockRequest(data=QueryDict('dog=1&cat=2'), method="POST")
        view_rule = Params([request], None)
        check_1 = view_rule.errors_found({'params': ['cat', 'dog']})
        check_2 = view_rule.errors_found({'params': ['cat', 'mouse']})
        # errors_found returns (has_errors, response): no error when all
        # required keys are present, a detail naming missing keys otherwise.
        self.assertFalse(check_1[0])
        self.assertEqual(check_2[1].data['detail'], 'Missing keys mouse')
    def test_it_can_check_model_relations(self):
        """
        Confirms that User is present in one of the Model's attributes
        """
        NewUser = MagicMock()
        request_1 = MockRequest()
        request_2 = MockRequest(user=NewUser)
        model = MockModelManager()
        view_rule_1 = Owner([request_1], {'pk': 1})
        view_rule_2 = Owner([request_2], {'pk': 1})
        check_1 = view_rule_1.errors_found({'model': model})
        check_2 = view_rule_2.errors_found({'model': model})
        # A request user unrelated to the model is rejected as unauthorized.
        self.assertFalse(check_1[0])
        self.assertEqual(check_2[1].data['detail'], 'Unauthorized access')
    def test_it_can_check_model_relations_and_params(self):
        """
        Confirms Owner and Params logic are called together
        """
        request = MockRequest(data=QueryDict('dog=1&cat=2'), method="POST")
        model = MockModelManager()
        view_rule = OwnerParams([request], {'pk': 1})
        view_rule.enforce_params = MagicMock(return_value=None)
        view_rule.enforce_owner = MagicMock(return_value=None)
        params = {'model': model, 'params': ['cat', 'dog']}
        check = view_rule.errors_found(params)
        view_rule.enforce_params.assert_called_with(params)
        view_rule.enforce_owner.assert_called_with(params)
        self.assertFalse(check[0])
    def test_it_can_raise_value_error_for_response(self):
        """
        Custom implementations of ViewRule.enforce should return a Response
        type object or None.
        """
        class CustomRule(ViewRule):
            def enforce(self, params=None):
                return 1
        request = MockRequest()
        view_rule = CustomRule([request], {'pk': 2})
        with self.assertRaises(ValueError):
            view_rule.errors_found()
    def test_it_can_raise_an_attribute_error_for_pk(self):
        """
        Owner ViewRule expects to be called on a detailed route
        """
        # NOTE(review): the test name says AttributeError but the code asserts
        # AssertionError (which matches Owner's pk check) -- consider renaming.
        request = MockRequest()
        model = MockModelManager()
        view_rule = Owner([request], None)
        with self.assertRaises(AssertionError):
            view_rule.errors_found({'model': model})
# Allow running the suite directly with `python test_view_rules.py`.
if __name__ == '__main__':
    unittest.main()
| fmitra/django-rest-secureview | tests/test_view_rules.py | Python | mit | 3,284 |
import logging
import click
import click_log
from flask.cli import with_appcontext
from ..account import github
from . import tasks
from .models import Project
logger = logging.getLogger(__name__)
click_log.basic_config(logger)
@click.command("projects")
@click_log.simple_verbosity_option(logger)
@with_appcontext
def sync_projects():
    "Syncs projects"
    # Thin CLI wrapper: the actual synchronization lives in tasks.sync_projects()
    # so the same code can be invoked from schedulers as well as the CLI.
    tasks.sync_projects()
@click.command("project_members")
@click_log.simple_verbosity_option(logger)
@with_appcontext
def sync_project_members():
    "Syncs project members"
    # Thin CLI wrapper around the shared task implementation.
    tasks.sync_project_members()
@click.command("new_upload_notifications")
@click.option("--project_id", "-p", default=None)
@click_log.simple_verbosity_option(logger)
@with_appcontext
def send_new_upload_notifications(project_id):
    "Sends notifications for new uploads"
    # project_id is optional; presumably None means "no project filter" —
    # TODO confirm against tasks.send_new_upload_notifications.
    tasks.send_new_upload_notifications(project_id)
@click.command("project_team")
@click.argument("name")
@click_log.simple_verbosity_option(logger)
@with_appcontext
def sync_project_team(name):
    "Syncs (create/populate) project team"
    project = Project.query.filter(Project.name == name).first()
    if project is None:
        # Fail fast with a clear message instead of an AttributeError when
        # the given name matches no project.
        logger.error(f"Project {name} not found")
        return
    team_response = github.get_project_team(project.name)
    if team_response.status_code == 404:
        logger.info(f"Project team {name} doesn't exist yet. Creating..")
        team_response = project.create_team()
    elif team_response.status_code == 200:
        logger.info(f"Project team {name} already exists.")
    # Surface any other HTTP error (auth failure, rate limit, ...) right away.
    team_response.raise_for_status()
    if team_response:
        team_data = team_response.json()
        for lead in project.lead_members.all():
            # Use the module logger (configured via click_log) instead of the
            # root logger so the --verbosity option is honored.
            logger.info(f"Adding @{lead.login} to project team {name}")
            member_response = github.join_team(team_data["slug"], lead.login)
            member_response.raise_for_status()
    else:
        logger.error(
            f"Something went wrong while syncing project team for project {name}"
        )
| jazzband/site | jazzband/projects/commands.py | Python | mit | 1,877 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._models_py3 import AccessInformationCollection
from ._models_py3 import AccessInformationContract
from ._models_py3 import AccessInformationCreateParameters
from ._models_py3 import AccessInformationSecretsContract
from ._models_py3 import AccessInformationUpdateParameters
from ._models_py3 import AdditionalLocation
from ._models_py3 import ApiCollection
from ._models_py3 import ApiContactInformation
from ._models_py3 import ApiContract
from ._models_py3 import ApiContractProperties
from ._models_py3 import ApiContractUpdateProperties
from ._models_py3 import ApiCreateOrUpdateParameter
from ._models_py3 import ApiCreateOrUpdateProperties
from ._models_py3 import ApiCreateOrUpdatePropertiesWsdlSelector
from ._models_py3 import ApiEntityBaseContract
from ._models_py3 import ApiExportResult
from ._models_py3 import ApiExportResultValue
from ._models_py3 import ApiLicenseInformation
from ._models_py3 import ApiManagementServiceApplyNetworkConfigurationParameters
from ._models_py3 import ApiManagementServiceBackupRestoreParameters
from ._models_py3 import ApiManagementServiceBaseProperties
from ._models_py3 import ApiManagementServiceCheckNameAvailabilityParameters
from ._models_py3 import ApiManagementServiceGetDomainOwnershipIdentifierResult
from ._models_py3 import ApiManagementServiceGetSsoTokenResult
from ._models_py3 import ApiManagementServiceIdentity
from ._models_py3 import ApiManagementServiceListResult
from ._models_py3 import ApiManagementServiceNameAvailabilityResult
from ._models_py3 import ApiManagementServiceProperties
from ._models_py3 import ApiManagementServiceResource
from ._models_py3 import ApiManagementServiceSkuProperties
from ._models_py3 import ApiManagementServiceUpdateParameters
from ._models_py3 import ApiManagementServiceUpdateProperties
from ._models_py3 import ApiManagementSku
from ._models_py3 import ApiManagementSkuCapabilities
from ._models_py3 import ApiManagementSkuCapacity
from ._models_py3 import ApiManagementSkuCosts
from ._models_py3 import ApiManagementSkuLocationInfo
from ._models_py3 import ApiManagementSkuRestrictionInfo
from ._models_py3 import ApiManagementSkuRestrictions
from ._models_py3 import ApiManagementSkuZoneDetails
from ._models_py3 import ApiManagementSkusResult
from ._models_py3 import ApiReleaseCollection
from ._models_py3 import ApiReleaseContract
from ._models_py3 import ApiRevisionCollection
from ._models_py3 import ApiRevisionContract
from ._models_py3 import ApiRevisionInfoContract
from ._models_py3 import ApiTagResourceContractProperties
from ._models_py3 import ApiUpdateContract
from ._models_py3 import ApiVersionConstraint
from ._models_py3 import ApiVersionSetCollection
from ._models_py3 import ApiVersionSetContract
from ._models_py3 import ApiVersionSetContractDetails
from ._models_py3 import ApiVersionSetContractProperties
from ._models_py3 import ApiVersionSetEntityBase
from ._models_py3 import ApiVersionSetUpdateParameters
from ._models_py3 import ApiVersionSetUpdateParametersProperties
from ._models_py3 import ApimResource
from ._models_py3 import ArmIdWrapper
from ._models_py3 import AssociationContract
from ._models_py3 import AuthenticationSettingsContract
from ._models_py3 import AuthorizationServerCollection
from ._models_py3 import AuthorizationServerContract
from ._models_py3 import AuthorizationServerContractBaseProperties
from ._models_py3 import AuthorizationServerContractProperties
from ._models_py3 import AuthorizationServerSecretsContract
from ._models_py3 import AuthorizationServerUpdateContract
from ._models_py3 import AuthorizationServerUpdateContractProperties
from ._models_py3 import BackendAuthorizationHeaderCredentials
from ._models_py3 import BackendBaseParameters
from ._models_py3 import BackendCollection
from ._models_py3 import BackendContract
from ._models_py3 import BackendContractProperties
from ._models_py3 import BackendCredentialsContract
from ._models_py3 import BackendProperties
from ._models_py3 import BackendProxyContract
from ._models_py3 import BackendReconnectContract
from ._models_py3 import BackendServiceFabricClusterProperties
from ._models_py3 import BackendTlsProperties
from ._models_py3 import BackendUpdateParameterProperties
from ._models_py3 import BackendUpdateParameters
from ._models_py3 import BodyDiagnosticSettings
from ._models_py3 import CacheCollection
from ._models_py3 import CacheContract
from ._models_py3 import CacheUpdateParameters
from ._models_py3 import CertificateCollection
from ._models_py3 import CertificateConfiguration
from ._models_py3 import CertificateContract
from ._models_py3 import CertificateCreateOrUpdateParameters
from ._models_py3 import CertificateInformation
from ._models_py3 import ClientSecretContract
from ._models_py3 import ConnectivityCheckRequest
from ._models_py3 import ConnectivityCheckRequestDestination
from ._models_py3 import ConnectivityCheckRequestProtocolConfiguration
from ._models_py3 import ConnectivityCheckRequestProtocolConfigurationHTTPConfiguration
from ._models_py3 import ConnectivityCheckRequestSource
from ._models_py3 import ConnectivityCheckResponse
from ._models_py3 import ConnectivityHop
from ._models_py3 import ConnectivityIssue
from ._models_py3 import ConnectivityStatusContract
from ._models_py3 import ContentItemCollection
from ._models_py3 import ContentItemContract
from ._models_py3 import ContentTypeCollection
from ._models_py3 import ContentTypeContract
from ._models_py3 import DataMasking
from ._models_py3 import DataMaskingEntity
from ._models_py3 import DeletedServiceContract
from ._models_py3 import DeletedServicesCollection
from ._models_py3 import DeployConfigurationParameters
from ._models_py3 import DiagnosticCollection
from ._models_py3 import DiagnosticContract
from ._models_py3 import EmailTemplateCollection
from ._models_py3 import EmailTemplateContract
from ._models_py3 import EmailTemplateParametersContractProperties
from ._models_py3 import EmailTemplateUpdateParameters
from ._models_py3 import EndpointDependency
from ._models_py3 import EndpointDetail
from ._models_py3 import ErrorFieldContract
from ._models_py3 import ErrorResponse
from ._models_py3 import ErrorResponseBody
from ._models_py3 import GatewayCertificateAuthorityCollection
from ._models_py3 import GatewayCertificateAuthorityContract
from ._models_py3 import GatewayCollection
from ._models_py3 import GatewayContract
from ._models_py3 import GatewayHostnameConfigurationCollection
from ._models_py3 import GatewayHostnameConfigurationContract
from ._models_py3 import GatewayKeyRegenerationRequestContract
from ._models_py3 import GatewayKeysContract
from ._models_py3 import GatewayTokenContract
from ._models_py3 import GatewayTokenRequestContract
from ._models_py3 import GenerateSsoUrlResult
from ._models_py3 import GlobalSchemaCollection
from ._models_py3 import GlobalSchemaContract
from ._models_py3 import GroupCollection
from ._models_py3 import GroupContract
from ._models_py3 import GroupContractProperties
from ._models_py3 import GroupCreateParameters
from ._models_py3 import GroupUpdateParameters
from ._models_py3 import HTTPHeader
from ._models_py3 import HostnameConfiguration
from ._models_py3 import HttpMessageDiagnostic
from ._models_py3 import IdentityProviderBaseParameters
from ._models_py3 import IdentityProviderContract
from ._models_py3 import IdentityProviderContractProperties
from ._models_py3 import IdentityProviderCreateContract
from ._models_py3 import IdentityProviderCreateContractProperties
from ._models_py3 import IdentityProviderList
from ._models_py3 import IdentityProviderUpdateParameters
from ._models_py3 import IdentityProviderUpdateProperties
from ._models_py3 import IssueAttachmentCollection
from ._models_py3 import IssueAttachmentContract
from ._models_py3 import IssueCollection
from ._models_py3 import IssueCommentCollection
from ._models_py3 import IssueCommentContract
from ._models_py3 import IssueContract
from ._models_py3 import IssueContractBaseProperties
from ._models_py3 import IssueContractProperties
from ._models_py3 import IssueUpdateContract
from ._models_py3 import IssueUpdateContractProperties
from ._models_py3 import KeyVaultContractCreateProperties
from ._models_py3 import KeyVaultContractProperties
from ._models_py3 import KeyVaultLastAccessStatusContractProperties
from ._models_py3 import LoggerCollection
from ._models_py3 import LoggerContract
from ._models_py3 import LoggerUpdateContract
from ._models_py3 import NamedValueCollection
from ._models_py3 import NamedValueContract
from ._models_py3 import NamedValueContractProperties
from ._models_py3 import NamedValueCreateContract
from ._models_py3 import NamedValueCreateContractProperties
from ._models_py3 import NamedValueEntityBaseParameters
from ._models_py3 import NamedValueSecretContract
from ._models_py3 import NamedValueUpdateParameterProperties
from ._models_py3 import NamedValueUpdateParameters
from ._models_py3 import NetworkStatusContract
from ._models_py3 import NetworkStatusContractByLocation
from ._models_py3 import NotificationCollection
from ._models_py3 import NotificationContract
from ._models_py3 import OAuth2AuthenticationSettingsContract
from ._models_py3 import OpenIdAuthenticationSettingsContract
from ._models_py3 import OpenIdConnectProviderCollection
from ._models_py3 import OpenidConnectProviderContract
from ._models_py3 import OpenidConnectProviderUpdateContract
from ._models_py3 import Operation
from ._models_py3 import OperationCollection
from ._models_py3 import OperationContract
from ._models_py3 import OperationContractProperties
from ._models_py3 import OperationDisplay
from ._models_py3 import OperationEntityBaseContract
from ._models_py3 import OperationListResult
from ._models_py3 import OperationResultContract
from ._models_py3 import OperationResultLogItemContract
from ._models_py3 import OperationTagResourceContractProperties
from ._models_py3 import OperationUpdateContract
from ._models_py3 import OperationUpdateContractProperties
from ._models_py3 import OutboundEnvironmentEndpoint
from ._models_py3 import OutboundEnvironmentEndpointList
from ._models_py3 import ParameterContract
from ._models_py3 import ParameterExampleContract
from ._models_py3 import PipelineDiagnosticSettings
from ._models_py3 import PolicyCollection
from ._models_py3 import PolicyContract
from ._models_py3 import PolicyDescriptionCollection
from ._models_py3 import PolicyDescriptionContract
from ._models_py3 import PortalDelegationSettings
from ._models_py3 import PortalRevisionCollection
from ._models_py3 import PortalRevisionContract
from ._models_py3 import PortalSettingValidationKeyContract
from ._models_py3 import PortalSettingsCollection
from ._models_py3 import PortalSettingsContract
from ._models_py3 import PortalSigninSettings
from ._models_py3 import PortalSignupSettings
from ._models_py3 import PrivateEndpoint
from ._models_py3 import PrivateEndpointConnection
from ._models_py3 import PrivateEndpointConnectionListResult
from ._models_py3 import PrivateEndpointConnectionRequest
from ._models_py3 import PrivateEndpointConnectionRequestProperties
from ._models_py3 import PrivateLinkResource
from ._models_py3 import PrivateLinkResourceListResult
from ._models_py3 import PrivateLinkServiceConnectionState
from ._models_py3 import ProductCollection
from ._models_py3 import ProductContract
from ._models_py3 import ProductContractProperties
from ._models_py3 import ProductEntityBaseParameters
from ._models_py3 import ProductTagResourceContractProperties
from ._models_py3 import ProductUpdateParameters
from ._models_py3 import ProductUpdateProperties
from ._models_py3 import QuotaCounterCollection
from ._models_py3 import QuotaCounterContract
from ._models_py3 import QuotaCounterValueContract
from ._models_py3 import QuotaCounterValueContractProperties
from ._models_py3 import QuotaCounterValueUpdateContract
from ._models_py3 import RecipientEmailCollection
from ._models_py3 import RecipientEmailContract
from ._models_py3 import RecipientUserCollection
from ._models_py3 import RecipientUserContract
from ._models_py3 import RecipientsContractProperties
from ._models_py3 import RegionContract
from ._models_py3 import RegionListResult
from ._models_py3 import RegistrationDelegationSettingsProperties
from ._models_py3 import RemotePrivateEndpointConnectionWrapper
from ._models_py3 import ReportCollection
from ._models_py3 import ReportRecordContract
from ._models_py3 import RepresentationContract
from ._models_py3 import RequestContract
from ._models_py3 import RequestReportCollection
from ._models_py3 import RequestReportRecordContract
from ._models_py3 import Resource
from ._models_py3 import ResourceLocationDataContract
from ._models_py3 import ResourceSku
from ._models_py3 import ResourceSkuCapacity
from ._models_py3 import ResourceSkuResult
from ._models_py3 import ResourceSkuResults
from ._models_py3 import ResponseContract
from ._models_py3 import SamplingSettings
from ._models_py3 import SaveConfigurationParameter
from ._models_py3 import SchemaCollection
from ._models_py3 import SchemaContract
from ._models_py3 import SubscriptionCollection
from ._models_py3 import SubscriptionContract
from ._models_py3 import SubscriptionCreateParameters
from ._models_py3 import SubscriptionKeyParameterNamesContract
from ._models_py3 import SubscriptionKeysContract
from ._models_py3 import SubscriptionUpdateParameters
from ._models_py3 import SubscriptionsDelegationSettingsProperties
from ._models_py3 import SystemData
from ._models_py3 import TagCollection
from ._models_py3 import TagContract
from ._models_py3 import TagCreateUpdateParameters
from ._models_py3 import TagDescriptionBaseProperties
from ._models_py3 import TagDescriptionCollection
from ._models_py3 import TagDescriptionContract
from ._models_py3 import TagDescriptionContractProperties
from ._models_py3 import TagDescriptionCreateParameters
from ._models_py3 import TagResourceCollection
from ._models_py3 import TagResourceContract
from ._models_py3 import TagResourceContractProperties
from ._models_py3 import TenantConfigurationSyncStateContract
from ._models_py3 import TenantSettingsCollection
from ._models_py3 import TenantSettingsContract
from ._models_py3 import TermsOfServiceProperties
from ._models_py3 import TokenBodyParameterContract
from ._models_py3 import UserCollection
from ._models_py3 import UserContract
from ._models_py3 import UserContractProperties
from ._models_py3 import UserCreateParameterProperties
from ._models_py3 import UserCreateParameters
from ._models_py3 import UserEntityBaseParameters
from ._models_py3 import UserIdentityCollection
from ._models_py3 import UserIdentityContract
from ._models_py3 import UserIdentityProperties
from ._models_py3 import UserTokenParameters
from ._models_py3 import UserTokenResult
from ._models_py3 import UserUpdateParameters
from ._models_py3 import UserUpdateParametersProperties
from ._models_py3 import VirtualNetworkConfiguration
from ._models_py3 import X509CertificateName
from ._api_management_client_enums import (
AccessIdName,
AccessType,
AlwaysLog,
ApiManagementSkuCapacityScaleType,
ApiManagementSkuRestrictionsReasonCode,
ApiManagementSkuRestrictionsType,
ApiType,
ApiVersionSetContractDetailsVersioningScheme,
ApimIdentityType,
AppType,
AsyncOperationStatus,
AuthorizationMethod,
BackendProtocol,
BearerTokenSendingMethod,
BearerTokenSendingMethods,
CertificateConfigurationStoreName,
CertificateSource,
CertificateStatus,
ClientAuthenticationMethod,
ConfigurationIdName,
Confirmation,
ConnectionStatus,
ConnectivityCheckProtocol,
ConnectivityStatusType,
ContentFormat,
CreatedByType,
DataMaskingMode,
ExportApi,
ExportFormat,
ExportResultFormat,
GrantType,
GroupType,
HostnameType,
HttpCorrelationProtocol,
IdentityProviderType,
IssueType,
KeyType,
LoggerType,
Method,
NameAvailabilityReason,
NotificationName,
OperationNameFormat,
Origin,
PlatformVersion,
PolicyContentFormat,
PolicyExportFormat,
PolicyIdName,
PolicyScopeContract,
PortalRevisionStatus,
PreferredIPVersion,
PrivateEndpointConnectionProvisioningState,
PrivateEndpointServiceConnectionStatus,
ProductState,
Protocol,
PublicNetworkAccess,
ResourceSkuCapacityScaleType,
SamplingType,
SchemaType,
SettingsTypeName,
Severity,
SkuType,
SoapApiType,
State,
SubscriptionState,
TemplateName,
UserState,
Verbosity,
VersioningScheme,
VirtualNetworkType,
)
__all__ = [
'AccessInformationCollection',
'AccessInformationContract',
'AccessInformationCreateParameters',
'AccessInformationSecretsContract',
'AccessInformationUpdateParameters',
'AdditionalLocation',
'ApiCollection',
'ApiContactInformation',
'ApiContract',
'ApiContractProperties',
'ApiContractUpdateProperties',
'ApiCreateOrUpdateParameter',
'ApiCreateOrUpdateProperties',
'ApiCreateOrUpdatePropertiesWsdlSelector',
'ApiEntityBaseContract',
'ApiExportResult',
'ApiExportResultValue',
'ApiLicenseInformation',
'ApiManagementServiceApplyNetworkConfigurationParameters',
'ApiManagementServiceBackupRestoreParameters',
'ApiManagementServiceBaseProperties',
'ApiManagementServiceCheckNameAvailabilityParameters',
'ApiManagementServiceGetDomainOwnershipIdentifierResult',
'ApiManagementServiceGetSsoTokenResult',
'ApiManagementServiceIdentity',
'ApiManagementServiceListResult',
'ApiManagementServiceNameAvailabilityResult',
'ApiManagementServiceProperties',
'ApiManagementServiceResource',
'ApiManagementServiceSkuProperties',
'ApiManagementServiceUpdateParameters',
'ApiManagementServiceUpdateProperties',
'ApiManagementSku',
'ApiManagementSkuCapabilities',
'ApiManagementSkuCapacity',
'ApiManagementSkuCosts',
'ApiManagementSkuLocationInfo',
'ApiManagementSkuRestrictionInfo',
'ApiManagementSkuRestrictions',
'ApiManagementSkuZoneDetails',
'ApiManagementSkusResult',
'ApiReleaseCollection',
'ApiReleaseContract',
'ApiRevisionCollection',
'ApiRevisionContract',
'ApiRevisionInfoContract',
'ApiTagResourceContractProperties',
'ApiUpdateContract',
'ApiVersionConstraint',
'ApiVersionSetCollection',
'ApiVersionSetContract',
'ApiVersionSetContractDetails',
'ApiVersionSetContractProperties',
'ApiVersionSetEntityBase',
'ApiVersionSetUpdateParameters',
'ApiVersionSetUpdateParametersProperties',
'ApimResource',
'ArmIdWrapper',
'AssociationContract',
'AuthenticationSettingsContract',
'AuthorizationServerCollection',
'AuthorizationServerContract',
'AuthorizationServerContractBaseProperties',
'AuthorizationServerContractProperties',
'AuthorizationServerSecretsContract',
'AuthorizationServerUpdateContract',
'AuthorizationServerUpdateContractProperties',
'BackendAuthorizationHeaderCredentials',
'BackendBaseParameters',
'BackendCollection',
'BackendContract',
'BackendContractProperties',
'BackendCredentialsContract',
'BackendProperties',
'BackendProxyContract',
'BackendReconnectContract',
'BackendServiceFabricClusterProperties',
'BackendTlsProperties',
'BackendUpdateParameterProperties',
'BackendUpdateParameters',
'BodyDiagnosticSettings',
'CacheCollection',
'CacheContract',
'CacheUpdateParameters',
'CertificateCollection',
'CertificateConfiguration',
'CertificateContract',
'CertificateCreateOrUpdateParameters',
'CertificateInformation',
'ClientSecretContract',
'ConnectivityCheckRequest',
'ConnectivityCheckRequestDestination',
'ConnectivityCheckRequestProtocolConfiguration',
'ConnectivityCheckRequestProtocolConfigurationHTTPConfiguration',
'ConnectivityCheckRequestSource',
'ConnectivityCheckResponse',
'ConnectivityHop',
'ConnectivityIssue',
'ConnectivityStatusContract',
'ContentItemCollection',
'ContentItemContract',
'ContentTypeCollection',
'ContentTypeContract',
'DataMasking',
'DataMaskingEntity',
'DeletedServiceContract',
'DeletedServicesCollection',
'DeployConfigurationParameters',
'DiagnosticCollection',
'DiagnosticContract',
'EmailTemplateCollection',
'EmailTemplateContract',
'EmailTemplateParametersContractProperties',
'EmailTemplateUpdateParameters',
'EndpointDependency',
'EndpointDetail',
'ErrorFieldContract',
'ErrorResponse',
'ErrorResponseBody',
'GatewayCertificateAuthorityCollection',
'GatewayCertificateAuthorityContract',
'GatewayCollection',
'GatewayContract',
'GatewayHostnameConfigurationCollection',
'GatewayHostnameConfigurationContract',
'GatewayKeyRegenerationRequestContract',
'GatewayKeysContract',
'GatewayTokenContract',
'GatewayTokenRequestContract',
'GenerateSsoUrlResult',
'GlobalSchemaCollection',
'GlobalSchemaContract',
'GroupCollection',
'GroupContract',
'GroupContractProperties',
'GroupCreateParameters',
'GroupUpdateParameters',
'HTTPHeader',
'HostnameConfiguration',
'HttpMessageDiagnostic',
'IdentityProviderBaseParameters',
'IdentityProviderContract',
'IdentityProviderContractProperties',
'IdentityProviderCreateContract',
'IdentityProviderCreateContractProperties',
'IdentityProviderList',
'IdentityProviderUpdateParameters',
'IdentityProviderUpdateProperties',
'IssueAttachmentCollection',
'IssueAttachmentContract',
'IssueCollection',
'IssueCommentCollection',
'IssueCommentContract',
'IssueContract',
'IssueContractBaseProperties',
'IssueContractProperties',
'IssueUpdateContract',
'IssueUpdateContractProperties',
'KeyVaultContractCreateProperties',
'KeyVaultContractProperties',
'KeyVaultLastAccessStatusContractProperties',
'LoggerCollection',
'LoggerContract',
'LoggerUpdateContract',
'NamedValueCollection',
'NamedValueContract',
'NamedValueContractProperties',
'NamedValueCreateContract',
'NamedValueCreateContractProperties',
'NamedValueEntityBaseParameters',
'NamedValueSecretContract',
'NamedValueUpdateParameterProperties',
'NamedValueUpdateParameters',
'NetworkStatusContract',
'NetworkStatusContractByLocation',
'NotificationCollection',
'NotificationContract',
'OAuth2AuthenticationSettingsContract',
'OpenIdAuthenticationSettingsContract',
'OpenIdConnectProviderCollection',
'OpenidConnectProviderContract',
'OpenidConnectProviderUpdateContract',
'Operation',
'OperationCollection',
'OperationContract',
'OperationContractProperties',
'OperationDisplay',
'OperationEntityBaseContract',
'OperationListResult',
'OperationResultContract',
'OperationResultLogItemContract',
'OperationTagResourceContractProperties',
'OperationUpdateContract',
'OperationUpdateContractProperties',
'OutboundEnvironmentEndpoint',
'OutboundEnvironmentEndpointList',
'ParameterContract',
'ParameterExampleContract',
'PipelineDiagnosticSettings',
'PolicyCollection',
'PolicyContract',
'PolicyDescriptionCollection',
'PolicyDescriptionContract',
'PortalDelegationSettings',
'PortalRevisionCollection',
'PortalRevisionContract',
'PortalSettingValidationKeyContract',
'PortalSettingsCollection',
'PortalSettingsContract',
'PortalSigninSettings',
'PortalSignupSettings',
'PrivateEndpoint',
'PrivateEndpointConnection',
'PrivateEndpointConnectionListResult',
'PrivateEndpointConnectionRequest',
'PrivateEndpointConnectionRequestProperties',
'PrivateLinkResource',
'PrivateLinkResourceListResult',
'PrivateLinkServiceConnectionState',
'ProductCollection',
'ProductContract',
'ProductContractProperties',
'ProductEntityBaseParameters',
'ProductTagResourceContractProperties',
'ProductUpdateParameters',
'ProductUpdateProperties',
'QuotaCounterCollection',
'QuotaCounterContract',
'QuotaCounterValueContract',
'QuotaCounterValueContractProperties',
'QuotaCounterValueUpdateContract',
'RecipientEmailCollection',
'RecipientEmailContract',
'RecipientUserCollection',
'RecipientUserContract',
'RecipientsContractProperties',
'RegionContract',
'RegionListResult',
'RegistrationDelegationSettingsProperties',
'RemotePrivateEndpointConnectionWrapper',
'ReportCollection',
'ReportRecordContract',
'RepresentationContract',
'RequestContract',
'RequestReportCollection',
'RequestReportRecordContract',
'Resource',
'ResourceLocationDataContract',
'ResourceSku',
'ResourceSkuCapacity',
'ResourceSkuResult',
'ResourceSkuResults',
'ResponseContract',
'SamplingSettings',
'SaveConfigurationParameter',
'SchemaCollection',
'SchemaContract',
'SubscriptionCollection',
'SubscriptionContract',
'SubscriptionCreateParameters',
'SubscriptionKeyParameterNamesContract',
'SubscriptionKeysContract',
'SubscriptionUpdateParameters',
'SubscriptionsDelegationSettingsProperties',
'SystemData',
'TagCollection',
'TagContract',
'TagCreateUpdateParameters',
'TagDescriptionBaseProperties',
'TagDescriptionCollection',
'TagDescriptionContract',
'TagDescriptionContractProperties',
'TagDescriptionCreateParameters',
'TagResourceCollection',
'TagResourceContract',
'TagResourceContractProperties',
'TenantConfigurationSyncStateContract',
'TenantSettingsCollection',
'TenantSettingsContract',
'TermsOfServiceProperties',
'TokenBodyParameterContract',
'UserCollection',
'UserContract',
'UserContractProperties',
'UserCreateParameterProperties',
'UserCreateParameters',
'UserEntityBaseParameters',
'UserIdentityCollection',
'UserIdentityContract',
'UserIdentityProperties',
'UserTokenParameters',
'UserTokenResult',
'UserUpdateParameters',
'UserUpdateParametersProperties',
'VirtualNetworkConfiguration',
'X509CertificateName',
'AccessIdName',
'AccessType',
'AlwaysLog',
'ApiManagementSkuCapacityScaleType',
'ApiManagementSkuRestrictionsReasonCode',
'ApiManagementSkuRestrictionsType',
'ApiType',
'ApiVersionSetContractDetailsVersioningScheme',
'ApimIdentityType',
'AppType',
'AsyncOperationStatus',
'AuthorizationMethod',
'BackendProtocol',
'BearerTokenSendingMethod',
'BearerTokenSendingMethods',
'CertificateConfigurationStoreName',
'CertificateSource',
'CertificateStatus',
'ClientAuthenticationMethod',
'ConfigurationIdName',
'Confirmation',
'ConnectionStatus',
'ConnectivityCheckProtocol',
'ConnectivityStatusType',
'ContentFormat',
'CreatedByType',
'DataMaskingMode',
'ExportApi',
'ExportFormat',
'ExportResultFormat',
'GrantType',
'GroupType',
'HostnameType',
'HttpCorrelationProtocol',
'IdentityProviderType',
'IssueType',
'KeyType',
'LoggerType',
'Method',
'NameAvailabilityReason',
'NotificationName',
'OperationNameFormat',
'Origin',
'PlatformVersion',
'PolicyContentFormat',
'PolicyExportFormat',
'PolicyIdName',
'PolicyScopeContract',
'PortalRevisionStatus',
'PreferredIPVersion',
'PrivateEndpointConnectionProvisioningState',
'PrivateEndpointServiceConnectionStatus',
'ProductState',
'Protocol',
'PublicNetworkAccess',
'ResourceSkuCapacityScaleType',
'SamplingType',
'SchemaType',
'SettingsTypeName',
'Severity',
'SkuType',
'SoapApiType',
'State',
'SubscriptionState',
'TemplateName',
'UserState',
'Verbosity',
'VersioningScheme',
'VirtualNetworkType',
]
| Azure/azure-sdk-for-python | sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/models/__init__.py | Python | mit | 28,583 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
####################################################################################################
#
# Author: wishinlife
# QQ: 57956720
# E-Mail: wishinlife@gmail.com, wishinlife@qq.com
# Web Home: http://syncyhome.duapp.com, http://hi.baidu.com/wishinlife
# Update date: 2015-02-02
# VERSION: 2.1.2
# Required packages: kmod-nls-utf8, libopenssl, libcurl, python, python-curl
#
####################################################################################################
import os
import stat
import sys
import time
import re
import struct
import hashlib
import zlib
from urllib import urlencode # , quote_plus
import threading
import traceback
import json
import fcntl
# if '/usr/lib/python2.7/site-packages' not in sys.path:
# sys.path.append('/usr/lib/python2.7/site-packages')
import pycurl
# import binascii
# import fileinput
# Python 2 hack: force the process-wide default string encoding to UTF-8 so
# implicit str<->unicode conversions don't raise UnicodeDecodeError.
# reload(sys) is required because site.py removes setdefaultencoding at
# interpreter startup.
if sys.getdefaultencoding() != 'utf-8':
    reload(sys)
    sys.setdefaultencoding('utf-8')
# set config_file and pidfile for your config storage path.
__CONFIG_FILE__ = '/etc/config/syncy'  # UCI-style config file read at startup
__PIDFILE__ = '/var/run/syncy.pid'  # pid file used for single-instance check
# Don't modify the following.
__VERSION__ = '2.1.2'
__DEBUG__ = False  # True
class SyncY():
synccount = 0
errorcount = 0
failcount = 0
EXLock = threading.Lock()
TaskSemaphore = None
oldSTDERR = None
oldSTDOUT = None
LogLock = threading.Lock()
syncydb = None
sydb = None
sydblen = None
syncData = None
basedirlen = None
syncpath = {}
config = {
'syncyerrlog' : '',
'syncylog' : '',
'blocksize' : 10,
'ondup' : 'rename',
'datacache' : 'on',
'excludefiles' : '',
'listnumber' : 100,
'retrytimes' : 3,
'retrydelay' : 3,
'maxsendspeed' : 0,
'maxrecvspeed' : 0,
'speedlimitperiod': '0-0',
'syncperiod' : '0-24',
'syncinterval' : 3600,
'tasknumber' : 2,
'threadnumber' : 2}
syre = {
'newname': re.compile(r'^(.*)(\.[^.]+)$'),
'pcspath': re.compile(r'^[\s\.\n].*|.*[/<>\\|\*\?:\"].*|.*[\s\.\n]$')}
syncytoken = {'synctotal': 0}
pcsroot = '/apps/SyncY'
synctask = {}
    def __init__(self, argv=sys.argv[1:]):
        """Initialise the driver: enforce single-instance via the pidfile,
        load and sanity-check the config file, run the OAuth device-binding
        flow when no refresh token exists, redirect logs, and prepare the
        task semaphore and thread stack size.

        Exits the process (sys.exit) on missing config, binding failure or
        invalid critical settings.

        NOTE(review): the default ``argv=sys.argv[1:]`` is bound once at class
        definition time (mutable default); harmless here since the object is
        constructed right after startup.
        """
        self.__argv = argv
        # Single-instance guard for sync runs and maintenance commands.
        # A stale pidfile is detected by signalling the pid with kill(pid, 0).
        if len(self.__argv) == 0 or self.__argv[0] in ['compress', 'convert', 'rebuild']:
            if os.path.exists(__PIDFILE__):
                with open(__PIDFILE__, 'r') as pidh:
                    mypid = pidh.read()
                try:
                    os.kill(int(mypid), 0)
                except os.error:
                    pass
                else:
                    print("SyncY is running!")
                    sys.exit(0)
            with open(__PIDFILE__, 'w') as pidh:
                pidh.write(str(os.getpid()))
        if not (os.path.isfile(__CONFIG_FILE__)):
            sys.stderr.write('%s ERROR: Config file "%s" does not exist.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), __CONFIG_FILE__))
            sys.exit(1)
        # Parse the UCI-style config: "config <section>" headers followed by
        # "option <key> '<value>'" lines; syncpath sections accumulate into
        # SyncY.syncpath keyed by their stringified index.
        with open(__CONFIG_FILE__, 'r') as sycfg:
            line = sycfg.readline()
            section = ''
            while line:
                if re.findall(r'^\s*#', line) or re.findall(r'^\s*$', line):
                    line = sycfg.readline()
                    continue
                line = re.sub(r'#[^\']*$', '', line)
                m = re.findall(r'\s*config\s+([^\s]+).*', line)
                if m:
                    section = m[0].strip('\'')
                    if section == 'syncpath':
                        SyncY.syncpath[str(len(SyncY.syncpath))] = {}
                    line = sycfg.readline()
                    continue
                m = re.findall(r'\s*option\s+([^\s]+)\s+\'([^\']*)\'', line)
                if m:
                    if section == 'syncy':
                        if m[0][0].strip('\'') in ['blocksize', 'listnumber', 'syncinterval', 'threadnumber', 'tasknumber', 'retrytimes', 'retrydelay', 'maxsendspeed', 'maxrecvspeed']:
                            SyncY.config[m[0][0].strip('\'')] = int(m[0][1])
                        else:
                            SyncY.config[m[0][0].strip('\'')] = m[0][1]
                    elif section == 'syncytoken':
                        if m[0][0].strip('\'') in ['expires_in', 'refresh_date', 'compress_date', 'synctotal']:
                            SyncY.syncytoken[m[0][0].strip('\'')] = int(m[0][1])
                        else:
                            SyncY.syncytoken[m[0][0].strip('\'')] = m[0][1]
                    elif section == 'syncpath':
                        SyncY.syncpath[str(len(SyncY.syncpath) - 1)][m[0][0].strip('\'')] = m[0][1]
                line = sycfg.readline()
        # Sanity-check every numeric/enum setting, falling back to defaults
        # with a warning when a value is out of range.
        try:
            if SyncY.config['blocksize'] < 1:
                SyncY.config['blocksize'] = 10
                print('%s WARNING: "blocksize" must great than or equal to 1(M), set to default 10(M).' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['ondup'] != 'overwrite' and SyncY.config['ondup'] != 'rename':
                SyncY.config['ondup'] = 'rename'
                # NOTE(review): message says "(overwrite)" but the value
                # actually applied is 'rename'.
                print('%s WARNING: ondup is invalid, set to default(overwrite).' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['datacache'] != 'on' and SyncY.config['datacache'] != 'off':
                SyncY.config['datacache'] = 'on'
                print('%s WARNING: "datacache" is invalid, set to default(on).' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['retrytimes'] < 0:
                SyncY.config['retrytimes'] = 3
                print('%s WARNING: "retrytimes" is invalid, set to default(3 times).' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['retrydelay'] < 0:
                SyncY.config['retrydelay'] = 3
                print('%s WARNING: "retrydelay" is invalid, set to default(3 second).' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['listnumber'] < 1:
                SyncY.config['listnumber'] = 100
                print('%s WARNING: "listnumber" must great than or equal to 1, set to default 100.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['syncinterval'] < 0:
                SyncY.config['syncinterval'] = 3600
                print('%s WARNING: "syncinterval" must great than or equal to 1, set to default 3600.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['maxsendspeed'] < 0:
                SyncY.config['maxsendspeed'] = 0
                print('%s WARNING: "maxsendspeed" must great than or equal to 0, set to default 0.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['maxrecvspeed'] < 0:
                SyncY.config['maxrecvspeed'] = 0
                # NOTE(review): message says "set to default 100" but the
                # value actually applied is 0 (unlimited).
                print('%s WARNING: "maxrecvspeed" must great than or equal to 0, set to default 100.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['threadnumber'] < 1:
                SyncY.config['threadnumber'] = 2
                print('%s WARNING: "threadnumber" must great than or equal to 1, set to default 2.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            if SyncY.config['tasknumber'] < 1:
                SyncY.config['tasknumber'] = 2
                print('%s WARNING: "tasknumber" must great than or equal to 1, set to default 2.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            # Both periods are "HH-HH" ranges; hours 0-23 for start, 0-24 end.
            starthour, endhour = SyncY.config['speedlimitperiod'].split('-', 1)
            if starthour == '' or endhour == '' or int(starthour) < 0 or int(starthour) > 23 or int(endhour) < 0 or int(endhour) > 24:
                print('%s WARNING: "speedlimitperiod" is invalid, set to default(0-0), no limit.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
                SyncY.config['speedlimitperiod'] = '0-0'
            starthour, endhour = SyncY.config['syncperiod'].split('-', 1)
            if starthour == '' or endhour == '' or int(starthour) < 0 or int(starthour) > 23 or int(endhour) < 0 or int(endhour) > 24 or endhour == starthour:
                print('%s WARNING: "syncperiod" is invalid, set to default(0-24).\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
                SyncY.config['syncperiod'] = '0-24'
        except Exception, e:
            self.writeerror('%s ERROR: initialize parameters failed. %s\n%s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), e, traceback.format_exc()))
            sys.exit(1)
        # OAuth device-binding flow: obtain a device/user code, let the user
        # authorise it in a browser, then exchange it for tokens. 'sybind'
        # only writes the codes to /tmp/syncy.bind; 'cpbind' completes a
        # binding started elsewhere.
        if 'refresh_token' not in SyncY.syncytoken or SyncY.syncytoken['refresh_token'] == '' or (len(self.__argv) != 0 and self.__argv[0] in ['sybind', 'cpbind']):
            sycurl = SYCurl()
            if (('device_code' not in SyncY.syncytoken or SyncY.syncytoken['device_code'] == '') and len(self.__argv) == 0) or (len(self.__argv) != 0 and self.__argv[0] == 'sybind'):
                retcode, responses = sycurl.request('https://syncyhome.duapp.com/syserver', urlencode({'method': 'bind_device', 'scope': 'basic,netdisk'}), 'POST', SYCurl.Normal)
                responses = json.loads(responses)
                if retcode != 200:
                    sys.stderr.write('%s ERROR(Errno:%d): Get device code failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, responses['error_msg']))
                    sys.exit(1)
                device_code = responses['device_code']
                user_code = responses['user_code']
                if len(self.__argv) != 0 and self.__argv[0] == 'sybind':
                    with open("/tmp/syncy.bind", 'w') as sybind:
                        sybind.write('{"user_code":"%s","device_code":"%s","time":%d}' % (user_code, device_code, int(time.time())))
                    sys.exit(0)
                SyncY.syncytoken['device_code'] = device_code
                print('Device binding Guide:')
                print('  1. Open web browser to visit:"https://openapi.baidu.com/device" and input user code to binding your baidu account.')
                print('  ')
                print('  2. User code:\033[31m %s\033[0m' % user_code)
                print('     (User code valid for 30 minutes.)')
                print('  ')
                raw_input('  3. After granting access to the application, come back here and press [Enter] to continue.')
                print('  ')
            if len(self.__argv) != 0 and self.__argv[0] == 'cpbind':
                with open('/tmp/syncy.bind', 'r') as sybind:
                    bindinfo = sybind.read()
                bindinfo = json.loads(bindinfo)
                os.remove("/tmp/syncy.bind")
                if 'device_code' in bindinfo:
                    # Device codes expire after 30 minutes (1800 s).
                    if int(time.time()) - int(bindinfo['time']) >= 1800:
                        sys.exit(1)
                    SyncY.syncytoken['device_code'] = bindinfo['device_code']
                else:
                    sys.exit(1)
            retcode, responses = sycurl.request('https://syncyhome.duapp.com/syserver', urlencode({'method': 'get_device_token', 'code': SyncY.syncytoken['device_code']}), 'POST', SYCurl.Normal)
            responses = json.loads(responses)
            if retcode != 200:
                sys.stderr.write('%s ERROR(Errno:%d): Get device token failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, responses['error_msg']))
                sys.exit(1)
            SyncY.syncytoken['refresh_token'] = responses['refresh_token']
            SyncY.syncytoken['access_token'] = responses['access_token']
            SyncY.syncytoken['expires_in'] = int(responses['expires_in'])
            SyncY.syncytoken['refresh_date'] = int(time.time())
            SyncY.syncytoken['compress_date'] = int(time.time())
            self.__save_config()
            if len(self.__argv) != 0 and self.__argv[0] == 'cpbind':
                sys.exit(0)
            print('%s Get device token success.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
        # Redirect stderr/stdout to the configured log files (unbuffered);
        # originals are stashed so __del__ can restore them.
        if SyncY.config['syncyerrlog'] != '' and os.path.exists(os.path.dirname(SyncY.config['syncyerrlog'])):
            if os.path.exists(SyncY.config['syncyerrlog']) and os.path.isdir(SyncY.config['syncyerrlog']):
                SyncY.config['syncyerrlog'] += 'syncyerr.log'
                self.__save_config()
            SyncY.oldSTDERR = sys.stderr
            sys.stderr = open(SyncY.config['syncyerrlog'], 'a', 0)
        if SyncY.config['syncylog'] != '' and os.path.exists(os.path.dirname(SyncY.config['syncylog'])):
            if os.path.exists(SyncY.config['syncylog']) and os.path.isdir(SyncY.config['syncylog']):
                SyncY.config['syncylog'] += 'syncy.log'
                self.__save_config()
            print('%s Running log output to log file:%s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), SyncY.config['syncylog']))
            SyncY.oldSTDOUT = sys.stdout
            sys.stdout = open(SyncY.config['syncylog'], 'a', 0)
        # Translate the glob-style exclude list into anchored regexes; '*.syy'
        # work files are always excluded.
        self._excludefiles = SyncY.config['excludefiles'].replace('.', '\.').replace('*', '.*').replace('?', '.?').split(';')
        for i in xrange(len(self._excludefiles)):
            self._excludefiles[i] = re.compile(eval('r"^' + self._excludefiles[i] + '$"'))
        self._excludefiles.append(re.compile(r'^.*\.syy$'))
        # Refresh the access token when within ten days (864000 s) of expiry.
        if (SyncY.syncytoken['refresh_date'] + SyncY.syncytoken['expires_in'] - 864000) < int(time.time()):
            self.__check_expires()
        SyncY.TaskSemaphore = threading.Semaphore(SyncY.config['tasknumber'])
        # Probe the smallest accepted thread stack size (32K..512K) to keep
        # memory usage low on embedded routers; fall back to the default.
        size = 32768
        while True:
            try:
                threading.stack_size(size)
                break
            except ValueError:
                if size < 512 * 1024:
                    size *= 2
                else:
                    threading.stack_size(0)
                    break
def __del__(self):
if self.__class__.oldSTDERR is not None:
sys.stderr.close()
sys.stderr = self.__class__.oldSTDERR
if self.__class__.oldSTDOUT is not None:
sys.stdout.close()
sys.stdout = self.__class__.oldSTDOUT
if os.path.exists(__PIDFILE__):
with open(__PIDFILE__, 'r') as pidh:
lckpid = pidh.read()
if os.getpid() == int(lckpid):
os.remove(__PIDFILE__)
@staticmethod
def synccount_increase():
SyncY.EXLock.acquire()
SyncY.synccount += 1
SyncY.EXLock.release()
@staticmethod
def errorcount_increase():
SyncY.EXLock.acquire()
SyncY.errorcount += 1
SyncY.EXLock.release()
@staticmethod
def failcount_increase():
SyncY.EXLock.acquire()
SyncY.failcount += 1
SyncY.EXLock.release()
@staticmethod
def reset_counter():
SyncY.EXLock.acquire()
SyncY.synccount = 0
SyncY.failcount = 0
SyncY.errorcount = 0
SyncY.EXLock.release()
@staticmethod
def printlog(msg):
SyncY.LogLock.acquire()
print(msg)
SyncY.LogLock.release()
@staticmethod
def writeerror(msg):
SyncY.LogLock.acquire()
sys.stderr.write(msg)
SyncY.LogLock.release()
@staticmethod
def __init_syncdata():
SyncY.syncData = {}
if os.path.exists(SyncY.syncydb):
with open(SyncY.syncydb, 'rb') as sydb:
fcntl.flock(sydb, fcntl.LOCK_SH)
dataline = sydb.read(40)
while dataline:
SyncY.syncData[dataline[24:]] = dataline[0:24]
dataline = sydb.read(40)
fcntl.flock(sydb, fcntl.LOCK_UN)
    def __check_expires(self):
        """Check for a newer SyncY release and refresh the OAuth access token.

        Returns 0 after a successful refresh, 1 when the refresh request
        fails, or None (implicit) when the token is still valid for more
        than ten days (864000 s).
        """
        sycurl = SYCurl()
        # Identify the logged-in user; the uid feeds the version check below.
        retcode, responses = sycurl.request('https://openapi.baidu.com/rest/2.0/passport/users/getLoggedInUser', urlencode({'access_token': SyncY.syncytoken['access_token']}), 'POST', SYCurl.Normal)
        responses = json.loads(responses)
        if 'uid' in responses:
            retcode, responses = sycurl.request('https://syncyhome.duapp.com/syserver', urlencode({'method': 'get_last_version', 'edition': 'python', 'ver': __VERSION__, 'uid': responses['uid']}), 'POST', SYCurl.Normal)
            if retcode == 200 and responses.find('#') > -1:
                # Response format: "<latest-version>#<announcement>".
                (lastver, smessage) = responses.strip('\n').split('#', 1)
                if lastver != __VERSION__:
                    self.writeerror('%s %s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), smessage.encode('utf8')))
                    self.printlog('%s %s' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), smessage.encode('utf8')))
        # Token still valid for more than ten days: nothing to refresh.
        if (SyncY.syncytoken['refresh_date'] + SyncY.syncytoken['expires_in'] - 864000) > int(time.time()):
            return
        retcode, responses = sycurl.request('https://syncyhome.duapp.com/syserver', urlencode({'method': 'refresh_access_token', 'refresh_token': SyncY.syncytoken['refresh_token']}), 'POST', SYCurl.Normal)
        responses = json.loads(responses)
        if retcode != 200:
            self.writeerror('%s ERROR(Errno:%d): Refresh access token failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, responses['error_msg']))
            return 1
        # Persist the new token pair immediately so a crash cannot lose it.
        SyncY.syncytoken['refresh_token'] = responses['refresh_token']
        SyncY.syncytoken['access_token'] = responses['access_token']
        SyncY.syncytoken['expires_in'] = int(responses['expires_in'])
        SyncY.syncytoken['refresh_date'] = int(time.time())
        self.__save_config()
        self.printlog('%s Refresh access token success.' % time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
        return 0
@staticmethod
def __save_config():
with open('%s.tmp' % __CONFIG_FILE__, 'w') as sycfg:
sycfg.write("\nconfig syncy\n")
for key, value in SyncY.config.items():
sycfg.write("\toption %s '%s'\n" % (key, str(value)))
sycfg.write("\nconfig syncytoken\n")
for key, value in SyncY.syncytoken.items():
sycfg.write("\toption %s '%s'\n" % (key, str(value)))
for i in range(len(SyncY.syncpath)):
sycfg.write("\nconfig syncpath\n")
for key, value in SyncY.syncpath[str(i)].items():
sycfg.write("\toption %s '%s'\n" % (key, str(value)))
sycfg.flush()
os.fsync(sycfg.fileno())
if os.path.exists('%s.tmp' % __CONFIG_FILE__):
pmeta = os.stat(__CONFIG_FILE__)
os.rename('%s.tmp' % __CONFIG_FILE__, __CONFIG_FILE__)
os.lchown(__CONFIG_FILE__, pmeta.st_uid, pmeta.st_gid)
os.chmod(__CONFIG_FILE__, pmeta.st_mode)
@staticmethod
def __catpath(*names):
fullpath = '/'.join(names)
fullpath = re.sub(r'/+', '/', fullpath)
fullpath = re.sub(r'/$', '', fullpath)
return fullpath
@staticmethod
def __get_newname(oldname):
nowtime = str(time.strftime("%Y%m%d%H%M%S", time.localtime()))
m = SyncY.syre['newname'].findall(oldname)
if m:
newname = m[0][0] + '_old_' + nowtime + m[0][1]
else:
newname = oldname + '_old_' + nowtime
return newname
def __check_pcspath(self, pcsdirname, pcsfilename):
if len(pcsdirname) + len(pcsfilename) + 1 >= 1000:
self.writeerror('%s ERROR: Length of PCS path(%s/%s) must less than 1000, skip upload.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), pcsdirname, pcsfilename))
return 1
if SyncY.syre['pcspath'].findall(pcsfilename):
self.writeerror('%s ERROR: PCS path(%s/%s) is invalid, please check whether special characters exists in the path, skip upload the file.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), pcsdirname, pcsfilename))
return 1
return 0
def __get_pcs_quota(self):
sycurl = SYCurl()
retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/quota?%s' % urlencode({'method': 'info', 'access_token': SyncY.syncytoken['access_token']}), '', 'GET', SYCurl.Normal)
responses = json.loads(responses)
if retcode != 200:
self.writeerror('%s ERROR(Errno:%d): Get pcs quota failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, responses['error_msg']))
return 1
self.printlog('%s PCS quota is %dG,used %dG.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), responses['quota'] / 1024 / 1024 / 1024, responses['used'] / 1024 / 1024 / 1024))
return 0
def __get_pcs_filelist(self, pcspath, startindex, endindex):
if __DEBUG__:
self.printlog('%s Info(%s): Start get pcs file list(%d-%d) of "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), threading.currentThread().name, startindex, endindex, pcspath))
sycurl = SYCurl()
retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'list', 'access_token': SyncY.syncytoken['access_token'], 'path': pcspath, 'limit': '%d-%d' % (startindex, endindex), 'by': 'name', 'order': 'asc'}), '', 'GET', SYCurl.Normal)
try:
responses = json.loads(responses)
if retcode != 200:
if responses['error_code'] == 31066:
return 31066, []
else:
self.writeerror('%s ERROR(Errno:%d): Get PCS file list of "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, pcspath, responses['error_msg']))
return 1, []
return 0, responses['list']
except Exception, e:
self.writeerror('%s ERROR: Get PCS file list of "%s" failed. return code: %d, response body: %s.\n%s\n%s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), pcspath, retcode, str(responses), e, traceback.format_exc()))
return 1, []
finally:
del responses
if __DEBUG__:
self.printlog('%s Info(%s): Complete get pcs file list(%d-%d) of "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), threading.currentThread().name, startindex, endindex, pcspath))
def __rm_localfile(self, delpath, slient=False):
try:
if os.path.isfile(delpath):
os.remove(delpath)
if not slient:
self.printlog('%s Delete local file "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), delpath))
elif os.path.isdir(delpath):
fnlist = os.listdir(delpath)
for i in xrange(len(fnlist)):
self.__rm_localfile('%s/%s' % (delpath, fnlist[i]), slient)
os.rmdir(delpath)
if not slient:
self.printlog('%s Delete local directory "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), delpath))
except Exception, e:
if not slient:
self.writeerror('%s ERROR: Delete local file "%s" failed. %s\n%s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), delpath, e, traceback.format_exc()))
return 1
return 0
def __rm_pcsfile(self, pcspath, slient=False):
sycurl = SYCurl()
retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'delete', 'access_token': SyncY.syncytoken['access_token'], 'path': pcspath}), '', 'POST', SYCurl.Normal)
responses = json.loads(responses)
if retcode != 200:
if not slient:
self.writeerror('%s ERROR(Errno:%d): Delete remote file or directory "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, pcspath, responses['error_msg']))
return 1
if not slient:
self.printlog('%s Delete remote file or directory "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), pcspath))
return 0
def __mv_pcsfile(self, oldpcspath, newpcspath, slient=False):
sycurl = SYCurl()
retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'move', 'access_token': SyncY.syncytoken['access_token'], 'from': oldpcspath, 'to': newpcspath}), '', 'POST', SYCurl.Normal)
responses = json.loads(responses)
if retcode != 200:
if not slient:
self.writeerror('%s ERROR(Errno:%d): Move remote file or directory "%s" to "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, oldpcspath, newpcspath, responses['error_msg']))
return 1
if not slient:
self.printlog('%s Move remote file or directory "%s" to "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), oldpcspath, newpcspath))
return 0
def __cp_pcsfile(self, srcpcspath, destpcspath):
sycurl = SYCurl()
retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'copy', 'access_token': SyncY.syncytoken['access_token'], 'from': srcpcspath, 'to': destpcspath}), '', 'POST', SYCurl.Normal)
responses = json.loads(responses)
if retcode != 200:
self.writeerror('%s ERROR(Errno:%d): Copy remote file or directory "%s" to "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, srcpcspath, destpcspath, responses['error_msg']))
return 1
self.printlog('%s Copy remote file or directory "%s" to "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), srcpcspath, destpcspath))
return 0
@staticmethod
def __get_pcs_filemeta(pcspath):
sycurl = SYCurl()
retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'meta', 'access_token': SyncY.syncytoken['access_token'], 'path': pcspath}), '', 'GET', SYCurl.Normal)
responses = json.loads(responses)
if retcode != 200:
SyncY.writeerror('%s ERROR(Errno:%d): Get file meta failed: %s, %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, pcspath, responses['error_msg']))
return 1, {}
return 0, responses['list'][0]
def __upload_file_nosync(self, filepath, pcspath):
sycurl = SYCurl()
retcode, responses = sycurl.request('https://c.pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'upload', 'access_token': SyncY.syncytoken['access_token'], 'path': pcspath, 'ondup': 'newcopy'}), '0-%d' % (os.stat(filepath).st_size - 1), 'POST', SYCurl.Upload, filepath)
responses = json.loads(responses)
if retcode != 200:
self.writeerror('%s ERROR(Errno:%d): Upload file to pcs failed: %s, %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, filepath, responses['error_msg']))
return 1
self.printlog('%s Upload file "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), filepath))
return 0
    def __compress_data(self, pathname, sydbnew, sydb=None, sydblen=0):
        """Copy into *sydbnew* only the sync records whose files still exist.

        Walks the local tree; for every regular file, its 40-byte record
        (24 bytes of state + 16-byte md5 of the relative path) is carried
        over. With datacache on, records come from the in-memory
        SyncY.syncData map; otherwise the old database file *sydb* (of size
        *sydblen*) is scanned with wrap-around from its current offset.
        """
        fnlist = os.listdir(pathname)
        fnlist.sort()
        for fnname in fnlist:
            # Hidden entries are never synchronised.
            if fnname[0:1] == '.':
                continue
            fullpath = '%s/%s' % (pathname, fnname)
            if os.path.isdir(fullpath):
                if SyncY.config['datacache'] == 'on':
                    self.__compress_data(fullpath, sydbnew)
                else:
                    self.__compress_data(fullpath, sydbnew, sydb, sydblen)
            elif os.path.isfile(fullpath):
                # Records are keyed by md5 of the path relative to the sync
                # root (basedirlen strips the local base directory).
                fnmd5 = hashlib.md5(fullpath[SyncY.basedirlen:] + '\n').digest()
                if SyncY.config['datacache'] == 'on':
                    if fnmd5 in SyncY.syncData and SyncY.syncData[fnmd5][16:]:
                        sydbnew.write('%s%s' % (SyncY.syncData[fnmd5], fnmd5))
                        # Remove so each record is written at most once.
                        del SyncY.syncData[fnmd5]
                else:
                    fnstat = os.stat(fullpath)
                    # mtime and size packed big-endian 32-bit; size is taken
                    # modulo 2**32 to fit the 4-byte field.
                    fmtime = struct.pack('>I', int(fnstat.st_mtime))
                    fsize = struct.pack('>I', fnstat.st_size % 4294967296)
                    # Circular scan of the old db starting at the current
                    # offset; stops after reading sydblen bytes in total.
                    if sydb.tell() == sydblen:
                        sydb.seek(0)
                    datarec = sydb.read(40)
                    readlen = 40
                    while datarec and readlen <= sydblen:
                        if datarec[16:] == '%s%s%s' % (fmtime, fsize, fnmd5):
                            sydbnew.write(datarec)
                            break
                        if readlen == sydblen:
                            break
                        if sydb.tell() == sydblen:
                            sydb.seek(0)
                        datarec = sydb.read(40)
                        readlen += 40
        return 0
    def __start_compress(self, pathname=''):
        """Compact the .syncy.info.db databases, dropping records for files
        that no longer exist locally.

        With no argument, every configured non-'sync'-mode local path is
        compacted and the compress timestamp/sync counter in the config file
        are reset; with *pathname*, only that tree is compacted.
        """
        if pathname == '':
            mpath = []
            for i in range(len(SyncY.syncpath)):
                # 'sync' mode paths (4/s/sync) keep their database as-is.
                if SyncY.syncpath[str(i)]['synctype'].lower() not in ['4', 's', 'sync']:
                    mpath.append(SyncY.syncpath[str(i)]['localpath'])
            self.printlog('%s Start compress sync data.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
        else:
            mpath = [pathname]
        for ipath in mpath:
            if ipath == '':
                continue
            SyncY.basedirlen = len(ipath)
            SyncY.syncydb = '%s/.syncy.info.db' % ipath
            if os.path.exists(SyncY.syncydb):
                # Write surviving records to "<db>tmp", then atomically
                # replace the old database.
                with open('%stmp' % SyncY.syncydb, 'wb') as sydbnew:
                    if SyncY.config['datacache'] == 'on':
                        self.__init_syncdata()
                        self.__compress_data(ipath, sydbnew)
                        SyncY.syncData = None
                    else:
                        sydblen = os.stat(SyncY.syncydb).st_size
                        with open(SyncY.syncydb, 'rb') as sydb:
                            self.__compress_data(ipath, sydbnew, sydb, sydblen)
                    sydbnew.flush()
                    os.fsync(sydbnew.fileno())
                os.rename('%stmp' % SyncY.syncydb, SyncY.syncydb)
        if pathname == '':
            # Record the compaction time and restart the sync cycle counter.
            SyncY.syncytoken['compress_date'] = int(time.time())
            SyncY.syncytoken['synctotal'] = 0
            self.__save_config()
            self.printlog('%s Sync data compress completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
def __check_excludefiles(self, filepath):
for reexf in self._excludefiles:
if reexf.findall(filepath):
return 1
return 0
    @staticmethod
    def __check_syncstatus(rmd5, fmtime, fsize, fmd5):
        """Return 1 when a matching 40-byte sync record exists, else 0.

        Record layout: 16-byte remote md5 + 4-byte big-endian mtime +
        4-byte big-endian size + 16-byte md5 of the relative path (*fmd5*).
        Passing '*' for *rmd5* or *fmtime* wildcards that field. With
        datacache off, the open database SyncY.sydb (size SyncY.sydblen)
        is scanned circularly from its current offset instead.
        """
        if rmd5 != '*':
            # Hex digest from the PCS listing -> raw 16 bytes.
            rmd5 = rmd5.decode('hex')
        if fmtime != '*':
            fmtime = struct.pack('>I', fmtime)
        # Size is stored modulo 2**32 to fit the 4-byte field.
        fsize = struct.pack('>I', fsize % 4294967296)
        if SyncY.config['datacache'] == 'on':
            if fmd5 not in SyncY.syncData:
                return 0
            if rmd5 == '*' and SyncY.syncData[fmd5][16:] == fmtime + fsize:
                return 1
            elif fmtime == '*' and SyncY.syncData[fmd5][0:16] + SyncY.syncData[fmd5][20:] == rmd5 + fsize:
                return 1
            elif SyncY.syncData[fmd5] == rmd5 + fmtime + fsize:
                return 1
        else:
            # Circular scan: wrap to the start when the end is reached and
            # stop once sydblen bytes have been examined.
            if SyncY.sydb.tell() == SyncY.sydblen:
                SyncY.sydb.seek(0)
            datarec = SyncY.sydb.read(40)
            readlen = 40
            while datarec and readlen <= SyncY.sydblen:
                if rmd5 == '*' and datarec[16:] == fmtime + fsize + fmd5:
                    return 1
                elif fmtime == '*' and datarec[0:16] + datarec[20:] == rmd5 + fsize + fmd5:
                    return 1
                elif datarec == rmd5 + fmtime + fsize + fmd5:
                    return 1
                if readlen == SyncY.sydblen:
                    break
                if SyncY.sydb.tell() == SyncY.sydblen:
                    SyncY.sydb.seek(0)
                datarec = SyncY.sydb.read(40)
                readlen += 40
        return 0
    def __syncy_upload(self, ldir, rdir):
        """Plain upload pass: walk the local tree under *ldir* and queue an
        upload task for every file that has no matching sync record.

        Respects the configured ondup policy ('rename' maps to PCS
        'newcopy', otherwise 'overwrite').
        """
        fnlist = os.listdir(ldir)
        fnlist.sort()
        for fi in xrange(len(fnlist)):
            lfullpath = '%s/%s' % (ldir, fnlist[fi])
            # Skip hidden entries, excluded patterns and names PCS rejects.
            if fnlist[fi][0:1] == '.' or self.__check_excludefiles(lfullpath) == 1 or self.__check_pcspath(rdir, fnlist[fi]) == 1:
                continue
            rfullpath = '%s/%s' % (rdir, fnlist[fi])
            if os.path.isdir(lfullpath):
                self.__syncy_upload(lfullpath, rfullpath)
            else:
                fmeta = os.stat(lfullpath)
                fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                # Only upload files with no up-to-date sync record.
                if self.__check_syncstatus('*', int(fmeta.st_mtime), fmeta.st_size, fnmd5) == 0:
                    if SyncY.config['ondup'] == 'rename':
                        ondup = 'newcopy'
                    else:
                        ondup = 'overwrite'
                    # The semaphore bounds concurrent transfer tasks; the
                    # task releases it when finished.
                    if SyncY.TaskSemaphore.acquire():
                        synctask = SYTask(SYTask.Upload, lfullpath, int(fmeta.st_mtime), fmeta.st_size, fnmd5, rfullpath, 0, 0, '', ondup)
                        synctask.start()
                    else:
                        continue
        return 0
    def __syncy_uploadplus(self, ldir, rdir):
        """Upload pass that first reconciles against the remote listing.

        Pages through the PCS listing of *rdir*: remote entries with a local
        counterpart are compared (type/size/sync record) and re-uploaded when
        stale, renaming or deleting the remote side per the ondup policy;
        local entries without a remote counterpart are then uploaded.
        Returns 0 on success, 1 when the listing cannot be fetched.
        """
        startidx = 0
        retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, SyncY.config['listnumber'])
        # 31066 means the remote dir does not exist yet - treated as empty.
        if retcode != 0 and retcode != 31066:
            self.errorcount_increase()
            return 1
        lfnlist = os.listdir(ldir)
        lfnlist.sort()
        while retcode == 0:
            for i in xrange(len(rfnlist)):
                rfullpath = rfnlist[i]['path'].encode('utf8')
                fnname = os.path.basename(rfullpath)
                lfullpath = '%s/%s' % (ldir, fnname)
                if self.__check_excludefiles(lfullpath) == 1:
                    continue
                if os.path.exists(lfullpath):
                    # Remove matched names so only local-only leftovers
                    # remain in lfnlist for the final upload loop.
                    for idx in xrange(len(lfnlist)):
                        if lfnlist[idx] == fnname:
                            del lfnlist[idx]
                            break
                else:
                    # Remote-only entry: upload mode never deletes remote.
                    continue
                # Type mismatch (remote dir vs local file, or vice versa):
                # move the remote entry aside or delete it, then re-send.
                if (rfnlist[i]['isdir'] == 1 and os.path.isfile(lfullpath)) or (rfnlist[i]['isdir'] == 0 and os.path.isdir(lfullpath)):
                    if SyncY.config['ondup'] == 'rename':
                        fnnamenew = '%s/%s' % (rdir, self.__get_newname(fnname))
                        if len(fnnamenew) >= 1000:
                            self.writeerror('%s ERROR: Rename failed, the length of PCS path "%s" must less than 1000, skip upload "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), fnnamenew, lfullpath))
                            self.failcount_increase()
                            continue
                        if self.__mv_pcsfile(rfullpath, fnnamenew, True) == 1:
                            self.writeerror('%s ERROR: Rename "%s" failed, skip upload "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), rfullpath, lfullpath))
                            self.errorcount_increase()
                            continue
                    else:
                        self.__rm_pcsfile(rfullpath, True)
                    if os.path.isdir(lfullpath):
                        self.__syncy_uploadplus(lfullpath, rfullpath)
                        continue
                    else:
                        fmeta = os.stat(lfullpath)
                        fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                        if SyncY.TaskSemaphore.acquire():
                            synctask = SYTask(SYTask.Upload, lfullpath, int(fmeta.st_mtime), fmeta.st_size, fnmd5, rfullpath, 0, 0, '', 'overwrite')
                            synctask.start()
                elif rfnlist[i]['isdir'] == 1:
                    # Both sides are directories: recurse.
                    self.__syncy_uploadplus(lfullpath, rfullpath)
                    continue
                else:
                    # Both sides are files: skip when size matches and the
                    # sync record confirms it; otherwise re-upload.
                    fmeta = os.stat(lfullpath)
                    fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                    if fmeta.st_size == rfnlist[i]['size']:
                        if self.__check_syncstatus(rfnlist[i]['md5'], int(fmeta.st_mtime), rfnlist[i]['size'], fnmd5) == 1:
                            continue
                    if SyncY.config['ondup'] == 'rename':
                        fnnamenew = '%s/%s' % (rdir, self.__get_newname(fnname))
                        if len(fnnamenew) >= 1000:
                            self.writeerror('%s ERROR: Rename failed, the length of PCS path "%s" must less than 1000, skip upload "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), fnnamenew, lfullpath))
                            self.failcount_increase()
                            continue
                        if self.__mv_pcsfile(rfullpath, fnnamenew, True) == 1:
                            self.writeerror('%s ERROR: Rename "%s" failed, skip upload "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), rfullpath, lfullpath))
                            self.failcount_increase()
                            continue
                    else:
                        self.__rm_pcsfile(rfullpath, True)
                    if SyncY.TaskSemaphore.acquire():
                        synctask = SYTask(SYTask.Upload, lfullpath, int(fmeta.st_mtime), fmeta.st_size, fnmd5, rfullpath, 0, 0, '', 'overwrite')
                        synctask.start()
            # A short page means the remote listing is exhausted.
            if len(rfnlist) < SyncY.config['listnumber']:
                break
            startidx += SyncY.config['listnumber']
            retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, startidx + SyncY.config['listnumber'])
            if retcode != 0:
                self.errorcount_increase()
                return 1
        # Upload whatever exists only locally.
        for idx in xrange(len(lfnlist)):
            lfullpath = '%s/%s' % (ldir, lfnlist[idx])
            if lfnlist[idx][0:1] == '.' or self.__check_excludefiles(lfullpath) == 1 or self.__check_pcspath(rdir, lfnlist[idx]) == 1:
                continue
            rfullpath = '%s/%s' % (rdir, lfnlist[idx])
            if os.path.isdir(lfullpath):
                self.__syncy_uploadplus(lfullpath, rfullpath)
            elif os.path.isfile(lfullpath):
                fmeta = os.stat(lfullpath)
                fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                if SyncY.TaskSemaphore.acquire():
                    synctask = SYTask(SYTask.Upload, lfullpath, int(fmeta.st_mtime), fmeta.st_size, fnmd5, rfullpath, 0, 0, '', 'overwrite')
                    synctask.start()
        return 0
    def __syncy_download(self, ldir, rdir):
        """Plain download pass: mirror the remote tree *rdir* into *ldir*.

        Pages through the PCS listing; directories are created locally
        (inheriting owner/mode from the parent) and recursed into; files are
        queued as download tasks unless their sync record already matches.
        Local files that collide by name are renamed or removed according to
        the ondup policy. Returns 0 on success, 1 on listing failure.
        """
        startidx = 0
        retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, SyncY.config['listnumber'])
        if retcode != 0:
            self.errorcount_increase()
            return 1
        while retcode == 0:
            for i in xrange(len(rfnlist)):
                rfullpath = rfnlist[i]['path'].encode('utf8')
                fnname = os.path.basename(rfullpath)
                if self.__check_excludefiles(rfullpath) == 1:
                    continue
                lfullpath = '%s/%s' % (ldir, fnname)
                if rfnlist[i]['isdir'] == 1:
                    # Remote dir colliding with a local file: move the local
                    # file aside (rename) or delete it (overwrite).
                    if os.path.exists(lfullpath) and os.path.isfile(lfullpath):
                        if SyncY.config['ondup'] == 'rename':
                            fnnamenew = '%s/%s' % (ldir, self.__get_newname(fnname))
                            os.rename(lfullpath, fnnamenew)
                        else:
                            if self.__rm_localfile(lfullpath, True) == 1:
                                self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                self.errorcount_increase()
                                continue
                    if not (os.path.exists(lfullpath)):
                        # New directory inherits owner and mode from parent.
                        os.mkdir(lfullpath)
                        pmeta = os.stat(ldir)
                        os.lchown(lfullpath, pmeta.st_uid, pmeta.st_gid)
                        os.chmod(lfullpath, pmeta.st_mode)
                    self.__syncy_download(lfullpath, rfullpath)
                else:
                    fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                    # A "<name>.db.syy" work file marks an interrupted
                    # download, which must be resumed regardless of state.
                    if not (os.path.exists(lfullpath + '.db.syy')):
                        if self.__check_syncstatus(rfnlist[i]['md5'], '*', rfnlist[i]['size'], fnmd5) == 1:
                            continue
                    if os.path.exists(lfullpath) and SyncY.config['ondup'] == 'rename':
                        fnnamenew = '%s/%s' % (ldir, self.__get_newname(fnname))
                        os.rename(lfullpath, fnnamenew)
                    elif os.path.exists(lfullpath):
                        if self.__rm_localfile(lfullpath, True) == 1:
                            self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                            self.failcount_increase()
                            continue
                    if SyncY.TaskSemaphore.acquire():
                        synctask = SYTask(SYTask.Download, lfullpath, 0, 0, fnmd5, rfullpath, rfnlist[i]['mtime'], rfnlist[i]['size'], rfnlist[i]['md5'], 'overwrite')
                        synctask.start()
            # A short page means the remote listing is exhausted.
            if len(rfnlist) < SyncY.config['listnumber']:
                break
            startidx += SyncY.config['listnumber']
            retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, startidx + SyncY.config['listnumber'])
            if retcode != 0:
                self.errorcount_increase()
                return 1
        return 0
    def __syncy_downloadplus(self, ldir, rdir):
        """Download pass that also consults local mtimes.

        Like __syncy_download, but an existing local file is only replaced
        when its (md5, mtime, size) sync record does not match the remote
        entry; an in-progress "<name>.db.syy" work file forces a resume.
        Returns 0 on success, 1 on listing failure.
        """
        startidx = 0
        retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, SyncY.config['listnumber'])
        if retcode != 0:
            self.errorcount_increase()
            return 1
        while retcode == 0:
            for i in xrange(0, len(rfnlist), 1):
                rfullpath = rfnlist[i]['path'].encode('utf8')
                fnname = os.path.basename(rfullpath)
                if self.__check_excludefiles(rfullpath) == 1:
                    continue
                lfullpath = '%s/%s' % (ldir, fnname)
                if rfnlist[i]['isdir'] == 1:
                    # Remote dir colliding with a local file: rename or
                    # delete the local file per the ondup policy.
                    if os.path.exists(lfullpath) and os.path.isfile(lfullpath):
                        if SyncY.config['ondup'] == 'rename':
                            fnnamenew = '%s/%s' % (ldir, self.__get_newname(fnname))
                            os.rename(lfullpath, fnnamenew)
                        else:
                            if self.__rm_localfile(lfullpath, True) == 1:
                                self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                self.errorcount_increase()
                                continue
                    if not (os.path.exists(lfullpath)):
                        # New directory inherits owner and mode from parent.
                        os.mkdir(lfullpath)
                        pmeta = os.stat(ldir)
                        os.lchown(lfullpath, pmeta.st_uid, pmeta.st_gid)
                        os.chmod(lfullpath, pmeta.st_mode)
                    self.__syncy_downloadplus(lfullpath, rfullpath)
                else:
                    fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                    # Existing complete file: skip when the record matches,
                    # otherwise move it aside / delete it before re-download.
                    if os.path.exists(lfullpath) and not (os.path.exists(lfullpath + '.db.syy')):
                        fmeta = os.stat(lfullpath)
                        if self.__check_syncstatus(rfnlist[i]['md5'], int(fmeta.st_mtime), rfnlist[i]['size'], fnmd5) == 1:
                            continue
                        if SyncY.config['ondup'] == 'rename':
                            fnnamenew = '%s/%s' % (ldir, self.__get_newname(fnname))
                            os.rename(lfullpath, fnnamenew)
                        else:
                            if self.__rm_localfile(lfullpath, True) == 1:
                                self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                self.failcount_increase()
                                continue
                    if SyncY.TaskSemaphore.acquire():
                        synctask = SYTask(SYTask.Download, lfullpath, 0, 0, fnmd5, rfullpath, rfnlist[i]['mtime'], rfnlist[i]['size'], rfnlist[i]['md5'], 'overwrite')
                        synctask.start()
            # A short page means the remote listing is exhausted.
            if len(rfnlist) < SyncY.config['listnumber']:
                break
            startidx += SyncY.config['listnumber']
            retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, startidx + SyncY.config['listnumber'])
            if retcode != 0:
                self.errorcount_increase()
                return 1
        return 0
    def __syncy_sync(self, ldir, rdir):
        """Two-way synchronization of local directory `ldir` with remote PCS
        directory `rdir`.

        Pages through the remote listing, reconciling each remote entry with
        the local tree (recursing into directories), then walks the leftover
        local-only names and uploads them.  Direction decisions are driven by
        __check_syncstatus (lookup in the packed sync database keyed by the
        md5 of the path relative to SyncY.basedirlen) together with mtime
        comparison, and by the ".db.syy" resume-marker files left behind by
        interrupted transfers.

        Returns 0 on success, 1 when the remote listing fails.
        """
        startidx = 0
        retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, SyncY.config['listnumber'])
        # 31066 is tolerated here: the remote directory does not exist yet,
        # so everything local is handled by the trailing upload loop.
        if retcode != 0 and retcode != 31066:
            self.errorcount_increase()
            return 1
        lfnlist = os.listdir(ldir)
        lfnlist.sort()
        while retcode == 0:
            for i in xrange(len(rfnlist)):
                rfullpath = rfnlist[i]['path'].encode('utf8')
                fnname = os.path.basename(rfullpath)
                if self.__check_excludefiles(rfullpath) == 1:
                    continue
                lfullpath = '%s/%s' % (ldir, fnname)
                # Names present on both sides are dropped from lfnlist so the
                # trailing loop only ever sees local-only entries.
                if os.path.exists(lfullpath):
                    for idx in xrange(len(lfnlist)):
                        if lfnlist[idx] == fnname:
                            del lfnlist[idx]
                            break
                if rfnlist[i]['isdir'] == 1:
                    # Remote entry is a directory.
                    if os.path.exists(lfullpath) and os.path.isfile(lfullpath):
                        # Type conflict: local side is a plain file.  Resolve
                        # by sync history / mtime: either replace the local
                        # file with the remote tree, or push the local file
                        # over the remote directory.
                        fmeta = os.stat(lfullpath)
                        fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                        if self.__check_syncstatus('*', int(fmeta.st_mtime), fmeta.st_size, fnmd5) == 1 or rfnlist[i]['mtime'] > int(fmeta.st_mtime):
                            if self.__rm_localfile(lfullpath, True) == 1:
                                self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                self.failcount_increase()
                                continue
                            self.__syncy_downloadplus(lfullpath, rfullpath)
                            continue
                        else:
                            self.__rm_pcsfile(rfullpath, True)
                            if SyncY.TaskSemaphore.acquire():
                                synctask = SYTask(SYTask.Upload, lfullpath, int(fmeta.st_mtime), fmeta.st_size, fnmd5, rfullpath, 0, 0, '', 'overwrite')
                                synctask.start()
                    else:
                        # Create the local directory (inheriting the parent's
                        # owner and mode) if needed, then recurse.
                        if not (os.path.exists(lfullpath)):
                            os.mkdir(lfullpath)
                            pmeta = os.stat(ldir)
                            os.lchown(lfullpath, pmeta.st_uid, pmeta.st_gid)
                            os.chmod(lfullpath, pmeta.st_mode)
                        self.__syncy_sync(lfullpath, rfullpath)
                        continue
                else:
                    # Remote entry is a regular file.
                    fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                    fmtime = 0
                    fsize = 0
                    if os.path.exists(lfullpath) and os.path.isdir(lfullpath):
                        # Type conflict: local side is a directory.
                        if self.__check_syncstatus(rfnlist[i]['md5'], '*', rfnlist[i]['size'], fnmd5) == 1:
                            # Remote file was synced before -> local dir wins.
                            if self.__rm_pcsfile(rfullpath, True) == 1:
                                self.writeerror('%s ERROR: Delete remote file "%s" failed, skip sync "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), rfullpath, lfullpath))
                                self.errorcount_increase()
                                continue
                            self.__syncy_uploadplus(lfullpath, rfullpath)
                            continue
                        else:
                            if rfnlist[i]['mtime'] > int(os.stat(lfullpath).st_mtime):
                                if self.__rm_localfile(lfullpath, True) == 1:
                                    self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                    self.failcount_increase()
                                    continue
                                sync_op = SYTask.Download
                            else:
                                if self.__rm_pcsfile(rfullpath, True) == 1:
                                    self.writeerror('%s ERROR: Delete remote file "%s" failed, skip sync "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), rfullpath, lfullpath))
                                    self.errorcount_increase()
                                    continue
                                self.__syncy_uploadplus(lfullpath, rfullpath)
                                continue
                    elif os.path.exists(lfullpath):
                        # Both sides are files: decide skip / download / upload.
                        fmeta = os.stat(lfullpath)
                        fmtime = int(fmeta.st_mtime)
                        fsize = fmeta.st_size
                        if rfnlist[i]['size'] == fsize and self.__check_syncstatus(rfnlist[i]['md5'], fmtime, fsize, fnmd5) == 1:
                            # Already in sync: exact record in the database.
                            continue
                        elif self.__check_syncstatus('*', fmtime, fsize, fnmd5) == 1:
                            # Local state was synced before, remote changed.
                            if self.__rm_localfile(lfullpath, True) == 1:
                                self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                self.failcount_increase()
                                continue
                            sync_op = SYTask.Download
                        elif self.__check_syncstatus(rfnlist[i]['md5'], '*', rfnlist[i]['size'], fnmd5) == 1:
                            # Remote state was synced before, local changed.
                            self.__rm_pcsfile(rfullpath, True)
                            sync_op = SYTask.Upload
                        elif os.path.exists('%s.db.syy' % lfullpath):
                            # Resume marker from an interrupted transfer.
                            with open('%s.db.syy' % lfullpath, 'r') as infoh:
                                syyinfo = infoh.readline()
                            if syyinfo.strip('\n') == 'download:%s:%d' % (rfnlist[i]['md5'], rfnlist[i]['size']):
                                # Marker matches current remote file: resume.
                                sync_op = SYTask.Download
                            else:
                                os.remove('%s.db.syy' % lfullpath)
                                if rfnlist[i]['mtime'] > fmtime:
                                    if self.__rm_localfile(lfullpath, True) == 1:
                                        self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                        self.failcount_increase()
                                        continue
                                    sync_op = SYTask.Download
                                else:
                                    self.__rm_pcsfile(rfullpath, True)
                                    sync_op = SYTask.Upload
                        elif rfnlist[i]['mtime'] > fmtime:
                            # No history either way: newer mtime wins.
                            if self.__rm_localfile(lfullpath, True) == 1:
                                self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                                self.failcount_increase()
                                continue
                            sync_op = SYTask.Download
                        else:
                            self.__rm_pcsfile(rfullpath)
                            rfnlist[i]['mtime'] = 0
                            sync_op = SYTask.Upload
                    else:
                        # No local counterpart.
                        if self.__check_syncstatus(rfnlist[i]['md5'], '*', rfnlist[i]['size'], fnmd5) == 1:
                            # Was synced before and the local copy is gone:
                            # propagate the deletion to the remote side.
                            if self.__rm_pcsfile(rfullpath) == 1:
                                self.failcount_increase()
                            else:
                                self.synccount_increase()
                            continue
                        else:
                            sync_op = SYTask.Download
                    # Hand the transfer to a worker thread; the semaphore
                    # bounds the number of concurrent SYTask threads.
                    if SyncY.TaskSemaphore.acquire():
                        synctask = SYTask(sync_op, lfullpath, fmtime, fsize, fnmd5, rfullpath, rfnlist[i]['mtime'], rfnlist[i]['size'], rfnlist[i]['md5'], 'overwrite')
                        synctask.start()
            # A short page means the remote listing is exhausted.
            if len(rfnlist) < SyncY.config['listnumber']:
                break
            startidx += SyncY.config['listnumber']
            retcode, rfnlist = self.__get_pcs_filelist(rdir, startidx, startidx + SyncY.config['listnumber'])
            if retcode != 0:
                self.errorcount_increase()
                return 1
        # Remaining lfnlist entries exist only locally: recurse into
        # directories and upload files (or delete local files that the sync
        # database shows were previously synced, i.e. deleted remotely).
        for idx in xrange(len(lfnlist)):
            lfullpath = '%s/%s' % (ldir, lfnlist[idx])
            if lfnlist[idx][0:1] == '.' or self.__check_excludefiles(lfullpath) == 1 or self.__check_pcspath(rdir, lfnlist[idx]) == 1:
                continue
            rfullpath = '%s/%s' % (rdir, lfnlist[idx])
            if os.path.isdir(lfullpath):
                self.__syncy_sync(lfullpath, rfullpath)
                # NOTE(review): this lists the *parent* (ldir), which still
                # contains lfullpath, so len(dir_files) is never 0 and the
                # rmdir below is unreachable.  Presumably
                # os.listdir(lfullpath) was intended -- confirm before fixing.
                dir_files = os.listdir(ldir)
                if len(dir_files) == 0:
                    os.rmdir(lfullpath)
            elif os.path.isfile(lfullpath):
                fmeta = os.stat(lfullpath)
                fmtime = int(fmeta.st_mtime)
                fsize = fmeta.st_size
                fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                if self.__check_syncstatus('*', fmtime, fsize, fnmd5) == 1:
                    # Synced before but gone remotely: delete locally.
                    if self.__rm_localfile(lfullpath, True) == 1:
                        self.writeerror('%s ERROR: Delete local file "%s" failed, skip download "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), lfullpath, rfullpath))
                        self.failcount_increase()
                    else:
                        self.synccount_increase()
                    continue
                elif os.path.exists('%s.db.syy' % lfullpath):
                    # Stale resume marker handling: a mismatched upload marker
                    # is discarded (re-upload below); a download marker means
                    # the partial file belongs to a remotely-deleted file.
                    with open('%s.db.syy' % lfullpath, 'r') as infoh:
                        syyinfo = infoh.readline()
                    if syyinfo.strip('\n') != 'upload:%d:%d' % (fmtime, fsize):
                        if syyinfo[0:6] == 'upload':
                            os.remove('%s.db.syy' % lfullpath)
                        else:
                            os.remove(lfullpath)
                            os.remove('%s.db.syy' % lfullpath)
                            continue
                if SyncY.TaskSemaphore.acquire():
                    synctask = SYTask(SYTask.Upload, lfullpath, fmtime, fsize, fnmd5, rfullpath, 0, 0, '', 'overwrite')
                    synctask.start()
        return 0
    def __start_sync(self):
        """Run one full pass over every enabled entry in SyncY.syncpath.

        For each path: validates the configuration, prepares the local
        directory and the per-path sync database, dispatches to the sync
        routine selected by 'synctype', then blocks until all worker threads
        have drained before reporting per-path success/failure counters.
        """
        self.__get_pcs_quota()
        for i in range(len(SyncY.syncpath)):
            # SyncY.syncpath is keyed by the stringified index.
            if 'localpath' not in SyncY.syncpath[str(i)] or 'remotepath' not in SyncY.syncpath[str(i)] or 'synctype' not in SyncY.syncpath[str(i)] or 'enable' not in SyncY.syncpath[str(i)]:
                self.writeerror('%s ERROR: The %d\'s of syncpath setting is invalid.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), i + 1))
                continue
            if SyncY.syncpath[str(i)]['enable'] == '0':
                continue
            self.reset_counter()
            ipath = ('%s:%s:%s' % (SyncY.syncpath[str(i)]['localpath'], SyncY.syncpath[str(i)]['remotepath'], SyncY.syncpath[str(i)]['synctype']))
            self.printlog('%s Start sync path: "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath))
            localpath = self.__catpath(SyncY.syncpath[str(i)]['localpath'])
            remotepath = self.__catpath(SyncY.pcsroot, SyncY.syncpath[str(i)]['remotepath'])
            # Reject remote path components PCS cannot store (leading/trailing
            # whitespace or dots, and the characters /<>\|*?:").
            ckdir = 0
            for rdir in remotepath.split('/'):
                if re.findall(r'^[\s\.\n].*|.*[/<>\\|\*\?:\"].*|.*[\s\.\n]$', rdir):
                    ckdir = 1
                    break
            if ckdir != 0:
                self.writeerror('%s ERROR: Sync "%s" failed, remote directory error.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath))
                continue
            # Create the local root if missing, inheriting owner/mode from
            # its parent directory.
            if not (os.path.exists(localpath)):
                os.mkdir(localpath)
                pmeta = os.stat(os.path.dirname(localpath))
                os.lchown(localpath, pmeta.st_uid, pmeta.st_gid)
                os.chmod(localpath, pmeta.st_mode)
            if localpath != '' and os.path.isdir(localpath):
                SyncY.syncydb = '%s/.syncy.info.db' % localpath
                # datacache 'on' loads the sync database into memory;
                # otherwise records are looked up from the open file handle.
                if SyncY.config['datacache'] == 'on':
                    self.__init_syncdata()
                else:
                    SyncY.sydblen = os.stat(SyncY.syncydb).st_size
                    SyncY.sydb = open(SyncY.syncydb, 'rb')
                SyncY.basedirlen = len(localpath)
                # Dispatch on synctype: 0/u upload, 1/u+ upload+, 2/d
                # download, 3/d+ download+, 4/s two-way sync.
                if SyncY.syncpath[str(i)]['synctype'].lower() in ['0', 'u', 'upload']:
                    self.__syncy_upload(localpath, remotepath)
                elif SyncY.syncpath[str(i)]['synctype'].lower() in ['1', 'u+', 'upload+']:
                    self.__syncy_uploadplus(localpath, remotepath)
                elif SyncY.syncpath[str(i)]['synctype'].lower() in ['2', 'd', 'download']:
                    self.__syncy_download(localpath, remotepath)
                elif SyncY.syncpath[str(i)]['synctype'].lower() in ['3', 'd+', 'download+']:
                    self.__syncy_downloadplus(localpath, remotepath)
                elif SyncY.syncpath[str(i)]['synctype'].lower() in ['4', 's', 'sync']:
                    self.__syncy_sync(localpath, remotepath)
                else:
                    self.writeerror('%s ERROR: The "synctype" of "%s" is invalid, must set to [0 - 4], skiped.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath))
                    self.printlog('%s ERROR: The "synctype" of "%s" is invalid, must set to [0 - 4], skiped.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath))
                    continue
                if SyncY.config['datacache'] == 'on':
                    SyncY.syncData = None
                else:
                    SyncY.sydb.close()
                # Wait for every SYTask worker thread to finish before
                # summarizing this path's counters.
                while True:
                    if threading.activeCount() > 1 or len(SyncY.synctask) > 0:
                        time.sleep(3)
                    else:
                        if SyncY.syncpath[str(i)]['synctype'].lower() in ['2', 'd', 'download']:
                            SyncY.syncytoken['synctotal'] += SyncY.synccount
                            self.__save_config()
                        if SyncY.failcount == 0 and SyncY.errorcount == 0:
                            # Compression only applies to upload-capable modes.
                            if SyncY.syncpath[str(i)]['synctype'].lower() not in ['2', 'd', 'download']:
                                self.__start_compress(SyncY.syncpath[str(i)]['localpath'])
                            self.printlog('%s Sync path: "%s" complete, Success sync %d files.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath, SyncY.synccount))
                        else:
                            self.printlog('%s Sync path: "%s" failed, %d files success, %d files failed, %d errors occurred.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath, SyncY.synccount, SyncY.failcount, SyncY.errorcount))
                            self.writeerror('%s ERROR: Sync path: "%s" failed, %d files success, %d files failed, %d errors occurred.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath, SyncY.synccount, SyncY.failcount, SyncY.errorcount))
                        break
            else:
                self.writeerror('%s ERROR: Sync "%s" failed, local directory is not exist or is normal file.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath))
                self.printlog('%s ERROR: Sync "%s" failed, local directory is not exist or is normal file.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ipath))
        self.__get_pcs_quota()
@staticmethod
def __test_chinese(tdir=''):
unicode_str = '\u4e2d\u6587\u8f6c\u7801\u6d4b\u8bd5'
unicode_str = eval('u"%s"' % unicode_str)
unicode_str = unicode_str.encode('utf8')
with open('%s/%s' % (tdir, unicode_str), 'w') as chnfn:
chnfn.write(unicode_str)
def __data_convert(self):
mpath = SyncY.config['syncpath'].split(';')
for i in range(len(mpath)):
if mpath[i] == '':
continue
localdir = mpath[i].split(':')[0:1]
syncydb = '%s/.syncy.info.db' % localdir
if os.path.exists(syncydb):
syncydbtmp = '%s/.syncy.info.db1' % localdir
if os.path.exists(syncydbtmp):
os.remove(syncydbtmp)
with open(syncydb, 'r') as sydb:
syncinfo = sydb.readlines()
if len(syncinfo[0]) > 100 or len(syncinfo[0].split(' ')[0]) != 32:
self.writeerror('%s Convert sync data failed "%s".\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), mpath[i]))
continue
with open(syncydbtmp, 'wb') as sydbnew:
for j in xrange(len(syncinfo)):
rmd5, lmtime, lsize, lmd5 = syncinfo[j].split(' ')
rmd5 = rmd5.decode('hex')
lmtime = struct.pack('>I', lmtime)
lsize = struct.pack('>I', lsize % 4294967296)
lmd5 = lmd5.decode('hex')
sydbnew.write('%s%s%s%s' % (rmd5, lmtime, lsize, lmd5))
os.rename(syncydbtmp, syncydb)
def __rebuild(self, mpath):
if len(mpath) == 0:
mpath = range(len(SyncY.syncpath))
for i in mpath:
i = int(i)
if i >= len(SyncY.syncpath):
continue
self.printlog("%s Start rebuild sync data for directory '%s'." % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), SyncY.syncpath[str(i)]['localpath']))
localpath = self.__catpath(SyncY.syncpath[str(i)]['localpath'])
remotepath = self.__catpath(SyncY.pcsroot, SyncY.syncpath[str(i)]['remotepath'])
SyncY.basedirlen = len(SyncY.syncpath[str(i)]['localpath'])
SyncY.syncydb = '%s/.syncy.info.db' % SyncY.syncpath[str(i)]['localpath']
if os.path.exists(SyncY.syncydb):
os.rename(SyncY.syncydb, '%s.bak%s' % (SyncY.syncydb, str(int(time.time()))))
with open(SyncY.syncydb, 'wb') as sydb:
ret = self.__rebuild_data(localpath, remotepath, sydb)
sydb.flush()
os.fsync(sydb.fileno())
if ret == 0:
self.printlog("%s Rebuild sync data completed for directory '%s'." % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), SyncY.syncpath[str(i)]['localpath']))
else:
self.printlog("%s Rebuild sync data failed for directory '%s'." % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), SyncY.syncpath[str(i)]['localpath']))
    def __rebuild_data(self, localpath, remotepath, sydb):
        """Recursively walk the remote tree and append one packed record to
        `sydb` for every remote file whose local counterpart exists with the
        same size.

        Record layout: 16-byte remote md5 + 4-byte mtime (big-endian) +
        4-byte size (mod 2**32) + 16-byte md5 of the relative path -- the
        same format __check_syncstatus reads.  Returns 0 on success, 1 when
        a remote listing fails.
        """
        startidx = 0
        retcode, rfnlist = self.__get_pcs_filelist(remotepath, startidx, SyncY.config['listnumber'])
        if retcode != 0:
            return 1
        while retcode == 0:
            for i in xrange(len(rfnlist)):
                rfullpath = rfnlist[i]['path'].encode('utf8')
                fnname = os.path.basename(rfullpath)
                lfullpath = '%s/%s' % (localpath, fnname)
                if self.__check_excludefiles(rfullpath) == 1 or self.__check_excludefiles(lfullpath) == 1:
                    continue
                if rfnlist[i]['isdir'] == 1:
                    # Recurse; failures below are ignored (best effort).
                    self.__rebuild_data(lfullpath, rfullpath, sydb)
                elif os.path.exists(lfullpath) and os.path.isfile(lfullpath):
                    fnstat = os.stat(lfullpath)
                    # Only size-matched pairs are recorded as "in sync".
                    if rfnlist[i]['size'] == fnstat.st_size:
                        fnmd5 = hashlib.md5('%s\n' % lfullpath[SyncY.basedirlen:]).digest()
                        fmtime = struct.pack('>I', int(fnstat.st_mtime))
                        fsize = struct.pack('>I', fnstat.st_size % 4294967296)
                        sydb.write('%s%s%s%s' % (rfnlist[i]['md5'].decode('hex'), fmtime, fsize, fnmd5))
            # A short page means the listing is exhausted.
            if len(rfnlist) < SyncY.config['listnumber']:
                break
            startidx += SyncY.config['listnumber']
            retcode, rfnlist = self.__get_pcs_filelist(remotepath, startidx, startidx + SyncY.config['listnumber'])
            if retcode != 0:
                return 1
        return 0
    def start(self):
        """Entry point: dispatch on the command-line arguments.

        No arguments -> run the sync once, or loop forever inside the
        configured 'syncperiod' hour window.  Recognized subcommands:
        'compress', 'convert', 'testchinese', 'rebuild'.  A path to an
        existing file performs a one-shot upload to the (optional) second
        argument's remote directory.  'sybind'/'cpbind' are silently
        ignored here (presumably handled elsewhere -- confirm).
        """
        if len(self.__argv) == 0:
            if SyncY.config['syncperiod'] == '':
                self.__start_sync()
            else:
                # 'syncperiod' is "HH-HH"; sync only inside that window.
                starthour, endhour = SyncY.config['syncperiod'].split('-', 1)
                curhour = time.localtime().tm_hour
                starthour = int(starthour)
                endhour = int(endhour)
                while True:
                    # NOTE(review): for a wrap-around window (end < start)
                    # this condition admits hours outside [start, end);
                    # the same formula is used for the speed-limit period
                    # in SYCurl.request -- verify intended semantics.
                    if (endhour > starthour and starthour <= curhour < endhour) or (endhour < starthour and (curhour < starthour or curhour >= endhour)):
                        self.__start_sync()
                        self.__check_expires()
                        time.sleep(SyncY.config['syncinterval'])
                    else:
                        # Outside the window: poll every 5 minutes.
                        time.sleep(300)
                    curhour = time.localtime().tm_hour
        elif self.__argv[0] == 'compress':
            self.__start_compress()
        elif self.__argv[0] == 'convert':
            self.__data_convert()
        elif self.__argv[0] == 'testchinese':
            self.__test_chinese(self.__argv[1])
        elif self.__argv[0] == 'rebuild':
            self.__rebuild(self.__argv[1:])
        elif os.path.isfile(self.__argv[0]):
            # One-shot upload of a single file, bypassing the sync database.
            fname = os.path.basename(self.__argv[0])
            if len(self.__argv) == 2:
                pcsdir = self.__catpath(SyncY.pcsroot, self.__argv[1])
            else:
                pcsdir = SyncY.pcsroot
            if self.__check_pcspath(pcsdir, fname) == 0:
                self.__upload_file_nosync(self.__argv[0], self.__catpath(pcsdir, fname))
        elif not (self.__argv[0] in ["sybind", "cpbind"]):
            print('%s Unknown command "%s"' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), ' '.join(self.__argv)))
class SYCurl():
    """Thin pycurl wrapper with retry, optional bandwidth limiting, and
    range-based resumable upload/download support.

    The request type selects the transfer mode: Normal buffers the response
    body in memory, Upload streams a byte range of a local file, Download
    writes a byte range directly into the target file under a record lock.
    """
    # Request types (rtype argument of request()).
    Normal = 0
    Upload = 1
    Download = 2
    def __init__(self):
        self.__response = ''      # accumulated response body (Normal mode)
        self.__op = None          # current request type
        self.__fd = None          # file handle for Upload/Download
        self.__startpos = 0       # next write offset (Download)
        self.__endpos = None      # inclusive end of the byte range
    def __write_data(self, rsp):
        """pycurl WRITEFUNCTION: write to the file (Download) or buffer the
        body (other modes).  Returning 0 (a short write) makes pycurl abort
        the transfer when the server sends more than the requested range."""
        rsplen = len(rsp)
        if self.__op == SYCurl.Download:
            if self.__startpos + rsplen - 1 > self.__endpos:
                return 0
            self.__fd.write(rsp)
            self.__startpos += rsplen
        else:
            self.__response += rsp
        return len(rsp)
    def __read_data(self, size):
        # pycurl READFUNCTION-style helper: feed upload data from the file.
        return self.__fd.read(size)
    @staticmethod
    def __write_header(rsp):
        # Discard response headers (length must be returned to pycurl).
        return len(rsp)
    def request(self, url, rdata='', method='POST', rtype=0, fnname=''):
        """Perform an HTTP(S) request with up to config['retrytimes'] retries.

        For Upload/Download, `rdata` is the byte range "start-end" and
        `fnname` the local file.  For Normal, `rdata` is the POST body.
        Returns (http_status_or_curl_errno, response_body); on non-2xx with
        an empty body a synthetic JSON error document is returned so callers
        can always json.loads the response.
        """
        retrycnt = 0
        self.__op = rtype
        while retrycnt <= SyncY.config['retrytimes']:
            if __DEBUG__:
                SyncY.printlog('%s Info(%s): Start curl request(%s) %d times for %s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), threading.currentThread().name, rdata, retrycnt + 1, fnname))
            if self.__op != SYCurl.Normal:
                startpos, self.__endpos = rdata.split('-', 1)
                startpos = self.__startpos = int(startpos)
                self.__endpos = int(self.__endpos)
            self.__response = ''
            curl = pycurl.Curl()
            try:
                curl.setopt(pycurl.URL, url)
                # NOTE(review): peer verification is disabled while host
                # verification is on -- confirm this is intentional.
                curl.setopt(pycurl.SSL_VERIFYPEER, 0)
                curl.setopt(pycurl.SSL_VERIFYHOST, 2)
                curl.setopt(pycurl.FOLLOWLOCATION, 1)
                curl.setopt(pycurl.CONNECTTIMEOUT, 15)
                # Abort transfers slower than 1 byte/s for 30 s (stall guard).
                curl.setopt(pycurl.LOW_SPEED_LIMIT, 1)
                curl.setopt(pycurl.LOW_SPEED_TIME, 30)
                curl.setopt(pycurl.USERAGENT, '')
                curl.setopt(pycurl.HEADER, 0)
                curl.setopt(pycurl.NOSIGNAL, 1)
                curl.setopt(pycurl.WRITEFUNCTION, self.__write_data)
                # Apply bandwidth caps only inside the configured hour window.
                starthour, endhour = SyncY.config['speedlimitperiod'].split('-', 1)
                starthour = int(starthour)
                endhour = int(endhour)
                curhour = time.localtime().tm_hour
                if (endhour > starthour and starthour <= curhour < endhour) or (endhour < starthour and (curhour < starthour or curhour >= endhour)):
                    curl.setopt(pycurl.MAX_SEND_SPEED_LARGE, SyncY.config['maxsendspeed'])
                    curl.setopt(pycurl.MAX_RECV_SPEED_LARGE, SyncY.config['maxrecvspeed'])
                if self.__op == SYCurl.Upload:
                    # Stream the requested byte range from the local file,
                    # holding a shared lock for the duration of the transfer.
                    curl.setopt(pycurl.UPLOAD, 1)
                    with open(fnname, 'rb') as self.__fd:
                        self.__fd.seek(startpos)
                        curl.setopt(pycurl.READDATA, self.__fd)
                        curl.setopt(pycurl.INFILESIZE, self.__endpos - startpos + 1)
                        fcntl.flock(self.__fd, fcntl.LOCK_SH)
                        curl.perform()
                        fcntl.flock(self.__fd, fcntl.LOCK_UN)
                elif self.__op == SYCurl.Download:
                    # Write the ranged response in place, exclusively locking
                    # only the byte region being written.
                    curl.setopt(pycurl.RANGE, rdata)
                    with open(fnname, 'rb+') as self.__fd:
                        self.__fd.seek(startpos)
                        fcntl.lockf(self.__fd, fcntl.LOCK_EX, self.__endpos - startpos + 1, startpos, 0)
                        curl.perform()
                        self.__fd.flush()
                        os.fdatasync(self.__fd.fileno())
                        fcntl.lockf(self.__fd, fcntl.LOCK_UN, self.__endpos - startpos + 1, startpos, 0)
                else:
                    curl.setopt(pycurl.CUSTOMREQUEST, method)
                    if method == 'POST':
                        curl.setopt(pycurl.POSTFIELDS, rdata)
                    curl.perform()
                retcode = curl.getinfo(pycurl.HTTP_CODE)
                # Success, a definite 404, or retries exhausted: return.
                # Other >=400 statuses are retried after retrydelay.
                if retcode < 400 or retcode == 404 or retrycnt == SyncY.config['retrytimes']:
                    if retcode != 200 and retcode != 206 and self.__response == '':
                        # Synthesize a JSON error body so callers can parse it.
                        self.__response = '{"error_code":%d,"error_msg":"Returned by the server is not in the expected results."}' % retcode
                    return retcode, self.__response
                else:
                    retrycnt += 1
                    time.sleep(SyncY.config['retrydelay'])
            except pycurl.error, error:
                errno, errstr = error
                if retrycnt == SyncY.config['retrytimes']:
                    # Out of retries: report the curl errno as the status.
                    return errno, '{"error_code":%d,"error_msg":"%s"}' % (errno, errstr)
                else:
                    retrycnt += 1
            finally:
                curl.close()
            if __DEBUG__:
                SyncY.printlog('%s Info(%s): Complete curl request(%s) %d times for %s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), threading.currentThread().name, rdata, retrycnt + 1, fnname))
class SYTask(threading.Thread):
    """Worker thread performing one file transfer (upload or download).

    Registers itself in SyncY.synctask keyed by the md5 digest of the file's
    relative path; for sliced transfers that entry accumulates per-slice
    state shared with the SYThread slice workers.  Releases
    SyncY.TaskSemaphore (acquired by the caller before construction) on exit
    and updates the global success/failure counters.
    """
    # Transfer directions (numerically equal to SYCurl.Upload/Download).
    Upload = 1
    Download = 2
    def __init__(self, syncoperation, filepath, fmtime, fsize, fnmd5, pcspath, rmtime, rsize, rmd5, ondup):
        threading.Thread.__init__(self)
        self.__op = syncoperation    # SYTask.Upload or SYTask.Download
        self.__filepath = filepath   # local file path
        self.__fmtime = fmtime       # local mtime (int)
        self.__fsize = fsize         # local size (bytes)
        self.__fnmd5 = fnmd5         # md5 digest of the relative path
        self.__pcspath = pcspath     # remote PCS path
        self.__rmtime = rmtime       # remote mtime
        self.__rsize = rsize         # remote size
        self.__rmd5 = rmd5           # remote content md5 (hex string)
        self.__ondup = ondup         # PCS ondup policy, e.g. 'overwrite'
        # Register in the global task table; slice workers append state here.
        SyncY.synctask[self.__fnmd5] = []
    def run(self):
        """Select the transfer strategy, run it, and record the outcome."""
        if __DEBUG__:
            SyncY.printlog('%s Info(%s): start run task(op:%s) for %s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, str(self.__op), self.__filepath))
        try:
            ret = 1
            if self.__op == SYCurl.Upload:
                # A leftover .db.syy marker means a resumable slice upload.
                if os.path.exists(self.__filepath + '.db.syy'):
                    ret = self.__slice_uploadfile()
                else:
                    # Small files (<=256 KiB) skip the rapid-upload probe.
                    if self.__fsize <= 262144:
                        ret = self.__upload_file()
                    else:
                        ret = self.__rapid_uploadfile()
            elif self.__op == SYCurl.Download:
                ret = self.__download_file()
            else:
                SyncY.writeerror('%s ERROR: Unknown sync operation(%s) of threading operation.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__op))
                SyncY.errorcount_increase()
            if ret == 0:
                SyncY.synccount_increase()
            else:
                SyncY.failcount_increase()
        except Exception, e:
            SyncY.writeerror('%s ERROR: Transfer task exception error occurred: %s .\n%s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), e, traceback.format_exc()))
            SyncY.failcount_increase()
        finally:
            # Deregister and free a slot for the next queued task.
            del SyncY.synctask[self.__fnmd5]
            SyncY.TaskSemaphore.release()
            if __DEBUG__:
                SyncY.printlog('%s Info(%s): exit task(op:%s) for %s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, str(self.__op), self.__filepath))
    def __create_emptyfile(self):
        """Pre-allocate '<filepath>.syy' at the remote file's size so ranged
        downloads can write slices in place.  Returns 0 on success, 1 on
        failure."""
        with open('%s.syy' % self.__filepath, 'wb') as f:
            try:
                if self.__rsize > 0:
                    # Seek-and-write-one-byte extends the file (sparse).
                    f.seek(self.__rsize - 1)
                    f.write('\0')
                f.flush()
                os.fsync(f.fileno())
            except Exception, e:
                SyncY.writeerror('%s ERROR: Create file "%s" failed. Exception: "%s".\n%s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath, e, traceback.format_exc()))
                return 1
        return 0
    def __save_data(self):
        """Append this file's packed record (16B rmd5 + 4B mtime + 4B size
        mod 2**32 + 16B path md5) to the sync database, under an exclusive
        lock since worker threads write concurrently."""
        with open(SyncY.syncydb, 'ab', 0) as sydb:
            try:
                fcntl.flock(sydb, fcntl.LOCK_EX)
                rmd5 = self.__rmd5.decode('hex')
                fmtime = struct.pack('>I', self.__fmtime)
                fsize = struct.pack('>I', self.__fsize % 4294967296)
                sydb.write('%s%s%s%s' % (rmd5, fmtime, fsize, self.__fnmd5))
                sydb.flush()
                os.fsync(sydb.fileno())
                fcntl.flock(sydb, fcntl.LOCK_UN)
            except Exception, e:
                SyncY.writeerror('%s ERROR: Save sync data failed (%s).\n%s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), e, traceback.format_exc()))
    def __md5sum(self):
        """Return the hex md5 of the whole local file (read in 8 KiB chunks).
        Not referenced in the code visible here; kept as a helper."""
        with open(self.__filepath, 'rb') as fh:
            m = hashlib.md5()
            fbuffer = fh.read(8192)
            while fbuffer:
                m.update(fbuffer)
                fbuffer = fh.read(8192)
            cmd5 = m.hexdigest()
        return cmd5
    def __rapid_checkcode(self):
        """Compute the three checksums PCS rapid-upload needs:
        (crc32 as hex, md5 of the whole file, md5 of the first 256 KiB)."""
        with open(self.__filepath, 'rb') as fh:
            m = hashlib.md5()
            fbuffer = fh.read(8192)
            crc = 0
            while fbuffer:
                m.update(fbuffer)
                crc = zlib.crc32(fbuffer, crc) & 0xffffffff
                fbuffer = fh.read(8192)
            cmd5 = m.hexdigest()
            m = hashlib.md5()
            fh.seek(0)
            # Slice md5 covers the first 32 * 8 KiB = 256 KiB.
            for i in range(32):
                fbuffer = fh.read(8192)
                m.update(fbuffer)
        return '%x' % crc, cmd5, m.hexdigest()
    def __upload_file(self):
        """Upload the whole file in a single request; verify the reported
        remote size and record the sync state.  Returns 0/1."""
        if __DEBUG__:
            SyncY.printlog('%s Info(%s): start upload whole file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath))
        sycurl = SYCurl()
        retcode, responses = sycurl.request('https://c.pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'upload', 'access_token': SyncY.syncytoken['access_token'], 'path': self.__pcspath, 'ondup': self.__ondup}), '0-%d' % (os.stat(self.__filepath).st_size - 1), 'POST', SYCurl.Upload, self.__filepath)
        responses = json.loads(responses)
        if retcode != 200:
            SyncY.writeerror('%s ERROR(Errno:%d): Upload file "%s" to PCS failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, self.__filepath, responses['error_msg']))
            return 1
        if responses['size'] == self.__fsize:
            self.__rmd5 = responses['md5']
        else:
            # Size mismatch: remove the bad remote copy and fail the task.
            SyncY.writeerror('%s ERROR: Upload file "%s" failed, remote file size not equal to local.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
            sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'delete', 'access_token': SyncY.syncytoken['access_token'], 'path': self.__pcspath}), '', 'POST', SYCurl.Normal)
            return 1
        self.__save_data()
        SyncY.printlog('%s Upload file "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
        return 0
    def __rapid_uploadfile(self):
        """Try PCS 'rapidupload' (server-side dedup by checksums); fall back
        to whole-file or slice upload when the server has no copy (31079).
        Returns 0/1."""
        if __DEBUG__:
            SyncY.printlog('%s Info(%s): start rapid upload file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath))
        crc, contentmd5, slicemd5 = self.__rapid_checkcode()
        sycurl = SYCurl()
        retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'rapidupload', 'access_token': SyncY.syncytoken['access_token'], 'path': self.__pcspath, 'content-length': self.__fsize, 'content-md5': contentmd5, 'slice-md5': slicemd5, 'content-crc32': crc, 'ondup': self.__ondup}), '', 'POST', SYCurl.Normal)
        responses = json.loads(responses)
        if retcode != 200:
            # 31079: file md5 not found on the server -> real upload needed.
            if responses['error_code'] == 31079:
                SyncY.printlog('%s File md5 not found, upload the whole file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
                if self.__fsize <= SyncY.config['blocksize'] * 1048576 + 1048576:
                    return self.__upload_file()
                else:
                    return self.__slice_uploadfile()
            else:
                SyncY.writeerror('%s ERROR(Errno:%d): Rapid upload file "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, self.__filepath, responses['error_msg']))
                return 1
        else:
            if responses['size'] == self.__fsize:
                self.__rmd5 = responses['md5']
            else:
                SyncY.writeerror('%s ERROR: File "%s" is rapid uploaded, but remote file size not equal to local.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
                return 1
        self.__save_data()
        SyncY.printlog('%s Rapid upload file "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
        return 0
    def __slice_uploadfile(self):
        """Resumable multi-threaded slice upload, finalized with the PCS
        'createsuperfile' call.

        Progress lives in '<filepath>.db.syy' (first line
        'upload:<mtime>:<size>', then one line per completed slice) and is
        mirrored into SyncY.synctask[fnmd5]; index [0][3] tracks the worker
        count and [0][4] collects slice errors.  Returns 0/1."""
        if __DEBUG__:
            SyncY.printlog('%s Info(%s): start slice upload file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath))
        if self.__fsize <= (SyncY.config['blocksize'] + 1) * 1048576:
            # Too small to be worth slicing.
            return self.__upload_file()
        elif self.__fsize > SyncY.config['blocksize'] * 1073741824:
            SyncY.writeerror('%s ERROR: File "%s" size exceeds the setting, maxsize = blocksize * 1024M.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
            return 1
        if not os.path.exists('%s.db.syy' % self.__filepath):
            # Fresh transfer: write the marker header.
            with open('%s.db.syy' % self.__filepath, 'w') as ulfn:
                ulfn.write('upload:%d:%d\n' % (self.__fmtime, self.__fsize))
            SyncY.synctask[self.__fnmd5].append(['upload', self.__fmtime, self.__fsize])
        else:
            with open('%s.db.syy' % self.__filepath, 'r') as ulfn:
                line = ulfn.readline()
            if line.strip('\n') != 'upload:%d:%d' % (self.__fmtime, self.__fsize):
                # Local file changed since the marker was written: restart.
                with open('%s.db.syy' % self.__filepath, 'w') as ulfn:
                    ulfn.write('upload:%d:%d\n' % (self.__fmtime, self.__fsize))
                SyncY.printlog('%s Local file "%s" is modified, reupload the whole file.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
            else:
                # Resume: load header and completed-slice records.
                with open('%s.db.syy' % self.__filepath, 'r') as ulfn:
                    SyncY.synctask[self.__fnmd5].append(ulfn.readline().strip('\n').split(':'))
                    SyncY.synctask[self.__fnmd5][0][2] = int(SyncY.synctask[self.__fnmd5][0][2])
                    line = ulfn.readline()
                    while line:
                        sliceinfo = line.strip('\n').split(':')[1:]
                        # Status '0' (in flight when interrupted) -> 2 (redo).
                        if sliceinfo[2] == '0':
                            sliceinfo[2] = 2
                        SyncY.synctask[self.__fnmd5].append([int(sliceinfo[0]), int(sliceinfo[1]), int(sliceinfo[2]), sliceinfo[3]])
                        line = ulfn.readline()
                SyncY.printlog('%s Resuming slice upload file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
        threadcond = threading.Condition()
        if threadcond.acquire():
            # One worker per blocksize-MiB slice, capped by threadnumber.
            maxthnum = int(self.__fsize / SyncY.config['blocksize'] / 1048576)
            if maxthnum > SyncY.config['threadnumber']:
                maxthnum = SyncY.config['threadnumber']
            SyncY.synctask[self.__fnmd5][0].append(maxthnum)
            SyncY.synctask[self.__fnmd5][0].append([])
            for i in range(maxthnum):
                sythread = SYThread(threadcond, self.__fnmd5, self.__filepath, self.__pcspath)
                sythread.start()
            # Wait until the last worker signals completion.
            if SyncY.synctask[self.__fnmd5][0][3] > 0:
                threadcond.wait()
            threadcond.release()
        if __DEBUG__:
            SyncY.printlog('%s Info(%s): all threads is exit for upload file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath))
        if len(SyncY.synctask[self.__fnmd5][0][4]) > 0:
            SyncY.writeerror('%s ERROR: Slice upload file "%s" failed.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
            return 1
        # Stitch the uploaded slices together on the server side.
        param = {'block_list': []}
        for i in xrange(1, len(SyncY.synctask[self.__fnmd5]), 1):
            param['block_list'].append(SyncY.synctask[self.__fnmd5][i][3])
        sycurl = SYCurl()
        retcode, responses = sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'createsuperfile', 'access_token': SyncY.syncytoken['access_token'], 'path': self.__pcspath, 'ondup': self.__ondup}), 'param=%s' % json.dumps(param), 'POST', SYCurl.Normal)
        responses = json.loads(responses)
        if retcode != 200:
            SyncY.writeerror('%s ERROR(Errno:%d): Create superfile "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, self.__filepath, responses['error_msg']))
            return 1
        os.remove('%s.db.syy' % self.__filepath)
        if responses['size'] == self.__fsize:
            self.__rmd5 = responses['md5']
        else:
            SyncY.writeerror('%s ERROR: Slice upload file "%s" failed, remote file size not equal to local.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
            sycurl.request('https://pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'delete', 'access_token': SyncY.syncytoken['access_token'], 'path': self.__pcspath}), '', 'POST', SYCurl.Normal)
            return 1
        self.__save_data()
        SyncY.printlog('%s Slice upload file "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__filepath))
        return 0
    def __download_file(self):
        """Resumable download into '<filepath>.syy', renamed into place on
        success.

        '<filepath>.db.syy' holds 'download:<rmd5>:<rsize>' plus completed
        slice records; small files are fetched in one ranged request, larger
        ones by SYThread slice workers.  Returns 0/1."""
        if __DEBUG__:
            SyncY.printlog('%s Info(%s): start download file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath))
        if os.path.exists('%s.db.syy' % self.__filepath) and os.path.exists('%s.syy' % self.__filepath):
            with open('%s.db.syy' % self.__filepath, 'r') as dlfn:
                dlinfo = dlfn.readlines()
            if dlinfo[0].strip('\n') != 'download:%s:%d' % (self.__rmd5, self.__rsize):
                # Remote file changed since the marker: restart from scratch.
                with open('%s.db.syy' % self.__filepath, 'w') as dlfn:
                    dlfn.write('download:%s:%d\n' % (self.__rmd5, self.__rsize))
                SyncY.printlog('%s Remote file:"%s" is modified, redownload the whole file.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__pcspath))
                # NOTE(review): this removes the final path, not the partial
                # '.syy' file -- confirm intended.
                os.remove(self.__filepath)
            else:
                if os.path.exists('%s.syy' % self.__filepath):
                    SyncY.printlog('%s Resuming download file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__pcspath))
                else:
                    with open('%s.db.syy' % self.__filepath, 'w') as dlfn:
                        dlfn.write('download:%s:%d\n' % (self.__rmd5, self.__rsize))
        else:
            with open('%s.db.syy' % self.__filepath, 'w') as dlfn:
                dlfn.write('download:%s:%d\n' % (self.__rmd5, self.__rsize))
        # Pre-allocate the staging file so ranged writes land in place.
        if not os.path.exists('%s.syy' % self.__filepath) and self.__create_emptyfile() == 1:
            return 1
        if self.__rsize <= (SyncY.config['blocksize'] + 1) * 1048576:
            # Small file: single ranged request.
            if __DEBUG__:
                SyncY.printlog('%s Info(%s): start download whole file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath))
            sycurl = SYCurl()
            retcode, responses = sycurl.request('https://d.pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'download', 'access_token': SyncY.syncytoken['access_token'], 'path': self.__pcspath}), '0-%d' % (self.__rsize - 1), 'GET', SYCurl.Download, '%s.syy' % self.__filepath)
            if retcode != 200 and retcode != 206:
                if __DEBUG__:
                    SyncY.printlog('%s Info(%s): download file "%s" failed: %s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath, responses))
                responses = json.loads(responses)
                SyncY.writeerror('%s ERROR(Errno:%d): Download file "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, self.__pcspath, responses['error_msg']))
                return 1
        else:
            # Large file: load resume state and fan out slice workers.
            with open('%s.db.syy' % self.__filepath, 'r') as dlfn:
                SyncY.synctask[self.__fnmd5].append(dlfn.readline().strip('\n').split(':'))
                SyncY.synctask[self.__fnmd5][0][2] = int(SyncY.synctask[self.__fnmd5][0][2])
                line = dlfn.readline()
                while line:
                    sliceinfo = line.strip('\n').split(':')[1:]
                    # Status '0' (in flight when interrupted) -> 2 (redo).
                    if sliceinfo[2] == '0':
                        sliceinfo[2] = 2
                    SyncY.synctask[self.__fnmd5].append([int(sliceinfo[0]), int(sliceinfo[1]), int(sliceinfo[2]), sliceinfo[3]])
                    line = dlfn.readline()
            threadcond = threading.Condition()
            if threadcond.acquire():
                maxthnum = int(self.__rsize / SyncY.config['blocksize'] / 1048576)
                if maxthnum > SyncY.config['threadnumber']:
                    maxthnum = SyncY.config['threadnumber']
                SyncY.synctask[self.__fnmd5][0].append(maxthnum)
                SyncY.synctask[self.__fnmd5][0].append([])
                for i in range(maxthnum):
                    sythread = SYThread(threadcond, self.__fnmd5, self.__filepath, self.__pcspath)
                    sythread.start()
                if SyncY.synctask[self.__fnmd5][0][3] > 0:
                    threadcond.wait()
                threadcond.release()
            if __DEBUG__:
                SyncY.printlog('%s Info(%s): all threads is exit for download file "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, self.__filepath))
            if len(SyncY.synctask[self.__fnmd5][0][4]) > 0:
                SyncY.writeerror('%s ERROR: Download file "%s" failed.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__pcspath))
                return 1
            # The last slice record must reach the final byte of the file.
            if int(SyncY.synctask[self.__fnmd5][len(SyncY.synctask[self.__fnmd5]) - 1][1]) != self.__rsize - 1:
                SyncY.writeerror('%s ERROR: Download file "%s" failed, not download all slice.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__pcspath))
                return 1
        os.remove('%s.db.syy' % self.__filepath)
        # Mirror the remote mtime, inherit parent ownership, clear exec bits.
        if self.__rmtime != 0:
            os.utime('%s.syy' % self.__filepath, (self.__rmtime, self.__rmtime))
        pmeta = os.stat(os.path.dirname('%s.syy' % self.__filepath))
        os.lchown('%s.syy' % self.__filepath, pmeta.st_uid, pmeta.st_gid)
        os.chmod('%s.syy' % self.__filepath, pmeta.st_mode - stat.S_IXUSR - stat.S_IXGRP - stat.S_IXOTH)
        fmeta = os.stat('%s.syy' % self.__filepath)
        if fmeta.st_size != self.__rsize:
            SyncY.writeerror('%s ERROR: Download file "%s" failed, downloaded file size not equal to remote file size.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__pcspath))
            os.remove('%s.syy' % self.__filepath)
            return 1
        self.__fmtime = int(fmeta.st_mtime)
        self.__fsize = fmeta.st_size
        # Atomically move the staging file into place and record the sync.
        os.rename('%s.syy' % self.__filepath, self.__filepath)
        self.__save_data()
        SyncY.printlog('%s Download file "%s" completed.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.__pcspath))
        return 0
class SYThread(threading.Thread):
    """Worker thread that transfers file slices for one sync task.

    Several SYThread workers cooperate on a single task keyed by the
    file's name-md5 in the shared table ``SyncY.synctask[fnmd5]``:

      * entry 0 is the task header; the fields used here are
        ``[0][0]`` operation ('upload' or 'download'), ``[0][2]``
        total file size, ``[0][3]`` count of live worker threads and
        ``[0][4]`` the list of slice indexes that failed;
      * entries 1..n are per-slice records
        ``[startpos, endpos, state, md5]`` with state 0 = in progress,
        1 = completed, 2 = failed (see __get_nextslice/run below).

    Every access to that shared table is serialized through the
    condition variable handed to ``__init__``; the last worker to
    exit notifies the task owner waiting on it.
    """
    def __init__(self, threadcond, fnmd5, filepath, pcspath):
        # threadcond: threading.Condition guarding SyncY.synctask.
        # fnmd5: key of this task inside SyncY.synctask.
        # filepath: local file path; pcspath: remote (Baidu PCS) path.
        threading.Thread.__init__(self)
        self.__threadcond = threadcond
        self.__fnmd5 = fnmd5
        self.__filepath = filepath
        self.__pcspath = pcspath
    def run(self):
        """Claim and transfer slices until none are left or one fails.

        Returns 0 on normal exhaustion of slices, 1 on error.
        """
        if __DEBUG__:
            SyncY.printlog('%s Info(%s): start thread for %s: %s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, SyncY.synctask[self.__fnmd5][0][0], self.__filepath))
        idx = 0
        # Claim the first slice under the lock; idx == 0 means there is
        # nothing left to do.
        if self.__threadcond.acquire():
            idx, startpos, endpos = self.__get_nextslice()
            self.__save_status()
            self.__threadcond.release()
        retcode = 0
        responses = None
        try:
            sycurl = SYCurl()
            while True:
                if idx == 0:
                    return 0
                if SyncY.synctask[self.__fnmd5][0][0] == 'upload':
                    if __DEBUG__:
                        SyncY.printlog('%s Info(%s): Start upload slice(idx:%d) for "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, idx, self.__filepath))
                    # Upload one byte range of the local file as a PCS tmpfile.
                    retcode, responses = sycurl.request('https://c.pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'upload', 'access_token': SyncY.syncytoken['access_token'], 'type': 'tmpfile'}), '%d-%d' % (startpos, endpos), 'POST', SYCurl.Upload, self.__filepath)
                    responses = json.loads(responses)
                    if retcode != 200:
                        SyncY.writeerror('%s ERROR(Errno:%d): Slice upload file "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, self.__filepath, responses['error_msg']))
                        return 1
                    if __DEBUG__:
                        SyncY.printlog('%s Info(%s): Complete upload slice(idx:%d) for "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, idx, self.__filepath))
                    # Mark this slice done, record the server-side md5 of the
                    # slice, then claim the next one -- all under the lock.
                    if self.__threadcond.acquire():
                        SyncY.synctask[self.__fnmd5][idx][2] = 1
                        SyncY.synctask[self.__fnmd5][idx][3] = responses['md5']
                        idx, startpos, endpos = self.__get_nextslice()
                        self.__save_status()
                        self.__threadcond.release()
                elif SyncY.synctask[self.__fnmd5][0][0] == 'download':
                    if __DEBUG__:
                        SyncY.printlog('%s Info(%s): Start download slice(idx:%d) for "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, idx, self.__filepath))
                    # Download one byte range into the local "<file>.syy" staging file.
                    retcode, responses = sycurl.request('https://d.pcs.baidu.com/rest/2.0/pcs/file?%s' % urlencode({'method': 'download', 'access_token': SyncY.syncytoken['access_token'], 'path': self.__pcspath}), '%d-%d' % (startpos, endpos), 'GET', SYCurl.Download, '%s.syy' % self.__filepath)
                    # 206 (Partial Content) is the expected answer for a range request.
                    if retcode != 200 and retcode != 206:
                        if __DEBUG__:
                            SyncY.printlog('%s Info(%s): Slice download(idx:%d) for "%s" failed: %s.' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, idx, self.__filepath, responses))
                        responses = json.loads(responses)
                        SyncY.writeerror('%s ERROR(Errno:%d): Slice download file "%s" failed: %s.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, self.__pcspath, responses['error_msg']))
                        return 1
                    if __DEBUG__:
                        SyncY.printlog('%s Info(%s): Complete download slice(idx:%d) for "%s".' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), self.name, idx, self.__filepath))
                    if self.__threadcond.acquire():
                        SyncY.synctask[self.__fnmd5][idx][2] = 1
                        idx, startpos, endpos = self.__get_nextslice()
                        self.__save_status()
                        self.__threadcond.release()
                else:
                    SyncY.writeerror('%s ERROR: Unknown operation(%s) of threading operation.\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), SyncY.synctask[self.__fnmd5][0][0]))
                    return 1
                retcode = 0
                responses = None
        except Exception, e:
            SyncY.writeerror('%s ERROR: Transfer thread exception error occurred. return code: %d, response body: %s.\n%s .\n%s\n' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), retcode, str(responses), e, traceback.format_exc()))
        finally:
            # Always executed, including on the early "return 1" paths above:
            # flag the slice we still held as failed, drop the live-thread
            # count and, if we are the last worker, wake the task owner.
            if self.__threadcond.acquire():
                if idx != 0:
                    SyncY.synctask[self.__fnmd5][idx][2] = 2
                    SyncY.synctask[self.__fnmd5][0][4].append(idx)
                    self.__save_status()
                SyncY.synctask[self.__fnmd5][0][3] -= 1
                if SyncY.synctask[self.__fnmd5][0][3] == 0:
                    self.__threadcond.notify()
                self.__threadcond.release()
    def __save_status(self):
        """Persist the slice table so an interrupted transfer can resume.

        Written to a temp file first, fsync'ed, then atomically renamed
        onto "<file>.db.syy". Caller must hold the condition lock.
        """
        with open('%s.dbtmp.syy' % self.__filepath, 'w') as dbnew:
            dbnew.write('%s:%s:%d\n' % (SyncY.synctask[self.__fnmd5][0][0], SyncY.synctask[self.__fnmd5][0][1], SyncY.synctask[self.__fnmd5][0][2]))
            for i in xrange(1, len(SyncY.synctask[self.__fnmd5]), 1):
                dbnew.write('%d:%d:%d:%d:%s\n' % (i, SyncY.synctask[self.__fnmd5][i][0], SyncY.synctask[self.__fnmd5][i][1], SyncY.synctask[self.__fnmd5][i][2], SyncY.synctask[self.__fnmd5][i][3]))
            dbnew.flush()
            os.fsync(dbnew.fileno())
        os.rename('%s.dbtmp.syy' % self.__filepath, '%s.db.syy' % self.__filepath)
    def __get_nextslice(self):
        """Pick the next slice to transfer; caller must hold the lock.

        Prefers re-doing a previously interrupted slice (state 2 that is
        not already recorded as failed); otherwise appends a fresh slice
        after the last one. Returns (idx, startpos, endpos), or
        (0, 0, 0) when the whole file is covered.
        """
        idx, startpos, endpos = (0, 0, 0)
        for i in xrange(1, len(SyncY.synctask[self.__fnmd5]), 1):
            if SyncY.synctask[self.__fnmd5][i][2] not in [1, 0] and i not in SyncY.synctask[self.__fnmd5][0][4]:
                idx = i
                startpos = SyncY.synctask[self.__fnmd5][i][0]
                endpos = SyncY.synctask[self.__fnmd5][i][1]
                SyncY.synctask[self.__fnmd5][i][2] = 0
                break
        if idx == 0:
            idx = len(SyncY.synctask[self.__fnmd5])
            if idx == 1:
                startpos = 0
            else:
                # Continue right after the previous slice's end position.
                startpos = SyncY.synctask[self.__fnmd5][idx - 1][1] + 1
            filesize = SyncY.synctask[self.__fnmd5][0][2]
            if startpos == filesize:
                return 0, 0, 0
            elif filesize - startpos > SyncY.config['blocksize'] * 1048576 + 1048576:
                # Full-sized slice of blocksize MiB.
                endpos = startpos + SyncY.config['blocksize'] * 1048576 - 1
            else:
                # Remainder (up to blocksize+1 MiB) goes into one final slice.
                endpos = filesize - 1
            SyncY.synctask[self.__fnmd5].append([startpos, endpos, 0, '0'])
        return idx, startpos, endpos
if __name__ == '__main__':
    # "syncy version" just reports the release string; anything else
    # starts a sync run with the remaining CLI arguments.
    cli_args = sys.argv[1:]
    if cli_args and cli_args[0] == 'version':
        print(__VERSION__)
    else:
        SyncY(cli_args).start()
    sys.exit(0)
| wwbhl/openwrt | package/syncy/files/usr/bin/syncy.py | Python | mit | 98,343 |
# Copyright (C) 2010-2018 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .script_interface import ScriptInterfaceHelper, script_interface_register
@script_interface_register
class Observable(ScriptInterfaceHelper):
    """Base class of all observables.

    Binds the core methods ``calculate()`` and ``n_values()`` so every
    observable exposes them on the Python side.
    """
    _so_name = "Observables::Observable"
    _so_bind_methods = ("calculate", "n_values")
    _so_creation_policy = "LOCAL"
@script_interface_register
class ComForce(Observable):
    """Total force acting on the selected particles.

    Output format:
    :math:`\\left(\\sum_i f^x_i, \\sum_i f^y_i, \\sum_i f^z_i\\right)`

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::ComForce"


@script_interface_register
class ComPosition(Observable):
    """Center of mass of the selected particles.

    Output format:
    :math:`\\frac{1}{\\sum_i m_i} \\left( \\sum_i m_i r^x_i, \\sum_i m_i r^y_i, \\sum_i m_i r^z_i\\right)`

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::ComPosition"


@script_interface_register
class ComVelocity(Observable):
    """Center-of-mass velocity of the selected particles.

    Output format:
    :math:`\\frac{1}{\\sum_i m_i} \\left( \\sum_i m_i v^x_i, \\sum_i m_i v^y_i, \\sum_i m_i v^z_i\\right)`

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::ComVelocity"


@script_interface_register
class Current(Observable):
    """Electric current carried by the selected particles.

    Output format:
    :math:`\\left(\\sum_i q_i v^x_i, \\sum_i q_i v^y_i, \\sum_i q_i v^z_i\\right)`

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::Current"
@script_interface_register
class DensityProfile(Observable):
    """Density profile of the selected particles on a Cartesian grid.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    n_x_bins, n_y_bins, n_z_bins : :obj:`int`
        Number of bins along the ``x``, ``y`` and ``z`` directions.
    min_x, min_y, min_z : :obj:`float`
        Lower bound of the sampled region along each axis.
    max_x, max_y, max_z : :obj:`float`
        Upper bound of the sampled region along each axis.
    """
    _so_name = "Observables::DensityProfile"


@script_interface_register
class DipoleMoment(Observable):
    """Electric dipole moment of the selected particles.

    Output format:
    :math:`\\left(\\sum_i q_i r^x_i, \\sum_i q_i r^y_i, \\sum_i q_i r^z_i\\right)`

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::DipoleMoment"
@script_interface_register
class FluxDensityProfile(Observable):
    """Flux density profile of the selected particles on a Cartesian grid.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    n_x_bins, n_y_bins, n_z_bins : :obj:`int`
        Number of bins along the ``x``, ``y`` and ``z`` directions.
    min_x, min_y, min_z : :obj:`float`
        Lower bound of the sampled region along each axis.
    max_x, max_y, max_z : :obj:`float`
        Upper bound of the sampled region along each axis.
    """
    _so_name = "Observables::FluxDensityProfile"


@script_interface_register
class ForceDensityProfile(Observable):
    """Force density profile of the selected particles on a Cartesian grid.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    n_x_bins, n_y_bins, n_z_bins : :obj:`int`
        Number of bins along the ``x``, ``y`` and ``z`` directions.
    min_x, min_y, min_z : :obj:`float`
        Lower bound of the sampled region along each axis.
    max_x, max_y, max_z : :obj:`float`
        Upper bound of the sampled region along each axis.
    """
    _so_name = "Observables::ForceDensityProfile"
@script_interface_register
class LBVelocityProfile(Observable):
    """Calculates the LB fluid velocity profile.
    This observable samples the fluid on a regular grid defined by the variables
    ``sampling*``. Note that a small delta leads to a large number of sample
    points and carries a performance cost.
    .. WARNING::
        In case of the CPU version of the LB fluid implementation, this observable
        currently only works for a single core.
    Parameters
    ----------
    n_x_bins : :obj:`int`
        Number of bins in ``x`` direction.
    n_y_bins : :obj:`int`
        Number of bins in ``y`` direction.
    n_z_bins : :obj:`int`
        Number of bins in ``z`` direction.
    min_x : :obj:`float`
        Minimum ``x`` to consider.
    min_y : :obj:`float`
        Minimum ``y`` to consider.
    min_z : :obj:`float`
        Minimum ``z`` to consider.
    max_x : :obj:`float`
        Maximum ``x`` to consider.
    max_y : :obj:`float`
        Maximum ``y`` to consider.
    max_z : :obj:`float`
        Maximum ``z`` to consider.
    sampling_delta_x : :obj:`float`, default=1.0
        Spacing for the sampling grid in ``x``-direction.
    sampling_delta_y : :obj:`float`, default=1.0
        Spacing for the sampling grid in ``y``-direction.
    sampling_delta_z : :obj:`float`, default=1.0
        Spacing for the sampling grid in ``z``-direction.
    sampling_offset_x : :obj:`float`, default=0.0
        Offset for the sampling grid in ``x``-direction.
    sampling_offset_y : :obj:`float`, default=0.0
        Offset for the sampling grid in ``y``-direction.
    sampling_offset_z : :obj:`float`, default=0.0
        Offset for the sampling grid in ``z``-direction.
    allow_empty_bins : :obj:`bool`, default=False
        Whether or not to allow bins that will not be sampled at all.
    """
    _so_name = "Observables::LBVelocityProfile"
@script_interface_register
class LBFluidStress(Observable):
    """Average stress of the LB fluid, taken over all nodes.

    Parameters
    ----------
    None
    """
    _so_name = "Observables::LBFluidStress"


@script_interface_register
class MagneticDipoleMoment(Observable):
    """Magnetic dipole moment of the selected particles.

    Output format:
    :math:`\\left(\\sum_i \\mu^x_i, \\sum_i \\mu^y_i, \\sum_i \\mu^z_i\\right)`

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::MagneticDipoleMoment"
@script_interface_register
class ParticleAngularVelocities(Observable):
    """Calculates the angular velocity (omega) in the space-fixed frame of reference
    Output format: :math:`\\omega^x_1,\\ \\omega^y_1,\\ \\omega^z_1,\\ \\omega^x_2,\\ \\omega^y_2,\\ \\omega^z_2, \\dots\\ \\omega^x_n,\\ \\omega^y_n,\\ \\omega^z_n`.
    The particles are ordered according to the list of ids passed to the observable.
    Parameters
    ----------
    ids : array_like of :obj:`int`
        The ids of (existing) particles to take into account.
    """
    # NOTE: the docstring must be the first statement in the class body;
    # it previously followed _so_name and was therefore not picked up as
    # the class __doc__ (it was a dead expression statement).
    _so_name = "Observables::ParticleAngularVelocities"
@script_interface_register
class ParticleBodyAngularVelocities(Observable):
    """Calculates the angular velocity (omega) in the particles' body-fixed frame of reference.
    For each particle, the body-fixed frame of reference is obtained from the particle's
    orientation stored in the quaternions.
    Parameters
    ----------
    ids : array_like of :obj:`int`
        The ids of (existing) particles to take into account.
    """
    # NOTE: the docstring must be the first statement in the class body;
    # it previously followed _so_name and was therefore not picked up as
    # the class __doc__ (it was a dead expression statement).
    _so_name = "Observables::ParticleBodyAngularVelocities"
@script_interface_register
class ParticleBodyVelocities(Observable):
    """Calculates the particle velocity in the particles' body-fixed frame of reference.
    For each particle, the body-fixed frame of reference is obtained from the particle's
    orientation stored in the quaternions.
    Output format: :math:`v_{x1},\\ v_{y1},\\ v_{z1},\\ v_{x2},\\ v_{y2},\\ v_{z2},\\ \\dots\\ v_{xn},\\ v_{yn},\\ v_{zn}`.
    The particles are ordered according to the list of ids passed to the observable.
    Parameters
    ----------
    ids : array_like of :obj:`int`
        The ids of (existing) particles to take into account.
    """
    _so_name = "Observables::ParticleBodyVelocities"
@script_interface_register
class ParticleForces(Observable):
    """Calculates the particle forces for particles with given ids.
    Output format: :math:`f_{x1},\\ f_{y1},\\ f_{z1},\\ f_{x2},\\ f_{y2},\\ f_{z2},\\ \\dots\\ f_{xn},\\ f_{yn},\\ f_{zn}`.
    The particles are ordered according to the list of ids passed to the observable.
    Parameters
    ----------
    ids : array_like of :obj:`int`
        The ids of (existing) particles to take into account.
    """
    _so_name = "Observables::ParticleForces"
@script_interface_register
class ParticlePositions(Observable):
    """Positions of the selected particles.

    Output format:
    :math:`x_1,\\ y_1,\\ z_1,\\ x_2,\\ y_2,\\ z_2,\\ \\dots\\ x_n,\\ y_n,\\ z_n`.

    Values appear in the order of the ids passed to the observable.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::ParticlePositions"


@script_interface_register
class ParticleVelocities(Observable):
    """Velocities of the selected particles.

    Output format:
    :math:`v_{x1},\\ v_{y1},\\ v_{z1},\\ v_{x2},\\ v_{y2},\\ v_{z2},\\ \\dots\\ v_{xn},\\ v_{yn},\\ v_{zn}`.

    Values appear in the order of the ids passed to the observable.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::ParticleVelocities"


@script_interface_register
class ParticleDistances(Observable):
    """Distances between neighboring particles of a polymer chain,
    identified by the given ids.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::ParticleDistances"
@script_interface_register
class BondAngles(Observable):
    """Angles between successive bonds of a polymer chain, identified
    by the given particle ids.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::BondAngles"


@script_interface_register
class CosPersistenceAngles(Observable):
    """Cosine of the mutual bond angles of a particle chain with the
    given ids.

    Entry *i* of the result holds the cosine of the angle between bonds
    separated by *i* bonds, averaged along the chain.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::CosPersistenceAngles"


@script_interface_register
class BondDihedrals(Observable):
    """Dihedral angles between particles of a polymer chain, identified
    by the given ids.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    """
    _so_name = "Observables::BondDihedrals"
@script_interface_register
class StressTensor(Observable):
    """Calculates the total stress tensor. See :ref:`stress tensor`.
    """
    _so_name = "Observables::StressTensor"
@script_interface_register
class DPDStress(Observable):
    """Calculates the non-equilibrium contribution of the DPD interaction
    to the stress tensor.
    Parameters
    ----------
    None
    """
    _so_name = "Observables::DPDStress"
@script_interface_register
class CylindricalDensityProfile(Observable):
    """Density profile of the selected particles in polar coordinates.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    center : (3,) array_like of :obj:`float`
        Center of the polar coordinate system of the histogram.
    axis : (3,) array_like of :obj:`float`
        Orientation vector of the ``z``-axis of the polar coordinate
        system of the histogram.
    n_r_bins, n_phi_bins, n_z_bins : :obj:`int`
        Number of bins in the radial, azimuthal and ``z`` directions.
    min_r, min_phi, min_z : :obj:`float`
        Lower histogram bound for ``r``, ``phi`` and ``z``.
    max_r, max_phi, max_z : :obj:`float`
        Upper histogram bound for ``r``, ``phi`` and ``z``.
    """
    _so_name = "Observables::CylindricalDensityProfile"


@script_interface_register
class CylindricalFluxDensityProfile(Observable):
    """Flux density profile of the selected particles in polar coordinates.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    center : (3,) array_like of :obj:`float`
        Center of the polar coordinate system of the histogram.
    axis : (3,) array_like of :obj:`float`
        Orientation vector of the ``z``-axis of the polar coordinate
        system of the histogram.
    n_r_bins, n_phi_bins, n_z_bins : :obj:`int`
        Number of bins in the radial, azimuthal and ``z`` directions.
    min_r, min_phi, min_z : :obj:`float`
        Lower histogram bound for ``r``, ``phi`` and ``z``.
    max_r, max_phi, max_z : :obj:`float`
        Upper histogram bound for ``r``, ``phi`` and ``z``.
    """
    _so_name = "Observables::CylindricalFluxDensityProfile"
@script_interface_register
class CylindricalLBFluxDensityProfileAtParticlePositions(Observable):
    """LB fluid flux density, sampled at the positions of the selected
    particles, in polar coordinates.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    center : (3,) array_like of :obj:`float`
        Center of the polar coordinate system of the histogram.
    axis : (3,) array_like of :obj:`float`
        Orientation vector of the ``z``-axis of the polar coordinate
        system of the histogram.
    n_r_bins, n_phi_bins, n_z_bins : :obj:`int`
        Number of bins in the radial, azimuthal and ``z`` directions.
    min_r, min_phi, min_z : :obj:`float`
        Lower histogram bound for ``r``, ``phi`` and ``z``.
    max_r, max_phi, max_z : :obj:`float`
        Upper histogram bound for ``r``, ``phi`` and ``z``.
    """
    _so_name = "Observables::CylindricalLBFluxDensityProfileAtParticlePositions"


@script_interface_register
class CylindricalLBVelocityProfileAtParticlePositions(Observable):
    """LB fluid velocity, sampled at the positions of the selected
    particles, in polar coordinates.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    center : (3,) array_like of :obj:`float`
        Center of the polar coordinate system of the histogram.
    axis : (3,) array_like of :obj:`float`
        Orientation vector of the ``z``-axis of the polar coordinate
        system of the histogram.
    n_r_bins, n_phi_bins, n_z_bins : :obj:`int`
        Number of bins in the radial, azimuthal and ``z`` directions.
    min_r, min_phi, min_z : :obj:`float`
        Lower histogram bound for ``r``, ``phi`` and ``z``.
    max_r, max_phi, max_z : :obj:`float`
        Upper histogram bound for ``r``, ``phi`` and ``z``.
    """
    _so_name = "Observables::CylindricalLBVelocityProfileAtParticlePositions"
@script_interface_register
class CylindricalVelocityProfile(Observable):
    """Velocity profile of the selected particles in polar coordinates.

    Parameters
    ----------
    ids : array_like of :obj:`int`
        Ids of the (existing) particles to be taken into account.
    center : (3,) array_like of :obj:`float`
        Center of the polar coordinate system of the histogram.
    axis : (3,) array_like of :obj:`float`
        Orientation vector of the ``z``-axis of the polar coordinate
        system of the histogram.
    n_r_bins, n_phi_bins, n_z_bins : :obj:`int`
        Number of bins in the radial, azimuthal and ``z`` directions.
    min_r, min_phi, min_z : :obj:`float`
        Lower histogram bound for ``r``, ``phi`` and ``z``.
    max_r, max_phi, max_z : :obj:`float`
        Upper histogram bound for ``r``, ``phi`` and ``z``.
    """
    _so_name = "Observables::CylindricalVelocityProfile"


@script_interface_register
class CylindricalLBVelocityProfile(Observable):
    """LB fluid velocity profile in polar coordinates.

    The fluid is sampled on a regular grid controlled by
    ``sampling_density``; a dense sampling gives many sample points and
    carries a performance cost.

    Parameters
    ----------
    center : (3,) array_like of :obj:`float`
        Center of the polar coordinate system of the histogram.
    axis : (3,) array_like of :obj:`float`
        Orientation vector of the ``z``-axis of the polar coordinate
        system of the histogram.
    n_r_bins, n_phi_bins, n_z_bins : :obj:`int`
        Number of bins in the radial, azimuthal and ``z`` directions.
    min_r, min_phi, min_z : :obj:`float`
        Lower histogram bound for ``r``, ``phi`` and ``z``.
    max_r, max_phi, max_z : :obj:`float`
        Upper histogram bound for ``r``, ``phi`` and ``z``.
    sampling_density : :obj:`float`
        Samples per unit volume for the LB velocity interpolation.
    """
    _so_name = "Observables::CylindricalLBVelocityProfile"
| mkuron/espresso | src/python/espressomd/observables.py | Python | gpl-3.0 | 21,633 |
# Copyright 2012 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_config import cfg
from oslo_serialization import jsonutils
from keystone import exception
from keystone.policy.backends import rules
from keystone.tests import unit as tests
from keystone.tests.unit.ksfixtures import temporaryfile
from keystone.tests.unit import test_v3
CONF = cfg.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
class IdentityTestProtectedCase(test_v3.RestfulTestCase):
"""Test policy enforcement on the v3 Identity API."""
    def setUp(self):
        """Setup for Identity Protection Test Cases.
        As well as the usual housekeeping, create a set of domains,
        users, roles and projects for the subsequent tests:
        - Three domains: A,B & C. C is disabled.
        - DomainA has user1, DomainB has user2 and user3
        - DomainA has group1 and group2, DomainB has group3
        - User1 has two roles on DomainA
        - User2 has one role on DomainA
        Remember that there will also be a fourth domain in existence,
        the default domain.
        """
        # Ensure that test_v3.RestfulTestCase doesn't load its own
        # sample data, which would make checking the results of our
        # tests harder
        super(IdentityTestProtectedCase, self).setUp()
        # Point oslo.policy at a per-test temporary file so each test
        # can install its own rules via _set_policy().
        self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
        self.tmpfilename = self.tempfile.file_name
        self.config_fixture.config(group='oslo_policy',
                                   policy_file=self.tmpfilename)
        # A default auth request we can use - un-scoped user token
        self.auth = self.build_authentication_request(
            user_id=self.user1['id'],
            password=self.user1['password'])
    def load_sample_data(self):
        """Create the domains, users, groups, roles and grants used by
        the tests; the layout is described in the setUp docstring.
        """
        self._populate_default_domain()
        # Start by creating a couple of domains
        self.domainA = self.new_domain_ref()
        self.resource_api.create_domain(self.domainA['id'], self.domainA)
        self.domainB = self.new_domain_ref()
        self.resource_api.create_domain(self.domainB['id'], self.domainB)
        self.domainC = self.new_domain_ref()
        self.domainC['enabled'] = False
        self.resource_api.create_domain(self.domainC['id'], self.domainC)
        # Now create some users, one in domainA and two of them in domainB.
        # After each create_user call the clear-text password is re-attached
        # to the returned ref so later authentication requests can use it.
        self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
        password = uuid.uuid4().hex
        self.user1['password'] = password
        self.user1 = self.identity_api.create_user(self.user1)
        self.user1['password'] = password
        self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
        password = uuid.uuid4().hex
        self.user2['password'] = password
        self.user2 = self.identity_api.create_user(self.user2)
        self.user2['password'] = password
        self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
        password = uuid.uuid4().hex
        self.user3['password'] = password
        self.user3 = self.identity_api.create_user(self.user3)
        self.user3['password'] = password
        self.group1 = self.new_group_ref(domain_id=self.domainA['id'])
        self.group1 = self.identity_api.create_group(self.group1)
        self.group2 = self.new_group_ref(domain_id=self.domainA['id'])
        self.group2 = self.identity_api.create_group(self.group2)
        self.group3 = self.new_group_ref(domain_id=self.domainB['id'])
        self.group3 = self.identity_api.create_group(self.group3)
        # Grants: user1 gets both roles on domainA, user2 gets one.
        self.role = self.new_role_ref()
        self.role_api.create_role(self.role['id'], self.role)
        self.role1 = self.new_role_ref()
        self.role_api.create_role(self.role1['id'], self.role1)
        self.assignment_api.create_grant(self.role['id'],
                                         user_id=self.user1['id'],
                                         domain_id=self.domainA['id'])
        self.assignment_api.create_grant(self.role['id'],
                                         user_id=self.user2['id'],
                                         domain_id=self.domainA['id'])
        self.assignment_api.create_grant(self.role1['id'],
                                         user_id=self.user1['id'],
                                         domain_id=self.domainA['id'])
def _get_id_list_from_ref_list(self, ref_list):
result_list = []
for x in ref_list:
result_list.append(x['id'])
return result_list
def _set_policy(self, new_policy):
with open(self.tmpfilename, "w") as policyfile:
policyfile.write(jsonutils.dumps(new_policy))
    def test_list_users_unprotected(self):
        """GET /users (unprotected)
        Test Plan:
        - Update policy so api is unprotected
        - Use an un-scoped token to make sure we can get back all
          the users independent of domain
        """
        # An empty rule list means the API is open to any caller.
        self._set_policy({"identity:list_users": []})
        r = self.get('/users', auth=self.auth)
        # Users from domainA (user1) and domainB (user2, user3) are all visible.
        id_list = self._get_id_list_from_ref_list(r.result.get('users'))
        self.assertIn(self.user1['id'], id_list)
        self.assertIn(self.user2['id'], id_list)
        self.assertIn(self.user3['id'], id_list)
    def test_list_users_filtered_by_domain(self):
        """GET /users?domain_id=mydomain (filtered)
        Test Plan:
        - Update policy so api is unprotected
        - Use an un-scoped token to make sure we can filter the
          users by domainB, getting back the 2 users in that domain
        """
        self._set_policy({"identity:list_users": []})
        # Filtering (the query string) is independent of protection
        # (the policy rule), which is left wide open here.
        url_by_name = '/users?domain_id=%s' % self.domainB['id']
        r = self.get(url_by_name, auth=self.auth)
        # We should get back two users, those in DomainB
        id_list = self._get_id_list_from_ref_list(r.result.get('users'))
        self.assertIn(self.user2['id'], id_list)
        self.assertIn(self.user3['id'], id_list)
    def test_get_user_protected_match_id(self):
        """GET /users/{id} (match payload)
        Test Plan:
        - Update policy to protect api by user_id
        - List users with user_id of user1 as filter, to check that
          this will correctly match user_id in the flattened
          payload
        """
        # TODO(henry-nash, ayoung): It would be good to expand this
        # test for further test flattening, e.g. protect on, say, an
        # attribute of an object being created
        # The rule compares the caller's user_id credential against the
        # user_id in the request payload.
        new_policy = {"identity:get_user": [["user_id:%(user_id)s"]]}
        self._set_policy(new_policy)
        url_by_name = '/users/%s' % self.user1['id']
        r = self.get(url_by_name, auth=self.auth)
        self.assertEqual(self.user1['id'], r.result['user']['id'])
def test_get_user_protected_match_target(self):
"""GET /users/{id} (match target)
Test Plan:
- Update policy to protect api by domain_id
- Try and read a user who is in DomainB with a token scoped
to Domain A - this should fail
- Retry this for a user who is in Domain A, which should succeed.
- Finally, try getting a user that does not exist, which should
still return UserNotFound
"""
new_policy = {'identity:get_user':
[["domain_id:%(target.user.domain_id)s"]]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/users/%s' % self.user2['id']
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
url_by_name = '/users/%s' % self.user1['id']
r = self.get(url_by_name, auth=self.auth)
self.assertEqual(self.user1['id'], r.result['user']['id'])
url_by_name = '/users/%s' % uuid.uuid4().hex
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.UserNotFound.code)
    def test_revoke_grant_protected_match_target(self):
        """DELETE /domains/{id}/users/{id}/roles/{id} (match target)
        Test Plan:
        - Update policy to protect api by domain_id of entities in
          the grant
        - Try and delete the existing grant that has a user who is
          from a different domain - this should fail.
        - Retry this for a user who is in Domain A, which should succeed.
        """
        new_policy = {'identity:revoke_grant':
                      [["domain_id:%(target.user.domain_id)s"]]}
        self._set_policy(new_policy)
        # Grant on domainA held by user2, who lives in domainB: the rule
        # on the target user's domain must reject the delete.
        collection_url = (
            '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': self.domainA['id'],
                'user_id': self.user2['id']})
        member_url = '%(collection_url)s/%(role_id)s' % {
            'collection_url': collection_url,
            'role_id': self.role['id']}
        self.auth = self.build_authentication_request(
            user_id=self.user1['id'],
            password=self.user1['password'],
            domain_id=self.domainA['id'])
        self.delete(member_url, auth=self.auth,
                    expected_status=exception.ForbiddenAction.code)
        # Same API, but targeting user1 (who is in domainA): succeeds.
        collection_url = (
            '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
                'domain_id': self.domainA['id'],
                'user_id': self.user1['id']})
        member_url = '%(collection_url)s/%(role_id)s' % {
            'collection_url': collection_url,
            'role_id': self.role1['id']}
        self.delete(member_url, auth=self.auth)
def test_list_users_protected_by_domain(self):
"""GET /users?domain_id=mydomain (protected)
Test Plan:
- Update policy to protect api by domain_id
- List groups using a token scoped to domainA with a filter
specifying domainA - we should only get back the one user
that is in domainA.
- Try and read the users from domainB - this should fail since
we don't have a token scoped for domainB
"""
new_policy = {"identity:list_users": ["domain_id:%(domain_id)s"]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/users?domain_id=%s' % self.domainA['id']
r = self.get(url_by_name, auth=self.auth)
# We should only get back one user, the one in DomainA
id_list = self._get_id_list_from_ref_list(r.result.get('users'))
self.assertEqual(1, len(id_list))
self.assertIn(self.user1['id'], id_list)
# Now try for domainB, which should fail
url_by_name = '/users?domain_id=%s' % self.domainB['id']
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
def test_list_groups_protected_by_domain(self):
"""GET /groups?domain_id=mydomain (protected)
Test Plan:
- Update policy to protect api by domain_id
- List groups using a token scoped to domainA and make sure
we only get back the two groups that are in domainA
- Try and read the groups from domainB - this should fail since
we don't have a token scoped for domainB
"""
new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/groups?domain_id=%s' % self.domainA['id']
r = self.get(url_by_name, auth=self.auth)
# We should only get back two groups, the ones in DomainA
id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
self.assertEqual(2, len(id_list))
self.assertIn(self.group1['id'], id_list)
self.assertIn(self.group2['id'], id_list)
# Now try for domainB, which should fail
url_by_name = '/groups?domain_id=%s' % self.domainB['id']
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
def test_list_groups_protected_by_domain_and_filtered(self):
"""GET /groups?domain_id=mydomain&name=myname (protected)
Test Plan:
- Update policy to protect api by domain_id
- List groups using a token scoped to domainA with a filter
specifying both domainA and the name of group.
- We should only get back the group in domainA that matches
the name
"""
new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/groups?domain_id=%s&name=%s' % (
self.domainA['id'], self.group2['name'])
r = self.get(url_by_name, auth=self.auth)
# We should only get back one user, the one in DomainA that matches
# the name supplied
id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
self.assertEqual(1, len(id_list))
self.assertIn(self.group2['id'], id_list)
class IdentityTestPolicySample(test_v3.RestfulTestCase):
    """Test policy enforcement of the policy.json file."""
    def load_sample_data(self):
        """Create the user/role/project fixture used by these tests."""
        self._populate_default_domain()
        self.just_a_user = self.new_user_ref(
            domain_id=CONF.identity.default_domain_id)
        password = uuid.uuid4().hex
        self.just_a_user['password'] = password
        self.just_a_user = self.identity_api.create_user(self.just_a_user)
        # Keep the clear-text password on the ref so the tests can
        # authenticate as this user later.
        self.just_a_user['password'] = password
        self.another_user = self.new_user_ref(
            domain_id=CONF.identity.default_domain_id)
        password = uuid.uuid4().hex
        self.another_user['password'] = password
        self.another_user = self.identity_api.create_user(self.another_user)
        self.another_user['password'] = password
        self.admin_user = self.new_user_ref(
            domain_id=CONF.identity.default_domain_id)
        password = uuid.uuid4().hex
        self.admin_user['password'] = password
        self.admin_user = self.identity_api.create_user(self.admin_user)
        self.admin_user['password'] = password
        self.role = self.new_role_ref()
        self.role_api.create_role(self.role['id'], self.role)
        # NOTE(review): the role is literally named 'admin' - presumably
        # the name the sample policy file keys on; confirm against policy.json.
        self.admin_role = {'id': uuid.uuid4().hex, 'name': 'admin'}
        self.role_api.create_role(self.admin_role['id'], self.admin_role)
        # Create and assign roles to the project
        self.project = self.new_project_ref(
            domain_id=CONF.identity.default_domain_id)
        self.resource_api.create_project(self.project['id'], self.project)
        self.assignment_api.create_grant(self.role['id'],
                                         user_id=self.just_a_user['id'],
                                         project_id=self.project['id'])
        self.assignment_api.create_grant(self.role['id'],
                                         user_id=self.another_user['id'],
                                         project_id=self.project['id'])
        self.assignment_api.create_grant(self.admin_role['id'],
                                         user_id=self.admin_user['id'],
                                         project_id=self.project['id'])
    def test_user_validate_same_token(self):
        # Given a non-admin user token, the token can be used to validate
        # itself.
        # This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
        auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        token = self.get_requested_token(auth)
        self.get('/auth/tokens', token=token,
                 headers={'X-Subject-Token': token})
    def test_user_validate_user_token(self):
        # A user can validate one of their own tokens.
        # This is GET /v3/auth/tokens
        auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        token1 = self.get_requested_token(auth)
        token2 = self.get_requested_token(auth)
        self.get('/auth/tokens', token=token1,
                 headers={'X-Subject-Token': token2})
    def test_user_validate_other_user_token_rejected(self):
        # A user cannot validate another user's token.
        # This is GET /v3/auth/tokens
        user1_auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        user1_token = self.get_requested_token(user1_auth)
        user2_auth = self.build_authentication_request(
            user_id=self.another_user['id'],
            password=self.another_user['password'])
        user2_token = self.get_requested_token(user2_auth)
        self.get('/auth/tokens', token=user1_token,
                 headers={'X-Subject-Token': user2_token}, expected_status=403)
    def test_admin_validate_user_token(self):
        # An admin can validate a user's token.
        # This is GET /v3/auth/tokens
        admin_auth = self.build_authentication_request(
            user_id=self.admin_user['id'],
            password=self.admin_user['password'],
            project_id=self.project['id'])
        admin_token = self.get_requested_token(admin_auth)
        user_auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        user_token = self.get_requested_token(user_auth)
        self.get('/auth/tokens', token=admin_token,
                 headers={'X-Subject-Token': user_token})
    def test_user_check_same_token(self):
        # Given a non-admin user token, the token can be used to check
        # itself.
        # This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
        auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        token = self.get_requested_token(auth)
        self.head('/auth/tokens', token=token,
                  headers={'X-Subject-Token': token}, expected_status=200)
    def test_user_check_user_token(self):
        # A user can check one of their own tokens.
        # This is HEAD /v3/auth/tokens
        auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        token1 = self.get_requested_token(auth)
        token2 = self.get_requested_token(auth)
        self.head('/auth/tokens', token=token1,
                  headers={'X-Subject-Token': token2}, expected_status=200)
    def test_user_check_other_user_token_rejected(self):
        # A user cannot check another user's token.
        # This is HEAD /v3/auth/tokens
        user1_auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        user1_token = self.get_requested_token(user1_auth)
        user2_auth = self.build_authentication_request(
            user_id=self.another_user['id'],
            password=self.another_user['password'])
        user2_token = self.get_requested_token(user2_auth)
        self.head('/auth/tokens', token=user1_token,
                  headers={'X-Subject-Token': user2_token},
                  expected_status=403)
    def test_admin_check_user_token(self):
        # An admin can check a user's token.
        # This is HEAD /v3/auth/tokens
        admin_auth = self.build_authentication_request(
            user_id=self.admin_user['id'],
            password=self.admin_user['password'],
            project_id=self.project['id'])
        admin_token = self.get_requested_token(admin_auth)
        user_auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        user_token = self.get_requested_token(user_auth)
        self.head('/auth/tokens', token=admin_token,
                  headers={'X-Subject-Token': user_token}, expected_status=200)
    def test_user_revoke_same_token(self):
        # Given a non-admin user token, the token can be used to revoke
        # itself.
        # This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
        auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        token = self.get_requested_token(auth)
        self.delete('/auth/tokens', token=token,
                    headers={'X-Subject-Token': token})
    def test_user_revoke_user_token(self):
        # A user can revoke one of their own tokens.
        # This is DELETE /v3/auth/tokens
        auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        token1 = self.get_requested_token(auth)
        token2 = self.get_requested_token(auth)
        self.delete('/auth/tokens', token=token1,
                    headers={'X-Subject-Token': token2})
    def test_user_revoke_other_user_token_rejected(self):
        # A user cannot revoke another user's token.
        # This is DELETE /v3/auth/tokens
        user1_auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        user1_token = self.get_requested_token(user1_auth)
        user2_auth = self.build_authentication_request(
            user_id=self.another_user['id'],
            password=self.another_user['password'])
        user2_token = self.get_requested_token(user2_auth)
        self.delete('/auth/tokens', token=user1_token,
                    headers={'X-Subject-Token': user2_token},
                    expected_status=403)
    def test_admin_revoke_user_token(self):
        # An admin can revoke a user's token.
        # This is DELETE /v3/auth/tokens
        admin_auth = self.build_authentication_request(
            user_id=self.admin_user['id'],
            password=self.admin_user['password'],
            project_id=self.project['id'])
        admin_token = self.get_requested_token(admin_auth)
        user_auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'])
        user_token = self.get_requested_token(user_auth)
        self.delete('/auth/tokens', token=admin_token,
                    headers={'X-Subject-Token': user_token})
class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Test policy enforcement of the sample v3 cloud policy file."""
    def setUp(self):
        """Setup for v3 Cloud Policy Sample Test Cases.
        The following data is created:
        - Three domains: domainA, domainB and admin_domain
        - One project, which name is 'project'
        - domainA has three users: domain_admin_user, project_admin_user and
          just_a_user:
          - domain_admin_user has role 'admin' on domainA,
          - project_admin_user has role 'admin' on the project,
          - just_a_user has a non-admin role on both domainA and the project.
        - admin_domain has user cloud_admin_user, with an 'admin' role
          on admin_domain.
        We test various api protection rules from the cloud sample policy
        file to make sure the sample is valid and that we correctly enforce it.
        """
        # Ensure that test_v3.RestfulTestCase doesn't load its own
        # sample data, which would make checking the results of our
        # tests harder
        super(IdentityTestv3CloudPolicySample, self).setUp()
        # Finally, switch to the v3 sample policy file
        # reset() now clears any cached rules so the new policy_file is
        # picked up; the cleanup reset restores state for later tests.
        self.addCleanup(rules.reset)
        rules.reset()
        self.config_fixture.config(
            group='oslo_policy',
            policy_file=tests.dirs.etc('policy.v3cloudsample.json'))
    def load_sample_data(self):
        """Create the domains, users, roles and project for these tests."""
        # Start by creating a couple of domains
        self._populate_default_domain()
        self.domainA = self.new_domain_ref()
        self.resource_api.create_domain(self.domainA['id'], self.domainA)
        self.domainB = self.new_domain_ref()
        self.resource_api.create_domain(self.domainB['id'], self.domainB)
        # NOTE(review): fixed ids/names here - presumably these literals
        # are what the v3 cloud sample policy matches on; confirm against
        # policy.v3cloudsample.json.
        self.admin_domain = {'id': 'admin_domain_id', 'name': 'Admin_domain'}
        self.resource_api.create_domain(self.admin_domain['id'],
                                        self.admin_domain)
        # And our users
        self.cloud_admin_user = self.new_user_ref(
            domain_id=self.admin_domain['id'])
        password = uuid.uuid4().hex
        self.cloud_admin_user['password'] = password
        self.cloud_admin_user = (
            self.identity_api.create_user(self.cloud_admin_user))
        # Keep the clear-text password on each ref so the tests can
        # authenticate as these users later.
        self.cloud_admin_user['password'] = password
        self.just_a_user = self.new_user_ref(domain_id=self.domainA['id'])
        password = uuid.uuid4().hex
        self.just_a_user['password'] = password
        self.just_a_user = self.identity_api.create_user(self.just_a_user)
        self.just_a_user['password'] = password
        self.domain_admin_user = self.new_user_ref(
            domain_id=self.domainA['id'])
        password = uuid.uuid4().hex
        self.domain_admin_user['password'] = password
        self.domain_admin_user = (
            self.identity_api.create_user(self.domain_admin_user))
        self.domain_admin_user['password'] = password
        self.project_admin_user = self.new_user_ref(
            domain_id=self.domainA['id'])
        password = uuid.uuid4().hex
        self.project_admin_user['password'] = password
        self.project_admin_user = (
            self.identity_api.create_user(self.project_admin_user))
        self.project_admin_user['password'] = password
        # The admin role and another plain role
        self.admin_role = {'id': uuid.uuid4().hex, 'name': 'admin'}
        self.role_api.create_role(self.admin_role['id'], self.admin_role)
        self.role = self.new_role_ref()
        self.role_api.create_role(self.role['id'], self.role)
        # The cloud admin just gets the admin role
        self.assignment_api.create_grant(self.admin_role['id'],
                                         user_id=self.cloud_admin_user['id'],
                                         domain_id=self.admin_domain['id'])
        # Assign roles to the domain
        self.assignment_api.create_grant(self.admin_role['id'],
                                         user_id=self.domain_admin_user['id'],
                                         domain_id=self.domainA['id'])
        self.assignment_api.create_grant(self.role['id'],
                                         user_id=self.just_a_user['id'],
                                         domain_id=self.domainA['id'])
        # Create and assign roles to the project
        self.project = self.new_project_ref(domain_id=self.domainA['id'])
        self.resource_api.create_project(self.project['id'], self.project)
        self.assignment_api.create_grant(self.admin_role['id'],
                                         user_id=self.project_admin_user['id'],
                                         project_id=self.project['id'])
        self.assignment_api.create_grant(self.role['id'],
                                         user_id=self.just_a_user['id'],
                                         project_id=self.project['id'])
def _stati(self, expected_status):
# Return the expected return codes for APIs with and without data
# with any specified status overriding the normal values
if expected_status is None:
return (200, 201, 204)
else:
return (expected_status, expected_status, expected_status)
def _test_user_management(self, domain_id, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
entity_url = '/users/%s' % self.just_a_user['id']
list_url = '/users?domain_id=%s' % domain_id
self.get(entity_url, auth=self.auth,
expected_status=status_OK)
self.get(list_url, auth=self.auth,
expected_status=status_OK)
user = {'description': 'Updated'}
self.patch(entity_url, auth=self.auth, body={'user': user},
expected_status=status_OK)
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
user_ref = self.new_user_ref(domain_id=domain_id)
self.post('/users', auth=self.auth, body={'user': user_ref},
expected_status=status_created)
def _test_project_management(self, domain_id, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
entity_url = '/projects/%s' % self.project['id']
list_url = '/projects?domain_id=%s' % domain_id
self.get(entity_url, auth=self.auth,
expected_status=status_OK)
self.get(list_url, auth=self.auth,
expected_status=status_OK)
project = {'description': 'Updated'}
self.patch(entity_url, auth=self.auth, body={'project': project},
expected_status=status_OK)
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
proj_ref = self.new_project_ref(domain_id=domain_id)
self.post('/projects', auth=self.auth, body={'project': proj_ref},
expected_status=status_created)
def _test_domain_management(self, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
entity_url = '/domains/%s' % self.domainB['id']
list_url = '/domains'
self.get(entity_url, auth=self.auth,
expected_status=status_OK)
self.get(list_url, auth=self.auth,
expected_status=status_OK)
domain = {'description': 'Updated', 'enabled': False}
self.patch(entity_url, auth=self.auth, body={'domain': domain},
expected_status=status_OK)
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
domain_ref = self.new_domain_ref()
self.post('/domains', auth=self.auth, body={'domain': domain_ref},
expected_status=status_created)
def _test_grants(self, target, entity_id, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
a_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.role_api.create_role(a_role['id'], a_role)
collection_url = (
'/%(target)s/%(target_id)s/users/%(user_id)s/roles' % {
'target': target,
'target_id': entity_id,
'user_id': self.just_a_user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': a_role['id']}
self.put(member_url, auth=self.auth,
expected_status=status_no_data)
self.head(member_url, auth=self.auth,
expected_status=status_no_data)
self.get(collection_url, auth=self.auth,
expected_status=status_OK)
self.delete(member_url, auth=self.auth,
expected_status=status_no_data)
def test_user_management(self):
# First, authenticate with a user that does not have the domain
# admin role - shouldn't be able to do much.
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
self._test_user_management(
self.domainA['id'], expected=exception.ForbiddenAction.code)
# Now, authenticate with a user that does have the domain admin role
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_user_management(self.domainA['id'])
def test_user_management_by_cloud_admin(self):
# Test users management with a cloud admin. This user should
# be able to manage users in any domain.
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
self._test_user_management(self.domainA['id'])
def test_project_management(self):
# First, authenticate with a user that does not have the project
# admin role - shouldn't be able to do much.
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
self._test_project_management(
self.domainA['id'], expected=exception.ForbiddenAction.code)
# ...but should still be able to list projects of which they are
# a member
url = '/users/%s/projects' % self.just_a_user['id']
self.get(url, auth=self.auth)
# Now, authenticate with a user that does have the domain admin role
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_project_management(self.domainA['id'])
def test_project_management_by_cloud_admin(self):
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
# Check whether cloud admin can operate a domain
# other than its own domain or not
self._test_project_management(self.domainA['id'])
def test_domain_grants(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
self._test_grants('domains', self.domainA['id'],
expected=exception.ForbiddenAction.code)
# Now, authenticate with a user that does have the domain admin role
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_grants('domains', self.domainA['id'])
# Check that with such a token we cannot modify grants on a
# different domain
self._test_grants('domains', self.domainB['id'],
expected=exception.ForbiddenAction.code)
def test_domain_grants_by_cloud_admin(self):
# Test domain grants with a cloud admin. This user should be
# able to manage roles on any domain.
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
self._test_grants('domains', self.domainA['id'])
def test_project_grants(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
project_id=self.project['id'])
self._test_grants('projects', self.project['id'],
expected=exception.ForbiddenAction.code)
# Now, authenticate with a user that does have the project
# admin role
self.auth = self.build_authentication_request(
user_id=self.project_admin_user['id'],
password=self.project_admin_user['password'],
project_id=self.project['id'])
self._test_grants('projects', self.project['id'])
def test_project_grants_by_domain_admin(self):
# Test project grants with a domain admin. This user should be
# able to manage roles on any project in its own domain.
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_grants('projects', self.project['id'])
def test_cloud_admin_list_assignments_of_domain(self):
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
collection_url = self.build_role_assignment_query_url(
domain_id=self.domainA['id'])
r = self.get(collection_url, auth=self.auth)
self.assertValidRoleAssignmentListResponse(
r, expected_length=2, resource_url=collection_url)
domainA_admin_entity = self.build_role_assignment_entity(
domain_id=self.domainA['id'],
user_id=self.domain_admin_user['id'],
role_id=self.admin_role['id'],
inherited_to_projects=False)
domainA_user_entity = self.build_role_assignment_entity(
domain_id=self.domainA['id'],
user_id=self.just_a_user['id'],
role_id=self.role['id'],
inherited_to_projects=False)
self.assertRoleAssignmentInListResponse(r, domainA_admin_entity)
self.assertRoleAssignmentInListResponse(r, domainA_user_entity)
def test_domain_admin_list_assignments_of_domain(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
collection_url = self.build_role_assignment_query_url(
domain_id=self.domainA['id'])
r = self.get(collection_url, auth=self.auth)
self.assertValidRoleAssignmentListResponse(
r, expected_length=2, resource_url=collection_url)
domainA_admin_entity = self.build_role_assignment_entity(
domain_id=self.domainA['id'],
user_id=self.domain_admin_user['id'],
role_id=self.admin_role['id'],
inherited_to_projects=False)
domainA_user_entity = self.build_role_assignment_entity(
domain_id=self.domainA['id'],
user_id=self.just_a_user['id'],
role_id=self.role['id'],
inherited_to_projects=False)
self.assertRoleAssignmentInListResponse(r, domainA_admin_entity)
self.assertRoleAssignmentInListResponse(r, domainA_user_entity)
def test_domain_admin_list_assignments_of_another_domain_failed(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
collection_url = self.build_role_assignment_query_url(
domain_id=self.domainB['id'])
self.get(collection_url, auth=self.auth, expected_status=403)
def test_domain_user_list_assignments_of_domain_failed(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
collection_url = self.build_role_assignment_query_url(
domain_id=self.domainA['id'])
self.get(collection_url, auth=self.auth, expected_status=403)
def test_cloud_admin_list_assignments_of_project(self):
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
collection_url = self.build_role_assignment_query_url(
project_id=self.project['id'])
r = self.get(collection_url, auth=self.auth)
self.assertValidRoleAssignmentListResponse(
r, expected_length=2, resource_url=collection_url)
project_admin_entity = self.build_role_assignment_entity(
project_id=self.project['id'],
user_id=self.project_admin_user['id'],
role_id=self.admin_role['id'],
inherited_to_projects=False)
project_user_entity = self.build_role_assignment_entity(
project_id=self.project['id'],
user_id=self.just_a_user['id'],
role_id=self.role['id'],
inherited_to_projects=False)
self.assertRoleAssignmentInListResponse(r, project_admin_entity)
self.assertRoleAssignmentInListResponse(r, project_user_entity)
@tests.utils.wip('waiting on bug #1437407')
def test_domain_admin_list_assignments_of_project(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
collection_url = self.build_role_assignment_query_url(
project_id=self.project['id'])
r = self.get(collection_url, auth=self.auth)
self.assertValidRoleAssignmentListResponse(
r, expected_length=2, resource_url=collection_url)
project_admin_entity = self.build_role_assignment_entity(
project_id=self.project['id'],
user_id=self.project_admin_user['id'],
role_id=self.admin_role['id'],
inherited_to_projects=False)
project_user_entity = self.build_role_assignment_entity(
project_id=self.project['id'],
user_id=self.just_a_user['id'],
role_id=self.role['id'],
inherited_to_projects=False)
self.assertRoleAssignmentInListResponse(r, project_admin_entity)
self.assertRoleAssignmentInListResponse(r, project_user_entity)
def test_domain_user_list_assignments_of_project_failed(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
collection_url = self.build_role_assignment_query_url(
project_id=self.project['id'])
self.get(collection_url, auth=self.auth, expected_status=403)
def test_cloud_admin(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_domain_management(
expected=exception.ForbiddenAction.code)
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
self._test_domain_management()
def test_domain_admin_get_domain(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
entity_url = '/domains/%s' % self.domainA['id']
self.get(entity_url, auth=self.auth, expected_status=200)
def test_list_user_credentials(self):
self.credential_user = self.new_credential_ref(self.just_a_user['id'])
self.credential_api.create_credential(self.credential_user['id'],
self.credential_user)
self.credential_admin = self.new_credential_ref(
self.cloud_admin_user['id'])
self.credential_api.create_credential(self.credential_admin['id'],
self.credential_admin)
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
url = '/credentials?user_id=%s' % self.just_a_user['id']
self.get(url, auth=self.auth)
url = '/credentials?user_id=%s' % self.cloud_admin_user['id']
self.get(url, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
url = '/credentials'
self.get(url, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
    def test_get_and_delete_ec2_credentials(self):
        """Tests getting and deleting ec2 credentials through the ec2 API."""
        another_user = self.new_user_ref(domain_id=self.domainA['id'])
        # Capture the generated password before create_user() returns a
        # ref without it, so we can authenticate as this user below.
        password = another_user['password']
        another_user = self.identity_api.create_user(another_user)
        # create a credential for just_a_user
        just_user_auth = self.build_authentication_request(
            user_id=self.just_a_user['id'],
            password=self.just_a_user['password'],
            project_id=self.project['id'])
        url = '/users/%s/credentials/OS-EC2' % self.just_a_user['id']
        r = self.post(url, body={'tenant_id': self.project['id']},
                      auth=just_user_auth)
        # another normal user can't get the credential
        another_user_auth = self.build_authentication_request(
            user_id=another_user['id'],
            password=password)
        another_user_url = '/users/%s/credentials/OS-EC2/%s' % (
            another_user['id'], r.result['credential']['access'])
        self.get(another_user_url, auth=another_user_auth,
                 expected_status=exception.ForbiddenAction.code)
        # the owner can get the credential
        just_user_url = '/users/%s/credentials/OS-EC2/%s' % (
            self.just_a_user['id'], r.result['credential']['access'])
        self.get(just_user_url, auth=just_user_auth)
        # another normal user can't delete the credential
        self.delete(another_user_url, auth=another_user_auth,
                    expected_status=exception.ForbiddenAction.code)
        # the owner can delete the credential
        self.delete(just_user_url, auth=just_user_auth)
def test_user_validate_same_token(self):
# Given a non-admin user token, the token can be used to validate
# itself.
# This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token = self.get_requested_token(auth)
self.get('/auth/tokens', token=token,
headers={'X-Subject-Token': token})
def test_user_validate_user_token(self):
# A user can validate one of their own tokens.
# This is GET /v3/auth/tokens
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token1 = self.get_requested_token(auth)
token2 = self.get_requested_token(auth)
self.get('/auth/tokens', token=token1,
headers={'X-Subject-Token': token2})
def test_user_validate_other_user_token_rejected(self):
# A user cannot validate another user's token.
# This is GET /v3/auth/tokens
user1_auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
user1_token = self.get_requested_token(user1_auth)
user2_auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'])
user2_token = self.get_requested_token(user2_auth)
self.get('/auth/tokens', token=user1_token,
headers={'X-Subject-Token': user2_token}, expected_status=403)
def test_admin_validate_user_token(self):
# An admin can validate a user's token.
# This is GET /v3/auth/tokens
admin_auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
admin_token = self.get_requested_token(admin_auth)
user_auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
user_token = self.get_requested_token(user_auth)
self.get('/auth/tokens', token=admin_token,
headers={'X-Subject-Token': user_token})
def test_user_check_same_token(self):
# Given a non-admin user token, the token can be used to check
# itself.
# This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token = self.get_requested_token(auth)
self.head('/auth/tokens', token=token,
headers={'X-Subject-Token': token}, expected_status=200)
def test_user_check_user_token(self):
# A user can check one of their own tokens.
# This is HEAD /v3/auth/tokens
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token1 = self.get_requested_token(auth)
token2 = self.get_requested_token(auth)
self.head('/auth/tokens', token=token1,
headers={'X-Subject-Token': token2}, expected_status=200)
def test_user_check_other_user_token_rejected(self):
# A user cannot check another user's token.
# This is HEAD /v3/auth/tokens
user1_auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
user1_token = self.get_requested_token(user1_auth)
user2_auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'])
user2_token = self.get_requested_token(user2_auth)
self.head('/auth/tokens', token=user1_token,
headers={'X-Subject-Token': user2_token},
expected_status=403)
def test_admin_check_user_token(self):
# An admin can check a user's token.
# This is HEAD /v3/auth/tokens
admin_auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
admin_token = self.get_requested_token(admin_auth)
user_auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
user_token = self.get_requested_token(user_auth)
self.head('/auth/tokens', token=admin_token,
headers={'X-Subject-Token': user_token}, expected_status=200)
def test_user_revoke_same_token(self):
# Given a non-admin user token, the token can be used to revoke
# itself.
# This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token = self.get_requested_token(auth)
self.delete('/auth/tokens', token=token,
headers={'X-Subject-Token': token})
def test_user_revoke_user_token(self):
# A user can revoke one of their own tokens.
# This is DELETE /v3/auth/tokens
auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
token1 = self.get_requested_token(auth)
token2 = self.get_requested_token(auth)
self.delete('/auth/tokens', token=token1,
headers={'X-Subject-Token': token2})
def test_user_revoke_other_user_token_rejected(self):
# A user cannot revoke another user's token.
# This is DELETE /v3/auth/tokens
user1_auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
user1_token = self.get_requested_token(user1_auth)
user2_auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'])
user2_token = self.get_requested_token(user2_auth)
self.delete('/auth/tokens', token=user1_token,
headers={'X-Subject-Token': user2_token},
expected_status=403)
def test_admin_revoke_user_token(self):
# An admin can revoke a user's token.
# This is DELETE /v3/auth/tokens
admin_auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
admin_token = self.get_requested_token(admin_auth)
user_auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'])
user_token = self.get_requested_token(user_auth)
self.delete('/auth/tokens', token=admin_token,
headers={'X-Subject-Token': user_token})
| jonnary/keystone | keystone/tests/unit/test_v3_protection.py | Python | apache-2.0 | 54,812 |
from Board import Board
from Board import Card
import numpy as np
from PIL import Image, ImageGrab
class ScreenParser:
    """
    Screen Parser

    Crops fixed screen regions out of a screenshot and recognizes the
    cards they contain, producing a Board.
    """
    def __init__(self):
        # Shared recognizer used for every cropped card region.
        self.recognizer = CardRecognizer()
        # Most recently loaded screenshot (PIL RGB image); set by
        # parse_screenshot().
        self.origin = None

    def capture_screenshot(self, im_path='screenshot.png'):
        """Grab the whole screen and save it to im_path."""
        im = ImageGrab.grab()
        im.save(im_path)

    def parse_screenshot(self, im_path='screenshot.png'):
        """Load the screenshot at im_path and return the parsed Board."""
        self.origin = Image.open(im_path).convert('RGB')
        tableau = self.__split_tableau_area__()
        foundation = self.__split_foundation_area__()
        board = Board(tableau=tableau)
        for card in foundation:
            for i in range(card.number):
                board.foundation.addToFoundation(card.color)
        return board

    def __split_tableau_area__(self):
        # Crop box of the top-left card plus the row/column strides.
        # NOTE(review): coordinates assume a fixed game-window position
        # and resolution -- TODO confirm.
        (left, upper, right, lower) = (173, 310, 187, 324)
        (ds, rs) = (31, 152)
        tableau = [[] for i in range(8)]
        # range() instead of py2-only xrange() for python3 compatibility.
        for i in range(8):
            for j in range(5):
                box = (left + rs * i, upper + ds * j,
                       right + rs * i, lower + ds * j)
                reg = self.origin.crop(box)
                card = self.recognizer.recognize_card(reg)
                if card:
                    tableau[i].append(card)
                else:
                    # First unrecognized region ends this column.
                    break
        return tableau

    def __split_foundation_area__(self):
        # Crop box of the first foundation pile and the stride between piles.
        (left, upper, right, lower) = (933, 46, 947, 60)
        rs = 152
        foundation = []
        for i in range(3):
            # Scan upwards one pixel at a time; a card found j pixels up is
            # only accepted when its number matches the pile height j+1.
            for j in range(9):
                box = (left + rs * i, upper - j, right + rs * i, lower - j)
                reg = self.origin.crop(box)
                card = self.recognizer.recognize_card(reg)
                if card and card.number == j + 1:
                    foundation.append(card)
                    break
        return foundation
class CardRecognizer:
    """
    Card Recognizer

    Recognizes a single card (type and color) from a small pixel crop.
    """
    def __init__(self):
        # Pre-computed boolean pixel masks, one per card type (13 rows).
        self.type_model = np.load('card_type.npy')

    def recognize_card(self, im):
        """Recognize a card from its cropped image; return Card or None."""
        src = np.array(list(im.getdata()))
        typ = self.__recognize_card_type__(src)
        if typ and typ < 10:
            color = self.__recognize_card_color__(src)
            return Card(color, typ)
        elif typ:
            # Types >= 10: numberless cards; typ-7 is passed as the Card's
            # first field -- presumably a special-card code, TODO confirm.
            return Card(typ - 7, None)
        else:
            return None

    def __recognize_card_type__(self, src):
        # Threshold the crop into a boolean mask and accept the first model
        # within `threshold` differing pixels.
        threshold = 28
        test = ((src[0] - src) > 8).any(1)
        # range() instead of py2-only xrange() for python3 compatibility.
        for i in range(13):
            if np.sum(np.logical_xor(self.type_model[i], test)) < threshold:
                return i + 1
        return None

    def __recognize_card_color__(self, src):
        """Classify the crop's dominant ink color: 0 green, 1 red, 2 black."""
        (r_threshold, g_threshold, threshold) = (80, 50, 30)
        # Offset against the table background color before comparing.
        dif = src - np.array([193, 195, 179])
        if np.sum(dif[:, 0] < (dif[:, 1] - r_threshold)) > threshold:
            return 1  # red
        if np.sum(dif[:, 1] < (dif[:, 0] - g_threshold)) > threshold:
            return 0  # green
        return 2  # black
| davidxk/SolitaireBot | screenparser.py | Python | mit | 3,066 |
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute.contrib import admin_actions
from nova.compute import api as compute_api
from nova.compute import vm_states
from nova import context
from nova import exception
from nova.openstack.common import cfg
from nova.openstack.common import jsonutils
from nova.scheduler import rpcapi as scheduler_rpcapi
from nova import test
from nova.tests.api.openstack import fakes
CONF = cfg.CONF

# Template instance record used as a test fixture.
INSTANCE = {
    "id": 1,
    "name": "fake",
    "display_name": "test_server",
    "uuid": "abcd",
    "user_id": 'fake_user_id',
    "tenant_id": 'fake_tenant_id',
    "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
    "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
    "security_groups": [{"id": 1, "name": "test"}],
    "progress": 0,
    "image_ref": 'http://foo.com/123',
    "fixed_ips": [],
    "instance_type": {"flavorid": '124'},
}
def fake_compute_api(*args, **kwargs):
    """No-op compute API stub that unconditionally reports success."""
    del args, kwargs  # the stub ignores all of its inputs
    return True
def fake_compute_api_raises_invalid_state(*args, **kwargs):
    """Compute API stub that always fails with InstanceInvalidState."""
    raise exception.InstanceInvalidState(
        attr='fake_attr',
        state='fake_state',
        method='fake_method',
        instance_uuid='fake')
def fake_compute_api_get(self, context, instance_id):
    """Stub for compute_api.API.get returning a minimal ACTIVE instance."""
    instance = {
        'id': 1,
        'uuid': instance_id,
        'vm_state': vm_states.ACTIVE,
        'task_state': None,
    }
    return instance
def fake_scheduler_api_live_migration(self, context, dest,
                                      block_migration=False,
                                      disk_over_commit=False, instance=None,
                                      instance_id=None, topic=None):
    """Scheduler live_migration stub; accepts everything and does nothing."""
    return None
class AdminActionsTest(test.TestCase):
    """Exercise the os-admin-actions server action extension."""

    # Action names as they appear in the request body.
    _actions = ('pause', 'unpause', 'suspend', 'resume', 'migrate',
                'resetNetwork', 'injectNetworkInfo', 'lock', 'unlock')

    # Matching compute_api.API method names to stub out.
    _methods = ('pause', 'unpause', 'suspend', 'resume', 'resize',
                'reset_network', 'inject_network_info', 'lock', 'unlock')

    _actions_that_check_state = (
        # action, method
        ('pause', 'pause'),
        ('unpause', 'unpause'),
        ('suspend', 'suspend'),
        ('resume', 'resume'),
        ('migrate', 'resize'))

    def setUp(self):
        super(AdminActionsTest, self).setUp()
        self.stubs.Set(compute_api.API, 'get', fake_compute_api_get)
        self.UUID = uuid.uuid4()
        for _method in self._methods:
            self.stubs.Set(compute_api.API, _method, fake_compute_api)
        self.stubs.Set(scheduler_rpcapi.SchedulerAPI,
                       'live_migration',
                       fake_scheduler_api_live_migration)
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Admin_actions'])

    def test_admin_api_actions(self):
        # Every admin action is accepted (202) against an ACTIVE instance.
        app = fakes.wsgi_app(init_only=('servers',))
        for _action in self._actions:
            req = webob.Request.blank('/v2/fake/servers/%s/action' %
                                      self.UUID)
            req.method = 'POST'
            req.body = jsonutils.dumps({_action: None})
            req.content_type = 'application/json'
            res = req.get_response(app)
            self.assertEqual(res.status_int, 202)

    def test_admin_api_actions_raise_conflict_on_invalid_state(self):
        # State-checking actions return 409 when the compute API reports
        # an invalid instance state.
        app = fakes.wsgi_app(init_only=('servers',))
        for _action, _method in self._actions_that_check_state:
            self.stubs.Set(compute_api.API, _method,
                           fake_compute_api_raises_invalid_state)
            req = webob.Request.blank('/v2/fake/servers/%s/action' %
                                      self.UUID)
            req.method = 'POST'
            req.body = jsonutils.dumps({_action: None})
            req.content_type = 'application/json'
            res = req.get_response(app)
            self.assertEqual(res.status_int, 409)
            # Interpolate the action name explicitly rather than through
            # the fragile "% locals()" idiom; the resulting string is
            # identical.
            self.assertIn("Cannot '%s' while instance" % _action,
                          res.body)

    def test_migrate_live_enabled(self):
        # A well-formed os-migrateLive request is accepted (202).
        ctxt = context.get_admin_context()
        ctxt.user_id = 'fake'
        ctxt.project_id = 'fake'
        ctxt.is_admin = True
        app = fakes.wsgi_app(fake_auth_context=ctxt, init_only=('servers',))
        req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
        req.method = 'POST'
        req.body = jsonutils.dumps({
            'os-migrateLive': {
                'host': 'hostname',
                'block_migration': False,
                'disk_over_commit': False,
            }
        })
        req.content_type = 'application/json'

        def fake_update(inst, context, instance,
                        task_state, expected_task_state):
            return None

        self.stubs.Set(compute_api.API, 'update', fake_update)
        res = req.get_response(app)
        self.assertEqual(res.status_int, 202)

    def test_migrate_live_missing_dict_param(self):
        # A request missing the required 'host' key is a bad request (400).
        ctxt = context.get_admin_context()
        ctxt.user_id = 'fake'
        ctxt.project_id = 'fake'
        ctxt.is_admin = True
        app = fakes.wsgi_app(fake_auth_context=ctxt, init_only=('servers',))
        req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
        req.method = 'POST'
        req.body = jsonutils.dumps({
            'os-migrateLive': {
                'dummy': 'hostname',
                'block_migration': False,
                'disk_over_commit': False,
            }
        })
        req.content_type = 'application/json'
        res = req.get_response(app)
        self.assertEqual(res.status_int, 400)
class CreateBackupTests(test.TestCase):
    """Tests for the createBackup server action."""

    def setUp(self):
        super(CreateBackupTests, self).setUp()
        self.stubs.Set(compute_api.API, 'get', fake_compute_api_get)
        self.backup_stubs = fakes.stub_out_compute_api_backup(self.stubs)
        self.app = compute.APIRouter(init_only=('servers',))
        self.uuid = uuid.uuid4()

    def _get_request(self, body):
        # Build a POST /servers/<uuid>/action request carrying `body`.
        url = '/fake/servers/%s/action' % self.uuid
        req = fakes.HTTPRequest.blank(url)
        req.method = 'POST'
        req.content_type = 'application/json'
        req.body = jsonutils.dumps(body)
        return req

    def test_create_backup_with_metadata(self):
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'backup_type': 'daily',
                'rotation': 1,
                'metadata': {'123': 'asdf'},
            },
        }
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 202)
        self.assertTrue(response.headers['Location'])

    def test_create_backup_with_too_much_metadata(self):
        # Exceeding quota_metadata_items yields 413 Request Entity Too Large.
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'backup_type': 'daily',
                'rotation': 1,
                'metadata': {'123': 'asdf'},
            },
        }
        for num in range(CONF.quota_metadata_items + 1):
            body['createBackup']['metadata']['foo%i' % num] = "bar"
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 413)

    def test_create_backup_no_name(self):
        # Name is required for backups.
        body = {
            'createBackup': {
                'backup_type': 'daily',
                'rotation': 1,
            },
        }
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 400)

    def test_create_backup_no_rotation(self):
        # Rotation is required for backup requests.
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'backup_type': 'daily',
            },
        }
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 400)

    def test_create_backup_negative_rotation(self):
        """Rotation must be greater than or equal to zero
        for backup requests
        """
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'backup_type': 'daily',
                'rotation': -1,
            },
        }
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 400)

    def test_create_backup_no_backup_type(self):
        # Backup Type (daily or weekly) is required for backup requests.
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'rotation': 1,
            },
        }
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 400)

    def test_create_backup_bad_entity(self):
        # A non-dict createBackup entity is a bad request.
        body = {'createBackup': 'go'}
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 400)

    def test_create_backup_rotation_is_zero(self):
        # The happy path for creating backups if rotation is zero.
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'backup_type': 'daily',
                'rotation': 0,
            },
        }
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 202)
        # Rotation zero means no image is kept, so no Location header.
        self.assertFalse('Location' in response.headers)

    def test_create_backup_rotation_is_positive(self):
        # The happy path for creating backups if rotation is positive.
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'backup_type': 'daily',
                'rotation': 1,
            },
        }
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 202)
        self.assertTrue(response.headers['Location'])

    def test_create_backup_raises_conflict_on_invalid_state(self):
        # An invalid instance state surfaces as 409 Conflict.
        body = {
            'createBackup': {
                'name': 'Backup 1',
                'backup_type': 'daily',
                'rotation': 1,
            },
        }
        self.stubs.Set(compute_api.API, 'backup',
                       fake_compute_api_raises_invalid_state)
        request = self._get_request(body)
        response = request.get_response(self.app)
        self.assertEqual(response.status_int, 409)
class ResetStateTests(test.TestCase):
    """Tests for the os-resetState admin action."""

    def setUp(self):
        super(ResetStateTests, self).setUp()
        self.exists = True   # controls whether the fake lookup succeeds
        self.kwargs = None   # captures kwargs passed to compute update
        self.uuid = uuid.uuid4()

        def fake_get(inst, context, instance_id):
            if self.exists:
                return dict(id=1, uuid=instance_id, vm_state=vm_states.ACTIVE)
            raise exception.InstanceNotFound(instance_id=instance_id)

        def fake_update(inst, context, instance, **kwargs):
            self.kwargs = kwargs

        self.stubs.Set(compute_api.API, 'get', fake_get)
        self.stubs.Set(compute_api.API, 'update', fake_update)
        self.admin_api = admin_actions.AdminActionsController()

        url = '/fake/servers/%s/action' % self.uuid
        self.request = fakes.HTTPRequest.blank(url)

    def test_no_state(self):
        # Missing "state" key is a bad request.
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.admin_api._reset_state,
                          self.request, 'inst_id',
                          {"os-resetState": None})

    def test_bad_state(self):
        # Unknown state name is a bad request.
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.admin_api._reset_state,
                          self.request, 'inst_id',
                          {"os-resetState": {"state": "spam"}})

    def test_no_instance(self):
        # A missing instance yields 404.
        self.exists = False
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.admin_api._reset_state,
                          self.request, 'inst_id',
                          {"os-resetState": {"state": "active"}})

    def test_reset_active(self):
        body = {"os-resetState": {"state": "active"}}
        result = self.admin_api._reset_state(self.request, 'inst_id', body)
        self.assertEqual(result.status_int, 202)
        self.assertEqual(self.kwargs, dict(vm_state=vm_states.ACTIVE,
                                           task_state=None))

    def test_reset_error(self):
        body = {"os-resetState": {"state": "error"}}
        result = self.admin_api._reset_state(self.request, 'inst_id', body)
        self.assertEqual(result.status_int, 202)
        self.assertEqual(self.kwargs, dict(vm_state=vm_states.ERROR,
                                           task_state=None))
| maoy/zknova | nova/tests/api/openstack/compute/contrib/test_admin_actions.py | Python | apache-2.0 | 13,648 |
#!/usr/bin/env python
"""
Framework to start a simulated vehicle and connect it to MAVProxy.
Peter Barker, April 2016
based on sim_vehicle.sh by Andrew Tridgell, October 2011
"""
import atexit
import getpass
import optparse
import os
import os.path
import signal
import subprocess
import sys
import tempfile
import time
# IDs of terminal windows opened on macOS, recorded so kill_tasks_macos()
# can close them again at exit.
windowID = []
class CompatError(Exception):
    """Carries optparse state when the compat-mode parse error is hit.

    Raised by CompatOptionParser.error() so parse_args() can recover the
    already-parsed options and the unconsumed arguments.
    """
    def __init__(self, error, opts, rargs):
        super(CompatError, self).__init__(error)
        self.opts = opts    # options parsed before the failure
        self.rargs = rargs  # arguments remaining at the failure point
class CompatOptionParser(optparse.OptionParser):
    """An option parser which emulates the behaviour of the old
    sim_vehicle.sh; if passed -C, the first argument not understood starts
    a list of arguments that are passed straight to mavproxy.
    """

    def __init__(self, *args, **kwargs):
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def error(self, error):
        """Override default error handler called by
        optparse.OptionParser.parse_args when a parse error occurs;
        raise a detailed exception which can be caught.
        """
        if error.find("no such option") != -1:
            raise CompatError(error, self.values, self.rargs)
        optparse.OptionParser.error(self, error)

    def parse_args(self, args=None, values=None):
        """Wrap parse_args so we can catch the exception raised upon
        discovering the known parameter parsing error.
        """
        try:
            # Bug fix: forward args/values to the base class. The original
            # code called parse_args(self) with no arguments, silently
            # ignoring an explicitly supplied argument list and always
            # re-reading sys.argv.
            opts, args = optparse.OptionParser.parse_args(self, args, values)
        except CompatError as e:
            if not e.opts.sim_vehicle_sh_compatible:
                print(e)
                print("Perhaps you want --sim_vehicle_sh_compatible (-C)?")
                sys.exit(1)
            if e.opts.mavproxy_args:
                print("--mavproxy-args not permitted in compat mode")
                sys.exit(1)
            args = []
            opts = e.opts
            mavproxy_args = [str(e)[16:]]  # this trims "no such option" off
            mavproxy_args.extend(e.rargs)
            opts.ensure_value("mavproxy_args", " ".join(mavproxy_args))
        return opts, args
def cygwin_pidof(proc_name):
    """Return the pids (as strings) of processes named proc_name.

    Cygwin has no pidof(1), so shell out to ps(1) and parse the output.
    Thanks to kata198 for this:
    https://github.com/kata198/cygwin-ps-misc/blob/master/pidof
    """
    # universal_newlines makes stdout a text stream on both python2 and
    # python3 (stdout.read() would otherwise return bytes under python3,
    # breaking the str-based .replace()/.split() below).
    pipe = subprocess.Popen("ps -ea | grep " + proc_name,
                            shell=True, stdout=subprocess.PIPE,
                            universal_newlines=True)
    output_lines = pipe.stdout.read().replace("\r", "").split("\n")
    ret = pipe.wait()
    pids = []
    if ret != 0:
        # No results
        return []
    for line in output_lines:
        if not line:
            continue
        line_split = [item for item in line.split(' ') if item]
        cmd = line_split[-1].split('/')[-1]
        if cmd == proc_name:
            # The pid is normally the first column, but ps may prefix a
            # non-numeric status column; fall back to the second column.
            # (Narrowed from a bare except: only a failed int() parse
            # should trigger the fallback.)
            try:
                pid = int(line_split[0].strip())
            except ValueError:
                pid = int(line_split[1].strip())
            str_pid = str(pid)
            if str_pid not in pids:
                pids.append(str_pid)
    return pids
def under_cygwin():
    """Return True when we appear to be running under Cygwin."""
    cygstart = "/usr/bin/cygstart"
    return os.path.exists(cygstart)
def under_macos():
    """Return True when running on macOS (Darwin)."""
    return sys.platform == 'darwin'
def kill_tasks_cygwin(victims):
    """SIGKILL every process whose name appears in victims (Cygwin only)."""
    for victim in list(victims):
        pids = cygwin_pidof(victim)
        # progress("pids for (%s): %s" % (victim,",".join([ str(p) for p in pids])))
        for apid in pids:
            # Bug fix: cygwin_pidof returns pids as *strings*, but os.kill
            # requires an integer pid; the original call raised TypeError.
            os.kill(int(apid), signal.SIGKILL)
def kill_tasks_macos():
    """Ask Terminal.app (via osascript) to close every window we opened."""
    close_cmd = ("osascript -e \'tell application \"Terminal\" "
                 "to close (window(get index of window id %s))\'")
    for window in windowID:
        os.system(close_cmd % window)
def kill_tasks_psutil(victims):
    """Use the psutil module to kill tasks by name. Sadly, this module is
    not available on Windows, but when it is we should be able to *just*
    use this routine.
    """
    import psutil
    for proc in psutil.process_iter():
        is_zombie = proc.status == psutil.STATUS_ZOMBIE
        if not is_zombie and proc.name in victims:
            proc.kill()
def kill_tasks_pkill(victims):
    """Kill processes by name via pkill(1), one pattern per invocation."""
    # pkill takes a single pattern, so iterate over the victims.
    for victim in victims:
        run_cmd_blocking("pkill", ["pkill", victim], quiet=True)
class BobException(Exception):
    """Exception type used to signal Bob-related failures."""
def kill_tasks():
    """Clean up stray processes by name. This is a somewhat shotgun
    approach: every known simulator/proxy binary name is targeted.
    """
    progress("Killing tasks")
    try:
        victim_names = [
            'JSBSim',
            'lt-JSBSim',
            'ArduPlane.elf',
            'ArduCopter.elf',
            'APMrover2.elf',
            'AntennaTracker.elf',
            'JSBSIm.exe',
            'MAVProxy.exe',
            'runsim.py',
            'AntennaTracker.elf',
        ]
        # Platform-specific fast paths first.
        if under_cygwin():
            return kill_tasks_cygwin(victim_names)
        if under_macos():
            return kill_tasks_macos()
        # Prefer psutil when available, otherwise shell out to pkill.
        try:
            kill_tasks_psutil(victim_names)
        except ImportError:
            kill_tasks_pkill(victim_names)
    except Exception as e:
        # Best effort only: a failed cleanup must not abort the caller.
        progress("kill_tasks failed: {}".format(str(e)))
# clean up processes at exit: registered so stray simulator processes do
# not outlive this script
atexit.register(kill_tasks)
def check_jsbsim_version():
    """Assert that the JSBSim we will run is the one we expect to run.

    Runs "JSBSim --version" and checks the output mentions ArduPilot;
    prints instructions and exits otherwise.
    """
    jsbsim_cmd = ["JSBSim", "--version"]
    progress_cmd("Get JSBSim version", jsbsim_cmd)
    try:
        # universal_newlines so communicate() returns str on python3 as
        # well; bytes would make the .index("ArduPilot") check below
        # raise TypeError instead of ValueError.
        jsbsim_version = subprocess.Popen(
            jsbsim_cmd, stdout=subprocess.PIPE,
            universal_newlines=True).communicate()[0]
    except OSError:
        jsbsim_version = ''  # this value will trigger the ".index"
        # check below and produce a reasonable
        # error message
    try:
        jsbsim_version.index("ArduPilot")
    except ValueError:
        print(r"""
=========================================================
You need the latest ArduPilot version of JSBSim installed
and in your \$PATH
Please get it from git://github.com/tridge/jsbsim.git
See
http://ardupilot.org/dev/docs/setting-up-sitl-on-linux.html
for more details
=========================================================
""")
        sys.exit(1)
def progress(text):
    """Display sim_vehicle progress text"""
    message = "SIM_VEHICLE: " + text
    print(message)
def find_autotest_dir():
    """Return path to autotest directory"""
    this_script = os.path.realpath(__file__)
    return os.path.dirname(this_script)
def find_root_dir():
    """Return path to root directory"""
    autotest = find_autotest_dir()
    # autotest lives two levels below the repository root.
    return os.path.realpath(os.path.join(autotest, '..', '..'))
progress("Start")
# define and run parser
parser = CompatOptionParser("sim_vehicle.py", epilog="""
eeprom.bin in the starting directory contains the parameters for your simulated vehicle. Always start from the same directory. It is recommended that you start in the main vehicle directory for the vehicle you are simulating, for example, start in the ArduPlane directory to simulate ArduPlane
""")
parser.add_option("-v", "--vehicle", type='string', default=None, help="vehicle type (ArduPlane, ArduCopter or APMrover2)")
parser.add_option("-f", "--frame", type='string', default=None, help="""set aircraft frame type
for copters can choose +, X, quad or octa
for planes can choose elevon or vtail""")
parser.add_option("-C", "--sim_vehicle_sh_compatible", action='store_true', default=False, help="be compatible with the way sim_vehicle.sh works; make this the first option")
parser.add_option("-H", "--hil", action='store_true', default=False, help="start HIL")
group_build = optparse.OptionGroup(parser, "Build options")
group_build.add_option("-N", "--no-rebuild", action='store_true', default=False, help="don't rebuild before starting ardupilot")
group_build.add_option("-D", "--debug", action='store_true', default=False, help="build with debugging")
group_build.add_option("-c", "--clean", action='store_true', default=False, help="do a make clean before building")
group_build.add_option("-j", "--jobs", default=None, type='int', help="number of processors to use during build (default for waf : number of processor, for make : 1)")
group_build.add_option("-b", "--build-target", default=None, type='string', help="override SITL build target")
group_build.add_option("-s", "--build-system", default="waf", type='choice', choices=["make", "waf"], help="build system to use")
group_build.add_option("", "--no-rebuild-on-failure", dest="rebuild_on_failure", action='store_false', default=True, help="if build fails, do not clean and rebuild")
parser.add_option_group(group_build)
group_sim = optparse.OptionGroup(parser, "Simulation options")
group_sim.add_option("-I", "--instance", default=0, type='int', help="instance of simulator")
group_sim.add_option("-V", "--valgrind", action='store_true', default=False, help="enable valgrind for memory access checking (very slow!)")
group_sim.add_option("-T", "--tracker", action='store_true', default=False, help="start an antenna tracker instance")
group_sim.add_option("-A", "--sitl-instance-args", type='string', default=None, help="pass arguments to SITL instance")
# group_sim.add_option("-R", "--reverse-throttle", action='store_true', default=False, help="reverse throttle in plane")
group_sim.add_option("-G", "--gdb", action='store_true', default=False, help="use gdb for debugging ardupilot")
group_sim.add_option("-g", "--gdb-stopped", action='store_true', default=False, help="use gdb for debugging ardupilot (no auto-start)")
group_sim.add_option("-d", "--delay-start", default=0, type='float', help="delays the start of mavproxy by the number of seconds")
group_sim.add_option("-B", "--breakpoint", type='string', action="append", default=[], help="add a breakpoint at given location in debugger")
group_sim.add_option("-M", "--mavlink-gimbal", action='store_true', default=False, help="enable MAVLink gimbal")
group_sim.add_option("-L", "--location", type='string', default='CMAC', help="select start location from Tools/autotest/locations.txt")
group_sim.add_option("-l", "--custom-location", type='string', default=None, help="set custom start location")
group_sim.add_option("-S", "--speedup", default=1, type='int', help="set simulation speedup (1 for wall clock time)")
group_sim.add_option("-t", "--tracker-location", default='CMAC_PILOTSBOX', type='string', help="set antenna tracker start location")
group_sim.add_option("-w", "--wipe-eeprom", action='store_true', default=False, help="wipe EEPROM and reload parameters")
group_sim.add_option("-m", "--mavproxy-args", default=None, type='string', help="additional arguments to pass to mavproxy.py")
group_sim.add_option("", "--strace", action='store_true', default=False, help="strace the ArduPilot binary")
group_sim.add_option("", "--model", type='string', default=None, help="Override simulation model to use")
parser.add_option_group(group_sim)
# special-cased parameters for mavproxy, because some people's fingers
# have long memories, and they don't want to use -C :-)
group = optparse.OptionGroup(parser, "Compatibility MAVProxy options (consider using --mavproxy-args instead)")
group.add_option("", "--out", default=[], type='string', action="append", help="create an additional mavlink output")
group.add_option("", "--map", default=False, action='store_true', help="load map module on startup")
group.add_option("", "--console", default=False, action='store_true', help="load console module on startup")
parser.add_option_group(group)
cmd_opts, cmd_args = parser.parse_args()

# -C implies the old single-job make behaviour unless -j was given
if cmd_opts.sim_vehicle_sh_compatible and cmd_opts.jobs is None:
    cmd_opts.jobs = 1

# validate parameters
if cmd_opts.hil:
    if cmd_opts.valgrind:
        print("May not use valgrind with hil")
        sys.exit(1)
    if cmd_opts.gdb or cmd_opts.gdb_stopped:
        print("May not use gdb with hil")
        sys.exit(1)
    if cmd_opts.strace:
        print("May not use strace with hil")
        sys.exit(1)

if cmd_opts.valgrind and (cmd_opts.gdb or cmd_opts.gdb_stopped):
    print("May not use valgrind with gdb")
    sys.exit(1)

if cmd_opts.strace and (cmd_opts.gdb or cmd_opts.gdb_stopped):
    print("May not use strace with gdb")
    sys.exit(1)

if cmd_opts.strace and cmd_opts.valgrind:
    print("valgrind and strace almost certainly not a good idea")

# magically determine vehicle type (if required):
if cmd_opts.vehicle is None:
    cwd = os.getcwd()
    cmd_opts.vehicle = os.path.basename(cwd)

# determine a frame type if not specified:
default_frame_for_vehicle = {
    "APMrover2": "rover",
    "ArduPlane": "jsbsim",
    "ArduCopter": "quad",
    "AntennaTracker": "tracker",
}

if cmd_opts.vehicle not in default_frame_for_vehicle:
    # try in parent directories, useful for having config in subdirectories
    cwd = os.getcwd()
    while cwd:
        bname = os.path.basename(cwd)
        if not bname:
            break
        if bname in default_frame_for_vehicle:
            cmd_opts.vehicle = bname
            break
        cwd = os.path.dirname(cwd)

# try to validate vehicle
if cmd_opts.vehicle not in default_frame_for_vehicle:
    progress("** Is (%s) really your vehicle type? Try -v VEHICLETYPE if not, or be in the e.g. ArduCopter subdirectory" % (cmd_opts.vehicle,))

# determine frame options (e.g. build type might be "sitl")
if cmd_opts.frame is None:
    cmd_opts.frame = default_frame_for_vehicle[cmd_opts.vehicle]

# setup ports for this instance; each simulator instance gets its own
# block of 10 ports
mavlink_port = "tcp:127.0.0.1:" + str(5760 + 10 * cmd_opts.instance)
simout_port = "127.0.0.1:" + str(5501 + 10 * cmd_opts.instance)

"""
make_target: option passed to make to create binaries. Usually sitl, and "-debug" may be appended if -D is passed to sim_vehicle.py
default_params_filename: filename of default parameters file. Taken to be relative to autotest dir.
extra_mavlink_cmds: extra parameters that will be passed to mavproxy
"""
# Per-frame build/simulation configuration, keyed by --frame value.
_options_for_frame = {
    "calibration": {
        "extra_mavlink_cmds": "module load sitl_calibration;",
    },
    # COPTER
    "+": {
        "waf_target": "bin/arducopter-quad",
        "default_params_filename": "default_params/copter_params.parm",
    },
    "quad": {
        "model": "+",
        "waf_target": "bin/arducopter-quad",
        "default_params_filename": "default_params/copter_params.parm",
    },
    "X": {
        "waf_target": "bin/arducopter-quad",
        # this param set FRAME doesn't actually work because mavproxy
        # won't set a parameter unless it knows of it, and the param fetch happens asynchronously
        "default_params_filename": "default_params/copter_params.parm",
        "extra_mavlink_cmds": "param fetch frame; param set FRAME 1;",
    },
    "hexa": {
        "make_target": "sitl-hexa",
        "waf_target": "bin/arducopter-hexa",
        "default_params_filename": "default_params/copter_params.parm",
    },
    "octa": {
        "make_target": "sitl-octa",
        "waf_target": "bin/arducopter-octa",
        "default_params_filename": "default_params/copter_params.parm",
    },
    "tri": {
        "make_target": "sitl-tri",
        "waf_target": "bin/arducopter-tri",
        "default_params_filename": "default_params/tri_params.parm",
    },
    "y6": {
        "make_target": "sitl-y6",
        "waf_target": "bin/arducopter-y6",
        "default_params_filename": "default_params/y6_params.parm",
    },
    # COPTER TYPES
    "IrisRos": {
        "waf_target": "bin/arducopter-quad",
        "default_params_filename": "default_params/copter_params.parm",
    },
    "firefly": {
        "waf_target": "bin/arducopter-firefly",
        "default_params_filename": "default_params/firefly.parm",
    },
    # HELICOPTER
    "heli": {
        "make_target": "sitl-heli",
        "waf_target": "bin/arducopter-heli",
        "default_params_filename": "default_params/Helicopter.parm",
    },
    "heli-dual": {
        "make_target": "sitl-heli-dual",
        "waf_target": "bin/arducopter-coax",  # is this correct? -pb201604301447
    },
    "heli-compound": {
        "make_target": "sitl-heli-compound",
        "waf_target": "bin/arducopter-coax",  # is this correct? -pb201604301447
    },
    "singlecopter": {
        "make_target": "sitl-single",
        "waf_target": "bin/arducopter-single",
        "default_params_filename": "default_params/SingleCopter.parm",
    },
    "coaxcopter": {
        "make_target": "sitl-coax",
        "waf_target": "bin/arducopter-coax",
        "default_params_filename": "default_params/CoaxCopter.parm",
    },
    # PLANE
    "quadplane-tilttri": {
        "make_target": "sitl-tri",
        "waf_target": "bin/arduplane-tri",
        "default_params_filename": "default_params/quadplane-tilttri.parm",
    },
    "quadplane-tri": {
        "make_target": "sitl-tri",
        "waf_target": "bin/arduplane-tri",
        "default_params_filename": "default_params/quadplane-tri.parm",
    },
    "quadplane": {
        "waf_target": "bin/arduplane",
        "default_params_filename": "default_params/quadplane.parm",
    },
    "plane-elevon": {
        "waf_target": "bin/arduplane",
        "default_params_filename": "default_params/plane-elevons.parm",
    },
    "plane-vtail": {
        "waf_target": "bin/arduplane",
        "default_params_filename": "default_params/plane-vtail.parm",
    },
    "plane": {
        "waf_target": "bin/arduplane",
        "default_params_filename": "default_params/plane.parm",
    },
    # ROVER
    "rover": {
        "waf_target": "bin/ardurover",
        "default_params_filename": "default_params/Rover.parm",
    },
    "rover-skid": {
        "waf_target": "bin/ardurover",
        "default_params_filename": "default_params/Rover-skid.parm",
    },
    # SIM
    "Gazebo": {
        "waf_target": "bin/arducopter-quad",
        "default_params_filename": "default_params/copter_params.parm",
    },
    "last_letter": {
        "waf_target": "bin/arduplane",
    },
    "CRRCSim": {
        "waf_target": "bin/arduplane",
    },
    "jsbsim": {
        "waf_target": "bin/arduplane",
        "default_params_filename": "default_params/ArduPlane.parm",
    },
}
# Fallback waf build target per vehicle, used when the frame's options do
# not specify one.
_default_waf_target = {
    "ArduPlane": "bin/arduplane",
    "ArduCopter": "bin/arducopter-quad",
    "APMrover2": "bin/ardurover",
    "AntennaTracker": "bin/antennatracker",
}
def default_waf_target(vehicle):
    """Returns a waf target based on vehicle type, which is often determined by which directory the user is in"""
    # Raises KeyError if vehicle is not one of the known vehicle types.
    return _default_waf_target[vehicle]
def options_for_frame(frame, vehicle, opts):
    """Return information about how to sitl for frame e.g. build-type==sitl

    Looks frame up in _options_for_frame (exact match first, then by
    prefix, then by "-heli" suffix) and fills in defaults for missing
    keys.  The returned dict always has "model", "sitl-port",
    "make_target" and "waf_target" keys.
    """
    ret = None
    if frame in _options_for_frame:
        ret = _options_for_frame[frame]
    else:
        for p in ["octa", "tri", "y6", "firefly", "heli", "last_letter", "jsbsim", "quadplane", "plane-elevon", "plane-vtail", "plane"]:
            if frame.startswith(p):
                ret = _options_for_frame[p]
                break
    if ret is None and frame.endswith("-heli"):
        ret = _options_for_frame["heli"]
    # BUGFIX: work on a copy.  The original code mutated the shared
    # _options_for_frame entry in place below (ret["model"] = ..., etc.),
    # corrupting the table for any later lookup of the same frame.
    ret = {} if ret is None else dict(ret)
    if "model" not in ret:
        ret["model"] = frame
    if "sitl-port" not in ret:
        ret["sitl-port"] = True
    if opts.model is not None:
        ret["model"] = opts.model
    # xplane/flightaxis talk to an external sim, so no SITL output port:
    if (ret["model"].find("xplane") != -1 or ret["model"].find("flightaxis") != -1):
        ret["sitl-port"] = False
    if "make_target" not in ret:
        ret["make_target"] = "sitl"
    if "waf_target" not in ret:
        ret["waf_target"] = default_waf_target(vehicle)
    if opts.build_target is not None:
        ret["make_target"] = opts.build_target
        ret["waf_target"] = opts.build_target
    return ret
def do_build_waf(opts, frame_options):
    """Build sitl using waf

    Configures waf for the "sitl" board, optionally cleans, then builds
    frame_options["waf_target"].  On failure optionally retries once
    after a clean; exits the process if the build still fails.
    """
    progress("WAF build")
    old_dir = os.getcwd()
    root_dir = find_root_dir()
    os.chdir(root_dir)  # waf must run from the repository root
    waf_light = os.path.join(root_dir, "modules/waf/waf-light")
    cmd_configure = [waf_light, "configure", "--board", "sitl"]
    if opts.debug:
        cmd_configure.append("--debug")
    run_cmd_blocking("Configure waf", cmd_configure)
    if opts.clean:
        run_cmd_blocking("Building clean", [waf_light, "clean"])
    cmd_build = [waf_light, "build", "--target", frame_options["waf_target"]]
    if opts.jobs is not None:
        cmd_build += ['-j', str(opts.jobs)]
    # sts is the raw os.waitpid() status word; 0 means success
    _, sts = run_cmd_blocking("Building", cmd_build)
    if sts != 0:  # build failed
        if opts.rebuild_on_failure:
            progress("Build failed; cleaning and rebuilding")
            run_cmd_blocking("Building clean", [waf_light, "clean"])
            _, sts = run_cmd_blocking("Building", cmd_build)
            if sts != 0:
                progress("Build failed")
                sys.exit(1)
        else:
            progress("Build failed")
            sys.exit(1)
    os.chdir(old_dir)
def do_build(vehicledir, opts, frame_options):
    """Build build target (e.g. sitl) in directory vehicledir

    Delegates to do_build_waf() for the waf build system; otherwise runs
    the legacy make build of frame_options["make_target"] inside
    vehicledir, retrying once after a clean on failure.
    """
    if opts.build_system == 'waf':
        return do_build_waf(opts, frame_options)
    old_dir = os.getcwd()
    os.chdir(vehicledir)
    if opts.clean:
        run_cmd_blocking("Building clean", ["make", "clean"])
    build_target = frame_options["make_target"]
    if opts.debug:
        build_target += "-debug"
    build_cmd = ["make", build_target]
    if opts.jobs is not None:
        build_cmd += ['-j', str(opts.jobs)]
    # sts is the raw os.waitpid() status word; 0 means success
    _, sts = run_cmd_blocking("Building %s" % build_target, build_cmd)
    if sts != 0:
        progress("Build failed; cleaning and rebuilding")
        run_cmd_blocking("Cleaning", ["make", "clean"])
        _, sts = run_cmd_blocking("Building %s" % build_target, build_cmd)
        if sts != 0:
            progress("Build failed")
            sys.exit(1)
    os.chdir(old_dir)
def find_location_by_name(autotest, locname):
    """Search locations.txt for locname, return GPS coords

    Lines in <autotest>/locations.txt have the form NAME=lat,lon,alt,hdg;
    returns the right-hand side for the first matching NAME.  Exits the
    process if the location is unknown.
    """
    locations_filepath = os.path.join(autotest, "locations.txt")
    # BUGFIX: use a context manager so the file is closed even when we
    # return from inside the loop (the original leaked the handle).
    with open(locations_filepath, 'r') as f:
        for line in f:
            line = line.rstrip("\n")
            (name, loc) = line.split("=")
            if name == locname:
                return loc
    # BUGFIX: report the name we were actually asked for, not the global
    # cmd_opts.location (they differ when looking up opts.tracker_location).
    print("Failed to find location (%s)" % locname)
    sys.exit(1)
def progress_cmd(what, cmd):
    """Print cmd in a way a user could cut-and-paste to get the same effect"""
    progress(what)
    quoted_parts = ['"%s"' % part for part in cmd]
    progress(" ".join(quoted_parts))
def run_cmd_blocking(what, cmd, quiet=False, **kw):
    """Run cmd (a list), blocking until it exits.

    Returns the (pid, status) pair from os.waitpid; status is the raw
    16-bit wait status, so callers test it against 0 rather than
    treating it as an exit code.  kw is passed through to Popen.
    """
    if not quiet:
        progress_cmd(what, cmd)
    p = subprocess.Popen(cmd, **kw)
    return os.waitpid(p.pid, 0)
def run_in_terminal_window(autotest, name, cmd):
    """Execute the run_in_terminal_window.sh command for cmd

    Launches cmd in a new terminal window via the helper script; does
    not wait for it to finish.
    """
    global windowID
    runme = [os.path.join(autotest, "run_in_terminal_window.sh"), name]
    runme.extend(cmd)
    progress_cmd("Run " + name, runme)
    if under_macos():
        # on MacOS record the window IDs so we can close them later
        out = subprocess.Popen(runme, stdout=subprocess.PIPE).communicate()[0]
        import re
        p = re.compile('tab 1 of window id (.*)')
        windowID.append(p.findall(out)[0])
    else:
        # NOTE(review): Popen handle is intentionally discarded; the
        # terminal window outlives this script.
        p = subprocess.Popen(runme)
tracker_uarta = None  # blemish: module-level state set by start_antenna_tracker(), read by start_mavproxy()
def start_antenna_tracker(autotest, opts):
    """Compile and run the AntennaTracker, add tracker to mavproxy

    Builds the AntennaTracker vehicle, then launches it in its own
    terminal window; records its MAVLink endpoint in the module-level
    tracker_uarta for start_mavproxy() to pick up.
    """
    global tracker_uarta
    progress("Preparing antenna tracker")
    # Consistency fix: use the autotest parameter we were given rather
    # than re-deriving it via find_autotest_dir() (same value in practice).
    tracker_home = find_location_by_name(autotest, opts.tracker_location)
    vehicledir = os.path.join(autotest, "../../" + "AntennaTracker")
    # BUGFIX: do_build() indexes frame_options["make_target"] /
    # ["waf_target"], so passing the bare string "sitl-debug" raised a
    # TypeError; pass a proper frame-options dict instead.
    do_build(vehicledir, opts, {"make_target": "sitl-debug",
                                "waf_target": "bin/antennatracker"})
    tracker_instance = 1
    os.chdir(vehicledir)
    # instance N listens on TCP port 5760 + 10*N
    tracker_uarta = "tcp:127.0.0.1:" + str(5760 + 10 * tracker_instance)
    exe = os.path.join(vehicledir, "AntennaTracker.elf")
    run_in_terminal_window(autotest,
                           "AntennaTracker",
                           ["nice", exe, "-I" + str(tracker_instance),
                            "--model=tracker", "--home=" + tracker_home])
def start_vehicle(binary, autotest, opts, stuff, loc):
    """Run the ArduPilot binary

    Assembles the command line (optionally wrapped in valgrind, gdb or
    strace), then launches it in a new terminal window.  stuff is the
    frame-options dict from options_for_frame(); loc is the home
    location string.
    """
    cmd_name = opts.vehicle
    cmd = []
    if opts.valgrind:
        cmd_name += " (valgrind)"
        cmd.append("valgrind")
    if opts.gdb:
        cmd_name += " (gdb)"
        cmd.append("gdb")
        # Write requested breakpoints plus an initial "run" into a
        # commands file that gdb executes on startup.
        gdb_commands_file = tempfile.NamedTemporaryFile(delete=False)
        atexit.register(os.unlink, gdb_commands_file.name)
        # NOTE: "breakpoint" shadows the (Python 3.7+) builtin; harmless here
        for breakpoint in opts.breakpoint:
            gdb_commands_file.write("b %s\n" % (breakpoint,))
        gdb_commands_file.write("r\n")
        gdb_commands_file.close()
        cmd.extend(["-x", gdb_commands_file.name])
        cmd.append("--args")
    if opts.strace:
        cmd_name += " (strace)"
        cmd.append("strace")
        strace_options = ['-o', binary + '.strace', '-s', '8000', '-ttt']
        cmd.extend(strace_options)
    cmd.append(binary)
    cmd.append("-S")
    cmd.append("-I" + str(opts.instance))
    cmd.extend(["--home", loc])
    if opts.wipe_eeprom:
        cmd.append("-w")
    cmd.extend(["--model", stuff["model"]])
    cmd.extend(["--speedup", str(opts.speedup)])
    if opts.sitl_instance_args:
        cmd.extend(opts.sitl_instance_args.split(" "))  # this could be a lot better..
    if opts.mavlink_gimbal:
        cmd.append("--gimbal")
    if "default_params_filename" in stuff:
        path = os.path.join(autotest, stuff["default_params_filename"])
        progress("Using defaults from (%s)" % (path,))
        cmd.extend(["--defaults", path])
    run_in_terminal_window(autotest, cmd_name, cmd)
def start_mavproxy(opts, stuff):
    """Run mavproxy

    Builds the mavproxy command line from the options and the
    frame-options dict (stuff), then blocks until MAVProxy exits.
    Reads module globals mavlink_port, simout_port and tracker_uarta.
    """
    # FIXME: would be nice to e.g. "mavproxy.mavproxy(....).run" rather than shelling out
    extra_cmd = ""
    cmd = []
    if under_cygwin():
        cmd.append("/usr/bin/cygstart")
        cmd.append("-w")
        cmd.append("/cygdrive/c/Program Files (x86)/MAVProxy/mavproxy.exe")
    else:
        cmd.append("mavproxy.py")
    if opts.hil:
        cmd.extend(["--load-module", "HIL"])
    else:
        cmd.extend(["--master", mavlink_port])
        if stuff["sitl-port"]:
            cmd.extend(["--sitl", simout_port])
    # If running inside of a vagrant guest, then we probably want to forward our mavlink out to the containing host OS
    if getpass.getuser() == "vagrant":
        cmd.extend(["--out", "10.0.2.2:14550"])
    for port in [14550, 14551]:
        cmd.extend(["--out", "127.0.0.1:" + str(port)])
    if opts.tracker:
        cmd.extend(["--load-module", "tracker"])
        global tracker_uarta
        # tracker_uarta is set when we start the tracker...
        extra_cmd += "module load map; tracker set port %s; tracker start; tracker arm;" % (tracker_uarta,)
    if opts.mavlink_gimbal:
        cmd.extend(["--load-module", "gimbal"])
    if "extra_mavlink_cmds" in stuff:
        extra_cmd += " " + stuff["extra_mavlink_cmds"]
    if opts.mavproxy_args:
        cmd.extend(opts.mavproxy_args.split(" "))  # this could be a lot better..
    # compatibility pass-through parameters (for those that don't want
    # to use -C :-)
    for out in opts.out:
        cmd.extend(['--out', out])
    if opts.map:
        cmd.append('--map')
    if opts.console:
        cmd.append('--console')
    if len(extra_cmd):
        cmd.extend(['--cmd', extra_cmd])
    # Make our bundled mavproxy modules importable by the child process.
    local_mp_modules_dir = os.path.abspath(
        os.path.join(__file__, '..', '..', 'mavproxy_modules'))
    env = dict(os.environ)
    env['PYTHONPATH'] = local_mp_modules_dir + os.pathsep + env.get('PYTHONPATH', '')
    run_cmd_blocking("Run MavProxy", cmd, env=env)
    progress("MAVProxy exitted")
# ---- main flow: resolve frame options, build, launch vehicle + mavproxy ----
frame_infos = options_for_frame(cmd_opts.frame, cmd_opts.vehicle, cmd_opts)
if frame_infos["model"] == "jsbsim":
    check_jsbsim_version()
vehicle_dir = os.path.realpath(os.path.join(find_root_dir(), cmd_opts.vehicle))
if not os.path.exists(vehicle_dir):
    print("vehicle directory (%s) does not exist" % (vehicle_dir,))
    sys.exit(1)
if not cmd_opts.hil:
    # only the first instance kills stray simulator processes
    if cmd_opts.instance == 0:
        kill_tasks()
if cmd_opts.tracker:
    start_antenna_tracker(find_autotest_dir(), cmd_opts)
if cmd_opts.custom_location:
    location = cmd_opts.custom_location
    progress("Starting up at %s" % (location,))
else:
    location = find_location_by_name(find_autotest_dir(), cmd_opts.location)
    progress("Starting up at %s (%s)" % (location, cmd_opts.location))
if cmd_opts.hil:
    # (unlikely)
    run_in_terminal_window(find_autotest_dir(),
                           "JSBSim",
                           [os.path.join(find_autotest_dir(), "jsb_sim/runsim.py"),
                            "--home", location,
                            "--speedup=" + str(cmd_opts.speedup)])
else:
    if not cmd_opts.no_rebuild:  # i.e. we should rebuild
        do_build(vehicle_dir, cmd_opts, frame_infos)
    if cmd_opts.build_system == "waf":
        if cmd_opts.debug:
            binary_basedir = "build/sitl-debug"
        else:
            binary_basedir = "build/sitl"
        vehicle_binary = os.path.join(find_root_dir(), binary_basedir, frame_infos["waf_target"])
    else:
        vehicle_binary = os.path.join(vehicle_dir, cmd_opts.vehicle + ".elf")
    if not os.path.exists(vehicle_binary):
        print("Vehicle binary (%s) does not exist" % (vehicle_binary,))
        sys.exit(1)
    start_vehicle(vehicle_binary, find_autotest_dir(), cmd_opts, frame_infos, location)
if cmd_opts.delay_start:
    progress("Sleeping for %f seconds" % (cmd_opts.delay_start,))
    time.sleep(float(cmd_opts.delay_start))
# start_mavproxy blocks until MAVProxy exits
start_mavproxy(cmd_opts, frame_infos)
sys.exit(0)
| shrkey/ardupilot | Tools/autotest/sim_vehicle.py | Python | gpl-3.0 | 29,504 |
"""Random variable generators.
integers
--------
uniform within range
sequences
---------
pick random element
pick random sample
generate random permutation
distributions on the real line:
------------------------------
uniform
normal (Gaussian)
lognormal
negative exponential
gamma
beta
pareto
Weibull
distributions on the circle (angles 0 to 2pi)
---------------------------------------------
circular uniform
von Mises
General notes on the underlying Mersenne Twister core generator:
* The period is 2**19937-1.
* It is one of the most extensively tested generators in existence.
* Without a direct way to compute N steps forward, the semantics of
jumpahead(n) are weakened to simply jump to another distant state and rely
on the large period to avoid overlapping sequences.
* The random() method is implemented in C, executes in a single Python step,
and is, therefore, threadsafe.
"""
from warnings import warn as _warn
from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from os import urandom as _urandom
from binascii import hexlify as _hexlify
# Public API of the module; the bound-method exports at the bottom of the
# file provide the function-style entry points.
__all__ = ["Random","seed","random","uniform","randint","choice","sample",
           "randrange","shuffle","normalvariate","lognormvariate",
           "expovariate","vonmisesvariate","gammavariate",
           "gauss","betavariate","paretovariate","weibullvariate",
           "getstate","setstate","jumpahead", "WichmannHill", "getrandbits",
           "SystemRandom"]

NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)  # Kinderman-Monahan constant (normalvariate)
TWOPI = 2.0*_pi
LOG4 = _log(4.0)
SG_MAGICCONST = 1.0 + _log(4.5)            # constant for Cheng's gamma algorithm
BPF = 53        # Number of bits in a float
RECIP_BPF = 2**-BPF
# Translated by Guido van Rossum from C source provided by
# Adrian Baddeley. Adapted by Raymond Hettinger for use with
# the Mersenne Twister and os.urandom() core generators.
import _random
class Random(_random.Random):
    """Random number generator base class used by bound module functions.

    Used to instantiate instances of Random to get generators that don't
    share state.  Especially useful for multi-threaded programs, creating
    a different instance of Random for each thread, and using the jumpahead()
    method to ensure that the generated sequences seen by each thread don't
    overlap.

    Class Random can also be subclassed if you want to use a different basic
    generator of your own devising: in that case, override the following
    methods: random(), seed(), getstate(), setstate() and jumpahead().
    Optionally, implement a getrandombits() method so that randrange()
    can cover arbitrarily large ranges.
    """

    VERSION = 2     # used by getstate/setstate

    def __init__(self, x=None):
        """Initialize an instance.

        Optional argument x controls seeding, as for Random.seed().
        """
        self.seed(x)
        # gauss() caches its second generated value here; None = no cache
        self.gauss_next = None
    def seed(self, a=None):
        """Initialize internal state from hashable object.

        None or no argument seeds from current time or from an operating
        system specific randomness source if available.

        If a is not None or an int or long, hash(a) is used instead.
        """
        if a is None:
            try:
                # Prefer 128 bits of OS entropy when available.
                a = long(_hexlify(_urandom(16)), 16)
            except NotImplementedError:
                import time
                a = long(time.time() * 256)  # use fractional seconds
        super(Random, self).seed(a)
        # discard any cached gauss() value from the previous state
        self.gauss_next = None
    def getstate(self):
        """Return internal state; can be passed to setstate() later."""
        return self.VERSION, super(Random, self).getstate(), self.gauss_next

    def setstate(self, state):
        """Restore internal state from object returned by getstate()."""
        version = state[0]
        if version == 2:
            version, internalstate, self.gauss_next = state
            super(Random, self).setstate(internalstate)
        else:
            raise ValueError("state with version %s passed to "
                             "Random.setstate() of version %s" %
                             (version, self.VERSION))

    ## ---- Methods below this point do not need to be overridden when
    ## ---- subclassing for the purpose of using a different core generator.

    ## -------------------- pickle support  -------------------

    def __getstate__(self):  # for pickle
        return self.getstate()

    def __setstate__(self, state):  # for pickle
        self.setstate(state)

    def __reduce__(self):
        # pickle by re-creating an instance and restoring its state
        return self.__class__, (), self.getstate()
## -------------------- integer methods -------------------
    def randrange(self, start, stop=None, step=1, int=int, default=None,
                  maxwidth=1L<<BPF):
        """Choose a random item from range(start, stop[, step]).

        This fixes the problem with randint() which includes the
        endpoint; in Python this is usually not what you want.
        Do not supply the 'int', 'default', and 'maxwidth' arguments.
        """
        # This code is a bit messy to make it fast for the
        # common case while still doing adequate error checking.
        istart = int(start)
        if istart != start:
            raise ValueError, "non-integer arg 1 for randrange()"
        if stop is default:
            # One-argument form: randrange(stop) -> [0, stop)
            if istart > 0:
                if istart >= maxwidth:
                    # too wide for a single random() call to cover fairly
                    return self._randbelow(istart)
                return int(self.random() * istart)
            raise ValueError, "empty range for randrange()"

        # stop argument supplied.
        istop = int(stop)
        if istop != stop:
            raise ValueError, "non-integer stop for randrange()"
        width = istop - istart
        if step == 1 and width > 0:
            # Note that
            #     int(istart + self.random()*width)
            # instead would be incorrect.  For example, consider istart
            # = -2 and istop = 0.  Then the guts would be in
            # -2.0 to 0.0 exclusive on both ends (ignoring that random()
            # might return 0.0), and because int() truncates toward 0, the
            # final result would be -1 or 0 (instead of -2 or -1).
            #     istart + int(self.random()*width)
            # would also be incorrect, for a subtler reason:  the RHS
            # can return a long, and then randrange() would also return
            # a long, but we're supposed to return an int (for backward
            # compatibility).
            if width >= maxwidth:
                return int(istart + self._randbelow(width))
            return int(istart + int(self.random()*width))
        if step == 1:
            raise ValueError, "empty range for randrange() (%d,%d, %d)" % (istart, istop, width)

        # Non-unit step argument supplied.
        istep = int(step)
        if istep != step:
            raise ValueError, "non-integer step for randrange()"
        # n = number of items in range(istart, istop, istep)
        if istep > 0:
            n = (width + istep - 1) // istep
        elif istep < 0:
            n = (width + istep + 1) // istep
        else:
            raise ValueError, "zero step for randrange()"

        if n <= 0:
            raise ValueError, "empty range for randrange()"

        if n >= maxwidth:
            return istart + self._randbelow(n)
        return istart + istep*int(self.random() * n)
def randint(self, a, b):
"""Return random integer in range [a, b], including both end points.
"""
return self.randrange(a, b+1)
    def _randbelow(self, n, _log=_log, int=int, _maxwidth=1L<<BPF,
                   _Method=_MethodType, _BuiltinMethod=_BuiltinMethodType):
        """Return a random int in the range [0,n)

        Handles the case where n has more bits than returned
        by a single call to the underlying generator.
        """
        try:
            getrandbits = self.getrandbits
        except AttributeError:
            pass
        else:
            # Only call self.getrandbits if the original random() builtin method
            # has not been overridden or if a new getrandbits() was supplied.
            # This assures that the two methods correspond.
            if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method:
                k = int(1.00001 + _log(n-1, 2.0))   # 2**k > n-1 > 2**(k-2)
                r = getrandbits(k)
                # rejection sampling: retry until r < n to stay unbiased
                while r >= n:
                    r = getrandbits(k)
                return r
        # Fallback: a single random() call; warn when it cannot supply
        # enough bits to cover the range fairly.
        if n >= _maxwidth:
            _warn("Underlying random() generator does not supply \n"
                  "enough bits to choose from a population range this large")
        return int(self.random() * n)
## -------------------- sequence methods -------------------
def choice(self, seq):
"""Choose a random element from a non-empty sequence."""
return seq[int(self.random() * len(seq))] # raises IndexError if seq is empty
    def shuffle(self, x, random=None, int=int):
        """x, random=random.random -> shuffle list x in place; return None.

        Optional arg random is a 0-argument function returning a random
        float in [0.0, 1.0); by default, the standard random.random.
        """
        if random is None:
            random = self.random
        # Fisher-Yates shuffle, walking from the tail down.
        for i in reversed(xrange(1, len(x))):
            # pick an element in x[:i+1] with which to exchange x[i]
            j = int(random() * (i+1))
            x[i], x[j] = x[j], x[i]
    def sample(self, population, k):
        """Chooses k unique random elements from a population sequence.

        Returns a new list containing elements from the population while
        leaving the original population unchanged.  The resulting list is
        in selection order so that all sub-slices will also be valid random
        samples.  This allows raffle winners (the sample) to be partitioned
        into grand prize and second place winners (the subslices).

        Members of the population need not be hashable or unique.  If the
        population contains repeats, then each occurrence is a possible
        selection in the sample.

        To choose a sample in a range of integers, use xrange as an argument.
        This is especially fast and space efficient for sampling from a
        large population:   sample(xrange(10000000), 60)
        """

        # XXX Although the documentation says `population` is "a sequence",
        # XXX attempts are made to cater to any iterable with a __len__
        # XXX method.  This has had mixed success.  Examples from both
        # XXX sides:  sets work fine, and should become officially supported;
        # XXX dicts are much harder, and have failed in various subtle
        # XXX ways across attempts.  Support for mapping types should probably
        # XXX be dropped (and users should pass mapping.keys() or .values()
        # XXX explicitly).

        # Sampling without replacement entails tracking either potential
        # selections (the pool) in a list or previous selections in a set.

        # When the number of selections is small compared to the
        # population, then tracking selections is efficient, requiring
        # only a small set and an occasional reselection.  For
        # a larger number of selections, the pool tracking method is
        # preferred since the list takes less space than the
        # set and it doesn't suffer from frequent reselections.

        n = len(population)
        if not 0 <= k <= n:
            raise ValueError, "sample larger than population"
        random = self.random
        _int = int
        result = [None] * k
        setsize = 21        # size of a small set minus size of an empty list
        if k > 5:
            setsize += 4 ** _ceil(_log(k * 3, 4))  # table size for big sets
        if n <= setsize or hasattr(population, "keys"):
            # An n-length list is smaller than a k-length set, or this is a
            # mapping type so the other algorithm wouldn't work.
            pool = list(population)
            for i in xrange(k):         # invariant:  non-selected at [0,n-i)
                j = _int(random() * (n-i))
                result[i] = pool[j]
                pool[j] = pool[n-i-1]   # move non-selected item into vacancy
        else:
            try:
                # Track previous selections in a set; retry on collision.
                selected = set()
                selected_add = selected.add
                for i in xrange(k):
                    j = _int(random() * n)
                    while j in selected:
                        j = _int(random() * n)
                    selected_add(j)
                    result[i] = population[j]
            except (TypeError, KeyError):   # handle (at least) sets
                if isinstance(population, list):
                    raise
                # non-indexable iterable: materialize and retry once
                return self.sample(tuple(population), k)
        return result
## -------------------- real-valued distributions -------------------
## -------------------- uniform distribution -------------------
    def uniform(self, a, b):
        """Get a random number in the range [a, b) or [a, b] depending on rounding.

        (a + (b-a)*random() can round up to b, so the right endpoint is
        not strictly excluded.)
        """
        return a + (b-a) * self.random()
## -------------------- normal distribution --------------------
    def normalvariate(self, mu, sigma):
        """Normal distribution.

        mu is the mean, and sigma is the standard deviation.
        """
        # mu = mean, sigma = standard deviation

        # Uses Kinderman and Monahan method. Reference: Kinderman,
        # A.J. and Monahan, J.F., "Computer generation of random
        # variables using the ratio of uniform deviates", ACM Trans
        # Math Software, 3, (1977), pp257-260.

        random = self.random
        while 1:
            u1 = random()
            u2 = 1.0 - random()
            z = NV_MAGICCONST*(u1-0.5)/u2
            zz = z*z/4.0
            # acceptance test of the ratio-of-uniforms method
            if zz <= -_log(u2):
                break
        return mu + z*sigma
## -------------------- lognormal distribution --------------------
    def lognormvariate(self, mu, sigma):
        """Log normal distribution.

        If you take the natural logarithm of this distribution, you'll get a
        normal distribution with mean mu and standard deviation sigma.
        mu can have any value, and sigma must be greater than zero.
        """
        return _exp(self.normalvariate(mu, sigma))
## -------------------- exponential distribution --------------------
def expovariate(self, lambd):
"""Exponential distribution.
lambd is 1.0 divided by the desired mean. (The parameter would be
called "lambda", but that is a reserved word in Python.) Returned
values range from 0 to positive infinity.
"""
# lambd: rate lambd = 1/mean
# ('lambda' is a Python reserved word)
random = self.random
u = random()
while u <= 1e-7:
u = random()
return -_log(u)/lambd
## -------------------- von Mises distribution --------------------
    def vonmisesvariate(self, mu, kappa):
        """Circular data distribution.

        mu is the mean angle, expressed in radians between 0 and 2*pi, and
        kappa is the concentration parameter, which must be greater than or
        equal to zero.  If kappa is equal to zero, this distribution reduces
        to a uniform random angle over the range 0 to 2*pi.
        """
        # mu:    mean angle (in radians between 0 and 2*pi)
        # kappa: concentration parameter kappa (>= 0)
        # if kappa = 0 generate uniform random angle

        # Based upon an algorithm published in: Fisher, N.I.,
        # "Statistical Analysis of Circular Data", Cambridge
        # University Press, 1993.

        # Thanks to Magnus Kessler for a correction to the
        # implementation of step 4.

        random = self.random
        if kappa <= 1e-6:
            # effectively uniform on the circle
            return TWOPI * random()

        a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa)
        b = (a - _sqrt(2.0 * a))/(2.0 * kappa)
        r = (1.0 + b * b)/(2.0 * b)

        # rejection loop
        while 1:
            u1 = random()
            z = _cos(_pi * u1)
            f = (1.0 + r * z)/(r + z)
            c = kappa * (r - f)
            u2 = random()
            if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c):
                break

        # u3 decides on which side of the mean the angle falls
        u3 = random()
        if u3 > 0.5:
            theta = (mu % TWOPI) + _acos(f)
        else:
            theta = (mu % TWOPI) - _acos(f)

        return theta
## -------------------- gamma distribution --------------------
    def gammavariate(self, alpha, beta):
        """Gamma distribution.  Not the gamma function!

        Conditions on the parameters are alpha > 0 and beta > 0.
        """

        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2

        # Warning: a few older sources define the gamma distribution in terms
        # of alpha > -1.0
        if alpha <= 0.0 or beta <= 0.0:
            raise ValueError, 'gammavariate: alpha and beta must be > 0.0'

        random = self.random
        if alpha > 1.0:

            # Uses R.C.H. Cheng, "The generation of Gamma
            # variables with non-integral shape parameters",
            # Applied Statistics, (1977), 26, No. 1, p71-74

            ainv = _sqrt(2.0 * alpha - 1.0)
            bbb = alpha - LOG4
            ccc = alpha + ainv

            while 1:
                u1 = random()
                # keep u1 away from 0 and 1 so the logit below is finite
                if not 1e-7 < u1 < .9999999:
                    continue
                u2 = 1.0 - random()
                v = _log(u1/(1.0-u1))/ainv
                x = alpha*_exp(v)
                z = u1*u1*u2
                r = bbb+ccc*v-x
                if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
                    return x * beta

        elif alpha == 1.0:
            # expovariate(1)
            u = random()
            while u <= 1e-7:
                u = random()
            return -_log(u) * beta

        else:   # alpha is between 0 and 1 (exclusive)

            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle

            while 1:
                u = random()
                b = (_e + alpha)/_e
                p = b*u
                if p <= 1.0:
                    x = p ** (1.0/alpha)
                else:
                    x = -_log((b-p)/alpha)
                u1 = random()
                if p > 1.0:
                    if u1 <= x ** (alpha - 1.0):
                        break
                elif u1 <= _exp(-x):
                    break
            return x * beta
## -------------------- Gauss (faster alternative) --------------------
    def gauss(self, mu, sigma):
        """Gaussian distribution.

        mu is the mean, and sigma is the standard deviation.  This is
        slightly faster than the normalvariate() function.

        Not thread-safe without a lock around calls.
        """

        # When x and y are two variables from [0, 1), uniformly
        # distributed, then
        #
        #    cos(2*pi*x)*sqrt(-2*log(1-y))
        #    sin(2*pi*x)*sqrt(-2*log(1-y))
        #
        # are two *independent* variables with normal distribution
        # (mu = 0, sigma = 1).
        # (Lambert Meertens)
        # (corrected version; bug discovered by Mike Miller, fixed by LM)

        # Multithreading note: When two threads call this function
        # simultaneously, it is possible that they will receive the
        # same return value.  The window is very small though.  To
        # avoid this, you have to use a lock around all calls.  (I
        # didn't want to slow this down in the serial case by using a
        # lock here.)

        random = self.random
        z = self.gauss_next
        self.gauss_next = None
        if z is None:
            # Box-Muller: generate a pair, return one, cache the other.
            x2pi = random() * TWOPI
            g2rad = _sqrt(-2.0 * _log(1.0 - random()))
            z = _cos(x2pi) * g2rad
            self.gauss_next = _sin(x2pi) * g2rad

        return mu + z*sigma
## -------------------- beta --------------------
## See
## http://sourceforge.net/bugs/?func=detailbug&bug_id=130030&group_id=5470
## for Ivan Frohne's insightful analysis of why the original implementation:
##
## def betavariate(self, alpha, beta):
## # Discrete Event Simulation in C, pp 87-88.
##
## y = self.expovariate(alpha)
## z = self.expovariate(1.0/beta)
## return z/(y+z)
##
## was dead wrong, and how it probably got that way.
    def betavariate(self, alpha, beta):
        """Beta distribution.

        Conditions on the parameters are alpha > 0 and beta > 0
        (gammavariate() rejects non-positive parameters).
        Returned values range between 0 and 1.
        """

        # This version due to Janne Sinkkonen, and matches all the std
        # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
        y = self.gammavariate(alpha, 1.)
        if y == 0:
            return 0.0
        else:
            return y / (y + self.gammavariate(beta, 1.))
## -------------------- Pareto --------------------
def paretovariate(self, alpha):
"""Pareto distribution. alpha is the shape parameter."""
# Jain, pg. 495
u = 1.0 - self.random()
return 1.0 / pow(u, 1.0/alpha)
## -------------------- Weibull --------------------
def weibullvariate(self, alpha, beta):
"""Weibull distribution.
alpha is the scale parameter and beta is the shape parameter.
"""
# Jain, pg. 499; bug fix courtesy Bill Arms
u = 1.0 - self.random()
return alpha * pow(-_log(u), 1.0/beta)
## -------------------- Wichmann-Hill -------------------
class WichmannHill(Random):
    """Pure-Python Wichmann-Hill generator, kept for reproducibility of
    sequences generated before the Mersenne Twister became the core."""

    VERSION = 1     # used by getstate/setstate

    def seed(self, a=None):
        """Initialize internal state from hashable object.

        None or no argument seeds from current time or from an operating
        system specific randomness source if available.

        If a is not None or an int or long, hash(a) is used instead.

        If a is an int or long, a is used directly.  Distinct values between
        0 and 27814431486575L inclusive are guaranteed to yield distinct
        internal states (this guarantee is specific to the default
        Wichmann-Hill generator).
        """
        if a is None:
            try:
                a = long(_hexlify(_urandom(16)), 16)
            except NotImplementedError:
                import time
                a = long(time.time() * 256)  # use fractional seconds

        if not isinstance(a, (int, long)):
            a = hash(a)

        # split the seed across the three congruential components
        a, x = divmod(a, 30268)
        a, y = divmod(a, 30306)
        a, z = divmod(a, 30322)
        self._seed = int(x)+1, int(y)+1, int(z)+1

        self.gauss_next = None
    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""

        # Wichman-Hill random number generator.
        #
        # Wichmann, B. A. & Hill, I. D. (1982)
        # Algorithm AS 183:
        # An efficient and portable pseudo-random number generator
        # Applied Statistics 31 (1982) 188-190
        #
        # see also:
        #        Correction to Algorithm AS 183
        #        Applied Statistics 33 (1984) 123
        #
        #        McLeod, A. I. (1985)
        #        A remark on Algorithm AS 183
        #        Applied Statistics 34 (1985),198-200

        # This part is thread-unsafe:
        # BEGIN CRITICAL SECTION
        x, y, z = self._seed
        x = (171 * x) % 30269
        y = (172 * y) % 30307
        z = (170 * z) % 30323
        self._seed = x, y, z
        # END CRITICAL SECTION

        # Note:  on a platform using IEEE-754 double arithmetic, this can
        # never return 0.0 (asserted by Tim; proof too long for a comment).
        return (x/30269.0 + y/30307.0 + z/30323.0) % 1.0
    def getstate(self):
        """Return internal state; can be passed to setstate() later."""
        return self.VERSION, self._seed, self.gauss_next

    def setstate(self, state):
        """Restore internal state from object returned by getstate()."""
        version = state[0]
        if version == 1:
            version, self._seed, self.gauss_next = state
        else:
            raise ValueError("state with version %s passed to "
                             "Random.setstate() of version %s" %
                             (version, self.VERSION))
    def jumpahead(self, n):
        """Act as if n calls to random() were made, but quickly.

        n is an int, greater than or equal to 0.

        Example use:  If you have 2 threads and know that each will
        consume no more than a million random numbers, create two Random
        objects r1 and r2, then do
            r2.setstate(r1.getstate())
            r2.jumpahead(1000000)
        Then r1 and r2 will use guaranteed-disjoint segments of the full
        period.
        """
        if not n >= 0:
            raise ValueError("n must be >= 0")
        x, y, z = self._seed
        # advance each component n steps at once via modular exponentiation
        x = int(x * pow(171, n, 30269)) % 30269
        y = int(y * pow(172, n, 30307)) % 30307
        z = int(z * pow(170, n, 30323)) % 30323
        self._seed = x, y, z
    def __whseed(self, x=0, y=0, z=0):
        """Set the Wichmann-Hill seed from (x, y, z).

        These must be integers in the range [0, 256).
        """
        if not type(x) == type(y) == type(z) == int:
            raise TypeError('seeds must be integers')
        if not (0 <= x < 256 and 0 <= y < 256 and 0 <= z < 256):
            raise ValueError('seeds must be in range(0, 256)')
        if 0 == x == y == z:
            # Initialize from current time
            import time
            t = long(time.time() * 256)
            t = int((t&0xffffff) ^ (t>>24))
            t, x = divmod(t, 256)
            t, y = divmod(t, 256)
            t, z = divmod(t, 256)
        # Zero is a poor seed, so substitute 1
        self._seed = (x or 1, y or 1, z or 1)

        self.gauss_next = None
    def whseed(self, a=None):
        """Seed from hashable object's hash code.

        None or no argument seeds from current time.  It is not guaranteed
        that objects with distinct hash codes lead to distinct internal
        states.

        This is obsolete, provided for compatibility with the seed routine
        used prior to Python 2.1.  Use the .seed() method instead.
        """
        if a is None:
            self.__whseed()
            return
        a = hash(a)
        # fold the hash into three byte-sized components
        a, x = divmod(a, 256)
        a, y = divmod(a, 256)
        a, z = divmod(a, 256)
        x = (x + a) % 256 or 1
        y = (y + a) % 256 or 1
        z = (z + a) % 256 or 1
        self.__whseed(x, y, z)
## --------------- Operating System Random Source ------------------
class SystemRandom(Random):
    """Alternate random number generator using sources provided
    by the operating system (such as /dev/urandom on Unix or
    CryptGenRandom on Windows).

     Not available on all systems (see os.urandom() for details).
    """

    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""
        # 7 bytes = 56 bits; drop the low 3 so exactly BPF (53) random
        # bits remain, scaled into [0, 1) by RECIP_BPF.
        return (long(_hexlify(_urandom(7)), 16) >> 3) * RECIP_BPF

    def getrandbits(self, k):
        """getrandbits(k) -> x.  Generates a long int with k random bits."""
        if k <= 0:
            raise ValueError('number of bits must be greater than zero')
        if k != int(k):
            raise TypeError('number of bits should be an integer')
        bytes = (k + 7) // 8                    # bits / 8 and rounded up
        x = long(_hexlify(_urandom(bytes)), 16)
        return x >> (bytes * 8 - k)             # trim excess bits

    def _stub(self, *args, **kwds):
        "Stub method.  Not used for a system random number generator."
        return None
    # OS entropy cannot be seeded or jumped; make these no-ops
    seed = jumpahead = _stub

    def _notimplemented(self, *args, **kwds):
        "Method should not be called for a system random number generator."
        raise NotImplementedError('System entropy source does not have state.')
    getstate = setstate = _notimplemented
## -------------------- test program --------------------
def _test_generator(n, func, args):
    """Call func(*args) n times and print timing, mean, stddev, min and max.

    Part of the historical self-test harness; uses Python 2 print statements.
    """
    import time
    print n, 'times', func.__name__
    total = 0.0
    sqsum = 0.0
    smallest = 1e10
    largest = -1e10
    t0 = time.time()
    for i in range(n):
        x = func(*args)
        total += x
        sqsum = sqsum + x*x      # accumulate sum of squares for the stddev below
        smallest = min(x, smallest)
        largest = max(x, largest)
    t1 = time.time()
    print round(t1-t0, 3), 'sec,',
    avg = total/n
    # Population standard deviation: sqrt(E[x^2] - E[x]^2).
    stddev = _sqrt(sqsum/n - avg*avg)
    print 'avg %g, stddev %g, min %g, max %g' % \
              (avg, stddev, smallest, largest)
def _test(N=2000):
_test_generator(N, random, ())
_test_generator(N, normalvariate, (0.0, 1.0))
_test_generator(N, lognormvariate, (0.0, 1.0))
_test_generator(N, vonmisesvariate, (0.0, 1.0))
_test_generator(N, gammavariate, (0.01, 1.0))
_test_generator(N, gammavariate, (0.1, 1.0))
_test_generator(N, gammavariate, (0.1, 2.0))
_test_generator(N, gammavariate, (0.5, 1.0))
_test_generator(N, gammavariate, (0.9, 1.0))
_test_generator(N, gammavariate, (1.0, 1.0))
_test_generator(N, gammavariate, (2.0, 1.0))
_test_generator(N, gammavariate, (20.0, 1.0))
_test_generator(N, gammavariate, (200.0, 1.0))
_test_generator(N, gauss, (0.0, 1.0))
_test_generator(N, betavariate, (3.0, 3.0))
# Create one instance, seeded from current time, and export its methods
# as module-level functions.  The functions share state across all uses
# (both in the user's code and in the Python libraries), but that's fine
# for most programs and is easier for the casual user than making them
# instantiate their own Random() instance.
_inst = Random()
seed = _inst.seed
random = _inst.random
uniform = _inst.uniform
randint = _inst.randint
choice = _inst.choice
randrange = _inst.randrange
sample = _inst.sample
shuffle = _inst.shuffle
normalvariate = _inst.normalvariate
lognormvariate = _inst.lognormvariate
expovariate = _inst.expovariate
vonmisesvariate = _inst.vonmisesvariate
gammavariate = _inst.gammavariate
gauss = _inst.gauss
betavariate = _inst.betavariate
paretovariate = _inst.paretovariate
weibullvariate = _inst.weibullvariate
getstate = _inst.getstate
setstate = _inst.setstate
jumpahead = _inst.jumpahead
getrandbits = _inst.getrandbits

# Run the statistical smoke test when executed as a script.
if __name__ == '__main__':
    _test()
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.5/Lib/random.py | Python | mit | 30,483 |
# -*- coding: utf-8 -*-
#Copyright (C) David García Granda dgranda@users.sourceforge.net
#Copyright (C) Kevin Dwyer kevin@pheared.net
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Need to set this stuff up so that the translation functions work. Seems like
# the module that needs translation ought to have some way of setting this up.
import pytrainer.lib.localization
pytrainer.lib.localization.initialize_gettext("../../locale")
import unittest
import os
from lxml import etree
from pytrainer.lib.gpx import Gpx
class GpxTest(unittest.TestCase):
def setUp(self):
self.tmp_files = []
def tearDown(self):
for file_name in self.tmp_files:
try:
os.remove(file_name)
except:
pass
def test_get_laps_old(self):
orig_laps = [
("1264.66","42.84154594","-2.68554166","426","5000.71875","42.83547375","-2.68631422","active","170","177","4.93775940","manual"),
("1279.71","42.86093295","-2.66849270","445","5162.37109","42.84155038","-2.68552473","active","176","179","5.10653210","manual"),
("1263.54","42.83505499","-2.67709371","423","4882.18457","42.86094376","-2.66848792","active","176","179","4.37805939","manual"),
("1525.68","42.84018606","-2.68670272","426","4973.64746","42.83504661","-2.67710888","active","167","181","4.52464294","manual"),
("374.23","42.83771038","-2.68647373","96","1098.94531","42.84018849","-2.68670733","active","159","163","4.30066299","manual")]
try:
xml_file = os.path.dirname(os.path.abspath(__file__)) + "/gpxplus_sample_old.gpx"
gpx = Gpx(None, None) # avoid launching _getValues
gpx.tree = etree.ElementTree(file = xml_file).getroot()
gpx_laps = gpx.getLaps()
self.assertEquals(orig_laps, gpx_laps)
except():
self.fail()
def test_get_laps(self):
orig_laps = [
("311.31","43.53781521","-5.63955233","81","1000.000000","43.54065232","-5.65094300","active","158","178","3.52099586","distance"),
("337.85","43.53220135","-5.63737772","83","1000.000000","43.53780859","-5.63955157","active","149","153","4.07694530","distance"),
("342.13","43.52516323","-5.64443462","87","1000.000000","43.53218752","-5.63737328","active","150","154","3.13006544","distance"),
("353.81","43.52035671","-5.65329663","86","1000.000000","43.52515301","-5.64443881","active","146","150","3.00786400","distance"),
("352.61","43.51314962","-5.65532908","87","1000.000000","43.52035412","-5.65329814","active","148","158","3.17764997","distance"),
("354.17","43.52061689","-5.65409191","87","1000.000000","43.51314115","-5.65533193","active","142","149","3.52461219","distance"),
("343.65","43.52592498","-5.64510651","86","1000.000000","43.52062519","-5.65408990","active","144","147","3.04636431","distance"),
("366.95","43.53079587","-5.63821390","83","1000.000000","43.52592733","-5.64509553","active","142","150","3.21967506","distance"),
("345.69","43.53726536","-5.63784711","87","1000.000000","43.53081406","-5.63820661","active","146","150","4.38874722","distance"),
("330.64","43.54042768","-5.64873822","86","1000.000000","43.53726494","-5.63783269","active","149","154","3.56236672","distance"),
("41.96","43.54054570","-5.65028653","11","132.227539","43.54043892","-5.64874199","active","150","152","3.40324497","manual")]
try:
xml_file = os.path.dirname(os.path.abspath(__file__)) + "/gpxplus_sample.gpx"
gpx = Gpx(None, None) # avoid launching _getValues
gpx.tree = etree.ElementTree(file = xml_file).getroot()
gpx_laps = gpx.getLaps()
self.assertEquals(orig_laps, gpx_laps)
except():
self.fail()
def test_missing_tracks(self):
trkdata = """<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<gpx xmlns="http://www.topografix.com/GPX/1/1" creator="" version="1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd">
</gpx>
"""
# Write a GPX file with no tracks
file_name = "test-missing.gpx"
tmpf = file(file_name,'w')
tmpf.write(trkdata)
tmpf.close()
self.tmp_files.append(file_name)
try:
g = Gpx(filename=file_name)
except IndexError:
self.fail("Gpx parser crashed on file without tracks")
def test_missing_name(self):
trkdata = """<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<gpx xmlns="http://www.topografix.com/GPX/1/1" creator="" version="1.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd">
<trk></trk>
</gpx>
"""
# Write a GPX file with a nameless track
file_name = "test-noname.gpx"
tmpf = file(file_name,'w')
tmpf.write(trkdata)
tmpf.close()
self.tmp_files.append(file_name)
try:
g = Gpx(filename=file_name)
except IndexError:
self.fail("Gpx parser crashed on file with a nameless track")
if __name__ == '__main__':
unittest.main()
| viiru-/pytrainer | pytrainer/test/lib/test_gpx.py | Python | gpl-2.0 | 6,062 |
# coding=utf-8
from __future__ import unicode_literals
from collections import defaultdict
from ..node import Node
from ..utils.thread import ThreadLocalObject
class BufferedNode(Node):
    """Node wrapper whose content is resolved lazily on first access.

    Reading ``uri`` or ``content`` triggers :meth:`flush`, which hands this
    node to the callback supplied at construction time.  Assigning
    ``content`` marks the node as flushed.
    """

    def __init__(self, node, callback):
        self._node = node
        self._callback = callback
        self._flushed = False

    def __repr__(self):
        shown = self.uri if self._flushed else self._node.initial_uri
        return '<BufferedNode: %s>' % shown

    def flush(self):
        # Already-flushed nodes are a no-op.
        # NOTE(review): the callback presumably assigns `content` (which flips
        # `_flushed`); otherwise flush() would re-fire the callback on every
        # access — confirm against the pipeline implementation.
        if self._flushed:
            return
        self._callback(self)

    @property
    def uri(self):
        self.flush()
        return self._node.uri

    def get_content(self):
        self.flush()
        return self._node.content

    def set_content(self, content):
        # Mark as flushed first so subsequent reads skip the callback.
        self._flushed = True
        self._node.content = content

    content = property(get_content, set_content)

    @property
    def meta(self):
        return self._node.meta

    @property
    def initial(self):
        return self._node.initial

    @property
    def initial_uri(self):
        return self._node.initial_uri

    @property
    def namespace_uri(self):
        return self._node.namespace_uri
class NodeBuffer(ThreadLocalObject):
    """Thread-local registry of nodes queued per pipeline method.

    Internal layout: ``{method: defaultdict(initial_uri -> [node, ...])}``.
    """

    def __init__(self):
        super(NodeBuffer, self).__init__()
        self._buffer = {}

    def __len__(self):
        # Number of distinct initial URIs buffered across all methods
        # (len() of each per-method defaultdict counts its keys).
        total = 0
        for method_nodes in self._buffer.values():
            total += len(method_nodes)
        return total

    def add(self, method, node):
        # Lazily create the per-method mapping on first use.
        per_method = self._buffer.setdefault(method, defaultdict(list))
        per_method[node.initial_uri].append(node)

    def pop(self, method):
        """Drain and return the pending nodes for `method`.

        Non-empty buffers are returned as a plain-dict snapshot and the stored
        mapping is cleared in place; an empty mapping is returned as-is.
        """
        pending = self._buffer.get(method, defaultdict(list))
        if not pending:
            return pending
        snapshot = dict(pending)
        pending.clear()
        return snapshot

    def clear(self):
        self._buffer.clear()
| 5monkeys/content-io | cio/pipeline/buffer.py | Python | bsd-3-clause | 1,923 |
# ----- Info ------------------------------------------------------------------
__author__ = 'Michael Montero <mcmontero@gmail.com>'
# ----- Imports ---------------------------------------------------------------
from tinyAPI.base.config import ConfigManager
import logging
import random
import tinyAPI
__all__ = [
'StatsLogger'
]
# ----- Public Classes --------------------------------------------------------
class StatsLogger(object):
    '''Manages writing statistics to the application log file.'''

    def hit_ratio(self, name, requests, hits, pid=None):
        """Occasionally write a hit-ratio summary block to the app log.

        Only ~1 in 100,000 calls actually logs, and never during unit tests
        or CLI runs.  `requests`/`hits` are counters; `pid` is optional.
        """
        # Guard clauses mirror the original short-circuit order exactly
        # (identity checks against False included).
        if tinyAPI.env_unit_test() is not False:
            return
        if tinyAPI.env_cli() is not False:
            return
        if random.randint(1, 100000) != 1:
            return

        log_file = ConfigManager.value('app log file')
        if log_file is None:
            return

        try:
            hit_ratio = str((hits / requests) * 100) + '%'
        except ZeroDivisionError:
            hit_ratio = 'NA'

        lines = [
            '\n----- ' + name + ' (start) -----'
        ]
        if pid is not None:
            lines.append('PID #{}'.format(pid))
        lines.extend([
            'Requests: ' + '{0:,}'.format(requests),
            'Hits: ' + '{0:,}'.format(hits),
            'Hit Ratio: ' + hit_ratio,
            '----- ' + name + ' (stop) ------'
        ])

        logging.basicConfig(filename = log_file)
        logging.critical('\n'.join(lines))
        logging.shutdown()
| mcmontero/tinyAPI | base/stats_logger.py | Python | mit | 1,579 |
# Copyright (c) 2021 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import numpy
import math
from typing import List, Optional, TYPE_CHECKING, Any, Set, cast, Iterable, Dict
from UM.Logger import Logger
from UM.Mesh.MeshData import MeshData
from UM.Mesh.MeshBuilder import MeshBuilder
from UM.Application import Application #To modify the maximum zoom level.
from UM.i18n import i18nCatalog
from UM.Scene.Platform import Platform
from UM.Scene.Iterator.BreadthFirstIterator import BreadthFirstIterator
from UM.Scene.SceneNode import SceneNode
from UM.Resources import Resources
from UM.Math.Vector import Vector
from UM.Math.Matrix import Matrix
from UM.Math.Color import Color
from UM.Math.AxisAlignedBox import AxisAlignedBox
from UM.Math.Polygon import Polygon
from UM.Message import Message
from UM.Signal import Signal
from UM.View.RenderBatch import RenderBatch
from UM.View.GL.OpenGL import OpenGL
from cura.Settings.GlobalStack import GlobalStack
from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.Settings.ExtruderManager import ExtruderManager
from PyQt5.QtCore import QTimer
if TYPE_CHECKING:
from cura.CuraApplication import CuraApplication
from cura.Settings.ExtruderStack import ExtruderStack
from UM.Settings.ContainerStack import ContainerStack
# Translation catalog for user-visible strings in this module.
catalog = i18nCatalog("cura")
# Radius of disallowed area in mm around prime. I.e. how much distance to keep from prime position.
PRIME_CLEARANCE = 6.5
class BuildVolume(SceneNode):
    """Build volume is a special kind of node that is responsible for rendering the printable area & disallowed areas."""

    # Emitted whenever the computed raft thickness changes.
    raftThicknessChanged = Signal()
    def __init__(self, application: "CuraApplication", parent: Optional[SceneNode] = None) -> None:
        """Create the build volume node and wire it to application signals.

        :param application: The running CuraApplication (theme, scene,
            controller and machine-manager access).
        :param parent: Optional parent scene node.
        """
        super().__init__(parent)
        self._application = application
        self._machine_manager = self._application.getMachineManager()

        # Render colors; resolved lazily from the theme (see _updateColors).
        self._volume_outline_color = None  # type: Optional[Color]
        self._x_axis_color = None  # type: Optional[Color]
        self._y_axis_color = None  # type: Optional[Color]
        self._z_axis_color = None  # type: Optional[Color]
        self._disallowed_area_color = None  # type: Optional[Color]
        self._error_area_color = None  # type: Optional[Color]

        # Machine dimensions in mm; 0 until a machine/stack is loaded.
        self._width = 0  # type: float
        self._height = 0  # type: float
        self._depth = 0  # type: float
        self._shape = ""  # type: str

        self._scale_vector = Vector(1.0, 1.0, 1.0)

        self._shader = None

        # Origin axis-indicator mesh and its dimensions.
        self._origin_mesh = None  # type: Optional[MeshData]
        self._origin_line_length = 20
        self._origin_line_width = 1
        self._enabled = False

        self._grid_mesh = None   # type: Optional[MeshData]
        self._grid_shader = None

        self._disallowed_areas = []  # type: List[Polygon]
        self._disallowed_areas_no_brim = []  # type: List[Polygon]
        self._disallowed_area_mesh = None  # type: Optional[MeshData]
        self._disallowed_area_size = 0.

        self._error_areas = []  # type: List[Polygon]
        self._error_mesh = None  # type: Optional[MeshData]

        self.setCalculateBoundingBox(False)
        self._volume_aabb = None  # type: Optional[AxisAlignedBox]

        self._raft_thickness = 0.0
        self._extra_z_clearance = 0.0
        self._adhesion_type = None  # type: Any
        self._platform = Platform(self)

        self._edge_disallowed_size = None

        self._build_volume_message = Message(catalog.i18nc("@info:status",
            "The build volume height has been reduced due to the value of the"
            " \"Print Sequence\" setting to prevent the gantry from colliding"
            " with printed models."),
            title = catalog.i18nc("@info:title", "Build Volume"),
            message_type = Message.MessageType.WARNING)

        self._global_container_stack = None  # type: Optional[GlobalStack]

        # Debounce timer: collapse bursts of stack-change signals into one update.
        self._stack_change_timer = QTimer()
        self._stack_change_timer.setInterval(100)
        self._stack_change_timer.setSingleShot(True)
        self._stack_change_timer.timeout.connect(self._onStackChangeTimerFinished)

        self._application.globalContainerStackChanged.connect(self._onStackChanged)

        self._engine_ready = False
        self._application.engineCreatedSignal.connect(self._onEngineCreated)

        self._has_errors = False
        self._application.getController().getScene().sceneChanged.connect(self._onSceneChanged)

        # Objects loaded at the moment. We are connected to the property changed events of these objects.
        self._scene_objects = set()  # type: Set[SceneNode]

        # Debounce timers for scene and setting changes.
        self._scene_change_timer = QTimer()
        self._scene_change_timer.setInterval(200)
        self._scene_change_timer.setSingleShot(True)
        self._scene_change_timer.timeout.connect(self._onSceneChangeTimerFinished)

        self._setting_change_timer = QTimer()
        self._setting_change_timer.setInterval(150)
        self._setting_change_timer.setSingleShot(True)
        self._setting_change_timer.timeout.connect(self._onSettingChangeTimerFinished)

        # Must be after setting _build_volume_message, apparently that is used in getMachineManager.
        # activeQualityChanged is always emitted after setActiveVariant, setActiveMaterial and setActiveQuality.
        # Therefore this works.
        self._machine_manager.activeQualityChanged.connect(self._onStackChanged)

        # Enable and disable extruder
        self._machine_manager.extruderChanged.connect(self.updateNodeBoundaryCheck)

        # List of settings which were updated
        self._changed_settings_since_last_rebuild = []  # type: List[str]
def _onSceneChanged(self, source):
if self._global_container_stack:
# Ignore anything that is not something we can slice in the first place!
if source.callDecoration("isSliceable"):
self._scene_change_timer.start()
    def _onSceneChangeTimerFinished(self):
        """Re-scan the scene for sliceable nodes and update listeners.

        Newly added nodes get property/extruder listeners attached; removed
        nodes get them detached.  Any difference triggers a rebuild.
        """
        root = self._application.getController().getScene().getRoot()
        new_scene_objects = set(node for node in BreadthFirstIterator(root) if node.callDecoration("isSliceable"))
        if new_scene_objects != self._scene_objects:
            for node in new_scene_objects - self._scene_objects:  # Nodes that were added to the scene.
                self._updateNodeListeners(node)
                node.decoratorsChanged.connect(self._updateNodeListeners)  # Make sure that decoration changes afterwards also receive the same treatment
            for node in self._scene_objects - new_scene_objects:  # Nodes that were removed from the scene.
                per_mesh_stack = node.callDecoration("getStack")
                if per_mesh_stack:
                    per_mesh_stack.propertyChanged.disconnect(self._onSettingPropertyChanged)
                active_extruder_changed = node.callDecoration("getActiveExtruderChangedSignal")
                if active_extruder_changed is not None:
                    node.callDecoration("getActiveExtruderChangedSignal").disconnect(self._updateDisallowedAreasAndRebuild)
                node.decoratorsChanged.disconnect(self._updateNodeListeners)
            self.rebuild()

            self._scene_objects = new_scene_objects
            self._onSettingPropertyChanged("print_sequence", "value")  # Create fake event, so right settings are triggered.
def _updateNodeListeners(self, node: SceneNode):
"""Updates the listeners that listen for changes in per-mesh stacks.
:param node: The node for which the decorators changed.
"""
per_mesh_stack = node.callDecoration("getStack")
if per_mesh_stack:
per_mesh_stack.propertyChanged.connect(self._onSettingPropertyChanged)
active_extruder_changed = node.callDecoration("getActiveExtruderChangedSignal")
if active_extruder_changed is not None:
active_extruder_changed.connect(self._updateDisallowedAreasAndRebuild)
    def setWidth(self, width: float) -> None:
        # Build volume width in mm; takes effect on the next rebuild().
        self._width = width

    def getWidth(self) -> float:
        return self._width

    def setHeight(self, height: float) -> None:
        # Build volume height in mm.
        self._height = height

    def getHeight(self) -> float:
        return self._height

    def setDepth(self, depth: float) -> None:
        # Build volume depth in mm.
        self._depth = depth

    def getDepth(self) -> float:
        return self._depth

    def setShape(self, shape: str) -> None:
        # Build plate shape, e.g. "elliptic" (any other value is treated as
        # rectangular by the mesh builders).  Empty strings are ignored so an
        # unset value cannot clobber a valid shape.
        if shape:
            self._shape = shape
def getDiagonalSize(self) -> float:
"""Get the length of the 3D diagonal through the build volume.
This gives a sense of the scale of the build volume in general.
:return: length of the 3D diagonal through the build volume
"""
return math.sqrt(self._width * self._width + self._height * self._height + self._depth * self._depth)
    def getDisallowedAreas(self) -> List[Polygon]:
        # Polygons where printing is not allowed (populated elsewhere in this class).
        return self._disallowed_areas

    def getDisallowedAreasNoBrim(self) -> List[Polygon]:
        # Same as above but, per the name, presumably without the bed-adhesion
        # (brim) margin — confirm against the code that fills this list.
        return self._disallowed_areas_no_brim

    def setDisallowedAreas(self, areas: List[Polygon]):
        self._disallowed_areas = areas
    def render(self, renderer):
        """Queue the build-volume meshes for rendering.

        Compiles the shaders lazily on first use.  Returns True to signal the
        renderer that this node handled its own rendering.
        """
        if not self.getMeshData() or not self.isVisible():
            return True

        if not self._shader:
            # First render: compile shaders and bake theme colors into the grid shader.
            self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "default.shader"))
            self._grid_shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "grid.shader"))
            theme = self._application.getTheme()
            self._grid_shader.setUniformValue("u_plateColor", Color(*theme.getColor("buildplate").getRgb()))
            self._grid_shader.setUniformValue("u_gridColor0", Color(*theme.getColor("buildplate_grid").getRgb()))
            self._grid_shader.setUniformValue("u_gridColor1", Color(*theme.getColor("buildplate_grid_minor").getRgb()))

        # Volume outline (as lines), origin marker, and the build plate grid.
        renderer.queueNode(self, mode = RenderBatch.RenderMode.Lines)
        renderer.queueNode(self, mesh = self._origin_mesh, backface_cull = True)
        renderer.queueNode(self, mesh = self._grid_mesh, shader = self._grid_shader, backface_cull = True)
        # Transparent overlays queued with negative sort values — presumably
        # so they are drawn after opaque geometry; see RenderBatch for the
        # exact ordering semantics.
        if self._disallowed_area_mesh:
            renderer.queueNode(self, mesh = self._disallowed_area_mesh, shader = self._shader, transparent = True, backface_cull = True, sort = -9)

        if self._error_mesh:
            renderer.queueNode(self, mesh=self._error_mesh, shader=self._shader, transparent=True,
                               backface_cull=True, sort=-8)

        return True
    def updateNodeBoundaryCheck(self):
        """For every sliceable node, update node._outside_buildarea

        A node is flagged as outside when it collides with the volume bounds or
        a disallowed area, lies entirely below the build plate, or is assigned
        to a disabled extruder.  Group nodes propagate the flag from any
        flagged child to the whole group.
        """
        if not self._global_container_stack:
            return

        root = self._application.getController().getScene().getRoot()
        nodes = cast(List[SceneNode], list(cast(Iterable, BreadthFirstIterator(root))))
        group_nodes = []  # type: List[SceneNode]

        build_volume_bounding_box = self.getBoundingBox()
        if build_volume_bounding_box:
            # It's over 9000!
            # We set this to a very low number, as we do allow models to intersect the build plate.
            # This means the model gets cut off at the build plate.
            build_volume_bounding_box = build_volume_bounding_box.set(bottom=-9001)
        else:
            # No bounding box. This is triggered when running Cura from command line with a model for the first time
            # In that situation there is a model, but no machine (and therefore no build volume.
            return

        for node in nodes:
            # Need to check group nodes later
            if node.callDecoration("isGroup"):
                group_nodes.append(node)  # Keep list of affected group_nodes

            if node.callDecoration("isSliceable") or node.callDecoration("isGroup"):
                if not isinstance(node, CuraSceneNode):
                    continue

                if node.collidesWithBbox(build_volume_bounding_box):
                    node.setOutsideBuildArea(True)
                    continue

                if node.collidesWithAreas(self.getDisallowedAreas()):
                    node.setOutsideBuildArea(True)
                    continue

                # If the entire node is below the build plate, still mark it as outside.
                node_bounding_box = node.getBoundingBox()
                if node_bounding_box and node_bounding_box.top < 0 and not node.getParent().callDecoration("isGroup"):
                    node.setOutsideBuildArea(True)
                    continue

                # Mark the node as outside build volume if the set extruder is disabled
                extruder_position = node.callDecoration("getActiveExtruderPosition")
                try:
                    if not self._global_container_stack.extruderList[int(extruder_position)].isEnabled and not node.callDecoration("isGroup"):
                        node.setOutsideBuildArea(True)
                        continue
                except IndexError:  # Happens when the extruder list is too short. We're not done building the printer in memory yet.
                    continue
                except TypeError:  # Happens when extruder_position is None. This object has no extruder decoration.
                    continue

                node.setOutsideBuildArea(False)

        # Group nodes should override the _outside_buildarea property of their children.
        for group_node in group_nodes:
            children = group_node.getAllChildren()

            # Check if one or more children are non-printable and if so, set the parent as non-printable:
            for child_node in children:
                if child_node.isOutsideBuildArea():
                    group_node.setOutsideBuildArea(True)
                    break

            # Apply results of the check to all children of the group:
            for child_node in children:
                child_node.setOutsideBuildArea(group_node.isOutsideBuildArea())
    def checkBoundsAndUpdate(self, node: CuraSceneNode, bounds: Optional[AxisAlignedBox] = None) -> None:
        """Update the outsideBuildArea of a single node, given bounds or current build volume

        Single-node variant of updateNodeBoundaryCheck.

        :param node: single node
        :param bounds: bounds or current build volume
        """
        if not isinstance(node, CuraSceneNode) or self._global_container_stack is None:
            return

        if bounds is None:
            build_volume_bounding_box = self.getBoundingBox()
            if build_volume_bounding_box:
                # It's over 9000!
                build_volume_bounding_box = build_volume_bounding_box.set(bottom=-9001)
            else:
                # No bounding box. This is triggered when running Cura from command line with a model for the first time
                # In that situation there is a model, but no machine (and therefore no build volume.
                return
        else:
            build_volume_bounding_box = bounds

        if node.callDecoration("isSliceable") or node.callDecoration("isGroup"):
            if node.collidesWithBbox(build_volume_bounding_box):
                node.setOutsideBuildArea(True)
                return

            if node.collidesWithAreas(self.getDisallowedAreas()):
                node.setOutsideBuildArea(True)
                return

            # Mark the node as outside build volume if the set extruder is disabled
            extruder_position = node.callDecoration("getActiveExtruderPosition")
            try:
                if not self._global_container_stack.extruderList[int(extruder_position)].isEnabled:
                    node.setOutsideBuildArea(True)
                    return
            except IndexError:
                # If the extruder doesn't exist, also mark it as unprintable.
                node.setOutsideBuildArea(True)
                return

            node.setOutsideBuildArea(False)
    def _buildGridMesh(self, min_w: float, max_w: float, min_h: float, max_h: float, min_d: float, max_d:float, z_fight_distance: float) -> MeshData:
        """Build the build-plate mesh: a quad, or a disc for elliptic plates.

        The plate sits ``z_fight_distance`` below ``min_h`` so it does not
        z-fight with the disallowed-area overlays drawn at plate level.
        """
        mb = MeshBuilder()
        if self._shape != "elliptic":
            # Build plate grid mesh
            mb.addQuad(
                Vector(min_w, min_h - z_fight_distance, min_d),
                Vector(max_w, min_h - z_fight_distance, min_d),
                Vector(max_w, min_h - z_fight_distance, max_d),
                Vector(min_w, min_h - z_fight_distance, max_d)
            )

            # The quad is stored as two triangles, hence 6 vertices to UV-map.
            for n in range(0, 6):
                v = mb.getVertex(n)
                mb.setVertexUVCoordinates(n, v[0], v[2])
            return mb.build()
        else:
            aspect = 1.0
            scale_matrix = Matrix()
            if self._width != 0:
                # Scale circular meshes by aspect ratio if width != height
                aspect = self._depth / self._width
                scale_matrix.compose(scale=Vector(1, 1, aspect))
            # Disc: center vertex plus an arc of rim vertices, triangulated as a fan.
            mb.addVertex(0, min_h - z_fight_distance, 0)
            mb.addArc(max_w, Vector.Unit_Y, center=Vector(0, min_h - z_fight_distance, 0))
            sections = mb.getVertexCount() - 1  # Center point is not an arc section
            indices = []
            for n in range(0, sections - 1):
                indices.append([0, n + 2, n + 1])
            mb.addIndices(numpy.asarray(indices, dtype=numpy.int32))
            mb.calculateNormals()

            for n in range(0, mb.getVertexCount()):
                v = mb.getVertex(n)
                mb.setVertexUVCoordinates(n, v[0], v[2] * aspect)
            return mb.build().getTransformed(scale_matrix)
def _buildMesh(self, min_w: float, max_w: float, min_h: float, max_h: float, min_d: float, max_d:float, z_fight_distance: float) -> MeshData:
if self._shape != "elliptic":
# Outline 'cube' of the build volume
mb = MeshBuilder()
mb.addLine(Vector(min_w, min_h, min_d), Vector(max_w, min_h, min_d), color = self._volume_outline_color)
mb.addLine(Vector(min_w, min_h, min_d), Vector(min_w, max_h, min_d), color = self._volume_outline_color)
mb.addLine(Vector(min_w, max_h, min_d), Vector(max_w, max_h, min_d), color = self._volume_outline_color)
mb.addLine(Vector(max_w, min_h, min_d), Vector(max_w, max_h, min_d), color = self._volume_outline_color)
mb.addLine(Vector(min_w, min_h, max_d), Vector(max_w, min_h, max_d), color = self._volume_outline_color)
mb.addLine(Vector(min_w, min_h, max_d), Vector(min_w, max_h, max_d), color = self._volume_outline_color)
mb.addLine(Vector(min_w, max_h, max_d), Vector(max_w, max_h, max_d), color = self._volume_outline_color)
mb.addLine(Vector(max_w, min_h, max_d), Vector(max_w, max_h, max_d), color = self._volume_outline_color)
mb.addLine(Vector(min_w, min_h, min_d), Vector(min_w, min_h, max_d), color = self._volume_outline_color)
mb.addLine(Vector(max_w, min_h, min_d), Vector(max_w, min_h, max_d), color = self._volume_outline_color)
mb.addLine(Vector(min_w, max_h, min_d), Vector(min_w, max_h, max_d), color = self._volume_outline_color)
mb.addLine(Vector(max_w, max_h, min_d), Vector(max_w, max_h, max_d), color = self._volume_outline_color)
return mb.build()
else:
# Bottom and top 'ellipse' of the build volume
scale_matrix = Matrix()
if self._width != 0:
# Scale circular meshes by aspect ratio if width != height
aspect = self._depth / self._width
scale_matrix.compose(scale = Vector(1, 1, aspect))
mb = MeshBuilder()
mb.addArc(max_w, Vector.Unit_Y, center = (0, min_h - z_fight_distance, 0), color = self._volume_outline_color)
mb.addArc(max_w, Vector.Unit_Y, center = (0, max_h, 0), color = self._volume_outline_color)
return mb.build().getTransformed(scale_matrix)
def _buildOriginMesh(self, origin: Vector) -> MeshData:
mb = MeshBuilder()
mb.addCube(
width=self._origin_line_length,
height=self._origin_line_width,
depth=self._origin_line_width,
center=origin + Vector(self._origin_line_length / 2, 0, 0),
color=self._x_axis_color
)
mb.addCube(
width=self._origin_line_width,
height=self._origin_line_length,
depth=self._origin_line_width,
center=origin + Vector(0, self._origin_line_length / 2, 0),
color=self._y_axis_color
)
mb.addCube(
width=self._origin_line_width,
height=self._origin_line_width,
depth=self._origin_line_length,
center=origin - Vector(0, 0, self._origin_line_length / 2),
color=self._z_axis_color
)
return mb.build()
def _updateColors(self):
theme = self._application.getTheme()
if theme is None:
return
self._volume_outline_color = Color(*theme.getColor("volume_outline").getRgb())
self._x_axis_color = Color(*theme.getColor("x_axis").getRgb())
self._y_axis_color = Color(*theme.getColor("y_axis").getRgb())
self._z_axis_color = Color(*theme.getColor("z_axis").getRgb())
self._disallowed_area_color = Color(*theme.getColor("disallowed_area").getRgb())
self._error_area_color = Color(*theme.getColor("error_area").getRgb())
    def _buildErrorMesh(self, min_w: float, max_w: float, min_h: float, max_h: float, min_d: float, max_d: float, disallowed_area_height: float) -> Optional[MeshData]:
        """Build one mesh containing all error areas as triangle fans.

        Returns None when there are no error areas.  Points are clamped into
        the build volume footprint; the fan's first iteration produces a
        degenerate (zero-area) triangle, which is harmless.
        """
        if not self._error_areas:
            return None
        mb = MeshBuilder()
        for error_area in self._error_areas:
            color = self._error_area_color
            points = error_area.getPoints()
            first = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height,
                           self._clamp(points[0][1], min_d, max_d))
            previous_point = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height,
                                    self._clamp(points[0][1], min_d, max_d))
            for point in points:
                new_point = Vector(self._clamp(point[0], min_w, max_w), disallowed_area_height,
                                   self._clamp(point[1], min_d, max_d))
                mb.addFace(first, previous_point, new_point, color=color)
                previous_point = new_point
        return mb.build()
    def _buildDisallowedAreaMesh(self, min_w: float, max_w: float, min_h: float, max_h: float, min_d: float, max_d: float, disallowed_area_height: float) -> Optional[MeshData]:
        """Build one mesh containing all disallowed areas as triangle fans.

        Side effect: updates self._disallowed_area_size with the largest
        qualifying area depth (see the in-loop hack below).  Returns None when
        there are no disallowed areas.
        """
        if not self._disallowed_areas:
            return None

        mb = MeshBuilder()
        color = self._disallowed_area_color
        for polygon in self._disallowed_areas:
            points = polygon.getPoints()
            if len(points) == 0:
                continue

            # Triangle fan anchored at the polygon's first point; all points
            # are clamped into the build volume footprint.
            first = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height,
                           self._clamp(points[0][1], min_d, max_d))
            previous_point = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height,
                                    self._clamp(points[0][1], min_d, max_d))
            for point in points:
                new_point = Vector(self._clamp(point[0], min_w, max_w), disallowed_area_height,
                                   self._clamp(point[1], min_d, max_d))
                mb.addFace(first, previous_point, new_point, color=color)
                previous_point = new_point

            # Find the largest disallowed area to exclude it from the maximum scale bounds.
            # This is a very nasty hack. This pretty much only works for UM machines.
            # This disallowed area_size needs a -lot- of rework at some point in the future: TODO
            if numpy.min(points[:,
                         1]) >= 0:  # This filters out all areas that have points to the left of the centre. This is done to filter the skirt area.
                size = abs(numpy.max(points[:, 1]) - numpy.min(points[:, 1]))
            else:
                size = 0
            self._disallowed_area_size = max(size, self._disallowed_area_size)
        return mb.build()
def _updateScaleFactor(self) -> None:
if not self._global_container_stack:
return
scale_xy = 100.0 / max(100.0, self._global_container_stack.getProperty("material_shrinkage_percentage_xy", "value"))
scale_z = 100.0 / max(100.0, self._global_container_stack.getProperty("material_shrinkage_percentage_z" , "value"))
self._scale_vector = Vector(scale_xy, scale_xy, scale_z)
    def rebuild(self) -> None:
        """Recalculates the build volume & disallowed areas."""
        # Nothing to build before the machine dimensions, engine and stack exist.
        if not self._width or not self._height or not self._depth:
            return

        if not self._engine_ready:
            return

        if not self._global_container_stack:
            return

        if not self._volume_outline_color:
            self._updateColors()

        # Volume extents: X/Z centered on the origin, Y from the plate upward.
        min_w = -self._width / 2
        max_w = self._width / 2
        min_h = 0.0
        max_h = self._height
        min_d = -self._depth / 2
        max_d = self._depth / 2

        z_fight_distance = 0.2  # Distance between buildplate and disallowed area meshes to prevent z-fighting

        self._grid_mesh = self._buildGridMesh(min_w, max_w, min_h, max_h, min_d, max_d, z_fight_distance)
        self.setMeshData(self._buildMesh(min_w, max_w, min_h, max_h, min_d, max_d, z_fight_distance))

        # Indication of the machine origin
        if self._global_container_stack.getProperty("machine_center_is_zero", "value"):
            origin = (Vector(min_w, min_h, min_d) + Vector(max_w, min_h, max_d)) / 2
        else:
            origin = Vector(min_w, min_h, max_d)
        self._origin_mesh = self._buildOriginMesh(origin)

        disallowed_area_height = 0.1
        self._disallowed_area_size = 0.
        self._disallowed_area_mesh = self._buildDisallowedAreaMesh(min_w, max_w, min_h, max_h, min_d, max_d, disallowed_area_height)

        self._error_mesh = self._buildErrorMesh(min_w, max_w, min_h, max_h, min_d, max_d, disallowed_area_height)

        self._updateScaleFactor()

        # Cached volume AABB, shrunk at the top by raft and Z-hop clearance.
        self._volume_aabb = AxisAlignedBox(
            minimum = Vector(min_w, min_h - 1.0, min_d).scale(self._scale_vector),
            maximum = Vector(max_w, max_h - self._raft_thickness - self._extra_z_clearance, max_d).scale(self._scale_vector)
        )

        bed_adhesion_size = self.getEdgeDisallowedSize()

        # As this works better for UM machines, we only add the disallowed_area_size for the z direction.
        # This is probably wrong in all other cases. TODO!
        # The +1 and -1 is added as there is always a bit of extra room required to work properly.
        scale_to_max_bounds = AxisAlignedBox(
            minimum = Vector(min_w + bed_adhesion_size + 1, min_h, min_d + self._disallowed_area_size - bed_adhesion_size + 1).scale(self._scale_vector),
            maximum = Vector(max_w - bed_adhesion_size - 1, max_h - self._raft_thickness - self._extra_z_clearance, max_d - self._disallowed_area_size + bed_adhesion_size - 1).scale(self._scale_vector)
        )

        self._application.getController().getScene()._maximum_bounds = scale_to_max_bounds  # type: ignore

        self.updateNodeBoundaryCheck()
def getBoundingBox(self) -> Optional[AxisAlignedBox]:
return self._volume_aabb
def getRaftThickness(self) -> float:
return self._raft_thickness
    def _updateRaftThickness(self) -> None:
        """Recompute the raft thickness from the adhesion settings.

        When the thickness changes, the build volume node is shifted down by the new
        thickness and ``raftThicknessChanged`` is emitted so listeners can react.
        """
        if not self._global_container_stack:
            return
        old_raft_thickness = self._raft_thickness
        if self._global_container_stack.extruderList:
            # This might be called before the extruder stacks have initialised, in which case getting the adhesion_type fails
            self._adhesion_type = self._global_container_stack.getProperty("adhesion_type", "value")
        self._raft_thickness = 0.0
        if self._adhesion_type == "raft":
            # Total raft height: base layer + interface layers + surface layers + air gap,
            # minus the model's first-layer overlap into that air gap.
            self._raft_thickness = (
                self._global_container_stack.getProperty("raft_base_thickness", "value") +
                self._global_container_stack.getProperty("raft_interface_layers", "value") *
                self._global_container_stack.getProperty("raft_interface_thickness", "value") +
                self._global_container_stack.getProperty("raft_surface_layers", "value") *
                self._global_container_stack.getProperty("raft_surface_thickness", "value") +
                self._global_container_stack.getProperty("raft_airgap", "value") -
                self._global_container_stack.getProperty("layer_0_z_overlap", "value"))
        # Rounding errors do not matter, we check if raft_thickness has changed at all
        if old_raft_thickness != self._raft_thickness:
            self.setPosition(Vector(0, -self._raft_thickness, 0), SceneNode.TransformSpace.World)
            self.raftThicknessChanged.emit()
def _calculateExtraZClearance(self, extruders: List["ContainerStack"]) -> float:
if not self._global_container_stack:
return 0
extra_z = 0.0
for extruder in extruders:
if extruder.getProperty("retraction_hop_enabled", "value"):
retraction_hop = extruder.getProperty("retraction_hop", "value")
if extra_z is None or retraction_hop > extra_z:
extra_z = retraction_hop
return extra_z
def _onStackChanged(self):
self._stack_change_timer.start()
    def _onStackChangeTimerFinished(self) -> None:
        """Update the build volume visualization after the active machine stack changed.

        Re-wires propertyChanged signals from the old stack/extruders to the new ones,
        refreshes the cached machine dimensions, recomputes disallowed areas, raft
        thickness and Z clearance, and rebuilds the meshes when the engine is ready.
        """
        # Detach from the previous stack and its extruders before switching.
        if self._global_container_stack:
            self._global_container_stack.propertyChanged.disconnect(self._onSettingPropertyChanged)
            extruders = ExtruderManager.getInstance().getActiveExtruderStacks()
            for extruder in extruders:
                extruder.propertyChanged.disconnect(self._onSettingPropertyChanged)
        self._global_container_stack = self._application.getGlobalContainerStack()
        if self._global_container_stack:
            self._global_container_stack.propertyChanged.connect(self._onSettingPropertyChanged)
            extruders = ExtruderManager.getInstance().getActiveExtruderStacks()
            for extruder in extruders:
                extruder.propertyChanged.connect(self._onSettingPropertyChanged)
            self._width = self._global_container_stack.getProperty("machine_width", "value") * self._scale_vector.x
            machine_height = self._global_container_stack.getProperty("machine_height", "value")
            # One-at-a-time printing caps the usable height at the gantry height and warns the user.
            if self._global_container_stack.getProperty("print_sequence", "value") == "one_at_a_time" and len(self._scene_objects) > 1:
                self._height = min(self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z, machine_height)
                if self._height < (machine_height * self._scale_vector.z):
                    self._build_volume_message.show()
                else:
                    self._build_volume_message.hide()
            else:
                # NOTE(review): unlike the branch above (and the print_sequence handler in
                # _onSettingChangeTimerFinished), machine_height is NOT multiplied by
                # self._scale_vector.z here — confirm whether that is intentional.
                self._height = self._global_container_stack.getProperty("machine_height", "value")
                self._build_volume_message.hide()
            self._depth = self._global_container_stack.getProperty("machine_depth", "value") * self._scale_vector.y
            self._shape = self._global_container_stack.getProperty("machine_shape", "value")
            self._updateDisallowedAreas()
            self._updateRaftThickness()
            self._extra_z_clearance = self._calculateExtraZClearance(ExtruderManager.getInstance().getUsedExtruderStacks())
            if self._engine_ready:
                self.rebuild()
        camera = Application.getInstance().getController().getCameraTool()
        if camera:
            diagonal = self.getDiagonalSize()
            if diagonal > 1:
                # You can zoom out up to 5 times the diagonal. This gives some space around the volume.
                camera.setZoomRange(min = 0.1, max = diagonal * 5)  # type: ignore
def _onEngineCreated(self) -> None:
self._engine_ready = True
self.rebuild()
    def _onSettingChangeTimerFinished(self) -> None:
        """Process the queued setting changes and refresh only the affected caches.

        Walks ``self._changed_settings_since_last_rebuild`` (collected by
        _onSettingPropertyChanged), sets update flags per setting category, performs
        each expensive update at most once, and rebuilds the meshes if needed.
        """
        if not self._global_container_stack:
            return
        rebuild_me = False
        update_disallowed_areas = False
        update_raft_thickness = False
        # NOTE(review): initialized to True (not False like the others), so the extra-Z
        # clearance is recalculated — and rebuild_me becomes True — for every processed
        # change. Confirm whether this is intentional or a leftover.
        update_extra_z_clearance = True
        for setting_key in self._changed_settings_since_last_rebuild:
            if setting_key == "print_sequence":
                # One-at-a-time printing caps the usable height at the gantry height.
                machine_height = self._global_container_stack.getProperty("machine_height", "value")
                if self._application.getGlobalContainerStack().getProperty("print_sequence", "value") == "one_at_a_time" and len(self._scene_objects) > 1:
                    self._height = min(self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z, machine_height)
                    if self._height < (machine_height * self._scale_vector.z):
                        self._build_volume_message.show()
                    else:
                        self._build_volume_message.hide()
                else:
                    self._height = self._global_container_stack.getProperty("machine_height", "value") * self._scale_vector.z
                    self._build_volume_message.hide()
                update_disallowed_areas = True
            # sometimes the machine size or shape settings are adjusted on the active machine, we should reflect this
            if setting_key in self._machine_settings or setting_key in self._material_size_settings:
                self._updateMachineSizeProperties()
                update_extra_z_clearance = True
                update_disallowed_areas = True
            if setting_key in self._disallowed_area_settings:
                update_disallowed_areas = True
            if setting_key in self._raft_settings:
                update_raft_thickness = True
            if setting_key in self._extra_z_settings:
                update_extra_z_clearance = True
            if setting_key in self._limit_to_extruder_settings:
                update_disallowed_areas = True
            rebuild_me = update_extra_z_clearance or update_disallowed_areas or update_raft_thickness
        # We only want to update all of them once.
        if update_disallowed_areas:
            self._updateDisallowedAreas()
        if update_raft_thickness:
            self._updateRaftThickness()
        if update_extra_z_clearance:
            self._extra_z_clearance = self._calculateExtraZClearance(ExtruderManager.getInstance().getUsedExtruderStacks())
        if rebuild_me:
            self.rebuild()
        # We just did a rebuild, reset the list.
        self._changed_settings_since_last_rebuild = []
def _onSettingPropertyChanged(self, setting_key: str, property_name: str) -> None:
if property_name != "value":
return
if setting_key not in self._changed_settings_since_last_rebuild:
self._changed_settings_since_last_rebuild.append(setting_key)
self._setting_change_timer.start()
def hasErrors(self) -> bool:
return self._has_errors
def _updateMachineSizeProperties(self) -> None:
if not self._global_container_stack:
return
self._updateScaleFactor()
self._height = self._global_container_stack.getProperty("machine_height", "value") * self._scale_vector.z
self._width = self._global_container_stack.getProperty("machine_width", "value") * self._scale_vector.x
self._depth = self._global_container_stack.getProperty("machine_depth", "value") * self._scale_vector.y
self._shape = self._global_container_stack.getProperty("machine_shape", "value")
    def _updateDisallowedAreasAndRebuild(self):
        """Calls :py:meth:`cura.BuildVolume._updateDisallowedAreas` and makes sure the changes appear in the scene.

        This is required for a signal to trigger the update in one go. The
        :py:meth:`cura.BuildVolume._updateDisallowedAreas` method itself shouldn't call
        :py:meth:`cura.BuildVolume.rebuild`, since there may be other changes before it needs to be rebuilt,
        which would hit performance.
        """
        # Refresh everything that feeds into the meshes first, then rebuild them exactly once.
        self._updateDisallowedAreas()
        self._updateRaftThickness()
        self._extra_z_clearance = self._calculateExtraZClearance(ExtruderManager.getInstance().getUsedExtruderStacks())
        self.rebuild()
def _scaleAreas(self, result_areas: List[Polygon]) -> None:
if self._global_container_stack is None:
return
for i, polygon in enumerate(result_areas):
result_areas[i] = polygon.scale(
100.0 / max(100.0, self._global_container_stack.getProperty("material_shrinkage_percentage_xy", "value"))
)
    def _updateDisallowedAreas(self) -> None:
        """Recompute all disallowed areas: static machine areas, prime blobs, nozzle
        keep-outs and (for multi-extrusion) the prime tower.

        Populates ``self._disallowed_areas``, ``self._disallowed_areas_no_brim``,
        ``self._error_areas`` and ``self._has_errors``.
        """
        if not self._global_container_stack:
            return
        self._error_areas = []
        used_extruders = ExtruderManager.getInstance().getUsedExtruderStacks()
        self._edge_disallowed_size = None  # Force a recalculation
        disallowed_border_size = self.getEdgeDisallowedSize()
        result_areas = self._computeDisallowedAreasStatic(disallowed_border_size, used_extruders)  # Normal machine disallowed areas can always be added.
        prime_areas = self._computeDisallowedAreasPrimeBlob(disallowed_border_size, used_extruders)
        result_areas_no_brim = self._computeDisallowedAreasStatic(0, used_extruders)  # Where the priming is not allowed to happen. This is not added to the result, just for collision checking.
        # Check if prime positions intersect with disallowed areas.
        for extruder in used_extruders:
            extruder_id = extruder.getId()
            result_areas[extruder_id].extend(prime_areas[extruder_id])
            result_areas_no_brim[extruder_id].extend(prime_areas[extruder_id])
            nozzle_disallowed_areas = extruder.getProperty("nozzle_disallowed_areas", "value")
            for area in nozzle_disallowed_areas:
                polygon = Polygon(numpy.array(area, numpy.float32))
                polygon_disallowed_border = polygon.getMinkowskiHull(Polygon.approximatedCircle(disallowed_border_size))
                result_areas[extruder_id].append(polygon_disallowed_border)  # Don't perform the offset on these.
                result_areas_no_brim[extruder_id].append(polygon)  # No brim
        # Add prime tower location as disallowed area.
        if len([x for x in used_extruders if x.isEnabled]) > 1:  # No prime tower if only one extruder is enabled
            prime_tower_collision = False
            prime_tower_areas = self._computeDisallowedAreasPrinted(used_extruders)
            for extruder_id in prime_tower_areas:
                for area_index, prime_tower_area in enumerate(prime_tower_areas[extruder_id]):
                    for area in result_areas[extruder_id]:
                        if prime_tower_area.intersectsPolygon(area) is not None:
                            prime_tower_collision = True
                            break
                    if prime_tower_collision:  # Already found a collision.
                        break
                    if self._global_container_stack.getProperty("prime_tower_brim_enable", "value") and self._global_container_stack.getProperty("adhesion_type", "value") != "raft":
                        brim_size = self._calculateBedAdhesionSize(used_extruders, "brim")
                        # Use 2x the brim size, since we need 1x brim size distance due to the object brim and another
                        # times the brim due to the brim of the prime tower
                        prime_tower_areas[extruder_id][area_index] = prime_tower_area.getMinkowskiHull(Polygon.approximatedCircle(2 * brim_size, num_segments = 24))
                if not prime_tower_collision:
                    result_areas[extruder_id].extend(prime_tower_areas[extruder_id])
                    result_areas_no_brim[extruder_id].extend(prime_tower_areas[extruder_id])
                else:
                    # A colliding prime tower becomes an error area instead of a disallowed area.
                    self._error_areas.extend(prime_tower_areas[extruder_id])
        self._has_errors = len(self._error_areas) > 0
        # Flatten the per-extruder dictionaries into the shrinkage-scaled result lists.
        self._disallowed_areas = []
        for extruder_id in result_areas:
            self._scaleAreas(result_areas[extruder_id])
            self._disallowed_areas.extend(result_areas[extruder_id])
        self._disallowed_areas_no_brim = []
        for extruder_id in result_areas_no_brim:
            self._scaleAreas(result_areas_no_brim[extruder_id])
            self._disallowed_areas_no_brim.extend(result_areas_no_brim[extruder_id])
    def _computeDisallowedAreasPrinted(self, used_extruders):
        """Computes the disallowed areas for objects that are printed with print features.

        This means that the brim, travel avoidance and such will be applied to these features.

        :return: A dictionary with for each used extruder ID the disallowed areas where that extruder may not print.
        """
        result = {}
        # Find which extruder prints the skirt/brim; its line widths size the prime-tower brim below.
        skirt_brim_extruder: ExtruderStack = None
        for extruder in used_extruders:
            if int(extruder.getProperty("extruder_nr", "value")) == int(self._global_container_stack.getProperty("skirt_brim_extruder_nr", "value")):
                skirt_brim_extruder = extruder
            result[extruder.getId()] = []
        # Currently, the only normally printed object is the prime tower.
        if self._global_container_stack.getProperty("prime_tower_enable", "value"):
            prime_tower_size = self._global_container_stack.getProperty("prime_tower_size", "value")
            machine_width = self._global_container_stack.getProperty("machine_width", "value")
            machine_depth = self._global_container_stack.getProperty("machine_depth", "value")
            prime_tower_x = self._global_container_stack.getProperty("prime_tower_position_x", "value")
            # Y is negated: g-code Y runs opposite to Cura's scene-space Y.
            prime_tower_y = - self._global_container_stack.getProperty("prime_tower_position_y", "value")
            if not self._global_container_stack.getProperty("machine_center_is_zero", "value"):
                prime_tower_x = prime_tower_x - machine_width / 2 #Offset by half machine_width and _depth to put the origin in the front-left.
                prime_tower_y = prime_tower_y + machine_depth / 2
            if skirt_brim_extruder is not None and self._global_container_stack.getProperty("prime_tower_brim_enable", "value") and self._global_container_stack.getProperty("adhesion_type", "value") != "raft":
                # Brim width = line count * line width, scaled by the first-layer line width factor (a percentage).
                brim_size = (
                    skirt_brim_extruder.getProperty("brim_line_count", "value") *
                    skirt_brim_extruder.getProperty("skirt_brim_line_width", "value") / 100.0 *
                    skirt_brim_extruder.getProperty("initial_layer_line_width_factor", "value")
                )
                # Shift the tower's reference corner so its brim still fits on the plate.
                prime_tower_x -= brim_size
                prime_tower_y += brim_size
            radius = prime_tower_size / 2
            prime_tower_area = Polygon.approximatedCircle(radius, num_segments = 24)
            prime_tower_area = prime_tower_area.translate(prime_tower_x - radius, prime_tower_y - radius)
            # NOTE(review): a Minkowski hull with a zero-radius circle looks like a no-op — confirm intent.
            prime_tower_area = prime_tower_area.getMinkowskiHull(Polygon.approximatedCircle(0))
            for extruder in used_extruders:
                result[extruder.getId()].append(prime_tower_area) #The prime tower location is the same for each extruder, regardless of offset.
        return result
def _computeDisallowedAreasPrimeBlob(self, border_size: float, used_extruders: List["ExtruderStack"]) -> Dict[str, List[Polygon]]:
"""Computes the disallowed areas for the prime blobs.
These are special because they are not subject to things like brim or travel avoidance. They do get a dilute
with the border size though because they may not intersect with brims and such of other objects.
:param border_size: The size with which to offset the disallowed areas due to skirt, brim, travel avoid distance
, etc.
:param used_extruders: The extruder stacks to generate disallowed areas for.
:return: A dictionary with for each used extruder ID the prime areas.
"""
result = {} # type: Dict[str, List[Polygon]]
if not self._global_container_stack:
return result
machine_width = self._global_container_stack.getProperty("machine_width", "value")
machine_depth = self._global_container_stack.getProperty("machine_depth", "value")
for extruder in used_extruders:
prime_blob_enabled = extruder.getProperty("prime_blob_enable", "value")
prime_x = extruder.getProperty("extruder_prime_pos_x", "value")
prime_y = -extruder.getProperty("extruder_prime_pos_y", "value")
# Ignore extruder prime position if it is not set or if blob is disabled
if (prime_x == 0 and prime_y == 0) or not prime_blob_enabled:
result[extruder.getId()] = []
continue
if not self._global_container_stack.getProperty("machine_center_is_zero", "value"):
prime_x = prime_x - machine_width / 2 # Offset by half machine_width and _depth to put the origin in the front-left.
prime_y = prime_y + machine_depth / 2
prime_polygon = Polygon.approximatedCircle(PRIME_CLEARANCE)
prime_polygon = prime_polygon.getMinkowskiHull(Polygon.approximatedCircle(border_size))
prime_polygon = prime_polygon.translate(prime_x, prime_y)
result[extruder.getId()] = [prime_polygon]
return result
    def _computeDisallowedAreasStatic(self, border_size:float, used_extruders: List["ExtruderStack"]) -> Dict[str, List[Polygon]]:
        """Computes the disallowed areas that are statically placed in the machine.

        It computes different disallowed areas depending on the offset of the extruder. The resulting dictionary will
        therefore have an entry for each extruder that is used.

        :param border_size: The size with which to offset the disallowed areas due to skirt, brim, travel avoid distance
            , etc.
        :param used_extruders: The extruder stacks to generate disallowed areas for.
        :return: A dictionary with for each used extruder ID the disallowed areas where that extruder may not print.
        """
        # Convert disallowed areas to polygons and dilate them.
        machine_disallowed_polygons = []
        if self._global_container_stack is None:
            return {}
        for area in self._global_container_stack.getProperty("machine_disallowed_areas", "value"):
            if len(area) == 0:
                continue  # Numpy doesn't deal well with 0-length arrays, since it can't determine the dimensionality of them.
            polygon = Polygon(numpy.array(area, numpy.float32))
            # Dilate by border_size so models keep adhesion/travel clearance from these areas too.
            polygon = polygon.getMinkowskiHull(Polygon.approximatedCircle(border_size))
            machine_disallowed_polygons.append(polygon)
        # For certain machines we don't need to compute disallowed areas for each nozzle.
        # So we check here and only do the nozzle offsetting if needed.
        nozzle_offsetting_for_disallowed_areas = self._global_container_stack.getMetaDataEntry(
            "nozzle_offsetting_for_disallowed_areas", True)
        result = {}  # type: Dict[str, List[Polygon]]
        for extruder in used_extruders:
            extruder_id = extruder.getId()
            offset_x = extruder.getProperty("machine_nozzle_offset_x", "value")
            if offset_x is None:
                offset_x = 0
            offset_y = extruder.getProperty("machine_nozzle_offset_y", "value")
            if offset_y is None:
                offset_y = 0
            offset_y = -offset_y  # Y direction of g-code is the inverse of Y direction of Cura's scene space.
            result[extruder_id] = []
            for polygon in machine_disallowed_polygons:
                result[extruder_id].append(polygon.translate(offset_x, offset_y))  # Compensate for the nozzle offset of this extruder.
            # Add the border around the edge of the build volume.
            left_unreachable_border = 0
            right_unreachable_border = 0
            top_unreachable_border = 0
            bottom_unreachable_border = 0
            # Only do nozzle offsetting if needed
            if nozzle_offsetting_for_disallowed_areas:
                # The build volume is defined as the union of the area that all extruders can reach, so we need to know
                # the relative offset to all extruders.
                for other_extruder in ExtruderManager.getInstance().getActiveExtruderStacks():
                    other_offset_x = other_extruder.getProperty("machine_nozzle_offset_x", "value")
                    if other_offset_x is None:
                        other_offset_x = 0
                    other_offset_y = other_extruder.getProperty("machine_nozzle_offset_y", "value")
                    if other_offset_y is None:
                        other_offset_y = 0
                    other_offset_y = -other_offset_y
                    left_unreachable_border = min(left_unreachable_border, other_offset_x - offset_x)
                    right_unreachable_border = max(right_unreachable_border, other_offset_x - offset_x)
                    top_unreachable_border = min(top_unreachable_border, other_offset_y - offset_y)
                    bottom_unreachable_border = max(bottom_unreachable_border, other_offset_y - offset_y)
            half_machine_width = self._global_container_stack.getProperty("machine_width", "value") / 2
            half_machine_depth = self._global_container_stack.getProperty("machine_depth", "value") / 2
            # Rectangular beds get one trapezoid per edge; elliptic beds get a ring of wedges.
            if self._shape != "elliptic":
                if border_size - left_unreachable_border > 0:
                    result[extruder_id].append(Polygon(numpy.array([
                        [-half_machine_width, -half_machine_depth],
                        [-half_machine_width, half_machine_depth],
                        [-half_machine_width + border_size - left_unreachable_border, half_machine_depth - border_size - bottom_unreachable_border],
                        [-half_machine_width + border_size - left_unreachable_border, -half_machine_depth + border_size - top_unreachable_border]
                    ], numpy.float32)))
                if border_size + right_unreachable_border > 0:
                    result[extruder_id].append(Polygon(numpy.array([
                        [half_machine_width, half_machine_depth],
                        [half_machine_width, -half_machine_depth],
                        [half_machine_width - border_size - right_unreachable_border, -half_machine_depth + border_size - top_unreachable_border],
                        [half_machine_width - border_size - right_unreachable_border, half_machine_depth - border_size - bottom_unreachable_border]
                    ], numpy.float32)))
                if border_size + bottom_unreachable_border > 0:
                    result[extruder_id].append(Polygon(numpy.array([
                        [-half_machine_width, half_machine_depth],
                        [half_machine_width, half_machine_depth],
                        [half_machine_width - border_size - right_unreachable_border, half_machine_depth - border_size - bottom_unreachable_border],
                        [-half_machine_width + border_size - left_unreachable_border, half_machine_depth - border_size - bottom_unreachable_border]
                    ], numpy.float32)))
                if border_size - top_unreachable_border > 0:
                    result[extruder_id].append(Polygon(numpy.array([
                        [half_machine_width, -half_machine_depth],
                        [-half_machine_width, -half_machine_depth],
                        [-half_machine_width + border_size - left_unreachable_border, -half_machine_depth + border_size - top_unreachable_border],
                        [half_machine_width - border_size - right_unreachable_border, -half_machine_depth + border_size - top_unreachable_border]
                    ], numpy.float32)))
            else:
                # The elliptic border is approximated by wedges between the bed edge and an
                # inscribed ellipse that is shrunk by border_size.
                sections = 32
                arc_vertex = [0, half_machine_depth - border_size]
                for i in range(0, sections):
                    quadrant = math.floor(4 * i / sections)
                    vertices = []
                    if quadrant == 0:
                        vertices.append([-half_machine_width, half_machine_depth])
                    elif quadrant == 1:
                        vertices.append([-half_machine_width, -half_machine_depth])
                    elif quadrant == 2:
                        vertices.append([half_machine_width, -half_machine_depth])
                    elif quadrant == 3:
                        vertices.append([half_machine_width, half_machine_depth])
                    vertices.append(arc_vertex)
                    angle = 2 * math.pi * (i + 1) / sections
                    arc_vertex = [-(half_machine_width - border_size) * math.sin(angle), (half_machine_depth - border_size) * math.cos(angle)]
                    vertices.append(arc_vertex)
                    result[extruder_id].append(Polygon(numpy.array(vertices, numpy.float32)))
                if border_size > 0:
                    # Four triangles at the axis extremes close the gaps left at the quadrant corners.
                    result[extruder_id].append(Polygon(numpy.array([
                        [-half_machine_width, -half_machine_depth],
                        [-half_machine_width, half_machine_depth],
                        [-half_machine_width + border_size, 0]
                    ], numpy.float32)))
                    result[extruder_id].append(Polygon(numpy.array([
                        [-half_machine_width, half_machine_depth],
                        [ half_machine_width, half_machine_depth],
                        [ 0, half_machine_depth - border_size]
                    ], numpy.float32)))
                    result[extruder_id].append(Polygon(numpy.array([
                        [ half_machine_width, half_machine_depth],
                        [ half_machine_width, -half_machine_depth],
                        [ half_machine_width - border_size, 0]
                    ], numpy.float32)))
                    result[extruder_id].append(Polygon(numpy.array([
                        [ half_machine_width, -half_machine_depth],
                        [-half_machine_width, -half_machine_depth],
                        [ 0, -half_machine_depth + border_size]
                    ], numpy.float32)))
        return result
def _getSettingFromAllExtruders(self, setting_key: str) -> List[Any]:
"""Private convenience function to get a setting from every extruder.
For single extrusion machines, this gets the setting from the global stack.
:return: A sequence of setting values, one for each extruder.
"""
all_values = ExtruderManager.getInstance().getAllExtruderSettings(setting_key, "value")
all_types = ExtruderManager.getInstance().getAllExtruderSettings(setting_key, "type")
for i, (setting_value, setting_type) in enumerate(zip(all_values, all_types)):
if not setting_value and setting_type in ["int", "float"]:
all_values[i] = 0
return all_values
    def _calculateBedAdhesionSize(self, used_extruders, adhesion_override = None):
        """Get the bed adhesion size for the global container stack and used extruders

        :param adhesion_override: override adhesion type.
            Use None to use the global stack default, "none" for no adhesion, "brim" for brim etc.
        :return: The adhesion radius in mm, capped at half the smallest bed dimension,
            or None when no global stack is set.
        :raises Exception: For an unrecognized adhesion type.
        """
        if self._global_container_stack is None:
            return None
        container_stack = self._global_container_stack
        adhesion_type = adhesion_override
        if adhesion_type is None:
            adhesion_type = container_stack.getProperty("adhesion_type", "value")
        # Skirt_brim_line_width is a bit of an odd one out. The primary bit of the skirt/brim is printed
        # with the adhesion extruder, but it also prints one extra line by all other extruders. As such, the
        # setting does *not* have a limit_to_extruder setting (which means that we can't ask the global extruder what
        # the value is.
        skirt_brim_extruder_nr = self._global_container_stack.getProperty("skirt_brim_extruder_nr", "value")
        try:
            # NOTE(review): a negative index (e.g. -1 for "not set") silently selects the *last*
            # extruder here instead of raising IndexError — confirm that is intended.
            skirt_brim_stack = self._global_container_stack.extruderList[int(skirt_brim_extruder_nr)]
        except IndexError:
            Logger.warning(f"Couldn't find extruder with index '{skirt_brim_extruder_nr}', defaulting to 0 instead.")
            skirt_brim_stack = self._global_container_stack.extruderList[0]
        skirt_brim_line_width = skirt_brim_stack.getProperty("skirt_brim_line_width", "value")
        initial_layer_line_width_factor = skirt_brim_stack.getProperty("initial_layer_line_width_factor", "value")
        # Use brim width if brim is enabled OR the prime tower has a brim.
        if adhesion_type == "brim":
            brim_line_count = skirt_brim_stack.getProperty("brim_line_count", "value")
            bed_adhesion_size = skirt_brim_line_width * brim_line_count * initial_layer_line_width_factor / 100.0
            # Every other used extruder contributes one extra brim line.
            for extruder_stack in used_extruders:
                bed_adhesion_size += extruder_stack.getProperty("skirt_brim_line_width", "value") * extruder_stack.getProperty("initial_layer_line_width_factor", "value") / 100.0
            # We don't create an additional line for the extruder we're printing the brim with.
            bed_adhesion_size -= skirt_brim_line_width * initial_layer_line_width_factor / 100.0
        elif adhesion_type == "skirt":
            skirt_distance = skirt_brim_stack.getProperty("skirt_gap", "value")
            skirt_line_count = skirt_brim_stack.getProperty("skirt_line_count", "value")
            bed_adhesion_size = skirt_distance + (
                skirt_brim_line_width * skirt_line_count) * initial_layer_line_width_factor / 100.0
            # Every other used extruder contributes one extra skirt line.
            for extruder_stack in used_extruders:
                bed_adhesion_size += extruder_stack.getProperty("skirt_brim_line_width", "value") * extruder_stack.getProperty("initial_layer_line_width_factor", "value") / 100.0
            # We don't create an additional line for the extruder we're printing the skirt with.
            bed_adhesion_size -= skirt_brim_line_width * initial_layer_line_width_factor / 100.0
        elif adhesion_type == "raft":
            bed_adhesion_size = self._global_container_stack.getProperty("raft_margin", "value")  # Should refer to the raft extruder if set.
        elif adhesion_type == "none":
            bed_adhesion_size = 0
        else:
            raise Exception("Unknown bed adhesion type. Did you forget to update the build volume calculations for your new bed adhesion type?")
        # Never let adhesion claim more than half of the smallest bed dimension.
        max_length_available = 0.5 * min(
            self._global_container_stack.getProperty("machine_width", "value"),
            self._global_container_stack.getProperty("machine_depth", "value")
        )
        bed_adhesion_size = min(bed_adhesion_size, max_length_available)
        return bed_adhesion_size
def _calculateFarthestShieldDistance(self, container_stack):
farthest_shield_distance = 0
if container_stack.getProperty("draft_shield_enabled", "value"):
farthest_shield_distance = max(farthest_shield_distance, container_stack.getProperty("draft_shield_dist", "value"))
if container_stack.getProperty("ooze_shield_enabled", "value"):
farthest_shield_distance = max(farthest_shield_distance,container_stack.getProperty("ooze_shield_dist", "value"))
return farthest_shield_distance
def _calculateSupportExpansion(self, container_stack):
support_expansion = 0
support_enabled = self._global_container_stack.getProperty("support_enable", "value")
support_offset = self._global_container_stack.getProperty("support_offset", "value")
if support_enabled and support_offset:
support_expansion += support_offset
return support_expansion
def _calculateMoveFromWallRadius(self, used_extruders):
move_from_wall_radius = 0 # Moves that start from outer wall.
for stack in used_extruders:
if stack.getProperty("travel_avoid_other_parts", "value"):
move_from_wall_radius = max(move_from_wall_radius, stack.getProperty("travel_avoid_distance", "value"))
infill_wipe_distance = stack.getProperty("infill_wipe_dist", "value")
num_walls = stack.getProperty("wall_line_count", "value")
if num_walls >= 1: # Infill wipes start from the infill, so subtract the total wall thickness from this.
infill_wipe_distance -= stack.getProperty("wall_line_width_0", "value")
if num_walls >= 2:
infill_wipe_distance -= stack.getProperty("wall_line_width_x", "value") * (num_walls - 1)
move_from_wall_radius = max(move_from_wall_radius, infill_wipe_distance)
return move_from_wall_radius
    def getEdgeDisallowedSize(self):
        """Calculate the disallowed radius around the edge.

        This disallowed radius is to allow for space around the models that is not part of the collision radius,
        such as bed adhesion (skirt/brim/raft) and travel avoid distance.

        The result is cached in ``self._edge_disallowed_size`` until a caller resets it
        to None (see _updateDisallowedAreas).
        """
        if not self._global_container_stack or not self._global_container_stack.extruderList:
            return 0
        if self._edge_disallowed_size is not None:
            return self._edge_disallowed_size
        container_stack = self._global_container_stack
        used_extruders = ExtruderManager.getInstance().getUsedExtruderStacks()
        # If we are printing one at a time, we need to add the bed adhesion size to the disallowed areas of the objects
        # NOTE: this early return bypasses the cache (self._edge_disallowed_size stays None).
        if container_stack.getProperty("print_sequence", "value") == "one_at_a_time":
            return 0.1
        bed_adhesion_size = self._calculateBedAdhesionSize(used_extruders)
        support_expansion = self._calculateSupportExpansion(self._global_container_stack)
        farthest_shield_distance = self._calculateFarthestShieldDistance(self._global_container_stack)
        move_from_wall_radius = self._calculateMoveFromWallRadius(used_extruders)
        # Now combine our different pieces of data to get the final border size.
        # Support expansion is added to the bed adhesion, since the bed adhesion goes around support.
        # Support expansion is added to farthest shield distance, since the shields go around support.
        self._edge_disallowed_size = max(move_from_wall_radius, support_expansion + farthest_shield_distance, support_expansion + bed_adhesion_size)
        return self._edge_disallowed_size
def _clamp(self, value, min_value, max_value):
return max(min(value, max_value), min_value)
_machine_settings = ["machine_width", "machine_depth", "machine_height", "machine_shape", "machine_center_is_zero"]
_skirt_settings = ["adhesion_type", "skirt_gap", "skirt_line_count", "skirt_brim_line_width", "brim_width", "brim_line_count", "raft_margin", "draft_shield_enabled", "draft_shield_dist", "initial_layer_line_width_factor"]
_raft_settings = ["adhesion_type", "raft_base_thickness", "raft_interface_layers", "raft_interface_thickness", "raft_surface_layers", "raft_surface_thickness", "raft_airgap", "layer_0_z_overlap"]
_extra_z_settings = ["retraction_hop_enabled", "retraction_hop"]
_prime_settings = ["extruder_prime_pos_x", "extruder_prime_pos_y", "prime_blob_enable"]
_tower_settings = ["prime_tower_enable", "prime_tower_size", "prime_tower_position_x", "prime_tower_position_y", "prime_tower_brim_enable"]
_ooze_shield_settings = ["ooze_shield_enabled", "ooze_shield_dist"]
_distance_settings = ["infill_wipe_dist", "travel_avoid_distance", "support_offset", "support_enable", "travel_avoid_other_parts", "travel_avoid_supports", "wall_line_count", "wall_line_width_0", "wall_line_width_x"]
_extruder_settings = ["support_enable", "support_bottom_enable", "support_roof_enable", "support_infill_extruder_nr", "support_extruder_nr_layer_0", "support_bottom_extruder_nr", "support_roof_extruder_nr", "brim_line_count", "skirt_brim_extruder_nr", "raft_base_extruder_nr", "raft_interface_extruder_nr", "raft_surface_extruder_nr", "adhesion_type"] #Settings that can affect which extruders are used.
_limit_to_extruder_settings = ["wall_extruder_nr", "wall_0_extruder_nr", "wall_x_extruder_nr", "top_bottom_extruder_nr", "infill_extruder_nr", "support_infill_extruder_nr", "support_extruder_nr_layer_0", "support_bottom_extruder_nr", "support_roof_extruder_nr", "skirt_brim_extruder_nr", "raft_base_extruder_nr", "raft_interface_extruder_nr", "raft_surface_extruder_nr"]
_material_size_settings = ["material_shrinkage_percentage", "material_shrinkage_percentage_xy", "material_shrinkage_percentage_z"]
_disallowed_area_settings = _skirt_settings + _prime_settings + _tower_settings + _ooze_shield_settings + _distance_settings + _extruder_settings + _material_size_settings
| Ultimaker/Cura | cura/BuildVolume.py | Python | lgpl-3.0 | 65,716 |
import os
import sys
import numpy as np
import glob
import gfs_sublink_utils as gsu
import shutil
import math
import astropy
import astropy.io.fits as fits
#import matplotlib
#import matplotlib.pyplot as plt
import scipy
import scipy.ndimage
#import make_color_image
import numpy.random as random
import congrid
import tarfile
import string
import astropy.io.ascii as ascii
from astropy.convolution import *
import copy
# Square arcseconds per steradian (solid-angle conversion constant).
sq_arcsec_per_sr = 42545170296.0
# Speed of light in m/s.
c = 3.0e8
# Mapping from the positional column names of the ascii lightcone catalog
# ('col1', 'col2', ...) to descriptive names; applied to the table after it
# is read in build_lightcone_images().
lcfile_cols={'col1':'snapshot',
             'col2':'SubfindID',
             'col3':'ra_deg',
             'col4':'dec_deg',
             'col5':'ra_kpc',
             'col6':'dec_kpc',
             'col7':'ra_kpc_inferred',
             'col8':'dec_kpc_inferred',
             'col9':'true_z',
             'col10':'inferred_z',
             'col11':'peculiar_z',
             'col12':'true_kpc_per_arcsec',
             'col13':'X_cmpc',
             'col14':'Y_cmpc',
             'col15':'Z_cmpc',
             'col16':'ADD_cmpc',
             'col17':'ADD_cmpc_inferred',
             'col18':'snapshot_z',
             'col19':'geometric_z',
             'col20':'cylinder_number',
             'col21':'mstar_msun_rad',
             'col22':'mgas_msun_rad',
             'col23':'subhalo_mass_msun',
             'col24':'bhmass_msun_rad',
             'col25':'mbary_msun_rad',
             'col26':'sfr_msunperyr_rad',
             'col27':'bhrate_code',
             'col28':'camX_mpc',
             'col29':'camY_mpc',
             'col30':'camZ_mpc',
             'col31':'g_AB_absmag',
             'col32':'r_AB_absmag',
             'col33':'i_AB_absmag',
             'col34':'z_AB_absmag',
             'col35':'v_kms_camX',
             'col36':'v_kms_camY',
             'col37':'v_kms_camZ',
             'col38':'v_kms_hubble',
             'col39':'g_AB_appmag'}
# Matching windows used when pairing a lightcone entry with a simulated
# galaxy image: scale factor within a factor of 1.2, halo mass within 2.0.
scale_window=1.2
mass_window=2.0
def apply_adaptive_smoothing(base='hlsp_misty_mockluvoir_imager_fielda-subimage00-9-8',
                             filters=('f336w', 'f435w', 'f606w', 'f775w',
                                      'f850lp', 'f115w', 'f150w', 'f200w'),
                             kband_filter='f200w'):
    """Run adaptive_smooth() over every filter image of one sub-field.

    Generalized from the original hard-coded file list (backward compatible:
    calling with no arguments reproduces the original behavior exactly).

    Parameters
    ----------
    base : str
        Common filename prefix of the per-filter lightcone FITS images.
    filters : sequence of str
        Filter tags to smooth, in order.
    kband_filter : str
        Filter whose image supplies the source-distance map that drives the
        adaptive smoothing (F200W in the original).
    """
    # The distance-map ("K band") image shared by all filters.
    kfile = base + '_' + kband_filter + '_v1_lightcone.fits'
    for filt in filters:
        adaptive_smooth(base + '_' + filt + '_v1_lightcone.fits', kfile)
    return
def adaptive_smooth(targetfile, distancefile):
    """Adaptively smooth a lightcone FITS image and write '<base>-smooth_...'.

    Pixels far from any source (as measured by a chamfer distance transform
    of `distancefile`) are smoothed with progressively larger Gaussian
    kernels; pixels close to sources keep nearly full resolution.

    Parameters
    ----------
    targetfile : str
        FITS image to smooth ('..._<filter>_v1_lightcone.fits').
    distancefile : str
        FITS image (F200W in practice) defining where sources are.

    Writes a two-extension FITS file (smoothed image + distance map) named
    like the input with a '-smooth' tag inserted before the filter string.
    """
    fitsfile = os.path.abspath(targetfile)
    distancefile = os.path.abspath(distancefile)
    filterstr = fitsfile.split('_')[-3]
    # BUG FIX: the original built the output name with str.rstrip(), which
    # strips a *character set* rather than a suffix and only worked by
    # accident for these particular filter names.  Strip the literal
    # '_<filter>_v1_lightcone.fits' tail instead.
    tail = '_' + filterstr + '_v1_lightcone.fits'
    if fitsfile.endswith(tail):
        new_fn = fitsfile[:-len(tail)] + '-smooth' + tail
    else:
        new_fn = fitsfile + '-smooth' + tail
    print('smoothing..', new_fn)
    hdulist = fits.open(fitsfile)
    header = hdulist[0].header
    data = np.float32(hdulist[0].data)
    distlist = fits.open(distancefile)
    distdata = np.float32(distlist[0].data)
    # Background mask: pixels with (near-)zero flux in the distance image.
    # The chamfer distance transform then gives distance to the nearest source.
    inverse_data = np.where(distdata < 1.0e-3, np.ones_like(distdata), np.zeros_like(distdata))
    idt = scipy.ndimage.distance_transform_cdt(inverse_data)
    idt = scipy.ndimage.gaussian_filter(np.float32(idt), 7)
    # Cascade: farther pixels get larger kernels; each step re-smooths the
    # previous result but always falls back to the unsmoothed data near sources.
    print('25')
    sm = np.where(idt > 25.0, scipy.ndimage.gaussian_filter(data, 50), data)
    print('10')
    sm = np.where(idt > 10.0, scipy.ndimage.gaussian_filter(sm, 20), data)
    print('1')
    sm = np.where(idt > 1.0, scipy.ndimage.gaussian_filter(sm, 8), data)
    sm = np.where(idt > 0.3, scipy.ndimage.gaussian_filter(sm, 3), data)
    print('0.1')
    sm = np.where(idt > 0.1, scipy.ndimage.gaussian_filter(sm, 2.5), data)
    sm = np.where(idt > 0.03, scipy.ndimage.gaussian_filter(sm, 2), data)
    print('0')
    sm = np.where(idt > 0.0, scipy.ndimage.gaussian_filter(sm, 1.5), data)
    # Background pixels (no source in the distance map) get a fixed mild blur.
    sm = np.where(distdata < 1.0e-3, scipy.ndimage.gaussian_filter(data, 2.5), sm)
    # Final light pass for continuity across the threshold boundaries.
    print('all')
    sm = scipy.ndimage.gaussian_filter(sm, 1.0)
    outhdu = fits.PrimaryHDU(sm)
    outhdu.header = header
    disthdu = fits.ImageHDU(idt)
    outlist = fits.HDUList([outhdu, disthdu])
    outlist.writeto(new_fn, overwrite=True)
    hdulist.close()
    distlist.close()
    return
def adaptive_fast(sub_image, k_image, size_factor):
    """Adaptively smooth a galaxy stamp, guided by a distance map.

    Background pixels of `k_image` (flux below 1e-3) define a chamfer
    distance transform; pixels far from any source get progressively larger
    Gaussian kernels, while pixels near sources stay close to full
    resolution.  The smoothing schedule scales with the stamp size so small
    stamps are not over-blurred.

    Note: `size_factor` is accepted for interface compatibility but is not
    used by the current implementation.
    """
    npix = sub_image.shape[0]
    # Schedule ladder: (min stamp size, distance-map pre-smoothing sigma,
    # [(distance threshold, smoothing sigma), ...], final polish sigma).
    ladder = [
        (200, 20.0, [(10.0, 32.0), (1.0, 16.0), (0.1, 8.0), (0.01, 4.0), (0.0, 2.0)], 1.0),
        (100, 10.0, [(10.0, 16.0), (1.0, 8.0), (0.1, 4.0), (0.01, 2.0)], 1.0),
        (50, 5.0, [(10.0, 8.0), (1.0, 4.0), (0.1, 2.0)], 1.0),
        (25, 3.0, [(1.0, 2.0), (0.1, 1.0)], 0.5),
        (10, 2.0, [(1.0, 2.0), (0.1, 1.0)], 0.2),
    ]
    background = np.where(k_image < 1.0e-3, np.ones_like(k_image), np.zeros_like(k_image))
    dist = scipy.ndimage.distance_transform_cdt(background)
    for min_size, pre_sigma, stages, final_sigma in ladder:
        if npix > min_size:
            dist = scipy.ndimage.gaussian_filter(np.float32(dist), pre_sigma)
            smoothed = sub_image
            for threshold, sigma in stages:
                # Far pixels take the progressively smoothed value; near
                # pixels always fall back to the original stamp.
                smoothed = np.where(dist > threshold,
                                    scipy.ndimage.gaussian_filter(smoothed, sigma),
                                    sub_image)
            return scipy.ndimage.gaussian_filter(smoothed, final_sigma)
    # Tiny stamps: a single mild blur, no distance-dependent cascade.
    return scipy.ndimage.gaussian_filter(sub_image, 1.0)
def adaptive_filter(values):
    """Local averaging kernel intended for scipy.ndimage.generic_filter.

    `values` is a flattened (side, side, 2) footprint where channel 0 holds
    image flux and channel 1 a (pre-transformed) distance map.  Returns the
    mean of a square neighborhood around the footprint center whose
    half-width grows with the central distance value; at zero distance the
    central pixel is returned unchanged.
    """
    side = np.int32((values.shape[0] / 2) ** 0.5)
    stacked = np.reshape(values, (side, side, 2))
    flux = stacked[:, :, 0]
    dist = stacked[:, :, 1]
    center = np.int32(side / 2)
    # Neighborhood half-width: twice the (rounded-up) central distance.
    # The distance channel is assumed already transformed by the caller.
    halfwidth = np.int32(2.0 * np.ceil(dist[center - 1, center - 1]))
    lo = np.max([0, center - halfwidth])
    hi = np.min([side - 1, center + halfwidth])
    if lo == hi:
        # Degenerate window: just pass through the single pixel.
        return flux[lo, lo]
    # NOTE(review): square window; a circular footprint might be preferable.
    return np.mean(flux[lo:hi, lo:hi])
def process_single_filter_subimage(image_parameters,galaxy_data,lcdata,filname,lambda_eff_microns,selected_catalog=None):
    """Render one filter's sub-image of the mock lightcone mosaic.

    For every lightcone catalog entry inside the sub-image footprint, select
    a simulated galaxy image matched in scale factor and halo mass, rescale
    its flux and size to the catalog entry, adaptively smooth it, and co-add
    it into the mosaic.

    Parameters
    ----------
    image_parameters : dict
        Sub-image geometry: 'Npix', 'pix_size_arcsec' and the bounds
        'x1_deg'/'x2_deg'/'y1_deg'/'y2_deg'.
    galaxy_data : dict
        Parallel arrays describing available simulated galaxies
        ('image_dir', 'scale', 'simlabel', 'Mvir', 'Mstar', 'Rhalf_stars').
    lcdata : table
        Lightcone catalog (columns renamed per lcfile_cols).
    filname : str
        Filter name used to locate the source FITS images on disk.
    lambda_eff_microns : float
        Effective wavelength of the filter (currently unused here).
    selected_catalog : dict or None
        When given (subsequent filters), the galaxy/camera selections made
        for the first filter are re-used so all bands stay consistent; when
        None, the selection catalog is built from scratch.

    Returns
    -------
    (single_filter_subimage, single_filter_subimage_smooth, selected_catalog)
    """
    print('**** Processing subimage: ',image_parameters)
    # BUG FIX: np.ndarray((N, N)) returns *uninitialized* memory, but these
    # arrays are only ever accumulated into with '+='; start from zeros.
    single_filter_subimage=np.zeros((image_parameters['Npix'],image_parameters['Npix']))
    single_filter_subimage_smooth=np.zeros((image_parameters['Npix'],image_parameters['Npix']))
    print(single_filter_subimage.shape)
    # Select sources in this sub-image; a 10-arcsec buffer on the lower-left
    # edge lets sources just outside still contribute flux.
    buf_deg=10.0/3600.0
    sub_indices=(lcdata['ra_deg']>=image_parameters['x1_deg']-buf_deg)*(lcdata['ra_deg']<image_parameters['x2_deg'])*(lcdata['dec_deg']>=image_parameters['y1_deg']-buf_deg)*(lcdata['dec_deg']<image_parameters['y2_deg'])
    sub_data=lcdata[sub_indices]
    success=0
    # Per-source bookkeeping for sources actually placed in the image.
    xcen_list=[]
    ycen_list=[]
    final_flux_njy_list=[]
    ab_appmag_list=[]
    number=len(sub_data)
    if selected_catalog is not None:
        # Subsequent filter: re-use the selections made for the first one.
        assert(len(selected_catalog['galaxy_indices'])==number)
        build_catalog=False
    else:
        # First filter: record every per-source choice so later filters match.
        selected_catalog={'filter':filname}
        selected_catalog['galaxy_indices']=np.ndarray((number),dtype=object)
        selected_catalog['found_galaxy']=np.ndarray((number),dtype=object)
        selected_catalog['this_camstr']=np.ndarray((number),dtype=object)
        selected_catalog['simlabel']=np.ndarray((number),dtype=object)
        selected_catalog['numcams']=np.ndarray((number),dtype=object)
        selected_catalog['image_dir']=np.ndarray((number),dtype=object)
        selected_catalog['scalefactor']=np.ndarray((number),dtype=object)
        selected_catalog['Mvir']=np.ndarray((number),dtype=object)
        selected_catalog['mstar']=np.ndarray((number),dtype=object)
        selected_catalog['rhalf']=np.ndarray((number),dtype=object)
        selected_catalog['lc_entry']=np.ndarray((number),dtype=object)
        selected_catalog['orig_pix_arcsec']=np.ndarray((number),dtype=object)
        selected_catalog['mstar_factor']=np.ndarray((number),dtype=object)
        selected_catalog['size_factor']=np.ndarray((number),dtype=object)
        selected_catalog['icen']=np.ndarray((number),dtype=object)
        selected_catalog['jcen']=np.ndarray((number),dtype=object)
        selected_catalog['flux_njy']=np.ndarray((number),dtype=object)
        selected_catalog['ABmag']=np.ndarray((number),dtype=object)
        selected_catalog['in_image']=np.ndarray((number),dtype=object)
        selected_catalog['final_file']=np.ndarray((number),dtype=object)
        selected_catalog['Kband_file']=np.ndarray((number),dtype=object)
        build_catalog=True
    data_found=0
    for i,entry in enumerate(sub_data):
        if i % 100 == 0:
            print(' processed ', i, ' out of ', number)
        # Pixel position of the stamp's lower-left corner in the mosaic.
        pos_i=np.int64( (entry['ra_deg']-image_parameters['x1_deg'])*np.float64(image_parameters['Npix'])/(image_parameters['x2_deg']-image_parameters['x1_deg']) )
        pos_j=np.int64( (entry['dec_deg']-image_parameters['y1_deg'])*np.float64(image_parameters['Npix'])/(image_parameters['y2_deg']-image_parameters['y1_deg']) )
        mass_value=entry['subhalo_mass_msun']
        scale_value=1.0/(1.0+entry['true_z'])
        mstar_value=entry['mstar_msun_rad']
        if build_catalog is True:
            # Candidate galaxies within the scale-factor and mass windows.
            galaxy_scale_indices=(galaxy_data['scale']>=scale_value/scale_window)
            galaxy_scale_indices*=(galaxy_data['scale']<=scale_value*scale_window)
            galaxy_mass_indices=(galaxy_data['Mvir']>=mass_value/mass_window)
            galaxy_mass_indices*=(galaxy_data['Mvir']<=mass_value*mass_window)
            galaxy_search_indices=galaxy_scale_indices*galaxy_mass_indices
            found_galaxy=False
            selected_catalog['lc_entry'][i]=entry
            if np.sum(galaxy_search_indices)==0 and np.sum(galaxy_scale_indices)>0:
                # No mass match: fall back to a random scale-matched galaxy
                # (it is re-fluxed and resized below anyway).
                random_index=random.randint(np.sum(galaxy_scale_indices))
                scale_where=np.where(galaxy_scale_indices==True)[0]
                galaxy_index=scale_where[random_index]
                success+=1
                found_galaxy=True
            elif np.sum(galaxy_search_indices)==0 and np.sum(galaxy_scale_indices)==0:
                # Nothing matches even in scale factor: skip this source.
                galaxy_index=None
            else:
                random_index=random.randint(np.sum(galaxy_search_indices))
                galaxy_where=np.where(galaxy_search_indices==True)[0]
                galaxy_index=galaxy_where[random_index]
                success+=1
                found_galaxy=True
            selected_catalog['found_galaxy'][i]=found_galaxy
            selected_catalog['galaxy_indices'][i]=galaxy_index
        else:
            galaxy_index=selected_catalog['galaxy_indices'][i]
            found_galaxy=selected_catalog['found_galaxy'][i]
        found_data=False
        if galaxy_index is not None:
            # Flux/size scalings mapping the simulated galaxy onto the
            # catalog entry's stellar mass (size ~ sqrt of the mass ratio).
            mstar_factor=mstar_value/(galaxy_data['Mstar'][galaxy_index])
            size_factor=(mstar_factor)**0.5
            folder=galaxy_data['image_dir'][galaxy_index]
            label=galaxy_data['simlabel'][galaxy_index]
            if build_catalog is True:
                selected_catalog['simlabel'][i]=label
                selected_catalog['image_dir'][i]=folder
                possible_files=np.sort(np.asarray(glob.glob(folder+'/hires_images_cam??/'+label+'cam??_'+filname+'*.fits')))
                selected_catalog['numcams'][i]=possible_files.shape[0]
                if possible_files.shape[0]>0:
                    # Pick a random camera; remembered for the other filters.
                    file_index=random.randint(possible_files.shape[0])
                    this_file=possible_files[file_index]
                    this_folder=os.path.dirname(this_file)
                    this_camstr=this_folder[-5:]
                    filter_files=np.sort(np.asarray(glob.glob(this_folder+'/'+label+'*.fits')))
                    if filter_files.shape[0]==8:
                        # All 8 filters present; locate the NIRCam F200W image
                        # used as the distance map for adaptive smoothing.
                        kband_files=np.asarray(glob.glob(folder+'/hires_images_'+this_camstr+'/'+label+this_camstr+'_'+'nircam_f200w*.fits'))
                        if kband_files.shape[0]==0:
                            # BUG FIX: was assert(false) (lowercase), which
                            # raised NameError instead of AssertionError.
                            assert False
                        else:
                            selected_catalog['Kband_file'][i]=kband_files[0]
                    else:
                        # Incomplete filter set for this camera: drop it.
                        this_camstr=None
                    selected_catalog['this_camstr'][i]=this_camstr
                else:
                    this_file=None
            else:
                this_camstr=selected_catalog['this_camstr'][i]
                if this_camstr is not None:
                    this_files=np.asarray(glob.glob(folder+'/hires_images_'+this_camstr+'/'+label+this_camstr+'_'+filname+'*.fits'))
                    if this_files.shape[0]==0:
                        assert(False)
                    this_file=this_files[0]
                else:
                    this_file=None
            selected_catalog['final_file'][i]=this_file
            if this_file is not None:
                found_data=True
                this_hdu=fits.open(this_file)[0]
                this_image=this_hdu.data
                # Pixel scale parsed from the filename token ('...pixNNN_').
                # NOTE(review): assumes this naming convention — confirm.
                pixstr=this_file.split('_')[-2][3:]
                pixsize_arcsec=np.float64(pixstr)
                kband_hdu=fits.open(selected_catalog['Kband_file'][i])[0]
                kdata=kband_hdu.data
            else:
                found_data=False
        if found_data==True:
            # Images are in nJy; preserve the flux integral under resampling.
            data_found+=1
            original_flux=np.sum(this_image)
            total_flux=original_flux*mstar_factor
            this_npix=this_image.shape[0]
            desired_npix=np.int32( this_npix*(pixsize_arcsec/image_parameters['pix_size_arcsec'])*size_factor )
            resized_image=congrid.congrid(this_image,(desired_npix,desired_npix))
            resized_flux=np.sum(resized_image)
            resized_image=resized_image*(total_flux/resized_flux)
            resized_k=congrid.congrid(kdata,(desired_npix,desired_npix))
            npsub=desired_npix
            i1=pos_i
            i2=pos_i+npsub
            j1=pos_j
            j2=pos_j+npsub
            icen=np.float64(pos_i)+np.float64(npsub)/2.0
            jcen=np.float64(pos_j)+np.float64(npsub)/2.0
            im0=0
            im1=image_parameters['Npix']
            # Clip the stamp to the mosaic bounds.
            # NOTE(review): the '-1' endpoints drop the stamp's last row and
            # column even when fully inside the mosaic; kept as-is to
            # preserve the existing output — confirm before changing.
            if i1 < im0:
                is1=np.abs(im0-i1)
            else:
                is1=0
            if i2 >= im1:
                is2=npsub-(i2-im1)-1
            else:
                is2=npsub-1
            if j1 < im0:
                js1=np.abs(im0-j1)
            else:
                js1=0
            if j2 >= im1:
                js2=npsub-(j2-im1)-1
            else:
                js2=npsub-1
            sub_image_to_add=resized_image[is1:is2,js1:js2]
            k_subimage=resized_k[is1:is2,js1:js2]
            # Adaptive smoothing keyed to the F200W (distance-map) stamp.
            new_image_to_add=adaptive_fast(sub_image_to_add,k_subimage,size_factor)
            k_smoothed=adaptive_fast(k_subimage,k_subimage,size_factor)
            orig_image_to_add=sub_image_to_add
            # Edge-flux estimate: detects stamps whose galaxy spills over the
            # cutout border (a rendering artifact in the source images).
            if k_subimage.shape[0] > 2 and k_subimage.shape[1] > 2:
                edge1=np.mean(k_smoothed[0:2,:])
                edge2=np.mean(k_smoothed[:,-2:])
                edge3=np.mean(k_smoothed[:,0:2])
                edge4=np.mean(k_smoothed[-2:,:])
                maxedge=np.max(np.asarray([edge1,edge2,edge3,edge4]))
            else:
                maxedge=0.0
            in_image=False
            if maxedge > 0.03 and resized_image.shape[0] > 20 and new_image_to_add.shape[0]==new_image_to_add.shape[1]:
                # Strong edge contamination: zero the stamp rather than co-add.
                print('omitting edge effect, max: ', maxedge, os.path.basename(this_file), size_factor, new_image_to_add.shape)
                new_image_to_add *= 0.0
                orig_image_to_add *= 0.0
                in_image=False
            elif maxedge > 0.001 and resized_image.shape[0] > 200 and new_image_to_add.shape[0]==new_image_to_add.shape[1]:
                # Large stamps get a stricter edge threshold.
                print('omitting edge effect, max: ', maxedge, os.path.basename(this_file), size_factor, new_image_to_add.shape)
                new_image_to_add *= 0.0
                orig_image_to_add *= 0.0
                in_image=False
            else:
                in_image=True
            if icen >= 0.0 and jcen >= 0.0:
                # Per-source photometry (zeroed stamps record magnitude -1).
                xcen_list.append(icen)
                ycen_list.append(jcen)
                final_flux_njy_list.append(np.sum(new_image_to_add))
                if np.sum(new_image_to_add)==0.0:
                    ab_appmag_list.append(-1)
                else:
                    # AB magnitude from nJy: m = -2.5 log10(f/3631 Jy-ish).
                    ab_appmag_list.append(-2.5*np.log10((1.0e9)*(np.sum(new_image_to_add))/3632.0))
            else:
                in_image=False
            if sub_image_to_add.shape[0] > 0 and sub_image_to_add.shape[1] > 0:
                # Co-add the (possibly zeroed) stamp at its clipped position.
                iplace=np.max([i1,0])
                jplace=np.max([j1,0])
                new_npi=sub_image_to_add.shape[0]
                new_npj=sub_image_to_add.shape[1]
                single_filter_subimage[iplace:iplace+new_npi,jplace:jplace+new_npj] += orig_image_to_add
                single_filter_subimage_smooth[iplace:iplace+new_npi,jplace:jplace+new_npj] += new_image_to_add
            # Final per-source catalog entries.
            selected_catalog['scalefactor'][i]=galaxy_data['scale'][galaxy_index]
            selected_catalog['Mvir'][i]=galaxy_data['Mvir'][galaxy_index]
            selected_catalog['mstar'][i]=galaxy_data['Mstar'][galaxy_index]
            selected_catalog['rhalf'][i]=galaxy_data['Rhalf_stars'][galaxy_index]
            selected_catalog['orig_pix_arcsec'][i]=pixsize_arcsec
            selected_catalog['mstar_factor'][i]=mstar_factor
            selected_catalog['size_factor'][i]=size_factor
            selected_catalog['icen'][i]=icen
            selected_catalog['jcen'][i]=jcen
            selected_catalog['flux_njy'][i]=total_flux
            selected_catalog['ABmag'][i]=-2.5*np.log10((1.0e9)*(total_flux)/3632.0)
            selected_catalog['in_image'][i]=in_image
    # BUG FIX: the original printed 'i+1' here, which raises NameError when
    # sub_data is empty; 'number' is identical at loop completion.
    print('**** Subimage data found: ', str(data_found), ' out of ', number)
    return single_filter_subimage,single_filter_subimage_smooth,selected_catalog
def build_lightcone_images(lightcone_file,run_type='images',lim=None,minz=None,
                           image_filelabel='hlsp_misty_mockluvoir',
                           total_images=16,
                           do_sub_image_x=0,
                           do_sub_image_y=0,
                           n_cameras=19):
    """Build all filter mosaics for one sub-image tile of the mock lightcone.

    Reads the ascii lightcone catalog and the VELA galprops catalogs, then
    renders each filter via process_single_filter_subimage(), writing raw
    and smoothed FITS mosaics into '<lightcone_dir>/luvoir_mosaics'.

    Parameters
    ----------
    lightcone_file : str
        Path to the ascii lightcone catalog (columns per lcfile_cols).
    run_type, lim, minz, n_cameras :
        Accepted for API compatibility; currently unused in this function.
    image_filelabel : str
        Prefix for the output HLSP-style filenames.
    total_images : int
        Total number of sub-image tiles in the field (used to derive a
        distinct, reproducible random seed per tile).
    do_sub_image_x, do_sub_image_y : int
        Tile indices of the sub-image to build.
    """
    # Sub-image geometry: 8192 pixels at 0.00732421875"/pix = exactly 1 arcmin.
    image_parameters={}
    image_parameters['Npix']=8192
    image_parameters['pix_size_arcsec']=0.00732421875
    lightcone_fov_arcmin=5.305160
    # Shrink the usable field by 5% to buffer edge effects at the boundary.
    full_image_min=-0.95*lightcone_fov_arcmin/2.0
    full_image_max=0.95*lightcone_fov_arcmin/2.0
    x_min=full_image_min + (do_sub_image_x+0)*image_parameters['Npix']*image_parameters['pix_size_arcsec']/60.0
    x_max=full_image_min + (do_sub_image_x+1)*image_parameters['Npix']*image_parameters['pix_size_arcsec']/60.0
    y_min=full_image_min + (do_sub_image_y+0)*image_parameters['Npix']*image_parameters['pix_size_arcsec']/60.0
    y_max=full_image_min + (do_sub_image_y+1)*image_parameters['Npix']*image_parameters['pix_size_arcsec']/60.0
    image_parameters['x1_deg']=x_min/60.0
    image_parameters['x2_deg']=x_max/60.0
    image_parameters['y1_deg']=y_min/60.0
    image_parameters['y2_deg']=y_max/60.0
    image_parameters['xsub']=do_sub_image_x
    image_parameters['ysub']=do_sub_image_y
    lightcone_dir=os.path.abspath(os.path.dirname(lightcone_file))
    print('Constructing lightcone data from: ', lightcone_file)
    output_dir = os.path.join(lightcone_dir,'luvoir_mosaics')
    print('Saving lightcone outputs in: ', output_dir)
    if not os.path.lexists(output_dir):
        os.mkdir(output_dir)
    lcdata=ascii.read(lightcone_file)
    # Rename positional columns (col1..col39) to descriptive names.
    for colkey in lcfile_cols:
        col_obj=lcdata[colkey]
        col_obj.name=lcfile_cols[colkey]
    filters = np.flipud(['U','B','V','Z','nircam_f115w','nircam_f150w','nircam_f200w'])
    output_filters=np.flipud(['f336w','f435w','f606w','f850lp','f115w','f150w','f200w'])
    lambda_eff_microns = np.flipud([0.35,0.45,0.55,0.85,1.15,1.50,2.0])
    # Assemble the galaxy-image lookup catalog from the per-sim galprops files.
    galaxy_data={}
    galaxy_data=galaxy_data.fromkeys(['image_dir','scale','simlabel','Mvir','Mstar','Rhalf_stars'])
    print(galaxy_data.keys())
    galprops_files=np.sort(np.asarray(glob.glob(lightcone_dir+'/galprops_data/VELA??_galprops.npy')))
    for gp_file in galprops_files:
        print(' ', gp_file)
        this_gp_dict=(np.load(gp_file,encoding='bytes')).all()
        snap_files=this_gp_dict[b'snap_files']
        # Extract the per-snapshot directory names from the stored paths.
        # NOTE(review): assumes a fixed path depth in the .npy file.
        sim_dir_list = np.str_(snap_files).split('/')[7::8] #OMG wut
        sim_labels=[]
        sim_names=[]
        for sn in sim_dir_list:
            # BUG FIX: was sn.rstrip('_sunrise') -- rstrip() strips a
            # character *set*, not a suffix, and can eat trailing label
            # characters; strip the literal suffix instead.
            if sn.endswith('_sunrise'):
                sim_labels.append(sn[:-len('_sunrise')])
            else:
                sim_labels.append(sn)
            sim_names.append(lightcone_dir+'/'+sn[0:6])
        sim_labels=np.asarray(sim_labels)
        sim_names=np.asarray(sim_names)
        assert(sim_labels.shape[0]==len(sim_names))
        if galaxy_data['image_dir'] is None:
            # First galprops file: initialize the arrays.
            galaxy_data['image_dir']=sim_names
            galaxy_data['scale']=this_gp_dict[b'scale']
            galaxy_data['simlabel']=sim_labels
            galaxy_data['Mvir']=this_gp_dict[b'Mvir_dm']
            galaxy_data['Mstar']=this_gp_dict[b'stars_total_mass']
            galaxy_data['Rhalf_stars']=this_gp_dict[b'stars_rhalf']
        else:
            galaxy_data['image_dir']=np.append(galaxy_data['image_dir'],sim_names)
            galaxy_data['scale']=np.append(galaxy_data['scale'],this_gp_dict[b'scale'])
            galaxy_data['simlabel']=np.append(galaxy_data['simlabel'],sim_labels)
            galaxy_data['Mvir']=np.append(galaxy_data['Mvir'],this_gp_dict[b'Mvir_dm'])
            galaxy_data['Mstar']=np.append(galaxy_data['Mstar'],this_gp_dict[b'stars_total_mass'])
            galaxy_data['Rhalf_stars']=np.append(galaxy_data['Rhalf_stars'],this_gp_dict[b'stars_rhalf'])
    assert(galaxy_data['image_dir'].shape==galaxy_data['Mvir'].shape)
    selected_catalog=None
    # Seed so each sub-image tile gets a distinct but reproducible stream.
    baseseed=10
    random.seed(baseseed+image_parameters['xsub']+int(total_images**0.5)*image_parameters['ysub'])
    for i,filname in enumerate(filters):
        print('processing.. ', filname)
        single_filter_subimage,single_filter_subimage_smooth,selected_catalog=process_single_filter_subimage(image_parameters,
                                                                                                             galaxy_data,
                                                                                                             lcdata,
                                                                                                             filname,
                                                                                                             lambda_eff_microns[i],
                                                                                                             selected_catalog=selected_catalog)
        output_filename=os.path.join(output_dir,image_filelabel+'_imager'+'_fielda-subimage'+str(do_sub_image_x)+str(do_sub_image_y)+'-9-8_'+output_filters[i]+'_v1_lightcone.fits' )
        output_filename_sm=os.path.join(output_dir,image_filelabel+'_imager'+'_fielda-subimage'+str(do_sub_image_x)+str(do_sub_image_y)+'-9-8-smooth_'+output_filters[i]+'_v1_lightcone.fits' )
        print('**** Saving Subimage: ', output_filename )
        # Write raw and smoothed mosaics with the pixel scale in the header.
        outhdu=fits.PrimaryHDU(np.float32(single_filter_subimage))
        outhdu.header['PIXSIZE']=(image_parameters['pix_size_arcsec'],'arcsec')
        outlist=fits.HDUList([outhdu])
        outlist.writeto(output_filename,overwrite=True)
        outhdu2=fits.PrimaryHDU(np.float32(single_filter_subimage_smooth))
        outhdu2.header['PIXSIZE']=(image_parameters['pix_size_arcsec'],'arcsec')
        outlist2=fits.HDUList([outhdu2])
        outlist2.writeto(output_filename_sm,overwrite=True)
    return
| gsnyder206/mock-surveys | original_illustris/luvoir_udf.py | Python | mit | 31,191 |
import datetime
import math
import os
import logging
from pyproj import transform, Proj
from urlparse import urljoin, urlsplit
from django.db import models
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from django.conf import settings
from django.contrib.staticfiles.templatetags import staticfiles
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth import get_user_model
from django.db.models import signals
from django.core.files import File
from mptt.models import MPTTModel, TreeForeignKey
from polymorphic import PolymorphicModel, PolymorphicManager
from agon_ratings.models import OverallRating
from geonode.base.enumerations import ALL_LANGUAGES, \
HIERARCHY_LEVELS, UPDATE_FREQUENCIES, \
DEFAULT_SUPPLEMENTAL_INFORMATION, LINK_TYPES
from geonode.utils import bbox_to_wkt
from geonode.utils import forward_mercator
from geonode.security.models import PermissionLevelMixin
from taggit.managers import TaggableManager
from geonode.people.enumerations import ROLE_VALUES
logger = logging.getLogger(__name__)
class ContactRole(models.Model):
    """
    ContactRole is an intermediate model to bind Profiles as Contacts to Resources and apply roles.
    """
    resource = models.ForeignKey('ResourceBase')
    contact = models.ForeignKey(settings.AUTH_USER_MODEL)
    role = models.CharField(choices=ROLE_VALUES, max_length=255, help_text=_('function performed by the responsible '
                                                                             'party'))

    def clean(self):
        """
        Make sure there is only one poc and author per resource.

        Raises ValidationError when a second point-of-contact/metadata author
        is assigned, or when an unbound contact is linked to more than one
        resource.
        """
        if (self.role == self.resource.poc_role) or (self.role == self.resource.metadata_author_role):
            contacts = self.resource.contacts.filter(contactrole__role=self.role)
            if contacts.count() == 1:
                # only allow this if we are updating the same contact
                if self.contact != contacts.get():
                    raise ValidationError('There can be only one %s for a given resource' % self.role)
        # NOTE(review): assumes the user model exposes a '.user' attribute
        # distinguishing bound from unbound contacts -- confirm.
        if self.contact.user is None:
            # verify that any unbound contact is only associated to one resource
            bounds = ContactRole.objects.filter(contact=self.contact).count()
            if bounds > 1:
                # BUG FIX: the original message had no '%s' placeholder, so
                # the '% self.role' formatting raised TypeError instead of
                # the intended ValidationError.
                raise ValidationError(
                    'There can be one and only one resource linked to an unbound contact (role: %s)' % self.role)
            elif bounds == 1:
                # verify that if there was one already, it corresponds to this instance
                if ContactRole.objects.filter(contact=self.contact).get().id != self.id:
                    raise ValidationError(
                        'There can be one and only one resource linked to an unbound contact (role: %s)' % self.role)

    class Meta:
        unique_together = (("contact", "resource", "role"),)
class TopicCategory(models.Model):
    """
    Metadata about high-level geographic data thematic classification.
    It should reflect a list of codes from TC211
    See: http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml
    <CodeListDictionary gml:id="MD_MD_TopicCategoryCode">
    """
    # ISO topic category code, e.g. 'location'
    identifier = models.CharField(max_length=255, default='location')
    # description text taken from the ISO codelist
    description = models.TextField(default='')
    # human-friendly description shown in the GeoNode UI
    gn_description = models.TextField('GeoNode description', default='', null=True)
    # whether this category is offered as a choice in metadata forms
    is_choice = models.BooleanField(default=True)

    def __unicode__(self):
        # display the UI-facing description
        return u"{0}".format(self.gn_description)

    class Meta:
        ordering = ("identifier",)
        verbose_name_plural = 'Metadata Topic Categories'
class SpatialRepresentationType(models.Model):
    """
    Metadata information about the spatial representation type.
    It should reflect a list of codes from TC211
    See: http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml
    <CodeListDictionary gml:id="MD_SpatialRepresentationTypeCode">
    """
    # ISO code for the representation type (fixed; not user-editable)
    identifier = models.CharField(max_length=255, editable=False)
    # ISO codelist description (fixed; not user-editable)
    description = models.CharField(max_length=255, editable=False)
    # human-friendly description shown in the GeoNode UI
    gn_description = models.CharField('GeoNode description', max_length=255)
    # whether this type is offered as a choice in metadata forms
    is_choice = models.BooleanField(default=True)

    def __unicode__(self):
        return self.gn_description

    class Meta:
        ordering = ("identifier",)
        verbose_name_plural = 'Metadata Spatial Representation Types'
class RegionManager(models.Manager):
    """Manager providing natural-key lookup (by unique code) for Region."""
    def get_by_natural_key(self, code):
        # 'code' is unique on Region, so it serves as the natural key
        return self.get(code=code)
class Region(MPTTModel):
    """Hierarchical (MPTT tree) geographic region used in metadata."""
    # objects = RegionManager()
    # unique region code (natural key)
    code = models.CharField(max_length=50, unique=True)
    name = models.CharField(max_length=255)
    # tree parent; NULL for top-level regions
    parent = TreeForeignKey('self', null=True, blank=True, related_name='children')

    def __unicode__(self):
        return self.name

    class Meta:
        ordering = ("name",)
        verbose_name_plural = 'Metadata Regions'

    class MPTTMeta:
        # keep siblings alphabetically ordered within the tree
        order_insertion_by = ['name']
class RestrictionCodeType(models.Model):
    """
    Metadata information about the spatial representation type.
    It should reflect a list of codes from TC211
    See: http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml
    <CodeListDictionary gml:id="MD_RestrictionCode">
    """
    # ISO restriction code (fixed; not user-editable)
    identifier = models.CharField(max_length=255, editable=False)
    # ISO codelist description (fixed; not user-editable)
    description = models.TextField(max_length=255, editable=False)
    # human-friendly description shown in the GeoNode UI
    gn_description = models.TextField('GeoNode description', max_length=255)
    # whether this restriction is offered as a choice in metadata forms
    is_choice = models.BooleanField(default=True)

    def __unicode__(self):
        return self.gn_description

    class Meta:
        ordering = ("identifier",)
        verbose_name_plural = 'Metadata Restriction Code Types'
class License(models.Model):
    """Dataset license, with optional abbreviation, URL and full text."""
    identifier = models.CharField(max_length=255, editable=False)
    name = models.CharField(max_length=100)
    abbreviation = models.CharField(max_length=20, null=True, blank=True)
    description = models.TextField(null=True, blank=True)
    url = models.URLField(max_length=2000, null=True, blank=True)
    license_text = models.TextField(null=True, blank=True)

    def __unicode__(self):
        return self.name

    @property
    def name_long(self):
        """Name, suffixed with the abbreviation in parentheses when present."""
        if self.abbreviation is None or len(self.abbreviation) == 0:
            return self.name
        return self.name + " (" + self.abbreviation + ")"

    @property
    def description_bullets(self):
        """Description split into '+ '-prefixed bullet lines ('' when empty)."""
        if self.description is None or len(self.description) == 0:
            return ""
        return ["+ " + line for line in self.description.split("\n")]

    class Meta:
        ordering = ("name", )
        verbose_name_plural = 'Licenses'
class ResourceBaseManager(PolymorphicManager):
    """Manager for ResourceBase that returns non-polymorphic rows by default."""
    def admin_contact(self):
        # this assumes there is at least one superuser
        superusers = get_user_model().objects.filter(is_superuser=True).order_by('id')
        if superusers.count() == 0:
            raise RuntimeError('GeoNode needs at least one admin/superuser set')
        return superusers[0]

    def get_queryset(self):
        # default queryset: rows come back as plain ResourceBase instances
        return super(ResourceBaseManager, self).get_queryset().non_polymorphic()

    def polymorphic_queryset(self):
        # explicit opt-in: rows come back as their concrete subclasses
        return super(ResourceBaseManager, self).get_queryset()
class ResourceBase(PolymorphicModel, PermissionLevelMixin):
    """
    Base Resource Object loosely based on ISO 19115:2003
    """
    VALID_DATE_TYPES = [(x.lower(), _(x)) for x in ['Creation', 'Publication', 'Revision']]

    # Help texts shared between the model fields below and the metadata forms.
    date_help_text = _('reference date for the cited resource')
    date_type_help_text = _('identification of when a given event occurred')
    edition_help_text = _('version of the cited resource')
    abstract_help_text = _('brief narrative summary of the content of the resource(s)')
    purpose_help_text = _('summary of the intentions with which the resource(s) was developed')
    maintenance_frequency_help_text = _('frequency with which modifications and deletions are made to the data after '
                                        'it is first produced')
    keywords_help_text = _('commonly used word(s) or formalised word(s) or phrase(s) used to describe the subject '
                           '(space or comma-separated')
    regions_help_text = _('keyword identifies a location')
    restriction_code_type_help_text = _('limitation(s) placed upon the access or use of the data.')
    constraints_other_help_text = _('other restrictions and legal prerequisites for accessing and using the resource or'
                                    ' metadata')
    license_help_text = _('license of the dataset')
    language_help_text = _('language used within the dataset')
    category_help_text = _('high-level geographic data thematic classification to assist in the grouping and search of '
                           'available geographic data sets.')
    spatial_representation_type_help_text = _('method used to represent geographic information in the dataset.')
    temporal_extent_start_help_text = _('time period covered by the content of the dataset (start)')
    temporal_extent_end_help_text = _('time period covered by the content of the dataset (end)')
    distribution_url_help_text = _('information about on-line sources from which the dataset, specification, or '
                                   'community profile name and extended metadata elements can be obtained')
    distribution_description_help_text = _('detailed text description of what the online resource is/does')
    data_quality_statement_help_text = _('general explanation of the data producer\'s knowledge about the lineage of a'
                                         ' dataset')
    # internal fields
    uuid = models.CharField(max_length=36)
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, related_name='owned_resource',
                              verbose_name=_("Owner"))
    contacts = models.ManyToManyField(settings.AUTH_USER_MODEL, through='ContactRole')
    title = models.CharField(_('title'), max_length=255, help_text=_('name by which the cited resource is known'))
    date = models.DateTimeField(_('date'), default=datetime.datetime.now, help_text=date_help_text)
    date_type = models.CharField(_('date type'), max_length=255, choices=VALID_DATE_TYPES, default='publication',
                                 help_text=date_type_help_text)
    edition = models.CharField(_('edition'), max_length=255, blank=True, null=True, help_text=edition_help_text)
    abstract = models.TextField(_('abstract'), blank=True, help_text=abstract_help_text)
    purpose = models.TextField(_('purpose'), null=True, blank=True, help_text=purpose_help_text)
    maintenance_frequency = models.CharField(_('maintenance frequency'), max_length=255, choices=UPDATE_FREQUENCIES,
                                             blank=True, null=True, help_text=maintenance_frequency_help_text)
    keywords = TaggableManager(_('keywords'), blank=True, help_text=keywords_help_text)
    regions = models.ManyToManyField(Region, verbose_name=_('keywords region'), blank=True, null=True,
                                     help_text=regions_help_text)
    restriction_code_type = models.ForeignKey(RestrictionCodeType, verbose_name=_('restrictions'),
                                              help_text=restriction_code_type_help_text, null=True, blank=True,
                                              limit_choices_to=Q(is_choice=True))
    constraints_other = models.TextField(_('restrictions other'), blank=True, null=True,
                                         help_text=constraints_other_help_text)
    license = models.ForeignKey(License, null=True, blank=True,
                                verbose_name=_("License"),
                                help_text=license_help_text)
    language = models.CharField(_('language'), max_length=3, choices=ALL_LANGUAGES, default='eng',
                                help_text=language_help_text)
    category = models.ForeignKey(TopicCategory, null=True, blank=True, limit_choices_to=Q(is_choice=True),
                                 help_text=category_help_text)
    spatial_representation_type = models.ForeignKey(SpatialRepresentationType, null=True, blank=True,
                                                    limit_choices_to=Q(is_choice=True),
                                                    verbose_name=_("spatial representation type"),
                                                    help_text=spatial_representation_type_help_text)
    # Section 5
    temporal_extent_start = models.DateTimeField(_('temporal extent start'), blank=True, null=True,
                                                 help_text=temporal_extent_start_help_text)
    temporal_extent_end = models.DateTimeField(_('temporal extent end'), blank=True, null=True,
                                               help_text=temporal_extent_end_help_text)
    supplemental_information = models.TextField(_('supplemental information'), default=DEFAULT_SUPPLEMENTAL_INFORMATION,
                                                help_text=_('any other descriptive information about the dataset'))
    # Section 6
    distribution_url = models.TextField(_('distribution URL'), blank=True, null=True,
                                        help_text=distribution_url_help_text)
    distribution_description = models.TextField(_('distribution description'), blank=True, null=True,
                                                help_text=distribution_description_help_text)
    # Section 8
    data_quality_statement = models.TextField(_('data quality statement'), blank=True, null=True,
                                              help_text=data_quality_statement_help_text)
    # Section 9
    # see metadata_author property definition below
    # Save bbox values in the database.
    # This is useful for spatial searches and for generating thumbnail images and metadata records.
    bbox_x0 = models.DecimalField(max_digits=19, decimal_places=10, blank=True, null=True)
    bbox_x1 = models.DecimalField(max_digits=19, decimal_places=10, blank=True, null=True)
    bbox_y0 = models.DecimalField(max_digits=19, decimal_places=10, blank=True, null=True)
    bbox_y1 = models.DecimalField(max_digits=19, decimal_places=10, blank=True, null=True)
    srid = models.CharField(max_length=255, default='EPSG:4326')
    # CSW specific fields
    csw_typename = models.CharField(_('CSW typename'), max_length=32, default='gmd:MD_Metadata', null=False)
    csw_schema = models.CharField(_('CSW schema'),
                                  max_length=64,
                                  default='http://www.isotc211.org/2005/gmd',
                                  null=False)
    csw_mdsource = models.CharField(_('CSW source'), max_length=256, default='local', null=False)
    csw_insert_date = models.DateTimeField(_('CSW insert date'), auto_now_add=True, null=True)
    csw_type = models.CharField(_('CSW type'), max_length=32, default='dataset', null=False, choices=HIERARCHY_LEVELS)
    csw_anytext = models.TextField(_('CSW anytext'), null=True, blank=True)
    csw_wkt_geometry = models.TextField(_('CSW WKT geometry'),
                                        null=False,
                                        default='POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))')
    # metadata XML specific fields
    metadata_uploaded = models.BooleanField(default=False)
    metadata_xml = models.TextField(null=True,
                                    default='<gmd:MD_Metadata xmlns:gmd="http://www.isotc211.org/2005/gmd"/>',
                                    blank=True)
    popular_count = models.IntegerField(default=0)
    share_count = models.IntegerField(default=0)
    featured = models.BooleanField(_("Featured"), default=False,
                                   help_text=_('Should this resource be advertised in home page?'))
    is_published = models.BooleanField(_("Is Published"), default=True,
                                       help_text=_('Should this resource be published and searchable?'))
    # fields necessary for the apis
    thumbnail_url = models.TextField(null=True, blank=True)
    detail_url = models.CharField(max_length=255, null=True, blank=True)
    rating = models.IntegerField(default=0, null=True, blank=True)

    def __unicode__(self):
        return self.title

    @property
    def bbox(self):
        """Bounding box as [x0, y0, x1, y1, srid]."""
        return [self.bbox_x0, self.bbox_y0, self.bbox_x1, self.bbox_y1, self.srid]

    @property
    def bbox_string(self):
        """Bounding box as the comma-separated string "x0,y0,x1,y1"."""
        return ",".join([str(self.bbox_x0), str(self.bbox_y0), str(self.bbox_x1), str(self.bbox_y1)])

    @property
    def geographic_bounding_box(self):
        """Bounding box as a WKT polygon string."""
        return bbox_to_wkt(self.bbox_x0, self.bbox_x1, self.bbox_y0, self.bbox_y1, srid=self.srid)

    @property
    def license_light(self):
        """Short license label: "name (url)".

        NOTE(review): raises AttributeError when self.license is None
        (the FK is nullable) — confirm callers guard against that.
        """
        a = []
        if (not (self.license.name is None)) and (len(self.license.name) > 0):
            a.append(self.license.name)
        if (not (self.license.url is None)) and (len(self.license.url) > 0):
            a.append("("+self.license.url+")")
        return " ".join(a)

    @property
    def license_verbose(self):
        """Long license label: "name_long: description (url)".

        NOTE(review): like license_light, assumes self.license is not None.
        """
        a = []
        if (not (self.license.name_long is None)) and (len(self.license.name_long) > 0):
            a.append(self.license.name_long+":")
        if (not (self.license.description is None)) and (len(self.license.description) > 0):
            a.append(self.license.description)
        if (not (self.license.url is None)) and (len(self.license.url) > 0):
            a.append("("+self.license.url+")")
        return " ".join(a)

    def keyword_list(self):
        """Names of all attached keywords."""
        return [kw.name for kw in self.keywords.all()]

    def keyword_slug_list(self):
        """Slugs of all attached keywords."""
        return [kw.slug for kw in self.keywords.all()]

    def region_name_list(self):
        """Names of all attached regions."""
        return [region.name for region in self.regions.all()]

    def spatial_representation_type_string(self):
        """ISO identifier of the spatial representation type.

        Falls back to 'grid'/'vector' for layer-like subclasses that expose a
        storeType attribute, and to None otherwise.
        """
        if hasattr(self.spatial_representation_type, 'identifier'):
            return self.spatial_representation_type.identifier
        else:
            if hasattr(self, 'storeType'):
                if self.storeType == 'coverageStore':
                    return 'grid'
                return 'vector'
            else:
                return None

    @property
    def keyword_csv(self):
        """Keywords of the real (downcast) instance as a comma-separated string."""
        keywords_qs = self.get_real_instance().keywords.all()
        if keywords_qs:
            return ','.join([kw.name for kw in keywords_qs])
        else:
            return ''

    def set_latlon_bounds(self, box):
        """
        Set the four bounds in lat lon projection

        box is expected as (x0, x1, y0, y1).
        """
        self.bbox_x0 = box[0]
        self.bbox_x1 = box[1]
        self.bbox_y0 = box[2]
        self.bbox_y1 = box[3]

    def set_bounds_from_center_and_zoom(self, center_x, center_y, zoom):
        """
        Calculate zoom level and center coordinates in mercator.

        center_x/center_y are EPSG:3857 (web mercator) coordinates; the
        resulting bbox_* fields are in lat/lon degrees.
        """
        self.center_x = center_x
        self.center_y = center_y
        self.zoom = zoom
        # length in metres of one degree of longitude at the equator
        deg_len_equator = 40075160 / 360

        # covert center in lat lon
        def get_lon_lat():
            wgs84 = Proj(init='epsg:4326')
            mercator = Proj(init='epsg:3857')
            lon, lat = transform(mercator, wgs84, center_x, center_y)
            return lon, lat

        # calculate the degree length at this latitude
        def deg_len():
            lon, lat = get_lon_lat()
            # BUG FIX: lat is in degrees, math.cos expects radians
            return math.cos(math.radians(lat)) * deg_len_equator

        lon, lat = get_lon_lat()
        # taken from http://wiki.openstreetmap.org/wiki/Zoom_levels
        # it might be not precise but enough for the purpose
        # BUG FIX: the OSM formula uses cos(latitude) with latitude in radians
        distance_per_pixel = 40075160 * math.cos(math.radians(lat))/2**(zoom+8)
        # calculate the distance from the center of the map in degrees
        # we use the calculated degree length on the x axis and the
        # normal degree length on the y axis assumin that it does not change
        # Assuming a map of 1000 px of width and 700 px of height
        distance_x_degrees = distance_per_pixel * 500 / deg_len()
        distance_y_degrees = distance_per_pixel * 350 / deg_len_equator
        self.bbox_x0 = lon - distance_x_degrees
        self.bbox_x1 = lon + distance_x_degrees
        self.bbox_y0 = lat - distance_y_degrees
        self.bbox_y1 = lat + distance_y_degrees

    def set_bounds_from_bbox(self, bbox):
        """
        Calculate zoom level and center coordinates in mercator.

        bbox is (minx, miny, maxx, maxy) in lat/lon degrees.
        """
        self.set_latlon_bounds(bbox)
        minx, miny, maxx, maxy = [float(c) for c in bbox]
        x = (minx + maxx) / 2
        y = (miny + maxy) / 2
        (center_x, center_y) = forward_mercator((x, y))
        xdiff = maxx - minx
        ydiff = maxy - miny
        zoom = 0
        if xdiff > 0 and ydiff > 0:
            # zoom level at which the bbox just fits a 360-degree-wide view
            width_zoom = math.log(360 / xdiff, 2)
            height_zoom = math.log(360 / ydiff, 2)
            zoom = math.ceil(min(width_zoom, height_zoom))
        self.zoom = zoom
        self.center_x = center_x
        self.center_y = center_y

    def download_links(self):
        """assemble download links for pycsw"""
        links = []
        for url in self.link_set.all():
            if url.link_type == 'metadata':  # avoid recursion
                continue
            if url.link_type == 'html':
                links.append((self.title, 'Web address (URL)', 'WWW:LINK-1.0-http--link', url.url))
            elif url.link_type in ('OGC:WMS', 'OGC:WFS', 'OGC:WCS'):
                links.append((self.title, url.name, url.link_type, url.url))
            else:
                description = '%s (%s Format)' % (self.title, url.name)
                links.append((self.title, description, 'WWW:DOWNLOAD-1.0-http--download', url.url))
        return links

    def get_tiles_url(self):
        """Return URL for Z/Y/X mapping clients or None if it does not exist.
        """
        logger.debug('Get tiles url')
        try:
            tiles_link = self.link_set.get(name='Tiles')
        except Link.DoesNotExist:
            return None
        else:
            return tiles_link.url

    def get_legend(self):
        """Return Link for legend or None if it does not exist.
        """
        try:
            legends_link = self.link_set.get(name='Legend')
        except Link.DoesNotExist:
            return None
        else:
            return legends_link

    def get_legend_url(self):
        """Return URL for legend or None if it does not exist.
        The legend can be either an image (for Geoserver's WMS)
        or a JSON object for ArcGIS.
        """
        legend = self.get_legend()
        if legend is None:
            return None
        return legend.url

    def get_ows_url(self):
        """Return URL for OGC WMS server None if it does not exist.
        """
        try:
            ows_link = self.link_set.get(name='OGC:WMS')
        except Link.DoesNotExist:
            return None
        else:
            return ows_link.url

    def get_thumbnail_url(self):
        """Return a thumbnail url.
        It could be a local one if it exists, a remote one (WMS GetImage) for example
        or a 'Missing Thumbnail' one.
        """
        local_thumbnails = self.link_set.filter(name='Thumbnail')
        if local_thumbnails.count() > 0:
            return local_thumbnails[0].url
        remote_thumbnails = self.link_set.filter(name='Remote Thumbnail')
        if remote_thumbnails.count() > 0:
            return remote_thumbnails[0].url
        return staticfiles.static(settings.MISSING_THUMBNAIL)

    def has_thumbnail(self):
        """Determine if the thumbnail object exists and an image exists"""
        return self.link_set.filter(name='Thumbnail').exists()

    def save_thumbnail(self, filename, image):
        """Write *image* bytes under MEDIA_ROOT/thumbs/filename and register a
        'Thumbnail' Link pointing at the public URL."""
        thumb_folder = 'thumbs'
        upload_path = os.path.join(settings.MEDIA_ROOT, thumb_folder)
        if not os.path.exists(upload_path):
            os.makedirs(upload_path)
        with open(os.path.join(upload_path, filename), 'wb') as f:
            thumbnail = File(f)
            thumbnail.write(image)
        url_path = os.path.join(settings.MEDIA_URL, thumb_folder, filename).replace('\\', '/')
        url = urljoin(settings.SITEURL, url_path)
        Link.objects.get_or_create(resource=self,
                                   url=url,
                                   defaults=dict(
                                       name='Thumbnail',
                                       extension='png',
                                       mime='image/png',
                                       link_type='image',
                                   ))
        # update() bypasses save()/signals on purpose to avoid recursion
        ResourceBase.objects.filter(id=self.id).update(
            thumbnail_url=url
        )

    def set_missing_info(self):
        """Set default permissions and point of contacts.
        It is mandatory to call it from descendant classes
        but hard to enforce technically via signals or save overriding.
        """
        from guardian.models import UserObjectPermission
        logger.debug('Checking for permissions.')
        # True if at least one per-user permission has been assigned to this
        # resource. (Renamed: the old name 'no_custom_permissions' and its
        # comment stated the opposite of what the value holds.)
        has_custom_permissions = UserObjectPermission.objects.filter(
            content_type=ContentType.objects.get_for_model(self.get_self_resource()),
            object_pk=str(self.pk)
        ).exists()
        if not has_custom_permissions:
            logger.debug('There are no permissions for this object, setting default perms.')
            self.set_default_permissions()
        if self.owner:
            user = self.owner
        else:
            # NOTE(review): admin_contact() returns a user object; the extra
            # '.user' attribute access looks like a leftover from when it
            # returned a Profile — confirm against the user model in use.
            user = ResourceBase.objects.admin_contact().user
        if self.poc is None:
            self.poc = user
        if self.metadata_author is None:
            self.metadata_author = user

    def maintenance_frequency_title(self):
        """Title-cased label of the selected maintenance frequency.

        Raises IndexError if the stored value is not in UPDATE_FREQUENCIES.
        """
        return [v for v in UPDATE_FREQUENCIES if v[0] == self.maintenance_frequency][0][1].title()

    def language_title(self):
        """Title-cased label of the selected language.

        Raises IndexError if the stored value is not in ALL_LANGUAGES.
        """
        return [v for v in ALL_LANGUAGES if v[0] == self.language][0][1].title()

    def _set_poc(self, poc):
        # reset any poc assignation to this resource
        ContactRole.objects.filter(role='pointOfContact', resource=self).delete()
        # create the new assignation
        ContactRole.objects.create(role='pointOfContact', resource=self, contact=poc)

    def _get_poc(self):
        try:
            the_poc = ContactRole.objects.get(role='pointOfContact', resource=self).contact
        except ContactRole.DoesNotExist:
            the_poc = None
        return the_poc

    # point of contact, stored as a ContactRole row rather than a field
    poc = property(_get_poc, _set_poc)

    def _set_metadata_author(self, metadata_author):
        # reset any metadata_author assignation to this resource
        ContactRole.objects.filter(role='author', resource=self).delete()
        # create the new assignation
        ContactRole.objects.create(role='author', resource=self, contact=metadata_author)

    def _get_metadata_author(self):
        try:
            the_ma = ContactRole.objects.get(role='author', resource=self).contact
        except ContactRole.DoesNotExist:
            the_ma = None
        return the_ma

    # metadata author, stored as a ContactRole row rather than a field
    metadata_author = property(_get_metadata_author, _set_metadata_author)

    objects = ResourceBaseManager()

    class Meta:
        # custom permissions,
        # add, change and delete are standard in django-guardian
        permissions = (
            ('view_resourcebase', 'Can view resource'),
            ('change_resourcebase_permissions', 'Can change resource permissions'),
            ('download_resourcebase', 'Can download resource'),
            ('publish_resourcebase', 'Can publish resource'),
            ('change_resourcebase_metadata', 'Can change resource metadata'),
        )
class LinkManager(models.Manager):
    """Helper class to access links grouped by type
    """
    # Consistency/deprecation fix: get_query_set() was deprecated in Django 1.6
    # and removed in 1.8; geogig()/ows() below (and ResourceBaseManager) already
    # use get_queryset(), so all methods now do.

    def data(self):
        """WFS/WCS links giving access to raw data."""
        return self.get_queryset().filter(link_type='data')

    def image(self):
        """WMS/TMS style image links."""
        return self.get_queryset().filter(link_type='image')

    def download(self):
        """Everything a user can download: image and data links."""
        return self.get_queryset().filter(link_type__in=['image', 'data'])

    def metadata(self):
        """CSW metadata links."""
        return self.get_queryset().filter(link_type='metadata')

    def original(self):
        """Links to the originally uploaded files."""
        return self.get_queryset().filter(link_type='original')

    def geogig(self):
        """Links whose name mentions geogig."""
        return self.get_queryset().filter(name__icontains='geogig')

    def ows(self):
        """OGC service endpoint links (WMS/WFS/WCS)."""
        return self.get_queryset().filter(link_type__in=['OGC:WMS', 'OGC:WFS', 'OGC:WCS'])
class Link(models.Model):
    """Auxiliary model for storing links for resources.
    This helps avoiding the need for runtime lookups
    to the OWS server or the CSW Catalogue.
    There are several types of links:
    * original: For uploaded files (Shapefiles or GeoTIFFs)
    * data: For WFS and WCS links that allow access to raw data
    * image: For WMS and TMS links
    * metadata: For CSW links
    * OGC:WMS: for WMS service links
    * OGC:WFS: for WFS service links
    * OGC:WCS: for WCS service links
    """
    resource = models.ForeignKey(ResourceBase)
    # e.g. "kml"; file extension of the linked representation
    extension = models.CharField(max_length=255, help_text=_('For example "kml"'))
    link_type = models.CharField(max_length=255, choices=[(x, x) for x in LINK_TYPES])
    # human readable label, e.g. "View in Google Earth"
    name = models.CharField(max_length=255, help_text=_('For example "View in Google Earth"'))
    mime = models.CharField(max_length=255, help_text=_('For example "text/xml"'))
    url = models.TextField(max_length=1000)
    objects = LinkManager()

    def __str__(self):
        return '%s link' % self.link_type
def resourcebase_post_save(instance, *args, **kwargs):
    """
    Used to fill any additional fields after the save.
    Has to be called by the children
    """
    # update() (not save()) so we do not re-trigger post_save handlers.
    ResourceBase.objects.filter(id=instance.id).update(
        thumbnail_url=instance.get_thumbnail_url(),
        detail_url=instance.get_absolute_url(),
        csw_insert_date=datetime.datetime.now())
    instance.set_missing_info()
    # we need to remove stale links
    site_hostname = urlsplit(settings.SITEURL).hostname
    for link in instance.link_set.all():
        if link.name == "External Document":
            # external docs are stale once the resource's doc_url moved on
            stale = link.resource.doc_url != link.url
        else:
            # everything else must point back at this site
            stale = site_hostname not in link.url
        if stale:
            link.delete()
def rating_post_save(instance, *args, **kwargs):
    """
    Used to fill the average rating field on OverallRating change.
    """
    target = ResourceBase.objects.filter(id=instance.object_id)
    target.update(rating=instance.rating)
# Keep ResourceBase.rating in sync whenever an OverallRating is saved.
signals.post_save.connect(rating_post_save, sender=OverallRating)
| ismailsunni/geonode | geonode/base/models.py | Python | gpl-3.0 | 30,817 |
import inspect
import re
def _hook_add(func, add, name=''):
    """Attach hook registration metadata to *func* (Python 2 only: relies on
    func.func_code and the old inspect.getargspec keyword names).

    Appends *add* to func._hook and, on first registration, records the
    source filename, the argument signature summary (func._args) and the
    threading flag (func._thread). *name* ('command', 'event', 'regex', ...)
    enables the one-non-keyword-argument signature check.
    """
    if not hasattr(func, '_hook'):
        func._hook = []
    func._hook.append(add)
    if not hasattr(func, '_filename'):
        func._filename = func.func_code.co_filename
    if not hasattr(func, '_args'):
        argspec = inspect.getargspec(func)
        if name:
            # count mandatory positional arguments: total minus defaults,
            # minus one each for **kwargs and *args if present
            n_args = len(argspec.args)
            if argspec.defaults:
                n_args -= len(argspec.defaults)
            if argspec.keywords:
                n_args -= 1
            if argspec.varargs:
                n_args -= 1
            if n_args != 1:
                err = '%ss must take 1 non-keyword argument (%s)' % (name,
                                                                     func.__name__)
                raise ValueError(err)
        args = []
        if argspec.defaults:
            # names of the defaulted args, excluding trailing *args/**kwargs
            # slots (the 'end if end else None' slice drops that many names)
            end = bool(argspec.keywords) + bool(argspec.varargs)
            args.extend(argspec.args[-len(argspec.defaults):
                        end if end else None])
        if argspec.keywords:
            args.append(0)  # means kwargs present
        func._args = args
    if not hasattr(func, '_thread'):  # does function run in its own thread?
        func._thread = False
def sieve(func):
    """Register *func* as a sieve hook (a filter run over incoming input).

    The function must accept exactly (bot, input, func, type, args).
    """
    required_argcount = 5
    if func.func_code.co_argcount != required_argcount:
        raise ValueError(
            'sieves must take 5 arguments: (bot, input, func, type, args)')
    _hook_add(func, ['sieve', (func,)])
    return func
def command(arg=None, **kwargs):
    """Decorator registering a command hook.

    Supports both bare usage (``@command``) and parametrised usage
    (``@command('name')`` / ``@command(name='name', ...)``).
    """
    args = {}

    def command_wrapper(func):
        args.setdefault('name', func.func_name)
        _hook_add(func, ['command', (func, args)], 'command')
        return func

    # Bare usage: @command directly on the function, no arguments given.
    used_bare = not kwargs and inspect.isfunction(arg)
    if used_bare:
        return command_wrapper(arg)

    # Parametrised usage: *arg* (if any) is the command name.
    if arg is not None:
        args['name'] = arg
    args.update(kwargs)
    return command_wrapper
def event(arg=None, **kwargs):
    """Decorator registering an event hook.

    Supports both bare usage (``@event``, matching every event via '*') and
    parametrised usage (``@event('join part')``), where *arg* is a
    space-separated list of event names.
    """
    args = kwargs

    def event_wrapper(func):
        args['name'] = func.func_name
        args.setdefault('events', ['*'])
        _hook_add(func, ['event', (func, args)], 'event')
        return func

    if inspect.isfunction(arg):
        # Bare usage: *arg* is the hooked function itself.
        # BUG FIX: event_wrapper takes a single argument; the previous
        # ``event_wrapper(arg, kwargs)`` raised TypeError for @event.
        return event_wrapper(arg)
    else:
        if arg is not None:
            args['events'] = arg.split()
        return event_wrapper
def singlethread(func):
    """Mark *func* so the plugin loader runs it in its own dedicated thread."""
    setattr(func, '_thread', True)
    return func
def api_key(key):
    """Decorator factory that tags a hook function with the API key *key*."""
    def annotate(func):
        setattr(func, '_apikey', key)
        return func
    return annotate
def regex(regex, flags=0, **kwargs):
    """Decorator factory registering a hook triggered by a regex match.

    Must be called with a pattern: ``@regex(r'...')``; using it bare as
    ``@regex`` is an error.
    """
    if inspect.isfunction(regex):
        # caller wrote @regex with no pattern — refuse early
        raise ValueError("regex decorators require a regex to match against")

    args = kwargs

    def regex_wrapper(func):
        args['name'] = func.func_name
        args['regex'] = regex
        args['re'] = re.compile(regex, flags)
        _hook_add(func, ['regex', (func, args)], 'regex')
        return func

    return regex_wrapper
| Jeebeevee/DouweBot_JJ15 | plugins_org/util/hook.py | Python | unlicense | 2,904 |
import sys
import os.path
ROOT_DIR = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), os.pardir))
sys.path.append(ROOT_DIR)
import pausable_unittest
import testpauser
import time
import logging
class SampleTest(pausable_unittest.TestCase):
    """Self-tests for pausable_unittest: each reboot()/exec_for_reboot()
    pauses the test run, restarts, and resumes where it left off."""

    def test_reboot(self):
        # A reboot through testpauser.Pauser should take roughly 1 second
        # (plus a small margin) — verify the pause/resume round-trip time.
        start = time.time()
        self.reboot()
        end = time.time()
        margin = 3
        self.assertTrue(start + 0.75 < end, "start + 0.75 should be less than end.")
        self.assertTrue(start + 1 + margin > end, "start + 1 + margin should be more than end.")

    def test_chdir(self):
        # The current working directory must survive a reboot, both in the
        # starting directory and after chdir to the parent.
        dir1 = os.path.abspath(os.getcwd())
        self.reboot()
        dir2 = os.path.abspath(os.getcwd())
        # self.assertEqual(dir1, dir2)
        self.assertTrue(dir1 == dir2)  # Keep independency of the test code.
        os.chdir(os.path.pardir)
        dir3 = os.path.abspath(os.getcwd())
        self.reboot()
        dir4 = os.path.abspath(os.getcwd())
        # self.assertEqual(dir3, dir4)
        self.assertTrue(dir3 == dir4)  # Keep independency of the test code
        os.chdir(dir1)

    def test_version(self):
        # Do not depend on __version__ string content.
        self.assertTrue(isinstance(pausable_unittest.__version__, str), "__version__ should be string")

    def test_options(self):
        # In debug mode (see __main__ below) assertion_log is True and
        # options contains {"test": True}; both default to False otherwise.
        self.assertEqual(self.assertion_log, self.options.get("test", False))

    def test_exec_for_reboot(self):
        # Run a platform-appropriate echo command through exec_for_reboot
        # three times and check each round-trip takes about 1 second.
        for i in range(3):
            start = time.time()
            if sys.platform == "win32":
                self.exec_for_reboot("cmd /c echo test_exec_for_reboot %d" % i)
            else:
                self.exec_for_reboot("bash -c 'echo test_exec_for_reboot %d'" % i)
            end = time.time()
            margin = 3
            self.assertTrue(start + 0.75 < end, "start + 0.75 should be less than end." )
            self.assertTrue(start + 1 + margin > end, "start + 1 + margin should be more than end.")

    def test_assert_raises(self):
        # assertRaises must work as a context manager across a reboot and as
        # a direct callable-invoking assertion.
        num = 1
        with self.assertRaises(ZeroDivisionError, "msg") as cm:
            self.reboot()
            1 / (num - 1)
        self.assertEqual(type(cm.exception), ZeroDivisionError)
        self.assertRaises(ZeroDivisionError, lambda x: 1 / x, 0)

    def test_msg_repr_max_length(self):
        # Long operands exercise the framework's message-truncation logic.
        self.assertEqual("a" * 101, "a" * 101)
if __name__ == "__main__":
    # "debug" on the command line turns on verbose logging, assertion logging,
    # and the {"test": True} marker option that test_options checks for.
    if len(sys.argv) >= 2 and sys.argv[1] == "debug":
        pausable_unittest.main(testpauser.Pauser(), loglevel=logging.DEBUG,
                               assertion_log=True, options={"test": True})
    else:
        pausable_unittest.main(testpauser.Pauser())
| masamitsu-murase/pausable_unittest | test/test.py | Python | mit | 2,724 |
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the ``family`` property of
    ``contourcarpet.contours.labelfont``."""

    def __init__(
        self,
        plotly_name="family",
        parent_name="contourcarpet.contours.labelfont",
        **kwargs
    ):
        # Defaults used unless the caller supplied an explicit override.
        validator_defaults = {
            "edit_type": "plot",
            "no_blank": True,
            "role": "style",
            "strict": True,
        }
        for option, default in validator_defaults.items():
            kwargs.setdefault(option, default)
        super(FamilyValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
| plotly/python-api | packages/python/plotly/plotly/validators/contourcarpet/contours/labelfont/_family.py | Python | mit | 600 |
import codecs
import logging
import json
import os
import unicodedata
import duplicateHash
class classifications:
    """Looks up noun phrases against gdbm-backed classification word lists,
    caching hits/misses in a duplicateHash and collecting matches."""

    # Return codes for isMatch()/determineExistanceInClassifyList callers.
    _FALSE = 0
    _MATCH = 1

    def __init__(self, gdbm_files, filter_file, category):
        # gdbm_files: list of [path, gdbm_obj] pairs to search.
        # filter_file: dbm of words to ignore (used only for "geography").
        # category: classification category name, e.g. "geography".
        self.category = category
        self.duplicate_hash = duplicateHash.duplicateHash()
        self.filter_file = filter_file
        self.gdbm_files = gdbm_files
        # Maps Unicode curly quotes and em-dash to ASCII equivalents.
        self.punctuation = {0x2018:0x27, 0x2019:0x27, 0x201C:0x22, 0x201D:0x22, 0x2014:0x2D}
        self.result_stack = []
        self.edit_hash = {}
        return

    # Checks if the noun phrase is found in the classification word list. Returns the values corresponding to the noun phrase if the word is found.
    # Otherwise, it returns an empty list. Also records the outcome in the duplicate-word cache. (Private Function)
    def determineExistanceInClassifyList(self, tag_word):
        log = logging.getLogger('classify')
        log.debug("classifications.determineExistanceInClassifyList()")
        log.debug("tag word = %s" % tag_word)
        matches = self.getValueFromKeyInClassifyList(tag_word)
        # Cache the result so repeated words skip the gdbm lookup next time.
        if matches:
            key = 'match.' + tag_word
            value = 'MATCH:1'
        else:
            key = 'false.' + tag_word
            value = 'FALSE:1'
        self.duplicate_hash.addWord(key, value)
        return matches

    # Returns the various other ambiguous answers that could occur from a single classification word entry
    def getEditHash(self):
        return self.edit_hash

    # Returns the phrases that match an entry in the classification word list
    def getResultStack(self):
        return self.result_stack

    # Returns the value from a given key. (Public Function)
    def getValueFromKeyInClassifyList(self, key):
        log = logging.getLogger('classify')
        log.debug("classifications.getValueFromKeyInClassifyList()")
        # gdbm_files contains a list of gdbm_file objects that contain [path, gdbm_obj]
        matches = []
        for gdbm_obj in self.gdbm_files:
            if key in gdbm_obj[1]:
                log.debug("key = %s" % key)
                # Ignore matches that are just numbers
                if key.isdigit():
                    log.debug("key contains only digits = %s" % key)
                    continue
                # Ignore matches that are found in the filter dbm file
                if self.category == "geography" and key in self.filter_file:
                    log.debug("key is in filter dbm = %s" % key)
                    continue
                # The key maps to a JSON list of element ids; each id maps to
                # the JSON-encoded match records themselves.
                values = json.loads(gdbm_obj[1][key])
                log.debug("values = %s" % values)
                for element in values:
                    log.debug("element = %s" % element)
                    element_value = json.loads(gdbm_obj[1][str(element)])
                    #log.debug("element value = %s" % element_value)
                    # Adds the category search path that the match was found under
                    # NOTE(review): this walk assumes a "gdbm" component exists
                    # in the file path; otherwise the while loop never ends.
                    path = gdbm_obj[0]
                    tail = ""
                    category = []
                    while tail != "gdbm":
                        (path, tail) = os.path.split(path)
                        category.append(tail)
                    # Drop the empty first split result and the "gdbm" component.
                    category = category[1:len(category)-1]
                    log.debug("category = %s" % category)
                    category_phrase = ""
                    for word in category:
                        category_phrase += word + "."
                    category_phrase = category_phrase[:len(category_phrase)-1] # removes the last dot from the end
                    log.debug("category phrase = %s" % category_phrase)
                    for obj in element_value:
                        obj["category"] = category_phrase
                    #log.debug("new element value = %s" % element_value)
                    matches.append(element_value)
        return matches

    # Determines if the noun phrase is found in the classification word list. If the word is a duplicate (has already been seen before), then it
    # updates that word in the duplicate words hash. Otherwise it looks into the classification word list to see if that word is a match.
    # Returns 1 if the tagged word is a match. Otherwise, it returns a 0. (Public Function)
    def isMatch(self, tag_word):
        log = logging.getLogger('classify')
        log.debug("classifications.isMatch()")
        # Geography matching only considers capitalised phrases (proper nouns).
        if self.category == "geography" and tag_word[0].isupper() == False:
            return classifications._FALSE
        tag_words = []
        # Converts Unicode Punctuation to ASCII equivalent - NEEDS MORE
        tag_word1 = tag_word.translate(self.punctuation).encode('ascii', 'ignore').rstrip().lstrip()
        log.debug("tag word translate = %s" % tag_word1)
        tag_words.append(tag_word1)
        # Converts Unicode to ASCII equivalent - If no equivalent is found, it ignores the unicode
        tag_word2 = unicodedata.normalize('NFKD', tag_word).encode('ascii', 'ignore').rstrip().lstrip()
        log.debug("tag word normalize = %s" % tag_word2)
        if tag_word1 != tag_word2:
            log.debug("tag word 1 != tag word 2")
            tag_words.append(tag_word2)
        log.debug("tag words = %s" % tag_words)
        match = classifications._FALSE
        for tag_word in tag_words:
            tag_word = tag_word.lower() # Forces the tag word to all lowercase
            # Ignore punctuation
            if tag_word == ".":
                continue
            # checkPossibleMatch: 0 = known miss, >0 = known hit,
            # otherwise (presumably negative) the word is new — confirm
            # against duplicateHash's API.
            duplicate_result = self.duplicate_hash.checkPossibleMatch(tag_word)
            if duplicate_result == 0: # False tag found
                continue
            elif duplicate_result > 0: # Match tag found
                list_values = self.getValueFromKeyInClassifyList(tag_word)
                #log.debug("list values = %s" % list_values)
                self.populateResults(tag_word, list_values)
                match = classifications._MATCH
                continue
            # Word has not yet been encountered
            list_values = self.determineExistanceInClassifyList(tag_word)
            #log.debug("list_values - isMatch = %s" % list_values)
            if not list_values:
                continue
            self.populateResults(tag_word, list_values)
            match = classifications._MATCH
        return match

    # Populates the result and edit hashes with words that match the search criteria and their corresponding values
    def populateResults(self, tag_word, list_values):
        log = logging.getLogger('classify')
        log.debug("classifications.populateResults()")
        log.debug("tag_word = %s" % tag_word)
        #log.debug("list_values = %s" % list_values)
        # Add the "match" field to the list_values. The "match" field is the actual tag word that was found in the text
        for key_dict in list_values:
            for option in key_dict:
                option["match"] = tag_word
        # result_stack keeps only the first candidate for the word; edit_hash
        # keeps the first candidate of every entry for later disambiguation.
        result_hash = {}
        result_hash[tag_word] = list_values[0][0]
        self.result_stack.append(result_hash)
        edit_values = []
        for element in list_values:
            edit_values.append(element[0])
        self.edit_hash[tag_word] = edit_values
        return
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import print_function, unicode_literals
import hashlib
import os
import re
import subprocess
import sys
import urllib2
from distutils.version import LooseVersion
from mozboot import rust
# ---------------------------------------------------------------------------
# User-facing message templates. The %s / %(name)s placeholders are filled
# in by the ensure_*_modern() helpers in BaseBootstrapper below.
# ---------------------------------------------------------------------------
# Shown when `hg` cannot be found on the current PATH at all.
NO_MERCURIAL = '''
Could not find Mercurial (hg) in the current shell's path. Try starting a new
shell and running the bootstrapper again.
'''
# Interpolated with (installed version, minimum recommended version).
MERCURIAL_UNABLE_UPGRADE = '''
You are currently running Mercurial %s. Running %s or newer is
recommended for performance and stability reasons.
Unfortunately, this bootstrapper currently does not know how to automatically
upgrade Mercurial on your machine.
You can usually install Mercurial through your package manager or by
downloading a package from http://mercurial.selenic.com/.
'''
# Interpolated with (minimum recommended version, version still installed).
MERCURIAL_UPGRADE_FAILED = '''
We attempted to upgrade Mercurial to a modern version (%s or newer).
However, you appear to have version %s still.
It's possible your package manager doesn't support a modern version of
Mercurial. It's also possible Mercurial is not being installed in the search
path for this shell. Try creating a new shell and run this bootstrapper again.
If it continues to fail, consider installing Mercurial by following the
instructions at http://mercurial.selenic.com/.
'''
# Interpolated with (installed version, minimum required version).
PYTHON_UNABLE_UPGRADE = '''
You are currently running Python %s. Running %s or newer (but
not 3.x) is required.
Unfortunately, this bootstrapper does not currently know how to automatically
upgrade Python on your machine.
Please search the Internet for how to upgrade your Python and try running this
bootstrapper again to ensure your machine is up to date.
'''
# Interpolated with (minimum required version, version still installed).
PYTHON_UPGRADE_FAILED = '''
We attempted to upgrade Python to a modern version (%s or newer).
However, you appear to still have version %s.
It's possible your package manager doesn't yet expose a modern version of
Python. It's also possible Python is not being installed in the search path for
this shell. Try creating a new shell and run this bootstrapper again.
If this continues to fail and you are sure you have a modern Python on your
system, ensure it is on the $PATH and try again. If that fails, you'll need to
install Python manually and ensure the path with the python binary is listed in
the $PATH environment variable.
We recommend the following tools for installing Python:
pyenv -- https://github.com/yyuu/pyenv)
pythonz -- https://github.com/saghul/pythonz
official installers -- http://www.python.org/
'''
# Interpolated with a dict providing 'cargo_bin' and 'cmd'
# (see BaseBootstrapper.print_rust_path_advice()).
RUST_INSTALL_COMPLETE = '''
Rust installation complete. You should now have rustc and cargo
in %(cargo_bin)s
The installer tries to add these to your default shell PATH, so
restarting your shell and running this script again may work.
If it doesn't, you'll need to add the new command location
manually.
If restarting doesn't work, edit your shell initialization
script, which may be called ~/.bashrc or ~/.bash_profile or
~/.profile, and add the following line:
%(cmd)s
Then restart your shell and run the bootstrap script again.
'''
# Interpolated with a dict providing 'cargo_bin' and 'cmd'.
RUST_NOT_IN_PATH = '''
You have some rust files in %(cargo_bin)s
but they're not part of this shell's PATH.
To add these to the PATH, edit your shell initialization
script, which may be called ~/.bashrc or ~/.bash_profile or
~/.profile, and add the following line:
%(cmd)s
Then restart your shell and run the bootstrap script again.
'''
# Shown when `rustup --version` output cannot be parsed.
RUSTUP_OLD = '''
We found an executable called `rustup` which we normally use to install
and upgrade Rust programming language support, but we didn't understand
its output. It may be an old version, or not be the installer from
https://rustup.rs/
Please move it out of the way and run the bootstrap script again.
Or if you prefer and know how, use the current rustup to install
a compatible version of the Rust programming language yourself.
'''
# Interpolated with (minimum required version, version still installed).
RUST_UPGRADE_FAILED = '''
We attempted to upgrade Rust to a modern version (%s or newer).
However, you appear to still have version %s.
It's possible rustup failed. It's also possible the new Rust is not being
installed in the search path for this shell. Try creating a new shell and
run this bootstrapper again.
If this continues to fail and you are sure you have a modern Rust on your
system, ensure it is on the $PATH and try again. If that fails, you'll need to
install Rust manually.
We recommend the installer from https://rustup.rs/ for installing Rust,
but you may be able to get a recent enough version from a software install
tool or package manager on your system, or directly from https://rust-lang.org/
'''
# mozconfig snippet suggested for artifact (pre-built binary) builds.
BROWSER_ARTIFACT_MODE_MOZCONFIG = '''
Paste the lines between the chevrons (>>> and <<<) into your mozconfig file:
<<<
# Automatically download and use compiled C++ components:
ac_add_options --enable-artifact-builds
>>>
'''
# Upgrade Mercurial older than this.
# This should match OLDEST_NON_LEGACY_VERSION from
# the hg setup wizard in version-control-tools.
MODERN_MERCURIAL_VERSION = LooseVersion('3.7.3')
# Upgrade Python older than this.
MODERN_PYTHON_VERSION = LooseVersion('2.7.3')
# Upgrade rust older than this.
MODERN_RUST_VERSION = LooseVersion('1.13.0')
class BaseBootstrapper(object):
    """Base class for system bootstrappers.

    Platform-specific bootstrappers subclass this and implement the
    install_*_packages() hooks. This base class provides package-manager
    wrappers (dnf/yum and apt), executable lookup, version probing, and
    the Mercurial/Python/Rust modernity checks.
    """

    def __init__(self, no_interactive=False):
        # Becomes True once _update_package_manager() has run, so the
        # (potentially slow) metadata refresh only happens once per run.
        self.package_manager_updated = False
        # When True, "-y" style flags are passed to package managers.
        self.no_interactive = no_interactive

    def install_system_packages(self):
        '''
        Install packages shared by all applications. These are usually
        packages required by the development (like mercurial) or the
        build system (like autoconf).
        '''
        raise NotImplementedError('%s must implement install_system_packages()' %
                                  __name__)

    def install_browser_packages(self):
        '''
        Install packages required to build Firefox for Desktop (application
        'browser').
        '''
        raise NotImplementedError('Cannot bootstrap Firefox for Desktop: '
                                  '%s does not yet implement install_browser_packages()' %
                                  __name__)

    def suggest_browser_mozconfig(self):
        '''
        Print a message to the console detailing what the user's mozconfig
        should contain.
        Firefox for Desktop can in simple cases determine its build environment
        entirely from configure.
        '''
        pass

    def install_browser_artifact_mode_packages(self):
        '''
        Install packages required to build Firefox for Desktop (application
        'browser') in Artifact Mode.
        '''
        raise NotImplementedError(
            'Cannot bootstrap Firefox for Desktop Artifact Mode: '
            '%s does not yet implement install_browser_artifact_mode_packages()' %
            __name__)

    def suggest_browser_artifact_mode_mozconfig(self):
        '''
        Print a message to the console detailing what the user's mozconfig
        should contain.
        Firefox for Desktop Artifact Mode needs to enable artifact builds and
        a path where the build artifacts will be written to.
        '''
        print(BROWSER_ARTIFACT_MODE_MOZCONFIG)

    def install_mobile_android_packages(self):
        '''
        Install packages required to build Firefox for Android (application
        'mobile/android', also known as Fennec).
        '''
        raise NotImplementedError('Cannot bootstrap Firefox for Android: '
                                  '%s does not yet implement install_mobile_android_packages()'
                                  % __name__)

    def suggest_mobile_android_mozconfig(self):
        '''
        Print a message to the console detailing what the user's mozconfig
        should contain.
        Firefox for Android needs an application and an ABI set, and it needs
        paths to the Android SDK and NDK.
        '''
        raise NotImplementedError('%s does not yet implement suggest_mobile_android_mozconfig()' %
                                  __name__)

    def install_mobile_android_artifact_mode_packages(self):
        '''
        Install packages required to build Firefox for Android (application
        'mobile/android', also known as Fennec) in Artifact Mode.
        '''
        raise NotImplementedError(
            'Cannot bootstrap Firefox for Android Artifact Mode: '
            '%s does not yet implement install_mobile_android_artifact_mode_packages()'
            % __name__)

    def suggest_mobile_android_artifact_mode_mozconfig(self):
        '''
        Print a message to the console detailing what the user's mozconfig
        should contain.
        Firefox for Android Artifact Mode needs an application and an ABI set,
        and it needs paths to the Android SDK.
        '''
        raise NotImplementedError(
            '%s does not yet implement suggest_mobile_android_artifact_mode_mozconfig()'
            % __name__)

    def which(self, name):
        """Python implementation of which.
        It returns the path of an executable or None if it couldn't be found.
        """
        for path in os.environ['PATH'].split(os.pathsep):
            test = os.path.join(path, name)
            if os.path.exists(test) and os.access(test, os.X_OK):
                return test
        return None

    def run_as_root(self, command):
        """Run `command` (a list of arguments), elevating privileges with
        sudo, or su as a fallback, when not already running as root."""
        if os.geteuid() != 0:
            if self.which('sudo'):
                command.insert(0, 'sudo')
            else:
                command = ['su', 'root', '-c', ' '.join(command)]
        print('Executing as root:', subprocess.list2cmdline(command))
        subprocess.check_call(command, stdin=sys.stdin)

    def _dnf_command(self, verb, packages):
        # Shared implementation for dnf_install/dnf_groupinstall/dnf_update;
        # prefers dnf and falls back to yum on older Fedora/CentOS systems.
        if self.which('dnf'):
            command = ['dnf', verb]
        else:
            command = ['yum', verb]
        if self.no_interactive:
            command.append('-y')
        command.extend(packages)
        self.run_as_root(command)

    def dnf_install(self, *packages):
        """Install packages via dnf (or yum)."""
        self._dnf_command('install', packages)

    def dnf_groupinstall(self, *packages):
        """Install package groups via dnf (or yum)."""
        self._dnf_command('groupinstall', packages)

    def dnf_update(self, *packages):
        """Update packages via dnf (or yum)."""
        self._dnf_command('update', packages)

    def apt_install(self, *packages):
        """Install packages via apt-get."""
        command = ['apt-get', 'install']
        if self.no_interactive:
            command.append('-y')
        command.extend(packages)
        self.run_as_root(command)

    def apt_update(self):
        """Refresh the apt package lists."""
        command = ['apt-get', 'update']
        if self.no_interactive:
            command.append('-y')
        self.run_as_root(command)

    def apt_add_architecture(self, arch):
        """Register additional dpkg architectures.

        `arch` must be an iterable of architecture names (e.g. ['i386']),
        not a bare string -- extend() would split a string into characters.
        """
        command = ['dpkg', '--add-architecture']
        command.extend(arch)
        self.run_as_root(command)

    def check_output(self, *args, **kwargs):
        """Run subprocess.check_output even if Python doesn't provide it."""
        fn = getattr(subprocess, 'check_output', BaseBootstrapper._check_output)
        return fn(*args, **kwargs)

    @staticmethod
    def _check_output(*args, **kwargs):
        """Python 2.6 compatible implementation of subprocess.check_output."""
        proc = subprocess.Popen(stdout=subprocess.PIPE, *args, **kwargs)
        output, unused_err = proc.communicate()
        retcode = proc.poll()
        if retcode:
            cmd = kwargs.get('args', args[0])
            e = subprocess.CalledProcessError(retcode, cmd)
            e.output = output
            raise e
        return output

    def prompt_int(self, prompt, low, high, limit=5):
        ''' Prompts the user with prompt and requires an integer between low and high. '''
        # NOTE: raw_input is Python 2 only; this module predates Python 3.
        valid = False
        while not valid and limit > 0:
            try:
                choice = int(raw_input(prompt))
                if not low <= choice <= high:
                    print("ERROR! Please enter a valid option!")
                    limit -= 1
                else:
                    valid = True
            except ValueError:
                print("ERROR! Please enter a valid option!")
                limit -= 1
        if limit > 0:
            return choice
        else:
            raise Exception("Error! Reached max attempts of entering option.")

    def _ensure_package_manager_updated(self):
        # Refresh package metadata at most once per bootstrapper run.
        if self.package_manager_updated:
            return
        self._update_package_manager()
        self.package_manager_updated = True

    def _update_package_manager(self):
        """Updates the package manager's manifests/package list.
        This should be defined in child classes.
        """

    def _parse_version(self, path, name=None, env=None):
        '''Execute the given path, returning the version.
        Invokes the path argument with the --version switch
        and returns a LooseVersion representing the output
        if successful. If not, returns None.
        An optional name argument gives the expected program
        name returned as part of the version string, if it's
        different from the basename of the executable.
        An optional env argument allows modifying environment
        variable during the invocation to set options, PATH,
        etc.
        '''
        if not name:
            name = os.path.basename(path)
        if name.endswith('.exe'):
            name = name[:-4]
        info = self.check_output([path, '--version'],
                                 env=env,
                                 stderr=subprocess.STDOUT)
        # Raw string: \. must reach the regex engine, not be interpreted
        # as a (deprecated) Python string escape.
        match = re.search(name + r' ([a-z0-9\.]+)', info)
        if not match:
            print('ERROR! Unable to identify %s version.' % name)
            return None
        return LooseVersion(match.group(1))

    def _hgplain_env(self):
        """ Returns a copy of the current environment updated with the HGPLAIN
        environment variable.
        HGPLAIN prevents Mercurial from applying locale variations to the output
        making it suitable for use in scripts.
        """
        env = os.environ.copy()
        env[b'HGPLAIN'] = b'1'
        return env

    def is_mercurial_modern(self):
        """Returns (installed, modern, version) for the `hg` on PATH."""
        hg = self.which('hg')
        if not hg:
            print(NO_MERCURIAL)
            return False, False, None
        # `hg --version` reports "(version X.Y)", hence the 'version' name.
        our = self._parse_version(hg, 'version', self._hgplain_env())
        if not our:
            return True, False, None
        return True, our >= MODERN_MERCURIAL_VERSION, our

    def ensure_mercurial_modern(self):
        """Checks Mercurial and attempts an upgrade if it is too old.

        Returns (installed, modern) after any upgrade attempt.
        """
        installed, modern, version = self.is_mercurial_modern()
        if modern:
            print('Your version of Mercurial (%s) is sufficiently modern.' %
                  version)
            return installed, modern
        self._ensure_package_manager_updated()
        if installed:
            print('Your version of Mercurial (%s) is not modern enough.' %
                  version)
            # Fixed: the parenthetical warning previously never closed its
            # opening '('.
            print('(Older versions of Mercurial have known security vulnerabilities. '
                  'Unless you are running a patched Mercurial version, you may be '
                  'vulnerable.)')
        else:
            print('You do not have Mercurial installed')
        if self.upgrade_mercurial(version) is False:
            return installed, modern
        installed, modern, after = self.is_mercurial_modern()
        if installed and not modern:
            print(MERCURIAL_UPGRADE_FAILED % (MODERN_MERCURIAL_VERSION, after))
        return installed, modern

    def upgrade_mercurial(self, current):
        """Upgrade Mercurial.
        Child classes should reimplement this.
        Return False to not perform a version check after the upgrade is
        performed.
        """
        print(MERCURIAL_UNABLE_UPGRADE % (current, MODERN_MERCURIAL_VERSION))

    def is_python_modern(self):
        """Returns (modern, version) for the Python interpreter on PATH."""
        python = None
        for test in ['python2.7', 'python']:
            python = self.which(test)
            if python:
                break
        # NOTE: we are running under Python, so some interpreter must exist;
        # assert (not raise) is the historical behavior here.
        assert python
        our = self._parse_version(python, 'Python')
        if not our:
            return False, None
        return our >= MODERN_PYTHON_VERSION, our

    def ensure_python_modern(self):
        """Checks Python and attempts an upgrade; exits on failure."""
        modern, version = self.is_python_modern()
        if modern:
            print('Your version of Python (%s) is new enough.' % version)
            return
        print('Your version of Python (%s) is too old. Will try to upgrade.' %
              version)
        self._ensure_package_manager_updated()
        self.upgrade_python(version)
        modern, after = self.is_python_modern()
        if not modern:
            print(PYTHON_UPGRADE_FAILED % (MODERN_PYTHON_VERSION, after))
            sys.exit(1)

    def upgrade_python(self, current):
        """Upgrade Python.
        Child classes should reimplement this.
        """
        print(PYTHON_UNABLE_UPGRADE % (current, MODERN_PYTHON_VERSION))

    def is_rust_modern(self):
        """Returns (modern, version) for the rustc on PATH."""
        rustc = self.which('rustc')
        if not rustc:
            print('Could not find a Rust compiler.')
            return False, None
        # (A previous revision also looked up `cargo` here but never used
        # the result; the unused lookup has been removed.)
        our = self._parse_version(rustc)
        if not our:
            return False, None
        return our >= MODERN_RUST_VERSION, our

    def cargo_home(self):
        """Returns (cargo_home, cargo_bin), honoring $CARGO_HOME."""
        cargo_home = os.environ.get('CARGO_HOME',
                                    os.path.expanduser(os.path.join('~', '.cargo')))
        cargo_bin = os.path.join(cargo_home, 'bin')
        return cargo_home, cargo_bin

    def win_to_msys_path(self, path):
        '''Convert a windows-style path to msys style.'''
        drive, path = os.path.splitdrive(path)
        path = '/'.join(path.split('\\'))
        if drive:
            if path[0] == '/':
                path = path[1:]
            path = '/%s/%s' % (drive[:-1], path)
        return path

    def print_rust_path_advice(self, template, cargo_home, cargo_bin):
        """Prints `template` filled with the shell command that puts the
        cargo binaries on PATH."""
        # Suggest ~/.cargo/env if it exists.
        if os.path.exists(os.path.join(cargo_home, 'env')):
            cmd = 'source %s/env' % cargo_home
        else:
            # On Windows rustup doesn't write out ~/.cargo/env
            # so fall back to a manual PATH update. Bootstrap
            # only runs under msys, so a unix-style shell command
            # is appropriate there.
            cargo_bin = self.win_to_msys_path(cargo_bin)
            cmd = 'export PATH=%s:$PATH' % cargo_bin
        print(template % {
            'cargo_bin': cargo_bin,
            'cmd': cmd,
        })

    def ensure_rust_modern(self):
        """Checks Rust and installs or upgrades it via rustup as needed."""
        modern, version = self.is_rust_modern()
        if modern:
            print('Your version of Rust (%s) is new enough.' % version)
            return
        if not version:
            # Rust wasn't in PATH. Check the standard location.
            cargo_home, cargo_bin = self.cargo_home()
            try_rustc = os.path.join(cargo_bin, 'rustc' + rust.exe_suffix())
            try_cargo = os.path.join(cargo_bin, 'cargo' + rust.exe_suffix())
            have_rustc = os.path.exists(try_rustc)
            have_cargo = os.path.exists(try_cargo)
            if have_rustc or have_cargo:
                self.print_rust_path_advice(RUST_NOT_IN_PATH,
                                            cargo_home, cargo_bin)
                sys.exit(1)
        else:
            print('Your version of Rust (%s) is too old.' % version)
        rustup = self.which('rustup')
        if rustup:
            rustup_version = self._parse_version(rustup)
            if not rustup_version:
                print(RUSTUP_OLD)
                sys.exit(1)
            print('Found rustup. Will try to upgrade.')
            self.upgrade_rust(rustup)
            modern, after = self.is_rust_modern()
            if not modern:
                print(RUST_UPGRADE_FAILED % (MODERN_RUST_VERSION, after))
                sys.exit(1)
        else:
            # No rustup. Download and run the installer.
            print('Will try to install Rust.')
            self.install_rust()

    def upgrade_rust(self, rustup):
        """Upgrade Rust.
        Invoke rustup from the given path to update the rust install."""
        subprocess.check_call([rustup, 'update'])

    def install_rust(self):
        """Download and run the rustup installer."""
        import errno
        import stat
        import tempfile
        platform = rust.platform()
        url = rust.rustup_url(platform)
        checksum = rust.rustup_hash(platform)
        if not url or not checksum:
            print('ERROR: Could not download installer.')
            sys.exit(1)
        print('Downloading rustup-init... ', end='')
        fd, rustup_init = tempfile.mkstemp(prefix=os.path.basename(url))
        os.close(fd)
        try:
            self.http_download_and_save(url, rustup_init, checksum)
            mode = os.stat(rustup_init).st_mode
            os.chmod(rustup_init, mode | stat.S_IRWXU)
            print('Ok')
            print('Running rustup-init...')
            subprocess.check_call([rustup_init, '-y',
                                   '--default-toolchain', 'stable',
                                   '--default-host', platform,
                                   ])
            cargo_home, cargo_bin = self.cargo_home()
            self.print_rust_path_advice(RUST_INSTALL_COMPLETE,
                                        cargo_home, cargo_bin)
        finally:
            # Best-effort cleanup of the temporary installer.
            try:
                os.remove(rustup_init)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
    def http_download_and_save(self, url, dest, sha256hexhash):
        """Download `url` to `dest`, verifying its SHA-256 digest.

        Removes `dest` and raises ValueError on a digest mismatch.
        """
        f = urllib2.urlopen(url)
        h = hashlib.sha256()
        with open(dest, 'wb') as out:
            while True:
                data = f.read(4096)
                if data:
                    out.write(data)
                    h.update(data)
                else:
                    break
        if h.hexdigest() != sha256hexhash:
            os.remove(dest)
            raise ValueError('Hash of downloaded file does not match expected hash')
| Yukarumya/Yukarum-Redfoxes | python/mozboot/mozboot/base.py | Python | mpl-2.0 | 22,367 |
"""Problem 49
01 August 2003
The arithmetic sequence, 1487, 4817, 8147, in which each of the terms
increases by 3330, is unusual in two ways: (i) each of the three terms
are prime, and, (ii) each of the 4-digit numbers are permutations of
one another.
There are no arithmetic sequences made up of three 1-, 2-, or 3-digit
primes, exhibiting this property, but there is one other 4-digit
increasing sequence.
What 12-digit number do you form by concatenating the three terms in
this sequence?
"""
from eulerlib import generatePrimesLimit, generateStringPermutations, isPrime
primes = generatePrimesLimit(10000)
for prime in primes:
#should check above 1000 only?
permutations = generateStringPermutations(("000"+str(prime))[-4:])
# create a list only with permutations that are also prime
prime_permutations = []
for permutation in permutations:
if len(permutation) == 4 and isPrime(int(permutation)):
prime_permutations.append(int(permutation))
if len(prime_permutations) > 2:
sorted_pp = sorted(prime_permutations)
# this trick avoid checking permutations twice
if int(sorted_pp[0]) < prime:
continue
# compare all members to see if there are 3 in an arithmetic progression
difs = []
for pp1 in sorted_pp:
goOut = False
for pp2 in sorted_pp:
if pp1 < pp2:
dif = pp2 - pp1
if dif in difs and pp1 - dif in sorted_pp:
# Found another pair with the same difference...
print(pp1 - dif, pp1, pp2, "dif =", dif)
goOut = True
break
difs.append(dif)
if goOut:
break
| feliposz/project-euler-solutions | python/euler49.py | Python | mit | 1,801 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for segment reduction ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class SegmentReductionHelper(test.TestCase):
  """Shared helpers: deterministic inputs and NumPy reference reductions."""

  def _input(self, input_shape, dtype=dtypes_lib.int32):
    """Returns (tf constant, np array) filled with 1..N in input_shape."""
    num_elem = 1
    for x in input_shape:
      num_elem *= x
    values = np.arange(1, num_elem + 1)
    np_values = values.reshape(input_shape).astype(dtype.as_numpy_dtype)
    # Add a non-zero imaginary component to complex types.
    if dtype.is_complex:
      np_values -= 1j * np_values
    return constant_op.constant(
        np_values, shape=input_shape, dtype=dtype), np_values

  def _segmentReduce(self, indices, x, op1, op2=None, num_segments=None,
                     initial_value=0):
    """NumPy reference segment reduction.

    Accumulates slices of `x` into per-segment buckets with the pairwise
    op `op1`, then optionally finalizes each bucket with `op2` (used by
    the mean/sqrt_n variants, whose accumulator is a (sum, count) tuple).
    """
    if not x.size:
      return np.array([])
    indices = np.asarray(indices)
    if num_segments is None:
      num_segments = indices[-1] + 1
    output = [None] * num_segments
    # Slices of x beyond the indices' rank are reduced as whole blocks.
    slice_shape = x.shape[indices.ndim:]
    x_flat = x.reshape((indices.size,) + slice_shape)
    for i, index in enumerate(indices.ravel()):
      if (output[index] is not None) and op1 == np.max:
        # np.max reduces a whole array rather than combining two arrays
        # elementwise, so apply it element by element.
        # NOTE(review): this loop assumes 1-D slices — confirm if rank-3
        # inputs are ever used with the max op.
        for j in range(0, output[index].shape[0]):
          output[index][j] = op1([output[index][j], x_flat[i][j]])
      elif output[index] is not None:
        output[index] = op1(output[index], x_flat[i])
      else:
        output[index] = x_flat[i]
    # initialize values that are still uncalculated (empty segments).
    initial_value_slice = np.ones(slice_shape) * initial_value
    output = [o if o is not None else initial_value_slice for o in output]
    if op2 is not None:
      output = [op2(o) for o in output]
    output = [o.reshape(slice_shape) for o in output]
    return np.array(output)

  def _mean_cum_op(self, x, y):
    # Accumulator for mean: (running_sum, count). The first combine turns a
    # bare slice into a tuple with count 2 (two slices seen so far).
    return (x[0] + y, x[1] + 1) if isinstance(x, tuple) else (x + y, 2)

  def _mean_reduce_op(self, x):
    # Finalize mean: sum / count (a bare slice means a single-element segment).
    return x[0] / x[1] if isinstance(x, tuple) else x

  def _sqrt_n_reduce_op(self, x):
    # Finalize sqrt_n: sum / sqrt(count).
    return x[0] / np.sqrt(x[1]) if isinstance(x, tuple) else x
class SegmentReductionOpTest(SegmentReductionHelper):
  """Tests for the sorted segment reduction ops (tf.segment_sum etc.)."""

  def testValues(self):
    # Checks every segment op against its NumPy reference on all dtypes.
    dtypes = [
        dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int64,
        dtypes_lib.int32, dtypes_lib.complex64, dtypes_lib.complex128
    ]
    # Each item is np_op1, np_op2, tf_op
    ops_list = [(np.add, None, math_ops.segment_sum),
                (self._mean_cum_op, self._mean_reduce_op,
                 math_ops.segment_mean),
                (np.ndarray.__mul__, None, math_ops.segment_prod),
                (np.minimum, None, math_ops.segment_min),
                (np.maximum, None, math_ops.segment_max)]
    # A subset of ops has been enabled for complex numbers
    complex_ops_list = [(np.add, None, math_ops.segment_sum),
                        (np.ndarray.__mul__, None, math_ops.segment_prod),
                        (self._mean_cum_op, self._mean_reduce_op,
                         math_ops.segment_mean)]
    n = 10
    shape = [n, 2]
    # Every three consecutive rows share a segment id: 0,0,0,1,1,1,...
    indices = [i // 3 for i in range(n)]
    for dtype in dtypes:
      if dtype in (dtypes_lib.complex64, dtypes_lib.complex128):
        curr_ops_list = complex_ops_list
      else:
        curr_ops_list = ops_list
      for use_gpu in [True, False]:
        with self.test_session(use_gpu=use_gpu):
          tf_x, np_x = self._input(shape, dtype=dtype)
          for np_op1, np_op2, tf_op in curr_ops_list:
            np_ans = self._segmentReduce(indices, np_x, np_op1, np_op2)
            s = tf_op(data=tf_x, segment_ids=indices)
            tf_ans = s.eval()
            self.assertAllClose(np_ans, tf_ans)
            # NOTE(mrry): The static shape inference that computes
            # `tf_ans.shape` can only infer that sizes from dimension 1
            # onwards, because the size of dimension 0 is data-dependent
            # and may therefore vary dynamically.
            self.assertAllEqual(np_ans.shape[1:], tf_ans.shape[1:])

  def testSegmentIdsShape(self):
    # segment_ids must be a vector; a 2-D tensor is rejected at graph time.
    shape = [4, 4]
    tf_x, _ = self._input(shape)
    indices = constant_op.constant([0, 1, 2, 2], shape=[2, 2])
    with self.assertRaises(ValueError):
      math_ops.segment_sum(data=tf_x, segment_ids=indices)

  def testSegmentIdsSize(self):
    # segment_ids shorter than the data's first dimension fails at run time.
    shape = [4, 4]
    for use_gpu in [True, False]:
      with self.test_session(use_gpu=use_gpu):
        tf_x, _ = self._input(shape)
        indices = [0, 1]
        s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
        with self.assertRaisesOpError("segment_ids should be the same size"):
          s.eval()

  def testSegmentIdsValid(self):
    # This is a baseline for the following SegmentIdsInvalid* tests.
    shape = [4, 4]
    for use_gpu in [True, False]:
      with self.test_session(use_gpu=use_gpu):
        tf_x, _ = self._input(shape, dtype=dtypes_lib.float32)
        indices = [0, 0, 0, 1]
        result = math_ops.segment_sum(data=tf_x, segment_ids=indices).eval()
        self.assertAllEqual([[15, 18, 21, 24], [13, 14, 15, 16]], result)

  def testSegmentIdsGreaterThanZero(self):
    # Ids need not start at 0; segment 0 is then empty (and zero-filled).
    shape = [4, 4]
    for use_gpu in [True, False]:
      with self.test_session(use_gpu=use_gpu):
        tf_x, np_x = self._input(shape, dtype=dtypes_lib.float32)
        indices = [1, 1, 2, 2]
        np_ans = self._segmentReduce(indices, np_x, np.add)
        s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
        tf_ans = s.eval()
        self.assertAllClose(np_ans, tf_ans)

  def testSegmentIdsHole(self):
    # Ids may skip values; the skipped segments are empty (zero-filled).
    shape = [4, 4]
    for use_gpu in [True, False]:
      with self.test_session(use_gpu=use_gpu):
        tf_x, np_x = self._input(shape, dtype=dtypes_lib.float32)
        indices = [0, 0, 3, 3]
        np_ans = self._segmentReduce(indices, np_x, np.add)
        s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
        tf_ans = s.eval()
        self.assertAllClose(np_ans, tf_ans)

  def testSegmentIdsInvalid1(self):
    # Negative leading ids are rejected.
    shape = [4, 4]
    with self.test_session():
      tf_x, _ = self._input(shape)
      indices = [-1, -1, 0, 0]
      s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
      with self.assertRaisesOpError(
          r"Segment id -1 out of range \[0, 1\), possibly because "
          "'segment_ids' input is not sorted."):
        s.eval()

  def testSegmentIdsInvalid2(self):
    # Ids must be non-decreasing for the sorted segment ops.
    shape = [4, 4]
    with self.test_session():
      tf_x, _ = self._input(shape)
      indices = [0, 1, 0, 1]
      s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
      with self.assertRaisesOpError("segment ids are not increasing"):
        s.eval()

  def testSegmentIdsInvalid3(self):
    # A decreasing final id is reported as out of range.
    shape = [4, 4]
    with self.test_session():
      tf_x, _ = self._input(shape)
      indices = [0, 1, 2, 0]
      s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
      with self.assertRaisesOpError(
          r"Segment id 1 out of range \[0, 1\), possibly "
          "because 'segment_ids' input is not sorted."):
        s.eval()

  def testSegmentIdsInvalid4(self):
    # A negative trailing id is rejected.
    shape = [4, 4]
    for use_gpu in [True, False]:
      with self.test_session(use_gpu=use_gpu):
        tf_x, _ = self._input(shape, dtype=dtypes_lib.float32)
        indices = [0, 0, 0, -1]
        s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
        with self.assertRaisesOpError("segment ids must be >= 0"):
          s.eval()

  def testSegmentIdsInvalid5(self):
    shape = [4, 4]
    for use_gpu in [True, False]:
      with self.test_session(use_gpu=use_gpu):
        tf_x, _ = self._input(shape, dtype=dtypes_lib.float32)
        indices = [0, 0, 0, -2]
        s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
        with self.assertRaisesOpError("segment ids must be >= 0"):
          s.eval()

  def testGradient(self):
    # Numeric-vs-symbolic gradient check for the differentiable segment ops.
    shape = [4, 4]
    indices = [0, 1, 2, 2]
    for tf_op in [
        math_ops.segment_sum, math_ops.segment_mean, math_ops.segment_min,
        math_ops.segment_max
    ]:
      with self.test_session():
        tf_x, np_x = self._input(shape, dtype=dtypes_lib.float64)
        s = tf_op(data=tf_x, segment_ids=indices)
        jacob_t, jacob_n = gradient_checker.compute_gradient(
            tf_x,
            shape,
            s, [3, 4],
            x_init_value=np_x.astype(np.double),
            delta=1)
      self.assertAllClose(jacob_t, jacob_n)
class UnsortedSegmentTest(SegmentReductionHelper):
def __init__(self, methodName='runTest'):
# Each item is np_op1, np_op2, tf_op, initial_value functor
self.ops_list = [(np.add, None,
math_ops.unsorted_segment_sum, lambda t: 0),
(self._mean_cum_op, self._mean_reduce_op,
math_ops.unsorted_segment_mean, lambda t: 0),
(self._mean_cum_op, self._sqrt_n_reduce_op,
math_ops.unsorted_segment_sqrt_n, lambda t: 0),
(np.ndarray.__mul__, None,
math_ops.unsorted_segment_prod, lambda t: 1),
(np.minimum, None,
math_ops.unsorted_segment_min, lambda t: t.max),
(np.maximum, None,
math_ops.unsorted_segment_max, lambda t: t.min)]
# A subset of ops has been enabled for complex numbers
self.complex_ops_list = [(np.add, None,
math_ops.unsorted_segment_sum, lambda t: 0),
(np.ndarray.__mul__, None,
math_ops.unsorted_segment_prod, lambda t: 1)]
self.differentiable_dtypes = [dtypes_lib.float16, dtypes_lib.float32,
dtypes_lib.float64]
self.all_dtypes = (self.differentiable_dtypes +
[dtypes_lib.bfloat16,
dtypes_lib.int64, dtypes_lib.int32,
dtypes_lib.complex64, dtypes_lib.complex128])
super(UnsortedSegmentTest, self).__init__(methodName=methodName)
def testValues(self):
indices_flat = np.array([0, 4, 0, 8, 3, 8, 4, 7, 7, 3])
num_segments = 12
for indices in indices_flat, indices_flat.reshape(5, 2):
shape = indices.shape + (2,)
for dtype in self.all_dtypes:
ops_list = self.complex_ops_list if dtype.is_complex else self.ops_list
tf_x, np_x = self._input(shape, dtype=dtype)
for use_gpu in [True, False]:
with self.test_session(use_gpu=True):
for np_op1, np_op2, tf_op, init_op in ops_list:
# sqrt_n doesn't support integers
if (np_op2 == self._sqrt_n_reduce_op and dtype.is_integer):
continue
# todo(philjd): enable this test once real_div supports bfloat16
if (np_op2 in [self._sqrt_n_reduce_op, self._mean_reduce_op] and
dtype == dtypes_lib.bfloat16):
continue
np_ans = self._segmentReduce(
indices, np_x, np_op1, np_op2, num_segments=num_segments,
initial_value=init_op(dtype))
s = tf_op(tf_x, segment_ids=indices, num_segments=num_segments)
tf_ans = s.eval()
if dtype is dtypes_lib.bfloat16:
tf_ans = tf_ans.astype(np.float32)
self.assertAllClose(np_ans, tf_ans)
self.assertShapeEqual(np_ans, s)
def testNumSegmentsTypes(self):
dtypes = [dtypes_lib.int32, dtypes_lib.int64]
indices_flat = np.array([0, 4, 0, 8, 3, 8, 4, 7, 7, 3])
num_segments = 12
for indices in indices_flat, indices_flat.reshape(5, 2):
shape = indices.shape + (2,)
for dtype in dtypes:
with self.test_session(use_gpu=True):
tf_x, np_x = self._input(shape)
num_segments_constant = constant_op.constant(
num_segments, dtype=dtype)
np_ans = self._segmentReduce(
indices, np_x, np.add, op2=None, num_segments=num_segments)
s = math_ops.unsorted_segment_sum(
data=tf_x,
segment_ids=indices,
num_segments=num_segments_constant)
tf_ans = s.eval()
self.assertAllClose(np_ans, tf_ans)
self.assertShapeEqual(np_ans, s)
def testGradients(self):
num_cols = 2
indices_flat = np.array([0, 4, 0, -1, 3, -1, 4, 7, 7, 3])
num_segments = max(indices_flat) + 3
for dtype in self.differentiable_dtypes:
ops_list = self.complex_ops_list if dtype.is_complex else self.ops_list
for indices in indices_flat, indices_flat.reshape(5, 2):
shape = indices.shape + (num_cols,)
# test CPU and GPU as tf.gather behaves differently on each device
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu):
for _, _, tf_op, _ in ops_list:
tf_x, np_x = self._input(shape, dtype=dtype)
s = tf_op(tf_x, indices, num_segments)
jacob_t, jacob_n = gradient_checker.compute_gradient(
tf_x,
shape,
s, [num_segments, num_cols],
x_init_value=np_x,
delta=1)
self.assertAllClose(jacob_t, jacob_n)
def testProdGrad(self):
# additional test for the prod gradient to ensure correct handling of zeros
values = np.array([0, 0, 1, 0, 2, 2, 3, 3, 3], dtype=np.float32)
indices = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2], dtype=np.int32)
indices_neg = np.array([-1, 0, 0, -1, 1, 1, -1, 2, 2], dtype=np.int32)
values_tf = constant_op.constant(values)
# ground truth partial derivatives
gradients_indices = np.zeros((9, 3), dtype=np.float32)
gradients_indices_neg = np.zeros((9, 3), dtype=np.float32)
# the derivative w.r.t. to the other segments is zero, so here we only
# explicitly set the grad values for the corresponding segment
gradients_indices[range(9), indices] = [0, 0, 0, 4, 0, 0, 9, 9, 9]
gradients_indices_neg[range(9), indices_neg] = [0, 1, 0, 0, 2, 2, 0, 3, 3]
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu):
for ind, grad_gt in [(indices, gradients_indices),
(indices_neg, gradients_indices_neg)]:
s = math_ops.unsorted_segment_prod(values_tf,
constant_op.constant(ind), 3)
jacob_t, jacob_n = gradient_checker.compute_gradient(
values_tf, (9,), s, (3,), x_init_value=values, delta=1)
self.assertAllClose(jacob_t, jacob_n)
self.assertAllClose(jacob_t, grad_gt)
def testGradientMatchesSegmentSum(self):
    # Strategy: compute the gradient for UnsortedSegmentSum and SegmentSum
    # and compare the outputs, which should be identical.
    # NB: for this test to work, indices must be valid for SegmentSum, namely
    # it must be sorted, the indices must be contiguous, and num_segments
    # must be max(indices) + 1.
    indices = [0, 0, 1, 1, 1, 2, 3, 4, 5]
    n = len(indices)
    num_cols = 2
    shape = [n, num_cols]
    num_segments = max(indices) + 1
    for dtype in self.differentiable_dtypes:
        with self.test_session(use_gpu=True):
            tf_x, np_x = self._input(shape, dtype=dtype)
            # Results from UnsortedSegmentSum
            unsorted_s = math_ops.unsorted_segment_sum(
                data=tf_x, segment_ids=indices, num_segments=num_segments)
            unsorted_jacob_t, unsorted_jacob_n = (
                gradient_checker.compute_gradient(tf_x, shape, unsorted_s,
                                                  [num_segments, num_cols],
                                                  x_init_value=np_x, delta=1))
            # Results from SegmentSum
            sorted_s = math_ops.segment_sum(data=tf_x, segment_ids=indices)
            sorted_jacob_t, sorted_jacob_n = gradient_checker.compute_gradient(
                tf_x,
                shape,
                sorted_s, [num_segments, num_cols],
                x_init_value=np_x,
                delta=1)
            # Both the analytic and the numeric Jacobians must agree.
            self.assertAllClose(unsorted_jacob_t, sorted_jacob_t)
            self.assertAllClose(unsorted_jacob_n, sorted_jacob_n)
def testBadIndices(self):
    """Out-of-range positive segment ids must raise an op error (CPU only)."""
    # Note: GPU kernel does not return the out-of-range error needed for this
    # test, so this test is marked as cpu-only.
    # Note: With PR #13055 a negative index will be ignored silently.
    with self.test_session(use_gpu=False):
        for bad in [[2]], [[7]]:
            unsorted = math_ops.unsorted_segment_sum([[17]], bad, num_segments=2)
            with self.assertRaisesOpError(
                    r"segment_ids\[0,0\] = %d is out of range \[0, 2\)" % bad[0][0]):
                unsorted.eval()
def testEmptySecondDimension(self):
    """A (2, 0) input must reduce to a (2, 0) output, for all dtypes."""
    dtypes = [np.float16, np.float32, np.float64, np.int64, np.int32,
              np.complex64, np.complex128]
    with self.test_session(use_gpu=True):
        for dtype in dtypes:
            for itype in (np.int32, np.int64):
                data = np.zeros((2, 0), dtype=dtype)
                segment_ids = np.array([0, 1], dtype=itype)
                unsorted = math_ops.unsorted_segment_sum(data, segment_ids, 2)
                self.assertAllEqual(unsorted.eval(), np.zeros((2, 0), dtype=dtype))
def testDropNegatives(self):
    """Entries with segment id -1 must be silently dropped from the sum."""
    # Note: the test is done by replacing segment_ids with 8 to -1
    # for index and replace values generated by numpy with 0.
    indices_flat = np.array([0, 4, 0, 8, 3, 8, 4, 7, 7, 3])
    num_segments = 12
    for indices in indices_flat, indices_flat.reshape(5, 2):
        shape = indices.shape + (2,)
        for dtype in self.all_dtypes:
            with self.test_session(use_gpu=True):
                tf_x, np_x = self._input(shape, dtype=dtype)
                np_ans = self._segmentReduce(
                    indices, np_x, np.add, op2=None, num_segments=num_segments)
                # Replace np_ans[8] with 0 for the value
                np_ans[8:] = 0
                # Replace 8 with -1 in indices
                np.place(indices, indices == 8, [-1])
                s = math_ops.unsorted_segment_sum(
                    data=tf_x, segment_ids=indices, num_segments=num_segments)
                tf_ans = s.eval()
                self.assertAllClose(np_ans, tf_ans)
                self.assertShapeEqual(np_ans, s)
class SparseSegmentReductionHelper(SegmentReductionHelper):
    """Adds sparse-input helpers on top of SegmentReductionHelper."""

    def _sparse_input(self, input_shape, num_indices, dtype=dtypes_lib.int32):
        # Draw `num_indices` random row indices into the dense input and
        # return (tf indices, np indices, tf input, np input).
        a, b = super(SparseSegmentReductionHelper, self)._input(input_shape, dtype)
        indices = np.random.randint(0, input_shape[0], num_indices).astype(np.int32)
        return (constant_op.constant(
            indices, dtype=dtypes_lib.int32), indices, a, b)

    def _sparseSegmentReduce(self,
                             x,
                             indices,
                             segment_indices,
                             op1,
                             op2=None,
                             num_segments=None):
        # Reference implementation: gather the selected rows, then apply the
        # dense segment reduction.
        return self._segmentReduce(
            segment_indices, x[indices], op1, op2, num_segments=num_segments)
class SparseSegmentReductionOpTest(SparseSegmentReductionHelper):
    """Tests for tf.sparse_segment_sum / sparse_segment_mean and the
    *_with_num_segments variants, plus their gradient ops."""

    def testValues(self):
        """Compare op outputs against the numpy reference reductions."""
        dtypes = [
            dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int64,
            dtypes_lib.int32
        ]
        mean_dtypes = [dtypes_lib.float32, dtypes_lib.float64]
        # Each item is np_op1, np_op2, tf_op
        ops_list = [(np.add, None, math_ops.sparse_segment_sum),
                    (self._mean_cum_op, self._mean_reduce_op,
                     math_ops.sparse_segment_mean)]
        n = 400
        shape = [n, 2]
        # Segment i appears i + 1 times, i in [0, 20).
        segment_indices = []
        for i in range(20):
            for _ in range(i + 1):
                segment_indices.append(i)
        num_indices = len(segment_indices)
        for dtype in dtypes:
            with self.test_session(use_gpu=False):
                tf_indices, np_indices, tf_x, np_x = self._sparse_input(
                    shape, num_indices, dtype=dtype)
                for np_op1, np_op2, tf_op in ops_list:
                    # Mean is only defined for floating-point dtypes here.
                    if tf_op == math_ops.sparse_segment_mean and dtype not in mean_dtypes:
                        continue
                    np_ans = self._sparseSegmentReduce(np_x, np_indices, segment_indices,
                                                       np_op1, np_op2)
                    s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                    tf_ans = s.eval()
                    self.assertAllClose(np_ans, tf_ans)
                    # NOTE(mrry): The static shape inference that computes
                    # `tf_ans.shape` can only infer that sizes from dimension 1
                    # onwards, because the size of dimension 0 is data-dependent
                    # and may therefore vary dynamically.
                    self.assertAllEqual(np_ans.shape[1:], tf_ans.shape[1:])

    def testSegmentIdsHole(self):
        """Segment ids may skip a value (no id 1 here)."""
        tf_x, np_x = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [(np.add, None, math_ops.sparse_segment_sum), (
            self._mean_cum_op, self._mean_reduce_op, math_ops.sparse_segment_mean)]
        segment_indices = [0, 2, 2, 2]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for np_op1, np_op2, tf_op in ops_list:
                np_ans = self._sparseSegmentReduce(np_x, tf_indices, segment_indices,
                                                   np_op1, np_op2)
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                tf_ans = s.eval()
                self.assertAllClose(np_ans, tf_ans)

    def testWithNumSegments(self):
        """The *_with_num_segments variants match the numpy reference."""
        tf_x, np_x = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [(np.add, None, math_ops.sparse_segment_sum_with_num_segments),
                    (self._mean_cum_op, self._mean_reduce_op,
                     math_ops.sparse_segment_mean_with_num_segments)]
        segment_indices = [0, 2, 2, 2]
        tf_indices = [8, 3, 0, 9]
        num_segments = 5
        with self.test_session(use_gpu=False):
            for np_op1, np_op2, tf_op in ops_list:
                np_ans = self._sparseSegmentReduce(
                    np_x,
                    tf_indices,
                    segment_indices,
                    np_op1,
                    np_op2,
                    num_segments=num_segments)
                s = tf_op(
                    data=tf_x,
                    indices=tf_indices,
                    segment_ids=segment_indices,
                    num_segments=num_segments)
                tf_ans = s.eval()
                self.assertAllClose(np_ans, tf_ans)

    def testWithEmptySegments(self):
        """Empty data/indices with num_segments=5 must yield a zero result."""
        tf_x = constant_op.constant([], shape=[0, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_sum_with_num_segments,
            math_ops.sparse_segment_mean_with_num_segments
        ]
        segment_indices = []
        tf_indices = []
        num_segments = 5
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(
                    data=tf_x,
                    indices=tf_indices,
                    segment_ids=segment_indices,
                    num_segments=num_segments)
                tf_ans = s.eval()
                self.assertAllClose(np.zeros([5, 4]), tf_ans)

    def testSegmentIdsGreaterThanZero(self):
        """Segment ids need not start at 0."""
        tf_x, np_x = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [(np.add, None, math_ops.sparse_segment_sum), (
            self._mean_cum_op, self._mean_reduce_op, math_ops.sparse_segment_mean)]
        segment_indices = [1, 2, 2, 2]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for np_op1, np_op2, tf_op in ops_list:
                np_ans = self._sparseSegmentReduce(np_x, tf_indices, segment_indices,
                                                   np_op1, np_op2)
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                tf_ans = s.eval()
                self.assertAllClose(np_ans, tf_ans)

    def testValid(self):
        # Baseline for the test*Invalid* methods below.
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [0, 1, 2, 2]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                s.eval()

    def testIndicesInvalid1(self):
        """A negative gather index must raise an op error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [0, 1, 2, 2]
        tf_indices = [8, -1, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                with self.assertRaisesOpError(
                        r"indices\[1\] == -1 out of range \[0, 10\)"):
                    s.eval()

    def testIndicesInvalid2(self):
        """An index past the end of the data must raise an op error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [0, 1, 2, 2]
        tf_indices = [8, 3, 0, 10]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                with self.assertRaisesOpError(
                        r"indices\[3\] == 10 out of range \[0, 10\)"):
                    s.eval()

    def testSegmentsInvalid2(self):
        """Non-increasing segment ids must raise an op error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [0, 1, 0, 1]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                with self.assertRaisesOpError("segment ids are not increasing"):
                    s.eval()

    def testSegmentsInvalid3(self):
        """A segment id dropping back down must raise an out-of-range error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [0, 1, 2, 0]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                with self.assertRaisesOpError(
                        r"Segment id 1 out of range \[0, 1\), possibly because "
                        "'segment_ids' input is not sorted"):
                    s.eval()

    def testSegmentsInvalid4(self):
        """A leading negative segment id must raise an out-of-range error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [-1, 0, 1, 1]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                with self.assertRaisesOpError(
                        r"Segment id -1 out of range \[0, 2\), possibly because "
                        "'segment_ids' input is not sorted"):
                    s.eval()

    def testSegmentsInvalid6(self):
        """A trailing -1 segment id must raise a must-be->=0 error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [0, 0, 0, -1]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                with self.assertRaisesOpError("segment ids must be >= 0"):
                    s.eval()

    def testSegmentsInvalid7(self):
        """A trailing -2 segment id must raise a must-be->=0 error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]
        segment_indices = [0, 0, 0, -2]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                with self.assertRaisesOpError("segment ids must be >= 0"):
                    s.eval()

    def testSegmentWithNumSegmentsValid(self):
        # Baseline for the test*WithNumSegmentsInvalid* methods below.
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_sum_with_num_segments,
            math_ops.sparse_segment_mean_with_num_segments,
        ]
        num_segments = 5
        segment_indices = [0, 1, 3, 3]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(
                    data=tf_x,
                    indices=tf_indices,
                    segment_ids=segment_indices,
                    num_segments=num_segments)
                s.eval()

    def testSegmentWithNumSegmentsInvalid1(self):
        """A segment id equal to num_segments must raise an op error."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_sum_with_num_segments,
            math_ops.sparse_segment_mean_with_num_segments,
        ]
        num_segments = 5
        segment_indices = [0, 1, 3, 5]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(
                    data=tf_x,
                    indices=tf_indices,
                    segment_ids=segment_indices,
                    num_segments=num_segments)
                with self.assertRaisesOpError("segment ids must be < num_segments"):
                    s.eval()

    def testSegmentWithNumSegmentsInvalid2(self):
        """A negative num_segments must be rejected at graph-build time."""
        tf_x, _ = self._input([10, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_sum_with_num_segments,
            math_ops.sparse_segment_mean_with_num_segments,
        ]
        num_segments = -2
        segment_indices = [0, 1, 3, 3]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                with self.assertRaisesRegexp(
                        ValueError, "Cannot specify a negative value for num_segments"):
                    tf_op(
                        data=tf_x,
                        indices=tf_indices,
                        segment_ids=segment_indices,
                        num_segments=num_segments)

    def testGradient(self):
        """Analytic and numeric gradients of the sparse ops must agree."""
        shape = [10, 4]
        segment_indices = [0, 1, 2, 2]
        num_indices = len(segment_indices)
        for tf_op in [math_ops.sparse_segment_sum, math_ops.sparse_segment_mean]:
            with self.test_session():
                tf_indices, _, tf_x, np_x = self._sparse_input(
                    shape, num_indices, dtype=dtypes_lib.float64)
                s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices)
                jacob_t, jacob_n = gradient_checker.compute_gradient(
                    tf_x,
                    shape,
                    s, [3, 4],
                    x_init_value=np_x.astype(np.double),
                    delta=1)
                self.assertAllClose(jacob_t, jacob_n)

    def testGradientWithEmptySegmentsAtEnd(self):
        """Gradients must be correct when trailing segments receive no data."""
        shape = [10, 4]
        num_segments = 5
        segment_indices = [0, 1, 2, 2]
        num_indices = len(segment_indices)
        for tf_op in [
            math_ops.sparse_segment_sum_with_num_segments,
            math_ops.sparse_segment_mean_with_num_segments,
        ]:
            with self.test_session():
                tf_indices, _, tf_x, np_x = self._sparse_input(
                    shape, num_indices, dtype=dtypes_lib.float64)
                s = tf_op(
                    data=tf_x,
                    indices=tf_indices,
                    segment_ids=segment_indices,
                    num_segments=num_segments)
                jacob_t, jacob_n = gradient_checker.compute_gradient(
                    tf_x,
                    shape,
                    s, [5, 4],
                    x_init_value=np_x.astype(np.double),
                    delta=1)
                self.assertAllClose(jacob_t, jacob_n)

    def testGradientValid(self):
        # Baseline for the testGradient*Invalid* methods below.
        tf_x, _ = self._input([3, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_mean_grad, math_ops.sparse_segment_sqrt_n_grad
        ]
        segment_indices = [0, 1, 2, 2]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(tf_x, tf_indices, segment_indices, 10)
                s.eval()

    def testGradientIndicesInvalid1(self):
        """A gradient scatter index past output_dim0 must raise."""
        tf_x, _ = self._input([3, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_mean_grad, math_ops.sparse_segment_sqrt_n_grad
        ]
        segment_indices = [0, 1, 2, 2]
        tf_indices = [8, 3, 0, 10]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(tf_x, tf_indices, segment_indices, 10)
                with self.assertRaisesOpError(r"Index 10 out of range \[0, 10\)"):
                    s.eval()

    def testGradientIndicesInvalid2(self):
        """A negative gradient scatter index must raise."""
        tf_x, _ = self._input([3, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_mean_grad, math_ops.sparse_segment_sqrt_n_grad
        ]
        segment_indices = [0, 1, 2, 2]
        tf_indices = [8, 3, -1, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(tf_x, tf_indices, segment_indices, 10)
                with self.assertRaisesOpError(r"Index -1 out of range \[0, 10\)"):
                    s.eval()

    def testGradientSegmentsInvalid1(self):
        """Segment count disagreeing with the gradient shape must raise."""
        tf_x, _ = self._input(
            [3, 4], dtype=dtypes_lib.float32)  # expecting 3 segments
        ops_list = [
            math_ops.sparse_segment_mean_grad, math_ops.sparse_segment_sqrt_n_grad
        ]
        segment_indices = [0, 1, 1, 4]  # 5 segments
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(tf_x, tf_indices, segment_indices, 10)
                with self.assertRaisesOpError("Invalid number of segments"):
                    s.eval()

    def testGradientSegmentsInvalid2(self):
        """Unsorted segment ids must raise an out-of-range error."""
        tf_x, _ = self._input([1, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_mean_grad, math_ops.sparse_segment_sqrt_n_grad
        ]
        segment_indices = [0, 1, 2, 0]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(tf_x, tf_indices, segment_indices, 10)
                with self.assertRaisesOpError(r"Segment id 1 out of range \[0, 1\)"):
                    s.eval()

    def testGradientSegmentsInvalid3(self):
        """A leading negative segment id must raise an out-of-range error."""
        tf_x, _ = self._input([2, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_mean_grad, math_ops.sparse_segment_sqrt_n_grad
        ]
        segment_indices = [-1, 0, 1, 1]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(tf_x, tf_indices, segment_indices, 10)
                with self.assertRaisesOpError(r"Segment id -1 out of range \[0, 2\)"):
                    s.eval()

    def testGradientSegmentsInvalid4(self):
        """Any segment id is out of range when the gradient input is empty."""
        tf_x, _ = self._input([0, 4], dtype=dtypes_lib.float32)
        ops_list = [
            math_ops.sparse_segment_mean_grad, math_ops.sparse_segment_sqrt_n_grad
        ]
        segment_indices = [0, 1, 2, -1]
        tf_indices = [8, 3, 0, 9]
        with self.test_session(use_gpu=False):
            for tf_op in ops_list:
                s = tf_op(tf_x, tf_indices, segment_indices, 10)
                with self.assertRaisesOpError(r"Segment id 0 out of range \[0, 0\)"):
                    s.eval()
class SegmentReductionOpBenchmark(test.Benchmark):
    """Benchmarks sorted vs. unsorted segment sum on the GPU across a grid
    of (outer_dim, segments ratio, inner_dim, dtype) configurations."""

    outer_dim_options = [2**x for x in range(9, 14, 2)]
    ratio_options = [2**x for x in range(1, 6, 2)]
    inner_dim_options = [2**x for x in range(9, 14, 2)]
    # randomly generated sizes with less alignments
    inner_dim_options += [
        1120, 1215, 1856, 1302, 1329, 1531, 1313, 1672, 1851, 1584
    ]
    dtype_options = [np.float32, np.float64]
    options = (outer_dim_options, ratio_options, inner_dim_options, dtype_options)
    # pylint: disable=g-long-lambda
    # Each functor returns (label, op); index 0 = sorted, 1 = unsorted.
    op_functors = [lambda vc, vs, seg_ids:
                   ("sorted", math_ops.segment_sum(vc, vs)),
                   lambda vc, vs, seg_ids:
                   ("unsorted",
                    math_ops.unsorted_segment_sum(vc, vs, seg_ids[-1]+1))]
    # pylint: enable=g-long-lambda
    repeat = 10

    def _npTypeToStr(self, t):
        # NOTE(review): implicitly returns None for any dtype other than
        # fp32/fp64; callers only pass members of dtype_options.
        if t == np.float32:
            return "fp32"
        if t == np.float64:
            return "fp64"

    def _runGraph(self, op_functor, outer_dim, ratio, inner_dim, dtype):
        """Build one segment-sum graph on /gpu:0 and benchmark it.

        Returns (op label, measured wall time).
        """
        output_outer_dim = int(outer_dim / ratio)
        const = np.random.randint(5, size=(outer_dim, inner_dim))
        # Sorted ids so the same data works for both sorted and unsorted ops.
        seg_ids = np.sort(np.random.randint(output_outer_dim, size=outer_dim))
        vs = variables.Variable(seg_ids.astype(np.int32))
        with ops.device("/gpu:0"):
            vc = variables.Variable(const.astype(dtype))
        name, op = op_functor(vc, vs, seg_ids)
        with session.Session() as sess:
            variables.global_variables_initializer().run()
            r = self.run_op_benchmark(
                sess,
                op,
                min_iters=self.repeat,
                name="_".join(
                    map(str,
                        [name, outer_dim, ratio, inner_dim,
                         self._npTypeToStr(dtype)])))
        return name, r["wall_time"]

    def benchmarkSegmentSumGPU(self):
        """Benchmark the sorted segment_sum over the whole option grid."""
        if not test.is_gpu_available(cuda_only=True):
            return
        for outer_dim, ratio, inner_dim, dtype in itertools.product(*self.options):
            op_functor = self.op_functors[0]
            with ops.Graph().as_default():
                self._runGraph(op_functor, outer_dim, ratio, inner_dim, dtype)

    def benchmarkUnsortedSegmentSumGPU(self):
        """Benchmark unsorted_segment_sum over the whole option grid."""
        if not test.is_gpu_available(cuda_only=True):
            return
        for outer_dim, ratio, inner_dim, dtype in itertools.product(*self.options):
            op_functor = self.op_functors[1]
            with ops.Graph().as_default():
                self._runGraph(op_functor, outer_dim, ratio, inner_dim, dtype)
# Run all tests and benchmarks when executed as a script.
if __name__ == "__main__":
    test.main()
| benoitsteiner/tensorflow-xsmm | tensorflow/python/kernel_tests/segment_reduction_ops_test.py | Python | apache-2.0 | 37,722 |
"""
Common utility methods for Mobile APIs.
"""
# Supported mobile API version identifiers.
API_V05 = 'v0.5'
API_V1 = 'v1'
def parsed_version(version):
    """Convert a dotted version string "X.X.X.Y" to the int tuple (X, X, X).

    Only the first three dot-separated components are kept; any trailing
    components (e.g. a build suffix) are discarded.
    """
    components = version.split(".")[:3]
    return tuple(int(component) for component in components)
| eduNEXT/edx-platform | lms/djangoapps/mobile_api/utils.py | Python | agpl-3.0 | 223 |
# Localization classes and functions
#
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Martin Gracik <mgracik@redhat.com>
# Vratislav Podzimek <vpodzime@redhat.com>
#
import gettext
import os
import re
import langtable
import glob
from pyanaconda import constants
from pyanaconda.iutil import upcase_first_letter
import logging
log = logging.getLogger("anaconda")
# Path of the system-wide locale configuration file (written with the
# installation root prepended in write_language_configuration).
LOCALE_CONF_FILE_PATH = "/etc/locale.conf"

# Pattern for locale specifications of the form
# language[_territory][.encoding][@script], e.g. 'SR_RS.UTF-8@latin'.
LANGCODE_RE = re.compile(r'(?P<language>[A-Za-z]+)'
                         r'(_(?P<territory>[A-Za-z]+))?'
                         r'(\.(?P<encoding>[-A-Za-z0-9]+))?'
                         r'(@(?P<script>[-A-Za-z0-9]+))?')
class LocalizationConfigError(Exception):
    """Exception class for localization configuration related problems"""

    pass
class InvalidLocaleSpec(LocalizationConfigError):
    """Exception class for the errors related to invalid locale specs"""

    pass
def parse_langcode(langcode):
    """
    Parse a langcode string (e.g. 'SR_RS.UTF-8@latin') into its components.

    Returns a dictionary with the keys 'language', 'territory', 'encoding'
    and 'script' (for the example above: 'SR', 'RS', 'UTF-8' and 'latin'
    respectively), or None if the given string doesn't match LANGCODE_RE.
    """
    if not langcode:
        return None

    match = LANGCODE_RE.match(langcode)
    return match.groupdict() if match else None
def is_supported_locale(locale):
    """
    Report whether the given locale is supported by the Anaconda. Locales
    supported by langtable (i.e. those it has an English name for) are
    considered supported by the Anaconda.

    :param locale: locale to test
    :type locale: str
    :return: whether the given locale is supported or not
    :rtype: bool
    :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
    """
    return bool(get_english_name(locale))
def langcode_matches_locale(langcode, locale):
    """
    Tell whether the given langcode matches the given locale, i.e. whether
    every component appearing in the langcode (language, territory, script
    and encoding) equals the corresponding component of the locale.

    :param langcode: a langcode (e.g. en, en_US, en_US@latin, etc.)
    :type langcode: str
    :param locale: a valid locale (e.g. en_US.UTF-8 or sr_RS.UTF-8@latin, etc.)
    :type locale: str
    :return: whether the given langcode matches the given locale or not
    :rtype: bool
    """
    langcode_parts = parse_langcode(langcode)
    locale_parts = parse_langcode(locale)

    # both need to be valid langcodes (at least a language must be present)
    if not langcode_parts or not locale_parts:
        return False

    # every component present in the langcode must also be present in the
    # locale with the very same value
    return all(not langcode_parts[part]
               or langcode_parts[part] == locale_parts.get(part)
               for part in ("language", "territory", "script", "encoding"))
def find_best_locale_match(locale, langcodes):
    """
    Find the best match for the locale in a list of langcodes. This is useful
    when e.g. pt_BR is a locale and there are possibilities to choose an item
    (e.g. rnote) for a list containing both pt and pt_BR or even also pt_PT.

    :param locale: a valid locale (e.g. en_US.UTF-8 or sr_RS.UTF-8@latin, etc.)
    :type locale: str
    :param langcodes: a list or generator of langcodes (e.g. en, en_US, en_US@latin, etc.)
    :type langcodes: list(str) or generator(str)
    :return: the best matching langcode from the list of None if none matches
    :rtype: str or None
    """
    # Component weights: a more significant component always outweighs any
    # combination of less significant ones (1000 > 100 + 10 + 1).
    score_map = { "language" : 1000,
                  "territory": 100,
                  "script" : 10,
                  "encoding" : 1 }

    def get_match_score(locale, langcode):
        # Add the weight for every matching component, subtract it for a
        # differing component or one the langcode has but the locale lacks.
        score = 0

        locale_parts = parse_langcode(locale)
        langcode_parts = parse_langcode(langcode)
        if not locale_parts or not langcode_parts:
            return score

        # NOTE(review): iteritems() is Python 2 only; this module targets py2.
        for part, part_score in score_map.iteritems():
            if locale_parts[part] and langcode_parts[part]:
                if locale_parts[part] == langcode_parts[part]:
                    # match
                    score += part_score
                else:
                    # not match
                    score -= part_score
            elif langcode_parts[part] and not locale_parts[part]:
                # langcode has something the locale doesn't have
                score -= part_score

        return score

    scores = []

    # get score for each langcode
    for langcode in langcodes:
        scores.append((langcode, get_match_score(locale, langcode)))

    # find the best one
    sorted_langcodes = sorted(scores, key=lambda item_score: item_score[1], reverse=True)

    # matches matching only script or encoding or both are not useful
    if sorted_langcodes and sorted_langcodes[0][1] > score_map["territory"]:
        return sorted_langcodes[0][0]
    else:
        return None
def setup_locale(locale, lang=None):
    """
    Make the given locale the active one by exporting it via the $LANG
    environment variable and, if a ksdata.lang object is given, storing it
    in its ``lang`` attribute. DOES NOT PERFORM ANY CHECKS OF THE GIVEN
    LOCALE.

    :param locale: locale to setup
    :type locale: str
    :param lang: ksdata.lang object or None
    :return: None
    :rtype: None
    """
    if lang:
        lang.lang = locale

    os.environ["LANG"] = locale
def get_english_name(locale):
    """
    Function returning english name for the given locale.

    :param locale: locale to return english name for
    :type locale: str
    :return: english name for the locale or empty string if unknown
    :rtype: str
    :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
    """
    parts = parse_langcode(locale)
    # parse_langcode returns None for strings not matching LANGCODE_RE; the
    # previous check ('"language" not in parts') raised TypeError on None and
    # could never be True for a dict, because re's groupdict() always
    # contains every named group (unmatched groups map to None).
    if not parts or not parts["language"]:
        raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)

    # NOTE(review): unmatched optional groups yield None, so langtable may be
    # passed territoryId=None/scriptId=None here — confirm it accepts that.
    name = langtable.language_name(languageId=parts["language"],
                                   territoryId=parts.get("territory", ""),
                                   scriptId=parts.get("script", ""),
                                   languageIdQuery="en")

    return upcase_first_letter(name)
def get_native_name(locale):
    """
    Function returning native name for the given locale.

    :param locale: locale to return native name for
    :type locale: str
    :return: native name for the locale or empty string if unknown
    :rtype: str
    :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
    """
    parts = parse_langcode(locale)
    # parse_langcode returns None for strings not matching LANGCODE_RE; the
    # previous check ('"language" not in parts') raised TypeError on None and
    # could never be True for a dict, because re's groupdict() always
    # contains every named group (unmatched groups map to None).
    if not parts or not parts["language"]:
        raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)

    # Query in the locale's own language/script to get the native spelling.
    name = langtable.language_name(languageId=parts["language"],
                                   territoryId=parts.get("territory", ""),
                                   scriptId=parts.get("script", ""),
                                   languageIdQuery=parts["language"],
                                   scriptIdQuery=parts.get("script", ""))

    return upcase_first_letter(name)
def get_available_translations(localedir=None):
    """
    Method that generates (i.e. returns a generator) available translations for
    the installer in the given localedir.

    :type localedir: str
    :return: generator yielding available translations (languages)
    :rtype: generator yielding strings
    """
    localedir = localedir or gettext._default_localedir

    # usually there are no message files for en
    messagefiles = sorted(glob.glob(localedir + "/*/LC_MESSAGES/anaconda.mo") +
                          ["blob/en/blob/blob"])
    # The language code is the directory name three levels up from the .mo.
    trans_gen = (path.split(os.path.sep)[-3] for path in messagefiles)

    langs = set()

    for trans in trans_gen:
        parts = parse_langcode(trans)
        # NOTE(review): parse_langcode returns None for non-matching strings,
        # which would make parts.get raise AttributeError — assumes every
        # directory name under localedir is a valid langcode.
        lang = parts.get("language", "")
        if lang and lang not in langs:
            langs.add(lang)
            # check if there are any locales for the language
            locales = get_language_locales(lang)
            if not locales:
                continue

            yield lang
def get_language_locales(lang):
    """
    Function returning all locales available for the given language.

    :param lang: language to get available locales for
    :type lang: str
    :return: a list of available locales
    :rtype: list of strings
    :raise InvalidLocaleSpec: if an invalid language is given (see LANGCODE_RE)
    """
    parts = parse_langcode(lang)
    # parse_langcode returns None for strings not matching LANGCODE_RE; the
    # previous check ('"language" not in parts') raised TypeError on None and
    # could never be True for a dict, because re's groupdict() always
    # contains every named group (unmatched groups map to None).
    if not parts or not parts["language"]:
        raise InvalidLocaleSpec("'%s' is not a valid language" % lang)

    return langtable.list_locales(languageId=parts["language"],
                                  territoryId=parts.get("territory", ""),
                                  scriptId=parts.get("script", ""))
def get_territory_locales(territory):
    """
    Function returning list of locales for the given territory. The list is
    sorted from the most probable locale to the least probable one (based on
    langtable's ranking.

    :param territory: territory to return locales for
    :type territory: str
    :return: list of locales
    :rtype: list of strings
    """
    # Thin wrapper; langtable does the ranking.
    return langtable.list_locales(territoryId=territory)
def get_locale_keyboards(locale):
    """
    Function returning preferred keyboard layouts for the given locale.

    :param locale: locale string (see LANGCODE_RE)
    :type locale: str
    :return: list of preferred keyboard layouts
    :rtype: list of strings
    :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
    """
    parts = parse_langcode(locale)
    # parse_langcode returns None for strings not matching LANGCODE_RE; the
    # previous check ('"language" not in parts') raised TypeError on None and
    # could never be True for a dict, because re's groupdict() always
    # contains every named group (unmatched groups map to None).
    if not parts or not parts["language"]:
        raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)

    return langtable.list_keyboards(languageId=parts["language"],
                                    territoryId=parts.get("territory", ""),
                                    scriptId=parts.get("script", ""))
def get_locale_timezones(locale):
    """
    Function returning preferred timezones for the given locale.

    :param locale: locale string (see LANGCODE_RE)
    :type locale: str
    :return: list of preferred timezones
    :rtype: list of strings
    :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
    """
    parts = parse_langcode(locale)
    # parse_langcode returns None for strings not matching LANGCODE_RE; the
    # previous check ('"language" not in parts') raised TypeError on None and
    # could never be True for a dict, because re's groupdict() always
    # contains every named group (unmatched groups map to None).
    if not parts or not parts["language"]:
        raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)

    return langtable.list_timezones(languageId=parts["language"],
                                    territoryId=parts.get("territory", ""),
                                    scriptId=parts.get("script", ""))
def get_locale_territory(locale):
    """
    Function returning locale's territory.

    :param locale: locale string (see LANGCODE_RE)
    :type locale: str
    :return: territory or None
    :rtype: str or None
    :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE)
    """
    parts = parse_langcode(locale)
    # parse_langcode returns None for strings not matching LANGCODE_RE; the
    # previous check ('"language" not in parts') raised TypeError on None and
    # could never be True for a dict, because re's groupdict() always
    # contains every named group (unmatched groups map to None).
    if not parts or not parts["language"]:
        raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)

    return parts.get("territory", None)
def get_xlated_timezone(tz_spec_part):
    """
    Function returning translated name of a region, city or complete timezone
    name according to the current value of the $LANG variable.

    :param tz_spec_part: a region, city or complete timezone name
    :type tz_spec_part: str
    :return: translated name of the given region, city or timezone
    :rtype: str
    :raise InvalidLocaleSpec: if $LANG holds an invalid locale
    """
    locale = os.environ.get("LANG", constants.DEFAULT_LANG)
    parts = parse_langcode(locale)
    # parse_langcode returns None for strings not matching LANGCODE_RE; the
    # previous check ('"language" not in parts') raised TypeError on None and
    # could never be True for a dict, because re's groupdict() always
    # contains every named group (unmatched groups map to None).
    if not parts or not parts["language"]:
        raise InvalidLocaleSpec("'%s' is not a valid locale" % locale)

    xlated = langtable.timezone_name(tz_spec_part, languageIdQuery=parts["language"],
                                     territoryIdQuery=parts.get("territory", ""),
                                     scriptIdQuery=parts.get("script", ""))

    # langtable returns unicode; callers expect a UTF-8 encoded str (py2).
    return xlated.encode("utf-8")
def write_language_configuration(lang, root):
    """
    Write language configuration to the $root/etc/locale.conf file.

    :param lang: ksdata.lang object
    :param root: path to the root of the installed system
    :raise LocalizationConfigError: if the file cannot be written
    """
    try:
        fpath = os.path.normpath(root + LOCALE_CONF_FILE_PATH)

        with open(fpath, "w") as fobj:
            fobj.write('LANG="%s"\n' % lang.lang)

    except IOError as ioerr:
        # Re-raise as a domain-specific error with a readable message.
        msg = "Cannot write language configuration file: %s" % ioerr.strerror
        raise LocalizationConfigError(msg)
def load_firmware_language(lang):
    """
    Procedure that loads firmware language information (if any). It stores the
    information in the given ksdata.lang object and sets the $LANG environment
    variable.

    :param lang: ksdata.lang object
    :return: None
    :rtype: None
    """
    if lang.lang and lang.seen:
        # set in kickstart, do not override
        return

    try:
        n = "/sys/firmware/efi/efivars/PlatformLang-8be4df61-93ca-11d2-aa0d-00e098032b8c"
        # Python 2 open(): third argument 0 means unbuffered.
        d = open(n, 'r', 0).read()
    except:
        # Deliberate best-effort: any failure (no EFI, unreadable var, ...)
        # simply means no firmware language is used.
        return

    # the contents of the file are:
    # 4-bytes of attribute data that we don't care about
    # NUL terminated ASCII string like 'en-US'.
    if len(d) < 10:
        log.debug("PlatformLang was too short")
        return
    d = d[4:]
    if d[2] != '-':
        log.debug("PlatformLang was malformed")
        return

    # they use - and we use _, so fix it...
    d = d[:2] + '_' + d[3:-1]

    # UEFI 2.3.1 Errata C specifies 2 aliases in common use that
    # aren't part of RFC 4646, but are allowed in PlatformLang.
    # Because why make anything simple?
    if d.startswith('zh_chs'):
        d = 'zh_Hans'
    elif d.startswith('zh_cht'):
        d = 'zh_Hant'
    d += '.UTF-8'

    if not is_supported_locale(d):
        log.debug("PlatformLang was '%s', which is unsupported." % d)
        return

    locales = get_language_locales(d)
    if not locales:
        log.debug("No locales found for the PlatformLang '%s'." % d)
        return

    log.debug("Using UEFI PlatformLang '%s' ('%s') as our language." % (d, locales[0]))
    # Use the most probable locale for the firmware language.
    setup_locale(locales[0], lang)
| projectatomic/anaconda | pyanaconda/localization.py | Python | gpl-2.0 | 15,233 |
# -*- coding: utf-8 -*-
from django.db.models import Q
from django.contrib.auth.models import User
from crispy_forms.bootstrap import FormActions, PrependedText
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Field, Fieldset, Layout, Reset, Submit
from django.forms import ModelForm
from django.utils.translation import ugettext as _
from .models import Project, ProjectCollaborator, Task
class CreateProjectForm(ModelForm):
    """Model form for creating a :class:`Project`.

    Renders a crispy-forms Bootstrap 3 horizontal layout with Create and
    Clean (reset) buttons.
    """

    def __init__(self, *args, **kwargs):
        super(CreateProjectForm, self).__init__(*args, **kwargs)
        # crispy-forms helper: Bootstrap 3 horizontal form with 2/8 columns
        self.helper = FormHelper(self)
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-md-2'
        self.helper.field_class = 'col-md-8'
        self.helper.layout = Layout(
            Fieldset(
                _('Create a project'),
                Field('name', required=True, autofocus=True),
                Field('description'),
                Field('client'),
                # 'dtpicker date' hooks the client-side datetime picker
                Field('due_date', css_class='dtpicker date',
                      data_date_format="YYYY-MM-DD"),
                # 'color_field' hooks the client-side color picker
                Field('color', css_class='color_field'),
                Field('external_url')),
            FormActions(
                Submit('save', _('Create'), css_class='col-md-offset-2'),
                Reset('reset', _('Clean'))
            )
        )

    class Meta:
        model = Project
        fields = ['name',
                  'description',
                  'client',
                  'due_date',
                  'color',
                  'external_url']
class CreateProjectCollaboratorForm(ModelForm):
    """Model form for adding a :class:`ProjectCollaborator` to a project."""

    def __init__(self, *args, **kwargs):
        super(CreateProjectCollaboratorForm, self).__init__(*args, **kwargs)
        # only active projects can receive collaborators
        self.fields['project'].queryset = Project.objects.filter(is_active=True)
        # sort users alphabetically for the select widget
        self.fields['user'].queryset = User.objects.order_by('username')
        self.helper = FormHelper(self)
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-md-2'
        self.helper.field_class = 'col-md-8'
        self.helper.layout = Layout(
            Fieldset(
                _('Add a project collaborator'),
                Field('project', required=True, autofocus=True),
                Field('user', required=True)),
            FormActions(
                Submit('save', _('Create'), css_class='col-md-offset-2'),
                Reset('reset', _('Clean'))
            )
        )

    class Meta:
        model = ProjectCollaborator
        fields = ['project',
                  'user']
class EditProjectForm(ModelForm):
    """Model form for editing an existing :class:`Project`.

    Unlike :class:`CreateProjectForm`, this exposes every model field
    (``fields = '__all__'``), including ``is_active`` and ``owner``.
    """

    def __init__(self, *args, **kwargs):
        super(EditProjectForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-md-2'
        self.helper.field_class = 'col-md-8'
        self.helper.layout = Layout(
            Fieldset(
                _('Edit Project'),
                'name',
                'description',
                'client',
                Field('due_date', css_class='dtpicker date',
                      data_date_format="YYYY-MM-DD"),
                Field('color', css_class='color_field'),
                'external_url',
                # Field('is_active', css_class='col-md-offset-4'),
                PrependedText('is_active', ''),
                'owner'
            ),
            FormActions(
                Submit('update', _('Update'), css_class='col-md-offset-2')
            ))

    class Meta:
        model = Project
        fields = '__all__'
class CreateTaskForm(ModelForm):
    """Model form for creating a :class:`Task`.

    Requires the view to pass ``request`` as a keyword argument so the
    project choices can be restricted to projects the requesting user
    owns or collaborates on.
    """

    def __init__(self, *args, **kwargs):
        # the view must supply request=...; popped before ModelForm.__init__
        self.request = kwargs.pop("request")
        super(CreateTaskForm, self).__init__(*args, **kwargs)
        user = self.request.user

        # The user's projects only
        self.fields['project'].queryset = Project.objects.filter(
            Q(projectcollaborator__user=user) | Q(owner=user),
            is_active=True).distinct()

        self.helper = FormHelper(self)
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-md-3'
        self.helper.field_class = 'col-md-8'
        self.helper.layout = Layout(
            Fieldset(
                _('Create a task'),
                Field('name', required=True, autofocus=True),
                Field('description', rows=2),
                Field('project', required=True),
                Field('date', css_class='dtpicker date',
                      data_date_format="YYYY-MM-DD"),
                # hours are entered in half-hour increments
                Field('total_hours', min='0', step='0.5'),
                Field('task_type'),
                Field('external_url')),
            FormActions(
                Submit('save', _('Create'), css_class='col-md-offset-2'),
                Reset('reset', _('Clean'))
            )
        )

    class Meta:
        model = Task
        fields = ['name',
                  'description',
                  'project',
                  'task_type',
                  'date',
                  'total_hours',
                  'external_url'
                  ]
class EditTaskForm(ModelForm):
    """Model form for editing a :class:`Task`.

    Requires the view to pass ``request`` as a keyword argument; project
    choices are restricted to projects the requesting user owns or
    collaborates on, and :meth:`save` only persists changes when the
    requesting user owns the task.
    """

    def __init__(self, *args, **kwargs):
        # the view must supply request=...; popped before ModelForm.__init__
        self.request = kwargs.pop("request")
        super(EditTaskForm, self).__init__(*args, **kwargs)
        self.request_user = self.request.user

        # restrict to active projects the user collaborates on or owns
        self.fields['project'].queryset = Project.objects.filter(
            Q(projectcollaborator__user=self.request_user) | Q(
                owner=self.request_user),
            is_active=True).distinct()

        self.helper = FormHelper(self)
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-md-3'
        self.helper.field_class = 'col-md-8'
        self.helper.layout = Layout(
            Fieldset(
                _('Edit Task'),
                'name',
                'project',
                'task_type',
                'description',
                Field('date', css_class='dtpicker date',
                      data_date_format="YYYY-MM-DD"),
                Field('total_hours', min='0', step='0.5'),
                'external_url',
            ),
            FormActions(
                Submit('update', _('Update'), css_class='col-md-offset-2')
            ))

    class Meta:
        model = Task
        fields = ['name',
                  'project',
                  'task_type',
                  'description',
                  'date',
                  'total_hours',
                  'external_url'
                  ]

    def save(self, commit=True):
        """
        We only save a change through this form if the user
        owns the task.

        FIX: accept the standard ``commit`` argument so the override is
        signature-compatible with ``ModelForm.save(commit=True)``; the
        default preserves the previous behavior.

        :return: the (possibly unsaved) task instance; when the requesting
                 user does not own the task the instance is returned
                 without being persisted.
        """
        instance = super(EditTaskForm, self).save(commit=False)
        user = self.request_user
        if commit and self.instance.owner == user:
            instance.save()
        return instance
class ProfileForm(ModelForm):
    """Model form for editing the basic profile fields of a Django ``User``."""

    def __init__(self, *args, **kwargs):
        super(ProfileForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_class = 'form-horizontal'
        self.helper.label_class = 'col-md-2'
        self.helper.field_class = 'col-md-8'
        self.helper.layout = Layout(
            Fieldset(
                _('Edit profile'),
                'username',
                'first_name',
                'last_name',
                'email'),
            FormActions(
                Submit('update', _('Update'), css_class='col-md-offset-2')
            ))

    class Meta:
        model = User
        fields = ['username',
                  'first_name',
                  'last_name',
                  'email']
| MSA-Argentina/relojito_project | relojito/app/forms.py | Python | mit | 7,675 |
from django.shortcuts import render
from django.contrib.auth.models import User
def all_profiles_page(request):
    """Render the profile list page with all active users."""
    # filter() already returns a QuerySet; the trailing .all() was a no-op clone
    users = User.objects.filter(is_active=True)
    return render(request, 'profilelist.html', context={'users': users})
def profile_page(request, user_id=1):
    """Render a single user's profile page, or an error page if the user
    does not exist."""
    user = User.objects.filter(id=user_id).first()
    if user is None:
        return render(request, 'error.html',
                      context={'error_type': '404 Not Found'})
    return render(request, 'profilepage.html', context={'user': user})
| HeyIamJames/django-imager | imagersite/imagerprofile/views.py | Python | mit | 543 |
# -*- coding: utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
# Copyright (c) 2012 dput authors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
from dput.core import logger
from dput.exceptions import (ChangesFileException, HookException)
class HashValidationError(HookException):
    """
    Subclass of the :class:`dput.exceptions.HookException`.

    Thrown if the ``checksum`` checker encounters an issue, i.e. when the
    checksums recorded in a changes file do not match the files on disk.
    """
    pass
def validate_checksums(changes, profile, interface):
    """
    The ``checksum`` checker is a stock dput checker that verifies the
    checksums of packages intended for upload. It is the simplest checker
    there is.

    Profile key: none.

    Example profile::

        {
            ...
            "hash": "md5"
            ...
        }

    The hash may be one of md5, sha1, sha256.
    """
    wanted_hash = profile["hash"]
    try:
        changes.validate_checksums(check_hash=wanted_hash)
    except ChangesFileException as err:
        raise HashValidationError(
            "Bad checksums on %s: %s" % (changes.get_filename(), err)
        )
| Debian/dput-ng | dput/hooks/checksum.py | Python | gpl-2.0 | 1,756 |
#!/usr/bin/env python
#
# This example shows the different aspects of user/team management.
#
import sys
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 4:
    print('usage: %s <sysdig-token> team-name user-name' % sys.argv[0])
    print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
    sys.exit(1)

sdc_token = sys.argv[1]

#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token, sdc_url='https://app.sysdigcloud.com')

team_name = sys.argv[2]
user_name = sys.argv[3]

# BUG FIX: the original passed a single tuple to print() -- a leftover of a
# mechanical Python 2 -> 3 conversion -- which printed messages as tuple
# reprs like "('Trying to invite a user:', 'foo')".  Pass separate
# arguments instead throughout this script.
print('Trying to invite a user:', user_name)
ok, res = sdclient.create_user_invite(user_name)
if not ok:
    if res == 'user ' + user_name + ' already exists':
        print('User creation failed because', user_name, 'already exists. Continuing.')
    else:
        print('User creation failed:', res, '. Exiting.')
        sys.exit(1)
else:
    print('User creation succeeded')

# Possible failures on Team creation might include having reached the
# max limit on Teams for this customer account or if the Team by that
# name already exists. Since a previous successful run of this test
# would have deleted the Team by the same name, and we need to be able
# to configure Teams for this test to pass, we'll treat both types of
# error as a genuine fail of the test.
print('Now trying to create a team with name:', team_name)
ok, res = sdclient.create_team(team_name)
if not ok:
    print('Team creation failed:', res, '. Exiting.')
    sys.exit(1)
else:
    print('Team creation succeeded.', res)

print('Now trying to find team with name:', team_name)
ok, res = sdclient.get_team(team_name)
if not ok:
    print('Could not get team info:', res, '. Exiting.')
    sys.exit(1)
else:
    print('Team fetch succeeded')

print('Now trying to edit team:', team_name)
memberships = {
    'admin@draios.com': 'ROLE_TEAM_MANAGER',
    'john-doe@sysdig.com': 'ROLE_TEAM_READ'
}
ok, res = sdclient.edit_team(team_name, description='Nextgen2', memberships=memberships)
if not ok:
    print('Could not edit team:', res, '. Exiting.')
    sys.exit(1)
else:
    print('Edited team to change description and add users')

print('Now trying to edit user:', user_name)
ok, res = sdclient.edit_user(user_name, firstName='Just', lastName='Edited3', systemRole='ROLE_CUSTOMER')
if not ok:
    print('Could not edit user:', res, '. Exiting.')
    sys.exit(1)
else:
    print('Edit user succeeded')

print('Now trying to delete the team:', team_name)
ok, res = sdclient.delete_team(team_name)
if not ok:
    print('Could not delete team:', res, '. Exiting.')
    sys.exit(1)
else:
    print('Delete team succeeded')

sys.exit(0)
| draios/python-sdc-client | examples/user_team_mgmt.py | Python | mit | 2,675 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth import get_user_model
from django.forms import widgets
from django.forms.utils import ErrorDict
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import cached_property
from djng.forms import fields
from djng.styling.bootstrap3.forms import Bootstrap3ModelForm
from shop.forms.widgets import CheckboxInput, RadioSelect, Select
from shop.models.address import ShippingAddressModel, BillingAddressModel
from shop.models.customer import CustomerModel
from shop.modifiers.pool import cart_modifiers_pool
from .base import DialogForm, DialogModelForm, UniqueEmailValidationMixin
class CustomerForm(DialogModelForm):
    """Checkout dialog form for a registered customer's personal details.

    ``email``, ``first_name`` and ``last_name`` live on the related user
    object, so they are declared explicitly, seeded from the instance in
    ``__init__`` and copied back in :meth:`save`.
    """
    scope_prefix = 'customer'
    legend = _("Customer's Details")

    email = fields.EmailField(label=_("Email address"))
    first_name = fields.CharField(label=_("First Name"))
    last_name = fields.CharField(label=_("Last Name"))

    class Meta:
        model = CustomerModel
        exclude = ['user', 'recognized', 'number', 'last_access']
        custom_fields = ['email', 'first_name', 'last_name']

    def __init__(self, initial=None, instance=None, *args, **kwargs):
        initial = dict(initial) if initial else {}
        assert instance is not None
        # seed the form with the current values of the proxied user fields
        initial.update(dict((f, getattr(instance, f)) for f in self.Meta.custom_fields))
        super(CustomerForm, self).__init__(initial=initial, instance=instance, *args, **kwargs)

    def save(self, commit=True):
        # copy the proxied fields back onto the instance before saving
        for f in self.Meta.custom_fields:
            setattr(self.instance, f, self.cleaned_data[f])
        return super(CustomerForm, self).save(commit)

    @classmethod
    def form_factory(cls, request, data, cart):
        """Bind, validate and (when valid) persist the customer details."""
        customer_form = cls(data=data, instance=request.customer)
        if customer_form.is_valid():
            customer_form.instance.recognize_as_registered(request, commit=False)
            customer_form.save()
        return customer_form
class GuestForm(UniqueEmailValidationMixin, DialogModelForm):
    """Checkout dialog form asking an anonymous customer only for an email.

    Works directly on the Django user object behind the customer, since
    the email address is the only field collected.
    """
    scope_prefix = 'guest'
    form_name = 'customer_form'  # Override form name to reuse template `customer-form.html`
    legend = _("Customer's Email")

    email = fields.EmailField(label=_("Email address"))

    class Meta:
        model = get_user_model()  # since we only use the email field, use the User model directly
        fields = ['email']

    def __init__(self, initial=None, instance=None, *args, **kwargs):
        # unwrap the user object when a customer instance is handed in
        if isinstance(instance, CustomerModel):
            instance = instance.user
        super(GuestForm, self).__init__(initial=initial, instance=instance, *args, **kwargs)

    @classmethod
    def form_factory(cls, request, data, cart):
        """Bind the email and, when valid, mark the customer as a guest."""
        customer_form = cls(data=data, instance=request.customer.user)
        if customer_form.is_valid():
            request.customer.recognize_as_guest(request, commit=False)
            customer_form.save()
        return customer_form
class AddressForm(DialogModelForm):
    """Common base for the shipping and billing address dialog forms.

    Supports multiple stored addresses per customer (selected through
    ``active_priority``) and a "use primary address" shortcut that makes
    one address stand in for the other.
    """
    # field to be superseded by a select widget
    active_priority = fields.CharField(
        required=False,
        widget=widgets.HiddenInput(),
    )

    use_primary_address = fields.BooleanField(
        label="use primary address",  # label will be overridden by Shipping/Billing/AddressForm
        required=False,
        initial=True,
        widget=CheckboxInput(),
    )

    # fields forwarded from the CMS plugin configuration
    plugin_fields = ['plugin_id', 'plugin_order', 'use_primary_address']

    class Meta:
        exclude = ('customer', 'priority',)

    def __init__(self, initial=None, instance=None, cart=None, *args, **kwargs):
        self.cart = cart
        self.multi_addr = kwargs.pop('multi_addr', False)
        self.allow_use_primary = kwargs.pop('allow_use_primary', False)
        self.populate_siblings_summary()
        if instance:
            initial = dict(initial or {}, active_priority=instance.priority)
            # "use primary" is implied when the cart has no own address set
            if instance.address_type == 'shipping':
                initial['use_primary_address'] = cart.shipping_address is None
            else:  # address_type == billing
                initial['use_primary_address'] = cart.billing_address is None
        super(AddressForm, self).__init__(initial=initial, instance=instance, *args, **kwargs)

    @classmethod
    def get_model(cls):
        # concrete address model, supplied by the Meta of the subclass
        return cls.Meta.model

    @cached_property
    def field_css_classes(self):
        # per-field CSS classes so each address field can be styled individually
        css_classes = {'*': getattr(Bootstrap3ModelForm, 'field_css_classes')}
        for name, field in self.fields.items():
            if not field.widget.is_hidden:
                css_classes[name] = ['has-feedback', 'form-group', 'shop-address-{}'.format(name)]
        return css_classes

    @classmethod
    def form_factory(cls, request, data, cart):
        """
        From the given request, update the database model.
        If the form data is invalid, return an error dictionary to update the response.
        """
        # search for the associated address DB instance or create a new one
        current_address = cls.get_address(cart)
        try:
            active_priority = int(data.get('active_priority'))
        except (ValueError, TypeError):
            if data.get('use_primary_address'):
                # NOTE(review): in this branch ``active_address`` is never
                # assigned; the "elif active_address is None" below would
                # raise NameError if reached — confirm this combination of
                # inputs is unreachable in practice.
                active_priority = 'nop'
            else:
                active_priority = data.get('active_priority', 'add')
                active_address = cls.get_model().objects.get_fallback(customer=request.customer)
        else:
            filter_args = dict(customer=request.customer, priority=active_priority)
            active_address = cls.get_model().objects.filter(**filter_args).first()

        if active_priority == 'add':
            # Add a newly filled address for the given customer
            address_form = cls(data=data, cart=cart)
            if address_form.is_valid():
                # prevent adding the same address twice
                all_field_names = [f.name for f in cls.get_model()._meta.get_fields()]
                filter_args = dict((attr, val) for attr, val in address_form.data.items()
                                   if attr in all_field_names and val)
                filter_args.update(customer=request.customer)
                try:
                    existing_address = cls.get_model().objects.get(**filter_args)
                except cls.get_model().DoesNotExist:
                    next_address = address_form.save(commit=False)
                    if next_address:
                        next_address.customer = request.customer
                        next_address.priority = cls.get_model().objects.get_max_priority(request.customer) + 1
                        next_address.save()
                        address_form.data.update(active_priority=str(next_address.priority))
                    else:
                        address_form.data.update(active_priority='nop')
                    address_form.set_address(cart, next_address)
                else:
                    address_form.set_address(cart, existing_address)
                address_form.populate_siblings_summary()
        elif active_address is None and not data.get('use_primary_address'):
            # customer selected 'Add another address', hence create a new empty form
            initial = dict((key, val) for key, val in data.items() if key in cls.plugin_fields)
            address_form = cls(initial=initial)
            address_form.data.update(address_form.get_initial_data())
            address_form.data.update(active_priority='add')
        elif current_address == active_address:
            # an existing entity of AddressModel was edited
            address_form = cls(data=data, instance=active_address, cart=cart)
            if address_form.is_valid():
                next_address = address_form.save()
                address_form.set_address(cart, next_address)
        else:
            # an address with another priority was selected
            initial = dict(data)
            for attr in cls().get_initial_data().keys():
                if hasattr(active_address, attr):
                    initial.update({attr: getattr(active_address, attr)})
            initial.update(active_priority=str(active_address.priority))
            address_form = cls(data=initial, instance=current_address, cart=cart)
            address_form.set_address(cart, active_address)
        return address_form

    def populate_siblings_summary(self):
        """
        Build a list of value-labels to populate the address choosing element
        """
        self.siblings_summary = []
        if self.cart is not None:
            AddressModel = self.get_model()
            addresses = AddressModel.objects.filter(customer=self.cart.customer).order_by('priority')
            for number, addr in enumerate(addresses, 1):
                self.siblings_summary.append({
                    'value': str(addr.priority),
                    'label': "{}. {}".format(number, addr.as_text().strip().replace('\n', ' – '))
                })

    def full_clean(self):
        super(AddressForm, self).full_clean()
        if self.is_bound and self['use_primary_address'].value():
            # reset errors, since then the form is always regarded as valid
            self._errors = ErrorDict()

    def save(self, commit=True):
        # when "use primary address" is checked, nothing is persisted and
        # None is returned implicitly
        if not self['use_primary_address'].value():
            return super(AddressForm, self).save(commit)

    def get_response_data(self):
        return dict(self.data, siblings_summary=self.siblings_summary)

    def as_div(self):
        # Intentionally rendered without field `use_primary_address`, this must be added
        # on top of the form template manually
        self.fields.pop('use_primary_address', None)
        return super(AddressForm, self).as_div()

    def as_text(self):
        # when the primary address substitutes this one, render its label only
        bound_field = self['use_primary_address']
        if bound_field.value():
            return bound_field.field.widget.choice_label
        return super(AddressForm, self).as_text()
class ShippingAddressForm(AddressForm):
    """Address dialog form bound to the cart's shipping address."""
    scope_prefix = 'shipping_address'
    legend = _("Shipping Address")

    class Meta(AddressForm.Meta):
        model = ShippingAddressModel
        widgets = {
            'country': Select(attrs={'ng-change': 'updateSiblingAddress()'}),
        }

    def __init__(self, *args, **kwargs):
        super(ShippingAddressForm, self).__init__(*args, **kwargs)
        # relabel the shared checkbox for the shipping context
        self.fields['use_primary_address'].label = _("Use billing address for shipping")
        self.fields['use_primary_address'].widget.choice_label = self.fields['use_primary_address'].label  # Django < 1.11

    @classmethod
    def get_address(cls, cart):
        # the address this form currently manages on the cart
        return cart.shipping_address

    def set_address(self, cart, instance):
        # None means "fall back to the billing (primary) address"
        cart.shipping_address = instance if not self['use_primary_address'].value() else None
class BillingAddressForm(AddressForm):
    """Address dialog form bound to the cart's billing address."""
    scope_prefix = 'billing_address'
    legend = _("Billing Address")

    class Meta(AddressForm.Meta):
        model = BillingAddressModel

    def __init__(self, *args, **kwargs):
        super(BillingAddressForm, self).__init__(*args, **kwargs)
        # relabel the shared checkbox for the billing context
        self.fields['use_primary_address'].label = _("Use shipping address for billing")
        self.fields['use_primary_address'].widget.choice_label = self.fields['use_primary_address'].label  # Django < 1.11

    @classmethod
    def get_address(cls, cart):
        # the address this form currently manages on the cart
        return cart.billing_address

    def set_address(self, cart, instance):
        # None means "fall back to the shipping (primary) address"
        cart.billing_address = instance if not self['use_primary_address'].value() else None
class PaymentMethodForm(DialogForm):
    """Checkout dialog form to choose one of the enabled payment modifiers."""
    scope_prefix = 'payment_method'

    payment_modifier = fields.ChoiceField(
        label=_("Payment Method"),
        widget=RadioSelect(attrs={'ng-change': 'updateMethod()'}),
    )

    def __init__(self, *args, **kwargs):
        # offer only the payment modifiers not disabled for this cart
        choices = [m.get_choice() for m in cart_modifiers_pool.get_payment_modifiers()
                   if not m.is_disabled(kwargs['cart'])]
        self.base_fields['payment_modifier'].choices = choices
        if len(choices) == 1:
            # if there is only one payment method available, always set it as default
            # (the previous comment mistakenly said "shipping method")
            try:
                kwargs['initial']['payment_modifier'] = choices[0][0]
            except KeyError:
                pass
        super(PaymentMethodForm, self).__init__(*args, **kwargs)

    def has_choices(self):
        # True when at least one payment method can be offered
        return len(self.base_fields['payment_modifier'].choices) > 0

    @classmethod
    def form_factory(cls, request, data, cart):
        """Bind the chosen payment method and store it in ``cart.extra``."""
        cart.update(request)
        payment_method_form = cls(data=data, cart=cart)
        if payment_method_form.is_valid():
            cart.extra.update(payment_method_form.cleaned_data,
                              payment_extra_data=data.get('payment_data', {}))
        return payment_method_form
class ShippingMethodForm(DialogForm):
    """Checkout dialog form to choose one of the enabled shipping modifiers."""
    scope_prefix = 'shipping_method'

    shipping_modifier = fields.ChoiceField(
        label=_("Shipping Method"),
        widget=RadioSelect(attrs={'ng-change': 'updateMethod()'}),
    )

    def __init__(self, *args, **kwargs):
        # offer only the shipping modifiers not disabled for this cart
        choices = [m.get_choice() for m in cart_modifiers_pool.get_shipping_modifiers()
                   if not m.is_disabled(kwargs['cart'])]
        self.base_fields['shipping_modifier'].choices = choices
        if len(choices) == 1:
            # with only one choice, initialize with it
            try:
                kwargs['initial']['shipping_modifier'] = choices[0][0]
            except KeyError:
                pass
        super(ShippingMethodForm, self).__init__(*args, **kwargs)

    def has_choices(self):
        # True when at least one shipping method can be offered
        return len(self.base_fields['shipping_modifier'].choices) > 0

    @classmethod
    def form_factory(cls, request, data, cart):
        """Bind the chosen shipping method and store it in ``cart.extra``."""
        cart.update(request)
        shipping_method_form = cls(data=data, cart=cart)
        if shipping_method_form.is_valid():
            cart.extra.update(shipping_method_form.cleaned_data)
        return shipping_method_form
class ExtraAnnotationForm(DialogForm):
    """Checkout dialog form letting the customer attach a free-text note
    to the order."""
    scope_prefix = 'extra_annotation'

    annotation = fields.CharField(
        label=_("Extra annotation for this order"),
        required=False,
        widget=widgets.Textarea,
    )

    @classmethod
    def form_factory(cls, request, data, cart):
        """Bind the annotation and, when valid, store it in ``cart.extra``."""
        bound_form = cls(data=data)
        if bound_form.is_valid():
            cart.extra.update(bound_form.cleaned_data)
        return bound_form
class AcceptConditionForm(DialogForm):
    """Checkout dialog form with a single mandatory "accept the terms"
    checkbox.

    Several instances may exist per page (one per CMS plugin), so the
    scope prefix and form name are suffixed with the plugin id.
    """
    scope_prefix = 'accept_condition'

    accept = fields.BooleanField(
        required=True,
        widget=CheckboxInput(),
    )

    def __init__(self, data=None, initial=None, *args, **kwargs):
        # derive a per-plugin scope so multiple accept boxes don't collide
        plugin_id = data and data.get('plugin_id') or initial and initial.get('plugin_id') or 'none'
        scope_prefix = '{0}.plugin_{1}'.format(self.scope_prefix, plugin_id)
        self.form_name = '{0}.plugin_{1}'.format(self.form_name, plugin_id)
        super(AcceptConditionForm, self).__init__(data=data, initial=initial,
                                                  scope_prefix=scope_prefix, *args, **kwargs)

    @classmethod
    def form_factory(cls, request, data, cart):
        """Bind the checkbox; unchecked submissions default to not accepted."""
        data = data or {'accept': False}
        accept_form = cls(data=data)
        return accept_form
| nimbis/django-shop | shop/forms/checkout.py | Python | bsd-3-clause | 15,273 |
import math
# Determines the day of the year
# INPUT
# yr: year of interest (ex. 1989)
# month: month of interest (Jan=1...Dec=12)
# day: day (of the month) of interest
# OUTPUT
# dayj: Julian day of the year
def DAYOYR(yr, month, day):
    """Determine the day of the year for a calendar date.

    INPUT
    yr: year of interest (ex. 1989)
    month: month of interest (Jan=1...Dec=12)
    day: day (of the month) of interest
    OUTPUT
    dayj: day of the year (1..365, or 1..366 in a leap year)
    """
    # cumulative day counts before the first of each month (non-leap year)
    daysinmonth = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
    # Full Gregorian leap-year rule: divisible by 4, except century years
    # unless divisible by 400.  The original "yr % 4 == 0" test wrongly
    # classified years like 1900 as leap years.
    if yr % 4 == 0 and (yr % 100 != 0 or yr % 400 == 0):
        # shift the March..December offsets by one for Feb 29
        for i in range(2, 12):
            daysinmonth[i] = daysinmonth[i] + 1
    # end if
    dayj = daysinmonth[month - 1] + day
    return dayj
# -*- coding: utf-8 -*-
import os
import base64
from StringIO import StringIO
from django.conf import settings
from django.utils import simplejson
from django.core.files.base import ContentFile
from django.db import models, DEFAULT_DB_ALIAS
from django.utils.encoding import smart_unicode
from django.db.models.fields.files import FileField
from django.core.serializers.python import _get_model
from django.core.serializers.base import DeserializedObject
from django.core.serializers.json import Serializer as DJSerializer, DeserializationError
class Serializer(DJSerializer):
    """JSON serializer (Python 2 era) that extends Django's serializer with
    per-object change metadata and inline base64-encoded file contents.

    The queryset fed to :meth:`serialize` yields ``(obj, changes, action)``
    triples instead of bare model instances.
    """

    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        ``queryset`` yields ``(obj, changes, action)``; ``changes`` is the
        repr of a dict of changed fields and ``action`` encodes the kind of
        change (0 appears to mean "no field changes"; 1 is treated below as
        "created" — TODO confirm against the producer of these triples).
        """
        self.options = options
        self.stream = options.pop("stream", StringIO())
        self.selected_fields = options.pop("fields", None)
        self.use_natural_keys = options.pop("use_natural_keys", False)
        self.start_serialization()
        for obj, changes, action in queryset:
            if action == 0:
                changes = []
            else:
                # SECURITY NOTE: eval() on the stored change string executes
                # arbitrary code if the data is ever attacker-controlled;
                # a literal parser (ast.literal_eval) would be safer.
                changes = eval(changes).keys()
            self.start_object(obj)
            # Use the concrete parent class' _meta instead of the object's _meta
            # This is to avoid local_fields problems for proxy models. Refs #17717.
            concrete_model = obj._meta.concrete_model
            for field in concrete_model._meta.local_fields:
                if field.serialize:
                    if field.rel is None:
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field, changes, action)
                    else:
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in concrete_model._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj, changes, action)
        self.end_serialization()
        return self.getvalue()

    def handle_field(self, obj, field, changes, action):
        """Serialize one field; FileFields additionally get a dict with the
        file name and (when changed or newly created) its base64 payload."""
        super(Serializer, self).handle_field(obj, field)
        if isinstance(field, FileField):
            fileobj = getattr(obj, field.name)
            if fileobj:
                imagedata = ""
                # embed the payload only when the field changed or the
                # object is new (action == 1)
                if field.name in changes or action == 1:
                    # NOTE(review): the file handle opened here is never
                    # closed explicitly; relies on refcounting
                    imagedata = open(fileobj.path, "rb").read().encode("base64")
                self._current[field.name] = {
                    'data': imagedata,
                    'name': os.path.basename(fileobj.name)
                }

    def end_object(self, obj, changes=None, action=None):
        """Flush the accumulated field dict, adding changes/action metadata."""
        self.objects.append({
            'model': smart_unicode(obj._meta),
            'pk': smart_unicode(obj._get_pk_val(), strings_only=True),
            'fields': self._current,
            'changes': changes,
            'action': action
        })
        self._current = None
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.

    Generator wrapper (Python 2 syntax) around :func:`CustomDeserializer`
    that converts any failure into ``DeserializationError``.
    """
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    try:
        for obj in CustomDeserializer(simplejson.load(stream), **options):
            yield obj
    except GeneratorExit:
        # let the consumer close the generator normally
        raise
    except Exception, e:
        # Map to deserializer error
        raise DeserializationError(e)
def CustomDeserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor.

    Each item must carry ``model``, ``pk``, ``fields`` and — for FileField
    handling — the extra ``changes``/``action`` keys written by the matching
    ``Serializer`` above.  Yields :class:`CustomDeserializedObject`.
    """
    db = options.pop('using', DEFAULT_DB_ALIAS)
    models.get_apps()
    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        Model = _get_model(d["model"])
        data = {Model._meta.pk.attname: Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}
        filefields = []

        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            if isinstance(field_value, str):
                # decode byte strings with the configured charset
                field_value = smart_unicode(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)
            field = Model._meta.get_field(field_name)

            # Handle M2M relations
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                    def m2m_convert(value):
                        # natural keys arrive as iterables, raw pks otherwise
                        if hasattr(value, '__iter__'):
                            return field.rel.to._default_manager.db_manager(db).get_by_natural_key(*value).pk
                        else:
                            return smart_unicode(field.rel.to._meta.pk.to_python(value))
                else:
                    m2m_convert = lambda v: smart_unicode(field.rel.to._meta.pk.to_python(v))
                m2m_data[field.name] = [m2m_convert(pk) for pk in field_value]
            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                if field_value is not None:
                    if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                        if hasattr(field_value, '__iter__'):
                            obj = field.rel.to._default_manager.db_manager(db).get_by_natural_key(*field_value)
                            value = getattr(obj, field.rel.field_name)
                            # If this is a natural foreign key to an object that
                            # has a FK/O2O as the foreign key, use the FK value
                            if field.rel.to._meta.pk.rel:
                                value = value.pk
                        else:
                            value = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                        data[field.attname] = value
                    else:
                        data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                else:
                    data[field.attname] = None
            elif isinstance(field, FileField):
                # queue file restoration for CustomDeserializedObject.save():
                # [field, filename, decoded content or None]
                if field_value:
                    if field.name in d["changes"] or d["action"] == 1:
                        filefields.append([field, field_value['name'], ContentFile(base64.decodestring(field_value['data']))])
                    else:
                        # unchanged file: keep name only, reuse stored content
                        filefields.append([field, field_value['name'], None])
                else:
                    filefields.append([field, None, None])
            else:
                data[field.name] = field.to_python(field_value)
        yield CustomDeserializedObject(Model(**data), m2m_data, filefields)
class CustomDeserializedObject(DeserializedObject):
    """Deserialized object that also restores FileField contents queued by
    :func:`CustomDeserializer`.

    ``file_data`` is a list of ``[field, filename, content-or-None]``
    triples; a ``None`` content means "keep the file currently stored in
    the database".
    """

    def __init__(self, obj, m2m_data=None, file_data=None):
        # BUG FIX: ``file_data=[]`` was a mutable default argument, shared
        # between every instance constructed without the parameter.
        self.object = obj
        self.m2m_data = m2m_data
        self.file_data = file_data if file_data is not None else []

    def save(self, save_m2m=True, using=None):
        """Persist the object (raw save), restoring file fields first and
        attaching the m2m data afterwards."""
        for field, filename, djangofile in self.file_data:
            filefield = getattr(self.object, field.name)
            if filename and not djangofile:
                # unchanged file: re-attach the file currently stored in the DB
                current_file = getattr(self.object.__class__.objects.get(pk=self.object.pk), field.name)
                setattr(self.object, field.name, current_file)
                filefield = getattr(self.object, field.name)
            elif filename and djangofile:
                # new or changed file: write the decoded payload to storage
                filefield.save(filename, djangofile, save=False)
        models.Model.save_base(self.object, using=using, raw=True)
        if self.m2m_data and save_m2m:
            for accessor_name, object_list in self.m2m_data.items():
                setattr(self.object, accessor_name, object_list)

        # prevent a second (possibly accidental) call to save() from saving
        # the m2m data twice.
        self.m2m_data = None
        self.file_data = None
| juanpex/django-model-deploy | model_deploy/serializer.py | Python | bsd-3-clause | 8,172 |
from suds.client import Client
from suds.wsse import *
from suds.sax.element import Element
from suds.sax.attribute import Attribute
from suds.xsd.sxbasic import Import
# Python 2 script: queries the Ecuadorian BSG SOAP services via suds.
# Step 1: request access credentials (WS-Security digest) from the STI
# access service; Step 2: call the SENESCYT degree-lookup service using
# a hand-built UsernameToken/Timestamp security header.
client1 = Client(url='https://www.bsg.gob.ec/sw/STI/BSGSW08_Acceder_BSG?wsdl')
client = Client(url='https://www.bsg.gob.ec/sw/SENESCYT/BSGSW01_Consultar_Titulos?wsdl')

# Ask the access service for permission to call the SENESCYT endpoint
request = client1.factory.create('validarPermisoPeticion')
request.Cedula = '1103635445'
request.Urlsw = 'https://www.bsg.gob.ec/sw/SENESCYT/BSGSW01_Consultar_Titulos?wsdl'
response = client1.service.ValidarPermiso(request)
print response

# Credentials handed back by the access service, used to build the
# WS-Security header below
digest = response['Digest']
fecha = response['Fecha']
fechaf = response['FechaF']
nonce = response['Nonce']
user = "1103635445"

# WS-Security namespaces (secext and utility schemas)
wss = ('wss', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd')
wsu = ('wsu', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd')
usrnametoken = Element('UsernameToken', ns=wss).setText('')
username = Element('Username', ns=wss).setText(user)
wnonce = Element('Nonce', ns=wss).setText(nonce)
wnonce.append(Attribute('EncodingType', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary'))
pss = Element('Password', ns = wss).setText(digest)
pss.append(Attribute('Type', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordDigest'))
create = Element('Created', ns = wsu).setText(fecha)
created = Element('Created', ns = wsu).setText(fecha)
expires = Element('Expires', ns = wsu).setText(fechaf)
timestamp = Element('Timestamp', ns=wsu).setText('')
timestamp.append(Attribute('wsu:Id', 'Timestamp-2'))
# Assemble <wss:Security> with the UsernameToken and Timestamp blocks
usrnametoken.insert(username)
usrnametoken.insert(pss)
usrnametoken.insert(wnonce)
usrnametoken.insert(create)
timestamp.insert(expires)
timestamp.insert(created)
wsselement = Element('Security', ns=wss).insert(timestamp)
wsselement.insert(usrnametoken)
# Attach the security header to all calls made through `client`
client.set_options(soapheaders=wsselement)
#request_client = client.service.consultaTitulo('0800452773')
#request_client = client.service.consultaTitulo('0704040609')
#request_client = client.service.consultaTitulo('1103635445')
#request_client = client.service.consultaTitulo('0705277317')
#request_client = client.service.consultaTitulo('0704040609')
#0704922004
request_client = client.service.consultaTitulo('0705277762')
#print wsselement
# NOTE(review): the next two prints are duplicates
print request_client
print request_client
print request_client['niveltitulos'][0]['titulo'][0]['nombreTitulo']
| mauriciofierrom/testing | service.py | Python | lgpl-3.0 | 2,482 |
import numpy as np
from numpy.random import uniform, randint
from dissimilarity import compute_dissimilarity
if __name__ == '__main__':
    # Number of synthetic streamlines and the allowed range of their lengths.
    n_streamlines = 10000
    len_min = 30
    len_max = 150
    print("Generating random tractography of %s streamlines." %
          n_streamlines)
    # Each streamline is an (L, 3) array of random points with L drawn from
    # [len_min, len_max); the outer container must be an object array because
    # the streamlines have ragged lengths.
    # BUG FIX: `np.object` was removed in NumPy 1.24; the builtin `object`
    # is the supported, behaviourally identical spelling on every version.
    tracks = np.array([uniform(size=(randint(len_min, len_max), 3))
                       for i in range(n_streamlines)],
                      dtype=object)
    dissimilarity_matrix, prototype_idx = compute_dissimilarity(tracks,
                                                                verbose=True)
| emanuele/dissimilarity | test_dissimilarity.py | Python | mit | 601 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Knem(AutotoolsPackage):
    """KNEM is a Linux kernel module enabling high-performance intra-node MPI
    communication for large messages."""
    homepage = "https://knem.gforge.inria.fr"
    url = "https://gitlab.inria.fr/knem/knem/uploads/4a43e3eb860cda2bbd5bf5c7c04a24b6/knem-1.1.4.tar.gz"
    list_url = "https://knem.gitlabpages.inria.fr/download"
    git = "https://gitlab.inria.fr/knem/knem.git"
    maintainers = ['skosukhin']
    version('master', branch='master')
    # Each release tarball lives under a distinct upload hash on the GitLab
    # instance, so every version pins its own download URL.
    version('1.1.4', sha256='9f0e360579ae7655e07d6644583fd325515e9ff2b42ef5decb5421a003510937',
            url="https://gitlab.inria.fr/knem/knem/uploads/4a43e3eb860cda2bbd5bf5c7c04a24b6/knem-1.1.4.tar.gz")
    version('1.1.3', sha256='50d3c4a20c140108b8ce47aaafd0ade0927d6f507e1b5cc690dd6bddeef30f60',
            url="https://gitlab.inria.fr/knem/knem/uploads/59375c38537e6ff2d94209f190c54aa6/knem-1.1.3.tar.gz")
    variant('hwloc', default=True,
            description='Enable hwloc in the user-space tools')
    # Upstream fix applied on top of the 1.1.4 release tarball.
    patch('https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.patch',
          sha256='78885a02d6f031a793db6a7190549f8d64c8606b353051d65f8e3f802b801902',
          when='@1.1.4')
    depends_on('hwloc', when='+hwloc')
    depends_on('pkgconfig', type='build', when='+hwloc')
    # Building from git requires regenerating the autotools machinery.
    depends_on('autoconf', type='build', when='@master')
    depends_on('automake', type='build', when='@master')
    depends_on('m4', type='build', when='@master')
    # The support for hwloc was added in 0.9.1:
    conflicts('+hwloc', when='@:0.9.0')
    # Ideally, we should list all non-Linux-based platforms here:
    conflicts('platform=darwin')
    # All compilers except for gcc are in conflict:
    for __compiler in spack.compilers.supported_compilers():
        if __compiler != 'gcc':
            conflicts('%{0}'.format(__compiler),
                      msg='Linux kernel module must be compiled with gcc')
    @run_before('build')
    def override_kernel_compiler(self):
        # Override the compiler for kernel module source files. We need
        # this additional argument for all installation phases.
        make.add_default_arg('CC={0}'.format(spack_cc))
    def configure_args(self):
        # Translates the 'hwloc' variant into --enable-hwloc/--disable-hwloc.
        return self.enable_or_disable('hwloc')
    @when('@master')
    def autoreconf(self, spec, prefix):
        # Regenerate the build system when building from the git branch.
        Executable('./autogen.sh')()
| LLNL/spack | var/spack/repos/builtin/packages/knem/package.py | Python | lgpl-2.1 | 2,617 |
def itemTemplate():
    """Return the three candidate .iff object templates for this armor piece."""
    base = 'object/tangible/wearables/armor/marauder/shared_armor_marauder_s0%d_bicep_r.iff'
    return [base % variant for variant in (1, 2, 3)]
def customItemName():
    """Return the display name assigned to spawned instances of this item."""
    display_name = 'Marauder Armor Bicep Right'
    return display_name
def customItemStackCount():
    """Return the stack size for this item (armor pieces do not stack)."""
    stack_size = 1
    return stack_size
def customizationAttributes():
    """Return the customization attribute names (none for this item)."""
    return list()
def customizationValues():
    """Return the customization attribute values (none for this item)."""
    return list()
def itemStats():
    """Return the flat stat list: (stat name, min roll, max roll) triples."""
    rows = (
        ('armor_efficiency_kinetic', '5000', '6800'),
        ('armor_efficiency_energy', '3000', '4800'),
        ('special_protection_heat', '4000', '5800'),
        ('special_protection_cold', '4000', '5800'),
        ('special_protection_acid', '4000', '5800'),
        ('special_protection_electricity', '4000', '5800'),
    )
    # Flatten into the [name, min, max, name, min, max, ...] shape the loot
    # system consumes.
    flat = []
    for name, low, high in rows:
        flat += [name, low, high]
    return flat
| agry/NGECore2 | scripts/loot/lootItems/armor/marauder/marauder_armor_bicep_right.py | Python | lgpl-3.0 | 818 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Dict, Optional, Sequence, Tuple, Union
from google.api_core.retry import Retry
from google.cloud import datacatalog
from google.cloud.datacatalog import (
CreateTagRequest,
DataCatalogClient,
Entry,
EntryGroup,
SearchCatalogRequest,
Tag,
TagTemplate,
TagTemplateField,
)
from google.protobuf.field_mask_pb2 import FieldMask
from airflow import AirflowException
from airflow.providers.google.common.consts import CLIENT_INFO
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
class CloudDataCatalogHook(GoogleBaseHook):
"""
Hook for Google Cloud Data Catalog Service.
:param gcp_conn_id: The connection ID to use when fetching connection info.
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account.
"""
    def __init__(
        self,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
    ) -> None:
        # Parameters are documented on the class docstring; they are passed
        # straight through to GoogleBaseHook.
        super().__init__(
            gcp_conn_id=gcp_conn_id,
            delegate_to=delegate_to,
            impersonation_chain=impersonation_chain,
        )
        # Lazily constructed by get_conn() on first use.
        self._client: Optional[DataCatalogClient] = None
def get_conn(self) -> DataCatalogClient:
"""Retrieves client library object that allow access to Cloud Data Catalog service."""
if not self._client:
self._client = DataCatalogClient(credentials=self._get_credentials(), client_info=CLIENT_INFO)
return self._client
@GoogleBaseHook.fallback_to_default_project_id
def create_entry(
self,
location: str,
entry_group: str,
entry_id: str,
entry: Union[dict, Entry],
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Entry:
"""
Creates an entry.
Currently only entries of 'FILESET' type can be created.
:param location: Required. The location of the entry to create.
:param entry_group: Required. Entry group ID under which the entry is created.
:param entry_id: Required. The id of the entry to create.
:param entry: Required. The entry to create.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.Entry`
:param project_id: The ID of the Google Cloud project that owns the entry.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If set to ``None`` or missing, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}"
self.log.info('Creating a new entry: parent=%s', parent)
result = client.create_entry(
request={'parent': parent, 'entry_id': entry_id, 'entry': entry},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Created a entry: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def create_entry_group(
self,
location: str,
entry_group_id: str,
entry_group: Union[Dict, EntryGroup],
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> EntryGroup:
"""
Creates an EntryGroup.
:param location: Required. The location of the entry group to create.
:param entry_group_id: Required. The id of the entry group to create. The id must begin with a letter
or underscore, contain only English letters, numbers and underscores, and be at most 64
characters.
:param entry_group: The entry group to create. Defaults to an empty entry group.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.EntryGroup`
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
parent = f"projects/{project_id}/locations/{location}"
self.log.info('Creating a new entry group: parent=%s', parent)
result = client.create_entry_group(
request={'parent': parent, 'entry_group_id': entry_group_id, 'entry_group': entry_group},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Created a entry group: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def create_tag(
self,
location: str,
entry_group: str,
entry: str,
tag: Union[dict, Tag],
project_id: str = PROVIDE_PROJECT_ID,
template_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Tag:
"""
Creates a tag on an entry.
:param location: Required. The location of the tag to create.
:param entry_group: Required. Entry group ID under which the tag is created.
:param entry: Required. Entry group ID under which the tag is created.
:param tag: Required. The tag to create.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.Tag`
:param template_id: Required. Template ID used to create tag
:param project_id: The ID of the Google Cloud project that owns the tag.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
if template_id:
template_path = f"projects/{project_id}/locations/{location}/tagTemplates/{template_id}"
if isinstance(tag, Tag):
tag.template = template_path
else:
tag["template"] = template_path
parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
self.log.info('Creating a new tag: parent=%s', parent)
# HACK: google-cloud-datacatalog has problems with mapping messages where the value is not a
# primitive type, so we need to convert it manually.
# See: https://github.com/googleapis/python-datacatalog/issues/84
if isinstance(tag, dict):
tag = Tag(
name=tag.get('name'),
template=tag.get('template'),
template_display_name=tag.get('template_display_name'),
column=tag.get('column'),
fields={
k: datacatalog.TagField(**v) if isinstance(v, dict) else v
for k, v in tag.get("fields", {}).items()
},
)
request = CreateTagRequest(
parent=parent,
tag=tag,
)
result = client.create_tag(request=request, retry=retry, timeout=timeout, metadata=metadata or ())
self.log.info('Created a tag: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def create_tag_template(
self,
location,
tag_template_id: str,
tag_template: Union[dict, TagTemplate],
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> TagTemplate:
"""
Creates a tag template.
:param location: Required. The location of the tag template to create.
:param tag_template_id: Required. The id of the tag template to create.
:param tag_template: Required. The tag template to create.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.TagTemplate`
:param project_id: The ID of the Google Cloud project that owns the tag template.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
parent = f"projects/{project_id}/locations/{location}"
self.log.info('Creating a new tag template: parent=%s', parent)
# HACK: google-cloud-datacatalog has problems with mapping messages where the value is not a
# primitive type, so we need to convert it manually.
# See: https://github.com/googleapis/python-datacatalog/issues/84
if isinstance(tag_template, dict):
tag_template = datacatalog.TagTemplate(
name=tag_template.get("name"),
display_name=tag_template.get("display_name"),
fields={
k: datacatalog.TagTemplateField(**v) if isinstance(v, dict) else v
for k, v in tag_template.get("fields", {}).items()
},
)
request = datacatalog.CreateTagTemplateRequest(
parent=parent, tag_template_id=tag_template_id, tag_template=tag_template
)
result = client.create_tag_template(
request=request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Created a tag template: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def create_tag_template_field(
self,
location: str,
tag_template: str,
tag_template_field_id: str,
tag_template_field: Union[dict, TagTemplateField],
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> TagTemplateField:
r"""
Creates a field in a tag template.
:param location: Required. The location of the tag template field to create.
:param tag_template: Required. The id of the tag template to create.
:param tag_template_field_id: Required. The ID of the tag template field to create. Field ids can
contain letters (both uppercase and lowercase), numbers (0-9), underscores (\_) and dashes (-).
Field IDs must be at least 1 character long and at most 128 characters long. Field IDs must also
be unique within their template.
:param tag_template_field: Required. The tag template field to create.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.TagTemplateField`
:param project_id: The ID of the Google Cloud project that owns the tag template field.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
parent = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
self.log.info('Creating a new tag template field: parent=%s', parent)
result = client.create_tag_template_field(
request={
'parent': parent,
'tag_template_field_id': tag_template_field_id,
'tag_template_field': tag_template_field,
},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Created a tag template field: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def delete_entry(
self,
location: str,
entry_group: str,
entry: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
"""
Deletes an existing entry.
:param location: Required. The location of the entry to delete.
:param entry_group: Required. Entry group ID for entries that is deleted.
:param entry: Entry ID that is deleted.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
self.log.info('Deleting a entry: name=%s', name)
client.delete_entry(request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ())
self.log.info('Deleted a entry: name=%s', name)
@GoogleBaseHook.fallback_to_default_project_id
def delete_entry_group(
self,
location,
entry_group,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
"""
Deletes an EntryGroup.
Only entry groups that do not contain entries can be deleted.
:param location: Required. The location of the entry group to delete.
:param entry_group: Entry group ID that is deleted.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}"
self.log.info('Deleting a entry group: name=%s', name)
client.delete_entry_group(
request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
)
self.log.info('Deleted a entry group: name=%s', name)
@GoogleBaseHook.fallback_to_default_project_id
def delete_tag(
self,
location: str,
entry_group: str,
entry: str,
tag: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
"""
Deletes a tag.
:param location: Required. The location of the tag to delete.
:param entry_group: Entry group ID for tag that is deleted.
:param entry: Entry ID for tag that is deleted.
:param tag: Identifier for TAG that is deleted.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = (
f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}"
)
self.log.info('Deleting a tag: name=%s', name)
client.delete_tag(request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ())
self.log.info('Deleted a tag: name=%s', name)
@GoogleBaseHook.fallback_to_default_project_id
def delete_tag_template(
self,
location,
tag_template,
force: bool,
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
"""
Deletes a tag template and all tags using the template.
:param location: Required. The location of the tag template to delete.
:param tag_template: ID for tag template that is deleted.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param force: Required. Currently, this field must always be set to ``true``. This confirms the
deletion of any possible tags using this template. ``force = false`` will be supported in the
future.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
self.log.info('Deleting a tag template: name=%s', name)
client.delete_tag_template(
request={'name': name, 'force': force}, retry=retry, timeout=timeout, metadata=metadata or ()
)
self.log.info('Deleted a tag template: name=%s', name)
@GoogleBaseHook.fallback_to_default_project_id
def delete_tag_template_field(
self,
location: str,
tag_template: str,
field: str,
force: bool,
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
"""
Deletes a field in a tag template and all uses of that field.
:param location: Required. The location of the tag template to delete.
:param tag_template: Tag Template ID for tag template field that is deleted.
:param field: Name of field that is deleted.
:param force: Required. This confirms the deletion of this field from any tags using this field.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}"
self.log.info('Deleting a tag template field: name=%s', name)
client.delete_tag_template_field(
request={'name': name, 'force': force}, retry=retry, timeout=timeout, metadata=metadata or ()
)
self.log.info('Deleted a tag template field: name=%s', name)
@GoogleBaseHook.fallback_to_default_project_id
def get_entry(
self,
location: str,
entry_group: str,
entry: str,
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Entry:
"""
Gets an entry.
:param location: Required. The location of the entry to get.
:param entry_group: Required. The entry group of the entry to get.
:param entry: The ID of the entry to get.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
self.log.info('Getting a entry: name=%s', name)
result = client.get_entry(
request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
)
self.log.info('Received a entry: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def get_entry_group(
self,
location: str,
entry_group: str,
project_id: str,
read_mask: Optional[FieldMask] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> EntryGroup:
"""
Gets an entry group.
:param location: Required. The location of the entry group to get.
:param entry_group: The ID of the entry group to get.
:param read_mask: The fields to return. If not set or empty, all fields are returned.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.protobuf.field_mask_pb2.FieldMask`
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}"
self.log.info('Getting a entry group: name=%s', name)
result = client.get_entry_group(
request={'name': name, 'read_mask': read_mask},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Received a entry group: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def get_tag_template(
self,
location: str,
tag_template: str,
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> TagTemplate:
"""
Gets a tag template.
:param location: Required. The location of the tag template to get.
:param tag_template: Required. The ID of the tag template to get.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
self.log.info('Getting a tag template: name=%s', name)
result = client.get_tag_template(
request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
)
self.log.info('Received a tag template: name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def list_tags(
self,
location: str,
entry_group: str,
entry: str,
project_id: str,
page_size: int = 100,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
):
"""
Lists the tags on an Entry.
:param location: Required. The location of the tags to get.
:param entry_group: Required. The entry group of the tags to get.
:param entry_group: Required. The entry of the tags to get.
:param page_size: The maximum number of resources contained in the underlying API response. If page
streaming is performed per- resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number of resources in a page.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
self.log.info('Listing tag on entry: entry_name=%s', parent)
result = client.list_tags(
request={'parent': parent, 'page_size': page_size},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Received tags.')
return result
@GoogleBaseHook.fallback_to_default_project_id
def get_tag_for_template_name(
self,
location: str,
entry_group: str,
entry: str,
template_name: str,
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Tag:
"""
Gets for a tag with a specific template for a specific entry.
:param location: Required. The location which contains the entry to search for.
:param entry_group: The entry group ID which contains the entry to search for.
:param entry: The name of the entry to search for.
:param template_name: The name of the template that will be the search criterion.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
tags_list = self.list_tags(
location=location,
entry_group=entry_group,
entry=entry,
project_id=project_id,
retry=retry,
timeout=timeout,
metadata=metadata,
)
tag = next(t for t in tags_list if t.template == template_name)
return tag
def lookup_entry(
self,
linked_resource: Optional[str] = None,
sql_resource: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Entry:
r"""
Get an entry by target resource name.
This method allows clients to use the resource name from the source Google Cloud service
to get the Data Catalog Entry.
:param linked_resource: The full name of the Google Cloud resource the Data Catalog entry
represents. See: https://cloud.google.com/apis/design/resource\_names#full\_resource\_name. Full
names are case-sensitive.
:param sql_resource: The SQL name of the entry. SQL names are case-sensitive.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
if linked_resource and sql_resource:
raise AirflowException("Only one of linked_resource, sql_resource should be set.")
if not linked_resource and not sql_resource:
raise AirflowException("At least one of linked_resource, sql_resource should be set.")
if linked_resource:
self.log.info('Getting entry: linked_resource=%s', linked_resource)
result = client.lookup_entry(
request={'linked_resource': linked_resource},
retry=retry,
timeout=timeout,
metadata=metadata,
)
else:
self.log.info('Getting entry: sql_resource=%s', sql_resource)
result = client.lookup_entry(
request={'sql_resource': sql_resource},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Received entry. name=%s', result.name)
return result
@GoogleBaseHook.fallback_to_default_project_id
def rename_tag_template_field(
self,
location: str,
tag_template: str,
field: str,
new_tag_template_field_id: str,
project_id: str,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> TagTemplateField:
"""
Renames a field in a tag template.
:param location: Required. The location of the tag template field to rename.
:param tag_template: The tag template ID for field that is renamed.
:param field: Required. The old ID of this tag template field. For example,
``my_old_field``.
:param new_tag_template_field_id: Required. The new ID of this tag template field. For example,
``my_new_field``.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}"
self.log.info(
'Renaming field: old_name=%s, new_tag_template_field_id=%s', name, new_tag_template_field_id
)
result = client.rename_tag_template_field(
request={'name': name, 'new_tag_template_field_id': new_tag_template_field_id},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Renamed tag template field.')
return result
def search_catalog(
self,
scope: Union[Dict, SearchCatalogRequest.Scope],
query: str,
page_size: int = 100,
order_by: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
):
r"""
Searches Data Catalog for multiple resources like entries, tags that match a query.
This does not return the complete resource, only the resource identifier and high level fields.
Clients can subsequently call ``Get`` methods.
Note that searches do not have full recall. There may be results that match your query but are not
returned, even in subsequent pages of results. These missing results may vary across repeated calls to
search. Do not rely on this method if you need to guarantee full recall.
:param scope: Required. The scope of this search request.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.Scope`
:param query: Required. The query string in search query syntax. The query must be non-empty.
Query strings can be simple as "x" or more qualified as:
- name:x
- column:x
- description:y
Note: Query tokens need to have a minimum of 3 characters for substring matching to work
correctly. See `Data Catalog Search Syntax <https://cloud.google.com/data-catalog/docs/how-
to/search-reference>`__ for more information.
:param page_size: The maximum number of resources contained in the underlying API response. If page
streaming is performed per-resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number of resources in a page.
:param order_by: Specifies the ordering of results, currently supported case-sensitive choices are:
- ``relevance``, only supports descending
- ``last_access_timestamp [asc|desc]``, defaults to descending if not specified
- ``last_modified_timestamp [asc|desc]``, defaults to descending if not specified
If not specified, defaults to ``relevance`` descending.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
self.log.info(
"Searching catalog: scope=%s, query=%s, page_size=%s, order_by=%s",
scope,
query,
page_size,
order_by,
)
result = client.search_catalog(
request={'scope': scope, 'query': query, 'page_size': page_size, 'order_by': order_by},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Received items.')
return result
@GoogleBaseHook.fallback_to_default_project_id
def update_entry(
self,
entry: Union[Dict, Entry],
update_mask: Union[dict, FieldMask],
project_id: str,
location: Optional[str] = None,
entry_group: Optional[str] = None,
entry_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Entry:
"""
Updates an existing entry.
:param entry: Required. The updated entry. The "name" field must be set.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.Entry`
:param update_mask: The fields to update on the entry. If absent or empty, all modifiable fields are
updated.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.protobuf.field_mask_pb2.FieldMask`
:param location: Required. The location of the entry to update.
:param entry_group: The entry group ID for the entry that is being updated.
:param entry_id: The entry ID that is being updated.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
if project_id and location and entry_group and entry_id:
full_entry_name = (
f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry_id}"
)
if isinstance(entry, Entry):
entry.name = full_entry_name
elif isinstance(entry, dict):
entry["name"] = full_entry_name
else:
raise AirflowException("Unable to set entry's name.")
elif location and entry_group and entry_id:
raise AirflowException(
"You must provide all the parameters (project_id, location, entry_group, entry_id) "
"contained in the name, or do not specify any parameters and pass the name on the object "
)
name = entry.name if isinstance(entry, Entry) else entry["name"]
self.log.info("Updating entry: name=%s", name)
# HACK: google-cloud-datacatalog has a problem with dictionaries for update methods.
if isinstance(entry, dict):
entry = Entry(**entry)
result = client.update_entry(
request={'entry': entry, 'update_mask': update_mask},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Updated entry.')
return result
@GoogleBaseHook.fallback_to_default_project_id
def update_tag(
self,
tag: Union[Dict, Tag],
update_mask: Union[Dict, FieldMask],
project_id: str,
location: Optional[str] = None,
entry_group: Optional[str] = None,
entry: Optional[str] = None,
tag_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> Tag:
"""
Updates an existing tag.
:param tag: Required. The updated tag. The "name" field must be set.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.Tag`
:param update_mask: The fields to update on the Tag. If absent or empty, all modifiable fields are
updated. Currently the only modifiable field is the field ``fields``.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
:param location: Required. The location of the tag to rename.
:param entry_group: The entry group ID for the tag that is being updated.
:param entry: The entry ID for the tag that is being updated.
:param tag_id: The tag ID that is being updated.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
if project_id and location and entry_group and entry and tag_id:
full_tag_name = (
f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
f"/tags/{tag_id}"
)
if isinstance(tag, Tag):
tag.name = full_tag_name
elif isinstance(tag, dict):
tag["name"] = full_tag_name
else:
raise AirflowException("Unable to set tag's name.")
elif location and entry_group and entry and tag_id:
raise AirflowException(
"You must provide all the parameters (project_id, location, entry_group, entry, tag_id) "
"contained in the name, or do not specify any parameters and pass the name on the object "
)
name = tag.name if isinstance(tag, Tag) else tag["name"]
self.log.info("Updating tag: name=%s", name)
# HACK: google-cloud-datacatalog has a problem with dictionaries for update methods.
if isinstance(tag, dict):
tag = Tag(**tag)
result = client.update_tag(
request={'tag': tag, 'update_mask': update_mask},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Updated tag.')
return result
@GoogleBaseHook.fallback_to_default_project_id
def update_tag_template(
self,
tag_template: Union[dict, TagTemplate],
update_mask: Union[dict, FieldMask],
project_id: str,
location: Optional[str] = None,
tag_template_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> TagTemplate:
"""
Updates a tag template.
This method cannot be used to update the fields of a template. The tag
template fields are represented as separate resources and should be updated using their own
create/update/delete methods.
:param tag_template: Required. The template to update. The "name" field must be set.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.TagTemplate`
:param update_mask: The field mask specifies the parts of the template to overwrite.
If absent or empty, all of the allowed fields above will be updated.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.protobuf.field_mask_pb2.FieldMask`
:param location: Required. The location of the tag template to rename.
:param tag_template_id: Optional. The tag template ID for the entry that is being updated.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
if project_id and location and tag_template:
full_tag_template_name = (
f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}"
)
if isinstance(tag_template, TagTemplate):
tag_template.name = full_tag_template_name
elif isinstance(tag_template, dict):
tag_template["name"] = full_tag_template_name
else:
raise AirflowException("Unable to set name of tag template.")
elif location and tag_template:
raise AirflowException(
"You must provide all the parameters (project_id, location, tag_template_id) "
"contained in the name, or do not specify any parameters and pass the name on the object "
)
name = tag_template.name if isinstance(tag_template, TagTemplate) else tag_template["name"]
self.log.info("Updating tag template: name=%s", name)
# HACK: google-cloud-datacatalog has a problem with dictionaries for update methods.
if isinstance(tag_template, dict):
tag_template = TagTemplate(**tag_template)
result = client.update_tag_template(
request={'tag_template': tag_template, 'update_mask': update_mask},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Updated tag template.')
return result
@GoogleBaseHook.fallback_to_default_project_id
def update_tag_template_field(
self,
tag_template_field: Union[dict, TagTemplateField],
update_mask: Union[dict, FieldMask],
project_id: str,
tag_template_field_name: Optional[str] = None,
location: Optional[str] = None,
tag_template: Optional[str] = None,
tag_template_field_id: Optional[str] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
):
"""
Updates a field in a tag template. This method cannot be used to update the field type.
:param tag_template_field: Required. The template to update.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.cloud.datacatalog_v1beta1.types.TagTemplateField`
:param update_mask: The field mask specifies the parts of the template to be updated. Allowed fields:
- ``display_name``
- ``type.enum_type``
If ``update_mask`` is not set or empty, all of the allowed fields above will be updated.
When updating an enum type, the provided values will be merged with the existing values.
Therefore, enum values can only be added, existing enum values cannot be deleted nor renamed.
If a dict is provided, it must be of the same form as the protobuf message
:class:`~google.protobuf.field_mask_pb2.FieldMask`
:param tag_template_field_name: Optional. The name of the tag template field to rename.
:param location: Optional. The location of the tag to rename.
:param tag_template: Optional. The tag template ID for tag template field to rename.
:param tag_template_field_id: Optional. The ID of tag template field to rename.
:param project_id: The ID of the Google Cloud project that owns the entry group.
If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
:param retry: A retry object used to retry requests. If ``None`` is specified, requests will be
retried using a default configuration.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
``retry`` is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
"""
client = self.get_conn()
if project_id and location and tag_template and tag_template_field_id:
tag_template_field_name = (
f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
f"/fields/{tag_template_field_id}"
)
self.log.info("Updating tag template field: name=%s", tag_template_field_name)
result = client.update_tag_template_field(
request={
'name': tag_template_field_name,
'tag_template_field': tag_template_field,
'update_mask': update_mask,
},
retry=retry,
timeout=timeout,
metadata=metadata,
)
self.log.info('Updated tag template field.')
return result
| bolkedebruin/airflow | airflow/providers/google/cloud/hooks/datacatalog.py | Python | apache-2.0 | 54,178 |
"""Build and display a rock-salt (NaCl) crystal with chemlab."""
from chemlab.core import Atom, Molecule, crystal
from chemlab.graphics.qt import display_system

# Single-atom molecule templates for each ion.
sodium = Molecule([Atom('Na', [0.0, 0.0, 0.0])])
chloride = Molecule([Atom('Cl', [0.0, 0.0, 0.0])])

nacl_system = crystal(
    [[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]],   # fractional positions of the two ions
    [sodium, chloride],                    # molecule templates
    225,                                   # space group number
    cellpar=[.54, .54, .54, 90, 90, 90],   # unit cell edge lengths and angles
    repetitions=[5, 5, 5])                 # unit cell repetitions per direction

display_system(nacl_system)
| chemlab/chemlab | examples/nacl.py | Python | gpl-3.0 | 517 |
"""
WSGI config for xdata project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, sys
sys.path.append('/var/www/op_task/xdata')
sys.path.append('/var/www/op_task')
sys.path.append('./xdata/settings')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "production")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| draperlaboratory/stout | xdata/wsgi.py | Python | apache-2.0 | 501 |
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp.osv import orm, fields
class clv_medicament(orm.Model):
    """Extend clv_medicament with a link to its ABCFarma record."""
    _inherit = 'clv_medicament'
    _columns = {
        # Reference to the ABCFarma record associated with this medicament.
        'abcfarma_id': fields.many2one('clv_abcfarma', string='ABCFarma'),
    }
class clv_abcfarma(orm.Model):
    """Extend clv_abcfarma with the inverse side of the medicament link."""
    _inherit = 'clv_abcfarma'
    _columns = {
        # All medicaments pointing at this ABCFarma record via abcfarma_id.
        'medicament_ids': fields.one2many('clv_medicament',
                                          'abcfarma_id',
                                          'Medicaments'),
    }
| CLVsol/odoo_addons_l10n_br | l10n_br_clv_abcfarma/clv_medicament/clv_medicament.py | Python | agpl-3.0 | 1,866 |
# -*- encoding: utf-8 -*-
__author__ = 'ray'
__date__ = '4/21/15'
from stonemason.renderer.engine.grammar import RenderGrammar, DictTokenizer
from stonemason.renderer.engine.context import RenderContext
from stonemason.renderer.cartographer import ImageNodeFactory
class MasonRenderer(object):
    """Render images from a dict-based render expression.

    The expression is tokenized, parsed by the render grammar starting at the
    ``root`` token, and compiled into a renderer node tree once at
    construction time.
    """

    def __init__(self, expression):
        tokenizer = DictTokenizer(expression)
        node_factory = ImageNodeFactory()
        grammar = RenderGrammar(tokenizer, start='root', factory=node_factory)
        self._renderer = grammar.parse()

    def render(self, context):
        """Render the given :class:`RenderContext` with the compiled tree."""
        assert isinstance(context, RenderContext)
        return self._renderer.render(context)
| Kotaimen/stonemason | stonemason/renderer/renderer.py | Python | mit | 662 |
def prime(i, primes):
    """Return ``i`` if it is prime, else ``False``; record new primes.

    ``primes`` is the list of primes found so far, in ascending order (the
    caller builds it by testing candidates in increasing order). When ``i``
    is prime it is appended to ``primes`` as a side effect.
    """
    for known in primes:  # renamed: the old loop variable shadowed this function
        if known * known > i:
            # No prime factor <= sqrt(i) exists, so i is prime; stop early.
            break
        if i % known == 0 and i != known:
            return False
    primes.append(i)
    return i
def find_primes(n):
    """Return the list of all primes <= n, in ascending order.

    Fixes two defects of the original: the unused counter ``p`` is removed,
    and ``n < 2`` now returns an empty list instead of looping forever.
    Relies on the module-level ``prime`` helper, which appends each prime it
    confirms to the shared list.
    """
    primes = []
    for candidate in range(2, n + 1):
        prime(candidate, primes)
    return primes
# Read the limit from input.txt and write one prime per line to output.txt.
# Fixed: the original left both file handles open; `with` closes them even on error.
with open('input.txt', 'r') as infile:
    n = int(infile.readline())

with open('output.txt', 'w') as outfile:
    for p in find_primes(n):
        outfile.write(str(p) + '\n')
# Copyright 2013 University of Maryland. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE.TXT file.
import sys
import os
import time
import urllib2
import re
import subprocess
import selenium.common.exceptions
import framework
class Exploit (framework.Exploit):
    # Static metadata consumed by the Bugbox framework to describe this exploit.
    attributes = {'Name' : "OSVDB_88751",
                  'Description' : "eXtplorer v2.1 Arbitrary File Upload Vulnerability\n"
                  "This application has an upload feature that allows an authenticated user "
                  "with administrator roles to upload arbitrary files to any writable "
                  "directory in the web root. This module uses an authentication bypass "
                  "vulnerability to upload and execute a file.",
                  'References' : [['OSVDB', '88751']],
                  'Target' : "eXtplorer 2.1",
                  'TargetLicense' : '',
                  'VulWikiPage' : "",
                  'Type' : 'EXEC'
                  }

    def __init__(self, visible=False):
        """Initialize the framework base and the captured Metasploit output."""
        framework.Exploit.__init__(self, visible)
        # Raw stdout from the msfcli run; parsed later by verify().
        self.msf_out = ""
        return

    def exploit(self):
        """Run the Metasploit eXtplorer upload/exec module against localhost.

        Captures msfcli's stdout in self.msf_out so verify() can locate the
        uploaded payload path.
        """
        metasploit_cmd = "lib/metasploit-framework/msfcli multi/http/extplorer_upload_exec "\
                         "RHOST=127.0.0.1 TARGETURI=/extplorer/ PAYLOAD=generic/custom PAYLOADSTR=\'phpinfo();\' E"
        self.logger.info("Running Metasploit")
        proc = subprocess.Popen(metasploit_cmd, stdout=subprocess.PIPE, shell=True)
        (self.msf_out, err) = proc.communicate()
        self.logger.info(self.msf_out)
        self.logger.info("Done! Browse to the payload location specified above to see the results.")
        return

    def verify(self):
        """Confirm the exploit worked by fetching the uploaded payload.

        Parses the payload path out of the msfcli output, requests it over
        HTTP and checks for phpinfo() output. Returns True on success.
        """
        # Extract the "Executing payload (<path>)" line emitted by the module.
        recomp = re.compile('.*Executing payload \((.+)\).*', re.MULTILINE)
        path = recomp.findall(self.msf_out)
        if(len(path) != 1):
            return False
        self.logger.info("Checking payload at http://127.0.0.1/%s", path[0])
        try:
            response = urllib2.urlopen("http://127.0.0.1/%s" % (path[0],))
            html = response.read()
        except IOError:
            self.logger.error("IOError: unable to open url")
            return False
        # phpinfo() pages embed the "PHP Logo" marker; its presence means the
        # uploaded PHP payload actually executed.
        if(html.find("PHP Logo") != -1):
            self.logger.info("payload confirmed")
            return True
        self.logger.info("payload not found")
        return False
| UMD-SEAM/bugbox | framework/Exploits/OSVDB_88751_MSF.py | Python | bsd-3-clause | 2,462 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-23 15:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the sales (``ventas``) app.

    Auto-generated by Django; creates clients, addresses, orders and order
    lines, plus the cash-register models (counts, expenses, reconciliations).
    Do not hand-edit the operations below.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Arqueos',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha', models.DateTimeField(auto_now_add=True)),
                ('caja_dia', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('efectivo', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('cambio', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('total_gastos', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('tarjeta', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('descuadre', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('modify', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'Arqueo',
            },
        ),
        migrations.CreateModel(
            name='Clientes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(blank=True, default='', max_length=50, null=True)),
                ('apellido', models.CharField(default='', max_length=100, null=True)),
                ('email', models.EmailField(blank=True, default='', max_length=100, null=True)),
                ('telefono', models.CharField(blank=True, default='', max_length=20, null=True)),
                ('nota', models.TextField(blank=True, default='', null=True)),
                ('fecha_add', models.DateField(auto_now_add=True)),
                ('modify', models.DateTimeField(auto_now=True)),
                ('direccion', models.IntegerField(default=0, null=True)),
            ],
            options={
                'ordering': ['apellido', 'nombre'],
                'verbose_name': 'Cliente',
            },
        ),
        migrations.CreateModel(
            name='Conteo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('can', models.IntegerField(verbose_name='Cantidad')),
                ('tipo', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True, verbose_name='Tipo de moneda')),
                ('total', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('texto_tipo', models.CharField(blank=True, default='Euros', max_length=100)),
                ('modify', models.DateTimeField(auto_now=True, verbose_name='Modificado')),
            ],
        ),
        migrations.CreateModel(
            name='Direcciones',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('direccion', models.CharField(max_length=150)),
                ('localidad', models.CharField(default='Grandada', max_length=50, null=True)),
                ('codigo', models.CharField(max_length=10, null=True)),
                ('modify', models.DateTimeField(auto_now=True)),
                ('clientes', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ventas.Clientes')),
            ],
            options={
                'verbose_name': 'Direccion',
            },
        ),
        migrations.CreateModel(
            name='Gastos',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('des', models.CharField(default='Nada', max_length=100, verbose_name='Descripcion')),
                ('gasto', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('modify', models.DateTimeField(auto_now=True, verbose_name='Modificado')),
            ],
            options={
                'verbose_name': 'Gasto',
            },
        ),
        migrations.CreateModel(
            name='LineasPedido',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=50)),
                ('des', models.TextField(null=True)),
                ('cant', models.IntegerField()),
                ('precio', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('total', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('tipo', models.CharField(max_length=50)),
                ('modify', models.DateTimeField(auto_now=True)),
                ('servido', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='Pedidos',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha', models.DateTimeField(auto_now_add=True)),
                ('modo_pago', models.CharField(max_length=50)),
                ('para_llevar', models.CharField(max_length=50)),
                ('num_avisador', models.CharField(max_length=50)),
                ('total', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('estado', models.CharField(default='PG_NO', max_length=10)),
                ('entrega', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('cambio', models.DecimalField(decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('modify', models.DateTimeField(auto_now=True)),
                ('servido', models.BooleanField(default=False)),
            ],
            options={
                'ordering': ['-id'],
                'verbose_name': 'Pedido',
            },
        ),
        migrations.CreateModel(
            name='PedidosExtra',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('importe', models.DecimalField(decimal_places=2, max_digits=20)),
                ('numero_pedido', models.IntegerField()),
                ('modo_pago', models.CharField(blank=True, default='Efectivo', max_length=50, null=True)),
                ('modify', models.DateTimeField(auto_now=True)),
                ('estado', models.CharField(blank=True, default='no_arqueado', max_length=50, null=True)),
            ],
            options={
                'verbose_name': 'Pedidos Extra',
            },
        ),
        migrations.AddField(
            model_name='lineaspedido',
            name='pedidos',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ventas.Pedidos'),
        ),
        migrations.AddField(
            model_name='clientes',
            name='pedidos',
            field=models.ManyToManyField(to='ventas.Pedidos'),
        ),
        migrations.AddField(
            model_name='arqueos',
            name='conteo',
            field=models.ManyToManyField(to='ventas.Conteo'),
        ),
        migrations.AddField(
            model_name='arqueos',
            name='gastos',
            field=models.ManyToManyField(to='ventas.Gastos'),
        ),
        migrations.AddField(
            model_name='arqueos',
            name='pedidos',
            field=models.ManyToManyField(to='ventas.Pedidos'),
        ),
        migrations.AddField(
            model_name='arqueos',
            name='pedidosextra',
            field=models.ManyToManyField(to='ventas.PedidosExtra'),
        ),
    ]
| vallemrv/tpvB3 | cloud/ventas/migrations/0001_initial.py | Python | apache-2.0 | 8,139 |
from . import common
import hglib
class test_tags(common.basetest):
    """Tests for ``client.tags()``."""

    def test_basic(self):
        """Both a regular tag and a local tag appear in the tags listing."""
        self.append('a', 'a')
        rev, node = self.client.commit('first', addremove=True)
        self.client.tag('my tag')
        self.client.tag('local tag', rev=rev, local=True)
        # filecache that was introduced in 2.0 makes us see the local tag, for
        # now we have to reconnect
        if self.client.version < (2, 0, 0):
            self.client = hglib.open()
        tags = self.client.tags()
        self.assertEquals(tags, [('tip', 1, self.client.tip().node[:12], False),
                                 ('my tag', 0, node[:12], False),
                                 ('local tag', 0, node[:12], True)])
| beckjake/python3-hglib | tests/test-tags.py | Python | mit | 730 |
from django.conf import settings

# NOTE(review): these read the VIDEO_* settings keys although this is the
# audio plugin -- looks like a copy-paste from cmsplugin_filer_video; confirm
# whether AUDIO_AUTOPLAY / AUDIO_LOOP settings keys were intended instead.
AUDIO_AUTOPLAY = getattr(settings, "VIDEO_AUTOPLAY", False)
AUDIO_LOOP = getattr(settings, "VIDEO_LOOP", False)
| isotoma/cmsplugin-filer | cmsplugin_filer_audio/settings.py | Python | bsd-3-clause | 148 |
from django.contrib import admin
from .models import Friend
class FriendAdmin(admin.ModelAdmin):
    """Admin for Friend showing the full name and a photo thumbnail."""
    list_display = ('full_name', 'profile_image')

    def profile_image(self, obj):
        """Render a 50x50 thumbnail of the friend's photo for the changelist."""
        # Fixed: the height attribute was misspelled as "heith".
        return '<img src="%s" width="50" height="50">' % obj.photo
    profile_image.allow_tags = True

admin.site.register(Friend, FriendAdmin)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import uuid
from cms.test_utils.project.sampleapp.cms_apps import SampleApp
from cms.test_utils.util.context_managers import apphooks
from django.conf import settings
from django.contrib.sites.models import Site
from django.core import management
from django.core.management import CommandError
from django.test.utils import override_settings
from django.utils.six.moves import StringIO
from cms.api import create_page, add_plugin, create_title
from cms.management.commands.subcommands.list import plugin_report
from cms.models import Page, StaticPlaceholder
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.test_utils.fixtures.navextenders import NavextendersFixture
from cms.test_utils.testcases import CMSTestCase
from djangocms_text_ckeditor.cms_plugins import TextPlugin
APPHOOK = "SampleApp"
PLUGIN = "TextPlugin"
TEST_INSTALLED_APPS = [
"django.contrib.auth",
"cms",
"menus",
"sekizai",
"treebeard",
] + settings.PLUGIN_APPS
if settings.AUTH_USER_MODEL == "emailuserapp.EmailUser":
TEST_INSTALLED_APPS.append("cms.test_utils.project.emailuserapp")
if settings.AUTH_USER_MODEL == "customuserapp.User":
TEST_INSTALLED_APPS.append("cms.test_utils.project.customuserapp")
class ManagementTestCase(CMSTestCase):
    """Exercise the ``manage.py cms`` subcommands: list, uninstall,
    fix-tree, delete-orphaned-plugins and publisher-publish.

    Assertions are made against both the commands' console output and the
    resulting database state.
    """

    @override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
    def test_list_apphooks(self):
        """``cms list apphooks`` reports a page using an apphook."""
        with apphooks(SampleApp):
            out = StringIO()
            create_page('Hello Title', "nav_playground.html", "en", apphook=APPHOOK)
            self.assertEqual(Page.objects.filter(application_urls=APPHOOK).count(), 1)
            management.call_command(
                "cms",
                "list",
                "apphooks",
                interactive=False,
                stdout=out,
            )
            self.assertEqual(out.getvalue(), "SampleApp (draft)\n")

    def test_uninstall_apphooks_without_apphook(self):
        """``cms uninstall apphooks`` is a no-op when nothing uses the hook."""
        with apphooks():
            out = StringIO()
            management.call_command(
                "cms",
                "uninstall",
                "apphooks",
                APPHOOK,
                interactive=False,
                stdout=out,
            )
            self.assertEqual(out.getvalue(), "no 'SampleApp' apphooks found\n")

    def test_fix_tree(self):
        """Corrupt a page's treebeard fields, then check fix-tree repairs them."""
        create_page("home", "nav_playground.html", "en")
        page1 = create_page("page", "nav_playground.html", "en")
        page1.depth = 3
        page1.numchild = 4
        page1.path = "00100010"
        page1.save()
        out = StringIO()
        management.call_command('cms', 'fix-tree', interactive=False, stdout=out)
        self.assertEqual(out.getvalue(), 'fixing page tree\nfixing plugin tree\nall done\n')
        page1 = page1.reload()
        self.assertEqual(page1.path, "0002")
        self.assertEqual(page1.depth, 1)
        self.assertEqual(page1.numchild, 0)

    def test_fix_tree_regression_5641(self):
        # ref: https://github.com/divio/django-cms/issues/5641
        alpha = create_page("Alpha", "nav_playground.html", "en", published=True)
        beta = create_page("Beta", "nav_playground.html", "en", published=False)
        gamma = create_page("Gamma", "nav_playground.html", "en", published=False)
        delta = create_page("Delta", "nav_playground.html", "en", published=True)
        theta = create_page("Theta", "nav_playground.html", "en", published=True)
        beta.move_page(alpha, position='last-child')
        gamma.move_page(beta.reload(), position='last-child')
        delta.move_page(gamma.reload(), position='last-child')
        theta.move_page(delta.reload(), position='last-child')
        out = StringIO()
        management.call_command('cms', 'fix-tree', interactive=False, stdout=out)
        alpha = alpha.reload()
        beta = beta.reload()
        gamma = gamma.reload()
        delta = delta.reload()
        theta = theta.reload()
        # Expected treebeard paths after repair: the draft chain, plus the
        # public counterparts of the published pages.
        tree = [
            (alpha, '0001'),
            (beta, '00010001'),
            (gamma, '000100010001'),
            (delta, '0001000100010001'),
            (theta, '00010001000100010001'),
            (alpha.publisher_public, '0002'),
            (delta.publisher_public, '0006'),
            (theta.publisher_public, '00060001'),
        ]
        for page, path in tree:
            self.assertEqual(page.path, path)

    @override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
    def test_uninstall_apphooks_with_apphook(self):
        """``cms uninstall apphooks`` detaches the hook from pages using it."""
        with apphooks(SampleApp):
            out = StringIO()
            create_page('Hello Title', "nav_playground.html", "en", apphook=APPHOOK)
            self.assertEqual(Page.objects.filter(application_urls=APPHOOK).count(), 1)
            management.call_command(
                "cms",
                "uninstall",
                "apphooks",
                APPHOOK,
                interactive=False,
                stdout=out,
            )
            self.assertEqual(out.getvalue(), "1 'SampleApp' apphooks uninstalled\n")
            self.assertEqual(Page.objects.filter(application_urls=APPHOOK).count(), 0)

    @override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
    def test_list_plugins(self):
        """plugin_report() groups plugins by type, flagging bogus/unsaved ones."""
        out = StringIO()
        placeholder = Placeholder.objects.create(slot="test")
        add_plugin(placeholder, TextPlugin, "en", body="en body")
        add_plugin(placeholder, TextPlugin, "en", body="en body")
        link_plugin = add_plugin(placeholder, "LinkPlugin", "en",
                                 name="A Link", url="https://www.django-cms.org")
        self.assertEqual(
            CMSPlugin.objects.filter(plugin_type=PLUGIN).count(),
            2)
        self.assertEqual(
            CMSPlugin.objects.filter(plugin_type="LinkPlugin").count(),
            1)
        # create a CMSPlugin with an unsaved instance
        instanceless_plugin = CMSPlugin(language="en", plugin_type="TextPlugin")
        instanceless_plugin.save()
        # create a bogus CMSPlugin to simulate one which used to exist but
        # is no longer installed
        bogus_plugin = CMSPlugin(language="en", plugin_type="BogusPlugin")
        bogus_plugin.save()
        management.call_command('cms', 'list', 'plugins', interactive=False, stdout=out)
        report = plugin_report()
        # there should be reports for three plugin types
        self.assertEqual(
            len(report),
            3)
        # check the bogus plugin
        bogus_plugins_report = report[0]
        self.assertEqual(
            bogus_plugins_report["model"],
            None)
        self.assertEqual(
            bogus_plugins_report["type"],
            u'BogusPlugin')
        self.assertEqual(
            bogus_plugins_report["instances"][0],
            bogus_plugin)
        # check the link plugin
        link_plugins_report = report[1]
        self.assertEqual(
            link_plugins_report["model"],
            link_plugin.__class__)
        self.assertEqual(
            link_plugins_report["type"],
            u'LinkPlugin')
        self.assertEqual(
            link_plugins_report["instances"][0].get_plugin_instance()[0],
            link_plugin)
        # check the text plugins
        text_plugins_report = report[2]
        self.assertEqual(
            text_plugins_report["model"],
            TextPlugin.model)
        self.assertEqual(
            text_plugins_report["type"],
            u'TextPlugin')
        self.assertEqual(
            len(text_plugins_report["instances"]),
            3)
        self.assertEqual(
            text_plugins_report["instances"][2],
            instanceless_plugin)
        self.assertEqual(
            text_plugins_report["unsaved_instances"],
            [instanceless_plugin])

    @override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
    def test_delete_orphaned_plugins(self):
        """delete-orphaned-plugins removes bogus and unsaved plugin rows only."""
        placeholder = Placeholder.objects.create(slot="test")
        add_plugin(placeholder, TextPlugin, "en", body="en body")
        add_plugin(placeholder, TextPlugin, "en", body="en body")
        add_plugin(placeholder, "LinkPlugin", "en",
                   name="A Link", url="https://www.django-cms.org")
        instanceless_plugin = CMSPlugin(
            language="en", plugin_type="TextPlugin")
        instanceless_plugin.save()
        # create a bogus CMSPlugin to simulate one which used to exist but
        # is no longer installed
        bogus_plugin = CMSPlugin(language="en", plugin_type="BogusPlugin")
        bogus_plugin.save()
        report = plugin_report()
        # there should be reports for three plugin types
        self.assertEqual(
            len(report),
            3)
        # check the bogus plugin
        bogus_plugins_report = report[0]
        self.assertEqual(
            len(bogus_plugins_report["instances"]),
            1)
        # check the link plugin
        link_plugins_report = report[1]
        self.assertEqual(
            len(link_plugins_report["instances"]),
            1)
        # check the text plugins
        text_plugins_report = report[2]
        self.assertEqual(
            len(text_plugins_report["instances"]),
            3)
        self.assertEqual(
            len(text_plugins_report["unsaved_instances"]),
            1)
        out = StringIO()
        management.call_command('cms', 'delete-orphaned-plugins', interactive=False, stdout=out)
        report = plugin_report()
        # there should be reports for two plugin types (one should have been deleted)
        self.assertEqual(
            len(report),
            2)
        # check the link plugin
        link_plugins_report = report[0]
        self.assertEqual(
            len(link_plugins_report["instances"]),
            1)
        # check the text plugins
        text_plugins_report = report[1]
        self.assertEqual(
            len(text_plugins_report["instances"]),
            2)
        self.assertEqual(
            len(text_plugins_report["unsaved_instances"]),
            0)

    def test_uninstall_plugins_without_plugin(self):
        """``cms uninstall plugins`` is a no-op when no plugin of the type exists."""
        out = StringIO()
        management.call_command('cms', 'uninstall', 'plugins', PLUGIN, interactive=False, stdout=out)
        self.assertEqual(out.getvalue(), "no 'TextPlugin' plugins found\n")

    @override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
    def test_uninstall_plugins_with_plugin(self):
        """``cms uninstall plugins`` deletes existing plugins of the type."""
        out = StringIO()
        placeholder = Placeholder.objects.create(slot="test")
        add_plugin(placeholder, TextPlugin, "en", body="en body")
        self.assertEqual(CMSPlugin.objects.filter(plugin_type=PLUGIN).count(), 1)
        management.call_command('cms', 'uninstall', 'plugins', PLUGIN, interactive=False, stdout=out)
        self.assertEqual(out.getvalue(), "1 'TextPlugin' plugins uninstalled\n")
        self.assertEqual(CMSPlugin.objects.filter(plugin_type=PLUGIN).count(), 0)

    def test_publisher_public(self):
        """publisher-publish --unpublished republishes unpublished pages."""
        admin = self.get_superuser()
        create_page(
            'home',
            published=True,
            language='de',
            template='nav_playground.html',
            created_by=admin,
        )
        page_1 = create_page(
            'página 1',
            published=True,
            language='de',
            template='nav_playground.html',
            created_by=admin,
        )
        page_1.unpublish('de')
        page_2 = create_page(
            'página 2',
            published=True,
            language='de',
            template='nav_playground.html',
            created_by=admin,
        )
        page_2.unpublish('de')
        management.call_command(
            'cms',
            'publisher-publish',
            '-l de',
            '--unpublished',
            interactive=False,
        )
        self.assertEqual(Page.objects.public().count(), 3)
class PageFixtureManagementTestCase(NavextendersFixture, CMSTestCase):
    """Exercise the ``manage.py cms copy`` subcommands (``lang`` / ``site``)
    against the navextenders page fixture, whose pages are filled with a
    nested plugin tree by setUp()."""

    def _fill_page_body(self, page, lang):
        # Build a deterministic nested plugin tree in the page's "body"
        # placeholder, plus a text plugin in a fresh static placeholder.
        ph_en = page.placeholders.get(slot="body")
        # add misc plugins
        mcol1 = add_plugin(ph_en, "MultiColumnPlugin", lang, position="first-child")
        add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol1)
        col2 = add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol1)
        mcol2 = add_plugin(ph_en, "MultiColumnPlugin", lang, position="first-child", target=col2)
        add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol2)
        col4 = add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol2)
        # add a *nested* link plugin
        add_plugin(ph_en, "LinkPlugin", lang, target=col4,
                   name="A Link", url="https://www.django-cms.org")
        static_placeholder = StaticPlaceholder(code=str(uuid.uuid4()), site_id=1)
        static_placeholder.save()
        add_plugin(static_placeholder.draft, "TextPlugin", lang, body="example content")

    def setUp(self):
        # Populate every draft page from the fixture with English content.
        pages = Page.objects.drafts()
        for page in pages:
            self._fill_page_body(page, "en")

    def test_copy_langs(self):
        """
        Various checks here:

        * plugins are exactly doubled, half per language with no orphaned plugin
        * the bottom-most plugins in the nesting chain maintain the same position and the same content
        * the top-most plugin are of the same type
        """
        site = 1
        number_start_plugins = CMSPlugin.objects.all().count()
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'lang', '--from-lang=en', '--to-lang=de', interactive=False, stdout=out
        )
        pages = Page.objects.on_site(site).drafts()
        for page in pages:
            self.assertEqual(set((u'en', u'de')), set(page.get_languages()))
        # These asserts that no orphaned plugin exists
        self.assertEqual(CMSPlugin.objects.all().count(), number_start_plugins*2)
        self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
        self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins)
        root_page = Page.objects.on_site(site).get_home()
        root_plugins = CMSPlugin.objects.filter(placeholder=root_page.placeholders.get(slot="body"))
        first_plugin_en, _ = root_plugins.get(language='en', parent=None).get_plugin_instance()
        first_plugin_de, _ = root_plugins.get(language='de', parent=None).get_plugin_instance()
        self.assertEqual(first_plugin_en.plugin_type, first_plugin_de.plugin_type)
        link_en, _ = root_plugins.get(language='en', plugin_type='LinkPlugin').get_plugin_instance()
        link_de, _ = root_plugins.get(language='de', plugin_type='LinkPlugin').get_plugin_instance()
        self.assertEqual(link_en.url, link_de.url)
        self.assertEqual(link_en.get_position_in_placeholder(), link_de.get_position_in_placeholder())
        stack_plugins = CMSPlugin.objects.filter(placeholder=StaticPlaceholder.objects.order_by('?')[0].draft)
        stack_text_en, _ = stack_plugins.get(language='en', plugin_type='TextPlugin').get_plugin_instance()
        stack_text_de, _ = stack_plugins.get(language='de', plugin_type='TextPlugin').get_plugin_instance()
        self.assertEqual(stack_text_en.plugin_type, stack_text_de.plugin_type)
        self.assertEqual(stack_text_en.body, stack_text_de.body)

    def test_copy_langs_no_content(self):
        """
        Various checks here:

        * page structure is copied
        * no plugin is copied
        """
        site = 1
        number_start_plugins = CMSPlugin.objects.all().count()
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'lang', '--from-lang=en', '--to-lang=de', '--skip-content',
            interactive=False, stdout=out
        )
        pages = Page.objects.on_site(site).drafts()
        for page in pages:
            self.assertEqual(set((u'en', u'de')), set(page.get_languages()))
        # These asserts that no orphaned plugin exists
        self.assertEqual(CMSPlugin.objects.all().count(), number_start_plugins)
        self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
        self.assertEqual(CMSPlugin.objects.filter(language='de').count(), 0)
        root_page = Page.objects.on_site(site).get_home()
        root_plugins = CMSPlugin.objects.filter(
            placeholder=root_page.placeholders.get(slot="body"))
        first_plugin_en, _ = root_plugins.get(language='en', parent=None).get_plugin_instance()
        first_plugin_de = None
        with self.assertRaises(CMSPlugin.DoesNotExist):
            first_plugin_de, _ = root_plugins.get(language='de', parent=None).get_plugin_instance()
        self.assertIsNone(first_plugin_de)
        stack_plugins = CMSPlugin.objects.filter(
            placeholder=StaticPlaceholder.objects.order_by('?')[0].draft)
        stack_text_en, _ = stack_plugins.get(language='en',
                                             plugin_type='TextPlugin').get_plugin_instance()
        with self.assertRaises(CMSPlugin.DoesNotExist):
            stack_text_de, _ = stack_plugins.get(language='de',
                                                 plugin_type='TextPlugin').get_plugin_instance()

    def test_copy_sites(self):
        """
        Various checks here:

        * plugins are exactly doubled, half per site with no orphaned plugin
        * the bottom-most plugins in the nesting chain maintain the same position and the same content
        * the top-most plugin are of the same type
        """
        site_1_pk = 1
        site_2 = Site.objects.create(name='site 2')
        site_2_pk = site_2.pk
        phs = []
        for page in Page.objects.on_site(site_1_pk).drafts():
            phs.extend(page.placeholders.values_list('pk', flat=True))
        number_start_plugins = CMSPlugin.objects.filter(placeholder__in=phs).count()
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'site', '--from-site=%s' % site_1_pk, '--to-site=%s' % site_2_pk,
            stdout=out
        )
        for page in Page.objects.on_site(site_1_pk).drafts():
            page.publish('en')
        for page in Page.objects.on_site(site_2_pk).drafts():
            page.publish('en')
        pages_1 = list(Page.objects.on_site(site_1_pk).drafts())
        pages_2 = list(Page.objects.on_site(site_2_pk).drafts())
        for index, page in enumerate(pages_1):
            self.assertEqual(page.get_title('en'), pages_2[index].get_title('en'))
            self.assertEqual(page.depth, pages_2[index].depth)
        phs_1 = []
        phs_2 = []
        for page in Page.objects.on_site(site_1_pk).drafts():
            phs_1.extend(page.placeholders.values_list('pk', flat=True))
        for page in Page.objects.on_site(site_2_pk).drafts():
            phs_2.extend(page.placeholders.values_list('pk', flat=True))
        # These asserts that no orphaned plugin exists
        self.assertEqual(CMSPlugin.objects.filter(placeholder__in=phs_1).count(), number_start_plugins)
        self.assertEqual(CMSPlugin.objects.filter(placeholder__in=phs_2).count(), number_start_plugins)
        root_page_1 = Page.objects.on_site(site_1_pk).get_home(site_1_pk)
        root_page_2 = Page.objects.on_site(site_2_pk).get_home(site_2_pk)
        root_plugins_1 = CMSPlugin.objects.filter(placeholder=root_page_1.placeholders.get(slot="body"))
        root_plugins_2 = CMSPlugin.objects.filter(placeholder=root_page_2.placeholders.get(slot="body"))
        first_plugin_1, _ = root_plugins_1.get(language='en', parent=None).get_plugin_instance()
        first_plugin_2, _ = root_plugins_2.get(language='en', parent=None).get_plugin_instance()
        self.assertEqual(first_plugin_1.plugin_type, first_plugin_2.plugin_type)
        link_1, _ = root_plugins_1.get(language='en', plugin_type='LinkPlugin').get_plugin_instance()
        link_2, _ = root_plugins_2.get(language='en', plugin_type='LinkPlugin').get_plugin_instance()
        self.assertEqual(link_1.url, link_2.url)
        self.assertEqual(link_1.get_position_in_placeholder(), link_2.get_position_in_placeholder())

    def test_copy_existing_title(self):
        """
        Even if a title already exists the copy is successful, the original
        title remains untouched
        """
        site = 1
        number_start_plugins = CMSPlugin.objects.all().count()
        # create an empty title language
        root_page = Page.objects.on_site(site).get_home()
        create_title("de", "root page de", root_page)
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'lang', '--from-lang=en', '--to-lang=de', interactive=False, stdout=out
        )
        pages = Page.objects.on_site(site).drafts()
        for page in pages:
            self.assertEqual(set((u'en', u'de')), set(page.get_languages()))
        # Original Title untouched
        self.assertEqual("root page de", Page.objects.on_site(site).get_home().get_title("de"))
        # Plugins still copied
        self.assertEqual(CMSPlugin.objects.all().count(), number_start_plugins*2)
        self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
        self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins)

    def test_copy_filled_placeholder(self):
        """
        If an existing title in the target language has plugins in a placeholder
        that placeholder is skipped
        """
        site = 1
        number_start_plugins = CMSPlugin.objects.all().count()
        # create an empty title language
        root_page = Page.objects.on_site(site).get_home()
        create_title("de", "root page de", root_page)
        ph = root_page.placeholders.get(slot="body")
        add_plugin(ph, "TextPlugin", "de", body="Hello World")
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'lang', '--from-lang=en', '--to-lang=de', interactive=False, stdout=out
        )
        self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
        # one placeholder (with 7 plugins) is skipped, so the difference must be 6
        self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins-6)

    def test_copy_filled_placeholder_force_copy(self):
        """
        If an existing title in the target language has plugins in a placeholder
        and the command is called with *force-copy*, the plugins are copied on
        top of the existing one
        """
        site = 1
        number_start_plugins = CMSPlugin.objects.all().count()
        # create an empty title language
        root_page = Page.objects.on_site(site).get_home()
        create_title("de", "root page de", root_page)
        ph = root_page.placeholders.get(slot="body")
        add_plugin(ph, "TextPlugin", "de", body="Hello World")
        root_plugins = CMSPlugin.objects.filter(placeholder=ph)
        text_de_orig, _ = root_plugins.get(language='de', plugin_type='TextPlugin').get_plugin_instance()
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'lang', '--from-lang=en', '--to-lang=de', '--force', interactive=False,
            stdout=out
        )
        CMSPlugin.objects.filter(placeholder=root_page.placeholders.get(slot="body"))
        self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
        # we have an existing plugin in one placeholder, so we have one more
        self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins+1)

    def test_copy_from_non_existing_lang(self):
        """
        Copying from a language that is not present on the pages skips every
        page and (at verbosity 3) reports each skip on stdout.
        """
        site = 1
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'lang', '--from-lang=de', '--to-lang=fr', verbosity=3,
            interactive=False, stdout=out
        )
        text = out.getvalue()
        page_count = Page.objects.on_site(site).drafts().count() + 1
        for idx in range(1, page_count):
            self.assertTrue("Skipping page page%d, language de not defined" % idx in text)

    def test_copy_site_safe(self):
        """
        Check that copy of languages on one site does not interfere with other
        sites
        """
        site_other = 1
        site_active = 2
        origina_site1_langs = {}
        number_start_plugins = CMSPlugin.objects.all().count()
        site_obj = Site.objects.create(domain="sample2.com", name="sample2.com", pk=site_active)
        for page in Page.objects.on_site(1).drafts():
            origina_site1_langs[page.pk] = set(page.get_languages())
        p1 = create_page('page1', published=True, in_navigation=True, language='de', template='nav_playground.html', site=site_obj)
        create_page('page4', published=True, in_navigation=True, language='de', template='nav_playground.html', site=site_obj)
        create_page('page2', published=True, in_navigation=True, parent=p1, language='de', template='nav_playground.html', site=site_obj)
        for page in Page.objects.on_site(site_active).drafts():
            self._fill_page_body(page, 'de')
        number_site2_plugins = CMSPlugin.objects.all().count() - number_start_plugins
        out = StringIO()
        management.call_command(
            'cms', 'copy', 'lang', '--from-lang=de', '--to-lang=fr', '--site=%s' % site_active,
            interactive=False, stdout=out
        )
        for page in Page.objects.on_site(site_other).drafts():
            self.assertEqual(origina_site1_langs[page.pk], set(page.get_languages()))
        for page in Page.objects.on_site(site_active).drafts():
            self.assertEqual(set(('de', 'fr')), set(page.get_languages()))
        # plugins for site 1
        self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
        # plugins for site 2 de
        self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_site2_plugins)
        # plugins for site 2 fr
        self.assertEqual(CMSPlugin.objects.filter(language='fr').count(), number_site2_plugins)
        # global number of plugins
        self.assertEqual(CMSPlugin.objects.all().count(), number_start_plugins + number_site2_plugins*2)

    def test_copy_bad_languages(self):
        """Unknown source/target languages abort with a CommandError."""
        out = StringIO()
        with self.assertRaises(CommandError) as command_error:
            management.call_command(
                'cms', 'copy', 'lang', '--from-lang=it', '--to-lang=fr', interactive=False,
                stdout=out
            )
        self.assertEqual(str(command_error.exception), 'Both languages have to be present in settings.LANGUAGES and settings.CMS_LANGUAGES')
| jsma/django-cms | cms/tests/test_management.py | Python | bsd-3-clause | 27,071 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import inspect
import logging
# Module-level logger; DEBUG so module registration and lookup are traceable.
logging.basicConfig()
log = logging.getLogger("ModuleRegister")
log.setLevel(logging.DEBUG)

# Global registry of "module map" objects whose members are searched by
# getModule() / getClassFromModule().
MODULE_MAPS = []
def registerModuleMap(module_map):
    """Record *module_map* in the global registry so that later
    getModule()/getClassFromModule() calls can search its members."""
    MODULE_MAPS.append(module_map)
    map_source = inspect.getsource(module_map)
    log.info(
        "ModuleRegister get modules from ModuleMap content: {}".format(map_source))
def constructTrainerClass(myTrainerClass, opts):
    """Wire the pluggable functions named in *opts* onto *myTrainerClass*.

    Each opts entry names a registered module (looked up via getModule);
    the module's functions are attached to the class as attributes.
    Returns the augmented class.
    """
    log.info("ModuleRegister, myTrainerClass name is {}".
             format(myTrainerClass.__name__))
    log.info("ModuleRegister, myTrainerClass type is {}".
             format(type(myTrainerClass)))
    log.info("ModuleRegister, myTrainerClass dir is {}".
             format(dir(myTrainerClass)))

    # Model-initialization / per-iteration hooks.
    model_module = getModule(opts['model']['model_name_py'])
    log.info("ModuleRegister, myInitializeModelModule dir is {}".
             format(dir(model_module)))
    myTrainerClass.init_model = model_module.init_model
    myTrainerClass.run_training_net = model_module.run_training_net
    myTrainerClass.fun_per_iter_b4RunNet = model_module.fun_per_iter_b4RunNet
    myTrainerClass.fun_per_epoch_b4RunNet = model_module.fun_per_epoch_b4RunNet

    # Input-pipeline hooks.
    input_module = getModule(opts['input']['input_name_py'])
    log.info("ModuleRegister, myInputModule {} dir is {}".
             format(opts['input']['input_name_py'], input_module.__name__))
    myTrainerClass.get_input_dataset = input_module.get_input_dataset
    myTrainerClass.get_model_input_fun = input_module.get_model_input_fun
    myTrainerClass.gen_input_builder_fun = input_module.gen_input_builder_fun

    # Forward-pass builder.
    forward_module = getModule(opts['model']['forward_pass_py'])
    myTrainerClass.gen_forward_pass_builder_fun = \
        forward_module.gen_forward_pass_builder_fun

    # Optional hooks: left as None when the corresponding module is absent.
    param_update_module = getModule(opts['model']['parameter_update_py'])
    if param_update_module is not None:
        myTrainerClass.gen_param_update_builder_fun = \
            param_update_module.gen_param_update_builder_fun
    else:
        myTrainerClass.gen_param_update_builder_fun = None

    optimizer_module = getModule(opts['model']['optimizer_py'])
    if optimizer_module is not None:
        myTrainerClass.gen_optimizer_fun = optimizer_module.gen_optimizer_fun
    else:
        myTrainerClass.gen_optimizer_fun = None

    rendezvous_module = getModule(opts['model']['rendezvous_py'])
    if rendezvous_module is not None:
        myTrainerClass.gen_rendezvous_ctx = rendezvous_module.gen_rendezvous_ctx
    else:
        myTrainerClass.gen_rendezvous_ctx = None

    # Output-assembly hooks.
    output_module = getModule(opts['output']['gen_output_py'])
    log.info("ModuleRegister, myOutputModule is {}".
             format(output_module.__name__))
    myTrainerClass.fun_conclude_operator = output_module.fun_conclude_operator
    myTrainerClass.assembleAllOutputs = output_module.assembleAllOutputs
    return myTrainerClass
def getModule(moduleName):
    """Return the first member named *moduleName* found in any registered
    module map, or None when no map defines it."""
    log.info("MODULE_MAPS content {}".format(str(MODULE_MAPS)))
    for module_map in MODULE_MAPS:
        log.info("iterate through MODULE_MAPS content {}".
                 format(str(module_map)))
        for member_name, member in inspect.getmembers(module_map):
            log.info("iterate through MODULE_MAPS a name {}".
                     format(str(member_name)))
            if member_name != moduleName:
                continue
            log.info("AnyExp get module {} with source:{}".
                     format(moduleName, inspect.getsource(member)))
            return member
    return None
def getClassFromModule(moduleName, className):
    """Return attribute *className* of the member named *moduleName* from the
    registered module maps, or None when no map defines that module."""
    for module_map in MODULE_MAPS:
        for member_name, member in inspect.getmembers(module_map):
            if member_name != moduleName:
                continue
            log.info("ModuleRegistry from module {} get class {} of source:{}".
                     format(moduleName, className, inspect.getsource(member)))
            return getattr(member, className)
    return None
| Yangqing/caffe2 | caffe2/contrib/playground/ModuleRegister.py | Python | apache-2.0 | 4,206 |
# This file is part of Headphones.
#
# Headphones is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Headphones is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Headphones. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import shutil
import uuid
import beets
import threading
import itertools
import headphones
from beets import autotag
from beets import config as beetsconfig
from beets.mediafile import MediaFile, FileTypeError, UnreadableFileError
from beetsplug import lyrics as beetslyrics
from headphones import notifiers, utorrent, transmission
from headphones import db, albumart, librarysync
from headphones import logger, helpers, request, mb, music_encoder
# Serializes post-processing runs so two concurrent folder scans never
# process the same snatched album twice.
postprocessor_lock = threading.Lock()
def checkFolder():
    """Scan the configured download directories for snatched albums whose
    download folder now exists, and hand each one to verify()."""
    logger.debug("Checking download folder for completed downloads (only snatched ones).")
    with postprocessor_lock:
        myDB = db.DBConnection()
        snatched_rows = myDB.select('SELECT * from snatched WHERE Status="Snatched"')
        for row in snatched_rows:
            # Without a folder name there is nothing on disk to look for.
            if not row['FolderName']:
                logger.info("No folder name found for " + row['Title'])
                continue
            # NZBs and torrents land in different download directories.
            if row['Kind'] == 'nzb':
                download_dir = headphones.CONFIG.DOWNLOAD_DIR
            else:
                download_dir = headphones.CONFIG.DOWNLOAD_TORRENT_DIR
            album_path = os.path.join(download_dir, row['FolderName']).encode(headphones.SYS_ENCODING, 'replace')
            logger.debug("Checking if %s exists" % album_path)
            if os.path.exists(album_path):
                logger.info('Found "' + row['FolderName'] + '" in ' + row['Kind'] + ' download folder. Verifying....')
                verify(row['AlbumID'], album_path, row['Kind'])
    logger.debug("Checking download folder finished.")
def verify(albumid, albumpath, Kind=None, forced=False, keep_original_folder=False):
    """Check that the files in `albumpath` really belong to album `albumid`
    and, if so, hand them to doPostProcessing().

    Matching is attempted in three passes: embedded tag metadata, file
    names, and finally track count plus total duration. If the album is not
    yet in the database (manual post-process), it is first fetched from
    MusicBrainz and inserted. When no pass matches, the snatched entry is
    marked "Unprocessed" (or "Frozen" when the database is frozen) and the
    folder is renamed accordingly.
    """
    myDB = db.DBConnection()
    release = myDB.action('SELECT * from albums WHERE AlbumID=?', [albumid]).fetchone()
    tracks = myDB.select('SELECT * from tracks WHERE AlbumID=?', [albumid])

    if not release or not tracks:
        # Unknown album: pull the release group from MusicBrainz and insert
        # the bare minimum artist/album/track rows before verifying.
        release_list = None
        try:
            release_list = mb.getReleaseGroup(albumid)
        except Exception as e:
            logger.error('Unable to get release information for manual album with rgid: %s. Error: %s', albumid, e)
            return
        if not release_list:
            logger.error('Unable to get release information for manual album with rgid: %s', albumid)
            return

        # Since we're just using this to create the bare minimum information to
        # insert an artist/album combo, use the first release
        releaseid = release_list[0]['id']
        release_dict = mb.getRelease(releaseid)
        if not release_dict:
            logger.error('Unable to get release information for manual album with rgid: %s. Cannot continue', albumid)
            return

        # Check if the artist is added to the database. In case the database is
        # frozen during post processing, new artists will not be processed. This
        # prevents new artists from appearing suddenly. In case forced is True,
        # this check is skipped, since it is assumed the user wants this.
        if headphones.CONFIG.FREEZE_DB and not forced:
            artist = myDB.select("SELECT ArtistName, ArtistID FROM artists WHERE ArtistId=? OR ArtistName=?", [release_dict['artist_id'], release_dict['artist_name']])
            if not artist:
                logger.warn("Continuing would add new artist '%s' (ID %s), "
                            "but database is frozen. Will skip postprocessing for "
                            "album with rgid: %s", release_dict['artist_name'],
                            release_dict['artist_id'], albumid)
                myDB.action('UPDATE snatched SET status = "Frozen" WHERE status NOT LIKE "Seed%" and AlbumID=?', [albumid])
                # Only tag the folder once, even across repeated scans.
                frozen = re.search(r' \(Frozen\)(?:\[\d+\])?', albumpath)
                if not frozen:
                    renameUnprocessedFolder(albumpath, tag="Frozen")
                return

        logger.info(u"Now adding/updating artist: " + release_dict['artist_name'])
        if release_dict['artist_name'].startswith('The '):
            sortname = release_dict['artist_name'][4:]
        else:
            sortname = release_dict['artist_name']
        controlValueDict = {"ArtistID": release_dict['artist_id']}
        newValueDict = {"ArtistName": release_dict['artist_name'],
                        "ArtistSortName": sortname,
                        "DateAdded": helpers.today(),
                        "Status": "Paused"}
        logger.info("ArtistID: " + release_dict['artist_id'] + " , ArtistName: " + release_dict['artist_name'])
        if headphones.CONFIG.INCLUDE_EXTRAS:
            newValueDict['IncludeExtras'] = 1
            newValueDict['Extras'] = headphones.CONFIG.EXTRAS
        myDB.upsert("artists", newValueDict, controlValueDict)

        logger.info(u"Now adding album: " + release_dict['title'])
        controlValueDict = {"AlbumID": albumid}
        newValueDict = {"ArtistID": release_dict['artist_id'],
                        "ReleaseID": albumid,
                        "ArtistName": release_dict['artist_name'],
                        "AlbumTitle": release_dict['title'],
                        "AlbumASIN": release_dict['asin'],
                        "ReleaseDate": release_dict['date'],
                        "DateAdded": helpers.today(),
                        "Type": release_dict['rg_type'],
                        "Status": "Snatched"
                        }
        myDB.upsert("albums", newValueDict, controlValueDict)

        # Delete existing tracks associated with this AlbumID since we're going to replace them and don't want any extras
        myDB.action('DELETE from tracks WHERE AlbumID=?', [albumid])
        for track in release_dict['tracks']:
            controlValueDict = {"TrackID": track['id'],
                                "AlbumID": albumid}
            newValueDict = {"ArtistID": release_dict['artist_id'],
                            "ArtistName": release_dict['artist_name'],
                            "AlbumTitle": release_dict['title'],
                            "AlbumASIN": release_dict['asin'],
                            "TrackTitle": track['title'],
                            "TrackDuration": track['duration'],
                            "TrackNumber": track['number']
                            }
            myDB.upsert("tracks", newValueDict, controlValueDict)

        controlValueDict = {"ArtistID": release_dict['artist_id']}
        newValueDict = {"Status": "Paused"}
        myDB.upsert("artists", newValueDict, controlValueDict)
        logger.info(u"Addition complete for: " + release_dict['title'] + " - " + release_dict['artist_name'])

        release = myDB.action('SELECT * from albums WHERE AlbumID=?', [albumid]).fetchone()
        tracks = myDB.select('SELECT * from tracks WHERE AlbumID=?', [albumid])

    # Collect the media files (and cue sheets) found under the album folder.
    downloaded_track_list = []
    downloaded_cuecount = 0
    for r, d, f in os.walk(albumpath):
        for files in f:
            if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                downloaded_track_list.append(os.path.join(r, files))
            elif files.lower().endswith('.cue'):
                downloaded_cuecount += 1
            # if any of the files end in *.part, we know the torrent isn't done yet. Process if forced, though
            elif files.lower().endswith(('.part', '.utpart')) and not forced:
                logger.info("Looks like " + os.path.basename(albumpath).decode(headphones.SYS_ENCODING, 'replace') + " isn't complete yet. Will try again on the next run")
                return

    # Split cue
    if headphones.CONFIG.CUE_SPLIT and downloaded_cuecount and downloaded_cuecount >= len(downloaded_track_list):
        if headphones.CONFIG.KEEP_TORRENT_FILES and Kind == "torrent":
            albumpath = helpers.preserve_torrent_directory(albumpath)
        if albumpath and helpers.cue_split(albumpath):
            downloaded_track_list = helpers.get_downloaded_track_list(albumpath)

    # test #1: metadata - usually works
    logger.debug('Verifying metadata...')
    for downloaded_track in downloaded_track_list:
        try:
            f = MediaFile(downloaded_track)
        except Exception as e:
            logger.info(u"Exception from MediaFile for: " + downloaded_track.decode(headphones.SYS_ENCODING, 'replace') + u" : " + unicode(e))
            continue
        if not f.artist:
            continue
        if not f.album:
            continue
        metaartist = helpers.latinToAscii(f.artist.lower()).encode('UTF-8')
        dbartist = helpers.latinToAscii(release['ArtistName'].lower()).encode('UTF-8')
        metaalbum = helpers.latinToAscii(f.album.lower()).encode('UTF-8')
        dbalbum = helpers.latinToAscii(release['AlbumTitle'].lower()).encode('UTF-8')
        logger.debug('Matching metadata artist: %s with artist name: %s' % (metaartist, dbartist))
        logger.debug('Matching metadata album: %s with album name: %s' % (metaalbum, dbalbum))
        if metaartist == dbartist and metaalbum == dbalbum:
            doPostProcessing(albumid, albumpath, release, tracks, downloaded_track_list, Kind, keep_original_folder)
            return

    # test #2: filenames
    logger.debug('Metadata check failed. Verifying filenames...')
    for downloaded_track in downloaded_track_list:
        track_name = os.path.splitext(downloaded_track)[0]
        split_track_name = re.sub('[\.\-\_]', ' ', track_name).lower()
        for track in tracks:
            if not track['TrackTitle']:
                continue
            dbtrack = helpers.latinToAscii(track['TrackTitle'].lower()).encode('UTF-8')
            filetrack = helpers.latinToAscii(split_track_name).encode('UTF-8')
            logger.debug('Checking if track title: %s is in file name: %s' % (dbtrack, filetrack))
            if dbtrack in filetrack:
                doPostProcessing(albumid, albumpath, release, tracks, downloaded_track_list, Kind, keep_original_folder)
                return

    # test #3: number of songs and duration
    logger.debug('Filename check failed. Verifying album length...')
    db_track_duration = 0
    downloaded_track_duration = 0
    logger.debug('Total music files in %s: %i' % (albumpath, len(downloaded_track_list)))
    logger.debug('Total tracks for this album in the database: %i' % len(tracks))
    if len(tracks) == len(downloaded_track_list):
        for track in tracks:
            try:
                db_track_duration += track['TrackDuration'] / 1000
            except Exception:
                # Bug fix: this used to clear downloaded_track_duration, which
                # the loop below immediately clobbered via "+=" (False + n == n),
                # so a partial DB total could still be compared. Clearing the
                # DB-side total keeps the duration check disabled as intended.
                db_track_duration = False
                break
        for downloaded_track in downloaded_track_list:
            try:
                f = MediaFile(downloaded_track)
                downloaded_track_duration += f.length
            except Exception:
                downloaded_track_duration = False
                break
        if downloaded_track_duration and db_track_duration:
            logger.debug('Downloaded album duration: %i' % downloaded_track_duration)
            logger.debug('Database track duration: %i' % db_track_duration)
            delta = abs(downloaded_track_duration - db_track_duration)
            # Accept up to four minutes of total-duration mismatch.
            if delta < 240:
                doPostProcessing(albumid, albumpath, release, tracks, downloaded_track_list, Kind, keep_original_folder)
                return

    # All three checks failed: flag the snatch and tag the folder (once).
    logger.warn(u'Could not identify album: %s. It may not be the intended album.' % albumpath.decode(headphones.SYS_ENCODING, 'replace'))
    myDB.action('UPDATE snatched SET status = "Unprocessed" WHERE status NOT LIKE "Seed%" and AlbumID=?', [albumid])
    processed = re.search(r' \(Unprocessed\)(?:\[\d+\])?', albumpath)
    if not processed:
        renameUnprocessedFolder(albumpath, tag="Unprocessed")
def doPostProcessing(albumid, albumpath, release, tracks, downloaded_track_list, Kind=None, keep_original_folder=False):
    """Run the configured post-processing pipeline on a downloaded album.

    Optionally copies the download into a 'headphones-modified' subfolder
    (to preserve the original for torrent seeding), validates that every
    track is a readable and writable media file, then applies each enabled
    step (encoding, album art, cleanup, NFO handling, metadata correction,
    lyrics, renaming, moving), updates the database status and fires every
    enabled notifier.

    Kind: download kind, e.g. "torrent" (affects folder preservation).
    keep_original_folder: force preserving the source folder regardless of config.
    """
    logger.info('Starting post-processing for: %s - %s' % (release['ArtistName'], release['AlbumTitle']))
    # Check to see if we're preserving the torrent dir
    if (headphones.CONFIG.KEEP_TORRENT_FILES and Kind == "torrent" and 'headphones-modified' not in albumpath) or headphones.CONFIG.KEEP_ORIGINAL_FOLDER or keep_original_folder:
        new_folder = os.path.join(albumpath, 'headphones-modified'.encode(headphones.SYS_ENCODING, 'replace'))
        logger.info("Copying files to 'headphones-modified' subfolder to preserve downloaded files for seeding")
        try:
            shutil.copytree(albumpath, new_folder)
            # Update the album path with the new location
            albumpath = new_folder
        except Exception as e:
            logger.warn("Cannot copy/move files to temp folder: " + new_folder.decode(headphones.SYS_ENCODING, 'replace') + ". Not continuing. Error: " + str(e))
            return
        # Need to update the downloaded track list with the new location.
        # Could probably just throw in the "headphones-modified" folder,
        # but this is good to make sure we're not counting files that may have failed to move
        downloaded_track_list = []
        for r, d, f in os.walk(albumpath):
            for files in f:
                if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                    downloaded_track_list.append(os.path.join(r, files))
    # Check if files are valid media files and are writable, before the steps
    # below are executed. This simplifies errors and prevents unfinished steps.
    for downloaded_track in downloaded_track_list:
        try:
            f = MediaFile(downloaded_track)
            if f is None:
                # this test is just to keep pyflakes from complaining about an unused variable
                return
        except (FileTypeError, UnreadableFileError):
            logger.error("Track file is not a valid media file: %s. Not " \
                "continuing.", downloaded_track.decode(
                    headphones.SYS_ENCODING, "replace"))
            return
        except IOError:
            # BUGFIX: the original call left the %s placeholder without an
            # argument, producing a broken log line. Pass the file name.
            logger.error("Unable to find media file: %s. Not continuing.",
                downloaded_track.decode(headphones.SYS_ENCODING, "replace"))
            return
        # If one of the options below is set, it will access/touch/modify the
        # files, which requires write permissions. This step just check this, so
        # it will not try and fail lateron, with strange exceptions.
        if headphones.CONFIG.EMBED_ALBUM_ART or headphones.CONFIG.CLEANUP_FILES or \
                headphones.CONFIG.ADD_ALBUM_ART or headphones.CONFIG.CORRECT_METADATA or \
                headphones.CONFIG.EMBED_LYRICS or headphones.CONFIG.RENAME_FILES or \
                headphones.CONFIG.MOVE_FILES:
            try:
                with open(downloaded_track, "a+b") as fp:
                    fp.seek(0)
            except IOError as e:
                logger.debug("Write check exact error: %s", e)
                logger.error("Track file is not writable. This is required " \
                    "for some post processing steps: %s. Not continuing.",
                    downloaded_track.decode(headphones.SYS_ENCODING, "replace"))
                return
    # start encoding
    if headphones.CONFIG.MUSIC_ENCODER:
        downloaded_track_list = music_encoder.encode(albumpath)
        if not downloaded_track_list:
            return
    artwork = None
    album_art_path = albumart.getAlbumArt(albumid)
    if headphones.CONFIG.EMBED_ALBUM_ART or headphones.CONFIG.ADD_ALBUM_ART:
        if album_art_path:
            artwork = request.request_content(album_art_path)
        else:
            artwork = None
        # Fall back to Last.FM art when the primary source gave nothing usable
        if not album_art_path or not artwork or len(artwork) < 100:
            logger.info("No suitable album art found from Amazon. Checking Last.FM....")
            artwork = albumart.getCachedArt(albumid)
            if not artwork or len(artwork) < 100:
                artwork = False
                logger.info("No suitable album art found from Last.FM. Not adding album art")
    if headphones.CONFIG.EMBED_ALBUM_ART and artwork:
        embedAlbumArt(artwork, downloaded_track_list)
    if headphones.CONFIG.CLEANUP_FILES:
        cleanupFiles(albumpath)
    if headphones.CONFIG.KEEP_NFO:
        renameNFO(albumpath)
    if headphones.CONFIG.ADD_ALBUM_ART and artwork:
        addAlbumArt(artwork, albumpath, release)
    if headphones.CONFIG.CORRECT_METADATA:
        correctedMetadata = correctMetadata(albumid, release, downloaded_track_list)
        if not correctedMetadata and headphones.CONFIG.DO_NOT_PROCESS_UNMATCHED:
            return
    if headphones.CONFIG.EMBED_LYRICS:
        embedLyrics(downloaded_track_list)
    if headphones.CONFIG.RENAME_FILES:
        renameFiles(albumpath, downloaded_track_list, release)
    if headphones.CONFIG.MOVE_FILES and not headphones.CONFIG.DESTINATION_DIR:
        logger.error('No DESTINATION_DIR has been set. Set "Destination Directory" to the parent directory you want to move the files to')
        albumpaths = [albumpath]
    elif headphones.CONFIG.MOVE_FILES and headphones.CONFIG.DESTINATION_DIR:
        albumpaths = moveFiles(albumpath, release, tracks)
    else:
        albumpaths = [albumpath]
    updateFilePermissions(albumpaths)
    myDB = db.DBConnection()
    myDB.action('UPDATE albums SET status = "Downloaded" WHERE AlbumID=?', [albumid])
    myDB.action('UPDATE snatched SET status = "Processed" WHERE Status NOT LIKE "Seed%" and AlbumID=?', [albumid])
    # Check if torrent has finished seeding
    if headphones.CONFIG.TORRENT_DOWNLOADER == 1 or headphones.CONFIG.TORRENT_DOWNLOADER == 2:
        seed_snatched = myDB.action('SELECT * from snatched WHERE Status="Seed_Snatched" and AlbumID=?', [albumid]).fetchone()
        if seed_snatched:
            hash = seed_snatched['FolderName']
            torrent_removed = False
            logger.info(u'%s - %s. Checking if torrent has finished seeding and can be removed' % (release['ArtistName'], release['AlbumTitle']))
            if headphones.CONFIG.TORRENT_DOWNLOADER == 1:
                torrent_removed = transmission.removeTorrent(hash, True)
            else:
                torrent_removed = utorrent.removeTorrent(hash, True)
            # Torrent removed, delete the snatched record, else update Status for scheduled job to check
            if torrent_removed:
                myDB.action('DELETE from snatched WHERE status = "Seed_Snatched" and AlbumID=?', [albumid])
            else:
                myDB.action('UPDATE snatched SET status = "Seed_Processed" WHERE status = "Seed_Snatched" and AlbumID=?', [albumid])
    # Update the have tracks for all created dirs:
    for albumpath in albumpaths:
        librarysync.libraryScan(dir=albumpath, append=True, ArtistID=release['ArtistID'], ArtistName=release['ArtistName'])
    logger.info(u'Post-processing for %s - %s complete' % (release['ArtistName'], release['AlbumTitle']))
    pushmessage = release['ArtistName'] + ' - ' + release['AlbumTitle']
    statusmessage = "Download and Postprocessing completed"
    # Fire every enabled notifier with the completion message
    if headphones.CONFIG.GROWL_ENABLED:
        logger.info(u"Growl request")
        growl = notifiers.GROWL()
        growl.notify(pushmessage, statusmessage)
    if headphones.CONFIG.PROWL_ENABLED:
        logger.info(u"Prowl request")
        prowl = notifiers.PROWL()
        prowl.notify(pushmessage, statusmessage)
    if headphones.CONFIG.XBMC_ENABLED:
        xbmc = notifiers.XBMC()
        if headphones.CONFIG.XBMC_UPDATE:
            xbmc.update()
        if headphones.CONFIG.XBMC_NOTIFY:
            xbmc.notify(release['ArtistName'],
                        release['AlbumTitle'],
                        album_art_path)
    if headphones.CONFIG.LMS_ENABLED:
        lms = notifiers.LMS()
        lms.update()
    if headphones.CONFIG.PLEX_ENABLED:
        plex = notifiers.Plex()
        if headphones.CONFIG.PLEX_UPDATE:
            plex.update()
        if headphones.CONFIG.PLEX_NOTIFY:
            plex.notify(release['ArtistName'],
                        release['AlbumTitle'],
                        album_art_path)
    if headphones.CONFIG.NMA_ENABLED:
        nma = notifiers.NMA()
        nma.notify(release['ArtistName'], release['AlbumTitle'])
    if headphones.CONFIG.PUSHALOT_ENABLED:
        logger.info(u"Pushalot request")
        pushalot = notifiers.PUSHALOT()
        pushalot.notify(pushmessage, statusmessage)
    if headphones.CONFIG.SYNOINDEX_ENABLED:
        syno = notifiers.Synoindex()
        for albumpath in albumpaths:
            syno.notify(albumpath)
    if headphones.CONFIG.PUSHOVER_ENABLED:
        logger.info(u"Pushover request")
        pushover = notifiers.PUSHOVER()
        pushover.notify(pushmessage, "Headphones")
    if headphones.CONFIG.PUSHBULLET_ENABLED:
        logger.info(u"PushBullet request")
        pushbullet = notifiers.PUSHBULLET()
        pushbullet.notify(pushmessage, statusmessage)
    if headphones.CONFIG.TWITTER_ENABLED:
        logger.info(u"Sending Twitter notification")
        twitter = notifiers.TwitterNotifier()
        twitter.notify_download(pushmessage)
    if headphones.CONFIG.OSX_NOTIFY_ENABLED:
        from headphones import cache
        c = cache.Cache()
        album_art = c.get_artwork_from_cache(None, release['AlbumID'])
        logger.info(u"Sending OS X notification")
        osx_notify = notifiers.OSX_NOTIFY()
        osx_notify.notify(release['ArtistName'],
                          release['AlbumTitle'],
                          statusmessage,
                          image=album_art)
    if headphones.CONFIG.BOXCAR_ENABLED:
        logger.info(u"Sending Boxcar2 notification")
        boxcar = notifiers.BOXCAR()
        boxcar.notify('Headphones processed: ' + pushmessage,
                      statusmessage, release['AlbumID'])
    if headphones.CONFIG.SUBSONIC_ENABLED:
        logger.info(u"Sending Subsonic update")
        subsonic = notifiers.SubSonicNotifier()
        subsonic.notify(albumpaths)
    if headphones.CONFIG.MPC_ENABLED:
        mpc = notifiers.MPC()
        mpc.notify()
    if headphones.CONFIG.EMAIL_ENABLED:
        logger.info(u"Sending Email notification")
        email = notifiers.Email()
        subject = release['ArtistName'] + ' - ' + release['AlbumTitle']
        email.notify(subject, "Download and Postprocessing completed")
def embedAlbumArt(artwork, downloaded_track_list):
    """Embed the given artwork bytes into every track in the list.

    Unreadable files and per-file save errors are logged and the track is
    skipped, so one bad file does not abort the whole album.
    """
    logger.info('Embedding album art')
    for downloaded_track in downloaded_track_list:
        try:
            f = MediaFile(downloaded_track)
        except Exception:
            # Narrowed from a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            logger.error(u'Could not read %s. Not adding album art' % downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
            continue
        logger.debug('Adding album art to: %s' % downloaded_track)
        try:
            f.art = artwork
            f.save()
        except Exception as e:
            logger.error(u'Error embedding album art to: %s. Error: %s' % (downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), str(e)))
            continue
def addAlbumArt(artwork, albumpath, release):
    """Save the fetched album artwork bytes as a .jpg inside the album folder."""
    logger.info('Adding album art to folder')
    try:
        year = release['ReleaseDate'][:4]
    except TypeError:
        # ReleaseDate can be missing/None in the database
        year = ''
    artist_name = release['ArtistName']
    album_title = release['AlbumTitle']
    substitutions = {
        '$Artist': artist_name,
        '$Album': album_title,
        '$Year': year,
        '$artist': artist_name.lower(),
        '$album': album_title.lower(),
        '$year': year,
    }
    art_name = helpers.replace_all(headphones.CONFIG.ALBUM_ART_FORMAT.strip(), substitutions) + ".jpg"
    art_name = helpers.replace_illegal_chars(art_name).encode(headphones.SYS_ENCODING, 'replace')
    if headphones.CONFIG.FILE_UNDERSCORES:
        art_name = art_name.replace(' ', '_')
    if art_name.startswith('.'):
        # A leading dot would hide the file on *nix, swap it for an underscore
        art_name = art_name.replace(".", "_", 1)
    try:
        with open(os.path.join(albumpath, art_name), 'wb') as art_file:
            art_file.write(artwork)
    except IOError as e:
        logger.error('Error saving album art: %s', e)
        return
def cleanupFiles(albumpath):
    """Delete every non-media file found anywhere under the album folder."""
    logger.info('Cleaning up files')
    # str.endswith accepts a tuple of suffixes, so build it once up front
    media_suffixes = tuple('.' + fmt.lower() for fmt in headphones.MEDIA_FORMATS)
    for root, _dirs, filenames in os.walk(albumpath):
        for filename in filenames:
            if filename.lower().endswith(media_suffixes):
                continue
            logger.debug('Removing: %s' % filename)
            try:
                os.remove(os.path.join(root, filename))
            except Exception as e:
                logger.error(u'Could not remove file: %s. Error: %s' % (filename.decode(headphones.SYS_ENCODING, 'replace'), e))
def renameNFO(albumpath):
    """Rename every *.nfo under the album folder to *.orig.nfo so it survives cleanup."""
    logger.info('Renaming NFO')
    for root, _dirs, filenames in os.walk(albumpath):
        for nfo_name in filenames:
            if not nfo_name.lower().endswith('.nfo'):
                continue
            decoded = nfo_name.decode(headphones.SYS_ENCODING, 'replace')
            logger.debug('Renaming: "%s" to "%s"' % (decoded, decoded + '-orig'))
            old_path = os.path.join(root, nfo_name)
            try:
                # "foo.nfo" -> "foo.orig.nfo"
                os.rename(old_path, old_path[:-3] + 'orig.nfo')
            except Exception as e:
                logger.error(u'Could not rename file: %s. Error: %s' % (old_path.decode(headphones.SYS_ENCODING, 'replace'), e))
def moveFiles(albumpath, release, tracks):
    # Move the finished album from the download area into the configured
    # destination folder(s), building the destination folder name from
    # FOLDER_FORMAT. Lossy and lossless media can be routed to two different
    # destination directories. Returns the list of destination paths, or
    # [albumpath] when the move could not be performed.
    logger.info("Moving files: %s" % albumpath)
    try:
        year = release['ReleaseDate'][:4]
    except TypeError:
        # ReleaseDate can be missing/None in the database
        year = u''
    artist = release['ArtistName'].replace('/', '_')
    album = release['AlbumTitle'].replace('/', '_')
    if headphones.CONFIG.FILE_UNDERSCORES:
        artist = artist.replace(' ', '_')
        album = album.replace(' ', '_')
    releasetype = release['Type'].replace('/', '_')
    if release['ArtistName'].startswith('The '):
        sortname = release['ArtistName'][4:] + ", The"
    else:
        sortname = release['ArtistName']
    if sortname[0].isdigit():
        firstchar = u'0-9'
    else:
        firstchar = sortname[0]
    for r, d, f in os.walk(albumpath):
        try:
            origfolder = os.path.basename(os.path.normpath(r).decode(headphones.SYS_ENCODING, 'replace'))
        except:
            origfolder = u''
    # NOTE(review): the walk above leaves `origfolder` set to the *last*
    # directory yielded by os.walk() -- confirm this is the intended source
    # for $OriginalFolder.
    values = {'$Artist': artist,
              '$SortArtist': sortname,
              '$Album': album,
              '$Year': year,
              '$Type': releasetype,
              '$OriginalFolder': origfolder,
              '$First': firstchar.upper(),
              '$artist': artist.lower(),
              '$sortartist': sortname.lower(),
              '$album': album.lower(),
              '$year': year,
              '$type': releasetype.lower(),
              '$first': firstchar.lower(),
              '$originalfolder': origfolder.lower()
              }
    folder = helpers.replace_all(headphones.CONFIG.FOLDER_FORMAT.strip(), values, normalize=True)
    folder = helpers.replace_illegal_chars(folder, type="folder")
    # Neutralize path components that would be hidden or relative
    folder = folder.replace('./', '_/').replace('/.', '/_')
    if folder.endswith('.'):
        folder = folder[:-1] + '_'
    if folder.startswith('.'):
        folder = '_' + folder[1:]
    # Grab our list of files early on so we can determine if we need to create
    # the lossy_dest_dir, lossless_dest_dir, or both
    files_to_move = []
    lossy_media = False
    lossless_media = False
    for r, d, f in os.walk(albumpath):
        for files in f:
            files_to_move.append(os.path.join(r, files))
            if any(files.lower().endswith('.' + x.lower()) for x in headphones.LOSSY_MEDIA_FORMATS):
                lossy_media = True
            if any(files.lower().endswith('.' + x.lower()) for x in headphones.LOSSLESS_MEDIA_FORMATS):
                lossless_media = True
    # Do some sanity checking to see what directories we need to create:
    make_lossy_folder = False
    make_lossless_folder = False
    lossy_destination_path = os.path.normpath(os.path.join(headphones.CONFIG.DESTINATION_DIR, folder)).encode(headphones.SYS_ENCODING, 'replace')
    lossless_destination_path = os.path.normpath(os.path.join(headphones.CONFIG.LOSSLESS_DESTINATION_DIR, folder)).encode(headphones.SYS_ENCODING, 'replace')
    # If they set a destination dir for lossless media, only create the lossy folder if there is lossy media
    if headphones.CONFIG.LOSSLESS_DESTINATION_DIR:
        if lossy_media:
            make_lossy_folder = True
        if lossless_media:
            make_lossless_folder = True
    # If they haven't set a lossless dest_dir, just create the "lossy" folder
    else:
        make_lossy_folder = True
    last_folder = headphones.CONFIG.FOLDER_FORMAT.strip().split('/')[-1]
    if make_lossless_folder:
        # Only rename the folder if they use the album name, otherwise merge into existing folder
        if os.path.exists(lossless_destination_path) and 'album' in last_folder.lower():
            create_duplicate_folder = False
            if headphones.CONFIG.REPLACE_EXISTING_FOLDERS:
                try:
                    shutil.rmtree(lossless_destination_path)
                except Exception as e:
                    logger.error("Error deleting existing folder: %s. Creating duplicate folder. Error: %s" % (lossless_destination_path.decode(headphones.SYS_ENCODING, 'replace'), e))
                    create_duplicate_folder = True
            if not headphones.CONFIG.REPLACE_EXISTING_FOLDERS or create_duplicate_folder:
                # Find the first free "name[N]" suffix for a duplicate folder
                temp_folder = folder
                i = 1
                while True:
                    newfolder = temp_folder + '[%i]' % i
                    lossless_destination_path = os.path.normpath(os.path.join(headphones.CONFIG.LOSSLESS_DESTINATION_DIR, newfolder)).encode(headphones.SYS_ENCODING, 'replace')
                    if os.path.exists(lossless_destination_path):
                        i += 1
                    else:
                        temp_folder = newfolder
                        break
        if not os.path.exists(lossless_destination_path):
            try:
                os.makedirs(lossless_destination_path)
            except Exception as e:
                logger.error('Could not create lossless folder for %s. (Error: %s)' % (release['AlbumTitle'], e))
                # Only abort outright if there is no lossy folder to fall back on
                if not make_lossy_folder:
                    return [albumpath]
    if make_lossy_folder:
        # Same duplicate/replace handling as the lossless branch above
        if os.path.exists(lossy_destination_path) and 'album' in last_folder.lower():
            create_duplicate_folder = False
            if headphones.CONFIG.REPLACE_EXISTING_FOLDERS:
                try:
                    shutil.rmtree(lossy_destination_path)
                except Exception as e:
                    logger.error("Error deleting existing folder: %s. Creating duplicate folder. Error: %s" % (lossy_destination_path.decode(headphones.SYS_ENCODING, 'replace'), e))
                    create_duplicate_folder = True
            if not headphones.CONFIG.REPLACE_EXISTING_FOLDERS or create_duplicate_folder:
                temp_folder = folder
                i = 1
                while True:
                    newfolder = temp_folder + '[%i]' % i
                    lossy_destination_path = os.path.normpath(os.path.join(headphones.CONFIG.DESTINATION_DIR, newfolder)).encode(headphones.SYS_ENCODING, 'replace')
                    if os.path.exists(lossy_destination_path):
                        i += 1
                    else:
                        temp_folder = newfolder
                        break
        if not os.path.exists(lossy_destination_path):
            try:
                os.makedirs(lossy_destination_path)
            except Exception as e:
                logger.error('Could not create folder for %s. Not moving: %s' % (release['AlbumTitle'], e))
                return [albumpath]
    logger.info('Checking which files we need to move.....')
    # Move files to the destination folder, renaming them if they already exist
    # If we have two desination_dirs, move non-music files to both
    if make_lossy_folder and make_lossless_folder:
        for file_to_move in files_to_move:
            if any(file_to_move.lower().endswith('.' + x.lower()) for x in headphones.LOSSY_MEDIA_FORMATS):
                helpers.smartMove(file_to_move, lossy_destination_path)
            elif any(file_to_move.lower().endswith('.' + x.lower()) for x in headphones.LOSSLESS_MEDIA_FORMATS):
                helpers.smartMove(file_to_move, lossless_destination_path)
            # If it's a non-music file, move it to both dirs
            # TODO: Move specific-to-lossless files to the lossless dir only
            else:
                moved_to_lossy_folder = helpers.smartMove(file_to_move, lossy_destination_path, delete=False)
                moved_to_lossless_folder = helpers.smartMove(file_to_move, lossless_destination_path, delete=False)
                if moved_to_lossy_folder or moved_to_lossless_folder:
                    try:
                        os.remove(file_to_move)
                    except Exception as e:
                        logger.error("Error deleting file '" + file_to_move.decode(headphones.SYS_ENCODING, 'replace') + "' from source directory")
                else:
                    logger.error("Error copying '" + file_to_move.decode(headphones.SYS_ENCODING, 'replace') + "'. Not deleting from download directory")
    elif make_lossless_folder and not make_lossy_folder:
        for file_to_move in files_to_move:
            helpers.smartMove(file_to_move, lossless_destination_path)
    else:
        for file_to_move in files_to_move:
            helpers.smartMove(file_to_move, lossy_destination_path)
    # Chmod the directories using the folder_format (script courtesy of premiso!)
    folder_list = folder.split('/')
    temp_fs = []
    if make_lossless_folder:
        temp_fs.append(headphones.CONFIG.LOSSLESS_DESTINATION_DIR)
    if make_lossy_folder:
        temp_fs.append(headphones.CONFIG.DESTINATION_DIR)
    for temp_f in temp_fs:
        # Walk down the created path one component at a time, chmod-ing each
        for f in folder_list:
            temp_f = os.path.join(temp_f, f)
            try:
                os.chmod(os.path.normpath(temp_f).encode(headphones.SYS_ENCODING, 'replace'), int(headphones.CONFIG.FOLDER_PERMISSIONS, 8))
            except Exception as e:
                logger.error("Error trying to change permissions on folder: %s. %s", temp_f, e)
    # If we failed to move all the files out of the directory, this will fail too
    try:
        shutil.rmtree(albumpath)
    except Exception as e:
        logger.error('Could not remove directory: %s. %s', albumpath, e)
    destination_paths = []
    if make_lossy_folder:
        destination_paths.append(lossy_destination_path)
    if make_lossless_folder:
        destination_paths.append(lossless_destination_path)
    return destination_paths
def correctMetadata(albumid, release, downloaded_track_list):
    # Use beets' autotagger to match the downloaded tracks against the
    # expected release and write corrected tags into the files. Lossy and
    # lossless files are tagged as two separate groups. Returns True on
    # success, False as soon as any group cannot be matched or written.
    logger.info('Preparing to write metadata to tracks....')
    lossy_items = []
    lossless_items = []
    # Process lossless & lossy media formats separately
    for downloaded_track in downloaded_track_list:
        try:
            if any(downloaded_track.lower().endswith('.' + x.lower()) for x in headphones.LOSSLESS_MEDIA_FORMATS):
                lossless_items.append(beets.library.Item.from_path(downloaded_track))
            elif any(downloaded_track.lower().endswith('.' + x.lower()) for x in headphones.LOSSY_MEDIA_FORMATS):
                lossy_items.append(beets.library.Item.from_path(downloaded_track))
            else:
                logger.warn("Skipping: %s because it is not a mutagen friendly file format", downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
        except Exception as e:
            logger.error("Beets couldn't create an Item from: %s - not a media file? %s", downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), str(e))
    for items in [lossy_items, lossless_items]:
        if not items:
            continue
        try:
            # Ask beets for the best release match for this group of files
            cur_artist, cur_album, candidates, rec = autotag.tag_album(items, search_artist=helpers.latinToAscii(release['ArtistName']), search_album=helpers.latinToAscii(release['AlbumTitle']))
        except Exception as e:
            logger.error('Error getting recommendation: %s. Not writing metadata', e)
            return False
        if str(rec) == 'Recommendation.none':
            logger.warn('No accurate album match found for %s, %s - not writing metadata', release['ArtistName'], release['AlbumTitle'])
            return False
        if candidates:
            # Candidates are ordered best-first; take the top match
            dist, info, mapping, extra_items, extra_tracks = candidates[0]
        else:
            logger.warn('No accurate album match found for %s, %s - not writing metadata', release['ArtistName'], release['AlbumTitle'])
            return False
        logger.info('Beets recommendation for tagging items: %s' % rec)
        # TODO: Handle extra_items & extra_tracks
        autotag.apply_metadata(info, mapping)
        # Set ID3 tag version
        if headphones.CONFIG.IDTAG:
            beetsconfig['id3v23'] = True
            logger.debug("Using ID3v2.3")
        else:
            beetsconfig['id3v23'] = False
            logger.debug("Using ID3v2.4")
        for item in items:
            try:
                item.write()
                logger.info("Successfully applied metadata to: %s", item.path.decode(headphones.SYS_ENCODING, 'replace'))
            except Exception as e:
                logger.warn("Error writing metadata to '%s': %s", item.path.decode(headphones.SYS_ENCODING, 'replace'), str(e))
                return False
    return True
def embedLyrics(downloaded_track_list):
    """Fetch lyrics via the beets lyrics plugin and write them into each track.

    Lossy and lossless files are collected separately (mirroring the
    metadata-correction step); per-track failures are logged and skipped.
    """
    logger.info('Adding lyrics')
    # TODO: If adding lyrics for flac & lossy, only fetch the lyrics once and apply it to both files
    # TODO: Get beets to add automatically by enabling the plugin
    lossy_items = []
    lossless_items = []
    lp = beetslyrics.LyricsPlugin()
    for downloaded_track in downloaded_track_list:
        try:
            if any(downloaded_track.lower().endswith('.' + x.lower()) for x in headphones.LOSSLESS_MEDIA_FORMATS):
                lossless_items.append(beets.library.Item.from_path(downloaded_track))
            elif any(downloaded_track.lower().endswith('.' + x.lower()) for x in headphones.LOSSY_MEDIA_FORMATS):
                lossy_items.append(beets.library.Item.from_path(downloaded_track))
            else:
                logger.warn("Skipping: %s because it is not a mutagen friendly file format", downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
        except Exception as e:
            logger.error("Beets couldn't create an Item from: %s - not a media file? %s", downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), str(e))
    for items in [lossy_items, lossless_items]:
        if not items:
            continue
        for item in items:
            lyrics = None
            # Try each (artist, titles) search pair until one returns lyrics
            for artist, titles in beetslyrics.search_pairs(item):
                lyrics = [lp.get_lyrics(artist, title) for title in titles]
                if any(lyrics):
                    break
            # BUGFIX: guard against search_pairs() yielding nothing, which
            # previously left `lyrics` as None and crashed the join below.
            lyrics = u"\n\n---\n\n".join([l for l in lyrics if l]) if lyrics else u''
            if lyrics:
                logger.debug('Adding lyrics to: %s', item.title)
                item.lyrics = lyrics
                try:
                    item.write()
                except Exception:
                    logger.error('Cannot save lyrics to: %s. Skipping', item.title)
            else:
                logger.debug('No lyrics found for track: %s', item.title)
def renameFiles(albumpath, downloaded_track_list, release):
    # Rename each downloaded track according to FILE_FORMAT, using the tags
    # already present in the file; when no title tag exists, fall back to a
    # cleaned-up version of the existing file name.
    logger.info('Renaming files')
    try:
        year = release['ReleaseDate'][:4]
    except TypeError:
        # ReleaseDate can be missing/None in the database
        year = ''
    # Until tagging works better I'm going to rely on the already provided metadata
    for downloaded_track in downloaded_track_list:
        try:
            f = MediaFile(downloaded_track)
        except:
            logger.info("MediaFile couldn't parse: %s", downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
            continue
        if not f.disc:
            discnumber = ''
        else:
            discnumber = '%d' % f.disc
        if not f.track:
            tracknumber = ''
        else:
            tracknumber = '%02d' % f.track
        if not f.title:
            # No title tag: derive the new name from the existing file name
            basename = os.path.basename(downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
            title = os.path.splitext(basename)[0]
            ext = os.path.splitext(basename)[1]
            new_file_name = helpers.cleanTitle(title) + ext
        else:
            title = f.title
            # On various-artists releases prefer the per-track artist tag
            if release['ArtistName'] == "Various Artists" and f.artist:
                artistname = f.artist
            else:
                artistname = release['ArtistName']
            if artistname.startswith('The '):
                sortname = artistname[4:] + ", The"
            else:
                sortname = artistname
            values = {'$Disc': discnumber,
                      '$Track': tracknumber,
                      '$Title': title,
                      '$Artist': artistname,
                      '$SortArtist': sortname,
                      '$Album': release['AlbumTitle'],
                      '$Year': year,
                      '$disc': discnumber,
                      '$track': tracknumber,
                      '$title': title.lower(),
                      '$artist': artistname.lower(),
                      '$sortartist': sortname.lower(),
                      '$album': release['AlbumTitle'].lower(),
                      '$year': year
                      }
            ext = os.path.splitext(downloaded_track)[1]
            new_file_name = helpers.replace_all(headphones.CONFIG.FILE_FORMAT.strip(), values).replace('/', '_') + ext
        new_file_name = helpers.replace_illegal_chars(new_file_name).encode(headphones.SYS_ENCODING, 'replace')
        if headphones.CONFIG.FILE_UNDERSCORES:
            new_file_name = new_file_name.replace(' ', '_')
        if new_file_name.startswith('.'):
            # A leading dot would hide the file on *nix
            new_file_name = new_file_name.replace(".", "_", 1)
        new_file = os.path.join(albumpath, new_file_name)
        # NOTE(review): this compares the full source path with the bare file
        # name, so it can only match when albumpath is empty -- it probably
        # should compare against `new_file`. Confirm before changing.
        if downloaded_track == new_file_name:
            logger.debug("Renaming for: " + downloaded_track.decode(headphones.SYS_ENCODING, 'replace') + " is not neccessary")
            continue
        logger.debug('Renaming %s ---> %s', downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), new_file_name.decode(headphones.SYS_ENCODING, 'replace'))
        try:
            os.rename(downloaded_track, new_file)
        except Exception as e:
            logger.error('Error renaming file: %s. Error: %s', downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), e)
            continue
def updateFilePermissions(albumpaths):
    """Apply the configured FILE_PERMISSIONS (an octal string) to every file
    under each of the given album folders.

    Failures are logged per file and skipped so the rest of the album is
    still processed.
    """
    for folder in albumpaths:
        logger.info("Updating file permissions in %s", folder)
        for r, d, f in os.walk(folder):
            for files in f:
                full_path = os.path.join(r, files)
                try:
                    os.chmod(full_path, int(headphones.CONFIG.FILE_PERMISSIONS, 8))
                except (OSError, ValueError):
                    # Narrowed from a bare `except:`: os.chmod raises OSError,
                    # int() raises ValueError on a malformed permission string.
                    logger.error("Could not change permissions for file: %s", full_path)
                    continue
def renameUnprocessedFolder(path, tag):
    """
    Rename a unprocessed folder to a new unique name to indicate a certain
    status.

    The folder is renamed to "<path> (<tag>)", or to "<path> (<tag>[N])" for
    the first N >= 1 that does not collide with an existing path.
    """
    for i in itertools.count():
        if i == 0:
            new_path = "%s (%s)" % (path, tag)
        else:
            new_path = "%s (%s[%d])" % (path, tag, i)
        if not os.path.exists(new_path):
            os.rename(path, new_path)
            return
        # Name taken: let itertools.count() advance i. (The original code
        # also did a manual `i += 1` here, which had no effect because the
        # loop variable is reassigned each iteration.)
def forcePostProcess(dir=None, expand_subfolders=True, album_dir=None, keep_original_folder=False):
    # Scan the download folder(s) and try to match each album folder to a
    # database entry, using (in order): the snatched table, a trailing
    # MusicBrainz release-group id in the folder name, the parsed folder
    # name, embedded metadata, and finally the bare folder name. Matches
    # are handed off to verify().
    logger.info('Force checking download folder for completed downloads')
    ignored = 0
    if album_dir:
        # Caller specified a single album folder to process
        folders = [album_dir.encode(headphones.SYS_ENCODING, 'replace')]
    else:
        download_dirs = []
        if dir:
            download_dirs.append(dir.encode(headphones.SYS_ENCODING, 'replace'))
        if headphones.CONFIG.DOWNLOAD_DIR and not dir:
            download_dirs.append(headphones.CONFIG.DOWNLOAD_DIR.encode(headphones.SYS_ENCODING, 'replace'))
        if headphones.CONFIG.DOWNLOAD_TORRENT_DIR and not dir:
            download_dirs.append(headphones.CONFIG.DOWNLOAD_TORRENT_DIR.encode(headphones.SYS_ENCODING, 'replace'))
        # If DOWNLOAD_DIR and DOWNLOAD_TORRENT_DIR are the same, remove the duplicate to prevent us from trying to process the same folder twice.
        download_dirs = list(set(download_dirs))
        logger.debug('Post processing folders: %s', download_dirs)
        # Get a list of folders in the download_dir
        folders = []
        for download_dir in download_dirs:
            if not os.path.isdir(download_dir):
                logger.warn('Directory %s does not exist. Skipping', download_dir)
                continue
            # Scan for subfolders
            subfolders = os.listdir(download_dir)
            ignored += helpers.path_filter_patterns(subfolders,
                headphones.CONFIG.IGNORED_FOLDERS, root=download_dir)
            for folder in subfolders:
                path_to_folder = os.path.join(download_dir, folder)
                if os.path.isdir(path_to_folder):
                    # NOTE: `subfolders` is rebound here to the expanded list
                    # for this particular folder; the outer iteration keeps
                    # using the original listdir() result object.
                    subfolders = helpers.expand_subfolders(path_to_folder)
                    if expand_subfolders and subfolders is not None:
                        folders.extend(subfolders)
                    else:
                        folders.append(path_to_folder)
    # Log number of folders
    if folders:
        logger.debug('Expanded post processing folders: %s', folders)
        logger.info('Found %d folders to process (%d ignored).',
            len(folders), ignored)
    else:
        logger.info('Found no folders to process. Aborting.')
        return
    # Parse the folder names to get artist album info
    myDB = db.DBConnection()
    for folder in folders:
        folder_basename = os.path.basename(folder).decode(headphones.SYS_ENCODING, 'replace')
        logger.info('Processing: %s', folder_basename)
        # Attempt 1: First try to see if there's a match in the snatched table,
        # then we'll try to parse the foldername.
        # TODO: Iterate through underscores -> spaces, spaces -> dots,
        # underscores -> dots (this might be hit or miss since it assumes all
        # spaces/underscores came from sab replacing values
        logger.debug('Attempting to find album in the snatched table')
        snatched = myDB.action('SELECT AlbumID, Title, Kind, Status from snatched WHERE FolderName LIKE ?', [folder_basename]).fetchone()
        if snatched:
            if headphones.CONFIG.KEEP_TORRENT_FILES and snatched['Kind'] == 'torrent' and snatched['Status'] == 'Processed':
                logger.info('%s is a torrent folder being preserved for seeding and has already been processed. Skipping.', folder_basename)
                continue
            else:
                logger.info('Found a match in the database: %s. Verifying to make sure it is the correct album', snatched['Title'])
                verify(snatched['AlbumID'], folder, snatched['Kind'], keep_original_folder=keep_original_folder)
                continue
        # Attempt 2: strip release group id from filename
        logger.debug('Attempting to extract release group from folder name')
        try:
            possible_rgid = folder_basename[-36:]
            # Raises if the folder name's tail is not a valid UUID
            rgid = uuid.UUID(possible_rgid)
        except:
            rgid = possible_rgid = None
        if rgid:
            rgid = possible_rgid
            release = myDB.action('SELECT ArtistName, AlbumTitle, AlbumID from albums WHERE AlbumID=?', [rgid]).fetchone()
            if release:
                logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album', release['ArtistName'], release['AlbumTitle'])
                verify(release['AlbumID'], folder, forced=True, keep_original_folder=keep_original_folder)
                continue
            else:
                logger.info('Found a (possibly) valid Musicbrainz release group id in album folder name.')
                verify(rgid, folder, forced=True)
                continue
        # Attempt 3a: parse the folder name into a valid format
        logger.debug('Attempting to extract name, album and year from folder name')
        try:
            name, album, year = helpers.extract_data(folder_basename)
        except Exception:
            name = album = year = None
        if name and album:
            release = myDB.action('SELECT AlbumID, ArtistName, AlbumTitle from albums WHERE ArtistName LIKE ? and AlbumTitle LIKE ?', [name, album]).fetchone()
            if release:
                logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album', release['ArtistName'], release['AlbumTitle'])
                verify(release['AlbumID'], folder, keep_original_folder=keep_original_folder)
                continue
            else:
                logger.info('Querying MusicBrainz for the release group id for: %s - %s', name, album)
                try:
                    rgid = mb.findAlbumID(helpers.latinToAscii(name), helpers.latinToAscii(album))
                except:
                    logger.error('Can not get release information for this album')
                    rgid = None
                if rgid:
                    verify(rgid, folder, keep_original_folder=keep_original_folder)
                    continue
                else:
                    logger.info('No match found on MusicBrainz for: %s - %s', name, album)
        # Attempt 3b: deduce meta data into a valid format
        logger.debug('Attempting to extract name, album and year from metadata')
        try:
            name, album, year = helpers.extract_metadata(folder)
        except Exception:
            name = album = None
        # Check if there's a cue to split
        if headphones.CONFIG.CUE_SPLIT and not name and not album and helpers.cue_split(folder):
            # Retry metadata extraction on the freshly split tracks
            try:
                name, album, year = helpers.extract_metadata(folder)
            except Exception:
                name = album = None
        if name and album:
            release = myDB.action('SELECT AlbumID, ArtistName, AlbumTitle from albums WHERE ArtistName LIKE ? and AlbumTitle LIKE ?', [name, album]).fetchone()
            if release:
                logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album', release['ArtistName'], release['AlbumTitle'])
                verify(release['AlbumID'], folder, keep_original_folder=keep_original_folder)
                continue
            else:
                logger.info('Querying MusicBrainz for the release group id for: %s - %s', name, album)
                try:
                    rgid = mb.findAlbumID(helpers.latinToAscii(name), helpers.latinToAscii(album))
                except:
                    logger.error('Can not get release information for this album')
                    rgid = None
                if rgid:
                    verify(rgid, folder, keep_original_folder=keep_original_folder)
                    continue
                else:
                    logger.info('No match found on MusicBrainz for: %s - %s', name, album)
        # Attempt 4: Hail mary. Just assume the folder name is the album name
        # if it doesn't have a separator in it
        logger.debug('Attempt to extract album name by assuming it is the folder name')
        if '-' not in folder_basename:
            release = myDB.action('SELECT AlbumID, ArtistName, AlbumTitle from albums WHERE AlbumTitle LIKE ?', [folder_basename]).fetchone()
            if release:
                logger.info('Found a match in the database: %s - %s. Verifying to make sure it is the correct album', release['ArtistName'], release['AlbumTitle'])
                verify(release['AlbumID'], folder, keep_original_folder=keep_original_folder)
                continue
            else:
                logger.info('Querying MusicBrainz for the release group id for: %s', folder_basename)
                try:
                    rgid = mb.findAlbumID(album=helpers.latinToAscii(folder_basename))
                except:
                    logger.error('Can not get release information for this album')
                    rgid = None
                if rgid:
                    verify(rgid, folder, keep_original_folder=keep_original_folder)
                    continue
                else:
                    logger.info('No match found on MusicBrainz for: %s - %s', name, album)
        # Fail here
        logger.info("Couldn't parse '%s' into any valid format. If adding " \
            "albums from another source, they must be in an 'Artist - Album " \
            "[Year]' format, or end with the musicbrainz release group id.",
            folder_basename)
| lepricon49/headphones | headphones/postprocessor.py | Python | gpl-3.0 | 54,020 |
from .common import *
class TestErrorBasics(TestCase):

    def test_error_attributes(self):
        """Opening a missing file must raise AVError mirroring ENOENT."""
        caught = None
        try:
            av.open('does not exist')
        except AVError as err:
            caught = err
        if caught is None:
            self.fail('no exception raised')
        # The exception should carry the same fields as a stdlib OSError.
        self.assertEqual(caught.errno, 2)
        self.assertEqual(caught.strerror, 'No such file or directory')
        self.assertEqual(caught.filename, 'does not exist')
        self.assertEqual(
            str(caught),
            "[Errno 2] No such file or directory: 'does not exist'")
| danielballan/PyAV | tests/test_errors.py | Python | bsd-3-clause | 499 |
# -*- coding: utf-8 -*-
#
# This file is part of Panucci.
# Copyright (c) 2008-2011 The Panucci Project
#
# Panucci is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Panucci is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Panucci. If not, see <http://www.gnu.org/licenses/>.
#
# Based on http://thpinfo.com/2008/panucci/:
# A resuming media player for Podcasts and Audiobooks
# Copyright (c) 2008-05-26 Thomas Perl <thpinfo.com>
# (based on http://pygstdocs.berlios.de/pygst-tutorial/seeking.html)
from __future__ import absolute_import
import logging
from gi.repository import GObject as gobject
from gi.repository import Pango as pango
from gi.repository import Gtk as gtk
from gi.repository import Gdk as gdk
import panucci
from panucci import platform
from panucci import util
from panucci.gtk3ui import gtkutil
##################################################
# PlaylistTab
##################################################
class PlaylistTab(gtk.VBox):
    """Notebook tab presenting the playlist as a two-level tree.

    Top-level rows are playlist items; child rows are the bookmarks that
    belong to them.  The tab provides buttons for adding files/folders,
    removing, jumping to and inspecting entries, and supports reordering
    of top-level rows via drag and drop.
    """

    def __init__(self, main_window, playlist):
        gtk.VBox.__init__(self)
        self.__log = logging.getLogger('panucci.panucci.BookmarksWindow')
        self.main = main_window
        self.playlist = playlist

        self.__model = gtk.TreeStore(
            # uid, name, position
            gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING )

        self.set_spacing(5)
        self.treeview = gtk.TreeView()
        self.treeview.set_model(self.__model)
        self.treeview.set_headers_visible(True)
        tree_selection = self.treeview.get_selection()
        # This breaks drag and drop, only use single selection for now
        # tree_selection.set_mode(gtk.SELECTION_MULTIPLE)
        tree_selection.connect('changed', self.tree_selection_changed)

        # The tree lines look nasty on maemo
        if platform.DESKTOP:
            self.treeview.set_enable_tree_lines(True)
        self.update_model()

        ncol = gtk.TreeViewColumn(_('Name'))
        ncell = gtk.CellRendererText()
        ncell.set_property('ellipsize', pango.EllipsizeMode.END)
        ncell.set_property('editable', True)
        ncell.connect('edited', self.label_edited)
        ncol.set_expand(True)
        ncol.pack_start(ncell, 0)
        ncol.add_attribute(ncell, 'text', 1)

        tcol = gtk.TreeViewColumn(_('Position'))
        tcell = gtk.CellRendererText()
        tcol.pack_start(tcell, 0)
        tcol.add_attribute(tcell, 'text', 2)

        self.treeview.append_column(ncol)
        self.treeview.append_column(tcol)

        self.treeview.connect('drag-data-received', self.drag_data_recieved)
        self.treeview.connect('drag_data_get', self.drag_data_get_data)

        treeview_targets = [
            ( 'playlist_row_data', gtk.TargetFlags.SAME_WIDGET, 0 ) ]

        self.treeview.enable_model_drag_source(
            gdk.ModifierType.BUTTON1_MASK, treeview_targets, gdk.DragAction.COPY )

        self.treeview.enable_model_drag_dest(
            treeview_targets, gdk.DragAction.COPY )

        sw = gtk.ScrolledWindow()
        sw.set_policy(gtk.PolicyType.AUTOMATIC, gtk.PolicyType.AUTOMATIC)
        sw.set_shadow_type(gtk.ShadowType.IN)
        sw.add(self.treeview)
        self.add(sw)

        self.hbox = gtk.HBox()

        self.add_button = gtk.Button(gtk.STOCK_NEW)
        self.add_button.set_use_stock(True)
        gtkutil.set_stock_button_text( self.add_button, _('Add File') )
        self.add_button.connect('clicked', self.add_file)
        self.hbox.pack_start(self.add_button, True, True, 0)

        self.dir_button = gtk.Button(gtk.STOCK_OPEN)
        self.dir_button.set_use_stock(True)
        gtkutil.set_stock_button_text( self.dir_button, _('Add Folder') )
        self.dir_button.connect('clicked', self.add_directory)
        self.hbox.pack_start(self.dir_button, True, True, 0)

        self.remove_button = gtk.Button(stock=gtk.STOCK_REMOVE)
        self.remove_button.connect('clicked', self.remove_bookmark)
        self.hbox.pack_start(self.remove_button, True, True, 0)

        self.jump_button = gtk.Button(stock=gtk.STOCK_JUMP_TO)
        self.jump_button.connect('clicked', self.jump_bookmark)
        self.hbox.pack_start(self.jump_button, True, True, 0)

        self.info_button = gtk.Button(stock=gtk.STOCK_INFO)
        self.info_button.connect('clicked', self.show_playlist_item_details)
        self.hbox.pack_start(self.info_button, True, True, 0)

        self.empty_button = gtk.Button(stock=gtk.STOCK_DELETE)
        gtkutil.set_stock_button_text( self.empty_button, _('Clear'), )
        self.empty_button.connect('clicked', self.empty_playlist)
        self.hbox.pack_start(self.empty_button, True, True, 0)

        if platform.FREMANTLE:
            for child in self.hbox.get_children():
                if isinstance(child, gtk.Button):
                    child.set_name('HildonButton-thumb')
            self.hbox.set_size_request(-1, 105)

        self.pack_start(self.hbox, False, True, 0)

        self.playlist.register( 'file_queued',
                                lambda x,y,z: self.update_model() )
        self.playlist.register( 'bookmark_added', self.on_bookmark_added )

        self.show_all()

    def tree_selection_changed(self, treeselection):
        """Enable/disable buttons according to how many rows are selected."""
        count = treeselection.count_selected_rows()
        self.remove_button.set_sensitive(count > 0)
        self.jump_button.set_sensitive(count == 1)
        self.info_button.set_sensitive(count == 1)

    def drag_data_get_data(
        self, treeview, context, selection, target_id, timestamp):
        """Serialise the dragged row as its tree-model path string."""

        treeselection = treeview.get_selection()
        model, iter = treeselection.get_selected()

        # only allow moving around top-level parents
        if model.iter_parent(iter) is None:
            # send the path of the selected row
            data = model.get_string_from_iter(iter)
            # BUGFIX: SelectionData's struct fields are not attributes under
            # PyGObject (GTK3); use the accessor and hand over bytes.
            selection.set(selection.get_target(), 8, data.encode('utf-8'))
        else:
            self.__log.debug("Can't move children...")

    def drag_data_recieved(
        self, treeview, context, x, y, selection, info, timestamp):
        """Reorder top-level playlist rows after a drag-and-drop."""

        drop_info = treeview.get_dest_row_at_pos(x, y)

        # TODO: If user drags the row past the last row, drop_info is None
        #       I'm not sure if it's safe to simply assume that None is
        #       equivalent to the last row...
        # BUGFIX: the old check ("None not in [drop_info and selection.data]")
        # collapsed both values into one list element; test them explicitly.
        # Also use the PyGObject accessor instead of the GTK2 .data field.
        data = selection.get_data()
        if isinstance(data, bytes):
            data = data.decode('utf-8')

        if drop_info is not None and data:
            model = treeview.get_model()
            path, position = drop_info

            from_iter = model.get_iter_from_string(data)

            # make sure the to_iter doesn't have a parent
            to_iter = model.get_iter(path)
            if model.iter_parent(to_iter) is not None:
                to_iter = model.iter_parent(to_iter)

            from_row = model.get_path(from_iter)[0]
            to_row = path[0]

            # BUGFIX: gtk.TREE_VIEW_DROP_* are GTK2 names; under
            # gi.repository the enum is gtk.TreeViewDropPosition (matches
            # the gtk.PolicyType / gdk.DragAction style used above).
            if ( position == gtk.TreeViewDropPosition.BEFORE or
                 position == gtk.TreeViewDropPosition.INTO_OR_BEFORE ):
                model.move_before( from_iter, to_iter )
                to_row = to_row - 1 if from_row < to_row else to_row
            elif ( position == gtk.TreeViewDropPosition.AFTER or
                   position == gtk.TreeViewDropPosition.INTO_OR_AFTER ):
                model.move_after( from_iter, to_iter )
                to_row = to_row + 1 if from_row > to_row else to_row
            else:
                self.__log.debug('Drop not supported: %s', position)

            # don't do anything if we're not actually moving rows around
            if from_row != to_row:
                # BUGFIX: this widget has no "player" attribute; the
                # playlist passed to the constructor owns the items.
                self.playlist.move_item( from_row, to_row )
        else:
            self.__log.debug('No drop_data or selection.data available')

    def update_model(self):
        """Rebuild the tree model from the playlist, keeping scroll position."""
        path_info = self.treeview.get_path_at_pos(0,0)
        path = path_info[0] if path_info is not None else None

        self.__model.clear()

        # build the tree
        for item, data in self.playlist.get_playlist_item_ids():
            parent = self.__model.append(None, (item, data.get('title'), None))

            for bid, bname, bpos in self.playlist.get_bookmarks_from_item_id( item ):
                nice_bpos = util.convert_ns(bpos)
                self.__model.append( parent, (bid, bname, nice_bpos) )

        self.treeview.expand_all()

        if path is not None:
            self.treeview.scroll_to_cell(path)

    def label_edited(self, cellrenderer, path, new_text):
        """Apply an in-place rename of a bookmark row, or revert it."""
        iter = self.__model.get_iter(path)
        old_text = self.__model.get_value(iter, 1)

        if new_text.strip() and old_text != new_text:
            # this loop will only run once, because only one cell can be
            # edited at a time, we use it to get the item and bookmark ids
            for m, bkmk_id, biter, item_id, iiter in self.__cur_selection():
                self.__model.set_value(iter, 1, new_text)
                # BUGFIX: there is no self.player here; the playlist itself
                # manages bookmark storage.
                self.playlist.update_bookmark(
                    item_id, bkmk_id, name=new_text )
        else:
            self.__model.set_value(iter, 1, old_text)

    def on_bookmark_added(self, parent_id, bookmark_name, position):
        """Playlist callback: notify the user and refresh the tree."""
        self.main.notify(_('Bookmark added: %s') % bookmark_name)
        self.update_model()

    def add_file(self, widget):
        """Prompt for a single file and queue it on the playlist."""
        filename = gtkutil.get_file_from_filechooser(self.main)
        if filename is not None:
            self.playlist.load(filename)

    def add_directory(self, widget):
        """Prompt for a folder and queue its contents on the playlist."""
        directory = gtkutil.get_file_from_filechooser(self.main, folder=True )
        if directory is not None:
            self.playlist.load(directory)

    def __cur_selection(self):
        """Yield (model, bookmark_id, bookmark_iter, item_id, item_iter)
        for every selected row; bookmark fields are None for item rows."""
        selection = self.treeview.get_selection()
        model, bookmark_paths = selection.get_selected_rows()

        # Convert the paths to gtk.TreeRowReference objects, because we
        # might modify the model while this generator is running
        bookmark_refs = [gtk.TreeRowReference(model, p) for p in bookmark_paths]

        for reference in bookmark_refs:
            bookmark_iter = model.get_iter(reference.get_path())
            item_iter = model.iter_parent(bookmark_iter)

            # bookmark_iter is actually an item_iter
            if item_iter is None:
                item_iter = bookmark_iter
                item_id = model.get_value(item_iter, 0)
                bookmark_id, bookmark_iter = None, None
            else:
                bookmark_id = model.get_value(bookmark_iter, 0)
                item_id = model.get_value(item_iter, 0)

            yield model, bookmark_id, bookmark_iter, item_id, item_iter

    def remove_bookmark(self, w=None):
        """Delete every selected bookmark (or whole item) from the playlist."""
        for model, bkmk_id, bkmk_iter, item_id, item_iter in self.__cur_selection():
            self.playlist.remove_bookmark( item_id, bkmk_id )

            if bkmk_iter is not None:
                model.remove(bkmk_iter)
            elif item_iter is not None:
                model.remove(item_iter)

    def select_current_item(self):
        """Move the cursor to the row of the currently playing item."""
        model = self.treeview.get_model()
        selection = self.treeview.get_selection()
        current_item_id = str(self.playlist.get_current_item())
        for row in iter(model):
            if model.get_value(row.iter, 0) == current_item_id:
                selection.unselect_all()
                self.treeview.set_cursor(row.path)
                self.treeview.scroll_to_cell(row.path, use_align=True)
                break

    def show_playlist_item_details(self, w):
        """Open the details dialog for the single selected row."""
        selection = self.treeview.get_selection()
        if selection.count_selected_rows() == 1:
            # BUGFIX: generator.next() is Python-2-only; the next() builtin
            # works on both Python 2 and 3.
            selected = next(self.__cur_selection())
            model, bkmk_id, bkmk_iter, item_id, item_iter = selected
            playlist_item = self.playlist.get_item_by_id(item_id)
            PlaylistItemDetails(self.main, playlist_item)

    def jump_bookmark(self, w):
        """Start playback at the selected bookmark (or item start)."""
        selected = list(self.__cur_selection())
        if len(selected) == 1:
            # It should be guaranteed by the fact that we only enable the
            # "Jump to" button when the selection count equals 1.
            model, bkmk_id, bkmk_iter, item_id, item_iter = selected.pop(0)
            self.playlist.load_from_bookmark_id(item_id, bkmk_id)

    def empty_playlist(self, w):
        """Clear the playlist and the view."""
        self.playlist.reset_playlist()
        self.treeview.get_model().clear()
##################################################
# PlaylistItemDetails
##################################################
class PlaylistItemDetails(gtk.Dialog):
    """Modal dialog showing a playlist item's fields, metadata and bookmarks.

    The dialog is self-running: constructing it shows it, blocks in run()
    and destroys it when dismissed.
    """

    def __init__(self, main, playlist_item):
        # BUGFIX: use the GTK3 enum names (gtk.DialogFlags / ResponseType)
        # to match the gi.repository imports used by this module.
        gtk.Dialog.__init__(self, _('Playlist item details'),
                            main.main_window, gtk.DialogFlags.MODAL)

        if not platform.FREMANTLE:
            self.add_button(gtk.STOCK_CLOSE, gtk.ResponseType.OK)

        self.main = main
        self.fill(playlist_item)
        # NOTE: gtk.Dialog.set_has_separator() was removed in GTK3, so the
        # old call to it has been dropped.
        self.set_resizable(False)
        self.show_all()
        self.run()
        self.destroy()

    def fill(self, playlist_item):
        """Build the two-column info table and the bookmark list."""
        t = gtk.Table(10, 2)
        # BUGFIX: GTK3 pack_start() requires explicit expand/fill/padding.
        self.vbox.pack_start(t, False, True, 0)

        metadata = playlist_item.metadata

        # Left column: static labels followed by one row per metadata key.
        t.attach(gtk.Label(_('Custom title:')), 0, 1, 0, 1)
        t.attach(gtk.Label(_('ID:')), 0, 1, 1, 2)
        t.attach(gtk.Label(_('Playlist ID:')), 0, 1, 2, 3)
        t.attach(gtk.Label(_('Filepath:')), 0, 1, 3, 4)

        row_num = 4
        for key in metadata:
            if metadata[key] is not None:
                t.attach( gtk.Label(key.capitalize()+':'),
                          0, 1, row_num, row_num+1 )
                row_num += 1

        t.foreach(lambda x, y: x.set_alignment(1, 0.5), None)
        t.foreach(lambda x, y: x.set_markup('<b>%s</b>' % x.get_label()), None)

        # Right column: the corresponding values.
        t.attach(gtk.Label(playlist_item.title or _('<not modified>')),1,2,0,1)
        t.attach(gtk.Label(str(playlist_item)), 1, 2, 1, 2)
        t.attach(gtk.Label(playlist_item.playlist_id), 1, 2, 2, 3)
        t.attach(gtk.Label(playlist_item.filepath), 1, 2, 3, 4)

        row_num = 4
        for key in metadata:
            value = metadata[key]
            if key == 'length':
                value = util.convert_ns(value)
            if metadata[key] is not None:
                t.attach( gtk.Label( str(value) or _('<not set>')),
                          1, 2, row_num, row_num+1)
                row_num += 1

        # Left-align only the value labels (the ones still at default 0.5).
        t.foreach(lambda x, y: x.get_alignment() == (0.5, 0.5) and \
                  x.set_alignment(0, 0.5), None)

        t.set_border_width(8)
        t.set_row_spacings(4)
        t.set_col_spacings(8)

        l = gtk.ListStore(str, str)
        t = gtk.TreeView(l)
        cr = gtk.CellRendererText()
        # BUGFIX: pango.ELLIPSIZE_END is the GTK2 name; use the GI enum as
        # done elsewhere in this module.
        cr.set_property('ellipsize', pango.EllipsizeMode.END)
        c = gtk.TreeViewColumn(_('Title'), cr, text=0)
        c.set_expand(True)
        t.append_column(c)
        c = gtk.TreeViewColumn(_('Time'), gtk.CellRendererText(), text=1)
        t.append_column(c)
        playlist_item.load_bookmarks()
        for bookmark in playlist_item.bookmarks:
            l.append([bookmark.bookmark_name, \
                      util.convert_ns(bookmark.seek_position)])

        sw = gtk.ScrolledWindow()
        # BUGFIX: gtk.SHADOW_IN / gtk.POLICY_AUTOMATIC are GTK2 names.
        sw.set_shadow_type(gtk.ShadowType.IN)
        sw.add(t)
        sw.set_policy(gtk.PolicyType.AUTOMATIC, gtk.PolicyType.AUTOMATIC)
        e = gtk.Expander(_('Bookmarks'))
        e.add(sw)
        if not platform.MAEMO:
            self.vbox.pack_start(e, True, True, 0)
| xerxes2/panucci | src/panucci/gtk3ui/gtkplaylist.py | Python | gpl-3.0 | 15,748 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-03 10:42
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration.

    Reconciles the two divergent 0066 branches ('0066_school' and
    '0066_auto_20160203_1039') into a single linear migration history.
    It intentionally performs no schema changes.
    """

    dependencies = [
        ('app', '0066_school'),
        ('app', '0066_auto_20160203_1039'),
    ]

    # Empty on purpose: a merge migration only joins histories.
    operations = [
    ]
| malaonline/Server | server/app/migrations/0067_merge.py | Python | mit | 317 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.