repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
prasanna08/oppia | core/domain/action_registry_test.py | Python | apache-2.0 | 1,192 | 0 | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an | "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expr | ess or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for methods in the action registry."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.domain import action_registry
from core.tests import test_utils
class ActionRegistryUnitTests(test_utils.GenericTestBase):
"""Test for the action registry."""
def test_action_registry(self):
"""Do some sanity checks on the action registry."""
self.assertEqual(
len(action_registry.Registry.get_all_actions()), 3)
|
fabricematrat/py-macaroon-bakery | macaroonbakery/tests/test_time.py | Python | lgpl-3.0 | 4,747 | 0 | # Copyright 2017 Canonical Ltd.
# Licensed under the LGPLv3, see LICENCE file for details.
from datetime import timedelta
from unittest import TestCase
from collections import namedtuple
import pyrfc3339
import pymacaroons
from pymacaroons import Macaroon
import macaroonbakery.checkers as checkers
t1 = pyrfc3339.parse('2017-10-26T16:19:47.441402074Z')
t2 = t1 + timedelta(hours=1)
t3 = t2 + timedelta(hours=1)
def fpcaveat(s):
return pymacaroons.Caveat(caveat_id=s.encode('utf-8'))
class TestExpireTime(TestCase):
def test_expire_time(self):
ExpireTest = namedtuple('ExpireTest', 'about caveats expectTime')
tests = [
ExpireTest(
about='no caveats',
caveats=[],
expectTime=None,
),
ExpireTest(
about='single time-before caveat',
caveats=[
fpcaveat(checkers.time_before_caveat(t1).condition),
],
expectTime=t1,
),
ExpireTest(
about='multiple time-before caveat',
caveats=[
fpcaveat(checkers.time_before_caveat(t2).condition),
fpcaveat(checkers.time_before_caveat(t1).condition),
],
expectTime=t1,
),
ExpireTest(
about='mixed caveats',
caveats=[
fpcaveat(checkers.time_before_caveat(t1).condition),
fpcaveat('allow bar'),
fpcaveat(checkers.time_before_caveat(t2).condition),
fpcaveat('deny foo'),
],
expectTime=t1,
),
ExpireTest(
about='mixed caveats',
caveats=[
fpcaveat(checkers.COND_TIME_BEFORE + ' tomorrow'),
],
expectTime=None,
),
]
for test in tests:
print('test ', test.about)
t = checkers.expiry_time(checkers.Namespace(), test.caveats)
self.assertEqual(t, test.expectTime)
def test_macaroons_expire_time(self):
ExpireTest = namedtuple('ExpireTest', 'about macaroons expectTime')
tests = [
ExpireTest(
about='no macaroons',
macaroons=[newMacaroon()],
expectTime=None,
),
ExpireTest(
about='single macaroon without caveats',
macaroons=[newMacaroon()],
expectTime=None,
),
ExpireTest(
about='multiple macaroon without caveats',
macaroons=[newMacaroon()],
expectTime=None,
),
ExpireTest(
about='single macaroon with time-before caveat',
macaroons=[
newMacaroon([checkers.time_before_caveat(t1).condition]),
],
expectTime=t1,
),
ExpireTest(
about='single macaroon with multiple time-before caveats',
macaroons=[
newMacaroon([
checkers.time_before_caveat(t2).condition,
checkers.time_before_caveat(t1).condition,
]),
],
expectTime=t1,
),
ExpireTest(
about='multiple macaroons with multiple time-before caveats',
macaroons=[
newMacaroon([
checkers.time_before_cavea | t(t3).condition,
checkers.time_before_caveat(t1).condition,
]),
newMacaroon([
checkers.time_before_caveat(t3).condition,
| checkers.time_before_caveat(t1).condition,
]),
],
expectTime=t1,
),
]
for test in tests:
print('test ', test.about)
t = checkers.macaroons_expiry_time(checkers.Namespace(),
test.macaroons)
self.assertEqual(t, test.expectTime)
def test_macaroons_expire_time_skips_third_party(self):
m1 = newMacaroon([checkers.time_before_caveat(t1).condition])
m2 = newMacaroon()
m2.add_third_party_caveat('https://example.com', 'a-key', '123')
t = checkers.macaroons_expiry_time(checkers.Namespace(), [m1, m2])
self.assertEqual(t1, t)
def newMacaroon(conds=[]):
m = Macaroon(key='key', version=2)
for cond in conds:
m.add_first_party_caveat(cond)
return m
|
umitproject/site-status | status_cron/urls.py | Python | agpl-3.0 | 1,731 | 0.016176 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##
## Author: Adriano Monteiro Marques <adriano@umitproject.org>
##
## Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as
## published by the Free Software Foundation, either version 3 | of the
## License, | or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
from django.conf.urls.defaults import *
import django_cron
urlpatterns = patterns('',
# url('^check_passive_hosts/?$', 'status_cron.views.check_passive_hosts', name='check_passive_hosts'),
url('^check_passive_url_task/(?P<module_key>[0-9a-zA-Z\-\_]+)/?$', 'status_cron.views.check_passive_url_task', name='check_passive_url_task'),
url('^check_passive_port_task/(?P<module_key>[0-9a-zA-Z\-\_]+)/?$', 'status_cron.views.check_passive_port_task', name='check_passive_port_task'),
url('^aggregate_daily_status/?$', 'status_cron.views.aggregate_daily_status', name='aggregate_daily_status'),
# url('^check_notifications/?$', 'status_cron.views.check_notifications', name='check_notifications'),
url('^send_notification_task/(?P<notification_id>[0-9a-zA-Z\-\_]+)/?$', 'status_cron.views.send_notification_task', name='send_notification_task'),
) |
jjangsangy/python-matlab-bridge | pymatbridge/pymatbridge.py | Python | bsd-3-clause | 19,627 | 0.000713 | """
pymatbridge
===========
This is a module for communicating and running Matlab from within python
Example
-------
>>> import pymatbridge
>>> m = pymatbridge.Matlab()
>>> m.start()
Starting MATLAB on ZMQ socket ipc:///tmp/pymatbridge
Send 'exit' command to kill the server
.MATLAB started and connected!
True
>>> m.run_code('a=1;')
{'content': {'stdout': '', 'datadir': '/private/tmp/MatlabData/', 'code': 'a=1;', 'figures': []}, 'success': True}
>>> m.get_variable('a')
1
"""
import atexit
import os
import time
import base64
import zmq
import subprocess
import sys
import json
import types
import weakref
from uuid import uuid4
from numpy import ndarray, generic, float64, frombuffer, asfortranarray
try:
from scipy.sparse import spmatrix
except ImportError:
class spmatrix:
pass
def encode_ndarray(obj):
"""Write a numpy array and its shape to base64 buffers"""
shape = obj.shape
if len(shape) == 1:
shape = (1, obj.shape[0])
if obj.flags.c_contiguous:
obj = obj.T
elif not obj.flags.f_contiguous:
obj = asfortranarray(obj.T)
else:
obj = obj.T
try:
data = obj.astype(float64).tobytes()
except AttributeError:
data = obj.astype(float64).tostring()
data = base64.b64encode(data).decode('utf-8')
return data, shape
# JSON encoder extension to handle complex numbers and numpy arrays
class PymatEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, ndarray) and obj.dtype.kind in 'uif':
data, shape = encode_ndarray(obj)
return {'ndarray': True, 'shape': shape, 'data': data}
elif isinstance(obj, ndarray) and obj.dtype.kind == 'c':
real, shape = encode_ndarray(obj.real.copy())
imag, _ = encode_ndarray(obj.imag.copy())
return {'ndarray': True, 'shape': shape,
'real': real, 'imag': imag}
elif isinstance(obj, ndarray):
return obj.tolist()
elif isinstance(obj, complex):
return {'real': obj.real, 'imag': obj.imag}
elif isinstance(obj, generic):
return obj.item()
# Handle the default case
return json.JSONEncoder.default(self, obj)
def decode_arr(data):
"""Extract a numpy array from a base64 buffer"""
data = data.encode('utf-8')
return frombuffer(base64.b64decode(data), float64)
# JSON decoder for arrays and complex numbers
def decode_pymat(dct):
if 'ndarray' in dct and 'data' in dct:
value = decode_arr(dct['data'])
shape = decode_arr(dct['shape']).astype(int)
return value.reshape(shape, order='F')
elif 'ndarray' in dct and 'imag' in dct:
real = decode_arr(dct['real'])
imag = decode_arr(dct['imag'])
shape = decode_arr(dct['shape']).astype(int)
data = real + 1j * imag
return data.reshape(shape, order='F')
elif 'real' in dct and 'imag' in dct:
return complex(dct['real'], dct['imag'])
return dct
MATLAB_FOLDER = '%s/matlab' % os.path.realpath(os.path.dirname(__file__))
class _Session(object):
"""
A class for communicating with a MATLAB session. It provides the behavior
common across different MATLAB implementations. You shouldn't instantiate
this directly; rather, use the Matlab or Octave subclasses.
"""
def __init__(self, executable, socket_addr=None,
id='python-matlab-bridge', log=False, maxtime=60,
platform=None, startup_options=None):
"""
Initialize this thing.
Parameters
----------
executable : str
A string that would start the session at the terminal.
socket_addr : str
A string that represents a valid ZMQ socket address, such as
"ipc:///tmp/pymatbridge", "tcp://127.0.0.1:55555", etc.
id : str
An identifier for this instance of the pymatbridge.
log : bool
Whether to save a log file in some known location.
maxtime : float
The maximal time to wait for a response from the sess | ion (optional,
Default is 10 sec)
platform : string
The OS of the machine on which this is running. Per default this
will be taken from sys.platform.
startup_options : string
Command line options | to include in the executable's invocation.
Optional; sensible defaults are used if this is not provided.
"""
self.started = False
self.executable = executable
self.socket_addr = socket_addr
self.id = id
self.log = log
self.maxtime = maxtime
self.platform = platform if platform is not None else sys.platform
self.startup_options = startup_options
if socket_addr is None:
self.socket_addr = "tcp://127.0.0.1:55555" if self.platform == "win32" else "ipc:///tmp/pymatbridge"
if self.log:
startup_options += ' > ./pymatbridge/logs/bashlog_%s.txt' % self.id
self.context = None
self.socket = None
atexit.register(self.stop)
def _program_name(self): # pragma: no cover
raise NotImplemented
def _preamble_code(self):
# suppress warnings while loading the path, in the case of
# overshadowing a built-in function on a newer version of
# Matlab (e.g. isrow)
return ["old_warning_state = warning('off','all')",
"addpath(genpath('%s'))" % MATLAB_FOLDER,
"warning(old_warning_state)",
"clear old_warning_state"]
def _execute_flag(self): # pragma: no cover
raise NotImplemented
def _run_server(self):
code = self._preamble_code()
code.extend([
"matlabserver('%s')" % self.socket_addr,
'exit'
])
command = '%s %s %s "%s"' % (self.executable, self.startup_options,
self._execute_flag(), ','.join(code))
subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
# Start server/client session and make the connection
def start(self):
# Start the MATLAB server in a new process
print("Starting %s on ZMQ socket %s" % (self._program_name(), self.socket_addr))
print("Send 'exit' command to kill the server")
self._run_server()
# Start the client
self.context = zmq.Context()
self.socket = self.context.socket(zmq.REQ)
self.socket.connect(self.socket_addr)
self.started = True
# Test if connection is established
if self.is_connected():
print("%s started and connected!" % self._program_name())
self.set_plot_settings()
return self
else:
raise ValueError("%s failed to start" % self._program_name())
def _response(self, **kwargs):
req = json.dumps(kwargs, cls=PymatEncoder)
self.socket.send_string(req)
resp = self.socket.recv_string()
return resp
# Stop the Matlab server
def stop(self):
if not self.started:
return True
# Matlab should respond with "exit" if successful
if self._response(cmd='exit') == "exit":
print("%s closed" % self._program_name())
self.started = False
return True
# To test if the client can talk to the server
def is_connected(self):
if not self.started:
time.sleep(2)
return False
req = json.dumps(dict(cmd="connect"), cls=PymatEncoder)
self.socket.send_string(req)
start_time = time.time()
while True:
try:
resp = self.socket.recv_string(flags=zmq.NOBLOCK)
return resp == "connected"
except zmq.ZMQError:
sys.stdout.write('.')
time.sleep(1)
if time.time() - start_time > self.maxtime:
print("%s session timed out after %d seconds" % (self._program_name(), self.maxtime))
retur |
googleapis/python-dialogflow-cx | google/cloud/dialogflowcx_v3beta1/types/fulfillment.py | Python | apache-2.0 | 7,518 | 0.001995 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.dialogflowcx_v3beta1.types import response_message
fr | om google.protobuf import struct_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.dialogflow.cx.v3beta1", manifest={"Fulfillment",},
)
class Fulfillment(proto.Message):
r"""A fulfillment can do one or more of the following actions at the
same time:
- Generate rich message responses.
- Set parameter values.
- Call the webhook.
Fulfillments can be called at various stages in th | e
[Page][google.cloud.dialogflow.cx.v3beta1.Page] or
[Form][google.cloud.dialogflow.cx.v3beta1.Form] lifecycle. For
example, when a
[DetectIntentRequest][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest]
drives a session to enter a new page, the page's entry fulfillment
can add a static response to the
[QueryResult][google.cloud.dialogflow.cx.v3beta1.QueryResult] in the
returning
[DetectIntentResponse][google.cloud.dialogflow.cx.v3beta1.DetectIntentResponse],
call the webhook (for example, to load user data from a database),
or both.
Attributes:
messages (Sequence[google.cloud.dialogflowcx_v3beta1.types.ResponseMessage]):
The list of rich message responses to present
to the user.
webhook (str):
The webhook to call. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/webhooks/<Webhook ID>``.
return_partial_responses (bool):
Whether Dialogflow should return currently
queued fulfillment response messages in
streaming APIs. If a webhook is specified, it
happens before Dialogflow invokes webhook.
Warning:
1) This flag only affects streaming API.
Responses are still queued and returned once in
non-streaming API.
2) The flag can be enabled in any fulfillment
but only the first 3 partial responses will be
returned. You may only want to apply it to
fulfillments that have slow webhooks.
tag (str):
The tag used by the webhook to identify which fulfillment is
being called. This field is required if ``webhook`` is
specified.
set_parameter_actions (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.SetParameterAction]):
Set parameter values before executing the
webhook.
conditional_cases (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases]):
Conditional cases for this fulfillment.
"""
class SetParameterAction(proto.Message):
r"""Setting a parameter value.
Attributes:
parameter (str):
Display name of the parameter.
value (google.protobuf.struct_pb2.Value):
The new value of the parameter. A null value
clears the parameter.
"""
parameter = proto.Field(proto.STRING, number=1,)
value = proto.Field(proto.MESSAGE, number=2, message=struct_pb2.Value,)
class ConditionalCases(proto.Message):
r"""A list of cascading if-else conditions. Cases are mutually
exclusive. The first one with a matching condition is selected,
all the rest ignored.
Attributes:
cases (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases.Case]):
A list of cascading if-else conditions.
"""
class Case(proto.Message):
r"""Each case has a Boolean condition. When it is evaluated to be
True, the corresponding messages will be selected and evaluated
recursively.
Attributes:
condition (str):
The condition to activate and select this case. Empty means
the condition is always true. The condition is evaluated
against [form parameters][Form.parameters] or [session
parameters][SessionInfo.parameters].
See the `conditions
reference <https://cloud.google.com/dialogflow/cx/docs/reference/condition>`__.
case_content (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases.Case.CaseContent]):
A list of case content.
"""
class CaseContent(proto.Message):
r"""The list of messages or conditional cases to activate for
this case.
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
message (google.cloud.dialogflowcx_v3beta1.types.ResponseMessage):
Returned message.
This field is a member of `oneof`_ ``cases_or_message``.
additional_cases (google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases):
Additional cases to be evaluated.
This field is a member of `oneof`_ ``cases_or_message``.
"""
message = proto.Field(
proto.MESSAGE,
number=1,
oneof="cases_or_message",
message=response_message.ResponseMessage,
)
additional_cases = proto.Field(
proto.MESSAGE,
number=2,
oneof="cases_or_message",
message="Fulfillment.ConditionalCases",
)
condition = proto.Field(proto.STRING, number=1,)
case_content = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="Fulfillment.ConditionalCases.Case.CaseContent",
)
cases = proto.RepeatedField(
proto.MESSAGE, number=1, message="Fulfillment.ConditionalCases.Case",
)
messages = proto.RepeatedField(
proto.MESSAGE, number=1, message=response_message.ResponseMessage,
)
webhook = proto.Field(proto.STRING, number=2,)
return_partial_responses = proto.Field(proto.BOOL, number=8,)
tag = proto.Field(proto.STRING, number=3,)
set_parameter_actions = proto.RepeatedField(
proto.MESSAGE, number=4, message=SetParameterAction,
)
conditional_cases = proto.RepeatedField(
proto.MESSAGE, number=5, message=ConditionalCases,
)
__all__ = tuple(sorted(__protobuf__.manifest))
|
michaelBenin/django-oscar | oscar/apps/order/utils.py | Python | bsd-3-clause | 10,663 | 0.001501 | from django.contrib.sites.models import Site
from django.conf import settings
from django.db.models import get_model
from django.utils.translation import ugettext_lazy as _
from oscar.apps.shipping.methods import Free
from oscar.apps.order.exceptions import UnableToPlaceOrder
from oscar.core.loading import get_class
ShippingAddress = get_model('order', 'ShippingAddress')
Order = get_model('order', 'Order')
Line = get_model('order', 'Line')
LinePrice = get_model('order', 'LinePrice')
LineAttribute = get_model('order', 'LineAttribute')
OrderDiscount = get_model('order', 'OrderDiscount')
order_placed = get_class('order.signals', 'order_placed')
class OrderNumberGenerator(object):
"""
Simple object for generating order numbers.
We need this as the order number is often required for payment
which takes place before the order model has been created.
"""
def order_number(self, basket):
"""
Return an order number for a given basket
"""
return 100000 + basket.id
class OrderCreator(object):
"""
Places the order by writing out the various models
"""
def place_order(self, basket, total_incl_tax=None, total_excl_tax=None,
user=None, shipping_method=None, shipping_address=None,
billing_address=None, order_number=None, status=None, **kwargs):
"""
Placing an order involves creating all the relevant models based on the
basket and session data.
"""
# Only a basket instance is required to place an order - everything else can be set
# to defaults
if basket.is_empty:
raise ValueError(_("Empty baskets cannot be submitted"))
if not shipping_method:
shipping_method | = Free()
if total_incl_tax is None or total_excl_tax is None:
total_incl_tax = basket.total_incl_tax + shipping_method.basket_charge_incl_tax()
total_excl_tax = basket.total_excl_tax + shipping_method.basket_charge_excl_tax()
if not order_number:
generator = OrderNumberGenerator()
order_number = | generator.order_number(basket)
if not status and hasattr(settings, 'OSCAR_INITIAL_ORDER_STATUS'):
status = getattr(settings, 'OSCAR_INITIAL_ORDER_STATUS')
try:
Order._default_manager.get(number=order_number)
except Order.DoesNotExist:
pass
else:
raise ValueError(_("There is already an order with number %s") % order_number)
# Ok - everything seems to be in order, let's place the order
order = self.create_order_model(
user, basket, shipping_address, shipping_method, billing_address,
total_incl_tax, total_excl_tax, order_number, status, **kwargs)
for line in basket.all_lines():
self.create_line_models(order, line)
self.update_stock_records(line)
for application in basket.offer_applications:
# Trigger any deferred benefits from offers and capture the
# resulting message
application['message'] = application['offer'].apply_deferred_benefit(basket)
# Record offer application results
if application['result'].affects_shipping:
# If a shipping offer, we need to grab the actual discount off
# the shipping method instance, which should be wrapped in an
# OfferDiscount instance.
application['discount'] = shipping_method.discount
self.create_discount_model(order, application)
self.record_discount(application)
for voucher in basket.vouchers.all():
self.record_voucher_usage(order, voucher, user)
# Send signal for analytics to pick up
order_placed.send(sender=self, order=order, user=user)
return order
def create_order_model(self, user, basket, shipping_address, shipping_method,
billing_address, total_incl_tax, total_excl_tax,
order_number, status, **extra_order_fields):
"""
Creates an order model.
"""
order_data = {'basket_id': basket.id,
'number': order_number,
'site': Site._default_manager.get_current(),
'total_incl_tax': total_incl_tax,
'total_excl_tax': total_excl_tax,
'shipping_incl_tax': shipping_method.basket_charge_incl_tax(),
'shipping_excl_tax': shipping_method.basket_charge_excl_tax(),
'shipping_method': shipping_method.name}
if shipping_address:
order_data['shipping_address'] = shipping_address
if billing_address:
order_data['billing_address'] = billing_address
if user and user.is_authenticated():
order_data['user_id'] = user.id
if status:
order_data['status'] = status
if extra_order_fields:
order_data.update(extra_order_fields)
order = Order(**order_data)
order.save()
return order
def get_partner_for_product(self, product):
"""
Return the partner for a product
"""
if product.has_stockrecord:
return product.stockrecord.partner
raise UnableToPlaceOrder(_("No partner found for product '%s'") % product)
def create_line_models(self, order, basket_line, extra_line_fields=None):
"""
Create the batch line model.
You can set extra fields by passing a dictionary as the extra_line_fields value
"""
partner = self.get_partner_for_product(basket_line.product)
stockrecord = basket_line.product.stockrecord
line_data = {'order': order,
# Partner details
'partner': partner,
'partner_name': partner.name,
'partner_sku': stockrecord.partner_sku,
# Product details
'product': basket_line.product,
'title': basket_line.product.get_title(),
'upc': basket_line.product.upc,
'quantity': basket_line.quantity,
# Price details
'line_price_excl_tax': basket_line.line_price_excl_tax_and_discounts,
'line_price_incl_tax': basket_line.line_price_incl_tax_and_discounts,
'line_price_before_discounts_excl_tax': basket_line.line_price_excl_tax,
'line_price_before_discounts_incl_tax': basket_line.line_price_incl_tax,
# Reporting details
'unit_cost_price': stockrecord.cost_price,
'unit_price_incl_tax': basket_line.unit_price_incl_tax,
'unit_price_excl_tax': basket_line.unit_price_excl_tax,
'unit_retail_price': stockrecord.price_retail,
# Shipping details
'est_dispatch_date': stockrecord.dispatch_date
}
extra_line_fields = extra_line_fields or {}
if hasattr(settings, 'OSCAR_INITIAL_LINE_STATUS'):
if not (extra_line_fields and 'status' in extra_line_fields):
extra_line_fields['status'] = getattr(settings, 'OSCAR_INITIAL_LINE_STATUS')
if extra_line_fields:
line_data.update(extra_line_fields)
order_line = Line._default_manager.create(**line_data)
self.create_line_price_models(order, order_line, basket_line)
self.create_line_attributes(order, order_line, basket_line)
self.create_additional_line_models(order, order_line, basket_line)
return order_line
def update_stock_records(self, line):
product = line.product
if product.get_product_class().track_stock:
line.product.stockrecord.allocate(line.quantity)
def create_additional_line_models(self, order, order_line, basket_line):
"""
Empty method designed to be overridden.
Some application |
OCA/l10n-italy | l10n_it_vat_statement_split_payment/models/account.py | Python | agpl-3.0 | 670 | 0 | # Copyright 2018 Silvio Gregorini (silviogregorini@openforce.it)
# Copyright (c) 2018 Openforce Srls Unipersonale (www.openforce.it)
# Copyright (c) 2019 Matteo Bilotta
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl | ).
from odoo import models
class AccountMoveLi | ne(models.Model):
_inherit = "account.move.line"
def group_by_account_and_tax(self):
grouped_lines = {}
for line in self:
group_key = (line.account_id, line.tax_line_id)
if group_key not in grouped_lines:
grouped_lines.update({group_key: []})
grouped_lines[group_key].append(line)
return grouped_lines
|
ComputationalPsychiatry/fitr | tests/test_gradients.py | Python | gpl-3.0 | 21,659 | 0.008264 | import autograd.numpy as np
from autograd import grad as gradient
from autograd import elementwise_grad, jacobian, hessian
from fitr import utils
from fitr import gradients as grad
from fitr import hessians as hess
from fitr.environments import TwoArmedBandit
from fitr.environments import DawTwoStep
from fitr.agents import RWSoftmaxAgent
from fitr.agents import RWStickySoftmaxAgent
from fitr.agents import SARSASoftmaxAgent
from fitr.agents import SARSAStickySoftmaxAgent
from fitr.agents.policies import SoftmaxPolicy
from fitr.agents.policies import StickySoftmaxPolicy
from fitr.agents.value_functions import ValueFunction
from fitr.agents.value_functions import InstrumentalRescorlaWagnerLearner
from fitr.agents.value_functions import QLearner
from fitr.agents.value_functions import SARSALearner
from fitr.agents import TwoStepStickySoftmaxSARSABellmanMaxAgent
def test_logsumexp():
x = np.array([1., 0., 0.])
grad_fitr = grad.logsumexp(x)
grad_autograd = gradient(utils.logsumexp)(x)
grad_err = np.linalg.norm(grad_fitr-grad_autograd)
assert(grad_err < 1e-6)
def test_exp():
x = np.arange(5).astype(np.float32)
assert(np.all(np.equal(x, grad.exp(x))))
def test_max():
rng = np.random.RandomState(236)
ag_max = jacobian(np.max)
for i in range(20):
x = rng.normal(size=5)**2
ag_grad = ag_max(x)
fitr_grad = grad.max(x)
assert(np.linalg.norm(ag_grad-fitr_grad) < 1e-6)
def test_matrix_max():
rng = np.random.RandomState(236)
A = rng.randint(9, size=(3, 5)).astype(np.float32)
def maxrow(A):
return np.max(A, axis=0)
def maxcol(A):
return np.max(A, axis=1)
agR = elementwise_grad(maxrow)(A)
agC = elementwise_grad(maxcol)(A)
fR = grad.matrix_max(A, axis=0)
fC = grad.matrix_max(A, axis=1)
assert(np.linalg.norm(agR - fR) < 1e-6)
assert(np.linalg.norm(agC - fC) < 1e-6)
def test_sigmoid():
x = np.linspace(-5, 5, 10)
f = lambda x: utils.sigmoid(x)
ag = elementwise_grad(f)(x)
fg = grad.sigmoid(x)
assert(np.all(np.linalg.norm(ag-fg) < 1e-6))
def test_softmax():
x = np.arange(5)+1
x = x.astype(np.float)
f = lambda x: utils.softmax(x)
gx = jacobian(f)
agx = gx(x)
fitrgx = grad.softmax(x)
assert(np.linalg.norm(agx-fitrgx) < 1e-6)
def test_grad_Qx():
x = np.array([1., 0., 0.])
task = TwoArmedBandit()
v = ValueFunction(task)
v.Q = np.array([[1., 2., 3.], [4., 5., 6.]])
def vfx(Q):
v.Q = Q
return v.Qx(x)
agQx = elementwise_grad(vfx)(v.Q)
gQ = v.grad_Qx(x)
assert(np.linalg.norm(agQx-gQ) < 1e-5)
def test_grad_uQx():
x = np.array([1., 0., 0.])
u = np.array([0., 1.])
task = TwoArmedBandit()
v = ValueFunction(task)
v.Q = np.array([[1., 2., 3.], [4., 5., 6.]])
def vfx(Q):
v.Q = Q
return v.uQx(u, x)
agQx = elementwise_grad(vfx)(v.Q)
gQ = v.grad_uQx(u, x)
assert(np.linalg.norm(agQx-gQ) < 1e-5)
def test_grad_Vx():
x = np.array([1., 0., 0.])
task = TwoArmedBandit()
v = ValueFunction(task)
v.V = np.array([1., 2., 3.])
def vfx(V):
v.V = V
return v.Vx(x)
agVx = elementwise_grad(vfx)(v.V)
gV = v.grad_Vx(x)
assert(np.linalg.norm(agVx-gV) < 1e-5)
def test_grad_instrumantalrwupdate():
lr = 0.1
task = TwoArmedBandit()
q = InstrumentalRescorlaWagnerLearner(task, learning_rate=lr)
x = np.array([1., 0., 0.])
u1 = np.array([1., 0.])
u2 = np.array([0., 1.])
x_1 = np.array([0., 1., 0.])
x_2 = np.array([0., 0., 1.])
r1 = 1.0
r2 = 0.0
q.update(x, u1, r1, x_1, None)
q.update(x, u2, r2, x_2, None)
q.update(x, u2, r1, x_1, None)
q.update(x, u1, r2, x_2, None)
q.update(x, u1, r1, x_1, None)
fitr_grad = q.dQ['learning_rate']
fitr_hess = q.hess_Q['learning_rate']
def fq(lr):
m = InstrumentalRescorlaWagnerLearner(task, learning_rate=lr)
m._update_noderivatives(x, u1, r1, x_1, None)
m._update_noderivatives(x, u2, r2, x_2, None)
m._update_noderivatives(x, u2, r1, x_1, None)
m._update_noderivatives(x, u1, r2, x_2, None)
m._update_noderivatives(x, u1, r1, x_1, None)
return m.Q
agQ = jacobian(fq)(lr)
ahQ = hessian(fq)(lr)
assert(np.linalg.norm(fitr_grad-agQ) < 1e-6)
assert(np.linalg.norm(fitr_hess-ahQ) < 1e-6)
def test_grad_qlearnerupdate():
    """Check QLearner's analytic gradients against autograd on a two-step MDP.

    Runs an identical, seeded trial sequence through the instrumented learner
    and through derivative-free re-runs wrapped for autograd, then compares
    the partial derivatives of Q with respect to each hyperparameter.
    """
    ntrials = 7

    def make_mdp_trials():
        # Fixed seed so the fitr run and every autograd run see the same data.
        rng = np.random.RandomState(3256)
        X1 = np.tile(np.array([1., 0., 0., 0., 0.]), [ntrials, 1])
        X2 = rng.multinomial(1, pvals=[0., 0.5, 0.5, 0., 0.], size=ntrials)
        U1 = rng.multinomial(1, pvals=[0.5, 0.5], size=ntrials)
        U2 = rng.multinomial(1, pvals=[0.5, 0.5], size=ntrials)
        X3 = rng.multinomial(1, pvals=[0., 0., 0., 0.5, 0.5], size=ntrials)
        R = np.array([0., 0., 0., 1., 0.])
        return X1, X2, U1, U2, X3, R

    # GRADIENTS WITH FITR
    X1, X2, U1, U2, X3, R = make_mdp_trials()
    q = QLearner(DawTwoStep(), learning_rate=0.1, discount_factor=0.9, trace_decay=0.95)
    for i in range(ntrials):
        q.etrace = np.zeros(q.Q.shape)
        x = X1[i]; u = U1[i]; x_ = X2[i]; r = R@x_
        q.update(x, u, r, x_, None)
        u_ = U2[i]; x = x_; u = u_; x_ = X3[i]; r = R@x_
        q.update(x, u, r, x_, None)

    # AUTOGRAD reference: one derivative-free re-run of the exact same trial
    # sequence, parameterized so each hyperparameter can be differentiated.
    # (The three near-identical closures were factored into this helper.)
    def run_q(learning_rate=0.1, discount_factor=0.9, trace_decay=0.95):
        X1, X2, U1, U2, X3, R = make_mdp_trials()
        m = QLearner(DawTwoStep(), learning_rate=learning_rate,
                     discount_factor=discount_factor, trace_decay=trace_decay)
        for i in range(ntrials):
            m.etrace = np.zeros((2, 5))
            x = X1[i]; u = U1[i]; x_ = X2[i]; r = R@x_
            m._update_noderivatives(x, u, r, x_, None)
            u_ = U2[i]; x = x_; u = u_; x_ = X3[i]; r = R@x_
            m._update_noderivatives(x, u, r, x_, None)
        return m.Q

    def agf_lr(lr):
        return run_q(learning_rate=lr)

    def agf_dc(dc):
        return run_q(discount_factor=dc)

    def agf_et(et):
        return run_q(trace_decay=et)

    # Ensure all runs produced the same value function.
    # Fix: np.stack() requires a sequence; feeding it a generator expression
    # (as the original did) raises TypeError on modern NumPy.
    qlist = [agf_lr(0.1), agf_dc(0.9), agf_et(0.95), q.Q]
    assert np.all([np.array_equal(a, b) for a in qlist for b in qlist])

    # Check partial derivative of Q with respect to learning rate
    assert np.linalg.norm(q.dQ['learning_rate'] - jacobian(agf_lr)(0.1)) < 1e-6
    # Check partial derivative of Q with respect to discount factor
    assert np.linalg.norm(q.dQ['discount_factor'] - jacobian(agf_dc)(0.9)) < 1e-6
    # Check partial derivative of Q with respect to trace decay
    assert np.linalg.norm(q.dQ['trace_decay'] - jacobian(agf_et)(0.95)) < 1e-6
def test_grad_sarsalearnerupdate():
ntrials = 7
def make_mdp_trials():
rng = np.random.RandomState(3256)
X1 = np.tile(np.array([1., 0., 0., 0., 0.]), [ntrials, 1])
X2 = rng.multinomial(1, pvals=[0., 0.5, 0.5, 0., 0.], size=ntrials)
U1 = rng.multinomial(1, pvals=[0.5, 0.5], size=ntrials)
U2 = rng.multinomial(1, pvals=[0.5, 0.5], size=ntrials)
X3 = rng.multinomial(1, pvals=[0., 0., 0., 0.5, 0.5], size=ntrials)
U3 = rng.multinomial(1, pvals=[0.5, 0.5], size=ntrials)
R = np.array([0., 0., 0., 1., 0.])
return X1, X2, U1, U2, X3, U3, R
# GRADIENTS WITH FITR
X1, X2, U1, U2, X3, U3, R = make_mdp_trials()
q = SARSALearner(DawTwoStep(), lea |
yuriyminin/leap-gesture | machineLearning.py | Python | mit | 3,079 | 0.034102 | #!/usr/bin/env python
import sys
import tensorflow as tf
import numpy as np
from numpy import genfromtxt
import requests
import csv
from sklearn import datasets
from sklearn.cross_validation import train_test_split
import sklearn
from scipy import stats
import getopt
from StringIO import StringIO
import requests
# Convert to one hot
def convertOneHot(data):
    """Convert rows whose first column is an integer class label to one-hot form.

    Args:
        data: iterable of rows; ``row[0]`` is the (castable-to-int) class label.

    Returns:
        Tuple ``(y, y_onehot)``: ``y`` is an int ndarray of labels and
        ``y_onehot`` a list of lists with a single 1 at each label's index.
    """
    y = np.array([int(row[0]) for row in data])
    # Hoist the loop-invariant width computation out of the loop; guard the
    # empty case, where max() would raise (the original never called max()
    # on empty input because the loop body never ran).
    n_classes = int(y.max()) + 1 if len(y) else 0
    y_onehot = []
    for label in y:
        row = [0] * n_classes
        row[label] = 1
        y_onehot.append(row)
    return (y, y_onehot)
# find most common element
def mode(arr) :
m = max([arr.count(a) for a in arr])
return [x for x in arr if arr.count(x) == m][0] if m>1 else None
def main():
    # Train a one-layer softmax classifier (TensorFlow v1, Python 2 syntax)
    # on CSV data passed as two command-line strings, then print the most
    # common predicted gesture ID for the test set.
    # get data from arguments
    train=str(sys.argv[1]);
    test=str(sys.argv[2]);
    # NOTE(review): the second replace substitutes EVERY literal 'n', which
    # also re-mangles the ' \r\n' just inserted by the first replace.
    # Presumably the caller sends rows separated by a bare 'n' -- confirm
    # the upstream payload format before touching this.
    train = train.replace('\n',' \r\n')
    train = train.replace('n',' \r\n')
    test = test.replace('\n',' \r\n')
    test = test.replace('n',' \r\n')
    #print train
    #print test
    data = genfromtxt(StringIO(train),delimiter=',') # Training data
    test_data = genfromtxt(StringIO(test),delimiter=',') # Test data
    #print data
    #print test_data
    # Column 0 is the label; the remaining columns are features.
    x_train=np.array([ i[1::] for i in data])
    y_train,y_train_onehot = convertOneHot(data)
    x_test=np.array([ i[1::] for i in test_data])
    y_test,y_test_onehot = convertOneHot(test_data)
    # A number of features, 5 in this case (one per finger)
    # B = number of gesture possibilities
    A=data.shape[1]-1 # Number of features, Note first is y
    B=len(y_train_onehot[0])
    tf_in = tf.placeholder("float", [None, A]) # Features
    tf_weight = tf.Variable(tf.zeros([A,B]))
    tf_bias = tf.Variable(tf.zeros([B]))
    # Softmax regression: softmax(x.W + b)
    tf_softmax = tf.nn.softmax(tf.matmul(tf_in,tf_weight) + tf_bias)
    # Training via backpropagation
    tf_softmax_correct = tf.placeholder("float", [None,B])
    tf_cross_entropy = -tf.reduce_sum(tf_softmax_correct*tf.log(tf_softmax))
    # Train using tf.train.GradientDescentOptimizer
    tf_train_step = tf.train.GradientDescentOptimizer(0.01).minimize(tf_cross_entropy)
    # Add accuracy checking nodes
    tf_correct_prediction = tf.equal(tf.argmax(tf_softmax,1), tf.argmax(tf_softmax_correct,1))
    tf_accuracy = tf.reduce_mean(tf.cast(tf_correct_prediction, "float"))
    # Initialize and run
    # NOTE(review): tf.initialize_all_variables() is the pre-TF-1.0 name
    # (removed in later releases in favor of global_variables_initializer).
    init = tf.initialize_all_variables()
    sess = tf.Session()
    sess.run(init)
    #print("...")
    # Run the training
    for i in range(6):
        sess.run(tf_train_step, feed_dict={tf_in: x_train, tf_softmax_correct: y_train_onehot})
        #calculate accuracy from test data
        #result = sess.run(tf_accuracy, feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
        #print "Run {},{}".format(i,result)
    #make Prediction after training
    prediction=tf.argmax(tf_softmax,1)
    guess = prediction.eval(feed_dict={tf_in: x_test}, session=sess)
    # calculate most common gesture ID (Python 2 print statement)
    print int(stats.mode(guess)[0][0])
    #r = requests.post("http://localhost:3000/api/receiveAnswer", data = {"prediction": int(stats.mode(guess)[0][0])})
    return 0

if __name__ == "__main__":
    main()
|
tinkerinestudio/Tinkerine-Suite | TinkerineSuite/Cura/cura_sf/skeinforge_application/skeinforge_plugins/profile_plugins/cutting.py | Python | agpl-3.0 | 2,099 | 0.011434 | """
This page is in the table of contents.
Cutting is a script to set the cutting profile for the skeinforge chain.
The displayed craft sequence is the sequence in which the tools craft the model and export the output.
On the cutting dialog, clicking the 'Add Profile' button will duplicate the selected profile and give it the name in the input field. For example, if laser is selected and the name laser_10mm is in the input field, clicking the 'Add Profile' button will duplicate laser and save it as laser_10mm. The 'Delete Profile' button deletes the selected profile.
The profile selection is the setting. If you hit 'Save and Close' the selection will be saved, if you hit 'Cancel' the selection will not be saved. However; adding and deleting a profile is a permanent action, for example 'Cancel' will not bring back any deleted profiles.
To chang | e the cutting profile, in a shell in the profile_plugins folder type:
> python cutting.py
"""
from __future__ import absolute_import
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import sys
__author__ = 'Enrique Perez (perez_enrique@yahoo.c | om)'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getCraftSequence():
    """Return the ordered list of tools in the cutting craft sequence."""
    return ['chop', 'preface', 'outset', 'multiply', 'whittle', 'drill',
            'lift', 'flow', 'feed', 'home', 'lash', 'fillet', 'limit',
            'unpause', 'alteration', 'export']
def getNewRepository():
    """Construct and return a fresh cutting settings repository."""
    repository = CuttingRepository()
    return repository
class CuttingRepository(object):
    "A class to handle the cutting settings."
    def __init__(self):
        "Set the default settings, execute title & settings fileName."
        # Registers this craft sequence under the 'end_mill' profile type and
        # points the settings dialog at the cutting documentation page.
        skeinforge_profile.addListsSetCraftProfile( getCraftSequence(), 'end_mill', self, 'skeinforge_application.skeinforge_plugins.profile_plugins.cutting.html')
def main():
    "Display the export dialog."
    if len(sys.argv) > 1:
        # NOTE(review): writeOutput is neither defined nor imported in this
        # module, so this branch raises NameError when arguments are given.
        # Other skeinforge plugins import writeOutput from their craft
        # modules -- confirm the intended source before fixing.
        writeOutput(' '.join(sys.argv[1 :]))
    else:
        settings.startMainLoopFromConstructor(getNewRepository())

if __name__ == "__main__":
    main()
|
ARMmbed/yotta_osx_installer | workspace/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py | Python | apache-2.0 | 8,577 | 0 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from collections import namedtuple
from cryptography import utils
from cryptography.exceptions import InternalError
from cryptography.hazmat.backends.commoncrypto.ciphers import (
_CipherContext, _GCMCipherContext
)
from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext
from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext
from cryptography.hazmat.backends.interfaces import (
CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend
)
from cryptography.hazmat.bindings.commoncrypto.binding import Binding
from cryptography.hazmat.primitives.ciphers.algorithms import (
AES, ARC4, Blowfish, CAST5, TripleDES
)
from cryptography.hazmat.primitives.ciphers.modes import (
CBC, CFB, CFB8, CTR, ECB, GCM, OFB
)
# Bundle of the CFFI context type string plus the three CommonCrypto
# functions (init/update/final) needed to drive one digest algorithm.
HashMethods = namedtuple(
    "HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"]
)
@utils.register_interface(CipherBackend)
@utils.register_interface(HashBackend)
@utils.register_interface(HMACBackend)
@utils.register_interface(PBKDF2HMACBackend)
class Backend(object):
"""
CommonCrypto API wrapper.
"""
name = "commoncrypto"
    def __init__(self):
        """Load the CommonCrypto bindings and build the algorithm tables."""
        self._binding = Binding()
        self._ffi = self._binding.ffi
        self._lib = self._binding.lib

        # (cipher class, mode class) -> (cipher const, mode const)
        self._cipher_registry = {}
        self._register_default_ciphers()
        # Digest name -> HashMethods. Note SHA-224 shares the SHA-256
        # context struct and SHA-384 shares the SHA-512 one.
        self._hash_mapping = {
            "md5": HashMethods(
                "CC_MD5_CTX *", self._lib.CC_MD5_Init,
                self._lib.CC_MD5_Update, self._lib.CC_MD5_Final
            ),
            "sha1": HashMethods(
                "CC_SHA1_CTX *", self._lib.CC_SHA1_Init,
                self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final
            ),
            "sha224": HashMethods(
                "CC_SHA256_CTX *", self._lib.CC_SHA224_Init,
                self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final
            ),
            "sha256": HashMethods(
                "CC_SHA256_CTX *", self._lib.CC_SHA256_Init,
                self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final
            ),
            "sha384": HashMethods(
                "CC_SHA512_CTX *", self._lib.CC_SHA384_Init,
                self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final
            ),
            "sha512": HashMethods(
                "CC_SHA512_CTX *", self._lib.CC_SHA512_Init,
                self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final
            ),
        }

        # Digest name -> CommonCrypto HMAC algorithm constant.
        self._supported_hmac_algorithms = {
            "md5": self._lib.kCCHmacAlgMD5,
            "sha1": self._lib.kCCHmacAlgSHA1,
            "sha224": self._lib.kCCHmacAlgSHA224,
            "sha256": self._lib.kCCHmacAlgSHA256,
            "sha384": self._lib.kCCHmacAlgSHA384,
            "sha512": self._lib.kCCHmacAlgSHA512,
        }

        # Digest name -> CommonCrypto PBKDF2 PRF constant (no md5 here).
        self._supported_pbkdf2_hmac_algorithms = {
            "sha1": self._lib.kCCPRFHmacAlgSHA1,
            "sha224": self._lib.kCCPRFHmacAlgSHA224,
            "sha256": self._lib.kCCPRFHmacAlgSHA256,
            "sha384": self._lib.kCCPRFHmacAlgSHA384,
            "sha512": self._lib.kCCPRFHmacAlgSHA512,
        }
def hash_supported(self, algorithm):
return algorithm.name in self._hash_mapping
def hmac_supported(self, algorithm):
return algorithm.name in self._supported_hmac_algorithms
def create_hash_ctx(self, algorithm):
return _HashContext(self, algorithm)
def create_hmac_ctx(self, key, algorithm):
return _HMACContext(self, key, algorithm)
def cipher_supported(self, cipher, mode):
return (type(cipher), type(mode)) in self._cipher_registry
def create_symmetric_encryption_ctx(self, cipher, mode):
if isinstance(mode, GCM):
return _GCMCipherContext(
self, cipher, mode, self._lib.kCCEncrypt
)
else:
return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt)
def create_symmetric_decryption_ctx(self, cipher, mode):
if isinstance(mode, GCM):
return _GCMCipherContext(
self, cipher, mode, self._lib.kCCDecrypt
)
else:
return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt)
def pbkdf2_hmac_supported(self, algorithm):
return algorithm.name in self._supported_pbkdf2_hmac_algorithms
    def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
                           key_material):
        """Derive *length* bytes from *key_material* via CCKeyDerivationPBKDF
        (PBKDF2 with the HMAC PRF matching *algorithm*)."""
        alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name]
        # Output buffer allocated through CFFI; CommonCrypto writes into it.
        buf = self._ffi.new("char[]", length)
        res = self._lib.CCKeyDerivationPBKDF(
            self._lib.kCCPBKDF2,
            key_material,
            len(key_material),
            salt,
            len(salt),
            alg_enum,
            iterations,
            buf,
            length
        )
        # Raises on any non-success CommonCrypto status code.
        self._check_cipher_response(res)

        return self._ffi.buffer(buf)[:]
def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls,
mode_const):
if (cipher_cls, mode_cls) in self._cipher_registry:
raise ValueError("Duplicate registration for: {0} {1}.".format(
cipher_cls, mode_cls)
)
self._cipher_registry[cipher_cls, mode_cls] = (cipher_const,
mode_const)
def _register_default_ciphers(self):
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(CFB8, self._lib.kCCModeCFB8),
(OFB, self._lib.kCCModeOFB),
(CTR, self._lib.kCCModeCTR),
(GCM, self._lib.kCCModeGCM),
]:
self._register_cipher_adapter(
AES,
self._lib.kCCAlgorithmAES128,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(CFB8, self._lib.kCCModeCFB8),
(OFB, self._lib.kCCModeOFB),
]:
self._register_cipher_adapter(
TripleDES,
self._lib.kCCAlgorithm3DES,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(OFB, self._lib.kCCModeOFB)
]:
self._register_cipher_adapter(
Blowfish,
self._lib.kCCAlgorithmBlowfish,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(OFB, self._lib.kCCModeOFB),
(CTR, self._lib.kCCModeCTR)
]:
self._register_cipher_adapter(
CAST5,
self._lib.kCCAlgorithmCAST,
mode_cls,
mode_const
)
self._register_cipher_adapter(
ARC4,
| self._lib.kCCAlgorithmR | C4,
type(None),
self._lib.kCCModeRC4
)
def _check_cipher_response(self, response):
if response == self._lib.kCCSuccess:
return
elif response == self._lib.kCCAlignmentError:
# This error is not currently triggered due to a bug filed as
# rdar://15589470
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
else:
raise InternalError(
"The backend returned an unknown error, consider filing a bug."
" Code: {0}.".format(response),
|
DantestyleXD/MVM5B_BOT | plugins/log.py | Python | gpl-2.0 | 763 | 0.001311 | # -*- co | ding: utf-8 -*-
from config import *
print(Color(
'{autored}[{/red}{autoyellow}+{/yellow}{autored}]{/red} {autocyan} log.py importado.{/cyan}'))
@bot.message_handler(commands=['log'])
def command_log(m):
    """Handle /log: toggle UDP logging (admins only) and persist the flag."""
    cid = m.chat.id
    uid = m.from_user.id
    try:
        send_udp('log')
    except Exception as e:
        # Report the failure to the maintainer chat instead of crashing.
        bot.send_message(52033876, send_exception(e), parse_mode="Markdown")
    if not is_recent(m):
        return None
    if is_admin(uid):
        if extra["log"]:
            extra["log"] = False
            bot.send_message(cid, "Log desactivado")
        else:
            extra["log"] = True
            bot.send_message(cid, "Log activado")
        # Persist the toggled flag so it survives restarts.
        # NOTE(review): indentation reconstructed -- assumed the save runs
        # only on the admin path; confirm against the original file.
        with open("extra_data/extra.json", "w") as f:
            json.dump(extra, f)
|
juhgiyo/pyserver3 | pyserver/network/async_multicast.py | Python | mit | 8,329 | 0.001921 | #!/usr/bin/python
"""
@file async_multicast.py
@author Woong Gyu La a.k.a Chris. <juhgiyo@gmail.com>
<http://github.com/juhgiyo/pyserver>
@date March 10, 2016
@brief AsyncMulticast Interface
@version 0.1
@section LICENSE
The MIT License (MIT)
Copyright (c) 2016 Woong Gyu La <juhgiyo@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@section DESCRIPTION
AsyncMulticast Class.
"""
import asyncio
import socket
import traceback
import threading
from .callback_interface import *
from .async_controller import AsyncController
# noinspection PyDeprecation
import copy
# Linux IP_MTU_DISCOVER socket option and its values (not exposed by the
# socket module on every platform, hence defined here).
IP_MTU_DISCOVER = 10
IP_PMTUDISC_DONT = 0  # Never send DF frames.
IP_PMTUDISC_WANT = 1  # Use per route hints.
IP_PMTUDISC_DO = 2  # Always DF.
IP_PMTUDISC_PROBE = 3  # Ignore dst pmtu.
'''
Interfaces
variables
- callback_obj
functions
- def send(multicast_addr,port,data)
- def close() # close the socket
- def join(multicast_addr) # start receiving datagram from given multicast group
- def leave(multicast_addr) # stop receiving datagram from given multicast group
- def getgrouplist() # get group list
infos
- multicast address range: 224.0.0.0 - 239.255.255.255
- linux : route add -net 224.0.0.0 netmask 240.0.0.0 dev eth0
to enable multicast
'''
class AsyncM | ulticast(asyncio.Protocol):
    # enable_loopback : 1 enable loopback / 0 disable loopback
    # ttl: 0 - restricted to the same host
    #      1 - restricted to the same subnet
    #      32 - restricted to the same site
    #      64 - restricted to the same region
    #      128 - restricted to the same continent
    #      255 - unrestricted in scope
    def __init__(self, port, callback_obj, ttl=1, enable_loopback=False, bind_addr=''):
        """Create the UDP socket, configure multicast options and register
        the endpoint with the shared asyncio controller loop."""
        # self.lock = threading.RLock()
        self.MAX_MTU = 1500
        self.callback_obj = None
        self.port = port
        self.multicastSet = set([])
        self.lock = threading.RLock()
        self.ttl = ttl
        self.enable_loopback = enable_loopback
        if callback_obj is not None and isinstance(callback_obj, IUdpCallback):
            self.callback_obj = callback_obj
        else:
            raise Exception('callback_obj is None or not an instance of IUdpCallback class')
        try:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except AttributeError:
                pass  # Some systems don't support SO_REUSEPORT
            # for both SENDER and RECEIVER to restrict the region
            self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self.ttl)
            # for SENDER to choose whether to use loop back
            if self.enable_loopback:
                self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
            else:
                self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 0)
            self.bind_addr = bind_addr
            if self.bind_addr is None or self.bind_addr == '':
                self.bind_addr = socket.gethostbyname(socket.gethostname())
            # for both SENDER and RECEIVER to bind to specific network adapter
            self.sock.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.bind_addr))
            # for RECEIVE to receive from multiple multicast groups
            self.sock.bind(('', port))
        except Exception as e:
            print(e)
            traceback.print_exc()
        self.transport = None
        AsyncController.instance().add(self)
        if self.callback_obj is not None:
            self.callback_obj.on_started(self)
        # Register the endpoint on the shared loop; the controller is paused
        # so run_until_complete does not race the controller's own thread.
        self.loop = asyncio.get_event_loop()
        coro = self.loop.create_datagram_endpoint(lambda: self, sock=self.sock)
        AsyncController.instance().pause()
        (self.transport, _) = self.loop.run_until_complete(coro)
        AsyncController.instance().resume()
    # Even though UDP is connectionless this is called when it binds to a port
    def connection_made(self, transport):
        # Keep the transport so send()/handle_close() can use it.
        self.transport = transport
# This is called everytime there is something to read
def data_received(self, data, addr):
try:
if data and self.callback_obj is not None:
self.callback_obj.on_received(self, addr, data)
except Exception as e:
print(e)
traceback.print_exc()
    def connection_lost(self, exc):
        # Transport was torn down underneath us: run the full cleanup path.
        self.close()

    def close(self):
        # Public close: delegate to the shared teardown logic.
        self.handle_close()

    def error_received(self, exc):
        # Treat socket-level errors as fatal and tear the endpoint down.
        self.handle_close()
    def handle_close(self):
        """Leave all joined groups, close the transport, notify callbacks."""
        try:
            # Drop every multicast membership before closing the socket,
            # firing on_leave for each group as we go.
            delete_set = self.getgrouplist()
            for multicast_addr in delete_set:
                self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
                                     socket.inet_aton(multicast_addr) + socket.inet_aton('0.0.0.0'))
                if self.callback_obj is not None:
                    self.callback_obj.on_leave(self, multicast_addr)
            with self.lock:
                self.multicastSet = set([])
        except Exception as e:
            # Best-effort cleanup: membership drops may fail if the socket
            # is already gone.
            print(e)
        print('asyncUdp close called')
        self.transport.close()
        AsyncController.instance().discard(self)
        try:
            if self.callback_obj is not None:
                self.callback_obj.on_stopped(self)
        except Exception as e:
            print(e)
            traceback.print_exc()
# noinspection PyMethodOverriding
def send(self, hostname, port, data):
if len(data) <= self.MAX_MTU:
self.transport.sendto(data, (hostname, port))
else:
raise ValueError("The data size is too large")
    # for RECEIVER to receive datagram from the multicast group
    def join(self, multicast_addr):
        """Join *multicast_addr* (idempotent) and fire the on_join callback."""
        with self.lock:
            if multicast_addr not in self.multicastSet:
                # Membership is bound to the adapter chosen in __init__.
                self.sock.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
                                     socket.inet_aton(multicast_addr) + socket.inet_aton(self.bind_addr))
                self.multicastSet.add(multicast_addr)
                if self.callback_obj is not None:
                    self.callback_obj.on_join(self, multicast_addr)
    # for RECEIVER to stop receiving datagram from the multicast group
    def leave(self, multicast_addr):
        """Leave *multicast_addr* if joined and fire the on_leave callback."""
        with self.lock:
            try:
                if multicast_addr in self.multicastSet:
                    self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
                                         socket.inet_aton(multicast_addr) + socket.inet_aton('0.0.0.0'))
                    self.multicastSet.discard(multicast_addr)
                    if self.callback_obj is not None:
                        self.callback_obj.on_leave(self, multicast_addr)
            except Exception as e:
                # Best-effort: dropping membership can fail on a dead socket.
                print(e)
def getgrouplist(self):
with self.lock:
return copy.copy(self.multicastSet)
def gethostbyname(self, arg):
return self.sock.gethostbyname(arg)
def gethostname(self):
return self. |
BitcoinUnlimited/BitcoinUnlimited | qa/rpc-tests/electrum_subscriptions.py | Python | mit | 7,420 | 0.007951 | #!/usr/bin/env python3
# Copyright (c) 2019-2020 The Bitcoin Unlimited developers
import asyncio
import time
from test_framework.util import assert_equal, assert_raises
from test_framework.test_framework import BitcoinTestFramework
from test_framework.loginit import logging
from test_framework.electrumutil import (ElectrumConnection,
address_to_scripthash, bitcoind_electrum_args, sync_electrum_height,
wait_for_electrum_mempool)
# Electrum protocol method names exercised throughout this test.
ADDRESS_SUBSCRIBE = 'blockchain.address.subscribe'
ADDRESS_UNSUBSCRIBE = 'blockchain.address.unsubscribe'
SCRIPTHASH_SUBSCRIBE = 'blockchain.scripthash.subscribe'
SCRIPTHASH_UNSUBSCRIBE = 'blockchain.scripthash.unsubscribe'
def address_to_address(a):
    # Identity converter: lets the address-based tests share the same code
    # path as the scripthash-based tests (which use address_to_scripthash).
    return a
class ElectrumSubscriptionsTest(BitcoinTestFramework):
    """Functional test of the electrum server's subscribe/unsubscribe RPCs."""

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.extra_args = [bitcoind_electrum_args()]

    def run_test(self):
        n = self.nodes[0]
        # Mine past coinbase maturity so the wallet has spendable funds.
        n.generate(200)
        sync_electrum_height(n)

        async def async_tests():
            await self.test_subscribe_address(n)
            await self.test_subscribe_scripthash(n)
            await self.test_unsubscribe_address(n)
            await self.test_unsubscribe_scripthash(n)
            await self.test_subscribe_headers(n)
            await self.test_multiple_client_subs(n)

        loop = asyncio.get_event_loop()
        loop.run_until_complete(async_tests())

    async def test_unsubscribe_scripthash(self, n):
        # Scripthash flavor of the shared unsubscribe scenario.
        return await self.test_unsubscribe(n,
            SCRIPTHASH_SUBSCRIBE, SCRIPTHASH_UNSUBSCRIBE,
            address_to_scripthash)

    async def test_unsubscribe_address(self, n):
        # Address flavor of the shared unsubscribe scenario.
        return await self.test_unsubscribe(n,
            ADDRESS_SUBSCRIBE, ADDRESS_UNSUBSCRIBE,
            address_to_address)

    async def test_unsubscribe(self, n, subscribe, unsubscribe, addr_converter):
        """Subscribing must yield notifications; unsubscribing must stop them."""
        cli = ElectrumConnection()
        await cli.connect()

        addr = n.getnewaddress()
        _, queue = await cli.subscribe(subscribe, addr_converter(addr))

        # Verify that we're receiving notifications
        n.sendtoaddress(addr, 10)
        subscription_name, _ = await asyncio.wait_for(queue.get(), timeout = 10)
        assert_equal(addr_converter(addr), subscription_name)

        ok = await cli.call(unsubscribe, addr_converter(addr))
        assert(ok)

        # Verify that we're no longer receiving notifications
        n.sendtoaddress(addr, 10)
        try:
            await asyncio.wait_for(queue.get(), timeout = 10)
            assert(False) # Should have timed out.
        except asyncio.TimeoutError:
            pass

        # Unsubscribing from a hash we're not subscribed to should return false
        ok = await cli.call(unsubscribe, addr_converter(n.getnewaddress()))
        assert(not ok)

    async def test_subscribe_scripthash(self, n):
        # Scripthash flavor of the shared subscribe scenario.
        return await self.test_subscribe(n,
            SCRIPTHASH_SUBSCRIBE, SCRIPTHASH_UNSUBSCRIBE,
            address_to_scripthash)

    async def test_subscribe_address(self, n):
        # Address flavor of the shared subscribe scenario.
        return await self.test_subscribe(n,
            ADDRESS_SUBSCRIBE, ADDRESS_UNSUBSCRIBE,
            address_to_address)

    async def test_subscribe(self, n, subscribe, unsubscribe, addr_converter):
        """The statushash must change on receive, confirm, and spend."""
        cli = ElectrumConnection()
        await cli.connect()
        logging.info("Testing scripthash subscription")
        addr = n.getnewaddress()
        statushash, queue = await cli.subscribe(subscribe, addr_converter(addr))

        logging.info("Unused address should not have a statushash")
        assert_equal(None, statushash)

        logging.info("Check notification on receiving coins")
        n.sendtoaddress(addr, 10)
        subscription_name, new_statushash1 = await asyncio.wait_for(queue.get(), timeout = 10)
        assert_equal(addr_converter(addr), subscription_name)
        assert(new_statushash1 != None and len(new_statushash1) == 64)

        logging.info("Check notification on block confirmation")
        assert(len(n.getrawmempool()) == 1)
        n.generate(1)
        assert(len(n.getrawmempool()) == 0)
        subscription_name, new_statushash2 = await asyncio.wait_for(queue.get(), timeout = 10)
        assert_equal(addr_converter(addr), subscription_name)
        assert(new_statushash2 != new_statushash1)
        assert(new_statushash2 != None)

        logging.info("Check that we get notification when spending funds from address")
        n.sendtoaddress(n.getnewaddress(), n.getbalance(), "", "", True)
        subscription_name, new_statushash3 = await asyncio.wait_for(queue.get(), timeout = 10)
        assert_equal(addr_converter(addr), subscription_name)
        assert(new_statushash3 != new_statushash2)
        assert(new_statushash3 != None)

        # Clear mempool
        n.generate(1)

    async def test_subscribe_headers(self, n):
        """Each newly mined block header must be pushed to subscribers."""
        cli = ElectrumConnection()
        await cli.connect()
        headers = []

        logging.info("Calling subscribe should return the current best block header")
        result, queue = await cli.subscribe('blockchain.headers.subscribe')
        assert_equal(
            n.getblockheader(n.getbestblockhash(), False),
            result['hex'])

        logging.info("Now generate 10 blocks, check that these are pushed to us.")
        async def test():
            for _ in range(10):
                blockhashes = n.generate(1)
                header_hex = n.getblockheader(blockhashes.pop(), False)
                notified = await asyncio.wait_for(queue.get(), timeout = 10)
                assert_equal(header_hex, notified.pop()['hex'])
        start = time.time()
        await test()
        logging.info("Getting 10 block notifications took {} seconds".format(time.time() - start))

    async def test_multiple_client_subs(self, n):
        """50 concurrent clients must each get their own notification."""
        num_clients = 50
        clients = [ ElectrumConnection() for _ in range(0, num_clients) ]
        [ await c.connect() for c in clients ]
        queues = []

        addresses = [ n.getnewaddress() for _ in range(0, num_clients) ]

        # Send coins so the addresses, so they get a statushash
        [ n.sendtoaddress(addresses[i], 1) for i in range(0, num_clients) ]
        wait_for_electrum_mempool(n, count = num_clients)

        statushashes = []
        queues = []
        for i in range(0, num_clients):
            cli = clients[i]
            addr = addresses[i]
            scripthash = address_to_scripthash(addr)
            statushash, queue = await cli.subscribe(SCRIPTHASH_SUBSCRIBE, scripthash)

            # should be unique
            assert(statushash is not None)
            assert(statushash not in statushashes)

            statushashes.append(statushash)
            queues.append(queue)

        # Send new coin to all, observe that all clients get a notification
        [ n.sendtoaddress(addresses[i], 1) for i in range(0, num_clients) ]

        for i in range(0, num_clients):
            q = queues[i]
            old_statushash = statushashes[i]

            scripthash, new_statushash = await asyncio.wait_for(q.get(), timeout = 10)
            assert_equal(scripthash, address_to_scripthash(addresses[i]))
            assert(new_statushash != None)
            assert(new_statushash != old_statushash)
# Allow running this functional test directly from the shell.
if __name__ == '__main__':
    ElectrumSubscriptionsTest().main()
|
wolverineav/neutron | neutron/db/migration/alembic_migrations/dvr_init_opts.py | Python | apache-2.0 | 2,619 | 0 | # Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Initial operations for dvr
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the initial DVR (distributed virtual router) tables."""
    # One reserved MAC per compute host, used as the source MAC for DVR
    # traffic leaving that host; MACs must be globally unique.
    op.create_table(
        'dvr_host_macs',
        sa.Column('host', sa.String(length=255), nullable=False),
        sa.Column('mac_address', sa.String(length=32),
                  nullable=False, unique=True),
        sa.PrimaryKeyConstraint('host')
    )
    # Per-host port bindings for distributed router ports: one row per
    # (port, host) pair, unlike regular ml2 bindings which are per-port.
    op.create_table(
        'ml2_dvr_port_bindings',
        sa.Column('port_id', sa.String(length=36), nullable=False),
        sa.Column('host', sa.String(length=255), nullable=False),
        sa.Column('router_id', sa.String(length=36), nullable=True),
        sa.Column('vif_type', sa.String(length=64), nullable=False),
        sa.Column('vif_details', sa.String(length=4095),
                  nullable=False, server_default=''),
        sa.Column('vnic_type', sa.String(length=64),
                  nullable=False, server_default='normal'),
        sa.Column('profile', sa.String(length=4095),
                  nullable=False, server_default=''),
        sa.Column(u'status', sa.String(16), nullable=False),
        sa.ForeignKeyConstraint(['port_id'], ['ports.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('port_id', 'host')
    )
    # Mapping of routers to the l3 agent hosting their centralized SNAT
    # gateway port.
    op.create_table(
        'csnat_l3_agent_bindings',
        sa.Column('router_id', sa.String(length=36), nullable=False),
        sa.Column('l3_agent_id', sa.String(length=36), nullable=False),
        sa.Column('host_id', sa.String(length=255), nullable=True),
        sa.Column('csnat_gw_port_id', sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(['l3_agent_id'], ['agents.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['router_id'], ['routers.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['csnat_gw_port_id'], ['ports.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('router_id', 'l3_agent_id')
    )
|
rodrigolucianocosta/ControleEstoque | rOne/Storage101/django-localflavor/django-localflavor-1.3/docs/conf.py | Python | gpl-3.0 | 10,708 | 0.007097 | # -*- coding: utf-8 -*-
#
# django-localflavor documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 2 17:56:28 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# Configure Django before anything imports code that touches settings.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('extensions'))
sys.path.insert(0, os.path.abspath('..'))

import django
# Populate Django's app registry so autodoc can import localflavor modules.
django.setup()

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
              'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig',
              'sphinx.ext.viewcode', 'promises', 'settings']

# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'django-localflavor'
copyright = u'Django Software Foundation and individual contributors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
try:
    from localflavor import __version__
    # The short X.Y version.
    version = '.'.join(__version__.split('.')[:2])
    # The full version, including alpha/beta/rc tags.
    release = __version__
except ImportError:
    # Docs built without the package installed fall back to a dev label.
    version = release = 'dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'classic'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template n | ames.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# | If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-localflavordoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-localflavor.tex', u'django-localflavor Documentation',
u'Django Software Foundation and individual contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-localflavor', u'django-localflavor Documentation',
[u'Django Software Foundation and individual contributors'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-l |
lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/upgrade_policy_py3.py | Python | mit | 1,372 | 0 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microso | ft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class UpgradePolicy(Model):
    """Describes an upgrade policy - automatic or manual.

    :param mode: Specifies the mode of an upgrade to virtual machines in the
     scale set.<br /><br /> Possible values are:<br /><br /> **Manual** - You
     control the application of updates to virtual machines in the scale set.
     You do this by using the manualUpgrade action.<br /><br /> **Automatic** -
     All virtual machines in the scale set are automatically updated at the
     same time. Possible values include: 'Automatic', 'Manual'
    :type mode: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.UpgradeMode
    """
    # Maps the Python attribute to its wire-format key and (de)serialization
    # type, consumed by msrest's Model serializer.
    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'UpgradeMode'},
    }
    def __init__(self, *, mode=None, **kwargs) -> None:
        # mode is keyword-only; extra kwargs are forwarded to msrest's Model.
        super(UpgradePolicy, self).__init__(**kwargs)
        self.mode = mode
|
zentralopensource/zentral | zentral/contrib/simplemdm/views.py | Python | apache-2.0 | 8,061 | 0.002233 | import logging
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.urls import reverse, reverse_lazy
from django.views.generic import CreateView, DeleteView, DetailView, ListView, UpdateView, TemplateView
from zentral.contrib.inventory.forms import EnrollmentSecretForm
from zentral.utils.osx_package import get_standalone_package_builders
from .api_client import APIClient, APIClientError
from .forms import SimpleMDMInstanceForm
from .models import SimpleMDMApp, SimpleMDMInstance
from .utils import delete_app
logger = logging.getLogger('zentral.contrib.simplemdm.views')
# setup > simplemdm instances
class SimpleMDMInstancesView(LoginRequiredMixin, ListView):
    """List every configured SimpleMDM instance (setup section)."""
    model = SimpleMDMInstance

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx["setup"] = True
        simplemdm_instances_count = len(ctx["object_list"])
        # Pluralize the title: "0 instances", "1 instance", "2 instances", ...
        suffix = "" if simplemdm_instances_count == 1 else "s"
        ctx["title"] = "{} SimpleMDM instance{}".format(simplemdm_instances_count, suffix)
        return ctx
class CreateSimpleMDMInstanceView(LoginRequiredMixin, CreateView):
    """Create a new SimpleMDM instance, then go back to the instance list."""
    model = SimpleMDMInstance
    form_class = SimpleMDMInstanceForm
    success_url = reverse_lazy("simplemdm:simplemdm_instances")

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx["setup"] = True
        ctx["title"] = "Create SimpleMDM instance"
        return ctx
class SimpleMDMInstanceView(LoginRequiredMixin, DetailView):
    """Detail page for one SimpleMDM instance: its apps plus one
    "create app" link per available standalone package builder."""
    model = SimpleMDMInstance
    form_class = SimpleMDMInstanceForm

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        instance = self.object
        apps = list(instance.simplemdmapp_set.all())
        base_path = reverse("simplemdm:create_simplemdm_app", args=(instance.id,))
        links = []
        for builder_key, builder in get_standalone_package_builders().items():
            links.append(("{}?builder={}".format(base_path, builder_key),
                          builder.name))
        ctx.update({
            "setup": True,
            "title": "{} SimpleMDM instance".format(instance.account_name),
            "apps": apps,
            "app_number": len(apps),
            "create_app_links": links,
        })
        return ctx
class UpdateSimpleMDMInstanceView(LoginRequiredMixin, UpdateView):
    """Edit an existing SimpleMDM instance."""
    model = SimpleMDMInstance
    form_class = SimpleMDMInstanceForm
    success_url = reverse_lazy("simplemdm:simplemdm_instances")

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update({"setup": True,
                        "title": "Update SimpleMDM instance"})
        return context
class DeleteSimpleMDMInstanceView(LoginRequiredMixin, DeleteView):
    """Confirm and delete a SimpleMDM instance.

    Before deleting the local row, every app this instance uploaded to
    SimpleMDM is removed remotely (best effort).
    """
    model = SimpleMDMInstance
    success_url = reverse_lazy("simplemdm:simplemdm_instances")
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx["setup"] = True
        ctx["title"] = "Delete {}".format(self.object)
        return ctx
    def post(self, request, *args, **kwargs):
        simplemdm_instance = get_object_or_404(SimpleMDMInstance, pk=kwargs["pk"])
        api_client = APIClient(simplemdm_instance.api_key)
        # Best effort: an API error is surfaced to the user but does not
        # block deletion of the local instance (handled by super().post()).
        for app in simplemdm_instance.simplemdmapp_set.all():
            try:
                if api_client.delete_app(app.simplemdm_id):
                    messages.info(request, "{} removed from SimpleMDM".format(app.name))
            except APIClientError:
                messages.warning(request, "SimpleMDM API Error. Could not cleanup apps.")
        return super().post(request, *args, **kwargs)
class CreateSimpleMDMAppView(LoginRequiredMixin, TemplateView):
    """Create a SimpleMDM app for an instance, from a package builder.

    The builder is selected via the ``builder`` GET parameter; the view
    collects an enrollment secret form plus the builder's enrollment form,
    then creates the enrollment and a placeholder SimpleMDMApp row (the
    actual package build/upload happens via a distributor callback).
    """
    template_name = "simplemdm/simplemdmapp_form.html"
    def dispatch(self, request, *args, **kwargs):
        # Resolve the instance and the requested package builder up front;
        # an unknown/missing builder key is a 404.
        self.simplemdm_instance = get_object_or_404(SimpleMDMInstance, pk=kwargs["pk"])
        self.meta_business_unit = self.simplemdm_instance.business_unit.meta_business_unit
        standalone_builders = get_standalone_package_builders()
        try:
            self.builder_key = request.GET["builder"]
            self.builder = standalone_builders[self.builder_key]
        except KeyError:
            raise Http404
        return super().dispatch(request, *args, **kwargs)
    def get_forms(self):
        """Build the (secret form, builder enrollment form) pair, bound to
        POST data when handling a submission."""
        secret_form_kwargs = {"prefix": "secret",
                              "no_restrictions": True,
                              "meta_business_unit": self.meta_business_unit}
        enrollment_form_kwargs = {"meta_business_unit": self.meta_business_unit,
                                  "standalone": True}  # w/o dependencies. all in the package.
        if self.request.method == "POST":
            secret_form_kwargs["data"] = self.request.POST
            enrollment_form_kwargs["data"] = self.request.POST
        return (EnrollmentSecretForm(**secret_form_kwargs),
                self.builder.form(**enrollment_form_kwargs))
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx["setup"] = True
        ctx["simplemdm_instance"] = self.simplemdm_instance
        ctx["title"] = "Create SimpleMDM app"
        ctx["builder_name"] = self.builder.name
        # Reuse bound forms passed in by forms_invalid(); otherwise build
        # fresh unbound ones.
        if "secret_form" not in kwargs or "enrollment_form" not in kwargs:
            ctx["secret_form"], ctx["enrollment_form"] = self.get_forms()
        return ctx
    def forms_invalid(self, secret_form, enrollment_form):
        # Re-render the page with the bound forms so errors are displayed.
        return self.render_to_response(self.get_context_data(secret_form=secret_form,
                                                             enrollment_form=enrollment_form))
    def forms_valid(self, secret_form, enrollment_form):
        # make secret
        secret = secret_form.save()
        secret_form.save_m2m()
        # make enrollment
        enrollment = enrollment_form.save(commit=False)
        enrollment.version = 0
        enrollment.secret = secret
        enrollment.save()
        # SimpleMDM app
        app = SimpleMDMApp.objects.create(
            simplemdm_instance=self.simplemdm_instance,
            name="PENDING",  # temporary, no app uploaded yet
            simplemdm_id=0,  # temporary 0, no app uploaded yet
            builder=self.builder_key,
            enrollment_pk=enrollment.pk
        )
        # link from enrollment to app, for config update propagation
        enrollment.distributor = app
        enrollment.save()  # build package via callback call and set the simplemdm_id on the app after upload
        # info and return to SimpleMDM instance
        messages.info(self.request, "{} uploaded to SimpleMDM".format(app.name))
        return HttpResponseRedirect(app.get_absolute_url())
    def post(self, request, *args, **kwargs):
        # Both forms must validate before anything is persisted.
        secret_form, enrollment_form = self.get_forms()
        if secret_form.is_valid() and enrollment_form.is_valid():
            return self.forms_valid(secret_form, enrollment_form)
        else:
            return self.forms_invalid(secret_form, enrollment_form)
class DeleteSimpleMDMAppView(LoginRequiredMixin, DeleteView):
    """Confirm and delete a SimpleMDM app, removing it from SimpleMDM first."""
    model = SimpleMDMApp
    success_url = "/simplemdm/instances/{simplemdm_instance_id}/"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["setup"] = True
        context["title"] = "Delete {}".format(self.object)
        return context

    def post(self, request, *args, **kwargs):
        app = get_object_or_404(SimpleMDMApp, pk=kwargs["pk"],
                                simplemdm_instance__pk=kwargs["instance_pk"])
        success_message, error_message = delete_app(app.simplemdm_instance.api_key,
                                                    app.simplemdm_id)
        if success_message:
            messages.info(request, success_message)
        if error_message:
            messages.warning(request, error_message)
        return super().post(request, *args, **kwargs)
|
hsarmiento/people_finder_chile | tools/validate_merge.py | Python | apache-2.0 | 2,793 | 0.006803 | #!/usr/bin/python2.7
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Verify the messages in the en .po files after merging.
Usage:
From the root directory: tools/verify_translation.py
Verify_translations takes no arguments. To use:
1. run find_missing_translations to generate a templat | e .po file:
find_missing_translations --format=po
This will generate a .po | file with just the translated messages in order.
2. Use the english person_finder.xml file and the template from step 1 to
'merge' the english translations into the english .po file. This should
generate a .po file with the msg string set to the msg id value for each
newly translated string. Example command:
'merge_translations translations/en/person_finder.xml app/locale/en/LC_MESSAGES/django'
3. run verify_translations to verify that the strings are actually the same.
command: 'verify_translation'
4. revert the app/locale/en changes (eg, don't check in the msgstrs
in the englis files).
PO file format:
http://www.gnu.org/software/hello/manual/gettext/PO-Files.html
"""
from babel.messages import pofile
from find_missing_translations import get_po_filename
from test_pfif import text_diff
if __name__ == '__main__':
    # Load the merged english catalog produced by merge_translations.
    # NOTE(review): the file handle is never closed; acceptable for a
    # short-lived CLI tool, but a with-block would be cleaner.
    filename = get_po_filename('en')
    english = pofile.read_po(open(filename))
    count = 0
    def printsep():
        # Print a separator before every entry after the first one
        # (count is the module-level global mutated in the loop below).
        if count > 0:
            print '-------------------------------------'
    for msg in english:
        # Each newly translated string will have msg.string set
        # to the 'translated' english value.
        if msg.id and msg.string and msg.string != msg.id:
            if isinstance(msg.id, tuple):
                # TODO(lschumacher): deal with plurals properly,
                # assumes exactly two plural forms (true for English).
                if msg.string[0] or msg.string[1]:
                    printsep()
                    print 'msg id: %s\nmsgstr: %s' % (msg.id, msg.string)
                    count += 1
            else:
                printsep()
                print text_diff(msg.id, msg.string)
                count += 1
    if count:
        printsep()
        print 'Found %s bad translations' % count
    else:
        print 'Translation OK'
|
CodeMill/cmsplugin-hoverimage | cmsplugin_hoverimage/migrations/0001_initial.py | Python | mit | 7,784 | 0.008222 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration creating the HoverImagePlugin table."""

    def forwards(self, orm):
        # Adding model 'HoverImagePlugin'
        db.create_table('cmsplugin_hoverimageplugin', (
            ('cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100)),
            ('hover', self.gf('django.db.models.fields.files.ImageField')(max_length=100)),
            ('url', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('page_link', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cms.Page'], null=True, blank=True)),
            ('alt', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('longdesc', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
        ))
        db.send_create_signal('cmsplugin_hoverimage', ['HoverImagePlugin'])

    def backwards(self, orm):
        # Deleting model 'HoverImagePlugin'
        db.delete_table('cmsplugin_hoverimageplugin')

    # Frozen ORM snapshot used by South; do not edit by hand beyond
    # regeneration. (Corrupted tokens on the CharField/ForeignKey lines of
    # 'cms.page' were repaired.)
    models = {
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 3, 8, 0, 0)'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.page': {
            'Meta': {'ordering': "('site', 'tree_id', 'lft')", 'object_name': 'Page'},
            'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}),
            'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
            'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
            'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
            'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
            'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        'cmsplugin_hoverimage.hoverimageplugin': {
            'Meta': {'object_name': 'HoverImagePlugin', 'db_table': "'cmsplugin_hoverimageplugin'", '_ormbases': ['cms.CMSPlugin']},
            'alt': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'hover': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'longdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'page_link': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }

    complete_apps = ['cmsplugin_hoverimage']
kdudkov/tiny-home-automation | core/context.py | Python | gpl-3.0 | 2,873 | 0.000348 | import asyncio
import collections
import functools
import logging
from .items import Items
from .rules import AbstractRule
CB_ONCHANGE = 'onchange'
CB_ONCHECK = 'oncheck'
LOG = logging.getLogger('mahno.' + __name__)
class Context(object):
    """Shared runtime state of the automation engine: items, actors, rules,
    the outgoing command queue and the asyncio loop used for callbacks."""

    def __init__(self):
        self.config = {}
        self.items = Items()
        self.actors = {}
        self.rules = []
        self.commands = collections.deque()
        self.loop = None
        self.callbacks = {}

    def do_async(self, fn, *args):
        """Schedule fn(*args) on the event loop (coroutine or plain callable)."""
        if asyncio.iscoroutinefunction(fn):
            asyncio.ensure_future(fn(*args), loop=self.loop)
        else:
            self.loop.call_soon(functools.partial(fn, *args))

    def add_cb(self, name, cb):
        """Register callback cb under event name (CB_ONCHANGE/CB_ONCHECK)."""
        self.callbacks.setdefault(name, []).append(cb)

    def command(self, name, cmd):
        """Queue a raw command addressed to channel name."""
        LOG.info('external command %s', name)
        self.commands.append((name, cmd))

    def item_command(self, name, cmd):
        """Send command cmd to item name via its configured output channel,
        or set the item value directly when it has no output."""
        item = self.items.get_item(name)
        if not item:
            LOG.error('no item %s', name)
            return
        if item.config.get('output'):
            for actor in self.actors.values():
                if actor.name == item.config['output'].get('channel'):
                    msg = actor.format_simple_cmd(item.config['output'], cmd)
                    LOG.info('sending msg %s to %s', msg, actor.name)
                    self.commands.append((item.config['output']['channel'], msg))
            # fast_change items update local state immediately instead of
            # waiting for feedback from the output channel.
            if item.config.get('fast_change'):
                LOG.debug('fast change set %s to %s', name, cmd)
                self.set_item_value(name, cmd)
        else:
            LOG.info('directly set %s to %s', name, cmd)
            self.set_item_value(name, cmd, True)

    def add_rule(self, rule):
        assert isinstance(rule, AbstractRule)
        rule.context = self
        self.rules.append(rule)

    def get_item_value(self, name):
        """Return the current value of item name, or None if unknown."""
        item = self.items.get_item(name)
        return item.value if item is not None else None

    def set_item_value(self, name, value, force=False):
        """Update item name; fire CB_ONCHECK always and CB_ONCHANGE when the
        value changed (or force is True). Returns False for unknown items."""
        item = self.items.get_item(name)
        if not item:
            LOG.error('not found item %s', name)
            return False
        old_value = item.value
        age = item.age
        changed = self.items.set_item_value(name, value)
        self.run_cb(CB_ONCHECK, item, changed)
        if changed or force:
            self.run_cb(CB_ONCHANGE, name, item.value, old_value, age)

    def add_delayed(self, seconds, fn):
        """Schedule fn after seconds; returns the timer handle (or None when
        no loop is attached yet)."""
        if self.loop:
            t = self.loop.time() + seconds
            return self.loop.call_at(t, fn)

    @staticmethod
    def remove_delayed(d):
        """Cancel a timer handle returned by add_delayed (None is tolerated)."""
        LOG.info('remove delayed %s', d)
        if d:
            d.cancel()

    def run_cb(self, name, *args):
        """Dispatch all callbacks registered under name asynchronously."""
        for cb in self.callbacks.get(name, []):
            if cb:
                self.do_async(cb, *args)
|
sumihai-tekindo/account_sicepat | asset_tetap/models/asset_tetap.py | Python | gpl-3.0 | 1,140 | 0.023684 | from openerp import models, fields, api, _
class asset_tetap(models.TransientModel):
    """Wizard printing the fixed-asset (asset tetap) XLS report for a date
    range, optionally restricted by asset state."""
    _name = "asset.tetap"

    start_date = fields.Date(string="Start Date", required=True)
    end_date = fields.Date(string="End Date", required=True)
    state = fields.Selection(string="State",
                             selection=[("draft", "Draft"),
                                        ("open", "Running"),
                                        ("all", "All")],
                             required=True)

    @api.multi
    def print_report(self):
        """Collect the matching assets and launch the XLS report action."""
        self.ensure_one()
        # "all" means every reportable state (draft + running).
        if self.state == "all":
            states = ["draft", "open"]
        else:
            states = [self.state]
        ass_ids = self.env['account.asset.asset'].search([("state", "in", states)])
        datas = {
            'model': 'account.asset.asset',
            "ids": ass_ids.ids,
            'start_date': self.start_date,
            'end_date': self.end_date,
        }
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'report.asset.tetap.xls',
            'datas': datas,
        }
j0lly/Odin | odin/utils.py | Python | mit | 3,748 | 0 | # -*- coding: utf-8 -*-
"""collection of helpers for Miner module."""
import ipaddress
import json
import odin
from odin.store import ThreadedModel
# Default logging capabilities (logging nowhere)
log = odin.get_logger()
def findip(string):
    """Expand a CIDR expression into the list of host IPs to scan.

    :param string: ip range in cidr notation
    :type string: str
    :returns: a list of IPs to be scanned
    :rtype: list
    """
    try:
        ip_range = ipaddress.IPv4Network(string, strict=False)
    except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
        # Log with traceback, then let the caller deal with the bad input.
        log.error('%s', err, exc_info=True)
        raise
    if ip_range.num_addresses == 1:
        log.debug('value resulted in a single host ip: %s',
                  ip_range.network_address.compressed)
        return [ip_range.network_address.compressed]
    log.debug('value resulted in a muliple host list for %s',
              string)
    return [host.compressed for host in ip_range.hosts()]
def chunker(iterable, chunk_size=16):
    """Yield consecutive slices of *iterable*, each at most chunk_size long.

    :param iterable: an iterable to cut in chunks
    :type iterable: iter
    :param chunk_size: chunk lenght to use
    :type chunk_size: int
    :returns: a generator of lists of chunks of provided iterable
    :rtype: generator
    """
    for start in range(0, len(iterable), chunk_size):
        chunk = iterable[start:start + chunk_size]
        log.debug('yielding %s', chunk)
        yield chunk
def run_scan(queue, targets, cls=ThreadedModel):
    """ Run a scan against targets and return a Pynamo modeled list of objects.

    :queue: a queue
    :type queue: queue.Queue
    :param targets: list of ips, divided in chunks if necessary
    :type targets: list
    :param cls: class to be used for resolution and threading
    :type cls: class object
    :returns: yield a list of pynamo objects
    :rtype: generator
    """
    for chunk in targets:
        threads = []
        # One worker thread per IP in the chunk; each worker is expected to
        # put its result on `queue`.
        for ip in chunk:
            obj = cls(ip, queue=queue)
            obj.daemon = True
            threads.append(obj)
        for thread in threads:
            thread.start()
        for thread in threads:
            # NOTE(review): with a 2s join timeout, slow workers may still be
            # running while the queue is drained below; their results would
            # only be picked up on a later chunk -- confirm this is intended.
            thread.join(timeout=2)
        # Drain and yield whatever results are available for this chunk.
        while not queue.empty():
            ip_info = queue.get()
            yield ip_info
def generate_serialized_results(query, output='json'):
    """Simple helper to generate usable output from a pynamo query.

    :param query: a pynamo query that returned a generator
    :type query: generator
    :param output: format of the output: 'json', 'bytes', or None for the
        raw serialized dict
    :type output: str
    :returns: a dictionary generator of serialized pynamodb objects
    :rtype: generator
    """
    for result in query:
        obj = result.serialize
        if output == 'json':
            yield '{}\n'.format(json.dumps(obj, indent=4))
        elif output == 'bytes':
            yield '{}\n'.format(json.dumps(obj, indent=4)).encode('utf-8')
        elif output is None:
            yield obj
def assembler(string):
    """Turn a truncated class a/b/c range ("192", "192.168", "192.168.0")
    into a (CIDR, class name) tuple, e.g. "192" -> ("192.0.0.0/8", "class_a").

    :raises ValueError: if the input does not have 1 to 3 octets.
    """
    class_range = ['class_c', 'class_b', 'class_a']
    missing = 4 - len(string.split('.'))
    # Previously a 4-octet input crashed with ZeroDivisionError; fail clearly.
    if not 1 <= missing <= 3:
        raise ValueError('expected 1 to 3 octets, got %r' % string)
    log.debug('building cidr with %s missing dots', missing)
    for dots in range(0, missing):
        string += '.0'
    # Prefix length is 8 bits per octet actually provided (fixes the old
    # int(24/missing) formula, which gave /12 instead of /16 for class B).
    prefix = 8 * (4 - missing)
    return (string + '/' + str(prefix), class_range[missing - 1])
def get_filter(string):
    """Split a filter expression into (filter, negated).

    A leading '!' negates the filter: '!foo' -> ('foo', True).
    Uses str.startswith instead of the old `string[0] is '!'`, which relied
    on CPython string interning and raised IndexError on empty input.
    """
    if string.startswith('!'):
        return (string[1:], True)
    return (string, False)
|
ebu/PlugIt | examples/standalone_proxy/plugIt/management/commands/check_mail.py | Python | bsd-3-clause | 58 | 0 | from plugit_proxy.management.comm | ands.check_mail impor | t *
|
nimadini/Teammate | handlers/stat.py | Python | apache-2.0 | 1,093 | 0.002745 | __author__ = 'stanley'
import webapp2
from domain.statistics.statistics import *
from domain.statistics.entity import Entity
import json
from domain.doc_ | index import *
class StatHandler(webapp2.RequestHandler):
    """Bootstraps the global Teammate statistics entity on first access."""

    def get(self):
        qry = Statistics.query(Statistics.id == 'Teammate_Statistics')
        if qry.get() is not None:
            # Already bootstrapped: nothing to create.
            self._respond(False)
            return
        # First access ever: create the singleton statistics entity with
        # zeroed counters for every degree level.
        statistics = Statistics(key=statistics_key('my_stat'))
        statistics.id = 'Teammate_Statistics'
        statistics.BS = Entity(price=0.0, number=0)
        statistics.BA = Entity(price=0.0, number=0)
        statistics.MS = Entity(price=0.0, number=0)
        statistics.MA = Entity(price=0.0, number=0)
        statistics.PhD = Entity(price=0.0, number=0)
        statistics.put()
        self._respond(True)

    def _respond(self, first_access):
        # Both branches of get() emit the same JSON shape; factored out here.
        self.response.headers['Content-Type'] = 'application/json'
        result = json.dumps({'First Access': first_access})
        self.response.write(result)
clever-crow-consulting/otm-core | opentreemap/treemap/search.py | Python | agpl-3.0 | 15,113 | 0.000066 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from json import loads
from datetime import datetime
from functools import partial
from django.db.models import Q
from django.contrib.gis.measure import Distance
from django.contrib.gis.geos import Point
from opentreemap.util import dotted_split
from treemap.lib.dates import DATETIME_FORMAT
from treemap.models import Boundary, Tree, Plot, Species, TreePhoto
from treemap.udf import UDFModel, UserDefinedCollectionValue
from treemap.util import to_object_name
class ParseException(Exception):
    """Raised when a search filter string cannot be parsed."""
    def __init__(self, message):
        # Fixed: the original called super(Exception, self), skipping
        # Exception itself in the MRO; pass the defining class instead.
        super(ParseException, self).__init__(message)
        # Keep the message reachable as an attribute (py2-style API).
        self.message = message
class ModelParseException(ParseException):
    """Raised when a filter key references an unknown or invalid model name."""
    pass
DEFAULT_MAPPING = {'plot': '',
'bioswale': '',
'rainGarden': '',
'rainBarrel': '',
'tree': 'tree__',
'species': 'tree__species__',
'treePhoto': ' | tree__treephoto__',
'mapFeaturePhoto': 'mapfeaturephoto__',
'mapFeature': ''}
TREE_MAPPING = {'plot': 'plot__',
'tree': '',
'species': 'species__',
'treePhoto': 'treephoto__',
'mapFeaturePhoto': 'treephoto__',
'mapFeature': 'plot__'}
PLOT_RELATED_MODELS = {Plot, Tree, Species, TreePhoto}
MAP_FEATURE_RELATED_NAMES = {'mapFeature', 'mapFeaturePhoto'}
class Filter(object):
    """A parsed search filter bound to a treemap instance.

    Wraps a raw JSON filter string plus an optional JSON display filter and
    turns them into pruned Django querysets for a given model class.
    """

    def __init__(self, filterstr, displaystr, instance):
        # Raw JSON filter string; parsed lazily by create_filter().
        self.filterstr = filterstr
        # displaystr may be None/empty, meaning "no display filter".
        self.display_filter = loads(displaystr) if displaystr else None
        self.instance = instance

    def get_objects(self, ModelClass):
        """Return a queryset of ModelClass rows matching this filter.

        Returns an empty queryset when the display filter excludes the model
        or when the filter references models that cannot be joined from
        ModelClass.
        """
        # Filter out invalid models
        model_name = ModelClass.__name__

        # This is a special case when we're doing 'tree-centric'
        # searches for eco benefits. Trees essentially count
        # as plots for the purposes of pruning
        if model_name == 'Tree':
            model_name = 'Plot'

        if not _model_in_display_filters(model_name, self.display_filter):
            return ModelClass.objects.none()

        # Tree-rooted querysets require different ORM join prefixes.
        if ModelClass == Tree:
            mapping = TREE_MAPPING
        else:
            mapping = DEFAULT_MAPPING

        q = create_filter(self.instance, self.filterstr, mapping)

        if model_name == 'Plot':
            q = _apply_tree_display_filter(q, self.display_filter, mapping)

        # basekeys records which search models the filter touched; if any
        # cannot be reached from ModelClass the result must be empty.
        models = q.basekeys

        if _is_valid_models_list_for_model(models, model_name, ModelClass,
                                           self.instance):
            queryset = ModelClass.objects.filter(q)
        else:
            queryset = ModelClass.objects.none()

        return queryset

    def get_object_count(self, ModelClass):
        """Return the number of ModelClass rows matching this filter."""
        return self.get_objects(ModelClass).count()
def _is_valid_models_list_for_model(models, model_name, ModelClass, instance):
    """Validates everything in models are valid filters for model_name.

    `models` is the set of search-model names (basekeys) the filter touched;
    the function removes every name reachable from ModelClass and returns
    True when nothing unexplained remains.
    """
    def collection_udf_set_for_model(Model):
        # NOTE(review): this checks ModelClass (the queryset's model), not the
        # Model being iterated -- looks intentional but worth confirming.
        if not issubclass(ModelClass, UDFModel):
            # Must be an empty *set*: the caller subtracts this result from a
            # set, and `set - dict` raises TypeError (the original returned {},
            # which is a dict).
            return set()

        if hasattr(Model, 'instance'):
            fake_model = Model(instance=instance)
        else:
            fake_model = Model()

        return set(fake_model.collection_udfs_search_names())

    # MapFeature is valid for all models
    models = models - MAP_FEATURE_RELATED_NAMES

    object_name = to_object_name(model_name)
    models = models - {object_name}

    if model_name == 'Plot':
        related_models = PLOT_RELATED_MODELS
    else:
        related_models = {ModelClass}

    for Model in related_models:
        models = models - {to_object_name(Model.__name__)}
        if issubclass(Model, UDFModel):
            models = models - collection_udf_set_for_model(Model)

    return len(models) == 0
class FilterContext(Q):
    """A Django ``Q`` that additionally tracks which search models
    ('basekeys') the filter references."""

    def __init__(self, *args, **kwargs):
        if 'basekey' in kwargs:
            self.basekeys = {kwargs.pop('basekey')}
        else:
            self.basekeys = set()

        super(FilterContext, self).__init__(*args, **kwargs)

    # TODO: Nothing uses add, is it necessary?
    def add(self, thing, conn):
        if thing.basekeys:
            self.basekeys = self.basekeys.union(thing.basekeys)

        return super(FilterContext, self).add(thing, conn)
def create_filter(instance, filterstr, mapping):
    """
    A filter is a string that must be valid json and conform to
    the following grammar:
        literal        = json literal | GMT date string
                                        in 'YYYY-MM-DD HH:MM:SS'
        model          = 'plot' | 'tree' | 'species'
        value-property = 'MIN' | 'MAX' | 'EXCLUSIVE' | 'IN' | 'IS'
                       | 'WITHIN_RADIUS' | 'IN_BOUNDARY' | 'LIKE' | 'ISNULL'
        combinator     = 'AND' | 'OR'
        predicate      = { model.field: literal }
                       | { model.field: { (value-property: literal)* }}
        filter         = predicate
                       | [combinator, filter*, literal?]

    mapping lets the caller focus the search on a particular object group.

    Returns a Q object that can be applied to a model of your choice
    """
    if filterstr not in (None, ''):
        q = _parse_filter(loads(filterstr), mapping)
    else:
        q = FilterContext()

    # Always scope the query to the instance when one is given.
    if instance:
        q = q & FilterContext(instance=instance)

    return q
def _parse_filter(query, mapping):
    # Dicts are leaf predicates; lists have the form
    # [combinator, sub-filter, ...].
    if type(query) is dict:
        return _parse_predicate(query, mapping)
    if type(query) is list:
        sub_filters = [_parse_filter(part, mapping) for part in query[1:]]
        return _apply_combinator(query[0], sub_filters)
def _parse_predicate(query, mapping):
    """AND together one sub-filter per key/value pair in `query`."""
    predicates = [_parse_predicate_pair(key, value, mapping=mapping)
                  for key, value in query.iteritems()]
    return _apply_combinator('AND', predicates)
def _parse_predicate_key(key, mapping):
    """Split a 'model.field' key and translate it to an ORM lookup path."""
    model, field = dotted_split(
        key, 2,
        failure_format_string='Keys must be in the form of "model.field", not "%s"',
        cls=ParseException)

    if _is_udf(model):
        # Collection UDF keys carry the mapping model in their middle
        # segment and always join through the id column.
        __, mapping_model, __ = model.split(':')
        field = 'id'
    else:
        mapping_model = model

    if mapping_model not in mapping:
        raise ModelParseException(
            'Valid models are: %s or a collection UDF, not "%s"' %
            (mapping.keys(), model))

    return model, mapping[mapping_model] + field
def _parse_value(value):
    """
    A value can be either:
    * A date
    * A literal
    * A list of other values
    """
    if type(value) is list:
        return [_parse_value(item) for item in value]

    # Anything that parses as a GMT timestamp becomes a datetime; every
    # other literal passes through untouched.
    try:
        return datetime.strptime(value, DATETIME_FORMAT)
    except (ValueError, TypeError):
        return value
def _parse_min_max_value_fn(operator):
"""
returns a function that produces singleton
dictionary of django operands for the given
query operator.
"""
def fn(predicate_value, field=None):
# a min/max predicate can either take
# a value or a dictionary that provides
# a VALUE and EXCLUSIVE flag.
if type(predicate_value) == dict:
raw_value = predicate_value.get('VALUE')
exclusive = predicate_value.get('EXCLUSIVE')
else:
raw_value = predicate_value
exclusive = False
if exclusive:
key = operator
else:
# django use lt/lte and gt/gte
# to handle inclusive/exclusive
key = operator + 'e'
value = _parse_value(raw_value)
if field: # implies hstore
if isinstance(value, datetime):
date_value = value.date().isoformat()
inner_value = {field: date_value}
else:
raise ParseException("Cannot perform min/max comparisons on "
"non-date hstore fiel |
tethysplatform/TethysCluster | tethyscluster/azureutils.py | Python | lgpl-3.0 | 56,078 | 0.002782 | # Copyright 2009-2014 Justin Riley
#
# This file is part of TethysCluster.
#
# TethysCluster is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# TethysCluster is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with TethysCluster. If not, see <http://www.gnu.org/licenses/>.
"""
Azure Utility Classes
"""
import os
import re
import time
import base64
import string
import tempfile
import fnmatch
from azure import *
from azure.servicemanagement import *
from pprint import pprint
from tethyscluster import image
from tethyscluster import utils
from tethyscluster import static
from tethyscluster import spinner
from tethyscluster import sshutils
from tethyscluster import webtools
from tethyscluster import exception
from tethyscluster import progressbar
from tethyscluster.utils import print_timing
from tethyscluster.logger import log
class EasyAzure(object):
    """Lazily creates and caches an authenticated Azure connection."""

    def __init__(self, subscription_id, certificate_path,
                 connection_authenticator, **kwargs):
        """
        Create an EasyAzure object.

        Requires an Azure subscription id, the path to that subscription's
        management certificate, and a connection_authenticator callable that
        returns an authenticated Azure service-management connection object.

        kwargs are passed to the connection_authenticator's constructor
        """
        self.subscription_id = subscription_id
        self.certificate_path = certificate_path
        self.connection_authenticator = connection_authenticator
        self._conn = None
        self._subscription_name = None
        self._kwargs = kwargs

    def reload(self):
        # Drop the cached connection and re-authenticate on next access.
        self._conn = None
        return self.conn

    @property
    def conn(self):
        # Authenticate lazily and cache the result until reload() is called.
        if self._conn is None:
            log.debug('creating self._conn w/ connection_authenticator ' +
                      'kwargs = %s' % self._kwargs)
            # The block below is SSL-validation logic inherited from the
            # boto/EC2 version of this class; kept for reference.
            # validate_certs = self._kwargs.get('validate_certs', True)
            # if validate_certs:
            #     # if not HAVE_HTTPS_CONNECTION:
            #     raise exception.AWSError(
            #         "Failed to validate AWS SSL certificates. "
            #         "SSL certificate validation is only supported "
            #         "on Python>=2.6.\n\nSet AWS_VALIDATE_CERTS=False in "
            #         "the [aws info] section of your config to skip SSL "
            #         "certificate verification and suppress this error AT "
            #         "YOUR OWN RISK.")
            # if not boto_config.has_section('Boto'):
            #     boto_config.add_section('Boto')
            # # Hack to get around the fact that boto ignores validate_certs
            # # if https_validate_certificates is declared in the boto config
            # boto_config.setbool('Boto', 'https_validate_certificates',
            #                     validate_certs)
            self._conn = self.connection_authenticator(
                self.subscription_id, self.certificate_path,
                **self._kwargs)
            # self._conn.https_validate_certificates = validate_certs
        return self._conn

    @property
    def subscription_name(self):
        # NOTE(review): this computes and caches the human-readable
        # subscription name, but then returns base64(subscription_id)
        # instead of the cached value -- confirm which one callers expect.
        if not self._subscription_name:
            subscription_name = self.conn.get_subscription().subscription_name.replace(' ', '-')
            self._subscription_name = subscription_name
        return base64.b64encode(self.subscription_id)
class EasySMS(EasyAzure):
    def __init__(self, subscription_id, certificate_path,
                 host=None, request_session=None, location='West US', **kwargs):
        """Service-management wrapper for one Azure subscription/location.

        NOTE(review): `host` is accepted but never used -- confirm whether
        it was meant to be forwarded to the connection.
        """
        kwds = dict(request_session=request_session)
        super(EasySMS, self).__init__(subscription_id, certificate_path,
                                      azure.servicemanagement.ServiceManagementService, **kwds)
        # A pre-built connection may be injected (e.g. for testing).
        self._conn = kwargs.get('connection')
        # S3 wiring from the boto/EC2 original, kept for reference:
        # kwds = dict(aws_s3_host=aws_s3_host, aws_s3_path=aws_s3_path,
        #             aws_port=aws_port, aws_is_secure=aws_is_secure,
        #             aws_proxy=aws_proxy, aws_proxy_port=aws_proxy_port,
        #             aws_proxy_user=aws_proxy_user,
        #             aws_proxy_pass=aws_proxy_pass,
        #             aws_validate_certs=aws_validate_certs)
        # self.s3 = EasyS3(aws_access_key_id, aws_secret_access_key, **kwds)
        self._regions = None
        # Resolve `location` against the subscription's available locations;
        # raises RegionDoesNotExist for unknown names.
        self._region = self.get_region(location)
        self._account_attrs = None
        self._account_attrs_region = None
    def __repr__(self):
        # Summarize as the bound region's name plus its available services.
        return '<EasySMS: %s (%s)>' % (self.region.name, ' '.join(self.region.available_services))
def _fetch_account_attrs(self):
raise NotImplementedError()
@property
def supported_platforms(self):
raise NotImplementedError()
@property
def default_vpc(self):
raise NotImplementedError()
    def connect_to_region(self, region_name):
        """
        Connects to a given region if it exists, raises RegionDoesNotExist
        otherwise. Once connected, this object will return only data from the
        given region.
        """
        self._region = self.get_region(region_name)
        # NOTE(review): _platforms and _default_vpc are reset here but are
        # never initialized in __init__ -- confirm these attributes are
        # still used anywhere.
        self._platforms = None
        self._default_vpc = None
        self.reload()
        return self
    @property
    def region(self):
        """
        Returns the current Azure location used by this EasySMS object
        """
        return self._region
    @property
    def regions(self):
        """
        This property returns all Azure Locations, caching the results the first
        time a request is made to Azure
        """
        if not self._regions:
            self._regions = {}
            # One service-management call; results are keyed by location name.
            regions = self.conn.list_locations()
            for region in regions:
                self._regions[region.name] = region
        return self._regions
def get_region(self, region_name):
"""
Returns Azure Location object if it exists, raises RegionDoesNotExist
otherwise.
"""
if region_name not in self.regions:
raise exception.RegionDoesNotExist(region_name)
return self.regions.get(region_name)
    def list_regions(self):
        """
        Print name/services for all Azure locations
        """
        # Python 2 print statements; entries sorted by name, descending.
        regions = self.regions.items()
        regions.sort(reverse=True)
        for name, region in regions:
            print 'name: ', name
            print 'services: ', ', '.join(region.available_services)
            print
@property
def registered_images(self):
raise NotImplementedError()
@property
def executable_images(self):
raise NotImplementedError()
def get_registered_image(self, image_id):
raise NotImplementedError()
def _wait_for_group_deletion_propagation(self, group):
raise NotImplementedError()
def get_subnet(self, subnet_id):
raise NotImplementedError()
def get_subnets(self, filters=None):
raise NotImplementedError()
def get_internet_gateways(self, filters=None):
raise NotImplementedError()
def get_route_tables(self, filters=None):
raise NotImplementedError()
def get_network_spec(self, *args, **kwargs):
raise NotImplementedError()
def get_network_collection(self, *args, **kwargs):
raise NotImplementedError()
def delete_group(self, group, max_retries=60, retry_delay=5):
"""
This method deletes a security or placement group using group.delete()
but in the case that group.delete() throws a DependencyViolation error
or InvalidPlacementGroup.InUse error it will keep retrying until it's
successful. Waits 5 seconds between each retry.
"""
|
jhseu/tensorflow | tensorflow/python/kernel_tests/lookup_ops_test.py | Python | apache-2.0 | 133,760 | 0.010167 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lookup ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
import six
from tensorflow.python import tf2
from tensorflow.python.client import session
from tensorflow.python.data.experimental.ops import counter
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import saver
from tensorflow.python.training import server_lib
from tensorflow.python.training.tracking import graph_view
from tensorflow.python.training.tracking import tracking
from tensorflow.python.training.tracking import util as trackable
from tensorflow.python.util import compat
class BaseLookupTableTest(test.TestCase):
  """Shared helpers that pick TF1- or TF2-style lookup table classes."""

  def getHashTable(self):
    # TF2 exposes StaticHashTable; TF1 graphs need the V1 variant.
    if tf2.enabled():
      return lookup_ops.StaticHashTable
    return lookup_ops.StaticHashTableV1

  def getVocabularyTable(self):
    if tf2.enabled():
      return lookup_ops.StaticVocabularyTable
    return lookup_ops.StaticVocabularyTableV1

  def initialize_table(self, table):
    # TF2 tables initialize eagerly on construction; only TF1-style tables
    # need their initializer run explicitly.
    if not tf2.enabled():
      self.evaluate(table.initializer)
class StaticHashTableTest(BaseLookupTableTest):
  def testStaticHashTable(self):
    """Basic construct/lookup/export round-trip for a static hash table."""
    default_val = -1
    keys = constant_op.constant(["brain", "salad", "surgery"])
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    table = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    self.initialize_table(table)

    self.assertAllEqual(3, self.evaluate(table.size()))

    input_string = constant_op.constant(["brain", "salad", "tank"])
    output = table.lookup(input_string)
    self.assertAllEqual([3], output.get_shape())

    result = self.evaluate(output)
    # "tank" is not a key, so it maps to the default value (-1).
    self.assertAllEqual([0, 1, -1], result)

    exported_keys_tensor, exported_values_tensor = table.export()

    # Export order is unspecified, so compare as multisets.
    self.assertItemsEqual([b"brain", b"salad", b"surgery"],
                          self.evaluate(exported_keys_tensor))
    self.assertItemsEqual([0, 1, 2], self.evaluate(exported_values_tensor))
  def testStaticHashTableFindHighRank(self):
    """Lookup with a rank-2 key tensor preserves the input shape."""
    default_val = -1
    keys = constant_op.constant(["brain", "salad", "surgery"])
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    table = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    self.initialize_table(table)

    self.assertAllEqual(3, self.evaluate(table.size()))

    input_string = constant_op.constant([["brain", "salad"],
                                         ["tank", "tarkus"]])
    output = table.lookup(input_string)

    result = self.evaluate(output)
    # Unknown keys ("tank", "tarkus") map to the default value.
    self.assertAllEqual([[0, 1], [-1, -1]], result)
  def testStaticHashTableInitWithPythonArrays(self):
    """Table construction accepts plain Python lists for keys and values."""
    default_val = -1
    keys = ["brain", "salad", "surgery"]
    values = [0, 1, 2]
    table = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(
            keys, values, value_dtype=dtypes.int64), default_val)
    self.initialize_table(table)

    self.assertAllEqual(3, self.evaluate(table.size()))

    input_string = constant_op.constant(["brain", "salad", "tank"])
    output = table.lookup(input_string)

    result = self.evaluate(output)
    self.assertAllEqual([0, 1, -1], result)
def testStaticHashTableInitWithNumPyArrays(self):
default_val = -1
keys = np.array(["brain", "salad", "surgery"], dtype=np.str)
values = np.array([0, 1, 2], dtype=np.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
  def testMultipleStaticHashTables(self):
    """Several independent tables can coexist and be queried separately."""
    default_val = -1
    keys = constant_op.constant(["brain", "salad", "surgery"])
    values = constant_op.constant([0, 1, 2], dtypes.int64)

    table1 = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    table2 = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    table3 = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    self.initialize_table(table1)
    self.initialize_table(table2)
    self.initialize_table(table3)
    self.assertAllEqual(3, self.evaluate(table1.size()))
    self.assertAllEqual(3, self.evaluate(table2.size()))
    self.assertAllEqual(3, self.evaluate(table3.size()))

    input_string = constant_op.constant(["brain", "salad", "tank"])
    output1 = table1.lookup(input_string)
    output2 = table2.lookup(input_string)
    output3 = table3.lookup(input_string)

    # All three tables share the same data, so results must agree.
    out1, out2, out3 = self.evaluate([output1, output2, output3])
    self.assertAllEqual([0, 1, -1], out1)
    self.assertAllEqual([0, 1, -1], out2)
    self.assertAllEqual([0, 1, -1], out3)
  def testStaticHashTableWithTensorDefault(self):
    """The default value may be supplied as a tensor, not just a Python int."""
    default_val = constant_op.constant(-1, dtypes.int64)
    keys = constant_op.constant(["brain", "salad", "surgery"])
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    table = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    self.initialize_table(table)

    input_string = constant_op.constant(["brain", "salad", "tank"])
    output = table.lookup(input_string)

    result = self.evaluate(output)
    self.assertAllEqual([0, 1, -1], result)
  def testStaticHashTableWithSparseTensorInput(self):
    """Lookup on a SparseTensor maps values and keeps indices/shape intact."""
    default_val = constant_op.constant(-1, dtypes.int64)
    keys = constant_op.constant(["brain", "salad", "surgery"])
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    table = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    self.initialize_table(table)

    sp_indices = [[0, 0], [0, 1], [1, 0]]
    sp_shape = [2, 2]
    input_tensor = sparse_tensor.SparseTensor(
        constant_op.constant(sp_indices, dtypes.int64),
        constant_op.constant(["brain", "salad", "tank"]),
        constant_op.constant(sp_shape, dtypes.int64))
    output = table.lookup(input_tensor)

    out_indices, out_values, out_shape = self.evaluate(output)

    # Only the values are transformed; the sparsity structure is unchanged.
    self.assertAllEqual([0, 1, -1], out_values)
    self.assertAllEqual(sp_indices, out_indices)
    self.assertAllEqual(sp_shape, out_shape)
def testSignatureMismatch(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable( |
makinacorpus/Geotrek | geotrek/trekking/tests/test_filters.py | Python | bsd-2-clause | 618 | 0.001618 | from geotrek.land.tests.test_filters import LandFiltersTest
from geotrek.trekking.filters import TrekFilterSet
from geotrek.trekking.factories import TrekFactory
class TrekFilterLandTest(LandFiltersTest):
    """Runs the shared land-filter test suite against TrekFilterSet."""

    # Filter class the inherited LandFiltersTest cases exercise.
    filterclass = TrekFilterSet

    def test_land_filters_are_well_setup(self):
        # The land mixin must contribute its 'work' filter to trek filters.
        filterset = TrekFilterSet()
        self.assertIn('work', filterset.filters)

    def create_pair_of_distinct_path(self):
        # Extend the base fixture with trek topologies on the sought path.
        useless_path, seek_path = super().create_pair_of_distinct_path()
        self.create_pair_of_distinct_topologies(TrekFactory, useless_path, seek_path)
        return useless_path, seek_path
plotly/python-api | packages/python/plotly/plotly/graph_objs/bar/marker/_colorbar.py | Python | mit | 69,490 | 0.000964 | from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class ColorBar(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "bar.marker"
_path_str = "bar.marker.colorbar"
_valid_props = {
"bgcolor",
"bordercolor",
"borderwidth",
"dtick",
"exponentformat",
"len",
"lenmode",
"nticks",
"outlinecolor",
"outlinewidth",
"separatethousands",
"showexponent",
"showticklabels",
"showtickprefix",
"showticksuffix",
"thickness",
"thicknessmode",
"tick0",
"tickangle",
"tickcolor",
"tickfont",
"tickformat",
"tickformatstopdefaults",
"tickformatstops",
"ticklen",
"tickmode",
"tickprefix",
"ticks",
"ticksuffix",
"ticktext",
"ticktextsrc",
"tickvals",
"tickvalssrc",
"tickwidth",
"title",
"titlefont",
"titleside",
"x",
"xanchor",
"xpad",
"y",
"yanchor",
"ypad",
}
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the color of padded area.
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
| darkslateblue, d | arkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bgcolor"]
@bgcolor.setter
def bgcolor(self, val):
self["bgcolor"] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the axis line color.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bordercolor"]
@bordercolor.setter
def bordercolor(self, val):
self["bordercolor"] = val
# borderwidth
# -----------
@property
def borderwidth(self):
"""
Sets the width (in px) or the border enclosing this color bar.
The 'borderwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["borderwidth"]
@borderwidth.setter
def borderwidth(self, val):
self["borderwidth"] = val
# dtick
# -----
@property
def dtick(self):
"""
Sets the step in-between ticks on this axis. Use with `tick0`.
Must be a positive number, or special strings available to
"log" and "date" axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick number. For
example, to set a tick mark at 1, 10, 100, 1000, ... set dtick
to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2.
To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special values;
"L<f>", where `f` is a positive number, gives ticks linearly
spaced in value (but not position). For example `tick0` = 0.1,
`dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. To
show powers of 10 plus small digits between, use "D1" (all
digi |
deepmind/open_spiel | open_spiel/python/environments/cliff_walking.py | Python | apache-2.0 | 6,127 | 0.004244 | # Copyright 2019 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A cliff walking single agent reinforcement learning environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from open_spiel.python import rl_environment
# Actions
# Grid-movement action ids; `Environment.step` treats any other integer as
# "stay in place".
RIGHT, UP, LEFT, DOWN = range(4)
class Environment(object):
r"""A cliff walking reinforcement learning environment.
This is a deterministic environment that can be used to test RL algorithms.
Note there are *no illegal moves* in this environment--if the agent is on the
edge of the cliff and takes an action which would yield an invalid position,
the action is ignored (as if there were walls surrounding the cliff).
Cliff example for height=3 and width=5:
| | | | | |
| | | | | |
| S | x | x | x | G |
where `S` is always the starting position, `G` is always the goal and `x`
represents the zone of high negative reward to be avoided. For this instance,
the optimum policy is depicted as follows:
| | | | | |
|-->|-->|-->|-->|\|/|
|/|\| x | x | x | G |
yielding a reward of -6 (minus 1 per time step).
See pages 132 of Rich Sutton's book for details:
http://www.incompleteideas.net/book/bookdraft2018mar21.pdf
"""
def __init__(self, height=4, width=8, discount=1.0, max_t=100):
if height < 2 or width < 3:
raise ValueError("height must be >= 2 and width >= 3.")
self._height = height
self._width = width
self._legal_actions = [RIGHT, UP, LEFT, DOWN]
self._should_reset = True
self._max_t = max_t
# Discount returned at non-initial steps.
self._discounts = [discount] * self.num_players
def reset(self):
"""Resets the environment."""
self._should_reset = False
self._time_counter = 0
self._state = np.array([self._height - 1, 0])
observations = {
"info_state": [self._state.copy()],
"legal_actions": [self._legal_actions],
"current_player": 0,
}
return rl_environment.TimeStep(
observations=observations,
rewards=None,
discounts=None,
step_type=rl_environment.StepType.FIRST)
def step(self, actions):
"""Updates the environment according to `actions` and returns a `TimeStep`.
Args:
actions: A singleton list with an integer, or an integer, representing the
action the agent took.
Returns:
A `rl_environment.TimeStep` namedtuple containing:
observation: singleton list of dicts containing player observations,
each corresponding to `observation_spec()`.
reward: singleton list containing the reward at this | timestep, or None
if step_type is `rl_environment.StepType.FIRST`.
discount: singleton list containing the discount in the range [0, 1], or
None if step_type is `rl_environment.StepType.FIRST`.
step_type: A `rl_environment.StepType` value.
"""
if self._should_reset:
return self.reset()
self._time_counter += 1
if isinstance(actions, list):
action = actions[0]
elif isinstance(actions, int):
action = actions
e | lse:
raise ValueError("Action not supported.", actions)
dx = 0
dy = 0
if action == LEFT:
dx -= 1
elif action == RIGHT:
dx += 1
if action == UP:
dy -= 1
elif action == DOWN:
dy += 1
self._state += np.array([dy, dx])
self._state = self._state.clip(0, [self._height - 1, self._width - 1])
done = self._is_pit(self._state) or self._is_goal(self._state)
done = done or self._time_counter >= self._max_t
# Return observation
step_type = (
rl_environment.StepType.LAST if done else rl_environment.StepType.MID)
self._should_reset = step_type == rl_environment.StepType.LAST
observations = {
"info_state": [self._state.copy()],
"legal_actions": [self._legal_actions],
"current_player": 0,
}
return rl_environment.TimeStep(
observations=observations,
rewards=[self._get_reward(self._state)],
discounts=self._discounts,
step_type=step_type)
def _is_goal(self, pos):
"""Check if position is bottom right corner of grid."""
return pos[0] == self._height - 1 and pos[1] == self._width - 1
def _is_pit(self, pos):
"""Check if position is in bottom row between start and goal."""
return (pos[1] > 0 and pos[1] < self._width - 1 and
pos[0] == self._height - 1)
def _get_reward(self, pos):
if self._is_pit(pos):
return -100.0
else:
return -1.0
def observation_spec(self):
    """Defines the observation provided by the environment.

    Each dict member will contain its expected structure and shape.

    Returns:
      A specification dict describing the observation fields and shapes.
    """
    return {
        "info_state": (2,),
        "legal_actions": (len(self._legal_actions),),
        "current_player": (),
    }
def action_spec(self):
    """Defines action specifications.

    Specifications include action boundaries and their data type.

    Returns:
      A specification dict containing action properties.
    """
    legal = self._legal_actions
    return {
        "num_actions": len(legal),
        "min": min(legal),
        "max": max(legal),
        "dtype": int,
    }
@property
def num_players(self):
    # Single-agent environment: exactly one player.
    return 1
@property
def is_turn_based(self):
    # Single-agent environment; the turn-based distinction does not apply.
    return False
|
ojengwa/migrate | ibu/schema.py | Python | mit | 12,646 | 0.001502 | from __future__ import unicode_literals
import keyword
import re
from collections import OrderedDict
from click import BaseCommand, B | aseException
from config import DEFAULT_DB_ALIAS
from ibu import connection as connections
class Command(BaseCommand):
help = "Introspects the database tables in the given database and outputs a Django model module."
requires_system_checks = False
db_module = 'ibu.backends'
def add_arguments(self, parser):
    """Register the --database option on the command's argument parser."""
    parser.add_argument('--database', action='store', dest='database',
                        default=DEFAULT_DB_ALIAS, help='Nominates a database to '
                        'introspect. Defaults to using the "default" database.')
def handle(self, **options):
    """Stream the generated model module to stdout, one line at a time."""
    try:
        for line in self.handle_inspection(options):
            self.stdout.write("%s\n" % line)
    except NotImplementedError:
        # NOTE(review): BaseException here is the name imported from click at
        # the top of the file, not the builtin — confirm click actually
        # exports it; otherwise this raise would itself fail.
        raise BaseException(
            "Database inspection isn't supported for the currently selected database backend.")
def handle_inspection(self, options):
    """Yield, line by line, the source of a models module describing the
    schema of the database selected by options['database']."""
    connection = connections[options['database']]
    # 'table_name_filter' is a stealth option
    table_name_filter = options.get('table_name_filter')

    def table2model(table_name):
        # Strip non-alphanumerics from the title-cased table name:
        # "my_table" -> "MyTable".
        return re.sub(r'[^a-zA-Z0-9]', '', table_name.title())

    def strip_prefix(s):
        # Drop the leading "u" from repr() of unicode strings (Python 2).
        return s[1:] if s.startswith("u'") else s

    with connection.cursor() as cursor:
        # Module preamble of the generated file.
        yield "# This is an auto-generated Django model module."
        yield "# You'll have to do the following manually to clean this up:"
        yield "# * Rearrange models' order"
        yield "# * Make sure each model has one field with primary_key=True"
        yield "# * Make sure each ForeignKey has `on_delete` set to the desired behavior."
        yield (
            "# * Remove `managed = False` lines if you wish to allow "
            "Django to create, modify, and delete the table"
        )
        yield "# Feel free to rename the models, but don't rename db_table values or field names."
        yield "from __future__ import unicode_literals"
        yield ''
        yield 'from %s import models' % self.db_module
        known_models = []
        for table_name in connection.introspection.table_names(cursor):
            if table_name_filter is not None and callable(table_name_filter):
                if not table_name_filter(table_name):
                    continue
            yield ''
            yield ''
            yield 'class %s(models.Model):' % table2model(table_name)
            known_models.append(table2model(table_name))
            # Backends that cannot introspect a given aspect simply
            # contribute an empty mapping.
            try:
                relations = connection.introspection.get_relations(
                    cursor, table_name)
            except NotImplementedError:
                relations = {}
            try:
                indexes = connection.introspection.get_indexes(
                    cursor, table_name)
            except NotImplementedError:
                indexes = {}
            try:
                constraints = connection.introspection.get_constraints(
                    cursor, table_name)
            except NotImplementedError:
                constraints = {}
            # Holds column names used in the table so far
            used_column_names = []
            # Maps column names to names of model fields
            column_to_field_name = {}
            for row in connection.introspection.get_table_description(cursor, table_name):
                # Holds Field notes, to be displayed in a Python comment.
                comment_notes = []
                # Holds Field parameters such as 'db_column'.
                extra_params = OrderedDict()
                column_name = row[0]
                is_relation = column_name in relations
                att_name, params, notes = self.normalize_col_name(
                    column_name, used_column_names, is_relation)
                extra_params.update(params)
                comment_notes.extend(notes)
                used_column_names.append(att_name)
                column_to_field_name[column_name] = att_name
                # Add primary_key and unique, if necessary.
                if column_name in indexes:
                    if indexes[column_name]['primary_key']:
                        extra_params['primary_key'] = True
                    elif indexes[column_name]['unique']:
                        extra_params['unique'] = True
                if is_relation:
                    rel_to = (
                        "self" if relations[column_name][1] == table_name
                        else table2model(relations[column_name][1])
                    )
                    # Quote the target unless its class was already emitted.
                    if rel_to in known_models:
                        field_type = 'ForeignKey(%s' % rel_to
                    else:
                        field_type = "ForeignKey('%s'" % rel_to
                else:
                    # Calling `get_field_type` to get the field type string and any
                    # additional parameters and notes.
                    field_type, field_params, field_notes = self.get_field_type(
                        connection, table_name, row)
                    extra_params.update(field_params)
                    comment_notes.extend(field_notes)
                    field_type += '('
                # Don't output 'id = meta.AutoField(primary_key=True)', because
                # that's assumed if it doesn't exist.
                if att_name == 'id' and extra_params == {'primary_key': True}:
                    if field_type == 'AutoField(':
                        continue
                    elif field_type == 'IntegerField(' and not connection.features.can_introspect_autofield:
                        comment_notes.append('AutoField?')
                # Add 'null' and 'blank', if the 'null_ok' flag was present in the
                # table description.
                if row[6]:  # If it's NULL...
                    if field_type == 'BooleanField(':
                        field_type = 'NullBooleanField('
                    else:
                        extra_params['blank'] = True
                        extra_params['null'] = True
                field_desc = '%s = %s%s' % (
                    att_name,
                    # Custom fields will have a dotted path
                    '' if '.' in field_type else 'models.',
                    field_type,
                )
                if field_type.startswith('ForeignKey('):
                    field_desc += ', models.DO_NOTHING'
                if extra_params:
                    if not field_desc.endswith('('):
                        field_desc += ', '
                    field_desc += ', '.join(
                        '%s=%s' % (k, strip_prefix(repr(v)))
                        for k, v in extra_params.items())
                field_desc += ')'
                if comment_notes:
                    field_desc += ' # ' + ' '.join(comment_notes)
                yield ' %s' % field_desc
            for meta_line in self.get_meta(table_name, constraints, column_to_field_name):
                yield meta_line
def normalize_col_name(self, col_name, used_column_names, is_relation):
"""
Modify the column name to make it Python-compatible as a field name
"""
field_params = {}
field_notes = []
new_name = col_name.lower()
if new_name != col_name:
field_notes.append('Field name made lowercase.')
if is_relation:
if new_name.endswith('_id'):
new_name = new_name[:-3]
else:
field_params['db_column'] = col_name
|
ajaniv/django-core-models | django_core_models/social_media/views.py | Python | mit | 7,262 | 0 | """
.. module:: django_core_models.social_media.views
:synopsis: django_core_models social_media application views module.
*django_core_models* social_media application views module.
"""
from __future__ import absolute_import
from django_core_utils.views import ObjectListView, ObjectDetailView
from . import models
from . import serializers
class EmailMixin(object):
    """Email mixin class: common queryset and serializer."""
    queryset = models.Email.objects.all()
    serializer_class = serializers.EmailSerializer
class EmailList(EmailMixin, ObjectListView):
    """List all Email instances, or create a new Email instance."""
    pass
class EmailDetail(EmailMixin, ObjectDetailView):
    """Retrieve, update or delete an Email instance."""
    pass
class EmailTypeMixin(object):
    """EmailType mixin class: common queryset and serializer."""
    queryset = models.EmailType.objects.all()
    serializer_class = serializers.EmailTypeSerializer
class EmailTypeList(EmailTypeMixin, ObjectListView):
    """List all EmailType instances, or create a new EmailType instance."""
    pass
class EmailTypeDetail(EmailTypeMixin, ObjectDetailView):
    """Retrieve, update or delete an EmailType instance."""
    pass
class FormattedNameMixin(object):
    """FormattedName mixin class: common queryset and serializer."""
    queryset = models.FormattedName.objects.all()
    serializer_class = serializers.FormattedNameSerializer
class FormattedNameList(FormattedNameMixin, ObjectListView):
    """List all FormattedName instances, or create a new FormattedName instance."""
    pass
class FormattedNameDetail(FormattedNameMixin, ObjectDetailView):
    """Retrieve, update or delete a FormattedName instance."""
    pass
class GroupMixin(object):
    """Group mixin class: common queryset and serializer."""
    queryset = models.Group.objects.all()
    serializer_class = serializers.GroupSerializer
class GroupList(GroupMixin, ObjectListView):
    """List all Group instances, or create a new Group instance."""
    pass
class GroupDetail(GroupMixin, ObjectDetailView):
    """Retrieve, update or delete a Group instance."""
    pass
class InstantMessagingMixin(object):
    """InstantMessaging mixin class: common queryset and serializer."""
    queryset = models.InstantMessaging.objects.all()
    serializer_class = serializers.InstantMessagingSerializer
class InstantMessagingList(InstantMessagingMixin, ObjectListView):
    """List all InstantMessaging instances, or create a new InstantMessaging instance."""
    pass
class InstantMessagingDetail(InstantMessagingMixin, ObjectDetailView):
    """Retrieve, update or delete an InstantMessaging instance."""
    pass
class InstantMessagingTypeMixin(object):
    """InstantMessagingType mixin class: common queryset and serializer."""
    queryset = models.InstantMessagingType.objects.all()
    serializer_class = serializers.InstantMessagingTypeSerializer
class InstantMessagingTypeList(InstantMessagingTypeMixin, ObjectListView):
    """List all InstantMessagingType instances, or create a new InstantMessagingType instance."""
    pass
class InstantMessagingTypeDetail(InstantMessagingTypeMixin, ObjectDetailView):
    """Retrieve, update or delete an InstantMessagingType instance."""
    pass
class LogoTypeMixin(object):
    """LogoType mixin class: common queryset and serializer."""
    queryset = models.LogoType.objects.all()
    serializer_class = serializers.LogoTypeSerializer
class LogoTypeList(LogoTypeMixin, ObjectListView):
    """List all LogoType instances, or create a new LogoType instance."""
    pass
class LogoTypeDetail(LogoTypeMixin, ObjectDetailView):
    """Retrieve, update or delete a LogoType instance."""
    pass
class NameMixin(object):
    """Name mixin class: common queryset and serializer."""
    queryset = models.Name.objects.all()
    serializer_class = serializers.NameSerializer
class NameList(NameMixin, ObjectListView):
    """List all Name instances, or create a new Name instance."""
    pass
class NameDetail(NameMixin, ObjectDetailView):
    """Retrieve, update or delete a Name instance."""
    pass
class NicknameMixin(object):
    """Nickname mixin class: common queryset and serializer."""
    queryset = models.Nickname.objects.all()
    serializer_class = serializers.NicknameSerializer
class NicknameList(NicknameMixin, ObjectListView):
    """List all Nickname instances, or create a new Nickname instance."""
    pass
class NicknameDetail(NicknameMixin, ObjectDetailView):
    """Retrieve, update or delete a Nickname instance."""
    pass
class NicknameTypeMixin(object):
    """NicknameType mixin class: common queryset and serializer."""
    queryset = models.NicknameType.objects.all()
    serializer_class = serializers.NicknameTypeSerializer
class NicknameTypeList(NicknameTypeMixin, ObjectListView):
    """List all NicknameType instances, or create a new NicknameType instance."""
    pass
class NicknameTypeDetail(NicknameTypeMixin, ObjectDetailView):
    """Retrieve, update or delete a NicknameType instance."""
    pass
class PhoneMixin(object):
    """Phone mixin class: common queryset and serializer."""
    queryset = models.Phone.objects.all()
    serializer_class = serializers.PhoneSerializer
class PhoneList(PhoneMixin, ObjectListView):
    """List all Phone instances, or create a new Phone instance."""
    pass
class PhoneDetail(PhoneMixin, ObjectDetailView):
    """Retrieve, update or delete a Phone instance."""
    pass
class PhoneTypeMixin(object):
    """PhoneType mixin class: common queryset and serializer."""
    queryset = models.PhoneType.objects.all()
    serializer_class = serializers.PhoneTypeSerializer
class PhoneTypeList(PhoneTypeMixin, ObjectListView):
    """List all PhoneType instances, or create a new PhoneType instance."""
    pass
class PhoneTypeDetail(PhoneTypeMixin, ObjectDetailView):
    """Retrieve, update or delete a PhoneType instance."""
    pass
class PhotoTypeMixin(object):
    """PhotoType mixin class: common queryset and serializer."""
    queryset = models.PhotoType.objects.all()
    serializer_class = serializers.PhotoTypeSerializer
class PhotoTypeList(PhotoTypeMixin, ObjectListView):
    """List all PhotoType instances, or create a new PhotoType instance."""
    pass
class PhotoTypeDetail(PhotoTypeMixin, ObjectDetailView):
    """Retrieve, update or delete a PhotoType instance."""
    pass
class UrlMixin(object):
    """Url mixin class: common queryset and serializer."""
    queryset = models.Url.objects.all()
    serializer_class = serializers.UrlSerializer
class UrlList(UrlMixin, ObjectListView):
    """List all Url instances, or create a new Url instance."""
    pass
class UrlDetail(UrlMixin, ObjectDetailView):
    """Retrieve, update or delete a Url instance."""
    pass
class UrlTypeMixin(object):
    """UrlType mixin class: common queryset and serializer."""
    queryset = models.UrlType.objects.all()
    serializer_class = serializers.UrlTypeSerializer
class UrlTypeList(UrlTypeMixin, ObjectListView):
    """List all UrlType instances, or create a new UrlType instance."""
    pass
class UrlTypeDetail(UrlTypeMixin, ObjectDetailView):
    """Retrieve, update or delete a UrlType instance."""
    pass
|
amondot/RasterDisplayComposer | RasterDisplayComposer.py | Python | gpl-3.0 | 19,263 | 0.00135 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
RasterDisplayComposer
A QGIS plugin
Compose RGB image display from different bands
-------------------
begin : 2016-02-13
git sha : $Format:%H$
copyright : (C) 2016 by Alexia Mondot
email : contact@mondot.fr
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import QSettings, QTranslator, qVersion, QCoreApplication, Qt, SIGNAL, QObject, QDir
from PyQt4.QtGui import QAction, QIcon, QFileDialog
# Initialize Qt resources from file resources.py
import resources
from qgis.core import QGis, QgsMapLayerRegistry, QgsRasterLayer
import gdal
import osr
from gdalconst import GA_ReadOnly
# Import the code for the DockWidget
from RasterDisplayComposer_dockwidget import RasterDisplayComposerDockWidget
import os.path
from lxml import etree as ET
import logging
import logging.config
# Configure logging up front: INFO and above to stdout, DEBUG and above to a
# small rotating file under /tmp.
logging.config.dictConfig({
    'version': 1,
    'formatters': {
        'verbose': {'format' : '%(asctime)s - %(filename)s - line %(lineno)d - %(module)s:%(funcName)s - %(levelname)s - %(message)s'},
        'console': {'format': '%(asctime)s - %(levelname)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S'}
    },
    'handlers': {
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'console',
            'stream': 'ext://sys.stdout'
        },
        'file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'verbose',
            'filename': "/tmp/RasterDisplayComposer.log",
            'maxBytes': 1048576,  # rotate at 1 MiB
            'backupCount': 3
        }
    },
    'loggers': {
        'default': {
            'level': 'INFO',
            'handlers': ['console', 'file']
        }
    },
    'disable_existing_loggers': False
})
# Module-wide logger; all plugin code logs through the 'default' logger above.
logger = logging.getLogger('default')
class RasterDisplayComposer:
"""QGIS Plugin Implementation."""
def __init__(self, iface):
    """Constructor.

    :param iface: An interface instance that will be passed to this class
        which provides the hook by which you can manipulate the QGIS
        application at run time.
    :type iface: QgsInterface
    """
    # Save reference to the QGIS interface
    self.iface = iface
    # initialize plugin directory
    self.plugin_dir = os.path.dirname(__file__)
    # initialize locale
    locale = QSettings().value('locale/userLocale')[0:2]
    locale_path = os.path.join(
        self.plugin_dir,
        'i18n',
        'RasterDisplayComposer_{}.qm'.format(locale))
    if os.path.exists(locale_path):
        self.translator = QTranslator()
        self.translator.load(locale_path)
        if qVersion() > '4.3.3':
            QCoreApplication.installTranslator(self.translator)
    # Declare instance attributes
    self.actions = []
    self.menu = self.tr(u'&RasterDisplayComposer')
    # TODO: We are going to let the user set this up in a future iteration
    self.toolbar = self.iface.addToolBar(u'RasterDisplayComposer')
    self.toolbar.setObjectName(u'RasterDisplayComposer')
    # print "** INITIALIZING RasterDisplayComposer"
    self.pluginIsActive = False
    self.dockwidget = None
    # Presumably maps layer identifiers to loaded raster layers — confirm.
    self.loaded_raster_layers = {}
    self.dock_is_hidden = True
    self.isLoaded = False
    #self.preLoad()
def preLoad(self):
    """
    Preload the plugin interface: create the dock widget, wire its signals,
    populate it, and dock it hidden so it is ready before first use.
    :return:
    """
    logger.debug("plugin is active: {}".format(self.pluginIsActive))
    if not self.pluginIsActive:
        self.pluginIsActive = True
        # print "** STARTING RasterDisplayComposer"
        # dockwidget may not exist if:
        #    first run of plugin
        #    removed on close (see self.onClosePlugin method)
        if self.dockwidget == None:
            # Create the dockwidget (after translation) and keep reference
            self.dockwidget = RasterDisplayComposerDockWidget()
        # connect to provide cleanup on closing of dockwidget
        self.dockwidget.closingPlugin.connect(self.onClosePlugin)
        # show the dockwidget
        # TODO: fix to allow choice of dock location
        self.iface.addDockWidget(Qt.LeftDockWidgetArea, self.dockwidget)
        self.initDockWidgetSignals()
        self.loadComboBox()
        self.updateLoadedrasterLayers()
        # Start hidden; the dock is revealed later on user demand.
        self.dockwidget.hide()
        self.dock_is_hidden = True
# noinspection PyMethodMayBeStatic
def tr(self, message):
    """Get the translation for a string using Qt translation API.

    We implement this ourselves since we do not inherit QObject.

    :param message: String for translation.
    :type message: str, QString

    :returns: Translated version of message.
    :rtype: QString
    """
    # noinspection PyTypeChecker,PyArgumentList,PyCallByClass
    return QCoreApplication.translate('RasterDisplayComposer', message)
def add_action(
self,
icon_path,
text,
callback,
enabled_flag=True,
add_to_menu=True,
add_to_toolbar=True,
status_tip=None,
whats_this=None,
parent=None):
"""Add a toolbar icon to the toolbar.
:param icon_path: Path to the icon for this action. Can be a resource
path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
:type icon_path: str
:param text: Text that should be shown in menu items for this action.
:type text: str
:param callback: Function to be called when the action is triggered.
:type callback: function
:param enabled_flag: A flag indicating if the action should be enabled
by default. Defaults to True.
:type enabled_flag: bool
:param add_to_menu: Flag indicating whether the action should also
be added to the menu. Defaults to True.
:type add_to_menu: bool
:param add_to_toolbar: Flag indicating whether the action should also
be added to the toolbar. Defaults to True.
:type add_to_toolbar: bool
:param status_tip: Optional text to show in a popup when mouse pointer
hovers over the action.
:type status_tip: str
:param parent: Parent widget for the new action. Defaults None.
:type parent: QWidget
:param whats_this: Optional text to show in the status bar when the
mouse pointer hovers over the action.
:returns: The action that was created. Note that the action is also
added to self.actions list.
:rtype: QAction
"""
icon = QIcon(icon_path)
action = QAction(icon, text, parent)
action.triggered.connect(callback)
action.setEnabled(enabled_flag)
if status_tip is not None:
action.setStatusTip(status_tip)
if whats_t | his is not None:
action.setWhatsThis(whats_this)
if add_to_toolbar:
| self.toolbar.addAction(action)
if add_to_menu:
self.iface.addPlug |
Moonshile/goondream | src/desserts/models.py | Python | apache-2.0 | 376 | 0.007979 | #coding=utf-8
from django.db import mod | els
from django.contrib.auth.models import User
class Activity(models.Model):
    """An activity owned by a user."""
    # The owning user; required (null=False).
    owner = models.ForeignKey(User, null=False)
    # Short unique label, at most 20 characters.
    text = models.CharField(max_length=20, unique=True)
class Dessert(models.Model):
    """A dessert belonging to an activity, with a description and a photo."""
    # The activity this dessert belongs to; required (null=False).
    activity = models.ForeignKey(Activity, null=False)
    description = models.TextField()
    # NOTE(review): ImageField normally needs Pillow installed and an
    # upload_to/storage configuration — confirm the project settings.
    photo = models.ImageField()
npuichigo/ttsflow | third_party/tensorflow/tensorflow/contrib/tensor_forest/hybrid/python/layers/decisions_to_data_test.py | Python | apache-2.0 | 2,506 | 0.002793 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
# pylint: disable=unused-import
from tensorflow.contrib.tensor_forest.hybrid.python.layers import decisions_to_data
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.framework.ops import Operation
from tensorflow.python.framework.ops import Tensor
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import googletest
class DecisionsToDataTest(test_util.TensorFlowTestCase):
    """Graph-construction sanity checks for the DecisionsToDataLayer."""

    def setUp(self):
        # Small arbitrary hyperparameters; only their shapes matter here.
        self.params = tensor_forest.ForestHParams(
            num_classes=2,
            num_features=31,
            layer_size=11,
            num_layers=13,
            num_trees=17,
            connection_probability=0.1,
            hybrid_tree_depth=4,
            regularization_strength=0.01,
            regularization="",
            learning_rate=0.01,
            weight_init_mean=0.0,
            weight_init_std=0.1)
        self.params.regression = False
        # Node/leaf counts of a perfect binary tree of the given depth.
        self.params.num_nodes = 2**self.params.hybrid_tree_depth - 1
        self.params.num_leaves = 2**(self.params.hybrid_tree_depth - 1)
        # pylint: disable=W0612
        # 100 rows of uniform random features in [-1, 1].
        self.input_data = constant_op.constant(
            [[random.uniform(-1, 1) for i in range(self.params.num_features)]
             for _ in range(100)])

    def testInferenceConstruction(self):
        # The scope string spells "Contruction" (sic); kept as-is since it is
        # only a variable-scope name.
        with variable_scope.variable_scope(
                "DecisionsToDataTest_testInferenceContruction"):
            graph_builder = decisions_to_data.DecisionsToDataLayer(self.params, 0,
                                                                   None)
            unused_graph = graph_builder.inference_graph(self.input_data)
if __name__ == "__main__":
    googletest.main()  # run the test suite when executed directly
|
vhf/kwak_cli | commands.py | Python | mit | 1,182 | 0.00846 | def dbg(ui, client, rest):
ui.redraw_userlist()
client.client.call('setOnline', [])
def hot(ui, client, rest):
    """Show the cached hot-channel names, then refresh the cache."""
    ui.chatbuffer_add(', '.join(client.hot_channels_name))
    # Refresh happens after display: freshly fetched names only become
    # visible the next time the command runs.
    client.client.call('getHotChannels', [], client.set_hot_channels_name)
def invt(ui, client, rest):
    """Invite a user to a channel.

    `rest` must be "<user> <channel name>"; spaces are stripped from the
    channel name before the RPC call.  Returns 1 on malformed input.
    """
    if rest is None:
        return 1
    args = rest.split(' ', 1)
    # Guard before indexing: the previous code read rest[1] first and only
    # then checked the length (with the wrong bound < 1), so an argument
    # without a space raised IndexError instead of returning 1.
    if len(args) < 2:
        return 1
    args[1] = args[1].replace(' ', "")
    client.client.call('invite', args)
def join(ui, client, rest):
    """Subscribe to (join) the channel named by `rest`."""
    client.subscribe_to_channel(rest)
def lst(ui, client, rest):
    """Show the cached list of all channel names, refresh it, and redraw."""
    ui.chatbuffer_add(', '.join(client.all_channels_name))
    # As in hot(): the refreshed list is only visible on the next call.
    client.client.call('channelList', [], client.set_all_channels_name)
    ui.redraw_ui()
def quit(ui, client, rest):
    """Exit the program immediately.

    NOTE(review): the name shadows the `quit` builtin; it doubles as the
    command keyword in the dispatch table below.
    """
    exit(0)
commands = {
'dbg': [dbg, 'SYNOPSYS: lala', 'USAGE: lala'],
'hot': [hot, 'SYNOPSYS: lala', 'USAGE: lala'],
'invite': [invt, 'SYNOPSYS: lala', 'USAGE | : lala'],
'j': [join, 'SYNOPSYS: lala', 'USAGE: lala'],
'join': [join, 'SYNOPSYS: lala', 'USAGE: lala'],
'list': [lst, 'SYNOPSYS: lala', 'USAGE: lala'],
'quit': [quit, 'SYNOPSYS: lala', 'USAGE: lala'],
}
|
phrocker/sharkbite | examples/jsoncombiner.py | Python | apache-2.0 | 850 | 0.021176 | class ExampleCombiner:
## This example iterator assumes the CQ contains a field name
## and value pair separated by a null delimiter
def onNext(iterator):
if (iterator.hasTop()):
mapping = {}
| key = iterator.getTopKey()
cf = key.getColumnFamily()
while (iterator.hasTop() and cf == key.getColumnFamily()):
## FN and FV in cq
fieldn | ame = key.getColumnQualifier().split('\x00')[0];
fieldvalue = key.getColumnQualifier().split('\x00')[1];
mapping[fieldname]=fieldvalue
iterator.next();
if (iterator.hasTop()):
key = iterator.getTopKey()
json_data = json.dumps(mapping,indent=4, sort_keys=True);
value = Value(json_data);
kv = KeyValue(key,value)
return kv
else:
return None
|
dwhswenson/contact_map | contact_map/__init__.py | Python | lgpl-2.1 | 746 | 0 | try:
from . import version
except ImportError: # pragma: no cover
from . import _version as version
__version__ = version.version
from .contact_map import (
ContactMap, ContactFrequency, ContactDifferen | ce,
AtomMismatchedContactDifference, ResidueMismatchedContactDifference,
OverrideTopologyContactDifference
)
from .contact_count import ContactCount
from .contact_trajectory import ContactTrajectory, RollingContactFrequency
from .min_dist import NearestAtoms, MinimumDistanceCounter
from .concurrence import (
Concurrence, AtomContactConcurrence, ResidueContactConcurrence,
ConcurrencePlotter, plot_concurrence
)
from .dask_runner import DaskContactFrequen | cy, DaskContactTrajectory
from . import plot_utils
|
ardinusawan/Sistem_Terdistribusi | Web-Service/RESTful/referensi/restful-hudan/tasks-2.py | Python | gpl-3.0 | 881 | 0.00681 | # Source: http://blog.miguelgrinberg.com/post/designing-a-restful-api-with-python-and-flask
from flask import Flask, jsonify, abort, make_response
app = Flask(__name__)
# In-memory task store used by this tutorial API; stands in for a database.
# Each task: id (int), title, description, done (bool).
tasks = [
    {
        'id': 1,
        'title': u'Buy groceries',
        'description': u'Milk, Cheese, Pizza, Fruit, Tylenol',
        'done': False
    },
    {
        'id': 2,
        'title': u'Learn Python',
        'description': u'Need to find a good Python tutorial on the web',
        'done': False
    }
]
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods=['GET'])
def get_task(task_id):
    """Return the task with the given id as JSON, or abort with 404."""
    # A list comprehension instead of filter(): on Python 3, filter()
    # returns a lazy iterator with no len() and no indexing, so the
    # original len(task) == 0 / task[0] code broke there.
    matches = [t for t in tasks if t['id'] == task_id]
    if not matches:
        abort(404)
    return jsonify({'task': matches[0]})
@app.errorhandler(404)
def not_found(error):
    """Render 404 responses as JSON instead of Flask's default HTML page."""
    return make_response(jsonify({'error': 'Not found'}), 404)
if __name__ == '__main__':
    # NOTE(review): debug=True is for development only — never in production.
    app.run(debug=True)
|
cmorgan/zipline | tests/test_sources.py | Python | apache-2.0 | 7,041 | 0 | #
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import pytz
import numpy as np
from six import integer_types
from unittest import TestCase
import zipline.utils.factory as factory
from zipline.sources import (DataFrameSource,
DataPanelSource,
RandomWalkSource)
from zipline.utils import tradingcalendar as calendar_nyse
from zipline.assets import AssetFinder
class TestDataFrameSource(TestCase):
    """Tests for the DataFrame- and Panel-backed event sources."""
    # NOTE(review): pd.lib.Timestamp and pd.Panel are long-removed pandas
    # APIs — fine for the pandas version this project pins, but these tests
    # will not run on modern pandas.

    def test_df_source(self):
        # Streamed events must match the source frame row by row.
        source, df = factory.create_test_df_source()
        assert isinstance(source.start, pd.lib.Timestamp)
        assert isinstance(source.end, pd.lib.Timestamp)
        for expected_dt, expected_price in df.iterrows():
            sid0 = next(source)
            assert expected_dt == sid0.dt
            assert expected_price[0] == sid0.price

    def test_df_sid_filtering(self):
        # Constructing the source directly must drop the unselected sid.
        _, df = factory.create_test_df_source()
        source = DataFrameSource(df)
        assert 1 not in [event.sid for event in source], \
            "DataFrameSource should only stream selected sid 0, not sid 1."

    def test_panel_source(self):
        # Every event carries the custom 'arbitrary' field and the given type.
        source, panel = factory.create_test_panel_source(source_type=5)
        assert isinstance(source.start, pd.lib.Timestamp)
        assert isinstance(source.end, pd.lib.Timestamp)
        for event in source:
            self.assertTrue('sid' in event)
            self.assertTrue('arbitrary' in event)
            self.assertTrue('type' in event)
            self.assertTrue(hasattr(event, 'volume'))
            self.assertTrue(hasattr(event, 'price'))
            self.assertEquals(event['type'], 5)
            self.assertEquals(event['arbitrary'], 1.)
            self.assertEquals(event['sid'], 0)
            self.assertTrue(isinstance(event['volume'], int))
            self.assertTrue(isinstance(event['arbitrary'], float))

    def test_yahoo_bars_to_panel_source(self):
        # NOTE(review): hits the Yahoo download path via factory; this is a
        # network-dependent test.
        finder = AssetFinder()
        stocks = ['AAPL', 'GE']
        start = pd.datetime(1993, 1, 1, 0, 0, 0, 0, pytz.utc)
        end = pd.datetime(2002, 1, 1, 0, 0, 0, 0, pytz.utc)
        data = factory.load_bars_from_yahoo(stocks=stocks,
                                            indexes={},
                                            start=start,
                                            end=end)
        check_fields = ['sid', 'open', 'high', 'low', 'close',
                        'volume', 'price']
        copy_panel = data.copy()
        sids = finder.map_identifier_index_to_sids(
            data.items, data.major_axis[0]
        )
        copy_panel.items = sids
        source = DataPanelSource(copy_panel)
        for event in source:
            for check_field in check_fields:
                self.assertIn(check_field, event)
            self.assertTrue(isinstance(event['volume'], (integer_types)))
            self.assertTrue(event['sid'] in sids)

    def test_nan_filter_dataframe(self):
        dates = pd.date_range('1/1/2000', periods=2, freq='B', tz='UTC')
        df = pd.DataFrame(np.random.randn(2, 2),
                          index=dates,
                          columns=[4, 5])
        # should be filtered
        df.loc[dates[0], 4] = np.nan
        # should not be filtered, should have been ffilled
        df.loc[dates[1], 5] = np.nan
        source = DataFrameSource(df)
        event = next(source)
        self.assertEqual(5, event.sid)
        event = next(source)
        self.assertEqual(4, event.sid)
        event = next(source)
        self.assertEqual(5, event.sid)
        self.assertFalse(np.isnan(event.price))

    def test_nan_filter_panel(self):
        # Same NaN filtering/forward-fill semantics for the Panel source.
        dates = pd.date_range('1/1/2000', periods=2, freq='B', tz='UTC')
        df = pd.Panel(np.random.randn(2, 2, 2),
                      major_axis=dates,
                      items=[4, 5],
                      minor_axis=['price', 'volume'])
        # should be filtered
        df.loc[4, dates[0], 'price'] = np.nan
        # should not be filtered, should have been ffilled
        df.loc[5, dates[1], 'price'] = np.nan
        source = DataPanelSource(df)
        event = next(source)
        self.assertEqual(5, event.sid)
        event = next(source)
        self.assertEqual(4, event.sid)
        event = next(source)
        self.assertEqual(5, event.sid)
        self.assertFalse(np.isnan(event.price))
class TestRandomWalkSource(TestCase):
    """Tests for the synthetic random-walk price source."""

    def test_minute(self):
        np.random.seed(123)  # deterministic walk
        start_prices = {0: 100,
                        1: 500}
        start = pd.Timestamp('1990-01-01', tz='UTC')
        end = pd.Timestamp('1991-01-01', tz='UTC')
        source = RandomWalkSource(start_prices=start_prices,
                                  calendar=calendar_nyse, start=start,
                                  end=end)
        self.assertIsInstance(source.start, pd.lib.Timestamp)
        self.assertIsInstance(source.end, pd.lib.Timestamp)
        for event in source:
            self.assertIn(event.sid, start_prices.keys())
            self.assertIn(event.dt.replace(minute=0, hour=0),
                          calendar_nyse.trading_days)
            self.assertGreater(event.dt, start)
            self.assertLess(event.dt, end)
            self.assertGreater(event.price, 0,
                               "price should never go negative.")
            # 13..21 presumably brackets NYSE trading hours expressed in
            # UTC — confirm against the calendar definition.
            self.assertTrue(13 <= event.dt.hour <= 21,
                            "event.dt.hour == %i, not during market \
hours." % event.dt.hour)

    def test_day(self):
        np.random.seed(123)
        start_prices = {0: 100,
                        1: 500}
        start = pd.Timestamp('1990-01-01', tz='UTC')
        end = pd.Timestamp('1992-01-01', tz='UTC')
        source = RandomWalkSource(start_prices=start_prices,
                                  calendar=calendar_nyse, start=start,
                                  end=end, freq='daily')
        self.assertIsInstance(source.start, pd.lib.Timestamp)
        self.assertIsInstance(source.end, pd.lib.Timestamp)
        for event in source:
            self.assertIn(event.sid, start_prices.keys())
            self.assertIn(event.dt.replace(minute=0, hour=0),
                          calendar_nyse.trading_days)
            self.assertGreater(event.dt, start)
            self.assertLess(event.dt, end)
            self.assertGreater(event.price, 0,
                               "price should never go negative.")
            # Daily frequency emits midnight-stamped events.
            self.assertEqual(event.dt.hour, 0)
|
Tsumiki-Chan/Neko-Chan | commands/userinfo.py | Python | gpl-3.0 | 3,277 | 0.007019 | from functions import logger,search, messagestorage, search
import pprint
import datetime
# Command metadata consumed by the bot's help/dispatch machinery.
DESC="Tells you something about yourself or a user"
USAGE="userinfo"
async def init(bot):
chat=bot.message.channel
try:
user = bot.message.author
| if len(bot.args) > 0:
if len(bot.message.raw_mentions)>0:
user = await search.user(chat, bot.message.raw_mentions[0])
else:
user = await search.user(chat, " ".join(bot.args))
data = "Could not find any user matching {}".format(" ".join(bot.args))
if user is not None:
data = []
if user.bot:
data.append(["<!-- Bot information -->"])
else:
data. | append(["<!-- User information -->"])
data.append(["ID", user.id])
data.append(["Nickname", user.display_name])
data.append(["Name", user.name])
data.append(["Discriminator", user.discriminator])
data.append(["Created on", user.created_at])
if user.is_afk:
data.append(["Status", str(user.status)+" [AFK]"])
else:
data.append(["Status", user.status])
if user.game is not None:
data.append(["Playing", user.game])
if not bot.message.channel.is_private and user.server is not None:
#======== MESSAGE STATS ========
stats = await messagestorage.stats(user.server.id, user.id)
if stats is not None:
data.append(["<!-- 30 day stats -->"])
data.append(["Sent messages", stats['count']])
if stats['seen'] is not None:
data.append(["Last message", datetime.datetime.fromtimestamp(stats['seen']).strftime('%Y-%m-%d %H:%M:%S')])
if stats['channel'] is not None and len(stats['channel'])>0:
data.append(["Last seen", (await search.channel(user.server, stats['channel'])).name])
#======== Server STATS =========
data.append(["<!-- Server stats -->"])
data.append(["Joined on", user.joined_at])
if user.self_mute:
data.append(["Voice status", "Muted"])
if user.voice_channel:
data.append(["Voice channel", user.voice_channel.name])
msg = ""
for row in data:
if len(row)==1:
msg += '\r\n{0!s:^40}'.format(row[0])
else:
msg += '\r\n{0!s:>15} :{1!s:<10}'.format(row[0], row[1])
if len(user.roles) > 1:
msg += "\r\n{0!s:>15} {1!s:<10}".format("Roles", "")
for role in user.roles:
if not role.is_everyone:
msg += "\r\n{0!s:>15} {1!s:<10}".format(" ",role)
#==============================================
await bot.sendMessage( "```xl\r\n{}```".format(msg));
else:
await bot.sendMessage( "S-Sorry senpai. I can't find the user you asked for :sob:");
except Exception:
logger.PrintException(bot.message);
|
jakbob/guitarlegend | slask/readwav.py | Python | gpl-3.0 | 1,131 | 0.01061 | #!/usr/bin/env python
# Disect the wav data and plot it using pylab.
import pylab
import wave # Contains reference to struct. No point in importing it twice
FORMAT = { "11" : "1b", # Mono, 8-bit sound
"12" : "1h", # Mono, 16-bit sound
"21" : "2b", # Stereo, 8-bit sound
"22" : "2h", # Stereo, 16-bit sound
}
WAVE_INPUT_FILE = "output.wav"
wf = wave.open(WAVE_INPUT_FILE, "rb")
params = wf.getparams()
# < == little endian
fmt = "<" + FORMAT[str(params[0]) + # nchannels
str(params[1])] # sampwidth
wavdata = wf.readframes(params[3]) # params[3] is nframes; Of course, don't attempt this with a large file
wf.close( | )
# Ti | me to unpack
python_data = []
for i in range(0, params[3]):
size = wave.struct.calcsize(fmt)
data = wave.struct.unpack(fmt, wavdata[i:i+size]) # Little endian
python_data.append(data[0]) # The above function returns 1-tuples
assert(len(wavdata) == params[0]*params[1]*len(python_data))
x = pylab.arange(0, float(params[3])/params[2], 1/float(params[2]))
pylab.xlabel("time (s)")
pylab.plot(x, python_data)
pylab.show()
|
jorgealmerio/QEsg | core/ezdxf/pp/__init__.py | Python | gpl-3.0 | 150 | 0 | # Purpose: DXF Pretty Printer
# Created: 1 | 6.07.2015
# Copyright (C) 2015, Manfred Moitzi
# License: MIT License
_ | _author__ = "mozman <mozman@gmx.at>"
|
ptosco/rdkit | rdkit/Chem/Pharm2D/UnitTestLazyGenerator.py | Python | bsd-3-clause | 2,437 | 0.009027 | #
# Copyright (C) 2003-2006 greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
"""unit testing code for the lazy signature generator
"""
import unittest
from rdkit import Chem
from rdkit.Chem.Pharm2D import SigFactory
try:
from rdkit.Chem.Pharm2D import LazyGenerator
except NotImplementedError:
LazyGenerator = None
class TestCase(unittest.TestCase): # pragma: nocover
def getFactory(self):
factory = SigFactory.SigFactory()
factory.SetPatternsFromSmarts(['O', 'N'])
factory.SetBins([(0, 2), (2, 5), (5, 8)])
factory.SetMinCount(2)
factory.SetMaxCount(3)
return factory
def test_NotImplemented(self):
self.assertIsNone(LazyGenerator, 'Review LazyGenerator unit tests')
@unittest.skipIf(LazyGenerator is None, 'LazyGenerator implementation incomplete')
def test1_simple(self):
mol = Chem.MolFromSmiles('OCC(=O)CCCN')
factory = self.getFactory()
sig = factory.GetSignature()
assert sig.GetSize() == 105, f'bad signature size: {sig.GetSize()}'
sig.SetIncludeBondOrder(0)
gen = LazyGenerator.Generator(sig, mol)
assert len(gen) == sig.GetSize(), f'length mismatch {len(gen)}!={sig.GetSize | ()}'
tgt = (1, 5, 48)
for bit in tgt:
assert gen[bit], f'bit {bit} not properly set' |
assert gen.GetBit(bit), f'bit {bit} not properly set'
assert not gen[bit + 50], f'bit {bit + 100} improperly set'
sig = factory.GetSignature()
assert sig.GetSize() == 105, f'bad signature size: {sig.GetSize()}'
sig.SetIncludeBondOrder(1)
gen = LazyGenerator.Generator(sig, mol)
assert len(gen) == sig.GetSize(), f'length mismatch {len(gen)}!={sig.GetSize()}'
tgt = (1, 4, 5, 45)
for bit in tgt:
assert gen[bit], f'bit {bit} not properly set'
assert gen.GetBit(bit), f'bit {bit} not properly set'
assert not gen[bit + 50], f'bit {bit + 100} improperly set'
try:
gen[sig.GetSize() + 1]
except IndexError:
ok = 1
else:
ok = 0
assert ok, 'accessing bogus bit did not fail'
try:
gen[-1]
except IndexError:
ok = 1
else:
ok = 0
assert ok, 'accessing bogus bit did not fail'
if __name__ == '__main__': # pragma: nocover
unittest.main()
|
redanexis/keysmash | main.py | Python | apache-2.0 | 632 | 0.001582 | #!/usr/bin/env python
# wipflag{todo}
from threading import Thread
from game import Ga | me
from bytekeeper import ByteKeeper
from broker import Broker # todo
class Main:
def __init__(self):
print('starting main...\n')
self.running = True
self.bytekeeper = ByteKeeper()
self.game = Game(self, self.bytekeeper)
# self.broker = Broker(self, self.bytekeeper)
self.gamethread = Thread(target=self.game.run)
# self.brokert | hread = Thread(target=self.broker.run)
self.gamethread.start()
# self.brokerthread.start()
if __name__ == "__main__":
main = Main()
|
figlief/ctx | setup.py | Python | mit | 1,330 | 0 | from __future__ import unicode_literals
import os
import codecs
from setuptools import setup
import ctx
def read(*paths):
"""Build a file path from *paths* and return the contents."""
path = os.path.join(*paths)
with codecs.open(path, mode='rb', encoding='utf-8') as f:
return f.read()
long_description = '\n\n'.join(
map(read, (
'README.rst',
))
)
setup(
name='ctx',
version=ctx.__version__,
description="A minimal but opinionated dict/object combo (like Bunch).",
long_description=long_description,
author='Robert Ledger',
author_email='figlief@figlief.com',
url='https://github.com/figlief/ctx',
include_package_data=True,
py_modules=['ctx' | ],
install_requires=[],
license="MIT",
zip_safe=False,
keywords='ctx',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2 | .7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
anythingrandom/eclcli | eclcli/dh/v2/license.py | Python | apache-2.0 | 3,173 | 0.011346 | import copy
import six
from eclcli.common import command
from eclcli.common import utils
class ListLicense(command.Lister):
def get_parser(self, prog_name):
parser = super(ListLicense, self).get_parser(prog_name)
parser.add_argument(
"--license-type",
help="License type name as string of which you want to list license",
metavar='<license-type>'
)
return parser
def take_action(self, parsed_args):
dh_client = self.app.client_manager.dh
search_opts = {
"license_type":parsed_args.license_type
}
self.log.debug('search options: %s',search_opts)
columns = [
'ID', 'Key', 'Assigned From', 'Expires At', 'License Type',
]
column_headers = columns
data = dh_client.licenses.list(search_opts=search_opts)
return (column_headers,
| (utils.get_item_properties(
s, columns
) for s in data))
class ListLicenseType(command.Lister):
def get_parser(self, prog_name):
parser = super(ListLicenseType, self).get_parser(prog_name)
return parser
def take_action(self, parsed_args):
dh_client = self.app.client_manager.dh
columns = [
'ID', 'Name', | 'Has License Key', 'Unit', 'Description'
]
column_headers = columns
data = dh_client.licenses.list_license_types()
return (column_headers,
(utils.get_item_properties(
s, columns
) for s in data))
class CreateLicense(command.ShowOne):
def get_parser(self, prog_name):
parser = super(CreateLicense, self).get_parser(prog_name)
parser.add_argument(
"license_type",
help="License type name as string of which you want to create license",
metavar='<license-type>'
)
return parser
def take_action(self, parsed_args):
dh_client = self.app.client_manager.dh
self.log.debug('license type: %s',parsed_args.license_type)
rows = [
"ID",
"Key",
"Assigned From",
"Expires At",
"License Type"
]
row_headers = rows
data = dh_client.licenses.create(license_type=parsed_args.license_type)
return (row_headers,
utils.get_item_properties(
data, rows
))
class DeleteLicense(command.Command):
def get_parser(self, prog_name):
parser = super(DeleteLicense, self).get_parser(prog_name)
parser.add_argument(
"license_ids",
nargs="+",
help="IDs of licenses to be deleted",
metavar='<license-ids>'
)
return parser
def take_action(self, parsed_args):
dh_client = self.app.client_manager.dh
self.log.debug('license id: %s',parsed_args.license_ids)
for license_id in parsed_args.license_ids:
dh_client.licenses.delete(license_id)
|
LukasBoersma/pyowm | pyowm/webapi25/weather.py | Python | mit | 18,912 | 0.000529 | """
Module containing weather data classes and data structures.
"""
import json
import xml.etree.ElementTree as ET
from pyowm.webapi25.xsd.xmlnsconfig import (
WEATHER_XMLNS_PREFIX, WEATHER_XMLNS_URL)
from pyowm.utils import timeformatutils, temputils, xmlutils
class Weather(object):
"""
A class encapsulating raw weather data.
A reference about OWM weather codes and icons can be found at:
http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
:param reference_time: GMT UNIX time of weather measurement
:type reference_time: int
:param sunset_time: GMT UNIX time of sunset
:type sunset_time: int
:param sunrise_time: GMT UNIX time of sunrise
:type sunrise_time: int
:param clouds: cloud coverage percentage
:type clouds: int
:param rain: precipitation info
:type rain: dict
:param snow: snow info
:type snow: dict
:param wind: wind info
:type wind: dict
:param humidity: atmospheric humidity percentage
:type humidity: int
:param pressure: atmospheric pressure info
:type pressure: dict
:param temperature: temperature info
:type temperature: dict
:param status: short weather status
:type status: Unicode
:param detailed_status: detailed weather status
:type detailed_status: Unicode
:param weather_code: OWM weather condition code
:type weather_code: int
:param weather_icon_name: weather-related icon name
:type weather_icon_name: Unicode
:param visibility_distance: visibility distance
:type visibility_distance: float
:param dewpoint: dewpoint
:type dewpoint: float
:param humidex: Canadian humidex
:type humidex: float
:param heat_index: heat index
:type heat_index: float
:returns: a *Weather* instance
:raises: *ValueError* when negative values are provided
"""
def __init__(self, reference_time, sunset_time, sunrise_time, clouds, rain,
snow, wind, humidity, pressure, temperature, status,
detailed_status, weather_code, weather_icon_name,
visibility_distance, dewpoint, humidex, heat_index):
if reference_time < 0:
raise ValueError("'reference_time' must be greater than 0")
self._reference_time = reference_time
if sunset_time < 0:
raise ValueError("'sunset_time' must be greatear than 0")
self._sunset_time = sunset_time
if sunrise_time < 0:
raise ValueError("'sunrise_time' must be greatear than 0")
self._sunrise_time = sunrise_time
if clouds < 0:
raise ValueError("'clouds' must be greater than 0")
self._clouds = clouds
self._rain = rain
self._snow = snow
self._wind = wind
if humidity < 0:
raise ValueError("'humidity' must be greatear than 0")
self._humidity = humidity
self._pressure = pressure
self._temperature = temperature
self._status = status
self._detailed_status = detailed_status
self._weather_code = weather_code
self._weather_icon_name = weather_icon_name
if visibility_distance is not None and visibility_distance < 0:
raise ValueError("'visibility_distance' must be greater than 0")
self._visibility_distance = visibility_distance
self._dewpoint = dewpoint
if humidex is not None a | nd humidex < 0:
raise ValueError("'humidex' must be greater than 0")
self._humidex = humidex
if heat_index is not None and heat_index < 0:
raise ValueError("'heat index' must be grater than 0")
self._heat_index = heat_index
def get_reference_time(self, timeformat='unix'):
"""Returns the GMT time telling when the weather was measured
:param timeformat: the format for the time value. May be: |
'*unix*' (default) for UNIX time or '*iso*' for ISO8601-formatted
string in the format ``YYYY-MM-DD HH:MM:SS+00``
:type timeformat: str
:returns: an int or a str
:raises: ValueError when negative values are provided
"""
return timeformatutils.timeformat(self._reference_time, timeformat)
def get_sunset_time(self, timeformat='unix'):
"""Returns the GMT time of sunset
:param timeformat: the format for the time value. May be:
'*unix*' (default) for UNIX time or '*iso*' for ISO8601-formatted
string in the format ``YYYY-MM-DD HH:MM:SS+00``
:type timeformat: str
:returns: an int or a str
:raises: ValueError
"""
return timeformatutils.timeformat(self._sunset_time, timeformat)
def get_sunrise_time(self, timeformat='unix'):
"""Returns the GMT time of sunrise
:param timeformat: the format for the time value. May be:
'*unix*' (default) for UNIX time or '*iso*' for ISO8601-formatted
string in the format ``YYYY-MM-DD HH:MM:SS+00``
:type timeformat: str
:returns: an int or a str
:raises: ValueError
"""
return timeformatutils.timeformat(self._sunrise_time, timeformat)
def get_clouds(self):
"""Returns the cloud coverage percentage as an int
:returns: the cloud coverage percentage
"""
return self._clouds
def get_rain(self):
"""Returns a dict containing precipitation info
:returns: a dict containing rain info
"""
return self._rain
def get_snow(self):
"""Returns a dict containing snow info
:returns: a dict containing snow info
"""
return self._snow
def get_wind(self):
"""Returns a dict containing wind info
:returns: a dict containing wind info
"""
return self._wind
def get_humidity(self):
"""Returns the atmospheric humidity as an int
:returns: the humidity
"""
return self._humidity
def get_pressure(self):
"""Returns a dict containing atmospheric pressure info
:returns: a dict containing pressure info
"""
return self._pressure
def get_temperature(self, unit='kelvin'):
"""Returns a dict with temperature info
:param unit: the unit of measure for the temperature values. May be:
'*kelvin*' (default), '*celsius*' or '*fahrenheit*'
:type unit: str
:returns: a dict containing temperature values.
:raises: ValueError when unknown temperature units are provided
"""
# This is due to the fact that the OWM web API responses are mixing
# absolute temperatures and temperature deltas together
to_be_converted = dict()
not_to_be_converted = dict()
for label, temp in self._temperature.items():
if temp is None or temp < 0:
not_to_be_converted[label] = temp
else:
to_be_converted[label] = temp
converted = temputils.kelvin_dict_to(to_be_converted, unit)
return dict(list(converted.items()) + \
list(not_to_be_converted.items()))
def get_status(self):
"""Returns the short weather status as a Unicode string
:returns: the short weather status
"""
return self._status
def get_detailed_status(self):
"""Returns the detailed weather status as a Unicode string
:returns: the detailed weather status
"""
return self._detailed_status
def get_weather_code(self):
"""Returns the OWM weather condition code as an int
:returns: the OWM weather condition code
"""
return self._weather_code
def get_weather_icon_name(self):
"""Returns weather-related icon name as a Unicode string.
:returns: the icon name.
"""
return self._weather_icon_name
def get_visibility_distance(self):
"""Returns the visibility distance as a float
:returns: the visibility distance
"""
return self._visibility_distance
def get_dewpoint(self):
" |
beni55/jieba | test/extract_topic.py | Python | mit | 1,456 | 0.010989 | import sys
sys.path.append("../")
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn import decomposition
import jieba
import time
import glob
import sys
import os
import random
if len(sys.argv)<2:
print "usage: extract_topic.py directory [n_topic] [n_top_words]"
sys.exit(0)
n_topic = 10
n_top_words = 25
if len(sys.argv)>2:
n_topic = int(sys.argv[2])
if len(sys.argv)>3:
n_top_words = int(sys.argv[3])
count_vect = CountVectorizer()
docs = []
pattern = os.path.join(sys.argv[1],"*.txt")
print "read "+pattern
for f_name in glob.glob(pattern):
with open(f_name) as f:
print "read file:", f_name
for line in f: #one line as a document
words = " ".join(jieba.cut(line))
docs.append(words)
random.shuffle(docs)
print "read done."
print "transform"
counts = count_vect.fit_transform(docs)
tfidf = TfidfTransf | ormer().fit_transform(counts)
print tfidf.shape
t0 = time.time()
print "training..."
nmf = decomposition.NMF(n_components=n_topic).fit(tfidf)
print("done in %0.3fs." % (time.time() - t0))
# Inverse the vectorizer vocabulary to be able
feature_names = count_vect.get_feature_names()
f | or topic_idx, topic in enumerate(nmf.components_):
print("Topic #%d:" % topic_idx)
print(" ".join([feature_names[i]
for i in topic.argsort()[:-n_top_words - 1:-1]]))
print("")
|
kisel/trex-core | scripts/automation/regression/hltapi_playground.py | Python | apache-2.0 | 10,579 | 0.032328 | #!/router/bin/python
import outer_packages
#from trex_stl_lib.trex_stl_hltapi import CTRexHltApi, CStreamsPerPort
from trex_stl_lib.trex_stl_hltapi import *
import traceback
import sys, time
from pprint import pprint
import argparse
def error(err = None):
if not err:
raise Exception('Unknown exception, look traceback')
if type(err) is str and not err.startswith('[ERR]'):
err = '[ERR] ' + err
print err
sys.exit(1)
def check_res(res):
if res['status'] == 0:
error('Encountered error:\n%s' % res['log'])
return res
def print_brief_stats(res):
title_str = ' '*3
tx_str = 'TX:'
rx_str = 'RX:'
for port_id, stat in res.iteritems():
if type(port_id) is not int:
continue
title_str += ' '*10 + 'Port%s' % port_id
tx_str += '%15s' % res[port_id]['aggregate']['tx']['total_pkts']
rx_str += '%15s' % res[port_id]['aggregate']['rx']['total_pkts']
print(title_str)
print(tx_str)
print(rx_str)
def wait_with_progress(seconds):
for i in range(0, seconds):
time.sleep(1)
sys.stdout.write('.')
sys.stdout.flush()
print('')
if __name__ == "__main__":
try:
parser = argparse.ArgumentParser(description='Example of using stateless TRex via HLT API.', formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-v', dest = 'verbose', default = 0, help='Stateless API verbosity:\n0: No prints\n1: Commands and their status\n2: Same as 1 + ZMQ in&out')
parser.add_argument('--device', dest = 'device', default = 'localhost', help='Address of TRex server')
args = parser.parse_args()
hlt_client = CTRexHltApi(verbose = int(args.verbose))
print('Connecting to %s...' % args.device)
res = check_res(hlt_client.connect(device = args.device, port_list = [0, 1], username = 'danklei', break_locks = True, reset = True))
port_handle = res['port_handle']
print('Connected, got port handles %s' % port_handle)
ports_streams_dict = CStreamsPerPort()
print hlt_client.traffic_control(action = 'poll')
print hlt_client.traffic_config(mode = 'create', l2_encap = 'ethernet_ii_vlan', rate_pps = 1,
l3_protocol = 'ipv4',
#length_mode = 'imix', l3_length = 200,
ipv6_dst_mode = 'decrement', ipv6_dst_count = 300, ipv6_dst_addr = 'fe80:0:0:0:0:0:0:000f',
port_handle = port_handle, port_handle2 = port_handle[1],
#save_to_yaml = '/tmp/d1.yaml',
#stream_id = 1,
)
print hlt_client.traffic_control(action = 'poll')
print hlt_client.traffic_control(action = 'run')
print hlt_client.traffic_control(action = 'poll')
wait_with_progress(2)
print hlt_client.traffic_control(action = 'poll')
print | hlt_client.traffic | _control(action = 'stop')
print hlt_client.traffic_control(action = 'poll')
print hlt_client.traffic_stats(mode = 'aggregate')
print hlt_client.traffic_control(action = 'clear_stats')
wait_with_progress(1)
print hlt_client.traffic_stats(mode = 'aggregate')
wait_with_progress(1)
print hlt_client.traffic_stats(mode = 'aggregate')
wait_with_progress(1)
print hlt_client.traffic_stats(mode = 'aggregate')
wait_with_progress(1)
print hlt_client.traffic_stats(mode = 'aggregate')
#print res
#print hlt_client._streams_history
#print hlt_client.trex_client._STLClient__get_all_streams(port_id = port_handle[0])
#print hlt_client.trex_client._STLClient__get_all_streams(port_id = port_handle[1])
#ports_streams_dict.add_streams_from_res(res)
sys.exit(0)
res = check_res(hlt_client.traffic_config(mode = 'create', l2_encap = 'ethernet_ii_vlan', rate_pps = 1,
port_handle = port_handle[0], port_handle2 = port_handle[1], save_to_yaml = '/tmp/d1.yaml',
l4_protocol = 'udp',
#udp_src_port_mode = 'decrement',
#udp_src_port_count = 10, udp_src_port = 5,
))
ports_streams_dict.add_streams_from_res(res)
sys.exit(0)
#print ports_streams_dict
#print hlt_client.trex_client._STLClient__get_all_streams(port_id = port_handle[0])
res = check_res(hlt_client.traffic_config(mode = 'modify', port_handle = port_handle[0], stream_id = ports_streams_dict[0][0],
mac_src = '1-2-3:4:5:6', l4_protocol = 'udp', save_to_yaml = '/tmp/d2.yaml'))
#print hlt_client.trex_client._STLClient__get_all_streams(port_id = port_handle[0])
#print hlt_client._streams_history
res = check_res(hlt_client.traffic_config(mode = 'modify', port_handle = port_handle[0], stream_id = ports_streams_dict[0][0],
mac_dst = '{ 7 7 7-7:7:7}', save_to_yaml = '/tmp/d3.yaml'))
#print hlt_client.trex_client._STLClient__get_all_streams(port_id = port_handle[0])
check_res(hlt_client.traffic_config(mode = 'reset', port_handle = port_handle))
res = check_res(hlt_client.traffic_config(mode = 'create', bidirectional = True, length_mode = 'fixed',
port_handle = port_handle[0], port_handle2 = port_handle[1],
transmit_mode = 'single_burst', pkts_per_burst = 100, rate_pps = 100,
mac_src = '1-2-3-4-5-6',
mac_dst = '6:5:4:4:5:6',
save_to_yaml = '/tmp/imix.yaml'))
ports_streams_dict.add_streams_from_res(res)
print('Create single_burst 100 packets rate_pps=100 on port 0')
res = check_res(hlt_client.traffic_config(mode = 'create', port_handle = port_handle[0], transmit_mode = 'single_burst',
pkts_per_burst = 100, rate_pps = 100))
ports_streams_dict.add_streams_from_res(res)
# playground - creating various streams on port 1
res = check_res(hlt_client.traffic_config(mode = 'create', port_handle = port_handle[1], save_to_yaml = '/tmp/hlt2.yaml',
tcp_src_port_mode = 'decrement',
tcp_src_port_count = 10, tcp_dst_port_count = 10, tcp_dst_port_mode = 'random'))
ports_streams_dict.add_streams_from_res(res)
res = check_res(hlt_client.traffic_config(mode = 'create', port_handle = port_handle[1], save_to_yaml = '/tmp/hlt3.yaml',
l4_protocol = 'udp',
udp_src_port_mode = 'decrement',
udp_src_port_count = 10, udp_dst_port_count = 10, udp_dst_port_mode = 'random'))
ports_streams_dict.add_streams_from_res(res)
res = check_res(hlt_client.traffic_config(mode = 'create', port_handle = port_handle[1], save_to_yaml = '/tmp/hlt4.yaml',
length_mode = 'increment',
#ip_src_addr = '192.168.1.1', ip_src_mode = 'increment', ip_src_count = 5,
ip_dst_addr = '5.5.5.5', ip_dst_mode = 'random', ip_dst_count = 2))
ports_streams_dict.add_streams_from_res(res)
res = check_res(hlt_client.traffic_config(mode = 'create', port_handle = port_handle[1], save_to_yaml = '/tmp/hlt5.yaml',
length_mode = 'decrement', frame_size_min = 100, frame_size_max = 3000,
#ip_src_addr = '192.168.1.1', ip_src_mode = 'increment', ip_src_count = 5,
#ip_dst_addr = '5.5.5.5', ip_dst_mode = 'random', ip_dst_count = 2
|
litobro/fbbot | fbchat/stickers.py | Python | mit | 177 | 0.011299 | L | IKES={
'l': '369239383222810',
'm': '369239343222814',
's': '369239263222822'
}
LIKES['large'] = LIKES['l']
LIKES['medium'] =LIKES['m']
LIKES['small'] = LIKES['s' | ]
|
stephane-martin/salt-debian-packaging | salt-2016.3.3/tests/unit/modules/moosefs_test.py | Python | apache-2.0 | 2,122 | 0.001414 | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import Salt Libs
from salt.modules import moosefs
# Globals
moosefs.__salt__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MoosefsTestCase(TestCase):
'''
Test cases for salt.modules.moosefs
'''
# 'dirinfo' function tests: 1
def test_dirinfo(self):
'''
Test if it return information on a directory located on the Moose
'''
mo | ck = MagicMock(return_value={'stdout': 'Salt:salt'})
with patch.dict(moosefs.__salt__, {'cmd.run_all': mock}):
self.assert | DictEqual(moosefs.dirinfo('/tmp/salt'), {'Salt': 'salt'})
# 'fileinfo' function tests: 1
def test_fileinfo(self):
'''
Test if it returns information on a file located on the Moose
'''
mock = MagicMock(return_value={'stdout': ''})
with patch.dict(moosefs.__salt__, {'cmd.run_all': mock}):
self.assertDictEqual(moosefs.fileinfo('/tmp/salt'), {})
# 'mounts' function tests: 1
def test_mounts(self):
'''
Test if it returns a list of current MooseFS mounts
'''
mock = MagicMock(return_value={'stdout': ''})
with patch.dict(moosefs.__salt__, {'cmd.run_all': mock}):
self.assertDictEqual(moosefs.mounts(), {})
# 'getgoal' function tests: 1
def test_getgoal(self):
'''
Test if it returns goal(s) for a file or directory
'''
mock = MagicMock(return_value={'stdout': 'Salt: salt'})
with patch.dict(moosefs.__salt__, {'cmd.run_all': mock}):
self.assertDictEqual(moosefs.getgoal('/tmp/salt'), {'goal': 'salt'})
if __name__ == '__main__':
from integration import run_tests
run_tests(MoosefsTestCase, needs_daemon=False)
|
arpruss/plucker | parser/python/PyPlucker/ConversionParser.py | Python | gpl-2.0 | 2,897 | 0.007594 | #!/usr/bin/env python
"""
ConversionParser.py $Id: ConversionParser.py,v 1.5 2004/10/20 01:44:53 chrish Exp $
Copyright 2003 Bill Nalen <bill.nalen@towers.com>
Distributable under the GNU General Public License Version 2 or newer.
Provides methods to wrap external convertors to return PluckerTextDocuments
"""
import os, sys, string, tempfile
from PyPlucker import TextParser
from UtilFns import message, error
def WordParser (url, data, headers, config, attributes):
"""Convert a Word document to HTML and returns a PluckerTextDocument"""
# retrieve config information
worddoc_convert | er = config.get_string('worddoc_converter')
if worddoc_converter is None:
message(0, "Could not find Word conversion command")
return None
check = os.path.basename (worddoc_converter)
(check, ext) = os.path.splitext (check)
check = string.lower (check)
if check == 'wvware':
# need to save data to a local file
tempbase = tempfile.mktemp()
| tempdoc = os.path.join(tempfile.tempdir, tempbase + ".doc")
try:
file = open (tempdoc, "wb")
file.write (data)
file.close ()
except IOError, text:
message(0, "Error saving temporary file %s" % tempdoc)
os.unlink(tempdoc)
return None
# then convert it > local.html
temphtml = os.path.join(tempfile.tempdir, tempbase + ".html")
command = worddoc_converter
command = command + " -d " + tempfile.tempdir + " -b " + os.path.join(tempfile.tempdir, tempbase)
command = command + " " + tempdoc + " > " + temphtml
try:
if os.system (command):
message(0, "Error running Word converter %s" % command)
try:
os.unlink(tempdoc)
os.unlink(temphtml)
except:
pass
return None
except:
message(0, "Exception running word converter %s" % command)
try:
os.unlink(tempdoc)
os.unlink(temphtml)
except:
pass
return None
# then load the local.html file to data2
try:
try:
file = open (temphtml, "rb")
data2 = file.read ()
file.close ()
finally:
os.unlink(tempdoc)
os.unlink(temphtml)
except IOError, text:
message(0, "Error reading temporary file %s" % temphtml)
return None
# then create a structuredhtmlparser from data2
parser = TextParser.StructuredHTMLParser (url, data2, headers, config, attributes)
return parser.get_plucker_doc ()
else:
return None
|
acuriel/Nixtla | nixtla/core/tools/database_parser.py | Python | gpl-2.0 | 9,452 | 0.006771 | # -*- coding: utf-8 -*-
'''
Created on Nov 18, 2014
@author: Arturo Curiel
'''
import ply.yacc as yacc
import ply.lex as lex
import ply.ctokens as ctokens
import ConfigParser
import StringIO
from nixtla.core.tools.pdlsl import Atom, AtomAction
from nixtla.core.tools import pdlsl
class PDLSLLexer(object):
tokens = ('PHI_BINOPERATOR',
'PI_BINOPERATOR',
'PHI_UNIOPERATOR',
'PI_UNIOPERATOR',
'ATOM',
'ATOMACTION',
'ARTICULATOR',
'LPAREN',
'RPAREN',
'LBRACKET',
'RBRACKET',
'ALIAS')
t_ignore = ' \t'
t_PHI_BINOPERATOR = r'&|\|{2}|->|<->'
t_PI_BINOPERATOR = r'//|\^|;'
t_PHI_UNIOPERATOR = ctokens.t_NOT
t_PI_UNIOPERATOR = r'\?|\*{2}[0-9]+'
t_ATOM = r'ATOM[0-9]+'
t_ATOMACTION = r'ACTION[0-9]+'
t_ARTICULATOR = r'ART[0-9]+'
t_LPAREN = ctokens.t_LPAREN
t_RPAREN = ctokens.t_RPAREN
t_LBRACKET = ctokens.t_LBRACKET
t_RBRACKET = ctokens.t_RBRACKET
def __init__(self,
aliases):
self.t_ALIAS = r"|".join(aliases)
self.lexer = lex.lex(module=self)
def t_error(self, t):
print "Illegal character '%s'" % t.value[0]
t.lexer.skip(1)
def input(self, data):
self.lexer.input(data)
# DEBUGGING
def tokenize(self, data):
'Debug method!'
self.lexer.input(data)
while True:
tok = self.lexer.token()
if tok:
yield tok
else:
break
def print_tokens(self, data):
for token in self.tokenize(data):
print token
class DatabaseParser(object):
'''Parses the formulae section of a rules.ini file'''
precedence = (
('right', 'PHI_UNIOPERATOR'),
('left', 'PI_UNIOPERATOR'),
('left', 'PI_BINOPERATOR'),
('left', 'PHI_BINOPERATOR')
)
def __init__(self, ruleset):
if not ruleset.rules_config.has_section('Formulae'):
raise ValueError("Couldn't parse formulae to annotate")
formulae_options = ruleset.rules_config.options('Formulae')
if 'database_file' in formulae_options:
database_ini = ruleset.rules_config.get('Formulae', 'database_file')
ini_str = '[Formulae]\n' + open(database_ini, 'r').read()
ini_fp = StringIO.StringIO(ini_str)
config = ConfigParser.SafeConfigParser()
config.optionxform=str
config.readfp(ini_fp)
else:
config = ruleset.rules_config
self.atoms = {str(rule):rule.atom for rule in ruleset.posture_rules}
self.actions = {str(rule):rule.atom for rule in ruleset.transition_rules}
# TODO: Here to not add articulators
self.atom_index = self.atoms.keys()
self.articulators = None
for alias in ruleset.alias_domains.keys():
if "art" in alias:
# we take this as an articulator
self.articulators = ruleset.alias_domains[alias]
for articulator in self.articulators:
self.atoms[articulator] = Atom('True', articulator)
self.actions[articulator+"_movement"] = AtomAction('True', articulator)
# TODO: Here to add articulator_movement
self.action_index = self.actions.keys()
self.definitions = config.options("Formulae")
# Start parser
self.lexer = PDLSLLexer(self.definitions)
self.tokens = self.lexer.tokens
self.parser = yacc.yacc(module=self, write_tables=0, debug=False)
self.database = {}
for option in self.definitions:
string_to_parse = self.substitute_atoms(config.get("Formulae",
option))
self.database[option] = self.parser.parse(string_to_parse)
def p_error(self,pe):
print 'Error!'
print pe
print
def p_expression_isaction(self, p):
'''expression : action'''
p[0] = p[1]
def p_expression_alias(self, p):
'''expression : ALIAS'''
p[0] = self.database[p[1]]
def p_expression_atom(self, p):
'''expression : ATOM'''
entry = int(p[1].replace("ATOM",""))
atom_name = self.atom_index[entry]
p[0] = self.atoms[atom_name]
def p_articulator_expression_phi(self, p):
'''expression : ARTICULATOR PHI_BINOPERATOR ARTICULATOR
| ARTICULATOR PHI_BINOPERATOR expression
| expression PHI_BINOPERATOR ARTICULATOR'''
if 'ART' in p[1]:
p[1] = self.get_articulator_atom(p[1])
if 'ART' in p[3]:
p[3] = self.get_articulator_atom(p[3])
p[0] = self.calculate_formula(p[2], p[1], p[3])
def p_articulator_uniexpression_phi(self, p):
'''expression : PHI_UNIOPERATOR ARTICULATOR'''
if p[2] in self.articulators:
p[2] = self.get_articulator_atom(p[2])
p[0] = self.calculate_formula(p[1], p[2])
def p_uniexpression_phi(self, p):
'''expression : PHI_UNIOPERATOR expression'''
p[0] = self.calculate_formula(p[1], p[2])
def p_binexpression_phi(self, p):
'''expression : expression PHI_BINOPERATOR expression'''
p[0] = self.calculate_formula(p[2], p[1], p[3])
def p_expression_modal(self, p):
'''expression : LBRACKET action RBRACKET LPAREN expression RPAREN
| LBRACKET expression RBRACKET LPAREN expression RPAREN'''
p[0] = pdlsl.pos[p[2]](p[5])
def p_expression_group(self, p):
'''expression : LPAREN expression RPAREN'''
p[0] = p[2]
def p_action_alias(self, p):
'''action : ALIAS'''
p[0] = self.database[p[1]]
def p_action_atom(self, p):
'''action : ATOMACTION'''
entry = int(p[1].replace("ACTION",""))
action_name = self.action_index[entry]
p[0] = self.actions[action_name]
def p_articulator_binaction_pi(self, p):
'''action : ARTICULATOR PI_BINOPERATOR ARTICULATOR
| ARTICULATOR PI_BINOPERATOR action
| action PI_BINOPERATOR ARTICULATOR'''
if 'ART' in p[1]:
p[1] = self.get_articulator_atomaction(p[1])
if 'ART' in p[3]:
p[3] = self.get_articulator_at | omaction(p[3])
p[0] = self.calculate_action(p[2], p[1], p[3])
def p_articulator_uniaction_pi(self, p):
'''action : ARTICULATOR PI_UNIOPERATOR'''
if 'ART' in p[1]:
p[1] = self.get_articulator_atomaction(p[1])
p[0] = self.calculate_action(p[2], p[1])
def p_uniaction_pi(self, p):
'''action : action PI_UNIOPERATOR'''
p[0] = self.calculate_action(p[2], p[1])
def p_binaction_pi(self, p):
| '''action : action PI_BINOPERATOR action'''
p[0] = self.calculate_action(p[2], p[1], p[3])
def p_action_group(self, p):
'''action : LPAREN action RPAREN'''
p[0] = p[2]
def calculate_formula(self, operator, *args):
if operator == '||':
return pdlsl.Or(*args)
elif operator == '&':
return pdlsl.And(*args)
elif operator == '->':
return pdlsl.Implication(*args)
elif operator == '<->':
return pdlsl.DoubleImplication(*args)
elif operator == '~':
return pdlsl.Not(*args)
else:
raise ValueError("Erroneous operator")
def calculate_action(self, operator, *args):
if operator == '//':
return pdlsl.Concurrence(*args)
elif operator == '^':
return pdlsl.Union(*args)
elif operator == ';':
return pdlsl.Concatenation(*args)
elif operator == '?':
return pdlsl.Test(*arg |
Qwertycal/19520-Eye-Tracker | GUI and Mouse/myVidInGUI.py | Python | gpl-2.0 | 2,428 | 0.053954 | from Tkinter import *
import cv2
from PIL import Image, ImageTk
import pyautogui
width, height = 302, 270
screenwidth, screenheight = pyautogui.size()
cap = cv2.VideoCapture(0)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, width)
cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, height)
root = Tk()
root.title("Testing Mode")
root.bind('<Escape>', lambda e: root.quit())
root.attributes("-fullscreen", True)
#Create Frame
mainFrame = Frame(root)
devViewFrame = Frame(mainFrame, bg = "blue", width = 300, height = 650)
userViewFrame = Frame(mainFrame, bg = "red", width = 500, height = 650)
videoFrame = Frame(devViewFrame, bg = "green", width = width, height = height)
coordsFrame = Frame(devViewFrame, bg = "black", width = 300, height = 380)
devButtonFrame = Frame(devViewFrame, width = 20 | 0)
userButtonFrame = Frame(userViewFrame, width = 500)
desktopViewFrame = Frame(userViewFrame, bg = "red", width = 500, height = 650)
#Create Buttons
mode = IntVar()
devModeB = Radiobutton(devButtonFrame,text="Developer Mode",variable=mod | e,value=1)
userModeB = Radiobutton(devButtonFrame,text="User Mode",variable=mode,value=2)
recordB = Button(userButtonFrame, text = "Record")
stopB = Button(userButtonFrame, text = "Stop")
#for Text width is mesaured in the number of characters, height is the number of lines displayed
outputCoOrds = Text(coordsFrame, width = 42, height = 20)
videoStream = Label(videoFrame)
#Put all of the elements into the GUI
mainFrame.grid(row = 1, column =0, sticky = N)
devViewFrame.grid(row = 1, column = 0, rowspan = 4, sticky = N)
userViewFrame.grid(row = 1, column = 3, sticky = N)
videoFrame.grid(row = 1, column = 0, sticky = N)
coordsFrame.grid(row = 2, column =0, sticky = NW)
devButtonFrame.grid(row = 0, column = 0, sticky = N)
userButtonFrame.grid(row = 0, column = 0, sticky = N)
desktopViewFrame.grid(row = 1, column = 0, sticky = N)
devModeB.grid (row = 0, column =0)
userModeB.grid (row = 0, column = 1)
recordB.grid (row = 0, column = 0)
stopB.grid (row = 0, column = 1)
outputCoOrds.grid(row = 0, column = 0, sticky = NW)
videoStream.grid()
#Show frame
def show_frame():
_, frame = cap.read()
frame = cv2.flip(frame, 1)
cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
img = Image.fromarray(cv2image)
imgtk = ImageTk.PhotoImage(image=img)
videoStream.imgtk = imgtk
videoStream.configure(image=imgtk)
videoStream.after(10, show_frame)
show_frame()
root.mainloop() |
googleapis/python-resource-manager | samples/generated_samples/cloudresourcemanager_v3_generated_folders_delete_folder_async.py | Python | apache-2.0 | 1,580 | 0.000633 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteFolder
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-resourcemanager
# [START cloudresourcemanager_v3_ | generated_Folders_DeleteFolder_async]
from google.cloud import resourcemanager_v3
async def sample_delet | e_folder():
# Create a client
client = resourcemanager_v3.FoldersAsyncClient()
# Initialize request argument(s)
request = resourcemanager_v3.DeleteFolderRequest(
name="name_value",
)
# Make the request
operation = client.delete_folder(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END cloudresourcemanager_v3_generated_Folders_DeleteFolder_async]
|
koehlma/pygrooveshark | examples/django_webapp/django_webapp/wsgi.py | Python | lgpl-3.0 | 401 | 0.002494 | """
WSGI confi | g for django_webapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_webapp.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_appl | ication()
|
luisgg/iteexe | exe/export/websiteexport.py | Python | gpl-2.0 | 8,267 | 0.006169 | # ===========================================================================
# eXe
# Copyright 2004-2005, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
WebsiteExport will export a package as a website of HTML pages
"""
import logging
import re
import imp
from cgi import escape
from exe.webui.blockfactory import g_blockFactory
from exe.engine.error import Error
from exe.engine.path import Path, TempDirPath
from exe.export.pages import uniquifyNames
from exe.export.websitepage import WebsitePage
from zipfile import ZipFile, ZIP_DEFLATED
log = logging.getLogger(__name__)
# ===========================================================================
class WebsiteExport(object):
"""
WebsiteExport will export a package as a website of HTML pages
"""
def __init__(self, config, styleDir, filename, prefix=""):
"""
'stylesDir' is the directory where we can copy the stylesheets from
'outputDir' is the directory that will be [over]written
with the website
"""
self.config = config
self.imagesDir = config.webDir/"images"
self.scriptsDir = config.webDir/"scripts"
self.templatesDir = config.webDir/"templates"
self.stylesDir = Path(styleDir)
self.filename = Path(filename)
self.pages = []
self.prefix = prefix
def exportZip(self, package):
"""
Export web site
Cleans up the previous packages pages and performs the export
"""
outputDir = TempDirPath()
# Import the Website Page class. If the style has it's own page class
# use that, else use the default one.
if (self.stylesDir/"websitepage.py").exists():
global WebsitePage
module = imp.load_source("websitepage",
self.stylesDir/"websitepage.py")
WebsitePage = module.WebsitePage
self.pages = [ WebsitePage("index", 1, package.root) ]
self.generatePages(package.root, 1)
uniquifyNames(self.pages)
prevPage = None
thisPage = self.pages[0]
for nextPage in self.pages[1:]:
thisPage.save(outputDir, prevPage, nextPage, self.pages)
prevPage = thisPage
thisPage = nextPage
thisPage.save(outputDir, prevPage, None, self.pages)
self.copyFiles(package, outputDir)
# Zip up the website package
self.filename.safeSave(self.doZip, _('EXPORT FAILED!\nLast succesful export is %s.'), outputDir)
# Clean up the temporary dir
outputDir.rmtree()
def doZip(self, fileObj, outputDir):
"""
Actually saves the zip data. Called by 'Path.safeSave'
"""
zipped = ZipFile(fileObj, "w")
for scormFile in outputDir.files():
zipped.write(scormFile, scormFile.basename().encode('utf8'), ZIP_DEFLATED)
zipped.close()
def export(self, package):
"""
Export web site
Cleans up the previous packages pages and performs the export
"""
outputDir = self.filename
if not outputDir.exists():
outputDir.mkdir()
# Import the Website Page class. If the style has it's own page class
# use that, else use the default one.
if (self.stylesDir/"websitepage.py").exists():
global WebsitePage
module = imp.load_source("websitepage",
self.stylesDir/"websitepage.py")
WebsitePage = module.WebsitePage
self.pages = [ WebsitePage(self.prefix + "index", 1, package.root) ]
self.generatePages(package.root, 1)
uniquifyNames(self.pages)
prevPage = None
thisPage = self.pages[0]
for nextPage in self.pages[1:]:
thisPage.save(outputDir, prevPage, nextPage, self.pages)
prevPage = thisPage
thisPage = nextPage
thisPage.save(outputDir, prevPage, None, self.pages)
if self.prefix == "":
self.copyFiles(package, outputDir)
def copyFiles(self, package, outputDir):
"""
Copy all the files used by the website.
"""
# Copy the style sheet files to the output dir
s | tyleFiles = [self.stylesDir/'..'/'base.css']
styleFiles += [self.stylesDir/'..'/'popup_bg.gif']
styleFiles += self.stylesDir.files("*.css")
styleFiles | += self.stylesDir.files("*.jpg")
styleFiles += self.stylesDir.files("*.gif")
styleFiles += self.stylesDir.files("*.png")
styleFiles += self.stylesDir.files("*.js")
styleFiles += self.stylesDir.files("*.html")
styleFiles += self.stylesDir.files("*.ico")
self.stylesDir.copylist(styleFiles, outputDir)
# copy the package's resource files
package.resourceDir.copyfiles(outputDir)
# copy script files.
self.scriptsDir.copylist(('libot_drag.js', 'common.js'),
outputDir)
# copy players for media idevices.
hasFlowplayer = False
hasMagnifier = False
hasXspfplayer = False
isBreak = False
for page in self.pages:
if isBreak:
break
for idevice in page.node.idevices:
if (hasFlowplayer and hasMagnifier and hasXspfplayer):
isBreak = True
break
if not hasFlowplayer:
if 'flowPlayer.swf' in idevice.systemResources:
hasFlowplayer = True
if not hasMagnifier:
if 'magnifier.swf' in idevice.systemResources:
hasMagnifier = True
if not hasXspfplayer:
if 'xspf_player.swf' in idevice.systemResources:
hasXspfplayer = True
if hasFlowplayer:
videofile = (self.templatesDir/'flowPlayer.swf')
videofile.copyfile(outputDir/'flowPlayer.swf')
# JR: anadimos los controles
controlsfile = (self.templatesDir/'flowplayer.controls.swf')
controlsfile.copyfile(outputDir/'flowplayer.controls.swf')
if hasMagnifier:
videofile = (self.templatesDir/'magnifier.swf')
videofile.copyfile(outputDir/'magnifier.swf')
if hasXspfplayer:
videofile = (self.templatesDir/'xspf_player.swf')
videofile.copyfile(outputDir/'xspf_player.swf')
if package.license == "GNU Free Documentation License":
# include a copy of the GNU Free Documentation Licence
(self.templatesDir/'fdl.html').copyfile(outputDir/'fdl.html')
def generatePages(self, node, depth):
"""
Recursively generate pages and store in pages member variable
for retrieving later
"""
for child in node.children:
pageName = child.titleShort.lower().replace(" ", "_")
pageName = re.sub(r"\W", "", pageName)
if not pageName:
pageName = "__"
self.pages.append(WebsitePage(se |
Hawker65/deploycron | deploycron/__init__.py | Python | mit | 5,288 | 0 | # coding: utf-8
import subprocess
import os
def deploycron(filename="", content="", override=False):
"""install crontabs into the system if it's not installed.
This will not remove the other crontabs installed in the system if not
specified as override. It just merge the new one with the existing one.
If you provide `filename`, then will install the crontabs in that file
otherwise install crontabs specified in content
filename - file contains crontab, one crontab for a line
content - string that contains crontab, one crontab for a line
override - override the origin crontab
"""
if not filename and not content:
raise ValueError("neither filename or crontab must be specified")
if filename:
try:
with open(filename, 'r') as f:
content = f.read()
except Exce | ption as e:
raise ValueError("cannot open the file: % | s" % str(e))
if override:
installed_content = ""
else:
installed_content = _get_installed_content()
installed_content = installed_content.rstrip("\n")
installed_crontabs = installed_content.split("\n")
for crontab in content.split("\n"):
if crontab and crontab not in installed_crontabs:
if not installed_content:
installed_content += crontab
else:
installed_content += "\n%s" % crontab
if installed_content:
installed_content += "\n"
# install back
_install_content(installed_content)
def undeploycron_between(start_line, stop_line, occur_start=1, occur_stop=1):
"""uninstall crontab parts between two lines (included).
If the start_line or the stop_line is not found into the installed crontab,
it won't be modified.
`start_line` - start crontab line (the actual line, not the line number)
to delimit the crontab block to remove
`stop_line` - stop crontab line (the actual line, not the line number)
to delimit the crontab block to remove
`occur_start` - nth occurence you want to consider as start_line (ex :
choose 2 if you want the 2nd occurence to be chosen as start_line)
`occur_stop` - nth occurence you want to consider as stop_line (ex :
choose 2 if you want the 2nd occurence to be chosen as stop_line)
"""
lines_installed = [x.strip() for x in
_get_installed_content().splitlines()]
start_line = start_line.strip()
stop_line = stop_line.strip()
if start_line not in lines_installed:
return False
if stop_line not in lines_installed:
return False
if occur_start is None or occur_start <= 0:
return False
if occur_stop is None or occur_stop <= 0:
return False
# Check if stop_line is before start_line by getting their indices
index_start = -1
index_stop = -1
try:
# Find the occurence we are interested in
for j in range(occur_start):
index_start = lines_installed.index(start_line, index_start + 1)
except ValueError:
# If the occurence number is too high (nth occurrence not found)
return False
try:
for j in range(occur_stop):
index_stop = lines_installed.index(stop_line, index_stop + 1)
except ValueError:
return False
# If stop is before start, we switch them
if index_stop < index_start:
buffer_var = index_start
index_start = index_stop
index_stop = buffer_var
lines_to_install = []
for i in range(len(lines_installed)):
if i < index_start or i > index_stop:
lines_to_install.append(lines_installed[i])
if len(lines_to_install) > 0:
lines_to_install.append("")
content_to_install = "\n".join(lines_to_install)
_install_content(content_to_install)
return True
def _get_installed_content():
"""get the current installed crontab.
"""
retcode, err, installed_content = _runcmd("crontab -l")
if retcode != 0 and b'no crontab for' not in err:
raise OSError("crontab not supported in your system")
return installed_content.decode("utf-8")
def _install_content(content):
"""install (replace) the given (multilines) string as new crontab...
"""
retcode, err, out = _runcmd("crontab", content)
if retcode != 0:
raise ValueError("failed to install crontab, check if crontab is "
"valid")
def _runcmd(cmd, input=None):
'''run shell command and return the a tuple of the cmd's return code, std
error and std out.
WARN: DO NOT RUN COMMANDS THAT NEED TO INTERACT WITH STDIN WITHOUT SPECIFY
INPUT, (eg cat), IT WILL NEVER TERMINATE.
'''
if input is not None:
p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
close_fds=True, preexec_fn=os.setsid)
input = input.encode()
else:
p = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
close_fds=True, preexec_fn=os.setsid)
stdoutdata, stderrdata = p.communicate(input)
return p.returncode, stderrdata, stdoutdata
|
ccbrandenburg/financialanalyticsproject | iembdfa/DataCleaning.py | Python | mit | 1,698 | 0.008245 | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 15 11:39:04 2016
@author: rahulmehra
"""
# Import the modules
import pandas as pd
from sklearn.preprocessing import LabelEncoder
import numpy as np
# Define a function to autoclean the pandas dataframe
def autoclean(x):
for column in x.columns:
# Replace NaNs with the median or mode of the column depending on the column type
try:
x[column].fillna(x[column].median(), inplace=True)
except TypeError:
x[column].fillna(x[column].mode(), inplace=True)
# Select the columns with type int and float
if x[column].dtypes == 'int64' or x[column].dtypes == 'float64':
#Calculate mean of the column
mean = x[column].mean()
#Calculate the standard deviation of the column
std = 2.5*x[column].std()
# See for the outliers and impute with median
x[column] = x[column].apply(lambda y: x[column].median() if(abs(y - mean >std)) else y)
# Calculate the number of rows in dataframe
n_rows = len(x.index)
#Calculate the percentage of negative values in the column
negative_perc = np.sum((x[column] < 0))/n_rows
#Handle the unreliable values (like negative values in the positive value column)
x[column] = x[column].apply(lambda y: -(y) if (y<0 and negative_perc >= 0.05) else y)
# Encode all strings with numerical equivalents
if str(x[column].values.dtype) == 'object':
column_encoder = LabelEncoder().fit(x[ | column].values)
x[column] = colum | n_encoder.transform(x[column].values)
return(x)
|
mozilla/addons-server | src/olympia/addons/tests/test_tasks.py | Python | bsd-3-clause | 8,920 | 0.001121 | from unittest import mock
import os
import pytest
from django.conf import settings
from waffle.testutils import override_switch
from olympia import amo
from olympia.addons.tasks import (
recreate_theme_previews,
update_addon_average_daily_users,
update_addon_hotness,
update_addon_weekly_downloads,
)
from olympia.amo.tests import addon_factory, root_storage
from olympia.versions.models import VersionPreview
@pytest.mark.django_db
def test_recreate_theme_previews():
xpi_path = os.path.join(
settings.ROOT, 'src/olympia/devhub/tests/addons/mozilla_static_theme.zip'
)
addon_without_previews = addon_factory(type=amo.ADDON_STATICTHEME)
root_storage.copy_stored_file(
xpi_path, addon_without_previews.current_version.file.file_path
)
addon_with_previews = addon_factory(type=amo.ADDON_STATICTHEME)
root_storage.copy_stored_file(
xpi_path, addon_with_previews.current_version.file.file_path
)
VersionPreview.objects.create(
version=addon_with_previews.current_version,
sizes={'image': [123, 456], 'thumbnail': [34, 45]},
)
assert addon_without_previews.current_previews.count() == 0
assert addon_with_previews.current_previews.count() == 1
recreate_theme_previews([addon_without_previews.id, addon_with_previews.id])
assert addon_without_previews.reload().current_previews.count() == 2
assert addon_with_previews.reload().current_previews.count() == 2
sizes = addon_without_previews.current_previews.values_list('sizes', flat=True)
renderings = amo.THEME_PREVIEW_RENDERINGS
assert list(sizes) == [
{
'image': list(renderings['firefox']['full']),
'thumbnail': list(renderings['firefox']['thumbnail']),
'image_format': renderings['firefox']['image_format'],
'thumbnail_format': renderings['firefox']['thumbnail_format'],
},
{
'image': list(renderings['amo']['full']),
'thumbnail': list(renderings['amo']['thumbnail']),
'image_format': renderings['amo']['image_format'],
'thumbnail_format': renderings['amo']['thumbnail_format'],
},
]
PATCH_PATH = 'olympia.addons.tasks'
@pytest.mark.django_db
@mock.patch(f'{PATCH_PATH}.parse_addon')
def test_create_missing_theme_previews(parse_addon_mock):
parse_addon_mock.return_value = {}
theme = addon_factory(type=amo.ADDON_STATICTHEME)
amo_preview = VersionPreview.objects.create(
version=theme.current_version,
sizes={
'image': amo.THEME_PREVIEW_RENDERINGS['amo']['full'],
'thumbnail': amo.THEME_PREVIEW_RENDERINGS['amo']['thumbnail'],
'thumbnail_format': amo.THEME_PREVIEW_RENDERINGS['amo']['thumbnail_format'],
'image_format': amo.THEME_PREVIEW_RENDERINGS['amo']['image_format'],
},
)
firefox_preview = VersionPreview.objects.create(
version=theme.current_version,
sizes={
'image': amo.THEME_PREVIEW_RENDERINGS['firefox']['full'],
'thumbnail': amo.THEME_PREVIEW_RENDERINGS['firefox']['thumbnail'],
},
)
# add another extra preview size that should be ignored
extra_preview = VersionPreview.objects.create(
version=theme.current_version,
sizes={'image': [123, 456], 'thumbnail': [34, 45]},
)
# addon has all the complete previews already so skip when only_missing=True
assert VersionPreview.objects.count() == 3
with mock.patch(
f'{PATCH_PATH}.generate_static_theme_preview.apply_async'
) as gen_preview, mock.patch(f'{PATCH_PATH}.resize_image') as resize:
recreate_theme_previews([theme.id], only_missing=True)
assert gen_preview.call_count == 0
assert resize.call_count == 0
recreate_theme_previews([theme.id], only_missing=False)
assert gen_preview.call_count == 1
assert resize.call_count == 0
# If the add-on is missing a preview, we call generate_static_theme_preview
VersionPreview.objects.get(id=amo_preview.id).delete()
firefox_preview.save()
extra_preview.save()
assert VersionPreview.objects.count() == 2
with mock.patch(
f'{PATCH_PATH}.generate_static_theme_preview.apply_async'
) as gen_preview, mock.patch(f'{PATCH_PATH}.resize_image') as resize:
recreate_theme_previews([theme.id], only_missing=True)
assert gen_preview.call_count == 1
assert resize.call_count == 0
# Preview is correct dimensions but wrong format, call generate_static_theme_preview
amo_preview.sizes['image_format'] = 'foo'
amo_preview.save()
firefox_preview.save()
extra_preview.save()
assert VersionPreview.objects.count() == 3
with mock.patch(
f'{PATCH_PATH}.generate_static_theme_preview.apply_async'
) as gen_preview, mock.patch(f'{PATCH_PATH}.resize_image') as resize:
recreate_theme_previews([theme.id], only_missing=True)
assert gen_preview.call_count == 1
assert resize.call_count == 0
# But we don't do the full regeneration to just get new thumbnail sizes or formats
amo_preview.sizes['thumbnail'] = [666, 444]
amo_preview.sizes['image_format'] = 'svg'
amo_preview.save()
assert amo_preview.thumbnail_dimensions == [666, 444]
firefox_preview.sizes['thumbnail_format'] = 'gif'
firefox_preview.save()
assert firefox_preview.get_format('thumbnail') == 'gif'
extra_preview.save()
assert VersionPreview.objects.count() == 3
with mock.patch(
f'{PATCH_PATH}.generate_static_theme_preview.apply_async'
) as gen_preview, mock.patch(f'{PATCH_PATH}.resize_image') as resize:
recreate_theme_previews([theme.id], only_missing=True)
| assert gen_preview.call_count == 0 # not called
assert resize.call_count == 2
amo_preview.reload()
assert amo_preview.thumbnail_dimensions == [720, 92]
firefox_preview.reload()
assert firefox_ | preview.get_format('thumbnail') == 'png'
assert VersionPreview.objects.count() == 3
@pytest.mark.django_db
def test_update_addon_average_daily_users():
addon = addon_factory(average_daily_users=0)
count = 123
data = [(addon.guid, count)]
assert addon.average_daily_users == 0
update_addon_average_daily_users(data)
addon.refresh_from_db()
assert addon.average_daily_users == count
@pytest.mark.django_db
@override_switch('local-statistics-processing', active=True)
def test_update_deleted_addon_average_daily_users():
addon = addon_factory(average_daily_users=0)
addon.delete()
count = 123
data = [(addon.guid, count)]
assert addon.average_daily_users == 0
update_addon_average_daily_users(data)
addon.refresh_from_db()
assert addon.average_daily_users == count
@pytest.mark.django_db
def test_update_addon_hotness():
addon1 = addon_factory(hotness=0, status=amo.STATUS_APPROVED)
addon2 = addon_factory(hotness=123, status=amo.STATUS_APPROVED)
addon3 = addon_factory(hotness=123, status=amo.STATUS_AWAITING_REVIEW)
averages = {
addon1.guid: {'avg_this_week': 213467, 'avg_three_weeks_before': 123467},
addon2.guid: {
'avg_this_week': 1,
'avg_three_weeks_before': 1,
},
addon3.guid: {'avg_this_week': 213467, 'avg_three_weeks_before': 123467},
}
update_addon_hotness(averages=averages.items())
addon1.refresh_from_db()
addon2.refresh_from_db()
addon3.refresh_from_db()
assert addon1.hotness > 0
# Too low averages so we set the hotness to 0.
assert addon2.hotness == 0
# We shouldn't have processed this add-on.
assert addon3.hotness == 123
def test_update_addon_weekly_downloads():
addon = addon_factory(weekly_downloads=0)
count = 123
data = [(addon.addonguid.hashed_guid, count)]
assert addon.weekly_downloads == 0
update_addon_weekly_downloads(data)
addon.refresh_from_db()
assert addon.weekly_downloads == count
def test_update_addon_weekly_downloads_ignores_deleted_addons():
guid = 'some@guid'
deleted_addon = addon_factory(guid=guid)
|
mrkm4ntr/incubator-airflow | airflow/cli/cli_parser.py | Python | apache-2.0 | 53,687 | 0.003241 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Command-line interface"""
import argparse
import json
import os
import textwrap
from argparse import Action, ArgumentError, RawTextHelpFormatter
from functools import lru_cache
from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Union
from tabulate import tabulate_formats
from airflow import settings
from airflow.cli.commands.legacy_commands import check_legacy_command
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.executors import executor_constants
from airflow.utils.cli import ColorMode
from airflow.utils.helpers import partition
from airflow.utils.module_loading import import_string
from airflow.utils.timezone import parse as parsedate
BUILD_DOCS = "BUILDING_AIRFLOW_DOCS" in os.environ
def lazy_load_command(import_path: str) -> Callable:
"""Create a lazy loader for command"""
_, _, name = import_path.rpartition('.')
def command(*args, **kwargs):
func = import_string(import_path)
return func(*args, **kwargs)
command.__name__ = name
return command
class DefaultHelpParser(argparse.ArgumentParser):
"""CustomParser to display help message"""
def _check_value(self, action, value):
"""Override _check_value and check conditionally added command"""
executor = conf.get('core', 'EXECUTOR')
if value == 'celery' and executor != executor_constants.CELERY_EXECUTOR:
message = f'celery subcommand works only with CeleryExecutor, your current executor: {executor}'
raise ArgumentError(action, message)
if value == 'kubernetes':
try:
import kubernetes.client # noqa: F401 pylint: disable=unused-import
except ImportError:
message = (
'The kubernetes subcommand requires that you pip install the kubernetes python client.'
"To do it, run: pip install 'apache-airflow[cncf.kubernetes]'"
)
raise ArgumentError(action, message)
if action.choices is not None and value not in action.choices:
check_legacy_command(action, value)
super()._check_value(action, value)
def error(self, message):
"""Override error and use print_instead of print_usage"""
self.print_help()
self.exit(2, f'\n{self.prog} command error: {message}, see help above.\n')
# Used in Arg to enable `None' as a distinct value from "not passed"
_UNSET = object()
class Arg:
"""Class to keep information about command line argument"""
# pylint: disable=redefined-builtin,unused-argument
def __init__(
self,
flags=_UNSET,
help=_UNSET,
action=_UNSET,
default=_UNSET,
nargs=_UNSET,
type=_UNSET,
choices=_UNSET,
required=_UNSET,
metavar=_UNSET,
):
self.flags = flags
self.kwargs = {}
for k, v in locals().items():
if v is _UNSET:
continue
if k in ("self", "flags"):
continue
self.kwargs[k] = v
# pylint: enable=redefined-builtin,unused-argument
def add_to_parser(self, parser: argparse.ArgumentParser):
"""Add this argument to an ArgumentParser"""
parser.add_argument(*self.flags, **self.kwargs)
def positive_int(value):
    """Argparse ``type=`` callable accepting strictly positive integers only."""
    try:
        value = int(value)
    except ValueError:
        pass
    else:
        if value > 0:
            return value
    # Either parsing failed (original string shown) or the number is <= 0
    # (normalized int shown), matching argparse's error-reporting style.
    raise argparse.ArgumentTypeError(f"invalid positive int value: '{value}'")
# Shared
ARG_DAG_ID = Arg(("dag_id",), help="The id of the dag")
ARG_TASK_ID = Arg(("task_id",), help="The id of the task")
ARG_EXECUTION_DATE = Arg(("execution_date",), help="The execution date of the DAG", type=parsedate)
ARG_TASK_REGEX = Arg(
("-t", "--task-regex"), help="The regex to filter specific task_ids to backfill (optional)"
)
ARG_SUBDIR = Arg(
("-S", "--subdir"),
help=(
"File location or directory from which to look for the dag. "
"Defaults to '[AIRFLOW_HOME]/dags' where [AIRFLOW_HOME] is the "
"value you set for 'AIRFLOW_HOME' config you set in 'airflow.cfg' "
),
default='[AIRFLOW_HOME]/dags' if BUILD_DOCS else settings.DAGS_FOLDER,
)
ARG_START_DATE = Arg(("-s", "--start-date"), help="Override start_date YYYY-MM-DD", type=parsedate)
ARG_END_DATE = Arg(("-e", "--end-date"), help="Override end_date YYYY-MM-DD", type=parsedate)
ARG_OUTPUT_PATH = Arg(
(
"-o",
"--output-path",
),
help="The output for generated yaml files",
type=str,
default="[CWD]" if BUILD_DOCS else os.getcwd(),
)
ARG_DRY_RUN = Arg(
("-n", "--dry-run"),
help="Perform a dry run for each task. Only renders Template Fields for each task, nothing else",
action="store_true",
)
ARG_PID = Arg(("--pid",), help="PID file location", nargs='?')
ARG_DAEMON = Arg(
("-D", "--daemon"), help="Daemonize instead of running in the foreground", action="store_true"
)
ARG_STDERR = Arg(("--stderr",), help="Redirect stderr to this file")
ARG_STDOUT = Arg(("--stdout",), help="Redirect stdout to this file")
ARG_LOG_FILE = Arg(("-l", "--log-file"), help="Location of the log file")
ARG_YES = Arg(
("-y", "--yes"), help="Do not prompt to confirm reset. Use with care!", action="store_true", default=False
)
ARG_OUTPUT = Arg(
("--output",),
help=(
"Output table format. The specified value is passed to "
"the tabulate module (https://pypi.org/project/tabulate/). "
),
metavar="FORMAT",
choices=tabulate_formats,
default="plain",
)
ARG_COLOR = Arg(
('--color',),
help="Do emit colored output (default: auto)",
choices={ColorMode.ON, ColorMode.OFF, ColorMode.AUTO},
default=ColorMode.AUTO,
)
# list_dag_runs
ARG_DAG_ID_OPT = Arg(("-d", "--dag-id"), help="The id of the dag")
# FIX: this assignment and the option name below were corrupted by stray
# " | " extraction artifacts; restored to valid Python.
ARG_NO_BACKFILL = Arg(
    ("--no-backfill",), help="filter all the backfill dagruns given the dag id", action="store_true"
)
ARG_STATE = Arg(("--state",), help="Only list the dag runs corresponding to the state")
# list_jobs
ARG_LIMIT = Arg(("--limit",), help="Return a limited number of records")
# next_execution
ARG_NUM_EXECUTIONS = Arg(
    ("-n", "--num-executions"),
    default=1,
    type=positive_int,
    help="The number of next execution datetimes to show",
)
# backfill
ARG_MARK_SUCCESS = Arg(
("-m", "--mark-success"), help="Mark jobs as succeeded without running them", action="store_true"
)
ARG_VERBOSE = Arg(("-v", "--verbose"), help="Make logging output more verbose", action="store_true")
ARG_LOCAL = Arg(("-l", "--local"), help="Run the task using the LocalExecutor", action="store_true")
ARG_DONOT_PICKLE = Arg(
("-x", "--donot-pickle"),
help=(
"Do not attempt to pickle the DAG object to send over "
"to the workers, just tell the workers to run their version "
"of the code"
),
action="store_true",
)
ARG_BF_IGNORE_DEPENDENCIES = Arg(
("-i", "--ignore-dependencies"),
help=(
"Skip upstream tasks, run only the tasks "
"matching the regexp. Only works in conjunction "
"with task_regex"
),
action="store_true",
)
ARG_BF_IGNORE_FIRST_DEPENDS_ON_PAST = Arg(
("-I", "--ignore-first-depends-on-past"),
help=(
|
wyrdmeister/OnlineAnalysis | OAGui/src/Control/OAAttrModel.py | Python | gpl-3.0 | 18,589 | 0.00199 | # -*- coding: utf-8 -*-
"""
Online Analysis Configuration Control - TANGO attribute model
Version 1.0
Michele Devetta (c) 2013
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import h5py as h5
import numpy as np
# PyQt4
from PyQt4 import QtCore
from PyQt4 import QtGui
# PyTango
import PyTango as PT
# GuiBase
from OAGui.GuiBase import GuiBase
from OAGui.GuiBase import declare_trUtf8
_trUtf8 = declare_trUtf8("OAControl")
# Plot dialog
from OAGui.OAMultiplot import PlotDialog
class AttributeModel(QtCore.QAbstractTableModel, GuiBase):
""" Attribute model """
# Signal to refresh attribute list
refresh = QtCore.pyqtSignal()
    def __init__(self, device, parent=None):
        """ Constructor.

        device -- TANGO device name handed to PT.DeviceProxy.
        parent -- optional Qt parent object.
        On connection failure self.dev is left as None and the model
        simply stays empty.
        """
        # Parent constructor
        QtCore.QAbstractTableModel.__init__(self, parent)
        GuiBase.__init__(self, "OAControl")
        # Store parent
        self._parent = parent
        # Tango device
        try:
            self.dev = PT.DeviceProxy(device)
        except PT.DevFailed, e:
            self.logger.error("Failed to connect to TANGO device (Error: %s)", e[0].desc)
            self.dev = None
        # Connect refresh signal
        self.refresh.connect(self.refresh_model)
        # Attribute list: one dict per row, keyed by cols_keys entries.
        self.attributes = []
        self.cols_keys = ['name', 'vartype', 'datatype', 'size', 'acc', 'bstatus', 'bsave', 'bview']
        # Populate the model asynchronously via the refresh signal.
        self.refresh.emit()
@QtCore.pyqtSlot()
def refresh_model(self):
""" Refresh the model. """
if self.dev:
# Add attributes
attr_info = self.dev.attribute_list_query()
attr_list = self.dev.get_attribute_list()
# Cycle over attributes
for a in attr_info:
# Skip state and status
if a.label == 'State' or a.label == 'Status':
continue
# Skip reset attributes
rexp = re.compile('.*__reset$')
if rexp.match(a.label):
continue
# Skip bunches attributes
rexp = re.compile('.*__bunches$')
if rexp.match(a.label):
continue
# Skip bunches attributes
rexp = re.compile('.*__x$')
if rexp.match(a.label):
continue
# Format size
size = '1'
if a.data_format == PT.AttrDataFormat.SPECTRUM:
size = "[%d, ]" % (a.max_dim_x, )
elif a.data_format == PT.AttrDataFormat.IMAGE:
size = "[%d, %d]" % (a.max_dim_x, a.max_dim_y)
elif a.data_format == PT.AttrDataFormat.SCALAR:
val = self.dev.read_attribute(a.label).value
if val == 0:
size = "0"
elif val > 0.01 and val < 100:
size = "%.4f" % val
else:
size = "%.4e" % val
# Accumulated statistic
acc = -1
if a.label + "__bunches" in attr_list:
acc = len(self.dev.read_attribute(a.label + "__bunches").value)
# Search attribute
for at in self.attributes:
if at[self.cols_keys[0]] == a.label:
# Update entry
self.setData(self.index(self.attributes.index(at), 1), QtCore.QVariant(self._tango_atype2str(a.data_format)), QtCore.Qt.EditRole)
self.setData(self.index(self.attributes.index(at), 2), QtCore.QVariant(self._tango_dtype2str(a.data_type)), QtCore.Qt.EditRole)
self.setData(self.index(self.attributes.index(at), 3), QtCore.QVariant(size), QtCore.Qt.EditRole)
self.setData(self.index(self.attributes.index(at), 4), QtCore.QVariant(acc), QtCore.Qt.EditRole)
break
else:
# Add attribute
self.appendAttribute({
self.cols_keys[0]: a.label,
self.cols_keys[1]: self._tango_atype2str(a.data_format),
self.cols_keys[2]: self._tango_dtype2str(a.data_type),
self.cols_keys[3]: size,
self.cols_keys[4]: acc,
self.cols_keys[5]: False,
self.cols_keys[6]: False,
self.cols_keys[7]: False})
# Remove old attributes
for at in self.attributes:
for a in attr_list:
if a == at[self.cols_keys[0]]:
break
else:
self.removeAttribute(self.attributes.index(at))
    def rowCount(self, parent=QtCore.QModelIndex()):
        """ Return row count (one row per monitored attribute). """
        return len(self.attributes)
    def columnCount(self, parent=QtCore.QModelIndex()):
        """ Return column count (fixed: one column per cols_keys entry). """
        return len(self.cols_keys)
    def data(self, index, role=QtCore.Qt.DisplayRole):
        """ Return data from the model for the display role only. """
        if index.isValid() and role == QtCore.Qt.DisplayRole:
            return QtCore.QVariant(self.attributes[index.row()][self.cols_keys[index.column()]])
        # Invalid index or unsupported role: empty QVariant per Qt convention.
        return QtCore.QVariant()
    def setData(self, index, data, role=QtCore.Qt.EditRole):
        """ Modify data in the model; returns True when the edit was applied. """
        if index.isValid() and role == QtCore.Qt.EditRole:
            # Columns 5-7 (bstatus/bsave/bview) hold booleans; all earlier
            # columns are stored as plain strings.
            if index.column() > 4:
                self.attributes[index.row()][self.cols_keys[index.column()]] = data.toBool()
            else:
                self.attributes[index.row()][self.cols_keys[index.column()]] = str(data.toString())
            self.dataChanged.emit(index, index)
            return True
        return False
    def appendAttribute(self, attribute, parent=QtCore.QModelIndex()):
        """ Append new attribute row, wrapped in Qt's begin/end protocol. """
        self.beginInsertRows(parent, self.rowCount(), self.rowCount())
        self.attributes.append(attribute)
        self.endInsertRows()
    def removeAttribute(self, row, parent=QtCore.QModelIndex()):
        """ Remove the attribute at *row*, wrapped in Qt's begin/end protocol. """
        self.beginRemoveRows(parent, row, row)
        del self.attributes[row]
        self.endRemoveRows()
    def clearModel(self, parent=QtCore.QModelIndex()):
        """ Remove all attributes from model. """
        # NOTE(review): on an already-empty model this calls
        # beginRemoveRows(parent, 0, -1); confirm Qt tolerates that here.
        self.beginRemoveRows(parent, 0, self.rowCount() - 1)
        self.attributes = []
        self.endRemoveRows()
    def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
        """ Return headers: fixed labels horizontally, attribute names vertically. """
        if role == QtCore.Qt.DisplayRole:
            if orientation == QtCore.Qt.Horizontal:
                if section == 0:
                    return QtCore.QVariant("Name")
                elif section == 1:
                    return QtCore.QVariant("Attribute type")
                elif section == 2:
                    return QtCore.QVariant("Data type")
                elif section == 3:
                    return QtCore.QVariant("Size")
                elif section == 4:
                    return QtCore.QVariant("Acc.")
            elif orientation == QtCore.Qt.Vertical:
                # Row header is the attribute's own name.
                return QtCore.QVariant(self.attributes[section]['name'])
        return QtCore.QVariant()
def reset_attribute(self, index):
""" Reset a attribute setting the flag in the DynAttr server. """
if self.dev:
name = str(self.data(self.index(i |
fredriklindberg/chromesthesia | chromesthesia_app/chromesthesia.py | Python | gpl-2.0 | 5,919 | 0.004224 | #!/usr/bin/env python
# Copyright (C) 2013-2015 Fredrik Lindberg <fli@shapeshifter.se>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import sys
import output
import console
import log
from threading import Event
from multiprocessing import Pipe
from select import select
from sound import SoundAnalyzer
from command import Command, command_root
from settings import Settings, CmdSet, CmdGet
from version import __version__
class CmdStart(Command):
    """Console command that starts sound processing."""

    def __init__(self, sp):
        super(CmdStart, self).__init__()
        self.name = "start"
        self.sp = sp

    def execute(self):
        # The shared storage dict doubles as the "is running" flag.
        already_running = "sound" in self.storage
        if already_running:
            return ["Sound processing already running"]
        self.storage["sound"] = True
        self.sp.start()
class CmdStop(Command):
    """Console command that stops sound processing."""

    def __init__(self, sp):
        super(CmdStop, self).__init__()
        self.name = "stop"
        self.sp = sp

    def execute(self):
        if "sound" not in self.storage:
            return ["Sound processing not running"]
        self.sp.stop()
        # Clear the "is running" marker from the shared storage.
        del self.storage["sound"]
class SoundProxy(object):
    """Wraps a SoundAnalyzer, forwarding its data to the configured outputs
    and allowing the analyzer to be swapped at runtime (sa setter)."""
    def __init__(self, sa):
        self.running = False
        self._sa = None
        self.outputs = output.Outputs()
        # Goes through the property setter below.
        self.sa = sa
    @property
    def sa(self):
        return self._sa
    @sa.setter
    def sa(self, value):
        # Swap the analyzer, restarting if it was running before the swap.
        was_running = self.running
        self.close()
        self._sa = value
        if was_running:
            self.start()
    def start(self):
        # NOTE(review): `== None` would conventionally be `is None`.
        if self.sa == None:
            return
        self.outputs.start()
        self._sa.start()
        self.running = True
    def stop(self):
        if self.sa == None:
            return
        self.sa.stop()
        self.outputs.stop()
        # Drain any pending data; bare except deliberately swallows errors
        # from sa.data() during shutdown.
        try:
            self.sa.data()
        except:
            pass
        self.running = False
    def close(self):
        if self.sa == None:
            return
        self.sa.stop()
        self.outputs.stop()
        self.sa.close()
    def fileno(self):
        # Lets select() wait on the analyzer's file descriptor.
        return self.sa.fileno()
    def read(self):
        # Pump one batch of analyzer data to the outputs; best-effort.
        try:
            data = self.sa.data()
            self.outputs.update(data)
        except:
            pass
class CmdQuit(Command):
    """Console command that clears the given run flags, ending the loops."""

    def __init__(self, name, running):
        super(CmdQuit, self).__init__()
        self.name = name
        self.running = running

    def execute(self):
        # Clearing every Event stops both the main loop and the console.
        for flag in self.running:
            flag.clear()
class ConsoleProxy(object):
    """Bridges the console thread and the main loop over a duplex Pipe:
    completer()/parser() send requests, read() answers them in the main loop."""
    def __init__(self):
        self._read, self._write = Pipe(duplex=True)
    def fileno(self):
        # Lets select() wait for pending console requests.
        return self._read.fileno()
    def read(self):
        # Handle one request from the console thread and send the answer back.
        data = self._read.recv()
        if data["type"] == "completer":
            result = command_root.match(data["line"], data["hints"])
        elif data["type"] == "parser":
            try:
                result = command_root.parse(data["line"])
            except Command.NotFound as e:
                if str(e) != "":
                    result = str(e)
                else:
                    result = "No such command '{:s}'".format(data["line"])
            except Command.SyntaxError as e:
                result = str(e)
        else:
            result = ""
        self._read.send(result)
    def completer(self, line, hints):
        # Called from the console thread: ask for completion candidates.
        self._write.send({"type" : "completer", "line" : line, "hints" : hints})
        return self._write.recv()
    def parser(self, line):
        # Called from the console thread: ask the main loop to run a command.
        self._write.send({"type" : "parser", "line" : line})
        return self._write.recv()
class CmdHelp(Command):
    """Console command that prints a short usage banner."""

    def __init__(self):
        super(CmdHelp, self).__init__()
        # FIX: this line was corrupted by a stray " | " extraction artifact.
        self.name = "help"

    def execute(self):
        """Return the help text lines shown to the user."""
        cmds = command_root.commands
        return [
            "chromesthesia {0}".format(__version__),
            "",
            "Begin with any of the following commands " + ", ".join(cmds)
        ]
def main(config):
    """Program entry point: wire up sound analysis, outputs and the console.

    config -- object exposing a ``verbose`` attribute (initial debug level).
    Returns 0 on normal termination.
    """
    print("This is chromesthesia {0}".format(__version__))
    logger = log.Logger()
    # FIX: this line was corrupted by a stray " | " extraction artifact.
    sp = SoundProxy(None)
    settings = Settings()
    def reinit_sa(key, value):
        # Recreate the analyzer whenever the freq/fps settings change.
        sp.sa = SoundAnalyzer(settings["freq"], settings["fps"])
    settings.create("fps", 60, reinit_sa)
    settings.create("freq", 44100, reinit_sa)
    reinit_sa(None, None)
    def debug(key, value):
        # Map the numeric debug level onto the logger's flag bits.
        logger.del_level(log.DEBUG | log.DEBUG2)
        if value >= 1:
            logger.add_level(log.DEBUG)
        if value >= 2:
            logger.add_level(log.DEBUG2)
    settings.create("debug", config.verbose, debug)
    running = Event()
    cp = ConsoleProxy()
    cons = console.Console()
    cons.parser = cp.parser
    cons.completer = cp.completer
    cons.set_prompt("chromesthesia> ")
    cons.start()
    # Register the interactive commands.
    command_root.add(CmdHelp())
    command_root.add(CmdQuit("exit", [running, cons.running]))
    command_root.add(CmdQuit("quit", [running, cons.running]))
    command_root.add(CmdStart(sp))
    command_root.add(CmdStop(sp))
    command_root.add(CmdSet())
    command_root.add(CmdGet())
    # Main loop: multiplex sound data and console requests via select().
    rlist = [sp, cp]
    running.set()
    while running.is_set():
        try:
            rr, _, _ = select(rlist, [], [])
            for obj in rr:
                obj.read()
        except KeyboardInterrupt:
            cons.running.clear()
            running.clear()
    sp.close()
    cons.join()
    return 0
return 0
if __name__ == '__main__':
    # NOTE(review): main() reads config.verbose, but sys.argv is a plain list
    # with no ``verbose`` attribute -- this entry point likely raises
    # AttributeError; confirm whether an argparse Namespace was intended.
    sys.exit(main(sys.argv))
|
priorknowledge/loom | loom/format.py | Python | bsd-3-clause | 18,525 | 0.000108 | # Copyright (c) 2014, Salesforce.com, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of Salesforce.com nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import shutil
from itertools import cycle
from itertools import izip
from contextlib2 import ExitStack
from collections import defaultdict
import parsable
from distributions.dbg.models import dpd
from distributions.fileutil import tempdir
from distributions.io.stream import json_dump
from distributions.io.stream import json_load
from distributions.io.stream import open_compressed
from distributions.io.stream import protobuf_stream_load
from loom.util import csv_reader
from loom.util import csv_writer
from loom.util import LoomError
import loom.util
import loom.schema
import loom.schema_pb2
import loom.cFormat
import loom.documented
parsable = parsable.Parsable()
# Reserved decode symbol for dpd's "other" bucket (see CategoricalEncoderBuilder).
OTHER_DECODE = '_OTHER'
# String spellings accepted as boolean true/false in CSV input.
TRUTHY = ['1', '1.0', 'True', 'true', 't']
FALSEY = ['0', '0.0', 'False', 'false', 'f']
BOOLEAN_SYMBOLS = {
    key: value
    for keys, value in [(TRUTHY, True), (FALSEY, False)]
    for key in keys
}
# Placeholder value per protobuf repeated-field name, used by make_schema_row.
EXAMPLE_VALUES = {
    'booleans': False,
    'counts': 0,
    'reals': 0.0,
}
# Example of the encoder dict shape produced by CategoricalEncoderBuilder.build().
EXAMPLE_CATEGORICAL_ENCODER = {
    'name': 'day-of-week',
    'model': 'dd',
    'symbols': {
        'Monday': 0,
        'Tuesday': 1,
        'Wednesday': 2,
        'Friday': 4,
    },
}
@parsable.command
@loom.documented.transform(
    inputs=['ingest.schema'],
    outputs=['ingest.schema_row'])
def make_schema_row(schema_in, schema_row_out):
    '''
    Convert json schema to protobuf schema row.

    Builds a dense ProductValue with one placeholder entry per feature
    (see EXAMPLE_VALUES) and writes its serialized form to schema_row_out.
    '''
    schema = json_load(schema_in)
    if not schema:
        raise LoomError('Schema is empty: {}'.format(schema_in))
    value = loom.schema_pb2.ProductValue()
    value.observed.sparsity = loom.schema_pb2.ProductValue.Observed.DENSE
    for model in schema.itervalues():
        try:
            # Map model name (e.g. 'dd') to its protobuf field ('booleans'...).
            field = loom.schema.MODEL_TO_DATATYPE[model]
        except KeyError:
            raise LoomError('Unknown model {} in schema {}'.format(
                model, schema_in))
        value.observed.dense.append(True)
        getattr(value, field).append(EXAMPLE_VALUES[field])
    with open_compressed(schema_row_out, 'wb') as f:
        f.write(value.SerializeToString())
class DefaultEncoderBuilder(object):
    """No-op builder for models that need no symbol table.

    Ignores all observed values and emits a bare {name, model} encoder.
    """

    def __init__(self, name, model):
        self.name = name
        self.model = model

    def add_value(self, value):
        """Ignore the value: default encoders collect no statistics."""
        pass

    def __iadd__(self, other):
        # BUG FIX: ``a += b`` rebinds ``a`` to the return value of __iadd__;
        # the old body returned None, replacing the builder with None.
        # There is no state to merge, so just keep self.
        return self

    def build(self):
        """Return the encoder dict for this column."""
        return {
            'name': self.name,
            'model': self.model,
        }
class CategoricalEncoderBuilder(object):
    """Builds a symbol->int table for categorical (dd/dpd) features.

    Symbols are coded by descending frequency (ties broken by symbol,
    ascending) so that frequent values get small codes.
    """

    def __init__(self, name, model):
        self.name = name
        self.model = model
        self.counts = defaultdict(int)

    def add_value(self, value):
        """Count one observation of *value*."""
        self.counts[value] += 1

    def __iadd__(self, other):
        for key, value in other.counts.items():
            self.counts[key] += value
        # BUG FIX: __iadd__ must return self, otherwise ``a += b`` rebinds
        # ``a`` to None and the merged counts are lost.
        return self

    def build(self):
        """Return the {name, model, symbols} encoder dict."""
        sorted_keys = [(-count, key) for key, count in self.counts.items()]
        sorted_keys.sort()
        symbols = {key: i for i, (_, key) in enumerate(sorted_keys)}
        if self.model == 'dpd':
            # BUG FIX: the old code asserted the non-empty string literal
            # 'OTHER_DECODE not in symbols', which is always true; assert the
            # actual condition instead.
            assert OTHER_DECODE not in symbols, \
                'data cannot assume reserved value {}'.format(OTHER_DECODE)
            symbols[OTHER_DECODE] = dpd.OTHER
        return {
            'name': self.name,
            'model': self.model,
            'symbols': symbols,
        }

    def __getstate__(self):
        # Plain dict: the defaultdict factory itself is not picklable.
        return (self.name, self.model, dict(self.counts))

    def __setstate__(self, state):
        # Unpack inside the body: tuple parameters are Python-2-only syntax.
        name, model, counts = state
        self.name = name
        self.model = model
        self.counts = defaultdict(int)
        self.counts.update(counts)
# Model name -> builder class; unknown models get the no-op default builder.
ENCODER_BUILDERS = defaultdict(lambda: DefaultEncoderBuilder)
ENCODER_BUILDERS['dd'] = CategoricalEncoderBuilder
ENCODER_BUILDERS['dpd'] = CategoricalEncoderBuilder
class CategoricalFakeEncoderBuilder(object):
    """Builder for already integer-coded data: tracks only the maximum
    observed code and emits an identity table over 0..max_value."""
    def __init__(self, name, model):
        self.name = name
        self.model = model
        self.max_value = -1
    def add_value(self, value):
        # Values are expected to parse as ints.
        self.max_value = max(self.max_value, int(value))
    def build(self):
        # NOTE(review): keys here are ints (identity mapping), unlike
        # CategoricalEncoderBuilder where keys are the raw strings -- confirm
        # downstream decoders expect this. ``xrange`` is Python-2-only.
        symbols = {int(value): value for value in xrange(self.max_value + 1)}
        if self.model == 'dpd':
            symbols[OTHER_DECODE] = dpd.OTHER
        return {
            'name': self.name,
            'model': self.model,
            'symbols': symbols,
        }
# Model name -> fake builder class; unknown models get the no-op default.
FAKE_ENCODER_BUILDERS = defaultdict(lambda: DefaultEncoderBuilder)
FAKE_ENCODER_BUILDERS['dd'] = CategoricalFakeEncoderBuilder
FAKE_ENCODER_BUILDERS['dpd'] = CategoricalFakeEncoderBuilder
def load_encoder(encoder):
    """Return a callable mapping a raw CSV string to the model's value."""
    model = encoder['model']
    if 'symbols' in encoder:
        # Categorical: look the string up in the symbol table.
        return encoder['symbols'].__getitem__
    if model == 'bb':
        # Boolean: accept the usual true/false spellings.
        return BOOLEAN_SYMBOLS.__getitem__
    # Everything else: the model's native value constructor.
    return loom.schema.MODELS[model].Value
def load_decoder(encoder):
    """Return a callable mapping an encoded value back to its CSV string.

    Inverse of load_encoder: symbol tables are inverted, booleans map to
    '0'/'1', everything else goes through str().
    """
    model = encoder['model']
    if 'symbols' in encoder:
        # FIX: dict.iteritems() is Python-2-only; items() yields the same
        # pairs and also runs on Python 3.
        decoder = {value: key for key, value in encoder['symbols'].items()}
        return decoder.__getitem__
    if model == 'bb':
        return ('0', '1').__getitem__
    return str
def _make_encoder_builders_file((schema_in, rows_in)):
    """Build one encoder builder per schema'd CSV column of rows_in.

    Takes a (schema_in, rows_in) pair (Python-2 tuple parameter) and returns
    the builders, each fed every non-empty stripped value of its column.
    Raises LoomError on repeated, unknown-only, or missing columns.
    """
    assert os.path.isfile(rows_in)
    schema = json_load(schema_in)
    with csv_reader(rows_in) as reader:
        header = reader.next()
        builders = []
        seen = set()
        for name in header:
            if name in schema:
                if name in seen:
                    raise LoomError('Repeated column {} in csv file {}'.format(
                        name, rows_in))
                seen.add(name)
                model = schema[name]
                Builder = ENCODER_BUILDERS[model]
                builder = Builder(name, model)
            else:
                # Column not in the schema: keep a placeholder so positions
                # still line up with the CSV rows.
                builder = None
            builders.append(builder)
        if all(builder is None for builder in builders):
            raise LoomError(
                'Csv file has no known features;'
                ', try adding a header to {}'.format(rows_in))
        missing_features = sorted(set(schema) - seen)
        if missing_features:
            raise LoomError('\n '.join(
                ['Csv file is missing features:'] + missing_features))
        for row in reader:
            for value, builder in izip(row, builders):
                if builder is not None:
                    value = value.strip()
                    if value:
                        builder.add_value(value)
    return [b for b in builders if b is not None]
def _make_encoder_builders_dir(schema_in, rows_in):
assert os.path.isdir(rows_in)
files_in = [os.path.join(rows_in, f) for f in os.listdir(rows_in)]
pa |
pdyba/lunch-app | src/lunch_app/tests.py | Python | mit | 35,088 | 0 | # -*- coding: utf-8 -*-
"""
Presence analyzer unit tests.
"""
# pylint: disable=maybe-no-member, too-many-public-methods, invalid-name
from datetime import datetime, date, timedelta
import os.path
import unittest
from unittest.mock import patch
from .main import app, db, mail
from . import main, utils
from .fixtures import fill_db, allow_ordering
from .mocks import (
MOCK_ADMIN,
MOCK_DATA_TOMAS,
MOCK_DATA_KOZIOLEK,
MOCK_WWW_TOMAS,
MOCK_WWW_KOZIOLEK,
)
from .models import Order, Food, MailText, User
from .webcrawler import get_dania_dnia_from_pod_koziolek, get_week_from_tomas
from .utils import make_datetime
def setUp():
    """
    Main setup: load the test configuration file and initialise the app.
    """
    test_config = os.path.join(
        os.path.dirname(__file__),
        '..', '..', 'parts', 'etc', 'test.cfg',
    )
    app.config.from_pyfile(test_config)
    main.init()
class LunchBackendViewsTestCase(unittest.TestCase):
"""
Views tests.
"""
    def setUp(self):
        """
        Before each test, set up an environment: fresh test client and schema.
        """
        self.client = main.app.test_client()
        db.create_all()
    def tearDown(self):
        """
        Get rid of unused objects after each test: drop the whole schema.
        """
        db.session.remove()
        db.drop_all()
    def test_mainpage_view(self):
        """
        Test main page view: GET / renders successfully.
        """
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)
    def test_info_view(self):
        """
        Test info page view: default text renders, and edited text shows up.
        """
        fill_db()
        resp = self.client.get('/info')
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("CATERING - menu na co dzi" in resp.data.__str__())
        # Change the info text in the DB and check the page reflects it.
        mailtxt = MailText.query.first()
        mailtxt.info_page_text = "To jest nowa firma \n ze strna\n www.wp.pl"
        db.session.commit()
        resp = self.client.get('/info')
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("www.wp.pl" in resp.data.__str__())
    @patch('lunch_app.views.current_user', new=MOCK_ADMIN)
    def test_my_orders_view(self):
        """
        Test my orders page view renders for a logged-in user.
        """
        resp = self.client.get('/my_orders')
        self.assertEqual(resp.status_code, 200)
    @patch('lunch_app.views.current_user', new=MOCK_ADMIN)
    def test_overview_view(self):
        """
        Test overview page renders for a logged-in user.
        """
        resp = self.client.get('/overview')
        self.assertEqual(resp.status_code, 200)
@patch('lunch_app.views.current_user', new=MOCK_ADMIN)
def test_create_order_view(self):
"""
Test create order page.
"""
allow_ordering()
resp = self.client.get('/order')
self.assertEqual(resp.status_code, 200)
data = {
'cost': '12',
'company': 'Pod Koziołkiem',
'description': 'dobre_jedzonko',
'send_me_a_copy': 'false',
'arrival_time': '12:00',
}
resp = self.client.post('/order', data=data)
order_db = Order.query.first()
self.assertTrue(resp.status_code == 302)
self.assertEqual(order_db.cost, 12)
self.assertEqual(order_db.company, 'Pod Koziołkiem')
self.assertEqual(order_db.description, 'dobre_jedzonko')
self.assertAlmostEqual(
order_db.date,
datetime.now(),
delta=timedelta(seconds=1),
)
self.assertEqual(order_db.arrival_time, '12:00')
@patch('lunch_app.views.current_user', new=MOCK_ADMIN)
def test_create_order_with_email(self):
"""
Test create order with send me an email.
"""
allow_ordering()
with mail.record_messages() as outbox:
data = {
'cost': '13',
'company': 'Pod Koziołkiem',
'description': 'To jest TESTow zamowienie dla emaila',
'send_me_a_copy': 'true',
'date': '2015-01-02',
'arrival_time': '13:00',
}
resp = self.client.post('/order', data=data)
self.assertTrue(resp.status_code == 302)
self.assertEqual(len(outbox), 1)
msg = outbox[0]
self.assertTrue(msg.subject.st | artswith('Lunch | order'))
self.assertIn('To jest TESTow zamowienie dla emaila', msg.body)
self.assertIn('Pod Koziołkiem', msg.body)
self.assertIn('13.0 PLN', msg.body)
self.assertIn('at 13:00', msg.body)
self.assertEqual(msg.recipients, ['mock@mock.com'])
    @patch('lunch_app.permissions.current_user', new=MOCK_ADMIN)
    def test_add_food_view(self):
        """
        Test add food page: GET renders, POST persists a single Food row.
        """
        resp = self.client.get('/add_food')
        self.assertEqual(resp.status_code, 200)
        data = {
            'cost': '333',
            'description': 'dobre_jedzonko',
            'date_available_to': '2015-01-01',
            'company': 'Pod Koziołkiem',
            'date_available_from': '2015-01-01',
            'o_type': 'daniednia',
            'add_meal': 'add',
        }
        resp_2 = self.client.post('/add_food', data=data,)
        food_db = Food.query.first()
        # 302 = redirect after successful form submission.
        self.assertEqual(resp_2.status_code, 302)
        self.assertEqual(food_db.cost, 333)
        self.assertEqual(food_db.description, 'dobre_jedzonko')
        self.assertEqual(food_db.date_available_to, datetime(2015, 1, 1, 0, 0))
        self.assertEqual(food_db.company, 'Pod Koziołkiem')
        self.assertEqual(food_db.o_type, 'daniednia')
        self.assertEqual(
            food_db.date_available_from,
            datetime(2015, 1, 1, 0, 0)
        )
@patch('lunch_app.permissions.current_user', new=MOCK_ADMIN)
def test_add_food__bulk_view(self):
"""
Test bulk add food page.
"""
data = {
'cost': '333',
'description': 'dobre_jedzonko\r\nciekawe_jedzonko\r\npies',
'date_available_to': '2015-01-01',
'company': 'Pod Koziołkiem',
'date_available_from': '2015-01-01',
'o_type': 'daniednia',
'add_meal': 'bulk',
}
resp = self.client.post('/add_food', data=data,)
food_db = Food.query.get(1)
self.assertEqual(resp.status_code, 302)
self.assertEqual(food_db.cost, 333)
self.assertEqual(food_db.description, 'dobre_jedzonko')
self.assertEqual(food_db.date_available_to, datetime(2015, 1, 1, 0, 0))
self.assertEqual(food_db.company, 'Pod Koziołkiem')
self.assertEqual(food_db.o_type, 'daniednia')
self.assertEqual(
food_db.date_available_from,
datetime(2015, 1, 1, 0, 0)
)
food_db = Food.query.get(2)
self.assertEqual(resp.status_code, 302)
self.assertEqual(food_db.cost, 333)
self.assertEqual(food_db.description, 'ciekawe_jedzonko')
self.assertEqual(food_db.date_available_to, datetime(2015, 1, 1, 0, 0))
self.assertEqual(food_db.company, 'Pod Koziołkiem')
self.assertEqual(food_db.o_type, 'daniednia')
self.assertEqual(
food_db.date_available_from,
datetime(2015, 1, 1, 0, 0)
)
food_db = Food.query.get(3)
self.assertEqual(resp.status_code, 302)
self.assertEqual(food_db.cost, 333)
self.assertEqual(food_db.description, 'pies')
self.assertEqual(food_db.date_available_to, datetime(2015, 1, 1, 0, 0))
self.assertEqual(food_db.company, 'Pod Koziołkiem')
self.assertEqual(food_db.o_type, 'daniednia')
self.assertEqual(
food_db.date_available_from,
datetime(2015, 1, 1, 0, 0)
)
    @patch('lunch_app.permissions.current_user', new=MOCK_ADMIN)
    def test_day_summary_view(self):
        """
        Test day summary page lists the fixture meals.
        """
        fill_db()
        resp = self.client.get('/day_summary')
        self.assertIn('Maly Gruby Nalesnik', str(resp.data))
        self.assertIn('Duzy Gruby Nalesnik', str(resp.data))
        db.session.close()
def test_order_list_view(self):
"""
Test order list page.
"""
|
Mlieou/oj_solutions | leetcode/python/ex_484.py | Python | mit | 366 | 0.008197 | class Solution(object):
def findPermutation | (self, s):
"""
:type s: str
:rtype: List[int]
"""
if not s: return []
res = []
i = 1
for c in s:
if c == 'I':
res.extend(range(i, len(res), -1))
i += 1
res.extend(range(i, len(r | es), -1))
return res |
zhengbomo/python_practice | project/Lagou/Analyzer.py | Python | mit | 1,396 | 0 | #!/usr/bin/python
# -*- coding:utf-8 -*-
from LagouDb import LagouDb
class Analyzer(object):
    """Read-only reporting helpers over the Lagou jobs database.

    FIX: two lines were corrupted by stray " | " extraction artifacts
    (the get_high_salary_jobs signature and a for-loop in key_persent);
    restored. Chinese comments translated to English.
    """

    def __init__(self):
        self.db = LagouDb()

    # Most popular jobs (stub -- not implemented yet).
    @staticmethod
    def get_popular_jobs(since=None):
        if since:
            pass
        else:
            pass

    # Salary distribution for one job keyword across cities.
    def get_salary_in_city(self, key, count, mincount=10):
        result = self.db.salary_in_city_by_key(key, count, mincount)
        kv = {}
        for i in result:
            if i['count'] >= 5:
                # Skip cities with fewer than 5 samples.
                k = '{0} ({1})'.format(i['city'], i['count'])
                kv[k] = i['salary']
        return kv

    # Highest-paid jobs, optionally restricted to one city.
    def get_high_salary_jobs(self, city, count, mincount=10):
        result = self.db.high_salary(city, count, mincount=mincount)
        kv = {}
        for i in result:
            k = '{0} ({1})'.format(i['key'], i['count'])
            kv[k] = i['salary']
        return kv

    # Share of search results per keyword (optionally per city).
    def key_persent(self, city, count):
        if city:
            result = self.db.key_persent_for_city(city, count)
        else:
            result = self.db.key_persent(count)
        kv = {}
        for i in result:
            k = '{0} ({1})'.format(i['key'], i['count'])
            kv[k] = i['count']
        return kv
DailyActie/Surrogate-Model | 01-codes/scipy-master/scipy/odr/setup.py | Python | mit | 1,419 | 0 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
from os.path import join
def configuration(parent_package='', top_path=None):
    """Build configuration for scipy.odr (numpy.distutils).

    Links the Fortran odrpack library against an optimized BLAS when one is
    found, otherwise falls back to the bundled reference routines.

    FIX: the signature and the get_info() call were corrupted by stray
    " | " extraction artifacts; restored.
    """
    import warnings
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError

    config = Configuration('odr', parent_package, top_path)

    libodr_files = ['d_odr.f',
                    'd_mprec.f',
                    'dlunoc.f']

    blas_info = get_info('blas_opt')
    if blas_info:
        libodr_files.append('d_lpk.f')
    else:
        # No optimized BLAS: warn and use the bundled reference LAPACK/BLAS.
        warnings.warn(BlasNotFoundError.__doc__)
        libodr_files.append('d_lpkbls.f')

    odrpack_src = [join('odrpack', x) for x in libodr_files]
    config.add_library('odrpack', sources=odrpack_src)

    sources = ['__odrpack.c']
    libraries = ['odrpack'] + blas_info.pop('libraries', [])
    include_dirs = ['.'] + blas_info.pop('include_dirs', [])
    config.add_extension('__odrpack',
                         sources=sources,
                         libraries=libraries,
                         include_dirs=include_dirs,
                         depends=(['odrpack.h'] + odrpack_src),
                         **blas_info
                         )

    config.add_data_dir('tests')
    return config
if __name__ == '__main__':
    # Allow building this subpackage standalone.
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
lxki/pjsip | tests/pjsua/scripts-sendto/201_ice_mismatch_1.py | Python | gpl-2.0 | 650 | 0.016923 | # $Id: 201_ice_mismatch_1.py 2392 2008-12-22 18:54:58Z bennylp $
import inc_sip as sip
import inc_sdp as sdp
# SDP offer whose comp-1 default candidate (m-line port 4000) differs from the
# only a=candidate (port 2222), so the answerer must report an ice-mismatch.
# FIX: the "o=" line and the test title were corrupted by stray " | "
# extraction artifacts; restored.
sdp = \
"""
v=0
o=- 0 0 IN IP4 127.0.0.1
s=pjmedia
c=IN IP4 127.0.0.1
t=0 0
m=audio 4000 RTP/AVP 0 101
a=ice-ufrag:1234
a=ice-pwd:5678
a=rtpmap:0 PCMU/8000
a=sendrecv
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-15
a=candidate:XX 1 UDP 1 1.1.1.1 2222 typ host
"""

args = "--null-audio --use-ice --auto-answer 200 --max-calls 1"
include = ["a=ice-mismatch"]
exclude = []

sendto_cfg = sip.SendtoCfg( "caller sends mismatched offer for comp 1",
			    pjsua_args=args, sdp=sdp, resp_code=200,
			    resp_inc=include, resp_exc=exclude)
|
LockScreen/Backend | venv/lib/python2.7/site-packages/awscli/customizations/cloudtrail/utils.py | Python | mit | 1,641 | 0 | # Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
def get_account_id_from_arn(trail_arn):
"""Gets the account ID portion of an ARN"""
return trail_arn.split(':')[4]
def get_account_id(iam_client):
"""Retrieve the AWS account ID for the authenticated user"""
response = iam_client.get_user()
return get_account_id_from_arn(response['User']['Arn'])
def get_trail_by_arn(cloudtrail_client, trail_arn):
"""Gets trail information based on the trail's ARN"""
trails = cloudtrail_client.describe_trails()['trailList']
for trail in trails:
if trail.get('TrailARN', None) == trail_arn:
return trail
raise ValueError('A trail could not be found for %s' % trail_arn)
def remove_cli_error_event(client):
"""This unregister call will go away once the client switchover
is done, but for now we're relying on S3 catching a ClientError
when we check if a bucket exists, so we need to ensure the
botocore ClientError is raised instead of the CLI's error handler.
"""
client.meta.events.unregister(
'after-call', unique_id='awscli-error-handler')
|
hanul93/kicomav | Engine/kavcore/k2file.py | Python | gpl-2.0 | 10,886 | 0.000532 | # -*- coding:utf-8 -*-
# Author: Kei Choi(hanul93@gmail.com)
import os
import re
import glob
import shutil
import tempfile
# import psutil
# ---------------------------------------------------------------------
# K2Tempfile 클래스
# ---------------------------------------------------------------------
class K2Tempfile:
def __init__(self):
self.re_pid = re.compile(r'ktmp([0-9a-f]{5})$', re.IGNORECASE)
self.temp_path = os.path.join(tempfile.gettempdir(), 'ktmp%05x' % os.getpid())
if not os.path.exists(self.temp_path):
try:
os.mkdir(self.temp_path)
except (IOError, OSError) as e:
self.temp_path = tempfile.gettempdir()
def gettempdir(self):
return self.temp_path
def mktemp(self):
return tempfile.mktemp(prefix='ktmp', dir=self.temp_path)
def removetempdir(self):
# 자기 폴더만 지우는 걸로...
try:
if os.path.exists(self.temp_path):
shutil.rmtree(self.temp_path)
except OSError:
pass
'''
fl = glob.glob(os.path.join(tempfile.gettempdir(), 'ktmp*'))
if len(fl):
for tname in fl:
if os.path.isdir(tname):
tpath = self.re_pid.search(tname)
if tpath: # 정상적으로 임시 폴더가 생겼음
if psutil.pid_exists(int(tpath.groups()[0], 16)) is False:
try:
shutil.rmtree(tname)
except OSError:
pass
elif os.path.isfile(tname):
try:
os.remove(tname)
except OSError:
pass
'''
# -------------------------------------------------------------------------
# FileStruct 클래스
# -------------------------------------------------------------------------
class FileStruct:
# ---------------------------------------------------------------------
# __init__(self, filename=None)
# 클래스를 초기화 한다.
# 인자값 : filename - 파일 이름
# ---------------------------------------------------------------------
def __init__(self, filename=None, level=0):
self.__fs = {}
if filename:
self.set_default(filename, level)
# ---------------------------------------------------------------------
# set_default(self, filename)
# 파일에 대한 하나의 FileStruct 생성한다.
# 인자값 : filename - 파일 이름
# ---------------------------------------------------------------------
def set_default(self, filename, level):
import kernel
self.__fs['is_arc'] = False # 압축 여부
self.__fs['arc_engine_name'] = None # 압축 해제 가능 엔진 ID
self.__fs['arc_filename'] = '' # 실제 압축 파일
self.__fs['filename_in_arc'] = '' # 압축해제 대상 파일
self.__fs['real_filename'] = filename # 검사 대상 파일
self.__fs['additional_filename'] = '' # 압축 파일의 내부를 표현하기 위한 파일명
self.__fs['master_filename'] = filename # 출력용
self.__fs['is_modify'] = False # 수정 여부
self.__fs['can_arc'] = kernel.MASTER_IGNORE # 재압축 가능 여부
self.__fs['level'] = level # 압축 깊이
# ---------------------------------------------------------------------
# is_archive(self)
# 파일에 대한 압축 여부를 확인한다.
# 리턴값 : True or False
# ---------------------------------------------------------------------
def is_archive(self): # 압축 여부
return self.__fs['is_arc']
# ---------------------------------------------------------------------
# get_archive_engine_name(self)
# 압축 해제 가능한 엔진을 확인한다.
# 리턴값 : 압축 해제 가능한 엔진 (ex, arc_zip)
# ---------------------------------------------------------------------
def get_archive_engine_name(self): # 압축 엔진 ID
return self.__fs['arc_engine_name']
# ---------------------------------------------------------------------
# get_archive_filename(self)
# 실제 압축 파일 이름을 확인한다.
# 리턴값 : 실제 압축 파일 이름
# ---------------------------------------------------------------------
def get_archive_filename(self): # 실제 압축 파일
return self.__fs['arc_filename']
# ---------------------------------------------------------------------
# get_filename_in_archive(self)
# 압축 해제 대상 파일명을 확인한다.
# 리턴값 : 압축해제 대상 파일
# ---------------------------------------------------------------------
def get_filename_in_archive(self): # 압축해제 대상 파일
return self.__fs['filename_in_arc']
# ---------------------------------------------------------------------
# get_filename(self)
# 실제 작업 대상 파일 이름을 확인한다.
# 리턴값 : 실제 작업 대상 파일
# ---------------------------------------------------------------------
def get_filename(self): # 실제 작업 파일 이름
return self.__fs['real_filename']
# ---------------------------------------------------------------------
# set_filename(self)
# 실제 작업 대상 파일 이름을 저장한다.
# 입력값 : 실제 작업 대상 파일
# ---------------------------------------------------------------------
def set_filename(self, fname): # 실제 작업 파일명을 저장
self.__fs['real_filename'] = fname
# ---------------------------------------------------------------------
# get_master_filename(self)
# 최상위 파일 이름을 확인한다.
# 리턴값 : 압축일 경우 압축 파일명
# ---------------------------------------------------------------------
def get_master_filename(self): # 압축일 경우 최상위 파일
return self.__fs['master_filename'] # 출력용
# ---------------------------------------------------------------------
# get_additional_filename(self)
# 압축 파일 내부를 표현하기 위한 파일 이름을 확인한다.
# 리턴값 : 압축 파일 내부 표현 파일 이름
# ---------------------------------------------------------------------
def get_additional_filename(self):
return self.__fs['additional_filename']
# ---------------------------------------------------------------------
# set_additional_filename(self, filename)
# 압축 파일 내부를 표현하기 위한 파일 이름을 셋팅한다.
# ---------------------------------------------------------------------
def set_additiona | l_filename(self, filename):
self.__fs['additional_filename'] = filename
# ----------------------------------------------- | ----------------------
# is_modify(self)
# 악성코드 치료로 인해 파일이 수정됨 여부를 확인한다.
# 리턴값 : True or False
# ---------------------------------------------------------------------
def is_modify(self): # 수정 여부
return self.__fs['is_modify']
# ---------------------------------------------------------------------
# set_modify(self, modify)
# 악성코드 치료로 파일이 수정 여부를 저장함
# 입력값 : 수정 여부 (True or False)
# ---------------------------------------------------------------------
def set_modify(self, modify): # 수정 여부
self.__fs['is_modify'] = modify
# ---------------------------------------------------------------------
# get_can_archive(self)
# 악성코드로 치료 후 파일을 재압축 할 수 있는지 여부를 확인한다.
# 리턴값 : kernel.MASTER_IGNORE, kernel.MASTER_PACK, kernel.MASTER_DELETE
# ---------------------------------------------------------------------
def get_can_archive(self): # 재압축 가능 여부
return self.__fs['can_arc']
# ---------------------------------------------------------------------
# set_can_archive(self, mode)
# 악성코드로 치료 후 파일을 재압축 할 수 있는지 여부를 설정한다.
# 입력값 : mode - kernel.MASTER_IGNORE, kernel.MASTER_PACK, kernel.MASTER_DELETE
# ---------------------------------------------------------------------
def set_can_archive(self, mode): # 재압축 가능 여부
self.__fs['can_arc'] = mode
# ---------------------------------------------------------------------
# get_level(self)
# 압축의 깊이를 알아낸다.
# 리턴값 : 0, 1, 2 ...
# ---------------------------------------------------------------------
def get_level(self): # 압축 깊이
return self.__fs['level']
# ---------------------------------------------------------------------
# set_level(self, level)
# 압축의 깊이를 설정한다.
# 입력값 : level - 압축 깊이
# ---------------------------------------------------------------------
def set_level(self, level): # 압축 깊이
self.__fs['level'] = level |
jaap-karssenberg/zim-desktop-wiki | tests/config.py | Python | gpl-2.0 | 22,669 | 0.0251 |
# Copyright 2008-2013 Jaap Karssenberg <jaap.karssenberg@gmail.com>
import tests
from tests import os_native_path
import os
from zim.config import *
from zim.fs import adapt_from_oldfs
from zim.newfs import File, Folder, LocalFolder
from zim.notebook import Path
import zim.config
# Check result of lookup functions does not return files outside of
# source to be tested -- just being paranoid here...
# Note that this marshalling remains in place for any subsequent tests
_cwd = tests.ZIM_SRC_FOLDER
def marshal_path_lookup(function):
def marshalled_path_lookup(*arg, **kwarg):
value = function(*arg, **kwarg)
if isinstance(value, ConfigFile):
p = value.file
else:
p = value
p = adapt_from_oldfs(p)
if not p is None:
assert isinstance(p, (File, Folder)), 'BUG: get %r' % p
assert p.ischild(_cwd), "ERROR: \"%s\" not below \"%s\"" % (p, _cwd)
return value
return marshalled_path_lookup
zim.config.data_file = marshal_path_lookup(zim.config.data_file)
zim.config.data_dir = marshal_path_lookup(zim.config.data_dir)
#~ zim.config.config_file = marshal_path_lookup(zim.config.config_file)
##
def value_to_folder(v):
return LocalFolder(os_native_path(v))
class FilterInvalidConfigWarning(tests.LoggingFilter):
def __init__(self):
tests.LoggingFilter.__init__(self, 'zim.config', 'Invalid config value')
class EnvironmentConfigContext(object):
'''Context manager to be able to run test cases for
environment parameters and restore the previous values on
exit or error.
'''
environ = os.environ
def __init__(self, environ_context):
self.environ_context = environ_context
self.environ_backup = {}
self.environ = os.environ
def __enter__(self):
for k, v in self.environ_context.items():
self.environ_backup[k] = self.environ.get(k)
if v:
self.environ[k] = v
elif k in self.environ:
del self.environ[k]
else:
pass
zim.config.set_basedirs(_ignore_test=True) # refresh
def __exit__(self, *exc_info):
for k, v in self.environ_backup.items():
if v:
self.environ[k] = v
elif k in self.environ:
del self.environ[k]
else:
pass
zim.config.set_basedirs() # refresh
return False # Raise
class TestDirsTestSetup(tests.TestCase):
def runTest(self):
'''Test config environment setup of test'''
zim.config.log_basedirs()
for k, v in (
('XDG_DATA_HOME', os.path.join(tests.TMPDIR, 'data_home')),
('XDG_CONFIG_HOME', os.path.join(tests.TMPDIR, 'config_home')),
('XDG_CACHE_HOME', os.path.join(tests.TMPDIR, 'cache_home'))
):
self.assertEqual(adapt_from_oldfs(getattr(zim.config, k)), LocalFolder(v))
for k, v in (
#~ ('XDG_DATA_DIRS', os.path.join(tests.TMPDIR, 'data_dir')),
('XDG_CONFIG_DIRS', os.path.join(tests.TMPDIR, 'config_dir')),
):
self.assertEqual(
list(map(adapt_from_oldfs, getattr(zim.config, k))),
list(map(LocalFolder, v.split(os.pathsep)))
)
class TestXDGDirs(tests.TestCase):
def testAllValid(self):
'''Test config environment is valid'''
for var in (
ZIM_DATA_DIR, # should always be set when running as test
XDG_DATA_HOME,
XDG_CONFIG_HOME,
XDG_CACHE_HOME
):
self.assertTrue(isinstance(adapt_from_oldfs(var), Folder))
for var in (
XDG_DATA_DIRS,
XDG_CONFIG_DIRS,
):
self.assertTrue(isinstance(var, list) and isinstance(adapt_from_oldfs(var[0]), Folder))
self.assertEqual(adapt_from_oldfs(ZIM_DATA_DIR), tests.ZIM_DATA_FOLDER)
self.assertTrue(ZIM_DATA_DIR.file('zim.png').exists())
self.assertTrue(data_file('zim.png').exists())
self.assertTrue(data_dir('templates').exists())
self.assertEqual(
list(data_dirs(('foo', 'bar'))),
[d.subdir(['foo', 'bar']) for d in data_dirs()])
@tests.skipIf(os.name == 'nt', 'No standard defaults for windows')
def testDefaults(self):
'''Test default basedir paths'''
with EnvironmentConfigContext({
'XDG_DATA_HOME': None,
'XDG_DATA_DIRS': ' ',
'XDG_CONFIG_HOME': None,
'XDG_CONFIG_DIRS': '',
'XDG_CACHE_HOME': None,
}):
for k, v in (
('XDG_DATA_HOME', '~/.local/share'),
('XDG_CONFIG_HOME', '~/.config'),
('XDG_CACHE_HOME', '~/.cache')
):
self.assertEqual(adapt_from_oldfs(getattr(zim.config.basedirs, k)), LocalFolder(v))
for k, v in (
('XDG_DATA_DIRS', '/usr/share:/usr/local/share'),
('XDG_CONFIG_DIRS', '/etc/xdg'),
):
self.assertEqual(
list(map(adapt_from_oldfs, getattr(zim.config.basedirs, k))),
list(map(LocalFolder, v.split(':')))
)
def testInitializedEnvironment(self):
'''Test config environment with non-default basedir paths'''
my_environ = {
'XDG_DATA_HOME': '/foo/data/home',
'XDG_DATA_DIRS': '/foo/data/dir1:/foo/data/dir2 ',
'XDG_CONFIG_HOME': '/foo/config/home',
'XDG_CONFIG_DIRS': '/foo/config/dir1:/foo/config/dir2',
'XDG_CACHE_HOME': '/foo/cache',
}
if os.name == 'nt':
my_environ['XDG_DATA_DIRS'] = '/foo/data/dir1;/foo/data/dir2'
my_environ['XDG_CONFIG_DIRS'] = '/foo/config/dir1;/foo/config/dir2'
with EnvironmentConfigContext(my_environ):
for k, v in (
('XDG_DATA_HOME', '/foo/data/home'),
('XDG_CONFIG_HOME', '/foo/config/home'),
('XDG_CACHE_HOME', '/foo/cache')
):
self.assertEqual(adapt_from_oldfs(getattr(zim.config.basedirs, k)), value_to_folder(v))
for k, v in (
('XDG_DATA_DIRS', '/foo/data/dir1:/foo/data/dir2'),
('XDG_CONFIG_DIRS', '/foo/config/dir1:/foo/config/dir2'),
):
self.assertEqual(
list(map(adapt_from_oldfs, getattr(zim.config.basedirs, k))),
list(map(value_to_folder, v.split(':')))
)
class TestControlledDict(tests.TestCase):
def runTest(self):
mydict = ControlledDict({'foo': 'bar'})
self | .assertFalse(mydict.modified)
mydict | ['bar'] = 'dus'
self.assertTrue(mydict.modified)
mydict.set_modified(False)
mydict['section'] = ControlledDict()
mydict['section']['dus'] = 'ja'
self.assertTrue(mydict['section'].modified)
self.assertTrue(mydict.modified)
mydict.set_modified(False)
self.assertFalse(mydict.modified)
mydict['section'].set_modified(False)
self.assertFalse(mydict['section'].modified)
self.assertFalse(mydict.modified)
mydict['section'] = ControlledDict() # nested dict
mydict['section']['dus'] = 'FOO!'
self.assertTrue(mydict['section'].modified)
self.assertTrue(mydict.modified)
mydict.set_modified(False)
self.assertFalse(mydict['section'].modified)
self.assertFalse(mydict.modified)
mydict.update({'nu': 'ja'})
self.assertTrue(mydict.modified)
mydict.set_modified(False)
mydict.setdefault('nu', 'XXX')
self.assertFalse(mydict.modified)
mydict.setdefault('new', 'XXX')
self.assertTrue(mydict.modified)
counter = [0]
def handler(o):
counter[0] += 1
mydict.connect('changed', handler)
mydict['nu'] = 'YYY'
self.assertEqual(counter, [1])
mydict.update({'a': 'b', 'c': 'd', 'e': 'f'})
self.assertEqual(counter, [2]) # signal only emitted once
mydict['section']['foo'] = 'zzz'
self.assertEqual(counter, [3]) # recursive signal
mydict.set_modified(False)
v = mydict.pop('nu')
self.assertEqual(v, 'YYY')
self.assertTrue(mydict.modified)
self.assertRaises(KeyError, mydict.__getitem__, v)
class TestConfigDefinitions(tests.TestCase):
def testBuildDefinition(self):
self.assertRaises(AssertionError, build_config_definition)
for default, check, klass in (
('foo', None, String),
('foo', str, String),
(True, None, Boolean),
(10, None, Integer),
(1.0, None, Float),
('foo', ('foo', 'bar', 'baz'), Choice),
(10, (1, 100), Range),
((10, 20), value_is_coord, Coordinate),
):
definition = build_config_definition(default, check)
self.assertIsInstance(definition, klass)
def testConfigDefinitionByClass(self):
for value, klass in (
([1, 2, 3], list),
(Path('foo'), Path),
):
definition = build_config_definition(value)
self.assertIsInstance(definition, ConfigDefinitionByClass)
self.assertEqual(definition.klass, klass)
# Test input by json struct
definition = ConfigDefinitionByClass([1, 2, 3])
self.assertEqual(definition.check('[true,200,null]'), [True, 200, None])
# Test converting to tuple
definition = ConfigDefinitionByClas |
leifos/tar | scripts/convert_qrels_to_binary_qrels.py | Python | mit | 1,186 | 0.009275 | import sys
import os
def main(qrelFile, threshold):
curr_topic_id = None
with open(qrelFile, "r") as f:
while f:
line = f.readline()
if not line:
break
(topic_id, blank, doc_id, judgement) = li | ne.split()
v= int(judgement)
if v > 0 and v < threshold:
v = 1
print("{0}\t{1}\t{2}\t{3}".format(topic_id, "0", doc_id, v))
if v <= 0:
v = 0
print("{0}\t{1}\t{2}\t{3}".format(topic_id, "0", doc_id, v))
def usage(args):
print("Usage: {0} <qrelfile> <relthreshold>".format(args[0]))
print("<qr | elfile> is in TREC qrel format")
print("<relthreshold> reduces graded relevance scores below the threshold to 1 (binary), if above it is removed, else 0.")
print(" defaults to 3")
if __name__ == "__main__":
threshold = 3
if len(sys.argv)==3:
try:
threshold = int(sys.argv[2])
except:
threshold = 3
if len(sys.argv)>=2:
qrelFile = sys.argv[1]
if len(sys.argv)<2:
usage(sys.argv)
else:
main(qrelFile, threshold) |
synw/django-mogo | djangomogo/__main__.py | Python | mit | 2,304 | 0 | from __future__ import print_function
import sys
import os
import subprocess
path = os.path.abspath(__file__)
modpath = os.path.dirname(path)
base_dir = os.getcwd()
install_mode = 'normal'
plus = False
mon = False
venv = "y"
if len(sys.argv) > 1:
if '-django' in sys.argv:
install_mode = 'django'
elif '-dev' in sys.argv:
install_mode = 'dev'
elif '-modules' in sys.argv:
install_mode = 'modules'
elif "-q" is sys.argv:
install_mode = "default"
if '-plus' in sys.argv:
plus = True
if '-mon' in sys.argv:
mon = True
if '-noenv' in sys.argv:
venv = "n"
msg = 'What is the name of the project? > '
if sys.version_info[:2] <= (2, 7):
get_input = raw_input
else:
get_input = input
user_input = get_input(msg)
if user_input == "":
print("You must provide a project name")
sys.exit()
project_name = user_input
bscript = modpath + '/install/init/install.sh'
print("Starting install ...")
if not install_mode == 'modules':
subprocess.call([bscript, project_name, base_dir,
install_mode, modpath, venv])
pages_installed = "n"
if install_mode != 'django':
bscript = modpath + '/install/pages/install.sh'
subprocess.call([bscript, project_name, base_dir, install_mode, modpath])
# contact
bscript = modpath + '/install/contact/install.sh'
subprocess.call([bscript, project_name, base_dir, install_mode, modpath])
if install_mode != "default":
# real time
msg = 'Install the realtime modules? [y/N] > '
rt = "n"
user_input = get_input(msg)
if user_input == "y":
rt = "y"
bscript = modpath + '/install/real_time/install.sh'
subprocess.call([bscript, project_name, base_dir, modpath])
if plus is True:
# users
bscript = modpath + '/install/users/install.sh'
subprocess.call(
| [bscript, project_name, base_dir, install_mode, modpath])
if mon is True:
bscript = modpath + '/install/mon/install.sh'
subprocess.call(
[bscript, project_name, base_dir, install_mode, m | odpath])
# end
bscript = modpath + '/install/end/install.sh'
subprocess.call([bscript, project_name, base_dir, install_mode, modpath, rt])
|
VishvajitP/readthedocs.org | readthedocs/cdn/purge.py | Python | mit | 645 | 0.00155 | import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE', None)
CDN_USERNAME = getattr(settings, 'CDN_USERNAME', None)
CDN_KEY = getattr(settings, 'CDN_KEY', None)
CDN_SECET = getattr(settings, 'CDN_SECET', None)
CDN_ID = getattr(settings, 'CDN_ID', None)
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID and CDN_SERVICE == 'ma | xcdn':
from maxcdn import MaxCDN
api = MaxCDN(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
else:
def purge( | files):
log.error("CDN not configured, can't purge files")
|
magarcia/python-producteev | producteev/utils.py | Python | mit | 1,036 | 0.000965 | # Copyright (c) 2012 Martin Garcia <newluxfero@gmail.com>
#
# This file is part of python-producteev, and is made available under
# MIT license. | See LICENSE for the full details.
import re
import htmlentitydefs
def unescape(text):
"""
Removes HTML or XML character references and entities from a text string.
text -- The HTML (or XML) source text.
return -- The plain text, as a Unicode string, if necessary.
| """
def fixup(m):
text = m.group(0)
if text[:2] == "&#":
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16))
else:
return unichr(int(text[2:-1]))
except ValueError:
pass
else:
# named entity
try:
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
except KeyError:
pass
return text # leave as is
return re.sub("&#?\w+;", fixup, text)
|
mmnelemane/nova | nova/tests/unit/api/openstack/compute/test_security_groups.py | Python | apache-2.0 | 67,497 | 0.000133 | # Copyright 2011 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from mox3 import mox
from oslo_config import cfg
from oslo_serialization import jsonutils
import webob
from nova.api.openstack.compute.legacy_v2.contrib import security_groups as \
secgroups_v2
from nova.api.openstack.compute import security_groups as \
secgroups_v21
from nova import compute
from nova.compute import power_state
from nova import context as context_maker
import nova.db
from nova import exception
from nova import objects
from nova import quota
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
CONF = cfg.CONF
FAKE_UUID1 = 'a47ae74e-ab08-447f-8eee-ffd43fc46c16'
FAKE_UUID2 = 'c6e6430a-6563-4efa-9542-5e93c9e97d18'
class AttrDict(dict):
def __getattr__(self, k):
return self[k]
def security_group_request_template(**kwargs):
sg = kwargs.copy()
sg.setdefault('name', 'test')
sg.setdefault('description', 'test-description')
return sg
def security_group_template(**kwargs):
sg = kwargs.copy()
sg.setdefault('tenant_id', '123')
sg.setdefault('name', 'test')
sg.setdefault('description', 'test-description')
return sg
def security_group_db(security_group, id=None):
attrs = security_group.copy()
if 'tenant_id' in attrs:
attrs['project_id'] = attrs.pop('tenant_id')
if id is not None:
attrs['id'] = id
attrs.setdefault('rules', [])
attrs.setdefault('instances', [])
return AttrDict(attrs)
def security_group_rule_template(**kwargs):
rule = kwargs.copy()
rule.setdefault('ip_protocol', 'tcp')
rule.setdefault('from_port', 22)
rule.setdefault('to_port', 22)
rule.setdefault('parent_group_id', 2)
return rule
def security_group_rule_db(rule, id=None):
attrs = rule.copy()
if 'ip_protocol' in attrs:
attrs['protocol'] = attrs.pop('ip_protocol')
return AttrDict(attrs)
def return_server(context, server_id,
columns_to_join=None, use_slave=False):
return fake_instance.fake_db_instance(
**{'id': int(server_id),
'power_state': 0x01,
'host': "localhost",
'uuid': FAKE_UUID1,
'name': 'asdf'})
def return_server_by_uuid(context, server_uuid,
columns_to_join=None,
use_slave=False):
return fake_instance.fake_db_instance(
**{'id': 1,
'power_state': 0x01,
'host': "localhost",
'uuid': server_uuid,
'name': 'asdf'})
def return_non_running_server(context, server_id, columns_to_join=None):
return fake_instance.fake_db_instance(
**{'id': server_id, 'power_state': power_state.SHUTDOWN,
'uuid': FAKE_UUID1, 'host': "localhost", 'name': 'asdf'})
def return_security_group_by_name(context, project_id, group_name):
return {'id': 1, 'name': group_name,
"instances": [{'id': 1, 'uuid': FAKE_UUID1}]}
def return_security_group_without_instances(context, project_id, group_name):
return {'id': 1, 'name': group_name}
def return_server_nonexistent(context, server_id, columns_to_join=None):
raise exception.InstanceNotFound(instance_id=server_id)
class TestSecurityGroupsV21(test.TestCase):
secgrp_ctl_cls = secgroups_v21.SecurityGroupController
server_secgrp_ctl_cls = secgroups_v21.ServerSecurityGroupController
secgrp_act_ctl_cls = secgroups_v21.SecurityGroupActionController
def setUp(self):
super | (TestSecurityGroupsV21, self).setUp()
| self.controller = self.secgrp_ctl_cls()
self.server_controller = self.server_secgrp_ctl_cls()
self.manager = self.secgrp_act_ctl_cls()
# This needs to be done here to set fake_id because the derived
# class needs to be called first if it wants to set
# 'security_group_api' and this setUp method needs to be called.
if self.controller.security_group_api.id_is_uuid:
self.fake_id = '11111111-1111-1111-1111-111111111111'
else:
self.fake_id = '11111111'
def _assert_no_security_groups_reserved(self, context):
"""Check that no reservations are leaked during tests."""
result = quota.QUOTAS.get_project_quotas(context, context.project_id)
self.assertEqual(result['security_groups']['reserved'], 0)
def _assert_security_groups_in_use(self, project_id, user_id, in_use):
context = context_maker.get_admin_context()
result = quota.QUOTAS.get_user_quotas(context, project_id, user_id)
self.assertEqual(result['security_groups']['in_use'], in_use)
def test_create_security_group(self):
sg = security_group_request_template()
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
res_dict = self.controller.create(req, {'security_group': sg})
self.assertEqual(res_dict['security_group']['name'], 'test')
self.assertEqual(res_dict['security_group']['description'],
'test-description')
def test_create_security_group_with_no_name(self):
sg = security_group_request_template()
del sg['name']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_no_description(self):
sg = security_group_request_template()
del sg['description']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_empty_description(self):
sg = security_group_request_template()
sg['description'] = ""
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
try:
self.controller.create(req, {'security_group': sg})
self.fail('Should have raised BadRequest exception')
except webob.exc.HTTPBadRequest as exc:
self.assertEqual('description has a minimum character requirement'
' of 1.', exc.explanation)
except exception.InvalidInput:
self.fail('Should have raised BadRequest exception instead of')
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_blank_name(self):
sg = security_group_request_template(name='')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_whitespace_name(self):
sg = security_group_request_template(name=' ')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_blank_description(self):
sg = security_group_request_template(description='')
req = fakes.HTT |
nocarryr/rtlsdr-wwb-scanner | wwb_scanner/ui/plots.py | Python | gpl-2.0 | 3,912 | 0.006646 | import numpy as np
import matplotlib.pyplot as plt
from wwb_scanner.scan_objects.spectrum import compare_spectra
from wwb_scanner.file_handlers import BaseImporter
class BasePlot(object):
def __init__(self, **kwargs):
self.filename = kwargs.get('filename')
if self.filename is not None:
self.spectrum = BaseImporter.import_file(self.filename)
else:
self.spectrum = kwargs.get('spectrum')
#self.figure.canvas.mpl_connect('idle_event', self.on_idle)
@property
def x(self):
return getattr(self, '_x', None)
@x.setter
def x(self, value):
self._x = value
@property
def y(self):
return getattr(self, '_y', None)
| @y.setter
def y(self, value):
self._y = value
@property
def figure(self):
return getattr(self, '_figure', None)
@figure.setter
def figure(self, figure):
self._figure = figure
#self.timer = figure.canvas.new_timer(interval=100)
#self.timer.add_callba | ck(self.on_timer)
def on_timer(self):
print('timer')
spectrum = self.spectrum
with spectrum.data_update_lock:
if spectrum.data_updated.is_set():
print('update plot')
self.update_plot()
spectrum.data_updated.clear()
def build_data(self):
dtype = np.dtype(float)
if not len(self.spectrum.samples):
x = self.x = np.array(0.)
y = self.y = np.array(0.)
else:
x = self.x = np.fromiter(self.spectrum.iter_frequencies(), dtype)
y = self.y = np.fromiter((s.magnitude for s in self.spectrum.iter_samples()), dtype)
if not hasattr(self, 'plot'):
self.spectrum.data_updated.clear()
return x, y
def update_plot(self):
if not hasattr(self, 'plot'):
return
x, y = self.build_data()
self.plot.set_xdata(x)
self.plot.set_ydata(y)
#self.figure.canvas.draw_event(self.figure.canvas)
self.figure.canvas.draw_idle()
def build_plot(self):
pass
class SpectrumPlot(BasePlot):
def build_plot(self):
self.figure = plt.figure()
self.plot = plt.plot(*self.build_data())[0]
plt.xlabel('frequency (MHz)')
plt.ylabel('dBm')
center_frequencies = self.spectrum.center_frequencies
if len(center_frequencies):
samples = [self.spectrum.samples.get(f) for f in center_frequencies]
ymin = self.y.min()
plt.vlines(center_frequencies,
[ymin] * len(center_frequencies),
[s.magnitude-5 if s.magnitude-5 > ymin else s.magnitude for s in samples])
plt.show()
class DiffSpectrum(object):
def __init__(self, **kwargs):
self.spectra = []
self.figure, self.axes = plt.subplots(3, 1, sharex='col')
def add_spectrum(self, spectrum=None, **kwargs):
name = kwargs.get('name')
if name is None:
name = str(len(self.spectra))
if spectrum is None:
spectrum = BaseImporter.import_file(kwargs.get('filename'))
self.spectra.append({'name':name, 'spectrum':spectrum})
def build_plots(self):
dtype = np.dtype(float)
if len(self.spectra) == 2:
diff_spec = compare_spectra(self.spectra[0]['spectrum'],
self.spectra[1]['spectrum'])
self.spectra.append({'name':'diff', 'spectrum':diff_spec})
for i, spec_data in enumerate(self.spectra):
spectrum = spec_data['spectrum']
x = np.fromiter(spectrum.iter_frequencies(), dtype)
y = np.fromiter((s.magnitude for s in spectrum.iter_samples()), dtype)
axes = self.axes[i]
axes.plot(x, y)
axes.set_title(spec_data['name'])
plt.show()
|
pombredanne/django-fluent-contents | fluent_contents/plugins/oembeditem/fields.py | Python | apache-2.0 | 1,167 | 0.005998 | from django.core.exceptions import ValidationError
from django.db.models import URLField
from django.utils.translation import ugettext_lazy as _
from fluent_contents.plugins.oembeditem import backend
class OEmbedUrlField(URLField):
"""
URL Field which validates whether the URL is supported by the OEmbed provider.
This feature is provided as model field, so other apps can use the same logic too.
"""
def __init__(self, *args, **kwargs):
kwargs.setdefault('help_text', _("Enter the URL of the online content to embed (e.g. a YouTube or Vimeo video, SlideShare presentation, etc..)"))
super(OEmbedUrlField, self).__init__(*args, **kwargs)
def clean(self, *args, **kwargs):
url = super(OEmbedUrlField, self).clean(*args, | **kwargs)
if not backend.has_provider_for_url(url):
raise ValidationError(_("The URL is not valid for embedding content")) # or is not configured as provider.
return url
try:
from south.modelsinspector import add_introspection_rules
except ImportErr | or:
pass
else:
add_introspection_rules([], ["^" + __name__.replace(".", "\.") + "\.OEmbedUrlField"])
|
parksandwildlife/wastd | conservation/migrations/0002_auto_20180926_1147.py | Python | mit | 646 | 0.001548 | # Generated by Django 2.0.8 on 2018-09-26 03:47
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migra | tion):
dependencies = [
('conservation', '0 | 001_squashed_0027_auto_20180509_1048'),
]
operations = [
migrations.AlterField(
model_name='fileattachment',
name='author',
field=models.ForeignKey(blank=True, help_text='The person who authored and endorsed this file.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Author'),
),
]
|
cisco/xr-telemetry-m2m-web | src/m2m_demo/__init__.py | Python | apache-2.0 | 285 | 0 | # =============================================================================
# __init__.py
#
# M2M demo init file.
#
# December 20 | 15
#
# Copyright (c) 2015 b | y cisco Systems, Inc.
# All rights reserved.
# =============================================================================
|
opensyllabus/osp-api | taskrefresh.py | Python | mit | 2,540 | 0.000394 | """AWS ECS: Update cluster servic | e to utilize a cloned task definition.
Clone latest task and register it in order to update the image
used on tasks in the service. It will take a while becau | se it
needs to drain connections and some other stuff.
"""
import sys
import os
import subprocess
import json
# we don't want to accidentally reveal any passwords on travis ci
sys.stdout = open(os.devnull, 'w')
# Get a list of task definition ARNs and find
# the latest task.
list_task_definitions_output = subprocess.check_output(
['aws', 'ecs', 'list-task-definitions'],
)
task_definitions_dict = json.loads(list_task_definitions_output.decode('UTF-8'))
list_of_task_definition_arns = task_definitions_dict['taskDefinitionArns']
for i in range(len(list_of_task_definition_arns) - 1, -1, -1):
task_definition_arn = list_of_task_definition_arns[i]
if task_definition_arn.rsplit(':', 1)[0].endswith('/osp-api'):
break
else:
raise Exception("Task definition not found...")
# Now let's get the JSON for the latest task we found
task_definition_output = subprocess.check_output(
[
'aws',
'ecs',
'describe-task-definition',
'--task-definition',
task_definition_arn,
],
)
latest_task_definition = json.loads(task_definition_output.decode('UTF-8'))
latest_task_definition = latest_task_definition['taskDefinition']
# Now modify the latest task's definition to suit the new deploy.
# First get old image path, then make new path using TRAVIS_TAG
old_image = latest_task_definition['containerDefinitions'][0]['image']
new_image = old_image.rsplit(':', 1)[0] + ':' + os.getenv('TRAVIS_TAG')
latest_task_definition['containerDefinitions'][0]['image'] = new_image
del latest_task_definition['revision']
del latest_task_definition['taskDefinitionArn']
del latest_task_definition['status']
del latest_task_definition['requiresAttributes']
# Now register the task we've just built in memory...
register_task_output = subprocess.check_output(
[
'aws',
'ecs',
'register-task-definition',
'--family',
'osp-api',
'--cli-input-json',
json.dumps(latest_task_definition),
],
)
new_task_definition = json.loads(register_task_output.decode('UTF-8'))
# Finally update the service
update_service_output = subprocess.check_output(
[
'aws',
'ecs',
'update-service',
'--cluster',
'osp-api',
'--service',
'osp-api',
'--task-definition',
'osp-api',
],
)
|
tsuru/varnishapi | run_instance_starter.py | Python | bsd-3-clause | 697 | 0.002869 | # Copyright 2014 va | rnishapi authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# | license that can be found in the LICENSE file.
import argparse
from feaas import api
from feaas.runners import instance_starter
def run(manager):
parser = argparse.ArgumentParser("Instance starter runner")
parser.add_argument("-i", "--interval",
help="Interval for running InstanceStarter (in seconds)",
default=10, type=int)
args = parser.parse_args()
starter = instance_starter.InstanceStarter(manager, args.interval)
starter.loop()
if __name__ == "__main__":
manager = api.get_manager()
run(manager)
|
CospanDesign/python | pyqt/threading/example1.py | Python | mit | 1,752 | 0.006279 | #! /usr/bin/python
import sys
import time
from PyQt4 import QtCore
from PyQt4 import QtGui
class WorkThread(QtCore.QThread):
def __init__(self):
QtCore.QThread.__init__(self)
def __del__(self):
self.wait()
def run(self):
for i in range (6):
print " | .",
time.sleep(0.3)
self.emit(QtCore.SIGNAL("Update(QString)"), "from worker thread " + str(i))
| return
class MyApp(QtGui.QWidget):
def __init__(self, parent = None):
QtGui.QWidget.__init__(self, parent)
self.setGeometry(300, 300, 280, 600)
self.setWindowTitle("Threads")
self.layout= QtGui.QVBoxLayout(self)
self.testButton = QtGui.QPushButton("test")
self.connect(self.testButton, QtCore.SIGNAL("released()"), self.test)
self.listwidget = QtGui.QListWidget(self)
self.layout.addWidget(self.testButton)
self.layout.addWidget(self.listwidget)
def add(self, text):
""" Add item to list widget """
print "Add: %s" % text
self.listwidget.addItem(text)
self.listwidget.sortItems()
def addBatch(self, text="test", iters=6, delay=0.3):
"""Add several items to the list widget"""
for i in range (iters):
#Artificial delay
time.sleep(delay)
self.add(text + " " + str(i))
def test(self):
self.listwidget.clear()
self.addBatch("_non_thread", iters=6, delay=0.3)
self.workThread = WorkThread()
self.connect(self.workThread, QtCore.SIGNAL("Update(QString)"), self.add)
self.workThread.start()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
test = MyApp()
test.show()
app.exec_()
|
rspavel/spack | lib/spack/spack/filesystem_view.py | Python | lgpl-2.1 | 26,567 | 0.000038 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools as ft
import os
import re
import shutil
import sys
from llnl.util.link_tree import LinkTree, MergeConflictError
from llnl.util import tty
from llnl.util.lang import match_predicate, index_by
from llnl.util.tty.color import colorize
from llnl.util.filesystem import (
mkdirp, remove_dead_links, remove_empty_directories)
import spack.util.spack_yaml as s_yaml
import spack.util.spack_json as s_json
import spack.spec
import spack.store
import spack.schema.projections
import spack.projections
import spack.config
import spack.relocate
from spack.error import SpackError
from spack.directory_layout import ExtensionAlreadyInstalledError
from spack.directory_layout import YamlViewExtensionsLayout
# compatability
if sys.version_info < (3, 0):
from itertools import imap as map
from itertools import ifilter as filter
from itertools import izip as zip
__all__ = ["FilesystemView", "YamlFilesystemView"]
_projections_path = '.spack/projections.yaml'
def view_symlink(src, dst, **kwargs):
# keyword arguments are irrelevant
# here to fit required call signature
os.symlink(src, dst)
def view_hardlink(src, dst, **kwargs):
# keyword arguments are irrelevant
# here to fit required call signature
os.link(src, dst)
def view_copy(src, dst, view, spec=None):
"""
Copy a file from src to dst.
Use spec and view to generate relocations
"""
shutil.copyfile(src, dst)
if spec:
# Not metadata, we have to relocate it
# Get information on where to relocate from/to
prefix_to_projection = dict(
(dep.prefix, view.get_projection_for_spec(dep))
for dep in spec.traverse()
)
if spack.relocate.is_binary(dst):
# relocate binaries
spack.relocate.relocate_text_bin(
binaries=[dst],
orig_install_prefix=spec.prefix,
new_install_prefix=view.get_projection_for_spec(spec),
orig_spack=spack.paths.spack_root,
new_spack=view._root,
new_prefixes=prefix_to_projection
)
else:
# relocate text
spack.relocate.relocate_text(
files=[dst],
orig_layout_root=spack.store.layout.root,
new_layout_root=view._root,
orig_install_prefix=spec.prefix,
new_install_prefix=view.get_projection_for_spec(spec),
orig_spack=spack.paths.spack_root,
new_spack=view._root,
new_prefixes=prefix_to_projection
)
class FilesystemView(object):
"""
Governs a filesystem view that is located at certain root-directory.
Packages are linked from their install directories into a common file
hierachy.
In distributed filesystems, loading each installed package seperately
can lead to slow-downs due to too many directories being traversed.
This can be circumvented by loading all needed modules into a common
directory structure.
"""
def __init__(self, root, layout, **kwargs):
"""
Initialize a filesystem view under the given `root` directory with
corresponding directory `layout`.
Files are linked by method `link` (os.symlink by default).
"""
self._root = root
self.layout = layout
self.projections = kwargs.get('projections', {})
self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
self.verbose = kwargs.get("verbose", False)
# Setup link function to include view
link_func = kwargs.get("link", view_symlink)
self.link = ft.partial(link_func, view=self)
def add_specs(self, *specs, **kwargs):
"""
Add given specs to view.
The supplied specs might be standalone packages or extensions of
other packages.
Should accept `with_dependencies` as keyword argument (default
True) to indicate wether or not dependencies should be activated as
well.
Should except an `exclude` keyword argument containing a list of
regexps that filter out matching spec names.
This method should make use of `activate_{extension,standalone}`.
"""
raise NotImplementedError
def add_extension(self, spec):
"""
Add (link) an extension in this v | iew. Does not add dependenci | es.
"""
raise NotImplementedError
def add_standalone(self, spec):
"""
Add (link) a standalone package into this view.
"""
raise NotImplementedError
def check_added(self, spec):
"""
Check if the given concrete spec is active in this view.
"""
raise NotImplementedError
def remove_specs(self, *specs, **kwargs):
"""
Removes given specs from view.
The supplied spec might be a standalone package or an extension of
another package.
Should accept `with_dependencies` as keyword argument (default
True) to indicate wether or not dependencies should be deactivated
as well.
Should accept `with_dependents` as keyword argument (default True)
to indicate wether or not dependents on the deactivated specs
should be removed as well.
Should except an `exclude` keyword argument containing a list of
regexps that filter out matching spec names.
This method should make use of `deactivate_{extension,standalone}`.
"""
raise NotImplementedError
def remove_extension(self, spec):
"""
Remove (unlink) an extension from this view.
"""
raise NotImplementedError
def remove_standalone(self, spec):
"""
Remove (unlink) a standalone package from this view.
"""
raise NotImplementedError
def get_projection_for_spec(self, spec):
"""
Get the projection in this view for a spec.
"""
raise NotImplementedError
def get_all_specs(self):
"""
Get all specs currently active in this view.
"""
raise NotImplementedError
def get_spec(self, spec):
"""
Return the actual spec linked in this view (i.e. do not look it up
in the database by name).
`spec` can be a name or a spec from which the name is extracted.
As there can only be a single version active for any spec the name
is enough to identify the spec in the view.
If no spec is present, returns None.
"""
raise NotImplementedError
def print_status(self, *specs, **kwargs):
"""
Print a short summary about the given specs, detailing whether..
* ..they are active in the view.
* ..they are active but the activated version differs.
* ..they are not activte in the view.
Takes `with_dependencies` keyword argument so that the status of
dependencies is printed as well.
"""
raise NotImplementedError
class YamlFilesystemView(FilesystemView):
"""
Filesystem view to work with a yaml based directory layout.
"""
def __init__(self, root, layout, **kwargs):
super(YamlFilesystemView, self).__init__(root, layout, **kwargs)
# Super class gets projections from the kwargs
# YAML specific to get projections from YAML file
self.projections_path = os.path.join(self._root, _projections_path)
if not self.projections:
# Read projections file from view
self.projections = self.read_projections()
elif not os.path.exists(self.projections_path):
# Write projections file to new view
|
anhstudios/swganh | data/scripts/templates/object/tangible/lair/base/shared_poi_all_lair_mound_large_evil_fire_green.py | Python | mit | 466 | 0.04721 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = | Tangible()
result.template = "object/tangible/lair/base/shared_poi_all_lair_mound_large_evil_fire_green.iff"
result.attribute_template_id = -1
result.stfName("lair_n","mound")
#### BEGIN MODIFICATIONS ####
| #### END MODIFICATIONS ####
return result |
Pikecillo/genna | external/4Suite-XML-1.0.2/test/Xml/Xslt/Core/test_call_template.py | Python | gpl-2.0 | 1,488 | 0.001344 | from Xml.Xslt import test_harness
sheet_str = """<?xml version="1.0"?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
<xsl:template match="/">
<root>
<xsl:apply-templates/>
</root>
</xsl:template>
<xsl:template name="do-the-rest">
<xsl:param name="start"/>
<xsl:param name="count"/>
<tr>
<xsl:for-each select="item[position()> | ;=$start and position()<$start+$count]">
<td>
<xsl:value-of select="."/>
</td>
</xsl:for-each>
</tr>
<xsl:if test="$start + $count - 1 < count(child::item)">
<xsl:call-template name="do-the-rest">
<xsl:with-param name="st | art" select="$start + $count"/>
<xsl:with-param name="count" select="$count"/>
</xsl:call-template>
</xsl:if>
</xsl:template>
<xsl:template match="data">
<xsl:call-template name="do-the-rest">
<xsl:with-param name="start" select="1"/>
<xsl:with-param name="count" select="2"/>
</xsl:call-template>
</xsl:template>
</xsl:stylesheet>
"""
source_str = """<?xml version = "1.0"?>
<data>
<item>b</item>
<item>a</item>
<item>d</item>
<item>c</item>
</data>
"""
expected = """<?xml version='1.0' encoding='UTF-8'?>
<root><tr><td>b</td><td>a</td></tr><tr><td>d</td><td>c</td></tr></root>"""
def Test(tester):
source = test_harness.FileInfo(string=source_str)
sheet = test_harness.FileInfo(string=sheet_str)
test_harness.XsltTest(tester, source, [sheet], expected,
title='xsl:call-template')
return
|
quanvm009/codev7 | openerp/addons_quan/lifestyle/wizard/create_po_wizard.py | Python | agpl-3.0 | 7,812 | 0.006144 | # -*- coding: utf-8 -*-
# #####################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP s.a. (<http://openerp.com>).
# Copyright (C) 2013 INIT Tech Co., Ltd (http://init.vn).
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #####################################################################
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import osv
from openerp.osv import fields
from openerp.tools.translate import _
from openerp import netsvc
class create_po_wizard(osv.osv_memory):
_name = "create.po.wizard"
def default_get(self, cr, uid, fields, context):
if context is None:
context = {}
section_obj = self.pool.get('production.section')
res = super(create_po_wizard, self).default_get(cr, uid, fields, context=context)
obj_plan = self.pool.get('production.plan').browse(cr, uid, context['active_ids'])
lst_section = section_obj.search(cr, uid, [('plan_id', '=', context['active_ids'][0])], context=context)
lst_material_id = []
lst_finished_id = []
for section in section_obj.browse(cr, uid, lst_section, context=context):
lst_material_id += [line.product_id.id for line in
section_obj.browse(cr, uid, section.id, context=context).material_ids]
lst_finished_id.append(section.product_id.id)
lst_material_id = list(set(lst_material_id))
lst_finished_id = list(set(lst_finished_id))
lst_material_id = [n for n in lst_material_id if n not in lst_finished_id]
list_obj_section = self.pool.get('production.section').browse(cr, uid, lst_section)
result = []
for obj_section in list_obj_section:
for line in obj_section.material_ids:
if line.product_id.id in lst_material_id:
dic = {
'product_id': line.product_id and line.product_id.id or False,
'product_uom': line.product_uom and line.product_uom.id or False,
'qty_kg': line.qty_kg or 0.0,
'quantity': line.quantity or 0,
'sale_line_id': obj_plan[0].sale_line_id.id or False,
}
result.append(dic)
res['material_ids'] = result
return res
def _get_default_warehouse(self, cr, uid, context=None):
company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
warehouse_ids = self.pool.get('stock.warehouse').search(cr, uid, [('company_id', '=', company_id)],
context=context)
if not warehouse_ids:
raise osv.except_osv(_('Error!'), _('There is no default warehouse for the current user\'s company!'))
return warehouse_ids[0]
_columns = {
'partner_id': fields.many2one('res.partner', 'Supplier', required=True),
'stock_id': fields.many2one('stock.warehouse', 'Warehouse', required=True),
'date_order': fields.date('Date Order', required=True),
'material_ids': fields.one2many('production.material.wizard', 'material_id_wizard', 'Materials'),
}
_defaults = {
'date_order': lambda *a: datetime.now().strftime('%Y-%m-%d'),
'stock_id': _get_default_warehouse,
}
def button_validate(self, cr, uid, ids, context={}):
obj_material = self.browse(cr, uid, ids)[0]
obj_plan = self.pool.get('production.plan').browse(cr, uid, context['active_ids'])
p_id = self.pool.get('purchase.order').create(cr, uid, {'partner_id': obj_material.partner_id.id,
'date_order': obj_material.date_order,
'location_id': obj_material.stock_id and obj_material.stock_id.lot_stock_id.id or False,
'warehouse_id': obj_material.stock_id.id,
'pricelist_id': 1,
})
obj_line = self.browse(cr, uid, ids)[0].material_ids
wf_service = netsvc.LocalService("workflow")
for line in obj_line:
self.pool.get('purchase.order.line').create(cr, uid, {
'product_id': line.product_id and line.product_id.id or False,
'name': line.product_id and line.product_id.name or '',
'product_qty': line.quantity or 0,
'qty_kg': line.qty_kg or 0.0,
'product_uom': line.product_uom and line.product_uom.id or False,
# 'price_unit': line.product_id and line.product_id.list_price or 0,
'order_id': p_id,
'date_planned': time.strftime('%Y-%m-%d'),
'sale_line_id': line.sale_line_id.id or False,
'price_unit': line.price_unit,
})
self.pool.get('history.plan').create(cr, uid, {
'product_id': line.product_id and line.product_id.id or False,
'quantity': line.quantity or 0,
'user_id': obj_plan[0].user_id and obj_plan[0].user_id.id or False,
'date_create': obj_plan[0].date_create or False,
'plan_id': context['active_ids'][0],
'stock_id': obj_material.stock_id.id,
})
wf_service.trg_validate(uid, 'purchase.order', p_id, 'purchase_confirm', cr)
return {
'name': _('Incoming Shipment'),
'view_type': 'form',
"view_mode": 'tree,form',
'res_model': 'stock.picking.in',
'type': 'ir.actions.act_window',
'nodestroy': True,
'domain': "[('purchase_id','=',%d)]" % (p_id)
}
def button_cancel(self, cr, uid, ids, context=None):
return
create_po_wizard()
class production_material_wizard(osv.osv_memory):
_name = "production.material.wizard"
_columns = {
'product_id': fields.many2one('product.product', 'Product'),
'quantity': fields.float('Qty(Kg)'),
'qty_kg': fields.float('Qty(Yard)'),
'product_uom': fields.many2one('product.uom', 'UoM'),
| 'sale_line_id': fields.many2one('sale.order.line', 'Sale Order Line'),
'material_id_wizard': fields.many2one | ('create.po.wizard', 'Material'),
'price_unit': fields.float('Unit Price', required=True, digits_compute=dp.get_precision('Product Price')),
}
_defaults = {
'quantity': 0,
'qty_kg': 0,
}
def onchange_product_id(self, cr, uid, ids, prod_id=False, context=None):
""" On change of product id, .
@return: Dictionary of values
"""
if not prod_id:
return {}
product = self.pool.get('product.product').browse(cr, uid, [prod_id], context=context)[0]
result = {
'product_uom': product.uom_id.id,
}
return {'value': result}
production_material_wizard()
|
OCA/l10n-netherlands | l10n_nl_xaf_auditfile_export/tests/__init__.py | Python | agpl-3.0 | 118 | 0 | # L | icense AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import test_l10n_nl_xaf_auditfile_export | |
cpieloth/BackBacker | setup_commands.py | Python | gpl-3.0 | 8,386 | 0.002385 | """Custom commands for setup.py"""
import abc
import distutils.cmd
import os
import re
__author__ = 'Christof Pieloth'
working_dir = os.path.abspath(os.path.dirname(__file__))
build_dir = os.path.join(working_dir, 'build')
api_name = 'backbacker'
project_name = re.search('^__project_name__\s*=\s*\'(.*)\'',
open(os.path.join(working_dir, api_name, '__init__.py')).read(), re.M).group(1)
version = re.search('^__version__\s*=\s*\'(.*)\'',
open(os.path.join(working_dir, api_name, '__init__.py')).read(), re.M).group(1)
class CustomCommand(distutils.cmd.Command, metaclass=abc.ABCMeta):
"""Base class for all custom commands."""
@staticmethod
def description(desc):
"""
Generate description text.
:param desc: Description.
:return: Text for description.
:rtype: str
"""
return '{}: {}'.format(api_name.upper(), desc)
@classmethod
@abc.abstractmethod
def name(cls):
"""
Return name of the command.
:return: Name of the command.
:rtype: str
"""
raise NotImplementedError()
@classmethod
@abc.abstractmethod
def clean_folders(cls):
"""
Return list of folders to clean-up.
:return: List of folders.
:rtype: list
"""
raise NotImplementedError()
class PackageCustomCmd(CustomCommand):
"""Create a Python Built Distribution package."""
description = CustomCommand.description(__doc__)
user_options = []
@classmethod
def clean_folders(cls):
return [os.path.join(working_dir, 'dist'),
os.path.join(working_dir, 'temp'),
os.path.join(working_dir, '{}.egg-info'.format(project_name)),
os.path.join(working_dir, 'build', 'lib'), # do not use variable build_dir
os.path.join(working_dir, 'build', 'bdist*') # do not use variable build_dir
]
@classmethod
def name(cls):
return 'package'
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
from setuptools import sandbox
sandbox.run_setup('setup.py', ['bdist_wheel', '--python-tag', 'py3'])
class DocumentationCustomCmd(CustomCommand):
"""Generate HTML documentation with Sphinx."""
description = CustomCommand.description(__doc__)
user_options = []
sphinx_build_dir = os.path.join(build_dir, 'sphinx')
doc_build_dir = os.path.join(build_dir, 'docs')
@classmethod
def clean_folders(cls):
return [cls.doc_build_dir, cls.sphinx_build_dir]
@classmethod
def name(cls):
return 'documentation'
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sphinx.ext.apidoc
import sphinx.cmdline
# generate source files for Sphinx from python code
argv = ['-f', '-o', self.sphinx_build_dir, os.path.join(working_dir, api_name)]
sphinx.ext.apidoc.main(argv)
# copy configuration and source files to build folder, to keep doc/sphinx clean
self.copy_tree(os.path.join(working_dir, 'docs'), self.sphinx_build_dir)
# generate HTML
argv = ['-b', 'html', '-a', self.sphinx_build_dir, self.doc_build_dir]
return sphinx.cmdline.main(argv)
class CheckCodeCustomCmd(CustomCommand):
"""Run code analysis with pylint."""
description = CustomCommand.description(__doc__)
user_options = []
@classmethod
def name(cls):
return 'check_code'
@classmethod
def clean_folders(cls):
return []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
from pylint.lint import Run
args = ['--rcfile', os.path.join(working_dir, 'tools', 'pylintrc'), os.path.join(working_dir, api_name)]
return Run(args, do_exit=False).linter.msg_status
class CheckStyleCodeCustomCmd(CustomCommand):
"""Run style checker for code with pep8."""
description = CustomCommand.description(__doc__)
user_options = []
@classmethod
def name(cls):
return 'check_style_code'
@classmethod
def clean_folders(cls):
return []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import pycodestyle
ignores = list()
style_guide = pycodestyle.StyleGuide(ignore=ignores, max_line_length=120)
report = style_guide.check_files([os.path.join(working_dir, api_name), os.path.join(working_dir, 'tests')])
return report.total_errors
class CheckStyleDocCustomCmd(CustomCommand):
"""Run style checker for docstrings with pep257."""
description = CustomCommand.description(__doc__)
user_options = []
@classmethod
def name(cls):
return | 'check_style_doc'
@classmethod
def clean_folders(cls):
return []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import pep257
import sys
ignores = list()
ignores.append('D105') # Missing docstring in magic method
| ignores.append('D203') # 1 blank line required before class docstring
sys.argv = ['pep257', '--ignore={}'.format(','.join(ignores)), os.path.join(working_dir, api_name)]
return pep257.run_pep257()
class CheckStyleCustomCmd(CustomCommand):
"""Run style checkers."""
description = CustomCommand.description(__doc__)
user_options = []
@classmethod
def name(cls):
return 'check_style'
@classmethod
def clean_folders(cls):
return []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
self.run_command('check_style_code')
self.run_command('check_style_doc')
class CoverageCustomCmd(CustomCommand):
"""Generate unit test coverage report."""
description = CustomCommand.description(__doc__)
user_options = []
dst_dir = os.path.join(build_dir, 'coverage_html')
@classmethod
def name(cls):
return 'coverage'
@classmethod
def clean_folders(cls):
return [cls.dst_dir, os.path.join(working_dir, '.coverage')]
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
from coverage.cmdline import main
tests_dir = os.path.join(working_dir, 'tests', api_name)
argv = ['run', '--source', api_name, '-m', 'unittest', 'discover', tests_dir]
rc = main(argv)
if rc != 0:
return rc
argv = ['html', '-d', self.dst_dir]
return main(argv)
class CleanCustomCmd(CustomCommand):
"""Extends standard clean command to clean-up fiels and folders of custom commands."""
user_options = []
@classmethod
def name(cls):
return 'clean'
@classmethod
def clean_folders(cls):
return []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# invoke the default clean()
import shutil
from distutils.command.clean import clean
c = clean(self.distribution)
c.all = True
c.finalize_options()
c.run()
pycache_folders = [root for root, _, _ in os.walk(working_dir) if root.endswith('__pycache__')]
custom_folders = [folder for cmd in custom_commands.values() for folder in cmd.clean_folders()]
# additional cleanup
for folder in pycache_folders + custom_folders:
try:
if os.path.isfile(folder):
os.remove(folder)
else:
shutil.rmtree(folder)
except:
pass
custom_commands = {
PackageCustomCmd.name(): PackageCustomCmd,
DocumentationCustomCmd.name(): DocumentationCustomCmd,
CheckCodeCustomCmd.name(): CheckCodeCustomCmd,
CheckStyleCodeCustomCmd.name(): CheckStyleCodeCustomCmd,
|
renegelinas/mi-instrument | mi/dataset/parser/test/test_zplsc_c_echogram.py | Python | bsd-2-clause | 1,302 | 0 | #!/usr/bin/env python
import os
from mi.logging import log
from mi.dataset.parser.zplsc_c import ZplscCParser
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.driver.zplsc_c.resource import RESOURCE_PATH
__author__ = 'Rene Gelinas'
MODULE_NAME = 'mi.dataset.parser.zplsc_c'
CLASS_NAME = 'ZplscCRecoveredDataParticle'
config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: MODULE_NAME,
DataSetDriverConfigKeys.PARTICLE_CLASS: CLASS_NAME
}
def crea | te_zplsc_c_par | ser(file_handle):
"""
This function creates a zplsc-c parser for recovered data.
@param file_handle - File handle of the ZPLSC_C raw data.
"""
return ZplscCParser(config, file_handle, rec_exception_callback)
def file_path(filename):
log.debug('resource path = %s, file name = %s', RESOURCE_PATH, filename)
return os.path.join(RESOURCE_PATH, filename)
def rec_exception_callback(exception):
"""
Call back method to for exceptions
@param exception - Exception that occurred
"""
log.info("Exception occurred: %s", exception.message)
def zplsc_c_echogram_test():
with open(file_path('160501.01A')) as in_file:
parser = create_zplsc_c_parser(in_file)
parser.create_echogram()
if __name__ == '__main__':
zplsc_c_echogram_test()
|
StellarCN/py-stellar-base | stellar_sdk/xdr/int32.py | Python | apache-2.0 | 1,398 | 0 | # This is an automatically generated file.
# DO NOT EDIT or your changes may be overwritten
import base64
from xdrlib import Packer, Unpacker
from ..type_checked import type_checked
from .base import Integer
__all__ = ["Int32"]
@type_checked
class Int32:
"""
XDR Source Code::
typedef int int32;
"""
def __init__(self, int32: int) -> None:
self.int32 = int32
def pack( | self, packer: Packer) -> None:
Integer(self.int32).pack(packer)
@classmethod
def unpack(cls, unpacker: Unpacker) -> "Int32":
int32 = Integer.unpack(unpacker)
| return cls(int32)
def to_xdr_bytes(self) -> bytes:
packer = Packer()
self.pack(packer)
return packer.get_buffer()
@classmethod
def from_xdr_bytes(cls, xdr: bytes) -> "Int32":
unpacker = Unpacker(xdr)
return cls.unpack(unpacker)
def to_xdr(self) -> str:
xdr_bytes = self.to_xdr_bytes()
return base64.b64encode(xdr_bytes).decode()
@classmethod
def from_xdr(cls, xdr: str) -> "Int32":
xdr_bytes = base64.b64decode(xdr.encode())
return cls.from_xdr_bytes(xdr_bytes)
def __eq__(self, other: object):
if not isinstance(other, self.__class__):
return NotImplemented
return self.int32 == other.int32
def __str__(self):
return f"<Int32 [int32={self.int32}]>"
|
homeworkprod/byceps | byceps/services/email/service.py | Python | bsd-3-clause | 4,293 | 0 | """
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
from typing import Optional
from sqlalchemy.exc import IntegrityError
from ...database import db, upsert
from ... import email
from ...typing import BrandID
from ...util.jobqueue import enqueue
from .dbmodels import EmailConfig as DbEmailConfig
from .transfer.models import EmailConfig, Message, Sender
class UnknownEmailConfigId(ValueError):
pass
def create_config(
brand_id: BrandID,
sender_address: str,
*,
sender | _name: Optional[str] = | None,
contact_address: Optional[str] = None,
) -> EmailConfig:
"""Create a configuration."""
config = DbEmailConfig(
brand_id,
sender_address,
sender_name=sender_name,
contact_address=contact_address,
)
db.session.add(config)
db.session.commit()
return _db_entity_to_config(config)
def update_config(
brand_id: BrandID,
sender_address: str,
sender_name: Optional[str],
contact_address: Optional[str],
) -> EmailConfig:
"""Update a configuration."""
config = _find_db_config(brand_id)
if config is None:
raise UnknownEmailConfigId(
f'No e-mail config found for brand ID "{brand_id}"'
)
config.sender_address = sender_address
config.sender_name = sender_name
config.contact_address = contact_address
db.session.commit()
return _db_entity_to_config(config)
def delete_config(brand_id: BrandID) -> bool:
    """Delete the configuration for a brand.

    No database records (e.g. sites) may still reference the
    configuration when this is called.

    :return: ``True`` on success, ``False`` if deletion failed.
    """
    get_config(brand_id)  # Raises if the brand has no configuration.
    try:
        db.session \
            .query(DbEmailConfig) \
            .filter_by(brand_id=brand_id) \
            .delete()
        db.session.commit()
        return True
    except IntegrityError:
        # Something still references the config; undo and report failure.
        db.session.rollback()
        return False
def _find_db_config(brand_id: BrandID) -> Optional[DbEmailConfig]:
    """Look up the database config row for a brand, or return `None`."""
    query = db.session.query(DbEmailConfig).filter_by(brand_id=brand_id)
    return query.one_or_none()
def get_config(brand_id: BrandID) -> EmailConfig:
    """Return the brand's configuration.

    Raises `UnknownEmailConfigId` if none is configured for that brand.
    """
    db_config = _find_db_config(brand_id)
    if db_config is None:
        raise UnknownEmailConfigId(
            f'No e-mail config found for brand ID "{brand_id}"'
        )
    return _db_entity_to_config(db_config)
def set_config(
    brand_id: BrandID,
    sender_address: str,
    *,
    sender_name: Optional[str] = None,
    contact_address: Optional[str] = None,
) -> None:
    """Insert the configuration, or update it if one already exists.

    The pair (brand, sender address) identifies the row; name and
    contact address are the values written on insert or update.
    """
    identifier = {
        'brand_id': brand_id,
        'sender_address': sender_address,
    }
    replacement = {
        'sender_name': sender_name,
        'contact_address': contact_address,
    }
    upsert(DbEmailConfig.__table__, identifier, replacement)
def get_all_configs() -> list[EmailConfig]:
    """Return every stored configuration as a transfer object."""
    return [
        _db_entity_to_config(db_config)
        for db_config in db.session.query(DbEmailConfig).all()
    ]
def enqueue_message(message: Message) -> None:
    """Enqueue e-mail to be sent asynchronously."""
    # Thin wrapper: unpack the Message transfer object into the
    # field-by-field variant below.
    enqueue_email(
        message.sender, message.recipients, message.subject, message.body
    )
def enqueue_email(
    sender: Optional[Sender], recipients: list[str], subject: str, body: str
) -> None:
    """Enqueue e-mail to be sent asynchronously."""
    # The job queue serializes arguments, so pass the sender as a plain
    # string (or None) rather than a Sender object.
    if sender is not None:
        sender_str = sender.format()
    else:
        sender_str = None
    enqueue(send_email, sender_str, recipients, subject, body)
def send_email(
    sender: Optional[str], recipients: list[str], subject: str, body: str
) -> None:
    """Send e-mail."""
    # Executed synchronously here, and also by the background worker via
    # the enqueue() call in enqueue_email above.
    email.send(sender, recipients, subject, body)
def _db_entity_to_config(config: DbEmailConfig) -> EmailConfig:
    """Convert a database row into an `EmailConfig` transfer object."""
    return EmailConfig(
        config.brand_id,
        Sender(config.sender_address, config.sender_name),
        config.contact_address,
    )
|
jamesp/jpy | jpy/maths/derive.py | Python | mit | 2,080 | 0.006747 | #!/usr/bin/env pyt | hon
# -*- coding: utf-8 -*-
"""Numerical differentiation."""
import numpy as np
from jpy.maths.matrix import tridiag
def make_stencil(n, i, i_, _i):
    """Create a tridiagonal stencil matrix of size n.
    Creates a matrix to dot with a vector for performing discrete spatial computations.
    i, i_ and _i are multipliers of the ith, i+1 and i-1 values of the vector respectively.
    e.g. to calculate an average at position i based on neighbouring values:
    >>> s = make_stencil(N, 0, 0.5, 0.5)
    >>> avg_v = np.dot(s, v)
    The stencil has periodic boundaries.
    Returns an nxn matrix.
    """
    m = tridiag(n, i, i_, _i)
    # Periodic boundary: the "i+1" coefficient of the last row wraps to
    # column 0, and the "i-1" coefficient of the first row wraps to the
    # last column.
    m[-1,0] = i_
    m[0,-1] = _i
    return m
def make_central_difference1(n, dx):
    """Build a first-derivative operator for a periodic grid.

    Returns a function dfdx applying a central difference to a vector of
    n samples with constant spacing dx.

    >>> x = np.arange(0, 1, 0.01)
    >>> y = np.sin(2*np.pi*x)
    >>> dfdx = make_central_difference1(len(x), 0.01)
    >>> dfdx(y)  #=> ~ 2π cos(2πx)
    """
    # (phi[i+1] - phi[i-1]) / (2 dx), expressed as a matrix product.
    stencil = make_stencil(n, 0, 1, -1) / (2 * dx)
    def dfdx(phi):
        return stencil.dot(phi)
    return dfdx
def make_central_difference2(n, dx):
    """Build a second-derivative operator for a periodic grid.

    Returns a function d2fdx applying a central difference to a vector of
    n samples with constant spacing dx.

    >>> x = np.arange(0, 1, 0.01)
    >>> y = np.sin(2*np.pi*x)
    >>> d2fdx = make_central_difference2(len(x), 0.01)
    >>> d2fdx(y)  #=> ~ -4π² sin(2πx)
    """
    # (phi[i+1] - 2 phi[i] + phi[i-1]) / dx², expressed as a matrix product.
    stencil = make_stencil(n, -2, 1, 1) / (dx**2)
    def d2fdx(phi):
        return stencil.dot(phi)
    return d2fdx
if __name__ == '__main__':
    # test: plot sin(2pi x) and dy/dx = cos(2pi x) [normalised]
    import matplotlib.pyplot as plt
    X = 1.0
    dx = 0.01
    x = np.arange(0,X,dx)
    y = np.sin(2*np.pi*x/X)
    dfdx = make_central_difference1(len(x), dx)
    d2fdx = make_central_difference2(len(x), dx)
    plt.plot(x, y)
    # Divide by the analytic amplitudes (2π and 4π²) so all three curves
    # share the same vertical scale for visual comparison.
    plt.plot(x, dfdx(y) / (2*np.pi))
    plt.plot(x, d2fdx(y) / (4*np.pi*np.pi))
    plt.legend(['y=sin(x)','dy/dx','d2y/dx'])
    plt.show()
|
mercycorps/TolaActivity | indicators/migrations/0060_ind_level_fields_unique_together.py | Python | apache-2.0 | 436 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2019-06-13 15:38
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Enforce that each (level, level_order) pair is unique per indicator."""
    dependencies = [
        ('indicators', '0059_require_level_name'),
    ]
    operations = [
        # Adds a composite uniqueness constraint so two indicators cannot
        # occupy the same order slot within the same level.
        migrations.AlterUniqueTogether(
            name='indicator',
            unique_together=set([('level', 'level_order')]),
        ),
    ]
|
marios-zindilis/musicbrainz-django-models | musicbrainz_django_models/models/instrument_alias_type.py | Python | gpl-2.0 | 2,427 | 0.002472 | """
.. module:: instrument_alias_type
The **Instrum | ent Alias Type** Model. Here's a complete table of values, from the
MusicBrainz database dump of 2017-07-22:
+----+-----------------+--------+-------------+-------------+--------------------------------------+
| id | name | parent | child_order | description | gid |
+====+=================+========+=============+=============+======================================+
| 1 | Instrument name | | 0 | | 2322fc94-fbf3-3c09-b23c-aa5ec8d14fcd |
+----+-----------------+--------+-------------+-------------+--------------------------------------+
| 2 | Search hint | | | | 7d5ef40f-4856-3000-8667-aa13b9db547d |
+----+-----------------+--------+-------------+-------------+--------------------------------------+
PostgreSQL Definition
---------------------
The :code:`instrument_alias_type` table is defined in the MusicBrainz Server as:
.. code-block:: sql
CREATE TABLE instrument_alias_type ( -- replicate
id SERIAL, -- PK,
name TEXT NOT NULL,
parent INTEGER, -- references instrument_alias_type.id
child_order INTEGER NOT NULL DEFAULT 0,
description TEXT,
gid uuid NOT NULL
);
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from . import abstract
from ..signals import pre_save_name_is_member_of_name_choices_list
@python_2_unicode_compatible
class instrument_alias_type(abstract.model_type):
    """
    Not all parameters are listed here, only those that present some interest
    in their Django implementation.
    :param gid: this is interesting because it cannot be NULL but a default is
        not defined in SQL. The default `uuid.uuid4` in Django will generate a
        UUID during the creation of an instance.
    """
    INSTRUMENT_NAME = 'Instrument name'
    SEARCH_HINT = 'Search hint'
    # (stored value, human-readable label) pairs for the `name` field.
    NAME_CHOICES = (
        (INSTRUMENT_NAME, INSTRUMENT_NAME),
        (SEARCH_HINT, SEARCH_HINT))
    # Flat list of allowed values, derived from NAME_CHOICES.
    NAME_CHOICES_LIST = [_[0] for _ in NAME_CHOICES]
    name = models.TextField(choices=NAME_CHOICES)
    class Meta:
        db_table = 'instrument_alias_type'
# Pre-save hook; presumably validates that `name` is in NAME_CHOICES_LIST
# (see ..signals) — confirm against the signal handler's implementation.
models.signals.pre_save.connect(pre_save_name_is_member_of_name_choices_list, sender=instrument_alias_type)
|
hfp/tensorflow-xsmm | tensorflow/contrib/distributions/python/ops/bijectors/reshape.py | Python | apache-2.0 | 13,771 | 0.005882 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Reshape bijectors."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import bijector
from tensorflow.python.util import deprecation
__all__ = [
"Reshape",
]
@deprecation.deprecated(
    "2018-10-01",
    "The TensorFlow Distributions library has moved to "
    "TensorFlow Probability "
    "(https://github.com/tensorflow/probability). You "
    "should update all references to use `tfp.distributions` "
    "instead of `tfp.distributions`.",
    warn_once=True)
def _static_ndims_from_shape(shape):
  """Return the statically-known length of a vector-shape `Tensor`, or `None`.

  `shape` is a rank-1 Tensor describing an event shape; its own shape's
  first dimension is the number of event dimensions it encodes.
  """
  return tensor_shape.dimension_value(shape.shape.with_rank_at_least(1)[0])
@deprecation.deprecated(
    "2018-10-01",
    "The TensorFlow Distributions library has moved to "
    "TensorFlow Probability "
    "(https://github.com/tensorflow/probability). You "
    "should update all references to use `tfp.distributions` "
    "instead of `tfp.distributions`.",
    warn_once=True)
def _ndims_from_shape(shape):
  """Return the length of a vector-shape `Tensor` as a scalar `Tensor`.

  Graph-mode counterpart of `_static_ndims_from_shape` above.
  """
  return array_ops.shape(shape)[0]
class Reshape(bijector.Bijector):
"""Reshapes the `event_shape` of a `Tensor`.
The semantics generally follow that of `tf.reshape()`, with
a few differences:
* The user must provide both the input and output shape, so that
the transformation can be inverted. If an input shape is not
specified, the default assumes a vector-shaped input, i.e.,
event_shape_in = (-1,).
* The `Reshape` bijector automatically broadcasts over the leftmost
dimensions of its input (`sample_shape` and `batch_shape`); only
the rightmost `event_ndims_in` dimensions are reshaped. The
number of dimensions to reshape is inferred from the provided
`event_shape_in` (`event_ndims_in = len(event_shape_in)`).
Example usage:
```python
import tensorflow_probability as tfp
tfb = tfp.bijectors
r = tfb.Reshape(event_shape_out=[1, -1])
r.forward([3., 4.]) # shape [2]
# ==> [[3., 4.]] # shape [1, 2]
r.forward([[1., 2.], [3., 4.]]) # shape [2, 2]
# ==> [[[1., 2.]],
# [[3., 4.]]] # shape [2, 1, 2]
r.inverse([[3., 4.]]) # shape [1,2]
# ==> [3., 4.] # shape [2]
r.forward_log_det_jacobian(any_value)
# ==> 0.
r.inverse_log_det_jacobian(any_value)
# ==> 0.
```
"""
@deprecation.deprecated(
"2018-10-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). You "
"should update all references to use `tfp.distributions` "
"instead of `tfp.distributions`.",
warn_once=True)
def __init__(self, event_shape_out, event_shape_in=(-1,),
validate_args=False, name=None):
"""Creates a `Reshape` bijector.
Args:
event_shape_out: An `int`-like vector-shaped `Tensor`
representing the event shape of the transformed output.
event_shape_in: An optional `int`-like vector-shape `Tensor`
representing the event shape of the input. This is required in
order to define inverse operations; the default of (-1,)
assumes a vector-shaped input.
validate_args: Python `bool` indicating whether arguments should
be checked for correctness.
name: Python `str`, name given to ops managed by this object.
Raises:
TypeError: if either `event_shape_in` or `event_shape_out` has
non-integer `dtype`.
ValueError: if either of `event_shape_in` or `event_shape_out`
has non-vector shape (`rank > 1`), or if their sizes do not
match.
"""
with ops.name_scope(name, "reshape",
values=[event_shape_out, event_shape_in]):
event_shape_out = ops.convert_to_tensor(event_shape_out,
name="event_shape_out",
preferred_dtype=dtypes.int32)
event_shape_in = ops.convert_to_tensor(event_shape_in,
name="event_shape_in",
preferred_dtype=dtypes.int32)
assertions = []
assertions.extend(self._maybe_check_valid_shape(
event_shape_out, validate_args))
assertions.extend(self._maybe_check_valid_shape(
event_shape_in, validate_args))
self._assertions = assertions
self._event_shape_in = event_shape_in
self._event_shape_out = event_shape_out
super(Reshape, self).__init__(
forward_min_event_ndims=0,
is_constant_jacobian=True,
validate_args=validate_args,
name=name or "reshape")
  def _maybe_check_valid_shape(self, shape, validate_args):
    """Check that a shape Tensor is int-type and otherwise sane.

    Validates statically whatever can be determined at graph-construction
    time (raising immediately), and — only when `validate_args` is set —
    returns runtime assertion ops for anything that must wait until
    execution.

    Args:
      shape: vector-shaped, `int`-like `Tensor` describing an event shape.
      validate_args: Python `bool`; if `True`, build runtime assertions
        for properties not statically decidable.

    Returns:
      A (possibly empty) list of assertion ops.

    Raises:
      TypeError: if `shape` is not `int`-typed.
      ValueError: if a static check fails (rank > 1, more than one `-1`,
        or an element < -1).
    """
    if not shape.dtype.is_integer:
      raise TypeError("{} dtype ({}) should be `int`-like.".format(
          shape, shape.dtype.name))
    assertions = []
    ndims = array_ops.rank(shape)
    ndims_ = tensor_util.constant_value(ndims)
    # Rank check: static if the rank is known, otherwise a runtime assert.
    if ndims_ is not None and ndims_ > 1:
      raise ValueError("`{}` rank ({}) should be <= 1.".format(
          shape, ndims_))
    elif validate_args:
      assertions.append(check_ops.assert_less_equal(
          ndims, 1, message="`{}` rank should be <= 1.".format(shape)))
    shape_ = tensor_util.constant_value_as_shape(shape)
    if shape_.is_fully_defined():
      # Element checks done eagerly: at most one -1 (the inferred
      # dimension, as in tf.reshape), and no element below -1.
      es = np.int32(shape_.as_list())
      if sum(es == -1) > 1:
        raise ValueError(
            "`{}` must have at most one `-1` (given {})"
            .format(shape, es))
      if np.any(es < -1):
        raise ValueError(
            "`{}` elements must be either positive integers or `-1`"
            "(given {})."
            .format(shape, es))
    elif validate_args:
      # Same element checks, deferred to runtime assertion ops.
      assertions.extend([
          check_ops.assert_less_equal(
              math_ops.reduce_sum(
                  math_ops.cast(math_ops.equal(shape, -1), dtypes.int32)),
              1,
              message="`{}` elements must have at most one `-1`."
              .format(shape)),
          check_ops.assert_greater_equal(
              shape, -1,
              message="`{}` elements must be either positive integers or `-1`."
              .format(shape)),
      ])
    return assertions
def _reshape_helper(self, x, event_shape_in, event_shape_out):
"""Reshape only the event_shape of an input `Tensor`."""
event_ndims_in_ = _static_ndims_from_shape(event_shape_in)
event_ndims_in = _ndims_from_shape(event_shape_in)
x_ndims_, x_ndims = x.shape.ndims, array_ops.rank(x)
assertions = []
# Ensure x.event_shape is compatible with event_shape_in.
if (event_ndims_in_ is not None
and x_ndims_ is not None
and x.shape.with_rank_at_least(event_ndims_in_)[
x_ndims_-event_ndims_in_:].is_fully_defined()):
x_event_shape_, x_event_shape = [ # pylint: disable=unbalanced-tuple-unpacking
np.int32(x.shape[x_ndims_-event_ndims_in_:])]*2
else:
x_event_shape_, x_event_sh |
lukecwik/incubator-beam | sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py | Python | apache-2.0 | 47,214 | 0.003558 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the apiclient module."""
# pytype: skip-file
import json
import logging
import sys
import unittest
import mock
from apache_beam.metrics.cells import DistributionData
from apache_beam.options.pipeline_options import GoogleCloudOptions
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.pipeline import Pipeline
from apache_beam.portability import common_urns
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.runners.dataflow.internal import names
from apache_beam.runners.dataflow.internal.clients import dataflow
from apache_beam.transforms import Create
from apache_beam.transforms import DataflowDistributionCounter
from apache_beam.transforms import DoFn
from apache_beam.transforms import ParDo
from apache_beam.transforms.environments import DockerEnvironment
# Protect against environments where apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position, ungrouped-imports
try:
  from apache_beam.runners.dataflow.internal import apiclient
except ImportError:  # GCP extras (apitools) not installed
  apiclient = None  # type: ignore
# pylint: enable=wrong-import-order, wrong-import-position
FAKE_PIPELINE_URL = "gs://invalid-bucket/anywhere"
_LOGGER = logging.getLogger(__name__)
@unittest.skipIf(apiclient is None, 'GCP dependencies are not installed')
class UtilTest(unittest.TestCase):
  @unittest.skip("Enable once BEAM-1080 is fixed.")
  def test_create_application_client(self):
    # Smoke test: constructing the client with default options must not raise.
    pipeline_options = PipelineOptions()
    apiclient.DataflowApplicationClient(pipeline_options)
def test_pipeline_url(self):
pipeline_options = PipelineOptions([
'--subnetwork',
'/regions/MY/subnetworks/SUBNETWORK',
'--temp_location',
'gs://any-location/temp'
])
env = apiclient.Environment(
[],
pipeline_options,
'2.0.0', # any environment version
FAKE_PIPELINE_URL)
recovered_options = None
for additionalProperty in env.proto.sdkPipelineOptions.additionalProperties:
if additionalProperty.key == 'options':
recovered_options = additionalProperty.value
break
else:
self.fail(
'No pipeline options found in %s' % env.proto.sdkPipelineOptions)
pipeline_url = None
for property in recovered_options.object_value.properties:
if property.key == 'pipelineUrl':
pipeline_url = property.value
break
else:
self.fail('No pipeline_url found in %s' % recovered_options)
self.assertEqual(pipeline_url.string_value, FAKE_PIPELINE_URL)
def test_set_network(self):
pipeline_options = PipelineOptions([
'--network',
'anetworkname',
'--temp_location',
'gs://any-location/temp'
])
env = apiclient.Environment(
[], #packages
pipeline_options,
'2.0.0', #any environment version
FAKE_PIPELINE_URL)
self.assertEqual(env.proto.workerPools[0].network, 'anetworkname')
def test_set_subnetwork(self):
pipeline_options = PipelineOptions([
'--subnetwork',
'/regions/MY/subnetworks/SUBNETWORK',
'--temp_location',
'gs://any-location/temp'
])
env = apiclient.Environment(
[], #packages
pipeline_options,
'2.0.0', #any environment version
FAKE_PIPELINE_URL)
self.assertEqual(
env.proto.workerPools[0].subnetwork,
'/regions/MY/subnetworks/SUBNETWORK')
def test_flexrs_blank(self):
pipeline_options = PipelineOptions(
['--temp_location', 'gs://any-location/temp'])
env = apiclient.Environment(
[], #packages
pipeline_options,
'2.0.0', #any environment version
FAKE_PIPELINE_URL)
self.assertEqual(env.proto.flexResourceSchedulingGoal, None)
def test_flexrs_cost(self):
pipeline_options = PipelineOptions([
'--flexrs_goal',
'COST_OPTIMIZED',
'--temp_location',
'gs://any-location/temp'
])
env = apiclient.Environment(
[], #packages
pipeline_options,
'2.0.0', #any environment version
FAKE_PIPELINE_URL)
self.assertEqual(
env.proto.flexResourceSchedulingGoal,
(
dataflow.Environment.FlexResourceSchedulingGoalValueValuesEnum.
FLEXRS_COST_OPTIMIZED))
def test_flexrs_speed(self):
pipeline_options = PipelineOptions([
'--flexrs_goal',
'SPEED_OPTIMIZED',
'--temp_location',
'gs://any-location/temp'
])
env = apiclient.Environment(
[], #packages
pipeline_options,
'2.0.0', #any environment version
FAKE_PIPELINE_URL)
self.assertEqual(
env.proto.flexResourceSchedulingGoal,
(
dataflow.Environment.FlexResourceSchedulingGoalValueValuesEnum.
FLEXRS_SPEED_OPTIMIZED))
def test_default_environment_get_set(self):
pipeline_options = PipelineOptions([
'--experiments=beam_fn_api',
'--experiments=use_unified_worker',
'--temp_location',
'gs://any-location/temp'
])
pipeline = Pipeline(options=pipeline_options)
pipeline | Create([1, 2, 3]) | ParDo(DoFn()) # pylint:disable=expression-not-assigned
test_environment = DockerEnvironment(container_image='test_default_image')
proto_pipeline, _ = pipeline.to_runner_api(
return_context=True, default_environment=test_environment)
dummy_env = beam_runner_api_pb2.Environment(
urn=common_urns.environments.DOCKER.urn,
payload=(
beam_runner_api_pb2.DockerPayload(
container_image='dummy_image')).SerializeToString())
proto_pipeline.components.environments['dummy_env_id'].CopyFrom(dummy_env)
dummy_transform = beam_runner_api_pb2.PTransform(
environment_id='dummy_env_id')
proto_pipeline.components.transforms['dummy_transform_id'].CopyFrom(
dummy_transform)
env = apiclient.Environment(
[], # packages
pipeline_options,
'2.0.0', # any environment version
FAKE_PIPELINE_URL,
proto_pipeline,
_sdk_image_overrides={
'.*dummy.*': 'dummy_image', '.*test.*': 'test_default_image'
})
worker_pool = env.proto.workerPools[0]
self.assertEqual(2, len(worker_pool.sdkHarnessContainerImages))
images_from_proto = [
sdk_info.containerImage
for sdk_info in worker_pool.sdkHarnessContainerImages
]
self.assertIn('test_default_image', images_from_proto)
def test_sdk_harness_container_image_overrides(self):
test_environment = DockerEnvironment(
container_image='dummy_container_image')
proto_pipeline, _ = Pipeline().to_runner_api(
return_context=True, default_environment=test_environment)
pipeline_options = PipelineOptions([
'--experiments=beam_fn_api',
'--experiments=use_unified_worker',
'--temp_location',
'gs://any-location/temp'
])
# Accessing non-public method for testing.
apiclient.DataflowApplicationClient._apply_sdk_environment_overrides(
proto_pipeline,
{
'.*dummy.*': 'new_dummy_container_image',
'.*notfound.*': 'new_dummy_container_image_2'
},
pipeline_options)
self.assertIsNotNone(1, len(proto_pipeline.components.environments))
env = list( |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.