text (stringlengths 4 to 1.02M) | meta (dict)
import logging
import textwrap
import time
from datetime import date, datetime, timedelta
from django.conf import settings
from django.contrib.auth.models import Group
from django.core.mail import send_mail
from django.db import connection, transaction
import cronjobs
from kitsune.questions import config
from kitsune.questions.models import (
Question, QuestionVote, QuestionMappingType, QuestionVisits, Answer)
from kitsune.questions.tasks import (
escalate_question, update_question_vote_chunk)
from kitsune.search.es_utils import ES_EXCEPTIONS, get_documents
from kitsune.search.tasks import index_task
from kitsune.sumo.utils import chunked
log = logging.getLogger('k.cron')
@cronjobs.register
def update_weekly_votes():
"""Keep the num_votes_past_week value accurate."""
# Get all questions (id) with a vote in the last week.
recent = datetime.now() - timedelta(days=7)
q = QuestionVote.objects.filter(created__gte=recent)
q = q.values_list('question_id', flat=True).order_by('question')
q = q.distinct()
q_with_recent_votes = list(q)
# Get all questions with num_votes_past_week > 0
q = Question.objects.filter(num_votes_past_week__gt=0)
q = q.values_list('id', flat=True)
q_with_nonzero_votes = list(q)
# Union.
qs_to_update = list(set(q_with_recent_votes + q_with_nonzero_votes))
# Chunk them for tasks.
for chunk in chunked(qs_to_update, 50):
update_question_vote_chunk.apply_async(args=[chunk])
@cronjobs.register
def auto_archive_old_questions():
"""Archive all questions that were created over 180 days ago"""
# Set up logging so it doesn't send Ricky email.
logging.basicConfig(level=logging.ERROR)
# Get a list of ids of questions we're going to go change. We need
# a list of ids so that we can feed it to the update, but then
# also know what we need to update in the index.
days_180 = datetime.now() - timedelta(days=180)
q_ids = list(Question.objects.filter(is_archived=False)
.filter(created__lte=days_180)
.values_list('id', flat=True))
if q_ids:
log.info('Updating %d questions', len(q_ids))
sql = """
UPDATE questions_question
SET is_archived = 1
WHERE id IN (%s)
""" % ','.join(map(str, q_ids))
cursor = connection.cursor()
cursor.execute(sql)
transaction.commit_unless_managed()
if settings.ES_LIVE_INDEXING:
try:
# So... the first time this runs, it'll handle 160K
# questions or so which stresses everything. Thus we
# do it in chunks because otherwise this won't work.
#
# After we've done this for the first time, we can nix
# the chunking code.
from kitsune.search.utils import chunked
for chunk in chunked(q_ids, 100):
# Fetch all the documents we need to update.
es_docs = get_documents(QuestionMappingType, chunk)
log.info('Updating %d index documents', len(es_docs))
documents = []
# For each document, update the data and stick it
# back in the index.
for doc in es_docs:
doc[u'question_is_archived'] = True
doc[u'indexed_on'] = int(time.time())
documents.append(doc)
QuestionMappingType.bulk_index(documents)
except ES_EXCEPTIONS:
# Something happened with ES, so let's push index
# updating into an index_task which retries when it
# fails because of ES issues.
index_task.delay(QuestionMappingType, q_ids)
@cronjobs.register
def reload_question_traffic_stats():
"""Reload question views from the analytics."""
if settings.STAGE:
return
QuestionVisits.reload_from_analytics(verbose=settings.DEBUG)
@cronjobs.register
def escalate_questions():
"""Escalate questions needing attention.
Escalate questions whose status is "needs attention" and that still
have no replies after 24 hours, but that are not older than 25 hours
(this cron runs every hour).
"""
if settings.STAGE:
return
# Get all the questions that need attention and haven't been escalated.
qs = Question.objects.needs_attention().exclude(
tags__slug__in=[config.ESCALATE_TAG_NAME])
# Only include English.
qs = qs.filter(locale=settings.WIKI_DEFAULT_LANGUAGE)
# Exclude certain products.
qs = qs.exclude(product__slug__in=config.ESCALATE_EXCLUDE_PRODUCTS)
# Exclude those by inactive users.
qs = qs.exclude(creator__is_active=False)
# Filter them down to those that haven't been replied to and are over
# 24 hours old but less than 25 hours old. We run this once an hour.
start = datetime.now() - timedelta(hours=24)
end = datetime.now() - timedelta(hours=25)
qs_no_replies_yet = qs.filter(
last_answer__isnull=True,
created__lt=start,
created__gt=end)
for question in qs_no_replies_yet:
escalate_question.delay(question.id)
return len(qs_no_replies_yet)
@cronjobs.register
def report_employee_answers():
"""Send an email about employee answered questions.
We report on the users in the "Support Forum Tracked" group.
We send the email to the users in the "Support Forum Metrics" group.
"""
tracked_group = Group.objects.get(name='Support Forum Tracked')
report_group = Group.objects.get(name='Support Forum Metrics')
tracked_users = tracked_group.user_set.all()
report_recipients = report_group.user_set.all()
if len(tracked_users) == 0 or len(report_recipients) == 0:
return
yesterday = date.today() - timedelta(days=1)
day_before_yesterday = yesterday - timedelta(days=1)
# Total number of questions asked the day before yesterday
questions = Question.objects.filter(
creator__is_active=True,
created__gte=day_before_yesterday,
created__lt=yesterday)
num_questions = questions.count()
# Total number of answered questions day before yesterday
num_answered = questions.filter(num_answers__gt=0).count()
# Total number of questions answered by user in tracked_group
num_answered_by_tracked = {}
for user in tracked_users:
num_answered_by_tracked[user.username] = Answer.objects.filter(
question__in=questions,
creator=user).values_list('question_id').distinct().count()
email_subject = 'Support Forum answered report for {date}'.format(date=day_before_yesterday)
email_body_tmpl = textwrap.dedent("""\
Date: {date}
Number of questions asked: {num_questions}
Number of questions answered: {num_answered}
""")
email_body = email_body_tmpl.format(
date=day_before_yesterday,
num_questions=num_questions,
num_answered=num_answered)
for username, count in num_answered_by_tracked.items():
email_body += 'Number of questions answered by {username}: {count}\n'.format(
username=username, count=count)
email_addresses = [u.email for u in report_recipients]
send_mail(email_subject, email_body, settings.TIDINGS_FROM_ADDRESS, email_addresses)
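# Illustrative sketch (an assumption, not the actual kitsune.sumo.utils
# implementation) of the chunked() helper the cron jobs above rely on: it
# yields fixed-size slices of a list so each Celery task only handles a
# bounded number of ids.
def _chunked_sketch(items, chunk_size):
    """Yield successive chunk_size-sized slices from items."""
    for start in range(0, len(items), chunk_size):
        yield items[start:start + chunk_size]
# e.g. list(_chunked_sketch([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]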
|
{
"content_hash": "819bfad1a05daf54b0f1595c57387647",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 96,
"avg_line_length": 35.183098591549296,
"alnum_prop": 0.6451828129170003,
"repo_name": "feer56/Kitsune2",
"id": "bceb1c70b480f19913ebe7706e46913915074d3e",
"size": "7494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kitsune/questions/cron.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "286099"
},
{
"name": "HTML",
"bytes": "617557"
},
{
"name": "JavaScript",
"bytes": "760075"
},
{
"name": "Python",
"bytes": "2754855"
},
{
"name": "Shell",
"bytes": "11614"
},
{
"name": "Smarty",
"bytes": "2062"
}
],
"symlink_target": ""
}
|
import mock
from webob import exc
from heat.api.openstack.v1 import util
from heat.common import context
from heat.common import policy
from heat.common.wsgi import Request
from heat.tests.common import HeatTestCase
class TestGetAllowedParams(HeatTestCase):
def setUp(self):
super(TestGetAllowedParams, self).setUp()
req = Request({})
self.params = req.params.copy()
self.params.add('foo', 'foo value')
self.whitelist = {'foo': 'single'}
def test_returns_empty_dict(self):
self.whitelist = {}
result = util.get_allowed_params(self.params, self.whitelist)
self.assertEqual({}, result)
def test_only_adds_whitelisted_params_if_param_exists(self):
self.whitelist = {'foo': 'single'}
self.params.clear()
result = util.get_allowed_params(self.params, self.whitelist)
self.assertNotIn('foo', result)
def test_returns_only_whitelisted_params(self):
self.params.add('bar', 'bar value')
result = util.get_allowed_params(self.params, self.whitelist)
self.assertIn('foo', result)
self.assertNotIn('bar', result)
def test_handles_single_value_params(self):
result = util.get_allowed_params(self.params, self.whitelist)
self.assertEqual('foo value', result['foo'])
def test_handles_multiple_value_params(self):
self.whitelist = {'foo': 'multi'}
self.params.add('foo', 'foo value 2')
result = util.get_allowed_params(self.params, self.whitelist)
self.assertEqual(2, len(result['foo']))
self.assertIn('foo value', result['foo'])
self.assertIn('foo value 2', result['foo'])
def test_handles_mixed_value_param_with_multiple_entries(self):
self.whitelist = {'foo': 'mixed'}
self.params.add('foo', 'foo value 2')
result = util.get_allowed_params(self.params, self.whitelist)
self.assertEqual(2, len(result['foo']))
self.assertIn('foo value', result['foo'])
self.assertIn('foo value 2', result['foo'])
def test_handles_mixed_value_param_with_single_entry(self):
self.whitelist = {'foo': 'mixed'}
result = util.get_allowed_params(self.params, self.whitelist)
self.assertEqual('foo value', result['foo'])
def test_ignores_bogus_whitelist_items(self):
self.whitelist = {'foo': 'blah'}
result = util.get_allowed_params(self.params, self.whitelist)
self.assertNotIn('foo', result)
class TestPolicyEnforce(HeatTestCase):
def setUp(self):
super(TestPolicyEnforce, self).setUp()
self.req = Request({})
self.req.context = context.RequestContext(tenant_id='foo',
is_admin=False)
class DummyController(object):
REQUEST_SCOPE = 'test'
@util.policy_enforce
def an_action(self, req):
return 'woot'
self.controller = DummyController()
@mock.patch.object(policy.Enforcer, 'enforce')
def test_policy_enforce_tenant_mismatch(self, mock_enforce):
mock_enforce.return_value = True
self.assertEqual('woot',
self.controller.an_action(self.req, 'foo'))
self.assertRaises(exc.HTTPForbidden,
self.controller.an_action,
self.req, tenant_id='bar')
@mock.patch.object(policy.Enforcer, 'enforce')
def test_policy_enforce_policy_deny(self, mock_enforce):
mock_enforce.return_value = False
self.assertRaises(exc.HTTPForbidden,
self.controller.an_action,
self.req, tenant_id='foo')
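# Illustrative sketch (an assumption, not Heat's actual implementation) of the
# whitelist filtering the TestGetAllowedParams cases above exercise: 'single'
# keeps one value, 'multi' keeps every value, 'mixed' keeps a single value or
# a list depending on how many were supplied, and unknown whitelist kinds are
# ignored. `params` is assumed to be a webob MultiDict, as built in setUp.
def _get_allowed_params_sketch(params, whitelist):
    allowed = {}
    for key, kind in whitelist.items():
        if key not in params:
            continue
        if kind == 'single':
            allowed[key] = params[key]
        elif kind == 'multi':
            allowed[key] = params.getall(key)
        elif kind == 'mixed':
            values = params.getall(key)
            allowed[key] = values if len(values) > 1 else values[0]
    return allowed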
|
{
"content_hash": "5fb524afffdf4d5682965035eb1ede96",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 69,
"avg_line_length": 34.700934579439256,
"alnum_prop": 0.6186372205763534,
"repo_name": "NeCTAR-RC/heat",
"id": "8208b512cdbbfe27cd88dfb313fd9b6111e80069",
"size": "4287",
"binary": false,
"copies": "1",
"ref": "refs/heads/nectar/icehouse",
"path": "heat/tests/test_api_openstack_v1_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "5565"
},
{
"name": "Python",
"bytes": "4229675"
},
{
"name": "Shell",
"bytes": "25339"
}
],
"symlink_target": ""
}
|
total = 0
totalRibbon = 0
f = open("input.txt")
for line in f:
temp = ""
length = 0
width = 0
height = 0
for c in line:
if c.isdigit():
temp += c
else:
if length == 0:
length = int(temp)
elif width == 0:
width = int(temp)
elif height == 0:
height = int(temp)
temp = ""
area = ( 2 * length * width ) + ( 2 * width * height ) + (2 * height * length)
smallestSide = length * width
if ( width * height ) < smallestSide:
smallestSide = width * height
if ( height * length ) < smallestSide:
smallestSide = height * length
dimensions = sorted([length, width, height])
ribbonLength = ( 2 * dimensions[0] ) + ( 2 * dimensions[1] ) + ( length * width * height )
area += smallestSide
total += area
totalRibbon += ribbonLength
print("total paper: {size} sq ft".format(size=total))
print("total ribbon: {size} ft".format(size=totalRibbon))
|
{
"content_hash": "0806170af33ed8ede5177be6d43b5c74",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 91,
"avg_line_length": 20.74418604651163,
"alnum_prop": 0.6087443946188341,
"repo_name": "protocol114/AdventOfCode",
"id": "8bf1b74476cacc0642d9c3c4f4f66389037bc14a",
"size": "892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "day2/2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18942"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
try:
import cPickle as pickle
except ImportError:
import pickle
import zlib
from django.utils.encoding import smart_str
def loads(value, compress=False):
# Integers are not pickled when storing in the cache because we allow
# methods like incr/decr which would fail on pickled values.
if value is None:
return None
try:
return int(value)
except ValueError:
pass
if compress:
value = zlib.decompress(value)
# TODO(usmanm): Is this needed?
value = smart_str(value)
return pickle.loads(value)
def dumps(value, compress=False):
# Don't pickle integers (pickled integers will fail with incr/decr). Plus
# pickling integers wastes memory. Floats are typecast to ints and left
# unpickled only when the typecast loses no precision.
if isinstance(value, int):
return value
if isinstance(value, float) and int(value) == value:
return int(value)
value = pickle.dumps(value)
if compress:
value = zlib.compress(value)
return value
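# Brief usage sketch of the helpers above (illustrative, assuming the
# Python 2 / older Django environment this module targets): integers bypass
# pickling entirely so cache incr/decr keeps working, while other values
# round-trip through pickle and, optionally, zlib compression.
assert dumps(42) == 42 and loads(42) == 42
packed = dumps({'a': 1}, compress=True)
assert loads(packed, compress=True) == {'a': 1}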
|
{
"content_hash": "531436324f409a19e2a826225677ff65",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 77,
"avg_line_length": 26.86842105263158,
"alnum_prop": 0.7247796278158668,
"repo_name": "Locu/djredis",
"id": "0614bc6ff89330b93e954e4cb623812c720ff12d",
"size": "1038",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djredis/utils/pickle.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "55990"
}
],
"symlink_target": ""
}
|
"""gRPC's Python API."""
import abc
import enum
import sys
import six
from grpc._cython import cygrpc as _cygrpc
############################## Future Interface ###############################
class FutureTimeoutError(Exception):
"""Indicates that a method call on a Future timed out."""
class FutureCancelledError(Exception):
"""Indicates that the computation underlying a Future was cancelled."""
class Future(six.with_metaclass(abc.ABCMeta)):
"""A representation of a computation in another control flow.
Computations represented by a Future may be yet to be begun, may be ongoing,
or may have already completed.
"""
@abc.abstractmethod
def cancel(self):
"""Attempts to cancel the computation.
This method does not block.
Returns:
True if the computation has not yet begun, will not be allowed to take
place, and determination of both was possible without blocking. False
under all other circumstances including but not limited to the
computation's already having begun, the computation's already having
finished, and the computation's having been scheduled for execution on a
remote system for which a determination of whether or not it commenced
before being cancelled cannot be made without blocking.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancelled(self):
"""Describes whether the computation was cancelled.
This method does not block.
Returns:
True if the computation was cancelled any time before its result became
immediately available. False under all other circumstances including but
not limited to this object's cancel method not having been called and
the computation's result having become immediately available.
"""
raise NotImplementedError()
@abc.abstractmethod
def running(self):
"""Describes whether the computation is taking place.
This method does not block.
Returns:
True if the computation is scheduled to take place in the future or is
taking place now, or False if the computation took place in the past or
was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def done(self):
"""Describes whether the computation has taken place.
This method does not block.
Returns:
True if the computation is known to have either completed or have been
unscheduled or interrupted. False if the computation may possibly be
executing or scheduled to execute later.
"""
raise NotImplementedError()
@abc.abstractmethod
def result(self, timeout=None):
"""Accesses the outcome of the computation or raises its exception.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
finish or be cancelled, or None if this method should block until the
computation has finished or is cancelled no matter how long that takes.
Returns:
The return value of the computation.
Raises:
FutureTimeoutError: If a timeout value is passed and the computation does
not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
Exception: If the computation raised an exception, this call will raise
the same exception.
"""
raise NotImplementedError()
@abc.abstractmethod
def exception(self, timeout=None):
"""Return the exception raised by the computation.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
terminate or be cancelled, or None if this method should block until
the computation is terminated or is cancelled no matter how long that
takes.
Returns:
The exception raised by the computation, or None if the computation did
not raise an exception.
Raises:
FutureTimeoutError: If a timeout value is passed and the computation does
not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def traceback(self, timeout=None):
"""Access the traceback of the exception raised by the computation.
This method may return immediately or may block.
Args:
timeout: The length of time in seconds to wait for the computation to
terminate or be cancelled, or None if this method should block until
the computation is terminated or is cancelled no matter how long that
takes.
Returns:
The traceback of the exception raised by the computation, or None if the
computation did not raise an exception.
Raises:
FutureTimeoutError: If a timeout value is passed and the computation does
not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_done_callback(self, fn):
"""Adds a function to be called at completion of the computation.
The callback will be passed this Future object describing the outcome of
the computation.
If the computation has already completed, the callback will be called
immediately.
Args:
fn: A callable taking this Future object as its single parameter.
"""
raise NotImplementedError()
################################ gRPC Enums ##################################
@enum.unique
class ChannelConnectivity(enum.Enum):
"""Mirrors grpc_connectivity_state in the gRPC Core.
Attributes:
IDLE: The channel is idle.
CONNECTING: The channel is connecting.
READY: The channel is ready to conduct RPCs.
TRANSIENT_FAILURE: The channel has seen a failure from which it expects to
recover.
SHUTDOWN: The channel has seen a failure from which it cannot recover.
"""
IDLE = (_cygrpc.ConnectivityState.idle, 'idle')
CONNECTING = (_cygrpc.ConnectivityState.connecting, 'connecting')
READY = (_cygrpc.ConnectivityState.ready, 'ready')
TRANSIENT_FAILURE = (_cygrpc.ConnectivityState.transient_failure,
'transient failure')
SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, 'shutdown')
@enum.unique
class StatusCode(enum.Enum):
"""Mirrors grpc_status_code in the gRPC Core."""
OK = (_cygrpc.StatusCode.ok, 'ok')
CANCELLED = (_cygrpc.StatusCode.cancelled, 'cancelled')
UNKNOWN = (_cygrpc.StatusCode.unknown, 'unknown')
INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, 'invalid argument')
DEADLINE_EXCEEDED = (_cygrpc.StatusCode.deadline_exceeded,
'deadline exceeded')
NOT_FOUND = (_cygrpc.StatusCode.not_found, 'not found')
ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, 'already exists')
PERMISSION_DENIED = (_cygrpc.StatusCode.permission_denied,
'permission denied')
RESOURCE_EXHAUSTED = (_cygrpc.StatusCode.resource_exhausted,
'resource exhausted')
FAILED_PRECONDITION = (_cygrpc.StatusCode.failed_precondition,
'failed precondition')
ABORTED = (_cygrpc.StatusCode.aborted, 'aborted')
OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, 'out of range')
UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, 'unimplemented')
INTERNAL = (_cygrpc.StatusCode.internal, 'internal')
UNAVAILABLE = (_cygrpc.StatusCode.unavailable, 'unavailable')
DATA_LOSS = (_cygrpc.StatusCode.data_loss, 'data loss')
UNAUTHENTICATED = (_cygrpc.StatusCode.unauthenticated, 'unauthenticated')
############################# gRPC Exceptions ################################
class RpcError(Exception):
"""Raised by the gRPC library to indicate non-OK-status RPC termination."""
############################## Shared Context ################################
class RpcContext(six.with_metaclass(abc.ABCMeta)):
"""Provides RPC-related information and action."""
@abc.abstractmethod
def is_active(self):
"""Describes whether the RPC is active or has terminated."""
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
"""Describes the length of allowed time remaining for the RPC.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have timed
out, or None if no deadline was specified for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC.
Idempotent and has no effect if the RPC has already terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_callback(self, callback):
"""Registers a callback to be called on RPC termination.
Args:
callback: A no-parameter callable to be called on RPC termination.
Returns:
True if the callback was added and will be called later; False if the
callback was not added and will not later be called (because the RPC
already terminated or some other reason).
"""
raise NotImplementedError()
######################### Invocation-Side Context ############################
class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
"""Invocation-side utility object for an RPC."""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata from the service-side of the RPC.
This method blocks until the value is available.
Returns:
The initial :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def trailing_metadata(self):
"""Accesses the trailing metadata from the service-side of the RPC.
This method blocks until the value is available.
Returns:
The trailing :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def code(self):
"""Accesses the status code emitted by the service-side of the RPC.
This method blocks until the value is available.
Returns:
The StatusCode value for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def details(self):
"""Accesses the details value emitted by the service-side of the RPC.
This method blocks until the value is available.
Returns:
The details string of the RPC.
"""
raise NotImplementedError()
############ Authentication & Authorization Interfaces & Classes #############
class ChannelCredentials(object):
"""A value encapsulating the data required to create a secure Channel.
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions.
"""
def __init__(self, credentials):
self._credentials = credentials
class CallCredentials(object):
"""A value encapsulating data asserting an identity over a channel.
A CallCredentials may be composed with ChannelCredentials to always assert
identity for every call over that Channel.
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions.
"""
def __init__(self, credentials):
self._credentials = credentials
class AuthMetadataContext(six.with_metaclass(abc.ABCMeta)):
"""Provides information to call credentials metadata plugins.
Attributes:
service_url: A string URL of the service being called into.
method_name: A string of the fully qualified method name being called.
"""
class AuthMetadataPluginCallback(six.with_metaclass(abc.ABCMeta)):
"""Callback object received by a metadata plugin."""
def __call__(self, metadata, error):
"""Inform the gRPC runtime of the metadata to construct a CallCredentials.
Args:
metadata: The :term:`metadata` used to construct the CallCredentials.
error: An Exception to indicate error or None to indicate success.
"""
raise NotImplementedError()
class AuthMetadataPlugin(six.with_metaclass(abc.ABCMeta)):
"""A specification for custom authentication."""
def __call__(self, context, callback):
"""Implements authentication by passing metadata to a callback.
Implementations of this method must not block.
Args:
context: An AuthMetadataContext providing information on the RPC that the
plugin is being called to authenticate.
callback: An AuthMetadataPluginCallback to be invoked either synchronously
or asynchronously.
"""
raise NotImplementedError()
class ServerCredentials(object):
"""A value encapsulating the data required to open a secure port on a Server.
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions.
"""
def __init__(self, credentials):
self._credentials = credentials
######################## Multi-Callable Interfaces ###########################
class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a unary-unary RPC."""
@abc.abstractmethod
def __call__(self, request, timeout=None, metadata=None, credentials=None):
"""Synchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
The response value for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def with_call(self, request, timeout=None, metadata=None, credentials=None):
"""Synchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
The response value for the RPC and a Call value for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(self, request, timeout=None, metadata=None, credentials=None):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError.
"""
raise NotImplementedError()
class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a unary-stream RPC."""
@abc.abstractmethod
def __call__(self, request, timeout=None, metadata=None, credentials=None):
"""Invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: An optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and an iterator of response
values. Drawing response values from the returned Call-iterator may
raise RpcError indicating termination of the RPC with non-OK status.
"""
raise NotImplementedError()
class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a stream-unary RPC in any call style."""
@abc.abstractmethod
def __call__(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Synchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
The response value for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def with_call(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Synchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
The response value for the RPC and a Call for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Asynchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError.
"""
raise NotImplementedError()
class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a stream-stream RPC in any call style."""
@abc.abstractmethod
def __call__(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and an iterator of response
values. Drawing response values from the returned Call-iterator may
raise RpcError indicating termination of the RPC with non-OK status.
"""
raise NotImplementedError()
############################# Channel Interface ##############################
class Channel(six.with_metaclass(abc.ABCMeta)):
"""Affords RPC invocation via generic methods."""
@abc.abstractmethod
def subscribe(self, callback, try_to_connect=False):
"""Subscribes to this Channel's connectivity.
Args:
callback: A callable to be invoked and passed a ChannelConnectivity value
describing this Channel's connectivity. The callable will be invoked
immediately upon subscription and again for every change to this
Channel's connectivity thereafter until it is unsubscribed or this
Channel object goes out of scope.
try_to_connect: A boolean indicating whether or not this Channel should
attempt to connect if it is not already connected and ready to conduct
RPCs.
"""
raise NotImplementedError()
@abc.abstractmethod
def unsubscribe(self, callback):
"""Unsubscribes a callback from this Channel's connectivity.
Args:
callback: A callable previously registered with this Channel from having
been passed to its "subscribe" method.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_unary(self,
method,
request_serializer=None,
response_deserializer=None):
"""Creates a UnaryUnaryMultiCallable for a unary-unary method.
Args:
method: The name of the RPC method.
request_serializer: Optional behaviour for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional behaviour for deserializing the response
message. Response goes undeserialized in case None is passed.
Returns:
A UnaryUnaryMultiCallable value for the named unary-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_stream(self,
method,
request_serializer=None,
response_deserializer=None):
"""Creates a UnaryStreamMultiCallable for a unary-stream method.
Args:
method: The name of the RPC method.
request_serializer: Optional behaviour for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional behaviour for deserializing the response
message. Response goes undeserialized in case None is passed.
Returns:
A UnaryStreamMultiCallable value for the named unary-stream method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_unary(self,
method,
request_serializer=None,
response_deserializer=None):
"""Creates a StreamUnaryMultiCallable for a stream-unary method.
Args:
method: The name of the RPC method.
request_serializer: Optional behaviour for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional behaviour for deserializing the response
message. Response goes undeserialized in case None is passed.
Returns:
A StreamUnaryMultiCallable value for the named stream-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_stream(self,
method,
request_serializer=None,
response_deserializer=None):
"""Creates a StreamStreamMultiCallable for a stream-stream method.
Args:
method: The name of the RPC method.
request_serializer: Optional behaviour for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional behaviour for deserializing the response
message. Response goes undeserialized in case None is passed.
Returns:
A StreamStreamMultiCallable value for the named stream-stream method.
"""
raise NotImplementedError()
########################## Service-Side Context ##############################
class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
"""A context object passed to method implementations."""
@abc.abstractmethod
def invocation_metadata(self):
"""Accesses the metadata from the invocation-side of the RPC.
Returns:
The invocation :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def peer(self):
"""Identifies the peer that invoked the RPC being serviced.
Returns:
A string identifying the peer that invoked the RPC being serviced.
"""
raise NotImplementedError()
@abc.abstractmethod
def send_initial_metadata(self, initial_metadata):
"""Sends the initial metadata value to the invocation-side of the RPC.
This method need not be called by method implementations if they have no
service-side initial metadata to transmit.
Args:
initial_metadata: The initial :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def set_trailing_metadata(self, trailing_metadata):
"""Accepts the trailing metadata value of the RPC.
This method need not be called by method implementations if they have no
service-side trailing metadata to transmit.
Args:
trailing_metadata: The trailing :term:`metadata`.
"""
raise NotImplementedError()
@abc.abstractmethod
def set_code(self, code):
"""Accepts the status code of the RPC.
This method need not be called by method implementations if they wish the
gRPC runtime to determine the status code of the RPC.
Args:
code: A StatusCode value to be transmitted to the invocation side of the
RPC as the status code of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def set_details(self, details):
"""Accepts the service-side details of the RPC.
This method need not be called by method implementations if they have no
details to transmit.
Args:
details: A string to be transmitted to the invocation side of the RPC as
the status details of the RPC.
"""
raise NotImplementedError()
##################### Service-Side Handler Interfaces ########################
class RpcMethodHandler(six.with_metaclass(abc.ABCMeta)):
"""An implementation of a single RPC method.
Attributes:
request_streaming: Whether the RPC supports exactly one request message or
any arbitrary number of request messages.
response_streaming: Whether the RPC supports exactly one response message or
any arbitrary number of response messages.
request_deserializer: A callable behavior that accepts a byte string and
returns an object suitable to be passed to this object's business logic,
or None to indicate that this object's business logic should be passed the
raw request bytes.
response_serializer: A callable behavior that accepts an object produced by
this object's business logic and returns a byte string, or None to
indicate that the byte strings produced by this object's business logic
should be transmitted on the wire as they are.
unary_unary: This object's application-specific business logic as a callable
value that takes a request value and a ServicerContext object and returns
a response value. Only non-None if both request_streaming and
response_streaming are False.
unary_stream: This object's application-specific business logic as a
callable value that takes a request value and a ServicerContext object and
returns an iterator of response values. Only non-None if request_streaming
is False and response_streaming is True.
stream_unary: This object's application-specific business logic as a
callable value that takes an iterator of request values and a
ServicerContext object and returns a response value. Only non-None if
request_streaming is True and response_streaming is False.
stream_stream: This object's application-specific business logic as a
callable value that takes an iterator of request values and a
ServicerContext object and returns an iterator of response values. Only
non-None if request_streaming and response_streaming are both True.
"""
class HandlerCallDetails(six.with_metaclass(abc.ABCMeta)):
"""Describes an RPC that has just arrived for service.
Attributes:
method: The method name of the RPC.
invocation_metadata: The :term:`metadata` from the invocation side of the RPC.
"""
class GenericRpcHandler(six.with_metaclass(abc.ABCMeta)):
"""An implementation of arbitrarily many RPC methods."""
@abc.abstractmethod
def service(self, handler_call_details):
"""Services an RPC (or not).
Args:
handler_call_details: A HandlerCallDetails describing the RPC.
Returns:
An RpcMethodHandler with which the RPC may be serviced, or None to
indicate that this object will not be servicing the RPC.
"""
raise NotImplementedError()
class ServiceRpcHandler(six.with_metaclass(abc.ABCMeta, GenericRpcHandler)):
"""An implementation of RPC methods belonging to a service.
A service handles RPC methods with structured names of the form
'/Service.Name/Service.MethodX', where 'Service.Name' is the value
returned by service_name(), and 'Service.MethodX' is the service method
name. A service can have multiple service method names, but only a single
service name.
"""
@abc.abstractmethod
def service_name(self):
"""Returns this services name.
Returns:
The service name.
"""
raise NotImplementedError()
############################# Server Interface ###############################
class Server(six.with_metaclass(abc.ABCMeta)):
"""Services RPCs."""
@abc.abstractmethod
def add_generic_rpc_handlers(self, generic_rpc_handlers):
"""Registers GenericRpcHandlers with this Server.
This method is only safe to call before the server is started.
Args:
generic_rpc_handlers: An iterable of GenericRpcHandlers that will be used
to service RPCs after this Server is started.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_insecure_port(self, address):
"""Reserves a port for insecure RPC service once this Server becomes active.
This method may only be called before this Server's start method is called.
Args:
address: The address for which to open a port.
Returns:
An integer port on which RPCs will be serviced after this Server has been
started. This is typically the same number as the port number contained
in the passed address, but will likely be different if the port number
contained in the passed address was zero.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_secure_port(self, address, server_credentials):
"""Reserves a port for secure RPC service after this Server becomes active.
This method may only be called before this Server's start method is called.
Args:
address: The address for which to open a port.
server_credentials: A ServerCredentials.
Returns:
An integer port on which RPCs will be serviced after this Server has been
started. This is typically the same number as the port number contained
in the passed address, but will likely be different if the port number
contained in the passed address was zero.
"""
raise NotImplementedError()
@abc.abstractmethod
def start(self):
"""Starts this Server's service of RPCs.
This method may only be called while the server is not serving RPCs (i.e. it
is not idempotent).
"""
raise NotImplementedError()
@abc.abstractmethod
def stop(self, grace):
"""Stops this Server's service of RPCs.
All calls to this method immediately stop service of new RPCs. When existing
RPCs are aborted is controlled by the grace period parameter passed to this
method.
This method may be called at any time and is idempotent. Passing a smaller
grace value than has been passed in a previous call will have the effect of
stopping the Server sooner. Passing a larger grace value than has been
passed in a previous call will not have the effect of stopping the server
later.
This method does not block for any significant length of time. If None is
passed as the grace value, existing RPCs are immediately aborted and this
method blocks until this Server is completely stopped.
Args:
grace: A duration of time in seconds or None. If a duration of time in
seconds, the time to allow existing RPCs to complete before being
aborted by this Server's stopping. If None, all RPCs will be aborted
immediately and this method will block until this Server is completely
stopped.
Returns:
A threading.Event that will be set when this Server has completely
stopped. The returned event may not be set until after the full grace
period (if some ongoing RPC continues for the full length of the period)
or it may be set much sooner (such as if this Server had no RPCs underway
at the time it was stopped or if all RPCs that it had underway completed
very early in the grace period).
"""
raise NotImplementedError()
################################# Functions ################################
def unary_unary_rpc_method_handler(behavior,
request_deserializer=None,
response_serializer=None):
"""Creates an RpcMethodHandler for a unary-unary RPC method.
Args:
behavior: The implementation of an RPC method as a callable behavior taking
a single request value and returning a single response value.
request_deserializer: An optional request deserialization behavior.
response_serializer: An optional response serialization behavior.
Returns:
An RpcMethodHandler for a unary-unary RPC method constructed from the given
parameters.
"""
from grpc import _utilities
return _utilities.RpcMethodHandler(False, False, request_deserializer,
response_serializer, behavior, None,
None, None)
def unary_stream_rpc_method_handler(behavior,
request_deserializer=None,
response_serializer=None):
"""Creates an RpcMethodHandler for a unary-stream RPC method.
Args:
behavior: The implementation of an RPC method as a callable behavior taking
a single request value and returning an iterator of response values.
request_deserializer: An optional request deserialization behavior.
response_serializer: An optional response serialization behavior.
Returns:
An RpcMethodHandler for a unary-stream RPC method constructed from the
given parameters.
"""
from grpc import _utilities
return _utilities.RpcMethodHandler(False, True, request_deserializer,
response_serializer, None, behavior,
None, None)
def stream_unary_rpc_method_handler(behavior,
request_deserializer=None,
response_serializer=None):
"""Creates an RpcMethodHandler for a stream-unary RPC method.
Args:
behavior: The implementation of an RPC method as a callable behavior taking
an iterator of request values and returning a single response value.
request_deserializer: An optional request deserialization behavior.
response_serializer: An optional response serialization behavior.
Returns:
An RpcMethodHandler for a stream-unary RPC method constructed from the
given parameters.
"""
from grpc import _utilities
return _utilities.RpcMethodHandler(True, False, request_deserializer,
response_serializer, None, None,
behavior, None)
def stream_stream_rpc_method_handler(behavior,
request_deserializer=None,
response_serializer=None):
"""Creates an RpcMethodHandler for a stream-stream RPC method.
Args:
behavior: The implementation of an RPC method as a callable behavior taking
an iterator of request values and returning an iterator of response
values.
request_deserializer: An optional request deserialization behavior.
response_serializer: An optional response serialization behavior.
Returns:
An RpcMethodHandler for a stream-stream RPC method constructed from the
given parameters.
"""
from grpc import _utilities
return _utilities.RpcMethodHandler(True, True, request_deserializer,
response_serializer, None, None, None,
behavior)
def method_handlers_generic_handler(service, method_handlers):
"""Creates a grpc.GenericRpcHandler from RpcMethodHandlers.
Args:
service: A service name to be used for the given method handlers.
method_handlers: A dictionary from method name to RpcMethodHandler
implementing the named method.
Returns:
A GenericRpcHandler constructed from the given parameters.
"""
from grpc import _utilities
return _utilities.DictionaryGenericHandler(service, method_handlers)
def ssl_channel_credentials(root_certificates=None,
private_key=None,
certificate_chain=None):
"""Creates a ChannelCredentials for use with an SSL-enabled Channel.
Args:
root_certificates: The PEM-encoded root certificates or unset to ask for
them to be retrieved from a default location.
private_key: The PEM-encoded private key to use or unset if no private key
should be used.
certificate_chain: The PEM-encoded certificate chain to use or unset if no
certificate chain should be used.
Returns:
A ChannelCredentials for use with an SSL-enabled Channel.
"""
if private_key is not None or certificate_chain is not None:
pair = _cygrpc.SslPemKeyCertPair(private_key, certificate_chain)
else:
pair = None
return ChannelCredentials(
_cygrpc.channel_credentials_ssl(root_certificates, pair))
def metadata_call_credentials(metadata_plugin, name=None):
"""Construct CallCredentials from an AuthMetadataPlugin.
Args:
metadata_plugin: An AuthMetadataPlugin to use as the authentication behavior
in the created CallCredentials.
name: A name for the plugin.
Returns:
A CallCredentials.
"""
from grpc import _plugin_wrapping
if name is None:
try:
effective_name = metadata_plugin.__name__
except AttributeError:
effective_name = metadata_plugin.__class__.__name__
else:
effective_name = name
return CallCredentials(
_plugin_wrapping.call_credentials_metadata_plugin(metadata_plugin,
effective_name))
def access_token_call_credentials(access_token):
"""Construct CallCredentials from an access token.
Args:
access_token: A string to place directly in the http request
authorization header, i.e. "authorization: Bearer <access_token>".
Returns:
A CallCredentials.
"""
from grpc import _auth
return metadata_call_credentials(
_auth.AccessTokenCallCredentials(access_token))
def composite_call_credentials(*call_credentials):
"""Compose multiple CallCredentials to make a new CallCredentials.
Args:
*call_credentials: At least two CallCredentials objects.
Returns:
A CallCredentials object composed of the given CallCredentials objects.
"""
from grpc import _credential_composition
cygrpc_call_credentials = tuple(
single_call_credentials._credentials
for single_call_credentials in call_credentials)
return CallCredentials(
_credential_composition.call(cygrpc_call_credentials))
def composite_channel_credentials(channel_credentials, *call_credentials):
"""Compose a ChannelCredentials and one or more CallCredentials objects.
Args:
channel_credentials: A ChannelCredentials.
*call_credentials: One or more CallCredentials objects.
Returns:
A ChannelCredentials composed of the given ChannelCredentials and
CallCredentials objects.
"""
from grpc import _credential_composition
cygrpc_call_credentials = tuple(
single_call_credentials._credentials
for single_call_credentials in call_credentials)
return ChannelCredentials(
_credential_composition.channel(channel_credentials._credentials,
cygrpc_call_credentials))
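# Illustrative composition sketch (the target and token values are assumptions
# for the example, not part of grpc): layer a bearer token onto SSL channel
# credentials so every RPC on the resulting channel asserts that identity.
def _token_channel_example(target='example.com:443', token='an-oauth2-token'):
    channel_creds = composite_channel_credentials(
        ssl_channel_credentials(),
        access_token_call_credentials(token))
    return secure_channel(target, channel_creds)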
def ssl_server_credentials(private_key_certificate_chain_pairs,
root_certificates=None,
require_client_auth=False):
"""Creates a ServerCredentials for use with an SSL-enabled Server.
Args:
private_key_certificate_chain_pairs: A nonempty sequence each element of
which is a pair the first element of which is a PEM-encoded private key
and the second element of which is the corresponding PEM-encoded
certificate chain.
root_certificates: PEM-encoded client root certificates to be used for
verifying authenticated clients. If omitted, require_client_auth must also
be omitted or be False.
require_client_auth: A boolean indicating whether or not to require clients
to be authenticated. May only be True if root_certificates is not None.
Returns:
A ServerCredentials for use with an SSL-enabled Server.
"""
if len(private_key_certificate_chain_pairs) == 0:
raise ValueError(
'At least one private key-certificate chain pair is required!')
elif require_client_auth and root_certificates is None:
raise ValueError(
'Illegal to require client auth without providing root certificates!'
)
else:
return ServerCredentials(
_cygrpc.server_credentials_ssl(root_certificates, [
_cygrpc.SslPemKeyCertPair(key, pem)
for key, pem in private_key_certificate_chain_pairs
], require_client_auth))
def channel_ready_future(channel):
"""Creates a Future tracking when a Channel is ready.
Cancelling the returned Future does not tell the given Channel to abandon
attempts it may have been making to connect; cancelling merely deactivates the
returned Future's subscription to the given Channel's connectivity.
Args:
channel: A Channel.
Returns:
A Future that matures when the given Channel has connectivity
ChannelConnectivity.READY.
"""
from grpc import _utilities
return _utilities.channel_ready_future(channel)
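# Illustrative usage sketch (not part of grpc) of channel_ready_future()
# defined above: block until a channel can conduct RPCs, treating a timeout as
# "server unreachable". The target and timeout defaults are assumptions made
# for the example.
def _wait_until_ready_example(target='localhost:50051', timeout_seconds=10):
    channel = insecure_channel(target)
    try:
        channel_ready_future(channel).result(timeout=timeout_seconds)
        return True
    except FutureTimeoutError:
        return False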
def insecure_channel(target, options=None):
"""Creates an insecure Channel to a server.
Args:
target: The target to which to connect.
options: A sequence of string-value pairs according to which to configure
the created channel.
Returns:
A Channel to the target through which RPCs may be conducted.
"""
from grpc import _channel
return _channel.Channel(target, () if options is None else options, None)
def secure_channel(target, credentials, options=None):
"""Creates a secure Channel to a server.
Args:
target: The target to which to connect.
credentials: A ChannelCredentials instance.
options: A sequence of string-value pairs according to which to configure
the created channel.
Returns:
A Channel to the target through which RPCs may be conducted.
"""
from grpc import _channel
return _channel.Channel(target, () if options is None else options,
credentials._credentials)
def server(thread_pool, handlers=None, options=None):
"""Creates a Server with which RPCs can be serviced.
Args:
thread_pool: A futures.ThreadPoolExecutor to be used by the returned Server
to service RPCs.
handlers: An optional sequence of GenericRpcHandlers to be used to service
RPCs after the returned Server is started. These handlers need not be the
only handlers the server will use to service RPCs; other handlers may
later be added by calling add_generic_rpc_handlers any time before the
returned Server is started.
options: A sequence of string-value pairs according to which to configure
the created server.
Returns:
A Server with which RPCs can be serviced.
"""
from grpc import _server
return _server.Server(thread_pool, () if handlers is None else handlers, ()
if options is None else options)
################################### __all__ #################################
__all__ = (
'FutureTimeoutError',
'FutureCancelledError',
'Future',
'ChannelConnectivity',
'StatusCode',
'RpcError',
'RpcContext',
'Call',
'ChannelCredentials',
'CallCredentials',
'AuthMetadataContext',
'AuthMetadataPluginCallback',
'AuthMetadataPlugin',
'ServerCredentials',
'UnaryUnaryMultiCallable',
'UnaryStreamMultiCallable',
'StreamUnaryMultiCallable',
'StreamStreamMultiCallable',
'Channel',
'ServicerContext',
'RpcMethodHandler',
'HandlerCallDetails',
'GenericRpcHandler',
'ServiceRpcHandler',
'Server',
'unary_unary_rpc_method_handler',
'unary_stream_rpc_method_handler',
'stream_unary_rpc_method_handler',
'stream_stream_rpc_method_handler',
'method_handlers_generic_handler',
'ssl_channel_credentials',
'metadata_call_credentials',
'access_token_call_credentials',
'composite_call_credentials',
'composite_channel_credentials',
'ssl_server_credentials',
'channel_ready_future',
'insecure_channel',
'secure_channel',
'server',)
############################### Extension Shims ################################
# Here to maintain backwards compatibility; avoid using these in new code!
try:
import grpc_tools
sys.modules.update({'grpc.tools': grpc_tools})
except ImportError:
pass
try:
import grpc_health
sys.modules.update({'grpc.health': grpc_health})
except ImportError:
pass
try:
import grpc_reflection
sys.modules.update({'grpc.reflection': grpc_reflection})
except ImportError:
pass
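# Illustrative end-to-end sketch (not shipped gRPC code) tying the pieces of
# this module together; the service name 'demo.Echo', the method 'Reply', and
# the raw-bytes payload with no (de)serializers are assumptions for the example.
def _echo_round_trip_example():
    """Register one unary-unary method via the handler helpers above, start a
    Server on an OS-assigned port, and invoke the method over an insecure
    Channel."""
    from concurrent import futures

    def reply(request, servicer_context):
        # With no request_deserializer/response_serializer configured, the raw
        # request bytes arrive here and are echoed back unchanged.
        return request

    handler = method_handlers_generic_handler(
        'demo.Echo', {'Reply': unary_unary_rpc_method_handler(reply)})
    echo_server = server(futures.ThreadPoolExecutor(max_workers=4),
                         handlers=(handler,))
    port = echo_server.add_insecure_port('[::]:0')
    echo_server.start()
    echo = insecure_channel('localhost:%d' % port).unary_unary('/demo.Echo/Reply')
    assert echo(b'ping', timeout=5) == b'ping'
    echo_server.stop(None)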
|
{
"content_hash": "8467449b6a93d0de4e22e97223e81e5d",
"timestamp": "",
"source": "github",
"line_count": 1331,
"max_line_length": 84,
"avg_line_length": 35.61081893313298,
"alnum_prop": 0.6769061985737795,
"repo_name": "ininex/geofire-python",
"id": "fe2997179921870d02aa9197c1d06b33f3becb30",
"size": "48931",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "resource/lib/python2.7/site-packages/grpc/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "6231"
},
{
"name": "JavaScript",
"bytes": "6187"
},
{
"name": "Protocol Buffer",
"bytes": "158375"
},
{
"name": "Python",
"bytes": "13368780"
},
{
"name": "Shell",
"bytes": "5031"
}
],
"symlink_target": ""
}
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from bika.lims.permissions import *
def upgrade(tool):
"""AJS changes
"""
# Hack to prevent out-of-date upgrading
# Related: PR #1484
# https://github.com/bikalabs/Bika-LIMS/pull/1484
from bika.lims.upgrade import skip_pre315
if skip_pre315(aq_parent(aq_inner(tool))):
return True
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
# reread jsregistry with the new data
setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
return True
|
{
"content_hash": "9bad2721fcb501c6508c30f948c3216c",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 77,
"avg_line_length": 26.954545454545453,
"alnum_prop": 0.6981450252951096,
"repo_name": "hocinebendou/bika.gsoc",
"id": "3f38fa632358506381aba084f436a3807f7d8cae",
"size": "593",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "bika/lims/upgrade/to3048.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "403"
},
{
"name": "COBOL",
"bytes": "5987"
},
{
"name": "CSS",
"bytes": "29758"
},
{
"name": "JavaScript",
"bytes": "411425"
},
{
"name": "Python",
"bytes": "4330980"
},
{
"name": "RobotFramework",
"bytes": "239735"
},
{
"name": "Shell",
"bytes": "11201"
}
],
"symlink_target": ""
}
|
"""
Django settings for _2michaeltaylor project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
MAIN_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ie06ntlaluelb7lh5@4-qyksf6+_3pkle^jh0kco!5slnlabm0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'core',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = '_2michaeltaylor.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(MAIN_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = '_2michaeltaylor.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
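# A sketch with hypothetical values: a DATABASE_URL such as
# postgres://user:password@db.example.com:5432/mydb is parsed by
# dj_database_url into the ENGINE, NAME, USER, PASSWORD, HOST and PORT
# keys of DATABASES['default'].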
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(MAIN_DIR, 'static'),
)
STATIC_ROOT = 'staticfiles'
|
{
"content_hash": "80c602fcf4e8676e0b74ae4311101350",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 71,
"avg_line_length": 26.327731092436974,
"alnum_prop": 0.6974146185764443,
"repo_name": "mjt145/2michaeltaylor",
"id": "4a078699f572a5189ecac85685b523705cccf793",
"size": "3133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_2michaeltaylor/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16668"
},
{
"name": "HTML",
"bytes": "33522"
},
{
"name": "JavaScript",
"bytes": "40972"
},
{
"name": "Python",
"bytes": "6139"
}
],
"symlink_target": ""
}
|
"""
Volume driver common utilities for HPE 3PAR Storage array
The 3PAR drivers require 3.1.3 firmware on the 3PAR array.
You will need to install the python hpe3parclient module.
sudo pip install python-3parclient
The drivers use both the REST service and the SSH
command line to operate correctly. Since the
SSH credentials and the REST credentials can differ,
we need settings for both.
The drivers require the san_ip, san_login and
san_password settings for SSH connections into the 3PAR
array. They also require hpe3par_api_url,
hpe3par_username and hpe3par_password as credentials
for the REST service on the 3PAR array.
"""
import ast
import json
import math
import pprint
import re
import six
import uuid
from oslo_serialization import base64
from oslo_utils import importutils
hpe3parclient = importutils.try_import("hpe3parclient")
if hpe3parclient:
from hpe3parclient import client
from hpe3parclient import exceptions as hpeexceptions
from oslo_config import cfg
from oslo_log import log as logging
from oslo_log import versionutils
from oslo_service import loopingcall
from oslo_utils import excutils
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder import flow_utils
from cinder.i18n import _, _LE, _LI, _LW
from cinder.objects import fields
from cinder.volume import qos_specs
from cinder.volume import utils as volume_utils
from cinder.volume import volume_types
import taskflow.engines
from taskflow.patterns import linear_flow
LOG = logging.getLogger(__name__)
MIN_CLIENT_VERSION = '4.2.0'
DEDUP_API_VERSION = 30201120
FLASH_CACHE_API_VERSION = 30201200
SRSTATLD_API_VERSION = 30201200
REMOTE_COPY_API_VERSION = 30202290
hpe3par_opts = [
cfg.StrOpt('hpe3par_api_url',
default='',
help="3PAR WSAPI Server Url like "
"https://<3par ip>:8080/api/v1",
deprecated_name='hp3par_api_url'),
cfg.StrOpt('hpe3par_username',
default='',
help="3PAR username with the 'edit' role",
deprecated_name='hp3par_username'),
cfg.StrOpt('hpe3par_password',
default='',
help="3PAR password for the user specified in hpe3par_username",
secret=True,
deprecated_name='hp3par_password'),
cfg.ListOpt('hpe3par_cpg',
default=["OpenStack"],
help="List of the CPG(s) to use for volume creation",
deprecated_name='hp3par_cpg'),
cfg.StrOpt('hpe3par_cpg_snap',
default="",
help="The CPG to use for Snapshots for volumes. "
"If empty the userCPG will be used.",
deprecated_name='hp3par_cpg_snap'),
cfg.StrOpt('hpe3par_snapshot_retention',
default="",
help="The time in hours to retain a snapshot. "
"You can't delete it before this expires.",
deprecated_name='hp3par_snapshot_retention'),
cfg.StrOpt('hpe3par_snapshot_expiration',
default="",
help="The time in hours when a snapshot expires "
" and is deleted. This must be larger than expiration",
deprecated_name='hp3par_snapshot_expiration'),
cfg.BoolOpt('hpe3par_debug',
default=False,
help="Enable HTTP debugging to 3PAR",
deprecated_name='hp3par_debug'),
cfg.ListOpt('hpe3par_iscsi_ips',
default=[],
help="List of target iSCSI addresses to use.",
deprecated_name='hp3par_iscsi_ips'),
cfg.BoolOpt('hpe3par_iscsi_chap_enabled',
default=False,
help="Enable CHAP authentication for iSCSI connections.",
deprecated_name='hp3par_iscsi_chap_enabled'),
]
CONF = cfg.CONF
CONF.register_opts(hpe3par_opts)
# Input/output (total read/write) operations per second.
THROUGHPUT = 'throughput'
# Data processed (total read/write) per unit time: kilobytes per second.
BANDWIDTH = 'bandwidth'
# Response time (total read/write): microseconds.
LATENCY = 'latency'
# IO size (total read/write): kilobytes.
IO_SIZE = 'io_size'
# Queue length for processing IO requests
QUEUE_LENGTH = 'queue_length'
# Average busy percentage
AVG_BUSY_PERC = 'avg_busy_perc'
class HPE3PARCommon(object):
"""Class that contains common code for the 3PAR drivers.
Version history:
.. code-block:: none
1.2.0 - Updated hp3parclient API use to 2.0.x
1.2.1 - Check that the VVS exists
1.2.2 - log prior to raising exceptions
1.2.3 - Methods to update key/value pair bug #1258033
1.2.4 - Remove deprecated config option hp3par_domain
1.2.5 - Raise Ex when deleting snapshot with dependencies bug #1250249
1.2.6 - Allow optional specifying n:s:p for vlun creation bug #1269515
This update now requires 3.1.2 MU3 firmware
1.3.0 - Removed all SSH code. We rely on the hp3parclient now.
2.0.0 - Update hp3parclient API uses 3.0.x
2.0.1 - Updated to use qos_specs, added new qos settings and personas
2.0.2 - Add back-end assisted volume migrate
2.0.3 - Allow deleting missing snapshots bug #1283233
2.0.4 - Allow volumes created from snapshots to be larger bug #1279478
2.0.5 - Fix extend volume units bug #1284368
2.0.6 - use loopingcall.wait instead of time.sleep
2.0.7 - Allow extend volume based on snapshot bug #1285906
2.0.8 - Fix detach issue for multiple hosts bug #1288927
2.0.9 - Remove unused 3PAR driver method bug #1310807
2.0.10 - Fixed an issue with 3PAR vlun location bug #1315542
2.0.11 - Remove hp3parclient requirement from unit tests #1315195
2.0.12 - Volume detach hangs when host is in a host set bug #1317134
2.0.13 - Added support for managing/unmanaging of volumes
2.0.14 - Modified manage volume to use standard 'source-name' element.
2.0.15 - Added support for volume retype
2.0.16 - Add a better log during delete_volume time. Bug #1349636
2.0.17 - Added iSCSI CHAP support
This update now requires 3.1.3 MU1 firmware
and hp3parclient 3.1.0
2.0.18 - HP 3PAR manage_existing with volume-type support
2.0.19 - Update default persona from Generic to Generic-ALUA
2.0.20 - Configurable SSH missing key policy and known hosts file
2.0.21 - Remove bogus invalid snapCPG=None exception
2.0.22 - HP 3PAR drivers should not claim to have 'infinite' space
2.0.23 - Increase the hostname size from 23 to 31 Bug #1371242
2.0.24 - Add pools (hp3par_cpg now accepts a list of CPGs)
2.0.25 - Migrate without losing type settings bug #1356608
2.0.26 - Don't ignore extra-specs snap_cpg when missing cpg #1368972
2.0.27 - Fixing manage source-id error bug #1357075
2.0.28 - Removing locks bug #1381190
2.0.29 - Report a limitless cpg's stats better bug #1398651
2.0.30 - Update the minimum hp3parclient version bug #1402115
2.0.31 - Removed usage of host name cache #1398914
2.0.32 - Update LOG usage to fix translations. bug #1384312
2.0.33 - Fix host persona to match WSAPI mapping bug #1403997
2.0.34 - Fix log messages to match guidelines. bug #1411370
2.0.35 - Fix default snapCPG for manage_existing bug #1393609
2.0.36 - Added support for dedup provisioning
2.0.37 - Added support for enabling Flash Cache
2.0.38 - Add stats for hp3par goodness_function and filter_function
2.0.39 - Added support for updated detach_volume attachment.
2.0.40 - Make the 3PAR drivers honor the pool in create bug #1432876
2.0.41 - Only log versions at startup. bug #1447697
2.0.42 - Fix type for snapshot config settings. bug #1461640
2.0.43 - Report the capability of supporting multiattach
2.0.44 - Update help strings to reduce the 3PAR user role requirements
2.0.45 - Python 3 fixes
2.0.46 - Improved VLUN creation and deletion logic. #1469816
2.0.47 - Changed initialize_connection to use getHostVLUNs. #1475064
2.0.48 - Adding changes to support 3PAR iSCSI multipath.
2.0.49 - Added client CPG stats to driver volume stats. bug #1482741
2.0.50 - Add over subscription support
2.0.51 - Adds consistency group support
2.0.52 - Added update_migrated_volume. bug #1492023
2.0.53 - Fix volume size conversion. bug #1513158
3.0.0 - Rebranded HP to HPE.
3.0.1 - Fixed find_existing_vluns bug #1515033
3.0.2 - Python 3 support
3.0.3 - Remove db access for consistency groups
3.0.4 - Adds v2 managed replication support
3.0.5 - Adds v2 unmanaged replication support
3.0.6 - Adding manage/unmanage snapshot support
3.0.7 - Enable standard capabilities based on 3PAR licenses
3.0.8 - Optimize array ID retrieval
3.0.9 - Bump minimum API version for volume replication
3.0.10 - Added additional volumes checks to the manage snapshot API
3.0.11 - Fix the image cache capability bug #1491088
3.0.12 - Remove client version checks for replication
3.0.13 - Support creating a cg from a source cg
3.0.14 - Comparison of WWNs now handles case difference. bug #1546453
3.0.15 - Update replication to version 2.1
3.0.16 - Use same LUN ID for each VLUN path #1551994
3.0.17 - Don't fail on clearing 3PAR object volume key. bug #1546392
3.0.18 - create_cloned_volume account for larger size. bug #1554740
3.0.19 - Remove metadata that tracks the instance ID. bug #1572665
3.0.20 - Fix lun_id of 0 issue. bug #1573298
3.0.21 - Driver no longer fails to initialize if
System Reporter license is missing. bug #1568078
3.0.22 - Rework delete_vlun. Bug #1582922
3.0.23 - Fix CG create failures with long display name or special
characters. bug #1573647
3.0.24 - Fix terminate connection on failover
3.0.25 - Fix delete volume when online clone is active. bug #1349639
3.0.26 - Fix concurrent snapshot delete conflict. bug #1600104
"""
VERSION = "3.0.26"
stats = {}
# TODO(Ramy): move these to the 3PAR Client
VLUN_TYPE_EMPTY = 1
VLUN_TYPE_PORT = 2
VLUN_TYPE_HOST = 3
VLUN_TYPE_MATCHED_SET = 4
VLUN_TYPE_HOST_SET = 5
THIN = 2
DEDUP = 6
CONVERT_TO_THIN = 1
CONVERT_TO_FULL = 2
CONVERT_TO_DEDUP = 3
# v2 replication constants
SYNC = 1
PERIODIC = 2
EXTRA_SPEC_REP_MODE = "replication:mode"
EXTRA_SPEC_REP_SYNC_PERIOD = "replication:sync_period"
RC_ACTION_CHANGE_TO_PRIMARY = 7
DEFAULT_REP_MODE = 'periodic'
DEFAULT_SYNC_PERIOD = 900
RC_GROUP_STARTED = 3
SYNC_STATUS_COMPLETED = 3
FAILBACK_VALUE = 'default'
# License values for reported capabilities
PRIORITY_OPT_LIC = "Priority Optimization"
THIN_PROV_LIC = "Thin Provisioning"
REMOTE_COPY_LIC = "Remote Copy"
SYSTEM_REPORTER_LIC = "System Reporter"
# Valid values for volume type extra specs
# The first value in the list is the default value
valid_prov_values = ['thin', 'full', 'dedup']
valid_persona_values = ['2 - Generic-ALUA',
'1 - Generic',
'3 - Generic-legacy',
'4 - HPUX-legacy',
'5 - AIX-legacy',
'6 - EGENERA',
'7 - ONTAP-legacy',
'8 - VMware',
'9 - OpenVMS',
'10 - HPUX',
'11 - WindowsServer']
hpe_qos_keys = ['minIOPS', 'maxIOPS', 'minBWS', 'maxBWS', 'latency',
'priority']
qos_priority_level = {'low': 1, 'normal': 2, 'high': 3}
hpe3par_valid_keys = ['cpg', 'snap_cpg', 'provisioning', 'persona', 'vvs',
'flash_cache']
def __init__(self, config, active_backend_id=None):
self.config = config
self.client = None
self.uuid = uuid.uuid4()
self._client_conf = {}
self._replication_targets = []
self._replication_enabled = False
self._active_backend_id = active_backend_id
def get_version(self):
return self.VERSION
def check_flags(self, options, required_flags):
for flag in required_flags:
if not getattr(options, flag, None):
msg = _('%s is not set') % flag
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
def check_replication_flags(self, options, required_flags):
for flag in required_flags:
if not options.get(flag, None):
msg = (_('%s is not set and is required for the replication '
'device to be valid.') % flag)
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
def _create_client(self, timeout=None):
hpe3par_api_url = self._client_conf['hpe3par_api_url']
cl = client.HPE3ParClient(hpe3par_api_url, timeout=timeout)
client_version = hpe3parclient.version
if client_version < MIN_CLIENT_VERSION:
ex_msg = (_('Invalid hpe3parclient version found (%(found)s). '
'Version %(minimum)s or greater required. Run "pip'
' install --upgrade python-3parclient" to upgrade'
' the hpe3parclient.')
% {'found': client_version,
'minimum': MIN_CLIENT_VERSION})
LOG.error(ex_msg)
raise exception.InvalidInput(reason=ex_msg)
return cl
def client_login(self):
try:
LOG.debug("Connecting to 3PAR")
self.client.login(self._client_conf['hpe3par_username'],
self._client_conf['hpe3par_password'])
except hpeexceptions.HTTPUnauthorized as ex:
msg = (_("Failed to Login to 3PAR (%(url)s) because %(err)s") %
{'url': self._client_conf['hpe3par_api_url'], 'err': ex})
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
known_hosts_file = CONF.ssh_hosts_key_file
policy = "AutoAddPolicy"
if CONF.strict_ssh_host_key_policy:
policy = "RejectPolicy"
self.client.setSSHOptions(
self._client_conf['san_ip'],
self._client_conf['san_login'],
self._client_conf['san_password'],
port=self._client_conf['san_ssh_port'],
conn_timeout=self._client_conf['ssh_conn_timeout'],
privatekey=self._client_conf['san_private_key'],
missing_key_policy=policy,
known_hosts_file=known_hosts_file)
def client_logout(self):
LOG.debug("Disconnect from 3PAR REST and SSH %s", self.uuid)
self.client.logout()
def _create_replication_client(self, remote_array):
try:
cl = client.HPE3ParClient(remote_array['hpe3par_api_url'])
cl.login(remote_array['hpe3par_username'],
remote_array['hpe3par_password'])
except hpeexceptions.HTTPUnauthorized as ex:
msg = (_("Failed to Login to 3PAR (%(url)s) because %(err)s") %
{'url': remote_array['hpe3par_api_url'], 'err': ex})
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
known_hosts_file = CONF.ssh_hosts_key_file
policy = "AutoAddPolicy"
if CONF.strict_ssh_host_key_policy:
policy = "RejectPolicy"
cl.setSSHOptions(
remote_array['san_ip'],
remote_array['san_login'],
remote_array['san_password'],
port=remote_array['san_ssh_port'],
conn_timeout=remote_array['ssh_conn_timeout'],
privatekey=remote_array['san_private_key'],
missing_key_policy=policy,
known_hosts_file=known_hosts_file)
return cl
def _destroy_replication_client(self, client):
if client is not None:
client.logout()
def do_setup(self, context, timeout=None, stats=None):
if hpe3parclient is None:
msg = _('You must install hpe3parclient before using 3PAR'
' drivers. Run "pip install python-3parclient" to'
' install the hpe3parclient.')
raise exception.VolumeBackendAPIException(data=msg)
try:
# This will set self._client_conf with the proper credentials
# to communicate with the 3PAR array. It will contain either
# the values for the primary array or secondary array in the
# case of a fail-over.
self._get_3par_config()
self.client = self._create_client(timeout=timeout)
wsapi_version = self.client.getWsApiVersion()
self.API_VERSION = wsapi_version['build']
# If replication is properly configured, the primary array's
# API version must meet the minimum requirements.
if self._replication_enabled and (
self.API_VERSION < REMOTE_COPY_API_VERSION):
self._replication_enabled = False
msg = (_LE("The primary array must have an API version of "
"%(min_ver)s or higher, but is only on "
"%(current_ver)s, therefore replication is not "
"supported.") %
{'min_ver': REMOTE_COPY_API_VERSION,
'current_ver': self.API_VERSION})
LOG.error(msg)
except hpeexceptions.UnsupportedVersion as ex:
# In the event we cannot contact the configured primary array,
# we want to allow a failover if replication is enabled.
self._do_replication_setup()
if self._replication_enabled:
self.client = None
raise exception.InvalidInput(ex)
if context:
# The context is None except at driver startup.
LOG.info(_LI("HPE3PARCommon %(common_ver)s,"
"hpe3parclient %(rest_ver)s"),
{"common_ver": self.VERSION,
"rest_ver": hpe3parclient.get_version_string()})
if self.config.hpe3par_debug:
self.client.debug_rest(True)
if self.API_VERSION < SRSTATLD_API_VERSION:
# Firmware version not compatible with srstatld
LOG.warning(_LW("srstatld requires "
"WSAPI version '%(srstatld_version)s' "
"version '%(version)s' is installed.") %
{'srstatld_version': SRSTATLD_API_VERSION,
'version': self.API_VERSION})
# Get the client ID for provider_location. We only need to retrieve
# the ID directly from the array if the driver stats are not provided.
if not stats:
try:
self.client_login()
info = self.client.getStorageSystemInfo()
self.client.id = six.text_type(info['id'])
except Exception:
self.client.id = 0
finally:
self.client_logout()
else:
self.client.id = stats['array_id']
def check_for_setup_error(self):
if self.client:
self.client_login()
try:
cpg_names = self._client_conf['hpe3par_cpg']
for cpg_name in cpg_names:
self.validate_cpg(cpg_name)
finally:
self.client_logout()
def validate_cpg(self, cpg_name):
try:
self.client.getCPG(cpg_name)
except hpeexceptions.HTTPNotFound:
err = (_("CPG (%s) doesn't exist on array") % cpg_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
def get_domain(self, cpg_name):
try:
cpg = self.client.getCPG(cpg_name)
except hpeexceptions.HTTPNotFound:
err = (_("Failed to get domain because CPG (%s) doesn't "
"exist on array.") % cpg_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
if 'domain' in cpg:
return cpg['domain']
return None
def extend_volume(self, volume, new_size):
volume_name = self._get_3par_vol_name(volume['id'])
old_size = volume['size']
growth_size = int(new_size) - old_size
LOG.debug("Extending Volume %(vol)s from %(old)s to %(new)s, "
" by %(diff)s GB.",
{'vol': volume_name, 'old': old_size, 'new': new_size,
'diff': growth_size})
growth_size_mib = growth_size * units.Ki
self._extend_volume(volume, volume_name, growth_size_mib)
def create_consistencygroup(self, context, group):
"""Creates a consistencygroup."""
pool = volume_utils.extract_host(group.host, level='pool')
domain = self.get_domain(pool)
cg_name = self._get_3par_vvs_name(group.id)
extra = {'consistency_group_id': group.id}
if group.cgsnapshot_id:
extra['cgsnapshot_id'] = group.cgsnapshot_id
self.client.createVolumeSet(cg_name, domain=domain,
comment=six.text_type(extra))
model_update = {'status': fields.ConsistencyGroupStatus.AVAILABLE}
return model_update
def create_consistencygroup_from_src(self, context, group, volumes,
cgsnapshot=None, snapshots=None,
source_cg=None, source_vols=None):
self.create_consistencygroup(context, group)
vvs_name = self._get_3par_vvs_name(group.id)
if cgsnapshot and snapshots:
cgsnap_name = self._get_3par_snap_name(cgsnapshot.id)
snap_base = cgsnap_name
elif source_cg and source_vols:
cg_id = source_cg.id
# Create a brand new uuid for the temp snap.
snap_uuid = uuid.uuid4().hex
# Create a temporary snapshot of the volume set in order to
# perform an online copy. These temp snapshots will be deleted
# when the source consistency group is deleted.
temp_snap = self._get_3par_snap_name(snap_uuid, temp_snap=True)
snap_shot_name = temp_snap + "-@count@"
copy_of_name = self._get_3par_vvs_name(cg_id)
optional = {'expirationHours': 1}
self.client.createSnapshotOfVolumeSet(snap_shot_name, copy_of_name,
optional=optional)
snap_base = temp_snap
for i, volume in enumerate(volumes):
snap_name = snap_base + "-" + six.text_type(i)
volume_name = self._get_3par_vol_name(volume['id'])
type_info = self.get_volume_settings_from_type(volume)
cpg = type_info['cpg']
optional = {'online': True, 'snapCPG': cpg}
self.client.copyVolume(snap_name, volume_name, cpg, optional)
self.client.addVolumeToVolumeSet(vvs_name, volume_name)
return None, None
def delete_consistencygroup(self, context, group, volumes):
"""Deletes a consistency group."""
try:
cg_name = self._get_3par_vvs_name(group.id)
self.client.deleteVolumeSet(cg_name)
except hpeexceptions.HTTPNotFound:
err = (_LW("Virtual Volume Set '%s' doesn't exist on array.") %
cg_name)
LOG.warning(err)
except hpeexceptions.HTTPConflict as e:
err = (_LE("Conflict detected in Virtual Volume Set"
" %(volume_set)s: %(error)s"))
LOG.error(err,
{"volume_set": cg_name,
"error": e})
volume_model_updates = []
for volume in volumes:
volume_update = {'id': volume.id}
try:
self.delete_volume(volume)
volume_update['status'] = 'deleted'
except Exception as ex:
LOG.error(_LE("There was an error deleting volume %(id)s: "
"%(error)s."),
{'id': volume.id,
'error': six.text_type(ex)})
volume_update['status'] = 'error'
volume_model_updates.append(volume_update)
model_update = {'status': group.status}
return model_update, volume_model_updates
def update_consistencygroup(self, context, group,
add_volumes=None, remove_volumes=None):
volume_set_name = self._get_3par_vvs_name(group.id)
for volume in add_volumes:
volume_name = self._get_3par_vol_name(volume['id'])
try:
self.client.addVolumeToVolumeSet(volume_set_name, volume_name)
except hpeexceptions.HTTPNotFound:
msg = (_LE('Virtual Volume Set %s does not exist.') %
volume_set_name)
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
for volume in remove_volumes:
volume_name = self._get_3par_vol_name(volume['id'])
try:
self.client.removeVolumeFromVolumeSet(
volume_set_name, volume_name)
except hpeexceptions.HTTPNotFound:
msg = (_LE('Virtual Volume Set %s does not exist.') %
volume_set_name)
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
return None, None, None
def create_cgsnapshot(self, context, cgsnapshot, snapshots):
"""Creates a cgsnapshot."""
cg_id = cgsnapshot.consistencygroup_id
snap_shot_name = self._get_3par_snap_name(cgsnapshot.id) + (
"-@count@")
copy_of_name = self._get_3par_vvs_name(cg_id)
extra = {'cgsnapshot_id': cgsnapshot.id}
extra['consistency_group_id'] = cg_id
extra['description'] = cgsnapshot.description
optional = {'comment': json.dumps(extra),
'readOnly': False}
if self.config.hpe3par_snapshot_expiration:
optional['expirationHours'] = (
int(self.config.hpe3par_snapshot_expiration))
if self.config.hpe3par_snapshot_retention:
optional['retentionHours'] = (
int(self.config.hpe3par_snapshot_retention))
try:
self.client.createSnapshotOfVolumeSet(snap_shot_name, copy_of_name,
optional=optional)
except Exception as ex:
msg = (_('There was an error creating the cgsnapshot: %s') %
six.text_type(ex))
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
snapshot_model_updates = []
for snapshot in snapshots:
snapshot_update = {'id': snapshot['id'],
'status': fields.SnapshotStatus.AVAILABLE}
snapshot_model_updates.append(snapshot_update)
model_update = {'status': 'available'}
return model_update, snapshot_model_updates
def delete_cgsnapshot(self, context, cgsnapshot, snapshots):
"""Deletes a cgsnapshot."""
cgsnap_name = self._get_3par_snap_name(cgsnapshot.id)
snapshot_model_updates = []
for i, snapshot in enumerate(snapshots):
snapshot_update = {'id': snapshot['id']}
try:
snap_name = cgsnap_name + "-" + six.text_type(i)
self.client.deleteVolume(snap_name)
snapshot_update['status'] = fields.SnapshotStatus.DELETED
except hpeexceptions.HTTPNotFound as ex:
# We'll let this act as if it worked
# it helps clean up the cinder entries.
LOG.warning(_LW("Delete Snapshot id not found. Removing from "
"cinder: %(id)s Ex: %(msg)s"),
{'id': snapshot['id'], 'msg': ex})
snapshot_update['status'] = fields.SnapshotStatus.ERROR
except Exception as ex:
LOG.error(_LE("There was an error deleting snapshot %(id)s: "
"%(error)s."),
{'id': snapshot['id'],
'error': six.text_type(ex)})
snapshot_update['status'] = fields.SnapshotStatus.ERROR
snapshot_model_updates.append(snapshot_update)
model_update = {'status': cgsnapshot.status}
return model_update, snapshot_model_updates
def manage_existing(self, volume, existing_ref):
"""Manage an existing 3PAR volume.
existing_ref is a dictionary of the form:
{'source-name': <name of the virtual volume>}
"""
target_vol_name = self._get_existing_volume_ref_name(existing_ref)
# Check for the existence of the virtual volume.
old_comment_str = ""
try:
vol = self.client.getVolume(target_vol_name)
if 'comment' in vol:
old_comment_str = vol['comment']
except hpeexceptions.HTTPNotFound:
err = (_("Virtual volume '%s' doesn't exist on array.") %
target_vol_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
new_comment = {}
# Use the display name from the existing volume if no new name
# was chosen by the user.
if volume['display_name']:
display_name = volume['display_name']
new_comment['display_name'] = volume['display_name']
elif 'comment' in vol:
display_name = self._get_3par_vol_comment_value(vol['comment'],
'display_name')
if display_name:
new_comment['display_name'] = display_name
else:
display_name = None
# Generate the new volume information based on the new ID.
new_vol_name = self._get_3par_vol_name(volume['id'])
name = 'volume-' + volume['id']
new_comment['volume_id'] = volume['id']
new_comment['name'] = name
new_comment['type'] = 'OpenStack'
volume_type = None
if volume['volume_type_id']:
try:
volume_type = self._get_volume_type(volume['volume_type_id'])
except Exception:
reason = (_("Volume type ID '%s' is invalid.") %
volume['volume_type_id'])
raise exception.ManageExistingVolumeTypeMismatch(reason=reason)
new_vals = {'newName': new_vol_name,
'comment': json.dumps(new_comment)}
# Ensure that snapCPG is set
if 'snapCPG' not in vol:
new_vals['snapCPG'] = vol['userCPG']
LOG.info(_LI("Virtual volume %(disp)s '%(new)s' snapCPG "
"is empty so it will be set to: %(cpg)s"),
{'disp': display_name, 'new': new_vol_name,
'cpg': new_vals['snapCPG']})
# Update the existing volume with the new name and comments.
self.client.modifyVolume(target_vol_name, new_vals)
LOG.info(_LI("Virtual volume '%(ref)s' renamed to '%(new)s'."),
{'ref': existing_ref['source-name'], 'new': new_vol_name})
retyped = False
model_update = None
if volume_type:
LOG.info(_LI("Virtual volume %(disp)s '%(new)s' is "
"being retyped."),
{'disp': display_name, 'new': new_vol_name})
try:
retyped, model_update = self._retype_from_no_type(volume,
volume_type)
LOG.info(_LI("Virtual volume %(disp)s successfully retyped to "
"%(new_type)s."),
{'disp': display_name,
'new_type': volume_type.get('name')})
except Exception:
with excutils.save_and_reraise_exception():
LOG.warning(_LW("Failed to manage virtual volume %(disp)s "
"due to error during retype."),
{'disp': display_name})
# Try to undo the rename and clear the new comment.
self.client.modifyVolume(
new_vol_name,
{'newName': target_vol_name,
'comment': old_comment_str})
updates = {'display_name': display_name}
if retyped and model_update:
updates.update(model_update)
LOG.info(_LI("Virtual volume %(disp)s '%(new)s' is "
"now being managed."),
{'disp': display_name, 'new': new_vol_name})
# Return display name to update the name displayed in the GUI and
# any model updates from retype.
return updates
def manage_existing_snapshot(self, snapshot, existing_ref):
"""Manage an existing 3PAR snapshot.
existing_ref is a dictionary of the form:
{'source-name': <name of the snapshot>}
"""
# Potential parent volume for the snapshot
volume = snapshot['volume']
# Do not allow for managing of snapshots for 'failed-over' volumes.
if volume.get('replication_status') == 'failed-over':
err = (_("Managing of snapshots to failed-over volumes is "
"not allowed."))
raise exception.InvalidInput(reason=err)
target_snap_name = self._get_existing_volume_ref_name(existing_ref,
is_snapshot=True)
# Check for the existence of the snapshot.
try:
snap = self.client.getVolume(target_snap_name)
except hpeexceptions.HTTPNotFound:
err = (_("Snapshot '%s' doesn't exist on array.") %
target_snap_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
# Make sure the snapshot is being associated with the correct volume.
parent_vol_name = self._get_3par_vol_name(volume['id'])
if parent_vol_name != snap['copyOf']:
err = (_("The provided snapshot '%s' is not a snapshot of "
"the provided volume.") % target_snap_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
new_comment = {}
# Use the display name from the existing snapshot if no new name
# was chosen by the user.
if snapshot['display_name']:
display_name = snapshot['display_name']
new_comment['display_name'] = snapshot['display_name']
elif 'comment' in snap:
display_name = self._get_3par_vol_comment_value(snap['comment'],
'display_name')
if display_name:
new_comment['display_name'] = display_name
else:
display_name = None
# Generate the new snapshot information based on the new ID.
new_snap_name = self._get_3par_snap_name(snapshot['id'])
new_comment['volume_id'] = volume['id']
new_comment['volume_name'] = 'volume-' + volume['id']
if snapshot.get('display_description', None):
new_comment['description'] = snapshot['display_description']
else:
new_comment['description'] = ""
new_vals = {'newName': new_snap_name,
'comment': json.dumps(new_comment)}
# Update the existing snapshot with the new name and comments.
self.client.modifyVolume(target_snap_name, new_vals)
LOG.info(_LI("Snapshot '%(ref)s' renamed to '%(new)s'."),
{'ref': existing_ref['source-name'], 'new': new_snap_name})
updates = {'display_name': display_name}
LOG.info(_LI("Snapshot %(disp)s '%(new)s' is now being managed."),
{'disp': display_name, 'new': new_snap_name})
# Return display name to update the name displayed in the GUI.
return updates
def manage_existing_get_size(self, volume, existing_ref):
"""Return size of volume to be managed by manage_existing.
existing_ref is a dictionary of the form:
{'source-name': <name of the virtual volume>}
"""
target_vol_name = self._get_existing_volume_ref_name(existing_ref)
# Make sure the reference is not in use.
if re.match('osv-*|oss-*|vvs-*', target_vol_name):
reason = _("Reference must be for an unmanaged virtual volume.")
raise exception.ManageExistingInvalidReference(
existing_ref=target_vol_name,
reason=reason)
# Check for the existence of the virtual volume.
try:
vol = self.client.getVolume(target_vol_name)
except hpeexceptions.HTTPNotFound:
err = (_("Virtual volume '%s' doesn't exist on array.") %
target_vol_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
return int(math.ceil(float(vol['sizeMiB']) / units.Ki))
def manage_existing_snapshot_get_size(self, snapshot, existing_ref):
"""Return size of snapshot to be managed by manage_existing_snapshot.
existing_ref is a dictionary of the form:
{'source-name': <name of the snapshot>}
"""
target_snap_name = self._get_existing_volume_ref_name(existing_ref,
is_snapshot=True)
# Make sure the reference is not in use.
if re.match('osv-*|oss-*|vvs-*|unm-*', target_snap_name):
reason = _("Reference must be for an unmanaged snapshot.")
raise exception.ManageExistingInvalidReference(
existing_ref=target_snap_name,
reason=reason)
# Check for the existence of the snapshot.
try:
snap = self.client.getVolume(target_snap_name)
except hpeexceptions.HTTPNotFound:
err = (_("Snapshot '%s' doesn't exist on array.") %
target_snap_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
return int(math.ceil(float(snap['sizeMiB']) / units.Ki))
def unmanage(self, volume):
"""Removes the specified volume from Cinder management."""
# Rename the volume to the unm-* format so that it can be
# easily found later.
vol_name = self._get_3par_vol_name(volume['id'])
new_vol_name = self._get_3par_unm_name(volume['id'])
self.client.modifyVolume(vol_name, {'newName': new_vol_name})
LOG.info(_LI("Virtual volume %(disp)s '%(vol)s' is no longer managed. "
"Volume renamed to '%(new)s'."),
{'disp': volume['display_name'],
'vol': vol_name,
'new': new_vol_name})
def unmanage_snapshot(self, snapshot):
"""Removes the specified snapshot from Cinder management."""
# Parent volume for the snapshot
volume = snapshot['volume']
# Do not allow unmanaging of snapshots from 'failed-over' volumes.
if volume.get('replication_status') == 'failed-over':
err = (_("Unmanaging of snapshots from failed-over volumes is "
"not allowed."))
LOG.error(err)
# TODO(leeantho) Change this exception to Invalid when the volume
# manager supports handling that.
raise exception.SnapshotIsBusy(snapshot_name=snapshot['id'])
# Rename the snapshot to the ums-* format so that it can be
# easily found later.
snap_name = self._get_3par_snap_name(snapshot['id'])
new_snap_name = self._get_3par_ums_name(snapshot['id'])
self.client.modifyVolume(snap_name, {'newName': new_snap_name})
LOG.info(_LI("Snapshot %(disp)s '%(vol)s' is no longer managed. "
"Snapshot renamed to '%(new)s'."),
{'disp': snapshot['display_name'],
'vol': snap_name,
'new': new_snap_name})
def _get_existing_volume_ref_name(self, existing_ref, is_snapshot=False):
"""Returns the volume name of an existing reference.
Checks if an existing volume reference has a source-name or
source-id element. If source-name or source-id is not present, an
error will be raised.
"""
vol_name = None
if 'source-name' in existing_ref:
vol_name = existing_ref['source-name']
elif 'source-id' in existing_ref:
if is_snapshot:
vol_name = self._get_3par_ums_name(existing_ref['source-id'])
else:
vol_name = self._get_3par_unm_name(existing_ref['source-id'])
else:
reason = _("Reference must contain source-name or source-id.")
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref,
reason=reason)
return vol_name
def _extend_volume(self, volume, volume_name, growth_size_mib,
_convert_to_base=False):
model_update = None
rcg_name = self._get_3par_rcg_name(volume['id'])
is_volume_replicated = self._volume_of_replicated_type(volume)
try:
if _convert_to_base:
LOG.debug("Converting to base volume prior to growing.")
model_update = self._convert_to_base_volume(volume)
# If the volume is replicated and we are not failed over,
# remote copy has to be stopped before the volume can be extended.
failed_over = volume.get("replication_status", None)
is_failed_over = failed_over == "failed-over"
if is_volume_replicated and not is_failed_over:
self.client.stopRemoteCopy(rcg_name)
self.client.growVolume(volume_name, growth_size_mib)
if is_volume_replicated and not is_failed_over:
self.client.startRemoteCopy(rcg_name)
except Exception as ex:
# If the extend fails, we must restart remote copy.
if is_volume_replicated:
self.client.startRemoteCopy(rcg_name)
with excutils.save_and_reraise_exception() as ex_ctxt:
if (not _convert_to_base and
isinstance(ex, hpeexceptions.HTTPForbidden) and
ex.get_code() == 150):
# Error code 150 means 'invalid operation: Cannot grow
# this type of volume'.
# Suppress raising this exception because we can
# resolve it by converting it into a base volume.
# Afterwards, extending the volume should succeed, or
# fail with a different exception/error code.
ex_ctxt.reraise = False
model_update = self._extend_volume(
volume, volume_name,
growth_size_mib,
_convert_to_base=True)
else:
LOG.error(_LE("Error extending volume: %(vol)s. "
"Exception: %(ex)s"),
{'vol': volume_name, 'ex': ex})
return model_update
def _get_3par_vol_name(self, volume_id):
"""Get converted 3PAR volume name.
Converts the openstack volume id from
ecffc30f-98cb-4cf5-85ee-d7309cc17cd2
to
osv-7P.DD5jLTPWF7tcwnMF80g
We convert the 128 bits of the uuid into a 24-character-long
base64-encoded string to ensure we don't exceed the maximum
allowed 31-character name limit on 3PAR.
We strip the padding '=' and replace + with .
and / with -
"""
volume_name = self._encode_name(volume_id)
return "osv-%s" % volume_name
def _get_3par_snap_name(self, snapshot_id, temp_snap=False):
snapshot_name = self._encode_name(snapshot_id)
if temp_snap:
# is this a temporary snapshot
# this is done during cloning
prefix = "tss-%s"
else:
prefix = "oss-%s"
return prefix % snapshot_name
def _get_3par_ums_name(self, snapshot_id):
ums_name = self._encode_name(snapshot_id)
return "ums-%s" % ums_name
def _get_3par_vvs_name(self, volume_id):
vvs_name = self._encode_name(volume_id)
return "vvs-%s" % vvs_name
def _get_3par_unm_name(self, volume_id):
unm_name = self._encode_name(volume_id)
return "unm-%s" % unm_name
# v2 replication conversion
def _get_3par_rcg_name(self, volume_id):
rcg_name = self._encode_name(volume_id)
rcg = "rcg-%s" % rcg_name
return rcg[:22]
def _get_3par_remote_rcg_name(self, volume_id, provider_location):
return self._get_3par_rcg_name(volume_id) + ".r" + (
six.text_type(provider_location))
def _encode_name(self, name):
uuid_str = name.replace("-", "")
vol_uuid = uuid.UUID('urn:uuid:%s' % uuid_str)
vol_encoded = base64.encode_as_text(vol_uuid.bytes)
# 3par doesn't allow +, nor /
vol_encoded = vol_encoded.replace('+', '.')
vol_encoded = vol_encoded.replace('/', '-')
# strip off the == as 3par doesn't like those.
vol_encoded = vol_encoded.replace('=', '')
return vol_encoded
def _capacity_from_size(self, vol_size):
# because 3PAR volume sizes are in Mebibytes.
if int(vol_size) == 0:
capacity = units.Gi # default: 1GiB
else:
capacity = vol_size * units.Gi
capacity = int(math.ceil(capacity / units.Mi))
return capacity
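# Worked example (a sketch): a 10 GiB volume yields
# int(math.ceil(10 * units.Gi / units.Mi)) == 10240 MiB, while a size of 0
# falls back to the 1 GiB default (1024 MiB).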
def _delete_3par_host(self, hostname):
self.client.deleteHost(hostname)
def _create_3par_vlun(self, volume, hostname, nsp, lun_id=None):
try:
location = None
auto = True
if lun_id is not None:
auto = False
if nsp is None:
location = self.client.createVLUN(volume, hostname=hostname,
auto=auto, lun=lun_id)
else:
port = self.build_portPos(nsp)
location = self.client.createVLUN(volume, hostname=hostname,
auto=auto, portPos=port,
lun=lun_id)
vlun_info = None
if location:
# The LUN id is returned as part of the location URI
vlun = location.split(',')
vlun_info = {'volume_name': vlun[0],
'lun_id': int(vlun[1]),
'host_name': vlun[2],
}
if len(vlun) > 3:
vlun_info['nsp'] = vlun[3]
return vlun_info
except hpeexceptions.HTTPBadRequest as e:
if 'must be in the same domain' in e.get_description():
LOG.error(e.get_description())
raise exception.Invalid3PARDomain(err=e.get_description())
else:
raise exception.VolumeBackendAPIException(
data=e.get_description())
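# Illustrative parsing (hypothetical values): a returned location of
# 'osv-ABC123,2,fakehost,1:2:3' yields
# vlun_info = {'volume_name': 'osv-ABC123', 'lun_id': 2,
#              'host_name': 'fakehost', 'nsp': '1:2:3'}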
def _safe_hostname(self, hostname):
"""We have to use a safe hostname length for 3PAR host names."""
try:
index = hostname.index('.')
except ValueError:
# couldn't find it
index = len(hostname)
# we'll just chop this off for now.
if index > 31:
index = 31
return hostname[:index]
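# Example (a sketch): 'node1.example.com' is truncated at the first dot to
# 'node1'; a dotless hostname longer than 31 characters is cut to its first
# 31 characters.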
def _get_3par_host(self, hostname):
return self.client.getHost(hostname)
def get_ports(self):
return self.client.getPorts()
def get_active_target_ports(self):
ports = self.get_ports()
target_ports = []
for port in ports['members']:
if (
port['mode'] == self.client.PORT_MODE_TARGET and
port['linkState'] == self.client.PORT_STATE_READY
):
port['nsp'] = self.build_nsp(port['portPos'])
target_ports.append(port)
return target_ports
def get_active_fc_target_ports(self):
ports = self.get_active_target_ports()
fc_ports = []
for port in ports:
if port['protocol'] == self.client.PORT_PROTO_FC:
fc_ports.append(port)
return fc_ports
def get_active_iscsi_target_ports(self):
ports = self.get_active_target_ports()
iscsi_ports = []
for port in ports:
if port['protocol'] == self.client.PORT_PROTO_ISCSI:
iscsi_ports.append(port)
return iscsi_ports
def get_volume_stats(self,
refresh,
filter_function=None,
goodness_function=None):
if refresh:
self._update_volume_stats(
filter_function=filter_function,
goodness_function=goodness_function)
return self.stats
def _update_volume_stats(self,
filter_function=None,
goodness_function=None):
# const to convert MiB to GiB (1 / 1024)
const = 0.0009765625
# storage_protocol and volume_backend_name are
# set in the child classes
pools = []
info = self.client.getStorageSystemInfo()
qos_support = True
thin_support = True
remotecopy_support = True
sr_support = True
if 'licenseInfo' in info:
if 'licenses' in info['licenseInfo']:
valid_licenses = info['licenseInfo']['licenses']
qos_support = self._check_license_enabled(
valid_licenses, self.PRIORITY_OPT_LIC,
"QoS_support")
thin_support = self._check_license_enabled(
valid_licenses, self.THIN_PROV_LIC,
"Thin_provisioning_support")
remotecopy_support = self._check_license_enabled(
valid_licenses, self.REMOTE_COPY_LIC,
"Replication")
sr_support = self._check_license_enabled(
valid_licenses, self.SYSTEM_REPORTER_LIC,
"System_reporter_support")
for cpg_name in self._client_conf['hpe3par_cpg']:
try:
stat_capabilities = {
THROUGHPUT: None,
BANDWIDTH: None,
LATENCY: None,
IO_SIZE: None,
QUEUE_LENGTH: None,
AVG_BUSY_PERC: None
}
cpg = self.client.getCPG(cpg_name)
if (self.API_VERSION >= SRSTATLD_API_VERSION and sr_support):
interval = 'daily'
history = '7d'
try:
stat_capabilities = self.client.getCPGStatData(
cpg_name,
interval,
history)
except Exception as ex:
LOG.warning(_LW("Exception at getCPGStatData() "
"for cpg: '%(cpg_name)s' "
"Reason: '%(reason)s'") %
{'cpg_name': cpg_name, 'reason': ex})
if 'numTDVVs' in cpg:
total_volumes = int(
cpg['numFPVVs'] + cpg['numTPVVs'] + cpg['numTDVVs']
)
else:
total_volumes = int(
cpg['numFPVVs'] + cpg['numTPVVs']
)
if 'limitMiB' not in cpg['SDGrowth']:
# cpg usable free space
cpg_avail_space = (
self.client.getCPGAvailableSpace(cpg_name))
free_capacity = int(
cpg_avail_space['usableFreeMiB'] * const)
# total_capacity is the best we can do for a limitless cpg
total_capacity = int(
(cpg['SDUsage']['usedMiB'] +
cpg['UsrUsage']['usedMiB'] +
cpg_avail_space['usableFreeMiB']) * const)
else:
total_capacity = int(cpg['SDGrowth']['limitMiB'] * const)
free_capacity = int((cpg['SDGrowth']['limitMiB'] -
(cpg['UsrUsage']['usedMiB'] +
cpg['SDUsage']['usedMiB'])) * const)
capacity_utilization = (
(float(total_capacity - free_capacity) /
float(total_capacity)) * 100)
provisioned_capacity = int((cpg['UsrUsage']['totalMiB'] +
cpg['SAUsage']['totalMiB'] +
cpg['SDUsage']['totalMiB']) *
const)
except hpeexceptions.HTTPNotFound:
err = (_("CPG (%s) doesn't exist on array")
% cpg_name)
LOG.error(err)
raise exception.InvalidInput(reason=err)
pool = {'pool_name': cpg_name,
'total_capacity_gb': total_capacity,
'free_capacity_gb': free_capacity,
'provisioned_capacity_gb': provisioned_capacity,
'QoS_support': qos_support,
'thin_provisioning_support': thin_support,
'thick_provisioning_support': True,
'max_over_subscription_ratio': (
self.config.safe_get('max_over_subscription_ratio')),
'reserved_percentage': (
self.config.safe_get('reserved_percentage')),
'location_info': ('HPE3PARDriver:%(sys_id)s:%(dest_cpg)s' %
{'sys_id': info['serialNumber'],
'dest_cpg': cpg_name}),
'total_volumes': total_volumes,
'capacity_utilization': capacity_utilization,
THROUGHPUT: stat_capabilities[THROUGHPUT],
BANDWIDTH: stat_capabilities[BANDWIDTH],
LATENCY: stat_capabilities[LATENCY],
IO_SIZE: stat_capabilities[IO_SIZE],
QUEUE_LENGTH: stat_capabilities[QUEUE_LENGTH],
AVG_BUSY_PERC: stat_capabilities[AVG_BUSY_PERC],
'filter_function': filter_function,
'goodness_function': goodness_function,
'multiattach': True,
'consistencygroup_support': True,
}
if remotecopy_support:
pool['replication_enabled'] = self._replication_enabled
pool['replication_type'] = ['sync', 'periodic']
pool['replication_count'] = len(self._replication_targets)
pools.append(pool)
self.stats = {'driver_version': '3.0',
'storage_protocol': None,
'vendor_name': 'Hewlett Packard Enterprise',
'volume_backend_name': None,
'array_id': info['id'],
'replication_enabled': self._replication_enabled,
'replication_targets': self._get_replication_targets(),
'pools': pools}
def _check_license_enabled(self, valid_licenses,
license_to_check, capability):
"""Check a license against valid licenses on the array."""
if valid_licenses:
for license in valid_licenses:
if license_to_check in license.get('name'):
return True
LOG.debug(("'%(capability)s' requires a '%(license)s' "
"license which is not installed.") %
{'capability': capability,
'license': license_to_check})
return False
def _get_vlun(self, volume_name, hostname, lun_id=None, nsp=None):
"""find a VLUN on a 3PAR host."""
vluns = self.client.getHostVLUNs(hostname)
found_vlun = None
for vlun in vluns:
if volume_name in vlun['volumeName']:
if lun_id is not None:
if vlun['lun'] == lun_id:
if nsp:
port = self.build_portPos(nsp)
if vlun['portPos'] == port:
found_vlun = vlun
break
else:
found_vlun = vlun
break
else:
found_vlun = vlun
break
if found_vlun is None:
LOG.info(_LI("3PAR vlun %(name)s not found on host %(host)s"),
{'name': volume_name, 'host': hostname})
return found_vlun
def create_vlun(self, volume, host, nsp=None, lun_id=None):
"""Create a VLUN.
In order to export a volume on a 3PAR box, we have to create a VLUN.
"""
volume_name = self._get_3par_vol_name(volume['id'])
vlun_info = self._create_3par_vlun(volume_name, host['name'], nsp,
lun_id=lun_id)
return self._get_vlun(volume_name,
host['name'],
vlun_info['lun_id'],
nsp)
def delete_vlun(self, volume, hostname):
volume_name = self._get_3par_vol_name(volume['id'])
vluns = self.client.getHostVLUNs(hostname)
# When deleting VLUNs, you simply need to remove the template VLUN
# and any active VLUNs will be automatically removed. Template
# VLUNs are marked as active: False.
volume_vluns = []
for vlun in vluns:
if volume_name in vlun['volumeName']:
# template VLUNs are 'active' = False
if not vlun['active']:
volume_vluns.append(vlun)
if not volume_vluns:
msg = (
_LW("3PAR vlun for volume %(name)s not found on "
"host %(host)s") % {'name': volume_name, 'host': hostname})
LOG.warning(msg)
return
# VLUN Type of MATCHED_SET 4 requires the port to be provided
for vlun in volume_vluns:
if 'portPos' in vlun:
self.client.deleteVLUN(volume_name, vlun['lun'],
hostname=hostname,
port=vlun['portPos'])
else:
self.client.deleteVLUN(volume_name, vlun['lun'],
hostname=hostname)
# Determine if there are other volumes attached to the host.
# This will determine whether we should try removing host from host set
# and deleting the host.
vluns = []
try:
vluns = self.client.getHostVLUNs(hostname)
except hpeexceptions.HTTPNotFound:
LOG.debug("All VLUNs removed from host %s", hostname)
pass
for vlun in vluns:
if volume_name not in vlun['volumeName']:
# Found another volume
break
else:
# We deleted the last vlun, so try to delete the host too.
# This check avoids the old unnecessary try/fail when vluns exist
# but adds a minor race condition if a vlun is manually deleted
# externally at precisely the wrong time. Worst case is leftover
# host, so it is worth the unlikely risk.
try:
self._delete_3par_host(hostname)
except Exception as ex:
# Any exception down here is only logged. The vlun is deleted.
# If the host is in a host set, the delete host will fail and
# the host will remain in the host set. This is desired
# because cinder was not responsible for the host set
# assignment. The host set could be used outside of cinder
# for future needs (e.g. export volume to host set).
# The log info explains why the host was left alone.
LOG.info(_LI("3PAR vlun for volume '%(name)s' was deleted, "
"but the host '%(host)s' was not deleted "
"because: %(reason)s"),
{'name': volume_name, 'host': hostname,
'reason': ex.get_description()})
def _get_volume_type(self, type_id):
ctxt = context.get_admin_context()
return volume_types.get_volume_type(ctxt, type_id)
def _get_key_value(self, hpe3par_keys, key, default=None):
if hpe3par_keys is not None and key in hpe3par_keys:
return hpe3par_keys[key]
else:
return default
def _get_qos_value(self, qos, key, default=None):
if key in qos:
return qos[key]
else:
return default
def _get_qos_by_volume_type(self, volume_type):
qos = {}
qos_specs_id = volume_type.get('qos_specs_id')
specs = volume_type.get('extra_specs')
# NOTE(kmartin): We prefer the qos_specs association
# and override any existing extra-specs settings
# if present.
if qos_specs_id is not None:
kvs = qos_specs.get_qos_specs(context.get_admin_context(),
qos_specs_id)['specs']
else:
kvs = specs
for key, value in kvs.items():
if 'qos:' in key:
fields = key.split(':')
key = fields[1]
if key in self.hpe_qos_keys:
qos[key] = value
return qos
def _get_keys_by_volume_type(self, volume_type):
hpe3par_keys = {}
specs = volume_type.get('extra_specs')
for key, value in specs.items():
if ':' in key:
fields = key.split(':')
key = fields[1]
if key in self.hpe3par_valid_keys:
hpe3par_keys[key] = value
return hpe3par_keys
def _set_qos_rule(self, qos, vvs_name):
min_io = self._get_qos_value(qos, 'minIOPS')
max_io = self._get_qos_value(qos, 'maxIOPS')
min_bw = self._get_qos_value(qos, 'minBWS')
max_bw = self._get_qos_value(qos, 'maxBWS')
latency = self._get_qos_value(qos, 'latency')
priority = self._get_qos_value(qos, 'priority', 'normal')
qosRule = {}
if min_io:
qosRule['ioMinGoal'] = int(min_io)
if max_io is None:
qosRule['ioMaxLimit'] = int(min_io)
if max_io:
qosRule['ioMaxLimit'] = int(max_io)
if min_io is None:
qosRule['ioMinGoal'] = int(max_io)
if min_bw:
qosRule['bwMinGoalKB'] = int(min_bw) * units.Ki
if max_bw is None:
qosRule['bwMaxLimitKB'] = int(min_bw) * units.Ki
if max_bw:
qosRule['bwMaxLimitKB'] = int(max_bw) * units.Ki
if min_bw is None:
qosRule['bwMinGoalKB'] = int(max_bw) * units.Ki
if latency:
qosRule['latencyGoal'] = int(latency)
if priority:
qosRule['priority'] = self.qos_priority_level.get(priority.lower())
try:
self.client.createQoSRules(vvs_name, qosRule)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error creating QOS rule %s"), qosRule)
def get_flash_cache_policy(self, hpe3par_keys):
if hpe3par_keys is not None:
# First check list of extra spec keys
val = self._get_key_value(hpe3par_keys, 'flash_cache', None)
if val is not None:
# If requested, see if supported on back end
if self.API_VERSION < FLASH_CACHE_API_VERSION:
err = (_("Flash Cache Policy requires "
"WSAPI version '%(fcache_version)s' "
"version '%(version)s' is installed.") %
{'fcache_version': FLASH_CACHE_API_VERSION,
'version': self.API_VERSION})
LOG.error(err)
raise exception.InvalidInput(reason=err)
else:
if val.lower() == 'true':
return self.client.FLASH_CACHE_ENABLED
else:
return self.client.FLASH_CACHE_DISABLED
return None
def _set_flash_cache_policy_in_vvs(self, flash_cache, vvs_name):
# Update virtual volume set
if flash_cache:
try:
self.client.modifyVolumeSet(vvs_name,
flashCachePolicy=flash_cache)
LOG.info(_LI("Flash Cache policy set to %s"), flash_cache)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error setting Flash Cache policy "
"to %s - exception"), flash_cache)
def _add_volume_to_volume_set(self, volume, volume_name,
cpg, vvs_name, qos, flash_cache):
if vvs_name is not None:
# Admin has set a volume set name to add the volume to
try:
self.client.addVolumeToVolumeSet(vvs_name, volume_name)
except hpeexceptions.HTTPNotFound:
msg = _('VV Set %s does not exist.') % vvs_name
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
else:
vvs_name = self._get_3par_vvs_name(volume['id'])
domain = self.get_domain(cpg)
self.client.createVolumeSet(vvs_name, domain)
try:
self._set_qos_rule(qos, vvs_name)
self._set_flash_cache_policy_in_vvs(flash_cache, vvs_name)
self.client.addVolumeToVolumeSet(vvs_name, volume_name)
except Exception as ex:
# Cleanup the volume set if unable to create the qos rule
# or flash cache policy or add the volume to the volume set
self.client.deleteVolumeSet(vvs_name)
raise exception.CinderException(ex)
def get_cpg(self, volume, allowSnap=False):
volume_name = self._get_3par_vol_name(volume['id'])
vol = self.client.getVolume(volume_name)
if 'userCPG' in vol:
return vol['userCPG']
elif allowSnap:
return vol['snapCPG']
return None
def _get_3par_vol_comment(self, volume_name):
vol = self.client.getVolume(volume_name)
if 'comment' in vol:
return vol['comment']
return None
def validate_persona(self, persona_value):
"""Validate persona value.
If the passed in persona_value is not valid, raise InvalidInput,
otherwise return the persona ID.
:param persona_value:
:raises: exception.InvalidInput
:returns: persona ID
"""
if persona_value not in self.valid_persona_values:
err = (_("Must specify a valid persona %(valid)s,"
"value '%(persona)s' is invalid.") %
{'valid': self.valid_persona_values,
'persona': persona_value})
LOG.error(err)
raise exception.InvalidInput(reason=err)
# persona is set by the id so remove the text and return the id
# i.e for persona '1 - Generic' returns 1
persona_id = persona_value.split(' ')
return persona_id[0]
def get_persona_type(self, volume, hpe3par_keys=None):
default_persona = self.valid_persona_values[0]
type_id = volume.get('volume_type_id', None)
if type_id is not None:
volume_type = self._get_volume_type(type_id)
if hpe3par_keys is None:
hpe3par_keys = self._get_keys_by_volume_type(volume_type)
persona_value = self._get_key_value(hpe3par_keys, 'persona',
default_persona)
return self.validate_persona(persona_value)
def get_type_info(self, type_id):
"""Get 3PAR type info for the given type_id.
Reconciles VV Set, old-style extra-specs, and QOS specs
and returns commonly used info about the type.
:returns: hpe3par_keys, qos, volume_type, vvs_name
"""
volume_type = None
vvs_name = None
hpe3par_keys = {}
qos = {}
if type_id is not None:
volume_type = self._get_volume_type(type_id)
hpe3par_keys = self._get_keys_by_volume_type(volume_type)
vvs_name = self._get_key_value(hpe3par_keys, 'vvs')
if vvs_name is None:
qos = self._get_qos_by_volume_type(volume_type)
return hpe3par_keys, qos, volume_type, vvs_name
def get_volume_settings_from_type_id(self, type_id, pool):
"""Get 3PAR volume settings given a type_id.
Combines type info and config settings to return a dictionary
describing the 3PAR volume settings. Does some validation (CPG).
Uses pool as the default cpg (when not specified in volume type specs).
:param type_id: id of type to get settings for
:param pool: CPG to use if type does not have one set
:returns: dict
"""
hpe3par_keys, qos, volume_type, vvs_name = self.get_type_info(type_id)
# Default to pool extracted from host.
# If that doesn't work use the 1st CPG in the config as the default.
default_cpg = pool or self._client_conf['hpe3par_cpg'][0]
cpg = self._get_key_value(hpe3par_keys, 'cpg', default_cpg)
if cpg is not default_cpg:
# The cpg was specified in a volume type extra spec so it
# needs to be validated that it's in the correct domain.
# log warning here
msg = _LW("'hpe3par:cpg' is not supported as an extra spec "
"in a volume type. CPG's are chosen by "
"the cinder scheduler, as a pool, from the "
"cinder.conf entry 'hpe3par_cpg', which can "
"be a list of CPGs.")
versionutils.report_deprecated_feature(LOG, msg)
LOG.info(_LI("Using pool %(pool)s instead of %(cpg)s"),
{'pool': pool, 'cpg': cpg})
cpg = pool
self.validate_cpg(cpg)
# Look to see if the snap_cpg was specified in volume type
# extra spec, if not use hpe3par_cpg_snap from config as the
# default.
snap_cpg = self.config.hpe3par_cpg_snap
snap_cpg = self._get_key_value(hpe3par_keys, 'snap_cpg', snap_cpg)
# If it's still not set or empty then set it to the cpg.
if not snap_cpg:
snap_cpg = cpg
# if provisioning is not set use thin
default_prov = self.valid_prov_values[0]
prov_value = self._get_key_value(hpe3par_keys, 'provisioning',
default_prov)
# check for valid provisioning type
if prov_value not in self.valid_prov_values:
err = (_("Must specify a valid provisioning type %(valid)s, "
"value '%(prov)s' is invalid.") %
{'valid': self.valid_prov_values,
'prov': prov_value})
LOG.error(err)
raise exception.InvalidInput(reason=err)
tpvv = True
tdvv = False
if prov_value == "full":
tpvv = False
elif prov_value == "dedup":
tpvv = False
tdvv = True
if tdvv and (self.API_VERSION < DEDUP_API_VERSION):
err = (_("Dedup is a valid provisioning type, "
"but requires WSAPI version '%(dedup_version)s' "
"version '%(version)s' is installed.") %
{'dedup_version': DEDUP_API_VERSION,
'version': self.API_VERSION})
LOG.error(err)
raise exception.InvalidInput(reason=err)
return {'hpe3par_keys': hpe3par_keys,
'cpg': cpg, 'snap_cpg': snap_cpg,
'vvs_name': vvs_name, 'qos': qos,
'tpvv': tpvv, 'tdvv': tdvv, 'volume_type': volume_type}
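# Hedged sketch of the dict returned above (CPG names are made up):
# {'hpe3par_keys': {...}, 'cpg': 'OpenStackCPG',
#  'snap_cpg': 'OpenStackSnapCPG', 'vvs_name': None, 'qos': {},
#  'tpvv': True, 'tdvv': False, 'volume_type': <volume type or None>}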
def get_volume_settings_from_type(self, volume, host=None):
"""Get 3PAR volume settings given a volume.
Combines type info and config settings to return a dictionary
describing the 3PAR volume settings. Does some validation (CPG and
persona).
:param volume:
:param host: Optional host to use for default pool.
:returns: dict
"""
type_id = volume.get('volume_type_id', None)
pool = None
if host:
pool = volume_utils.extract_host(host['host'], 'pool')
else:
pool = volume_utils.extract_host(volume['host'], 'pool')
volume_settings = self.get_volume_settings_from_type_id(type_id, pool)
# check for valid persona even if we don't use it until
# attach time, this will give the end user notice that the
# persona type is invalid at volume creation time
self.get_persona_type(volume, volume_settings['hpe3par_keys'])
return volume_settings
def create_volume(self, volume):
LOG.debug('CREATE VOLUME (%(disp_name)s: %(vol_name)s %(id)s on '
'%(host)s)',
{'disp_name': volume['display_name'],
'vol_name': volume['name'],
'id': self._get_3par_vol_name(volume['id']),
'host': volume['host']})
try:
comments = {'volume_id': volume['id'],
'name': volume['name'],
'type': 'OpenStack'}
name = volume.get('display_name', None)
if name:
comments['display_name'] = name
# get the options supported by volume types
type_info = self.get_volume_settings_from_type(volume)
volume_type = type_info['volume_type']
vvs_name = type_info['vvs_name']
qos = type_info['qos']
cpg = type_info['cpg']
snap_cpg = type_info['snap_cpg']
tpvv = type_info['tpvv']
tdvv = type_info['tdvv']
flash_cache = self.get_flash_cache_policy(
type_info['hpe3par_keys'])
cg_id = volume.get('consistencygroup_id', None)
if cg_id:
vvs_name = self._get_3par_vvs_name(cg_id)
type_id = volume.get('volume_type_id', None)
if type_id is not None:
comments['volume_type_name'] = volume_type.get('name')
comments['volume_type_id'] = type_id
if vvs_name is not None:
comments['vvs'] = vvs_name
else:
comments['qos'] = qos
extras = {'comment': json.dumps(comments),
'snapCPG': snap_cpg,
'tpvv': tpvv}
# Only set the dedup option if the backend supports it.
if self.API_VERSION >= DEDUP_API_VERSION:
extras['tdvv'] = tdvv
capacity = self._capacity_from_size(volume['size'])
volume_name = self._get_3par_vol_name(volume['id'])
self.client.createVolume(volume_name, cpg, capacity, extras)
if qos or vvs_name or flash_cache is not None:
try:
self._add_volume_to_volume_set(volume, volume_name,
cpg, vvs_name, qos,
flash_cache)
except exception.InvalidInput as ex:
# Delete the volume if unable to add it to the volume set
self.client.deleteVolume(volume_name)
LOG.error(_LE("Exception: %s"), ex)
raise exception.CinderException(ex)
# v2 replication check
replication_flag = False
if self._volume_of_replicated_type(volume) and (
self._do_volume_replication_setup(volume)):
replication_flag = True
except hpeexceptions.HTTPConflict:
msg = _("Volume (%s) already exists on array") % volume_name
LOG.error(msg)
raise exception.Duplicate(msg)
except hpeexceptions.HTTPBadRequest as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.Invalid(ex.get_description())
except exception.InvalidInput as ex:
LOG.error(_LE("Exception: %s"), ex)
raise
except exception.CinderException as ex:
LOG.error(_LE("Exception: %s"), ex)
raise
except Exception as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.CinderException(ex)
return self._get_model_update(volume['host'], cpg,
replication=replication_flag,
provider_location=self.client.id)
def _copy_volume(self, src_name, dest_name, cpg, snap_cpg=None,
tpvv=True, tdvv=False):
# Virtual volume sets are not supported with the -online option
LOG.debug('Creating clone of a volume %(src)s to %(dest)s.',
{'src': src_name, 'dest': dest_name})
optional = {'tpvv': tpvv, 'online': True}
if snap_cpg is not None:
optional['snapCPG'] = snap_cpg
if self.API_VERSION >= DEDUP_API_VERSION:
optional['tdvv'] = tdvv
body = self.client.copyVolume(src_name, dest_name, cpg, optional)
return body['taskid']
def get_next_word(self, s, search_string):
"""Return the next word.
Search 's' for 'search_string'; if found, return the word that
immediately follows 'search_string' in 's'.
"""
word = re.search(search_string.strip(' ') + ' ([^ ]*)', s)
return word.groups()[0].strip(' ')
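# Illustrative example (the input string is made up): for
# s = "usr_cpg OpenStackCPG snp_cpg OpenStackSnapCPG" and
# search_string = "usr_cpg", the regex "usr_cpg ([^ ]*)" captures
# "OpenStackCPG", i.e. the word that follows the search string.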
def _get_3par_vol_comment_value(self, vol_comment, key):
comment_dict = dict(ast.literal_eval(vol_comment))
if key in comment_dict:
return comment_dict[key]
return None
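# Hedged sketch of the comment consumed here: the driver stores a
# serialized dict in the 3PAR volume comment, e.g.
# '{"volume_id": "<uuid>", "name": "volume-<uuid>", "qos": {}}', so
# _get_3par_vol_comment_value(comment, 'volume_id') returns the
# stored id, or None when the key is absent.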
def _get_model_update(self, volume_host, cpg, replication=False,
provider_location=None):
"""Get model_update dict to use when we select a pool.
The pools implementation uses a volume['host'] suffix of :poolname.
When the volume comes in with this selected pool, we sometimes use
a different pool (e.g. because the type says to use a different pool).
So in the several places that we do this, we need to return a model
update so that the volume will have the actual pool name in the host
suffix after the operation.
Given a volume_host, which may have the pool suffix, and
given the CPG we actually chose to use, return a dict to use for a
model update iff an update is needed.
:param volume_host: The volume's host string.
:param cpg: The actual pool (cpg) used, for example from the type.
:returns: dict Model update if we need to update volume host, else None
"""
model_update = {}
host = volume_utils.extract_host(volume_host, 'backend')
host_and_pool = volume_utils.append_host(host, cpg)
if volume_host != host_and_pool:
# Since we selected a pool based on type, update the model.
model_update['host'] = host_and_pool
if replication:
model_update['replication_status'] = 'enabled'
if replication and provider_location:
model_update['provider_location'] = provider_location
if not model_update:
model_update = None
return model_update
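# Illustrative example (host and pool names are assumptions): with
# volume_host = 'node@3parfc#typecpg' and cpg = 'actualcpg', the
# method returns {'host': 'node@3parfc#actualcpg'}; if the suffix
# already matches and no replication info is needed, it returns None.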
def _create_temp_snapshot(self, volume):
"""This creates a temporary snapshot of a volume.
This is used when cloning a volume so that we can then
issue an extend-volume request against the original volume.
"""
vol_name = self._get_3par_vol_name(volume['id'])
# create a brand new uuid for the temp snap
snap_uuid = uuid.uuid4().hex
# this will be named tss-%s
snap_name = self._get_3par_snap_name(snap_uuid, temp_snap=True)
extra = {'volume_name': volume['name'],
'volume_id': volume['id']}
optional = {'comment': json.dumps(extra)}
# let the snapshot die in an hour
optional['expirationHours'] = 1
LOG.info(_LI("Creating temp snapshot %(snap)s from volume %(vol)s"),
{'snap': snap_name, 'vol': vol_name})
self.client.createSnapshot(snap_name, vol_name, optional)
return self.client.getVolume(snap_name)
def create_cloned_volume(self, volume, src_vref):
try:
vol_name = self._get_3par_vol_name(volume['id'])
src_vol_name = self._get_3par_vol_name(src_vref['id'])
# if the sizes of the 2 volumes are the same
# we can do an online copy, which is a background process
# on the 3PAR that makes the volume instantly available.
# We can't resize a volume while it is being copied.
if volume['size'] == src_vref['size']:
LOG.debug("Creating a clone of same size, using online copy.")
# create a temporary snapshot
snapshot = self._create_temp_snapshot(src_vref)
type_info = self.get_volume_settings_from_type(volume)
cpg = type_info['cpg']
# make the 3PAR copy the contents.
# can't delete the original until the copy is done.
self._copy_volume(snapshot['name'], vol_name, cpg=cpg,
snap_cpg=type_info['snap_cpg'],
tpvv=type_info['tpvv'],
tdvv=type_info['tdvv'])
# v2 replication check
replication_flag = False
if self._volume_of_replicated_type(volume) and (
self._do_volume_replication_setup(volume)):
replication_flag = True
return self._get_model_update(volume['host'], cpg,
replication=replication_flag,
provider_location=self.client.id)
else:
# The size of the new volume is different, so we have to
# copy the volume and wait. Do the resize after the copy
# is complete.
LOG.debug("Clone a volume with a different target size. "
"Using non-online copy.")
# we first have to create the destination volume
model_update = self.create_volume(volume)
optional = {'priority': 1}
body = self.client.copyVolume(src_vol_name, vol_name, None,
optional=optional)
task_id = body['taskid']
task_status = self._wait_for_task_completion(task_id)
if task_status['status'] != self.client.TASK_DONE:
dbg = {'status': task_status, 'id': volume['id']}
msg = _('Copy volume task failed: create_cloned_volume '
'id=%(id)s, status=%(status)s.') % dbg
raise exception.CinderException(msg)
else:
LOG.debug('Copy volume completed: create_cloned_volume: '
'id=%s.', volume['id'])
return model_update
except hpeexceptions.HTTPForbidden:
raise exception.NotAuthorized()
except hpeexceptions.HTTPNotFound:
raise exception.NotFound()
except Exception as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.CinderException(ex)
def delete_volume(self, volume):
# v2 replication check
# If the volume type is replication enabled, we want to call our own
# method of deconstructing the volume and its dependencies
if self._volume_of_replicated_type(volume):
replication_status = volume.get('replication_status', None)
if replication_status and replication_status == "failed-over":
self._delete_replicated_failed_over_volume(volume)
else:
self._do_volume_replication_destroy(volume)
return
try:
volume_name = self._get_3par_vol_name(volume['id'])
# Try and delete the volume, it might fail here because
# the volume is part of a volume set which will have the
# volume set name in the error.
try:
self.client.deleteVolume(volume_name)
except hpeexceptions.HTTPBadRequest as ex:
if ex.get_code() == 29:
if self.client.isOnlinePhysicalCopy(volume_name):
LOG.debug("Found an online copy for %(volume)s",
{'volume': volume_name})
# the volume is in process of being cloned.
# stopOnlinePhysicalCopy will also delete
# the volume once it stops the copy.
self.client.stopOnlinePhysicalCopy(volume_name)
else:
LOG.error(_LE("Exception: %s"), ex)
raise
else:
LOG.error(_LE("Exception: %s"), ex)
raise
except hpeexceptions.HTTPConflict as ex:
if ex.get_code() == 34:
# This is a special case which means the
# volume is part of a volume set.
vvset_name = self.client.findVolumeSet(volume_name)
LOG.debug("Returned vvset_name = %s", vvset_name)
if vvset_name is not None and \
vvset_name.startswith('vvs-'):
# We have a single volume per volume set, so
# remove the volume set.
self.client.deleteVolumeSet(
self._get_3par_vvs_name(volume['id']))
elif vvset_name is not None:
# We have a pre-defined volume set just remove the
# volume and leave the volume set.
self.client.removeVolumeFromVolumeSet(vvset_name,
volume_name)
self.client.deleteVolume(volume_name)
elif ex.get_code() == 151:
if self.client.isOnlinePhysicalCopy(volume_name):
LOG.debug("Found an online copy for %(volume)s",
{'volume': volume_name})
# the volume is in process of being cloned.
# stopOnlinePhysicalCopy will also delete
# the volume once it stops the copy.
self.client.stopOnlinePhysicalCopy(volume_name)
else:
# the volume is being operated on in a background
# task on the 3PAR.
# TODO(walter-boring) do a retry a few times.
# for now lets log a better message
msg = _("The volume is currently busy on the 3PAR"
" and cannot be deleted at this time. "
"You can try again later.")
LOG.error(msg)
raise exception.VolumeIsBusy(message=msg)
elif (ex.get_code() == 32):
# Error 32 means that the volume has children
# see if we have any temp snapshots
snaps = self.client.getVolumeSnapshots(volume_name)
for snap in snaps:
if snap.startswith('tss-'):
# looks like we found a temp snapshot.
LOG.info(
_LI("Found a temporary snapshot %(name)s"),
{'name': snap})
try:
self.client.deleteVolume(snap)
except hpeexceptions.HTTPNotFound:
# if the volume is gone, it's as good as a
# successful delete
pass
except Exception:
msg = _("Volume has a temporary snapshot that "
"can't be deleted at this time.")
raise exception.VolumeIsBusy(message=msg)
try:
self.delete_volume(volume)
except Exception:
msg = _("Volume has children and cannot be deleted!")
raise exception.VolumeIsBusy(message=msg)
else:
LOG.error(_LE("Exception: %s"), ex)
raise exception.VolumeIsBusy(message=ex.get_description())
except hpeexceptions.HTTPNotFound as ex:
# We'll let this act as if it worked
# it helps clean up the cinder entries.
LOG.warning(_LW("Delete volume id not found. Removing from "
"cinder: %(id)s Ex: %(msg)s"),
{'id': volume['id'], 'msg': ex})
except hpeexceptions.HTTPForbidden as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.NotAuthorized(ex.get_description())
except hpeexceptions.HTTPConflict as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.VolumeIsBusy(message=ex.get_description())
except Exception as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.CinderException(ex)
def create_volume_from_snapshot(self, volume, snapshot, snap_name=None,
vvs_name=None):
"""Creates a volume from a snapshot."""
LOG.debug("Create Volume from Snapshot\n%(vol_name)s\n%(ss_name)s",
{'vol_name': pprint.pformat(volume['display_name']),
'ss_name': pprint.pformat(snapshot['display_name'])})
model_update = {}
if volume['size'] < snapshot['volume_size']:
err = ("You cannot reduce size of the volume. It must "
"be greater than or equal to the snapshot.")
LOG.error(err)
raise exception.InvalidInput(reason=err)
try:
if not snap_name:
snap_name = self._get_3par_snap_name(snapshot['id'])
volume_name = self._get_3par_vol_name(volume['id'])
extra = {'volume_id': volume['id'],
'snapshot_id': snapshot['id']}
type_id = volume.get('volume_type_id', None)
hpe3par_keys, qos, _volume_type, vvs = self.get_type_info(
type_id)
if vvs:
vvs_name = vvs
name = volume.get('display_name', None)
if name:
extra['display_name'] = name
description = volume.get('display_description', None)
if description:
extra['description'] = description
optional = {'comment': json.dumps(extra),
'readOnly': False}
self.client.createSnapshot(volume_name, snap_name, optional)
# Grow the snapshot if needed
growth_size = volume['size'] - snapshot['volume_size']
if growth_size > 0:
try:
LOG.debug('Converting to base volume type: %s.',
volume['id'])
model_update = self._convert_to_base_volume(volume)
growth_size_mib = growth_size * units.Gi / units.Mi
LOG.debug('Growing volume: %(id)s by %(size)s GiB.',
{'id': volume['id'], 'size': growth_size})
self.client.growVolume(volume_name, growth_size_mib)
except Exception as ex:
LOG.error(_LE("Error extending volume %(id)s. "
"Ex: %(ex)s"),
{'id': volume['id'], 'ex': ex})
# Delete the volume if unable to grow it
self.client.deleteVolume(volume_name)
raise exception.CinderException(ex)
# Check for flash cache setting in extra specs
flash_cache = self.get_flash_cache_policy(hpe3par_keys)
if qos or vvs_name or flash_cache is not None:
cpg_names = self._get_key_value(
hpe3par_keys, 'cpg', self._client_conf['hpe3par_cpg'])
try:
self._add_volume_to_volume_set(volume, volume_name,
cpg_names[0], vvs_name,
qos, flash_cache)
except Exception as ex:
# Delete the volume if unable to add it to the volume set
self.client.deleteVolume(volume_name)
LOG.error(_LE("Exception: %s"), ex)
raise exception.CinderException(ex)
# v2 replication check
if self._volume_of_replicated_type(volume) and (
self._do_volume_replication_setup(volume)):
model_update['replication_status'] = 'enabled'
model_update['provider_location'] = self.client.id
except hpeexceptions.HTTPForbidden as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.NotAuthorized()
except hpeexceptions.HTTPNotFound as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.NotFound()
except Exception as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.CinderException(ex)
return model_update
def create_snapshot(self, snapshot):
LOG.debug("Create Snapshot\n%s", pprint.pformat(snapshot))
try:
snap_name = self._get_3par_snap_name(snapshot['id'])
vol_name = self._get_3par_vol_name(snapshot['volume_id'])
extra = {'volume_name': snapshot['volume_name']}
vol_id = snapshot.get('volume_id', None)
if vol_id:
extra['volume_id'] = vol_id
try:
extra['display_name'] = snapshot['display_name']
except AttributeError:
pass
try:
extra['description'] = snapshot['display_description']
except AttributeError:
pass
optional = {'comment': json.dumps(extra),
'readOnly': True}
if self.config.hpe3par_snapshot_expiration:
optional['expirationHours'] = (
int(self.config.hpe3par_snapshot_expiration))
if self.config.hpe3par_snapshot_retention:
optional['retentionHours'] = (
int(self.config.hpe3par_snapshot_retention))
self.client.createSnapshot(snap_name, vol_name, optional)
except hpeexceptions.HTTPForbidden as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.NotAuthorized()
except hpeexceptions.HTTPNotFound as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.NotFound()
def migrate_volume(self, volume, host):
"""Migrate directly if source and dest are managed by same storage.
:param volume: A dictionary describing the volume to migrate
:param host: A dictionary describing the host to migrate to, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities.
:returns: (False, None) if the driver does not support migration,
(True, model_update) if successful
"""
dbg = {'id': volume['id'],
'host': host['host'],
'status': volume['status']}
LOG.debug('enter: migrate_volume: id=%(id)s, host=%(host)s, '
'status=%(status)s.', dbg)
ret = False, None
if volume['status'] in ['available', 'in-use']:
volume_type = None
if volume['volume_type_id']:
volume_type = self._get_volume_type(volume['volume_type_id'])
try:
ret = self.retype(volume, volume_type, None, host)
except Exception as e:
LOG.info(_LI('3PAR driver cannot perform migration. '
'Retype exception: %s'), e)
LOG.debug('leave: migrate_volume: id=%(id)s, host=%(host)s, '
'status=%(status)s.', dbg)
dbg_ret = {'supported': ret[0], 'model_update': ret[1]}
LOG.debug('migrate_volume result: %(supported)s, %(model_update)s',
dbg_ret)
return ret
def update_migrated_volume(self, context, volume, new_volume,
original_volume_status):
"""Rename the new (temp) volume to it's original name.
This method tries to rename the new volume to it's original
name after the migration has completed.
"""
LOG.debug("Update volume name for %(id)s", {'id': new_volume['id']})
name_id = None
provider_location = None
if original_volume_status == 'available':
# volume isn't attached and can be updated
original_name = self._get_3par_vol_name(volume['id'])
current_name = self._get_3par_vol_name(new_volume['id'])
try:
volumeMods = {'newName': original_name}
self.client.modifyVolume(current_name, volumeMods)
LOG.info(_LI("Volume name changed from %(tmp)s to %(orig)s"),
{'tmp': current_name, 'orig': original_name})
except Exception as e:
LOG.error(_LE("Changing the volume name from %(tmp)s to "
"%(orig)s failed because %(reason)s"),
{'tmp': current_name, 'orig': original_name,
'reason': e})
name_id = new_volume['_name_id'] or new_volume['id']
provider_location = new_volume['provider_location']
else:
# the backend can't change the name.
name_id = new_volume['_name_id'] or new_volume['id']
provider_location = new_volume['provider_location']
return {'_name_id': name_id, 'provider_location': provider_location}
def _wait_for_task_completion(self, task_id):
"""This waits for a 3PAR background task complete or fail.
This looks for a task to get out of the 'active' state.
"""
# Wait for the physical copy task to complete
def _wait_for_task(task_id):
status = self.client.getTask(task_id)
LOG.debug("3PAR Task id %(id)s status = %(status)s",
{'id': task_id,
'status': status['status']})
if status['status'] != self.client.TASK_ACTIVE:
self._task_status = status
raise loopingcall.LoopingCallDone()
self._task_status = None
timer = loopingcall.FixedIntervalLoopingCall(
_wait_for_task, task_id)
timer.start(interval=1).wait()
return self._task_status
def _convert_to_base_volume(self, volume, new_cpg=None):
try:
type_info = self.get_volume_settings_from_type(volume)
if new_cpg:
cpg = new_cpg
else:
cpg = type_info['cpg']
# Change the name such that it is unique since 3PAR
# names must be unique across all CPGs
volume_name = self._get_3par_vol_name(volume['id'])
temp_vol_name = volume_name.replace("osv-", "omv-")
# Create a physical copy of the volume
task_id = self._copy_volume(volume_name, temp_vol_name,
cpg, cpg, type_info['tpvv'],
type_info['tdvv'])
LOG.debug('Copy volume scheduled: convert_to_base_volume: '
'id=%s.', volume['id'])
task_status = self._wait_for_task_completion(task_id)
if task_status['status'] != self.client.TASK_DONE:
dbg = {'status': task_status, 'id': volume['id']}
msg = _('Copy volume task failed: convert_to_base_volume: '
'id=%(id)s, status=%(status)s.') % dbg
raise exception.CinderException(msg)
else:
LOG.debug('Copy volume completed: convert_to_base_volume: '
'id=%s.', volume['id'])
comment = self._get_3par_vol_comment(volume_name)
if comment:
self.client.modifyVolume(temp_vol_name, {'comment': comment})
LOG.debug('Volume rename completed: convert_to_base_volume: '
'id=%s.', volume['id'])
# Delete source volume after the copy is complete
self.client.deleteVolume(volume_name)
LOG.debug('Delete src volume completed: convert_to_base_volume: '
'id=%s.', volume['id'])
# Rename the new volume to the original name
self.client.modifyVolume(temp_vol_name, {'newName': volume_name})
LOG.info(_LI('Completed: convert_to_base_volume: '
'id=%s.'), volume['id'])
except hpeexceptions.HTTPConflict:
msg = _("Volume (%s) already exists on array.") % volume_name
LOG.error(msg)
raise exception.Duplicate(msg)
except hpeexceptions.HTTPBadRequest as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.Invalid(ex.get_description())
except exception.InvalidInput as ex:
LOG.error(_LE("Exception: %s"), ex)
raise
except exception.CinderException as ex:
LOG.error(_LE("Exception: %s"), ex)
raise
except Exception as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.CinderException(ex)
return self._get_model_update(volume['host'], cpg)
def delete_snapshot(self, snapshot):
LOG.debug("Delete Snapshot id %(id)s %(name)s",
{'id': snapshot['id'], 'name': pprint.pformat(snapshot)})
try:
snap_name = self._get_3par_snap_name(snapshot['id'])
self.client.deleteVolume(snap_name)
except hpeexceptions.HTTPForbidden as ex:
LOG.error(_LE("Exception: %s"), ex)
raise exception.NotAuthorized()
except hpeexceptions.HTTPNotFound as ex:
# We'll let this act as if it worked
# it helps clean up the cinder entries.
LOG.warning(_LW("Delete Snapshot id not found. Removing from "
"cinder: %(id)s Ex: %(msg)s"),
{'id': snapshot['id'], 'msg': ex})
except hpeexceptions.HTTPConflict as ex:
if (ex.get_code() == 32):
# Error 32 means that the snapshot has children
# see if we have any temp snapshots
snaps = self.client.getVolumeSnapshots(snap_name)
for snap in snaps:
if snap.startswith('tss-'):
LOG.info(
_LI("Found a temporary snapshot %(name)s"),
{'name': snap})
try:
self.client.deleteVolume(snap)
except hpeexceptions.HTTPNotFound:
# if the volume is gone, it's as good as a
# successful delete
pass
except Exception:
msg = _("Snapshot has a temporary snapshot that "
"can't be deleted at this time.")
raise exception.SnapshotIsBusy(message=msg)
try:
self.client.deleteVolume(snap_name)
except Exception:
msg = _("Snapshot has children and cannot be deleted!")
raise exception.SnapshotIsBusy(message=msg)
else:
LOG.error(_LE("Exception: %s"), ex)
raise exception.SnapshotIsBusy(message=ex.get_description())
def _get_3par_hostname_from_wwn_iqn(self, wwns, iqns):
if wwns is not None and not isinstance(wwns, list):
wwns = [wwns]
if iqns is not None and not isinstance(iqns, list):
iqns = [iqns]
out = self.client.getHosts()
hosts = out['members']
for host in hosts:
if 'iSCSIPaths' in host and iqns is not None:
iscsi_paths = host['iSCSIPaths']
for iscsi in iscsi_paths:
for iqn in iqns:
if iqn == iscsi['name']:
return host['name']
if 'FCPaths' in host and wwns is not None:
fc_paths = host['FCPaths']
for fc in fc_paths:
for wwn in wwns:
if wwn.upper() == fc['wwn'].upper():
return host['name']
def terminate_connection(self, volume, hostname, wwn=None, iqn=None):
"""Driver entry point to unattach a volume from an instance."""
# does 3par know this host by a different name?
hosts = None
if wwn:
hosts = self.client.queryHost(wwns=wwn)
elif iqn:
hosts = self.client.queryHost(iqns=[iqn])
if hosts and hosts['members'] and 'name' in hosts['members'][0]:
hostname = hosts['members'][0]['name']
try:
self.delete_vlun(volume, hostname)
return
except hpeexceptions.HTTPNotFound as e:
if 'host does not exist' in e.get_description():
# If a host is failed-over, we want to allow the detach to
# 'succeed' when it cannot find the host. We can simply
# return out of the terminate connection in order for things
# to be updated correctly.
if self._active_backend_id:
LOG.warning(_LW("Because the host is currently in a "
"failed-over state, the volume will not "
"be properly detached from the primary "
"array. The detach will be considered a "
"success as far as Cinder is concerned. "
"The volume can now be attached to the "
"secondary target."))
return
else:
# use the wwn to see if we can find the hostname
hostname = self._get_3par_hostname_from_wwn_iqn(wwn, iqn)
# no 3par host, re-throw
if hostname is None:
LOG.error(_LE("Exception: %s"), e)
raise
else:
# not a 'host does not exist' HTTPNotFound exception, re-throw
LOG.error(_LE("Exception: %s"), e)
raise
# try again with name retrieved from 3par
self.delete_vlun(volume, hostname)
def build_nsp(self, portPos):
return '%s:%s:%s' % (portPos['node'],
portPos['slot'],
portPos['cardPort'])
def build_portPos(self, nsp):
split = nsp.split(":")
portPos = {}
portPos['node'] = int(split[0])
portPos['slot'] = int(split[1])
portPos['cardPort'] = int(split[2])
return portPos
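# Minimal round-trip sketch:
# build_nsp({'node': 1, 'slot': 2, 'cardPort': 1}) gives '1:2:1', and
# build_portPos('1:2:1') gives back
# {'node': 1, 'slot': 2, 'cardPort': 1}.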
def tune_vv(self, old_tpvv, new_tpvv, old_tdvv, new_tdvv,
old_cpg, new_cpg, volume_name):
"""Tune the volume to change the userCPG and/or provisioningType.
The volume will be modified/tuned/converted to the new userCPG and
provisioningType, as needed.
TaskWaiter is used to make this function wait until the 3PAR task
is no longer active. Once the task is no longer active, it is either
done or in a state that we need to treat as an error.
"""
if old_tpvv == new_tpvv and old_tdvv == new_tdvv:
if new_cpg != old_cpg:
LOG.info(_LI("Modifying %(volume_name)s userCPG "
"from %(old_cpg)s"
" to %(new_cpg)s"),
{'volume_name': volume_name,
'old_cpg': old_cpg, 'new_cpg': new_cpg})
_response, body = self.client.modifyVolume(
volume_name,
{'action': 6,
'tuneOperation': 1,
'userCPG': new_cpg})
task_id = body['taskid']
status = self.TaskWaiter(self.client, task_id).wait_for_task()
if status['status'] != self.client.TASK_DONE:
msg = (_('Tune volume task stopped before it was done: '
'volume_name=%(volume_name)s, '
'task-status=%(status)s.') %
{'status': status, 'volume_name': volume_name})
raise exception.VolumeBackendAPIException(msg)
else:
if new_tpvv:
cop = self.CONVERT_TO_THIN
LOG.info(_LI("Converting %(volume_name)s to thin provisioning "
"with userCPG=%(new_cpg)s"),
{'volume_name': volume_name, 'new_cpg': new_cpg})
elif new_tdvv:
cop = self.CONVERT_TO_DEDUP
LOG.info(_LI("Converting %(volume_name)s to thin dedup "
"provisioning with userCPG=%(new_cpg)s"),
{'volume_name': volume_name, 'new_cpg': new_cpg})
else:
cop = self.CONVERT_TO_FULL
LOG.info(_LI("Converting %(volume_name)s to full provisioning "
"with userCPG=%(new_cpg)s"),
{'volume_name': volume_name, 'new_cpg': new_cpg})
try:
response, body = self.client.modifyVolume(
volume_name,
{'action': 6,
'tuneOperation': 1,
'userCPG': new_cpg,
'conversionOperation': cop})
except hpeexceptions.HTTPBadRequest as ex:
if ex.get_code() == 40 and "keepVV" in six.text_type(ex):
# Cannot retype with snapshots because we don't want to
# use keepVV and have straggling volumes. Log additional
# info and then raise.
LOG.info(_LI("tunevv failed because the volume '%s' "
"has snapshots."), volume_name)
raise
task_id = body['taskid']
status = self.TaskWaiter(self.client, task_id).wait_for_task()
if status['status'] != self.client.TASK_DONE:
msg = (_('Tune volume task stopped before it was done: '
'volume_name=%(volume_name)s, '
'task-status=%(status)s.') %
{'status': status, 'volume_name': volume_name})
raise exception.VolumeBackendAPIException(msg)
def _retype_pre_checks(self, volume, host, new_persona,
old_cpg, new_cpg,
new_snap_cpg):
"""Test retype parameters before making retype changes.
Do pre-retype parameter validation. These checks will
raise an exception if we should not attempt this retype.
"""
if new_persona:
self.validate_persona(new_persona)
if host is not None:
(host_type, host_id, _host_cpg) = (
host['capabilities']['location_info']).split(':')
if host_type != 'HPE3PARDriver':
reason = (_("Cannot retype from HPE3PARDriver to %s.") %
host_type)
raise exception.InvalidHost(reason)
sys_info = self.client.getStorageSystemInfo()
if host_id != sys_info['serialNumber']:
reason = (_("Cannot retype from one 3PAR array to another."))
raise exception.InvalidHost(reason)
# Validate new_snap_cpg. A white-space snapCPG will fail eventually,
# but we'd prefer to fail fast -- if this ever happens.
if not new_snap_cpg or new_snap_cpg.isspace():
reason = (_("Invalid new snapCPG name for retype. "
"new_snap_cpg='%s'.") % new_snap_cpg)
raise exception.InvalidInput(reason)
# Check to make sure CPGs are in the same domain
domain = self.get_domain(old_cpg)
if domain != self.get_domain(new_cpg):
reason = (_('Cannot retype to a CPG in a different domain.'))
raise exception.Invalid3PARDomain(reason)
if domain != self.get_domain(new_snap_cpg):
reason = (_('Cannot retype to a snap CPG in a different domain.'))
raise exception.Invalid3PARDomain(reason)
def _retype(self, volume, volume_name, new_type_name, new_type_id, host,
new_persona, old_cpg, new_cpg, old_snap_cpg, new_snap_cpg,
old_tpvv, new_tpvv, old_tdvv, new_tdvv,
old_vvs, new_vvs, old_qos, new_qos,
old_flash_cache, new_flash_cache,
old_comment):
action = "volume:retype"
self._retype_pre_checks(volume, host, new_persona,
old_cpg, new_cpg,
new_snap_cpg)
flow_name = action.replace(":", "_") + "_api"
retype_flow = linear_flow.Flow(flow_name)
# Keep this linear and do the big tunevv last. Everything leading
# up to that is reversible, but we'd let the 3PAR deal with tunevv
# errors on its own.
retype_flow.add(
ModifyVolumeTask(action),
ModifySpecsTask(action),
TuneVolumeTask(action))
taskflow.engines.run(
retype_flow,
store={'common': self,
'volume_name': volume_name, 'volume': volume,
'old_tpvv': old_tpvv, 'new_tpvv': new_tpvv,
'old_tdvv': old_tdvv, 'new_tdvv': new_tdvv,
'old_cpg': old_cpg, 'new_cpg': new_cpg,
'old_snap_cpg': old_snap_cpg, 'new_snap_cpg': new_snap_cpg,
'old_vvs': old_vvs, 'new_vvs': new_vvs,
'old_qos': old_qos, 'new_qos': new_qos,
'old_flash_cache': old_flash_cache,
'new_flash_cache': new_flash_cache,
'new_type_name': new_type_name, 'new_type_id': new_type_id,
'old_comment': old_comment
})
def _retype_from_old_to_new(self, volume, new_type, old_volume_settings,
host):
"""Convert the volume to be of the new type. Given old type settings.
Returns True if the retype was successful.
Uses taskflow to revert changes if errors occur.
:param volume: A dictionary describing the volume to retype
:param new_type: A dictionary describing the volume type to convert to
:param old_volume_settings: Volume settings describing the old type.
:param host: A dictionary describing the host, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities. Host validation
is just skipped if host is None.
"""
volume_id = volume['id']
volume_name = self._get_3par_vol_name(volume_id)
new_type_name = None
new_type_id = None
if new_type:
new_type_name = new_type['name']
new_type_id = new_type['id']
pool = None
if host:
pool = volume_utils.extract_host(host['host'], 'pool')
else:
pool = volume_utils.extract_host(volume['host'], 'pool')
new_volume_settings = self.get_volume_settings_from_type_id(
new_type_id, pool)
new_cpg = new_volume_settings['cpg']
new_snap_cpg = new_volume_settings['snap_cpg']
new_tpvv = new_volume_settings['tpvv']
new_tdvv = new_volume_settings['tdvv']
new_qos = new_volume_settings['qos']
new_vvs = new_volume_settings['vvs_name']
new_persona = None
new_hpe3par_keys = new_volume_settings['hpe3par_keys']
if 'persona' in new_hpe3par_keys:
new_persona = new_hpe3par_keys['persona']
new_flash_cache = self.get_flash_cache_policy(new_hpe3par_keys)
old_qos = old_volume_settings['qos']
old_vvs = old_volume_settings['vvs_name']
old_hpe3par_keys = old_volume_settings['hpe3par_keys']
old_flash_cache = self.get_flash_cache_policy(old_hpe3par_keys)
# Get the current volume info because we can get in a bad state
# if we trust that all the volume type settings are still the
# same settings that were used with this volume.
old_volume_info = self.client.getVolume(volume_name)
old_tpvv = old_volume_info['provisioningType'] == self.THIN
old_tdvv = old_volume_info['provisioningType'] == self.DEDUP
old_cpg = old_volume_info['userCPG']
old_comment = old_volume_info['comment']
old_snap_cpg = None
if 'snapCPG' in old_volume_info:
old_snap_cpg = old_volume_info['snapCPG']
LOG.debug("retype old_volume_info=%s", old_volume_info)
LOG.debug("retype old_volume_settings=%s", old_volume_settings)
LOG.debug("retype new_volume_settings=%s", new_volume_settings)
self._retype(volume, volume_name, new_type_name, new_type_id,
host, new_persona, old_cpg, new_cpg,
old_snap_cpg, new_snap_cpg, old_tpvv, new_tpvv,
old_tdvv, new_tdvv, old_vvs, new_vvs,
old_qos, new_qos, old_flash_cache, new_flash_cache,
old_comment)
if host:
return True, self._get_model_update(host['host'], new_cpg)
else:
return True, self._get_model_update(volume['host'], new_cpg)
def _retype_from_no_type(self, volume, new_type):
"""Convert the volume to be of the new type. Starting from no type.
Returns True if the retype was successful.
Uses taskflow to revert changes if errors occur.
:param volume: A dictionary describing the volume to retype. The
volume's existing volume-type is not used here; None is assumed.
:param new_type: A dictionary describing the volume type to convert to
"""
pool = volume_utils.extract_host(volume['host'], 'pool')
none_type_settings = self.get_volume_settings_from_type_id(None, pool)
return self._retype_from_old_to_new(volume, new_type,
none_type_settings, None)
def retype(self, volume, new_type, diff, host):
"""Convert the volume to be of the new type.
Returns True if the retype was successful.
Uses taskflow to revert changes if errors occur.
:param volume: A dictionary describing the volume to retype
:param new_type: A dictionary describing the volume type to convert to
:param diff: A dictionary with the difference between the two types
:param host: A dictionary describing the host, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities. Host validation
is just skipped if host is None.
"""
LOG.debug(("enter: retype: id=%(id)s, new_type=%(new_type)s,"
"diff=%(diff)s, host=%(host)s"), {'id': volume['id'],
'new_type': new_type,
'diff': diff,
'host': host})
old_volume_settings = self.get_volume_settings_from_type(volume, host)
return self._retype_from_old_to_new(volume, new_type,
old_volume_settings, host)
def find_existing_vlun(self, volume, host):
"""Finds an existing VLUN for a volume on a host.
Returns an existing VLUN's information. If no existing VLUN is found,
None is returned.
:param volume: A dictionary describing a volume.
:param host: A dictionary describing a host.
"""
existing_vlun = None
try:
vol_name = self._get_3par_vol_name(volume['id'])
host_vluns = self.client.getHostVLUNs(host['name'])
# The first existing VLUN found will be returned.
for vlun in host_vluns:
if vlun['volumeName'] == vol_name:
existing_vlun = vlun
break
except hpeexceptions.HTTPNotFound:
# ignore, no existing VLUNs were found
LOG.debug("No existing VLUNs were found for host/volume "
"combination: %(host)s, %(vol)s",
{'host': host['name'],
'vol': vol_name})
pass
return existing_vlun
def find_existing_vluns(self, volume, host):
existing_vluns = []
try:
vol_name = self._get_3par_vol_name(volume['id'])
host_vluns = self.client.getHostVLUNs(host['name'])
for vlun in host_vluns:
if vlun['volumeName'] == vol_name:
existing_vluns.append(vlun)
except hpeexceptions.HTTPNotFound:
# ignore, no existing VLUNs were found
LOG.debug("No existing VLUNs were found for host/volume "
"combination: %(host)s, %(vol)s",
{'host': host['name'],
'vol': vol_name})
pass
return existing_vluns
# v2 replication methods
def failover_host(self, context, volumes, secondary_backend_id):
"""Force failover to a secondary replication target."""
# Ensure replication is enabled before we try and failover.
if not self._replication_enabled:
msg = _LE("Issuing a fail-over failed because replication is "
"not properly configured.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
# Check to see if the user requested to failback.
if (secondary_backend_id and
secondary_backend_id == self.FAILBACK_VALUE):
volume_update_list = self._replication_failback(volumes)
target_id = None
else:
# Find the failover target.
failover_target = None
for target in self._replication_targets:
if target['backend_id'] == secondary_backend_id:
failover_target = target
break
if not failover_target:
msg = _("A valid secondary target MUST be specified in order "
"to failover.")
LOG.error(msg)
raise exception.InvalidReplicationTarget(reason=msg)
target_id = failover_target['backend_id']
# For each volume, if it is replicated, we want to fail it over.
volume_update_list = []
for volume in volumes:
if self._volume_of_replicated_type(volume):
try:
# Try and stop remote-copy on main array. We eat the
# exception here because when an array goes down, the
# groups will stop automatically.
rcg_name = self._get_3par_rcg_name(volume['id'])
self.client.stopRemoteCopy(rcg_name)
except Exception:
pass
cl = None
try:
# Failover to secondary array.
remote_rcg_name = self._get_3par_remote_rcg_name(
volume['id'], volume['provider_location'])
cl = self._create_replication_client(failover_target)
cl.recoverRemoteCopyGroupFromDisaster(
remote_rcg_name, self.RC_ACTION_CHANGE_TO_PRIMARY)
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'replication_status': 'failed-over'}})
except Exception as ex:
msg = (_LE("There was a problem with the failover "
"(%(error)s) and it was unsuccessful. "
"Volume '%(volume)s will not be available "
"on the failed over target."),
{'error': six.text_type(ex),
'volume': volume['id']})
LOG.error(msg)
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'replication_status': 'error'}})
finally:
self._destroy_replication_client(cl)
else:
# If the volume is not of replicated type, we need to
# force the status into error state so a user knows they
# do not have access to the volume.
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'status': 'error'}})
return target_id, volume_update_list
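# Hedged sketch of the return value: target_id is the backend_id we
# failed over to (None on failback), and volume_update_list looks
# like [{'volume_id': '<uuid>',
#        'updates': {'replication_status': 'failed-over'}}, ...].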
def _replication_failback(self, volumes):
# Make sure the proper steps on the backend have been completed before
# we allow a fail-back.
if not self._is_host_ready_for_failback(volumes):
msg = _("The host is not ready to be failed back. Please "
"resynchronize the volumes and resume replication on the "
"3PAR backends.")
LOG.error(msg)
raise exception.InvalidReplicationTarget(reason=msg)
# Update the volumes status to available.
volume_update_list = []
for volume in volumes:
if self._volume_of_replicated_type(volume):
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'replication_status': 'available'}})
else:
# Upon failing back, we can move the non-replicated volumes
# back into available state.
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'status': 'available'}})
return volume_update_list
def _is_host_ready_for_failback(self, volumes):
"""Checks to make sure the volume has been synchronized
This ensures that all the remote copy targets have been restored
to their natural direction, and all of the volumes have been
fully synchronized.
"""
try:
for volume in volumes:
if self._volume_of_replicated_type(volume):
location = volume.get('provider_location')
remote_rcg_name = self._get_3par_remote_rcg_name(
volume['id'],
location)
rcg = self.client.getRemoteCopyGroup(remote_rcg_name)
# Make sure all targets are in their natural direction.
targets = rcg['targets']
for target in targets:
if target['roleReversed'] or (
target['state'] != self.RC_GROUP_STARTED):
return False
# Make sure all volumes are fully synced.
# Use distinct names so we do not shadow the 'volumes'
# parameter or the outer 'volume' loop variable.
rcg_volumes = rcg['volumes']
for rcg_volume in rcg_volumes:
remote_volumes = rcg_volume['remoteVolumes']
for remote_volume in remote_volumes:
if remote_volume['syncStatus'] != (
self.SYNC_STATUS_COMPLETED):
return False
except Exception:
# If there was a problem, we will return false so we can
# log an error in the parent function.
return False
return True
def _do_replication_setup(self):
replication_targets = []
replication_devices = self.config.replication_device
if replication_devices:
for dev in replication_devices:
remote_array = dict(dev.items())
# Override and set defaults for certain entries
remote_array['managed_backend_name'] = (
dev.get('managed_backend_name'))
remote_array['replication_mode'] = (
self._get_remote_copy_mode_num(
dev.get('replication_mode')))
remote_array['san_ssh_port'] = (
dev.get('san_ssh_port', self.config.san_ssh_port))
remote_array['ssh_conn_timeout'] = (
dev.get('ssh_conn_timeout', self.config.ssh_conn_timeout))
remote_array['san_private_key'] = (
dev.get('san_private_key', self.config.san_private_key))
# Format iscsi IPs correctly
iscsi_ips = dev.get('hpe3par_iscsi_ips')
if iscsi_ips:
remote_array['hpe3par_iscsi_ips'] = iscsi_ips.split(' ')
# Format hpe3par_iscsi_chap_enabled as a bool
remote_array['hpe3par_iscsi_chap_enabled'] = (
dev.get('hpe3par_iscsi_chap_enabled') == 'True')
array_name = remote_array['backend_id']
# Make sure we can log into the array, that it has been
# correctly configured, and its API version meets the
# minimum requirement.
cl = None
try:
cl = self._create_replication_client(remote_array)
array_id = six.text_type(cl.getStorageSystemInfo()['id'])
remote_array['id'] = array_id
wsapi_version = cl.getWsApiVersion()['build']
if wsapi_version < REMOTE_COPY_API_VERSION:
msg = (_LW("The secondary array must have an API "
"version of %(min_ver)s or higher. Array "
"'%(target)s' is on %(target_ver)s, "
"therefore it will not be added as a valid "
"replication target.") %
{'target': array_name,
'min_ver': REMOTE_COPY_API_VERSION,
'target_ver': wsapi_version})
LOG.warning(msg)
elif not self._is_valid_replication_array(remote_array):
msg = (_LW("'%s' is not a valid replication array. "
"In order to be valid, backend_id, "
"replication_mode, "
"hpe3par_api_url, hpe3par_username, "
"hpe3par_password, cpg_map, san_ip, "
"san_login, and san_password "
"must be specified. If the target is "
"managed, managed_backend_name must be set "
"as well.") % array_name)
LOG.warning(msg)
else:
replication_targets.append(remote_array)
except Exception:
msg = (_LE("Could not log in to 3PAR array (%s) with the "
"provided credentials.") % array_name)
LOG.error(msg)
finally:
self._destroy_replication_client(cl)
self._replication_targets = replication_targets
if self._is_replication_configured_correct():
self._replication_enabled = True
def _is_valid_replication_array(self, target):
required_flags = ['hpe3par_api_url', 'hpe3par_username',
'hpe3par_password', 'san_ip', 'san_login',
'san_password', 'backend_id',
'replication_mode', 'cpg_map']
try:
self.check_replication_flags(target, required_flags)
return True
except Exception:
return False
def _is_replication_configured_correct(self):
rep_flag = True
# Make sure there is at least one replication target.
if len(self._replication_targets) < 1:
LOG.error(_LE("There must be at least one valid replication "
"device configured."))
rep_flag = False
return rep_flag
def _is_replication_mode_correct(self, mode, sync_num):
rep_flag = True
# Make sure replication_mode is set to either sync|periodic.
mode = self._get_remote_copy_mode_num(mode)
if not mode:
LOG.error(_LE("Extra spec replication:mode must be set and must "
"be either 'sync' or 'periodic'."))
rep_flag = False
else:
# If replication:mode is periodic, replication_sync_period must be
# set between 300 - 31622400 seconds.
if mode == self.PERIODIC and (
sync_num < 300 or sync_num > 31622400):
LOG.error(_LE("Extra spec replication:sync_period must be "
"greater than 299 and less than 31622401 "
"seconds."))
rep_flag = False
return rep_flag
def _volume_of_replicated_type(self, volume):
replicated_type = False
volume_type_id = volume.get('volume_type_id')
if volume_type_id:
volume_type = self._get_volume_type(volume_type_id)
extra_specs = volume_type.get('extra_specs')
if extra_specs and 'replication_enabled' in extra_specs:
rep_val = extra_specs['replication_enabled']
replicated_type = (rep_val == "<is> True")
return replicated_type
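# Illustrative extra-specs sketch: a volume type whose extra_specs
# contain {'replication_enabled': '<is> True'} makes this return
# True; any other value, or no volume type at all, returns False.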
def _is_volume_in_remote_copy_group(self, volume):
rcg_name = self._get_3par_rcg_name(volume['id'])
try:
self.client.getRemoteCopyGroup(rcg_name)
return True
except hpeexceptions.HTTPNotFound:
return False
def _get_remote_copy_mode_num(self, mode):
ret_mode = None
if mode == "sync":
ret_mode = self.SYNC
if mode == "periodic":
ret_mode = self.PERIODIC
return ret_mode
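# Small mapping sketch: 'sync' maps to self.SYNC, 'periodic' maps to
# self.PERIODIC, and anything else (including None) maps to None,
# which callers treat as "replication mode not configured".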
def _get_3par_config(self):
self._do_replication_setup()
conf = None
if self._replication_enabled:
for target in self._replication_targets:
if target['backend_id'] == self._active_backend_id:
conf = target
break
self._build_3par_config(conf)
def _build_3par_config(self, conf=None):
"""Build 3PAR client config dictionary.
self._client_conf will contain values from self.config if the volume
is located on the primary array in order to properly contact it. If
the volume has been failed over and therefore on a secondary array,
self._client_conf will contain values on how to contact that array.
The only time we will return with entries from a secondary array is
with unmanaged replication.
"""
if conf:
self._client_conf['hpe3par_cpg'] = self._generate_hpe3par_cpgs(
conf.get('cpg_map'))
self._client_conf['hpe3par_username'] = (
conf.get('hpe3par_username'))
self._client_conf['hpe3par_password'] = (
conf.get('hpe3par_password'))
self._client_conf['san_ip'] = conf.get('san_ip')
self._client_conf['san_login'] = conf.get('san_login')
self._client_conf['san_password'] = conf.get('san_password')
self._client_conf['san_ssh_port'] = conf.get('san_ssh_port')
self._client_conf['ssh_conn_timeout'] = (
conf.get('ssh_conn_timeout'))
self._client_conf['san_private_key'] = conf.get('san_private_key')
self._client_conf['hpe3par_api_url'] = conf.get('hpe3par_api_url')
self._client_conf['hpe3par_iscsi_ips'] = (
conf.get('hpe3par_iscsi_ips'))
self._client_conf['hpe3par_iscsi_chap_enabled'] = (
conf.get('hpe3par_iscsi_chap_enabled'))
self._client_conf['iscsi_ip_address'] = (
conf.get('iscsi_ip_address'))
self._client_conf['iscsi_port'] = conf.get('iscsi_port')
else:
self._client_conf['hpe3par_cpg'] = (
self.config.hpe3par_cpg)
self._client_conf['hpe3par_username'] = (
self.config.hpe3par_username)
self._client_conf['hpe3par_password'] = (
self.config.hpe3par_password)
self._client_conf['san_ip'] = self.config.san_ip
self._client_conf['san_login'] = self.config.san_login
self._client_conf['san_password'] = self.config.san_password
self._client_conf['san_ssh_port'] = self.config.san_ssh_port
self._client_conf['ssh_conn_timeout'] = (
self.config.ssh_conn_timeout)
self._client_conf['san_private_key'] = self.config.san_private_key
self._client_conf['hpe3par_api_url'] = self.config.hpe3par_api_url
self._client_conf['hpe3par_iscsi_ips'] = (
self.config.hpe3par_iscsi_ips)
self._client_conf['hpe3par_iscsi_chap_enabled'] = (
self.config.hpe3par_iscsi_chap_enabled)
self._client_conf['iscsi_ip_address'] = (
self.config.iscsi_ip_address)
self._client_conf['iscsi_port'] = self.config.iscsi_port
def _get_cpg_from_cpg_map(self, cpg_map, target_cpg):
ret_target_cpg = None
cpg_pairs = cpg_map.split(' ')
for cpg_pair in cpg_pairs:
cpgs = cpg_pair.split(':')
cpg = cpgs[0]
dest_cpg = cpgs[1]
if cpg == target_cpg:
ret_target_cpg = dest_cpg
return ret_target_cpg
def _generate_hpe3par_cpgs(self, cpg_map):
hpe3par_cpgs = []
cpg_pairs = cpg_map.split(' ')
for cpg_pair in cpg_pairs:
cpgs = cpg_pair.split(':')
hpe3par_cpgs.append(cpgs[1])
return hpe3par_cpgs
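# Hedged cpg_map example (CPG names are made up): for
# cpg_map = 'LocalCPG1:RemoteCPG1 LocalCPG2:RemoteCPG2',
# _get_cpg_from_cpg_map(cpg_map, 'LocalCPG2') returns 'RemoteCPG2'
# and _generate_hpe3par_cpgs(cpg_map) returns
# ['RemoteCPG1', 'RemoteCPG2'].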
def _get_replication_targets(self):
replication_targets = []
for target in self._replication_targets:
replication_targets.append(target['backend_id'])
return replication_targets
def _do_volume_replication_setup(self, volume):
"""This function will do or ensure the following:
-Create volume on main array (already done in create_volume)
-Create Remote Copy Group on main array
-Add volume to Remote Copy Group on main array
-Start remote copy
If anything here fails, we will need to clean everything up in
reverse order, including the original volume.
"""
rcg_name = self._get_3par_rcg_name(volume['id'])
# If the volume is already in a remote copy group, return True
# after starting remote copy. If remote copy is already started,
# issuing this command again will be fine.
if self._is_volume_in_remote_copy_group(volume):
try:
self.client.startRemoteCopy(rcg_name)
except Exception:
pass
return True
try:
# Grab the extra_spec entries for replication and make sure they
# are set correctly.
volume_type = self._get_volume_type(volume["volume_type_id"])
extra_specs = volume_type.get("extra_specs")
replication_mode = extra_specs.get(
self.EXTRA_SPEC_REP_MODE, self.DEFAULT_REP_MODE)
replication_mode_num = self._get_remote_copy_mode_num(
replication_mode)
replication_sync_period = extra_specs.get(
self.EXTRA_SPEC_REP_SYNC_PERIOD, self.DEFAULT_SYNC_PERIOD)
if replication_sync_period:
replication_sync_period = int(replication_sync_period)
if not self._is_replication_mode_correct(replication_mode,
replication_sync_period):
msg = _("The replication mode was not configured correctly "
"in the volume type extra_specs. If replication:mode "
"is periodic, replication:sync_period must also be "
"specified and be between 300 and 31622400 seconds.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
vol_settings = self.get_volume_settings_from_type(volume)
local_cpg = vol_settings['cpg']
vol_name = self._get_3par_vol_name(volume['id'])
# Create remote copy group on main array.
rcg_targets = []
sync_targets = []
for target in self._replication_targets:
# Only add targets that match the volume's replication mode.
if target['replication_mode'] == replication_mode_num:
cpg = self._get_cpg_from_cpg_map(target['cpg_map'],
local_cpg)
rcg_target = {'targetName': target['backend_id'],
'mode': replication_mode_num,
'snapCPG': cpg,
'userCPG': cpg}
rcg_targets.append(rcg_target)
sync_target = {'targetName': target['backend_id'],
'syncPeriod': replication_sync_period}
sync_targets.append(sync_target)
optional = {'localSnapCPG': vol_settings['snap_cpg'],
'localUserCPG': local_cpg}
pool = volume_utils.extract_host(volume['host'], level='pool')
domain = self.get_domain(pool)
if domain:
optional["domain"] = domain
try:
self.client.createRemoteCopyGroup(rcg_name, rcg_targets,
optional)
except Exception as ex:
msg = (_("There was an error creating the remote copy "
"group: %s.") %
six.text_type(ex))
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
# Add volume to remote copy group.
rcg_targets = []
for target in self._replication_targets:
# Only add targets that match the volume's replication mode.
if target['replication_mode'] == replication_mode_num:
rcg_target = {'targetName': target['backend_id'],
'secVolumeName': vol_name}
rcg_targets.append(rcg_target)
optional = {'volumeAutoCreation': True}
try:
self.client.addVolumeToRemoteCopyGroup(rcg_name, vol_name,
rcg_targets,
optional=optional)
except Exception as ex:
msg = (_("There was an error adding the volume to the remote "
"copy group: %s.") %
six.text_type(ex))
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
# Check and see if we are in periodic mode. If we are, update
# Remote Copy Group to have a sync period.
if replication_sync_period and (
replication_mode_num == self.PERIODIC):
opt = {'targets': sync_targets}
try:
self.client.modifyRemoteCopyGroup(rcg_name, opt)
except Exception as ex:
msg = (_("There was an error setting the sync period for "
"the remote copy group: %s.") %
six.text_type(ex))
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
# Start the remote copy.
try:
self.client.startRemoteCopy(rcg_name)
except Exception as ex:
msg = (_("There was an error starting remote copy: %s.") %
six.text_type(ex))
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return True
except Exception as ex:
self._do_volume_replication_destroy(volume)
msg = (_("There was an error setting up a remote copy group "
"on the 3PAR arrays: ('%s'). The volume will not be "
"recognized as replication type.") %
six.text_type(ex))
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
def _do_volume_replication_destroy(self, volume, rcg_name=None):
"""This will completely remove all traces of a remote copy group.
It should be used when deleting a replication enabled volume
or if setting up a remote copy group fails. It will try and do the
following:
-Stop remote copy
-Remove volume from Remote Copy Group on main array
-Delete Remote Copy Group from main array
-Delete volume from main array
"""
if not rcg_name:
rcg_name = self._get_3par_rcg_name(volume['id'])
vol_name = self._get_3par_vol_name(volume['id'])
# Stop remote copy.
try:
self.client.stopRemoteCopy(rcg_name)
except Exception:
pass
# Delete volume from remote copy group on main array.
try:
self.client.removeVolumeFromRemoteCopyGroup(
rcg_name, vol_name, removeFromTarget=True)
except Exception:
pass
# Delete remote copy group on main array.
try:
self.client.removeRemoteCopyGroup(rcg_name)
except Exception:
pass
# Delete volume on the main array.
try:
self.client.deleteVolume(vol_name)
except Exception:
pass
def _delete_replicated_failed_over_volume(self, volume):
location = volume.get('provider_location')
rcg_name = self._get_3par_remote_rcg_name(volume['id'], location)
targets = self.client.getRemoteCopyGroup(rcg_name)['targets']
# When failed over, we want to temporarily disable config mirroring
# in order to be allowed to delete the volume and remote copy group
for target in targets:
target_name = target['targetName']
self.client.toggleRemoteCopyConfigMirror(target_name,
mirror_config=False)
# Do a regular volume replication destroy now that config mirroring is off
try:
self._do_volume_replication_destroy(volume, rcg_name)
except Exception as ex:
msg = (_("The failed-over volume could not be deleted: %s") %
six.text_type(ex))
LOG.error(msg)
raise exception.VolumeIsBusy(message=msg)
finally:
# Turn config mirroring back on
for target in targets:
target_name = target['targetName']
self.client.toggleRemoteCopyConfigMirror(target_name,
mirror_config=True)
class TaskWaiter(object):
"""TaskWaiter waits for task to be not active and returns status."""
def __init__(self, client, task_id, interval=1, initial_delay=0):
self.client = client
self.task_id = task_id
self.interval = interval
self.initial_delay = initial_delay
def _wait_for_task(self):
status = self.client.getTask(self.task_id)
LOG.debug("3PAR Task id %(id)s status = %(status)s",
{'id': self.task_id,
'status': status['status']})
if status['status'] != self.client.TASK_ACTIVE:
raise loopingcall.LoopingCallDone(status)
def wait_for_task(self):
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_task)
return timer.start(interval=self.interval,
initial_delay=self.initial_delay).wait()
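# Hedged usage sketch (mirrors how tune_vv uses this class above):
# status = self.TaskWaiter(self.client, task_id).wait_for_task()
# if status['status'] != self.client.TASK_DONE:
#     ...treat the tune/copy task as failed...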
class ModifyVolumeTask(flow_utils.CinderTask):
"""Task to change a volume's snapCPG and comment.
This is a task for changing the snapCPG and comment. It is intended for
use during retype(). These changes are done together with a single
modify request which should be fast and easy to revert.
Because we do not support retype with existing snapshots, we can change
the snapCPG without using a keepVV. If snapshots exist, then this will
fail, as desired.
This task does not change the userCPG or provisioningType. Those changes
may require tunevv, so they are done by the TuneVolumeTask.
The new comment will contain the new type, VVS and QOS information along
with whatever else was in the old comment dict.
The old comment and snapCPG are restored if revert is called.
"""
def __init__(self, action):
self.needs_revert = False
super(ModifyVolumeTask, self).__init__(addons=[action])
def _get_new_comment(self, old_comment, new_vvs, new_qos,
new_type_name, new_type_id):
# Modify the comment during ModifyVolume
comment_dict = dict(ast.literal_eval(old_comment))
if 'vvs' in comment_dict:
del comment_dict['vvs']
if 'qos' in comment_dict:
del comment_dict['qos']
if new_vvs:
comment_dict['vvs'] = new_vvs
elif new_qos:
comment_dict['qos'] = new_qos
else:
comment_dict['qos'] = {}
if new_type_name:
comment_dict['volume_type_name'] = new_type_name
else:
comment_dict.pop('volume_type_name', None)
if new_type_id:
comment_dict['volume_type_id'] = new_type_id
else:
comment_dict.pop('volume_type_id', None)
return comment_dict
def execute(self, common, volume_name, old_snap_cpg, new_snap_cpg,
old_comment, new_vvs, new_qos, new_type_name, new_type_id):
comment_dict = self._get_new_comment(
old_comment, new_vvs, new_qos, new_type_name, new_type_id)
if new_snap_cpg != old_snap_cpg:
# Modify the snap_cpg. This will fail with snapshots.
LOG.info(_LI("Modifying %(volume_name)s snap_cpg from "
"%(old_snap_cpg)s to %(new_snap_cpg)s."),
{'volume_name': volume_name,
'old_snap_cpg': old_snap_cpg,
'new_snap_cpg': new_snap_cpg})
common.client.modifyVolume(
volume_name,
{'snapCPG': new_snap_cpg,
'comment': json.dumps(comment_dict)})
self.needs_revert = True
else:
LOG.info(_LI("Modifying %s comments."), volume_name)
common.client.modifyVolume(
volume_name,
{'comment': json.dumps(comment_dict)})
self.needs_revert = True
def revert(self, common, volume_name, old_snap_cpg, new_snap_cpg,
old_comment, **kwargs):
if self.needs_revert:
LOG.info(_LI("Retype revert %(volume_name)s snap_cpg from "
"%(new_snap_cpg)s back to %(old_snap_cpg)s."),
{'volume_name': volume_name,
'new_snap_cpg': new_snap_cpg,
'old_snap_cpg': old_snap_cpg})
try:
common.client.modifyVolume(
volume_name,
{'snapCPG': old_snap_cpg, 'comment': old_comment})
except Exception as ex:
LOG.error(_LE("Exception during snapCPG revert: %s"), ex)
class TuneVolumeTask(flow_utils.CinderTask):
"""Task to change a volume's CPG and/or provisioning type.
This is a task for changing the CPG and/or provisioning type. It is
intended for use during retype(). This task has no revert. The current
design is to do this task last and do revert-able tasks first. Un-doing a
tunevv can be expensive and should be avoided.
"""
def __init__(self, action, **kwargs):
super(TuneVolumeTask, self).__init__(addons=[action])
def execute(self, common, old_tpvv, new_tpvv, old_tdvv, new_tdvv,
old_cpg, new_cpg, volume_name):
common.tune_vv(old_tpvv, new_tpvv, old_tdvv, new_tdvv,
old_cpg, new_cpg, volume_name)
class ModifySpecsTask(flow_utils.CinderTask):
"""Set/unset the QOS settings and/or VV set for the volume's new type.
This is a task for changing the QOS settings and/or VV set. It is intended
for use during retype(). If changes are made during execute(), then they
need to be undone if revert() is called (i.e., if a later task fails).
For 3PAR, we ignore QOS settings if a VVS is explicitly set, otherwise we
create a VV set and use that for QOS settings. That is why they are lumped
together here. Most of the decision-making about VVS vs. QOS settings vs.
old-style scoped extra-specs is handled in existing reusable code. Here
we mainly need to know what old stuff to remove before calling the function
that knows how to set the new stuff.
Basic task flow is as follows: Remove the volume from the old externally
created VVS (when appropriate), delete the old cinder-created VVS, call
the function that knows how to set a new VVS or QOS settings.
If any changes are made during execute, then revert needs to reverse them.
"""
def __init__(self, action):
self.needs_revert = False
super(ModifySpecsTask, self).__init__(addons=[action])
def execute(self, common, volume_name, volume, old_cpg, new_cpg,
old_vvs, new_vvs, old_qos, new_qos,
old_flash_cache, new_flash_cache):
if (old_vvs != new_vvs or
old_qos != new_qos or
old_flash_cache != new_flash_cache):
# Remove VV from old VV Set.
if old_vvs is not None and old_vvs != new_vvs:
common.client.removeVolumeFromVolumeSet(old_vvs,
volume_name)
self.needs_revert = True
# If any extra or qos specs changed then remove the old
# special VV set that we create. We'll recreate it
# as needed.
vvs_name = common._get_3par_vvs_name(volume['id'])
try:
common.client.deleteVolumeSet(vvs_name)
self.needs_revert = True
except hpeexceptions.HTTPNotFound as ex:
# HTTPNotFound(code=102) is OK. Set does not exist.
if ex.get_code() != 102:
LOG.error(_LE("Unexpected error when retype() tried to "
"deleteVolumeSet(%s)"), vvs_name)
raise
if new_vvs or new_qos or new_flash_cache:
common._add_volume_to_volume_set(
volume, volume_name, new_cpg, new_vvs, new_qos,
new_flash_cache)
self.needs_revert = True
def revert(self, common, volume_name, volume, old_vvs, new_vvs, old_qos,
old_cpg, **kwargs):
if self.needs_revert:
# If any extra or qos specs changed then remove the old
# special VV set that we create and recreate it per
# the old type specs.
vvs_name = common._get_3par_vvs_name(volume['id'])
try:
common.client.deleteVolumeSet(vvs_name)
except hpeexceptions.HTTPNotFound as ex:
# HTTPNotFound(code=102) is OK. Set does not exist.
if ex.get_code() != 102:
LOG.error(_LE("Unexpected error when retype() revert "
"tried to deleteVolumeSet(%s)"), vvs_name)
except Exception:
LOG.error(_LE("Unexpected error when retype() revert "
"tried to deleteVolumeSet(%s)"), vvs_name)
if old_vvs is not None or old_qos is not None:
try:
common._add_volume_to_volume_set(
volume, volume_name, old_cpg, old_vvs, old_qos)
except Exception as ex:
LOG.error(_LE("%(exception)s: Exception during revert of "
"retype for volume %(volume_name)s. "
"Original volume set/QOS settings may not "
"have been fully restored."),
{'exception': ex, 'volume_name': volume_name})
if new_vvs is not None and old_vvs != new_vvs:
try:
common.client.removeVolumeFromVolumeSet(
new_vvs, volume_name)
except Exception as ex:
LOG.error(_LE("%(exception)s: Exception during revert of "
"retype for volume %(volume_name)s. "
"Failed to remove from new volume set "
"%(new_vvs)s."),
{'exception': ex,
'volume_name': volume_name,
'new_vvs': new_vvs})
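# Illustrative composition sketch (not part of the file above): the retype
# docstrings describe running the revert-able tasks first and the expensive,
# non-revertable tune last, so a linear taskflow over these classes would be
# ordered roughly as below. The flow name and the in-function taskflow import
# are assumptions made for illustration only.
def _build_retype_flow_sketch(action):
    from taskflow.patterns import linear_flow
    flow = linear_flow.Flow('retype_sketch')
    flow.add(ModifyVolumeTask(action),   # revert-able: snapCPG / comment
             ModifySpecsTask(action),    # revert-able: VV set / QOS specs
             TuneVolumeTask(action))     # not revert-able: CPG / provisioning
    return flow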
|
{
"content_hash": "c0291cde19981823772666dbdbc33d99",
"timestamp": "",
"source": "github",
"line_count": 3696,
"max_line_length": 79,
"avg_line_length": 43.85768398268398,
"alnum_prop": 0.535052869251934,
"repo_name": "cloudbase/cinder",
"id": "eafbb90da1f05d684d529478155e94a867924eee",
"size": "162771",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "cinder/volume/drivers/hpe/hpe_3par_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17586629"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
}
|
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from pex.interpreter import PythonInterpreter
from pants.backend.python.binary_builder import PythonBinaryBuilder
from pants.backend.python.test_builder import PythonTestBuilder
from pants.backend.python.targets.python_binary import PythonBinary
from pants.backend.python.targets.python_tests import PythonTests
class PythonBuilder(object):
def __init__(self, run_tracker):
self._run_tracker = run_tracker
def build(self, targets, args, interpreter=None, conn_timeout=None, fast_tests=False,
debug=False):
test_targets = []
binary_targets = []
interpreter = interpreter or PythonInterpreter.get()
for target in targets:
assert target.is_python, "PythonBuilder can only build PythonTargets, given %s" % str(target)
# PythonBuilder supports PythonTests and PythonBinaries
for target in targets:
if isinstance(target, PythonTests):
test_targets.append(target)
elif isinstance(target, PythonBinary):
binary_targets.append(target)
rv = PythonTestBuilder(
test_targets,
args,
interpreter=interpreter,
conn_timeout=conn_timeout,
fast=fast_tests,
debug=debug).run()
if rv != 0:
return rv
for binary_target in binary_targets:
rv = PythonBinaryBuilder(
binary_target,
self._run_tracker,
interpreter=interpreter,
conn_timeout=conn_timeout).run()
if rv != 0:
return rv
return 0
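# Illustrative usage sketch (not part of the original module): given a pants
# RunTracker and a mixed list of python targets, a caller would drive the
# builder as below; test targets run first and the first non-zero exit code
# stops the build. The names here are placeholders, not real pants objects.
#
#     builder = PythonBuilder(run_tracker)
#     exit_code = builder.build(targets, args=[], fast_tests=True)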
|
{
"content_hash": "d4b407a0cb05961631289fac91764b0f",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 99,
"avg_line_length": 31.48076923076923,
"alnum_prop": 0.681734880879658,
"repo_name": "square/pants",
"id": "aeee0ec902c36ad3e0ab47288804c73a327d3602",
"size": "1784",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/python/python_builder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AGS Script",
"bytes": "273"
},
{
"name": "CSS",
"bytes": "9347"
},
{
"name": "GAP",
"bytes": "4684"
},
{
"name": "Java",
"bytes": "46389"
},
{
"name": "JavaScript",
"bytes": "9523"
},
{
"name": "Python",
"bytes": "2250380"
},
{
"name": "Scala",
"bytes": "5517"
},
{
"name": "Shell",
"bytes": "29381"
},
{
"name": "Thrift",
"bytes": "1674"
}
],
"symlink_target": ""
}
|
import hashlib
import sys
def solve(secret_key, starts_with):
result = 1
while True:
hash_input = "%s%s" % (secret_key, result)
hash_result = hashlib.md5(hash_input).hexdigest()
if hash_result.startswith(starts_with):
return result
result += 1
if __name__ == '__main__':
"""
$ python solution.py yzbqklnj 00000
First decimal that creates a hash that starts with `00000` based on secret key `yzbqklnj`: 282749
$ python solution.py yzbqklnj 000000
First decimal that creates a hash that starts with `000000` based on secret key `yzbqklnj`: 9962624
"""
secret_key = sys.argv[1]
starts_with = sys.argv[2]
result = solve(secret_key, starts_with)
print "First decimal that creates a hash that starts with `%s` based on secret key `%s`: %s" % (
starts_with,
secret_key,
result
)
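# Illustrative check (not part of the original script): for a single
# candidate, solve() just tests whether the md5 hex digest of
# secret_key + str(candidate) starts with the required prefix. Using the
# well-known Advent of Code example key 'abcdef':
#
#     >>> hashlib.md5("abcdef609043").hexdigest()[:6]
#     '000001'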
|
{
"content_hash": "a71a882dc78791f0bfb43f5d71a18436",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 103,
"avg_line_length": 30.896551724137932,
"alnum_prop": 0.6261160714285714,
"repo_name": "loisaidasam/adventofcode",
"id": "389ad066f4c179f629af7f2ace67fc034241d8bb",
"size": "897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2015/day04/solution.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "324"
},
{
"name": "C++",
"bytes": "118181"
},
{
"name": "Java",
"bytes": "143236"
},
{
"name": "Python",
"bytes": "5647"
},
{
"name": "Ruby",
"bytes": "2990"
},
{
"name": "Shell",
"bytes": "893"
}
],
"symlink_target": ""
}
|
'''
Exercise 7 - Class 8 - Thread connection to each device and run the command
Gleydson Mazioli <gleydsonmazioli@gmail.com>
'''
from net_system.models import NetworkDevice
import netmiko
import django
import time
from multiprocessing import Process
def get_cred_type(l_credentials, l_type):
'''
Get the associated credential from the database (instead
of using a hard coded list)
'''
for cred in l_credentials:
if l_type in cred.description.lower():
return cred
def run_command(device):
'''
Run a command on the remote device
'''
device_type = device.device_type
port = device.port
secret = ''
ip_addr = device.ip_address
creds = device.credentials
start_dev_time = int(time.time())
try:
username = creds.username
password = creds.password
except AttributeError:
print '%s: No credentials attributes for login. Skipping' % (device)
return 1
print device, device_type, port, ip_addr, username, password
try:
rem_conn = netmiko.ConnectHandler(device_type=device_type, ip=ip_addr,
username=username, password=password,
port=port, secret=secret)
except netmiko.ssh_exception.NetMikoAuthenticationException:
print " %s: Unable to connect (check user/pw)" % (device)
return 1
print rem_conn.send_command_expect("show version")
end_dev_time = int(time.time())
print '*' * 70
print 'Elapsed time on %s: %s s' % (device, end_dev_time - start_dev_time)
print '*' * 70
print '\n'
def main():
'''
Main function
'''
django.setup()
net_devices = NetworkDevice.objects.all()
start_time = int(time.time())
procs = []
for device in net_devices:
my_proc = Process(target=run_command, args=(device,))
my_proc.start()
procs.append(my_proc)
for a_proc in procs:
print a_proc
a_proc.join()
print '\nElapsed time: %s seconds' % (int(time.time()) - start_time)
if __name__ == '__main__':
main()
|
{
"content_hash": "3d56c746898060b662579e32fb716265",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 79,
"avg_line_length": 28.013157894736842,
"alnum_prop": 0.611085016439643,
"repo_name": "gleydsonm/pynet_ex",
"id": "b1c60db95108ab5d60edb3a16157872e1985f2a1",
"size": "2151",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "class8/exercise7.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "42"
},
{
"name": "Python",
"bytes": "64190"
}
],
"symlink_target": ""
}
|
import unittest
from unittest.mock import patch
from flask import json
from pipeline_notifier_test.test_utils import *
from pipeline_notifier.main import build_app
class IntegrationTests(unittest.TestCase):
def test_status_page_returns_ok(self):
client = self.buildClient()
result = client.get('/status')
status = json.loads(result.data)
self.assertEqual("ok", status["status"])
def test_status_page_returns_pipeline_details(self):
client = self.buildClient({"PIPELINE_NOTIFIER_PIPELINES": json.dumps([
{"name": "Pipeline A", "steps": ["Step 1"]},
{"name": "Pipeline B", "steps": ["Step 2", "Step 3"]}
])})
result = client.get('/status')
status = json.loads(result.data)
self.assertEqual(status["pipelines"], [
{"name": "Pipeline A", "steps": [
{"name": "Step 1", "waiting": [], "in-progress": []}
]},
{"name": "Pipeline B", "steps": [
{"name": "Step 2", "waiting": [], "in-progress": []},
{"name": "Step 3", "waiting": [], "in-progress": []}
]}
])
def test_no_notifications_sent_for_successes_without_commits(self):
client = self.buildClient({"PIPELINE_NOTIFIER_PIPELINES": json.dumps([
{"name": "Pipeline", "steps": ["Step 1"]},
])})
self.announceStepStart("Step 1", client)
self.announceStepSuccess("Step 1", client)
self.assertEqual(len(hipchatCallsTo(self.hipchatMock)), 0)
def test_notifications_are_sent_for_failures_even_without_commits(self):
client = self.buildClient({"PIPELINE_NOTIFIER_PIPELINES": json.dumps([
{"name": "Pipeline", "steps": ["Step 1"]},
])})
self.announceStepStart("Step 1", client)
self.announceStepSuccess("Step 1", client)
self.assertEqual(len(hipchatCallsTo(self.hipchatMock)), 0)
def test_single_step_pipeline_notifies_successes(self):
client = self.buildClient({"PIPELINE_NOTIFIER_PIPELINES": json.dumps([
{"name": "Pipeline", "steps": ["Step 1"]},
])})
self.announceCommit(client)
self.announceStepStart("Step 1", client)
self.announceStepSuccess("Step 1", client)
self.assertOneHipchatNotificationSent(Matches(lambda message: "completed" in message and
"failed" not in message))
def test_two_step_pipeline_notifies_final_successes(self):
client = self.buildClient({"PIPELINE_NOTIFIER_PIPELINES": json.dumps([
{"name": "Pipeline", "steps": ["Step 1", "Step 2"]},
])})
self.announceCommit(client)
self.announceStepStart("Step 1", client)
self.announceStepSuccess("Step 1", client)
self.announceStepStart("Step 2", client)
self.announceStepSuccess("Step 2", client)
self.assertOneHipchatNotificationSent(Matches(lambda message: "completed" in message and
"failed" not in message))
def getHipchatNotificationsSent(self):
return [c for c in self.hipchatMock.return_value.method.call_args_list]
def assertOneHipchatNotificationSent(self, matcher = Matches(lambda m: True)):
self.hipchatMock.assert_called_once_with(token=self.hipchat_token)
calls = hipchatCallsTo(self.hipchatMock)
self.assertEquals(len(calls), 1)
notificationParameters = calls[0][1]["parameters"]
self.assertEquals(notificationParameters["room_id"], self.hipchat_room_id)
message = notificationParameters["message"]
self.assertEquals(message, matcher)
def buildClient(self, envSettings={}):
defaultEnv = {"PIPELINE_NOTIFIER_PIPELINES": "[]",
"PIPELINE_NOTIFIER_HIPCHAT_ROOM": 123,
"PIPELINE_NOTIFIER_HIPCHAT_TOKEN": "qwe"}
defaultEnv.update(envSettings)
# Backup hipchat config for later assertions
self.hipchat_room_id = defaultEnv["PIPELINE_NOTIFIER_HIPCHAT_ROOM"]
self.hipchat_token = defaultEnv["PIPELINE_NOTIFIER_HIPCHAT_TOKEN"]
self.patchEnvWithMock(defaultEnv)
self.patchHipchatWithMock()
app = build_app()
app.testing = True
return app.test_client()
def patchEnvWithMock(self, envMock):
envPatcher = patch("os.environ", new=envMock)
self.envMock = envPatcher.start();
self.addCleanup(envPatcher.stop)
def patchHipchatWithMock(self):
hipchatPatcher = patch("hipchat.HipChat")
self.hipchatMock = hipchatPatcher.start()
self.addCleanup(hipchatPatcher.stop)
def announceCommit(self, client):
client.post('/bitbucket', data={"payload": json.dumps({
"commits": [{
"author": "Bob",
"branch": "master",
"files": [{
"file": "somefileA.py",
"type": "modified"
}],
"message": "Fixed bug 4",
"node": "620ade18607a",
"timestamp": "2013-11-25 19:21:21+00:00",
"utctimestamp": "2013-11-25 19:21:21Z"
}],
"repository": {
"absolute_url": "/project/path/",
"fork": False,
"is_private": False,
"name": "Project Name",
"owner": "Mr Project Owner",
"scm": "git",
"slug": "project-name",
"website": "https://project-website.com/"
},
"user": "Bob"
})})
def announceStepStart(self, stepName, client):
client.post('/jenkins', data=json.dumps({
"name": stepName,
"url": "http://jenkins.example.com/step1",
"build": {
"number": 1,
"phase": "STARTED",
"status": "SUCCESS",
"url": "job/project/5",
"full_url": "http://ci.jenkins.org/job/project/5",
"parameters": {"branch":"master"}
}
}))
def announceStepSuccess(self, stepName, client):
client.post('/jenkins', data=json.dumps({
"name": stepName,
"url": "http://jenkins.example.com/step1",
"build": {
"number": 1,
"phase": "FINISHED",
"status": "SUCCESS",
"url": "job/project/5",
"full_url": "http://ci.jenkins.org/job/project/5",
"parameters": {"branch":"master"}
}
}))
def announceStepFailure(self, stepName, client):
client.post('/jenkins', data=json.dumps({
"name": stepName,
"url": "http://jenkins.example.com/step",
"build": {
"number": 1,
"phase": "FINISHED",
"status": "FAILED",
"url": "job/project/5",
"full_url": "http://ci.jenkins.org/job/project/5",
"parameters": {"branch":"master"}
}
}))
|
{
"content_hash": "d2879a1d55ab357da9127fac4628328f",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 96,
"avg_line_length": 37.671875,
"alnum_prop": 0.5407161620351169,
"repo_name": "pimterry/pipeline-notifier",
"id": "2ec442bf5e1c83dedd6eba2bcf57c48e2a005733",
"size": "7233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pipeline_notifier_test/integration_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37229"
}
],
"symlink_target": ""
}
|
"""
Zappa CLI
Deploy arbitrary Python programs as serverless Zappa applications.
"""
from __future__ import unicode_literals
from __future__ import division
import argcomplete
import argparse
import base64
import pkgutil
import botocore
import click
import collections
import hjson as json
import inspect
import importlib
import logging
import os
import pkg_resources
import random
import re
import requests
import slugify
import string
import sys
import tempfile
import time
import toml
import yaml
import zipfile
from click.exceptions import ClickException
from dateutil import parser
from datetime import datetime,timedelta
from zappa import Zappa, logger, API_GATEWAY_REGIONS
from util import (check_new_version_available, detect_django_settings,
detect_flask_apps, parse_s3_url, human_size)
CUSTOM_SETTINGS = [
'assume_policy',
'attach_policy',
'aws_region',
'delete_local_zip',
'delete_s3_zip',
'exclude',
'http_methods',
'integration_response_codes',
'method_header_types',
'method_response_codes',
'parameter_depth',
'role_name',
'touch',
]
##
# Main Input Processing
##
class ZappaCLI(object):
"""
ZappaCLI object is responsible for loading the settings,
handling the input arguments and executing the calls to the core library.
"""
# CLI
vargs = None
command = None
command_env = None
# Zappa settings
zappa = None
zappa_settings = None
load_credentials = True
# Specific settings
api_stage = None
app_function = None
aws_region = None
debug = None
prebuild_script = None
project_name = None
profile_name = None
lambda_arn = None
lambda_name = None
lambda_description = None
s3_bucket_name = None
settings_file = None
zip_path = None
handler_path = None
vpc_config = None
memory_size = None
use_apigateway = None
lambda_handler = None
django_settings = None
manage_roles = True
exception_handler = None
environment_variables = None
authorizer = None
stage_name_env_pattern = re.compile('^[a-zA-Z0-9_]+$')
def __init__(self):
self._stage_config_overrides = {} # change using self.override_stage_config_setting(key, val)
@property
def stage_config(self):
"""
A shortcut property for settings of a stage.
"""
def get_stage_setting(stage, extended_stages=None):
if extended_stages is None:
extended_stages = []
if stage in extended_stages:
raise RuntimeError(stage + " has already been extended to these settings. "
"There is a circular extends within the settings file.")
extended_stages.append(stage)
try:
stage_settings = dict(self.zappa_settings[stage].copy())
except KeyError:
raise ClickException("Cannot extend settings for undefined environment '" + stage + "'.")
extends_stage = self.zappa_settings[stage].get('extends', None)
if not extends_stage:
return stage_settings
extended_settings = get_stage_setting(stage=extends_stage, extended_stages=extended_stages)
extended_settings.update(stage_settings)
return extended_settings
settings = get_stage_setting(stage=self.api_stage)
        # Backwards compatibility: the old 'delete_zip' setting was renamed to the more explicit 'delete_local_zip'.
if u'delete_zip' in settings:
settings[u'delete_local_zip'] = settings.get(u'delete_zip')
settings.update(self.stage_config_overrides)
return settings
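    # Illustrative example for the stage_config property above (not part of
    # the original CLI): with hypothetical settings like the dict below,
    # requesting the 'prod' stage first loads 'dev' (via 'extends') and then
    # layers the 'prod' keys on top, so s3_bucket comes from 'prod' while
    # app_function is inherited from 'dev'.
    #
    #     {
    #         "dev":  {"app_function": "app.app", "s3_bucket": "dev-bucket"},
    #         "prod": {"extends": "dev", "s3_bucket": "prod-bucket"}
    #     }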
@property
def stage_config_overrides(self):
"""
Returns zappa_settings we forcefully override for the current stage
set by `self.override_stage_config_setting(key, value)`
"""
return getattr(self, '_stage_config_overrides', {}).get(self.api_stage, {})
def override_stage_config_setting(self, key, val):
"""
Forcefully override a setting set by zappa_settings (for the current stage only)
:param key: settings key
:param val: value
"""
self._stage_config_overrides = getattr(self, '_stage_config_overrides', {})
self._stage_config_overrides.setdefault(self.api_stage, {})[key] = val
def handle(self, argv=None):
"""
Main function.
        Parses the command, loads settings, and dispatches accordingly.
"""
desc = ('Zappa - Deploy Python applications to AWS Lambda'
' and API Gateway.\n')
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
'-v', '--version', action='version',
version=pkg_resources.get_distribution("zappa").version,
help='Print the zappa version'
)
parser.add_argument(
'-a', '--app_function', help='The WSGI application function.'
)
parser.add_argument(
'-s', '--settings_file', help='The path to a Zappa settings file.'
)
env_parser = argparse.ArgumentParser(add_help=False)
group = env_parser.add_mutually_exclusive_group()
all_help = ('Execute this command for all of our defined '
'Zappa environments.')
group.add_argument('--all', action='store_true', help=all_help)
group.add_argument('command_env', nargs='?')
##
# Certify
##
subparsers = parser.add_subparsers(title='subcommands', dest='command')
cert_parser = subparsers.add_parser(
'certify', parents=[env_parser],
help='Create and install SSL certificate'
)
cert_parser.add_argument(
'--no-cleanup', action='store_true',
help=("Don't remove certificate files from /tmp during certify."
" Dangerous.")
)
##
# Deploy
##
subparsers.add_parser(
'deploy', parents=[env_parser], help='Deploy application.'
)
subparsers.add_parser('init', help='Initialize Zappa app.')
##
# Package
##
package_parser = subparsers.add_parser(
'package', parents=[env_parser], help='Build the application zip package locally.'
)
##
# Invocation
##
invoke_parser = subparsers.add_parser(
'invoke', parents=[env_parser],
help='Invoke remote function.'
)
invoke_parser.add_argument(
'--raw', action='store_true',
help=('When invoking remotely, invoke this python as a string,'
' not as a modular path.')
)
invoke_parser.add_argument('command_rest')
##
# Manage
##
manage_parser = subparsers.add_parser(
'manage',
help='Invoke remote Django manage.py commands.'
)
rest_help = ("Command in the form of <env> <command>. <env> is not "
"required if --all is specified")
manage_parser.add_argument('--all', action='store_true', help=all_help)
manage_parser.add_argument('command_rest', nargs='+', help=rest_help)
##
# Rollback
##
def positive_int(s):
""" Ensure an arg is positive """
i = int(s)
if i < 0:
msg = "This argument must be positive (got {})".format(s)
raise argparse.ArgumentTypeError(msg)
return i
rollback_parser = subparsers.add_parser(
'rollback', parents=[env_parser],
help='Rollback deployed code to a previous version.'
)
rollback_parser.add_argument(
'-n', '--num-rollback', type=positive_int, default=0,
help='The number of versions to rollback.'
)
##
# Scheduling
##
subparsers.add_parser(
'schedule', parents=[env_parser],
help='Schedule functions to occur at regular intervals.'
)
##
# Status
##
status_parser = subparsers.add_parser(
'status', parents=[env_parser],
help='Show deployment status and event schedules.'
)
status_parser.add_argument(
'--json', action='store_true',
help='Returns status in JSON format.'
) # https://github.com/Miserlou/Zappa/issues/407
##
# Log Tailing
##
tail_parser = subparsers.add_parser(
'tail', parents=[env_parser], help='Tail deployment logs.'
)
tail_parser.add_argument(
'--no-color', action='store_true',
help="Don't color log tail output."
)
tail_parser.add_argument(
'--http', action='store_true',
help='Only show HTTP requests in tail output.'
)
tail_parser.add_argument(
'--non-http', action='store_true',
help='Only show non-HTTP requests in tail output.'
)
tail_parser.add_argument(
'--since', type=str, default="100000s",
help="Only show lines since a certain timeframe."
)
tail_parser.add_argument(
'--filter', type=str, default="",
help="Apply a filter pattern to the logs."
)
##
# Undeploy
##
undeploy_parser = subparsers.add_parser(
'undeploy', parents=[env_parser], help='Undeploy application.'
)
undeploy_parser.add_argument(
'--remove-logs', action='store_true',
help=('Removes log groups of api gateway and lambda task'
' during the undeployment.'),
)
undeploy_parser.add_argument(
'-y', '--yes', action='store_true', help='Auto confirm yes.'
)
##
# Unschedule
##
subparsers.add_parser('unschedule', parents=[env_parser],
help='Unschedule functions.')
##
# Updating
##
subparsers.add_parser(
'update', parents=[env_parser], help='Update deployed application.'
)
argcomplete.autocomplete(parser)
args = parser.parse_args(argv)
self.vargs = vars(args)
# Parse the input
# NOTE(rmoe): Special case for manage command
# The manage command can't have both command_env and command_rest
# arguments. Since they are both positional arguments argparse can't
# differentiate the two. This causes problems when used with --all.
# (e.g. "manage --all showmigrations admin" argparse thinks --all has
# been specified AND that command_env='showmigrations')
# By having command_rest collect everything but --all we can split it
# apart here instead of relying on argparse.
if args.command == 'manage' and not self.vargs.get('all'):
self.command_env = self.vargs['command_rest'].pop(0)
else:
self.command_env = self.vargs.get('command_env')
self.command = args.command
# We don't have any settings yet, so make those first!
# (Settings-based interactions will fail
# before a project has been initialized.)
if self.command == 'init':
self.init()
return
# Make sure there isn't a new version available
if not self.vargs.get('json'):
self.check_for_update()
# Load and Validate Settings File
self.load_settings_file(self.vargs.get('settings_file'))
# Should we execute this for all environments, or just one?
all_environments = self.vargs.get('all')
environments = []
if all_environments: # All envs!
environments = self.zappa_settings.keys()
else: # Just one env.
if not self.command_env:
# If there's only one environment defined in the settings,
# use that as the default.
if len(self.zappa_settings.keys()) == 1:
environments.append(self.zappa_settings.keys()[0])
else:
parser.error("Please supply an environment to interact with.")
else:
environments.append(self.command_env)
for environment in environments:
try:
self.dispatch_command(self.command, environment)
except ClickException as e:
# Discussion on exit codes: https://github.com/Miserlou/Zappa/issues/407
e.show()
sys.exit(e.exit_code)
def dispatch_command(self, command, environment):
"""
Given a command to execute and environment,
execute that command.
"""
self.api_stage = environment
if command not in ['status', 'manage']:
click.echo("Calling " + click.style(command, fg="green", bold=True) + " for environment " +
click.style(self.api_stage, bold=True) + ".." )
        # Explicitly define the app function.
if self.vargs['app_function'] is not None:
self.app_function = self.vargs['app_function']
# Load our settings, based on api_stage.
try:
self.load_settings(self.vargs['settings_file'])
except ValueError as e:
print("Error: {}".format(e.message))
sys.exit(-1)
self.callback('settings')
# Hand it off
if command == 'deploy': # pragma: no cover
self.deploy()
if command == 'package': # pragma: no cover
self.package()
elif command == 'update': # pragma: no cover
self.update()
elif command == 'rollback': # pragma: no cover
self.rollback(self.vargs['num_rollback'])
elif command == 'invoke': # pragma: no cover
if not self.vargs.get('command_rest'):
print("Please enter the function to invoke.")
return
self.invoke(self.vargs['command_rest'], raw_python=self.vargs['raw'])
elif command == 'manage': # pragma: no cover
if not self.vargs.get('command_rest'):
print("Please enter the management command to invoke.")
return
if not self.django_settings:
print("This command is for Django projects only!")
print("If this is a Django project, please define django_settings in your zappa_settings.")
return
command_tail = self.vargs.get('command_rest')
if len(command_tail) > 1:
command = " ".join(command_tail) # ex: zappa manage dev "shell --version"
else:
command = command_tail[0] # ex: zappa manage dev showmigrations admin
self.invoke(command, command="manage")
elif command == 'tail': # pragma: no cover
self.tail(
colorize=(not self.vargs['no_color']),
http=self.vargs['http'],
non_http=self.vargs['non_http'],
since=self.vargs['since'],
filter_pattern=self.vargs['filter'],
)
elif command == 'undeploy': # pragma: no cover
self.undeploy(
noconfirm=self.vargs['yes'],
remove_logs=self.vargs['remove_logs']
)
elif command == 'schedule': # pragma: no cover
self.schedule()
elif command == 'unschedule': # pragma: no cover
self.unschedule()
elif command == 'status': # pragma: no cover
self.status(return_json=self.vargs['json'])
elif command == 'certify': # pragma: no cover
self.certify(no_cleanup=self.vargs['no_cleanup'])
##
# The Commands
##
def package(self):
"""
Only build the package
"""
# force not to delete the local zip
self.override_stage_config_setting('delete_local_zip', False)
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Create the Lambda Zip
self.create_package()
self.callback('zip')
size = human_size(os.path.getsize(self.zip_path))
click.echo(click.style("Package created", fg="green", bold=True) + ": " + click.style(self.zip_path, bold=True) + " (" + size + ")")
def deploy(self):
"""
Package your project, upload it to S3, register the Lambda function
and create the API Gateway routes.
"""
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Make sure this isn't already deployed.
deployed_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
if len(deployed_versions) > 0:
raise ClickException("This application is " + click.style("already deployed", fg="red") +
" - did you mean to call " + click.style("update", bold=True) + "?")
# Make sure the necessary IAM execution roles are available
if self.manage_roles:
try:
self.zappa.create_iam_roles()
except botocore.client.ClientError:
raise ClickException(
click.style("Failed", fg="red") + " to " + click.style("manage IAM roles", bold=True) + "!\n" +
"You may " + click.style("lack the necessary AWS permissions", bold=True) +
" to automatically manage a Zappa execution role.\n" +
"To fix this, see here: " +
click.style("https://github.com/Miserlou/Zappa#using-custom-aws-iam-roles-and-policies", bold=True)
+ '\n')
# Create the Lambda Zip
self.create_package()
self.callback('zip')
# Upload it to S3
success = self.zappa.upload_to_s3(
self.zip_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload to S3. Quitting.")
# If using a slim handler, upload it to S3 and tell lambda to use this slim handler zip
if self.stage_config.get('slim_handler', False):
# https://github.com/Miserlou/Zappa/issues/510
success = self.zappa.upload_to_s3(self.handler_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload handler to S3. Quitting.")
# Copy the project zip to the current project zip
current_project_name = '{0!s}_current_project.zip'.format(self.project_name)
success = self.zappa.copy_on_s3(src_file_name=self.zip_path, dst_file_name=current_project_name,
bucket_name=self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to copy the zip to be the current project. Quitting.")
handler_file = self.handler_path
else:
handler_file = self.zip_path
# Fixes https://github.com/Miserlou/Zappa/issues/613
try:
self.lambda_arn = self.zappa.get_lambda_function(
function_name=self.lambda_name)
except botocore.client.ClientError:
# Register the Lambda function with that zip as the source
# You'll also need to define the path to your lambda_handler code.
self.lambda_arn = self.zappa.create_lambda_function(
bucket=self.s3_bucket_name,
s3_key=handler_file,
function_name=self.lambda_name,
handler=self.lambda_handler,
description=self.lambda_description,
vpc_config=self.vpc_config,
timeout=self.timeout_seconds,
memory_size=self.memory_size
)
# Schedule events for this deployment
self.schedule()
endpoint_url = ''
deployment_string = click.style("Deployment complete", fg="green", bold=True) + "!"
if self.use_apigateway:
# Create and configure the API Gateway
template = self.zappa.create_stack_template(self.lambda_arn,
self.lambda_name,
self.api_key_required,
self.integration_content_type_aliases,
self.iam_authorization,
self.authorizer,
self.cors)
self.zappa.update_stack(self.lambda_name, self.s3_bucket_name, wait=True)
# Deploy the API!
api_id = self.zappa.get_api_id(self.lambda_name)
endpoint_url = self.deploy_api_gateway(api_id)
deployment_string = deployment_string + ": {}".format(endpoint_url)
# Create/link API key
if self.api_key_required:
if self.api_key is None:
self.zappa.create_api_key(api_id=api_id, stage_name=self.api_stage)
else:
self.zappa.add_api_stage_to_api_key(api_key=self.api_key, api_id=api_id, stage_name=self.api_stage)
if self.stage_config.get('touch', True):
requests.get(endpoint_url)
# Finally, delete the local copy our zip package
if self.stage_config.get('delete_local_zip', True):
self.remove_local_zip()
# Remove the project zip from S3.
self.remove_uploaded_zip()
self.callback('post')
click.echo(deployment_string)
def update(self):
"""
Repackage and update the function code.
"""
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Temporary version check
try:
updated_time = 1472581018
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
conf = function_response['Configuration']
last_updated = parser.parse(conf['LastModified'])
last_updated_unix = time.mktime(last_updated.timetuple())
except Exception as e:
click.echo(click.style("Warning!", fg="red") + " Couldn't get function " + self.lambda_name +
" in " + self.zappa.aws_region + " - have you deployed yet?")
sys.exit(-1)
if last_updated_unix <= updated_time:
click.echo(click.style("Warning!", fg="red") +
" You may have upgraded Zappa since deploying this application. You will need to " +
click.style("redeploy", bold=True) + " for this deployment to work properly!")
# Make sure the necessary IAM execution roles are available
if self.manage_roles:
try:
self.zappa.create_iam_roles()
except botocore.client.ClientError:
click.echo(click.style("Failed", fg="red") + " to " + click.style("manage IAM roles", bold=True) + "!")
click.echo("You may " + click.style("lack the necessary AWS permissions", bold=True) +
" to automatically manage a Zappa execution role.")
click.echo("To fix this, see here: " +
click.style("https://github.com/Miserlou/Zappa#using-custom-aws-iam-roles-and-policies",
bold=True))
sys.exit(-1)
# Create the Lambda Zip,
self.create_package()
self.callback('zip')
# Upload it to S3
success = self.zappa.upload_to_s3(self.zip_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload project to S3. Quitting.")
# If using a slim handler, upload it to S3 and tell lambda to use this slim handler zip
if self.stage_config.get('slim_handler', False):
# https://github.com/Miserlou/Zappa/issues/510
success = self.zappa.upload_to_s3(self.handler_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload handler to S3. Quitting.")
# Copy the project zip to the current project zip
current_project_name = '{0!s}_current_project.zip'.format(self.project_name)
success = self.zappa.copy_on_s3(src_file_name=self.zip_path, dst_file_name=current_project_name,
bucket_name=self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to copy the zip to be the current project. Quitting.")
handler_file = self.handler_path
else:
handler_file = self.zip_path
# Register the Lambda function with that zip as the source
# You'll also need to define the path to your lambda_handler code.
self.lambda_arn = self.zappa.update_lambda_function(
self.s3_bucket_name, handler_file, self.lambda_name)
# Remove the uploaded zip from S3, because it is now registered..
self.remove_uploaded_zip()
# Update the configuration, in case there are changes.
self.lambda_arn = self.zappa.update_lambda_configuration(lambda_arn=self.lambda_arn,
function_name=self.lambda_name,
handler=self.lambda_handler,
description=self.lambda_description,
vpc_config=self.vpc_config,
timeout=self.timeout_seconds,
memory_size=self.memory_size)
# Finally, delete the local copy our zip package
if self.stage_config.get('delete_local_zip', True):
self.remove_local_zip()
if self.use_apigateway:
self.zappa.create_stack_template(self.lambda_arn,
self.lambda_name,
self.api_key_required,
self.integration_content_type_aliases,
self.iam_authorization,
self.authorizer,
self.cors)
self.zappa.update_stack(self.lambda_name, self.s3_bucket_name, wait=True, update_only=True)
api_id = self.zappa.get_api_id(self.lambda_name)
endpoint_url = self.deploy_api_gateway(api_id)
if self.stage_config.get('domain', None):
endpoint_url = self.stage_config.get('domain')
else:
endpoint_url = None
self.schedule()
self.callback('post')
if endpoint_url and 'https://' not in endpoint_url:
endpoint_url = 'https://' + endpoint_url
deployed_string = "Your updated Zappa deployment is " + click.style("live", fg='green', bold=True) + "!"
if self.use_apigateway:
deployed_string = deployed_string + ": " + click.style("{}".format(endpoint_url), bold=True)
api_url = None
if endpoint_url and 'amazonaws.com' not in endpoint_url:
api_url = self.zappa.get_api_url(
self.lambda_name,
self.api_stage)
if endpoint_url != api_url:
deployed_string = deployed_string + " (" + api_url + ")"
if self.stage_config.get('touch', True):
if api_url:
requests.get(api_url)
elif endpoint_url:
requests.get(endpoint_url)
click.echo(deployed_string)
def rollback(self, revision):
"""
        Rolls back the currently deployed Lambda code to a previous revision.
"""
print("Rolling back..")
self.zappa.rollback_lambda_function_version(
self.lambda_name, versions_back=revision)
print("Done!")
def tail(self, since, filter_pattern, limit=10000, keep_open=True, colorize=True, http=False, non_http=False):
"""
Tail this function's logs.
if keep_open, do so repeatedly, printing any new logs
"""
try:
from util import string_to_timestamp
since_stamp = string_to_timestamp(since)
last_since = since_stamp
while True:
new_logs = self.zappa.fetch_logs(
self.lambda_name,
start_time=since_stamp,
limit=limit,
filter_pattern=filter_pattern,
)
new_logs = [ e for e in new_logs if e['timestamp'] > last_since ]
self.print_logs(new_logs, colorize, http, non_http)
if not keep_open:
break
if new_logs:
last_since = new_logs[-1]['timestamp']
time.sleep(1)
except KeyboardInterrupt: # pragma: no cover
# Die gracefully
try:
sys.exit(0)
except SystemExit:
os._exit(130)
def undeploy(self, noconfirm=False, remove_logs=False):
"""
        Tear down an existing deployment.
"""
if not noconfirm: # pragma: no cover
confirm = raw_input("Are you sure you want to undeploy? [y/n] ")
if confirm != 'y':
return
if self.use_apigateway:
if remove_logs:
self.zappa.remove_api_gateway_logs(self.lambda_name)
domain_name = self.stage_config.get('domain', None)
# Only remove the api key when not specified
if self.api_key_required and self.api_key is None:
api_id = self.zappa.get_api_id(self.lambda_name)
self.zappa.remove_api_key(api_id, self.api_stage)
gateway_id = self.zappa.undeploy_api_gateway(
self.lambda_name,
domain_name=domain_name
)
self.unschedule() # removes event triggers, including warm up event.
self.zappa.delete_lambda_function(self.lambda_name)
if remove_logs:
self.zappa.remove_lambda_function_logs(self.lambda_name)
click.echo(click.style("Done", fg="green", bold=True) + "!")
def schedule(self):
"""
        Given a list of functions and a schedule to execute them,
        set up regular execution.
"""
events = self.stage_config.get('events', [])
if events:
if not isinstance(events, list): # pragma: no cover
print("Events must be supplied as a list.")
return
for event in events:
self.collision_warning(event.get('function'))
if self.stage_config.get('keep_warm', True):
if not events:
events = []
keep_warm_rate = self.stage_config.get('keep_warm_expression', "rate(4 minutes)")
events.append({'name': 'zappa-keep-warm',
'function': 'handler.keep_warm_callback',
'expression': keep_warm_rate,
'description': 'Zappa Keep Warm - {}'.format(self.lambda_name)})
if self.stage_config.get('lets_encrypt_expression'):
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
conf = function_response['Configuration']
timeout = conf['Timeout']
if timeout < 60:
click.echo(click.style("Unable to schedule certificate autorenewer!", fg="red", bold=True) +
" Please redeploy with a " + click.style("timeout_seconds", bold=True) + " greater than 60!")
else:
events.append({'name': 'zappa-le-certify',
'function': 'handler.certify_callback',
'expression': self.stage_config.get('lets_encrypt_expression'),
'description': 'Zappa LE Certificate Renewer - {}'.format(self.lambda_name)})
if events:
try:
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
except botocore.exceptions.ClientError as e: # pragma: no cover
click.echo(click.style("Function does not exist", fg="yellow") + ", please " +
click.style("deploy", bold=True) + "first. Ex:" +
click.style("zappa deploy {}.".format(self.api_stage), bold=True))
sys.exit(-1)
print("Scheduling..")
self.zappa.schedule_events(
lambda_arn=function_response['Configuration']['FunctionArn'],
lambda_name=self.lambda_name,
events=events
)
def unschedule(self):
"""
        Given a list of scheduled functions,
tear down their regular execution.
"""
# Run even if events are not defined to remove previously existing ones (thus default to []).
events = self.stage_config.get('events', [])
if not isinstance(events, list): # pragma: no cover
print("Events must be supplied as a list.")
return
function_arn = None
try:
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
function_arn = function_response['Configuration']['FunctionArn']
except botocore.exceptions.ClientError as e: # pragma: no cover
raise ClickException("Function does not exist, you should deploy first. Ex: zappa deploy {}. "
"Proceeding to unschedule CloudWatch based events.".format(self.api_stage))
print("Unscheduling..")
self.zappa.unschedule_events(
lambda_name=self.lambda_name,
lambda_arn=function_arn,
events=events,
)
def invoke(self, function_name, raw_python=False, command=None):
"""
Invoke a remote function.
"""
# There are three likely scenarios for 'command' here:
# command, which is a modular function path
# raw_command, which is a string of python to execute directly
# manage, which is a Django-specific management command invocation
key = command if command is not None else 'command'
if raw_python:
command = {'raw_command': function_name}
else:
command = {key: function_name}
# Can't use hjson
import json as json
response = self.zappa.invoke_lambda_function(
self.lambda_name,
json.dumps(command),
invocation_type='RequestResponse',
)
if 'LogResult' in response:
print(base64.b64decode(response['LogResult']))
else:
print(response)
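        # Illustrative payloads (not part of the original CLI): the three
        # command styles above serialize to JSON roughly as follows before
        # being sent to the Lambda function. The module path, python snippet
        # and manage command shown here are placeholders.
        #
        #     {"command": "my_app.tasks.nightly"}        # modular path
        #     {"raw_command": "print(1 + 1)"}            # raw python string
        #     {"manage": "showmigrations admin"}         # django management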
def status(self, return_json=False):
"""
Describe the status of the current deployment.
"""
def tabular_print(title, value):
"""
            Convenience function for printing formatted table items.
"""
click.echo('%-*s%s' % (32, click.style("\t" + title, fg='green') + ':', str(value)))
return
# Lambda Env Details
lambda_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
if not lambda_versions:
raise ClickException(click.style("No Lambda %s detected in %s - have you deployed yet?" %
(self.lambda_name, self.zappa.aws_region), fg='red'))
status_dict = collections.OrderedDict()
status_dict["Lambda Versions"] = len(lambda_versions)
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
conf = function_response['Configuration']
self.lambda_arn = conf['FunctionArn']
status_dict["Lambda Name"] = self.lambda_name
status_dict["Lambda ARN"] = self.lambda_arn
status_dict["Lambda Role ARN"] = conf['Role']
status_dict["Lambda Handler"] = conf['Handler']
status_dict["Lambda Code Size"] = conf['CodeSize']
status_dict["Lambda Version"] = conf['Version']
status_dict["Lambda Last Modified"] = conf['LastModified']
status_dict["Lambda Memory Size"] = conf['MemorySize']
status_dict["Lambda Timeout"] = conf['Timeout']
status_dict["Lambda Runtime"] = conf['Runtime']
if 'VpcConfig' in conf.keys():
status_dict["Lambda VPC ID"] = conf.get('VpcConfig', {}).get('VpcId', 'Not assigned')
else:
status_dict["Lambda VPC ID"] = None
# Calculated statistics
try:
function_invocations = self.zappa.cloudwatch.get_metric_statistics(
Namespace='AWS/Lambda',
MetricName='Invocations',
StartTime=datetime.utcnow()-timedelta(days=1),
EndTime=datetime.utcnow(),
Period=1440,
Statistics=['Sum'],
Dimensions=[{'Name': 'FunctionName',
'Value': '{}'.format(self.lambda_name)}]
)['Datapoints'][0]['Sum']
except Exception as e:
function_invocations = 0
try:
function_errors = self.zappa.cloudwatch.get_metric_statistics(
Namespace='AWS/Lambda',
MetricName='Errors',
StartTime=datetime.utcnow()-timedelta(days=1),
EndTime=datetime.utcnow(),
Period=1440,
Statistics=['Sum'],
Dimensions=[{'Name': 'FunctionName',
'Value': '{}'.format(self.lambda_name)}]
)['Datapoints'][0]['Sum']
except Exception as e:
function_errors = 0
try:
error_rate = "{0:.2f}%".format(function_errors / function_invocations * 100)
except:
error_rate = "Error calculating"
status_dict["Invocations (24h)"] = int(function_invocations)
status_dict["Errors (24h)"] = int(function_errors)
status_dict["Error Rate (24h)"] = error_rate
# URLs
if self.use_apigateway:
api_url = self.zappa.get_api_url(
self.lambda_name,
self.api_stage)
status_dict["API Gateway URL"] = api_url
# Api Keys
api_id = self.zappa.get_api_id(self.lambda_name)
for api_key in self.zappa.get_api_keys(api_id, self.api_stage):
status_dict["API Gateway x-api-key"] = api_key
# There literally isn't a better way to do this.
            # AWS provides no way to tie an API Gateway domain name to its Lambda function.
domain_url = self.stage_config.get('domain', None)
if domain_url:
status_dict["Domain URL"] = 'https://' + domain_url
else:
status_dict["Domain URL"] = "None Supplied"
# Scheduled Events
event_rules = self.zappa.get_event_rules_for_lambda(lambda_arn=self.lambda_arn)
status_dict["Num. Event Rules"] = len(event_rules)
if len(event_rules) > 0:
status_dict['Events'] = []
for rule in event_rules:
event_dict = {}
rule_name = rule['Name']
event_dict["Event Rule Name"] = rule_name
event_dict["Event Rule Schedule"] = rule.get(u'ScheduleExpression', None)
event_dict["Event Rule State"] = rule.get(u'State', None).title()
event_dict["Event Rule ARN"] = rule.get(u'Arn', None)
status_dict['Events'].append(event_dict)
if return_json:
# Putting the status in machine readable format
# https://github.com/Miserlou/Zappa/issues/407
print(json.dumpsJSON(status_dict))
else:
click.echo("Status for " + click.style(self.lambda_name, bold=True) + ": ")
for k, v in status_dict.items():
if k == 'Events':
# Events are a list of dicts
for event in v:
for item_k, item_v in event.items():
tabular_print(item_k, item_v)
else:
tabular_print(k, v)
# TODO: S3/SQS/etc. type events?
return True
def check_stage_name(self, stage_name):
"""
Make sure the stage name matches the AWS-allowed pattern
(calls to apigateway_client.create_deployment, will fail with error
message "ClientError: An error occurred (BadRequestException) when
calling the CreateDeployment operation: Stage name only allows
a-zA-Z0-9_" if the pattern does not match)
"""
if self.stage_name_env_pattern.match(stage_name):
return True
raise ValueError("AWS requires stage name to match a-zA-Z0-9_")
def check_environment(self, environment):
"""
Make sure the environment contains only strings
(since putenv needs a string)
"""
non_strings = []
for k,v in environment.iteritems():
if not isinstance(v, basestring):
non_strings.append(k)
if non_strings:
raise ValueError("The following environment variables are not strings: {}".format(", ".join(non_strings)))
else:
return True
def init(self, settings_file="zappa_settings.json"):
"""
Initialize a new Zappa project by creating a new zappa_settings.json in a guided process.
        This should probably be broken up into a few separate components once it's stable.
Testing these raw_inputs requires monkeypatching with mock, which isn't pretty.
"""
# Ensure that we don't already have a zappa_settings file.
if os.path.isfile(settings_file):
raise ClickException("This project is " + click.style("already initialized", fg="red", bold=True) + "!")
# Ensure P2 until Lambda supports it.
if sys.version_info >= (3,0): # pragma: no cover
raise ClickException("Zappa curently only works with Python 2, until AWS Lambda adds Python 3 support.")
# Ensure inside virtualenv.
if not ( hasattr(sys, 'prefix') or hasattr(sys, 'real_prefix') or hasattr(sys, 'base_prefix') ): # pragma: no cover
raise ClickException(
"Zappa must be run inside of a virtual environment!\n"
"Learn more about virtual environments here: http://docs.python-guide.org/en/latest/dev/virtualenvs/")
# Explain system.
click.echo(click.style(u"""\n███████╗ █████╗ ██████╗ ██████╗ █████╗
╚══███╔╝██╔══██╗██╔══██╗██╔══██╗██╔══██╗
███╔╝ ███████║██████╔╝██████╔╝███████║
███╔╝ ██╔══██║██╔═══╝ ██╔═══╝ ██╔══██║
███████╗██║ ██║██║ ██║ ██║ ██║
╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝\n""", fg='green', bold=True))
click.echo(click.style("Welcome to ", bold=True) + click.style("Zappa", fg='green', bold=True) + click.style("!\n", bold=True))
click.echo(click.style("Zappa", bold=True) + " is a system for running server-less Python web applications"
" on AWS Lambda and AWS API Gateway.")
click.echo("This `init` command will help you create and configure your new Zappa deployment.")
click.echo("Let's get started!\n")
# Create Env
while True:
click.echo("Your Zappa configuration can support multiple production environments, like '" +
click.style("dev", bold=True) + "', '" + click.style("staging", bold=True) + "', and '" +
click.style("production", bold=True) + "'.")
env = raw_input("What do you want to call this environment (default 'dev'): ") or "dev"
try:
self.check_stage_name(env)
break
except ValueError:
click.echo(click.style("Environment names must match a-zA-Z0-9_", fg='red'))
# Create Bucket
click.echo("\nYour Zappa deployments will need to be uploaded to a " + click.style("private S3 bucket", bold=True) + ".")
click.echo("If you don't have a bucket yet, we'll create one for you too.")
default_bucket = "zappa-" + ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9))
bucket = raw_input("What do you want call your bucket? (default '%s'): " % default_bucket) or default_bucket
# TODO actually create bucket.
# Detect Django/Flask
try: # pragma: no cover
import django
has_django = True
except ImportError as e:
has_django = False
try: # pragma: no cover
import flask
has_flask = True
except ImportError as e:
has_flask = False
print('')
# App-specific
if has_django: # pragma: no cover
click.echo("It looks like this is a " + click.style("Django", bold=True) + " application!")
click.echo("What is the " + click.style("module path", bold=True) + " to your projects's Django settings?")
django_settings = None
matches = detect_django_settings()
while django_settings in [None, '']:
if matches:
click.echo("We discovered: " + click.style(', '.join('{}'.format(i) for v, i in enumerate(matches)), bold=True))
django_settings = raw_input("Where are your project's settings? (default '%s'): " % matches[0]) or matches[0]
else:
click.echo("(This will likely be something like 'your_project.settings')")
django_settings = raw_input("Where are your project's settings?: ")
django_settings = django_settings.replace("'", "")
django_settings = django_settings.replace('"', "")
else:
matches = None
if has_flask:
click.echo("It looks like this is a " + click.style("Flask", bold=True) + " application.")
matches = detect_flask_apps()
click.echo("What's the " + click.style("modular path", bold=True) + " to your app's function?")
click.echo("This will likely be something like 'your_module.app'.")
app_function = None
while app_function in [None, '']:
if matches:
click.echo("We discovered: " + click.style(', '.join('{}'.format(i) for v, i in enumerate(matches)), bold=True))
app_function = raw_input("Where is your app's function? (default '%s'): " % matches[0]) or matches[0]
else:
app_function = raw_input("Where is your app's function?: ")
app_function = app_function.replace("'", "")
app_function = app_function.replace('"', "")
# TODO: Create VPC?
# Memory size? Time limit?
# Domain? LE keys? Region?
# 'Advanced Settings' mode?
# Globalize
click.echo("\nYou can optionally deploy to " + click.style("all available regions", bold=True) + " in order to provide fast global service.")
click.echo("If you are using Zappa for the first time, you probably don't want to do this!")
global_deployment = False
while True:
global_type = raw_input("Would you like to deploy this application to " + click.style("globally", bold=True) + "? (default 'n') [y/n/(p)rimary]: ")
if not global_type:
break
if global_type.lower() in ["y", "yes", "p", "primary"]:
global_deployment = True
break
if global_type.lower() in ["n", "no"]:
global_deployment = False
break
if global_deployment:
regions = API_GATEWAY_REGIONS
if global_type.lower() in ["p", "primary"]:
envs = [{env + '_' + region.replace('-', '_'): { 'aws_region': region}} for region in regions if '-1' in region]
else:
envs = [{env + '_' + region.replace('-', '_'): { 'aws_region': region}} for region in regions]
else:
region = None # assume system default
envs = [{env: {}}]
zappa_settings = {}
for each_env in envs:
# Honestly, this could be cleaner.
env_name = each_env.keys()[0]
env_dict = each_env[env_name]
env_bucket = bucket
if global_deployment:
env_bucket = bucket.replace('-', '_') + '_' + env_name
env_zappa_settings = {
env_name: {
's3_bucket': env_bucket,
}
}
if env_dict.has_key('aws_region'):
env_zappa_settings[env_name]['aws_region'] = env_dict.get('aws_region')
zappa_settings.update(env_zappa_settings)
if has_django:
zappa_settings[env_name]['django_settings'] = django_settings
else:
zappa_settings[env_name]['app_function'] = app_function
import json  # hjson is fine for loading, but not for writing.
zappa_settings_json = json.dumps(zappa_settings, sort_keys=True, indent=4)
click.echo("\nOkay, here's your " + click.style("zappa_settings.js", bold=True) + ":\n")
click.echo(click.style(zappa_settings_json, fg="yellow", bold=False))
confirm = raw_input("\nDoes this look " + click.style("okay", bold=True, fg="green") + "? (default 'y') [y/n]: ") or 'yes'
if confirm.lower()[0] != 'y':
click.echo("" + click.style("Sorry", bold=True, fg='red') + " to hear that! Please init again.")
return
# Write
with open("zappa_settings.json", "w") as zappa_settings_file:
zappa_settings_file.write(zappa_settings_json)
if global_deployment:
click.echo("\n" + click.style("Done", bold=True) + "! You can also " + click.style("deploy all", bold=True) + " by executing:\n")
click.echo(click.style("\t$ zappa deploy --all", bold=True))
click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n")
click.echo(click.style("\t$ zappa update --all", bold=True))
else:
click.echo("\n" + click.style("Done", bold=True) + "! Now you can " + click.style("deploy", bold=True) + " your Zappa application by executing:\n")
click.echo(click.style("\t$ zappa deploy %s" % env, bold=True))
click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n")
click.echo(click.style("\t$ zappa update %s" % env, bold=True))
click.echo("\nTo learn more, check out our project page on " + click.style("GitHub", bold=True) +
" here: " + click.style("https://github.com/Miserlou/Zappa", fg="cyan", bold=True))
click.echo("and stop by our " + click.style("Slack", bold=True) + " channel here: " +
click.style("http://bit.do/zappa", fg="cyan", bold=True))
click.echo("\nEnjoy!,")
click.echo(" ~ Team " + click.style("Zappa", bold=True) + "!")
return
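# --- Illustrative sketch (not part of the original source): for a single stage,
# the zappa_settings.json written above would look roughly like this. The stage
# name, bucket and settings module below are hypothetical placeholders.
#
#   {
#       "dev": {
#           "django_settings": "your_project.settings",
#           "s3_bucket": "zappa-abc123xyz"
#       }
#   }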
def certify(self, no_cleanup=False):
"""
Register or update a domain certificate for this env.
"""
# Give warning on --no-cleanup
if no_cleanup:
clean_up = False
click.echo(click.style("Warning!", fg="red", bold=True) + " You are calling certify with " +
click.style("--no-cleanup", bold=True) +
". Your certificate files will remain in the system temporary directory after this command executes!")
else:
clean_up = True
# Make sure this isn't already deployed.
deployed_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
if len(deployed_versions) == 0:
raise ClickException("This application " + click.style("isn't deployed yet", fg="red") +
" - did you mean to call " + click.style("deploy", bold=True) + "?")
# Fetch the Let's Encrypt account key and install it to /tmp/account.key
account_key_location = self.stage_config.get('lets_encrypt_key')
domain = self.stage_config.get('domain')
cert_location = self.stage_config.get('certificate', None)
cert_key_location = self.stage_config.get('certificate_key', None)
cert_chain_location = self.stage_config.get('certificate_chain', None)
if not domain:
raise ClickException("Can't certify a domain without " + click.style("domain", fg="red", bold=True) + " configured!")
if not cert_location:
if not account_key_location:
raise ClickException("Can't certify a domain without " + click.style("lets_encrypt_key", fg="red", bold=True) + " configured!")
if account_key_location.startswith('s3://'):
bucket, key_name = parse_s3_url(account_key_location)
self.zappa.s3_client.download_file(bucket, key_name, '/tmp/account.key')
else:
from shutil import copyfile
copyfile(account_key_location, '/tmp/account.key')
else:
if not cert_location or not cert_key_location or not cert_chain_location:
raise ClickException("Can't certify a domain without " +
click.style("certificate, certificate_key and certificate_chain", fg="red", bold=True) + " configured!")
# Read the supplied certificates.
with open(cert_location) as f:
certificate_body = f.read()
with open(cert_key_location) as f:
certificate_private_key = f.read()
with open(cert_chain_location) as f:
certificate_chain = f.read()
click.echo("Certifying domain " + click.style(domain, fg="green", bold=True) + "..")
# Get cert and update domain.
if not cert_location:
from letsencrypt import get_cert_and_update_domain, cleanup
cert_success = get_cert_and_update_domain(
self.zappa,
self.lambda_name,
self.api_stage,
domain,
clean_up
)
else:
if not self.zappa.get_domain_name(domain):
self.zappa.create_domain_name(
domain,
domain + "-Zappa-Cert",
certificate_body,
certificate_private_key,
certificate_chain,
self.lambda_name,
self.api_stage
)
print("Created a new domain name. Please note that it can take up to 40 minutes for this domain to be "
"created and propagated through AWS, but it requires no further work on your part.")
else:
self.zappa.update_domain_name(
domain,
domain + "-Zappa-Cert",
certificate_body,
certificate_private_key,
certificate_chain
)
cert_success = True
# Deliberately undocumented feature (for now, at least.)
# We are giving the user the ability to shoot themselves in the foot.
# _This is probably not a good idea._
# However, I am sick and tired of hitting the Let's Encrypt cert
# limit while testing.
if clean_up:
cleanup()
if cert_success:
click.echo("Certificate " + click.style("updated", fg="green", bold=True) + "!")
else:
click.echo(click.style("Failed", fg="red", bold=True) + " to generate or install certificate! :(")
click.echo("\n==============\n")
shamelessly_promote()
##
# Utility
##
def callback(self, position):
"""
Allows the execution of custom code between creation of the zip file and deployment to AWS.
:return: None
"""
callbacks = self.stage_config.get('callbacks', {})
callback = callbacks.get(position)
if callback:
(mod_path, cb_func_name) = callback.rsplit('.', 1)
try: # Prefer callback in working directory
if mod_path.count('.') >= 1: # Callback function is nested in a folder
(mod_folder_path, mod_name) = mod_path.rsplit('.', 1)
mod_folder_path_fragments = mod_folder_path.split('.')
working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments)
else:
mod_name = mod_path
working_dir = os.getcwd()
working_dir_importer = pkgutil.get_importer(working_dir)
module_ = working_dir_importer.find_module(mod_name).load_module(mod_name)
except (ImportError, AttributeError):
try: # Callback func might be in virtualenv
module_ = importlib.import_module(mod_path)
except ImportError: # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"import {position} callback ".format(position=position),
bold=True) + 'module: "{mod_path}"'.format(mod_path=click.style(mod_path, bold=True)))
if not hasattr(module_, cb_func_name): # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"find {position} callback ".format(position=position), bold=True) + 'function: "{cb_func_name}" '.format(
cb_func_name=click.style(cb_func_name, bold=True)) + 'in module "{mod_path}"'.format(mod_path=mod_path))
cb_func = getattr(module_, cb_func_name)
cb_func(self) # Call the function passing self
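# --- Illustrative sketch (not part of the original source): one way the
# 'callbacks' setting resolved above might be wired up. The position name 'zip',
# the module my_callbacks and the function after_zip are hypothetical examples.
#
#   zappa_settings.json:
#       "callbacks": { "zip": "my_callbacks.after_zip" }
#
#   my_callbacks.py:
#       def after_zip(zappa_cli):
#           # Receives the ZappaCLI instance, per cb_func(self) above.
#           print("Packaged: %s" % zappa_cli.zip_path)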
def check_for_update(self):
"""
Print a warning if there's a new Zappa version available.
"""
try:
version = pkg_resources.require("zappa")[0].version
updateable = check_new_version_available(version)
if updateable:
click.echo(click.style("Important!", fg="yellow", bold=True) +
" A new version of " + click.style("Zappa", bold=True) + " is available!")
click.echo("Upgrade with: " + click.style("pip install zappa --upgrade", bold=True))
click.echo("Visit the project page on GitHub to see the latest changes: " +
click.style("https://github.com/Miserlou/Zappa", bold=True))
except Exception as e: # pragma: no cover
print(e)
return
def load_settings(self, settings_file=None, session=None):
"""
Load the local zappa_settings file.
An existing boto session can be supplied, though this is likely for testing purposes.
Returns the loaded Zappa object.
"""
# Ensure we're passed a valid settings file.
if not settings_file:
settings_file = self.get_json_or_yaml_settings()
if not os.path.isfile(settings_file):
raise ClickException("Please configure your zappa_settings file.")
# Load up file
self.load_settings_file(settings_file)
# Make sure that the environments are valid names:
for stage_name in self.zappa_settings.keys():
try:
self.check_stage_name(stage_name)
except ValueError:
raise ValueError("API stage names must match a-zA-Z0-9_ ; '{0!s}' does not.".format(stage_name))
# Make sure that this environment is in our settings
if self.api_stage not in self.zappa_settings.keys():
raise ClickException("Please define '{0!s}' in your Zappa settings.".format(self.api_stage))
# We need a working title for this project. Use one if supplied, else cwd dirname.
if 'project_name' in self.stage_config: # pragma: no cover
self.project_name = self.stage_config['project_name']
else:
self.project_name = slugify.slugify(os.getcwd().split(os.sep)[-1])[:15]
if len(self.project_name) > 15: # pragma: no cover
click.echo(click.style("Warning", fg="red", bold=True) + "! Your " + click.style("project_name", bold=True) +
" may be too long to deploy! Please make it <16 characters.")
# The name of the actual AWS Lambda function, ex, 'helloworld-dev'
# Django's slugify doesn't replace _, but this does.
self.lambda_name = slugify.slugify(self.project_name + '-' + self.api_stage)
# Load environment-specific settings
self.s3_bucket_name = self.stage_config.get('s3_bucket', "zappa-" + ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9)))
self.vpc_config = self.stage_config.get('vpc_config', {})
self.memory_size = self.stage_config.get('memory_size', 512)
self.app_function = self.stage_config.get('app_function', None)
self.exception_handler = self.stage_config.get('exception_handler', None)
self.aws_region = self.stage_config.get('aws_region', None)
self.debug = self.stage_config.get('debug', True)
self.prebuild_script = self.stage_config.get('prebuild_script', None)
self.profile_name = self.stage_config.get('profile_name', None)
self.log_level = self.stage_config.get('log_level', "DEBUG")
self.domain = self.stage_config.get('domain', None)
self.timeout_seconds = self.stage_config.get('timeout_seconds', 30)
# Provide legacy support for `use_apigateway`, now `apigateway_enabled`.
# https://github.com/Miserlou/Zappa/issues/490
# https://github.com/Miserlou/Zappa/issues/493
self.use_apigateway = self.stage_config.get('use_apigateway', True)
if self.use_apigateway:
self.use_apigateway = self.stage_config.get('apigateway_enabled', True)
self.integration_content_type_aliases = self.stage_config.get('integration_content_type_aliases', {})
self.lambda_handler = self.stage_config.get('lambda_handler', 'handler.lambda_handler')
# DEPRECATED. https://github.com/Miserlou/Zappa/issues/456
self.remote_env_bucket = self.stage_config.get('remote_env_bucket', None)
self.remote_env_file = self.stage_config.get('remote_env_file', None)
self.remote_env = self.stage_config.get('remote_env', None)
self.settings_file = self.stage_config.get('settings_file', None)
self.django_settings = self.stage_config.get('django_settings', None)
self.manage_roles = self.stage_config.get('manage_roles', True)
self.api_key_required = self.stage_config.get('api_key_required', False)
self.api_key = self.stage_config.get('api_key')
self.iam_authorization = self.stage_config.get('iam_authorization', False)
self.cors = self.stage_config.get("cors", None)
self.lambda_description = self.stage_config.get('lambda_description', "Zappa Deployment")
self.environment_variables = self.stage_config.get('environment_variables', {})
self.check_environment(self.environment_variables)
self.authorizer = self.stage_config.get('authorizer', {})
self.zappa = Zappa( boto_session=session,
profile_name=self.profile_name,
aws_region=self.aws_region,
load_credentials=self.load_credentials
)
for setting in CUSTOM_SETTINGS:
if setting in self.stage_config:
setting_val = self.stage_config[setting]
# Read the policy file contents.
if setting.endswith('policy'):
with open(setting_val, 'r') as f:
setting_val = f.read()
setattr(self.zappa, setting, setting_val)
if self.app_function:
self.collision_warning(self.app_function)
if self.app_function[-3:] == '.py':
click.echo(click.style("Warning!", fg="red", bold=True) +
" Your app_function is pointing to a " + click.style("file and not a function", bold=True) +
"! It should probably be something like 'my_file.app', not 'my_file.py'!")
return self.zappa
def get_json_or_yaml_settings(self, settings_name="zappa_settings"):
"""
Return zappa_settings path as JSON or YAML (or TOML), as appropriate.
"""
zs_json = settings_name + ".json"
zs_yaml = settings_name + ".yml"
zs_toml = settings_name + ".toml"
# Must have at least one
if not os.path.isfile(zs_json) \
and not os.path.isfile(zs_yaml) \
and not os.path.isfile(zs_toml):
raise ClickException("Please configure a zappa_settings file.")
# Prefer JSON
if os.path.isfile(zs_json):
settings_file = zs_json
elif os.path.isfile(zs_toml):
settings_file = zs_toml
else:
settings_file = zs_yaml
return settings_file
def load_settings_file(self, settings_file=None):
"""
Load our settings file.
"""
if not settings_file:
settings_file = self.get_json_or_yaml_settings()
if not os.path.isfile(settings_file):
raise ClickException("Please configure your zappa_settings file.")
if '.yml' in settings_file:
with open(settings_file) as yaml_file:
try:
self.zappa_settings = yaml.load(yaml_file)
except ValueError: # pragma: no cover
raise ValueError("Unable to load the Zappa settings YAML. It may be malformed.")
elif '.toml' in settings_file:
with open(settings_file) as toml_file:
try:
self.zappa_settings = toml.load(toml_file)
except ValueError: # pragma: no cover
raise ValueError("Unable to load the Zappa settings TOML. It may be malformed.")
else:
with open(settings_file) as json_file:
try:
self.zappa_settings = json.load(json_file)
except ValueError: # pragma: no cover
raise ValueError("Unable to load the Zappa settings JSON. It may be malformed.")
def create_package(self):
"""
Ensure that the package can be properly configured,
and then create it.
"""
# Create the Lambda zip package (includes project and virtual environment)
# Also define the path to the handler file so it can be copied to the zip
# root for Lambda.
current_file = os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe())))
handler_file = os.path.join(current_file, 'handler.py')
# Create the zip file(s)
if self.stage_config.get('slim_handler', False):
# Create two zips. One with the application and the other with just the handler.
# https://github.com/Miserlou/Zappa/issues/510
self.zip_path = self.zappa.create_lambda_zip(
prefix=self.lambda_name,
use_precompiled_packages=self.stage_config.get('use_precompiled_packages', True),
exclude=self.stage_config.get('exclude', [])
)
# Make sure the normal venv is not included in the handler's zip
exclude = self.stage_config.get('exclude', [])
cur_venv = self.zappa.get_current_venv()
exclude.append(cur_venv.split('/')[-1])
self.handler_path = self.zappa.create_lambda_zip(
prefix='handler_{0!s}'.format(self.lambda_name),
venv=self.zappa.create_handler_venv(),
handler_file=handler_file,
slim_handler=True,
exclude=exclude
)
else:
# Create a single zip that has the handler and application
self.zip_path = self.zappa.create_lambda_zip(
prefix=self.lambda_name,
handler_file=handler_file,
use_precompiled_packages=self.stage_config.get('use_precompiled_packages', True),
exclude=self.stage_config.get(
'exclude',
# Exclude packages already built into the Python Lambda environment
# https://github.com/Miserlou/Zappa/issues/556
["boto3", "dateutil", "botocore", "s3transfer", "six.py", "jmespath", "concurrent"])
)
# Warn if this is too large for Lambda.
file_stats = os.stat(self.zip_path)
if file_stats.st_size > 52428800: # pragma: no cover
print('\n\nWarning: Application zip package is likely to be too large for AWS Lambda. '
'Try setting "slim_handler" to true in your Zappa settings file.\n\n')
# Throw custom settings into the zip that handles requests
if self.stage_config.get('slim_handler', False):
handler_zip = self.handler_path
else:
handler_zip = self.zip_path
with zipfile.ZipFile(handler_zip, 'a') as lambda_zip:
settings_s = "# Generated by Zappa\n"
if self.app_function:
if '.' not in self.app_function: # pragma: no cover
raise ClickException("Your " + click.style("app_function", fg='red', bold=True) + " value is not a modular path." +
" It needs to be in the format `" + click.style("your_module.your_app_object", bold=True) + "`.")
app_module, app_function = self.app_function.rsplit('.', 1)
settings_s = settings_s + "APP_MODULE='{0!s}'\nAPP_FUNCTION='{1!s}'\n".format(app_module, app_function)
if self.exception_handler:
settings_s += "EXCEPTION_HANDLER='{0!s}'\n".format(self.exception_handler)
else:
settings_s += "EXCEPTION_HANDLER=None\n"
if self.debug:
settings_s = settings_s + "DEBUG=True\n"
else:
settings_s = settings_s + "DEBUG=False\n"
settings_s = settings_s + "LOG_LEVEL='{0!s}'\n".format((self.log_level))
# If we're on a domain, we don't need to define the /<<env>> in
# the WSGI PATH
if self.domain:
settings_s = settings_s + "DOMAIN='{0!s}'\n".format((self.domain))
else:
settings_s = settings_s + "DOMAIN=None\n"
# Pass through remote config bucket and path
if self.remote_env:
settings_s = settings_s + "REMOTE_ENV='{0!s}'\n".format(
self.remote_env
)
# DEPRECATED. Use remote_env instead.
elif self.remote_env_bucket and self.remote_env_file:
settings_s = settings_s + "REMOTE_ENV='s3://{0!s}/{1!s}'\n".format(
self.remote_env_bucket, self.remote_env_file
)
# Local envs
env_dict = {}
if self.aws_region:
env_dict['AWS_REGION'] = self.aws_region
env_dict.update(dict(self.environment_variables))
# Environment variable keys can't be Unicode
# https://github.com/Miserlou/Zappa/issues/604
try:
env_dict = dict((k.encode('ascii'), v) for (k, v) in env_dict.items())
except Exception: # pragma: nocover
raise ValueError("Environment variable keys must not be unicode.")
settings_s = settings_s + "ENVIRONMENT_VARIABLES={0}\n".format(
env_dict
)
# We can be environment-aware
settings_s = settings_s + "API_STAGE='{0!s}'\n".format((self.api_stage))
settings_s = settings_s + "PROJECT_NAME='{0!s}'\n".format((self.project_name))
if self.settings_file:
settings_s = settings_s + "SETTINGS_FILE='{0!s}'\n".format((self.settings_file))
else:
settings_s = settings_s + "SETTINGS_FILE=None\n"
if self.django_settings:
settings_s = settings_s + "DJANGO_SETTINGS='{0!s}'\n".format((self.django_settings))
else:
settings_s = settings_s + "DJANGO_SETTINGS=None\n"
# If slim handler, path to project zip
if self.stage_config.get('slim_handler', False):
settings_s += "ZIP_PATH='s3://{0!s}/{1!s}_current_project.zip'\n".format(self.s3_bucket_name, self.project_name)
# AWS Events function mapping
event_mapping = {}
events = self.stage_config.get('events', [])
for event in events:
arn = event.get('event_source', {}).get('arn')
function = event.get('function')
if arn and function:
event_mapping[arn] = function
settings_s = settings_s + "AWS_EVENT_MAPPING={0!s}\n".format(event_mapping)
# Authorizer config
authorizer_function = self.authorizer.get('function', None)
if authorizer_function:
settings_s += "AUTHORIZER_FUNCTION='{0!s}'\n".format(authorizer_function)
# Copy our Django app into root of our package.
# It doesn't work otherwise.
if self.django_settings:
base = __file__.rsplit(os.sep, 1)[0]
django_py = os.path.join(base, 'ext', 'django_zappa.py')
lambda_zip.write(django_py, 'django_zappa_app.py')
# Lambda requires a specific chmod
temp_settings = tempfile.NamedTemporaryFile(delete=False)
os.chmod(temp_settings.name, 0o644)
temp_settings.write(settings_s)
temp_settings.close()
lambda_zip.write(temp_settings.name, 'zappa_settings.py')
os.remove(temp_settings.name)
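# --- Illustrative sketch (not part of the original source): the zappa_settings.py
# written into the zip above consists of plain assignments along these lines
# (the values shown are hypothetical placeholders):
#
#   # Generated by Zappa
#   APP_MODULE='my_module'
#   APP_FUNCTION='app'
#   EXCEPTION_HANDLER=None
#   DEBUG=True
#   LOG_LEVEL='DEBUG'
#   DOMAIN=None
#   API_STAGE='dev'
#   PROJECT_NAME='my-project'
#   SETTINGS_FILE=None
#   DJANGO_SETTINGS=None
#   AWS_EVENT_MAPPING={}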
def remove_local_zip(self):
"""
Remove our local zip file.
"""
if self.stage_config.get('delete_local_zip', True):
try:
if os.path.isfile(self.zip_path):
os.remove(self.zip_path)
if self.handler_path and os.path.isfile(self.handler_path):
os.remove(self.handler_path)
except Exception as e: # pragma: no cover
sys.exit(-1)
def remove_uploaded_zip(self):
"""
Remove the local and S3 zip file after uploading and updating.
"""
# Remove the uploaded zip from S3, because it is now registered.
if self.stage_config.get('delete_s3_zip', True):
self.zappa.remove_from_s3(self.zip_path, self.s3_bucket_name)
if self.stage_config.get('slim_handler', False):
# Also remove the handler zip, but keep the project zip on S3, since the slim handler downloads it at runtime.
self.zappa.remove_from_s3(self.handler_path, self.s3_bucket_name)
def on_exit(self):
"""
Cleanup after the command finishes.
Always called: SystemExit, KeyboardInterrupt and any other Exception that occurs.
"""
if self.zip_path:
self.remove_uploaded_zip()
self.remove_local_zip()
def print_logs(self, logs, colorize=True, http=False, non_http=False):
"""
Parse, filter and print logs to the console.
"""
for log in logs:
timestamp = log['timestamp']
message = log['message']
if "START RequestId" in message:
continue
if "REPORT RequestId" in message:
continue
if "END RequestId" in message:
continue
if not colorize:
if http:
if self.is_http_log_entry(message.strip()):
print("[" + str(timestamp) + "] " + message.strip())
elif non_http:
if not self.is_http_log_entry(message.strip()):
print("[" + str(timestamp) + "] " + message.strip())
else:
print("[" + str(timestamp) + "] " + message.strip())
else:
if http:
if self.is_http_log_entry(message.strip()):
click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()))
elif non_http:
if not self.is_http_log_entry(message.strip()):
click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()))
else:
click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()))
def is_http_log_entry(self, string):
"""
Determines if a log entry is an HTTP-formatted log string or not.
"""
# Debug event filter
if 'Zappa Event' in string:
return False
# IP address filter
for token in string.replace('\t', ' ').split(' '):
try:
if token.count('.') == 3 and token.replace('.', '').isnumeric():
return True
except Exception: # pragma: no cover
pass
return False
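# --- Illustrative note (not part of the original source): under the heuristic
# above, an access-log line such as '1.2.3.4 - - "GET /" 200' is classified as
# HTTP (it contains a dotted, all-numeric token), while a line containing
# 'Zappa Event' is not.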
def colorize_log_entry(self, string):
"""
Apply various heuristics to return a colorized version of a string.
If these fail, simply return the string in plaintext.
"""
final_string = string
try:
# First, do stuff in square brackets
inside_squares = re.findall(r'\[([^]]*)\]', string)
for token in inside_squares:
if token in ['CRITICAL', 'ERROR', 'WARNING', 'DEBUG', 'INFO', 'NOTSET']:
final_string = final_string.replace('[' + token + ']', click.style("[", fg='cyan') + click.style(token, fg='cyan', bold=True) + click.style("]", fg='cyan'))
else:
final_string = final_string.replace('[' + token + ']', click.style("[", fg='cyan') + click.style(token, bold=True) + click.style("]", fg='cyan'))
# Then do quoted strings
quotes = re.findall(r'"[^"]*"', string)
for token in quotes:
final_string = final_string.replace(token, click.style(token, fg="yellow"))
# And UUIDs
for token in final_string.replace('\t', ' ').split(' '):
try:
if token.count('-') == 4 and token.replace('-', '').isalnum():
final_string = final_string.replace(token, click.style(token, fg="magenta"))
except Exception: # pragma: no cover
pass
# And IP addresses
try:
if token.count('.') == 3 and token.replace('.', '').isnumeric():
final_string = final_string.replace(token, click.style(token, fg="red"))
except Exception: # pragma: no cover
pass
# And status codes
try:
if token in ['200']:
final_string = final_string.replace(token, click.style(token, fg="green"))
if token in ['400', '401', '403', '404', '405', '500']:
final_string = final_string.replace(token, click.style(token, fg="red"))
except Exception: # pragma: no cover
pass
# And Zappa Events
try:
if "Zappa Event:" in final_string:
final_string = final_string.replace("Zappa Event:", click.style("Zappa Event:", bold=True, fg="green"))
except Exception: # pragma: no cover
pass
# And dates
for token in final_string.split('\t'):
try:
is_date = parser.parse(token)
final_string = final_string.replace(token, click.style(token, fg="green"))
except Exception: # pragma: no cover
pass
final_string = final_string.replace('\t', ' ').replace(' ', ' ')
if final_string[0] != ' ':
final_string = ' ' + final_string
return final_string
except Exception as e: # pragma: no cover
return string
def execute_prebuild_script(self):
"""
Parse and execute the prebuild_script from the zappa_settings.
"""
(pb_mod_path, pb_func) = self.prebuild_script.rsplit('.', 1)
try: # Prefer prebuild script in working directory
if pb_mod_path.count('.') >= 1: # Prebuild script func is nested in a folder
(mod_folder_path, mod_name) = pb_mod_path.rsplit('.', 1)
mod_folder_path_fragments = mod_folder_path.split('.')
working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments)
else:
mod_name = pb_mod_path
working_dir = os.getcwd()
working_dir_importer = pkgutil.get_importer(working_dir)
module_ = working_dir_importer.find_module(mod_name).load_module(mod_name)
except (ImportError, AttributeError):
try: # Prebuild func might be in virtualenv
module_ = importlib.import_module(pb_mod_path)
except ImportError: # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"import prebuild script ", bold=True) + 'module: "{pb_mod_path}"'.format(
pb_mod_path=click.style(pb_mod_path, bold=True)))
if not hasattr(module_, pb_func): # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"find prebuild script ", bold=True) + 'function: "{pb_func}" '.format(
pb_func=click.style(pb_func, bold=True)) + 'in module "{pb_mod_path}"'.format(
pb_mod_path=pb_mod_path))
prebuild_function = getattr(module_, pb_func)
prebuild_function() # Call the function
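# --- Illustrative sketch (not part of the original source): the prebuild_script
# setting is resolved much like the callbacks above. The module my_hooks and the
# function build_assets are hypothetical examples.
#
#   zappa_settings.json:
#       "prebuild_script": "my_hooks.build_assets"
#
#   my_hooks.py:
#       def build_assets():
#           # Called with no arguments, per prebuild_function() above.
#           pass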
def collision_warning(self, item):
"""
Given a string, print a warning if this could
collide with a Zappa core package module.
Use for app functions and events.
"""
namespace_collisions = [
"zappa.", "wsgi.", "middleware.", "handler.", "util.", "letsencrypt.", "cli."
]
for namespace_collision in namespace_collisions:
if namespace_collision in item:
click.echo(click.style("Warning!", fg="red", bold=True) +
" You may have a namespace collision with " + click.style(item, bold=True) +
"! You may want to rename that file.")
def deploy_api_gateway(self, api_id):
cache_cluster_enabled = self.stage_config.get('cache_cluster_enabled', False)
cache_cluster_size = str(self.stage_config.get('cache_cluster_size', .5))
endpoint_url = self.zappa.deploy_api_gateway(
api_id=api_id,
stage_name=self.api_stage,
cache_cluster_enabled=cache_cluster_enabled,
cache_cluster_size=cache_cluster_size,
cloudwatch_log_level=self.stage_config.get('cloudwatch_log_level', 'OFF'),
cloudwatch_data_trace=self.stage_config.get('cloudwatch_data_trace', False),
cloudwatch_metrics_enabled=self.stage_config.get('cloudwatch_metrics_enabled', False),
)
return endpoint_url
####################################################################
# Main
####################################################################
def shamelessly_promote():
"""
Shamelessly promote our little community.
"""
click.echo("Need " + click.style("help", fg='green', bold=True) +
"? Found a " + click.style("bug", fg='green', bold=True) +
"? Let us " + click.style("know", fg='green', bold=True) + "! :D")
click.echo("File bug reports on " + click.style("GitHub", bold=True) + " here: "
+ click.style("https://github.com/Miserlou/Zappa", fg='cyan', bold=True))
click.echo("And join our " + click.style("Slack", bold=True) + " channel here: "
+ click.style("https://slack.zappa.io", fg='cyan', bold=True))
click.echo("Love!,")
click.echo(" ~ Team " + click.style("Zappa", bold=True) + "!")
def handle(): # pragma: no cover
"""
Main program execution handler.
"""
cli = ZappaCLI()
try:
sys.exit(cli.handle())
except SystemExit as e: # pragma: no cover
cli.on_exit()
sys.exit(e.code)
except KeyboardInterrupt: # pragma: no cover
cli.on_exit()
sys.exit(130)
except Exception as e:
cli.on_exit()
click.echo("Oh no! An " + click.style("error occurred", fg='red', bold=True) + "! :(")
click.echo("\n==============\n")
import traceback
traceback.print_exc()
click.echo("\n==============\n")
shamelessly_promote()
sys.exit(-1)
if __name__ == '__main__': # pragma: no cover
handle()
|
{
"content_hash": "e4eb5618c03ef048b79fe4b84a04689f",
"timestamp": "",
"source": "github",
"line_count": 2087,
"max_line_length": 177,
"avg_line_length": 41.94489698131289,
"alnum_prop": 0.5502004820708484,
"repo_name": "parroyo/Zappa",
"id": "a8123298ec1e8275dec0b400d8a64437a8928d52",
"size": "87982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zappa/cli.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "323663"
},
{
"name": "Shell",
"bytes": "147"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('organization', '0011_auto_20160619_2345'),
('organization', '0015_auto_20160619_1852'),
]
operations = [
]
|
{
"content_hash": "20217bd3128b22d24a4ae0db88861049",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 52,
"avg_line_length": 19.642857142857142,
"alnum_prop": 0.6472727272727272,
"repo_name": "sakset/getyourdata",
"id": "4478bfea2d880bc234fd025f9732eec27ddf2927",
"size": "347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "getyourdata/organization/migrations/0016_merge.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2791"
},
{
"name": "HTML",
"bytes": "64735"
},
{
"name": "JavaScript",
"bytes": "1519"
},
{
"name": "Python",
"bytes": "218082"
},
{
"name": "Shell",
"bytes": "2722"
}
],
"symlink_target": ""
}
|
import json
import os
from unittest.mock import patch
from importlib import reload
from django.apps import apps
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from django.db import models
from django.test import Client, TestCase, override_settings
from django.urls import reverse
from django_summernote.utils import (get_attachment_model,
get_attachment_storage)
IMAGE_FILE = 'django_summernote/static/summernote/summernote.png'
class DjangoSummernoteTest(TestCase):
def setUp(self):
self.username = 'lqez'
self.password = 'ohmygoddess'
self.site = AdminSite()
self.app_config = apps.get_app_config('django_summernote')
self.app_config.update_config()
self.summernote_config = self.app_config.config
def test_base(self):
self.assertTrue(True)
def test_url(self):
url = reverse('django_summernote-editor', kwargs={'id': 'foobar'})
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'summernote.min.js')
self.assertContains(response, 'summernote.css')
def test_widget(self):
from django_summernote.widgets import SummernoteWidget
widget = SummernoteWidget()
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
url = reverse('django_summernote-editor', kwargs={'id': 'id_foobar'})
assert url in html
assert 'id="id_foobar"' in html
def test_widget_inplace(self):
from django_summernote.widgets import SummernoteInplaceWidget
widget = SummernoteInplaceWidget()
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert 'summernote' in html
def test_form(self):
from django import forms
from django_summernote.widgets import SummernoteWidget
class SimpleForm(forms.Form):
foobar = forms.CharField(widget=SummernoteWidget())
f = SimpleForm()
html = f.as_p()
url = reverse('django_summernote-editor', kwargs={'id': 'id_foobar'})
assert url in html
assert 'id="id_foobar"' in html
def test_formfield(self):
from django import forms
from django_summernote.fields import SummernoteTextFormField
class SimpleForm(forms.Form):
foobar = SummernoteTextFormField()
f = SimpleForm()
html = f.as_p()
url = reverse('django_summernote-editor', kwargs={'id': 'id_foobar'})
assert url in html
assert 'id="id_foobar"' in html
illegal_tags = '<script></script>'
form_field = SummernoteTextFormField()
cleaned_text = form_field.clean(illegal_tags)
self.assertEqual(cleaned_text, '<script></script>')
def test_field(self):
from django import forms
from django_summernote.fields import SummernoteTextField
class SimpleModel1(models.Model):
foobar = SummernoteTextField()
class SimpleForm(forms.ModelForm):
class Meta:
model = SimpleModel1
fields = "__all__"
f = SimpleForm()
html = f.as_p()
url = reverse('django_summernote-editor', kwargs={'id': 'id_foobar'})
assert url in html
assert 'id="id_foobar"' in html
illegal_tags = '<script></script>'
model_field = SummernoteTextField()
model_instance = SimpleModel1()
cleaned_text = model_field.clean(illegal_tags, model_instance)
self.assertEqual(cleaned_text, '<script></script>')
def test_empty(self):
from django import forms
from django_summernote.widgets import SummernoteWidget
class SimpleForm(forms.Form):
foobar = forms.CharField(widget=SummernoteWidget())
should_be_parsed_as_empty = '<p><br></p>'
should_not_be_parsed_as_empty = '<p>lorem ipsum</p>'
f = SimpleForm({'foobar': should_be_parsed_as_empty})
assert not f.is_valid()
assert not f.cleaned_data.get('foobar')
f = SimpleForm({'foobar': should_not_be_parsed_as_empty})
assert f.is_valid()
assert f.cleaned_data.get('foobar')
def test_attachment(self):
url = reverse('django_summernote-upload_attachment')
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 200)
self.assertContains(
response, '"name": "%s"' % os.path.basename(IMAGE_FILE))
self.assertContains(response, '"url": ')
self.assertContains(response, '"size": ')
def test_attachment_with_custom_storage(self):
self.summernote_config['attachment_storage_class'] = \
'django.core.files.storage.DefaultStorage'
file_field = get_attachment_model()._meta.get_field('file')
original_storage = file_field.storage
file_field.storage = get_attachment_storage()
url = reverse('django_summernote-upload_attachment')
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 200)
file_field.storage = original_storage
def test_attachment_with_bad_storage(self):
from django.core.exceptions import ImproperlyConfigured
# ValueError
self.summernote_config['attachment_storage_class'] = \
'wow_no_dot_storage_class_name'
with self.assertRaises(ImproperlyConfigured):
from django_summernote import models
reload(models)
# ImportError
self.summernote_config['attachment_storage_class'] = \
'wow.such.fake.storage'
with self.assertRaises(ImproperlyConfigured):
from django_summernote import models
reload(models)
# AttributeError
self.summernote_config['attachment_storage_class'] = \
'django.core.files.storage.DogeStorage'
with self.assertRaises(ImproperlyConfigured):
from django_summernote import models
reload(models)
# IOError with patching storage class
from dummyplug.storage import IOErrorStorage
file_field = get_attachment_model()._meta.get_field('file')
original_storage = file_field.storage
file_field.storage = IOErrorStorage()
url = reverse('django_summernote-upload_attachment')
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertNotEqual(response.status_code, 200)
file_field.storage = original_storage
def test_get_attachment_model(self):
from django.core.exceptions import ImproperlyConfigured
# ValueError
self.summernote_config['attachment_model'] = \
'wow_no_dot_model_designation'
with self.assertRaises(ImproperlyConfigured):
get_attachment_model()
# LookupError
self.summernote_config['attachment_model'] = \
'wow.not.installed.app.model'
with self.assertRaises(ImproperlyConfigured):
get_attachment_model()
# Ensures proper inheritance, using built-in User class to test
self.summernote_config['attachment_model'] = \
'auth.User'
with self.assertRaises(ImproperlyConfigured):
get_attachment_model()
def test_attachment_bad_request(self):
url = reverse('django_summernote-upload_attachment')
response = self.client.get(url)
self.assertNotEqual(response.status_code, 200)
def test_attachment_no_attachment(self):
url = reverse('django_summernote-upload_attachment')
response = self.client.post(url)
self.assertNotEqual(response.status_code, 200)
def test_attachment_filesize_exceed(self):
url = reverse('django_summernote-upload_attachment')
size = os.path.getsize(IMAGE_FILE)
old_limit = self.summernote_config['attachment_filesize_limit']
self.summernote_config['attachment_filesize_limit'] = size - 1
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertNotEqual(response.status_code, 200)
self.assertEqual(response.json()['message'], 'File size exceeds the limit allowed and cannot be saved')
self.summernote_config['attachment_filesize_limit'] = old_limit
def test_attachment_require_authentication(self):
url = reverse('django_summernote-upload_attachment')
self.summernote_config['attachment_require_authentication'] = True
self.user = User.objects.create_user(
username=self.username, password=self.password)
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 403)
c = Client()
c.login(username=self.username, password=self.password)
with open(IMAGE_FILE, 'rb') as fp:
response = c.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 200)
self.summernote_config['attachment_require_authentication'] = False
@patch('django_summernote.views.logger')
def test_attachment_disable_attachment(self, mock_logging):
url = reverse('django_summernote-upload_attachment')
self.summernote_config['disable_attachment'] = True
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 403)
self.assertDictEqual(response.json(), {"status": "false", "message": "Attachment module is disabled"})
self.assertTrue(mock_logging.error.called)
self.summernote_config['disable_attachment'] = False
@patch('django_summernote.views.logger')
def test_wrong_attachment(self, mock_logging):
url = reverse('django_summernote-upload_attachment')
try:
from PIL import Image # noqa: F401
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 200)
with open(__file__, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 400)
self.assertDictEqual(
response.json(),
{
"status": "false",
"message": "Upload a valid image. The file you uploaded was either not an image or a corrupted image."
}
)
self.assertTrue(mock_logging.error.called)
except ImportError:
# Without PIL, we cannot check whether the uploaded attachment is a valid image
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 200)
def test_attachment_not_require_authentication(self):
url = reverse('django_summernote-upload_attachment')
self.summernote_config['attachment_require_authentication'] = False
self.user = User.objects.create_user(
username=self.username, password=self.password)
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 200)
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_attachment_with_thousand_separator_option(self):
url = reverse('django_summernote-upload_attachment')
size = os.path.getsize(IMAGE_FILE)
with open(IMAGE_FILE, 'rb') as fp:
response = self.client.post(url, {'files': [fp]})
self.assertEqual(response.status_code, 200)
res = json.loads(response.content.decode('utf-8'))
self.assertEqual(res['files'][0]['size'], size)
def test_lang_specified(self):
old_lang = self.summernote_config['summernote']['lang']
self.summernote_config['summernote']['lang'] = 'ko-KR'
from django_summernote import widgets
widget = widgets.SummernoteInplaceWidget()
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
self.summernote_config['summernote']['lang'] = old_lang
assert '"lang": "ko-KR"' in html
def test_lang_accept_language(self):
from django.utils.translation import activate
activate('fr')
from django_summernote import widgets
widget = widgets.SummernoteInplaceWidget()
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert '"lang": "fr-FR"' in html
def test_admin_model(self):
from django_summernote.admin import SummernoteModelAdmin
from django_summernote.admin import SummernoteInlineModelAdmin
from django_summernote.widgets import SummernoteWidget
class SimpleModel(models.Model):
foobar = models.TextField()
class SimpleModelAdmin(SummernoteModelAdmin):
pass
ma = SimpleModelAdmin(SimpleModel, self.site)
assert isinstance(
ma.get_form(None).base_fields['foobar'].widget,
SummernoteWidget
)
class SimpleParentModel(models.Model):
foobar = models.TextField()
class SimpleModel2(models.Model):
foobar = models.TextField()
parent = models.ForeignKey(SimpleParentModel, on_delete=models.CASCADE)
class SimpleModelInline(SummernoteInlineModelAdmin):
model = SimpleModel2
class SimpleParentModelAdmin(SummernoteModelAdmin):
inlines = [SimpleModelInline]
ma = SimpleParentModelAdmin(SimpleParentModel, self.site)
assert isinstance(
ma.get_form(None).base_fields['foobar'].widget,
SummernoteWidget
)
def test_admin_model_inplace(self):
from django_summernote.admin import SummernoteModelAdmin
from django_summernote.widgets import SummernoteInplaceWidget
class SimpleModel3(models.Model):
foobar = models.TextField()
self.summernote_config['iframe'] = False
class SimpleModelAdmin(SummernoteModelAdmin):
pass
ma = SimpleModelAdmin(SimpleModel3, self.site)
assert isinstance(
ma.get_form(None).base_fields['foobar'].widget,
SummernoteInplaceWidget
)
self.summernote_config['iframe'] = True
def test_admin_summernote_fields(self):
from django_summernote.admin import SummernoteModelAdmin
from django_summernote.widgets import SummernoteWidget
class SimpleModel4(models.Model):
foo = models.TextField()
bar = models.TextField()
class SimpleModelAdmin(SummernoteModelAdmin):
summernote_fields = ('foo',)
ma = SimpleModelAdmin(SimpleModel4, self.site)
assert isinstance(
ma.get_form(None).base_fields['foo'].widget,
SummernoteWidget
)
assert not isinstance(
ma.get_form(None).base_fields['bar'].widget,
SummernoteWidget
)
def test_attachment_admin_default_name(self):
from django_summernote.admin import AttachmentAdmin
from django_summernote.models import Attachment
from django.core.files import File
import os
aa = AttachmentAdmin(Attachment, self.site)
attachment = Attachment()
with open(IMAGE_FILE, 'rb') as fp:
django_file = File(fp)
django_file.name = os.path.basename(django_file.name)
attachment.file = django_file
self.assertFalse(aa.form().is_valid())
self.assertEqual(attachment.name, None)
aa.save_model(None, attachment, None, None)
self.assertEqual(attachment.name, os.path.basename(IMAGE_FILE))
def test_attachment_as_string(self):
from django_summernote.models import Attachment
from django.core.files import File
import os
attachment = Attachment()
with open(IMAGE_FILE, 'rb') as fp:
djangoFile = File(fp)
djangoFile.name = os.path.basename(djangoFile.name)
attachment.file = djangoFile
attachment.save()
self.assertEqual(str(attachment), djangoFile.name)
def test_config_allow_blank_values(self):
from django_summernote.widgets import SummernoteWidget
self.summernote_config['summernote']['tableClassName'] = ''
widget = SummernoteWidget()
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert '"tableClassName": ""' in html
def test_widgets_with_attributes(self):
from django_summernote.widgets import (SummernoteWidget, SummernoteInplaceWidget)
widget = SummernoteInplaceWidget(attrs={'class': 'special'})
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert 'class="special"' in html
widget = SummernoteWidget(attrs={'class': 'special'})
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert 'class="special"' in html
def test_widgets_with_adhoc_settings(self):
from django_summernote.widgets import (SummernoteWidget, SummernoteInplaceWidget)
widget = SummernoteInplaceWidget(attrs={'summernote': {'toolbar': [['font', ['bold']]]}})
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert '"toolbar": [["font", ["bold"]]]' in html
widget = SummernoteWidget(attrs={'summernote': {'toolbar': [['font', ['italic']]]}})
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert '"toolbar": [["font", ["italic"]]]' in html
def test_old_style_configs(self):
from django_summernote.widgets import (SummernoteWidget, SummernoteInplaceWidget)
OLD_STYLE_CONFIG = {
'width': '640px',
'toolbar': [
['font', ['bold']],
],
}
self.app_config._copy_old_configs(OLD_STYLE_CONFIG, self.app_config.get_default_config())
widget = SummernoteInplaceWidget()
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert '"width": "640px"' in html
assert '"height": 480' in html
assert '"toolbar": [["font", ["bold"]]]' in html
widget = SummernoteWidget()
html = widget.render(
'foobar', 'lorem ipsum', attrs={'id': 'id_foobar'}
)
assert '"width": "640px"' in html
assert '"height": 480' in html
assert '"toolbar": [["font", ["bold"]]]' in html
def test_theme_bootstrap3(self):
from django_summernote.utils import SUMMERNOTE_THEME_FILES
url = reverse('django_summernote-editor', kwargs={'id': 'id_foobar'})
response = self.client.get(url)
html = response.content.decode('utf-8')
assert SUMMERNOTE_THEME_FILES['bs3']['base_css'][0] in html
@override_settings(SUMMERNOTE_THEME='bs4')
def test_theme_bootstrap4(self):
from django_summernote.utils import SUMMERNOTE_THEME_FILES
# Force update summernote config to reset theme files
self.app_config.update_config()
url = reverse('django_summernote-editor', kwargs={'id': 'id_foobar'})
response = self.client.get(url)
html = response.content.decode('utf-8')
assert SUMMERNOTE_THEME_FILES['bs4']['base_css'][0] in html
|
{
"content_hash": "94d333a9251b25cf87e47e7cdd5e77a8",
"timestamp": "",
"source": "github",
"line_count": 564,
"max_line_length": 126,
"avg_line_length": 35.81382978723404,
"alnum_prop": 0.618545472548146,
"repo_name": "summernote/django-summernote",
"id": "c7677f5986dbaed8cba0446795c317fbb04b8b17",
"size": "20199",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "django_summernote/test_django_summernote.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "126"
},
{
"name": "HTML",
"bytes": "10453"
},
{
"name": "JavaScript",
"bytes": "249029"
},
{
"name": "Python",
"bytes": "60580"
}
],
"symlink_target": ""
}
|
import time
from uuid import getnode as get_mac
import agentRcvModule
import agentManager
import agentFreLib as freLib
from agentUtil import *
from agentSndModule import *
from hone_message import *
HostId = str(get_mac())
ControllerPort = 8866
HostRelayPort = 8877
def ToUpperLevel(jobId, flowId, level):
def push(x):
if x or isinstance(x, (int,long,float,complex)):
key = composeMiddleJobKey(jobId, flowId, level)
if key in agentRcvModule.middleJobTable:
parentAddress = agentRcvModule.middleJobTable[key].parentAddress
sequence = agentRcvModule.middleJobTable[key].lastSeq
message = HoneMessage()
message.messageType = HoneMessageType_RelayStatsIn
message.hostId = HostId
message.jobId = jobId
message.flowId = flowId
message.level = level + 1
message.sequence = sequence
message.content = x
if parentAddress == agentManager.CtrlAddress:
port = ControllerPort
else:
port = HostRelayPort
if parentAddress:
sndSocket = HostAgentRelaySndSocket(parentAddress, port)
sndSocket.sendMessage(message)
agentRcvModule.middleEvalTimestamp += '#DoneToUpperLevel${0:6f}${1}${2}${3}${4}${5}'.format(time.time(), jobId, flowId, message.level, message.sequence, parentAddress)
# LogUtil.DebugLog('lib', 'in ToUpperLevel', jobId, flowId, level, sequence)
if level == agentRcvModule.highestMiddleJobLevel:
LogUtil.EvalLog('MiddleExecution', agentRcvModule.middleEvalTimestamp)
agentRcvModule.middleEvalTimestamp = 'Begin'
return freLib.FListener(push=push)
|
{
"content_hash": "f1196a4a6dfd42e6e7a4367e447a1c02",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 183,
"avg_line_length": 43.27906976744186,
"alnum_prop": 0.6217087587318646,
"repo_name": "pupeng/hone",
"id": "ffc1a91014ecdc8d31885a518d110ae3d7d50771",
"size": "2067",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "HostAgent/agentMiddleLib.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "10752"
},
{
"name": "C++",
"bytes": "8253"
},
{
"name": "Makefile",
"bytes": "493"
},
{
"name": "Python",
"bytes": "452248"
},
{
"name": "Shell",
"bytes": "1254"
}
],
"symlink_target": ""
}
|
"""
GoGrid driver
"""
import time
import hashlib
import copy
from libcloud.utils.py3 import b
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.common.gogrid import GoGridConnection, BaseGoGridDriver
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver
from libcloud.compute.base import NodeSize, NodeImage, NodeLocation
STATE = {
"Starting": NodeState.PENDING,
"On": NodeState.RUNNING,
"On/Saving": NodeState.RUNNING,
"Off": NodeState.PENDING,
"Restarting": NodeState.REBOOTING,
"Saving": NodeState.PENDING,
"Restoring": NodeState.PENDING,
}
GOGRID_INSTANCE_TYPES = {
'512MB': {'id': '512MB',
'name': '512MB',
'ram': 512,
'disk': 30,
'bandwidth': None},
'1GB': {'id': '1GB',
'name': '1GB',
'ram': 1024,
'disk': 60,
'bandwidth': None},
'2GB': {'id': '2GB',
'name': '2GB',
'ram': 2048,
'disk': 120,
'bandwidth': None},
'4GB': {'id': '4GB',
'name': '4GB',
'ram': 4096,
'disk': 240,
'bandwidth': None},
'8GB': {'id': '8GB',
'name': '8GB',
'ram': 8192,
'disk': 480,
'bandwidth': None},
'16GB': {'id': '16GB',
'name': '16GB',
'ram': 16384,
'disk': 960,
'bandwidth': None},
'24GB': {'id': '24GB',
'name': '24GB',
'ram': 24576,
'disk': 960,
'bandwidth': None},
}
class GoGridNode(Node):
# Generating uuid based on public ip to get around missing id on
# create_node in gogrid api
#
# Used public ip since it is not mutable and specified at create time,
# so uuid of node should not change after add is completed
def get_uuid(self):
return hashlib.sha1(
b("%s:%s" % (self.public_ips, self.driver.type))
).hexdigest()
class GoGridNodeDriver(BaseGoGridDriver, NodeDriver):
"""
GoGrid node driver
"""
connectionCls = GoGridConnection
type = Provider.GOGRID
api_name = 'gogrid'
name = 'GoGrid'
website = 'http://www.gogrid.com/'
features = {"create_node": ["generates_password"]}
_instance_types = GOGRID_INSTANCE_TYPES
def __init__(self, *args, **kwargs):
"""
@inherits: :class:`NodeDriver.__init__`
"""
super(GoGridNodeDriver, self).__init__(*args, **kwargs)
def _get_state(self, element):
try:
return STATE[element['state']['name']]
except Exception:
pass
return NodeState.UNKNOWN
def _get_ip(self, element):
return element.get('ip').get('ip')
def _get_id(self, element):
return element.get('id')
def _to_node(self, element, password=None):
state = self._get_state(element)
ip = self._get_ip(element)
id = self._get_id(element)
n = GoGridNode(id=id,
name=element['name'],
state=state,
public_ips=[ip],
private_ips=[],
extra={'ram': element.get('ram').get('name'),
'description': element.get('description', '')},
driver=self.connection.driver)
if password:
n.extra['password'] = password
return n
def _to_image(self, element):
n = NodeImage(id=element['id'],
name=element['friendlyName'],
driver=self.connection.driver)
return n
def _to_images(self, object):
return [self._to_image(el)
for el in object['list']]
def _to_location(self, element):
location = NodeLocation(id=element['id'],
name=element['name'],
country="US",
driver=self.connection.driver)
return location
def _to_locations(self, object):
return [self._to_location(el)
for el in object['list']]
def list_images(self, location=None):
params = {}
if location is not None:
params["datacenter"] = location.id
images = self._to_images(
self.connection.request('/api/grid/image/list', params).object)
return images
def list_nodes(self):
"""
@inherits: :class:`NodeDriver.list_nodes`
:rtype: ``list`` of :class:`GoGridNode`
"""
passwords_map = {}
res = self._server_list()
try:
for password in self._password_list()['list']:
try:
passwords_map[password['server']['id']] = \
password['password']
except KeyError:
pass
except InvalidCredsError:
# some gogrid API keys don't have permission to access the
# password list.
pass
return [self._to_node(el, passwords_map.get(el.get('id')))
for el in res['list']]
def reboot_node(self, node):
"""
@inherits: :class:`NodeDriver.reboot_node`
:type node: :class:`GoGridNode`
"""
id = node.id
power = 'restart'
res = self._server_power(id, power)
if not res.success():
raise Exception(res.parse_error())
return True
def destroy_node(self, node):
"""
@inherits: :class:`NodeDriver.reboot_node`
:type node: :class:`GoGridNode`
"""
id = node.id
res = self._server_delete(id)
if not res.success():
raise Exception(res.parse_error())
return True
def _server_list(self):
return self.connection.request('/api/grid/server/list').object
def _password_list(self):
return self.connection.request('/api/support/password/list').object
def _server_power(self, id, power):
# power in ['start', 'stop', 'restart']
params = {'id': id, 'power': power}
return self.connection.request("/api/grid/server/power", params,
method='POST')
def _server_delete(self, id):
params = {'id': id}
return self.connection.request("/api/grid/server/delete", params,
method='POST')
def _get_first_ip(self, location=None):
ips = self.ex_list_ips(public=True, assigned=False, location=location)
try:
return ips[0].ip
except IndexError:
raise LibcloudError('No public unassigned IPs left',
GoGridNodeDriver)
def list_sizes(self, location=None):
sizes = []
for key, values in self._instance_types.items():
attributes = copy.deepcopy(values)
attributes.update({'price': self._get_size_price(size_id=key)})
sizes.append(NodeSize(driver=self.connection.driver, **attributes))
return sizes
def list_locations(self):
locations = self._to_locations(
self.connection.request('/api/common/lookup/list',
params={'lookup': 'ip.datacenter'}).object)
return locations
def ex_create_node_nowait(self, name, size, image, location=None,
ex_description=None, ex_ip=None):
"""Don't block until GoGrid allocates id for a node
but return right away with id == None.
The existence of this method is explained by the fact
that GoGrid assigns id to a node only few minutes after
creation.
:keyword name: String with a name for this new node (required)
:type name: ``str``
:keyword size: The size of resources allocated to this node .
(required)
:type size: :class:`NodeSize`
:keyword image: OS Image to boot on node. (required)
:type image: :class:`NodeImage`
:keyword ex_description: Description of a Node
:type ex_description: ``str``
:keyword ex_ip: Public IP address to use for a Node. If not
specified, first available IP address will be picked
:type ex_ip: ``str``
:rtype: :class:`GoGridNode`
"""
ip = ex_ip or self._get_first_ip(location)
params = {'name': name,
'image': image.id,
'description': ex_description or '',
'server.ram': size.id,
'ip': ip}
object = self.connection.request('/api/grid/server/add',
params=params, method='POST').object
node = self._to_node(object['list'][0])
return node
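# Illustrative sketch (not part of the original source): a caller of
# ex_create_node_nowait could poll for the id assignment itself, much as
# create_node() below does. The variable names here are hypothetical.
#
#   pending = driver.ex_create_node_nowait(name='web1', size=size, image=image)
#   while pending.id is None:
#       time.sleep(120)
#       pending = next((n for n in driver.list_nodes()
#                       if n.public_ips[0] == pending.public_ips[0]
#                       and n.id is not None), pending)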
def create_node(self, name, size, image, location=None,
ex_description=None, ex_ip=None):
"""Create a new GoGird node
@inherits: :class:`NodeDriver.create_node`
:keyword ex_description: Description of a Node
:type ex_description: ``str``
:keyword ex_ip: Public IP address to use for a Node. If not
specified, first available IP address will be picked
:type ex_ip: ``str``
:rtype: :class:`GoGridNode`
"""
node = self.ex_create_node_nowait(name=name, size=size, image=image,
ex_description=ex_description,
ex_ip=ex_ip)
timeout = 60 * 20
waittime = 0
interval = 2 * 60
while node.id is None and waittime < timeout:
nodes = self.list_nodes()
for i in nodes:
if i.public_ips[0] == node.public_ips[0] and i.id is not None:
return i
waittime += interval
time.sleep(interval)
if node.id is None:
raise Exception(
"Wasn't able to wait for id allocation for the node %s"
% str(node))
return node
def ex_save_image(self, node, name):
"""Create an image for node.
        Please refer to the GoGrid documentation for information on
        how to prepare a node for image creation:
http://wiki.gogrid.com/wiki/index.php/MyGSI
:keyword node: node to use as a base for image
:type node: :class:`GoGridNode`
:keyword name: name for new image
:type name: ``str``
:rtype: :class:`NodeImage`
"""
params = {'server': node.id,
'friendlyName': name}
object = self.connection.request('/api/grid/image/save', params=params,
method='POST').object
return self._to_images(object)[0]
def ex_edit_node(self, **kwargs):
"""Change attributes of a node.
:keyword node: node to be edited (required)
:type node: :class:`GoGridNode`
:keyword size: new size of a node (required)
:type size: :class:`NodeSize`
:keyword ex_description: new description of a node
:type ex_description: ``str``
:rtype: :class:`Node`
"""
node = kwargs['node']
size = kwargs['size']
params = {'id': node.id,
'server.ram': size.id}
if 'ex_description' in kwargs:
params['description'] = kwargs['ex_description']
object = self.connection.request('/api/grid/server/edit',
params=params).object
return self._to_node(object['list'][0])
def ex_edit_image(self, **kwargs):
"""Edit metadata of a server image.
:keyword image: image to be edited (required)
:type image: :class:`NodeImage`
        :keyword public: whether the image should be public (required)
:type public: ``bool``
:keyword ex_description: description of the image (optional)
:type ex_description: ``str``
:keyword name: name of the image
:type name: ``str``
:rtype: :class:`NodeImage`
"""
image = kwargs['image']
public = kwargs['public']
params = {'id': image.id,
'isPublic': str(public).lower()}
if 'ex_description' in kwargs:
params['description'] = kwargs['ex_description']
if 'name' in kwargs:
params['friendlyName'] = kwargs['name']
object = self.connection.request('/api/grid/image/edit',
params=params).object
return self._to_image(object['list'][0])
def ex_list_ips(self, **kwargs):
"""Return list of IP addresses assigned to
the account.
:keyword public: set to True to list only
public IPs or False to list only
                         private IPs. Set to None or omit it
                         to avoid filtering by type
:type public: ``bool``
:keyword assigned: set to True to list only addresses
assigned to servers, False to list unassigned
                           addresses, and set to None or omit it
                           to avoid filtering by state
:type assigned: ``bool``
:keyword location: filter IP addresses by location
:type location: :class:`NodeLocation`
:rtype: ``list`` of :class:`GoGridIpAddress`
"""
params = {}
if "public" in kwargs and kwargs["public"] is not None:
params["ip.type"] = {True: "Public",
False: "Private"}[kwargs["public"]]
if "assigned" in kwargs and kwargs["assigned"] is not None:
params["ip.state"] = {True: "Assigned",
False: "Unassigned"}[kwargs["assigned"]]
if "location" in kwargs and kwargs['location'] is not None:
params['datacenter'] = kwargs['location'].id
ips = self._to_ips(
self.connection.request('/api/grid/ip/list',
params=params).object)
return ips
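    # Illustrative usage sketch (added for documentation; not part of the original
    # driver). The credential strings and the choice of the first size/image are
    # placeholders, and list_images() is the standard libcloud NodeDriver method
    # assumed to be provided elsewhere in this class:
    #
    #     driver = GoGridNodeDriver('my_api_key', 'my_api_secret')
    #     size = driver.list_sizes()[0]
    #     image = driver.list_images()[0]
    #     node = driver.create_node(name='example-node', size=size, image=image)
    #     driver.destroy_node(node)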
|
{
"content_hash": "6164572525e22ffc26364c61cebdda47",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 79,
"avg_line_length": 32.224944320712694,
"alnum_prop": 0.5226345981062962,
"repo_name": "Kami/libcloud",
"id": "0226f7bb6350b89e3b0a0504b8cdedbbf0e260e9",
"size": "15250",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "libcloud/compute/drivers/gogrid.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1819"
},
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "9122888"
},
{
"name": "Shell",
"bytes": "12994"
}
],
"symlink_target": ""
}
|
"""Gradients for operators defined in array_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from math import ceil
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
@ops.RegisterGradient("Pack")
def _PackGrad(op, grad):
"""Gradient for pack op."""
return array_ops.unstack(grad, num=op.get_attr("N"), axis=op.get_attr("axis"))
@ops.RegisterGradient("Unpack")
def _UnpackGrad(op, *grads):
"""Gradient for unpack op."""
return array_ops.stack(grads, axis=op.get_attr("axis"))
def _ConcatGradHelper(op, grad, start_value_index, end_value_index, dim_index):
"""Gradient for concat op.
Args:
op: An operation.
grad: `Tensor` or `IndexedSlices` representing the gradients with respect
to each output of the op.
start_value_index: An integer index of the first value in the op.inputs.
end_value_index: An integer index of the last value in the op.inputs.
    dim_index: An integer index of concat_dim or axis parameter in op.inputs.
Returns:
Tensors representing the partial gradients with respect to each input
of the op.
Raises:
ValueError: if concat_dim/axis is not statically known.
"""
def _CreateDenseMaskAndBegin(sizes, concat_dim):
"""Create variables for iteratively slicing a dense gradients tensor."""
# Since shape is 1-D, shape_of_shape = [rank-of-inputs]
shape_of_shape = array_ops.shape(sizes[0])
# Make a vector of length equal to the input's dimensions,
# with 0's everywhere and 1 in the concat dim position.
# Note: Can't use sparse_to_dense since it isn't GPU-capable (for now)
mask = array_ops.concat([
array_ops.fill(array_ops.expand_dims(concat_dim, 0), 0), [1],
array_ops.fill(shape_of_shape - concat_dim - 1, 0)
], 0)
begin = array_ops.fill(shape_of_shape, 0)
return mask, begin
def _ExtractInputShapes(inputs):
"""Extract the shapes of a set of input tensors."""
if context.executing_eagerly():
return array_ops.shape_n(inputs)
sizes = []
fully_known = True
for x in inputs:
input_shape = array_ops.shape(x)
if not isinstance(input_shape,
ops.Tensor) or input_shape.op.type != "Const":
fully_known = False
break
sizes.append(input_shape)
if fully_known:
return sizes
else:
return array_ops.shape_n(inputs)
# Degenerate concatenation, just return grad.
if len(op.inputs) == 2:
return grad + [None] if end_value_index <= dim_index else [None] + grad
concat_dim = op.inputs[dim_index]
input_values = op.inputs[start_value_index:end_value_index]
out_grads = []
if isinstance(grad, ops.Tensor):
if context.executing_eagerly():
# Using mod here for convenience since concat_dim is already verified
# in concat implementation to be within the allowed [-rank, rank) range.
non_neg_concat_dim = (
concat_dim._numpy().item(0) % input_values[0]._rank()) # pylint: disable=protected-access
# All inputs are guaranteed to be EagerTensors in eager mode
sizes = pywrap_tensorflow.TFE_Py_TensorShapeSlice(input_values,
non_neg_concat_dim)
out_grads = array_ops.split(grad, sizes, non_neg_concat_dim)
else:
if constant_op.is_constant(concat_dim):
# If concat_dim is a constant defined in a different context,
# then we duplicate it in the current context to avoid passing it
# through an Enter node.
# This is a small optimization in general, but it is required when
# compiling with XLA, as XLA needs the concat input to be folded into a
# constant.
grad_context = control_flow_util.GetOutputContext(grad.op)
dim_context = control_flow_util.GetOutputContext(concat_dim.op)
if dim_context != grad_context:
value = tensor_util.constant_value(concat_dim)
concat_dim = constant_op.constant(value=value, dtype=concat_dim.dtype)
# Using mod here for convenience since concat_dim is already verified
# in concat implementation to be within the allowed [-rank, rank) range.
non_neg_concat_dim = concat_dim % array_ops.rank(input_values[0])
# Get the inputs' tensor shapes
sizes = _ExtractInputShapes(input_values)
# The magic number of 16 was found through benchmarking a range of sizes
# on CPUs and a Maxwell TitanX. A speedup was seen in a large majority of
# cases when switching implementations at N=16, but it is possible that
# there will be a small number of performance regressions.
if len(sizes) > 16:
# extract the size of each input along the concat dimension
sizes = array_ops.squeeze(
array_ops.slice(
array_ops.stack(sizes, axis=1), [non_neg_concat_dim, 0],
[1, -1]))
out_grads = array_ops.split(grad, sizes, non_neg_concat_dim)
else:
offset = gen_array_ops.concat_offset(non_neg_concat_dim, sizes)
for (begin, size) in zip(offset, sizes):
out_grads.append(array_ops.slice(grad, begin, size))
elif isinstance(grad, ops.IndexedSlices):
# Using mod here for convenience since concat_dim is already verified
# in concat implementation to be within the allowed [-rank, rank) range.
non_neg_concat_dim = concat_dim % array_ops.rank(input_values[0])
concat_dim_static = tensor_util.constant_value(concat_dim)
if concat_dim_static is None:
raise ValueError("Can only compute IndexedSlices gradient with "
"statically-known concat_dim")
if concat_dim_static < 0:
rank = tensor_util.constant_value(array_ops.rank(input_values[0]))
if rank is None:
raise ValueError("Can only compute IndexedSlices gradient with "
"negative concat_dim when first value rank is "
"statically-known.")
concat_dim_static %= rank
# Get the inputs' tensor shapes
sizes = [array_ops.shape(x) for x in input_values]
if concat_dim_static > 0:
# IndexedSlices, non_neg_concat_dim > 0. Each input gets IndexedSlices
# gradients with all the indices, but with grad.values sliced accordingly.
# This is like the Tensor case, except shape(grad.values)[0] is not equal
# to shape(sizes[i])[0], since only a subset of the dim-0 values are
# stored.
mask, begin = _CreateDenseMaskAndBegin(sizes, non_neg_concat_dim)
for size in sizes:
new_values = array_ops.slice(
grad.values, begin,
array_ops.concat([[-1], array_ops.slice(size, [1], [-1])], 0))
out_grads.append(ops.IndexedSlices(new_values, grad.indices, size))
# Lint complains begin = begin + ...
begin = math_ops.add(begin, size * mask)
else:
# IndexedSlices, concat_dim == 0. Each input gets IndexedSlices gradients
# only for the relevant indices.
start = constant_op.constant(0, dtype=grad.indices.dtype)
for size in sizes:
size_concat_dim = array_ops.gather(size, non_neg_concat_dim)
if size_concat_dim.dtype != grad.indices.dtype:
size_concat_dim = math_ops.cast(
size_concat_dim, dtype=grad.indices.dtype)
end = start + size_concat_dim
# Compute the 1-D Tensor of indices relevant for this input.
indices_to_select = array_ops.squeeze(
array_ops.where(
math_ops.logical_and(grad.indices >= start,
grad.indices < end)),
axis=[1])
new_indices = array_ops.gather(grad.indices, indices_to_select) - start
new_values = array_ops.gather(grad.values, indices_to_select)
out_grads.append(ops.IndexedSlices(new_values, new_indices, size))
start = end
else:
raise TypeError("Expected Tensor or IndexedSlices, got %s" % type(grad))
return (out_grads + [None]
if end_value_index <= dim_index else [None] + out_grads)
@ops.RegisterGradient("Concat")
def _ConcatGrad(op, grad):
return _ConcatGradHelper(
op,
grad,
start_value_index=1,
end_value_index=len(op.inputs),
dim_index=0)
@ops.RegisterGradient("ConcatV2")
def _ConcatGradV2(op, grad):
return _ConcatGradHelper(
op, grad, start_value_index=0, end_value_index=-1, dim_index=-1)
ops.NotDifferentiable("ConcatOffset")
@ops.RegisterGradient("Slice")
def _SliceGrad(op, grad):
"""Gradient for Slice op."""
# Create an Nx2 padding where the first column represents how many
# zeros are to be prepended for each dimension, and the second
# column indicates how many zeros are appended.
#
# The number of zeros to append is the shape of the input
# elementwise-subtracted by both the begin vector and sizes vector.
#
# Some more reshaping is needed to assemble this tensor with the
# right dimensions.
input_vec = op.inputs[0]
begin_vec = op.inputs[1]
input_rank = array_ops.rank(input_vec)
slice_size = array_ops.shape(op.outputs[0])
shape = array_ops.stack([input_rank, 1])
before_pad = array_ops.reshape(begin_vec, shape)
after_pad = array_ops.reshape(
array_ops.shape(input_vec) - slice_size - begin_vec, shape)
paddings = array_ops.concat([before_pad, after_pad], 1)
return array_ops.pad(grad, paddings), None, None
@ops.RegisterGradient("StridedSlice")
def _StridedSliceGrad(op, grad):
"""Gradient for StridedSlice op."""
begin = op.inputs[1]
end = op.inputs[2]
strides = op.inputs[3]
# StridedSliceGrad requires `x`, `begin`, `end` and `strides` to be of the
# same dtype so we build a shape of the same type as other args.
# Note that the choice of `begin` for specifying `out_type` is arbitrary.
# We could choose any of {begin|end|strides}.dtype since they are required to
# be the same.
x = array_ops.shape(op.inputs[0], out_type=begin.dtype)
return array_ops.strided_slice_grad(
x,
begin,
end,
strides,
grad,
begin_mask=op.get_attr("begin_mask"),
end_mask=op.get_attr("end_mask"),
ellipsis_mask=op.get_attr("ellipsis_mask"),
new_axis_mask=op.get_attr("new_axis_mask"),
shrink_axis_mask=op.get_attr("shrink_axis_mask")), None, None, None
@ops.RegisterGradient("StridedSliceGrad")
def _StridedSliceGradGrad(op, grad):
"""Gradient for StridedSliceGrad op."""
begin = op.inputs[1]
end = op.inputs[2]
strides = op.inputs[3]
return None, None, None, None, array_ops.strided_slice(
grad,
begin,
end,
strides,
begin_mask=op.get_attr("begin_mask"),
end_mask=op.get_attr("end_mask"),
ellipsis_mask=op.get_attr("ellipsis_mask"),
new_axis_mask=op.get_attr("new_axis_mask"),
shrink_axis_mask=op.get_attr("shrink_axis_mask"))
@ops.RegisterGradient("Split")
def _SplitGrad(op, *grads):
return None, array_ops.concat(list(grads), op.inputs[0])
@ops.RegisterGradient("SplitV")
def _SplitVGrad(op, *grads):
returnval = array_ops.concat(list(grads), op.inputs[2])
returnval = [returnval] + [
None,
] * (
len(op.inputs) - 1)
return returnval
ops.NotDifferentiable("Const")
@ops.RegisterGradient("Diag")
def _DiagGrad(_, grad):
return array_ops.diag_part(grad)
@ops.RegisterGradient("DiagPart")
def _DiagPartGrad(_, grad):
return array_ops.diag(grad)
@ops.RegisterGradient("MatrixDiag")
def _MatrixDiagGrad(_, grad):
return array_ops.matrix_diag_part(grad)
@ops.RegisterGradient("MatrixDiagPart")
def _MatrixDiagPartGrad(op, grad):
matrix_shape = op.inputs[0].get_shape()[-2:]
if matrix_shape.is_fully_defined() and matrix_shape[0] == matrix_shape[1]:
return array_ops.matrix_diag(grad)
else:
return array_ops.matrix_set_diag(array_ops.zeros_like(op.inputs[0]), grad)
@ops.RegisterGradient("MatrixSetDiag")
def _MatrixSetDiagGrad(op, grad):
"""Gradient for MatrixSetDiag."""
input_shape = op.inputs[0].get_shape().merge_with(grad.get_shape())
diag_shape = op.inputs[1].get_shape()
batch_shape = input_shape[:-2].merge_with(diag_shape[:-1])
matrix_shape = input_shape[-2:]
if batch_shape.is_fully_defined() and matrix_shape.is_fully_defined():
diag_shape = batch_shape.as_list() + [min(matrix_shape.as_list())]
else:
with ops.colocate_with(grad):
grad_shape = array_ops.shape(grad)
grad_rank = array_ops.rank(grad)
batch_shape = array_ops.slice(grad_shape, [0], [grad_rank - 2])
matrix_shape = array_ops.slice(grad_shape, [grad_rank - 2], [2])
min_dim = math_ops.reduce_min(matrix_shape)
diag_shape = array_ops.concat([batch_shape, [min_dim]], 0)
grad_input = array_ops.matrix_set_diag(grad,
array_ops.zeros(
diag_shape, dtype=grad.dtype))
grad_diag = array_ops.matrix_diag_part(grad)
return (grad_input, grad_diag)
@ops.RegisterGradient("MatrixBandPart")
def _MatrixBandPartGrad(op, grad):
num_lower = op.inputs[1]
num_upper = op.inputs[2]
return (array_ops.matrix_band_part(grad, num_lower, num_upper), None, None)
# Edit Distance has no gradient (but can be used to eval seq2seq or CTC).
ops.NotDifferentiable("EditDistance")
@ops.RegisterGradient("Fill")
def _FillGrad(_, grad):
return None, math_ops.reduce_sum(grad)
ops.NotDifferentiable("ZerosLike")
ops.NotDifferentiable("OnesLike")
@ops.RegisterGradient("PreventGradient")
def _PreventGradientGrad(op, _):
raise LookupError(
"Gradient explicitly disabled. Reason: %s" % op.get_attr("message"))
@ops.RegisterGradient("Gather")
def _GatherGrad(op, grad):
"""Gradient for Gather op."""
# params can be large, so colocate the shape calculation with it.
#
# params can be very large for sparse model, array_ops.shape raises
# exception on the Windows platform when any dimension is larger than
# int32. params_shape is not used in optimizer apply_sparse gradients,
# so it's fine to convert it back to int32 regardless of truncation.
params = op.inputs[0]
with ops.colocate_with(params):
params_shape = array_ops.shape(params, out_type=ops.dtypes.int64)
params_shape = math_ops.to_int32(params_shape)
# Build appropriately shaped IndexedSlices
indices = op.inputs[1]
size = array_ops.expand_dims(array_ops.size(indices), 0)
values_shape = array_ops.concat([size, params_shape[1:]], 0)
values = array_ops.reshape(grad, values_shape)
indices = array_ops.reshape(indices, size)
return [ops.IndexedSlices(values, indices, params_shape), None]
@ops.RegisterGradient("GatherV2")
def _GatherV2Grad(op, grad):
"""Gradient for GatherV2 op."""
# params can be large, so colocate the shape calculation with it.
#
# params can be very large for sparse model, array_ops.shape raises
# exception on the Windows platform when any dimension is larger than
# int32. params_shape is not used in optimizer apply_sparse gradients,
# so it's fine to convert it back to int32 regardless of truncation.
params = op.inputs[0]
with ops.colocate_with(params):
params_shape = array_ops.shape(params, out_type=ops.dtypes.int64)
params_shape = math_ops.to_int32(params_shape)
indices = op.inputs[1]
indices_size = array_ops.expand_dims(array_ops.size(indices), 0)
axis = op.inputs[2]
axis_static = tensor_util.constant_value(axis)
# For axis 0 gathers, build an appropriately shaped IndexedSlices.
if axis_static == 0:
if context.executing_eagerly():
params_tail_shape = params_shape.cpu()[1:]
else:
params_tail_shape = params_shape[1:]
values_shape = array_ops.concat([indices_size, params_tail_shape], 0)
values = array_ops.reshape(grad, values_shape)
indices = array_ops.reshape(indices, indices_size)
return [ops.IndexedSlices(values, indices, params_shape), None, None]
outer_shape = params_shape[:axis]
outer_dims = array_ops.size(outer_shape)
inner_shape = params_shape[axis:][1:]
inner_dims = array_ops.size(inner_shape)
outer_axes_indices = math_ops.range(outer_dims)
inner_axes_indices = math_ops.range(outer_dims + 1,
outer_dims + 1 + inner_dims)
values_shape = array_ops.concat([outer_shape, indices_size, inner_shape], 0)
values = array_ops.reshape(grad, values_shape)
indices = array_ops.reshape(indices, indices_size)
# We need to sum up every slice `values[..., i, ....]` corresponding to
# `params[..., indices[i], ...]`. Since `unsorted_segment_sum` does not
# support an axis parameter, we transpose the gather dimension to the front,
# then use `unsorted_segment_sum` to build a
# [gather_axis, outer_axes, inner_axes] tensor with all the gradients
# affecting each index in `gather_axis` summed up.
transpose_dims = array_ops.concat(
[[outer_dims], outer_axes_indices, inner_axes_indices], 0)
values_transpose = array_ops.transpose(values, transpose_dims)
num_segments = params_shape[axis]
params_grad = math_ops.unsorted_segment_sum(values_transpose, indices,
num_segments)
# Inverts the above transpose by moving dimension 0 back to its original
# position.
invert_transpose_dims = array_ops.concat(
[outer_axes_indices + 1, [0], inner_axes_indices], 0)
params_grad = array_ops.transpose(params_grad, invert_transpose_dims)
return [params_grad, None, None]
@ops.RegisterGradient("GatherNd")
def _GatherNdGrad(op, grad):
ref = op.inputs[0]
indices = op.inputs[1]
ref_shape = array_ops.shape(ref, out_type=indices.dtype)
if indices.shape.ndims == 2 and indices.shape[-1].value == 1:
ref_grad = ops.IndexedSlices(grad, array_ops.squeeze(indices, axis=-1),
ref_shape)
else:
ref_grad = array_ops.scatter_nd(indices, grad, ref_shape)
return [ref_grad, None]
@ops.RegisterGradient("CheckNumerics")
def _CheckNumericsGrad(_, grad):
"""Gradient for check_numerics op."""
return array_ops.check_numerics(
grad, "Not a number (NaN) or infinity (Inf) values detected in gradient.")
@ops.RegisterGradient("PlaceholderWithDefault")
@ops.RegisterGradient("Identity")
def _IdGrad(_, grad):
return grad
@ops.RegisterGradient("RefIdentity")
def _RefIdGrad(_, grad):
return grad
@ops.RegisterGradient("IdentityN")
def _IdNGrad(_, *grad):
return grad
ops.NotDifferentiable("StopGradient")
@ops.RegisterGradient("Reshape")
def _ReshapeGrad(op, grad):
return [array_ops.reshape(grad, array_ops.shape(op.inputs[0])), None]
ops.NotDifferentiable("InvertPermutation")
def _ReshapeToInput(op, grad):
"""Reshapes the gradient to the shape of the original input."""
return array_ops.reshape(grad, array_ops.shape(op.inputs[0]))
@ops.RegisterGradient("ExpandDims")
def _ExpandDimsGrad(op, grad):
return [_ReshapeToInput(op, grad), None]
@ops.RegisterGradient("Squeeze")
def _SqueezeGrad(op, grad):
return _ReshapeToInput(op, grad)
@ops.RegisterGradient("Transpose")
def _TransposeGrad(op, grad):
"""Returns unshuffle(grad)."""
p = op.inputs[1]
return [array_ops.transpose(grad, array_ops.invert_permutation(p)), None]
@ops.RegisterGradient("ConjugateTranspose")
def _ConjugateTransposeGrad(op, grad):
"""Returns conj(unshuffle(grad))."""
p = op.inputs[1]
return [
array_ops.transpose(
grad, array_ops.invert_permutation(p), conjugate=True), None
]
ops.NotDifferentiable("Shape")
ops.NotDifferentiable("ShapeN")
ops.NotDifferentiable("Rank")
ops.NotDifferentiable("Size")
@ops.RegisterGradient("Tile")
def _TileGrad(op, grad):
"""Sum reduces grad along the tiled dimensions."""
input_shape = array_ops.shape(op.inputs[0])
# We interleave multiples and input_shape to get split_shape,
# reshape grad to split_shape, and reduce along all even
# dimensions (the tiled dimensions) to get the result
# with shape input_shape. For example
# input_shape = [20, 30, 40]
# multiples = [2, 3, 4]
# split_shape = [2, 20, 3, 30, 4, 40]
# axes = [0, 2, 4]
split_shape = array_ops.reshape(
array_ops.transpose(array_ops.stack([op.inputs[1], input_shape])), [-1])
axes = math_ops.range(0, array_ops.size(split_shape), 2)
# Sum reduces grad along the first dimension for IndexedSlices
if isinstance(grad, ops.IndexedSlices):
grad = math_ops.unsorted_segment_sum(
grad.values,
math_ops.mod(grad.indices, input_shape[0]),
input_shape[0])
split_shape = array_ops.concat([[1], split_shape[1:]], axis=0)
input_grad = math_ops.reduce_sum(array_ops.reshape(grad, split_shape), axes)
# Fix shape inference
if not context.executing_eagerly():
input_grad.set_shape(op.inputs[0].get_shape())
return [input_grad, None]
ops.NotDifferentiable("BroadcastGradientArgs")
def _PadGrad(op, grad):
"""Gradient for Pad."""
# Pad introduces values around the original tensor, so the gradient function
# slices the original shape out of the gradient."""
x = op.inputs[0]
a = op.inputs[1] # [Rank(x), 2]
# Takes a slice of a. The 1st column. [Rank(x), 1].
pad_before = array_ops.slice(a, [0, 0],
array_ops.stack([array_ops.rank(x), 1]))
# Make it a 1-D tensor.
begin = array_ops.reshape(pad_before, [-1])
sizes = array_ops.shape(x)
x_grad = array_ops.slice(grad, begin, sizes)
if len(op.inputs) == 3:
return x_grad, None, None
else:
return x_grad, None
ops.RegisterGradient("Pad")(_PadGrad)
ops.RegisterGradient("PadV2")(_PadGrad)
# ReverseSequence is just a permutation. The gradient permutes back.
@ops.RegisterGradient("ReverseSequence")
def _ReverseSequenceGrad(op, grad):
seq_lengths = op.inputs[1]
return [
array_ops.reverse_sequence(
grad,
batch_axis=op.get_attr("batch_dim"),
seq_axis=op.get_attr("seq_dim"),
seq_lengths=seq_lengths), None
]
@ops.RegisterGradient("Reverse")
def _ReverseGrad(op, grad):
reverse_dims = op.inputs[1]
return gen_array_ops.reverse(grad, reverse_dims), None
@ops.RegisterGradient("ReverseV2")
def _ReverseV2Grad(op, grad):
axis = op.inputs[1]
return array_ops.reverse_v2(grad, axis), None
@ops.RegisterGradient("SpaceToBatch")
def _SpaceToBatchGrad(op, grad):
# Its gradient is the opposite op: BatchToSpace.
block_size = op.get_attr("block_size")
return [
array_ops.batch_to_space(grad, op.inputs[1], block_size=block_size), None
]
@ops.RegisterGradient("SpaceToBatchND")
def _SpaceToBatchNDGrad(op, grad):
# Its gradient is the opposite op: BatchToSpaceND.
return [
array_ops.batch_to_space_nd(grad, op.inputs[1], op.inputs[2]), None, None
]
@ops.RegisterGradient("BatchToSpace")
def _BatchToSpaceGrad(op, grad):
# Its gradient is the opposite op: SpaceToBatch.
block_size = op.get_attr("block_size")
return [
array_ops.space_to_batch(grad, op.inputs[1], block_size=block_size), None
]
@ops.RegisterGradient("BatchToSpaceND")
def _BatchToSpaceNDGrad(op, grad):
# Its gradient is the opposite op: SpaceToBatchND.
return [
array_ops.space_to_batch_nd(grad, op.inputs[1], op.inputs[2]), None, None
]
@ops.RegisterGradient("SpaceToDepth")
def _SpaceToDepthGrad(op, grad):
# Its gradient is the opposite op: DepthToSpace.
block_size = op.get_attr("block_size")
data_format = op.get_attr("data_format")
if data_format == "NCHW_VECT_C":
raise ValueError("Cannot compute SpaceToDepth gradient with NCHW_VECT_C. "
"NCHW_VECT_C requires qint8 data type.")
return array_ops.depth_to_space(grad, block_size, data_format=data_format)
@ops.RegisterGradient("DepthToSpace")
def _DepthToSpaceGrad(op, grad):
# Its gradient is the opposite op: SpaceToDepth.
block_size = op.get_attr("block_size")
data_format = op.get_attr("data_format")
if data_format == "NCHW_VECT_C":
raise ValueError("Cannot compute DepthToSpace gradient with NCHW_VECT_C. "
"NCHW_VECT_C requires qint8 data type.")
return array_ops.space_to_depth(grad, block_size, data_format=data_format)
ops.NotDifferentiable("OneHot")
@ops.RegisterGradient("MirrorPad")
def _MirrorPadGrad(op, grad):
mode = op.get_attr("mode")
return [gen_array_ops.mirror_pad_grad(grad, op.inputs[1], mode=mode), None]
@ops.RegisterGradient("MirrorPadGrad")
def _MirrorPadGradGrad(op, grad):
mode = op.get_attr("mode")
return [gen_array_ops.mirror_pad(grad, op.inputs[1], mode=mode), None]
@ops.RegisterGradient("QuantizeAndDequantize")
def _QuantizeAndDequantizeGrad(_, grad):
return grad
@ops.RegisterGradient("QuantizeAndDequantizeV2")
def _QuantizeAndDequantizeV2Grad(_, grad):
return [grad, None, None]
@ops.RegisterGradient("QuantizeAndDequantizeV3")
def _QuantizeAndDequantizeV3Grad(_, grad):
# Only propagate the gradient for the unquantized input.
return [grad, None, None, None]
@ops.RegisterGradient("ExtractImagePatches")
def _ExtractImagePatchesGrad(op, grad):
batch_size, rows_in, cols_in, channels = [
dim.value for dim in op.inputs[0].get_shape()
]
input_bhwc = array_ops.shape(op.inputs[0])
batch_size = input_bhwc[0]
channels = input_bhwc[3]
_, rows_out, cols_out, _ = [dim.value for dim in op.outputs[0].get_shape()]
_, ksize_r, ksize_c, _ = op.get_attr("ksizes")
_, stride_r, stride_h, _ = op.get_attr("strides")
_, rate_r, rate_c, _ = op.get_attr("rates")
padding = op.get_attr("padding")
ksize_r_eff = ksize_r + (ksize_r - 1) * (rate_r - 1)
ksize_c_eff = ksize_c + (ksize_c - 1) * (rate_c - 1)
if padding == b"SAME":
rows_out = int(ceil(rows_in / stride_r))
cols_out = int(ceil(cols_in / stride_h))
pad_rows = ((rows_out - 1) * stride_r + ksize_r_eff - rows_in) // 2
pad_cols = ((cols_out - 1) * stride_h + ksize_c_eff - cols_in) // 2
elif padding == b"VALID":
rows_out = int(ceil((rows_in - ksize_r_eff + 1) / stride_r))
cols_out = int(ceil((cols_in - ksize_c_eff + 1) / stride_h))
pad_rows = (rows_out - 1) * stride_r + ksize_r_eff - rows_in
pad_cols = (cols_out - 1) * stride_h + ksize_c_eff - cols_in
pad_rows, pad_cols = max(0, pad_rows), max(0, pad_cols)
grad_expanded = array_ops.transpose(
array_ops.reshape(
grad, (batch_size, rows_out, cols_out, ksize_r, ksize_c, channels)),
(1, 2, 3, 4, 0, 5))
grad_flat = array_ops.reshape(grad_expanded, (-1, batch_size * channels))
row_steps = range(0, rows_out * stride_r, stride_r)
col_steps = range(0, cols_out * stride_h, stride_h)
idx = []
for i in range(rows_out):
for j in range(cols_out):
r_low, c_low = row_steps[i] - pad_rows, col_steps[j] - pad_cols
r_high, c_high = r_low + ksize_r_eff, c_low + ksize_c_eff
idx.extend([(r * (cols_in) + c, i * (cols_out * ksize_r * ksize_c) + j *
(ksize_r * ksize_c) + ri * (ksize_c) + ci)
for (ri, r) in enumerate(range(r_low, r_high, rate_r))
for (ci, c) in enumerate(range(c_low, c_high, rate_c))
if 0 <= r and r < rows_in and 0 <= c and c < cols_in])
sp_shape = (rows_in * cols_in, rows_out * cols_out * ksize_r * ksize_c)
sp_mat = sparse_tensor.SparseTensor(
array_ops.constant(idx, dtype=ops.dtypes.int64),
array_ops.ones((len(idx),), dtype=ops.dtypes.float32), sp_shape)
jac = sparse_ops.sparse_tensor_dense_matmul(sp_mat, grad_flat)
grad_out = array_ops.reshape(jac, (rows_in, cols_in, batch_size, channels))
grad_out = array_ops.transpose(grad_out, (2, 0, 1, 3))
return [grad_out]
@ops.RegisterGradient("ScatterNd")
def _ScatterNdGrad(op, grad):
indices = op.inputs[0]
updates_grad = array_ops.gather_nd(grad, indices)
return [None, updates_grad, None]
@ops.RegisterGradient("ScatterNdNonAliasingAdd")
def _ScatterNdNonAliasingAddGrad(op, grad):
indices = op.inputs[1]
updates_grad = array_ops.gather_nd(grad, indices)
return [grad, None, updates_grad]
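# Illustrative note (added for documentation; not part of the original module).
# A minimal sketch of what the Slice gradient registered above produces, assuming
# the TensorFlow 1.x graph API with gradients_impl importable from
# tensorflow.python.ops. Slicing a 2x3 tensor and backpropagating implicit ones
# pads the incoming gradient back to the input shape:
#
#     x = constant_op.constant([[1., 2., 3.], [4., 5., 6.]])
#     y = array_ops.slice(x, [0, 1], [2, 2])
#     dx, = gradients_impl.gradients(y, x)
#     # dx evaluates to [[0., 1., 1.], [0., 1., 1.]]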
|
{
"content_hash": "e066c22fd432b1e4a9b3ebfa6f9e362f",
"timestamp": "",
"source": "github",
"line_count": 800,
"max_line_length": 100,
"avg_line_length": 35.54875,
"alnum_prop": 0.6709096663033158,
"repo_name": "lukeiwanski/tensorflow",
"id": "fe459a96b98733f8a706b0c3b84000c5a74894ad",
"size": "29128",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/array_grad.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "305344"
},
{
"name": "C++",
"bytes": "44091926"
},
{
"name": "CMake",
"bytes": "206801"
},
{
"name": "Go",
"bytes": "1163771"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "768682"
},
{
"name": "Jupyter Notebook",
"bytes": "2245985"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "49862"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99265"
},
{
"name": "PHP",
"bytes": "2140"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "37482296"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "443812"
},
{
"name": "Smarty",
"bytes": "6870"
}
],
"symlink_target": ""
}
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='testrepo',
version='0.1.0',
description="A test repo for figuring out cookiecutter, tox, travis ci and other things",
long_description=readme + '\n\n' + history,
author="Gautam Sisodia",
author_email='gautam.sisodia@gmail.com',
url='https://github.com/gautsi/testrepo',
packages=[
'testrepo',
],
package_dir={'testrepo':
'testrepo'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='testrepo',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
tests_require=test_requirements
)
|
{
"content_hash": "bed690af12de9efedee050bcb0212877",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 93,
"avg_line_length": 28.471698113207548,
"alnum_prop": 0.6209410205434063,
"repo_name": "gautsi/test_package",
"id": "e88bdf58245f4b4f54a7ce44fc1de39b424911df",
"size": "1557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1700"
},
{
"name": "Python",
"bytes": "2325"
}
],
"symlink_target": ""
}
|
import json
import yaml
import sys
import re
import traceback
from os.path import expanduser
import os
import urllib
import pyxb
from uforge.objects.uforge import *
import ussclicore.utils.download_utils
from ussclicore.utils import printer
from ussclicore.utils import generics_utils
from hammr.utils.bundle_utils import *
from hammr.utils import constants
def check_mandatory_stack(stack):
if not "name" in stack:
printer.out("no attribute [name] for [stack]", printer.ERROR)
return
if not "version" in stack:
printer.out("no attribute [version] for [stack]", printer.ERROR)
return
if not "os" in stack:
printer.out("no attribute [os] for [stack]", printer.ERROR)
return
else:
if not "name" in stack["os"]:
printer.out("no attribute [name] for [os]", printer.ERROR)
return
if not "version" in stack["os"]:
printer.out("no attribute [version] for [os]", printer.ERROR)
return
if not "arch" in stack["os"]:
printer.out("no attribute [arch] for [os]", printer.ERROR)
return
return stack
def check_mandatory_builders(builders):
return builders
#TODO
def check_mandatory_generate_scan(builders):
for builder in builders:
if not "installation" in builder:
printer.out("no attribute installation in builder", printer.ERROR)
return
if not "diskSize" in builder["installation"]:
printer.out("no attribute diskSize in the installation part of builder", printer.ERROR)
return
if not "hardwareSettings" in builder:
printer.out("no attribute hardwareSettings in builder", printer.ERROR)
return
if not "memory" in builder["hardwareSettings"]:
printer.out("no attribute diskSize in the memory part of hardwareSettings", printer.ERROR)
return
return builders
def check_mandatory_create_account(iterables, type):
#iterables can be builders or accounts
for iterable in iterables:
if type=="builders":
if "account" in iterable:
if not "type" in iterable and not "type" in iterable["account"]:
printer.out("no attribute type in builder", printer.ERROR)
return
if "file" in iterable["account"]:
file = get_file(iterable["account"]["file"])
if file is None:
return 2
data = load_data(file)
if data is None:
return 2
if "accounts" in data:
return check_mandatory_create_account(data["accounts"], "accounts")
if type=="accounts":
if not "type" in iterable:
printer.out("no attribute type in accounts", printer.ERROR)
return
#TODO
return iterables
def check_extension_is_json(file_path):
file_extension = os.path.splitext(file_path)[1]
if file_extension == ".yml" or file_extension == ".yaml":
return False
elif file_extension == ".json":
return True
else:
printer.out("please provide a json or yaml file \n", printer.ERROR)
raise Exception("File '" + file_path + "' is not allowed. Please provide a json or yaml file.")
def load_data(file):
isJson = check_extension_is_json(file)
if isJson:
print "you provided a json file, checking the syntax..."
data = generics_utils.check_json_syntax(file)
else:
print "you provided a yaml file, checking the syntax..."
data = generics_utils.check_yaml_syntax(file)
return data
def validate(file_path):
is_json = check_extension_is_json(file_path)
if is_json:
printer.out("You provided a json file, checking...", printer.INFO)
template = validate_configurations_file(file_path, isJson=True)
else:
printer.out("You provided a yaml file, checking...", printer.INFO)
template = validate_configurations_file(file_path, isJson=False)
return template
def validate_configurations_file(file, isJson):
if isJson:
data = generics_utils.check_json_syntax(file)
else:
data = generics_utils.check_yaml_syntax(file)
if data is None:
return
    #check mandatory fields
if "stack" in data:
stack=check_mandatory_stack(data["stack"])
if stack is None:
return
if "bundles" in data["stack"]:
for bundle in data["stack"]["bundles"]:
bundle = check_bundle(bundle)
if bundle is None:
return
if "builders" in data:
check_mandatory_builders(data["builders"])
return data
def validate_bundle(file):
try:
isJson = check_extension_is_json(file)
if isJson:
print "you provided a json file, checking..."
data = generics_utils.check_json_syntax(file)
else:
print "you provided a yaml file, checking..."
data = generics_utils.check_yaml_syntax(file)
if data is None:
return
data = check_bundle(data)
if data is None:
return
return data
except ValueError as e:
printer.out("JSON parsing error: "+str(e), printer.ERROR)
printer.out("Syntax of bundle file ["+file+"]: FAILED")
except IOError as e:
printer.out("unknown error bundle json file", printer.ERROR)
def dump_data_in_file(data, archive_files, isJsonFile, fileName, newFileName):
file = open(constants.TMP_WORKING_DIR + os.sep + newFileName, "w")
if isJsonFile:
json.dump(data, file, indent=4, separators=(',', ': '))
else:
yaml.safe_dump(data, file, default_flow_style=False, indent=2, explicit_start='---')
file.close()
archive_files.append([fileName, constants.TMP_WORKING_DIR + os.sep + newFileName])
return archive_files
#manage uforge exception
def is_uforge_exception(e):
if len(e.args)>=1 and type(e.args[0]) is UForgeError:
return True
def get_uforge_exception(e):
if len(e.args)>=1 and type(e.args[0]) is UForgeError:
return "UForge Error '"+str(e.args[0].statusCode)+"' with method: "+e.args[0].requestMethod+" "+e.args[0].requestUri+"\n"+"Message:\n\t"+e.args[0].localizedErrorMsg.message
def print_uforge_exception(e):
if len(e.args)>=1 and type(e.args[0]) is UForgeError:
printer.out(get_uforge_exception(e), printer.ERROR)
else:
traceback.print_exc()
def handle_uforge_exception(e):
print_uforge_exception(e)
return 2
def get_uforge_url_from_ws_url(ws_url):
if ws_url[-1:]!='/':
return ws_url.rpartition('/')[0]
else:
return ws_url[:-1].rpartition('/')[0]
def get_hammr_dir():
dir = ussclicore.utils.generics_utils.get_home_dir()+os.sep+".hammr"
if not os.path.isdir(dir):
os.mkdir(dir)
return dir
def create_user_ssh_key(api, login, ssh_key):
    if not "name" in ssh_key:
        printer.out("sshKey name not found in builder", printer.ERROR)
        return 2
    if not "publicKey" in ssh_key:
        printer.out("publicKey in sshKey not found in builder", printer.ERROR)
        return 2
    # sshKey is assumed to be the uforge sshKey class pulled in by the
    # wildcard import at the top of this module.
    mySshKey = sshKey()
    mySshKey.name = ssh_key["name"]
    mySshKey.publicKey = ssh_key["publicKey"]
    key = api.Users(login).Sshkeys().Create(mySshKey)
if key is None:
printer.out("Impossible to create sshKey ["+mySshKey.name+"]", printer.ERROR)
return 2
return key
def is_uri_based_on_appliance(uri):
match = re.match( r'users/[^/]+/appliances/[0-9]+($|/)', uri)
if match:
return True
else:
return False
def is_uri_based_on_scan(uri):
match = re.match( r'users/[^/]+/scannedinstances/[0-9]+/scans/[0-9]+($|/)', uri)
if match:
return True
else:
return False
def extract_scannedinstance_id(image_uri):
match = re.match( r'users/[^/]+/scannedinstances/([0-9]+)($|/)', image_uri)
if match:
return int(match.group(1))
else:
return None
def extract_scan_id(image_uri):
match = re.match( r'users/[^/]+/scannedinstances/[0-9]+/scans/([0-9]+)($|/)', image_uri)
if match:
return int(match.group(1))
else:
return None
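# Illustrative usage sketch (added for documentation; not part of the original
# module). The URI below is a made-up example matching the patterns above:
#
#     uri = "users/jdoe/scannedinstances/12/scans/34/images/56"
#     is_uri_based_on_scan(uri)          # True
#     extract_scannedinstance_id(uri)    # 12
#     extract_scan_id(uri)               # 34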
|
{
"content_hash": "3668c745badec674e51f444e9e2586c1",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 180,
"avg_line_length": 32.65758754863813,
"alnum_prop": 0.6107470511140236,
"repo_name": "MaxTakahashi/hammr",
"id": "8eb49991604d23325e798968bc0f5027c84b930d",
"size": "9028",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hammr/utils/hammr_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "201"
},
{
"name": "Python",
"bytes": "356616"
},
{
"name": "Shell",
"bytes": "1091"
}
],
"symlink_target": ""
}
|
import unittest
from app.launch_scrubber import Scrubber
class LaunchScrubberTests(unittest.TestCase):
def setUp(self):
self.scrubber = Scrubber()
def test_launch_is_scrubbed_when_its_too_cold(self):
# ...
self.assertFalse(self.scrubber.go_for_launch())
def test_launch_is_go_when_weather_looks_good(self):
# ...
self.assertTrue(self.scrubber.go_for_launch())
def test_launch_is_no_go_when_its_too_windy(self):
# ...
self.assertFalse(self.scrubber.go_for_launch())
def test_launch_is_no_go_if_not_warm_enough_on_calm_day(self):
# ...
self.assertFalse(self.scrubber.go_for_launch())
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "c2594b612fbef7f7fd85a0bd7996eb1c",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 66,
"avg_line_length": 26.071428571428573,
"alnum_prop": 0.6315068493150685,
"repo_name": "greghaskins/launch-scrubber-kata",
"id": "1cebceeeb1e630ade343ca565e26f147c01d0dd3",
"size": "730",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/launch_scrubber_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1126"
}
],
"symlink_target": ""
}
|
import skimage.morphology
import skimage.filter
import skimage.feature
import numpy as np
import timeit
import mahotas
from os import path
luispedro_image = path.join(
path.dirname(mahotas.__file__),
'demos',
'data',
'luispedro.jpg')
f = mahotas.imread(luispedro_image, as_grey=True)
markers = np.zeros_like(f)
markers[100, 100] = 1
markers[200, 200] = 2
f = f.astype(int)
markers = markers.astype(int)
otsu = mahotas.otsu(f.astype(np.uint8))
fbin = f > otsu
fbin8 = fbin.astype(np.uint8)
Bc = np.eye(3)
Bc = Bc.astype(bool)
Bc8 = Bc.astype(np.uint8)
pre = '''
import skimage.filter
import skimage.morphology
import skimage.feature
import numpy as np
import mahotas
import pymorph
import timethings
f = timethings.f
fbin = timethings.fbin
fbin8 = timethings.fbin8
f64 = f.astype(np.float64)
Bc = timethings.Bc
Bc8 = timethings.Bc8
markers = timethings.markers
'''
def t(s):
return timeit.timeit(s, setup=pre, number=24)
tests = [
('erode', [
'mahotas.erode(fbin, Bc)',
'pymorph.erode(fbin, Bc)',
        'skimage.morphology.erosion(fbin8, Bc8)',
]),
('dilate', [
'mahotas.dilate(fbin, Bc)',
'pymorph.dilate(fbin, Bc)',
'skimage.morphology.dilation(fbin8, Bc8)',
]),
('open', [
'mahotas.open(fbin, Bc)',
'pymorph.open(fbin, Bc)',
'skimage.morphology.opening(fbin8, Bc8)',
]),
('center mass', [
'mahotas.center_of_mass(f)',
None,
None,
]),
('sobel', [
'mahotas.sobel(f)',
None,
'skimage.filter.sobel(f64)',
]),
('cwatershed', [
'mahotas.cwatershed(f, markers)',
'pymorph.cwatershed(f, markers)',
'skimage.morphology.watershed(f, markers)',
]),
('daubechies', [
'mahotas.daubechies(f, "D4")',
None,
None,
]),
('haralick', [
'mahotas.features.haralick(f)',
None,
'skimage.feature.greycoprops(skimage.feature.greycomatrix(f, [1], [0]))',
]),
]
if __name__ == '__main__':
base = t('np.max(f)')
for name, statements in tests:
print r'%-12s&' % name,
for st in statements:
if st is None:
print ' NA &',
else:
time = '%.2f' % (t(st) / base)
print '%8s &' % time,
print r'\\'
# import the necessary packages
from sklearn.cross_validation import train_test_split
from sklearn.metrics import classification_report
from sklearn import datasets
from nolearn.dbn import DBN
import numpy as np
import cv2
# grab the MNIST dataset (if this is the first time you are running
# this script, this may take a minute -- the 55mb MNIST digit dataset
# will be downloaded)
print "[X] downloading data..."
dataset = datasets.fetch_mldata("MNIST Original")
# scale the data to the range [0, 1] and then construct the training
# and testing splits
(trainX, testX, trainY, testY) = train_test_split(
dataset.data / 255.0, dataset.target.astype("int0"), test_size = 0.33)
# train the Deep Belief Network with 784 input units (the flattened,
# 28x28 grayscale image), 300 hidden units, 10 output units (one for
# each possible output classification, which are the digits 0-9)
dbn = DBN(
[trainX.shape[1], 300, 10],
learn_rates = 0.3,
learn_rate_decays = 0.9,
epochs = 10,
verbose = 1)
dbn.fit(trainX, trainY)
# compute the predictions for the test data and show a classification
# report
preds = dbn.predict(testX)
print classification_report(testY, preds)
# randomly select a few of the test instances
for i in np.random.choice(np.arange(0, len(testY)), size = (10,)):
# classify the digit
pred = dbn.predict(np.atleast_2d(testX[i]))
# reshape the feature vector to be a 28x28 pixel image, then change
# the data type to be an unsigned 8-bit integer
image = (testX[i] * 255).reshape((28, 28)).astype("uint8")
# show the image and prediction
print "Actual digit is {0}, predicted {1}".format(testY[i], pred[0])
# cv2.imshow("Digit", image)
# cv2.waitKey(0)
|
{
"content_hash": "ac024d11e8c6b7270d768c35c622302d",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 81,
"avg_line_length": 27.62,
"alnum_prop": 0.6217716630461019,
"repo_name": "Chaparqanatoos/kaggle-knowledge",
"id": "d3c859f5ea74a23fa4e2e504740a9ad309e1af4b",
"size": "4143",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/python/march_mania.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "161034"
},
{
"name": "Python",
"bytes": "71591"
},
{
"name": "R",
"bytes": "33590"
}
],
"symlink_target": ""
}
|
import numpy
import theano
import theano.tensor as T
from seq_to_seq.layers_core import Layer
class Embedding(Layer):
"""
Embedding class.
:param size: int
The size of the layer (i.e., the number of rows, the size of the input vocabulary).
:param dim_proj: int
The size of the projection (i.e., the number of columns). This is the size of the vector
that will represent each of the inputs.
:param previous_layer: Layer object
The previous layer in the computational path.
:param layer_number: int
The layer position in the computational path.
:param seed: int
The seed to feed the random number generator.
:param auto_setup: boolean
Flag indicating if the model should call setup() when initializing the model or leave it
to the user to call it explicitly.
:param dtype: theano.config.floatX
Type of floating point to be used.
:return:
"""
def __init__(self,
size,
dim_proj,
previous_layer=None,
layer_number=1,
seed=123,
auto_setup=True,
dtype=theano.config.floatX):
self.W = None
self.current_mask = None
Layer.__init__(self,
size,
dim_proj,
previous_layer=previous_layer,
layer_number=layer_number,
seed=seed,
auto_setup=auto_setup,
dtype=dtype)
def init_params(self, seed=123):
"""
Function that will perform the parameter's initialization. For this layer it is a matrix
(size x dim_proj).
:param seed: int
A seed to feed the random number generator.
:return:
"""
rng = numpy.random.RandomState(seed)
self.W = theano.shared(
value=rng.uniform(low=-.1, high=.1, size=(self.n_in, self.n_out)).astype(self.dtype),
name='W_%s' % self.layer_number, borrow=True, allow_downcast=True)
def get_layer_parameters(self):
"""
Function to return the layer's parameters
:return: list
A list containing the layer's parameters in the form of theano.shared variables. For
this layer it is a matrix (size x dim_proj).
"""
return [self.W]
def get_mask(self):
"""
        Return the mask to be applied to the inputs. The mask is used to 'prevent' some values
        from being used during computations.
        Example: input = [1, 2, 3, 8, 9] mask = [1, 1, 1, 0, 0] - if we apply 'mask' to the
        'input', the last 2 values (corresponding to 0s in the mask) will not be used when
        performing the computations.
Notes:
------
1. A new mask is computed whenever new data is passed for activation.
:return: theano.tensor
Symbolic representation of the mask.
"""
return self.current_mask
def activate(self, x):
"""
Compute the layer's output. For this layer it turns a single index into a vector of
(dim_proj) size.
:param x: theano.tensor
Symbolic representation of the layer's input.
:return: theano.tensor
Symbolic representation of the layer's output.
"""
if self.previous_layer is None:
act0 = x
else:
act0 = self.previous_layer.activate(x)
activation = self.W[act0]
self.current_mask = T.ones_like(x) * (1 - T.eq(x, -1))
return activation
def get_weights(self):
"""
        Return a list containing the actual values of the layer's parameters. For this
layer it will be a list of length 1 (just weights).
:return: list
A list containing the numpy.ndarrays representing the current weights of the layer.
"""
weights = [self.W.get_value(borrow=True)]
return weights
def set_weights(self, parameters, layer_number):
"""
Set the layer's parameters when loaded from a saved model
:param parameters: list
A list containing the numpy.ndarrays representing the actual weights. For this
particular layer, the size of the list is 1.
:param layer_number: integer
The position of the layer in the computational path. It is used to name the
theano.shared variable.
:return:
"""
        assert len(parameters) == 1, 'Wrong number of parameters to be set to EmbeddingLayer!'
self.layer_number = layer_number
w = parameters[0].value
self.W = theano.shared(value=w, name='W_%s' % self.layer_number, borrow=True)
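# Illustrative usage sketch (added for documentation; not part of the original
# module), assuming a vocabulary of 1000 tokens projected to 50 dimensions; the
# index -1 is the padding marker handled by get_mask():
#
#     x = T.imatrix('x')
#     emb = Embedding(size=1000, dim_proj=50)
#     lookup = theano.function([x], emb.activate(x), allow_input_downcast=True)
#     vectors = lookup([[3, 17, -1]])   # shape (1, 3, 50)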
|
{
"content_hash": "91598c703fd17e4b36d93f7e3864a4d8",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 97,
"avg_line_length": 30.228395061728396,
"alnum_prop": 0.5721870532979375,
"repo_name": "giancds/seq_to_seq",
"id": "358b4bcaf3f24b667bf5fd4ff8543caeb4c6de78",
"size": "4897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "seq_to_seq/embedding_layers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "128274"
}
],
"symlink_target": ""
}
|
"""
This Module contains classes for decoding and encoding of datatypes
Available datatypes:
- Boolean TODO
- Byte TODO
- Unsigned Byte TODO
- Short TODO
- Unsigned Short DONE!
- Int TODO
- Long TODO
- Float TODO
- Double TODO
- String DONE!
- Chat TODO
- VarInt DONE!
- VarLong (DONE!)
- Entity Metadata TODO
- Slot TODO
- NBT Tag TODO
- Position TODO
- Angle TODO
- UUID TODO
- Optional X TODO
- Array of X TODO
- X Enum TODO
- Byte Array TODO
"""
from io import BytesIO
def join_bytes(bytes):
"""Join a list of bytes"""
r = b""
for byte in bytes:
r += byte
return r
def split_bytes(bytes):
"""Split a bytes to a list of bytes"""
return [bytes[i:i + 1] for i in range(len(bytes))]
def _byte(b):
"""Get a byte"""
return bytes((b,))
class VarInt():
"""
A VarInt is an encoded Integer that can
take up to 5 bytes in size
"""
@staticmethod
def _read_one(stream):
"""Read one byte from a stream"""
c = stream.read(1)
        if not c:
raise EOFError("Unexpected EOF while reading bytes")
return ord(c)
@staticmethod
def encode(number):
"""Encode an integer to a VarInt"""
# Return if it's not an int
if not type(number) is int:
return number
buf = b""
while True:
towrite = number & 0x7f
number >>= 7
if len(buf) >= 5: raise Exception("VarInt too long")
if number:
buf += _byte(towrite | 0x80)
else:
buf += _byte(towrite)
break
return buf
@staticmethod
def decode_stream(stream):
"""Decode a stream to a VarInt"""
shift = 0
result = 0
while True:
i = VarInt._read_one(stream)
result |= (i & 0x7f) << shift
shift += 7
if not (i & 0x80):
break
return result
@staticmethod
def decode(buf):
"""Decode bytes to a VarInt"""
if len(buf) > 5: raise Exception("VarInt too long")
return VarInt.decode_stream(BytesIO(buf))
@staticmethod
def find_stream(stream):
"""
Find the length and bytes of
a stream of bytes that are supposed
to represent a VarInt
"""
result = b""
while True:
i = VarInt._read_one(stream)
result += _byte(i)
if not (i & 0x80):
return (result, len(result))
@staticmethod
def find(buf):
"""
Find the length and bytes of
bytes that are supposed
to represent a VarInt
"""
if type(buf) is list: return VarInt.find_stream(BytesIO(join_bytes(buf)))
return VarInt.find_stream(BytesIO(buf))
@staticmethod
def willcontinue_stream(stream):
"""
Find out if a byte stream
containing a single byte
representing a VarInt is
a finished VarInt
"""
return bool(VarInt._read_one(stream) & 0x80)
@staticmethod
def willcontinue(buf):
"""
Find out if a byte
representing a VarInt is
a finished VarInt
"""
return VarInt.willcontinue_stream(BytesIO(buf))
class String():
"""
A String is a string in bytes
prepended with the length of the
string decoded as a VarInt
"""
@staticmethod
def encode(s):
"""Encode a string to a String"""
if type(s) is str: return VarInt.encode(len(s)) + bytes(s, "utf-8")
return s
@staticmethod
def decode(b):
"""Decode a String to a string"""
if VarInt.find(b)[0] == b"":
return b.decode("utf-8")
else:
return String.find(b)[0].decode("utf-8")
@staticmethod
def find(b):
"""
Find the length and bytes of
bytes that are supposed
to represent a String
"""
length = VarInt.find(b)
if isinstance(b, list):
return (join_bytes(b[length[1]:length[1] + VarInt.decode(length[0])]), length[1] + VarInt.decode(length[0]))
else:
return (join_bytes(split_bytes(b)[length[1]:length[1] + VarInt.decode(length[0])]), length[1] + VarInt.decode(length[0]))
class UnsignedShort():
"""
    An UnsignedShort is a number
that is always 2 bytes long
And unsigned
Lol
"""
@staticmethod
def encode(i):
"""Encode a number to an UnsignedShort"""
return i.to_bytes(2, byteorder="big", signed=False)
@staticmethod
def decode(b):
"""Encode an UnsignedShort to a number"""
if len(b) > 2: raise Exception("Expected a maximum of 2 bytes")
return int.from_bytes(b, byteorder="big", signed=False)
@staticmethod
def find(b):
"""
        Basically just return the UnsignedShort
        in a tuple with its length, which is
always 2
Lol
"""
if not isinstance(b, list):
return (join_bytes(split_bytes(b[:2])), 2)
return (join_bytes(b[:2]), 2)
class Long():
"""A Long is...a long (8 bytes)"""
@staticmethod
def encode(i):
"""Encode a number to a long"""
return i.to_bytes(8, byteorder="big", signed=True)
@staticmethod
def decode(b):
"""Decode a long to a number"""
return int.from_bytes(b, byteorder="big", signed=True)
@staticmethod
def find(b):
"""Return itself with its length (always 8)"""
if not isinstance(b, list):
return (join_bytes(split_bytes(b[:8])), 8)
return (join_bytes(b[:8]), 8)
class ByteArray:
@staticmethod
def encode(b): return b
@staticmethod
def decode(b): return b
@staticmethod
def find(b): return b
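# Illustrative self-check (added for documentation; not part of the original
# module). It round-trips a few of the codecs above; 300 is the standard VarInt
# example value and packs into the two bytes 0xAC 0x02.
if __name__ == "__main__":
    assert VarInt.encode(300) == b"\xac\x02"
    assert VarInt.decode(b"\xac\x02") == 300
    assert String.decode(String.encode("hello")) == "hello"
    assert UnsignedShort.decode(UnsignedShort.encode(65535)) == 65535
    assert Long.decode(Long.encode(-42)) == -42
    print("Types self-check passed")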
|
{
"content_hash": "3caeed43b7deba28cf25f42dd9154fec",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 124,
"avg_line_length": 20.57312252964427,
"alnum_prop": 0.6217098943323727,
"repo_name": "fabian0010/Blaze",
"id": "4db0a3642fb3f8a80ff7b40de3949bd6d797795f",
"size": "5205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Core/Types.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "388259"
}
],
"symlink_target": ""
}
|
def decode_line(encoded):
"""Decodes a polyline that was encoded using the Google Maps method.
See http://code.google.com/apis/maps/documentation/polylinealgorithm.html
This is a straightforward Python port of Mark McClure's JavaScript polyline decoder
(http://facstaff.unca.edu/mcmcclur/GoogleMaps/EncodePolyline/decode.js)
and Peter Chng's PHP polyline decode
(http://unitstep.net/blog/2008/08/02/decoding-google-maps-encoded-polylines-using-php/)
"""
encoded_len = len(encoded)
index = 0
array = []
lat = 0
lng = 0
while index < encoded_len:
b = 0
shift = 0
result = 0
while True:
b = ord(encoded[index]) - 63
index = index + 1
result |= (b & 0x1f) << shift
shift += 5
if b < 0x20:
break
dlat = ~(result >> 1) if result & 1 else result >> 1
lat += dlat
shift = 0
result = 0
while True:
b = ord(encoded[index]) - 63
index = index + 1
result |= (b & 0x1f) << shift
shift += 5
if b < 0x20:
break
dlng = ~(result >> 1) if result & 1 else result >> 1
lng += dlng
array.append((lat * 1e-5, lng * 1e-5))
return array
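# Illustrative usage (added for documentation; not part of the original module).
# The input below is the sample polyline from Google's Encoded Polyline Algorithm
# documentation; it decodes to three points near (38.5, -120.2), (40.7, -120.95)
# and (43.252, -126.453).
if __name__ == '__main__':
    print(decode_line("_p~iF~ps|U_ulLnnqC_mqNvxq`@"))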
|
{
"content_hash": "97378bfb40268731024c3884bc2ed7e9",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 91,
"avg_line_length": 26.058823529411764,
"alnum_prop": 0.5312264860797592,
"repo_name": "ryankanno/hitraffic-alert",
"id": "ceaa585dde424173cc4cf7f5f8388f567e35cf7a",
"size": "1329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "polyline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "124"
},
{
"name": "HTML",
"bytes": "5789"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "9774"
}
],
"symlink_target": ""
}
|
# This module, and the timer.pyd core timer support, were written by
# Sam Rushing (rushing@nightmare.com)
import timer
import time
# Timers are based on Windows messages. So we need
# to do the event-loop thing!
import win32event, win32gui
# glork holds a simple counter for us.
class glork:
def __init__ (self, delay=1000, max=10):
self.x = 0
self.max = max
self.id = timer.set_timer (delay, self.increment)
# Could use the threading module, but this is
# a win32 extension test after all! :-)
self.event = win32event.CreateEvent(None, 0, 0, None)
def increment (self, id, time):
        print('x = %d' % self.x)
self.x = self.x + 1
# if we've reached the max count,
# kill off the timer.
if self.x > self.max:
# we could have used 'self.id' here, too
timer.kill_timer (id)
win32event.SetEvent(self.event)
# create a counter that will count from '1' thru '10', incrementing
# once a second, and then stop.
def demo (delay=1000, stop=10):
g = glork(delay, stop)
# Timers are message based - so we need
# To run a message loop while waiting for our timers
# to expire.
start_time = time.time()
while 1:
# We can't simply give a timeout of 30 seconds, as
        # we may continuously be receiving other input messages,
# and therefore never expire.
rc = win32event.MsgWaitForMultipleObjects(
(g.event,), # list of objects
0, # wait all
500, # timeout
win32event.QS_ALLEVENTS, # type of input
)
if rc == win32event.WAIT_OBJECT_0:
# Event signalled.
break
elif rc == win32event.WAIT_OBJECT_0+1:
# Message waiting.
if win32gui.PumpWaitingMessages():
raise RuntimeError("We got an unexpected WM_QUIT message!")
else:
# This wait timed-out.
if time.time()-start_time > 30:
raise RuntimeError("We timed out waiting for the timers to expire!")
if __name__=='__main__':
demo()
|
{
"content_hash": "298f36d2b91331fd6d270954d9e5db0f",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 84,
"avg_line_length": 33.78787878787879,
"alnum_prop": 0.568609865470852,
"repo_name": "zhanqxun/cv_fish",
"id": "3d4a9d51c43849fabad4c7a73dc6591b56046c4c",
"size": "2271",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "win32/Demos/timer_demo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "2209"
},
{
"name": "C",
"bytes": "306616"
},
{
"name": "C++",
"bytes": "85075"
},
{
"name": "FORTRAN",
"bytes": "3200"
},
{
"name": "HTML",
"bytes": "68199"
},
{
"name": "JavaScript",
"bytes": "1701"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "9775078"
},
{
"name": "Visual Basic",
"bytes": "949"
},
{
"name": "XSLT",
"bytes": "2113"
}
],
"symlink_target": ""
}
|
from telemetry.core.platform.profiler import iprofiler_profiler
from telemetry.core.platform.profiler import perf_profiler
from telemetry.core.platform.profiler import sample_profiler
_PROFILERS = [iprofiler_profiler.IprofilerProfiler,
perf_profiler.PerfProfiler,
sample_profiler.SampleProfiler]
def FindProfiler(name):
for profiler in _PROFILERS:
if profiler.name() == name:
return profiler
return None
def GetAllAvailableProfilers():
return [p.name() for p in _PROFILERS]
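# --- Illustrative usage (appended sketch, not part of the original module) ---
if __name__ == '__main__':
    # Every name reported by GetAllAvailableProfilers() resolves back to its
    # class via FindProfiler(); unknown names return None.
    for profiler_name in GetAllAvailableProfilers():
        assert FindProfiler(profiler_name) is not None
    assert FindProfiler('no-such-profiler') is None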
|
{
"content_hash": "d6c28ff1207ce040b92ef09b7bfe6044",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 63,
"avg_line_length": 27.57894736842105,
"alnum_prop": 0.7461832061068703,
"repo_name": "loopCM/chromium",
"id": "9ae251084ebbb64a75d2d665f9eed3b2a5c24de3",
"size": "691",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "tools/telemetry/telemetry/core/platform/profiler/profiler_finder.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import numpy as np
import rbmpy.plotter as plotter
import rbmpy.datasets as datasets
import rbmpy.sampler as sampler  # assumed module path; needed for the sampler.* calls below
import math
from sklearn.utils.extmath import safe_sparse_dot
from sklearn.utils.extmath import log_logistic
from sklearn.linear_model import Perceptron
def log_likelyhood_score(sample, target):
"""Lets find the log likelyhood"""
# we need the actual visible pattern that we want to compute the score for
# if vi|ha is 400 | 786
# and actual vi is 400 | 786 we are in business
score = (target * np.log(sample)) + ((1 - target) * np.log((1 - sample)))
return score
class plot_correction_decorator(object):
def __init__(self, f):
self.f = f
def __call__(self, *args):
"""
The __call__ method is not called until the
decorated function is called.
"""
result = self.f(*args)
plotter.plot_weights(result[0].sum(1))
return result
class Average_Decorator(object):
def __init__(self,run_times = 3):
self.run_times = run_times
def __call__(self,f):
        def wrapped_f(*args, **kwargs):
            # Call the wrapped function run_times times and collect the results.
            results = []
            for i in range(self.run_times):
                results.append(f(*args, **kwargs))
            return results
        return wrapped_f
class Result:
def __init__(self, num_items, num_samples, rbm_a, rbm_b, data_a, data_b):
self.rbm_a = rbm_a
self.rbm_b = rbm_b
self.num_items = num_items
self.num_samples = num_samples
self.part_sampler = sampler.PartitionedSampler(rbm_a, rbm_b, num_items= self.num_items)
self.van_data_a_sampler = sampler.VanillaSampler(rbm_a)
self.van_data_b_sampler = sampler.VanillaSampler(rbm_b)
self.vis_target_a = self.van_data_a_sampler.reconstruction_given_visible(data_a)
self.vis_target_b = self.van_data_b_sampler.reconstruction_given_visible(data_b)
print("Constructing Composite Dataset")
self.composite = datasets.composite_datasets(data_a, data_b)
def calculate_result(self):
self.run_vanilla()
self.run_partitioned()
self.score_a, self.score_b = self.imagewise_score()
def run_vanilla(self):
print("Generating Vanilla Samples")
self.vis_van_a = self.van_data_a_sampler.reconstruction_given_visible(self.composite)
self.vis_van_b = self.van_data_b_sampler.reconstruction_given_visible(self.composite)
def run_partitioned(self, stored_hidden_interval = 10):
self.stored_hidden_interval = stored_hidden_interval # number of samples between stores of the hidden layer
mini_batches = math.floor(self.num_samples / self.stored_hidden_interval)
print("Generating Partitioned Reconstructions (This may take a while)")
stored_hiddens = {}
hid_a = None
hid_b = None
for batch in range(mini_batches):
print("Running batch {} of {}".format(batch, mini_batches))
hid_a, hid_b = self.part_sampler.visible_to_hidden(self.composite, num_samples = self.stored_hidden_interval,hidden_a = hid_a,hidden_b = hid_b)
stored_hiddens[batch] = (hid_a, hid_b)
self.stored_hiddens = stored_hiddens
def visibles_for_stored_hidden(self, iteration):
a= self.part_sampler.hidden_to_sample(self.stored_hiddens[iteration][0],self.rbm_a)
b= self.part_sampler.hidden_to_sample(self.stored_hiddens[iteration][1],self.rbm_b)
return a,b
def visibles_for_partitioned(self):
return self.visibles_for_stored_hidden(len(self.stored_hiddens)-1)
def imagewise_score_at_iter(self, hiddens_at_iteration):
part_vis_a, part_vis_b = self.visibles_for_stored_hidden(hiddens_at_iteration)
part_vis_a_score = log_likelyhood_score(part_vis_a, self.vis_target_a)
part_vis_b_score = log_likelyhood_score(part_vis_b, self.vis_target_b)
van_vis_a_score = log_likelyhood_score(self.vis_van_a, self.vis_target_a)
van_vis_b_score = log_likelyhood_score(self.vis_van_b, self.vis_target_b)
score_a = {"PART" : part_vis_a_score.sum(1), "VAN" : van_vis_a_score.sum(1)}
score_b = {"PART" : part_vis_b_score.sum(1), "VAN" : van_vis_b_score.sum(1)}
return score_a, score_b
def imagewise_score(self):
return self.imagewise_score_at_iter(len(self.stored_hiddens)-1)
def win_images(self, score_a, score_b):
part_a = score_a["PART"]
part_b = score_b["PART"]
van_a = score_a["VAN"]
van_b = score_b["VAN"]
win_a = np.compress((part_a > van_a), self.composite, axis = 0)
win_b = np.compress((part_b > van_b), self.composite, axis = 0)
return (win_a, win_b)
def equal_images(self,score_a, score_b):
part_a = score_a["PART"]
part_b = score_b["PART"]
van_a = score_a["VAN"]
van_b = score_b["VAN"]
win_a = np.compress((part_a == van_a), self.composite, axis = 0)
win_b = np.compress((part_b == van_b), self.composite, axis = 0)
return (win_a, win_b)
def lose_images(self,score_a, score_b):
part_a = score_a["PART"]
part_b = score_b["PART"]
van_a = score_a["VAN"]
van_b = score_b["VAN"]
win_a = np.compress((part_a < van_a), self.composite, axis = 0)
win_b = np.compress((part_b < van_b), self.composite, axis = 0)
return (win_a, win_b)
def plot_various_images(self):
win_a, win_b = self.win_images(self.score_a, self.score_b)
lose_a, lose_b = self.lose_images(self.score_a, self.score_b)
equal_a, equal_b = self.equal_images(self.score_a, self.score_b)
print("Wins For Model A:{} plotting will only show 54 at maximum".format(win_a.shape[0]));
plotter.plot(win_a)
print("Losses For Model A: {} plotting will only show 54 at maximum ".format(lose_a.shape[0]));
plotter.plot(lose_a)
print("Tie For Model A: {} plotting will only show 54 at maximum ".format(equal_a.shape[0]));
plotter.plot(equal_a)
def classify(title, train, test, train_labels, test_labels):
classifier = Perceptron()
classifier.fit(train, train_labels)
print("{} {}".format(title,classifier.score(test, test_labels)))
|
{
"content_hash": "b9984fbc34fe54bb398c5c11713c060f",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 155,
"avg_line_length": 38.04216867469879,
"alnum_prop": 0.6231195566112431,
"repo_name": "garibaldu/multicauseRBM",
"id": "16317ce259a070710e6625da3f0a8a53f320c675",
"size": "6315",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Max/rbmpy/performance.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "9470565"
},
{
"name": "Python",
"bytes": "121462"
},
{
"name": "Shell",
"bytes": "608"
},
{
"name": "TeX",
"bytes": "232429"
}
],
"symlink_target": ""
}
|
from nltk.stem import PorterStemmer
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk.corpus import stopwords  # needed for stopwords.words('english') below
import sys
def remove_stopwords(tweets):
stemmer = PorterStemmer()
with open(tweets, 'r', buffering=1028) as read_tweet:
for tweet in read_tweet:
#Use stop word method
stop_words = set(stopwords.words('english'))
word_tokens = word_tokenize(tweet)
filtered_tweet = []
for word in word_tokens:
if word not in stop_words:
# Capture only words not listed in stop_word txt
filtered_tweet.append(word)
print(filtered_tweet)
def main():
tweets = "/Users/alanseciwa/Desktop/Independent_Study/Sep16-GOP-TweetsONLY/clean_data-TWEETONLY.csv"
remove_stopwords(tweets)
if __name__ == '__main__':
main()
sys.exit()
|
{
"content_hash": "c01d77f65f2134b79c1812a32ce846f0",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 104,
"avg_line_length": 25.470588235294116,
"alnum_prop": 0.6096997690531177,
"repo_name": "aseciwa/independent-study",
"id": "85ef93ab0ce79b1cf2d13fd6da67419338c1d3ca",
"size": "866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/stemming.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "14627"
},
{
"name": "Python",
"bytes": "190915"
}
],
"symlink_target": ""
}
|
import sys
import numpy as np
import scipy as sp
import scipy.ndimage
import scipy.interpolate
import os
import time
from struct import *
import glob
import png
from scipy.spatial import cKDTree
import multiprocessing
from joblib import Parallel, delayed
# read binary displacement data
def readBinary(fname):
coords = []
# data format is lon, lat, elevation
# we will use lat, lon, elevation to be consistent with DEM
nbytes = 4 # data is recorded as floats
with open(fname, "rb") as f:
byte = f.read(nbytes)
while len(byte) > 0:
# binary data is big-endian
lat = unpack('>f', byte)[0]
byte = f.read(nbytes)
lon = unpack('>f', byte)[0]
byte = f.read(nbytes)
elev = unpack('>f', byte)[0]
coords.append((lon, lat, elev))
byte = f.read(nbytes)
return coords
def interpolate(fname, dem_lat, dem_lon, output_dir, files):
slice = readBinary(fname)
# slice is sim results at each timestep
# each slice has array of (lat, lon, elevation)
# we want the simulation elevation at the dem lat, lon coords
# size of data dimensions
n_lat = len(dem_lat)
n_lon = len(dem_lon)
# record time taken to interpolate
s = time.time()
# the slice data is our function of (lat, lon)
# define the lat and lon vectors and ndarrays
lat, lon, elev = zip(*slice)
lat = np.array(lat, dtype=np.float)
lon = np.array(lon, dtype=np.float)
elev = np.array(elev, dtype=np.float)
# search for nearby points using k-nearest neighbour
k = 9 # number of neighbours
r = 1 # max distance of neighbourhood
a = np.column_stack((lat, lon)) # format input array for tree
tree = scipy.spatial.cKDTree(a.copy()) # init tree with copy of data
disp = np.zeros((n_lat, n_lon)) # init output displacement matrix
# find nearest neighbours for each DEM lat lon
for i in range(n_lat)[:]:
for j in range(n_lon)[:]:
# print dem_lat[i], dem_lon[j]
q = np.array([dem_lat[i], dem_lon[j]]) # query point
n_d, n_idx = tree.query(q, k=k, eps=0, p=2,
distance_upper_bound=r) # query returns neighbour distances and indices
disp[i][j] = 0
# print d, idx
wm = 0 # weighted mean
count = 0
            # determine mean of weighted contributions from neighbours
for ni in range(len(n_idx)):
if (np.isfinite(n_d[ni])):
count = count + 1
w = (r - n_d[ni]) / r
                    wm += w * elev[n_idx[ni]]  # weight x elevation for this neighbour
if count > 0:
disp[i][j] = wm / count # average of weighted contributions
disp = np.flipud(disp)
e = time.time()
print(os.path.basename(fname) + " %.2f s" % (e - s))
sys.stdout.flush()
fname = "%s/disp_%d.csv" % (output_dir, files.index(fname))
np.savetxt(fname, disp, delimiter=",")
def scale(d, n):
return d * n
if __name__ == '__main__':
# input and output files
in_file = sys.argv[1]
input_dir = os.path.abspath(sys.argv[2])
output_dir = os.path.abspath(sys.argv[3])
print("Reading DEM basemap..."),
s = time.time()
# Open and read .in file
o_lat = []
o_lon = []
o_elev = []
with open(in_file, "r") as file:
# read lines
lines = file.readlines()
# determine number of latitude and longitude points
n_lat, n_lon = lines[0].split()
n_lat = int(n_lat)
n_lon = int(n_lon)
# read latitude values
o_lat = [float(x) for x in lines[1].split()]
# read longitude values
o_lon = [float(x) for x in lines[2].split()]
# read lat, long grid
# Bottom row in image is top row in data
for i in range(0, n_lat):
# flat row format
o_elev = [float(x) for x in lines[3 + i].split()] + o_elev
e = time.time()
print("done %.2f s" % (e - s))
# reshape data
o_elev = np.array(o_elev)
elev = o_elev.reshape((n_lat, n_lon))
# Write out base map:
np.savetxt("dem.csv", elev, delimiter=",")
# read displacement data
files = glob.glob(input_dir + "/*")
files.sort()
if (len(files) == 0):
print("No input files found");
sys.exit(1)
# Map displacement data to DEM grid
print("Interpolating data ...")
sys.stdout.flush()
num_cores = multiprocessing.cpu_count()
Parallel(n_jobs=num_cores)(delayed(interpolate)(f, o_lat, o_lon, output_dir, files) for f in files)
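# --- Illustrative sketch (appended for clarity; toy data, not part of the pipeline above) ---
def _toy_neighbour_weighting():
    """Demonstrate the (r - d) / r neighbour weighting used inside interpolate()."""
    pts = np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0]])
    elev = np.array([10.0, 20.0, 30.0])
    tree = cKDTree(pts)
    r = 1.0
    d, idx = tree.query(np.array([0.2, 0.2]), k=3, distance_upper_bound=r)
    weights = [(r - di) / r for di in d if np.isfinite(di)]
    values = [elev[i] for di, i in zip(d, idx) if np.isfinite(di)]
    # Average of the weighted contributions, as in the main loop above.
    return sum(w * v for w, v in zip(weights, values)) / len(weights)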
|
{
"content_hash": "b8ba04f39bdee9c04ef01669d9f004e4",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 108,
"avg_line_length": 31.22,
"alnum_prop": 0.5712150330984411,
"repo_name": "UoA-eResearch/earthquake-viz",
"id": "4823dc1dbdb43caa64cbfcdfdceeab2887c0162b",
"size": "4738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "preprocess.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "79"
},
{
"name": "Python",
"bytes": "20337"
},
{
"name": "Shell",
"bytes": "209"
}
],
"symlink_target": ""
}
|
from base64 import b64encode
from datetime import (
datetime,
timedelta,
)
from hashlib import md5
import re
import struct
import zlib
try:
import simplejson as json
except ImportError:
import json
from webob.byterange import ContentRange
from webob.cachecontrol import (
CacheControl,
serialize_cache_control,
)
from webob.compat import (
PY3,
bytes_,
native_,
text_type,
url_quote,
urlparse,
)
from webob.cookies import (
Cookie,
make_cookie,
)
from webob.datetime_utils import (
parse_date_delta,
serialize_date_delta,
timedelta_to_seconds,
)
from webob.descriptors import (
CHARSET_RE,
SCHEME_RE,
converter,
date_header,
header_getter,
list_header,
parse_auth,
parse_content_range,
parse_etag_response,
parse_int,
parse_int_safe,
serialize_auth,
serialize_content_range,
serialize_etag_response,
serialize_int,
)
from webob.headers import ResponseHeaders
from webob.request import BaseRequest
from webob.util import status_reasons, status_generic_reasons, warn_deprecation
__all__ = ['Response']
_PARAM_RE = re.compile(r'([a-z0-9]+)=(?:"([^"]*)"|([a-z0-9_.-]*))', re.I)
_OK_PARAM_RE = re.compile(r'^[a-z0-9_.-]+$', re.I)
_gzip_header = b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff'
_marker = object()
class Response(object):
"""
Represents a WSGI response.
If no arguments are passed, creates a :class:`~Response` that uses a
variety of defaults. The defaults may be changed by sub-classing the
:class:`~Response`. See the :ref:`sub-classing notes
<response_subclassing_notes>`.
:cvar ~Response.body: If ``body`` is a ``text_type``, then it will be
encoded using either ``charset`` when provided or ``default_encoding``
when ``charset`` is not provided if the ``content_type`` allows for a
``charset``. This argument is mutually exclusive with ``app_iter``.
:vartype ~Response.body: bytes or text_type
:cvar ~Response.status: Either an :class:`int` or a string that is
an integer followed by the status text. If it is an integer, it will be
converted to a proper status that also includes the status text. Any
existing status text will be kept. Non-standard values are allowed.
:vartype ~Response.status: int or str
:cvar ~Response.headerlist: A list of HTTP headers for the response.
:vartype ~Response.headerlist: list
:cvar ~Response.app_iter: An iterator that is used as the body of the
response. Should conform to the WSGI requirements and should provide
bytes. This argument is mutually exclusive with ``body``.
:vartype ~Response.app_iter: iterable
:cvar ~Response.content_type: Sets the ``Content-Type`` header. If no
``content_type`` is provided, and there is no ``headerlist``, the
``default_content_type`` will be automatically set. If ``headerlist``
is provided then this value is ignored.
:vartype ~Response.content_type: str or None
:cvar conditional_response: Used to change the behavior of the
:class:`~Response` to check the original request for conditional
response headers. See :meth:`~Response.conditional_response_app` for
more information.
:vartype conditional_response: bool
:cvar ~Response.charset: Adds a ``charset`` ``Content-Type`` parameter. If
no ``charset`` is provided and the ``Content-Type`` is text, then the
``default_charset`` will automatically be added. Currently the only
``Content-Type``'s that allow for a ``charset`` are defined to be
``text/*``, ``application/xml``, and ``*/*+xml``. Any other
``Content-Type``'s will not have a ``charset`` added. If a
``headerlist`` is provided this value is ignored.
:vartype ~Response.charset: str or None
All other response attributes may be set on the response by providing them
as keyword arguments. A :exc:`TypeError` will be raised for any unexpected
keywords.
.. _response_subclassing_notes:
**Sub-classing notes:**
* The ``default_content_type`` is used as the default for the
``Content-Type`` header that is returned on the response. It is
``text/html``.
* The ``default_charset`` is used as the default character set to return on
the ``Content-Type`` header, if the ``Content-Type`` allows for a
``charset`` parameter. Currently the only ``Content-Type``'s that allow
for a ``charset`` are defined to be: ``text/*``, ``application/xml``, and
``*/*+xml``. Any other ``Content-Type``'s will not have a ``charset``
added.
* The ``unicode_errors`` is set to ``strict``, and access on a
:attr:`~Response.text` will raise an error if it fails to decode the
:attr:`~Response.body`.
* ``default_conditional_response`` is set to False. This flag may be set to
True so that all ``Response`` objects will attempt to check the original
request for conditional response headers. See
:meth:`~Response.conditional_response_app` for more information.
* ``default_body_encoding`` is set to 'UTF-8' by default, it exists to
allow users to get/set the Response object using .text, even if no
charset has been set for the Content-Type.
"""
default_content_type = 'text/html'
default_charset = 'UTF-8'
unicode_errors = 'strict'
default_conditional_response = False
default_body_encoding = 'UTF-8'
# These two are only around so that when people pass them into the
# constructor they correctly get saved and set, however they are not used
# by any part of the Response. See commit
# 627593bbcd4ab52adc7ee569001cdda91c670d5d for rationale.
request = None
environ = None
#
# __init__, from_file, copy
#
def __init__(self, body=None, status=None, headerlist=None, app_iter=None,
content_type=None, conditional_response=None, charset=_marker,
**kw):
# Do some sanity checking, and turn json_body into an actual body
if app_iter is None and body is None and ('json_body' in kw or 'json' in kw):
if 'json_body' in kw:
json_body = kw.pop('json_body')
else:
json_body = kw.pop('json')
body = json.dumps(json_body, separators=(',', ':')).encode('UTF-8')
if content_type is None:
content_type = 'application/json'
if app_iter is None:
if body is None:
body = b''
elif body is not None:
raise TypeError(
"You may only give one of the body and app_iter arguments")
# Set up Response.status
if status is None:
self._status = '200 OK'
else:
self.status = status
# Initialize headers
self._headers = None
if headerlist is None:
self._headerlist = []
else:
self._headerlist = headerlist
# Set the encoding for the Response to charset, so if a charset is
# passed but the Content-Type does not allow for a charset, we can
        # still encode text_type bodies.
# r = Response(
# content_type='application/foo',
# charset='UTF-8',
# body=u'somebody')
# Should work without issues, and the header will be correctly set to
# Content-Type: application/foo with no charset on it.
encoding = None
if charset is not _marker:
encoding = charset
# Does the status code have a body or not?
code_has_body = (
self._status[0] != '1' and
self._status[:3] not in ('204', '205', '304')
)
        # We only set the content_type to the one passed to the constructor or
        # the default content type if there is none that exists AND there was
        # no headerlist passed. If a headerlist was provided then most likely
        # the omission of the Content-Type is on purpose and we shouldn't try
        # to be smart about it.
        #
        # Also allow creation of an empty Response with just the status set to a
        # Response with empty body, such as Response(status='204 No Content')
        # without the default content_type being set (since empty bodies have
        # no Content-Type)
        #
        # Check if content_type is set because default_content_type could be
        # None, in which case there is no content_type, and thus we don't need
        # to do anything
content_type = content_type or self.default_content_type
if headerlist is None and code_has_body and content_type:
# Set up the charset, if the content_type doesn't already have one
has_charset = 'charset=' in content_type
# If the Content-Type already has a charset, we don't set the user
# provided charset on the Content-Type, so we shouldn't use it as
            # the encoding for text_type based bodies.
if has_charset:
encoding = None
# Do not use the default_charset for the encoding because we
# want things like
# Response(content_type='image/jpeg',body=u'foo') to raise when
# trying to encode the body.
new_charset = encoding
if (
not has_charset and
charset is _marker and
self.default_charset
):
new_charset = self.default_charset
# Optimize for the default_content_type as shipped by
            # WebOb, because we know that 'text/html' has a charset,
# otherwise add a charset if the content_type has a charset.
#
# Even if the user supplied charset explicitly, we do not add
            # it to the Content-Type unless the content type allows a charset; instead
# the user supplied charset is solely used for encoding the
# body if it is a text_type
if (
new_charset and
(
content_type == 'text/html' or
_content_type_has_charset(content_type)
)
):
content_type += '; charset=' + new_charset
self._headerlist.append(('Content-Type', content_type))
# Set up conditional response
if conditional_response is None:
self.conditional_response = self.default_conditional_response
else:
self.conditional_response = bool(conditional_response)
# Set up app_iter if the HTTP Status code has a body
if app_iter is None and code_has_body:
if isinstance(body, text_type):
# Fall back to trying self.charset if encoding is not set. In
# most cases encoding will be set to the default value.
encoding = encoding or self.charset
if encoding is None:
raise TypeError(
"You cannot set the body to a text value without a "
"charset")
body = body.encode(encoding)
app_iter = [body]
if headerlist is not None:
self._headerlist[:] = [
(k, v)
for (k, v)
in self._headerlist
if k.lower() != 'content-length'
]
self._headerlist.append(('Content-Length', str(len(body))))
elif app_iter is None and not code_has_body:
app_iter = [b'']
self._app_iter = app_iter
# Loop through all the remaining keyword arguments
for name, value in kw.items():
if not hasattr(self.__class__, name):
# Not a basic attribute
raise TypeError(
"Unexpected keyword: %s=%r" % (name, value))
setattr(self, name, value)
@classmethod
def from_file(cls, fp):
"""Reads a response from a file-like object (it must implement
``.read(size)`` and ``.readline()``).
It will read up to the end of the response, not the end of the
file.
This reads the response as represented by ``str(resp)``; it
may not read every valid HTTP response properly. Responses
must have a ``Content-Length``"""
headerlist = []
status = fp.readline().strip()
is_text = isinstance(status, text_type)
if is_text:
_colon = ':'
_http = 'HTTP/'
else:
_colon = b':'
_http = b'HTTP/'
if status.startswith(_http):
(http_ver, status_num, status_text) = status.split(None, 2)
status = '%s %s' % (native_(status_num), native_(status_text))
while 1:
line = fp.readline().strip()
if not line:
# end of headers
break
try:
header_name, value = line.split(_colon, 1)
except ValueError:
raise ValueError('Bad header line: %r' % line)
value = value.strip()
headerlist.append((
native_(header_name, 'latin-1'),
native_(value, 'latin-1')
))
r = cls(
status=status,
headerlist=headerlist,
app_iter=(),
)
body = fp.read(r.content_length or 0)
if is_text:
r.text = body
else:
r.body = body
return r
def copy(self):
"""Makes a copy of the response"""
# we need to do this for app_iter to be reusable
app_iter = list(self._app_iter)
iter_close(self._app_iter)
# and this to make sure app_iter instances are different
self._app_iter = list(app_iter)
return self.__class__(
status=self._status,
headerlist=self._headerlist[:],
app_iter=app_iter,
conditional_response=self.conditional_response)
#
# __repr__, __str__
#
def __repr__(self):
return '<%s at 0x%x %s>' % (self.__class__.__name__, abs(id(self)),
self.status)
def __str__(self, skip_body=False):
parts = [self.status]
if not skip_body:
# Force enumeration of the body (to set content-length)
self.body
parts += map('%s: %s'.__mod__, self.headerlist)
if not skip_body and self.body:
parts += ['', self.text if PY3 else self.body]
return '\r\n'.join(parts)
#
# status, status_code/status_int
#
def _status__get(self):
"""
The status string
"""
return self._status
def _status__set(self, value):
try:
code = int(value)
except (ValueError, TypeError):
pass
else:
self.status_code = code
return
if PY3:
if isinstance(value, bytes):
value = value.decode('ascii')
elif isinstance(value, text_type):
value = value.encode('ascii')
if not isinstance(value, str):
raise TypeError(
"You must set status to a string or integer (not %s)"
% type(value))
# Attempt to get the status code itself, if this fails we should fail
try:
# We don't need this value anywhere, we just want to validate it's
# an integer. So we are using the side-effect of int() raises a
# ValueError as a test
int(value.split()[0])
except ValueError:
raise ValueError('Invalid status code, integer required.')
self._status = value
status = property(_status__get, _status__set, doc=_status__get.__doc__)
def _status_code__get(self):
"""
The status as an integer
"""
return int(self._status.split()[0])
def _status_code__set(self, code):
try:
self._status = '%d %s' % (code, status_reasons[code])
except KeyError:
self._status = '%d %s' % (code, status_generic_reasons[code // 100])
status_code = status_int = property(_status_code__get, _status_code__set,
doc=_status_code__get.__doc__)
#
# headerslist, headers
#
def _headerlist__get(self):
"""
The list of response headers
"""
return self._headerlist
def _headerlist__set(self, value):
self._headers = None
if not isinstance(value, list):
if hasattr(value, 'items'):
value = value.items()
value = list(value)
self._headerlist = value
def _headerlist__del(self):
self.headerlist = []
headerlist = property(_headerlist__get, _headerlist__set,
_headerlist__del, doc=_headerlist__get.__doc__)
def _headers__get(self):
"""
The headers in a dictionary-like object
"""
if self._headers is None:
self._headers = ResponseHeaders.view_list(self._headerlist)
return self._headers
def _headers__set(self, value):
if hasattr(value, 'items'):
value = value.items()
self.headerlist = value
self._headers = None
headers = property(_headers__get, _headers__set, doc=_headers__get.__doc__)
#
# body
#
def _body__get(self):
"""
The body of the response, as a :class:`bytes`. This will read in
the entire app_iter if necessary.
"""
app_iter = self._app_iter
# try:
# if len(app_iter) == 1:
# return app_iter[0]
# except:
# pass
if isinstance(app_iter, list) and len(app_iter) == 1:
return app_iter[0]
if app_iter is None:
raise AttributeError("No body has been set")
try:
body = b''.join(app_iter)
finally:
iter_close(app_iter)
if isinstance(body, text_type):
raise _error_unicode_in_app_iter(app_iter, body)
self._app_iter = [body]
if len(body) == 0:
# if body-length is zero, we assume it's a HEAD response and
# leave content_length alone
pass
elif self.content_length is None:
self.content_length = len(body)
elif self.content_length != len(body):
raise AssertionError(
"Content-Length is different from actual app_iter length "
"(%r!=%r)"
% (self.content_length, len(body))
)
return body
def _body__set(self, value=b''):
if not isinstance(value, bytes):
if isinstance(value, text_type):
msg = ("You cannot set Response.body to a text object "
"(use Response.text)")
else:
msg = ("You can only set the body to a binary type (not %s)" %
type(value))
raise TypeError(msg)
if self._app_iter is not None:
self.content_md5 = None
self._app_iter = [value]
self.content_length = len(value)
# def _body__del(self):
# self.body = ''
# #self.content_length = None
body = property(_body__get, _body__set, _body__set)
def _json_body__get(self):
"""
Set/get the body of the response as JSON
.. note::
This will automatically :meth:`~bytes.decode` the
:attr:`~Response.body` as ``UTF-8`` on get, and
:meth:`~str.encode` the :meth:`json.dumps` as ``UTF-8``
before assigning to :attr:`~Response.body`.
"""
# Note: UTF-8 is a content-type specific default for JSON
return json.loads(self.body.decode('UTF-8'))
def _json_body__set(self, value):
self.body = json.dumps(value, separators=(',', ':')).encode('UTF-8')
def _json_body__del(self):
del self.body
json = json_body = property(_json_body__get, _json_body__set, _json_body__del)
def _has_body__get(self):
"""
        Determine if the response has a :attr:`~Response.body`. In
contrast to simply accessing :attr:`~Response.body` this method
will **not** read the underlying :attr:`~Response.app_iter`.
"""
app_iter = self._app_iter
if isinstance(app_iter, list) and len(app_iter) == 1:
if app_iter[0] != b'':
return True
else:
return False
if app_iter is None: # pragma: no cover
return False
return True
has_body = property(_has_body__get)
#
# text, unicode_body, ubody
#
def _text__get(self):
"""
Get/set the text value of the body using the charset of the
Content-Type or the default_body_encoding.
"""
if not self.charset and not self.default_body_encoding:
raise AttributeError(
"You cannot access Response.text unless charset or default_body_encoding"
" is set"
)
decoding = self.charset or self.default_body_encoding
body = self.body
return body.decode(decoding, self.unicode_errors)
def _text__set(self, value):
if not self.charset and not self.default_body_encoding:
raise AttributeError(
"You cannot access Response.text unless charset or default_body_encoding"
" is set"
)
if not isinstance(value, text_type):
raise TypeError(
"You can only set Response.text to a unicode string "
"(not %s)" % type(value))
encoding = self.charset or self.default_body_encoding
self.body = value.encode(encoding)
def _text__del(self):
del self.body
text = property(_text__get, _text__set, _text__del, doc=_text__get.__doc__)
unicode_body = ubody = property(_text__get, _text__set, _text__del,
"Deprecated alias for .text")
#
# body_file, write(text)
#
def _body_file__get(self):
"""
A file-like object that can be used to write to the
body. If you passed in a list app_iter, that app_iter will be
modified by writes.
"""
return ResponseBodyFile(self)
def _body_file__set(self, file):
self.app_iter = iter_file(file)
def _body_file__del(self):
del self.body
body_file = property(_body_file__get, _body_file__set, _body_file__del,
doc=_body_file__get.__doc__)
def write(self, text):
if not isinstance(text, bytes):
if not isinstance(text, text_type):
msg = "You can only write str to a Response.body_file, not %s"
raise TypeError(msg % type(text))
if not self.charset:
msg = ("You can only write text to Response if charset has "
"been set")
raise TypeError(msg)
text = text.encode(self.charset)
app_iter = self._app_iter
if not isinstance(app_iter, list):
try:
new_app_iter = self._app_iter = list(app_iter)
finally:
iter_close(app_iter)
app_iter = new_app_iter
self.content_length = sum(len(chunk) for chunk in app_iter)
app_iter.append(text)
if self.content_length is not None:
self.content_length += len(text)
#
# app_iter
#
def _app_iter__get(self):
"""
Returns the app_iter of the response.
If body was set, this will create an app_iter from that body
(a single-item list)
"""
return self._app_iter
def _app_iter__set(self, value):
if self._app_iter is not None:
# Undo the automatically-set content-length
self.content_length = None
self.content_md5 = None
self._app_iter = value
def _app_iter__del(self):
self._app_iter = []
self.content_length = None
app_iter = property(_app_iter__get, _app_iter__set, _app_iter__del,
doc=_app_iter__get.__doc__)
#
# headers attrs
#
allow = list_header('Allow', '14.7')
# TODO: (maybe) support response.vary += 'something'
# TODO: same thing for all listy headers
vary = list_header('Vary', '14.44')
content_length = converter(
header_getter('Content-Length', '14.17'),
parse_int, serialize_int, 'int')
content_encoding = header_getter('Content-Encoding', '14.11')
content_language = list_header('Content-Language', '14.12')
content_location = header_getter('Content-Location', '14.14')
content_md5 = header_getter('Content-MD5', '14.14')
content_disposition = header_getter('Content-Disposition', '19.5.1')
accept_ranges = header_getter('Accept-Ranges', '14.5')
content_range = converter(
header_getter('Content-Range', '14.16'),
parse_content_range, serialize_content_range, 'ContentRange object')
date = date_header('Date', '14.18')
expires = date_header('Expires', '14.21')
last_modified = date_header('Last-Modified', '14.29')
_etag_raw = header_getter('ETag', '14.19')
etag = converter(
_etag_raw,
parse_etag_response, serialize_etag_response,
'Entity tag'
)
@property
def etag_strong(self):
return parse_etag_response(self._etag_raw, strong=True)
location = header_getter('Location', '14.30')
pragma = header_getter('Pragma', '14.32')
age = converter(
header_getter('Age', '14.6'),
parse_int_safe, serialize_int, 'int')
retry_after = converter(
header_getter('Retry-After', '14.37'),
parse_date_delta, serialize_date_delta, 'HTTP date or delta seconds')
server = header_getter('Server', '14.38')
# TODO: the standard allows this to be a list of challenges
www_authenticate = converter(
header_getter('WWW-Authenticate', '14.47'),
parse_auth, serialize_auth,
)
#
# charset
#
def _charset__get(self):
"""
Get/set the charset specified in Content-Type.
There is no checking to validate that a ``content_type`` actually allows
for a charset parameter.
"""
header = self.headers.get('Content-Type')
if not header:
return None
match = CHARSET_RE.search(header)
if match:
return match.group(1)
return None
def _charset__set(self, charset):
if charset is None:
self._charset__del()
return
header = self.headers.get('Content-Type', None)
if header is None:
raise AttributeError("You cannot set the charset when no "
"content-type is defined")
match = CHARSET_RE.search(header)
if match:
header = header[:match.start()] + header[match.end():]
header += '; charset=%s' % charset
self.headers['Content-Type'] = header
def _charset__del(self):
header = self.headers.pop('Content-Type', None)
if header is None:
# Don't need to remove anything
return
match = CHARSET_RE.search(header)
if match:
header = header[:match.start()] + header[match.end():]
self.headers['Content-Type'] = header
charset = property(_charset__get, _charset__set, _charset__del,
doc=_charset__get.__doc__)
#
# content_type
#
def _content_type__get(self):
"""
Get/set the Content-Type header. If no Content-Type header is set, this
will return None.
.. versionchanged:: 1.7
Setting a new Content-Type will remove all Content-Type parameters
and reset the charset to the default if the Content-Type is
``text/*`` or XML (``application/xml``, or ``*/*+xml``)
To preserve all Content-Type parameters you may use the following
code:
.. code::
resp = Response()
params = resp.content_type_params
resp.content_type = 'application/something'
resp.content_type_params = params
"""
header = self.headers.get('Content-Type')
if not header:
return None
return header.split(';', 1)[0]
def _content_type__set(self, value):
if not value:
self._content_type__del()
return
else:
content_type = value
# Set up the charset if the content-type doesn't have one
has_charset = 'charset=' in content_type
new_charset = None
if (
not has_charset and
self.default_charset
):
new_charset = self.default_charset
# Optimize for the default_content_type as shipped by
        # WebOb, because we know that 'text/html' has a charset,
# otherwise add a charset if the content_type has a charset.
#
# We add the default charset if the content-type is "texty".
if (
new_charset and
(
content_type == 'text/html' or
_content_type_has_charset(content_type)
)
):
content_type += '; charset=' + new_charset
self.headers['Content-Type'] = content_type
def _content_type__del(self):
self.headers.pop('Content-Type', None)
content_type = property(_content_type__get, _content_type__set,
_content_type__del, doc=_content_type__get.__doc__)
#
# content_type_params
#
def _content_type_params__get(self):
"""
A dictionary of all the parameters in the content type.
(This is not a view, set to change, modifications of the dict will not
be applied otherwise)
"""
params = self.headers.get('Content-Type', '')
if ';' not in params:
return {}
params = params.split(';', 1)[1]
result = {}
for match in _PARAM_RE.finditer(params):
result[match.group(1)] = match.group(2) or match.group(3) or ''
return result
def _content_type_params__set(self, value_dict):
if not value_dict:
self._content_type_params__del()
return
params = []
for k, v in sorted(value_dict.items()):
if not _OK_PARAM_RE.search(v):
v = '"%s"' % v.replace('"', '\\"')
params.append('; %s=%s' % (k, v))
ct = self.headers.pop('Content-Type', '').split(';', 1)[0]
ct += ''.join(params)
self.headers['Content-Type'] = ct
def _content_type_params__del(self):
self.headers['Content-Type'] = self.headers.get(
'Content-Type', '').split(';', 1)[0]
content_type_params = property(
_content_type_params__get,
_content_type_params__set,
_content_type_params__del,
_content_type_params__get.__doc__
)
#
# set_cookie, unset_cookie, delete_cookie, merge_cookies
#
def set_cookie(self, name=None, value='', max_age=None,
path='/', domain=None, secure=False, httponly=False,
comment=None, expires=None, overwrite=False):
"""
Set (add) a cookie for the response.
Arguments are:
``name``
The cookie name.
``value``
The cookie value, which should be a string or ``None``. If
``value`` is ``None``, it's equivalent to calling the
:meth:`webob.response.Response.unset_cookie` method for this
cookie key (it effectively deletes the cookie on the client).
``max_age``
An integer representing a number of seconds, ``datetime.timedelta``,
or ``None``. This value is used as the ``Max-Age`` of the generated
cookie. If ``expires`` is not passed and this value is not
``None``, the ``max_age`` value will also influence the ``Expires``
value of the cookie (``Expires`` will be set to now + max_age). If
this value is ``None``, the cookie will not have a ``Max-Age`` value
(unless ``expires`` is set). If both ``max_age`` and ``expires`` are
set, this value takes precedence.
``path``
A string representing the cookie ``Path`` value. It defaults to
``/``.
``domain``
A string representing the cookie ``Domain``, or ``None``. If
domain is ``None``, no ``Domain`` value will be sent in the
cookie.
``secure``
A boolean. If it's ``True``, the ``secure`` flag will be sent in
the cookie, if it's ``False``, the ``secure`` flag will not be
sent in the cookie.
``httponly``
A boolean. If it's ``True``, the ``HttpOnly`` flag will be sent
in the cookie, if it's ``False``, the ``HttpOnly`` flag will not
be sent in the cookie.
``comment``
A string representing the cookie ``Comment`` value, or ``None``.
If ``comment`` is ``None``, no ``Comment`` value will be sent in
the cookie.
``expires``
A ``datetime.timedelta`` object representing an amount of time,
``datetime.datetime`` or ``None``. A non-``None`` value is used to
generate the ``Expires`` value of the generated cookie. If
``max_age`` is not passed, but this value is not ``None``, it will
influence the ``Max-Age`` header. If this value is ``None``, the
``Expires`` cookie value will be unset (unless ``max_age`` is set).
If ``max_age`` is set, it will be used to generate the ``expires``
and this value is ignored.
If a ``datetime.datetime`` is provided it has to either be timezone
aware or be based on UTC. ``datetime.datetime`` objects that are
local time are not supported. Timezone aware ``datetime.datetime``
objects are converted to UTC.
This argument will be removed in future
versions of WebOb (version 1.9).
``overwrite``
If this key is ``True``, before setting the cookie, unset any
existing cookie.
"""
# Remove in WebOb 1.10
if expires:
warn_deprecation('Argument "expires" will be removed in a future '
'version of WebOb, please use "max_age".', 1.10, 1)
if name is None:
raise TypeError('set_cookie() takes at least 1 argument')
if overwrite:
self.unset_cookie(name, strict=False)
# If expires is set, but not max_age we set max_age to expires
if not max_age and isinstance(expires, timedelta):
max_age = expires
# expires can also be a datetime
if not max_age and isinstance(expires, datetime):
# If expires has a timezone attached, convert it to UTC
if expires.tzinfo and expires.utcoffset():
expires = (expires - expires.utcoffset()).replace(tzinfo=None)
max_age = expires - datetime.utcnow()
value = bytes_(value, 'utf-8')
cookie = make_cookie(
name, value, max_age=max_age, path=path,
domain=domain, secure=secure, httponly=httponly,
comment=comment)
self.headerlist.append(('Set-Cookie', cookie))
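    # Illustrative usage (comment added here; not part of the original source):
    #
    #     resp = Response()
    #     resp.set_cookie('session', 'abc123', max_age=3600,
    #                     secure=True, httponly=True)
    #
    # appends a Set-Cookie header roughly of the form
    # "session=abc123; Max-Age=3600; Path=/; expires=...; secure; HttpOnly".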
def delete_cookie(self, name, path='/', domain=None):
"""
Delete a cookie from the client. Note that path and domain must match
how the cookie was originally set.
This sets the cookie to the empty string, and max_age=0 so
that it should expire immediately.
"""
self.set_cookie(name, None, path=path, domain=domain)
def unset_cookie(self, name, strict=True):
"""
Unset a cookie with the given name (remove it from the
response).
"""
existing = self.headers.getall('Set-Cookie')
if not existing and not strict:
return
cookies = Cookie()
for header in existing:
cookies.load(header)
if isinstance(name, text_type):
name = name.encode('utf8')
if name in cookies:
del cookies[name]
del self.headers['Set-Cookie']
for m in cookies.values():
self.headerlist.append(('Set-Cookie', m.serialize()))
elif strict:
raise KeyError("No cookie has been set with the name %r" % name)
def merge_cookies(self, resp):
"""Merge the cookies that were set on this response with the
given `resp` object (which can be any WSGI application).
If the `resp` is a :class:`webob.Response` object, then the
other object will be modified in-place.
"""
if not self.headers.get('Set-Cookie'):
return resp
if isinstance(resp, Response):
for header in self.headers.getall('Set-Cookie'):
resp.headers.add('Set-Cookie', header)
return resp
else:
c_headers = [h for h in self.headerlist if
h[0].lower() == 'set-cookie']
def repl_app(environ, start_response):
def repl_start_response(status, headers, exc_info=None):
return start_response(status, headers + c_headers,
exc_info=exc_info)
return resp(environ, repl_start_response)
return repl_app
#
# cache_control
#
_cache_control_obj = None
def _cache_control__get(self):
"""
Get/set/modify the Cache-Control header (`HTTP spec section 14.9
<http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9>`_)
"""
value = self.headers.get('cache-control', '')
if self._cache_control_obj is None:
self._cache_control_obj = CacheControl.parse(
value, updates_to=self._update_cache_control, type='response')
self._cache_control_obj.header_value = value
if self._cache_control_obj.header_value != value:
new_obj = CacheControl.parse(value, type='response')
self._cache_control_obj.properties.clear()
self._cache_control_obj.properties.update(new_obj.properties)
self._cache_control_obj.header_value = value
return self._cache_control_obj
def _cache_control__set(self, value):
# This actually becomes a copy
if not value:
value = ""
if isinstance(value, dict):
value = CacheControl(value, 'response')
if isinstance(value, text_type):
value = str(value)
if isinstance(value, str):
if self._cache_control_obj is None:
self.headers['Cache-Control'] = value
return
value = CacheControl.parse(value, 'response')
cache = self.cache_control
cache.properties.clear()
cache.properties.update(value.properties)
def _cache_control__del(self):
self.cache_control = {}
def _update_cache_control(self, prop_dict):
value = serialize_cache_control(prop_dict)
if not value:
if 'Cache-Control' in self.headers:
del self.headers['Cache-Control']
else:
self.headers['Cache-Control'] = value
cache_control = property(
_cache_control__get, _cache_control__set,
_cache_control__del, doc=_cache_control__get.__doc__)
#
# cache_expires
#
def _cache_expires(self, seconds=0, **kw):
"""
Set expiration on this request. This sets the response to
expire in the given seconds, and any other attributes are used
for cache_control (e.g., private=True, etc).
"""
if seconds is True:
seconds = 0
elif isinstance(seconds, timedelta):
seconds = timedelta_to_seconds(seconds)
cache_control = self.cache_control
if seconds is None:
pass
elif not seconds:
# To really expire something, you have to force a
# bunch of these cache control attributes, and IE may
# not pay attention to those still so we also set
# Expires.
cache_control.no_store = True
cache_control.no_cache = True
cache_control.must_revalidate = True
cache_control.max_age = 0
cache_control.post_check = 0
cache_control.pre_check = 0
self.expires = datetime.utcnow()
if 'last-modified' not in self.headers:
self.last_modified = datetime.utcnow()
self.pragma = 'no-cache'
else:
cache_control.properties.clear()
cache_control.max_age = seconds
self.expires = datetime.utcnow() + timedelta(seconds=seconds)
self.pragma = None
for name, value in kw.items():
setattr(cache_control, name, value)
cache_expires = property(lambda self: self._cache_expires, _cache_expires)
#
# encode_content, decode_content, md5_etag
#
def encode_content(self, encoding='gzip', lazy=False):
"""
Encode the content with the given encoding (only gzip and
identity are supported).
"""
assert encoding in ('identity', 'gzip'), \
"Unknown encoding: %r" % encoding
if encoding == 'identity':
self.decode_content()
return
if self.content_encoding == 'gzip':
return
if lazy:
self.app_iter = gzip_app_iter(self._app_iter)
self.content_length = None
else:
self.app_iter = list(gzip_app_iter(self._app_iter))
self.content_length = sum(map(len, self._app_iter))
self.content_encoding = 'gzip'
def decode_content(self):
content_encoding = self.content_encoding or 'identity'
if content_encoding == 'identity':
return
if content_encoding not in ('gzip', 'deflate'):
raise ValueError(
"I don't know how to decode the content %s" % content_encoding)
if content_encoding == 'gzip':
from gzip import GzipFile
from io import BytesIO
gzip_f = GzipFile(filename='', mode='r', fileobj=BytesIO(self.body))
self.body = gzip_f.read()
self.content_encoding = None
gzip_f.close()
else:
# Weird feature: http://bugs.python.org/issue5784
self.body = zlib.decompress(self.body, -15)
self.content_encoding = None
def md5_etag(self, body=None, set_content_md5=False):
"""
Generate an etag for the response object using an MD5 hash of
the body (the body parameter, or ``self.body`` if not given)
Sets ``self.etag``
If ``set_content_md5`` is True sets ``self.content_md5`` as well
"""
if body is None:
body = self.body
md5_digest = md5(body).digest()
md5_digest = b64encode(md5_digest)
md5_digest = md5_digest.replace(b'\n', b'')
md5_digest = native_(md5_digest)
self.etag = md5_digest.strip('=')
if set_content_md5:
self.content_md5 = md5_digest
@staticmethod
def _make_location_absolute(environ, value):
if SCHEME_RE.search(value):
return value
new_location = urlparse.urljoin(_request_uri(environ), value)
return new_location
def _abs_headerlist(self, environ):
# Build the headerlist, if we have a Location header, make it absolute
return [
(k, v) if k.lower() != 'location'
else (k, self._make_location_absolute(environ, v))
for (k, v)
in self._headerlist
]
#
# __call__, conditional_response_app
#
def __call__(self, environ, start_response):
"""
WSGI application interface
"""
if self.conditional_response:
return self.conditional_response_app(environ, start_response)
headerlist = self._abs_headerlist(environ)
start_response(self.status, headerlist)
if environ['REQUEST_METHOD'] == 'HEAD':
# Special case here...
return EmptyResponse(self._app_iter)
return self._app_iter
_safe_methods = ('GET', 'HEAD')
def conditional_response_app(self, environ, start_response):
"""
Like the normal __call__ interface, but checks conditional headers:
* If-Modified-Since (304 Not Modified; only on GET, HEAD)
* If-None-Match (304 Not Modified; only on GET, HEAD)
        * Range (206 Partial Content; only on GET, HEAD)
"""
req = BaseRequest(environ)
headerlist = self._abs_headerlist(environ)
method = environ.get('REQUEST_METHOD', 'GET')
if method in self._safe_methods:
status304 = False
if req.if_none_match and self.etag:
status304 = self.etag in req.if_none_match
elif req.if_modified_since and self.last_modified:
status304 = self.last_modified <= req.if_modified_since
if status304:
start_response('304 Not Modified', filter_headers(headerlist))
return EmptyResponse(self._app_iter)
if (
req.range and self in req.if_range and
self.content_range is None and
method in ('HEAD', 'GET') and
self.status_code == 200 and
self.content_length is not None
):
content_range = req.range.content_range(self.content_length)
if content_range is None:
iter_close(self._app_iter)
body = bytes_("Requested range not satisfiable: %s" % req.range)
headerlist = [
('Content-Length', str(len(body))),
('Content-Range', str(ContentRange(None, None,
self.content_length))),
('Content-Type', 'text/plain'),
] + filter_headers(headerlist)
start_response('416 Requested Range Not Satisfiable',
headerlist)
if method == 'HEAD':
return ()
return [body]
else:
app_iter = self.app_iter_range(content_range.start,
content_range.stop)
if app_iter is not None:
# the following should be guaranteed by
# Range.range_for_length(length)
assert content_range.start is not None
headerlist = [
('Content-Length',
str(content_range.stop - content_range.start)),
('Content-Range', str(content_range)),
] + filter_headers(headerlist, ('content-length',))
start_response('206 Partial Content', headerlist)
if method == 'HEAD':
return EmptyResponse(app_iter)
return app_iter
start_response(self.status, headerlist)
if method == 'HEAD':
return EmptyResponse(self._app_iter)
return self._app_iter
def app_iter_range(self, start, stop):
"""
Return a new app_iter built from the response app_iter, that
serves up only the given ``start:stop`` range.
"""
app_iter = self._app_iter
if hasattr(app_iter, 'app_iter_range'):
return app_iter.app_iter_range(start, stop)
return AppIterRange(app_iter, start, stop)
def filter_headers(hlist, remove_headers=('content-length', 'content-type')):
return [h for h in hlist if (h[0].lower() not in remove_headers)]
def iter_file(file, block_size=1 << 18): # 256Kb
while True:
data = file.read(block_size)
if not data:
break
yield data
class ResponseBodyFile(object):
mode = 'wb'
closed = False
def __init__(self, response):
"""
Represents a :class:`~Response` as a file like object.
"""
self.response = response
self.write = response.write
def __repr__(self):
return '<body_file for %r>' % self.response
encoding = property(
lambda self: self.response.charset,
doc="The encoding of the file (inherited from response.charset)"
)
def writelines(self, seq):
"""
Write a sequence of lines to the response
"""
for item in seq:
self.write(item)
def close(self):
raise NotImplementedError("Response bodies cannot be closed")
def flush(self):
pass
def tell(self):
"""
Provide the current location where we are going to start writing
"""
if self.response.app_iter is None: # pragma: no cover
return 0
return sum([len(chunk) for chunk in self.response.app_iter])
class AppIterRange(object):
"""
Wraps an app_iter, returning just a range of bytes
"""
def __init__(self, app_iter, start, stop):
assert start >= 0, "Bad start: %r" % start
assert stop is None or (stop >= 0 and stop >= start), (
"Bad stop: %r" % stop)
self.app_iter = iter(app_iter)
self._pos = 0 # position in app_iter
self.start = start
self.stop = stop
def __iter__(self):
return self
def _skip_start(self):
start, stop = self.start, self.stop
for chunk in self.app_iter:
self._pos += len(chunk)
if self._pos < start:
continue
elif self._pos == start:
return b''
else:
chunk = chunk[start - self._pos:]
if stop is not None and self._pos > stop:
chunk = chunk[:stop - self._pos]
assert len(chunk) == stop - start
return chunk
else:
raise StopIteration()
def next(self):
if self._pos < self.start:
# need to skip some leading bytes
return self._skip_start()
stop = self.stop
if stop is not None and self._pos >= stop:
raise StopIteration
chunk = next(self.app_iter)
self._pos += len(chunk)
if stop is None or self._pos <= stop:
return chunk
else:
return chunk[:stop - self._pos]
__next__ = next # py3
def close(self):
iter_close(self.app_iter)
class EmptyResponse(object):
"""An empty WSGI response.
An iterator that immediately stops. Optionally provides a close
method to close an underlying app_iter it replaces.
"""
def __init__(self, app_iter=None):
if app_iter is not None and hasattr(app_iter, 'close'):
self.close = app_iter.close
def __iter__(self):
return self
def __len__(self):
return 0
def next(self):
raise StopIteration()
__next__ = next # py3
def _is_xml(content_type):
return (
content_type.startswith('application/xml') or
(
content_type.startswith('application/') and
content_type.endswith('+xml')
) or
(
content_type.startswith('image/') and
content_type.endswith('+xml')
)
)
def _content_type_has_charset(content_type):
return (
content_type.startswith('text/') or
_is_xml(content_type)
)
def _request_uri(environ):
"""Like wsgiref.url.request_uri, except eliminates :80 ports
Return the full request URI"""
url = environ['wsgi.url_scheme'] + '://'
if environ.get('HTTP_HOST'):
url += environ['HTTP_HOST']
else:
url += environ['SERVER_NAME'] + ':' + environ['SERVER_PORT']
if url.endswith(':80') and environ['wsgi.url_scheme'] == 'http':
url = url[:-3]
elif url.endswith(':443') and environ['wsgi.url_scheme'] == 'https':
url = url[:-4]
if PY3: # pragma: no cover
script_name = bytes_(environ.get('SCRIPT_NAME', '/'), 'latin-1')
path_info = bytes_(environ.get('PATH_INFO', ''), 'latin-1')
else:
script_name = environ.get('SCRIPT_NAME', '/')
path_info = environ.get('PATH_INFO', '')
url += url_quote(script_name)
qpath_info = url_quote(path_info)
if 'SCRIPT_NAME' not in environ:
url += qpath_info[1:]
else:
url += qpath_info
return url
def iter_close(iter):
if hasattr(iter, 'close'):
iter.close()
def gzip_app_iter(app_iter):
size = 0
crc = zlib.crc32(b"") & 0xffffffff
compress = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS,
zlib.DEF_MEM_LEVEL, 0)
yield _gzip_header
for item in app_iter:
size += len(item)
crc = zlib.crc32(item, crc) & 0xffffffff
# The compress function may return zero length bytes if the input is
# small enough; it buffers the input for the next iteration or for a
# flush.
result = compress.compress(item)
if result:
yield result
# Similarly, flush may also not yield a value.
result = compress.flush()
if result:
yield result
yield struct.pack("<2L", crc, size & 0xffffffff)
def _error_unicode_in_app_iter(app_iter, body):
app_iter_repr = repr(app_iter)
if len(app_iter_repr) > 50:
app_iter_repr = (
app_iter_repr[:30] + '...' + app_iter_repr[-10:])
raise TypeError(
'An item of the app_iter (%s) was text, causing a '
'text body: %r' % (app_iter_repr, body))
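# Illustrative usage sketch (not part of webob): how AppIterRange slices the
# byte stream produced by an app_iter. The sample chunks and the __main__
# guard below are assumptions added purely for demonstration.
if __name__ == '__main__':
    body_chunks = [b'hello', b'world']           # an app_iter yielding two chunks
    ranged = AppIterRange(body_chunks, start=2, stop=7)
    # Joining the wrapped iterator yields bytes 2..7 of b'helloworld'.
    assert b''.join(ranged) == b'llowo'
    ranged.close()                               # delegates to iter_close()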
|
{
"content_hash": "31a1e85d9b27bf5ab904dde9ee138247",
"timestamp": "",
"source": "github",
"line_count": 1611,
"max_line_length": 89,
"avg_line_length": 33.92551210428305,
"alnum_prop": 0.5582756980275918,
"repo_name": "stefanv/aandete",
"id": "1f369e1df928c5de1555cd31e156a116ef33892c",
"size": "54654",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/lib/webob/response.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "248684"
},
{
"name": "Python",
"bytes": "6478502"
}
],
"symlink_target": ""
}
|
from dj_diabetes.models.foods import Foods
from dj_diabetes.tests import MainTest
class FoodsTest(MainTest):
def setUp(self):
super(FoodsTest, self).setUp()
title = 'Chocolate'
self.food = Foods.objects.create(title=title)
def test_foods(self):
self.assertTrue(isinstance(self.food, Foods))
self.assertEqual(self.food.__str__(), "%s" % self.food.title)
|
{
"content_hash": "ac1157445d0af5a61d3edcd322782622",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 69,
"avg_line_length": 27.066666666666666,
"alnum_prop": 0.6625615763546798,
"repo_name": "foxmask/dj-diabetes",
"id": "2317864f37e554715ca572d2e9d06523d0dbede0",
"size": "423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dj_diabetes/tests/test_foods.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5215"
},
{
"name": "HTML",
"bytes": "61293"
},
{
"name": "Python",
"bytes": "85029"
}
],
"symlink_target": ""
}
|
__version__ = '0.0.8'
__all__ = ('Flacro', 'FlacroFor', 'AccordionItem', 'AccordionGroupMacro',
'TabItem', 'TabGroupMacro', 'ListMacro', 'LiItem')
from .flacro import (Flacro, FlacroFor)
from .packaged_macros import (AccordionItem, AccordionGroupMacro,
TabItem, TabGroupMacro, ListMacro, LiItem)
|
{
"content_hash": "88ffe680c9e4741f2505c93cfacd80a9",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 73,
"avg_line_length": 38.25,
"alnum_prop": 0.7091503267973857,
"repo_name": "thrisp/flacro",
"id": "c11c051d84ad01e9fc6e43a5df85f26e9a58056f",
"size": "306",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "flask_flacro/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27513"
}
],
"symlink_target": ""
}
|
from atom.api import Unicode
from enaml.core.declarative import d_
from html_object import HTMLObject
from lxml.html import builder as E
class Item(HTMLObject):
tag = E.LI
text = d_(Unicode())
def initialize(self):
super(Item, self).initialize()
def buildHTML(self, *args):
self.addTags()
self.addText(self.text)
self.addAttributes()
return super(Item, self).buildHTML(*args)
|
{
"content_hash": "67c2f606d3577b217585eaf1658fde7d",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 49,
"avg_line_length": 20.857142857142858,
"alnum_prop": 0.6598173515981736,
"repo_name": "ContinuumIO/ashiba",
"id": "c2972cbee4ec49496a22c9f97ac8883d9d7c9989",
"size": "439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enaml/enaml/web/item.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4560"
},
{
"name": "C",
"bytes": "738"
},
{
"name": "C++",
"bytes": "77464"
},
{
"name": "CSS",
"bytes": "2286"
},
{
"name": "Emacs Lisp",
"bytes": "1210"
},
{
"name": "HTML",
"bytes": "4891"
},
{
"name": "JavaScript",
"bytes": "17243"
},
{
"name": "Makefile",
"bytes": "4590"
},
{
"name": "Python",
"bytes": "3241535"
},
{
"name": "Shell",
"bytes": "119"
},
{
"name": "VimL",
"bytes": "1821"
}
],
"symlink_target": ""
}
|
import pytest
from testtools import ExpectedException
from testtools.assertions import assert_that
from testtools.matchers import (
Equals, Is, IsInstance, MatchesAll, MatchesDict, MatchesListwise,
MatchesPredicate, MatchesStructure)
from testtools.twistedsupport import failed, succeeded
from twisted.internet.defer import succeed
from marathon_acme.clients.tests.helpers import PerLocationAgent
from marathon_acme.tests.helpers import FailingAgent
class DummyAgent(object):
def request(self, *args, **kwargs):
return succeed((args, kwargs))
class TestPerLocationAgent(object):
@pytest.fixture
def agent(self):
return PerLocationAgent()
def test_keyerror_if_location_unset(self, agent):
"""
When a request is made using the agent and no delegate agent has been
added for the URI location/authority, a KeyError is expected.
"""
with ExpectedException(KeyError, r"b?'foo:8080'"):
agent.request(b'GET', b'http://foo:8080')
def test_delegates_to_agent_for_location(self, agent):
"""
When a request is made using the agent, the added agents are delegated
to based on the URI location/authority.
"""
agent.add_agent(b'foo:8080', DummyAgent())
agent.add_agent(b'bar:8080', FailingAgent(RuntimeError('bar')))
agent.add_agent(b'foo:9090', FailingAgent(RuntimeError('9090')))
d = agent.request(b'GET', b'http://foo:8080')
assert_that(d, succeeded(MatchesListwise([
MatchesListwise([Equals(b'GET'), Equals(b'http://foo:8080')]),
MatchesDict({'headers': Is(None), 'bodyProducer': Is(None)})
])))
# Scheme doesn't matter
d = agent.request(b'GET', b'https://foo:8080')
assert_that(d, succeeded(MatchesListwise([
MatchesListwise([Equals(b'GET'), Equals(b'https://foo:8080')]),
MatchesDict({'headers': Is(None), 'bodyProducer': Is(None)})
])))
# Path doesn't matter
d = agent.request(b'GET', b'http://foo:8080/bar/baz')
assert_that(d, succeeded(MatchesListwise([
MatchesListwise([
Equals(b'GET'), Equals(b'http://foo:8080/bar/baz')]),
MatchesDict({'headers': Is(None), 'bodyProducer': Is(None)})
])))
# Hostname *does* matter
d = agent.request(b'GET', b'http://bar:8080')
assert_that(d, failed(MatchesStructure(value=MatchesAll(
IsInstance(RuntimeError),
MatchesPredicate(str, Equals('bar'))
))))
# Port *does* matter
d = agent.request(b'GET', b'http://foo:9090')
assert_that(d, failed(MatchesStructure(value=MatchesAll(
IsInstance(RuntimeError),
MatchesPredicate(str, Equals('9090'))
))))
# Other args passed through
d = agent.request(b'GET', b'http://foo:8080', 'bar', 'baz')
assert_that(d, succeeded(MatchesListwise([
MatchesListwise([Equals(b'GET'), Equals(b'http://foo:8080')]),
MatchesDict(
{'headers': Equals('bar'), 'bodyProducer': Equals('baz')})
])))
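# Illustrative sketch (not the real marathon_acme helper): a minimal agent
# that behaves like the PerLocationAgent exercised above, routing each request
# to the delegate registered for the URI authority (host:port) while ignoring
# scheme and path. The urlparse-based key extraction is an assumption of this
# sketch.
from urllib.parse import urlparse


class SimplePerLocationAgent(object):
    def __init__(self):
        self._agents = {}

    def add_agent(self, location, agent):
        self._agents[location] = agent

    def request(self, method, uri, headers=None, bodyProducer=None):
        # urlparse(b'https://foo:8080/bar').netloc == b'foo:8080'
        location = urlparse(uri).netloc
        # Raises KeyError for an unknown location, mirroring the first test above
        return self._agents[location].request(
            method, uri, headers=headers, bodyProducer=bodyProducer)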
|
{
"content_hash": "048c211b5371dce548d007c75e288415",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 78,
"avg_line_length": 37.773809523809526,
"alnum_prop": 0.6208635360857233,
"repo_name": "praekeltfoundation/certbot",
"id": "4fb53b0fabfcb79590fd8c58f86eff220b763186",
"size": "3173",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "marathon_acme/clients/tests/test_test_helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "598"
},
{
"name": "Python",
"bytes": "271690"
},
{
"name": "Shell",
"bytes": "561"
}
],
"symlink_target": ""
}
|
import json
import logging
import os
import threading
from datetime import datetime, timedelta
from typing import NamedTuple, Optional
import dateutil.parser
from dateutil.tz import tzutc
from botocore import UNSIGNED
from botocore.compat import total_seconds
from botocore.config import Config
from botocore.exceptions import (
ClientError,
InvalidConfigError,
TokenRetrievalError,
)
from botocore.utils import CachedProperty, JSONFileCache, SSOTokenLoader
logger = logging.getLogger(__name__)
def _utc_now():
return datetime.now(tzutc())
def create_token_resolver(session):
providers = [
SSOTokenProvider(session),
]
return TokenProviderChain(providers=providers)
def _serialize_utc_timestamp(obj):
if isinstance(obj, datetime):
return obj.strftime("%Y-%m-%dT%H:%M:%SZ")
return obj
def _sso_json_dumps(obj):
return json.dumps(obj, default=_serialize_utc_timestamp)
class FrozenAuthToken(NamedTuple):
token: str
expiration: Optional[datetime] = None
class DeferredRefreshableToken:
    # With fewer than 15 minutes until expiry, attempt a refresh but don't
    # block if another thread is already refreshing.
    _advisory_refresh_timeout = 15 * 60
    # With fewer than 10 minutes until expiry, all threads block until the
    # token has been refreshed.
    _mandatory_refresh_timeout = 10 * 60
# Refresh at most once every minute to avoid blocking every request
_attempt_timeout = 60
def __init__(self, method, refresh_using, time_fetcher=_utc_now):
self._time_fetcher = time_fetcher
self._refresh_using = refresh_using
self.method = method
# The frozen token is protected by this lock
self._refresh_lock = threading.Lock()
self._frozen_token = None
self._next_refresh = None
def get_frozen_token(self):
self._refresh()
return self._frozen_token
def _refresh(self):
# If we don't need to refresh just return
refresh_type = self._should_refresh()
if not refresh_type:
return None
# Block for refresh if we're in the mandatory refresh window
block_for_refresh = refresh_type == "mandatory"
if self._refresh_lock.acquire(block_for_refresh):
try:
self._protected_refresh()
finally:
self._refresh_lock.release()
def _protected_refresh(self):
# This should only be called after acquiring the refresh lock
# Another thread may have already refreshed, double check refresh
refresh_type = self._should_refresh()
if not refresh_type:
return None
try:
now = self._time_fetcher()
self._next_refresh = now + timedelta(seconds=self._attempt_timeout)
self._frozen_token = self._refresh_using()
except Exception:
logger.warning(
"Refreshing token failed during the %s refresh period.",
refresh_type,
exc_info=True,
)
if refresh_type == "mandatory":
# This refresh was mandatory, error must be propagated back
raise
if self._is_expired():
# Fresh credentials should never be expired
raise TokenRetrievalError(
provider=self.method,
error_msg="Token has expired and refresh failed",
)
def _is_expired(self):
if self._frozen_token is None:
return False
expiration = self._frozen_token.expiration
remaining = total_seconds(expiration - self._time_fetcher())
return remaining <= 0
def _should_refresh(self):
if self._frozen_token is None:
# We don't have a token yet, mandatory refresh
return "mandatory"
expiration = self._frozen_token.expiration
if expiration is None:
# No expiration, so assume we don't need to refresh.
return None
now = self._time_fetcher()
if now < self._next_refresh:
return None
remaining = total_seconds(expiration - now)
if remaining < self._mandatory_refresh_timeout:
return "mandatory"
elif remaining < self._advisory_refresh_timeout:
return "advisory"
return None
class TokenProviderChain:
def __init__(self, providers=None):
if providers is None:
providers = []
self._providers = providers
def load_token(self):
for provider in self._providers:
token = provider.load_token()
if token is not None:
return token
return None
class SSOTokenProvider:
METHOD = "sso"
_REFRESH_WINDOW = 15 * 60
_SSO_TOKEN_CACHE_DIR = os.path.expanduser(
os.path.join("~", ".aws", "sso", "cache")
)
_SSO_CONFIG_VARS = [
"sso_start_url",
"sso_region",
]
_GRANT_TYPE = "refresh_token"
DEFAULT_CACHE_CLS = JSONFileCache
def __init__(self, session, cache=None, time_fetcher=_utc_now):
self._session = session
if cache is None:
cache = self.DEFAULT_CACHE_CLS(
self._SSO_TOKEN_CACHE_DIR,
dumps_func=_sso_json_dumps,
)
self._now = time_fetcher
self._cache = cache
self._token_loader = SSOTokenLoader(cache=self._cache)
def _load_sso_config(self):
loaded_config = self._session.full_config
profiles = loaded_config.get("profiles", {})
sso_sessions = loaded_config.get("sso_sessions", {})
profile_name = self._session.get_config_variable("profile")
if not profile_name:
profile_name = "default"
profile_config = profiles.get(profile_name, {})
if "sso_session" not in profile_config:
return
sso_session_name = profile_config["sso_session"]
sso_config = sso_sessions.get(sso_session_name, None)
if not sso_config:
error_msg = (
f'The profile "{profile_name}" is configured to use the SSO '
f'token provider but the "{sso_session_name}" sso_session '
f"configuration does not exist."
)
raise InvalidConfigError(error_msg=error_msg)
missing_configs = []
for var in self._SSO_CONFIG_VARS:
if var not in sso_config:
missing_configs.append(var)
if missing_configs:
error_msg = (
f'The profile "{profile_name}" is configured to use the SSO '
f"token provider but is missing the following configuration: "
f"{missing_configs}."
)
raise InvalidConfigError(error_msg=error_msg)
return {
"session_name": sso_session_name,
"sso_region": sso_config["sso_region"],
"sso_start_url": sso_config["sso_start_url"],
}
@CachedProperty
def _sso_config(self):
return self._load_sso_config()
@CachedProperty
def _client(self):
config = Config(
region_name=self._sso_config["sso_region"],
signature_version=UNSIGNED,
)
return self._session.create_client("sso-oidc", config=config)
def _attempt_create_token(self, token):
response = self._client.create_token(
grantType=self._GRANT_TYPE,
clientId=token["clientId"],
clientSecret=token["clientSecret"],
refreshToken=token["refreshToken"],
)
expires_in = timedelta(seconds=response["expiresIn"])
new_token = {
"startUrl": self._sso_config["sso_start_url"],
"region": self._sso_config["sso_region"],
"accessToken": response["accessToken"],
"expiresAt": self._now() + expires_in,
# Cache the registration alongside the token
"clientId": token["clientId"],
"clientSecret": token["clientSecret"],
"registrationExpiresAt": token["registrationExpiresAt"],
}
if "refreshToken" in response:
new_token["refreshToken"] = response["refreshToken"]
logger.info("SSO Token refresh succeeded")
return new_token
def _refresh_access_token(self, token):
keys = (
"refreshToken",
"clientId",
"clientSecret",
"registrationExpiresAt",
)
missing_keys = [k for k in keys if k not in token]
if missing_keys:
msg = f"Unable to refresh SSO token: missing keys: {missing_keys}"
logger.info(msg)
return None
expiry = dateutil.parser.parse(token["registrationExpiresAt"])
if total_seconds(expiry - self._now()) <= 0:
logger.info(f"SSO token registration expired at {expiry}")
return None
try:
return self._attempt_create_token(token)
except ClientError:
logger.warning("SSO token refresh attempt failed", exc_info=True)
return None
def _refresher(self):
start_url = self._sso_config["sso_start_url"]
session_name = self._sso_config["session_name"]
logger.info(f"Loading cached SSO token for {session_name}")
token_dict = self._token_loader(start_url, session_name=session_name)
expiration = dateutil.parser.parse(token_dict["expiresAt"])
logger.debug(f"Cached SSO token expires at {expiration}")
remaining = total_seconds(expiration - self._now())
if remaining < self._REFRESH_WINDOW:
new_token_dict = self._refresh_access_token(token_dict)
if new_token_dict is not None:
token_dict = new_token_dict
expiration = token_dict["expiresAt"]
self._token_loader.save_token(
start_url, token_dict, session_name=session_name
)
return FrozenAuthToken(
token_dict["accessToken"], expiration=expiration
)
def load_token(self):
if self._sso_config is None:
return None
return DeferredRefreshableToken(
self.METHOD, self._refresher, time_fetcher=self._now
)
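# Illustrative sketch (not part of botocore): exercising
# DeferredRefreshableToken with a fake refresher and a controllable clock.
# The FakeClock class and the 20-minute expiry are assumptions made purely
# for demonstration.
if __name__ == "__main__":

    class FakeClock:
        def __init__(self):
            self.now = datetime.now(tzutc())

        def __call__(self):
            return self.now

    clock = FakeClock()

    def fake_refresher():
        # Hand out a token valid for 20 minutes from the fake "now"
        return FrozenAuthToken(
            "example-token", expiration=clock.now + timedelta(minutes=20)
        )

    token = DeferredRefreshableToken("demo", fake_refresher, time_fetcher=clock)
    first = token.get_frozen_token()      # no token yet -> mandatory refresh

    clock.now += timedelta(minutes=11)    # 9 minutes left -> mandatory window
    second = token.get_frozen_token()     # refreshes again before returning
    assert second.expiration > first.expiration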
|
{
"content_hash": "8609afdce274cfa6d685cd5f7f7b3f6f",
"timestamp": "",
"source": "github",
"line_count": 314,
"max_line_length": 79,
"avg_line_length": 32.74203821656051,
"alnum_prop": 0.5936192977336835,
"repo_name": "boto/botocore",
"id": "12b38ba113502d980c98a0a986f07ec29636d9cf",
"size": "10842",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "botocore/tokens.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "23820"
},
{
"name": "Python",
"bytes": "3352371"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from contextlib import contextmanager
import logging
import os
import re
import sys
from scss import config
from scss.calculator import Calculator
from scss.compiler import _prop_split_re
from scss.compiler import Compiler
from scss.errors import SassEvaluationError
from scss.legacy import Scss
from scss.legacy import _default_scss_vars
from scss.namespace import Namespace
from scss.rule import SassRule
from scss.rule import UnparsedBlock
from scss.scss_meta import BUILD_INFO
from scss.source import SourceFile
from scss.util import profiling
try:
raw_input
except NameError:
raw_input = input
log = logging.getLogger(__name__)
logging.getLogger('scss').setLevel(logging.INFO)
def main():
logging.basicConfig(format="%(levelname)s: %(message)s")
from optparse import OptionGroup, OptionParser, SUPPRESS_HELP
if hasattr(config.LOAD_PATHS, 'split'):
initial_load_paths = [p.strip() for p in config.LOAD_PATHS.split(',')]
else:
initial_load_paths = list(config.LOAD_PATHS)
def append_load_path(option, opt_str, value, parser):
dest = getattr(parser.values, option.dest)
paths = value.replace(os.pathsep, ',').replace(';', ',').split(',')
for path in paths:
path = path.strip()
if path and path not in dest:
dest.append(path)
parser = OptionParser(usage="Usage: %prog [options] [file]",
description="Converts Scss files to CSS.",
add_help_option=False)
parser.add_option("-i", "--interactive", action="store_true",
help="Run an interactive Scss shell")
parser.add_option("-w", "--watch", metavar="DIR",
help="Watch the files in DIR, and recompile when they change")
parser.add_option("-r", "--recursive", action="store_true", default=False,
help="Also watch directories inside of the watch directory")
parser.add_option("-o", "--output", metavar="PATH",
help="Write output to PATH (a directory if using watch, a file otherwise)")
parser.add_option("-s", "--suffix", metavar="STRING",
help="If using watch, a suffix added to the output filename (i.e. filename.STRING.css)")
parser.add_option("--time", action="store_true",
help="Ignored, will be removed in 2.0")
parser.add_option("--debug-info", action="store_true",
help="Turns on scss's debugging information")
parser.add_option("--no-debug-info", action="store_false",
dest="debug_info", default=False,
help="Turns off scss's debugging information")
parser.add_option("-T", "--test", action="store_true", help=SUPPRESS_HELP)
parser.add_option("-t", "--style", metavar="NAME",
dest="style", default='nested',
help="Output style. Can be nested (default), compact, compressed, or expanded.")
parser.add_option("-C", "--no-compress", action="store_false", dest="style", default=True,
help="Don't minify outputted CSS")
parser.add_option("-?", action="help", help=SUPPRESS_HELP)
parser.add_option("-h", "--help", action="help",
help="Show this message and exit")
parser.add_option("-v", "--version", action="store_true",
help="Print version and exit")
paths_group = OptionGroup(parser, "Resource Paths")
paths_group.add_option("-I", "--load-path", metavar="PATH", type="string",
action="callback", callback=append_load_path, dest="load_paths",
default=initial_load_paths,
help="Add a scss import path, may be given multiple times")
paths_group.add_option("-S", "--static-root", metavar="PATH", dest="static_root",
help="Static root path (Where images and static resources are located)")
paths_group.add_option("-A", "--assets-root", metavar="PATH", dest="assets_root",
help="Assets root path (Sprite images will be created here)")
paths_group.add_option("-a", "--assets-url", metavar="URL", dest="assets_url",
help="URL to reach the files in your assets_root")
paths_group.add_option("-F", "--fonts-root", metavar="PATH", dest="fonts_root",
help="Fonts root path (Where fonts are located)")
paths_group.add_option("-f", "--fonts-url", metavar="PATH", dest="fonts_url",
help="URL to reach the fonts in your fonts_root")
paths_group.add_option("--images-root", metavar="PATH", dest="images_root",
help="Images root path (Where images are located)")
paths_group.add_option("--images-url", metavar="PATH", dest="images_url",
help="URL to reach the images in your images_root")
paths_group.add_option("--cache-root", metavar="PATH", dest="cache_root",
help="Cache root path (Cache files will be created here)")
parser.add_option_group(paths_group)
parser.add_option("--sass", action="store_true",
dest="is_sass", default=None,
help="Sass mode")
options, args = parser.parse_args()
# General runtime configuration
if options.static_root is not None:
config.STATIC_ROOT = options.static_root
if options.assets_root is not None:
config.ASSETS_ROOT = options.assets_root
if options.fonts_root is not None:
config.FONTS_ROOT = options.fonts_root
if options.fonts_url is not None:
config.FONTS_URL = options.fonts_url
if options.images_root is not None:
config.IMAGES_ROOT = options.images_root
if options.images_url is not None:
config.IMAGES_URL = options.images_url
if options.cache_root is not None:
config.CACHE_ROOT = options.cache_root
if options.assets_url is not None:
config.ASSETS_URL = options.assets_url
# Execution modes
if options.test:
run_tests()
elif options.version:
print_version()
elif options.interactive:
run_repl(options)
elif options.watch:
watch_sources(options)
else:
do_build(options, args)
def print_version():
print(BUILD_INFO)
def run_tests():
try:
import pytest
except ImportError:
raise ImportError("You need py.test installed to run the test suite.")
pytest.main("") # don't let py.test re-consume our arguments
def do_build(options, args):
if options.output is not None:
out = open(options.output, 'wb')
else:
out = sys.stdout
# Get the unencoded stream on Python 3
out = getattr(out, 'buffer', out)
css = Scss(scss_opts={
'style': options.style,
'debug_info': options.debug_info,
},
search_paths=options.load_paths,
)
if not args:
args = ['-']
source_files = []
for path in args:
if path == '-':
source = SourceFile.from_file(sys.stdin, relpath="<stdin>", is_sass=options.is_sass)
else:
source = SourceFile.from_filename(path, is_sass=options.is_sass)
source_files.append(source)
encodings = set(source.encoding for source in source_files)
if len(encodings) > 1:
sys.stderr.write(
"Can't combine these files! "
"They have different encodings: {0}\n"
.format(', '.join(encodings))
)
sys.exit(3)
output = css.compile(source_files=source_files)
out.write(output.encode(source_files[0].encoding))
for f, t in profiling.items():
sys.stderr.write("%s took %03fs" % (f, t))
def watch_sources(options):
import time
try:
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
except ImportError:
sys.stderr.write("Using watch functionality requires the `watchdog` library: http://pypi.python.org/pypi/watchdog/")
sys.exit(1)
if options.output and not os.path.isdir(options.output):
sys.stderr.write("watch file output directory is invalid: '%s'" % (options.output))
sys.exit(2)
class ScssEventHandler(PatternMatchingEventHandler):
def __init__(self, *args, **kwargs):
super(ScssEventHandler, self).__init__(*args, **kwargs)
self.css = Scss(scss_opts={
'style': options.style,
'debug_info': options.debug_info,
},
search_paths=options.load_paths,
)
self.output = options.output
self.suffix = options.suffix
def is_valid(self, path):
return os.path.isfile(path) and (path.endswith('.scss') or path.endswith('.sass')) and not os.path.basename(path).startswith('_')
def process(self, path):
if os.path.isdir(path):
for f in os.listdir(path):
full = os.path.join(path, f)
if self.is_valid(full):
self.compile(full)
elif self.is_valid(path):
self.compile(path)
def compile(self, src_path):
fname = os.path.basename(src_path)
if fname.endswith('.scss') or fname.endswith('.sass'):
fname = fname[:-5]
if self.suffix:
fname += '.' + self.suffix
fname += '.css'
else:
# you didn't give me a file of the correct type!
return False
if self.output:
dest_path = os.path.join(self.output, fname)
else:
dest_path = os.path.join(os.path.dirname(src_path), fname)
print("Compiling %s => %s" % (src_path, dest_path))
dest_file = open(dest_path, 'wb')
dest_file.write(self.css.compile(scss_file=src_path).encode('utf-8'))
def on_moved(self, event):
super(ScssEventHandler, self).on_moved(event)
self.process(event.dest_path)
def on_created(self, event):
super(ScssEventHandler, self).on_created(event)
self.process(event.src_path)
def on_modified(self, event):
super(ScssEventHandler, self).on_modified(event)
self.process(event.src_path)
event_handler = ScssEventHandler(patterns=['*.scss', '*.sass'])
observer = Observer()
observer.schedule(event_handler, path=options.watch, recursive=options.recursive)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
@contextmanager
def readline_history(fn):
try:
import readline
except ImportError:
yield
return
try:
readline.read_history_file(fn)
except IOError:
pass
try:
yield
finally:
try:
readline.write_history_file(fn)
except IOError:
pass
def run_repl(options=None):
    # `options` comes from the CLI parser in main(); fall back to defaults
    is_sass = getattr(options, 'is_sass', False)
    repl = SassRepl(is_sass=is_sass)
with readline_history(os.path.expanduser('~/.scss-history')):
print("Welcome to %s interactive shell" % (BUILD_INFO,))
while True:
try:
in_ = raw_input('>>> ').strip()
for output in repl(in_):
print(output)
except (EOFError, KeyboardInterrupt):
print("Bye!")
return
class SassRepl(object):
def __init__(self, is_sass=False):
# TODO it would be lovely to get these out of here, somehow
self.namespace = Namespace(variables=_default_scss_vars)
self.compiler = Compiler(namespace=self.namespace)
self.compilation = self.compiler.make_compilation()
self.legacy_compiler_options = {}
self.source_file = SourceFile.from_string('', '<shell>', is_sass=is_sass)
self.calculator = Calculator(self.namespace)
def __call__(self, s):
# TODO this is kind of invasive; surely it's possible to do this
# without calling only private methods
from pprint import pformat
if s in ('exit', 'quit'):
raise KeyboardInterrupt
for s in s.split(';'):
s = self.source_file.prepare_source(s.strip())
if not s:
continue
elif s.startswith('@'):
scope = None
properties = []
children = deque()
rule = SassRule(self.source_file, namespace=self.namespace, legacy_compiler_options=self.legacy_compiler_options, properties=properties)
block = UnparsedBlock(rule, 1, s, None)
code, name = (s.split(None, 1) + [''])[:2]
if code == '@option':
self.compilation._at_options(self.calculator, rule, scope, block)
continue
elif code == '@import':
# TODO this doesn't really work either since there's no path
self.compilation._at_import(self.calculator, rule, scope, block)
continue
elif code == '@include':
final_cont = ''
self.compilation._at_include(self.calculator, rule, scope, block)
code = self.compilation._print_properties(properties).rstrip('\n')
if code:
final_cont += code
if children:
# TODO this almost certainly doesn't work, and is kind of goofy anyway since @mixin isn't supported
self.compilation.children.extendleft(children)
self.compilation.parse_children()
code = self.compilation._create_css(self.compilation.rules).rstrip('\n')
if code:
final_cont += code
yield final_cont
continue
elif s == 'ls' or s.startswith('show(') or s.startswith('show ') or s.startswith('ls(') or s.startswith('ls '):
m = re.match(r'(?:show|ls)(\()?\s*([^,/\\) ]*)(?:[,/\\ ]([^,/\\ )]+))*(?(1)\))', s, re.IGNORECASE)
if m:
name = m.group(2)
code = m.group(3)
name = name and name.strip().rstrip('s') # remove last 's' as in functions
code = code and code.strip()
ns = self.namespace
if not name:
yield pformat(list(sorted(['vars', 'options', 'mixins', 'functions'])))
elif name in ('v', 'var', 'variable'):
variables = dict(ns._variables)
if code == '*':
pass
elif code:
variables = dict((k, v) for k, v in variables.items() if code in k)
else:
variables = dict((k, v) for k, v in variables.items() if not k.startswith('$--'))
yield pformat(variables)
elif name in ('o', 'opt', 'option'):
opts = self.legacy_compiler_options
if code == '*':
pass
elif code:
opts = dict((k, v) for k, v in opts.items() if code in k)
else:
opts = dict((k, v) for k, v in opts.items())
yield pformat(opts)
elif name in ('m', 'mix', 'mixin', 'f', 'func', 'funct', 'function'):
if name.startswith('m'):
funcs = dict(ns._mixins)
elif name.startswith('f'):
funcs = dict(ns._functions)
if code == '*':
pass
elif code:
funcs = dict((k, v) for k, v in funcs.items() if code in k[0])
else:
pass
# TODO print source when possible
yield pformat(funcs)
continue
elif s.startswith('$') and (':' in s or '=' in s):
prop, value = [a.strip() for a in _prop_split_re.split(s, 1)]
prop = self.calculator.do_glob_math(prop)
value = self.calculator.calculate(value)
self.namespace.set_variable(prop, value)
continue
# TODO respect compress?
try:
yield(self.calculator.calculate(s).render())
except (SyntaxError, SassEvaluationError) as e:
print("%s" % e, file=sys.stderr)
if __name__ == "__main__":
main()
|
{
"content_hash": "c6e7a2fab84cdabfc3a14040967e1a99",
"timestamp": "",
"source": "github",
"line_count": 427,
"max_line_length": 152,
"avg_line_length": 40.07025761124122,
"alnum_prop": 0.54833430742256,
"repo_name": "Kronuz/pyScss",
"id": "f9b83b49a5ab70bd1df1aa47e9404df74825b6c0",
"size": "17132",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scss/tool.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "49812"
},
{
"name": "C++",
"bytes": "1440"
},
{
"name": "CSS",
"bytes": "977450"
},
{
"name": "GAP",
"bytes": "17887"
},
{
"name": "HTML",
"bytes": "5567"
},
{
"name": "Python",
"bytes": "606917"
}
],
"symlink_target": ""
}
|
import base64
from xdrlib import Packer, Unpacker
from .transaction_history_entry_ext import TransactionHistoryEntryExt
from .transaction_set import TransactionSet
from .uint32 import Uint32
__all__ = ["TransactionHistoryEntry"]
class TransactionHistoryEntry:
"""
XDR Source Code::
struct TransactionHistoryEntry
{
uint32 ledgerSeq;
TransactionSet txSet;
// reserved for future use
union switch (int v)
{
case 0:
void;
}
ext;
};
"""
def __init__(
self,
ledger_seq: Uint32,
tx_set: TransactionSet,
ext: TransactionHistoryEntryExt,
) -> None:
self.ledger_seq = ledger_seq
self.tx_set = tx_set
self.ext = ext
def pack(self, packer: Packer) -> None:
self.ledger_seq.pack(packer)
self.tx_set.pack(packer)
self.ext.pack(packer)
@classmethod
def unpack(cls, unpacker: Unpacker) -> "TransactionHistoryEntry":
ledger_seq = Uint32.unpack(unpacker)
tx_set = TransactionSet.unpack(unpacker)
ext = TransactionHistoryEntryExt.unpack(unpacker)
return cls(
ledger_seq=ledger_seq,
tx_set=tx_set,
ext=ext,
)
def to_xdr_bytes(self) -> bytes:
packer = Packer()
self.pack(packer)
return packer.get_buffer()
@classmethod
def from_xdr_bytes(cls, xdr: bytes) -> "TransactionHistoryEntry":
unpacker = Unpacker(xdr)
return cls.unpack(unpacker)
def to_xdr(self) -> str:
xdr_bytes = self.to_xdr_bytes()
return base64.b64encode(xdr_bytes).decode()
@classmethod
def from_xdr(cls, xdr: str) -> "TransactionHistoryEntry":
xdr_bytes = base64.b64decode(xdr.encode())
return cls.from_xdr_bytes(xdr_bytes)
def __eq__(self, other: object):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.ledger_seq == other.ledger_seq
and self.tx_set == other.tx_set
and self.ext == other.ext
)
def __str__(self):
out = [
f"ledger_seq={self.ledger_seq}",
f"tx_set={self.tx_set}",
f"ext={self.ext}",
]
return f"<TransactionHistoryEntry [{', '.join(out)}]>"
|
{
"content_hash": "dc25eb0030458e6c57e9f2f0bb017802",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 69,
"avg_line_length": 26.755555555555556,
"alnum_prop": 0.5643687707641196,
"repo_name": "StellarCN/py-stellar-base",
"id": "35d3860c819d77097759956dd0969c1f2619fdcf",
"size": "2500",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "stellar_sdk/xdr/transaction_history_entry.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1737"
},
{
"name": "Makefile",
"bytes": "1085"
},
{
"name": "Python",
"bytes": "2044193"
},
{
"name": "RPC",
"bytes": "76503"
}
],
"symlink_target": ""
}
|
"""This module implements the chamfer distance."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow_graphics.util import export_api
from tensorflow_graphics.util import shape
from tensorflow_graphics.util import type_alias
def evaluate(point_set_a: type_alias.TensorLike,
point_set_b: type_alias.TensorLike,
name: str = "chamfer_distance_evaluate") -> tf.Tensor:
"""Computes the Chamfer distance for the given two point sets.
Note:
This is a symmetric version of the Chamfer distance, calculated as the sum
of the average minimum distance from point_set_a to point_set_b and vice
versa.
The average minimum distance from one point set to another is calculated as
the average of the distances between the points in the first set and their
closest point in the second set, and is thus not symmetrical.
Note:
This function returns the exact Chamfer distance and not an approximation.
Note:
In the following, A1 to An are optional batch dimensions, which must be
broadcast compatible.
Args:
point_set_a: A tensor of shape `[A1, ..., An, N, D]`, where the last axis
represents points in a D dimensional space.
point_set_b: A tensor of shape `[A1, ..., An, M, D]`, where the last axis
represents points in a D dimensional space.
name: A name for this op. Defaults to "chamfer_distance_evaluate".
Returns:
A tensor of shape `[A1, ..., An]` storing the chamfer distance between the
two point sets.
Raises:
ValueError: if the shape of `point_set_a`, `point_set_b` is not supported.
"""
with tf.name_scope(name):
point_set_a = tf.convert_to_tensor(value=point_set_a)
point_set_b = tf.convert_to_tensor(value=point_set_b)
shape.compare_batch_dimensions(
tensors=(point_set_a, point_set_b),
tensor_names=("point_set_a", "point_set_b"),
last_axes=-3,
broadcast_compatible=True)
# Verify that the last axis of the tensors has the same dimension.
dimension = point_set_a.shape.as_list()[-1]
shape.check_static(
tensor=point_set_b,
tensor_name="point_set_b",
has_dim_equals=(-1, dimension))
# Create N x M matrix where the entry i,j corresponds to ai - bj (vector of
# dimension D).
difference = (
tf.expand_dims(point_set_a, axis=-2) -
tf.expand_dims(point_set_b, axis=-3))
# Calculate the square distances between each two points: |ai - bj|^2.
square_distances = tf.einsum("...i,...i->...", difference, difference)
minimum_square_distance_a_to_b = tf.reduce_min(
input_tensor=square_distances, axis=-1)
minimum_square_distance_b_to_a = tf.reduce_min(
input_tensor=square_distances, axis=-2)
return (
tf.reduce_mean(input_tensor=minimum_square_distance_a_to_b, axis=-1) +
tf.reduce_mean(input_tensor=minimum_square_distance_b_to_a, axis=-1))
# API contains all public functions and classes.
__all__ = export_api.get_functions_and_classes()
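# Illustrative sketch (not part of tensorflow_graphics): the symmetric
# Chamfer distance for two tiny 2D point sets computed with plain NumPy, so
# the value returned by evaluate() can be checked by hand. The numpy import
# and the sample points are assumptions of this example.
if __name__ == "__main__":
  import numpy as np

  a = np.array([[0.0, 0.0], [1.0, 0.0]])  # point_set_a, shape [N, D]
  b = np.array([[0.0, 1.0]])              # point_set_b, shape [M, D]

  diff = a[:, None, :] - b[None, :, :]    # [N, M, D] pairwise differences
  sq_dist = np.einsum("...i,...i->...", diff, diff)
  # a -> b minima: [1.0, 2.0] (mean 1.5); b -> a minima: [1.0] (mean 1.0)
  chamfer = sq_dist.min(axis=-1).mean(-1) + sq_dist.min(axis=-2).mean(-1)
  assert np.isclose(chamfer, 2.5)
  # evaluate() on the same unbatched point sets should agree.
  assert np.isclose(evaluate(a, b).numpy(), 2.5)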
|
{
"content_hash": "9cd6f257c52ebbaa7530fd45c20bd64e",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 79,
"avg_line_length": 37.566265060240966,
"alnum_prop": 0.6799230275817832,
"repo_name": "tensorflow/graphics",
"id": "e22e404258b724cc6392a0c644a9cd0efa6034e7",
"size": "3704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow_graphics/nn/loss/chamfer_distance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2403"
},
{
"name": "C++",
"bytes": "115377"
},
{
"name": "Cython",
"bytes": "12955"
},
{
"name": "JavaScript",
"bytes": "22252"
},
{
"name": "Jupyter Notebook",
"bytes": "246839"
},
{
"name": "Python",
"bytes": "2222139"
},
{
"name": "Shell",
"bytes": "4281"
},
{
"name": "Starlark",
"bytes": "2233"
}
],
"symlink_target": ""
}
|
import braintree
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404, JsonResponse
from django.shortcuts import render, get_object_or_404, redirect
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectMixin, DetailView
from django.views.generic.edit import FormMixin
# Create your views here.
from orders.forms import GuestCheckoutForm
from orders.mixins import CartOrderMixin
from orders.models import UserCheckout, Order, UserAddress
from products.models import Variation
from .models import Cart, CartItem
if settings.DEBUG:
braintree.Configuration.configure(braintree.Environment.Sandbox,
merchant_id=settings.BRAINTREE_MERCHANT_ID,
public_key=settings.BRAINTREE_PUBLIC,
private_key=settings.BRAINTREE_PRIVATE)
class ItemCountView(View):
def get(self, request, *args, **kwargs):
if request.is_ajax():
cart_id = self.request.session.get("cart_id")
if cart_id == None:
count = 0
else:
cart = Cart.objects.get(id=cart_id)
count = cart.items.count()
request.session["cart_item_count"] = count
return JsonResponse({"count": count})
else:
raise Http404
class CartView(SingleObjectMixin, View):
model = Cart
template_name = "carts/view.html"
def get_object(self, *args, **kwargs):
        self.request.session.set_expiry(0)  # expire the session when the browser closes
cart_id = self.request.session.get("cart_id")
if cart_id == None:
cart = Cart()
cart.tax_percentage = 0.075
cart.save()
cart_id = cart.id
self.request.session["cart_id"] = cart_id
cart = Cart.objects.get(id=cart_id)
if self.request.user.is_authenticated():
cart.user = self.request.user
cart.save()
return cart
def get(self, request, *args, **kwargs):
cart = self.get_object()
item_id = request.GET.get("item")
delete_item = request.GET.get("delete", False)
flash_message = ""
item_added = False
if item_id:
item_instance = get_object_or_404(Variation, id=item_id)
qty = request.GET.get("qty", 1)
try:
if int(qty) < 1:
delete_item = True
except:
raise Http404
cart_item, created = CartItem.objects.get_or_create(cart=cart, item=item_instance)
if created:
flash_message = "Successfully added to the cart"
item_added = True
if delete_item:
flash_message = "Item removed successfully."
cart_item.delete()
else:
if not created:
flash_message = "Quantity has been updated successfully."
cart_item.quantity = qty
cart_item.save()
if not request.is_ajax():
return HttpResponseRedirect(reverse("cart"))
#return cart_item.cart.get_absolute_url()
if request.is_ajax():
try:
total = cart_item.line_item_total
except:
total = None
try:
subtotal = cart_item.cart.subtotal
except:
subtotal = None
try:
cart_total = cart_item.cart.total
except:
cart_total = None
try:
tax_total = cart_item.cart.tax_total
except:
tax_total = None
try:
total_items = cart_item.cart.items.count()
except:
total_items = 0
data = {
"deleted": delete_item,
"item_added": item_added,
"line_total": total,
"subtotal": subtotal,
"cart_total": cart_total,
"tax_total": tax_total,
"flash_message": flash_message,
"total_items": total_items
}
return JsonResponse(data)
context = {
"object": self.get_object()
}
template = self.template_name
return render(request, template, context)
class CheckoutView(CartOrderMixin, FormMixin, DetailView):
model = Cart
template_name = "carts/checkout_view.html"
form_class = GuestCheckoutForm
def get_object(self, *args, **kwargs):
cart = self.get_cart()
if cart == None:
return None
return cart
def get_context_data(self, *args, **kwargs):
context = super(CheckoutView, self).get_context_data(*args, **kwargs)
user_can_continue = False
user_check_id = self.request.session.get("user_checkout_id")
if self.request.user.is_authenticated():
user_can_continue = True
user_checkout, created = UserCheckout.objects.get_or_create(email=self.request.user.email)
user_checkout.user = self.request.user
user_checkout.save()
context["client_token"] = user_checkout.get_client_token()
self.request.session["user_checkout_id"] = user_checkout.id
elif not self.request.user.is_authenticated() and user_check_id == None:
context["login_form"] = AuthenticationForm()
context["next_url"] = self.request.build_absolute_uri()
else:
pass
if user_check_id != None:
user_can_continue = True
if not self.request.user.is_authenticated(): #GUEST USER
user_checkout_2 = UserCheckout.objects.get(id=user_check_id)
context["client_token"] = user_checkout_2.get_client_token()
#if self.get_cart() is not None:
context["order"] = self.get_order()
context["user_can_continue"] = user_can_continue
context["form"] = self.get_form()
return context
def post(self, request, *args, **kwargs):
self.object = self.get_object()
form = self.get_form()
if form.is_valid():
email = form.cleaned_data.get("email")
user_checkout, created = UserCheckout.objects.get_or_create(email=email)
request.session["user_checkout_id"] = user_checkout.id
return self.form_valid(form)
else:
return self.form_invalid(form)
def get_success_url(self):
return reverse("checkout")
def get(self, request, *args, **kwargs):
get_data = super(CheckoutView, self).get(request, *args, **kwargs)
cart = self.get_object()
if cart == None:
return redirect("cart")
new_order = self.get_order()
user_checkout_id = request.session.get("user_checkout_id")
if user_checkout_id != None:
user_checkout = UserCheckout.objects.get(id=user_checkout_id)
if new_order.billing_address == None or new_order.shipping_address == None:
return redirect("order_address")
new_order.user = user_checkout
new_order.save()
return get_data
class CheckoutFinalView(CartOrderMixin, View):
def post(self, request, *args, **kwargs):
order = self.get_order()
order_total = order.order_total
nonce = request.POST.get("payment_method_nonce")
if nonce:
result = braintree.Transaction.sale({
"amount": order_total,
"payment_method_nonce": nonce,
"billing": {
"postal_code": "%s" %(order.billing_address.zipcode),
},
"options": {
"submit_for_settlement": True
}
})
if result.is_success:
#result.transaction.id to order
order.mark_completed(order_id=result.transaction.id)
messages.success(request, "Thank you for your order.")
del request.session["cart_id"]
del request.session["order_id"]
else:
#messages.success(request, "There was a problem with your order.")
messages.success(request, "%s" %(result.message))
return redirect("checkout")
return redirect("order_detail", pk=order.pk)
def get(self, request, *args, **kwargs):
return redirect("checkout")
|
{
"content_hash": "2a505e97ad0f4c6bb8d5d784824fbc19",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 93,
"avg_line_length": 26.15018315018315,
"alnum_prop": 0.6849698837372181,
"repo_name": "codingforentrepreneurs/ecommerce-2",
"id": "687a569c69d2098a65c9a2ef088bcb4aa3b3cbce",
"size": "7139",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/carts/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "43915"
},
{
"name": "HTML",
"bytes": "29425"
},
{
"name": "JavaScript",
"bytes": "79967"
},
{
"name": "Python",
"bytes": "74241"
}
],
"symlink_target": ""
}
|
"""Configuration for Elmax tests."""
import json
from elmax_api.constants import (
BASE_URL,
ENDPOINT_DEVICES,
ENDPOINT_DISCOVERY,
ENDPOINT_LOGIN,
)
from httpx import Response
import pytest
import respx
from . import MOCK_PANEL_ID, MOCK_PANEL_PIN
from tests.common import load_fixture
@pytest.fixture(autouse=True)
def httpx_mock_fixture(requests_mock):
"""Configure httpx fixture."""
with respx.mock(base_url=BASE_URL, assert_all_called=False) as respx_mock:
# Mock Login POST.
login_route = respx_mock.post(f"/{ENDPOINT_LOGIN}", name="login")
login_route.return_value = Response(
200, json=json.loads(load_fixture("login.json", "elmax"))
)
# Mock Device list GET.
list_devices_route = respx_mock.get(f"/{ENDPOINT_DEVICES}", name="list_devices")
list_devices_route.return_value = Response(
200, json=json.loads(load_fixture("list_devices.json", "elmax"))
)
# Mock Panel GET.
get_panel_route = respx_mock.get(
f"/{ENDPOINT_DISCOVERY}/{MOCK_PANEL_ID}/{MOCK_PANEL_PIN}", name="get_panel"
)
get_panel_route.return_value = Response(
200, json=json.loads(load_fixture("get_panel.json", "elmax"))
)
yield respx_mock
|
{
"content_hash": "a3047e8b9e8446ec836a98c91d73f31d",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 88,
"avg_line_length": 30.27906976744186,
"alnum_prop": 0.6351766513056836,
"repo_name": "nkgilley/home-assistant",
"id": "70e3af7670277c3c428eff03ab9d974ec82b1993",
"size": "1302",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/elmax/conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "51597279"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
import gettext
gettext.install('glance', unicode=1)
|
{
"content_hash": "08b59aa041e9f8b61f28effe6d1c5604",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 36,
"avg_line_length": 17.666666666666668,
"alnum_prop": 0.7735849056603774,
"repo_name": "tylertian/Openstack",
"id": "b6d314b4074e836bf6223f7c6be292db22ebb542",
"size": "733",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstack F/glance/glance/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "239919"
},
{
"name": "JavaScript",
"bytes": "156942"
},
{
"name": "Python",
"bytes": "16949418"
},
{
"name": "Shell",
"bytes": "96743"
}
],
"symlink_target": ""
}
|
import matplotlib
import os
import socket
import struct
from matplotlib._pylab_helpers import Gcf
from matplotlib.backend_bases import FigureManagerBase, ShowBase
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.figure import Figure
HOST = 'localhost'
PORT = os.getenv("PYCHARM_MATPLOTLIB_PORT")
PORT = int(PORT) if PORT is not None else None
PORT = PORT if PORT != -1 else None
index = int(os.getenv("PYCHARM_MATPLOTLIB_INDEX", 0))
rcParams = matplotlib.rcParams
verbose = matplotlib.verbose
class Show(ShowBase):
def __call__(self, **kwargs):
managers = Gcf.get_all_fig_managers()
if not managers:
return
for manager in managers:
manager.show(**kwargs)
def mainloop(self):
pass
show = Show()
# from pyplot API
def draw_if_interactive():
if matplotlib.is_interactive():
figManager = Gcf.get_active()
if figManager is not None:
figManager.canvas.show()
# from pyplot API
def new_figure_manager(num, *args, **kwargs):
FigureClass = kwargs.pop('FigureClass', Figure)
figure = FigureClass(*args, **kwargs)
return new_figure_manager_given_figure(num, figure)
# from pyplot API
def new_figure_manager_given_figure(num, figure):
canvas = FigureCanvasInterAgg(figure)
manager = FigureManagerInterAgg(canvas, num)
return manager
# from pyplot API
class FigureCanvasInterAgg(FigureCanvasAgg):
def __init__(self, figure):
FigureCanvasAgg.__init__(self, figure)
def show(self):
self.figure.tight_layout()
FigureCanvasAgg.draw(self)
if PORT is None:
return
if matplotlib.__version__ < '1.2':
buffer = self.tostring_rgb(0, 0)
else:
buffer = self.tostring_rgb()
if len(set(buffer)) <= 1:
# do not plot empty
return
render = self.get_renderer()
width = int(render.width)
plot_index = index if os.getenv("PYCHARM_MATPLOTLIB_INTERACTIVE", False) else -1
try:
sock = socket.socket()
sock.connect((HOST, PORT))
sock.send(struct.pack('>i', width))
sock.send(struct.pack('>i', plot_index))
sock.send(struct.pack('>i', len(buffer)))
sock.send(buffer)
except OSError as _:
# nothing bad. It just means, that our tool window doesn't run yet
pass
def draw(self):
is_interactive = os.getenv("PYCHARM_MATPLOTLIB_INTERACTIVE", False)
if is_interactive and matplotlib.is_interactive():
self.show()
class FigureManagerInterAgg(FigureManagerBase):
def __init__(self, canvas, num):
FigureManagerBase.__init__(self, canvas, num)
global index
index += 1
self.canvas = canvas
self._num = num
self._shown = False
def show(self, **kwargs):
self.canvas.show()
Gcf.destroy(self._num)
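# Illustrative sketch (not part of the PyCharm backend): how a receiving end
# could decode the wire format written by FigureCanvasInterAgg.show() above --
# three big-endian int32 headers (width, plot index, payload length) followed
# by the raw RGB buffer. The _demo_receive name and the connected socket
# argument are assumptions of this sketch.
def _demo_receive(conn):
    def recv_exactly(n):
        data = b''
        while len(data) < n:
            chunk = conn.recv(n - len(data))
            if not chunk:
                raise EOFError("connection closed before %d bytes arrived" % n)
            data += chunk
        return data

    width = struct.unpack('>i', recv_exactly(4))[0]
    plot_index = struct.unpack('>i', recv_exactly(4))[0]
    length = struct.unpack('>i', recv_exactly(4))[0]
    rgb = recv_exactly(length)  # packed RGB bytes; height == length // (width * 3)
    return width, plot_index, rgb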
|
{
"content_hash": "43c6da8635e07b7c248111b087f15f50",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 88,
"avg_line_length": 27.1,
"alnum_prop": 0.6249580677624959,
"repo_name": "xfournet/intellij-community",
"id": "1c70df0ab5ad842de5b6064450bf7d5b4e685b33",
"size": "2981",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/helpers/pycharm_matplotlib_backend/backend_interagg.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "20665"
},
{
"name": "AspectJ",
"bytes": "182"
},
{
"name": "Batchfile",
"bytes": "60827"
},
{
"name": "C",
"bytes": "211454"
},
{
"name": "C#",
"bytes": "1264"
},
{
"name": "C++",
"bytes": "199030"
},
{
"name": "CMake",
"bytes": "1675"
},
{
"name": "CSS",
"bytes": "201445"
},
{
"name": "CoffeeScript",
"bytes": "1759"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "Groovy",
"bytes": "3289024"
},
{
"name": "HLSL",
"bytes": "57"
},
{
"name": "HTML",
"bytes": "1901772"
},
{
"name": "J",
"bytes": "5050"
},
{
"name": "Java",
"bytes": "166392304"
},
{
"name": "JavaScript",
"bytes": "570364"
},
{
"name": "Jupyter Notebook",
"bytes": "93222"
},
{
"name": "Kotlin",
"bytes": "4720744"
},
{
"name": "Lex",
"bytes": "147486"
},
{
"name": "Makefile",
"bytes": "2352"
},
{
"name": "NSIS",
"bytes": "51061"
},
{
"name": "Objective-C",
"bytes": "27861"
},
{
"name": "Perl",
"bytes": "903"
},
{
"name": "Perl 6",
"bytes": "26"
},
{
"name": "Protocol Buffer",
"bytes": "6680"
},
{
"name": "Python",
"bytes": "25477371"
},
{
"name": "Roff",
"bytes": "37534"
},
{
"name": "Ruby",
"bytes": "1217"
},
{
"name": "Shell",
"bytes": "64141"
},
{
"name": "Smalltalk",
"bytes": "338"
},
{
"name": "TeX",
"bytes": "25473"
},
{
"name": "Thrift",
"bytes": "1846"
},
{
"name": "TypeScript",
"bytes": "9469"
},
{
"name": "Visual Basic",
"bytes": "77"
},
{
"name": "XSLT",
"bytes": "113040"
}
],
"symlink_target": ""
}
|
import logging
from random import sample
import numpy as np
import Q_Learning_Agent as Q
from Agent import agent_controls
class helicopter(agent_controls):
"""
    Class that controls, interacts with, and links together the other classes:
    Q-Learning, Agent Movements, World
"""
def __init__(self, world, settings):
agent_controls.__init__(self)
self.ai = None
self.model_version = settings['model']
self.world = world
self.settings = settings
# Load in the Agent
self._create_agent()
# Agent Metrics
self.crashed = 0
self.completed = 0
# Storing States
self.lastState = None
self.current_state = None
# Storing Locations
self.origin = (world.st_x, world.st_y)
self.current_location = self.origin
self.previous_location = None
# Storing Actions
self.lastAction = None
# Agents Info
self.final_location = []
self.q_matrix = [] # Q-Matrix state(p) vs state(c) - Q-Value
self.r_matrix = [] # R_Matrix state(c) vs Action - Reward
self.trial_n = 1
# Recording States
self.state_record = []
# Reward Functions
self.reward_completed = settings['completed']
self.reward_crashed = settings['crashed']
self.reward_no_obstacle = settings['open']
self.reward_sum = 0
self.prev_reward = None
self.new_state = None
self.vals = [int(self.world.track_width * 0.92),
int(self.world.track_width * 0.4),
int(self.world.track_width * 0.5),
int(self.world.track_width * 0.98),
int(self.world.track_width * 0.6),
int(self.world.track_width * 0.7),
int(self.world.track_width * 0.8),
int(self.world.track_width * 0.95),
int(self.world.track_width * 0.2),
int(self.world.track_width * 0.1),
int(self.world.track_width * 0.99)]
def _create_agent(self):
"""
Loads the Respective Model
"""
if self.model_version == 1:
self.ai = Q.Q_Learning_Algorithm(settings=self.settings)
elif self.model_version == 2:
self.ai = Q.Q_Learning_Epsilon_Decay(settings=self.settings)
elif self.model_version == 3:
self.ai = Q.Q_Neural_Network(settings=self.settings,
track_height=self.world.track_height)
def update(self):
"""
        Advance the agent one step in the world
:return: Boolean
"""
# Get the Current State
location = self.current_location
world_val = self.world.check_location(location[0],
location[1])
state = self.find_states(self.current_location)
# Record State
self.state_record.append(state)
# Is Current State Obstacle?
if world_val == -1:
logging.debug(
"------------Helicopter Crashed on the Course-----------")
self.crashed += 1
self.reward_sum += self.reward_crashed
self.prev_reward = self.reward_crashed
if self.model_version == 3: # Neural Network
self.ai.update_train(p_state=self.lastState,
action=self.lastAction,
p_reward=self.reward_no_obstacle,
new_state=state,
terminal=[self.reward_completed,
self.reward_crashed])
if self.lastState is not None and self.model_version != 3:
self.ai.learn(
self.lastState,
self.lastAction,
self.reward_crashed,
state)
self.final_location.append([self.current_location[0],
self.trial_n,
self.current_location[1],
self.reward_sum])
self.r_matrix.append([self.lastState,
self.lastAction,
self.reward_crashed])
self.q_matrix.append([self.lastState,
state,
self.reward_crashed])
self.trial_n += 1
# Agent Crashed - Reset the world
return False
# Is the Current State on the Finish Line?
if world_val == 10:
logging.debug("-----------Helicopter Completed Course-----------")
self.completed += 1
self.reward_sum += self.reward_completed
self.prev_reward = self.reward_completed
if self.model_version == 3: # Neural Network
self.ai.update_train(p_state=self.lastState,
action=self.lastAction,
p_reward=self.reward_no_obstacle,
new_state=state,
terminal=[self.reward_completed,
self.reward_crashed])
if self.lastState is not None and self.model_version != 3:
self.ai.learn(self.lastState,
self.lastAction,
self.reward_completed,
state)
self.final_location.append([self.current_location[0],
self.trial_n,
self.current_location[1],
self.reward_sum])
self.r_matrix.append([self.lastState,
self.lastAction,
self.reward_completed])
self.trial_n += 1
# Agent Completed Course - Reset the world
return False
# Is the Current in the Open - Continue Journey
self.reward_sum += self.reward_no_obstacle
self.prev_reward = self.reward_no_obstacle
if self.lastState is not None and self.model_version != 3:
self.ai.learn(self.lastState,
self.lastAction,
self.reward_no_obstacle,
state)
# Select an Action
if self.model_version < 3:
action = self.ai.choose_Action(state)
else:
action = self.ai.choose_Action(state=state,
pstate=self.lastState,
paction=self.lastAction,
preward=self.reward_no_obstacle)
self.r_matrix.append([self.lastState,
self.lastAction,
self.reward_no_obstacle])
self.q_matrix.append([self.lastState,
state,
self.reward_no_obstacle])
self.lastState = state
self.lastAction = action
# Move Depending on the Wind at the current location
self.current_location = self.action_wind(world_val,
self.current_location)
if self.current_location is None:
return False
# Move Depending on the Action from Q-Learning
self.current_location = self.action_move(action,
self.current_location)
self.new_state = state
if self.model_version == 3: # Neural Network
self.ai.update_train(p_state=self.lastState,
action=self.lastAction,
p_reward=self.reward_no_obstacle,
new_state=state,
terminal=[self.completed,
self.crashed])
return True
def reset(self):
"""
        If the agent requires a restart, reload its parameters
"""
if self.settings['train']:
self.current_location = (
self.origin[0] + sample(self.vals, 1)[0],
self.origin[1])
else:
self.current_location = self.origin
self.previous_location = None
self.lastAction = None
self.lastState = None
self.current_state = None
self.reward_sum = 0
def find_states(self, location):
"""
        Find the state given the agent's current location
:param location: tuple(int, int)
:return: tuple(int,....)
"""
x, y = location[0], location[1]
state_space = list()
        # Scan a 3-column (ahead) by 5-row window around the agent's location
for i in range(0, 3):
for j in range(-2, 3):
value = self.world.check_location(x=x + i,
y=y + j)
state_space.append(value)
# Add the current height into the state space.
# state_space.append(y)
return tuple(state_space)
def return_q_view(self):
"""
Function to retrieve the Q-Values of the Current Location
:return: (int, np.array)
"""
qw_mat = self.model_view()
start = int(self.current_location[1])
array1 = np.zeros(shape=(1, self.world.track_height + 3))
array3 = np.array(qw_mat)
array2 = np.ma.masked_array(array3, mask=[5])
# Dealing with Edge Plotting
lower = max(start - 2, 0)
upper = min(start + 3, self.world.track_height + 1)
array1[0, lower:upper] = array2[:upper - lower]
return min(self.current_location[0], self.world.track_width), \
array1[0, :self.world.track_height]
def model_view(self):
"""
Get the Q-Values of the Current Location
:return: list/np.array
"""
view_current = self.q_matrix[- 1][1]
qw_mat = []
if self.model_version < 3:
for i in range(self.settings['nb_actions']):
key = (view_current, i + 1)
if key not in list(self.ai.q.keys()):
qw_mat.append(0)
else:
qw_mat.append(self.ai.q[key])
else:
            state = np.concatenate((list(self.lastState),
                                    [self.lastAction],
                                    [self.ai.reward_change[self.prev_reward]],
                                    list(self.new_state))) + 1
state = np.asarray(state).reshape(1, self.ai.input_dim)
qw_mat = self.ai.model.predict(state, batch_size=1)
return qw_mat
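# Illustrative sketch (not part of the original module): the 3 x 5 look-ahead
# window that find_states() encodes as the agent's state, shown with a stub
# world. StubWorld and the obstacle position are assumptions made purely for
# demonstration.
if __name__ == '__main__':

    class StubWorld(object):
        def check_location(self, x, y):
            # open space everywhere except a single obstacle at (3, 1)
            return -1 if (x, y) == (3, 1) else 0

    stub, x, y = StubWorld(), 2, 1
    # Mirrors the loops in helicopter.find_states(): columns x..x+2, rows y-2..y+2
    window = tuple(stub.check_location(x + i, y + j)
                   for i in range(0, 3) for j in range(-2, 3))
    assert len(window) == 15      # 3 columns ahead x 5 rows around the agent
    assert -1 in window           # the obstacle at (3, 1) is inside the window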
|
{
"content_hash": "686921a40d4dd1a1f7b2b2ecddfd01da",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 78,
"avg_line_length": 36.819397993311036,
"alnum_prop": 0.4819692978472159,
"repo_name": "dandxy89/rf_helicopter",
"id": "45bd613f2b72f856b3088e151e48eaae3fd3660d",
"size": "11240",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Model/Helicopter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "99688"
}
],
"symlink_target": ""
}
|
from ncclient import manager
host = '127.0.0.1'
user = 'root'
password = 'Juniper'
port = 2200
with manager.connect(host=host,
port=port,
username=user,
hostkey_verify=False,
password=password,
device_params={'name': 'junos'}) as m:
print m.dispatch('get-lldp-neighbors-information')
# Dispatch an RPC command yourself (obviously vendor-specific)
from pprint import pprint
from jnpr.junos import Device
dev = Device(host='10.12.0.77', user='root', password='Password1!')
dev.open()
pprint(dev.facts)
dev.close()
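# A further minimal sketch (assumes the same lab device and credentials as
# above): fetch the running configuration with ncclient's standard
# get_config() call instead of dispatching a raw RPC.
with manager.connect(host=host,
                     port=port,
                     username=user,
                     hostkey_verify=False,
                     password=password,
                     device_params={'name': 'junos'}) as m:
    print m.get_config(source='running').data_xml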
|
{
"content_hash": "a032beed6306dc481ea5d019d8b4bb4a",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 67,
"avg_line_length": 24.307692307692307,
"alnum_prop": 0.6060126582278481,
"repo_name": "SivagnanamCiena/nwkauto",
"id": "aa191724a8e80fff7825ccc8881bf657a2f2d652",
"size": "632",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "4-python/ncclient_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11387"
},
{
"name": "Ruby",
"bytes": "2601"
},
{
"name": "Shell",
"bytes": "1362"
}
],
"symlink_target": ""
}
|
from pytest import fixture
from beyond.dates import Date, timedelta
from beyond.io.ccsds import dumps, loads
from beyond.utils.measures import MeasureSet, Range, Azimut, Elevation
@fixture
def measureset(orbit, station):
path = "{0} {1} {0}".format(station.name, orbit.cospar_id).split()
aos = Date(2008, 9, 20, 18, 16, 3, 690790)
los = Date(2008, 9, 20, 18, 24, 58, 852563)
measures = MeasureSet([])
for orb in orbit.iter(start=aos, stop=los, step=timedelta(seconds=5)):
sph = orb.copy(frame=station, form='spherical')
measures.append(Range(path, orb.date, sph.r * 2))
measures.append(Azimut(path, orb.date, sph.theta))
measures.append(Elevation(path, orb.date, sph.phi))
return measures
def test_dump(measureset, station, ccsds_format, datafile, helper):
ref = datafile("tdm")
txt = dumps(measureset, fmt=ccsds_format)
helper.assert_string(ref, txt)
def test_load(measureset, datafile):
data = loads(datafile("tdm"))
assert len(measureset) == len(data)
assert measureset.types == data.types
assert measureset.start == data.start
assert measureset.stop == data.stop
assert measureset.sources == data.sources
assert measureset.paths == data.paths
|
{
"content_hash": "d467f3ef9706102e57b1ee6a50f87403",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 74,
"avg_line_length": 29.25581395348837,
"alnum_prop": 0.6820349761526232,
"repo_name": "galactics/beyond",
"id": "dd31cbfc759baf16970cb190e456897be325962c",
"size": "1259",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/io/ccsds/test_tdm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "459880"
},
{
"name": "XSLT",
"bytes": "18160"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
import datetime
import operator
from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends.openssl.decode_asn1 import (
_asn1_integer_to_int,
_asn1_string_to_bytes,
_decode_x509_name,
_obj2txt,
_parse_asn1_time,
)
from cryptography.hazmat.backends.openssl.encode_asn1 import (
_encode_asn1_int_gc,
_txt2obj_gc,
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
from cryptography.x509.name import _ASN1Type
@utils.register_interface(x509.Certificate)
class _Certificate(object):
def __init__(self, backend, x509_cert):
self._backend = backend
self._x509 = x509_cert
version = self._backend._lib.X509_get_version(self._x509)
if version == 0:
self._version = x509.Version.v1
elif version == 2:
self._version = x509.Version.v3
else:
raise x509.InvalidVersion(
"{} is not a valid X509 version".format(version), version
)
def __repr__(self):
return "<Certificate(subject={}, ...)>".format(self.subject)
def __eq__(self, other):
if not isinstance(other, x509.Certificate):
return NotImplemented
res = self._backend._lib.X509_cmp(self._x509, other._x509)
return res == 0
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def __deepcopy__(self, memo):
return self
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
h.update(self.public_bytes(serialization.Encoding.DER))
return h.finalize()
version = utils.read_only_property("_version")
@property
def serial_number(self):
asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return _asn1_integer_to_int(self._backend, asn1_int)
def public_key(self):
pkey = self._backend._lib.X509_get_pubkey(self._x509)
if pkey == self._backend._ffi.NULL:
# Remove errors from the stack.
self._backend._consume_errors()
raise ValueError("Certificate public key is of an unknown type")
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def not_valid_before(self):
asn1_time = self._backend._lib.X509_getm_notBefore(self._x509)
return _parse_asn1_time(self._backend, asn1_time)
@property
def not_valid_after(self):
asn1_time = self._backend._lib.X509_getm_notAfter(self._x509)
return _parse_asn1_time(self._backend, asn1_time)
@property
def issuer(self):
issuer = self._backend._lib.X509_get_issuer_name(self._x509)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def subject(self):
subject = self._backend._lib.X509_get_subject_name(self._x509)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = self.signature_algorithm_oid
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{} not recognized".format(oid)
)
@property
def signature_algorithm_oid(self):
alg = self._backend._ffi.new("X509_ALGOR **")
self._backend._lib.X509_get0_signature(
self._backend._ffi.NULL, alg, self._x509
)
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
oid = _obj2txt(self._backend, alg[0].algorithm)
return x509.ObjectIdentifier(oid)
@utils.cached_property
def extensions(self):
return self._backend._certificate_extension_parser.parse(self._x509)
@property
def signature(self):
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
self._backend._lib.X509_get0_signature(
sig, self._backend._ffi.NULL, self._x509
)
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
return _asn1_string_to_bytes(self._backend, sig[0])
@property
def tbs_certificate_bytes(self):
pp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.i2d_re_X509_tbs(self._x509, pp)
self._backend.openssl_assert(res > 0)
pp = self._backend._ffi.gc(
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
)
return self._backend._ffi.buffer(pp[0], res)[:]
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio_gc()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_bio(bio, self._x509)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
@utils.register_interface(x509.RevokedCertificate)
class _RevokedCertificate(object):
def __init__(self, backend, crl, x509_revoked):
self._backend = backend
# The X509_REVOKED_value is a X509_REVOKED * that has
# no reference counting. This means when X509_CRL_free is
# called then the CRL and all X509_REVOKED * are freed. Since
# you can retain a reference to a single revoked certificate
# and let the CRL fall out of scope we need to retain a
# private reference to the CRL inside the RevokedCertificate
# object to prevent the gc from being called inappropriately.
self._crl = crl
self._x509_revoked = x509_revoked
@property
def serial_number(self):
asn1_int = self._backend._lib.X509_REVOKED_get0_serialNumber(
self._x509_revoked
)
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return _asn1_integer_to_int(self._backend, asn1_int)
@property
def revocation_date(self):
return _parse_asn1_time(
self._backend,
self._backend._lib.X509_REVOKED_get0_revocationDate(
self._x509_revoked
),
)
@utils.cached_property
def extensions(self):
return self._backend._revoked_cert_extension_parser.parse(
self._x509_revoked
)
@utils.register_interface(x509.CertificateRevocationList)
class _CertificateRevocationList(object):
def __init__(self, backend, x509_crl):
self._backend = backend
self._x509_crl = x509_crl
def __eq__(self, other):
if not isinstance(other, x509.CertificateRevocationList):
return NotImplemented
res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
return res == 0
def __ne__(self, other):
return not self == other
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
bio = self._backend._create_mem_bio_gc()
res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
self._backend.openssl_assert(res == 1)
der = self._backend._read_mem_bio(bio)
h.update(der)
return h.finalize()
@utils.cached_property
def _sorted_crl(self):
# X509_CRL_get0_by_serial sorts in place, which breaks a variety of
# things we don't want to break (like iteration and the signature).
# Let's dupe it and sort that instead.
dup = self._backend._lib.X509_CRL_dup(self._x509_crl)
self._backend.openssl_assert(dup != self._backend._ffi.NULL)
dup = self._backend._ffi.gc(dup, self._backend._lib.X509_CRL_free)
return dup
def get_revoked_certificate_by_serial_number(self, serial_number):
revoked = self._backend._ffi.new("X509_REVOKED **")
asn1_int = _encode_asn1_int_gc(self._backend, serial_number)
res = self._backend._lib.X509_CRL_get0_by_serial(
self._sorted_crl, revoked, asn1_int
)
if res == 0:
return None
else:
self._backend.openssl_assert(revoked[0] != self._backend._ffi.NULL)
return _RevokedCertificate(
self._backend, self._sorted_crl, revoked[0]
)
@property
def signature_hash_algorithm(self):
oid = self.signature_algorithm_oid
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{} not recognized".format(oid)
)
@property
def signature_algorithm_oid(self):
alg = self._backend._ffi.new("X509_ALGOR **")
self._backend._lib.X509_CRL_get0_signature(
self._x509_crl, self._backend._ffi.NULL, alg
)
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
oid = _obj2txt(self._backend, alg[0].algorithm)
return x509.ObjectIdentifier(oid)
@property
def issuer(self):
issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def next_update(self):
nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
self._backend.openssl_assert(nu != self._backend._ffi.NULL)
return _parse_asn1_time(self._backend, nu)
@property
def last_update(self):
lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
self._backend.openssl_assert(lu != self._backend._ffi.NULL)
return _parse_asn1_time(self._backend, lu)
@property
def signature(self):
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
self._backend._lib.X509_CRL_get0_signature(
self._x509_crl, sig, self._backend._ffi.NULL
)
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
return _asn1_string_to_bytes(self._backend, sig[0])
@property
def tbs_certlist_bytes(self):
pp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.i2d_re_X509_CRL_tbs(self._x509_crl, pp)
self._backend.openssl_assert(res > 0)
pp = self._backend._ffi.gc(
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
)
return self._backend._ffi.buffer(pp[0], res)[:]
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio_gc()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509_CRL(
bio, self._x509_crl
)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
def _revoked_cert(self, idx):
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx)
self._backend.openssl_assert(r != self._backend._ffi.NULL)
return _RevokedCertificate(self._backend, self, r)
def __iter__(self):
for i in range(len(self)):
yield self._revoked_cert(i)
def __getitem__(self, idx):
if isinstance(idx, slice):
start, stop, step = idx.indices(len(self))
return [self._revoked_cert(i) for i in range(start, stop, step)]
else:
idx = operator.index(idx)
if idx < 0:
idx += len(self)
if not 0 <= idx < len(self):
raise IndexError
return self._revoked_cert(idx)
def __len__(self):
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
if revoked == self._backend._ffi.NULL:
return 0
else:
return self._backend._lib.sk_X509_REVOKED_num(revoked)
@utils.cached_property
def extensions(self):
return self._backend._crl_extension_parser.parse(self._x509_crl)
def is_signature_valid(self, public_key):
if not isinstance(
public_key,
(dsa.DSAPublicKey, rsa.RSAPublicKey, ec.EllipticCurvePublicKey),
):
raise TypeError(
"Expecting one of DSAPublicKey, RSAPublicKey,"
" or EllipticCurvePublicKey."
)
res = self._backend._lib.X509_CRL_verify(
self._x509_crl, public_key._evp_pkey
)
if res != 1:
self._backend._consume_errors()
return False
return True
@utils.register_interface(x509.CertificateSigningRequest)
class _CertificateSigningRequest(object):
def __init__(self, backend, x509_req):
self._backend = backend
self._x509_req = x509_req
def __eq__(self, other):
if not isinstance(other, _CertificateSigningRequest):
return NotImplemented
self_bytes = self.public_bytes(serialization.Encoding.DER)
other_bytes = other.public_bytes(serialization.Encoding.DER)
return self_bytes == other_bytes
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def public_key(self):
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def subject(self):
subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = self.signature_algorithm_oid
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{} not recognized".format(oid)
)
@property
def signature_algorithm_oid(self):
alg = self._backend._ffi.new("X509_ALGOR **")
self._backend._lib.X509_REQ_get0_signature(
self._x509_req, self._backend._ffi.NULL, alg
)
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
oid = _obj2txt(self._backend, alg[0].algorithm)
return x509.ObjectIdentifier(oid)
@utils.cached_property
def extensions(self):
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
x509_exts = self._backend._ffi.gc(
x509_exts,
lambda x: self._backend._lib.sk_X509_EXTENSION_pop_free(
x,
self._backend._ffi.addressof(
self._backend._lib._original_lib, "X509_EXTENSION_free"
),
),
)
return self._backend._csr_extension_parser.parse(x509_exts)
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio_gc()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509_REQ(
bio, self._x509_req
)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
@property
def tbs_certrequest_bytes(self):
pp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.i2d_re_X509_REQ_tbs(self._x509_req, pp)
self._backend.openssl_assert(res > 0)
pp = self._backend._ffi.gc(
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
)
return self._backend._ffi.buffer(pp[0], res)[:]
@property
def signature(self):
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
self._backend._lib.X509_REQ_get0_signature(
self._x509_req, sig, self._backend._ffi.NULL
)
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
return _asn1_string_to_bytes(self._backend, sig[0])
@property
def is_signature_valid(self):
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
res = self._backend._lib.X509_REQ_verify(self._x509_req, pkey)
if res != 1:
self._backend._consume_errors()
return False
return True
def get_attribute_for_oid(self, oid):
obj = _txt2obj_gc(self._backend, oid.dotted_string)
pos = self._backend._lib.X509_REQ_get_attr_by_OBJ(
self._x509_req, obj, -1
)
if pos == -1:
raise x509.AttributeNotFound(
"No {} attribute was found".format(oid), oid
)
attr = self._backend._lib.X509_REQ_get_attr(self._x509_req, pos)
self._backend.openssl_assert(attr != self._backend._ffi.NULL)
# We don't support multiple valued attributes for now.
self._backend.openssl_assert(
self._backend._lib.X509_ATTRIBUTE_count(attr) == 1
)
asn1_type = self._backend._lib.X509_ATTRIBUTE_get0_type(attr, 0)
self._backend.openssl_assert(asn1_type != self._backend._ffi.NULL)
# We need this to ensure that our C type cast is safe.
# Also this should always be a sane string type, but we'll see if
# that is true in the real world...
if asn1_type.type not in (
_ASN1Type.UTF8String.value,
_ASN1Type.PrintableString.value,
_ASN1Type.IA5String.value,
):
raise ValueError(
"OID {} has a disallowed ASN.1 type: {}".format(
oid, asn1_type.type
)
)
data = self._backend._lib.X509_ATTRIBUTE_get0_data(
attr, 0, asn1_type.type, self._backend._ffi.NULL
)
self._backend.openssl_assert(data != self._backend._ffi.NULL)
# This cast is safe iff we assert on the type above to ensure
# that it is always a type of ASN1_STRING
data = self._backend._ffi.cast("ASN1_STRING *", data)
return _asn1_string_to_bytes(self._backend, data)
@utils.register_interface(
x509.certificate_transparency.SignedCertificateTimestamp
)
class _SignedCertificateTimestamp(object):
def __init__(self, backend, sct_list, sct):
self._backend = backend
# Keep the SCT_LIST that this SCT came from alive.
self._sct_list = sct_list
self._sct = sct
@property
def version(self):
version = self._backend._lib.SCT_get_version(self._sct)
assert version == self._backend._lib.SCT_VERSION_V1
return x509.certificate_transparency.Version.v1
@property
def log_id(self):
out = self._backend._ffi.new("unsigned char **")
log_id_length = self._backend._lib.SCT_get0_log_id(self._sct, out)
assert log_id_length >= 0
return self._backend._ffi.buffer(out[0], log_id_length)[:]
@property
def timestamp(self):
timestamp = self._backend._lib.SCT_get_timestamp(self._sct)
milliseconds = timestamp % 1000
return datetime.datetime.utcfromtimestamp(timestamp // 1000).replace(
microsecond=milliseconds * 1000
)
@property
def entry_type(self):
entry_type = self._backend._lib.SCT_get_log_entry_type(self._sct)
# We currently only support loading SCTs from the X.509 extension, so
# we only have precerts.
assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT
return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
@property
def _signature(self):
ptrptr = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.SCT_get0_signature(self._sct, ptrptr)
self._backend.openssl_assert(res > 0)
self._backend.openssl_assert(ptrptr[0] != self._backend._ffi.NULL)
return self._backend._ffi.buffer(ptrptr[0], res)[:]
def __hash__(self):
return hash(self._signature)
def __eq__(self, other):
if not isinstance(other, _SignedCertificateTimestamp):
return NotImplemented
return self._signature == other._signature
def __ne__(self, other):
return not self == other
|
{
"content_hash": "33e4ba8337043d114cd5d62dce84884d",
"timestamp": "",
"source": "github",
"line_count": 583,
"max_line_length": 79,
"avg_line_length": 36.77358490566038,
"alnum_prop": 0.6081440365688698,
"repo_name": "kimjinyong/i2nsf-framework",
"id": "4d0dac7649a60060efd1d86c2fa1fba8cd126b43",
"size": "21620",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "Hackathon-112/analyzer/.local/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/x509.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "4396520"
},
{
"name": "C++",
"bytes": "9389"
},
{
"name": "CSS",
"bytes": "51736"
},
{
"name": "Dockerfile",
"bytes": "3839"
},
{
"name": "Emacs Lisp",
"bytes": "24812"
},
{
"name": "Erlang",
"bytes": "1364078"
},
{
"name": "HTML",
"bytes": "42486541"
},
{
"name": "Hack",
"bytes": "6349"
},
{
"name": "Java",
"bytes": "7976"
},
{
"name": "JavaScript",
"bytes": "533000"
},
{
"name": "Makefile",
"bytes": "401170"
},
{
"name": "PHP",
"bytes": "164007"
},
{
"name": "Perl",
"bytes": "2188"
},
{
"name": "Python",
"bytes": "3004949"
},
{
"name": "QMake",
"bytes": "360"
},
{
"name": "Roff",
"bytes": "3906372"
},
{
"name": "Shell",
"bytes": "83872"
},
{
"name": "XSLT",
"bytes": "167018"
}
],
"symlink_target": ""
}
|
import cv2
import time
import rpyc
import logging
import Tkinter as tk
import numpy as np
from PIL import Image, ImageTk
import UI_Mobot_Configuration as ui
def profile(fn):
# A decorator function to determine the run time of functions
def with_profiling(*args, **kwargs):
start_time = time.time()
ret = fn(*args, **kwargs)
elapsed_time = time.time() - start_time
print "Time elapsed for function: %s:"%(fn.__name__)
print "%.3f"%(elapsed_time)
return ret
return with_profiling
ex_img = cv2.imread('../tests/1.JPG',0)
WIDTH, HEIGHT = ex_img.shape
ex_img = cv2.resize(ex_img, dsize=(WIDTH//16, HEIGHT//16))
blurred = cv2.medianBlur(ex_img, 5)
#blurred = cv2.GaussianBlur(ex_img, (5,5), 0)
edges = cv2.Canny(blurred,0,500)
def grayToRGB(img):
# Converts grayscale image into RGB
# A single cvtColor call is faster than splitting and merging channels by hand
return cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
# Alternative, by accessing the channel arrays directly:
# B, G, R = cv2.split(image)      # OpenCV stores colour images as BGR
# merged = cv2.merge((R, G, B))   # reorder the channels into RGB
def grayToTkImage(img):
# Convert the Image object into a TkPhoto object
im = Image.fromarray(grayToRGB(img))
return ImageTk.PhotoImage(image=im)
@profile
def findEdges(img, blur_factor, edge_low, edge_high):
blurred = cv2.medianBlur(img, int(blur_factor//2*2+1))
edges = cv2.Canny(blurred, edge_low, edge_high)
return edges
@profile
def determinePolynominals(img):
# CORE, Closed source code
# To be Implemented
return img
class VectorEquation():
# This class defines a vector equation with arc length parameterization
# methods: VectorEquation.getDerivativeAt(s=0) -> Vector quantity
pass
def getMobotStatus():
# Use rpyc to get state of mobot
# -------STRUCT------
# SPEED: (Int, Int), BATT: Int,
# INTEGRITY: Int, CAMERA_ENABLED: Bool,
# FPS: Int, CPU: Int, CAMERA_MOUNT_ANGLE: Int
# CAMERA_X_OFFSET: Int, CAMERA_Y_OFFSET: Int
# -------------------- more to come.
pass
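# Illustrative shape of the status payload described above; every value is a
# made-up placeholder, not a real mobot reading.
EXAMPLE_MOBOT_STATUS = {
    'SPEED': (120, 118), 'BATT': 87, 'INTEGRITY': 100,
    'CAMERA_ENABLED': True, 'FPS': 24, 'CPU': 35,
    'CAMERA_MOUNT_ANGLE': 30, 'CAMERA_X_OFFSET': 0, 'CAMERA_Y_OFFSET': 0,
}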
@profile
def calculateDesiredVelocityVector():
# Takes in a equation (set) and calculate desired velocity vector
pass
def convertVelocityVectorToSpeeds():
# This function converts a velocity vector into L/R speed tuple
pass
class ConfigurationMainFrame():
def __init__(self):
#self.ReceiveDiagnoseBool = tk.StringVar()
ui.UI_Mobot_Configuration_support.connectAction = \
self.connectAction
ui.UI_Mobot_Configuration_support.pingAction = \
self.pingAction
ui.UI_Mobot_Configuration_support.settingsChanged = \
self.settingsChanged
ui.UI_Mobot_Configuration_support.emergencyStopAction = \
self.emergencyStopAction
#ui.UI_Mobot_Configuration_support.ReceiveDiagnoseBool = \
# self.ReceiveDiagnoseBool
self.conn = None
# UI Methods
def connectAction(self):
print "Connect Action"
HOSTNAME = ui.w.IPEntry.get()
PORT = int(ui.w.PortEntry.get())
logging.info("Connecting to client: %s@PORT%d"%(HOSTNAME, PORT))
try:
self.conn = rpyc.connect(HOSTNAME, PORT)
except:
logging.warn("No valid services found.")
self.updateMobotInfo()
def pingAction(self):
print "Ping Action"
def settingsChanged(self, e):
print "Settings Changed"
self.updateDashboardImages()
def emergencyStopAction(self):
print "Emergency Stop"
# Framework Methods
@profile
def updateDashboardImages(self):
#if self.conn == None: return
BLUR_FACTOR = ui.w.BlurScale.get()
CANNY_LO = ui.w.CannyLoScale.get()
CANNY_HI = ui.w.CannyHiScale.get()
originalImage = ex_img
print type(originalImage)
processedImage = findEdges(originalImage,
BLUR_FACTOR, CANNY_LO, CANNY_HI)
print type(processedImage)
print processedImage
originalImage = grayToTkImage(originalImage)
processedImage = grayToTkImage(processedImage)
ui.w.OriginalImageLabel.configure(image = originalImage)
ui.w.OriginalImageLabel.image = originalImage
ui.w.ProcessedImageLabel.configure(image = processedImage)
ui.w.ProcessedImageLabel.image = processedImage
def updateMobotInfo(self):
if self.conn == None: return
batt = self.conn.root.getBattery()
ui.w.TProgressbar1.step(amount=abs(batt-1))
ui.w.Label9.config(text=str(batt))
if __name__ == "__main__":
MainFrame = ConfigurationMainFrame()
ui.vp_start_gui()
|
{
"content_hash": "b26e5333b8a8b8bf1462faaf215a16f8",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 74,
"avg_line_length": 31.24,
"alnum_prop": 0.6515151515151515,
"repo_name": "harveybia/the-flash",
"id": "c29075b646318ac285d446a27d002a5c555aa9c2",
"size": "4686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "26284"
},
{
"name": "Tcl",
"bytes": "17174"
}
],
"symlink_target": ""
}
|
"""Setup application's models."""
import datetime as dt
from pathlib import Path
import peewee as pw
from muffin_peewee import JSONField, Plugin
from . import app
db = Plugin(app, connection=f"sqlite:///{ Path(__file__).parent.parent / 'db.sqlite' }")
class BaseModel(db.Model):
"""Automatically keep the model's creation time."""
created = pw.DateTimeField(default=dt.datetime.utcnow)
@db.register
class Group(BaseModel):
"""A group."""
name = pw.CharField(max_length=255, unique=True)
@db.register
class User(BaseModel):
"""A simple user model."""
email = pw.CharField()
first_name = pw.CharField(null=True, help_text="First name")
last_name = pw.CharField(null=True)
password = pw.CharField(null=True) # not secure only for the example
picture = pw.CharField(
default="https://picsum.photos/100", help_text="Full URL to the picture"
)
meta = JSONField(default={})
is_active = pw.BooleanField(default=True)
role = pw.CharField(
choices=(("user", "user"), ("manager", "manager"), ("admin", "admin"))
)
# Relationships
group = pw.ForeignKeyField(Group, backref="users", null=True)
@db.register
class Message(BaseModel):
"""Just a users' messages."""
status = pw.CharField(choices=(("new", "new"), ("published", "published")))
title = pw.CharField()
body = pw.TextField()
dtpublish = pw.DateTimeField(null=True)
user = pw.ForeignKeyField(User)
|
{
"content_hash": "25f830c354670bbde44a38bf8c8268b5",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 88,
"avg_line_length": 23.79032258064516,
"alnum_prop": 0.6569491525423728,
"repo_name": "klen/muffin-admin",
"id": "332c9f8c6cdc772e3899c1d4e9f08887aa8b110f",
"size": "1475",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "examples/peewee_orm/database.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1007"
},
{
"name": "JavaScript",
"bytes": "25118"
},
{
"name": "Makefile",
"bytes": "2840"
},
{
"name": "Python",
"bytes": "42806"
}
],
"symlink_target": ""
}
|
from fcntl import flock, LOCK_EX, LOCK_UN, LOCK_NB
from imp import load_source
from os import listdir, system
from os.path import dirname, isfile, join, realpath
from shutil import move
from subprocess import Popen, PIPE
from sys import argv, exc_info, exit, stdout, version_info
from traceback import format_exc
from xml.dom.minidom import parse
from OmsConfigHostHelpers import write_omsconfig_host_telemetry, write_omsconfig_host_switch_event, write_omsconfig_host_log, stop_old_host_instances
from time import sleep
pathToCurrentScript = realpath(__file__)
pathToCommonScriptsFolder = dirname(pathToCurrentScript)
helperLibPath = join(pathToCommonScriptsFolder, 'helperlib.py')
helperlib = load_source('helperlib', helperLibPath)
operationStatusUtilityPath = join(pathToCommonScriptsFolder, 'OperationStatusUtility.py')
operationStatusUtility = load_source('operationStatusUtility', operationStatusUtilityPath)
operation = 'PerformInventory'
# Redirect output to our log file
fullPathDSCLogger = join(pathToCommonScriptsFolder, 'nxDSCLog.py')
nxDSCLog = load_source('nxDSCLog', fullPathDSCLogger)
logger = nxDSCLog.ConsoleAndFileLogger()
stdout = logger
def usage():
write_omsconfig_host_log('Incorrect parameters to PerformInventory.py: ' + str(argv), pathToCurrentScript, 'WARNING')
print("""Usage: PerformInventory.py [OPTIONS]
OPTIONS (case insensitive):
--InMOF PATH_TO_INVENTORY.MOF
--OutXML PATH_TO_OUTPUT_REPORT.XML
--help
""")
def exitWithError(message, errorCode = 1):
timestamp = operationStatusUtility.get_current_timestamp()
errorMessage = timestamp + ": ERROR from PerformInventory.py: " + message
print(errorMessage)
exit(errorCode)
def printVerboseMessage(message):
timestamp = operationStatusUtility.get_current_timestamp()
verboseMessage = str(timestamp) + ": VERBOSE from PerformInventory.py: " + str(message)
print(verboseMessage)
def main(args):
try:
perform_inventory(args)
except SystemExit:
exit(exc_info()[1])
except Exception:
# Python 2.4-2.7 and 2.6-3 recognize different formats for exceptions. This method works in all versions.
formattedExceptionMessage = format_exc()
write_omsconfig_host_log('Python exception raised from PerformInventory.py: ' + formattedExceptionMessage, pathToCurrentScript, 'ERROR')
raise
def perform_inventory(args):
Variables = dict()
# Parse command line arguments
optlist = []
command_line_length = len(args)
argIndex = 0
inArgument = False
currentArgument = ""
arg = ""
while argIndex < command_line_length:
arg = args[argIndex]
if argIndex == 0:
# skip the program name
argIndex += 1
continue
if inArgument:
Variables[currentArgument] = arg
inArgument = False
else:
if arg[0:2] == "--":
inArgument = True
currentArgument = arg[2:].lower()
else:
# The rest are not options
args = args[argIndex:]
break
argIndex += 1
if inArgument:
Variables[currentArgument] = arg
AcceptableOptions = ["inmof", "outxml", "help"]
if "help" in Variables:
usage()
exit(0)
optionsValid = True
for arg in Variables.keys():
if arg.lower() not in AcceptableOptions:
optionsValid = False
exitWithError("Error: %s is not a valid option" % arg)
if optionsValid == False:
usage()
exit(1)
dsc_sysconfdir = join(helperlib.CONFIG_SYSCONFDIR, helperlib.CONFIG_SYSCONFDIR_DSC)
dsc_reportdir = join(dsc_sysconfdir, 'InventoryReports')
omicli_path = join(helperlib.CONFIG_BINDIR, 'omicli')
dsc_host_base_path = helperlib.DSC_HOST_BASE_PATH
dsc_host_path = join(dsc_host_base_path, 'bin/dsc_host')
dsc_host_output_path = join(dsc_host_base_path, 'output')
dsc_host_lock_path = join(dsc_host_base_path, 'dsc_host_lock')
dsc_host_switch_path = join(dsc_host_base_path, 'dsc_host_ready')
dsc_configuration_path = join(dsc_sysconfdir, 'configuration')
temp_report_path = join(dsc_configuration_path, 'Inventory.xml.temp')
report_path = join(dsc_configuration_path, 'Inventory.xml')
inventorylock_path = join(dsc_sysconfdir, 'inventory_lock')
if ("omsconfig" in helperlib.DSC_SCRIPT_PATH):
write_omsconfig_host_switch_event(pathToCurrentScript, isfile(dsc_host_switch_path))
if ("omsconfig" in helperlib.DSC_SCRIPT_PATH) and (isfile(dsc_host_switch_path)):
use_omsconfig_host = True
else:
use_omsconfig_host = False
if "outxml" in Variables:
report_path = Variables["outxml"]
parameters = []
if use_omsconfig_host:
parameters.append(dsc_host_path)
parameters.append(dsc_host_output_path)
if "inmof" in Variables:
parameters.append("PerformInventoryOOB")
parameters.append(Variables["inmof"])
else:
parameters.append("PerformInventory")
else:
parameters.append(omicli_path)
parameters.append("iv")
parameters.append(helperlib.DSC_NAMESPACE)
parameters.append("{")
parameters.append("MSFT_DSCLocalConfigurationManager")
parameters.append("}")
if "inmof" in Variables:
parameters.append("PerformInventoryOOB")
parameters.append("{")
parameters.append("InventoryMOFPath")
parameters.append(Variables["inmof"])
parameters.append("}")
else:
parameters.append("PerformInventory")
# Ensure inventory lock file permission is set correctly before opening
operationStatusUtility.ensure_file_permissions(inventorylock_path, '644')
# Open the inventory lock file. This also creates a file if it does not exist.
inventorylock_filehandle = open(inventorylock_path, 'w')
printVerboseMessage("Opened the inventory lock file at the path '" + inventorylock_path + "'")
retval = 0
inventorylock_acquired = True
dschostlock_filehandle = None
inmof_file = ''
if "inmof" in Variables:
inmof_file = Variables["inmof"]
try:
# Acquire inventory file lock
try:
flock(inventorylock_filehandle, LOCK_EX | LOCK_NB)
write_omsconfig_host_log('Inventory lock is acquired by : ' + inmof_file, pathToCurrentScript)
except IOError:
inventorylock_acquired = False
write_omsconfig_host_log('Failed to acquire inventory lock.', pathToCurrentScript, 'WARNING')
if inventorylock_acquired:
dschostlock_acquired = False
if use_omsconfig_host:
if isfile(dsc_host_lock_path):
stop_old_host_instances(dsc_host_lock_path)
# Open the dsc host lock file. This also creates a file if it does not exist.
dschostlock_filehandle = open(dsc_host_lock_path, 'w')
printVerboseMessage("Opened the dsc host lock file at the path '" + dsc_host_lock_path + "'")
# Acquire dsc host file lock
for retry in range(10):
try:
flock(dschostlock_filehandle, LOCK_EX | LOCK_NB)
dschostlock_acquired = True
write_omsconfig_host_log('dsc_host lock file is acquired by : ' + inmof_file, pathToCurrentScript)
break
except IOError:
write_omsconfig_host_log('dsc_host lock file not acquired. retry (#' + str(retry) + ') after 60 seconds...', pathToCurrentScript)
sleep(60)
else:
write_omsconfig_host_log('dsc_host lock file does not exist. Skipping this operation until next consistency hits.', pathToCurrentScript, 'WARNING')
if dschostlock_acquired or (not use_omsconfig_host):
try:
system("rm -f " + dsc_reportdir + "/*")
process = Popen(parameters, stdout = PIPE, stderr = PIPE)
stdout, stderr = process.communicate()
retval = process.returncode
printVerboseMessage(stdout)
if (retval > 0):
write_omsconfig_host_log('dsc_host failed with code = ' + str(retval), pathToCurrentScript)
exit(retval)
# Combine reports together
reportFiles = listdir(dsc_reportdir)
final_xml_report = '<INSTANCE CLASSNAME="Inventory"><PROPERTY.ARRAY NAME="Instances" TYPE="string" EmbeddedObject="object"><VALUE.ARRAY>'
values = []
for reportFileName in reportFiles:
reportFilePath = join(dsc_reportdir, reportFileName)
if not isfile(reportFilePath):
continue
report = parse(reportFilePath)
for valueNode in report.getElementsByTagName('VALUE'):
values.append(valueNode.toxml())
final_xml_report = final_xml_report + "".join(values) + "</VALUE.ARRAY></PROPERTY.ARRAY></INSTANCE>"
# Ensure temporary inventory report file permission is set correctly before opening
operationStatusUtility.ensure_file_permissions(temp_report_path, '644')
tempReportFileHandle = open(temp_report_path, 'w')
try:
tempReportFileHandle.write(final_xml_report)
finally:
if (tempReportFileHandle):
tempReportFileHandle.close()
# Ensure temporary inventory report file permission is set correctly after opening
operationStatusUtility.ensure_file_permissions(temp_report_path, '644')
system("rm -f " + dsc_reportdir + "/*")
move(temp_report_path, report_path)
# Ensure inventory report file permission is set correctly
operationStatusUtility.ensure_file_permissions(report_path, '644')
finally:
if (dschostlock_filehandle):
# Release inventory file lock
flock(inventorylock_filehandle, LOCK_UN)
# Release dsc host file lock
if isfile(dsc_host_lock_path) and use_omsconfig_host:
try:
flock(dschostlock_filehandle, LOCK_UN)
except:
pass
finally:
if (inventorylock_filehandle):
# Close inventory lock file handle
inventorylock_filehandle.close()
if (dschostlock_filehandle):
# Close dsc host lock file handle
if use_omsconfig_host:
try:
dschostlock_filehandle.close()
except:
pass
# Ensure inventory lock file permission is set correctly after opening
operationStatusUtility.ensure_file_permissions(inventorylock_path, '644')
# Ensure dsc host lock file permission is set correctly after opening
if use_omsconfig_host:
operationStatusUtility.ensure_file_permissions(dsc_host_lock_path, '644')
exit(retval)
if __name__ == "__main__":
main(argv)
|
{
"content_hash": "353c3070f3c6d4f120d3aa5be32eb830",
"timestamp": "",
"source": "github",
"line_count": 291,
"max_line_length": 167,
"avg_line_length": 40.65635738831615,
"alnum_prop": 0.6028230918772716,
"repo_name": "MSFTOSSMgmt/WPSDSCLinux",
"id": "46983a08aff02fb9e1f3dd1c55d1bb56ffe041d7",
"size": "11849",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "LCM/scripts/PerformInventory.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5870322"
},
{
"name": "C#",
"bytes": "98943"
},
{
"name": "C++",
"bytes": "670183"
},
{
"name": "CMake",
"bytes": "13826"
},
{
"name": "HTML",
"bytes": "166861"
},
{
"name": "Makefile",
"bytes": "164013"
},
{
"name": "Objective-C",
"bytes": "61644"
},
{
"name": "PowerShell",
"bytes": "40239"
},
{
"name": "Python",
"bytes": "1858427"
},
{
"name": "Shell",
"bytes": "8136"
},
{
"name": "SourcePawn",
"bytes": "60242"
},
{
"name": "Yacc",
"bytes": "35814"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
DEBUG = True
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"thecut.forms",
"test_app",
]
SITE_ID = 1
SECRET_KEY = 'thecut'
MIDDLEWARE_CLASSES = [] # silences dj1.7 warning
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'loaders': [
('django.template.loaders.cached.Loader',
['django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader'])
],
},
},
]
|
{
"content_hash": "a2598faaddc7ee95e875f7bd3346eb11",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 69,
"avg_line_length": 20.31578947368421,
"alnum_prop": 0.5712435233160622,
"repo_name": "thecut/thecut-forms",
"id": "d449f9e937f15252eb3cd5ea5293ad0781eed6c2",
"size": "796",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_app/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1104"
},
{
"name": "Python",
"bytes": "29959"
}
],
"symlink_target": ""
}
|
"""
Django settings for zhihumonitor project.
Generated by 'django-admin startproject' using Django 1.10.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$f)sp+h+dj7&vsw$9$5gngg5z6+bm&p==o28ge^^!$z6#ms-vz'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'zhihumonitor.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'zhihumonitor.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static/")
|
{
"content_hash": "1ee5f1ed0b51f213a8917b30536c58f9",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 91,
"avg_line_length": 26.132231404958677,
"alnum_prop": 0.6872232764073372,
"repo_name": "WebMonitor/ZhihuMonitor",
"id": "3a263fa6f1a3e98cd252139bd02fff607729dd58",
"size": "3162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zhihumonitor/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17182"
}
],
"symlink_target": ""
}
|
import scipy.io as sio
import numpy as np
def load_mat(filename):
"""
This function should be called instead of sio.loadmat directly, as it
cures the problem of mat files not being recovered as proper Python
dictionaries. It calls _check_keys to convert every entry that is
still a mat-object.
Parameters:
-----------
filename : str
matlab data file ('.mat')
Returns:
--------
dict
"""
data = sio.loadmat(filename, struct_as_record=False, squeeze_me=True)
return _check_keys(data)
def _check_keys(dict):
"""
Checks whether entries in the dictionary are mat-objects. If so,
_todict is called to change them into nested dictionaries.
"""
for key in dict:
if isinstance(dict[key], sio.matlab.mio5_params.mat_struct):
dict[key] = _todict(dict[key])
return dict
def _todict(matobj):
"""
A recursive function which constructs nested dictionaries from mat-objects.
"""
dict = {}
for strg in matobj._fieldnames:
elem = matobj.__dict__[strg]
if isinstance(elem, sio.matlab.mio5_params.mat_struct):
dict[strg] = _todict(elem)
else:
dict[strg] = elem
return dict
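# A minimal usage sketch; the file name below is a placeholder assumption,
# not a file shipped with this module.
def _example_load_mat_usage(path="session.mat"):
    """Load a .mat file and show that nested structs arrive as plain dicts."""
    data = load_mat(path)
    # Every former mat-struct can now be walked with ordinary key access.
    return {key: type(value).__name__ for key, value in data.items()}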
def load_rDAT(fin, nheaderrows=0, fmt=None):
if fmt == None: # replace with your own rdat format
fmt = [
("session", "i4"),
("trial", "i4"),
("normal", "b"),
("stimulus", "a64"),
("class", "i4"),
("R_sel", "i4"),
("R_acc", "i4"),
("ReactionTime", "f4"),
("Reinforced", "b"),
("TimeOfDay", "a8"),
("Date", "a8"),
]
while True:
if nheaderrows > 100:
raise ValueError("Recursively found more than 100 header rows.")
try:
data = np.genfromtxt(
fin, dtype=fmt, invalid_raise=False, skip_header=nheaderrows
)
return data
except ValueError:
nheaderrows += 1
def load_data_pandas(subjects, data_folder, force_boolean=["reward"]):
"""
This function is deprecated and has been moved to Gentnerlab/behav-analysis
"""
raise DeprecationWarning("Moved to Gentnerlab/behav-analysis")
def binomial_ci(x, N, CL=95.0):
"""
This function is deprecated and has been moved to Gentnerlab/behav-analysis
"""
raise DeprecationWarning("Moved to Gentnerlab/behav-analysis")
def vinjegallant(response):
"""
calculates the activity fraction of a set of responses
Parameters:
-----------
response : list or tuple or NumPy array
the set of responses to calculate the activity fraction over
Returns:
--------
float
"""
R = np.asarray(response[:])
n = np.float_(len(R))
eps = np.spacing(np.float64(1))
A = ((R.sum() / n) ** 2) / (((R ** 2).sum() / n) + eps)
S = (1 - A) / (1 - 1 / n)
return S
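# Quick illustrative check of the activity fraction above (not part of the
# original module): a uniform response vector gives a sparseness of 0, while
# a one-hot vector gives a sparseness of 1.
def _example_vinjegallant():
    uniform = [1.0, 1.0, 1.0, 1.0]
    one_hot = [1.0, 0.0, 0.0, 0.0]
    return vinjegallant(uniform), vinjegallant(one_hot)  # ~(0.0, 1.0)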
def accperstimplot(subj, df, days=7, stims_all=None):
"""
This function is deprecated and has been moved to Gentnerlab/behav-analysis
"""
raise DeprecationWarning("Moved to Gentnerlab/behav-analysis")
def stars(p):
"""
This function is deprecated and has been moved to Gentnerlab/behav-analysis
"""
raise DeprecationWarning("Moved to Gentnerlab/behav-analysis")
def plot_stars(p, x, y, size="large", horizontalalignment="center", **kwargs):
"""
This function is deprecated and has been moved to Gentnerlab/behav-analysis
"""
raise DeprecationWarning("Moved to Gentnerlab/behav-analysis")
def plot_linestar(p, x1, x2, y):
"""
This function is deprecated and has been moved to Gentnerlab/behav-analysis
"""
raise DeprecationWarning("Moved to Gentnerlab/behav-analysis")
|
{
"content_hash": "e46694aab7d9adba3c3fb5c3373fc7fb",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 79,
"avg_line_length": 27.15714285714286,
"alnum_prop": 0.5994213571804313,
"repo_name": "gentnerlab/glab-common-py",
"id": "ad4a2fc581aaecf2efcabd405afc04b638b23134",
"size": "3802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "glab_common/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "5823967"
},
{
"name": "Python",
"bytes": "13085"
}
],
"symlink_target": ""
}
|
import antlr3
import testbase
import unittest
class t015calc(testbase.ANTLRTest):
def setUp(self):
self.compileGrammar()
def _evaluate(self, expr, expected, errors=[]):
cStream = antlr3.StringStream(expr)
lexer = self.getLexer(cStream)
tStream = antlr3.CommonTokenStream(lexer)
parser = self.getParser(tStream)
result = parser.evaluate()
assert result == expected, "%r != %r" % (result, expected)
assert len(parser.reportedErrors) == len(errors), parser.reportedErrors
def testValid01(self):
self._evaluate("1 + 2", 3)
def testValid02(self):
self._evaluate("1 + 2 * 3", 7)
def testValid03(self):
self._evaluate("10 / 2", 5)
def testValid04(self):
self._evaluate("6 + 2*(3+1) - 4", 10)
def testMalformedInput(self):
self._evaluate("6 - (2*1", 4, ["mismatched token at pos 8"])
# FIXME: most parse errors result in TypeErrors in action code, because
# rules return None, which is then added/multiplied... to integers.
# evaluate("6 - foo 2", 4, ["some error"])
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "c3ac452a76eb809e8943953b1083602b",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 79,
"avg_line_length": 25.804347826086957,
"alnum_prop": 0.6015164279696714,
"repo_name": "indashnet/InDashNet.Open.UN2000",
"id": "0f1fe8a39af4648512f9e83f96293f45b7308e07",
"size": "1187",
"binary": false,
"copies": "21",
"ref": "refs/heads/master",
"path": "android/external/antlr/antlr-3.4/runtime/Python/tests/t015calc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('information_technology', '0002_auto_20180131_1525'),
]
operations = [
migrations.AlterField(
model_name='component',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='processframework',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
|
{
"content_hash": "49d22cec9963b23993d615605ee9ccf4",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 111,
"avg_line_length": 30.095238095238095,
"alnum_prop": 0.6060126582278481,
"repo_name": "Semprini/cbe",
"id": "f99002c0981816e02e4a090555171b3fc1d9bc7f",
"size": "681",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cbe/cbe/information_technology/migrations/0003_auto_20210617_2350.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2292"
},
{
"name": "HTML",
"bytes": "3112"
},
{
"name": "PowerShell",
"bytes": "20448"
},
{
"name": "Python",
"bytes": "241197"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta2_scale_spec import V1beta2ScaleSpec
class TestV1beta2ScaleSpec(unittest.TestCase):
""" V1beta2ScaleSpec unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1beta2ScaleSpec(self):
"""
Test V1beta2ScaleSpec
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1beta2_scale_spec.V1beta2ScaleSpec()
pass
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "3c54cb311ce20c3453b089fa8f807ecb",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 105,
"avg_line_length": 22.476190476190474,
"alnum_prop": 0.6927966101694916,
"repo_name": "mbohlool/client-python",
"id": "32c579c489242bf1f312e42647f8f37aafb6b74c",
"size": "961",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/test/test_v1beta2_scale_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
}
|
revision = '07ea7b63c8fc'
down_revision = '3cecf6a39f78'
branch_labels = None
depends_on = None
import alembic
import sqlalchemy
def upgrade():
alembic.op.alter_column("cards", "cardid", new_column_name="id")
alembic.op.alter_column("card_multiverse", "multiverseid", new_column_name="id")
alembic.op.alter_column("history", "historykey", new_column_name="id")
alembic.op.alter_column("notification", "notificationkey", new_column_name="id")
alembic.op.alter_column("quotes", "qid", new_column_name="id")
def downgrade():
alembic.op.alter_column("cards", "id", new_column_name="cardid")
alembic.op.alter_column("card_multiverse", "id", new_column_name="multiverseid")
alembic.op.alter_column("history", "id", new_column_name="historykey")
alembic.op.alter_column("notification", "id", new_column_name="notificationkey")
alembic.op.alter_column("quotes", "id", new_column_name="qid")
|
{
"content_hash": "8be6d6af5f780c84ea348a5845e43bec",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 81,
"avg_line_length": 42.666666666666664,
"alnum_prop": 0.7299107142857143,
"repo_name": "andreasots/lrrbot",
"id": "f9fe3132800f549a0d3cb124e0e4b21ff587bb25",
"size": "896",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "alembic/versions/07ea7b63c8fc_id_is_primary_key.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15924"
},
{
"name": "HTML",
"bytes": "65230"
},
{
"name": "JavaScript",
"bytes": "39616"
},
{
"name": "Mako",
"bytes": "318"
},
{
"name": "Python",
"bytes": "381399"
}
],
"symlink_target": ""
}
|
"""
Middleware detection and setup code
"""
import sys
def startupMiddleware():
"""
Do the actual detection and startup, also defines all necessary globals
:returns: tuple -- current server rank and total world size
"""
if "MPI" in globals():
# Force local simulation
return 0, 1
# Try loading MPI
global COMM_WORLD
global MPI
try:
from mpi4py import MPI
COMM_WORLD = MPI.COMM_WORLD
except ImportError:
# No MPI4Py found, so force local MPI simulation
from pypdevs.MPIRedirect import MPIFaker
COMM_WORLD = MPIFaker()
# Now we should take care of the starting of the server
rank = COMM_WORLD.Get_rank()
if rank != 0:
# We should stop immediately, to prevent multiple constructions of the model
# This is a 'good' stop, so return with a zero
from pypdevs.server import Server
server = Server(int(rank), COMM_WORLD.Get_size())
sys.exit(0)
else:
# We should still shutdown every simulation kernel at exit by having the controller send these messages
# Use the atexit code at the end
if COMM_WORLD.Get_size() > 1:
import atexit
atexit.register(cleanupMPI)
return 0, COMM_WORLD.Get_size()
def cleanupMPI():
"""
Shut down the MPI backend by sending a termination message to all listening nodes
"""
for i in range(COMM_WORLD.Get_size()):
if i == COMM_WORLD.Get_rank():
req = COMM_WORLD.isend(0, dest=i, tag=0)
else:
COMM_WORLD.send(0, dest=i, tag=0)
if COMM_WORLD.Get_size() > 1:
MPI.Request.wait(req)
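# A minimal usage sketch (illustrative only, not part of the original module):
# callers unpack the rank and world size once at startup. Only rank 0 ever
# returns here; worker ranks construct a Server and exit inside
# startupMiddleware() itself.
def _example_startup():
    rank, size = startupMiddleware()
    return "controller is rank %d of %d node(s)" % (rank, size)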
|
{
"content_hash": "3dadfe35fcee548d223983eeb0804d2e",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 111,
"avg_line_length": 31.037037037037038,
"alnum_prop": 0.6217183770883055,
"repo_name": "kdheepak89/pypdevs",
"id": "7bf26851e10e33f752ee84a927b0dce96334edb5",
"size": "2366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pypdevs/middleware.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "1886"
},
{
"name": "Python",
"bytes": "3811050"
},
{
"name": "Shell",
"bytes": "1835"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..utils import (
HEADRequest,
unified_strdate,
ExtractorError,
)
class ORFIE(InfoExtractor):
_VALID_URL = r'https?://tvthek\.orf\.at/(?:programs/.+?/episodes|topics/.+?|program/[^/]+)/(?P<id>\d+)'
_TEST = {
'url': 'http://tvthek.orf.at/program/matinee-Was-Sie-schon-immer-ueber-Klassik-wissen-wollten/7317210/Was-Sie-schon-immer-ueber-Klassik-wissen-wollten/7319746/Was-Sie-schon-immer-ueber-Klassik-wissen-wollten/7319747',
'file': '7319747.mp4',
'md5': 'bd803c5d8c32d3c64a0ea4b4eeddf375',
'info_dict': {
'title': 'Was Sie schon immer über Klassik wissen wollten',
'description': 'md5:0ddf0d5f0060bd53f744edaa5c2e04a4',
'duration': 3508,
'upload_date': '20140105',
},
'skip': 'Blocked outside of Austria',
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
playlist_id = mobj.group('id')
webpage = self._download_webpage(url, playlist_id)
data_json = self._search_regex(
r'initializeAdworx\((.+?)\);\n', webpage, 'video info')
all_data = json.loads(data_json)
def get_segments(all_data):
for data in all_data:
if data['name'] == 'Tracker::EPISODE_DETAIL_PAGE_OVER_PROGRAM':
return data['values']['segments']
sdata = get_segments(all_data)
if not sdata:
raise ExtractorError('Unable to extract segments')
def quality_to_int(s):
m = re.search('([0-9]+)', s)
if m is None:
return -1
return int(m.group(1))
entries = []
for sd in sdata:
video_id = sd['id']
formats = [{
'preference': -10 if fd['delivery'] == 'hls' else None,
'format_id': '%s-%s-%s' % (
fd['delivery'], fd['quality'], fd['quality_string']),
'url': fd['src'],
'protocol': fd['protocol'],
'quality': quality_to_int(fd['quality']),
} for fd in sd['playlist_item_array']['sources']]
# Check for geoblocking.
# There is a property is_geoprotection, but that's always false
geo_str = sd.get('geoprotection_string')
if geo_str:
try:
http_url = next(
f['url']
for f in formats
if re.match(r'^https?://.*\.mp4$', f['url']))
except StopIteration:
pass
else:
req = HEADRequest(http_url)
self._request_webpage(
req, video_id,
note='Testing for geoblocking',
errnote=((
'This video seems to be blocked outside of %s. '
'You may want to try the streaming-* formats.')
% geo_str),
fatal=False)
self._sort_formats(formats)
upload_date = unified_strdate(sd['created_date'])
entries.append({
'_type': 'video',
'id': video_id,
'title': sd['header'],
'formats': formats,
'description': sd.get('description'),
'duration': int(sd['duration_in_seconds']),
'upload_date': upload_date,
'thumbnail': sd.get('image_full_url'),
})
return {
'_type': 'playlist',
'entries': entries,
'id': playlist_id,
}
|
{
"content_hash": "5b05165c995f118746d65ec368eed6d4",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 225,
"avg_line_length": 35.85849056603774,
"alnum_prop": 0.4861878453038674,
"repo_name": "Grassboy/plugin.video.plurkTrend",
"id": "03421d1d5c78f2acd712e560ae17fb96d4a323be",
"size": "3818",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "youtube_dl/extractor/orf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1059158"
},
{
"name": "Shell",
"bytes": "115"
}
],
"symlink_target": ""
}
|
import socket
import os
import sys
import time
BUFFER_SIZE = 200000
METRICS_REQ = "{\"action\":0,\"command\":\"ports_all_stat_values\",\"data\":null}"
API_REG = "{\"action\":1,\"command\":\"clients\",\"data\":{\"client_path\":\""
API_UNREG = "{\"action\":2,\"command\":\"clients\",\"data\":{\"client_path\":\""
GLOBAL_METRICS_REQ = "{\"action\":0,\"command\":\"global_stat_values\",\"data\":null}"
DEFAULT_FP = "/var/run/dpdk/default_client"
class Socket:
def __init__(self):
self.send_fd = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
self.recv_fd = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
self.client_fd = None
def __del__(self):
try:
self.send_fd.close()
self.recv_fd.close()
self.client_fd.close()
except:
print("Error - Sockets could not be closed")
class Client:
def __init__(self): # Creates a client instance
self.socket = Socket()
self.file_path = None
self.choice = None
self.unregistered = 0
def __del__(self):
try:
if self.unregistered == 0:
                self.unregister()
except:
print("Error - Client could not be destroyed")
def getFilepath(self, file_path): # Gets arguments from Command-Line and assigns to instance of client
self.file_path = file_path
def register(self): # Connects a client to DPDK-instance
if os.path.exists(self.file_path):
os.unlink(self.file_path)
try:
self.socket.recv_fd.bind(self.file_path)
except socket.error as msg:
print ("Error - Socket binding error: " + str(msg) + "\n")
self.socket.recv_fd.settimeout(2)
self.socket.send_fd.connect("/var/run/dpdk/rte/telemetry")
JSON = (API_REG + self.file_path + "\"}}")
self.socket.send_fd.sendall(JSON.encode())
self.socket.recv_fd.listen(1)
self.socket.client_fd = self.socket.recv_fd.accept()[0]
def unregister(self): # Unregister a given client
self.socket.client_fd.send((API_UNREG + self.file_path + "\"}}").encode())
self.socket.client_fd.close()
def requestMetrics(self): # Requests metrics for given client
self.socket.client_fd.send(METRICS_REQ.encode())
data = self.socket.client_fd.recv(BUFFER_SIZE).decode()
print("\nResponse: \n", data)
    def repeatedlyRequestMetrics(self, sleep_time): # Repeatedly requests metrics for given client
print("\nPlease enter the number of times you'd like to continuously request Metrics:")
n_requests = int(input("\n:"))
print("\033[F") #Removes the user input from screen, cleans it up
print("\033[K")
for i in range(n_requests):
self.requestMetrics()
time.sleep(sleep_time)
def requestGlobalMetrics(self): #Requests global metrics for given client
self.socket.client_fd.send(GLOBAL_METRICS_REQ.encode())
data = self.socket.client_fd.recv(BUFFER_SIZE).decode()
print("\nResponse: \n", data)
def interactiveMenu(self, sleep_time): # Creates Interactive menu within the script
while self.choice != 4:
print("\nOptions Menu")
print("[1] Send for Metrics for all ports")
print("[2] Send for Metrics for all ports recursively")
print("[3] Send for global Metrics")
print("[4] Unregister client")
try:
self.choice = int(input("\n:"))
print("\033[F") #Removes the user input for screen, cleans it up
print("\033[K")
if self.choice == 1:
self.requestMetrics()
elif self.choice == 2:
self.repeatedlyRequestMetrics(sleep_time)
elif self.choice == 3:
self.requestGlobalMetrics()
elif self.choice == 4:
self.unregister()
self.unregistered = 1
else:
print("Error - Invalid request choice")
except:
pass
if __name__ == "__main__":
sleep_time = 1
file_path = ""
if len(sys.argv) == 2:
file_path = sys.argv[1]
else:
print("Warning - No filepath passed, using default (" + DEFAULT_FP + ").")
file_path = DEFAULT_FP
client = Client()
client.getFilepath(file_path)
client.register()
client.interactiveMenu(sleep_time)
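def _example_programmatic_use():
    # Illustrative sketch, not part of the original DPDK script: the same Client API as
    # above, used without the interactive menu. Assumes a DPDK application with telemetry
    # enabled is running (so /var/run/dpdk/rte/telemetry exists); the client socket path
    # below is a hypothetical placeholder.
    client = Client()
    client.getFilepath("/var/run/dpdk/example_client")
    client.register()
    client.requestMetrics()          # one "ports_all_stat_values" request
    client.requestGlobalMetrics()    # one "global_stat_values" request
    client.unregister()
    client.unregistered = 1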
|
{
"content_hash": "0bcedebcf8981a31d9a7ca7546334b8c",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 106,
"avg_line_length": 36.8780487804878,
"alnum_prop": 0.576058201058201,
"repo_name": "john-mcnamara-intel/dpdk",
"id": "df41d04fbe9ea4b27640dcaf96f460c273663a68",
"size": "4639",
"binary": false,
"copies": "7",
"ref": "refs/heads/main",
"path": "usertools/dpdk-telemetry-client.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "1623"
},
{
"name": "C",
"bytes": "39269990"
},
{
"name": "C++",
"bytes": "860345"
},
{
"name": "Makefile",
"bytes": "342834"
},
{
"name": "Meson",
"bytes": "144875"
},
{
"name": "Objective-C",
"bytes": "224248"
},
{
"name": "Python",
"bytes": "115929"
},
{
"name": "Shell",
"bytes": "77250"
},
{
"name": "SmPL",
"bytes": "2074"
}
],
"symlink_target": ""
}
|
from django.db import models
sexes = ((1, "Male"), (2, "Female"), (3, "Other"))
class Person(models.Model):
sex = models.PositiveSmallIntegerField(choices=sexes)
first = models.CharField(max_length=60)
last = models.CharField(max_length=60)
age = models.PositiveSmallIntegerField()
alive = models.BooleanField(default=True)
spouse = models.ForeignKey('Person', null=True, blank=True)
parents = models.ManyToManyField('Person', related_name='person_parents', null=True, blank=True)
siblings = models.ManyToManyField('Person', related_name='person_siblings', null=True, blank=True)
children = models.ManyToManyField('Person', related_name='person_children', null=True, blank=True)
def __unicode__(self):
return self.first + ' ' + self.last
class Family(models.Model):
name = models.CharField(max_length=60)
members = models.ManyToManyField(Person)
pets = models.ManyToManyField('Pet', null=True, blank=True)
def __unicode__(self):
return self.name
class Pet(models.Model):
sex = models.PositiveSmallIntegerField(choices=sexes)
owner = models.ForeignKey(Person, null=True, blank=True)
name = models.CharField(max_length=60)
age = models.PositiveSmallIntegerField()
species = models.CharField(max_length=60)
alive = models.BooleanField(default=True)
def __unicode__(self):
return self.name
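def _example_usage():
    # Illustrative sketch, not part of the original models module: how the relations
    # above fit together. Assumes migrations for these models have been applied and a
    # configured Django project.
    dad = Person.objects.create(sex=1, first='John', last='Doe', age=40)
    kid = Person.objects.create(sex=3, first='Sam', last='Doe', age=9)
    kid.parents.add(dad)
    dad.children.add(kid)
    family = Family.objects.create(name='Doe')
    family.members.add(dad, kid)
    return family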
|
{
"content_hash": "544bce55345503911016fff0a398e83a",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 102,
"avg_line_length": 39.97142857142857,
"alnum_prop": 0.7019299499642602,
"repo_name": "hodgesds/django-hammer",
"id": "b08fd7db4d2a2d3d9272824411ea90ac5edca5d8",
"size": "1399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7893"
},
{
"name": "JavaScript",
"bytes": "734"
},
{
"name": "Python",
"bytes": "96072"
}
],
"symlink_target": ""
}
|
"""Find location of test executable."""
import os
import layout_exceptions
build_target_suggestion_message = (
'Did you forget to build the ninja target "lb_layout_tests" first?')
def FindTestExecutable(default_build_order, use_build, exe_name, exe_base_dir):
def PathToExe(build):
return os.path.join(exe_base_dir, build, exe_name)
if use_build:
path_to_exe = PathToExe(use_build)
# If provided, search for and use the specified executable
if os.path.exists(path_to_exe):
return os.path.expanduser(path_to_exe)
else:
raise layout_exceptions.TestClientError(
'Unable to find layout test executable\n{}\n{}'.format(
path_to_exe, build_target_suggestion_message))
else:
# Search for the layout test executable in the project 'out' directory
for build in default_build_order:
path_to_exe = PathToExe(build)
if os.path.exists(path_to_exe):
return os.path.expanduser(path_to_exe)
raise layout_exceptions.TestClientError(
'Unable to find layout test executable in base directory\n'
'"{}"\n after searching through sub-directories {}.\n'
'{}'.format(exe_base_dir,
str(default_build_order),
build_target_suggestion_message))
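def _example_lookup():
    # Illustrative sketch, not part of the original module; the build names and base
    # directory below are placeholders. With use_build=None the default build order is
    # searched under exe_base_dir until the executable is found.
    return FindTestExecutable(
        default_build_order=['debug', 'devel'],
        use_build=None,
        exe_name='lb_layout_tests',
        exe_base_dir='out')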
|
{
"content_hash": "397eb4f01de605050af697be93fb2a8e",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 79,
"avg_line_length": 35.72222222222222,
"alnum_prop": 0.6671850699844479,
"repo_name": "snibug/gyp_example",
"id": "de4f4987f858cf724caee6ace356a83ef6f8e743",
"size": "1286",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "layout_tests/find_executable.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11242"
},
{
"name": "C++",
"bytes": "1097599"
},
{
"name": "CSS",
"bytes": "1719"
},
{
"name": "GLSL",
"bytes": "2217"
},
{
"name": "HTML",
"bytes": "467"
},
{
"name": "JavaScript",
"bytes": "4339"
},
{
"name": "Python",
"bytes": "125526"
}
],
"symlink_target": ""
}
|
import asyncio
import functools
import wrapt
def futurized(o):
    ''' Makes the given object awaitable.
:param any o: Object to wrap
:return: awaitable that resolves to provided object
:rtype: asyncio.Future
Anything passed to :code:`futurized` is wrapped in :code:`asyncio.Future`.
    This makes it awaitable (it can be run with :code:`await` or :code:`yield from`);
    awaiting it returns the original object.
    If the provided object is an Exception (or a subclass of it) then the `Future` will raise it on await.
.. code-block:: python
fut = aiounittest.futurized('SOME TEXT')
ret = await fut
print(ret) # prints SOME TEXT
fut = aiounittest.futurized(Exception('Dummy error'))
ret = await fut # will raise the exception "dummy error"
The main goal is to use it with :code:`unittest.mock.Mock` (or :code:`MagicMock`) to
be able to mock awaitable functions (coroutines).
Consider the below code
.. code-block:: python
from asyncio import sleep
async def add(x, y):
await sleep(666)
return x + y
    You'd rather not wait 666 seconds, so you've got to mock that.
.. code-block:: python
from aiounittest import futurized, AsyncTestCase
from unittest.mock import Mock, patch
import dummy_math
class MyAddTest(AsyncTestCase):
async def test_add(self):
mock_sleep = Mock(return_value=futurized('whatever'))
patch('dummy_math.sleep', mock_sleep).start()
ret = await dummy_math.add(5, 6)
self.assertEqual(ret, 11)
mock_sleep.assert_called_once_with(666)
async def test_fail(self):
mock_sleep = Mock(return_value=futurized(Exception('whatever')))
patch('dummy_math.sleep', mock_sleep).start()
with self.assertRaises(Exception) as e:
await dummy_math.add(5, 6)
mock_sleep.assert_called_once_with(666)
'''
f = asyncio.Future()
if isinstance(o, Exception):
f.set_exception(o)
else:
f.set_result(o)
return f
def run_sync(func=None, loop=None):
    ''' Runs the given function (coroutine) synchronously
:param callable func: function to run (mostly coroutine)
:param ioloop loop: event loop to use to run `func`
    :type loop: event loop or None
    By default a brand new event loop is created (and the old one is closed). After completion, the loop is closed, then recreated and set as default,
    leaving asyncio clean.
**Note**: :code:`aiounittest.async_test` is an alias of :code:`aiounittest.helpers.run_sync`
    The function can be used like `pytest.mark.asyncio` (the implementation differs),
but it's compatible with :code:`unittest.TestCase` class.
.. code-block:: python
import asyncio
import unittest
from aiounittest import async_test
async def add(x, y):
await asyncio.sleep(0.1)
return x + y
class MyAsyncTestDecorator(unittest.TestCase):
@async_test
async def test_async_add(self):
ret = await add(5, 6)
self.assertEqual(ret, 11)
.. note::
If the loop is provided, it won't be closed. It's up to you.
This function is also used internally by :code:`aiounittest.AsyncTestCase` to run coroutines.
'''
def get_brand_new_default_event_loop():
try:
old_loop = asyncio.get_event_loop()
if not old_loop.is_closed():
old_loop.close()
except RuntimeError:
# no default event loop, ignore exception
pass
_loop = asyncio.new_event_loop()
asyncio.set_event_loop(_loop)
return _loop
@wrapt.decorator
def decorator(wrapped, instance, args, kwargs):
nonlocal loop
use_default_event_loop = loop is None
if use_default_event_loop:
loop = get_brand_new_default_event_loop()
try:
ret = wrapped(*args, **kwargs)
future = asyncio.ensure_future(ret, loop=loop)
return loop.run_until_complete(future)
finally:
if use_default_event_loop:
# clean up
loop.close()
del loop
# again set a new (unstopped) event loop
get_brand_new_default_event_loop()
if func is None:
return decorator
else:
return decorator(func)
async_test = run_sync
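def _example_run_sync_with_explicit_loop():
    # Illustrative sketch, not part of aiounittest itself: the `loop` argument of
    # run_sync. Per the note above, a caller-provided loop is not closed afterwards,
    # so it is closed explicitly here.
    loop = asyncio.new_event_loop()
    @run_sync(loop=loop)
    async def answer():
        return 42
    result = answer()  # runs the coroutine to completion on the provided loop
    loop.close()
    return result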
|
{
"content_hash": "ce316d1b8ae2f50ccbc6fc48a06dcc5e",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 147,
"avg_line_length": 30.986928104575163,
"alnum_prop": 0.5872178865218308,
"repo_name": "kwarunek/aiounittest",
"id": "a6d709dd7d01b81531a58fd3db4fc083efe71b55",
"size": "4741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aiounittest/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15702"
}
],
"symlink_target": ""
}
|
import os
import io
from zipfile import ZipFile, is_zipfile
from contextlib import contextmanager
import pandas as pd
__author__ = 'Semyon'
def extract_csv(filepath):
zp = ZipFile(filepath)
csv = [f for f in zp.namelist() if os.path.splitext(f)[-1] == '.csv']
return zp.open(csv.pop())
@contextmanager
def zip_csv_opener(filepath):
fp = extract_csv(filepath) if is_zipfile(filepath) else open(filepath, 'rb')
try:
yield fp
finally:
fp.close()
def input_transformer(filepath):
with zip_csv_opener(filepath) as fp:
raw = fp.read().decode('utf-8')
return pd.read_csv(io.StringIO(raw), parse_dates=True, index_col=0, na_values='NONE')
def load(name):
    cur_dir = os.path.dirname(os.path.realpath(__file__))
filename = os.path.join(cur_dir, name)
df = input_transformer(filename)
return df
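def _example_load():
    # Illustrative sketch, not part of the original reader; the file name below is a
    # placeholder. load() accepts either a plain CSV file or a zip archive containing
    # a single .csv member, resolved relative to this module's directory.
    df = load('train.csv.zip')
    return df.head()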
|
{
"content_hash": "4914b70337cbed7354f3e2ed737f47b3",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 89,
"avg_line_length": 24.083333333333332,
"alnum_prop": 0.6608996539792388,
"repo_name": "SammyVimes/san_francisco_crimes",
"id": "961361211a59c40311f23203bd1fcdbf23c91d13",
"size": "867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/input_reader.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "704"
},
{
"name": "Python",
"bytes": "19643"
},
{
"name": "Shell",
"bytes": "1612"
}
],
"symlink_target": ""
}
|
from tashi.aws.wsdl.AmazonEC2_services_server import *
from tashi.aws.util import *
def ConfirmProductInstance(productCode, instanceId):
res = ConfirmProductInstanceResponseMsg()
res.requestId = genRequestId()
res.__dict__['return'] = True
for i in client.getInstances():
if i.id == int(instanceId):
res.ownerId = i.userId
break
else:
res.__dict__['return'] = False
return res
functions = ['ConfirmProductInstance']
|
{
"content_hash": "41a92f58dc6f8c395fdf2fcc374c5c20",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 54,
"avg_line_length": 27.125,
"alnum_prop": 0.728110599078341,
"repo_name": "stroucki/tashi",
"id": "43da3db687b5d5b6abe5597375483c9fa5f54f60",
"size": "1226",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/tashi/aws/impl/other.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "9326"
},
{
"name": "PHP",
"bytes": "28750"
},
{
"name": "Python",
"bytes": "606584"
},
{
"name": "Shell",
"bytes": "28185"
}
],
"symlink_target": ""
}
|
from RGT.XML.SVG.Attribs.basicSvgAttribute import BasicSvgAttribute
from types import StringType
class XlinkAttributes(BasicSvgAttribute):
ATTRIBUTE_XLINK_HREF = 'xlink:href'
ATTRIBUTE_XLINK_SHOW = 'xlink:show'
ATTRIBUTE_XLINK_ACTUATE = 'xlink:actuate'
ATTRIBUTE_XLINK_TYPE = 'xlink:type'
ATTRIBUTE_XLINK_ROLE = 'xlink:role'
ATTRIBUTE_XLINK_ARCROLE = 'xlink:arcrole'
ATTRIBUTE_XLINK_TITLE = 'xlink:title'
def __init__(self):
BasicSvgAttribute.__init__(self)
def setXlinkHref(self, data):
if data is not None:
if type(data) is not StringType:
data = str(data)
self._setNodeAttribute(self.ATTRIBUTE_XLINK_HREF, data)
def setXlinkShow(self, data):
allowedValues = ['new', 'replace', 'embed', 'other', 'none']
if data is not None:
if data not in allowedValues:
values = ''
for value in allowedValues:
values += value + ', '
values = values[0: len(values) - 2]
                raise ValueError('Value not allowed, only ' + values + ' are allowed')
else:
self._setNodeAttribute(self.ATTRIBUTE_XLINK_SHOW, data)
def setXlinkActuate(self, data):
allowedValues = ['onLoad']
if data is not None:
if data not in allowedValues:
values = ''
for value in allowedValues:
values += value + ', '
values = values[0: len(values) - 2]
                raise ValueError('Value not allowed, only ' + values + ' are allowed')
else:
self._setNodeAttribute(self.ATTRIBUTE_XLINK_ACTUATE, data)
def setXlinkType(self, data):
allowedValues = ['simple']
if data is not None:
if data not in allowedValues:
values = ''
for value in allowedValues:
values += value + ', '
values = values[0: len(values) - 2]
                raise ValueError('Value not allowed, only ' + values + ' are allowed')
else:
self._setNodeAttribute(self.ATTRIBUTE_XLINK_TYPE, data)
def setXlinkRole(self, data):
if data is not None:
if type(data) is not StringType:
data = str(data)
self._setNodeAttribute(self.ATTRIBUTE_XLINK_ROLE, data)
def setXlinkArcrole(self, data):
if data is not None:
if type(data) is not StringType:
data = str(data)
self._setNodeAttribute(self.ATTRIBUTE_XLINK_ARCROLE, data)
def setXlinkTitle(self, data):
if data is not None:
if type(data) is not StringType:
data = str(data)
self._setNodeAttribute(self.ATTRIBUTE_XLINK_TITLE, data)
def getXlinkHref(self):
node = self._getNodeAttribute(self.ATTRIBUTE_XLINK_HREF)
if node is not None:
return node.nodeValue
return None
def getXlinkShow(self):
node = self._getNodeAttribute(self.ATTRIBUTE_XLINK_SHOW)
if node is not None:
return node.nodeValue
return None
def getXlinkActuate(self):
node = self._getNodeAttribute(self.ATTRIBUTE_XLINK_ACTUATE)
if node is not None:
return node.nodeValue
return None
def getXlinkType(self):
node = self._getNodeAttribute(self.ATTRIBUTE_XLINK_TYPE)
if node is not None:
return node.nodeValue
return None
def getXlinkRole(self):
node = self._getNodeAttribute(self.ATTRIBUTE_XLINK_ROLE)
if node is not None:
return node.nodeValue
return None
def getXlinkArcrole(self):
node = self._getNodeAttribute(self.ATTRIBUTE_XLINK_ARCROLE)
if node is not None:
return node.nodeValue
return None
def getXlinkTitle(self):
node = self._getNodeAttribute(self.ATTRIBUTE_XLINK_TITLE)
if node is not None:
return node.nodeValue
return None
|
{
"content_hash": "0d340c24829c491e90b7a3a7889c4c13",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 85,
"avg_line_length": 34.85123966942149,
"alnum_prop": 0.5648565330803889,
"repo_name": "danrg/RGT-tool",
"id": "53d3188a921af12ff069484522f98a16a26b5dc0",
"size": "4217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/RGT/XML/SVG/Attribs/xlinkAttributes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "83200"
},
{
"name": "HTML",
"bytes": "93970"
},
{
"name": "JavaScript",
"bytes": "111380"
},
{
"name": "Python",
"bytes": "788710"
},
{
"name": "SQLPL",
"bytes": "722"
}
],
"symlink_target": ""
}
|
"""
A web interface built on Flask and the Cisco UCS Python SDK that has the
following basic functionality:
- Connect to a UCSM domain
- View and Add VLANs
- Add a VLAN to a vNIC
The web UI currently has little to no error handling in place, so proceed
accordingly.
"""
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
index.button_text = "Default"
if request.method == "POST":
index.button_text = request.form["button_text"]
return redirect(url_for('button'))
return render_template('index.html')
@app.route('/button', methods=['GET', 'POST'])
def button():
if request.method == "POST":
return redirect(url_for('index'))
return render_template('button.html', button_text=index.button_text)
if __name__ == '__main__':
app.run(host='0.0.0.0')
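def _example_request_flow():
    # Illustrative sketch, not part of the original script: drives the two routes above
    # with Flask's test client. Assumes the index.html and button.html templates from
    # the repository are available.
    with app.test_client() as client:
        client.post('/', data={'button_text': 'Hello'})  # stores the text and redirects to /button
        page = client.get('/button')                     # renders button.html with that text
        return page.status_code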
|
{
"content_hash": "d7d2a54ef8a7ba0918291dffb7be1cc2",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 74,
"avg_line_length": 20.155555555555555,
"alnum_prop": 0.6571113561190739,
"repo_name": "drew-russell/Cisco-UCS-VLAN-Management",
"id": "6ef8d3e535ebc0c31c7b8744af6ccfb676d368dc",
"size": "907",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "button_generator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "37615"
},
{
"name": "HTML",
"bytes": "13934"
},
{
"name": "JavaScript",
"bytes": "1844"
},
{
"name": "Python",
"bytes": "19096"
}
],
"symlink_target": ""
}
|
from datadog_checks.base import AgentCheck
EVENT_TYPE = SOURCE_TYPE_NAME = 'gnatsd'
class GnatsdConfig:
def __init__(self, instance):
self.instance = instance
self.host = instance.get('host', '')
self.port = int(instance.get('port', 8222))
self.url = '{}:{}'.format(self.host, self.port)
self.server_name = instance.get('server_name', '')
self.tags = instance.get('tags', [])
class GnatsdCheckInvocation:
SERVICE_CHECK_NAME = 'gnatsd.can_connect'
METRICS = {
'varz': {
'connections': 'gauge',
'subscriptions': 'gauge',
'slow_consumers': 'count',
'remotes': 'gauge',
'routes': 'gauge',
'in_msgs': 'count',
'out_msgs': 'count',
'in_bytes': 'count',
'out_bytes': 'count',
'mem': 'gauge',
},
'connz': {
'num_connections': 'gauge',
'total': 'count',
'connections': {
'pending_bytes': 'gauge',
'in_msgs': 'count',
'out_msgs': 'count',
'subscriptions': 'gauge',
'in_bytes': 'count',
'out_bytes': 'count',
},
},
'routez': {
'num_routes': 'gauge',
'routes': {
'pending_size': 'gauge',
'in_msgs': 'count',
'out_msgs': 'count',
'subscriptions': 'gauge',
'in_bytes': 'count',
'out_bytes': 'count',
},
},
}
TAGS = {
'varz': ['server_id'],
'connz.connections': ['cid', 'ip', 'name', 'lang', 'version'],
'routez.routes': ['rid', 'remote_id', 'ip'],
}
def __init__(self, instance, checker):
self.instance = instance
self.checker = checker
self.config = GnatsdConfig(instance)
self.tags = self.config.tags + ['server_name:%s' % self.config.server_name]
self.service_check_tags = self.tags + ['url:%s' % self.config.host]
def check(self):
# Confirm monitor endpoint is available
self._status_check()
# Gather NATS metrics
for endpoint, metrics in self.METRICS.items():
self._check_endpoint(endpoint, metrics)
def _status_check(self):
try:
response = self.checker.http.get(self.config.url)
if response.status_code == 200:
self.checker.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=self.service_check_tags)
else:
raise ValueError('Non 200 response from NATS monitor port')
except Exception as e:
msg = "Unable to fetch NATS stats: %s" % str(e)
self.checker.service_check(
self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, message=msg, tags=self.service_check_tags
)
raise e
def _check_endpoint(self, endpoint, metrics):
data = self.checker.http.get('{}/{}'.format(self.config.url, endpoint)).json()
self._track_metrics(endpoint, metrics, data)
def _track_metrics(self, namespace, metrics, data, tags=None):
if not tags:
tags = self._metric_tags(namespace, data)
for mname, mtype in metrics.items():
path = '{}.{}'.format(namespace, mname)
if isinstance(mtype, dict):
for instance in data.get(mname, []):
if 'routez' in namespace:
# . is not a valid character in identifiers so replace it in IP addresses with _
title = str(instance.get('ip')).replace('.', '_')
else:
title = str(instance.get('name') or 'unnamed')
self._track_metrics(
'{}.{}'.format(path, title), mtype, instance, tags=self._metric_tags(path, instance)
)
else:
if mtype == 'count':
mid = str(data.get('cid') or data.get('rid') or '')
metric = self._count_delta('{}.{}'.format(path, mid), data[mname])
else:
metric = data[mname]
# Send metric to Datadog
getattr(self.checker, mtype)('gnatsd.{}'.format(path), metric, tags=tags)
def _metric_tags(self, endpoint, data):
tags = self.tags[:]
if endpoint in self.TAGS:
for tag in self.TAGS[endpoint]:
if tag in data:
tags.append('gnatsd-{}:{}'.format(tag, data[tag]))
return tags
def _count_delta(self, count_id, current_value):
self.checker.counts.setdefault(count_id, 0)
delta = current_value - self.checker.counts[count_id]
self.checker.counts[count_id] = current_value
return delta
class GnatsdCheck(AgentCheck):
def __init__(self, name, init_config, instances):
super(GnatsdCheck, self).__init__(name, init_config, instances)
self.counts = {}
def check(self, instance):
GnatsdCheckInvocation(instance, self).check()
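# Illustrative sketch, not part of the integration: a minimal `instances` entry in the
# shape read by GnatsdConfig above (host, port, server_name, tags). The values are
# placeholders for a local gnatsd monitoring endpoint.
EXAMPLE_INSTANCE = {
    'host': 'http://localhost',
    'port': 8222,
    'server_name': 'nats-demo',
    'tags': ['env:dev'],
}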
|
{
"content_hash": "1f39a463a98020c1d2a3653447a1f2fd",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 112,
"avg_line_length": 35.21768707482993,
"alnum_prop": 0.5122657909986479,
"repo_name": "DataDog/integrations-extras",
"id": "0a718aacbdfebf803c3e08f6cfbfd090bbc90fa9",
"size": "5284",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gnatsd/datadog_checks/gnatsd/gnatsd.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "4265"
},
{
"name": "Go",
"bytes": "4119"
},
{
"name": "PHP",
"bytes": "3192"
},
{
"name": "Python",
"bytes": "1219552"
},
{
"name": "Ruby",
"bytes": "8005"
},
{
"name": "Shell",
"bytes": "4237"
}
],
"symlink_target": ""
}
|
import dnslib
import argparse
from scapy.all import *
try:
from termcolor import colored
except ImportError:
colored = lambda msg, color: msg
def show_packet(pkt):
if ARP in pkt:
print(pkt.summary())
if pkt[ARP].op == 1: #who-has (request)
if pkt[ARP].psrc == '0.0.0.0': # ARP Probe
print(colored('\tARP Probe from: ' + pkt[ARP].hwsrc, 'red'))
elif UDP in pkt:
print(repr(pkt))
# Try to show mDNS info
try:
raw_load = pkt.getlayer(3).load
dns_parsed = dnslib.DNSRecord.parse(raw_load)
if dns_parsed.header.ar > 0:
mdns_name = [i.rname for i in dns_parsed.ar]
print(colored('\tmDNS Name: {}'.format(repr(mdns_name)), 'red'))
except Exception as e:
print('ERROR: {}'.format(e))
else:
print(repr(pkt))
def watch_network(interface):
try:
sniff(iface=interface, prn=show_packet, filter="arp or (udp port 53) or (udp port 5353)", store=0)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--interface', type=str, dest='interface', default='wlan0',
help='Network interface to use')
args = parser.parse_args()
print('Sniffing on interface: {}'.format(args.interface))
watch_network(args.interface)
|
{
"content_hash": "f080f83bb8f1f8ac99e91ace0bac78d8",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 106,
"avg_line_length": 29.666666666666668,
"alnum_prop": 0.5779494382022472,
"repo_name": "calebmadrigal/network-hacking-scripts",
"id": "f3ef2ae278b5ebd9a47f994d71fa999fb2bd5a1a",
"size": "1448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "watch-network-devices.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "638"
},
{
"name": "JavaScript",
"bytes": "372"
},
{
"name": "Python",
"bytes": "9925"
},
{
"name": "Shell",
"bytes": "283"
}
],
"symlink_target": ""
}
|
from client.sources.common import interpreter
from client.sources.common import pyconsole
import client
import unittest
class PythonConsoleTest(unittest.TestCase):
def createConsole(self, verbose=True, interactive=False, timeout=None):
return pyconsole.PythonConsole(
verbose, interactive, timeout)
def calls_interpret(self, success, code, setup='', teardown=''):
self.console = self.createConsole()
lines = interpreter.CodeCase.split_code(code, self.console.PS1,
self.console.PS2)
self.console.load(lines, setup, teardown)
result = self.console.interpret()
self.assertEqual(success, result)
def testPass_equals(self):
self.calls_interpret(True,
"""
>>> 3 + 4
7
""")
def testPass_expectException(self):
self.calls_interpret(True,
"""
>>> 1 / 0
ZeroDivisionError
""")
def testPass_multilineSinglePrompt(self):
self.calls_interpret(True,
"""
>>> x = 5
>>> x + 4
9
""")
def testPass_multiplePrompts(self):
self.calls_interpret(True,
"""
>>> x = 5
>>> x + 4
9
>>> 5 + x
10
""")
def testPass_multilineWithIndentation(self):
self.calls_interpret(True,
"""
>>> def square(x):
... return x * x
>>> square(4)
16
""")
def testPass_setup(self):
self.calls_interpret(True,
"""
>>> def square(x):
... return x * x
>>> square(x)
9
>>> square(y)
1
""",
setup="""
>>> x = 3
>>> y = 1
""")
def testPass_teardown(self):
self.calls_interpret(True,
"""
>>> def square(x):
... return x * x
>>> square(3)
9
>>> square(1)
1
""",
teardown="""
>>> import client
>>> client.foo = 1
""")
self.assertEqual(1, client.foo)
def testError_notEqualError(self):
self.calls_interpret(False,
"""
>>> 2 + 4
7
""")
def testError_expectedException(self):
self.calls_interpret(False,
"""
>>> 1 + 2
ZeroDivisionError
""")
def testError_wrongException(self):
self.calls_interpret(False,
"""
>>> 1 / 0
TypeError
""")
def testError_runtimeError(self):
self.calls_interpret(False,
"""
>>> f = lambda: f()
>>> f()
4
""")
def testError_timeoutError(self):
# TODO(albert): test timeout errors without actually waiting
# for timeouts.
pass
def testError_teardown(self):
self.calls_interpret(False,
"""
>>> 1 / 0
""",
teardown="""
>>> import client
>>> client.foo = 2
""")
self.assertEqual(2, client.foo)
def testError_setUpFails(self):
self.calls_interpret(False,
"""
>>> client.foo = 4
""",
setup="""
>>> import client
>>> client.foo = 3
>>> 1 / 0
""",
teardown="""
>>> client.foo = 5
""")
self.assertEqual(3, client.foo)
def testError_tearDownFails(self):
self.calls_interpret(False,
"""
>>> x = 3
""",
teardown="""
>>> 1 / 0
""")
|
{
"content_hash": "8999a74cee842378f27a6844fb0c31c0",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 75,
"avg_line_length": 24.81761006289308,
"alnum_prop": 0.4237202230106437,
"repo_name": "jackzhao-mj/ok-client",
"id": "3b093547203a93dc93d45606b4f1433cdac81de2",
"size": "3946",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/sources/common/pyconsole_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "249444"
},
{
"name": "Scheme",
"bytes": "113"
}
],
"symlink_target": ""
}
|
"""
State to synchronize files and directories with rsync.
.. versionadded:: 2016.3.0
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
- force: True
"""
import logging
import os
import salt.utils.path
log = logging.getLogger(__name__)
def __virtual__():
"""
Only if Rsync is available.
:return:
"""
if salt.utils.path.which("rsync"):
return True
return (False, "Command not found: rsync")
def _get_summary(rsync_out):
"""
Get summary from the rsync successful output.
"""
return "- " + "\n- ".join(
[
elm
for elm in rsync_out.split("\n\n")[-1].replace(" ", "\n").split("\n")
if elm
]
)
def _get_changes(rsync_out):
"""
Get changes from the rsync successful output.
"""
copied = list()
deleted = list()
for line in rsync_out.split("\n\n")[0].split("\n")[1:]:
if line.startswith("deleting "):
deleted.append(line.split(" ", 1)[-1])
else:
copied.append(line)
ret = {
"copied": os.linesep.join(sorted(copied)) or "N/A",
"deleted": os.linesep.join(sorted(deleted)) or "N/A",
}
# Return whether anything really changed
ret["changed"] = not ((ret["copied"] == "N/A") and (ret["deleted"] == "N/A"))
return ret
def synchronized(
name,
source,
delete=False,
force=False,
update=False,
passwordfile=None,
exclude=None,
excludefrom=None,
prepare=False,
dryrun=False,
additional_opts=None,
):
"""
Guarantees that the source directory is always copied to the target.
name
Name of the target directory.
source
Source directory.
prepare
        Create the destination directory if it does not exist.
delete
Delete extraneous files from the destination dirs (True or False)
force
Force deletion of dirs even if not empty
update
Skip files that are newer on the receiver (True or False)
passwordfile
Read daemon-access password from the file (path)
exclude
Exclude files, that matches pattern.
excludefrom
Read exclude patterns from the file (path)
dryrun
        Perform a trial run with no changes made. This is the same as
        doing test=True
.. versionadded:: 2016.3.1
additional_opts
Pass additional options to rsync, should be included as a list.
.. versionadded:: 2018.3.0
"""
ret = {"name": name, "changes": {}, "result": True, "comment": ""}
if not os.path.exists(name) and not force and not prepare:
ret["result"] = False
ret["comment"] = "Destination directory {dest} was not found.".format(dest=name)
else:
if not os.path.exists(name) and prepare:
os.makedirs(name)
if __opts__["test"]:
dryrun = True
result = __salt__["rsync.rsync"](
source,
name,
delete=delete,
force=force,
update=update,
passwordfile=passwordfile,
exclude=exclude,
excludefrom=excludefrom,
dryrun=dryrun,
additional_opts=additional_opts,
)
if __opts__["test"] or dryrun:
ret["result"] = None
ret["comment"] = _get_summary(result["stdout"])
return ret
# Failed
if result.get("retcode"):
ret["result"] = False
ret["comment"] = result["stderr"]
# Changed
elif _get_changes(result["stdout"])["changed"]:
ret["comment"] = _get_summary(result["stdout"])
ret["changes"] = _get_changes(result["stdout"])
del ret["changes"]["changed"] # Don't need to print the boolean
# Clean
else:
ret["comment"] = _get_summary(result["stdout"])
ret["changes"] = {}
return ret
|
{
"content_hash": "0d1036b815d581dad191f4510d6a5774",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 88,
"avg_line_length": 23.309941520467838,
"alnum_prop": 0.5559458103361766,
"repo_name": "saltstack/salt",
"id": "79be3def58350c5f5be0b849d4314ef5f851b8a1",
"size": "4556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/states/rsync.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import with_statement
import warnings
class FSM(object):
"""
    Override match and handle on Transition (or pass matcher and handler
    callables to its constructor) and wire transitions between states with
    add_transition. If a transition handles something, it returns the next
    state.
    If you want something to track global state, put it in the data
    instance passed to process so that transitions and states can access
    it.
"""
states = []
state = None
def __init__(self, states):
"""first state is the initial state"""
if len(states) > 0:
self.state = states[0]
self.states = states
def process(self, data):
if self.state is None:
raise RuntimeError("There is no initial state.")
next_state = self.state.process(data)
if next_state:
self.state = next_state
else:
warnings.warn("No next state", RuntimeWarning)
def add_state(self, state):
# first added state is initial state
if len(self.states) == 0:
self.state = state
self.states.append(state)
def dotty(self):
r = 'digraph fsm {\n\n'
for s in self.states:
r = r + s.dotty()
r = r + '\n}\n'
return r
class State(object):
def __init__(self, name):
self.name = name
self.transitions = []
def process(self, data):
for t in self.transitions:
r = t.process(data)
if r is not None:
return r
return None
def add_transition(self, t):
self.transitions.append(t)
t.start_state = self
def add_transitions(self, transitions):
for t in transitions:
self.add_transition(t)
def __str__(self):
r = '<State %s [' % self.name
for t in self.transitions:
r = r + (' ->%s ' % t.next_state.name)
r = r + ']>'
return r
def dotty(self):
r = '%s;\n' % self.name
r = r + 'edge [fontsize=8]\n'
r = r + 'rankdir=TB;\nnodesep=2;\n'
for t in self.transitions:
r = r + '%s -> %s [label="%s\\n%s"]\n' % (self.name,
t.next_state.name,
t.matcher.__name__,
t.handler.__name__)
return r
class Transition(object):
def __init__(self, next_state, matcher, handler):
self.matcher = matcher
self.handler = handler
self.start_state = None
self.next_state = next_state
if self.next_state is None:
raise RuntimeError("next_state must be valid")
def match(self, data):
"""
        Used by process; by default, calls the handler if the matcher
        returns true for the data. May be overridden instead of providing
        a matcher method to the ctor.
"""
if self.matcher is not None:
return self.matcher(data)
return True
def handle(self, data):
"""
        Return the next state. May be overridden in a subclass to change
        behavior, or pass a handler method to the ctor.
"""
if self.handler:
state = self.handler(data)
if state is None:
return self.next_state
return state
return self.next_state
def process(self, data):
"""return next state, or None if not handled."""
if self.match(data):
return self.handle(data)
return None
def __str__(self):
if self.start_state:
return "<Transition %s->%s>" % (self.start_state.name,
self.next_state.name)
return "<Transition ->%s>" % (self.next_state.name,)
|
{
"content_hash": "76573aef3e69434af41b6e7e80607d32",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 73,
"avg_line_length": 29.53435114503817,
"alnum_prop": 0.5285603515120186,
"repo_name": "ghtdak/txtorcon",
"id": "05a7f18f1137559850bcd712e030ad8ee9732889",
"size": "3894",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "txtorcon/spaghetti.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "3568"
},
{
"name": "Python",
"bytes": "442167"
},
{
"name": "Shell",
"bytes": "681"
}
],
"symlink_target": ""
}
|
import sys, os
parent_path = os.path.split(os.path.abspath("."))[0]
if parent_path not in sys.path:
sys.path.insert(0, parent_path)
from pyspec import *
from pyspec.mockobject import *
import pyspec.framework
import pyspec.reloader as reloader
import StringIO, string
class AssertionMethods_Behaivior(object):
@spec
def pyspec_should_describe_exception(self):
raise ValueError("abc")
@spec
def pyspec_should_find_the_value_is_not_True(self):
a = False
About(a).should_be_true()
@spec
def pyspec_should_describe_the_value_is_True(self):
a = True
About(a).should_be_true()
@spec
def pyspec_should_find_the_value_is_not_False(self):
a = True
About(a).should_be_false()
@spec
def pyspec_should_describe_the_value_is_False(self):
a = False
About(a).should_be_false()
@spec
def pyspec_should_find_the_value_is_not_None(self):
a = True
About(a).should_be_none()
@spec
def pyspec_should_describe_the_value_is_None(self):
a = None
About(a).should_be_none()
@spec
def pyspec_should_find_the_value_is_None(self):
a = None
About(a).should_not_be_none()
@spec
def pyspec_should_describe_the_value_is_not_None(self):
a = True
About(a).should_not_be_none()
@spec
def pyspec_should_find_the_values_does_not_equal(self):
a = "abc"
About(a).should_equal("def")
@spec
def pyspec_should_describe_the_values_equal(self):
a = "abc"
About(a).should_equal("abc")
@spec
def pyspec_should_find_the_values_are_not_near(self):
a = 1.1
About(a).should_equal_nearly(1.3, 0.1)
@spec
def pyspec_should_describe_the_values_are_near(self):
a = 1.1
About(a).should_equal_nearly(1.2, 0.2)
@spec
def pyspec_should_find_the_values_equal(self):
a = "abc"
About(a).should_not_equal("abc")
@spec
def pyspec_should_describe_the_values_do_not_equal(self):
a = "abc"
About(a).should_not_equal("def")
@spec
def pyspec_should_find_the_objects_are_not_same(self):
a = xrange(5)
        b = xrange(5) # different object!
About(a).should_be_same(b)
@spec
def pyspec_should_describe_the_objects_are_same(self):
a = xrange(5)
b = a # same object!
About(a).should_be_same(b)
@spec
def pyspec_should_find_the_objects_are_same(self):
a = xrange(5)
b = a # same object!
About(a).should_not_be_same(b)
@spec
def pyspec_should_describe_the_objects_are_not_same(self):
a = xrange(5)
        b = xrange(5) # different object!
About(a).should_not_be_same(b)
class IgnoreTestCase(object):
@ignore
@spec
def ignore_test(self):
pass
@spec
def pyspec_can_ignore_the_method_which_have_ignore_decoretor(self):
sample = pyspec.framework.SpecTestTrigger(self.IgnoreTestCase())
result = sample.run()
About(result.ignore_count).should_equal(1)
@spec
def pyspec_has_method_that_describe_the_flow_is_bad(self):
Verify.fail("fail test")
@spec(expected=pyspec.IgnoreTestCase)
def pyspec_has_method_that_describe_the_spec_should_be_ignored(self):
Verify.ignore("ignore test")
@spec
def should_include__success(self):
a = range(5)
About(a).should_include(3)
@spec
def should_include__fail(self):
a = range(5)
About(a).should_include(10)
@spec(expected=TypeError)
def should_include__error(self):
a = 1
About(a).should_include(1)
@spec
def should_not_include__success(self):
a = range(5)
About(a).should_not_include(10)
@spec
def should_not_include__fail(self):
a = range(5)
About(a).should_not_include(3)
@spec(expected=TypeError)
def should_not_include__error(self):
a = 1
About(a).should_not_include(1)
@spec
def should_be_in__success(self):
About(2).should_be_in(range(5))
@spec
def should_be_in__fail(self):
About(10).should_be_in(range(5))
@spec
def should_be_in__error(self):
About(10).should_be_in(1)
@spec
def should_not_be_in__success(self):
About(10).should_not_be_in(range(5))
@spec
def should_not_be_in__fail(self):
About(2).should_not_be_in(range(5))
@spec
def should_not_be_in__error(self):
About(10).should_not_be_in(1)
@spec
def should_not_be_changed__success(self):
value = 10
About(value).should_not_be_changed()
@spec
def should_not_be_changed__fail(self):
import random
value = random.randint(0, 1000)
About(value).should_not_be_changed()
class FailDataProvider(object):
@classmethod
@data_provider(key=("i", "j"))
def generate_pair_data(cls):
        # key length and result data length are not the same.
return [(1, 2, 3, 4), (3, 4, 5, 6)]
@spec
def data_provider_error(self, i, j):
pass
if __name__ == "__main__":
run_test()
|
{
"content_hash": "a154e07569568d8644b4ff69fab8ecdb",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 73,
"avg_line_length": 24.99047619047619,
"alnum_prop": 0.5891768292682927,
"repo_name": "shibu/pyspec",
"id": "a420f77f4a1b33bcc17c7847cd8e319ab6177d2e",
"size": "5273",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sample/pyspec_message_check.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "519663"
}
],
"symlink_target": ""
}
|
"""
eve.methods.post
~~~~~~~~~~~~~~~~
This module implements the POST method, supported by the resource
endpoints.
:copyright: (c) 2013 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from flask import current_app as app, request
from eve.utils import document_link, config, document_etag
from eve.auth import requires_auth
from eve.validation import ValidationError
from eve.methods.common import parse, payload, ratelimit
@ratelimit()
@requires_auth('resource')
def post(resource, payl=None):
""" Adds one or more documents to a resource. Each document is validated
against the domain schema. If validation passes the document is inserted
and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
document are returned. If validation fails, a list of validation issues
is returned.
:param resource: name of the resource involved.
:param payl: alternative payload. When calling post() from your own code
    you can provide an alternative payload. This can be useful, for
example, when you have a callback function hooked to a certain
endpoint, and want to perform additional post() calls from
there.
Please be advised that in order to successfully use this
option, a request context must be available.
See https://github.com/nicolaiarocci/eve/issues/74 for a
discussion, and a typical use case.
.. versionchanged:: 0.1.1
auth.request_auth_value is now used to store the auth_field value.
.. versionchanged:: 0.1.0
More robust handling of auth_field.
Support for optional HATEOAS.
.. versionchanged: 0.0.9
        Event hooks renamed to be more robust and consistent: 'on_posting'
        renamed to 'on_insert'.
        You can now pass a pre-defined custom payload to the function.
.. versionchanged:: 0.0.9
Storing self.app.auth.userid in auth_field when 'user-restricted
resource access' is enabled.
.. versionchanged: 0.0.7
Support for Rate-Limiting.
Support for 'extra_response_fields'.
'on_posting' and 'on_posting_<resource>' events are raised before the
documents are inserted into the database. This allows callback functions
to arbitrarily edit/update the documents being stored.
.. versionchanged:: 0.0.6
Support for bulk inserts.
Please note: validation constraints are checked against the database,
and not between the payload documents themselves. This causes an
interesting corner case: in the event of a multiple documents payload
where two or more documents carry the same value for a field where the
'unique' constraint is set, the payload will validate successfully, as
there are no duplicates in the database (yet). If this is an issue, the
client can always send the documents once at a time for insertion, or
validate locally before submitting the payload to the API.
.. versionchanged:: 0.0.5
        Support for 'application/json' Content-Type.
Support for 'user-restricted resource access'.
.. versionchanged:: 0.0.4
Added the ``requires_auth`` decorator.
.. versionchanged:: 0.0.3
        JSON links. Superfluous ``response`` container removed.
"""
date_utc = datetime.utcnow().replace(microsecond=0)
resource_def = app.config['DOMAIN'][resource]
schema = resource_def['schema']
validator = app.validator(schema, resource)
documents = []
issues = []
# validation, and additional fields
if payl is None:
payl = payload()
if isinstance(payl, dict):
payl = [payl]
for value in payl:
document = []
doc_issues = []
try:
document = parse(value, resource)
validation = validator.validate(document)
if validation:
# validation is successful
document[config.LAST_UPDATED] = \
document[config.DATE_CREATED] = date_utc
# if 'user-restricted resource access' is enabled
# and there's an Auth request active,
# inject the auth_field into the document
auth_field = resource_def['auth_field']
if app.auth and auth_field:
request_auth_value = app.auth.request_auth_value
if request_auth_value and request.authorization:
document[auth_field] = request_auth_value
else:
# validation errors added to list of document issues
doc_issues.extend(validator.errors)
except ValidationError as e:
raise e
except Exception as e:
# most likely a problem with the incoming payload, report back to
# the client as if it was a validation issue
doc_issues.append(str(e))
issues.append(doc_issues)
if len(doc_issues) == 0:
documents.append(document)
if len(documents):
# notify callbacks
getattr(app, "on_insert")(resource, documents)
getattr(app, "on_insert_%s" % resource)(documents)
# bulk insert
ids = app.data.insert(resource, documents)
# build response payload
response = []
for doc_issues in issues:
response_item = {}
if len(doc_issues):
response_item['status'] = config.STATUS_ERR
response_item['issues'] = doc_issues
else:
response_item['status'] = config.STATUS_OK
response_item[config.ID_FIELD] = ids.pop(0)
document = documents.pop(0)
response_item[config.LAST_UPDATED] = document[config.LAST_UPDATED]
response_item['etag'] = document_etag(document)
if resource_def['hateoas']:
response_item['_links'] = \
{'self': document_link(resource,
response_item[config.ID_FIELD])}
# add any additional field that might be needed
allowed_fields = [x for x in resource_def['extra_response_fields']
if x in document.keys()]
for field in allowed_fields:
response_item[field] = document[field]
response.append(response_item)
if len(response) == 1:
response = response.pop(0)
return response, None, None, 200
|
{
"content_hash": "cf75b14a70274f980cacbf3050a0b6b0",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 79,
"avg_line_length": 37.971098265895954,
"alnum_prop": 0.6227736337342061,
"repo_name": "PulsePod/evepod",
"id": "740e07d0c4a32ceb40c2cc76e3e1b83f4dd316f3",
"size": "6594",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/eve/methods/post.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "6111"
},
{
"name": "JavaScript",
"bytes": "6345"
},
{
"name": "Perl",
"bytes": "84"
},
{
"name": "Python",
"bytes": "6111061"
},
{
"name": "Shell",
"bytes": "4078"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
from marten import __version__, __doc__
def parse_requirements(requirements):
with open(requirements) as f:
return [l.strip('\n') for l in f if l.strip('\n') and not l.startswith('#')]
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import sys
stderr = sys.stderr
sys.stderr = StringIO()
try:
from pypandoc import convert
long_description = convert('README.md', 'rst')
except (ImportError, OSError):
long_description = __doc__
sys.stderr = stderr
test_requirements = parse_requirements('test-requirements.txt')
setup(
name='marten',
version=__version__,
packages=find_packages(exclude=('tests.*', 'tests',)),
url='https://github.com/nick-allen/python-marten',
license='MIT',
author='Nick Allen',
author_email='nick.allen.cse@gmail.com',
description=__doc__,
long_description=long_description,
include_package_data=True,
zip_safe=False,
install_requires=parse_requirements('requirements.txt'),
extras_require={
'test': test_requirements
},
entry_points={
'console_scripts': [
'marten = marten.cli:marten_cli'
]
},
test_suite='nose.collector',
tests_require=test_requirements
)
|
{
"content_hash": "6194fdf59aa430ecb9eccf0e623aa89e",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 78,
"avg_line_length": 23.41176470588235,
"alnum_prop": 0.7185929648241206,
"repo_name": "nick-allen/marten",
"id": "e231a6767fcb95c8608ab4fb1899b9f75b77811b",
"size": "1241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15140"
}
],
"symlink_target": ""
}
|
import errno
import functools
import logging
import os
import subprocess
import sys
import unittest
import unittest.mock
from copy import deepcopy
import pytest
from airflow import models
from airflow.jobs.backfill_job import BackfillJob
from airflow.utils.db import add_default_pool_if_not_exists
from airflow.utils.state import State
from airflow.utils.timezone import datetime
DEV_NULL = '/dev/null'
TEST_ROOT_FOLDER = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
TEST_DAG_FOLDER = os.path.join(TEST_ROOT_FOLDER, 'dags')
TEST_DAG_CORRUPTED_FOLDER = os.path.join(TEST_ROOT_FOLDER, 'dags_corrupted')
TEST_UTILS_FOLDER = os.path.join(TEST_ROOT_FOLDER, 'test_utils')
DEFAULT_DATE = datetime(2015, 1, 1)
TEST_USER = 'airflow_test_user'
logger = logging.getLogger(__name__)
def mock_custom_module_path(path: str):
"""
This decorator adds a path to sys.path to simulate running the current script with
the :envvar:`PYTHONPATH` environment variable set and sets the environment variable
:envvar:`PYTHONPATH` to change the module load directory for child scripts.
"""
def wrapper(func):
@functools.wraps(func)
def decorator(*args, **kwargs):
copy_sys_path = deepcopy(sys.path)
sys.path.append(path)
try:
with unittest.mock.patch.dict('os.environ', {'PYTHONPATH': path}):
return func(*args, **kwargs)
finally:
sys.path = copy_sys_path
return decorator
return wrapper
def grant_permissions():
airflow_home = os.environ['AIRFLOW_HOME']
subprocess.check_call(
'find "%s" -exec sudo chmod og+w {} +; sudo chmod og+rx /root' % airflow_home, shell=True
)
def revoke_permissions():
airflow_home = os.environ['AIRFLOW_HOME']
subprocess.check_call(
'find "%s" -exec sudo chmod og-w {} +; sudo chmod og-rx /root' % airflow_home, shell=True
)
def check_original_docker_image():
if not os.path.isfile('/.dockerenv') or os.environ.get('PYTHON_BASE_IMAGE') is None:
raise unittest.SkipTest(
"""Adding/removing a user as part of a test is very bad for host os
(especially if the user already existed to begin with on the OS), therefore we check if we run inside a
the official docker container and only allow to run the test there. This is done by checking /.dockerenv
file (always present inside container) and checking for PYTHON_BASE_IMAGE variable.
"""
)
def create_user():
try:
subprocess.check_output(['sudo', 'useradd', '-m', TEST_USER, '-g', str(os.getegid())])
except OSError as e:
if e.errno == errno.ENOENT:
raise unittest.SkipTest(
"The 'useradd' command did not exist so unable to test "
"impersonation; Skipping Test. These tests can only be run on a "
"linux host that supports 'useradd'."
)
else:
raise unittest.SkipTest(
"The 'useradd' command exited non-zero; Skipping tests. Does the "
"current user have permission to run 'useradd' without a password "
"prompt (check sudoers file)?"
)
@pytest.mark.quarantined
class TestImpersonation(unittest.TestCase):
def setUp(self):
check_original_docker_image()
grant_permissions()
add_default_pool_if_not_exists()
self.dagbag = models.DagBag(
dag_folder=TEST_DAG_FOLDER,
include_examples=False,
)
logger.info('Loaded DAGS:')
logger.info(self.dagbag.dagbag_report())
create_user()
def tearDown(self):
subprocess.check_output(['sudo', 'userdel', '-r', TEST_USER])
revoke_permissions()
def run_backfill(self, dag_id, task_id):
dag = self.dagbag.get_dag(dag_id)
dag.clear()
BackfillJob(dag=dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE).run()
ti = models.TaskInstance(task=dag.get_task(task_id), execution_date=DEFAULT_DATE)
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
def test_impersonation(self):
"""
Tests that impersonating a unix user works
"""
self.run_backfill('test_impersonation', 'test_impersonated_user')
def test_no_impersonation(self):
"""
If default_impersonation=None, tests that the job is run
as the current user (which will be a sudoer)
"""
self.run_backfill(
'test_no_impersonation',
'test_superuser',
)
@unittest.mock.patch.dict('os.environ', AIRFLOW__CORE__DEFAULT_IMPERSONATION=TEST_USER)
def test_default_impersonation(self):
"""
If default_impersonation=TEST_USER, tests that the job defaults
to running as TEST_USER for a test without run_as_user set
"""
self.run_backfill('test_default_impersonation', 'test_deelevated_user')
def test_impersonation_subdag(self):
"""
Tests that impersonation using a subdag correctly passes the right configuration
:return:
"""
self.run_backfill('impersonation_subdag', 'test_subdag_operation')
@pytest.mark.quarantined
class TestImpersonationWithCustomPythonPath(unittest.TestCase):
@mock_custom_module_path(TEST_UTILS_FOLDER)
def setUp(self):
check_original_docker_image()
grant_permissions()
add_default_pool_if_not_exists()
self.dagbag = models.DagBag(
dag_folder=TEST_DAG_CORRUPTED_FOLDER,
include_examples=False,
)
logger.info('Loaded DAGS:')
logger.info(self.dagbag.dagbag_report())
create_user()
def tearDown(self):
subprocess.check_output(['sudo', 'userdel', '-r', TEST_USER])
revoke_permissions()
def run_backfill(self, dag_id, task_id):
dag = self.dagbag.get_dag(dag_id)
dag.clear()
BackfillJob(dag=dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE).run()
ti = models.TaskInstance(task=dag.get_task(task_id), execution_date=DEFAULT_DATE)
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
@mock_custom_module_path(TEST_UTILS_FOLDER)
def test_impersonation_custom(self):
"""
Tests that impersonation using a unix user works with custom packages in
PYTHONPATH
"""
# PYTHONPATH is already set in script triggering tests
assert 'PYTHONPATH' in os.environ
self.run_backfill('impersonation_with_custom_pkg', 'exec_python_fn')
|
{
"content_hash": "d10718b0992791df441ab712c075c745",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 104,
"avg_line_length": 33.27638190954774,
"alnum_prop": 0.6411960132890365,
"repo_name": "airbnb/airflow",
"id": "686142ebcb8e95ac17ad36580b0250797b41dfd1",
"size": "7410",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/core/test_impersonation_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36374"
},
{
"name": "HTML",
"bytes": "99535"
},
{
"name": "JavaScript",
"bytes": "891618"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "796220"
},
{
"name": "Shell",
"bytes": "9040"
}
],
"symlink_target": ""
}
|
"""
test_django-watchman
--------------------
Tests for `django-watchman` views module.
"""
from __future__ import unicode_literals
import json
import sys
import unittest
from importlib import reload
from unittest.mock import patch
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core import mail
from django.test import TestCase as DjangoTestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from watchman import checks, views
class AuthenticatedUser(AnonymousUser):
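    """
    AnonymousUser stand-in whose ``is_authenticated`` is truthy both when read as
    a property and when called, so the login-required tests pass either way.
    """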
@property
def is_authenticated(self):
class CallableTrue(object):
def __call__(self, *args, **kwargs):
return True
def __bool__(self):
return True
__nonzero__ = __bool__
return CallableTrue()
# Initialize Django
django.setup()
# Silence MIDDLEWARE_CLASSES warning as this is not an actual Django project
settings.SILENCED_SYSTEM_CHECKS = ["1_7.W001"]
def reload_settings():
# Reload settings - and all dependent modules - from scratch
reload(sys.modules["watchman.settings"])
reload(sys.modules["watchman.decorators"])
reload(sys.modules["watchman.views"])
class TestWatchman(unittest.TestCase):
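    """Tests for the JSON ``status`` view: check selection, auth decorators and error codes."""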
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
def test_response_content_type_json(self):
request = RequestFactory().get("/")
response = views.status(request)
self.assertEqual(response["Content-Type"], "application/json")
def test_response_contains_expected_checks(self):
expected_checks = [
"caches",
"databases",
"storage",
]
request = RequestFactory().get("/")
response = views.status(request)
content = json.loads(response.content.decode("utf-8"))
self.assertCountEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = checks._check_database("foo")
self.assertFalse(response["foo"]["ok"])
self.assertEqual(
response["foo"]["error"], "The connection 'foo' doesn't exist."
)
def test_check_cache_handles_exception(self):
response = checks._check_cache("foo")
self.assertFalse(response["foo"]["ok"])
self.assertIn(
response["foo"]["error"],
"The connection 'foo' doesn't exist.",
)
def test_response_skipped_checks(self):
expected_checks = [
"caches",
"storage",
]
request = RequestFactory().get(
"/",
data={
"skip": "watchman.checks.databases",
},
)
response = views.status(request)
content = json.loads(response.content.decode("utf-8"))
self.assertCountEqual(expected_checks, content.keys())
def test_response_is_404_for_checked_and_skipped_check(self):
        # This is a bit of a weird one: if you explicitly include and skip the
        # same check, you get back a 404 because the two options cancel each
        # other out
request = RequestFactory().get(
"/",
data={
"check": "watchman.checks.email",
"skip": "watchman.checks.email",
},
)
response = views.status(request)
self.assertEqual(response.status_code, 404)
@patch("watchman.checks._check_databases")
def test_response_only_single_check(self, patched_check_databases):
patched_check_databases.return_value = []
request = RequestFactory().get(
"/",
data={
"check": "watchman.checks.databases",
},
)
response = views.status(request)
self.assertEqual(response.status_code, 200)
content = json.loads(response.content.decode("utf-8"))
self.assertCountEqual({"databases": []}, content)
def test_response_404_when_none_specified(self):
request = RequestFactory().get(
"/",
data={
"check": "",
},
)
response = views.status(request)
self.assertEqual(response.status_code, 404)
content = json.loads(response.content.decode("utf-8"))
self.assertCountEqual({"message": "No checks found", "error": 404}, content)
@override_settings(WATCHMAN_TOKEN="ABCDE")
@override_settings(WATCHMAN_AUTH_DECORATOR="watchman.decorators.token_required")
def test_login_not_required_with_get_param(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get(
"/",
data={
"watchman-token": "ABCDE",
},
)
response = views.status(request)
self.assertEqual(response.status_code, 200)
@override_settings(WATCHMAN_TOKEN="ABCDE")
def test_version_header_not_included_when_token_auth_fails(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get("/")
response = views.status(request)
self.assertEqual(response.status_code, 403)
self.assertFalse(response.has_header("X-Watchman-Version"))
@override_settings(WATCHMAN_TOKEN="ABCDE")
@override_settings(WATCHMAN_AUTH_DECORATOR="watchman.decorators.token_required")
def test_login_not_required_with_authorization_header(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get(
"/", HTTP_AUTHORIZATION='WATCHMAN-TOKEN Token="ABCDE"'
)
response = views.status(request)
self.assertEqual(response.status_code, 200)
@override_settings(WATCHMAN_TOKEN="123-456-ABCD")
@override_settings(WATCHMAN_AUTH_DECORATOR="watchman.decorators.token_required")
def test_login_not_required_with_authorization_header_dashes_in_token(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get(
"/", HTTP_AUTHORIZATION='WATCHMAN-TOKEN Token="123-456-ABCD"'
)
response = views.status(request)
self.assertEqual(response.status_code, 200)
@override_settings(WATCHMAN_TOKEN="ABCDE")
@override_settings(WATCHMAN_AUTH_DECORATOR="watchman.decorators.token_required")
def test_login_fails_with_invalid_get_param(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get(
"/",
data={
"watchman-token": "12345",
},
)
response = views.status(request)
self.assertEqual(response.status_code, 403)
@override_settings(WATCHMAN_TOKEN="ABCDE")
@override_settings(WATCHMAN_AUTH_DECORATOR="watchman.decorators.token_required")
def test_login_fails_with_invalid_authorization_header(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get(
"/", HTTP_AUTHORIZATION='WATCHMAN-TOKEN Token="12345"'
)
response = views.status(request)
self.assertEqual(response.status_code, 403)
@override_settings(
WATCHMAN_AUTH_DECORATOR="django.contrib.auth.decorators.login_required"
)
def test_response_when_login_required_is_redirect(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
request = RequestFactory().get("/")
request.user = AnonymousUser()
response = views.status(request)
self.assertEqual(response.status_code, 302)
@override_settings(
WATCHMAN_AUTH_DECORATOR="django.contrib.auth.decorators.login_required"
)
def test_response_when_login_required(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
request = RequestFactory().get("/")
request.user = AuthenticatedUser()
response = views.status(request)
self.assertEqual(response.status_code, 200)
def test_response_version_header_missing_by_default(self):
request = RequestFactory().get("/")
response = views.status(request)
self.assertFalse(response.has_header("X-Watchman-Version"))
@override_settings(EXPOSE_WATCHMAN_VERSION=True)
def test_response_version_header(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
request = RequestFactory().get("/")
response = views.status(request)
self.assertTrue(response.has_header("X-Watchman-Version"))
@patch("watchman.checks._check_databases")
@override_settings(WATCHMAN_ERROR_CODE=503)
def test_custom_error_code(self, patched_check_databases):
reload_settings()
# Fake a DB error, ensure we get our error code
patched_check_databases.return_value = [
{
"foo": {
"ok": False,
"error": "Fake DB Error",
"stacktrace": "Fake DB Stack Trace",
},
}
]
request = RequestFactory().get(
"/",
data={
"check": "watchman.checks.databases",
},
)
response = views.status(request)
self.assertEqual(response.status_code, 503)
@patch("watchman.checks._check_databases")
def test_default_error_code(self, patched_check_databases):
reload_settings()
# Fake a DB error, ensure we get our error code
patched_check_databases.return_value = [
{
"foo": {
"ok": False,
"error": "Fake DB Error",
"stacktrace": "Fake DB Stack Trace",
},
}
]
request = RequestFactory().get(
"/",
data={
"check": "watchman.checks.databases",
},
)
response = views.status(request)
self.assertEqual(response.status_code, 500)
class TestWatchmanDashboard(unittest.TestCase):
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
def test_dashboard_response_code(self):
request = RequestFactory().get("/")
response = views.dashboard(request)
self.assertEqual(response.status_code, 200)
def test_response_version_header_and_html_missing_by_default(self):
request = RequestFactory().get("/")
response = views.dashboard(request)
self.assertFalse(response.has_header("X-Watchman-Version"))
self.assertNotIn("Watchman version:", response.content.decode())
@override_settings(EXPOSE_WATCHMAN_VERSION=True)
def test_response_has_version_header_and_html(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
request = RequestFactory().get("/")
response = views.dashboard(request)
self.assertTrue(response.has_header("X-Watchman-Version"))
self.assertIn("Watchman version:", response.content.decode())
class TestPing(unittest.TestCase):
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
def test_returns_pong(self):
request = RequestFactory().get("/")
response = views.ping(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), "pong")
self.assertEqual(response["Content-Type"], "text/plain")
class TestBareStatus(unittest.TestCase):
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
def test_bare_status_success(self):
request = RequestFactory().get("/")
response = views.bare_status(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), "")
@patch("watchman.checks._check_databases")
@override_settings(WATCHMAN_ERROR_CODE=503)
def test_bare_status_error(self, patched_check_databases):
reload_settings()
# Fake a DB error, ensure we get our error code
patched_check_databases.return_value = [
{
"foo": {
"ok": False,
"error": "Fake DB Error",
"stacktrace": "Fake DB Stack Trace",
},
}
]
request = RequestFactory().get(
"/",
data={
"check": "watchman.checks.databases",
},
)
response = views.bare_status(request)
self.assertEqual(response.status_code, 503)
self.assertEqual(response.content.decode(), "")
@patch("watchman.checks._check_databases")
def test_bare_status_default_error(self, patched_check_databases):
reload_settings()
# Fake a DB error, ensure we get our error code
patched_check_databases.return_value = [
{
"foo": {
"ok": False,
"error": "Fake DB Error",
"stacktrace": "Fake DB Stack Trace",
},
}
]
request = RequestFactory().get(
"/",
data={
"check": "watchman.checks.databases",
},
)
response = views.bare_status(request)
self.assertEqual(response.status_code, 500)
self.assertEqual(response.content.decode(), "")
class TestEmailCheck(DjangoTestCase):
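    """Tests for the email check: recipients, extra headers and sender address."""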
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
    def test_email_with_default_recipient(self):
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_recipients = ["to@example.com"]
self.assertEqual(sent_email.to, expected_recipients)
@override_settings(WATCHMAN_EMAIL_RECIPIENTS=["custom@example.com"])
    def test_email_with_custom_recipient(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_recipients = ["custom@example.com"]
self.assertEqual(sent_email.to, expected_recipients)
@override_settings(WATCHMAN_EMAIL_RECIPIENTS=["to1@example.com", "to2@example.com"])
    def test_email_with_multiple_recipients(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_recipients = ["to1@example.com", "to2@example.com"]
self.assertEqual(sent_email.to, expected_recipients)
def test_email_check_with_default_headers(self):
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_headers = {
"X-DJANGO-WATCHMAN": True,
}
self.assertEqual(sent_email.extra_headers, expected_headers)
@override_settings(WATCHMAN_EMAIL_HEADERS={"foo": "bar"})
def test_email_check_with_custom_headers(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_headers = {
"X-DJANGO-WATCHMAN": True,
"foo": "bar",
}
self.assertEqual(sent_email.extra_headers, expected_headers)
    def test_email_with_default_sender(self):
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_sender = "watchman@example.com"
self.assertEqual(sent_email.from_email, expected_sender)
@override_settings(WATCHMAN_EMAIL_SENDER="custom@example.com")
    def test_email_with_custom_sender(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_sender = "custom@example.com"
self.assertEqual(sent_email.from_email, expected_sender)
|
{
"content_hash": "1e95fc7567b99a0d7ae80e9714159bd9",
"timestamp": "",
"source": "github",
"line_count": 512,
"max_line_length": 88,
"avg_line_length": 34.89453125,
"alnum_prop": 0.6143512817642449,
"repo_name": "mwarkentin/django-watchman",
"id": "9d83ab2b18f7c6bc9474117bbac6196bc686a887",
"size": "17891",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "190"
},
{
"name": "HTML",
"bytes": "5808"
},
{
"name": "Makefile",
"bytes": "1316"
},
{
"name": "Python",
"bytes": "52607"
}
],
"symlink_target": ""
}
|
"""Implementation of DPLL algorithm.
Further improvements: eliminate calls to pl_true, implement branching rules,
efficient unit propagation.
References
==========
* https://en.wikipedia.org/wiki/DPLL_algorithm
* https://www.researchgate.net/publication/242384772
"""
from ...core.compatibility import default_sort_key
from ..boolalg import _find_predicates, conjuncts, to_cnf, to_int_repr
def dpll_satisfiable(expr):
"""Check satisfiability of a propositional sentence.
It returns a model rather than True when it succeeds
>>> dpll_satisfiable(a & ~b)
{a: True, b: False}
>>> dpll_satisfiable(a & ~a)
False
"""
clauses = conjuncts(to_cnf(expr))
if False in clauses:
return False
symbols = sorted(_find_predicates(expr), key=default_sort_key)
symbols_int_repr = set(range(1, len(symbols) + 1))
clauses_int_repr = to_int_repr(clauses, symbols)
result = dpll(clauses_int_repr, symbols_int_repr, {})
if not result:
return result
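    # Map the integer-encoded model back onto the original symbols (keys are 1-based).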
output = {}
for key in result:
output.update({symbols[key - 1]: result[key]})
return output
def dpll(clauses, symbols, model):
"""Compute satisfiability in a partial model.
Clauses is an array of conjuncts. Arguments are expected to be
in integer representation
>>> dpll([{1}, {2}, {3}], {1, 2}, {3: False})
False
"""
# compute DP kernel
P, value = find_unit_clause(clauses, model)
while P:
model.update({P: value})
symbols.remove(P)
if not value:
P = -P
clauses = unit_propagate(clauses, P)
P, value = find_unit_clause(clauses, model)
P, value = find_pure_symbol(symbols, clauses)
while P:
model.update({P: value})
symbols.remove(P)
if not value:
P = -P
clauses = unit_propagate(clauses, P)
P, value = find_pure_symbol(symbols, clauses)
# end DP kernel
unknown_clauses = []
for c in clauses:
val = pl_true_int_repr(c, model)
if val is False:
return False
if val is not True:
unknown_clauses.append(c)
if not unknown_clauses:
return model
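    # Splitting rule: pick an arbitrary remaining symbol and recurse with it set
    # to True on one branch and False on the other.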
P = symbols.pop()
model_copy = model.copy()
model.update({P: True})
model_copy.update({P: False})
symbols_copy = symbols.copy()
return (dpll(unit_propagate(unknown_clauses, P), symbols, model) or
dpll(unit_propagate(unknown_clauses, -P), symbols_copy, model_copy))
def pl_true_int_repr(clause, model={}):
"""Lightweight version of pl_true.
Argument clause represents the set of args of an Or clause. This is used
    inside dpll; it is not meant to be used directly.
>>> pl_true_int_repr({1, 2}, {1: False})
>>> pl_true_int_repr({1, 2}, {1: False, 2: False})
False
"""
result = False
for lit in clause:
if lit < 0:
p = model.get(-lit)
if p is not None:
p = not p
else:
p = model.get(lit)
if p is True:
return True
elif p is None:
result = None
return result
def unit_propagate(clauses, s):
"""Returns an equivalent set of clauses.
If a set of clauses contains the unit clause l, the other clauses are
simplified by the application of the two following rules:
1. every clause containing l is removed
2. in every clause that contains ~l this literal is deleted
Arguments are expected to be in integer representation.
>>> unit_propagate([{1, 2}, {3, -2}, {2}], 2)
[{3}]
"""
negated = {-s}
return [clause - negated for clause in clauses if s not in clause]
def find_pure_symbol(symbols, unknown_clauses):
"""
Find a symbol and its value if it appears only as a positive literal
(or only as a negative) in clauses.
Arguments are expected to be in integer representation.
>>> find_pure_symbol({1, 2, 3}, [{1, -2}, {-2, -3}, {3, 1}])
(1, True)
"""
all_symbols = set().union(*unknown_clauses)
found_pos = all_symbols.intersection(symbols)
found_neg = all_symbols.intersection([-s for s in symbols])
for p in found_pos:
if -p not in found_neg:
return p, True
for p in found_neg:
if -p not in found_pos:
return -p, False
return None, None
def find_unit_clause(clauses, model):
"""Find a unit clause has only 1 variable that is not bound in the model.
Arguments are expected to be in integer representation.
>>> find_unit_clause([{1, 2, 3}, {2, -3}, {1, -2}], {1: True})
(2, False)
"""
bound = set(model) | {-sym for sym in model}
for clause in clauses:
unbound = clause - bound
if len(unbound) == 1:
p = unbound.pop()
if p < 0:
return -p, False
else:
return p, True
return None, None
|
{
"content_hash": "77d221a6d313ad384d2259e94cef8934",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 80,
"avg_line_length": 27.982954545454547,
"alnum_prop": 0.5981725888324874,
"repo_name": "skirpichev/omg",
"id": "bb14c6cd3878374098746ffc2f50c8c7687a2324",
"size": "4925",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "diofant/logic/algorithms/dpll.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10305079"
}
],
"symlink_target": ""
}
|
from unittest import TestCase
from .util import (
connect_db,
destroy_db
)
class ConnectionTestCase(TestCase):
def setUp(self):
self.connection = None
import django
django.setup()
def tearDown(self):
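        # Only clean up if the test actually opened a connection.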
        if self.connection is not None:
destroy_db(self.connection)
def test_connection(self):
self.connection = connect_db()
self.assertIsNotNone(self.connection)
class ConnectionCapabilitiesTestCase(TestCase):
def setUp(self):
self.connection = connect_db()
import django
django.setup()
def tearDown(self):
destroy_db(self.connection)
def test_create_cursor(self):
cursor = self.connection.create_cursor()
self.assertIsNotNone(cursor)
cursor2 = self.connection._cursor()
self.assertIsNotNone(cursor2)
def test_get_current_keyspace(self):
keyspace = self.connection.settings_dict.get('DEFAULT_KEYSPACE')
self.assertEqual(
keyspace,
self.connection.current_keyspace()
)
|
{
"content_hash": "196fd1033db22147064f6f127adfd134",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 72,
"avg_line_length": 22.583333333333332,
"alnum_prop": 0.6337638376383764,
"repo_name": "Knotis/djangocassandra",
"id": "d3ba5d410da8cd1d067bce47197324aa9870640f",
"size": "1084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_connection.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "141552"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
from datetime import datetime
from io import StringIO
import math
import operator
import re
import numpy as np
import pytest
from pandas.compat import IS64
from pandas.errors import InvalidIndexError
from pandas.util._test_decorators import async_mark
import pandas as pd
from pandas import (
CategoricalIndex,
DataFrame,
DatetimeIndex,
IntervalIndex,
PeriodIndex,
RangeIndex,
Series,
TimedeltaIndex,
date_range,
period_range,
)
import pandas._testing as tm
from pandas.core.api import (
Float64Index,
Int64Index,
NumericIndex,
UInt64Index,
)
from pandas.core.indexes.api import (
Index,
MultiIndex,
_get_combined_index,
ensure_index,
ensure_index_from_sequences,
)
from pandas.tests.indexes.common import Base
class TestIndex(Base):
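    """Tests for the plain object-dtype ``Index``; shared behaviour comes from the common ``Base`` suite."""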
_index_cls = Index
@pytest.fixture
def simple_index(self) -> Index:
return self._index_cls(list("abcde"))
def test_can_hold_identifiers(self, simple_index):
index = simple_index
key = index[0]
assert index._can_hold_identifiers_and_holds_name(key) is True
@pytest.mark.parametrize("index", ["datetime"], indirect=True)
def test_new_axis(self, index):
with tm.assert_produces_warning(FutureWarning):
# GH#30588 multi-dimensional indexing deprecated
new_index = index[None, :]
assert new_index.ndim == 2
assert isinstance(new_index, np.ndarray)
def test_constructor_regular(self, index):
tm.assert_contains_all(index, index)
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_constructor_casting(self, index):
# casting
arr = np.array(index)
new_index = Index(arr)
tm.assert_contains_all(arr, new_index)
tm.assert_index_equal(index, new_index)
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_constructor_copy(self, index):
arr = np.array(index)
new_index = Index(arr, copy=True, name="name")
assert isinstance(new_index, Index)
assert new_index.name == "name"
tm.assert_numpy_array_equal(arr, new_index.values)
arr[0] = "SOMEBIGLONGSTRING"
assert new_index[0] != "SOMEBIGLONGSTRING"
@pytest.mark.parametrize("cast_as_obj", [True, False])
@pytest.mark.parametrize(
"index",
[
date_range(
"2015-01-01 10:00",
freq="D",
periods=3,
tz="US/Eastern",
name="Green Eggs & Ham",
), # DTI with tz
date_range("2015-01-01 10:00", freq="D", periods=3), # DTI no tz
pd.timedelta_range("1 days", freq="D", periods=3), # td
period_range("2015-01-01", freq="D", periods=3), # period
],
)
def test_constructor_from_index_dtlike(self, cast_as_obj, index):
if cast_as_obj:
result = Index(index.astype(object))
else:
result = Index(index)
tm.assert_index_equal(result, index)
if isinstance(index, DatetimeIndex):
assert result.tz == index.tz
if cast_as_obj:
# GH#23524 check that Index(dti, dtype=object) does not
# incorrectly raise ValueError, and that nanoseconds are not
# dropped
index += pd.Timedelta(nanoseconds=50)
result = Index(index, dtype=object)
assert result.dtype == np.object_
assert list(result) == list(index)
@pytest.mark.parametrize(
"index,has_tz",
[
(
date_range("2015-01-01 10:00", freq="D", periods=3, tz="US/Eastern"),
True,
), # datetimetz
(pd.timedelta_range("1 days", freq="D", periods=3), False), # td
(period_range("2015-01-01", freq="D", periods=3), False), # period
],
)
def test_constructor_from_series_dtlike(self, index, has_tz):
result = Index(Series(index))
tm.assert_index_equal(result, index)
if has_tz:
assert result.tz == index.tz
def test_constructor_from_series_freq(self):
# GH 6273
# create from a series, passing a freq
dts = ["1-1-1990", "2-1-1990", "3-1-1990", "4-1-1990", "5-1-1990"]
expected = DatetimeIndex(dts, freq="MS")
s = Series(pd.to_datetime(dts))
result = DatetimeIndex(s, freq="MS")
tm.assert_index_equal(result, expected)
def test_constructor_from_frame_series_freq(self):
# GH 6273
# create from a series, passing a freq
dts = ["1-1-1990", "2-1-1990", "3-1-1990", "4-1-1990", "5-1-1990"]
expected = DatetimeIndex(dts, freq="MS")
df = DataFrame(np.random.rand(5, 3))
df["date"] = dts
result = DatetimeIndex(df["date"], freq="MS")
assert df["date"].dtype == object
expected.name = "date"
tm.assert_index_equal(result, expected)
expected = Series(dts, name="date")
tm.assert_series_equal(df["date"], expected)
# GH 6274
# infer freq of same
freq = pd.infer_freq(df["date"])
assert freq == "MS"
def test_constructor_int_dtype_nan(self):
# see gh-15187
data = [np.nan]
expected = Float64Index(data)
result = Index(data, dtype="float")
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"klass,dtype,na_val",
[
(Float64Index, np.float64, np.nan),
(DatetimeIndex, "datetime64[ns]", pd.NaT),
],
)
def test_index_ctor_infer_nan_nat(self, klass, dtype, na_val):
# GH 13467
na_list = [na_val, na_val]
expected = klass(na_list)
assert expected.dtype == dtype
result = Index(na_list)
tm.assert_index_equal(result, expected)
result = Index(np.array(na_list))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"vals,dtype",
[
([1, 2, 3, 4, 5], "int"),
([1.1, np.nan, 2.2, 3.0], "float"),
(["A", "B", "C", np.nan], "obj"),
],
)
def test_constructor_simple_new(self, vals, dtype):
index = Index(vals, name=dtype)
result = index._simple_new(index.values, dtype)
tm.assert_index_equal(result, index)
@pytest.mark.filterwarnings("ignore:Passing keywords other:FutureWarning")
@pytest.mark.parametrize("attr", ["values", "asi8"])
@pytest.mark.parametrize("klass", [Index, DatetimeIndex])
def test_constructor_dtypes_datetime(self, tz_naive_fixture, attr, klass):
# Test constructing with a datetimetz dtype
# .values produces numpy datetimes, so these are considered naive
# .asi8 produces integers, so these are considered epoch timestamps
# ^the above will be true in a later version. Right now we `.view`
# the i8 values as NS_DTYPE, effectively treating them as wall times.
index = date_range("2011-01-01", periods=5)
arg = getattr(index, attr)
index = index.tz_localize(tz_naive_fixture)
dtype = index.dtype
warn = None if tz_naive_fixture is None else FutureWarning
# astype dt64 -> dt64tz deprecated
if attr == "asi8":
result = DatetimeIndex(arg).tz_localize(tz_naive_fixture)
else:
result = klass(arg, tz=tz_naive_fixture)
tm.assert_index_equal(result, index)
if attr == "asi8":
with tm.assert_produces_warning(warn):
result = DatetimeIndex(arg).astype(dtype)
else:
result = klass(arg, dtype=dtype)
tm.assert_index_equal(result, index)
if attr == "asi8":
result = DatetimeIndex(list(arg)).tz_localize(tz_naive_fixture)
else:
result = klass(list(arg), tz=tz_naive_fixture)
tm.assert_index_equal(result, index)
if attr == "asi8":
with tm.assert_produces_warning(warn):
result = DatetimeIndex(list(arg)).astype(dtype)
else:
result = klass(list(arg), dtype=dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("attr", ["values", "asi8"])
@pytest.mark.parametrize("klass", [Index, TimedeltaIndex])
def test_constructor_dtypes_timedelta(self, attr, klass):
index = pd.timedelta_range("1 days", periods=5)
index = index._with_freq(None) # won't be preserved by constructors
dtype = index.dtype
values = getattr(index, attr)
result = klass(values, dtype=dtype)
tm.assert_index_equal(result, index)
result = klass(list(values), dtype=dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("value", [[], iter([]), (_ for _ in [])])
@pytest.mark.parametrize(
"klass",
[
Index,
Float64Index,
Int64Index,
UInt64Index,
CategoricalIndex,
DatetimeIndex,
TimedeltaIndex,
],
)
def test_constructor_empty(self, value, klass):
empty = klass(value)
assert isinstance(empty, klass)
assert not len(empty)
@pytest.mark.parametrize(
"empty,klass",
[
(PeriodIndex([], freq="B"), PeriodIndex),
(PeriodIndex(iter([]), freq="B"), PeriodIndex),
(PeriodIndex((_ for _ in []), freq="B"), PeriodIndex),
(RangeIndex(step=1), RangeIndex),
(MultiIndex(levels=[[1, 2], ["blue", "red"]], codes=[[], []]), MultiIndex),
],
)
def test_constructor_empty_special(self, empty, klass):
assert isinstance(empty, klass)
assert not len(empty)
@pytest.mark.parametrize(
"index",
[
"datetime",
"float",
"int",
"period",
"range",
"repeats",
"timedelta",
"tuples",
"uint",
],
indirect=True,
)
def test_view_with_args(self, index):
index.view("i8")
@pytest.mark.parametrize(
"index",
[
"string",
pytest.param("categorical", marks=pytest.mark.xfail(reason="gh-25464")),
"bool-object",
"bool-dtype",
"empty",
],
indirect=True,
)
def test_view_with_args_object_array_raises(self, index):
if index.dtype == bool:
msg = "When changing to a larger dtype"
with pytest.raises(ValueError, match=msg):
index.view("i8")
else:
msg = "Cannot change data-type for object array"
with pytest.raises(TypeError, match=msg):
index.view("i8")
@pytest.mark.parametrize("index", ["int", "range"], indirect=True)
def test_astype(self, index):
casted = index.astype("i8")
# it works!
casted.get_loc(5)
# pass on name
index.name = "foobar"
casted = index.astype("i8")
assert casted.name == "foobar"
def test_equals_object(self):
# same
assert Index(["a", "b", "c"]).equals(Index(["a", "b", "c"]))
@pytest.mark.parametrize(
"comp", [Index(["a", "b"]), Index(["a", "b", "d"]), ["a", "b", "c"]]
)
def test_not_equals_object(self, comp):
assert not Index(["a", "b", "c"]).equals(comp)
def test_identical(self):
# index
i1 = Index(["a", "b", "c"])
i2 = Index(["a", "b", "c"])
assert i1.identical(i2)
i1 = i1.rename("foo")
assert i1.equals(i2)
assert not i1.identical(i2)
i2 = i2.rename("foo")
assert i1.identical(i2)
i3 = Index([("a", "a"), ("a", "b"), ("b", "a")])
i4 = Index([("a", "a"), ("a", "b"), ("b", "a")], tupleize_cols=False)
assert not i3.identical(i4)
def test_is_(self):
ind = Index(range(10))
assert ind.is_(ind)
assert ind.is_(ind.view().view().view().view())
assert not ind.is_(Index(range(10)))
assert not ind.is_(ind.copy())
assert not ind.is_(ind.copy(deep=False))
assert not ind.is_(ind[:])
assert not ind.is_(np.array(range(10)))
# quasi-implementation dependent
assert ind.is_(ind.view())
ind2 = ind.view()
ind2.name = "bob"
assert ind.is_(ind2)
assert ind2.is_(ind)
# doesn't matter if Indices are *actually* views of underlying data,
assert not ind.is_(Index(ind.values))
arr = np.array(range(1, 11))
ind1 = Index(arr, copy=False)
ind2 = Index(arr, copy=False)
assert not ind1.is_(ind2)
def test_asof_numeric_vs_bool_raises(self):
left = Index([1, 2, 3])
right = Index([True, False], dtype=object)
msg = "Cannot compare dtypes int64 and bool"
with pytest.raises(TypeError, match=msg):
left.asof(right[0])
# TODO: should right.asof(left[0]) also raise?
with pytest.raises(InvalidIndexError, match=re.escape(str(right))):
left.asof(right)
with pytest.raises(InvalidIndexError, match=re.escape(str(left))):
right.asof(left)
@pytest.mark.parametrize("index", ["string"], indirect=True)
def test_booleanindex(self, index):
bool_index = np.ones(len(index), dtype=bool)
bool_index[5:30:2] = False
sub_index = index[bool_index]
for i, val in enumerate(sub_index):
assert sub_index.get_loc(val) == i
sub_index = index[list(bool_index)]
for i, val in enumerate(sub_index):
assert sub_index.get_loc(val) == i
def test_fancy(self, simple_index):
index = simple_index
sl = index[[1, 2, 3]]
for i in sl:
assert i == sl[sl.get_loc(i)]
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
@pytest.mark.parametrize("dtype", [np.int_, np.bool_])
def test_empty_fancy(self, index, dtype):
empty_arr = np.array([], dtype=dtype)
empty_index = type(index)([])
assert index[[]].identical(empty_index)
assert index[empty_arr].identical(empty_index)
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
def test_empty_fancy_raises(self, index):
# DatetimeIndex is excluded, because it overrides getitem and should
# be tested separately.
empty_farr = np.array([], dtype=np.float_)
empty_index = type(index)([])
assert index[[]].identical(empty_index)
        # np.ndarray only accepts ndarrays of int & bool dtype, and so should Index
msg = r"arrays used as indices must be of integer \(or boolean\) type"
with pytest.raises(IndexError, match=msg):
index[empty_farr]
def test_union_dt_as_obj(self, simple_index):
        # TODO: Replace with fixture result
index = simple_index
date_index = date_range("2019-01-01", periods=10)
first_cat = index.union(date_index)
second_cat = index.union(index)
appended = np.append(index, date_index.astype("O"))
assert tm.equalContents(first_cat, appended)
assert tm.equalContents(second_cat, index)
tm.assert_contains_all(index, first_cat)
tm.assert_contains_all(index, second_cat)
tm.assert_contains_all(date_index, first_cat)
def test_map_with_tuples(self):
# GH 12766
# Test that returning a single tuple from an Index
# returns an Index.
index = tm.makeIntIndex(3)
result = tm.makeIntIndex(3).map(lambda x: (x,))
expected = Index([(i,) for i in index])
tm.assert_index_equal(result, expected)
# Test that returning a tuple from a map of a single index
# returns a MultiIndex object.
result = index.map(lambda x: (x, x == 1))
expected = MultiIndex.from_tuples([(i, i == 1) for i in index])
tm.assert_index_equal(result, expected)
def test_map_with_tuples_mi(self):
# Test that returning a single object from a MultiIndex
# returns an Index.
first_level = ["foo", "bar", "baz"]
multi_index = MultiIndex.from_tuples(zip(first_level, [1, 2, 3]))
reduced_index = multi_index.map(lambda x: x[0])
tm.assert_index_equal(reduced_index, Index(first_level))
@pytest.mark.parametrize(
"attr", ["makeDateIndex", "makePeriodIndex", "makeTimedeltaIndex"]
)
def test_map_tseries_indices_return_index(self, attr):
index = getattr(tm, attr)(10)
expected = Index([1] * 10)
result = index.map(lambda x: 1)
tm.assert_index_equal(expected, result)
def test_map_tseries_indices_accsr_return_index(self):
date_index = tm.makeDateIndex(24, freq="h", name="hourly")
expected = Int64Index(range(24), name="hourly")
tm.assert_index_equal(expected, date_index.map(lambda x: x.hour), exact=True)
@pytest.mark.parametrize(
"mapper",
[
lambda values, index: {i: e for e, i in zip(values, index)},
lambda values, index: Series(values, index),
],
)
def test_map_dictlike_simple(self, mapper):
# GH 12756
expected = Index(["foo", "bar", "baz"])
index = tm.makeIntIndex(3)
result = index.map(mapper(expected.values, index))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"mapper",
[
lambda values, index: {i: e for e, i in zip(values, index)},
lambda values, index: Series(values, index),
],
)
def test_map_dictlike(self, index, mapper, request):
# GH 12756
if isinstance(index, CategoricalIndex):
# Tested in test_categorical
return
elif not index.is_unique:
# Cannot map duplicated index
return
rng = np.arange(len(index), 0, -1)
if index.empty:
# to match proper result coercion for uints
expected = Index([])
elif index._is_backward_compat_public_numeric_index:
expected = index._constructor(rng, dtype=index.dtype)
elif type(index) is Index and index.dtype != object:
# i.e. EA-backed, for now just Nullable
expected = Index(rng, dtype=index.dtype)
elif index.dtype.kind == "u":
expected = Index(rng, dtype=index.dtype)
else:
expected = Index(rng)
result = index.map(mapper(expected, index))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"mapper",
[Series(["foo", 2.0, "baz"], index=[0, 2, -1]), {0: "foo", 2: 2.0, -1: "baz"}],
)
def test_map_with_non_function_missing_values(self, mapper):
# GH 12756
expected = Index([2.0, np.nan, "foo"])
result = Index([2, 1, 0]).map(mapper)
tm.assert_index_equal(expected, result)
def test_map_na_exclusion(self):
index = Index([1.5, np.nan, 3, np.nan, 5])
result = index.map(lambda x: x * 2, na_action="ignore")
expected = index * 2
tm.assert_index_equal(result, expected)
def test_map_defaultdict(self):
index = Index([1, 2, 3])
default_dict = defaultdict(lambda: "blank")
default_dict[1] = "stuff"
result = index.map(default_dict)
expected = Index(["stuff", "blank", "blank"])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("name,expected", [("foo", "foo"), ("bar", None)])
def test_append_empty_preserve_name(self, name, expected):
left = Index([], name="foo")
right = Index([1, 2, 3], name=name)
result = left.append(right)
assert result.name == expected
@pytest.mark.parametrize(
"index, expected",
[
("string", False),
("bool-object", False),
("bool-dtype", False),
("categorical", False),
("int", True),
("datetime", False),
("float", True),
],
indirect=["index"],
)
def test_is_numeric(self, index, expected):
assert index.is_numeric() is expected
@pytest.mark.parametrize(
"index, expected",
[
("string", True),
("bool-object", True),
("bool-dtype", False),
("categorical", False),
("int", False),
("datetime", False),
("float", False),
],
indirect=["index"],
)
def test_is_object(self, index, expected):
assert index.is_object() is expected
@pytest.mark.parametrize(
"index, expected",
[
("string", False),
("bool-object", False),
("bool-dtype", False),
("categorical", False),
("int", False),
("datetime", True),
("float", False),
],
indirect=["index"],
)
def test_is_all_dates(self, index, expected):
with tm.assert_produces_warning(FutureWarning):
assert index.is_all_dates is expected
def test_summary(self, index):
index._summary()
def test_format_bug(self):
# GH 14626
        # windows has different precision on datetime.datetime.now (it doesn't
        # include microseconds); since the default Timestamp repr shows these
        # but Index formatting does not, we skip that case
now = datetime.now()
if not str(now).endswith("000"):
index = Index([now])
formatted = index.format()
expected = [str(index[0])]
assert formatted == expected
Index([]).format()
@pytest.mark.parametrize("vals", [[1, 2.0 + 3.0j, 4.0], ["a", "b", "c"]])
def test_format_missing(self, vals, nulls_fixture):
# 2845
vals = list(vals) # Copy for each iteration
vals.append(nulls_fixture)
index = Index(vals, dtype=object)
# TODO: case with complex dtype?
formatted = index.format()
null_repr = "NaN" if isinstance(nulls_fixture, float) else str(nulls_fixture)
expected = [str(index[0]), str(index[1]), str(index[2]), null_repr]
assert formatted == expected
assert index[3] is nulls_fixture
@pytest.mark.parametrize("op", ["any", "all"])
def test_logical_compat(self, op, simple_index):
index = simple_index
assert getattr(index, op)() == getattr(index.values, op)()
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
def test_drop_by_str_label(self, index):
n = len(index)
drop = index[list(range(5, 10))]
dropped = index.drop(drop)
expected = index[list(range(5)) + list(range(10, n))]
tm.assert_index_equal(dropped, expected)
dropped = index.drop(index[0])
expected = index[1:]
tm.assert_index_equal(dropped, expected)
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
@pytest.mark.parametrize("keys", [["foo", "bar"], ["1", "bar"]])
def test_drop_by_str_label_raises_missing_keys(self, index, keys):
with pytest.raises(KeyError, match=""):
index.drop(keys)
@pytest.mark.parametrize("index", ["string", "int", "float"], indirect=True)
def test_drop_by_str_label_errors_ignore(self, index):
n = len(index)
drop = index[list(range(5, 10))]
mixed = drop.tolist() + ["foo"]
dropped = index.drop(mixed, errors="ignore")
expected = index[list(range(5)) + list(range(10, n))]
tm.assert_index_equal(dropped, expected)
dropped = index.drop(["foo", "bar"], errors="ignore")
expected = index[list(range(n))]
tm.assert_index_equal(dropped, expected)
def test_drop_by_numeric_label_loc(self):
# TODO: Parametrize numeric and str tests after self.strIndex fixture
index = Index([1, 2, 3])
dropped = index.drop(1)
expected = Index([2, 3])
tm.assert_index_equal(dropped, expected)
def test_drop_by_numeric_label_raises_missing_keys(self):
index = Index([1, 2, 3])
with pytest.raises(KeyError, match=""):
index.drop([3, 4])
@pytest.mark.parametrize(
"key,expected", [(4, Index([1, 2, 3])), ([3, 4, 5], Index([1, 2]))]
)
def test_drop_by_numeric_label_errors_ignore(self, key, expected):
index = Index([1, 2, 3])
dropped = index.drop(key, errors="ignore")
tm.assert_index_equal(dropped, expected)
@pytest.mark.parametrize(
"values",
[["a", "b", ("c", "d")], ["a", ("c", "d"), "b"], [("c", "d"), "a", "b"]],
)
@pytest.mark.parametrize("to_drop", [[("c", "d"), "a"], ["a", ("c", "d")]])
def test_drop_tuple(self, values, to_drop):
# GH 18304
index = Index(values)
expected = Index(["b"])
result = index.drop(to_drop)
tm.assert_index_equal(result, expected)
removed = index.drop(to_drop[0])
for drop_me in to_drop[1], [to_drop[1]]:
result = removed.drop(drop_me)
tm.assert_index_equal(result, expected)
removed = index.drop(to_drop[1])
msg = rf"\"\[{re.escape(to_drop[1].__repr__())}\] not found in axis\""
for drop_me in to_drop[1], [to_drop[1]]:
with pytest.raises(KeyError, match=msg):
removed.drop(drop_me)
def test_drop_with_duplicates_in_index(self, index):
# GH38051
if len(index) == 0 or isinstance(index, MultiIndex):
return
if isinstance(index, IntervalIndex) and not IS64:
pytest.skip("Cannot test IntervalIndex with int64 dtype on 32 bit platform")
index = index.unique().repeat(2)
expected = index[2:]
result = index.drop(index[0])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"attr",
[
"is_monotonic_increasing",
"is_monotonic_decreasing",
"_is_strictly_monotonic_increasing",
"_is_strictly_monotonic_decreasing",
],
)
def test_is_monotonic_incomparable(self, attr):
index = Index([5, datetime.now(), 7])
assert not getattr(index, attr)
def test_set_value_deprecated(self, simple_index):
# GH 28621
idx = simple_index
arr = np.array([1, 2, 3])
with tm.assert_produces_warning(FutureWarning):
idx.set_value(arr, idx[1], 80)
assert arr[1] == 80
@pytest.mark.parametrize("values", [["foo", "bar", "quux"], {"foo", "bar", "quux"}])
@pytest.mark.parametrize(
"index,expected",
[
(Index(["qux", "baz", "foo", "bar"]), np.array([False, False, True, True])),
(Index([]), np.array([], dtype=bool)), # empty
],
)
def test_isin(self, values, index, expected):
result = index.isin(values)
tm.assert_numpy_array_equal(result, expected)
def test_isin_nan_common_object(self, nulls_fixture, nulls_fixture2):
# Test cartesian product of null fixtures and ensure that we don't
# mangle the various types (save a corner case with PyPy)
# all nans are the same
if (
isinstance(nulls_fixture, float)
and isinstance(nulls_fixture2, float)
and math.isnan(nulls_fixture)
and math.isnan(nulls_fixture2)
):
tm.assert_numpy_array_equal(
Index(["a", nulls_fixture]).isin([nulls_fixture2]),
np.array([False, True]),
)
elif nulls_fixture is nulls_fixture2: # should preserve NA type
tm.assert_numpy_array_equal(
Index(["a", nulls_fixture]).isin([nulls_fixture2]),
np.array([False, True]),
)
else:
tm.assert_numpy_array_equal(
Index(["a", nulls_fixture]).isin([nulls_fixture2]),
np.array([False, False]),
)
def test_isin_nan_common_float64(self, nulls_fixture):
if nulls_fixture is pd.NaT or nulls_fixture is pd.NA:
# Check 1) that we cannot construct a Float64Index with this value
# and 2) that with an NaN we do not have .isin(nulls_fixture)
msg = "data is not compatible with Float64Index"
with pytest.raises(ValueError, match=msg):
Float64Index([1.0, nulls_fixture])
idx = Float64Index([1.0, np.nan])
assert not idx.isin([nulls_fixture]).any()
return
idx = Float64Index([1.0, nulls_fixture])
res = idx.isin([np.nan])
tm.assert_numpy_array_equal(res, np.array([False, True]))
# we cannot compare NaT with NaN
res = idx.isin([pd.NaT])
tm.assert_numpy_array_equal(res, np.array([False, False]))
@pytest.mark.parametrize("level", [0, -1])
@pytest.mark.parametrize(
"index",
[
Index(["qux", "baz", "foo", "bar"]),
# Float64Index overrides isin, so must be checked separately
Float64Index([1.0, 2.0, 3.0, 4.0]),
],
)
def test_isin_level_kwarg(self, level, index):
values = index.tolist()[-2:] + ["nonexisting"]
expected = np.array([False, False, True, True])
tm.assert_numpy_array_equal(expected, index.isin(values, level=level))
index.name = "foobar"
tm.assert_numpy_array_equal(expected, index.isin(values, level="foobar"))
def test_isin_level_kwarg_bad_level_raises(self, index):
for level in [10, index.nlevels, -(index.nlevels + 1)]:
with pytest.raises(IndexError, match="Too many levels"):
index.isin([], level=level)
@pytest.mark.parametrize("label", [1.0, "foobar", "xyzzy", np.nan])
def test_isin_level_kwarg_bad_label_raises(self, label, index):
if isinstance(index, MultiIndex):
index = index.rename(["foo", "bar"] + index.names[2:])
msg = f"'Level {label} not found'"
else:
index = index.rename("foo")
msg = rf"Requested level \({label}\) does not match index name \(foo\)"
with pytest.raises(KeyError, match=msg):
index.isin([], level=label)
@pytest.mark.parametrize("empty", [[], Series(dtype=object), np.array([])])
def test_isin_empty(self, empty):
# see gh-16991
index = Index(["a", "b"])
expected = np.array([False, False])
result = index.isin(empty)
tm.assert_numpy_array_equal(expected, result)
@pytest.mark.parametrize(
"values",
[
[1, 2, 3, 4],
[1.0, 2.0, 3.0, 4.0],
[True, True, True, True],
["foo", "bar", "baz", "qux"],
date_range("2018-01-01", freq="D", periods=4),
],
)
def test_boolean_cmp(self, values):
index = Index(values)
result = index == values
expected = np.array([True, True, True, True], dtype=bool)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("index", ["string"], indirect=True)
@pytest.mark.parametrize("name,level", [(None, 0), ("a", "a")])
def test_get_level_values(self, index, name, level):
expected = index.copy()
if name:
expected.name = name
result = expected.get_level_values(level)
tm.assert_index_equal(result, expected)
def test_slice_keep_name(self):
index = Index(["a", "b"], name="asdf")
assert index.name == index[1:].name
@pytest.mark.parametrize(
"index",
["string", "datetime", "int", "uint", "float"],
indirect=True,
)
def test_join_self(self, index, join_type):
joined = index.join(index, how=join_type)
assert index is joined
@pytest.mark.parametrize("method", ["strip", "rstrip", "lstrip"])
def test_str_attribute(self, method):
# GH9068
index = Index([" jack", "jill ", " jesse ", "frank"])
expected = Index([getattr(str, method)(x) for x in index.values])
result = getattr(index.str, method)()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"index",
[
Index(range(5)),
tm.makeDateIndex(10),
MultiIndex.from_tuples([("foo", "1"), ("bar", "3")]),
period_range(start="2000", end="2010", freq="A"),
],
)
def test_str_attribute_raises(self, index):
with pytest.raises(AttributeError, match="only use .str accessor"):
index.str.repeat(2)
@pytest.mark.parametrize(
"expand,expected",
[
(None, Index([["a", "b", "c"], ["d", "e"], ["f"]])),
(False, Index([["a", "b", "c"], ["d", "e"], ["f"]])),
(
True,
MultiIndex.from_tuples(
[("a", "b", "c"), ("d", "e", np.nan), ("f", np.nan, np.nan)]
),
),
],
)
def test_str_split(self, expand, expected):
index = Index(["a b c", "d e", "f"])
if expand is not None:
result = index.str.split(expand=expand)
else:
result = index.str.split()
tm.assert_index_equal(result, expected)
def test_str_bool_return(self):
# test boolean case, should return np.array instead of boolean Index
index = Index(["a1", "a2", "b1", "b2"])
result = index.str.startswith("a")
expected = np.array([True, True, False, False])
tm.assert_numpy_array_equal(result, expected)
assert isinstance(result, np.ndarray)
def test_str_bool_series_indexing(self):
index = Index(["a1", "a2", "b1", "b2"])
s = Series(range(4), index=index)
result = s[s.index.str.startswith("a")]
expected = Series(range(2), index=["a1", "a2"])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"index,expected", [(Index(list("abcd")), True), (Index(range(4)), False)]
)
def test_tab_completion(self, index, expected):
# GH 9910
result = "str" in dir(index)
assert result == expected
def test_indexing_doesnt_change_class(self):
index = Index([1, 2, 3, "a", "b", "c"])
assert index[1:3].identical(Index([2, 3], dtype=np.object_))
assert index[[0, 1]].identical(Index([1, 2], dtype=np.object_))
def test_outer_join_sort(self):
left_index = Index(np.random.permutation(15))
right_index = tm.makeDateIndex(10)
with tm.assert_produces_warning(RuntimeWarning):
result = left_index.join(right_index, how="outer")
# right_index in this case because DatetimeIndex has join precedence
# over Int64Index
with tm.assert_produces_warning(RuntimeWarning):
expected = right_index.astype(object).union(left_index.astype(object))
tm.assert_index_equal(result, expected)
def test_take_fill_value(self):
# GH 12631
index = Index(list("ABC"), name="xxx")
result = index.take(np.array([1, 0, -1]))
expected = Index(list("BAC"), name="xxx")
tm.assert_index_equal(result, expected)
# fill_value
result = index.take(np.array([1, 0, -1]), fill_value=True)
expected = Index(["B", "A", np.nan], name="xxx")
tm.assert_index_equal(result, expected)
# allow_fill=False
result = index.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True)
expected = Index(["B", "A", "C"], name="xxx")
tm.assert_index_equal(result, expected)
def test_take_fill_value_none_raises(self):
index = Index(list("ABC"), name="xxx")
msg = (
"When allow_fill=True and fill_value is not None, "
"all indices must be >= -1"
)
with pytest.raises(ValueError, match=msg):
index.take(np.array([1, 0, -2]), fill_value=True)
with pytest.raises(ValueError, match=msg):
index.take(np.array([1, 0, -5]), fill_value=True)
def test_take_bad_bounds_raises(self):
index = Index(list("ABC"), name="xxx")
with pytest.raises(IndexError, match="out of bounds"):
index.take(np.array([1, -5]))
@pytest.mark.parametrize("name", [None, "foobar"])
@pytest.mark.parametrize(
"labels",
[
[],
np.array([]),
["A", "B", "C"],
["C", "B", "A"],
np.array(["A", "B", "C"]),
np.array(["C", "B", "A"]),
# Must preserve name even if dtype changes
date_range("20130101", periods=3).values,
date_range("20130101", periods=3).tolist(),
],
)
def test_reindex_preserves_name_if_target_is_list_or_ndarray(self, name, labels):
# GH6552
index = Index([0, 1, 2])
index.name = name
assert index.reindex(labels)[0].name == name
@pytest.mark.parametrize("labels", [[], np.array([]), np.array([], dtype=np.int64)])
def test_reindex_preserves_type_if_target_is_empty_list_or_array(self, labels):
# GH7774
index = Index(list("abc"))
assert index.reindex(labels)[0].dtype.type == np.object_
@pytest.mark.parametrize(
"labels,dtype",
[
(Int64Index([]), np.int64),
(Float64Index([]), np.float64),
(DatetimeIndex([]), np.datetime64),
],
)
def test_reindex_doesnt_preserve_type_if_target_is_empty_index(self, labels, dtype):
# GH7774
index = Index(list("abc"))
assert index.reindex(labels)[0].dtype.type == dtype
def test_reindex_no_type_preserve_target_empty_mi(self):
index = Index(list("abc"))
result = index.reindex(
MultiIndex([Int64Index([]), Float64Index([])], [[], []])
)[0]
assert result.levels[0].dtype.type == np.int64
assert result.levels[1].dtype.type == np.float64
def test_reindex_ignoring_level(self):
# GH#35132
idx = Index([1, 2, 3], name="x")
idx2 = Index([1, 2, 3, 4], name="x")
expected = Index([1, 2, 3, 4], name="x")
result, _ = idx.reindex(idx2, level="x")
tm.assert_index_equal(result, expected)
def test_groupby(self):
index = Index(range(5))
result = index.groupby(np.array([1, 1, 2, 2, 2]))
expected = {1: Index([0, 1]), 2: Index([2, 3, 4])}
tm.assert_dict_equal(result, expected)
@pytest.mark.parametrize(
"mi,expected",
[
(MultiIndex.from_tuples([(1, 2), (4, 5)]), np.array([True, True])),
(MultiIndex.from_tuples([(1, 2), (4, 6)]), np.array([True, False])),
],
)
def test_equals_op_multiindex(self, mi, expected):
# GH9785
# test comparisons of multiindex
df = pd.read_csv(StringIO("a,b,c\n1,2,3\n4,5,6"), index_col=[0, 1])
result = df.index == mi
tm.assert_numpy_array_equal(result, expected)
def test_equals_op_multiindex_identify(self):
df = pd.read_csv(StringIO("a,b,c\n1,2,3\n4,5,6"), index_col=[0, 1])
result = df.index == df.index
expected = np.array([True, True])
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize(
"index",
[
MultiIndex.from_tuples([(1, 2), (4, 5), (8, 9)]),
Index(["foo", "bar", "baz"]),
],
)
def test_equals_op_mismatched_multiindex_raises(self, index):
df = pd.read_csv(StringIO("a,b,c\n1,2,3\n4,5,6"), index_col=[0, 1])
with pytest.raises(ValueError, match="Lengths must match"):
df.index == index
def test_equals_op_index_vs_mi_same_length(self):
mi = MultiIndex.from_tuples([(1, 2), (4, 5), (8, 9)])
index = Index(["foo", "bar", "baz"])
result = mi == index
expected = np.array([False, False, False])
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("dt_conv", [pd.to_datetime, pd.to_timedelta])
def test_dt_conversion_preserves_name(self, dt_conv):
# GH 10875
index = Index(["01:02:03", "01:02:04"], name="label")
assert index.name == dt_conv(index).name
def test_cached_properties_not_settable(self):
index = Index([1, 2, 3])
with pytest.raises(AttributeError, match="Can't set attribute"):
index.is_unique = False
@async_mark()
async def test_tab_complete_warning(self, ip):
# https://github.com/pandas-dev/pandas/issues/16409
pytest.importorskip("IPython", minversion="6.0.0")
from IPython.core.completer import provisionalcompleter
code = "import pandas as pd; idx = pd.Index([1, 2])"
await ip.run_code(code)
# GH 31324 newer jedi version raises Deprecation warning;
# appears resolved 2021-02-02
with tm.assert_produces_warning(None):
with provisionalcompleter("ignore"):
list(ip.Completer.completions("idx.", 4))
def test_contains_method_removed(self, index):
# GH#30103 method removed for all types except IntervalIndex
if isinstance(index, IntervalIndex):
index.contains(1)
else:
msg = f"'{type(index).__name__}' object has no attribute 'contains'"
with pytest.raises(AttributeError, match=msg):
index.contains(1)
def test_sortlevel(self):
index = Index([5, 4, 3, 2, 1])
with pytest.raises(Exception, match="ascending must be a single bool value or"):
index.sortlevel(ascending="True")
with pytest.raises(
Exception, match="ascending must be a list of bool values of length 1"
):
index.sortlevel(ascending=[True, True])
with pytest.raises(Exception, match="ascending must be a bool value"):
index.sortlevel(ascending=["True"])
expected = Index([1, 2, 3, 4, 5])
result = index.sortlevel(ascending=[True])
tm.assert_index_equal(result[0], expected)
expected = Index([1, 2, 3, 4, 5])
result = index.sortlevel(ascending=True)
tm.assert_index_equal(result[0], expected)
expected = Index([5, 4, 3, 2, 1])
result = index.sortlevel(ascending=False)
tm.assert_index_equal(result[0], expected)
class TestMixedIntIndex(Base):
# Mostly the tests from common.py for which the results differ
# in py2 and py3 because ints and strings are uncomparable in py3
# (GH 13514)
_index_cls = Index
@pytest.fixture
def simple_index(self) -> Index:
return self._index_cls([0, "a", 1, "b", 2, "c"])
@pytest.fixture(params=[[0, "a", 1, "b", 2, "c"]], ids=["mixedIndex"])
def index(self, request):
return Index(request.param)
def test_argsort(self, simple_index):
index = simple_index
with pytest.raises(TypeError, match="'>|<' not supported"):
index.argsort()
def test_numpy_argsort(self, simple_index):
index = simple_index
with pytest.raises(TypeError, match="'>|<' not supported"):
np.argsort(index)
def test_copy_name(self, simple_index):
# Check that "name" argument passed at initialization is honoured
# GH12309
index = simple_index
first = type(index)(index, copy=True, name="mario")
second = type(first)(first, copy=False)
# Even though "copy=False", we want a new object.
assert first is not second
tm.assert_index_equal(first, second)
assert first.name == "mario"
assert second.name == "mario"
s1 = Series(2, index=first)
s2 = Series(3, index=second[:-1])
s3 = s1 * s2
assert s3.index.name == "mario"
def test_copy_name2(self):
# Check that adding a "name" parameter to the copy is honored
# GH14302
index = Index([1, 2], name="MyName")
index1 = index.copy()
tm.assert_index_equal(index, index1)
index2 = index.copy(name="NewName")
tm.assert_index_equal(index, index2, check_names=False)
assert index.name == "MyName"
assert index2.name == "NewName"
with tm.assert_produces_warning(FutureWarning):
index3 = index.copy(names=["NewName"])
tm.assert_index_equal(index, index3, check_names=False)
assert index.name == "MyName"
assert index.names == ["MyName"]
assert index3.name == "NewName"
assert index3.names == ["NewName"]
def test_copy_names_deprecated(self, simple_index):
# GH44916
with tm.assert_produces_warning(FutureWarning):
simple_index.copy(names=["a"])
def test_unique_na(self):
idx = Index([2, np.nan, 2, 1], name="my_index")
expected = Index([2, np.nan, 1], name="my_index")
result = idx.unique()
tm.assert_index_equal(result, expected)
def test_logical_compat(self, simple_index):
index = simple_index
assert index.all() == index.values.all()
assert index.any() == index.values.any()
@pytest.mark.parametrize("how", ["any", "all"])
@pytest.mark.parametrize("dtype", [None, object, "category"])
@pytest.mark.parametrize(
"vals,expected",
[
([1, 2, 3], [1, 2, 3]),
([1.0, 2.0, 3.0], [1.0, 2.0, 3.0]),
([1.0, 2.0, np.nan, 3.0], [1.0, 2.0, 3.0]),
(["A", "B", "C"], ["A", "B", "C"]),
(["A", np.nan, "B", "C"], ["A", "B", "C"]),
],
)
def test_dropna(self, how, dtype, vals, expected):
# GH 6194
index = Index(vals, dtype=dtype)
result = index.dropna(how=how)
expected = Index(expected, dtype=dtype)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("how", ["any", "all"])
@pytest.mark.parametrize(
"index,expected",
[
(
DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"]),
DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"]),
),
(
DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03", pd.NaT]),
DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"]),
),
(
TimedeltaIndex(["1 days", "2 days", "3 days"]),
TimedeltaIndex(["1 days", "2 days", "3 days"]),
),
(
TimedeltaIndex([pd.NaT, "1 days", "2 days", "3 days", pd.NaT]),
TimedeltaIndex(["1 days", "2 days", "3 days"]),
),
(
PeriodIndex(["2012-02", "2012-04", "2012-05"], freq="M"),
PeriodIndex(["2012-02", "2012-04", "2012-05"], freq="M"),
),
(
PeriodIndex(["2012-02", "2012-04", "NaT", "2012-05"], freq="M"),
PeriodIndex(["2012-02", "2012-04", "2012-05"], freq="M"),
),
],
)
def test_dropna_dt_like(self, how, index, expected):
result = index.dropna(how=how)
tm.assert_index_equal(result, expected)
def test_dropna_invalid_how_raises(self):
msg = "invalid how option: xxx"
with pytest.raises(ValueError, match=msg):
Index([1, 2, 3]).dropna(how="xxx")
@pytest.mark.parametrize(
"index",
[
Index([np.nan]),
Index([np.nan, 1]),
Index([1, 2, np.nan]),
Index(["a", "b", np.nan]),
pd.to_datetime(["NaT"]),
pd.to_datetime(["NaT", "2000-01-01"]),
pd.to_datetime(["2000-01-01", "NaT", "2000-01-02"]),
pd.to_timedelta(["1 day", "NaT"]),
],
)
def test_is_monotonic_na(self, index):
assert index.is_monotonic_increasing is False
assert index.is_monotonic_decreasing is False
assert index._is_strictly_monotonic_increasing is False
assert index._is_strictly_monotonic_decreasing is False
def test_int_name_format(self, frame_or_series):
index = Index(["a", "b", "c"], name=0)
result = frame_or_series(list(range(3)), index=index)
assert "0" in repr(result)
def test_str_to_bytes_raises(self):
# GH 26447
index = Index([str(x) for x in range(10)])
msg = "^'str' object cannot be interpreted as an integer$"
with pytest.raises(TypeError, match=msg):
bytes(index)
@pytest.mark.filterwarnings("ignore:elementwise comparison failed:FutureWarning")
def test_index_with_tuple_bool(self):
# GH34123
# TODO: also this op right now produces FutureWarning from numpy
# https://github.com/numpy/numpy/issues/11521
idx = Index([("a", "b"), ("b", "c"), ("c", "a")])
result = idx == ("c", "a")
expected = np.array([False, False, True])
tm.assert_numpy_array_equal(result, expected)
class TestIndexUtils:
@pytest.mark.parametrize(
"data, names, expected",
[
([[1, 2, 3]], None, Index([1, 2, 3])),
([[1, 2, 3]], ["name"], Index([1, 2, 3], name="name")),
(
[["a", "a"], ["c", "d"]],
None,
MultiIndex([["a"], ["c", "d"]], [[0, 0], [0, 1]]),
),
(
[["a", "a"], ["c", "d"]],
["L1", "L2"],
MultiIndex([["a"], ["c", "d"]], [[0, 0], [0, 1]], names=["L1", "L2"]),
),
],
)
def test_ensure_index_from_sequences(self, data, names, expected):
result = ensure_index_from_sequences(data, names)
tm.assert_index_equal(result, expected)
def test_ensure_index_mixed_closed_intervals(self):
# GH27172
intervals = [
pd.Interval(0, 1, closed="left"),
pd.Interval(1, 2, closed="right"),
pd.Interval(2, 3, closed="neither"),
pd.Interval(3, 4, closed="both"),
]
result = ensure_index(intervals)
expected = Index(intervals, dtype=object)
tm.assert_index_equal(result, expected)
def test_ensure_index_uint64(self):
# with both 0 and a large-uint64, np.array will infer to float64
# https://github.com/numpy/numpy/issues/19146
# but a more accurate choice would be uint64
values = [0, np.iinfo(np.uint64).max]
result = ensure_index(values)
assert list(result) == values
expected = Index(values, dtype="uint64")
tm.assert_index_equal(result, expected)
def test_get_combined_index(self):
result = _get_combined_index([])
expected = Index([])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"opname",
[
"eq",
"ne",
"le",
"lt",
"ge",
"gt",
"add",
"radd",
"sub",
"rsub",
"mul",
"rmul",
"truediv",
"rtruediv",
"floordiv",
"rfloordiv",
"pow",
"rpow",
"mod",
"divmod",
],
)
def test_generated_op_names(opname, index):
opname = f"__{opname}__"
method = getattr(index, opname)
assert method.__name__ == opname
@pytest.mark.parametrize("index_maker", tm.index_subclass_makers_generator())
def test_index_subclass_constructor_wrong_kwargs(index_maker):
# GH #19348
with pytest.raises(TypeError, match="unexpected keyword argument"):
index_maker(foo="bar")
@pytest.mark.filterwarnings("ignore:Passing keywords other:FutureWarning")
def test_deprecated_fastpath():
msg = "[Uu]nexpected keyword argument"
with pytest.raises(TypeError, match=msg):
Index(np.array(["a", "b"], dtype=object), name="test", fastpath=True)
with pytest.raises(TypeError, match=msg):
Int64Index(np.array([1, 2, 3], dtype="int64"), name="test", fastpath=True)
with pytest.raises(TypeError, match=msg):
RangeIndex(0, 5, 2, name="test", fastpath=True)
with pytest.raises(TypeError, match=msg):
CategoricalIndex(["a", "b", "c"], name="test", fastpath=True)
def test_shape_of_invalid_index():
    # Currently, it is possible to create "invalid" index objects backed by
    # a multi-dimensional array (see https://github.com/pandas-dev/pandas/issues/27125
    # about this). However, as long as this is not solved in general, this test ensures
    # that the returned shape is consistent with the underlying array for
    # compat with matplotlib (see https://github.com/pandas-dev/pandas/issues/27775)
idx = Index([0, 1, 2, 3])
with tm.assert_produces_warning(FutureWarning):
# GH#30588 multi-dimensional indexing deprecated
assert idx[:, None].shape == (4, 1)
def test_validate_1d_input():
# GH#27125 check that we do not have >1-dimensional input
msg = "Index data must be 1-dimensional"
arr = np.arange(8).reshape(2, 2, 2)
with pytest.raises(ValueError, match=msg):
Index(arr)
with pytest.raises(ValueError, match=msg):
Float64Index(arr.astype(np.float64))
with pytest.raises(ValueError, match=msg):
Int64Index(arr.astype(np.int64))
with pytest.raises(ValueError, match=msg):
UInt64Index(arr.astype(np.uint64))
df = DataFrame(arr.reshape(4, 2))
with pytest.raises(ValueError, match=msg):
Index(df)
# GH#13601 trying to assign a multi-dimensional array to an index is not
# allowed
ser = Series(0, range(4))
with pytest.raises(ValueError, match=msg):
ser.index = np.array([[2, 3]] * 4)
@pytest.mark.parametrize(
"klass, extra_kwargs",
[
[Index, {}],
[Int64Index, {}],
[Float64Index, {}],
[DatetimeIndex, {}],
[TimedeltaIndex, {}],
[NumericIndex, {}],
[PeriodIndex, {"freq": "Y"}],
],
)
def test_construct_from_memoryview(klass, extra_kwargs):
# GH 13120
result = klass(memoryview(np.arange(2000, 2005)), **extra_kwargs)
expected = klass(list(range(2000, 2005)), **extra_kwargs)
tm.assert_index_equal(result, expected, exact=True)
def test_index_set_names_pos_args_deprecation():
# GH#41485
idx = Index([1, 2, 3, 4])
msg = (
"In a future version of pandas all arguments of Index.set_names "
"except for the argument 'names' will be keyword-only"
)
with tm.assert_produces_warning(FutureWarning, match=msg):
result = idx.set_names("quarter", None)
expected = Index([1, 2, 3, 4], name="quarter")
tm.assert_index_equal(result, expected)
def test_drop_duplicates_pos_args_deprecation():
# GH#41485
idx = Index([1, 2, 3, 1])
msg = (
"In a future version of pandas all arguments of "
"Index.drop_duplicates will be keyword-only"
)
with tm.assert_produces_warning(FutureWarning, match=msg):
idx.drop_duplicates("last")
result = idx.drop_duplicates("last")
expected = Index([2, 3, 1])
tm.assert_index_equal(expected, result)
def test_get_attributes_dict_deprecated():
# https://github.com/pandas-dev/pandas/pull/44028
idx = Index([1, 2, 3, 1])
with tm.assert_produces_warning(DeprecationWarning):
attrs = idx._get_attributes_dict()
assert attrs == {"name": None}
@pytest.mark.parametrize("op", [operator.lt, operator.gt])
def test_nan_comparison_same_object(op):
# GH#47105
idx = Index([np.nan])
expected = np.array([False])
result = op(idx, idx)
tm.assert_numpy_array_equal(result, expected)
result = op(idx, idx.copy())
tm.assert_numpy_array_equal(result, expected)
|
{
"content_hash": "b4380e78cd93334706eb3cd43536d6c2",
"timestamp": "",
"source": "github",
"line_count": 1620,
"max_line_length": 88,
"avg_line_length": 34.614814814814814,
"alnum_prop": 0.562415293530209,
"repo_name": "datapythonista/pandas",
"id": "43b893b0846726be67bbc1eb07afd01ead80696d",
"size": "56076",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pandas/tests/indexes/test_base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C",
"bytes": "355524"
},
{
"name": "CSS",
"bytes": "1662"
},
{
"name": "Cython",
"bytes": "1178139"
},
{
"name": "Dockerfile",
"bytes": "1933"
},
{
"name": "HTML",
"bytes": "456449"
},
{
"name": "Makefile",
"bytes": "505"
},
{
"name": "Python",
"bytes": "19048364"
},
{
"name": "Shell",
"bytes": "10511"
},
{
"name": "Smarty",
"bytes": "8486"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
}
|
from applications.price_list.models import PriceList, ActivityPriceListItem, TimePriceListItem, UnitPriceListItem, \
PriceListItemEquipment, PriceListItemService
from applications.price_list.serializers import PriceListSerializer, ActivityPriceListItemSerializer, \
TimePriceListItemSerializer, UnitPriceListItemSerializer, PriceListItemEquipmentSerializer, \
PriceListItemServiceSerializer
from rest_framework import viewsets
class PriceListViewSet(viewsets.ModelViewSet):
"""
ViewSet for Price Lists
"""
serializer_class = PriceListSerializer
queryset = PriceList.objects.all()
class ActivityPriceListItemViewSet(viewsets.ModelViewSet):
"""
ViewSet for Activity Price List Items
"""
serializer_class = ActivityPriceListItemSerializer
queryset = ActivityPriceListItem.objects.all()
class TimePriceListItemViewSet(viewsets.ModelViewSet):
"""
ViewSet for Time Price List Items
"""
serializer_class = TimePriceListItemSerializer
queryset = TimePriceListItem.objects.all()
class UnitPriceListItemViewSet(viewsets.ModelViewSet):
"""
ViewSet for Unit Price List Items
"""
serializer_class = UnitPriceListItemSerializer
queryset = UnitPriceListItem.objects.all()
class PriceListItemEquipmentViewSet(viewsets.ModelViewSet):
"""
ViewSet for Price list item equipment relations
"""
serializer_class = PriceListItemEquipmentSerializer
queryset = PriceListItemEquipment.objects.all()
class PriceListItemServiceViewSet(viewsets.ModelViewSet):
"""
ViewSet for Price list item service relations
"""
serializer_class = PriceListItemServiceSerializer
queryset = PriceListItemService.objects.all()
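# Illustrative only -- a minimal sketch (not part of the original module) of how
# these ViewSets are typically wired up with a DRF router; the URL prefixes
# below are assumptions, not defined anywhere in this application.
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r'price-lists', PriceListViewSet)
router.register(r'activity-price-list-items', ActivityPriceListItemViewSet)
router.register(r'time-price-list-items', TimePriceListItemViewSet)
router.register(r'unit-price-list-items', UnitPriceListItemViewSet)
router.register(r'price-list-item-equipment', PriceListItemEquipmentViewSet)
router.register(r'price-list-item-services', PriceListItemServiceViewSet)
# The resulting routes would then be included from a urls.py, e.g.:
#   urlpatterns = router.urls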
|
{
"content_hash": "384a80aeb2aa3629a63ad9191ad914bc",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 116,
"avg_line_length": 31.944444444444443,
"alnum_prop": 0.7756521739130435,
"repo_name": "awwong1/apollo",
"id": "c66a7d2b1aa2861981a6decf68483e7cdff06dff",
"size": "1725",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "applications/price_list/viewsets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2892"
},
{
"name": "HTML",
"bytes": "79517"
},
{
"name": "JavaScript",
"bytes": "2154"
},
{
"name": "Python",
"bytes": "197209"
},
{
"name": "Shell",
"bytes": "1560"
}
],
"symlink_target": ""
}
|
from flask import Flask
from photo import photo
from routes import Routes
blueprints = (
(photo.create_blueprint(), '/services/photo'),)
base_routes = Routes()
class Eclipse2017PhotoApp(Flask):
"""
Eclipse 2017 Photo application.
"""
def __init__(
self, project_id, session_enc_key, google_oauth2_client_id,
google_oauth2_client_secret, debug=False,
blueprints=blueprints, routes=base_routes, photo=photo,
**kwargs):
super(Eclipse2017PhotoApp, self).__init__(__name__, **kwargs)
self.config['PROJECT_ID'] = project_id
self.config['SECRET_KEY'] = session_enc_key
self.config['GOOGLE_OAUTH2_CLIENT_ID'] = google_oauth2_client_id
self.config['GOOGLE_OAUTH2_CLIENT_SECRET'] = google_oauth2_client_secret
self.photo = photo
self.debug = debug
routes.register(self, blueprints)
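# Illustrative only -- a minimal sketch (not part of the original module) of
# constructing and running the app locally; all credential values below are
# placeholder assumptions.
if __name__ == '__main__':
    app = Eclipse2017PhotoApp(
        project_id='example-project',
        session_enc_key='change-me',
        google_oauth2_client_id='example-client-id',
        google_oauth2_client_secret='example-client-secret',
        debug=True)
    app.run(host='0.0.0.0', port=8080)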
|
{
"content_hash": "eec10bd5e3729d9f4e7211c949c76c64",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 80,
"avg_line_length": 27.696969696969695,
"alnum_prop": 0.6367614879649891,
"repo_name": "google/eclipse2017",
"id": "3c616d043886bb1257405ba16a54e9179c782f6e",
"size": "1492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "photo/app/app/backend/eclipse2017_photo_app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1210"
},
{
"name": "HTML",
"bytes": "174182"
},
{
"name": "JavaScript",
"bytes": "72747"
},
{
"name": "Python",
"bytes": "665417"
},
{
"name": "Shell",
"bytes": "47103"
}
],
"symlink_target": ""
}
|
"""Library for constructing data for imagenet experiments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
from absl import flags
from absl import logging
import attr
import robustness_dhtd
from six.moves import range
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
from uq_benchmark_2019 import image_data_utils
from uq_benchmark_2019.imagenet import imagenet_input
flags.DEFINE_string('imagenet_dir', None, 'Path to IMAGENET data tables.')
flags.DEFINE_string('imagenet_c_dir', None, 'Path to IMAGENET-C data tables.')
FLAGS = flags.FLAGS
gfile = tf.io.gfile
IMAGENET_SHAPE = (224, 224, 3)
# Imagenet training and test data sets.
IMAGENET_NUM_CLASSES = 1000
IMAGE_SIZE = 224
APPROX_IMAGENET_TRAINING_IMAGES = 1280000 # Approximate number of images.
IMAGENET_VALIDATION_IMAGES = 50000 # Number of images.
def _download_alt_dataset(config, shuffle_files):
dataset_builder = tfds.builder(config.alt_dataset_name)
dataset_builder.download_and_prepare()
return dataset_builder.as_dataset(split=config.split,
shuffle_files=shuffle_files)
def build_dataset(
config, batch_size, is_training=False, fake_data=False, use_bfloat16=False):
"""Returns a tf.data.Dataset with <image, label> pairs.
Args:
config: DataConfig instance.
batch_size: Dataset batch size.
is_training: Whether to build a dataset for training
(with shuffling and image distortions).
fake_data: If True, use randomly generated data.
use_bfloat16: If True, use bfloat16. If False, use float32.
Returns:
tf.data.Dataset
"""
if fake_data:
logging.info('Generating fake data for config: %s', config)
return image_data_utils.make_fake_data(IMAGENET_SHAPE).batch(batch_size)
if config.alt_dataset_name:
dataset = _download_alt_dataset(config, shuffle_files=is_training)
def prep_fn(image_input):
image = tf.image.convert_image_dtype(image_input['image'], tf.float32)
image = tf.image.crop_to_bounding_box(image, 20, 0, 178, 178)
image = tf.image.resize(image, (224, 224))
# omit CelebA labels
return image, -1
return dataset.map(prep_fn).batch(batch_size)
logging.info('Building dataset for config:\n%s', attr.asdict(config))
if config.corruption_type and config.corruption_static:
return image_data_utils.make_static_dataset(
config, _get_static_imagenet_c).batch(batch_size)
dataset_builder = imagenet_input.ImageNetInput(
is_training=is_training, data_dir=FLAGS.imagenet_dir,
batch_size=batch_size, dataset_split=config.split,
use_bfloat16=use_bfloat16)
dataset = dataset_builder.input_fn()
if config.corruption_type:
assert (config.corruption_value is not None) != (
config.corruption_level > 0)
# NOTE: dhtd corruptions expect to be applied before float32 conversion.
def apply_corruption(image, label):
"""Apply the corruption function to the image."""
image = tf.image.convert_image_dtype(image, tf.uint8)
corruption_fn = functools.partial(
robustness_dhtd.corrupt,
severity=config.corruption_level,
severity_value=config.corruption_value, dim=224,
corruption_name=config.corruption_type, dataset_name='imagenet')
def apply_to_batch(ims):
ims_numpy = ims.numpy()
for i in range(ims_numpy.shape[0]):
ims_numpy[i] = corruption_fn(ims_numpy[i])
return ims_numpy
image = tf.py_function(func=apply_to_batch, inp=[image], Tout=tf.float32)
image = tf.clip_by_value(image, 0., 255.) / 255.
return image, label
dataset = dataset.map(apply_corruption)
if config.roll_pixels:
def roll_fn(image, label):
"""Function to roll pixels."""
image = tf.roll(image, config.roll_pixels, -2)
return image, label
dataset = dataset.map(roll_fn)
return dataset
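# Illustrative only (not part of the original library): with fake_data=True the
# config is only logged before the function returns, so a quick smoke test can
# pass a placeholder config, e.g.
#   ds = build_dataset(config=None, batch_size=8, fake_data=True)
# which returns the batched fake dataset built from IMAGENET_SHAPE.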
def _get_static_imagenet_c(corruption_type, corruption_level,
num_parallel_reads=1):
"""Load static imagenet-C images to tf.dataset."""
imagenet_c_images = os.path.join(
FLAGS.imagenet_c_dir, corruption_type, str(corruption_level), 'val/*')
filenames = gfile.glob(imagenet_c_images)
def parse(serialized):
"""Parses a serialized tf.Example."""
keys_to_features = {
'image': tf.io.FixedLenFeature([], dtype=tf.string),
'label': tf.io.FixedLenFeature([], dtype=tf.int64),
}
out = tf.io.parse_single_example(serialized, keys_to_features)
image = tf.image.decode_image(out['image'], channels=3)
image.set_shape([None, None, 3])
image = tf.image.convert_image_dtype(image, tf.float32)
return image, out['label']
dataset = tf.data.TFRecordDataset(
filenames, num_parallel_reads=num_parallel_reads)
return dataset.map(parse, num_parallel_calls=1)
|
{
"content_hash": "c0c122c46f742ba907a558137b6b4eba",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 80,
"avg_line_length": 34.49650349650349,
"alnum_prop": 0.6918710723697548,
"repo_name": "google-research/google-research",
"id": "26ac6681b82563f3f49abac2bd802544451fd903",
"size": "5541",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uq_benchmark_2019/imagenet/data_lib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9817"
},
{
"name": "C++",
"bytes": "4166670"
},
{
"name": "CMake",
"bytes": "6412"
},
{
"name": "CSS",
"bytes": "27092"
},
{
"name": "Cuda",
"bytes": "1431"
},
{
"name": "Dockerfile",
"bytes": "7145"
},
{
"name": "Gnuplot",
"bytes": "11125"
},
{
"name": "HTML",
"bytes": "77599"
},
{
"name": "ImageJ Macro",
"bytes": "50488"
},
{
"name": "Java",
"bytes": "487585"
},
{
"name": "JavaScript",
"bytes": "896512"
},
{
"name": "Julia",
"bytes": "67986"
},
{
"name": "Jupyter Notebook",
"bytes": "71290299"
},
{
"name": "Lua",
"bytes": "29905"
},
{
"name": "MATLAB",
"bytes": "103813"
},
{
"name": "Makefile",
"bytes": "5636"
},
{
"name": "NASL",
"bytes": "63883"
},
{
"name": "Perl",
"bytes": "8590"
},
{
"name": "Python",
"bytes": "53790200"
},
{
"name": "R",
"bytes": "101058"
},
{
"name": "Roff",
"bytes": "1208"
},
{
"name": "Rust",
"bytes": "2389"
},
{
"name": "Shell",
"bytes": "730444"
},
{
"name": "Smarty",
"bytes": "5966"
},
{
"name": "Starlark",
"bytes": "245038"
}
],
"symlink_target": ""
}
|
from coverage import *
|
{
"content_hash": "063eb9c5d34cf1570041f2f06cfea1d0",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 23,
"avg_line_length": 24,
"alnum_prop": 0.75,
"repo_name": "sassoftware/testutils",
"id": "1888c32466718482bed082e04112de881a0a8b88",
"size": "24",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coverage/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6320"
},
{
"name": "Makefile",
"bytes": "6912"
},
{
"name": "Python",
"bytes": "248805"
},
{
"name": "Shell",
"bytes": "180"
}
],
"symlink_target": ""
}
|
import unittest, os, datetime
from flask import json, request, Response, session
import MySQLdb
from app.views import create_app
from app.dbconnect import DbConnect
class BasicConnectionTestCase(unittest.TestCase):
"""Checks for app and db connectivity"""
def setUp(self):
"""Setup test app"""
self.app = create_app('app.config')
def tearDown(self):
"""Destroy test app"""
    def test_index(self):
        """Initial test. Ensure Flask is set up correctly"""
response = self.app.test_client().get('/', content_type='html/text')
self.assertEqual(response.status_code, 200)
    def test_db_connection_positive(self):
        """Test MySQL connection with correct credentials"""
data = True
try:
with self.app.app_context():
db = DbConnect(self.app.config)
cursor = db.connection.cursor()
res = cursor.execute('SELECT * from cnx_logger_biomimic_type LIMIT 2')
res = cursor.execute('SELECT * from cnx_logger_properties LIMIT 2')
res = cursor.execute('SELECT * from cnx_logger_geographics LIMIT 2')
res = cursor.execute('SELECT * from cnx_logger LIMIT 2')
res = cursor.execute('SELECT * from cnx_logger_temperature LIMIT 2')
except (MySQLdb.OperationalError, MySQLdb.ProgrammingError) as e:
data = None
finally:
cursor.close()
db.close()
self.assertNotEqual(data, None)
    def test_db_connection_username_negative(self):
        """Test MySQL connection given an incorrect username"""
try:
with self.app.app_context():
db=MySQLdb.connect(
host=self.app.config['MYSQL_HOST'], \
port=self.app.config['MYSQL_PORT'], \
user='dummy', \
passwd=self.app.config['MYSQL_PASSWORD'], \
db=self.app.config['MYSQL_DB'])
c = db.cursor()
c.execute('SELECT * from cnx_logger_biomimic_type LIMIT 2')
c.execute('SELECT * from cnx_logger_properties LIMIT 2')
c.execute('SELECT * from cnx_logger_geographics LIMIT 2')
c.execute('SELECT * from cnx_logger LIMIT 2')
c.execute('SELECT * from cnx_logger_temperature LIMIT 2')
data = c.fetchone()
c.close()
except MySQLdb.OperationalError as e:
data = None
self.assertEqual(data, None)
    def test_db_connection_password_negative(self):
        """Test MySQL connection given an incorrect password"""
try:
with self.app.app_context():
db=MySQLdb.connect(
host=self.app.config['MYSQL_HOST'], \
port=self.app.config['MYSQL_PORT'], \
user=self.app.config['MYSQL_USER'], \
passwd='dummy', \
db=self.app.config['MYSQL_DB'])
c = db.cursor()
c.execute('SELECT * from cnx_logger_biomimic_type LIMIT 2')
c.execute('SELECT * from cnx_logger_properties LIMIT 2')
c.execute('SELECT * from cnx_logger_geographics LIMIT 2')
c.execute('SELECT * from cnx_logger LIMIT 2')
c.execute('SELECT * from cnx_logger_temperature LIMIT 2')
data = c.fetchone()
c.close()
except MySQLdb.OperationalError as e:
data = None
self.assertEqual(data, None)
    def test_db_connection_host_negative(self):
        """Test MySQL connection given an incorrect hostname"""
try:
with self.app.app_context():
db=MySQLdb.connect(
host='dummy', \
port=self.app.config['MYSQL_PORT'], \
user=self.app.config['MYSQL_USER'], \
passwd=self.app.config['MYSQL_PASSWORD'],\
db=self.app.config['MYSQL_DB'])
c = db.cursor()
c.execute('SELECT * from cnx_logger_biomimic_type LIMIT 2')
c.execute('SELECT * from cnx_logger_properties LIMIT 2')
c.execute('SELECT * from cnx_logger_geographics LIMIT 2')
c.execute('SELECT * from cnx_logger LIMIT 2')
c.execute('SELECT * from cnx_logger_temperature LIMIT 2')
data = c.fetchone()
c.close()
except MySQLdb.OperationalError as e:
data = None
self.assertEqual(data, None)
    def test_db_connection_dbname_negative(self):
        """Test MySQL connection given an incorrect database name"""
try:
with self.app.app_context():
db=MySQLdb.connect(
host=self.app.config['MYSQL_HOST'], \
port=self.app.config['MYSQL_PORT'], \
user=self.app.config['MYSQL_USER'], \
passwd=self.app.config['MYSQL_PASSWORD'],\
db='dummy')
c = db.cursor()
c.execute('SELECT * from cnx_logger_biomimic_type LIMIT 2')
c.execute('SELECT * from cnx_logger_properties LIMIT 2')
c.execute('SELECT * from cnx_logger_geographics LIMIT 2')
c.execute('SELECT * from cnx_logger LIMIT 2')
c.execute('SELECT * from cnx_logger_temperature LIMIT 2')
data = c.fetchone()
c.close()
except MySQLdb.OperationalError as e:
data = None
self.assertEqual(data, None)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "5c63935c9df27b70226189728d9e0718",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 90,
"avg_line_length": 44.9,
"alnum_prop": 0.5281822854205928,
"repo_name": "DefendersOfNemo/SavingNemo",
"id": "24e5c7ac7f3fdbcd9910fdd1afaf663be81adb1d",
"size": "5847",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "server/tests/tests_connection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "776"
},
{
"name": "HTML",
"bytes": "29793"
},
{
"name": "JavaScript",
"bytes": "24889"
},
{
"name": "Makefile",
"bytes": "1437"
},
{
"name": "Python",
"bytes": "116802"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('auth', '0006_require_contenttypes_0002'),
('umibukela', '0016_auto_20170116_1216'),
]
operations = [
migrations.CreateModel(
name='KoboRefreshToken',
fields=[
('user', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('token', models.TextField()),
],
),
]
|
{
"content_hash": "1807e4efdde27042db0e667f6e40f7f1",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 111,
"avg_line_length": 26.681818181818183,
"alnum_prop": 0.5945485519591142,
"repo_name": "Code4SA/umibukela",
"id": "6da0508b6c0d6f291b69e9a8b657d6bd9f1cd991",
"size": "611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "umibukela/migrations/0017_koborefreshtoken.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "136084"
},
{
"name": "HTML",
"bytes": "148202"
},
{
"name": "JavaScript",
"bytes": "70122"
},
{
"name": "Python",
"bytes": "210522"
},
{
"name": "Shell",
"bytes": "511"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
"""Test replica set MotorClient."""
import unittest
import pymongo
import pymongo.auth
import pymongo.errors
import pymongo.mongo_replica_set_client
from tornado import gen
from tornado.testing import gen_test
import motor
import motor.core
import test
from test import SkipTest
from test.test_environment import db_user, db_password, env
from test.tornado_tests import MotorReplicaSetTestBase, MotorTest
from test.utils import one, get_primary_pool
from motor.motor_py2_compat import text_type
class MotorReplicaSetTest(MotorReplicaSetTestBase):
def test_io_loop(self):
with self.assertRaises(TypeError):
motor.MotorClient(test.env.rs_uri, io_loop='foo')
@gen_test
def test_connection_failure(self):
# Assuming there isn't anything actually running on this port
client = motor.MotorClient(
'localhost:8765', replicaSet='rs', io_loop=self.io_loop,
serverSelectionTimeoutMS=10)
# Test the Future interface.
with self.assertRaises(pymongo.errors.ConnectionFailure):
yield client.admin.command('ismaster')
@gen_test
def test_auth_network_error(self):
if not test.env.auth:
raise SkipTest('Authentication is not enabled on server')
# Make sure there's no semaphore leak if we get a network error
# when authenticating a new socket with cached credentials.
# Get a client with one socket so we detect if it's leaked.
c = self.motor_rsc(maxPoolSize=1, waitQueueTimeoutMS=1)
yield c.admin.command('ismaster')
# Simulate an authenticate() call on a different socket.
credentials = pymongo.auth._build_credentials_tuple(
'DEFAULT',
'admin',
text_type(db_user),
text_type(db_password),
{},
'admin')
c.delegate._cache_credentials('test', credentials, connect=False)
# Cause a network error on the actual socket.
pool = get_primary_pool(c)
socket_info = one(pool.sockets)
socket_info.sock.close()
# In __check_auth, the client authenticates its socket with the
# new credential, but gets a socket.error. Should be reraised as
# AutoReconnect.
with self.assertRaises(pymongo.errors.AutoReconnect):
yield c.test.collection.find_one()
# No semaphore leak, the pool is allowed to make a new socket.
yield c.test.collection.find_one()
@gen_test
def test_open_concurrent(self):
# MOTOR-66: don't block on PyMongo's __monitor_lock, but also don't
# spawn multiple monitors.
c = self.motor_rsc()
yield [c.db.collection.find_one(), c.db.collection.find_one()]
class TestReplicaSetClientAgainstStandalone(MotorTest):
"""This is a funny beast -- we want to run tests for a replica set
MotorClient but only if the database at DB_IP and DB_PORT is a standalone.
"""
def setUp(self):
super(TestReplicaSetClientAgainstStandalone, self).setUp()
if test.env.is_replica_set:
raise SkipTest(
"Connected to a replica set, not a standalone mongod")
@gen_test
def test_connect(self):
with self.assertRaises(pymongo.errors.ServerSelectionTimeoutError):
yield motor.MotorClient(
'%s:%s' % (env.host, env.port), replicaSet='anything',
io_loop=self.io_loop,
serverSelectionTimeoutMS=10).test.test.find_one()
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "fdd2ee482739be6eab024ddea555acce",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 78,
"avg_line_length": 34.39047619047619,
"alnum_prop": 0.6577125450013847,
"repo_name": "wujuguang/motor",
"id": "3d5253113a60742fc75c80f383424b59e62c7c6d",
"size": "4190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/tornado_tests/test_motor_replica_set.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "519031"
},
{
"name": "Shell",
"bytes": "2713"
}
],
"symlink_target": ""
}
|
import requests
from lib.base import OpscenterAction
class ListRequestsAction(OpscenterAction):
def run(self, request_type, list_all=True, cluster_id=None):
if not cluster_id:
cluster_id = self.cluster_id
url = self._get_full_url([cluster_id, 'request', request_type])
return requests.get(url, params={'list_all': int(list_all)}).json()
|
{
"content_hash": "66a0f21b85a9444b8e1df3fc9afae4ec",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 75,
"avg_line_length": 29.384615384615383,
"alnum_prop": 0.6701570680628273,
"repo_name": "pidah/st2contrib",
"id": "c1d4fc726c7df460bc4798992d91277a10619990",
"size": "382",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "packs/opscenter/actions/list_requests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "8532"
},
{
"name": "Makefile",
"bytes": "4592"
},
{
"name": "Python",
"bytes": "665076"
},
{
"name": "Ruby",
"bytes": "3081"
},
{
"name": "Shell",
"bytes": "15738"
}
],
"symlink_target": ""
}
|
import kfp
import kfp.dsl as dsl
from kfp import components
from kubeflow.katib import ApiClient
from kubeflow.katib import V1beta1ExperimentSpec
from kubeflow.katib import V1beta1AlgorithmSpec
from kubeflow.katib import V1beta1ObjectiveSpec
from kubeflow.katib import V1beta1ParameterSpec
from kubeflow.katib import V1beta1FeasibleSpace
from kubeflow.katib import V1beta1TrialTemplate
from kubeflow.katib import V1beta1TrialParameterSpec
# You should define the Experiment name, namespace and number of training steps in the arguments.
def create_katib_experiment_task(experiment_name, experiment_namespace, training_steps):
# Trial count specification.
max_trial_count = 5
max_failed_trial_count = 3
parallel_trial_count = 2
# Objective specification.
objective = V1beta1ObjectiveSpec(
type="minimize",
goal=0.001,
objective_metric_name="loss"
)
# Algorithm specification.
algorithm = V1beta1AlgorithmSpec(
algorithm_name="random",
)
# Experiment search space.
# In this example we tune learning rate and batch size.
parameters = [
V1beta1ParameterSpec(
name="learning_rate",
parameter_type="double",
feasible_space=V1beta1FeasibleSpace(
min="0.01",
max="0.05"
),
),
V1beta1ParameterSpec(
name="batch_size",
parameter_type="int",
feasible_space=V1beta1FeasibleSpace(
min="80",
max="100"
),
)
]
# Experiment Trial template.
# TODO (andreyvelich): Use community image for the mnist example.
trial_spec = {
"apiVersion": "kubeflow.org/v1",
"kind": "TFJob",
"spec": {
"tfReplicaSpecs": {
"Chief": {
"replicas": 1,
"restartPolicy": "OnFailure",
"template": {
"metadata": {
"annotations": {
"sidecar.istio.io/inject": "false"
}
},
"spec": {
"containers": [
{
"name": "tensorflow",
"image": "docker.io/liuhougangxa/tf-estimator-mnist",
"command": [
"python",
"/opt/model.py",
"--tf-train-steps=" + str(training_steps),
"--tf-learning-rate=${trialParameters.learningRate}",
"--tf-batch-size=${trialParameters.batchSize}"
]
}
]
}
}
},
"Worker": {
"replicas": 1,
"restartPolicy": "OnFailure",
"template": {
"metadata": {
"annotations": {
"sidecar.istio.io/inject": "false"
}
},
"spec": {
"containers": [
{
"name": "tensorflow",
"image": "docker.io/liuhougangxa/tf-estimator-mnist",
"command": [
"python",
"/opt/model.py",
"--tf-train-steps=" + str(training_steps),
"--tf-learning-rate=${trialParameters.learningRate}",
"--tf-batch-size=${trialParameters.batchSize}"
]
}
]
}
}
}
}
}
}
# Configure parameters for the Trial template.
trial_template = V1beta1TrialTemplate(
primary_container_name="tensorflow",
primary_pod_labels={"training.kubeflow.org/job-role": "master"},
trial_parameters=[
V1beta1TrialParameterSpec(
name="learningRate",
description="Learning rate for the training model",
reference="learning_rate"
),
V1beta1TrialParameterSpec(
name="batchSize",
description="Batch size for the model",
reference="batch_size"
),
],
trial_spec=trial_spec
)
# Create an Experiment from the above parameters.
experiment_spec = V1beta1ExperimentSpec(
max_trial_count=max_trial_count,
max_failed_trial_count=max_failed_trial_count,
parallel_trial_count=parallel_trial_count,
objective=objective,
algorithm=algorithm,
parameters=parameters,
trial_template=trial_template
)
# Create the KFP task for the Katib Experiment.
# Experiment Spec should be serialized to a valid Kubernetes object.
katib_experiment_launcher_op = components.load_component_from_url(
"https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/katib-launcher/component.yaml")
op = katib_experiment_launcher_op(
experiment_name=experiment_name,
experiment_namespace=experiment_namespace,
experiment_spec=ApiClient().sanitize_for_serialization(experiment_spec),
experiment_timeout_minutes=60,
delete_finished_experiment=False)
return op
# This function converts Katib Experiment HP results to args.
def convert_katib_results(katib_results) -> str:
import json
import pprint
katib_results_json = json.loads(katib_results)
print("Katib results:")
pprint.pprint(katib_results_json)
best_hps = []
for pa in katib_results_json["currentOptimalTrial"]["parameterAssignments"]:
if pa["name"] == "learning_rate":
best_hps.append("--tf-learning-rate=" + pa["value"])
elif pa["name"] == "batch_size":
best_hps.append("--tf-batch-size=" + pa["value"])
print("Best Hyperparameters: {}".format(best_hps))
return " ".join(best_hps)
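# Illustrative only (not part of the original sample): given Katib results of the form
#   {"currentOptimalTrial": {"parameterAssignments": [
#       {"name": "learning_rate", "value": "0.04"},
#       {"name": "batch_size", "value": "96"}]}}
# the helper above returns "--tf-learning-rate=0.04 --tf-batch-size=96".
# (The numeric values here are made-up examples.)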
# You should define the TFJob name, namespace, number of training steps, output of Katib and model volume tasks in the arguments.
def create_tfjob_task(tfjob_name, tfjob_namespace, training_steps, katib_op, model_volume_op):
import json
# Get parameters from the Katib Experiment.
# Parameters are in the format "--tf-learning-rate=0.01 --tf-batch-size=100"
convert_katib_results_op = components.func_to_container_op(convert_katib_results)
best_hp_op = convert_katib_results_op(katib_op.output)
best_hps = str(best_hp_op.output)
# Create the TFJob Chief and Worker specification with the best Hyperparameters.
# TODO (andreyvelich): Use community image for the mnist example.
tfjob_chief_spec = {
"replicas": 1,
"restartPolicy": "OnFailure",
"template": {
"metadata": {
"annotations": {
"sidecar.istio.io/inject": "false"
}
},
"spec": {
"containers": [
{
"name": "tensorflow",
"image": "docker.io/liuhougangxa/tf-estimator-mnist",
"command": [
"sh",
"-c"
],
"args": [
"python /opt/model.py --tf-export-dir=/mnt/export --tf-train-steps={} {}".format(training_steps, best_hps)
],
"volumeMounts": [
{
"mountPath": "/mnt/export",
"name": "model-volume"
}
]
}
],
"volumes": [
{
"name": "model-volume",
"persistentVolumeClaim": {
"claimName": str(model_volume_op.outputs["name"])
}
}
]
}
}
}
tfjob_worker_spec = {
"replicas": 1,
"restartPolicy": "OnFailure",
"template": {
"metadata": {
"annotations": {
"sidecar.istio.io/inject": "false"
}
},
"spec": {
"containers": [
{
"name": "tensorflow",
"image": "docker.io/liuhougangxa/tf-estimator-mnist",
"command": [
"sh",
"-c",
],
"args": [
"python /opt/model.py --tf-export-dir=/mnt/export --tf-train-steps={} {}".format(training_steps, best_hps)
],
}
],
}
}
}
# Create the KFP task for the TFJob.
tfjob_launcher_op = components.load_component_from_url(
"https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/launcher/component.yaml")
op = tfjob_launcher_op(
name=tfjob_name,
namespace=tfjob_namespace,
chief_spec=json.dumps(tfjob_chief_spec),
worker_spec=json.dumps(tfjob_worker_spec),
tfjob_timeout_minutes=60,
delete_finished_tfjob=False)
return op
# You should define the model name, namespace, output of the TFJob and model volume tasks in the arguments.
def create_kserve_task(model_name, model_namespace, tfjob_op, model_volume_op):
inference_service = '''
apiVersion: "serving.kserve.io/v1beta1"
kind: "InferenceService"
metadata:
name: {}
namespace: {}
annotations:
"sidecar.istio.io/inject": "false"
spec:
predictor:
tensorflow:
storageUri: "pvc://{}/"
'''.format(model_name, model_namespace, str(model_volume_op.outputs["name"]))
kserve_launcher_op = components.load_component_from_url(
'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kserve/component.yaml')
kserve_launcher_op(action="create", inferenceservice_yaml=inference_service).after(tfjob_op)
name="mnist-e2e"
namespace="kubeflow-user-example-com"
training_steps="200"
@dsl.pipeline(
name="end-to-end-pipeline",
description="An end to end mnist example including hyperparameter tuning, train and inference"
)
def mnist_pipeline(name=name, namespace=namespace, training_steps=training_steps):
# Run the hyperparameter tuning with Katib.
katib_op = create_katib_experiment_task(name, namespace, training_steps)
# Create volume to train and serve the model.
model_volume_op = dsl.VolumeOp(
name="model-volume",
resource_name="model-volume",
size="1Gi",
modes=dsl.VOLUME_MODE_RWM
)
# Run the distributive training with TFJob.
tfjob_op = create_tfjob_task(name, namespace, training_steps, katib_op, model_volume_op)
# Create the KServe inference.
create_kserve_task(name, namespace, tfjob_op, model_volume_op)
if __name__ == '__main__':
from kfp_tekton.compiler import TektonCompiler
TektonCompiler().compile(mnist_pipeline, __file__.replace('.py', '.yaml'))
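    # Illustrative alternative (an assumption, not part of the original sample):
    # on an Argo-based Kubeflow Pipelines installation the same pipeline could be
    # compiled with the standard KFP compiler instead of the Tekton one, e.g.:
    #   kfp.compiler.Compiler().compile(mnist_pipeline, __file__.replace('.py', '.yaml'))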
|
{
"content_hash": "6a3d78f798ff4c2d30aba423377d406f",
"timestamp": "",
"source": "github",
"line_count": 317,
"max_line_length": 134,
"avg_line_length": 37.42586750788644,
"alnum_prop": 0.5101146325016858,
"repo_name": "kubeflow/kfp-tekton",
"id": "f9573dae38083a7c3fe3a64ab6729a0cdbea8593",
"size": "11942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/e2e-mnist/e2e-mnist.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2171"
},
{
"name": "Dockerfile",
"bytes": "15467"
},
{
"name": "Go",
"bytes": "1590112"
},
{
"name": "HTML",
"bytes": "3656"
},
{
"name": "JavaScript",
"bytes": "526449"
},
{
"name": "Jupyter Notebook",
"bytes": "15265"
},
{
"name": "Makefile",
"bytes": "22228"
},
{
"name": "Mustache",
"bytes": "23652"
},
{
"name": "PowerShell",
"bytes": "3194"
},
{
"name": "Python",
"bytes": "1055595"
},
{
"name": "Shell",
"bytes": "178344"
},
{
"name": "Smarty",
"bytes": "7694"
},
{
"name": "Starlark",
"bytes": "18586"
},
{
"name": "TypeScript",
"bytes": "3995476"
}
],
"symlink_target": ""
}
|
from typing import Any, Dict
from airflow.sensors.sql_sensor import SqlSensor
from airflow.utils.decorators import apply_defaults
class MetastorePartitionSensor(SqlSensor):
"""
    An alternative to the HivePartitionSensor that talks directly to the
    MySQL db. This was created as a result of observing suboptimal
    queries generated by the Metastore thrift service when hitting
    subpartitioned tables. The Thrift service's queries were written in a
    way that wouldn't leverage the indexes.
:param schema: the schema
:type schema: str
:param table: the table
:type table: str
:param partition_name: the partition name, as defined in the PARTITIONS
table of the Metastore. Order of the fields does matter.
Examples: ``ds=2016-01-01`` or
``ds=2016-01-01/sub=foo`` for a sub partitioned table
:type partition_name: str
:param mysql_conn_id: a reference to the MySQL conn_id for the metastore
:type mysql_conn_id: str
"""
template_fields = ('partition_name', 'table', 'schema')
ui_color = '#8da7be'
poke_context_fields = ('partition_name', 'table', 'schema', 'mysql_conn_id')
@apply_defaults
def __init__(
self,
*,
table: str,
partition_name: str,
schema: str = "default",
mysql_conn_id: str = "metastore_mysql",
**kwargs: Any,
):
self.partition_name = partition_name
self.table = table
self.schema = schema
self.first_poke = True
self.conn_id = mysql_conn_id
# TODO(aoen): We shouldn't be using SqlSensor here but MetastorePartitionSensor.
# The problem is the way apply_defaults works isn't compatible with inheritance.
# The inheritance model needs to be reworked in order to support overriding args/
# kwargs with arguments here, then 'conn_id' and 'sql' can be passed into the
# constructor below and apply_defaults will no longer throw an exception.
super().__init__(**kwargs)
def poke(self, context: Dict[str, Any]) -> Any:
if self.first_poke:
self.first_poke = False
if '.' in self.table:
self.schema, self.table = self.table.split('.')
self.sql = """
SELECT 'X'
FROM PARTITIONS A0
LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
LEFT OUTER JOIN DBS C0 ON B0.DB_ID = C0.DB_ID
WHERE
B0.TBL_NAME = '{self.table}' AND
C0.NAME = '{self.schema}' AND
A0.PART_NAME = '{self.partition_name}';
""".format(
self=self
)
return super().poke(context)
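# Illustrative only -- a minimal usage sketch, not part of the original module.
# The DAG id, schedule, table and partition values below are assumptions.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(
        dag_id="example_metastore_partition",
        start_date=datetime(2021, 1, 1),
        schedule_interval="@daily",
    ) as dag:
        wait_for_partition = MetastorePartitionSensor(
            task_id="wait_for_partition",
            table="default.my_table",
            partition_name="ds={{ ds }}",
            mysql_conn_id="metastore_mysql",
        )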
|
{
"content_hash": "39dea08d70baf48aa4529c585e28629b",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 89,
"avg_line_length": 37.69444444444444,
"alnum_prop": 0.6138540899042004,
"repo_name": "mrkm4ntr/incubator-airflow",
"id": "09552917cc7761d48914e147b500c42445a64f29",
"size": "3501",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/providers/apache/hive/sensors/metastore_partition.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "22581"
},
{
"name": "Dockerfile",
"bytes": "31475"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "221101"
},
{
"name": "JavaScript",
"bytes": "32643"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "14407542"
},
{
"name": "Shell",
"bytes": "541811"
}
],
"symlink_target": ""
}
|
import time
import unittest
from .basetest import BasetTest, measure, flask_profiler
def doWait(seconds, **kwargs):
time.sleep(seconds)
return seconds
class MeasurementTest(BasetTest):
def setUp(self):
flask_profiler.collection.truncate()
def test_01_returnValue(self):
wrapped = measure(doWait, "doWait", "call", context=None)
waitSeconds = 1
result = wrapped(waitSeconds)
self.assertEqual(waitSeconds, result)
def test_02_measurement(self):
wrapped = measure(doWait, "doWait", "call", context=None)
waitSeconds = 2
result = wrapped(waitSeconds)
m = list(flask_profiler.collection.filter())[0]
self.assertEqual(m["name"], "doWait")
self.assertEqual(float(m["elapsed"]) >= waitSeconds, True)
def test_03_measurement_params(self):
context = {"token": "x"}
name = "name_of_func"
method = "invoke"
wrapped = measure(doWait, name, method, context=context)
waitSeconds = 1
kwargs = {"k1": "kval1", "k2": "kval2"}
result = wrapped(waitSeconds, **kwargs)
m = list(flask_profiler.collection.filter())[0]
self.assertEqual(m["name"], name)
self.assertEqual(m["method"], method)
self.assertEqual(m["args"][0], waitSeconds)
self.assertEqual(m["kwargs"], kwargs)
self.assertEqual(m["context"], context)
self.assertTrue(float(m["elapsed"]) >= waitSeconds)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "7414820beb2d5a0ce16fbc4fbbc8c6cb",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 66,
"avg_line_length": 31.102040816326532,
"alnum_prop": 0.6194225721784777,
"repo_name": "muatik/flask-profiler",
"id": "20009a712af9805ee898f27d71494a298ecb41a7",
"size": "1547",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_measurement.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "58310"
}
],
"symlink_target": ""
}
|
class MandrelException(Exception):
pass
class MissingBootstrapException(MandrelException):
pass
class UnknownConfigurationException(MandrelException):
pass
|
{
"content_hash": "2d7177ed466f4969150a8b76dbc44b4d",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 54,
"avg_line_length": 21.25,
"alnum_prop": 0.8117647058823529,
"repo_name": "ethanrowe/python-mandrel",
"id": "1a0fcd5543d91eeb6c2b86ae1f745e8b62a47441",
"size": "170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mandrel/exception.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "88308"
}
],
"symlink_target": ""
}
|
"""
sentry.utils.javascript
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import six
from collections import defaultdict
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.utils.html import escape
from sentry.app import env, tsdb
from sentry.constants import TAG_LABELS
from sentry.models import (
Group, GroupBookmark, GroupMeta, GroupTagKey, GroupSeen, GroupStatus,
ProjectOption
)
from sentry.templatetags.sentry_plugins import get_legacy_annotations
from sentry.utils import json
from sentry.utils.db import attach_foreignkey
from sentry.utils.http import absolute_uri
transformers = {}
def has_sourcemap(event):
if event.platform != 'javascript':
return False
data = event.data
if 'sentry.interfaces.Exception' not in data:
return False
exception = data['sentry.interfaces.Exception']
for value in exception['values']:
stacktrace = value.get('stacktrace', {})
for frame in stacktrace.get('frames', []):
if 'sourcemap' in frame.get('data', {}):
return True
return False
def transform(objects, request=None):
if request is None:
request = getattr(env, 'request', None)
if not objects:
return objects
elif not isinstance(objects, (list, tuple)):
return transform([objects], request=request)[0]
# elif isinstance(obj, dict):
# return dict((k, transform(v, request=request)) for k, v in six.iteritems(obj))
t = transformers.get(type(objects[0]))
if t:
t.attach_metadata(objects, request=request)
return [t(o, request=request) for o in objects]
return objects
def to_json(obj, request=None):
result = transform(obj, request=request)
return json.dumps_htmlsafe(result)
def register(type):
def wrapped(cls):
transformers[type] = cls()
return cls
return wrapped
class Transformer(object):
def __call__(self, obj, request=None):
return self.transform(obj, request)
def attach_metadata(self, objects, request=None):
pass
def transform(self, obj, request=None):
return {}
@register(Group)
class GroupTransformer(Transformer):
def attach_metadata(self, objects, request=None):
from sentry.templatetags.sentry_plugins import handle_before_events
attach_foreignkey(objects, Group.project, ['team'])
GroupMeta.objects.populate_cache(objects)
if request and objects:
handle_before_events(request, objects)
if request and request.user.is_authenticated() and objects:
bookmarks = set(GroupBookmark.objects.filter(
user=request.user,
group__in=objects,
).values_list('group_id', flat=True))
seen_groups = dict(GroupSeen.objects.filter(
user=request.user,
group__in=objects,
).values_list('group_id', 'last_seen'))
else:
bookmarks = set()
seen_groups = {}
if objects:
end = timezone.now()
start = end - timedelta(days=1)
historical_data = tsdb.get_range(
model=tsdb.models.group,
keys=[g.id for g in objects],
start=start,
end=end,
)
else:
historical_data = {}
project_list = set(o.project for o in objects)
tag_keys = set(['sentry:user'])
project_annotations = {}
for project in project_list:
enabled_annotations = ProjectOption.objects.get_value(
project, 'annotations', ['sentry:user'])
project_annotations[project] = enabled_annotations
tag_keys.update(enabled_annotations)
annotation_counts = defaultdict(dict)
annotation_results = GroupTagKey.objects.filter(
group__in=objects,
key__in=tag_keys,
).values_list('key', 'group', 'values_seen')
for key, group_id, values_seen in annotation_results:
annotation_counts[key][group_id] = values_seen
for g in objects:
g.is_bookmarked = g.pk in bookmarks
g.historical_data = [x[1] for x in historical_data.get(g.id, [])]
active_date = g.active_at or g.first_seen
g.has_seen = seen_groups.get(g.id, active_date) > active_date
g.annotations = []
for key in sorted(tag_keys):
                if key in project_annotations[g.project]:
label = TAG_LABELS.get(key, key.replace('_', ' ')).lower() + 's'
try:
value = annotation_counts[key].get(g.id, 0)
except KeyError:
value = 0
g.annotations.append({
'label': label,
'count': value,
})
def localize_datetime(self, dt, request=None):
if not request:
return dt.isoformat()
elif getattr(request, 'timezone', None):
return dt.astimezone(request.timezone).isoformat()
return dt.isoformat()
def transform(self, obj, request=None):
status = obj.get_status()
if status == GroupStatus.RESOLVED:
status_label = 'resolved'
elif status == GroupStatus.IGNORED:
status_label = 'ignored'
else:
status_label = 'unresolved'
version = obj.last_seen
if obj.resolved_at:
version = max(obj.resolved_at, obj.last_seen)
version = int(version.strftime('%s'))
d = {
'id': six.text_type(obj.id),
'count': six.text_type(obj.times_seen),
'title': escape(obj.title),
'message': escape(obj.get_legacy_message()),
'level': obj.level,
'levelName': escape(obj.get_level_display()),
'logger': escape(obj.logger),
'permalink': absolute_uri(reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])),
'firstSeen': self.localize_datetime(obj.first_seen, request=request),
'lastSeen': self.localize_datetime(obj.last_seen, request=request),
'canResolve': request and request.user.is_authenticated(),
'status': status_label,
'isResolved': obj.get_status() == GroupStatus.RESOLVED,
'isPublic': obj.is_public,
'score': getattr(obj, 'sort_value', 0),
'project': {
'name': escape(obj.project.name),
'slug': obj.project.slug,
},
'version': version,
}
if hasattr(obj, 'is_bookmarked'):
d['isBookmarked'] = obj.is_bookmarked
if hasattr(obj, 'has_seen'):
d['hasSeen'] = obj.has_seen
if hasattr(obj, 'historical_data'):
d['historicalData'] = obj.historical_data
if hasattr(obj, 'annotations'):
d['annotations'] = obj.annotations
# TODO(dcramer): these aren't tags, and annotations aren't annotations
if request:
d['tags'] = get_legacy_annotations(obj, request)
return d
|
{
"content_hash": "5f6d930fd607edc657bd6cb64f122bf0",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 119,
"avg_line_length": 33.9954128440367,
"alnum_prop": 0.5846714343543381,
"repo_name": "zenefits/sentry",
"id": "81d6f19c6c4ff633154c2bb459f42640ea6bfbf6",
"size": "7411",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/sentry/utils/javascript.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "249557"
},
{
"name": "HTML",
"bytes": "293019"
},
{
"name": "JavaScript",
"bytes": "975797"
},
{
"name": "Lua",
"bytes": "22367"
},
{
"name": "Makefile",
"bytes": "5959"
},
{
"name": "Python",
"bytes": "12550461"
},
{
"name": "Ruby",
"bytes": "4026"
},
{
"name": "Shell",
"bytes": "793"
}
],
"symlink_target": ""
}
|
"""Implements core PredicateResult specializations when operating on paths."""
# pylint: disable=too-many-arguments
from .path_value import (
PATH_SEP,
PathValue)
from .predicate import (
CloneableWithNewSource,
PredicateResult)
class PathResult(PredicateResult, CloneableWithNewSource):
"""Common base class for results whose subject is a field within a composite.
Attributes:
target_path: A '/'-delimited path from the |source| to the desired field.
source: The outermost containing object that the |path| is relative to.
path_value: An actual path value.
"""
@property
def target_path(self):
"""The desired path."""
return self.__target_path
@property
def path_value(self):
"""The PathValue that we found, or None.
This might not have the full target_path, but will be a subset.
"""
return self.__path_value
@property
def source(self):
"""The source JSON object that we are extracting the path from."""
return self.__source
def export_to_json_snapshot(self, snapshot, entity):
"""Implements JsonSnapshotableEntity interface."""
builder = snapshot.edge_builder
builder.make_control(entity, 'Target Path', self.__target_path)
builder.make_input(entity, 'Source', self.__source, format='json')
builder.make_output(entity, 'PathValue', self.__path_value)
super(PathResult, self).export_to_json_snapshot(snapshot, entity)
def clone_with_source(self, source, base_target_path, base_value_path):
"""Implements CloneableWithNewSource interface."""
target_path = (base_target_path if not self.__target_path
else PATH_SEP.join([base_target_path, self.__target_path]))
value_path = (base_value_path if not self.__path_value.path
else PATH_SEP.join([base_target_path,
self.__path_value.path]))
path_value = PathValue(value_path, self.__path_value.value)
return self._do_clone_with_source(source, target_path, path_value)
def _do_clone_with_source(self, source, final_path, final_path_value):
return self.__class__(
source=source, target_path=final_path, path_value=final_path_value,
valid=self.valid, comment=self.comment, cause=self.cause)
def __init__(self, valid, source, target_path, path_value, **kwargs):
super(PathResult, self).__init__(valid, **kwargs)
self.__source = source
self.__target_path = target_path
self.__path_value = (PathValue(target_path, source)
if path_value is None else path_value)
def __eq__(self, result):
return (super(PathResult, self).__eq__(result)
and self.__target_path == result.target_path
and self.__source == result.source
and self.__path_value == result.path_value)
def __add_outer_path(self, base_path):
"""Helper function to add outer context to our path when cloning it."""
if not base_path:
return self.__target_path
if not self.__target_path:
return base_path
return '{0}/{1}'.format(base_path, self.__target_path)
def __repr__(self):
"""Specializes interface."""
return '{4} source={0} target_path={1} path_value={2} valid={3}'.format(
self.source, self.target_path, self.path_value, self.valid,
self.__class__.__name__)
class PathValueResult(PathResult):
  """A PathResult referencing a particular value."""
  @property
  def pred(self):
    """The predicate used to filter the value, if any."""
    return self.__pred
  def __init__(self, source, target_path, path_value, **kwargs):
    # pylint: disable=redefined-builtin
    """Constructor.
    Args:
      source: [obj] The original JSON object path_value is relative to.
        This can be none if the path_value is the root path.
      target_path: [string] The desired path (relative to source) that
        we were looking for. NOTE: This is probably path_value.path.
      path_value: [PathValue] The path value the filter was applied to.
      valid: [bool] Whether the PredicateResult indicates success.
      pred: [ValuePredicate] The predicate applied as the filter, if any.
      See base class (PathResult) for additional kwargs.
    """
    valid = kwargs.pop('valid', False)
    pred = kwargs.pop('pred', None)
    super(PathValueResult, self).__init__(
        valid=valid, source=source,
        target_path=target_path, path_value=path_value, **kwargs)
    self.__pred = pred
  def __eq__(self, result):
    return (super(PathValueResult, self).__eq__(result)
            and self.__pred == result.pred)
  def __repr__(self):
    """Specializes interface."""
    return '{0} pred={1}'.format(super(PathValueResult, self).__repr__(),
                                 self.__pred)
  def _do_clone_with_source(self, source, final_path, final_path_value):
    """Specializes interface to pass through filter."""
    return self.__class__(
        source=source, target_path=final_path, path_value=final_path_value,
        valid=self.valid, pred=self.__pred,
        comment=self.comment, cause=self.cause)
  def export_to_json_snapshot(self, snapshot, entity):
    """Implements JsonSnapshotableEntity interface."""
    builder = snapshot.edge_builder
    if self.__pred:
      builder.make_control(entity, 'Filter', self.__pred)
    super(PathValueResult, self).export_to_json_snapshot(snapshot, entity)
class MissingPathError(PathResult):
  """A PathResult indicating the desired path did not exist."""
  def __init__(self, source, target_path, path_value=None, **kwargs):
    """Constructor.
    Args:
      source: [obj] The original JSON object path_value is relative to.
        This can be none if the path_value is the root path.
      target_path: [string] The desired path (relative to source) that
        we were looking for. NOTE: This is probably path_value.path.
      path_value: [PathValue] The path value as far along as we could go.
      valid: [bool] Whether the PredicateResult indicates success.
      See base class (PathResult) for additional kwargs.
    """
    valid = kwargs.pop('valid', False)
    super(MissingPathError, self).__init__(
        valid=valid, source=source, target_path=target_path,
        path_value=path_value, **kwargs)
class UnexpectedPathError(PathResult):
  """A PathResult indicating an object contained an unexpected attribute."""
  def __init__(self, source, target_path, path_value, **kwargs):
    """Constructor.
    Args:
      source: [dict] The original JSON object path_value is relative to.
        This can be none if the path_value is the root path.
      target_path: [string] The unexpected attribute path (relative to source)
        that we found. NOTE: This is probably path_value.path.
      path_value: [PathValue] The path value we found.
      valid: [bool] Whether the PredicateResult indicates success.
      See base class (PathResult) for additional kwargs.
    """
    valid = kwargs.pop('valid', False)
    super(UnexpectedPathError, self).__init__(
        valid=valid, source=source, target_path=target_path,
        path_value=path_value, **kwargs)
class TypeMismatchError(PathResult):
  """A PathResult indicating the field was not the expected type."""
  @property
  def expect_type(self):
    """The type we expected."""
    return self.__expect_type
  @property
  def got_type(self):
    """The value type we found."""
    return self.__got_type
  def _do_clone_with_source(self, source, final_path, final_path_value):
    """Specializes interface to pass through types."""
    return self.__class__(
        expect_type=self.__expect_type, got_type=self.__got_type,
        source=source, target_path=final_path, path_value=final_path_value,
        valid=self.valid,
        comment=self.comment, cause=self.cause)
  def __init__(self, expect_type, got_type,
               source, target_path=None, path_value=None, **kwargs):
    """Constructor.
    Args:
      expect_type: [type] The type we wanted.
      got_type: [type] The type we actually found.
      source: [obj] The original JSON object path_value is relative to.
        This can be none if the path_value is the root path.
      target_path: [string] The desired path (relative to source) that
        we were looking for. NOTE: This is probably path_value.path.
      path_value: [PathValue] The path value as far along as we could go.
      valid: [bool] Whether the PredicateResult indicates success.
      See base class (PathResult) for additional kwargs.
    """
    valid = kwargs.pop('valid', False)
    super(TypeMismatchError, self).__init__(
        valid=valid, source=source, target_path=target_path,
        path_value=path_value, **kwargs)
    self.__expect_type = expect_type
    self.__got_type = got_type
  def __str__(self):
    """Specializes interface."""
    return '{0} is not a {1} for field="{2}" trace={3}.'.format(
        self.__got_type, self.__expect_type, self.target_path, self.path_value)
  def __repr__(self):
    """Specializes interface."""
    return (super(TypeMismatchError, self).__repr__()
            + ' expect_type={0} got_type={1}'.format(
                self.expect_type, self.got_type))
  def __eq__(self, error):
    """Specializes interface."""
    return (super(TypeMismatchError, self).__eq__(error)
            and self.__got_type == error.got_type
            and self.__expect_type == error.expect_type)
class IndexBoundsError(PathResult):
  """A PathResult indicating an array index out of bounds."""
  @property
  def index(self):
    """The index we asked for."""
    return self.__index
  def __init__(self, index, source, target_path, path_value, **kwargs):
    """Constructor.
    Args:
      index: [int] The index we attempted to access.
      source: [obj] The original JSON object path_value is relative to.
        This can be none if the path_value is the root path.
      target_path: [string] The desired path (relative to source) that
        we were looking for.
      path_value: [PathValue] The path value we attempted to index into.
      valid: [bool] Whether the PredicateResult indicates success.
      See base class (PathResult) for additional kwargs.
    """
    valid = kwargs.pop('valid', False)
    super(IndexBoundsError, self).__init__(
        valid=valid, source=source, target_path=target_path,
        path_value=path_value, **kwargs)
    if not isinstance(path_value.value, list):
      raise TypeError('{0} is not a list'.format(path_value.value.__class__))
    self.__index = index
    self.__max = len(path_value.value)
  def __str__(self):
    """Specializes interface."""
    return '{0} is not in the range 0..{1} for path_value={2}.'.format(
        self.__index, self.__max, self.path_value)
  def __eq__(self, error):
    """Specializes interface."""
    return (super(IndexBoundsError, self).__eq__(error)
            and self.__index == error.index)
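# Illustrative sketch (not part of the original module): how the result
# classes above are typically constructed. The literal JSON values are made
# up, and PathValue(path, value) is assumed as it is used above.
def _example_path_results():
  """Build a successful and a failed path result for a small JSON object."""
  found = PathValueResult(source={'a': {'b': 3}}, target_path='a/b',
                          path_value=PathValue('a/b', 3), valid=True)
  missing = MissingPathError(source={'a': {}}, target_path='a/b',
                             path_value=PathValue('a', {}))
  return found.valid and not missing.valid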
|
{
"content_hash": "a1ec0fe3852ec920405930963d805f65",
"timestamp": "",
"source": "github",
"line_count": 293,
"max_line_length": 79,
"avg_line_length": 37.25255972696246,
"alnum_prop": 0.6520384791571232,
"repo_name": "google/citest",
"id": "ff5be42a8dab6ffa9781dd70e4511e64b80c10a7",
"size": "11513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "citest/json_predicate/path_result.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "993608"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import sys
from functools import total_ordering
import re
import itertools as it
try:
izip = it.izip
except AttributeError:
izip = zip
basestring = str
old_snpeff_effect_so = {'CDS': 'coding_sequence_variant',
'CODON_CHANGE': 'coding_sequence_variant',
'CODON_CHANGE_PLUS_CODON_DELETION': 'disruptive_inframe_deletion',
'CODON_CHANGE_PLUS_CODON_INSERTION': 'disruptive_inframe_insertion',
'CODON_DELETION': 'inframe_deletion',
'CODON_INSERTION': 'inframe_insertion',
'DOWNSTREAM': 'downstream_gene_variant',
'EXON': 'exon_variant',
'EXON_DELETED': 'exon_loss_variant',
'FRAME_SHIFT': 'frameshift_variant',
'GENE': 'gene_variant',
'INTERGENIC': 'intergenic_variant',
'INTERGENIC_REGION': 'intergenic_region',
'INTERGENIC_CONSERVED': 'conserved_intergenic_variant',
'INTRAGENIC': 'intragenic_variant',
'INTRON': 'intron_variant',
'INTRON_CONSERVED': 'conserved_intron_variant',
'NON_SYNONYMOUS_CODING': 'missense_variant',
'RARE_AMINO_ACID': 'rare_amino_acid_variant',
'SPLICE_SITE_ACCEPTOR': 'splice_acceptor_variant',
'SPLICE_SITE_DONOR': 'splice_donor_variant',
'SPLICE_SITE_REGION': 'splice_region_variant',
#'START_GAINED': '5_prime_UTR_premature_start_codon_gain_variant',
'START_GAINED': '5_prime_UTR_premature_start_codon_variant',
'START_LOST': 'start_lost',
'STOP_GAINED': 'stop_gained',
'STOP_LOST': 'stop_lost',
'SYNONYMOUS_CODING': 'synonymous_variant',
'SYNONYMOUS_START': 'start_retained_variant',
'SYNONYMOUS_STOP': 'stop_retained_variant',
'TRANSCRIPT': 'transcript_variant',
'UPSTREAM': 'upstream_gene_variant',
'UTR_3_DELETED': '3_prime_UTR_truncation_+_exon_loss_variant',
'UTR_3_PRIME': '3_prime_UTR_variant',
'UTR_5_DELETED': '5_prime_UTR_truncation_+_exon_loss_variant',
'UTR_5_PRIME': '5_prime_UTR_variant',
'NON_SYNONYMOUS_START': 'initiator_codon_variant',
'NONE': 'None',
'CHROMOSOME_LARGE_DELETION': 'chromosomal_deletion'}
old_snpeff_lookup = {'CDS': 'LOW',
'CHROMOSOME_LARGE_DELETION': 'HIGH',
'CODON_CHANGE': 'MED',
'CODON_CHANGE_PLUS_CODON_DELETION': 'MED',
'CODON_CHANGE_PLUS_CODON_INSERTION': 'MED',
'CODON_DELETION': 'MED',
'CODON_INSERTION': 'MED',
'DOWNSTREAM': 'LOW',
'EXON': 'LOW',
'EXON_DELETED': 'HIGH',
'FRAME_SHIFT': 'HIGH',
'GENE': 'LOW',
'INTERGENIC': 'LOW',
'INTERGENIC_CONSERVED': 'LOW',
'INTRAGENIC': 'LOW',
'INTRON': 'LOW',
'INTRON_CONSERVED': 'LOW',
'NONE': 'LOW',
'NON_SYNONYMOUS_CODING': 'MED',
'NON_SYNONYMOUS_START': 'HIGH',
'RARE_AMINO_ACID': 'HIGH',
'SPLICE_SITE_ACCEPTOR': 'HIGH',
'SPLICE_SITE_DONOR': 'HIGH',
'SPLICE_SITE_REGION': 'MED',
'START_GAINED': 'LOW',
'START_LOST': 'HIGH',
'STOP_GAINED': 'HIGH',
'STOP_LOST': 'HIGH',
'SYNONYMOUS_CODING': 'LOW',
'SYNONYMOUS_START': 'LOW',
'SYNONYMOUS_STOP': 'LOW',
'TRANSCRIPT': 'LOW',
'UPSTREAM': 'LOW',
'UTR_3_DELETED': 'MED',
'UTR_3_PRIME': 'LOW',
'UTR_5_DELETED': 'MED',
'UTR_5_PRIME': 'LOW'}
# http://uswest.ensembl.org/info/genome/variation/predicted_data.html#consequences
IMPACT_SEVERITY = [
('chromosome_number_variation', 'HIGH'), # snpEff
('transcript_ablation', 'HIGH'), # VEP
('exon_loss_variant', 'HIGH'), # snpEff
('exon_loss', 'HIGH'), # snpEff
('rare_amino_acid_variant', 'HIGH'),
('protein_protein_contact', 'HIGH'), # snpEff
('structural_interaction_variant', 'HIGH'), #snpEff
('feature_fusion', 'HIGH'), #snpEff
('bidirectional_gene_fusion', 'HIGH'), #snpEff
('gene_fusion', 'HIGH'), #snpEff
    ('feature_ablation', 'HIGH'), #snpEff, structural variant
('splice_acceptor_variant', 'HIGH'), # VEP
('splice_donor_variant', 'HIGH'), # VEP
('start_retained_variant', 'HIGH'), # new VEP
('stop_gained', 'HIGH'), # VEP
('frameshift_variant', 'HIGH'), # VEP
('stop_lost', 'HIGH'), # VEP
('start_lost', 'HIGH'), # VEP
('transcript_amplification', 'HIGH'), # VEP
('disruptive_inframe_deletion', 'MED'), #snpEff
('conservative_inframe_deletion', 'MED'), #snpEff
('disruptive_inframe_insertion', 'MED'), #snpEff
('conservative_inframe_insertion', 'MED'), #snpEff
('duplication', 'MED'), # snpEff, structural variant
('inversion', 'MED'), # snpEff, structural variant
('exon_region', 'MED'), # snpEff, structural variant
('inframe_insertion', 'MED'), # VEP
('inframe_deletion', 'MED'), # VEP
('missense_variant', 'MED'), # VEP
('protein_altering_variant', 'MED'), # VEP
('initiator_codon_variant', 'MED'), # snpEff
('regulatory_region_ablation', 'MED'), # VEP
('5_prime_UTR_truncation', 'MED'), # found in snpEff
('splice_region_variant', 'MED'), # VEP changed to have medium priority
('3_prime_UTR_truncation', 'LOW'), # found in snpEff
('non_canonical_start_codon', 'LOW'), # found in snpEff
('synonymous_variant', 'LOW'), # VEP
('coding_sequence_variant', 'LOW'), # VEP
('incomplete_terminal_codon_variant', 'LOW'), # VEP
('stop_retained_variant', 'LOW'), # VEP
('mature_miRNA_variant', 'LOW'), # VEP
('5_prime_UTR_premature_start_codon_variant', 'LOW'), # snpEff
('5_prime_UTR_premature_start_codon_gain_variant', 'LOW'), #snpEff
('5_prime_UTR_variant', 'LOW'), # VEP
('3_prime_UTR_variant', 'LOW'), # VEP
('non_coding_transcript_exon_variant', 'LOW'), # VEP
('conserved_intron_variant', 'LOW'), # snpEff
('intron_variant', 'LOW'), # VEP
('exon_variant', 'LOW'), # snpEff
('gene_variant', 'LOW'), # snpEff
('NMD_transcript_variant', 'LOW'), # VEP
('non_coding_transcript_variant', 'LOW'), # VEP
('upstream_gene_variant', 'LOW'), # VEP
('downstream_gene_variant', 'LOW'), # VEP
('TFBS_ablation', 'LOW'), # VEP
('TFBS_amplification', 'LOW'), # VEP
('TF_binding_site_variant', 'LOW'), # VEP
('regulatory_region_amplification', 'LOW'), # VEP
('feature_elongation', 'LOW'), # VEP
('miRNA', 'LOW'), # snpEff
('transcript_variant', 'LOW'), # snpEff
('start_retained', 'LOW'), # snpEff
('regulatory_region_variant', 'LOW'), # VEP
('feature_truncation', 'LOW'), # VEP
('non_coding_exon_variant', 'LOW'),
('nc_transcript_variant', 'LOW'),
('conserved_intergenic_variant', 'LOW'), # snpEff
('intergenic_variant', 'LOW'), # VEP
('intergenic_region', 'LOW'), # snpEff
('intragenic_variant', 'LOW'), # snpEff
('non_coding_transcript_exon_variant', 'LOW'), # snpEff
('non_coding_transcript_variant', 'LOW'), # snpEff
('transcript', 'LOW'), # ? snpEff older
('sequence_feature', 'LOW'), # snpEff older
('non_coding', 'LOW'), # BCSQ
('?', 'UNKNOWN'), # some VEP annotations have '?'
('', 'UNKNOWN'), # some VEP annotations have ''
('UNKNOWN', 'UNKNOWN'), # some snpEFF annotations have 'unknown'
]
# bcftools doesn't add _variant on the end.
for (csq, imp) in list(IMPACT_SEVERITY[::-1]):
if csq.endswith('_variant'):
for i, (a, b) in enumerate(IMPACT_SEVERITY):
if (a, b) == (csq, imp):
IMPACT_SEVERITY.insert(i, (csq[:-8].lower(), imp))
break
IMPACT_SEVERITY_ORDER = dict((x[0], i) for i, x in enumerate(IMPACT_SEVERITY[::-1]))
IMPACT_SEVERITY = dict(IMPACT_SEVERITY)
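# Illustrative sketch (not part of the original module): how the two tables
# built above are used. Both keys are real entries from the list; a larger
# IMPACT_SEVERITY_ORDER value means a more severe consequence.
def _example_severity_lookup():
    assert IMPACT_SEVERITY['missense_variant'] == 'MED'
    assert IMPACT_SEVERITY['intron_variant'] == 'LOW'
    return (IMPACT_SEVERITY_ORDER['missense_variant']
            > IMPACT_SEVERITY_ORDER['intron_variant'])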
EXONIC_IMPACTS = set(["stop_gained",
"exon_variant",
"stop_lost",
"frameshift_variant",
"initiator_codon_variant",
"inframe_deletion",
"inframe_insertion",
"missense_variant",
"protein_altering_variant",
"incomplete_terminal_codon_variant",
"stop_retained_variant",
"5_prime_UTR_premature_start_codon_variant",
"synonymous_variant",
"coding_sequence_variant",
"5_prime_UTR_variant",
"3_prime_UTR_variant",
"transcript_ablation",
"transcript_amplification",
"feature_elongation",
"feature_truncation"])
for im in list(EXONIC_IMPACTS):
if im.endswith("_variant"):
EXONIC_IMPACTS.add(im[:-8])
EXONIC_IMPACTS = frozenset(EXONIC_IMPACTS)
def snpeff_aa_length(self):
try:
v = self.effects['AA.pos / AA.length']
if v.strip():
return int(v.split("/")[1].strip())
except:
try:
return int(self.effects['Amino_Acid_length'])
except:
return None
def vep_aa_length(self):
if not 'Protein_position' in self.effects:
return None
try:
return int(self.effects['Protein_position'])
except ValueError:
try:
return self.effects['Protein_position']
except KeyError:
return None
def vep_polyphen_pred(self):
try:
return self.effects['PolyPhen'].split('(')[0]
except (KeyError, IndexError):
return None
def vep_polyphen_score(self):
try:
return float(self.effects['PolyPhen'].split('(')[1][:-1])
except (KeyError, IndexError):
return None
def vep_sift_score(self):
try:
return float(self.effects['SIFT'].split("(")[1][:-1])
except (IndexError, KeyError):
return None
def vep_sift_pred(self):
try:
return self.effects['SIFT'].split("(")[0]
except (IndexError, KeyError):
return None
snpeff_lookup = {
'transcript': ['Feature_ID', 'Transcript_ID', 'Transcript'],
'gene': 'Gene_Name',
'exon': ['Rank', 'Exon', 'Exon_Rank'],
'codon_change': ['HGVS.c', 'Codon_Change'],
'aa_change': ['HGVS.p', 'Amino_Acid_Change', 'Amino_Acid_change'],
'aa_length': snpeff_aa_length,
'biotype': ['Transcript_BioType', 'Gene_BioType'],
'alt': 'Allele',
}
bcft_lookup = {}
vep_lookup = {
'transcript': 'Feature',
'gene': ['SYMBOL', 'HGNC', 'Gene'],
'ensembl_gene_id': 'Gene',
'exon': 'EXON',
'codon_change': 'Codons',
'aa_change': 'Amino_acids',
'aa_length': vep_aa_length,
'biotype': 'BIOTYPE',
'polyphen_pred': vep_polyphen_pred,
'polyphen_score': vep_polyphen_score,
'sift_pred': vep_sift_pred,
'sift_score': vep_sift_score,
'alt': 'ALLELE',
}
# lookup here instead of returning ''.
defaults = {'gene': None}
@total_ordering
class Effect(object):
_top_consequence = None
lookup = None
def __init__(self, key, effect_dict, keys, prioritize_canonical):
        raise NotImplementedError
@classmethod
    def new(cls, key, effect_dict, keys):
lookup = {"CSQ": VEP, "ANN": SnpEff, "EFF": OldSnpEff, "BCSQ": BCFT}
assert key in lookup
return lookup[key](effect_dict, keys)
@property
def is_exonic(self):
return self.top_consequence in EXONIC_IMPACTS
def unused(self):
return []
@property
def top_consequence(self):
# sort by order and return the top
if self._top_consequence is None:
self._top_consequence = sorted([(IMPACT_SEVERITY_ORDER.get(c, 0), c) for c in
self.consequences], reverse=True)[0][1]
return self._top_consequence
@property
def so(self):
return self.top_consequence
@property
def is_coding(self):
return self.biotype == "protein_coding" and self.is_exonic and ("_UTR_" not in self.top_consequence)
@property
def is_splicing(self):
return "splice" in self.top_consequence
@property
def is_lof(self):
return self.biotype == "protein_coding" and self.impact_severity == "HIGH"
def __le__(self, other):
# we sort so that the effects with the highest impacts come last
# (highest) and so, we:
# + return true if self has lower impact than other.
# + return false if self has higher impact than other.
self_has_lower_impact = True
self_has_higher_impact = False
if self.prioritize_canonical:
scanon, ocanon = self.is_canonical, other.is_canonical
if scanon and not ocanon:
return self_has_higher_impact
elif ocanon and not scanon:
return self_has_lower_impact
spg = self.is_pseudogene
opg = other.is_pseudogene
if spg and not opg:
return self_has_lower_impact
elif opg and not spg:
return self_has_higher_impact
sc, oc = self.coding, other.coding
if sc and not oc:
            # other is not coding. is it splicing?
# if other is splicing, we have lower impact.
if not (self.is_splicing or other.is_splicing):
return self_has_higher_impact
elif oc and not sc:
            # self is not coding. is it splicing?
# if self is splicing it has higher impact
if not (self.is_splicing or other.is_splicing):
return self_has_lower_impact
if self.severity != other.severity:
return self.severity <= other.severity
if self.biotype == "protein_coding" and not other.biotype == "protein_coding":
return False
elif other.biotype == "protein_coding" and not self.biotype == "protein_coding":
return True
if self.biotype == "processed_transcript" and not other.biotype == "processed_transcript":
return False
elif other.biotype == "processed_transcript" and not self.biotype == "processed_transcript":
return True
        # sift higher == more damaging
if (self.sift_value or 10000) < (other.sift_value or 10000):
return True
# polyphen, lower == more damaging
if (self.polyphen_value or -10000) > (other.polyphen_value or -10000):
return True
return max(IMPACT_SEVERITY_ORDER.get(c, 0) for c in self.consequences) <= \
max(IMPACT_SEVERITY_ORDER.get(co, 0) for co in other.consequences)
@classmethod
def top_severity(cls, effects):
for i, e in enumerate(effects):
if isinstance(e, basestring):
effects[i] = cls(e)
if len(effects) == 0:
return None
if len(effects) == 1:
return effects[0]
effects = sorted(effects)
if effects[-1] > effects[-2]:
return effects[-1]
ret = [effects[-1], effects[-2]]
for i in range(-3, -(len(effects) - 1), -1):
if effects[-1] > effects[i]: break
ret.append(effects[i])
return ret
def __getitem__(self, key):
return self.effects[key]
def __eq__(self, other):
if not isinstance(other, Effect): return False
return self.effect_string == other.effect_string
def __str__(self):
return repr(self)
def __repr__(self):
return "%s(%s-%s, %s)" % (self.__class__.__name__, self.gene,
self.consequence, self.impact_severity)
@property
def effect_severity(self):
return self.impact_severity
@property
def lof(self):
return self.biotype == "protein_coding" and self.impact_severity == "HIGH"
@property
def severity(self, lookup={'HIGH': 3, 'MED': 2, 'LOW': 1, 'UNKNOWN': 0}, sev=IMPACT_SEVERITY):
# higher is more severe. used for ordering.
try:
v = max(lookup[sev[csq]] for csq in self.consequences)
except KeyError:
v = 0
if v == 0:
excl = []
for i, c in [(i, c) for i, c in enumerate(self.consequences) if not c in sev]:
sys.stderr.write("WARNING: unknown severity for '%s' with effect '%s'\n" % (self.effect_string, c))
sys.stderr.write("Please report this on github with the effect-string above\n")
excl.append(i)
if len(excl) == len(self.consequences):
v = 1
else:
v = max(lookup[sev[csq]] for i, csq in enumerate(self.consequences) if not i in excl)
return max(v, 1)
@property
def impact_severity(self):
return ['xxx', 'LOW', 'MED', 'HIGH'][self.severity]
@property
def consequence(self):
return self.top_consequence
@property
def is_pseudogene(self): #bool
return self.biotype is not None and 'pseudogene' in self.biotype
def __getattr__(self, k):
v = self.lookup.get(k)
if v is None: return v
if isinstance(v, basestring):
ret = self.effects.get(v)
            # if we didn't get a value, there may be a column-specific
            # value stored in defaults, so we look it up.
if not ret and ret is not False:
return defaults.get(k, '')
return ret
elif isinstance(v, list):
for key in v:
try:
return self.effects[key]
except KeyError:
continue
return defaults.get(k, '')
return v(self)
class BCFT(Effect):
__slots__ = ('effect_string', 'effects', 'biotype', 'gene', 'transcript', 'aa_change', 'dna_change')
keys = "consequence,gene,transcript,biotype,strand,amino_acid_change,dna_change".split(",")
lookup = bcft_lookup
def __init__(self, effect_string, keys=None, prioritize_canonical=False):
if keys is not None: self.keys = keys
self.effect_string = effect_string
self.effects = dict(izip(self.keys, (x.strip().replace(' ', '_') for x in effect_string.split("|"))))
self.biotype = self.effects.get('biotype', None)
self.transcript = self.effects.get('transcript', None)
self.gene = self.effects.get('gene', None)
self.aa_change = self.effects.get('amino_acid_change', None)
self.consequences = self.effects[self.keys[0]].split('&')
def unused(self, used=frozenset("csq|gene|transcript|biotype|strand|aa_change|dna_change".lower().split("|"))):
"""Return fields that were in the VCF but weren't utilized as part of the standard fields supported here."""
return [k for k in self.keys if not k.lower() in used]
@property
def exonic(self):
return self.biotype == "protein_coding" and any(csq in EXONIC_IMPACTS for csq in self.consequences)
@property
def coding(self):
# what about start/stop_gained?
return self.exonic and any(csq[1:] != "_prime_utr" for csq in self.consequences)
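# Illustrative sketch (not part of the original module): parsing a bcftools
# csq (BCSQ) entry with the default key order above. The values are made up.
def _example_bcsq_entry():
    eff = BCFT("missense|GENE1|ENST00000000001|protein_coding|+|123A>G|456A>G")
    # 'missense' resolves via the _variant-less aliases added above.
    return eff.gene, eff.top_consequence, eff.is_exonic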
class VEP(Effect):
__slots__ = ('effect_string', 'effects', 'biotype')
keys = "Consequence|Codons|Amino_acids|Gene|SYMBOL|Feature|EXON|PolyPhen|SIFT|Protein_position|BIOTYPE|CANONICAL".split("|")
lookup = vep_lookup
def __init__(self, effect_string, keys=None, checks=True, prioritize_canonical=False):
if checks:
assert not "," in effect_string
assert not "=" in effect_string
self.effect_string = effect_string
if keys is not None: self.keys = keys
self.effect_string = effect_string
self.effects = dict(izip(self.keys, (x.strip() for x in effect_string.split("|"))))
self.biotype = self.effects.get('BIOTYPE', None)
self.prioritize_canonical = prioritize_canonical
@property
def consequences(self, _cache={}):
try:
# this is a bottleneck so we keep a cache
return _cache[self.effects['Consequence']]
except KeyError:
res = _cache[self.effects['Consequence']] = list(it.chain.from_iterable(x.split("+") for x in self.effects['Consequence'].split('&')))
return res
def unused(self, used=frozenset("Consequence|Codons|Amino_acids|Gene|SYMBOL|Feature|EXON|PolyPhen|SIFT|Protein_position|BIOTYPE|CANONICAL".lower().split("|"))):
"""Return fields that were in the VCF but weren't utilized as part of the standard fields supported here."""
return [k for k in self.keys if not k.lower() in used]
@property
def coding(self):
# what about start/stop_gained?
return self.exonic and any(csq[1:] != "_prime_UTR_variant" for csq in self.consequences)
@property
def exonic(self):
return self.biotype == "protein_coding" and any(csq in EXONIC_IMPACTS for csq in self.consequences)
@property
def is_canonical(self):
return self.effects.get("CANONICAL", "") != ""
class SnpEff(Effect):
lookup = snpeff_lookup
__slots__ = ('effects', 'effect_string', 'biotype')
keys = [x.strip() for x in 'Allele | Annotation | Annotation_Impact | Gene_Name | Gene_ID | Feature_Type | Feature_ID | Transcript_BioType | Rank | HGVS.c | HGVS.p | cDNA.pos / cDNA.length | CDS.pos / CDS.length | AA.pos / AA.length | Distance | ERRORS / WARNINGS / INFO'.split("|")]
def __init__(self, effect_string, keys=None, prioritize_canonical=False):
assert not "," in effect_string
assert not "=" == effect_string[3]
self.effect_string = effect_string
if keys is not None:
self.keys = keys
self.effects = dict(izip(self.keys, (x.strip() for x in effect_string.split("|", len(self.keys)))))
self.biotype = self.effects['Transcript_BioType']
@property
def consequences(self):
return list(it.chain.from_iterable(x.split("+") for x in self.effects['Annotation'].split('&')))
@property
def coding(self):
# TODO: check start_gained and utr
return self.exonic and not "utr" in self.consequence and not "start_gained" in self.consequence
@property
def exonic(self):
csqs = self.consequence
if isinstance(csqs, basestring):
csqs = [csqs]
return any(csq in EXONIC_IMPACTS for csq in csqs) and self.effects['Transcript_BioType'] == 'protein_coding'
class OldSnpEff(SnpEff):
keys = [x.strip() for x in "Effect | Effect_Impact | Functional_Class | Codon_Change | Amino_Acid_change| Amino_Acid_length | Gene_Name | Gene_BioType | Coding | Transcript | Exon | ERRORS | WARNINGS".split("|")]
    def __init__(self, effect_string, keys=None, _patt=re.compile(r"\||\("),
prioritize_canonical=False):
assert not "," in effect_string
assert not "=" in effect_string
effect_string = effect_string.rstrip(")")
self.effect_string = effect_string
if keys is not None:
self.keys = keys
self.effects = dict(izip(self.keys, (x.strip() for x in _patt.split(effect_string))))
@property
def consequence(self):
if '&' in self.effects['Effect']:
return self.effects['Effect'].split('&')
return self.effects['Effect']
@property
def consequences(self):
try:
return [old_snpeff_effect_so.get(c, old_snpeff_effect_so[c.upper()]) for c in it.chain.from_iterable(x.split("+") for x in
self.effects['Effect'].split('&'))]
except KeyError:
return list(it.chain.from_iterable(x.split("+") for x in self.effects['Effect'].split('&')))
@property
def severity(self, lookup={'HIGH': 3, 'MED': 2, 'LOW': 1}):
# higher is more severe. used for ordering.
try:
return max(lookup[old_snpeff_lookup[csq]] for csq in self.consequences)
except KeyError:
try:
#in between
sevs = [IMPACT_SEVERITY.get(csq, "LOW") for csq in self.consequences]
return max(lookup[s] for s in sevs)
except KeyError:
return Effect.severity.fget(self)
@property
def is_lof(self):
return self.biotype == "protein_coding" and self.impact_severity == "HIGH"
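# Illustrative sketch (not part of the original module): parsing an old-style
# snpEff EFF entry. The annotation string is made up for illustration.
def _example_old_snpeff_entry():
    eff = OldSnpEff("NON_SYNONYMOUS_CODING(MODERATE|MISSENSE|gCa/gGa|A123G|300"
                    "|GENE1|protein_coding|CODING|ENST00000000001|4)")
    # The old effect term is mapped to its Sequence Ontology equivalent.
    return eff.gene, eff.consequences, eff.impact_severity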
|
{
"content_hash": "1c3b44b28f6b0824d9ec460f14fcab44",
"timestamp": "",
"source": "github",
"line_count": 642,
"max_line_length": 287,
"avg_line_length": 37.889408099688474,
"alnum_prop": 0.5818705035971223,
"repo_name": "brentp/geneimpacts",
"id": "75f0233c4a46fb8a41d164878c3af3605eeec9b9",
"size": "24325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geneimpacts/effect.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45291"
}
],
"symlink_target": ""
}
|
r"""
This script runs commands on each entry in the API caches.
Syntax: cache.py [-password] [-delete] [-c '...'] [dir ...]
If no directories are specified, it will detect the API caches.
If no command is specified, it will print the filename of all entries.
If only -delete is specified, it will delete all entries.
The option '-c' must be followed by a command in python syntax.
Example commands:
Print the filename of any entry with 'wikidata' in the key:
entry if "wikidata" in entry._uniquedescriptionstr() else None
Customised output if the site code is 'ar':
entry.site.code == "ar" and print("%s" % entry._uniquedescriptionstr())
Or the state of the login
entry.site._loginstatus == LoginStatus.NOT_ATTEMPTED and \
print("%s" % entry._uniquedescriptionstr())
These functions can be used as a command:
has_password(entry)
is_logout(entry)
empty_response(entry)
not_accessed(entry)
incorrect_hash(entry)
older_than_one_day(entry)
recent(entry)
There are helper functions which can be part of a command:
older_than(entry, interval)
newer_than(entry, interval)
"""
#
# (C) Pywikibot team, 2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import print_function, unicode_literals
__version__ = '$Id: f05e9fd4096e2dc06a111a323b8afd4aaea5c540 $'
#
import os
import datetime
import pickle
import hashlib
import pywikibot
from pywikibot.data import api
from pywikibot.site import APISite, DataSite, LoginStatus # noqa
from pywikibot.page import User # noqa
class ParseError(Exception):
"""Error parsing."""
class CacheEntry(api.CachedRequest):
"""A Request cache entry."""
def __init__(self, directory, filename):
"""Constructor."""
self.directory = directory
self.filename = filename
def __str__(self):
return self.filename
def __repr__(self):
return self._cachefile_path()
def _create_file_name(self):
"""Filename of the cached entry."""
return self.filename
def _get_cache_dir(self):
"""Directory of the cached entry."""
return self.directory
def _cachefile_path(self):
return os.path.join(self._get_cache_dir(),
self._create_file_name())
def _load_cache(self):
"""Load the cache entry."""
with open(self._cachefile_path(), 'rb') as f:
self.key, self._data, self._cachetime = pickle.load(f)
return True
def parse_key(self):
"""Parse the key loaded from the cache entry."""
# find the start of the first parameter
start = self.key.index('(')
# find the end of the first object
end = self.key.index(')')
if not end:
raise ParseError('End of Site() keyword not found: %s' % self.key)
if 'Site' not in self.key[0:start]:
raise ParseError('Site() keyword not found at start of key: %s'
% self.key)
site = self.key[0:end + 1]
if site[0:5] == 'Site(':
site = 'APISite(' + site[5:]
username = None
login_status = None
start = end + 1
if self.key[start:start + 5] == 'User(':
# The addition of user to the cache key used:
# repr(User)
# which includes namespaces resulting in:
# User(User:<username>)
# This also accepts User(<username>)
if self.key[start:start + 10] == 'User(User:':
start += 10
else:
start += 5
end = self.key.index(')', start + 5)
if not end:
raise ParseError('End of User() keyword not found: %s'
% self.key)
username = self.key[start:end]
elif self.key[start:start + 12] == 'LoginStatus(':
end = self.key.index(')', start + 12)
if not end:
raise ParseError('End of LoginStatus() keyword not found: %s'
% self.key)
login_status = self.key[start:end + 1]
# If the key does not contain User(..) or LoginStatus(..),
# it must be the old key format which only contains Site and params
elif self.key[start:start + 3] != "[('":
raise ParseError('Keyword after Site not recognised: %s...'
% self.key)
start = end + 1
params = self.key[start:]
self._parsed_key = (site, username, login_status, params)
return self._parsed_key
def _rebuild(self):
"""Reconstruct the original Request from the key."""
if hasattr(self, '_parsed_key'):
(site, username, login_status, params) = self._parsed_key
else:
(site, username, login_status, params) = self.parse_key()
if not site:
raise ParseError('No Site')
self.site = eval(site)
if login_status:
self.site._loginstatus = eval('LoginStatus.%s'
% login_status[12:-1])
if username:
self.site._username = [username, username]
if not params:
raise ParseError('No request params')
self._params = dict(eval(params))
def _delete(self):
"""Delete the cache entry."""
os.remove(self._cachefile_path())
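def _example_inspect(directory, filename):
    """Illustrative sketch, not part of the original script: load a single
    cache file and return the (site, username, login_status, params) tuple
    recovered from its key. The helper name is made up."""
    entry = CacheEntry(directory, filename)
    entry._load_cache()
    return entry.parse_key()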
def process_entries(cache_path, func, use_accesstime=None):
"""
Check the contents of the cache.
This program tries to use file access times to determine
whether cache files are being used.
However file access times are not always usable.
On many modern filesystems, they have been disabled.
On unix, check the filesystem mount options. You may
need to remount with 'strictatime'.
@param use_accesstime: Whether access times should be used.
@type use_accesstime: bool tristate:
- None = detect
        - False = don't use
- True = always use
"""
if not cache_path:
cache_path = os.path.join(pywikibot.config2.base_dir, 'apicache')
if not os.path.exists(cache_path):
pywikibot.error('%s: no such file or directory' % cache_path)
return
if os.path.isdir(cache_path):
filenames = [os.path.join(cache_path, filename)
for filename in os.listdir(cache_path)]
else:
filenames = [cache_path]
for filepath in filenames:
filename = os.path.basename(filepath)
cache_dir = os.path.dirname(filepath)
if use_accesstime is not False:
stinfo = os.stat(filepath)
entry = CacheEntry(cache_dir, filename)
try:
entry._load_cache()
except ValueError as e:
print('Failed loading %s' % entry._cachefile_path())
pywikibot.exception(e, tb=True)
continue
if use_accesstime is None:
stinfo2 = os.stat(filepath)
use_accesstime = stinfo.st_atime != stinfo2.st_atime
if use_accesstime:
# Reset access times to values before loading cache entry.
os.utime(filepath, (stinfo.st_atime, stinfo.st_mtime))
entry.stinfo = stinfo
try:
entry.parse_key()
except ParseError:
pywikibot.error(u'Problems parsing %s with key %s'
% (entry.filename, entry.key))
pywikibot.exception()
continue
try:
entry._rebuild()
except Exception as e:
pywikibot.error(u'Problems loading %s with key %s, %r'
% (entry.filename, entry.key, entry._parsed_key))
pywikibot.exception(e, tb=True)
continue
func(entry)
def has_password(entry):
"""Entry has a password in the entry."""
if 'lgpassword' in entry._uniquedescriptionstr():
return entry
def is_logout(entry):
"""Entry is a logout entry."""
if not entry._data and 'logout' in entry.key:
return entry
def empty_response(entry):
"""Entry has no data."""
if not entry._data and 'logout' not in entry.key:
return entry
def not_accessed(entry):
"""Entry has never been accessed."""
if not hasattr(entry, 'stinfo'):
return
if entry.stinfo.st_atime <= entry.stinfo.st_mtime:
return entry
def incorrect_hash(entry):
    """Entry filename does not match the sha256 of its key."""
    if hashlib.sha256(entry.key.encode('utf-8')).hexdigest() != entry.filename:
        return entry
def older_than(entry, interval):
    """Entry is older than the given interval."""
    if entry._cachetime + interval < datetime.datetime.now():
        return entry
def newer_than(entry, interval):
    """Entry was cached within the given interval."""
    if entry._cachetime + interval >= datetime.datetime.now():
        return entry
def older_than_one_day(entry):
    """Entry is older than one day."""
    if older_than(entry, datetime.timedelta(days=1)):
        return entry
def recent(entry):
    """Entry was cached within the last hour."""
    if newer_than(entry, datetime.timedelta(hours=1)):
        return entry
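def _example_print_old_entries(cache_path=None):
    """Illustrative sketch, not part of the original script: drive
    process_entries() from Python instead of the command line, printing every
    entry older than one day. The helper name is made up."""
    process_entries(cache_path,
                    lambda entry: older_than_one_day(entry) and
                    pywikibot.output(entry))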
def main():
local_args = pywikibot.handleArgs()
cache_paths = None
delete = False
command = None
for arg in local_args:
if command == '':
command = arg
elif arg == '-delete':
delete = True
elif arg == '-password':
command = 'has_password(entry)'
elif arg == '-c':
if command:
pywikibot.error('Only one command may be executed.')
exit(1)
command = ''
else:
if not cache_paths:
cache_paths = [arg]
else:
cache_paths.append(arg)
func = None
if not cache_paths:
cache_paths = ['apicache', 'tests/apicache']
        # Also process the base directory, if it isn't the current directory
if os.path.abspath(os.getcwd()) != pywikibot.config2.base_dir:
cache_paths += [
os.path.join(pywikibot.config2.base_dir, 'apicache')]
        # Also process the user home cache, if it isn't the config directory
if os.path.expanduser('~/.pywikibot') != pywikibot.config2.base_dir:
cache_paths += [
os.path.join(os.path.expanduser('~/.pywikibot'), 'apicache')]
if delete:
action_func = lambda entry: entry._delete()
else:
action_func = lambda entry: pywikibot.output(entry)
if command:
try:
command_func = eval('lambda entry: ' + command)
except:
pywikibot.exception()
pywikibot.error(u'Can not compile command: %s' % command)
exit(1)
func = lambda entry: command_func(entry) and action_func(entry)
else:
func = action_func
for cache_path in cache_paths:
if len(cache_paths) > 1:
pywikibot.output(u'Processing %s' % cache_path)
process_entries(cache_path, func)
if __name__ == '__main__':
main()
|
{
"content_hash": "828bbbbb7a2e64567607a1e955d8d9af",
"timestamp": "",
"source": "github",
"line_count": 363,
"max_line_length": 79,
"avg_line_length": 30.049586776859503,
"alnum_prop": 0.5803080308030804,
"repo_name": "hperala/kontuwikibot",
"id": "fe69f74f59ae097c08b7613fb3d5e11e5a2f4767",
"size": "10951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/maintenance/cache.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "97"
},
{
"name": "C",
"bytes": "137889"
},
{
"name": "C++",
"bytes": "4113"
},
{
"name": "Python",
"bytes": "3758566"
}
],
"symlink_target": ""
}
|
try:
import ast
from ast import iter_child_nodes
except ImportError:
from flake8.util import ast, iter_child_nodes
__version__ = "1.1"
class UnicodeStringLiteral(object):
name = "unicode-string-literal"
version = __version__
forbidden_str_methods = {'format', }
W742 = 'W742 Usage of non-unicode string literal: {node.s!r}'
W743 = u'W743 Unsafe operation on str, should use unicode: {node.s!r}'
def __init__(self, tree, filename):
self._node = tree
@classmethod
def add_options(cls, parser):
parser.add_option('--utter-unicode-string-literals',
action='store_true',
parse_from_config=True,
help="Expect all literal strings to be unicode")
@classmethod
def parse_options(cls, options):
cls.all_strings = options.utter_unicode_string_literals
def run(self):
return self.visit_tree(self._node) if self._node else ()
def visit_tree(self, node):
for error in self.visit_node(node):
yield error
for child in iter_child_nodes(node):
for error in self.visit_tree(child):
yield error
def visit_node(self, node):
if self.all_strings:
if isinstance(node, ast.Str):
if not isinstance(node.s, unicode):
err = self.W742.format(node=node)
yield node.lineno, node.col_offset, err, type(self)
elif isinstance(node, ast.Call):
if isinstance(node.func, ast.Attribute):
if node.func.attr in self.forbidden_str_methods:
if isinstance(node.func.value, ast.Str):
if not isinstance(node.func.value.s, unicode):
err = self.W743.format(node=node.func.value)
yield node.lineno, node.col_offset, err, type(self)
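def _example_run(source=u'greeting = "hello"\n'):
    """Illustrative sketch, not part of the plugin: run the checker over a
    snippet of source. Under Python 2 with --utter-unicode-string-literals the
    plain literal is reported as W742; without the option only .format() calls
    on plain str literals are reported as W743."""
    tree = ast.parse(source)
    checker = UnicodeStringLiteral(tree, '<example>')
    checker.all_strings = True  # normally set by parse_options()
    return list(checker.run())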
|
{
"content_hash": "f73876311f30df95efcb43a359ae3d80",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 36.58490566037736,
"alnum_prop": 0.5740072202166066,
"repo_name": "cogniteev/flake8-unicode-string-literal",
"id": "e9c7640c1a25977fa1b19ddf790fcbedf1ad9fc6",
"size": "1939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unicode_string_literal.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2836"
}
],
"symlink_target": ""
}
|
"""
ui celery tasks
"""
from odl_video import logging
from odl_video.celery import app
from ui import api as ovs_api
from ui.models import VideoFile
log = logging.getLogger(__name__)
@app.task
def post_hls_to_edx(video_file_id):
"""Loads a VideoFile and calls our API method to add it to edX"""
video_file = (
VideoFile.objects.filter(id=video_file_id)
.select_related("video__collection")
.first()
)
if not video_file:
log.error("VideoFile doesn't exist", videofile_id=video_file_id)
return
response_dict = ovs_api.post_hls_to_edx(video_file)
return [
(endpoint.full_api_url, getattr(resp, "status_code", None))
for endpoint, resp in response_dict.items()
]
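def _example_queue(video_file):
    """Illustrative sketch, not part of the original module: queue the task by
    id for an existing VideoFile instance rather than calling it inline."""
    return post_hls_to_edx.delay(video_file.id)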
|
{
"content_hash": "c3e6c8c7569838e6b8cee2ba6e144d46",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 72,
"avg_line_length": 27.62962962962963,
"alnum_prop": 0.6528150134048257,
"repo_name": "mitodl/odl-video-service",
"id": "5e36ab9a72e3fa357b2f2afa95bb1fa604ca1e6a",
"size": "746",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ui/tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "1406"
},
{
"name": "HTML",
"bytes": "17434"
},
{
"name": "JavaScript",
"bytes": "449553"
},
{
"name": "Procfile",
"bytes": "267"
},
{
"name": "Python",
"bytes": "504362"
},
{
"name": "SCSS",
"bytes": "23388"
},
{
"name": "Shell",
"bytes": "7222"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Event.mozillian'
db.add_column(u'main_event', 'mozillian',
self.gf('django.db.models.fields.CharField')(max_length=200, null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Event.mozillian'
db.delete_column(u'main_event', 'mozillian')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.approval': {
'Meta': {'object_name': 'Approval'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'processed_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'main.channel': {
'Meta': {'ordering': "['name']", 'object_name': 'Channel'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'exclude_from_trending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'image_is_banner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Channel']", 'null': 'True'}),
'reverse_order': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'main.curatedgroup': {
'Meta': {'object_name': 'CuratedGroup'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
u'main.event': {
'Meta': {'object_name': 'Event'},
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'archive_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modified_user'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'mozillian': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Participant']", 'symmetrical': 'False'}),
'picture': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_picture'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['main.Picture']"}),
'pin': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'popcorn_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40', 'db_index': 'True'}),
'recruitmentmessage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.RecruitmentMessage']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'initiated'", 'max_length': '20', 'db_index': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Template']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'template_environment': ('airmozilla.main.fields.EnvironmentField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'transcript': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'upload': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event_upload'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['uploads.Upload']"})
},
u'main.eventassignment': {
'Meta': {'object_name': 'EventAssignment'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locations': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Location']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'main.eventhitstats': {
'Meta': {'object_name': 'EventHitStats'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'shortcode': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'total_hits': ('django.db.models.fields.IntegerField', [], {})
},
u'main.eventoldslug': {
'Meta': {'object_name': 'EventOldSlug'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215'})
},
u'main.eventrevision': {
'Meta': {'object_name': 'EventRevision'},
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
'recruitmentmessage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.RecruitmentMessage']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
u'main.eventtweet': {
'Meta': {'object_name': 'EventTweet'},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_placeholder': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'send_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'sent_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'tweet_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
u'main.location': {
'Meta': {'ordering': "['name']", 'object_name': 'Location'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'timezone': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'main.locationdefaultenvironment': {
'Meta': {'unique_together': "(('location', 'privacy', 'template'),)", 'object_name': 'LocationDefaultEnvironment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']"}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Template']"}),
'template_environment': ('airmozilla.main.fields.EnvironmentField', [], {})
},
u'main.participant': {
'Meta': {'object_name': 'Participant'},
'blog_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'clear_token': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'cleared': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '15', 'db_index': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'participant_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'irc': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'photo': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '65', 'blank': 'True'}),
'team': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'topic_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
},
u'main.picture': {
'Meta': {'object_name': 'Picture'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'picture_event'", 'null': 'True', 'to': u"orm['main.Event']"}),
'file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'height': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)', 'auto_now': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'size': ('django.db.models.fields.PositiveIntegerField', [], {}),
'width': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'main.recruitmentmessage': {
'Meta': {'ordering': "['text']", 'object_name': 'RecruitmentMessage'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)', 'auto_now': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'main.suggestedevent': {
'Meta': {'object_name': 'SuggestedEvent'},
'accepted': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'null': 'True', 'blank': 'True'}),
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'first_submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Participant']", 'symmetrical': 'False'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
'popcorn_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40'}),
'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'review_comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'created'", 'max_length': '40'}),
'submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'upcoming': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'upload': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'upload'", 'null': 'True', 'to': u"orm['uploads.Upload']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'main.suggestedeventcomment': {
'Meta': {'object_name': 'SuggestedEventComment'},
'comment': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'suggested_event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.SuggestedEvent']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'main.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'main.template': {
'Meta': {'ordering': "['name']", 'object_name': 'Template'},
'content': ('django.db.models.fields.TextField', [], {}),
'default_archive_template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'default_popcorn_template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.urlmatch': {
'Meta': {'object_name': 'URLMatch'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'string': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'use_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'main.urltransform': {
'Meta': {'object_name': 'URLTransform'},
'find': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'match': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.URLMatch']"}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'replace_with': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'main.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'contributor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'main.vidlysubmission': {
'Meta': {'object_name': 'VidlySubmission'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
'hd': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'submission_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 20, 0, 0)'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'token_protection': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'uploads.upload': {
'Meta': {'object_name': 'Upload'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event'", 'null': 'True', 'to': u"orm['main.Event']"}),
'file_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {}),
'suggested_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'suggested_event'", 'null': 'True', 'to': u"orm['main.SuggestedEvent']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '400'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
}
}
complete_apps = ['main']
|
{
"content_hash": "0ed9ab62f6b9006207ccab1b399e1f6a",
"timestamp": "",
"source": "github",
"line_count": 333,
"max_line_length": 209,
"avg_line_length": 85.72672672672672,
"alnum_prop": 0.5499001646407679,
"repo_name": "bugzPDX/airmozilla",
"id": "e10a840db7fa216592f87e961af0265bf1bde862",
"size": "28571",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "airmozilla/main/migrations/0050_auto__add_field_event_mozillian.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Brightscript",
"bytes": "67473"
},
{
"name": "CSS",
"bytes": "132588"
},
{
"name": "Groovy",
"bytes": "458"
},
{
"name": "HTML",
"bytes": "249557"
},
{
"name": "JavaScript",
"bytes": "541080"
},
{
"name": "Makefile",
"bytes": "11608"
},
{
"name": "Puppet",
"bytes": "6677"
},
{
"name": "Python",
"bytes": "2656213"
},
{
"name": "Shell",
"bytes": "8175"
},
{
"name": "Smarty",
"bytes": "1638"
}
],
"symlink_target": ""
}
|
import unittest
import urllib2
from flask.ext.testing import LiveServerTestCase, TestCase

from tmb import app as tmbapp, db
from tmb.models import User


class TestTMB(TestCase):

    def setUp(self):
        db.create_all()
        super(TestTMB, self).setUp()

    def tearDown(self):
        db.session.remove()
        db.drop_all()
        super(TestTMB, self).tearDown()

    def create_app(self):
        app = tmbapp
        app.config['TESTING'] = True
        app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///../test.db"
        return app

    def test_create_account(self):
        steamid = "au9a0ou9ea0"

        # Creating a user for a new steamid should leave exactly one user.
        u = User.get_or_create(steamid)
        db.session.commit()
        user_count = User.query.count()
        self.assertEqual(user_count, 1)

        # Asking for the same steamid again must not create a duplicate.
        u2 = User.get_or_create(steamid)
        db.session.commit()
        user_count2 = User.query.count()
        self.assertEqual(user_count2, 1)

        # A different steamid should add a second user.
        u3 = User.get_or_create("ah9oe0uh")
        db.session.commit()
        user_count3 = User.query.count()
        self.assertEqual(user_count3, 2)


if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "6fc82d529c885f5894346b90d42cd633",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 70,
"avg_line_length": 25.428571428571427,
"alnum_prop": 0.6027287319422151,
"repo_name": "TinyMultiplayerBot/TinyMultiplayerBot",
"id": "0310d0a40d0ff88cac9217fc8303c2529485b3c9",
"size": "1246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4262"
}
],
"symlink_target": ""
}
|